From 3fbcb5180c289ea2f465b0209f615b7e9cd74e9e Mon Sep 17 00:00:00 2001
From: zxcd <228587199@qq.com>
Date: Tue, 27 Feb 2024 03:55:29 +0000
Subject: [PATCH] cherry-pick: reduce logging for type promotion.

---
 .../manual/eager_manual/forwards/multiply_fwd_func.cc | 10 ++++++----
 .../eager/auto_code_generator/generator/eager_gen.py  |  2 +-
 python/paddle/base/layers/math_op_patch.py            |  6 ++++--
 3 files changed, 11 insertions(+), 7 deletions(-)

diff --git a/paddle/fluid/eager/api/manual/eager_manual/forwards/multiply_fwd_func.cc b/paddle/fluid/eager/api/manual/eager_manual/forwards/multiply_fwd_func.cc
index 2bd9213cae610..47509d025722d 100644
--- a/paddle/fluid/eager/api/manual/eager_manual/forwards/multiply_fwd_func.cc
+++ b/paddle/fluid/eager/api/manual/eager_manual/forwards/multiply_fwd_func.cc
@@ -61,8 +61,9 @@ paddle::Tensor multiply_ad_func(const paddle::Tensor& x,
   // Type promotion Logic
   if (phi::NeedTypePromotion(x.dtype(), y.dtype())) {
     VLOG(5) << "got different data type, run type protmotion automatically.";
-    LOG(WARNING) << "got different data type, run type protmotion "
-                    "automatically, this may cause data type been changed.";
+    LOG_FIRST_N(WARNING, 1)
+        << "got different data type, run type promotion "
+           "automatically, this may cause the data type to be changed.";
     auto op_name = phi::TransToFluidOpName("multiply");
     auto promotion_type = phi::GetPromoteDtype(op_name, x.dtype(), y.dtype());
@@ -407,8 +408,9 @@ paddle::Tensor multiply_ad_func(const paddle::Tensor& x,
   // Type promotion Logic
   if (phi::NeedTypePromotion(x.dtype(), y.dtype())) {
     VLOG(5) << "got different data type, run type protmotion automatically.";
-    LOG(WARNING) << "got different data type, run type protmotion "
-                    "automatically, this may cause data type been changed.";
+    LOG_FIRST_N(WARNING, 1)
+        << "got different data type, run type promotion "
+           "automatically, this may cause the data type to be changed.";
     auto op_name = phi::TransToFluidOpName("multiply");
     auto promotion_type = phi::GetPromoteDtype(op_name, x.dtype(), y.dtype());
diff --git a/paddle/fluid/eager/auto_code_generator/generator/eager_gen.py b/paddle/fluid/eager/auto_code_generator/generator/eager_gen.py
index 2a96fddccbce7..75d6cb94c6b5f 100644
--- a/paddle/fluid/eager/auto_code_generator/generator/eager_gen.py
+++ b/paddle/fluid/eager/auto_code_generator/generator/eager_gen.py
@@ -528,7 +528,7 @@ class {} : public egr::GradNodeBase {{
 TYPE_PROMOTION_LOGIC_TEMPLATE = """
   if (phi::NeedTypePromotion({x}.dtype(), {y}.dtype())) {{
     VLOG(5) << "got different data type, run type protmotion automatically.";
-    LOG(WARNING) << "got different data type, run type protmotion automatically, this may cause data type been changed.";
+    LOG_FIRST_N(WARNING, 1) << "got different data type, run type promotion automatically, this may cause the data type to be changed.";
     {op_name}
     auto promotion_type = phi::GetPromoteDtype(op_name, {x}.dtype(), {y}.dtype());
diff --git a/python/paddle/base/layers/math_op_patch.py b/python/paddle/base/layers/math_op_patch.py
index bf1d737970327..f3ba8aa5e197e 100644
--- a/python/paddle/base/layers/math_op_patch.py
+++ b/python/paddle/base/layers/math_op_patch.py
@@ -538,8 +538,10 @@ def __impl__(self, other_var):
                         op_type, lhs_dtype, rhs_dtype
                     )
                     warnings.warn(
-                        f"The input dtypes of OP {op_type} are {lhs_dtype} and {rhs_dtype}, "
-                        "the output will be auto-promoted to {common_dtype}"
+                        f"The input dtypes of OP {op_type} are {lhs_dtype} and {rhs_dtype}, the output will be auto-promoted to {common_dtype}"
+                    )
+                    warnings.filterwarnings(
+                        "ignore", message="The input dtypes of OP"
                     )
                     if rhs_dtype != common_dtype:
                         other_var = astype(other_var, common_dtype)