From 0a279c24e26e12ef550d6257f431d7d3737b5831 Mon Sep 17 00:00:00 2001
From: HydrogenSulfate <490868991@qq.com>
Date: Tue, 13 May 2025 14:07:45 +0800
Subject: [PATCH 1/3] remove 'enable_unused_var' related code

---
 .github/workflows/Coverage.yml             |  2 --
 paddle/fluid/framework/operator.cc         | 18 ------------------
 paddle/fluid/framework/operator.h          |  4 ----
 paddle/fluid/framework/unused_var_check.cc | 12 ------------
 paddle/fluid/framework/unused_var_check.h  |  1 -
 paddle/scripts/paddle_build.sh             |  9 ---------
 test/cpp/fluid/framework/operator_test.cc  |  9 ---------
 7 files changed, 55 deletions(-)

diff --git a/.github/workflows/Coverage.yml b/.github/workflows/Coverage.yml
index e3393a868ae0a2..6b90f0e36f5ad7 100644
--- a/.github/workflows/Coverage.yml
+++ b/.github/workflows/Coverage.yml
@@ -79,7 +79,6 @@ jobs:
       CCACHE_LIMIT_MULTIPLE: 0.8
       ON_INFER: "ON"
       PADDLE_CUDA_INSTALL_REQUIREMENTS: "ON"
-      FLAGS_enable_unused_var_check: 1
       GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
       GITHUB_API_TOKEN: ${{ secrets.GITHUB_TOKEN }}
       UT_RUN_TYPE_SETTING: WITHOUT_HYBRID
@@ -124,7 +123,6 @@ jobs:
           -e CCACHE_LIMIT_MULTIPLE \
           -e ON_INFER \
           -e PADDLE_CUDA_INSTALL_REQUIREMENTS \
-          -e FLAGS_enable_unused_var_check \
           -e GITHUB_TOKEN \
           -e GITHUB_API_TOKEN \
           -e UT_RUN_TYPE_SETTING \
diff --git a/paddle/fluid/framework/operator.cc b/paddle/fluid/framework/operator.cc
index c22b513f491c02..01e6b7e3f7a88f 100644
--- a/paddle/fluid/framework/operator.cc
+++ b/paddle/fluid/framework/operator.cc
@@ -63,7 +63,6 @@ class DenseTensor;
 
 COMMON_DECLARE_bool(benchmark);
 COMMON_DECLARE_bool(check_nan_inf);
-PD_DECLARE_bool(enable_unused_var_check);
 COMMON_DECLARE_bool(run_kp_kernel);
 PHI_DECLARE_bool(enable_host_event_recorder_hook);
 
@@ -1153,8 +1152,6 @@ bool ExecutionContext::HasOutput(const std::string& name) const {
 }
 
 const Variable* ExecutionContext::InputVar(const std::string& name) const {
-  LogVarUsageIfUnusedVarCheckEnabled(name);
-
   auto it = ctx_.inputs.find(name);
   if (it == ctx_.inputs.end()) return nullptr;
 
@@ -1185,8 +1182,6 @@ Variable* ExecutionContext::OutputVar(const std::string& name) const {
 template <>
 const std::vector<phi::DenseTensor> ExecutionContext::MultiInput<phi::DenseTensor>(
     const std::string& name) const {
-  LogVarUsageIfUnusedVarCheckEnabled(name);
-
   auto vars = MultiInputVar(name);
   if (vars.empty()) {
     return {};
@@ -2046,10 +2041,6 @@ void OperatorWithKernel::RunImpl(const Scope& scope,
         Type(), Attrs(), infer_shape_ctx, *runtime_ctx, Id());
   }
 
-  if (FLAGS_enable_unused_var_check) {
-    GetThreadLocalUsedVarNameSet()->clear();
-  }
-
   // TODO(panyx0718): ExecutionContext should only depend on RuntimeContext
   // not Scope. Imperative mode only pass inputs and get outputs.
   {
@@ -2121,15 +2112,6 @@ void OperatorWithKernel::RunImpl(const Scope& scope,
     HandleComplexGradToRealGrad(scope, runtime_ctx);
   }
 
-  if (FLAGS_enable_unused_var_check) {
-    // skip op that uses onednn because it has different memory reuse strategy.
-    // use attr here because some GradMakers (like ActivationGradOpMaker) add
-    // input when use_mkldnn=true;
-    if (!(HasAttr("use_mkldnn") && Attr<bool>("use_mkldnn"))) {
-      CheckUnusedVar(*this, scope);
-    }
-  }
-
   /*For profiling/benchmark only*/
   if (FLAGS_benchmark) {
     dev_ctx->Wait();
diff --git a/paddle/fluid/framework/operator.h b/paddle/fluid/framework/operator.h
index e4b45696bc18a1..de86332ccbbddb 100644
--- a/paddle/fluid/framework/operator.h
+++ b/paddle/fluid/framework/operator.h
@@ -494,8 +494,6 @@ class ExecutionContext : public phi::KernelContext {
   virtual const std::vector<Variable*> MultiInputVar(
       const std::string& name) const {
-    LogVarUsageIfUnusedVarCheckEnabled(name);
-
     auto it = ctx_.inputs.find(name);
     if (it == ctx_.inputs.end()) {
       return {};
     }
@@ -536,8 +534,6 @@ class ExecutionContext : public phi::KernelContext {
 
   template <typename T>
   const std::vector<const T*> MultiInput(const std::string& name) const {
-    LogVarUsageIfUnusedVarCheckEnabled(name);
-
     auto vars = MultiInputVar(name);
     if (vars.size() == 0) {
       return {};
diff --git a/paddle/fluid/framework/unused_var_check.cc b/paddle/fluid/framework/unused_var_check.cc
index 8ce7712dbce811..cdb18c6d3abf24 100644
--- a/paddle/fluid/framework/unused_var_check.cc
+++ b/paddle/fluid/framework/unused_var_check.cc
@@ -23,11 +23,6 @@ limitations under the License. */
 #include "paddle/fluid/framework/op_info.h"
 #include "paddle/fluid/framework/operator.h"
 #include "paddle/fluid/platform/enforce.h"
-PHI_DEFINE_EXPORTED_bool(
-    enable_unused_var_check,
-    false,
-    "Checking whether operator contains unused inputs, "
-    "especially for grad operator. It should be in unittest.");
 
 namespace paddle::framework {
 
@@ -36,13 +31,6 @@ std::unordered_set<std::string> *GetThreadLocalUsedVarNameSet() {
   return &used_var_name_set;
 }
 
-void LogVarUsageIfUnusedVarCheckEnabled(const std::string &name) {
-  if (FLAGS_enable_unused_var_check) {
-    VLOG(6) << "Variable used:" << name;
-    GetThreadLocalUsedVarNameSet()->insert(name);
-  }
-}
-
 static const std::unordered_set<std::string> &GetOpWithUnusedVarAllowSet() {
   // NOTE(zhiqiu): Currently, there are some operators which involves unused
   // inputs and cannot be removed from the allow_list below.
diff --git a/paddle/fluid/framework/unused_var_check.h b/paddle/fluid/framework/unused_var_check.h
index f9289d8cd62fc8..4a9132748d6f19 100644
--- a/paddle/fluid/framework/unused_var_check.h
+++ b/paddle/fluid/framework/unused_var_check.h
@@ -29,7 +29,6 @@ class Scope;
 
 std::unordered_set<std::string>* GetThreadLocalUsedVarNameSet();
 
-void LogVarUsageIfUnusedVarCheckEnabled(const std::string& name);
 void CheckUnusedVar(const OperatorBase& op, const Scope& scope);
 
 }  // namespace framework
diff --git a/paddle/scripts/paddle_build.sh b/paddle/scripts/paddle_build.sh
index d2b0d61222fa18..30290f02e25716 100644
--- a/paddle/scripts/paddle_build.sh
+++ b/paddle/scripts/paddle_build.sh
@@ -3355,12 +3355,6 @@ function nv_test() {
 }
 
-function enable_unused_var_check() {
-    # NOTE(zhiqiu): Set FLAGS_enable_unused_var_check=1 here to enable unused_var_check,
-    # which checks if an operator has unused input variable(s).
-    # Currently, use it in coverage CI job.
-    export FLAGS_enable_unused_var_check=1
-}
 function check_coverage_added_ut() {
     # NOTE(risemeup1):The step of checking added test can be placed on the cpu machine to save gpu resources
     bash $PADDLE_ROOT/tools/check_added_ut.sh
@@ -4829,13 +4823,11 @@ function main() {
       cicheck)
         cmake_gen ${PYTHON_ABI:-""}
         build ${parallel_number}
-        enable_unused_var_check
         parallel_test
         ;;
       cicheck_coverage)
         check_diff_file_for_coverage
         run_setup ${PYTHON_ABI:-""} install ${parallel_number}
-        enable_unused_var_check
         parallel_test
         check_coverage
         ;;
@@ -4843,7 +4835,6 @@
         check_diff_file_for_coverage
         export ON_INFER=ON PADDLE_CUDA_INSTALL_REQUIREMENTS=ON
         run_setup ${PYTHON_ABI:-""} bdist_wheel ${parallel_number}
-        enable_unused_var_check
         check_coverage_added_ut
         check_coverage_build
         clean_build_files
diff --git a/test/cpp/fluid/framework/operator_test.cc b/test/cpp/fluid/framework/operator_test.cc
index 4b332826b894a5..2de12d885843e0 100644
--- a/test/cpp/fluid/framework/operator_test.cc
+++ b/test/cpp/fluid/framework/operator_test.cc
@@ -19,8 +19,6 @@ limitations under the License. */
 #include "paddle/fluid/framework/op_registry.h"
 #include "paddle/fluid/platform/init.h"
 
-PD_DECLARE_bool(enable_unused_var_check);
-
 namespace paddle {
 namespace framework {
 
@@ -614,8 +612,6 @@ REGISTER_OP_CPU_KERNEL(op_without_unused_var,
 
 // test with single input
 TEST(OpWithUnusedVar, all) {
-  // enable the unused_var_check
-  FLAGS_enable_unused_var_check = true;
   paddle::framework::InitDevices();
   paddle::framework::proto::OpDesc op_desc;
   op_desc.set_type("op_with_unused_var");
@@ -634,13 +630,9 @@ TEST(OpWithUnusedVar, all) {
   auto op = paddle::framework::OpRegistry::CreateOp(op_desc);
   // should throw exception
   ASSERT_THROW(op->Run(scope, cpu_place), paddle::platform::EnforceNotMet);
-  FLAGS_enable_unused_var_check = false;
 }
 
 TEST(OpWithoutUnusedVar, all) {
-  // enable the unused_var_check
-  FLAGS_enable_unused_var_check = true;
-
   paddle::framework::InitDevices();
   paddle::framework::proto::OpDesc op_desc;
   op_desc.set_type("op_without_unused_var");
@@ -659,5 +651,4 @@ TEST(OpWithoutUnusedVar, all) {
   auto op = paddle::framework::OpRegistry::CreateOp(op_desc);
   // should not throw exception
   ASSERT_NO_THROW(op->Run(scope, cpu_place));
-  FLAGS_enable_unused_var_check = false;
 }

From 4011bfab416e9e3b190c9e87cc7e3afb8a281ade Mon Sep 17 00:00:00 2001
From: HydrogenSulfate <490868991@qq.com>
Date: Tue, 13 May 2025 14:08:56 +0800
Subject: [PATCH 2/3] skip insert op or arg if already exists

---
 paddle/phi/core/compat/op_utils.h | 19 ++++++-------------
 1 file changed, 6 insertions(+), 13 deletions(-)

diff --git a/paddle/phi/core/compat/op_utils.h b/paddle/phi/core/compat/op_utils.h
index 9f0d7dd96d00d1..ec5cfb240e628b 100644
--- a/paddle/phi/core/compat/op_utils.h
+++ b/paddle/phi/core/compat/op_utils.h
@@ -82,13 +82,9 @@ class OpUtilsMap {
     fluid_op_to_phi_kernel_.insert({op_type, base_kernel_name});
   }
 
   void InsertFluidOplName(std::string op_type, std::string base_kernel_name) {
-    PADDLE_ENFORCE_EQ(
-        phi_kernel_to_fluid_op_.count(base_kernel_name),
-        0UL,
-        common::errors::AlreadyExists(
-            "Operator (%s)'s kernel name (%s) has been registered.",
-            op_type,
-            base_kernel_name));
+    if (phi_kernel_to_fluid_op_.count(base_kernel_name)) {
+      return;
+    }
     phi_kernel_to_fluid_op_.insert({base_kernel_name, op_type});
   }
 
@@ -97,12 +93,9 @@ class OpUtilsMap {
   }
 
   void InsertArgumentMappingFn(std::string op_type, ArgumentMappingFn fn) {
-    PADDLE_ENFORCE_EQ(
-        arg_mapping_fn_map_.count(op_type),
-        0UL,
-        common::errors::AlreadyExists(
-            "Operator (%s)'s argument mapping function has been registered.",
-            op_type));
+    if (arg_mapping_fn_map_.count(op_type)) {
+      return;
+    }
     arg_mapping_fn_map_.insert({std::move(op_type), std::move(fn)});
   }

From 9d19d3ee1593ae4edfcd20bb0e2d3d7cf4b8b88e Mon Sep 17 00:00:00 2001
From: HydrogenSulfate <490868991@qq.com>
Date: Tue, 13 May 2025 16:42:10 +0800
Subject: [PATCH 3/3] update UT

---
 test/cpp/fluid/framework/operator_test.cc | 22 ----------------------
 1 file changed, 22 deletions(-)

diff --git a/test/cpp/fluid/framework/operator_test.cc b/test/cpp/fluid/framework/operator_test.cc
index 2de12d885843e0..4e54fd4203bdef 100644
--- a/test/cpp/fluid/framework/operator_test.cc
+++ b/test/cpp/fluid/framework/operator_test.cc
@@ -610,28 +610,6 @@ REGISTER_OP_WITHOUT_GRADIENT(
 REGISTER_OP_CPU_KERNEL(op_without_unused_var,
                        paddle::framework::OpWithoutUnusedVarKernelTest);
 
-// test with single input
-TEST(OpWithUnusedVar, all) {
-  paddle::framework::InitDevices();
-  paddle::framework::proto::OpDesc op_desc;
-  op_desc.set_type("op_with_unused_var");
-  BuildVar("X", {"X"}, op_desc.add_inputs());
-  BuildVar("Y", {"Y"}, op_desc.add_outputs());
-
-  phi::CPUPlace cpu_place;
-  paddle::framework::Scope scope;
-  auto* x = scope.Var("X")->GetMutable<phi::DenseTensor>();
-  auto* y = scope.Var("Y")->GetMutable<phi::DenseTensor>();
-  x->Resize({32, 64});
-  y->Resize({32, 64});
-  x->mutable_data<float>(cpu_place);
-  y->mutable_data<float>(cpu_place);
-
-  auto op = paddle::framework::OpRegistry::CreateOp(op_desc);
-  // should throw exception
-  ASSERT_THROW(op->Run(scope, cpu_place), paddle::platform::EnforceNotMet);
-}
-
 TEST(OpWithoutUnusedVar, all) {
   paddle::framework::InitDevices();
   paddle::framework::proto::OpDesc op_desc;
   op_desc.set_type("op_without_unused_var");