Commit a04c07a

[PHI] Remove enable unused var (#72684)
* remove 'enable_unused_var'-related code
* skip inserting an op or argument mapping if it already exists
* update unit tests
1 parent f8060f4 commit a04c07a
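The second bullet replaces hard failures on duplicate registration with an early return. Below is a minimal, self-contained sketch of that pattern; `ToyRegistry` and its `Insert` method are hypothetical stand-ins for illustration, not Paddle's actual `OpUtilsMap` API (the real change is in the `paddle/phi/core/compat/op_utils.h` diff further down).

```cpp
// Sketch of the "skip insert if already exists" pattern introduced by this
// commit: a duplicate registration is silently ignored instead of raising
// an AlreadyExists error via PADDLE_ENFORCE_EQ.
#include <iostream>
#include <string>
#include <unordered_map>

class ToyRegistry {  // hypothetical stand-in, not Paddle's OpUtilsMap
 public:
  void Insert(std::string op_type, std::string base_kernel_name) {
    if (map_.count(op_type)) {
      return;  // already registered: skip instead of erroring out
    }
    map_.emplace(std::move(op_type), std::move(base_kernel_name));
  }
  const std::unordered_map<std::string, std::string>& map() const {
    return map_;
  }

 private:
  std::unordered_map<std::string, std::string> map_;
};

int main() {
  ToyRegistry registry;
  registry.Insert("relu", "relu");
  registry.Insert("relu", "relu_v2");          // ignored, no error
  std::cout << registry.map().at("relu") << "\n";  // prints "relu"
  return 0;
}
```

The trade-off is that a later registration under the same key is silently dropped rather than reported, which is exactly the behavior change shown in the op_utils.h hunks below.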

File tree

8 files changed: +6, -90 lines

.github/workflows/Coverage.yml

Lines changed: 0 additions & 2 deletions
@@ -79,7 +79,6 @@ jobs:
       CCACHE_LIMIT_MULTIPLE: 0.8
       ON_INFER: "ON"
       PADDLE_CUDA_INSTALL_REQUIREMENTS: "ON"
-      FLAGS_enable_unused_var_check: 1
       GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
       GITHUB_API_TOKEN: ${{ secrets.GITHUB_TOKEN }}
       UT_RUN_TYPE_SETTING: WITHOUT_HYBRID
@@ -123,7 +122,6 @@ jobs:
          -e CCACHE_LIMIT_MULTIPLE \
          -e ON_INFER \
          -e PADDLE_CUDA_INSTALL_REQUIREMENTS \
-         -e FLAGS_enable_unused_var_check \
          -e GITHUB_TOKEN \
          -e GITHUB_API_TOKEN \
          -e UT_RUN_TYPE_SETTING \

paddle/fluid/framework/operator.cc

Lines changed: 0 additions & 18 deletions
@@ -63,7 +63,6 @@ class DenseTensor;
 
 COMMON_DECLARE_bool(benchmark);
 COMMON_DECLARE_bool(check_nan_inf);
-PD_DECLARE_bool(enable_unused_var_check);
 COMMON_DECLARE_bool(run_kp_kernel);
 PHI_DECLARE_bool(enable_host_event_recorder_hook);
 
@@ -1153,8 +1152,6 @@ bool ExecutionContext::HasOutput(const std::string& name) const {
 }
 
 const Variable* ExecutionContext::InputVar(const std::string& name) const {
-  LogVarUsageIfUnusedVarCheckEnabled(name);
-
   auto it = ctx_.inputs.find(name);
   if (it == ctx_.inputs.end()) return nullptr;
 
@@ -1185,8 +1182,6 @@ Variable* ExecutionContext::OutputVar(const std::string& name) const {
 template <>
 const std::vector<const phi::DenseTensor*>
 ExecutionContext::MultiInput<phi::DenseTensor>(const std::string& name) const {
-  LogVarUsageIfUnusedVarCheckEnabled(name);
-
   auto vars = MultiInputVar(name);
   if (vars.empty()) {
     return {};
@@ -2046,10 +2041,6 @@ void OperatorWithKernel::RunImpl(const Scope& scope,
         Type(), Attrs(), infer_shape_ctx, *runtime_ctx, Id());
   }
 
-  if (FLAGS_enable_unused_var_check) {
-    GetThreadLocalUsedVarNameSet()->clear();
-  }
-
   // TODO(panyx0718): ExecutionContext should only depend on RuntimeContext
   // not Scope. Imperative mode only pass inputs and get outputs.
   {
@@ -2121,15 +2112,6 @@ void OperatorWithKernel::RunImpl(const Scope& scope,
     HandleComplexGradToRealGrad(scope, runtime_ctx);
   }
 
-  if (FLAGS_enable_unused_var_check) {
-    // skip op that uses onednn because it has different memory reuse strategy.
-    // use attr here because some GradMakers (like ActivationGradOpMaker) add
-    // input when use_mkldnn=true;
-    if (!(HasAttr("use_mkldnn") && Attr<bool>("use_mkldnn"))) {
-      CheckUnusedVar(*this, scope);
-    }
-  }
-
   /*For profiling/benchmark only*/
   if (FLAGS_benchmark) {
     dev_ctx->Wait();

paddle/fluid/framework/operator.h

Lines changed: 0 additions & 4 deletions
@@ -494,8 +494,6 @@ class ExecutionContext : public phi::KernelContext {
 
   virtual const std::vector<Variable*> MultiInputVar(
       const std::string& name) const {
-    LogVarUsageIfUnusedVarCheckEnabled(name);
-
     auto it = ctx_.inputs.find(name);
     if (it == ctx_.inputs.end()) {
       return {};
@@ -536,8 +534,6 @@ class ExecutionContext : public phi::KernelContext {
 
   template <typename T>
   const std::vector<const T*> MultiInput(const std::string& name) const {
-    LogVarUsageIfUnusedVarCheckEnabled(name);
-
     auto vars = MultiInputVar(name);
     if (vars.size() == 0) {
       return {};

paddle/fluid/framework/unused_var_check.cc

Lines changed: 0 additions & 12 deletions
@@ -23,11 +23,6 @@ limitations under the License. */
 #include "paddle/fluid/framework/op_info.h"
 #include "paddle/fluid/framework/operator.h"
 #include "paddle/fluid/platform/enforce.h"
-PHI_DEFINE_EXPORTED_bool(
-    enable_unused_var_check,
-    false,
-    "Checking whether operator contains unused inputs, "
-    "especially for grad operator. It should be in unittest.");
 
 namespace paddle::framework {
 
@@ -36,13 +31,6 @@ std::unordered_set<std::string> *GetThreadLocalUsedVarNameSet() {
   return &used_var_name_set;
 }
 
-void LogVarUsageIfUnusedVarCheckEnabled(const std::string &name) {
-  if (FLAGS_enable_unused_var_check) {
-    VLOG(6) << "Variable used:" << name;
-    GetThreadLocalUsedVarNameSet()->insert(name);
-  }
-}
-
 static const std::unordered_set<std::string> &GetOpWithUnusedVarAllowSet() {
   // NOTE(zhiqiu): Currently, there are some operators which involves unused
   // inputs and cannot be removed from the allow_list below.

paddle/fluid/framework/unused_var_check.h

Lines changed: 0 additions & 1 deletion
@@ -29,7 +29,6 @@ class Scope;
 
 std::unordered_set<std::string>* GetThreadLocalUsedVarNameSet();
 
-void LogVarUsageIfUnusedVarCheckEnabled(const std::string& name);
 void CheckUnusedVar(const OperatorBase& op, const Scope& scope);
 
 }  // namespace framework

paddle/phi/core/compat/op_utils.h

Lines changed: 6 additions & 13 deletions
@@ -82,13 +82,9 @@ class OpUtilsMap {
     fluid_op_to_phi_kernel_.insert({op_type, base_kernel_name});
   }
   void InsertFluidOplName(std::string op_type, std::string base_kernel_name) {
-    PADDLE_ENFORCE_EQ(
-        phi_kernel_to_fluid_op_.count(base_kernel_name),
-        0UL,
-        common::errors::AlreadyExists(
-            "Operator (%s)'s kernel name (%s) has been registered.",
-            op_type,
-            base_kernel_name));
+    if (phi_kernel_to_fluid_op_.count(base_kernel_name)) {
+      return;
+    }
     phi_kernel_to_fluid_op_.insert({base_kernel_name, op_type});
   }
 
@@ -97,12 +93,9 @@ class OpUtilsMap {
   }
 
   void InsertArgumentMappingFn(std::string op_type, ArgumentMappingFn fn) {
-    PADDLE_ENFORCE_EQ(
-        arg_mapping_fn_map_.count(op_type),
-        0UL,
-        common::errors::AlreadyExists(
-            "Operator (%s)'s argument mapping function has been registered.",
-            op_type));
+    if (arg_mapping_fn_map_.count(op_type)) {
+      return;
+    }
     arg_mapping_fn_map_.insert({std::move(op_type), std::move(fn)});
   }
 

paddle/scripts/paddle_build.sh

Lines changed: 0 additions & 9 deletions
@@ -3355,12 +3355,6 @@ function nv_test() {
 }
 
 
-function enable_unused_var_check() {
-    # NOTE(zhiqiu): Set FLAGS_enable_unused_var_check=1 here to enable unused_var_check,
-    # which checks if an operator has unused input variable(s).
-    # Currently, use it in coverage CI job.
-    export FLAGS_enable_unused_var_check=1
-}
 function check_coverage_added_ut() {
     # NOTE(risemeup1):The step of checking added test can be placed on the cpu machine to save gpu resources
     bash $PADDLE_ROOT/tools/check_added_ut.sh
@@ -4829,21 +4823,18 @@ function main() {
       cicheck)
         cmake_gen ${PYTHON_ABI:-""}
         build ${parallel_number}
-        enable_unused_var_check
         parallel_test
         ;;
       cicheck_coverage)
         check_diff_file_for_coverage
        run_setup ${PYTHON_ABI:-""} install ${parallel_number}
-        enable_unused_var_check
         parallel_test
         check_coverage
         ;;
       cpu_cicheck_coverage)
         check_diff_file_for_coverage
         export ON_INFER=ON PADDLE_CUDA_INSTALL_REQUIREMENTS=ON
         run_setup ${PYTHON_ABI:-""} bdist_wheel ${parallel_number}
-        enable_unused_var_check
         check_coverage_added_ut
         check_coverage_build
         clean_build_files

test/cpp/fluid/framework/operator_test.cc

Lines changed: 0 additions & 31 deletions
@@ -19,8 +19,6 @@ limitations under the License. */
 #include "paddle/fluid/framework/op_registry.h"
 #include "paddle/fluid/platform/init.h"
 
-PD_DECLARE_bool(enable_unused_var_check);
-
 namespace paddle {
 namespace framework {
 
@@ -612,35 +610,7 @@ REGISTER_OP_WITHOUT_GRADIENT(
 REGISTER_OP_CPU_KERNEL(op_without_unused_var,
                        paddle::framework::OpWithoutUnusedVarKernelTest<float>);
 
-// test with single input
-TEST(OpWithUnusedVar, all) {
-  // enable the unused_var_check
-  FLAGS_enable_unused_var_check = true;
-  paddle::framework::InitDevices();
-  paddle::framework::proto::OpDesc op_desc;
-  op_desc.set_type("op_with_unused_var");
-  BuildVar("X", {"X"}, op_desc.add_inputs());
-  BuildVar("Y", {"Y"}, op_desc.add_outputs());
-
-  phi::CPUPlace cpu_place;
-  paddle::framework::Scope scope;
-  auto* x = scope.Var("X")->GetMutable<phi::DenseTensor>();
-  auto* y = scope.Var("Y")->GetMutable<phi::DenseTensor>();
-  x->Resize({32, 64});
-  y->Resize({32, 64});
-  x->mutable_data<float>(cpu_place);
-  y->mutable_data<float>(cpu_place);
-
-  auto op = paddle::framework::OpRegistry::CreateOp(op_desc);
-  // should throw exception
-  ASSERT_THROW(op->Run(scope, cpu_place), paddle::platform::EnforceNotMet);
-  FLAGS_enable_unused_var_check = false;
-}
-
 TEST(OpWithoutUnusedVar, all) {
-  // enable the unused_var_check
-  FLAGS_enable_unused_var_check = true;
-
   paddle::framework::InitDevices();
   paddle::framework::proto::OpDesc op_desc;
   op_desc.set_type("op_without_unused_var");
@@ -659,5 +629,4 @@ TEST(OpWithoutUnusedVar, all) {
   auto op = paddle::framework::OpRegistry::CreateOp(op_desc);
   // should not throw exception
   ASSERT_NO_THROW(op->Run(scope, cpu_place));
-  FLAGS_enable_unused_var_check = false;
 }
