fix single card 8 unittests in new executor (#37957)
* fix single card 8 unittests in new executor

* fix

* fix
2742195759 authored Dec 13, 2021
1 parent 10d9ab4 commit 9a4eec9
Showing 5 changed files with 35 additions and 4 deletions.
@@ -69,11 +69,15 @@ void InterpreterCoreGarbageCollector::Add(paddle::framework::Variable* var,
} else if (var->IsType<
operators::reader::
OrderedMultiDeviceLoDTensorBlockingQueueHolder>()) {
    // var->Clear();  // TODO(xiongkun03): can we clear directly? Why must we
    // use the Add interface?
    // TODO(xiongkun03): in the old executor, this type of variable did not
    // support eager deletion, so we just leave it alone here.
} else if (var->IsType<LoDRankTable>()) {
    // TODO(xiongkun03): in the old executor, this type of variable did not
    // support eager deletion, so we just leave it alone here.
} else if (var->IsType<SelectedRows>()) {
Add(var->GetMutable<SelectedRows>()->mutable_value()->MoveMemoryHolder(),
event, ctx);
var->GetMutable<SelectedRows>()->mutable_rows()->clear();
} else if (var->IsType<LoDTensorArray>()) {
auto* tensor_arr = var->GetMutable<LoDTensorArray>();
for (auto& t : *tensor_arr) {
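For readers unfamiliar with the eager-deletion mechanism the TODOs refer to: `Add` does not free a buffer immediately; it takes ownership of the memory holder and releases it only once the device event recorded for the buffer's last use has completed. A rough, paddle-agnostic sketch of that idea (class and method names here are illustrative, not paddle APIs):

# Illustrative sketch only: event-deferred freeing, loosely mirroring the
# C++ Add path above. `event.query()` is assumed to return True once the
# recorded device work has completed (CUDA-event-like semantics).
class DeferredGC:
    def __init__(self):
        self._pending = []  # (event, buffer) pairs not yet safe to free

    def add(self, buffer, event):
        # Take ownership; the buffer stays alive until `event` completes.
        self._pending.append((event, buffer))

    def poll(self):
        # Keep only buffers whose events are still outstanding; the rest
        # are dropped here, i.e. their memory is finally released.
        self._pending = [(e, b) for e, b in self._pending if not e.query()]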
12 changes: 12 additions & 0 deletions paddle/fluid/pybind/pybind.cc
@@ -1437,6 +1437,18 @@ All parameter, weight, gradient are variables in Paddle.
out (core.Variable|None): the found variable or None.
)DOC",
py::return_value_policy::reference)
.def("erase", &Scope::EraseVars, py::arg("names"),
R"DOC(
             Erase the variables with the given names from the current scope.
             Args:
                 names (list[str]): the names of the variables to be erased.
             Returns:
                 None
)DOC",
py::return_value_policy::reference)
.def("new_scope", [](Scope &self) -> Scope * { return &self.NewScope(); },
R"DOC(
Create a new sub-scope of the current scope.
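A minimal usage sketch for the `erase` binding defined above (assumes a paddle build that contains this change; the variable name `tmp_act` is made up):

import paddle.fluid as fluid

scope = fluid.global_scope()
scope.var("tmp_act")                      # create a variable in the scope
scope.erase(["tmp_act"])                  # new binding: erase by name
assert scope.find_var("tmp_act") is None  # the variable is gone

The quantization change below uses this call to drop calibration activations from the scope once they are no longer needed.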
@@ -552,9 +552,12 @@ def _reset_activation_persistable(self):
'''
Reset activations to be not persistable.
'''
to_erase = []
for var in self._program.list_vars():
if var.name in self._quantized_act_var_name:
var.persistable = False
to_erase.append(var.name)
self._scope.erase(to_erase)

def _sampling(self):
'''
12 changes: 11 additions & 1 deletion python/paddle/fluid/executor.py
@@ -401,7 +401,17 @@ def _is_enable_standalone_executor():


def _get_strong_program_cache_key(program, feed, fetch_list):
return str(id(program)) + _get_program_cache_key(feed, fetch_list)
    # NOTE(xiongkun): id(program) may be duplicated (CPython can reuse the id
    # of a collected object), so block variable names are added to the key.
    def _get_varname_from_block(block):
        return "\n".join(block.vars.keys())

inner_program = program._program if isinstance(
program, compiler.CompiledProgram) else program
return _get_varname_from_block(inner_program.blocks[0]) + str(id(
program)) + _get_program_cache_key(feed, fetch_list)
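The note above is worth spelling out: CPython may hand a new object the id of a previously collected one, so `id(program)` alone can collide for two distinct programs that never coexist. A stand-alone demonstration (the `Program` class here is just a stand-in, not paddle's):

class Program:  # stand-in class; any object type shows the same id reuse
    pass

a = Program()
stale_key = id(a)
del a                      # the object is collected, its id becomes free
b = Program()
print(id(b) == stale_key)  # frequently True in CPython: the id was reused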


def _get_program_cache_key(feed, fetch_list):
@@ -316,7 +316,7 @@ def make_all_inputs(input_fields):
# how many batches we use
batch_num = 5

np.random.seed = 90
np.random.seed(90)
src_word_np = np.arange(1, TrainTaskConfig.batch_size * seq_len + 1).reshape(
[TrainTaskConfig.batch_size, seq_len]).astype('int64')
src_pos_np = np.random.randint(
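The one-line fix above is easy to miss: `np.random.seed = 90` rebinds the module attribute to an int and never seeds the generator, leaving the test non-deterministic. A quick demonstration:

import numpy as np

orig = np.random.seed            # keep a reference to the real function
np.random.seed = 90              # buggy form: rebinds the name, seeds nothing
print(callable(np.random.seed))  # False -- the function has been shadowed
np.random.seed = orig            # restore the attribute
np.random.seed(90)               # fixed form: actually seeds the global RNG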
@@ -951,6 +951,8 @@ def transformer_sort_gradient_float32(self, is_sparse):

with guard():
fluid.set_flags({'FLAGS_sort_sum_gradient': True})
            # NOTE(xiongkun03): in the new executor, the inplace strategy is
            # enabled by default; it slightly changes the result of the sum
            # op, so we disable inplace here.
fluid.set_flags({'FLAGS_new_executor_use_inplace': False})
paddle.seed(seed)
paddle.framework.random._manual_program_seed(seed)
transformer = TransFormer(
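The NOTE above rests on a general floating-point fact: addition is not associative, so an inplace strategy that changes the accumulation order of a sum can produce slightly different results. A generic (non-paddle) illustration:

a, b, c = 1e16, -1e16, 1.0
print((a + b) + c)  # 1.0
print(a + (b + c))  # 0.0 -- the 1.0 is absorbed when added to -1e16 first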
