
Commit aef9e81: Fix lint

ZhennanQin committed Jul 16, 2019
Parent: 571c7c2

Showing 2 changed files with 71 additions and 59 deletions.
16 changes: 9 additions & 7 deletions src/executor/graph_executor.cc
```diff
@@ -1924,6 +1924,7 @@ Executor *Executor::SimpleBind(nnvm::Symbol symbol,
                                std::unordered_map<std::string, NDArray>* shared_buffer,
                                Executor* shared_exec) {
   auto exec = new exec::GraphExecutor();
+  bool init = false;
   if (!exec->subgraph_property().empty()) {
     static bool verbose = dmlc::GetEnv("MXNET_SUBGRAPH_VERBOSE", false);
     const auto& backend_name = exec->subgraph_property();
@@ -1937,19 +1938,18 @@ Executor *Executor::SimpleBind(nnvm::Symbol symbol,
       std::vector<NDArray> tmp_in_args;
       std::vector<NDArray> tmp_arg_grads;
       std::vector<NDArray> tmp_aux_states;
-      const std::vector<std::string> arg_names = symbol.ListInputNames(nnvm::Symbol::kReadOnlyArgs);
-      const std::vector<std::string> aux_names = symbol.ListInputNames(nnvm::Symbol::kAuxiliaryStates);
+      const auto arg_names = symbol.ListInputNames(nnvm::Symbol::kReadOnlyArgs);
+      const auto aux_names = symbol.ListInputNames(nnvm::Symbol::kAuxiliaryStates);
       symbol = exec::BuildSubgraph(symbol, backend, arg_shape_map, arg_dtype_map, arg_stype_map,
                                    default_ctx, group2ctx, &tmp_in_arg_ctxes, &tmp_arg_grad_ctxes,
                                    &tmp_grad_req_types, &tmp_aux_state_ctxes, verbose);
       exec->Init(symbol, default_ctx, group2ctx, tmp_in_arg_ctxes, tmp_arg_grad_ctxes,
                  tmp_aux_state_ctxes, arg_shape_map, arg_dtype_map, arg_stype_map,
                  tmp_grad_req_types, shared_arg_names, &tmp_in_args, &tmp_arg_grads,
                  &tmp_aux_states, shared_buffer, shared_exec);
-      const std::vector<std::string> new_arg_names =
-          symbol.ListInputNames(nnvm::Symbol::kReadOnlyArgs);
-      const std::vector<std::string> new_aux_names =
-          symbol.ListInputNames(nnvm::Symbol::kAuxiliaryStates);
+      init = true;
+      const auto new_arg_names = symbol.ListInputNames(nnvm::Symbol::kReadOnlyArgs);
+      const auto new_aux_names = symbol.ListInputNames(nnvm::Symbol::kAuxiliaryStates);
       std::unordered_map<std::string, size_t> new_arg_names_idx_map;
       std::unordered_map<std::string, size_t> new_aux_names_idx_map;
       for (size_t i = 0; i != new_arg_names.size(); ++i) {
@@ -1979,7 +1979,9 @@ Executor *Executor::SimpleBind(nnvm::Symbol symbol,
         aux_states->emplace_back(std::move(tmp_aux_states[it->second]));
       }
     }
-  } else {
+  }
+  if (!init) {
+    // init without subgraph
     exec->Init(symbol, default_ctx, group2ctx, in_arg_ctxes, arg_grad_ctxes, aux_state_ctxes,
                arg_shape_map, arg_dtype_map, arg_stype_map, grad_req_types, shared_arg_names,
                in_args, arg_grads, aux_states, shared_buffer, shared_exec);
```
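The graph_executor.cc change replaces the `if/else` around executor initialization with a `bool init` flag: `SimpleBind` now falls back to the plain `exec->Init(...)` call whenever the subgraph branch did not complete initialization, rather than only when no subgraph backend is configured. On the Python side the subgraph backend is selected through the `MXNET_SUBGRAPH_BACKEND` environment variable, which is also what the tests below toggle. A minimal sketch of exercising that path, assuming the MXNet 1.x symbolic API; the operator, shapes, and context here are illustrative only:

```python
import os
import mxnet as mx

# Illustrative sketch: toggle the subgraph backend via the environment,
# bind a small symbol (the SimpleBind path shown above), then restore the default.
os.environ['MXNET_SUBGRAPH_BACKEND'] = 'NONE'   # disable graph partitioning
data = mx.sym.Variable('data', shape=(1, 3, 10, 10))
conv = mx.sym.Convolution(data, kernel=(2, 2), num_filter=1, name='conv')
exe = conv.simple_bind(ctx=mx.cpu(), grad_req='null')
exe.forward(is_train=False, data=mx.nd.ones((1, 3, 10, 10)))
del os.environ['MXNET_SUBGRAPH_BACKEND']        # restore default behavior
```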
114 changes: 62 additions & 52 deletions tests/python/unittest/test_operator.py
```diff
@@ -7789,41 +7789,46 @@ def get_output_names_callback(name, arr):
             pass
         for output_name, expected_name in zip(output_names, expected_names):
             assert output_name == expected_name
-    # Disable subgraph in case subgraph will replace symbol
-    os.environ['MXNET_SUBGRAPH_BACKEND'] = "NONE"
-    data = mx.sym.Variable('data', shape=(10, 3, 10, 10))
-    conv_sym = mx.sym.Convolution(data, kernel=(2, 2), num_filter=1, name='conv')
-    check_name(conv_sym, ['conv_output'])
+    is_windows = sys.platform.startswith('win')
+    if (is_windows):
+        # Windows doesn't support set environment variable on the fly, so disable it for now
+        pass
+    else:
+        # Disable subgraph in case subgraph will replace symbol
+        os.environ['MXNET_SUBGRAPH_BACKEND'] = "NONE"
+        data = mx.sym.Variable('data', shape=(10, 3, 10, 10))
+        conv_sym = mx.sym.Convolution(data, kernel=(2, 2), num_filter=1, name='conv')
+        check_name(conv_sym, ['conv_output'])
 
-    deconv_sym = mx.sym.Deconvolution(data, kernel=(2, 2), num_filter=1, name='deconv')
-    check_name(deconv_sym, ['deconv_output'])
+        deconv_sym = mx.sym.Deconvolution(data, kernel=(2, 2), num_filter=1, name='deconv')
+        check_name(deconv_sym, ['deconv_output'])
 
-    fc_sym = mx.sym.FullyConnected(data, num_hidden=10, name='fc')
-    check_name(fc_sym, ['fc_output'])
+        fc_sym = mx.sym.FullyConnected(data, num_hidden=10, name='fc')
+        check_name(fc_sym, ['fc_output'])
 
-    lrn_sym = mx.sym.LRN(data, nsize=1, name='lrn')
-    check_name(lrn_sym, ['lrn_output', 'lrn_tmp_norm'])
+        lrn_sym = mx.sym.LRN(data, nsize=1, name='lrn')
+        check_name(lrn_sym, ['lrn_output', 'lrn_tmp_norm'])
 
-    act_sym = mx.sym.Activation(data, act_type='relu', name='act')
-    check_name(act_sym, ['act_output'])
+        act_sym = mx.sym.Activation(data, act_type='relu', name='act')
+        check_name(act_sym, ['act_output'])
 
-    cc_sym = mx.sym.concat(data, data, dim=0, name='concat')
-    check_name(cc_sym, ['concat_output'])
+        cc_sym = mx.sym.concat(data, data, dim=0, name='concat')
+        check_name(cc_sym, ['concat_output'])
 
-    sm_sym = mx.sym.softmax(data, name='softmax')
-    check_name(sm_sym, ['softmax_output'])
+        sm_sym = mx.sym.softmax(data, name='softmax')
+        check_name(sm_sym, ['softmax_output'])
 
-    sa_sym = mx.sym.SoftmaxActivation(data, name='softmax')
-    check_name(sa_sym, ['softmax_output'])
+        sa_sym = mx.sym.SoftmaxActivation(data, name='softmax')
+        check_name(sa_sym, ['softmax_output'])
 
-    us_sym = mx.sym.UpSampling(data, scale=2, sample_type='nearest',
-                               name='upsampling')
-    check_name(us_sym, ['upsampling_output'])
+        us_sym = mx.sym.UpSampling(data, scale=2, sample_type='nearest',
+                                   name='upsampling')
+        check_name(us_sym, ['upsampling_output'])
 
-    us_sym = mx.sym.Pooling(data, kernel=(2, 2), pool_type='avg',
-                            name='pooling')
-    check_name(us_sym, ['pooling_output'])
-    del os.environ['MXNET_SUBGRAPH_BACKEND']
+        us_sym = mx.sym.Pooling(data, kernel=(2, 2), pool_type='avg',
+                                name='pooling')
+        check_name(us_sym, ['pooling_output'])
+        del os.environ['MXNET_SUBGRAPH_BACKEND']
 
 def test_op_all_names_monitor():
     def check_name(op_sym, expected_names):
```
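Both monitor tests in test_operator.py now wrap the `MXNET_SUBGRAPH_BACKEND` handling in a platform check: on Windows the body is skipped because, per the in-diff comment, the test avoids changing environment variables on the fly there. The committed code inlines that check under an `else:` branch; the helper below is only a hedged sketch of the same guard, with a `try/finally` added so the variable is restored even if an assertion fails part way through:

```python
import os
import sys

def run_with_subgraph_disabled(body):
    """Sketch of the guard used above: skip on Windows, otherwise disable the
    subgraph backend while `body` runs and then restore the environment."""
    if sys.platform.startswith('win'):
        return  # environment left untouched on Windows, mirroring the test
    os.environ['MXNET_SUBGRAPH_BACKEND'] = 'NONE'
    try:
        body()
    finally:
        del os.environ['MXNET_SUBGRAPH_BACKEND']
```

This helper is not part of the commit; it only illustrates the design choice the test makes and a slightly more defensive way to express it.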
```diff
@@ -7842,42 +7847,47 @@ def get_output_names_callback(name, arr):
             pass
         for output_name, expected_name in zip(output_names, expected_names):
             assert output_name == expected_name
-    # Disable subgraph in case subgraph will replace symbol
-    os.environ['MXNET_SUBGRAPH_BACKEND'] = "NONE"
+    is_windows = sys.platform.startswith('win')
+    if (is_windows):
+        # Windows doesn't support set environment variable on the fly, so disable it for now
+        pass
+    else:
+        # Disable subgraph in case subgraph will replace symbol
+        os.environ['MXNET_SUBGRAPH_BACKEND'] = "NONE"
 
-    data = mx.sym.Variable('data', shape=(10, 3, 10, 10))
-    conv_sym = mx.sym.Convolution(data, kernel=(2, 2), num_filter=1, name='conv')
-    check_name(conv_sym, ['data', 'conv_data', 'conv_weight', 'conv_weight', 'conv_bias', 'conv_bias', 'conv_output'])
+        data = mx.sym.Variable('data', shape=(10, 3, 10, 10))
+        conv_sym = mx.sym.Convolution(data, kernel=(2, 2), num_filter=1, name='conv')
+        check_name(conv_sym, ['data', 'conv_data', 'conv_weight', 'conv_weight', 'conv_bias', 'conv_bias', 'conv_output'])
 
-    deconv_sym = mx.sym.Deconvolution(data, kernel=(2, 2), num_filter=1, name='deconv')
-    check_name(deconv_sym, ['data', 'deconv_data', 'deconv_weight', 'deconv_weight', 'deconv_output'])
+        deconv_sym = mx.sym.Deconvolution(data, kernel=(2, 2), num_filter=1, name='deconv')
+        check_name(deconv_sym, ['data', 'deconv_data', 'deconv_weight', 'deconv_weight', 'deconv_output'])
 
-    fc_sym = mx.sym.FullyConnected(data, num_hidden=10, name='fc')
-    check_name(fc_sym, ['data', 'fc_data', 'fc_weight', 'fc_weight', 'fc_bias', 'fc_bias', 'fc_output'])
+        fc_sym = mx.sym.FullyConnected(data, num_hidden=10, name='fc')
+        check_name(fc_sym, ['data', 'fc_data', 'fc_weight', 'fc_weight', 'fc_bias', 'fc_bias', 'fc_output'])
 
-    lrn_sym = mx.sym.LRN(data, nsize=1, name='lrn')
-    check_name(lrn_sym, ['data', 'lrn_data', 'lrn_output', 'lrn_tmp_norm'])
+        lrn_sym = mx.sym.LRN(data, nsize=1, name='lrn')
+        check_name(lrn_sym, ['data', 'lrn_data', 'lrn_output', 'lrn_tmp_norm'])
 
-    act_sym = mx.sym.Activation(data, act_type='relu', name='act')
-    check_name(act_sym, ['data', 'act_input0', 'act_output'])
+        act_sym = mx.sym.Activation(data, act_type='relu', name='act')
+        check_name(act_sym, ['data', 'act_input0', 'act_output'])
 
-    cc_sym = mx.sym.concat(data, data, dim=0, name='concat')
-    check_name(cc_sym, ['data', 'concat_arg0', 'data', 'concat_arg1', 'concat_output'])
+        cc_sym = mx.sym.concat(data, data, dim=0, name='concat')
+        check_name(cc_sym, ['data', 'concat_arg0', 'data', 'concat_arg1', 'concat_output'])
 
-    sm_sym = mx.sym.softmax(data, name='softmax')
-    check_name(sm_sym, ['data', 'softmax_input0', 'softmax_output'])
+        sm_sym = mx.sym.softmax(data, name='softmax')
+        check_name(sm_sym, ['data', 'softmax_input0', 'softmax_output'])
 
-    sa_sym = mx.sym.SoftmaxActivation(data, name='softmax')
-    check_name(sa_sym, ['data', 'softmax_input0', 'softmax_output'])
+        sa_sym = mx.sym.SoftmaxActivation(data, name='softmax')
+        check_name(sa_sym, ['data', 'softmax_input0', 'softmax_output'])
 
-    us_sym = mx.sym.UpSampling(data, scale=2, sample_type='nearest',
-                               name='upsampling')
-    check_name(us_sym, ['data', 'upsampling_arg0', 'upsampling_output'])
+        us_sym = mx.sym.UpSampling(data, scale=2, sample_type='nearest',
+                                   name='upsampling')
+        check_name(us_sym, ['data', 'upsampling_arg0', 'upsampling_output'])
 
-    us_sym = mx.sym.Pooling(data, kernel=(2, 2), pool_type='avg',
-                            name='pooling')
-    check_name(us_sym, ['data', 'pooling_data', 'pooling_output'])
-    del os.environ['MXNET_SUBGRAPH_BACKEND']
+        us_sym = mx.sym.Pooling(data, kernel=(2, 2), pool_type='avg',
+                                name='pooling')
+        check_name(us_sym, ['data', 'pooling_data', 'pooling_output'])
+        del os.environ['MXNET_SUBGRAPH_BACKEND']
 
 @with_seed()
 @unittest.skip("test fails intermittently. temporarily disabled till it gets fixed. tracked at https://github.com/apache/incubator-mxnet/issues/13915")
```
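Both tests drive an executor monitor callback, `get_output_names_callback(name, arr)`, to record the names the engine reports while running forward. A minimal sketch of that mechanism, assuming the MXNet 1.x `Executor.set_monitor_callback` API; the `monitor_all=True` flag (used for the `test_op_all_names_monitor` variant) and the `py_str` helper are assumptions based on the surrounding test code, and the operator and shapes are illustrative:

```python
import mxnet as mx
from mxnet.base import py_str

names = []

def get_output_names_callback(name, arr):
    # The engine passes the array name as a C string; py_str decodes it.
    names.append(py_str(name))

data = mx.sym.Variable('data', shape=(1, 3, 10, 10))
conv = mx.sym.Convolution(data, kernel=(2, 2), num_filter=1, name='conv')
exe = conv.simple_bind(ctx=mx.cpu(), grad_req='null')
exe.set_monitor_callback(get_output_names_callback, monitor_all=True)
exe.forward(is_train=False)
mx.nd.waitall()
print(names)  # expected to include 'conv_output' (plus inputs when monitor_all=True)
```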
