Address PR comments
Signed-off-by: Serge Panev <[email protected]>
Kh4L committed Jul 6, 2020
1 parent 11a7a5c commit 8dffde3
Showing 2 changed files with 14 additions and 6 deletions.
11 changes: 9 additions & 2 deletions python/mxnet/gluon/block.py
@@ -1068,6 +1068,11 @@ def _build_cache(self, *args):
         input_names = out.list_inputs()
         data_indices = []
         param_indices = []
+
+        # In the default case, _cached_op_args contains all the parameters from params (the sets are identical).
+        # In the case of a Partition API optimized graph, _cached_op_args might contain some parameters from params,
+        # might contain some new parameters created during optimization and added to `arg_dict`/`aux_dict`,
+        # and might not contain some parameters that were deleted during optimization.
         self._cached_op_args = []
         for i, name in enumerate(input_names):
             pair = None
@@ -1079,13 +1084,15 @@ def _build_cache(self, *args):
             if name in params:
                 param = params[name]
             else:
-                assert self._backend, "Parameter " + name + " is missing from block params"
+                # The param is missing from the original params dictionary, which means the param
+                # must have been added by the Partition API backend.
                 if name in arg_dict:
                     param_data = arg_dict[name]
                 elif name in aux_dict:
                     param_data = aux_dict[name]
                 else:
-                    raise RuntimeError('Expected inputs missing from arg and aux after partioning. '
+                    raise RuntimeError('A parameter was added to the graph during optimization but it was not added '
+                                       'to the parameter dicts.\n'
                                        'Please check the backend.')

                 param = Parameter(name)
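The lookup order in the hunks above is the heart of the change: resolve each graph input against the block's own params first, then fall back to the arg_dict/aux_dict produced by the Partition API backend, and fail loudly only if the name appears nowhere. A minimal sketch of that resolution logic, written as a standalone function for illustration (the name resolve_param and the plain-dict arguments are assumptions, not MXNet API):

# Hedged sketch of the parameter lookup used in _build_cache above.
# `params`, `arg_dict`, and `aux_dict` stand in for the block's parameter
# dict and the arg/aux dicts of the optimized graph; `resolve_param` is a
# hypothetical helper, not part of MXNet.
def resolve_param(name, params, arg_dict, aux_dict):
    if name in params:
        # Default case: the input is an existing block parameter.
        return params[name]
    # Otherwise the param must have been created by the Partition API
    # backend during optimization and stored in arg_dict or aux_dict.
    if name in arg_dict:
        return arg_dict[name]
    if name in aux_dict:
        return aux_dict[name]
    raise RuntimeError('A parameter was added to the graph during optimization '
                       'but it was not added to the parameter dicts.')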
9 changes: 5 additions & 4 deletions python/mxnet/symbol/symbol.py
@@ -1557,8 +1557,9 @@ def optimize_for(self, backend, args=None, aux=None, ctx=None, **kwargs):
             if a_n in args:
                 args.pop(a_n)
             else:
-                warnings.warn('optimize_for deleted some argument. \n' +
-                              'Provide a dictionary to the arg argument to optimize_for')
+                warnings.warn('A param was deleted during optimization, but no args dictionary was provided.\n' +
+                              'Please ensure that your model weights match the newly optimized model.')
+
         aux_names = self.list_auxiliary_states()
         new_aux_names = new_sym.list_auxiliary_states()
         deleted_aux_names = set([item for item in aux_names
@@ -1569,8 +1570,8 @@ def optimize_for(self, backend, args=None, aux=None, ctx=None, **kwargs):
             if a_n in aux:
                 aux.pop(a_n)
             else:
-                warnings.warn('optimize_for deleted some aux argument. \n' +
-                              'Provide a dictionary to the aux argument to optimize_for')
+                warnings.warn('A param was deleted during optimization, but no aux dictionary was provided.\n' +
+                              'Please ensure that your model weights match the newly optimized model.')

         return new_sym
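For context on when the new warnings fire: they are emitted only when optimize_for is called without args/aux dictionaries, so deleted weights cannot be popped on the caller's behalf. A hedged usage sketch (the backend name 'myBackend' and the checkpoint prefix 'model' are placeholders, not from this commit):

import mxnet as mx

# Load a symbol and its weight dicts; the checkpoint files are assumed to exist.
sym, args, aux = mx.model.load_checkpoint('model', 0)

# Passing args and aux lets optimize_for pop any weights the backend deletes,
# keeping the dicts in sync with the optimized graph, so no warning is raised.
optimized = sym.optimize_for('myBackend', args=args, aux=aux, ctx=mx.cpu())

# Without the dicts the call still succeeds, but the warnings added above
# remind the caller to reconcile the model weights manually.
optimized = sym.optimize_for('myBackend')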
