[MXNET-1411] solve pylint error issue#14851 (apache#15113)
* fix pylint error: no-else-raise in _export_helper.py

* fix pylint error: no-else-raise in _translation_utils.py

* fix pylint error: Bad option value 'no-else-raise' (bad-option-value) in vocab.py

* fix pylint error: Bad option value 'no-else-raise' (bad-option-value) in trainer.py

* fix pylint error: Bad option value 'no-else-raise' (bad-option-value) in utils.py

* fix pylint error: Bad option value 'no-else-raise' (bad-option-value) in detection.py

* fix pylint error: Bad option value 'no-else-raise' (bad-option-value) in image.py

* fix pylint error: Bad option value 'no-else-raise' (bad-option-value) in model.py

* fix pylint error: Bad option value 'no-else-raise' (bad-option-value) in sparse.py

* fix pylint error: Bad option value 'no-else-raise' (bad-option-value) in test_utils.py

* fix pylint error: R1720: Unnecessary else after raise (no-else-raise) for vocab.py

* fix pylint error: R1720: Unnecessary else after raise (no-else-raise) for model.py

* fix pylint error: R1720: Unnecessary else after raise (no-else-raise) for _translation_utils.py

* fix pylint error: R1720: Unnecessary else after raise (no-else-raise) for _export_helper.py

* fix pylint error: R1720: Unnecessary else after raise (no-else-raise) for test_utils.py

* fix pylint error: R1720: Unnecessary else after raise (no-else-raise) for image.py

* fix pylint error: R1720: Unnecessary else after raise (no-else-raise) for trainer.py

* fix pylint error: R1720: Unnecessary else after raise (no-else-raise) for detection.py

* fix pylint error: R1720: Unnecessary else after raise (no-else-raise) for utils.py

* fix pylint error: R1720: Unnecessary else after raise (no-else-raise) for sparse.py

* fix pylint error: R1719: The if expression can be replaced with 'bool(test)' (simplifiable-if-expression)
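
Every no-else-raise bullet above is the same mechanical rewrite, and the final bullet covers the one simplifiable-if-expression case. Below is a minimal, self-contained sketch of both refactors; the function and variable names (read_config, input_shape) are hypothetical and not taken from the MXNet sources.

import os

def read_config(path):
    # Before (triggers R1720, no-else-raise): the `else` is redundant because
    # `raise` already exits the function.
    #
    #     if not os.path.isfile(path):
    #         raise ValueError("config file not found")
    #     else:
    #         with open(path) as f:
    #             return f.read()
    #
    # After: drop the `else` and dedent the non-error path.
    if not os.path.isfile(path):
        raise ValueError("config file not found")
    with open(path) as f:
        return f.read()

# Before (triggers R1719, simplifiable-if-expression):
#     flat = False if len(input_shape) == 3 else True
# After: build the boolean directly from the comparison.
input_shape = (1, 28, 28)
flat = len(input_shape) != 3

Both rewrites are behaviour-preserving; pylint flags them only because the shorter form says the same thing.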
cchung100m authored and Ubuntu committed Aug 20, 2019
1 parent e0919b0 commit c4faf7e
Showing 10 changed files with 88 additions and 90 deletions.
38 changes: 19 additions & 19 deletions python/mxnet/contrib/onnx/mx2onnx/_export_helper.py
@@ -40,26 +40,26 @@ def load_module(sym_filepath, params_filepath):
params : params object
Model weights including both arg and aux params.
"""
if not (os.path.isfile(sym_filepath) and os.path.isfile(params_filepath)): # pylint: disable=no-else-raise
if not (os.path.isfile(sym_filepath) and os.path.isfile(params_filepath)):
raise ValueError("Symbol and params files provided are invalid")
else:
try:
# reads symbol.json file from given path and
# retrieves model prefix and number of epochs
model_name = sym_filepath.rsplit('.', 1)[0].rsplit('-', 1)[0]
params_file_list = params_filepath.rsplit('.', 1)[0].rsplit('-', 1)
# Setting num_epochs to 0 if not present in filename
num_epochs = 0 if len(params_file_list) == 1 else int(params_file_list[1])
except IndexError:
logging.info("Model and params name should be in format: "
"prefix-symbol.json, prefix-epoch.params")
raise

sym, arg_params, aux_params = mx.model.load_checkpoint(model_name, num_epochs)
try:
# reads symbol.json file from given path and
# retrieves model prefix and number of epochs
model_name = sym_filepath.rsplit('.', 1)[0].rsplit('-', 1)[0]
params_file_list = params_filepath.rsplit('.', 1)[0].rsplit('-', 1)
# Setting num_epochs to 0 if not present in filename
num_epochs = 0 if len(params_file_list) == 1 else int(params_file_list[1])
except IndexError:
logging.info("Model and params name should be in format: "
"prefix-symbol.json, prefix-epoch.params")
raise

# Merging arg and aux parameters
params = {}
params.update(arg_params)
params.update(aux_params)
sym, arg_params, aux_params = mx.model.load_checkpoint(model_name, num_epochs)

return sym, params
# Merging arg and aux parameters
params = {}
params.update(arg_params)
params.update(aux_params)

return sym, params
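
For orientation, a hedged sketch of how the refactored helper is used; the checkpoint prefix 'resnet' and epoch 0 are hypothetical, and the module is an internal part of the ONNX export path rather than public API.

from mxnet.contrib.onnx.mx2onnx._export_helper import load_module

# Assumes 'resnet-symbol.json' and 'resnet-0000.params' exist in the working
# directory (hypothetical names, e.g. written by mx.model.save_checkpoint).
sym, params = load_module('resnet-symbol.json', 'resnet-0000.params')
print(sym.list_outputs())   # outputs of the loaded symbol
print(len(params))          # merged arg and aux parameters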
30 changes: 15 additions & 15 deletions python/mxnet/contrib/onnx/onnx2mx/_translation_utils.py
@@ -178,23 +178,23 @@ def _fix_channels(op_name, attrs, inputs, proto_obj):
these attributes. We check the shape of weights provided to get the number.
"""
weight_name = inputs[1].name
if not weight_name in proto_obj._params: # pylint: disable=no-else-raise
if not weight_name in proto_obj._params:
raise ValueError("Unable to get channels/units attr from onnx graph.")
else:
wshape = proto_obj._params[weight_name].shape
assert len(wshape) >= 2, "Weights shape is invalid: {}".format(wshape)

if op_name == 'FullyConnected':
attrs['num_hidden'] = wshape[0]
else:
if op_name == 'Convolution':
# Weight shape for Conv and FC: (M x C x kH x kW) : M is number of
# feature maps/hidden and C is number of channels
attrs['num_filter'] = wshape[0]
elif op_name == 'Deconvolution':
# Weight shape for DeConv : (C x M x kH x kW) : M is number of
# feature maps/filters and C is number of channels
attrs['num_filter'] = wshape[1]
wshape = proto_obj._params[weight_name].shape
assert len(wshape) >= 2, "Weights shape is invalid: {}".format(wshape)

if op_name == 'FullyConnected':
attrs['num_hidden'] = wshape[0]
else:
if op_name == 'Convolution':
# Weight shape for Conv and FC: (M x C x kH x kW) : M is number of
# feature maps/hidden and C is number of channels
attrs['num_filter'] = wshape[0]
elif op_name == 'Deconvolution':
# Weight shape for DeConv : (C x M x kH x kW) : M is number of
# feature maps/filters and C is number of channels
attrs['num_filter'] = wshape[1]
return attrs


5 changes: 2 additions & 3 deletions python/mxnet/contrib/text/vocab.py
@@ -210,9 +210,8 @@ def to_tokens(self, indices):

tokens = []
for idx in indices:
if not isinstance(idx, int) or idx > max_idx: # pylint: disable=no-else-raise
if not isinstance(idx, int) or idx > max_idx:
raise ValueError('Token index %d in the provided `indices` is invalid.' % idx)
else:
tokens.append(self.idx_to_token[idx])
tokens.append(self.idx_to_token[idx])

return tokens[0] if to_reduce else tokens
12 changes: 6 additions & 6 deletions python/mxnet/gluon/trainer.py
@@ -249,11 +249,11 @@ def _init_kvstore(self):

@property
def learning_rate(self):
if not isinstance(self._optimizer, opt.Optimizer): # pylint: disable=no-else-raise
if not isinstance(self._optimizer, opt.Optimizer):
raise UserWarning("Optimizer has to be defined before its learning "
"rate can be accessed.")
else:
return self._optimizer.learning_rate

return self._optimizer.learning_rate

@property
def optimizer(self):
@@ -270,11 +270,11 @@ def set_learning_rate(self, lr):
lr : float
The new learning rate of the optimizer.
"""
if not isinstance(self._optimizer, opt.Optimizer): # pylint: disable=no-else-raise
if not isinstance(self._optimizer, opt.Optimizer):
raise UserWarning("Optimizer has to be defined before its learning "
"rate is mutated.")
else:
self._optimizer.set_learning_rate(lr)

self._optimizer.set_learning_rate(lr)

def _row_sparse_pull(self, parameter, out, row_id, full_idx=False):
"""Internal method to invoke pull operations on KVStore. If `full_idx` is set to True,
8 changes: 4 additions & 4 deletions python/mxnet/gluon/utils.py
@@ -341,11 +341,11 @@ def download(url, path=None, overwrite=False, sha1_hash=None, retries=5, verify_
break
except Exception as e:
retries -= 1
if retries <= 0: # pylint: disable=no-else-raise
if retries <= 0:
raise e
else:
print('download failed due to {}, retrying, {} attempt{} left'
.format(repr(e), retries, 's' if retries > 1 else ''))

print('download failed due to {}, retrying, {} attempt{} left'
.format(repr(e), retries, 's' if retries > 1 else ''))

return fname
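
The same retry logic is reachable through the public mx.gluon.utils.download API; a hedged usage sketch follows, with a placeholder URL.

import mxnet as mx

# Retries up to `retries` times, printing the "download failed ... attempt(s) left"
# message shown above between attempts, and re-raising the last exception once no
# attempts remain; returns the local file name on success.
fname = mx.gluon.utils.download(
    'https://example.com/files/sample.txt',  # placeholder URL
    path='sample.txt',
    retries=3,
)
print(fname)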

18 changes: 9 additions & 9 deletions python/mxnet/image/detection.py
@@ -809,23 +809,23 @@ def next(self):
pad = batch_size - i
# handle padding for the last batch
if pad != 0:
if self.last_batch_handle == 'discard': # pylint: disable=no-else-raise
if self.last_batch_handle == 'discard':
raise StopIteration
# if the option is 'roll_over', throw StopIteration and cache the data
elif self.last_batch_handle == 'roll_over' and \
if self.last_batch_handle == 'roll_over' and \
self._cache_data is None:
self._cache_data = batch_data
self._cache_label = batch_label
self._cache_idx = i
raise StopIteration

_ = self._batchify(batch_data, batch_label, i)
if self.last_batch_handle == 'pad':
self._allow_read = False
else:
_ = self._batchify(batch_data, batch_label, i)
if self.last_batch_handle == 'pad':
self._allow_read = False
else:
self._cache_data = None
self._cache_label = None
self._cache_idx = None
self._cache_data = None
self._cache_label = None
self._cache_idx = None

return io.DataBatch([batch_data], [batch_label], pad=pad)

24 changes: 12 additions & 12 deletions python/mxnet/image/image.py
@@ -1198,10 +1198,10 @@ def __init__(self, batch_size, data_shape, label_width=1,
logging.info('%s: loading recordio %s...',
class_name, path_imgrec)
if path_imgidx:
self.imgrec = recordio.MXIndexedRecordIO(path_imgidx, path_imgrec, 'r') # pylint: disable=redefined-variable-type
self.imgrec = recordio.MXIndexedRecordIO(path_imgidx, path_imgrec, 'r')
self.imgidx = list(self.imgrec.keys)
else:
self.imgrec = recordio.MXRecordIO(path_imgrec, 'r') # pylint: disable=redefined-variable-type
self.imgrec = recordio.MXRecordIO(path_imgrec, 'r')
self.imgidx = None
else:
self.imgrec = None
@@ -1224,7 +1224,7 @@ def __init__(self, batch_size, data_shape, label_width=1,
imgkeys = []
index = 1
for img in imglist:
key = str(index) # pylint: disable=redefined-variable-type
key = str(index)
index += 1
if len(img) > 2:
label = nd.array(img[:-1], dtype=dtype)
@@ -1374,23 +1374,23 @@ def next(self):
pad = batch_size - i
# handle padding for the last batch
if pad != 0:
if self.last_batch_handle == 'discard': # pylint: disable=no-else-raise
if self.last_batch_handle == 'discard':
raise StopIteration
# if the option is 'roll_over', throw StopIteration and cache the data
elif self.last_batch_handle == 'roll_over' and \
if self.last_batch_handle == 'roll_over' and \
self._cache_data is None:
self._cache_data = batch_data
self._cache_label = batch_label
self._cache_idx = i
raise StopIteration

_ = self._batchify(batch_data, batch_label, i)
if self.last_batch_handle == 'pad':
self._allow_read = False
else:
_ = self._batchify(batch_data, batch_label, i)
if self.last_batch_handle == 'pad':
self._allow_read = False
else:
self._cache_data = None
self._cache_label = None
self._cache_idx = None
self._cache_data = None
self._cache_label = None
self._cache_idx = None

return io.DataBatch([batch_data], [batch_label], pad=pad)

5 changes: 2 additions & 3 deletions python/mxnet/model.py
@@ -642,10 +642,9 @@ def _init_iter(self, X, y, is_train):
"""Initialize the iterator given input."""
if isinstance(X, (np.ndarray, nd.NDArray)):
if y is None:
if is_train: # pylint: disable=no-else-raise
if is_train:
raise ValueError('y must be specified when X is numpy.ndarray')
else:
y = np.zeros(X.shape[0])
y = np.zeros(X.shape[0])
if not isinstance(y, (np.ndarray, nd.NDArray)):
raise TypeError('y must be ndarray when X is numpy.ndarray')
if X.shape[0] != y.shape[0]:
10 changes: 5 additions & 5 deletions python/mxnet/ndarray/sparse.py
@@ -639,10 +639,10 @@ def __getitem__(self, key):
if isinstance(key, int):
raise Exception("__getitem__ with int key is not implemented for RowSparseNDArray yet")
if isinstance(key, py_slice):
if key.step is not None or key.start is not None or key.stop is not None: # pylint: disable=no-else-raise
if key.step is not None or key.start is not None or key.stop is not None:
raise Exception('RowSparseNDArray only supports [:] for __getitem__')
else:
return self

return self
if isinstance(key, tuple):
raise ValueError('Multi-dimension indexing is not supported')
raise ValueError('Undefined behaviour for {}'.format(key))
@@ -1102,9 +1102,9 @@ def row_sparse_array(arg1, shape=None, ctx=None, dtype=None):
# construct a row sparse array from (D0, D1 ..) or (data, indices)
if isinstance(arg1, tuple):
arg_len = len(arg1)
if arg_len < 2: # pylint: disable=no-else-raise
if arg_len < 2:
raise ValueError("Unexpected length of input tuple: " + str(arg_len))
elif arg_len > 2:
if arg_len > 2:
# empty ndarray with shape
_check_shape(arg1, shape)
return empty('row_sparse', arg1, ctx=ctx, dtype=dtype)
28 changes: 14 additions & 14 deletions python/mxnet/test_utils.py
@@ -212,11 +212,11 @@ def _get_powerlaw_dataset_csr(num_rows, num_cols, density=0.1, dtype=None):
return mx.nd.array(output_arr).tostype("csr")
col_max = col_max * 2

if unused_nnz > 0: # pylint: disable=no-else-raise
if unused_nnz > 0:
raise ValueError("not supported for this density: %s"
" for this shape (%s,%s)" % (density, num_rows, num_cols))
else:
return mx.nd.array(output_arr).tostype("csr")

return mx.nd.array(output_arr).tostype("csr")


def assign_each(the_input, function):
@@ -1407,10 +1407,10 @@ def check_consistency(sym, ctx_list, scale=1.0, grad_req='write',
except AssertionError as e:
print('Predict Err: ctx %d vs ctx %d at %s'%(i, max_idx, name))
traceback.print_exc()
if raise_on_err: # pylint: disable=no-else-raise
if raise_on_err:
raise e
else:
print(str(e))

print(str(e))

# train
if grad_req != 'null':
Expand All @@ -1434,10 +1434,10 @@ def check_consistency(sym, ctx_list, scale=1.0, grad_req='write',
except AssertionError as e:
print('Train Err: ctx %d vs ctx %d at %s'%(i, max_idx, name))
traceback.print_exc()
if raise_on_err: # pylint: disable=no-else-raise
if raise_on_err:
raise e
else:
print(str(e))

print(str(e))

return gt

@@ -1514,11 +1514,11 @@ def download(url, fname=None, dirname=None, overwrite=False, retries=5):
break
except Exception as e:
retries -= 1
if retries <= 0: # pylint: disable=no-else-raise
if retries <= 0:
raise e
else:
print("download failed, retrying, {} attempt{} left"
.format(retries, 's' if retries > 1 else ''))

print("download failed, retrying, {} attempt{} left"
.format(retries, 's' if retries > 1 else ''))
logging.info("downloaded %s into %s successfully", url, fname)
return fname

@@ -1661,7 +1661,7 @@ def get_mnist_iterator(batch_size, input_shape, num_parts=1, part_index=0):
"""

get_mnist_ubyte()
flat = False if len(input_shape) == 3 else True # pylint: disable=simplifiable-if-expression
flat = not bool(len(input_shape) == 3)

train_dataiter = mx.io.MNISTIter(
image="data/train-images-idx3-ubyte",
