From 5e5a59ec6b145286b13d80a761c304674469cb73 Mon Sep 17 00:00:00 2001 From: Jake Lee Date: Thu, 2 May 2019 07:58:10 +0800 Subject: [PATCH] Upgrade Pylint version to 2.3.1 (#14807) * upgrade pylint to latest version & change the Makefile to use python3 pylint * fix pylint when upgrade * specify the pylint version * suppress the pylint not-callable bug * remove pylint from python2 * fix the typo * upgrade cpp-package pylint * move pylint disable to each block --- Makefile | 2 +- ci/docker/install/ubuntu_publish.sh | 4 ++-- ci/docker/install/ubuntu_python.sh | 4 ++-- cpp-package/tests/travis/setup.sh | 2 +- docs/install/requirements.txt | 2 +- python/mxnet/base.py | 2 +- .../mxnet/contrib/onnx/mx2onnx/_export_helper.py | 2 +- .../mxnet/contrib/onnx/mx2onnx/_op_translations.py | 2 +- .../contrib/onnx/onnx2mx/_translation_utils.py | 2 +- python/mxnet/contrib/quantization.py | 1 + python/mxnet/contrib/text/vocab.py | 2 +- python/mxnet/gluon/parameter.py | 2 +- python/mxnet/gluon/trainer.py | 4 ++-- python/mxnet/gluon/utils.py | 2 +- python/mxnet/image/detection.py | 2 +- python/mxnet/image/image.py | 2 +- python/mxnet/io/io.py | 2 ++ python/mxnet/model.py | 2 +- python/mxnet/module/base_module.py | 2 +- python/mxnet/module/python_module.py | 2 +- python/mxnet/ndarray/contrib.py | 4 ++-- python/mxnet/ndarray/sparse.py | 4 ++-- python/mxnet/ndarray_doc.py | 2 +- python/mxnet/operator.py | 2 +- python/mxnet/optimizer/optimizer.py | 2 +- python/mxnet/recordio.py | 4 ++++ python/mxnet/rnn/rnn_cell.py | 4 ++-- python/mxnet/symbol/symbol.py | 2 +- python/mxnet/test_utils.py | 14 +++++++------- tools/caffe_converter/compare_layers.py | 5 ----- tools/caffe_converter/test_converter.py | 2 -- 31 files changed, 45 insertions(+), 45 deletions(-) diff --git a/Makefile b/Makefile index 981a86830ba9..df0fe8809456 100644 --- a/Makefile +++ b/Makefile @@ -592,7 +592,7 @@ cpplint: --exclude_path src/operator/contrib/ctc_include pylint: - pylint --rcfile=$(ROOTDIR)/ci/other/pylintrc --ignore-patterns=".*\.so$$,.*\.dll$$,.*\.dylib$$" python/mxnet tools/caffe_converter/*.py + python3 -m pylint --rcfile=$(ROOTDIR)/ci/other/pylintrc --ignore-patterns=".*\.so$$,.*\.dll$$,.*\.dylib$$" python/mxnet tools/caffe_converter/*.py doc: docs diff --git a/ci/docker/install/ubuntu_publish.sh b/ci/docker/install/ubuntu_publish.sh index 1fb7bf165b9a..2d8b019372c7 100755 --- a/ci/docker/install/ubuntu_publish.sh +++ b/ci/docker/install/ubuntu_publish.sh @@ -66,5 +66,5 @@ python2 get-pip.py apt-get remove -y python3-urllib3 -pip2 install nose cpplint==1.3.0 pylint==1.9.3 'numpy<=1.15.2,>=1.8.2' nose-timer 'requests<2.19.0,>=2.18.4' h5py==2.8.0rc1 scipy==1.0.1 boto3 -pip3 install nose cpplint==1.3.0 pylint==2.1.1 'numpy<=1.15.2,>=1.8.2' nose-timer 'requests<2.19.0,>=2.18.4' h5py==2.8.0rc1 scipy==1.0.1 boto3 +pip2 install nose cpplint==1.3.0 'numpy<=1.15.2,>=1.8.2' nose-timer 'requests<2.19.0,>=2.18.4' h5py==2.8.0rc1 scipy==1.0.1 boto3 +pip3 install nose cpplint==1.3.0 pylint==2.3.1 'numpy<=1.15.2,>=1.8.2' nose-timer 'requests<2.19.0,>=2.18.4' h5py==2.8.0rc1 scipy==1.0.1 boto3 diff --git a/ci/docker/install/ubuntu_python.sh b/ci/docker/install/ubuntu_python.sh index ee05058f227e..23158ba4c068 100755 --- a/ci/docker/install/ubuntu_python.sh +++ b/ci/docker/install/ubuntu_python.sh @@ -30,5 +30,5 @@ wget -nv https://bootstrap.pypa.io/get-pip.py python3 get-pip.py python2 get-pip.py -pip2 install nose cpplint==1.3.0 pylint==1.9.3 'numpy<=1.15.2,>=1.8.2' nose-timer 'requests<2.19.0,>=2.18.4' h5py==2.8.0rc1 scipy==1.0.1 boto3 -pip3 install 
nose cpplint==1.3.0 pylint==2.1.1 'numpy<=1.15.2,>=1.8.2' nose-timer 'requests<2.19.0,>=2.18.4' h5py==2.8.0rc1 scipy==1.0.1 boto3 +pip2 install nose cpplint==1.3.0 'numpy<=1.15.2,>=1.8.2' nose-timer 'requests<2.19.0,>=2.18.4' h5py==2.8.0rc1 scipy==1.0.1 boto3 +pip3 install nose cpplint==1.3.0 pylint==2.3.1 'numpy<=1.15.2,>=1.8.2' nose-timer 'requests<2.19.0,>=2.18.4' h5py==2.8.0rc1 scipy==1.0.1 boto3 diff --git a/cpp-package/tests/travis/setup.sh b/cpp-package/tests/travis/setup.sh index 5a3813ee34eb..e0c850ed39a9 100755 --- a/cpp-package/tests/travis/setup.sh +++ b/cpp-package/tests/travis/setup.sh @@ -19,5 +19,5 @@ if [ ${TASK} == "lint" ]; then - pip install cpplint 'pylint==1.4.4' 'astroid==1.3.6' --user + pip3 install cpplint 'pylint==2.3.1' --user fi diff --git a/docs/install/requirements.txt b/docs/install/requirements.txt index dfc3f70c96fb..b3620d607740 100644 --- a/docs/install/requirements.txt +++ b/docs/install/requirements.txt @@ -3,6 +3,6 @@ h5py==2.8.0rc1 nose nose-timer numpy<=1.15.2,>=1.8.2 -pylint==1.8.3 +pylint==2.3.1; python_version >= '3.0' requests<2.19.0,>=2.18.4 scipy==1.0.1 diff --git a/python/mxnet/base.py b/python/mxnet/base.py index 58f222dc1e85..53414016e39e 100644 --- a/python/mxnet/base.py +++ b/python/mxnet/base.py @@ -16,7 +16,7 @@ # under the License. # coding: utf-8 -# pylint: disable=invalid-name, no-member, trailing-comma-tuple, bad-mcs-classmethod-argument +# pylint: disable=invalid-name, no-member, trailing-comma-tuple, bad-mcs-classmethod-argument, unnecessary-pass """ctypes library of mxnet and helper functions.""" from __future__ import absolute_import diff --git a/python/mxnet/contrib/onnx/mx2onnx/_export_helper.py b/python/mxnet/contrib/onnx/mx2onnx/_export_helper.py index 781fb4cfbbc1..e73ff70fa5b0 100644 --- a/python/mxnet/contrib/onnx/mx2onnx/_export_helper.py +++ b/python/mxnet/contrib/onnx/mx2onnx/_export_helper.py @@ -40,7 +40,7 @@ def load_module(sym_filepath, params_filepath): params : params object Model weights including both arg and aux params. """ - if not (os.path.isfile(sym_filepath) and os.path.isfile(params_filepath)): + if not (os.path.isfile(sym_filepath) and os.path.isfile(params_filepath)): # pylint: disable=no-else-raise raise ValueError("Symbol and params files provided are invalid") else: try: diff --git a/python/mxnet/contrib/onnx/mx2onnx/_op_translations.py b/python/mxnet/contrib/onnx/mx2onnx/_op_translations.py index f9d170d81c13..35f4ff451cdb 100644 --- a/python/mxnet/contrib/onnx/mx2onnx/_op_translations.py +++ b/python/mxnet/contrib/onnx/mx2onnx/_op_translations.py @@ -762,7 +762,7 @@ def convert_leakyrelu(node, **kwargs): act_name = {"elu": "Elu", "leaky": "LeakyRelu", "prelu": "PRelu", "selu": "Selu"} - if act_type == "prelu" or act_type == "selu": + if act_type in ("prelu", "selu"): node = onnx.helper.make_node( act_name[act_type], inputs=input_nodes, diff --git a/python/mxnet/contrib/onnx/onnx2mx/_translation_utils.py b/python/mxnet/contrib/onnx/onnx2mx/_translation_utils.py index ce55a0b7d66a..48ede28ab022 100644 --- a/python/mxnet/contrib/onnx/onnx2mx/_translation_utils.py +++ b/python/mxnet/contrib/onnx/onnx2mx/_translation_utils.py @@ -178,7 +178,7 @@ def _fix_channels(op_name, attrs, inputs, proto_obj): these attributes. We check the shape of weights provided to get the number. 
""" weight_name = inputs[1].name - if not weight_name in proto_obj._params: + if not weight_name in proto_obj._params: # pylint: disable=no-else-raise raise ValueError("Unable to get channels/units attr from onnx graph.") else: wshape = proto_obj._params[weight_name].shape diff --git a/python/mxnet/contrib/quantization.py b/python/mxnet/contrib/quantization.py index 9e5f8c1e2311..b94b5a8da32a 100644 --- a/python/mxnet/contrib/quantization.py +++ b/python/mxnet/contrib/quantization.py @@ -61,6 +61,7 @@ def _quantize_params(qsym, params, th_dict): if name.endswith(('weight_quantize', 'bias_quantize')): original_name = name[:-len('_quantize')] param = params[original_name] + # pylint: disable=unbalanced-tuple-unpacking val, vmin, vmax = ndarray.contrib.quantize(data=param, min_range=ndarray.min(param), max_range=ndarray.max(param), diff --git a/python/mxnet/contrib/text/vocab.py b/python/mxnet/contrib/text/vocab.py index ede2ca535712..6e9920d601b6 100644 --- a/python/mxnet/contrib/text/vocab.py +++ b/python/mxnet/contrib/text/vocab.py @@ -210,7 +210,7 @@ def to_tokens(self, indices): tokens = [] for idx in indices: - if not isinstance(idx, int) or idx > max_idx: + if not isinstance(idx, int) or idx > max_idx: # pylint: disable=no-else-raise raise ValueError('Token index %d in the provided `indices` is invalid.' % idx) else: tokens.append(self.idx_to_token[idx]) diff --git a/python/mxnet/gluon/parameter.py b/python/mxnet/gluon/parameter.py index 2e130d498c14..f660b97f8789 100644 --- a/python/mxnet/gluon/parameter.py +++ b/python/mxnet/gluon/parameter.py @@ -16,7 +16,7 @@ # under the License. # coding: utf-8 -# pylint: disable= +# pylint: disable=unnecessary-pass """Neural network parameter.""" __all__ = ['DeferredInitializationError', 'Parameter', 'Constant', 'ParameterDict', 'tensor_types'] diff --git a/python/mxnet/gluon/trainer.py b/python/mxnet/gluon/trainer.py index a95417cf523b..6935c2752e1a 100644 --- a/python/mxnet/gluon/trainer.py +++ b/python/mxnet/gluon/trainer.py @@ -249,7 +249,7 @@ def _init_kvstore(self): @property def learning_rate(self): - if not isinstance(self._optimizer, opt.Optimizer): + if not isinstance(self._optimizer, opt.Optimizer): # pylint: disable=no-else-raise raise UserWarning("Optimizer has to be defined before its learning " "rate can be accessed.") else: @@ -263,7 +263,7 @@ def set_learning_rate(self, lr): lr : float The new learning rate of the optimizer. 
""" - if not isinstance(self._optimizer, opt.Optimizer): + if not isinstance(self._optimizer, opt.Optimizer): # pylint: disable=no-else-raise raise UserWarning("Optimizer has to be defined before its learning " "rate is mutated.") else: diff --git a/python/mxnet/gluon/utils.py b/python/mxnet/gluon/utils.py index b00cc043d493..861542220927 100644 --- a/python/mxnet/gluon/utils.py +++ b/python/mxnet/gluon/utils.py @@ -340,7 +340,7 @@ def download(url, path=None, overwrite=False, sha1_hash=None, retries=5, verify_ break except Exception as e: retries -= 1 - if retries <= 0: + if retries <= 0: # pylint: disable=no-else-raise raise e else: print('download failed due to {}, retrying, {} attempt{} left' diff --git a/python/mxnet/image/detection.py b/python/mxnet/image/detection.py index d5b5ecab528a..a70e5723072f 100644 --- a/python/mxnet/image/detection.py +++ b/python/mxnet/image/detection.py @@ -809,7 +809,7 @@ def next(self): pad = batch_size - i # handle padding for the last batch if pad != 0: - if self.last_batch_handle == 'discard': + if self.last_batch_handle == 'discard': # pylint: disable=no-else-raise raise StopIteration # if the option is 'roll_over', throw StopIteration and cache the data elif self.last_batch_handle == 'roll_over' and \ diff --git a/python/mxnet/image/image.py b/python/mxnet/image/image.py index 8bcf724ac4d2..f7dc27b72951 100644 --- a/python/mxnet/image/image.py +++ b/python/mxnet/image/image.py @@ -1374,7 +1374,7 @@ def next(self): pad = batch_size - i # handle padding for the last batch if pad != 0: - if self.last_batch_handle == 'discard': + if self.last_batch_handle == 'discard': # pylint: disable=no-else-raise raise StopIteration # if the option is 'roll_over', throw StopIteration and cache the data elif self.last_batch_handle == 'roll_over' and \ diff --git a/python/mxnet/io/io.py b/python/mxnet/io/io.py index 2bd1d6115ac3..2a42840bcf22 100644 --- a/python/mxnet/io/io.py +++ b/python/mxnet/io/io.py @@ -15,6 +15,8 @@ # specific language governing permissions and limitations # under the License. +# coding: utf-8 +# pylint: disable=unnecessary-pass """Data iterators for common data formats.""" from __future__ import absolute_import from collections import namedtuple diff --git a/python/mxnet/model.py b/python/mxnet/model.py index f44ff041e35d..9ff23b7afbc5 100644 --- a/python/mxnet/model.py +++ b/python/mxnet/model.py @@ -640,7 +640,7 @@ def _init_iter(self, X, y, is_train): """Initialize the iterator given input.""" if isinstance(X, (np.ndarray, nd.NDArray)): if y is None: - if is_train: + if is_train: # pylint: disable=no-else-raise raise ValueError('y must be specified when X is numpy.ndarray') else: y = np.zeros(X.shape[0]) diff --git a/python/mxnet/module/base_module.py b/python/mxnet/module/base_module.py index ca8463153686..754e369b4e63 100644 --- a/python/mxnet/module/base_module.py +++ b/python/mxnet/module/base_module.py @@ -15,7 +15,7 @@ # specific language governing permissions and limitations # under the License. 
-# pylint: disable=fixme, too-many-arguments, too-many-locals +# pylint: disable=fixme, too-many-arguments, too-many-locals, no-else-raise # pylint: disable=too-many-public-methods, too-many-branches, too-many-lines """`BaseModule` defines an API for modules.""" diff --git a/python/mxnet/module/python_module.py b/python/mxnet/module/python_module.py index 886851efc305..df1648e82694 100644 --- a/python/mxnet/module/python_module.py +++ b/python/mxnet/module/python_module.py @@ -15,7 +15,7 @@ # specific language governing permissions and limitations # under the License. -# pylint: disable=too-many-instance-attributes, too-many-arguments +# pylint: disable=too-many-instance-attributes, too-many-arguments, unnecessary-pass """Provide some handy classes for user to implement a simple computation module in Python easily. """ diff --git a/python/mxnet/ndarray/contrib.py b/python/mxnet/ndarray/contrib.py index 1718a2c68d13..601bc682db38 100644 --- a/python/mxnet/ndarray/contrib.py +++ b/python/mxnet/ndarray/contrib.py @@ -514,7 +514,7 @@ def isfinite(data): [0. 0. 0. 1.] """ - is_data_not_nan = data == data + is_data_not_nan = data == data # pylint: disable=comparison-with-itself is_data_not_infinite = data.abs() != np.inf return ndarray.logical_and(is_data_not_infinite, is_data_not_nan) @@ -542,7 +542,7 @@ def isnan(data): [1. 0.] """ - return data != data + return data != data # pylint: disable=comparison-with-itself def adamw_update(weight, grad, mean, var, rescale_grad, lr, eta, beta1=0.9, beta2=0.999, epsilon=1e-8, wd=0, clip_gradient=-1, out=None, name=None, **kwargs): diff --git a/python/mxnet/ndarray/sparse.py b/python/mxnet/ndarray/sparse.py index 928079749db5..4987cb57b6ea 100644 --- a/python/mxnet/ndarray/sparse.py +++ b/python/mxnet/ndarray/sparse.py @@ -639,7 +639,7 @@ def __getitem__(self, key): if isinstance(key, int): raise Exception("__getitem__ with int key is not implemented for RowSparseNDArray yet") if isinstance(key, py_slice): - if key.step is not None or key.start is not None or key.stop is not None: + if key.step is not None or key.start is not None or key.stop is not None: # pylint: disable=no-else-raise raise Exception('RowSparseNDArray only supports [:] for __getitem__') else: return self @@ -1102,7 +1102,7 @@ def row_sparse_array(arg1, shape=None, ctx=None, dtype=None): # construct a row sparse array from (D0, D1 ..) or (data, indices) if isinstance(arg1, tuple): arg_len = len(arg1) - if arg_len < 2: + if arg_len < 2: # pylint: disable=no-else-raise raise ValueError("Unexpected length of input tuple: " + str(arg_len)) elif arg_len > 2: # empty ndarray with shape diff --git a/python/mxnet/ndarray_doc.py b/python/mxnet/ndarray_doc.py index 9d6258a89a3d..20e541f94d0b 100644 --- a/python/mxnet/ndarray_doc.py +++ b/python/mxnet/ndarray_doc.py @@ -16,7 +16,7 @@ # under the License. # coding: utf-8 -# pylint: disable=unused-argument, too-many-arguments +# pylint: disable=unused-argument, too-many-arguments, unnecessary-pass """Extra symbol documents""" from __future__ import absolute_import as _abs import re as _re diff --git a/python/mxnet/operator.py b/python/mxnet/operator.py index 2c69b9b46521..33e9b89a032c 100644 --- a/python/mxnet/operator.py +++ b/python/mxnet/operator.py @@ -16,7 +16,7 @@ # under the License. 
# coding: utf-8 -# pylint: disable=invalid-name, protected-access, too-many-arguments, no-self-use, too-many-locals, broad-except, too-many-lines +# pylint: disable=invalid-name, protected-access, too-many-arguments, no-self-use, too-many-locals, broad-except, too-many-lines, unnecessary-pass """numpy interface for operators.""" from __future__ import absolute_import diff --git a/python/mxnet/optimizer/optimizer.py b/python/mxnet/optimizer/optimizer.py index 2e7fe86c5af9..613ae8985aca 100644 --- a/python/mxnet/optimizer/optimizer.py +++ b/python/mxnet/optimizer/optimizer.py @@ -298,7 +298,7 @@ def set_learning_rate(self, lr): lr : float The new learning rate of the optimizer. """ - if self.lr_scheduler is not None: + if self.lr_scheduler is not None: # pylint: disable=no-else-raise raise UserWarning("LRScheduler of the optimizer has already been " "defined. Note that set_learning_rate can mutate " "the value of the learning rate of the optimizer " diff --git a/python/mxnet/recordio.py b/python/mxnet/recordio.py index bdc63235d702..225df3beb0dc 100644 --- a/python/mxnet/recordio.py +++ b/python/mxnet/recordio.py @@ -80,6 +80,8 @@ def open(self): self.writable = False else: raise ValueError("Invalid flag %s"%self.flag) + # pylint: disable=not-callable + # It's bug from pylint(astroid). See https://github.com/PyCQA/pylint/issues/1699 self.pid = current_process().pid self.is_open = True @@ -114,6 +116,8 @@ def __setstate__(self, d): def _check_pid(self, allow_reset=False): """Check process id to ensure integrity, reset if in new process.""" + # pylint: disable=not-callable + # It's bug from pylint(astroid). See https://github.com/PyCQA/pylint/issues/1699 if not self.pid == current_process().pid: if allow_reset: self.reset() diff --git a/python/mxnet/rnn/rnn_cell.py b/python/mxnet/rnn/rnn_cell.py index 6738aa19e9cd..cc9e6067e9ee 100644 --- a/python/mxnet/rnn/rnn_cell.py +++ b/python/mxnet/rnn/rnn_cell.py @@ -515,7 +515,7 @@ def __call__(self, inputs, states): bias=self._hB, num_hidden=self._num_hidden * 3, name="%s_h2h" % name) - + # pylint: disable=unbalanced-tuple-unpacking i2h_r, i2h_z, i2h = symbol.SliceChannel(i2h, num_outputs=3, name="%s_i2h_slice" % name) h2h_r, h2h_z, h2h = symbol.SliceChannel(h2h, num_outputs=3, name="%s_h2h_slice" % name) @@ -1419,7 +1419,7 @@ def __call__(self, inputs, states): seq_idx = self._counter name = '%st%d_' % (self._prefix, seq_idx) i2h, h2h = self._conv_forward(inputs, states, name) - + # pylint: disable=unbalanced-tuple-unpacking i2h_r, i2h_z, i2h = symbol.SliceChannel(i2h, num_outputs=3, name="%s_i2h_slice" % name) h2h_r, h2h_z, h2h = symbol.SliceChannel(h2h, num_outputs=3, name="%s_h2h_slice" % name) diff --git a/python/mxnet/symbol/symbol.py b/python/mxnet/symbol/symbol.py index 0924b2fd9ee8..467d612700ec 100644 --- a/python/mxnet/symbol/symbol.py +++ b/python/mxnet/symbol/symbol.py @@ -1013,7 +1013,6 @@ def _infer_type_impl(self, partial, *args, **kwargs): return (arg_types, out_types, aux_types) else: return (None, None, None) - # pylint: enable=too-many-locals def infer_shape(self, *args, **kwargs): """Infers the shapes of all arguments and all outputs given the known shapes of @@ -1071,6 +1070,7 @@ def infer_shape(self, *args, **kwargs): List of auxiliary state shapes. The order is same as the order of list_auxiliary_states(). 
""" + # pylint: disable=too-many-locals try: res = self._infer_shape_impl(False, *args, **kwargs) if res[1] is None: diff --git a/python/mxnet/test_utils.py b/python/mxnet/test_utils.py index bbb12dd5d7af..d80fab58be42 100644 --- a/python/mxnet/test_utils.py +++ b/python/mxnet/test_utils.py @@ -206,7 +206,7 @@ def _get_powerlaw_dataset_csr(num_rows, num_cols, density=0.1, dtype=None): return mx.nd.array(output_arr).tostype("csr") col_max = col_max * 2 - if unused_nnz > 0: + if unused_nnz > 0: # pylint: disable=no-else-raise raise ValueError("not supported for this density: %s" " for this shape (%s,%s)" % (density, num_rows, num_cols)) else: @@ -1348,7 +1348,7 @@ def check_consistency(sym, ctx_list, scale=1.0, grad_req='write', except AssertionError as e: print('Predict Err: ctx %d vs ctx %d at %s'%(i, max_idx, name)) traceback.print_exc() - if raise_on_err: + if raise_on_err: # pylint: disable=no-else-raise raise e else: print(str(e)) @@ -1375,7 +1375,7 @@ def check_consistency(sym, ctx_list, scale=1.0, grad_req='write', except AssertionError as e: print('Train Err: ctx %d vs ctx %d at %s'%(i, max_idx, name)) traceback.print_exc() - if raise_on_err: + if raise_on_err: # pylint: disable=no-else-raise raise e else: print(str(e)) @@ -1455,7 +1455,7 @@ def download(url, fname=None, dirname=None, overwrite=False, retries=5): break except Exception as e: retries -= 1 - if retries <= 0: + if retries <= 0: # pylint: disable=no-else-raise raise e else: print("download failed, retrying, {} attempt{} left" @@ -1536,7 +1536,7 @@ def get_mnist_iterator(batch_size, input_shape, num_parts=1, part_index=0): """ get_mnist_ubyte() - flat = False if len(input_shape) == 3 else True + flat = False if len(input_shape) == 3 else True # pylint: disable=simplifiable-if-expression train_dataiter = mx.io.MNISTIter( image="data/train-images-idx3-ubyte", @@ -1990,7 +1990,7 @@ def compare_optimizer(opt1, opt2, shape, dtype, w_stype='default', g_stype='defa if w_stype == 'default': w2 = mx.random.uniform(shape=shape, ctx=default_context(), dtype=dtype) w1 = w2.copyto(default_context()) - elif w_stype == 'row_sparse' or w_stype == 'csr': + elif w_stype in ('row_sparse', 'csr'): w2 = rand_ndarray(shape, w_stype, density=1, dtype=dtype) w1 = w2.copyto(default_context()).tostype('default') else: @@ -1998,7 +1998,7 @@ def compare_optimizer(opt1, opt2, shape, dtype, w_stype='default', g_stype='defa if g_stype == 'default': g2 = mx.random.uniform(shape=shape, ctx=default_context(), dtype=dtype) g1 = g2.copyto(default_context()) - elif g_stype == 'row_sparse' or g_stype == 'csr': + elif g_stype in ('row_sparse', 'csr'): g2 = rand_ndarray(shape, g_stype, dtype=dtype) g1 = g2.copyto(default_context()).tostype('default') else: diff --git a/tools/caffe_converter/compare_layers.py b/tools/caffe_converter/compare_layers.py index ed73ee991c81..8d6598c8b781 100644 --- a/tools/caffe_converter/compare_layers.py +++ b/tools/caffe_converter/compare_layers.py @@ -143,8 +143,6 @@ def convert_and_compare_caffe_to_mxnet(image_url, gpu, caffe_prototxt_path, caff compare_layers_from_nets(caffe_net, arg_params, aux_params, exe, layer_name_to_record, top_to_layers, mean_diff_allowed, max_diff_allowed) - return - def _bfs(root_node, process_node): """ @@ -280,7 +278,6 @@ def _process_layer_parameters(layer): warnings.warn('No handling for layer %s of type %s, should we ignore it?', layer.name, layer.type) - return def _process_layer_output(caffe_blob_name): @@ -332,8 +329,6 @@ def _process_layer_output(caffe_blob_name): for caffe_blob_name in 
caffe_net.blobs.keys(): _process_layer_output(caffe_blob_name) - return - def main(): """Entrypoint for compare_layers""" diff --git a/tools/caffe_converter/test_converter.py b/tools/caffe_converter/test_converter.py index 7b47278f51f9..3c325d6bdd63 100644 --- a/tools/caffe_converter/test_converter.py +++ b/tools/caffe_converter/test_converter.py @@ -76,8 +76,6 @@ def test_model_weights_and_outputs(model_name, image_url, gpu): convert_and_compare_caffe_to_mxnet(image_url, gpu, prototxt, caffemodel, mean, mean_diff_allowed=1e-03, max_diff_allowed=1e-01) - return - def main(): """Entrypoint for test_converter"""
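
Note: as a minimal sketch (the helper name and body below are hypothetical, not taken from the MXNet sources), this is the if/raise-followed-by-else shape that pylint 2.3.1 reports as no-else-raise and that the hunks above silence with an inline disable rather than restructuring the control flow:

    import os

    def load_params(path):
        """Hypothetical helper mirroring the pattern suppressed throughout this patch."""
        # pylint 2.3.1's no-else-raise check (R1720) flags an "else" after a branch
        # that raises; the patch keeps the original control flow and adds an inline
        # disable on the "if" line instead.
        if not os.path.isfile(path):  # pylint: disable=no-else-raise
            raise ValueError("invalid params file: %s" % path)
        else:
            with open(path, "rb") as f:
                return f.read()

The same inline-disable approach is used for the not-callable false positive on multiprocessing.current_process() in recordio.py, which the patch attributes to the astroid bug tracked at https://github.com/PyCQA/pylint/issues/1699.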