diff --git a/python/mxnet/_numpy_op_doc.py b/python/mxnet/_numpy_op_doc.py
index 501573463829..681f04ae711d 100644
--- a/python/mxnet/_numpy_op_doc.py
+++ b/python/mxnet/_numpy_op_doc.py
@@ -89,8 +89,7 @@ def _np_linalg_slogdet(a):
     -------
     sign : (...) ndarray
         A number representing the sign of the determinant. For a real matrix,
-        this is 1, 0, or -1. For a complex matrix, this is a complex number
-        with absolute value 1 (i.e., it is on the unit circle), or else 0.
+        this is 1, 0, or -1.
     logdet : (...) array_like
         The natural log of the absolute value of the determinant.
 
diff --git a/tests/python/unittest/test_numpy_op.py b/tests/python/unittest/test_numpy_op.py
index 1a342ed0a171..58c6dbb40f76 100644
--- a/tests/python/unittest/test_numpy_op.py
+++ b/tests/python/unittest/test_numpy_op.py
@@ -135,8 +135,8 @@ def tensordot_backward(a, b, axes=2):
         test_tensordot = TestTensordot(axes)
         if hybridize:
             test_tensordot.hybridize()
-        a = rand_ndarray(shape = a_shape, dtype = dtype).as_np_ndarray()
-        b = rand_ndarray(shape = b_shape, dtype = dtype).as_np_ndarray()
+        a = rand_ndarray(shape=a_shape, dtype=dtype).as_np_ndarray()
+        b = rand_ndarray(shape=b_shape, dtype=dtype).as_np_ndarray()
         a.attach_grad()
         b.attach_grad()
 
@@ -161,7 +161,7 @@ def tensordot_backward(a, b, axes=2):
         b_sym = mx.sym.Variable("b").as_np_ndarray()
         mx_sym = mx.sym.np.tensordot(a_sym, b_sym, axes).as_nd_ndarray()
         check_numeric_gradient(mx_sym, [a.as_nd_ndarray(), b.as_nd_ndarray()],
-                               rtol=1e-1, atol=1e-1, dtype = dtype)
+                               rtol=1e-1, atol=1e-1, dtype=dtype)
 
 
 @with_seed()
@@ -236,14 +236,14 @@ def hybrid_forward(self, F, a):
         test_det = TestDet()
         if hybridize:
             test_det.hybridize()
-        a = rand_ndarray(shape = a_shape, dtype = dtype).as_np_ndarray()
+        a = rand_ndarray(shape=a_shape, dtype=dtype).as_np_ndarray()
         a.attach_grad()
 
         np_out = _np.linalg.det(a.asnumpy())
         with mx.autograd.record():
             mx_out = test_det(a)
         assert mx_out.shape == np_out.shape
-        assert_almost_equal(mx_out.asnumpy(), np_out, rtol = 1e-1, atol = 1e-1)
+        assert_almost_equal(mx_out.asnumpy(), np_out, rtol=1e-1, atol=1e-1)
         mx_out.backward()
 
         # Test imperative once again
@@ -255,7 +255,7 @@ def hybrid_forward(self, F, a):
         a_sym = mx.sym.Variable("a").as_np_ndarray()
         mx_sym = mx.sym.np.linalg.det(a_sym).as_nd_ndarray()
         check_numeric_gradient(mx_sym, [a.as_nd_ndarray()],
-                               rtol=1e-1, atol=1e-1, dtype = dtype)
+                               rtol=1e-1, atol=1e-1, dtype=dtype)
 
 
 @with_seed()
@@ -282,7 +282,7 @@ def hybrid_forward(self, F, a):
         test_slogdet = TestSlogdet()
         if hybridize:
             test_slogdet.hybridize()
-        a = rand_ndarray(shape = a_shape, dtype = dtype).as_np_ndarray()
+        a = rand_ndarray(shape=a_shape, dtype=dtype).as_np_ndarray()
         a.attach_grad()
 
         np_out = _np.linalg.slogdet(a.asnumpy())