Change style.
ckt624 committed Aug 17, 2019
1 parent 46f9d54 commit 0a4c9da
Showing 2 changed files with 8 additions and 9 deletions.
3 changes: 1 addition & 2 deletions python/mxnet/_numpy_op_doc.py
@@ -89,8 +89,7 @@ def _np_linalg_slogdet(a):
     -------
     sign : (...) ndarray
         A number representing the sign of the determinant. For a real matrix,
-        this is 1, 0, or -1. For a complex matrix, this is a complex number
-        with absolute value 1 (i.e., it is on the unit circle), or else 0.
+        this is 1, 0, or -1.
     logdet : (...) array_like
         The natural log of the absolute value of the determinant.
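For context, the relationship the docstring describes between the two return values can be checked with NumPy's numpy.linalg.slogdet, which this operator documentation is written to match. A minimal sketch in plain NumPy (the MXNet operator itself is not exercised here):

import numpy as np

a = np.array([[2.0, 0.0],
              [0.0, 3.0]])

# slogdet returns (sign, logdet) such that det(a) == sign * exp(logdet);
# for a real matrix, sign is 1.0, 0.0, or -1.0.
sign, logdet = np.linalg.slogdet(a)

assert sign == 1.0
assert np.isclose(np.exp(logdet), 6.0)                      # |det(a)| = 2 * 3 = 6
assert np.isclose(sign * np.exp(logdet), np.linalg.det(a))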
14 changes: 7 additions & 7 deletions tests/python/unittest/test_numpy_op.py
@@ -135,8 +135,8 @@ def tensordot_backward(a, b, axes=2):
     test_tensordot = TestTensordot(axes)
     if hybridize:
         test_tensordot.hybridize()
-    a = rand_ndarray(shape = a_shape, dtype = dtype).as_np_ndarray()
-    b = rand_ndarray(shape = b_shape, dtype = dtype).as_np_ndarray()
+    a = rand_ndarray(shape=a_shape, dtype=dtype).as_np_ndarray()
+    b = rand_ndarray(shape=b_shape, dtype=dtype).as_np_ndarray()
     a.attach_grad()
     b.attach_grad()

@@ -161,7 +161,7 @@ def tensordot_backward(a, b, axes=2):
     b_sym = mx.sym.Variable("b").as_np_ndarray()
     mx_sym = mx.sym.np.tensordot(a_sym, b_sym, axes).as_nd_ndarray()
     check_numeric_gradient(mx_sym, [a.as_nd_ndarray(), b.as_nd_ndarray()],
-                           rtol=1e-1, atol=1e-1, dtype = dtype)
+                           rtol=1e-1, atol=1e-1, dtype=dtype)


 @with_seed()
@@ -236,14 +236,14 @@ def hybrid_forward(self, F, a):
     test_det = TestDet()
     if hybridize:
         test_det.hybridize()
-    a = rand_ndarray(shape = a_shape, dtype = dtype).as_np_ndarray()
+    a = rand_ndarray(shape=a_shape, dtype=dtype).as_np_ndarray()
     a.attach_grad()

     np_out = _np.linalg.det(a.asnumpy())
     with mx.autograd.record():
         mx_out = test_det(a)
     assert mx_out.shape == np_out.shape
-    assert_almost_equal(mx_out.asnumpy(), np_out, rtol = 1e-1, atol = 1e-1)
+    assert_almost_equal(mx_out.asnumpy(), np_out, rtol=1e-1, atol=1e-1)
     mx_out.backward()

     # Test imperative once again
@@ -255,7 +255,7 @@ def hybrid_forward(self, F, a):
     a_sym = mx.sym.Variable("a").as_np_ndarray()
     mx_sym = mx.sym.np.linalg.det(a_sym).as_nd_ndarray()
     check_numeric_gradient(mx_sym, [a.as_nd_ndarray()],
-                           rtol=1e-1, atol=1e-1, dtype = dtype)
+                           rtol=1e-1, atol=1e-1, dtype=dtype)


 @with_seed()
@@ -282,7 +282,7 @@ def hybrid_forward(self, F, a):
     test_slogdet = TestSlogdet()
     if hybridize:
         test_slogdet.hybridize()
-    a = rand_ndarray(shape = a_shape, dtype = dtype).as_np_ndarray()
+    a = rand_ndarray(shape=a_shape, dtype=dtype).as_np_ndarray()
     a.attach_grad()

     np_out = _np.linalg.slogdet(a.asnumpy())
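For reference, the style applied throughout the test file follows PEP 8's rule for keyword arguments and default parameter values: no spaces around the '=' sign. A minimal illustration with a hypothetical function (not taken from the changed files):

# PEP 8: no spaces around '=' when it marks a keyword argument
# or a default parameter value.
def scale(x, factor=2.0):           # default value written as factor=2.0
    return x * factor

y = scale(3.0, factor=4.0)          # keyword argument: factor=4.0, not factor = 4.0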
