[CodeStyle] use np.testing.assert_array_equal instead of self.assertTrue(np.array_equal(...)) #44947

Merged 6 commits on Aug 10, 2022
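
The rewrite applied in every file below is mechanical: each `self.assertTrue(np.array_equal(a, b), msg)` becomes `np.testing.assert_array_equal(a, b, err_msg=msg)`. The payoff is diagnosability: assertTrue collapses the comparison to a bare boolean before unittest sees it, so a failure prints only the hand-written message, while np.testing.assert_array_equal raises an AssertionError that reports shapes, dtypes, the count of mismatched elements, and the offending values, then appends err_msg. A minimal before/after sketch (the test class, array names, and message text are illustrative, not taken from any one file in this diff):

import unittest

import numpy as np


class DotOutputTest(unittest.TestCase):

    def test_old_style(self):
        out, expected = np.ones(3), np.ones(3)
        # Before: np.array_equal reduces the check to True/False, so on
        # failure unittest can only print the message assembled by hand.
        self.assertTrue(
            np.array_equal(out, expected),
            "custom op out: {},\n expected out: {}".format(out, expected))

    def test_new_style(self):
        out, expected = np.ones(3), np.ones(3)
        # After: numpy builds an element-wise mismatch report itself and
        # appends err_msg, so the hand-formatted arrays are still shown.
        np.testing.assert_array_equal(
            out,
            expected,
            err_msg="custom op out: {},\n expected out: {}".format(
                out, expected))


if __name__ == '__main__':
    unittest.main()

One behavioral difference worth keeping in mind: np.testing.assert_array_equal treats NaNs at matching positions as equal, while np.array_equal returns False whenever NaNs are present (its equal_nan flag defaults to False), so tests that compare NaN-producing outputs could change outcome under this rewrite.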
@@ -43,9 +43,10 @@ def test_custom_kernel_dot_run(self):
         y = paddle.to_tensor(y_data)
         out = paddle.dot(x, y)

-        self.assertTrue(
-            np.array_equal(out.numpy(), result),
-            "custom kernel dot out: {},\n numpy dot out: {}".format(
+        np.testing.assert_array_equal(
+            out.numpy(),
+            result,
+            err_msg='custom kernel dot out: {},\n numpy dot out: {}'.format(
                 out.numpy(), result))


@@ -72,9 +73,10 @@ def test_custom_kernel_dot_run(self):
         y = paddle.to_tensor(y_data)
         out = paddle.dot(x, y)

-        self.assertTrue(
-            np.array_equal(out.numpy(), result),
-            "custom kernel dot out: {},\n numpy dot out: {}".format(
+        np.testing.assert_array_equal(
+            out.numpy(),
+            result,
+            err_msg='custom kernel dot out: {},\n numpy dot out: {}'.format(
                 out.numpy(), result))


@@ -65,9 +65,10 @@ def test_custom_kernel_dot_load(self):
         y = paddle.to_tensor(y_data)
         out = paddle.dot(x, y)

-        self.assertTrue(
-            np.array_equal(out.numpy(), result),
-            "custom kernel dot out: {},\n numpy dot out: {}".format(
+        np.testing.assert_array_equal(
+            out.numpy(),
+            result,
+            err_msg='custom kernel dot out: {},\n numpy dot out: {}'.format(
                 out.numpy(), result))

     def tearDown(self):
2 changes: 1 addition & 1 deletion python/paddle/fluid/tests/custom_op/test_context_pool.py
@@ -51,7 +51,7 @@ def use_context_pool(self):
         x = paddle.ones([2, 2], dtype='float32')
         out = custom_ops.context_pool_test(x)

-        self.assertTrue(np.array_equal(x.numpy(), out.numpy()))
+        np.testing.assert_array_equal(x.numpy(), out.numpy())

     def test_using_context_pool(self):
         with _test_eager_guard():
4 changes: 2 additions & 2 deletions python/paddle/fluid/tests/custom_op/test_custom_attrs_jit.py
@@ -66,7 +66,7 @@ def func_attr_value(self):
         out.stop_gradient = False
         out.backward()

-        self.assertTrue(np.array_equal(x.numpy(), out.numpy()))
+        np.testing.assert_array_equal(x.numpy(), out.numpy())

     def test_attr_value(self):
         with _test_eager_guard():
@@ -85,7 +85,7 @@ def func_const_attr_value(self):
         out.stop_gradient = False
         out.backward()

-        self.assertTrue(np.array_equal(x.numpy(), out.numpy()))
+        np.testing.assert_array_equal(x.numpy(), out.numpy())

     def test_const_attr_value(self):
         with _test_eager_guard():
7 changes: 4 additions & 3 deletions python/paddle/fluid/tests/custom_op/test_custom_concat.py
@@ -112,9 +112,10 @@ def setUp(self):
         self.axises = [0, 1]

     def check_output(self, out, pd_out, name):
-        self.assertTrue(
-            np.array_equal(out, pd_out),
-            "custom op {}: {},\n paddle api {}: {}".format(
+        np.testing.assert_array_equal(
+            out,
+            pd_out,
+            err_msg='custom op {}: {},\n paddle api {}: {}'.format(
                 name, out, name, pd_out))

     def func_dynamic(self):
7 changes: 4 additions & 3 deletions python/paddle/fluid/tests/custom_op/test_custom_conj.py
@@ -97,9 +97,10 @@ def setUp(self):
         self.shape = [2, 20, 2, 3]

     def check_output(self, out, pd_out, name):
-        self.assertTrue(
-            np.array_equal(out, pd_out),
-            "custom op {}: {},\n paddle api {}: {}".format(
+        np.testing.assert_array_equal(
+            out,
+            pd_out,
+            err_msg='custom op {}: {},\n paddle api {}: {}'.format(
                 name, out, name, pd_out))

     def run_dynamic(self, dtype, np_input):
7 changes: 4 additions & 3 deletions python/paddle/fluid/tests/custom_op/test_custom_linear.py
@@ -97,9 +97,10 @@ def setUp(self):
         self.np_bias = np.ones([4], dtype="float32")

     def check_output(self, out, pd_out, name):
-        self.assertTrue(
-            np.array_equal(out, pd_out),
-            "custom op {}: {},\n paddle api {}: {}".format(
+        np.testing.assert_array_equal(
+            out,
+            pd_out,
+            err_msg='custom op {}: {},\n paddle api {}: {}'.format(
                 name, out, name, pd_out))

     def test_static(self):
@@ -83,7 +83,7 @@ def test_static(self):
         y1_value, y2_value = exe.run(paddle.static.default_main_program(),
                                      feed={x.name: x_np},
                                      fetch_list=[y1, y2])
-        self.assertTrue(np.array_equal(y1_value, y2_value))
+        np.testing.assert_array_equal(y1_value, y2_value)

         paddle.disable_static()

36 changes: 16 additions & 20 deletions python/paddle/fluid/tests/custom_op/test_custom_relu_model.py
@@ -121,25 +121,23 @@ def func_train_eval(self):
         if _in_legacy_dygraph():
             custom_relu_dy2stat_train_out = self.train_model(
                 use_custom_op=True, dy2stat=True)  # for to_static
-            self.assertTrue(
-                np.array_equal(origin_relu_train_out,
-                               custom_relu_dy2stat_train_out))
+            np.testing.assert_array_equal(origin_relu_train_out,
+                                          custom_relu_dy2stat_train_out)

-        self.assertTrue(
-            np.array_equal(origin_relu_train_out, custom_relu_train_out))
+        np.testing.assert_array_equal(origin_relu_train_out,
+                                      custom_relu_train_out)

         # for eval
         origin_relu_eval_out = self.eval_model(use_custom_op=False)
         custom_relu_eval_out = self.eval_model(use_custom_op=True)
         if _in_legacy_dygraph():
             custom_relu_dy2stat_eval_out = self.eval_model(
                 use_custom_op=True, dy2stat=True)  # for to_static
-            self.assertTrue(
-                np.array_equal(origin_relu_eval_out,
-                               custom_relu_dy2stat_eval_out))
+            np.testing.assert_array_equal(origin_relu_eval_out,
+                                          custom_relu_dy2stat_eval_out)

-        self.assertTrue(
-            np.array_equal(origin_relu_eval_out, custom_relu_eval_out))
+        np.testing.assert_array_equal(origin_relu_eval_out,
+                                      custom_relu_eval_out)

     def test_train_eval(self):
         with _test_eager_guard():
@@ -243,11 +241,10 @@ def test_train_eval(self):
                                                      use_custom_op=True,
                                                      use_pe=True)

-        self.assertTrue(
-            np.array_equal(original_relu_train_out, custom_relu_train_out))
-        self.assertTrue(
-            np.array_equal(original_relu_train_pe_out,
-                           custom_relu_train_pe_out))
+        np.testing.assert_array_equal(original_relu_train_out,
+                                      custom_relu_train_out)
+        np.testing.assert_array_equal(original_relu_train_pe_out,
+                                      custom_relu_train_pe_out)

         # for eval
         original_relu_eval_out = self.eval_model(device,
@@ -261,11 +258,10 @@
                                                      use_custom_op=True,
                                                      use_pe=True)

-        self.assertTrue(
-            np.array_equal(original_relu_eval_out, custom_relu_eval_out))
-        self.assertTrue(
-            np.array_equal(original_relu_eval_pe_out,
-                           custom_relu_eval_pe_out))
+        np.testing.assert_array_equal(original_relu_eval_out,
+                                      custom_relu_eval_out)
+        np.testing.assert_array_equal(original_relu_eval_pe_out,
+                                      custom_relu_eval_pe_out)

     def train_model(self, device, use_custom_op=False, use_pe=False):
         # reset random seed
27 changes: 15 additions & 12 deletions python/paddle/fluid/tests/custom_op/test_custom_relu_op_jit.py
@@ -71,10 +71,11 @@ def test_static(self):
                 out = custom_relu_static(custom_op, device, dtype, x)
                 pd_out = custom_relu_static(custom_op, device, dtype, x,
                                             False)
-                self.assertTrue(
-                    np.array_equal(out, pd_out),
-                    "custom op out: {},\n paddle api out: {}".format(
-                        out, pd_out))
+                np.testing.assert_array_equal(
+                    out,
+                    pd_out,
+                    err_msg='custom op out: {},\n paddle api out: {}'.
+                    format(out, pd_out))

     def func_dynamic(self):
         for device in self.devices:
@@ -87,14 +88,16 @@ def func_dynamic(self):
                                                  x)
                 pd_out, pd_x_grad = custom_relu_dynamic(
                     custom_op, device, dtype, x, False)
-                self.assertTrue(
-                    np.array_equal(out, pd_out),
-                    "custom op out: {},\n paddle api out: {}".format(
-                        out, pd_out))
-                self.assertTrue(
-                    np.array_equal(x_grad, pd_x_grad),
-                    "custom op x grad: {},\n paddle api x grad: {}".format(
-                        x_grad, pd_x_grad))
+                np.testing.assert_array_equal(
+                    out,
+                    pd_out,
+                    err_msg='custom op out: {},\n paddle api out: {}'.
+                    format(out, pd_out))
+                np.testing.assert_array_equal(
+                    x_grad,
+                    pd_x_grad,
+                    err_msg='custom op x grad: {},\n paddle api x grad: {}'.
+                    format(x_grad, pd_x_grad))

     def test_dynamic(self):
         with _test_eager_guard():
68 changes: 38 additions & 30 deletions python/paddle/fluid/tests/custom_op/test_custom_relu_op_setup.py
@@ -224,10 +224,11 @@ def test_static(self):
                 out = custom_relu_static(custom_op, device, dtype, x)
                 pd_out = custom_relu_static(custom_op, device, dtype, x,
                                             False)
-                self.assertTrue(
-                    np.array_equal(out, pd_out),
-                    "custom op out: {},\n paddle api out: {}".format(
-                        out, pd_out))
+                np.testing.assert_array_equal(
+                    out,
+                    pd_out,
+                    err_msg='custom op out: {},\n paddle api out: {}'.
+                    format(out, pd_out))

     def test_static_pe(self):
         for device in self.devices:
@@ -239,10 +240,11 @@ def test_static_pe(self):
                 out = custom_relu_static_pe(custom_op, device, dtype, x)
                 pd_out = custom_relu_static_pe(custom_op, device, dtype, x,
                                                False)
-                self.assertTrue(
-                    np.array_equal(out, pd_out),
-                    "custom op out: {},\n paddle api out: {}".format(
-                        out, pd_out))
+                np.testing.assert_array_equal(
+                    out,
+                    pd_out,
+                    err_msg='custom op out: {},\n paddle api out: {}'.
+                    format(out, pd_out))

     def func_dynamic(self):
         for device in self.devices:
@@ -255,14 +257,16 @@ def func_dynamic(self):
                                                  x)
                 pd_out, pd_x_grad = custom_relu_dynamic(
                     custom_op, device, dtype, x, False)
-                self.assertTrue(
-                    np.array_equal(out, pd_out),
-                    "custom op out: {},\n paddle api out: {}".format(
-                        out, pd_out))
-                self.assertTrue(
-                    np.array_equal(x_grad, pd_x_grad),
-                    "custom op x grad: {},\n paddle api x grad: {}".format(
-                        x_grad, pd_x_grad))
+                np.testing.assert_array_equal(
+                    out,
+                    pd_out,
+                    err_msg='custom op out: {},\n paddle api out: {}'.
+                    format(out, pd_out))
+                np.testing.assert_array_equal(
+                    x_grad,
+                    pd_x_grad,
+                    err_msg='custom op x grad: {},\n paddle api x grad: {}'.
+                    format(x_grad, pd_x_grad))

     def test_dynamic(self):
         with _test_eager_guard():
@@ -286,10 +290,11 @@ def test_static_save_and_load_inference_model(self):
             predict_infer = exe.run(inference_program,
                                     feed={feed_target_names[0]: np_data},
                                     fetch_list=fetch_targets)
-            self.assertTrue(
-                np.array_equal(predict, predict_infer),
-                "custom op predict: {},\n custom op infer predict: {}".
-                format(predict, predict_infer))
+            np.testing.assert_array_equal(
+                predict,
+                predict_infer,
+                err_msg='custom op predict: {},\n custom op infer predict: {}'
+                .format(predict, predict_infer))
         paddle.disable_static()

     def test_static_save_and_run_inference_predictor(self):
@@ -331,14 +336,16 @@ def test_func_double_grad_dynamic(self):
                     self.custom_ops[0], device, dtype, x)
                 pd_out, pd_dx_grad = custom_relu_double_grad_dynamic(
                     self.custom_ops[0], device, dtype, x, False)
-                self.assertTrue(
-                    np.array_equal(out, pd_out),
-                    "custom op out: {},\n paddle api out: {}".format(
+                np.testing.assert_array_equal(
+                    out,
+                    pd_out,
+                    err_msg='custom op out: {},\n paddle api out: {}'.format(
                         out, pd_out))
-                self.assertTrue(
-                    np.array_equal(dx_grad, pd_dx_grad),
-                    "custom op dx grad: {},\n paddle api dx grad: {}".format(
-                        dx_grad, pd_dx_grad))
+                np.testing.assert_array_equal(
+                    dx_grad,
+                    pd_dx_grad,
+                    err_msg='custom op dx grad: {},\n paddle api dx grad: {}'.
+                    format(dx_grad, pd_dx_grad))

     def test_with_dataloader(self):
         for device in self.devices:
@@ -357,9 +364,10 @@ def test_with_dataloader(self):
             for batch_id, (image, _) in enumerate(train_loader()):
                 out = self.custom_ops[0](image)
                 pd_out = paddle.nn.functional.relu(image)
-                self.assertTrue(
-                    np.array_equal(out, pd_out),
-                    "custom op out: {},\n paddle api out: {}".format(
+                np.testing.assert_array_equal(
+                    out,
+                    pd_out,
+                    err_msg='custom op out: {},\n paddle api out: {}'.format(
                         out, pd_out))

                 if batch_id == 5:
@@ -46,9 +46,11 @@ def func_slice_output(self):
         x = paddle.to_tensor(np_x)
         custom_op_out = custom_ops.custom_simple_slice(x, 2, 3)
         np_out = np_x[2:3]
-        self.assertTrue(
-            np.array_equal(custom_op_out, np_out),
-            "custom op: {},\n numpy: {}".format(np_out, custom_op_out.numpy()))
+        np.testing.assert_array_equal(
+            custom_op_out,
+            np_out,
+            err_msg='custom op: {},\n numpy: {}'.format(np_out,
+                                                        custom_op_out.numpy()))

     def test_slice_output(self):
         with _test_eager_guard():
7 changes: 4 additions & 3 deletions python/paddle/fluid/tests/custom_op/test_dispatch_jit.py
@@ -47,9 +47,10 @@ def run_dispatch_test_impl(self, func, dtype):
         np_x = x.numpy()
         np_out = out.numpy()
         self.assertTrue(dtype in str(np_out.dtype))
-        self.assertTrue(
-            np.array_equal(np_x, np_out),
-            "custom op x: {},\n custom op out: {}".format(np_x, np_out))
+        np.testing.assert_array_equal(
+            np_x,
+            np_out,
+            err_msg='custom op x: {},\n custom op out: {}'.format(np_x, np_out))

     def run_dispatch_test(self, func, dtype):
         with _test_eager_guard():
10 changes: 4 additions & 6 deletions python/paddle/fluid/tests/custom_op/test_multi_out_jit.py
@@ -70,14 +70,12 @@ def check_multi_outputs(self, outs, is_dynamic=False):
             one_int32 = one_int32.numpy()
         # Fake_float64
         self.assertTrue('float64' in str(zero_float64.dtype))
-        self.assertTrue(
-            np.array_equal(zero_float64,
-                           np.zeros([4, 8]).astype('float64')))
+        np.testing.assert_array_equal(zero_float64,
+                                      np.zeros([4, 8]).astype('float64'))
         # ZFake_int32
        self.assertTrue('int32' in str(one_int32.dtype))
-        self.assertTrue(
-            np.array_equal(one_int32,
-                           np.ones([4, 8]).astype('int32')))
+        np.testing.assert_array_equal(one_int32,
+                                      np.ones([4, 8]).astype('int32'))

     def test_static(self):
         paddle.enable_static()