From 028dbcf96407cd390f53e390f2d71d451f49e649 Mon Sep 17 00:00:00 2001
From: gouzil <66515297+gouzil@users.noreply.github.com>
Date: Fri, 10 Nov 2023 19:30:15 +0800
Subject: [PATCH] [Dy2St] pir dy2st unittest verification - Part 3 (#58890)

---------

Co-authored-by: SigureMo
---
 .../dygraph_to_static_utils_new.py            | 10 ++----
 test/dygraph_to_static/test_drop_path.py      | 23 ++++++-------
 .../test_duplicate_output.py                  | 12 +++----
 test/dygraph_to_static/test_fetch_feed.py     | 28 +++++++---------
 test/dygraph_to_static/test_isinstance.py     | 18 +++++++----
 test/dygraph_to_static/test_multi_forward.py  | 32 +++++++++++++------
 test/dygraph_to_static/test_print.py          | 17 ++--------
 .../test_reinforcement_learning.py            | 17 +++++-----
 8 files changed, 74 insertions(+), 83 deletions(-)

diff --git a/test/dygraph_to_static/dygraph_to_static_utils_new.py b/test/dygraph_to_static/dygraph_to_static_utils_new.py
index e0af2406f77e4..fcb1b8fa54924 100644
--- a/test/dygraph_to_static/dygraph_to_static_utils_new.py
+++ b/test/dygraph_to_static/dygraph_to_static_utils_new.py
@@ -102,7 +102,7 @@ def impl(*args, **kwargs):
 
 def to_legacy_ir_test(fn):
     def impl(*args, **kwargs):
-        logger.info("[Program] running legacy ir")
+        logger.info("[LEGACY_IR] running legacy ir")
         return fn(*args, **kwargs)
 
     return impl
@@ -117,8 +117,8 @@ def impl(*args, **kwargs):
             return
         with static.scope_guard(static.Scope()):
             with static.program_guard(static.Program()):
+                pir_flag = 'FLAGS_enable_pir_in_executor'
                 try:
-                    pir_flag = 'FLAGS_enable_pir_in_executor'
                     os.environ[pir_flag] = 'True'
                     set_flags({pir_flag: True})
                     ir_outs = fn(*args, **kwargs)
@@ -202,12 +202,6 @@ def __new__(cls, name, bases, attrs):
             )
             # Generate all test cases
             for to_static_mode, ir_mode in to_static_with_ir_modes:
-                # NOTE(gouzil): Temporarily not supported SOT + PIR, link: https://github.com/PaddlePaddle/Paddle/pull/58630
-                if (
-                    to_static_mode == ToStaticMode.SOT
-                    and ir_mode == IrMode.PIR_API
-                ):
-                    continue
                 new_attrs[
                     Dy2StTestMeta.test_case_name(
                         fn_name, to_static_mode, ir_mode
diff --git a/test/dygraph_to_static/test_drop_path.py b/test/dygraph_to_static/test_drop_path.py
index d559ce7f55ac2..7bd5955c8b60c 100644
--- a/test/dygraph_to_static/test_drop_path.py
+++ b/test/dygraph_to_static/test_drop_path.py
@@ -15,7 +15,10 @@
 import unittest
 
 import numpy as np
-from dygraph_to_static_utils_new import Dy2StTestBase, test_legacy_and_pir
+from dygraph_to_static_utils_new import (
+    Dy2StTestBase,
+    test_legacy_and_pir_exe_and_pir_api,
+)
 
 import paddle
 
@@ -31,27 +34,21 @@ class DropPath(paddle.nn.Layer):
     def __init__(self):
         super().__init__()
 
-    @paddle.jit.to_static
     def forward(self, x):
         return drop_path(x, self.training)
 
 
 class TestTrainEval(Dy2StTestBase):
-    def setUp(self):
-        self.model = DropPath()
-
-    def tearDown(self):
-        pass
-
-    @test_legacy_and_pir
+    @test_legacy_and_pir_exe_and_pir_api
     def test_train_and_eval(self):
+        model = paddle.jit.to_static(DropPath())
         x = paddle.to_tensor([1, 2, 3]).astype("int64")
         eval_out = x.numpy()
         train_out = x.numpy() * 2
-        self.model.train()
-        np.testing.assert_allclose(self.model(x).numpy(), train_out, rtol=1e-05)
-        self.model.eval()
-        np.testing.assert_allclose(self.model(x).numpy(), eval_out, rtol=1e-05)
+        model.train()
+        np.testing.assert_allclose(model(x).numpy(), train_out, rtol=1e-05)
+        model.eval()
+        np.testing.assert_allclose(model(x).numpy(), eval_out, rtol=1e-05)
 
 
 if __name__ == "__main__":
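Note on the to_pir_test hunk above: the wrapper turns FLAGS_enable_pir_in_executor on only for the wrapped call, and hoisting the pir_flag assignment out of the try block keeps the name bound for the cleanup path after the try. The same set-then-restore idea can be written as a standalone context manager; the following is only a sketch (pir_executor_guard is a hypothetical name, not a helper from this patch) and assumes the public paddle.get_flags/paddle.set_flags API:

    import os
    from contextlib import contextmanager

    import paddle


    @contextmanager
    def pir_executor_guard():
        # Enable the PIR executor only inside the guarded block, then
        # restore the previous flag value, mirroring to_pir_test.
        pir_flag = 'FLAGS_enable_pir_in_executor'
        old_value = paddle.get_flags([pir_flag])[pir_flag]
        try:
            os.environ[pir_flag] = 'True'
            paddle.set_flags({pir_flag: True})
            yield
        finally:
            os.environ[pir_flag] = str(old_value)
            paddle.set_flags({pir_flag: old_value})
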
diff --git a/test/dygraph_to_static/test_duplicate_output.py b/test/dygraph_to_static/test_duplicate_output.py
index 70637729671f0..22ededf59ad81 100644
--- a/test/dygraph_to_static/test_duplicate_output.py
+++ b/test/dygraph_to_static/test_duplicate_output.py
@@ -44,20 +44,18 @@ class TestDuplicateOutput(Dy2StTestBase):
     dependent on tensor in Dygraph into Static `base.layers.cond`.
     """
 
-    def setUp(self):
-        self.net = paddle.jit.to_static(SimpleNet())
-        self.x = paddle.to_tensor([1.0])
-
-    @test_legacy_and_pir
     def _run_static(self):
-        param = self.net.parameters()
+        net = paddle.jit.to_static(SimpleNet())
+        x = paddle.to_tensor([1.0])
+        param = net.parameters()
         param[0].clear_grad()
-        loss0, loss1 = self.net(self.x)
+        loss0, loss1 = net(x)
         loss0.backward()
         self.assertEqual(param[0].grad.numpy(), 1.0)
 
+    @test_legacy_and_pir
     def test_ast_to_func(self):
         self._run_static()
diff --git a/test/dygraph_to_static/test_fetch_feed.py b/test/dygraph_to_static/test_fetch_feed.py
index 0df5e766df317..7f88150fcff78 100644
--- a/test/dygraph_to_static/test_fetch_feed.py
+++ b/test/dygraph_to_static/test_fetch_feed.py
@@ -15,11 +15,12 @@
 import unittest
 
 import numpy as np
-from dygraph_to_static_utils_new import Dy2StTestBase, compare_legacy_with_pir
+from dygraph_to_static_utils_new import (
+    Dy2StTestBase,
+    test_legacy_and_pir_exe_and_pir_api,
+)
 
 import paddle
-from paddle import base
-from paddle.jit.api import to_static
 
 SEED = 2020
 
@@ -29,7 +30,6 @@ def __init__(self):
         super().__init__()
         self.pool2d = paddle.nn.AvgPool2D(kernel_size=2, stride=1)
 
-    @to_static
     def forward(self, x):
         # Add func `get_result` for testing arg_name_to_idx in ast transformation.
         def get_result(x):
@@ -54,7 +54,6 @@ def __init__(self, input_dim=10, output_dim=5):
         )
         self.act = paddle.nn.ReLU()
 
-    # @to_static
     def forward(self, x):
         pre = self.fc(x)
         pre = self.act(pre)
@@ -69,24 +68,22 @@ def setUp(self):
 
     def train(self, to_static=False):
         paddle.jit.enable_to_static(to_static)
+        dy_layer = paddle.jit.to_static(self.dygraph_class())
+        x = paddle.to_tensor(self.data)
+        prediction = dy_layer(x)
+        if isinstance(prediction, (list, tuple)):
+            prediction = prediction[0]
 
-        with base.dygraph.guard():
-            dy_layer = self.dygraph_class()
-            x = base.dygraph.to_variable(self.data)
-            prediction = dy_layer(x)
-            if isinstance(prediction, (list, tuple)):
-                prediction = prediction[0]
+        return prediction.numpy()
 
-            return prediction.numpy()
-
-    @compare_legacy_with_pir
     def train_static(self):
         return self.train(to_static=True)
 
     def train_dygraph(self):
         return self.train(to_static=False)
 
-    def test_declarative(self):
+    @test_legacy_and_pir_exe_and_pir_api
+    def test_to_static(self):
         dygraph_res = self.train_dygraph()
         static_res = self.train_static()
@@ -94,7 +91,6 @@ def test_declarative(self):
             dygraph_res,
             static_res,
             rtol=1e-05,
-            err_msg=f'dygraph_res is {dygraph_res}\n static_res is \n{static_res}',
         )
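The pattern these two files migrate to, and which recurs throughout the patch, is: build the layer inside the decorated test body and wrap it with paddle.jit.to_static there, so each case generated by Dy2StTestMeta (AST or SOT, legacy IR or PIR) starts from a fresh program instead of sharing state created in setUp. In isolation the check looks roughly like the sketch below (DoubleNet is a made-up stand-in, not one of the nets in this patch):

    import numpy as np

    import paddle


    class DoubleNet(paddle.nn.Layer):
        def forward(self, x):
            return x * 2


    def check_static_matches_dygraph():
        net = DoubleNet()
        x = paddle.to_tensor([1.0, 2.0, 3.0])
        dygraph_out = net(x).numpy()  # eager baseline
        static_net = paddle.jit.to_static(net)  # fresh program per case
        np.testing.assert_allclose(
            static_net(x).numpy(), dygraph_out, rtol=1e-05
        )


    check_static_matches_dygraph()
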
diff --git a/test/dygraph_to_static/test_isinstance.py b/test/dygraph_to_static/test_isinstance.py
index 23dcc38edddf8..1c65a96177801 100644
--- a/test/dygraph_to_static/test_isinstance.py
+++ b/test/dygraph_to_static/test_isinstance.py
@@ -26,7 +26,11 @@
 import unittest
 
 import numpy as np
-from dygraph_to_static_utils_new import Dy2StTestBase, compare_legacy_with_pir
+from dygraph_to_static_utils_new import (
+    Dy2StTestBase,
+    test_legacy_and_pir,
+    test_legacy_and_pir_exe_and_pir_api,
+)
 
 import paddle
 from paddle import nn
@@ -52,7 +56,6 @@ def __init__(self, layer):
         super().__init__()
         self.layer = layer
 
-    @paddle.jit.to_static
     def forward(self, x):
         if isinstance(self.layer, (AddAttrLayer,)):
             self.layer.attr = x
@@ -65,7 +68,6 @@ def __init__(self, layers):
         super().__init__()
         self.layers = nn.LayerList(layers)
 
-    @paddle.jit.to_static
     def forward(self, x):
         res = x
         for layer in self.layers:
@@ -75,7 +77,6 @@ def forward(self, x):
         return res
 
 
-@compare_legacy_with_pir
 def train(model, to_static):
     paddle.jit.enable_to_static(to_static)
 
@@ -86,20 +87,23 @@
 
 
 class TestIsinstance(Dy2StTestBase):
+    @test_legacy_and_pir_exe_and_pir_api
     def test_isinstance_simple_return_layer(self):
-        model = IsInstanceLayer(SimpleReturnLayer())
+        model = paddle.jit.to_static(IsInstanceLayer(SimpleReturnLayer()))
         self._test_model(model)
 
+    @test_legacy_and_pir
     def test_isinstance_add_attr_layer(self):
-        model = IsInstanceLayer(AddAttrLayer())
+        model = paddle.jit.to_static(IsInstanceLayer(AddAttrLayer()))
         self._test_model(model)
 
+    @test_legacy_and_pir
     def test_sequential_layer(self):
         layers = []
         for i in range(5):
             layers.append(SimpleReturnLayer())
             layers.append(AddAttrLayer())
-        model = SequentialLayer(layers)
+        model = paddle.jit.to_static(SequentialLayer(layers))
         self._test_model(model)
 
     def _test_model(self, model):
diff --git a/test/dygraph_to_static/test_multi_forward.py b/test/dygraph_to_static/test_multi_forward.py
index bdcbda03de259..58e8b3fc0986d 100644
--- a/test/dygraph_to_static/test_multi_forward.py
+++ b/test/dygraph_to_static/test_multi_forward.py
@@ -14,7 +14,10 @@
 
 import unittest
 
-from dygraph_to_static_utils_new import Dy2StTestBase, test_legacy_and_pir
+from dygraph_to_static_utils_new import (
+    Dy2StTestBase,
+    test_legacy_and_pir_exe_and_pir_api,
+)
 
 import paddle
 
@@ -24,23 +27,25 @@ def __init__(self):
         super().__init__()
         self.linear = paddle.nn.Linear(1, 1)
 
-    @paddle.jit.to_static(
-        input_spec=[
-            paddle.static.InputSpec(shape=[None, None], dtype=paddle.float32)
-        ]
-    )
     def forward(self, x):
         return self.linear(x)
 
 
 class TestBackward(Dy2StTestBase):
-    @test_legacy_and_pir
+    @test_legacy_and_pir_exe_and_pir_api
     def test_order_0(self):
         """
         loss = 1 * w * 1 + 2 * w * 2
         delta_w = 5
         """
-        model = MyLayer()
+        model = paddle.jit.to_static(
+            function=MyLayer(),
+            input_spec=[
+                paddle.static.InputSpec(
+                    shape=[None, None], dtype=paddle.float32
+                )
+            ],
+        )
         model.clear_gradients()
         inp = paddle.ones([1, 1])
         out1 = model(inp * 1)
@@ -49,13 +54,20 @@ def test_order_0(self):
         loss.backward()
         self.assertEqual(model.linear.weight.grad, 5)
 
-    @test_legacy_and_pir
+    @test_legacy_and_pir_exe_and_pir_api
     def test_order_1(self):
         """
         loss = 2 * w * 2 + 1 * w * 1
         delta_w = 5
         """
-        model = MyLayer()
+        model = paddle.jit.to_static(
+            function=MyLayer(),
+            input_spec=[
+                paddle.static.InputSpec(
+                    shape=[None, None], dtype=paddle.float32
+                )
+            ],
+        )
         model.clear_gradients()
         inp = paddle.ones([1, 1])
         out1 = model(inp * 1)
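test_multi_forward.py shows the same migration for a decorator that carried arguments: the input_spec formerly attached to @paddle.jit.to_static(...) now travels with the call. A minimal sketch of the call form, using the same keywords as the diff (TinyLinear is an illustrative stand-in, not a layer from this patch):

    import paddle
    from paddle.static import InputSpec


    class TinyLinear(paddle.nn.Layer):
        def __init__(self):
            super().__init__()
            self.linear = paddle.nn.Linear(1, 1)

        def forward(self, x):
            return self.linear(x)


    # The layer itself stays decorator-free; the InputSpec travels
    # with the paddle.jit.to_static(...) call, as in the updated tests.
    model = paddle.jit.to_static(
        function=TinyLinear(),
        input_spec=[InputSpec(shape=[None, None], dtype=paddle.float32)],
    )
    print(model(paddle.ones([1, 1])))
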
diff --git a/test/dygraph_to_static/test_print.py b/test/dygraph_to_static/test_print.py
index 35022512ce7f6..d215e4a730fc1 100644
--- a/test/dygraph_to_static/test_print.py
+++ b/test/dygraph_to_static/test_print.py
@@ -15,15 +15,12 @@
 import unittest
 
 import numpy
-from dygraph_to_static_utils_new import Dy2StTestBase, compare_legacy_with_pir
+from dygraph_to_static_utils_new import Dy2StTestBase, test_legacy_and_pir
 
 import paddle
-from paddle import base
-from paddle.jit import to_static
 
 
 # 1. print Tensor
-@to_static
 def dyfunc_print_variable(x):
     # NOTE: transform to static code, var name will be changed
     x_t = paddle.to_tensor(x)
@@ -31,27 +28,23 @@ def dyfunc_print_variable(x):
 
 
 # 2. print ndarray
-@to_static
 def dyfunc_print_ndarray(x):
     print(x)
 
 
 # 3. print Tensor with format
-@to_static
 def dyfunc_print_with_format(x):
     x_t = paddle.to_tensor(x)
     print(f"PrintTensor: {x_t}")
 
 
 # 4. print Tensor with format 2
-@to_static
 def dyfunc_print_with_format2(x):
     x_t = paddle.to_tensor(x)
     print("PrintTensor: %s" % (x_t))
 
 
 # 5. print Tensor in control flow1
-@to_static
 def dyfunc_print_with_ifelse(x):
     x_t = paddle.to_tensor(x)
     if len(x_t.shape) > 1:
@@ -61,7 +54,6 @@ def dyfunc_print_with_ifelse(x):
 
 
 # 6. print multiple Tensor
-@to_static
 def dyfunc_print_multi_tensor(x):
     x_t = paddle.to_tensor(x)
     y_t = x_t * 2
@@ -70,7 +62,6 @@ def dyfunc_print_multi_tensor(x):
 
 
 # 7. print continue Tensor
-@to_static
 def dyfunc_print_continue_vars(x):
     x_t = paddle.to_tensor(x)
     y_t = x_t * 2
@@ -78,7 +69,6 @@ def dyfunc_print_continue_vars(x):
 
 
 # 8. print with kwargs
-@to_static
 def dyfunc_print_with_kwargs(x):
     x_t = paddle.to_tensor(x)
     print("Tensor", x_t, end='\n\n', sep=': ')
@@ -100,13 +90,11 @@ def set_test_func(self):
 
     def _run(self, to_static):
         paddle.jit.enable_to_static(to_static)
-        with base.dygraph.guard():
-            self.dygraph_func(self.input)
+        paddle.jit.to_static(self.dygraph_func)(self.input)
 
     def get_dygraph_output(self):
         self._run(to_static=False)
 
-    @compare_legacy_with_pir
     def get_static_output(self):
         self._run(to_static=True)
 
@@ -115,6 +103,7 @@ class TestPrintVariable(TestPrintBase):
     def set_test_func(self):
         self.dygraph_func = dyfunc_print_variable
 
+    @test_legacy_and_pir
     def test_transformed_static_result(self):
         self.get_dygraph_output()
         self.get_static_output()
diff --git a/test/dygraph_to_static/test_reinforcement_learning.py b/test/dygraph_to_static/test_reinforcement_learning.py
index a47607b561f8d..d67d3bf990787 100644
--- a/test/dygraph_to_static/test_reinforcement_learning.py
+++ b/test/dygraph_to_static/test_reinforcement_learning.py
@@ -18,13 +18,15 @@
 
 import gym
 import numpy as np
-from dygraph_to_static_utils_new import Dy2StTestBase, test_legacy_and_pir
+from dygraph_to_static_utils_new import (
+    Dy2StTestBase,
+    test_legacy_and_pir_exe_and_pir_api,
+)
 
 import paddle
 import paddle.nn.functional as F
 from paddle import base
 from paddle.base.dygraph import to_variable
-from paddle.jit.api import to_static
 from paddle.nn import Layer
 
 SEED = 2020
@@ -41,7 +43,6 @@ def __init__(self):
         self.saved_log_probs = []
         self.rewards = []
 
-    @to_static
     def forward(self, x):
         x = paddle.reshape(x, shape=[1, 4])
         x = self.affine1(x)
@@ -71,7 +72,7 @@ def train(args, place, to_static):
     paddle.framework.random._manual_program_seed(SEED)
     local_random = np.random.RandomState(SEED)
 
-    policy = Policy()
+    policy = paddle.jit.to_static(Policy())
 
     eps = np.finfo(np.float32).eps.item()
     optimizer = paddle.optimizer.Adamax(
@@ -206,13 +207,13 @@ def finish_episode():
 class TestDeclarative(Dy2StTestBase):
     def setUp(self):
         self.place = (
-            base.CUDAPlace(0)
-            if base.is_compiled_with_cuda()
-            else base.CPUPlace()
+            paddle.CUDAPlace(0)
+            if paddle.is_compiled_with_cuda()
+            else paddle.CPUPlace()
        )
         self.args = Args()
 
-    @test_legacy_and_pir
+    @test_legacy_and_pir_exe_and_pir_api
     def test_train(self):
         st_out = train(self.args, self.place, to_static=True)
         dy_out = train(self.args, self.place, to_static=False)
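For module-level functions such as the dyfunc_print_* helpers, the reworked _run applies the same idea without any decorator: convert at the call site for the static run, and toggle paddle.jit.enable_to_static(False) for the dygraph baseline. A standalone sketch (print_tensor is illustrative, not a function from test_print.py):

    import numpy

    import paddle


    def print_tensor(x):
        x_t = paddle.to_tensor(x)
        print(x_t)


    # Static run: convert at the call site instead of decorating.
    paddle.jit.to_static(print_tensor)(numpy.ones([2, 2]))

    # Dygraph baseline: disable the transformation globally first,
    # as _run(to_static=False) does.
    paddle.jit.enable_to_static(False)
    print_tensor(numpy.ones([2, 2]))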