rm mlu ops eager guard tests (#48769)
yjjiang11 authored Dec 9, 2022
1 parent 5ecd0ad commit 96e5796
Showing 7 changed files with 13 additions and 21 deletions.
@@ -21,7 +21,6 @@
 import paddle.fluid as fluid
 from paddle.fluid import Program, program_guard
 from op_test import OpTest
-from paddle.fluid.framework import _test_eager_guard

 paddle.enable_static()

@@ -21,7 +21,6 @@
 import paddle.fluid as fluid
 from paddle.fluid import compiler, Program, program_guard
 import paddle
-from paddle.fluid.framework import _test_eager_guard

 paddle.enable_static()

@@ -21,7 +21,6 @@
 import paddle.fluid as fluid
 import paddle
 from paddle.fluid import compiler, Program, program_guard, core
-from paddle.fluid.framework import _test_eager_guard

 paddle.enable_static()

@@ -23,7 +23,7 @@
 import paddle.fluid as fluid
 import paddle.fluid.core as core
 import paddle.fluid.framework as framework
-from paddle.fluid.framework import Program, program_guard, _test_eager_guard
+from paddle.fluid.framework import Program, program_guard

 paddle.enable_static()

@@ -203,9 +203,8 @@ def test_api_with_dygraph(self):
             one_hot_label = paddle.nn.functional.one_hot(
                 fluid.dygraph.to_variable(label), depth
             )
-            # with _test_eager_guard():
-            #     one_hot_label = paddle.nn.functional.one_hot(
-            #         paddle.to_tensor(label), depth)
+            # one_hot_label = paddle.nn.functional.one_hot(
+            #     paddle.to_tensor(label), depth)

     def _run(self, depth):
         label = fluid.layers.data(name="label", shape=[1], dtype="int64")
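
For reference, the commented-out eager-mode call that this hunk reformats corresponds to the following standalone usage. This is a minimal sketch only; the label values and depth below are illustrative and not taken from the test:

    import numpy as np
    import paddle

    # Illustrative inputs; the real test builds its own random label array.
    label = np.array([[1], [0], [3]], dtype="int64")
    depth = 4

    # Eager (dygraph) mode is Paddle's default, so no guard is needed around the call.
    one_hot_label = paddle.nn.functional.one_hot(paddle.to_tensor(label), depth)
    print(one_hot_label.shape)  # [3, 1, 4]: one-hot encoding along a new last axis
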
@@ -24,7 +24,6 @@
 from op_test import OpTest
 from paddle.fluid.op import Operator
 from paddle.fluid.executor import Executor
-from paddle.fluid.framework import _test_eager_guard

 paddle.enable_static()

@@ -76,17 +75,16 @@ def gaussian_random_test(self, place):
     # directly
     def gaussian_random_test_eager(self, place):
         with fluid.dygraph.guard(place):
-            with _test_eager_guard():
-                out = paddle._C_ops.truncated_gaussian_random(
-                    self.attrs["shape"],
-                    self.attrs["mean"],
-                    self.attrs["std"],
-                    self.attrs["seed"],
-                    core.VarDesc.VarType.FP32,
-                    place,
-                )
-                self.assertAlmostEqual(numpy.mean(out.numpy()), 0.0, delta=0.1)
-                self.assertAlmostEqual(numpy.var(out.numpy()), 0.773, delta=0.1)
+            out = paddle._C_ops.truncated_gaussian_random(
+                self.attrs["shape"],
+                self.attrs["mean"],
+                self.attrs["std"],
+                self.attrs["seed"],
+                core.VarDesc.VarType.FP32,
+                place,
+            )
+            self.assertAlmostEqual(numpy.mean(out.numpy()), 0.0, delta=0.1)
+            self.assertAlmostEqual(numpy.var(out.numpy()), 0.773, delta=0.1)


 if __name__ == "__main__":
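
With the guard removed, the eager check above calls the C++ op binding directly under the dygraph guard. A minimal standalone sketch of the same call follows; the shape/mean/std/seed values are illustrative (the test reads them from self.attrs), and CPUPlace stands in for the test's MLU place:

    import numpy
    import paddle
    import paddle.fluid as fluid
    import paddle.fluid.core as core

    place = paddle.CPUPlace()  # the MLU test would pass its MLU place here
    with fluid.dygraph.guard(place):
        # Same positional arguments as in the diff: shape, mean, std, seed, dtype, place.
        out = paddle._C_ops.truncated_gaussian_random(
            [10000], 0.0, 1.0, 10, core.VarDesc.VarType.FP32, place
        )
        print(numpy.mean(out.numpy()), numpy.var(out.numpy()))
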
@@ -25,7 +25,6 @@
 from paddle.fluid import compiler, Program, program_guard
 from paddle.fluid.op import Operator
 from paddle.fluid.backward import append_backward
-from paddle.fluid.framework import _test_eager_guard

 paddle.enable_static()

@@ -23,7 +23,6 @@
 import paddle.fluid as fluid
 from paddle.fluid.op import Operator
 from paddle.fluid.executor import Executor
-from paddle.fluid.framework import _test_eager_guard

 paddle.enable_static()

