rm unittests eager guard tests part17 number2pool1d #48840

Merged
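This PR continues the eager-guard cleanup in the unit tests: in each file below, a test that was split into a func_* body plus a test_* wrapper running it under _test_eager_guard (and then again without the guard) is collapsed into a single test_* method, and the now-unused _test_eager_guard import is dropped. A minimal sketch of the before/after pattern, assuming the class names TestApiBefore and TestApiAfter purely for illustration:

import unittest

from paddle.fluid.framework import _test_eager_guard  # import removed by this PR


class TestApiBefore(unittest.TestCase):
    # Old pattern: the test body lives in func_*, and test_* runs it twice,
    # once inside the eager guard and once in the legacy default mode.
    def func_api_dygraph(self):
        ...  # actual assertions go here

    def test_api_dygraph(self):
        with _test_eager_guard():
            self.func_api_dygraph()
        self.func_api_dygraph()


class TestApiAfter(unittest.TestCase):
    # New pattern: the explicit guard is no longer needed, so the body is
    # renamed straight to test_* and runs once.
    def test_api_dygraph(self):
        ...  # same assertions, run once

The same rename-and-unwrap applies to every diff that follows; test_one_hot_v2_op.py differs only in that the guarded duplicate of the one_hot call is deleted rather than a method being renamed.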
8 changes: 1 addition & 7 deletions python/paddle/fluid/tests/unittests/test_number_count_op.py
@@ -20,7 +20,6 @@
import paddle
import paddle.fluid.core as core
from paddle.distributed.models.moe import utils
from paddle.fluid.framework import _test_eager_guard


def count(x, upper_num):
@@ -68,17 +67,12 @@ def test_api_static(self):
res = exe.run(feed={'x': self.x}, fetch_list=[out])
assert np.allclose(res, self.out)

def func_api_dygraph(self):
def test_api_dygraph(self):
paddle.disable_static()
x = paddle.to_tensor(self.x)
out = utils._number_count(x, self.upper_num)
assert np.allclose(out.numpy(), self.out)

def test_api_dygraph(self):
with _test_eager_guard():
self.func_api_dygraph()
self.func_api_dygraph()


if __name__ == '__main__':
paddle.enable_static()
9 changes: 4 additions & 5 deletions python/paddle/fluid/tests/unittests/test_one_hot_v2_op.py
@@ -20,7 +20,7 @@
import paddle
import paddle.fluid as fluid
import paddle.fluid.core as core
from paddle.fluid.framework import Program, _test_eager_guard, program_guard
from paddle.fluid.framework import Program, program_guard


class TestOneHotOp(OpTest):
@@ -182,10 +182,9 @@ def test_api_with_dygraph(self):
one_hot_label = paddle.nn.functional.one_hot(
fluid.dygraph.to_variable(label), depth
)
with _test_eager_guard():
one_hot_label = paddle.nn.functional.one_hot(
paddle.to_tensor(label), depth
)
one_hot_label = paddle.nn.functional.one_hot(
paddle.to_tensor(label), depth
)

def _run(self, depth):
label = fluid.layers.data(name="label", shape=[1], dtype="int64")
23 changes: 3 additions & 20 deletions python/paddle/fluid/tests/unittests/test_onnx_export.py
@@ -17,7 +17,6 @@
import numpy as np

import paddle
from paddle.fluid.framework import _test_eager_guard


class LinearNet(paddle.nn.Layer):
@@ -41,33 +40,23 @@ def forward(self, x, y, z):


class TestExportWithTensor(unittest.TestCase):
def func_with_tensor(self):
def test_with_tensor(self):
self.x_spec = paddle.static.InputSpec(
shape=[None, 128], dtype='float32'
)
model = LinearNet()
paddle.onnx.export(model, 'linear_net', input_spec=[self.x_spec])

def test_with_tensor(self):
with _test_eager_guard():
self.func_with_tensor()
self.func_with_tensor()


class TestExportWithTensor1(unittest.TestCase):
def func_with_tensor(self):
def test_with_tensor(self):
self.x = paddle.to_tensor(np.random.random((1, 128)))
model = LinearNet()
paddle.onnx.export(model, 'linear_net', input_spec=[self.x])

def test_with_tensor(self):
with _test_eager_guard():
self.func_with_tensor()
self.func_with_tensor()


class TestExportPrunedGraph(unittest.TestCase):
def func_prune_graph(self):
def test_prune_graph(self):
model = Logic()
self.x = paddle.to_tensor(np.array([1]))
self.y = paddle.to_tensor(np.array([-1]))
@@ -77,12 +66,6 @@ def func_prune_graph(self):
model, 'pruned', input_spec=[self.x], output_spec=[out]
)

def test_prune_graph(self):
# test eager
with _test_eager_guard():
self.func_prune_graph()
self.func_prune_graph()


if __name__ == '__main__':
unittest.main()
6 changes: 0 additions & 6 deletions python/paddle/fluid/tests/unittests/test_optimizer.py
@@ -27,7 +27,6 @@
from paddle.fluid.backward import append_backward
from paddle.fluid.framework import (
Program,
_test_eager_guard,
convert_np_dtype_to_dtype_,
program_guard,
)
@@ -1377,11 +1376,6 @@ def test_float64(self):
def test_float32(self):
self.check_with_dtype('float32')

def test_api_eager_dygraph(self):
with _test_eager_guard():
self.test_float64()
self.test_float32()


class TestMasterWeightSaveForFP16(unittest.TestCase):
'''
65 changes: 10 additions & 55 deletions python/paddle/fluid/tests/unittests/test_optimizer_for_varbase.py
@@ -18,7 +18,7 @@

import paddle
import paddle.optimizer as optimizer
from paddle.fluid.framework import _in_legacy_dygraph, _test_eager_guard
from paddle.fluid.framework import _in_legacy_dygraph


class TestOptimizerForVarBase(unittest.TestCase):
@@ -59,71 +59,36 @@ def run_optimizer_minimize_with_varbase_list_input(self, optimizer):
x.numpy(), np.full([2, 3], -self.lr), rtol=1e-05
)

def func_test_adam_with_varbase_list_input(self):
def test_adam_with_varbase_list_input(self):
self.run_optimizer_step_with_varbase_list_input(optimizer.Adam)
self.run_optimizer_minimize_with_varbase_list_input(optimizer.Adam)

def test_adam_with_varbase_list_input(self):
with _test_eager_guard():
self.func_test_adam_with_varbase_list_input()
self.func_test_adam_with_varbase_list_input()

def func_test_sgd_with_varbase_list_input(self):
def test_sgd_with_varbase_list_input(self):
self.run_optimizer_step_with_varbase_list_input(optimizer.SGD)
self.run_optimizer_minimize_with_varbase_list_input(optimizer.SGD)

def test_sgd_with_varbase_list_input(self):
with _test_eager_guard():
self.func_test_sgd_with_varbase_list_input()
self.func_test_sgd_with_varbase_list_input()

def func_test_adagrad_with_varbase_list_input(self):
def test_adagrad_with_varbase_list_input(self):
self.run_optimizer_step_with_varbase_list_input(optimizer.Adagrad)
self.run_optimizer_minimize_with_varbase_list_input(optimizer.Adagrad)

def test_adagrad_with_varbase_list_input(self):
with _test_eager_guard():
self.func_test_adagrad_with_varbase_list_input()
self.func_test_adagrad_with_varbase_list_input()

def func_test_adamw_with_varbase_list_input(self):
def test_adamw_with_varbase_list_input(self):
self.run_optimizer_step_with_varbase_list_input(optimizer.AdamW)
self.run_optimizer_minimize_with_varbase_list_input(optimizer.AdamW)

def test_adamw_with_varbase_list_input(self):
with _test_eager_guard():
self.func_test_adamw_with_varbase_list_input()
self.func_test_adamw_with_varbase_list_input()

def func_test_adamax_with_varbase_list_input(self):
def test_adamax_with_varbase_list_input(self):
self.run_optimizer_step_with_varbase_list_input(optimizer.Adamax)
self.run_optimizer_minimize_with_varbase_list_input(optimizer.Adamax)

def test_adamax_with_varbase_list_input(self):
with _test_eager_guard():
self.func_test_adamax_with_varbase_list_input()
self.func_test_adamax_with_varbase_list_input()

def func_test_momentum_with_varbase_list_input(self):
def test_momentum_with_varbase_list_input(self):
self.run_optimizer_step_with_varbase_list_input(optimizer.Momentum)
self.run_optimizer_minimize_with_varbase_list_input(optimizer.Momentum)

def test_momentum_with_varbase_list_input(self):
with _test_eager_guard():
self.func_test_momentum_with_varbase_list_input()
self.func_test_momentum_with_varbase_list_input()

def func_test_optimizer_with_varbase_input(self):
def test_optimizer_with_varbase_input(self):
x = paddle.zeros([2, 3])
with self.assertRaises(TypeError):
optimizer.Adam(learning_rate=self.lr, parameters=x)

def test_optimizer_with_varbase_input(self):
with _test_eager_guard():
self.func_test_optimizer_with_varbase_input()
self.func_test_optimizer_with_varbase_input()

def func_test_create_param_lr_with_1_for_coverage(self):
def test_create_param_lr_with_1_for_coverage(self):
if _in_legacy_dygraph():
x = paddle.fluid.framework.ParamBase(
dtype="float32",
@@ -151,12 +116,7 @@ def func_test_create_param_lr_with_1_for_coverage(self):
z.backward()
opt.step()

def test_create_param_lr_with_1_for_coverage(self):
with _test_eager_guard():
self.func_test_create_param_lr_with_1_for_coverage()
self.func_test_create_param_lr_with_1_for_coverage()

def func_test_create_param_lr_with_no_1_value_for_coverage(self):
def test_create_param_lr_with_no_1_value_for_coverage(self):
if _in_legacy_dygraph():
x = paddle.fluid.framework.ParamBase(
dtype="float32",
@@ -184,11 +144,6 @@ def func_test_create_param_lr_with_no_1_value_for_coverage(self):
z.backward()
opt.step()

def test_create_param_lr_with_no_1_value_for_coverage(self):
with _test_eager_guard():
self.func_test_create_param_lr_with_1_for_coverage()
self.func_test_create_param_lr_with_1_for_coverage()


if __name__ == "__main__":
unittest.main()
15 changes: 2 additions & 13 deletions python/paddle/fluid/tests/unittests/test_outer.py
@@ -17,7 +17,6 @@
import numpy as np

import paddle
from paddle.fluid.framework import _test_eager_guard
from paddle.static import Program, program_guard


@@ -54,7 +53,7 @@ def _run_dynamic_graph_case(self, x_data, y_data):
res = paddle.outer(x, y)
return res.numpy()

def func_test_multiply(self):
def test_multiply(self):
np.random.seed(7)

# test static computation graph: 3-d array
@@ -113,14 +112,9 @@ def func_test_multiply(self):
res = self._run_dynamic_graph_case(x_data, y_data)
np.testing.assert_allclose(res, np.outer(x_data, y_data), rtol=1e-05)

def test_multiply(self):
with _test_eager_guard():
self.func_test_multiply()
self.func_test_multiply()


class TestMultiplyError(unittest.TestCase):
def func_test_errors(self):
def test_errors(self):
# test static computation graph: dtype can not be int8
paddle.enable_static()
with program_guard(Program(), Program()):
@@ -161,11 +155,6 @@ def func_test_errors(self):
y_data = np.random.randn(200).astype(np.float32)
self.assertRaises(ValueError, paddle.outer, x_data, y_data)

def test_errors(self):
with _test_eager_guard():
self.func_test_errors()
self.func_test_errors()


if __name__ == '__main__':
unittest.main()
@@ -19,7 +19,7 @@

import paddle
import paddle.fluid as fluid
from paddle.fluid.framework import _in_legacy_dygraph, _test_eager_guard
from paddle.fluid.framework import _in_legacy_dygraph
from paddle.fluid.wrapped_decorator import wrap_decorator


@@ -68,7 +68,7 @@ def grad(
)

@dygraph_guard
def func_exception(self):
def test_exception(self):
with self.assertRaises(AssertionError):
self.grad(None, None)

@@ -101,13 +101,8 @@ def func_exception(self):
with self.assertRaises(AssertionError):
self.grad([random_var(shape)], [random_var(shape)], no_grad_vars=1)

def test_exception(self):
with _test_eager_guard():
self.func_exception()
self.func_exception()

@dygraph_guard
def func_simple_example(self):
def test_simple_example(self):
x = random_var(self.shape)
x.stop_gradient = False
y = x + 1
@@ -141,13 +136,8 @@ def func_simple_example(self):
grad_with_none_and_not_none.stop_gradient, create_graph
)

def test_simple_example(self):
with _test_eager_guard():
self.func_simple_example()
self.func_simple_example()

@dygraph_guard
def func_none_one_initial_gradient(self):
def test_none_one_initial_gradient(self):
numel = 1
for s in self.shape:
numel *= s
@@ -223,11 +213,6 @@ def func_none_one_initial_gradient(self):
grad_z.numpy(), original_random_grad_z
)

def test_none_one_initial_gradient(self):
with _test_eager_guard():
self.func_none_one_initial_gradient()
self.func_none_one_initial_gradient()

@dygraph_guard
def func_example_with_gradient_accumulation_and_create_graph(self):
x = random_var(self.shape)
@@ -269,13 +254,8 @@ def func_example_with_gradient_accumulation_and_create_graph(self):
x_grad_actual, x_grad_expected, rtol=1e-05
)

def test_example_with_gradient_accumulation_and_create_graph(self):
with _test_eager_guard():
self.func_example_with_gradient_accumulation_and_create_graph()
self.func_example_with_gradient_accumulation_and_create_graph()

@dygraph_guard
def func_example_with_gradient_accumulation_and_no_grad_vars(self):
def test_example_with_gradient_accumulation_and_no_grad_vars(self):
x = random_var(self.shape)
x_np = x.numpy()
numel = x_np.size
@@ -321,13 +301,8 @@ def func_example_with_gradient_accumulation_and_no_grad_vars(self):
x_grad_actual, x_grad_expected, rtol=1e-05
)

def test_example_with_gradient_accumulation_and_no_grad_vars(self):
with _test_eager_guard():
self.func_example_with_gradient_accumulation_and_no_grad_vars()
self.func_example_with_gradient_accumulation_and_no_grad_vars()

@dygraph_guard
def func_example_with_gradient_accumulation_and_not_create_graph(self):
def test_example_with_gradient_accumulation_and_not_create_graph(self):
x = random_var(self.shape)
x_np = x.numpy()
numel = x_np.size
@@ -363,11 +338,6 @@ def func_example_with_gradient_accumulation_and_not_create_graph(self):
x_grad_actual, x_grad_expected, rtol=1e-05
)

def test_example_with_gradient_accumulation_and_not_create_graph(self):
with _test_eager_guard():
self.func_example_with_gradient_accumulation_and_not_create_graph()
self.func_example_with_gradient_accumulation_and_not_create_graph()


class TestDygraphDoubleGradSortGradient(TestDygraphDoubleGrad):
def setUp(self):