[Dy2St] pir dy2st unittest verification - Part 3 (PaddlePaddle#58890)
---------

Co-authored-by: SigureMo <[email protected]>
gouzil and SigureMo authored Nov 10, 2023
1 parent 5924770 commit 12a816a
Showing 8 changed files with 74 additions and 83 deletions.
10 changes: 2 additions & 8 deletions test/dygraph_to_static/dygraph_to_static_utils_new.py
@@ -102,7 +102,7 @@ def impl(*args, **kwargs):
 
 def to_legacy_ir_test(fn):
     def impl(*args, **kwargs):
-        logger.info("[Program] running legacy ir")
+        logger.info("[LEGACY_IR] running legacy ir")
         return fn(*args, **kwargs)
 
     return impl
@@ -117,8 +117,8 @@ def impl(*args, **kwargs):
             return
         with static.scope_guard(static.Scope()):
             with static.program_guard(static.Program()):
+                pir_flag = 'FLAGS_enable_pir_in_executor'
                 try:
-                    pir_flag = 'FLAGS_enable_pir_in_executor'
                     os.environ[pir_flag] = 'True'
                     set_flags({pir_flag: True})
                     ir_outs = fn(*args, **kwargs)
@@ -202,12 +202,6 @@ def __new__(cls, name, bases, attrs):
             )
             # Generate all test cases
             for to_static_mode, ir_mode in to_static_with_ir_modes:
-                # NOTE(gouzil): Temporarily not supported SOT + PIR, link: https://github.com/PaddlePaddle/Paddle/pull/58630
-                if (
-                    to_static_mode == ToStaticMode.SOT
-                    and ir_mode == IrMode.PIR_API
-                ):
-                    continue
                 new_attrs[
                     Dy2StTestMeta.test_case_name(
                         fn_name, to_static_mode, ir_mode
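Aside: the second hunk above also binds `pir_flag` before the `try`, presumably so any cleanup path can reference the name even if an exception fires before the assignment inside `try` would have run. A minimal, self-contained sketch of the toggle-and-restore pattern this helper uses (assuming only the public `paddle.set_flags` API; the restore step is an assumption here, and the real wrapper additionally guards a fresh `Scope` and `Program` as shown in the diff):

import os

import paddle


def run_with_pir_executor(fn, *args, **kwargs):
    # Bind the flag name outside try so cleanup can always restore it.
    pir_flag = 'FLAGS_enable_pir_in_executor'
    try:
        os.environ[pir_flag] = 'True'
        paddle.set_flags({pir_flag: True})
        return fn(*args, **kwargs)
    finally:
        # Restore defaults so later tests run on the legacy executor.
        del os.environ[pir_flag]
        paddle.set_flags({pir_flag: False})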
23 changes: 10 additions & 13 deletions test/dygraph_to_static/test_drop_path.py
@@ -15,7 +15,10 @@
 import unittest
 
 import numpy as np
-from dygraph_to_static_utils_new import Dy2StTestBase, test_legacy_and_pir
+from dygraph_to_static_utils_new import (
+    Dy2StTestBase,
+    test_legacy_and_pir_exe_and_pir_api,
+)
 
 import paddle
 
@@ -31,27 +34,21 @@ class DropPath(paddle.nn.Layer):
     def __init__(self):
         super().__init__()
 
-    @paddle.jit.to_static
     def forward(self, x):
         return drop_path(x, self.training)
 
 
 class TestTrainEval(Dy2StTestBase):
-    def setUp(self):
-        self.model = DropPath()
-
-    def tearDown(self):
-        pass
-
-    @test_legacy_and_pir
+    @test_legacy_and_pir_exe_and_pir_api
    def test_train_and_eval(self):
+        model = paddle.jit.to_static(DropPath())
         x = paddle.to_tensor([1, 2, 3]).astype("int64")
         eval_out = x.numpy()
         train_out = x.numpy() * 2
-        self.model.train()
-        np.testing.assert_allclose(self.model(x).numpy(), train_out, rtol=1e-05)
-        self.model.eval()
-        np.testing.assert_allclose(self.model(x).numpy(), eval_out, rtol=1e-05)
+        model.train()
+        np.testing.assert_allclose(model(x).numpy(), train_out, rtol=1e-05)
+        model.eval()
+        np.testing.assert_allclose(model(x).numpy(), eval_out, rtol=1e-05)
 
 
 if __name__ == "__main__":
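This file shows the migration pattern the PR applies throughout: the `@paddle.jit.to_static` decorator on `forward` and the `setUp`/`tearDown` fixtures go away, and each test converts its own fresh instance, so every test case generated by the mode decorator performs the conversion under its own IR mode. A minimal sketch of the call-site style (the `Net` layer here is a hypothetical stand-in, not code from this PR):

import paddle


class Net(paddle.nn.Layer):
    def forward(self, x):
        # Previously a method like this carried @paddle.jit.to_static itself,
        # fixing the conversion mode at class-definition time.
        return x * 2


# Converting at the call site lets the active test mode decide how the
# layer is translated (legacy IR, PIR executor, or PIR API).
net = paddle.jit.to_static(Net())
print(net(paddle.to_tensor([1.0, 2.0])).numpy())  # [2. 4.]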
12 changes: 5 additions & 7 deletions test/dygraph_to_static/test_duplicate_output.py
@@ -44,20 +44,18 @@ class TestDuplicateOutput(Dy2StTestBase):
     dependent on tensor in Dygraph into Static `base.layers.cond`.
     """
 
-    def setUp(self):
-        self.net = paddle.jit.to_static(SimpleNet())
-        self.x = paddle.to_tensor([1.0])
-
-    @test_legacy_and_pir
     def _run_static(self):
-        param = self.net.parameters()
+        net = paddle.jit.to_static(SimpleNet())
+        x = paddle.to_tensor([1.0])
+        param = net.parameters()
         param[0].clear_grad()
 
-        loss0, loss1 = self.net(self.x)
+        loss0, loss1 = net(x)
         loss0.backward()
 
         self.assertEqual(param[0].grad.numpy(), 1.0)
 
+    @test_legacy_and_pir
     def test_ast_to_func(self):
         self._run_static()
 
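For readers without the full file: `SimpleNet` returns the same tensor twice, and the test checks that calling backward through just one of the duplicated outputs yields the expected gradient. A hedged reconstruction of the scenario (this `SimpleNet` body is an illustration consistent with the asserted gradient, not the file's exact definition):

import paddle


class SimpleNet(paddle.nn.Layer):
    def __init__(self):
        super().__init__()
        self.linear = paddle.nn.Linear(1, 1, bias_attr=False)

    def forward(self, x):
        out = self.linear(x)
        return out, out  # the same tensor appears twice in the outputs


net = paddle.jit.to_static(SimpleNet())
x = paddle.to_tensor([1.0])
loss0, loss1 = net(x)
loss0.backward()
# Gradient flows once, through loss0 only: d(loss0)/d(w) = x = 1.0
print(net.parameters()[0].grad.numpy())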
28 changes: 12 additions & 16 deletions test/dygraph_to_static/test_fetch_feed.py
@@ -15,11 +15,12 @@
 import unittest
 
 import numpy as np
-from dygraph_to_static_utils_new import Dy2StTestBase, compare_legacy_with_pir
+from dygraph_to_static_utils_new import (
+    Dy2StTestBase,
+    test_legacy_and_pir_exe_and_pir_api,
+)
 
 import paddle
-from paddle import base
-from paddle.jit.api import to_static
 
 SEED = 2020
 
@@ -29,7 +30,6 @@ def __init__(self):
         super().__init__()
         self.pool2d = paddle.nn.AvgPool2D(kernel_size=2, stride=1)
 
-    @to_static
     def forward(self, x):
         # Add func `get_result` for testing arg_name_to_idx in ast transformation.
         def get_result(x):
@@ -54,7 +54,6 @@ def __init__(self, input_dim=10, output_dim=5):
         )
         self.act = paddle.nn.ReLU()
 
-    # @to_static
     def forward(self, x):
         pre = self.fc(x)
         pre = self.act(pre)
@@ -69,32 +68,29 @@ def setUp(self):
 
     def train(self, to_static=False):
         paddle.jit.enable_to_static(to_static)
+        dy_layer = paddle.jit.to_static(self.dygraph_class())
+        x = paddle.to_tensor(self.data)
+        prediction = dy_layer(x)
+        if isinstance(prediction, (list, tuple)):
+            prediction = prediction[0]
 
-        with base.dygraph.guard():
-            dy_layer = self.dygraph_class()
-            x = base.dygraph.to_variable(self.data)
-            prediction = dy_layer(x)
-            if isinstance(prediction, (list, tuple)):
-                prediction = prediction[0]
-            return prediction.numpy()
-
+        return prediction.numpy()
 
-    @compare_legacy_with_pir
     def train_static(self):
         return self.train(to_static=True)
 
     def train_dygraph(self):
         return self.train(to_static=False)
 
-    def test_declarative(self):
+    @test_legacy_and_pir_exe_and_pir_api
+    def test_to_static(self):
         dygraph_res = self.train_dygraph()
         static_res = self.train_static()
 
         np.testing.assert_allclose(
             dygraph_res,
             static_res,
             rtol=1e-05,
             err_msg=f'dygraph_res is {dygraph_res}\n static_res is \n{static_res}',
         )
 
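The rewritten `train` helper drops `base.dygraph.guard()` (dygraph is the default execution mode in Paddle 2.x) and replaces `base.dygraph.to_variable` with `paddle.to_tensor`. A condensed, runnable sketch of the resulting shape, with a hypothetical `Double` layer standing in for the test's `dygraph_class`:

import numpy as np
import paddle


class Double(paddle.nn.Layer):  # stand-in for the test's dygraph_class
    def forward(self, x):
        return x * 2


def train(layer_cls, data, to_static=False):
    paddle.jit.enable_to_static(to_static)  # global dy2st switch
    layer = paddle.jit.to_static(layer_cls())
    prediction = layer(paddle.to_tensor(data))
    if isinstance(prediction, (list, tuple)):
        prediction = prediction[0]
    return prediction.numpy()


data = np.random.random((1, 4)).astype('float32')
np.testing.assert_allclose(
    train(Double, data, to_static=False),
    train(Double, data, to_static=True),
    rtol=1e-05,
)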
18 changes: 11 additions & 7 deletions test/dygraph_to_static/test_isinstance.py
@@ -26,7 +26,11 @@
 import unittest
 
 import numpy as np
-from dygraph_to_static_utils_new import Dy2StTestBase, compare_legacy_with_pir
+from dygraph_to_static_utils_new import (
+    Dy2StTestBase,
+    test_legacy_and_pir,
+    test_legacy_and_pir_exe_and_pir_api,
+)
 
 import paddle
 from paddle import nn
@@ -52,7 +56,6 @@ def __init__(self, layer):
         super().__init__()
         self.layer = layer
 
-    @paddle.jit.to_static
     def forward(self, x):
         if isinstance(self.layer, (AddAttrLayer,)):
             self.layer.attr = x
@@ -65,7 +68,6 @@ def __init__(self, layers):
         super().__init__()
         self.layers = nn.LayerList(layers)
 
-    @paddle.jit.to_static
     def forward(self, x):
         res = x
         for layer in self.layers:
@@ -75,7 +77,6 @@ def forward(self, x):
         return res
 
 
-@compare_legacy_with_pir
 def train(model, to_static):
     paddle.jit.enable_to_static(to_static)
 
@@ -86,20 +87,23 @@ def train(model, to_static):
 
 
 class TestIsinstance(Dy2StTestBase):
+    @test_legacy_and_pir_exe_and_pir_api
     def test_isinstance_simple_return_layer(self):
-        model = IsInstanceLayer(SimpleReturnLayer())
+        model = paddle.jit.to_static(IsInstanceLayer(SimpleReturnLayer()))
         self._test_model(model)
 
+    @test_legacy_and_pir
     def test_isinstance_add_attr_layer(self):
-        model = IsInstanceLayer(AddAttrLayer())
+        model = paddle.jit.to_static(IsInstanceLayer(AddAttrLayer()))
         self._test_model(model)
 
+    @test_legacy_and_pir
     def test_sequential_layer(self):
         layers = []
         for i in range(5):
             layers.append(SimpleReturnLayer())
             layers.append(AddAttrLayer())
-        model = SequentialLayer(layers)
+        model = paddle.jit.to_static(SequentialLayer(layers))
         self._test_model(model)
 
     def _test_model(self, model):
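The behavior under test here is that Python-level `isinstance` checks inside `forward` survive dy2st conversion. A minimal sketch of that behavior (hypothetical layers, same shape as the file's `IsInstanceLayer`):

import paddle


class AddOne(paddle.nn.Layer):
    def forward(self, x):
        return x + 1


class Dispatcher(paddle.nn.Layer):
    def __init__(self, layer):
        super().__init__()
        self.layer = layer

    def forward(self, x):
        # dy2st must resolve this type check correctly at conversion time.
        if isinstance(self.layer, AddOne):
            return self.layer(x) * 10
        return self.layer(x)


model = paddle.jit.to_static(Dispatcher(AddOne()))
print(model(paddle.to_tensor([1.0])).numpy())  # [20.]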
32 changes: 22 additions & 10 deletions test/dygraph_to_static/test_multi_forward.py
@@ -14,7 +14,10 @@
 
 import unittest
 
-from dygraph_to_static_utils_new import Dy2StTestBase, test_legacy_and_pir
+from dygraph_to_static_utils_new import (
+    Dy2StTestBase,
+    test_legacy_and_pir_exe_and_pir_api,
+)
 
 import paddle
 
@@ -24,23 +27,25 @@ def __init__(self):
         super().__init__()
         self.linear = paddle.nn.Linear(1, 1)
 
-    @paddle.jit.to_static(
-        input_spec=[
-            paddle.static.InputSpec(shape=[None, None], dtype=paddle.float32)
-        ]
-    )
     def forward(self, x):
         return self.linear(x)
 
 
 class TestBackward(Dy2StTestBase):
-    @test_legacy_and_pir
+    @test_legacy_and_pir_exe_and_pir_api
     def test_order_0(self):
         """
         loss = 1 * w * 1 + 2 * w * 2
         delta_w = 5
         """
-        model = MyLayer()
+        model = paddle.jit.to_static(
+            function=MyLayer(),
+            input_spec=[
+                paddle.static.InputSpec(
+                    shape=[None, None], dtype=paddle.float32
+                )
+            ],
+        )
         model.clear_gradients()
         inp = paddle.ones([1, 1])
         out1 = model(inp * 1)
@@ -49,13 +54,20 @@ def test_order_0(self):
         loss.backward()
         self.assertEqual(model.linear.weight.grad, 5)
 
-    @test_legacy_and_pir
+    @test_legacy_and_pir_exe_and_pir_api
     def test_order_1(self):
         """
         loss = 2 * w * 2 + 1 * w * 1
         delta_w = 5
         """
-        model = MyLayer()
+        model = paddle.jit.to_static(
+            function=MyLayer(),
+            input_spec=[
+                paddle.static.InputSpec(
+                    shape=[None, None], dtype=paddle.float32
+                )
+            ],
+        )
         model.clear_gradients()
         inp = paddle.ones([1, 1])
         out1 = model(inp * 1)
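With the decorator gone from `forward`, its `input_spec` moves into the call-site conversion, exactly as the hunks above show; `shape=[None, None]` leaves both dimensions symbolic, so one converted program serves any batch size. A minimal runnable sketch of the call-site form used in these tests:

import paddle


class MyLayer(paddle.nn.Layer):
    def __init__(self):
        super().__init__()
        self.linear = paddle.nn.Linear(1, 1)

    def forward(self, x):
        return self.linear(x)


model = paddle.jit.to_static(
    function=MyLayer(),
    input_spec=[
        # None dims stay dynamic; dtype is pinned to float32.
        paddle.static.InputSpec(shape=[None, None], dtype=paddle.float32)
    ],
)
print(model(paddle.ones([1, 1])).numpy())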
