From beab73e37744e44b4d0968a121ceb22ef79ebbd8 Mon Sep 17 00:00:00 2001
From: liudongxue01
Date: Tue, 7 Mar 2023 07:39:19 +0000
Subject: [PATCH] DLTP-66486:implement log_grad by primitive logic

---
 .../prim/api/composite_backward/composite_backward_api.h | 8 ++++++++
 paddle/phi/api/yaml/backward.yaml                         | 1 +
 .../paddle/fluid/tests/unittests/test_activation_op.py    | 9 +++++++--
 3 files changed, 16 insertions(+), 2 deletions(-)

diff --git a/paddle/fluid/prim/api/composite_backward/composite_backward_api.h b/paddle/fluid/prim/api/composite_backward/composite_backward_api.h
index 129f70428a003..f2339a886fa42 100644
--- a/paddle/fluid/prim/api/composite_backward/composite_backward_api.h
+++ b/paddle/fluid/prim/api/composite_backward/composite_backward_api.h
@@ -385,6 +385,14 @@ void expand_grad(const Tensor& x,
   }
 }
 
+template <typename T>
+void log_grad(const Tensor& x, const Tensor& out_grad, Tensor* x_grad) {
+  if (x_grad) {
+    // dx = dout / x
+    set_output<T>(out_grad / x, x_grad);
+  }
+}
+
 template <typename T>
 void exp_grad(const Tensor& out, const Tensor& out_grad, Tensor* x_grad) {
   if (x_grad) {
diff --git a/paddle/phi/api/yaml/backward.yaml b/paddle/phi/api/yaml/backward.yaml
index fde5deeafe9fb..b70a752b2eca5 100644
--- a/paddle/phi/api/yaml/backward.yaml
+++ b/paddle/phi/api/yaml/backward.yaml
@@ -804,6 +804,7 @@
   kernel :
     func : log_grad
   backward : log_double_grad
+  composite : log_grad(x, out_grad, x_grad)
   inplace : (out_grad -> x_grad)
 
 - backward_op : log_loss_grad
diff --git a/python/paddle/fluid/tests/unittests/test_activation_op.py b/python/paddle/fluid/tests/unittests/test_activation_op.py
index 453528517b64e..08cb8c141a310 100755
--- a/python/paddle/fluid/tests/unittests/test_activation_op.py
+++ b/python/paddle/fluid/tests/unittests/test_activation_op.py
@@ -2611,10 +2611,15 @@ class TestLog(TestActivation):
     def setUp(self):
         self.op_type = "log"
         self.check_eager = True
+        self.prim_op_type = "prim"
         self.python_api = paddle.log
         self.init_dtype()
         self.init_shape()
 
+        if len(self.shape) == 0:
+            # for 0-D tensor, skip cinn testing
+            self.enable_cinn = False
+
         np.random.seed(1024)
         x = np.random.uniform(0.1, 1, self.shape).astype(self.dtype)
         out = np.log(x)
@@ -2625,7 +2630,7 @@ def setUp(self):
     def test_check_grad(self):
         if self.dtype == np.float16:
             return
-        self.check_grad(['X'], 'Out', check_eager=True)
+        self.check_grad(['X'], 'Out', check_eager=True, check_prim=True)
 
     def test_error(self):
         in1 = paddle.static.data(name="in1", shape=[11, 17], dtype="int32")
@@ -3834,7 +3839,7 @@ def test_check_grad(self):
 create_test_act_fp16_class(TestELU)
 create_test_act_fp16_class(TestCELU)
 create_test_act_fp16_class(TestReciprocal)
-create_test_act_fp16_class(TestLog)
+create_test_act_fp16_class(TestLog, check_prim=True)
 if core.is_compiled_with_rocm():
     create_test_act_fp16_class(TestLog2, atol=5e-2, grad_atol=0.85)
 else:
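
Reviewer note, not part of the patch: the composite rule registered above encodes d/dx log(x) = 1/x, so the vector-Jacobian product is dx = out_grad / x. Below is a minimal standalone numpy sketch that checks this rule against finite differences; the shape, seed, and tolerances are illustrative choices, not values taken from the PR.

    # Illustrative check (not Paddle code): verify dx = out_grad / x, the rule
    # added in log_grad, against element-wise finite differences of the scalar
    # loss L = sum(out_grad * log(x)).
    import numpy as np

    np.random.seed(1024)
    x = np.random.uniform(0.1, 1, (3, 4))        # keep x > 0, as in the unit test
    out_grad = np.random.uniform(-1, 1, x.shape) # upstream gradient ("dout")

    dx_prim = out_grad / x                       # composite/primitive rule

    eps = 1e-6
    dx_fd = np.empty_like(x)
    for idx in np.ndindex(x.shape):
        xp, xm = x.copy(), x.copy()
        xp[idx] += eps
        xm[idx] -= eps
        dx_fd[idx] = (np.sum(out_grad * np.log(xp))
                      - np.sum(out_grad * np.log(xm))) / (2 * eps)

    assert np.allclose(dx_prim, dx_fd, rtol=1e-5, atol=1e-7)
    print("dx = out_grad / x matches finite differences")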