From f6ae2164af353c181dbc203a812631e62328bea2 Mon Sep 17 00:00:00 2001 From: Zhan Rongrui <46243324+zrr1999@users.noreply.github.com> Date: Tue, 2 Jul 2024 23:44:32 +0800 Subject: [PATCH] [SOT][dynamic shape] Adapt some InferMeta for dynamic shape (#65517) Co-authored-by: SigureMo Co-authored-by: Winters Montagne <118546135+WintersMontagne10335@users.noreply.github.com> --- paddle/phi/infermeta/binary.cc | 56 +++++++------- paddle/phi/infermeta/fusion.cc | 2 +- paddle/phi/infermeta/multiary.cc | 7 +- paddle/phi/infermeta/ternary.cc | 4 +- .../test_dynamic_shape_infermeta.py | 74 +++++++++++++++++++ 5 files changed, 109 insertions(+), 34 deletions(-) create mode 100644 test/dygraph_to_static/test_dynamic_shape_infermeta.py diff --git a/paddle/phi/infermeta/binary.cc b/paddle/phi/infermeta/binary.cc index aace68e7d4e4c1..0d3178491d2f92 100644 --- a/paddle/phi/infermeta/binary.cc +++ b/paddle/phi/infermeta/binary.cc @@ -236,7 +236,7 @@ void BCELossInferMeta(const MetaTensor& input, bool check = true; if ((!config.is_runtime) && - (common::product(input_dims) <= 0 || common::product(label_dims) <= 0)) { + (contain_unknown_dim(input_dims) || contain_unknown_dim(label_dims))) { check = false; } @@ -644,34 +644,34 @@ void ConvInferMeta(const MetaTensor& input, ? filter_dims[filter_dims.size() - 1] : filter_dims[1]; - PADDLE_ENFORCE_EQ( - input_channels, - filter_channels * groups, - phi::errors::InvalidArgument( - "The number of input's channels should be equal to filter's channels " - "* groups for Op(Conv). But received: the input's channels is %d, " - "the input's shape is [%s]; the filter's channels is %d, the " - "filter's shape is [%s]; the groups is %d, the data_format is %s. 
" - "The error may come from wrong data_format setting.", - input_channels, - in_dims, - filter_channels, - filter_dims, - groups, - data_format)); - PADDLE_ENFORCE_EQ( - filter_dims[0] % groups, - 0, - phi::errors::InvalidArgument( - "The number of output's channels (filter's first dimension) of " - "Op(Conv) should be divided by groups. But received: " - "the output channels is %d, the filter's shape is [%s], " - "the groups is %d.", - filter_dims[0], - filter_dims, - groups)); - if (config.is_runtime) { + PADDLE_ENFORCE_EQ( + input_channels, + filter_channels * groups, + phi::errors::InvalidArgument( + "The number of input's channels should be equal to filter's " + "channels " + "* groups for Op(Conv). But received: the input's channels is %d, " + "the input's shape is [%s]; the filter's channels is %d, the " + "filter's shape is [%s]; the groups is %d, the data_format is %s. " + "The error may come from wrong data_format setting.", + input_channels, + in_dims, + filter_channels, + filter_dims, + groups, + data_format)); + PADDLE_ENFORCE_EQ( + filter_dims[0] % groups, + 0, + phi::errors::InvalidArgument( + "The number of output's channels (filter's first dimension) of " + "Op(Conv) should be divided by groups. 
But received: "
+            "the output channels is %d, the filter's shape is [%s], "
+            "the groups is %d.",
+            filter_dims[0],
+            filter_dims,
+            groups));
     PADDLE_ENFORCE_GT(
         filter_dims[0],
         0,
diff --git a/paddle/phi/infermeta/fusion.cc b/paddle/phi/infermeta/fusion.cc
index c8f5fe041255af..0332b04b2c0b92 100644
--- a/paddle/phi/infermeta/fusion.cc
+++ b/paddle/phi/infermeta/fusion.cc
@@ -2492,7 +2492,7 @@ void BNActXPUInferMeta(const MetaTensor& x,
   bool check = true;
   if ((!config.is_runtime) &&
-      (common::product(scale_dim) <= 0 || common::product(bias_dim) <= 0)) {
+      (contain_unknown_dim(scale_dim) || contain_unknown_dim(bias_dim))) {
     check = false;
   }
diff --git a/paddle/phi/infermeta/multiary.cc b/paddle/phi/infermeta/multiary.cc
index 0b672eb6826d8a..1b68617882c5d1 100644
--- a/paddle/phi/infermeta/multiary.cc
+++ b/paddle/phi/infermeta/multiary.cc
@@ -912,9 +912,10 @@ void BatchNormInferMeta(const MetaTensor& x,
   }
   bool check = true;
+
   if (!scale || !bias ||
-      ((!config.is_runtime) && (common::product(scale.dims()) <= 0 ||
-                                common::product(bias.dims()) <= 0))) {
+      ((!config.is_runtime) && (contain_unknown_dim(scale.dims()) ||
+                                contain_unknown_dim(bias.dims()) || C == -1))) {
     check = false;
   }
@@ -4947,7 +4948,7 @@ void SigmoidCrossEntropyWithLogitsInferMeta(const MetaTensor& x,
   bool check = true;
   if ((!config.is_runtime) &&
-      (common::product(x_dims) <= 0 || common::product(labels_dims) <= 0)) {
+      (contain_unknown_dim(x_dims) || contain_unknown_dim(labels_dims))) {
     check = false;
   }
diff --git a/paddle/phi/infermeta/ternary.cc b/paddle/phi/infermeta/ternary.cc
index 187a29dea8d3c1..1346337596104d 100644
--- a/paddle/phi/infermeta/ternary.cc
+++ b/paddle/phi/infermeta/ternary.cc
@@ -593,7 +593,7 @@ void InstanceNormInferMeta(const MetaTensor& x,
                           "of scale is [%d]",
                           scale_dim,
                           scale_dim.size()));
-    bool check = !((!config.is_runtime) && (common::product(scale_dim) <= 0));
+    bool check = config.is_runtime || !contain_unknown_dim(scale_dim);
     if (check) {
PADDLE_ENFORCE_EQ(scale_dim[0], C, @@ -615,7 +615,7 @@ void InstanceNormInferMeta(const MetaTensor& x, "of bias is [%d]", bias_dim, bias_dim.size())); - bool check = !((!config.is_runtime) && (common::product(bias_dim) <= 0)); + bool check = config.is_runtime || !contain_unknown_dim(bias_dim); if (check) { PADDLE_ENFORCE_EQ(bias_dim[0], C, diff --git a/test/dygraph_to_static/test_dynamic_shape_infermeta.py b/test/dygraph_to_static/test_dynamic_shape_infermeta.py new file mode 100644 index 00000000000000..ece62ba2c6b3d8 --- /dev/null +++ b/test/dygraph_to_static/test_dynamic_shape_infermeta.py @@ -0,0 +1,74 @@ +# Copyright (c) 2024 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import annotations + +import unittest +from typing import Any, Callable, Sequence + +import numpy as np +from dygraph_to_static_utils import ( + Dy2StTestBase, + test_ast_only, + test_pir_only, +) + +import paddle +from paddle.static.input import InputSpec + + +class TestDynamicShapeInfermeta(Dy2StTestBase): + def check_dynamic_shape( + self, + fn: Callable[..., Any], + inputs: Sequence[paddle.Tensor], + input_specs: list[InputSpec], + ): + static_fn = paddle.jit.to_static( + fn, + full_graph=True, + input_spec=input_specs, + ) + np.testing.assert_allclose(static_fn(*inputs), fn(*inputs), rtol=1e-05) + + @test_pir_only + @test_ast_only + def test_conv2d(self): + self.check_dynamic_shape( + paddle.nn.Conv2D(3, 3, 3), + [paddle.randn([1, 3, 32, 32])], + [InputSpec(shape=[None, None, None, None], dtype='float32')], + ) + + @test_pir_only + @test_ast_only + def test_bn(self): + self.check_dynamic_shape( + paddle.nn.BatchNorm2D(3), + [paddle.randn([1, 3, 32, 32])], + [InputSpec(shape=[None, None, None, None], dtype='float32')], + ) + + @test_pir_only + @test_ast_only + def test_depthwise_conv2d(self): + self.check_dynamic_shape( + paddle.nn.Conv2D(3, 3, 3, groups=3), + [paddle.randn([1, 3, 32, 32])], + [InputSpec(shape=[None, None, None, None], dtype='float32')], + ) + + +if __name__ == '__main__': + unittest.main()