From c1d2e49221b9a92ff05d5d2881bbc61fdae8eafa Mon Sep 17 00:00:00 2001
From: Siju Samuel
Date: Mon, 18 May 2020 11:06:53 +0530
Subject: [PATCH] [KERAS]Global MaxPool3d and AvgPool3d support

---
 python/tvm/relay/frontend/keras.py          | 23 ++++++-
 python/tvm/relay/op/nn/nn.py                | 69 +++++++++++++++++++++
 tests/python/frontend/keras/test_forward.py | 12 ++++
 3 files changed, 101 insertions(+), 3 deletions(-)

diff --git a/python/tvm/relay/frontend/keras.py b/python/tvm/relay/frontend/keras.py
index 43065bef838a..986995826724 100644
--- a/python/tvm/relay/frontend/keras.py
+++ b/python/tvm/relay/frontend/keras.py
@@ -373,7 +373,7 @@ def _convert_convolution3d(inexpr, keras_layer, etab):
         pad_d3 = _get_pad_pair(in_d3, dilated_kernel_d3, stride_d3)
         params['padding'] = [pad_d1[0], pad_d2[0], pad_d3[0], pad_d1[1], pad_d2[1], pad_d3[1]]
     else:
-        msg = 'Padding with {} is not supported for operator Convolution ' \
+        msg = 'Padding with {} is not supported for operator Convolution3D ' \
               'in frontend Keras.'
         raise tvm.error.OpAttributeUnImplemented(msg.format(keras_layer.padding))
     out = _op.nn.conv3d(data=inexpr, **params)
@@ -543,6 +543,23 @@ def _convert_pooling3d(inexpr, keras_layer, etab):

     return _op.transpose(out, axes=(0, 2, 3, 4, 1))

+
+def _convert_global_pooling3d(inexpr, keras_layer, etab):
+    _check_data_format(keras_layer)
+    pool_type = type(keras_layer).__name__
+
+    global_pool_params = {'layout': etab.data_layout}
+    if pool_type == 'GlobalMaxPooling3D':
+        out = _op.nn.global_max_pool3d(inexpr, **global_pool_params)
+    elif pool_type == 'GlobalAveragePooling3D':
+        out = _op.nn.global_avg_pool3d(inexpr, **global_pool_params)
+    else:
+        raise tvm.error.OpNotImplemented(
+            'Operator {} is not supported for frontend Keras.'.format(keras_layer))
+
+    return _convert_flatten(out, keras_layer, etab)
+
+
 def _convert_upsample(inexpr, keras_layer, etab):
     _check_data_format(keras_layer)
     upsample_type = type(keras_layer).__name__
@@ -885,8 +902,8 @@ def _default_skip(inexpr, keras_layer, _): # pylint: disable=unused-argument
     # 'SeparableConv3D'        : _convert_convolution3d,
     'MaxPooling3D'             : _convert_pooling3d,
     'AveragePooling3D'         : _convert_pooling3d,
-    # 'GlobalMaxPooling3D'     : _convert_pooling3d,
-    # 'GlobalAveragePooling3D' : _convert_pooling3d,
+    'GlobalMaxPooling3D'       : _convert_global_pooling3d,
+    'GlobalAveragePooling3D'   : _convert_global_pooling3d,
     'UpSampling3D'             : _convert_upsample3d,
     'ZeroPadding3D'            : _convert_padding3d,

diff --git a/python/tvm/relay/op/nn/nn.py b/python/tvm/relay/op/nn/nn.py
index 888e0b6a0b7d..96708c9e51d0 100644
--- a/python/tvm/relay/op/nn/nn.py
+++ b/python/tvm/relay/op/nn/nn.py
@@ -2692,3 +2692,72 @@ def adaptive_avg_pool3d(data,
     """
     output_size = [] or output_size
     return _make.adaptive_avg_pool3d(data, output_size, layout)
+
+
+def global_max_pool3d(data,
+                      layout="NCDHW"):
+    r"""3D global maximum pooling operator.
+
+    This operator takes data as input and does 3D max value calculation
+    across each window represented by DxHxW.
+
+    In the default case, where the data_layout is `NCDHW`,
+    a data Tensor with shape `(batch_size, in_channels, depth, height, width)`
+    is reduced to produce an output Tensor with the following rule:
+    with data of shape (b, c, d, h, w)
+
+    .. math::
+
+        \mbox{out}(b, c, 1, 1, 1) = \max_{l=0, \ldots, d-1} \max_{m=0, \ldots, h-1}
+        \max_{n=0, \ldots, w-1} \mbox{data}(b, c, l, m, n)
+
+    Parameters
+    ----------
+    data : tvm.relay.Expr
+        The input data to the operator.
+
+    layout : str, optional
+        Layout of the input.
+
+    Returns
+    -------
+    result : tvm.relay.Expr
+        The computed result.
+    """
+    output_size = [1, 1, 1]
+    return _make.adaptive_max_pool3d(data, output_size, layout)
+
+
+def global_avg_pool3d(data,
+                      layout="NCDHW"):
+    r"""3D global average pooling operator.
+
+    This operator takes data as input and does 3D average value calculation
+    across each window represented by DxHxW.
+
+    In the default case, where the data_layout is `NCDHW`,
+    a data Tensor with shape `(batch_size, in_channels, depth, height, width)`
+    is reduced to produce an output Tensor with the following rule:
+
+    with data of shape (b, c, d, h, w)
+
+    .. math::
+
+        \mbox{out}(b, c, 1, 1, 1) = \frac{1}{d * h * w} \sum_{l=0}^{d-1} \sum_{m=0}^{h-1}
+        \sum_{n=0}^{w-1} \mbox{data}(b, c, l, m, n)
+
+    Parameters
+    ----------
+    data : tvm.relay.Expr
+        The input data to the operator.
+
+    layout : str, optional
+        Layout of the input.
+
+    Returns
+    -------
+    result : tvm.relay.Expr
+        The computed result.
+    """
+    output_size = [1, 1, 1]
+    return _make.adaptive_avg_pool3d(data, output_size, layout)
diff --git a/tests/python/frontend/keras/test_forward.py b/tests/python/frontend/keras/test_forward.py
index b4a18165df7e..ed0181f184d9 100644
--- a/tests/python/frontend/keras/test_forward.py
+++ b/tests/python/frontend/keras/test_forward.py
@@ -483,6 +483,17 @@ def test_forward_embedding(self, keras):
         keras_model = keras.models.Model(data, x)
         verify_keras_frontend(keras_model, need_transpose=False)

+    def test_forward_global_pool3d(self, keras):
+        data = keras.layers.Input(shape=(32, 32, 32, 1))
+        pool_funcs = [# global maxpool
+                      keras.layers.GlobalMaxPooling3D(),
+                      # global avgpool
+                      keras.layers.GlobalAveragePooling3D()
+                      ]
+        for pool_func in pool_funcs:
+            x = pool_func(data)
+            keras_model = keras.models.Model(data, x)
+            verify_keras_frontend(keras_model, layout='NDHWC')

 if __name__ == '__main__':
     for k in [keras, tf_keras]:
@@ -513,6 +524,7 @@ def test_forward_embedding(self, keras):
         sut.test_forward_mobilenet(keras=k, layout='NHWC')
         sut.test_forward_conv3d(keras=k)
         sut.test_forward_pool3d(keras=k)
+        sut.test_forward_global_pool3d(keras=k)
         sut.test_forward_upsample3d(keras=k)
         sut.test_forward_zero_padding3d(keras=k)
         sut.test_forward_embedding(keras=k)
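
Usage note (reviewer sketch, not part of the patch): the snippet below exercises the two new Relay wrappers added in nn.py directly. The variable names and the 5-D input shape are illustrative assumptions, not taken from the patch; it only requires a TVM build that includes this change.

    import tvm
    from tvm import relay

    # Apply the two new wrappers to a 5-D tensor in the default NCDHW layout.
    data = relay.var("data", shape=(1, 3, 8, 16, 16), dtype="float32")
    pooled_max = relay.nn.global_max_pool3d(data)                  # layout defaults to "NCDHW"
    pooled_avg = relay.nn.global_avg_pool3d(data, layout="NCDHW")

    # Wrap both results in a function and run type inference to check the output shapes.
    func = relay.Function([data], relay.Tuple([pooled_max, pooled_avg]))
    mod = relay.transform.InferType()(tvm.IRModule.from_expr(func))

    # Both outputs should be typed as (1, 3, 1, 1, 1): each wrapper lowers to
    # adaptive pooling with output_size=[1, 1, 1].
    print(mod)

The Keras path added in keras.py is exercised by the new test: a model ending in GlobalMaxPooling3D or GlobalAveragePooling3D is converted through relay.frontend.from_keras(..., layout='NDHWC'), where each layer maps to the corresponding global pool followed by the existing flatten conversion.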