diff --git a/model-optimizer/automation/package_BOM.txt b/model-optimizer/automation/package_BOM.txt index d54f44718d1def..e20c6671428b2b 100644 --- a/model-optimizer/automation/package_BOM.txt +++ b/model-optimizer/automation/package_BOM.txt @@ -107,6 +107,7 @@ extensions/front/caffe/reshape.py extensions/front/caffe/shufflechannel_ext.py extensions/front/caffe/sigmoid.py extensions/front/caffe/simplernms_ext.py +extensions/front/caffe/slice_ext.py extensions/front/caffe/slice_to_split.py extensions/front/caffe/softmax_ext.py extensions/front/caffe/spatial_transformer_ext.py @@ -231,6 +232,7 @@ extensions/front/onnx/activation_ext.py extensions/front/onnx/affine_ext.py extensions/front/onnx/argmax_ext.py extensions/front/onnx/aten_ext.py +extensions/front/onnx/AttributedSliceToSlice.py extensions/front/onnx/cast_ext.py extensions/front/onnx/clip_ext.py extensions/front/onnx/const_ext.py @@ -447,6 +449,7 @@ extensions/front/tf/SwitchMergeOptimization.py extensions/front/tf/TensorArrayExtractors.py extensions/front/tf/TensorArrayGatherV3.py extensions/front/tf/tensorflow_custom_operations_config_update.py +extensions/front/tf/TFSliceToSlice.py extensions/front/tf/tile_ext.py extensions/front/tf/topk_ext.py extensions/front/tf/transpose_ext.py @@ -624,7 +627,6 @@ extensions/ops/merge.py extensions/ops/mvn.py extensions/ops/mxrepeat.py extensions/ops/mxreshape.py -extensions/ops/mxslice.py extensions/ops/NextIteration.py extensions/ops/non_max_suppression.py extensions/ops/non_zero.py @@ -723,7 +725,6 @@ mo/front/caffe/extractors/crop.py mo/front/caffe/extractors/native_caffe.py mo/front/caffe/extractors/roipooling.py mo/front/caffe/extractors/scale.py -mo/front/caffe/extractors/slice.py mo/front/caffe/extractors/tile.py mo/front/caffe/extractors/utils.py mo/front/caffe/loader.py diff --git a/model-optimizer/extensions/front/caffe/slice_ext.py b/model-optimizer/extensions/front/caffe/slice_ext.py new file mode 100644 index 00000000000000..fb4f8b0cf55b38 --- /dev/null +++ b/model-optimizer/extensions/front/caffe/slice_ext.py @@ -0,0 +1,46 @@ +""" + Copyright (C) 2018-2020 Intel Corporation + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+""" + +from mo.front.common.partial_infer.utils import int64_array +from mo.front.extractor import FrontExtractorOp +from mo.ops.slice import CaffeSlice + + +class SliceFrontExtractor(FrontExtractorOp): + op = 'slice' + enabled = True + + @classmethod + def extract(cls, node): + proto_layer = node.pb + param = proto_layer.slice_param + + # slice_dim is deprecated parameter and is used as alias for axis + # however if slice_dim is defined and axis is default, we use slice_dim + if param.slice_dim != 1 and param.axis == 1: + axis = param.slice_dim + else: + axis = param.axis + + update_attrs = { + 'axis': axis, + 'slice_point': int64_array(param.slice_point), + 'in_ports_count': 1, + 'out_ports_count': len(param.slice_point) + 1, + } + + CaffeSlice.update_node_stat(node, update_attrs) + return cls.enabled diff --git a/model-optimizer/extensions/front/caffe/slice_to_split.py b/model-optimizer/extensions/front/caffe/slice_to_split.py index e50addbdee7f9c..f42b94fcc9d2ab 100644 --- a/model-optimizer/extensions/front/caffe/slice_to_split.py +++ b/model-optimizer/extensions/front/caffe/slice_to_split.py @@ -24,7 +24,7 @@ class SliceToVariadicSplit(FrontReplacementOp): - op = "Slice" + op = "CaffeSlice" enabled = True def replace_sub_graph(self, graph: Graph, match: dict): @@ -37,7 +37,7 @@ def replace_sub_graph(self, graph: Graph, match: dict): return assert node.has_valid('slice_point'), 'Slice operation `{}` has no `slice_point` parameter'.format(name) - slice_point = np.array(node.slice_point) + slice_point = node.slice_point if slice_point.size == 0: num_splits = len(node.out_ports()) diff --git a/model-optimizer/extensions/front/mxnet/slice_ext.py b/model-optimizer/extensions/front/mxnet/slice_ext.py index 731a47c462d10d..8e799ebdb0d005 100644 --- a/model-optimizer/extensions/front/mxnet/slice_ext.py +++ b/model-optimizer/extensions/front/mxnet/slice_ext.py @@ -16,9 +16,9 @@ import numpy as np -from extensions.ops.mxslice import MXSlice from mo.front.extractor import FrontExtractorOp from mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs +from mo.ops.slice import MXSlice class SliceFrontExtractor(FrontExtractorOp): diff --git a/model-optimizer/extensions/front/mxnet/slice_replacers.py b/model-optimizer/extensions/front/mxnet/slice_replacers.py index 29c546884f2fa1..1e93da80aa37ef 100644 --- a/model-optimizer/extensions/front/mxnet/slice_replacers.py +++ b/model-optimizer/extensions/front/mxnet/slice_replacers.py @@ -22,7 +22,7 @@ from mo.ops.strided_slice import StridedSlice -class SliceFrontReplacer(FrontReplacementOp): +class MXSliceToStridedSliceReplacer(FrontReplacementOp): op = 'MXSlice' enabled = True diff --git a/model-optimizer/extensions/front/onnx/AttributedSliceToSlice.py b/model-optimizer/extensions/front/onnx/AttributedSliceToSlice.py new file mode 100644 index 00000000000000..e23cc0c77c37d8 --- /dev/null +++ b/model-optimizer/extensions/front/onnx/AttributedSliceToSlice.py @@ -0,0 +1,38 @@ +""" + Copyright (C) 2018-2020 Intel Corporation + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ See the License for the specific language governing permissions and + limitations under the License. +""" + +from mo.front.common.replacement import FrontReplacementOp +from mo.front.tf.graph_utils import create_op_with_const_inputs +from mo.graph.graph import Graph, rename_nodes +from mo.ops.slice import Slice + + +class AttributedSliceToSliceReplacer(FrontReplacementOp): + """ + This class replaces AttributedSlice -> Slice + """ + op = 'AttributedSlice' + enabled = True + + def replace_sub_graph(self, graph: Graph, match: dict): + node = match['op'] + slice_name = node.soft_get('name', node.id) + + slice_node = create_op_with_const_inputs(graph, Slice, {1: node.starts, 2: node.ends, 3: node.axes}) + rename_nodes([(node, slice_name + '/to_be_removed'), (slice_node, slice_name)]) + + node.in_port(0).get_connection().set_destination(slice_node.in_port(0)) + node.out_port(0).get_connection().set_source(slice_node.out_port(0)) diff --git a/model-optimizer/extensions/front/onnx/AttributedSliceToSlice_test.py b/model-optimizer/extensions/front/onnx/AttributedSliceToSlice_test.py new file mode 100644 index 00000000000000..ea478ba3287520 --- /dev/null +++ b/model-optimizer/extensions/front/onnx/AttributedSliceToSlice_test.py @@ -0,0 +1,62 @@ +""" + Copyright (C) 2018-2020 Intel Corporation + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+""" + +import unittest + +import numpy as np +from generator import generator, generate + +from extensions.front.onnx.AttributedSliceToSlice import AttributedSliceToSliceReplacer +from mo.utils.ir_engine.compare_graphs import compare_graphs +from mo.utils.unittest.graph import build_graph, regular_op_with_empty_data, result, const, connect_front + + +@generator +class SliceReplacerTest(unittest.TestCase): + @generate(*[ + {'op': 'AttributedSlice', 'type': None, 'starts': np.array([0, 0]), 'ends': np.array([1, -1]), 'axes': np.array([0, 1])} + ]) + def test_attributed_slice_replacer(self, attributed_slice_attrs): + nodes = { + **regular_op_with_empty_data('input', {'type': 'Parameter'}), + **regular_op_with_empty_data('attributed_slice', attributed_slice_attrs), + **result(), + + # nodes after replacement + **const('start', np.array([0, 0])), + **const('end', np.array([1, -1])), + **const('axis', np.array(np.array([0, 1]))), + **regular_op_with_empty_data('slice', {'op': 'Slice', 'type': None}), + } + + graph = build_graph(nodes_attrs=nodes, edges=[ + ('input', 'attributed_slice'), + ('attributed_slice', 'output'), + ], nodes_with_edges_only=True) + graph.stage = 'front' + + AttributedSliceToSliceReplacer().find_and_replace_pattern(graph) + + graph_ref = build_graph(nodes_attrs=nodes, edges=[ + ('input', 'slice'), + *connect_front('start', '1:slice'), + *connect_front('end', '2:slice'), + *connect_front('axis', '3:slice'), + ('slice', 'output'), + ], nodes_with_edges_only=True) + + (flag, resp) = compare_graphs(graph, graph_ref, 'output', check_op_attrs=True) + self.assertTrue(flag, resp) diff --git a/model-optimizer/extensions/front/onnx/slice_ext.py b/model-optimizer/extensions/front/onnx/slice_ext.py index b62c613cb497b2..47f38ab809fe5e 100644 --- a/model-optimizer/extensions/front/onnx/slice_ext.py +++ b/model-optimizer/extensions/front/onnx/slice_ext.py @@ -16,9 +16,12 @@ import numpy as np +from mo.front.common.partial_infer.utils import int64_array from mo.front.extractor import FrontExtractorOp +from mo.front.onnx.extractors.utils import get_onnx_opset_version from mo.front.onnx.extractors.utils import onnx_attr -from mo.ops.slice import Slice +from mo.ops.slice import Slice, AttributedSlice +from mo.utils.error import Error class SliceFrontExtractor(FrontExtractorOp): @@ -27,17 +30,18 @@ class SliceFrontExtractor(FrontExtractorOp): @classmethod def extract(cls, node): - axis = np.array(onnx_attr(node, 'axes', 'ints', default=[]), dtype=np.int64) - start = np.array(onnx_attr(node, 'starts', 'ints', default=[]), dtype=np.int64) - end = np.array(onnx_attr(node, 'ends', 'ints', default=[]), dtype=np.int64) - - attrs = { - 'axis': axis if len(axis) != 0 else None, - 'start': start if len(start) != 0 else None, - 'end': end if len(end) != 0 else None, - 'format': 'onnx' - } - - # update the attributes of the node - Slice.update_node_stat(node, attrs) + if get_onnx_opset_version(node) < 10: + starts = int64_array(onnx_attr(node, 'starts', 'ints', default=[])) + ends = int64_array(onnx_attr(node, 'ends', 'ints', default=[])) + axes = int64_array(onnx_attr(node, 'axes', 'ints', default=[])) + + if len(starts) == 0 or len(ends) == 0: + raise Error("starts or/and ends are not specified for the node {}".format(node.name)) + if len(axes) == 0: + axes = np.arange(len(starts), dtype=np.int) + + attrs = {'axes': axes, 'starts': starts, 'ends': ends} + AttributedSlice.update_node_stat(node, attrs) + else: # onnx_opset_version >= 10 + Slice.update_node_stat(node) return cls.enabled diff --git 
a/model-optimizer/extensions/front/onnx/slice_ext_test.py b/model-optimizer/extensions/front/onnx/slice_ext_test.py deleted file mode 100644 index a692afae1c6c65..00000000000000 --- a/model-optimizer/extensions/front/onnx/slice_ext_test.py +++ /dev/null @@ -1,75 +0,0 @@ -""" - Copyright (C) 2018-2020 Intel Corporation - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. -""" - -import unittest - -import numpy as np -import onnx -from generator import generator, generate - -from extensions.front.onnx.slice_ext import SliceFrontExtractor -from mo.ops.op import Op -from mo.ops.slice import Slice -from mo.utils.unittest.extractors import PB - - -@generator -class TestSliceONNXExt(unittest.TestCase): - @staticmethod - def _create_slice_node(axes, starts, ends): - if axes is None: - pb = onnx.helper.make_node( - 'Slice', - inputs=['x'], - outputs=['y'], - starts=starts, - ends=ends, - ) - else: - pb = onnx.helper.make_node( - 'Slice', - inputs=['x'], - outputs=['y'], - axes=axes, - starts=starts, - ends=ends, - ) - - node = PB({'pb': pb}) - return node - - @classmethod - def setUpClass(cls): - Op.registered_ops['Slice'] = Slice - - @generate(*[([0, 1], [0, 0], [28, 28]), (None, [0, 0], [28, 28])]) - def test_slice_ext(self, axes, starts, ends): - node = self._create_slice_node(axes, starts, ends) - SliceFrontExtractor.extract(node) - - exp_res = { - 'op': 'Slice', - 'axis': axes, - 'start': starts, - 'end': ends, - 'infer': Slice.infer - } - - for key in exp_res.keys(): - if type(node[key]) in [list, np.ndarray]: - self.assertTrue(np.array_equal(np.array(node[key]), np.array(exp_res[key]))) - else: - self.assertEqual(node[key], exp_res[key]) diff --git a/model-optimizer/extensions/front/tf/TFSliceToSlice.py b/model-optimizer/extensions/front/tf/TFSliceToSlice.py new file mode 100644 index 00000000000000..8c03ca7376a1d6 --- /dev/null +++ b/model-optimizer/extensions/front/tf/TFSliceToSlice.py @@ -0,0 +1,77 @@ +""" + Copyright (C) 2018-2020 Intel Corporation + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +""" + +import numpy as np + +from extensions.ops.elementwise import Add, Equal +from extensions.ops.select import Select +from mo.front.common.replacement import FrontReplacementOp +from mo.graph.graph import Graph, rename_nodes +from mo.ops.const import Const +from mo.ops.slice import Slice + + +class TFSliceToSliceReplacer(FrontReplacementOp): + """ + This transformation converts TFSlice to internal Slice operation. + In TFSlice size[i] == -1 means take all elements on axis i up to the end including(!) 
the last + In internal MO Slice (which is borrowed from ONNX) -1 means take all excluding(!) the last (shape[i] - 1). + Also TFSlice has a 'size' input (port 2) while Slice has an 'ends' input. + This transformation was added to avoid multiple if statements in future transformations. + """ + op = 'TFSlice' + enabled = True + + def replace_sub_graph(self, graph: Graph, match: dict): + node = match['op'] + slice_name = node.soft_get('name', node.id) + slice_node = Slice(graph).create_node() + rename_nodes([(node, slice_name + '/to_be_removed'), (slice_node, slice_name)]) + + eq_node = Equal(graph, {'name': slice_name + '/equal'}).create_node() + minus_one_node = Const(graph, {'name': slice_name + '/minus_one', 'value': np.array(-1)}).create_node() + int32_max_node = Const(graph, {'name': slice_name + '/int32_max', 'value': np.iinfo(np.int32).max}).create_node() + select_node = Select(graph, {'name': slice_name + '/select'}).create_node() + + # Add node that converts sizes to ends + sum_node = Add(graph, {'name': slice_name + '/end_const'}).create_node() + + # reconnect the data input from TFSlice to Slice + node.in_port(0).get_source().connect(slice_node.in_port(0)) + node.in_port(0).disconnect() + # reconnect 'begin' of TFSlice to 'starts' of Slice + node.in_port(1).get_source().connect(slice_node.in_port(1)) + node.in_port(1).disconnect() + + # (size -> ends) connect 'begin' and 'size' to the Add node to evaluate 'ends' for Slice + # 'begin' is taken from the Slice input and fed into the Add node + slice_node.in_port(1).get_source().connect(sum_node.in_port(0)) + node.in_port(2).get_source().connect(sum_node.in_port(1)) + node.in_port(2).disconnect() + + # if size[i] == -1 then take int32_max as end[i] + sum_node.in_port(1).get_source().connect(eq_node.in_port(0)) + minus_one_node.out_port(0).connect(eq_node.in_port(1)) + # Equal output goes to port 0 of Select + eq_node.out_port(0).connect(select_node.in_port(0)) + # int32_max constant goes to port 1 of Select + int32_max_node.out_port(0).connect(select_node.in_port(1)) + # Add (begin + size) output goes to port 2 of Select + sum_node.out_port(0).connect(select_node.in_port(2)) + # Select output becomes 'ends' (port 2 of Slice) + select_node.out_port(0).connect(slice_node.in_port(2)) + + node.out_port(0).get_connection().set_source(slice_node.out_port(0)) diff --git a/model-optimizer/extensions/front/tf/TFSliceToSlice_test.py b/model-optimizer/extensions/front/tf/TFSliceToSlice_test.py new file mode 100644 index 00000000000000..14be81eb3c43cd --- /dev/null +++ b/model-optimizer/extensions/front/tf/TFSliceToSlice_test.py @@ -0,0 +1,106 @@ +""" + Copyright (C) 2018-2020 Intel Corporation + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License.
+""" + +import unittest + +import numpy as np + +from extensions.front.tf.TFSliceToSlice import TFSliceToSliceReplacer +from mo.utils.ir_engine.compare_graphs import compare_graphs +from mo.utils.unittest.graph import build_graph, regular_op_with_empty_data, result, const, connect_front + +nodes = { + **regular_op_with_empty_data('input', {'type': 'Parameter'}), + **regular_op_with_empty_data('tfslice', {'op': 'TFSlice', 'type': None}), + **const('begin', np.array(0)), + **const('size', np.array([-1])), + **regular_op_with_empty_data('john_doe', {'op': 'Sum', 'type': None}), + **result(), + + # nodes after replacement + **const('minus_one', np.array(-1)), + **const('int32_max', np.array(np.iinfo(np.int32).max)), + **regular_op_with_empty_data('end_const', {'op': 'Add', 'type': 'Add'}), + **regular_op_with_empty_data('equal', {'op': 'Equal', 'type': 'Equal'}), + **regular_op_with_empty_data('select', {'op': 'Select', 'type': 'Select'}), + **regular_op_with_empty_data('slice', {'op': 'Slice', 'type': None}), +} + + +class SliceReplacerTest(unittest.TestCase): + # test case when input goes besides from TFSlice to other nodes + def test_slice_replacer_begin_with_2_inputs(self): + graph = build_graph(nodes_attrs=nodes, edges=[ + ('input', 'tfslice'), + *connect_front('begin:0', '1:tfslice'), + *connect_front('begin:0', '0:john_doe'), + *connect_front('size:0', '2:tfslice'), + *connect_front('tfslice:0', 'output'), + ], nodes_with_edges_only=True) + graph.stage = 'front' + + TFSliceToSliceReplacer().find_and_replace_pattern(graph) + + graph_ref = build_graph(nodes_attrs=nodes, edges=[ + *connect_front('input:0', 'slice'), + *connect_front('begin:0', 'slice:1'), + *connect_front('begin:0', 'john_doe:1'), + + *connect_front('begin:0', 'end_const:0'), + *connect_front('size:0', 'end_const:1'), + *connect_front('size:0', 'equal:0'), + + *connect_front('int32_max:0', 'select:1'), + *connect_front('minus_one:0', 'equal:1'), + + *connect_front('equal:0', 'select:0'), + + *connect_front('end_const:0', 'select:2'), + *connect_front('select:0', 'slice:2'), + + *connect_front('slice:0', 'output'), + ], nodes_with_edges_only=True) + + (flag, resp) = compare_graphs(graph, graph_ref, 'output', check_op_attrs=True) + self.assertTrue(flag, resp) + + def test_slice_replacer(self): + graph = build_graph(nodes_attrs=nodes, edges=[ + *connect_front('input:0', 'tfslice'), + *connect_front('begin:0', '1:tfslice'), + *connect_front('size:0', '2:tfslice'), + *connect_front('tfslice:0', 'output'), + ], nodes_with_edges_only=True) + graph.stage = 'front' + + TFSliceToSliceReplacer().find_and_replace_pattern(graph) + + graph_ref = build_graph(nodes_attrs=nodes, edges=[ + *connect_front('input:0', 'slice'), + *connect_front('begin:0', '1:slice'), + *connect_front('begin:0', '0:end_const'), + *connect_front('size:0', '1:end_const'), + *connect_front('size:0', '0:equal'), + *connect_front('int32_max:0', '1:select'), + *connect_front('minus_one:0', '1:equal'), + *connect_front('equal:0', '0:select'), + *connect_front('end_const:0', '2:select'), + *connect_front('select:0', '2:slice'), + *connect_front('slice:0', 'output'), + ], nodes_with_edges_only=True) + + (flag, resp) = compare_graphs(graph, graph_ref, 'output', check_op_attrs=True) + self.assertTrue(flag, resp) diff --git a/model-optimizer/extensions/front/tf/slice_ext.py b/model-optimizer/extensions/front/tf/slice_ext.py index 2b5f3fa321cdf0..335b8fe33c954d 100644 --- a/model-optimizer/extensions/front/tf/slice_ext.py +++ 
b/model-optimizer/extensions/front/tf/slice_ext.py @@ -16,7 +16,7 @@ from mo.front.extractor import FrontExtractorOp from mo.graph.graph import Node -from mo.ops.slice import Slice +from mo.ops.slice import TFSlice class SliceExtractor(FrontExtractorOp): @@ -25,9 +25,5 @@ class SliceExtractor(FrontExtractorOp): @classmethod def extract(cls, node: Node): - Slice.update_node_stat(node, { - 'axis': None, - 'start': None, - 'end': None, - }) - return cls.enabled \ No newline at end of file + TFSlice.update_node_stat(node) + return cls.enabled diff --git a/model-optimizer/extensions/middle/SliceConverter.py b/model-optimizer/extensions/middle/SliceConverter.py index 73d1d5a2c3dbcd..5ed91a756ff11b 100644 --- a/model-optimizer/extensions/middle/SliceConverter.py +++ b/model-optimizer/extensions/middle/SliceConverter.py @@ -17,10 +17,9 @@ import numpy as np from mo.front.common.partial_infer.utils import int64_array -from mo.graph.graph import Graph, Node, rename_nodes +from mo.graph.graph import Graph, rename_nodes from mo.middle.replacement import MiddleReplacementPattern from mo.ops.const import Const -from mo.ops.crop import Crop from mo.ops.strided_slice import StridedSlice from mo.utils.error import Error @@ -33,7 +32,7 @@ def convert_negative_indices(indices: np.array, shape: np.array): class ConvertSlice(MiddleReplacementPattern): """ - This class convert Slice operation to Crop, Split or StridedSlice depends on parameters + This class converts Slice operation to StridedSlice """ enabled = True @@ -52,14 +51,8 @@ def pattern(self): edges=[] ) - @staticmethod - def convert_onnx_slice_opset10(node: Node): - """ - Converts the Slice node from ONNX opset10 to StridedSlice. - :param node: Slice node - :return: None - """ - graph = node.graph + def replace_pattern(self, graph: Graph, match: dict): + node = match['slice'] input_shape = node.in_port(0).data.get_shape() output_shape = node.out_port(0).data.get_shape() @@ -68,7 +61,7 @@ def convert_onnx_slice_opset10(node: Node): if starts is None or ends is None: raise Error('The input with starts or end is not constant for node {}'.format(node.id)) - # in ONNX the value for 'ends' is usually -1 which is translated to maximum possible value of int64. This + # the value for 'ends' is usually maximum possible value of int64. 
This # value must be converted to maximum of int32 because such big values do not fit into the int32 which is # supported by the StridedSlice layer ends = np.clip(ends, np.iinfo(np.int32).min, np.iinfo(np.int32).max) @@ -90,7 +83,7 @@ def convert_onnx_slice_opset10(node: Node): ss_end_mask = np.zeros(len(input_shape), dtype=np.int32) ss_begin = np.zeros(len(input_shape), dtype=np.int32) ss_end = np.zeros(len(input_shape), dtype=np.int32) - ss_steps = np.ones(len(input_shape), dtype=np.int32) + ss_step = np.ones(len(input_shape), dtype=np.int32) # prepare inputs and attributes for the StridedSlice layer for i, axis in enumerate(axes): @@ -101,13 +94,13 @@ def convert_onnx_slice_opset10(node: Node): ss_end_mask[axis] = 1 ss_end[axis] = ends[i] - ss_steps[axis] = steps[i] + ss_step[axis] = steps[i] slice_node_name = node.soft_get('name', node.id) begin_node = Const(graph, {'value': ss_begin, 'name': slice_node_name + '/begin'}).create_node() end_node = Const(graph, {'value': ss_end, 'name': slice_node_name + '/end'}).create_node() - strides_node = Const(graph, {'value': ss_steps, 'name': slice_node_name + '/stride'}).create_node() + strides_node = Const(graph, {'value': ss_step, 'name': slice_node_name + '/stride'}).create_node() ss = StridedSlice(graph, dict(new_axis_mask=np.zeros(len(output_shape), dtype=np.int32), shrink_axis_mask=np.zeros(len(output_shape), dtype=np.int32), @@ -120,74 +113,3 @@ def convert_onnx_slice_opset10(node: Node): end_node.out_port(0).connect(ss.in_port(2)) strides_node.out_port(0).connect(ss.in_port(3)) node.out_port(0).get_connection().set_source(ss.out_port(0)) - - def replace_pattern(self, graph: Graph, match: dict): - node = match['slice'] - - input = node.in_node(0) - output_data = node.out_node() - - # ONNX 10 opset case - if len(node.in_nodes()) >= 3 and node.has_valid('format') and node['format'] == 'onnx': - self.convert_onnx_slice_opset10(node) - return - - # Caffe case - if not node.has_valid('start') or not node.has_valid('end'): - return - - begin = node.start - end = node.end - axis = node.axis if node.has_valid('axis') else np.arange(begin.size) - - # Check whether operation use only one axis or not - axes_begin = np.zeros(len(input.shape), dtype=np.int32) - axes_end = np.zeros(len(input.shape), dtype=np.int32) - ss_begin = np.zeros(len(input.shape), dtype=np.int32) - ss_end = np.zeros(len(input.shape), dtype=np.int32) - dims = 0 - axes = np.zeros(begin.size) - for i in range(len(axis)): - if begin[i] != 0 or end[i] < input.shape[axis[i]]: - dims += 1 - axes[i] = 1 - if begin[i] != 0: - axes_begin[axis[i]] = 1 - ss_begin[axis[i]] = begin[i] - if end[i] < input.shape[axis[i]]: - axes_end[axis[i]] = 1 - ss_end[axis[i]] = end[i] - axes = np.array(axes, dtype=bool) - - slice_node_name = node.soft_get('name', node.id) - - if dims == 1 or dims == 0: - # If Slice use only one axis or no axis, than - # convert Slice to StridedSlice - ss = StridedSlice(graph, dict(new_axis_mask=np.zeros(len(output_data.shape), dtype=np.int32), - shrink_axis_mask=np.zeros(len(output_data.shape), dtype=np.int32), - ellipsis_mask=np.zeros(len(output_data.shape), dtype=np.int32), - begin_mask=axes_begin, - end_mask=axes_end)).create_node() - - convert_negative_indices(ss_begin, input.shape) - convert_negative_indices(ss_end, input.shape) - - begin_node = Const(graph, {'value': ss_begin, 'name': slice_node_name + '/begin'}).create_node() - end_node = Const(graph, {'value': ss_end, 'name': slice_node_name + '/end'}).create_node() - - rename_nodes([(node, slice_node_name + 
'_delete'), (ss, slice_node_name)]) - - node.in_port(0).get_connection().set_destination(ss.in_port(0)) - begin_node.out_port(0).connect(ss.in_port(1)) - end_node.out_port(0).connect(ss.in_port(2)) - node.out_port(0).get_connection().set_source(ss.out_port(0)) - else: - # If Slice use more than one axis use Crop layer - crop = Crop(graph, dict(axis=axis[axes], - offset=begin[axes], - dim=end[axes] - begin[axes])).create_node() - rename_nodes([(node, slice_node_name + '_delete'), (crop, slice_node_name)]) - - node.in_port(0).get_connection().set_destination(crop.in_port(0)) - node.out_port(0).get_connection().set_source(crop.out_port(0)) diff --git a/model-optimizer/extensions/middle/SliceConvert_test.py b/model-optimizer/extensions/middle/SliceConverter_test.py similarity index 71% rename from model-optimizer/extensions/middle/SliceConvert_test.py rename to model-optimizer/extensions/middle/SliceConverter_test.py index dca8fd365ff779..92b118d0605b20 100644 --- a/model-optimizer/extensions/middle/SliceConvert_test.py +++ b/model-optimizer/extensions/middle/SliceConverter_test.py @@ -40,185 +40,106 @@ 'output_op': {'type': 'Const', 'value': None, 'kind': 'op', 'op': 'Const'}, 'output_data': {'value': None, 'shape': None, 'kind': 'data', 'data_type': None}, 'op_output': { 'kind': 'op', 'op': 'Result'}, - # Crop layer - 'crop': {'type': 'Crop', 'kind': 'op', 'op': 'Crop', 'axis': None, 'offset': None, 'dim': None}, - 'dim': {'value': None, 'shape': None, 'kind': 'data', 'data_type': None}, # StridedSlice layer 'strided_slice': {'kind': 'op', 'op': 'StridedSlice', 'slices': None, 'shrink_axis_mask': None} } class ConvertSliceTests(unittest.TestCase): - def test_1(self): - """ - Testing case with non-constant path and multiple - slicing dimensions - :return: - """ - graph = build_graph(nodes_attributes, - [('placeholder_1', 'placeholder_1_data'), - ('placeholder_1_data', 'slice'), - ('slice', 'slice_data'), - ('slice_data', 'output_op'), - ('output_op', 'output_data'), - ('output_data', 'op_output') - ], - {'placeholder_1_data': {'shape': np.array([4, 5, 6])}, - 'slice': {'start': np.array([1, 2, 3]), 'end': np.array([3, 4, 4]), 'axis': None}, - }, nodes_with_edges_only=True, - ) - slice_node = Node(graph, 'slice') - Slice.infer(slice_node) - - pattern = ConvertSlice() - pattern.find_and_replace_pattern(graph) - graph.clean_up() - - ss_node = Node(graph, graph.get_node_id_by_name('slice_node')) - assert ss_node.type == 'Crop', 'Something wrong with transformed Slice node' - - graph_ref = build_graph(nodes_attributes, - [('placeholder_1', 'placeholder_1_data'), - ('placeholder_1_data', 'crop'), - ('crop', 'slice_data'), - ('slice_data', 'output_op'), - ('output_op', 'output_data'), - ('output_data', 'op_output') - ], - {'placeholder_1_data': {'shape': np.array([4, 5, 6])}, - 'crop': {'axis': np.array([0, 1, 2]), 'offset': np.array([1, 2, 3]), - 'dim': np.array([2, 2, 1])}, - }, nodes_with_edges_only=True, - ) - (flag, resp) = compare_graphs(graph, graph_ref, 'output_op', check_op_attrs=True) - self.assertTrue(flag, resp) - - def test_2(self): - """ - Testing case with constant path and one - slicing dimension - """ - graph = build_graph(nodes_attributes, - [('placeholder_1', 'placeholder_1_data'), - ('placeholder_1_data', 'slice'), - ('slice', 'slice_data'), - ('slice_data', 'output_op'), - ('output_op', 'output_data'), - ('output_data', 'op_output') - ], - {'placeholder_1_data': {'shape': np.array([4, 5, 6])}, - 'slice': {'start': np.array([1]), 'end': np.array([3]), 'axis': None} - }, 
nodes_with_edges_only=True, - ) - graph.graph['layout'] = 'NHWC' - slice_node = Node(graph, 'slice') - Slice.infer(slice_node) - - pattern = ConvertSlice() - pattern.find_and_replace_pattern(graph) - graph.clean_up() - - ss_node = Node(graph, graph.get_node_id_by_name('slice_node')) - assert ss_node.type == 'StridedSlice', 'Something wrong with transformed Slice node' - - graph_ref = build_graph(nodes_attributes, - [('placeholder_1', 'placeholder_1_data'), - ('placeholder_2', 'placeholder_2_data'), - ('placeholder_3', 'placeholder_3_data'), - ('placeholder_1_data', 'strided_slice'), - ('placeholder_2_data', 'strided_slice'), - ('placeholder_3_data', 'strided_slice'), - ('strided_slice', 'slice_data'), - ('slice_data', 'output_op'), - ('output_op', 'output_data'), - ('output_data', 'op_output') - ], - {'placeholder_1_data': {'shape': np.array([4, 5, 6])}, - 'strided_slice': {'slices': np.array([slice(1, 3, 1),slice(0, 5, 1),slice(0, 6, 1)]), - 'shrink_axis_mask': np.array([False, False, False])}, - }, nodes_with_edges_only=True, - ) + nodes_attributes = { + # input data + 'placeholder_1': {'type': 'Parameter', 'kind': 'op', 'op': 'Parameter'}, + 'placeholder_1_data': {'value': None, 'shape': None, 'kind': 'data', 'data_type': None}, + # Slice layer inputs + 'starts': {'type': 'Const', 'kind': 'op', 'op': 'Const'}, + 'starts_data': {'value': None, 'shape': None, 'kind': 'data', 'data_type': None}, + 'ends': {'type': 'Const', 'kind': 'op', 'op': 'Const'}, + 'ends_data': {'value': None, 'shape': None, 'kind': 'data', 'data_type': None}, + 'strides': {'type': 'Const', 'kind': 'op', 'op': 'Const'}, + 'strides_data': {'value': None, 'shape': None, 'kind': 'data', 'data_type': None}, + 'axes': {'type': 'Const', 'kind': 'op', 'op': 'Const'}, + 'axes_data': {'value': None, 'shape': None, 'kind': 'data', 'data_type': None}, + 'steps': {'type': 'Const', 'kind': 'op', 'op': 'Const'}, + 'steps_data': {'value': None, 'shape': None, 'kind': 'data', 'data_type': None}, + # Slice layer + 'slice': {'type': 'Slice', 'kind': 'op', 'op': 'Slice', 'name': 'slice_node'}, + 'slice_data': {'value': None, 'shape': None, 'kind': 'data'}, + # Output operation + 'output_op': {'type': 'Const', 'kind': 'op', 'op': 'Const'}, + 'output_data': {'shape': None, 'kind': 'data', 'data_type': None}, + 'op_output': {'kind': 'op', 'op': 'Result'}, + # StridedSlice layer + 'strided_slice': {'kind': 'op', 'op': 'StridedSlice', 'slices': None, 'shrink_axis_mask': None} + } - (flag, resp) = compare_graphs(graph, graph_ref, 'output_op', check_op_attrs=True) - self.assertTrue(flag, resp) + def test_slice_all_params(self): + input_shape = int64_array([5, 10, 20]) + starts_value = int64_array([4, 2]) + ends_value = int64_array([15, 8]) + axes_value = int64_array([2, 1]) + steps_value = int64_array([1, 1]) - def test_3(self): - """ - Testing case with constant path and one - slicing dimension - """ - graph = build_graph(nodes_attributes, + masks_value = np.zeros([len(input_shape)], dtype=np.int64) + graph = build_graph(self.nodes_attributes, [('placeholder_1', 'placeholder_1_data'), - ('placeholder_1_data', 'slice'), + ('placeholder_1_data', 'slice', {'in': 0}), + ('starts', 'starts_data'), + ('starts_data', 'slice', {'in': 1}), + ('ends', 'ends_data'), + ('ends_data', 'slice', {'in': 2}), + ('axes', 'axes_data'), + ('axes_data', 'slice', {'in': 3}), + ('steps', 'steps_data'), + ('steps_data', 'slice', {'in': 4}), ('slice', 'slice_data'), ('slice_data', 'output_op'), ('output_op', 'output_data'), ('output_data', 'op_output') ], - 
{'placeholder_1_data': {'shape': np.array([1, 5, 6])}, - 'slice': {'start': np.array([1]), 'end': np.array([3]), 'axis': np.array([1])} - }, nodes_with_edges_only=True, + {'placeholder_1_data': {'shape': input_shape}, + 'starts': {'shape': starts_value.shape, 'value': starts_value}, + 'starts_data': {'shape': starts_value.shape, 'value': starts_value}, + 'ends': {'shape': ends_value.shape, 'value': ends_value}, + 'ends_data': {'shape': ends_value.shape, 'value': ends_value}, + 'steps': {'shape': steps_value.shape, 'value': steps_value}, + 'steps_data': {'shape': steps_value.shape, 'value': steps_value}, + 'axes': {'shape': axes_value.shape, 'value': axes_value}, + 'axes_data': {'shape': axes_value.shape, 'value': axes_value}, + }, nodes_with_edges_only=True ) - graph.graph['layout'] = 'NHWC' slice_node = Node(graph, 'slice') Slice.infer(slice_node) pattern = ConvertSlice() pattern.find_and_replace_pattern(graph) - graph.clean_up() ss_node = Node(graph, graph.get_node_id_by_name('slice_node')) assert ss_node.type == 'StridedSlice', 'Something wrong with transformed Slice node' - graph_ref = build_graph(nodes_attributes, + graph_ref = build_graph(self.nodes_attributes, [('placeholder_1', 'placeholder_1_data'), - ('placeholder_2', 'placeholder_2_data'), - ('placeholder_3', 'placeholder_3_data'), - ('placeholder_1_data', 'strided_slice'), - ('placeholder_2_data', 'strided_slice'), - ('placeholder_3_data', 'strided_slice'), + ('placeholder_1_data', 'strided_slice', {'in': 0}), + ('starts', 'starts_data'), + ('starts_data', 'strided_slice', {'in': 1}), + ('ends', 'ends_data'), + ('ends_data', 'strided_slice', {'in': 2}), + ('strides', 'strides_data'), + ('strides_data', 'strided_slice', {'in': 3}), ('strided_slice', 'slice_data'), ('slice_data', 'output_op'), ('output_op', 'output_data'), ('output_data', 'op_output') ], - {'placeholder_1_data': {'shape': np.array([1, 5, 6])}, - 'strided_slice': {'slices': np.array([slice(0, 1, 1),slice(1, 3, 1),slice(0, 6, 1)]), - 'shrink_axis_mask': np.array([False, False, False])}, - }, nodes_with_edges_only=True, + {'placeholder_1_data': {'shape': input_shape}, + 'strided_slice': {'new_axis_mask': masks_value, 'shrink_axis_mask': masks_value, + 'ellipsis_mask': masks_value, 'begin_mask': int64_array([0, 1, 1]), + 'end_mask': int64_array([0, 1, 1])}, + 'slice_data': {'shape': int64_array([5, 6, 11])} + }, nodes_with_edges_only=True ) - (flag, resp) = compare_graphs(graph, graph_ref, 'output_op', check_op_attrs=True) self.assertTrue(flag, resp) - -class ConvertSliceONNXOpset10Tests(unittest.TestCase): - nodes_attributes = { - # input data - 'placeholder_1': {'type': 'Parameter', 'kind': 'op', 'op': 'Parameter'}, - 'placeholder_1_data': {'value': None, 'shape': None, 'kind': 'data', 'data_type': None}, - # Slice layer inputs - 'starts': {'type': 'Const', 'kind': 'op', 'op': 'Const'}, - 'starts_data': {'value': None, 'shape': None, 'kind': 'data', 'data_type': None}, - 'ends': {'type': 'Const', 'kind': 'op', 'op': 'Const'}, - 'ends_data': {'value': None, 'shape': None, 'kind': 'data', 'data_type': None}, - 'strides': {'type': 'Const', 'kind': 'op', 'op': 'Const'}, - 'strides_data': {'value': None, 'shape': None, 'kind': 'data', 'data_type': None}, - 'axes': {'type': 'Const', 'kind': 'op', 'op': 'Const'}, - 'axes_data': {'value': None, 'shape': None, 'kind': 'data', 'data_type': None}, - 'steps': {'type': 'Const', 'kind': 'op', 'op': 'Const'}, - 'steps_data': {'value': None, 'shape': None, 'kind': 'data', 'data_type': None}, - # Slice layer - 'slice': {'type': 
'Slice', 'kind': 'op', 'op': 'Slice', 'format': 'onnx', 'end': None, 'name': 'slice_node'}, - 'slice_data': {'value': None, 'shape': None, 'kind': 'data'}, - # Output operation - 'output_op': {'type': 'Const', 'kind': 'op', 'op': 'Const'}, - 'output_data': {'shape': None, 'kind': 'data', 'data_type': None}, - 'op_output': {'kind': 'op', 'op': 'Result'}, - # StridedSlice layer - 'strided_slice': {'kind': 'op', 'op': 'StridedSlice', 'slices': None, 'shrink_axis_mask': None} - } - def test_no_steps_no_axes(self): input_shape = int64_array([5, 10, 20]) starts_value = int64_array([3, 2, 7]) diff --git a/model-optimizer/extensions/ops/mxslice.py b/model-optimizer/extensions/ops/mxslice.py deleted file mode 100644 index 9310c32448cec0..00000000000000 --- a/model-optimizer/extensions/ops/mxslice.py +++ /dev/null @@ -1,36 +0,0 @@ -""" - Copyright (C) 2018-2020 Intel Corporation - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. -""" - -from mo.graph.graph import Graph -from mo.ops.op import Op - - -class MXSlice(Op): - op = 'MXSlice' - enabled = True - - def __init__(self, graph: Graph, attrs: dict): - super().__init__(graph, { - 'kind': 'op', - 'type': None, - 'op': __class__.op, - 'in_ports_count': 1, - 'out_ports_count': 1, - 'infer': None - }, attrs) - - def supported_attrs(self): - return [] diff --git a/model-optimizer/mo/front/caffe/extractor.py b/model-optimizer/mo/front/caffe/extractor.py index 02948567419e76..d66dac26388de6 100644 --- a/model-optimizer/mo/front/caffe/extractor.py +++ b/model-optimizer/mo/front/caffe/extractor.py @@ -19,7 +19,6 @@ from mo.front.caffe.extractors.native_caffe import native_caffe_node_extractor from mo.front.caffe.extractors.roipooling import roipooling_ext from mo.front.caffe.extractors.scale import scale_ext -from mo.front.caffe.extractors.slice import slice_ext from mo.front.common.partial_infer.elemental import copy_shape_infer from mo.front.common.register_custom_ops import extension_op_extractor from mo.front.extractor import CaffePythonFrontExtractorOp @@ -49,7 +48,6 @@ def node_pb_arg(pb_extractor): # Utility Layers 'concat': node_pb_arg(concat_ext), - 'slice': node_pb_arg(slice_ext), # Custom, implemented in IE, Fast-RCNN-specific 'roipooling': node_pb_arg(roipooling_ext), diff --git a/model-optimizer/mo/front/caffe/extractors/slice.py b/model-optimizer/mo/front/caffe/extractors/slice.py deleted file mode 100644 index 49ffabaf5b2c45..00000000000000 --- a/model-optimizer/mo/front/caffe/extractors/slice.py +++ /dev/null @@ -1,41 +0,0 @@ -""" - Copyright (C) 2018-2020 Intel Corporation - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- See the License for the specific language governing permissions and - limitations under the License. -""" -from mo.front.caffe.collect_attributes import merge_attrs -from mo.front.common.partial_infer.slice import caffe_slice_infer - - -def slice_ext(proto_layer, model_layer): - param = proto_layer.slice_param - # slice_dim is deprecated parameter and is used as alias for axis - # however if slice_dim is defined and axis is default, we use slice_dim - if param.slice_dim != 1 and param.axis == 1: - axis = param.slice_dim - else: - axis = param.axis - update_attrs = { - 'axis': axis, - 'slice_point': param.slice_point, - } - mapping_rule = merge_attrs(param, update_attrs) - if 'slice_point' not in mapping_rule: - mapping_rule['slice_point'] = [] - mapping_rule.update({ - 'type': 'Slice', - 'in_ports_count': 1, - 'out_ports_count': len(mapping_rule['slice_point']) + 1, - 'infer': caffe_slice_infer - }) - return mapping_rule diff --git a/model-optimizer/mo/front/caffe/extractors/slice_test.py b/model-optimizer/mo/front/caffe/extractors/slice_test.py deleted file mode 100644 index fe5481db5b3b0f..00000000000000 --- a/model-optimizer/mo/front/caffe/extractors/slice_test.py +++ /dev/null @@ -1,102 +0,0 @@ -""" - Copyright (C) 2018-2020 Intel Corporation - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
-""" - -import unittest -from unittest.mock import patch - -import numpy as np - -from mo.front.caffe.extractors.slice import slice_ext -from mo.front.common.partial_infer.slice import caffe_slice_infer -from mo.utils.unittest.extractors import FakeMultiParam - - -class FakeProtoLayer: - def __init__(self, val): - self.slice_param = val - - -class TestSlice(unittest.TestCase): - @patch('mo.front.caffe.extractors.slice.merge_attrs') - def test_slice_ext(self, merge_attrs_mock): - params = { - 'type': 'Slice', - 'axis': 2, - 'slice_point': np.array([256]), - 'slice_dim': 3, - 'infer': caffe_slice_infer - } - merge_attrs_mock.return_value = { - **params, - 'test': 54, - 'test2': 'test3' - } - res = slice_ext(FakeProtoLayer(FakeMultiParam(params)), None) - exp_res = { - 'type': 'Slice', - 'axis': 2, - 'slice_point': np.array([256]), - 'infer': caffe_slice_infer - } - for i in exp_res: - self.assertEqual(res[i], exp_res[i]) - - @patch('mo.front.caffe.extractors.slice.merge_attrs') - def test_slice_ext_slice_dim(self, merge_attrs_mock): - params = { - 'type': 'Slice', - 'axis': 1, - 'slice_point': np.array([256]), - 'slice_dim': 3, - 'infer': caffe_slice_infer - } - merge_attrs_mock.return_value = { - **params, - 'axis': 3 - } - res = slice_ext(FakeProtoLayer(FakeMultiParam(params)), None) - exp_res = { - 'type': 'Slice', - 'axis': 3, - 'slice_point': np.array([256]), - 'infer': caffe_slice_infer - } - for i in exp_res: - self.assertEqual(res[i], exp_res[i]) - - @patch('mo.front.caffe.extractors.slice.merge_attrs') - def test_slice_ext_no_params(self, merge_attrs_mock): - params = { - 'type': 'Slice', - 'axis': 1, - 'slice_dim': 1, - 'slice_point': [], - 'infer': caffe_slice_infer - } - merge_attrs_mock.return_value = { - 'type': 'Slice', - 'axis': 1, - 'infer': caffe_slice_infer - } - res = slice_ext(FakeProtoLayer(FakeMultiParam(params)), None) - exp_res = { - 'type': 'Slice', - 'axis': 1, - 'slice_point': [], - 'infer': caffe_slice_infer - } - for i in exp_res: - self.assertEqual(res[i], exp_res[i]) diff --git a/model-optimizer/mo/front/common/partial_infer/slice.py b/model-optimizer/mo/front/common/partial_infer/slice.py index 5c850f20a89ae0..00dbc257a1eca6 100644 --- a/model-optimizer/mo/front/common/partial_infer/slice.py +++ b/model-optimizer/mo/front/common/partial_infer/slice.py @@ -121,48 +121,6 @@ def convert_negative_indices(indices: np.array, shape: np.array): indices[ind] += shape[ind] -def caffe_slice_infer(node): - """ - Slices an input layer to multiple output layers along a given dimension - with given slice indices - Parameters - ---------- - node - - """ - top_shape = node.in_node(0).shape - slice_axis = node.axis - bottom_slice_axis = node.in_node(0).shape[node.axis] - if len(node.slice_point) == 0: - new_shape = np.array(top_shape, dtype=np.int64) - new_shape[slice_axis] = bottom_slice_axis / len(node.out_nodes()) - for i in range(0, len(node.out_nodes())): - node.out_node(i).shape = new_shape - return - - assert (len(node.slice_point) == len(node.out_nodes()) - 1) - prev = 0 - slices = [] - for slice_point in node.slice_point: - if slice_point <= prev: - raise Error( - 'Check failed for the layer {}. Slice points should be ordered in increasing manner. '.format(node.id) + - 'Current slice point {} is not greater than the previous slice point {}. 
'.format(slice_point, prev) + - 'Please verify your model correctness') - slices.append(slice_point - prev) - prev = slice_point - - slices.append(bottom_slice_axis - prev) - if sum(slices) != bottom_slice_axis: - raise Error( - 'Check failed for the layer {}. Sum of slices points {} does not equal '.format(node.id, sum(slices)) + - 'to the value of input blob shape by the given slice axis {}'.format(bottom_slice_axis)) - for i in range(len(node.out_nodes())): - new_shape = np.array(top_shape, dtype=np.int64) - new_shape[slice_axis] = slices[i] - node.out_node(i).shape = new_shape - - def mxnet_slice_axis_infer(node): in_shape = node.in_node(0).shape node.axis = get_canonical_axis_index(in_shape, node.axis) diff --git a/model-optimizer/mo/front/common/partial_infer/slice_test.py b/model-optimizer/mo/front/common/partial_infer/slice_test.py index b3f83482fbe23a..fd42947e31bea3 100644 --- a/model-optimizer/mo/front/common/partial_infer/slice_test.py +++ b/model-optimizer/mo/front/common/partial_infer/slice_test.py @@ -18,8 +18,7 @@ import numpy as np -from mo.front.common.partial_infer.slice import caffe_slice_infer, tf_strided_slice_infer, \ - convert_negative_indices, mxnet_slice_axis_infer +from mo.front.common.partial_infer.slice import tf_strided_slice_infer, convert_negative_indices, mxnet_slice_axis_infer from mo.graph.graph import Node from mo.utils.unittest.graph import build_graph @@ -50,133 +49,6 @@ ('tf_slice', 'tf_slice_output')] -class TestSSliceInfer(unittest.TestCase): - def test_slice_infer_ideal(self): - graph = build_graph(nodes_attributes, - [('node_1', 'Slice_node'), - ('Slice_node', 'node_2'), - ('Slice_node', 'node_3'), - ('node_2', 'op_output'), - ('node_3', 'op_output_1') - ], - {'node_1': {'shape': np.array([1, 288, 56, 56])}, - 'node_2': {'shape': None}, - 'node_3': {'shape': None}, - 'Slice_node': {'axis': 1, 'slice_point': np.array([256])} - }) - - slice_node = Node(graph, 'Slice_node') - - caffe_slice_infer(slice_node) - exp_shape1 = np.array([1, 256, 56, 56]) - exp_shape2 = np.array([1, 32, 56, 56]) - res_shape1 = graph.node['node_2']['shape'] - res_shape2 = graph.node['node_3']['shape'] - - for i in range(0, len(exp_shape1)): - self.assertEqual(exp_shape1[i], res_shape1[i]) - - for i in range(0, len(exp_shape2)): - self.assertEqual(exp_shape2[i], res_shape2[i]) - - def test_slice_infer_no_slice_point(self): - graph = build_graph(nodes_attributes, - [('node_1', 'Slice_node'), - ('Slice_node', 'node_2'), - ('Slice_node', 'node_3'), - ('node_2', 'op_output'), - ('node_3', 'op_output_1') - ], - {'node_1': {'shape': np.array([1, 288, 56, 56])}, - 'node_2': {'shape': None}, - 'node_3': {'shape': None}, - 'Slice_node': {'axis': 1, 'slice_point': []} - }) - - slice_node = Node(graph, 'Slice_node') - - caffe_slice_infer(slice_node) - exp_shape = np.array([1, 144, 56, 56]) - res_shape1 = graph.node['node_2']['shape'] - res_shape2 = graph.node['node_3']['shape'] - - for i in range(0, len(exp_shape)): - self.assertEqual(exp_shape[i], res_shape1[i]) - - for i in range(0, len(exp_shape)): - self.assertEqual(exp_shape[i], res_shape2[i]) - - def test_slice_infer_3_outs_no_slice_point(self): - graph = build_graph(nodes_attributes, - [('node_1', 'Slice_node'), - ('Slice_node', 'node_2'), - ('Slice_node', 'node_3'), - ('Slice_node', 'node_4'), - ('node_2', 'op_output'), - ('node_3', 'op_output_1'), - ('node_2', 'op_output_2') - ], - {'node_1': {'shape': np.array([1, 288, 56, 56])}, - 'node_2': {'shape': None}, - 'node_3': {'shape': None}, - 'node_4': {'shape': None}, - 
'Slice_node': {'axis': 1, 'slice_point': []} - }) - - slice_node = Node(graph, 'Slice_node') - - caffe_slice_infer(slice_node) - exp_shape = np.array([1, 96, 56, 56]) - res_shape1 = graph.node['node_2']['shape'] - res_shape2 = graph.node['node_3']['shape'] - res_shape3 = graph.node['node_4']['shape'] - - for i in range(0, len(exp_shape)): - self.assertEqual(exp_shape[i], res_shape1[i]) - - for i in range(0, len(exp_shape)): - self.assertEqual(exp_shape[i], res_shape2[i]) - - for i in range(0, len(exp_shape)): - self.assertEqual(exp_shape[i], res_shape3[i]) - - def test_slice_infer_3_outs(self): - graph = build_graph(nodes_attributes, - [('node_1', 'Slice_node'), - ('Slice_node', 'node_2'), - ('Slice_node', 'node_3'), - ('Slice_node', 'node_4'), - ('node_2', 'op_output'), - ('node_3', 'op_output_1'), - ('node_2', 'op_output_2') - ], - {'node_1': {'shape': np.array([1, 288, 56, 56])}, - 'node_2': {'shape': None}, - 'node_3': {'shape': None}, - 'node_4': {'shape': None}, - 'Slice_node': {'axis': 1, 'slice_point': [100, 150]} - }) - - slice_node = Node(graph, 'Slice_node') - - caffe_slice_infer(slice_node) - exp_shape1 = np.array([1, 100, 56, 56]) - exp_shape2 = np.array([1, 50, 56, 56]) - exp_shape3 = np.array([1, 138, 56, 56]) - res_shape1 = graph.node['node_2']['shape'] - res_shape2 = graph.node['node_3']['shape'] - res_shape3 = graph.node['node_4']['shape'] - - for i in range(0, len(exp_shape1)): - self.assertEqual(exp_shape1[i], res_shape1[i]) - - for i in range(0, len(exp_shape2)): - self.assertEqual(exp_shape2[i], res_shape2[i]) - - for i in range(0, len(exp_shape3)): - self.assertEqual(exp_shape3[i], res_shape3[i]) - - class TestTFStridedSliceInfer(unittest.TestCase): def build_test_graph2(self): return build_graph(nodes_attributes, diff --git a/model-optimizer/mo/ops/slice.py b/model-optimizer/mo/ops/slice.py index f37c3cb794025a..1002377aa6194b 100644 --- a/model-optimizer/mo/ops/slice.py +++ b/model-optimizer/mo/ops/slice.py @@ -13,133 +13,176 @@ See the License for the specific language governing permissions and limitations under the License. """ - -import logging as log +from typing import List import numpy as np from mo.graph.graph import Node, Graph from mo.ops.op import Op +from mo.utils.error import Error + +""" +Slicing operations have different semantic or different parameters/inputs in different frameworks. To distinguish them +several internal operations are introduced. The internal MO Slice operation behaves same as Slice in ONNX opset >= 10. +A number of transformations take place on the front phase to convert framework slicing: + - AttributedSlice, TFSlice -> Slice + - CaffeSlice -> Split + - MXSlice -> StridedSlice +""" + + +class AttributedSlice(Op): + """ + AttributedSlice is used in old versions of ONNX models (opset version < 10). + The operation is replaced with the OpenVINO Slice operation on the front phase. + """ + op = 'AttributedSlice' + enabled = False + + def __init__(self, graph: Graph, attrs: dict): + super().__init__(graph, { + 'type': None, + 'op': self.op, + 'in_ports_count': 1, + 'out_ports_count': 1, + 'infer': None, + }, attrs) + + +class CaffeSlice(Op): + """ + Slice in Caffe is equivalent to Split operation in OpenVINO. + https://caffe.berkeleyvision.org/tutorial/layers/slice.html + The operation is replaced with the OpenVINO Split operation on the front phase. 
+ """ + op = 'CaffeSlice' + enabled = False + + def __init__(self, graph: Graph, attrs: dict): + super().__init__(graph, { + 'type': None, + 'op': self.op, + 'in_ports_count': 1, + 'out_ports_count': 1, + 'infer': None, + }, attrs) + + + +class TFSlice(Op): + """ + Slice operation in Tensorflow is different from Slice in ONNX, Caffe and MXNet. It has begin and size inputs while + ONNX Slice and internal MO Slice has start, end, step and axis parameters specified as inputs. + https://www.tensorflow.org/api_docs/python/tf/slice + The operation is replaced with the internal Slice on the front phase. + If size[i] == -1 is replaced to int32_max value for the end. + """ + op = 'TFSlice' + enabled = False + + def __init__(self, graph: Graph, attrs: dict): + super().__init__(graph, { + 'type': None, + 'op': self.op, + 'in_ports_count': 3, + 'out_ports_count': 1, + 'infer': None, + }, attrs) + + +class MXSlice(Op): + """ + Slice operation in MXNet is different from ONNX, Caffe, Tensorflow. It has begin, end & step attributes + https://mxnet.apache.org/versions/1.6/api/python/docs/api/symbol/op/index.html#mxnet.symbol.op.slice + The operation is replaced with the OpenVINO StridedSlice operation on the front phase. + """ + op = 'MXSlice' + enabled = False + + def __init__(self, graph: Graph, attrs: dict): + super().__init__(graph, { + 'kind': 'op', + 'type': None, + 'op': self.op, + 'in_ports_count': 1, + 'out_ports_count': 1, + 'infer': None + }, attrs) class Slice(Op): + """ + Semantic of MO internal Slice operation is identical to Slice in ONNX opset >= 10. + It has starts, ends, steps and axes inputs. + The operation is internal (not present in the OpenVINO opset) and is replaced to StridedSlice. + """ op = 'Slice' - enabled = True + enabled = False - def __init__(self, graph: Graph, attrs: dict): + def __init__(self, graph: Graph, attrs: dict = None): super().__init__(graph, { - 'type': __class__.op, + 'type': None, 'op': 'Slice', - 'in_ports_count': 3, + 'in_ports_count': 5, 'out_ports_count': 1, 'infer': __class__.infer }, attrs) - def supported_attrs(self): - return ['start', 'end', 'axis'] - @staticmethod def infer(node: Node): + input_value = node.in_port(0).data.get_value() input_shape = node.in_port(0).data.get_shape() - axis = None - steps = None - if len(node.in_nodes()) == 1: - # Caffe or ONNX before 10 opset - if node.has('start') and node.has('end') and node.has('axis'): - # ONNX case - if node.has_valid('start') and node.has_valid('end') and node.has('axis'): - start = node.start - end = node.end - axis = node.axis - else: - log.warning('Incorrect slice operation: no starts or end attr') - return - else: - # Caffe case - from mo.front.common.partial_infer.slice import caffe_slice_infer - caffe_slice_infer(node) - elif len(node.in_nodes()) >= 3: - if node.has('format') and node['format'] == 'onnx': - # ONNX 10 opset case - starts_node = node.in_node(1) - ends_node = node.in_node(2) - if starts_node.has_valid('value') and ends_node.has_valid('value'): - start = np.array(node.in_node(1).value, dtype=np.int64) - end = np.array(node.in_node(2).value, dtype=np.int64) - if 3 in node.in_nodes(): - if node.in_node(3).has_valid('value'): - axis = np.array(node.in_node(3).value, dtype=np.int64) - else: - log.warning('Incorrect slice operation: axes should be const') - return - if 4 in node.in_nodes(): - if node.in_node(4).has_valid('value'): - steps = np.array(node.in_node(4).value, dtype=np.int64) - else: - log.warning('Incorrect slice operation: steps should be const') - return - else: - 
-                    log.warning('Incorrect slice operation: no starts or ends attr')
-                    return
-            else:
-                # TF case
-                start_node = node.in_node(1)
-                size_node = node.in_node(2)
-                if start_node.has_valid('value') and size_node.has_valid('value'):
-                    start = np.array(node.in_node(1).value, dtype=np.int64)
-                    size = np.array(node.in_node(2).value, dtype=np.int64)
-                    end = start + size
-                    axis = None
-
-                    # Check for situation when size[i] == -1 in TF
-                    for i in range(start.size):
-                        if end[i] < start[i]:
-                            end[i] = input_shape[i]
-
-                    # Delete edges to start, size nodes
-                    node.graph.remove_edge(node.in_node(1).id, node.id)
-                    node.graph.remove_edge(node.in_node(2).id, node.id)
-
-                    node['start'] = start
-                    node['end'] = end
-                    node['axis'] = None
-                else:
-                    log.warning('Incorrect slice operation: no starts or end attr')
-                    return
-        else:
-            log.warning('Incorrect number of input nodes in slice operation')
-            return
-
-        # Update end param
-        node.end = end
-        value = node.in_node(0).value
-        # If value is None create dummy value for shape propagation
-        if value is None:
-            value = np.zeros(input_shape)
+        starts = node.in_port(1).data.get_value()
+        ends = node.in_port(2).data.get_value()
+        if starts is None or ends is None:
+            raise Error('The non-constant start/end values for Slice operation "{}" are not supported'.format(node.name))

-        # Following ONNX and TF specification, in case of unknown axis, axises should be in greater order
-        if axis is None:
-            axis = [x for x in range(len(start))]
+        if node.is_in_port_connected(3):
+            axes = node.in_port(3).data.get_value()
+            if axes is None:
+                raise Error('The non-constant axes values for Slice operation "{}" are not supported'.format(node.name))
+        else:
+            axes = [x for x in range(len(starts))]

-        if steps is None:
-            steps = np.ones(start.size, dtype=np.int64)
+        if node.is_in_port_connected(4):
+            steps = node.in_port(4).data.get_value()
+            if steps is None:
+                raise Error('The non-constant steps values for Slice operation "{}" are not supported'.format(node.name))
+        else:
+            steps = np.ones(len(starts), dtype=np.int64)

-        # Calculate output value for slice operation
-        slice_idx = [None for x in range(len(node.in_node().shape))]
-        shrink_axis_mask = [False for x in range(len(node.in_node().shape))]
-        for id in range(len(axis)):
+        slice_idx = [slice(0, in_shape, 1) for in_shape in input_shape]
+        for i in range(len(axes)):
             # Build the slice range for the specified axis
-            slice_idx[axis[id]] = slice(start[id], end[id], steps[id])
-
-        for axis, s in enumerate(slice_idx):
-            if s is None:
-                slice_idx[axis] = slice(0, input_shape[axis], 1)
-
-        # Add new parameters to node
-        node['slices'] = np.array(slice_idx)
-        node['shrink_axis_mask'] = np.array(shrink_axis_mask)
-
-        value = value[tuple(slice_idx)]
-        node.out_node().value = value.copy() if node.in_node(0).value is not None else None
-        node.out_node().shape = np.array(value.shape)
+            slice_idx[axes[i]] = slice(starts[i], ends[i], steps[i])
+        if input_value is None:
+            output_shape = get_shape_after_slice(input_shape, slice_idx)
+            if np.any(output_shape <= 0):
+                raise Error('Output shape: {} of node "{}" contains non-positive values'.format(output_shape, node.name))
+            node.out_port(0).data.set_shape(output_shape)
+        else:
+            node.out_port(0).data.set_value(input_value[tuple(slice_idx)])
+
+
+def get_shape_after_slice(input_shape: np.ndarray, slice_idx: List[slice]) -> np.ndarray:
+    """
+    Calculate the shape of a tensor after slicing without actually creating the resulting tensor.
+    Introduced to prevent potentially large memory consumption.
+ """ + output_shape = np.zeros(len(input_shape), dtype=np.int32) + for i, s in enumerate(slice_idx): + start, end, step = normalize_slice_indices(input_shape[i], s.start, s.stop, s.step) + output_shape[i] = (end - start + step - 1) / step + return output_shape + + +def normalize_slice_indices(size: int, start: int, end: int, step: int) -> (int, int, int): + # converts slice indices to format in which size of Slice can be calculated + start = size + start if start < 0 else start + end = size + end if end < 0 else end + start = np.clip(start, 0, size) + end = np.clip(end, 0, size) + if step < 0: + start, end, step = end, start, -step # if calculate size without values we can do that + return start, end, step diff --git a/model-optimizer/mo/ops/slice_test.py b/model-optimizer/mo/ops/slice_test.py index dac0c4fdb60210..05003442afc39f 100644 --- a/model-optimizer/mo/ops/slice_test.py +++ b/model-optimizer/mo/ops/slice_test.py @@ -16,134 +16,118 @@ import unittest import numpy as np -from generator import generator +from generator import generator, generate +from mo.front.common.partial_infer.utils import int64_array from mo.graph.graph import Node from mo.ops.slice import Slice -from mo.utils.unittest.graph import build_graph - -nodes_attributes = { - 'data_1': { - 'kind': 'data', - 'shape': None, - 'value': None, - }, - 'begin': { - 'kind': 'data', - 'shape': None, - 'value': None, - }, - 'size': { - 'kind': 'data', - 'shape': None, - 'value': None, - }, - 'starts': { - 'kind': 'data', - 'shape': None, - 'value': None, - }, - 'ends': { - 'kind': 'data', - 'shape': None, - 'value': None, - }, - 'slice': { - 'op': 'Slice', - 'axis': None, - 'start': None, - 'end': None, - 'kind': 'op', - }, - 'data_2': { - 'kind': 'data', - 'shape': None, - 'value': None, - } -} +from mo.utils.error import Error +from mo.utils.unittest.graph import build_graph, valued_const_with_data, valued_data, regular_op_with_empty_data, \ + connect, shaped_data, shaped_const_with_data @generator class TestSliceOp(unittest.TestCase): - def test_slice_infer_constant(self): - # Testing constant path case - graph = build_graph(nodes_attributes, - [('data_1', 'slice'), - ('begin', 'slice'), - ('size', 'slice'), - ('slice', 'data_2')], - {'data_1': {'shape': np.array([4]), 'value': np.array([1, 3, 224, 224])}, - 'slice': {'start': np.array([1]), 'end': np.array([2])}, - 'size': {'value': np.array([1])}, - 'begin': {'value': np.array([1])}}) - - slice_node = Node(graph, 'slice') - Slice.infer(slice_node) - - self.assertTrue(np.array_equal(slice_node.out_node().value, np.array([3]))) - self.assertTrue(np.array_equal(slice_node.out_node().shape, np.array([1]))) - self.assertTrue(np.array_equal(slice_node['slices'], np.array([slice(1, 2, 1)]))) - - def test_slice_infer_non_constant(self): - # Testing non-constant path case (when value in input is None) - # with multiply params - graph = build_graph(nodes_attributes, - [('data_1', 'slice'), - ('begin', 'slice'), - ('size', 'slice'), - ('slice', 'data_2')], - {'data_1': {'shape': np.array([4, 5, 6])}, - 'slice': {'start': np.array([1, 2]), - 'end': np.array([4, 3])}, - 'size': {'value': np.array([3, 1])}, - 'begin': {'value': np.array([1, 2])}}) - - slice_node = Node(graph, 'slice') - - Slice.infer(slice_node) - self.assertTrue(np.array_equal(slice_node.out_node().value, None)) - self.assertTrue(np.array_equal(slice_node.out_node().shape, np.array([3, 1, 6]))) - self.assertTrue(np.array_equal(slice_node['slices'], np.array([slice(1, 4, 1), slice(2, 3, 1), slice(0, 6, 1)]))) - - def 
diff --git a/model-optimizer/mo/ops/slice_test.py b/model-optimizer/mo/ops/slice_test.py
index dac0c4fdb60210..05003442afc39f 100644
--- a/model-optimizer/mo/ops/slice_test.py
+++ b/model-optimizer/mo/ops/slice_test.py
@@ -16,134 +16,118 @@
 import unittest

 import numpy as np
-from generator import generator
+from generator import generator, generate

+from mo.front.common.partial_infer.utils import int64_array
 from mo.graph.graph import Node
 from mo.ops.slice import Slice
-from mo.utils.unittest.graph import build_graph
-
-nodes_attributes = {
-    'data_1': {
-        'kind': 'data',
-        'shape': None,
-        'value': None,
-    },
-    'begin': {
-        'kind': 'data',
-        'shape': None,
-        'value': None,
-    },
-    'size': {
-        'kind': 'data',
-        'shape': None,
-        'value': None,
-    },
-    'starts': {
-        'kind': 'data',
-        'shape': None,
-        'value': None,
-    },
-    'ends': {
-        'kind': 'data',
-        'shape': None,
-        'value': None,
-    },
-    'slice': {
-        'op': 'Slice',
-        'axis': None,
-        'start': None,
-        'end': None,
-        'kind': 'op',
-    },
-    'data_2': {
-        'kind': 'data',
-        'shape': None,
-        'value': None,
-    }
-}
+from mo.utils.error import Error
+from mo.utils.unittest.graph import build_graph, valued_const_with_data, valued_data, regular_op_with_empty_data, \
+    connect, shaped_data, shaped_const_with_data


 @generator
 class TestSliceOp(unittest.TestCase):
-    def test_slice_infer_constant(self):
-        # Testing constant path case
-        graph = build_graph(nodes_attributes,
-                            [('data_1', 'slice'),
-                             ('begin', 'slice'),
-                             ('size', 'slice'),
-                             ('slice', 'data_2')],
-                            {'data_1': {'shape': np.array([4]), 'value': np.array([1, 3, 224, 224])},
-                             'slice': {'start': np.array([1]), 'end': np.array([2])},
-                             'size': {'value': np.array([1])},
-                             'begin': {'value': np.array([1])}})
-
-        slice_node = Node(graph, 'slice')
-        Slice.infer(slice_node)
-
-        self.assertTrue(np.array_equal(slice_node.out_node().value, np.array([3])))
-        self.assertTrue(np.array_equal(slice_node.out_node().shape, np.array([1])))
-        self.assertTrue(np.array_equal(slice_node['slices'], np.array([slice(1, 2, 1)])))
-
-    def test_slice_infer_non_constant(self):
-        # Testing non-constant path case (when value in input is None)
-        # with multiply params
-        graph = build_graph(nodes_attributes,
-                            [('data_1', 'slice'),
-                             ('begin', 'slice'),
-                             ('size', 'slice'),
-                             ('slice', 'data_2')],
-                            {'data_1': {'shape': np.array([4, 5, 6])},
-                             'slice': {'start': np.array([1, 2]),
-                                       'end': np.array([4, 3])},
-                             'size': {'value': np.array([3, 1])},
-                             'begin': {'value': np.array([1, 2])}})
-
-        slice_node = Node(graph, 'slice')
-
-        Slice.infer(slice_node)
-        self.assertTrue(np.array_equal(slice_node.out_node().value, None))
-        self.assertTrue(np.array_equal(slice_node.out_node().shape, np.array([3, 1, 6])))
-        self.assertTrue(np.array_equal(slice_node['slices'], np.array([slice(1, 4, 1), slice(2, 3, 1), slice(0, 6, 1)])))
-
-    def test_slice_infer_multiply_params(self):
-        # Test case for TF when size[i] == -1 (that means all
-        # remaining elements in dimension i are included in the slice)
-        graph = build_graph(nodes_attributes,
-                            [('data_1', 'slice'),
-                             ('begin', 'slice'),
-                             ('size', 'slice'),
-                             ('slice', 'data_2')],
-                            {'data_1': {'shape': np.array([4, 5, 6])},
-                             'slice': {'start': np.array([1, 2]),
-                                       'end': np.array([4, 1])},
-                             'size': {'value': np.array([3, -1])},
-                             'begin': {'value': np.array([1, 2])}})
-
-        slice_node = Node(graph, 'slice')
-
-        Slice.infer(slice_node)
-        self.assertTrue(np.array_equal(slice_node.out_node().value, None))
-        self.assertTrue(np.array_equal(slice_node.out_node().shape, np.array([3, 3, 6])))
-        self.assertTrue(np.array_equal(slice_node['slices'], np.array([slice(1, 4, 1), slice(2, 5, 1), slice(0, 6, 1)])))
-
-    def test_slice_onnx_10_opset_case(self):
-        # check for negative end value in the case of ONNX 10 opset
-        input = np.array([[4, 5, 6, 7], [2, 3, 5, 6], [5, 6, 8, 9], [5, 6, 8, 9]])
-        starts = np.array([0, 1])
-        ends = np.array([3, -2])
-        expected_values = np.array([[5], [3], [6]])
-
-        graph = build_graph(nodes_attributes,
-                            [('data_1', 'slice'),
-                             ('starts', 'slice'),
-                             ('ends', 'slice'),
-                             ('slice', 'data_2')],
-                            {'data_1': {'value': input, 'shape': input.shape},
-                             'starts': {'value': starts, 'shape': starts.shape},
-                             'ends': {'value': ends, 'shape': ends.shape},
-                             'slice': {'format': 'onnx'}})
-
-        slice_node = Node(graph, 'slice')
-
-        Slice.infer(slice_node)
-        self.assertTrue(np.array_equal(slice_node.out_node().value, expected_values))
+    @generate(*[
+        # standard case
+        ([[4, 5, 6, 7], [2, 3, 5, 6], [5, 6, 8, 9], [5, 6, 8, 9]], [0, 1], [3, 2], [0, 1], [1, 1], [[5], [3], [6]]),
+        # negative bounds
+        ([[4, 5, 6, 7], [2, 3, 5, 6], [5, 6, 8, 9], [5, 6, 8, 9]], [0, 1], [3, -2], [0, 1], [1, 1], [[5], [3], [6]]),
+        # unusual order of axes
+        ([[4, 5, 6, 7], [2, 3, 5, 6], [5, 6, 8, 9], [5, 6, 8, 9]], [0, 1], [3, -2], [1, 0], [1, 1], [[2, 3, 5]]),
+        # when only input_shape is defined without values (the last element of the tuple is the input shape)
+        (None, [1, 2], [4, 3], [0, 1], [1, 1], [3, 1, 6], [4, 5, 6]),
+        # boundary case
+        (None, [0, 2], [np.iinfo(np.int32).max, 3], [0, 1], [1, 1], [4, 1, 6], [4, 5, 6]),
+        # boundary case
+        (None, [np.iinfo(np.int32).min, 2], [3, 3], [0, 1], [1, 1], [3, 1, 6], [4, 5, 6]),
+        # 1D input
+        ([1, 3, 224, 224], [1], [2], [0], [1], [3]),
+        # 1D input with negative starts
+        (None, [-1], [1], [0], [-1], [2], [4]),
+        # 1D input with negative ends
+        (None, [1], [-1], [0], [1], [2], [4]),
+        # with rounding (e.g. take from 1st to 3rd with step 4 should give shape 1 not 0)
+        (None, [1], [3], [0], [4], [1], [4]),
+        # with rounding and a negative step (e.g. take from the 7th down to the 3rd with step -7 should give shape 1 not 0)
+        (None, [7], [3], [0], [-7], [1], [10]),
+    ])
+    def test_slice_infer(self, inp_value, starts, ends, axes, steps, expected, inp_shape=None):
+        if inp_value is None:
+            input_node = shaped_data('data_1', int64_array(inp_shape))
+        else:
+            input_node = valued_data('data_1', int64_array(inp_value))
+
+        nodes = {
+            **input_node,
+            **regular_op_with_empty_data('slice', {'op': 'Slice'}),
+            **valued_const_with_data('starts', int64_array(starts)),
+            **valued_const_with_data('ends', int64_array(ends)),
+            **valued_const_with_data('axes', int64_array(axes)),
+            **valued_const_with_data('steps', int64_array(steps)),
+        }
+
+        graph = build_graph(nodes,
+                            [('data_1', 'slice'),
+                             *connect('starts', '1:slice'),
+                             *connect('ends', '2:slice'),
+                             *connect('axes', '3:slice'),
+                             *connect('steps', '4:slice'),
+                             *connect('slice', 'slice_d')])
+
+        graph.stage = 'middle'
+        slice_node = Node(graph, 'slice')
+
+        Slice.infer(slice_node)
+        if inp_value is not None:
+            self.assertTrue(np.array_equal(slice_node.out_node().value, expected))
+        else:
+            self.assertTrue(np.array_equal(slice_node.out_node().shape, expected))
+
+    # negative tests
+    @generate(*[
+        # starts are non-constant
+        ([[4, 5, 6, 7], [2, 3, 5, 6], [5, 6, 8, 9], [5, 6, 8, 9]], None, [3, 2], [0, 1], [1, 1], [[5], [3], [6]]),
+        # ends are non-constant
+        ([[4, 5, 6, 7], [2, 3, 5, 6], [5, 6, 8, 9], [5, 6, 8, 9]], [0, 1], None, [0, 1], [1, 1], [[5], [3], [6]]),
+        # axes are non-constant
+        ([[4, 5, 6, 7], [2, 3, 5, 6], [5, 6, 8, 9], [5, 6, 8, 9]], [0, 1], [3, -2], None, [1, 1], [[5], [3], [6]]),
+        # steps are non-constant
+        ([[4, 5, 6, 7], [2, 3, 5, 6], [5, 6, 8, 9], [5, 6, 8, 9]], [0, 1], [3, -2], [0, 1], None, [[5], [3], [6]]),
+        # 1D input with negative starts
+        (None, [1], [-1], [0], [-2], [-6], [20]),
+        # case when output shape has zero elements
+        (None, [1], [1], [0], [1], [0], [4])
+    ])
+    def test_slice_infer_negative(self, inp_value, starts, ends, axes, steps, expected, inp_shape=None):
+        if inp_value is None:
+            input_node = shaped_data('data_1', int64_array(inp_shape))
+        else:
+            input_node = valued_data('data_1', int64_array(inp_value))
+
+        def convert_args(val, name=''):
+            if val is not None:
+                return valued_const_with_data(name, int64_array(val))
+            else:
+                return shaped_const_with_data(name, [0])  # fake shape
+
+        starts = convert_args(starts, 'starts')
+        ends = convert_args(ends, 'ends')
+        axes = convert_args(axes, 'axes')
+        steps = convert_args(steps, 'steps')
+
+        nodes = {**input_node,
+                 **regular_op_with_empty_data('slice', {'op': 'Slice'}),
+                 **starts, **ends, **axes, **steps}
+
+        graph = build_graph(nodes,
+                            [('data_1', 'slice'),
+                             *connect('starts', '1:slice'),
+                             *connect('ends', '2:slice'),
+                             *connect('axes', '3:slice'),
+                             *connect('steps', '4:slice'),
+                             *connect('slice', 'slice_d')])
+
+        graph.stage = 'middle'
+        slice_node = Node(graph, 'slice')
+        self.assertRaises(Error, Slice.infer, slice_node)
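The negative-step rounding case above (starts=[7], ends=[3], steps=[-7] on a 1-D input of length 10) can be checked directly against plain NumPy slicing; this is just a verification sketch, not part of the patch:

```python
import numpy as np

data = np.arange(10)
# starts=[7], ends=[3], steps=[-7]: only the element at index 7 is taken,
# so the expected output shape is (1,), matching the parametrized case above.
sliced = data[slice(7, 3, -7)]
print(sliced, sliced.shape)  # [7] (1,)
```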
""" + from extensions.ops.elementwise import Add from extensions.ops.gather import Gather from extensions.ops.range import Range from mo.front.common.partial_infer.utils import int64_array from mo.front.tf.graph_utils import create_op_node_with_second_input -from mo.graph.graph import Node, Graph +from mo.graph.graph import Node from mo.graph.port import Port from mo.ops.concat import Concat from mo.ops.const import Const @@ -219,3 +220,4 @@ def get_shape_and_rank_nodes_by_port(port: Port, return_as_a_scalar: bool = True rank = create_op_node_with_second_input(graph, Squeeze, int64_array([0]), {'name': input_node_name + '/0dRankOf'}, rank_1_d) return shape, rank + diff --git a/model-optimizer/mo/utils/unittest/graph.py b/model-optimizer/mo/utils/unittest/graph.py index 3e581a2c5869fa..3ee413cde47c34 100644 --- a/model-optimizer/mo/utils/unittest/graph.py +++ b/model-optimizer/mo/utils/unittest/graph.py @@ -125,8 +125,10 @@ def build_graph_with_attrs(nodes_with_attrs: list, edges_with_attrs: list, new_n for node_id in graph.nodes(): node = Node(graph, node_id) - check_and_update_ports(node, [graph.get_edge_data(edge[0], node_id)[0] for edge in graph.in_edges(node_id)], True) - check_and_update_ports(node, [graph.get_edge_data(node_id, edge[1])[0] for edge in graph.out_edges(node_id)], False) + check_and_update_ports(node, [graph.get_edge_data(edge[0], node_id)[0] for edge in graph.in_edges(node_id)], + True) + check_and_update_ports(node, [graph.get_edge_data(node_id, edge[1])[0] for edge in graph.out_edges(node_id)], + False) for node in graph.get_op_nodes(): # Add in_ports attribute @@ -330,16 +332,20 @@ def get_name_and_port(tensor_name): return node_name, 0 -def connect(first_tensor_name, second_tensor_name, skip_data=False): +def connect(first_tensor_name, second_tensor_name, skip_data=False, front_phase=False): # ports could be skipped -- then zero in/out ports would be used # first_tensor_name = first_op_name:out_port # second_tensor_name = in_port:second_op_name + # if skip_data is True connect directly from data node with postfix '_d' to second + # if front_phase is True connect nodes directly without postfixes and data nodes first_op_name, out_port = get_name_and_port(first_tensor_name) second_op_name, in_port = get_name_and_port(second_tensor_name) if skip_data: return [(first_op_name + '_d', second_op_name, {'in': in_port})] + if front_phase: + return [(first_op_name, second_op_name, {'out': out_port, 'in': in_port})] return [ (first_op_name, first_op_name + '_d', {'out': out_port}), (first_op_name + '_d', second_op_name, {'in': in_port}), @@ -348,3 +354,7 @@ def connect(first_tensor_name, second_tensor_name, skip_data=False): def connect_data(first_tensor_name, second_tensor_name): return connect(first_tensor_name, second_tensor_name, skip_data=True) + + +def connect_front(first_tensor_name, second_tensor_name): + return connect(first_tensor_name, second_tensor_name, skip_data=False, front_phase=True)