From baa6f28a80b55e6e8d793df99de8a595f8c316a2 Mon Sep 17 00:00:00 2001
From: Mateusz Tabaka
Date: Mon, 13 Dec 2021 13:03:06 +0000
Subject: [PATCH] [MO] Move reverse input channels before mean/scale

---
 .../convert_model/Converting_Model_General.md | 13 ++--
 .../openvino/tools/mo/back/preprocessing.py   | 12 ++--
 .../mo/openvino/tools/mo/utils/cli_parser.py  | 10 +--
 .../mo/back/moc_preprocessing_test_actual.py  | 64 +++++++++++++++----
 4 files changed, 70 insertions(+), 29 deletions(-)

diff --git a/docs/MO_DG/prepare_model/convert_model/Converting_Model_General.md b/docs/MO_DG/prepare_model/convert_model/Converting_Model_General.md
index 2e708e1eaa1650..07f8886a0e9109 100644
--- a/docs/MO_DG/prepare_model/convert_model/Converting_Model_General.md
+++ b/docs/MO_DG/prepare_model/convert_model/Converting_Model_General.md
@@ -59,11 +59,14 @@ Framework-agnostic parameters:
   --reverse_input_channels
                         Switch the input channels order from RGB to BGR (or
                         vice versa). Applied to original inputs of the model
-                        if and only if a number of channels equals 3. Applied
-                        after application of --mean_values and --scale_values
-                        options, so numbers in --mean_values and
-                        --scale_values go in the order of channels used in the
-                        original model.
+                        if and only if the number of channels equals 3.
+                        When --mean_values/--scale_values are also specified,
+                        reversing of channels is applied to the user's input
+                        data first, so that the numbers in --mean_values and
+                        --scale_values go in the order of channels used in
+                        the original model. In other words, if both options
+                        are specified, the data flow in the model looks as follows:
+                        Parameter -> ReverseInputChannels -> Mean/Scale apply -> the original body of the model.
   --log_level {CRITICAL,ERROR,WARN,WARNING,INFO,DEBUG,NOTSET}
                         Logger level
   --input INPUT         Quoted list of comma-separated input nodes names with
diff --git a/tools/mo/openvino/tools/mo/back/preprocessing.py b/tools/mo/openvino/tools/mo/back/preprocessing.py
index 58e9569e3228f0..c61da606d4ab44 100644
--- a/tools/mo/openvino/tools/mo/back/preprocessing.py
+++ b/tools/mo/openvino/tools/mo/back/preprocessing.py
@@ -372,6 +372,12 @@ def apply_preprocessing(ov_function: Model, argv: argparse.Namespace):
         else:
             prep.output(node_name).tensor().set_layout(Layout(layout_value['target_layout']))
 
+    # Apply reverse_input_channels
+    if need_reverse:
+        for name, _ in suitable_params_ric:
+            prep.input(name).preprocess().reverse_channels()
+            log.debug('reverse_input_channels pre-processing applied to {}'.format(name))
+
     for node_name, node_mean_scale_values in mean_scale_values.items():
         # Apply mean first, then scale
         if node_mean_scale_values['mean'] is not None:
@@ -380,12 +386,6 @@ def apply_preprocessing(ov_function: Model, argv: argparse.Namespace):
             prep.input(node_name).preprocess().scale(node_mean_scale_values['scale'])
             log.debug('Mean/Scale pre-processing applied to {}'.format(node_name))
 
-    # Apply reverse_input_channels
-    if need_reverse:
-        for name, _ in suitable_params_ric:
-            prep.input(name).preprocess().reverse_channels()
-            log.debug('reverse_input_channels pre-processing applied to {}'.format(name))
-
     # Apply pre-processing builder to a function
     ov_function = prep.build()
 
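Only the registration order in the PrePostProcessor chain moves here; the individual
pre-processing calls are unchanged. A minimal sketch of the resulting chain, using the
same calls that apply_preprocessing() drives (the model path, input name, layout and
constants below are hypothetical, assuming an OpenVINO 2022.x-style Python API):

    from openvino.runtime import Core, Layout
    from openvino.preprocess import PrePostProcessor

    core = Core()
    model = core.read_model('model.xml')    # hypothetical model path

    prep = PrePostProcessor(model)
    # reverse_channels() needs a layout that identifies the channels ('C') dimension
    prep.input('data').tensor().set_layout(Layout('NCHW'))
    # After this patch, channel reversal is registered first ...
    prep.input('data').preprocess().reverse_channels()
    # ... so the mean/scale constants stay in the original model's channel order
    prep.input('data').preprocess().mean([1., 2., 3.])
    prep.input('data').preprocess().scale([2., 4., 8.])
    model = prep.build()
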
diff --git a/tools/mo/openvino/tools/mo/utils/cli_parser.py b/tools/mo/openvino/tools/mo/utils/cli_parser.py
index 5e79c75ef9c9bf..8d89ab000f141c 100644
--- a/tools/mo/openvino/tools/mo/utils/cli_parser.py
+++ b/tools/mo/openvino/tools/mo/utils/cli_parser.py
@@ -271,10 +271,12 @@ def get_common_cli_parser(parser: argparse.ArgumentParser = None):
                                    'the original input of the model.')
     common_group.add_argument('--reverse_input_channels',
                               help='Switch the input channels order from RGB to BGR (or vice versa). Applied to '
-                                   'original inputs of the model if and only if a number of channels equals 3. Applied '
-                                   'after application of --mean_values and --scale_values options, so numbers in '
-                                   '--mean_values and --scale_values go in the order of channels used in the original '
-                                   'model.',
+                                   'original inputs of the model if and only if the number of channels equals 3. '
+                                   'When --mean_values/--scale_values are also specified, reversing of channels is '
+                                   'applied to the user\'s input data first, so that the numbers in --mean_values '
+                                   'and --scale_values go in the order of channels used in the original model. '
+                                   'In other words, if both options are specified, the data flow in the model '
+                                   'looks as follows: Parameter -> ReverseInputChannels -> Mean/Scale apply -> the original body of the model.',
                               action='store_true')
     common_group.add_argument('--log_level',
                               help='Logger level',
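The ordering guarantee in the help text can be checked with plain arithmetic. A small
numeric sketch (plain numpy, hypothetical values): the user's data is reversed first,
so the --mean_values stay in the original model's channel order:

    import numpy as np

    user_bgr = np.array([10., 20., 30.])  # user feeds BGR data to an RGB-trained model
    mean_rgb = np.array([1., 2., 3.])     # --mean_values given in RGB (original model) order

    reversed_rgb = user_bgr[::-1]         # ReverseInputChannels: [30., 20., 10.]
    result = reversed_rgb - mean_rgb      # Mean apply: [29., 18., 7.]
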
diff --git a/tools/mo/unit_tests/mo/back/moc_preprocessing_test_actual.py b/tools/mo/unit_tests/mo/back/moc_preprocessing_test_actual.py
index 5f1391baff4fae..5a1288b482aa57 100644
--- a/tools/mo/unit_tests/mo/back/moc_preprocessing_test_actual.py
+++ b/tools/mo/unit_tests/mo/back/moc_preprocessing_test_actual.py
@@ -56,25 +56,23 @@ class TestPreprocessingMOC(unittest.TestCase):
     def setUp(self):
         pass
 
-    def check_scale_constant(self, node, expected, shape=None):
-        const_node = node.input(1).get_source_output().get_node()
+    def check_constant(self, const_node, expected, shape=None):
         self.assertEqual(const_node.get_type_name(), 'Constant')
-        if node.get_type_name() == 'Divide':
-            self.assertTrue(np.allclose(const_node.get_vector(), expected))
-        else:
-            self.assertTrue(np.allclose(const_node.get_vector(), 1. / expected))
-        if shape:
-            assert const_node.shape == PartialShape(shape)
+        self.assertTrue(np.allclose(const_node.get_vector(), expected))
+        if shape is not None:
+            self.assertEqual(const_node.shape, PartialShape(shape))
+
+    def check_scale_constant(self, node, expected, shape=None):
+        const_node = node.input(1).get_source_output().get_node()
+        # Scale may be lowered to either Divide (by scale) or Multiply (by 1/scale)
+        if node.get_type_name() != 'Divide':
+            expected = 1. / np.array(expected)
+        self.check_constant(const_node, expected, shape)
 
     def check_mean_constant(self, node, expected, shape=None):
         const_node = node.input(1).get_source_output().get_node()
-        self.assertEqual(const_node.get_type_name(), 'Constant')
-        if node.get_type_name() == 'Subtract':
-            self.assertTrue(np.allclose(const_node.get_vector(), expected))
-        else:
-            self.assertTrue(np.allclose(const_node.get_vector(), -expected.toList()))
-        if shape:
-            self.assertEqual(const_node.shape, PartialShape(shape))
+        # Mean may be lowered to either Subtract (of mean) or Add (of -mean)
+        if node.get_type_name() != 'Subtract':
+            expected = -np.array(expected)
+        self.check_constant(const_node, expected, shape)
 
     def test_scale_single_value(self):
         argv = Namespace(mean_scale_values=None, scale=2.0)
@@ -615,3 +613,41 @@ def test_no_reverse_channels_even_with_layout(self):
         self.assertTrue(op_node0.get_type_name() == 'Relu')
         op_node1 = list(function.get_parameters()[1].output(0).get_target_inputs())[0].get_node()
         self.assertTrue(op_node1.get_type_name() == 'Relu')
+
+    def test_reverse_channels_and_mean_scale(self):
+        argv = Namespace(reverse_input_channels=True, mean_scale_values={
+            'input2a': {
+                'mean': np.array([1., 2., 3.]),
+                'scale': np.array([2., 4., 8.])}},
+            scale=None)
+        function = create_function2(shape2=[1, 3, 224, 224])
+        process_function(ov_function=function, argv=argv)
+
+        # Verify that the Gather (reversed channels) comes first, then the 'mean' subtraction, then the 'scale'
+        gather = list(function.get_parameters()[1].output(0).get_target_inputs())[0].get_node()
+        self.assertTrue(gather.get_type_name() == 'Gather')
+        range_node = gather.input(1).get_source_output().get_node()
+        self.assertTrue(range_node.get_type_name() == 'Range')
+        start = range_node.input(0).get_source_output().get_node()
+        end = range_node.input(1).get_source_output().get_node()
+        step = range_node.input(2).get_source_output().get_node()
+        self.check_constant(start, expected=[2], shape=[])
+        self.check_constant(end, expected=[-1], shape=[])
+        self.check_constant(step, expected=[-1], shape=[])
+        axes = gather.input(2).get_source_output().get_node()
+        self.check_constant(axes, expected=[1], shape=[1])
+
+        op_node = list(gather.output(0).get_target_inputs())[0].get_node()
+        self.assertTrue(op_node.get_type_name() == 'Subtract' or op_node.get_type_name() == 'Add')
+        self.check_mean_constant(op_node, expected=[1., 2., 3.], shape=[1, 3, 1, 1])
+
+        op_node = list(op_node.output(0).get_target_inputs())[0].get_node()
+        self.assertTrue(op_node.get_type_name() == 'Divide' or op_node.get_type_name() == 'Multiply')
+        self.check_scale_constant(op_node, expected=[2., 4., 8.], shape=[1, 3, 1, 1])
+
+        # Verify that input1 is not affected
+        op_node = list(function.get_parameters()[0].output(0).get_target_inputs())[0].get_node()
+        self.assertEqual(op_node.get_type_name(), 'Relu')
+
+        # Verify that the guessed layout (?C??) is not assigned to input2
+        self.assertEqual(function.get_parameters()[1].layout, Layout())
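
For reference, the Gather/Range subgraph asserted in test_reverse_channels_and_mean_scale
computes the channel reversal as follows; a plain-numpy sketch with hypothetical data:

    import numpy as np

    x = np.zeros((1, 3, 224, 224))
    x[0, 0], x[0, 1], x[0, 2] = 1., 2., 3.    # mark each channel

    indices = np.arange(2, -1, -1)            # Range(start=2, stop=-1, step=-1) -> [2, 1, 0]
    reversed_x = np.take(x, indices, axis=1)  # Gather along axes=[1], the channel axis
    assert reversed_x[0, 0, 0, 0] == 3.       # channel order is now reversed
    assert reversed_x[0, 2, 0, 0] == 1.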