From 1de9ee094b22b9155d1cb3b1e5123ee4cea1eb29 Mon Sep 17 00:00:00 2001 From: mbencer Date: Mon, 1 Feb 2021 18:00:12 +0100 Subject: [PATCH 01/19] first version of dropout and maxpool impl, added ignoring optinal outputs --- .../frontend/onnx_import/src/core/graph.cpp | 7 +- .../frontend/onnx_import/src/op/dropout.hpp | 93 ++++++++++++- .../frontend/onnx_import/src/op/max_pool.cpp | 6 + .../frontend/onnx_import/src/ops_bridge.cpp | 1 + ngraph/python/tests/test_onnx/test_backend.py | 9 -- ...dropout12_no_training_return_mask.prototxt | 76 +++++++++++ .../onnx/dropout12_training_mode.prototxt | 128 ++++++++++++++++++ ngraph/test/onnx/onnx_import.in.cpp | 33 +++++ 8 files changed, 338 insertions(+), 15 deletions(-) create mode 100644 ngraph/test/models/onnx/dropout12_no_training_return_mask.prototxt create mode 100644 ngraph/test/models/onnx/dropout12_training_mode.prototxt diff --git a/ngraph/frontend/onnx_import/src/core/graph.cpp b/ngraph/frontend/onnx_import/src/core/graph.cpp index 278dfad457cc55..2f31d7110b9b08 100644 --- a/ngraph/frontend/onnx_import/src/core/graph.cpp +++ b/ngraph/frontend/onnx_import/src/core/graph.cpp @@ -20,6 +20,7 @@ #include #include "core/graph.hpp" +#include "core/null_node.hpp" #include "exceptions.hpp" #include "ngraph/log.hpp" #include "ngraph/node.hpp" @@ -222,7 +223,11 @@ namespace ngraph OutputVector results; for (const auto& output : m_graph_proto->output()) { - results.emplace_back(get_ng_node_from_cache(output.name())); + const auto& ng_output = get_ng_node_from_cache(output.name()); + if (!ngraph::op::is_null(ng_output)) // ignore optional outputs + { + results.emplace_back(ng_output); + } } return results; } diff --git a/ngraph/frontend/onnx_import/src/op/dropout.hpp b/ngraph/frontend/onnx_import/src/op/dropout.hpp index 5b6ee83b731d5e..03219411f9c331 100644 --- a/ngraph/frontend/onnx_import/src/op/dropout.hpp +++ b/ngraph/frontend/onnx_import/src/op/dropout.hpp @@ -19,6 +19,9 @@ #include #include "core/null_node.hpp" +#include 
"default_opset.hpp" +#include "exceptions.hpp" +#include "ngraph/log.hpp" #include "ngraph/node.hpp" #include "onnx_import/core/node.hpp" @@ -28,14 +31,94 @@ namespace ngraph { namespace op { + namespace + { + OutputVector build_dropout(const Output& input_data, + float drop_probability, + bool training_mode, + int64_t output_size) + { + if (drop_probability == 0 || !training_mode) + { + const bool return_mask = output_size > 1; + if (return_mask) + { + NGRAPH_WARN << "Default mask for Dropout is ignored, " + << "because of unsupported constant networks"; + /*const auto mask = std::make_shared( + default_opset::Constant::create(ngraph::element::boolean, + Shape{}, {true}), + std::make_shared(input_data));*/ + // If constant network is supported mask should be returned instead of + // NullNode (40957) + return {input_data, std::make_shared()}; + } + else + { + return {input_data, std::make_shared()}; + } + } + throw ngraph::ngraph_error( + "Training mode is not supported for Dropout op if drop_probability is not " + "equal 0"); + } + } + + namespace set_12 + { + OutputVector dropout(const Node& node) + { + const auto ng_inputs = node.get_ng_inputs(); + const auto& input_data = ng_inputs.at(0); + // seed attribute is ignored because traning mode is not supported anyway + + // default values of inputs + double ratio = 0.5f; + bool training_mode = false; + + if (ng_inputs.size() > 1) + { + if (!ngraph::op::is_null(ng_inputs.at(1))) + { + CHECK_VALID_NODE( + node, + ngraph::op::is_constant(ng_inputs.at(1).get_node_shared_ptr()), + "Not constant (or omitted) ratio input is not supported."); + ratio = as_type_ptr( + ng_inputs.at(1).get_node_shared_ptr()) + ->cast_vector()[0]; + } + } + if (ng_inputs.size() > 2) + { + if (!ngraph::op::is_null(ng_inputs.at(2))) + { + CHECK_VALID_NODE( + node, + ngraph::op::is_constant(ng_inputs.at(2).get_node_shared_ptr()), + "Not constant (or omitted) ratio input is not supported."); + training_mode = as_type_ptr( + 
ng_inputs.at(2).get_node_shared_ptr()) + ->cast_vector()[0]; + } + } + return build_dropout(input_data, ratio, training_mode, node.get_outputs_size()); + } + } + namespace set_1 { - inline OutputVector dropout(const Node& node) + OutputVector dropout(const Node& node) { - // First value is actual output of Dropout, - // the second one is just a placeholder for optional trailing output. - return {node.get_ng_inputs().at(0).get_node_shared_ptr(), - std::make_shared()}; + CHECK_VALID_NODE(node, + node.has_attribute("consumed_inputs"), + "Legacy consumed_inputs attrubute is not supported."); + + const auto& input_data = node.get_ng_inputs().at(0); + const bool is_test = node.get_attribute_value("is_test", 0); + const auto ratio = node.get_attribute_value("ratio", 0.5f); + + return build_dropout(input_data, ratio, is_test, node.get_outputs_size()); } } // namespace set_1 diff --git a/ngraph/frontend/onnx_import/src/op/max_pool.cpp b/ngraph/frontend/onnx_import/src/op/max_pool.cpp index c53c4cea6bb207..3533c30b577dbc 100644 --- a/ngraph/frontend/onnx_import/src/op/max_pool.cpp +++ b/ngraph/frontend/onnx_import/src/op/max_pool.cpp @@ -17,6 +17,7 @@ #include #include "core/null_node.hpp" +#include "ngraph/log.hpp" #include "ngraph/op/max_pool.hpp" #include "op/max_pool.hpp" #include "utils/pooling_factory.hpp" @@ -31,6 +32,11 @@ namespace ngraph { OutputVector max_pool(const Node& node) { + if (node.get_outputs_size() > 1) + { + NGRAPH_WARN + << "Indices output is not supported for MaxPooling and was ignored"; + } auto max_pool = pooling::PoolingFactory(node).make_max_pool(); max_pool.emplace_back(std::make_shared()); // Indices (optional) return max_pool; diff --git a/ngraph/frontend/onnx_import/src/ops_bridge.cpp b/ngraph/frontend/onnx_import/src/ops_bridge.cpp index e19098c3d6d5d3..59055a859951ca 100644 --- a/ngraph/frontend/onnx_import/src/ops_bridge.cpp +++ b/ngraph/frontend/onnx_import/src/ops_bridge.cpp @@ -338,6 +338,7 @@ namespace ngraph REGISTER_OPERATOR("Div", 
1, div); REGISTER_OPERATOR("Div", 7, div); REGISTER_OPERATOR("Dropout", 1, dropout); + REGISTER_OPERATOR("Dropout", 12, dropout); REGISTER_OPERATOR("Elu", 1, elu); REGISTER_OPERATOR("Equal", 1, equal); REGISTER_OPERATOR("Erf", 1, erf); diff --git a/ngraph/python/tests/test_onnx/test_backend.py b/ngraph/python/tests/test_onnx/test_backend.py index 6f56f0fac3e79e..ece5109bad31b9 100644 --- a/ngraph/python/tests/test_onnx/test_backend.py +++ b/ngraph/python/tests/test_onnx/test_backend.py @@ -44,7 +44,6 @@ xfail_issue_38701, xfail_issue_33595, xfail_issue_33651, - xfail_issue_38705, xfail_issue_38706, xfail_issue_38736, xfail_issue_38707, @@ -288,14 +287,6 @@ def expect_fail(test_case_path, xfail): # type: (str) -> None "OnnxBackendNodeModelTest.test_tfidfvectorizer_tf_only_bigrams_skip0_cpu", "OnnxBackendNodeModelTest.test_tfidfvectorizer_tf_batch_uniandbigrams_skip5_cpu", "OnnxBackendNodeModelTest.test_tfidfvectorizer_tf_onlybigrams_skip5_cpu"), - (xfail_issue_38705, - "OnnxBackendNodeModelTest.test_training_dropout_mask_cpu", - "OnnxBackendNodeModelTest.test_training_dropout_default_mask_cpu", - "OnnxBackendNodeModelTest.test_training_dropout_zero_ratio_mask_cpu", - "OnnxBackendNodeModelTest.test_maxpool_with_argmax_2d_precomputed_strides_cpu", - "OnnxBackendNodeModelTest.test_maxpool_with_argmax_2d_precomputed_pads_cpu", - "OnnxBackendNodeModelTest.test_dropout_default_mask_cpu", - "OnnxBackendNodeModelTest.test_dropout_default_mask_ratio_cpu"), (xfail_issue_38706, "OnnxBackendNodeModelTest.test_split_zero_size_splits_cpu"), (xfail_issue_38736, diff --git a/ngraph/test/models/onnx/dropout12_no_training_return_mask.prototxt b/ngraph/test/models/onnx/dropout12_no_training_return_mask.prototxt new file mode 100644 index 00000000000000..21a77d44c89ba1 --- /dev/null +++ b/ngraph/test/models/onnx/dropout12_no_training_return_mask.prototxt @@ -0,0 +1,76 @@ +ir_version: 7 +producer_name: "backend-test" +graph { + node { + input: "x" + output: "y" + output: "z" + op_type: 
"Dropout" + attribute { + name: "seed" + i: 0 + type: INT + } + } + name: "test_dropout_default_mask" + input { + name: "x" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 3 + } + dim { + dim_value: 4 + } + dim { + dim_value: 5 + } + } + } + } + } + output { + name: "y" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 3 + } + dim { + dim_value: 4 + } + dim { + dim_value: 5 + } + } + } + } + } + output { + name: "z" + type { + tensor_type { + elem_type: 9 + shape { + dim { + dim_value: 3 + } + dim { + dim_value: 4 + } + dim { + dim_value: 5 + } + } + } + } + } +} +opset_import { + version: 12 +} diff --git a/ngraph/test/models/onnx/dropout12_training_mode.prototxt b/ngraph/test/models/onnx/dropout12_training_mode.prototxt new file mode 100644 index 00000000000000..518a0e1af4fff7 --- /dev/null +++ b/ngraph/test/models/onnx/dropout12_training_mode.prototxt @@ -0,0 +1,128 @@ +ir_version: 7 +producer_name: "backend-test" +graph { + node { + input: "x" + input: "ratio" + input: "training_mode" + output: "y" + output: "z" + op_type: "Dropout" + attribute { + name: "seed" + i: 0 + type: INT + } + } + name: "test_dropout_default_mask" + input { + name: "x" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 3 + } + dim { + dim_value: 4 + } + dim { + dim_value: 5 + } + } + } + } + } + input { + name: "ratio" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 3 + } + dim { + dim_value: 4 + } + dim { + dim_value: 5 + } + } + } + } + } + input { + name: "training_mode" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 3 + } + dim { + dim_value: 4 + } + dim { + dim_value: 5 + } + } + } + } + } + output { + name: "y" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 3 + } + dim { + dim_value: 4 + } + dim { + dim_value: 5 + } + } + } + } + } + output { + name: "z" + type { + tensor_type { + elem_type: 9 + shape { + dim { + dim_value: 3 + } + dim { + 
dim_value: 4 + } + dim { + dim_value: 5 + } + } + } + } + } +initializer { + dims: 1 + data_type: 1 + float_data: 3 + name: "ratio" +} +initializer { + dims: 1 + data_type: 9 + int32_data: 00000001 + name: "training_mode" +} +} +opset_import { + version: 12 +} diff --git a/ngraph/test/onnx/onnx_import.in.cpp b/ngraph/test/onnx/onnx_import.in.cpp index ec0a046fdc1dc3..710a523a0e41b9 100644 --- a/ngraph/test/onnx/onnx_import.in.cpp +++ b/ngraph/test/onnx/onnx_import.in.cpp @@ -3966,3 +3966,36 @@ NGRAPH_TEST(${BACKEND_NAME}, onnx_mvn_v6) 1.2906139, 1.1860244, -0.92945826, 0.0721334, -0.38174, -1.7799333}); test_case.run(); } + +NGRAPH_TEST(${BACKEND_NAME}, onnx_dropout12_no_training_return_mask) +{ + auto function = onnx_import::import_onnx_model( + file_util::path_join(SERIALIZED_ZOO, "onnx/dropout12_no_training_return_mask.prototxt")); + + auto test_case = test::TestCase(function); + const std::vector data(3 * 4 * 5, 2.0f); + test_case.add_input(data); + test_case.add_expected_output(Shape{3, 4, 5}, data); + // constant network not supported + // test_case.add_expected_output(Shape{3, 4, 5}, std::vector(3*4*5, true)); + test_case.run(); +} + +NGRAPH_TEST(${BACKEND_NAME}, onnx_dropout12_training_mode) +{ + try + { + auto function = onnx_import::import_onnx_model( + file_util::path_join(SERIALIZED_ZOO, "onnx/dropout12_training_mode.prototxt")); + FAIL() << "Expected exception was not thrown"; + } + catch (const ngraph::ngraph_error& e) + { + EXPECT_HAS_SUBSTRING(e.what(), std::string("Training mode is not supported for Dropout op " + "if drop_probability is not equal 0")); + } + catch (...) 
+ { + FAIL() << "Expected ngraph_error exception was not thrown"; + } +} From e681f32d3fdb28ad5d4e40efde4a6a61bae42c31 Mon Sep 17 00:00:00 2001 From: mbencer Date: Tue, 2 Feb 2021 08:25:37 +0100 Subject: [PATCH 02/19] more tests, impl refactor --- .../frontend/onnx_import/src/op/dropout.hpp | 57 ++++---- ...pout12_no_training_no_return_mask.prototxt | 56 ++++++++ ...dropout12_not_const_training_mode.prototxt | 122 ++++++++++++++++++ ngraph/test/onnx/onnx_import.in.cpp | 37 +++++- 4 files changed, 240 insertions(+), 32 deletions(-) create mode 100644 ngraph/test/models/onnx/dropout12_no_training_no_return_mask.prototxt create mode 100644 ngraph/test/models/onnx/dropout12_not_const_training_mode.prototxt diff --git a/ngraph/frontend/onnx_import/src/op/dropout.hpp b/ngraph/frontend/onnx_import/src/op/dropout.hpp index 03219411f9c331..25dbc7a7aea43d 100644 --- a/ngraph/frontend/onnx_import/src/op/dropout.hpp +++ b/ngraph/frontend/onnx_import/src/op/dropout.hpp @@ -33,34 +33,33 @@ namespace ngraph { namespace { - OutputVector build_dropout(const Output& input_data, - float drop_probability, - bool training_mode, - int64_t output_size) + OutputVector + build_dropout(const Node& node, float drop_probability, bool training_mode) { - if (drop_probability == 0 || !training_mode) - { - const bool return_mask = output_size > 1; - if (return_mask) - { - NGRAPH_WARN << "Default mask for Dropout is ignored, " - << "because of unsupported constant networks"; - /*const auto mask = std::make_shared( - default_opset::Constant::create(ngraph::element::boolean, - Shape{}, {true}), - std::make_shared(input_data));*/ - // If constant network is supported mask should be returned instead of - // NullNode (40957) - return {input_data, std::make_shared()}; - } - else - { - return {input_data, std::make_shared()}; - } - } - throw ngraph::ngraph_error( + CHECK_VALID_NODE( + node, + drop_probability == 0 || !training_mode, "Training mode is not supported for Dropout op if drop_probability is 
not " "equal 0"); + const auto input_data = node.get_ng_inputs().at(0); + + const bool return_mask = node.get_outputs_size() > 1; + if (return_mask) + { + NGRAPH_WARN << "Default mask for Dropout is ignored, " + << "because of unsupported constant networks"; + /*const auto mask = std::make_shared( + default_opset::Constant::create(ngraph::element::boolean, + Shape{}, {true}), + std::make_shared(input_data));*/ + // If constant network is supported mask should be returned instead of + // NullNode (40957) + return {input_data, std::make_shared()}; + } + else + { + return {input_data}; + } } } @@ -69,7 +68,6 @@ namespace ngraph OutputVector dropout(const Node& node) { const auto ng_inputs = node.get_ng_inputs(); - const auto& input_data = ng_inputs.at(0); // seed attribute is ignored because traning mode is not supported anyway // default values of inputs @@ -96,13 +94,13 @@ namespace ngraph CHECK_VALID_NODE( node, ngraph::op::is_constant(ng_inputs.at(2).get_node_shared_ptr()), - "Not constant (or omitted) ratio input is not supported."); + "Not constant (or omitted) training_mode input is not supported."); training_mode = as_type_ptr( ng_inputs.at(2).get_node_shared_ptr()) ->cast_vector()[0]; } } - return build_dropout(input_data, ratio, training_mode, node.get_outputs_size()); + return build_dropout(node, ratio, training_mode); } } @@ -114,11 +112,10 @@ namespace ngraph node.has_attribute("consumed_inputs"), "Legacy consumed_inputs attrubute is not supported."); - const auto& input_data = node.get_ng_inputs().at(0); const bool is_test = node.get_attribute_value("is_test", 0); const auto ratio = node.get_attribute_value("ratio", 0.5f); - return build_dropout(input_data, ratio, is_test, node.get_outputs_size()); + return build_dropout(node, ratio, is_test); } } // namespace set_1 diff --git a/ngraph/test/models/onnx/dropout12_no_training_no_return_mask.prototxt b/ngraph/test/models/onnx/dropout12_no_training_no_return_mask.prototxt new file mode 100644 index 
00000000000000..e28fb21b4e7bb5 --- /dev/null +++ b/ngraph/test/models/onnx/dropout12_no_training_no_return_mask.prototxt @@ -0,0 +1,56 @@ +ir_version: 7 +producer_name: "backend-test" +graph { + node { + input: "x" + output: "y" + op_type: "Dropout" + attribute { + name: "seed" + i: 0 + type: INT + } + } + name: "test_dropout_default_mask" + input { + name: "x" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 3 + } + dim { + dim_value: 4 + } + dim { + dim_value: 5 + } + } + } + } + } + output { + name: "y" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 3 + } + dim { + dim_value: 4 + } + dim { + dim_value: 5 + } + } + } + } + } +} +opset_import { + version: 12 +} diff --git a/ngraph/test/models/onnx/dropout12_not_const_training_mode.prototxt b/ngraph/test/models/onnx/dropout12_not_const_training_mode.prototxt new file mode 100644 index 00000000000000..780f8f3d7246c8 --- /dev/null +++ b/ngraph/test/models/onnx/dropout12_not_const_training_mode.prototxt @@ -0,0 +1,122 @@ +ir_version: 7 +producer_name: "backend-test" +graph { + node { + input: "x" + input: "ratio" + input: "training_mode" + output: "y" + output: "z" + op_type: "Dropout" + attribute { + name: "seed" + i: 0 + type: INT + } + } + name: "test_dropout_default_mask" + input { + name: "x" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 3 + } + dim { + dim_value: 4 + } + dim { + dim_value: 5 + } + } + } + } + } + input { + name: "ratio" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 3 + } + dim { + dim_value: 4 + } + dim { + dim_value: 5 + } + } + } + } + } + input { + name: "training_mode" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 3 + } + dim { + dim_value: 4 + } + dim { + dim_value: 5 + } + } + } + } + } + output { + name: "y" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 3 + } + dim { + dim_value: 4 + } + dim { + dim_value: 5 + } + } + } + } + } + output { + 
name: "z" + type { + tensor_type { + elem_type: 9 + shape { + dim { + dim_value: 3 + } + dim { + dim_value: 4 + } + dim { + dim_value: 5 + } + } + } + } + } +initializer { + dims: 1 + data_type: 1 + float_data: 3 + name: "ratio" +} +} +opset_import { + version: 12 +} diff --git a/ngraph/test/onnx/onnx_import.in.cpp b/ngraph/test/onnx/onnx_import.in.cpp index 710a523a0e41b9..fc83b8a4915295 100644 --- a/ngraph/test/onnx/onnx_import.in.cpp +++ b/ngraph/test/onnx/onnx_import.in.cpp @@ -3967,6 +3967,18 @@ NGRAPH_TEST(${BACKEND_NAME}, onnx_mvn_v6) test_case.run(); } +NGRAPH_TEST(${BACKEND_NAME}, onnx_dropout12_no_training_no_return_mask) +{ + auto function = onnx_import::import_onnx_model( + file_util::path_join(SERIALIZED_ZOO, "onnx/dropout12_no_training_no_return_mask.prototxt")); + + auto test_case = test::TestCase(function); + const std::vector data(3 * 4 * 5, 2.0f); + test_case.add_input(data); + test_case.add_expected_output(Shape{3, 4, 5}, data); + test_case.run(); +} + NGRAPH_TEST(${BACKEND_NAME}, onnx_dropout12_no_training_return_mask) { auto function = onnx_import::import_onnx_model( @@ -3991,8 +4003,29 @@ NGRAPH_TEST(${BACKEND_NAME}, onnx_dropout12_training_mode) } catch (const ngraph::ngraph_error& e) { - EXPECT_HAS_SUBSTRING(e.what(), std::string("Training mode is not supported for Dropout op " - "if drop_probability is not equal 0")); + EXPECT_HAS_SUBSTRING(e.what(), + std::string("Training mode is not supported for Dropout op " + "if drop_probability is not equal 0")); + } + catch (...) 
+ { + FAIL() << "Expected ngraph_error exception was not thrown"; + } +} + +NGRAPH_TEST(${BACKEND_NAME}, onnx_dropout12_not_const_training_mode) +{ + try + { + auto function = onnx_import::import_onnx_model(file_util::path_join( + SERIALIZED_ZOO, "onnx/dropout12_not_const_training_mode.prototxt")); + FAIL() << "Expected exception was not thrown"; + } + catch (const ngraph::ngraph_error& e) + { + EXPECT_HAS_SUBSTRING( + e.what(), + std::string("Not constant (or omitted) training_mode input is not supported.")); } catch (...) { From b20a4af02cbe556e2891f92aeccf7fa19e42f4b0 Mon Sep 17 00:00:00 2001 From: mbencer Date: Tue, 2 Feb 2021 09:11:30 +0100 Subject: [PATCH 03/19] Added tests to dropout in opsets<12 --- .../frontend/onnx_import/src/op/dropout.hpp | 5 +- ...opout1_no_training_no_return_mask.prototxt | 51 ++++++++++++ .../dropout1_no_training_return_mask.prototxt | 82 +++++++++++++++++++ ngraph/test/onnx/onnx_import.in.cpp | 24 ++++++ 4 files changed, 160 insertions(+), 2 deletions(-) create mode 100644 ngraph/test/models/onnx/dropout1_no_training_no_return_mask.prototxt create mode 100644 ngraph/test/models/onnx/dropout1_no_training_return_mask.prototxt diff --git a/ngraph/frontend/onnx_import/src/op/dropout.hpp b/ngraph/frontend/onnx_import/src/op/dropout.hpp index 25dbc7a7aea43d..d5aa8538f0396c 100644 --- a/ngraph/frontend/onnx_import/src/op/dropout.hpp +++ b/ngraph/frontend/onnx_import/src/op/dropout.hpp @@ -41,9 +41,10 @@ namespace ngraph drop_probability == 0 || !training_mode, "Training mode is not supported for Dropout op if drop_probability is not " "equal 0"); - const auto input_data = node.get_ng_inputs().at(0); + const auto input_data = node.get_ng_inputs().at(0); const bool return_mask = node.get_outputs_size() > 1; + if (return_mask) { NGRAPH_WARN << "Default mask for Dropout is ignored, " @@ -109,7 +110,7 @@ namespace ngraph OutputVector dropout(const Node& node) { CHECK_VALID_NODE(node, - node.has_attribute("consumed_inputs"), + 
!node.has_attribute("consumed_inputs"), "Legacy consumed_inputs attrubute is not supported."); const bool is_test = node.get_attribute_value("is_test", 0); diff --git a/ngraph/test/models/onnx/dropout1_no_training_no_return_mask.prototxt b/ngraph/test/models/onnx/dropout1_no_training_no_return_mask.prototxt new file mode 100644 index 00000000000000..a142ba2608bd12 --- /dev/null +++ b/ngraph/test/models/onnx/dropout1_no_training_no_return_mask.prototxt @@ -0,0 +1,51 @@ +ir_version: 7 +producer_name: "backend-test" +graph { + node { + input: "x" + output: "y" + op_type: "Dropout" + } + name: "test_dropout_default_mask" + input { + name: "x" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 3 + } + dim { + dim_value: 4 + } + dim { + dim_value: 5 + } + } + } + } + } + output { + name: "y" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 3 + } + dim { + dim_value: 4 + } + dim { + dim_value: 5 + } + } + } + } + } +} +opset_import { + version: 1 +} diff --git a/ngraph/test/models/onnx/dropout1_no_training_return_mask.prototxt b/ngraph/test/models/onnx/dropout1_no_training_return_mask.prototxt new file mode 100644 index 00000000000000..7f0dd9c1d576d8 --- /dev/null +++ b/ngraph/test/models/onnx/dropout1_no_training_return_mask.prototxt @@ -0,0 +1,82 @@ + +ir_version: 7 +producer_name: "backend-test" +graph { + node { + input: "x" + output: "y" + output: "z" + op_type: "Dropout" + attribute { + name: "is_test" + i: 0 + type: INT + } + attribute { + name: "ratio" + f: 0.1 + type: FLOAT + } + } + name: "test_dropout_default_mask" + input { + name: "x" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 3 + } + dim { + dim_value: 4 + } + dim { + dim_value: 5 + } + } + } + } + } + output { + name: "y" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 3 + } + dim { + dim_value: 4 + } + dim { + dim_value: 5 + } + } + } + } + } + output { + name: "z" + type { + tensor_type { + elem_type: 9 + shape { 
+ dim { + dim_value: 3 + } + dim { + dim_value: 4 + } + dim { + dim_value: 5 + } + } + } + } + } +} +opset_import { + version: 6 +} diff --git a/ngraph/test/onnx/onnx_import.in.cpp b/ngraph/test/onnx/onnx_import.in.cpp index fc83b8a4915295..580ed6276a03ac 100644 --- a/ngraph/test/onnx/onnx_import.in.cpp +++ b/ngraph/test/onnx/onnx_import.in.cpp @@ -3967,6 +3967,30 @@ NGRAPH_TEST(${BACKEND_NAME}, onnx_mvn_v6) test_case.run(); } +NGRAPH_TEST(${BACKEND_NAME}, onnx_dropout1_no_training_no_return_mask) +{ + auto function = onnx_import::import_onnx_model( + file_util::path_join(SERIALIZED_ZOO, "onnx/dropout1_no_training_no_return_mask.prototxt")); + + auto test_case = test::TestCase(function); + const std::vector data(3 * 4 * 5, 2.0f); + test_case.add_input(data); + test_case.add_expected_output(Shape{3, 4, 5}, data); + test_case.run(); +} + +NGRAPH_TEST(${BACKEND_NAME}, onnx_dropout1_no_training_return_mask) +{ + auto function = onnx_import::import_onnx_model( + file_util::path_join(SERIALIZED_ZOO, "onnx/dropout1_no_training_return_mask.prototxt")); + + auto test_case = test::TestCase(function); + const std::vector data(3 * 4 * 5, 2.0f); + test_case.add_input(data); + test_case.add_expected_output(Shape{3, 4, 5}, data); + test_case.run(); +} + NGRAPH_TEST(${BACKEND_NAME}, onnx_dropout12_no_training_no_return_mask) { auto function = onnx_import::import_onnx_model( From 3ad9eca90bffc76b487841c0e3c937276137f926 Mon Sep 17 00:00:00 2001 From: mbencer Date: Tue, 2 Feb 2021 13:57:52 +0100 Subject: [PATCH 04/19] added tests for MaxPool --- .../max_pool_with_indices_output.prototxt | 94 +++++++++++++++++++ .../test/onnx/onnx_import_dyn_shapes.in.cpp | 25 +++++ 2 files changed, 119 insertions(+) create mode 100644 ngraph/test/models/onnx/dynamic_shapes/max_pool_with_indices_output.prototxt diff --git a/ngraph/test/models/onnx/dynamic_shapes/max_pool_with_indices_output.prototxt b/ngraph/test/models/onnx/dynamic_shapes/max_pool_with_indices_output.prototxt new file mode 100644 
index 00000000000000..6105d792f4582f --- /dev/null +++ b/ngraph/test/models/onnx/dynamic_shapes/max_pool_with_indices_output.prototxt @@ -0,0 +1,94 @@ +ir_version: 3 +producer_name: "backend-test" +graph { + node { + input: "x" + output: "y" + output: "z" + op_type: "MaxPool" + attribute { + name: "kernel_shape" + ints: 5 + ints: 5 + type: INTS + } + attribute { + name: "pads" + ints: 2 + ints: 2 + ints: 2 + ints: 2 + type: INTS + } + } + name: "test_maxpool_with_argmax_2d_precomputed_pads" + input { + name: "x" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + dim { + dim_value: 5 + } + dim { + dim_value: 5 + } + } + } + } + } + output { + name: "y" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + dim { + dim_value: 5 + } + dim { + dim_value: 5 + } + } + } + } + } + output { + name: "z" + type { + tensor_type { + elem_type: 7 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 1 + } + dim { + dim_value: 5 + } + dim { + dim_value: 5 + } + } + } + } + } +} +opset_import { + version: 9 +} diff --git a/ngraph/test/onnx/onnx_import_dyn_shapes.in.cpp b/ngraph/test/onnx/onnx_import_dyn_shapes.in.cpp index c93026cde92090..10a9882e2f679d 100644 --- a/ngraph/test/onnx/onnx_import_dyn_shapes.in.cpp +++ b/ngraph/test/onnx/onnx_import_dyn_shapes.in.cpp @@ -330,6 +330,31 @@ NGRAPH_TEST(${BACKEND_NAME}, onnx_dyn_shapes_max_pool_dyn_shape) test_case.run(); } +NGRAPH_TEST(${BACKEND_NAME}, onnx_dyn_shapes_max_pool_with_indices_output) +{ + const auto function = onnx_import::import_onnx_model(file_util::path_join( + SERIALIZED_ZOO, "onnx/dynamic_shapes/max_pool_with_indices_output.prototxt")); + + auto test_case = test::TestCase(function); + + const Shape shape{1, 1, 5, 5}; + std::vector input_values(shape_size(shape)); + std::iota(input_values.begin(), input_values.end(), 1.f); + + test_case.add_input(shape, input_values); + + std::vector expected_values{13.f, 
14.f, 15.f, 15.f, 15.f, 18.f, 19.f, 20.f, 20.f, + 20.f, 23.f, 24.f, 25.f, 25.f, 25.f, 23.f, 24.f, 25.f, + 25.f, 25.f, 23.f, 24.f, 25.f, 25.f, 25.f}; + test_case.add_expected_output(Shape{1, 1, 5, 5}, expected_values); + + // indices output is not supported and is ingored in current implementation + // std::vector expected_indices{12, 13, 14, 14, 14, 17, 18, 19, 19, 19, 22, 23, 24, 24, + // 24, 22, 23, 24, 24, 24, 22, 23, 24, 24, 24}; + // test_case.add_expected_output(Shape{1, 1, 5, 5}, expected_indices); + test_case.run(); +} + NGRAPH_TEST(${BACKEND_NAME}, onnx_dyn_shapes_global_avg_pool_dyn_shape) { const auto function = onnx_import::import_onnx_model(file_util::path_join( From 35dbe6691c372735c8b9cfdef8bbfcd64c49e7c3 Mon Sep 17 00:00:00 2001 From: mbencer Date: Tue, 2 Feb 2021 14:16:26 +0100 Subject: [PATCH 05/19] update xfail list --- ngraph/python/tests/__init__.py | 6 ++++-- ngraph/python/tests/test_onnx/test_backend.py | 17 +++++++++++++++-- 2 files changed, 19 insertions(+), 4 deletions(-) diff --git a/ngraph/python/tests/__init__.py b/ngraph/python/tests/__init__.py index 053760b209b4ed..6cd64676ec5815 100644 --- a/ngraph/python/tests/__init__.py +++ b/ngraph/python/tests/__init__.py @@ -107,8 +107,6 @@ def xfail_test(reason="Mark the test as expected to fail", strict=True): xfail_issue_38699 = xfail_test(reason="RuntimeError: nGraph does not support the following ONNX operations:" "ai.onnx.preview.training.Gradient") xfail_issue_38701 = xfail_test(reason="RuntimeError: unsupported element type: STRING") -xfail_issue_38705 = xfail_test(reason="IndexError: deque::_M_range_check: __n (which is 0)" - ">= this->size() (which is 0)") xfail_issue_38706 = xfail_test(reason="RuntimeError: output_3.0 has zero dimension which is not allowed") xfail_issue_38707 = xfail_test(reason="RuntimeError: nGraph does not support the following ONNX operations:" "SoftmaxCrossEntropyLoss") @@ -158,6 +156,7 @@ def xfail_test(reason="Mark the test as expected to fail", 
strict=True): "ai.onnx.preview.training.Adagrad") xfail_issue_38736 = xfail_test(reason="RuntimeError: nGraph does not support the following ONNX operations:" "NegativeLogLikelihoodLoss") +xfail_issue_48052 = xfail_test(reason="Dropout op is not supported in traning mode") xfail_issue_45177 = xfail_test(reason="RuntimeError: axes has zero dimension which is not allowed") xfail_issue_45180 = xfail_test(reason="RuntimeError: Unsupported dynamic op: ReduceSum") xfail_issue_44839 = xfail_test(reason="Huge computation missmatch") @@ -182,6 +181,9 @@ def xfail_test(reason="Mark the test as expected to fail", strict=True): xfail_issue_47330 = xfail_test(reason="RuntimeError: Eltwise node with name `[name]` doesn't support " "FP64 precision.") xfail_issue_47337 = xfail_test(reason="RuntimeError: Unsupported dynamic ops: v1::OneHot") +xfail_issue_33593 = xfail_test(reason="Current implementation of MaxPool doesn't support indices output") +xfail_issue_48055 = xfail_test(reason="Dropout doesn't return mask in non-traning mode, " + "because of lack of Constant network support") # Model MSFT issues: xfail_issue_37957 = xfail_test(reason="RuntimeError: nGraph does not support the following ONNX operations:" diff --git a/ngraph/python/tests/test_onnx/test_backend.py b/ngraph/python/tests/test_onnx/test_backend.py index ece5109bad31b9..a5ff1e8476d97e 100644 --- a/ngraph/python/tests/test_onnx/test_backend.py +++ b/ngraph/python/tests/test_onnx/test_backend.py @@ -88,7 +88,10 @@ xfail_issue_46765, xfail_issue_47317, xfail_issue_47323, - xfail_issue_47330) + xfail_issue_47330, + xfail_issue_48052, + xfail_issue_33593, + xfail_issue_48055) def expect_fail(test_case_path, xfail): # type: (str) -> None @@ -610,6 +613,11 @@ def expect_fail(test_case_path, xfail): # type: (str) -> None (xfail_issue_38735, "OnnxBackendNodeModelTest.test_adagrad_multiple_cpu", "OnnxBackendNodeModelTest.test_adagrad_cpu"), + (xfail_issue_48052, + "OnnxBackendNodeModelTest.test_training_dropout_mask_cpu", + 
"OnnxBackendNodeModelTest.test_training_dropout_default_mask_cpu", + "OnnxBackendNodeModelTest.test_training_dropout_zero_ratio_mask_cpu", + "OnnxBackendNodeModelTest.test_dropout_default_mask_ratio_cpu",), (xfail_issue_45177, "OnnxBackendNodeModelTest.test_reduce_sum_default_axes_keepdims_example_cpu", "OnnxBackendNodeModelTest.test_reduce_sum_default_axes_keepdims_random_cpu", @@ -673,7 +681,12 @@ def expect_fail(test_case_path, xfail): # type: (str) -> None "OnnxBackendNodeModelTest.test_squeeze_cpu", "OnnxBackendNodeModelTest.test_squeeze_negative_axes_cpu",), (xfail_issue_44976, - "OnnxBackendNodeModelTest.test_quantizelinear_axis_cpu",) + "OnnxBackendNodeModelTest.test_quantizelinear_axis_cpu",), + (xfail_issue_33593, + "OnnxBackendNodeModelTest.test_maxpool_with_argmax_2d_precomputed_strides_cpu", + "OnnxBackendNodeModelTest.test_maxpool_with_argmax_2d_precomputed_pads_cpu",), + (xfail_issue_48055, + "OnnxBackendNodeModelTest.test_dropout_default_mask_cpu",) ] for test_group in tests_expected_to_fail: From d7e4390246f390e2b95b32aef755ba5ee88f062e Mon Sep 17 00:00:00 2001 From: mbencer Date: Tue, 2 Feb 2021 14:38:10 +0100 Subject: [PATCH 06/19] move dropout impl to cpp --- .../frontend/onnx_import/src/op/dropout.cpp | 125 ++++++++++++++++++ .../frontend/onnx_import/src/op/dropout.hpp | 92 +------------ ngraph/test/onnx/onnx_import.in.cpp | 2 +- 3 files changed, 129 insertions(+), 90 deletions(-) create mode 100644 ngraph/frontend/onnx_import/src/op/dropout.cpp diff --git a/ngraph/frontend/onnx_import/src/op/dropout.cpp b/ngraph/frontend/onnx_import/src/op/dropout.cpp new file mode 100644 index 00000000000000..7e5d71b6f95393 --- /dev/null +++ b/ngraph/frontend/onnx_import/src/op/dropout.cpp @@ -0,0 +1,125 @@ +//***************************************************************************** +// Copyright 2017-2021 Intel Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the 
License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +//***************************************************************************** + +#include + +#include "core/null_node.hpp" +#include "default_opset.hpp" +#include "exceptions.hpp" +#include "ngraph/log.hpp" +#include "ngraph/node.hpp" +#include "op/dropout.hpp" + +namespace ngraph +{ + namespace onnx_import + { + namespace op + { + namespace + { + OutputVector + build_dropout(const Node& node, float drop_probability, bool training_mode) + { + CHECK_VALID_NODE( + node, + drop_probability == 0 || !training_mode, + "Training mode is not supported for Dropout op if drop_probability is not " + "equal 0"); + + const auto input_data = node.get_ng_inputs().at(0); + const bool return_mask = node.get_outputs_size() > 1; + + if (return_mask) + { + NGRAPH_WARN << "Default mask for Dropout is ignored, " + << "because of unsupported constant networks"; + /*const auto mask = std::make_shared( + default_opset::Constant::create(ngraph::element::boolean, + Shape{}, {true}), + std::make_shared(input_data));*/ + // If constant network is supported mask should be returned instead of + // NullNode (ticket 48055) + return {input_data, std::make_shared()}; + } + else + { + return {input_data}; + } + } + } + + namespace set_12 + { + OutputVector dropout(const Node& node) + { + const auto ng_inputs = node.get_ng_inputs(); + // seed attribute is ignored because traning mode is not supported anyway + + // default values of inputs + double ratio = 0.5f; + bool training_mode = false; + + if (ng_inputs.size() > 1) + { + if (!ngraph::op::is_null(ng_inputs.at(1))) + { 
+ CHECK_VALID_NODE( + node, + ngraph::op::is_constant(ng_inputs.at(1).get_node_shared_ptr()), + "Not constant (or omitted) ratio input is not supported."); + ratio = as_type_ptr( + ng_inputs.at(1).get_node_shared_ptr()) + ->cast_vector()[0]; + } + } + if (ng_inputs.size() > 2) + { + if (!ngraph::op::is_null(ng_inputs.at(2))) + { + CHECK_VALID_NODE( + node, + ngraph::op::is_constant(ng_inputs.at(2).get_node_shared_ptr()), + "Not constant (or omitted) training_mode input is not supported."); + training_mode = as_type_ptr( + ng_inputs.at(2).get_node_shared_ptr()) + ->cast_vector()[0]; + } + } + return build_dropout(node, ratio, training_mode); + } + } // namespace set_12 + + namespace set_1 + { + OutputVector dropout(const Node& node) + { + CHECK_VALID_NODE(node, + !node.has_attribute("consumed_inputs"), + "Legacy consumed_inputs attrubute is not supported."); + + const bool is_test = node.get_attribute_value("is_test", 0); + const auto ratio = node.get_attribute_value("ratio", 0.5f); + + return build_dropout(node, ratio, is_test); + } + } // namespace set_1 + + } // namespace op + + } // namespace onnx_import + +} // namespace ngraph diff --git a/ngraph/frontend/onnx_import/src/op/dropout.hpp b/ngraph/frontend/onnx_import/src/op/dropout.hpp index d5aa8538f0396c..8a075461a2e535 100644 --- a/ngraph/frontend/onnx_import/src/op/dropout.hpp +++ b/ngraph/frontend/onnx_import/src/op/dropout.hpp @@ -16,13 +16,6 @@ #pragma once -#include - -#include "core/null_node.hpp" -#include "default_opset.hpp" -#include "exceptions.hpp" -#include "ngraph/log.hpp" -#include "ngraph/node.hpp" #include "onnx_import/core/node.hpp" namespace ngraph @@ -31,93 +24,14 @@ namespace ngraph { namespace op { - namespace - { - OutputVector - build_dropout(const Node& node, float drop_probability, bool training_mode) - { - CHECK_VALID_NODE( - node, - drop_probability == 0 || !training_mode, - "Training mode is not supported for Dropout op if drop_probability is not " - "equal 0"); - - const auto 
input_data = node.get_ng_inputs().at(0); - const bool return_mask = node.get_outputs_size() > 1; - - if (return_mask) - { - NGRAPH_WARN << "Default mask for Dropout is ignored, " - << "because of unsupported constant networks"; - /*const auto mask = std::make_shared( - default_opset::Constant::create(ngraph::element::boolean, - Shape{}, {true}), - std::make_shared(input_data));*/ - // If constant network is supported mask should be returned instead of - // NullNode (40957) - return {input_data, std::make_shared()}; - } - else - { - return {input_data}; - } - } - } - namespace set_12 { - OutputVector dropout(const Node& node) - { - const auto ng_inputs = node.get_ng_inputs(); - // seed attribute is ignored because traning mode is not supported anyway - - // default values of inputs - double ratio = 0.5f; - bool training_mode = false; - - if (ng_inputs.size() > 1) - { - if (!ngraph::op::is_null(ng_inputs.at(1))) - { - CHECK_VALID_NODE( - node, - ngraph::op::is_constant(ng_inputs.at(1).get_node_shared_ptr()), - "Not constant (or omitted) ratio input is not supported."); - ratio = as_type_ptr( - ng_inputs.at(1).get_node_shared_ptr()) - ->cast_vector()[0]; - } - } - if (ng_inputs.size() > 2) - { - if (!ngraph::op::is_null(ng_inputs.at(2))) - { - CHECK_VALID_NODE( - node, - ngraph::op::is_constant(ng_inputs.at(2).get_node_shared_ptr()), - "Not constant (or omitted) training_mode input is not supported."); - training_mode = as_type_ptr( - ng_inputs.at(2).get_node_shared_ptr()) - ->cast_vector()[0]; - } - } - return build_dropout(node, ratio, training_mode); - } - } + OutputVector dropout(const Node& node); + } // namespace set_12 namespace set_1 { - OutputVector dropout(const Node& node) - { - CHECK_VALID_NODE(node, - !node.has_attribute("consumed_inputs"), - "Legacy consumed_inputs attrubute is not supported."); - - const bool is_test = node.get_attribute_value("is_test", 0); - const auto ratio = node.get_attribute_value("ratio", 0.5f); - - return build_dropout(node, 
ratio, is_test); - } + OutputVector dropout(const Node& node); } // namespace set_1 } // namespace op diff --git a/ngraph/test/onnx/onnx_import.in.cpp b/ngraph/test/onnx/onnx_import.in.cpp index 580ed6276a03ac..b5bf817317d44d 100644 --- a/ngraph/test/onnx/onnx_import.in.cpp +++ b/ngraph/test/onnx/onnx_import.in.cpp @@ -4012,7 +4012,7 @@ NGRAPH_TEST(${BACKEND_NAME}, onnx_dropout12_no_training_return_mask) const std::vector data(3 * 4 * 5, 2.0f); test_case.add_input(data); test_case.add_expected_output(Shape{3, 4, 5}, data); - // constant network not supported + // constant network not supported (ticket 48055) // test_case.add_expected_output(Shape{3, 4, 5}, std::vector(3*4*5, true)); test_case.run(); } From 9da00f52e53478d6179b2d4778baff758d79e6dc Mon Sep 17 00:00:00 2001 From: mbencer Date: Tue, 2 Feb 2021 15:07:11 +0100 Subject: [PATCH 07/19] fixed is_test bug --- ngraph/frontend/onnx_import/src/op/dropout.cpp | 4 ++-- .../models/onnx/dropout1_no_training_no_return_mask.prototxt | 5 +++++ .../models/onnx/dropout1_no_training_return_mask.prototxt | 2 +- 3 files changed, 8 insertions(+), 3 deletions(-) diff --git a/ngraph/frontend/onnx_import/src/op/dropout.cpp b/ngraph/frontend/onnx_import/src/op/dropout.cpp index 7e5d71b6f95393..d0ebc93919a9d7 100644 --- a/ngraph/frontend/onnx_import/src/op/dropout.cpp +++ b/ngraph/frontend/onnx_import/src/op/dropout.cpp @@ -111,10 +111,10 @@ namespace ngraph !node.has_attribute("consumed_inputs"), "Legacy consumed_inputs attrubute is not supported."); - const bool is_test = node.get_attribute_value("is_test", 0); + const bool training_mode = !node.get_attribute_value("is_test", 0); const auto ratio = node.get_attribute_value("ratio", 0.5f); - return build_dropout(node, ratio, is_test); + return build_dropout(node, ratio, training_mode); } } // namespace set_1 diff --git a/ngraph/test/models/onnx/dropout1_no_training_no_return_mask.prototxt b/ngraph/test/models/onnx/dropout1_no_training_no_return_mask.prototxt index 
a142ba2608bd12..9c106663844e9b 100644 --- a/ngraph/test/models/onnx/dropout1_no_training_no_return_mask.prototxt +++ b/ngraph/test/models/onnx/dropout1_no_training_no_return_mask.prototxt @@ -5,6 +5,11 @@ graph { input: "x" output: "y" op_type: "Dropout" + attribute { + name: "is_test" + i: 1 + type: INT + } } name: "test_dropout_default_mask" input { diff --git a/ngraph/test/models/onnx/dropout1_no_training_return_mask.prototxt b/ngraph/test/models/onnx/dropout1_no_training_return_mask.prototxt index 7f0dd9c1d576d8..0eab9230662b05 100644 --- a/ngraph/test/models/onnx/dropout1_no_training_return_mask.prototxt +++ b/ngraph/test/models/onnx/dropout1_no_training_return_mask.prototxt @@ -9,7 +9,7 @@ graph { op_type: "Dropout" attribute { name: "is_test" - i: 0 + i: 1 type: INT } attribute { From 8ef8b5ba1b5f58c879fc7f172025ec55f76d1881 Mon Sep 17 00:00:00 2001 From: mbencer Date: Tue, 2 Feb 2021 16:16:49 +0100 Subject: [PATCH 08/19] added dropout in opset 7 --- .../frontend/onnx_import/src/op/dropout.cpp | 12 +++++ .../frontend/onnx_import/src/op/dropout.hpp | 5 ++ .../frontend/onnx_import/src/ops_bridge.cpp | 1 + .../onnx/dropout7_no_return_mask.prototxt | 51 +++++++++++++++++++ ngraph/test/onnx/onnx_import.in.cpp | 12 +++++ 5 files changed, 81 insertions(+) create mode 100644 ngraph/test/models/onnx/dropout7_no_return_mask.prototxt diff --git a/ngraph/frontend/onnx_import/src/op/dropout.cpp b/ngraph/frontend/onnx_import/src/op/dropout.cpp index d0ebc93919a9d7..220f32833a3a29 100644 --- a/ngraph/frontend/onnx_import/src/op/dropout.cpp +++ b/ngraph/frontend/onnx_import/src/op/dropout.cpp @@ -103,6 +103,18 @@ namespace ngraph } } // namespace set_12 + namespace set_7 + { + OutputVector dropout(const Node& node) + { + // "is_test" attribute was removed + const bool training_mode = false; + const auto ratio = node.get_attribute_value("ratio", 0.5f); + + return build_dropout(node, ratio, training_mode); + } + } // namespace set_1 + namespace set_1 { OutputVector 
dropout(const Node& node) diff --git a/ngraph/frontend/onnx_import/src/op/dropout.hpp b/ngraph/frontend/onnx_import/src/op/dropout.hpp index 8a075461a2e535..a1a083cb71912f 100644 --- a/ngraph/frontend/onnx_import/src/op/dropout.hpp +++ b/ngraph/frontend/onnx_import/src/op/dropout.hpp @@ -29,6 +29,11 @@ namespace ngraph OutputVector dropout(const Node& node); } // namespace set_12 + namespace set_7 + { + OutputVector dropout(const Node& node); + } // namespace set_7 + namespace set_1 { OutputVector dropout(const Node& node); diff --git a/ngraph/frontend/onnx_import/src/ops_bridge.cpp b/ngraph/frontend/onnx_import/src/ops_bridge.cpp index 59055a859951ca..6162bc100a1c32 100644 --- a/ngraph/frontend/onnx_import/src/ops_bridge.cpp +++ b/ngraph/frontend/onnx_import/src/ops_bridge.cpp @@ -338,6 +338,7 @@ namespace ngraph REGISTER_OPERATOR("Div", 1, div); REGISTER_OPERATOR("Div", 7, div); REGISTER_OPERATOR("Dropout", 1, dropout); + REGISTER_OPERATOR("Dropout", 7, dropout); REGISTER_OPERATOR("Dropout", 12, dropout); REGISTER_OPERATOR("Elu", 1, elu); REGISTER_OPERATOR("Equal", 1, equal); diff --git a/ngraph/test/models/onnx/dropout7_no_return_mask.prototxt b/ngraph/test/models/onnx/dropout7_no_return_mask.prototxt new file mode 100644 index 00000000000000..ced7fbca21ea13 --- /dev/null +++ b/ngraph/test/models/onnx/dropout7_no_return_mask.prototxt @@ -0,0 +1,51 @@ +ir_version: 7 +producer_name: "backend-test" +graph { + node { + input: "x" + output: "y" + op_type: "Dropout" + } + name: "test_dropout_default_mask" + input { + name: "x" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 3 + } + dim { + dim_value: 4 + } + dim { + dim_value: 5 + } + } + } + } + } + output { + name: "y" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 3 + } + dim { + dim_value: 4 + } + dim { + dim_value: 5 + } + } + } + } + } +} +opset_import { + version: 7 +} diff --git a/ngraph/test/onnx/onnx_import.in.cpp b/ngraph/test/onnx/onnx_import.in.cpp index 
b5bf817317d44d..5a14b07c44bd7c 100644 --- a/ngraph/test/onnx/onnx_import.in.cpp +++ b/ngraph/test/onnx/onnx_import.in.cpp @@ -3991,6 +3991,18 @@ NGRAPH_TEST(${BACKEND_NAME}, onnx_dropout1_no_training_return_mask) test_case.run(); } +NGRAPH_TEST(${BACKEND_NAME}, onnx_dropout7_no_return_mask) +{ + auto function = onnx_import::import_onnx_model( + file_util::path_join(SERIALIZED_ZOO, "onnx/dropout7_no_return_mask.prototxt")); + + auto test_case = test::TestCase(function); + const std::vector data(3 * 4 * 5, 2.0f); + test_case.add_input(data); + test_case.add_expected_output(Shape{3, 4, 5}, data); + test_case.run(); +} + NGRAPH_TEST(${BACKEND_NAME}, onnx_dropout12_no_training_no_return_mask) { auto function = onnx_import::import_onnx_model( From a767c5d0bc76036ae9bd223337cac5e4f397f935 Mon Sep 17 00:00:00 2001 From: mbencer Date: Tue, 2 Feb 2021 16:18:06 +0100 Subject: [PATCH 09/19] typo --- ngraph/frontend/onnx_import/src/op/dropout.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ngraph/frontend/onnx_import/src/op/dropout.cpp b/ngraph/frontend/onnx_import/src/op/dropout.cpp index 220f32833a3a29..8ce2b3da2d874c 100644 --- a/ngraph/frontend/onnx_import/src/op/dropout.cpp +++ b/ngraph/frontend/onnx_import/src/op/dropout.cpp @@ -113,7 +113,7 @@ namespace ngraph return build_dropout(node, ratio, training_mode); } - } // namespace set_1 + } // namespace set_7 namespace set_1 { From f814085c3fdd24186900467022736529b34b11a9 Mon Sep 17 00:00:00 2001 From: mbencer Date: Fri, 5 Feb 2021 18:17:52 +0100 Subject: [PATCH 10/19] added no const ratio test --- ...ropout12_no_traning_no_const_rato.prototxt | 98 +++++++++++++++++++ ngraph/test/onnx/onnx_import.in.cpp | 18 ++++ 2 files changed, 116 insertions(+) create mode 100644 ngraph/test/models/onnx/dropout12_no_traning_no_const_rato.prototxt diff --git a/ngraph/test/models/onnx/dropout12_no_traning_no_const_rato.prototxt b/ngraph/test/models/onnx/dropout12_no_traning_no_const_rato.prototxt new file mode 100644 
index 00000000000000..6bc6288a3e31f0 --- /dev/null +++ b/ngraph/test/models/onnx/dropout12_no_traning_no_const_rato.prototxt @@ -0,0 +1,98 @@ +ir_version: 7 +producer_name: "onnx-importer-test" +graph { + node { + output: "N" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 1 + float_data: 1.0 + name: "const_tensor_N" + } + type: TENSOR + } + } + node { + input: "X" + output: "A" + op_type: "Relu" + } + node { + input: "A" + input: "N" + output: "B" + op_type: "Pow" + } + node { + input: "B" + input: "R" + input: "T" + output: "C" + op_type: "Dropout" + } + node { + input: "C" + output: "Y" + op_type: "Relu" + } + name: "test-model" + input { + name: "X" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + } + } + } + } + input { + name: "R" + type { + tensor_type { + elem_type: 1 + shape { + } + } + } + } + input { + name: "T" + type { + tensor_type { + elem_type: 9 + shape { + } + } + } + } + output { + name: "Y" + type { + tensor_type { + elem_type: 1 + shape { + dim { + dim_value: 1 + } + dim { + dim_value: 4 + } + } + } + } + } +} +opset_import { + domain: "" + version: 12 +} \ No newline at end of file diff --git a/ngraph/test/onnx/onnx_import.in.cpp b/ngraph/test/onnx/onnx_import.in.cpp index 5a14b07c44bd7c..e1930be18ada60 100644 --- a/ngraph/test/onnx/onnx_import.in.cpp +++ b/ngraph/test/onnx/onnx_import.in.cpp @@ -4029,6 +4029,24 @@ NGRAPH_TEST(${BACKEND_NAME}, onnx_dropout12_no_training_return_mask) test_case.run(); } +NGRAPH_TEST(${BACKEND_NAME}, onnx_dropout12_no_traning_no_const_rato) +{ + try + { + auto function = onnx_import::import_onnx_model(file_util::path_join( + SERIALIZED_ZOO, "onnx/dropout12_no_traning_no_const_rato.prototxt")); + } + catch (const ngraph::ngraph_error& e) + { + EXPECT_HAS_SUBSTRING( + e.what(), std::string("Not constant (or omitted) ratio input is not supported.")); + } + catch (...) 
+ { + FAIL() << "Expected ngraph_error exception was not thrown"; + } +} + NGRAPH_TEST(${BACKEND_NAME}, onnx_dropout12_training_mode) { try From 3621d50aff8bab24ed79b9cf59c5b2c6777b34d1 Mon Sep 17 00:00:00 2001 From: mbencer Date: Mon, 8 Feb 2021 11:30:41 +0100 Subject: [PATCH 11/19] remove checking legacy attribute --- ngraph/frontend/onnx_import/src/op/dropout.cpp | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/ngraph/frontend/onnx_import/src/op/dropout.cpp b/ngraph/frontend/onnx_import/src/op/dropout.cpp index 8ce2b3da2d874c..4577428643db6f 100644 --- a/ngraph/frontend/onnx_import/src/op/dropout.cpp +++ b/ngraph/frontend/onnx_import/src/op/dropout.cpp @@ -120,9 +120,8 @@ namespace ngraph OutputVector dropout(const Node& node) { CHECK_VALID_NODE(node, - !node.has_attribute("consumed_inputs"), - "Legacy consumed_inputs attrubute is not supported."); + // legacy consumed_inputs attribute ignored const bool training_mode = !node.get_attribute_value("is_test", 0); const auto ratio = node.get_attribute_value("ratio", 0.5f); From ab41cd33088c42168b9339c25c8154bf9e6684e6 Mon Sep 17 00:00:00 2001 From: mbencer Date: Mon, 8 Feb 2021 11:45:29 +0100 Subject: [PATCH 12/19] removed not needed code --- ngraph/frontend/onnx_import/src/op/dropout.cpp | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/ngraph/frontend/onnx_import/src/op/dropout.cpp b/ngraph/frontend/onnx_import/src/op/dropout.cpp index 4577428643db6f..d27f345ae963b9 100644 --- a/ngraph/frontend/onnx_import/src/op/dropout.cpp +++ b/ngraph/frontend/onnx_import/src/op/dropout.cpp @@ -119,9 +119,7 @@ namespace ngraph { OutputVector dropout(const Node& node) { - CHECK_VALID_NODE(node, - - // legacy consumed_inputs attribute ignored + // legacy consumed_inputs attribute ignored const bool training_mode = !node.get_attribute_value("is_test", 0); const auto ratio = node.get_attribute_value("ratio", 0.5f); From 053df1523c3fe900db50b6a33032b87a548dd7e0 Mon Sep 17 00:00:00 2001 From: 
mbencer Date: Mon, 8 Feb 2021 12:37:01 +0100 Subject: [PATCH 13/19] enable default mask path --- ngraph/frontend/onnx_import/src/op/dropout.cpp | 14 +++++--------- .../dropout12_no_training_return_mask.prototxt | 12 +++++++++++- ngraph/test/onnx/onnx_import.in.cpp | 4 ++-- 3 files changed, 18 insertions(+), 12 deletions(-) diff --git a/ngraph/frontend/onnx_import/src/op/dropout.cpp b/ngraph/frontend/onnx_import/src/op/dropout.cpp index d27f345ae963b9..f0d8b62c214735 100644 --- a/ngraph/frontend/onnx_import/src/op/dropout.cpp +++ b/ngraph/frontend/onnx_import/src/op/dropout.cpp @@ -45,15 +45,11 @@ namespace ngraph if (return_mask) { - NGRAPH_WARN << "Default mask for Dropout is ignored, " - << "because of unsupported constant networks"; - /*const auto mask = std::make_shared( - default_opset::Constant::create(ngraph::element::boolean, - Shape{}, {true}), - std::make_shared(input_data));*/ - // If constant network is supported mask should be returned instead of - // NullNode (ticket 48055) - return {input_data, std::make_shared()}; + const auto mask = std::make_shared( + default_opset::Constant::create( + ngraph::element::boolean, Shape{}, {true}), + std::make_shared(input_data)); + return {input_data, mask}; } else { diff --git a/ngraph/test/models/onnx/dropout12_no_training_return_mask.prototxt b/ngraph/test/models/onnx/dropout12_no_training_return_mask.prototxt index 21a77d44c89ba1..51046ebb8f4636 100644 --- a/ngraph/test/models/onnx/dropout12_no_training_return_mask.prototxt +++ b/ngraph/test/models/onnx/dropout12_no_training_return_mask.prototxt @@ -12,6 +12,16 @@ graph { type: INT } } + node { + input: "z" + op_type: "Cast" + output: "z_out" + attribute { + name: "to" + i: 6 + type: INT + } + } name: "test_dropout_default_mask" input { name: "x" @@ -52,7 +62,7 @@ graph { } } output { - name: "z" + name: "z_out" type { tensor_type { elem_type: 9 diff --git a/ngraph/test/onnx/onnx_import.in.cpp b/ngraph/test/onnx/onnx_import.in.cpp index 
0d83ba68867fcd..04c711852a3db2 100644 --- a/ngraph/test/onnx/onnx_import.in.cpp +++ b/ngraph/test/onnx/onnx_import.in.cpp @@ -4028,8 +4028,8 @@ NGRAPH_TEST(${BACKEND_NAME}, onnx_dropout12_no_training_return_mask) const std::vector data(3 * 4 * 5, 2.0f); test_case.add_input(data); test_case.add_expected_output(Shape{3, 4, 5}, data); - // constant network not supported (ticket 48055) - // test_case.add_expected_output(Shape{3, 4, 5}, std::vector(3*4*5, true)); + test_case.add_expected_output( + Shape{3, 4, 5}, std::vector(3 * 4 * 5, 1)); // bool converted to i32 test_case.run(); } From cc9a07d18c3b71b7737d4f4719282b6e8c616f69 Mon Sep 17 00:00:00 2001 From: mbencer Date: Mon, 8 Feb 2021 14:51:30 +0100 Subject: [PATCH 14/19] Ignore ratio in training mode --- .../frontend/onnx_import/src/op/dropout.cpp | 40 +++++-------------- ngraph/python/tests/test_onnx/test_backend.py | 3 +- ...ropout12_no_traning_no_const_rato.prototxt | 25 +++++++----- .../dropout1_no_training_return_mask.prototxt | 12 +++++- ngraph/test/onnx/onnx_import.in.cpp | 28 ++++++------- 5 files changed, 48 insertions(+), 60 deletions(-) diff --git a/ngraph/frontend/onnx_import/src/op/dropout.cpp b/ngraph/frontend/onnx_import/src/op/dropout.cpp index f0d8b62c214735..f83bc679a5d08d 100644 --- a/ngraph/frontend/onnx_import/src/op/dropout.cpp +++ b/ngraph/frontend/onnx_import/src/op/dropout.cpp @@ -31,14 +31,10 @@ namespace ngraph { namespace { - OutputVector - build_dropout(const Node& node, float drop_probability, bool training_mode) + OutputVector build_dropout(const Node& node, bool training_mode) { CHECK_VALID_NODE( - node, - drop_probability == 0 || !training_mode, - "Training mode is not supported for Dropout op if drop_probability is not " - "equal 0"); + node, !training_mode, "Training mode is not supported for Dropout op"); const auto input_data = node.get_ng_inputs().at(0); const bool return_mask = node.get_outputs_size() > 1; @@ -63,25 +59,9 @@ namespace ngraph OutputVector dropout(const Node& 
node) { const auto ng_inputs = node.get_ng_inputs(); - // seed attribute is ignored because traning mode is not supported anyway - - // default values of inputs - double ratio = 0.5f; - bool training_mode = false; - - if (ng_inputs.size() > 1) - { - if (!ngraph::op::is_null(ng_inputs.at(1))) - { - CHECK_VALID_NODE( - node, - ngraph::op::is_constant(ng_inputs.at(1).get_node_shared_ptr()), - "Not constant (or omitted) ratio input is not supported."); - ratio = as_type_ptr( - ng_inputs.at(1).get_node_shared_ptr()) - ->cast_vector()[0]; - } - } + // seed attribute and ratio input are ignored because traning mode is not + // supported anyway + bool training_mode = false; // default value if (ng_inputs.size() > 2) { if (!ngraph::op::is_null(ng_inputs.at(2))) @@ -95,7 +75,7 @@ namespace ngraph ->cast_vector()[0]; } } - return build_dropout(node, ratio, training_mode); + return build_dropout(node, training_mode); } } // namespace set_12 @@ -104,10 +84,10 @@ namespace ngraph OutputVector dropout(const Node& node) { // "is_test" attribute was removed + // ratio attribute is ignored because traning mode is not supported const bool training_mode = false; - const auto ratio = node.get_attribute_value("ratio", 0.5f); - return build_dropout(node, ratio, training_mode); + return build_dropout(node, training_mode); } } // namespace set_7 @@ -116,10 +96,10 @@ namespace ngraph OutputVector dropout(const Node& node) { // legacy consumed_inputs attribute ignored + // ratio attribute is ignored because traning mode is not supported const bool training_mode = !node.get_attribute_value("is_test", 0); - const auto ratio = node.get_attribute_value("ratio", 0.5f); - return build_dropout(node, ratio, training_mode); + return build_dropout(node, training_mode); } } // namespace set_1 diff --git a/ngraph/python/tests/test_onnx/test_backend.py b/ngraph/python/tests/test_onnx/test_backend.py index 812a3231fa5990..ce3177e89d67e8 100644 --- a/ngraph/python/tests/test_onnx/test_backend.py +++ 
b/ngraph/python/tests/test_onnx/test_backend.py @@ -615,8 +615,7 @@ def expect_fail(test_case_path, xfail): # type: (str) -> None (xfail_issue_48052, "OnnxBackendNodeModelTest.test_training_dropout_mask_cpu", "OnnxBackendNodeModelTest.test_training_dropout_default_mask_cpu", - "OnnxBackendNodeModelTest.test_training_dropout_zero_ratio_mask_cpu", - "OnnxBackendNodeModelTest.test_dropout_default_mask_ratio_cpu",), + "OnnxBackendNodeModelTest.test_training_dropout_zero_ratio_mask_cpu",), (xfail_issue_45177, "OnnxBackendNodeModelTest.test_reduce_sum_default_axes_keepdims_example_cpu", "OnnxBackendNodeModelTest.test_reduce_sum_default_axes_keepdims_random_cpu", diff --git a/ngraph/test/models/onnx/dropout12_no_traning_no_const_rato.prototxt b/ngraph/test/models/onnx/dropout12_no_traning_no_const_rato.prototxt index 6bc6288a3e31f0..a286df855933cc 100644 --- a/ngraph/test/models/onnx/dropout12_no_traning_no_const_rato.prototxt +++ b/ngraph/test/models/onnx/dropout12_no_traning_no_const_rato.prototxt @@ -15,6 +15,19 @@ graph { type: TENSOR } } + node { + output: "T" + op_type: "Constant" + attribute { + name: "value" + t { + dims: 1 + data_type: 9 + int32_data: 0 + } + type: TENSOR + } + } node { input: "X" output: "A" @@ -65,16 +78,6 @@ graph { } } } - input { - name: "T" - type { - tensor_type { - elem_type: 9 - shape { - } - } - } - } output { name: "Y" type { @@ -95,4 +98,4 @@ graph { opset_import { domain: "" version: 12 -} \ No newline at end of file +} diff --git a/ngraph/test/models/onnx/dropout1_no_training_return_mask.prototxt b/ngraph/test/models/onnx/dropout1_no_training_return_mask.prototxt index 0eab9230662b05..abc400dcdd175c 100644 --- a/ngraph/test/models/onnx/dropout1_no_training_return_mask.prototxt +++ b/ngraph/test/models/onnx/dropout1_no_training_return_mask.prototxt @@ -18,6 +18,16 @@ graph { type: FLOAT } } + node { + input: "z" + op_type: "Cast" + output: "z_out" + attribute { + name: "to" + i: 6 + type: INT + } + } name: "test_dropout_default_mask" 
input { name: "x" @@ -58,7 +68,7 @@ graph { } } output { - name: "z" + name: "z_out" type { tensor_type { elem_type: 9 diff --git a/ngraph/test/onnx/onnx_import.in.cpp b/ngraph/test/onnx/onnx_import.in.cpp index 04c711852a3db2..dc9b11ec5b1a6c 100644 --- a/ngraph/test/onnx/onnx_import.in.cpp +++ b/ngraph/test/onnx/onnx_import.in.cpp @@ -3992,6 +3992,8 @@ NGRAPH_TEST(${BACKEND_NAME}, onnx_dropout1_no_training_return_mask) const std::vector data(3 * 4 * 5, 2.0f); test_case.add_input(data); test_case.add_expected_output(Shape{3, 4, 5}, data); + test_case.add_expected_output( + Shape{3, 4, 5}, std::vector(3 * 4 * 5, 1)); // // bool converted to i32 test_case.run(); } @@ -4035,20 +4037,15 @@ NGRAPH_TEST(${BACKEND_NAME}, onnx_dropout12_no_training_return_mask) NGRAPH_TEST(${BACKEND_NAME}, onnx_dropout12_no_traning_no_const_rato) { - try - { - auto function = onnx_import::import_onnx_model(file_util::path_join( - SERIALIZED_ZOO, "onnx/dropout12_no_traning_no_const_rato.prototxt")); - } - catch (const ngraph::ngraph_error& e) - { - EXPECT_HAS_SUBSTRING( - e.what(), std::string("Not constant (or omitted) ratio input is not supported.")); - } - catch (...) 
- { - FAIL() << "Expected ngraph_error exception was not thrown"; - } + auto function = onnx_import::import_onnx_model( + file_util::path_join(SERIALIZED_ZOO, "onnx/dropout12_no_traning_no_const_rato.prototxt")); + + auto test_case = test::TestCase(function); + test_case.add_input({1, 2, 3, 4}); + // test_case.add_input(Shape{}, {0.5}); // ratio input is ignored + + test_case.add_expected_output(Shape{1, 4}, {1., 2., 3., 4.}); + test_case.run(); } NGRAPH_TEST(${BACKEND_NAME}, onnx_dropout12_training_mode) @@ -4062,8 +4059,7 @@ NGRAPH_TEST(${BACKEND_NAME}, onnx_dropout12_training_mode) catch (const ngraph::ngraph_error& e) { EXPECT_HAS_SUBSTRING(e.what(), - std::string("Training mode is not supported for Dropout op " - "if drop_probability is not equal 0")); + std::string("Training mode is not supported for Dropout op")); } catch (...) { From 64a0a3f22537f5cbdb421c3eeb25433548ad2eb2 Mon Sep 17 00:00:00 2001 From: mbencer Date: Mon, 8 Feb 2021 15:12:54 +0100 Subject: [PATCH 15/19] update test backend list --- ngraph/python/tests/test_onnx/test_backend.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/ngraph/python/tests/test_onnx/test_backend.py b/ngraph/python/tests/test_onnx/test_backend.py index ce3177e89d67e8..0fcfbb1123524d 100644 --- a/ngraph/python/tests/test_onnx/test_backend.py +++ b/ngraph/python/tests/test_onnx/test_backend.py @@ -684,7 +684,8 @@ def expect_fail(test_case_path, xfail): # type: (str) -> None "OnnxBackendNodeModelTest.test_maxpool_with_argmax_2d_precomputed_strides_cpu", "OnnxBackendNodeModelTest.test_maxpool_with_argmax_2d_precomputed_pads_cpu",), (xfail_issue_48055, - "OnnxBackendNodeModelTest.test_dropout_default_mask_cpu",) + "OnnxBackendNodeModelTest.test_dropout_default_mask_cpu", + "OnnxBackendNodeModelTest.test_dropout_default_mask_ratio_cpu",) ] for test_group in tests_expected_to_fail: From 733b3a77782ffc6fd5870ec3ad824320480f49ab Mon Sep 17 00:00:00 2001 From: mbencer Date: Wed, 10 Feb 2021 11:52:39 +0100 
Subject: [PATCH 16/19] fixed constant bool network, setting precission to output blobs --- .../src/transformations/convert_precision.cpp | 2 +- .../transformations/convert_precision.cpp | 8 +++++ ngraph/python/tests/runtime.py | 31 ++++++++++++++++--- ngraph/python/tests/test_ngraph/test_basic.py | 12 +++---- .../tests/test_ngraph/test_ops_reshape.py | 1 - ngraph/python/tests/test_onnx/test_backend.py | 5 --- 6 files changed, 41 insertions(+), 18 deletions(-) diff --git a/inference-engine/src/transformations/src/transformations/convert_precision.cpp b/inference-engine/src/transformations/src/transformations/convert_precision.cpp index 2e09b554cd78ea..e1362847ac5a99 100644 --- a/inference-engine/src/transformations/src/transformations/convert_precision.cpp +++ b/inference-engine/src/transformations/src/transformations/convert_precision.cpp @@ -391,7 +391,7 @@ bool fuse_type_to_constant(std::shared_ptr & node, element::Type to, const } new_const->validate_and_infer_types(); - if (constant->get_output_target_inputs(0).size() == consumers.size()) { + if (constant->get_output_size() == consumers.size()) { new_const->set_friendly_name(constant->get_friendly_name()); } } diff --git a/inference-engine/tests/functional/inference_engine/transformations/convert_precision.cpp b/inference-engine/tests/functional/inference_engine/transformations/convert_precision.cpp index d67d67d29ae932..35d63367718028 100644 --- a/inference-engine/tests/functional/inference_engine/transformations/convert_precision.cpp +++ b/inference-engine/tests/functional/inference_engine/transformations/convert_precision.cpp @@ -565,8 +565,10 @@ TEST(TransformationTests, ConvertPrecision_Variables) { template void constant_convert_test(element::Type_t type_from, element::Type_t type_to, From value, To expected) { std::shared_ptr f(nullptr); + std::string expected_friendly_name; { auto c = opset4::Constant::create(type_from, Shape{}, {value}); + expected_friendly_name = c->get_friendly_name(); f = 
std::make_shared(NodeVector{c}, ParameterVector{}); pass::Manager manager; @@ -576,6 +578,7 @@ void constant_convert_test(element::Type_t type_from, element::Type_t type_to, F auto ops = f->get_ordered_ops(); auto c = std::dynamic_pointer_cast(ops[0]); ASSERT_NE(c, nullptr); + ASSERT_EQ(c->get_friendly_name(), expected_friendly_name); auto actual = c->cast_vector()[0]; ASSERT_EQ(expected, actual); @@ -622,3 +625,8 @@ TEST(TransformationTests, ConvertPrecision_ConstantConversion_U32MaxToI32) { TEST(TransformationTests, ConvertPrecision_ConstantConversion_U32ToI32) { constant_convert_test(element::Type_t::u32, element::Type_t::i32, 42, 42); } + +TEST(TransformationTests, ConvertPrecision_ConstantConversion_BoolToU8) { + constant_convert_test(element::Type_t::boolean, element::Type_t::u8, true, 1); + constant_convert_test(element::Type_t::boolean, element::Type_t::u8, false, 0); +} diff --git a/ngraph/python/tests/runtime.py b/ngraph/python/tests/runtime.py index 5397d7874b34f2..d77f3e5599834c 100644 --- a/ngraph/python/tests/runtime.py +++ b/ngraph/python/tests/runtime.py @@ -18,10 +18,10 @@ from typing import Dict, List, Union import numpy as np -from openvino.inference_engine import IECore, IENetwork, Blob +from openvino.inference_engine import IECore, IENetwork, Blob, DataPtr from ngraph.exceptions import UserInputError -from ngraph.impl import Function, Node, PartialShape +from ngraph.impl import Function, Node, PartialShape, Type from ngraph.opset1.ops import result from ngraph.utils.types import NumericData, get_shape, get_dtype @@ -55,6 +55,18 @@ def _convert_inputs(cnn_network: IENetwork) -> None: pass +def apply_ng_type(output: DataPtr, ng_type: Type): + ng_ie_supported_type_map = { + Type.boolean.get_type_name(): "BOOL", + Type.f32.get_type_name(): "FP32", + Type.i8.get_type_name(): "I8", + Type.i32.get_type_name(): "I32", + Type.u8.get_type_name(): "U8", + } + if ng_type.get_type_name() in ng_ie_supported_type_map: + output.precision = 
ng_ie_supported_type_map[ng_type.get_type_name()] + + class Runtime(object): """Represents an nGraph runtime environment.""" @@ -103,15 +115,19 @@ def __repr__(self) -> str: params_string = ", ".join([param.name for param in self.parameters]) return "".format(self.function.get_name(), params_string) - def __get_ie_output_blob_buffer(self, output_blobs: Dict[str, Blob], ng_result: result) -> np.ndarray: + def __get_ie_output_blob_name(self, outputs: Dict, ng_result: result) -> str: if len(self.results) == 1: - return next(iter(output_blobs.values())).buffer + return next(iter(outputs.keys())) else: prev_layer = ng_result.input(0).get_source_output() out_name = prev_layer.get_node().get_friendly_name() if prev_layer.get_node().get_output_size() != 1: out_name += "." + str(prev_layer.get_index()) - return output_blobs[out_name].buffer + return out_name + + def __get_ie_output_blob_buffer(self, output_blobs: Dict[str, Blob], ng_result: result) -> np.ndarray: + out_name = self.__get_ie_output_blob_name(output_blobs, ng_result) + return output_blobs[out_name].buffer def __call__(self, *input_values: NumericData) -> List[NumericData]: """Run computation on input values and return result.""" @@ -131,6 +147,11 @@ def __call__(self, *input_values: NumericData) -> List[NumericData]: else: cnn_network = self.network_cache[str(input_shapes)] + # set output blobs precision based on nG results + for ng_result in self.results: + ie_out_name = self.__get_ie_output_blob_name(cnn_network.outputs, ng_result) + apply_ng_type(cnn_network.outputs[ie_out_name], ng_result.get_output_element_type(0)) + executable_network = self.runtime.backend.load_network(cnn_network, self.runtime.backend_name) # Input validation diff --git a/ngraph/python/tests/test_ngraph/test_basic.py b/ngraph/python/tests/test_ngraph/test_basic.py index 16240bccdbe439..3c234061edc80f 100644 --- a/ngraph/python/tests/test_ngraph/test_basic.py +++ b/ngraph/python/tests/test_ngraph/test_basic.py @@ -80,15 +80,15 @@ def 
test_simple_computation_on_ndarrays(dtype): value_a = np.array([[1, 2], [3, 4]], dtype=dtype) value_b = np.array([[5, 6], [7, 8]], dtype=dtype) - value_c = np.array([[9, 10], [11, 12]], dtype=dtype) + value_c = np.array([[2, 3], [4, 5]], dtype=dtype) result = computation(value_a, value_b, value_c) - assert np.allclose(result, np.array([[54, 80], [110, 144]], dtype=dtype)) + assert np.allclose(result, np.array([[12, 24], [40, 60]], dtype=dtype)) - value_a = np.array([[13, 14], [15, 16]], dtype=dtype) - value_b = np.array([[17, 18], [19, 20]], dtype=dtype) - value_c = np.array([[21, 22], [23, 24]], dtype=dtype) + value_a = np.array([[9, 10], [11, 12]], dtype=dtype) + value_b = np.array([[13, 14], [15, 16]], dtype=dtype) + value_c = np.array([[5, 4], [3, 2]], dtype=dtype) result = computation(value_a, value_b, value_c) - assert np.allclose(result, np.array([[630, 704], [782, 864]], dtype=dtype)) + assert np.allclose(result, np.array([[110, 96], [78, 56]], dtype=dtype)) def test_serialization(): diff --git a/ngraph/python/tests/test_ngraph/test_ops_reshape.py b/ngraph/python/tests/test_ngraph/test_ops_reshape.py index f0aa63bd4160a7..b74658fcd0b086 100644 --- a/ngraph/python/tests/test_ngraph/test_ops_reshape.py +++ b/ngraph/python/tests/test_ngraph/test_ops_reshape.py @@ -223,7 +223,6 @@ def test_reshape_v1(): assert np.allclose(result, expected) -@xfail_issue_40957 def test_shape_of(): input_tensor = np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]], dtype=np.float32) diff --git a/ngraph/python/tests/test_onnx/test_backend.py b/ngraph/python/tests/test_onnx/test_backend.py index 0fcfbb1123524d..11a3de08a9b574 100644 --- a/ngraph/python/tests/test_onnx/test_backend.py +++ b/ngraph/python/tests/test_onnx/test_backend.py @@ -200,10 +200,6 @@ def expect_fail(test_case_path, xfail): # type: (str) -> None "OnnxBackendNodeModelTest.test_constant_cpu", "OnnxBackendNodeModelTest.test_eyelike_populate_off_main_diagonal_cpu", "OnnxBackendNodeModelTest.test_eyelike_without_dtype_cpu", 
- "OnnxBackendNodeModelTest.test_shape_cpu", - "OnnxBackendNodeModelTest.test_shape_example_cpu", - "OnnxBackendNodeModelTest.test_size_cpu", - "OnnxBackendNodeModelTest.test_size_example_cpu", "OnnxBackendNodeModelTest.test_dropout_default_ratio_cpu", "OnnxBackendNodeModelTest.test_training_dropout_default_cpu", "OnnxBackendNodeModelTest.test_training_dropout_zero_ratio_cpu", @@ -684,7 +680,6 @@ def expect_fail(test_case_path, xfail): # type: (str) -> None "OnnxBackendNodeModelTest.test_maxpool_with_argmax_2d_precomputed_strides_cpu", "OnnxBackendNodeModelTest.test_maxpool_with_argmax_2d_precomputed_pads_cpu",), (xfail_issue_48055, - "OnnxBackendNodeModelTest.test_dropout_default_mask_cpu", "OnnxBackendNodeModelTest.test_dropout_default_mask_ratio_cpu",) ] From 869864ed4de6e72de891f3d9bc59c4d1ee1787a7 Mon Sep 17 00:00:00 2001 From: mbencer Date: Wed, 10 Feb 2021 14:52:18 +0100 Subject: [PATCH 17/19] ignore not used test values --- ngraph/python/tests/__init__.py | 3 --- ngraph/python/tests/runtime.py | 13 +++++++---- ngraph/python/tests/test_onnx/test_backend.py | 23 +++++-------------- 3 files changed, 14 insertions(+), 25 deletions(-) diff --git a/ngraph/python/tests/__init__.py b/ngraph/python/tests/__init__.py index 8db5b5505a2d2a..ec98e6b489a546 100644 --- a/ngraph/python/tests/__init__.py +++ b/ngraph/python/tests/__init__.py @@ -157,7 +157,6 @@ def xfail_test(reason="Mark the test as expected to fail", strict=True): xfail_issue_38736 = xfail_test(reason="RuntimeError: nGraph does not support the following ONNX operations:" "NegativeLogLikelihoodLoss") xfail_issue_48052 = xfail_test(reason="Dropout op is not supported in traning mode") -xfail_issue_45177 = xfail_test(reason="RuntimeError: axes has zero dimension which is not allowed") xfail_issue_45180 = xfail_test(reason="RuntimeError: Unsupported dynamic op: ReduceSum") xfail_issue_44839 = xfail_test(reason="Huge computation missmatch") xfail_issue_44848 = xfail_test(reason="E Unsupported dynamic op: 
Range") @@ -182,8 +181,6 @@ def xfail_test(reason="Mark the test as expected to fail", strict=True): "FP64 precision.") xfail_issue_47337 = xfail_test(reason="RuntimeError: Unsupported dynamic ops: v1::OneHot") xfail_issue_33593 = xfail_test(reason="Current implementation of MaxPool doesn't support indices output") -xfail_issue_48055 = xfail_test(reason="Dropout doesn't return mask in non-traning mode, " - "because of lack of Constant network support") # Model MSFT issues: xfail_issue_37957 = xfail_test(reason="RuntimeError: nGraph does not support the following ONNX operations:" diff --git a/ngraph/python/tests/runtime.py b/ngraph/python/tests/runtime.py index d77f3e5599834c..aadbe0c96d9342 100644 --- a/ngraph/python/tests/runtime.py +++ b/ngraph/python/tests/runtime.py @@ -131,6 +131,14 @@ def __get_ie_output_blob_buffer(self, output_blobs: Dict[str, Blob], ng_result: def __call__(self, *input_values: NumericData) -> List[NumericData]: """Run computation on input values and return result.""" + # Input validation + if len(input_values) < len(self.parameters): + raise UserInputError( + "Expected %s params, received not enough %s values.", len(self.parameters), len(input_values) + ) + # ignore not needed input values + input_values = input_values[:len(self.parameters)] + input_values = [np.array(input_value) for input_value in input_values] input_shapes = [get_shape(input_value) for input_value in input_values] @@ -154,11 +162,6 @@ def __call__(self, *input_values: NumericData) -> List[NumericData]: executable_network = self.runtime.backend.load_network(cnn_network, self.runtime.backend_name) - # Input validation - if len(input_values) != len(self.parameters): - raise UserInputError( - "Expected %s parameters, received %s.", len(self.parameters), len(input_values) - ) for parameter, input in zip(self.parameters, input_values): parameter_shape = parameter.get_output_partial_shape(0) input_shape = PartialShape(input.shape) diff --git 
a/ngraph/python/tests/test_onnx/test_backend.py b/ngraph/python/tests/test_onnx/test_backend.py index 11a3de08a9b574..bd45464f5696bd 100644 --- a/ngraph/python/tests/test_onnx/test_backend.py +++ b/ngraph/python/tests/test_onnx/test_backend.py @@ -69,7 +69,6 @@ xfail_issue_38732, xfail_issue_38734, xfail_issue_38735, - xfail_issue_45177, xfail_issue_45180, xfail_issue_43742, xfail_issue_44839, @@ -90,8 +89,7 @@ xfail_issue_47323, xfail_issue_47330, xfail_issue_48052, - xfail_issue_33593, - xfail_issue_48055) + xfail_issue_33593) def expect_fail(test_case_path, xfail): # type: (str) -> None @@ -200,10 +198,6 @@ def expect_fail(test_case_path, xfail): # type: (str) -> None "OnnxBackendNodeModelTest.test_constant_cpu", "OnnxBackendNodeModelTest.test_eyelike_populate_off_main_diagonal_cpu", "OnnxBackendNodeModelTest.test_eyelike_without_dtype_cpu", - "OnnxBackendNodeModelTest.test_dropout_default_ratio_cpu", - "OnnxBackendNodeModelTest.test_training_dropout_default_cpu", - "OnnxBackendNodeModelTest.test_training_dropout_zero_ratio_cpu", - "OnnxBackendNodeModelTest.test_training_dropout_cpu", "OnnxBackendNodeModelTest.test_eyelike_with_dtype_cpu"), (xfail_issue_35915, "OnnxBackendNodeModelTest.test_min_uint8_cpu"), @@ -609,15 +603,12 @@ def expect_fail(test_case_path, xfail): # type: (str) -> None "OnnxBackendNodeModelTest.test_adagrad_multiple_cpu", "OnnxBackendNodeModelTest.test_adagrad_cpu"), (xfail_issue_48052, + "OnnxBackendNodeModelTest.test_training_dropout_cpu", "OnnxBackendNodeModelTest.test_training_dropout_mask_cpu", + "OnnxBackendNodeModelTest.test_training_dropout_default_cpu", + "OnnxBackendNodeModelTest.test_training_dropout_zero_ratio_cpu", "OnnxBackendNodeModelTest.test_training_dropout_default_mask_cpu", - "OnnxBackendNodeModelTest.test_training_dropout_zero_ratio_mask_cpu",), - (xfail_issue_45177, - "OnnxBackendNodeModelTest.test_reduce_sum_default_axes_keepdims_example_cpu", - 
"OnnxBackendNodeModelTest.test_reduce_sum_default_axes_keepdims_random_cpu", - "OnnxBackendNodeModelTest.test_reduce_sum_empty_axes_input_noop_example_cpu", - "OnnxBackendNodeModelTest.test_reduce_sum_empty_axes_input_noop_random_cpu", - "OnnxBackendNodeModelTest.test_reduce_sum_negative_axes_keepdims_random_cpu"), + "OnnxBackendNodeModelTest.test_training_dropout_zero_ratio_mask_cpu"), (xfail_issue_45180, "OnnxBackendNodeModelTest.test_reduce_sum_do_not_keepdims_example_cpu", "OnnxBackendNodeModelTest.test_reduce_sum_do_not_keepdims_random_cpu", @@ -678,9 +669,7 @@ def expect_fail(test_case_path, xfail): # type: (str) -> None "OnnxBackendNodeModelTest.test_quantizelinear_axis_cpu",), (xfail_issue_33593, "OnnxBackendNodeModelTest.test_maxpool_with_argmax_2d_precomputed_strides_cpu", - "OnnxBackendNodeModelTest.test_maxpool_with_argmax_2d_precomputed_pads_cpu",), - (xfail_issue_48055, - "OnnxBackendNodeModelTest.test_dropout_default_mask_ratio_cpu",) + "OnnxBackendNodeModelTest.test_maxpool_with_argmax_2d_precomputed_pads_cpu",) ] for test_group in tests_expected_to_fail: From a0478fdfc45bc5f84c2cda52e542f3f528f30f79 Mon Sep 17 00:00:00 2001 From: mbencer Date: Wed, 10 Feb 2021 18:29:21 +0100 Subject: [PATCH 18/19] removed check constant->get_output_size() --- .../transformations/src/transformations/convert_precision.cpp | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/inference-engine/src/transformations/src/transformations/convert_precision.cpp b/inference-engine/src/transformations/src/transformations/convert_precision.cpp index 6a7257cdfa271d..59d9bf7c4dc72a 100644 --- a/inference-engine/src/transformations/src/transformations/convert_precision.cpp +++ b/inference-engine/src/transformations/src/transformations/convert_precision.cpp @@ -412,9 +412,7 @@ bool fuse_type_to_constant(std::shared_ptr & node, element::Type to, const } new_const->validate_and_infer_types(); - if (constant->get_output_size() == consumers.size()) { - 
new_const->set_friendly_name(constant->get_friendly_name()); - } + new_const->set_friendly_name(constant->get_friendly_name()); } return false; } From 0d8c1e977af8f4a3ad94d04375d4c146acef3602 Mon Sep 17 00:00:00 2001 From: mbencer Date: Thu, 11 Feb 2021 19:13:04 +0100 Subject: [PATCH 19/19] dropout review remarks --- .../frontend/onnx_import/src/op/dropout.cpp | 19 ++++++++----------- ngraph/test/onnx/onnx_import.in.cpp | 5 ++--- 2 files changed, 10 insertions(+), 14 deletions(-) diff --git a/ngraph/frontend/onnx_import/src/op/dropout.cpp b/ngraph/frontend/onnx_import/src/op/dropout.cpp index f83bc679a5d08d..fd4f8f93f133ef 100644 --- a/ngraph/frontend/onnx_import/src/op/dropout.cpp +++ b/ngraph/frontend/onnx_import/src/op/dropout.cpp @@ -62,18 +62,15 @@ namespace ngraph // seed attribute and ratio input are ignored because traning mode is not // supported anyway bool training_mode = false; // default value - if (ng_inputs.size() > 2) + if (ng_inputs.size() > 2 && !ngraph::op::is_null(ng_inputs.at(2))) { - if (!ngraph::op::is_null(ng_inputs.at(2))) - { - CHECK_VALID_NODE( - node, - ngraph::op::is_constant(ng_inputs.at(2).get_node_shared_ptr()), - "Not constant (or omitted) training_mode input is not supported."); - training_mode = as_type_ptr( - ng_inputs.at(2).get_node_shared_ptr()) - ->cast_vector()[0]; - } + CHECK_VALID_NODE( + node, + ngraph::op::is_constant(ng_inputs.at(2).get_node_shared_ptr()), + "Non-constant training_mode input is not supported."); + training_mode = as_type_ptr( + ng_inputs.at(2).get_node_shared_ptr()) + ->cast_vector()[0]; } return build_dropout(node, training_mode); } diff --git a/ngraph/test/onnx/onnx_import.in.cpp b/ngraph/test/onnx/onnx_import.in.cpp index 28dc4bfdbd7d7d..65ab5d75368a86 100644 --- a/ngraph/test/onnx/onnx_import.in.cpp +++ b/ngraph/test/onnx/onnx_import.in.cpp @@ -3942,9 +3942,8 @@ NGRAPH_TEST(${BACKEND_NAME}, onnx_dropout12_not_const_training_mode) } catch (const ngraph::ngraph_error& e) { - EXPECT_HAS_SUBSTRING( - 
e.what(), - std::string("Not constant (or omitted) training_mode input is not supported.")); + EXPECT_HAS_SUBSTRING(e.what(), + std::string("Non-constant training_mode input is not supported.")); } catch (...) {