diff --git a/cmake/templates/InferenceEngineConfig.cmake.in b/cmake/templates/InferenceEngineConfig.cmake.in index 261edbf3d730f3..43408483f9af6e 100644 --- a/cmake/templates/InferenceEngineConfig.cmake.in +++ b/cmake/templates/InferenceEngineConfig.cmake.in @@ -73,6 +73,10 @@ function(_ie_target_no_deprecation_error) else() set(flags "-Wno-error=deprecated-declarations") endif() + if(CMAKE_CROSSCOMPILING) + set_target_properties(${ARGV} PROPERTIES + INTERFACE_LINK_OPTIONS "-Wl,--allow-shlib-undefined") + endif() set_target_properties(${ARGV} PROPERTIES INTERFACE_COMPILE_OPTIONS ${flags}) endif() diff --git a/inference-engine/src/inference_engine/CMakeLists.txt b/inference-engine/src/inference_engine/CMakeLists.txt index aeb0386e85c878..ffbfc0a1a14160 100644 --- a/inference-engine/src/inference_engine/CMakeLists.txt +++ b/inference-engine/src/inference_engine/CMakeLists.txt @@ -124,6 +124,7 @@ target_compile_definitions(${TARGET_NAME}_obj PRIVATE IMPLEMENT_INFERENCE_ENGINE target_include_directories(${TARGET_NAME}_obj SYSTEM PRIVATE $ $ + $ $) target_include_directories(${TARGET_NAME}_obj PRIVATE "${CMAKE_CURRENT_SOURCE_DIR}" @@ -160,7 +161,7 @@ if (TBBBIND_2_4_FOUND) endif() target_link_libraries(${TARGET_NAME} PRIVATE pugixml::static openvino::itt ${CMAKE_DL_LIBS} Threads::Threads - ngraph inference_engine_transformations) + ngraph ngraph::frontend_manager inference_engine_transformations) target_include_directories(${TARGET_NAME} INTERFACE $ @@ -200,7 +201,7 @@ if(WIN32) set_target_properties(${TARGET_NAME}_s PROPERTIES COMPILE_PDB_NAME ${TARGET_NAME}_s) endif() -target_link_libraries(${TARGET_NAME}_s PRIVATE openvino::itt ${CMAKE_DL_LIBS} ngraph +target_link_libraries(${TARGET_NAME}_s PRIVATE openvino::itt ${CMAKE_DL_LIBS} ngraph ngraph::frontend_manager inference_engine_transformations pugixml::static) target_compile_definitions(${TARGET_NAME}_s PUBLIC USE_STATIC_IE) diff --git a/inference-engine/src/inference_engine/ie_network_reader.cpp b/inference-engine/src/inference_engine/ie_network_reader.cpp index 6043303712dc02..7189a0a098aaa9 100644 --- a/inference-engine/src/inference_engine/ie_network_reader.cpp +++ b/inference-engine/src/inference_engine/ie_network_reader.cpp @@ -9,6 +9,7 @@ #include #include #include +#include #include #include @@ -226,6 +227,26 @@ CNNNetwork details::ReadNetwork(const std::string& modelPath, const std::string& return reader->read(modelStream, exts); } } + // Try to load with FrontEndManager + static ngraph::frontend::FrontEndManager manager; + ngraph::frontend::FrontEnd::Ptr FE; + ngraph::frontend::InputModel::Ptr inputModel; + if (!binPath.empty()) { +#if defined(ENABLE_UNICODE_PATH_SUPPORT) && defined(_WIN32) + std::wstring weights_path = FileUtils::multiByteCharToWString(binPath.c_str()); +#else + std::string weights_path = binPath; +#endif + FE = manager.load_by_model(model_path, weights_path); + if (FE) inputModel = FE->load(model_path, weights_path); + } else { + FE = manager.load_by_model(model_path); + if (FE) inputModel = FE->load(model_path); + } + if (inputModel) { + auto ngFunc = FE->convert(inputModel); + return CNNNetwork(ngFunc); + } IE_THROW() << "Unknown model format! Cannot find reader for model format: " << fileExt << " and read the model: " << modelPath << ". Please check that reader library exists in your PATH."; } @@ -248,4 +269,4 @@ CNNNetwork details::ReadNetwork(const std::string& model, const Blob::CPtr& weig IE_THROW() << "Unknown model format! Cannot find reader for the model and read it. 
Please check that reader library exists in your PATH."; } -} // namespace InferenceEngine \ No newline at end of file +} // namespace InferenceEngine diff --git a/inference-engine/tests/functional/inference_engine/CMakeLists.txt b/inference-engine/tests/functional/inference_engine/CMakeLists.txt index 80d95d875e3ab7..4154e95ba860f9 100644 --- a/inference-engine/tests/functional/inference_engine/CMakeLists.txt +++ b/inference-engine/tests/functional/inference_engine/CMakeLists.txt @@ -55,6 +55,11 @@ if(NGRAPH_ONNX_IMPORT_ENABLE) add_dependencies(${TARGET_NAME} inference_engine_onnx_reader) endif() +if(NGRAPH_PDPD_FRONTEND_ENABLE) + target_compile_definitions(${TARGET_NAME} PRIVATE + PDPD_TEST_MODELS="${CMAKE_CURRENT_SOURCE_DIR}/pdpd_reader/models/") +endif() + ie_faster_build(${TARGET_NAME} PCH PRIVATE "precomp.hpp" ) diff --git a/inference-engine/tests/functional/inference_engine/pdpd_reader/models/relu.pdmodel b/inference-engine/tests/functional/inference_engine/pdpd_reader/models/relu.pdmodel new file mode 100644 index 00000000000000..9bb64f2a50d5dc Binary files /dev/null and b/inference-engine/tests/functional/inference_engine/pdpd_reader/models/relu.pdmodel differ diff --git "a/inference-engine/tests/functional/inference_engine/pdpd_reader/models/\343\201\262\343\202\211\343\201\214\343\201\252\346\227\245\346\234\254\350\252\236.pdmodel" "b/inference-engine/tests/functional/inference_engine/pdpd_reader/models/\343\201\262\343\202\211\343\201\214\343\201\252\346\227\245\346\234\254\350\252\236.pdmodel" new file mode 100644 index 00000000000000..9bb64f2a50d5dc Binary files /dev/null and "b/inference-engine/tests/functional/inference_engine/pdpd_reader/models/\343\201\262\343\202\211\343\201\214\343\201\252\346\227\245\346\234\254\350\252\236.pdmodel" differ diff --git a/inference-engine/tests/functional/inference_engine/pdpd_reader/read_pdpd_model_test.cpp b/inference-engine/tests/functional/inference_engine/pdpd_reader/read_pdpd_model_test.cpp new file mode 100644 index 00000000000000..5ec2077da1ef0b --- /dev/null +++ b/inference-engine/tests/functional/inference_engine/pdpd_reader/read_pdpd_model_test.cpp @@ -0,0 +1,84 @@ +// Copyright (C) 2018-2021 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#include +#include +#include +#include + +#include +#include +#include +#include +#include +#include "common_test_utils/ngraph_test_utils.hpp" + +TEST(PDPD_Reader_Tests, ImportBasicModelToCore) { + auto model = std::string(PDPD_TEST_MODELS) + "relu.pdmodel"; + InferenceEngine::Core ie; + auto cnnNetwork = ie.ReadNetwork(model); + auto function = cnnNetwork.getFunction(); + + const auto inputType = ngraph::element::f32; + const auto inputShape = ngraph::Shape{ 3 }; + + const auto data = std::make_shared(inputType, inputShape); + data->set_friendly_name("x"); + data->output(0).get_tensor().add_names({ "x" }); + const auto relu = std::make_shared(data->output(0)); + relu->set_friendly_name("relu_0.tmp_0"); + relu->output(0).get_tensor().add_names({ "relu_0.tmp_0" }); + const auto scale = std::make_shared(ngraph::element::f32, ngraph::Shape{ 1 }, std::vector{1}); + const auto bias = std::make_shared(ngraph::element::f32, ngraph::Shape{ 1 }, std::vector{0}); + const auto node_multiply = std::make_shared(relu->output(0), scale); + const auto node_add = std::make_shared(node_multiply, bias); + node_add->set_friendly_name("save_infer_model/scale_0.tmp_1"); + node_add->output(0).get_tensor().add_names({ "save_infer_model/scale_0.tmp_1" }); + const auto result = 
std::make_shared(node_add->output(0)); + result->set_friendly_name("save_infer_model/scale_0.tmp_1/Result"); + const auto reference = std::make_shared( + ngraph::NodeVector{ result }, + ngraph::ParameterVector{ data }, + "RefPDPDFunction"); + const FunctionsComparator func_comparator = FunctionsComparator::with_default().enable(FunctionsComparator::NAMES); + const FunctionsComparator::Result res = func_comparator(function, reference); + ASSERT_TRUE(res.valid); +} + +#if defined(ENABLE_UNICODE_PATH_SUPPORT) && defined(_WIN32) +TEST(PDPD_Reader_Tests, ImportBasicModelToCoreWstring) { + std::string win_dir_path{ PDPD_TEST_MODELS }; + std::replace(win_dir_path.begin(), win_dir_path.end(), '/', '\\'); + const std::wstring unicode_win_dir_path = FileUtils::multiByteCharToWString(win_dir_path.c_str()); + auto model = unicode_win_dir_path + L"ひらがな日本語.pdmodel"; + InferenceEngine::Core ie; + auto cnnNetwork = ie.ReadNetwork(model); + auto function = cnnNetwork.getFunction(); + + const auto inputType = ngraph::element::f32; + const auto inputShape = ngraph::Shape{ 3 }; + + const auto data = std::make_shared(inputType, inputShape); + data->set_friendly_name("x"); + data->output(0).get_tensor().add_names({ "x" }); + const auto relu = std::make_shared(data->output(0)); + relu->set_friendly_name("relu_0.tmp_0"); + relu->output(0).get_tensor().add_names({ "relu_0.tmp_0" }); + const auto scale = std::make_shared(ngraph::element::f32, ngraph::Shape{ 1 }, std::vector{1}); + const auto bias = std::make_shared(ngraph::element::f32, ngraph::Shape{ 1 }, std::vector{0}); + const auto node_multiply = std::make_shared(relu->output(0), scale); + const auto node_add = std::make_shared(node_multiply, bias); + node_add->set_friendly_name("save_infer_model/scale_0.tmp_1"); + node_add->output(0).get_tensor().add_names({ "save_infer_model/scale_0.tmp_1" }); + const auto result = std::make_shared(node_add->output(0)); + result->set_friendly_name("save_infer_model/scale_0.tmp_1/Result"); + const auto reference = std::make_shared( + ngraph::NodeVector{ result }, + ngraph::ParameterVector{ data }, + "RefPDPDFunction"); + const FunctionsComparator func_comparator = FunctionsComparator::with_default().enable(FunctionsComparator::NAMES); + const FunctionsComparator::Result res = func_comparator(function, reference); + ASSERT_TRUE(res.valid); +} +#endif diff --git a/model-optimizer/mo/moc_frontend/pipeline.py b/model-optimizer/mo/moc_frontend/pipeline.py index 55c1ba96fd6d53..5ddccc15f41a7a 100644 --- a/model-optimizer/mo/moc_frontend/pipeline.py +++ b/model-optimizer/mo/moc_frontend/pipeline.py @@ -24,7 +24,7 @@ def moc_pipeline(argv: argparse.Namespace): str(fem.get_available_front_ends()))) log.debug('Initializing new FE for framework {}'.format(argv.framework)) fe = fem.load_by_framework(argv.framework) - input_model = fe.load_from_file(argv.input_model) + input_model = fe.load(argv.input_model) user_shapes, outputs, freeze_placeholder = fe_user_data_repack( input_model, argv.placeholder_shapes, argv.placeholder_data_types, diff --git a/model-optimizer/unit_tests/mock_mo_frontend/mock_mo_ngraph_frontend/mock_mo_frontend.cpp b/model-optimizer/unit_tests/mock_mo_frontend/mock_mo_ngraph_frontend/mock_mo_frontend.cpp index 66ea0ab35a0208..9ddbba38040770 100644 --- a/model-optimizer/unit_tests/mock_mo_frontend/mock_mo_ngraph_frontend/mock_mo_frontend.cpp +++ b/model-optimizer/unit_tests/mock_mo_frontend/mock_mo_ngraph_frontend/mock_mo_frontend.cpp @@ -25,7 +25,7 @@ extern "C" MOCK_API void* GetFrontEndData() { FrontEndPluginInfo* 
res = new FrontEndPluginInfo(); res->m_name = "mock_mo_ngraph_frontend"; - res->m_creator = [](FrontEndCapFlags flags) { return std::make_shared(flags); }; + res->m_creator = []() { return std::make_shared(); }; return res; } \ No newline at end of file diff --git a/model-optimizer/unit_tests/mock_mo_frontend/mock_mo_ngraph_frontend/mock_mo_frontend.hpp b/model-optimizer/unit_tests/mock_mo_frontend/mock_mo_ngraph_frontend/mock_mo_frontend.hpp index d22980794c3a23..ab0de8f3357e7c 100644 --- a/model-optimizer/unit_tests/mock_mo_frontend/mock_mo_ngraph_frontend/mock_mo_frontend.hpp +++ b/model-optimizer/unit_tests/mock_mo_frontend/mock_mo_ngraph_frontend/mock_mo_frontend.hpp @@ -292,11 +292,9 @@ class MOCK_API InputModelMockPy : public InputModel /// was called with correct arguments during test execution struct MOCK_API FeStat { - FrontEndCapFlags m_load_flags; std::vector m_load_paths; int m_convert_model = 0; // Getters - FrontEndCapFlags load_flags() const { return m_load_flags; } std::vector load_paths() const { return m_load_paths; } int convert_model() const { return m_convert_model; } }; @@ -309,13 +307,8 @@ class MOCK_API FrontEndMockPy : public FrontEnd static FeStat m_stat; public: - FrontEndMockPy(FrontEndCapFlags flags) { m_stat.m_load_flags = flags; } + FrontEndMockPy() {} - InputModel::Ptr load_from_file(const std::string& path) const override - { - m_stat.m_load_paths.push_back(path); - return std::make_shared(); - } std::shared_ptr convert(InputModel::Ptr model) const override { @@ -326,4 +319,15 @@ class MOCK_API FrontEndMockPy : public FrontEnd static FeStat get_stat() { return m_stat; } static void clear_stat() { m_stat = {}; } + +protected: + InputModel::Ptr load_impl(const std::vector>& params) const override + { + if (params.size() > 0 && is_type>(params[0])) + { + auto path = as_type_ptr>(params[0])->get(); + m_stat.m_load_paths.push_back(path); + } + return std::make_shared(); + } }; diff --git a/model-optimizer/unit_tests/mock_mo_frontend/mock_mo_python_api/mock_mo_python_api.cpp b/model-optimizer/unit_tests/mock_mo_frontend/mock_mo_python_api/mock_mo_python_api.cpp index d9bbe52ab69b5f..d5b7978932688f 100644 --- a/model-optimizer/unit_tests/mock_mo_frontend/mock_mo_python_api/mock_mo_python_api.cpp +++ b/model-optimizer/unit_tests/mock_mo_frontend/mock_mo_python_api/mock_mo_python_api.cpp @@ -17,7 +17,6 @@ static void register_mock_frontend_stat(py::module m) m.def("clear_frontend_statistic", &FrontEndMockPy::clear_stat); py::class_ feStat(m, "FeStat", py::dynamic_attr()); - feStat.def_property_readonly("load_flags", &FeStat::load_flags); feStat.def_property_readonly("load_paths", &FeStat::load_paths); feStat.def_property_readonly("convert_model", &FeStat::convert_model); } diff --git a/ngraph/core/include/ngraph/variant.hpp b/ngraph/core/include/ngraph/variant.hpp index aeb67f79b9e204..d39be393c76321 100644 --- a/ngraph/core/include/ngraph/variant.hpp +++ b/ngraph/core/include/ngraph/variant.hpp @@ -75,4 +75,27 @@ namespace ngraph { } }; + + template + inline std::shared_ptr make_variant(const T& p) + { + return std::dynamic_pointer_cast>(std::make_shared>(p)); + } + + template + inline std::shared_ptr make_variant(const char (&s)[N]) + { + return std::dynamic_pointer_cast>( + std::make_shared>(s)); + } + +#if defined(ENABLE_UNICODE_PATH_SUPPORT) && defined(_WIN32) + template + inline std::shared_ptr make_variant(const wchar_t (&s)[N]) + { + return std::dynamic_pointer_cast>( + std::make_shared>(s)); + } +#endif + } // namespace ngraph diff --git 
a/ngraph/frontend/frontend_manager/include/frontend_manager/frontend.hpp b/ngraph/frontend/frontend_manager/include/frontend_manager/frontend.hpp index 97a18112d82959..da54a1f7993a95 100644 --- a/ngraph/frontend/frontend_manager/include/frontend_manager/frontend.hpp +++ b/ngraph/frontend/frontend_manager/include/frontend_manager/frontend.hpp @@ -10,6 +10,7 @@ #include "frontend_manager_defs.hpp" #include "input_model.hpp" #include "ngraph/function.hpp" +#include "ngraph/variant.hpp" namespace ngraph { @@ -26,43 +27,31 @@ namespace ngraph virtual ~FrontEnd(); - /// \brief Loads an input model by specified model file path - /// If model is stored in several files (e.g. model topology and model weights) - - /// frontend implementation is responsible to handle this case, generally frontend may - /// retrieve other file names from main file - /// \param path Main model file path - /// \return Loaded input model - virtual InputModel::Ptr load_from_file(const std::string& path) const; - - /// \brief Loads an input model by specified number of model files - /// This shall be used for cases when client knows all model files (model, weights, etc) - /// \param paths Array of model files - /// \return Loaded input model - virtual InputModel::Ptr load_from_files(const std::vector& paths) const; - - /// \brief Loads an input model by already loaded memory buffer - /// Memory structure is frontend-defined and is not specified in generic API - /// \param model Model memory buffer - /// \return Loaded input model - virtual InputModel::Ptr load_from_memory(const void* model) const; - - /// \brief Loads an input model from set of memory buffers - /// Memory structure is frontend-defined and is not specified in generic API - /// \param modelParts Array of model memory buffers - /// \return Loaded input model - virtual InputModel::Ptr - load_from_memory_fragments(const std::vector& modelParts) const; - - /// \brief Loads an input model by input stream representing main model file - /// \param stream Input stream of main model - /// \return Loaded input model - virtual InputModel::Ptr load_from_stream(std::istream& stream) const; - - /// \brief Loads an input model by input streams representing all model files - /// \param streams Array of input streams for model - /// \return Loaded input model - virtual InputModel::Ptr - load_from_streams(const std::vector& streams) const; + /// \brief Validates if FrontEnd can recognize model with parameters specified. + /// Same parameters should be used to load model. + /// \param vars Any number of parameters of any type. What kind of parameters + /// are accepted is determined by each FrontEnd individually, typically it is + /// std::string containing path to the model file. For more information please + /// refer to specific FrontEnd documentation. + /// \return true if model recognized, false - otherwise. + template + inline bool supported(const Types&... vars) const + { + return supported_impl({make_variant(vars)...}); + } + + /// \brief Loads an input model by any specified arguments. Each FrontEnd separately + /// defines what arguments it can accept. + /// \param vars Any number of parameters of any type. What kind of parameters + /// are accepted is determined by each FrontEnd individually, typically it is + /// std::string containing path to the model file. For more information please + /// refer to specific FrontEnd documentation. + /// \return Loaded input model. + template + inline InputModel::Ptr load(const Types&... 
vars) const + { + return load_impl({make_variant(vars)...}); + } /// \brief Completely convert and normalize entire function, throws if it is not /// possible @@ -95,8 +84,20 @@ namespace ngraph /// \brief Runs normalization passes on function that was loaded with partial conversion /// \param function partially converted nGraph function virtual void normalize(std::shared_ptr function) const; + + protected: + virtual bool + supported_impl(const std::vector>& variants) const; + virtual InputModel::Ptr + load_impl(const std::vector>& variants) const; }; + template <> + inline bool FrontEnd::supported(const std::vector>& variants) const + { + return supported_impl(variants); + } + } // namespace frontend } // namespace ngraph diff --git a/ngraph/frontend/frontend_manager/include/frontend_manager/frontend_manager.hpp b/ngraph/frontend/frontend_manager/include/frontend_manager/frontend_manager.hpp index 764931ea9a89b9..2b92a6386b5552 100644 --- a/ngraph/frontend/frontend_manager/include/frontend_manager/frontend_manager.hpp +++ b/ngraph/frontend/frontend_manager/include/frontend_manager/frontend_manager.hpp @@ -8,36 +8,14 @@ #include #include "frontend.hpp" #include "frontend_manager_defs.hpp" +#include "ngraph/variant.hpp" namespace ngraph { namespace frontend { - /// Capabilities for requested FrontEnd - /// In general, frontend implementation may be divided into several libraries by capability - /// level It will allow faster load of frontend when only limited usage is expected by - /// client application as well as binary size can be minimized by removing not needed parts - /// from application's package - namespace FrontEndCapabilities - { - /// \brief Just reading and conversion, w/o any modifications; intended to be used in - /// Reader - static const int FEC_DEFAULT = 0; - - /// \brief Topology cutting capability - static const int FEC_CUT = 1; - - /// \brief Query entities by names, renaming and adding new names for operations and - /// tensors - static const int FEC_NAMES = 2; - - /// \brief Partial model conversion and decoding capability - static const int FEC_WILDCARDS = 4; - }; // namespace FrontEndCapabilities - // -------------- FrontEndManager ----------------- - using FrontEndCapFlags = int; - using FrontEndFactory = std::function; + using FrontEndFactory = std::function; /// \brief Frontend management class, loads available frontend plugins on construction /// Allows load of frontends for particular framework, register new and list available @@ -62,26 +40,22 @@ namespace ngraph /// \param framework Framework name. Throws exception if name is not in list of /// available frontends /// - /// \param fec Frontend capabilities. It is recommended to use only - /// those capabilities which are needed to minimize load time - /// /// \return Frontend interface for further loading of models - FrontEnd::Ptr - load_by_framework(const std::string& framework, - FrontEndCapFlags fec = FrontEndCapabilities::FEC_DEFAULT); + FrontEnd::Ptr load_by_framework(const std::string& framework); - /// \brief Loads frontend by model file path. Selects and loads appropriate frontend - /// depending on model file extension and other file info (header) + /// \brief Loads frontend by model fragments described by each FrontEnd documentation. + /// Selects and loads appropriate frontend depending on model file extension and other + /// file info (header) /// /// \param framework /// Framework name. Throws exception if name is not in list of available frontends /// - /// \param fec Frontend capabilities. 
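// Illustrative sketch (not part of the patch): how client code is expected to drive the new
// variadic API once the capability flags are gone; it mirrors the FrontEndManager fallback
// added to ie_network_reader.cpp earlier in this patch. "path" is whatever the caller passes
// (a model file path here); every argument given to supported(), load() and load_by_model()
// is wrapped into an ngraph::Variant by make_variant(), which is why the char-array
// overloads added in variant.hpp above exist.
#include <memory>
#include <string>
#include <frontend_manager/frontend_manager.hpp>

std::shared_ptr<ngraph::Function> read_with_frontend_manager(const std::string& path)
{
    ngraph::frontend::FrontEndManager fem;
    // Probe every registered frontend with the given arguments;
    // fem.load_by_framework("pdpd") would instead select a frontend by name.
    ngraph::frontend::FrontEnd::Ptr fe = fem.load_by_model(path);
    if (!fe || !fe->supported(path))
        return nullptr;
    ngraph::frontend::InputModel::Ptr input_model = fe->load(path);
    return fe->convert(input_model);
}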
It is recommended to use only those capabilities - /// which are needed to minimize load time - /// /// \return Frontend interface for further loading of model - FrontEnd::Ptr load_by_model(const std::string& path, - FrontEndCapFlags fec = FrontEndCapabilities::FEC_DEFAULT); + template + FrontEnd::Ptr load_by_model(const Types&... vars) + { + return load_by_model_impl({make_variant(vars)...}); + } /// \brief Gets list of registered frontends std::vector get_available_front_ends() const; @@ -97,6 +71,8 @@ namespace ngraph private: class Impl; + FrontEnd::Ptr load_by_model_impl(const std::vector>& variants); + std::unique_ptr m_impl; }; @@ -119,4 +95,31 @@ namespace ngraph } // namespace frontend + template <> + class FRONTEND_API VariantWrapper> + : public VariantImpl> + { + public: + static constexpr VariantTypeInfo type_info{"Variant::std::shared_ptr", 0}; + const VariantTypeInfo& get_type_info() const override { return type_info; } + VariantWrapper(const value_type& value) + : VariantImpl(value) + { + } + }; + +#if defined(ENABLE_UNICODE_PATH_SUPPORT) && defined(_WIN32) + template <> + class FRONTEND_API VariantWrapper : public VariantImpl + { + public: + static constexpr VariantTypeInfo type_info{"Variant::std::wstring", 0}; + const VariantTypeInfo& get_type_info() const override { return type_info; } + VariantWrapper(const value_type& value) + : VariantImpl(value) + { + } + }; +#endif + } // namespace ngraph diff --git a/ngraph/frontend/frontend_manager/src/frontend_manager.cpp b/ngraph/frontend/frontend_manager/src/frontend_manager.cpp index 037a2522523dc4..95dfe1ccbdedf5 100644 --- a/ngraph/frontend/frontend_manager/src/frontend_manager.cpp +++ b/ngraph/frontend/frontend_manager/src/frontend_manager.cpp @@ -8,6 +8,7 @@ #include "frontend_manager/frontend_exceptions.hpp" #include "frontend_manager/frontend_manager.hpp" #include "plugin_loader.hpp" +#include "utils.hpp" using namespace ngraph; using namespace ngraph::frontend; @@ -23,11 +24,11 @@ class FrontEndManager::Impl ~Impl() = default; - FrontEnd::Ptr loadByFramework(const std::string& framework, FrontEndCapFlags fec) + FrontEnd::Ptr loadByFramework(const std::string& framework) { FRONT_END_INITIALIZATION_CHECK( m_factories.count(framework), "FrontEnd for Framework ", framework, " is not found"); - return m_factories[framework](fec); + return m_factories[framework](); } std::vector availableFrontEnds() const @@ -42,9 +43,17 @@ class FrontEndManager::Impl return keys; } - FrontEnd::Ptr loadByModel(const std::string& path, FrontEndCapFlags fec) + FrontEnd::Ptr loadByModel(const std::vector>& variants) { - FRONT_END_NOT_IMPLEMENTED(loadByModel); + for (const auto& factory : m_factories) + { + auto FE = factory.second(); + if (FE->supported(variants)) + { + return FE; + } + } + return FrontEnd::Ptr(); } void registerFrontEnd(const std::string& name, FrontEndFactory creator) @@ -81,7 +90,7 @@ class FrontEndManager::Impl } else { - registerFromDir("."); + registerFromDir(getFrontendLibraryPath()); } } }; @@ -96,14 +105,15 @@ FrontEndManager& FrontEndManager::operator=(FrontEndManager&&) = default; FrontEndManager::~FrontEndManager() = default; -FrontEnd::Ptr FrontEndManager::load_by_framework(const std::string& framework, FrontEndCapFlags fec) +FrontEnd::Ptr FrontEndManager::load_by_framework(const std::string& framework) { - return m_impl->loadByFramework(framework, fec); + return m_impl->loadByFramework(framework); } -FrontEnd::Ptr FrontEndManager::load_by_model(const std::string& path, FrontEndCapFlags fec) +FrontEnd::Ptr + 
FrontEndManager::load_by_model_impl(const std::vector>& variants) { - return m_impl->loadByModel(path, fec); + return m_impl->loadByModel(variants); } std::vector FrontEndManager::get_available_front_ends() const @@ -122,37 +132,15 @@ FrontEnd::FrontEnd() = default; FrontEnd::~FrontEnd() = default; -InputModel::Ptr FrontEnd::load_from_file(const std::string& path) const -{ - FRONT_END_NOT_IMPLEMENTED(load_from_file); -} - -InputModel::Ptr FrontEnd::load_from_files(const std::vector& paths) const -{ - FRONT_END_NOT_IMPLEMENTED(load_from_files); -} - -InputModel::Ptr FrontEnd::load_from_memory(const void* model) const +bool FrontEnd::supported_impl(const std::vector>& variants) const { - FRONT_END_NOT_IMPLEMENTED(load_from_memory); + return false; } -InputModel::Ptr - FrontEnd::load_from_memory_fragments(const std::vector& modelParts) const +InputModel::Ptr FrontEnd::load_impl(const std::vector>& params) const { - FRONT_END_NOT_IMPLEMENTED(load_from_memory_fragments); + FRONT_END_NOT_IMPLEMENTED(load_impl); } - -InputModel::Ptr FrontEnd::load_from_stream(std::istream& path) const -{ - FRONT_END_NOT_IMPLEMENTED(load_from_stream); -} - -InputModel::Ptr FrontEnd::load_from_streams(const std::vector& paths) const -{ - FRONT_END_NOT_IMPLEMENTED(load_from_streams); -} - std::shared_ptr FrontEnd::convert(InputModel::Ptr model) const { FRONT_END_NOT_IMPLEMENTED(convert); @@ -422,3 +410,9 @@ Place::Ptr Place::get_source_tensor(int inputPortIndex) const { FRONT_END_NOT_IMPLEMENTED(get_source_tensor); } + +constexpr VariantTypeInfo VariantWrapper>::type_info; + +#if defined(ENABLE_UNICODE_PATH_SUPPORT) && defined(_WIN32) +constexpr VariantTypeInfo VariantWrapper::type_info; +#endif diff --git a/ngraph/frontend/frontend_manager/src/utils.cpp b/ngraph/frontend/frontend_manager/src/utils.cpp new file mode 100644 index 00000000000000..e940512e6e7872 --- /dev/null +++ b/ngraph/frontend/frontend_manager/src/utils.cpp @@ -0,0 +1,68 @@ +// Copyright (C) 2018-2021 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#include "utils.hpp" +#include "frontend_manager/frontend_exceptions.hpp" +#include "plugin_loader.hpp" + +#ifndef _WIN32 +#include +#include +#include +#ifdef ENABLE_UNICODE_PATH_SUPPORT +#include +#include +#endif +#else +#if defined(WINAPI_FAMILY) && !WINAPI_PARTITION_DESKTOP +#error "Only WINAPI_PARTITION_DESKTOP is supported, because of GetModuleHandleEx[A|W]" +#endif +#ifndef NOMINMAX +#define NOMINMAX +#endif +#include +#endif + +namespace +{ + std::string getPathName(const std::string& s) + { + size_t i = s.rfind(FileSeparator, s.length()); + if (i != std::string::npos) + { + return (s.substr(0, i)); + } + + return {}; + } + +} // namespace + +static std::string _getFrontendLibraryPath() +{ +#ifdef _WIN32 + CHAR ie_library_path[MAX_PATH]; + HMODULE hm = NULL; + if (!GetModuleHandleExA(GET_MODULE_HANDLE_EX_FLAG_FROM_ADDRESS | + GET_MODULE_HANDLE_EX_FLAG_UNCHANGED_REFCOUNT, + reinterpret_cast(ngraph::frontend::getFrontendLibraryPath), + &hm)) + { + FRONT_END_INITIALIZATION_CHECK(false, "GetModuleHandle returned ", GetLastError()); + } + GetModuleFileNameA(hm, (LPSTR)ie_library_path, sizeof(ie_library_path)); + return getPathName(std::string(ie_library_path)); +#elif defined(__APPLE__) || defined(__linux__) + Dl_info info; + dladdr(reinterpret_cast(ngraph::frontend::getFrontendLibraryPath), &info); + return getPathName(std::string(info.dli_fname)).c_str(); +#else +#error "Unsupported OS" +#endif // _WIN32 +} + +std::string ngraph::frontend::getFrontendLibraryPath() +{ + return 
_getFrontendLibraryPath(); +} diff --git a/ngraph/frontend/frontend_manager/src/utils.hpp b/ngraph/frontend/frontend_manager/src/utils.hpp new file mode 100644 index 00000000000000..26d6f5273c30e4 --- /dev/null +++ b/ngraph/frontend/frontend_manager/src/utils.hpp @@ -0,0 +1,14 @@ +// Copyright (C) 2018-2021 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#include +#include "frontend_manager/frontend_manager_defs.hpp" + +namespace ngraph +{ + namespace frontend + { + FRONTEND_API std::string getFrontendLibraryPath(); + } // namespace frontend +} // namespace ngraph diff --git a/ngraph/frontend/paddlepaddle/include/paddlepaddle_frontend/frontend.hpp b/ngraph/frontend/paddlepaddle/include/paddlepaddle_frontend/frontend.hpp index 566ea9dd910cbc..410068b2e26fcc 100644 --- a/ngraph/frontend/paddlepaddle/include/paddlepaddle_frontend/frontend.hpp +++ b/ngraph/frontend/paddlepaddle/include/paddlepaddle_frontend/frontend.hpp @@ -14,44 +14,33 @@ namespace ngraph { class PDPD_API FrontEndPDPD : public FrontEnd { - static std::shared_ptr - convert_model(const std::shared_ptr& model); - public: FrontEndPDPD() = default; - /** - * @brief Reads model from file and deducts file names of weights - * @param path path to folder which contains __model__ file or path to .pdmodel file - * @return InputModel::Ptr - */ - InputModel::Ptr load_from_file(const std::string& path) const override; - - /** - * @brief Reads model and weights from files - * @param paths vector containing path to .pdmodel and .pdiparams files - * @return InputModel::Ptr - */ - InputModel::Ptr load_from_files(const std::vector& paths) const override; - - /** - * @brief Reads model from stream - * @param model_stream stream containing .pdmodel or __model__ files. Can only be used - * if model have no weights - * @return InputModel::Ptr - */ - InputModel::Ptr load_from_stream(std::istream& model_stream) const override; - - /** - * @brief Reads model from stream - * @param paths vector of streams containing .pdmodel and .pdiparams files. Can't be - * used in case of multiple weight files - * @return InputModel::Ptr - */ + /// \brief Completely convert the remaining, not converted part of a function. 
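// Illustrative sketch (not part of the patch): the minimal shape of a frontend built on the
// new protected supported_impl()/load_impl() hooks that replace the load_from_* overloads.
// "TrivialFrontEnd" and "TrivialInputModel" are invented names; a real frontend would parse
// the model inside load_impl(), as FrontEndPDPD does below.
#include <memory>
#include <vector>
#include <frontend_manager/frontend_manager.hpp>
#include <ngraph/variant.hpp>

class TrivialInputModel : public ngraph::frontend::InputModel
{
    // Relies on the base-class defaults, which throw for unimplemented queries.
};

class TrivialFrontEnd : public ngraph::frontend::FrontEnd
{
protected:
    bool supported_impl(
        const std::vector<std::shared_ptr<ngraph::Variant>>& variants) const override
    {
        // Recognize exactly one std::string argument, e.g. a model path.
        return variants.size() == 1 &&
               ngraph::is_type<ngraph::VariantWrapper<std::string>>(variants[0]);
    }

    ngraph::frontend::InputModel::Ptr load_impl(
        const std::vector<std::shared_ptr<ngraph::Variant>>& variants) const override
    {
        return std::make_shared<TrivialInputModel>();
    }
};

// Registration now takes a parameter-less factory:
//     fem.register_front_end("trivial", []() { return std::make_shared<TrivialFrontEnd>(); });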
+ /// \param model Input model to be fully converted + /// \return fully converted nGraph function + std::shared_ptr convert(InputModel::Ptr model) const override; + + protected: + /// \brief Check if FrontEndPDPD can recognize model from given parts + /// \param variants Can be a path to a folder which contains the __model__ file or a path + /// to a .pdmodel file + /// \return true if the model format is recognized, false otherwise + bool supported_impl( + const std::vector>& variants) const override; + + /// \brief Reads a model from 1 or 2 given file names, or from 1 or 2 std::istream + /// objects containing the model in protobuf format and the weights + /// \param params Can contain a path to a folder with the __model__ file, a path to a + /// .pdmodel file, or 1 or 2 streams with the model and weights + /// \return InputModel::Ptr InputModel::Ptr - load_from_streams(const std::vector& paths) const override; + load_impl(const std::vector>& params) const override; - std::shared_ptr convert(InputModel::Ptr model) const override; + private: + static std::shared_ptr + convert_model(const std::shared_ptr& model); }; } // namespace frontend diff --git a/ngraph/frontend/paddlepaddle/include/paddlepaddle_frontend/model.hpp b/ngraph/frontend/paddlepaddle/include/paddlepaddle_frontend/model.hpp index ddf63fd97e5630..1ab63ef6d10a9b 100644 --- a/ngraph/frontend/paddlepaddle/include/paddlepaddle_frontend/model.hpp +++ b/ngraph/frontend/paddlepaddle/include/paddlepaddle_frontend/model.hpp @@ -13,7 +13,6 @@ namespace ngraph { class OpPlacePDPD; class TensorPlacePDPD; - class PDPD_API InputModelPDPD : public InputModel { friend class FrontEndPDPD; @@ -26,6 +25,9 @@ namespace ngraph public: explicit InputModelPDPD(const std::string& path); +#if defined(ENABLE_UNICODE_PATH_SUPPORT) && defined(_WIN32) + explicit InputModelPDPD(const std::wstring& path); +#endif explicit InputModelPDPD(const std::vector& streams); std::vector get_inputs() const override; std::vector get_outputs() const override; diff --git a/ngraph/frontend/paddlepaddle/src/frontend.cpp b/ngraph/frontend/paddlepaddle/src/frontend.cpp index 51b87025495887..0995725b10fb24 100644 --- a/ngraph/frontend/paddlepaddle/src/frontend.cpp +++ b/ngraph/frontend/paddlepaddle/src/frontend.cpp @@ -2,31 +2,26 @@ // SPDX-License-Identifier: Apache-2.0 // -#include -#include #include #include -#include -#include #include -#include #include #include "framework.pb.h" +#include #include #include #include #include #include +#include -#include #include "decoder.hpp" #include "node_context.hpp" #include "op_table.hpp" - -#include +#include "pdpd_utils.hpp" #include "frontend_manager/frontend_manager.hpp" @@ -67,8 +62,45 @@ namespace ngraph } } - return CREATORS_MAP.at(op->type())( - NodeContext(DecoderPDPDProto(op_place), named_inputs)); + try + { + return CREATORS_MAP.at(op->type())( + NodeContext(DecoderPDPDProto(op_place), named_inputs)); + } + catch (...)
+ { + // TODO: define exception types + // In case of partial conversion we need to create generic ngraph op here + return NamedOutputs(); + } + } + + std::istream* variant_to_stream_ptr(const std::shared_ptr& variant, + std::ifstream& ext_stream) + { + if (is_type>>(variant)) + { + auto m_stream = + as_type_ptr>>(variant)->get(); + return m_stream.get(); + } + else if (is_type>(variant)) + { + const auto& model_path = + as_type_ptr>(variant)->get(); + ext_stream.open(model_path, std::ios::in | std::ifstream::binary); + } +#if defined(ENABLE_UNICODE_PATH_SUPPORT) && defined(_WIN32) + else if (is_type>(variant)) + { + const auto& model_path = + as_type_ptr>(variant)->get(); + ext_stream.open(model_path, std::ios::in | std::ifstream::binary); + } +#endif + FRONT_END_INITIALIZATION_CHECK(ext_stream && ext_stream.is_open(), + "Cannot open model file."); + return &ext_stream; } } // namespace pdpd @@ -91,6 +123,7 @@ namespace ngraph const auto& type = inp_place->getElementType(); auto param = std::make_shared(type, shape); param->set_friendly_name(var->name()); + param->output(0).get_tensor().add_names({var->name()}); nodes_dict[var->name()] = param; parameter_nodes.push_back(param); } @@ -155,41 +188,102 @@ namespace ngraph return std::make_shared(result_nodes, parameter_nodes); } - InputModel::Ptr FrontEndPDPD::load_from_file(const std::string& path) const + bool FrontEndPDPD::supported_impl( + const std::vector>& variants) const { - return load_from_files({path}); - } + // FrontEndPDPD can only load model specified by one path, one file or two files. + if (variants.empty() || variants.size() > 2) + return false; - InputModel::Ptr FrontEndPDPD::load_from_files(const std::vector& paths) const - { - if (paths.size() == 1) + // Validating first path, it must contain a model + if (is_type>(variants[0])) { - // The case when folder with __model__ and weight files is provided or .pdmodel file - return std::make_shared(paths[0]); + std::string suffix = ".pdmodel"; + std::string model_path = + as_type_ptr>(variants[0])->get(); + if (!pdpd::endsWith(model_path, suffix)) + { + model_path += pdpd::get_path_sep() + "__model__"; + } + std::ifstream model_str(model_path, std::ios::in | std::ifstream::binary); + // It is possible to validate here that protobuf can read model from the stream, + // but it will complicate the check, while it should be as quick as possible + return model_str && model_str.is_open(); } - else if (paths.size() == 2) +#if defined(ENABLE_UNICODE_PATH_SUPPORT) && defined(_WIN32) + else if (is_type>(variants[0])) { - // The case when .pdmodel and .pdparams files are provided - std::ifstream model_stream(paths[0], std::ios::in | std::ifstream::binary); - FRONT_END_INITIALIZATION_CHECK(model_stream && model_stream.is_open(), - "Cannot open model file."); - std::ifstream weights_stream(paths[1], std::ios::in | std::ifstream::binary); - FRONT_END_INITIALIZATION_CHECK(weights_stream && weights_stream.is_open(), - "Cannot open weights file."); - return load_from_streams({&model_stream, &weights_stream}); + std::wstring suffix = L".pdmodel"; + std::wstring model_path = + as_type_ptr>(variants[0])->get(); + if (!pdpd::endsWith(model_path, suffix)) + { + model_path += pdpd::get_path_sep() + L"__model__"; + } + std::ifstream model_str(model_path, std::ios::in | std::ifstream::binary); + // It is possible to validate here that protobuf can read model from the stream, + // but it will complicate the check, while it should be as quick as possible + return model_str && model_str.is_open(); } - 
FRONT_END_INITIALIZATION_CHECK(false, "Model can be loaded either from 1 or 2 files"); - } - - InputModel::Ptr FrontEndPDPD::load_from_stream(std::istream& model_stream) const - { - return load_from_streams({&model_stream}); +#endif + else if (is_type>>(variants[0])) + { + // Validating first stream, it must contain a model + std::shared_ptr p_model_stream = + as_type_ptr>>(variants[0])->get(); + paddle::framework::proto::ProgramDesc fw; + return fw.ParseFromIstream(p_model_stream.get()); + } + return false; } InputModel::Ptr - FrontEndPDPD::load_from_streams(const std::vector& streams) const + FrontEndPDPD::load_impl(const std::vector>& variants) const { - return std::make_shared(streams); + if (variants.size() == 1) + { + // The case when folder with __model__ and weight files is provided or .pdmodel file + if (is_type>(variants[0])) + { + std::string m_path = + as_type_ptr>(variants[0])->get(); + return std::make_shared(m_path); + } +#if defined(ENABLE_UNICODE_PATH_SUPPORT) && defined(_WIN32) + else if (is_type>(variants[0])) + { + std::wstring m_path = + as_type_ptr>(variants[0])->get(); + return std::make_shared(m_path); + } +#endif + // The case with only model stream provided and no weights. This means model has + // no learnable weights + else if (is_type>>(variants[0])) + { + std::shared_ptr p_model_stream = + as_type_ptr>>(variants[0]) + ->get(); + return std::make_shared( + std::vector{p_model_stream.get()}); + } + } + else if (variants.size() == 2) + { + // The case when .pdmodel and .pdparams files are provided + std::ifstream model_stream; + std::ifstream weights_stream; + std::istream* p_model_stream = + pdpd::variant_to_stream_ptr(variants[0], model_stream); + std::istream* p_weights_stream = + pdpd::variant_to_stream_ptr(variants[1], weights_stream); + if (p_model_stream && p_weights_stream) + { + return std::make_shared( + std::vector{p_model_stream, p_weights_stream}); + } + } + PDPD_THROW("Model can be loaded either from 1 or 2 files/streams"); } std::shared_ptr FrontEndPDPD::convert(InputModel::Ptr model) const @@ -211,6 +305,6 @@ extern "C" PDPD_API void* GetFrontEndData() { FrontEndPluginInfo* res = new FrontEndPluginInfo(); res->m_name = "pdpd"; - res->m_creator = [](FrontEndCapFlags) { return std::make_shared(); }; + res->m_creator = []() { return std::make_shared(); }; return res; } \ No newline at end of file diff --git a/ngraph/frontend/paddlepaddle/src/model.cpp b/ngraph/frontend/paddlepaddle/src/model.cpp index f0d8c859623aa1..3315dc7e3a9b96 100644 --- a/ngraph/frontend/paddlepaddle/src/model.cpp +++ b/ngraph/frontend/paddlepaddle/src/model.cpp @@ -11,6 +11,12 @@ #include "decoder.hpp" #include "framework.pb.h" #include "node_context.hpp" +#include "pdpd_utils.hpp" + +#if defined(ENABLE_UNICODE_PATH_SUPPORT) && defined(_WIN32) +#include +#include +#endif namespace ngraph { @@ -21,7 +27,8 @@ namespace ngraph class InputModelPDPD::InputModelPDPDImpl { public: - InputModelPDPDImpl(const std::string& path, const InputModel& input_model); + template + InputModelPDPDImpl(const std::basic_string& path, const InputModel& input_model); InputModelPDPDImpl(const std::vector& streams, const InputModel& input_model); std::vector getInputs() const; @@ -37,7 +44,6 @@ namespace ngraph void setElementType(Place::Ptr place, const ngraph::element::Type&); void setTensorValue(Place::Ptr place, const void* value); - std::vector readWeight(const std::string& name, int64_t len); std::vector> getOpPlaces() const { return m_op_places; } std::map> getVarPlaces() const { @@ -50,7 +56,9 
@@ namespace ngraph private: void loadPlaces(); - void loadConsts(std::string folder_with_weights, std::istream* weight_stream); + template + void loadConsts(const std::basic_string& folder_with_weights, + std::istream* weight_stream); std::vector> m_op_places; std::map> m_var_places; @@ -142,16 +150,6 @@ namespace ngraph namespace pdpd { - bool endsWith(const std::string& str, const std::string& suffix) - { - if (str.length() >= suffix.length()) - { - return (0 == - str.compare(str.length() - suffix.length(), suffix.length(), suffix)); - } - return false; - } - void read_tensor(std::istream& is, char* data, size_t len) { std::vector header(16); @@ -163,16 +161,81 @@ namespace ngraph is.read(data, len); } + template + std::basic_string get_const_path(const std::basic_string& folder_with_weights, + const std::string& name) + { + return folder_with_weights + pdpd::get_path_sep() + name; + } + +#if defined(ENABLE_UNICODE_PATH_SUPPORT) && defined(_WIN32) + template <> + std::basic_string get_const_path(const std::basic_string& folder, + const std::string& name) + { + std::wstring_convert> converter; + std::wstring _name = converter.from_bytes(name); + return folder + pdpd::get_path_sep() + _name; + } +#endif + + template + std::basic_string get_model_path(const std::basic_string& path, + std::ifstream* weights_stream) + { + std::string model_file{path}; + std::string ext = ".pdmodel"; + if (pdpd::endsWith(model_file, ext)) + { + std::string params_ext = ".pdiparams"; + std::string weights_file{path}; + weights_file.replace(weights_file.size() - ext.size(), ext.size(), params_ext); + weights_stream->open(weights_file, std::ios::binary); + // Don't throw error if file isn't opened + // It may mean that model don't have constants + } + else + { + model_file += pdpd::get_path_sep() + "__model__"; + } + return model_file; + } + +#if defined(ENABLE_UNICODE_PATH_SUPPORT) && defined(_WIN32) + template <> + std::basic_string get_model_path(const std::basic_string& path, + std::ifstream* weights_stream) + { + std::wstring model_file{path}; + std::wstring ext = L".pdmodel"; + if (pdpd::endsWith(model_file, ext)) + { + std::wstring params_ext = L".pdiparams"; + std::wstring weights_file{path}; + weights_file.replace(weights_file.size() - ext.size(), ext.size(), params_ext); + weights_stream->open(weights_file, std::ios::binary); + // Don't throw error if file isn't opened + // It may mean that model don't have constants + } + else + { + model_file += pdpd::get_path_sep() + L"__model__"; + } + return model_file; + } +#endif } // namespace pdpd - void InputModelPDPD::InputModelPDPDImpl::loadConsts(std::string folder_with_weights, - std::istream* weight_stream) + template + void InputModelPDPD::InputModelPDPDImpl::loadConsts( + const std::basic_string& folder_with_weights, std::istream* weight_stream) { for (const auto& item : m_var_places) { const auto& var_desc = item.second->getDesc(); const auto& name = item.first; - if (pdpd::endsWith(name, "feed") || pdpd::endsWith(name, "fetch")) + if (pdpd::endsWith(name, std::string{"feed"}) || + pdpd::endsWith(name, std::string{"fetch"})) continue; if (!var_desc->persistable()) continue; @@ -192,7 +255,7 @@ namespace ngraph } else if (!folder_with_weights.empty()) { - std::ifstream is(folder_with_weights + "/" + name, + std::ifstream is(pdpd::get_const_path(folder_with_weights, name), std::ios::in | std::ifstream::binary); FRONT_END_GENERAL_CHECK(is && is.is_open(), "Cannot open file for constant value."); @@ -210,35 +273,24 @@ namespace ngraph } } - 
InputModelPDPD::InputModelPDPDImpl::InputModelPDPDImpl(const std::string& path, + template + InputModelPDPD::InputModelPDPDImpl::InputModelPDPDImpl(const std::basic_string& path, const InputModel& input_model) : m_fw_ptr{std::make_shared()} , m_input_model(input_model) { - std::string ext = ".pdmodel"; - std::string model_file(path); - std::unique_ptr weights_stream; - if (model_file.length() >= ext.length() && - (0 == model_file.compare(model_file.length() - ext.length(), ext.length(), ext))) - { - std::string weights_file(path); - weights_file.replace(weights_file.size() - ext.size(), ext.size(), ".pdiparams"); - weights_stream = std::unique_ptr( - new std::ifstream(weights_file, std::ios::binary)); - // Don't throw error if file isn't opened - // It may mean that model don't have constants - } - else - { - model_file += "/__model__"; - } + std::string empty_str = ""; + std::ifstream weights_stream; + std::ifstream pb_stream(pdpd::get_model_path(path, &weights_stream), + std::ios::in | std::ifstream::binary); - std::ifstream pb_stream(model_file, std::ios::binary); + FRONT_END_GENERAL_CHECK(pb_stream && pb_stream.is_open(), "Model file doesn't exist"); FRONT_END_GENERAL_CHECK(m_fw_ptr->ParseFromIstream(&pb_stream), "Model can't be parsed"); loadPlaces(); - loadConsts(weights_stream ? "" : path, weights_stream.get()); + loadConsts(weights_stream && weights_stream.is_open() ? std::basic_string{} : path, + &weights_stream); } InputModelPDPD::InputModelPDPDImpl::InputModelPDPDImpl( @@ -257,7 +309,7 @@ namespace ngraph loadPlaces(); if (streams.size() > 1) - loadConsts("", streams[1]); + loadConsts(std::string{""}, streams[1]); } std::vector InputModelPDPD::InputModelPDPDImpl::getInputs() const @@ -367,6 +419,13 @@ namespace ngraph { } +#if defined(ENABLE_UNICODE_PATH_SUPPORT) && defined(_WIN32) + InputModelPDPD::InputModelPDPD(const std::wstring& path) + : _impl{std::make_shared(path, *this)} + { + } +#endif + InputModelPDPD::InputModelPDPD(const std::vector& streams) : _impl{std::make_shared(streams, *this)} { diff --git a/ngraph/frontend/paddlepaddle/src/op/scale.cpp b/ngraph/frontend/paddlepaddle/src/op/scale.cpp index 27d87e18c205c3..bfc7637dd877c1 100644 --- a/ngraph/frontend/paddlepaddle/src/op/scale.cpp +++ b/ngraph/frontend/paddlepaddle/src/op/scale.cpp @@ -32,12 +32,12 @@ namespace ngraph } else { - scale = builder::make_constant( - dtype, Shape{1}, node.get_attribute("scale")); + auto scale_val = node.get_attribute("scale"); + scale = ngraph::opset6::Constant::create(dtype, Shape{1}, {scale_val}); } - bias = - builder::make_constant(dtype, Shape{1}, node.get_attribute("bias")); + auto bias_val = node.get_attribute("bias"); + bias = ngraph::opset6::Constant::create(dtype, Shape{1}, {bias_val}); auto bias_after_scale = node.get_attribute("bias_after_scale"); std::shared_ptr result_node; diff --git a/ngraph/frontend/paddlepaddle/src/pdpd_utils.hpp b/ngraph/frontend/paddlepaddle/src/pdpd_utils.hpp new file mode 100644 index 00000000000000..80170b5edfa10e --- /dev/null +++ b/ngraph/frontend/paddlepaddle/src/pdpd_utils.hpp @@ -0,0 +1,51 @@ +// Copyright (C) 2018-2021 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#pragma once + +#include "frontend_manager/frontend_exceptions.hpp" + +namespace ngraph +{ + namespace frontend + { + namespace pdpd + { +#ifdef _WIN32 + const char PATH_SEPARATOR = '\\'; +#if defined(ENABLE_UNICODE_PATH_SUPPORT) + const wchar_t WPATH_SEPARATOR = L'\\'; +#endif +#else + const char PATH_SEPARATOR = '/'; +#endif + + template + inline 
std::basic_string get_path_sep() + { + return std::basic_string{PATH_SEPARATOR}; + } + +#if defined(ENABLE_UNICODE_PATH_SUPPORT) && defined(_WIN32) + template <> + inline std::basic_string get_path_sep() + { + return std::basic_string{WPATH_SEPARATOR}; + } +#endif + + template + bool endsWith(const std::basic_string& str, const std::basic_string& suffix) + { + if (str.length() >= suffix.length()) + { + return (0 == + str.compare(str.length() - suffix.length(), suffix.length(), suffix)); + } + return false; + } + + } // namespace pdpd + } // namespace frontend +} // namespace ngraph \ No newline at end of file diff --git a/ngraph/python/src/ngraph/__init__.py b/ngraph/python/src/ngraph/__init__.py index 8647878d8dff5f..950a05e4edec41 100644 --- a/ngraph/python/src/ngraph/__init__.py +++ b/ngraph/python/src/ngraph/__init__.py @@ -17,7 +17,6 @@ from ngraph.impl import Node from ngraph.impl import PartialShape from ngraph.frontend import FrontEnd -from ngraph.frontend import FrontEndCapabilities from ngraph.frontend import FrontEndManager from ngraph.frontend import GeneralFailure from ngraph.frontend import NotImplementedFailure diff --git a/ngraph/python/src/ngraph/frontend/__init__.py b/ngraph/python/src/ngraph/frontend/__init__.py index 0ea21ad7c8827b..72bd47445afd92 100644 --- a/ngraph/python/src/ngraph/frontend/__init__.py +++ b/ngraph/python/src/ngraph/frontend/__init__.py @@ -11,7 +11,6 @@ # main classes from _pyngraph import FrontEndManager from _pyngraph import FrontEnd -from _pyngraph import FrontEndCapabilities from _pyngraph import InputModel from _pyngraph import Place diff --git a/ngraph/python/src/pyngraph/frontend/frontend.cpp b/ngraph/python/src/pyngraph/frontend/frontend.cpp index ecc736b37f0269..eb723aded423d8 100644 --- a/ngraph/python/src/pyngraph/frontend/frontend.cpp +++ b/ngraph/python/src/pyngraph/frontend/frontend.cpp @@ -19,10 +19,11 @@ void regclass_pyngraph_FrontEnd(py::module m) m, "FrontEnd", py::dynamic_attr()); fem.doc() = "ngraph.impl.FrontEnd wraps ngraph::frontend::FrontEnd"; - fem.def("load_from_file", - &ngraph::frontend::FrontEnd::load_from_file, - py::arg("path"), - R"( + fem.def( + "load", + [](ngraph::frontend::FrontEnd& self, const std::string& s) { return self.load(s); }, + py::arg("path"), + R"( Loads an input model by specified model file path. Parameters @@ -32,7 +33,7 @@ void regclass_pyngraph_FrontEnd(py::module m) Returns ---------- - load_from_file : InputModel + load : InputModel Loaded input model. )"); diff --git a/ngraph/python/src/pyngraph/frontend/frontend_manager.cpp b/ngraph/python/src/pyngraph/frontend/frontend_manager.cpp index 15f5a046a99993..dc0475ee973dab 100644 --- a/ngraph/python/src/pyngraph/frontend/frontend_manager.cpp +++ b/ngraph/python/src/pyngraph/frontend/frontend_manager.cpp @@ -38,7 +38,6 @@ void regclass_pyngraph_FrontEndManager(py::module m) fem.def("load_by_framework", &ngraph::frontend::FrontEndManager::load_by_framework, py::arg("framework"), - py::arg("capabilities") = ngraph::frontend::FrontEndCapabilities::FEC_DEFAULT, R"( Loads frontend by name of framework and capabilities. @@ -47,10 +46,6 @@ void regclass_pyngraph_FrontEndManager(py::module m) framework : str Framework name. Throws exception if name is not in list of available frontends. - capabilities : int - Frontend capabilities. Default is FrontEndCapabilities.FEC_DEFAULT. It is recommended to use only - those capabilities which are needed to minimize load time. 
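// Illustrative sketch (not part of the patch): how the path helpers added in pdpd_utils.hpp
// above compose the default "__model__" location, mirroring what supported_impl() and
// get_model_path() do. The header is internal to the PDPD frontend sources, so this only
// compiles inside that tree; "path" is whatever the caller passed in.
#include <string>
#include "pdpd_utils.hpp"

inline std::string default_model_file(const std::string& path)
{
    using namespace ngraph::frontend;
    if (pdpd::endsWith(path, std::string{".pdmodel"}))
        return path;                                         // explicit .pdmodel file
    return path + pdpd::get_path_sep<char>() + "__model__";  // folder containing __model__
}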
-                Returns
-                ----------
-                load_by_framework : FrontEnd
@@ -58,30 +53,6 @@ void regclass_pyngraph_FrontEndManager(py::module m)
             )");
 }
 
-void regclass_pyngraph_FEC(py::module m)
-{
-    class FeCaps
-    {
-    public:
-        int get_caps() const { return m_caps; }
-
-    private:
-        int m_caps;
-    };
-
-    py::class_<FeCaps, std::shared_ptr<FeCaps>> type(m, "FrontEndCapabilities");
-    // type.doc() = "FrontEndCapabilities";
-    type.attr("DEFAULT") = ngraph::frontend::FrontEndCapabilities::FEC_DEFAULT;
-    type.attr("CUT") = ngraph::frontend::FrontEndCapabilities::FEC_CUT;
-    type.attr("NAMES") = ngraph::frontend::FrontEndCapabilities::FEC_NAMES;
-    type.attr("WILDCARDS") = ngraph::frontend::FrontEndCapabilities::FEC_WILDCARDS;
-
-    type.def(
-        "__eq__",
-        [](const FeCaps& a, const FeCaps& b) { return a.get_caps() == b.get_caps(); },
-        py::is_operator());
-}
-
 void regclass_pyngraph_GeneralFailureFrontEnd(py::module m)
 {
     static py::exception<ngraph::frontend::GeneralFailure> exc(std::move(m), "GeneralFailure");
diff --git a/ngraph/python/src/pyngraph/frontend/frontend_manager.hpp b/ngraph/python/src/pyngraph/frontend/frontend_manager.hpp
index 35caa7e5dd1def..969ddd6859ab88 100644
--- a/ngraph/python/src/pyngraph/frontend/frontend_manager.hpp
+++ b/ngraph/python/src/pyngraph/frontend/frontend_manager.hpp
@@ -9,7 +9,6 @@
 namespace py = pybind11;
 
 void regclass_pyngraph_FrontEndManager(py::module m);
-void regclass_pyngraph_FEC(py::module m);
 void regclass_pyngraph_NotImplementedFailureFrontEnd(py::module m);
 void regclass_pyngraph_InitializationFailureFrontEnd(py::module m);
 void regclass_pyngraph_OpConversionFailureFrontEnd(py::module m);
diff --git a/ngraph/python/src/pyngraph/pyngraph.cpp b/ngraph/python/src/pyngraph/pyngraph.cpp
index 0849de45f58554..c401a683654b8b 100644
--- a/ngraph/python/src/pyngraph/pyngraph.cpp
+++ b/ngraph/python/src/pyngraph/pyngraph.cpp
@@ -51,7 +51,6 @@ PYBIND11_MODULE(_pyngraph, m)
     regclass_pyngraph_OpConversionFailureFrontEnd(m);
     regclass_pyngraph_OpValidationFailureFrontEnd(m);
     regclass_pyngraph_NotImplementedFailureFrontEnd(m);
-    regclass_pyngraph_FEC(m);
     regclass_pyngraph_FrontEndManager(m);
     regclass_pyngraph_FrontEnd(m);
     regclass_pyngraph_InputModel(m);
diff --git a/ngraph/python/tests/mock/mock_py_ngraph_frontend/mock_py_frontend.cpp b/ngraph/python/tests/mock/mock_py_ngraph_frontend/mock_py_frontend.cpp
index e060da7563cdff..1377e8ba4aaf50 100644
--- a/ngraph/python/tests/mock/mock_py_ngraph_frontend/mock_py_frontend.cpp
+++ b/ngraph/python/tests/mock/mock_py_ngraph_frontend/mock_py_frontend.cpp
@@ -18,7 +18,7 @@ extern "C" MOCK_API void* GetFrontEndData()
 {
     FrontEndPluginInfo* res = new FrontEndPluginInfo();
     res->m_name = "mock_py";
-    res->m_creator = [](FrontEndCapFlags flags) { return std::make_shared<FrontEndMockPy>(flags); };
+    res->m_creator = []() { return std::make_shared<FrontEndMockPy>(); };
     return res;
 }
diff --git a/ngraph/python/tests/mock/mock_py_ngraph_frontend/mock_py_frontend.hpp b/ngraph/python/tests/mock/mock_py_ngraph_frontend/mock_py_frontend.hpp
index 651e9e53809683..c2654f7d8745ea 100644
--- a/ngraph/python/tests/mock/mock_py_ngraph_frontend/mock_py_frontend.hpp
+++ b/ngraph/python/tests/mock/mock_py_ngraph_frontend/mock_py_frontend.hpp
@@ -479,7 +479,6 @@ class MOCK_API InputModelMockPy : public InputModel
 
 struct MOCK_API FeStat
 {
-    FrontEndCapFlags m_load_flags;
     std::vector<std::string> m_load_paths;
     int m_convert_model = 0;
    int m_convert = 0;
@@ -487,7 +486,6 @@ struct MOCK_API FeStat
     int m_decode = 0;
     int m_normalize = 0;
     // Getters
-    FrontEndCapFlags load_flags() const { return m_load_flags; }
     std::vector<std::string> load_paths() const { return m_load_paths; }
     int convert_model() const { return m_convert_model; }
     int convert() const { return m_convert; }
@@ -501,11 +499,12 @@ class MOCK_API FrontEndMockPy : public FrontEnd
     mutable FeStat m_stat;
 
 public:
-    FrontEndMockPy(FrontEndCapFlags flags) { m_stat.m_load_flags = flags; }
+    FrontEndMockPy() {}
 
-    InputModel::Ptr load_from_file(const std::string& path) const override
+    InputModel::Ptr load_impl(const std::vector<std::shared_ptr<Variant>>& params) const override
     {
-        m_stat.m_load_paths.push_back(path);
+        if (params.size() > 0 && is_type<VariantWrapper<std::string>>(params[0]))
+            m_stat.m_load_paths.push_back(as_type_ptr<VariantWrapper<std::string>>(params[0])->get());
         return std::make_shared<InputModelMockPy>();
     }
 
diff --git a/ngraph/python/tests/mock/pyngraph_fe_mock_api/pyngraph_mock_frontend_api.cpp b/ngraph/python/tests/mock/pyngraph_fe_mock_api/pyngraph_mock_frontend_api.cpp
index ec87842d417330..1927e04b7a3d49 100644
--- a/ngraph/python/tests/mock/pyngraph_fe_mock_api/pyngraph_mock_frontend_api.cpp
+++ b/ngraph/python/tests/mock/pyngraph_fe_mock_api/pyngraph_mock_frontend_api.cpp
@@ -27,7 +27,6 @@ static void register_mock_frontend_stat(py::module m)
             py::arg("frontend"));
 
     py::class_<FeStat> feStat(m, "FeStat", py::dynamic_attr());
-    feStat.def_property_readonly("load_flags", &FeStat::load_flags);
     feStat.def_property_readonly("load_paths", &FeStat::load_paths);
     feStat.def_property_readonly("convert_model", &FeStat::convert_model);
     feStat.def_property_readonly("convert", &FeStat::convert);
diff --git a/ngraph/python/tests/test_ngraph/test_frontendmanager.py b/ngraph/python/tests/test_ngraph/test_frontendmanager.py
index 7ced949dff2b88..51882091fdae2e 100644
--- a/ngraph/python/tests/test_ngraph/test_frontendmanager.py
+++ b/ngraph/python/tests/test_ngraph/test_frontendmanager.py
@@ -4,7 +4,7 @@
 import pickle
 
 from ngraph import PartialShape
-from ngraph.frontend import FrontEndCapabilities, FrontEndManager, InitializationFailure
+from ngraph.frontend import FrontEndManager, InitializationFailure
 from ngraph.utils.types import get_element_type
 
 import numpy as np
@@ -31,28 +31,9 @@ def test_pickle():
     pickle.dumps(fem)
 
 
-@mock_needed
-def test_load_by_framework_caps():
-    frontEnds = fem.get_available_front_ends()
-    assert frontEnds is not None
-    assert "mock_py" in frontEnds
-    caps = [FrontEndCapabilities.DEFAULT,
-            FrontEndCapabilities.CUT,
-            FrontEndCapabilities.NAMES,
-            FrontEndCapabilities.WILDCARDS,
-            FrontEndCapabilities.CUT | FrontEndCapabilities.NAMES | FrontEndCapabilities.WILDCARDS]
-    for cap in caps:
-        fe = fem.load_by_framework(framework="mock_py", capabilities=cap)
-        stat = get_fe_stat(fe)
-        assert cap == stat.load_flags
-    for i in range(len(caps) - 1):
-        for j in range(i + 1, len(caps)):
-            assert caps[i] != caps[j]
-
-
 def test_load_by_unknown_framework():
     frontEnds = fem.get_available_front_ends()
-    assert not("UnknownFramework" in frontEnds)
+    assert not ("UnknownFramework" in frontEnds)
     try:
         fem.load_by_framework("UnknownFramework")
     except InitializationFailure as exc:
@@ -62,10 +43,10 @@ def test_load_by_unknown_framework():
 
 
 @mock_needed
-def test_load_from_file():
+def test_load():
     fe = fem.load_by_framework(framework="mock_py")
     assert fe is not None
-    model = fe.load_from_file("abc.bin")
+    model = fe.load("abc.bin")
     assert model is not None
     stat = get_fe_stat(fe)
     assert "abc.bin" in stat.load_paths
@@ -75,7 +56,7 @@ def test_convert_model():
     fe = fem.load_by_framework(framework="mock_py")
     assert fe is not None
-    model = fe.load_from_file(path="")
+    model = fe.load(path="")
     func = fe.convert(model=model)
     assert func is not None
     stat = get_fe_stat(fe)
@@ -86,7 +67,7 @@ def test_convert_model():
 def test_convert_partially():
     fe = fem.load_by_framework(framework="mock_py")
     assert fe is not None
-    model = fe.load_from_file(path="")
+    model = fe.load(path="")
     func = fe.convert_partially(model=model)
     stat = get_fe_stat(fe)
     assert stat.convert_partially == 1
@@ -99,7 +80,7 @@ def test_convert_partially():
 def test_decode_and_normalize():
     fe = fem.load_by_framework(framework="mock_py")
     assert fe is not None
-    model = fe.load_from_file(path="")
+    model = fe.load(path="")
     func = fe.decode(model=model)
     stat = get_fe_stat(fe)
     assert stat.decode == 1
@@ -113,7 +94,7 @@ def test_decode_and_normalize():
 @mock_needed
 def init_model():
     fe = fem.load_by_framework(framework="mock_py")
-    model = fe.load_from_file(path="")
+    model = fe.load(path="")
     return model
 
 
@@ -379,7 +360,7 @@ def test_model_set_element_type():
 @mock_needed
 def init_place():
     fe = fem.load_by_framework(framework="mock_py")
-    model = fe.load_from_file(path="")
+    model = fe.load(path="")
     place = model.get_place_by_tensor_name(tensorName="")
     return model, place
 
diff --git a/ngraph/test/CMakeLists.txt b/ngraph/test/CMakeLists.txt
index 78e14870aa5dbf..cab60e4ecca57b 100644
--- a/ngraph/test/CMakeLists.txt
+++ b/ngraph/test/CMakeLists.txt
@@ -631,6 +631,7 @@ install(TARGETS unit-test
         EXCLUDE_FROM_ALL)
 
 ############ FRONTEND ############
+target_include_directories(unit-test PRIVATE ${FRONTEND_INCLUDE_PATH} frontend/shared/include)
 target_link_libraries(unit-test PRIVATE frontend_manager cnpy)
 
 add_subdirectory(frontend)
diff --git a/ngraph/test/frontend/frontend_manager.cpp b/ngraph/test/frontend/frontend_manager.cpp
index af70885d237901..218ecded3adf0a 100644
--- a/ngraph/test/frontend/frontend_manager.cpp
+++ b/ngraph/test/frontend/frontend_manager.cpp
@@ -35,7 +35,7 @@ TEST(FrontEndManagerTest, testAvailableFrontEnds)
 {
     FrontEndManager fem;
     ASSERT_NO_THROW(fem.register_front_end(
-        "mock", [](FrontEndCapFlags fec) { return std::make_shared<FrontEnd>(); }));
+        "mock", []() { return std::make_shared<FrontEnd>(); }));
     auto frontends = fem.get_available_front_ends();
     ASSERT_NE(std::find(frontends.begin(), frontends.end(), "mock"), frontends.end());
     FrontEnd::Ptr fe;
@@ -50,26 +50,6 @@ TEST(FrontEndManagerTest, testAvailableFrontEnds)
     ASSERT_EQ(std::find(frontends.begin(), frontends.end(), "mock"), frontends.end());
 }
 
-TEST(FrontEndManagerTest, testLoadWithFlags)
-{
-    int expFlags = FrontEndCapabilities::FEC_CUT | FrontEndCapabilities::FEC_WILDCARDS |
-                   FrontEndCapabilities::FEC_NAMES;
-    int actualFlags = FrontEndCapabilities::FEC_DEFAULT;
-    FrontEndManager fem;
-    ASSERT_NO_THROW(fem.register_front_end("mock", [&actualFlags](int fec) {
-        actualFlags = fec;
-        return std::make_shared<FrontEnd>();
-    }));
-    auto frontends = fem.get_available_front_ends();
-    ASSERT_NE(std::find(frontends.begin(), frontends.end(), "mock"), frontends.end());
-    FrontEnd::Ptr fe;
-    ASSERT_NO_THROW(fe = fem.load_by_framework("mock", expFlags));
-    ASSERT_TRUE(actualFlags & FrontEndCapabilities::FEC_CUT);
-    ASSERT_TRUE(actualFlags & FrontEndCapabilities::FEC_WILDCARDS);
-    ASSERT_TRUE(actualFlags & FrontEndCapabilities::FEC_NAMES);
-    ASSERT_EQ(expFlags, actualFlags);
-}
-
 TEST(FrontEndManagerTest, testMockPluginFrontEnd)
 {
     std::string fePath = ngraph::file_util::get_directory(
@@ -86,17 +66,13 @@ TEST(FrontEndManagerTest, testDefaultFrontEnd)
 {
     FrontEndManager fem;
-    ASSERT_ANY_THROW(fem.load_by_model(""));
+    FrontEnd::Ptr fe;
+    ASSERT_NO_THROW(fe = fem.load_by_model(""));
+    ASSERT_FALSE(fe);
 
     std::unique_ptr<FrontEnd> fePtr(new FrontEnd()); // to verify base destructor
-    FrontEnd::Ptr fe = std::make_shared<FrontEnd>();
-    ASSERT_ANY_THROW(fe->load_from_file(""));
-    ASSERT_ANY_THROW(fe->load_from_files({"", ""}));
-    ASSERT_ANY_THROW(fe->load_from_memory(nullptr));
-    ASSERT_ANY_THROW(fe->load_from_memory_fragments({nullptr, nullptr}));
-    std::stringstream str;
-    ASSERT_ANY_THROW(fe->load_from_stream(str));
-    ASSERT_ANY_THROW(fe->load_from_streams({&str, &str}));
+    fe = std::make_shared<FrontEnd>();
+    ASSERT_ANY_THROW(fe->load(""));
 
     ASSERT_ANY_THROW(fe->convert(std::shared_ptr<ngraph::Function>(nullptr)));
     ASSERT_ANY_THROW(fe->convert(InputModel::Ptr(nullptr)));
     ASSERT_ANY_THROW(fe->convert_partially(nullptr));
diff --git a/ngraph/test/frontend/mock_frontend.cpp b/ngraph/test/frontend/mock_frontend.cpp
index 34c8d420b031fc..bb5fdf105ee092 100644
--- a/ngraph/test/frontend/mock_frontend.cpp
+++ b/ngraph/test/frontend/mock_frontend.cpp
@@ -29,6 +29,6 @@ extern "C" MOCK_API void* GetFrontEndData()
 {
     FrontEndPluginInfo* res = new FrontEndPluginInfo();
     res->m_name = "mock1";
-    res->m_creator = [](FrontEndCapFlags) { return std::make_shared<FrontEndMock>(); };
+    res->m_creator = []() { return std::make_shared<FrontEndMock>(); };
     return res;
 }
\ No newline at end of file
diff --git a/ngraph/test/frontend/paddlepaddle/basic_api.cpp b/ngraph/test/frontend/paddlepaddle/basic_api.cpp
index d191e4fccda3af..a2568000f3184f 100644
--- a/ngraph/test/frontend/paddlepaddle/basic_api.cpp
+++ b/ngraph/test/frontend/paddlepaddle/basic_api.cpp
@@ -2,7 +2,7 @@
 // SPDX-License-Identifier: Apache-2.0
 //
 
-#include "../shared/include/basic_api.hpp"
+#include "basic_api.hpp"
 
 using namespace ngraph;
 using namespace ngraph::frontend;
diff --git a/ngraph/test/frontend/paddlepaddle/cut_specific_model.cpp b/ngraph/test/frontend/paddlepaddle/cut_specific_model.cpp
index 3251762b6f9421..04826bce96f4dd 100644
--- a/ngraph/test/frontend/paddlepaddle/cut_specific_model.cpp
+++ b/ngraph/test/frontend/paddlepaddle/cut_specific_model.cpp
@@ -2,7 +2,7 @@
 // SPDX-License-Identifier: Apache-2.0
 //
 
-#include "../shared/include/cut_specific_model.hpp"
+#include "cut_specific_model.hpp"
 
 using namespace ngraph;
 using namespace ngraph::frontend;
diff --git a/ngraph/test/frontend/paddlepaddle/load_from.cpp b/ngraph/test/frontend/paddlepaddle/load_from.cpp
index 2950c3d271f4f7..b8865b6df6eca1 100644
--- a/ngraph/test/frontend/paddlepaddle/load_from.cpp
+++ b/ngraph/test/frontend/paddlepaddle/load_from.cpp
@@ -2,7 +2,7 @@
 // SPDX-License-Identifier: Apache-2.0
 //
 
-#include "../shared/include/load_from.hpp"
+#include "load_from.hpp"
 
 using namespace ngraph;
 using namespace ngraph::frontend;
diff --git a/ngraph/test/frontend/paddlepaddle/partial_shape.cpp b/ngraph/test/frontend/paddlepaddle/partial_shape.cpp
index ddb7213f9ec75f..97989af8ca2ec0 100644
--- a/ngraph/test/frontend/paddlepaddle/partial_shape.cpp
+++ b/ngraph/test/frontend/paddlepaddle/partial_shape.cpp
@@ -2,7 +2,7 @@
 // SPDX-License-Identifier: Apache-2.0
 //
 
-#include "../shared/include/partial_shape.hpp"
+#include "partial_shape.hpp"
 
 using namespace ngraph;
 using namespace ngraph::frontend;
diff --git a/ngraph/test/frontend/paddlepaddle/set_element_type.cpp b/ngraph/test/frontend/paddlepaddle/set_element_type.cpp
index e53ea790ac869f..10781a0abe6eb5 100644
--- a/ngraph/test/frontend/paddlepaddle/set_element_type.cpp
+++ b/ngraph/test/frontend/paddlepaddle/set_element_type.cpp
@@ -2,7 +2,7 @@
 // SPDX-License-Identifier: Apache-2.0
 //
 
-#include "../shared/include/set_element_type.hpp"
+#include "set_element_type.hpp"
 
 using namespace ngraph;
 using namespace ngraph::frontend;
diff --git a/ngraph/test/frontend/shared/src/basic_api.cpp b/ngraph/test/frontend/shared/src/basic_api.cpp
index de321326b280cf..e316486e70228e 100644
--- a/ngraph/test/frontend/shared/src/basic_api.cpp
+++ b/ngraph/test/frontend/shared/src/basic_api.cpp
@@ -2,8 +2,8 @@
 // SPDX-License-Identifier: Apache-2.0
 //
 
-#include "../include/basic_api.hpp"
-#include "../include/utils.hpp"
+#include "basic_api.hpp"
+#include "utils.hpp"
 
 using namespace ngraph;
 using namespace ngraph::frontend;
@@ -34,7 +34,7 @@ void FrontEndBasicTest::doLoadFromFile()
     ASSERT_NO_THROW(frontends = m_fem.get_available_front_ends());
     ASSERT_NO_THROW(m_frontEnd = m_fem.load_by_framework(m_feName));
     ASSERT_NE(m_frontEnd, nullptr);
-    ASSERT_NO_THROW(m_inputModel = m_frontEnd->load_from_file(m_modelFile));
+    ASSERT_NO_THROW(m_inputModel = m_frontEnd->load(m_modelFile));
     ASSERT_NE(m_inputModel, nullptr);
 }
diff --git a/ngraph/test/frontend/shared/src/cut_specific_model.cpp b/ngraph/test/frontend/shared/src/cut_specific_model.cpp
index 9f8cb64b54188b..a9e7ee1a4ca4e0 100644
--- a/ngraph/test/frontend/shared/src/cut_specific_model.cpp
+++ b/ngraph/test/frontend/shared/src/cut_specific_model.cpp
@@ -2,8 +2,8 @@
 // SPDX-License-Identifier: Apache-2.0
 //
 
-#include "../include/cut_specific_model.hpp"
-#include "../include/utils.hpp"
+#include "cut_specific_model.hpp"
+#include "utils.hpp"
 #include "ngraph/opsets/opset7.hpp"
 
 using namespace ngraph;
@@ -44,7 +44,7 @@ void FrontEndCutModelTest::doLoadFromFile()
     ASSERT_NO_THROW(frontends = m_fem.get_available_front_ends());
     ASSERT_NO_THROW(m_frontEnd = m_fem.load_by_framework(m_param.m_frontEndName));
     ASSERT_NE(m_frontEnd, nullptr);
-    ASSERT_NO_THROW(m_inputModel = m_frontEnd->load_from_file(m_param.m_modelName));
+    ASSERT_NO_THROW(m_inputModel = m_frontEnd->load(m_param.m_modelName));
     ASSERT_NE(m_inputModel, nullptr);
 }
diff --git a/ngraph/test/frontend/shared/src/load_from.cpp b/ngraph/test/frontend/shared/src/load_from.cpp
index 6e1ec73512c26c..937f86f1f0ccf2 100644
--- a/ngraph/test/frontend/shared/src/load_from.cpp
+++ b/ngraph/test/frontend/shared/src/load_from.cpp
@@ -2,9 +2,9 @@
 // SPDX-License-Identifier: Apache-2.0
 //
 
-#include "../include/load_from.hpp"
+#include "load_from.hpp"
 #include <fstream>
-#include "../include/utils.hpp"
+#include "utils.hpp"
 
 using namespace ngraph;
 using namespace ngraph::frontend;
@@ -23,18 +23,18 @@ void FrontEndLoadFromTest::SetUp()
     m_param = GetParam();
 }
 
-///////////////////////////////////////////////////////////////////
+///////////////////load from Variants//////////////////////
 
-TEST_P(FrontEndLoadFromTest, testLoadFromFile)
+TEST_P(FrontEndLoadFromTest, testLoadFromFilePath)
 {
+    std::string model_path = m_param.m_modelsPath + m_param.m_file;
     std::vector<std::string> frontends;
     FrontEnd::Ptr fe;
 
     ASSERT_NO_THROW(frontends = m_fem.get_available_front_ends());
-    ASSERT_NO_THROW(m_frontEnd = m_fem.load_by_framework(m_param.m_frontEndName));
+    ASSERT_NO_THROW(m_frontEnd = m_fem.load_by_model(model_path));
     ASSERT_NE(m_frontEnd, nullptr);
-    ASSERT_NO_THROW(m_inputModel =
-                        m_frontEnd->load_from_file(m_param.m_modelsPath + m_param.m_file));
+    ASSERT_NO_THROW(m_inputModel = m_frontEnd->load(model_path));
     ASSERT_NE(m_inputModel, nullptr);
 
     std::shared_ptr<ngraph::Function> function;
@@ -42,21 +42,17 @@ TEST_P(FrontEndLoadFromTest, testLoadFromFile)
     ASSERT_NE(function, nullptr);
 }
 
-TEST_P(FrontEndLoadFromTest, testLoadFromFiles)
+TEST_P(FrontEndLoadFromTest, testLoadFromTwoFiles)
 {
+    std::string model_path = m_param.m_modelsPath + m_param.m_files[0];
+    std::string weights_path = m_param.m_modelsPath + m_param.m_files[1];
     std::vector<std::string> frontends;
     FrontEnd::Ptr fe;
 
     ASSERT_NO_THROW(frontends = m_fem.get_available_front_ends());
-    ASSERT_NO_THROW(m_frontEnd = m_fem.load_by_framework(m_param.m_frontEndName));
+    ASSERT_NO_THROW(m_frontEnd = m_fem.load_by_model(model_path, weights_path));
     ASSERT_NE(m_frontEnd, nullptr);
-    auto dir_files = m_param.m_files;
-    for (auto& file : dir_files)
-    {
-        file = m_param.m_modelsPath + file;
-    }
-
-    ASSERT_NO_THROW(m_inputModel = m_frontEnd->load_from_files(dir_files));
+    ASSERT_NO_THROW(m_inputModel = m_frontEnd->load(model_path, weights_path));
     ASSERT_NE(m_inputModel, nullptr);
 
     std::shared_ptr<ngraph::Function> function;
@@ -66,14 +62,16 @@ TEST_P(FrontEndLoadFromTest, testLoadFromFiles)
 
 TEST_P(FrontEndLoadFromTest, testLoadFromStream)
 {
+    auto ifs = std::make_shared<std::ifstream>(m_param.m_modelsPath + m_param.m_stream,
+                                               std::ios::in | std::ifstream::binary);
+    auto is = std::dynamic_pointer_cast<std::istream>(ifs);
     std::vector<std::string> frontends;
     FrontEnd::Ptr fe;
 
     ASSERT_NO_THROW(frontends = m_fem.get_available_front_ends());
-    ASSERT_NO_THROW(m_frontEnd = m_fem.load_by_framework(m_param.m_frontEndName));
+    ASSERT_NO_THROW(m_frontEnd = m_fem.load_by_model(is));
     ASSERT_NE(m_frontEnd, nullptr);
-    std::ifstream is(m_param.m_modelsPath + m_param.m_stream, std::ios::in | std::ifstream::binary);
-    ASSERT_NO_THROW(m_inputModel = m_frontEnd->load_from_stream(is));
+    ASSERT_NO_THROW(m_inputModel = m_frontEnd->load(is));
     ASSERT_NE(m_inputModel, nullptr);
 
     std::shared_ptr<ngraph::Function> function;
@@ -81,23 +79,22 @@ TEST_P(FrontEndLoadFromTest, testLoadFromStream)
     ASSERT_NE(function, nullptr);
 }
 
-TEST_P(FrontEndLoadFromTest, testLoadFromStreams)
+TEST_P(FrontEndLoadFromTest, testLoadFromTwoStreams)
 {
+    auto model_ifs = std::make_shared<std::ifstream>(m_param.m_modelsPath + m_param.m_streams[0],
+                                                     std::ios::in | std::ifstream::binary);
+    auto weights_ifs = std::make_shared<std::ifstream>(m_param.m_modelsPath + m_param.m_streams[1],
+                                                       std::ios::in | std::ifstream::binary);
+    auto model_is = std::dynamic_pointer_cast<std::istream>(model_ifs);
+    auto weights_is = std::dynamic_pointer_cast<std::istream>(weights_ifs);
+
     std::vector<std::string> frontends;
     FrontEnd::Ptr fe;
 
     ASSERT_NO_THROW(frontends = m_fem.get_available_front_ends());
-    ASSERT_NO_THROW(m_frontEnd = m_fem.load_by_framework(m_param.m_frontEndName));
+    ASSERT_NO_THROW(m_frontEnd = m_fem.load_by_model(model_is, weights_is));
     ASSERT_NE(m_frontEnd, nullptr);
-    std::vector<std::shared_ptr<std::ifstream>> is_vec;
-    std::vector<std::istream*> is_ptr_vec;
-    for (auto& file : m_param.m_streams)
-    {
-        is_vec.push_back(std::make_shared<std::ifstream>(m_param.m_modelsPath + file,
-                                                         std::ios::in | std::ifstream::binary));
-        is_ptr_vec.push_back(is_vec.back().get());
-    }
-    ASSERT_NO_THROW(m_inputModel = m_frontEnd->load_from_streams(is_ptr_vec));
+    ASSERT_NO_THROW(m_inputModel = m_frontEnd->load(model_is, weights_is));
     ASSERT_NE(m_inputModel, nullptr);
 
     std::shared_ptr<ngraph::Function> function;
diff --git a/ngraph/test/frontend/shared/src/op_fuzzy.cpp b/ngraph/test/frontend/shared/src/op_fuzzy.cpp
index d7fa7ca8784a81..b43dd2946363a8 100644
--- a/ngraph/test/frontend/shared/src/op_fuzzy.cpp
+++ b/ngraph/test/frontend/shared/src/op_fuzzy.cpp
@@ -44,7 +44,7 @@ void FrontEndFuzzyOpTest::doLoadFromFile()
     ASSERT_NO_THROW(frontends = m_fem.get_available_front_ends());
     ASSERT_NO_THROW(m_frontEnd = m_fem.load_by_framework(m_feName));
     ASSERT_NE(m_frontEnd, nullptr);
-    ASSERT_NO_THROW(m_inputModel = m_frontEnd->load_from_file(m_modelFile));
+    ASSERT_NO_THROW(m_inputModel = m_frontEnd->load(m_modelFile));
     ASSERT_NE(m_inputModel, nullptr);
 }
diff --git a/ngraph/test/frontend/shared/src/partial_shape.cpp b/ngraph/test/frontend/shared/src/partial_shape.cpp
index e65554b88e215d..bfb63528f3fe85 100644
--- a/ngraph/test/frontend/shared/src/partial_shape.cpp
+++ b/ngraph/test/frontend/shared/src/partial_shape.cpp
@@ -2,8 +2,8 @@
 // SPDX-License-Identifier: Apache-2.0
 //
 
-#include "../include/partial_shape.hpp"
-#include "../include/utils.hpp"
+#include "partial_shape.hpp"
+#include "utils.hpp"
 
 using namespace ngraph;
 using namespace ngraph::frontend;
@@ -42,7 +42,7 @@ void FrontEndPartialShapeTest::doLoadFromFile()
     ASSERT_NO_THROW(frontends = m_fem.get_available_front_ends());
     ASSERT_NO_THROW(m_frontEnd = m_fem.load_by_framework(m_baseParam.m_frontEndName));
     ASSERT_NE(m_frontEnd, nullptr);
-    ASSERT_NO_THROW(m_inputModel = m_frontEnd->load_from_file(m_partShape.m_modelName));
+    ASSERT_NO_THROW(m_inputModel = m_frontEnd->load(m_partShape.m_modelName));
     ASSERT_NE(m_inputModel, nullptr);
 }
diff --git a/ngraph/test/frontend/shared/src/set_element_type.cpp b/ngraph/test/frontend/shared/src/set_element_type.cpp
index 1b6b77141ac8a1..647087d611994d 100644
--- a/ngraph/test/frontend/shared/src/set_element_type.cpp
+++ b/ngraph/test/frontend/shared/src/set_element_type.cpp
@@ -2,8 +2,8 @@
 // SPDX-License-Identifier: Apache-2.0
 //
 
-#include "../include/set_element_type.hpp"
-#include "../include/utils.hpp"
+#include "set_element_type.hpp"
+#include "utils.hpp"
 
 using namespace ngraph;
 using namespace ngraph::frontend;
@@ -35,7 +35,7 @@ void FrontEndElementTypeTest::doLoadFromFile()
     ASSERT_NO_THROW(frontends = m_fem.get_available_front_ends());
     ASSERT_NO_THROW(m_frontEnd = m_fem.load_by_framework(m_param.m_frontEndName));
     ASSERT_NE(m_frontEnd, nullptr);
-    ASSERT_NO_THROW(m_inputModel = m_frontEnd->load_from_file(m_param.m_modelName));
+    ASSERT_NO_THROW(m_inputModel = m_frontEnd->load(m_param.m_modelName));
     ASSERT_NE(m_inputModel, nullptr);
 }
diff --git a/ngraph/test/op.cpp b/ngraph/test/op.cpp
index af7371db74c9d5..b3f04121a689ae 100644
--- a/ngraph/test/op.cpp
+++ b/ngraph/test/op.cpp
@@ -107,16 +107,16 @@ namespace ngraph
 TEST(op, variant)
 {
-    shared_ptr<Variant> var_std_string = make_shared<VariantWrapper<std::string>>("My string");
+    shared_ptr<Variant> var_std_string = make_variant("My string");
     ASSERT_TRUE((is_type<VariantWrapper<std::string>>(var_std_string)));
     EXPECT_EQ((as_type_ptr<VariantWrapper<std::string>>(var_std_string)->get()), "My string");
 
-    shared_ptr<Variant> var_int64_t = make_shared<VariantWrapper<int64_t>>(27);
+    shared_ptr<Variant> var_int64_t = make_variant(27);
    ASSERT_TRUE((is_type<VariantWrapper<int64_t>>(var_int64_t)));
     EXPECT_FALSE((is_type<VariantWrapper<std::string>>(var_int64_t)));
     EXPECT_EQ((as_type_ptr<VariantWrapper<int64_t>>(var_int64_t)->get()), 27);
 
-    shared_ptr<Variant> var_ship = make_shared<VariantWrapper<Ship>>(Ship{"Lollipop", 3, 4});
+    shared_ptr<Variant> var_ship = make_variant(Ship{"Lollipop", 3, 4});
     ASSERT_TRUE((is_type<VariantWrapper<Ship>>(var_ship)));
     Ship& ship = as_type_ptr<VariantWrapper<Ship>>(var_ship)->get();
     EXPECT_EQ(ship.name, "Lollipop");
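Usage note (not part of the patch): the Python-facing effect of the changes above is that FrontEndCapabilities disappears and the load_from_file / load_from_files / load_from_stream / load_from_streams family collapses into a single load() entry point. A minimal sketch of the new flow, reusing only the mock_py front end and the "abc.bin" path exercised by the updated tests, and assuming the mock front end is built as in the @mock_needed tests:

    from ngraph.frontend import FrontEndManager

    fem = FrontEndManager()

    # Pick a front end by framework name; the old "capabilities" argument is gone.
    fe = fem.load_by_framework(framework="mock_py")

    # Single unified entry point: load() takes the model path directly.
    model = fe.load("abc.bin")

    # Conversion to an nGraph function is unchanged.
    func = fe.convert(model=model)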