diff --git a/docs/IE_DG/Integrate_with_customer_application_new_API.md b/docs/IE_DG/Integrate_with_customer_application_new_API.md
index 93482a9093887e..9e5ac71189cdd9 100644
--- a/docs/IE_DG/Integrate_with_customer_application_new_API.md
+++ b/docs/IE_DG/Integrate_with_customer_application_new_API.md
@@ -35,7 +35,7 @@ Integration process includes the following steps:
 
 @snippet snippets/Integrate_with_customer_application_new_API.cpp part1
 
-**Or read the model from ONNX format** (.onnx and .prototxt are supported formats). You can find more information about the ONNX format support in the document [ONNX format support in the OpenVINO™](./ONNX_Support.md).
+**Or read the model from ONNX format**. You can find more information about the ONNX format support in the document [ONNX format support in the OpenVINO™](./ONNX_Support.md).
 
 @snippet snippets/Integrate_with_customer_application_new_API.cpp part2
 
diff --git a/inference-engine/ie_bridges/python/src/openvino/inference_engine/ie_api.pyx b/inference-engine/ie_bridges/python/src/openvino/inference_engine/ie_api.pyx
index 109a6a51a0233f..944557084e9534 100644
--- a/inference-engine/ie_bridges/python/src/openvino/inference_engine/ie_api.pyx
+++ b/inference-engine/ie_bridges/python/src/openvino/inference_engine/ie_api.pyx
@@ -304,7 +304,7 @@ cdef class IECore:
         return versions
 
     ## Reads a network from Intermediate Representation (IR) or ONNX formats and creates an `IENetwork`.
-    #  @param model: A `.xml`, `.onnx`or `.prototxt` model file or string with IR.
+    #  @param model: A `.xml` or `.onnx` model file or string with IR.
     #  @param weights: A `.bin` file of the IR. Depending on `init_from_buffer` value, can be a string path or
     #                  bytes with file content.
     #  @param init_from_buffer: Defines the way of how `model` and `weights` attributes are interpreted.
diff --git a/inference-engine/samples/benchmark_app/benchmark_app.hpp b/inference-engine/samples/benchmark_app/benchmark_app.hpp
index a752152ec2299a..352baa09b055a9 100644
--- a/inference-engine/samples/benchmark_app/benchmark_app.hpp
+++ b/inference-engine/samples/benchmark_app/benchmark_app.hpp
@@ -19,7 +19,7 @@ static const char input_message[] =
 
 /// @brief message for model argument
 static const char model_message[] =
-    "Required. Path to an .xml/.onnx/.prototxt file with a trained model or to a .blob files with "
+    "Required. Path to an .xml/.onnx file with a trained model or to a .blob files with "
     "a trained compiled model.";
 
 /// @brief message for execution mode
diff --git a/inference-engine/src/inference_engine/include/ie/ie_core.hpp b/inference-engine/src/inference_engine/include/ie/ie_core.hpp
index a02232bc39426e..3f899c720797b3 100644
--- a/inference-engine/src/inference_engine/include/ie/ie_core.hpp
+++ b/inference-engine/src/inference_engine/include/ie/ie_core.hpp
@@ -59,7 +59,7 @@ class INFERENCE_ENGINE_API_CLASS(Core) {
      * For IR format (*.bin):
      *  * if path is empty, will try to read bin file with the same name as xml and
      *  * if bin file with the same name was not found, will load IR without weights.
-     * For ONNX format (*.onnx or *.prototxt):
+     * For ONNX format (*.onnx):
      *  * binPath parameter is not used.
      * @return CNNNetwork
      */
@@ -73,7 +73,7 @@ class INFERENCE_ENGINE_API_CLASS(Core) {
      * For IR format (*.bin):
      *  * if path is empty, will try to read bin file with the same name as xml and
      *  * if bin file with the same name was not found, will load IR without weights.
-     * For ONNX format (*.onnx or *.prototxt):
+     * For ONNX format (*.onnx):
      *  * binPath parameter is not used.
      * @return CNNNetwork
      */
diff --git a/inference-engine/src/inference_engine/include/openvino/runtime/core.hpp b/inference-engine/src/inference_engine/include/openvino/runtime/core.hpp
index 0ee954167e711f..fc35257f830cf5 100644
--- a/inference-engine/src/inference_engine/include/openvino/runtime/core.hpp
+++ b/inference-engine/src/inference_engine/include/openvino/runtime/core.hpp
@@ -69,7 +69,7 @@ class INFERENCE_ENGINE_API_CLASS(Core) {
      * For IR format (*.bin):
      *  * if path is empty, will try to read bin file with the same name as xml and
      *  * if bin file with the same name was not found, will load IR without weights.
-     * For ONNX format (*.onnx or *.prototxt):
+     * For ONNX format (*.onnx):
      *  * binPath parameter is not used.
      * @return Function
      */
@@ -83,7 +83,7 @@ class INFERENCE_ENGINE_API_CLASS(Core) {
      * For IR format (*.bin):
      *  * if path is empty, will try to read bin file with the same name as xml and
      *  * if bin file with the same name was not found, will load IR without weights.
-     * For ONNX format (*.onnx or *.prototxt):
+     * For ONNX format (*.onnx):
      *  * binPath parameter is not used.
      * @return Function
      */
diff --git a/tests/time_tests/scripts/run_timetest.py b/tests/time_tests/scripts/run_timetest.py
index 7b14c1e9c7292c..8f938082624dee 100644
--- a/tests/time_tests/scripts/run_timetest.py
+++ b/tests/time_tests/scripts/run_timetest.py
@@ -151,7 +151,7 @@ def cli_parser():
                         required=True,
                         dest="model",
                         type=Path,
-                        help='path to an .xml/.onnx/.prototxt file with a trained model or'
+                        help='path to an .xml/.onnx file with a trained model or'
                         ' to a .blob files with a trained compiled model')
     parser.add_argument('-d',
                         required=True,
diff --git a/tests/time_tests/src/timetests_helper/cli.h b/tests/time_tests/src/timetests_helper/cli.h
index 3370f16eebdeab..9beaceda3730f7 100644
--- a/tests/time_tests/src/timetests_helper/cli.h
+++ b/tests/time_tests/src/timetests_helper/cli.h
@@ -14,7 +14,7 @@ static const char help_message[] = "Print a usage message";
 
 /// @brief message for model argument
 static const char model_message[] =
-    "Required. Path to an .xml/.onnx/.prototxt file with a trained model or to "
+    "Required. Path to an .xml/.onnx file with a trained model or to "
     "a .blob files with a trained compiled model.";
 
 /// @brief message for target device argument
diff --git a/tools/benchmark_tool/openvino/tools/benchmark/parameters.py b/tools/benchmark_tool/openvino/tools/benchmark/parameters.py
index 2eb6f45f0ec0ad..212fb9e2414a77 100644
--- a/tools/benchmark_tool/openvino/tools/benchmark/parameters.py
+++ b/tools/benchmark_tool/openvino/tools/benchmark/parameters.py
@@ -35,7 +35,7 @@ def parse_args():
                       help='Optional. '
                            'Path to a folder with images and/or binaries or to specific image or binary file.')
     args.add_argument('-m', '--path_to_model', type=str, required=True,
-                      help='Required. Path to an .xml/.onnx/.prototxt file with a trained model or '
+                      help='Required. Path to an .xml/.onnx file with a trained model or '
                            'to a .blob file with a trained compiled model.')
     args.add_argument('-d', '--target_device', type=str, required=False, default='CPU',
                       help='Optional. Specify a target device to infer on (the list of available devices is shown below). '
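
This change only drops `.prototxt` from the documented set of ONNX inputs; the read paths themselves are unchanged. As a minimal sketch of the affected `IECore.read_network` call, assuming hypothetical `model.xml`/`model.bin` and `model.onnx` files on disk:

```python
from openvino.inference_engine import IECore

ie = IECore()

# IR format: the .bin weights file accompanies the .xml
# (if omitted, a .bin with the same name as the .xml is looked up).
net_ir = ie.read_network(model="model.xml", weights="model.bin")

# ONNX format: a single .onnx file is enough; the weights argument
# is not used, and .prototxt is no longer an advertised input.
net_onnx = ie.read_network(model="model.onnx")
```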