From 6eb833df3816827c74f25e1445bb6a96a769fc6b Mon Sep 17 00:00:00 2001
From: Anastasia Kuporosova
Date: Tue, 29 Aug 2023 08:46:04 +0000
Subject: [PATCH] apply comments

---
 docs/snippets/ie_common.py            |  4 ++--
 docs/snippets/ov_caching.py           | 14 +++++++-------
 docs/snippets/ov_common.py            |  7 ++++---
 docs/snippets/ov_denormals.py         |  1 -
 docs/snippets/ov_dynamic_shapes.py    | 18 ++++++++++--------
 docs/snippets/ov_preprocessing.py     | 14 ++++----------
 docs/snippets/ov_python_exclusives.py |  4 ++--
 docs/snippets/utils.py                |  2 +-
 8 files changed, 30 insertions(+), 34 deletions(-)

diff --git a/docs/snippets/ie_common.py b/docs/snippets/ie_common.py
index 8eef17645b22d2..5a66212ec6805c 100644
--- a/docs/snippets/ie_common.py
+++ b/docs/snippets/ie_common.py
@@ -10,10 +10,10 @@
 core = ie.IECore()
 #! [ie:create_core]
 
-xml_path = get_path_to_model(True)
+model_path = get_path_to_model(True)
 
 #! [ie:read_model]
-network = core.read_network(xml_path)
+network = core.read_network(model_path)
 #! [ie:read_model]
 
 #! [ie:compile_model]
diff --git a/docs/snippets/ov_caching.py b/docs/snippets/ov_caching.py
index 08900aa68ee7a4..ef3fd0b4861ae6 100644
--- a/docs/snippets/ov_caching.py
+++ b/docs/snippets/ov_caching.py
@@ -8,12 +8,12 @@
 import openvino.runtime.properties as props
 
 device_name = 'GNA'
-xml_path = get_path_to_model()
-path_to_cash_dir = get_temp_dir()
+model_path = get_path_to_model()
+path_to_cache_dir = get_temp_dir()
 
 # ! [ov:caching:part0]
 core = ov.Core()
-core.set_property({props.cache_dir(): path_to_cash_dir})
-model = core.read_model(model=xml_path)
+core.set_property({props.cache_dir(): path_to_cache_dir})
+model = core.read_model(model=model_path)
 compiled_model = core.compile_model(model=model, device_name=device_name)
 # ! [ov:caching:part0]
@@ -21,15 +21,15 @@
 
 # ! [ov:caching:part1]
 core = ov.Core()
-compiled_model = core.compile_model(model=xml_path, device_name=device_name)
+compiled_model = core.compile_model(model=model_path, device_name=device_name)
 # ! [ov:caching:part1]
 
 assert compiled_model
 
 # ! [ov:caching:part2]
 core = ov.Core()
-core.set_property({props.cache_dir(): path_to_cash_dir})
-compiled_model = core.compile_model(model=xml_path, device_name=device_name)
+core.set_property({props.cache_dir(): path_to_cache_dir})
+compiled_model = core.compile_model(model=model_path, device_name=device_name)
 # ! [ov:caching:part2]
 
 assert compiled_model
diff --git a/docs/snippets/ov_common.py b/docs/snippets/ov_common.py
index 602a646aa636e9..92ca18973162fb 100644
--- a/docs/snippets/ov_common.py
+++ b/docs/snippets/ov_common.py
@@ -11,10 +11,10 @@
 core = ov.Core()
 #! [ov_api_2_0:create_core]
 
-xml_path = get_path_to_model()
+model_path = get_path_to_model()
 
 #! [ov_api_2_0:read_model]
-model = core.read_model(xml_path)
+model = core.read_model(model_path)
 #! [ov_api_2_0:read_model]
 
 #! [ov_api_2_0:compile_model]
@@ -107,7 +107,8 @@ def callback(request, frame_id):
 # process output data ...
 #! [ov_api_2_0:get_output_tensor_v10]
 
-#! [ov_api_2_0:load_old_extension]
 path_to_extension_library = get_path_to_extension_library()
+
+#! [ov_api_2_0:load_old_extension]
 core.add_extension(path_to_extension_library)
 #! [ov_api_2_0:load_old_extension]
diff --git a/docs/snippets/ov_denormals.py b/docs/snippets/ov_denormals.py
index abb4b29b277265..45b77483fcc882 100644
--- a/docs/snippets/ov_denormals.py
+++ b/docs/snippets/ov_denormals.py
@@ -6,7 +6,6 @@
 from utils import get_model
 
 device_name = 'CPU'
-xml_path = 'modelWithDenormals.xml'
 model = get_model()
 
 # ! [ov:intel_cpu:denormals_optimization:part0]
diff --git a/docs/snippets/ov_dynamic_shapes.py b/docs/snippets/ov_dynamic_shapes.py
index 014a7c154cea8a..ce9474b6750f7a 100644
--- a/docs/snippets/ov_dynamic_shapes.py
+++ b/docs/snippets/ov_dynamic_shapes.py
@@ -2,15 +2,16 @@
 # SPDX-License-Identifier: Apache-2.0
 
 import numpy as np
-from utils import get_model, get_dynamic_model
+from utils import get_dynamic_model
 
 #! [import]
 import openvino as ov
 #! [import]
 
+model = get_dynamic_model()
+
 #! [reshape_undefined]
 core = ov.Core()
-model = get_dynamic_model()
 
 # Set first dimension to be dynamic while keeping others static
 model.reshape([-1, 3, 224, 224])
@@ -37,9 +38,10 @@
 print(model.input().partial_shape)
 #! [print_dynamic]
 
-#! [detect_dynamic]
 model = get_dynamic_model()
 
+#! [detect_dynamic]
+
 if model.input(0).partial_shape.is_dynamic:
     # input is dynamic
     pass
@@ -55,11 +57,12 @@
 executable = core.compile_model(model)
 infer_request = executable.create_infer_request()
 
+input_tensor_name = "input"
 
 #! [set_input_tensor]
 # For first inference call, prepare an input tensor with 1x128 shape and run inference request
 input_data1 = np.ones(shape=[1,128])
-infer_request.infer({"Parameter_319": input_data1})
+infer_request.infer({input_tensor_name: input_data1})
 
 # Get resulting outputs
 output_tensor1 = infer_request.get_output_tensor()
@@ -67,7 +70,7 @@
 
 # For second inference call, prepare a 1x200 input tensor and run inference request
 input_data2 = np.ones(shape=[1,200])
-infer_request.infer({"Parameter_319": input_data2})
+infer_request.infer({input_tensor_name: input_data2})
 
 # Get resulting outputs
 output_tensor2 = infer_request.get_output_tensor()
@@ -98,10 +101,9 @@
 data2 = output_tensor.data[:]
 #! [get_input_tensor]
 
-#! [check_inputs]
-core = ov.Core()
-model = core.read_model("model.xml")
+model = get_dynamic_model()
 
+#! [check_inputs]
 # Print model input layer info
 for input_layer in model.inputs:
     print(input_layer.names, input_layer.partial_shape)
diff --git a/docs/snippets/ov_preprocessing.py b/docs/snippets/ov_preprocessing.py
index 44bb4bf032191e..8a055d4fd77f00 100644
--- a/docs/snippets/ov_preprocessing.py
+++ b/docs/snippets/ov_preprocessing.py
@@ -177,7 +177,7 @@ def custom_abs(output: Output):
 from openvino.runtime.passes import Manager, Serialize
 # ! [ov:preprocess:save_headers]
 
-xml_path = get_path_to_model()
+model_path = get_path_to_model()
 
 # ! [ov:preprocess:save]
 # ======== Step 0: read original model =========
@@ -213,20 +213,14 @@ def custom_abs(output: Output):
 set_batch(model, 2)
 
 # ======== Step 3: Save the model ================
-# First method - using serialize runtime wrapper
-serialize(model, xml_path)
-
-# Second method - using Manager and Serialize pass
-manager = Manager()
-manager.register_pass(Serialize(xml_path))
-manager.run_passes(model)
+serialize(model, model_path)
 # ! [ov:preprocess:save]
 
-path_to_cash_dir = get_temp_dir()
+path_to_cache_dir = get_temp_dir()
 
 # ! [ov:preprocess:save_load]
 core = Core()
-core.set_property({props.cache_dir(): path_to_cash_dir})
+core.set_property({props.cache_dir(): path_to_cache_dir})
 
 # In case that no preprocessing is needed anymore, we can load model on target device directly
 # With cached model available, it will also save some time on reading original model
diff --git a/docs/snippets/ov_python_exclusives.py b/docs/snippets/ov_python_exclusives.py
index 7d870d0a77fd97..bb31ea103657ad 100644
--- a/docs/snippets/ov_python_exclusives.py
+++ b/docs/snippets/ov_python_exclusives.py
@@ -157,7 +157,7 @@ def f(request, userdata):
 assert np.array_equal(unpacked_data , unt8_data)
 #! [unpacking]
 
-xml_path = get_path_to_model()
+model_path = get_path_to_model()
 
 
 path_to_image = get_path_to_image()
@@ -180,7 +180,7 @@ def prepare_data(input, image_path):
     input_data.append(image)
 
 core = ov.Core()
-model = core.read_model(xml_path)
+model = core.read_model(model_path)
 # Create thread with prepare_data function as target and start it
 thread = Thread(target=prepare_data, args=[model.input(), path_to_image])
 thread.start()
diff --git a/docs/snippets/utils.py b/docs/snippets/utils.py
index 109727d5b1c772..4b031faa315135 100644
--- a/docs/snippets/utils.py
+++ b/docs/snippets/utils.py
@@ -15,7 +15,7 @@
 
 
 def get_dynamic_model():
-    param = ops.parameter(ov.PartialShape([-1, -1]))
+    param = ops.parameter(ov.PartialShape([-1, -1]), name="input")
     return ov.Model(ops.relu(param), [param])