Deprecation warnings fix in MO (openvinotoolkit#6016)
* Add code style fixes

* Revert "Add code style fixes"

This reverts commit 490934f.

* Fix Invalid escape sequence

* Fix Invalid escape sequence
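
All eight files get the same treatment: regular-expression patterns and backslash-containing test fixtures written as plain string literals become raw strings. Escape sequences such as \., \d or \m are not valid Python string escapes, so Python 3 keeps the backslash but reports "DeprecationWarning: invalid escape sequence" when it compiles the literal (a SyntaxWarning on newer interpreters); prefixing the literal with r silences the warning without changing the resulting string. A minimal before/after sketch, using a made-up file name for illustration (not part of the commit):

    import re

    # Before: '\.' and '\d' are not recognized escapes, so Python keeps the
    # backslashes but warns when it compiles the literal.
    old_pattern = '^(.*)\.(data-\d*-of-\d*|index|meta)$'

    # After: the raw string hands the backslashes to the regex engine untouched
    # and produces exactly the same characters, so behaviour is unchanged.
    new_pattern = r'^(.*)\.(data-\d*-of-\d*|index|meta)$'

    assert old_pattern == new_pattern

    match = re.search(new_pattern, 'model.ckpt.meta')  # hypothetical file name
    print(match.groups())                              # ('model.ckpt', 'meta')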
evolosen authored and yekruglov committed Jun 7, 2021
1 parent 2054c06 commit 1265b2f
Showing 8 changed files with 22 additions and 22 deletions.
6 changes: 3 additions & 3 deletions model-optimizer/mo/front/tf/loader.py
@@ -140,7 +140,7 @@ def get_output_node_names_list(graph_def, user_defined_output_node_names_list: l


def deducing_metagraph_path(meta_graph_file: str):
- match = re.search('^(.*)\.(data-\d*-of-\d*|index|meta)$', meta_graph_file)
+ match = re.search(r'^(.*)\.(data-\d*-of-\d*|index|meta)$', meta_graph_file)
if match is not None:
deduced_meta_graph_file = match.group(1) + '.meta'
if not os.path.isfile(deduced_meta_graph_file):
@@ -173,7 +173,7 @@ def load_tf_graph_def(graph_file_name: str = "", is_binary: bool = True, checkpo
user_output_node_names_list: list = []):
# As a provisional solution, use a native TF methods to load a model protobuf
graph_def = tf_v1.GraphDef()
- if isinstance(graph_file_name, str) and (re.match('.*\.(ckpt|meta)$', graph_file_name)):
+ if isinstance(graph_file_name, str) and (re.match(r'.*\.(ckpt|meta)$', graph_file_name)):
print('[ WARNING ] The value for the --input_model command line parameter ends with ".ckpt" or ".meta" '
'extension.\n'
'It means that the model is not frozen.\n'
@@ -208,7 +208,7 @@ def load_tf_graph_def(graph_file_name: str = "", is_binary: bool = True, checkpo
# pylint: disable=no-member
with tf_v1.Session() as sess:
restorer = tf_v1.train.import_meta_graph(input_meta_graph_def)
- restorer.restore(sess, re.sub('\.meta$', '', meta_graph_file))
+ restorer.restore(sess, re.sub(r'\.meta$', '', meta_graph_file))
outputs = get_output_node_names_list(input_meta_graph_def.graph_def, user_output_node_names_list)
graph_def = tf_v1.graph_util.convert_variables_to_constants(sess, input_meta_graph_def.graph_def,
outputs)
2 changes: 1 addition & 1 deletion model-optimizer/mo/front/tf/partial_infer/tf.py
@@ -108,7 +108,7 @@ def get_subgraph_output_tensors(node: Node):
tf_v1.import_graph_def(graph_def, name='')
all_constants, feed_dict = generate_feed_dict(graph, node)
for out_port, out_tensor_name in enumerate(node['output_tensors_names']):
- if not match('.*:\d+', out_tensor_name):
+ if not match(r'.*:\d+', out_tensor_name):
out_tensor_name = out_tensor_name + ":" + str(out_port)
result_tensor = sess.run(graph.get_tensor_by_name(out_tensor_name), feed_dict=feed_dict)
result[out_port] = result_tensor
14 changes: 7 additions & 7 deletions model-optimizer/mo/utils/guess_framework.py
@@ -28,17 +28,17 @@ def deduce_framework_by_namespace(argv: Namespace):


def guess_framework_by_ext(input_model_path: str) -> int:
- if re.match('^.*\.caffemodel$', input_model_path):
+ if re.match(r'^.*\.caffemodel$', input_model_path):
return 'caffe'
- elif re.match('^.*\.pb$', input_model_path):
+ elif re.match(r'^.*\.pb$', input_model_path):
return 'tf'
- elif re.match('^.*\.pbtxt$', input_model_path):
+ elif re.match(r'^.*\.pbtxt$', input_model_path):
return 'tf'
- elif re.match('^.*\.params$', input_model_path):
+ elif re.match(r'^.*\.params$', input_model_path):
return 'mxnet'
- elif re.match('^.*\.nnet$', input_model_path):
+ elif re.match(r'^.*\.nnet$', input_model_path):
return 'kaldi'
- elif re.match('^.*\.mdl', input_model_path):
+ elif re.match(r'^.*\.mdl', input_model_path):
return 'kaldi'
- elif re.match('^.*\.onnx$', input_model_path):
+ elif re.match(r'^.*\.onnx$', input_model_path):
return 'onnx'
4 changes: 2 additions & 2 deletions model-optimizer/unit_tests/mo/bom_test.py
@@ -36,8 +36,8 @@ def setUpClass(cls):
cls.existing_files = [name.rstrip() for name in bom_file.readlines()]

cls.expected_header = [re.compile(pattern) for pattern in [
- '^# Copyright \([cC]\) [0-9\-]+ Intel Corporation$',
- '^# SPDX-License-Identifier: Apache-2.0$',
+ r'^# Copyright \([cC]\) [0-9\-]+ Intel Corporation$',
+ r'^# SPDX-License-Identifier: Apache-2.0$',
]]

def test_bom_file(self):
2 changes: 1 addition & 1 deletion model-optimizer/unit_tests/mo/front/tf/loader_test.py
@@ -18,4 +18,4 @@ def test_helper_print_ckpt(self, path, out):
mock = Mock(__bool__=MagicMock(side_effect=Exception()))
self.assertRaises(Exception, load_tf_graph_def, path, meta_graph_file=mock)
self.assertRegex(out.getvalue(),
- '\[ WARNING ] The value for the --input_model command line parameter ends with "\.ckpt"')
+ r'\[ WARNING ] The value for the --input_model command line parameter ends with "\.ckpt"')
4 changes: 2 additions & 2 deletions model-optimizer/unit_tests/mo/graph/graph_test.py
@@ -341,7 +341,7 @@ def test_check_shape_consistency_1(self):

del graph.node['2_data']['shape']

with self.assertRaisesRegex(Error, "Graph contains data nodes \(1\) with inconsistent shapes:.*"):
with self.assertRaisesRegex(Error, r"Graph contains data nodes \(1\) with inconsistent shapes:.*"):
graph.check_shapes_consistency()

def test_check_shape_consistency_2(self):
@@ -358,7 +358,7 @@ def test_check_shape_consistency_2(self):
graph.node['1_data']['shape'] = (1, 2, 3)
graph.node['2_data']['shape'] = (1, 2, 3)

with self.assertRaisesRegex(Error, "Graph contains data nodes \(2\) with inconsistent shapes:.*"):
with self.assertRaisesRegex(Error, r"Graph contains data nodes \(2\) with inconsistent shapes:.*"):
graph.check_shapes_consistency()


@@ -102,9 +102,9 @@ def test_restore_tensor_names(self):
nodes_attributes = {
'input': {'kind': 'op', 'type': 'Parameter', 'ports': {0: (shape, 'abc,def')}},
'input_data': {'shape': shape, 'kind': 'data'},
- 'add': {'kind': 'op', 'type': 'Add', 'ports': {2: (shape, 'ghi\,jkl')}},
+ 'add': {'kind': 'op', 'type': 'Add', 'ports': {2: (shape, r'ghi\,jkl')}},
'add_data': {'shape': shape, 'kind': 'data'},
- 'add_const': {'kind': 'op', 'type': 'Const', 'ports': {0: (shape, 'mno,pqr\,stu')}},
+ 'add_const': {'kind': 'op', 'type': 'Const', 'ports': {0: (shape, r'mno,pqr\,stu')}},
'add_const_data': {'shape': shape, 'kind': 'data'},
'result': {'kind': 'op', 'type': 'Result', 'ports': {0: (shape, None)}}
}
@@ -36,10 +36,10 @@
correct_proto_message_9 = ' first_stage_anchor_generator {grid_anchor_generator {height_stride: 16, width_stride:' \
' 16 scales: [ 0.25, 0.5, 1.0, 2.0], aspect_ratios: [] }}'

- correct_proto_message_10 = 'train_input_reader {label_map_path: "C:\mscoco_label_map.pbtxt"' \
+ correct_proto_message_10 = r'train_input_reader {label_map_path: "C:\mscoco_label_map.pbtxt"' \
' tf_record_input_reader { input_path: "PATH_TO_BE_CONFIGURED/ mscoco_train.record" }}'

- correct_proto_message_11 = 'model {path: "C:\[{],}" other_value: [1, 2, 3, 4]}'
+ correct_proto_message_11 = r'model {path: "C:\[{],}" other_value: [1, 2, 3, 4]}'

incorrect_proto_message_1 = 'model { bad_no_value }'

@@ -121,14 +121,14 @@ def test_correct_proto_reader_from_string_with_redundant_commas(self):
def test_correct_proto_reader_from_string_with_windows_path(self):
result = SimpleProtoParser().parse_from_string(correct_proto_message_10)
expected_result = {
'train_input_reader': {'label_map_path': "C:\mscoco_label_map.pbtxt",
'train_input_reader': {'label_map_path': r"C:\mscoco_label_map.pbtxt",
'tf_record_input_reader': {
'input_path': "PATH_TO_BE_CONFIGURED/ mscoco_train.record"}}}
self.assertDictEqual(result, expected_result)

def test_correct_proto_reader_from_string_with_special_characters_in_string(self):
result = SimpleProtoParser().parse_from_string(correct_proto_message_11)
- expected_result = {'model': {'path': "C:\[{],}",
+ expected_result = {'model': {'path': r"C:\[{],}",
'other_value': [1, 2, 3, 4]}}
self.assertDictEqual(result, expected_result)
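
In the last two test diffs the converted literals are fixtures (escaped tensor names like ghi\,jkl and Windows paths like C:\mscoco_label_map.pbtxt) rather than regex patterns, but the reasoning is the same: \,, \m and \[ are not recognized escapes, so the plain literal already contained a real backslash and the raw-string form is character-for-character identical. A tiny hypothetical check (not in the commit) illustrating that the fixture values are unchanged:

    # Unrecognized escapes fall through unchanged in a plain literal, so the
    # raw-string rewrite only removes the warning, not the backslash.
    assert 'C:\mscoco_label_map.pbtxt' == r'C:\mscoco_label_map.pbtxt'
    assert 'ghi\,jkl' == r'ghi\,jkl'
    assert 'C:\[{],}' == r'C:\[{],}'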

