Fix the TF tutorial to run against TF2.0 and TF1.x #4104

Merged (15 commits, Nov 12, 2019)
5 changes: 4 additions & 1 deletion python/tvm/relay/frontend/tensorflow.py
@@ -306,8 +306,10 @@ def _impl(inputs, attr, params):
use_bias = len(inputs) == 3
channel_axis = 1 if attr['data_format'] == "NCHW" else 3

+# Ignore the new attributes from TF2.0, for now.
out = AttrCvt(
op_name=_dimension_picker('conv'),
+ignores=['explicit_paddings'],
transforms={
'kernel_shape': 'kernel_size',
'data_format': 'data_layout',
@@ -404,8 +406,9 @@ def _impl(inputs, attr, params):
# NHWC
attr['layout'] = 'NHWC'

+# Ignore the new attributes from TF2.0, for now.
return AttrCvt(op_name="resize",
-ignores=['Tdim'],
+ignores=['Tdim', 'half_pixel_centers'],
Member:

It would be nice to add half_pixel_centers to the TVM resize op. If the result differs from TensorFlow, we might need to raise an error instead of ignoring the attribute.
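For illustration, the raise-instead-of-ignore alternative could look roughly like this inside the resize converter, assuming the tvm.error exception types the frontend uses for unsupported attributes (a sketch, not code from this PR):

# Hypothetical guard inside the resize converter's _impl, instead of silently
# dropping the attribute (illustrative sketch only):
if attr.get('half_pixel_centers', False):
    raise tvm.error.OpAttributeUnImplemented(
        'Attribute half_pixel_centers is not supported for operator Resize.')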

Contributor:

I think half_pixel_centers is the new name of align_corners (used in TF 1.x), which we do support in the TVM resize op.

Member:

Then adding transforms={'half_pixel_centers': 'align_corners'} here should work.
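For illustration, that suggestion would amount to roughly the following in the resize converter shown above (a sketch of the proposed change, not the code merged in this PR):

# Hypothetical variant of the resize converter: map the TF 2.0 attribute onto
# the existing align_corners handling instead of ignoring it (sketch only).
return AttrCvt(op_name="resize",
               ignores=['Tdim'],
               transforms={'half_pixel_centers': 'align_corners'},
               extras={'method': "bilinear"})(inputs, attr)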

Contributor Author (@ic, Oct 25, 2019):

It seems the two attributes serve different purposes, and the documentation doesn't really say. In the TensorFlow source, half_pixel_centers processing is applied optionally on top of the align_corners handling, e.g.:

Eigen::Index in_y = std::min(
            (align_corners)
                ? static_cast<Eigen::Index>(roundf(scaler(y, height_scale)))
                : static_cast<Eigen::Index>(floorf(scaler(y, height_scale))),
            in_height - 1);
if (half_pixel_centers) {
  in_y = std::max(static_cast<Eigen::Index>(0), in_y);
}

Contributor Author:

Idea: how about a unit test that compares a TF model using that attribute against the Relay conversion that ignores it? While reading the test suite to extend it for this PR, I noticed there is little coverage for this kind of issue, and fixing the tutorial seemed worth releasing early on its own.

I'll try writing such a test today, but if you have any guidance, advice, or preference, I'd be happy to hear it. It may belong in a separate PR.
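A rough sketch of what such a test could look like, using TF 1.x-compatible APIs and the Relay build-and-run flow with the API names current at the time of this PR; the output naming, opt level, and tolerances here are illustrative assumptions, not code from this PR:

import numpy as np
import tensorflow as tf
import tvm
from tvm import relay
from tvm.contrib import graph_runtime

def test_resize_bilinear_half_pixel_centers():
    """Compare TF's resize_bilinear with half_pixel_centers=True against the
    Relay conversion, which currently drops that attribute."""
    data = np.random.uniform(size=(1, 16, 16, 3)).astype('float32')

    with tf.compat.v1.Graph().as_default() as graph:
        inp = tf.compat.v1.placeholder(tf.float32, shape=data.shape, name='input')
        tf.compat.v1.image.resize_bilinear(
            inp, [32, 32], half_pixel_centers=True, name='resize')
        with tf.compat.v1.Session(graph=graph) as sess:
            tf_out = sess.run('resize:0', feed_dict={'input:0': data})
            graph_def = sess.graph.as_graph_def(add_shapes=True)

    # Import into Relay and run on CPU.
    mod, params = relay.frontend.from_tensorflow(
        graph_def, shape={'input': data.shape}, outputs=['resize'])
    graph_json, lib, params = relay.build(mod, target='llvm', params=params)
    m = graph_runtime.create(graph_json, lib, tvm.cpu(0))
    m.set_input('input', data)
    m.set_input(**params)
    m.run()
    tvm_out = m.get_output(0).asnumpy()

    # If ignoring half_pixel_centers changes the numerics, this will fail.
    np.testing.assert_allclose(tf_out, tvm_out, rtol=1e-5, atol=1e-5)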

extras={'method': "bilinear"})(inputs, attr)
return _impl

14 changes: 7 additions & 7 deletions python/tvm/relay/testing/tf.py
@@ -80,7 +80,7 @@ def AddShapesToGraphDef(session, out_node):

"""

-graph_def = tf.graph_util.convert_variables_to_constants(
+graph_def = tf.compat.v1.graph_util.convert_variables_to_constants(
session,
session.graph.as_graph_def(add_shapes=True),
[out_node],
@@ -112,13 +112,13 @@ def load(self, label_lookup_path, uid_lookup_path):
dict from integer node ID to human-readable string.

"""
-if not tf.gfile.Exists(uid_lookup_path):
+if not tf.compat.v1.io.gfile.exists(uid_lookup_path):
tf.logging.fatal('File does not exist %s', uid_lookup_path)
-if not tf.gfile.Exists(label_lookup_path):
+if not tf.compat.v1.io.gfile.exists(label_lookup_path):
tf.logging.fatal('File does not exist %s', label_lookup_path)

# Loads mapping from string UID to human-readable string
-proto_as_ascii_lines = tf.gfile.GFile(uid_lookup_path).readlines()
+proto_as_ascii_lines = tf.compat.v1.gfile.GFile(uid_lookup_path).readlines()
uid_to_human = {}
p = re.compile(r'[n\d]*[ \S,]*')
for line in proto_as_ascii_lines:
@@ -129,7 +129,7 @@ def load(self, label_lookup_path, uid_lookup_path):

# Loads mapping from string UID to integer node ID.
node_id_to_uid = {}
-proto_as_ascii = tf.gfile.GFile(label_lookup_path).readlines()
+proto_as_ascii = tf.compat.v1.gfile.GFile(label_lookup_path).readlines()
for line in proto_as_ascii:
if line.startswith(' target_class:'):
target_class = int(line.split(': ')[1])
@@ -209,7 +209,7 @@ def get_workload(model_path, model_sub_path=None):
path_model = download_testdata(model_url, model_path, module='tf')

# Creates graph from saved graph_def.pb.
-with tf.gfile.FastGFile(path_model, 'rb') as f:
+with tf.compat.v1.gfile.FastGFile(path_model, 'rb') as f:
graph_def = tf.GraphDef()
graph_def.ParseFromString(f.read())
graph = tf.import_graph_def(graph_def, name='')
@@ -299,7 +299,7 @@ def _create_ptb_vocabulary(data_dir):
file_name = 'ptb.train.txt'
def _read_words(filename):
"""Read the data for creating vocabulary"""
-with tf.gfile.GFile(filename, "r") as f:
+with tf.compat.v1.gfile.GFile(filename, "r") as f:
return f.read().encode("utf-8").decode("utf-8").replace("\n", "<eos>").split()

def _build_vocab(filename):
16 changes: 8 additions & 8 deletions tutorials/frontend/from_tensorflow.py
@@ -89,14 +89,14 @@
# ------------
# Creates tensorflow graph definition from protobuf file.

-with tf.gfile.FastGFile(model_path, 'rb') as f:
-graph_def = tf.GraphDef()
+with tf.compat.v1.gfile.GFile(model_path, 'rb') as f:
+graph_def = tf.compat.v1.GraphDef()
graph_def.ParseFromString(f.read())
graph = tf.import_graph_def(graph_def, name='')
# Call the utility to import the graph definition into default graph.
graph_def = tf_testing.ProcessGraphDefParam(graph_def)
# Add shapes to the graph.
-with tf.Session() as sess:
+with tf.compat.v1.Session() as sess:
graph_def = tf_testing.AddShapesToGraphDef(sess, 'softmax')

######################################################################
@@ -187,8 +187,8 @@
def create_graph():
"""Creates a graph from saved GraphDef file and returns a saver."""
# Creates graph from saved graph_def.pb.
-with tf.gfile.FastGFile(model_path, 'rb') as f:
-graph_def = tf.GraphDef()
+with tf.compat.v1.gfile.GFile(model_path, 'rb') as f:
+graph_def = tf.compat.v1.GraphDef()
graph_def.ParseFromString(f.read())
graph = tf.import_graph_def(graph_def, name='')
# Call the utility to import the graph definition into default graph.
@@ -206,14 +206,14 @@ def run_inference_on_image(image):
-------
Nothing
"""
-if not tf.gfile.Exists(image):
+if not tf.compat.v1.io.gfile.exists(image):
tf.logging.fatal('File does not exist %s', image)
-image_data = tf.gfile.FastGFile(image, 'rb').read()
+image_data = tf.compat.v1.gfile.GFile(image, 'rb').read()

# Creates graph from saved GraphDef.
create_graph()

-with tf.Session() as sess:
+with tf.compat.v1.Session() as sess:
softmax_tensor = sess.graph.get_tensor_by_name('softmax:0')
predictions = sess.run(softmax_tensor,
{'DecodeJpeg/contents:0': image_data})
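As background on the pattern this diff follows: every call site is rewritten to go through tf.compat.v1, which is available in recent TF 1.x releases and in TF 2.x. An alternative sketch (an illustration of the idea, not what this PR does) resolves that module once and uses a single alias throughout the tutorial:

import tensorflow as tf

# Resolve the TF 1.x-style API surface once; tf.compat.v1 exists in recent
# TF 1.x releases and in all TF 2.x releases (illustrative pattern only).
try:
    tf_compat_v1 = tf.compat.v1
except (ImportError, AttributeError):
    tf_compat_v1 = tf  # very old TF 1.x without the compat module

model_path = 'model.pb'  # placeholder path for illustration
with tf_compat_v1.gfile.GFile(model_path, 'rb') as f:
    graph_def = tf_compat_v1.GraphDef()
    graph_def.ParseFromString(f.read())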