[NNVM][FRONTEND] Review comments.
srkreddy1238 committed Jun 5, 2018
1 parent 28dbe28 commit d42ae4f
Showing 2 changed files with 122 additions and 88 deletions.
79 changes: 79 additions & 0 deletions nnvm/python/nnvm/testing/tf.py
@@ -0,0 +1,79 @@
# pylint: disable=invalid-name, unused-variable, unused-argument, no-init
"""
Compile Tensorflow Models
=========================
Some helper definitions for tensorflow models.
"""
import re

# Tensorflow imports
import tensorflow as tf

######################################################################
# Some helper functions
# ---------------------

class NodeLookup(object):
    """Converts integer node IDs to human-readable labels."""

    def __init__(self,
                 label_lookup_path=None,
                 uid_lookup_path=None):
        self.node_lookup = self.load(label_lookup_path, uid_lookup_path)

    def load(self, label_lookup_path, uid_lookup_path):
        """Loads a human-readable English name for each softmax node.

        Parameters
        ----------
        label_lookup_path: String
            File containing string UID to integer node ID mapping.
        uid_lookup_path: String
            File containing string UID to human-readable string mapping.

        Returns
        -------
        node_id_to_name : dict
            dict from integer node ID to human-readable string.
        """
        if not tf.gfile.Exists(uid_lookup_path):
            tf.logging.fatal('File does not exist %s', uid_lookup_path)
        if not tf.gfile.Exists(label_lookup_path):
            tf.logging.fatal('File does not exist %s', label_lookup_path)

        # Loads mapping from string UID to human-readable string
        proto_as_ascii_lines = tf.gfile.GFile(uid_lookup_path).readlines()
        uid_to_human = {}
        p = re.compile(r'[n\d]*[ \S,]*')
        for line in proto_as_ascii_lines:
            parsed_items = p.findall(line)
            uid = parsed_items[0]
            human_string = parsed_items[2]
            uid_to_human[uid] = human_string

        # Loads mapping from string UID to integer node ID.
        node_id_to_uid = {}
        proto_as_ascii = tf.gfile.GFile(label_lookup_path).readlines()
        for line in proto_as_ascii:
            if line.startswith(' target_class:'):
                target_class = int(line.split(': ')[1])
            if line.startswith(' target_class_string:'):
                target_class_string = line.split(': ')[1]
                node_id_to_uid[target_class] = target_class_string[1:-2]

        # Loads the final mapping of integer node ID to human-readable string
        node_id_to_name = {}
        for key, val in node_id_to_uid.items():
            if val not in uid_to_human:
                tf.logging.fatal('Failed to locate: %s', val)
            name = uid_to_human[val]
            node_id_to_name[key] = name

        return node_id_to_name

    def id_to_string(self, node_id):
        if node_id not in self.node_lookup:
            return ''
        return self.node_lookup[node_id]
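Since this helper now lives in nnvm.testing.tf, a quick usage sketch (hypothetical file paths and class ID, assuming the ImageNet label-map files have already been downloaded as in the tutorial below) might look like:

# Hypothetical usage of the NodeLookup helper above; the two paths are
# placeholders for the label-map files the tutorial downloads later.
from nnvm.testing.tf import NodeLookup

node_lookup = NodeLookup(label_lookup_path="./label_map_proto.pbtxt",
                         uid_lookup_path="./synset_to_human_map.txt")

# id_to_string returns the human-readable class name, or '' for unknown IDs.
print(node_lookup.id_to_string(169))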
131 changes: 43 additions & 88 deletions tutorials/nnvm/from_tensorflow.py
@@ -14,16 +14,16 @@
import tvm
import numpy as np
import os.path
import re

# Tensorflow imports
import tensorflow as tf
from tensorflow.core.framework import graph_pb2
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import tensor_util

import nnvm.testing.tf

repo_base = 'https://github.com/srkreddy1238/dmlc_data/raw/master/models/tensorflow/InceptionV1/'
repo_base = 'https://github.com/dmlc/web-data/raw/master/tensorflow/models/InceptionV1/'
img_name = 'elephant-299.jpg'
image_url = os.path.join(repo_base, img_name)
model_name = 'classify_image_graph_def-with_shapes.pb'
Expand All @@ -35,10 +35,36 @@


######################################################################
# Some helper functions
# Download processed tensorflow model
# -----------------------------------
# In this section, we download a pretrained Tensorflow model and classify an image.
from mxnet.gluon.utils import download

download(image_url, img_name)
download(model_url, model_name)
download(map_proto_url, map_proto)
download(lable_map_url, lable_map)
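The download helper here is borrowed from mxnet.gluon.utils purely for convenience. If MXNet is not installed, a minimal stand-in with the same (url, filename) calling convention could be used instead; this is a hypothetical fallback, not part of the commit:

# Hypothetical fallback downloader when mxnet is unavailable.
import os
try:
    from urllib.request import urlretrieve   # Python 3
except ImportError:
    from urllib import urlretrieve            # Python 2

def download(url, fname):
    """Fetch url into fname unless the file already exists locally."""
    if not os.path.exists(fname):
        urlretrieve(url, fname)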


######################################################################
# Creates graph from saved graph_def.pb.
# --------------------------------------

def _ProcessGraphDefParam(graph_def):
    """Type-checks and possibly canonicalizes `graph_def`."""
    """Type-checks and possibly canonicalizes `graph_def`.

    Parameters
    ----------
    graph_def : Obj
        tensorflow graph definition.

    Returns
    -------
    graph_def : Obj
        tensorflow graph definition.
    """

    if not isinstance(graph_def, graph_pb2.GraphDef):
        # `graph_def` could be a dynamically-created message, so try a duck-typed
        # approach
@@ -50,84 +76,6 @@ def _ProcessGraphDefParam(graph_def):
        raise TypeError('graph_def must be a GraphDef proto.')
    return graph_def
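The middle of this function is collapsed by the diff; the duck-typed canonicalization it mentions follows the usual TensorFlow importer pattern, roughly (a sketch, not the exact collapsed lines):

# Sketch of the collapsed duck-typing step: copy an arbitrary GraphDef-like
# message into a real graph_pb2.GraphDef, or fail with a TypeError.
try:
    old_graph_def = graph_def
    graph_def = graph_pb2.GraphDef()
    graph_def.MergeFrom(old_graph_def)
except TypeError:
    raise TypeError('graph_def must be a GraphDef proto.')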

class NodeLookup(object):
    """Converts integer node ID's to human readable labels."""

    def __init__(self,
                 label_lookup_path=None,
                 uid_lookup_path=None):
        if not label_lookup_path:
            label_lookup_path = os.path.join(
                "./", map_proto)
        if not uid_lookup_path:
            uid_lookup_path = os.path.join(
                "./", lable_map)
        self.node_lookup = self.load(label_lookup_path, uid_lookup_path)

    def load(self, label_lookup_path, uid_lookup_path):
        """Loads a human readable English name for each softmax node.
        Args:
          label_lookup_path: string UID to integer node ID.
          uid_lookup_path: string UID to human-readable string.
        Returns:
          dict from integer node ID to human-readable string.
        """
        if not tf.gfile.Exists(uid_lookup_path):
            tf.logging.fatal('File does not exist %s', uid_lookup_path)
        if not tf.gfile.Exists(label_lookup_path):
            tf.logging.fatal('File does not exist %s', label_lookup_path)

        # Loads mapping from string UID to human-readable string
        proto_as_ascii_lines = tf.gfile.GFile(uid_lookup_path).readlines()
        uid_to_human = {}
        p = re.compile(r'[n\d]*[ \S,]*')
        for line in proto_as_ascii_lines:
            parsed_items = p.findall(line)
            uid = parsed_items[0]
            human_string = parsed_items[2]
            uid_to_human[uid] = human_string

        # Loads mapping from string UID to integer node ID.
        node_id_to_uid = {}
        proto_as_ascii = tf.gfile.GFile(label_lookup_path).readlines()
        for line in proto_as_ascii:
            if line.startswith(' target_class:'):
                target_class = int(line.split(': ')[1])
            if line.startswith(' target_class_string:'):
                target_class_string = line.split(': ')[1]
                node_id_to_uid[target_class] = target_class_string[1:-2]

        # Loads the final mapping of integer node ID to human-readable string
        node_id_to_name = {}
        for key, val in node_id_to_uid.items():
            if val not in uid_to_human:
                tf.logging.fatal('Failed to locate: %s', val)
            name = uid_to_human[val]
            node_id_to_name[key] = name

        return node_id_to_name

    def id_to_string(self, node_id):
        if node_id not in self.node_lookup:
            return ''
        return self.node_lookup[node_id]

######################################################################
# Download processed tensorflow model
# ---------------------------------------------
# In this section, we download a pretrained Tensorflow model and classify an image.
from mxnet.gluon.utils import download

download(image_url, img_name)
download(model_url, model_name)
download(map_proto_url, map_proto)
download(lable_map_url, lable_map)


######################################################################
# Creates graph from saved graph_def.pb.
with tf.gfile.FastGFile(os.path.join(
        "./", model_name), 'rb') as f:
    graph_def = tf.GraphDef()
@@ -138,6 +86,7 @@
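The loading block above is truncated in this view; a self-contained sketch of reading the frozen GraphDef with the TF 1.x APIs used throughout the tutorial would be:

# Sketch mirroring the truncated block: parse the frozen .pb file,
# register it with the default graph, and canonicalize the protobuf.
with tf.gfile.FastGFile(os.path.join("./", model_name), 'rb') as f:
    graph_def = tf.GraphDef()
    graph_def.ParseFromString(f.read())
    graph = tf.import_graph_def(graph_def, name='')
    graph_def = _ProcessGraphDefParam(graph_def)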

######################################################################
# Decode image
# ------------
from PIL import Image
image = Image.open(img_name).resize((299, 299))

@@ -150,7 +99,7 @@ def transform_image(image):
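transform_image itself is collapsed here; for this graph the preprocessing amounts to turning the resized PIL image into a NumPy array (a sketch under that assumption, not the verbatim collapsed code):

# Sketch of the collapsed helper: the model consumes the raw 299x299 image,
# so a plain NumPy conversion is assumed to be sufficient.
def transform_image(image):
    image = np.array(image)
    return image

x = transform_image(image)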

######################################################################
# Import the graph to NNVM
# -----------------
# ------------------------
sym, params = nnvm.frontend.from_tensorflow(graph_def)
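The NNVM compilation and TVM execution sections that follow are collapsed in this diff; a minimal sketch of how the imported sym/params are typically built and run (target, input tensor name, shapes, and output size here are assumptions, not copied from the collapsed code):

import nnvm.compiler
from tvm.contrib import graph_runtime

# 'x' is the transformed image array from the step above.
# Assumed input tensor name and dtype for this frozen Inception graph.
shape_dict = {'DecodeJpeg/contents': x.shape}
dtype_dict = {'DecodeJpeg/contents': 'uint8'}

# Build for a CPU target (assumption; any TVM target works).
graph, lib, params = nnvm.compiler.build(sym, target='llvm',
                                         shape=shape_dict,
                                         dtype=dtype_dict,
                                         params=params)

# Run on the TVM graph runtime and fetch the softmax output;
# the squeeze/printing below operates on this result.
ctx = tvm.cpu(0)
m = graph_runtime.create(graph, lib, ctx)
m.set_input('DecodeJpeg/contents', tvm.nd.array(x.astype('uint8')))
m.set_input(**params)
m.run()
tvm_output = m.get_output(0, tvm.nd.empty((1, 1008), 'float32'))
predictions = tvm_output.asnumpy()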

######################################################################
@@ -165,6 +114,7 @@

######################################################################
# Save the compilation output.
# ----------------------------
"""
lib.export_library("imagenet_tensorflow.so")
with open("imagenet_tensorflow.json", "w") as fo:
@@ -197,7 +147,8 @@ def transform_image(image):
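The commented-out save block is also truncated; the usual pattern for persisting the NNVM build artifacts (a sketch assuming graph, lib, and params come from nnvm.compiler.build) is:

# Sketch: persist the compiled library, graph JSON, and params for deployment.
lib.export_library("imagenet_tensorflow.so")
with open("imagenet_tensorflow.json", "w") as fo:
    fo.write(graph.json())
with open("imagenet_tensorflow.params", "wb") as fo:
    fo.write(nnvm.compiler.save_param_dict(params))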
predictions = np.squeeze(predictions)

# Creates node ID --> English string lookup.
node_lookup = NodeLookup()
node_lookup = nnvm.testing.tf.NodeLookup(label_lookup_path=os.path.join("./", map_proto),
                                         uid_lookup_path=os.path.join("./", lable_map))

top_k = predictions.argsort()[-5:][::-1]
for node_id in top_k:
@@ -221,10 +172,13 @@ def create_graph():
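The loop printing the NNVM results is cut off above; the standard top-5 printout using the NodeLookup helper (a sketch consistent with the lines shown) would be:

# Sketch of the truncated loop: map the five highest-scoring IDs to labels.
for node_id in top_k:
    human_string = node_lookup.id_to_string(node_id)
    score = predictions[node_id]
    print('%s (score = %.5f)' % (human_string, score))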
def run_inference_on_image(image):
    """Runs inference on an image.
    Args:
      image: Image file name.
    Parameters
    ----------
    image: String
        Image file name.
    Returns:
    Returns
    -------
        Nothing
    """
    if not tf.gfile.Exists(image):
@@ -242,7 +196,8 @@ def run_inference_on_image(image):
        predictions = np.squeeze(predictions)

        # Creates node ID --> English string lookup.
        node_lookup = NodeLookup()
        node_lookup = nnvm.testing.tf.NodeLookup(label_lookup_path=os.path.join("./", map_proto),
                                                 uid_lookup_path=os.path.join("./", lable_map))

        top_k = predictions.argsort()[-5:][::-1]
        print ("===== TENSORFLOW RESULTS =======")
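The body of this TensorFlow baseline is largely collapsed; it follows the classic classify_image session pattern, roughly as sketched below (the tensor names and the image_data read are assumptions based on that pattern, not the verbatim collapsed code):

# Sketch of the collapsed baseline: feed the raw JPEG bytes, read 'softmax:0',
# then print the same top-5 NodeLookup summary as in the NNVM section above.
image_data = tf.gfile.FastGFile(image, 'rb').read()
with tf.Session() as sess:
    softmax_tensor = sess.graph.get_tensor_by_name('softmax:0')
    predictions = sess.run(softmax_tensor,
                           {'DecodeJpeg/contents:0': image_data})
    predictions = np.squeeze(predictions)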
