Tutorial enhancement to keep it clean on docs.tvm.ai #1450
Changes from 1 commit
@@ -1,17 +1,20 @@
"""
Compile Tensorflow Models
=========================
This article is an introductory tutorial to deploy tensorflow models with NNVM.
This article is an introductory tutorial to deploy tensorflow models with TVM.

For us to begin with, tensorflow module is required to be installed.
For us to begin with, tensorflow python module is required to be installed.

A quick solution is to install tensorlfow from

https://www.tensorflow.org/install/install_sources
https://www.tensorflow.org/install
"""

# tvm and nnvm
import nnvm
import tvm

# os and numpy
import numpy as np
import os.path

@@ -21,23 +24,39 @@
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import tensor_util

# Tensorflow utility functions
import nnvm.testing.tf

# Base location for model related files.
repo_base = 'https://github.com/dmlc/web-data/raw/master/tensorflow/models/InceptionV1/'

# Test image
img_name = 'elephant-299.jpg'
image_url = os.path.join(repo_base, img_name)

# InceptionV1 model protobuf
# .. note::
#
#   protobuf should be exported with :any:`add_shapes=True` option.
#   Could use https://github.com/dmlc/web-data/tree/master/tensorflow/scripts/tf-to-nnvm.py
#   to add shapes for existing models.
#
model_name = 'classify_image_graph_def-with_shapes.pb'
model_url = os.path.join(repo_base, model_name)
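The note above is the main prerequisite for the frontend: the protobuf must carry output shape attributes. As a rough sketch (not part of this PR), a graph definition with shapes attached could be written out along these lines in TF 1.x; the session contents and the output file name here are assumptions for illustration only:

```python
import tensorflow as tf

with tf.Session() as sess:
    # ... build or restore the (already frozen) model into sess.graph here ...
    graph_def = sess.graph.as_graph_def(add_shapes=True)  # attach _output_shapes to every node
    with tf.gfile.GFile('graph_def-with_shapes.pb', 'wb') as f:
        f.write(graph_def.SerializeToString())
```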

# Image label map
map_proto = 'imagenet_2012_challenge_label_map_proto.pbtxt'
map_proto_url = os.path.join(repo_base, map_proto)

# Human readable text for labels
lable_map = 'imagenet_synset_to_human_label_map.txt'
lable_map_url = os.path.join(repo_base, lable_map)


######################################################################
# Download processed tensorflow model
# -----------------------------------
# In this section, we download a pretrained Tensorflow model and classify an image.
# Download required files
# -----------------------
# Download files listed above.
from mxnet.gluon.utils import download

download(image_url, img_name)
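The download helper here comes from mxnet. If mxnet is not installed, a plain-Python fetch would do the same job; a minimal sketch (an assumption, not part of the tutorial):

```python
import os
try:
    from urllib.request import urlretrieve   # Python 3
except ImportError:
    from urllib import urlretrieve            # Python 2

def fetch(url, fname):
    """Download url into the working directory unless the file already exists."""
    if not os.path.exists(fname):
        urlretrieve(url, fname)

# e.g. fetch(image_url, img_name)
```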

@@ -47,8 +66,9 @@

######################################################################
# Creates graph from saved graph_def.pb.
# --------------------------------------
# Import model
# ------------
# Creates tensorflow graph definition from protobuf file.

with tf.gfile.FastGFile(os.path.join(
        "./", model_name), 'rb') as f:
Review comment (suggests fitting the call on one line): with tf.gfile.FastGFile(os.path.join("./", model_name), 'rb') as f:
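The diff truncates the body of this block. For context, this kind of import step typically parses the serialized protobuf and registers it in the default graph; a hedged sketch, assuming model_name as defined above:

```python
import os.path
import tensorflow as tf

with tf.gfile.FastGFile(os.path.join("./", model_name), 'rb') as f:
    graph_def = tf.GraphDef()
    graph_def.ParseFromString(f.read())              # parse the serialized protobuf
    graph = tf.import_graph_def(graph_def, name='')  # register the nodes in the default graph
```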

@@ -62,6 +82,13 @@

######################################################################
# Decode image
# ------------
# .. note::
#
#   tensorflow frontend import doesn't support preprocessing ops like JpegDecode
#   JpegDecode is a bypass (copy source) here.
Review comment (wording): "is bypassed"
#   Hence we supply decoded frame to TVM instead.
#

from PIL import Image
image = Image.open(img_name).resize((299, 299))

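The next hunk header references a transform_image helper whose body the diff does not show. Presumably it turns the PIL image into the numpy array x used for the input shape further down; a minimal hypothetical version:

```python
import numpy as np

def transform_image(image):
    """Convert the PIL image into the numpy array fed to the graph (assumed helper)."""
    return np.array(image)

x = transform_image(image)   # x.shape is later used as the input shape
```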
@@ -74,10 +101,24 @@ def transform_image(image):

######################################################################
# Import the graph to NNVM
# ------------------------
# Import tensorflow graph definition to nnvm.
#
# Results:
#   sym: nnvm graph for given tensorflow protobuf.
#   params: params converted from tensorflow params (tensor protobuf).
sym, params = nnvm.frontend.from_tensorflow(graph_def)

print ("Tensorflow protobuf imported as nnvm graph")

######################################################################
# Now compile the graph through NNVM
# NNVM Compilation
# ----------------
# Compile the graph to llvm target with given input specification.
#
# Results:
#   graph: Final graph after compilation.
#   params: final params after compilation.
#   lib: target library which can be deployed on target with tvm runtime.

import nnvm.compiler
target = 'llvm'
shape_dict = {'DecodeJpeg/contents': x.shape}

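The build call itself falls outside this hunk. For orientation, compiling with these inputs conventionally looks like the following sketch, which reuses sym, params, target and shape_dict from above; dtype_dict is an assumption mirroring the uint8 input used further down:

```python
dtype_dict = {'DecodeJpeg/contents': 'uint8'}   # assumed to match the uint8 input below
graph, lib, params = nnvm.compiler.build(sym, target=target,
                                         shape=shape_dict,
                                         dtype=dtype_dict,
                                         params=params)
```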
@@ -87,7 +128,8 @@ def transform_image(image):

######################################################################
# Execute the portable graph on TVM
# ---------------------------------
# Now, we would like to reproduce the same forward computation using TVM.
# Now we can try deploying the NNVM compiled model on cpu target.

from tvm.contrib import graph_runtime
ctx = tvm.cpu(0)
dtype = 'uint8'

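The lines that actually create and feed the runtime module are elided between this hunk and the next. A hedged sketch of what they conventionally look like with the graph_runtime API, using the ctx, dtype and compilation outputs defined above:

```python
m = graph_runtime.create(graph, lib, ctx)                          # runtime module on the cpu context
m.set_input('DecodeJpeg/contents', tvm.nd.array(x.astype(dtype)))  # feed the decoded image
m.set_input(**params)                                              # feed the compiled parameters
m.run()                                                            # execute the graph
```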
@@ -100,26 +142,28 @@ def transform_image(image):
# get outputs
tvm_output = m.get_output(0, tvm.nd.empty(((1, 1008)), 'float32'))


######################################################################
# Process the output to human readable
# ------------------------------------
# Process the output
# ------------------
# Process the model output to human readable text for InceptionV1.
predictions = tvm_output.asnumpy()
predictions = np.squeeze(predictions)

# Creates node ID --> English string lookup.
node_lookup = nnvm.testing.tf.NodeLookup(label_lookup_path=os.path.join("./", map_proto),
                                         uid_lookup_path=os.path.join("./", lable_map))

# Print top 5 predictions from TVM output.
top_k = predictions.argsort()[-5:][::-1]
for node_id in top_k:
    human_string = node_lookup.id_to_string(node_id)
    score = predictions[node_id]
    print('%s (score = %.5f)' % (human_string, score))

######################################################################
# Run the same graph with tensorflow and dump output.
# ---------------------------------------------------
# Inference on tensorflow
# -----------------------
# Now lets run the same on tensorflow
Review comment (wording): "run the corresponding model on tensorflow"

def create_graph():
    """Creates a graph from saved GraphDef file and returns a saver."""

@@ -161,6 +205,7 @@ def run_inference_on_image(image):
    node_lookup = nnvm.testing.tf.NodeLookup(label_lookup_path=os.path.join("./", map_proto),
                                             uid_lookup_path=os.path.join("./", lable_map))

    # Print top 5 predictions from tensorflow.
    top_k = predictions.argsort()[-5:][::-1]
    print ("===== TENSORFLOW RESULTS =======")
    for node_id in top_k:
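The diff stops mid-loop here. The body presumably mirrors the TVM-side printing above; a sketch of the assumed remainder, at the function's indentation:

```python
        human_string = node_lookup.id_to_string(node_id)   # map class id to label text
        score = predictions[node_id]
        print('%s (score = %.5f)' % (human_string, score))
```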
Review comment (typo): tensorlfow > tensorflow