Add mod support in relay.build
apivovarov committed Jun 24, 2019
1 parent 25bad44 commit 4629d76
Showing 16 changed files with 33 additions and 23 deletions.
22 changes: 17 additions & 5 deletions python/tvm/relay/build_module.py
@@ -19,6 +19,7 @@
from a Relay expression.
"""
import numpy as np
import warnings

from tvm import expr as tvm_expr
from .. import nd as _nd, target as _target, autotvm
@@ -27,6 +28,7 @@
from . import ir_pass
from . import ty as _ty
from . import expr as _expr
from .module import Module as _Module
from .backend import interpreter as _interpreter
from .backend.vm import VMExecutor

@@ -137,14 +139,14 @@ def get_params(self):
return ret


def build(func, target=None, target_host=None, params=None):
def build(mod, target=None, target_host=None, params=None):
"""Helper function that builds a Relay function to run on TVM graph
runtime.
Parameters
----------
func: relay.Function
The function to build.
mod : relay.Module
The module to build. Using relay.Function is deprecated.
target : str, :any:`tvm.target.Target`, or dict of str(i.e. device/context
name) to str/tvm.target.Target, optional
@@ -175,6 +177,17 @@ def build(func, target=None, target_host=None, params=None):
params : dict
The parameters of the final graph.
"""
if isinstance(mod, _Module):
func = mod[mod.entry_func]
elif isinstance(mod, _expr.Function):
func = mod
warnings.warn(
"Please use input parameter mod (tvm.relay.module.Module) "
"instead of deprecated parameter func (tvm.relay.expr.Function)",
DeprecationWarning)
else:
raise ValueError("Type of input parameter mod must be tvm.relay.module.Module")

target = _update_target(target)

if isinstance(target_host, (str, _target.Target)):
@@ -192,8 +205,7 @@

with tophub_context:
bld_mod = BuildModule()
graph_json, mod, params = bld_mod.build(func, target, target_host,
params)
graph_json, mod, params = bld_mod.build(func, target, target_host, params)
return graph_json, mod, params


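For reference, a minimal usage sketch (not part of the commit) of the updated entry point: relay.build now takes a tvm.relay.module.Module, while passing a bare relay.Function is still accepted but emits a DeprecationWarning. The toy one-operator network and the relay.Module.from_expr construction below are illustrative assumptions against the TVM Python API of this period.

import tvm
from tvm import relay

# Toy network, for illustration only: y = x + 1 on a length-2 float32 vector.
x = relay.var("x", shape=(2,), dtype="float32")
func = relay.Function([x], relay.add(x, relay.const(1.0, "float32")))
mod = relay.Module.from_expr(func)  # assumed helper wrapping the function into a Module

# Preferred after this change: pass the Module itself.
with relay.build_config(opt_level=3):
    graph, lib, params = relay.build(mod, target="llvm")

# Deprecated path: passing the Function still builds but raises a DeprecationWarning.
with relay.build_config(opt_level=3):
    graph, lib, params = relay.build(func, target="llvm")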
2 changes: 1 addition & 1 deletion tests/python/frontend/caffe2/test_forward.py
@@ -43,7 +43,7 @@ def get_tvm_output(model,
mod, params = relay.frontend.from_caffe2(
model.init_net, model.predict_net, shape_dict, dtype_dict)
with relay.build_config(opt_level=3):
graph, lib, params = relay.build(mod[mod.entry_func], target, params=params)
graph, lib, params = relay.build(mod, target, params=params)

m = graph_runtime.create(graph, lib, ctx)

2 changes: 1 addition & 1 deletion tests/python/frontend/coreml/test_forward.py
@@ -73,7 +73,7 @@ def run_tvm_graph(coreml_model, target, ctx, input_data, input_name, output_shap

mod, params = relay.frontend.from_coreml(coreml_model, shape_dict)
with relay.transform.build_config(opt_level=3):
graph, lib, params = relay.build(mod[mod.entry_func], target, params=params)
graph, lib, params = relay.build(mod, target, params=params)

from tvm.contrib import graph_runtime
m = graph_runtime.create(graph, lib, ctx)
2 changes: 1 addition & 1 deletion tests/python/frontend/darknet/test_forward.py
@@ -55,7 +55,7 @@ def _get_tvm_output(net, data, build_dtype='float32', states=None):
mod, params = relay.frontend.from_darknet(net, data.shape, dtype)
target = 'llvm'
shape_dict = {'data': data.shape}
graph, library, params = relay.build(mod[mod.entry_func],
graph, library, params = relay.build(mod,
target,
params=params)

2 changes: 1 addition & 1 deletion tests/python/frontend/keras/test_forward.py
@@ -44,7 +44,7 @@ def get_tvm_output(xs, target, ctx, dtype='float32'):
shape_dict = {name: x.shape for (name, x) in zip(keras_model.input_names, xs)}
mod, params = relay.frontend.from_keras(keras_model, shape_dict)
with relay.transform.build_config(opt_level=2):
graph, lib, params = relay.build(mod[mod.entry_func],
graph, lib, params = relay.build(mod,
target,
params=params)
m = graph_runtime.create(graph, lib, ctx)
2 changes: 1 addition & 1 deletion tests/python/frontend/mxnet/test_forward.py
@@ -66,7 +66,7 @@ def get_tvm_output(symbol, x, args, auxs, target, ctx, dtype='float32'):
arg_params=args,
aux_params=auxs)
with relay.build_config(opt_level=3):
graph, lib, params = relay.build(mod[mod.entry_func], target, params=params)
graph, lib, params = relay.build(mod, target, params=params)
m = graph_runtime.create(graph, lib, ctx)
# set inputs
m.set_input("data", tvm.nd.array(x.astype(dtype)))
2 changes: 1 addition & 1 deletion tests/python/frontend/onnx/test_forward.py
@@ -47,7 +47,7 @@ def get_tvm_output(graph_def, input_data, target, ctx, output_shape=None, output

mod, params = relay.frontend.from_onnx(graph_def, shape_dict)
with relay.build_config(opt_level=1):
graph, lib, params = relay.build(mod[mod.entry_func],
graph, lib, params = relay.build(mod,
target,
params=params)

4 changes: 2 additions & 2 deletions tests/python/frontend/tensorflow/test_forward.py
@@ -64,7 +64,7 @@ def run_tvm_graph(graph_def, input_data, input_node, num_output=1,
shape=shape_dict,
outputs=out_names)
with relay.build_config(opt_level=opt_level):
graph, lib, params = relay.build(mod[mod.entry_func], target, target_host, params)
graph, lib, params = relay.build(mod, target, target_host, params)

ctx = tvm.context(target, 0)
from tvm.contrib import graph_runtime
@@ -1436,7 +1436,7 @@ def _get_tvm_graph_module(graph_def):
'Model/RNN/RNN/multi_rnn_cell/cell_0/lstm_cell/LSTMBlockCell_h':'float32'}
target = 'llvm'
with relay.build_config(opt_level=0):
graph, lib, params = relay.build(mod[mod.entry_func],
graph, lib, params = relay.build(mod,
target,
params=params)
from tvm.contrib import graph_runtime
4 changes: 1 addition & 3 deletions tests/python/frontend/tflite/test_forward.py
@@ -68,9 +68,7 @@ def run_tvm_graph(tflite_model_buf, input_data, input_node, num_output=1, target
shape_dict=shape_dict,
dtype_dict=dtype_dict)
with relay.build_config(opt_level=3):
graph, lib, params = relay.build(mod[mod.entry_func],
target,
params=params)
graph, lib, params = relay.build(mod, target, params=params)

ctx = tvm.context(target, 0)
from tvm.contrib import graph_runtime
2 changes: 1 addition & 1 deletion tutorials/frontend/deploy_model_on_android.py
@@ -263,7 +263,7 @@ def transform_image(image):
mod, params = relay.frontend.from_keras(keras_mobilenet_v2, shape_dict)

with relay.build_config(opt_level=3):
graph, lib, params = relay.build(mod[mod.entry_func], target=target,
graph, lib, params = relay.build(mod, target=target,
target_host=target_host, params=params)

# After `relay.build`, you will get three return values: graph,
2 changes: 1 addition & 1 deletion tutorials/frontend/deploy_ssd_gluoncv.py
@@ -78,7 +78,7 @@
def build(target):
mod, params = relay.frontend.from_mxnet(block, {"data": dshape})
with relay.build_config(opt_level=3):
graph, lib, params = relay.build(mod[mod.entry_func], target, params=params)
graph, lib, params = relay.build(mod, target, params=params)
return graph, lib, params

######################################################################
2 changes: 1 addition & 1 deletion tutorials/frontend/from_caffe2.py
@@ -89,7 +89,7 @@ def transform_image(image):
# target x86 CPU
target = 'llvm'
with relay.build_config(opt_level=3):
graph, lib, params = relay.build(mod[mod.entry_func], target, params=params)
graph, lib, params = relay.build(mod, target, params=params)

######################################################################
# Execute on TVM
2 changes: 1 addition & 1 deletion tutorials/frontend/from_coreml.py
@@ -71,7 +71,7 @@
mod, params = relay.frontend.from_coreml(mlmodel, shape_dict)

with relay.build_config(opt_level=3):
graph, lib, params = relay.build(mod[mod.entry_func],
graph, lib, params = relay.build(mod,
target,
params=params)

2 changes: 1 addition & 1 deletion tutorials/frontend/from_darknet.py
@@ -95,7 +95,7 @@
shape = {'data': data.shape}
print("Compiling the model...")
with relay.build_config(opt_level=3):
graph, lib, params = relay.build(mod[mod.entry_func],
graph, lib, params = relay.build(mod,
target=target,
target_host=target_host,
params=params)
2 changes: 1 addition & 1 deletion tutorials/frontend/from_tensorflow.py
@@ -140,7 +140,7 @@
# lib: target library which can be deployed on target with TVM runtime.

with relay.build_config(opt_level=3):
graph, lib, params = relay.build(mod[mod.entry_func],
graph, lib, params = relay.build(mod,
target=target,
target_host=target_host,
params=params)
2 changes: 1 addition & 1 deletion tutorials/frontend/from_tflite.py
@@ -145,7 +145,7 @@ def extract(path):
# target x86 CPU
target = "llvm"
with relay.build_config(opt_level=3):
graph, lib, params = relay.build(mod[mod.entry_func], target, params=params)
graph, lib, params = relay.build(mod, target, params=params)

######################################################################
# Execute on TVM
