Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[RUNTIME] Minimum graph runtime #484

Merged
merged 2 commits into from
Sep 25, 2017
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,8 @@ endif()
tvm_option(USE_CUDA "Build with CUDA" OFF)
tvm_option(USE_OPENCL "Build with OpenCL" OFF)
tvm_option(USE_METAL "Build with Metal" OFF)
tvm_option(USE_RPC "Build with RPC" OFF)
tvm_option(USE_RPC "Build with RPC" ON)
tvm_option(USE_GRAPH_RUNTIME "Build with tiny graph runtime" ON)
tvm_option(USE_LLVM "Build with LLVM" OFF)
tvm_option(USE_RTTI "Build with RTTI" ON)
tvm_option(USE_MSVC_MT "Build with MT" OFF)
Expand Down
1 change: 1 addition & 0 deletions Jenkinsfile
Original file line number Diff line number Diff line change
Expand Up @@ -90,6 +90,7 @@ stage('Build') {
echo USE_OPENCL=1 >> config.mk
echo LLVM_CONFIG=llvm-config-4.0 >> config.mk
echo USE_RPC=1 >> config.mk
echo USE_GRAPH_RUNTIME=1 >> config.mk
echo USE_BLAS=openblas >> config.mk
rm -f lib/libtvm_runtime.so lib/libtvm.so
"""
Expand Down
6 changes: 6 additions & 0 deletions Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,7 @@ CUDA_SRC = $(wildcard src/runtime/cuda/*.cc)
ROCM_SRC = $(wildcard src/runtime/rocm/*.cc)
OPENCL_SRC = $(wildcard src/runtime/opencl/*.cc)
RPC_SRC = $(wildcard src/runtime/rpc/*.cc)
GRAPH_SRC = $(wildcard src/runtime/graph/*.cc)
RUNTIME_SRC = $(wildcard src/runtime/*.cc)

# Objectives
Expand All @@ -63,6 +64,7 @@ CUDA_OBJ = $(patsubst src/%.cc, build/%.o, $(CUDA_SRC))
ROCM_OBJ = $(patsubst src/%.cc, build/%.o, $(ROCM_SRC))
OPENCL_OBJ = $(patsubst src/%.cc, build/%.o, $(OPENCL_SRC))
RPC_OBJ = $(patsubst src/%.cc, build/%.o, $(RPC_SRC))
GRAPH_OBJ = $(patsubst src/%.cc, build/%.o, $(GRAPH_SRC))
CC_OBJ = $(patsubst src/%.cc, build/%.o, $(CC_SRC)) $(LLVM_OBJ)
RUNTIME_OBJ = $(patsubst src/%.cc, build/%.o, $(RUNTIME_SRC))
CONTRIB_OBJ =
Expand Down Expand Up @@ -124,6 +126,10 @@ ifeq ($(USE_RPC), 1)
RUNTIME_DEP += $(RPC_OBJ)
endif

ifeq ($(USE_GRAPH_RUNTIME), 1)
RUNTIME_DEP += $(GRAPH_OBJ)
endif

include make/contrib/cblas.mk
include make/contrib/nnpack.mk
include make/contrib/cudnn.mk
Expand Down
5 changes: 5 additions & 0 deletions docs/api/python/contrib.rst
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,11 @@ tvm.contrib.rpc
.. automodule:: tvm.contrib.rpc
:members:

tvm.contrib.graph
~~~~~~~~~~~~~~~~~
.. automodule:: tvm.contrib.graph
:members:

tvm.contrib.util
~~~~~~~~~~~~~~~~
.. automodule:: tvm.contrib.util
Expand Down
5 changes: 4 additions & 1 deletion make/config.mk
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,10 @@ USE_OPENCL = 0
USE_METAL = 0

# Whether enable RPC during compile
USE_RPC = 0
USE_RPC = 1

# Whether to enable the tiny embedded graph runtime.
USE_GRAPH_RUNTIME = 1

# whether build with LLVM support
# Requires LLVM version >= 4.0
Expand Down
131 changes: 131 additions & 0 deletions python/tvm/contrib/graph_runtime.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,131 @@
"""Minimum graph runtime that executes graph containing TVM PackedFunc."""
from . import rpc
from .._ffi.base import string_types
from .._ffi.function import get_global_func
from .. import ndarray as nd


def create(graph_json_str, libmod, ctx):
    """Create a runtime executor module for a graph plus compiled library.

    Parameters
    ----------
    graph_json_str : str or graph class
        The graph to deploy, as the json string produced by nnvm, or an
        object exposing ``_tvm_graph_json()``. Every operator node in the
        graph must be a tvm_op naming a PackedFunc inside ``libmod``.

    libmod : tvm.Module
        The module holding the compiled functions referenced by the graph.

    ctx : TVMContext
        The context to deploy on; may be a local device or an RPC-backed
        remote device.

    Returns
    -------
    graph_module : GraphModule
        A runtime graph module that can execute the graph.

    Raises
    ------
    ValueError
        If ``graph_json_str`` is neither a string nor graph-like.
    """
    # Accept graph objects by extracting their json representation.
    if not isinstance(graph_json_str, string_types):
        try:
            graph_json_str = graph_json_str._tvm_graph_json()
        except AttributeError:
            raise ValueError("Type %s is not supported" % type(graph_json_str))

    dev_type = ctx.device_type
    dev_id = ctx.device_id

    # Local device: instantiate the runtime through the global registry.
    if dev_type < rpc.RPC_SESS_MASK:
        fcreate = get_global_func("tvm.graph_runtime.create")
        return GraphModule(fcreate(graph_json_str, libmod, dev_type, dev_id), ctx)

    # Remote device: the module must be an RPC module belonging to the
    # same session as the context, and creation happens on the server.
    assert libmod.type_key == "rpc"
    assert rpc._SessTableIndex(libmod) == ctx._rpc_sess._tbl_index
    hmod = rpc._ModuleHandle(libmod)
    fcreate = ctx._rpc_sess.get_function("tvm.graph_runtime.remote_create")
    # Strip the RPC session mask so the server sees the raw device type.
    return GraphModule(
        fcreate(graph_json_str, hmod, dev_type % rpc.RPC_SESS_MASK, dev_id), ctx)


class GraphModule(object):
    """Wrapper runtime module.

    This is a thin wrapper of the underlying TVM module.
    You can also directly call set_input, run, and get_output
    of the underlying module functions.

    Parameters
    ----------
    module : Module
        The internal tvm module that holds the actual graph functions.

    ctx : TVMContext
        The context this module is under.

    Attributes
    ----------
    module : Module
        The internal tvm module that holds the actual graph functions.

    ctx : TVMContext
        The context this module is under.
    """
    def __init__(self, module, ctx):
        self.module = module
        # Cache the packed functions once so repeated calls avoid
        # repeated module lookups.
        self._set_input = module["set_input"]
        self._run = module["run"]
        self._get_output = module["get_output"]
        self.ctx = ctx

    def set_input(self, key=None, value=None, **params):
        """Set inputs to the module via kwargs.

        Parameters
        ----------
        key : int or str
            The input key.

        value : the input value.
            The value bound to the input key.

        params : dict of str to NDArray
            Additional named inputs.
        """
        # NOTE: compare against None explicitly -- `if key:` would
        # silently skip the valid integer input key 0.
        if key is not None:
            self._set_input(key, nd.array(value, ctx=self.ctx))
        for k, v in params.items():
            self._set_input(k, nd.array(v, ctx=self.ctx))
        return self

    def run(self, **input_dict):
        """Run forward execution of the graph.

        Parameters
        ----------
        input_dict : dict of str to NDArray
            Input values to be fed to the graph before running.
        """
        if input_dict:
            self.set_input(**input_dict)
        self._run()

    def get_output(self, index, out):
        """Copy the index-th output into out.

        Parameters
        ----------
        index : int
            The output index.

        out : NDArray
            The output array container.

        Returns
        -------
        out : NDArray
            The same container, filled with the output data.
        """
        self._get_output(index, out)
        return out

    def __getitem__(self, key):
        """Get an internal module function.

        Parameters
        ----------
        key : str
            The key to the module.
        """
        return self.module[key]
Loading