Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[WIP][Runtime]Pipeline Executor For Compute graph pipeline #7892

Closed
wants to merge 28 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
28 commits
Select commit Hold shift + click to select a range
5f0145f
Pipeline Compute Graph With New Subgraph Executor
huajsj Apr 20, 2021
76fe55a
Fix plint error
huajsj Apr 21, 2021
90b5761
fix task_build error.
huajsj Apr 22, 2021
4b9f92f
fix gpu task_build issue.
huajsj Apr 22, 2021
82ea9ad
Fix plint issue
huajsj Apr 22, 2021
0dc1d74
add build_pipeline logic
huajsj May 1, 2021
f850479
remove pipeline_graph
huajsj May 1, 2021
3a2e2a7
rename subgraph executor into pipeline executor
huajsj May 1, 2021
8793cdd
fix plint issue.
huajsj May 3, 2021
7e175de
add cuda check
huajsj May 3, 2021
ee90828
address review comments.
huajsj May 5, 2021
ca76cb2
do pipeline stop in executor deconstructor.
huajsj May 6, 2021
01cc700
fix plint issue.
huajsj May 6, 2021
681c4ef
Address review comments, handle final output and cross subgraph output
huajsj May 27, 2021
9d24a0f
Fix plint error.
huajsj May 27, 2021
fc30cac
add correct comments.
huajsj May 28, 2021
ecc71c0
address review comments.
huajsj Jun 4, 2021
3eb1b4c
add OTF input set support
huajsj Jun 15, 2021
4d19179
decouple pipeline_build and create.
huajsj Jun 22, 2021
df18253
address review comments.
huajsj Jun 23, 2021
29ecb2b
fix plin issue, improve Load function.
huajsj Jul 5, 2021
ad09daa
change output start from 0
huajsj Sep 11, 2021
47c5cc2
Add network split logic into test.
huajsj Oct 2, 2021
b42de5a
change moudle index into start 0.
huajsj Oct 7, 2021
25faf19
fix mod_idx = 0 crash issue, but still logic wrong
huajsj Oct 16, 2021
2e0c1c3
fix crash issue.
huajsj Oct 17, 2021
fba531e
fix input data not get correct set issue.
huajsj Oct 18, 2021
1ee5831
fix set param report error issue.
huajsj Nov 2, 2021
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 7 additions & 0 deletions CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -375,6 +375,7 @@ if(USE_PROFILER)
set_source_files_properties(${RUNTIME_GRAPH_EXECUTOR_SRCS}
PROPERTIES COMPILE_DEFINITIONS "TVM_GRAPH_EXECUTOR_DEBUG")


file(GLOB RUNTIME_VM_PROFILER_SRCS src/runtime/vm/profiler/*.cc)
list(APPEND RUNTIME_SRCS ${RUNTIME_VM_PROFILER_SRCS})
endif(USE_PROFILER)
Expand All @@ -388,6 +389,12 @@ if(GTEST_INCLUDE_DIR AND GTEST_LIB)
include(GoogleTest)
endif()

if(USE_PIPELINE_EXECUTOR)
message(STATUS "Build with Subgraph Executor support...")
file(GLOB RUNTIME_PIPELINE_SRCS src/runtime/pipeline/*.cc)
list(APPEND RUNTIME_SRCS ${RUNTIME_PIPELINE_SRCS})
endif(USE_PIPELINE_EXECUTOR)

# Module rules
include(cmake/modules/VTA.cmake)
include(cmake/modules/StandaloneCrt.cmake)
Expand Down
3 changes: 3 additions & 0 deletions cmake/config.cmake
Original file line number Diff line number Diff line change
Expand Up @@ -102,6 +102,9 @@ set(USE_STACKVM_RUNTIME OFF)
# Whether enable tiny embedded graph executor.
set(USE_GRAPH_EXECUTOR ON)

# Whether to enable the pipeline executor.
huajsj marked this conversation as resolved.
Show resolved Hide resolved
set(USE_PIPELINE_EXECUTOR OFF)

# Whether enable tiny graph executor with CUDA Graph
set(USE_GRAPH_EXECUTOR_CUDA_GRAPH OFF)

Expand Down
11 changes: 1 addition & 10 deletions python/tvm/contrib/graph_executor.py
Original file line number Diff line number Diff line change
Expand Up @@ -156,6 +156,7 @@ def __init__(self, module):
self._run = module["run"]
self._get_output = module["get_output"]
self._get_input = module["get_input"]
self._get_input_index = module["get_input_index"]
self._get_num_outputs = module["get_num_outputs"]
self._get_input_index = module["get_input_index"]
self._get_num_inputs = module["get_num_inputs"]
Expand Down Expand Up @@ -245,16 +246,6 @@ def get_input(self, index, out=None):

def get_input_index(self, name):
"""Get inputs index via input name.

Parameters
----------
name : str
The input key name

Returns
-------
index: int
The input index. -1 will be returned if the given input name is not found.
"""
return self._get_input_index(name)

Expand Down
287 changes: 287 additions & 0 deletions python/tvm/contrib/pipeline_executor.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,287 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Pipeline executor that executes pipeline containing TVM PackedFunc."""
import json
import tvm._ffi
from tvm import relay
from tvm.contrib import graph_executor


def pipeline_executor_enabled():
    """Check whether the pipeline executor is enabled in this TVM build.

    Returns
    -------
    enable: bool
        True when the "tvm.pipeline_executor.create" packed function is
        registered, i.e. TVM was built with USE_PIPELINE_EXECUTOR=ON.
    """
    # allow_missing=True makes the lookup return None instead of raising,
    # which avoids exception-based control flow and the stdout print that
    # the previous failure path emitted from library code.
    return tvm._ffi.get_global_func("tvm.pipeline_executor.create", allow_missing=True) is not None


def write_file(file_name, data, mode):
    """Write *data* to *file_name*; a falsy file name is silently skipped.

    Parameters
    ----------
    file_name: str
        target file name; nothing is written when empty/None
    data: str
        content to write
    mode: str
        file open mode, e.g. "w" or "wb"
    """
    if not file_name:
        return
    with open(file_name, mode) as out_handle:
        out_handle.write(data)


def build_pipeline(mod_n_configs, export_path=None):
    """Build a module list that can be used for pipeline execution.

    Parameters
    ----------
    mod_n_configs: Dict[IRModule, Dict[str, Any]]
        build configuration information, structured as follows.
        {IRModule: {"target": target,
                    "target_host": target_host,
                    "params": params,
                    "mod_name": mod_name,
                    "build": build}}
    export_path: str
        directory the build artifacts get exported into; no export when None

    Returns
    -------
    mods: Dict[Module, Dict[str, Any]]
        each built module mapped to its device configuration
    string_config: Dict[int, Dict[str, Any]]
        pipeline configuration, indexed by module index
    """
    mods = {}
    config_len = len(mod_n_configs)
    string_config = [{} for _ in range(config_len)]
    for ir_mod, mod_config in mod_n_configs.items():
        # Artifact names stay empty unless the build result is exported.
        lib_name = ""
        json_name = ""
        params_name = ""
        # Every module must carry a pipeline config with its position.
        assert "pipeline" in mod_config and "mod_indx" in mod_config["pipeline"]
        # Work on a copy so the caller's configuration is not mutated.
        mconf = mod_config["pipeline"].copy()
        # Get the module's device configuration.
        dev = mod_config["dev"]
        # Module index in the pipeline; must fit the configuration length.
        mod_indx = mconf["mod_indx"]
        assert mod_indx < config_len
        # Use a user-defined build function when one is configured.
        build_func = mod_config["build"] if mod_config.get("build") else relay.build

        # Build the IRModule into an executable module.
        mod = build_func(
            ir_mod,
            mod_config["target"],
            params=mod_config["params"],
            target_host=mod_config["target_host"],
            mod_name=mod_config["mod_name"],
        )

        if export_path:
            graph, lib, params = mod
            lib_name = "{}/lib{}.so".format(export_path, mod_indx)
            json_name = "{}/json{}".format(export_path, mod_indx)
            params_name = "{}/params{}".format(export_path, mod_indx)
            lib.export_library(lib_name)
            write_file(json_name, graph, "w")
            write_file(params_name, relay.save_param_dict(params), "wb")

        mconf["lib_name"] = lib_name
        mconf["json_name"] = json_name
        mconf["params_name"] = params_name
        mconf["dev"] = "{},{}".format(dev.device_type, dev.device_id)
        # Register this module's configuration at its pipeline position.
        string_config[mod_indx] = mconf
        # Associate the built module with its device.
        mods[mod] = {"dev": dev}

    if export_path:
        write_file("{}/config".format(export_path), json.dumps(string_config), "w")

    # Return the module-to-device map and the pipeline configuration.
    return mods, string_config


def create(pipeline_mods, mod_config):
    """Create a pipeline runtime executor.

    Parameters
    ----------
    pipeline_mods : Dict[Module, Dict[str, Any]]
        built modules mapped to their device configuration, as produced
        by build_pipeline.

    mod_config : Dict[int, Dict[str, Any]]
        modules and module dependency configuration information.

    Returns
    -------
    submodule : PipelineModule
        Runtime pipeline module.
    """
    mods = []
    for pipeline_mod in pipeline_mods:
        # Instantiate the graph executor of each module on its configured device.
        mod = graph_executor.GraphModule(
            pipeline_mod["default"](pipeline_mods[pipeline_mod]["dev"])
        )
        mods.append(mod)

    return PipelineModule(mods, json.dumps(mod_config))


class PipelineModule(object):
    """Wrapper runtime module. This is a thin wrapper of the underlying TVM module.
    you can also directly call set_input, run, and get_output of underlying module functions.

    Parameters
    ----------
    modules : List[GraphModule]
        The internal tvm modules that hold the actual graph functions.

    pipeline_config : str
        JSON string with the modules and module dependency configuration
        information.
    """

    def __init__(self, modules, pipeline_config):
        # Unwrap the underlying tvm runtime module from each GraphModule.
        mods = [module.module for module in modules]

        pipelinecreate = tvm._ffi.get_global_func("tvm.pipeline_executor.create")
        assert pipelinecreate
        module = pipelinecreate(mods, pipeline_config)

        self.graph_modules_ = modules
        # Keep the underlying runtime module so __getitem__ can reach it.
        # (Previously this was only a local, making __getitem__ raise
        # AttributeError.)
        self.module = module

        self._set_input = module["set_input"]
        self._run = module["run"]
        self._stop = module["stop"]
        self._get_output = module["get_output"]
        self._get_input = module["get_input"]
        self._get_num_outputs = module["get_num_outputs"]
        self._get_num_inputs = module["get_num_inputs"]

    def set_input(self, key, value, mod_idx=0, params=None):
        """Set an input of the pipeline.

        Parameters
        ----------
        key : array_like
            The input key

        value : array_like
            The input value

        mod_idx : int
            the submodule index

        params : dict of str to NDArray
            Additional arguments
        """
        assert mod_idx >= 0
        self._set_input(key, tvm.nd.array(value, tvm.cpu()), mod_idx)

        if params:
            for param in params:
                self.graph_modules_[mod_idx].set_input(**param)

    def run(self):
        """Run forward execution of the graph"""
        self._run()

    def stop(self):
        """Stop pipeline run"""
        self._stop()

    def get_num_outputs(self):
        """Get the number of outputs from the graph

        Returns
        -------
        count : int
            The number of outputs.
        """
        return self._get_num_outputs()

    def get_num_inputs(self):
        """Get the number of inputs to the graph

        Returns
        -------
        count : int
            The number of inputs.
        """
        return self._get_num_inputs()

    def get_input(self, input_indx, runtime_index=0, out=None):
        """Get the input_indx-th input of runtime_index-th submodule.

        Parameters
        ----------
        input_indx : int
            The input index

        runtime_index : int
            The runtime (submodule) index

        out : NDArray
            The output array container; when given, the input is copied
            into it and *out* is returned.
        """
        if out:
            self._get_input(input_indx, runtime_index).copyto(out)
            return out

        return self._get_input(input_indx, runtime_index)

    def get_output(self):
        """Get the pipeline outputs.

        Returns
        -------
        outputs
            The outputs of the underlying pipeline executor.
        """
        return self._get_output()

    def __getitem__(self, key):
        """Get internal module function

        Parameters
        ----------
        key : str
            The key to the module.
        """
        return self.module[key]
Loading