You signed in with another tab or window. Reload to refresh your session.You signed out in another tab or window. Reload to refresh your session.You switched accounts on another tab or window. Reload to refresh your session.Dismiss alert
# Build the Relay module for the microTVM device and run a single inference.
with micro.Session(dev_config) as sess:
    ctx = tvm.micro_dev(0)
    # FuseOps is disabled and TIR vectorization turned off: the C codegen
    # used by microTVM cannot emit vectorized loops.
    with tvm.transform.PassContext(disabled_pass={'FuseOps'},
                                   config={"tir.disable_vectorize": True}):
        graph, c_mod, params = relay.build(mod, target=target, params=params)
    # Cross-compile and load the generated C module onto the device.
    micro_mod = micro.create_micro_mod(c_mod, dev_config)
    # Use a distinct name for the runtime module — the original rebound
    # `mod`, clobbering the Relay IRModule built above.
    rt_mod = graph_runtime.create(graph, micro_mod, ctx)
    rt_mod.set_input(**params)
    # Throw a simple single bogus number at the model.
    rt_mod.set_input(input_tensor, tvm.nd.array(np.array([0.5], dtype="float32")))
    rt_mod.run()
    tvm_output = rt_mod.get_output(0).asnumpy()
    print("result is: " + str(tvm_output))
The text was updated successfully, but these errors were encountered:
@areusch
The failure looks like
graph, c_mod, params = relay.build(mod, target=target, params=params)
Traceback (most recent call last):
File "./working-micro-st-tflite.py", line 48, in <module>
micro_mod = micro.create_micro_mod(c_mod, dev_config)
File "/home/tgall/tvm/tvm/python/tvm/micro/base.py", line 213, in create_micro_mod
micro_mod = tvm.runtime.load_module(lib_obj_path)
File "/home/tgall/tvm/tvm/python/tvm/runtime/module.py", line 407, in load_module
return _ffi_api.ModuleLoadFromFile(path, fmt)
File "/home/tgall/tvm/tvm/python/tvm/_ffi/_ctypes/packed_func.py", line 225, in __call__
raise get_last_ffi_error()
tvm._ffi.base.TVMError: Traceback (most recent call last):
[bt] (6) /home/tgall/tvm/tvm/build/libtvm.so(TVMFuncCall+0x63) [0x7f0b6f5594e3]
[bt] (5) /home/tgall/tvm/tvm/build/libtvm.so(std::_Function_handler<void (tvm::runtime::TVMArgs, tvm::runtime::TVMRetValue*), tvm::runtime::TypedPackedFunc<tvm::runtime::Module (std::__cxx11::basic_string<char, std::char_traits, std::allocator > const&, std::__cxx11::basic_string<char, std::char_traits, std::allocator > const&)>::AssignTypedLambda<tvm::runtime::Module ()(std::__cxx11::basic_string<char, std::char_traits, std::allocator > const&, std::__cxx11::basic_string<char, std::char_traits, std::allocator > const&)>(tvm::runtime::Module ()(std::__cxx11::basic_string<char, std::char_traits, std::allocator > const&, std::__cxx11::basic_string<char, std::char_traits, std::allocator > const&))::{lambda(tvm::runtime::TVMArgs const&, tvm::runtime::TVMRetValue*)#1}>::_M_invoke(std::_Any_data const&, tvm::runtime::TVMArgs&&, tvm::runtime::TVMRetValue*&&)+0xa3) [0x7f0b6f57a9e3]
[bt] (4) /home/tgall/tvm/tvm/build/libtvm.so(tvm::runtime::Module::LoadFromFile(std::__cxx11::basic_string<char, std::char_traits, std::allocator > const&, std::__cxx11::basic_string<char, std::char_traits, std::allocator > const&)+0x1d8) [0x7f0b6f575c38]
[bt] (3) /home/tgall/tvm/tvm/build/libtvm.so(+0x18a270a) [0x7f0b6f5f070a]
[bt] (2) /home/tgall/tvm/tvm/build/libtvm.so(tvm::runtime::MicroSession::LoadBinary(std::__cxx11::basic_string<char, std::char_traits, std::allocator > const&, bool)+0x14f) [0x7f0b6f5f5e9f]
[bt] (1) /home/tgall/tvm/tvm/build/libtvm.so(tvm::runtime::MicroSession::AllocateInSection(tvm::runtime::SectionKind, unsigned long)+0x1a5) [0x7f0b6f5f5795]
[bt] (0) /home/tgall/tvm/tvm/build/libtvm.so(+0x18a4898) [0x7f0b6f5f2898]
File "/home/tgall/tvm/tvm/src/runtime/micro/micro_section_allocator.h", line 68
TVMError: Check failed: size_ + size < capacity_: cannot alloc 208 bytes in section "rodata" (start_addr=0x20004650, used=0, capacity=100)
I think we've seen this before haven't we?
code
import os
import numpy as np
import tvm
import tvm.micro as micro
from tvm.contrib import graph_runtime, util
from tvm import relay
from tvm.contrib.download import download_testdata
# Target the C backend with the bundled C runtime (system-lib) for microTVM.
target = "c --system-lib --runtime=c"
model_dir = "/home/tgall/tvm/utvm-exp/"
tflite_model_file = os.path.join(model_dir, "sine_model.tflite")
# Read the serialized TFLite flatbuffer; the original left the file handle
# open — use a context manager so it is closed promptly.
with open(tflite_model_file, "rb") as f:
    tflite_model_buf = f.read()

# Newer tflite packages expose Model at the top level; fall back to the
# older tflite.Model.Model location when the attribute is missing.
try:
    import tflite
    tflite_model = tflite.Model.GetRootAsModel(tflite_model_buf, 0)
    version = tflite_model.Version()
    # Version() returns an int — convert before concatenating (the original
    # "..." + version raised TypeError on this branch).
    print("Model Version: " + str(version))
except AttributeError:
    import tflite.Model
    tflite_model = tflite.Model.Model.GetRootAsModel(tflite_model_buf, 0)
    version = tflite_model.Version()
    print("Model Version: " + str(version))

# Input metadata must match the TFLite model's input tensor exactly.
input_tensor = "dense_4_input"
input_shape = (1,)
input_dtype = "float32"
# NOTE(review): the default STM32F746xx layout caps the "rodata" section at
# 100 bytes, which is what trips the CHECK above when loading this model;
# presumably generate_config accepts section constraints to enlarge it —
# confirm against the micro.device API before relying on this.
dev_config = micro.device.arm.stm32f746xx.generate_config("127.0.0.1", 6666)
mod, params = relay.frontend.from_tflite(
    tflite_model,
    shape_dict={input_tensor: input_shape},
    dtype_dict={input_tensor: input_dtype},
)
with micro.Session(dev_config) as sess:
ctx = tvm.micro_dev(0)
The text was updated successfully, but these errors were encountered: