diff --git a/python/tvm/relay/backend/_backend.py b/python/tvm/relay/backend/_backend.py
index 0a6232bf7f58..65b0c0ba87c7 100644
--- a/python/tvm/relay/backend/_backend.py
+++ b/python/tvm/relay/backend/_backend.py
@@ -80,7 +80,6 @@ def build(mod, target, target_host=None):
     """
     if target_host == "":
         target_host = None
-    import pdb; pdb.set_trace()
     return tvm.driver.build(mod, target=target, target_host=target_host)
 
 
diff --git a/src/relay/backend/graph_runtime_codegen.cc b/src/relay/backend/graph_runtime_codegen.cc
index f47603d7352f..416540db2286 100644
--- a/src/relay/backend/graph_runtime_codegen.cc
+++ b/src/relay/backend/graph_runtime_codegen.cc
@@ -254,8 +254,6 @@ class GraphRuntimeCodegen : public backend::MemoizedExprTranslator<std::vector<GraphNodeRef>> {
 Update(main_module);
     ret.external_mods = lowered_module.external_mods;
     return ret;
   }
@@ -410,7 +408,6 @@ class GraphRuntimeCodegen : public backend::MemoizedExprTranslator<std::vector<GraphNodeRef>> {
 op.as<OpNode>()) {
       if (op_node->name != "prim_fn_call") {
diff --git a/src/relay/backend/tir_compiler.cc b/src/relay/backend/tir_compiler.cc
index 1de9cb7ca64e..5df64c024a66 100644
--- a/src/relay/backend/tir_compiler.cc
+++ b/src/relay/backend/tir_compiler.cc
@@ -887,17 +887,9 @@ class LowerTensorExpr : public ExprMutator {
     }
 
     // Process inputs.
-    bool skip_first;
     Array<Expr> args;
-    for (auto arg : expr->args) {
-      // The first input is a function, not a tensor.
-      if (skip_first) {
-        skip_first = false;
-        args.push_back(arg);
-        continue;
-      }
-
-      args.push_back(VisitExpr(arg));
+    for (size_t i = 0; i < expr->args.size(); i++) {
+      args.push_back(VisitExpr(expr->args[i]));
     }
 
     Target target;
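
Note on the tir_compiler.cc hunk: the deleted loop declared `bool skip_first;` without initializing it, so the `if (skip_first)` check read an indeterminate value (undefined behavior in C++), and whether the first argument was lowered or passed through untouched was unpredictable. The replacement simply visits every argument. Below is a minimal standalone sketch of that before/after behavior; `Visit` and the `int` arguments are placeholder stand-ins, not the real Relay `Expr`/`ExprMutator` API.

// Minimal sketch (hypothetical stand-ins, not the real Relay API) of why the
// removed loop in tir_compiler.cc was unsafe and what the replacement does.
#include <cstdio>
#include <vector>

// Stand-in for ExprMutator::VisitExpr: "lowers" an argument.
static int Visit(int arg) { return arg * 10; }

int main() {
  std::vector<int> inputs = {1, 2, 3};

  // Before: `bool skip_first;` was never assigned, so the `if (skip_first)`
  // branch read an indeterminate value -- undefined behavior, and in practice
  // the first argument was sometimes copied through unvisited.

  // After: every argument is visited unconditionally.
  std::vector<int> args;
  for (size_t i = 0; i < inputs.size(); i++) {
    args.push_back(Visit(inputs[i]));
  }

  for (int a : args) {
    std::printf("%d\n", a);  // prints 10, 20, 30
  }
  return 0;
}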