From b9204cd33e00231cf65c5887fe6e091ce25b6f50 Mon Sep 17 00:00:00 2001
From: Qiang Zhang
Date: Wed, 4 Aug 2021 09:32:58 +0800
Subject: [PATCH] [Relay] Change Default "opt_level" of Sequential from 2 to 0
 (#8634)

---
 python/tvm/ir/transform.py              |  2 +-
 src/ir/transform.cc                     |  2 +-
 tests/python/relay/test_pass_manager.py | 28 +++++++++++++++++++++++++
 3 files changed, 30 insertions(+), 2 deletions(-)

diff --git a/python/tvm/ir/transform.py b/python/tvm/ir/transform.py
index 93aae45930e3..17995bfa7850 100644
--- a/python/tvm/ir/transform.py
+++ b/python/tvm/ir/transform.py
@@ -199,7 +199,7 @@ class Sequential(Pass):
         The list of passes that the sequential pass is dependent on.
     """
 
-    def __init__(self, passes=None, opt_level=2, name="sequential", required=None):
+    def __init__(self, passes=None, opt_level=0, name="sequential", required=None):
         passes = passes if passes else []
         if not isinstance(passes, (list, tuple)):
             raise TypeError("passes must be a list of Pass objects.")
diff --git a/src/ir/transform.cc b/src/ir/transform.cc
index 8120ca798ab2..426bdc9c1800 100644
--- a/src/ir/transform.cc
+++ b/src/ir/transform.cc
@@ -435,7 +435,7 @@ Sequential::Sequential(tvm::Array<Pass> passes, PassInfo pass_info) {
 Sequential::Sequential(tvm::Array<Pass> passes, String name) {
   auto n = make_object<SequentialNode>();
   n->passes = std::move(passes);
-  PassInfo pass_info = PassInfo(2, std::move(name), {});
+  PassInfo pass_info = PassInfo(0, std::move(name), {});
   n->pass_info = std::move(pass_info);
   data_ = std::move(n);
 }
diff --git a/tests/python/relay/test_pass_manager.py b/tests/python/relay/test_pass_manager.py
index fb1094becb21..7e3634f8b7db 100644
--- a/tests/python/relay/test_pass_manager.py
+++ b/tests/python/relay/test_pass_manager.py
@@ -507,6 +507,34 @@ def expected():
     assert tvm.ir.structural_equal(zz, zexpected)
 
 
+def test_nested_sequential_with_scoping():
+    def before():
+        x = relay.var("x", shape=(1, 16, 16, 16), dtype="float32")
+        w = relay.var("w", shape=(32, 16, 3, 3), dtype="float32")
+        y = relay.nn.conv2d(x, w, padding=(1, 1))
+        y = relay.reshape(y, newshape=(1, 16, -1))
+        y = relay.reshape(y, newshape=(4, 8, -1, 16))
+        y = relay.reverse_reshape(y, newshape=(32, 0, -1))
+        return tvm.IRModule.from_expr(y)
+
+    def expected():
+        x = relay.var("x", shape=(1, 16, 16, 16), dtype="float32")
+        w = relay.var("w", shape=(32, 16, 3, 3), dtype="float32")
+        y = relay.nn.conv2d(x, w, padding=(1, 1))
+        y = relay.reshape(y, newshape=(32, 16, 16))
+        return tvm.IRModule.from_expr(y)
+
+    z = before()
+    passes = [
+        tvm.transform.Sequential([relay.transform.SimplifyExpr()]),
+    ]
+    with tvm.transform.PassContext(opt_level=1):
+        zz = tvm.transform.Sequential(passes)(z)
+
+    expected = relay.transform.InferType()(expected())
+    assert tvm.ir.structural_equal(zz, expected)
+
+
 def test_print_ir(capfd):
     shape = (1, 2, 3)
     tp = relay.TensorType(shape, "float32")