rename namespace from dynamic to dyn
Matthew Brookhart committed Jun 22, 2020
1 parent aee4795 commit e31c03c
Showing 10 changed files with 24 additions and 26 deletions.
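For illustration (not part of the diff), the user-facing effect of the rename is only the module path: code that built a dynamic reshape via relay.dynamic.reshape now uses relay.dyn.reshape. A minimal sketch:

    import tvm
    from tvm import relay

    x = relay.var("x", relay.TensorType((2, 3, 4), "float32"))
    s = relay.var("s", relay.TensorType((3,), "int64"))
    # Before this commit: z = relay.dynamic.reshape(x, s)
    z = relay.dyn.reshape(x, s)  # after the rename
    func = relay.Function([x, s], z)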
2 changes: 1 addition & 1 deletion python/tvm/relay/__init__.py
@@ -45,7 +45,7 @@
 from .op import annotation
 from .op import vision
 from .op import contrib
-from .op import dynamic
+from .op import dyn
 from .op.reduce import *
 from .op.tensor import *
 from .op.transform import *
File renamed without changes.
@@ -17,4 +17,4 @@
"""Constructor APIs"""
import tvm._ffi

tvm._ffi._init_api("relay.op.dynamic._make", __name__)
tvm._ffi._init_api("relay.op.dyn._make", __name__)
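For context, _init_api scans TVM's global function registry for names under the given prefix and attaches them to this module, so the C++ global "relay.op.dyn._make.reshape" registered in the C++ diff below surfaces here as _make.reshape. A minimal sketch of calling it directly (the relay.dyn.reshape wrapper normally does this), assuming the tvm.relay.op.dyn._make module path introduced by this commit:

    import tvm
    from tvm import relay
    from tvm.relay.op.dyn import _make  # populated by _init_api at import time

    x = relay.var("x", relay.TensorType((2, 3, 4), "float32"))
    s = relay.var("s", relay.TensorType((3,), "int64"))
    # Invokes the C++ MakeReshape bound via TVM_REGISTER_GLOBAL;
    # returns a relay.Call node for the dyn.reshape op.
    call = _make.reshape(x, s)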
@@ -20,7 +20,7 @@
 from tvm.te.hybrid import script
 from .. import op as _reg
 
-_reg.register_injective_schedule("dynamic.reshape")
+_reg.register_injective_schedule("dyn.reshape")
 
 @script
 def _reshape_shape_func_input_data(data, newshape, ndim):
@@ -78,6 +78,6 @@ def _reshape_shape_func_input_data(data, newshape, ndim):
         out[infer_idx] = old_size // new_size
     return out
 
-@_reg.register_shape_func("dynamic.reshape", True)
+@_reg.register_shape_func("dyn.reshape", True)
 def dynamic_reshape_shape_func(attrs, inputs, out_ndims):
     return [_reshape_shape_func_input_data(*inputs, out_ndims[0])]
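As an aside, a plain-Python sketch (not TVM hybrid script) of the -1 inference that the line "out[infer_idx] = old_size // new_size" performs; the real shape function also handles the special newshape codes 0, -2, -3, and -4:

    import numpy as np

    def infer_minus_one(data_shape, newshape):
        # Resolve a single -1 entry the way the shape func does:
        # remaining size = total elements // product of the known dims.
        out = list(newshape)
        if -1 in out:
            infer_idx = out.index(-1)
            old_size = int(np.prod(data_shape))
            new_size = int(np.prod([d for d in out if d != -1]))
            out[infer_idx] = old_size // new_size
        return out

    assert infer_minus_one((2, 3, 4), (8, -1)) == [8, 3]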
File renamed without changes.
@@ -31,7 +31,7 @@

 namespace tvm {
 namespace relay {
-namespace dynamic {
+namespace dyn {
 
 /* relay.reshape */
 // TVM_REGISTER_NODE_TYPE(ReshapeAttrs);
@@ -78,13 +78,13 @@ Array<te::Tensor> ReshapeCompute(const Attrs& attrs, const Array<te::Tensor>& in
 Expr MakeReshape(Expr data, Expr newshape) {
   auto attrs = make_object<ReshapeAttrs>();
   attrs->reverse = false;
-  static const Op& op = Op::Get("dynamic.reshape");
+  static const Op& op = Op::Get("dyn.reshape");
   return Call(op, {data, newshape}, Attrs(attrs), {});
 }
 
-TVM_REGISTER_GLOBAL("relay.op.dynamic._make.reshape").set_body_typed(MakeReshape);
+TVM_REGISTER_GLOBAL("relay.op.dyn._make.reshape").set_body_typed(MakeReshape);
 
-RELAY_REGISTER_OP("dynamic.reshape")
+RELAY_REGISTER_OP("dyn.reshape")
     .describe(R"code(Reshapes the input array based on the values in the newshape array.
 To give users more convenience without doing manual shape inference,
@@ -131,6 +131,6 @@ RELAY_REGISTER_OP("dynamic.reshape")
     .set_attr<FTVMCompute>("FTVMCompute", ReshapeCompute)
     .set_attr<TOpPattern>("TOpPattern", kInjective);
 
-}  // namespace dynamic
+}  // namespace dyn
 }  // namespace relay
 }  // namespace tvm
@@ -26,7 +26,7 @@

 namespace tvm {
 namespace relay {
-namespace dynamic {}  // namespace dynamic
+namespace dyn {}  // namespace dyn
 }  // namespace relay
 }  // namespace tvm
 #endif  // TVM_RELAY_OP_DYNAMIC_TENSOR_TRANSFORM_H_
2 changes: 1 addition & 1 deletion src/relay/transforms/dynamic_to_static.cc
@@ -32,7 +32,7 @@ namespace relay {

 class DynamicToStaticMutator : public MixedModeMutator {
  public:
-  DynamicToStaticMutator() : dyn_reshape_op_(Op::Get("dynamic.reshape")) {}
+  DynamicToStaticMutator() : dyn_reshape_op_(Op::Get("dyn.reshape")) {}
 
  private:
   Expr Rewrite_(const CallNode* pre, const Expr& post) override {
@@ -36,11 +36,11 @@ def verify_func(func, data, ref_res):
             tvm.testing.assert_allclose(op_res.asnumpy(), ref_res, rtol=1e-5)
     relay.backend.compile_engine.get().clear()
 
-def test_dynamic_reshape():
+def test_dyn_reshape():
     def verify_reshape(shape, newshape, oshape):
         x = relay.var("x", relay.TensorType(shape, "float32"))
         y = relay.var("y", relay.TensorType((len(newshape), ), "int64"))
-        z = relay.dynamic.reshape(x, y)
+        z = relay.dyn.reshape(x, y)
 
         func = relay.Function([x, y], z)
         x_data = np.random.uniform(low=-1, high=1, size=shape).astype("float32")
@@ -56,11 +56,11 @@ def verify_reshape(shape, newshape, oshape):
     verify_reshape((2, 3, 4, 5), (-3, -3), (6, 20))
     verify_reshape((2, 3, 4), (0, -3), (2, 12))
 
-def test_dynamic_shape_reshape():
+def test_dyn_shape_reshape():
     def verify_reshape(shape, newshape, oshape):
         x = relay.var("x", relay.TensorType(shape, "float32"))
         y = relay.var("y", relay.TensorType(newshape, "float32"))
-        z = relay.dynamic.reshape(x, relay.shape_of(y))
+        z = relay.dyn.reshape(x, relay.shape_of(y))
 
         func = relay.Function([x, y], z)
         x_data = np.random.uniform(low=-1, high=1, size=shape).astype("float32")
@@ -71,5 +71,5 @@ def verify_reshape(shape, newshape, oshape):
     verify_reshape((4, 7), (2, 7, 2), (2, 7, 2))
 
 if __name__ == "__main__":
-    test_dynamic_reshape()
-    test_dynamic_shape_reshape()
+    test_dyn_reshape()
+    test_dyn_shape_reshape()
16 changes: 7 additions & 9 deletions tests/python/relay/test_pass_dynamic_to_static.py
@@ -34,8 +34,6 @@ def run_opt_pass(expr, opt_pass):
 def verify_func(func, data, ref_res):
     assert isinstance(data, list)
     for target, ctx in ctx_list():
-        # TODO(mbrookhart): enable Cuda tests once the VM supports dynamic shapes
-        if "llvm" not in target: continue
         for kind in ["graph", "vm", "debug"]:
             mod = tvm.ir.IRModule.from_expr(func)
             intrp = relay.create_executor(kind, mod=mod, ctx=ctx, target=target)
@@ -46,7 +44,7 @@ def test_dynamic_to_static_reshape():
     def verify_reshape(shape, newshape, oshape):
         x = relay.var("x", relay.TensorType(shape, "float32"))
         y = relay.var("y", relay.TensorType(newshape, "float32"))
-        z = relay.dynamic.reshape(x, relay.shape_of(y))
+        z = relay.dyn.reshape(x, relay.shape_of(y))
         func = run_infer_type(relay.Function([x, y], z))
         func2 = run_opt_pass(run_opt_pass(func, transform.DynamicToStatic()), transform.InferType())

@@ -68,8 +66,8 @@ def test_dynamic_to_static_double_reshape():
     def verify_reshape(shape, newshape):
         x = relay.var("x", relay.TensorType(shape, "float32"))
         y = relay.var("y", relay.TensorType(newshape, "float32"))
-        z = relay.dynamic.reshape(x, relay.shape_of(y))
-        z = relay.dynamic.reshape(z, relay.shape_of(x))
+        z = relay.dyn.reshape(x, relay.shape_of(y))
+        z = relay.dyn.reshape(z, relay.shape_of(x))
         func = run_infer_type(relay.Function([x, y], z))
         func2 = run_opt_pass(run_opt_pass(func, transform.DynamicToStatic()), transform.InferType())

@@ -90,10 +88,10 @@ def test_dynamic_to_static_quad_reshape():
     def verify_reshape(shape, newshape):
         x = relay.var("x", relay.TensorType(shape, "float32"))
         y = relay.var("y", relay.TensorType(newshape, "float32"))
-        z1 = relay.dynamic.reshape(x, relay.shape_of(y))
-        z2 = relay.dynamic.reshape(z1, relay.shape_of(x))
-        z3 = relay.dynamic.reshape(z2, relay.shape_of(z1))
-        z4 = relay.dynamic.reshape(z3, relay.shape_of(z2))
+        z1 = relay.dyn.reshape(x, relay.shape_of(y))
+        z2 = relay.dyn.reshape(z1, relay.shape_of(x))
+        z3 = relay.dyn.reshape(z2, relay.shape_of(z1))
+        z4 = relay.dyn.reshape(z3, relay.shape_of(z2))
         func = run_infer_type(relay.Function([x, y], z4))
         func2 = run_opt_pass(run_opt_pass(func, transform.DynamicToStatic()), transform.InferType())

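For context, a rough sketch of what these tests assert, reusing the run_infer_type/run_opt_pass helpers from this file: when the shape argument of dyn.reshape is statically derivable, DynamicToStatic rewrites the call to the static reshape op.

    x = relay.var("x", relay.TensorType((2, 3, 4), "float32"))
    y = relay.var("y", relay.TensorType((8, 3), "float32"))
    z = relay.dyn.reshape(x, relay.shape_of(y))  # dynamic at construction time
    func = run_infer_type(relay.Function([x, y], z))
    func2 = run_opt_pass(func, transform.DynamicToStatic())
    # The pass folds shape_of(y) into a constant, so the body becomes
    # the static reshape op with newshape=(8, 3).
    assert func2.body.op == relay.op.get("reshape")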
