[Relay][Pass] Merge two consecutive reshape ops #6052

Merged Jul 16, 2020 (13 commits). Changes shown from 1 commit.
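
For context, the pass collapses chains like reshape(reshape(x, s1), s2) into a single reshape with the final shape. A minimal illustrative sketch (the variable names and shapes here are made up, not taken from the PR):

import tvm
from tvm import relay

x = relay.var("x", shape=(2, 3, 4), dtype="float32")
y = relay.reshape(x, newshape=(6, 4))  # first reshape
y = relay.reshape(y, newshape=(24,))   # second, consecutive reshape
# After this PR's SimplifyExpr pass, the two ops should fold into
# relay.reshape(x, newshape=(24,)).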
python/tvm/relay/op/_transform.py (1 addition, 1 deletion)
@@ -52,7 +52,7 @@
_reg.register_injective_schedule("take")
_reg.register_injective_schedule("transpose")
_reg.register_injective_schedule("stack")
_reg.register_injective_schedule("_contrib_reverse_reshape")
_reg.register_injective_schedule("contrib_reverse_reshape")
_reg.register_injective_schedule("gather")
_reg.register_injective_schedule("gather_nd")
_reg.register_injective_schedule("sequence_mask")
python/tvm/relay/op/transform.py (1 addition, 1 deletion)
@@ -908,7 +908,7 @@ def reverse_reshape(data, newshape):
"""
if isinstance(newshape, int):
newshape = [newshape]
- return _make._contrib_reverse_reshape(data, list(newshape))
+ return _make.contrib_reverse_reshape(data, list(newshape))


def gather(data, axis, indices):
python/tvm/relay/transform/__init__.py (1 addition)
@@ -19,3 +19,4 @@
# transformation passes
from .transform import *
from . import memory_alloc
+ from .simplify_expr import SimplifyExpr
python/tvm/relay/transform/simplify_expr.py (new file, 45 additions)
@@ -0,0 +1,45 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=unused-argument
"""
A pass for simplifying the Relay expression.
"""
from . import transform
from ..dataflow_pattern import wildcard, is_op, DFPatternCallback, rewrite
from .. import op as _op

class SimplifyReshapeCallback(DFPatternCallback):
"""Callback to merge consecutive reshape ops"""
def __init__(self):
self.x = wildcard()
reshape1 = is_op("reshape") | is_op("contrib_reverse_reshape")
reshape2 = is_op("reshape") | is_op("contrib_reverse_reshape")
self.pattern = reshape1(reshape2(self.x))

def callback(self, pre, post, node_map):
x = node_map[self.x][0]
return _op.reshape(x, newshape=pre.checked_type.shape)


@transform.function_pass(opt_level=0, required=["InferType"])
class SimplifyExpr:
""" A pass to simplify the Relay expression."""
def __init__(self):
self.callbacks = [SimplifyReshapeCallback()]

def transform_function(self, func, mod, _):
return rewrite(self.callbacks, func)
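
A hedged usage sketch for the new pass, assuming it is exposed as relay.transform.SimplifyExpr via the __init__.py change above; InferType is run explicitly because the rewrite callback reads checked_type:

import tvm
from tvm import relay

x = relay.var("x", shape=(2, 3, 4), dtype="float32")
y = relay.reshape(relay.reshape(x, newshape=(6, 4)), newshape=(24,))
mod = tvm.IRModule.from_expr(relay.Function([x], y))
mod = relay.transform.InferType()(mod)    # callback needs checked_type.shape
mod = relay.transform.SimplifyExpr()(mod)
print(mod)  # expect a single reshape to (24,)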
Comment (Contributor):
:) I've been thinking about putting together an algebraic simplifier for a while; this seems like a great first step.

src/relay/ir/dataflow_matcher.cc (2 additions, 2 deletions)
@@ -740,10 +740,10 @@ class PatternRewriter : protected MixedModeMutator {
groups_ = grouper.GroupMatches(callback_->pattern_, post);
gid_assignments_ = grouper.GetGIDAssignments();
memo_.clear();
- post = this->VisitExpr(post);
+ post = InferType(this->VisitExpr(post));
Comment (Contributor):
This is failing all of the pattern-language unit tests because they don't assume you need a typed graph for pattern matching. Maybe we should make this behavior optional? Or do we change the API to assert that expressions have to be well typed before running the pattern rewriter?
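
Until that question is settled, a caller can guarantee a typed graph by running InferType before invoking the rewriter. A minimal sketch of that caller-side workaround (rewrite is tvm.relay.dataflow_pattern.rewrite, as used in simplify_expr.py above):

import tvm
from tvm import relay
from tvm.relay.dataflow_pattern import rewrite

def rewrite_typed(callbacks, func):
    # Type the function by round-tripping it through a module, then rewrite.
    mod = relay.transform.InferType()(tvm.IRModule.from_expr(func))
    return rewrite(callbacks, mod["main"])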

Comment (Contributor):
I agree with making the InferType optional, but an assertion may not work: one pattern may rewrite a graph multiple times, so the rewritten nodes are still untyped even if the original nodes were well typed before the rewriter ran. One solution is to require users to manually type new nodes in the rewrite callback, but that seems non-trivial.
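
For reference, manually re-typing a standalone expression is possible by round-tripping it through a temporary module, though doing this in every callback is exactly the non-trivial burden described above. A rough sketch (the helper name is illustrative):

import tvm
from tvm import relay

def infer_type_of(expr):
    # Wrap the expression in a module, run InferType, and unwrap the result.
    mod = relay.transform.InferType()(tvm.IRModule.from_expr(expr))
    entry = mod["main"]
    return entry if isinstance(expr, relay.Function) else entry.body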

Comment (Contributor):
I agree; in the multi-stage rewrite scenario, it makes sense to have the InferType here.

count++;
}
- } while (last != post || count >= 100);
+ } while (!StructuralEqual()(last, post) || count >= 100);
if (count >= 100) {
throw("Observed 100 rewrite passes, possible conflicting passes?");
}
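
The switch from pointer inequality to StructuralEqual matters because each rewrite allocates fresh nodes: two structurally identical Relay expressions are still distinct objects, so last != post could keep the loop spinning even after the rewrite has converged. A small Python illustration of the distinction:

import tvm
from tvm import relay

a = relay.var("x", shape=(1,), dtype="float32")
e1 = relay.add(a, a)
e2 = relay.add(a, a)
assert not e1.same_as(e2)               # distinct objects (reference inequality)
assert tvm.ir.structural_equal(e1, e2)  # yet structurally equal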
src/relay/op/tensor/transform.cc (3 additions, 3 deletions)
@@ -2573,13 +2573,13 @@ Expr MakeReverseReshape(Expr data, Array<Integer> newshape) {
auto attrs = make_object<ReshapeAttrs>();
attrs->newshape = std::move(newshape);
attrs->reverse = true;
- static const Op& op = Op::Get("_contrib_reverse_reshape");
+ static const Op& op = Op::Get("contrib_reverse_reshape");
return Call(op, {data}, Attrs(attrs), {});
}

- TVM_REGISTER_GLOBAL("relay.op._make._contrib_reverse_reshape").set_body_typed(MakeReverseReshape);
+ TVM_REGISTER_GLOBAL("relay.op._make.contrib_reverse_reshape").set_body_typed(MakeReverseReshape);

- RELAY_REGISTER_OP("_contrib_reverse_reshape")
+ RELAY_REGISTER_OP("contrib_reverse_reshape")
.describe(R"code(Reshapes the input array where the special values are inferred from
right to left.
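
A worked example of the right-to-left inference, mirroring the shapes in the new test below (this assumes the MXNet-style reverse-reshape semantics the docstring describes):

import tvm
from tvm import relay

x = relay.var("x", shape=(4, 8, 16, 16), dtype="float32")
y = relay.reverse_reshape(x, newshape=(32, 0, -1))
mod = relay.transform.InferType()(tvm.IRModule.from_expr(relay.Function([x], y)))
# Matching from the right: 0 copies the input's second-to-last dim (16) and
# -1 absorbs the remaining elements, giving Tensor[(32, 16, 16), float32].
print(mod)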

tests/python/relay/test_pass_simplify_expr.py (new file, 47 additions)
@@ -0,0 +1,47 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import tvm
from tvm import relay
from tvm.relay import transform
from tvm.relay.testing import run_opt_pass


def test_simplify_reshape():
def before():
x = relay.var("x", shape=(1, 16, 16, 16), dtype="float32")
w = relay.var("w", shape=(32, 16, 3, 3), dtype="float32")
y = relay.nn.conv2d(x, w, padding=(1, 1))
y = relay.reshape(y, newshape=(1, 16, -1))
y = relay.reshape(y, newshape=(4, 8, -1, 16))
y = relay.reverse_reshape(y, newshape=(32, 0, -1))
return relay.Function([x, w], y)

def expected():
x = relay.var("x", shape=(1, 16, 16, 16), dtype="float32")
w = relay.var("w", shape=(32, 16, 3, 3), dtype="float32")
y = relay.nn.conv2d(x, w, padding=(1, 1))
y = relay.reshape(y, newshape=(32, 16, 16))
return relay.Function([x, w], y)

z = before()
zz = run_opt_pass(z, transform.SimplifyExpr())
after = run_opt_pass(expected(), transform.InferType())
assert tvm.ir.structural_equal(zz, after)


if __name__ == "__main__":
test_simplify_reshape()