[Relay][Pass] Merge two consecutive reshape ops #6052
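For orientation: the pass merges a chain of consecutive reshape-like ops into a single reshape carrying only the final target shape. A minimal illustration of the intended transformation (the shapes here are made up for the example and are not taken from this PR):

```python
import tvm
from tvm import relay

x = relay.var("x", shape=(1, 16, 16, 16), dtype="float32")
y = relay.reshape(x, newshape=(1, 16, -1))     # intermediate reshape
y = relay.reshape(y, newshape=(4, 8, -1, 16))  # second consecutive reshape
# After the pass, the two reshapes above should collapse into a single
# relay.reshape(x, newshape=(4, 8, 8, 16)) holding only the final shape.
print(relay.Function([x], y))
```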
@@ -0,0 +1,45 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=unused-argument
"""
A pass for simplifying the Relay expression.
"""
from . import transform
from ..dataflow_pattern import wildcard, is_op, DFPatternCallback, rewrite
from .. import op as _op

class SimplifyReshapeCallback(DFPatternCallback):
    """Callback to merge consecutive reshape ops"""
    def __init__(self):
        self.x = wildcard()
        reshape1 = is_op("reshape") | is_op("contrib_reverse_reshape")
        reshape2 = is_op("reshape") | is_op("contrib_reverse_reshape")
        self.pattern = reshape1(reshape2(self.x))

    def callback(self, pre, post, node_map):
        x = node_map[self.x][0]
        return _op.reshape(x, newshape=pre.checked_type.shape)


@transform.function_pass(opt_level=0, required=["InferType"])
class SimplifyExpr:
    """A pass to simplify the Relay expression."""
    def __init__(self):
        self.callbacks = [SimplifyReshapeCallback()]

    def transform_function(self, func, mod, _):
        return rewrite(self.callbacks, func)
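A short usage sketch, assuming the pass is exposed as relay.transform.SimplifyExpr the way the test below uses it. Because the pass declares required=["InferType"], the example runs InferType explicitly before the simplification:

```python
import tvm
from tvm import relay

x = relay.var("x", shape=(1, 16, 16), dtype="float32")
y = relay.reshape(relay.reshape(x, newshape=(1, -1)), newshape=(16, 16))
mod = tvm.IRModule.from_expr(relay.Function([x], y))

# Type the module first, then run the simplification over every function.
seq = tvm.transform.Sequential([relay.transform.InferType(),
                                relay.transform.SimplifyExpr()])
mod = seq(mod)
print(mod)  # the two reshapes should be folded into one
```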
@@ -740,10 +740,10 @@ class PatternRewriter : protected MixedModeMutator {
       groups_ = grouper.GroupMatches(callback_->pattern_, post);
       gid_assignments_ = grouper.GetGIDAssignments();
       memo_.clear();
-      post = this->VisitExpr(post);
+      post = InferType(this->VisitExpr(post));
Review comment: This is failing all of the pattern language unit tests because they don't assume you need a typed graph for pattern matching. Maybe we should make this behavior optional? Or do we change the API to assert that expressions have to be well typed to run the pattern rewriter?

Review comment: I agree with making the InferType optional, but an assertion may not work: one pattern may rewrite a graph multiple times, so the rewritten nodes are still untyped even if the original nodes were well typed before the rewriter ran. One solution is to require users to manually type new nodes in the rewrite callback, but that seems non-trivial.

Review comment: I agree; in the multi-stage rewrite scenario, it makes sense to have the InferType here.
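A hedged sketch of what the "make InferType optional" suggestion could look like from the Python side: a per-callback flag the rewriter checks before re-running type inference. The flag name require_type is an assumption for illustration only; it is not part of this diff:

```python
from tvm.relay.dataflow_pattern import DFPatternCallback, is_op, wildcard

class TypedReshapeCallback(DFPatternCallback):
    """Hypothetical callback that opts in to type inference before matching."""
    def __init__(self):
        super().__init__()
        self.require_type = True  # hypothetical opt-in flag, not in this PR
        self.x = wildcard()
        self.pattern = is_op("reshape")(self.x)

    def callback(self, pre, post, node_map):
        # With types available, pre.checked_type.shape can be read safely here.
        return post
```

Callbacks that never touch checked_type would leave the flag off and skip the extra InferType round.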
       count++;
     }
-  } while (last != post || count >= 100);
+  } while (!StructuralEqual()(last, post) || count >= 100);
   if (count >= 100) {
     throw("Observed 100 rewrite passes, possible conflicting passes?");
   }
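The second change above swaps pointer inequality for structural equality in the fixed-point check, since a rewrite round can produce a new Expr object that is structurally identical to the previous one. A rough Python analogue of the intended loop, for illustration only (it treats the 100-pass cap as a hard stop):

```python
import tvm

def rewrite_to_fixed_point(expr, run_callbacks, max_passes=100):
    """Repeatedly apply one round of pattern callbacks until nothing changes."""
    for _ in range(max_passes):
        last = expr
        expr = run_callbacks(expr)  # one round of all registered callbacks
        if tvm.ir.structural_equal(last, expr):
            return expr
    raise RuntimeError("Observed %d rewrite passes, possible conflicting passes?" % max_passes)
```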
@@ -0,0 +1,47 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import tvm
from tvm import relay
from tvm.relay import transform
from tvm.relay.testing import run_opt_pass

def test_simplify_reshape():
    def before():
        x = relay.var("x", shape=(1, 16, 16, 16), dtype="float32")
        w = relay.var("w", shape=(32, 16, 3, 3), dtype="float32")
        y = relay.nn.conv2d(x, w, padding=(1, 1))
        y = relay.reshape(y, newshape=(1, 16, -1))
        y = relay.reshape(y, newshape=(4, 8, -1, 16))
        y = relay.reverse_reshape(y, newshape=(32, 0, -1))
        return relay.Function([x, w], y)

    def expected():
        x = relay.var("x", shape=(1, 16, 16, 16), dtype="float32")
        w = relay.var("w", shape=(32, 16, 3, 3), dtype="float32")
        y = relay.nn.conv2d(x, w, padding=(1, 1))
        y = relay.reshape(y, newshape=(32, 16, 16))
        return relay.Function([x, w], y)

    z = before()
    zz = run_opt_pass(z, transform.SimplifyExpr())
    after = run_opt_pass(expected(), transform.InferType())
    assert tvm.ir.structural_equal(zz, after)


if __name__ == "__main__":
    test_simplify_reshape()
Review comment: :) I've been thinking about putting together an algebraic simplifier for a while; this seems like a great first step.