From 81a83f494d4ccb18ad3777a53638c885a2ec2c6f Mon Sep 17 00:00:00 2001
From: wjj19950828
Date: Wed, 22 Sep 2021 22:07:24 +0800
Subject: [PATCH] fixed conflicts

---
 python/tvm/relay/frontend/paddlepaddle.py | 19 +++++++++----------
 1 file changed, 9 insertions(+), 10 deletions(-)

diff --git a/python/tvm/relay/frontend/paddlepaddle.py b/python/tvm/relay/frontend/paddlepaddle.py
index 7de1ed4698fc..56a99fd2b226 100644
--- a/python/tvm/relay/frontend/paddlepaddle.py
+++ b/python/tvm/relay/frontend/paddlepaddle.py
@@ -1772,16 +1772,15 @@ def make_init_param_inputs(g, node, layer):
                 axes=[0],
             )
             return init_h, init_c
-        else:
-            all_init_h = node.input("PreState")[0]
-            bidirect_len = 2 if node.attr("is_bidirec") else 1
-            init_h = _op.strided_slice(
-                g.get_node(all_init_h),
-                [layer * bidirect_len],
-                [layer * bidirect_len + bidirect_len],
-                axes=[0],
-            )
-            return init_h
+        all_init_h = node.input("PreState")[0]
+        bidirect_len = 2 if node.attr("is_bidirec") else 1
+        init_h = _op.strided_slice(
+            g.get_node(all_init_h),
+            [layer * bidirect_len],
+            [layer * bidirect_len + bidirect_len],
+            axes=[0],
+        )
+        return init_h
 
     hidden_size = op.attr("hidden_size")
     num_layers = op.attr("num_layers")
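
Note: the hunk above only drops a redundant else: after a return, de-indenting the non-LSTM path of make_init_param_inputs; behavior is unchanged. For reference, the slicing that path performs on the stacked PreState tensor can be sketched outside of Relay. The NumPy stand-in below is an illustration only (the helper name and shapes are assumptions, not part of the patch); it shows how layer * bidirect_len picks out this layer's initial hidden state, with the stride doubled for a bidirectional RNN.

import numpy as np

def slice_init_h(all_init_h, layer, is_bidirec):
    # Mirrors the strided_slice in the patch: take this layer's initial
    # hidden state(s) from the stacked PreState tensor along axis 0.
    bidirect_len = 2 if is_bidirec else 1
    start = layer * bidirect_len
    return all_init_h[start : start + bidirect_len]

# Assumed shape: (num_layers * num_directions, batch, hidden_size)
all_init_h = np.zeros((4, 8, 16))  # 2 layers, bidirectional
print(slice_init_h(all_init_h, layer=1, is_bidirec=True).shape)  # (2, 8, 16)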