
pylint check

heliqi committed Aug 17, 2021
1 parent 80d308a commit 202893c
Showing 1 changed file with 31 additions and 28 deletions.
python/tvm/relay/frontend/paddlepaddle.py (59 changes: 31 additions & 28 deletions)
@@ -17,9 +17,7 @@
 # pylint: disable=invalid-name, import-self, len-as-condition, unused-argument, too-many-lines
 # pylint: disable=import-outside-toplevel
 """Paddle: PArallel Distributed Deep LEarning."""
-import copy
 import warnings
-import six

 import numpy as np

@@ -43,6 +41,8 @@


 def shape_of(x, dtype="int32"):
+    """Get shape of a tensor"""
+
     ttype = infer_type(x).checked_type
     if not _ty.is_dynamic(ttype):
         shape = list(ttype.shape)
@@ -51,6 +51,8 @@ def shape_of(x, dtype="int32"):


 def _get_pad_size(in_size, dilated_kernel_size, stride_size):
+    """calculate the paddings size"""
+
     if stride_size == 1 or in_size % stride_size == 0:
         pad = max(dilated_kernel_size - stride_size, 0)
     else:
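For context, _get_pad_size follows the usual TensorFlow-style SAME-padding arithmetic: pad just enough that the strided window tiles the input into ceil(in_size / stride_size) outputs. A minimal sketch of that computation; the else branch is truncated in the hunk above, so its body here is an assumption based on the standard formula:

    def same_pad_total(in_size, dilated_kernel_size, stride_size):
        # Total padding that yields ceil(in_size / stride_size) output positions.
        if stride_size == 1 or in_size % stride_size == 0:
            return max(dilated_kernel_size - stride_size, 0)
        # Assumed body of the truncated else-branch (standard SAME formula).
        return max(dilated_kernel_size - in_size % stride_size, 0)

The total is then conventionally split into [total // 2, total - total // 2] for the two sides of each spatial axis.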
@@ -94,7 +96,6 @@ def convert_batch_norm(g, op, block):
     mean_name = op.input("Mean")[0]
     variance_name = op.input("Variance")[0]
     epsilon = op.attr("epsilon")
-    momentum = op.attr("momentum")
     out = _op.nn.batch_norm(
         g.get_node(ipt_name),
         g.get_node(scale_name),
@@ -127,16 +128,17 @@ def convert_concat(g, op, block):

 def convert_conv2d(g, op, block):
     """Operator converter for conv2d."""
+
     dilations = op.attr("dilations")
     groups = op.attr("groups")
     paddings = op.attr("paddings")
     padding_algorithm = op.attr("padding_algorithm")
     strides = op.attr("strides")

     kernel = g.get_node(op.input("Filter")[0])
-    input = g.get_node(op.input("Input")[0])
+    input_x = g.get_node(op.input("Input")[0])
     out_channels, _, k_h, k_w = infer_shape(kernel)
-    in_h, in_w = infer_shape(input)[2:]
+    in_h, in_w = infer_shape(input_x)[2:]
     if padding_algorithm == "VALID":
         paddings = [0, 0]
     elif padding_algorithm == "SAME":
@@ -149,11 +151,11 @@ def convert_conv2d(g, op, block):
         if len(paddings) == 4:
             paddings = [paddings[0], paddings[2], paddings[1], paddings[3]]
     else:
-        msg = 'Value {} in attribute "padding" of operator Conv is not ' "valid."
+        msg = 'Value {} in attribute "padding" of operator Conv is not "valid."'
         raise tvm.error.OpAttributeInvalid(msg.format(padding_algorithm))

     out = _op.nn.conv2d(
-        input,
+        input_x,
         kernel,
         strides=strides,
         padding=paddings,
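The input → input_x rename here (repeated in convert_pool2d below) is itself a pylint fix: input shadows the Python builtin, which pylint reports as redefined-builtin (W0622). A minimal illustration, not taken from this commit:

    input = [1, 2, 3]      # pylint: W0622 redefined-builtin; input() is shadowed here
    input_x = [1, 2, 3]    # same value, no shadowing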
@@ -312,7 +314,6 @@ def convert_hard_sigmoid(g, op, block):
     """Operator converter for hard_sigmoid."""

     slope = op.attr("slope")
-    offset = op.attr("offset")
     x = g.get_node(op.input("X")[0])
     out = x * _expr.const(slope) + _expr.const(0.5)
     out = _op.clip(out, 0, 1)
@@ -346,7 +347,7 @@ def convert_layer_norm(g, op, block):

     x_shape = infer_shape(x)
     assert (
-        begin_norm_axis == -1 or begin_norm_axis == len(x_shape) - 1
+        begin_norm_axis in (len(x_shape) - 1, -1)
     ), "Support only normalization over last one dimension."

     if bias_input:
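The rewritten assertion is pylint's consider-using-in refactor (R1714): two equality tests joined by or become a single membership test. A quick equivalence check, with a hypothetical input shape:

    x_shape = (8, 128, 768)                # hypothetical layer_norm input shape
    for begin_norm_axis in (-1, 2, 0):
        old = begin_norm_axis == -1 or begin_norm_axis == len(x_shape) - 1
        new = begin_norm_axis in (len(x_shape) - 1, -1)
        assert old == new                  # identical for every value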
@@ -379,8 +380,6 @@ def convert_lookup_table(g, op, block):

     indices = g.get_node(op.input("Ids")[0])
     padding_idx = op.attr("padding_idx")
-    is_sparse = op.attr("is_sparse")
-    height_sections = op.attr("height_sections")
     if padding_idx != -1:
         g.get_params[op.input("W")[0]][padding_idx] = 0.0
         g.add_node(op.input("W")[0], _expr.const(g.params[op.input("W")[0]]))
@@ -395,11 +394,11 @@ def convert_matmul(g, op, block):
     inputs = [g.get_node(op.input("X")[0]), g.get_node(op.input("Y")[0])]
     a_shape = infer_shape(inputs[0])
     b_shape = infer_shape(inputs[1])
-    try:
+    if op.has_attr("trans_x"):
         # for matmul_v2
         trans_x = op.attr("trans_x")
         trans_y = op.attr("trans_y")
-    except:
+    else:
         # for matmul
         trans_x = op.attr("transpose_X")
         trans_y = op.attr("transpose_Y")
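Both the legacy matmul op and matmul_v2 route through this converter, and they spell the transpose flags differently. The old code probed for the v2 spelling with a bare try/except (pylint W0702, bare-except), which would also have swallowed unrelated errors; the new code asks the op description directly. A sketch of the pattern, assuming a Paddle-style op object exposing has_attr(name) and attr(name):

    def get_transpose_flags(op):
        # Prefer an explicit capability check over a bare except: a typo
        # in the body would otherwise be silently masked.
        if op.has_attr("trans_x"):       # matmul_v2 spelling
            return op.attr("trans_x"), op.attr("trans_y")
        return op.attr("transpose_X"), op.attr("transpose_Y")  # legacy matmul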
@@ -491,12 +490,10 @@ def flatten_to_nd(x, x_shape, nd=3):
         out = _op.nn.dense(inputs[0], input_1_t)
         if b_rank == 1:
             out = _op.squeeze(out, axis=[-1])
-    try:
+    if op.has_attr("alpha"):
         alpha = op.attr("alpha")
         if not np.isclose(alpha, 1.0):
             out = out * _expr.const(alpha).astype("float32")
-    except:
-        pass
     g.add_node(op.output("Out")[0], out)


@@ -559,8 +556,8 @@ def convert_pool2d(g, op, block):
         adaptive = True
         ksize = [1, 1]

-    input = g.get_node(op.input("X")[0])
-    in_h, in_w = infer_shape(input)[2:]
+    input_x = g.get_node(op.input("X")[0])
+    in_h, in_w = infer_shape(input_x)[2:]

     op_map = {
         "avg": "avg_pool2d",
@@ -586,16 +583,15 @@ def convert_pool2d(g, op, block):
         if len(paddings) == 4:
             paddings = [paddings[0], paddings[2], paddings[1], paddings[3]]
     else:
-        msg = 'Value {} in attribute "padding" of operator Pool2d is not ' "valid."
+        msg = 'Value {} in attribute "padding" of operator Pool2d is not "valid."'
         raise tvm.error.OpAttributeInvalid(msg.format(padding_algorithm))

-    x = g.get_node(op.input("X")[0])
     if not adaptive:
         out = getattr(_op.nn, op_map[pooling_type])(
-            x, pool_size=ksize, strides=strides, padding=paddings, ceil_mode=ceil_mode
+            input_x, pool_size=ksize, strides=strides, padding=paddings, ceil_mode=ceil_mode
         )
     else:
-        out = getattr(_op.nn, "adaptive_" + op_map[pooling_type])(x, output_size=ksize)
+        out = getattr(_op.nn, "adaptive_" + op_map[pooling_type])(input_x, output_size=ksize)
     g.add_node(op.output("Out")[0], out)


@@ -764,7 +760,7 @@ def convert_unsqueeze(g, op, block):
 }


-class GraphProto(object):
+class GraphProto:
     """A helper class for handling relay functions from PaddlePaddle model."""

     def __init__(self):
@@ -773,13 +769,19 @@ def __init__(self):
         self.shape_dict = None

     def get_node(self, name):
+        """get node from graph"""
+
         assert name in self.nodes
         return self.nodes[name]

     def add_node(self, name, node):
+        """add a node to graph"""
+
         self.nodes[name] = fold_constant(node)

     def get_params(self, name=None):
+        """get params from graph"""
+
         if name is None:
             return self.params
         assert name in self.params
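A minimal usage sketch of the node store these docstrings now describe, assuming GraphProto is importable from this frontend module; note that add_node runs every stored node through fold_constant, so lookups always see pre-folded expressions:

    import numpy as np
    from tvm import relay

    g = GraphProto()
    g.add_node("x", relay.const(np.ones((2, 2), "float32")))  # stored via fold_constant
    node = g.get_node("x")       # asserts the name was registered
    params = g.get_params()      # full params dict when name is None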
@@ -809,17 +811,18 @@ def check_input_shape(self, op, block):
         ipt_shape = block.var(ipt_name).shape
         for i in ipt_shape:
             if i < 0:
-                warning_msg = "Input {}(shape={}) has unkown dimension shapes. Specifying static values may improve performance".format(
+                warning_msg = "Input {}(shape={}) has unkown dimension shapes. \
+                    Specifying static values may improve performance".format(
                     ipt_name, ipt_shape
                 )
-                warings.warn(warning_msg)
+                warnings.warn(warning_msg)

     def check_unsupported_ops(self, program):
         """Check whether all the operators are supported."""

         unsupported_ops = set()
         for block in program.blocks:
-            for i, op in enumerate(block.ops):
+            for op in block.ops:
                 if op.type == "fetch":
                     continue
                 if op.type not in _convert_map:
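Dropping enumerate here (and in ops_to_relay and from_program below) fixes pylint's unused-variable (W0612): the index i was never read. A trivial before/after with a hypothetical op-type list:

    ops = ["feed", "conv2d", "fetch"]     # hypothetical op type list
    for i, op in enumerate(ops):          # W0612: 'i' is never used
        print(op)
    for op in ops:                        # equivalent, and pylint-clean
        print(op)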
@@ -836,7 +839,7 @@ def ops_to_relay(self, program, input_specs=None):
             for input_spec in input_specs:
                 convert_feed(self, input_spec, None)
         for block in program.blocks:
-            for i, op in enumerate(block.ops):
+            for op in block.ops:
                 if op.type == "fetch":
                     continue
                 convert_func = _convert_map[op.type]
@@ -856,7 +859,7 @@ def from_program(self, program, shape_dict, scope):

         output_names = list()
         for block in program.blocks:
-            for i, op in enumerate(block.ops):
+            for op in block.ops:
                 if op.type == "fetch":
                     output_names.append(op.input("X")[0])
