Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[Relay][Keras] Permute, Softmax support #3618

Merged
merged 1 commit into from
Jul 25, 2019
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
16 changes: 13 additions & 3 deletions python/tvm/relay/frontend/keras.py
Original file line number Diff line number Diff line change
Expand Up @@ -115,6 +115,9 @@ def _convert_activation(inexpr, keras_layer, _):

def _convert_advanced_activation(inexpr, keras_layer, etab):
act_type = type(keras_layer).__name__

if act_type == 'Softmax':
return _op.nn.softmax(inexpr, axis=1)
if act_type == 'ReLU':
if keras_layer.max_value:
return _op.clip(inexpr, a_min=0., a_max=float(keras_layer.max_value))
Expand Down Expand Up @@ -160,6 +163,8 @@ def _convert_merge(inexpr, keras_layer, _):
'Operator {} is not supported in frontend Keras.'.format(merge_type))
return ret

def _convert_permute(inexpr, keras_layer, _):
    """Convert a Keras ``Permute`` layer to a Relay transpose.

    Keras ``dims`` are 1-based indices over the non-batch axes; prepending
    axis 0 keeps the batch dimension in place.  ``dims`` may arrive as a
    list depending on how the layer was constructed, so coerce to a tuple
    before concatenating (tuple + list raises TypeError).
    """
    return _op.transpose(inexpr, axes=(0,) + tuple(keras_layer.dims))

def _convert_dense(inexpr, keras_layer, etab):
weightList = keras_layer.get_weights()
Expand Down Expand Up @@ -574,6 +579,7 @@ def _default_skip(inexpr, keras_layer, _): # pylint: disable=unused-argument
_convert_map = {
'Dense' : _convert_dense,
'Activation' : _convert_activation,
'Softmax' : _convert_advanced_activation,
'ReLU' : _convert_advanced_activation,
'LeakyReLU' : _convert_advanced_activation,
'PReLU' : _convert_advanced_activation,
Expand Down Expand Up @@ -620,7 +626,7 @@ def _default_skip(inexpr, keras_layer, _): # pylint: disable=unused-argument
'Average' : _convert_merge,
'Maximum' : _convert_merge,
# 'Dot' : _convert_merge,
# 'Permute' : _convert_permute,
'Permute' : _convert_permute,
# 'Embedding' : _convert_embedding,
# 'RepeatVector' : _convert_repeat_vector,

Expand All @@ -632,11 +638,15 @@ def _default_skip(inexpr, keras_layer, _): # pylint: disable=unused-argument


def _check_unsupported_layers(model):
    """Raise if *model* contains layers with no registered converter.

    Scans every layer of the Keras model and collects the class names that
    are absent from ``_convert_map``, so the user sees the complete list of
    unsupported operators in a single error instead of failing on the first
    offender.

    Raises
    ------
    NotImplementedError
        If one or more layer types have no converter.
    """
    missing_ops = {type(layer).__name__ for layer in model.layers
                   if type(layer).__name__ not in _convert_map}

    if missing_ops:
        # sorted() makes the message deterministic (sets have no stable order)
        raise NotImplementedError(
            "The following operators are not implemented: {}".format(sorted(missing_ops)))


def keras_op_to_relay(inexpr, keras_layer, outname, etab):
Expand Down
61 changes: 34 additions & 27 deletions tests/python/frontend/keras/test_forward.py
Original file line number Diff line number Diff line change
Expand Up @@ -73,7 +73,7 @@ def to_channels_last(arr):


def test_forward_merge():
data = keras.layers.Input(shape=(32,32,3))
data = keras.layers.Input(shape=(32, 32, 3))
x = keras.layers.Conv2D(8, (3, 3), padding="same")(data)
y = keras.layers.Conv2D(8, (3, 3), padding="same")(x)
z = keras.layers.Conv2D(8, (3, 3), padding="same")(y)
Expand All @@ -93,7 +93,7 @@ def test_forward_merge():


def test_forward_activations():
data = keras.layers.Input(shape=(32,32,3))
data = keras.layers.Input(shape=(32, 32, 3))
act_funcs = [keras.layers.Activation('softmax'),
keras.layers.Activation('softplus'),
keras.layers.Activation('relu'),
Expand All @@ -103,6 +103,7 @@ def test_forward_activations():
keras.layers.Activation('tanh'),
keras.layers.Activation('linear'),
keras.layers.Activation('selu'),
keras.layers.Softmax(),
keras.layers.ReLU(),
keras.layers.ReLU(max_value=6.),
keras.layers.LeakyReLU(alpha=0.3),
Expand All @@ -116,13 +117,18 @@ def test_forward_activations():


def test_forward_dense():
data = keras.layers.Input(shape=(32,32,1))
data = keras.layers.Input(shape=(32, 32, 1))
x = keras.layers.Flatten()(data)
x = keras.layers.Dropout(0.5)(x)
x = keras.layers.Dense(10, activation='relu', kernel_initializer='uniform')(x)
keras_model = keras.models.Model(data, x)
verify_keras_frontend(keras_model)

def test_forward_permute():
    # Build a tiny model that reorders the three non-batch axes and check
    # the Relay frontend reproduces it.  need_transpose=False: compare in
    # the model's own layout rather than converting to channels-first.
    inp = keras.layers.Input(shape=(2, 3, 4))
    out = keras.layers.Permute([2, 3, 1])(inp)
    model = keras.models.Model(inp, out)
    verify_keras_frontend(model, need_transpose=False)

def test_forward_sequential():
keras_model = keras.models.Sequential([
Expand All @@ -136,7 +142,7 @@ def test_forward_sequential():


def test_forward_pool():
data = keras.layers.Input(shape=(32,32,1))
data = keras.layers.Input(shape=(32, 32, 1))
# maxpool
x = keras.layers.MaxPooling2D((3, 3), strides=(1, 1), padding='same')(data)
keras_model = keras.models.Model(data, x)
Expand All @@ -148,36 +154,36 @@ def test_forward_pool():


def test_forward_conv():
data = keras.layers.Input(shape=(32,32,3))
conv_funcs = [keras.layers.Conv2D(filters=10, kernel_size=(3,3),
strides=(2,2), padding='same'),
keras.layers.Conv2D(filters=10, kernel_size=(3,3),
dilation_rate=(2,2), padding='same'),
keras.layers.DepthwiseConv2D(kernel_size=(3,3), padding='same'),
keras.layers.Conv2DTranspose(filters=10, kernel_size=(3,3), padding='valid'),
keras.layers.SeparableConv2D(filters=10, kernel_size=(3,3), padding='same')]
data = keras.layers.Input(shape=(32, 32, 3))
conv_funcs = [keras.layers.Conv2D(filters=10, kernel_size=(3, 3),
strides=(2, 2), padding='same'),
keras.layers.Conv2D(filters=10, kernel_size=(3, 3),
dilation_rate=(2, 2), padding='same'),
keras.layers.DepthwiseConv2D(kernel_size=(3, 3), padding='same'),
keras.layers.Conv2DTranspose(filters=10, kernel_size=(3, 3), padding='valid'),
keras.layers.SeparableConv2D(filters=10, kernel_size=(3, 3), padding='same')]
for conv_func in conv_funcs:
x = conv_func(data)
keras_model = keras.models.Model(data, x)
verify_keras_frontend(keras_model)


def test_forward_upsample(interpolation='nearest'):
data = keras.layers.Input(shape=(32,32,3))
x = keras.layers.UpSampling2D(size=(3,3), interpolation=interpolation)(data)
data = keras.layers.Input(shape=(32, 32, 3))
x = keras.layers.UpSampling2D(size=(3, 3), interpolation=interpolation)(data)
keras_model = keras.models.Model(data, x)
verify_keras_frontend(keras_model)


def test_forward_reshape():
data = keras.layers.Input(shape=(32,32,3))
x = keras.layers.Reshape(target_shape=(32,32,3))(data)
data = keras.layers.Input(shape=(32, 32, 3))
x = keras.layers.Reshape(target_shape=(32, 32, 3))(data)
keras_model = keras.models.Model(data, x)
verify_keras_frontend(keras_model)


def test_forward_crop():
data = keras.layers.Input(shape=(32,32,3))
data = keras.layers.Input(shape=(32, 32, 3))
x = keras.layers.Cropping2D(cropping=((1, 1), (1, 1)))(data)
x = keras.layers.Cropping2D(cropping=(1, 1))(x)
x = keras.layers.Cropping2D(cropping=1)(x)
Expand All @@ -190,8 +196,8 @@ def test_forward_crop():


def test_forward_multi_inputs():
data1 = keras.layers.Input(shape=(32,32,3))
data2 = keras.layers.Input(shape=(32,32,3))
data1 = keras.layers.Input(shape=(32, 32, 3))
data2 = keras.layers.Input(shape=(32, 32, 3))
x = keras.layers.Conv2D(8, (3, 3), padding="same")(data1)
y = keras.layers.Conv2D(8, (3, 3), padding="same")(data2)
z = keras.layers.Average()([x, y])
Expand All @@ -201,7 +207,7 @@ def test_forward_multi_inputs():


def test_forward_multi_outputs():
data = keras.layers.Input(shape=(32,32,3))
data = keras.layers.Input(shape=(32, 32, 3))
x = keras.layers.Conv2D(8, (3, 3), padding="same")(data)
x = keras.layers.GlobalAveragePooling2D()(x)
y = keras.layers.Conv2D(8, (3, 3), padding="same")(data)
Expand All @@ -212,7 +218,7 @@ def test_forward_multi_outputs():

def test_forward_reuse_layers():
# reuse conv2d
data = keras.layers.Input(shape=(32,32,3))
data = keras.layers.Input(shape=(32, 32, 3))
conv2d = keras.layers.Conv2D(8, (3, 3), padding="same")
x = conv2d(data)
y = conv2d(data)
Expand All @@ -221,7 +227,7 @@ def test_forward_reuse_layers():
keras_model = keras.models.Model(data, z)
verify_keras_frontend(keras_model)
# reuse add
data = keras.layers.Input(shape=(32,32,3))
data = keras.layers.Input(shape=(32, 32, 3))
x = keras.layers.Conv2D(8, (3, 3), padding="same")(data)
add = keras.layers.Add()
x = add([x, x])
Expand All @@ -232,7 +238,7 @@ def test_forward_reuse_layers():


def test_forward_rnn():
data = keras.layers.Input(shape=(1,32))
data = keras.layers.Input(shape=(1, 32))
rnn_funcs = [keras.layers.LSTM(units=16, return_state=False,
recurrent_activation='sigmoid', activation='tanh'),
keras.layers.SimpleRNN(units=16, return_state=False,
Expand All @@ -247,32 +253,33 @@ def test_forward_rnn():

def test_forward_vgg16():
keras_model = keras.applications.VGG16(include_top=True, weights='imagenet',
input_shape=(224,224,3), classes=1000)
input_shape=(224, 224, 3), classes=1000)
verify_keras_frontend(keras_model)


def test_forward_xception():
keras_model = keras.applications.Xception(include_top=True, weights='imagenet',
input_shape=(299,299,3), classes=1000)
input_shape=(299, 299, 3), classes=1000)
verify_keras_frontend(keras_model)


def test_forward_resnet50():
keras_model = keras.applications.ResNet50(include_top=True, weights='imagenet',
input_shape=(224,224,3), classes=1000)
input_shape=(224, 224, 3), classes=1000)
verify_keras_frontend(keras_model)


def test_forward_mobilenet():
keras_model = keras.applications.MobileNet(include_top=True, weights='imagenet',
input_shape=(224,224,3), classes=1000)
input_shape=(224, 224, 3), classes=1000)
verify_keras_frontend(keras_model)


if __name__ == '__main__':
test_forward_merge()
test_forward_activations()
test_forward_dense()
test_forward_permute()
test_forward_sequential()
test_forward_pool()
test_forward_conv()
Expand Down