diff --git a/python/tvm/relay/frontend/keras.py b/python/tvm/relay/frontend/keras.py
index 91da87c84b809..92eb1eb9b2b16 100644
--- a/python/tvm/relay/frontend/keras.py
+++ b/python/tvm/relay/frontend/keras.py
@@ -115,6 +115,9 @@ def _convert_activation(inexpr, keras_layer, _):
 
 def _convert_advanced_activation(inexpr, keras_layer, etab):
     act_type = type(keras_layer).__name__
+
+    if act_type == 'Softmax':
+        return _op.nn.softmax(inexpr, axis=1)
     if act_type == 'ReLU':
         if keras_layer.max_value:
             return _op.clip(inexpr, a_min=0., a_max=float(keras_layer.max_value))
@@ -160,6 +163,9 @@ def _convert_merge(inexpr, keras_layer, _):
            'Operator {} is not supported in frontend Keras.'.format(merge_type))
     return ret
 
+def _convert_permute(inexpr, keras_layer, _):
+    ret = _op.transpose(inexpr, axes=(0,) + keras_layer.dims)
+    return ret
 
 def _convert_dense(inexpr, keras_layer, etab):
     weightList = keras_layer.get_weights()
@@ -574,6 +580,7 @@ def _default_skip(inexpr, keras_layer, _): # pylint: disable=unused-argument
 _convert_map = {
     'Dense' : _convert_dense,
     'Activation' : _convert_activation,
+    'Softmax' : _convert_advanced_activation,
     'ReLU' : _convert_advanced_activation,
     'LeakyReLU' : _convert_advanced_activation,
     'PReLU' : _convert_advanced_activation,
@@ -620,7 +627,7 @@ def _default_skip(inexpr, keras_layer, _): # pylint: disable=unused-argument
     'Average' : _convert_merge,
     'Maximum' : _convert_merge,
     # 'Dot' : _convert_merge,
-    # 'Permute' : _convert_permute,
+    'Permute' : _convert_permute,
 
     # 'Embedding' : _convert_embedding,
     # 'RepeatVector' : _convert_repeat_vector,
@@ -632,11 +639,15 @@ def _default_skip(inexpr, keras_layer, _): # pylint: disable=unused-argument
 
 
 def _check_unsupported_layers(model):
+    missing_ops = set()
     for layer in model.layers:
         op_name = type(layer).__name__
         if op_name not in _convert_map:
-            raise tvm.error.OpNotImplemented(
-                'Operator {} is not supported in frontend Keras.'.format(op_name))
+            missing_ops.add(op_name)
+
+    if missing_ops:
+        raise tvm.error.OpNotImplemented(
+            "The following operators are not implemented: {}".format(missing_ops))
 
 
 def keras_op_to_relay(inexpr, keras_layer, outname, etab):
diff --git a/tests/python/frontend/keras/test_forward.py b/tests/python/frontend/keras/test_forward.py
index 03117b21e3e23..a78a88400cb91 100644
--- a/tests/python/frontend/keras/test_forward.py
+++ b/tests/python/frontend/keras/test_forward.py
@@ -73,7 +73,7 @@ def to_channels_last(arr):
 
 
 def test_forward_merge():
-    data = keras.layers.Input(shape=(32,32,3))
+    data = keras.layers.Input(shape=(32, 32, 3))
     x = keras.layers.Conv2D(8, (3, 3), padding="same")(data)
     y = keras.layers.Conv2D(8, (3, 3), padding="same")(x)
     z = keras.layers.Conv2D(8, (3, 3), padding="same")(y)
@@ -93,7 +93,7 @@ def test_forward_merge():
 
 
 def test_forward_activations():
-    data = keras.layers.Input(shape=(32,32,3))
+    data = keras.layers.Input(shape=(32, 32, 3))
     act_funcs = [keras.layers.Activation('softmax'),
                  keras.layers.Activation('softplus'),
                  keras.layers.Activation('relu'),
@@ -103,6 +103,7 @@ def test_forward_activations():
                  keras.layers.Activation('tanh'),
                  keras.layers.Activation('linear'),
                  keras.layers.Activation('selu'),
+                 keras.layers.Softmax(),
                  keras.layers.ReLU(),
                  keras.layers.ReLU(max_value=6.),
                  keras.layers.LeakyReLU(alpha=0.3),
@@ -116,13 +117,25 @@ def test_forward_activations():
 
 
 def test_forward_dense():
-    data = keras.layers.Input(shape=(32,32,1))
+    data = keras.layers.Input(shape=(32, 32, 1))
     x = keras.layers.Flatten()(data)
     x = keras.layers.Dropout(0.5)(x)
     x = keras.layers.Dense(10, activation='relu', kernel_initializer='uniform')(x)
     keras_model = keras.models.Model(data, x)
     verify_keras_frontend(keras_model)
 
+def test_forward_permute():
+    data = keras.layers.Input(shape=(2, 3, 4))
+    x = keras.layers.Permute([2, 3, 1])(data)
+    keras_model = keras.models.Model(data, x)
+    verify_keras_frontend(keras_model, need_transpose=False)
+
+def test_forward_softmax():
+    data = keras.layers.Input(shape=(32, 32, 3))
+    x = keras.layers.Softmax()(data)
+    keras_model = keras.models.Model(data, x)
+    verify_keras_frontend(keras_model)
+
 
 def test_forward_sequential():
     keras_model = keras.models.Sequential([
@@ -136,7 +149,7 @@ def test_forward_sequential():
 
 
 def test_forward_pool():
-    data = keras.layers.Input(shape=(32,32,1))
+    data = keras.layers.Input(shape=(32, 32, 1))
     # maxpool
     x = keras.layers.MaxPooling2D((3, 3), strides=(1, 1), padding='same')(data)
     keras_model = keras.models.Model(data, x)
@@ -148,14 +161,14 @@ def test_forward_pool():
 
 
 def test_forward_conv():
-    data = keras.layers.Input(shape=(32,32,3))
-    conv_funcs = [keras.layers.Conv2D(filters=10, kernel_size=(3,3),
-                                      strides=(2,2), padding='same'),
-                  keras.layers.Conv2D(filters=10, kernel_size=(3,3),
-                                      dilation_rate=(2,2), padding='same'),
-                  keras.layers.DepthwiseConv2D(kernel_size=(3,3), padding='same'),
-                  keras.layers.Conv2DTranspose(filters=10, kernel_size=(3,3), padding='valid'),
-                  keras.layers.SeparableConv2D(filters=10, kernel_size=(3,3), padding='same')]
+    data = keras.layers.Input(shape=(32, 32, 3))
+    conv_funcs = [keras.layers.Conv2D(filters=10, kernel_size=(3, 3),
+                                      strides=(2, 2), padding='same'),
+                  keras.layers.Conv2D(filters=10, kernel_size=(3, 3),
+                                      dilation_rate=(2, 2), padding='same'),
+                  keras.layers.DepthwiseConv2D(kernel_size=(3, 3), padding='same'),
+                  keras.layers.Conv2DTranspose(filters=10, kernel_size=(3, 3), padding='valid'),
+                  keras.layers.SeparableConv2D(filters=10, kernel_size=(3, 3), padding='same')]
     for conv_func in conv_funcs:
         x = conv_func(data)
         keras_model = keras.models.Model(data, x)
@@ -163,21 +176,21 @@ def test_forward_conv():
 
 
 def test_forward_upsample(interpolation='nearest'):
-    data = keras.layers.Input(shape=(32,32,3))
-    x = keras.layers.UpSampling2D(size=(3,3), interpolation=interpolation)(data)
+    data = keras.layers.Input(shape=(32, 32, 3))
+    x = keras.layers.UpSampling2D(size=(3, 3), interpolation=interpolation)(data)
     keras_model = keras.models.Model(data, x)
     verify_keras_frontend(keras_model)
 
 
 def test_forward_reshape():
-    data = keras.layers.Input(shape=(32,32,3))
-    x = keras.layers.Reshape(target_shape=(32,32,3))(data)
+    data = keras.layers.Input(shape=(32, 32, 3))
+    x = keras.layers.Reshape(target_shape=(32, 32, 3))(data)
     keras_model = keras.models.Model(data, x)
     verify_keras_frontend(keras_model)
 
 
 def test_forward_crop():
-    data = keras.layers.Input(shape=(32,32,3))
+    data = keras.layers.Input(shape=(32, 32, 3))
     x = keras.layers.Cropping2D(cropping=((1, 1), (1, 1)))(data)
     x = keras.layers.Cropping2D(cropping=(1, 1))(x)
     x = keras.layers.Cropping2D(cropping=1)(x)
@@ -190,8 +203,8 @@ def test_forward_crop():
 
 
 def test_forward_multi_inputs():
-    data1 = keras.layers.Input(shape=(32,32,3))
-    data2 = keras.layers.Input(shape=(32,32,3))
+    data1 = keras.layers.Input(shape=(32, 32, 3))
+    data2 = keras.layers.Input(shape=(32, 32, 3))
     x = keras.layers.Conv2D(8, (3, 3), padding="same")(data1)
     y = keras.layers.Conv2D(8, (3, 3), padding="same")(data2)
     z = keras.layers.Average()([x, y])
@@ -201,7 +214,7 @@ def test_forward_multi_inputs():
 
 
 def test_forward_multi_outputs():
-    data = keras.layers.Input(shape=(32,32,3))
+    data = keras.layers.Input(shape=(32, 32, 3))
     x = keras.layers.Conv2D(8, (3, 3), padding="same")(data)
     x = keras.layers.GlobalAveragePooling2D()(x)
     y = keras.layers.Conv2D(8, (3, 3), padding="same")(data)
@@ -212,7 +225,7 @@ def test_forward_multi_outputs():
 
 def test_forward_reuse_layers():
     # reuse conv2d
-    data = keras.layers.Input(shape=(32,32,3))
+    data = keras.layers.Input(shape=(32, 32, 3))
     conv2d = keras.layers.Conv2D(8, (3, 3), padding="same")
     x = conv2d(data)
     y = conv2d(data)
@@ -221,7 +234,7 @@ def test_forward_reuse_layers():
     keras_model = keras.models.Model(data, z)
     verify_keras_frontend(keras_model)
     # reuse add
-    data = keras.layers.Input(shape=(32,32,3))
+    data = keras.layers.Input(shape=(32, 32, 3))
     x = keras.layers.Conv2D(8, (3, 3), padding="same")(data)
     add = keras.layers.Add()
     x = add([x, x])
@@ -232,7 +245,7 @@ def test_forward_reuse_layers():
 
 
 def test_forward_rnn():
-    data = keras.layers.Input(shape=(1,32))
+    data = keras.layers.Input(shape=(1, 32))
     rnn_funcs = [keras.layers.LSTM(units=16, return_state=False,
                                    recurrent_activation='sigmoid', activation='tanh'),
                  keras.layers.SimpleRNN(units=16, return_state=False,
@@ -247,25 +260,25 @@ def test_forward_rnn():
 
 
 def test_forward_vgg16():
    keras_model = keras.applications.VGG16(include_top=True, weights='imagenet',
-                                           input_shape=(224,224,3), classes=1000)
+                                           input_shape=(224, 224, 3), classes=1000)
     verify_keras_frontend(keras_model)
 
 
 def test_forward_xception():
     keras_model = keras.applications.Xception(include_top=True, weights='imagenet',
-                                              input_shape=(299,299,3), classes=1000)
+                                              input_shape=(299, 299, 3), classes=1000)
     verify_keras_frontend(keras_model)
 
 
 def test_forward_resnet50():
     keras_model = keras.applications.ResNet50(include_top=True, weights='imagenet',
-                                              input_shape=(224,224,3), classes=1000)
+                                              input_shape=(224, 224, 3), classes=1000)
     verify_keras_frontend(keras_model)
 
 
 def test_forward_mobilenet():
     keras_model = keras.applications.MobileNet(include_top=True, weights='imagenet',
-                                               input_shape=(224,224,3), classes=1000)
+                                               input_shape=(224, 224, 3), classes=1000)
     verify_keras_frontend(keras_model)
 
@@ -273,6 +286,8 @@ def test_forward_mobilenet():
     test_forward_merge()
     test_forward_activations()
     test_forward_dense()
+    test_forward_permute()
+    test_forward_softmax()
     test_forward_sequential()
     test_forward_pool()
     test_forward_conv()