[NNVM]Activations support added in Keras Frontend
PariksheetPinjari909 committed May 31, 2018
1 parent 61debb2 commit 2690b38
Showing 2 changed files with 82 additions and 8 deletions.
32 changes: 25 additions & 7 deletions nnvm/python/nnvm/frontend/keras.py
@@ -50,27 +50,45 @@ def _convert_activation(insym, keras_layer, _):
     elif act_type == 'softplus':
         return _sym.log(_sym.__add_scalar__(_sym.exp(insym), scalar=1))
     elif act_type == 'elu':
-        raise NotImplementedError('elu not implemented')
+        alpha = keras_layer.alpha if hasattr(keras_layer, "alpha") else 1
+        return -alpha * _sym.relu(1 - _sym.exp(insym)) + _sym.relu(insym)
+    elif act_type == 'selu':
+        alpha = keras_layer.alpha if hasattr(keras_layer, "alpha") else 1.6732
+        gamma = keras_layer.gamma if hasattr(keras_layer, "gamma") else 1.0507
+        return gamma * (-alpha * _sym.relu(1 - _sym.exp(insym)) + _sym.relu(insym))
     elif act_type == 'relu6':
         return _sym.clip(insym, a_min=0, a_max=6)
     elif act_type == 'softsign':
-        raise NotImplementedError('softsign not implemented')
+        return insym / (1 + (_sym.relu(insym) + _sym.relu(_sym.negative(insym))))
     elif act_type == 'hard_sigmoid':
-        raise NotImplementedError('hard_sigmoid not implemented')
+        transformX = (0.2 * insym) + 0.5
+        return _sym.clip(transformX, a_min=0, a_max=1)
     else:
         raise TypeError("Unsupported activation type : {}".format(act_type))


-def _convert_advanced_activation(insym, keras_layer, _):
+def _convert_advanced_activation(insym, keras_layer, symtab):
     act_type = type(keras_layer).__name__
     if act_type == 'LeakyReLU':
         return _sym.leaky_relu(insym, alpha=keras_layer.alpha)
     elif act_type == 'ELU':
-        raise NotImplementedError('ELU not implemented')
+        alpha = keras_layer.alpha if hasattr(keras_layer, "alpha") else 1
+        return -alpha * _sym.relu(1 - _sym.exp(insym)) + _sym.relu(insym)
     elif act_type == 'PReLU':
-        raise NotImplementedError('PReLU not implemented')
+        assert hasattr(keras_layer, "alpha"), \
+            "alpha required for PReLU."
+        _check_data_format(keras_layer)
+        transposeShape = []
+        size = len(keras_layer.alpha.shape)
+        transposeShape.append(size - 1)
+        for i in range(size - 1):
+            transposeShape.append(i)
+        return -symtab.new_const(keras_layer.get_weights()[0].transpose(transposeShape)) \
+            * _sym.relu(-insym) + _sym.relu(insym)
     elif act_type == 'ThresholdedReLU':
-        raise NotImplementedError('ThresholdedReLU not implemented')
+        theta = keras_layer.theta if hasattr(keras_layer, "theta") else 1.0
+        theta_tensor = _sym.full_like(insym[0], fill_value=float(theta))
+        return _sym.elemwise_mul(insym[0], _sym.greater(insym[0], theta_tensor, out_type="float32"))
     else:
         raise TypeError("Unsupported advanced activation type : {}".format(act_type))

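The converters above build each Keras activation out of NNVM's existing relu/clip/elemwise primitives instead of dedicated operators, and the PReLU branch transposes the Keras alpha weights so that the channel axis comes first, presumably to match the NCHW layout the frontend emits (my reading of the code, not stated in the commit). Below is a quick NumPy sketch, not part of the commit, that checks the algebraic identities being relied on, with NumPy arrays standing in for the symbolic ops:

# NumPy sketch (not part of this commit): verifies the identities used by the converters.
import numpy as np

x = np.linspace(-3, 3, 13)
relu = lambda v: np.maximum(v, 0)

# ELU: alpha * (exp(x) - 1) for x <= 0, x otherwise.
alpha = 0.5
elu_ref = np.where(x > 0, x, alpha * (np.exp(x) - 1))
assert np.allclose(elu_ref, -alpha * relu(1 - np.exp(x)) + relu(x))

# SELU is the same expression scaled by gamma, with the fixed default alpha/gamma.
gamma, alpha = 1.0507, 1.6732
selu_ref = gamma * np.where(x > 0, x, alpha * (np.exp(x) - 1))
assert np.allclose(selu_ref, gamma * (-alpha * relu(1 - np.exp(x)) + relu(x)))

# softsign: x / (1 + |x|), with |x| built as relu(x) + relu(-x).
assert np.allclose(x / (1 + np.abs(x)), x / (1 + (relu(x) + relu(-x))))

# PReLU: x for x > 0, alpha * x otherwise  ==  -alpha * relu(-x) + relu(x).
a = 0.25
assert np.allclose(np.where(x > 0, x, a * x), -a * relu(-x) + relu(x))

# ThresholdedReLU: x * 1[x > theta], built from a greater() comparison cast to float.
theta = 1.0
assert np.allclose(np.where(x > theta, x, 0), x * (x > theta).astype("float32"))

hard_sigmoid needs no identity: clip(0.2 * x + 0.5, 0, 1) is exactly Keras' piecewise-linear definition.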
58 changes: 57 additions & 1 deletion nnvm/tests/python/frontend/keras/test_forward.py
@@ -85,6 +85,57 @@ def test_forward_leaky_relu():
     keras_model = keras.models.Model(data, x)
     verify_keras_frontend(keras_model)

+def test_forward_prelu():
+    data = keras.layers.Input(shape=(32,32,3))
+    x = keras.layers.Conv2D(filters=10, kernel_size=(3,3), strides=(2,2), padding='same')(data)
+    weights = np.random.rand(1, 16, 16, 10)
+    x = keras.layers.PReLU(weights=weights, alpha_initializer="zero")(x)
+    x = keras.layers.Add()([x, x])
+    x = keras.layers.GlobalAveragePooling2D()(x)
+    keras_model = keras.models.Model(data, x)
+    verify_keras_frontend(keras_model)
+
+def test_forward_elu():
+    data = keras.layers.Input(shape=(32,32,3))
+    x = keras.layers.Conv2D(filters=10, kernel_size=(3,3), strides=(2,2), padding='same')(data)
+    x = keras.layers.ELU(alpha=0.5)(x)
+    x = keras.layers.Add()([x, x])
+    x = keras.layers.GlobalAveragePooling2D()(x)
+    keras_model = keras.models.Model(data, x)
+    verify_keras_frontend(keras_model)
+
+def test_forward_selu():
+    data = keras.layers.Input(shape=(32,32,3))
+    x = keras.layers.Activation('selu')(data)
+    x = keras.layers.Concatenate()([x, x])
+    x = keras.layers.GlobalMaxPooling2D()(x)
+    keras_model = keras.models.Model(data, x)
+    verify_keras_frontend(keras_model)
+
+def test_forward_thresholdedrelu():
+    data = keras.layers.Input(shape=(32,32,3))
+    x = keras.layers.Conv2D(filters=10, kernel_size=(3,3), strides=(2,2), padding='same')(data)
+    x = keras.layers.ThresholdedReLU(theta=0.5)(x)
+    x = keras.layers.Add()([x, x])
+    x = keras.layers.GlobalAveragePooling2D()(x)
+    keras_model = keras.models.Model(data, x)
+    verify_keras_frontend(keras_model)
+
+def test_forward_softsign():
+    data = keras.layers.Input(shape=(32,32,3))
+    x = keras.layers.Activation('softsign')(data)
+    x = keras.layers.Concatenate()([x, x])
+    x = keras.layers.GlobalMaxPooling2D()(x)
+    keras_model = keras.models.Model(data, x)
+    verify_keras_frontend(keras_model)
+
+def test_forward_hardsigmoid():
+    data = keras.layers.Input(shape=(32,32,3))
+    x = keras.layers.Activation('hard_sigmoid')(data)
+    x = keras.layers.Concatenate()([x, x])
+    x = keras.layers.GlobalMaxPooling2D()(x)
+    keras_model = keras.models.Model(data, x)
+    verify_keras_frontend(keras_model)

 def test_forward_dense():
     data = keras.layers.Input(shape=(32,32,3))
@@ -171,13 +222,18 @@ def test_forward_mobilenet():
     test_forward_softmax()
     test_forward_softrelu()
     test_forward_leaky_relu()
+    test_forward_prelu()
+    test_forward_elu()
+    test_forward_selu()
+    test_forward_thresholdedrelu()
+    test_forward_softsign()
+    test_forward_hardsigmoid()
     test_forward_dense()
     test_forward_transpose_conv()
     test_forward_separable_conv()
     test_forward_upsample()
     test_forward_relu6()
     test_forward_reshape()

     test_forward_vgg16()
     test_forward_xception()
     test_forward_resnet50()
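Each new test builds a small Keras model around one activation and hands it to verify_keras_frontend, which (per the existing harness) compares Keras' own predictions against the NNVM-compiled output. The sketch below, not taken from this commit, shows roughly what such a round trip looks like with the NNVM/TVM APIs of this era; the input naming and NCHW layout handling are assumptions rather than details of the harness:

# Rough sketch (not part of this commit) of a Keras -> NNVM round trip.
import numpy as np
import keras
import nnvm
import nnvm.compiler
import tvm
from tvm.contrib import graph_runtime

# One of the models exercised above: hard_sigmoid through an Activation layer.
data = keras.layers.Input(shape=(32, 32, 3))
x = keras.layers.Activation('hard_sigmoid')(data)
keras_model = keras.models.Model(data, x)

xs = np.random.uniform(size=(1, 32, 32, 3)).astype('float32')
keras_out = keras_model.predict(xs)

# Convert the Keras graph into NNVM symbols plus a parameter dict.
sym, params = nnvm.frontend.from_keras(keras_model)

# Assumption: the frontend produces an NCHW graph keyed by the Keras input name.
input_name = keras_model.input_names[0]
xs_nchw = xs.transpose(0, 3, 1, 2)
graph, lib, params = nnvm.compiler.build(
    sym, 'llvm', {input_name: xs_nchw.shape}, params=params)

module = graph_runtime.create(graph, lib, tvm.cpu(0))
module.set_input(input_name, tvm.nd.array(xs_nchw))
module.set_input(**params)
module.run()
nnvm_out = module.get_output(0, tvm.nd.empty((1, 3, 32, 32), 'float32')).asnumpy()

# The compiled graph stays NCHW, so transpose back before comparing to Keras.
np.testing.assert_allclose(keras_out, nnvm_out.transpose(0, 2, 3, 1), rtol=1e-5, atol=1e-5)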
