
[NNVM] Activations support added in Keras Frontend #1210

Merged · 3 commits · Jun 7, 2018
34 changes: 27 additions & 7 deletions nnvm/python/nnvm/frontend/keras.py
@@ -23,6 +23,10 @@ def _get_pad_pair(input1d, kernel1d, stride1d):
     pad_after = pad - pad_before
     return [pad_before, pad_after]
 
+def _get_elu(insym, alpha):
+    """ A helper method for elu.
+    """
+    return -alpha * _sym.relu(1 - _sym.exp(insym)) + _sym.relu(insym)
 
 def _convert_activation(insym, keras_layer, _):
     if isinstance(keras_layer, str):
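
Not part of the PR, just a quick numpy sanity check that the closed form used by `_get_elu` above matches the textbook ELU, `x if x > 0 else alpha * (exp(x) - 1)` (`relu` here is a local helper, not the NNVM op):

```python
import numpy as np

def relu(x):
    return np.maximum(x, 0.0)

def elu_via_relu(x, alpha):
    # Same rewrite as _get_elu, expressed over numpy arrays.
    return -alpha * relu(1.0 - np.exp(x)) + relu(x)

x = np.linspace(-3.0, 3.0, 13)
reference = np.where(x > 0, x, 1.25 * (np.exp(x) - 1.0))
np.testing.assert_allclose(elu_via_relu(x, 1.25), reference, rtol=1e-6)
```
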
@@ -50,27 +54,43 @@ def _convert_activation(insym, keras_layer, _):
     elif act_type == 'softplus':
         return _sym.log(_sym.__add_scalar__(_sym.exp(insym), scalar=1))
     elif act_type == 'elu':
-        raise NotImplementedError('elu not implemented')
+        alpha = keras_layer.alpha if hasattr(keras_layer, "alpha") else 1
+        return _get_elu(insym, alpha)
+    elif act_type == 'selu':
+        # Alpha, Gamma values obtained from https://arxiv.org/abs/1706.02515
+        alpha = keras_layer.alpha if hasattr(keras_layer, "alpha") else 1.6732
+        gamma = keras_layer.gamma if hasattr(keras_layer, "gamma") else 1.0507
+        return gamma * _get_elu(insym, alpha)
     elif act_type == 'relu6':
         return _sym.clip(insym, a_min=0, a_max=6)
     elif act_type == 'softsign':
-        raise NotImplementedError('softsign not implemented')
+        return insym / (1 + (_sym.relu(insym) + _sym.relu(_sym.negative(insym))))
     elif act_type == 'hard_sigmoid':
-        raise NotImplementedError('hard_sigmoid not implemented')
+        transformX = (0.2 * insym) + 0.5
+        return _sym.clip(transformX, a_min=0, a_max=1)
     else:
         raise TypeError("Unsupported activation type : {}".format(act_type))
 
 
-def _convert_advanced_activation(insym, keras_layer, _):
+def _convert_advanced_activation(insym, keras_layer, symtab):
     act_type = type(keras_layer).__name__
     if act_type == 'LeakyReLU':
         return _sym.leaky_relu(insym, alpha=keras_layer.alpha)
     elif act_type == 'ELU':
-        raise NotImplementedError('ELU not implemented')
+        alpha = keras_layer.alpha if hasattr(keras_layer, "alpha") else 1
+        return _get_elu(insym, alpha)
     elif act_type == 'PReLU':
-        raise NotImplementedError('PReLU not implemented')
+        assert hasattr(keras_layer, "alpha"), \
+            "alpha required for PReLU."
+        _check_data_format(keras_layer)
+        size = len(keras_layer.alpha.shape)
+        return -symtab.new_const(keras_layer.get_weights()[0] \
+                                 .transpose(np.roll(range(size), 1))) \
+               * _sym.relu(-insym) + _sym.relu(insym)
     elif act_type == 'ThresholdedReLU':
-        raise NotImplementedError('ThresholdedReLU not implemented')
+        theta = keras_layer.theta if hasattr(keras_layer, "theta") else 1.0
+        theta_tensor = _sym.full_like(insym[0], fill_value=float(theta))
+        return _sym.elemwise_mul(insym[0],
+                                 _sym.greater(insym[0], theta_tensor, out_type="float32"))
     else:
         raise TypeError("Unsupported advanced activation type : {}".format(act_type))

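Likewise for the two parameterized advanced activations. A numpy sketch (not part of the PR) of the PReLU rewrite and the ThresholdedReLU mask, including the axis roll that maps Keras's HWC-ordered alpha weights onto the NCHW layout the frontend emits:

```python
import numpy as np

def relu(x):
    return np.maximum(x, 0.0)

x = np.random.randn(1, 3, 32, 32).astype('float32')       # NCHW activations
alpha_hwc = np.random.rand(32, 32, 3).astype('float32')   # Keras stores alpha as HWC

# np.roll(range(3), 1) == [2, 0, 1]: HWC -> CHW, mirroring the frontend's transpose
alpha = alpha_hwc.transpose(np.roll(range(alpha_hwc.ndim), 1))

# PReLU as -alpha * relu(-x) + relu(x): x where x > 0, alpha * x elsewhere
prelu = -alpha * relu(-x) + relu(x)
np.testing.assert_allclose(prelu, np.where(x > 0, x, alpha * x), rtol=1e-5)

# ThresholdedReLU: pass x through only where it strictly exceeds theta
theta = 0.5
thresholded = x * (x > theta).astype('float32')
```
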
65 changes: 22 additions & 43 deletions nnvm/tests/python/frontend/keras/test_forward.py
@@ -58,34 +58,6 @@ def test_forward_elemwise_add():
     keras_model = keras.models.Model(data, y)
     verify_keras_frontend(keras_model)
 
 
-def test_forward_softmax():
-    data = keras.layers.Input(shape=(32,32,3))
-    x = keras.layers.Activation('softmax')(data)
-    x = keras.layers.Concatenate()([x, x])
-    x = keras.layers.GlobalMaxPooling2D()(x)
-    keras_model = keras.models.Model(data, x)
-    verify_keras_frontend(keras_model)
-
-
-def test_forward_softrelu():
-    data = keras.layers.Input(shape=(32,32,3))
-    x = keras.layers.Activation('softplus')(data)
-    x = keras.layers.Concatenate()([x, x])
-    x = keras.layers.GlobalMaxPooling2D()(x)
-    keras_model = keras.models.Model(data, x)
-    verify_keras_frontend(keras_model)
-
-
-def test_forward_leaky_relu():
-    data = keras.layers.Input(shape=(32,32,3))
-    x = keras.layers.LeakyReLU(alpha=0.3)(data)
-    x = keras.layers.Add()([x, x])
-    x = keras.layers.GlobalAveragePooling2D()(x)
-    keras_model = keras.models.Model(data, x)
-    verify_keras_frontend(keras_model)
-
-
 def test_forward_dense():
     data = keras.layers.Input(shape=(32,32,3))
     x = keras.layers.MaxPooling2D(pool_size=(2,2))(data)
@@ -124,16 +96,6 @@ def test_forward_upsample():
     keras_model = keras.models.Model(data, x)
     verify_keras_frontend(keras_model)
 
 
-def test_forward_relu6():
-    data = keras.layers.Input(shape=(32,32,3))
-    x = keras.layers.Activation(keras.applications.mobilenet.relu6)(data)
-    x = keras.layers.Concatenate()([x, x])
-    x = keras.layers.GlobalMaxPooling2D()(x)
-    keras_model = keras.models.Model(data, x)
-    verify_keras_frontend(keras_model)
-
-
 def test_forward_reshape():
     data = keras.layers.Input(shape=(32,32,3))
     x = keras.layers.Reshape(target_shape=(32,32,3))(data)
@@ -165,19 +127,36 @@ def test_forward_mobilenet():
         input_shape=(224,224,3), classes=1000)
     verify_keras_frontend(keras_model)
 
+def test_forward_activations():
+    data = keras.layers.Input(shape=(32,32,3))
+    weights = np.random.rand(1, 32, 32, 3)
+    act_funcs = [keras.layers.Activation('softmax'),
+                 keras.layers.Activation('softplus'),
+                 keras.layers.LeakyReLU(alpha=0.3),
+                 keras.layers.Activation(keras.applications.mobilenet.relu6),
+                 keras.layers.PReLU(weights=weights, alpha_initializer="zero"),
+                 keras.layers.ELU(alpha=0.5),
+                 keras.layers.Activation('selu'),
+                 keras.layers.ThresholdedReLU(theta=0.5),
+                 keras.layers.Activation('softsign'),
+                 keras.layers.Activation('hard_sigmoid'),
+                 keras.layers.Activation('sigmoid'),
+                 keras.layers.Activation('tanh'),
+                 keras.layers.Activation('linear')]
+    for act_func in act_funcs:
+        x = act_func(data)
+        x = keras.layers.GlobalMaxPooling2D()(x)
+        keras_model = keras.models.Model(data, x)
+        verify_keras_frontend(keras_model)
+
 if __name__ == '__main__':
     test_forward_elemwise_add()
-    test_forward_softmax()
-    test_forward_softrelu()
-    test_forward_leaky_relu()
+    test_forward_activations()
     test_forward_dense()
     test_forward_transpose_conv()
     test_forward_separable_conv()
     test_forward_upsample()
-    test_forward_relu6()
     test_forward_reshape()
 
     test_forward_vgg16()
     test_forward_xception()
     test_forward_resnet50()
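
For reference, `verify_keras_frontend` is a helper defined earlier in test_forward.py whose body is outside this diff. A minimal sketch of what such a check presumably does, assuming the NNVM-era `nnvm.frontend.from_keras`, `nnvm.compiler.build`, and `tvm.contrib.graph_runtime` APIs (the real helper may differ in details):

```python
import numpy as np
import keras
import nnvm
import tvm
from tvm.contrib import graph_runtime

def verify_keras_frontend_sketch(keras_model, rtol=1e-5):
    # Run random data through Keras (NHWC)...
    in_shape = tuple(d if d is not None else 1 for d in keras_model.input_shape)
    x = np.random.uniform(size=in_shape).astype('float32')
    keras_out = keras_model.predict(x)
    # ...then through the NNVM frontend (NCHW) and compare the results.
    sym, params = nnvm.frontend.from_keras(keras_model)
    xt = x.transpose([0, 3, 1, 2])
    graph, lib, params = nnvm.compiler.build(
        sym, 'llvm', shape={'data': xt.shape}, params=params)
    module = graph_runtime.create(graph, lib, tvm.cpu())
    module.set_input('data', tvm.nd.array(xt))
    module.set_input(**params)
    module.run()
    tvm_out = module.get_output(0).asnumpy()
    np.testing.assert_allclose(keras_out.flatten(), tvm_out.flatten(), rtol=rtol)
```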