
Helper for ELU added
PariksheetPinjari909 committed Jun 6, 2018
1 parent d365025 commit 6b4a7ad
Showing 2 changed files with 15 additions and 11 deletions.
nnvm/python/nnvm/frontend/keras.py: 18 changes (10 additions, 8 deletions)
@@ -23,6 +23,10 @@ def _get_pad_pair(input1d, kernel1d, stride1d):
     pad_after = pad - pad_before
     return [pad_before, pad_after]
 
+def _get_elu(insym, alpha):
+    """ A helper method for elu.
+    """
+    return -alpha * _sym.relu(1 - _sym.exp(insym)) + _sym.relu(insym)
 
 def _convert_activation(insym, keras_layer, _):
     if isinstance(keras_layer, str):
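
For reference, the expression the new _get_elu helper factors out is algebraically the standard ELU: _sym.relu(insym) passes positive inputs through unchanged, while -alpha * _sym.relu(1 - _sym.exp(insym)) contributes alpha * (exp(x) - 1) for non-positive inputs. A quick NumPy sketch of that equivalence (illustrative only, not part of this commit):

import numpy as np

def relu(x):
    return np.maximum(x, 0)

def elu_via_relu(x, alpha):
    # Same expression as _get_elu, written with NumPy instead of nnvm symbols.
    return -alpha * relu(1 - np.exp(x)) + relu(x)

def elu_reference(x, alpha):
    # Textbook ELU: x for x > 0, alpha * (exp(x) - 1) otherwise.
    return np.where(x > 0, x, alpha * (np.exp(x) - 1))

x = np.linspace(-5, 5, 101)
assert np.allclose(elu_via_relu(x, 1.0), elu_reference(x, 1.0))
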
@@ -51,11 +55,12 @@ def _convert_activation(insym, keras_layer, _):
         return _sym.log(_sym.__add_scalar__(_sym.exp(insym), scalar=1))
     elif act_type == 'elu':
         alpha = keras_layer.alpha if hasattr(keras_layer, "alpha") else 1
-        return -alpha * _sym.relu(1 - _sym.exp(insym)) + _sym.relu(insym)
+        return _get_elu(insym, alpha)
     elif act_type == 'selu':
         # Alpha, Gamma values, obtained from https://arxiv.org/abs/1706.02515
         alpha = keras_layer.alpha if hasattr(keras_layer, "alpha") else 1.6732
         gamma = keras_layer.gamma if hasattr(keras_layer, "gamma") else 1.0507
-        return gamma * (-alpha * _sym.relu(1 - _sym.exp(insym)) + _sym.relu(insym))
+        return gamma * _get_elu(insym, alpha)
     elif act_type == 'relu6':
         return _sym.clip(insym, a_min=0, a_max=6)
     elif act_type == 'softsign':
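
SELU reuses the same helper because it is just a scaled ELU, gamma * ELU(x, alpha), with fixed constants from the self-normalizing networks paper; the 1.6732 and 1.0507 defaults above are truncated versions of those constants. A small NumPy check of that relationship (again illustrative, not part of the commit):

import numpy as np

ALPHA, GAMMA = 1.6732, 1.0507  # truncated defaults from https://arxiv.org/abs/1706.02515

def relu(x):
    return np.maximum(x, 0)

x = np.linspace(-3, 3, 61)
# gamma * _get_elu(x, alpha), rewritten with NumPy in place of nnvm symbols
selu_via_elu = GAMMA * (-ALPHA * relu(1 - np.exp(x)) + relu(x))
# the paper's definition: gamma * x for x > 0, gamma * alpha * (exp(x) - 1) otherwise
selu_reference = np.where(x > 0, GAMMA * x, GAMMA * ALPHA * (np.exp(x) - 1))
assert np.allclose(selu_via_elu, selu_reference)
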
@@ -73,17 +78,14 @@ def _convert_advanced_activation(insym, keras_layer, symtab):
         return _sym.leaky_relu(insym, alpha=keras_layer.alpha)
     elif act_type == 'ELU':
         alpha = keras_layer.alpha if hasattr(keras_layer, "alpha") else 1
-        return -alpha * _sym.relu(1 - _sym.exp(insym)) + _sym.relu(insym)
+        return _get_elu(insym, alpha)
     elif act_type == 'PReLU':
         assert hasattr(keras_layer, "alpha"), \
             "alpha required for PReLU."
         _check_data_format(keras_layer)
-        transposeShape = []
         size = len(keras_layer.alpha.shape)
-        transposeShape.append(size - 1)
-        for i in range(size - 1):
-            transposeShape.append(i)
-        return -symtab.new_const(keras_layer.get_weights()[0].transpose(transposeShape)) \
+        return -symtab.new_const(keras_layer.get_weights()[0] \
+            .transpose(np.roll(range(size), 1))) \
             * _sym.relu(-insym) + _sym.relu(insym)
     elif act_type == 'ThresholdedReLU':
         theta = keras_layer.theta if hasattr(keras_layer, "theta") else 1.0
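
The PReLU change above is a pure refactor of the axis permutation: the removed loop built transposeShape as [size - 1, 0, 1, ..., size - 2], which is exactly what np.roll(range(size), 1) produces, i.e. the trailing (channel) axis moved to the front before the alpha weights are loaded as a constant. A small equivalence check (illustrative only):

import numpy as np

size = 3  # e.g. a Keras channels_last PReLU alpha of shape (H, W, C)

# hand-built permutation from the removed lines
transposeShape = [size - 1]
for i in range(size - 1):
    transposeShape.append(i)                         # -> [2, 0, 1]

# one-liner used by this commit
rolled = [int(i) for i in np.roll(range(size), 1)]   # -> [2, 0, 1]

assert transposeShape == rolled
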
nnvm/tests/python/frontend/keras/test_forward.py: 8 changes (5 additions, 3 deletions)
@@ -216,9 +216,7 @@ def test_forward_mobilenet():
         input_shape=(224,224,3), classes=1000)
     verify_keras_frontend(keras_model)
 
-
-if __name__ == '__main__':
-    test_forward_elemwise_add()
+def test_forward_activations():
     test_forward_softmax()
     test_forward_softrelu()
     test_forward_leaky_relu()
@@ -228,6 +226,10 @@ def test_forward_mobilenet():
     test_forward_thresholdedrelu()
     test_forward_softsign()
     test_forward_hardsigmoid()
+
+if __name__ == '__main__':
+    test_forward_elemwise_add()
+    test_forward_activations()
     test_forward_dense()
     test_forward_transpose_conv()
     test_forward_separable_conv()
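
For context on the test refactor: the activation tests that previously ran straight from the __main__ block are now grouped under test_forward_activations(), which __main__ calls once. Each grouped test presumably follows the file's existing pattern of building a small Keras model and passing it to verify_keras_frontend; a hedged sketch of what one such case might look like (the test name and shapes here are illustrative, only verify_keras_frontend comes from the file):

import keras

def test_forward_elu():
    # Hypothetical activation test in the style of the grouped tests above.
    data = keras.layers.Input(shape=(32, 32, 3))
    out = keras.layers.ELU(alpha=0.5)(data)
    keras_model = keras.models.Model(data, out)
    verify_keras_frontend(keras_model)  # helper defined earlier in test_forward.py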
