keras.layers.ReLU added (apache#1530)
siju-samuel authored and sergei-mironov committed Aug 8, 2018
1 parent fd2cd0c commit 21b3c07
Showing 2 changed files with 5 additions and 1 deletion.
nnvm/python/nnvm/frontend/keras.py (4 additions, 1 deletion)
@@ -74,7 +74,9 @@ def _convert_activation(insym, keras_layer, _):
 
 def _convert_advanced_activation(insym, keras_layer, symtab):
     act_type = type(keras_layer).__name__
-    if act_type == 'LeakyReLU':
+    if act_type == 'ReLU':
+        return _sym.relu(insym)
+    elif act_type == 'LeakyReLU':
         return _sym.leaky_relu(insym, alpha=keras_layer.alpha)
     elif act_type == 'ELU':
         alpha = keras_layer.alpha if hasattr(keras_layer, "alpha") else 1
@@ -382,6 +384,7 @@ def _default_skip(insym, keras_layer, _): # pylint: disable=unused-argument
 _convert_map = {
     'Dense'      : _convert_dense,
     'Activation' : _convert_activation,
+    'ReLU'       : _convert_advanced_activation,
     'LeakyReLU'  : _convert_advanced_activation,
     'PReLU'      : _convert_advanced_activation,
     'ELU'        : _convert_advanced_activation,
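For context, a minimal sketch of how the new mapping would be exercised end to end; the model and variable names below are illustrative, not part of the commit:

import keras
import nnvm

# Tiny Keras model using the advanced-activation ReLU layer added above.
data = keras.layers.Input(shape=(32, 32, 3))
out = keras.layers.ReLU()(data)  # dispatched through _convert_advanced_activation to _sym.relu
model = keras.models.Model(data, out)

# Convert the Keras model to an NNVM symbol plus parameter dict.
sym, params = nnvm.frontend.from_keras(model)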
nnvm/tests/python/frontend/keras/test_forward.py (1 addition, 0 deletions)
@@ -138,6 +138,7 @@ def test_forward_activations():
     weights = np.random.rand(1, 32, 32, 3)
     act_funcs = [keras.layers.Activation('softmax'),
                  keras.layers.Activation('softplus'),
+                 keras.layers.ReLU(),
                  keras.layers.LeakyReLU(alpha=0.3),
                  keras.layers.PReLU(weights=weights, alpha_initializer="zero"),
                  keras.layers.ELU(alpha=0.5),
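The hunk only extends the activation list; in the surrounding test each entry is wrapped into a one-layer model and compared against TVM's output. A hedged sketch of that loop, assuming a verify_keras_frontend helper defined elsewhere in this test file:

data = keras.layers.Input(shape=(32, 32, 3))
for act_func in act_funcs:
    # Wrap each activation (including the new keras.layers.ReLU) in a model
    # and check the NNVM-converted output against Keras.
    x = act_func(data)
    keras_model = keras.models.Model(data, x)
    verify_keras_frontend(keras_model)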