This repository was archived by the owner on Oct 13, 2021. It is now read-only.

Commit 85e8132

Add both tf.nn.X and tf.compat.v1.nn.X to activation_map (#513)
1 parent 3e809bf commit 85e8132

2 files changed: +11 −1 lines changed

keras2onnx/ke2onnx/activation.py

Lines changed: 10 additions & 0 deletions
@@ -34,8 +34,18 @@
                   tf.nn.relu: apply_relu,
                   tf.nn.relu6: apply_relu_6,
                   tf.nn.elu: apply_elu,
+                  tf.nn.selu: apply_selu,
                   tf.nn.tanh: apply_tanh}
 
+if hasattr(tf.compat, 'v1'):
+    activation_map.update({tf.compat.v1.nn.sigmoid: apply_sigmoid})
+    activation_map.update({tf.compat.v1.nn.softmax: apply_softmax})
+    activation_map.update({tf.compat.v1.nn.relu: apply_relu})
+    activation_map.update({tf.compat.v1.nn.relu6: apply_relu_6})
+    activation_map.update({tf.compat.v1.nn.elu: apply_elu})
+    activation_map.update({tf.compat.v1.nn.selu: apply_selu})
+    activation_map.update({tf.compat.v1.nn.tanh: apply_tanh})
+
 
 def convert_keras_activation(scope, operator, container):
     input_name = operator.input_full_names[0]
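
The map is keyed on the TensorFlow function objects themselves, so an activation passed as tf.compat.v1.nn.X is only found if that exact object is registered; depending on the TF release, the compat.v1 alias may or may not be the same object as tf.nn.X. The sketch below illustrates that identity-based lookup. It is a simplified illustration, not the converter's actual dispatch code, and apply_relu here is a hypothetical stand-in for keras2onnx's ONNX apply helper.

import tensorflow as tf

# Hypothetical stand-in for keras2onnx's apply_relu helper.
def apply_relu(*args, **kwargs):
    print("emit ONNX Relu")

# Keyed on TF function objects, mirroring activation_map above.
activation_map = {tf.nn.relu: apply_relu}

# tf.compat.v1.nn.relu may be a distinct object from tf.nn.relu in some
# TF releases; registering both keeps the lookup robust either way.
if hasattr(tf.compat, 'v1'):
    activation_map.update({tf.compat.v1.nn.relu: apply_relu})

    # An activation specified via the compat alias now resolves too.
    assert activation_map.get(tf.compat.v1.nn.relu) is apply_relu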

tests/test_layers.py

Lines changed: 1 addition & 1 deletion
@@ -1374,7 +1374,7 @@ def test_Softmax(advanced_activation_runner):
 
 
 def test_tf_nn_activation(runner):
-    for activation in ['relu', tf.nn.relu, tf.nn.relu6]:
+    for activation in [tf.nn.relu, 'relu', tf.nn.relu6, tf.nn.softmax]:
         model = keras.Sequential([
             Dense(64, activation=activation, input_shape=[10]),
             Dense(64, activation=activation),
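
For context, the updated test exercises activations given as raw tf.nn callables as well as strings. A rough end-to-end sketch of the same pattern is shown below; it assumes tensorflow and keras2onnx are installed and uses tf.keras together with keras2onnx.convert_keras, whereas the actual test goes through its runner fixture.

import tensorflow as tf
from tensorflow import keras
from tensorflow.keras.layers import Dense
import keras2onnx

# tf.nn callables used directly as layer activations, mirroring the
# loop body in test_tf_nn_activation.
model = keras.Sequential([
    Dense(64, activation=tf.nn.relu, input_shape=[10]),
    Dense(64, activation=tf.nn.softmax),
])

# Convert to ONNX; the activation lookup now also covers the
# tf.compat.v1.nn.* aliases registered in this commit.
onnx_model = keras2onnx.convert_keras(model, model.name)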
