@@ -6,8 +6,8 @@
 import numpy as np
 import tensorflow as tf
 from ..proto import keras, is_tf_keras
-from ..common.onnx_ops import apply_elu, apply_hard_sigmoid, apply_relu, apply_relu_6, apply_sigmoid, apply_tanh, \
-    apply_softmax, apply_identity, apply_selu, apply_mul
+from ..common.onnx_ops import apply_elu, apply_hard_sigmoid, apply_leaky_relu, apply_relu, apply_relu_6, \
+    apply_tanh, apply_softmax, apply_identity, apply_selu, apply_mul, apply_prelu, apply_sigmoid
 from onnx.mapping import TENSOR_TYPE_TO_NP_TYPE
 
 activation_get = keras.activations.get
@@ -21,6 +21,11 @@
 if not relu6 and hasattr(keras.applications.mobilenet, 'relu6'):
     relu6 = keras.applications.mobilenet.relu6
 
+
+def apply_leaky_relu_keras(scope, input_name, output_name, container, operator_name=None, alpha=0.2):
+    apply_leaky_relu(scope, input_name, output_name, container, operator_name, alpha)
+
+
 activation_map = {activation_get('sigmoid'): apply_sigmoid,
                   activation_get('softmax'): apply_softmax,
                   activation_get('linear'): apply_identity,
@@ -29,6 +34,7 @@
                   activation_get('selu'): apply_selu,
                   activation_get('tanh'): apply_tanh,
                   activation_get('hard_sigmoid'): apply_hard_sigmoid,
+                  tf.nn.leaky_relu: apply_leaky_relu_keras,
                   tf.nn.sigmoid: apply_sigmoid,
                   tf.nn.softmax: apply_softmax,
                   tf.nn.relu: apply_relu,
@@ -40,6 +46,7 @@
 if hasattr(tf.compat, 'v1'):
     activation_map.update({tf.compat.v1.nn.sigmoid: apply_sigmoid})
     activation_map.update({tf.compat.v1.nn.softmax: apply_softmax})
+    activation_map.update({tf.compat.v1.nn.leaky_relu: apply_leaky_relu_keras})
     activation_map.update({tf.compat.v1.nn.relu: apply_relu})
     activation_map.update({tf.compat.v1.nn.relu6: apply_relu_6})
     activation_map.update({tf.compat.v1.nn.elu: apply_elu})
@@ -51,29 +58,38 @@ def convert_keras_activation(scope, operator, container):
     input_name = operator.input_full_names[0]
     output_name = operator.output_full_names[0]
     activation = operator.raw_operator.activation
+    activation_type = type(activation)
     if activation in [activation_get('sigmoid'), keras.activations.sigmoid]:
         apply_sigmoid(scope, input_name, output_name, container)
     elif activation in [activation_get('tanh'), keras.activations.tanh]:
         apply_tanh(scope, input_name, output_name, container)
-    elif activation in [activation_get('relu'), keras.activations.relu]:
+    elif activation in [activation_get('relu'), keras.activations.relu] or \
+            (hasattr(keras.layers.advanced_activations, 'ReLU') and
+             activation_type == keras.layers.advanced_activations.ReLU):
         apply_relu(scope, input_name, output_name, container)
-    elif activation in [activation_get('softmax'), keras.activations.softmax]:
+    elif activation in [activation_get('softmax'), keras.activations.softmax] or \
+            activation_type == keras.layers.advanced_activations.Softmax:
         apply_softmax(scope, input_name, output_name, container, axis=-1)
-    elif activation in [activation_get('elu'), keras.activations.elu]:
+    elif activation in [activation_get('elu'), keras.activations.elu] or \
+            activation_type == keras.layers.advanced_activations.ELU:
         apply_elu(scope, input_name, output_name, container, alpha=1.0)
     elif activation in [activation_get('hard_sigmoid'), keras.activations.hard_sigmoid]:
         apply_hard_sigmoid(scope, input_name, output_name, container, alpha=0.2, beta=0.5)
     elif activation in [activation_get('linear'), keras.activations.linear]:
         apply_identity(scope, input_name, output_name, container)
     elif activation in [activation_get('selu'), keras.activations.selu]:
         apply_selu(scope, input_name, output_name, container, alpha=1.673263, gamma=1.050701)
-    elif activation in [relu6] or activation.__name__ == 'relu6':
+    elif activation_type == keras.layers.advanced_activations.LeakyReLU:
+        apply_leaky_relu(scope, input_name, output_name, container, alpha=activation.alpha.item(0))
+    elif activation_type == keras.layers.advanced_activations.PReLU:
+        apply_prelu(scope, input_name, output_name, container, slope=operator.raw_operator.get_weights()[0])
+    elif activation in [relu6] or (hasattr(activation, '__name__') and activation.__name__ == 'relu6'):
         # relu6(x) = min(relu(x), 6)
         np_type = TENSOR_TYPE_TO_NP_TYPE[operator.inputs[0].type.to_onnx_type().tensor_type.elem_type]
         zero_value = np.zeros(shape=(1,), dtype=np_type)
         apply_relu_6(scope, input_name, output_name, container,
                      zero_value=zero_value)
-    elif activation.__name__ in ['swish']:
+    elif hasattr(activation, '__name__') and activation.__name__ == 'swish':
         apply_sigmoid(scope, input_name, output_name + '_sig', container)
         apply_mul(scope, [input_name, output_name + '_sig'], output_name, container)
     else:
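
For context, a rough usage sketch (not part of the diff) of the two situations this change targets: a TF callable activation such as tf.nn.leaky_relu, which is now registered in activation_map through the apply_leaky_relu_keras wrapper, and an advanced-activation layer instance used as an activation, which is now dispatched through the new activation_type checks in convert_keras_activation. The layer sizes, the alpha value, and the tf.keras import are illustrative assumptions; keras2onnx.convert_keras is the library's public conversion entry point.

import tensorflow as tf
from tensorflow import keras
import keras2onnx

model = keras.Sequential([
    # Callable activation: tf.nn.leaky_relu now has an activation_map entry.
    keras.layers.Dense(8, input_shape=(4,), activation=tf.nn.leaky_relu),
    # Advanced-activation instance: hits the new activation_type == LeakyReLU branch,
    # which reads the negative slope from activation.alpha.
    keras.layers.Activation(keras.layers.LeakyReLU(alpha=0.1)),
])

onnx_model = keras2onnx.convert_keras(model, model.name)

The apply_leaky_relu_keras wrapper appears to exist so the activation_map entry keeps the same calling convention as the other apply_* handlers while defaulting alpha to 0.2, the default negative slope of tf.nn.leaky_relu.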