This repository was archived by the owner on Oct 13, 2021. It is now read-only.

Commit d38a9c5

Fix softmax activation in conv and dense (#69)
* unet
* Fix softmax activation
1 parent e392dce · commit d38a9c5

File tree

3 files changed (+14 -11 lines)


keras2onnx/ke2onnx/conv.py

Lines changed: 12 additions & 10 deletions
@@ -165,21 +165,23 @@ def convert_keras_conv_core(scope, operator, container, is_transpose, n_dims, in
                          n_dims,
                          weight_perm_axes, parameters, attrs['auto_pad'])
 
+    # Permute the output back of its original format
+    transpose_output_name = scope.get_unique_variable_name('transpose_output')
+    if not channels_first:
+        # Generate a final transposer.
+        apply_transpose(scope, intermediate_output_name, transpose_output_name, container, perm=output_perm_axes)
+    else:
+        apply_identity(scope, intermediate_output_name, transpose_output_name, container)
+
     # The construction of convolution is done. Now, we create an activation operator to apply the activation specified
     # in this Keras layer.
     apply_activation_function = activation_map[op.activation]
-    activation_output_name = scope.get_unique_variable_name('activation_output')
-    if apply_activation_function in [activation_get('softmax'), keras.activations.softmax]:
-        apply_softmax(scope, intermediate_output_name, activation_output_name, container, axis=-1)
+    if op.activation in [activation_get('softmax'), keras.activations.softmax]:
+        apply_softmax(scope, transpose_output_name, operator.outputs[0].full_name, container, axis=-1)
     else:
-        apply_activation_function(scope, intermediate_output_name, activation_output_name, container)
+        apply_activation_function(scope, transpose_output_name, operator.outputs[0].full_name, container)
+
 
-    # Permute the output back of its original format
-    if not channels_first:
-        # Generate a final transposer.
-        apply_transpose(scope, activation_output_name, operator.outputs[0].full_name, container, perm=output_perm_axes)
-    else:
-        apply_identity(scope, activation_output_name, operator.outputs[0].full_name, container)
 
 
 def get_converter_config(dims, is_conv_transpose):
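This hunk reorders the converter so the NCHW-to-NHWC transpose happens before the fused activation rather than after it. That ordering matters for softmax in particular: `apply_softmax` is emitted with `axis=-1`, and the last axis only means "channels" once the tensor is back in the Keras channels_last layout. A small numpy sketch (illustrative only, not code from this repo) of the difference:

```python
import numpy as np

def softmax(a, axis):
    # Numerically stable softmax over the given axis.
    e = np.exp(a - a.max(axis=axis, keepdims=True))
    return e / e.sum(axis=axis, keepdims=True)

x_nchw = np.random.rand(1, 3, 5, 5)          # N, C, H, W: the converter's intermediate layout
x_nhwc = np.transpose(x_nchw, (0, 2, 3, 1))  # N, H, W, C: the Keras channels_last layout

before_fix = softmax(x_nchw, axis=-1)  # normalizes over W, a spatial axis -- wrong
after_fix = softmax(x_nhwc, axis=-1)   # normalizes over C, matching what Keras computes

# Only in the transposed case does each pixel's channel vector sum to 1.
print(np.allclose(after_fix.sum(axis=-1), 1.0))  # True
```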

keras2onnx/ke2onnx/dense.py

Lines changed: 1 addition & 1 deletion
@@ -37,7 +37,7 @@ def convert_keras_dense(scope, operator, container):
 
     # Create an activation function node and apply activation function to the intermediate tensor
     apply_activation_function = activation_map[operator.raw_operator.activation]
-    if apply_activation_function in [activation_get('softmax'), keras.activations.softmax]:
+    if operator.raw_operator.activation in [activation_get('softmax'), keras.activations.softmax]:
         apply_softmax(scope, biased_tensor_name, operator.outputs[0].full_name, container, axis=-1)
     else:
         apply_activation_function(scope, biased_tensor_name, operator.outputs[0].full_name, container)
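The one-line change here fixes the same bug as in conv.py: `activation_map` maps a Keras activation callable to an ONNX `apply_*` helper, so the looked-up helper is a different object from `keras.activations.softmax` and the old membership test could never succeed — the softmax branch was unreachable and softmax fell through to the generic path. A minimal self-contained sketch of the pitfall (stand-in functions, not the real converter):

```python
# Stand-ins for keras.activations.softmax and the converter's apply_softmax helper.
def softmax(x, axis=-1):
    pass  # placeholder for the Keras activation callable

def apply_softmax(scope, input_name, output_name, container, axis=-1):
    pass  # placeholder for the ONNX apply helper

activation_map = {softmax: apply_softmax}

op_activation = softmax                                 # what the Keras layer stores
apply_activation_function = activation_map[op_activation]

# Old check: compares the mapped helper against the Keras callable.
print(apply_activation_function in [softmax])  # False -- softmax branch never taken

# Fixed check: compare the layer's own activation attribute.
print(op_activation in [softmax])              # True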

tests/test_layers.py

Lines changed: 1 addition & 0 deletions
@@ -256,6 +256,7 @@ def test_conv2d_format(self):
 
     def test_conv2d_activation(self):
         self._conv2_helper(3, 5, (2, 2), (1, 1), (5, 5), activation='relu')
+        self._conv2_helper(3, 5, (2, 2), (1, 1), (5, 5), activation='softmax')
 
     def test_conv2d_bias(self):
         self._conv2_helper(3, 5, (2, 2), (1, 1), (5, 5), bias=True)
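The new test case exercises the fixed path by converting a Conv2D layer whose fused activation is softmax. A hedged end-to-end version of the same check (assuming keras, keras2onnx, onnxruntime, and numpy are installed; shapes chosen to mirror the helper call above):

```python
import numpy as np
import keras
import keras2onnx
import onnxruntime

# A Conv2D with a fused softmax activation, in channels_last layout.
model = keras.models.Sequential([
    keras.layers.Conv2D(5, (2, 2), strides=(1, 1), activation='softmax',
                        input_shape=(5, 5, 3)),
])

onnx_model = keras2onnx.convert_keras(model, model.name)

x = np.random.rand(1, 5, 5, 3).astype(np.float32)
expected = model.predict(x)

sess = onnxruntime.InferenceSession(onnx_model.SerializeToString())
actual = sess.run(None, {sess.get_inputs()[0].name: x})[0]

# Before this commit the converted graph skipped the softmax branch (and, for
# conv, would have normalized over the wrong axis), so these outputs diverged.
np.testing.assert_allclose(expected, actual, rtol=1e-4, atol=1e-5)
```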
