Skip to content
This repository was archived by the owner on Oct 13, 2021. It is now read-only.

Add unit tests for upsample and pool #99

Merged
merged 15 commits into from
Jun 4, 2019
Merged
Show file tree
Hide file tree
Changes from 3 commits
Commits
Show all changes
15 commits
Select commit Hold shift + click to select a range
46ca059
Add unit tests for upsample and pool
jiafatom Jun 3, 2019
8753c74
Add unit tests for upsample and pool
jiafatom Jun 3, 2019
44b4b36
Merge branch 'test_upsample' of https://github.com/jiafatom/keras-onn…
jiafatom Jun 3, 2019
afb71ac
Merge branch 'test_upsample' of https://github.com/jiafatom/keras-onn…
jiafatom Jun 3, 2019
dba0a29
Merge branch 'test_upsample' of https://github.com/jiafatom/keras-onn…
jiafatom Jun 3, 2019
c39c2b4
Merge branch 'test_upsample' of https://github.com/jiafatom/keras-onn…
jiafatom Jun 3, 2019
4409b08
Merge branch 'test_upsample' of https://github.com/jiafatom/keras-onn…
jiafatom Jun 3, 2019
0d49b6a
Merge branch 'test_upsample' of https://github.com/jiafatom/keras-onn…
jiafatom Jun 3, 2019
2cd17db
Merge branch 'test_upsample' of https://github.com/jiafatom/keras-onn…
jiafatom Jun 3, 2019
5ba260f
Merge branch 'test_upsample' of https://github.com/jiafatom/keras-onn…
jiafatom Jun 3, 2019
4113043
Merge branch 'test_upsample' of https://github.com/jiafatom/keras-onn…
jiafatom Jun 3, 2019
c8506a5
Merge branch 'test_upsample' of https://github.com/jiafatom/keras-onn…
jiafatom Jun 3, 2019
e262de5
Merge branch 'test_upsample' of https://github.com/jiafatom/keras-onn…
jiafatom Jun 3, 2019
8390953
Merge branch 'test_upsample' of https://github.com/jiafatom/keras-onn…
jiafatom Jun 3, 2019
d10b2f5
Merge branch 'test_upsample' of https://github.com/jiafatom/keras-onn…
jiafatom Jun 3, 2019
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
19 changes: 12 additions & 7 deletions keras2onnx/ke2onnx/pooling.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,9 +9,9 @@
def convert_keras_pooling_core(scope, operator, container, is_global, n_dims,
op_type, input_perm_axes, output_perm_axes):
op = operator.raw_operator
channels_first = n_dims > 1 and op.data_format == 'channels_first'
no_permutation_required = op.data_format == 'channels_first'

if channels_first:
if no_permutation_required:
adjusted_pooling_input = operator.inputs[0].full_name
else:
adjusted_pooling_input = scope.get_unique_variable_name('input_transposed')
Expand All @@ -23,25 +23,30 @@ def convert_keras_pooling_core(scope, operator, container, is_global, n_dims,
op_type_prefix = 'Global' if is_global else ''
onnx_op_type = "AveragePool" if op_type == 'Avg' else 'MaxPool'
attrs = {'name': operator.full_name}
op_version = 10 if container.target_opset >= 10 else 7
if not is_global:
attrs['strides'] = list(op.strides)
attrs['kernel_shape'] = op.pool_size
attrs['op_version'] = op_version
# In ONNX opset 10, the ceil_mode attribute was added to local MaxPool and AveragePool
if container.target_opset >= 10:
attrs['ceil_mode'] = 0

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Thanks for adding the ceil_mode opset10 changes! I can close my PR #96.

if op.padding == 'valid':
attrs['auto_pad'] = 'VALID'
elif op.padding == 'same':
attrs['auto_pad'] = 'SAME_UPPER'
else:
raise RuntimeError("Unsupported padding type '{0}'".format(op.padding))

if channels_first:
if no_permutation_required:
        # In this case, the output of our Pool operator just matches what Keras produces.
container.add_node(op_type_prefix + onnx_op_type, adjusted_pooling_input,
operator.outputs[0].full_name, **attrs)
operator.outputs[0].full_name, op_version=op_version, **attrs)
else:
        # Put the output of Pool operator to an intermediate tensor. Later we will apply a Transpose to match the
# original Keras output format
pooling_output_name = scope.get_unique_variable_name('pooling_output')
container.add_node(op_type_prefix + onnx_op_type, adjusted_pooling_input, pooling_output_name, **attrs)
container.add_node(op_type_prefix + onnx_op_type, adjusted_pooling_input, pooling_output_name, op_version=op_version, **attrs)

# Generate a final Transpose
postprocessor_type = 'Transpose'
Expand All @@ -63,14 +68,14 @@ def convert_keras_max_pooling_2d(scope, operator, container):


def convert_keras_max_pooling_3d(scope, operator, container):
input_perm_axes, output_perm_axes = get_permutation_config(2)
input_perm_axes, output_perm_axes = get_permutation_config(3)
convert_keras_pooling_core(scope, operator, container, is_global=False, n_dims=3, op_type='Max',
input_perm_axes=input_perm_axes, output_perm_axes=output_perm_axes)


def convert_keras_average_pooling_1d(scope, operator, container):
input_perm_axes, output_perm_axes = get_permutation_config(1)
convert_keras_pooling_core(scope, operator, container, is_global=True, n_dims=1, op_type='Avg',
convert_keras_pooling_core(scope, operator, container, is_global=False, n_dims=1, op_type='Avg',
input_perm_axes=input_perm_axes, output_perm_axes=output_perm_axes)


Expand Down
24 changes: 8 additions & 16 deletions keras2onnx/ke2onnx/upsample.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,43 +10,35 @@

def convert_keras_upsample(scope, operator, container, n_dims):
op = operator.raw_operator
    # op.size type is tuple, even if we set an int in keras.layers API
if n_dims == 1:
scales = [1, int(op.size), 1]
elif n_dims == 2:
scales = [1] + list(d for d in op.size)
elif n_dims == 2 or n_dims == 3:
# Always create the list of sampling factors in channels_first format because the input will be converted into
# channels_first if it's in channels_last
if isinstance(op.size, collections.Iterable):
scales = [1, 1] + list(d for d in op.size)
else:
scales = [1, 1, int(op.size), int(op.size)]
elif n_dims == 3:
# Always create the list of sampling factors in channels_first format because the input will be converted into
# channels_first if it's in channels_last
if isinstance(op.size, collections.Iterable):
scales = [1, 1] + list(int(d) for d in op.size)
else:
scales = [1, 1] + [int(op.size)] * 3
scales = [1, 1] + list(d for d in op.size)
else:
raise ValueError('Unsupported dimension %s when converting Keras Upsampling layer' % n_dims)

# Derive permutation configuration. If the Keras input format is not channels_first, this configuration may be used
# to manipulate the input and output of ONNX Upsample.
input_perm_axes, output_perm_axes = get_permutation_config(n_dims)
channels_first = n_dims > 1 and op.data_format == 'channels_first'
no_permutation_required = channels_first or n_dims < 2

# Before creating the main Upsample operator, we need to permute the input tensor if the original operator is
# working under channels_last mode.
if channels_first:
if no_permutation_required:
# No permutation is required. Use input as it is.
input_tensor_name = operator.inputs[0].full_name
else:
# Permute the original input and then use the permuted result as the input of ONNX Upsample
input_tensor_name = scope.get_unique_variable_name(operator.inputs[0].full_name + '_permuted')
apply_transpose(scope, operator.inputs[0].full_name, input_tensor_name, container, perm=input_perm_axes)

# If channels_first is True, we don't need to permute the output of ONNX Upsample. Otherwise, similar to Crop's
# If no_permutation_required is True, we don't need to permute the output of ONNX Upsample. Otherwise, similar to Crop's
# conversion, a Transpose would be added.

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

update channels_first comments to indicate change to no_permutation_required

if channels_first:
if no_permutation_required:
apply_upsample(scope, input_tensor_name, operator.outputs[0].full_name, container, scales=scales)
else:
upsampled_tensor_name = scope.get_unique_variable_name(input_tensor_name + '_upsampled')
Expand Down
33 changes: 22 additions & 11 deletions tests/test_layers.py
Original file line number Diff line number Diff line change
Expand Up @@ -326,10 +326,10 @@ def test_repeat_vector(self):
expected = model.predict(data)
self.assertTrue(self.run_onnx_runtime('repeat_vector', onnx_model, data, expected))

def _pooling_test_helper(self, layer, ishape):
def _pooling_test_helper(self, layer, ishape, data_format='channels_last'):
model = keras.Sequential()
nlayer = layer(input_shape=ishape) if \
(layer.__name__.startswith("Global")) else layer(2, input_shape=ishape)
nlayer = layer(input_shape=ishape, data_format=data_format) if \
(layer.__name__.startswith("Global")) else layer(2, input_shape=ishape, data_format=data_format)

model.add(nlayer)
onnx_model = keras2onnx.convert_keras(model, model.name)
Expand All @@ -339,14 +339,15 @@ def _pooling_test_helper(self, layer, ishape):
expected = model.predict(data)
self.assertTrue(self.run_onnx_runtime(onnx_model.graph.name, onnx_model, data, expected))

@unittest.skip("ONNXRuntime doesn't support 3D average pooling yet.")
def test_pooling_avg3d(self):
self._pooling_test_helper(keras.layers.AveragePooling3D, (4, 4, 4, 3))

def test_pooling_max1d(self):
def test_pooling_1d(self):
self._pooling_test_helper(keras.layers.AveragePooling1D, (4, 6))
self._pooling_test_helper(keras.layers.AveragePooling1D, (4, 6), 'channels_first')
self._pooling_test_helper(keras.layers.MaxPool1D, (4, 6))
self._pooling_test_helper(keras.layers.MaxPool1D, (4, 6), 'channels_first')

def test_pooling_2d(self):
self._pooling_test_helper(keras.layers.AveragePooling2D, (4, 4, 3))

def test_pooling_max2d(self):
N, C, H, W = 2, 3, 5, 5
x = np.random.rand(N, H, W, C).astype(np.float32, copy=False)

Expand All @@ -365,6 +366,10 @@ def test_pooling_max2d(self):
expected = model.predict(x)
self.assertTrue(self.run_onnx_runtime('max_pooling_2d', onnx_model, x, expected))

def test_pooling_3d(self):
self._pooling_test_helper(keras.layers.AveragePooling3D, (4, 4, 4, 3))
self._pooling_test_helper(keras.layers.MaxPool3D, (4, 4, 4, 3))

def test_pooling_global(self):
self._pooling_test_helper(keras.layers.GlobalAveragePooling2D, (4, 6, 2))

Expand Down Expand Up @@ -452,7 +457,6 @@ def test_Softmax(self):
self.activationlayer_helper(layer, data)

def _misc_conv_helper(self, layer, ishape):
ishape = (20, 20, 1)
input = keras.Input(ishape)
out = layer(input)
model = keras.models.Model(input, out)
Expand All @@ -469,8 +473,15 @@ def test_crop(self):
self._misc_conv_helper(layer, ishape)

def test_upsample(self):
ishape = (20,)
layer = keras.layers.UpSampling1D(size=2)
self._misc_conv_helper(layer, ishape)
ishape = (20, 20, 1)
layer = keras.layers.UpSampling2D(size=(2, 3), data_format='channels_last')
for size in [2, (2, 3)]:
layer = keras.layers.UpSampling2D(size=size, data_format='channels_last')
self._misc_conv_helper(layer, ishape)
ishape = (20, 20, 20, 1)
layer = keras.layers.UpSampling3D(size=(2, 3, 4), data_format='channels_last')
self._misc_conv_helper(layer, ishape)

def test_padding(self):
Expand Down