Skip to content
This repository was archived by the owner on Nov 17, 2023. It is now read-only.

Commit 1420bc0

Browse files
xziyatest
authored and committed
[Flaky test] Skip test_operator_gpu.test_convolution_independent_gradients (#15631)
* Skip test_convolution_independent_gradients
* Add an issue link
* Fix inconsistent context of input array and binding op
* Trigger CI
* Retrigger CI
1 parent 94108db commit 1420bc0

File tree

1 file changed

+15
-13
lines changed

1 file changed

+15
-13
lines changed

tests/python/unittest/test_operator.py

Lines changed: 15 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -1909,10 +1909,12 @@ def test_depthwise_convolution():
19091909

19101910
@with_seed()
19111911
def test_convolution_independent_gradients():
1912-
ctx = default_context()
1913-
# set a low bar for autotuned cudnn conv
1914-
atol = 1.0e-1 if ctx.device_type == "gpu" else 1.0e-3
1915-
rtol = 1.0e-2 if ctx.device_type == "gpu" else 1.0e-3
1912+
# NOTE(zixuanweeei): Flaky test tracked by https://github.com/apache/incubator-mxnet/issues/15603.
1913+
# GPU context will be enabled after figuring out the possible issue tracked at
1914+
# https://github.com/apache/incubator-mxnet/issues/15638.
1915+
ctx = mx.cpu()
1916+
atol = 1.0e-3
1917+
rtol = 1.0e-3
19161918
reqs = ["null", "write", "add"]
19171919
var_names = ["x", "w", "b"]
19181920
dims = [1, 2]
@@ -1942,14 +1944,14 @@ def test_convolution_independent_gradients():
19421944
for req_kind in reqs:
19431945
# Binding args for conv with possible dependent gradients
19441946
base_args = {
1945-
'x': mx.nd.random.normal(shape=x_shape),
1946-
'w': mx.nd.random.normal(shape=w_shape),
1947-
'b': mx.nd.random.normal(shape=(num_filter, )) if not no_bias else None}
1947+
'x': mx.nd.random.normal(shape=x_shape, ctx=ctx),
1948+
'w': mx.nd.random.normal(shape=w_shape, ctx=ctx),
1949+
'b': mx.nd.random.normal(shape=(num_filter, ), ctx=ctx) if not no_bias else None}
19481950
args1 = copy.deepcopy(base_args)
19491951
grad1 = {
1950-
'x': mx.nd.zeros(shape=x_shape),
1951-
'w': mx.nd.zeros(shape=w_shape),
1952-
'b': mx.nd.zeros(shape=(num_filter, )) if not no_bias else None}
1952+
'x': mx.nd.zeros(shape=x_shape, ctx=ctx),
1953+
'w': mx.nd.zeros(shape=w_shape, ctx=ctx),
1954+
'b': mx.nd.zeros(shape=(num_filter, ), ctx=ctx) if not no_bias else None}
19531955

19541956
grad_req1 = [req_kind] * 3
19551957
grad_req1 = dict(zip(var_names, grad_req1))
@@ -1962,9 +1964,9 @@ def test_convolution_independent_gradients():
19621964
# Binding args for conv with independent gradients
19631965
args2 = copy.deepcopy(base_args) # Deepcopy the same params of `exe1`
19641966
grad2 = {
1965-
'x': mx.nd.zeros(shape=x_shape),
1966-
'w': mx.nd.zeros(shape=w_shape),
1967-
'b': mx.nd.zeros(shape=(num_filter, )) if not no_bias else None}
1967+
'x': mx.nd.zeros(shape=x_shape, ctx=ctx),
1968+
'w': mx.nd.zeros(shape=w_shape, ctx=ctx),
1969+
'b': mx.nd.zeros(shape=(num_filter, ), ctx=ctx) if not no_bias else None}
19681970
grad_req2 = {"x": x_req, "w": w_req, "b": b_req}
19691971
exe2 = conv.bind(ctx, args2, args_grad=grad2, grad_req=grad_req2)
19701972

0 commit comments

Comments
 (0)