This repository has been archived by the owner on Nov 17, 2023. It is now read-only.

skipping tests that cannot fit in nightly CI machine #17450

Merged 1 commit on Feb 5, 2020
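
The change itself is mechanical: each check whose tensors no longer fit on the nightly CI machine is decorated with unittest.skip and a reason string pointing at the tracking issue. A minimal, self-contained sketch of that pattern with a hypothetical check name (only the reason string is taken from the diff below):

import unittest

# unittest.skip wraps a plain function so that calling it raises
# unittest.SkipTest(reason); test runners report this as a skip instead of
# letting the check run and exhaust memory.
@unittest.skip("Memory doesn't free up after stacked execution with other ops, "
               "tracked at https://github.com/apache/incubator-mxnet/issues/17411")
def check_some_large_tensor_op():
    raise RuntimeError("never reached while the skip decorator is present")

if __name__ == '__main__':
    try:
        check_some_large_tensor_op()
    except unittest.SkipTest as exc:
        print("skipped:", exc)

If such a check_* helper is invoked from a wrapper test, the SkipTest exception propagates and the runner records the wrapper as skipped, so the nightly job keeps passing while the issue stays visible in the skip reason.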
29 changes: 25 additions & 4 deletions tests/nightly/test_large_array.py
@@ -16,14 +16,18 @@
# under the License.

import os
import sys
import tempfile
import math
import numpy as np
import mxnet as mx

curr_path = os.path.dirname(os.path.abspath(os.path.expanduser(__file__)))
sys.path.append(os.path.join(curr_path, '../python/unittest/'))

from mxnet.test_utils import rand_ndarray, assert_almost_equal, rand_coord_2d, default_context, check_symbolic_forward, create_2d_tensor
from mxnet import gluon, nd
-from tests.python.unittest.common import with_seed, with_post_test_cleanup, teardown
+from common import with_seed, with_post_test_cleanup
from nose.tools import with_setup
import unittest

@@ -129,7 +133,8 @@ def np_softmax(x, axis=-1, temperature=1.0):
    x /= np.sum(x, axis=axis, keepdims=True)
    return x

@unittest.skip("log_softmax flaky, tracked at https://github.com/apache/incubator-mxnet/issues/17397")
@unittest.skip("log_softmax flaky, tracked at "
"https://github.com/apache/incubator-mxnet/issues/17397")
def check_log_softmax():
    ndim = 2
    shape = (SMALL_Y, LARGE_X)
@@ -476,7 +481,8 @@ def check_ndarray_random_uniform():
    a = nd.random.uniform(shape=(LARGE_X, SMALL_Y))
    assert a[-1][0] != 0

@unittest.skip("Randint flaky, tracked at https://github.com/apache/incubator-mxnet/issues/16172")
@unittest.skip("Randint flaky, tracked at "
"https://github.com/apache/incubator-mxnet/issues/16172")
@with_seed()
def check_ndarray_random_randint():
    a = nd.random.randint(100, 10000, shape=(LARGE_X, SMALL_Y))
@@ -689,6 +695,8 @@ def check_pick():
    res = mx.nd.pick(a, b)
    assert res.shape == b.shape

@unittest.skip("Memory doesn't free up after stacked execution with other ops, "
"tracked at https://github.com/apache/incubator-mxnet/issues/17411")
def check_depthtospace():
def numpy_depth_to_space(x, blocksize):
b, c, h, w = x.shape[0], x.shape[1], x.shape[2], x.shape[3]
@@ -706,6 +714,8 @@ def numpy_depth_to_space(x, blocksize):
    output = mx.nd.depth_to_space(data, 2)
    assert_almost_equal(output.asnumpy(), expected, atol=1e-3, rtol=1e-3)

@unittest.skip("Memory doesn't free up after stacked execution with other ops, "
"tracked at https://github.com/apache/incubator-mxnet/issues/17411")
def check_spacetodepth():
def numpy_space_to_depth(x, blocksize):
b, c, h, w = x.shape[0], x.shape[1], x.shape[2], x.shape[3]
@@ -769,6 +779,8 @@ def check_unravel_index():
shape=(LARGE_X, SMALL_Y))
    assert (indices_2d.asnumpy() == np.array(original_2d_indices)).all()

@unittest.skip("Memory doesn't free up after stacked execution with other ops, " +
"tracked at https://github.com/apache/incubator-mxnet/issues/17411")
def check_transpose():
check_dtypes = [np.float32, np.int64]
for dtype in check_dtypes:
@@ -778,12 +790,16 @@ def check_transpose():
        ref_out = np.transpose(b.asnumpy())
        assert_almost_equal(t.asnumpy(), ref_out, rtol=1e-10)

@unittest.skip("Memory doesn't free up after stacked execution with other ops, " +
"tracked at https://github.com/apache/incubator-mxnet/issues/17411")
def check_swapaxes():
b = create_2d_tensor(rows=LARGE_X, columns=SMALL_Y)
t = nd.swapaxes(b, dim1=0, dim2=1)
assert np.sum(t[:, -1].asnumpy() == (LARGE_X - 1)) == b.shape[1]
assert t.shape == (SMALL_Y, LARGE_X)

@unittest.skip("Memory doesn't free up after stacked execution with other ops, " +
"tracked at https://github.com/apache/incubator-mxnet/issues/17411")
def check_flip():
b = create_2d_tensor(rows=LARGE_X, columns=SMALL_Y)
t = nd.flip(b, axis=0)
@@ -1079,20 +1095,25 @@ def check_argmin():
    idx = mx.nd.argmin(a, axis=0)
    assert idx.shape[0] == SMALL_Y

@unittest.skip("Memory doesn't free up after stacked execution with other ops, " +
"tracked at https://github.com/apache/incubator-mxnet/issues/17411")
def check_argsort():
b = create_2d_tensor(rows=LARGE_X, columns=SMALL_Y)
s = nd.argsort(b, axis=0, is_ascend=False, dtype=np.int64)
mx.nd.waitall()
assert (s[0].asnumpy() == (LARGE_X - 1)).all()

@unittest.skip("Memory doesn't free up after stacked execution with other ops, " +
"tracked at https://github.com/apache/incubator-mxnet/issues/17411")
def check_sort():
b = create_2d_tensor(rows=LARGE_X, columns=SMALL_Y)
s = nd.sort(b, axis=0, is_ascend=False)
assert np.sum(s[-1][SMALL_Y//2:SMALL_Y].asnumpy() == 0).all()
s = nd.sort(b, is_ascend=False)
assert np.sum(s[0].asnumpy() == 0).all()

@unittest.skip("Topk takes lot of memory!, tracked at https://github.com/apache/incubator-mxnet/issues/17411")
@unittest.skip("Memory doesn't free up after stacked execution with other ops, " +
"tracked at https://github.com/apache/incubator-mxnet/issues/17411")
def check_topk():
    b = create_2d_tensor(rows=LARGE_X, columns=SMALL_Y)
    k = nd.topk(b, k=10, axis=0, dtype=np.int64)
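
Both files also append the shared unittest helper directory to sys.path, so that in test_large_array.py the with_seed and with_post_test_cleanup decorators come from the local common module rather than the dotted tests.python.unittest.common path. A self-contained sketch of that mechanism (the stub module and names below are invented for illustration):

import importlib
import os
import sys
import tempfile

# Write a tiny helper module into a temporary directory, append that directory
# to sys.path, and import the module by its bare name -- the same mechanism the
# test files rely on after appending '../python/unittest/'.
helper_dir = tempfile.mkdtemp()
with open(os.path.join(helper_dir, 'common_stub.py'), 'w') as f:
    f.write("def with_seed_stub(fn):\n    return fn\n")

sys.path.append(helper_dir)
common_stub = importlib.import_module('common_stub')
print(common_stub.with_seed_stub)  # resolves only because of the path append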
15 changes: 13 additions & 2 deletions tests/nightly/test_large_vector.py
@@ -16,14 +16,18 @@
# under the License.

import os
import sys
import tempfile
import math
import numpy as np
import mxnet as mx

curr_path = os.path.dirname(os.path.abspath(os.path.expanduser(__file__)))
sys.path.append(os.path.join(curr_path, '../python/unittest/'))

from mxnet.test_utils import rand_ndarray, assert_almost_equal, rand_coord_2d, create_vector
from mxnet import gluon, nd
-from tests.python.unittest.common import with_seed, teardown
+from tests.python.unittest.common import with_seed
from nose.tools import with_setup
import unittest

@@ -179,7 +183,8 @@ def check_ndarray_random_uniform():
    a = nd.random.uniform(shape=LARGE_X)
    assert a[-1] != 0

@unittest.skip("Randint flaky, tracked at https://github.com/apache/incubator-mxnet/issues/16172")
@unittest.skip("Randint flaky, tracked at "
"https://github.com/apache/incubator-mxnet/issues/16172")
@with_seed()
def check_ndarray_random_randint():
    # check if randint can generate value greater than 2**32 (large)
@@ -476,11 +481,15 @@ def check_argmin():
    assert idx[0] == 0
    assert idx.shape[0] == 1

@unittest.skip("Memory doesn't free up after stacked execution with other ops, " +
"tracked at https://github.com/apache/incubator-mxnet/issues/17411")
def check_argsort():
a = create_vector(size=LARGE_X)
s = nd.argsort(a, axis=0, is_ascend=False, dtype=np.int64)
assert s[0] == (LARGE_X - 1)

@unittest.skip("Memory doesn't free up after stacked execution with other ops, " +
"tracked at https://github.com/apache/incubator-mxnet/issues/17411")
def check_sort():
a = create_vector(size=LARGE_X)

@@ -495,6 +504,8 @@ def check_ascend(x):
    check_descend(a)
    check_ascend(a)

@unittest.skip("Memory doesn't free up after stacked execution with other ops, " +
"tracked at https://github.com/apache/incubator-mxnet/issues/17411")
def check_topk():
a = create_vector(size=LARGE_X)
ind = nd.topk(a, k=10, axis=0, dtype=np.int64)