Commit dd09c85

[CI] Update image tag to 20240428-060115-0b09ed018 (#16948)

* [CI] Update image tag to 20240428-060115-0b09ed018
* Skip a flaky test
* Remove msg in pytest.skip
* format

1 parent 2d7663c, commit dd09c85

File tree

7 files changed: +22 additions, -21 deletions

ci/jenkins/docker-images.ini

Lines changed: 10 additions & 10 deletions
@@ -17,13 +17,13 @@

 # This data file is read during when Jenkins runs job to determine docker images.
 [jenkins]
-ci_arm: tlcpack/ci-arm:20240126-070121-8ade9c30e
-ci_cortexm: tlcpack/ci-cortexm:20240126-070121-8ade9c30e
-ci_cpu: tlcpack/ci_cpu:20240322-060059-89cd74c07
-ci_gpu: tlcpack/ci-gpu:20240126-070121-8ade9c30e
-ci_hexagon: tlcpack/ci-hexagon:20240126-070121-8ade9c30e
-ci_i386: tlcpack/ci-i386:20240126-070121-8ade9c30e
-ci_lint: tlcpack/ci-lint:20240126-070121-8ade9c30e
-ci_minimal: tlcpack/ci-minimal:20240126-070121-8ade9c30e
-ci_riscv: tlcpack/ci-riscv:20240126-070121-8ade9c30e
-ci_wasm: tlcpack/ci-wasm:20240126-070121-8ade9c30e
+ci_arm: tlcpack/ci-arm:20240428-060115-0b09ed018
+ci_cortexm: tlcpack/ci-cortexm:20240428-060115-0b09ed018
+ci_cpu: tlcpack/ci_cpu:20240428-060115-0b09ed018
+ci_gpu: tlcpack/ci-gpu:20240428-060115-0b09ed018
+ci_hexagon: tlcpack/ci-hexagon:20240428-060115-0b09ed018
+ci_i386: tlcpack/ci-i386:20240428-060115-0b09ed018
+ci_lint: tlcpack/ci-lint:20240428-060115-0b09ed018
+ci_minimal: tlcpack/ci-minimal:20240428-060115-0b09ed018
+ci_riscv: tlcpack/ci-riscv:20240428-060115-0b09ed018
+ci_wasm: tlcpack/ci-wasm:20240428-060115-0b09ed018
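For context, the comment at the top of this file says Jenkins reads it to decide which docker images a job uses. As a rough illustration only (not TVM's actual loader), Python's standard configparser can parse this layout; it splits each entry on the first colon, so the image tags keep their embedded colons:

    import configparser

    # Parse the CI image map; the path assumes a checkout of apache/tvm.
    config = configparser.ConfigParser()
    config.read("ci/jenkins/docker-images.ini")

    # Prints "tlcpack/ci-gpu:20240428-060115-0b09ed018" after this commit.
    print(config["jenkins"]["ci_gpu"])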

tests/micro/zephyr/test_zephyr.py

Lines changed: 1 addition & 1 deletion
@@ -650,7 +650,7 @@ def test_debugging_enabled(workspace_dir):
 def test_qemu_make_fail(workspace_dir, board, microtvm_debug, serial_number):
     """Testing QEMU make fail."""
     if not utils.ZEPHYR_BOARDS[board]["is_qemu"]:
-        pytest.skip(msg="Only for QEMU targets.")
+        pytest.skip("Only for QEMU targets.")

     build_config = {"debug": microtvm_debug}
     shape = (10,)
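The change from pytest.skip(msg="...") to a positional argument, here and in the hexagon tests below, tracks a pytest API change: the msg keyword was deprecated in pytest 7.0 in favor of reason and removed in pytest 8. A minimal sketch of the surviving spellings (the test and fixture names here are hypothetical, not from this commit):

    import pytest

    def test_qemu_only(board_is_qemu: bool) -> None:  # hypothetical fixture
        """Skip at runtime unless the board runs under QEMU."""
        if not board_is_qemu:
            # Deprecated form: pytest.skip(msg="Only for QEMU targets.")
            pytest.skip("Only for QEMU targets.")  # positional reason
            # Equivalent keyword form: pytest.skip(reason="Only for QEMU targets.")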

tests/python/contrib/test_hexagon/metaschedule_e2e/test_resnet50_fp16.py

Lines changed: 1 addition & 1 deletion
@@ -47,7 +47,7 @@ def test_resnet50(hexagon_launcher):
     model_params = "resnet50_fp16.params"

     if not os.path.exists(model_json):
-        pytest.skip(msg="Run python export_models.py first.")
+        pytest.skip("Run python export_models.py first.")

     with open(model_json, "r") as file:
         mod = tvm.ir.load_json(file.read())

tests/python/contrib/test_hexagon/metaschedule_e2e/test_resnet50_int8.py

Lines changed: 2 additions & 2 deletions
@@ -54,7 +54,7 @@
 def load_model():
     """Load renset50 model."""
     if not os.path.exists(MODEL_JSON):
-        pytest.skip(msg="Run python export_models.py first.")
+        pytest.skip("Run python export_models.py first.")

     with open(MODEL_JSON, "r") as file:
         mod = tvm.ir.load_json(file.read())
@@ -172,7 +172,7 @@ def test_resnet50(hexagon_launcher):
         pytest.skip("Skipping test since it takes too long in CI.")

     if not os.path.exists(MODEL_JSON):
-        pytest.skip(msg="Run python export_models.py first.")
+        pytest.skip("Run python export_models.py first.")

     mod, params = load_model()

tests/python/contrib/test_hexagon/test_meta_schedule.py

Lines changed: 5 additions & 5 deletions
@@ -69,7 +69,7 @@ def main(a: T.handle, b: T.handle, c: T.handle) -> None: # type: ignore
 def test_builder_runner(hexagon_launcher):
     """Test builder and runner."""
     if hexagon_launcher.is_simulator():
-        pytest.skip(msg="Tuning on simulator not supported.")
+        pytest.skip("Tuning on simulator not supported.")

     mod = MatmulModule

@@ -191,7 +191,7 @@ def verify_dense(sch, target, m_size, n_size, k_size, hexagon_session):
 def test_vrmpy_dense(hexagon_launcher):
     """Test vector reduce muliply dense."""
     if hexagon_launcher.is_simulator():
-        pytest.skip(msg="Tuning on simulator not supported.")
+        pytest.skip("Tuning on simulator not supported.")

     do_tune = True

@@ -302,7 +302,7 @@ def main( # type: ignore
 def test_vrmpy_dense_auto_tensorize(hexagon_launcher):
     """Test VRMPY dense operator."""
     if hexagon_launcher.is_simulator():
-        pytest.skip(msg="Tuning on simulator not supported.")
+        pytest.skip("Tuning on simulator not supported.")

     m_size, n_size, k_size = 128, 768, 768
     workload = te.create_prim_func(dense_compute(m_size, n_size, k_size))
@@ -367,7 +367,7 @@ def test_vrmpy_dense_auto_tensorize(hexagon_launcher):
 def test_conv2d_relay_auto_schedule(hexagon_launcher):
     """Test conv2d using auto schedule."""
     if hexagon_launcher.is_simulator():
-        pytest.skip(msg="Tuning on simulator not supported.")
+        pytest.skip("Tuning on simulator not supported.")

     i_size, o_size, h_size, w_size = 64, 64, 56, 56
     k_height_size = k_width_size = 3
@@ -447,7 +447,7 @@ def test_dense_relay_auto_schedule(hexagon_launcher):
     dense on Hexagon is extremely slow.
     """
     if hexagon_launcher.is_simulator():
-        pytest.skip(msg="Tuning on simulator not supported.")
+        pytest.skip("Tuning on simulator not supported.")

     target_hexagon = tvm.target.hexagon("v69")
     target = tvm.target.Target(target_hexagon, host=target_hexagon)

tests/python/contrib/test_hexagon/topi/slice_op/test_cast_slice.py

Lines changed: 2 additions & 2 deletions
@@ -77,7 +77,7 @@ def test_cast_fp16_fp32_slice(
     Top level testing function for cast fp16 to fp32
     """
     if hexagon_session.is_simulator():
-        pytest.skip(msg="Due to https://github.com/apache/tvm/issues/11957")
+        pytest.skip("Due to https://github.com/apache/tvm/issues/11957")

     cast_input = te.placeholder(input_shape, name="A", dtype=dtype)
     cast_output = sl.cast_f16_f32_compute(cast_input)
@@ -163,7 +163,7 @@ def test_cast_fp32_fp16_slice(
     Top level testing function for cast fp32 to fp16
     """
    if hexagon_session.is_simulator():
-        pytest.skip(msg="Due to https://github.com/apache/tvm/issues/11957")
+        pytest.skip("Due to https://github.com/apache/tvm/issues/11957")

     cast_input = te.placeholder(input_shape, name="A", dtype=dtype)
     cast_output = sl.cast_f32_f16_compute(cast_input)

tests/python/relax/test_codegen_cudnn.py

Lines changed: 1 addition & 0 deletions
@@ -198,6 +198,7 @@ def test_conv2d_offload(data_shape, weight_shape, dtype, with_bias, activation):
     tvm.testing.assert_allclose(out, ref, rtol=1e-2, atol=1e-2)


+@pytest.mark.skip(reason="flaky test")
 @pytest.mark.parametrize(
     "data_shape, weight_shape, dtype, with_bias, activation",
     [
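Unlike the runtime pytest.skip(...) calls above, the @pytest.mark.skip decorator added here disables the test unconditionally at collection time, so the flaky parametrized test is reported as skipped without ever running. A minimal sketch of the two skip styles (test names and the runtime predicate are illustrative, not from this commit):

    import sys

    import pytest

    @pytest.mark.skip(reason="flaky test")  # skipped at collection time
    def test_marked_flaky() -> None:
        assert False  # never executes while the mark is present

    def test_runtime_skip() -> None:
        if sys.platform == "win32":  # illustrative runtime predicate
            pytest.skip("not supported on this platform")
        assert True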
