Skip to content
Closed
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
18 changes: 18 additions & 0 deletions libflashinfer/tests/CMakeLists.txt
Original file line number Diff line number Diff line change
@@ -1,6 +1,18 @@
# Set global paths and initialize the list of all test targets.
set(ALL_TEST_TARGETS "")

# Tests currently disabled due to known issues or pending fixes.
# Each entry carries the reason it is skipped.
set(SKIP_TESTS
    # BatchPrefillWithPagedKVCache is not yet implemented for HIP.
    test_batch_prefill
    # Skipped due to incorrect test cases.
    test_layout_transform
    # Will be fixed in a later update; see PR #52.
    test_mfma_fp32_16x16x16fp16
    test_cascade)

# Include centralized config utilities
# (presumably provides configure_flashinfer_target used below — confirm
# against cmake/ConfigureTargets.cmake in the repository).
include(ConfigureTargets)

Expand All @@ -13,6 +25,12 @@ if(FLASHINFER_ENABLE_HIP)

foreach(test_source IN LISTS HIP_TEST_SOURCES)
get_filename_component(test_name ${test_source} NAME_WE)

if(test_name IN_LIST SKIP_TESTS)
Copy link
Copy Markdown
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I do not think this is the right approach. I would still like to have build targets for the broken tests. Having a build target will help during development when a broken test is getting fixed or otherwise needs reevaluation.

I suggest the target should not be added to the build_tests convenience target that builds all tests in one shot. That way CI or otherwise can still build all "good" tests in one go, but a developer has the option to easily try out a broken test using cmake.

Copy link
Copy Markdown
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Are you suggesting something like

foreach(test_target IN LISTS ALL_TEST_TARGETS)
  if(TARGET ${test_target})
    if(test_name IN_LIST SKIP_TESTS)
        message(STATUS "Skipping HIP test: ${test_name}")
        continue()
    endif()
    gtest_discover_tests(${test_target})
  endif()
endforeach()

Copy link
Copy Markdown
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Wouldn't this only work for tests that build successfully but fail at runtime? Right now test_batch_prefill does not even build because the changes to BatchPrefillWithPagedKVCache have not gone in yet.

message(STATUS "Skipping HIP test: ${test_name}")
continue()
endif()

set(target_name "${test_name}_hip")
configure_flashinfer_target(
TARGET_NAME ${target_name}
Expand Down