@@ -1834,32 +1834,6 @@ def cache_permute_indices():
     return _cache_permute_indices
 
 
-@pytest.fixture(autouse=True)
-def clear_cache_between_test_functions(cache_permute_indices, request):
-    """Automatically clear cache when switching between different test functions.
-
-    This keeps the cache within the same test function (across all parametrized runs)
-    but clears it when moving to a different test function.
-    """
-    # Get the base test function name (without parameters)
-    test_function_name = request.node.originalname or request.node.name.split("[")[0]
-
-    # Store the current test function name in the module
-    if not hasattr(request.module, "_current_test_function"):
-        request.module._current_test_function = test_function_name
-    elif request.module._current_test_function != test_function_name:
-        # We've switched to a different test function, clear the cache and GPU state
-        cache_permute_indices.clear()
-        request.module._current_test_function = test_function_name
-
-    # Synchronize and clear GPU memory/cache
-    torch.cuda.synchronize()
-    torch.cuda.empty_cache()
-
-    yield  # Run the test
-    # No cleanup needed here - we clear at the start of the next different function
-
-
 def skip_checks(
     moe_impl,
     routing_config,
@@ -2262,6 +2236,7 @@ def test_deepseekv3_routing(
22622236 "compatible_intermediate_size" : [384 , 768 , 1024 , 2048 ],
22632237 },
22642238 id = "Renorm" ,
2239+ marks = pytest .mark .skip (reason = "Skip temporary" ),
22652240 ),
22662241 pytest .param (
22672242 {
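The added `marks=pytest.mark.skip(...)` argument uses standard pytest behavior: a single `pytest.param` entry in a `parametrize` list can carry its own marks, so only that one case is skipped while the remaining parametrized cases still run. A minimal sketch of the mechanism, with hypothetical parameter dicts and test name (not taken from this file):

```python
import pytest


@pytest.mark.parametrize(
    "routing_config",
    [
        pytest.param({"name": "TopK"}, id="TopK"),
        # Only this parametrized case is skipped; "TopK" above still runs.
        pytest.param(
            {"name": "Renorm"},
            id="Renorm",
            marks=pytest.mark.skip(reason="Skip temporary"),
        ),
    ],
)
def test_routing(routing_config):
    # Placeholder check standing in for the real routing assertions.
    assert "name" in routing_config
```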