diff --git a/python/pyspark/sql/tests/connect/test_parity_functions.py b/python/pyspark/sql/tests/connect/test_parity_functions.py
index dfa88169d9dfd..78ccbd49148b4 100644
--- a/python/pyspark/sql/tests/connect/test_parity_functions.py
+++ b/python/pyspark/sql/tests/connect/test_parity_functions.py
@@ -60,10 +60,6 @@ def test_assert_true(self):
     def test_basic_functions(self):
         super().test_basic_functions()
 
-    @unittest.skip("Fails in Spark Connect, should enable.")
-    def test_between_function(self):
-        super().test_between_function()
-
     @unittest.skip("Fails in Spark Connect, should enable.")
     def test_date_add_function(self):
         super().test_date_add_function()
@@ -72,10 +68,6 @@ def test_date_add_function(self):
     def test_date_sub_function(self):
         super().test_date_sub_function()
 
-    @unittest.skip("Fails in Spark Connect, should enable.")
-    def test_datetime_functions(self):
-        super().test_datetime_functions()
-
     @unittest.skip("Fails in Spark Connect, should enable.")
     def test_dayofweek(self):
         super().test_dayofweek()
@@ -84,10 +76,6 @@ def test_dayofweek(self):
     def test_explode(self):
         super().test_explode()
 
-    @unittest.skip("Fails in Spark Connect, should enable.")
-    def test_expr(self):
-        super().test_expr()
-
     @unittest.skip("Fails in Spark Connect, should enable.")
     def test_first_last_ignorenulls(self):
         super().test_first_last_ignorenulls()
@@ -128,10 +116,6 @@ def test_lit_np_scalar(self):
     def test_map_functions(self):
         super().test_map_functions()
 
-    @unittest.skip("Fails in Spark Connect, should enable.")
-    def test_math_functions(self):
-        super().test_math_functions()
-
     @unittest.skip("Fails in Spark Connect, should enable.")
     def test_ndarray_input(self):
         super().test_ndarray_input()
@@ -172,10 +156,6 @@ def test_sorting_functions_with_column(self):
     def test_window_functions(self):
         super().test_window_functions()
 
-    @unittest.skip("Fails in Spark Connect, should enable.")
-    def test_window_functions_cumulative_sum(self):
-        super().test_window_functions_cumulative_sum()
-
     @unittest.skip("Fails in Spark Connect, should enable.")
     def test_window_functions_without_partitionBy(self):
         super().test_window_functions_without_partitionBy()
@@ -184,18 +164,6 @@ def test_window_functions_without_partitionBy(self):
     def test_window_time(self):
         super().test_window_time()
 
-    @unittest.skip("Fails in Spark Connect, should enable.")
-    def test_corr(self):
-        super().test_corr()
-
-    @unittest.skip("Fails in Spark Connect, should enable.")
-    def test_cov(self):
-        super().test_cov()
-
-    @unittest.skip("Fails in Spark Connect, should enable.")
-    def test_crosstab(self):
-        super().test_crosstab()
-
     @unittest.skip("Fails in Spark Connect, should enable.")
     def test_rand_functions(self):
         super().test_rand_functions()
@@ -208,10 +176,6 @@ def test_reciprocal_trig_functions(self):
     def test_sampleby(self):
         super().test_sampleby()
 
-    @unittest.skip("Fails in Spark Connect, should enable.")
-    def test_approxQuantile(self):
-        super().test_approxQuantile()
-
 
 if __name__ == "__main__":
     import unittest