From 8001530db831ab10278d1ba1833a97e935c4ac50 Mon Sep 17 00:00:00 2001
From: Dongjoon Hyun
Date: Mon, 5 Dec 2022 23:13:54 -0800
Subject: [PATCH 1/3] [SPARK-41001][CONNECT][TESTS][FOLLOWUP] ChannelBuilderTests
 should be skipped by should_test_connect flag

---
 python/pyspark/sql/tests/connect/test_connect_basic.py | 8 ++++++--
 python/pyspark/testing/connectutils.py                 | 1 +
 2 files changed, 7 insertions(+), 2 deletions(-)

diff --git a/python/pyspark/sql/tests/connect/test_connect_basic.py b/python/pyspark/sql/tests/connect/test_connect_basic.py
index 1f67f4c49de97..abc6ed785537b 100644
--- a/python/pyspark/sql/tests/connect/test_connect_basic.py
+++ b/python/pyspark/sql/tests/connect/test_connect_basic.py
@@ -32,13 +32,16 @@
 if have_pandas:
     from pyspark.sql.connect.session import SparkSession as RemoteSparkSession
     from pyspark.sql.connect.client import ChannelBuilder
+    from pyspark.sql.connect.dataframe import DataFrame as CDataFrame
     from pyspark.sql.connect.function_builder import udf
     from pyspark.sql.connect.functions import lit, col
+    from pyspark.testing.pandasutils import PandasOnSparkTestCase
+else:
+    from pyspark.testing.sqlutils import ReusedSQLTestCase as PandasOnSparkTestCase
 from pyspark.sql.dataframe import DataFrame
 import pyspark.sql.functions
-from pyspark.sql.connect.dataframe import DataFrame as CDataFrame
 from pyspark.testing.connectutils import should_test_connect, connect_requirement_message
-from pyspark.testing.pandasutils import PandasOnSparkTestCase
+from pyspark.testing.sqlutils import have_pandas, pandas_requirement_message
 from pyspark.testing.utils import ReusedPySparkTestCase
 
 
@@ -881,6 +884,7 @@ def test_crossjoin(self):
         )
 
 
+@unittest.skipIf(not should_test_connect, connect_requirement_message)
 class ChannelBuilderTests(ReusedPySparkTestCase):
     def test_invalid_connection_strings(self):
         invalid = [
diff --git a/python/pyspark/testing/connectutils.py b/python/pyspark/testing/connectutils.py
index 05df6b02e6726..7f4250613cc20 100644
--- a/python/pyspark/testing/connectutils.py
+++ b/python/pyspark/testing/connectutils.py
@@ -97,6 +97,7 @@ def _session_sql(cls, query: str) -> "DataFrame":
         return DataFrame.withPlan(SQL(query), cls.connect)  # type: ignore
 
     if have_pandas:
+
         @classmethod
         def _with_plan(cls, plan: LogicalPlan) -> "DataFrame":
             return DataFrame.withPlan(plan, cls.connect)  # type: ignore

From 01dba89284a581810a9b19137f43429188c6be40 Mon Sep 17 00:00:00 2001
From: Dongjoon Hyun
Date: Mon, 5 Dec 2022 23:22:01 -0800
Subject: [PATCH 2/3] remove unused

---
 python/pyspark/sql/tests/connect/test_connect_basic.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/python/pyspark/sql/tests/connect/test_connect_basic.py b/python/pyspark/sql/tests/connect/test_connect_basic.py
index abc6ed785537b..913629400c406 100644
--- a/python/pyspark/sql/tests/connect/test_connect_basic.py
+++ b/python/pyspark/sql/tests/connect/test_connect_basic.py
@@ -41,7 +41,6 @@
 from pyspark.sql.dataframe import DataFrame
 import pyspark.sql.functions
 from pyspark.testing.connectutils import should_test_connect, connect_requirement_message
-from pyspark.testing.sqlutils import have_pandas, pandas_requirement_message
 from pyspark.testing.utils import ReusedPySparkTestCase
 
 

From c0cd90d3c83dfe65481ac950e6212f128336b065 Mon Sep 17 00:00:00 2001
From: Dongjoon Hyun
Date: Mon, 5 Dec 2022 23:56:48 -0800
Subject: [PATCH 3/3] ignore mypy check on fallback import statements

---
 python/pyspark/sql/tests/connect/test_connect_basic.py    | 2 +-
 python/pyspark/sql/tests/connect/test_connect_function.py | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/python/pyspark/sql/tests/connect/test_connect_basic.py b/python/pyspark/sql/tests/connect/test_connect_basic.py
index 913629400c406..f47035df490d3 100644
--- a/python/pyspark/sql/tests/connect/test_connect_basic.py
+++ b/python/pyspark/sql/tests/connect/test_connect_basic.py
@@ -37,7 +37,7 @@
     from pyspark.sql.connect.functions import lit, col
     from pyspark.testing.pandasutils import PandasOnSparkTestCase
 else:
-    from pyspark.testing.sqlutils import ReusedSQLTestCase as PandasOnSparkTestCase
+    from pyspark.testing.sqlutils import ReusedSQLTestCase as PandasOnSparkTestCase  # type: ignore
 from pyspark.sql.dataframe import DataFrame
 import pyspark.sql.functions
 from pyspark.testing.connectutils import should_test_connect, connect_requirement_message
diff --git a/python/pyspark/sql/tests/connect/test_connect_function.py b/python/pyspark/sql/tests/connect/test_connect_function.py
index 6d06421d0848f..ac0718fd6308b 100644
--- a/python/pyspark/sql/tests/connect/test_connect_function.py
+++ b/python/pyspark/sql/tests/connect/test_connect_function.py
@@ -26,7 +26,7 @@
     from pyspark.sql.connect.session import SparkSession as RemoteSparkSession
     from pyspark.testing.pandasutils import PandasOnSparkTestCase
 else:
-    from pyspark.testing.sqlutils import ReusedSQLTestCase as PandasOnSparkTestCase
+    from pyspark.testing.sqlutils import ReusedSQLTestCase as PandasOnSparkTestCase  # type: ignore
 from pyspark.sql.dataframe import DataFrame
 from pyspark.testing.connectutils import should_test_connect, connect_requirement_message
 from pyspark.testing.utils import ReusedPySparkTestCase
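
Note: the three patches above all lean on the same gating pattern: compute a should_test_connect-style flag from the optional dependencies, alias a fallback base class under the same name (with "# type: ignore" so mypy accepts the conditional re-binding), and guard the test class with @unittest.skipIf so a missing dependency surfaces as skipped tests instead of an import-time failure. The sketch below is a self-contained reduction of that pattern, not pyspark code; the flag computation and the FrameUnderTest/ChannelBuilderLikeTests names are assumptions made only for this example.

# A minimal sketch of the dependency-gated test pattern used in the patches.
import importlib.util
import unittest

# Assumed stand-ins for pyspark.testing.connectutils values: the real
# should_test_connect also checks grpc/protobuf availability.
have_pandas: bool = importlib.util.find_spec("pandas") is not None
should_test_connect: bool = have_pandas
connect_requirement_message: str = "pandas is required to run these tests"

if have_pandas:
    from pandas import DataFrame as FrameUnderTest
else:
    # Re-binding the same name from a different module is what mypy flags in
    # the third patch; the trailing comment silences that single check.
    from collections import UserDict as FrameUnderTest  # type: ignore


@unittest.skipIf(not should_test_connect, connect_requirement_message)
class ChannelBuilderLikeTests(unittest.TestCase):
    """Runs only when the optional dependencies are importable."""

    def test_frame_construction(self) -> None:
        # The skipIf guard guarantees pandas is present when this body runs.
        frame = FrameUnderTest({"a": [1, 2, 3]})
        self.assertEqual(len(frame), 3)


if __name__ == "__main__":
    unittest.main()

Run without pandas installed, the class is reported as skipped with connect_requirement_message; with pandas installed, the test executes normally. Keeping the guard at class level (rather than wrapping imports in try/except inside each test) keeps the module importable either way, which is the behavior the first patch restores for ChannelBuilderTests.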