diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
index 2fda7894c0fb..1256fe3aad6a 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
@@ -224,13 +224,6 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase with Compilat
         "invalidValue" -> toSQLExpr(invalidValue)))
   }
 
-  def nullDataSourceOption(option: String): Throwable = {
-    new AnalysisException(
-      errorClass = "NULL_DATA_SOURCE_OPTION",
-      messageParameters = Map("option" -> option)
-    )
-  }
-
   def unorderablePivotColError(pivotCol: Expression): Throwable = {
     new AnalysisException(
       errorClass = "INCOMPARABLE_PIVOT_COLUMN",
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
index f9f9b31a25aa..5755ad38fb29 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -2937,4 +2937,11 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase with ExecutionE
       )
     )
   }
+
+  def nullDataSourceOption(option: String): Throwable = {
+    new SparkIllegalArgumentException(
+      errorClass = "NULL_DATA_SOURCE_OPTION",
+      messageParameters = Map("option" -> option)
+    )
+  }
 }
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JDBCOptions.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JDBCOptions.scala
index 481cc80fe522..e7a4c9b258c1 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JDBCOptions.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JDBCOptions.scala
@@ -25,7 +25,7 @@ import org.apache.commons.io.FilenameUtils
 import org.apache.spark.SparkFiles
 import org.apache.spark.internal.Logging
 import org.apache.spark.sql.catalyst.util.CaseInsensitiveMap
-import org.apache.spark.sql.errors.{QueryCompilationErrors, QueryExecutionErrors}
+import org.apache.spark.sql.errors.QueryExecutionErrors
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.types.TimestampNTZType
 import org.apache.spark.util.Utils
@@ -56,7 +56,7 @@ class JDBCOptions(
     // If an option value is `null`, throw a user-friendly error. Keys here cannot be null, as
     // scala's implementation of Maps prohibits null keys.
     if (v == null) {
-      throw QueryCompilationErrors.nullDataSourceOption(k)
+      throw QueryExecutionErrors.nullDataSourceOption(k)
     }
     properties.setProperty(k, v)
   }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCV2Suite.scala b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCV2Suite.scala
index bf9e091c5296..1ba7ec78f8bd 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCV2Suite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCV2Suite.scala
@@ -24,7 +24,7 @@ import scala.util.control.NonFatal
 
 import test.org.apache.spark.sql.connector.catalog.functions.JavaStrLen.JavaStrLenStaticMagic
 
-import org.apache.spark.{SparkConf, SparkException}
+import org.apache.spark.{SparkConf, SparkException, SparkIllegalArgumentException}
 import org.apache.spark.sql.{AnalysisException, DataFrame, ExplainSuiteHelper, QueryTest, Row}
 import org.apache.spark.sql.catalyst.InternalRow
 import org.apache.spark.sql.catalyst.analysis.{CannotReplaceMissingTableException, IndexAlreadyExistsException, NoSuchIndexException}
@@ -396,7 +396,7 @@ class JDBCV2Suite extends QueryTest with SharedSparkSession with ExplainSuiteHel
       .option("pushDownOffset", null)
       .table("h2.test.employee")
     checkError(
-      exception = intercept[AnalysisException] {
+      exception = intercept[SparkIllegalArgumentException] {
         df.collect()
       },
       condition = "NULL_DATA_SOURCE_OPTION",
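
Not part of the diff: a minimal sketch of how the relocated error now surfaces to a caller, mirroring the updated test. The local-mode session and the `h2.test.employee` table are illustrative (`JDBCV2Suite` registers a real H2-backed `h2` catalog), and `getCondition` on the thrown exception is assumed available as on current `SparkThrowable`.

```scala
import org.apache.spark.SparkIllegalArgumentException
import org.apache.spark.sql.SparkSession

object NullOptionDemo {
  def main(args: Array[String]): Unit = {
    // Illustrative session; the test suite wires an H2-backed `h2` catalog into SparkConf.
    val spark = SparkSession.builder().master("local[1]").getOrCreate()

    // A null option value now fails with SparkIllegalArgumentException
    // (error class NULL_DATA_SOURCE_OPTION) raised from QueryExecutionErrors,
    // instead of the AnalysisException previously raised from QueryCompilationErrors.
    try {
      spark.read
        .option("pushDownOffset", null) // null value trips the check in JDBCOptions
        .table("h2.test.employee")
        .collect()
    } catch {
      case e: SparkIllegalArgumentException =>
        assert(e.getCondition == "NULL_DATA_SOURCE_OPTION")
    } finally {
      spark.stop()
    }
  }
}
```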