diff --git a/common/utils/src/main/resources/error/error-classes.json b/common/utils/src/main/resources/error/error-classes.json
index 87e43fe0e38c7..d856f8ab0d8a0 100644
--- a/common/utils/src/main/resources/error/error-classes.json
+++ b/common/utils/src/main/resources/error/error-classes.json
@@ -3582,6 +3582,12 @@
     ],
     "sqlState" : "0A000"
   },
+  "UNSUPPORTED_DATA_SOURCE_SAVE_MODE" : {
+    "message" : [
+      "The data source '<source>' cannot be written in the <createMode> mode. Please use either the \"Append\" or \"Overwrite\" mode instead."
+    ],
+    "sqlState" : "0A000"
+  },
   "UNSUPPORTED_DATA_TYPE_FOR_DATASOURCE" : {
     "message" : [
       "The <format> datasource doesn't support the column <columnName> of the type <columnType>."
@@ -5397,11 +5403,6 @@
       "There is a 'path' option set and save() is called with a path parameter. Either remove the path option, or call save() without the parameter. To ignore this check, set '<config>' to 'true'."
     ]
   },
-  "_LEGACY_ERROR_TEMP_1308" : {
-    "message" : [
-      "TableProvider implementation <source> cannot be written with <createMode> mode, please use Append or Overwrite modes instead."
-    ]
-  },
   "_LEGACY_ERROR_TEMP_1309" : {
     "message" : [
       "insertInto() can't be used together with partitionBy(). Partition columns have already been defined for the table. It is not necessary to use partitionBy()."
diff --git a/connector/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaSinkSuite.scala b/connector/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaSinkSuite.scala
index 6753f8be54bf2..5566785c4d56d 100644
--- a/connector/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaSinkSuite.scala
+++ b/connector/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaSinkSuite.scala
@@ -557,7 +557,7 @@ class KafkaSinkBatchSuiteV2 extends KafkaSinkBatchSuiteBase {
 
   test("batch - unsupported save modes") {
     testUnsupportedSaveModes((mode) =>
-      Seq(s"cannot be written with ${mode.name} mode", "does not support truncate"))
+      Seq(s"cannot be written in the \"${mode.name}\" mode", "does not support truncate"))
   }
 
   test("generic - write big data with small producer buffer") {
diff --git a/docs/sql-error-conditions.md b/docs/sql-error-conditions.md
index 3f4074af9b780..b30e3c71b6665 100644
--- a/docs/sql-error-conditions.md
+++ b/docs/sql-error-conditions.md
@@ -2326,6 +2326,12 @@ Unsupported data source type for direct query on files: `<dataSourceType>`
 
 Unsupported data type `<typeName>`.
 
+### UNSUPPORTED_DATA_SOURCE_SAVE_MODE
+
+[SQLSTATE: 0A000](sql-error-conditions-sqlstates.html#class-0A-feature-not-supported)
+
+The data source '`<source>`' cannot be written in the `<createMode>` mode. Please use either the "Append" or "Overwrite" mode instead.
+
 ### UNSUPPORTED_DATA_TYPE_FOR_DATASOURCE
 
 [SQLSTATE: 0A000](sql-error-conditions-sqlstates.html#class-0A-feature-not-supported)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
index bc847d1c00699..25171c6dac77b 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
@@ -3186,10 +3186,10 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase with Compilat
 
   def writeWithSaveModeUnsupportedBySourceError(source: String, createMode: String): Throwable = {
     new AnalysisException(
-      errorClass = "_LEGACY_ERROR_TEMP_1308",
+      errorClass = "UNSUPPORTED_DATA_SOURCE_SAVE_MODE",
       messageParameters = Map(
         "source" -> source,
-        "createMode" -> createMode))
+        "createMode" -> toDSOption(createMode)))
   }
 
   def partitionByDoesNotAllowedWhenUsingInsertIntoError(): Throwable = {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2Suite.scala b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2Suite.scala
index ea263b36c76c6..fbcbf287b4559 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2Suite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2Suite.scala
@@ -454,10 +454,10 @@ class DataSourceV2Suite extends QueryTest with SharedSparkSession with AdaptiveS
             .write.format(cls.getName)
             .option("path", path).mode("ignore").save()
         },
-        errorClass = "_LEGACY_ERROR_TEMP_1308",
+        errorClass = "UNSUPPORTED_DATA_SOURCE_SAVE_MODE",
         parameters = Map(
           "source" -> cls.getName,
-          "createMode" -> "Ignore"
+          "createMode" -> "\"Ignore\""
         )
       )
 
@@ -467,10 +467,10 @@ class DataSourceV2Suite extends QueryTest with SharedSparkSession with AdaptiveS
            .write.format(cls.getName)
            .option("path", path).mode("error").save()
         },
-        errorClass = "_LEGACY_ERROR_TEMP_1308",
+        errorClass = "UNSUPPORTED_DATA_SOURCE_SAVE_MODE",
         parameters = Map(
           "source" -> cls.getName,
-          "createMode" -> "ErrorIfExists"
+          "createMode" -> "\"ErrorIfExists\""
         )
       )
     }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/python/PythonDataSourceSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/python/PythonDataSourceSuite.scala
index 3e7cd82db8d72..def38d8427b7b 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/python/PythonDataSourceSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/python/PythonDataSourceSuite.scala
@@ -622,12 +622,21 @@ class PythonDataSourceSuite extends QueryTest with SharedSparkSession {
     }
 
     withClue("without mode") {
-      val error = intercept[AnalysisException] {
-        spark.range(1).write.format(dataSourceName).save()
-      }
-      // TODO: improve this error message.
-      assert(error.getMessage.contains("TableProvider implementation SimpleDataSource " +
-        "cannot be written with ErrorIfExists mode, please use Append or Overwrite modes instead."))
+      checkError(
+        exception = intercept[AnalysisException] {
+          spark.range(1).write.format(dataSourceName).save()
+        },
+        errorClass = "UNSUPPORTED_DATA_SOURCE_SAVE_MODE",
+        parameters = Map("source" -> "SimpleDataSource", "createMode" -> "\"ErrorIfExists\""))
+    }
+
+    withClue("with unsupported mode") {
+      checkError(
+        exception = intercept[AnalysisException] {
+          spark.range(1).write.format(dataSourceName).mode("ignore").save()
+        },
+        errorClass = "UNSUPPORTED_DATA_SOURCE_SAVE_MODE",
+        parameters = Map("source" -> "SimpleDataSource", "createMode" -> "\"Ignore\""))
     }
   }