diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSource.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSource.scala
index 5f12d5f93a35..b676672b38cd 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSource.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSource.scala
@@ -598,11 +598,11 @@ object DataSource extends Logging {
               // Found the data source using fully qualified path
               dataSource
             case Failure(error) =>
-              if (provider1.toLowerCase(Locale.ROOT) == "orc" ||
-                provider1.startsWith("org.apache.spark.sql.hive.orc")) {
+              if (provider1.startsWith("org.apache.spark.sql.hive.orc")) {
                 throw new AnalysisException(
-                  "Hive-based ORC data source must be used with Hive support enabled. " +
-                    "Please use native ORC data source instead")
+                  "Hive built-in ORC data source must be used with Hive support enabled. " +
+                    "Please use the native ORC data source by setting 'spark.sql.orc.impl' to " +
+                    "'native'")
               } else if (provider1.toLowerCase(Locale.ROOT) == "avro" ||
                 provider1 == "com.databricks.spark.avro") {
                 throw new AnalysisException(
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
index 86bd9b95bca6..8ddddbeee598 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
@@ -1666,7 +1666,7 @@ class SQLQuerySuite extends QueryTest with SharedSQLContext {
     e = intercept[AnalysisException] {
       sql(s"select id from `org.apache.spark.sql.hive.orc`.`file_path`")
     }
-    assert(e.message.contains("Hive-based ORC data source must be used with Hive support"))
+    assert(e.message.contains("Hive built-in ORC data source must be used with Hive support"))
     e = intercept[AnalysisException] {
       sql(s"select id from `com.databricks.spark.avro`.`file_path`")
     }
@@ -2790,7 +2790,7 @@ class SQLQuerySuite extends QueryTest with SharedSQLContext {
       val e = intercept[AnalysisException] {
        sql("CREATE TABLE spark_20728(a INT) USING ORC")
       }
-      assert(e.message.contains("Hive-based ORC data source must be used with Hive support"))
+      assert(e.message.contains("Hive built-in ORC data source must be used with Hive support"))
     }

     withSQLConf(SQLConf.ORC_IMPLEMENTATION.key -> "native") {