diff --git a/docs/sql-data-sources-jdbc.md b/docs/sql-data-sources-jdbc.md
index 1ce411db190c..ef11a3a77dd8 100644
--- a/docs/sql-data-sources-jdbc.md
+++ b/docs/sql-data-sources-jdbc.md
@@ -365,7 +365,7 @@ logging into the data sources.
- <td><code>inferTimestampNTZType</code></td>
+ <td><code>preferTimestampNTZ</code></td>
<td>
When the option is set to <code>true</code>, all timestamps are inferred as TIMESTAMP WITHOUT TIME ZONE.
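
For context on the rename, a minimal usage sketch of the option from the reader's side (the connection URL and table name below are hypothetical, not part of this patch):

```scala
// Hedged sketch: a JDBC read with the renamed option, so that database
// TIMESTAMP columns are inferred as Spark's TimestampNTZType.
val df = spark.read
  .format("jdbc")
  .option("url", "jdbc:postgresql://localhost:5432/testdb") // hypothetical URL
  .option("dbtable", "events")                              // hypothetical table
  .option("preferTimestampNTZ", "true")
  .load()
df.printSchema() // timestamp columns should show as timestamp_ntz
```
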
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JDBCOptions.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JDBCOptions.scala
index 916ed99303b2..148cd9e9335e 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JDBCOptions.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JDBCOptions.scala
@@ -234,9 +234,9 @@ class JDBCOptions(
val prepareQuery = parameters.get(JDBC_PREPARE_QUERY).map(_ + " ").getOrElse("")
// Infers timestamp values as TimestampNTZ type when reading data.
- val inferTimestampNTZType =
+ val preferTimestampNTZ =
parameters
- .get(JDBC_INFER_TIMESTAMP_NTZ)
+ .get(JDBC_PREFER_TIMESTAMP_NTZ)
.map(_.toBoolean)
.getOrElse(SQLConf.get.timestampType == TimestampNTZType)
}
@@ -301,5 +301,5 @@ object JDBCOptions {
val JDBC_REFRESH_KRB5_CONFIG = newOption("refreshKrb5Config")
val JDBC_CONNECTION_PROVIDER = newOption("connectionProvider")
val JDBC_PREPARE_QUERY = newOption("prepareQuery")
- val JDBC_INFER_TIMESTAMP_NTZ = newOption("inferTimestampNTZType")
+ val JDBC_PREFER_TIMESTAMP_NTZ = newOption("preferTimestampNTZ")
}
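
When the option is absent, the `getOrElse` above falls back to the session's `spark.sql.timestampType`. A hedged sketch of driving the same behavior through the session conf instead of the per-read option (URL and table are again hypothetical):

```scala
// Session-level alternative: with spark.sql.timestampType set to TIMESTAMP_NTZ,
// JDBC reads infer TimestampNTZType even without preferTimestampNTZ.
spark.conf.set("spark.sql.timestampType", "TIMESTAMP_NTZ")
val df = spark.read.format("jdbc")
  .option("url", "jdbc:postgresql://localhost:5432/testdb") // hypothetical URL
  .option("dbtable", "events")                              // hypothetical table
  .load()
```
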
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JDBCRDD.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JDBCRDD.scala
index de7dfeab643f..70e29f5d7195 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JDBCRDD.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JDBCRDD.scala
@@ -68,7 +68,7 @@ object JDBCRDD extends Logging {
val rs = statement.executeQuery()
try {
JdbcUtils.getSchema(rs, dialect, alwaysNullable = true,
- isTimestampNTZ = options.inferTimestampNTZType)
+ isTimestampNTZ = options.preferTimestampNTZ)
} finally {
rs.close()
}
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala
index 4b0d461e2377..6b3e355793d8 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala
@@ -251,7 +251,7 @@ object JdbcUtils extends Logging with SQLConfHelper {
try {
statement.setQueryTimeout(options.queryTimeout)
Some(getSchema(statement.executeQuery(), dialect,
- isTimestampNTZ = options.inferTimestampNTZType))
+ isTimestampNTZ = options.preferTimestampNTZ))
} catch {
case _: SQLException => None
} finally {
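
Both call sites above only thread the flag into `getSchema`; its effect there is to switch which Catalyst type is chosen for SQL TIMESTAMP columns. A simplified, hedged sketch of that mapping (not the actual `JdbcUtils` code, which also consults the JDBC dialect):

```scala
import java.sql.Types
import org.apache.spark.sql.types.{DataType, TimestampNTZType, TimestampType}

// Simplified sketch: pick the Catalyst type for a JDBC TIMESTAMP column
// based on the isTimestampNTZ flag carried in from the options.
def timestampCatalystType(sqlType: Int, isTimestampNTZ: Boolean): Option[DataType] =
  sqlType match {
    case Types.TIMESTAMP if isTimestampNTZ => Some(TimestampNTZType)
    case Types.TIMESTAMP => Some(TimestampType)
    case _ => None
  }
```
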
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
index 975c2886948f..aa66fcd53041 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
@@ -1951,7 +1951,7 @@ class JDBCSuite extends QueryTest with SharedSparkSession {
} else {
TimestampType
}
- val res = readDf.option("inferTimestampNTZType", inferTimestampNTZ).load()
+ val res = readDf.option("preferTimestampNTZ", inferTimestampNTZ).load()
checkAnswer(res, Seq(Row(null)))
assert(res.schema.fields.head.dataType == tsType)
withSQLConf(SQLConf.TIMESTAMP_TYPE.key -> timestampType) {
@@ -1984,7 +1984,7 @@ class JDBCSuite extends QueryTest with SharedSparkSession {
DateTimeTestUtils.withDefaultTimeZone(zoneId) {
// Infer TimestampNTZ column with data source option
val res = spark.read.format("jdbc")
- .option("inferTimestampNTZType", "true")
+ .option("preferTimestampNTZ", "true")
.option("url", urlWithUserAndPass)
.option("dbtable", tableName)
.load()
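
A hedged sketch of the assertion pattern these tests rely on (identifiers such as urlWithUserAndPass and tableName come from the surrounding suite):

```scala
// Sketch: with preferTimestampNTZ=true, the inferred schema should carry
// TimestampNTZType for the timestamp column.
val ntzRes = spark.read.format("jdbc")
  .option("preferTimestampNTZ", "true")
  .option("url", urlWithUserAndPass)
  .option("dbtable", tableName)
  .load()
assert(ntzRes.schema.fields.head.dataType ==
  org.apache.spark.sql.types.TimestampNTZType)
```
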