diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/Row.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/Row.scala index 9a7e077b658d..4487a2d7f435 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/Row.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/Row.scala @@ -165,8 +165,11 @@ trait Row extends Serializable { * StringType -> String * DecimalType -> java.math.BigDecimal * - * DateType -> java.sql.Date - * TimestampType -> java.sql.Timestamp + * DateType -> java.sql.Date if spark.sql.datetime.java8API.enabled is false + * DateType -> java.time.LocalDate if spark.sql.datetime.java8API.enabled is true + * + * TimestampType -> java.sql.Timestamp if spark.sql.datetime.java8API.enabled is false + * TimestampType -> java.time.Instant if spark.sql.datetime.java8API.enabled is true * * BinaryType -> byte array * ArrayType -> scala.collection.Seq (use getList for java.util.List) @@ -190,8 +193,11 @@ trait Row extends Serializable { * StringType -> String * DecimalType -> java.math.BigDecimal * - * DateType -> java.sql.Date - * TimestampType -> java.sql.Timestamp + * DateType -> java.sql.Date if spark.sql.datetime.java8API.enabled is false + * DateType -> java.time.LocalDate if spark.sql.datetime.java8API.enabled is true + * + * TimestampType -> java.sql.Timestamp if spark.sql.datetime.java8API.enabled is false + * TimestampType -> java.time.Instant if spark.sql.datetime.java8API.enabled is true * * BinaryType -> byte array * ArrayType -> scala.collection.Seq (use getList for java.util.List) diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/sources/filters.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/sources/filters.scala index 319073e4475b..753379325351 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/sources/filters.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/sources/filters.scala @@ -25,7 +25,8 @@ import 
org.apache.spark.sql.connector.catalog.CatalogV2Implicits.parseColumnPath //////////////////////////////////////////////////////////////////////////////////////////////////// /** - * A filter predicate for data sources. + * A filter predicate for data sources. The mapping between Spark SQL types and filter value + * types follows the convention for the return type of [[org.apache.spark.sql.Row#get(int)]]. * * @since 1.3.0 */