diff --git a/sql/core/src/main/java/org/apache/spark/sql/sources/v2/reader/Scan.java b/sql/core/src/main/java/org/apache/spark/sql/sources/v2/reader/Scan.java
index 7633d504d36b1..f6085b933c656 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/sources/v2/reader/Scan.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/sources/v2/reader/Scan.java
@@ -62,7 +62,7 @@ default String description() {
   /**
    * Returns the physical representation of this scan for batch query. By default this method throws
    * exception, data sources must overwrite this method to provide an implementation, if the
-   * {@link Table} that creates this scan returns {@link TableCapability#BATCH_READ} in its
+   * {@link Table} that creates this scan returns {@link TableCapability#BATCH_READ} support in its
    * {@link Table#capabilities()}.
    *
    * @throws UnsupportedOperationException
diff --git a/sql/core/src/main/java/org/apache/spark/sql/sources/v2/writer/WriteBuilder.java b/sql/core/src/main/java/org/apache/spark/sql/sources/v2/writer/WriteBuilder.java
index e08d34fbf453e..aab46b078c334 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/sources/v2/writer/WriteBuilder.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/sources/v2/writer/WriteBuilder.java
@@ -19,6 +19,7 @@
 
 import org.apache.spark.annotation.Evolving;
 import org.apache.spark.sql.sources.v2.Table;
+import org.apache.spark.sql.sources.v2.TableCapability;
 import org.apache.spark.sql.sources.v2.writer.streaming.StreamingWrite;
 import org.apache.spark.sql.types.StructType;
 
@@ -57,8 +58,8 @@ default WriteBuilder withInputDataSchema(StructType schema) {
   /**
    * Returns a {@link BatchWrite} to write data to batch source. By default this method throws
    * exception, data sources must overwrite this method to provide an implementation, if the
-   * {@link Table} that creates this write returns BATCH_WRITE support in its
-   * {@link Table#capabilities()}.
+   * {@link Table} that creates this write returns {@link TableCapability#BATCH_WRITE} support in
+   * its {@link Table#capabilities()}.
    *
    * Note that, the returned {@link BatchWrite} can be null if the implementation supports SaveMode,
    * to indicate that no writing is needed. We can clean it up after removing
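
For context, a minimal sketch (not part of this patch) of the contract the updated Javadoc describes: a `Table` that reports `BATCH_READ` / `BATCH_WRITE` in `capabilities()` is expected to produce a `Scan` that overrides `toBatch()` and a `WriteBuilder` that overrides `buildForBatch()`, since both defaults throw `UnsupportedOperationException`. All class names here (`SketchTable`, `SketchBatch`, `SketchBatchWrite`) are hypothetical, and the `Batch` / `BatchWrite` implementations are assumed to exist elsewhere and are not shown.

```java
// Hypothetical illustration of the capability-to-override contract described above.
// SketchBatch and SketchBatchWrite are assumed implementations of Batch and BatchWrite (not shown).
import java.util.EnumSet;
import java.util.Set;

import org.apache.spark.sql.sources.v2.SupportsRead;
import org.apache.spark.sql.sources.v2.SupportsWrite;
import org.apache.spark.sql.sources.v2.TableCapability;
import org.apache.spark.sql.sources.v2.reader.Batch;
import org.apache.spark.sql.sources.v2.reader.Scan;
import org.apache.spark.sql.sources.v2.reader.ScanBuilder;
import org.apache.spark.sql.sources.v2.writer.BatchWrite;
import org.apache.spark.sql.sources.v2.writer.WriteBuilder;
import org.apache.spark.sql.types.StructType;
import org.apache.spark.sql.util.CaseInsensitiveStringMap;

class SketchTable implements SupportsRead, SupportsWrite {
  private static final StructType SCHEMA = new StructType().add("value", "string");

  @Override public String name() { return "sketch_table"; }
  @Override public StructType schema() { return SCHEMA; }

  // Declaring BATCH_READ and BATCH_WRITE here is what obliges the Scan and WriteBuilder
  // below to override toBatch() / buildForBatch() instead of inheriting the throwing defaults.
  @Override public Set<TableCapability> capabilities() {
    return EnumSet.of(TableCapability.BATCH_READ, TableCapability.BATCH_WRITE);
  }

  @Override public ScanBuilder newScanBuilder(CaseInsensitiveStringMap options) {
    return () -> new Scan() {
      @Override public StructType readSchema() { return SCHEMA; }

      // Must be overridden because capabilities() contains BATCH_READ.
      @Override public Batch toBatch() { return new SketchBatch(); }
    };
  }

  @Override public WriteBuilder newWriteBuilder(CaseInsensitiveStringMap options) {
    return new WriteBuilder() {
      // Must be overridden because capabilities() contains BATCH_WRITE.
      @Override public BatchWrite buildForBatch() { return new SketchBatchWrite(); }
    };
  }
}
```

Under this reading, the throwing defaults in `Scan.toBatch()` and `WriteBuilder.buildForBatch()` should only ever be hit when a table advertises a capability its scan or write builder does not actually implement, which is exactly what the clarified Javadoc spells out.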