diff --git a/sql/core/src/main/java/org/apache/spark/sql/sources/v2/SupportsRead.java b/sql/core/src/main/java/org/apache/spark/sql/sources/v2/SupportsRead.java
index 67fc72e070dc..826fa2f8a072 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/sources/v2/SupportsRead.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/sources/v2/SupportsRead.java
@@ -22,7 +22,7 @@
 import org.apache.spark.sql.util.CaseInsensitiveStringMap;
 
 /**
- * An internal base interface of mix-in interfaces for readable {@link Table}. This adds
+ * A mix-in interface of {@link Table}, to indicate that it's readable. This adds
  * {@link #newScanBuilder(CaseInsensitiveStringMap)} that is used to create a scan for batch,
  * micro-batch, or continuous processing.
  */
diff --git a/sql/core/src/main/java/org/apache/spark/sql/sources/v2/SupportsWrite.java b/sql/core/src/main/java/org/apache/spark/sql/sources/v2/SupportsWrite.java
index b21596386821..c52e54569dc0 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/sources/v2/SupportsWrite.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/sources/v2/SupportsWrite.java
@@ -22,7 +22,7 @@
 import org.apache.spark.sql.util.CaseInsensitiveStringMap;
 
 /**
- * An internal base interface of mix-in interfaces for writable {@link Table}. This adds
+ * A mix-in interface of {@link Table}, to indicate that it's writable. This adds
  * {@link #newWriteBuilder(CaseInsensitiveStringMap)} that is used to create a write
  * for batch or streaming.
  */
diff --git a/sql/core/src/main/java/org/apache/spark/sql/sources/v2/reader/Scan.java b/sql/core/src/main/java/org/apache/spark/sql/sources/v2/reader/Scan.java
index e97d0548c66f..7633d504d36b 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/sources/v2/reader/Scan.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/sources/v2/reader/Scan.java
@@ -24,6 +24,7 @@
 import org.apache.spark.sql.sources.v2.SupportsContinuousRead;
 import org.apache.spark.sql.sources.v2.SupportsMicroBatchRead;
 import org.apache.spark.sql.sources.v2.Table;
+import org.apache.spark.sql.sources.v2.TableCapability;
 
 /**
  * A logical representation of a data source scan. This interface is used to provide logical
@@ -32,8 +33,8 @@
  * This logical representation is shared between batch scan, micro-batch streaming scan and
  * continuous streaming scan. Data sources must implement the corresponding methods in this
  * interface, to match what the table promises to support. For example, {@link #toBatch()} must be
- * implemented, if the {@link Table} that creates this {@link Scan} returns BATCH_READ support in
- * its {@link Table#capabilities()}.
+ * implemented, if the {@link Table} that creates this {@link Scan} returns
+ * {@link TableCapability#BATCH_READ} support in its {@link Table#capabilities()}.
  *
  */
 @Evolving
@@ -61,7 +62,8 @@ default String description() {
   /**
    * Returns the physical representation of this scan for batch query. By default this method throws
    * exception, data sources must overwrite this method to provide an implementation, if the
-   * {@link Table} that creates this returns batch read support in its {@link Table#capabilities()}.
+   * {@link Table} that creates this scan returns {@link TableCapability#BATCH_READ} in its
+   * {@link Table#capabilities()}.
    *
    * @throws UnsupportedOperationException
    */
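
For illustration, here is a minimal sketch of the contract described by the Javadoc above: a table that reports only TableCapability.BATCH_READ from Table#capabilities(), so the Scan it creates has to implement toBatch(). The class name and every interface not named in this patch (ScanBuilder, Batch, InputPartition, PartitionReaderFactory, and Table#name()/schema()) are assumptions about the surrounding DataSourceV2 API, not something this diff defines.

// Hypothetical example, not part of this patch: a read-only table that reports
// TableCapability.BATCH_READ, so the Scan it creates must implement toBatch()
// instead of inheriting the throwing default.
import java.util.Collections;
import java.util.Set;

import org.apache.spark.sql.sources.v2.SupportsRead;
import org.apache.spark.sql.sources.v2.Table;
import org.apache.spark.sql.sources.v2.TableCapability;
import org.apache.spark.sql.sources.v2.reader.Batch;
import org.apache.spark.sql.sources.v2.reader.InputPartition;
import org.apache.spark.sql.sources.v2.reader.PartitionReaderFactory;
import org.apache.spark.sql.sources.v2.reader.Scan;
import org.apache.spark.sql.sources.v2.reader.ScanBuilder;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.StructType;
import org.apache.spark.sql.util.CaseInsensitiveStringMap;

public class ExampleBatchTable implements Table, SupportsRead {

  private static final StructType SCHEMA =
      new StructType().add("value", DataTypes.StringType);

  @Override
  public String name() {
    return "example_batch_table";
  }

  @Override
  public StructType schema() {
    return SCHEMA;
  }

  // The table only promises batch reads, so toBatch() below must be implemented;
  // the streaming variants of Scan may keep their default implementations.
  @Override
  public Set<TableCapability> capabilities() {
    return Collections.singleton(TableCapability.BATCH_READ);
  }

  @Override
  public ScanBuilder newScanBuilder(CaseInsensitiveStringMap options) {
    return () -> new Scan() {
      @Override
      public StructType readSchema() {
        return SCHEMA;
      }

      // Required because capabilities() advertises BATCH_READ; otherwise the
      // default implementation would throw UnsupportedOperationException.
      @Override
      public Batch toBatch() {
        return new Batch() {
          @Override
          public InputPartition[] planInputPartitions() {
            return new InputPartition[0]; // no data in this sketch
          }

          @Override
          public PartitionReaderFactory createReaderFactory() {
            throw new UnsupportedOperationException("row reading is elided here");
          }
        };
      }
    };
  }
}

The SupportsWrite side mirrors this pattern: newWriteBuilder(CaseInsensitiveStringMap) is the entry point, and the write path the builder produces has to match whatever write capabilities the table reports.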