 
 package org.apache.spark.sql.sources.v2
 
-import java.util
 import java.util.{ArrayList, List => JList}
 
 import test.org.apache.spark.sql.sources.v2._
@@ -35,6 +34,8 @@ import org.apache.spark.sql.types.StructType
 class DataSourceV2Suite extends QueryTest with SharedSQLContext {
   import testImplicits._
 
+  private val dsName = "userDefinedDataSource"
+
   test("simplest implementation") {
     Seq(classOf[SimpleDataSourceV2], classOf[JavaSimpleDataSourceV2]).foreach { cls =>
       withClue(cls.getName) {
@@ -47,32 +48,18 @@ class DataSourceV2Suite extends QueryTest with SharedSQLContext {
   }
 
   test("simple implementation with config support") {
-    withSQLConf(SQLConf.PARQUET_SCHEMA_MERGING_ENABLED.key -> "false",
-      SQLConf.PARQUET_COMPRESSION.key -> "uncompressed",
-      SQLConf.PARALLEL_PARTITION_DISCOVERY_THRESHOLD.key -> "32",
-      SQLConf.PARALLEL_PARTITION_DISCOVERY_PARALLELISM.key -> "10000") {
+    // Only match configs whose keys start with "spark.datasource.${dsName}".
+    withSQLConf(s"spark.datasource.$dsName.foo.bar" -> "false",
+      s"spark.datasource.$dsName.whateverConfigName" -> "123",
+      s"spark.sql.$dsName.config.name" -> "false",
+      s"spark.datasource.another.config.name" -> "123") {
       val cs = classOf[DataSourceV2WithConfig].newInstance().asInstanceOf[ConfigSupport]
-      val confs = DataSourceV2ConfigSupport.withSessionConfig(cs, "parquet", SQLConf.get)
-      assert(confs.size == 3)
-      assert(confs.keySet.filter(_.startsWith("spark.sql.parquet")).size == 0)
-      assert(confs.keySet.filter(_.startsWith("not.exist.prefix")).size == 0)
-      assert(confs.keySet.contains("compressionCodec"))
-      assert(confs.keySet.contains("sources.parallelPartitionDiscovery.threshold"))
-    }
-  }
-
-  test("config support with validOptions") {
-    withSQLConf(SQLConf.PARQUET_SCHEMA_MERGING_ENABLED.key -> "false",
-      SQLConf.PARQUET_COMPRESSION.key -> "uncompressed",
-      SQLConf.PARALLEL_PARTITION_DISCOVERY_THRESHOLD.key -> "32",
-      SQLConf.PARALLEL_PARTITION_DISCOVERY_PARALLELISM.key -> "10000") {
-      val cs = classOf[DataSourceV2WithValidOptions].newInstance().asInstanceOf[ConfigSupport]
-      val confs = DataSourceV2ConfigSupport.withSessionConfig(cs, "parquet", SQLConf.get)
+      val confs = DataSourceV2ConfigSupport.withSessionConfig(cs.name, SQLConf.get)
       assert(confs.size == 2)
-      assert(confs.keySet.filter(_.startsWith("spark.sql.parquet")).size == 0)
+      assert(confs.keySet.filter(_.startsWith("spark.datasource")).size == 0)
       assert(confs.keySet.filter(_.startsWith("not.exist.prefix")).size == 0)
-      assert(confs.keySet.contains("compressionCodec"))
-      assert(confs.keySet.contains("sources.parallelPartitionDiscovery.threshold"))
+      assert(confs.keySet.contains("foo.bar"))
+      assert(confs.keySet.contains("whateverConfigName"))
     }
   }
 
@@ -214,29 +201,7 @@ class SimpleReadTask(start: Int, end: Int) extends ReadTask[Row] with DataReader
 
 class DataSourceV2WithConfig extends SimpleDataSourceV2 with ConfigSupport {
 
-  override def getConfigPrefixes: JList[String] = {
-    java.util.Arrays.asList(
-      "spark.sql.parquet",
-      "spark.sql.sources.parallelPartitionDiscovery.threshold")
-  }
-
-  override def getConfigMapping: util.Map[String, String] = {
-    val configMap = new util.HashMap[String, String]()
-    configMap.put("spark.sql.parquet.compression.codec", "compressionCodec")
-    configMap
-  }
-
-  override def getValidOptions: JList[String] = new util.ArrayList[String]()
-}
-
-class DataSourceV2WithValidOptions extends DataSourceV2WithConfig {
-
-  override def getValidOptions: JList[String] = {
-    java.util.Arrays.asList(
-      "sources.parallelPartitionDiscovery.threshold",
-      "compressionCodec",
-      "not.exist.option")
-  }
+  override def name: String = "userDefinedDataSource"
 }
 
 class AdvancedDataSourceV2 extends DataSourceV2 with ReadSupport {
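The test above pins down the contract of the new API: only session configs under the data source's own namespace, "spark.datasource.<name>.", are handed to the source, with that prefix stripped off. A minimal standalone sketch of that matching logic follows; it is an illustration of the asserted behavior, not the actual DataSourceV2ConfigSupport implementation, and `sessionConfigs` stands in for `SQLConf.getAllConfs`.

object SessionConfigSketch {
  // Hypothetical stand-in for DataSourceV2ConfigSupport.withSessionConfig(name, conf):
  // keep only keys under "spark.datasource.<dsName>." and strip the prefix.
  def withSessionConfig(
      dsName: String,
      sessionConfigs: Map[String, String]): Map[String, String] = {
    val prefix = s"spark.datasource.$dsName."
    sessionConfigs.collect {
      case (key, value) if key.startsWith(prefix) =>
        key.stripPrefix(prefix) -> value
    }
  }

  def main(args: Array[String]): Unit = {
    // Mirrors the configs set via withSQLConf in the test above.
    val confs = withSessionConfig("userDefinedDataSource", Map(
      "spark.datasource.userDefinedDataSource.foo.bar" -> "false",
      "spark.datasource.userDefinedDataSource.whateverConfigName" -> "123",
      "spark.sql.userDefinedDataSource.config.name" -> "false",
      "spark.datasource.another.config.name" -> "123"))
    // Matches the test's assertions: two surviving entries, prefixes stripped.
    assert(confs == Map("foo.bar" -> "false", "whateverConfigName" -> "123"))
  }
}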