Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
46 commits
Select commit Hold shift + click to select a range
02f5cf9
jdbc dialect can match non-lowercase URL prefixes
Jul 29, 2019
fa6b8a4
jdbc dialect matching is not case sensitive
Jul 31, 2019
2d52578
Merge branch 'master' of https://github.com/apache/spark into sql_dia…
Aug 1, 2019
c89c859
jdbc dialect matching is not case sensitive
Aug 1, 2019
0db9f75
jdbc dialect matching is not case sensitive
Aug 1, 2019
7591113
jdbc dialect matching is not case sensitive
Aug 1, 2019
7fc974e
jdbc dialect matching is not case sensitive
Aug 1, 2019
3b7a689
jdbc dialect matching is not case sensitive
Aug 1, 2019
0334680
jdbc dialect matching is not case sensitive
Aug 2, 2019
530741c
Merge branch 'master' of https://github.com/apache/spark into sql_dia…
Aug 2, 2019
5913568
Merge branch 'master' of https://github.com/apache/spark into sql_dia…
Oct 8, 2019
93027ca
jdbc dialect matching is not case sensitive
Oct 26, 2019
404bb34
jdbc dialect matching is not case sensitive
Oct 26, 2019
c27dfff
jdbc dialect matching is not case sensitive
Oct 28, 2019
acb42cf
jdbc dialect matching is not case sensitive
Oct 30, 2019
5c495aa
jdbc dialect matching is not case sensitive
Oct 30, 2019
fbb5b4d
dialect
Oct 30, 2019
a16a0b6
Merge branch 'sql_dialect' of https://github.com/teeyog/spark into sq…
Oct 30, 2019
76e80e7
jdbc dialect matching is not case sensitive
Oct 30, 2019
626d8a4
fix
Oct 30, 2019
28af8f9
Merge remote-tracking branch 'upstream/master' into sql_dialect
Oct 31, 2019
baef0d3
[SPARK-29653][SQL] Fix MICROS_PER_MONTH in IntervalUtils
yaooqinn Oct 30, 2019
6c55e42
[SPARK-29120][SQL][TESTS] Port create_view.sql
maropu Oct 30, 2019
6718c49
[SPARK-29668][DOCS] Deprecate Python 3 prior to version 3.6
dongjoon-hyun Oct 30, 2019
e808616
[SPARK-29646][BUILD] Allow pyspark version name format `${versionNumb…
jiangxb1987 Oct 30, 2019
c582128
[SPARK-29666][BUILD] Fix the publish release failure under dry-run mode
jiangxb1987 Oct 30, 2019
d3ab1b8
Prepare Spark release v3.0.0-preview-rc2
jiangxb1987 Oct 29, 2019
b587956
Revert "Prepare Spark release v3.0.0-preview-rc2"
jiangxb1987 Oct 31, 2019
48c662f
[SPARK-29277][SQL] Add early DSv2 filter and projection pushdown
rdblue Oct 31, 2019
bcfe98a
[SPARK-29651][SQL] Fix parsing of interval seconds fraction
MaxGekk Oct 31, 2019
c9155ac
[SPARK-29126][PYSPARK][DOC] Pandas Cogroup udf usage guide
d80tb7 Oct 31, 2019
d98fed4
Merge remote-tracking branch 'upstream/master'
Oct 31, 2019
d8acf7f
Merge branch 'master' of https://github.com/apache/spark
Oct 31, 2019
844c0aa
Merge branch 'master' of https://github.com/apache/spark
Oct 31, 2019
a5148f0
Merge branch 'master' into sql_dialect
teeyog Nov 1, 2019
14814ef
fix conflicts
Nov 1, 2019
3e58b81
fix conflict
Nov 1, 2019
ac70da4
fix conflict
Nov 1, 2019
e08c95f
Merge branch 'master' of https://github.com/apache/spark
Nov 4, 2019
1f8b514
update
Nov 4, 2019
68cdff8
update
Nov 4, 2019
3e1585b
update
Nov 4, 2019
2dba4b1
Merge branch 'master' of https://github.com/apache/spark
Nov 4, 2019
0f5f776
update
Nov 4, 2019
6f855b1
update
Nov 4, 2019
1883847
Merge branch 'sql_dialect' of https://github.com/teeyog/spark into sq…
Nov 4, 2019
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -18,12 +18,14 @@
package org.apache.spark.sql.jdbc

import java.sql.Types
import java.util.Locale

import org.apache.spark.sql.types._

private object DB2Dialect extends JdbcDialect {

override def canHandle(url: String): Boolean = url.startsWith("jdbc:db2")
// Accepts any JDBC URL whose scheme prefix is "jdbc:db2", ignoring case
// (SPARK-28552: dialect matching must not be case sensitive).
override def canHandle(url: String): Boolean = {
  val normalizedUrl = url.toLowerCase(Locale.ROOT)
  normalizedUrl.startsWith("jdbc:db2")
}

override def getCatalystType(
sqlType: Int,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,13 +18,15 @@
package org.apache.spark.sql.jdbc

import java.sql.Types
import java.util.Locale

import org.apache.spark.sql.types._


private object DerbyDialect extends JdbcDialect {

override def canHandle(url: String): Boolean = url.startsWith("jdbc:derby")
// Accepts any JDBC URL whose scheme prefix is "jdbc:derby", ignoring case
// (SPARK-28552: dialect matching must not be case sensitive).
override def canHandle(url: String): Boolean = {
  val normalizedUrl = url.toLowerCase(Locale.ROOT)
  normalizedUrl.startsWith("jdbc:derby")
}

override def getCatalystType(
sqlType: Int, typeName: String, size: Int, md: MetadataBuilder): Option[DataType] = {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,12 +17,15 @@

package org.apache.spark.sql.jdbc

import java.util.Locale

import org.apache.spark.sql.types._


private object MsSqlServerDialect extends JdbcDialect {

override def canHandle(url: String): Boolean = url.startsWith("jdbc:sqlserver")
// Accepts any JDBC URL whose scheme prefix is "jdbc:sqlserver", ignoring case
// (SPARK-28552: dialect matching must not be case sensitive).
override def canHandle(url: String): Boolean = {
  val normalizedUrl = url.toLowerCase(Locale.ROOT)
  normalizedUrl.startsWith("jdbc:sqlserver")
}

override def getCatalystType(
sqlType: Int, typeName: String, size: Int, md: MetadataBuilder): Option[DataType] = {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,12 +18,14 @@
package org.apache.spark.sql.jdbc

import java.sql.Types
import java.util.Locale

import org.apache.spark.sql.types.{BooleanType, DataType, LongType, MetadataBuilder}

private case object MySQLDialect extends JdbcDialect {

override def canHandle(url : String): Boolean = url.startsWith("jdbc:mysql")
// Accepts any JDBC URL whose scheme prefix is "jdbc:mysql", ignoring case
// (SPARK-28552: dialect matching must not be case sensitive).
override def canHandle(url : String): Boolean = {
  val normalizedUrl = url.toLowerCase(Locale.ROOT)
  normalizedUrl.startsWith("jdbc:mysql")
}

override def getCatalystType(
sqlType: Int, typeName: String, size: Int, md: MetadataBuilder): Option[DataType] = {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@
package org.apache.spark.sql.jdbc

import java.sql.{Date, Timestamp, Types}
import java.util.TimeZone
import java.util.{Locale, TimeZone}

import org.apache.spark.sql.catalyst.util.DateTimeUtils
import org.apache.spark.sql.internal.SQLConf
Expand All @@ -30,7 +30,8 @@ private case object OracleDialect extends JdbcDialect {
private[jdbc] val BINARY_DOUBLE = 101
private[jdbc] val TIMESTAMPTZ = -101

override def canHandle(url: String): Boolean = url.startsWith("jdbc:oracle")
// Accepts any JDBC URL whose scheme prefix is "jdbc:oracle", ignoring case
// (SPARK-28552: dialect matching must not be case sensitive).
override def canHandle(url: String): Boolean = {
  val normalizedUrl = url.toLowerCase(Locale.ROOT)
  normalizedUrl.startsWith("jdbc:oracle")
}

private def supportTimeZoneTypes: Boolean = {
val timeZone = DateTimeUtils.getTimeZone(SQLConf.get.sessionLocalTimeZone)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,14 +18,16 @@
package org.apache.spark.sql.jdbc

import java.sql.{Connection, Types}
import java.util.Locale

import org.apache.spark.sql.execution.datasources.jdbc.{JDBCOptions, JdbcUtils}
import org.apache.spark.sql.types._


private object PostgresDialect extends JdbcDialect {

override def canHandle(url: String): Boolean = url.startsWith("jdbc:postgresql")
// Accepts any JDBC URL whose scheme prefix is "jdbc:postgresql", ignoring case
// (SPARK-28552: dialect matching must not be case sensitive).
override def canHandle(url: String): Boolean = {
  val normalizedUrl = url.toLowerCase(Locale.ROOT)
  normalizedUrl.startsWith("jdbc:postgresql")
}

override def getCatalystType(
sqlType: Int, typeName: String, size: Int, md: MetadataBuilder): Option[DataType] = {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,14 +17,15 @@

package org.apache.spark.sql.jdbc

import java.sql.Types
import java.util.Locale

import org.apache.spark.sql.types._


private case object TeradataDialect extends JdbcDialect {

override def canHandle(url: String): Boolean = { url.startsWith("jdbc:teradata") }
// Accepts any JDBC URL whose scheme prefix is "jdbc:teradata", ignoring case
// (SPARK-28552: dialect matching must not be case sensitive).
override def canHandle(url: String): Boolean = {
  val normalizedUrl = url.toLowerCase(Locale.ROOT)
  normalizedUrl.startsWith("jdbc:teradata")
}

override def getJDBCType(dt: DataType): Option[JdbcType] = dt match {
case StringType => Some(JdbcType("VARCHAR(255)", java.sql.Types.VARCHAR))
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,7 @@ class JDBCSuite extends QueryTest
val testBytes = Array[Byte](99.toByte, 134.toByte, 135.toByte, 200.toByte, 205.toByte)

val testH2Dialect = new JdbcDialect {
override def canHandle(url: String) : Boolean = url.startsWith("jdbc:h2")
override def canHandle(url: String): Boolean = url.startsWith("jdbc:h2")
override def getCatalystType(
sqlType: Int, typeName: String, size: Int, md: MetadataBuilder): Option[DataType] =
Some(StringType)
Expand Down Expand Up @@ -1662,4 +1662,21 @@ class JDBCSuite extends QueryTest
"Invalid value `test` for parameter `isolationLevel`. This can be " +
"`NONE`, `READ_UNCOMMITTED`, `READ_COMMITTED`, `REPEATABLE_READ` or `SERIALIZABLE`."))
}

test("SPARK-28552: Case-insensitive database URLs in JdbcDialect") {
  // Table-driven check: each dialect must be resolved for both the canonical
  // lower-case URL prefix and a mixed-case spelling of the same prefix.
  val expectations: Seq[(String, JdbcDialect)] = Seq(
    "jdbc:mysql://localhost/db" -> MySQLDialect,
    "jdbc:MySQL://localhost/db" -> MySQLDialect,
    "jdbc:postgresql://localhost/db" -> PostgresDialect,
    "jdbc:postGresql://localhost/db" -> PostgresDialect,
    "jdbc:db2://localhost/db" -> DB2Dialect,
    "jdbc:DB2://localhost/db" -> DB2Dialect,
    "jdbc:sqlserver://localhost/db" -> MsSqlServerDialect,
    "jdbc:sqlServer://localhost/db" -> MsSqlServerDialect,
    "jdbc:derby://localhost/db" -> DerbyDialect,
    "jdbc:derBy://localhost/db" -> DerbyDialect,
    "jdbc:oracle://localhost/db" -> OracleDialect,
    "jdbc:Oracle://localhost/db" -> OracleDialect,
    "jdbc:teradata://localhost/db" -> TeradataDialect,
    "jdbc:Teradata://localhost/db" -> TeradataDialect)
  expectations.foreach { case (url, expectedDialect) =>
    assert(JdbcDialects.get(url) === expectedDialect)
  }
}
}