forked from databricks/spark-csv
-
Notifications
You must be signed in to change notification settings - Fork 0
/
build.sbt
executable file
·105 lines (81 loc) · 3.34 KB
/
build.sbt
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
// Artifact coordinates: com.databricks:spark-csv:1.4.0.
name := "spark-csv"
version := "1.4.0"
organization := "com.databricks"
// Primary Scala version for local builds; cross-built versions below.
scalaVersion := "2.11.7"
// Spark Packages name (sbt-spark-package plugin key).
spName := "databricks/spark-csv"
// Cross-build against both Scala 2.10 and 2.11.
crossScalaVersions := Seq("2.10.5", "2.11.7")
// Spark version compiled against (sbt-spark-package plugin key).
sparkVersion := "1.6.0"
// Custom key so CI can test against a different Spark version via
// -Dspark.testVersion=...; defaults to the compile-time sparkVersion.
val testSparkVersion = settingKey[String]("The version of Spark to test against.")
testSparkVersion := sys.props.get("spark.testVersion").getOrElse(sparkVersion.value)
// Spark modules this package builds against (sbt-spark-package plugin key).
sparkComponents := Seq("core", "sql")
// CSV parsing backends (commons-csv and univocity), the SLF4J logging
// facade (provided at runtime by Spark), and the test frameworks.
libraryDependencies += "org.apache.commons" % "commons-csv" % "1.1"
libraryDependencies += "com.univocity" % "univocity-parsers" % "1.5.1"
libraryDependencies += "org.slf4j" % "slf4j-api" % "1.7.5" % "provided"
libraryDependencies += "org.scalatest" %% "scalatest" % "2.2.1" % "test"
libraryDependencies += "com.novocode" % "junit-interface" % "0.9" % "test"
// Spark is added explicitly in "test" scope (at testSparkVersion, which may
// differ from the compile-time sparkVersion); force() pins these exact
// versions against dependency eviction. scala-library is declared explicitly
// in compile scope.
libraryDependencies ++= Seq(
  ("org.apache.spark" %% "spark-core" % testSparkVersion.value % "test").force(),
  ("org.apache.spark" %% "spark-sql" % testSparkVersion.value % "test").force(),
  "org.scala-lang" % "scala-library" % scalaVersion.value % "compile"
)
// This is necessary because of how we explicitly specify Spark dependencies
// for tests rather than using the sbt-spark-package plugin to provide them.
spIgnoreProvided := true
// Publish a Maven-style POM (required for the Sonatype repos used below).
publishMavenStyle := true
// NOTE(review): plugin keys — presumably appends the Scala binary version to
// the Spark Packages artifact name and also publishes the Maven artifact
// through Spark Packages; confirm against sbt-spark-package docs.
spAppendScalaVersion := true
spIncludeMaven := true
// Route published artifacts to Sonatype OSS: SNAPSHOT versions go to the
// snapshots repository, everything else to the release staging area.
publishTo := {
  val sonatypeBase = "https://oss.sonatype.org/"
  val repository =
    if (version.value.endsWith("SNAPSHOT"))
      "snapshots" at sonatypeBase + "content/repositories/snapshots"
    else
      "releases" at sonatypeBase + "service/local/staging/deploy/maven2"
  Some(repository)
}
// Extra POM metadata required for Maven Central publication: project URL,
// license, SCM coordinates, and developer information.
pomExtra := (
<url>https://github.com/databricks/spark-csv</url>
<licenses>
<license>
<name>Apache License, Version 2.0</name>
<url>http://www.apache.org/licenses/LICENSE-2.0.html</url>
<distribution>repo</distribution>
</license>
</licenses>
<scm>
<url>[email protected]:databricks/spark-csv.git</url>
<connection>scm:git:[email protected]:databricks/spark-csv.git</connection>
</scm>
<developers>
<developer>
<id>falaki</id>
<name>Hossein Falaki</name>
<url>http://www.falaki.net</url>
</developer>
</developers>)
// Run test suites serially — presumably because concurrent SparkContexts in
// one JVM are unsafe; confirm if that constraint still holds.
parallelExecution in Test := false
// Skip tests during assembly
test in assembly := {}
// Scoverage statement highlighting is enabled on every cross-build version
// except Scala 2.10. Replaces the redundant `if (cond) false else true`
// with the equivalent direct comparison.
ScoverageSbtPlugin.ScoverageKeys.coverageHighlighting := scalaBinaryVersion.value != "2.10"
// -- MiMa binary compatibility checks ------------------------------------------------------------
import com.typesafe.tools.mima.core._
import com.typesafe.tools.mima.plugin.MimaKeys.binaryIssueFilters
import com.typesafe.tools.mima.plugin.MimaKeys.previousArtifact
import com.typesafe.tools.mima.plugin.MimaPlugin.mimaDefaultSettings
// Check binary compatibility against the released 1.2.0 artifact, with the
// explicit exclusions listed below. The whole expression evaluates to a
// Seq[Setting[_]], which sbt picks up as a bare statement in a .sbt file.
mimaDefaultSettings ++ Seq(
previousArtifact := Some("com.databricks" %% "spark-csv" % "1.2.0"),
binaryIssueFilters ++= Seq(
// These classes are not intended to be public interfaces:
ProblemFilters.excludePackage("com.databricks.spark.csv.CsvRelation"),
ProblemFilters.excludePackage("com.databricks.spark.csv.util.InferSchema"),
ProblemFilters.excludePackage("com.databricks.spark.sql.readers"),
ProblemFilters.excludePackage("com.databricks.spark.csv.util.TypeCast"),
// We allowed the private `CsvRelation` type to leak into the public method signature:
ProblemFilters.exclude[IncompatibleResultTypeProblem](
"com.databricks.spark.csv.DefaultSource.createRelation")
)
)