2 changes: 1 addition & 1 deletion .github/workflows/connectors_test.yaml
@@ -7,7 +7,7 @@ jobs:
strategy:
matrix:
# These Scala versions must match those in the build.sbt
-scala: [2.13.8, 2.12.17]
+scala: [2.13.13, 2.12.18]
steps:
- uses: actions/checkout@v2
- name: install java
2 changes: 1 addition & 1 deletion .github/workflows/kernel_test.yaml
@@ -4,7 +4,7 @@ jobs:
test:
runs-on: ubuntu-20.04
env:
-SCALA_VERSION: 2.12.17
+SCALA_VERSION: 2.12.18
steps:
- uses: actions/checkout@v3
- name: install java
2 changes: 1 addition & 1 deletion .github/workflows/spark_master_test.yaml
@@ -6,7 +6,7 @@ jobs:
strategy:
matrix:
# These Scala versions must match those in the build.sbt
-scala: [2.13.8]
+scala: [2.13.13]
env:
SCALA_VERSION: ${{ matrix.scala }}
steps:
2 changes: 1 addition & 1 deletion .github/workflows/spark_test.yaml
@@ -6,7 +6,7 @@ jobs:
strategy:
matrix:
# These Scala versions must match those in the build.sbt
-scala: [2.12.17, 2.13.8]
+scala: [2.12.18, 2.13.13]
env:
SCALA_VERSION: ${{ matrix.scala }}
steps:
2 changes: 1 addition & 1 deletion .github/workflows/unidoc.yaml
@@ -7,7 +7,7 @@
strategy:
matrix:
# These Scala versions must match those in the build.sbt
-scala: [2.13.8, 2.12.17]
+scala: [2.13.13, 2.12.18]
steps:
- name: install java
uses: actions/setup-java@v3
2 changes: 1 addition & 1 deletion benchmarks/build.sbt
@@ -15,7 +15,7 @@
*/

name := "benchmarks"
scalaVersion := "2.12.17"
scalaVersion := "2.12.18"

lazy val root = (project in file("."))
.settings(
14 changes: 7 additions & 7 deletions build.sbt
@@ -21,15 +21,15 @@ import Mima._
import Unidoc._

// Scala versions
val scala212 = "2.12.17"
val scala213 = "2.13.8"
val scala212 = "2.12.18"
val scala213 = "2.13.13"
val all_scala_versions = Seq(scala212, scala213)

// Due to how publishArtifact is determined for javaOnlyReleaseSettings, incl. storage
// It was necessary to change default_scala_version to scala213 in build.sbt
// to build the project with Scala 2.13 only
// As a setting, it's possible to set it on command line easily
-// sbt 'set default_scala_version := 2.13.8' [commands]
+// sbt 'set default_scala_version := 2.13.13' [commands]
// FIXME Why not use scalaVersion?
val default_scala_version = settingKey[String]("Default Scala version")
Global / default_scala_version := scala212
@@ -200,7 +200,7 @@ lazy val spark = (project in file("spark"))
// Test deps
"org.scalatest" %% "scalatest" % scalaTestVersion % "test",
"org.scalatestplus" %% "scalacheck-1-15" % "3.2.9.0" % "test",
"junit" % "junit" % "4.12" % "test",
"junit" % "junit" % "4.13.2" % "test",
"com.novocode" % "junit-interface" % "0.11" % "test",
"org.apache.spark" %% "spark-catalyst" % sparkVersion.value % "test" classifier "tests",
"org.apache.spark" %% "spark-core" % sparkVersion.value % "test" classifier "tests",
@@ -315,7 +315,7 @@ lazy val sharing = (project in file("sharing"))
// Test deps
"org.scalatest" %% "scalatest" % scalaTestVersion % "test",
"org.scalatestplus" %% "scalacheck-1-15" % "3.2.9.0" % "test",
"junit" % "junit" % "4.12" % "test",
"junit" % "junit" % "4.13.2" % "test",
"com.novocode" % "junit-interface" % "0.11" % "test",
"org.apache.spark" %% "spark-catalyst" % defaultSparkVersion % "test" classifier "tests",
"org.apache.spark" %% "spark-core" % defaultSparkVersion % "test" classifier "tests",
@@ -337,7 +337,7 @@ lazy val kernelApi = (project in file("kernel/kernel-api"))

"com.fasterxml.jackson.core" % "jackson-databind" % "2.13.5" % "test",
"org.scalatest" %% "scalatest" % scalaTestVersion % "test",
"junit" % "junit" % "4.13" % "test",
"junit" % "junit" % "4.13.2" % "test",
"com.novocode" % "junit-interface" % "0.11" % "test",
"org.slf4j" % "slf4j-log4j12" % "1.7.36" % "test"
),
@@ -364,7 +364,7 @@ lazy val kernelDefaults = (project in file("kernel/kernel-defaults"))
"org.apache.parquet" % "parquet-hadoop" % "1.12.3",

"org.scalatest" %% "scalatest" % scalaTestVersion % "test",
"junit" % "junit" % "4.13" % "test",
"junit" % "junit" % "4.13.2" % "test",
"commons-io" % "commons-io" % "2.8.0" % "test",
"com.novocode" % "junit-interface" % "0.11" % "test",
"org.slf4j" % "slf4j-log4j12" % "1.7.36" % "test",
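
For context on the build.sbt hunks above: the two Scala versions are pinned as vals, and default_scala_version is an sbt settingKey so the default can be switched from the command line, as the comment in the first hunk notes. Below is a minimal, hypothetical sketch of that pattern; only the names quoted from the diff are taken from the project, everything else is an assumption, not the project's actual build.sbt.

// Hedged build.sbt-style sketch; names beyond those in the hunks above are assumed.
val scala212 = "2.12.18"
val scala213 = "2.13.13"
val all_scala_versions = Seq(scala212, scala213)

// A settingKey lets the default Scala version be overridden per sbt invocation
// (via sbt's `set` command) instead of editing build.sbt.
val default_scala_version = settingKey[String]("Default Scala version")
Global / default_scala_version := scala212

lazy val root = (project in file("."))
  .settings(
    scalaVersion := default_scala_version.value, // resolves to the Global value by scope delegation
    crossScalaVersions := all_scala_versions     // "+" commands build against both versions
  )
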
2 changes: 1 addition & 1 deletion connectors/.github/workflows/test.yaml
@@ -6,7 +6,7 @@ jobs:
runs-on: ubuntu-20.04
strategy:
matrix:
-scala: [2.13.8, 2.12.17, 2.11.12]
+scala: [2.13.13, 2.12.18, 2.11.12]
steps:
- uses: actions/checkout@v2
- name: install java
10 changes: 5 additions & 5 deletions connectors/examples/build.sbt
@@ -18,11 +18,11 @@ name := "examples"
organization := "com.examples"
organizationName := "examples"

scalaVersion := "2.12.17"
scalaVersion := "2.12.18"
version := "0.1.0"

lazy val commonSettings = Seq(
crossScalaVersions := Seq("2.13.8", "2.12.17", "2.11.12"),
crossScalaVersions := Seq("2.13.13", "2.12.18", "2.11.12"),
resolvers += Resolver.mavenLocal,
libraryDependencies ++= Seq(
"io.delta" %% "delta-standalone" % getStandaloneVersion(),
@@ -47,14 +47,14 @@ lazy val extraMavenRepo = sys.env.get("EXTRA_MAVEN_REPO").toSeq.map { repo =>

lazy val convertToDelta = (project in file("convert-to-delta")) settings (
name := "convert",
scalaVersion := "2.12.17",
scalaVersion := "2.12.18",
commonSettings,
extraMavenRepo
)

lazy val helloWorld = (project in file("hello-world")) settings (
name := "hello",
scalaVersion := "2.12.17",
scalaVersion := "2.12.18",
commonSettings,
extraMavenRepo
)
@@ -63,7 +63,7 @@ val flinkVersion = "1.16.1"
val flinkHadoopVersion = "3.1.0"
lazy val flinkExample = (project in file("flink-example")) settings (
name := "flink",
scalaVersion := "2.12.17",
scalaVersion := "2.12.18",
commonSettings,
extraMavenRepo,
resolvers += Resolver.mavenLocal,
8 changes: 4 additions & 4 deletions connectors/examples/run_examples.py
@@ -108,8 +108,8 @@ def __exit__(self, tpe, value, traceback):
by running the following commands in the root connectors folder.

build/sbt '++2.11.12 publishM2'
-build/sbt '++2.12.17 publishM2'
-build/sbt '++2.13.8 publishM2'
+build/sbt '++2.12.18 publishM2'
+build/sbt '++2.13.13 publishM2'
"""

# get the version of the package
@@ -146,5 +146,5 @@ def __exit__(self, tpe, value, traceback):
run_maven_proj(path.join(root_dir, dir), className, args.version, args.maven_repo, "2.13")

run_sbt_proj(root_dir, proj, className, args.version, args.maven_repo, "2.11.12")
run_sbt_proj(root_dir, proj, className, args.version, args.maven_repo, "2.12.17")
run_sbt_proj(root_dir, proj, className, args.version, args.maven_repo, "2.13.8")
run_sbt_proj(root_dir, proj, className, args.version, args.maven_repo, "2.12.18")
run_sbt_proj(root_dir, proj, className, args.version, args.maven_repo, "2.13.13")
4 changes: 2 additions & 2 deletions examples/scala/build.sbt
@@ -18,8 +18,8 @@ name := "example"
organization := "com.example"
organizationName := "example"

val scala212 = "2.12.17"
val scala213 = "2.13.8"
val scala212 = "2.12.18"
val scala213 = "2.13.13"
val deltaVersion = "3.0.0"
val icebergVersion = "1.4.1"

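
As an aside on the example build above, these Scala patch bumps do not change which published artifacts sbt resolves: the %% operator appends only the binary suffix (_2.12 or _2.13). A small illustrative sketch follows; the artifact names (delta-spark, iceberg-core) are assumptions for illustration, not taken from this file.

// Hypothetical example build using the version vals shown in the hunk above.
val scala212 = "2.12.18"
val scala213 = "2.13.13"
val deltaVersion = "3.0.0"
val icebergVersion = "1.4.1"

lazy val example = (project in file("."))
  .settings(
    scalaVersion := scala212,
    crossScalaVersions := Seq(scala212, scala213),
    libraryDependencies ++= Seq(
      // %% adds the Scala binary suffix, so 2.12.17 -> 2.12.18 still resolves _2.12 artifacts.
      "io.delta" %% "delta-spark" % deltaVersion,
      // Plain % is for Java-only artifacts with no Scala suffix.
      "org.apache.iceberg" % "iceberg-core" % icebergVersion
    )
  )
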
2 changes: 1 addition & 1 deletion project/build.properties
@@ -33,4 +33,4 @@
# limitations under the License.
#

-sbt.version=1.5.5
+sbt.version=1.9.9
4 changes: 2 additions & 2 deletions project/plugins.sbt
@@ -24,13 +24,13 @@ addSbtPlugin("com.github.sbt" % "sbt-unidoc" % "0.5.0")

addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "2.1.0")

addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.9.2")
addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.1.3")

addSbtPlugin("com.simplytyped" % "sbt-antlr4" % "0.8.3")

addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.9.15")

addSbtPlugin("org.scoverage" % "sbt-scoverage" % "2.0.6")
addSbtPlugin("org.scoverage" % "sbt-scoverage" % "2.0.11")
//Upgrade sbt-scoverage to 2.0.3+ because 2.0.0 is not compatible to Scala 2.12.17:
//sbt.librarymanagement.ResolveException: Error downloading org.scoverage:scalac-scoverage-plugin_2.12.17:2.0.0

2 changes: 1 addition & 1 deletion run-tests.py
@@ -67,7 +67,7 @@ def run_sbt_tests(root_dir, test_group, coverage, scala_version=None):
cmd += ["+ %s" % test_cmd] # build/sbt ... "+ project/test" ...
else:
# when no scala version is specified, run test with only the specified scala version
cmd += ["++ %s" % scala_version, test_cmd] # build/sbt ... "++ 2.13.8" "project/test" ...
cmd += ["++ %s" % scala_version, test_cmd] # build/sbt ... "++ 2.13.13" "project/test" ...

if coverage:
cmd += ["coverageAggregate", "coverageOff"]