Merged
Changes from all commits
18 commits
ad9532a
[SPARK-31612][SQL][DOCS][FOLLOW-UP] Fix a few issues in SQL ref
huaxingao May 22, 2020
5a258b0
[SPARK-30915][SS] CompactibleFileStreamLog: Avoid reading the metadat…
HeartSaVioR May 22, 2020
7ca73f0
[SPARK-29854][SQL][TESTS] Add tests to check lpad/rpad throw an excep…
maropu May 22, 2020
721cba5
[SPARK-31791][CORE][TEST] Improve cache block migration test reliability
holdenk May 23, 2020
fbb3144
[SPARK-31642] Add Pagination Support for Structured Streaming Page
iRakson May 23, 2020
64ffc66
[SPARK-31786][K8S][BUILD] Upgrade kubernetes-client to 4.9.2
dongjoon-hyun May 23, 2020
9fdc2a0
[SPARK-31793][SQL] Reduce the memory usage in file scan location meta…
gengliangwang May 23, 2020
8441e93
Revert "[SPARK-31756][WEBUI] Add real headless browser support for UI…
sarutak May 24, 2020
cf7463f
[SPARK-31761][SQL] cast integer to Long to avoid IntegerOverflow for …
sandeep-katta May 24, 2020
d0fe433
[SPARK-31768][ML] add getMetrics in Evaluators
huaxingao May 24, 2020
753636e
[SPARK-31807][INFRA] Use python 3 style in release-build.sh
williamhyun May 25, 2020
a61911c
[SPARK-31788][CORE][PYTHON] Fix UnionRDD of PairRDDs
May 25, 2020
b90e10c
[SPARK-31377][SQL][TEST] Added unit tests to 'number of output rows m…
sririshindra May 25, 2020
7f36310
[SPARK-31802][SQL] Format Java date-time types in `Row.jsonValue` dir…
MaxGekk May 25, 2020
d400777
[SPARK-31734][ML][PYSPARK] Add weight support in ClusteringEvaluator
huaxingao May 25, 2020
0df8dd6
[SPARK-30352][SQL] DataSourceV2: Add CURRENT_CATALOG function
yaooqinn May 25, 2020
92685c0
[SPARK-31755][SQL][FOLLOWUP] Update date-time, CSV and JSON benchmark…
MaxGekk May 25, 2020
695cb61
[SPARK-31771][SQL] Disable Narrow TextStyle for datetime pattern 'G/M…
yaooqinn May 25, 2020
27 changes: 0 additions & 27 deletions common/tags/src/test/java/org/apache/spark/tags/ChromeUITest.java

This file was deleted.

18 changes: 18 additions & 0 deletions core/src/main/scala/org/apache/spark/util/Utils.scala
@@ -2904,6 +2904,24 @@ private[spark] object Utils extends Logging {
     props.forEach((k, v) => resultProps.put(k, v))
     resultProps
   }
+
+  /**
+   * Convert a sequence of `Path`s to a metadata string. When the length of the metadata
+   * string exceeds `stopAppendingThreshold`, stop appending paths to save memory.
+   */
+  def buildLocationMetadata(paths: Seq[Path], stopAppendingThreshold: Int): String = {
+    val metadata = new StringBuilder("[")
+    var index: Int = 0
+    while (index < paths.length && metadata.length < stopAppendingThreshold) {
+      if (index > 0) {
+        metadata.append(", ")
+      }
+      metadata.append(paths(index).toString)
+      index += 1
+    }
+    metadata.append("]")
+    metadata.toString
+  }
 }

 private[util] object CallerContext extends Logging {
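For reference, a runnable sketch of the truncation behavior, with expected outputs taken from the pathsToMetadata test added later in this PR. The demo object and its package are hypothetical; some package under org.apache.spark is needed because Utils is private[spark].

package org.apache.spark.demo // hypothetical; required since Utils is private[spark]

import org.apache.hadoop.fs.Path

import org.apache.spark.util.Utils

object BuildLocationMetadataDemo {
  def main(args: Array[String]): Unit = {
    val paths = (0 to 4).map(i => new Path(s"path$i"))
    // Appending stops once the accumulated length reaches the threshold,
    // so the result is a truncated summary rather than the full list.
    println(Utils.buildLocationMetadata(paths, 10)) // [path0, path1]
    println(Utils.buildLocationMetadata(paths, 25)) // [path0, path1, path2, path3]
  }
}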
core/src/test/scala/org/apache/spark/scheduler/WorkerDecommissionSuite.scala
@@ -22,7 +22,8 @@ import java.util.concurrent.Semaphore
 import scala.concurrent.TimeoutException
 import scala.concurrent.duration._

-import org.apache.spark.{LocalSparkContext, SparkConf, SparkContext, SparkException, SparkFunSuite}
+import org.apache.spark.{LocalSparkContext, SparkConf, SparkContext, SparkException, SparkFunSuite,
+  TestUtils}
 import org.apache.spark.internal.config
 import org.apache.spark.scheduler.cluster.StandaloneSchedulerBackend
 import org.apache.spark.util.{RpcUtils, SerializableBuffer, ThreadUtils}
@@ -48,35 +49,38 @@ class WorkerDecommissionSuite extends SparkFunSuite with LocalSparkContext {

   test("verify a task with all workers decommissioned succeeds") {
     val input = sc.parallelize(1 to 10)
-    // Do a count to wait for the executors to be registered.
-    input.count()
-    val sleepyRdd = input.mapPartitions{ x =>
-      Thread.sleep(50)
-      x
-    }
     // Listen for the job
     val sem = new Semaphore(0)
     sc.addSparkListener(new SparkListener {
       override def onTaskStart(taskStart: SparkListenerTaskStart): Unit = {
         sem.release()
       }
     })
+    TestUtils.waitUntilExecutorsUp(sc = sc,
+      numExecutors = 2,
+      timeout = 10000) // 10s
+    val sleepyRdd = input.mapPartitions{ x =>
+      Thread.sleep(5000) // 5s
+      x
+    }
     // Start the task.
     val asyncCount = sleepyRdd.countAsync()
     // Wait for the job to have started
     sem.acquire(1)
+    // Give it time to make it to the worker otherwise we'll block
+    Thread.sleep(2000) // 2s
     // Decommission all the executors, this should not halt the current task.
     // decom.sh message passing is tested manually.
     val sched = sc.schedulerBackend.asInstanceOf[StandaloneSchedulerBackend]
     val execs = sched.getExecutorIds()
     execs.foreach(execId => sched.decommissionExecutor(execId))
-    val asyncCountResult = ThreadUtils.awaitResult(asyncCount, 10.seconds)
+    val asyncCountResult = ThreadUtils.awaitResult(asyncCount, 20.seconds)
     assert(asyncCountResult === 10)
     // Try and launch task after decommissioning, this should fail
     val postDecommissioned = input.map(x => x)
     val postDecomAsyncCount = postDecommissioned.countAsync()
     val thrown = intercept[java.util.concurrent.TimeoutException]{
-      val result = ThreadUtils.awaitResult(postDecomAsyncCount, 10.seconds)
+      val result = ThreadUtils.awaitResult(postDecomAsyncCount, 20.seconds)
     }
     assert(postDecomAsyncCount.isCompleted === false,
       "After exec decommission new task could not launch")
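The key reliability change is gating on real executor registration and task start instead of timing guesses. A condensed, standalone sketch of that listener-plus-semaphore pattern (the object name and package are hypothetical; the calls mirror the suite above, and TestUtils is private[spark], hence the package choice):

package org.apache.spark.demo // hypothetical; TestUtils is private[spark]

import java.util.concurrent.Semaphore

import scala.concurrent.duration._

import org.apache.spark.{SparkConf, SparkContext, TestUtils}
import org.apache.spark.scheduler.{SparkListener, SparkListenerTaskStart}
import org.apache.spark.util.ThreadUtils

object DecommissionGatingSketch {
  def main(args: Array[String]): Unit = {
    // local-cluster gives real separate executors, as in the suite's setup.
    val sc = new SparkContext(
      new SparkConf().setMaster("local-cluster[2, 1, 1024]").setAppName("sketch"))
    try {
      // Wait for executor registration explicitly instead of a warm-up count().
      TestUtils.waitUntilExecutorsUp(sc = sc, numExecutors = 2, timeout = 10000)
      val taskStarted = new Semaphore(0)
      sc.addSparkListener(new SparkListener {
        override def onTaskStart(taskStart: SparkListenerTaskStart): Unit =
          taskStarted.release()
      })
      val asyncCount = sc.parallelize(1 to 10).countAsync()
      taskStarted.acquire() // proceed only once a task has actually started
      // (the suite decommissions the executors at this point)
      println(ThreadUtils.awaitResult(asyncCount, 20.seconds))
    } finally {
      sc.stop()
    }
  }
}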

This file was deleted.

This file was deleted.

27 changes: 27 additions & 0 deletions core/src/test/scala/org/apache/spark/ui/UISeleniumSuite.scala
@@ -773,6 +773,33 @@ class UISeleniumSuite extends SparkFunSuite with WebBrowser with Matchers with B
     }
   }

+  test("SPARK-31534: text for tooltip should be escaped") {
+    withSpark(newSparkContext()) { sc =>
+      sc.setLocalProperty(CallSite.LONG_FORM, "collect at <console>:25")
+      sc.setLocalProperty(CallSite.SHORT_FORM, "collect at <console>:25")
+      sc.parallelize(1 to 10).collect
+
+      val driver = webDriver.asInstanceOf[HtmlUnitDriver]
+      driver.setJavascriptEnabled(true)
+
+      eventually(timeout(10.seconds), interval(50.milliseconds)) {
+        goToUi(sc, "/jobs")
+        val jobDesc =
+          driver.findElement(By.cssSelector("div[class='application-timeline-content']"))
+        jobDesc.getAttribute("data-title") should include ("collect at &lt;console&gt;:25")
+
+        goToUi(sc, "/jobs/job/?id=0")
+        val stageDesc = driver.findElement(By.cssSelector("div[class='job-timeline-content']"))
+        stageDesc.getAttribute("data-title") should include ("collect at &lt;console&gt;:25")
+
+        // Open DAG Viz.
+        driver.findElement(By.id("job-dag-viz")).click()
+        val nodeDesc = driver.findElement(By.cssSelector("g[class='node_0 node']"))
+        nodeDesc.getAttribute("name") should include ("collect at &lt;console&gt;:25")
+      }
+    }
+  }
+
   def goToUi(sc: SparkContext, path: String): Unit = {
     goToUi(sc.ui.get, path)
   }
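The test pins down what SPARK-31534 fixed: call-site text must reach the timeline markup HTML-escaped. A minimal illustration of the transformation being asserted (the escapeForTooltip helper is hypothetical, not Spark's actual escaping code):

object TooltipEscapeSketch {
  // Hypothetical helper mirroring the behavior the test asserts:
  // '<' and '>' in the call site must be HTML-escaped in tooltip markup.
  def escapeForTooltip(s: String): String =
    s.replace("&", "&amp;").replace("<", "&lt;").replace(">", "&gt;")

  def main(args: Array[String]): Unit = {
    val callSite = "collect at <console>:25"
    assert(escapeForTooltip(callSite) == "collect at &lt;console&gt;:25")
    println(escapeForTooltip(callSite))
  }
}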
8 changes: 8 additions & 0 deletions core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
@@ -1301,6 +1301,14 @@ class UtilsSuite extends SparkFunSuite with ResetSystemProperties with Logging {
       assert(Utils.trimExceptCRLF(s"b${s}b") === s"b${s}b")
     }
   }
+
+  test("pathsToMetadata") {
+    val paths = (0 to 4).map(i => new Path(s"path$i"))
+    assert(Utils.buildLocationMetadata(paths, 5) == "[path0]")
+    assert(Utils.buildLocationMetadata(paths, 10) == "[path0, path1]")
+    assert(Utils.buildLocationMetadata(paths, 15) == "[path0, path1, path2]")
+    assert(Utils.buildLocationMetadata(paths, 25) == "[path0, path1, path2, path3]")
+  }
 }

 private class SimpleExtension
4 changes: 2 additions & 2 deletions dev/create-release/release-build.sh
@@ -380,7 +380,7 @@ if [[ "$1" == "publish-snapshot" ]]; then
   echo "</server></servers></settings>" >> $tmp_settings

   # Generate random port for Zinc
-  export ZINC_PORT=$(python -S -c "import random; print random.randrange(3030,4030)")
+  export ZINC_PORT=$(python -S -c "import random; print(random.randrange(3030,4030))")

   $MVN -DzincPort=$ZINC_PORT --settings $tmp_settings -DskipTests $SCALA_2_12_PROFILES $PUBLISH_PROFILES deploy

@@ -412,7 +412,7 @@ if [[ "$1" == "publish-release" ]]; then
   tmp_repo=$(mktemp -d spark-repo-XXXXX)

   # Generate random port for Zinc
-  export ZINC_PORT=$(python -S -c "import random; print random.randrange(3030,4030)")
+  export ZINC_PORT=$(python -S -c "import random; print(random.randrange(3030,4030))")

   # TODO: revisit for Scala 2.13 support
7 changes: 4 additions & 3 deletions dev/deps/spark-deps-hadoop-2.7-hive-1.2
@@ -93,6 +93,7 @@ jackson-core-asl/1.9.13//jackson-core-asl-1.9.13.jar
 jackson-core/2.10.0//jackson-core-2.10.0.jar
 jackson-databind/2.10.0//jackson-databind-2.10.0.jar
 jackson-dataformat-yaml/2.10.0//jackson-dataformat-yaml-2.10.0.jar
+jackson-datatype-jsr310/2.10.3//jackson-datatype-jsr310-2.10.3.jar
 jackson-jaxrs/1.9.13//jackson-jaxrs-1.9.13.jar
 jackson-mapper-asl/1.9.13//jackson-mapper-asl-1.9.13.jar
 jackson-module-jaxb-annotations/2.10.0//jackson-module-jaxb-annotations-2.10.0.jar
@@ -137,9 +138,9 @@ jsr305/3.0.0//jsr305-3.0.0.jar
 jta/1.1//jta-1.1.jar
 jul-to-slf4j/1.7.30//jul-to-slf4j-1.7.30.jar
 kryo-shaded/4.0.2//kryo-shaded-4.0.2.jar
-kubernetes-client/4.7.1//kubernetes-client-4.7.1.jar
-kubernetes-model-common/4.7.1//kubernetes-model-common-4.7.1.jar
-kubernetes-model/4.7.1//kubernetes-model-4.7.1.jar
+kubernetes-client/4.9.2//kubernetes-client-4.9.2.jar
+kubernetes-model-common/4.9.2//kubernetes-model-common-4.9.2.jar
+kubernetes-model/4.9.2//kubernetes-model-4.9.2.jar
 leveldbjni-all/1.8//leveldbjni-all-1.8.jar
 libfb303/0.9.3//libfb303-0.9.3.jar
 libthrift/0.12.0//libthrift-0.12.0.jar
7 changes: 4 additions & 3 deletions dev/deps/spark-deps-hadoop-2.7-hive-2.3
@@ -106,6 +106,7 @@ jackson-core-asl/1.9.13//jackson-core-asl-1.9.13.jar
 jackson-core/2.10.0//jackson-core-2.10.0.jar
 jackson-databind/2.10.0//jackson-databind-2.10.0.jar
 jackson-dataformat-yaml/2.10.0//jackson-dataformat-yaml-2.10.0.jar
+jackson-datatype-jsr310/2.10.3//jackson-datatype-jsr310-2.10.3.jar
 jackson-jaxrs/1.9.13//jackson-jaxrs-1.9.13.jar
 jackson-mapper-asl/1.9.13//jackson-mapper-asl-1.9.13.jar
 jackson-module-jaxb-annotations/2.10.0//jackson-module-jaxb-annotations-2.10.0.jar
@@ -152,9 +153,9 @@ jsr305/3.0.0//jsr305-3.0.0.jar
 jta/1.1//jta-1.1.jar
 jul-to-slf4j/1.7.30//jul-to-slf4j-1.7.30.jar
 kryo-shaded/4.0.2//kryo-shaded-4.0.2.jar
-kubernetes-client/4.7.1//kubernetes-client-4.7.1.jar
-kubernetes-model-common/4.7.1//kubernetes-model-common-4.7.1.jar
-kubernetes-model/4.7.1//kubernetes-model-4.7.1.jar
+kubernetes-client/4.9.2//kubernetes-client-4.9.2.jar
+kubernetes-model-common/4.9.2//kubernetes-model-common-4.9.2.jar
+kubernetes-model/4.9.2//kubernetes-model-4.9.2.jar
 leveldbjni-all/1.8//leveldbjni-all-1.8.jar
 libfb303/0.9.3//libfb303-0.9.3.jar
 libthrift/0.12.0//libthrift-0.12.0.jar
7 changes: 4 additions & 3 deletions dev/deps/spark-deps-hadoop-3.2-hive-2.3
@@ -105,6 +105,7 @@ jackson-core-asl/1.9.13//jackson-core-asl-1.9.13.jar
 jackson-core/2.10.0//jackson-core-2.10.0.jar
 jackson-databind/2.10.0//jackson-databind-2.10.0.jar
 jackson-dataformat-yaml/2.10.0//jackson-dataformat-yaml-2.10.0.jar
+jackson-datatype-jsr310/2.10.3//jackson-datatype-jsr310-2.10.3.jar
 jackson-jaxrs-base/2.9.5//jackson-jaxrs-base-2.9.5.jar
 jackson-jaxrs-json-provider/2.9.5//jackson-jaxrs-json-provider-2.9.5.jar
 jackson-mapper-asl/1.9.13//jackson-mapper-asl-1.9.13.jar
@@ -164,9 +165,9 @@ kerby-pkix/1.0.1//kerby-pkix-1.0.1.jar
 kerby-util/1.0.1//kerby-util-1.0.1.jar
 kerby-xdr/1.0.1//kerby-xdr-1.0.1.jar
 kryo-shaded/4.0.2//kryo-shaded-4.0.2.jar
-kubernetes-client/4.7.1//kubernetes-client-4.7.1.jar
-kubernetes-model-common/4.7.1//kubernetes-model-common-4.7.1.jar
-kubernetes-model/4.7.1//kubernetes-model-4.7.1.jar
+kubernetes-client/4.9.2//kubernetes-client-4.9.2.jar
+kubernetes-model-common/4.9.2//kubernetes-model-common-4.9.2.jar
+kubernetes-model/4.9.2//kubernetes-model-4.9.2.jar
 leveldbjni-all/1.8//leveldbjni-all-1.8.jar
 libfb303/0.9.3//libfb303-0.9.3.jar
 libthrift/0.12.0//libthrift-0.12.0.jar