27 changes: 0 additions & 27 deletions common/tags/src/test/java/org/apache/spark/tags/ChromeUITest.java

This file was deleted.

27 changes: 27 additions & 0 deletions core/src/test/scala/org/apache/spark/ui/UISeleniumSuite.scala
@@ -773,6 +773,33 @@ class UISeleniumSuite extends SparkFunSuite with WebBrowser with Matchers with B
}
}

test("SPARK-31534: text for tooltip should be escaped") {
withSpark(newSparkContext()) { sc =>
sc.setLocalProperty(CallSite.LONG_FORM, "collect at <console>:25")
sc.setLocalProperty(CallSite.SHORT_FORM, "collect at <console>:25")
sc.parallelize(1 to 10).collect

val driver = webDriver.asInstanceOf[HtmlUnitDriver]
driver.setJavascriptEnabled(true)

eventually(timeout(10.seconds), interval(50.milliseconds)) {
goToUi(sc, "/jobs")
val jobDesc =
driver.findElement(By.cssSelector("div[class='application-timeline-content']"))
jobDesc.getAttribute("data-title") should include ("collect at &lt;console&gt;:25")

goToUi(sc, "/jobs/job/?id=0")
val stageDesc = driver.findElement(By.cssSelector("div[class='job-timeline-content']"))
stageDesc.getAttribute("data-title") should include ("collect at &lt;console&gt;:25")

// Open DAG Viz.
driver.findElement(By.id("job-dag-viz")).click()
val nodeDesc = driver.findElement(By.cssSelector("g[class='node_0 node']"))
nodeDesc.getAttribute("name") should include ("collect at &lt;console&gt;:25")
}
}
}

  def goToUi(sc: SparkContext, path: String): Unit = {
    goToUi(sc.ui.get, path)
  }
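
The assertions above expect the call site "collect at <console>:25" to show up HTML-escaped in the data-title and name attributes. A minimal sketch of the transformation being checked (not Spark's actual escaping code; the object and method names are illustrative only):

// Minimal sketch, not Spark's implementation: the HTML escaping the assertions above expect.
// "EscapeSketch" and "escapeTooltip" are illustrative names, not part of the Spark codebase.
object EscapeSketch {
  private def escapeTooltip(s: String): String =
    s.replace("&", "&amp;")
      .replace("<", "&lt;")
      .replace(">", "&gt;")
      .replace("\"", "&quot;")

  def main(args: Array[String]): Unit = {
    // Prints: collect at &lt;console&gt;:25
    println(escapeTooltip("collect at <console>:25"))
  }
}
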
5 changes: 0 additions & 5 deletions dev/run-tests.py
@@ -33,9 +33,6 @@
from sparktestsupport.toposort import toposort_flatten
import sparktestsupport.modules as modules

always_excluded_tags = [
    "org.apache.spark.tags.ChromeUITest"
]

# -------------------------------------------------------------------------------------------------
# Functions for traversing module dependency graph
@@ -609,8 +606,6 @@ def main():
print("[info] Found the following changed modules:",
", ".join(x.name for x in changed_modules))

    excluded_tags.extend(always_excluded_tags)

# setup environment variables
# note - the 'root' module doesn't collect environment variables for all modules. Because the
# environment variables should not be set if a module is not changed, even if running the 'root'
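
For context on the removal above: org.apache.spark.tags.ChromeUITest is a test tag (defined as a Java annotation in common/tags, deleted in this change), and dropping it from always_excluded_tags means suites carrying that tag are no longer filtered out unconditionally. A hedged ScalaTest-style sketch of how such a tag is attached and excluded; the suite, test, and Tag object names here are hypothetical:

// Hedged sketch of tag-based exclusion in ScalaTest; not the deleted ChromeUITest.java itself.
import org.scalatest.Tag
import org.scalatest.funsuite.AnyFunSuite

// A Tag object whose name matches the fully qualified tag referenced above.
object ChromeUITestTag extends Tag("org.apache.spark.tags.ChromeUITest")

class ExampleChromeSuite extends AnyFunSuite {
  // Skipped when the ScalaTest runner is invoked with: -l org.apache.spark.tags.ChromeUITest
  test("a test that needs a real Chrome browser", ChromeUITestTag) {
    assert(1 + 1 == 2)
  }
}
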
2 changes: 0 additions & 2 deletions pom.xml
@@ -243,7 +243,6 @@
things breaking.
-->
<spark.test.home>${session.executionRootDirectory}</spark.test.home>
<spark.test.webdriver.chrome.driver></spark.test.webdriver.chrome.driver>

<CodeCacheSize>1g</CodeCacheSize>
</properties>
@@ -2513,7 +2512,6 @@
<spark.ui.enabled>false</spark.ui.enabled>
<spark.ui.showConsoleProgress>false</spark.ui.showConsoleProgress>
<spark.unsafe.exceptionOnMemoryLeak>true</spark.unsafe.exceptionOnMemoryLeak>
<spark.test.webdriver.chrome.driver>${spark.test.webdriver.chrome.driver}</spark.test.webdriver.chrome.driver>
<!-- Needed by sql/hive tests. -->
<test.src.tables>__not_used__</test.src.tables>
</systemProperties>
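
The two pom.xml properties removed above existed to forward a ChromeDriver binary path from the Maven build into the test JVM. A minimal sketch of how a test would have consumed it (illustrative object name; webdriver.chrome.driver is Selenium's standard system property, the rest is an assumption, not the deleted suite's code):

// Hedged sketch, not the deleted Chrome suite: reading the forwarded system property.
object ChromeDriverPathSketch {
  def main(args: Array[String]): Unit = {
    sys.props.get("spark.test.webdriver.chrome.driver").filter(_.nonEmpty) match {
      case Some(path) =>
        // Selenium's ChromeDriver resolves the driver binary from this standard property.
        System.setProperty("webdriver.chrome.driver", path)
      case None =>
        println("spark.test.webdriver.chrome.driver not set; Chrome-based UI tests would be skipped.")
    }
  }
}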