diff --git a/core/src/main/scala/org/apache/spark/ui/WebUI.scala b/core/src/main/scala/org/apache/spark/ui/WebUI.scala
index 81659426792c..54ae258ba565 100644
--- a/core/src/main/scala/org/apache/spark/ui/WebUI.scala
+++ b/core/src/main/scala/org/apache/spark/ui/WebUI.scala
@@ -17,13 +17,15 @@
 
 package org.apache.spark.ui
 
-import javax.servlet.http.HttpServletRequest
+import java.util.EnumSet
+import javax.servlet.DispatcherType
+import javax.servlet.http.{HttpServlet, HttpServletRequest}
 
 import scala.collection.mutable.ArrayBuffer
 import scala.collection.mutable.HashMap
 import scala.xml.Node
 
-import org.eclipse.jetty.servlet.ServletContextHandler
+import org.eclipse.jetty.servlet.{FilterHolder, FilterMapping, ServletContextHandler, ServletHolder}
 import org.json4s.JsonAST.{JNothing, JValue}
 
 import org.apache.spark.{SecurityManager, SparkConf, SSLOptions}
@@ -59,6 +61,10 @@ private[spark] abstract class WebUI(
   def getTabs: Seq[WebUITab] = tabs
   def getHandlers: Seq[ServletContextHandler] = handlers
 
+  def getDelegatingHandlers: Seq[DelegatingServletContextHandler] = {
+    handlers.map(new DelegatingServletContextHandler(_))
+  }
+
   /** Attaches a tab to this UI, along with all of its attached pages. */
   def attachTab(tab: WebUITab): Unit = {
     tab.pages.foreach(attachPage)
@@ -95,6 +101,14 @@ private[spark] abstract class WebUI(
     serverInfo.foreach(_.addHandler(handler, securityManager))
   }
 
+  /** Attaches a handler to this UI. */
+  def attachHandler(contextPath: String, httpServlet: HttpServlet, pathSpec: String): Unit = {
+    val ctx = new ServletContextHandler()
+    ctx.setContextPath(contextPath)
+    ctx.addServlet(new ServletHolder(httpServlet), pathSpec)
+    attachHandler(ctx)
+  }
+
   /** Detaches a handler from this UI. */
   def detachHandler(handler: ServletContextHandler): Unit = synchronized {
     handlers -= handler
@@ -193,3 +207,32 @@ private[spark] abstract class WebUIPage(var prefix: String) {
   def render(request: HttpServletRequest): Seq[Node]
   def renderJson(request: HttpServletRequest): JValue = JNothing
 }
+
+private[spark] class DelegatingServletContextHandler(handler: ServletContextHandler) {
+
+  def prependFilterMapping(
+      filterName: String,
+      spec: String,
+      types: EnumSet[DispatcherType]): Unit = {
+    val mapping = new FilterMapping()
+    mapping.setFilterName(filterName)
+    mapping.setPathSpec(spec)
+    mapping.setDispatcherTypes(types)
+    handler.getServletHandler.prependFilterMapping(mapping)
+  }
+
+  def addFilter(
+      filterName: String,
+      className: String,
+      filterParams: Map[String, String]): Unit = {
+    val filterHolder = new FilterHolder()
+    filterHolder.setName(filterName)
+    filterHolder.setClassName(className)
+    filterParams.foreach { case (k, v) => filterHolder.setInitParameter(k, v) }
+    handler.getServletHandler.addFilter(filterHolder)
+  }
+
+  def filterCount(): Int = {
+    handler.getServletHandler.getFilters.length
+  }
+}
diff --git a/resource-managers/yarn/src/main/scala/org/apache/spark/scheduler/cluster/YarnSchedulerBackend.scala b/resource-managers/yarn/src/main/scala/org/apache/spark/scheduler/cluster/YarnSchedulerBackend.scala
index a8472b49ae27..dda8172fb636 100644
--- a/resource-managers/yarn/src/main/scala/org/apache/spark/scheduler/cluster/YarnSchedulerBackend.scala
+++ b/resource-managers/yarn/src/main/scala/org/apache/spark/scheduler/cluster/YarnSchedulerBackend.scala
@@ -180,19 +180,9 @@ private[spark] abstract class YarnSchedulerBackend(
       }
       conf.set(UI_FILTERS, allFilters)
 
-      ui.getHandlers.map(_.getServletHandler()).foreach { h =>
-        val holder = new FilterHolder()
-        holder.setName(filterName)
-        holder.setClassName(filterName)
-        filterParams.foreach { case (k, v) => holder.setInitParameter(k, v) }
-        h.addFilter(holder)
-
-        val mapping = new FilterMapping()
-        mapping.setFilterName(filterName)
-        mapping.setPathSpec("/*")
-        mapping.setDispatcherTypes(EnumSet.allOf(classOf[DispatcherType]))
-
-        h.prependFilterMapping(mapping)
+      ui.getDelegatingHandlers.foreach { h =>
+        h.addFilter(filterName, filterName, filterParams)
+        h.prependFilterMapping(filterName, "/*", EnumSet.allOf(classOf[DispatcherType]))
       }
     }
   }
diff --git a/resource-managers/yarn/src/test/scala/org/apache/spark/scheduler/cluster/YarnSchedulerBackendSuite.scala b/resource-managers/yarn/src/test/scala/org/apache/spark/scheduler/cluster/YarnSchedulerBackendSuite.scala
index 583694412322..70f86aaa72f6 100644
--- a/resource-managers/yarn/src/test/scala/org/apache/spark/scheduler/cluster/YarnSchedulerBackendSuite.scala
+++ b/resource-managers/yarn/src/test/scala/org/apache/spark/scheduler/cluster/YarnSchedulerBackendSuite.scala
@@ -101,9 +101,9 @@ class YarnSchedulerBackendSuite extends SparkFunSuite with MockitoSugar with Loc
       yarnSchedulerBackend.addWebUIFilter(classOf[TestFilter2].getName(),
         Map("responseCode" -> HttpServletResponse.SC_NOT_ACCEPTABLE.toString), "")
 
-      sc.ui.get.getHandlers.foreach { h =>
+      sc.ui.get.getDelegatingHandlers.foreach { h =>
         // Two filters above + security filter.
-        assert(h.getServletHandler().getFilters().length === 3)
+        assert(h.filterCount() === 3)
       }
 
       // The filter should have been added first in the chain, so we should get SC_NOT_ACCEPTABLE
@@ -117,11 +117,7 @@ class YarnSchedulerBackendSuite extends SparkFunSuite with MockitoSugar with Loc
         }
       }
 
-      val ctx = new ServletContextHandler()
-      ctx.setContextPath("/new-handler")
-      ctx.addServlet(new ServletHolder(servlet), "/")
-
-      sc.ui.get.attachHandler(ctx)
+      sc.ui.get.attachHandler("/new-handler", servlet, "/")
 
       val newUrl = new URL(sc.uiWebUrl.get + "/new-handler/")
       assert(TestUtils.httpResponseCode(newUrl) === HttpServletResponse.SC_NOT_ACCEPTABLE)
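
For context, a minimal usage sketch of the WebUI helpers introduced above, assuming an already started WebUI instance named ui; the servlet body, the "/example" context path, the "exampleFilter" name, and the "org.example.ExampleFilter" class are illustrative placeholders, not part of the patch:

import java.util.EnumSet
import javax.servlet.DispatcherType
import javax.servlet.http.{HttpServlet, HttpServletRequest, HttpServletResponse}

// Attach a servlet via the new attachHandler(contextPath, servlet, pathSpec) overload,
// without constructing a ServletContextHandler by hand.
val servlet = new HttpServlet() {
  override def doGet(req: HttpServletRequest, res: HttpServletResponse): Unit = {
    res.setStatus(HttpServletResponse.SC_OK)
  }
}
ui.attachHandler("/example", servlet, "/")

// Register a filter on every existing handler without touching Jetty types directly.
// "org.example.ExampleFilter" is a placeholder javax.servlet.Filter class name.
ui.getDelegatingHandlers.foreach { h =>
  h.addFilter("exampleFilter", "org.example.ExampleFilter", Map("param" -> "value"))
  h.prependFilterMapping("exampleFilter", "/*", EnumSet.allOf(classOf[DispatcherType]))
}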