Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,8 @@ private[rest] class CreateSubmissionRequest extends SubmitRestProtocolRequest {
super.doValidate()
assert(sparkProperties != null, "No Spark properties set!")
assertFieldIsSet(appResource, "appResource")
assertFieldIsSet(appArgs, "appArgs")
assertFieldIsSet(environmentVariables, "environmentVariables")
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

What if there are no args or environment variables for a particular job? Is the caller expected to pass in an empty array?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Actually, if the caller didn't set "appArgs" or "environmentVariables", it caused a null pointer exception and left the Dispatcher inactive. So now I think the caller should pass an empty array; I could add a test for that case @susanxhuynh .

assertPropertyIsSet("spark.app.name")
assertPropertyIsBoolean("spark.driver.supervise")
assertPropertyIsNumeric("spark.driver.cores")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -86,6 +86,8 @@ class SubmitRestProtocolSuite extends SparkFunSuite {
message.clientSparkVersion = "1.2.3"
message.appResource = "honey-walnut-cherry.jar"
message.mainClass = "org.apache.spark.examples.SparkPie"
message.appArgs = Array("two slices")
message.environmentVariables = Map("PATH" -> "/dev/null")
val conf = new SparkConf(false)
conf.set("spark.app.name", "SparkPie")
message.sparkProperties = conf.getAll.toMap
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -77,10 +77,16 @@ private[mesos] class MesosSubmitRequestServlet(
private def buildDriverDescription(request: CreateSubmissionRequest): MesosDriverDescription = {
// Required fields, including the main class because python is not yet supported
val appResource = Option(request.appResource).getOrElse {
throw new SubmitRestMissingFieldException("Application jar is missing.")
throw new SubmitRestMissingFieldException("Application jar 'appResource' is missing.")
}
val mainClass = Option(request.mainClass).getOrElse {
throw new SubmitRestMissingFieldException("Main class is missing.")
throw new SubmitRestMissingFieldException("Main class 'mainClass' is missing.")
}
val appArgs = Option(request.appArgs).getOrElse {
throw new SubmitRestMissingFieldException("Application arguments 'appArgs' are missing.")
}
val environmentVariables = Option(request.environmentVariables).getOrElse {
throw new SubmitRestMissingFieldException("Environment variables 'environmentVariables' are missing.")
Copy link
Member

@felixcheung felixcheung Dec 5, 2017

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

so this is an API change — new required arguments for the request.
are folks ok with this?

(edit: it looks like previously-missing validation for required arguments)

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Yes, the arguments were assumed to be there, but validation was missing.

}

// Optional fields
Expand All @@ -91,8 +97,6 @@ private[mesos] class MesosSubmitRequestServlet(
val superviseDriver = sparkProperties.get("spark.driver.supervise")
val driverMemory = sparkProperties.get("spark.driver.memory")
val driverCores = sparkProperties.get("spark.driver.cores")
val appArgs = request.appArgs
val environmentVariables = request.environmentVariables
val name = request.sparkProperties.getOrElse("spark.app.name", mainClass)

// Construct driver description
Expand Down