Commit 899e1d7

Merge remote-tracking branch 'origin/master' into SPARK-7375
Conflicts: sql/core/src/main/scala/org/apache/spark/sql/execution/Exchange.scala
2 parents: 6a6bfce + c796be7

245 files changed: +12,347 −2,179 lines


.rat-excludes

Lines changed: 7 additions & 0 deletions
@@ -74,5 +74,12 @@ logs
 .*scalastyle-output.xml
 .*dependency-reduced-pom.xml
 known_translations
+json_expectation
+local-1422981759269/*
+local-1422981780767/*
+local-1425081759269/*
+local-1426533911241/*
+local-1426633911242/*
+local-1430917381534/*
 DESCRIPTION
 NAMESPACE

R/pkg/NAMESPACE

Lines changed: 0 additions & 4 deletions
@@ -45,8 +45,6 @@ exportMethods("cache",
 "showDF",
 "sortDF",
 "take",
-"toJSON",
-"toRDD",
 "unionAll",
 "unpersist",
 "where",
@@ -95,14 +93,12 @@ export("cacheTable",
 "createExternalTable",
 "dropTempTable",
 "jsonFile",
-"jsonRDD",
 "loadDF",
 "parquetFile",
 "sql",
 "table",
 "tableNames",
 "tables",
-"toDF",
 "uncacheTable")
 
 export("sparkRSQL.init",

R/pkg/R/DataFrame.R

Lines changed: 1 addition & 1 deletion
@@ -272,7 +272,7 @@ setMethod("names",
 setMethod("registerTempTable",
           signature(x = "DataFrame", tableName = "character"),
           function(x, tableName) {
-            callJMethod(x@sdf, "registerTempTable", tableName)
+            invisible(callJMethod(x@sdf, "registerTempTable", tableName))
           })
 
 #' insertInto

R/pkg/inst/profile/shell.R

Lines changed: 6 additions & 4 deletions
@@ -20,11 +20,13 @@
 .libPaths(c(file.path(home, "R", "lib"), .libPaths()))
 Sys.setenv(NOAWT=1)
 
-library(utils)
-library(SparkR)
-sc <- sparkR.init(Sys.getenv("MASTER", unset = ""))
+# Make sure SparkR package is the last loaded one
+old <- getOption("defaultPackages")
+options(defaultPackages = c(old, "SparkR"))
+
+sc <- SparkR::sparkR.init(Sys.getenv("MASTER", unset = ""))
 assign("sc", sc, envir=.GlobalEnv)
-sqlCtx <- sparkRSQL.init(sc)
+sqlCtx <- SparkR::sparkRSQL.init(sc)
 assign("sqlCtx", sqlCtx, envir=.GlobalEnv)
 cat("\n Welcome to SparkR!")
 cat("\n Spark context is available as sc, SQL context is available as sqlCtx\n")

assembly/pom.xml

Lines changed: 21 additions & 0 deletions
@@ -92,6 +92,27 @@
           <skip>true</skip>
         </configuration>
       </plugin>
+      <!-- zip pyspark archives to run python application on yarn mode -->
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-antrun-plugin</artifactId>
+        <executions>
+          <execution>
+            <phase>package</phase>
+            <goals>
+              <goal>run</goal>
+            </goals>
+          </execution>
+        </executions>
+        <configuration>
+          <target>
+            <delete dir="${basedir}/../python/lib/pyspark.zip"/>
+            <zip destfile="${basedir}/../python/lib/pyspark.zip">
+              <fileset dir="${basedir}/../python/" includes="pyspark/**/*"/>
+            </zip>
+          </target>
+        </configuration>
+      </plugin>
       <!-- Use the shade plugin to create a big JAR with all the dependencies -->
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>

core/pom.xml

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -228,6 +228,14 @@
228228
<artifactId>json4s-jackson_${scala.binary.version}</artifactId>
229229
<version>3.2.10</version>
230230
</dependency>
231+
<dependency>
232+
<groupId>com.sun.jersey</groupId>
233+
<artifactId>jersey-server</artifactId>
234+
</dependency>
235+
<dependency>
236+
<groupId>com.sun.jersey</groupId>
237+
<artifactId>jersey-core</artifactId>
238+
</dependency>
231239
<dependency>
232240
<groupId>org.apache.mesos</groupId>
233241
<artifactId>mesos</artifactId>

core/src/main/java/org/apache/spark/JobExecutionStatus.java

Lines changed: 7 additions & 1 deletion
@@ -17,9 +17,15 @@
 
 package org.apache.spark;
 
+import org.apache.spark.util.EnumUtil;
+
 public enum JobExecutionStatus {
   RUNNING,
   SUCCEEDED,
   FAILED,
-  UNKNOWN
+  UNKNOWN;
+
+  public static JobExecutionStatus fromString(String str) {
+    return EnumUtil.parseIgnoreCase(JobExecutionStatus.class, str);
+  }
 }
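
For reference, a minimal usage sketch (not part of the commit) of the new fromString helper; it assumes EnumUtil.parseIgnoreCase, added elsewhere in this commit, matches enum constant names case-insensitively and rejects unrecognized input:

import org.apache.spark.JobExecutionStatus;

// Hypothetical caller, e.g. translating a status string from a query parameter.
public class JobStatusLookupExample {
  public static void main(String[] args) {
    // Assumed case-insensitive match against the enum constants.
    JobExecutionStatus status = JobExecutionStatus.fromString("succeeded");
    System.out.println(status);  // SUCCEEDED
  }
}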

core/src/main/java/org/apache/spark/status/api/v1/ApplicationStatus.java

Lines changed: 30 additions & 0 deletions
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.status.api.v1;
+
+import org.apache.spark.util.EnumUtil;
+
+public enum ApplicationStatus {
+  COMPLETED,
+  RUNNING;
+
+  public static ApplicationStatus fromString(String str) {
+    return EnumUtil.parseIgnoreCase(ApplicationStatus.class, str);
+  }
+
+}

core/src/main/java/org/apache/spark/status/api/v1/StageStatus.java

Lines changed: 31 additions & 0 deletions
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.status.api.v1;
+
+import org.apache.spark.util.EnumUtil;
+
+public enum StageStatus {
+  ACTIVE,
+  COMPLETE,
+  FAILED,
+  PENDING;
+
+  public static StageStatus fromString(String str) {
+    return EnumUtil.parseIgnoreCase(StageStatus.class, str);
+  }
+}

core/src/main/java/org/apache/spark/status/api/v1/TaskSorting.java

Lines changed: 48 additions & 0 deletions
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.status.api.v1;
+
+import org.apache.spark.util.EnumUtil;
+
+import java.util.HashSet;
+import java.util.Set;
+
+public enum TaskSorting {
+  ID,
+  INCREASING_RUNTIME("runtime"),
+  DECREASING_RUNTIME("-runtime");
+
+  private final Set<String> alternateNames;
+  private TaskSorting(String... names) {
+    alternateNames = new HashSet<String>();
+    for (String n: names) {
+      alternateNames.add(n);
+    }
+  }
+
+  public static TaskSorting fromString(String str) {
+    String lower = str.toLowerCase();
+    for (TaskSorting t: values()) {
+      if (t.alternateNames.contains(lower)) {
+        return t;
+      }
+    }
+    return EnumUtil.parseIgnoreCase(TaskSorting.class, str);
+  }
+
+}
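
A quick sketch (not part of the commit) of how the alternate-name lookup above behaves: the signed "runtime" spellings hit the alternateNames set populated in the constructor, while bare constant names fall through to the assumed case-insensitive EnumUtil.parseIgnoreCase:

import org.apache.spark.status.api.v1.TaskSorting;

public class TaskSortingLookupExample {
  public static void main(String[] args) {
    // Matched via the alternateNames set ("-runtime" is registered on DECREASING_RUNTIME).
    System.out.println(TaskSorting.fromString("-runtime"));  // DECREASING_RUNTIME
    // No alternate name registered for ID, so this falls back to EnumUtil.
    System.out.println(TaskSorting.fromString("id"));        // ID
  }
}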
