Skip to content

Commit 60c6643

Browse files
committed
Rerun plan stability suite
2 parents 1b0553e + 077cf2a commit 60c6643

File tree

926 files changed

+67893
-63425
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below for content that may be hidden.

926 files changed

+67893
-63425
lines changed

.github/workflows/build_and_test.yml

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -126,7 +126,7 @@ jobs:
126126
- name: Install Python packages (Python 3.8)
127127
if: (contains(matrix.modules, 'sql') && !contains(matrix.modules, 'sql-'))
128128
run: |
129-
python3.8 -m pip install numpy 'pyarrow<3.0.0' pandas scipy xmlrunner
129+
python3.8 -m pip install numpy 'pyarrow<5.0.0' pandas scipy xmlrunner
130130
python3.8 -m pip list
131131
# Run the tests.
132132
- name: Run tests
@@ -217,7 +217,7 @@ jobs:
217217
# Ubuntu 20.04. See also SPARK-33162.
218218
- name: Install Python packages (Python 3.6)
219219
run: |
220-
python3.6 -m pip install numpy 'pyarrow<3.0.0' pandas scipy xmlrunner 'plotly>=4.8'
220+
python3.6 -m pip install numpy 'pyarrow<4.0.0' pandas scipy xmlrunner 'plotly>=4.8'
221221
python3.6 -m pip list
222222
- name: List Python packages (Python 3.9)
223223
run: |
@@ -388,7 +388,7 @@ jobs:
388388
# Jinja2 3.0.0+ causes error when building with Sphinx.
389389
# See also https://issues.apache.org/jira/browse/SPARK-35375.
390390
python3.6 -m pip install 'sphinx<3.1.0' mkdocs numpy pydata_sphinx_theme ipython nbsphinx numpydoc 'jinja2<3.0.0'
391-
python3.6 -m pip install sphinx_plotly_directive 'pyarrow<3.0.0' pandas 'plotly>=4.8'
391+
python3.6 -m pip install sphinx_plotly_directive 'pyarrow<5.0.0' pandas 'plotly>=4.8'
392392
apt-get update -y
393393
apt-get install -y ruby ruby-dev
394394
Rscript -e "install.packages(c('devtools', 'testthat', 'knitr', 'rmarkdown', 'roxygen2'), repos='https://cloud.r-project.org/')"
@@ -455,7 +455,7 @@ jobs:
455455
java-version: 11
456456
- name: Build with Maven
457457
run: |
458-
export MAVEN_OPTS="-Xss16m -Xmx2g -XX:ReservedCodeCacheSize=1g -Dorg.slf4j.simpleLogger.defaultLogLevel=WARN"
458+
export MAVEN_OPTS="-Xss64m -Xmx2g -XX:ReservedCodeCacheSize=1g -Dorg.slf4j.simpleLogger.defaultLogLevel=WARN"
459459
export MAVEN_CLI_OPTS="--no-transfer-progress"
460460
# It uses Maven's 'install' intentionally, see https://github.com/apache/spark/pull/26414.
461461
./build/mvn $MAVEN_CLI_OPTS -DskipTests -Pyarn -Pmesos -Pkubernetes -Phive -Phive-thriftserver -Phadoop-cloud -Djava.version=11 install

.gitignore

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -15,10 +15,10 @@
1515
.ensime_cache/
1616
.ensime_lucene
1717
.generated-mima*
18-
# The star is required for further !.idea/ to work, see https://git-scm.com/docs/gitignore
19-
.idea/*
18+
# All the files under .idea/ are ignored. To add new files under .idea/ that are not in the VCS yet, please use `git add -f`
19+
.idea/
20+
# SPARK-35223: Add IssueNavigationLink to make IDEA support hyperlink on JIRA Ticket and GitHub PR on Git plugin.
2021
!.idea/vcs.xml
21-
python/.idea
2222
.idea_modules/
2323
.project
2424
.pydevproject

README.md

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,7 @@ and Structured Streaming for stream processing.
99

1010
<https://spark.apache.org/>
1111

12+
[![GitHub Action Build](https://github.com/apache/spark/actions/workflows/build_and_test.yml/badge.svg?branch=master)](https://github.com/apache/spark/actions/workflows/build_and_test.yml?query=branch%3Amaster)
1213
[![Jenkins Build](https://amplab.cs.berkeley.edu/jenkins/job/spark-master-test-sbt-hadoop-3.2/badge/icon)](https://amplab.cs.berkeley.edu/jenkins/job/spark-master-test-sbt-hadoop-3.2)
1314
[![AppVeyor Build](https://img.shields.io/appveyor/ci/ApacheSoftwareFoundation/spark/master.svg?style=plastic&logo=appveyor)](https://ci.appveyor.com/project/ApacheSoftwareFoundation/spark)
1415
[![PySpark Coverage](https://img.shields.io/badge/dynamic/xml.svg?label=pyspark%20coverage&url=https%3A%2F%2Fspark-test.github.io%2Fpyspark-coverage-site&query=%2Fhtml%2Fbody%2Fdiv%5B1%5D%2Fdiv%2Fh1%2Fspan&colorB=brightgreen&style=plastic)](https://spark-test.github.io/pyspark-coverage-site)

build/sbt

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -53,7 +53,7 @@ realpath () {
5353
declare -r noshare_opts="-Dsbt.global.base=project/.sbtboot -Dsbt.boot.directory=project/.boot -Dsbt.ivy.home=project/.ivy"
5454
declare -r sbt_opts_file=".sbtopts"
5555
declare -r etc_sbt_opts_file="/etc/sbt/sbtopts"
56-
declare -r default_sbt_opts="-Xss16m"
56+
declare -r default_sbt_opts="-Xss64m"
5757

5858
usage() {
5959
cat <<EOM

common/kvstore/src/test/java/org/apache/spark/util/kvstore/DBIteratorSuite.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -380,7 +380,7 @@ public void childIndexDescendingWithLast() throws Exception {
380380

381381
@Test
382382
public void testRefWithIntNaturalKey() throws Exception {
383-
LevelDBSuite.IntKeyType i = new LevelDBSuite.IntKeyType();
383+
IntKeyType i = new IntKeyType();
384384
i.key = 1;
385385
i.id = "1";
386386
i.values = Arrays.asList("1");
Lines changed: 47 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,47 @@
1+
/*
2+
* Licensed to the Apache Software Foundation (ASF) under one or more
3+
* contributor license agreements. See the NOTICE file distributed with
4+
* this work for additional information regarding copyright ownership.
5+
* The ASF licenses this file to You under the Apache License, Version 2.0
6+
* (the "License"); you may not use this file except in compliance with
7+
* the License. You may obtain a copy of the License at
8+
*
9+
* http://www.apache.org/licenses/LICENSE-2.0
10+
*
11+
* Unless required by applicable law or agreed to in writing, software
12+
* distributed under the License is distributed on an "AS IS" BASIS,
13+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14+
* See the License for the specific language governing permissions and
15+
* limitations under the License.
16+
*/
17+
18+
package org.apache.spark.util.kvstore;
19+
20+
import java.util.List;
21+
22+
public class IntKeyType {
23+
24+
@KVIndex
25+
public int key;
26+
27+
@KVIndex("id")
28+
public String id;
29+
30+
public List<String> values;
31+
32+
@Override
33+
public boolean equals(Object o) {
34+
if (o instanceof IntKeyType) {
35+
IntKeyType other = (IntKeyType) o;
36+
return key == other.key && id.equals(other.id) && values.equals(other.values);
37+
}
38+
return false;
39+
}
40+
41+
@Override
42+
public int hashCode() {
43+
return id.hashCode();
44+
}
45+
46+
}
47+

common/kvstore/src/test/java/org/apache/spark/util/kvstore/LevelDBSuite.java

Lines changed: 0 additions & 27 deletions
Original file line numberDiff line numberDiff line change
@@ -338,31 +338,4 @@ private int countKeys(Class<?> type) throws Exception {
338338

339339
return count;
340340
}
341-
342-
public static class IntKeyType {
343-
344-
@KVIndex
345-
public int key;
346-
347-
@KVIndex("id")
348-
public String id;
349-
350-
public List<String> values;
351-
352-
@Override
353-
public boolean equals(Object o) {
354-
if (o instanceof IntKeyType) {
355-
IntKeyType other = (IntKeyType) o;
356-
return key == other.key && id.equals(other.id) && values.equals(other.values);
357-
}
358-
return false;
359-
}
360-
361-
@Override
362-
public int hashCode() {
363-
return id.hashCode();
364-
}
365-
366-
}
367-
368341
}
Lines changed: 31 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,31 @@
1+
/*
2+
* Licensed to the Apache Software Foundation (ASF) under one or more
3+
* contributor license agreements. See the NOTICE file distributed with
4+
* this work for additional information regarding copyright ownership.
5+
* The ASF licenses this file to You under the Apache License, Version 2.0
6+
* (the "License"); you may not use this file except in compliance with
7+
* the License. You may obtain a copy of the License at
8+
*
9+
* http://www.apache.org/licenses/LICENSE-2.0
10+
*
11+
* Unless required by applicable law or agreed to in writing, software
12+
* distributed under the License is distributed on an "AS IS" BASIS,
13+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14+
* See the License for the specific language governing permissions and
15+
* limitations under the License.
16+
*/
17+
18+
package org.apache.spark.network.client;
19+
20+
/**
21+
* A basic callback. This is extended by {@link RpcResponseCallback} and
22+
* {@link MergedBlockMetaResponseCallback} so that both RpcRequests and MergedBlockMetaRequests
23+
 * can be handled in {@link TransportResponseHandler} in a similar way.
24+
*
25+
* @since 3.2.0
26+
*/
27+
public interface BaseResponseCallback {
28+
29+
/** Exception either propagated from server or raised on client side. */
30+
void onFailure(Throwable e);
31+
}
Lines changed: 41 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,41 @@
1+
/*
2+
* Licensed to the Apache Software Foundation (ASF) under one or more
3+
* contributor license agreements. See the NOTICE file distributed with
4+
* this work for additional information regarding copyright ownership.
5+
* The ASF licenses this file to You under the Apache License, Version 2.0
6+
* (the "License"); you may not use this file except in compliance with
7+
* the License. You may obtain a copy of the License at
8+
*
9+
* http://www.apache.org/licenses/LICENSE-2.0
10+
*
11+
* Unless required by applicable law or agreed to in writing, software
12+
* distributed under the License is distributed on an "AS IS" BASIS,
13+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14+
* See the License for the specific language governing permissions and
15+
* limitations under the License.
16+
*/
17+
18+
package org.apache.spark.network.client;
19+
20+
import org.apache.spark.network.buffer.ManagedBuffer;
21+
22+
/**
23+
* Callback for the result of a single
24+
* {@link org.apache.spark.network.protocol.MergedBlockMetaRequest}.
25+
*
26+
* @since 3.2.0
27+
*/
28+
public interface MergedBlockMetaResponseCallback extends BaseResponseCallback {
29+
/**
30+
* Called upon receipt of a particular merged block meta.
31+
*
32+
* The given buffer will initially have a refcount of 1, but will be release()'d as soon as this
33+
* call returns. You must therefore either retain() the buffer or copy its contents before
34+
* returning.
35+
*
36+
* @param numChunks number of merged chunks in the merged block
37+
 * @param buffer the buffer containing an array of roaring bitmaps. The i-th roaring bitmap
38+
* contains the mapIds that were merged to the i-th merged chunk.
39+
*/
40+
void onSuccess(int numChunks, ManagedBuffer buffer);
41+
}

common/network-common/src/main/java/org/apache/spark/network/client/RpcResponseCallback.java

Lines changed: 1 addition & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -23,15 +23,12 @@
2323
* Callback for the result of a single RPC. This will be invoked once with either success or
2424
* failure.
2525
*/
26-
public interface RpcResponseCallback {
26+
public interface RpcResponseCallback extends BaseResponseCallback {
2727
/**
2828
* Successful serialized result from server.
2929
*
3030
* After `onSuccess` returns, `response` will be recycled and its content will become invalid.
3131
* Please copy the content of `response` if you want to use it after `onSuccess` returns.
3232
*/
3333
void onSuccess(ByteBuffer response);
34-
35-
/** Exception either propagated from server or raised on client side. */
36-
void onFailure(Throwable e);
3734
}

0 commit comments

Comments
 (0)