Skip to content

Commit d364ac4

Browse files
Merge branch 'apache:trunk' into trunk
2 parents 0fba78f + 55a5769 commit d364ac4

File tree

233 files changed

+13741
-992
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

233 files changed

+13741
-992
lines changed

LICENSE-binary

+4-4
Original file line numberDiff line numberDiff line change
@@ -229,8 +229,8 @@ com.fasterxml.woodstox:woodstox-core:5.4.0
229229
com.github.ben-manes.caffeine:caffeine:2.9.3
230230
com.github.davidmoten:rxjava-extras:0.8.0.17
231231
com.github.stephenc.jcip:jcip-annotations:1.0-1
232-
com.google:guice:4.0
233-
com.google:guice-servlet:4.0
232+
com.google:guice:5.1.0
233+
com.google:guice-servlet:5.1.0
234234
com.google.api.grpc:proto-google-common-protos:1.0.0
235235
com.google.code.gson:2.9.0
236236
com.google.errorprone:error_prone_annotations:2.2.0
@@ -362,7 +362,7 @@ org.objenesis:objenesis:2.6
362362
org.xerial.snappy:snappy-java:1.1.10.4
363363
org.yaml:snakeyaml:2.0
364364
org.wildfly.openssl:wildfly-openssl:1.1.3.Final
365-
software.amazon.awssdk:bundle:jar:2.24.6
365+
software.amazon.awssdk:bundle:jar:2.25.53
366366

367367

368368
--------------------------------------------------------------------------------
@@ -379,7 +379,7 @@ hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/fuse-dfs/util/tree
379379
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/compat/{fstatat|openat|unlinkat}.h
380380

381381
com.github.luben:zstd-jni:1.5.2-1
382-
dnsjava:dnsjava:2.1.7
382+
dnsjava:dnsjava:3.6.1
383383
org.codehaus.woodstox:stax2-api:4.2.1
384384

385385

dev-support/bin/create-release

+2-2
Original file line numberDiff line numberDiff line change
@@ -205,7 +205,7 @@ function set_defaults
205205
DOCKERRAN=false
206206

207207
CPU_ARCH=$(echo "$MACHTYPE" | cut -d- -f1)
208-
if [ "$CPU_ARCH" = "aarch64" ]; then
208+
if [[ "$CPU_ARCH" = "aarch64" || "$CPU_ARCH" = "arm64" ]]; then
209209
DOCKERFILE="${BASEDIR}/dev-support/docker/Dockerfile_aarch64"
210210
fi
211211

@@ -514,7 +514,7 @@ function dockermode
514514

515515
# we always force build with the OpenJDK JDK
516516
# but with the correct version
517-
if [ "$CPU_ARCH" = "aarch64" ]; then
517+
if [[ "$CPU_ARCH" = "aarch64" || "$CPU_ARCH" = "arm64" ]]; then
518518
echo "ENV JAVA_HOME /usr/lib/jvm/java-${JVM_VERSION}-openjdk-arm64"
519519
else
520520
echo "ENV JAVA_HOME /usr/lib/jvm/java-${JVM_VERSION}-openjdk-amd64"

dev-support/docker/Dockerfile_centos_7

+14-2
Original file line numberDiff line numberDiff line change
@@ -30,6 +30,13 @@ COPY pkg-resolver pkg-resolver
3030
RUN chmod a+x pkg-resolver/*.sh pkg-resolver/*.py \
3131
&& chmod a+r pkg-resolver/*.json
3232

33+
######
34+
# Centos 7 has reached its EOL and the packages
35+
# are no longer available on mirror.centos.org site.
36+
# Please see https://www.centos.org/centos-linux-eol/
37+
######
38+
RUN pkg-resolver/set-vault-as-baseurl-centos.sh centos:7
39+
3340
######
3441
# Install packages from yum
3542
######
@@ -38,8 +45,13 @@ RUN yum update -y \
3845
&& yum groupinstall -y "Development Tools" \
3946
&& yum install -y \
4047
centos-release-scl \
41-
python3 \
42-
&& yum install -y $(pkg-resolver/resolve.py centos:7)
48+
python3
49+
50+
# Apply the script again because centos-release-scl creates new YUM repo files
51+
RUN pkg-resolver/set-vault-as-baseurl-centos.sh centos:7
52+
53+
# hadolint ignore=DL3008,SC2046
54+
RUN yum install -y $(pkg-resolver/resolve.py centos:7)
4355

4456
# Set GCC 9 as the default C/C++ compiler
4557
RUN echo "source /opt/rh/devtoolset-9/enable" >> /etc/bashrc

dev-support/docker/pkg-resolver/install-yasm.sh

+1-1
Original file line numberDiff line numberDiff line change
@@ -40,7 +40,7 @@ fi
4040

4141
if [ "$version_to_install" == "1.2.0-4" ]; then
4242
mkdir -p /tmp/yasm &&
43-
curl -L -s -S https://download-ib01.fedoraproject.org/pub/epel/7/x86_64/Packages/y/yasm-1.2.0-4.el7.x86_64.rpm \
43+
curl -L -s -S https://archives.fedoraproject.org/pub/archive/epel/7/x86_64/Packages/y/yasm-1.2.0-4.el7.x86_64.rpm \
4444
-o /tmp/yasm-1.2.0-4.el7.x86_64.rpm &&
4545
rpm -Uvh /tmp/yasm-1.2.0-4.el7.x86_64.rpm
4646
else

dev-support/docker/pkg-resolver/set-vault-as-baseurl-centos.sh

+1-1
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,7 @@ fi
2424
if [ "$1" == "centos:7" ] || [ "$1" == "centos:8" ]; then
2525
cd /etc/yum.repos.d/ || exit &&
2626
sed -i 's/mirrorlist/#mirrorlist/g' /etc/yum.repos.d/CentOS-* &&
27-
sed -i 's|#baseurl=http://mirror.centos.org|baseurl=http://vault.centos.org|g' /etc/yum.repos.d/CentOS-* &&
27+
sed -i 's|# *baseurl=http://mirror.centos.org|baseurl=http://vault.centos.org|g' /etc/yum.repos.d/CentOS-* &&
2828
yum update -y &&
2929
cd /root || exit
3030
else

hadoop-client-modules/hadoop-client-check-invariants/src/test/resources/ensure-jars-have-correct-contents.sh

+2
Original file line numberDiff line numberDiff line change
@@ -51,6 +51,8 @@ allowed_expr+="|^[^-]*-default.xml$"
5151
allowed_expr+="|^[^-]*-version-info.properties$"
5252
# * Hadoop's application classloader properties file.
5353
allowed_expr+="|^org.apache.hadoop.application-classloader.properties$"
54+
# Comes from dnsjava, not sure if relocatable.
55+
allowed_expr+="|^messages.properties$"
5456
# public suffix list used by httpcomponents
5557
allowed_expr+="|^mozilla/$"
5658
allowed_expr+="|^mozilla/public-suffix-list.txt$"

hadoop-client-modules/hadoop-client-runtime/pom.xml

+3
Original file line numberDiff line numberDiff line change
@@ -229,6 +229,8 @@
229229
<exclude>jnamed*</exclude>
230230
<exclude>lookup*</exclude>
231231
<exclude>update*</exclude>
232+
<exclude>META-INF/versions/21/*</exclude>
233+
<exclude>META-INF/versions/21/**/*</exclude>
232234
</excludes>
233235
</filter>
234236
<filter>
@@ -243,6 +245,7 @@
243245
<excludes>
244246
<exclude>META-INF/versions/9/module-info.class</exclude>
245247
<exclude>META-INF/versions/11/module-info.class</exclude>
248+
<exclude>META-INF/versions/21/module-info.class</exclude>
246249
</excludes>
247250
</filter>
248251

hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml

+6
Original file line numberDiff line numberDiff line change
@@ -454,4 +454,10 @@
454454
<Class name="org.apache.hadoop.ipc.internal.ShadedProtobufHelper" />
455455
<Bug pattern="AT_OPERATION_SEQUENCE_ON_CONCURRENT_ABSTRACTION" />
456456
</Match>
457+
458+
<!-- class cast after an assignableFrom check. -->
459+
<Match>
460+
<Class name="org.apache.hadoop.util.dynamic.DynMethods" />
461+
<Bug pattern="BC_UNCONFIRMED_CAST" />
462+
</Match>
457463
</FindBugsFilter>

hadoop-common-project/hadoop-common/pom.xml

+11-10
Original file line numberDiff line numberDiff line change
@@ -708,6 +708,17 @@
708708
</filesets>
709709
</configuration>
710710
</plugin>
711+
712+
<plugin>
713+
<groupId>org.apache.maven.plugins</groupId>
714+
<artifactId>maven-javadoc-plugin</artifactId>
715+
<configuration>
716+
<sourceFileExcludes>
717+
<sourceFileExclude>**/FSProtos.java</sourceFileExclude>
718+
</sourceFileExcludes>
719+
<excludePackageNames>*.proto:*.tracing:*.protobuf</excludePackageNames>
720+
</configuration>
721+
</plugin>
711722
</plugins>
712723
</build>
713724

@@ -1279,16 +1290,6 @@
12791290
</execution>
12801291
</executions>
12811292
</plugin>
1282-
<plugin>
1283-
<groupId>org.apache.maven.plugins</groupId>
1284-
<artifactId>maven-javadoc-plugin</artifactId>
1285-
<configuration>
1286-
<sourceFileExcludes>
1287-
<sourceFileExclude>**/FSProtos.java</sourceFileExclude>
1288-
</sourceFileExcludes>
1289-
<excludePackageNames>*.proto:*.tracing:*.protobuf</excludePackageNames>
1290-
</configuration>
1291-
</plugin>
12921293
</plugins>
12931294
</build>
12941295
</profile>

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java

+22
Original file line numberDiff line numberDiff line change
@@ -49,6 +49,7 @@
4949
import java.util.Arrays;
5050
import java.util.Collection;
5151
import java.util.Collections;
52+
import java.util.EnumSet;
5253
import java.util.Enumeration;
5354
import java.util.HashMap;
5455
import java.util.HashSet;
@@ -99,6 +100,7 @@
99100
import org.apache.hadoop.security.alias.CredentialProvider.CredentialEntry;
100101
import org.apache.hadoop.security.alias.CredentialProviderFactory;
101102
import org.apache.hadoop.thirdparty.com.google.common.base.Strings;
103+
import org.apache.hadoop.util.ConfigurationHelper;
102104
import org.apache.hadoop.util.Preconditions;
103105
import org.apache.hadoop.util.ReflectionUtils;
104106
import org.apache.hadoop.util.StringInterner;
@@ -1786,6 +1788,26 @@ public <T extends Enum<T>> T getEnum(String name, T defaultValue) {
17861788
: Enum.valueOf(defaultValue.getDeclaringClass(), val);
17871789
}
17881790

1791+
/**
1792+
* Build an enumset from a comma separated list of values.
1793+
* Case independent.
1794+
* Special handling of "*" meaning: all values.
1795+
* @param key key to look for
1796+
* @param enumClass class of enum
1797+
* @param ignoreUnknown should unknown values raise an exception?
1798+
* @return a mutable set of the identified enum values declared in the configuration
1799+
* @param <E> enumeration type
1800+
* @throws IllegalArgumentException if one of the entries was unknown and ignoreUnknown is false,
1801+
* or there are two entries in the enum which differ only by case.
1802+
*/
1803+
public <E extends Enum<E>> EnumSet<E> getEnumSet(
1804+
final String key,
1805+
final Class<E> enumClass,
1806+
final boolean ignoreUnknown) throws IllegalArgumentException {
1807+
final String value = get(key, "");
1808+
return ConfigurationHelper.parseEnumSet(key, value, enumClass, ignoreUnknown);
1809+
}
1810+
17891811
enum ParsedTimeDuration {
17901812
NS {
17911813
TimeUnit unit() { return TimeUnit.NANOSECONDS; }

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslCipher.java

+16
Original file line numberDiff line numberDiff line change
@@ -177,6 +177,20 @@ private static Transform tokenizeTransformation(String transformation)
177177
}
178178
return new Transform(parts[0], parts[1], parts[2]);
179179
}
180+
181+
public static boolean isSupported(CipherSuite suite) {
182+
Transform transform;
183+
int algMode;
184+
int padding;
185+
try {
186+
transform = tokenizeTransformation(suite.getName());
187+
algMode = AlgMode.get(transform.alg, transform.mode);
188+
padding = Padding.get(transform.padding);
189+
} catch (NoSuchAlgorithmException|NoSuchPaddingException e) {
190+
return false;
191+
}
192+
return isSupportedSuite(algMode, padding);
193+
}
180194

181195
/**
182196
* Initialize this cipher with a key and IV.
@@ -298,5 +312,7 @@ private native int doFinal(long context, ByteBuffer output, int offset,
298312

299313
private native void clean(long ctx, long engineNum);
300314

315+
private native static boolean isSupportedSuite(int alg, int padding);
316+
301317
public native static String getLibraryName();
302318
}

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslSm4CtrCryptoCodec.java

+4
Original file line numberDiff line numberDiff line change
@@ -41,6 +41,10 @@ public OpensslSm4CtrCryptoCodec() {
4141
if (loadingFailureReason != null) {
4242
throw new RuntimeException(loadingFailureReason);
4343
}
44+
45+
if (!OpensslCipher.isSupported(CipherSuite.SM4_CTR_NOPADDING)) {
46+
throw new RuntimeException("The OpenSSL native library is built without SM4 CTR support");
47+
}
4448
}
4549

4650
@Override
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,39 @@
1+
/**
2+
* Licensed to the Apache Software Foundation (ASF) under one
3+
* or more contributor license agreements. See the NOTICE file
4+
* distributed with this work for additional information
5+
* regarding copyright ownership. The ASF licenses this file
6+
* to you under the Apache License, Version 2.0 (the
7+
* "License"); you may not use this file except in compliance
8+
* with the License. You may obtain a copy of the License at
9+
*
10+
* http://www.apache.org/licenses/LICENSE-2.0
11+
*
12+
* Unless required by applicable law or agreed to in writing, software
13+
* distributed under the License is distributed on an "AS IS" BASIS,
14+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15+
* See the License for the specific language governing permissions and
16+
* limitations under the License.
17+
*/
18+
package org.apache.hadoop.fs;
19+
20+
import org.apache.hadoop.classification.InterfaceAudience;
21+
import org.apache.hadoop.classification.InterfaceStability;
22+
23+
/**
24+
* Exception to denote if the underlying stream, cache or other closable resource
25+
* is closed.
26+
*/
27+
@InterfaceAudience.Public
28+
@InterfaceStability.Unstable
29+
public class ClosedIOException extends PathIOException {
30+
31+
/**
32+
* Appends the custom error-message to the default error message.
33+
* @param path path that encountered the closed resource.
34+
* @param message custom error message.
35+
*/
36+
public ClosedIOException(String path, String message) {
37+
super(path, message);
38+
}
39+
}

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java

+1
Original file line numberDiff line numberDiff line change
@@ -1022,6 +1022,7 @@ public class CommonConfigurationKeysPublic {
10221022
"fs.s3a.*.server-side-encryption.key",
10231023
"fs.s3a.encryption.algorithm",
10241024
"fs.s3a.encryption.key",
1025+
"fs.s3a.encryption.context",
10251026
"fs.azure\\.account.key.*",
10261027
"credential$",
10271028
"oauth.*secret",

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonPathCapabilities.java

+16
Original file line numberDiff line numberDiff line change
@@ -187,4 +187,20 @@ private CommonPathCapabilities() {
187187
*/
188188
public static final String BULK_DELETE = "fs.capability.bulk.delete";
189189

190+
/**
191+
* Capability string to probe for block locations returned in {@code LocatedFileStatus}
192+
* instances from calls such as {@code getBlockLocations()} and {@code listStatus()}
193+
* to be 'virtual' rather than actual values resolved against a Distributed Filesystem including
194+
* HDFS: {@value}.
195+
* <p>
196+
* Key implications from this path capability being true:
197+
* <ol>
198+
* <li>Work can be scheduled anywhere</li>
199+
* <li>Creation of the location list is a low-cost client-side operation</li>
200+
* </ol>
201+
* Implication #2 means there is no performance penalty from use of FileSystem operations which
202+
* return lists or iterators of {@code LocatedFileStatus}.
203+
*/
204+
public static final String VIRTUAL_BLOCK_LOCATIONS = "fs.capability.virtual.block.locations";
205+
190206
}

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataInputStream.java

+8
Original file line numberDiff line numberDiff line change
@@ -262,6 +262,14 @@ public int read(long position, ByteBuffer buf) throws IOException {
262262
"by " + in.getClass().getCanonicalName());
263263
}
264264

265+
/**
266+
* Delegate to the underlying stream.
267+
* @param position position within file
268+
* @param buf the ByteBuffer to receive the results of the read operation.
269+
* @throws IOException on a failure from the nested stream.
270+
* @throws UnsupportedOperationException if the inner stream does not
271+
* support this operation.
272+
*/
265273
@Override
266274
public void readFully(long position, ByteBuffer buf) throws IOException {
267275
if (in instanceof ByteBufferPositionedReadable) {

0 commit comments

Comments
 (0)