@@ -28,7 +28,7 @@
 import java.io.IOException;
 import java.net.InetSocketAddress;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.CommonConfigurationKeys;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.hdds.HddsUtils;
 import org.apache.hadoop.hdds.conf.OzoneConfiguration;
 import org.apache.hadoop.hdds.conf.ReconfigurationHandler;
@@ -66,7 +66,7 @@ protected HddsDatanodeClientProtocolServer(
         HDDS_DATANODE_CLIENT_ADDRESS_KEY,
         HddsUtils.getDatanodeRpcAddress(conf), rpcServer);
     datanodeDetails.setPort(CLIENT_RPC, clientRpcAddress.getPort());
-    if (conf.getBoolean(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION,
+    if (conf.getBoolean(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHORIZATION,
         false)) {
       rpcServer.refreshServiceAcl(conf, HddsPolicyProvider.getInstance());
     }
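
A note on why this swap is safe (it recurs in the SCM, OM, and bucket-encryption files below): in stock Hadoop, CommonConfigurationKeys extends CommonConfigurationKeysPublic, so HADOOP_SECURITY_AUTHORIZATION is the same inherited constant under either name and no configuration key changes. A minimal demo, with KeyParityCheck being a made-up class name:

    // Sketch only (not from this PR): the two classes expose the same key,
    // because CommonConfigurationKeys extends CommonConfigurationKeysPublic.
    import org.apache.hadoop.fs.CommonConfigurationKeys;
    import org.apache.hadoop.fs.CommonConfigurationKeysPublic;

    public final class KeyParityCheck {
      public static void main(String[] args) {
        // Both lines print "hadoop.security.authorization".
        System.out.println(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION);
        System.out.println(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHORIZATION);
      }
    }
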
3 changes: 2 additions & 1 deletion hadoop-hdds/dev-support/checkstyle/checkstyle.xml
@@ -112,7 +112,8 @@
       <property name="regexp" value="true"/>
       <property name="illegalPkgs" value="^sun\..*, ^.*\.relocated\..*, ^.*\.shaded\..*"/>
       <property name="illegalClasses"
-                value="^org.apache.hadoop.test.(GenericTestUtils|LambdaTestUtils|TimedOutTestsListener)"/>
+                value="^org.apache.hadoop.test.(GenericTestUtils|LambdaTestUtils|TimedOutTestsListener)
+                       |^org.apache.hadoop.fs.CommonConfigurationKeys"/>
     </module>
     <module name="RedundantImport"/>
     <module name="UnusedImports"/>
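
The added alternative extends the IllegalImport ban to the private-audience key class. Since the rest of this PR keeps importing CommonConfigurationKeysPublic, the pattern is evidently matched against the full import name rather than as a prefix; on that assumption, the rule behaves like this:

    // Illustration only: how imports fare under the amended IllegalImport rule.
    import org.apache.hadoop.fs.CommonConfigurationKeysPublic;  // passes (no full-name match)
    // import org.apache.hadoop.fs.CommonConfigurationKeys;     // rejected: newly banned
    // import org.apache.hadoop.test.GenericTestUtils;          // rejected: already banned
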
@@ -62,7 +62,7 @@
 import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.conf.ConfServlet;
 import org.apache.hadoop.conf.Configuration.IntegerRanges;
-import org.apache.hadoop.fs.CommonConfigurationKeys;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.hdds.annotation.InterfaceAudience;
 import org.apache.hadoop.hdds.annotation.InterfaceStability;
 import org.apache.hadoop.hdds.conf.ConfigurationSource;
@@ -183,6 +183,8 @@ public final class HttpServer2 implements FilterContainer {
   private static final String NO_CACHE_FILTER = "NoCacheFilter";
 
   private static final String BIND_ADDRESS = "bind.address";
+  private static final String HADOOP_JETTY_LOGS_SERVE_ALIASES = "hadoop.jetty.logs.serve.aliases";
+  private static final boolean DEFAULT_HADOOP_JETTY_LOGS_SERVE_ALIASES = true;
 
   private final AccessControlList adminsAcl;
 
@@ -761,16 +763,14 @@ protected void addDefaultApps(ContextHandlerCollection parent,
     // and it's enabled.
     String logDir = System.getProperty("hadoop.log.dir");
     boolean logsEnabled = conf.getBoolean(
-        CommonConfigurationKeys.HADOOP_HTTP_LOGS_ENABLED,
-        CommonConfigurationKeys.HADOOP_HTTP_LOGS_ENABLED_DEFAULT);
+        CommonConfigurationKeysPublic.HADOOP_HTTP_LOGS_ENABLED,
+        CommonConfigurationKeysPublic.HADOOP_HTTP_LOGS_ENABLED_DEFAULT);
     if (logDir != null && logsEnabled) {
       ServletContextHandler logContext =
           new ServletContextHandler(parent, "/logs");
       logContext.setResourceBase(logDir);
       logContext.addServlet(AdminAuthorizedServlet.class, "/*");
-      if (conf.getBoolean(
-          CommonConfigurationKeys.HADOOP_JETTY_LOGS_SERVE_ALIASES,
-          CommonConfigurationKeys.DEFAULT_HADOOP_JETTY_LOGS_SERVE_ALIASES)) {
+      if (conf.getBoolean(HADOOP_JETTY_LOGS_SERVE_ALIASES, DEFAULT_HADOOP_JETTY_LOGS_SERVE_ALIASES)) {
         Map<String, String> params = logContext.getInitParams();
         params.put("org.eclipse.jetty.servlet.Default.aliases", "true");
       }
@@ -1441,7 +1441,7 @@ public static boolean isInstrumentationAccessAllowed(
 
     boolean access = true;
     boolean adminAccess = conf.getBoolean(
-        CommonConfigurationKeys.HADOOP_SECURITY_INSTRUMENTATION_REQUIRES_ADMIN,
+        CommonConfigurationKeysPublic.HADOOP_SECURITY_INSTRUMENTATION_REQUIRES_ADMIN,
         false);
     if (adminAccess) {
       access = hasAdministratorAccess(servletContext, request, response);
@@ -1466,7 +1466,7 @@ public static boolean hasAdministratorAccess(
         .getAttribute(CONF_CONTEXT_ATTRIBUTE);
     // If there is no authorization, anybody has administrator access.
     if (!conf.getBoolean(
-        CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION, false)) {
+        CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHORIZATION, false)) {
       return true;
     }
 
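
Two different remedies appear in this file: keys that exist in CommonConfigurationKeysPublic (HADOOP_HTTP_LOGS_ENABLED, the HADOOP_SECURITY_* keys) simply switch classes, while the Jetty-logs key has no public-audience counterpart, so its name and default are inlined as private constants. A small sketch of the resulting lookup, with shouldServeLogAliases being a hypothetical helper:

    // Hypothetical helper (not in the PR); key and default mirror the
    // constants added to HttpServer2 above.
    static boolean shouldServeLogAliases(org.apache.hadoop.conf.Configuration conf) {
      return conf.getBoolean("hadoop.jetty.logs.serve.aliases", true);
    }
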
@@ -44,7 +44,7 @@
 import java.util.concurrent.TimeoutException;
 import java.util.stream.Collectors;
 import org.apache.commons.lang3.StringUtils;
-import org.apache.hadoop.fs.CommonConfigurationKeys;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.hdds.client.BlockID;
 import org.apache.hadoop.hdds.client.ReplicationConfig;
 import org.apache.hadoop.hdds.conf.OzoneConfiguration;
@@ -146,7 +146,7 @@ public SCMBlockProtocolServer(OzoneConfiguration conf,
     updateRPCListenAddress(
         conf, scm.getScmNodeDetails().getBlockProtocolServerAddressKey(),
         scmBlockAddress, blockRpcServer);
-    if (conf.getBoolean(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION,
+    if (conf.getBoolean(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHORIZATION,
         false)) {
       blockRpcServer.refreshServiceAcl(conf, SCMPolicyProvider.getInstance());
     }

@@ -50,7 +50,7 @@
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
 import org.apache.commons.lang3.tuple.Pair;
-import org.apache.hadoop.fs.CommonConfigurationKeys;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.hdds.client.ReplicationConfig;
 import org.apache.hadoop.hdds.conf.OzoneConfiguration;
 import org.apache.hadoop.hdds.conf.ReconfigurationHandler;
@@ -187,7 +187,7 @@ public SCMClientProtocolServer(OzoneConfiguration conf,
     updateRPCListenAddress(conf,
         scm.getScmNodeDetails().getClientProtocolServerAddressKey(),
         scmAddress, clientRpcServer);
-    if (conf.getBoolean(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION,
+    if (conf.getBoolean(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHORIZATION,
         false)) {
       clientRpcServer.refreshServiceAcl(conf, SCMPolicyProvider.getInstance());
     }

@@ -52,7 +52,7 @@
 import java.util.Map;
 import java.util.OptionalLong;
 import java.util.concurrent.TimeoutException;
-import org.apache.hadoop.fs.CommonConfigurationKeys;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.hdds.conf.OzoneConfiguration;
 import org.apache.hadoop.hdds.protocol.DatanodeDetails;
 import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
@@ -181,7 +181,7 @@ public SCMDatanodeProtocolServer(final OzoneConfiguration conf,
         conf, getDatanodeAddressKey(), datanodeRpcAddr,
         datanodeRpcServer);
 
-    if (conf.getBoolean(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION,
+    if (conf.getBoolean(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHORIZATION,
         false)) {
       datanodeRpcServer.refreshServiceAcl(conf, getPolicyProvider());
     }

@@ -17,7 +17,6 @@
 
 package org.apache.hadoop.tools.contract;
 
-import static org.apache.hadoop.fs.CommonConfigurationKeys.IOSTATISTICS_LOGGING_LEVEL_INFO;
 import static org.apache.hadoop.fs.contract.ContractTestUtils.createFile;
 import static org.apache.hadoop.fs.contract.ContractTestUtils.dataset;
 import static org.apache.hadoop.fs.contract.ContractTestUtils.skip;
@@ -188,7 +187,7 @@ public void setup() throws Exception {
   @Override
   public void teardown() throws Exception {
     // if remote FS supports IOStatistics log it.
-    logIOStatisticsAtLevel(LOG, IOSTATISTICS_LOGGING_LEVEL_INFO, getRemoteFS());
+    logIOStatisticsAtLevel(LOG, "info", getRemoteFS());
     super.teardown();
   }
 
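
Here the removed constant also lives only in CommonConfigurationKeys, so its value is inlined rather than imported. Assuming the stock Hadoop definition IOSTATISTICS_LOGGING_LEVEL_INFO = "info", the old and new calls are equivalent:

    // Equivalent under the assumption above:
    logIOStatisticsAtLevel(LOG, IOSTATISTICS_LOGGING_LEVEL_INFO, getRemoteFS()); // old
    logIOStatisticsAtLevel(LOG, "info", getRemoteFS());                          // new
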

@@ -23,7 +23,7 @@
 import org.apache.hadoop.crypto.CipherSuite;
 import org.apache.hadoop.crypto.key.KeyProvider;
 import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension;
-import org.apache.hadoop.fs.CommonConfigurationKeys;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.ozone.om.exceptions.OMException;
 import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.BucketEncryptionInfoProto;
 import org.apache.hadoop.ozone.protocolPB.OMPBHelper;
@@ -42,7 +42,7 @@ public static BucketEncryptionInfoProto getBekInfo(
     BucketEncryptionInfoProto.Builder bekb = null;
     if (kmsProvider == null) {
       throw new OMException("Invalid KMS provider, check configuration " +
-          CommonConfigurationKeys.HADOOP_SECURITY_KEY_PROVIDER_PATH,
+          CommonConfigurationKeysPublic.HADOOP_SECURITY_KEY_PROVIDER_PATH,
          OMException.ResultCodes.INVALID_KMS_PROVIDER);
     }
     if (bek.getKeyName() == null) {

@@ -156,7 +156,6 @@
 import org.apache.hadoop.conf.StorageUnit;
 import org.apache.hadoop.crypto.key.KeyProvider;
 import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension;
-import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileUtil;
@@ -1440,7 +1439,7 @@ private RPC.Server startRpcServer(OzoneConfiguration conf,
     HddsServerUtil.addPBProtocol(conf, ReconfigureProtocolOmPB.class,
         reconfigureProtocolService, rpcServer);
 
-    if (conf.getBoolean(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION,
+    if (conf.getBoolean(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHORIZATION,
         false)) {
       rpcServer.refreshServiceAcl(conf, OMPolicyProvider.getInstance());
     }

@@ -58,7 +58,6 @@
 import java.util.concurrent.atomic.AtomicBoolean;
 import javax.sql.DataSource;
 import org.apache.commons.io.FileUtils;
-import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.hdds.conf.OzoneConfiguration;
 import org.apache.hadoop.hdds.conf.ReconfigurationHandler;
 import org.apache.hadoop.hdds.protocol.DatanodeDetails;
@@ -139,6 +138,8 @@ public class ReconStorageContainerManagerFacade
   private static final Logger LOG = LoggerFactory
       .getLogger(ReconStorageContainerManagerFacade.class);
   public static final long CONTAINER_METADATA_SIZE = 1 * 1024 * 1024L;
+  private static final String IPC_MAXIMUM_DATA_LENGTH = "ipc.maximum.data.length";
+  private static final int IPC_MAXIMUM_DATA_LENGTH_DEFAULT = 128 * 1024 * 1024;
 
   private final OzoneConfiguration ozoneConfiguration;
   private final ReconDatanodeProtocolServer datanodeProtocolServer;
@@ -594,9 +595,7 @@ private long getContainerCountPerCall(long totalContainerCount) {
     // Assumption of size of 1 container info object here is 1 MB
     long containersMetaDataTotalRpcRespSizeMB =
         CONTAINER_METADATA_SIZE * totalContainerCount;
-    long hadoopRPCSize = ozoneConfiguration.getInt(
-        CommonConfigurationKeys.IPC_MAXIMUM_DATA_LENGTH,
-        CommonConfigurationKeys.IPC_MAXIMUM_DATA_LENGTH_DEFAULT);
+    long hadoopRPCSize = ozoneConfiguration.getInt(IPC_MAXIMUM_DATA_LENGTH, IPC_MAXIMUM_DATA_LENGTH_DEFAULT);
     long containerCountPerCall = containersMetaDataTotalRpcRespSizeMB <=
         hadoopRPCSize ? totalContainerCount :
         Math.round(Math.floor(
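
For context on the batching math: each container record is budgeted at CONTAINER_METADATA_SIZE (1 MB; the arithmetic is in bytes despite the variable's MB suffix) against the 128 MB default of ipc.maximum.data.length, so one RPC carries at most about 128 containers once the total stops fitting. The diff truncates the expression after Math.floor(, so the fallback branch in this standalone sketch is an assumption about the elided tail:

    // Hypothetical, self-contained rendition of getContainerCountPerCall's logic.
    static long containerCountPerCall(long totalContainerCount) {
      final long perContainerBytes = 1L * 1024 * 1024;  // CONTAINER_METADATA_SIZE
      final long maxRpcBytes = 128L * 1024 * 1024;      // IPC_MAXIMUM_DATA_LENGTH_DEFAULT
      long totalBytes = perContainerBytes * totalContainerCount;
      if (totalBytes <= maxRpcBytes) {
        return totalContainerCount;                     // everything fits in one call
      }
      // Assumed tail of the truncated expression: whole containers per capped RPC.
      return maxRpcBytes / perContainerBytes;           // = 128 with these defaults
    }
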