@@ -24,7 +24,7 @@
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicInteger;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import com.qcloud.cos.auth.AnonymousCOSCredentials;
 import com.qcloud.cos.auth.COSCredentials;
 import com.qcloud.cos.auth.COSCredentialsProvider;

@@ -19,7 +19,7 @@
 import java.io.InputStreamReader;
 import java.net.HttpURLConnection;
 import java.net.URL;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 
 /**
  * Example that uses <code>AuthenticatedURL</code>.
@@ -42,7 +42,7 @@ public static void main(String[] args) {
     if (conn.getResponseCode() == HttpURLConnection.HTTP_OK) {
       BufferedReader reader = new BufferedReader(
           new InputStreamReader(
-              conn.getInputStream(), Charset.forName("UTF-8")));
+              conn.getInputStream(), StandardCharsets.UTF_8));
       String line = reader.readLine();
       while (line != null) {
         System.out.println(line);

@@ -20,8 +20,6 @@
 
 import java.util.Locale;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
-
 /**
  * This is a utility class designed to provide functionality related to
  * {@link AuthenticationHandler}.
@@ -44,8 +42,10 @@ private AuthenticationHandlerUtil() {
    * @return an instance of AuthenticationHandler implementation.
    */
  public static String getAuthenticationHandlerClassName(String authHandler) {
-    String handlerName =
-        Preconditions.checkNotNull(authHandler).toLowerCase(Locale.ENGLISH);
+    if (authHandler == null) {
+      throw new NullPointerException();
+    }
+    String handlerName = authHandler.toLowerCase(Locale.ENGLISH);
 
     String authHandlerClassName = null;
 
@@ -98,8 +98,14 @@ public static String checkAuthScheme(String scheme) {
    * specified authentication scheme false Otherwise.
    */
  public static boolean matchAuthScheme(String scheme, String auth) {
-    scheme = Preconditions.checkNotNull(scheme).trim();
-    auth = Preconditions.checkNotNull(auth).trim();
+    if (scheme == null) {
+      throw new NullPointerException();
+    }
+    scheme = scheme.trim();
+    if (auth == null) {
+      throw new NullPointerException();
+    }
+    auth = auth.trim();
     return auth.regionMatches(true, 0, scheme, 0, scheme.length());
   }
 }

@@ -39,7 +39,6 @@
 import org.slf4j.LoggerFactory;
 
 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
 
 /**
  * The {@link LdapAuthenticationHandler} implements the BASIC authentication
@@ -144,15 +143,20 @@ public void init(Properties config) throws ServletException {
     this.enableStartTls =
         Boolean.valueOf(config.getProperty(ENABLE_START_TLS, "false"));
 
-    Preconditions
-        .checkNotNull(this.providerUrl, "The LDAP URI can not be null");
-    Preconditions.checkArgument((this.baseDN == null)
-        ^ (this.ldapDomain == null),
-        "Either LDAP base DN or LDAP domain value needs to be specified");
+    if (this.providerUrl == null) {
+      throw new NullPointerException("The LDAP URI can not be null");
+    }
+    if (!((this.baseDN == null)
+        ^ (this.ldapDomain == null))) {
+      throw new IllegalArgumentException(
+          "Either LDAP base DN or LDAP domain value needs to be specified");
+    }
     if (this.enableStartTls) {
       String tmp = this.providerUrl.toLowerCase();
-      Preconditions.checkArgument(!tmp.startsWith("ldaps"),
-          "Can not use ldaps and StartTLS option at the same time");
+      if (tmp.startsWith("ldaps")) {
+        throw new IllegalArgumentException(
+            "Can not use ldaps and StartTLS option at the same time");
+      }
     }
   }
 

@@ -30,7 +30,6 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.base.Splitter;
 
 /**
@@ -114,10 +113,10 @@ public void init(Properties config) throws ServletException {
     }
 
     this.types.clear();
-
-    String schemesProperty =
-        Preconditions.checkNotNull(config.getProperty(SCHEMES_PROPERTY),
-            "%s system property is not specified.", SCHEMES_PROPERTY);
+    if (config.getProperty(SCHEMES_PROPERTY) == null) {
+      throw new NullPointerException(SCHEMES_PROPERTY + " system property is not specified.");
+    }
+    String schemesProperty = config.getProperty(SCHEMES_PROPERTY);
     for (String scheme : STR_SPLITTER.split(schemesProperty)) {
       scheme = AuthenticationHandlerUtil.checkAuthScheme(scheme);
       if (schemeToAuthHandlerMapping.containsKey(scheme)) {
@@ -128,8 +127,10 @@
       String authHandlerPropName =
           String.format(AUTH_HANDLER_PROPERTY, scheme).toLowerCase();
       String authHandlerName = config.getProperty(authHandlerPropName);
-      Preconditions.checkNotNull(authHandlerName,
-          "No auth handler configured for scheme %s.", scheme);
+      if (authHandlerName == null) {
+        throw new NullPointerException(
+            "No auth handler configured for scheme " + scheme);
+      }
 
       String authHandlerClassName =
           AuthenticationHandlerUtil
@@ -145,7 +146,9 @@
  protected AuthenticationHandler initializeAuthHandler(
      String authHandlerClassName, Properties config) throws ServletException {
     try {
-      Preconditions.checkNotNull(authHandlerClassName);
+      if (authHandlerClassName == null) {
+        throw new NullPointerException();
+      }
       logger.debug("Initializing Authentication handler of type "
           + authHandlerClassName);
       Class<?> klass =
@@ -207,4 +210,4 @@ public AuthenticationToken authenticate(HttpServletRequest request,
 
     return null;
   }
-}
+}

@@ -23,7 +23,7 @@
 import javax.servlet.http.HttpServletResponse;
 
 import java.io.IOException;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.util.List;
 import java.util.Properties;
 
@@ -53,8 +53,6 @@ public class PseudoAuthenticationHandler implements AuthenticationHandler {
    */
  public static final String ANONYMOUS_ALLOWED = TYPE + ".anonymous.allowed";
 
-  private static final Charset UTF8_CHARSET = Charset.forName("UTF-8");
-
  private static final String PSEUDO_AUTH = "PseudoAuth";
 
  private boolean acceptAnonymous;
@@ -146,7 +144,7 @@ private String getUserName(HttpServletRequest request) {
     if(queryString == null || queryString.length() == 0) {
       return null;
     }
-    List<NameValuePair> list = URLEncodedUtils.parse(queryString, UTF8_CHARSET);
+    List<NameValuePair> list = URLEncodedUtils.parse(queryString, StandardCharsets.UTF_8);
     if (list != null) {
       for (NameValuePair nv : list) {
         if (PseudoAuthenticator.USER_NAME.equals(nv.getName())) {

@@ -13,7 +13,7 @@
  */
 package org.apache.hadoop.security.authentication.util;
 
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.util.Properties;
 import javax.servlet.ServletContext;
 
@@ -39,7 +39,7 @@ public void init(Properties config, ServletContext servletContext,
       long tokenValidity) throws Exception {
     String signatureSecret = config.getProperty(
         AuthenticationFilter.SIGNATURE_SECRET, null);
-    secret = signatureSecret.getBytes(Charset.forName("UTF-8"));
+    secret = signatureSecret.getBytes(StandardCharsets.UTF_8);
     secrets = new byte[][]{secret};
   }
 

@@ -13,7 +13,7 @@
  */
 package org.apache.hadoop.security.authentication.util;
 
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.util.Properties;
 import java.util.Random;
 import javax.servlet.ServletContext;
@@ -140,11 +140,11 @@ public void testUpgradeChangeSecretLength() throws Exception {
     long seed = System.currentTimeMillis();
     Random rand = new Random(seed);
     byte[] secret2 = Long.toString(rand.nextLong())
-        .getBytes(Charset.forName("UTF-8"));
+        .getBytes(StandardCharsets.UTF_8);
     byte[] secret1 = Long.toString(rand.nextLong())
-        .getBytes(Charset.forName("UTF-8"));
+        .getBytes(StandardCharsets.UTF_8);
     byte[] secret3 = Long.toString(rand.nextLong())
-        .getBytes(Charset.forName("UTF-8"));
+        .getBytes(StandardCharsets.UTF_8);
     rand = new Random(seed);
     // Secrets 4 and 5 get thrown away by ZK when the new secret provider tries
     // to init
@@ -238,7 +238,7 @@ private class OldMockZKSignerSecretProvider
 
     @Override
     protected byte[] generateRandomSecret() {
-      return Long.toString(rand.nextLong()).getBytes(Charset.forName("UTF-8"));
+      return Long.toString(rand.nextLong()).getBytes(StandardCharsets.UTF_8);
     }
   }
 

hadoop-common-project/hadoop-common/pom.xml (4 changes: 4 additions & 0 deletions)
@@ -366,6 +366,10 @@
         <groupId>com.sun.jmx</groupId>
         <artifactId>jmxri</artifactId>
       </exclusion>
+      <exclusion>
+        <groupId>org.apache.yetus</groupId>
+        <artifactId>audience-annotations</artifactId>
+      </exclusion>
     </exclusions>
   </dependency>
   <dependency>

@@ -43,6 +43,7 @@
 import java.net.URISyntaxException;
 import java.net.URL;
 import java.net.URLConnection;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -82,7 +83,6 @@
 import javax.xml.transform.dom.DOMSource;
 import javax.xml.transform.stream.StreamResult;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
 import org.apache.commons.collections.map.UnmodifiableMap;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -2881,7 +2881,7 @@ public Reader getConfResourceAsReader(String name) {
         LOG.info("found resource " + name + " at " + url);
       }
 
-      return new InputStreamReader(url.openStream(), Charsets.UTF_8);
+      return new InputStreamReader(url.openStream(), StandardCharsets.UTF_8);
     } catch (Exception e) {
       return null;
     }

@@ -19,7 +19,7 @@
 package org.apache.hadoop.conf;
 
 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.collect.Maps;
 import org.apache.hadoop.util.Time;
 import org.apache.hadoop.conf.ReconfigurationUtil.PropertyChange;

@@ -20,7 +20,7 @@
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 
 import java.io.IOException;
 

@@ -30,7 +30,7 @@
 import java.util.Queue;
 import java.util.concurrent.ConcurrentLinkedQueue;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.ByteBufferPositionedReadable;

@@ -32,7 +32,7 @@
 import org.apache.hadoop.fs.statistics.IOStatisticsSource;
 import org.apache.hadoop.fs.impl.StoreImplementationUtils;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 
 import static org.apache.hadoop.fs.statistics.IOStatisticsSupport.retrieveIOStatistics;
 

@@ -29,7 +29,7 @@
 import org.apache.hadoop.fs.Seekable;
 import org.apache.hadoop.util.CleanerUtil;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 

@@ -29,7 +29,7 @@
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 

@@ -29,7 +29,7 @@
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.crypto.random.OpensslSecureRandom;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.slf4j.Logger;

@@ -29,7 +29,7 @@
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.util.NativeCodeLoader;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.util.PerformanceAdvisory;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

@@ -18,7 +18,7 @@
 
 package org.apache.hadoop.crypto.key;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;

@@ -29,7 +29,7 @@
 import javax.crypto.spec.IvParameterSpec;
 import javax.crypto.spec.SecretKeySpec;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.crypto.CryptoCodec;

@@ -81,7 +81,7 @@
 
 import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.base.Strings;
 
 import static org.apache.hadoop.util.KMSUtil.checkNotEmpty;

@@ -51,7 +51,7 @@
 import org.slf4j.LoggerFactory;
 
 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 
 /**
  * A simple LoadBalancing KMSClientProvider that round-robins requests

@@ -33,7 +33,7 @@
 import java.util.concurrent.locks.ReadWriteLock;
 import java.util.concurrent.locks.ReentrantReadWriteLock;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.thirdparty.com.google.common.cache.CacheBuilder;
 import org.apache.hadoop.thirdparty.com.google.common.cache.CacheLoader;
 import org.apache.hadoop.thirdparty.com.google.common.cache.LoadingCache;

@@ -22,7 +22,7 @@
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.util.NativeCodeLoader;
 
-import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hadoop.util.Preconditions;
 import org.apache.hadoop.util.PerformanceAdvisory;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;