Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
34 commits
Select commit Hold shift + click to select a range
4d2ae5b
YARN-10498. Fix typo in CapacityScheduler Markdown document (#2484)
Nov 30, 2020
44910b5
HDFS-15699 Remove lz4 references in vcxproj (#2498)
GauthamBanasandra Nov 30, 2020
6a1d7d9
HDFS-15677. TestRouterRpcMultiDestination#testGetCachedDatanodeReport…
iwasakims Nov 30, 2020
918ba9e
HDFS-15694. Avoid calling UpdateHeartBeatState inside DataNodeDescrip…
amahussein Dec 1, 2020
fa773a8
YARN-10278: CapacityScheduler test framework ProportionalCapacityPree…
erichadoop Dec 1, 2020
2b5b556
HDFS-15695. NN should not let the balancer run in safemode (#2489). C…
amahussein Dec 2, 2020
60201cb
HDFS-15703. Don't generate edits for set operations that are no-op (#…
amahussein Dec 2, 2020
6ff2409
HDFS-14904. Add Option to let Balancer prefer highly utilized nodes i…
LeonGao91 Dec 2, 2020
42a2919
HDFS-15705. Fix a typo in SecondaryNameNode.java. Contributed by Sixi…
jojochuang Dec 3, 2020
9969745
YARN-9883. Reshape SchedulerHealth class. Contributed by D M Murali K…
Dec 3, 2020
717b835
HADOOP-17397: ABFS: SAS Test updates for version and permission update
ThomasMarquardt Dec 1, 2020
9170eb5
YARN-10511. Update yarn.nodemanager.env-whitelist value in docs (#2512)
ilpianista Dec 3, 2020
f94e927
HADOOP-17392. Remote exception messages should not include the except…
amahussein Dec 3, 2020
db73e99
HADOOP-16881. KerberosAuthentication does not disconnect HttpURLConne…
Dec 3, 2020
07655a7
HDFS-15706. HttpFS: Log more information on request failures. (#2515)
amahussein Dec 3, 2020
8c234fc
HADOOP-17389. KMS should log full UGI principal. (#2476)
amahussein Dec 4, 2020
e2c1268
HDFS-15240. Erasure Coding: dirty buffer causes reconstruction block …
ferhui Dec 4, 2020
7dda804
HDFS-14090. RBF: Improved isolation for downstream name nodes. {Stati…
ayushtkn Dec 4, 2020
ad40715
HDFS-15221. Add checking of effective filesystem during initializing …
ayushtkn Dec 7, 2020
da1ea25
HDFS-15660. StorageTypeProto is not compatiable between 3.x and 2.6. …
linyiqun Dec 7, 2020
32099e3
HDFS-15707. NNTop counts don't add up as expected. (#2516)
amahussein Dec 7, 2020
40f7543
HDFS-15709. Socket file descriptor leak in StripedBlockChecksumRecons…
crossfire Dec 7, 2020
7d3c8ef
YARN-10495. make the rpath of container-executor configurable. Contri…
ericbadger Dec 8, 2020
4ffec79
HDFS-15712. Upgrade googletest to 1.10.0 (#2523)
GauthamBanasandra Dec 8, 2020
01383a2
HDFS-15716. WaitforReplication in TestUpgradeDomainBlockPlacementPoli…
amahussein Dec 8, 2020
aaf9e3d
YARN-10491. Fix deprecation warnings in SLSWebApp.java (#2519)
ankitk-me Dec 9, 2020
d67ccd0
YARN-10380: Import logic of multi-node allocation in CapacitySchedule…
zhuqi-lucas Dec 9, 2020
0a45bd0
YARN-10520. Deprecated the residual nested class for the LCEResourceH…
Dec 9, 2020
c2cecfc
HADOOP-17425. Bump up snappy-java to 1.1.8.2. (#2536)
viirya Dec 10, 2020
3ec01b1
HDFS-15711. Add Metrics to HttpFS Server. (#2521) Contributed by Ahme…
amahussein Dec 10, 2020
9bd3c9b
HDFS-15720 namenode audit async logger should add some log4j config (…
Neilxzn Dec 10, 2020
be35fa1
HDFS-15717. Improve fsck logging. (#2529) Contributed by Kihwal Lee a…
amahussein Dec 11, 2020
6de1a8e
HADOOP-13571. ServerSocketUtil.getPort() should use loopback address,…
jbrennan333 Dec 11, 2020
9ed7370
HDFS-15725. Lease Recovery never completes for a committed block whic…
Dec 11, 2020
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -183,8 +183,9 @@ public void authenticate(URL url, AuthenticatedURL.Token token)
if (!token.isSet()) {
this.url = url;
base64 = new Base64(0);
HttpURLConnection conn = null;
try {
HttpURLConnection conn = token.openConnection(url, connConfigurator);
conn = token.openConnection(url, connConfigurator);
conn.setRequestMethod(AUTH_HTTP_METHOD);
conn.connect();

Expand Down Expand Up @@ -218,6 +219,10 @@ public void authenticate(URL url, AuthenticatedURL.Token token)
} catch (AuthenticationException ex){
throw wrapExceptionWithMessage(ex,
"Error while authenticating with endpoint: " + url);
} finally {
if (conn != null) {
conn.disconnect();
}
}
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.StringUtils;

/**
Expand Down Expand Up @@ -100,4 +101,20 @@ private static List<StorageType> getNonTransientTypes() {
}
return nonTransientTypes;
}

// Prefix used to build per-StorageType configuration keys of the form
// "dfs.datanode.storagetype.<TYPE>.<name>", e.g. "dfs.datanode.storagetype.SSD.capacity".
public static final String CONF_KEY_HEADER =
"dfs.datanode.storagetype.";

/**
* Look up a configuration value that is scoped to a specific
* {@link StorageType}, using the key
* {@code dfs.datanode.storagetype.<type>.<name>}.
* @param conf - the configuration to read from
* @param t - the StorageType whose scoped key should be consulted
* @param name - the sub-name (suffix) of the configuration key
* @return the configured value, or {@code null} if the key is unset
*/
public static String getConf(Configuration conf,
StorageType t, String name) {
return conf.get(CONF_KEY_HEADER + t.toString() + "." + name);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -96,6 +96,7 @@ public synchronized ByteBuffer getBuffer(boolean direct, int length) {
ByteBuffer.allocate(length);
}
tree.remove(entry.getKey());
entry.getValue().clear();
return entry.getValue();
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@

package org.apache.hadoop.ipc;

import org.apache.hadoop.security.AccessControlException;
import org.apache.hadoop.thirdparty.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
Expand Down Expand Up @@ -857,7 +858,8 @@ public AuthMethod run()
}
} else if (UserGroupInformation.isSecurityEnabled()) {
if (!fallbackAllowed) {
throw new IOException("Server asks us to fall back to SIMPLE " +
throw new AccessControlException(
"Server asks us to fall back to SIMPLE " +
"auth, but this client is configured to only allow secure " +
"connections.");
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2202,7 +2202,7 @@ private void doSaslReply(Message message) throws IOException {
private void doSaslReply(Exception ioe) throws IOException {
setupResponse(authFailedCall,
RpcStatusProto.FATAL, RpcErrorCodeProto.FATAL_UNAUTHORIZED,
null, ioe.getClass().getName(), ioe.toString());
null, ioe.getClass().getName(), ioe.getMessage());
sendResponse(authFailedCall);
}

Expand Down Expand Up @@ -2597,8 +2597,7 @@ private void processOneRpc(ByteBuffer bb)
final RpcCall call = new RpcCall(this, callId, retry);
setupResponse(call,
rse.getRpcStatusProto(), rse.getRpcErrorCodeProto(), null,
t.getClass().getName(),
t.getMessage() != null ? t.getMessage() : t.toString());
t.getClass().getName(), t.getMessage());
sendResponse(call);
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,7 @@

import javax.net.SocketFactory;

import org.apache.hadoop.security.AccessControlException;
import org.apache.hadoop.thirdparty.com.google.common.cache.Cache;
import org.apache.hadoop.thirdparty.com.google.common.cache.CacheBuilder;

Expand Down Expand Up @@ -874,6 +875,11 @@ public static IOException wrapException(final String destHost,
+ " failed on socket exception: " + exception
+ ";"
+ see("SocketException"));
} else if (exception instanceof AccessControlException) {
return wrapWithMessage(exception,
"Call From "
+ localHost + " to " + destHost + ":" + destPort
+ " failed: " + exception.getMessage());
} else {
// 1. Return instance of same type with exception msg if Exception has a
// String constructor.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -312,8 +312,9 @@ private Map doDelegationTokenOperation(URL url,
dt = ((DelegationTokenAuthenticatedURL.Token) token).getDelegationToken();
((DelegationTokenAuthenticatedURL.Token) token).setDelegationToken(null);
}
HttpURLConnection conn = null;
try {
HttpURLConnection conn = aUrl.openConnection(url, token);
conn = aUrl.openConnection(url, token);
conn.setRequestMethod(operation.getHttpMethod());
HttpExceptionUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
if (hasResponse) {
Expand All @@ -339,6 +340,9 @@ private Map doDelegationTokenOperation(URL url,
if (dt != null) {
((DelegationTokenAuthenticatedURL.Token) token).setDelegationToken(dt);
}
if (conn != null) {
conn.disconnect();
}
}
return ret;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -128,10 +128,6 @@
<ItemGroup>
<ClCompile Include="src\org\apache\hadoop\io\compress\zlib\ZlibCompressor.c" Condition="Exists('$(ZLIB_HOME)')" />
<ClCompile Include="src\org\apache\hadoop\io\compress\zlib\ZlibDecompressor.c" Condition="Exists('$(ZLIB_HOME)')" />
<ClCompile Include="src\org\apache\hadoop\io\compress\lz4\lz4.c" />
<ClCompile Include="src\org\apache\hadoop\io\compress\lz4\lz4hc.c" />
<ClCompile Include="src\org\apache\hadoop\io\compress\lz4\Lz4Compressor.c" />
<ClCompile Include="src\org\apache\hadoop\io\compress\lz4\Lz4Decompressor.c" />
<ClCompile Include="src\org\apache\hadoop\io\nativeio\file_descriptor.c" />
<ClCompile Include="src\org\apache\hadoop\io\nativeio\NativeIO.c" />
<ClCompile Include="src\org\apache\hadoop\security\JniBasedUnixGroupsMappingWin.c" />
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -156,7 +156,7 @@ This section deals with important parameters to be specified in the given config
| `yarn.nodemanager.remote-app-log-dir` | */logs* | HDFS directory where the application logs are moved on application completion. Need to set appropriate permissions. Only applicable if log-aggregation is enabled. |
| `yarn.nodemanager.remote-app-log-dir-suffix` | *logs* | Suffix appended to the remote log dir. Logs will be aggregated to ${yarn.nodemanager.remote-app-log-dir}/${user}/${thisParam} Only applicable if log-aggregation is enabled. |
| `yarn.nodemanager.aux-services` | mapreduce\_shuffle | Shuffle service that needs to be set for Map Reduce applications. |
| `yarn.nodemanager.env-whitelist` | Environment properties to be inherited by containers from NodeManagers | For mapreduce application in addition to the default values HADOOP\_MAPRED_HOME should to be added. Property value should JAVA\_HOME,HADOOP\_COMMON\_HOME,HADOOP\_HDFS\_HOME,HADOOP\_CONF\_DIR,CLASSPATH\_PREPEND\_DISTCACHE,HADOOP\_YARN\_HOME,HADOOP\_MAPRED\_HOME |
| `yarn.nodemanager.env-whitelist` | Environment properties to be inherited by containers from NodeManagers | For MapReduce applications, HADOOP\_MAPRED\_HOME should be added in addition to the default values. The property value should be JAVA\_HOME,HADOOP\_COMMON\_HOME,HADOOP\_HDFS\_HOME,HADOOP\_CONF\_DIR,CLASSPATH\_PREPEND\_DISTCACHE,HADOOP\_YARN\_HOME,HADOOP\_HOME,PATH,LANG,TZ,HADOOP\_MAPRED\_HOME |

* Configurations for History Server (Needs to be moved elsewhere):

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -206,7 +206,7 @@ The following instructions assume that 1. ~ 4. steps of [the above instructions]
</property>
<property>
<name>yarn.nodemanager.env-whitelist</name>
<value>JAVA_HOME,HADOOP_COMMON_HOME,HADOOP_HDFS_HOME,HADOOP_CONF_DIR,CLASSPATH_PREPEND_DISTCACHE,HADOOP_YARN_HOME,HADOOP_MAPRED_HOME</value>
<value>JAVA_HOME,HADOOP_COMMON_HOME,HADOOP_HDFS_HOME,HADOOP_CONF_DIR,CLASSPATH_PREPEND_DISTCACHE,HADOOP_YARN_HOME,HADOOP_HOME,PATH,LANG,TZ,HADOOP_MAPRED_HOME</value>
</property>
</configuration>

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -533,24 +533,27 @@ public void handle(Callback[] callbacks)
}

private static Pattern BadToken =
Pattern.compile(".*DIGEST-MD5: digest response format violation.*");
Pattern.compile("^" + RemoteException.class.getName() +
"\\("+ SaslException.class.getName() + "\\): " +
"DIGEST-MD5: digest response format violation.*");
private static Pattern KrbFailed =
Pattern.compile(".*Failed on local exception:.* " +
"Failed to specify server's Kerberos principal name.*");
private static Pattern Denied(AuthMethod method) {
return Pattern.compile(".*RemoteException.*AccessControlException.*: "
+ method + " authentication is not enabled.*");
return Pattern.compile("^" + RemoteException.class.getName() +
"\\(" + AccessControlException.class.getName() + "\\): "
+ method + " authentication is not enabled.*");
}
private static Pattern No(AuthMethod ... method) {
// Builds the pattern matched against the client-side failure raised when
// none of the given auth methods is usable, e.g.
// "Client cannot authenticate via:[KERBEROS, TOKEN]".
final String joined = StringUtils.join(method, ",\\s*");
return Pattern.compile(
".*Failed on local exception:.* "
+ "Client cannot authenticate via:\\[" + joined + "\\].*");
}
private static Pattern NoTokenAuth =
Pattern.compile(".*IllegalArgumentException: " +
Pattern.compile("^" + IllegalArgumentException.class.getName() + ": " +
"TOKEN authentication requires a secret manager");
private static Pattern NoFallback =
Pattern.compile(".*Failed on local exception:.* " +
Pattern.compile("^" + AccessControlException.class.getName() + ":.* " +
"Server asks us to fall back to SIMPLE auth, " +
"but this client is configured to only allow secure connections.*");

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.net.InetAddress;
import java.net.ServerSocket;
import java.util.Random;

Expand Down Expand Up @@ -49,7 +50,8 @@ public static int getPort(int port, int retries) throws IOException {
if (tryPort == 0) {
continue;
}
try (ServerSocket s = new ServerSocket(tryPort)) {
try (ServerSocket s = new ServerSocket(tryPort, 50,
InetAddress.getLoopbackAddress())) {
LOG.info("Using port " + tryPort);
return tryPort;
} catch (IOException e) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -191,7 +191,7 @@ private void logEvent(final OpStatus status, AuditEvent event) {
private void op(final OpStatus opStatus, final Object op,
final UserGroupInformation ugi, final String key, final String remoteHost,
final String extraMsg) {
final String user = ugi == null ? null: ugi.getShortUserName();
final String user = ugi == null ? null: ugi.getUserName();
if (!Strings.isNullOrEmpty(user) && !Strings.isNullOrEmpty(key)
&& (op != null)
&& AGGREGATE_OPS_WHITELIST.contains(op)) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -77,7 +77,7 @@ class AuditEvent {
this.user = null;
this.impersonator = null;
} else {
this.user = ugi.getShortUserName();
this.user = ugi.getUserName();
if (ugi.getAuthenticationMethod()
== UserGroupInformation.AuthenticationMethod.PROXY) {
this.impersonator = ugi.getRealUser().getUserName();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,6 @@
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.Timeout;
import org.mockito.Mockito;

public class TestKMSAudit {

Expand All @@ -50,6 +49,8 @@ public class TestKMSAudit {
private PrintStream capturedOut;

private KMSAudit kmsAudit;
private UserGroupInformation luser =
UserGroupInformation.createUserForTesting("luser@REALM", new String[0]);

private static class FilterOut extends FilterOutputStream {
public FilterOut(OutputStream out) {
Expand Down Expand Up @@ -95,10 +96,7 @@ private String getAndResetLogOutput() {
}

@Test
@SuppressWarnings("checkstyle:linelength")
public void testAggregation() throws Exception {
UserGroupInformation luser = Mockito.mock(UserGroupInformation.class);
Mockito.when(luser.getShortUserName()).thenReturn("luser");
kmsAudit.ok(luser, KMSOp.DECRYPT_EEK, "k1", "testmsg");
kmsAudit.ok(luser, KMSOp.DECRYPT_EEK, "k1", "testmsg");
kmsAudit.ok(luser, KMSOp.DECRYPT_EEK, "k1", "testmsg");
Expand All @@ -120,27 +118,30 @@ public void testAggregation() throws Exception {
kmsAudit.evictCacheForTesting();
String out = getAndResetLogOutput();
System.out.println(out);
Assert.assertTrue(
out.matches(
"OK\\[op=DECRYPT_EEK, key=k1, user=luser, accessCount=1, interval=[^m]{1,4}ms\\] testmsg"
boolean doesMatch = out.matches(
"OK\\[op=DECRYPT_EEK, key=k1, user=luser@REALM, accessCount=1, "
+ "interval=[^m]{1,4}ms\\] testmsg"
// Not aggregated !!
+ "OK\\[op=DELETE_KEY, key=k1, user=luser\\] testmsg"
+ "OK\\[op=ROLL_NEW_VERSION, key=k1, user=luser\\] testmsg"
+ "OK\\[op=INVALIDATE_CACHE, key=k1, user=luser\\] testmsg"
+ "OK\\[op=DELETE_KEY, key=k1, user=luser@REALM\\] testmsg"
+ "OK\\[op=ROLL_NEW_VERSION, key=k1, user=luser@REALM\\] testmsg"
+ "OK\\[op=INVALIDATE_CACHE, key=k1, user=luser@REALM\\] testmsg"
// Aggregated
+ "OK\\[op=DECRYPT_EEK, key=k1, user=luser, accessCount=6, interval=[^m]{1,4}ms\\] testmsg"
+ "OK\\[op=DECRYPT_EEK, key=k1, user=luser, accessCount=1, interval=[^m]{1,4}ms\\] testmsg"
+ "OK\\[op=REENCRYPT_EEK, key=k1, user=luser, accessCount=1, interval=[^m]{1,4}ms\\] testmsg"
+ "OK\\[op=REENCRYPT_EEK, key=k1, user=luser, accessCount=3, interval=[^m]{1,4}ms\\] testmsg"
+ "OK\\[op=REENCRYPT_EEK_BATCH, key=k1, user=luser\\] testmsg"
+ "OK\\[op=REENCRYPT_EEK_BATCH, key=k1, user=luser\\] testmsg"));
+ "OK\\[op=DECRYPT_EEK, key=k1, user=luser@REALM, accessCount=6, "
+ "interval=[^m]{1,4}ms\\] testmsg"
+ "OK\\[op=DECRYPT_EEK, key=k1, user=luser@REALM, accessCount=1, "
+ "interval=[^m]{1,4}ms\\] testmsg"
+ "OK\\[op=REENCRYPT_EEK, key=k1, user=luser@REALM, "
+ "accessCount=1, interval=[^m]{1,4}ms\\] testmsg"
+ "OK\\[op=REENCRYPT_EEK, key=k1, user=luser@REALM, "
+ "accessCount=3, interval=[^m]{1,4}ms\\] testmsg"
+ "OK\\[op=REENCRYPT_EEK_BATCH, key=k1, user=luser@REALM\\] testmsg"
+ "OK\\[op=REENCRYPT_EEK_BATCH, key=k1, user=luser@REALM\\] "
+ "testmsg");
Assert.assertTrue(doesMatch);
}

@Test
@SuppressWarnings("checkstyle:linelength")
public void testAggregationUnauth() throws Exception {
UserGroupInformation luser = Mockito.mock(UserGroupInformation.class);
Mockito.when(luser.getShortUserName()).thenReturn("luser");
kmsAudit.unauthorized(luser, KMSOp.GENERATE_EEK, "k2");
kmsAudit.evictCacheForTesting();
kmsAudit.ok(luser, KMSOp.GENERATE_EEK, "k3", "testmsg");
Expand All @@ -159,25 +160,29 @@ public void testAggregationUnauth() throws Exception {
// The UNAUTHORIZED will trigger cache invalidation, which then triggers
// the aggregated OK (accessCount=5). But the order of the UNAUTHORIZED and
// the aggregated OK is arbitrary - no correctness concerns, but flaky here.
Assert.assertTrue(out.matches(
"UNAUTHORIZED\\[op=GENERATE_EEK, key=k2, user=luser\\] "
+ "OK\\[op=GENERATE_EEK, key=k3, user=luser, accessCount=1, interval=[^m]{1,4}ms\\] testmsg"
+ "OK\\[op=GENERATE_EEK, key=k3, user=luser, accessCount=5, interval=[^m]{1,4}ms\\] testmsg"
+ "UNAUTHORIZED\\[op=GENERATE_EEK, key=k3, user=luser\\] "
+ "OK\\[op=GENERATE_EEK, key=k3, user=luser, accessCount=1, interval=[^m]{1,4}ms\\] testmsg")
|| out.matches(
"UNAUTHORIZED\\[op=GENERATE_EEK, key=k2, user=luser\\] "
+ "OK\\[op=GENERATE_EEK, key=k3, user=luser, accessCount=1, interval=[^m]{1,4}ms\\] testmsg"
+ "UNAUTHORIZED\\[op=GENERATE_EEK, key=k3, user=luser\\] "
+ "OK\\[op=GENERATE_EEK, key=k3, user=luser, accessCount=5, interval=[^m]{1,4}ms\\] testmsg"
+ "OK\\[op=GENERATE_EEK, key=k3, user=luser, accessCount=1, interval=[^m]{1,4}ms\\] testmsg"));
boolean doesMatch = out.matches(
"UNAUTHORIZED\\[op=GENERATE_EEK, key=k2, user=luser@REALM\\] "
+ "OK\\[op=GENERATE_EEK, key=k3, user=luser@REALM, accessCount=1,"
+ " interval=[^m]{1,4}ms\\] testmsg"
+ "OK\\[op=GENERATE_EEK, key=k3, user=luser@REALM, accessCount=5,"
+ " interval=[^m]{1,4}ms\\] testmsg"
+ "UNAUTHORIZED\\[op=GENERATE_EEK, key=k3, user=luser@REALM\\] "
+ "OK\\[op=GENERATE_EEK, key=k3, user=luser@REALM, accessCount=1,"
+ " interval=[^m]{1,4}ms\\] testmsg");
doesMatch = doesMatch || out.matches(
"UNAUTHORIZED\\[op=GENERATE_EEK, key=k2, user=luser@REALM\\] "
+ "OK\\[op=GENERATE_EEK, key=k3, user=luser@REALM, accessCount=1,"
+ " interval=[^m]{1,4}ms\\] testmsg"
+ "UNAUTHORIZED\\[op=GENERATE_EEK, key=k3, user=luser@REALM\\] "
+ "OK\\[op=GENERATE_EEK, key=k3, user=luser@REALM, accessCount=5,"
+ " interval=[^m]{1,4}ms\\] testmsg"
+ "OK\\[op=GENERATE_EEK, key=k3, user=luser@REALM, accessCount=1,"
+ " interval=[^m]{1,4}ms\\] testmsg");
Assert.assertTrue(doesMatch);
}

@Test
@SuppressWarnings("checkstyle:linelength")
public void testAuditLogFormat() throws Exception {
UserGroupInformation luser = Mockito.mock(UserGroupInformation.class);
Mockito.when(luser.getShortUserName()).thenReturn("luser");
kmsAudit.ok(luser, KMSOp.GENERATE_EEK, "k4", "testmsg");
kmsAudit.ok(luser, KMSOp.GENERATE_EEK, "testmsg");
kmsAudit.evictCacheForTesting();
Expand All @@ -187,12 +192,15 @@ public void testAuditLogFormat() throws Exception {
String out = getAndResetLogOutput();
System.out.println(out);
Assert.assertTrue(out.matches(
"OK\\[op=GENERATE_EEK, key=k4, user=luser, accessCount=1, interval=[^m]{1,4}ms\\] testmsg"
+ "OK\\[op=GENERATE_EEK, user=luser\\] testmsg"
+ "OK\\[op=GENERATE_EEK, key=k4, user=luser, accessCount=1, interval=[^m]{1,4}ms\\] testmsg"
+ "UNAUTHORIZED\\[op=DECRYPT_EEK, key=k4, user=luser\\] "
+ "ERROR\\[user=luser\\] Method:'method' Exception:'testmsg'"
+ "UNAUTHENTICATED RemoteHost:remotehost Method:method URL:url ErrorMsg:'testmsg'"));
"OK\\[op=GENERATE_EEK, key=k4, user=luser@REALM, accessCount=1, "
+ "interval=[^m]{1,4}ms\\] testmsg"
+ "OK\\[op=GENERATE_EEK, user=luser@REALM\\] testmsg"
+ "OK\\[op=GENERATE_EEK, key=k4, user=luser@REALM, accessCount=1,"
+ " interval=[^m]{1,4}ms\\] testmsg"
+ "UNAUTHORIZED\\[op=DECRYPT_EEK, key=k4, user=luser@REALM\\] "
+ "ERROR\\[user=luser@REALM\\] Method:'method' Exception:'testmsg'"
+ "UNAUTHENTICATED RemoteHost:remotehost Method:method URL:url "
+ "ErrorMsg:'testmsg'"));
}

@SuppressWarnings("unchecked")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -198,7 +198,7 @@ message StorageTypeQuotaInfosProto {
}

message StorageTypeQuotaInfoProto {
required StorageTypeProto type = 1;
optional StorageTypeProto type = 1 [default = DISK];
required uint64 quota = 2;
required uint64 consumed = 3;
}
Expand Down
Loading