diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/client/CertificateClient.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/client/CertificateClient.java index 12a47712c5bb..d969439c3a02 100644 --- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/client/CertificateClient.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/client/CertificateClient.java @@ -35,6 +35,8 @@ import java.util.List; import java.util.Objects; import java.util.Set; +import java.util.concurrent.CompletableFuture; +import java.util.function.Function; import static org.apache.hadoop.hdds.security.exception.OzoneSecurityException.ResultCodes.OM_PUBLIC_PRIVATE_KEY_FILE_NOT_EXIST; @@ -232,6 +234,15 @@ default void assertValidKeysAndCertificate() throws OzoneSecurityException { */ void registerNotificationReceiver(CertificateNotification receiver); + /** + * Registers a listener that will be notified when the CA certificates + * change. + * + * @param listener the listener to call with the updated list of CA + * certificates. + */ + void registerRootCARotationListener( + Function<List<X509Certificate>, CompletableFuture<Void>> listener); /** * Initialize certificate client. diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/security/x509/keys/HDDSKeyGenerator.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/keys/HDDSKeyGenerator.java similarity index 100% rename from hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/security/x509/keys/HDDSKeyGenerator.java rename to hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/keys/HDDSKeyGenerator.java diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/keys/package-info.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/keys/package-info.java new file mode 100644 index 000000000000..4fffbf7da72b --- /dev/null +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/keys/package-info.java @@ -0,0 +1,23 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +/** + * Utils for private and public keys. + */ +package org.apache.hadoop.hdds.security.x509.keys; diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/package-info.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/package-info.java index 33f607106cc3..85f1b7ec782f 100644 --- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/package-info.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/security/x509/package-info.java @@ -23,77 +23,3 @@ * framework for HDDS.
*/ package org.apache.hadoop.hdds.security.x509; -/* - -Architecture of Certificate Infrastructure for SCM. -==================================================== - -The certificate infrastructure has two main parts, the certificate server or -the Certificate authority and the clients who want certificates. The CA is -responsible for issuing certificates to participating entities. - -To issue a certificate the CA has to verify the identity and the assertions -in the certificate. The client starts off making a request to CA for a -certificate. This request is called Certificate Signing Request or CSR -(PKCS#10). - -When a CSR arrives on the CA, CA will decode the CSR and verify that all the -fields in the CSR are in line with what the system expects. Since there are -lots of possible ways to construct an X.509 certificate, we rely on PKI -profiles. - -Generally, PKI profiles are policy documents or general guidelines that get -followed by the requester and CA. However, most of the PKI profiles that are -commonly available are general purpose and offers too much surface area. - -SCM CA infrastructure supports the notion of a PKI profile class which can -codify the RDNs, Extensions and other certificate policies. The CA when -issuing a certificate will invoke a certificate approver class, based on the -authentication method used. For example, out of the box, we support manual, -Kerberos, trusted network and testing authentication mechanisms. - -If there is no authentication mechanism in place, then when CA receives the -CSR, it runs the standard PKI profile over it verify that all the fields are -in expected ranges. Once that is done, The signing request is sent for human -review and approval. This form of certificate approval is called Manual, Of -all the certificate approval process this is the ** most secure **. This -approval needs to be done once for each data node. - -For existing clusters, where data nodes already have a Kerberos keytab, we -can leverage the Kerberos identity mechanism to identify the data node that -is requesting the certificate. In this case, users can configure the system -to leverage Kerberos while issuing certificates and SCM CA will be able to -verify the data nodes identity and issue certificates automatically. - -In environments like Kubernetes, we can leverage the base system services to -pass on a shared secret securely. In this model also, we can rely on these -secrets to make sure that is the right data node that is talking to us. This -kind of approval is called a Trusted network approval. In this process, each -data node not only sends the CSR but signs the request with a shared secret -with SCM. SCM then can issue a certificate without the intervention of a -human administrator. - -The last, TESTING method which never should be used other than in development - and testing clusters, is merely a mechanism to bypass all identity checks. If -this flag is setup, then CA will issue a CSR if the base approves all fields. - - * Please do not use this mechanism(TESTING) for any purpose other than - * testing. - -CA - Certificate Approval and Code Layout (as of Dec, 1st, 2018) -================================================================= -The CA implementation ( as of now it is called DefaultCA) receives a CSR from - the network layer. The network also tells the system what approver type to - use, that is if Kerberos or Shared secrets mechanism is used, it reports - that to Default CA. 
- -The default CA instantiates the approver based on the type of the approver -indicated by the network layer. This approver creates an instance of the PKI -profile and passes each field from the certificate signing request. The PKI -profile (as of today Dec 1st, 2018, we have one profile called Ozone profile) - verifies that each field in the CSR meets the approved set of values. - -Once the PKI Profile validates the request, it is either auto approved or -queued for manual review. - - */ diff --git a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/CertificateTestUtils.java b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/CertificateTestUtils.java new file mode 100644 index 000000000000..09d34fd0bb5c --- /dev/null +++ b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/CertificateTestUtils.java @@ -0,0 +1,162 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package org.apache.hadoop.hdds.security.x509; + +import org.apache.hadoop.hdds.conf.ConfigurationSource; +import org.apache.hadoop.hdds.security.SecurityConfig; +import org.apache.hadoop.hdds.security.x509.keys.HDDSKeyGenerator; +import org.bouncycastle.asn1.oiw.OIWObjectIdentifiers; +import org.bouncycastle.asn1.x500.X500Name; +import org.bouncycastle.asn1.x509.AlgorithmIdentifier; +import org.bouncycastle.asn1.x509.AuthorityKeyIdentifier; +import org.bouncycastle.asn1.x509.BasicConstraints; +import org.bouncycastle.asn1.x509.Extension; +import org.bouncycastle.asn1.x509.SubjectKeyIdentifier; +import org.bouncycastle.asn1.x509.SubjectPublicKeyInfo; +import org.bouncycastle.cert.X509ExtensionUtils; +import org.bouncycastle.cert.X509v3CertificateBuilder; +import org.bouncycastle.cert.jcajce.JcaX509CertificateConverter; +import org.bouncycastle.cert.jcajce.JcaX509v3CertificateBuilder; +import org.bouncycastle.jce.provider.BouncyCastleProvider; +import org.bouncycastle.operator.ContentSigner; +import org.bouncycastle.operator.DigestCalculator; +import org.bouncycastle.operator.OperatorCreationException; +import org.bouncycastle.operator.bc.BcDigestCalculatorProvider; +import org.bouncycastle.operator.jcajce.JcaContentSignerBuilder; + +import java.math.BigInteger; +import java.security.KeyPair; +import java.security.NoSuchAlgorithmException; +import java.security.NoSuchProviderException; +import java.security.cert.X509Certificate; +import java.time.Duration; +import java.time.Instant; +import java.util.Date; + +/** + * Test utilities to create simple certificates/keys for testing. + */ +public final class CertificateTestUtils { + private CertificateTestUtils() { } + + private static final String HASH_ALGO = "SHA256WithRSA"; + + /** + * Generates a keypair using the HDDSKeyGenerator with the given config. 
+ * + * @param conf the config applies to keys + * + * @return a newly generated keypair + * + * @throws NoSuchProviderException on wrong security provider in the config + * @throws NoSuchAlgorithmException on wrong encryption algo in the config + */ + public static KeyPair aKeyPair(ConfigurationSource conf) + throws NoSuchProviderException, NoSuchAlgorithmException { + return new HDDSKeyGenerator(new SecurityConfig(conf)).generateKey(); + } + + /** + * Creates a self-signed certificate and returns it as an X509Certificate. + * The given keys and common name are being used in the certificate. + * The certificate will have its serial id generated based on the hashcode + * of the public key, and will expire after 1 day. + * + * @param keys the keypair to use for the certificate + * @param commonName the common name used in the certificate + * + * @return the X509Certificate representing a self-signed certificate + * + * @throws Exception in case any error occurs during the certificate creation + */ + public static X509Certificate createSelfSignedCert(KeyPair keys, + String commonName) throws Exception { + return createSelfSignedCert(keys, commonName, Duration.ofDays(1)); + } + + /** + * Creates a self-signed certificate and returns it as an X509Certificate. + * The given keys and common name are being used in the certificate. + * The certificate will have its serial id generated based on the hashcode + * of the public key, and will expire after the specified duration. + * + * @param keys the keypair to use for the certificate + * @param commonName the common name used in the certificate + * @param expiresIn the lifespan of the certificate + * + * @return the X509Certificate representing a self-signed certificate + * + * @throws Exception in case any error occurs during the certificate creation + */ + public static X509Certificate createSelfSignedCert(KeyPair keys, + String commonName, Duration expiresIn) throws Exception { + final Instant now = Instant.now(); + final Date notBefore = Date.from(now); + final Date notAfter = Date.from(now.plus(expiresIn)); + final ContentSigner contentSigner = + new JcaContentSignerBuilder(HASH_ALGO).build(keys.getPrivate()); + final X500Name x500Name = new X500Name("CN=" + commonName); + + SubjectKeyIdentifier keyId = subjectKeyIdOf(keys); + AuthorityKeyIdentifier authorityKeyId = authorityKeyIdOf(keys); + BasicConstraints constraints = new BasicConstraints(true); + + final X509v3CertificateBuilder certificateBuilder = + new JcaX509v3CertificateBuilder( + x500Name, + BigInteger.valueOf(keys.getPublic().hashCode()), + notBefore, + notAfter, + x500Name, + keys.getPublic() + ); + certificateBuilder + .addExtension(Extension.subjectKeyIdentifier, false, keyId) + .addExtension(Extension.authorityKeyIdentifier, false, authorityKeyId) + .addExtension(Extension.basicConstraints, true, constraints); + + return new JcaX509CertificateConverter() + .setProvider(new BouncyCastleProvider()) + .getCertificate(certificateBuilder.build(contentSigner)); + } + + private static SubjectKeyIdentifier subjectKeyIdOf(KeyPair keys) + throws Exception { + return extensionUtil().createSubjectKeyIdentifier(pubKeyInfo(keys)); + } + + private static AuthorityKeyIdentifier authorityKeyIdOf(KeyPair keys) + throws Exception { + return extensionUtil().createAuthorityKeyIdentifier(pubKeyInfo(keys)); + } + + private static SubjectPublicKeyInfo pubKeyInfo(KeyPair keys) { + return SubjectPublicKeyInfo.getInstance(keys.getPublic().getEncoded()); + } + + private static X509ExtensionUtils 
extensionUtil() + throws OperatorCreationException { + DigestCalculator digest = + new BcDigestCalculatorProvider() + .get(new AlgorithmIdentifier(OIWObjectIdentifiers.idSHA1)); + + return new X509ExtensionUtils(digest); + } +} diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/security/x509/package-info.java b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/package-info.java similarity index 100% rename from hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/security/x509/package-info.java rename to hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/security/x509/package-info.java diff --git a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/client/DefaultCertificateClient.java b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/client/DefaultCertificateClient.java index 6dff86fd52ea..e066efef8c77 100644 --- a/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/client/DefaultCertificateClient.java +++ b/hadoop-hdds/framework/src/main/java/org/apache/hadoop/hdds/security/x509/certificate/client/DefaultCertificateClient.java @@ -58,6 +58,7 @@ import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; import java.util.function.Consumer; +import java.util.function.Function; import java.util.stream.Stream; import java.util.stream.Collectors; @@ -203,6 +204,14 @@ private void startRootCaRotationPoller() { } } + @Override + public void registerRootCARotationListener( + Function<List<X509Certificate>, CompletableFuture<Void>> listener) { + if (securityConfig.isAutoCARotationEnabled()) { + rootCaRotationPoller.addRootCARotationProcessor(listener); + } + } + private synchronized void readCertificateFile(Path filePath) { CertificateCodec codec = new CertificateCodec(securityConfig, component); String fileName = filePath.getFileName().toString(); diff --git a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/security/x509/certificate/client/CertificateClientTestImpl.java b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/security/x509/certificate/client/CertificateClientTestImpl.java index 40395ac128be..32fa5ef40eda 100644 --- a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/security/x509/certificate/client/CertificateClientTestImpl.java +++ b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/security/x509/certificate/client/CertificateClientTestImpl.java @@ -1,4 +1,4 @@ -/** +/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with this * work for additional information regarding copyright ownership. The ASF @@ -37,10 +37,12 @@ import java.util.List; import java.util.Map; import java.util.Set; +import java.util.concurrent.CompletableFuture; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; +import java.util.function.Function; import com.google.common.util.concurrent.ThreadFactoryBuilder; import org.apache.hadoop.hdds.conf.OzoneConfiguration; @@ -386,6 +388,13 @@ public void registerNotificationReceiver(CertificateNotification receiver) { } } + @Override + public void registerRootCARotationListener( + Function<List<X509Certificate>, CompletableFuture<Void>> listener) { + // We do not have tests that rely on root CA rotation at the moment, so + // this implementation is intentionally left blank for now.
+ } + @Override public void close() throws IOException { if (serverKeyStoresFactory != null) { diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/ha/TestInterSCMGrpcProtocolService.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/ha/TestInterSCMGrpcProtocolService.java index 998839b2d835..8e644c585a82 100644 --- a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/ha/TestInterSCMGrpcProtocolService.java +++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/ha/TestInterSCMGrpcProtocolService.java @@ -19,38 +19,18 @@ import org.apache.commons.compress.archivers.tar.TarArchiveInputStream; import org.apache.hadoop.hdds.HddsConfigKeys; -import org.apache.hadoop.hdds.conf.ConfigurationSource; import org.apache.hadoop.hdds.conf.OzoneConfiguration; import org.apache.hadoop.hdds.scm.ScmConfigKeys; import org.apache.hadoop.hdds.scm.metadata.SCMMetadataStore; import org.apache.hadoop.hdds.scm.server.StorageContainerManager; import org.apache.hadoop.hdds.security.ssl.KeyStoresFactory; -import org.apache.hadoop.hdds.security.SecurityConfig; +import org.apache.hadoop.hdds.security.x509.CertificateTestUtils; import org.apache.hadoop.hdds.security.x509.certificate.client.SCMCertificateClient; -import org.apache.hadoop.hdds.security.x509.keys.HDDSKeyGenerator; import org.apache.hadoop.hdds.utils.TransactionInfo; import org.apache.hadoop.hdds.utils.db.DBCheckpoint; import org.apache.hadoop.hdds.utils.db.DBStore; import org.apache.hadoop.hdds.utils.db.Table; import org.apache.hadoop.ozone.OzoneConfigKeys; -import org.bouncycastle.asn1.oiw.OIWObjectIdentifiers; -import org.bouncycastle.asn1.x500.X500Name; -import org.bouncycastle.asn1.x509.AlgorithmIdentifier; -import org.bouncycastle.asn1.x509.AuthorityKeyIdentifier; -import org.bouncycastle.asn1.x509.BasicConstraints; -import org.bouncycastle.asn1.x509.Extension; -import org.bouncycastle.asn1.x509.SubjectKeyIdentifier; -import org.bouncycastle.asn1.x509.SubjectPublicKeyInfo; -import org.bouncycastle.cert.X509ExtensionUtils; -import org.bouncycastle.cert.X509v3CertificateBuilder; -import org.bouncycastle.cert.jcajce.JcaX509CertificateConverter; -import org.bouncycastle.cert.jcajce.JcaX509v3CertificateBuilder; -import org.bouncycastle.jce.provider.BouncyCastleProvider; -import org.bouncycastle.operator.ContentSigner; -import org.bouncycastle.operator.DigestCalculator; -import org.bouncycastle.operator.OperatorCreationException; -import org.bouncycastle.operator.bc.BcDigestCalculatorProvider; -import org.bouncycastle.operator.jcajce.JcaContentSignerBuilder; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; @@ -64,22 +44,17 @@ import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; -import java.math.BigInteger; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.security.KeyPair; -import java.security.NoSuchAlgorithmException; -import java.security.NoSuchProviderException; import java.security.cert.CertificateException; import java.security.cert.X509Certificate; -import java.time.Duration; -import java.time.Instant; -import java.util.Date; import java.util.Random; import java.util.concurrent.CompletableFuture; import static java.nio.charset.StandardCharsets.UTF_8; +import static org.apache.hadoop.hdds.security.x509.CertificateTestUtils.createSelfSignedCert; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; import static 
org.mockito.ArgumentMatchers.any; @@ -225,8 +200,8 @@ private Table trInfoTable() private SCMCertificateClient setupCertificateClientForMTLS( OzoneConfiguration conf ) throws Exception { - KeyPair serviceKeys = aKeyPair(conf); - KeyPair clientKeys = aKeyPair(conf); + KeyPair serviceKeys = CertificateTestUtils.aKeyPair(conf); + KeyPair clientKeys = CertificateTestUtils.aKeyPair(conf); serviceCert = createSelfSignedCert(serviceKeys, "service"); clientCert = createSelfSignedCert(clientKeys, "client"); @@ -287,11 +262,6 @@ private X509KeyManager aKeyManagerWith(KeyPair keyPair, return keyManager; } - private KeyPair aKeyPair(ConfigurationSource conf) - throws NoSuchProviderException, NoSuchAlgorithmException { - return new HDDSKeyGenerator(new SecurityConfig(conf)).generateKey(); - } - private OzoneConfiguration setupConfiguration(int port) { OzoneConfiguration conf = new OzoneConfiguration(); conf.setInt(ScmConfigKeys.OZONE_SCM_GRPC_PORT_KEY, port); @@ -301,60 +271,4 @@ private OzoneConfiguration setupConfiguration(int port) { } - private static final String HASH_ALGO = "SHA256WithRSA"; - - private X509Certificate createSelfSignedCert(KeyPair keys, String commonName) - throws Exception { - final Instant now = Instant.now(); - final Date notBefore = Date.from(now); - final Date notAfter = Date.from(now.plus(Duration.ofDays(1))); - final ContentSigner contentSigner = - new JcaContentSignerBuilder(HASH_ALGO).build(keys.getPrivate()); - final X500Name x500Name = new X500Name("CN=" + commonName); - - SubjectKeyIdentifier keyId = subjectKeyIdOf(keys); - AuthorityKeyIdentifier authorityKeyId = authorityKeyIdOf(keys); - BasicConstraints constraints = new BasicConstraints(true); - - final X509v3CertificateBuilder certificateBuilder = - new JcaX509v3CertificateBuilder( - x500Name, - BigInteger.valueOf(keys.getPublic().hashCode()), - notBefore, - notAfter, - x500Name, - keys.getPublic() - ); - certificateBuilder - .addExtension(Extension.subjectKeyIdentifier, false, keyId) - .addExtension(Extension.authorityKeyIdentifier, false, authorityKeyId) - .addExtension(Extension.basicConstraints, true, constraints); - - return new JcaX509CertificateConverter() - .setProvider(new BouncyCastleProvider()) - .getCertificate(certificateBuilder.build(contentSigner)); - } - - private SubjectKeyIdentifier subjectKeyIdOf(KeyPair keys) throws Exception { - return extensionUtil().createSubjectKeyIdentifier(pubKeyInfo(keys)); - } - - private AuthorityKeyIdentifier authorityKeyIdOf(KeyPair keys) - throws Exception { - return extensionUtil().createAuthorityKeyIdentifier(pubKeyInfo(keys)); - } - - private SubjectPublicKeyInfo pubKeyInfo(KeyPair keys) { - return SubjectPublicKeyInfo.getInstance(keys.getPublic().getEncoded()); - } - - private X509ExtensionUtils extensionUtil() - throws OperatorCreationException { - DigestCalculator digest = - new BcDigestCalculatorProvider() - .get(new AlgorithmIdentifier(OIWObjectIdentifiers.idSHA1)); - - return new X509ExtensionUtils(digest); - } - } diff --git a/hadoop-ozone/dist/src/main/compose/ozonesecure-ha/root-ca-rotation.yaml b/hadoop-ozone/dist/src/main/compose/ozonesecure-ha/root-ca-rotation.yaml index b85785435542..0ca8d10c2408 100644 --- a/hadoop-ozone/dist/src/main/compose/ozonesecure-ha/root-ca-rotation.yaml +++ b/hadoop-ozone/dist/src/main/compose/ozonesecure-ha/root-ca-rotation.yaml @@ -25,6 +25,7 @@ x-root-cert-rotation-config: - OZONE-SITE.XML_hdds.x509.renew.grace.duration=PT45S - OZONE-SITE.XML_hdds.x509.ca.rotation.check.interval=PT1S - 
OZONE-SITE.XML_hdds.x509.ca.rotation.ack.timeout=PT20S + - OZONE-SITE.XML_hdds.x509.rootca.certificate.polling.interval=PT10s - OZONE-SITE.XML_hdds.block.token.expiry.time=15s - OZONE-SITE.XML_ozone.manager.delegation.token.max-lifetime=15s - OZONE-SITE.XML_ozone.manager.delegation.token.renew-interval=15s diff --git a/hadoop-ozone/dist/src/main/compose/ozonesecure-ha/test-root-ca-rotation.sh b/hadoop-ozone/dist/src/main/compose/ozonesecure-ha/test-root-ca-rotation.sh index a823719ceaf6..2a851b2cb771 100755 --- a/hadoop-ozone/dist/src/main/compose/ozonesecure-ha/test-root-ca-rotation.sh +++ b/hadoop-ozone/dist/src/main/compose/ozonesecure-ha/test-root-ca-rotation.sh @@ -45,6 +45,7 @@ wait_for_execute_command scm1.org 240 "ozone admin cert info 2" # transfer leader to scm2.org execute_robot_test scm1.org scmha/scm-leader-transfer.robot wait_for_execute_command scm1.org 30 "jps | grep StorageContainerManagerStarter | sed 's/StorageContainerManagerStarter//' | xargs | xargs -I {} jstack {} | grep 'RootCARotationManager-Inactive'" +execute_robot_test scm1.org -v PREFIX:"rootca" certrotation/root-ca-rotation-client-checks.robot # verify om operations execute_commands_in_container scm1.org "ozone sh volume create /r-v1 && ozone sh bucket create /r-v1/r-b1" @@ -65,6 +66,11 @@ wait_for_execute_command scm4.org 30 "ozone admin cert list --role=scm | grep sc # wait for next root CA rotation wait_for_execute_command scm4.org 240 "ozone admin cert info 4" +wait_for_execute_command om1 30 "find /data/metadata/om/certs/ROOTCA-4.crt" +wait_for_execute_command om2 30 "find /data/metadata/om/certs/ROOTCA-4.crt" +wait_for_execute_command om3 30 "find /data/metadata/om/certs/ROOTCA-4.crt" +execute_robot_test scm4.org -v PREFIX:"rootca2" certrotation/root-ca-rotation-client-checks.robot #transfer leader to scm4.org execute_robot_test scm4.org -v "TARGET_SCM:scm4.org" scmha/scm-leader-transfer.robot diff --git a/hadoop-ozone/dist/src/main/compose/ozonesecure/test-root-ca-rotation.sh b/hadoop-ozone/dist/src/main/compose/ozonesecure/test-root-ca-rotation.sh index 9858d41eb8d4..2f28c8736713 100755 --- a/hadoop-ozone/dist/src/main/compose/ozonesecure/test-root-ca-rotation.sh +++ b/hadoop-ozone/dist/src/main/compose/ozonesecure/test-root-ca-rotation.sh @@ -41,13 +41,20 @@ wait_for_execute_command scm 30 "jps | grep StorageContainerManagerStarter | se # wait and verify root CA is rotated wait_for_execute_command scm 180 "ozone admin cert info 2" wait_for_execute_command datanode 30 "find /data/metadata/dn/certs/ROOTCA-2.crt" +# We also need to wait for the new root CA certificate to reach the OM, because +# it may get there later than to the DataNode; until then the client will not +# trust a DataNode using the new certificate, and it does not yet refetch the +# CA certs on failure (that will be implemented in HDDS-8958).
+wait_for_execute_command om 30 "find /data/metadata/om/certs/ROOTCA-2.crt" +execute_robot_test scm -v PREFIX:"rootca" certrotation/root-ca-rotation-client-checks.robot # verify om operations and data operations execute_commands_in_container scm "ozone sh volume create /r-v1 && ozone sh bucket create /r-v1/r-b1" # wait for second root CA rotation wait_for_execute_command scm 180 "ozone admin cert info 3" - +wait_for_execute_command om 30 "find /data/metadata/om/certs/ROOTCA-3.crt" +execute_robot_test scm -v PREFIX:"rootca2" certrotation/root-ca-rotation-client-checks.robot # check the metrics execute_robot_test scm scmha/root-ca-rotation.robot diff --git a/hadoop-ozone/dist/src/main/smoketest/certrotation/root-ca-rotation-client-checks.robot b/hadoop-ozone/dist/src/main/smoketest/certrotation/root-ca-rotation-client-checks.robot new file mode 100644 index 000000000000..8529b338e008 --- /dev/null +++ b/hadoop-ozone/dist/src/main/smoketest/certrotation/root-ca-rotation-client-checks.robot @@ -0,0 +1,46 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +*** Settings *** +Documentation Generate data +Library OperatingSystem +Library BuiltIn +Resource ../commonlib.robot +Test Timeout 5 minutes + +*** Variables *** +${PREFIX} rootca + +*** Test Cases *** +Create a volume and bucket + [Tags] create-volume-and-bucket + ${output} = Execute ozone sh volume create ${PREFIX}-volume + Should not contain ${output} Failed + ${output} = Execute ozone sh bucket create /${PREFIX}-volume/${PREFIX}-bucket + Should not contain ${output} Failed + +Create key + Execute and checkrc echo "${PREFIX}: key created using Ozone Shell" > /tmp/sourcekey 0 + ${output} = Execute ozone sh key put /${PREFIX}-volume/${PREFIX}-bucket/${PREFIX}-key /tmp/sourcekey + Should not contain ${output} Failed + Execute and checkrc rm /tmp/sourcekey 0 + +Read data from previously created key + ${random} = Generate Random String 5 [NUMBERS] + ${output} = Execute ozone sh key get /${PREFIX}-volume/${PREFIX}-bucket/${PREFIX}-key /tmp/key-${random} + Should not contain ${output} Failed + ${output} = Execute and checkrc cat /tmp/key-${random} 0 + Should contain ${output} ${PREFIX}: key created using Ozone Shell + Execute and checkrc rm /tmp/key-${random} 0 \ No newline at end of file diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestSecureOzoneCluster.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestSecureOzoneCluster.java index 100f4400bc26..129ae506a9b7 100644 --- a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestSecureOzoneCluster.java +++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestSecureOzoneCluster.java @@ -923,6 +923,9 @@ public void testCertificateRotation() throws Exception { omStorage.forceInitialize(); CertificateCodec certCodec = new CertificateCodec(securityConfig, "om"); certCodec.writeCertificate(certHolder); + String caCertFileName = CAType.ROOT.getFileNamePrefix() + + certHolder.getSerialNumber().toString() + ".crt"; + certCodec.writeCertificate(certHolder, caCertFileName); // first renewed cert X509CertificateHolder newCertHolder = diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/OzoneManager.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/OzoneManager.java index 3c0ffbdb7b61..67d5a41df162 100644 --- a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/OzoneManager.java +++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/OzoneManager.java @@ -110,7 +110,6 @@ import org.apache.hadoop.hdds.security.symmetric.DefaultSecretKeySignerClient; import org.apache.hadoop.hdds.security.token.OzoneBlockTokenSecretManager; import org.apache.hadoop.hdds.security.x509.certificate.client.CertificateClient; -import org.apache.hadoop.hdds.security.x509.certificate.utils.CertificateCodec; import org.apache.hadoop.hdds.security.x509.certificate.utils.CertificateSignRequest; import org.apache.hadoop.hdds.server.ServiceRuntimeInfoImpl; import org.apache.hadoop.hdds.server.http.RatisDropwizardExports; @@ -346,14 +345,13 @@ public final class OzoneManager extends ServiceRuntimeInfoImpl private OzoneBlockTokenSecretManager blockTokenMgr; private CertificateClient certClient; private SecretKeySignerClient secretKeyClient; - private String caCertPem = null; - private List caCertPemList = new ArrayList<>(); private final Text omRpcAddressTxt; private OzoneConfiguration configuration; private RPC.Server omRpcServer; private GrpcOzoneManagerServer omS3gGrpcServer; private final 
InetSocketAddress omRpcAddress; private final String omId; + private ServiceInfoProvider serviceInfo; private OMMetadataManager metadataManager; private OMMultiTenantManager multiTenantManager; @@ -642,6 +640,9 @@ private OzoneManager(OzoneConfiguration conf, StartupOption startupOption) HddsServerUtil.getSecretKeyClientForOm(conf); secretKeyClient = new DefaultSecretKeySignerClient(secretKeyProtocol); } + serviceInfo = new ServiceInfoProvider(secConfig, this, certClient, + testSecureOmFlag); + if (secConfig.isBlockTokenEnabled()) { blockTokenMgr = createBlockTokenSecretManager(); } @@ -1114,6 +1115,7 @@ public void setCertClient(CertificateClient newClient) throws IOException { certClient.close(); } certClient = newClient; + serviceInfo = new ServiceInfoProvider(secConfig, this, certClient); } /** @@ -1636,13 +1638,6 @@ public void start() throws IOException { versionManager.getMetadataLayoutVersion(), layoutVersionInDB); } - // Perform this to make it work with old clients. - if (certClient != null) { - caCertPem = - CertificateCodec.getPEMEncodedString(certClient.getCACertificate()); - caCertPemList = HAUtils.buildCAList(certClient, configuration); - } - // Set metrics and start metrics back ground thread metrics.setNumVolumes(metadataManager.countRowsInTable(metadataManager .getVolumeTable())); @@ -3091,7 +3086,7 @@ public List getServiceList() throws IOException { @Override public ServiceInfoEx getServiceInfo() throws IOException { - return new ServiceInfoEx(getServiceList(), caCertPem, caCertPemList); + return serviceInfo.provide(); } @Override diff --git a/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/ServiceInfoProvider.java b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/ServiceInfoProvider.java new file mode 100644 index 000000000000..d2f81353325b --- /dev/null +++ b/hadoop-ozone/ozone-manager/src/main/java/org/apache/hadoop/ozone/om/ServiceInfoProvider.java @@ -0,0 +1,164 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +package org.apache.hadoop.ozone.om; + +import org.apache.hadoop.hdds.security.SecurityConfig; +import org.apache.hadoop.hdds.security.exception.SCMSecurityException; +import org.apache.hadoop.hdds.security.x509.certificate.client.CertificateClient; +import org.apache.hadoop.hdds.security.x509.certificate.utils.CertificateCodec; +import org.apache.hadoop.ozone.om.helpers.ServiceInfoEx; +import org.apache.hadoop.ozone.om.protocol.OzoneManagerProtocol; +import org.slf4j.Logger; + +import java.io.IOException; +import java.security.cert.X509Certificate; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.Set; +import java.util.concurrent.CompletableFuture; +import java.util.function.Function; +import java.util.stream.Collectors; + +import static java.util.Collections.emptyList; +import static java.util.Comparator.comparing; +import static org.slf4j.LoggerFactory.getLogger; + +/** + * A helper class for Ozone Manager that caches the current CA certificates in + * PEM format and keeps these cached values up to date, so that OM can provide + * this information to clients that need to trust the DataNode certificates in + * a secure environment. + */ +final class ServiceInfoProvider { + + private static final Logger LOG = getLogger(ServiceInfoProvider.class); + + private final OzoneManagerProtocol om; + private final CertificateClient certClient; + + private String caCertPEM; + private List<String> caCertPEMList; + + /** + * Initializes the provider. + * The OzoneManagerProtocol implementation is used to provide the service + * list part of the ServiceInfoEx object this class provides for OM, while + * the SecurityConfig and the CertificateClient are used to provide security + * and trust related information within the same object. + * + * @param config the current security configuration + * @param om the OzoneManagerProtocol that provides the service list + * @param certClient the CertificateClient that provides certificate information + */ + ServiceInfoProvider(SecurityConfig config, OzoneManagerProtocol om, + CertificateClient certClient) { + this(config, om, certClient, false); + } + + /** + * Initializes the provider. + * The OzoneManagerProtocol implementation is used to provide the service + * list part of the ServiceInfoEx object this class provides for OM, while + * the SecurityConfig and the CertificateClient are used to provide security + * and trust related information within the same object. + * + * In some tests OM initializes this class before a properly set up + * CertificateClient is given to it, and the initialization code would fail. + * OM therefore re-creates the provider once a new CertificateClient is set; + * for this to work, the initial initialization is skipped when we are in a + * test process.
+ * + * @param config the current security configuration + * @param om the OzoneManagerProtocol that provides the service list + * @param certClient the CertificateClient that provides certificate information + * @param skipInitializationForTesting true when OM is tested in a secure + * environment without a fully set up CertificateClient + */ + ServiceInfoProvider(SecurityConfig config, OzoneManagerProtocol om, + CertificateClient certClient, boolean skipInitializationForTesting) { + this.om = om; + if (config.isSecurityEnabled() && !skipInitializationForTesting) { + this.certClient = certClient; + Set<X509Certificate> certs = getCACertificates(); + caCertPEM = toPEMEncodedString(newestOf(certs)); + caCertPEMList = toPEMEncodedStrings(certs); + this.certClient.registerRootCARotationListener(onRootCAChange()); + } else { + this.certClient = null; + caCertPEM = null; + caCertPEMList = emptyList(); + } + } + + private Function<List<X509Certificate>, CompletableFuture<Void>> + onRootCAChange() { + return certs -> { + CompletableFuture<Void> returnedFuture = new CompletableFuture<>(); + try { + synchronized (this) { + caCertPEM = toPEMEncodedString(newestOf(certs)); + caCertPEMList = toPEMEncodedStrings(certs); + } + returnedFuture.complete(null); + } catch (Exception e) { + LOG.error("Unable to refresh cached PEM formatted CA certificates.", e); + returnedFuture.completeExceptionally(e); + } + return returnedFuture; + }; + } + + public ServiceInfoEx provide() throws IOException { + String returnedCaCertPEM; + List<String> returnedCaCertPEMList; + synchronized (this) { + returnedCaCertPEM = caCertPEM; + returnedCaCertPEMList = new ArrayList<>(caCertPEMList); + } + return new ServiceInfoEx( + om.getServiceList(), returnedCaCertPEM, returnedCaCertPEMList); + } + + private Set<X509Certificate> getCACertificates() { + Set<X509Certificate> rootCerts = certClient.getAllRootCaCerts(); + return !rootCerts.isEmpty() ? rootCerts : certClient.getAllCaCerts(); + } + + private X509Certificate newestOf(Collection<X509Certificate> certs) { + return certs.stream() + .max(comparing(X509Certificate::getNotAfter)) + .orElse(null); + } + + private String toPEMEncodedString(X509Certificate cert) { + try { + return cert == null ? null : CertificateCodec.getPEMEncodedString(cert); + } catch (SCMSecurityException e) { + throw new RuntimeException(e); + } + } + + private List<String> toPEMEncodedStrings(Collection<X509Certificate> certs) { + return certs.stream() + .map(this::toPEMEncodedString) + .collect(Collectors.toList()); + } +} diff --git a/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/om/TestServiceInfoProvider.java b/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/om/TestServiceInfoProvider.java new file mode 100644 index 000000000000..2fa69aff1567 --- /dev/null +++ b/hadoop-ozone/ozone-manager/src/test/java/org/apache/hadoop/ozone/om/TestServiceInfoProvider.java @@ -0,0 +1,163 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package org.apache.hadoop.ozone.om; + +import org.apache.hadoop.hdds.conf.OzoneConfiguration; +import org.apache.hadoop.hdds.security.SecurityConfig; +import org.apache.hadoop.hdds.security.x509.certificate.client.CertificateClient; +import org.apache.hadoop.ozone.om.helpers.ServiceInfoEx; +import org.apache.hadoop.ozone.om.protocol.OzoneManagerProtocol; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Nested; +import org.junit.jupiter.api.Test; +import org.mockito.ArgumentCaptor; + +import java.security.cert.X509Certificate; +import java.time.Duration; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.concurrent.CompletableFuture; +import java.util.function.Function; + +import static java.util.Collections.emptyList; +import static org.apache.hadoop.hdds.security.x509.CertificateTestUtils.aKeyPair; +import static org.apache.hadoop.hdds.security.x509.CertificateTestUtils.createSelfSignedCert; +import static org.apache.hadoop.hdds.security.x509.certificate.utils.CertificateCodec.getPEMEncodedString; +import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_SECURITY_ENABLED_KEY; +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.CoreMatchers.nullValue; +import static org.hamcrest.CoreMatchers.sameInstance; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.empty; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +/** + * Tests to check functionality of how we provide the ServiceInfoEx object + * from OM to clients. + */ +public class TestServiceInfoProvider { + + private OzoneConfiguration conf; + private OzoneManagerProtocol om; + + @BeforeEach + public void setup() throws Exception { + conf = new OzoneConfiguration(); + + om = mock(OzoneManagerProtocol.class); + when(om.getServiceList()).thenReturn(emptyList()); + } + + /** + * Tests for unsecure environment. + */ + @Nested + public class UnsecureEnvironment { + + private ServiceInfoProvider provider; + + @BeforeEach + public void setup() throws Exception { + conf.setBoolean(OZONE_SECURITY_ENABLED_KEY, false); + provider = new ServiceInfoProvider(new SecurityConfig(conf), om, null); + } + + @Test + public void test() throws Exception { + ServiceInfoEx info = provider.provide(); + + assertThat(info.getServiceInfoList(), sameInstance(emptyList())); + assertThat(info.getCaCertificate(), is(nullValue())); + assertThat(info.getCaCertPemList(), is(empty())); + } + } + + /** + * Tests for secure environment. 
+ */ + @Nested + public class TestSecureEnvironment { + + private CertificateClient certClient; + private X509Certificate cert1; + private String pem1; + private X509Certificate cert2; + private String pem2; + private ServiceInfoProvider provider; + + @BeforeEach + public void setup() throws Exception { + conf.setBoolean(OZONE_SECURITY_ENABLED_KEY, true); + certClient = mock(CertificateClient.class); + cert1 = createSelfSignedCert(aKeyPair(conf), "1st", Duration.ofDays(1)); + pem1 = getPEMEncodedString(cert1); + cert2 = createSelfSignedCert(aKeyPair(conf), "2nd", Duration.ofDays(2)); + pem2 = getPEMEncodedString(cert2); + when(certClient.getAllRootCaCerts()) + .thenReturn(new HashSet<>(Arrays.asList(cert1, cert2))); + provider = + new ServiceInfoProvider(new SecurityConfig(conf), om, certClient); + } + + @Test + public void withoutRootCARenew() throws Exception { + ServiceInfoEx info = provider.provide(); + + assertThat(info.getServiceInfoList(), sameInstance(emptyList())); + assertThat(info.getCaCertificate(), is(equalTo(pem2))); + assertThat(info.getCaCertPemList(), containsInAnyOrder(pem1, pem2)); + + info = provider.provide(); + + assertThat(info.getServiceInfoList(), sameInstance(emptyList())); + assertThat(info.getCaCertificate(), is(equalTo(pem2))); + assertThat(info.getCaCertPemList(), containsInAnyOrder(pem1, pem2)); + } + + @Test + public void withRootCARenew() throws Exception { + ServiceInfoEx info = provider.provide(); + + assertThat(info.getServiceInfoList(), sameInstance(emptyList())); + assertThat(info.getCaCertificate(), is(equalTo(pem2))); + assertThat(info.getCaCertPemList(), containsInAnyOrder(pem1, pem2)); + + X509Certificate cert3 = + createSelfSignedCert(aKeyPair(conf), "cn", Duration.ofDays(3)); + String pem3 = getPEMEncodedString(cert3); + List<X509Certificate> certs = Arrays.asList(cert2, cert3); + ArgumentCaptor<Function<List<X509Certificate>, CompletableFuture<Void>>> + captor = ArgumentCaptor.forClass(Function.class); + verify(certClient).registerRootCARotationListener(captor.capture()); + captor.getValue().apply(certs).join(); + + info = provider.provide(); + + assertThat(info.getServiceInfoList(), sameInstance(emptyList())); + assertThat(info.getCaCertificate(), is(equalTo(pem3))); + assertThat(info.getCaCertPemList(), containsInAnyOrder(pem2, pem3)); + } + } +}
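To illustrate the interaction this change introduces between CertificateClient#registerRootCARotationListener and the ServiceInfoProvider cache, here is a minimal, self-contained sketch of the same cache-and-refresh pattern. The class names RotationSource and CachingProvider and the use of plain strings are illustrative stand-ins, not the Ozone API; the real listener receives a List<X509Certificate> and returns a CompletableFuture<Void>, and the provider caches PEM-encoded strings.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.function.Function;

public final class RootCaRotationListenerSketch {

  // Simplified stand-in for the listener registration added to CertificateClient.
  static class RotationSource {
    private final List<Function<List<String>, CompletableFuture<Void>>> listeners =
        new ArrayList<>();

    void registerRootCARotationListener(
        Function<List<String>, CompletableFuture<Void>> listener) {
      listeners.add(listener);
    }

    // Invoked when a new list of root CA "certificates" (plain strings here) is available;
    // waits for every listener to finish its refresh.
    void rotate(List<String> newCaCerts) {
      listeners.forEach(listener -> listener.apply(newCaCerts).join());
    }
  }

  // Mirrors the ServiceInfoProvider idea: cache the CA list once and refresh the
  // cache from the rotation callback instead of re-reading it on every request.
  static class CachingProvider {
    private List<String> cachedCaCerts;

    CachingProvider(RotationSource source, List<String> initialCerts) {
      cachedCaCerts = new ArrayList<>(initialCerts);
      source.registerRootCARotationListener(certs -> {
        synchronized (this) {
          cachedCaCerts = new ArrayList<>(certs);
        }
        return CompletableFuture.completedFuture(null);
      });
    }

    synchronized List<String> provide() {
      return new ArrayList<>(cachedCaCerts);
    }
  }

  public static void main(String[] args) {
    RotationSource source = new RotationSource();
    CachingProvider provider = new CachingProvider(source, Arrays.asList("ROOTCA-1"));
    System.out.println(provider.provide()); // [ROOTCA-1]

    source.rotate(Arrays.asList("ROOTCA-1", "ROOTCA-2"));
    System.out.println(provider.provide()); // [ROOTCA-1, ROOTCA-2]
  }
}

Having the listener return a CompletableFuture, as in the patch, presumably lets the rotation side wait until every registered consumer has refreshed its cached view before it treats the rotation as processed.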