@@ -42,6 +42,7 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static org.apache.hadoop.hdds.upgrade.HDDSLayoutFeature.WEBUI_PORTS_IN_DATANODEDETAILS;
 import static org.apache.hadoop.hdds.upgrade.HDDSLayoutFeature.RATIS_DATASTREAM_PORT_IN_DATANODEDETAILS;
 import static org.apache.hadoop.ozone.ClientVersion.VERSION_HANDLES_UNKNOWN_DN_PORTS;
 
@@ -805,7 +806,11 @@ public static final class Port {
     public enum Name {
       STANDALONE, RATIS, REST, REPLICATION, RATIS_ADMIN, RATIS_SERVER,
       @BelongsToHDDSLayoutVersion(RATIS_DATASTREAM_PORT_IN_DATANODEDETAILS)
-      RATIS_DATASTREAM;
+      RATIS_DATASTREAM,
+      @BelongsToHDDSLayoutVersion(WEBUI_PORTS_IN_DATANODEDETAILS)
+      HTTP,
+      @BelongsToHDDSLayoutVersion(WEBUI_PORTS_IN_DATANODEDETAILS)
+      HTTPS;
 
       public static final Set<Name> ALL_PORTS = ImmutableSet.copyOf(
           Name.values());
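The new HTTP and HTTPS entries follow the pattern already used for RATIS_DATASTREAM: the annotation ties each value to the layout feature added in the next hunk. A minimal usage sketch, relying only on the setPort/newPort/getPort calls that appear elsewhere in this diff; the port numbers are illustrative and Port#getValue() is assumed to return the registered port number:

    // Illustrative only, not part of this PR; 9882/9883 are placeholder values.
    DatanodeDetails dn = MockDatanodeDetails.randomDatanodeDetails();
    dn.setPort(DatanodeDetails.newPort(DatanodeDetails.Port.Name.HTTP, 9882));
    dn.setPort(DatanodeDetails.newPort(DatanodeDetails.Port.Name.HTTPS, 9883));
    // Assumed accessor: Port#getValue() yields the port number set above.
    int httpPort = dn.getPort(DatanodeDetails.Port.Name.HTTP).getValue();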
@@ -37,7 +37,9 @@ public enum HDDSLayoutFeature implements LayoutFeature {
   DATANODE_SCHEMA_V3(4, "Datanode RocksDB Schema Version 3 (one rocksdb " +
       "per disk)"),
   RATIS_DATASTREAM_PORT_IN_DATANODEDETAILS(5, "Adding the RATIS_DATASTREAM " +
-      "port to the DatanodeDetails.");
+      "port to the DatanodeDetails."),
+  WEBUI_PORTS_IN_DATANODEDETAILS(6, "Adding HTTP and HTTPS ports " +
+      "to DatanodeDetails.");
 
   ////////////////////////////// //////////////////////////////
 
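WEBUI_PORTS_IN_DATANODEDETAILS is what gates the new ports during upgrades: until a datanode's metadata layout version reaches 6, the HTTP/HTTPS ports are not written to datanode.id (see the DatanodeIdYaml tests at the end of this diff). A rough sketch of the underlying version check, assuming only LayoutFeature#layoutVersion(); the helper name is made up:

    // Hypothetical helper, not from this PR: a feature is usable once the
    // component's metadata layout version has reached the feature's version.
    static boolean isFinalized(HDDSLayoutFeature feature, int metadataLayoutVersion) {
      return feature.layoutVersion() <= metadataLayoutVersion;
    }
    // e.g. a datanode still on DATANODE_SCHEMA_V3 (layout version 4) would not
    // yet persist HTTP/HTTPS ports, since WEBUI_PORTS_IN_DATANODEDETAILS is 6.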
@@ -44,6 +44,7 @@
 import org.apache.hadoop.hdds.security.x509.certificate.client.CertificateClient;
 import org.apache.hadoop.hdds.security.x509.certificate.client.DNCertificateClient;
 import org.apache.hadoop.hdds.security.x509.certificate.utils.CertificateSignRequest;
+import org.apache.hadoop.hdds.server.http.HttpConfig;
 import org.apache.hadoop.hdds.server.http.RatisDropwizardExports;
 import org.apache.hadoop.hdds.tracing.TracingUtil;
 import org.apache.hadoop.hdds.utils.HddsServerUtil;
@@ -68,6 +69,8 @@
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
 
+import static org.apache.hadoop.hdds.protocol.DatanodeDetails.Port.Name.HTTP;
+import static org.apache.hadoop.hdds.protocol.DatanodeDetails.Port.Name.HTTPS;
 import static org.apache.hadoop.ozone.OzoneConfigKeys.HDDS_DATANODE_PLUGINS_KEY;
 import static org.apache.hadoop.ozone.conf.OzoneServiceConfig.DEFAULT_SHUTDOWN_HOOK_PRIORITY;
 import static org.apache.hadoop.ozone.common.Storage.StorageState.INITIALIZED;
@@ -296,6 +299,15 @@ public void start() {
     try {
       httpServer = new HddsDatanodeHttpServer(conf);
       httpServer.start();
+      HttpConfig.Policy policy = HttpConfig.getHttpPolicy(conf);
+      if (policy.isHttpEnabled()) {
+        datanodeDetails.setPort(DatanodeDetails.newPort(HTTP,
+            httpServer.getHttpAddress().getPort()));
+      }
+      if (policy.isHttpsEnabled()) {
+        datanodeDetails.setPort(DatanodeDetails.newPort(HTTPS,
+            httpServer.getHttpsAddress().getPort()));
+      }
     } catch (Exception ex) {
       LOG.error("HttpServer failed to start.", ex);
     }
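With the hunks above, the datanode registers the web UI ports only for the schemes its HttpConfig policy enables, so consumers have to treat both ports as optional: an older datanode, or one with a scheme disabled, reports nothing. A hedged consumer-side sketch, assuming DatanodeDetails#getHostName() and Port#getValue() accessors:

    // Hypothetical consumer-side code, not part of this change.
    DatanodeDetails.Port http = dn.getPort(DatanodeDetails.Port.Name.HTTP);
    String webUiUrl = (http == null)
        ? null
        : "http://" + dn.getHostName() + ":" + http.getValue();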
@@ -30,8 +30,9 @@
 import java.io.IOException;
 import java.util.UUID;
 
-import static org.junit.Assert.assertNotNull;
 import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertNull;
 
 /**
  * Tests for {@link DatanodeIdYaml}.
@@ -96,4 +97,48 @@ void testWriteReadAfterRatisDatastreamPortLayoutVersion(@TempDir File dir)
         read.getPort(DatanodeDetails.Port.Name.RATIS_DATASTREAM));
   }
 
+  @Test
+  void testWriteReadBeforeWebUIPortLayoutVersion(@TempDir File dir)
+      throws IOException {
+    DatanodeDetails original = MockDatanodeDetails.randomDatanodeDetails();
+    File file = new File(dir, "datanode.yaml");
+    OzoneConfiguration conf = new OzoneConfiguration();
+    conf.set(HddsConfigKeys.OZONE_METADATA_DIRS, dir.toString());
+    DatanodeLayoutStorage layoutStorage = new DatanodeLayoutStorage(conf,
+        UUID.randomUUID().toString(),
+        HDDSLayoutFeature.DATANODE_SCHEMA_V3.layoutVersion());
+    layoutStorage.initialize();
+
+    DatanodeIdYaml.createDatanodeIdFile(original, file, conf);
+    DatanodeDetails read = DatanodeIdYaml.readDatanodeIdFile(file);
+
+    assertNotNull(original.getPort(DatanodeDetails.Port.Name.HTTP));
+    assertNotNull(original.getPort(DatanodeDetails.Port.Name.HTTPS));
+    assertNull(read.getPort(DatanodeDetails.Port.Name.HTTP));
+    assertNull(read.getPort(DatanodeDetails.Port.Name.HTTPS));
+  }
+
+  @Test
+  void testWriteReadAfterWebUIPortLayoutVersion(@TempDir File dir)
+      throws IOException {
+    DatanodeDetails original = MockDatanodeDetails.randomDatanodeDetails();
+    File file = new File(dir, "datanode.yaml");
+    OzoneConfiguration conf = new OzoneConfiguration();
+    conf.set(HddsConfigKeys.OZONE_METADATA_DIRS, dir.toString());
+    DatanodeLayoutStorage layoutStorage = new DatanodeLayoutStorage(conf,
+        UUID.randomUUID().toString(),
+        HDDSLayoutFeature.WEBUI_PORTS_IN_DATANODEDETAILS.layoutVersion());
+    layoutStorage.initialize();
+
+    DatanodeIdYaml.createDatanodeIdFile(original, file, conf);
+    DatanodeDetails read = DatanodeIdYaml.readDatanodeIdFile(file);
+
+    assertNotNull(original.getPort(DatanodeDetails.Port.Name.HTTP));
+    assertNotNull(original.getPort(DatanodeDetails.Port.Name.HTTPS));
+    assertEquals(original.getPort(DatanodeDetails.Port.Name.HTTP),
+        read.getPort(DatanodeDetails.Port.Name.HTTP));
+    assertEquals(original.getPort(DatanodeDetails.Port.Name.HTTPS),
+        read.getPort(DatanodeDetails.Port.Name.HTTPS));
+  }
+
 }