Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -21,11 +21,17 @@
import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
import org.apache.hadoop.hdds.scm.container.ContainerID;
import org.apache.hadoop.hdds.scm.container.ContainerInfo;
import org.apache.hadoop.hdds.scm.container.ContainerNotFoundException;
import org.apache.hadoop.hdds.scm.pipeline.Pipeline;
import org.apache.hadoop.hdds.scm.pipeline.PipelineID;
import org.apache.hadoop.hdds.scm.pipeline.PipelineManager;
import org.apache.hadoop.hdds.scm.pipeline.PipelineNotFoundException;
import org.apache.hadoop.hdds.scm.server.OzoneStorageContainerManager;
import org.apache.hadoop.ozone.om.helpers.BucketLayout;
import org.apache.hadoop.ozone.om.helpers.OmKeyInfo;
import org.apache.hadoop.ozone.om.helpers.OmKeyLocationInfo;
import org.apache.hadoop.ozone.om.helpers.OmKeyLocationInfoGroup;
import org.apache.hadoop.ozone.recon.api.types.ContainerDiscrepancyInfo;
import org.apache.hadoop.ozone.recon.api.types.ContainerKeyPrefix;
import org.apache.hadoop.ozone.recon.api.types.ContainerMetadata;
import org.apache.hadoop.ozone.recon.api.types.ContainersResponse;
Expand Down Expand Up @@ -92,6 +98,7 @@ public class ContainerEndpoint {
private ReconOMMetadataManager omMetadataManager;

private final ReconContainerManager containerManager;
private final PipelineManager pipelineManager;
private final ContainerHealthSchemaManager containerHealthSchemaManager;
private final ReconNamespaceSummaryManager reconNamespaceSummaryManager;
private final OzoneStorageContainerManager reconSCM;
Expand All @@ -105,6 +112,7 @@ public ContainerEndpoint(OzoneStorageContainerManager reconSCM,
ReconNamespaceSummaryManager reconNamespaceSummaryManager) {
this.containerManager =
(ReconContainerManager) reconSCM.getContainerManager();
this.pipelineManager = reconSCM.getPipelineManager();
this.containerHealthSchemaManager = containerHealthSchemaManager;
this.reconNamespaceSummaryManager = reconNamespaceSummaryManager;
this.reconSCM = reconSCM;
Expand Down Expand Up @@ -481,4 +489,81 @@ private List<ContainerBlockMetadata> getBlocks(
return blockIds;
}

@GET
@Path("/mismatch")
public Response getContainerMisMatchInsights() {
List<ContainerDiscrepancyInfo> containerDiscrepancyInfoList =
new ArrayList<>();
try {
Map<Long, ContainerMetadata> omContainers =
reconContainerMetadataManager.getContainers(-1, -1);
List<Long> scmNonDeletedContainers =
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

scmNonDeletedContainers and scmAllContainers are same, duplicate

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

scmNonDeletedContainers and scmAllContainers are same, duplicate

Its fixed. Pls re-review.

containerManager.getContainers().stream()
.filter(containerInfo -> !(containerInfo.getState() ==
HddsProtos.LifeCycleState.DELETED))
.map(containerInfo -> containerInfo.getContainerID()).collect(
Collectors.toList());

// Filter list of container Ids which are present in OM but not in SCM.
List<Map.Entry<Long, ContainerMetadata>> notSCMContainers =
omContainers.entrySet().stream().filter(containerMetadataEntry ->
!(scmNonDeletedContainers.contains(
containerMetadataEntry.getKey())))
.collect(
Collectors.toList());

notSCMContainers.forEach(nonSCMContainer -> {

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

notSCMContainers List will be empty in normal scenario and only if some thing went wrong will have it populated.
Here we need to check notSCMContainers size is not zero then only do further steps.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

foreach will not execute if list is empty, so no check required.

ContainerDiscrepancyInfo containerDiscrepancyInfo =
new ContainerDiscrepancyInfo();
containerDiscrepancyInfo.setContainerID(nonSCMContainer.getKey());
containerDiscrepancyInfo.setNumberOfKeys(
nonSCMContainer.getValue().getNumberOfKeys());
containerDiscrepancyInfo.setPipelines(nonSCMContainer.getValue()
.getPipelines());
containerDiscrepancyInfo.setExistsAt("OM");
containerDiscrepancyInfoList.add(containerDiscrepancyInfo);
});

// Filter list of container Ids which are present in SCM but not in OM.
List<Long> nonOMContainers = scmNonDeletedContainers.stream()
.filter(containerId -> !omContainers.containsKey(containerId))
.collect(Collectors.toList());

List<Pipeline> pipelines = new ArrayList<>();
nonOMContainers.forEach(nonOMContainerId -> {

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Here again need a check if nonOMContainers empty

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

foreach will not execute if list is empty, so no check required.

ContainerDiscrepancyInfo containerDiscrepancyInfo =
new ContainerDiscrepancyInfo();
containerDiscrepancyInfo.setContainerID(nonOMContainerId);
containerDiscrepancyInfo.setNumberOfKeys(0);
PipelineID pipelineID = null;
try {
pipelineID = containerManager.getContainer(
ContainerID.valueOf(nonOMContainerId))
.getPipelineID();

if (null != pipelineID) {
pipelines.add(pipelineManager.getPipeline(pipelineID));
}
} catch (ContainerNotFoundException e) {
LOG.warn("Container {} not found in SCM: {}", nonOMContainerId, e);
} catch (PipelineNotFoundException e) {
LOG.debug("Pipeline not found for container: {} and pipelineId: {}",
nonOMContainerId, pipelineID, e);
}
containerDiscrepancyInfo.setPipelines(pipelines);
containerDiscrepancyInfo.setExistsAt("SCM");
containerDiscrepancyInfoList.add(containerDiscrepancyInfo);
});

} catch (IOException ex) {
throw new WebApplicationException(ex,
Response.Status.INTERNAL_SERVER_ERROR);
} catch (IllegalArgumentException e) {
throw new WebApplicationException(e, Response.Status.BAD_REQUEST);
} catch (Exception ex) {
throw new WebApplicationException(ex,
Response.Status.INTERNAL_SERVER_ERROR);
}
return Response.ok(containerDiscrepancyInfoList).build();
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,78 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.ozone.recon.api.types;

import com.fasterxml.jackson.annotation.JsonProperty;
import org.apache.hadoop.hdds.scm.pipeline.Pipeline;

import java.util.List;

/**
* Metadata object that represents a Container Discrepancy Info.
*/
/**
 * REST payload describing a container that exists on only one side of the
 * OM / SCM metadata, together with the details known about it.
 */
public class ContainerDiscrepancyInfo {

  /** ID of the mismatched container. */
  @JsonProperty("containerId")
  private long containerID;

  /** Number of keys OM maps to this container (0 when only SCM knows it). */
  @JsonProperty("numberOfKeys")
  private long numberOfKeys;

  /** Pipelines associated with the container, if any were resolved. */
  @JsonProperty("pipelines")
  private List<Pipeline> pipelines;

  /** Which service knows the container: "OM" or "SCM". */
  @JsonProperty("existsAt")
  private String existsAt;

  public ContainerDiscrepancyInfo() {
  }

  public long getContainerID() {
    return containerID;
  }

  public void setContainerID(long containerID) {
    this.containerID = containerID;
  }

  public long getNumberOfKeys() {
    return numberOfKeys;
  }

  public void setNumberOfKeys(long numberOfKeys) {
    this.numberOfKeys = numberOfKeys;
  }

  public List<Pipeline> getPipelines() {
    return pipelines;
  }

  public void setPipelines(List<Pipeline> pipelines) {
    this.pipelines = pipelines;
  }

  public String getExistsAt() {
    return existsAt;
  }

  public void setExistsAt(String existsAt) {
    this.existsAt = existsAt;
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -17,9 +17,13 @@
*/
package org.apache.hadoop.ozone.recon.api.types;

import com.fasterxml.jackson.annotation.JsonProperty;
import org.apache.hadoop.hdds.scm.pipeline.Pipeline;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import java.util.List;

/**
* Metadata object that represents a Container.
Expand All @@ -33,6 +37,9 @@ public class ContainerMetadata {
@XmlElement(name = "NumberOfKeys")
private long numberOfKeys;

@JsonProperty("pipelines")
private List<Pipeline> pipelines;

public ContainerMetadata(long containerID) {
this.containerID = containerID;
}
Expand All @@ -53,4 +60,12 @@ public void setNumberOfKeys(long numberOfKeys) {
this.numberOfKeys = numberOfKeys;
}

public List<Pipeline> getPipelines() {
return pipelines;
}

public void setPipelines(
List<Pipeline> pipelines) {
this.pipelines = pipelines;
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -39,12 +39,16 @@
import javax.inject.Singleton;

import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.hdds.scm.pipeline.Pipeline;
import org.apache.hadoop.hdds.utils.db.BatchOperation;
import org.apache.hadoop.hdds.utils.db.RDBBatchOperation;
import org.apache.hadoop.ozone.om.helpers.BucketLayout;
import org.apache.hadoop.ozone.om.helpers.OmKeyInfo;
import org.apache.hadoop.ozone.recon.ReconUtils;
import org.apache.hadoop.ozone.recon.api.types.ContainerKeyPrefix;
import org.apache.hadoop.ozone.recon.api.types.ContainerMetadata;
import org.apache.hadoop.ozone.recon.api.types.KeyPrefixContainer;
import org.apache.hadoop.ozone.recon.recovery.ReconOMMetadataManager;
import org.apache.hadoop.ozone.recon.scm.ContainerReplicaHistory;
import org.apache.hadoop.ozone.recon.scm.ContainerReplicaHistoryList;
import org.apache.hadoop.ozone.recon.spi.ReconContainerMetadataManager;
Expand All @@ -54,6 +58,7 @@
import org.apache.hadoop.hdds.utils.db.TableIterator;
import org.hadoop.ozone.recon.schema.tables.daos.GlobalStatsDao;
import org.hadoop.ozone.recon.schema.tables.pojos.GlobalStats;
import org.jetbrains.annotations.NotNull;
import org.jooq.Configuration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
Expand All @@ -80,6 +85,9 @@ public class ReconContainerMetadataManagerImpl
@Inject
private Configuration sqlConfiguration;

@Inject
private ReconOMMetadataManager omMetadataManager;

@Inject
public ReconContainerMetadataManagerImpl(ReconDBProvider reconDBProvider,
Configuration sqlConfiguration) {
Expand Down Expand Up @@ -455,6 +463,8 @@ public Map<Long, ContainerMetadata> getContainers(int limit,
ContainerKeyPrefix containerKeyPrefix = keyValue.getKey();
Long containerID = containerKeyPrefix.getContainerId();
Integer numberOfKeys = keyValue.getValue();
List<Pipeline> pipelines =
getPipelines(containerKeyPrefix);

// break the loop if limit has been reached
// and one more new entity needs to be added to the containers map
Expand All @@ -469,12 +479,34 @@ public Map<Long, ContainerMetadata> getContainers(int limit,
ContainerMetadata containerMetadata = containers.get(containerID);
containerMetadata.setNumberOfKeys(containerMetadata.getNumberOfKeys() +
numberOfKeys);
containerMetadata.setPipelines(pipelines);
containers.put(containerID, containerMetadata);
}
}
return containers;
}

/**
 * Resolves the pipelines of every block location of the key backing the
 * given container-to-key mapping.
 *
 * The key prefix is looked up in the LEGACY key table first and, if absent
 * there, in the FILE_SYSTEM_OPTIMIZED key table.
 *
 * @param containerKeyPrefix container-to-key mapping to resolve.
 * @return pipelines of all block locations of the key; empty list when the
 *         key no longer exists in either table.
 * @throws IOException on a metadata store read failure.
 */
@NotNull
private List<Pipeline> getPipelines(ContainerKeyPrefix containerKeyPrefix)
    throws IOException {
  OmKeyInfo omKeyInfo = omMetadataManager.getKeyTable(BucketLayout.LEGACY)
      .getSkipCache(containerKeyPrefix.getKeyPrefix());
  if (null == omKeyInfo) {
    omKeyInfo =
        omMetadataManager.getKeyTable(BucketLayout.FILE_SYSTEM_OPTIMIZED)
            .getSkipCache(containerKeyPrefix.getKeyPrefix());
  }
  List<Pipeline> pipelines = new ArrayList<>();
  if (null != omKeyInfo) {
    // BUG FIX: the previous stream pipeline consisted only of lazy
    // intermediate map() stages with no terminal operation, so the
    // lambdas never executed and the list was always returned empty.
    // Iterate eagerly instead.
    omKeyInfo.getKeyLocationVersions().forEach(locationInfoGroup ->
        locationInfoGroup.getLocationList().forEach(locationInfo ->
            pipelines.add(locationInfo.getPipeline())));
  }
  return pipelines;
}

@Override
public void deleteContainerMapping(ContainerKeyPrefix containerKeyPrefix)
throws IOException {
Expand Down
Loading