Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 7 additions & 0 deletions dev-support/pmd/pmd-ruleset.xml
Original file line number Diff line number Diff line change
Expand Up @@ -42,5 +42,12 @@
<rule ref="category/java/performance.xml/StringInstantiation"/>
<rule ref="category/java/performance.xml/UseStringBufferLength"/>

<rule ref="category/java/codestyle.xml/FieldDeclarationsShouldBeAtStartOfClass">
<properties>
<property name="ignoreAnonymousClassDeclarations" value="false" />
<property name="ignoreEnumDeclarations" value="false" />
</properties>
</rule>

<exclude-pattern>.*/generated-sources/.*</exclude-pattern>
</ruleset>
Original file line number Diff line number Diff line change
Expand Up @@ -22,10 +22,10 @@
*/
public final class NativeConstants {

  // Fields are declared at the start of the class (PMD
  // FieldDeclarationsShouldBeAtStartOfClass). The scraped diff showed both the
  // old (post-constructor) and new (pre-constructor) positions of these two
  // constants; this is the deduplicated, merged result.

  /** Base name of the native RocksDB tools library to load. */
  public static final String ROCKS_TOOLS_NATIVE_LIBRARY_NAME = "ozone_rocksdb_tools";

  /**
   * Property key associated with the native RocksDB tools support —
   * presumably used to toggle/detect native loading; confirm against callers.
   */
  public static final String ROCKS_TOOLS_NATIVE_PROPERTY = "rocks_tools_native";

  /** Constants holder: never instantiated. */
  private NativeConstants() {
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -39,10 +39,6 @@ public final class CompactionLogEntry implements
CompactionLogEntry::getProtobuf,
CompactionLogEntry.class);

public static Codec<CompactionLogEntry> getCodec() {
return CODEC;
}

private final long dbSequenceNumber;
private final long compactionTime;
private final List<CompactionFileInfo> inputFileInfoList;
Expand All @@ -62,6 +58,10 @@ public CompactionLogEntry(long dbSequenceNumber,
this.compactionReason = compactionReason;
}

public static Codec<CompactionLogEntry> getCodec() {
return CODEC;
}

public List<CompactionFileInfo> getInputFileInfoList() {
return inputFileInfoList;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -38,26 +38,6 @@
*/
public class PrintableGraph {

/**
* Enum to print different type of node's name in the graph image.
*/
public enum GraphType {
/**
* To use SST file name as node name.
*/
FILE_NAME,

/**
* To use SST file name and total key in the file as node name.
*/
KEY_SIZE,

/**
* To use SST file name and cumulative key as node name.
*/
CUMULATIVE_SIZE
}

private final Graph<String, Edge> graph;

public PrintableGraph(MutableGraph<CompactionNode> guavaGraph,
Expand Down Expand Up @@ -119,4 +99,24 @@ private String getVertex(CompactionNode node, GraphType graphType) {
return node.getFileName();
}
}

/**
* Enum to print different type of node's name in the graph image.
*/
public enum GraphType {
/**
* To use SST file name as node name.
*/
FILE_NAME,

/**
* To use SST file name and total key in the file as node name.
*/
KEY_SIZE,

/**
* To use SST file name and cumulative key as node name.
*/
CUMULATIVE_SIZE
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -179,6 +179,35 @@ public class RocksDBCheckpointDiffer implements AutoCloseable,
public static final Set<String> COLUMN_FAMILIES_TO_TRACK_IN_DAG =
ImmutableSet.of("keyTable", "directoryTable", "fileTable");

// Hash table to track CompactionNode for a given SST File.
private final ConcurrentHashMap<String, CompactionNode> compactionNodeMap =
new ConcurrentHashMap<>();

// We are maintaining a two way DAG. This allows easy traversal from
// source snapshot to destination snapshot as well as the other direction.

private final MutableGraph<CompactionNode> forwardCompactionDAG =
GraphBuilder.directed().build();

private final MutableGraph<CompactionNode> backwardCompactionDAG =
GraphBuilder.directed().build();

public static final Integer DEBUG_DAG_BUILD_UP = 2;
public static final Integer DEBUG_DAG_TRAVERSAL = 3;
public static final Integer DEBUG_DAG_LIVE_NODES = 4;
public static final Integer DEBUG_READ_ALL_DB_KEYS = 5;
private static final HashSet<Integer> DEBUG_LEVEL = new HashSet<>();

static {
addDebugLevel(DEBUG_DAG_BUILD_UP);
addDebugLevel(DEBUG_DAG_TRAVERSAL);
addDebugLevel(DEBUG_DAG_LIVE_NODES);
}

static {
RocksDB.loadLibrary();
}

/**
* This is a package private constructor and should not be used other than
* testing. Caller should use RocksDBCheckpointDifferHolder#getInstance() to
Expand Down Expand Up @@ -309,35 +338,6 @@ public void close() {
}
}

// Hash table to track CompactionNode for a given SST File.
private final ConcurrentHashMap<String, CompactionNode> compactionNodeMap =
new ConcurrentHashMap<>();

// We are maintaining a two way DAG. This allows easy traversal from
// source snapshot to destination snapshot as well as the other direction.

private final MutableGraph<CompactionNode> forwardCompactionDAG =
GraphBuilder.directed().build();

private final MutableGraph<CompactionNode> backwardCompactionDAG =
GraphBuilder.directed().build();

public static final Integer DEBUG_DAG_BUILD_UP = 2;
public static final Integer DEBUG_DAG_TRAVERSAL = 3;
public static final Integer DEBUG_DAG_LIVE_NODES = 4;
public static final Integer DEBUG_READ_ALL_DB_KEYS = 5;
private static final HashSet<Integer> DEBUG_LEVEL = new HashSet<>();

static {
addDebugLevel(DEBUG_DAG_BUILD_UP);
addDebugLevel(DEBUG_DAG_TRAVERSAL);
addDebugLevel(DEBUG_DAG_LIVE_NODES);
}

static {
RocksDB.loadLibrary();
}

public static void addDebugLevel(Integer level) {
DEBUG_LEVEL.add(level);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,19 +26,6 @@ public class ListOptions {

@CommandLine.ArgGroup(exclusive = true)
private ExclusiveLimit exclusiveLimit = new ExclusiveLimit();

static class ExclusiveLimit {
@CommandLine.Option(names = {"--length", "-l"},
description = "Maximum number of items to list",
defaultValue = "100",
showDefaultValue = CommandLine.Help.Visibility.ALWAYS)
private int limit;

@CommandLine.Option(names = {"--all", "-a"},
description = "List all results",
defaultValue = "false")
private boolean all;
}

@CommandLine.Option(names = {"--start", "-s"},
description = "The item to start the listing from.\n" +
Expand Down Expand Up @@ -72,4 +59,17 @@ public String getStartItem() {
public String getPrefix() {
return prefix;
}

static class ExclusiveLimit {
@CommandLine.Option(names = {"--length", "-l"},
description = "Maximum number of items to list",
defaultValue = "100",
showDefaultValue = CommandLine.Help.Visibility.ALWAYS)
private int limit;

@CommandLine.Option(names = {"--all", "-a"},
description = "List all results",
defaultValue = "false")
private boolean all;
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -57,28 +57,6 @@ public class CreateBucketHandler extends BucketHandler {
" user if not specified")
private String ownerName;

private static class LayoutConverter implements CommandLine.ITypeConverter<BucketLayout> {
@Override
public BucketLayout convert(String value) {
if (value == null) {
return null;
}
switch (value) {
case "fso":
return BucketLayout.FILE_SYSTEM_OPTIMIZED;
case "obs":
return BucketLayout.OBJECT_STORE;
default:
for (BucketLayout candidate : BucketLayout.values()) {
if (candidate.name().equalsIgnoreCase(value)) {
return candidate;
}
}
throw new IllegalArgumentException("Unknown bucket layout: " + value);
}
}
}

@Option(names = { "--layout", "-l" }, converter = LayoutConverter.class,
description = "Allowed Bucket Layouts: fso (for file system optimized buckets FILE_SYSTEM_OPTIMIZED), " +
"obs (for object store optimized OBJECT_STORE) and legacy (LEGACY is Deprecated)")
Expand Down Expand Up @@ -149,4 +127,26 @@ public void execute(OzoneClient client, OzoneAddress address)
printObjectAsJson(bucket);
}
}

private static class LayoutConverter implements CommandLine.ITypeConverter<BucketLayout> {
@Override
public BucketLayout convert(String value) {
if (value == null) {
return null;
}
switch (value) {
case "fso":
return BucketLayout.FILE_SYSTEM_OPTIMIZED;
case "obs":
return BucketLayout.OBJECT_STORE;
default:
for (BucketLayout candidate : BucketLayout.values()) {
if (candidate.name().equalsIgnoreCase(value)) {
return candidate;
}
}
throw new IllegalArgumentException("Unknown bucket layout: " + value);
}
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -50,14 +50,13 @@ public final class OzoneClientFactory {
private static final Logger LOG = LoggerFactory.getLogger(
OzoneClientFactory.class);

private static final LeakDetector OZONE_CLIENT_LEAK_DETECTOR = new LeakDetector("OzoneClientObject");

/**
* Private constructor, class is not meant to be initialized.
*/
private OzoneClientFactory() { }

private static final LeakDetector OZONE_CLIENT_LEAK_DETECTOR =
new LeakDetector("OzoneClientObject");

public static UncheckedAutoCloseable track(AutoCloseable object) {
final Class<?> clazz = object.getClass();
final StackTraceElement[] stackTrace = HddsUtils.getStackTrace(LOG);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -39,14 +39,14 @@ public class ReplicatedBlockChecksumComputer extends
private static final Logger LOG =
LoggerFactory.getLogger(ReplicatedBlockChecksumComputer.class);

private final List<ContainerProtos.ChunkInfo> chunkInfoList;

static MD5Hash digest(ByteBuffer data) {
final MessageDigest digester = MD5Hash.getDigester();
digester.update(data);
return new MD5Hash(digester.digest());
}

private final List<ContainerProtos.ChunkInfo> chunkInfoList;

public ReplicatedBlockChecksumComputer(
List<ContainerProtos.ChunkInfo> chunkInfoList) {
this.chunkInfoList = chunkInfoList;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -55,6 +55,9 @@
*/
public final class ECKeyOutputStream extends KeyOutputStream
implements KeyMetadataAware {

private static final Logger LOG = LoggerFactory.getLogger(KeyOutputStream.class);

private OzoneClientConfig config;
private ECChunkBuffers ecChunkBufferCache;
private final BlockingQueue<ECChunkBuffers> ecStripeQueue;
Expand All @@ -75,14 +78,6 @@ public final class ECKeyOutputStream extends KeyOutputStream
*/
private boolean atomicKeyCreation;

private enum StripeWriteStatus {
SUCCESS,
FAILED
}

private static final Logger LOG =
LoggerFactory.getLogger(KeyOutputStream.class);

private volatile boolean closed;
private volatile boolean closing;
// how much of data is actually written yet to underlying stream
Expand Down Expand Up @@ -730,4 +725,9 @@ private void releaseBuffers(ByteBuffer[] buffers) {
}
}
}

private enum StripeWriteStatus {
SUCCESS,
FAILED
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -58,18 +58,10 @@
public class KeyDataStreamOutput extends AbstractDataStreamOutput
implements KeyMetadataAware {

private OzoneClientConfig config;

/**
* Defines stream action while calling handleFlushOrClose.
*/
enum StreamAction {
FLUSH, HSYNC, CLOSE, FULL
}

private static final Logger LOG =
LoggerFactory.getLogger(KeyDataStreamOutput.class);

private OzoneClientConfig config;
private boolean closed;

// how much of data is actually written yet to underlying stream
Expand Down Expand Up @@ -560,4 +552,11 @@ private void checkNotClosed() throws IOException {
+ blockDataStreamOutputEntryPool.getKeyName());
}
}

/**
* Defines stream action while calling handleFlushOrClose.
*/
enum StreamAction {
FLUSH, HSYNC, CLOSE, FULL
}
}
Loading