@@ -28,4 +28,8 @@ public interface DBStoreHAManager {
default Table<String, TransactionInfo> getTransactionInfoTable() {
return null;
}

default Table<Long, FlushedTransactionInfo> getFlushedTransactionsTable() {
return null;
}
}
@@ -0,0 +1,102 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.hadoop.hdds.utils;

import java.util.Objects;
import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
import org.apache.hadoop.hdds.utils.db.Codec;
import org.apache.hadoop.hdds.utils.db.DelegatedCodec;
import org.apache.hadoop.hdds.utils.db.Proto2Codec;
import org.apache.ratis.server.protocol.TermIndex;

/**
* Represents information about a flushed transaction, including its term and transaction index.
* This class is a lightweight container used to track specific transaction metadata and provides
* methods for serialization and deserialization using a codec.
*/
public class FlushedTransactionInfo {

private static final Codec<FlushedTransactionInfo> CODEC = new DelegatedCodec<>(
Proto2Codec.get(HddsProtos.FlushedTransactionInfo.getDefaultInstance()),
FlushedTransactionInfo::getFromProtobuf,
FlushedTransactionInfo::getProtobuf,
FlushedTransactionInfo.class);

private final long term;
private final long transactionIndex;

FlushedTransactionInfo(TermIndex termIndex) {
this.transactionIndex = termIndex.getIndex();
this.term = termIndex.getTerm();
}

public static FlushedTransactionInfo valueOf(long currentTerm, long transactionIndex) {
return valueOf(TermIndex.valueOf(currentTerm, transactionIndex));
}

public static FlushedTransactionInfo valueOf(TermIndex termIndex) {
return new FlushedTransactionInfo(termIndex);
}

public static Codec<FlushedTransactionInfo> getCodec() {
return CODEC;
}

public long getTerm() {
return term;
}

public long getTransactionIndex() {
return transactionIndex;
}

public static FlushedTransactionInfo getFromProtobuf(HddsProtos.FlushedTransactionInfo transactionInfo) {
return new FlushedTransactionInfo(TermIndex.valueOf(transactionInfo.getTermIndex(),
transactionInfo.getTransactionId()));
}

private HddsProtos.FlushedTransactionInfo getProtobuf() {
return HddsProtos.FlushedTransactionInfo.newBuilder().setTermIndex(this.getTerm())
.setTransactionId(this.getTransactionIndex()).build();
}

@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
FlushedTransactionInfo that = (FlushedTransactionInfo) o;
return this.getTerm() == that.getTerm() && this.getTransactionIndex() == that.getTransactionIndex();
}

@Override
public int hashCode() {
return Objects.hash(getTerm(), getTransactionIndex());
}

@Override
public String toString() {
return "FlushedTransactionInfo{" +
"term=" + term +
", transactionIndex=" + transactionIndex +
'}';
}
}
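For orientation, here is a minimal round-trip sketch of the new value type through its codec. This is not part of the patch; the class name and the broad throws Exception clause are assumptions made for brevity.

import org.apache.hadoop.hdds.utils.FlushedTransactionInfo;

public final class FlushedTransactionInfoRoundTrip {
  public static void main(String[] args) throws Exception {
    // Build the value from a Ratis term and transaction index.
    FlushedTransactionInfo original = FlushedTransactionInfo.valueOf(3L, 1024L);

    // Serialize and deserialize through the DelegatedCodec backed by the
    // new HddsProtos.FlushedTransactionInfo message.
    byte[] persisted = FlushedTransactionInfo.getCodec().toPersistedFormat(original);
    FlushedTransactionInfo decoded = FlushedTransactionInfo.getCodec().fromPersistedFormat(persisted);

    // equals() compares term and transactionIndex, so the round trip is lossless.
    System.out.println(original.equals(decoded)); // true
  }
}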
@@ -37,7 +37,7 @@
* <p>
* This class is immutable.
*/
public final class TransactionInfo implements Comparable<TransactionInfo> {
public class TransactionInfo implements Comparable<TransactionInfo> {
private static final Codec<TransactionInfo> CODEC = new DelegatedCodec<>(
StringCodec.get(),
TransactionInfo::valueOf,
@@ -99,7 +99,7 @@ public static TermIndex getTermIndex(long transactionIndex) {
return TermIndex.valueOf(NON_RATIS_TERM, transactionIndex);
}

private TransactionInfo(TermIndex termIndex) {
TransactionInfo(TermIndex termIndex) {
this.transactionInfoString = termIndex.getTerm() + TRANSACTION_INFO_SPLIT_KEY + termIndex.getIndex();
this.snapshotInfo = new SnapshotInfo() {
@Override
@@ -24,6 +24,7 @@
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.function.Function;
import org.apache.hadoop.hdds.utils.CollectionUtils;
import org.apache.hadoop.hdds.utils.db.cache.TableCache.CacheType;
import org.apache.hadoop.hdds.utils.db.managed.ManagedColumnFamilyOptions;
@@ -80,6 +81,11 @@ public TypedTable<KEY, VALUE> getTable(DBStore db, CacheType cacheType)
return db.getTable(tableName, keyCodec, valueCodec, cacheType);
}

public TypedTable<KEY, VALUE> getTable(DBStore db, CacheType cacheType, Function<KEY, Boolean> keyValidator)
throws RocksDatabaseException, CodecException {
return db.getTable(tableName, keyCodec, valueCodec, cacheType, keyValidator);
}

public String getName() {
return tableName;
}
@@ -20,6 +20,7 @@
import java.io.File;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import org.apache.hadoop.hdds.annotation.InterfaceStability;
import org.apache.hadoop.hdds.utils.db.cache.TableCache;
import org.apache.hadoop.hdds.utils.db.cache.TableCache.CacheType;
@@ -64,6 +65,21 @@ <KEY, VALUE> TypedTable<KEY, VALUE> getTable(
String name, Codec<KEY> keyCodec, Codec<VALUE> valueCodec, TableCache.CacheType cacheType)
throws RocksDatabaseException, CodecException;

/**
* Gets table store with implicit key/value conversion and key validation on writes.
*
* @param name - table name
* @param keyCodec - key codec
* @param valueCodec - value codec
* @param cacheType - cache type
* @param keyValidatorFunction - function to validate key before put/delete
* @return - Table Store
*/
<KEY, VALUE> TypedTable<KEY, VALUE> getTable(
String name, Codec<KEY> keyCodec, Codec<VALUE> valueCodec, TableCache.CacheType cacheType,
Function<KEY, Boolean> keyValidatorFunction) throws RocksDatabaseException, CodecException;


/**
* Lists the Known list of Tables in a DB.
*
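A usage sketch for the new overload follows; everything except the method signature (the table name, the codecs chosen, and the non-empty-key rule) is a placeholder, not taken from this PR.

import org.apache.hadoop.hdds.utils.db.DBStore;
import org.apache.hadoop.hdds.utils.db.LongCodec;
import org.apache.hadoop.hdds.utils.db.StringCodec;
import org.apache.hadoop.hdds.utils.db.TypedTable;
import org.apache.hadoop.hdds.utils.db.cache.TableCache;

final class ValidatedTableExample {
  private ValidatedTableExample() { }

  // Open a table whose writes are guarded by a key validator: empty keys
  // will trip the assert added in TypedTable when assertions are enabled.
  static TypedTable<String, Long> open(DBStore store) throws Exception {
    return store.getTable("exampleTable", StringCodec.get(), LongCodec.get(),
        TableCache.CacheType.PARTIAL_CACHE, key -> !key.isEmpty());
  }
}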
@@ -37,6 +37,7 @@
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;
import org.apache.hadoop.hdds.conf.ConfigurationSource;
import org.apache.hadoop.hdds.utils.IOUtils;
import org.apache.hadoop.hdds.utils.RocksDBStoreMetrics;
@@ -280,6 +281,13 @@ public <K, V> TypedTable<K, V> getTable(
return new TypedTable<>(getTable(name), keyCodec, valueCodec, cacheType);
}

@Override
public <K, V> TypedTable<K, V> getTable(
String name, Codec<K> keyCodec, Codec<V> valueCodec, TableCache.CacheType cacheType,
Function<K, Boolean> keyValidator) throws RocksDatabaseException, CodecException {
return new TypedTable<>(getTable(name), keyCodec, valueCodec, cacheType, keyValidator);
}

@Override
public List<Table<?, ?>> listTables() {
final List<Table<?, ?>> returnList = new ArrayList<>();
@@ -29,6 +29,7 @@
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.function.Function;
import org.apache.hadoop.hdds.utils.IOUtils;
import org.apache.hadoop.hdds.utils.MetadataKeyFilters.KeyPrefixFilter;
import org.apache.hadoop.hdds.utils.TableCacheMetrics;
@@ -66,6 +67,12 @@ public class TypedTable<KEY, VALUE> implements Table<KEY, VALUE> {
private final CodecBuffer.Capacity bufferCapacity
= new CodecBuffer.Capacity(this, BUFFER_SIZE_DEFAULT);
private final TableCache<KEY, VALUE> cache;
private final Function<KEY, Boolean> keyValidatorFunction;

TypedTable(RDBTable rawTable, Codec<KEY> keyCodec, Codec<VALUE> valueCodec, CacheType cacheType)
throws RocksDatabaseException, CodecException {
this(rawTable, keyCodec, valueCodec, cacheType, (k) -> true);
}

/**
* Create an TypedTable from the raw table with specified cache type.
@@ -74,13 +81,14 @@ public class TypedTable<KEY, VALUE> implements Table<KEY, VALUE> {
* @param keyCodec The key codec.
* @param valueCodec The value codec.
* @param cacheType How to cache the entries?
* @param keyValidatorFunction A function to validate the key before performing a write operation.
*/
TypedTable(RDBTable rawTable, Codec<KEY> keyCodec, Codec<VALUE> valueCodec, CacheType cacheType)
throws RocksDatabaseException, CodecException {
TypedTable(RDBTable rawTable, Codec<KEY> keyCodec, Codec<VALUE> valueCodec, CacheType cacheType,
Function<KEY, Boolean> keyValidatorFunction) throws RocksDatabaseException, CodecException {
this.rawTable = Objects.requireNonNull(rawTable, "rawTable==null");
this.keyCodec = Objects.requireNonNull(keyCodec, "keyCodec == null");
this.valueCodec = Objects.requireNonNull(valueCodec, "valueCodec == null");

this.keyValidatorFunction = keyValidatorFunction;
this.info = getClassSimpleName(getClass()) + "-" + getName() + "(" + getClassSimpleName(keyCodec.getTypeClass())
+ "->" + getClassSimpleName(valueCodec.getTypeClass()) + ")";

@@ -132,6 +140,7 @@ private VALUE decodeValue(byte[] value) throws CodecException {

@Override
public void put(KEY key, VALUE value) throws RocksDatabaseException, CodecException {
assert this.keyValidatorFunction.apply(key);
if (supportCodecBuffer) {
try (CodecBuffer k = keyCodec.toDirectCodecBuffer(key);
CodecBuffer v = valueCodec.toDirectCodecBuffer(value)) {
@@ -144,6 +153,7 @@ public void put(KEY key, VALUE value) throws RocksDatabaseException, CodecExcept

@Override
public void putWithBatch(BatchOperation batch, KEY key, VALUE value) throws RocksDatabaseException, CodecException {
assert this.keyValidatorFunction.apply(key);
if (supportCodecBuffer) {
CodecBuffer keyBuffer = null;
CodecBuffer valueBuffer = null;
@@ -366,6 +376,7 @@ private VALUE getFromTableIfExist(KEY key) throws RocksDatabaseException, CodecE

@Override
public void delete(KEY key) throws RocksDatabaseException, CodecException {
assert this.keyValidatorFunction.apply(key);
if (keyCodec.supportCodecBuffer()) {
try (CodecBuffer buffer = keyCodec.toDirectCodecBuffer(key)) {
rawTable.delete(buffer.asReadOnlyByteBuffer());
@@ -377,16 +388,19 @@ public void delete(KEY key) throws RocksDatabaseException, CodecException {

@Override
public void deleteWithBatch(BatchOperation batch, KEY key) throws CodecException {
assert this.keyValidatorFunction.apply(key);
rawTable.deleteWithBatch(batch, encodeKey(key));
}

@Override
public void deleteRangeWithBatch(BatchOperation batch, KEY beginKey, KEY endKey) throws CodecException {
assert this.keyValidatorFunction.apply(beginKey) && this.keyValidatorFunction.apply(endKey);
rawTable.deleteRangeWithBatch(batch, encodeKey(beginKey), encodeKey(endKey));
}

@Override
public void deleteRange(KEY beginKey, KEY endKey) throws RocksDatabaseException, CodecException {
assert this.keyValidatorFunction.apply(beginKey) && this.keyValidatorFunction.apply(endKey);
rawTable.deleteRange(encodeKey(beginKey), encodeKey(endKey));
}
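Worth noting: the guards above are plain Java assert statements, so the keyValidatorFunction is only evaluated when the JVM runs with assertions enabled (for example with -ea); in a normal production JVM the checks are no-ops. Below is a small standalone sketch of the validator shape; the class name and messages are hypothetical.

import java.util.function.Function;

public final class KeyValidatorDemo {
  public static void main(String[] args) {
    // Same shape as the validator wired up for the SCM flushed-transactions
    // table: transaction ids must be non-negative.
    Function<Long, Boolean> keyValidator = tid -> tid >= 0;

    // TypedTable.put()/delete() execute "assert keyValidatorFunction.apply(key)",
    // so this check only fires under -ea; without it, invalid keys pass through.
    assert keyValidator.apply(42L) : "key rejected by validator";
    System.out.println("validator accepts 42: " + keyValidator.apply(42L));
  }
}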

5 changes: 5 additions & 0 deletions hadoop-hdds/interface-client/src/main/proto/hdds.proto
@@ -550,3 +550,8 @@ message InnerNode {
optional uint32 numOfLeaves = 2;
repeated ChildrenMap childrenMap = 3;
}

message FlushedTransactionInfo {
required uint64 termIndex = 1;
required uint64 transactionId = 2;
}
@@ -28,6 +28,7 @@
import org.apache.hadoop.hdds.scm.container.common.helpers.MoveDataNodePair;
import org.apache.hadoop.hdds.scm.pipeline.Pipeline;
import org.apache.hadoop.hdds.scm.pipeline.PipelineID;
import org.apache.hadoop.hdds.utils.FlushedTransactionInfo;
import org.apache.hadoop.hdds.utils.TransactionInfo;
import org.apache.hadoop.hdds.utils.db.ByteStringCodec;
import org.apache.hadoop.hdds.utils.db.DBColumnFamilyDefinition;
@@ -75,6 +76,13 @@ public class SCMDBDefinition extends DBDefinition.WithMap {
ContainerID.getCodec(),
ContainerInfo.getCodec());

public static final DBColumnFamilyDefinition<Long, FlushedTransactionInfo>
FLUSHEDTRANSACTIONS =
new DBColumnFamilyDefinition<>(
"scmFlushedTransactions",
LongCodec.get(),
FlushedTransactionInfo.getCodec());

public static final DBColumnFamilyDefinition<String, TransactionInfo>
TRANSACTIONINFO =
new DBColumnFamilyDefinition<>(
@@ -123,6 +131,7 @@ public class SCMDBDefinition extends DBDefinition.WithMap {
PIPELINES,
SEQUENCE_ID,
STATEFUL_SERVICE_CONFIG,
FLUSHEDTRANSACTIONS,
TRANSACTIONINFO,
VALID_CERTS,
VALID_SCM_CERTS);
@@ -19,6 +19,7 @@

import static org.apache.hadoop.hdds.scm.metadata.SCMDBDefinition.CONTAINERS;
import static org.apache.hadoop.hdds.scm.metadata.SCMDBDefinition.DELETED_BLOCKS;
import static org.apache.hadoop.hdds.scm.metadata.SCMDBDefinition.FLUSHEDTRANSACTIONS;
import static org.apache.hadoop.hdds.scm.metadata.SCMDBDefinition.META;
import static org.apache.hadoop.hdds.scm.metadata.SCMDBDefinition.MOVE;
import static org.apache.hadoop.hdds.scm.metadata.SCMDBDefinition.PIPELINES;
@@ -43,12 +44,14 @@
import org.apache.hadoop.hdds.scm.container.common.helpers.MoveDataNodePair;
import org.apache.hadoop.hdds.scm.pipeline.Pipeline;
import org.apache.hadoop.hdds.scm.pipeline.PipelineID;
import org.apache.hadoop.hdds.utils.FlushedTransactionInfo;
import org.apache.hadoop.hdds.utils.HAUtils;
import org.apache.hadoop.hdds.utils.TransactionInfo;
import org.apache.hadoop.hdds.utils.db.BatchOperationHandler;
import org.apache.hadoop.hdds.utils.db.DBStore;
import org.apache.hadoop.hdds.utils.db.DBStoreBuilder;
import org.apache.hadoop.hdds.utils.db.Table;
import org.apache.hadoop.hdds.utils.db.cache.TableCache;
import org.apache.ratis.util.ExitUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -69,6 +72,8 @@ public class SCMMetadataStoreImpl implements SCMMetadataStore {

private Table<PipelineID, Pipeline> pipelineTable;

private Table<Long, FlushedTransactionInfo> flushedTransactionsTable;

private Table<String, TransactionInfo> transactionInfoTable;

private Table<String, Long> sequenceIdTable;
@@ -142,6 +147,10 @@ public void start(OzoneConfiguration config)

checkAndPopulateTable(containerTable, CONTAINERS.getName());

flushedTransactionsTable = FLUSHEDTRANSACTIONS.getTable(store, TableCache.CacheType.PARTIAL_CACHE,
(tid) -> tid >= 0);
checkAndPopulateTable(flushedTransactionsTable, FLUSHEDTRANSACTIONS.getName());

transactionInfoTable = TRANSACTIONINFO.getTable(store);

checkAndPopulateTable(transactionInfoTable, TRANSACTIONINFO.getName());
@@ -203,6 +212,11 @@ public Table<String, TransactionInfo> getTransactionInfoTable() {
return transactionInfoTable;
}

@Override
public Table<Long, FlushedTransactionInfo> getFlushedTransactionsTable() {
return flushedTransactionsTable;
}

@Override
public BatchOperationHandler getBatchHandler() {
return this.store;
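Finally, a hedged sketch of how a caller holding the metadata store could record a flushed transaction in the new table. Only getFlushedTransactionsTable() and the value type come from this PR; the helper class, method name, and call site are assumptions.

import org.apache.hadoop.hdds.scm.metadata.SCMMetadataStore;
import org.apache.hadoop.hdds.utils.FlushedTransactionInfo;

final class FlushedTransactionRecorder {
  private FlushedTransactionRecorder() { }

  // Persist the fact that the transaction at (term, index) has been flushed,
  // keyed by the transaction index (which the table's key validator requires
  // to be non-negative).
  static void record(SCMMetadataStore store, long term, long index) throws Exception {
    store.getFlushedTransactionsTable()
        .put(index, FlushedTransactionInfo.valueOf(term, index));
  }
}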