@@ -216,7 +216,7 @@ private CheckAndMutate(byte[] row, byte[] family, byte[] qualifier, final CompareOperator
this.op = op;
this.value = value;
this.filter = null;
-    this.timeRange = timeRange;
+    this.timeRange = timeRange != null ? timeRange : TimeRange.allTime();
this.action = action;
}

@@ -227,7 +227,7 @@ private CheckAndMutate(byte[] row, Filter filter, TimeRange timeRange, Row action
this.op = null;
this.value = null;
this.filter = filter;
-    this.timeRange = timeRange;
+    this.timeRange = timeRange != null ? timeRange : TimeRange.allTime();
this.action = action;
}

@@ -266,6 +266,13 @@ public Filter getFilter() {
return filter;
}

/**
* @return whether this has a filter or not
*/
public boolean hasFilter() {
return filter != null;
}

/**
* @return the time range to check
*/
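Taken together, these hunks mean a caller can omit the time range and still get a well-defined check window. A minimal client-side sketch of the builder API this file exposes — the row, column, and cell values are made-up illustrations, not part of the patch:

```java
import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.client.CheckAndMutate;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.io.TimeRange;
import org.apache.hadoop.hbase.util.Bytes;

public class CheckAndMutateSketch {
  public static void main(String[] args) {
    byte[] row = Bytes.toBytes("row1");
    byte[] cf = Bytes.toBytes("cf");
    byte[] q = Bytes.toBytes("q");

    // Column-based check; no timeRange() call, so the constructor above
    // now falls back to TimeRange.allTime() instead of holding null.
    CheckAndMutate columnCheck = CheckAndMutate.newBuilder(row)
        .ifEquals(cf, q, Bytes.toBytes("expected"))
        .build(new Put(row).addColumn(cf, q, Bytes.toBytes("updated")));

    // Filter-based check; the new hasFilter() distinguishes the two forms.
    CheckAndMutate filterCheck = CheckAndMutate.newBuilder(row)
        .ifMatches(new SingleColumnValueFilter(cf, q, CompareOperator.EQUAL,
            Bytes.toBytes("expected")))
        .timeRange(TimeRange.between(0L, System.currentTimeMillis()))
        .build(new Delete(row));

    System.out.println(columnCheck.hasFilter());  // false
    System.out.println(filterCheck.hasFilter());  // true
  }
}
```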
@@ -53,6 +53,7 @@
import org.apache.hadoop.hbase.CellBuilderType;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.ExtendedCellBuilder;
import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;
@@ -65,6 +66,7 @@
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.client.CheckAndMutate;
import org.apache.hadoop.hbase.client.ClientUtil;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
@@ -84,6 +86,7 @@
import org.apache.hadoop.hbase.client.RegionReplicaUtil;
import org.apache.hadoop.hbase.client.RegionStatesCount;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.RowMutations;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.SlowLogParams;
import org.apache.hadoop.hbase.client.SnapshotDescription;
@@ -3543,4 +3546,73 @@ public static RSGroupProtos.RSGroupInfo toProtoGroupInfo(RSGroupInfo pojo) {
return RSGroupProtos.RSGroupInfo.newBuilder().setName(pojo.getName()).addAllServers(hostports)
.addAllTables(tables).addAllConfiguration(configuration).build();
}

public static CheckAndMutate toCheckAndMutate(ClientProtos.Condition condition,
MutationProto mutation, CellScanner cellScanner) throws IOException {
byte[] row = condition.getRow().toByteArray();
CheckAndMutate.Builder builder = CheckAndMutate.newBuilder(row);
Filter filter = condition.hasFilter() ? ProtobufUtil.toFilter(condition.getFilter()) : null;
if (filter != null) {
builder.ifMatches(filter);
} else {
builder.ifMatches(condition.getFamily().toByteArray(),
condition.getQualifier().toByteArray(),
CompareOperator.valueOf(condition.getCompareType().name()),
ProtobufUtil.toComparator(condition.getComparator()).getValue());
}
TimeRange timeRange = condition.hasTimeRange() ?
ProtobufUtil.toTimeRange(condition.getTimeRange()) : TimeRange.allTime();
builder.timeRange(timeRange);

try {
MutationType type = mutation.getMutateType();
switch (type) {
case PUT:
return builder.build(ProtobufUtil.toPut(mutation, cellScanner));
case DELETE:
return builder.build(ProtobufUtil.toDelete(mutation, cellScanner));
default:
throw new DoNotRetryIOException("Unsupported mutate type: " + type.name());
}
} catch (IllegalArgumentException e) {
throw new DoNotRetryIOException(e.getMessage());
}
}

public static CheckAndMutate toCheckAndMutate(ClientProtos.Condition condition,
List<Mutation> mutations) throws IOException {
assert mutations.size() > 0;
byte[] row = condition.getRow().toByteArray();
CheckAndMutate.Builder builder = CheckAndMutate.newBuilder(row);
Filter filter = condition.hasFilter() ? ProtobufUtil.toFilter(condition.getFilter()) : null;
if (filter != null) {
builder.ifMatches(filter);
} else {
builder.ifMatches(condition.getFamily().toByteArray(),
condition.getQualifier().toByteArray(),
CompareOperator.valueOf(condition.getCompareType().name()),
ProtobufUtil.toComparator(condition.getComparator()).getValue());
}
TimeRange timeRange = condition.hasTimeRange() ?
ProtobufUtil.toTimeRange(condition.getTimeRange()) : TimeRange.allTime();
builder.timeRange(timeRange);

try {
if (mutations.size() == 1) {
Mutation m = mutations.get(0);
if (m instanceof Put) {
return builder.build((Put) m);
} else if (m instanceof Delete) {
return builder.build((Delete) m);
} else {
throw new DoNotRetryIOException("Unsupported mutate type: " + m.getClass()
.getSimpleName().toUpperCase());
}
} else {
return builder.build(new RowMutations(mutations.get(0).getRow()).add(mutations));
}
} catch (IllegalArgumentException e) {
throw new DoNotRetryIOException(e.getMessage());
}
}
}
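When more than one mutation arrives, the overload above folds them into a single RowMutations keyed on the first mutation's row. A hedged sketch of the equivalent client-side construction — the class name and cell values here are hypothetical:

```java
import java.io.IOException;
import java.util.Arrays;
import org.apache.hadoop.hbase.client.CheckAndMutate;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RowMutations;
import org.apache.hadoop.hbase.util.Bytes;

public class MultiMutationSketch {
  public static CheckAndMutate build() throws IOException {
    byte[] row = Bytes.toBytes("row1");
    byte[] cf = Bytes.toBytes("cf");
    // RowMutations requires every mutation to target the same row, which is
    // why toCheckAndMutate seeds it with mutations.get(0).getRow().
    RowMutations rm = new RowMutations(row).add(Arrays.asList(
        new Put(row).addColumn(cf, Bytes.toBytes("a"), Bytes.toBytes("1")),
        new Delete(row).addColumns(cf, Bytes.toBytes("b"))));
    return CheckAndMutate.newBuilder(row)
        .ifEquals(cf, Bytes.toBytes("a"), Bytes.toBytes("0"))
        .build(rm);
  }
}
```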
@@ -86,6 +86,12 @@ public interface MetricsRegionServerSource extends BaseSource, JvmPauseMonitorSource
*/
void updateCheckAndPut(long t);

/**
* Update checkAndMutate histogram
* @param t time it took
*/
void updateCheckAndMutate(long t);
Contributor: After this change, do we still have checkAndPut and checkAndDelete?

Member (author): Yes. This change keeps the existing checkAndPut and checkAndDelete metrics for backward compatibility, and it doesn't break their existing tests.
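For readers wondering where this hook gets called from: the server times the operation and hands the elapsed duration to the source. A hedged sketch of that pattern — the helper and its wiring are illustrative assumptions, not code from this PR:

```java
import org.apache.hadoop.hbase.regionserver.MetricsRegionServerSource;

public final class MetricsTimingSketch {
  // Hypothetical helper: times an operation and records it in the new
  // checkAndMutate histogram via updateCheckAndMutate(long).
  static void timedCheckAndMutate(MetricsRegionServerSource source, Runnable op) {
    long start = System.nanoTime();
    try {
      op.run(); // the actual check-and-mutate against the region
    } finally {
      // Assumes the time histogram is millisecond-valued, as HBase's
      // newTimeHistogram metrics typically are.
      source.updateCheckAndMutate((System.nanoTime() - start) / 1_000_000L);
    }
  }
}
```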


/**
* Update the Get time histogram.
*
@@ -399,6 +405,7 @@ public interface MetricsRegionServerSource extends BaseSource, JvmPauseMonitorSource
String DELETE_KEY = "delete";
String CHECK_AND_DELETE_KEY = "checkAndDelete";
String CHECK_AND_PUT_KEY = "checkAndPut";
String CHECK_AND_MUTATE_KEY = "checkAndMutate";
String DELETE_BATCH_KEY = "deleteBatch";
String GET_SIZE_KEY = "getSize";
String GET_KEY = "get";
@@ -42,6 +42,7 @@ public class MetricsRegionServerSourceImpl
private final MetricHistogram deleteBatchHisto;
private final MetricHistogram checkAndDeleteHisto;
private final MetricHistogram checkAndPutHisto;
private final MetricHistogram checkAndMutateHisto;
private final MetricHistogram getHisto;
private final MetricHistogram incrementHisto;
private final MetricHistogram appendHisto;
@@ -112,6 +113,7 @@ public MetricsRegionServerSourceImpl(String metricsName,
deleteBatchHisto = getMetricsRegistry().newTimeHistogram(DELETE_BATCH_KEY);
checkAndDeleteHisto = getMetricsRegistry().newTimeHistogram(CHECK_AND_DELETE_KEY);
checkAndPutHisto = getMetricsRegistry().newTimeHistogram(CHECK_AND_PUT_KEY);
checkAndMutateHisto = getMetricsRegistry().newTimeHistogram(CHECK_AND_MUTATE_KEY);

getHisto = getMetricsRegistry().newTimeHistogram(GET_KEY);
slowGet = getMetricsRegistry().newCounter(SLOW_GET_KEY, SLOW_GET_DESC, 0L);
@@ -618,6 +620,11 @@ public void updateCheckAndPut(long t) {
checkAndPutHisto.add(t);
}

@Override
public void updateCheckAndMutate(long t) {
checkAndMutateHisto.add(t);
}

@Override
public void updatePutBatch(long t) {
putBatchHisto.add(t);