Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -856,10 +856,24 @@ void unassign(byte[] regionName, boolean force)
* the request was submitted successfully. We need to check logs for the details of which regions
* were split/merged.
*
* @return <code>true</code> if region normalizer ran, <code>false</code> otherwise.
* @return {@code true} if region normalizer ran, {@code false} otherwise.
* @throws IOException if a remote or network exception occurs
*/
boolean normalize() throws IOException;
default boolean normalize() throws IOException {
return normalize(new NormalizeTableFilterParams.Builder().build());
}

/**
* Invoke region normalizer. Can NOT run for various reasons. Check logs.
* This is a non-blocking invocation to region normalizer. If return value is true, it means
* the request was submitted successfully. We need to check logs for the details of which regions
* were split/merged.
*
* @param ntfp limit to tables matching the specified filter.
* @return {@code true} if region normalizer ran, {@code false} otherwise.
* @throws IOException if a remote or network exception occurs
*/
boolean normalize(NormalizeTableFilterParams ntfp) throws IOException;

/**
* Query the current state of the region normalizer.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -395,8 +395,8 @@ public CacheEvictionStats clearBlockCache(TableName tableName) throws IOExceptio
}

@Override
public boolean normalize() throws IOException {
return get(admin.normalize());
public boolean normalize(NormalizeTableFilterParams ntfp) throws IOException {
return get(admin.normalize(ntfp));
}

@Override
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
/**
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
Expand Down Expand Up @@ -1279,7 +1279,17 @@ default CompletableFuture<Boolean> balance() {
* @return true if region normalizer ran, false otherwise. The return value will be wrapped by a
* {@link CompletableFuture}
*/
CompletableFuture<Boolean> normalize();
default CompletableFuture<Boolean> normalize() {
return normalize(new NormalizeTableFilterParams.Builder().build());
}

/**
* Invoke region normalizer. Can NOT run for various reasons. Check logs.
* @param ntfp limit to tables matching the specified filter.
* @return true if region normalizer ran, false otherwise. The return value will be wrapped by a
* {@link CompletableFuture}
*/
CompletableFuture<Boolean> normalize(NormalizeTableFilterParams ntfp);

/**
* Turn the cleaner chore on/off.
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
/**
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
Expand Down Expand Up @@ -709,8 +709,8 @@ public CompletableFuture<Boolean> isNormalizerEnabled() {
}

@Override
public CompletableFuture<Boolean> normalize() {
return wrap(rawAdmin.normalize());
public CompletableFuture<Boolean> normalize(NormalizeTableFilterParams ntfp) {
return wrap(rawAdmin.normalize(ntfp));
}

@Override
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,107 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.client;

import java.util.List;
import org.apache.hadoop.hbase.TableName;
import org.apache.yetus.audience.InterfaceAudience;

/**
 * Collects the criteria used to choose which tables the region normalizer should consider.
 * Table selection proceeds as follows:
 * <ul>
 * <li>
 *   With no parameter values set, every user table is selected (no filtering).
 * </li>
 * <li>
 *   With a list of {@link TableName TableNames} set, selection starts from whichever of
 *   those tables exist.
 * </li>
 * <li>
 *   With a {@code namespace} name set, selection starts from all tables belonging to that
 *   namespace.
 * </li>
 * <li>
 *   With both a {@link TableName} list and a {@code namespace} name set, the list wins and
 *   the {@code namespace} name is ignored.
 * </li>
 * <li>
 *   With a {@code regex} set, the selection above is further narrowed to the
 *   {@link TableName TableNames} matching that regular expression.
 * </li>
 * </ul>
 */
@InterfaceAudience.Public
public final class NormalizeTableFilterParams {
  // All three criteria are optional; a null value means "no constraint" for that criterion.
  private final List<TableName> tableNames;
  private final String regex;
  private final String namespace;

  private NormalizeTableFilterParams(final List<TableName> names, final String regexFilter,
      final String namespaceName) {
    this.tableNames = names;
    this.regex = regexFilter;
    this.namespace = namespaceName;
  }

  /** Returns the explicit table list, or {@code null} when none was provided. */
  public List<TableName> getTableNames() {
    return tableNames;
  }

  /** Returns the table-name regular expression, or {@code null} when none was provided. */
  public String getRegex() {
    return regex;
  }

  /** Returns the namespace name, or {@code null} when none was provided. */
  public String getNamespace() {
    return namespace;
  }

  /**
   * Used to instantiate an instance of {@link NormalizeTableFilterParams}.
   */
  public static class Builder {
    private List<TableName> tableNames;
    private String regex;
    private String namespace;

    /** Copies every criterion from an existing {@code ntfp} into this builder. */
    public Builder tableFilterParams(final NormalizeTableFilterParams ntfp) {
      tableNames = ntfp.getTableNames();
      regex = ntfp.getRegex();
      namespace = ntfp.getNamespace();
      return this;
    }

    public Builder tableNames(final List<TableName> tableNames) {
      this.tableNames = tableNames;
      return this;
    }

    public Builder regex(final String regex) {
      this.regex = regex;
      return this;
    }

    public Builder namespace(final String namespace) {
      this.namespace = namespace;
      return this;
    }

    /** Builds the immutable parameter object; unset criteria remain {@code null}. */
    public NormalizeTableFilterParams build() {
      return new NormalizeTableFilterParams(tableNames, regex, namespace);
    }
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -3286,14 +3286,18 @@ public CompletableFuture<Boolean> isNormalizerEnabled() {
}

@Override
public CompletableFuture<Boolean> normalize() {
public CompletableFuture<Boolean> normalize(NormalizeTableFilterParams ntfp) {
return normalize(RequestConverter.buildNormalizeRequest(ntfp));
}

private CompletableFuture<Boolean> normalize(NormalizeRequest request) {
return this
.<Boolean> newMasterCaller()
.action(
(controller, stub) -> this.<NormalizeRequest, NormalizeResponse, Boolean> call(
controller, stub, RequestConverter.buildNormalizeRequest(),
(s, c, req, done) -> s.normalize(c, req, done), (resp) -> resp.getNormalizerRan()))
.call();
.<Boolean> newMasterCaller()
.action(
(controller, stub) -> this.call(
controller, stub, request, MasterService.Interface::normalize,
NormalizeResponse::getNormalizerRan))
.call();
}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2293,6 +2293,13 @@ public static HBaseProtos.TableName toProtoTableName(TableName tableName) {
.setQualifier(UnsafeByteOperations.unsafeWrap(tableName.getQualifier())).build();
}

public static List<HBaseProtos.TableName> toProtoTableNameList(List<TableName> tableNameList) {
if (tableNameList == null) {
return new ArrayList<>();
}
return tableNameList.stream().map(ProtobufUtil::toProtoTableName).collect(Collectors.toList());
}

public static List<TableName> toTableNameList(List<HBaseProtos.TableName> tableNamesList) {
if (tableNamesList == null) {
return new ArrayList<>();
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
/**
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
Expand Down Expand Up @@ -46,6 +46,7 @@
import org.apache.hadoop.hbase.client.LogQueryFilter;
import org.apache.hadoop.hbase.client.MasterSwitchType;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.NormalizeTableFilterParams;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RegionCoprocessorServiceExec;
import org.apache.hadoop.hbase.client.RegionInfo;
Expand Down Expand Up @@ -1476,8 +1477,18 @@ public static RegionOpenInfo buildRegionOpenInfo(RegionInfo region, List<ServerN
*
* @return a NormalizeRequest
*/
public static NormalizeRequest buildNormalizeRequest() {
return NormalizeRequest.newBuilder().build();
public static NormalizeRequest buildNormalizeRequest(NormalizeTableFilterParams ntfp) {
final NormalizeRequest.Builder builder = NormalizeRequest.newBuilder();
if (ntfp.getTableNames() != null) {
builder.addAllTableNames(ProtobufUtil.toProtoTableNameList(ntfp.getTableNames()));
}
if (ntfp.getRegex() != null) {
builder.setRegex(ntfp.getRegex());
}
if (ntfp.getNamespace() != null) {
builder.setNamespace(ntfp.getNamespace());
}
return builder.build();
}

/**
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -354,6 +354,9 @@ message IsSplitOrMergeEnabledResponse {
}

message NormalizeRequest {
repeated TableName table_names = 1;
optional string regex = 2;
optional string namespace = 3;
}

message NormalizeResponse {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,6 @@
import static org.apache.hadoop.hbase.HConstants.HBASE_MASTER_LOGCLEANER_PLUGINS;
import static org.apache.hadoop.hbase.HConstants.HBASE_SPLIT_WAL_COORDINATED_BY_ZK;
import static org.apache.hadoop.hbase.util.DNS.MASTER_HOSTNAME_KEY;

import java.io.IOException;
import java.io.InterruptedIOException;
import java.lang.reflect.Constructor;
Expand All @@ -38,6 +37,7 @@
import java.util.EnumSet;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
Expand Down Expand Up @@ -80,9 +80,9 @@
import org.apache.hadoop.hbase.TableNotDisabledException;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.hadoop.hbase.UnknownRegionException;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.MasterSwitchType;
import org.apache.hadoop.hbase.client.NormalizeTableFilterParams;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.client.RegionStatesCount;
Expand Down Expand Up @@ -226,14 +226,13 @@
import org.eclipse.jetty.webapp.WebAppContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
import org.apache.hbase.thirdparty.com.google.common.collect.Maps;
import org.apache.hbase.thirdparty.com.google.common.collect.Sets;
import org.apache.hbase.thirdparty.com.google.protobuf.Descriptors;
import org.apache.hbase.thirdparty.com.google.protobuf.Service;
import org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils;

import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState;
Expand Down Expand Up @@ -1902,14 +1901,18 @@ public RegionNormalizer getRegionNormalizer() {
return this.normalizer;
}

public boolean normalizeRegions() throws IOException {
return normalizeRegions(new NormalizeTableFilterParams.Builder().build());
}

/**
* Perform normalization of cluster (invoked by {@link RegionNormalizerChore}).
* Perform normalization of cluster.
*
* @return true if an existing normalization was already in progress, or if a new normalization
* was performed successfully; false otherwise (specifically, if HMaster finished initializing
* or normalization is globally disabled).
*/
public boolean normalizeRegions() throws IOException {
public boolean normalizeRegions(final NormalizeTableFilterParams ntfp) throws IOException {
final long startTime = EnvironmentEdgeManager.currentTime();
if (regionNormalizerTracker == null || !regionNormalizerTracker.isNormalizerOn()) {
LOG.debug("Region normalization is disabled, don't run region normalizer.");
Expand All @@ -1930,12 +1933,19 @@ public boolean normalizeRegions() throws IOException {

int affectedTables = 0;
try {
final List<TableName> allEnabledTables =
new ArrayList<>(tableStateManager.getTablesInStates(TableState.State.ENABLED));
Collections.shuffle(allEnabledTables);
final Set<TableName> matchingTables = getTableDescriptors(new LinkedList<>(),
ntfp.getNamespace(), ntfp.getRegex(), ntfp.getTableNames(), false)
.stream()
.map(TableDescriptor::getTableName)
.collect(Collectors.toSet());
final Set<TableName> allEnabledTables =
tableStateManager.getTablesInStates(TableState.State.ENABLED);
final List<TableName> targetTables =
new ArrayList<>(Sets.intersection(matchingTables, allEnabledTables));
Collections.shuffle(targetTables);

final List<Long> submittedPlanProcIds = new ArrayList<>();
for (TableName table : allEnabledTables) {
for (TableName table : targetTables) {
if (table.isSystemTable()) {
continue;
}
Expand Down Expand Up @@ -3399,9 +3409,9 @@ public List<TableName> listTableNames(final String namespace, final String regex
}

/**
* @return list of table table descriptors after filtering by regex and whether to include system
* tables, etc.
* @throws IOException
* Return a list of table descriptors after applying any provided filter parameters. Note
* that the user-facing description of this filter logic is presented on the class-level javadoc
* of {@link NormalizeTableFilterParams}.
*/
private List<TableDescriptor> getTableDescriptors(final List<TableDescriptor> htds,
final String namespace, final String regex, final List<TableName> tableNameList,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -48,6 +48,7 @@
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.UnknownRegionException;
import org.apache.hadoop.hbase.client.MasterSwitchType;
import org.apache.hadoop.hbase.client.NormalizeTableFilterParams;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
Expand Down Expand Up @@ -1920,7 +1921,14 @@ public NormalizeResponse normalize(RpcController controller,
NormalizeRequest request) throws ServiceException {
rpcPreCheck("normalize");
try {
return NormalizeResponse.newBuilder().setNormalizerRan(master.normalizeRegions()).build();
final NormalizeTableFilterParams ntfp = new NormalizeTableFilterParams.Builder()
.tableNames(ProtobufUtil.toTableNameList(request.getTableNamesList()))
.regex(request.hasRegex() ? request.getRegex() : null)
.namespace(request.hasNamespace() ? request.getNamespace() : null)
.build();
return NormalizeResponse.newBuilder()
.setNormalizerRan(master.normalizeRegions(ntfp))
.build();
} catch (IOException ex) {
throw new ServiceException(ex);
}
Expand Down
Loading