@@ -18,6 +18,7 @@

package org.apache.hadoop.hbase.master.procedure;


import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
@@ -33,10 +34,12 @@
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionReplicaUtil;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.client.TableState;
import org.apache.hadoop.hbase.master.MasterCoprocessorHost;
import org.apache.hadoop.hbase.master.MasterFileSystem;
import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
import org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTrackerFactory;
import org.apache.hadoop.hbase.replication.ReplicationException;
import org.apache.hadoop.hbase.rsgroup.RSGroupInfo;
import org.apache.hadoop.hbase.util.CommonFSUtils;
@@ -285,6 +288,10 @@ private void preCreate(final MasterProcedureEnv env)
getTableName(), (newRegions != null ? newRegions.size() : 0));
}

TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableDescriptor);
StoreFileTrackerFactory.persistTrackerConfig(env.getMasterConfiguration(), builder);
tableDescriptor = builder.build();

final MasterCoprocessorHost cpHost = env.getMasterCoprocessorHost();
if (cpHost != null) {
final RegionInfo[] regions = newRegions == null ? null :
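The preCreate hunk above is the core of the change: before the master coprocessor pre-create hook runs, the procedure rebuilds the table descriptor with the store file tracker configuration persisted into it. A minimal sketch of the observable effect, assuming a stock configuration (table and family names are illustrative only):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTrackerFactory;

public class PersistTrackerConfigSketch {
  public static void main(String[] args) {
    Configuration conf = HBaseConfiguration.create();
    // A descriptor as a client would submit it: no tracker value set.
    TableDescriptorBuilder builder = TableDescriptorBuilder
        .newBuilder(TableName.valueOf("demo"))
        .setColumnFamily(ColumnFamilyDescriptorBuilder.of("f"));
    // What preCreate now does before the coprocessor hook fires.
    StoreFileTrackerFactory.persistTrackerConfig(conf, builder);
    TableDescriptor td = builder.build();
    // The cluster's tracker implementation is now pinned in the descriptor, so a
    // later change of the cluster default cannot silently switch it for this table.
    System.out.println(td.getValue(StoreFileTrackerFactory.TRACK_IMPL));
  }
}

This is the same effect that testCreateWithTrackImpl further down exercises through the full procedure stack.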
@@ -607,7 +607,7 @@ public Path commitDaughterRegion(final RegionInfo regionInfo, List<Path> allRegi
writeRegionInfoFileContent(conf, fs, regionInfoFile, regionInfoContent);
HRegionFileSystem regionFs = HRegionFileSystem.openRegionFromFileSystem(
env.getMasterConfiguration(), fs, getTableDir(), regionInfo, false);
insertRegionFilesIntoStoreTracker(allRegionFiles, env, regionFs);
insertRegionFilesIntoStoreTracker(allRegionFiles, env, regionFs);
}
return regionDir;
}
@@ -21,6 +21,7 @@
import java.util.Collection;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.regionserver.StoreContext;
import org.apache.hadoop.hbase.regionserver.StoreFileInfo;
import org.apache.yetus.audience.InterfaceAudience;
@@ -85,4 +86,11 @@ void set(List<StoreFileInfo> files) {
throw new UnsupportedOperationException(
"Should not call this method on " + getClass().getSimpleName());
}

@Override
public void persistConfiguration(TableDescriptorBuilder builder) {
super.persistConfiguration(builder);
builder.setValue(SRC_IMPL, src.getClass().getName());
builder.setValue(DST_IMPL, dst.getClass().getName());
}
}
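Context for the override above: a migration tracker wraps a source and a destination tracker, so persisting TRACK_IMPL alone would only record "migration" without saying what is being migrated between. Writing the SRC_IMPL and DST_IMPL class names as well makes the descriptor self-describing for an in-flight migration; after table creation it carries all three tracker class names.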
@@ -20,6 +20,8 @@
import java.io.IOException;
import java.util.Collection;
import java.util.List;

import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.regionserver.CreateStoreFileWriterParams;
import org.apache.hadoop.hbase.regionserver.StoreFileInfo;
import org.apache.hadoop.hbase.regionserver.StoreFileWriter;
@@ -71,4 +73,10 @@ void replace(Collection<StoreFileInfo> compactedFiles, Collection<StoreFileInfo>
* @return Writer for a new StoreFile
*/
StoreFileWriter createWriter(CreateStoreFileWriterParams params) throws IOException;

/**
* Saves StoreFileTracker implementation specific configurations into the table descriptor.
* @param builder The table descriptor builder for the given table.
*/
void persistConfiguration(TableDescriptorBuilder builder);
}
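The new persistConfiguration hook gives every tracker implementation a chance to copy its settings into the descriptor. A sketch of how a custom tracker might use it, assuming the protected conf field that the base implementation below also reads; the class name and the extra key are invented for illustration, only the override pattern comes from this PR:

package org.apache.hadoop.hbase.regionserver.storefiletracker;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.regionserver.StoreContext;

// Hypothetical tracker that persists one extra setting alongside TRACK_IMPL.
public class AuditingStoreFileTracker extends DefaultStoreFileTracker {

  // Invented configuration key, not part of this PR.
  private static final String AUDIT_DIR = "hbase.store.file-tracker.audit.dir";

  public AuditingStoreFileTracker(Configuration conf, boolean isPrimaryReplica,
    StoreContext ctx) {
    super(conf, isPrimaryReplica, ctx);
  }

  @Override
  public void persistConfiguration(TableDescriptorBuilder builder) {
    // The base class records TRACK_IMPL when the descriptor has no value yet.
    super.persistConfiguration(builder);
    String dir = conf.get(AUDIT_DIR);
    if (dir != null) {
      builder.setValue(AUDIT_DIR, dir);
    }
  }
}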
@@ -17,18 +17,22 @@
*/
package org.apache.hadoop.hbase.regionserver.storefiletracker;

import static org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTrackerFactory.TRACK_IMPL;

import java.io.IOException;
import java.util.Collection;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.crypto.Encryption;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.io.hfile.HFileContext;
import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
import org.apache.hadoop.hbase.procedure2.util.StringUtils;
import org.apache.hadoop.hbase.regionserver.CreateStoreFileWriterParams;
import org.apache.hadoop.hbase.regionserver.StoreContext;
import org.apache.hadoop.hbase.regionserver.StoreFileInfo;
@@ -78,6 +82,15 @@ public final void replace(Collection<StoreFileInfo> compactedFiles,
}
}

@Override
public void persistConfiguration(TableDescriptorBuilder builder) {
if (StringUtils.isEmpty(builder.getValue(TRACK_IMPL))) {
String trackerImpl = StoreFileTrackerFactory.
getStoreFileTrackerImpl(conf).getName();
builder.setValue(TRACK_IMPL, trackerImpl).build();
}
}

private HFileContext createFileContext(Compression.Algorithm compression,
boolean includeMVCCReadpoint, boolean includesTag, Encryption.Context encryptionContext) {
if (compression == null) {
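One detail of the base implementation above is worth spelling out: TRACK_IMPL is only written when the descriptor does not already carry a value, so a tracker chosen explicitly at table-creation time wins over the cluster-wide default. A hedged sketch of that precedence (names illustrative):

Configuration conf = HBaseConfiguration.create(); // cluster default would normally apply
TableDescriptorBuilder builder = TableDescriptorBuilder
    .newBuilder(TableName.valueOf("t"))
    .setColumnFamily(ColumnFamilyDescriptorBuilder.of("f"))
    // Explicit per-table choice, as in testCreateWithTrackImpl below.
    .setValue(StoreFileTrackerFactory.TRACK_IMPL,
      "org.apache.hadoop.hbase.regionserver.storefiletracker.TestStoreFileTracker");
StoreFileTrackerFactory.persistTrackerConfig(conf, builder);
// The explicit value survives; the cluster default is not written over it.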
@@ -6,9 +6,7 @@
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* http://www.apache.org/licenses/LICENSE-2.0
Contributor: This is unnecessary?

* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -22,6 +20,7 @@
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.regionserver.HRegionFileSystem;
import org.apache.hadoop.hbase.regionserver.StoreContext;
import org.apache.hadoop.hbase.util.Bytes;
@@ -35,15 +34,17 @@
/**
* Factory method for creating store file tracker.
*/
@InterfaceAudience.Private
public final class StoreFileTrackerFactory {
@InterfaceAudience.Private public final class StoreFileTrackerFactory {
public static final String TRACK_IMPL = "hbase.store.file-tracker.impl";
private static final Logger LOG = LoggerFactory.getLogger(StoreFileTrackerFactory.class);

public static Class<? extends StoreFileTracker> getStoreFileTrackerImpl(Configuration conf) {
return conf.getClass(TRACK_IMPL, DefaultStoreFileTracker.class, StoreFileTracker.class);
}

public static StoreFileTracker create(Configuration conf, boolean isPrimaryReplica,
StoreContext ctx) {
Class<? extends StoreFileTracker> tracker =
conf.getClass(TRACK_IMPL, DefaultStoreFileTracker.class, StoreFileTracker.class);
Class<? extends StoreFileTracker> tracker = getStoreFileTrackerImpl(conf);
LOG.info("instantiating StoreFileTracker impl {}", tracker.getName());
return ReflectionUtils.newInstance(tracker, conf, isPrimaryReplica, ctx);
}
@@ -54,7 +55,7 @@ public static StoreFileTracker create(Configuration conf, boolean isPrimaryRepli
ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes(family));
StoreContext ctx = StoreContext.getBuilder().withColumnFamilyDescriptor(fDescBuilder.build())
.withRegionFileSystem(regionFs).build();
return StoreFileTrackerFactory.create(conf, TRACK_IMPL, isPrimaryReplica, ctx);
return StoreFileTrackerFactory.create(conf, isPrimaryReplica, ctx);
Contributor: Good. This is the root cause of the failing UTs.

}

public static Configuration mergeConfigurations(Configuration global, TableDescriptor table,
@@ -76,4 +77,12 @@ static StoreFileTrackerBase create(Configuration conf, String configName,
LOG.info("instantiating StoreFileTracker impl {} as {}", tracker.getName(), configName);
return ReflectionUtils.newInstance(tracker, conf, isPrimaryReplica, ctx);
}

public static void persistTrackerConfig(Configuration conf, TableDescriptorBuilder builder) {
TableDescriptor tableDescriptor = builder.build();
ColumnFamilyDescriptor cfDesc = tableDescriptor.getColumnFamilies()[0];
StoreContext context = StoreContext.getBuilder().withColumnFamilyDescriptor(cfDesc).build();
StoreFileTracker tracker = StoreFileTrackerFactory.create(conf, true, context);
tracker.persistConfiguration(builder);
}
}
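Two things are worth noting about the factory changes. First, the reviewer comment above points at the old call that routed through the String-keyed create overload meant for named tracker configs; judging from that comment, resolving the implementation through getStoreFileTrackerImpl, which falls back to DefaultStoreFileTracker, is what un-breaks the failing unit tests. Second, persistTrackerConfig builds a throwaway StoreContext from the table's first column family purely to materialize a tracker instance and call persistConfiguration on it; no region file system is attached, which is why TestStoreFileTracker below grows a null check. A short sketch of the resolution step the updated tests compare against (assuming TRACK_IMPL is unset in conf):

Configuration conf = HBaseConfiguration.create();
Class<? extends StoreFileTracker> impl = StoreFileTrackerFactory.getStoreFileTrackerImpl(conf);
// Falls back to the default tracker when TRACK_IMPL is unset; the updated tests
// compare this class name against htd.getValue(TRACK_IMPL).
assert impl == DefaultStoreFileTracker.class;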
@@ -18,6 +18,7 @@
package org.apache.hadoop.hbase.client;

import static org.apache.hadoop.hbase.HBaseTestingUtil.countRows;
import static org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTrackerFactory.TRACK_IMPL;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotEquals;
@@ -39,6 +40,7 @@
import org.apache.hadoop.hbase.TableExistsException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTrackerFactory;
import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.util.Bytes;
@@ -423,6 +425,10 @@ private void testCloneTableSchema(final TableName tableName, final TableName new
assertEquals(BLOCK_SIZE, newTableDesc.getColumnFamily(FAMILY_1).getBlocksize());
assertEquals(BLOCK_CACHE, newTableDesc.getColumnFamily(FAMILY_1).isBlockCacheEnabled());
assertEquals(TTL, newTableDesc.getColumnFamily(FAMILY_1).getTimeToLive());
// HBASE-26246 introduced persisting the store file tracker into the table descriptor
tableDesc = TableDescriptorBuilder.newBuilder(tableDesc).setValue(TRACK_IMPL,
StoreFileTrackerFactory.getStoreFileTrackerImpl(TEST_UTIL.getConfiguration()).getName()).
build();
TEST_UTIL.verifyTableDescriptorIgnoreTableName(tableDesc, newTableDesc);

if (preserveSplits) {
@@ -17,6 +17,7 @@
*/
package org.apache.hadoop.hbase.client;

import static org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTrackerFactory.TRACK_IMPL;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
@@ -34,6 +35,7 @@
import org.apache.hadoop.hbase.TableNotDisabledException;
import org.apache.hadoop.hbase.TableNotEnabledException;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTrackerFactory;
import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.util.Bytes;
@@ -236,6 +238,10 @@ public void testGetTableDescriptor() throws IOException {
ADMIN.createTable(htd);
Table table = TEST_UTIL.getConnection().getTable(htd.getTableName());
TableDescriptor confirmedHtd = table.getDescriptor();
// HBASE-26246 introduced persisting the store file tracker into the table descriptor
htd = TableDescriptorBuilder.newBuilder(htd).setValue(TRACK_IMPL,
StoreFileTrackerFactory.getStoreFileTrackerImpl(TEST_UTIL.getConfiguration()).getName()).
build();
assertEquals(0, TableDescriptor.COMPARATOR.compare(htd, confirmedHtd));
MetaTableAccessor.fullScanMetaAndPrint(TEST_UTIL.getConnection());
table.close();
@@ -18,6 +18,7 @@
package org.apache.hadoop.hbase.client;

import static org.apache.hadoop.hbase.TableName.META_TABLE_NAME;
import static org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTrackerFactory.TRACK_IMPL;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
@@ -34,6 +35,7 @@
import org.apache.hadoop.hbase.TableExistsException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTrackerFactory;
import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.util.Bytes;
@@ -372,6 +374,10 @@ private void testCloneTableSchema(final TableName tableName,
assertEquals(BLOCK_SIZE, newTableDesc.getColumnFamily(FAMILY_1).getBlocksize());
assertEquals(BLOCK_CACHE, newTableDesc.getColumnFamily(FAMILY_1).isBlockCacheEnabled());
assertEquals(TTL, newTableDesc.getColumnFamily(FAMILY_1).getTimeToLive());
// HBASE-26246 introduced persisting the store file tracker into the table descriptor
tableDesc = TableDescriptorBuilder.newBuilder(tableDesc).setValue(TRACK_IMPL,
StoreFileTrackerFactory.getStoreFileTrackerImpl(TEST_UTIL.getConfiguration()).getName()).
build();
TEST_UTIL.verifyTableDescriptorIgnoreTableName(tableDesc, newTableDesc);

if (preserveSplits) {
@@ -18,6 +18,7 @@
package org.apache.hadoop.hbase.client;

import static org.apache.hadoop.hbase.TableName.META_TABLE_NAME;
import static org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTrackerFactory.TRACK_IMPL;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertEquals;
@@ -35,6 +36,7 @@
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTrackerFactory;
import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.util.Bytes;
@@ -147,6 +149,10 @@ public void testGetTableDescriptor() throws Exception {
TableDescriptor desc = builder.build();
admin.createTable(desc).join();
TableDescriptor confirmedHtd = admin.getDescriptor(tableName).get();
// HBASE-26246 introduced persisting the store file tracker into the table descriptor
desc = TableDescriptorBuilder.newBuilder(desc).setValue(TRACK_IMPL,
StoreFileTrackerFactory.getStoreFileTrackerImpl(TEST_UTIL.getConfiguration()).getName()).
build();
assertEquals(0, TableDescriptor.COMPARATOR.compare(desc, confirmedHtd));
}

@@ -18,6 +18,7 @@

package org.apache.hadoop.hbase.master.procedure;

import static org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTrackerFactory.TRACK_IMPL;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
@@ -60,6 +61,7 @@
import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
import org.apache.hadoop.hbase.procedure2.StateMachineProcedure;
import org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTrackerFactory;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.CommonFSUtils;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
@@ -226,6 +228,11 @@ public static void validateTableCreation(final HMaster master, final TableName t
assertTrue("family not found " + family[i], htd.getColumnFamily(Bytes.toBytes(family[i])) != null);
}
assertEquals(family.length, htd.getColumnFamilyCount());

// checks store file tracker impl has been properly set in htd
String storeFileTrackerImpl =
StoreFileTrackerFactory.getStoreFileTrackerImpl(master.getConfiguration()).getName();
assertEquals(storeFileTrackerImpl, htd.getValue(TRACK_IMPL));
}

public static void validateTableDeletion(
@@ -17,6 +17,7 @@
*/
package org.apache.hadoop.hbase.master.procedure;

import static org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTrackerFactory.TRACK_IMPL;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
@@ -38,6 +39,7 @@
import org.apache.hadoop.hbase.procedure2.Procedure;
import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
import org.apache.hadoop.hbase.regionserver.storefiletracker.TestStoreFileTracker;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
@@ -88,6 +90,21 @@ private void testSimpleCreate(final TableName tableName, byte[][] splitKeys) thr
MasterProcedureTestingUtility.validateTableCreation(getMaster(), tableName, regions, F1, F2);
}

@Test
public void testCreateWithTrackImpl() throws Exception {
final TableName tableName = TableName.valueOf(name.getMethodName());
ProcedureExecutor<MasterProcedureEnv> procExec = getMasterProcedureExecutor();
TableDescriptor htd = MasterProcedureTestingUtility.createHTD(tableName, F1);
String trackerName = TestStoreFileTracker.class.getName();
htd = TableDescriptorBuilder.newBuilder(htd).setValue(TRACK_IMPL, trackerName).build();
RegionInfo[] regions = ModifyRegionUtils.createRegionInfos(htd, null);
long procId = ProcedureTestingUtility.submitAndWait(procExec,
new CreateTableProcedure(procExec.getEnvironment(), htd, regions));
ProcedureTestingUtility.assertProcNotFailed(procExec.getResult(procId));
htd = getMaster().getTableDescriptors().get(tableName);
assertEquals(trackerName, htd.getValue(TRACK_IMPL));
}

@Test
public void testCreateWithoutColumnFamily() throws Exception {
final ProcedureExecutor<MasterProcedureEnv> procExec = getMasterProcedureExecutor();
@@ -20,13 +20,15 @@
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.regionserver.StoreContext;
import org.apache.hadoop.hbase.regionserver.StoreFileInfo;
import org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@@ -38,15 +40,21 @@ public class TestStoreFileTracker extends DefaultStoreFileTracker {

public TestStoreFileTracker(Configuration conf, boolean isPrimaryReplica, StoreContext ctx) {
super(conf, isPrimaryReplica, ctx);
this.storeId = ctx.getRegionInfo().getEncodedName() + "-" + ctx.getFamily().getNameAsString();
LOG.info("created storeId: {}", storeId);
trackedFiles.computeIfAbsent(storeId, v -> new ArrayList<>());
if (ctx.getRegionFileSystem() != null) {
this.storeId = ctx.getRegionInfo().getEncodedName() + "-" + ctx.getFamily().getNameAsString();
LOG.info("created storeId: {}", storeId);
trackedFiles.computeIfAbsent(storeId, v -> new ArrayList<>());
} else {
LOG.info("ctx.getRegionFileSystem() returned null. Leaving storeId null.");
}

}

@Override
protected void doAddNewStoreFiles(Collection<StoreFileInfo> newFiles) throws IOException {
LOG.info("adding to storeId: {}", storeId);
trackedFiles.get(storeId).addAll(newFiles);
trackedFiles.putIfAbsent(storeId, (List<StoreFileInfo>)newFiles);
}

@Override