
HBASE-26326 CreateTableProcedure fails when FileBasedStoreFileTracker… #3721

Merged (18 commits) on Oct 13, 2021
Changes from 17 commits
@@ -34,7 +34,6 @@
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionReplicaUtil;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.client.TableState;
import org.apache.hadoop.hbase.master.MasterCoprocessorHost;
import org.apache.hadoop.hbase.master.MasterFileSystem;
@@ -290,9 +289,8 @@ private void preCreate(final MasterProcedureEnv env)
(newRegions != null ? newRegions.size() : 0));
}

TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableDescriptor);
StoreFileTrackerFactory.persistTrackerConfig(env.getMasterConfiguration(), builder);
tableDescriptor = builder.build();
tableDescriptor = StoreFileTrackerFactory.updateWithTrackerConfigs(env.getMasterConfiguration(),
tableDescriptor);

final MasterCoprocessorHost cpHost = env.getMasterCoprocessorHost();
if (cpHost != null) {
@@ -56,7 +56,14 @@ class FileBasedStoreFileTracker extends StoreFileTrackerBase {

public FileBasedStoreFileTracker(Configuration conf, boolean isPrimaryReplica, StoreContext ctx) {
super(conf, isPrimaryReplica, ctx);
backedFile = new StoreFileListFile(ctx);
//CreateTableProcedure needs to instantiate the configured SFT impl, in order to update table
//descriptors with the SFT impl specific configs. By the time this happens, the table has no
//regions nor stores yet, so it can't create a proper StoreContext.
if (ctx != null) {
backedFile = new StoreFileListFile(ctx);
} else {
backedFile = null;
}
}

@Override
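For context, a minimal sketch of the call that makes this null check necessary, based on the StoreFileTrackerFactory change further down in this diff (variable names here are illustrative only):

    // At CreateTableProcedure time there is no region or store yet, so the factory is
    // invoked with a null StoreContext purely so the tracker can expose its config keys.
    StoreFileTracker tracker = StoreFileTrackerFactory.create(conf, true, /* StoreContext */ null);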
@@ -21,6 +21,7 @@
import java.util.Collection;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.procedure2.util.StringUtils;
import org.apache.hadoop.hbase.regionserver.StoreContext;
@@ -88,17 +89,6 @@ void set(List<StoreFileInfo> files) {
"Should not call this method on " + getClass().getSimpleName());
}

@Override
public void persistConfiguration(TableDescriptorBuilder builder) {
super.persistConfiguration(builder);
if (StringUtils.isEmpty(builder.getValue(SRC_IMPL))) {
builder.setValue(SRC_IMPL, src.getTrackerName());
}
if (StringUtils.isEmpty(builder.getValue(DST_IMPL))) {
builder.setValue(DST_IMPL, dst.getTrackerName());
}
}

static Class<? extends StoreFileTracker> getSrcTrackerClass(Configuration conf) {
return StoreFileTrackerFactory.getStoreFileTrackerClassForMigration(conf, SRC_IMPL);
}
@@ -21,6 +21,7 @@
import java.util.Collection;
import java.util.List;

import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.regionserver.CreateStoreFileWriterParams;
import org.apache.hadoop.hbase.regionserver.StoreFileInfo;
@@ -75,13 +76,13 @@ void replace(Collection<StoreFileInfo> compactedFiles, Collection<StoreFileInfo>
StoreFileWriter createWriter(CreateStoreFileWriterParams params) throws IOException;

/**
* Saves StoreFileTracker implementations specific configurations into the table descriptors.
* Adds StoreFileTracker implementations specific configurations into the table descriptor.
* <p/>
* This is used to avoid accidentally data loss when changing the cluster level store file tracker
* implementation, and also possible misconfiguration between master and region servers.
* <p/>
* See HBASE-26246 for more details.
* @param builder The table descriptor builder for the given table.
*/
void persistConfiguration(TableDescriptorBuilder builder);
TableDescriptor updateWithTrackerConfigs(TableDescriptorBuilder builder);
Review comment (Member):
Maybe buildWithTrackerConfigs if you're returning the TableDescriptor instead of the TableDescriptorBuilder. IMO, I'd just return the Builder back and let the caller build() when they're ready.

Reply (Contributor Author):
> IMO, I'd just return the Builder back and let the caller build() when they're ready.

Yeah, I guess that would be better. WDYT, @Apache9 ?

Reply (Contributor):
Just return void is enough. The upper layer could build it after calling this method.
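For illustration, a minimal sketch of the void-returning variant suggested here; tracker stands for the StoreFileTracker instance obtained from StoreFileTrackerFactory, and this is not necessarily the PR's final code:

    // Interface side (sketch): mutate the builder, return nothing.
    // void updateWithTrackerConfigs(TableDescriptorBuilder builder);

    // Caller side, e.g. in CreateTableProcedure.preCreate(): build when ready.
    TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableDescriptor);
    tracker.updateWithTrackerConfigs(builder);
    tableDescriptor = builder.build();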

}
@@ -25,14 +25,14 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.crypto.Encryption;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.io.hfile.HFileContext;
import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
import org.apache.hadoop.hbase.procedure2.util.StringUtils;
import org.apache.hadoop.hbase.regionserver.CreateStoreFileWriterParams;
import org.apache.hadoop.hbase.regionserver.StoreContext;
import org.apache.hadoop.hbase.regionserver.StoreFileInfo;
@@ -83,10 +83,9 @@ public final void replace(Collection<StoreFileInfo> compactedFiles,
}

@Override
public void persistConfiguration(TableDescriptorBuilder builder) {
if (StringUtils.isEmpty(builder.getValue(TRACKER_IMPL))) {
builder.setValue(TRACKER_IMPL, getTrackerName());
}
public TableDescriptor updateWithTrackerConfigs(TableDescriptorBuilder builder) {
builder.setValue(TRACKER_IMPL, getTrackerName());
return builder.build();
}

protected final String getTrackerName() {
@@ -24,8 +24,10 @@
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.procedure2.util.StringUtils;
import org.apache.hadoop.hbase.regionserver.HRegionFileSystem;
import org.apache.hadoop.hbase.regionserver.StoreContext;

import org.apache.hadoop.hbase.regionserver.StoreUtils;
import org.apache.hadoop.hbase.util.ReflectionUtils;
import org.apache.yetus.audience.InterfaceAudience;
@@ -158,12 +160,18 @@ static StoreFileTrackerBase createForMigration(Configuration conf, String config
return ReflectionUtils.newInstance(tracker, conf, isPrimaryReplica, ctx);
}

public static void persistTrackerConfig(Configuration conf, TableDescriptorBuilder builder) {
TableDescriptor tableDescriptor = builder.build();
ColumnFamilyDescriptor cfDesc = tableDescriptor.getColumnFamilies()[0];
StoreContext context = StoreContext.getBuilder().withColumnFamilyDescriptor(cfDesc).build();
StoreFileTracker tracker = StoreFileTrackerFactory.create(conf, true, context);
tracker.persistConfiguration(builder);
public static TableDescriptor updateWithTrackerConfigs(Configuration conf,
TableDescriptor descriptor) {
//CreateTableProcedure needs to instantiate the configured SFT impl, in order to update table
//descriptors with the SFT impl specific configs. By the time this happens, the table has no
//regions nor stores yet, so it can't create a proper StoreContext.
if (StringUtils.isEmpty(descriptor.getValue(TRACKER_IMPL))) {
StoreFileTracker tracker =
StoreFileTrackerFactory.create(conf, true, null);
TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(descriptor);
return tracker.updateWithTrackerConfigs(builder);
}
return descriptor;
}

// should not use MigrationStoreFileTracker for new family
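To tie the pieces together, a small self-contained sketch of how the factory method above would be used; the class name, table name, family name, and standalone main() are illustrative only, and the final merged signature may differ per the review discussion above:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
    import org.apache.hadoop.hbase.client.TableDescriptor;
    import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
    import org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTrackerFactory;

    public class TrackerConfigExample {
      public static void main(String[] args) {
        // Cluster-level tracker impl, as the new test below also sets it.
        Configuration conf = HBaseConfiguration.create();
        conf.set(StoreFileTrackerFactory.TRACKER_IMPL, StoreFileTrackerFactory.Trackers.FILE.name());

        // A descriptor with no tracker impl set yet, as CreateTableProcedure would see it.
        TableDescriptor htd = TableDescriptorBuilder.newBuilder(TableName.valueOf("demo"))
          .setColumnFamily(ColumnFamilyDescriptorBuilder.of("f1")).build();

        // The factory instantiates the configured tracker (with a null StoreContext) and
        // copies its implementation-specific keys into a rebuilt descriptor.
        TableDescriptor withTracker = StoreFileTrackerFactory.updateWithTrackerConfigs(conf, htd);
        System.out.println(withTracker.getValue(StoreFileTrackerFactory.TRACKER_IMPL));
      }
    }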
@@ -39,6 +39,7 @@
import org.apache.hadoop.hbase.procedure2.Procedure;
import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
import org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTrackerFactory;
import org.apache.hadoop.hbase.regionserver.storefiletracker.TestStoreFileTracker;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
@@ -105,6 +106,21 @@ public void testCreateWithTrackImpl() throws Exception {
assertEquals(trackerName, htd.getValue(TRACKER_IMPL));
}

@Test
public void testCreateWithFileBasedStoreTrackerImpl() throws Exception {
ProcedureExecutor<MasterProcedureEnv> procExec = getMasterProcedureExecutor();
procExec.getEnvironment().getMasterConfiguration().set(StoreFileTrackerFactory.TRACKER_IMPL,
StoreFileTrackerFactory.Trackers.FILE.name());
final TableName tableName = TableName.valueOf(name.getMethodName());
TableDescriptor htd = MasterProcedureTestingUtility.createHTD(tableName, F1);
RegionInfo[] regions = ModifyRegionUtils.createRegionInfos(htd, null);
long procId = ProcedureTestingUtility.submitAndWait(procExec,
new CreateTableProcedure(procExec.getEnvironment(), htd, regions));
ProcedureTestingUtility.assertProcNotFailed(procExec.getResult(procId));
htd = getMaster().getTableDescriptors().get(tableName);
assertEquals(StoreFileTrackerFactory.Trackers.FILE.name(), htd.getValue(TRACKER_IMPL));
}

@Test
public void testCreateWithoutColumnFamily() throws Exception {
final ProcedureExecutor<MasterProcedureEnv> procExec = getMasterProcedureExecutor();
@@ -40,7 +40,7 @@ public class TestStoreFileTracker extends DefaultStoreFileTracker {

public TestStoreFileTracker(Configuration conf, boolean isPrimaryReplica, StoreContext ctx) {
super(conf, isPrimaryReplica, ctx);
if (ctx.getRegionFileSystem() != null) {
if (ctx != null && ctx.getRegionFileSystem() != null) {
this.storeId = ctx.getRegionInfo().getEncodedName() + "-" + ctx.getFamily().getNameAsString();
LOG.info("created storeId: {}", storeId);
trackedFiles.computeIfAbsent(storeId, v -> new ArrayList<>());