diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MergeTableRegionsProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MergeTableRegionsProcedure.java index 9a00fe26a8f0..85a433af81e2 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MergeTableRegionsProcedure.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MergeTableRegionsProcedure.java @@ -24,8 +24,6 @@ import java.util.Collections; import java.util.List; import java.util.stream.Stream; - -import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.MetaMutationAnnotation; @@ -612,9 +610,8 @@ private List mergeStoreFiles(MasterProcedureEnv env, HRegionFileSystem reg List mergedFiles = new ArrayList<>(); for (ColumnFamilyDescriptor hcd : htd.getColumnFamilies()) { String family = hcd.getNameAsString(); - Configuration trackerConfig = - StoreFileTrackerFactory.mergeConfigurations(env.getMasterConfiguration(), htd, hcd); - StoreFileTracker tracker = StoreFileTrackerFactory.create(trackerConfig, family, regionFs); + StoreFileTracker tracker = + StoreFileTrackerFactory.create(env.getMasterConfiguration(), htd, hcd, regionFs); final Collection storeFiles = tracker.load(); if (storeFiles != null && storeFiles.size() > 0) { for (StoreFileInfo storeFileInfo : storeFiles) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.java index 44136dc7cbee..60ff48985f42 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.java @@ -666,9 +666,8 @@ private Pair, List> splitStoreFiles(final MasterProcedureEnv en new HashMap>(htd.getColumnFamilyCount()); for (ColumnFamilyDescriptor cfd : htd.getColumnFamilies()) { String family = cfd.getNameAsString(); - Configuration trackerConfig = StoreFileTrackerFactory. - mergeConfigurations(env.getMasterConfiguration(), htd, htd.getColumnFamily(cfd.getName())); - StoreFileTracker tracker = StoreFileTrackerFactory.create(trackerConfig, family, regionFs); + StoreFileTracker tracker = + StoreFileTrackerFactory.create(env.getMasterConfiguration(), htd, cfd, regionFs); Collection sfis = tracker.load(); if (sfis == null) { continue; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.java index db18d4e50fd6..03a7c5c8baaf 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.java @@ -595,7 +595,6 @@ void cleanupDaughterRegion(final RegionInfo regionInfo) throws IOException { * to the proper location in the filesystem. 
* * @param regionInfo daughter {@link org.apache.hadoop.hbase.client.RegionInfo} - * @throws IOException */ public Path commitDaughterRegion(final RegionInfo regionInfo, List allRegionFiles, MasterProcedureEnv env) throws IOException { @@ -622,12 +621,8 @@ private void insertRegionFilesIntoStoreTracker(List allFiles, MasterProced Map> fileInfoMap = new HashMap<>(); for(Path file : allFiles) { String familyName = file.getParent().getName(); - trackerMap.computeIfAbsent(familyName, t -> { - Configuration config = StoreFileTrackerFactory.mergeConfigurations(conf, tblDesc, - tblDesc.getColumnFamily(Bytes.toBytes(familyName))); - return StoreFileTrackerFactory. - create(config, familyName, regionFs); - }); + trackerMap.computeIfAbsent(familyName, t -> StoreFileTrackerFactory.create(conf, tblDesc, + tblDesc.getColumnFamily(Bytes.toBytes(familyName)), regionFs)); fileInfoMap.computeIfAbsent(familyName, l -> new ArrayList<>()); List infos = fileInfoMap.get(familyName); infos.add(new StoreFileInfo(conf, fs, file, true)); @@ -673,7 +668,6 @@ public void createSplitsDir(RegionInfo daughterA, RegionInfo daughterB) throws I * this method is invoked on the Master side, then the RegionSplitPolicy will * NOT have a reference to a Region. * @return Path to created reference. - * @throws IOException */ public Path splitStoreFile(RegionInfo hri, String familyName, HStoreFile f, byte[] splitRow, boolean top, RegionSplitPolicy splitPolicy) throws IOException { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/storefiletracker/StoreFileTrackerFactory.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/storefiletracker/StoreFileTrackerFactory.java index 90704fe528a0..b586027f8333 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/storefiletracker/StoreFileTrackerFactory.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/storefiletracker/StoreFileTrackerFactory.java @@ -22,13 +22,11 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor; -import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder; import org.apache.hadoop.hbase.client.TableDescriptor; import org.apache.hadoop.hbase.client.TableDescriptorBuilder; import org.apache.hadoop.hbase.regionserver.HRegionFileSystem; import org.apache.hadoop.hbase.regionserver.StoreContext; import org.apache.hadoop.hbase.regionserver.StoreUtils; -import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.ReflectionUtils; import org.apache.yetus.audience.InterfaceAudience; import org.slf4j.Logger; @@ -113,16 +111,15 @@ public static StoreFileTracker create(Configuration conf, boolean isPrimaryRepli * Used at master side when splitting/merging regions, as we do not have a Store, thus no * StoreContext at master side. 
*/ - public static StoreFileTracker create(Configuration conf, String family, - HRegionFileSystem regionFs) { - ColumnFamilyDescriptorBuilder fDescBuilder = - ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes(family)); - StoreContext ctx = StoreContext.getBuilder().withColumnFamilyDescriptor(fDescBuilder.build()) - .withRegionFileSystem(regionFs).build(); - return StoreFileTrackerFactory.create(conf, true, ctx); + public static StoreFileTracker create(Configuration conf, TableDescriptor td, + ColumnFamilyDescriptor cfd, HRegionFileSystem regionFs) { + StoreContext ctx = + StoreContext.getBuilder().withColumnFamilyDescriptor(cfd).withRegionFileSystem(regionFs) + .withFamilyStoreDirectoryPath(regionFs.getStoreDir(cfd.getNameAsString())).build(); + return StoreFileTrackerFactory.create(mergeConfigurations(conf, td, cfd), true, ctx); } - public static Configuration mergeConfigurations(Configuration global, TableDescriptor table, + private static Configuration mergeConfigurations(Configuration global, TableDescriptor table, ColumnFamilyDescriptor family) { return StoreUtils.createStoreConfiguration(global, table, family); } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java index 5e82cad6b494..f154aa92cd6e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java @@ -47,7 +47,8 @@ import org.apache.hadoop.hbase.regionserver.HStore; import org.apache.hadoop.hbase.regionserver.HStoreFile; import org.apache.hadoop.hbase.regionserver.StoreFileInfo; -import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTracker; +import org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTrackerFactory; import org.apache.hadoop.hbase.util.CommonFSUtils; import org.apache.hadoop.hbase.util.FSTableDescriptors; import org.apache.hadoop.hbase.util.Threads; @@ -291,8 +292,8 @@ public void addRegion(final Path tableDir, final RegionInfo regionInfo) throws I addRegion(tableDir, regionInfo, visitor); } - protected void addRegion(final Path tableDir, final RegionInfo regionInfo, RegionVisitor visitor) - throws IOException { + protected void addRegion(Path tableDir, RegionInfo regionInfo, RegionVisitor visitor) + throws IOException { boolean isMobRegion = MobUtils.isMobRegionInfo(regionInfo); try { Path baseDir = tableDir; @@ -300,8 +301,8 @@ protected void addRegion(final Path tableDir, final RegionInfo regionInfo, Regio if (isMobRegion) { baseDir = CommonFSUtils.getTableDir(MobUtils.getMobHome(conf), regionInfo.getTable()); } - HRegionFileSystem regionFs = HRegionFileSystem.openRegionFromFileSystem(conf, rootFs, - baseDir, regionInfo, true); + HRegionFileSystem regionFs = + HRegionFileSystem.openRegionFromFileSystem(conf, rootFs, baseDir, regionInfo, true); monitor.rethrowException(); // 1. dump region meta info into the snapshot directory @@ -317,26 +318,19 @@ protected void addRegion(final Path tableDir, final RegionInfo regionInfo, Regio // in batches and may miss files being added/deleted. This could be more robust (iteratively // checking to see if we have all the files until we are sure), but the limit is currently // 1000 files/batch, far more than the number of store files under a single column family. 
- Collection familyNames = regionFs.getFamilies(); - if (familyNames != null) { - for (String familyName: familyNames) { - Object familyData = visitor.familyOpen(regionData, Bytes.toBytes(familyName)); - monitor.rethrowException(); - - Collection storeFiles = regionFs.getStoreFiles(familyName); - if (storeFiles == null) { - if (LOG.isDebugEnabled()) { - LOG.debug("No files under family: " + familyName); - } - continue; - } - - // 2.1. build the snapshot reference for the store - // iterate through all the store's files and create "references". - addReferenceFiles(visitor, regionData, familyData, storeFiles, false); - - visitor.familyClose(regionData, familyData); + for (ColumnFamilyDescriptor cfd : htd.getColumnFamilies()) { + Object familyData = visitor.familyOpen(regionData, cfd.getName()); + monitor.rethrowException(); + StoreFileTracker tracker = StoreFileTrackerFactory.create(conf, htd, cfd, regionFs); + List storeFiles = tracker.load(); + if (storeFiles.isEmpty()) { + LOG.debug("No files under family: {}", cfd.getNameAsString()); + continue; } + // 2.1. build the snapshot reference for the store + // iterate through all the store's files and create "references". + addReferenceFiles(visitor, regionData, familyData, storeFiles, false); + visitor.familyClose(regionData, familyData); } visitor.regionClose(regionData); } catch (IOException e) { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMobCloneSnapshotFromClientCloneLinksAfterDelete.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMobCloneSnapshotFromClientCloneLinksAfterDelete.java index c2087a935198..e352303f76ec 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMobCloneSnapshotFromClientCloneLinksAfterDelete.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMobCloneSnapshotFromClientCloneLinksAfterDelete.java @@ -31,6 +31,7 @@ import org.apache.hadoop.hbase.master.cleaner.TimeToLiveHFileCleaner; import org.apache.hadoop.hbase.mob.MobConstants; import org.apache.hadoop.hbase.regionserver.FlushLifeCycleTracker; +import org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTrackerFactory; import org.apache.hadoop.hbase.snapshot.MobSnapshotTestingUtils; import org.apache.hadoop.hbase.snapshot.SnapshotTestingUtils; import org.apache.hadoop.hbase.testclassification.ClientTests; @@ -93,7 +94,8 @@ public static void setUpBeforeClass() throws Exception { @Override protected void createTable() throws IOException, InterruptedException { MobSnapshotTestingUtils.createMobTable(TEST_UTIL, tableName, - SnapshotTestingUtils.getSplitKeys(), getNumReplicas(), DelayFlushCoprocessor.class.getName(), + SnapshotTestingUtils.getSplitKeys(), getNumReplicas(), + StoreFileTrackerFactory.Trackers.DEFAULT.name(), DelayFlushCoprocessor.class.getName(), FAMILY); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMobSnapshotFromClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMobSnapshotFromClient.java index cdc41b01e87d..0695be12d0c4 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMobSnapshotFromClient.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMobSnapshotFromClient.java @@ -26,8 +26,6 @@ import org.junit.BeforeClass; import org.junit.ClassRule; import org.junit.experimental.categories.Category; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; /** * Test create/using/deleting snapshots from the client @@ -41,8 +39,6 @@ public class 
TestMobSnapshotFromClient extends TestSnapshotFromClient { public static final HBaseClassTestRule CLASS_RULE = HBaseClassTestRule.forClass(TestMobSnapshotFromClient.class); - private static final Logger LOG = LoggerFactory.getLogger(TestMobSnapshotFromClient.class); - /** * Setup the config for the cluster * @throws Exception on failure @@ -60,6 +56,7 @@ protected static void setupConf(Configuration conf) { @Override protected void createTable() throws Exception { - MobSnapshotTestingUtils.createMobTable(UTIL, TABLE_NAME, getNumReplicas(), TEST_FAM); + MobSnapshotTestingUtils.createMobTable(UTIL, TABLE_NAME, getNumReplicas(), trackerImpl.name(), + TEST_FAM); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromClient.java index 08e33ac78d05..56a48c122a14 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromClient.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotFromClient.java @@ -23,6 +23,7 @@ import static org.junit.Assert.fail; import java.util.ArrayList; +import java.util.Arrays; import java.util.List; import java.util.regex.Pattern; import org.apache.hadoop.conf.Configuration; @@ -32,9 +33,11 @@ import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.TableNameTestRule; import org.apache.hadoop.hbase.TableNotFoundException; import org.apache.hadoop.hbase.master.snapshot.SnapshotManager; import org.apache.hadoop.hbase.regionserver.ConstantSizeRegionSplitPolicy; +import org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTrackerFactory; import org.apache.hadoop.hbase.snapshot.SnapshotCreationException; import org.apache.hadoop.hbase.snapshot.SnapshotDoesNotExistException; import org.apache.hadoop.hbase.snapshot.SnapshotManifestV1; @@ -51,7 +54,10 @@ import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; +import org.junit.runners.Parameterized.Parameter; +import org.junit.runners.Parameterized.Parameters; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -64,7 +70,8 @@ *
* This is an end-to-end test for the snapshot utility */ -@Category({LargeTests.class, ClientTests.class}) +@RunWith(Parameterized.class) +@Category({ LargeTests.class, ClientTests.class }) public class TestSnapshotFromClient { @ClassRule @@ -82,7 +89,16 @@ public class TestSnapshotFromClient { private static final Pattern MATCH_ALL = Pattern.compile(".*"); @Rule - public TestName name = new TestName(); + public TableNameTestRule name = new TableNameTestRule(); + + @Parameter + public StoreFileTrackerFactory.Trackers trackerImpl; + + @Parameters(name = "{index}: tracker={0}") + public static List params() { + return Arrays.asList(new Object[] { StoreFileTrackerFactory.Trackers.DEFAULT }, + new Object[] { StoreFileTrackerFactory.Trackers.FILE }); + } /** * Setup the config for the cluster @@ -109,7 +125,6 @@ protected static void setupConf(Configuration conf) { conf.setBoolean(SnapshotManager.HBASE_SNAPSHOT_ENABLED, true); conf.set(HConstants.HBASE_REGION_SPLIT_POLICY_KEY, ConstantSizeRegionSplitPolicy.class.getName()); - } @Before @@ -119,7 +134,8 @@ public void setup() throws Exception { protected void createTable() throws Exception { TableDescriptor htd = - TableDescriptorBuilder.newBuilder(TABLE_NAME).setRegionReplication(getNumReplicas()).build(); + TableDescriptorBuilder.newBuilder(TABLE_NAME).setRegionReplication(getNumReplicas()) + .setValue(StoreFileTrackerFactory.TRACKER_IMPL, trackerImpl.name()).build(); UTIL.createTable(htd, new byte[][] { TEST_FAM }, null); } @@ -316,7 +332,7 @@ public void testOfflineTableSnapshotWithEmptyRegions() throws Exception { @Test public void testListTableSnapshots() throws Exception { Admin admin = null; - final TableName tableName = TableName.valueOf(name.getMethodName()); + final TableName tableName = name.getTableName(); try { admin = UTIL.getAdmin(); @@ -401,7 +417,7 @@ public void testListTableSnapshotsWithRegex() throws Exception { @Test public void testDeleteTableSnapshots() throws Exception { Admin admin = null; - final TableName tableName = TableName.valueOf(name.getMethodName()); + final TableName tableName = name.getTableName(); try { admin = UTIL.getAdmin(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java index c7203a928aa7..b4f628b358b5 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java @@ -58,6 +58,7 @@ import org.apache.hadoop.hbase.client.RegionInfoBuilder; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.client.TableDescriptor; +import org.apache.hadoop.hbase.client.TableDescriptorBuilder; import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper; import org.apache.hadoop.hbase.io.HFileLink; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; @@ -1068,10 +1069,9 @@ private Path splitStoreFile(final HRegionFileSystem regionFs, final RegionInfo h when(mockEnv.getMasterConfiguration()).thenReturn(new Configuration()); TableDescriptors mockTblDescs = mock(TableDescriptors.class); when(mockServices.getTableDescriptors()).thenReturn(mockTblDescs); - TableDescriptor mockTblDesc = mock(TableDescriptor.class); + TableDescriptor mockTblDesc = TableDescriptorBuilder.newBuilder(hri.getTable()) + .setColumnFamily(ColumnFamilyDescriptorBuilder.of(family)).build(); when(mockTblDescs.get(any())).thenReturn(mockTblDesc); - ColumnFamilyDescriptor 
mockCfDesc = mock(ColumnFamilyDescriptor.class); - when(mockTblDesc.getColumnFamily(any())).thenReturn(mockCfDesc); Path regionDir = regionFs.commitDaughterRegion(hri, splitFiles, mockEnv); return new Path(new Path(regionDir, family), path.getName()); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/MobSnapshotTestingUtils.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/MobSnapshotTestingUtils.java index fea4fb4ba583..7523ae8fa0ba 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/MobSnapshotTestingUtils.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/MobSnapshotTestingUtils.java @@ -37,6 +37,7 @@ import org.apache.hadoop.hbase.client.TableDescriptor; import org.apache.hadoop.hbase.client.TableDescriptorBuilder; import org.apache.hadoop.hbase.regionserver.BloomType; +import org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTrackerFactory; import org.apache.hadoop.hbase.util.Bytes; import org.junit.Assert; @@ -45,29 +46,40 @@ public class MobSnapshotTestingUtils { /** * Create the Mob Table. */ - public static void createMobTable(final HBaseTestingUtil util, - final TableName tableName, int regionReplication, - final byte[]... families) throws IOException, InterruptedException { - createMobTable(util, tableName, SnapshotTestingUtils.getSplitKeys(), - regionReplication, families); + public static void createMobTable(final HBaseTestingUtil util, final TableName tableName, + int regionReplication, final byte[]... families) throws IOException, InterruptedException { + createMobTable(util, tableName, SnapshotTestingUtils.getSplitKeys(), regionReplication, + StoreFileTrackerFactory.Trackers.DEFAULT.name(), families); + } + + public static void createMobTable(final HBaseTestingUtil util, final TableName tableName, + int regionReplication, String storeFileTracker, final byte[]... families) + throws IOException, InterruptedException { + createMobTable(util, tableName, SnapshotTestingUtils.getSplitKeys(), regionReplication, + storeFileTracker, families); } - public static void createPreSplitMobTable(final HBaseTestingUtil util, - final TableName tableName, int nRegions, final byte[]... families) - throws IOException, InterruptedException { - createMobTable(util, tableName, SnapshotTestingUtils.getSplitKeys(nRegions), - 1, families); + public static void createPreSplitMobTable(final HBaseTestingUtil util, final TableName tableName, + int nRegions, final byte[]... families) throws IOException, InterruptedException { + createMobTable(util, tableName, SnapshotTestingUtils.getSplitKeys(nRegions), 1, families); + } + + public static void createMobTable(final HBaseTestingUtil util, final TableName tableName, + final byte[][] splitKeys, int regionReplication, final byte[]... families) + throws IOException, InterruptedException { + createMobTable(util, tableName, splitKeys, regionReplication, + StoreFileTrackerFactory.Trackers.DEFAULT.name(), families); } public static void createMobTable(final HBaseTestingUtil util, final TableName tableName, - final byte[][] splitKeys, int regionReplication, final byte[]... families) - throws IOException, InterruptedException { - createMobTable(util, tableName, splitKeys, regionReplication, null, families); + final byte[][] splitKeys, int regionReplication, String storeFileTracker, + final byte[]... 
families) throws IOException, InterruptedException { + createMobTable(util, tableName, splitKeys, regionReplication, storeFileTracker, null, families); } - public static void createMobTable(HBaseTestingUtil util, TableName tableName, - byte[][] splitKeys, int regionReplication, String cpClassName, byte[]... families) - throws IOException, InterruptedException { + public static void createMobTable(HBaseTestingUtil util, TableName tableName, byte[][] splitKeys, + int regionReplication, String storeFileTracker, String cpClassName, byte[]... families) + throws IOException, InterruptedException { TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName).setRegionReplication(regionReplication); for (byte[] family : families) { @@ -77,6 +89,7 @@ public static void createMobTable(HBaseTestingUtil util, TableName tableName, if (!StringUtils.isBlank(cpClassName)) { builder.setCoprocessor(cpClassName); } + builder.setValue(StoreFileTrackerFactory.TRACKER_IMPL, storeFileTracker); util.getAdmin().createTable(builder.build(), splitKeys); SnapshotTestingUtils.waitForTableToBeOnline(util, tableName); assertEquals((splitKeys.length + 1) * regionReplication, @@ -85,15 +98,10 @@ public static void createMobTable(HBaseTestingUtil util, TableName tableName, /** * Create a Mob table. - * - * @param util - * @param tableName - * @param families * @return An Table instance for the created table. - * @throws IOException */ - public static Table createMobTable(final HBaseTestingUtil util, - final TableName tableName, final byte[]... families) throws IOException { + public static Table createMobTable(final HBaseTestingUtil util, final TableName tableName, + final byte[]... families) throws IOException { TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName); for (byte[] family : families) { // Disable blooms (they are on by default as of 0.95) but we disable them @@ -102,10 +110,7 @@ public static Table createMobTable(final HBaseTestingUtil util, // and blooms being // on is interfering. builder.setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(family) - .setBloomFilterType(BloomType.NONE) - .setMobEnabled(true) - .setMobThreshold(0L) - .build()); + .setBloomFilterType(BloomType.NONE).setMobEnabled(true).setMobThreshold(0L).build()); } util.getAdmin().createTable(builder.build()); // HBaseAdmin only waits for regions to appear in hbase:meta we should wait @@ -135,8 +140,8 @@ public static int countMobRows(final Table table, final byte[]... 
families) thro } } - public static void verifyMobRowCount(final HBaseTestingUtil util, - final TableName tableName, long expectedRows) throws IOException { + public static void verifyMobRowCount(final HBaseTestingUtil util, final TableName tableName, + long expectedRows) throws IOException { Table table = ConnectionFactory.createConnection(util.getConfiguration()).getTable(tableName); try { @@ -156,13 +161,10 @@ public SnapshotMock(final Configuration conf, final FileSystem fs, final Path ro @Override public TableDescriptor createHtd(final String tableName) { - return TableDescriptorBuilder.newBuilder(TableName.valueOf(tableName)) - .setColumnFamily(ColumnFamilyDescriptorBuilder - .newBuilder(Bytes.toBytes(TEST_FAMILY)) - .setMobEnabled(true) - .setMobThreshold(0L) - .build()) - .build(); + return TableDescriptorBuilder + .newBuilder(TableName.valueOf(tableName)).setColumnFamily(ColumnFamilyDescriptorBuilder + .newBuilder(Bytes.toBytes(TEST_FAMILY)).setMobEnabled(true).setMobThreshold(0L).build()) + .build(); } } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java index d8d2a5eed0b1..00d2e84a464b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java @@ -480,9 +480,8 @@ public SnapshotBuilder(final Configuration conf, final FileSystem fs, this.desc = desc; this.tableRegions = tableRegions; this.snapshotDir = SnapshotDescriptionUtils.getWorkingSnapshotDir(desc, rootDir, conf); - new FSTableDescriptors(conf) - .createTableDescriptorForTableDirectory(this.snapshotDir.getFileSystem(conf), - snapshotDir, htd, false); + FSTableDescriptors.createTableDescriptorForTableDirectory( + this.snapshotDir.getFileSystem(conf), snapshotDir, htd, false); } public TableDescriptor getTableDescriptor() { @@ -502,15 +501,13 @@ public Path[] addRegion() throws IOException { } public Path[] addRegionV1() throws IOException { - return addRegion(desc.toBuilder() - .setVersion(SnapshotManifestV1.DESCRIPTOR_VERSION) - .build()); + return addRegion( + desc.toBuilder().setVersion(SnapshotManifestV1.DESCRIPTOR_VERSION).build()); } public Path[] addRegionV2() throws IOException { - return addRegion(desc.toBuilder() - .setVersion(SnapshotManifestV2.DESCRIPTOR_VERSION) - .build()); + return addRegion( + desc.toBuilder().setVersion(SnapshotManifestV2.DESCRIPTOR_VERSION).build()); } private Path[] addRegion(final SnapshotProtos.SnapshotDescription desc) throws IOException { @@ -521,6 +518,7 @@ private Path[] addRegion(final SnapshotProtos.SnapshotDescription desc) throws I RegionData regionData = tableRegions[this.snapshotted++]; ForeignExceptionDispatcher monitor = new ForeignExceptionDispatcher(desc.getName()); SnapshotManifest manifest = SnapshotManifest.create(conf, fs, snapshotDir, desc, monitor); + manifest.addTableDescriptor(htd); manifest.addRegion(regionData.tableDir, regionData.hri); return regionData.files; }
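Reviewer note (not part of the patch): a minimal caller-side sketch of the new master-side factory overload. The `StoreFileTrackerFactory.create(conf, td, cfd, regionFs)` signature and the `tracker.load()` call mirror what this diff introduces; the helper class and method below are hypothetical and only illustrate how MergeTableRegionsProcedure, SplitTableRegionProcedure and SnapshotManifest now obtain a tracker per column family without calling mergeConfigurations() themselves.

```java
import java.io.IOException;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.regionserver.HRegionFileSystem;
import org.apache.hadoop.hbase.regionserver.StoreFileInfo;
import org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTracker;
import org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTrackerFactory;

// Hypothetical helper, not part of this patch.
final class StoreFileTrackerUsageSketch {

  /** Lists the store files of every family in a region the way the refactored master-side code does. */
  static void listStoreFiles(Configuration masterConf, TableDescriptor htd,
      HRegionFileSystem regionFs) throws IOException {
    for (ColumnFamilyDescriptor cfd : htd.getColumnFamilies()) {
      // The new overload merges the global, table and family configurations internally,
      // so callers no longer build a merged Configuration by hand.
      StoreFileTracker tracker = StoreFileTrackerFactory.create(masterConf, htd, cfd, regionFs);
      List<StoreFileInfo> storeFiles = tracker.load();
      System.out.println(cfd.getNameAsString() + ": " + storeFiles.size() + " store file(s)");
    }
  }
}
```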
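Similarly hedged, a sketch of how the updated tests pin a tracker implementation per table through the `TRACKER_IMPL` table property; the table and family names here are invented for illustration:

```java
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTrackerFactory;

// Hypothetical snippet mirroring TestSnapshotFromClient.createTable(): the tracker
// implementation (DEFAULT or FILE) is selected via the TRACKER_IMPL table property.
final class TrackerTableDescriptorSketch {
  static TableDescriptor build(String table, String family,
      StoreFileTrackerFactory.Trackers tracker) {
    return TableDescriptorBuilder.newBuilder(TableName.valueOf(table))
      .setColumnFamily(ColumnFamilyDescriptorBuilder.of(family))
      .setValue(StoreFileTrackerFactory.TRACKER_IMPL, tracker.name())
      .build();
  }
}
```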