HBASE-26639 The implementation of TestMergesSplitsAddToTracker is problematic (apache#4010)

Signed-off-by: Wellington Ramos Chevreuil <[email protected]>

Conflicts:
	hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/storefiletracker/TestStoreFileTracker.java
Apache9 authored and apurtell committed Mar 26, 2022
1 parent 44718fa commit e386198
Showing 2 changed files with 46 additions and 27 deletions.
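At a glance, the change replaces the cluster-wide TRACKER_IMPL setting and the shared static HashMap in TestStoreFileTracker with a per-table tracker property and thread-safe static helpers. The sketch below restates the per-table configuration in isolation; it is a hedged illustration, not code from this commit, and it only uses classes the patch itself imports. The class name PerTableTrackerExample, the table name "example_table", and the Admin parameter are illustrative.

// Hedged sketch, not part of this commit: configuring the store file tracker for a single
// table instead of the whole mini cluster, mirroring the createTable() helper added below.
import java.io.IOException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTrackerFactory;
import org.apache.hadoop.hbase.regionserver.storefiletracker.TestStoreFileTracker;
import org.apache.hadoop.hbase.util.Bytes;

public class PerTableTrackerExample {
  // Creates "example_table" with family "info"; only this table routes its store file
  // tracking through TestStoreFileTracker, so no cluster-wide configuration is touched.
  static void createTrackedTable(Admin admin) throws IOException {
    TableDescriptor td = TableDescriptorBuilder.newBuilder(TableName.valueOf("example_table"))
      .setColumnFamily(ColumnFamilyDescriptorBuilder.of(Bytes.toBytes("info")))
      .setValue(StoreFileTrackerFactory.TRACKER_IMPL, TestStoreFileTracker.class.getName())
      .build();
    admin.createTable(td);
  }
}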
hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMergesSplitsAddToTracker.java

@@ -17,14 +17,12 @@
  */
 package org.apache.hadoop.hbase.regionserver;
 
-import static org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTrackerFactory.
-  TRACKER_IMPL;
+import static org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTrackerFactory.TRACKER_IMPL;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
 import java.io.IOException;
 import java.util.ArrayList;
-import java.util.HashMap;
 import java.util.List;
 import java.util.UUID;
 import java.util.concurrent.TimeUnit;
@@ -36,10 +34,14 @@
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.TableNameTestRule;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.RegionInfo;
 import org.apache.hadoop.hbase.client.RegionInfoBuilder;
 import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.TableDescriptor;
+import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
 import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
 import org.apache.hadoop.hbase.regionserver.storefiletracker.TestStoreFileTracker;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
@@ -54,7 +56,6 @@
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
-import org.junit.rules.TestName;
 
 
 @Category({RegionServerTests.class, LargeTests.class})
@@ -66,14 +67,15 @@ public class TestMergesSplitsAddToTracker {
 
   private static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
 
-  public static final byte[] FAMILY_NAME = Bytes.toBytes("info");
+  private static final String FAMILY_NAME_STR = "info";
+
+  private static final byte[] FAMILY_NAME = Bytes.toBytes(FAMILY_NAME_STR);
 
   @Rule
-  public TestName name = new TestName();
+  public TableNameTestRule name = new TableNameTestRule();
 
   @BeforeClass
   public static void setupClass() throws Exception {
-    TEST_UTIL.getConfiguration().set(TRACKER_IMPL, TestStoreFileTracker.class.getName());
     TEST_UTIL.startMiniCluster();
   }
 
@@ -84,13 +86,24 @@ public static void afterClass() throws Exception {
 
   @Before
   public void setup(){
-    TestStoreFileTracker.trackedFiles = new HashMap<>();
+    TestStoreFileTracker.clear();
   }
 
+  private TableName createTable(byte[] splitKey) throws IOException {
+    TableDescriptor td = TableDescriptorBuilder.newBuilder(name.getTableName())
+      .setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY_NAME))
+      .setValue(TRACKER_IMPL, TestStoreFileTracker.class.getName()).build();
+    if (splitKey != null) {
+      TEST_UTIL.getAdmin().createTable(td, new byte[][] { splitKey });
+    } else {
+      TEST_UTIL.getAdmin().createTable(td);
+    }
+    return td.getTableName();
+  }
+
   @Test
   public void testCommitDaughterRegion() throws Exception {
-    TableName table = TableName.valueOf(name.getMethodName());
-    TEST_UTIL.createTable(table, FAMILY_NAME);
+    TableName table = createTable(null);
     //first put some data in order to have a store file created
     putThreeRowsAndFlush(table);
     HRegion region = TEST_UTIL.getHBaseCluster().getRegions(table).get(0);
@@ -124,8 +137,7 @@ public void testCommitDaughterRegion() throws Exception {
 
   @Test
   public void testCommitMergedRegion() throws Exception {
-    TableName table = TableName.valueOf(name.getMethodName());
-    TEST_UTIL.createTable(table, FAMILY_NAME);
+    TableName table = createTable(null);
     //splitting the table first
     split(table, Bytes.toBytes("002"));
     //Add data and flush to create files in the two different regions
@@ -162,8 +174,7 @@ public void testCommitMergedRegion() throws Exception {
 
   @Test
   public void testSplitLoadsFromTracker() throws Exception {
-    TableName table = TableName.valueOf(name.getMethodName());
-    TEST_UTIL.createTable(table, FAMILY_NAME);
+    TableName table = createTable(null);
     //Add data and flush to create files in the two different regions
     putThreeRowsAndFlush(table);
     HRegion region = TEST_UTIL.getHBaseCluster().getRegions(table).get(0);
@@ -187,9 +198,7 @@ private void split(TableName table, byte[] splitKey) throws IOException {
 
   @Test
   public void testMergeLoadsFromTracker() throws Exception {
-    TableName table = TableName.valueOf(name.getMethodName());
-    TEST_UTIL.createTable(table, new byte[][]{FAMILY_NAME},
-      new byte[][]{Bytes.toBytes("002")});
+    TableName table = createTable(Bytes.toBytes("002"));
     //Add data and flush to create files in the two different regions
     putThreeRowsAndFlush(table);
     List<HRegion> regions = TEST_UTIL.getHBaseCluster().getRegions(table);
@@ -237,10 +246,8 @@ private void validateDaughterRegionsFiles(HRegion region, String orignalFileName
   }
 
   private void verifyFilesAreTracked(Path regionDir, FileSystem fs) throws Exception {
-    String storeId = regionDir.getName() + "-info";
-    for(FileStatus f : fs.listStatus(new Path(regionDir, Bytes.toString(FAMILY_NAME)))){
-      assertTrue(TestStoreFileTracker.trackedFiles.get(storeId).stream().filter(s ->
-        s.getPath().equals(f.getPath())).findFirst().isPresent());
+    for (FileStatus f : fs.listStatus(new Path(regionDir, FAMILY_NAME_STR))) {
+      assertTrue(TestStoreFileTracker.tracked(regionDir.getName(), FAMILY_NAME_STR, f.getPath()));
     }
   }
 
hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/storefiletracker/TestStoreFileTracker.java

@@ -20,10 +20,13 @@
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collection;
-import java.util.HashMap;
 import java.util.List;
-import java.util.Map;
+import java.util.concurrent.BlockingQueue;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
+import java.util.concurrent.LinkedBlockingQueue;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.regionserver.StoreContext;
 import org.apache.hadoop.hbase.regionserver.StoreFileInfo;
 import org.slf4j.Logger;
@@ -32,15 +35,16 @@
 public class TestStoreFileTracker extends DefaultStoreFileTracker {
 
   private static final Logger LOG = LoggerFactory.getLogger(TestStoreFileTracker.class);
-  public static Map<String, List<StoreFileInfo>> trackedFiles = new HashMap<>();
+  private static ConcurrentMap<String, BlockingQueue<StoreFileInfo>> trackedFiles =
+    new ConcurrentHashMap<>();
   private String storeId;
 
   public TestStoreFileTracker(Configuration conf, boolean isPrimaryReplica, StoreContext ctx) {
     super(conf, isPrimaryReplica, ctx);
     if (ctx != null && ctx.getRegionFileSystem() != null) {
       this.storeId = ctx.getRegionInfo().getEncodedName() + "-" + ctx.getFamily().getNameAsString();
       LOG.info("created storeId: {}", storeId);
-      trackedFiles.computeIfAbsent(storeId, v -> new ArrayList<>());
+      trackedFiles.computeIfAbsent(storeId, v -> new LinkedBlockingQueue<>());
     } else {
       LOG.info("ctx.getRegionFileSystem() returned null. Leaving storeId null.");
     }
@@ -50,11 +54,19 @@ public TestStoreFileTracker(Configuration conf, boolean isPrimaryReplica, StoreC
   protected void doAddNewStoreFiles(Collection<StoreFileInfo> newFiles) throws IOException {
     LOG.info("adding to storeId: {}", storeId);
-    trackedFiles.putIfAbsent(storeId, (List<StoreFileInfo>)newFiles);
+    trackedFiles.get(storeId).addAll(newFiles);
   }
 
   @Override
   public List<StoreFileInfo> load() throws IOException {
-    return trackedFiles.get(storeId);
+    return new ArrayList<>(trackedFiles.get(storeId));
   }
+
+  public static boolean tracked(String encodedRegionName, String family, Path file) {
+    BlockingQueue<StoreFileInfo> files = trackedFiles.get(encodedRegionName + "-" + family);
+    return files != null && files.stream().anyMatch(s -> s.getPath().equals(file));
+  }
+
+  public static void clear() {
+    trackedFiles.clear();
+  }
 }
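A note on the TestStoreFileTracker changes above: the shared state moves from a plain HashMap to a ConcurrentMap of LinkedBlockingQueues keyed by "<encodedRegionName>-<family>", presumably so that stores recording files concurrently do not race on unsynchronized state; load() returns a defensive ArrayList copy rather than the live collection, and tests now reach the state only through tracked() and clear(). The snippet below is a hedged usage sketch, not code from this commit; the encoded region name, family, and path values are made-up placeholders.

// Hedged usage sketch, not part of this commit: how a test interacts with the new helpers.
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.regionserver.storefiletracker.TestStoreFileTracker;

public class TrackerHelpersExample {
  static boolean wasFileTracked() {
    TestStoreFileTracker.clear(); // reset the shared map, as the test's @Before now does
    // ... run a flush/split/merge on a table whose TRACKER_IMPL is TestStoreFileTracker ...
    Path flushedFile = new Path("/hbase/data/default/t1/abc123/info/file1"); // placeholder path
    // tracked() looks up the queue under "abc123-info", matching how storeId is built.
    return TestStoreFileTracker.tracked("abc123", "info", flushedFile);
  }
}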
