Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

HBASE-26639 The implementation of TestMergesSplitsAddToTracker is problematic #4010

Merged
merged 2 commits into from
Jan 7, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -17,18 +17,15 @@
*/
package org.apache.hadoop.hbase.regionserver;

import static org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTrackerFactory.
TRACKER_IMPL;
import static org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTrackerFactory.TRACKER_IMPL;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.TimeUnit;

import org.apache.commons.lang3.mutable.MutableBoolean;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
Expand All @@ -37,10 +34,14 @@
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNameTestRule;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
import org.apache.hadoop.hbase.regionserver.storefiletracker.TestStoreFileTracker;
import org.apache.hadoop.hbase.testclassification.LargeTests;
Expand All @@ -55,7 +56,6 @@
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;


@Category({RegionServerTests.class, LargeTests.class})
Expand All @@ -67,14 +67,15 @@ public class TestMergesSplitsAddToTracker {

private static HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil();

public static final byte[] FAMILY_NAME = Bytes.toBytes("info");
private static final String FAMILY_NAME_STR = "info";

private static final byte[] FAMILY_NAME = Bytes.toBytes(FAMILY_NAME_STR);

@Rule
public TestName name = new TestName();
public TableNameTestRule name = new TableNameTestRule();

@BeforeClass
public static void setupClass() throws Exception {
TEST_UTIL.getConfiguration().set(TRACKER_IMPL, TestStoreFileTracker.class.getName());
TEST_UTIL.startMiniCluster();
}

Expand All @@ -85,13 +86,24 @@ public static void afterClass() throws Exception {

@Before
public void setup(){
TestStoreFileTracker.trackedFiles = new HashMap<>();
TestStoreFileTracker.clear();
}

/**
 * Creates the test table (named after the current test method) with the single test column
 * family and the test store-file-tracker implementation configured on the table descriptor.
 * @param splitKey when non-null, the table is pre-split at this key into two regions;
 *                 when null, the table is created with a single region.
 * @return the name of the newly created table
 * @throws IOException if table creation fails
 */
private TableName createTable(byte[] splitKey) throws IOException {
  TableDescriptor descriptor = TableDescriptorBuilder.newBuilder(name.getTableName())
    .setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY_NAME))
    .setValue(TRACKER_IMPL, TestStoreFileTracker.class.getName()).build();
  if (splitKey == null) {
    TEST_UTIL.getAdmin().createTable(descriptor);
  } else {
    TEST_UTIL.getAdmin().createTable(descriptor, new byte[][] { splitKey });
  }
  return descriptor.getTableName();
}

@Test
public void testCommitDaughterRegion() throws Exception {
TableName table = TableName.valueOf(name.getMethodName());
TEST_UTIL.createTable(table, FAMILY_NAME);
TableName table = createTable(null);
//first put some data in order to have a store file created
putThreeRowsAndFlush(table);
HRegion region = TEST_UTIL.getHBaseCluster().getRegions(table).get(0);
Expand Down Expand Up @@ -125,8 +137,7 @@ public void testCommitDaughterRegion() throws Exception {

@Test
public void testCommitMergedRegion() throws Exception {
TableName table = TableName.valueOf(name.getMethodName());
TEST_UTIL.createTable(table, FAMILY_NAME);
TableName table = createTable(null);
//splitting the table first
TEST_UTIL.getAdmin().split(table, Bytes.toBytes("002"));
//Add data and flush to create files in the two different regions
Expand Down Expand Up @@ -163,8 +174,7 @@ public void testCommitMergedRegion() throws Exception {

@Test
public void testSplitLoadsFromTracker() throws Exception {
TableName table = TableName.valueOf(name.getMethodName());
TEST_UTIL.createTable(table, FAMILY_NAME);
TableName table = createTable(null);
//Add data and flush to create files in the two different regions
putThreeRowsAndFlush(table);
HRegion region = TEST_UTIL.getHBaseCluster().getRegions(table).get(0);
Expand All @@ -182,9 +192,7 @@ public void testSplitLoadsFromTracker() throws Exception {

@Test
public void testMergeLoadsFromTracker() throws Exception {
TableName table = TableName.valueOf(name.getMethodName());
TEST_UTIL.createTable(table, new byte[][]{FAMILY_NAME},
new byte[][]{Bytes.toBytes("002")});
TableName table = createTable(Bytes.toBytes("002"));
//Add data and flush to create files in the two different regions
putThreeRowsAndFlush(table);
List<HRegion> regions = TEST_UTIL.getHBaseCluster().getRegions(table);
Expand Down Expand Up @@ -232,10 +240,8 @@ private void validateDaughterRegionsFiles(HRegion region, String orignalFileName
}

private void verifyFilesAreTracked(Path regionDir, FileSystem fs) throws Exception {
String storeId = regionDir.getName() + "-info";
for(FileStatus f : fs.listStatus(new Path(regionDir, Bytes.toString(FAMILY_NAME)))){
assertTrue(TestStoreFileTracker.trackedFiles.get(storeId).stream().filter(s ->
s.getPath().equals(f.getPath())).findFirst().isPresent());
for (FileStatus f : fs.listStatus(new Path(regionDir, FAMILY_NAME_STR))) {
assertTrue(TestStoreFileTracker.tracked(regionDir.getName(), FAMILY_NAME_STR, f.getPath()));
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,11 +20,13 @@
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.LinkedBlockingQueue;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.regionserver.StoreContext;
import org.apache.hadoop.hbase.regionserver.StoreFileInfo;
import org.slf4j.Logger;
Expand All @@ -33,15 +35,16 @@
public class TestStoreFileTracker extends DefaultStoreFileTracker {

private static final Logger LOG = LoggerFactory.getLogger(TestStoreFileTracker.class);
public static Map<String, List<StoreFileInfo>> trackedFiles = new HashMap<>();
private static ConcurrentMap<String, BlockingQueue<StoreFileInfo>> trackedFiles =
new ConcurrentHashMap<>();
private String storeId;

public TestStoreFileTracker(Configuration conf, boolean isPrimaryReplica, StoreContext ctx) {
super(conf, isPrimaryReplica, ctx);
if (ctx != null && ctx.getRegionFileSystem() != null) {
this.storeId = ctx.getRegionInfo().getEncodedName() + "-" + ctx.getFamily().getNameAsString();
LOG.info("created storeId: {}", storeId);
trackedFiles.computeIfAbsent(storeId, v -> new ArrayList<>());
trackedFiles.computeIfAbsent(storeId, v -> new LinkedBlockingQueue<>());
} else {
LOG.info("ctx.getRegionFileSystem() returned null. Leaving storeId null.");
}
Expand All @@ -51,11 +54,19 @@ public TestStoreFileTracker(Configuration conf, boolean isPrimaryReplica, StoreC
protected void doAddNewStoreFiles(Collection<StoreFileInfo> newFiles) throws IOException {
LOG.info("adding to storeId: {}", storeId);
trackedFiles.get(storeId).addAll(newFiles);
trackedFiles.putIfAbsent(storeId, (List<StoreFileInfo>)newFiles);
}

@Override
public List<StoreFileInfo> load() throws IOException {
return trackedFiles.get(storeId);
return new ArrayList<>(trackedFiles.get(storeId));
}

/**
 * Returns whether the given file path has been recorded for the store identified by
 * the encoded region name plus column family (the same "encodedRegionName-family" key
 * format used when files are added to the tracker).
 * @param encodedRegionName encoded name of the region owning the store
 * @param family column family name of the store
 * @param file path of the store file to look up
 * @return true if the file was tracked for that store, false otherwise
 */
public static boolean tracked(String encodedRegionName, String family, Path file) {
  String storeId = encodedRegionName + "-" + family;
  BlockingQueue<StoreFileInfo> files = trackedFiles.get(storeId);
  if (files == null) {
    return false;
  }
  for (StoreFileInfo info : files) {
    if (info.getPath().equals(file)) {
      return true;
    }
  }
  return false;
}

/**
 * Drops every tracked store file entry; called from test setup so that state recorded by
 * one test method does not leak into the next.
 */
public static void clear() {
trackedFiles.clear();
}
}