From 7ef63b65f0169415f315da3a1f4a049d23fdc00c Mon Sep 17 00:00:00 2001
From: chenglei
Date: Sat, 24 Dec 2022 17:52:39 +0800
Subject: [PATCH] HBASE-27539 Encapsulate and centralise access to ref count
 through StoreFileInfo (#4928)

Signed-off-by: Wellington Chevreuil
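
Previously StoreFileInfo exposed its AtomicInteger ref count directly:
HStoreFile reached into the field, and every StoreFileReader constructor took
the counter as a parameter, so callers such as BulkLoadHFilesTool had to
fabricate throwaway counters. This change makes the counter a private field
of StoreFileInfo behind narrow accessors and routes reader construction
through StoreFileInfo.createReader(). A minimal sketch of the open sequence
as it looks after this patch (conf, fs, path and cacheConf are assumed to be
set up by the caller; the boolean argument marks the file as belonging to the
primary replica):

    ReaderContext context =
      new ReaderContextBuilder().withFileSystemAndPath(fs, path).build();
    StoreFileInfo storeFileInfo = new StoreFileInfo(conf, fs, path, true);
    storeFileInfo.initHFileInfo(context);
    StoreFileReader reader = storeFileInfo.createReader(context, cacheConf);
    storeFileInfo.getHFileInfo().initMetaAndIndex(reader.getHFileReader());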
---
 .../hadoop/hbase/io/HalfStoreFileReader.java  |  7 +--
 .../hadoop/hbase/regionserver/HStoreFile.java |  8 +--
 .../hbase/regionserver/StoreFileInfo.java     | 23 ++++++--
 .../hbase/regionserver/StoreFileReader.java   | 27 ++++-----
 .../hbase/regionserver/StoreFileWriter.java   |  2 +-
 .../hadoop/hbase/tool/BulkLoadHFilesTool.java |  9 +--
 .../hbase/io/TestHalfStoreFileReader.java     | 23 ++++----
 .../hbase/regionserver/TestHStoreFile.java    | 58 +++++++++++--------
 .../TestRowPrefixBloomFilter.java             | 41 +++++++------
 ...estStoreFileScannerWithTagCompression.java | 18 +++---
 10 files changed, 126 insertions(+), 90 deletions(-)

diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HalfStoreFileReader.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HalfStoreFileReader.java
index 95665391740e..cc680173a4e3 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HalfStoreFileReader.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HalfStoreFileReader.java
@@ -20,7 +20,6 @@
 import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.util.Optional;
-import java.util.concurrent.atomic.AtomicInteger;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.HConstants;
@@ -31,6 +30,7 @@
 import org.apache.hadoop.hbase.io.hfile.HFileInfo;
 import org.apache.hadoop.hbase.io.hfile.HFileScanner;
 import org.apache.hadoop.hbase.io.hfile.ReaderContext;
+import org.apache.hadoop.hbase.regionserver.StoreFileInfo;
 import org.apache.hadoop.hbase.regionserver.StoreFileReader;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.yetus.audience.InterfaceAudience;
@@ -69,13 +69,12 @@ public class HalfStoreFileReader extends StoreFileReader {
    * @param fileInfo  HFile info
    * @param cacheConf CacheConfig
    * @param r         original reference file (contains top or bottom)
-   * @param refCount  reference count
    * @param conf      Configuration
    */
   public HalfStoreFileReader(final ReaderContext context, final HFileInfo fileInfo,
-    final CacheConfig cacheConf, final Reference r, AtomicInteger refCount,
+    final CacheConfig cacheConf, final Reference r, StoreFileInfo storeFileInfo,
     final Configuration conf) throws IOException {
-    super(context, fileInfo, cacheConf, refCount, conf);
+    super(context, fileInfo, cacheConf, storeFileInfo, conf);
     // This is not actual midkey for this half-file; its just border
     // around which we split top and bottom. Have to look in files to find
     // actual last and first keys for bottom and top halves. Half-files don't
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStoreFile.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStoreFile.java
index 606058597b74..ae514f0aef8d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStoreFile.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStoreFile.java
@@ -343,12 +343,12 @@ public boolean isCompactedAway() {
   }
 
   public int getRefCount() {
-    return fileInfo.refCount.get();
+    return fileInfo.getRefCount();
   }
 
   /** Returns true if the file is still used in reads */
   public boolean isReferencedInReads() {
-    int rc = fileInfo.refCount.get();
+    int rc = fileInfo.getRefCount();
     assert rc >= 0; // we should not go negative.
     return rc > 0;
   }
@@ -647,11 +647,11 @@ Set<String> getCompactedStoreFiles() {
   }
 
   long increaseRefCount() {
-    return this.fileInfo.refCount.incrementAndGet();
+    return this.fileInfo.increaseRefCount();
   }
 
   long decreaseRefCount() {
-    return this.fileInfo.refCount.decrementAndGet();
+    return this.fileInfo.decreaseRefCount();
   }
 
   static void increaseStoreFilesRefeCount(Collection<HStoreFile> storeFiles) {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileInfo.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileInfo.java
index 4b6a375fdb96..518210398d4e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileInfo.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileInfo.java
@@ -108,7 +108,7 @@ public class StoreFileInfo implements Configurable {
   // Counter that is incremented every time a scanner is created on the
   // store file. It is decremented when the scan on the store file is
   // done.
-  final AtomicInteger refCount = new AtomicInteger(0);
+  private final AtomicInteger refCount = new AtomicInteger(0);
 
   /**
    * Create a Store File Info
@@ -275,12 +275,13 @@ public HDFSBlocksDistribution getHDFSBlockDistribution() {
     return this.hdfsBlocksDistribution;
   }
 
-  StoreFileReader createReader(ReaderContext context, CacheConfig cacheConf) throws IOException {
+  public StoreFileReader createReader(ReaderContext context, CacheConfig cacheConf)
+    throws IOException {
     StoreFileReader reader = null;
     if (this.reference != null) {
-      reader = new HalfStoreFileReader(context, hfileInfo, cacheConf, reference, refCount, conf);
+      reader = new HalfStoreFileReader(context, hfileInfo, cacheConf, reference, this, conf);
     } else {
-      reader = new StoreFileReader(context, hfileInfo, cacheConf, refCount, conf);
+      reader = new StoreFileReader(context, hfileInfo, cacheConf, this, conf);
     }
     return reader;
   }
@@ -681,7 +682,7 @@ boolean isNoReadahead() {
     return this.noReadahead;
   }
 
-  HFileInfo getHFileInfo() {
+  public HFileInfo getHFileInfo() {
     return hfileInfo;
   }
 
@@ -713,4 +714,16 @@ public void initHFileInfo(ReaderContext context) throws IOException {
     this.hfileInfo = new HFileInfo(context, conf);
   }
 
+  int getRefCount() {
+    return this.refCount.get();
+  }
+
+  int increaseRefCount() {
+    return this.refCount.incrementAndGet();
+  }
+
+  int decreaseRefCount() {
+    return this.refCount.decrementAndGet();
+  }
+
 }
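
With refCount private, StoreFileInfo is now the only class that touches the
counter; HStoreFile above and StoreFileReader below merely delegate. The
lifecycle, paraphrasing the comments in the hunks above (a hypothetical
in-package caller, not code from this patch; incrementRefCount and
readCompleted are package-private):

    StoreFileReader reader = storeFileInfo.createReader(context, cacheConf);
    reader.incrementRefCount();  // scanner opened on the file: count 0 -> 1
    try {
      // ... scan ...
    } finally {
      reader.readCompleted();    // scan done: count 1 -> 0; a STREAM reader
                                 // also closes its underlying HFile here
    }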
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileReader.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileReader.java
index 36c67f41a3e7..a2778e54a725 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileReader.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileReader.java
@@ -27,7 +27,6 @@
 import java.util.Map;
 import java.util.Optional;
 import java.util.SortedSet;
-import java.util.concurrent.atomic.AtomicInteger;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparator;
@@ -78,24 +77,26 @@ public class StoreFileReader {
   private int prefixLength = -1;
   protected Configuration conf;
 
-  // Counter that is incremented every time a scanner is created on the
-  // store file. It is decremented when the scan on the store file is
-  // done. All StoreFileReader for the same StoreFile will share this counter.
-  private final AtomicInteger refCount;
+  /**
+   * All {@link StoreFileReader} for the same StoreFile will share the
+   * {@link StoreFileInfo#refCount}. Counter that is incremented every time a scanner is created on
+   * the store file. It is decremented when the scan on the store file is done.
+   */
+  private final StoreFileInfo storeFileInfo;
   private final ReaderContext context;
 
-  private StoreFileReader(HFile.Reader reader, AtomicInteger refCount, ReaderContext context,
+  private StoreFileReader(HFile.Reader reader, StoreFileInfo storeFileInfo, ReaderContext context,
     Configuration conf) {
     this.reader = reader;
     bloomFilterType = BloomType.NONE;
-    this.refCount = refCount;
+    this.storeFileInfo = storeFileInfo;
     this.context = context;
     this.conf = conf;
   }
 
   public StoreFileReader(ReaderContext context, HFileInfo fileInfo, CacheConfig cacheConf,
-    AtomicInteger refCount, Configuration conf) throws IOException {
-    this(HFile.createReader(context, fileInfo, cacheConf, conf), refCount, context, conf);
+    StoreFileInfo storeFileInfo, Configuration conf) throws IOException {
+    this(HFile.createReader(context, fileInfo, cacheConf, conf), storeFileInfo, context, conf);
   }
 
   void copyFields(StoreFileReader storeFileReader) throws IOException {
@@ -120,7 +121,7 @@ public boolean isPrimaryReplicaReader() {
    */
   @InterfaceAudience.Private
   StoreFileReader() {
-    this.refCount = new AtomicInteger(0);
+    this.storeFileInfo = null;
     this.reader = null;
     this.context = null;
   }
@@ -151,7 +152,7 @@ public StoreFileScanner getStoreFileScanner(boolean cacheBlocks, boolean pread,
    * is opened.
    */
   int getRefCount() {
-    return refCount.get();
+    return storeFileInfo.getRefCount();
  }
 
   /**
@@ -159,7 +160,7 @@ int getRefCount() {
    * count so reader is not close until some object is holding the lock
    */
   void incrementRefCount() {
-    refCount.incrementAndGet();
+    storeFileInfo.increaseRefCount();
   }
 
   /**
@@ -167,7 +168,7 @@ void incrementRefCount() {
    * count, and also, if this is not the common pread reader, we should close it.
    */
   void readCompleted() {
-    refCount.decrementAndGet();
+    storeFileInfo.decreaseRefCount();
     if (context.getReaderType() == ReaderType.STREAM) {
       try {
         reader.close(false);
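
Every reader over the same store file now shares one counter through the
common StoreFileInfo, so a pread reader and a STREAM reader opened from the
same file report the same count; this is what getRefCount() feeds into the
isReferencedInReads() check in HStoreFile. One caveat: the package-private
no-arg constructor used for mocks leaves storeFileInfo null, so the ref-count
methods are only meaningful on readers obtained from
StoreFileInfo.createReader(). A hypothetical illustration (both contexts are
assumed to point at the same file):

    StoreFileReader pread = storeFileInfo.createReader(preadContext, cacheConf);
    StoreFileReader stream = storeFileInfo.createReader(streamContext, cacheConf);
    pread.incrementRefCount();
    assert stream.getRefCount() == 1; // one counter, shared via StoreFileInfo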
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileWriter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileWriter.java
index de32c270565b..b76867d1c223 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileWriter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileWriter.java
@@ -402,7 +402,7 @@ HFile.Writer getHFileWriter() {
    * @param dir Directory to create file in.
    * @return random filename inside passed dir
    */
-  static Path getUniqueFile(final FileSystem fs, final Path dir) throws IOException {
+  public static Path getUniqueFile(final FileSystem fs, final Path dir) throws IOException {
     if (!fs.getFileStatus(dir).isDirectory()) {
       throw new IOException("Expecting " + dir.toString() + " to be a directory");
     }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.java
index 06f97cf0aff6..e1bea90f49d9 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/BulkLoadHFilesTool.java
@@ -753,10 +753,11 @@ private static void copyHFileHalf(Configuration conf, Path inFile, Path outFile,
     StoreFileWriter halfWriter = null;
     try {
       ReaderContext context = new ReaderContextBuilder().withFileSystemAndPath(fs, inFile).build();
-      HFileInfo hfile = new HFileInfo(context, conf);
-      halfReader =
-        new HalfStoreFileReader(context, hfile, cacheConf, reference, new AtomicInteger(0), conf);
-      hfile.initMetaAndIndex(halfReader.getHFileReader());
+      StoreFileInfo storeFileInfo =
+        new StoreFileInfo(conf, fs, fs.getFileStatus(inFile), reference);
+      storeFileInfo.initHFileInfo(context);
+      halfReader = (HalfStoreFileReader) storeFileInfo.createReader(context, cacheConf);
+      storeFileInfo.getHFileInfo().initMetaAndIndex(halfReader.getHFileReader());
       Map<byte[], byte[]> fileInfo = halfReader.loadFileInfo();
 
       int blocksize = familyDescriptor.getBlocksize();
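
Two things happen here: getUniqueFile() becomes public so the tests below can
draw fresh store file names instead of hard-coding them, and copyHFileHalf()
now builds a StoreFileInfo around the Reference rather than newing up a
HalfStoreFileReader with a throwaway counter. The downcast is safe because
createReader() returns a HalfStoreFileReader whenever the info wraps a
Reference (see the StoreFileInfo hunk earlier). Note the directory contract
visible above: getUniqueFile() fails if the directory is missing, which is
why the tests below guard it (a sketch using the names from the test diffs
that follow):

    if (!fs.exists(ROOT_DIR)) {
      fs.mkdirs(ROOT_DIR); // getUniqueFile throws IOException on a missing dir
    }
    Path f = StoreFileWriter.getUniqueFile(fs, ROOT_DIR);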
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHalfStoreFileReader.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHalfStoreFileReader.java
index cdeb3d9de832..7dd4cbe44f93 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHalfStoreFileReader.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHalfStoreFileReader.java
@@ -24,7 +24,6 @@
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
-import java.util.concurrent.atomic.AtomicInteger;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -39,10 +38,10 @@
 import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.io.hfile.HFileContext;
 import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
-import org.apache.hadoop.hbase.io.hfile.HFileInfo;
 import org.apache.hadoop.hbase.io.hfile.HFileScanner;
 import org.apache.hadoop.hbase.io.hfile.ReaderContext;
 import org.apache.hadoop.hbase.io.hfile.ReaderContextBuilder;
+import org.apache.hadoop.hbase.regionserver.StoreFileInfo;
 import org.apache.hadoop.hbase.testclassification.IOTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -118,10 +117,12 @@ public void testHalfScanAndReseek() throws IOException {
   private void doTestOfScanAndReseek(Path p, FileSystem fs, Reference bottom, CacheConfig cacheConf)
     throws IOException {
     ReaderContext context = new ReaderContextBuilder().withFileSystemAndPath(fs, p).build();
-    HFileInfo fileInfo = new HFileInfo(context, TEST_UTIL.getConfiguration());
-    final HalfStoreFileReader halfreader = new HalfStoreFileReader(context, fileInfo, cacheConf,
-      bottom, new AtomicInteger(0), TEST_UTIL.getConfiguration());
-    fileInfo.initMetaAndIndex(halfreader.getHFileReader());
+    StoreFileInfo storeFileInfo =
+      new StoreFileInfo(TEST_UTIL.getConfiguration(), fs, fs.getFileStatus(p), bottom);
+    storeFileInfo.initHFileInfo(context);
+    final HalfStoreFileReader halfreader =
+      (HalfStoreFileReader) storeFileInfo.createReader(context, cacheConf);
+    storeFileInfo.getHFileInfo().initMetaAndIndex(halfreader.getHFileReader());
     halfreader.loadFileInfo();
     final HFileScanner scanner = halfreader.getScanner(false, false);
 
@@ -214,10 +215,12 @@ public void testHalfScanner() throws IOException {
   private Cell doTestOfSeekBefore(Path p, FileSystem fs, Reference bottom, Cell seekBefore,
     CacheConfig cacheConfig) throws IOException {
     ReaderContext context = new ReaderContextBuilder().withFileSystemAndPath(fs, p).build();
-    HFileInfo fileInfo = new HFileInfo(context, TEST_UTIL.getConfiguration());
-    final HalfStoreFileReader halfreader = new HalfStoreFileReader(context, fileInfo, cacheConfig,
-      bottom, new AtomicInteger(0), TEST_UTIL.getConfiguration());
-    fileInfo.initMetaAndIndex(halfreader.getHFileReader());
+    StoreFileInfo storeFileInfo =
+      new StoreFileInfo(TEST_UTIL.getConfiguration(), fs, fs.getFileStatus(p), bottom);
+    storeFileInfo.initHFileInfo(context);
+    final HalfStoreFileReader halfreader =
+      (HalfStoreFileReader) storeFileInfo.createReader(context, cacheConfig);
+    storeFileInfo.getHFileInfo().initMetaAndIndex(halfreader.getHFileReader());
     halfreader.loadFileInfo();
     final HFileScanner scanner = halfreader.getScanner(false, false);
     scanner.seekBefore(seekBefore);
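
The tests now go through the same construction path as production code,
including the Reference-taking StoreFileInfo constructor that copyHFileHalf
uses above. A hedged sketch of how a bottom-half fixture might be assembled
(createBottomReference is assumed to exist on
org.apache.hadoop.hbase.io.Reference; the actual fixture setup is outside
this patch):

    Reference bottom = Reference.createBottomReference(midRow);
    StoreFileInfo storeFileInfo =
      new StoreFileInfo(TEST_UTIL.getConfiguration(), fs, fs.getFileStatus(p), bottom);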
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java
index 2a2e7a2d2fa3..a0c23af5ef0d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java
@@ -39,7 +39,6 @@
 import java.util.Map;
 import java.util.OptionalLong;
 import java.util.TreeSet;
-import java.util.concurrent.atomic.AtomicInteger;
 import java.util.function.BiFunction;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
@@ -75,7 +74,6 @@
 import org.apache.hadoop.hbase.io.hfile.HFileContext;
 import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
 import org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoder;
-import org.apache.hadoop.hbase.io.hfile.HFileInfo;
 import org.apache.hadoop.hbase.io.hfile.HFileScanner;
 import org.apache.hadoop.hbase.io.hfile.PreviousBlockCompressionRatePredicator;
 import org.apache.hadoop.hbase.io.hfile.ReaderContext;
@@ -118,7 +116,7 @@ public class TestHStoreFile {
   private static final Logger LOG = LoggerFactory.getLogger(TestHStoreFile.class);
   private static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil();
   private CacheConfig cacheConf = new CacheConfig(TEST_UTIL.getConfiguration());
-  private static String ROOT_DIR = TEST_UTIL.getDataTestDir("TestStoreFile").toString();
+  private static Path ROOT_DIR = TEST_UTIL.getDataTestDir("TestStoreFile");
   private static final ChecksumType CKTYPE = ChecksumType.CRC32C;
   private static final int CKBYTES = 512;
   private static String TEST_FAMILY = "cf";
@@ -598,10 +596,10 @@ private void bloomWriteRead(StoreFileWriter writer, FileSystem fs) throws Except
     writer.close();
 
     ReaderContext context = new ReaderContextBuilder().withFileSystemAndPath(fs, f).build();
-    HFileInfo fileInfo = new HFileInfo(context, conf);
-    StoreFileReader reader =
-      new StoreFileReader(context, fileInfo, cacheConf, new AtomicInteger(0), conf);
-    fileInfo.initMetaAndIndex(reader.getHFileReader());
+    StoreFileInfo storeFileInfo = new StoreFileInfo(conf, fs, f, true);
+    storeFileInfo.initHFileInfo(context);
+    StoreFileReader reader = storeFileInfo.createReader(context, cacheConf);
+    storeFileInfo.getHFileInfo().initMetaAndIndex(reader.getHFileReader());
     reader.loadFileInfo();
     reader.loadBloomfilter();
     StoreFileScanner scanner = getStoreFileScanner(reader, false, false);
@@ -646,7 +644,10 @@ public void testBloomFilter() throws Exception {
     conf.setBoolean(BloomFilterFactory.IO_STOREFILE_BLOOM_ENABLED, true);
 
     // write the file
-    Path f = new Path(ROOT_DIR, name.getMethodName());
+    if (!fs.exists(ROOT_DIR)) {
+      fs.mkdirs(ROOT_DIR);
+    }
+    Path f = StoreFileWriter.getUniqueFile(fs, ROOT_DIR);
     HFileContext meta = new HFileContextBuilder().withBlockSize(BLOCKSIZE_SMALL)
       .withChecksumType(CKTYPE).withBytesPerCheckSum(CKBYTES).build();
     // Make a store file and write data to it.
@@ -662,7 +663,10 @@ public void testDeleteFamilyBloomFilter() throws Exception {
     float err = conf.getFloat(BloomFilterFactory.IO_STOREFILE_BLOOM_ERROR_RATE, 0);
 
     // write the file
-    Path f = new Path(ROOT_DIR, name.getMethodName());
+    if (!fs.exists(ROOT_DIR)) {
+      fs.mkdirs(ROOT_DIR);
+    }
+    Path f = StoreFileWriter.getUniqueFile(fs, ROOT_DIR);
 
     HFileContext meta = new HFileContextBuilder().withBlockSize(BLOCKSIZE_SMALL)
       .withChecksumType(CKTYPE).withBytesPerCheckSum(CKBYTES).build();
@@ -681,10 +685,10 @@ public void testDeleteFamilyBloomFilter() throws Exception {
     writer.close();
 
     ReaderContext context = new ReaderContextBuilder().withFileSystemAndPath(fs, f).build();
-    HFileInfo fileInfo = new HFileInfo(context, conf);
-    StoreFileReader reader =
-      new StoreFileReader(context, fileInfo, cacheConf, new AtomicInteger(0), conf);
-    fileInfo.initMetaAndIndex(reader.getHFileReader());
+    StoreFileInfo storeFileInfo = new StoreFileInfo(conf, fs, f, true);
+    storeFileInfo.initHFileInfo(context);
+    StoreFileReader reader = storeFileInfo.createReader(context, cacheConf);
+    storeFileInfo.getHFileInfo().initMetaAndIndex(reader.getHFileReader());
     reader.loadFileInfo();
     reader.loadBloomfilter();
 
@@ -720,7 +724,11 @@ public void testDeleteFamilyBloomFilter() throws Exception {
   @Test
   public void testReseek() throws Exception {
     // write the file
-    Path f = new Path(ROOT_DIR, name.getMethodName());
+    if (!fs.exists(ROOT_DIR)) {
+      fs.mkdirs(ROOT_DIR);
+    }
+    Path f = StoreFileWriter.getUniqueFile(fs, ROOT_DIR);
+
     HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).build();
     // Make a store file and write data to it.
     StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, this.fs).withFilePath(f)
@@ -730,10 +738,10 @@ public void testReseek() throws Exception {
     writer.close();
 
     ReaderContext context = new ReaderContextBuilder().withFileSystemAndPath(fs, f).build();
-    HFileInfo fileInfo = new HFileInfo(context, conf);
-    StoreFileReader reader =
-      new StoreFileReader(context, fileInfo, cacheConf, new AtomicInteger(0), conf);
-    fileInfo.initMetaAndIndex(reader.getHFileReader());
+    StoreFileInfo storeFileInfo = new StoreFileInfo(conf, fs, f, true);
+    storeFileInfo.initHFileInfo(context);
+    StoreFileReader reader = storeFileInfo.createReader(context, cacheConf);
+    storeFileInfo.getHFileInfo().initMetaAndIndex(reader.getHFileReader());
 
     // Now do reseek with empty KV to position to the beginning of the file
 
@@ -764,9 +772,13 @@ public void testBloomTypes() throws Exception {
     // 2nd for loop for every column (2*colCount)
     float[] expErr = { 2 * rowCount * colCount * err, 2 * rowCount * 2 * colCount * err };
 
+    if (!fs.exists(ROOT_DIR)) {
+      fs.mkdirs(ROOT_DIR);
+    }
     for (int x : new int[] { 0, 1 }) {
       // write the file
-      Path f = new Path(ROOT_DIR, name.getMethodName() + x);
+      Path f = StoreFileWriter.getUniqueFile(fs, ROOT_DIR);
+
       HFileContext meta = new HFileContextBuilder().withBlockSize(BLOCKSIZE_SMALL)
         .withChecksumType(CKTYPE).withBytesPerCheckSum(CKBYTES).build();
       // Make a store file and write data to it.
@@ -790,10 +802,10 @@ public void testBloomTypes() throws Exception {
       ReaderContext context =
         new ReaderContextBuilder().withFilePath(f).withFileSize(fs.getFileStatus(f).getLen())
           .withFileSystem(fs).withInputStreamWrapper(new FSDataInputStreamWrapper(fs, f)).build();
-      HFileInfo fileInfo = new HFileInfo(context, conf);
-      StoreFileReader reader =
-        new StoreFileReader(context, fileInfo, cacheConf, new AtomicInteger(0), conf);
-      fileInfo.initMetaAndIndex(reader.getHFileReader());
+      StoreFileInfo storeFileInfo = new StoreFileInfo(conf, fs, f, true);
+      storeFileInfo.initHFileInfo(context);
+      StoreFileReader reader = storeFileInfo.createReader(context, cacheConf);
+      storeFileInfo.getHFileInfo().initMetaAndIndex(reader.getHFileReader());
       reader.loadFileInfo();
       reader.loadBloomfilter();
       StoreFileScanner scanner = getStoreFileScanner(reader, false, false);
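
ROOT_DIR changes from String to Path because getUniqueFile(fs, dir) takes a
Path, and each test now writes a randomly named file instead of reusing
name.getMethodName(), so tests sharing the data directory cannot collide. A
small sketch of the property relied on (assuming getUniqueFile draws a fresh
random name per call, per its javadoc earlier):

    Path a = StoreFileWriter.getUniqueFile(fs, ROOT_DIR);
    Path b = StoreFileWriter.getUniqueFile(fs, ROOT_DIR);
    assert !a.equals(b); // distinct fixtures in the same ROOT_DIR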
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRowPrefixBloomFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRowPrefixBloomFilter.java
index 8748e59f6eca..27da15d59150 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRowPrefixBloomFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRowPrefixBloomFilter.java
@@ -24,7 +24,6 @@
 import static org.mockito.Mockito.when;
 
 import java.io.IOException;
-import java.util.concurrent.atomic.AtomicInteger;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -37,7 +36,6 @@
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
 import org.apache.hadoop.hbase.io.hfile.HFileContext;
 import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
-import org.apache.hadoop.hbase.io.hfile.HFileInfo;
 import org.apache.hadoop.hbase.io.hfile.ReaderContext;
 import org.apache.hadoop.hbase.io.hfile.ReaderContextBuilder;
 import org.apache.hadoop.hbase.log.HBaseMarkers;
@@ -180,15 +178,18 @@ public void testRowPrefixBloomFilter() throws Exception {
     float expErr = 2 * prefixRowCount * suffixRowCount * err;
     int expKeys = fixedLengthExpKeys;
     // write the file
-    Path f = new Path(testDir, name.getMethodName());
+    if (!fs.exists(testDir)) {
+      fs.mkdirs(testDir);
+    }
+    Path f = StoreFileWriter.getUniqueFile(fs, testDir);
     writeStoreFile(f, bt, expKeys);
 
     // read the file
     ReaderContext context = new ReaderContextBuilder().withFileSystemAndPath(fs, f).build();
-    HFileInfo fileInfo = new HFileInfo(context, conf);
-    StoreFileReader reader =
-      new StoreFileReader(context, fileInfo, cacheConf, new AtomicInteger(0), conf);
-    fileInfo.initMetaAndIndex(reader.getHFileReader());
+    StoreFileInfo storeFileInfo = new StoreFileInfo(conf, fs, f, true);
+    storeFileInfo.initHFileInfo(context);
+    StoreFileReader reader = storeFileInfo.createReader(context, cacheConf);
+    storeFileInfo.getHFileInfo().initMetaAndIndex(reader.getHFileReader());
     reader.loadFileInfo();
     reader.loadBloomfilter();
 
@@ -251,14 +252,17 @@ public void testRowPrefixBloomFilterWithGet() throws Exception {
     FileSystem fs = FileSystem.getLocal(conf);
     int expKeys = fixedLengthExpKeys;
     // write the file
-    Path f = new Path(testDir, name.getMethodName());
+    if (!fs.exists(testDir)) {
+      fs.mkdirs(testDir);
+    }
+    Path f = StoreFileWriter.getUniqueFile(fs, testDir);
     writeStoreFile(f, bt, expKeys);
 
     ReaderContext context = new ReaderContextBuilder().withFileSystemAndPath(fs, f).build();
-    HFileInfo fileInfo = new HFileInfo(context, conf);
-    StoreFileReader reader =
-      new StoreFileReader(context, fileInfo, cacheConf, new AtomicInteger(0), conf);
-    fileInfo.initMetaAndIndex(reader.getHFileReader());
+    StoreFileInfo storeFileInfo = new StoreFileInfo(conf, fs, f, true);
+    storeFileInfo.initHFileInfo(context);
+    StoreFileReader reader = storeFileInfo.createReader(context, cacheConf);
+    storeFileInfo.getHFileInfo().initMetaAndIndex(reader.getHFileReader());
     reader.loadFileInfo();
     reader.loadBloomfilter();
 
@@ -304,14 +308,17 @@ public void testRowPrefixBloomFilterWithScan() throws Exception {
     FileSystem fs = FileSystem.getLocal(conf);
     int expKeys = fixedLengthExpKeys;
     // write the file
-    Path f = new Path(testDir, name.getMethodName());
+    if (!fs.exists(testDir)) {
+      fs.mkdirs(testDir);
+    }
+    Path f = StoreFileWriter.getUniqueFile(fs, testDir);
     writeStoreFile(f, bt, expKeys);
 
     ReaderContext context = new ReaderContextBuilder().withFileSystemAndPath(fs, f).build();
-    HFileInfo fileInfo = new HFileInfo(context, conf);
-    StoreFileReader reader =
-      new StoreFileReader(context, fileInfo, cacheConf, new AtomicInteger(0), conf);
-    fileInfo.initMetaAndIndex(reader.getHFileReader());
+    StoreFileInfo storeFileInfo = new StoreFileInfo(conf, fs, f, true);
+    storeFileInfo.initHFileInfo(context);
+    StoreFileReader reader = storeFileInfo.createReader(context, cacheConf);
+    storeFileInfo.getHFileInfo().initMetaAndIndex(reader.getHFileReader());
     reader.loadFileInfo();
     reader.loadBloomfilter();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileScannerWithTagCompression.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileScannerWithTagCompression.java
index ea6e003fc9c7..67671fe12fef 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileScannerWithTagCompression.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileScannerWithTagCompression.java
@@ -22,7 +22,6 @@
 
 import java.io.IOException;
 import java.util.List;
-import java.util.concurrent.atomic.AtomicInteger;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -38,7 +37,6 @@
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
 import org.apache.hadoop.hbase.io.hfile.HFileContext;
 import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
-import org.apache.hadoop.hbase.io.hfile.HFileInfo;
 import org.apache.hadoop.hbase.io.hfile.ReaderContext;
 import org.apache.hadoop.hbase.io.hfile.ReaderContextBuilder;
 import org.apache.hadoop.hbase.testclassification.RegionServerTests;
@@ -60,8 +58,7 @@ public class TestStoreFileScannerWithTagCompression {
   private static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil();
   private static Configuration conf = TEST_UTIL.getConfiguration();
   private static CacheConfig cacheConf = new CacheConfig(TEST_UTIL.getConfiguration());
-  private static String ROOT_DIR =
-    TEST_UTIL.getDataTestDir("TestStoreFileScannerWithTagCompression").toString();
+  private static Path ROOT_DIR = TEST_UTIL.getDataTestDir("TestStoreFileScannerWithTagCompression");
   private static FileSystem fs = null;
 
   @BeforeClass
@@ -73,7 +70,10 @@ public static void setUp() throws IOException {
   @Test
   public void testReseek() throws Exception {
     // write the file
-    Path f = new Path(ROOT_DIR, "testReseek");
+    if (!fs.exists(ROOT_DIR)) {
+      fs.mkdirs(ROOT_DIR);
+    }
+    Path f = StoreFileWriter.getUniqueFile(fs, ROOT_DIR);
     HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).withIncludesTags(true)
       .withCompressTags(true).withDataBlockEncoding(DataBlockEncoding.PREFIX).build();
     // Make a store file and write data to it.
@@ -84,10 +84,10 @@ public void testReseek() throws Exception {
     writer.close();
 
     ReaderContext context = new ReaderContextBuilder().withFileSystemAndPath(fs, f).build();
-    HFileInfo fileInfo = new HFileInfo(context, conf);
-    StoreFileReader reader =
-      new StoreFileReader(context, fileInfo, cacheConf, new AtomicInteger(0), conf);
-    fileInfo.initMetaAndIndex(reader.getHFileReader());
+    StoreFileInfo storeFileInfo = new StoreFileInfo(conf, fs, f, true);
+    storeFileInfo.initHFileInfo(context);
+    StoreFileReader reader = storeFileInfo.createReader(context, cacheConf);
+    storeFileInfo.getHFileInfo().initMetaAndIndex(reader.getHFileReader());
     StoreFileScanner s = reader.getStoreFileScanner(false, false, false, 0, 0, false);
     try {
       // Now do reseek with empty KV to position to the beginning of the file