Commit da171c3

HBASE-27936 NPE in StoreFileReader.passesGeneralRowPrefixBloomFilter() (#5300)

Need to also copy bloomFilterMetrics in StoreFileReader.copyFields

Signed-off-by: Viraj Jasani <[email protected]>
Apache9 authored Jun 22, 2023
1 parent cf02edb commit da171c3
Showing 4 changed files with 235 additions and 29 deletions.
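
The failure mode is the classic hazard of a field-by-field copy method: a newer field (bloomFilterMetrics) was never added to copyFields, so a reader produced by copying carried a null field that passesGeneralRowPrefixBloomFilter() later dereferenced. A minimal sketch of the pattern, with hypothetical names rather than the real HBase classes:

class BloomMetrics {
  long eligibleRequests;
}

class Reader {
  Object bloomFilter; // may legitimately be null when the filter is faulty or missing
  BloomMetrics bloomFilterMetrics;

  void copyFields(Reader source) {
    this.bloomFilter = source.bloomFilter;
    this.bloomFilterMetrics = source.bloomFilterMetrics; // the line this commit adds
  }

  boolean passesBloomFilter() {
    if (bloomFilter == null) {
      bloomFilterMetrics.eligibleRequests++; // NPE when the copy skipped metrics
      return true; // no filter loaded, so fall back to "row may be present"
    }
    return true; // actual bloom check elided
  }
}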

StoreFileReader.java

@@ -22,6 +22,7 @@
 import static org.apache.hadoop.hbase.regionserver.HStoreFile.DELETE_FAMILY_COUNT;
 import static org.apache.hadoop.hbase.regionserver.HStoreFile.LAST_BLOOM_KEY;
 
+import com.google.errorprone.annotations.RestrictedApi;
 import java.io.DataInput;
 import java.io.IOException;
 import java.util.Map;

@@ -103,6 +104,7 @@ void copyFields(StoreFileReader storeFileReader) throws IOException {
     this.generalBloomFilter = storeFileReader.generalBloomFilter;
     this.deleteFamilyBloomFilter = storeFileReader.deleteFamilyBloomFilter;
     this.bloomFilterType = storeFileReader.bloomFilterType;
+    this.bloomFilterMetrics = storeFileReader.bloomFilterMetrics;
     this.sequenceID = storeFileReader.sequenceID;
     this.timeRange = storeFileReader.timeRange;
     this.lastBloomKey = storeFileReader.lastBloomKey;

@@ -496,7 +498,9 @@ public Map<byte[], byte[]> loadFileInfo() throws IOException {
     return fi;
   }
 
-  public void loadBloomfilter() {
+  @RestrictedApi(explanation = "Should only be called in tests", link = "",
+      allowedOnPath = ".*/src/test/.*")
+  void loadBloomfilter() {
     this.loadBloomfilter(BlockType.GENERAL_BLOOM_META, null);
     this.loadBloomfilter(BlockType.DELETE_FAMILY_BLOOM_META, null);
   }

@@ -546,7 +550,9 @@ public void loadBloomfilter(BlockType blockType, BloomFilterMetrics metrics) {
     }
   }
 
-  private void setBloomFilterFaulty(BlockType blockType) {
+  @RestrictedApi(explanation = "Should only be called in tests", link = "",
+      allowedOnPath = ".*/StoreFileReader.java|.*/src/test/.*")
+  void setBloomFilterFaulty(BlockType blockType) {
     if (blockType == BlockType.GENERAL_BLOOM_META) {
       setGeneralBloomFilterFaulty();
     } else if (blockType == BlockType.DELETE_FAMILY_BLOOM_META) {

@@ -563,11 +569,11 @@ public long getFilterEntries() {
     return generalBloomFilter != null ? generalBloomFilter.getKeyCount() : reader.getEntries();
   }
 
-  public void setGeneralBloomFilterFaulty() {
+  private void setGeneralBloomFilterFaulty() {
     generalBloomFilter = null;
   }
 
-  public void setDeleteFamilyBloomFilterFaulty() {
+  private void setDeleteFamilyBloomFilterFaulty() {
     this.deleteFamilyBloomFilter = null;
   }
 
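Both @RestrictedApi annotations above are Error Prone's compile-time guard: with the Error Prone plugin enabled, a call to the annotated method compiles only if the calling file's path matches the allowedOnPath regex (test sources here, plus StoreFileReader.java itself for setBloomFilterFaulty). A minimal sketch of the same guard on a hypothetical class:

import com.google.errorprone.annotations.RestrictedApi;

public class BloomHolder {

  private Object bloomFilter = new Object();

  // Callable only from files matching allowedOnPath; any other call site
  // becomes a compile-time error under Error Prone.
  @RestrictedApi(explanation = "Should only be called in tests", link = "",
      allowedOnPath = ".*/src/test/.*")
  void clearBloomFilter() {
    bloomFilter = null;
  }
}

Narrowing the methods to package-private at the same time adds a second layer that plain javac enforces: code outside the package cannot reach them at all.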

StoreFileWriter.java

@@ -154,8 +154,7 @@ private StoreFileWriter(FileSystem fs, Path path, final Configuration conf, Cach
       this.bloomType = BloomType.NONE;
     }
 
-    // initialize delete family Bloom filter when there is NO RowCol Bloom
-    // filter
+    // initialize delete family Bloom filter when there is NO RowCol Bloom filter
     if (this.bloomType != BloomType.ROWCOL) {
       this.deleteFamilyBloomFilterWriter = BloomFilterFactory.createDeleteBloomAtWrite(conf,
         cacheConf, (int) Math.min(maxKeys, Integer.MAX_VALUE), writer);

RegionAsTable.java

@@ -19,7 +19,6 @@
 
 import java.io.IOException;
 import java.util.ArrayList;
-import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.TimeUnit;

@@ -129,39 +128,41 @@ public Result[] get(List<Get> gets) throws IOException {
   }
 
   static class RegionScannerToResultScannerAdaptor implements ResultScanner {
-    private static final Result[] EMPTY_RESULT_ARRAY = new Result[0];
-    private final RegionScanner regionScanner;
 
-    RegionScannerToResultScannerAdaptor(final RegionScanner regionScanner) {
-      this.regionScanner = regionScanner;
-    }
+    private final RegionScanner scanner;
 
-    @Override
-    public Iterator<Result> iterator() {
-      throw new UnsupportedOperationException();
-    }
+    private boolean moreRows = true;
 
-    @Override
-    public Result next() throws IOException {
-      List<Cell> cells = new ArrayList<>();
-      return regionScanner.next(cells) ? Result.create(cells) : null;
+    private final List<Cell> cells = new ArrayList<>();
+
+    RegionScannerToResultScannerAdaptor(final RegionScanner scanner) {
+      this.scanner = scanner;
     }
 
     @Override
-    public Result[] next(int nbRows) throws IOException {
-      List<Result> results = new ArrayList<>(nbRows);
-      for (int i = 0; i < nbRows; i++) {
-        Result result = next();
-        if (result == null) break;
-        results.add(result);
+    public Result next() throws IOException {
+      if (!moreRows) {
+        return null;
+      }
+      for (;;) {
+        moreRows = scanner.next(cells);
+        if (cells.isEmpty()) {
+          if (!moreRows) {
+            return null;
+          } else {
+            continue;
+          }
+        }
+        Result result = Result.create(cells);
+        cells.clear();
+        return result;
       }
-      return results.toArray(EMPTY_RESULT_ARRAY);
     }
 
     @Override
     public void close() {
       try {
-        regionScanner.close();
+        scanner.close();
       } catch (IOException e) {
         throw new RuntimeException(e);
       }

@@ -174,7 +175,7 @@ public boolean renewLease() {
 
     @Override
     public ScanMetrics getScanMetrics() {
-      throw new UnsupportedOperationException();
+      return null;
     }
   }
 
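The rewritten next() follows the RegionScanner contract more carefully: next(List<Cell>) fills the list with the current batch and returns whether more rows remain, so a call may return true with no cells, or return false while still emitting the final row's cells. The old one-call-per-Result translation could drop that final row and could surface an empty Result; the new version loops until it has cells or the scanner is exhausted, reusing one cell buffer across calls. The iterator() and next(int) overrides could be deleted because ResultScanner provides default implementations built on next(). A toy reduction of the drain loop, using a stand-in interface rather than the HBase types:

import java.util.ArrayList;
import java.util.List;

public class DrainLoopDemo {

  // Stand-in for RegionScanner.next(List<Cell>): fills `out` with the current
  // batch and returns whether more rows remain after this call.
  interface RowScanner {
    boolean next(List<String> out);
  }

  public static void main(String[] args) {
    // Emits an empty batch first, then the last row together with "no more rows".
    RowScanner scanner = new RowScanner() {
      private int calls;

      @Override
      public boolean next(List<String> out) {
        calls++;
        if (calls == 1) {
          return true; // empty batch, but more rows remain
        }
        if (calls == 2) {
          out.add("row-1");
          return false; // final row and exhaustion signaled in one call
        }
        return false;
      }
    };

    List<String> cells = new ArrayList<>();
    boolean moreRows = true;
    while (moreRows) {
      moreRows = scanner.next(cells);
      if (cells.isEmpty()) {
        if (!moreRows) {
          break; // exhausted without another row
        }
        continue; // skip the empty batch and call again
      }
      System.out.println("Result(" + cells + ")"); // prints exactly once: Result([row-1])
      cells.clear();
    }
  }
}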

TestBloomFilterFaulty.java (new file)

@@ -0,0 +1,200 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.regionserver;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;

import java.io.IOException;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Scan.ReadType;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.io.hfile.BlockType;
import org.apache.hadoop.hbase.regionserver.HRegion.FlushResult;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;

/**
 * A UT to make sure that everything is fine when we fail to load a bloom filter.
 * <p>
 * See HBASE-27936 for more details.
 */
@Category({ RegionServerTests.class, MediumTests.class })
public class TestBloomFilterFaulty {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
    HBaseClassTestRule.forClass(TestBloomFilterFaulty.class);

  private static final HBaseTestingUtil UTIL = new HBaseTestingUtil();

  private static final byte[] FAMILY = Bytes.toBytes("family");

  private static final byte[] QUAL = Bytes.toBytes("qualifier");

  private static final TableDescriptor TD =
    TableDescriptorBuilder.newBuilder(TableName.valueOf("test"))
      .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILY)
        .setBloomFilterType(BloomType.ROWPREFIX_FIXED_LENGTH)
        .setConfiguration("RowPrefixBloomFilter.prefix_length", "2").build())
      .build();

  private static final RegionInfo RI = RegionInfoBuilder.newBuilder(TD.getTableName()).build();

  @AfterClass
  public static void tearDownAfterClass() {
    UTIL.cleanupTestDir();
  }

  private HRegion region;

  @Rule
  public final TestName name = new TestName();

  private void generateHFiles() throws IOException {
    for (int i = 0; i < 4; i++) {
      long ts = EnvironmentEdgeManager.currentTime();
      for (int j = 0; j < 5; j++) {
        byte[] row = Bytes.toBytes(j);
        region.put(new Put(row).addColumn(FAMILY, QUAL, ts, Bytes.toBytes(i * 10 + j)));
        region.delete(new Delete(row).addFamilyVersion(FAMILY, ts));
      }

      for (int j = 5; j < 10; j++) {
        byte[] row = Bytes.toBytes(j);
        region.put(new Put(row).addColumn(FAMILY, QUAL, ts + 1, Bytes.toBytes(i * 10 + j)));
      }

      FlushResult result = region.flush(true);
      if (
        result.getResult() == FlushResult.Result.CANNOT_FLUSH
          || result.getResult() == FlushResult.Result.CANNOT_FLUSH_MEMSTORE_EMPTY
      ) {
        throw new IOException("Can not flush region, flush result: " + result);
      }
    }
  }

  @Before
  public void setUp() throws IOException {
    Path rootDir = UTIL.getDataTestDir(name.getMethodName());
    // generate some hfiles so we can have StoreFileReader which has bloomfilters
    region = HBaseTestingUtil.createRegionAndWAL(RI, rootDir, UTIL.getConfiguration(), TD);
    generateHFiles();
    HStore store = region.getStore(FAMILY);
    for (HStoreFile storefile : store.getStorefiles()) {
      storefile.initReader();
      StoreFileReader reader = storefile.getReader();
      // make sure we load bloom filters correctly
      assertNotNull(reader.generalBloomFilter);
      assertNotNull(reader.deleteFamilyBloomFilter);
    }
  }

  @After
  public void tearDown() throws IOException {
    if (region != null) {
      HBaseTestingUtil.closeRegionAndWAL(region);
    }
  }

  private void setFaulty(BlockType type) {
    HStore store = region.getStore(FAMILY);
    for (HStoreFile storefile : store.getStorefiles()) {
      storefile.getReader().setBloomFilterFaulty(type);
    }
  }

  private void testGet() throws IOException {
    for (int i = 0; i < 5; i++) {
      assertTrue(region.get(new Get(Bytes.toBytes(i))).isEmpty());
    }
    for (int i = 5; i < 10; i++) {
      assertEquals(30 + i,
        Bytes.toInt(region.get(new Get(Bytes.toBytes(i))).getValue(FAMILY, QUAL)));
    }
  }

  private void testStreamScan() throws IOException {
    try (RegionAsTable table = new RegionAsTable(region);
      ResultScanner scanner = table.getScanner(new Scan().setReadType(ReadType.STREAM))) {
      for (int i = 5; i < 10; i++) {
        Result result = scanner.next();
        assertEquals(i, Bytes.toInt(result.getRow()));
        assertEquals(30 + i, Bytes.toInt(result.getValue(FAMILY, QUAL)));
      }
      assertNull(scanner.next());
    }
  }

  private void testRegion() throws IOException {
    // normal read
    testGet();
    // scan with stream reader
    testStreamScan();
    // major compact
    region.compact(true);
    // test read and scan again
    testGet();
    testStreamScan();
  }

  @Test
  public void testNoGeneralBloomFilter() throws IOException {
    setFaulty(BlockType.GENERAL_BLOOM_META);
    testRegion();
  }

  @Test
  public void testNoDeleteFamilyBloomFilter() throws IOException {
    setFaulty(BlockType.DELETE_FAMILY_BLOOM_META);
    testRegion();
  }

  @Test
  public void testNoAnyBloomFilter() throws IOException {
    setFaulty(BlockType.GENERAL_BLOOM_META);
    setFaulty(BlockType.DELETE_FAMILY_BLOOM_META);
    testRegion();
  }
}
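
The new test pins down both halves of the fix. generateHFiles() runs four rounds (i = 0..3) writing value i * 10 + j; rows 0-4 also get a family-version delete at the write timestamp in every round, while rows 5-9 keep the newest value, 3 * 10 + j = 30 + j, which is exactly what testGet() asserts. The stream scan is the part that exercises copyFields: a STREAM read opens its own StoreFileReader copied from the shared pread reader. Each test then marks a bloom filter faulty and checks that reads merely lose the bloom short-circuit rather than failing. A quick self-check of the expected values (plain Java, not part of the patch):

public class ExpectedValues {
  public static void main(String[] args) {
    int rounds = 4; // generateHFiles() flushes four times
    for (int j = 5; j < 10; j++) {
      // only the newest put survives for rows 5-9
      int expected = (rounds - 1) * 10 + j; // 30 + j, matching assertEquals(30 + i, ...)
      System.out.println("row " + j + " -> " + expected);
    }
  }
}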
