From 8ec02c025eea7d612f9832fd9d49d56d29bfd28e Mon Sep 17 00:00:00 2001
From: liangxs
Date: Tue, 16 Aug 2022 02:09:36 +0800
Subject: [PATCH] HBASE-26982 Add index and bloom filter statistics of LruBlockCache on… (#4376)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Signed-off-by: Andrew Purtell
Signed-off-by: stack
---
 .../hadoop/hbase/io/hfile/BlockType.java     |  9 +++
 .../tmpl/regionserver/BlockCacheTmpl.jamon   | 29 +++++++++-
 .../hadoop/hbase/io/hfile/LruBlockCache.java | 57 ++++++++++++++++---
 3 files changed, 86 insertions(+), 9 deletions(-)

diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockType.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockType.java
index 3bd98e6388c9..9b8a6bbfe2c5 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockType.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockType.java
@@ -215,4 +215,13 @@ public final boolean isData() {
     return this == DATA || this == ENCODED_DATA;
   }
 
+  /** Returns whether this block category is index */
+  public final boolean isIndex() {
+    return this.getCategory() == BlockCategory.INDEX;
+  }
+
+  /** Returns whether this block category is bloom filter */
+  public final boolean isBloom() {
+    return this.getCategory() == BlockCategory.BLOOM;
+  }
 }
diff --git a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/BlockCacheTmpl.jamon b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/BlockCacheTmpl.jamon
index f7abdc80dd3a..7f4d0063dc17 100644
--- a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/BlockCacheTmpl.jamon
+++ b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/BlockCacheTmpl.jamon
@@ -37,6 +37,7 @@ org.apache.hadoop.hbase.io.hfile.CachedBlock;
 org.apache.hadoop.conf.Configuration;
 org.apache.hadoop.hbase.io.hfile.CacheConfig;
 org.apache.hadoop.hbase.io.hfile.BlockCache;
+org.apache.hadoop.hbase.io.hfile.LruBlockCache;
 org.apache.hadoop.hbase.io.hfile.bucket.BucketCacheStats;
 org.apache.hadoop.hbase.io.hfile.bucket.BucketCache;
 org.apache.hadoop.hbase.io.hfile.bucket.BucketAllocator;
@@ -284,6 +285,8 @@ are combined counts. Request count is sum of hits and misses.
   String bcName = bc.getClass().getSimpleName();
   int maxCachedBlocksByFile = BlockCacheUtil.getMaxCachedBlocksByFile(config);
+  boolean lru = bc instanceof LruBlockCache;
+
   boolean bucketCache = bc.getClass().getSimpleName().equals("BucketCache");
   BucketCacheStats bucketCacheStats = null;
   BucketAllocator bucketAllocator = null;
@@ -328,7 +331,19 @@ are combined counts. Request count is sum of hits and misses.
             <td>Count of DATA Blocks</td>
         </tr>
 </%if>
+<%if lru %>
+    <tr>
+        <td>Index Block Count</td>
+        <td><% String.format("%,d", ((LruBlockCache)bc).getIndexBlockCount()) %></td>
+        <td>Count of INDEX Blocks</td>
+    </tr>
+    <tr>
+        <td>Bloom Block Count</td>
+        <td><% String.format("%,d", ((LruBlockCache)bc).getBloomBlockCount()) %></td>
+        <td>Count of BLOOM Blocks</td>
+    </tr>
+</%if>
     <tr>
         <td>Size of Blocks</td>
         <td><% TraditionalBinaryPrefix.long2String(bc.getCurrentSize(), "B", 1) %></td>
         <td>Size of Blocks</td>
@@ -340,7 +355,19 @@ are combined counts. Request count is sum of hits and misses.
         <td>Size of DATA Blocks</td>
     </tr>
 </%if>
-<& evictions_tmpl; bc = bc; &>
+<%if lru %>
+    <tr>
+        <td>Size of Index Blocks</td>
+        <td><% TraditionalBinaryPrefix.long2String(((LruBlockCache)bc).getCurrentIndexSize(), "B", 1) %></td>
+        <td>Size of INDEX Blocks</td>
+    </tr>
+    <tr>
+        <td>Size of Bloom Blocks</td>
+        <td><% TraditionalBinaryPrefix.long2String(((LruBlockCache)bc).getCurrentBloomSize(), "B", 1) %></td>
+        <td>Size of BLOOM Blocks</td>
+    </tr>
+</%if>
+    <& evictions_tmpl; bc = bc; &>
 <& hits_tmpl; bc = bc; &>
 <%if bucketCache %>
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java
index 48ba0eaf5798..a3f883745e06 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java
@@ -174,13 +174,25 @@ public class LruBlockCache implements FirstLevelBlockCache {
   private final AtomicLong size;
 
   /** Current size of data blocks */
-  private final LongAdder dataBlockSize;
+  private final LongAdder dataBlockSize = new LongAdder();
+
+  /** Current size of index blocks */
+  private final LongAdder indexBlockSize = new LongAdder();
+
+  /** Current size of bloom blocks */
+  private final LongAdder bloomBlockSize = new LongAdder();
 
   /** Current number of cached elements */
   private final AtomicLong elements;
 
   /** Current number of cached data block elements */
-  private final LongAdder dataBlockElements;
+  private final LongAdder dataBlockElements = new LongAdder();
+
+  /** Current number of cached index block elements */
+  private final LongAdder indexBlockElements = new LongAdder();
+
+  /** Current number of cached bloom block elements */
+  private final LongAdder bloomBlockElements = new LongAdder();
 
   /** Cache access count (sequential ID) */
   private final AtomicLong count;
@@ -307,8 +319,6 @@ public LruBlockCache(long maxSize, long blockSize, boolean evictionThread, int m
     this.stats = new CacheStats(this.getClass().getSimpleName());
     this.count = new AtomicLong(0);
     this.elements = new AtomicLong(0);
-    this.dataBlockElements = new LongAdder();
-    this.dataBlockSize = new LongAdder();
     this.overhead = calculateOverhead(maxSize, blockSize, mapConcurrencyLevel);
     this.size = new AtomicLong(this.overhead);
     this.hardCapacityLimitFactor = hardLimitFactor;
@@ -417,7 +427,11 @@ public void cacheBlock(BlockCacheKey cacheKey, Cacheable buf, boolean inMemory)
     long newSize = updateSizeMetrics(cb, false);
     map.put(cacheKey, cb);
     long val = elements.incrementAndGet();
-    if (buf.getBlockType().isData()) {
+    if (buf.getBlockType().isBloom()) {
+      bloomBlockElements.increment();
+    } else if (buf.getBlockType().isIndex()) {
+      indexBlockElements.increment();
+    } else if (buf.getBlockType().isData()) {
       dataBlockElements.increment();
     }
     if (LOG.isTraceEnabled()) {
@@ -473,8 +487,14 @@ private long updateSizeMetrics(LruCachedBlock cb, boolean evict) {
     if (evict) {
       heapsize *= -1;
     }
-    if (bt != null && bt.isData()) {
-      dataBlockSize.add(heapsize);
+    if (bt != null) {
+      if (bt.isBloom()) {
+        bloomBlockSize.add(heapsize);
+      } else if (bt.isIndex()) {
+        indexBlockSize.add(heapsize);
+      } else if (bt.isData()) {
+        dataBlockSize.add(heapsize);
+      }
     }
     return size.addAndGet(heapsize);
   }
@@ -582,7 +602,12 @@ protected long evictBlock(LruCachedBlock block, boolean evictedByEvictionProcess
       long size = map.size();
       assertCounterSanity(size, val);
     }
-    if (block.getBuffer().getBlockType().isData()) {
+    BlockType bt = block.getBuffer().getBlockType();
+    if (bt.isBloom()) {
+      bloomBlockElements.decrement();
+    } else if (bt.isIndex()) {
+      indexBlockElements.decrement();
+    } else if (bt.isData()) {
       dataBlockElements.decrement();
     }
     if (evictedByEvictionProcess) {
@@ -851,6 +876,14 @@ public long getCurrentDataSize() {
     return this.dataBlockSize.sum();
   }
 
+  public long getCurrentIndexSize() {
+    return this.indexBlockSize.sum();
+  }
+
+  public long getCurrentBloomSize() {
+    return this.bloomBlockSize.sum();
+  }
+
   @Override
   public long getFreeSize() {
     return getMaxSize() - getCurrentSize();
@@ -871,6 +904,14 @@ public long getDataBlockCount() {
     return this.dataBlockElements.sum();
   }
 
+  public long getIndexBlockCount() {
+    return this.indexBlockElements.sum();
+  }
+
+  public long getBloomBlockCount() {
+    return this.bloomBlockElements.sum();
+  }
+
   EvictionThread getEvictionThread() {
     return this.evictionThread;
   }