HBASE-28124 Missing fields in Scan.toJSON (#5678)
Signed-off-by: Pankaj Kumar <[email protected]>
Signed-off-by: Rajeshbabu Chintaguntla <[email protected]>
Signed-off-by: Duo Zhang <[email protected]>
(cherry picked from commit 0763a74)
chandrasekhar-188k authored and Apache9 committed Mar 19, 2024
1 parent 26b9e32 commit 931c72f
Showing 3 changed files with 129 additions and 2 deletions.
2 changes: 1 addition & 1 deletion RELEASENOTES.md
@@ -25,7 +25,7 @@ These release notes cover new developer and user-facing incompatibilities, impor

* [HBASE-28204](https://issues.apache.org/jira/browse/HBASE-28204) | *Major* | **Region Canary can take lot more time If any region (except the first region) starts with delete markers**

The Canary uses a Scan for the first region of a table and a Get for the rest of the regions, and a RAW Scan was only enabled for the first region of any table. If a region has a high number of deleted rows at the start of its key-space, the Get can take a very long time to finish.

With this change, the region Canary uses a Scan to validate that every region is accessible, and also enables RAW Scan if it is enabled by the user.

org/apache/hadoop/hbase/client/Scan.java
@@ -25,6 +25,7 @@
import java.util.NavigableSet;
import java.util.TreeMap;
import java.util.TreeSet;
import java.util.stream.Collectors;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.client.metrics.ScanMetrics;
import org.apache.hadoop.hbase.filter.Filter;
@@ -904,7 +905,7 @@ public Map<String, Object> getFingerprint() {
*/
@Override
public Map<String, Object> toMap(int maxCols) {
// start with the fingerpring map and build on top of it
// start with the fingerprint map and build on top of it
Map<String, Object> map = getFingerprint();
// map from families to column list replaces fingerprint's list of families
Map<String, List<String>> familyColumns = new HashMap<>();
@@ -952,6 +953,34 @@ public Map<String, Object> toMap(int maxCols) {
if (getId() != null) {
map.put("id", getId());
}
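// HBASE-28124: expose the remaining Scan attributes that were missing
// from this map (and hence from toJSON()).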
map.put("includeStartRow", includeStartRow);
map.put("includeStopRow", includeStopRow);
map.put("allowPartialResults", allowPartialResults);
map.put("storeLimit", storeLimit);
map.put("storeOffset", storeOffset);
map.put("reversed", reversed);
if (null != asyncPrefetch) {
map.put("asyncPrefetch", asyncPrefetch);
}
map.put("mvccReadPoint", mvccReadPoint);
map.put("limit", limit);
map.put("readType", readType);
map.put("needCursorResult", needCursorResult);
map.put("targetReplicaId", targetReplicaId);
map.put("consistency", consistency);
if (!colFamTimeRangeMap.isEmpty()) {
Map<String, List<Long>> colFamTimeRangeMapStr = colFamTimeRangeMap.entrySet().stream()
.collect(Collectors.toMap((e) -> Bytes.toStringBinary(e.getKey()), e -> {
TimeRange value = e.getValue();
List<Long> rangeList = new ArrayList<>();
rangeList.add(value.getMin());
rangeList.add(value.getMax());
return rangeList;
}));

map.put("colFamTimeRangeMap", colFamTimeRangeMapStr);
}
map.put("priority", getPriority());
return map;
}
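For illustration (not part of this commit): a minimal sketch of exercising the expanded serialization. The Scan methods are standard client API; the demo class name and the row, family, and range values are made up.

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;

public class ScanToJsonDemo {
  public static void main(String[] args) throws Exception {
    Scan scan = new Scan().withStartRow(Bytes.toBytes("row-0"), true)
      .withStopRow(Bytes.toBytes("row-9"), true).setLimit(5).setReversed(true)
      .setColumnFamilyTimeRange(Bytes.toBytes("cf"), 1000L, 2000L);
    // toJSON() serializes the toMap() view, so the keys added above
    // ("includeStartRow", "limit", "colFamTimeRangeMap", "priority", ...)
    // now appear in the output.
    System.out.println(scan.toJSON());
  }
}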

org/apache/hadoop/hbase/client/TestOperation.java
@@ -69,6 +69,9 @@

import org.apache.hbase.thirdparty.com.google.common.reflect.TypeToken;
import org.apache.hbase.thirdparty.com.google.gson.Gson;
import org.apache.hbase.thirdparty.com.google.gson.GsonBuilder;
import org.apache.hbase.thirdparty.com.google.gson.LongSerializationPolicy;
import org.apache.hbase.thirdparty.com.google.gson.ToNumberPolicy;

/**
* Run tests that use the functionality of the Operation superclass for Puts, Gets, Deletes, Scans,
@@ -345,6 +348,101 @@ public void testOperationJSON() throws IOException {
kvMap.get("qualifier"));
}

/**
* Test the client Scan Operations' JSON encoding to ensure that produced JSON is parseable and
* that the details are present and not corrupted.
* @throws IOException if the JSON conversion fails
*/
@Test
public void testScanOperationToJSON() throws IOException {
// produce a Scan Operation
Scan scan = new Scan().withStartRow(ROW, true);
scan.addColumn(FAMILY, QUALIFIER);
scan.withStopRow(ROW, true);
scan.readVersions(5);
scan.setBatch(10);
scan.setAllowPartialResults(true);
scan.setMaxResultsPerColumnFamily(3);
scan.setRowOffsetPerColumnFamily(8);
scan.setCaching(20);
scan.setMaxResultSize(50);
scan.setCacheBlocks(true);
scan.setReversed(true);
scan.setTimeRange(1000, 2000);
scan.setAsyncPrefetch(true);
scan.setMvccReadPoint(123);
scan.setLimit(5);
scan.setReadType(Scan.ReadType.PREAD);
scan.setNeedCursorResult(true);
scan.setFilter(SCV_FILTER);
scan.setReplicaId(1);
scan.setConsistency(Consistency.STRONG);
scan.setLoadColumnFamiliesOnDemand(true);
scan.setColumnFamilyTimeRange(FAMILY, 2000, 3000);
scan.setPriority(10);

// get its JSON representation, and parse it
String json = scan.toJSON();
Type typeOfHashMap = new TypeToken<Map<String, Object>>() {
}.getType();
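// ToNumberPolicy.LONG_OR_DOUBLE makes Gson deserialize whole JSON numbers as
// Long, so the assertions below can expect 5L, 10L, etc. rather than Doubles.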
Gson gson = new GsonBuilder().setLongSerializationPolicy(LongSerializationPolicy.STRING)
.setObjectToNumberStrategy(ToNumberPolicy.LONG_OR_DOUBLE).create();
Map<String, Object> parsedJSON = gson.fromJson(json, typeOfHashMap);
// check for the row
assertEquals("startRow incorrect in Scan.toJSON()", Bytes.toStringBinary(ROW),
parsedJSON.get("startRow"));
// check for the family and the qualifier.
List familyInfo = (List) ((Map) parsedJSON.get("families")).get(Bytes.toStringBinary(FAMILY));
assertNotNull("Family absent in Scan.toJSON()", familyInfo);
assertEquals("Qualifier absent in Scan.toJSON()", 1, familyInfo.size());
assertEquals("Qualifier incorrect in Scan.toJSON()", Bytes.toStringBinary(QUALIFIER),
familyInfo.get(0));
assertEquals("stopRow incorrect in Scan.toJSON()", Bytes.toStringBinary(ROW),
parsedJSON.get("stopRow"));
assertEquals("includeStartRow incorrect in Scan.toJSON()", true,
parsedJSON.get("includeStartRow"));
assertEquals("includeStopRow incorrect in Scan.toJSON()", true,
parsedJSON.get("includeStopRow"));
assertEquals("maxVersions incorrect in Scan.toJSON()", 5L, parsedJSON.get("maxVersions"));
assertEquals("batch incorrect in Scan.toJSON()", 10L, parsedJSON.get("batch"));
assertEquals("allowPartialResults incorrect in Scan.toJSON()", true,
parsedJSON.get("allowPartialResults"));
assertEquals("storeLimit incorrect in Scan.toJSON()", 3L, parsedJSON.get("storeLimit"));
assertEquals("storeOffset incorrect in Scan.toJSON()", 8L, parsedJSON.get("storeOffset"));
assertEquals("caching incorrect in Scan.toJSON()", 20L, parsedJSON.get("caching"));
assertEquals("maxResultSize incorrect in Scan.toJSON()", "50", parsedJSON.get("maxResultSize"));
assertEquals("cacheBlocks incorrect in Scan.toJSON()", true, parsedJSON.get("cacheBlocks"));
assertEquals("reversed incorrect in Scan.toJSON()", true, parsedJSON.get("reversed"));
List trList = (List) parsedJSON.get("timeRange");
assertEquals("timeRange incorrect in Scan.toJSON()", 2, trList.size());
assertEquals("timeRange incorrect in Scan.toJSON()", "1000", trList.get(0));
assertEquals("timeRange incorrect in Scan.toJSON()", "2000", trList.get(1));

assertEquals("asyncPrefetch incorrect in Scan.toJSON()", true, parsedJSON.get("asyncPrefetch"));
assertEquals("mvccReadPoint incorrect in Scan.toJSON()", "123",
parsedJSON.get("mvccReadPoint"));
assertEquals("limit incorrect in Scan.toJSON()", 5L, parsedJSON.get("limit"));
assertEquals("readType incorrect in Scan.toJSON()", "PREAD", parsedJSON.get("readType"));
assertEquals("needCursorResult incorrect in Scan.toJSON()", true,
parsedJSON.get("needCursorResult"));

Map colFamTimeRange = (Map) parsedJSON.get("colFamTimeRangeMap");
assertEquals("colFamTimeRangeMap incorrect in Scan.toJSON()", 1L, colFamTimeRange.size());
List testFamily = (List) colFamTimeRange.get("testFamily");
assertEquals("colFamTimeRangeMap incorrect in Scan.toJSON()", 2L, testFamily.size());
assertEquals("colFamTimeRangeMap incorrect in Scan.toJSON()", "2000", testFamily.get(0));
assertEquals("colFamTimeRangeMap incorrect in Scan.toJSON()", "3000", testFamily.get(1));

assertEquals("targetReplicaId incorrect in Scan.toJSON()", 1L,
parsedJSON.get("targetReplicaId"));
assertEquals("consistency incorrect in Scan.toJSON()", "STRONG", parsedJSON.get("consistency"));
assertEquals("loadColumnFamiliesOnDemand incorrect in Scan.toJSON()", true,
parsedJSON.get("loadColumnFamiliesOnDemand"));

assertEquals("priority incorrect in Scan.toJSON()", 10L, parsedJSON.get("priority"));

}
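Read together, the assertions imply JSON roughly like the following (a hand-written, abbreviated sketch, not captured output: long-valued fields such as maxResultSize, timeRange, and mvccReadPoint serialize as strings while int fields stay numeric; field order is not guaranteed; <ROW> is a placeholder for the test's row constant):

{
  "startRow": "<ROW>",
  "stopRow": "<ROW>",
  "includeStartRow": true,
  "maxVersions": 5,
  "batch": 10,
  "maxResultSize": "50",
  "timeRange": ["1000", "2000"],
  "mvccReadPoint": "123",
  "readType": "PREAD",
  "colFamTimeRangeMap": {"testFamily": ["2000", "3000"]},
  "consistency": "STRONG",
  "priority": 10
}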

@Test
public void testPutCreationWithByteBuffer() {
Put p = new Put(ROW);
