HBASE-28124 Missing fields in Scan.toJSON (#5678)
Signed-off-by: Pankaj Kumar <[email protected]>
Signed-off-by: Rajeshbabu Chintaguntla <[email protected]>
Signed-off-by: Duo Zhang <[email protected]>
(cherry picked from commit 0763a74)
chandrasekhar-188k authored and Apache9 committed Mar 19, 2024
1 parent 73bdf95 commit ecf6e81
Showing 3 changed files with 144 additions and 11 deletions.
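The diff extends Scan's toMap(int), which backs Operation#toJSON, so that previously omitted scan attributes make it into the JSON, and updates two tests to match. As a rough sketch of the effect (hypothetical demo class and row keys; assumes an hbase-client dependency, and key ordering in the output may vary):

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;

public class ScanToJsonDemo {
  public static void main(String[] args) throws Exception {
    // Hypothetical scan; any Scan works.
    Scan scan = new Scan()
      .withStartRow(Bytes.toBytes("row-0001"), true)
      .withStopRow(Bytes.toBytes("row-9999"), false)
      .setLimit(100);
    // Before this patch the serialized scan in the test below showed only
    // startRow, stopRow, batch, cacheBlocks, totalColumns, maxResultSize,
    // families, caching, maxVersions and timeRange; with it, includeStartRow,
    // includeStopRow, allowPartialResults, storeLimit, storeOffset, reversed,
    // asyncPrefetch (when set), mvccReadPoint, limit, readType,
    // needCursorResult, targetReplicaId, consistency, colFamTimeRangeMap
    // (when set) and priority appear as well.
    System.out.println(scan.toJSON());
  }
}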
@@ -25,6 +25,7 @@
import java.util.NavigableSet;
import java.util.TreeMap;
import java.util.TreeSet;
import java.util.stream.Collectors;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.client.metrics.ScanMetrics;
import org.apache.hadoop.hbase.filter.Filter;
@@ -747,7 +748,7 @@ public Map<String, Object> getFingerprint() {
*/
@Override
public Map<String, Object> toMap(int maxCols) {
- // start with the fingerpring map and build on top of it
+ // start with the fingerprint map and build on top of it
Map<String, Object> map = getFingerprint();
// map from families to column list replaces fingerprint's list of families
Map<String, List<String>> familyColumns = new HashMap<>();
@@ -795,6 +796,34 @@ public Map<String, Object> toMap(int maxCols) {
if (getId() != null) {
map.put("id", getId());
}
map.put("includeStartRow", includeStartRow);
map.put("includeStopRow", includeStopRow);
map.put("allowPartialResults", allowPartialResults);
map.put("storeLimit", storeLimit);
map.put("storeOffset", storeOffset);
map.put("reversed", reversed);
if (null != asyncPrefetch) {
map.put("asyncPrefetch", asyncPrefetch);
}
map.put("mvccReadPoint", mvccReadPoint);
map.put("limit", limit);
map.put("readType", readType);
map.put("needCursorResult", needCursorResult);
map.put("targetReplicaId", targetReplicaId);
map.put("consistency", consistency);
if (!colFamTimeRangeMap.isEmpty()) {
Map<String, List<Long>> colFamTimeRangeMapStr = colFamTimeRangeMap.entrySet().stream()
.collect(Collectors.toMap((e) -> Bytes.toStringBinary(e.getKey()), e -> {
TimeRange value = e.getValue();
List<Long> rangeList = new ArrayList<>();
rangeList.add(value.getMin());
rangeList.add(value.getMax());
return rangeList;
}));

map.put("colFamTimeRangeMap", colFamTimeRangeMapStr);
}
map.put("priority", getPriority());
return map;
}
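Since the internal colFamTimeRangeMap is keyed by byte[] and TimeRange has no natural JSON form, the block above re-keys entries with Bytes.toStringBinary and flattens each range to a [min, max] list. A self-contained sketch of the same transform, with a hypothetical family name:

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import java.util.stream.Collectors;
import org.apache.hadoop.hbase.io.TimeRange;
import org.apache.hadoop.hbase.util.Bytes;

public class ColFamTimeRangeDemo {
  public static void main(String[] args) {
    // Mirrors Scan's internal representation: family bytes -> TimeRange.
    Map<byte[], TimeRange> colFamTimeRangeMap = new TreeMap<>(Bytes.BYTES_COMPARATOR);
    colFamTimeRangeMap.put(Bytes.toBytes("cf1"), TimeRange.between(2000, 3000));
    Map<String, List<Long>> asStrings = colFamTimeRangeMap.entrySet().stream()
      .collect(Collectors.toMap(e -> Bytes.toStringBinary(e.getKey()), e -> {
        TimeRange tr = e.getValue();
        List<Long> range = new ArrayList<>();
        range.add(tr.getMin());
        range.add(tr.getMax());
        return range;
      }));
    System.out.println(asStrings); // {cf1=[2000, 3000]}
  }
}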

@@ -44,20 +44,26 @@ public void itSerializesScan() {
Scan scan = new Scan();
scan.withStartRow(Bytes.toBytes(123));
scan.withStopRow(Bytes.toBytes(456));
String expectedOutput = "{\n" + " \"startTime\": 1,\n" + " \"processingTime\": 2,\n"
+ " \"queueTime\": 3,\n" + " \"responseSize\": 4,\n" + " \"blockBytesScanned\": 5,\n"
+ " \"fsReadTime\": 6,\n" + " \"multiGetsCount\": 6,\n" + " \"multiMutationsCount\": 7,\n"
+ " \"scan\": {\n" + " \"startRow\": \"\\\\x00\\\\x00\\\\x00{\",\n"
+ " \"stopRow\": \"\\\\x00\\\\x00\\\\x01\\\\xC8\",\n" + " \"batch\": -1,\n"
+ " \"cacheBlocks\": true,\n" + " \"totalColumns\": 0,\n"
+ " \"maxResultSize\": -1,\n" + " \"families\": {},\n" + " \"caching\": -1,\n"
+ " \"maxVersions\": 1,\n" + " \"timeRange\": [\n" + " 0,\n"
+ " 9223372036854775807\n" + " ]\n" + " }\n" + "}";
String expectedOutput =
"{\n" + " \"startTime\": 1,\n" + " \"processingTime\": 2,\n" + " \"queueTime\": 3,\n"
+ " \"responseSize\": 4,\n" + " \"blockBytesScanned\": 5,\n" + " \"fsReadTime\": 6,\n"
+ " \"multiGetsCount\": 6,\n" + " \"multiMutationsCount\": 7,\n" + " \"scan\": {\n"
+ " \"startRow\": \"\\\\x00\\\\x00\\\\x00{\",\n" + " \"targetReplicaId\": -1,\n"
+ " \"batch\": -1,\n" + " \"totalColumns\": 0,\n" + " \"maxResultSize\": -1,\n"
+ " \"families\": {},\n" + " \"priority\": -1,\n" + " \"caching\": -1,\n"
+ " \"includeStopRow\": false,\n" + " \"consistency\": \"STRONG\",\n"
+ " \"maxVersions\": 1,\n" + " \"storeOffset\": 0,\n" + " \"mvccReadPoint\": -1,\n"
+ " \"includeStartRow\": true,\n" + " \"needCursorResult\": false,\n"
+ " \"stopRow\": \"\\\\x00\\\\x00\\\\x01\\\\xC8\",\n" + " \"storeLimit\": -1,\n"
+ " \"limit\": -1,\n" + " \"cacheBlocks\": true,\n"
+ " \"readType\": \"DEFAULT\",\n" + " \"allowPartialResults\": false,\n"
+ " \"reversed\": false,\n" + " \"timeRange\": [\n" + " 0,\n"
+ " 9223372036854775807\n" + " ]\n" + " }\n" + "}";
OnlineLogRecord o = new OnlineLogRecord(1, 2, 3, 4, 5, 6, null, null, null, null, null, null,
null, 6, 7, 0, scan, Collections.emptyMap(), Collections.emptyMap());
String actualOutput = o.toJsonPrettyPrint();
System.out.println(actualOutput);
- Assert.assertEquals(actualOutput, expectedOutput);
+ Assert.assertEquals(expectedOutput, actualOutput);
}
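Note the swapped assertion arguments: JUnit's Assert.assertEquals takes (expected, actual), so the previous ordering would have labeled the two values backwards in a failure message, printing the actual output as the "expected" one. The swap changes no pass/fail behavior, only the diagnostics.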

@Test
@@ -69,6 +69,9 @@

import org.apache.hbase.thirdparty.com.google.common.reflect.TypeToken;
import org.apache.hbase.thirdparty.com.google.gson.Gson;
import org.apache.hbase.thirdparty.com.google.gson.GsonBuilder;
import org.apache.hbase.thirdparty.com.google.gson.LongSerializationPolicy;
import org.apache.hbase.thirdparty.com.google.gson.ToNumberPolicy;

/**
* Run tests that use the functionality of the Operation superclass for Puts, Gets, Deletes, Scans,
@@ -345,6 +348,101 @@ public void testOperationJSON() throws IOException {
kvMap.get("qualifier"));
}

/**
* Test the client Scan Operations' JSON encoding to ensure that produced JSON is parseable and
* that the details are present and not corrupted.
* @throws IOException if the JSON conversion fails
*/
@Test
public void testScanOperationToJSON() throws IOException {
// produce a Scan Operation
Scan scan = new Scan().withStartRow(ROW, true);
scan.addColumn(FAMILY, QUALIFIER);
scan.withStopRow(ROW, true);
scan.readVersions(5);
scan.setBatch(10);
scan.setAllowPartialResults(true);
scan.setMaxResultsPerColumnFamily(3);
scan.setRowOffsetPerColumnFamily(8);
scan.setCaching(20);
scan.setMaxResultSize(50);
scan.setCacheBlocks(true);
scan.setReversed(true);
scan.setTimeRange(1000, 2000);
scan.setAsyncPrefetch(true);
scan.setMvccReadPoint(123);
scan.setLimit(5);
scan.setReadType(Scan.ReadType.PREAD);
scan.setNeedCursorResult(true);
scan.setFilter(SCV_FILTER);
scan.setReplicaId(1);
scan.setConsistency(Consistency.STRONG);
scan.setLoadColumnFamiliesOnDemand(true);
scan.setColumnFamilyTimeRange(FAMILY, 2000, 3000);
scan.setPriority(10);

// get its JSON representation, and parse it
String json = scan.toJSON();
Type typeOfHashMap = new TypeToken<Map<String, Object>>() {
}.getType();
Gson gson = new GsonBuilder().setLongSerializationPolicy(LongSerializationPolicy.STRING)
.setObjectToNumberStrategy(ToNumberPolicy.LONG_OR_DOUBLE).create();
Map<String, Object> parsedJSON = gson.fromJson(json, typeOfHashMap);
// check for the row
assertEquals("startRow incorrect in Scan.toJSON()", Bytes.toStringBinary(ROW),
parsedJSON.get("startRow"));
// check for the family and the qualifier.
List familyInfo = (List) ((Map) parsedJSON.get("families")).get(Bytes.toStringBinary(FAMILY));
assertNotNull("Family absent in Scan.toJSON()", familyInfo);
assertEquals("Qualifier absent in Scan.toJSON()", 1, familyInfo.size());
assertEquals("Qualifier incorrect in Scan.toJSON()", Bytes.toStringBinary(QUALIFIER),
familyInfo.get(0));
assertEquals("stopRow incorrect in Scan.toJSON()", Bytes.toStringBinary(ROW),
parsedJSON.get("stopRow"));
assertEquals("includeStartRow incorrect in Scan.toJSON()", true,
parsedJSON.get("includeStartRow"));
assertEquals("includeStopRow incorrect in Scan.toJSON()", true,
parsedJSON.get("includeStopRow"));
assertEquals("maxVersions incorrect in Scan.toJSON()", 5L, parsedJSON.get("maxVersions"));
assertEquals("batch incorrect in Scan.toJSON()", 10L, parsedJSON.get("batch"));
assertEquals("allowPartialResults incorrect in Scan.toJSON()", true,
parsedJSON.get("allowPartialResults"));
assertEquals("storeLimit incorrect in Scan.toJSON()", 3L, parsedJSON.get("storeLimit"));
assertEquals("storeOffset incorrect in Scan.toJSON()", 8L, parsedJSON.get("storeOffset"));
assertEquals("caching incorrect in Scan.toJSON()", 20L, parsedJSON.get("caching"));
assertEquals("maxResultSize incorrect in Scan.toJSON()", "50", parsedJSON.get("maxResultSize"));
assertEquals("cacheBlocks incorrect in Scan.toJSON()", true, parsedJSON.get("cacheBlocks"));
assertEquals("reversed incorrect in Scan.toJSON()", true, parsedJSON.get("reversed"));
List trList = (List) parsedJSON.get("timeRange");
assertEquals("timeRange incorrect in Scan.toJSON()", 2, trList.size());
assertEquals("timeRange incorrect in Scan.toJSON()", "1000", trList.get(0));
assertEquals("timeRange incorrect in Scan.toJSON()", "2000", trList.get(1));

assertEquals("asyncPrefetch incorrect in Scan.toJSON()", true, parsedJSON.get("asyncPrefetch"));
assertEquals("mvccReadPoint incorrect in Scan.toJSON()", "123",
parsedJSON.get("mvccReadPoint"));
assertEquals("limit incorrect in Scan.toJSON()", 5L, parsedJSON.get("limit"));
assertEquals("readType incorrect in Scan.toJSON()", "PREAD", parsedJSON.get("readType"));
assertEquals("needCursorResult incorrect in Scan.toJSON()", true,
parsedJSON.get("needCursorResult"));

Map colFamTimeRange = (Map) parsedJSON.get("colFamTimeRangeMap");
assertEquals("colFamTimeRangeMap incorrect in Scan.toJSON()", 1L, colFamTimeRange.size());
List testFamily = (List) colFamTimeRange.get("testFamily");
assertEquals("colFamTimeRangeMap incorrect in Scan.toJSON()", 2L, testFamily.size());
assertEquals("colFamTimeRangeMap incorrect in Scan.toJSON()", "2000", testFamily.get(0));
assertEquals("colFamTimeRangeMap incorrect in Scan.toJSON()", "3000", testFamily.get(1));

assertEquals("targetReplicaId incorrect in Scan.toJSON()", 1L,
parsedJSON.get("targetReplicaId"));
assertEquals("consistency incorrect in Scan.toJSON()", "STRONG", parsedJSON.get("consistency"));
assertEquals("loadColumnFamiliesOnDemand incorrect in Scan.toJSON()", true,
parsedJSON.get("loadColumnFamiliesOnDemand"));

assertEquals("priority incorrect in Scan.toJSON()", 10L, parsedJSON.get("priority"));

}

@Test
public void testPutCreationWithByteBuffer() {
Put p = new Put(ROW);
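A subtlety the new test leans on: as the quoted expected JSON in the first test suggests, HBase's JSON writer serializes long values as strings, so the long-backed fields (maxResultSize, mvccReadPoint, timeRange, colFamTimeRangeMap) are asserted as strings, while int-backed fields (maxVersions, batch, caching, limit, priority) come back as Long under ToNumberPolicy.LONG_OR_DOUBLE. A small sketch of that asymmetry, using plain com.google.gson rather than HBase's shaded thirdparty copy:

import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.LongSerializationPolicy;
import com.google.gson.ToNumberPolicy;
import com.google.gson.reflect.TypeToken;
import java.lang.reflect.Type;
import java.util.LinkedHashMap;
import java.util.Map;

public class GsonPolicyDemo {
  public static void main(String[] args) {
    Gson gson = new GsonBuilder()
      .setLongSerializationPolicy(LongSerializationPolicy.STRING) // longs -> quoted strings
      .setObjectToNumberStrategy(ToNumberPolicy.LONG_OR_DOUBLE)   // JSON numbers -> Long/Double
      .create();
    Map<String, Object> map = new LinkedHashMap<>();
    map.put("maxVersions", 5);      // int-backed field stays numeric
    map.put("maxResultSize", 50L);  // long-backed field is quoted
    String json = gson.toJson(map);
    System.out.println(json); // {"maxVersions":5,"maxResultSize":"50"}
    Type t = new TypeToken<Map<String, Object>>() {}.getType();
    Map<String, Object> parsed = gson.fromJson(json, t);
    System.out.println(parsed.get("maxVersions").getClass().getSimpleName());   // Long
    System.out.println(parsed.get("maxResultSize").getClass().getSimpleName()); // String
  }
}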