HBASE-26192 Master UI hbck should provide a JSON formatted output option
apurtell committed May 27, 2022
1 parent cf0ed2e commit 9543888
Showing 3 changed files with 295 additions and 0 deletions.
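
With this change, the master info server exposes the hbck report as JSON at /hbck.
A quick way to try it (a sketch, assuming the default master info port of 16010):

curl "http://<master-host>:16010/hbck?cache=true"

The cache=true parameter serves the cached reports; omit it to trigger fresh hbck
chore and catalog janitor runs first. The payload always carries startTimestamp and
endTimestamp, and adds inconsistentRegions, orphanRegionsOnRS, orphanRegionsOnFS,
holes, overlaps, unknownServers, and emptyRegionInfo when those sections are
non-empty.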
org/apache/hadoop/hbase/master/HMaster.java
@@ -133,6 +133,7 @@
import org.apache.hadoop.hbase.master.cleaner.ReplicationBarrierCleaner;
import org.apache.hadoop.hbase.master.cleaner.SnapshotCleanerChore;
import org.apache.hadoop.hbase.master.http.MasterDumpServlet;
import org.apache.hadoop.hbase.master.http.MasterHbckServlet;
import org.apache.hadoop.hbase.master.http.MasterRedirectServlet;
import org.apache.hadoop.hbase.master.http.MasterStatusServlet;
import org.apache.hadoop.hbase.master.http.api_v1.ResourceConfigFactory;
@@ -707,6 +708,7 @@ protected MasterRpcServices createRpcServices() throws IOException {
protected void configureInfoServer(InfoServer infoServer) {
infoServer.addUnprivilegedServlet("master-status", "/master-status", MasterStatusServlet.class);
infoServer.addUnprivilegedServlet("api_v1", "/api/v1/*", buildApiV1Servlet());
infoServer.addUnprivilegedServlet("hbck", "/hbck", MasterHbckServlet.class);

infoServer.setAttribute(MASTER, this);
}
org/apache/hadoop/hbase/master/http/MasterHbckServlet.java
@@ -0,0 +1,136 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.master.http;

import java.io.IOException;
import java.io.PrintWriter;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.master.HMaster;
import org.apache.hadoop.hbase.master.HbckChore;
import org.apache.hadoop.hbase.master.janitor.CatalogJanitor;
import org.apache.hadoop.hbase.master.janitor.Report;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.hbase.thirdparty.com.google.gson.Gson;

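/**
 * A servlet that renders the results of the most recent hbck chore run and catalog
 * janitor scan as JSON, giving the Master UI hbck report a machine-readable form.
 * Pass the request parameter {@code cache=true} to serve the cached reports without
 * triggering fresh runs first.
 */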
@InterfaceAudience.Private
public class MasterHbckServlet extends HttpServlet {

private static final long serialVersionUID = 1L;
private static final Logger LOG = LoggerFactory.getLogger(MasterHbckServlet.class);
private static final Gson GSON = new Gson();

@Override
public void doGet(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException {
final HMaster master = (HMaster) getServletContext().getAttribute(HMaster.MASTER);
if (!master.isInitialized()) {
LOG.warn("Master is not initialized yet");
sendError(response, HttpServletResponse.SC_SERVICE_UNAVAILABLE,
"master is not initialized yet");
return;
}
final HbckChore hbckChore = master.getHbckChore();
if (hbckChore == null || hbckChore.isDisabled()) {
LOG.warn("Hbck chore is disabled");
sendError(response, HttpServletResponse.SC_SERVICE_UNAVAILABLE, "Hbck chore is disabled");
return;
}
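// Unless the caller asked for cached results, trigger a fresh hbck chore run
// and catalog janitor scan before rendering the report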
if (!Boolean.parseBoolean(request.getParameter("cache"))) {
try {
master.getMasterRpcServices().runHbckChore(null, null);
} catch (org.apache.hbase.thirdparty.com.google.protobuf.ServiceException se) {
LOG.warn("Failed generating a new hbck chore report; using cache", se);
}
try {
master.getMasterRpcServices().runCatalogScan(null, null);
} catch (org.apache.hbase.thirdparty.com.google.protobuf.ServiceException se) {
LOG.warn("Failed generating a new catalogjanitor report; using cache", se);
}
}
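// Assemble the payload from the cached reports, omitting empty sections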
Map<String, Object> result = new HashMap<>();
result.put("startTimestamp", hbckChore.getCheckingStartTimestamp());
result.put("endTimestamp", hbckChore.getCheckingEndTimestamp());
final Map<String, Pair<ServerName, List<ServerName>>> inconsistentRegions =
hbckChore.getInconsistentRegions();
if (inconsistentRegions != null && !inconsistentRegions.isEmpty()) {
result.put("inconsistentRegions", inconsistentRegions);
}
final Map<String, ServerName> orphanRegionsOnRS = hbckChore.getOrphanRegionsOnRS();
if (orphanRegionsOnRS != null && !orphanRegionsOnRS.isEmpty()) {
result.put("orphanRegionsOnRS", orphanRegionsOnRS);
}
final Map<String, Path> orphanRegionsOnFS = hbckChore.getOrphanRegionsOnFS();
if (orphanRegionsOnFS != null && !orphanRegionsOnFS.isEmpty()) {
result.put("orphanRegionsOnFS", orphanRegionsOnFS);
}
final CatalogJanitor janitor = master.getCatalogJanitor();
if (janitor != null) {
final Report report = janitor.getLastReport();
if (report != null && !report.isEmpty()) {
List<Pair<RegionInfo, RegionInfo>> holes = report.getHoles();
if (holes != null && !holes.isEmpty()) {
result.put("holes", holes);
}
List<Pair<RegionInfo, RegionInfo>> overlaps = report.getOverlaps();
if (overlaps != null && !overlaps.isEmpty()) {
result.put("overlaps", overlaps);
}
List<Pair<RegionInfo, ServerName>> unknownServers = report.getUnknownServers();
if (unknownServers != null && !unknownServers.isEmpty()) {
result.put("unknownServers", unknownServers);
}
List<String> emptyRegionInfo = report.getEmptyRegionInfo().stream()
.map(b -> Bytes.toStringBinary(b)).collect(Collectors.toList());
if (!emptyRegionInfo.isEmpty()) {
result.put("emptyRegionInfo", emptyRegionInfo);
}
}
}
response.setContentType("application/json");
PrintWriter out = response.getWriter();
out.write(GSON.toJson(result));
out.write('\n');
}

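/** Writes an error response as a JSON document with a single "error" field. */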
private static void sendError(HttpServletResponse response, int code, String message)
throws IOException {
response.setContentType("application/json");
Map<String, Object> result = new HashMap<>();
result.put("error", message);
response.setStatus(code);
PrintWriter out = response.getWriter();
out.write(GSON.toJson(result));
out.write('\n');
}

}
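
For illustration, a successful response might look like the sketch below (timestamps
are made up; the optional sections appear only when the underlying reports contain
entries):

{"startTimestamp": 1653674872000, "endTimestamp": 1653674875000}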
org/apache/hadoop/hbase/master/http/TestMasterHbckServlet.java
@@ -0,0 +1,157 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.master.http;

import static org.junit.Assert.assertEquals;

import java.io.ByteArrayOutputStream;
import java.io.PrintWriter;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import javax.servlet.ServletConfig;
import javax.servlet.ServletContext;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.master.HMaster;
import org.apache.hadoop.hbase.master.HbckChore;
import org.apache.hadoop.hbase.master.janitor.CatalogJanitor;
import org.apache.hadoop.hbase.master.janitor.Report;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.mockito.Mockito;

import org.apache.hbase.thirdparty.com.google.gson.Gson;
import org.apache.hbase.thirdparty.com.google.gson.reflect.TypeToken;

/**
* Tests for the master hbck servlet.
*/
@Category({ MasterTests.class, MediumTests.class })
public class TestMasterHbckServlet {

@ClassRule
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestMasterHbckServlet.class);

static final ServerName FAKE_HOST = ServerName.valueOf("fakehost", 12345, 1234567890);
static final TableDescriptor FAKE_TABLE =
TableDescriptorBuilder.newBuilder(TableName.valueOf("mytable")).build();
static final RegionInfo FAKE_HRI = RegionInfoBuilder.newBuilder(FAKE_TABLE.getTableName())
.setStartKey(Bytes.toBytes("a")).setEndKey(Bytes.toBytes("b")).build();
static final long HBCK_START_TIMESTAMP = System.currentTimeMillis();
static final long HBCK_END_TIMESTAMP = System.currentTimeMillis() + 1000;
private static final Gson GSON = new Gson();

private HMaster master;

@Before
public void setupBasicMocks() {
// Fake inconsistentRegions
Map<String, Pair<ServerName, List<ServerName>>> inconsistentRegions = new HashMap<>();
// TODO

// Fake orphanRegionsOnRS
Map<String, ServerName> orphanRegionsOnRS = new HashMap<>();
// TODO

// Fake orphanRegionsOnFS
Map<String, Path> orphanRegionsOnFS = new HashMap<>();
// TODO

// Fake HbckChore
HbckChore hbckChore = Mockito.mock(HbckChore.class);
Mockito.doReturn(HBCK_START_TIMESTAMP).when(hbckChore).getCheckingStartTimestamp();
Mockito.doReturn(HBCK_END_TIMESTAMP).when(hbckChore).getCheckingEndTimestamp();
Mockito.doReturn(inconsistentRegions).when(hbckChore).getInconsistentRegions();
Mockito.doReturn(orphanRegionsOnRS).when(hbckChore).getOrphanRegionsOnRS();
Mockito.doReturn(orphanRegionsOnFS).when(hbckChore).getOrphanRegionsOnFS();

// Fake region holes
List<Pair<RegionInfo, RegionInfo>> holes = new ArrayList<>();
// TODO

// Fake region overlaps
List<Pair<RegionInfo, RegionInfo>> overlaps = new ArrayList<>();
// TODO

// Fake unknown servers
List<Pair<RegionInfo, ServerName>> unknownServers = new ArrayList<>();
// TODO

// Fake empty region info
List<String> emptyRegionInfo = new ArrayList<>();
// TODO

// Fake catalog janitor report
Report report = Mockito.mock(Report.class);
Mockito.doReturn(HBCK_START_TIMESTAMP).when(report).getCreateTime();
Mockito.doReturn(holes).when(report).getHoles();
Mockito.doReturn(overlaps).when(report).getOverlaps();
Mockito.doReturn(unknownServers).when(report).getUnknownServers();
Mockito.doReturn(emptyRegionInfo).when(report).getEmptyRegionInfo();

// Fake CatalogJanitor
CatalogJanitor janitor = Mockito.mock(CatalogJanitor.class);
Mockito.doReturn(report).when(janitor).getLastReport();

// Fake master
master = Mockito.mock(HMaster.class);
// The servlet responds 503 unless the master reports it is initialized
Mockito.doReturn(true).when(master).isInitialized();
Mockito.doReturn(HBaseConfiguration.create()).when(master).getConfiguration();
Mockito.doReturn(hbckChore).when(master).getHbckChore();
Mockito.doReturn(janitor).when(master).getCatalogJanitor();
Mockito.doReturn(FAKE_HOST).when(master).getServerName();
Mockito.doReturn(Optional.of(FAKE_HOST)).when(master).getActiveMaster();
}

@Test
public void testHbckServletWithMocks() throws Exception {
// Set up request and response mocks; ask for the cached reports so doGet
// does not reach for the unmocked MasterRpcServices
HttpServletRequest request = Mockito.mock(HttpServletRequest.class);
Mockito.doReturn("true").when(request).getParameter("cache");
ByteArrayOutputStream out = new ByteArrayOutputStream();
HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
PrintWriter writer = new PrintWriter(out);
Mockito.doReturn(writer).when(response).getWriter();

// Instantiate the servlet with a mocked ServletConfig whose context carries
// the master; a freshly constructed servlet has no ServletContext of its own
MasterHbckServlet servlet = new MasterHbckServlet();
ServletConfig config = Mockito.mock(ServletConfig.class);
ServletContext context = Mockito.mock(ServletContext.class);
Mockito.doReturn(context).when(config).getServletContext();
Mockito.doReturn(master).when(context).getAttribute(HMaster.MASTER);
servlet.init(config);
servlet.doGet(request, response);
writer.flush();

// Response should be in 'out'
Map<String,Object> result = GSON.fromJson(new String(out.toByteArray(), StandardCharsets.UTF_8),
new TypeToken<Map<String,Object>>(){}.getType());

// Check that the result is as expected: the mocked chore supplied the
// timestamps. Gson parses JSON numbers as doubles, hence the conversion.
assertEquals(HBCK_START_TIMESTAMP, ((Double) result.get("startTimestamp")).longValue());
assertEquals(HBCK_END_TIMESTAMP, ((Double) result.get("endTimestamp")).longValue());
}

}
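
To run just this test from a source checkout (assuming the servlet lands in the
hbase-server module): mvn test -Dtest=TestMasterHbckServlet -pl hbase-server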
