Skip to content

Commit

Permalink
HBASE-20904 Prometheus /metrics http endpoint for monitoring (#4691)
Browse files Browse the repository at this point in the history
Co-authored-by: Luca Kovacs <[email protected]>
Co-authored-by: Madhusoodan P <[email protected]>
Co-authored-by: Luca Kovacs <[email protected]>
Signed-off-by: Duo Zhang <[email protected]>
  • Loading branch information
3 people authored Aug 24, 2022
1 parent a0481d1 commit f9ea7ee
Show file tree
Hide file tree
Showing 9 changed files with 392 additions and 11 deletions.
8 changes: 8 additions & 0 deletions hbase-common/src/main/resources/hbase-default.xml
Original file line number Diff line number Diff line change
Expand Up @@ -1786,6 +1786,14 @@ possible configurations would overwhelm and obscure the important.
ThreadPool.
</description>
</property>
<property>
<name>hbase.http.metrics.servlets</name>
<value>jmx,metrics,prometheus</value>
<description>
Comma-separated list of servlet names to enable for metrics collection. The supported
servlets are jmx, metrics, and prometheus.
</description>
</property>
<property>
<name>hbase.replication.rpc.codec</name>
<value>org.apache.hadoop.hbase.codec.KeyValueCodecWithTags</value>
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,42 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metrics2.impl;

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import org.apache.hadoop.metrics2.MetricsRecord;
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
import org.apache.yetus.audience.InterfaceAudience;

@InterfaceAudience.Private
public final class MetricsExportHelper {

  /** Utility class: all members are static, so instantiation is disallowed. */
  private MetricsExportHelper() {
  }

  /**
   * Samples the default metrics system and flattens the buffered entries into a
   * single snapshot collection of metrics records.
   * @return every {@link MetricsRecord} present in the sampled buffer
   */
  public static Collection<MetricsRecord> export() {
    MetricsSystemImpl system = (MetricsSystemImpl) DefaultMetricsSystem.instance();
    MetricsBuffer buffer = system.sampleMetrics();
    List<MetricsRecord> snapshot = new ArrayList<>();
    for (MetricsBuffer.Entry bufferEntry : buffer) {
      for (MetricsRecord record : bufferEntry.records()) {
        snapshot.add(record);
      }
    }
    return snapshot;
  }

}
Original file line number Diff line number Diff line change
@@ -0,0 +1,75 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.metrics;

import java.util.Collection;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.testclassification.MetricsTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.metrics2.AbstractMetric;
import org.apache.hadoop.metrics2.MetricsRecord;
import org.apache.hadoop.metrics2.impl.MetricsExportHelper;
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
import org.junit.Assert;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;

@Category({ MetricsTests.class, SmallTests.class })
public class TestMetricsExportHelper {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
    HBaseClassTestRule.forClass(TestMetricsExportHelper.class);

  /**
   * Registers a gauge and a counter on a freshly started metrics system and
   * verifies that both show up in the snapshot returned by
   * {@link MetricsExportHelper#export()}.
   */
  @Test
  public void testExportHelper() {
    DefaultMetricsSystem.initialize("exportHelperTestSystem");
    DefaultMetricsSystem.instance().start();

    final String metricsName = "exportMetricsTestGrp";
    final String gaugeName = "exportMetricsTestGauge";
    final String counterName = "exportMetricsTestCounter";

    BaseSourceImpl baseSource = new BaseSourceImpl(metricsName, "", metricsName, metricsName);
    baseSource.setGauge(gaugeName, 0);
    baseSource.incCounters(counterName, 1);

    Collection<MetricsRecord> metrics = MetricsExportHelper.export();
    DefaultMetricsSystem.instance().stop();

    Assert.assertTrue(metrics.stream().anyMatch(mr -> mr.name().equals(metricsName)));
    Assert.assertTrue(gaugeName + " is missing in the export",
      contains(metrics, metricsName, gaugeName));
    Assert.assertTrue(counterName + " is missing in the export",
      contains(metrics, metricsName, counterName));
  }

  /**
   * Returns true when a record named {@code metricsName} carries a metric named
   * {@code metricName} in the exported snapshot.
   */
  private boolean contains(Collection<MetricsRecord> metrics, String metricsName,
    String metricName) {
    for (MetricsRecord record : metrics) {
      if (!record.name().equals(metricsName)) {
        continue;
      }
      for (AbstractMetric metric : record.metrics()) {
        if (metric.name().equals(metricName)) {
          return true;
        }
      }
    }
    return false;
  }
}
13 changes: 13 additions & 0 deletions hbase-http/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -57,6 +57,19 @@
<type>test-jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-metrics-api</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-metrics</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-hadoop-compat</artifactId>
</dependency>
<!-- resource bundle only needed at build time -->
<dependency>
<groupId>org.apache.hbase</groupId>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,6 @@
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.http.conf.ConfServlet;
import org.apache.hadoop.hbase.http.jmx.JMXJsonServlet;
import org.apache.hadoop.hbase.http.log.LogLevel;
import org.apache.hadoop.hbase.util.ReflectionUtils;
import org.apache.hadoop.hbase.util.Threads;
Expand All @@ -70,6 +69,7 @@
import org.slf4j.LoggerFactory;

import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableMap;
import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
import org.apache.hbase.thirdparty.org.eclipse.jetty.http.HttpVersion;
import org.apache.hbase.thirdparty.org.eclipse.jetty.server.Handler;
Expand Down Expand Up @@ -154,6 +154,18 @@ public class HttpServer implements FilterContainer {
public static final String NO_CACHE_FILTER = "NoCacheFilter";
public static final String APP_DIR = "webapps";

// Configuration key selecting which metrics servlets to expose; value is a
// comma-separated subset of the keys in METRIC_SERVLETS below.
public static final String METRIC_SERVLETS_CONF_KEY = "hbase.http.metrics.servlets";
// By default all supported metrics servlets are enabled.
public static final String[] METRICS_SERVLETS_DEFAULT = { "jmx", "metrics", "prometheus" };
// Registry of the supported metrics servlets. Class names are kept as strings
// and resolved reflectively at registration time, so a servlet implementation
// missing from the classpath does not break class loading of HttpServer.
private static final ImmutableMap<String,
ServletConfig> METRIC_SERVLETS = new ImmutableMap.Builder<String, ServletConfig>()
.put("jmx",
new ServletConfig("jmx", "/jmx", "org.apache.hadoop.hbase.http.jmx.JMXJsonServlet"))
.put("metrics",
new ServletConfig("metrics", "/metrics", "org.apache.hadoop.metrics.MetricsServlet"))
.put("prometheus", new ServletConfig("prometheus", "/prometheus",
"org.apache.hadoop.hbase.http.prometheus.PrometheusHadoopServlet"))
.build();

private final AccessControlList adminsAcl;

protected final Server webServer;
Expand Down Expand Up @@ -751,16 +763,7 @@ protected void addDefaultServlets(ContextHandlerCollection contexts, Configurati
// set up default servlets
addPrivilegedServlet("stacks", "/stacks", StackServlet.class);
addPrivilegedServlet("logLevel", "/logLevel", LogLevel.Servlet.class);
// Hadoop3 has moved completely to metrics2, and dropped support for Metrics v1's
// MetricsServlet (see HADOOP-12504). We'll use reflection to load it against hadoop2.
// Remove when we drop support for hbase on hadoop2.x.
try {
Class<?> clz = Class.forName("org.apache.hadoop.metrics.MetricsServlet");
addPrivilegedServlet("metrics", "/metrics", clz.asSubclass(HttpServlet.class));
} catch (Exception e) {
// do nothing
}
addPrivilegedServlet("jmx", "/jmx", JMXJsonServlet.class);

// While we don't expect users to have sensitive information in their configuration, they
// might. Give them an option to not expose the service configuration to all users.
if (conf.getBoolean(HTTP_PRIVILEGED_CONF_KEY, HTTP_PRIVILEGED_CONF_DEFAULT)) {
Expand All @@ -784,6 +787,22 @@ protected void addDefaultServlets(ContextHandlerCollection contexts, Configurati
LOG.info("ASYNC_PROFILER_HOME environment variable and async.profiler.home system property "
+ "not specified. Disabling /prof endpoint.");
}

/* register metrics servlets */
// Registration is reflective and per-servlet fault tolerant: an entry whose
// class cannot be loaded (e.g. Hadoop 3 dropped Metrics v1's
// org.apache.hadoop.metrics.MetricsServlet, HADOOP-12504) logs a warning
// instead of failing HTTP server startup. Unknown names in the configured
// list are silently ignored (METRIC_SERVLETS.get returns null).
String[] enabledServlets = conf.getStrings(METRIC_SERVLETS_CONF_KEY, METRICS_SERVLETS_DEFAULT);
for (String enabledServlet : enabledServlets) {
try {
ServletConfig servletConfig = METRIC_SERVLETS.get(enabledServlet);
if (servletConfig != null) {
Class<?> clz = Class.forName(servletConfig.getClazz());
addPrivilegedServlet(servletConfig.getName(), servletConfig.getPathSpec(),
clz.asSubclass(HttpServlet.class));
}
} catch (Exception e) {
/* shouldn't be fatal, so warn the user about it */
LOG.warn("Couldn't register the servlet " + enabledServlet, e);
}
}
}

/**
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,47 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.http;

import org.apache.yetus.audience.InterfaceAudience;

/* pojo to hold the servlet info */

@InterfaceAudience.Private
/**
 * Immutable value object describing one servlet registration: a short name, the
 * URL path it is mounted under, and the fully qualified class name of the
 * servlet implementation (kept as a string so it can be loaded reflectively,
 * avoiding a hard compile-time dependency on optional servlets).
 */
class ServletConfig {
  // Set once in the constructor and never mutated; instances of this class are
  // shared via a static map, so they must stay immutable.
  private final String name;
  private final String pathSpec;
  private final String clazz;

  /**
   * @param name     short identifier of the servlet (e.g. "jmx")
   * @param pathSpec URL path the servlet is served on (e.g. "/jmx")
   * @param clazz    fully qualified servlet class name, resolved via reflection
   */
  public ServletConfig(String name, String pathSpec, String clazz) {
    this.name = name;
    this.pathSpec = pathSpec;
    this.clazz = clazz;
  }

  /** Returns the short servlet identifier. */
  public String getName() {
    return name;
  }

  /** Returns the URL path spec the servlet is registered under. */
  public String getPathSpec() {
    return pathSpec;
  }

  /** Returns the fully qualified class name of the servlet implementation. */
  public String getClazz() {
    return clazz;
  }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,84 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.http.prometheus;

import com.google.errorprone.annotations.RestrictedApi;
import java.io.IOException;
import java.io.Writer;
import java.util.Collection;
import java.util.regex.Pattern;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.metrics2.AbstractMetric;
import org.apache.hadoop.metrics2.MetricType;
import org.apache.hadoop.metrics2.MetricsRecord;
import org.apache.hadoop.metrics2.MetricsTag;
import org.apache.hadoop.metrics2.impl.MetricsExportHelper;
import org.apache.yetus.audience.InterfaceAudience;

@InterfaceAudience.Private
public class PrometheusHadoopServlet extends HttpServlet {

  /**
   * Splits camelCase/PascalCase metric names at word boundaries (and drops
   * non-word characters and runs of underscores) so the words can be re-joined
   * with '_' into Prometheus-style snake_case names.
   */
  private static final Pattern SPLIT_PATTERN =
    Pattern.compile("(?<=[a-z])(?=[A-Z])|(?<=[A-Z])(?=([A-Z][a-z]))|\\W|(_)+");

  @Override
  protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws IOException {
    writeMetrics(resp.getWriter());
  }

  /** Converts a metrics2 record name + metric name pair into a snake_case Prometheus name. */
  static String toPrometheusName(String metricRecordName, String metricName) {
    String baseName = metricRecordName + StringUtils.capitalize(metricName);
    String[] parts = SPLIT_PATTERN.split(baseName);
    return String.join("_", parts).toLowerCase();
  }

  /**
   * Escapes a label value per the Prometheus text exposition format: backslash,
   * double quote and newline must be escaped, otherwise a single tag value
   * containing any of them would corrupt the entire scrape output.
   */
  private static String escapeLabelValue(String value) {
    StringBuilder sb = new StringBuilder(value.length());
    for (int i = 0; i < value.length(); i++) {
      char c = value.charAt(i);
      switch (c) {
        case '\\':
          sb.append("\\\\");
          break;
        case '"':
          sb.append("\\\"");
          break;
        case '\n':
          sb.append("\\n");
          break;
        default:
          sb.append(c);
      }
    }
    return sb.toString();
  }

  /*
   * SimpleClient for Prometheus is not used, because the format is very easy to implement and this
   * solution doesn't add any dependencies to the project. You can check the Prometheus format here:
   * https://prometheus.io/docs/instrumenting/exposition_formats/
   */
  @RestrictedApi(explanation = "Should only be called in tests or self", link = "",
      allowedOnPath = ".*/src/test/.*|.*/PrometheusHadoopServlet\\.java")
  void writeMetrics(Writer writer) throws IOException {
    Collection<MetricsRecord> metricRecords = MetricsExportHelper.export();
    for (MetricsRecord metricsRecord : metricRecords) {
      for (AbstractMetric metrics : metricsRecord.metrics()) {
        // Only counters and gauges map onto the Prometheus data model; other
        // metric types in the record are skipped.
        if (metrics.type() == MetricType.COUNTER || metrics.type() == MetricType.GAUGE) {

          String key = toPrometheusName(metricsRecord.name(), metrics.name());
          writer.append("# TYPE ").append(key).append(" ")
            .append(metrics.type().toString().toLowerCase()).append("\n").append(key).append("{");

          /* add tags as Prometheus labels, escaping the values */
          String sep = "";
          for (MetricsTag tag : metricsRecord.tags()) {
            String tagName = tag.name().toLowerCase();
            // Writer.append(null) would have printed "null"; keep that behavior.
            String tagValue = tag.value() == null ? "null" : escapeLabelValue(tag.value());
            writer.append(sep).append(tagName).append("=\"").append(tagValue).append("\"");
            sep = ",";
          }
          writer.append("} ");
          writer.append(metrics.value().toString()).append('\n');
        }
      }
    }
    writer.flush();
  }
}
Loading

0 comments on commit f9ea7ee

Please sign in to comment.