Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[METRICS] use built-in cost functions to fetch beans #1622

Merged
merged 7 commits into from
Mar 30, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@ public class ClusterInfoSensor implements MetricSensor {
@Override
public List<? extends HasBeanObject> fetch(MBeanClient client, ClusterBean bean) {
return Stream.of(
List.of(ServerMetrics.KafkaServer.clusterId(client)),
List.of(ServerMetrics.KafkaServer.CLUSTER_ID.fetch(client)),
LogMetrics.Log.SIZE.fetch(client),
ClusterMetrics.Partition.REPLICAS_COUNT.fetch(client))
.flatMap(Collection::stream)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,11 @@
*/
package org.astraea.common.metrics.broker;

import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;
import org.astraea.common.EnumInfo;
import org.astraea.common.admin.TopicPartition;
Expand All @@ -28,9 +32,25 @@ public final class ClusterMetrics {

public static final String DOMAIN_NAME = "kafka.cluster";

public static final Collection<BeanQuery> QUERIES = List.copyOf(Partition.ALL.values());

public enum Partition implements EnumInfo {
REPLICAS_COUNT("ReplicasCount");

private static final Map<Partition, BeanQuery> ALL =
Arrays.stream(Partition.values())
.collect(
Collectors.toUnmodifiableMap(
Function.identity(),
m ->
BeanQuery.builder()
.domainName(DOMAIN_NAME)
.property("type", "Partition")
.property("topic", "*")
.property("partition", "*")
.property("name", m.metricName())
.build()));

private final String metricName;

Partition(String metricName) {
Expand All @@ -55,18 +75,8 @@ public static ClusterMetrics.Partition ofAlias(String alias) {
return EnumInfo.ignoreCaseEnum(ClusterMetrics.Partition.class, alias);
}

public BeanQuery query() {
return BeanQuery.builder()
.domainName(DOMAIN_NAME)
.property("type", "Partition")
.property("topic", "*")
.property("partition", "*")
.property("name", metricName())
.build();
}

public List<PartitionMetric> fetch(MBeanClient client) {
return client.beans(query()).stream()
return client.beans(ALL.get(this)).stream()
.map(PartitionMetric::new)
.collect(Collectors.toUnmodifiableList());
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,10 +16,13 @@
*/
package org.astraea.common.metrics.broker;

import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.astraea.common.EnumInfo;
import org.astraea.common.metrics.BeanObject;
import org.astraea.common.metrics.BeanQuery;
Expand All @@ -30,10 +33,28 @@ public final class LogMetrics {
public static final String DOMAIN_NAME = "kafka.log";
public static final String LOG_TYPE = "Log";

public static final Collection<BeanQuery> QUERIES =
Stream.of(LogCleanerManager.ALL.values().stream(), Log.ALL.values().stream())
.flatMap(f -> f)
.collect(Collectors.toUnmodifiableList());

public enum LogCleanerManager implements EnumInfo {
UNCLEANABLE_BYTES("uncleanable-bytes"),
UNCLEANABLE_PARTITIONS_COUNT("uncleanable-partitions-count");

private static final Map<LogCleanerManager, BeanQuery> ALL =
Arrays.stream(LogCleanerManager.values())
.collect(
Collectors.toUnmodifiableMap(
Function.identity(),
m ->
BeanQuery.builder()
.domainName(DOMAIN_NAME)
.property("type", "LogCleanerManager")
.property("logDirectory", "*")
.property("name", m.metricName)
.build()));

public static LogCleanerManager ofAlias(String alias) {
return EnumInfo.ignoreCaseEnum(LogCleanerManager.class, alias);
}
Expand All @@ -58,17 +79,8 @@ public String toString() {
return alias();
}

public BeanQuery query() {
return BeanQuery.builder()
.domainName(DOMAIN_NAME)
.property("type", "LogCleanerManager")
.property("logDirectory", "*")
.property("name", metricName)
.build();
}

public List<Gauge> fetch(MBeanClient mBeanClient) {
return mBeanClient.beans(query()).stream()
return mBeanClient.beans(ALL.get(this)).stream()
.map(Gauge::new)
.collect(Collectors.toUnmodifiableList());
}
Expand Down Expand Up @@ -108,6 +120,28 @@ public enum Log implements EnumInfo {
NUM_LOG_SEGMENTS("NumLogSegments"),
SIZE("Size");

private static final Map<Log, BeanQuery> ALL =
Arrays.stream(Log.values())
.collect(
Collectors.toUnmodifiableMap(
Function.identity(),
m ->
BeanQuery.builder()
.domainName(DOMAIN_NAME)
.property("type", LOG_TYPE)
.property("topic", "*")
.property("partition", "*")
.property("name", m.metricName)
                                // Due to a Kafka bug, these log metrics might come with an
                                // `is-future` property attached, and that property is never
                                // removed even after the folder migration is done. We use the
                                // BeanQuery property list pattern to work around this issue.
                                // See https://github.com/apache/kafka/pull/12979
.usePropertyListPattern()
.build()));

public static Log ofAlias(String alias) {
return EnumInfo.ignoreCaseEnum(Log.class, alias);
}
Expand Down Expand Up @@ -140,24 +174,8 @@ public static Collection<Gauge> gauges(Collection<HasBeanObject> beans, Log type
.collect(Collectors.toUnmodifiableList());
}

public BeanQuery query() {
return BeanQuery.builder()
.domainName(DOMAIN_NAME)
.property("type", LOG_TYPE)
.property("topic", "*")
.property("partition", "*")
.property("name", metricName)
          // Due to a Kafka bug, these log metrics might come with an `is-future` property
          // attached, and that property is never removed even after the folder migration
          // is done. We use the BeanQuery property list pattern to work around this issue.
          // See https://github.com/apache/kafka/pull/12979
.usePropertyListPattern()
.build();
}

public List<Gauge> fetch(MBeanClient mBeanClient) {
return mBeanClient.beans(query()).stream()
return mBeanClient.beans(ALL.get(this)).stream()
.map(Gauge::new)
.collect(Collectors.toUnmodifiableList());
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,12 +16,19 @@
*/
package org.astraea.common.metrics.broker;

import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;
import org.astraea.common.EnumInfo;
import org.astraea.common.metrics.BeanObject;
import org.astraea.common.metrics.BeanQuery;
import org.astraea.common.metrics.MBeanClient;

public class NetworkMetrics {
public static final Collection<BeanQuery> QUERIES = List.copyOf(Request.ALL.values());

public enum Request implements EnumInfo {
PRODUCE("Produce"),
Expand Down Expand Up @@ -84,6 +91,19 @@ public enum Request implements EnumInfo {
LIST_TRANSACTIONS("ListTransactions"),
ALLOCATE_PRODUCER_IDS("AllocateProducerIds");

private static final Map<Request, BeanQuery> ALL =
Arrays.stream(Request.values())
.collect(
Collectors.toUnmodifiableMap(
Function.identity(),
m ->
BeanQuery.builder()
.domainName("kafka.network")
.property("type", "RequestMetrics")
.property("request", m.metricName())
.property("name", "TotalTimeMs")
.build()));

public static Request ofAlias(String alias) {
return EnumInfo.ignoreCaseEnum(Request.class, alias);
}
Expand All @@ -108,17 +128,8 @@ public String toString() {
return alias();
}

public BeanQuery query() {
return BeanQuery.builder()
.domainName("kafka.network")
.property("type", "RequestMetrics")
.property("request", this.metricName())
.property("name", "TotalTimeMs")
.build();
}

public Histogram fetch(MBeanClient mBeanClient) {
return new Histogram(mBeanClient.bean(query()));
return new Histogram(mBeanClient.bean(ALL.get(this)));
}

public static class Histogram implements HasHistogram {
Expand Down
Loading