org.apache.kafka.common.Metric Java Examples
The following examples show how to use
org.apache.kafka.common.Metric.
You can go to the original project or source file by following the links above each example.
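For orientation, here is a minimal sketch (not taken from any of the projects below) of where these Metric instances typically come from: every Kafka client exposes a read-only map of MetricName to Metric through its metrics() method. The bootstrap address is a placeholder value.

import java.util.Map;
import java.util.Properties;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.common.Metric;
import org.apache.kafka.common.MetricName;
import org.apache.kafka.common.serialization.StringSerializer;

public class MetricSketch {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put("bootstrap.servers", "localhost:9092"); // placeholder address
        props.put("key.serializer", StringSerializer.class.getName());
        props.put("value.serializer", StringSerializer.class.getName());

        try (KafkaProducer<String, String> producer = new KafkaProducer<>(props)) {
            // Every Kafka client exposes a read-only map of MetricName -> Metric.
            Map<MetricName, ? extends Metric> metrics = producer.metrics();
            for (Map.Entry<MetricName, ? extends Metric> entry : metrics.entrySet()) {
                MetricName name = entry.getKey();
                // metricValue() returns Object: usually a Double, sometimes a String (e.g. app-info metrics).
                System.out.printf("%s/%s = %s%n", name.group(), name.name(), entry.getValue().metricValue());
            }
        }
    }
}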
Example #1
Source File: KafkaMetricsTest.java From micrometer with Apache License 2.0 | 7 votes |
@Test
void shouldKeepMetersWhenMetricsDoNotChange() {
    //Given
    Supplier<Map<MetricName, ? extends Metric>> supplier = () -> {
        MetricName metricName = new MetricName("a", "b", "c", new LinkedHashMap<>());
        KafkaMetric metric = new KafkaMetric(this, metricName, new Value(), new MetricConfig(), Time.SYSTEM);
        return Collections.singletonMap(metricName, metric);
    };
    kafkaMetrics = new KafkaMetrics(supplier);
    MeterRegistry registry = new SimpleMeterRegistry();

    kafkaMetrics.bindTo(registry);
    assertThat(registry.getMeters()).hasSize(1);

    kafkaMetrics.checkAndBindMetrics(registry);
    assertThat(registry.getMeters()).hasSize(1);
}
Example #2
Source File: LiKafkaClientsUtils.java From li-apache-kafka-clients with BSD 2-Clause "Simplified" License | 6 votes |
/**
 * kafka doesnt have an API for getting the client id from a client (WTH?!)
 * relying on reflection is tricky because we may be dealing with various
 * wrappers/decorators, but it does leak through kafka's metrics tags ...
 * @param metrics kafka client metrics
 * @return best guess for the client id
 */
private static String fishForClientId(Map<MetricName, ? extends Metric> metrics) {
    Set<String> candidates = new HashSet<>();
    metrics.forEach((metricName, metric) -> {
        Map<String, String> tags = metricName.tags();
        if (tags == null) {
            return;
        }
        String clientId = tags.get("client-id");
        if (clientId != null) {
            candidates.add(clientId);
        }
    });
    if (candidates.isEmpty()) {
        return null;
    }
    if (candidates.size() > 1) {
        throw new IllegalArgumentException("ambiguous client id from client: " + candidates);
    }
    return candidates.iterator().next();
}
Example #3
Source File: KafkaMetrics.java From micrometer with Apache License 2.0 | 6 votes |
/**
 * Define common tags and meters before binding metrics
 */
void prepareToBindMetrics(MeterRegistry registry) {
    Map<MetricName, ? extends Metric> metrics = metricsSupplier.get();
    // Collect static metrics and tags
    Metric startTime = null;
    for (Map.Entry<MetricName, ? extends Metric> entry : metrics.entrySet()) {
        MetricName name = entry.getKey();
        if (clientId.equals(DEFAULT_VALUE) && name.tags().get(CLIENT_ID_TAG_NAME) != null)
            clientId = name.tags().get(CLIENT_ID_TAG_NAME);
        if (METRIC_GROUP_APP_INFO.equals(name.group()))
            if (VERSION_METRIC_NAME.equals(name.name())) {
                kafkaVersion = (String) entry.getValue().metricValue();
            } else if (START_TIME_METRIC_NAME.equals(name.name())) {
                startTime = entry.getValue();
            }
    }
    if (startTime != null) {
        bindMeter(registry, startTime, meterName(startTime), meterTags(startTime));
    }
}
Example #4
Source File: KafkaStatsDReporterTest.java From kafka-statsd-metrics2 with Apache License 2.0 | 6 votes |
@Test
public final void sendDoubleGauge() throws Exception {
    final double value = 10.11;
    Metric metric = new Metric() {
        @Override
        public MetricName metricName() {
            return new MetricName("test-metric", "group");
        }

        @Override
        public double value() {
            return value;
        }
    };

    addMetricAndRunReporter("foo", metric, "bar");
    verify(statsD).gauge(Matchers.eq("foo"), Matchers.eq(value), Matchers.eq("bar"));
}
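Note that this example targets an older client API: the two-argument MetricName constructor and Metric.value() were later deprecated and removed. Against a recent client, a roughly equivalent test double might look like the following sketch (an assumption about a Kafka 3.x classpath, where value() no longer exists and metricValue() returns Object).

// Hedged sketch: the same anonymous Metric written against the current interface,
// assuming a Kafka 3.x client.
Metric metric = new Metric() {
    @Override
    public MetricName metricName() {
        return new MetricName("test-metric", "group", "", java.util.Collections.emptyMap());
    }

    @Override
    public Object metricValue() {
        return 10.11;
    }
};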
Example #5
Source File: KafkaMetricsTest.java From micrometer with Apache License 2.0 | 6 votes |
@Test
void shouldRemoveOlderMeterWithLessTags() {
    Map<String, String> tags = new LinkedHashMap<>();
    Supplier<Map<MetricName, ? extends Metric>> supplier = () -> {
        MetricName metricName = new MetricName("a", "b", "c", tags);
        KafkaMetric metric = new KafkaMetric(this, metricName, new Value(), new MetricConfig(), Time.SYSTEM);
        return Collections.singletonMap(metricName, metric);
    };
    kafkaMetrics = new KafkaMetrics(supplier);
    MeterRegistry registry = new SimpleMeterRegistry();

    kafkaMetrics.bindTo(registry);
    assertThat(registry.getMeters()).hasSize(1);
    assertThat(registry.getMeters().get(0).getId().getTags()).hasSize(1); //only version

    tags.put("key0", "value0");
    kafkaMetrics.checkAndBindMetrics(registry);
    assertThat(registry.getMeters()).hasSize(1);
    assertThat(registry.getMeters().get(0).getId().getTags()).hasSize(2);
}
Example #6
Source File: KafkaMetricsTest.java From micrometer with Apache License 2.0 | 6 votes |
@Test
void shouldRemoveMeterWithLessTags() {
    Supplier<Map<MetricName, ? extends Metric>> supplier = () -> {
        MetricName firstName = new MetricName("a", "b", "c", Collections.emptyMap());
        KafkaMetric firstMetric = new KafkaMetric(this, firstName, new Value(), new MetricConfig(), Time.SYSTEM);
        Map<String, String> tags = new LinkedHashMap<>();
        tags.put("key0", "value0");
        MetricName secondName = new MetricName("a", "b", "c", tags);
        KafkaMetric secondMetric = new KafkaMetric(this, secondName, new Value(), new MetricConfig(), Time.SYSTEM);
        Map<MetricName, KafkaMetric> metrics = new LinkedHashMap<>();
        metrics.put(firstName, firstMetric);
        metrics.put(secondName, secondMetric);
        return metrics;
    };
    kafkaMetrics = new KafkaMetrics(supplier);
    MeterRegistry registry = new SimpleMeterRegistry();

    kafkaMetrics.bindTo(registry);
    assertThat(registry.getMeters()).hasSize(1);
    assertThat(registry.getMeters().get(0).getId().getTags()).hasSize(2); // version + key0
}
Example #7
Source File: KafkaMetricsTest.java From micrometer with Apache License 2.0 | 6 votes |
@Test
void shouldBindMetersWithSameTags() {
    Supplier<Map<MetricName, ? extends Metric>> supplier = () -> {
        Map<String, String> firstTags = new LinkedHashMap<>();
        firstTags.put("key0", "value0");
        MetricName firstName = new MetricName("a", "b", "c", firstTags);
        KafkaMetric firstMetric = new KafkaMetric(this, firstName, new Value(), new MetricConfig(), Time.SYSTEM);
        Map<String, String> secondTags = new LinkedHashMap<>();
        secondTags.put("key0", "value1");
        MetricName secondName = new MetricName("a", "b", "c", secondTags);
        KafkaMetric secondMetric = new KafkaMetric(this, secondName, new Value(), new MetricConfig(), Time.SYSTEM);
        Map<MetricName, KafkaMetric> metrics = new LinkedHashMap<>();
        metrics.put(firstName, firstMetric);
        metrics.put(secondName, secondMetric);
        return metrics;
    };
    kafkaMetrics = new KafkaMetrics(supplier);
    MeterRegistry registry = new SimpleMeterRegistry();

    kafkaMetrics.bindTo(registry);
    assertThat(registry.getMeters()).hasSize(2);
    assertThat(registry.getMeters().get(0).getId().getTags()).hasSize(2); // version + key0
}
Example #8
Source File: KafkaMetricsTest.java From micrometer with Apache License 2.0 | 6 votes |
@Issue("#1968") @Test void shouldRemoveOlderMeterWithLessTagsWhenCommonTagsConfigured() { //Given Map<String, String> tags = new LinkedHashMap<>(); Supplier<Map<MetricName, ? extends Metric>> supplier = () -> { MetricName metricName = new MetricName("a", "b", "c", tags); KafkaMetric metric = new KafkaMetric(this, metricName, new Value(), new MetricConfig(), Time.SYSTEM); return Collections.singletonMap(metricName, metric); }; kafkaMetrics = new KafkaMetrics(supplier); MeterRegistry registry = new SimpleMeterRegistry(); registry.config().commonTags("common", "value"); kafkaMetrics.bindTo(registry); assertThat(registry.getMeters()).hasSize(1); assertThat(registry.getMeters().get(0).getId().getTags()).containsExactlyInAnyOrder(Tag.of("kafka-version", "unknown"), Tag.of("common", "value")); // only version tags.put("key0", "value0"); kafkaMetrics.checkAndBindMetrics(registry); assertThat(registry.getMeters()).hasSize(1); assertThat(registry.getMeters().get(0).getId().getTags()).containsExactlyInAnyOrder(Tag.of("kafka-version", "unknown"), Tag.of("key0", "value0"), Tag.of("common", "value")); }
Example #9
Source File: PartitionAssignmentWatchdog.java From ja-micro with Apache License 2.0 | 6 votes |
private synchronized void checkAssignments() {
    logger.debug("Checking partition assignments");
    try {
        KafkaConsumer<String, String> consumer = consumers.iterator().next();
        Map<MetricName, ? extends Metric> metrics = consumer.metrics();
        for (MetricName name : metrics.keySet()) {
            if ("assigned-partitions".equals(name.name())) {
                Metric metric = metrics.get(name);
                Map<String, String> tags = name.tags();
                String clientId = tags.get("client-id");
                int partitionCount = ((Double) metric.metricValue()).intValue();
                processDataPoint(clientId, partitionCount, Instant.now());
            }
        }
    } catch (NoSuchElementException ex) {
    }
}
Example #10
Source File: KafkaProducerMetricsMonitor.java From singer with Apache License 2.0 | 6 votes |
@SuppressWarnings({ "deprecation" })
protected void publishKafkaProducerMetricsToOstrich() {
    Map<KafkaProducerConfig, KafkaProducer<byte[], byte[]>> producers = KafkaProducerManager
        .getInstance().getProducers();
    for (Entry<KafkaProducerConfig, KafkaProducer<byte[], byte[]>> kafkaProducerEntry : producers.entrySet()) {
        KafkaProducerConfig key = kafkaProducerEntry.getKey();
        String signature = convertSignatureToTag(key);
        Map<MetricName, ? extends Metric> metrics = kafkaProducerEntry.getValue().metrics();
        for (Entry<MetricName, ? extends Metric> entry : metrics.entrySet()) {
            if (PRODUCER_METRICS_WHITELIST.contains(entry.getKey().name())) {
                OpenTsdbMetricConverter.gauge("kafkaproducer." + entry.getKey().name(),
                    entry.getValue().value(), "cluster=" + signature);
            }
        }
    }
}
Example #11
Source File: ConsumerMetrics.java From core-ng-project with Apache License 2.0 | 5 votes |
void add(Map<MetricName, ? extends Metric> kafkaMetrics) {
    for (var entry : kafkaMetrics.entrySet()) {
        MetricName name = entry.getKey();
        if ("consumer-fetch-manager-metrics".equals(name.group())) {
            if ("records-lag-max".equals(name.name())) recordsLagMax.add(entry.getValue());
            else if ("records-consumed-rate".equals(name.name())) recordsConsumedRate.add(entry.getValue());
            else if ("bytes-consumed-rate".equals(name.name())) bytesConsumedRate.add(entry.getValue());
            else if ("fetch-rate".equals(name.name())) fetchRate.add(entry.getValue());
        }
    }
}
Example #12
Source File: ProducerMetrics.java From core-ng-project with Apache License 2.0 | 5 votes |
public void set(Map<MetricName, ? extends Metric> kafkaMetrics) {
    for (Map.Entry<MetricName, ? extends Metric> entry : kafkaMetrics.entrySet()) {
        MetricName metricName = entry.getKey();
        if ("producer-metrics".equals(metricName.group())) {
            String name = metricName.name();
            if ("request-rate".equals(name)) requestRate = entry.getValue();
            else if ("request-size-avg".equals(name)) requestSizeAvg = entry.getValue();
            else if ("outgoing-byte-rate".equals(name)) outgoingByteRate = entry.getValue();
        }
    }
}
Example #13
Source File: ConsumerMetricsTest.java From core-ng-project with Apache License 2.0 | 5 votes |
@Test
void sum() {
    Metric metric1 = mock(Metric.class);
    when(metric1.metricValue()).thenReturn(1d);
    Metric metric2 = mock(Metric.class);
    when(metric2.metricValue()).thenReturn(Double.NEGATIVE_INFINITY);

    var metrics = new ConsumerMetrics(null);
    assertThat(metrics.sum(List.of(metric1, metric1, metric2))).isEqualTo(2);
}
Example #14
Source File: ProducerMetricsTest.java From core-ng-project with Apache License 2.0 | 5 votes |
@BeforeEach
void createProducerMetrics() {
    requestSizeAvg = mock(Metric.class);
    metrics = new ProducerMetrics(null);
    metrics.requestSizeAvg = requestSizeAvg;
}
Example #15
Source File: KafkaMetrics.java From attic-apex-malhar with Apache License 2.0 | 5 votes |
void updateMetrics(String[] clusters, Map<String, Map<MetricName, ? extends Metric>> metricsMap) {
    long current = System.currentTimeMillis();
    if (current - lastMetricSampleTime < metricsRefreshInterval) {
        return;
    }
    lastMetricSampleTime = current;
    if (stats == null) {
        stats = new KafkaConsumerStats[clusters.length];
    }
    for (int i = 0; i < clusters.length; i++) {
        if (stats[i] == null) {
            stats[i] = new KafkaConsumerStats();
            stats[i].cluster = clusters[i];
        }
        Map<MetricName, ? extends Metric> cMetrics = metricsMap.get(clusters[i]);
        if (cMetrics == null || cMetrics.isEmpty()) {
            stats[i].bytesPerSec = 0;
            stats[i].msgsPerSec = 0;
            continue;
        }
        if (stats[i].bytePerSecMK == null || stats[i].msgPerSecMK == null) {
            for (MetricName mn : cMetrics.keySet()) {
                if (mn.name().equals("bytes-consumed-rate")) {
                    stats[i].bytePerSecMK = mn;
                } else if (mn.name().equals("records-consumed-rate")) {
                    stats[i].msgPerSecMK = mn;
                }
            }
        }
        stats[i].bytesPerSec = cMetrics.get(stats[i].bytePerSecMK).value();
        stats[i].msgsPerSec = cMetrics.get(stats[i].msgPerSecMK).value();
    }
}
Example #16
Source File: KafkaMetricsTest.java From micrometer with Apache License 2.0 | 5 votes |
@Test
void shouldNotAddAppInfoMetrics() {
    Supplier<Map<MetricName, ? extends Metric>> supplier = () -> {
        Map<MetricName, KafkaMetric> metrics = new LinkedHashMap<>();
        MetricName metricName = new MetricName("a0", "b0", "c0", new LinkedHashMap<>());
        KafkaMetric metric = new KafkaMetric(this, metricName, new Value(), new MetricConfig(), Time.SYSTEM);
        metrics.put(metricName, metric);
        MetricName appInfoMetricName =
                new MetricName("a1", KafkaMetrics.METRIC_GROUP_APP_INFO, "c0", new LinkedHashMap<>());
        KafkaMetric appInfoMetric =
                new KafkaMetric(this, appInfoMetricName, new Value(), new MetricConfig(), Time.SYSTEM);
        metrics.put(appInfoMetricName, appInfoMetric);
        return metrics;
    };
    kafkaMetrics = new KafkaMetrics(supplier);
    MeterRegistry registry = new SimpleMeterRegistry();

    kafkaMetrics.bindTo(registry);
    assertThat(registry.getMeters()).hasSize(1);

    kafkaMetrics.checkAndBindMetrics(registry);
    assertThat(registry.getMeters()).hasSize(1);
}
Example #17
Source File: KafkaMetrics.java From micrometer with Apache License 2.0 | 5 votes |
private void bindMeter(MeterRegistry registry, Metric metric, String name, Iterable<Tag> tags) {
    if (name.endsWith("total") || name.endsWith("count")) {
        registerCounter(registry, metric, name, tags);
    } else {
        registerGauge(registry, metric, name, tags);
    }
}
Example #18
Source File: KafkaMetrics.java From micrometer with Apache License 2.0 | 5 votes |
private List<Tag> meterTags(Metric metric, boolean includeCommonTags) {
    List<Tag> tags = new ArrayList<>();
    metric.metricName().tags().forEach((key, value) -> tags.add(Tag.of(key, value)));
    tags.add(Tag.of(KAFKA_VERSION_TAG_NAME, kafkaVersion));
    extraTags.forEach(tags::add);
    if (includeCommonTags) {
        commonTags.forEach(tags::add);
    }
    return tags;
}
Example #19
Source File: KafkaUReplicatorMetricsReporter.java From uReplicator with Apache License 2.0 | 5 votes |
public <T extends com.codahale.metrics.Metric> void registerMetric(String metricName, T metric) {
    checkState();
    if (_registry == null) {
        return;
    }
    if (!_registry.getNames().contains(metricName)) {
        _registry.register(metricName, metric);
    } else {
        LOGGER.warn("Failed to register an existed metric: {}", metricName);
    }
}
Example #20
Source File: KafkaEventSink.java From TNT4J with Apache License 2.0 | 5 votes |
@Override
public KeyValueStats getStats(Map<String, Object> stats) {
    super.getStats(stats);
    if (isOpen()) {
        Map<MetricName, ? extends Metric> kMetrics = producer.metrics();
        for (Map.Entry<MetricName, ? extends Metric> entry : kMetrics.entrySet()) {
            MetricName kMetric = entry.getKey();
            stats.put(Utils.qualify(this, kMetric.group() + "/" + kMetric.name()), entry.getValue().metricValue());
        }
    }
    return this;
}
Example #21
Source File: StatsDMetricsRegistry.java From kafka-statsd-metrics2 with Apache License 2.0 | 5 votes |
public void register(String metricName, Metric metric, String tag) {
    metrics.put(metricName, metric);
    tags.put(metricName, tag);
}
Example #22
Source File: KafkaUReplicatorMetricsReporter.java From uReplicator with Apache License 2.0 | 5 votes |
public void removeKafkaMetrics(String prefix, Map<MetricName, ? extends Metric> metrics) {
    checkState();
    for (MetricName metricName : metrics.keySet()) {
        String kafkaMetricName = String.format("%s.%s", prefix, metricName.name());
        removeMetric(kafkaMetricName);
    }
}
Example #23
Source File: KafkaUReplicatorMetricsReporter.java From uReplicator with Apache License 2.0 | 5 votes |
public void registerKafkaMetrics(String prefix, Map<MetricName, ? extends Metric> metrics) {
    checkState();
    for (MetricName metricName : metrics.keySet()) {
        String kafkaMetricName = String.format("%s.%s", prefix, metricName.name());
        registerMetric(kafkaMetricName, new GraphiteKafkaGauge(metrics.get(metricName)));
    }
}
Example #24
Source File: KafkaConsumerWrapper.java From attic-apex-malhar with Apache License 2.0 | 5 votes |
public Map<String, Map<MetricName, ? extends Metric>> getAllConsumerMetrics() {
    Map<String, Map<MetricName, ? extends Metric>> val = new HashMap<>();
    for (Map.Entry<String, AbstractKafkaConsumer> e : consumers.entrySet()) {
        val.put(e.getKey(), e.getValue().metrics());
    }
    return val;
}
Example #25
Source File: MetricsProxy.java From li-apache-kafka-clients with BSD 2-Clause "Simplified" License | 4 votes |
@Override
public Metric put(Object key, Object value) {
    throw new UnsupportedOperationException(); //this collection is immutable in vanilla kafka anyway
}
Example #26
Source File: MockLiKafkaProducer.java From li-apache-kafka-clients with BSD 2-Clause "Simplified" License | 4 votes |
@Override public Map<MetricName, ? extends Metric> metrics() { throw new UnsupportedOperationException("Not implemented yet"); }
Example #27
Source File: KafkaMetricMutableWrapper.java From flink with Apache License 2.0 | 4 votes |
public KafkaMetricMutableWrapper(org.apache.kafka.common.Metric metric) {
    this.kafkaMetric = metric;
}
Example #28
Source File: KafkaMetricMutableWrapper.java From flink with Apache License 2.0 | 4 votes |
public void setKafkaMetric(Metric kafkaMetric) {
    this.kafkaMetric = kafkaMetric;
}
Example #29
Source File: MockKafkaProducer.java From samza with Apache License 2.0 | 4 votes |
@Override
public Map<MetricName, Metric> metrics() {
    return null;
}
Example #30
Source File: KafkaConsumerThreadTest.java From flink with Apache License 2.0 | 4 votes |
@Override
public Map<MetricName, ? extends Metric> metrics() {
    throw new UnsupportedOperationException();
}