org.apache.kafka.common.metrics.stats.Value Java Examples

The following examples show how to use org.apache.kafka.common.metrics.stats.Value, a MeasurableStat that simply reports the most recent value recorded on its Sensor. Each example notes the project and source file it was taken from.
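For context, here is a minimal, self-contained sketch (not taken from any of the projects below) of the pattern these examples share: register a Value stat on a Sensor, record into the sensor, and read the latest measurement back from the Metrics registry. The class, sensor, and metric names are illustrative only.

import org.apache.kafka.common.MetricName;
import org.apache.kafka.common.metrics.Metrics;
import org.apache.kafka.common.metrics.Sensor;
import org.apache.kafka.common.metrics.stats.Value;

public class ValueQuickStart {
    public static void main(String[] args) {
        try (Metrics metrics = new Metrics()) {                      // metrics registry with default config
            Sensor sensor = metrics.sensor("example-sensor");        // sensor that feeds the stat
            MetricName name = metrics.metricName(
                    "example-value", "example-group", "Reports the most recently recorded value");
            sensor.add(name, new Value());                           // Value keeps only the latest recording

            sensor.record(42.0);
            sensor.record(7.0);

            // Prints 7.0 -- Value behaves like a gauge of the last recorded value, not a cumulative total.
            System.out.println(metrics.metric(name).metricValue());
        }
    }
}
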
Example #1
Source File: KafkaMetricsTest.java    From micrometer with Apache License 2.0
@Test
void shouldKeepMetersWhenMetricsDoNotChange() {
    //Given
    Supplier<Map<MetricName, ? extends Metric>> supplier = () -> {
        MetricName metricName = new MetricName("a", "b", "c", new LinkedHashMap<>());
        KafkaMetric metric = new KafkaMetric(this, metricName, new Value(), new MetricConfig(), Time.SYSTEM);
        return Collections.singletonMap(metricName, metric);
    };
    kafkaMetrics = new KafkaMetrics(supplier);
    MeterRegistry registry = new SimpleMeterRegistry();

    kafkaMetrics.bindTo(registry);
    assertThat(registry.getMeters()).hasSize(1);

    kafkaMetrics.checkAndBindMetrics(registry);
    assertThat(registry.getMeters()).hasSize(1);
}
 
Example #2
Source File: KafkaMetricsTest.java    From micrometer with Apache License 2.0
@Test
void shouldRemoveOlderMeterWithLessTags() {
    Map<String, String> tags = new LinkedHashMap<>();
    Supplier<Map<MetricName, ? extends Metric>> supplier = () -> {
        MetricName metricName = new MetricName("a", "b", "c", tags);
        KafkaMetric metric = new KafkaMetric(this, metricName, new Value(), new MetricConfig(), Time.SYSTEM);
        return Collections.singletonMap(metricName, metric);
    };
    kafkaMetrics = new KafkaMetrics(supplier);
    MeterRegistry registry = new SimpleMeterRegistry();

    kafkaMetrics.bindTo(registry);
    assertThat(registry.getMeters()).hasSize(1);
    assertThat(registry.getMeters().get(0).getId().getTags()).hasSize(1); //only version

    tags.put("key0", "value0");
    kafkaMetrics.checkAndBindMetrics(registry);
    assertThat(registry.getMeters()).hasSize(1);
    assertThat(registry.getMeters().get(0).getId().getTags()).hasSize(2);
}
 
Example #3
Source File: KafkaMetricsTest.java    From micrometer with Apache License 2.0
@Test
void shouldRemoveMeterWithLessTags() {
    Supplier<Map<MetricName, ? extends Metric>> supplier = () -> {
        MetricName firstName = new MetricName("a", "b", "c", Collections.emptyMap());
        KafkaMetric firstMetric = new KafkaMetric(this, firstName, new Value(), new MetricConfig(), Time.SYSTEM);
        Map<String, String> tags = new LinkedHashMap<>();
        tags.put("key0", "value0");
        MetricName secondName = new MetricName("a", "b", "c", tags);
        KafkaMetric secondMetric = new KafkaMetric(this, secondName, new Value(), new MetricConfig(), Time.SYSTEM);
        Map<MetricName, KafkaMetric> metrics = new LinkedHashMap<>();
        metrics.put(firstName, firstMetric);
        metrics.put(secondName, secondMetric);
        return metrics;
    };
    kafkaMetrics = new KafkaMetrics(supplier);
    MeterRegistry registry = new SimpleMeterRegistry();

    kafkaMetrics.bindTo(registry);
    assertThat(registry.getMeters()).hasSize(1);
    assertThat(registry.getMeters().get(0).getId().getTags()).hasSize(2); // version + key0
}
 
Example #4
Source File: KafkaMetricsTest.java    From micrometer with Apache License 2.0
@Test
void shouldBindMetersWithSameTags() {
    Supplier<Map<MetricName, ? extends Metric>> supplier = () -> {
        Map<String, String> firstTags = new LinkedHashMap<>();
        firstTags.put("key0", "value0");
        MetricName firstName = new MetricName("a", "b", "c", firstTags);
        KafkaMetric firstMetric = new KafkaMetric(this, firstName, new Value(), new MetricConfig(), Time.SYSTEM);
        Map<String, String> secondTags = new LinkedHashMap<>();
        secondTags.put("key0", "value1");
        MetricName secondName = new MetricName("a", "b", "c", secondTags);
        KafkaMetric secondMetric = new KafkaMetric(this, secondName, new Value(), new MetricConfig(), Time.SYSTEM);

        Map<MetricName, KafkaMetric> metrics = new LinkedHashMap<>();
        metrics.put(firstName, firstMetric);
        metrics.put(secondName, secondMetric);
        return metrics;
    };

    kafkaMetrics = new KafkaMetrics(supplier);
    MeterRegistry registry = new SimpleMeterRegistry();

    kafkaMetrics.bindTo(registry);
    assertThat(registry.getMeters()).hasSize(2);
    assertThat(registry.getMeters().get(0).getId().getTags()).hasSize(2); // version + key0
}
 
Example #5
Source File: KafkaMetricsTest.java    From micrometer with Apache License 2.0
@Issue("#1968")
@Test
void shouldBindMetersWithDifferentClientIds() {
    Supplier<Map<MetricName, ? extends Metric>> supplier = () -> {
        Map<String, String> firstTags = new LinkedHashMap<>();
        firstTags.put("key0", "value0");
        firstTags.put("client-id", "client0");
        MetricName firstName = new MetricName("a", "b", "c", firstTags);
        KafkaMetric firstMetric = new KafkaMetric(this, firstName, new Value(), new MetricConfig(), Time.SYSTEM);
        return Collections.singletonMap(firstName, firstMetric);
    };

    kafkaMetrics = new KafkaMetrics(supplier);
    MeterRegistry registry = new SimpleMeterRegistry();
    registry.counter("kafka.b.a", "client-id", "client1", "key0", "value0");

    kafkaMetrics.bindTo(registry);
    assertThat(registry.getMeters()).hasSize(2);
}
 
Example #6
Source File: KafkaMetricsTest.java    From micrometer with Apache License 2.0
@Issue("#1968")
@Test
void shouldRemoveOlderMeterWithLessTagsWhenCommonTagsConfigured() {
    //Given
    Map<String, String> tags = new LinkedHashMap<>();
    Supplier<Map<MetricName, ? extends Metric>> supplier = () -> {
        MetricName metricName = new MetricName("a", "b", "c", tags);
        KafkaMetric metric = new KafkaMetric(this, metricName, new Value(), new MetricConfig(), Time.SYSTEM);
        return Collections.singletonMap(metricName, metric);
    };

    kafkaMetrics = new KafkaMetrics(supplier);
    MeterRegistry registry = new SimpleMeterRegistry();
    registry.config().commonTags("common", "value");

    kafkaMetrics.bindTo(registry);
    assertThat(registry.getMeters()).hasSize(1);
    assertThat(registry.getMeters().get(0).getId().getTags()).containsExactlyInAnyOrder(Tag.of("kafka-version", "unknown"), Tag.of("common", "value")); // only version

    tags.put("key0", "value0");
    kafkaMetrics.checkAndBindMetrics(registry);
    assertThat(registry.getMeters()).hasSize(1);
    assertThat(registry.getMeters().get(0).getId().getTags()).containsExactlyInAnyOrder(Tag.of("kafka-version", "unknown"), Tag.of("key0", "value0"), Tag.of("common", "value"));
}
 
Example #7
Source File: KsqlEngineMetrics.java    From ksql-fork-with-deep-learning-function with Apache License 2.0
private Sensor configureErrorRate(Metrics metrics) {
  Sensor sensor = createSensor(metrics, metricGroupName + "-error-rate");
  sensor.add(
      metrics.metricName("error-rate", this.metricGroupName,
                         "The number of messages which were consumed but not processed. "
                         + "Messages may not be processed if, for instance, the message "
                         + "contents could not be deserialized due to an incompatible schema. "
                         + "Alternately, a consumed messages may not have been produced, hence "
                         + "being effectively dropped. Such messages would also be counted "
                         + "toward the error rate."),
      new Value());
  return sensor;
}
 
Example #8
Source File: KsqlEngineMetrics.java    From ksql-fork-with-deep-learning-function with Apache License 2.0
private Sensor configureMessagesOut(Metrics metrics) {
  Sensor sensor = createSensor(metrics, metricGroupName + "-messages-produced");
  sensor.add(
      metrics.metricName("messages-produced-per-sec", this.metricGroupName,
                         "The number of messages produced per second across all queries"),
      new Value());

  return sensor;
}
 
Example #9
Source File: KsqlEngineMetrics.java    From ksql-fork-with-deep-learning-function with Apache License 2.0
private Sensor configureMessagesIn(Metrics metrics) {
  Sensor sensor = createSensor(metrics, metricGroupName + "-messages-consumed");
  sensor.add(
      metrics.metricName("messages-consumed-per-sec", this.metricGroupName,
                         "The number of messages consumed per second across all queries"),
      new Value());
  return sensor;
}
 
Example #10
Source File: KsqlEngineMetrics.java    From ksql-fork-with-deep-learning-function with Apache License 2.0
private Sensor configureTotalMessagesIn(Metrics metrics) {
  Sensor sensor = createSensor(metrics, metricGroupName + "-total-messages-consumed");
  sensor.add(
      metrics.metricName("messages-consumed-total", this.metricGroupName,
          "The total number of messages consumed across all queries"),
      new Value());
  return sensor;
}
 
Example #11
Source File: KsqlEngineMetrics.java    From ksql-fork-with-deep-learning-function with Apache License 2.0 5 votes vote down vote up
private Sensor configureTotalBytesIn(Metrics metrics) {
  Sensor sensor = createSensor(metrics, metricGroupName + "-total-bytes-consumed");
  sensor.add(
      metrics.metricName("bytes-consumed-total", this.metricGroupName,
          "The total number of bytes consumed across all queries"),
      new Value());
  return sensor;
}
 
Example #12
Source File: MissingPartitionsJmxReporter.java    From mirus with BSD 3-Clause "New" or "Revised" License
MissingPartitionsJmxReporter(Metrics metrics) {
  super(metrics);
  Sensor missingPartsSensor = metrics.sensor(MISSING_DEST_PARTITIONS);
  MetricName missingPartsName = metrics.metricName(MISSING_DEST_PARTITIONS + "-count", "mirus");
  missingPartsSensor.add(missingPartsName, new Value());
  this.missingPartsSensor = missingPartsSensor;
}
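The constructor above only registers the Value-backed sensor; the count itself is reported by whatever later records into that sensor. A hedged sketch of such an update (the method name and its argument are assumptions for illustration, not code from mirus):

// Hypothetical update path, not part of the mirus source shown above.
void recordMissingPartitions(int missingPartitionCount) {
    // Value reports the most recent recording, so each call replaces the previous count.
    missingPartsSensor.record(missingPartitionCount);
}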
 
Example #13
Source File: KafkaMetricsTest.java    From micrometer with Apache License 2.0
@Test
void shouldNotAddAppInfoMetrics() {
    Supplier<Map<MetricName, ? extends Metric>> supplier = () -> {
        Map<MetricName, KafkaMetric> metrics = new LinkedHashMap<>();
        MetricName metricName = new MetricName("a0", "b0", "c0", new LinkedHashMap<>());
        KafkaMetric metric = new KafkaMetric(this, metricName, new Value(), new MetricConfig(), Time.SYSTEM);
        metrics.put(metricName, metric);
        MetricName appInfoMetricName =
                new MetricName("a1", KafkaMetrics.METRIC_GROUP_APP_INFO, "c0",
                        new LinkedHashMap<>());
        KafkaMetric appInfoMetric =
                new KafkaMetric(this, appInfoMetricName, new Value(), new MetricConfig(), Time.SYSTEM);
        metrics.put(appInfoMetricName, appInfoMetric);
        return metrics;
    };
    kafkaMetrics = new KafkaMetrics(supplier);
    MeterRegistry registry = new SimpleMeterRegistry();

    kafkaMetrics.bindTo(registry);
    assertThat(registry.getMeters()).hasSize(1);

    kafkaMetrics.checkAndBindMetrics(registry);
    assertThat(registry.getMeters()).hasSize(1);
}
 
Example #14
Source File: WorkersMetrics.java    From kafka-workers with Apache License 2.0
public void addSensor(String name) {
    Sensor sensor = metrics.sensor(name);
    sensor.add(metrics.metricName("value", name), new Value());
}
 
Example #15
Source File: KsqlEngineMetrics.java    From ksql-fork-with-deep-learning-function with Apache License 2.0
private Sensor configureIdleQueriesSensor(Metrics metrics) {
  Sensor sensor = createSensor(metrics, "num-idle-queries");
  sensor.add(metrics.metricName("num-idle-queries", this.metricGroupName), new Value());
  return sensor;
}
 
Example #16
Source File: ConnectorJmxReporter.java    From mirus with BSD 3-Clause "New" or "Revised" License
private void ensureMetricsCreated(String connectorName) {

    Map<String, String> connectorTags = getConnectorLevelTags(connectorName);

    MetricName runningMetric =
        getMetric(
            RUNNING_TASK_METRIC_NAME + "-count",
            CONNECTOR_JMX_GROUP_NAME,
            "count of running tasks per connector",
            connectorLevelJmxTags,
            connectorTags);
    MetricName pausedMetric =
        getMetric(
            PAUSED_TASK_METRIC_NAME + "-count",
            CONNECTOR_JMX_GROUP_NAME,
            "count of paused tasks per connector",
            connectorLevelJmxTags,
            connectorTags);
    MetricName failedMetric =
        getMetric(
            FAILED_TASK_METRIC_NAME + "-count",
            CONNECTOR_JMX_GROUP_NAME,
            "count of failed tasks per connector",
            connectorLevelJmxTags,
            connectorTags);
    MetricName unassignedMetric =
        getMetric(
            UNASSIGNED_TASK_METRIC_NAME + "-count",
            CONNECTOR_JMX_GROUP_NAME,
            "count of unassigned tasks per connector",
            connectorLevelJmxTags,
            connectorTags);
    MetricName destroyedMetric =
        getMetric(
            DESTROYED_TASK_METRIC_NAME + "-count",
            CONNECTOR_JMX_GROUP_NAME,
            "count of destroyed tasks per connector",
            connectorLevelJmxTags,
            connectorTags);

    MetricName totalAttemptsPerConnectorMetric =
        getMetric(
            FAILED_TASK_ATTEMPTS_METRIC_NAME + "-count",
            CONNECTOR_JMX_GROUP_NAME,
            "count of failed task restart attempts per connector",
            connectorLevelJmxTags,
            connectorTags);

    MetricName restartAttemptsPerConnectorMetric =
        getMetric(
            FAILED_CONNECTOR_ATTEMPTS_METRIC_NAME + "-count",
            CONNECTOR_JMX_GROUP_NAME,
            "count of failed connector restart attempts per connector",
            connectorLevelJmxTags,
            connectorTags);

    if (!metrics.metrics().containsKey(runningMetric)) {
      metrics
          .sensor(calculateSensorName(allStates.get("RUNNING"), connectorName))
          .add(runningMetric, new Value());
    }
    if (!metrics.metrics().containsKey(pausedMetric)) {
      metrics
          .sensor(calculateSensorName(allStates.get("PAUSED"), connectorName))
          .add(pausedMetric, new Value());
    }

    if (!metrics.metrics().containsKey(failedMetric)) {
      metrics
          .sensor(calculateSensorName(allStates.get("FAILED"), connectorName))
          .add(failedMetric, new Value());
    }
    if (!metrics.metrics().containsKey(unassignedMetric)) {
      metrics
          .sensor(calculateSensorName(allStates.get("UNASSIGNED"), connectorName))
          .add(unassignedMetric, new Value());
    }
    if (!metrics.metrics().containsKey(destroyedMetric)) {
      metrics
          .sensor(calculateSensorName(allStates.get("DESTROYED"), connectorName))
          .add(destroyedMetric, new Value());
    }
    if (!metrics.metrics().containsKey(totalAttemptsPerConnectorMetric)) {
      metrics
          .sensor(FAILED_TASK_ATTEMPTS_METRIC_NAME + connectorName)
          .add(totalAttemptsPerConnectorMetric, new Total());
    }

    if (!metrics.metrics().containsKey(restartAttemptsPerConnectorMetric)) {
      metrics
          .sensor(FAILED_CONNECTOR_ATTEMPTS_METRIC_NAME + connectorName)
          .add(restartAttemptsPerConnectorMetric, new Total());
    }
  }