com.codahale.metrics.Metric Scala Examples
The following examples show how to use com.codahale.metrics.Metric.
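Metric is the marker interface implemented by every Dropwizard metric type (gauges, counters, histograms, meters, timers), and a MetricSet groups named Metric instances so they can be registered in bulk. As a minimal sketch of the pattern the examples below build on (the object name and metric name here are hypothetical, not taken from any of the projects), a custom MetricSet and its registration might look like this:

import java.util

import com.codahale.metrics.{Gauge, Metric, MetricRegistry, MetricSet}

import scala.collection.JavaConverters._

// Hypothetical example: a MetricSet exposing a single uptime gauge.
object UptimeMetricSet extends MetricSet {
  private val startedAt = System.currentTimeMillis()

  override def getMetrics: util.Map[String, Metric] =
    Map[String, Metric](
      "uptime.millis" -> new Gauge[Long] {
        override def getValue: Long = System.currentTimeMillis() - startedAt
      }
    ).asJava
}

object UptimeExample extends App {
  val registry = new MetricRegistry
  registry.registerAll(UptimeMetricSet) // registers every entry of the set
  registry.getGauges.keySet().asScala.foreach(println)
}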
Example 1
Source File: JvmMetricSet.scala From daml with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.metrics

import java.util

import com.codahale.metrics.jvm.{
  ClassLoadingGaugeSet,
  GarbageCollectorMetricSet,
  JvmAttributeGaugeSet,
  MemoryUsageGaugeSet,
  ThreadStatesGaugeSet
}
import com.codahale.metrics.{Metric, MetricSet}
import com.daml.metrics.JvmMetricSet._

import scala.collection.JavaConverters._

class JvmMetricSet extends MetricSet {
  private val metricSets = Map(
    "class_loader" -> new ClassLoadingGaugeSet,
    "garbage_collector" -> new GarbageCollectorMetricSet,
    "attributes" -> new JvmAttributeGaugeSet,
    "memory_usage" -> new MemoryUsageGaugeSet,
    "thread_states" -> new ThreadStatesGaugeSet,
  )

  override def getMetrics: util.Map[String, Metric] =
    metricSets.flatMap { case (metricSetName, metricSet) =>
      val metricSetPrefix = Prefix :+ metricSetName
      metricSet.getMetrics.asScala.map { case (metricName, metric) =>
        (metricSetPrefix :+ metricName).toString -> metric
      }
    }.asJava
}

object JvmMetricSet {
  private val Prefix = MetricName("jvm")
}
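JvmMetricSet flattens several of Dropwizard's built-in JVM gauge sets under a common "jvm" prefix (MetricName is daml's own dotted-name helper). A hedged sketch of how such a set might be wired into a registry, assuming only a plain MetricRegistry (the object name below is illustrative, not part of the daml codebase):

import com.codahale.metrics.MetricRegistry
import com.daml.metrics.JvmMetricSet

import scala.collection.JavaConverters._

// Hypothetical wiring: registerAll copies every entry of the set into the
// registry under its prefixed name, e.g. "jvm.memory_usage.heap.used".
object JvmMetricsWiring extends App {
  val registry = new MetricRegistry
  registry.registerAll(new JvmMetricSet)
  registry.getGauges.keySet().asScala.foreach(println)
}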
Example 2
Source File: JvmMetricsSet.scala From incubator-retired-gearpump with Apache License 2.0
package org.apache.gearpump.metrics

import java.util

import scala.collection.JavaConverters._

import com.codahale.metrics.jvm.{MemoryUsageGaugeSet, ThreadStatesGaugeSet}
import com.codahale.metrics.{Metric, MetricSet}

class JvmMetricsSet(name: String) extends MetricSet {

  override def getMetrics: util.Map[String, Metric] = {
    val memoryMetrics = new MemoryUsageGaugeSet().getMetrics.asScala
    val threadMetrics = new ThreadStatesGaugeSet().getMetrics.asScala
    Map(
      s"$name:memory.total.used" -> memoryMetrics("total.used"),
      s"$name:memory.total.committed" -> memoryMetrics("total.committed"),
      s"$name:memory.total.max" -> memoryMetrics("total.max"),
      s"$name:memory.heap.used" -> memoryMetrics("heap.used"),
      s"$name:memory.heap.committed" -> memoryMetrics("heap.committed"),
      s"$name:memory.heap.max" -> memoryMetrics("heap.max"),
      s"$name:thread.count" -> threadMetrics("count"),
      s"$name:thread.daemon.count" -> threadMetrics("daemon.count")
    ).asJava
  }
}
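Where Example 1 registers every JVM gauge, this set cherry-picks a fixed subset of memory and thread gauges and scopes each name with the application name passed to the constructor. A hedged sketch of reporting these to the console (the ConsoleReporter wiring and the "worker-1" name are illustrative, not taken from gearpump):

import java.util.concurrent.TimeUnit

import com.codahale.metrics.{ConsoleReporter, MetricRegistry}
import org.apache.gearpump.metrics.JvmMetricsSet

// Hypothetical wiring: dump the selected JVM gauges to stdout every ten seconds.
object JvmMetricsReport extends App {
  val registry = new MetricRegistry
  registry.registerAll(new JvmMetricsSet("worker-1"))

  ConsoleReporter.forRegistry(registry)
    .convertRatesTo(TimeUnit.SECONDS)
    .convertDurationsTo(TimeUnit.MILLISECONDS)
    .build()
    .start(10, TimeUnit.SECONDS)
}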
Example 3
Source File: RemoraDatadogReporter.scala From remora with MIT License
package reporter

import java.util.concurrent.TimeUnit

import com.codahale.metrics.{Metric, MetricFilter, MetricRegistry}
import config.DataDog
import models.RegistryKafkaMetric
import org.coursera.metrics.datadog.TaggedName.TaggedNameBuilder
import org.coursera.metrics.datadog.transport.UdpTransport
import org.coursera.metrics.datadog.{DatadogReporter, MetricNameFormatter}

class RemoraDatadogReporter(metricRegistry: MetricRegistry, datadogConfig: DataDog) {

  private val transport = new UdpTransport.Builder()
    .withStatsdHost(datadogConfig.agentHost)
    .withPort(datadogConfig.agentPort)
    .build

  private val kafkaConsumerGroupFilter: MetricFilter = new MetricFilter {
    override def matches(metricName: String, metric: Metric): Boolean = {
      val trackedConsumerGroups = datadogConfig.trackedConsumerGroups
      trackedConsumerGroups.isEmpty ||
        trackedConsumerGroups.exists(consumerGroupName => metricName.contains(consumerGroupName))
    }
  }

  private def metricNameFormatter(removeTagsFromMetricName: Boolean): MetricNameFormatter = new MetricNameFormatter {
    override def format(nameWithPrefix: String, path: String*): String = {
      RegistryKafkaMetric.decode(nameWithPrefix.replaceFirst(s"${datadogConfig.name}\\.", "")) match {
        case Some(registryKafkaMetric) =>
          val builder = new TaggedNameBuilder().metricName(
            if (removeTagsFromMetricName) buildNameWithoutTags(registryKafkaMetric) else nameWithPrefix
          ).addTag("topic", registryKafkaMetric.topic)
            .addTag("group", registryKafkaMetric.group)
          registryKafkaMetric.partition.foreach(p => builder.addTag("partition", p))
          builder.build().encode()
        case None => nameWithPrefix
      }
    }
  }

  private def buildNameWithoutTags(registryKafkaMetric: RegistryKafkaMetric): String =
    s"${datadogConfig.name}.${registryKafkaMetric.prefix}.${registryKafkaMetric.suffix}"

  def startReporter(): Unit = DatadogReporter
    .forRegistry(metricRegistry)
    .withPrefix(datadogConfig.name)
    .withTransport(transport)
    .filter(kafkaConsumerGroupFilter)
    .withMetricNameFormatter(metricNameFormatter(datadogConfig.removeTagsFromMetricName))
    .build
    .start(datadogConfig.intervalMinutes, TimeUnit.MINUTES)
}
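The reporter uses a MetricFilter to forward only metrics belonging to tracked consumer groups (an empty list matches everything) and a MetricNameFormatter to turn Remora's encoded Kafka metric names into Datadog tags. A hedged sketch of the normal wiring (the DataDog field order is inferred from the spec in Example 4; the prefix and consumer-group values are placeholders):

import com.codahale.metrics.MetricRegistry

import config.DataDog
import reporter.RemoraDatadogReporter

// Hypothetical wiring: only metrics mentioning the listed consumer group are
// forwarded, via StatsD/UDP, to a local Datadog agent every minute.
object DatadogWiring extends App {
  val registry = new MetricRegistry
  val datadogConfig = DataDog(
    enabled = true,
    "remora",                    // metric prefix
    1,                           // reporting interval, in minutes
    "localhost",                 // Datadog agent host
    8125,                        // Datadog agent (StatsD) port
    List("my-consumer-group"),   // consumer groups to track; empty means "all"
    removeTagsFromMetricName = false
  )
  new RemoraDatadogReporter(registry, datadogConfig).startReporter()
}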
Example 4
Source File: RemoraDatadogReporterSpec.scala From remora with MIT License
package reporter

import com.codahale.metrics.{Metric, MetricFilter, MetricRegistry}
import config.DataDog
import org.coursera.metrics.datadog.MetricNameFormatter
import org.scalamock.scalatest.MockFactory
import org.scalatest.{FlatSpec, Matchers, PrivateMethodTester}

class RemoraDatadogReporterSpec extends FlatSpec with Matchers with PrivateMethodTester with MockFactory {

  private val metricRegistry: MetricRegistry = new MetricRegistry
  private val metric: Metric = mock[Metric]
  private val config = DataDog(enabled = true, "test", 1, "localhost", 8125, List.empty, removeTagsFromMetricName = false)
  private val configRemoveTags = DataDog(enabled = true, "test", 1, "localhost", 8125, List.empty, removeTagsFromMetricName = true)

  "Metrics filter" should "match any metric when no filter is given" in {
    val filter = buildMetricFilter(List.empty)
    filter.matches("any_metrics_name", metric) should be(true)
  }

  it should "match metric containing consumer group name" in {
    val kafkaConsumerGroupName = "test-consumer1"
    val filter = buildMetricFilter(List(kafkaConsumerGroupName))
    filter.matches(s"metric-name-$kafkaConsumerGroupName", metric) should be(true)
  }

  it should "not match metric containing consumer group name" in {
    val filter = buildMetricFilter(List("test-consumer1"))
    filter.matches("some-metrics", metric) should be(false)
  }

  "Metric name formatter" should "add tag information if metric is well formatted" in {
    val formatter = getMetricNameFormatter(config)
    formatter.format(s"${config.name}.gauge.test.1.test-consumer.lag") should be(
      s"${config.name}.gauge.test.1.test-consumer.lag[topic:test,group:test-consumer,partition:1]")
  }

  it should "not add partition tag information if no partition" in {
    val formatter = getMetricNameFormatter(config)
    formatter.format(s"${config.name}.gauge.test-topic.test-consumer.totalLag") should be(
      s"${config.name}.gauge.test-topic.test-consumer.totalLag[topic:test-topic,group:test-consumer]")
  }

  it should "not add tag information otherwise" in {
    val formatter = getMetricNameFormatter(config)
    formatter.format(s"${config.name}.gauge.test_1_faulty_test-consumer__lag") should be(
      s"${config.name}.gauge.test_1_faulty_test-consumer__lag")
  }

  "Metric name formatter without tags" should "add tag information if metric is well formatted" in {
    val formatter = getMetricNameFormatter(configRemoveTags)
    formatter.format(s"${configRemoveTags.name}.gauge.test.1.test-consumer.lag") should be(
      s"${configRemoveTags.name}.gauge.lag[topic:test,group:test-consumer,partition:1]")
  }

  it should "not add partition tag information if no partition" in {
    val formatter = getMetricNameFormatter(configRemoveTags)
    formatter.format(s"${configRemoveTags.name}.gauge.test-topic.test-consumer.totalLag") should be(
      s"${configRemoveTags.name}.gauge.totalLag[topic:test-topic,group:test-consumer]")
  }

  private def buildMetricFilter(kafkaConsumerList: List[String], removeTags: Boolean = false): MetricFilter = {
    val config = DataDog(enabled = true, "test", 1, "localhost", 8125, kafkaConsumerList, removeTags)
    val reporter = new RemoraDatadogReporter(metricRegistry, config)
    reporter invokePrivate PrivateMethod[MetricFilter]('kafkaConsumerGroupFilter)()
  }

  private def getMetricNameFormatter(config: DataDog): MetricNameFormatter = {
    val reporter = new RemoraDatadogReporter(metricRegistry, config)
    reporter invokePrivate PrivateMethod[MetricNameFormatter]('metricNameFormatter)(config.removeTagsFromMetricName)
  }
}
Example 5
Source File: APIMetrics.scala From vinyldns with Apache License 2.0
package vinyldns.api.metrics

import java.util.concurrent.TimeUnit

import cats.effect.{Blocker, ContextShift, IO}
import com.codahale.metrics.Slf4jReporter.LoggingLevel
import com.codahale.metrics.{Metric, MetricFilter, ScheduledReporter, Slf4jReporter}
import com.typesafe.config.Config
import org.slf4j.LoggerFactory
import pureconfig._
import pureconfig.generic.auto._
import pureconfig.module.catseffect.syntax._
import vinyldns.core.VinylDNSMetrics

final case class MemoryMetricsSettings(logEnabled: Boolean, logSeconds: Int)
final case class APIMetricsSettings(memory: MemoryMetricsSettings)

object APIMetrics {
  private implicit val cs: ContextShift[IO] =
    IO.contextShift(scala.concurrent.ExecutionContext.global)

  // Output all memory metrics to the log, do not start unless configured
  private val logReporter = Slf4jReporter
    .forRegistry(VinylDNSMetrics.metricsRegistry)
    .filter(new MetricFilter {
      def matches(name: String, metric: Metric): Boolean =
        name.startsWith("memory")
    })
    .withLoggingLevel(LoggingLevel.INFO)
    .outputTo(LoggerFactory.getLogger("MemStats"))
    .convertRatesTo(TimeUnit.SECONDS)
    .convertDurationsTo(TimeUnit.MILLISECONDS)
    .build()

  def initialize(
      settings: APIMetricsSettings,
      reporter: ScheduledReporter = logReporter
  ): IO[Unit] = IO {
    if (settings.memory.logEnabled) {
      reporter.start(settings.memory.logSeconds, TimeUnit.SECONDS)
    }
  }

  def loadSettings(config: Config): IO[APIMetricsSettings] =
    Blocker[IO].use(
      ConfigSource.fromConfig(config).loadF[IO, APIMetricsSettings](_)
    )
}
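Here the filter and reporter come preconfigured: pureconfig loads APIMetricsSettings from a Typesafe Config, and initialize starts the Slf4j reporter only when memory logging is enabled. A hedged invocation sketch (the object name, HOCON snippet, and unsafeRunSync call are illustrative; pureconfig's default kebab-case mapping of the field names is assumed):

import cats.effect.IO
import com.typesafe.config.ConfigFactory
import vinyldns.api.metrics.APIMetrics

// Hypothetical invocation: load the settings from HOCON, then start the
// memory-stats reporter if it is enabled.
object MetricsBoot extends App {
  val config = ConfigFactory.parseString(
    """
      |memory {
      |  log-enabled = true
      |  log-seconds = 60
      |}
    """.stripMargin
  )

  val program: IO[Unit] = for {
    settings <- APIMetrics.loadSettings(config)
    _        <- APIMetrics.initialize(settings)
  } yield ()

  program.unsafeRunSync()
}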