com.codahale.metrics.Slf4jReporter Scala Examples
The following examples show how to use com.codahale.metrics.Slf4jReporter.
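Before the project examples, here is a minimal, self-contained sketch of the pattern they all share: build a reporter from a MetricRegistry, point it at an SLF4J logger, choose the rate and duration units, and start it on a fixed period. The registry contents, logger name, and reporting period below are illustrative choices, not taken from any of the projects.

import java.util.concurrent.TimeUnit

import com.codahale.metrics.{MetricRegistry, Slf4jReporter}
import org.slf4j.LoggerFactory

object Slf4jReporterExample extends App {
  // Registry holding the application's metrics (metric name is illustrative).
  val registry = new MetricRegistry
  val requests = registry.meter("example.requests")

  // Report every registered metric to the "metrics" SLF4J logger once per minute.
  val reporter: Slf4jReporter = Slf4jReporter
    .forRegistry(registry)
    .outputTo(LoggerFactory.getLogger("metrics"))
    .convertRatesTo(TimeUnit.SECONDS)
    .convertDurationsTo(TimeUnit.MILLISECONDS)
    .build()

  reporter.start(1, TimeUnit.MINUTES)

  requests.mark()

  // Flush a final report and release the scheduler thread on shutdown.
  sys.addShutdownHook {
    reporter.report()
    reporter.stop()
  }
}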
Example 1
Source File: MetricsReporting.scala From daml with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.platform.sandbox.metrics

import java.time.Duration
import java.util.concurrent.TimeUnit

import com.codahale.metrics.Slf4jReporter.LoggingLevel
import com.codahale.metrics.jmx.JmxReporter
import com.codahale.metrics.{MetricRegistry, Reporter, Slf4jReporter}
import com.daml.metrics.{JvmMetricSet, Metrics}
import com.daml.platform.configuration.MetricsReporter
import com.daml.resources.{Resource, ResourceOwner}

import scala.concurrent.{ExecutionContext, Future}

final class MetricsReporting(
    jmxDomain: String,
    extraMetricsReporter: Option[MetricsReporter],
    extraMetricsReportingInterval: Duration,
) extends ResourceOwner[Metrics] {
  def acquire()(implicit executionContext: ExecutionContext): Resource[Metrics] = {
    val registry = new MetricRegistry
    registry.registerAll(new JvmMetricSet)
    for {
      slf4JReporter <- acquire(newSlf4jReporter(registry))
      _ <- acquire(newJmxReporter(registry))
        .map(_.start())
      _ <- extraMetricsReporter.fold(Resource.unit) { reporter =>
        acquire(reporter.register(registry))
          .map(_.start(extraMetricsReportingInterval.getSeconds, TimeUnit.SECONDS))
      }
      // Trigger a report to the SLF4J logger on shutdown.
      _ <- Resource(Future.successful(slf4JReporter))(reporter =>
        Future.successful(reporter.report()))
    } yield new Metrics(registry)
  }

  private def newJmxReporter(registry: MetricRegistry): JmxReporter =
    JmxReporter
      .forRegistry(registry)
      .inDomain(jmxDomain)
      .build()

  private def newSlf4jReporter(registry: MetricRegistry): Slf4jReporter =
    Slf4jReporter
      .forRegistry(registry)
      .convertRatesTo(TimeUnit.SECONDS)
      .convertDurationsTo(TimeUnit.MILLISECONDS)
      .withLoggingLevel(LoggingLevel.DEBUG)
      .build()

  private def acquire[T <: Reporter](reporter: => T)(
      implicit executionContext: ExecutionContext
  ): Resource[T] =
    ResourceOwner
      .forCloseable(() => reporter)
      .acquire()
}
Example 2
Source File: Slf4jSink.scala From drizzle-spark with Apache License 2.0
package org.apache.spark.metrics.sink

import java.util.Properties
import java.util.concurrent.TimeUnit

import com.codahale.metrics.{MetricRegistry, Slf4jReporter}

import org.apache.spark.SecurityManager
import org.apache.spark.metrics.MetricsSystem

private[spark] class Slf4jSink(
    val property: Properties,
    val registry: MetricRegistry,
    securityMgr: SecurityManager)
  extends Sink {
  val SLF4J_DEFAULT_PERIOD = 10
  val SLF4J_DEFAULT_UNIT = "SECONDS"

  val SLF4J_KEY_PERIOD = "period"
  val SLF4J_KEY_UNIT = "unit"

  val pollPeriod = Option(property.getProperty(SLF4J_KEY_PERIOD)) match {
    case Some(s) => s.toInt
    case None => SLF4J_DEFAULT_PERIOD
  }

  val pollUnit: TimeUnit = Option(property.getProperty(SLF4J_KEY_UNIT)) match {
    case Some(s) => TimeUnit.valueOf(s.toUpperCase())
    case None => TimeUnit.valueOf(SLF4J_DEFAULT_UNIT)
  }

  MetricsSystem.checkMinimalPollingPeriod(pollUnit, pollPeriod)

  val reporter: Slf4jReporter = Slf4jReporter.forRegistry(registry)
    .convertDurationsTo(TimeUnit.MILLISECONDS)
    .convertRatesTo(TimeUnit.SECONDS)
    .build()

  override def start() {
    reporter.start(pollPeriod, pollUnit)
  }

  override def stop() {
    reporter.stop()
  }

  override def report() {
    reporter.report()
  }
}
Example 3
Source File: package.scala From zio-metrics with Apache License 2.0
package zio.metrics.dropwizard

import zio.{ Has, Layer, Task, ZLayer }
import java.util.concurrent.TimeUnit
import java.io.File
import java.util.Locale
import java.net.InetSocketAddress
import org.slf4j.LoggerFactory
import java.{ util => ju }

package object reporters {

  import com.codahale.metrics.MetricRegistry
  import com.codahale.metrics.MetricFilter
  import com.codahale.metrics.graphite.Graphite
  import com.codahale.metrics.graphite.GraphiteReporter
  import com.codahale.metrics.ConsoleReporter
  import com.codahale.metrics.Slf4jReporter
  import com.codahale.metrics.CsvReporter
  import com.codahale.metrics.jmx.JmxReporter
  import com.codahale.metrics.Reporter

  type Reporters = Has[Reporters.Service]

  object Reporters {
    trait Service {
      def jmx(r: MetricRegistry): Task[JmxReporter]

      def console(r: MetricRegistry): Task[ConsoleReporter]

      def slf4j(r: MetricRegistry, duration: Int, unit: TimeUnit, loggerName: String): Task[Slf4jReporter]

      def csv(r: MetricRegistry, file: File, locale: Locale): Task[Reporter]

      def graphite(r: MetricRegistry, host: String, port: Int, prefix: String): Task[GraphiteReporter]
    }

    val live: Layer[Nothing, Reporters] = ZLayer.succeed(new Service {

      def jmx(r: MetricRegistry): zio.Task[JmxReporter] = Task(JmxReporter.forRegistry(r).build())

      def console(r: MetricRegistry): Task[ConsoleReporter] = Task(
        ConsoleReporter
          .forRegistry(r)
          .convertRatesTo(TimeUnit.SECONDS)
          .convertDurationsTo(TimeUnit.MILLISECONDS)
          .build()
      )

      def slf4j(r: MetricRegistry, duration: Int, unit: TimeUnit, loggerName: String): Task[Slf4jReporter] =
        Task(
          Slf4jReporter
            .forRegistry(r)
            .outputTo(LoggerFactory.getLogger(loggerName))
            .convertRatesTo(TimeUnit.SECONDS)
            .convertDurationsTo(TimeUnit.MILLISECONDS)
            .build()
        )

      def csv(r: MetricRegistry, file: File, locale: ju.Locale): zio.Task[Reporter] = Task(
        CsvReporter
          .forRegistry(r)
          .formatFor(locale)
          .convertRatesTo(TimeUnit.SECONDS)
          .convertDurationsTo(TimeUnit.MILLISECONDS)
          .build(file)
      )

      def graphite(r: MetricRegistry, host: String, port: Int, prefix: String): zio.Task[GraphiteReporter] =
        Task {
          val graphite = new Graphite(new InetSocketAddress(host, port))
          GraphiteReporter
            .forRegistry(r)
            .prefixedWith(prefix)
            .convertRatesTo(TimeUnit.SECONDS)
            .convertDurationsTo(TimeUnit.MILLISECONDS)
            .filter(MetricFilter.ALL)
            .build(graphite)
        }
    })
  }
}
Example 4
Source File: Slf4jSink.scala From sparkoscope with Apache License 2.0
package org.apache.spark.metrics.sink

import java.util.Properties
import java.util.concurrent.TimeUnit

import com.codahale.metrics.{MetricRegistry, Slf4jReporter}

import org.apache.spark.SecurityManager
import org.apache.spark.metrics.MetricsSystem

private[spark] class Slf4jSink(
    val property: Properties,
    val registry: MetricRegistry,
    securityMgr: SecurityManager)
  extends Sink {
  val SLF4J_DEFAULT_PERIOD = 10
  val SLF4J_DEFAULT_UNIT = "SECONDS"

  val SLF4J_KEY_PERIOD = "period"
  val SLF4J_KEY_UNIT = "unit"

  val pollPeriod = Option(property.getProperty(SLF4J_KEY_PERIOD)) match {
    case Some(s) => s.toInt
    case None => SLF4J_DEFAULT_PERIOD
  }

  val pollUnit: TimeUnit = Option(property.getProperty(SLF4J_KEY_UNIT)) match {
    case Some(s) => TimeUnit.valueOf(s.toUpperCase())
    case None => TimeUnit.valueOf(SLF4J_DEFAULT_UNIT)
  }

  MetricsSystem.checkMinimalPollingPeriod(pollUnit, pollPeriod)

  val reporter: Slf4jReporter = Slf4jReporter.forRegistry(registry)
    .convertDurationsTo(TimeUnit.MILLISECONDS)
    .convertRatesTo(TimeUnit.SECONDS)
    .build()

  override def start() {
    reporter.start(pollPeriod, pollUnit)
  }

  override def stop() {
    reporter.stop()
  }

  override def report() {
    reporter.report()
  }
}
Example 5
Source File: Slf4jSink.scala From multi-tenancy-spark with Apache License 2.0
package org.apache.spark.metrics.sink

import java.util.Properties
import java.util.concurrent.TimeUnit

import com.codahale.metrics.{MetricRegistry, Slf4jReporter}

import org.apache.spark.SecurityManager
import org.apache.spark.metrics.MetricsSystem

private[spark] class Slf4jSink(
    val property: Properties,
    val registry: MetricRegistry,
    securityMgr: SecurityManager)
  extends Sink {
  val SLF4J_DEFAULT_PERIOD = 10
  val SLF4J_DEFAULT_UNIT = "SECONDS"

  val SLF4J_KEY_PERIOD = "period"
  val SLF4J_KEY_UNIT = "unit"

  val pollPeriod = Option(property.getProperty(SLF4J_KEY_PERIOD)) match {
    case Some(s) => s.toInt
    case None => SLF4J_DEFAULT_PERIOD
  }

  val pollUnit: TimeUnit = Option(property.getProperty(SLF4J_KEY_UNIT)) match {
    case Some(s) => TimeUnit.valueOf(s.toUpperCase())
    case None => TimeUnit.valueOf(SLF4J_DEFAULT_UNIT)
  }

  MetricsSystem.checkMinimalPollingPeriod(pollUnit, pollPeriod)

  val reporter: Slf4jReporter = Slf4jReporter.forRegistry(registry)
    .convertDurationsTo(TimeUnit.MILLISECONDS)
    .convertRatesTo(TimeUnit.SECONDS)
    .build()

  override def start() {
    reporter.start(pollPeriod, pollUnit)
  }

  override def stop() {
    reporter.stop()
  }

  override def report() {
    reporter.report()
  }
}
Example 6
Source File: Slf4jSink.scala From iolap with Apache License 2.0
package org.apache.spark.metrics.sink

import java.util.Properties
import java.util.concurrent.TimeUnit

import com.codahale.metrics.{Slf4jReporter, MetricRegistry}

import org.apache.spark.SecurityManager
import org.apache.spark.metrics.MetricsSystem

private[spark] class Slf4jSink(
    val property: Properties,
    val registry: MetricRegistry,
    securityMgr: SecurityManager)
  extends Sink {
  val SLF4J_DEFAULT_PERIOD = 10
  val SLF4J_DEFAULT_UNIT = "SECONDS"

  val SLF4J_KEY_PERIOD = "period"
  val SLF4J_KEY_UNIT = "unit"

  val pollPeriod = Option(property.getProperty(SLF4J_KEY_PERIOD)) match {
    case Some(s) => s.toInt
    case None => SLF4J_DEFAULT_PERIOD
  }

  val pollUnit: TimeUnit = Option(property.getProperty(SLF4J_KEY_UNIT)) match {
    case Some(s) => TimeUnit.valueOf(s.toUpperCase())
    case None => TimeUnit.valueOf(SLF4J_DEFAULT_UNIT)
  }

  MetricsSystem.checkMinimalPollingPeriod(pollUnit, pollPeriod)

  val reporter: Slf4jReporter = Slf4jReporter.forRegistry(registry)
    .convertDurationsTo(TimeUnit.MILLISECONDS)
    .convertRatesTo(TimeUnit.SECONDS)
    .build()

  override def start() {
    reporter.start(pollPeriod, pollUnit)
  }

  override def stop() {
    reporter.stop()
  }

  override def report() {
    reporter.report()
  }
}
Example 7
Source File: APIMetrics.scala From vinyldns with Apache License 2.0
package vinyldns.api.metrics

import java.util.concurrent.TimeUnit

import cats.effect.{Blocker, ContextShift, IO}
import com.codahale.metrics.Slf4jReporter.LoggingLevel
import com.codahale.metrics.{Metric, MetricFilter, ScheduledReporter, Slf4jReporter}
import com.typesafe.config.Config
import org.slf4j.LoggerFactory
import pureconfig._
import pureconfig.generic.auto._
import pureconfig.module.catseffect.syntax._
import vinyldns.core.VinylDNSMetrics

final case class MemoryMetricsSettings(logEnabled: Boolean, logSeconds: Int)
final case class APIMetricsSettings(memory: MemoryMetricsSettings)

object APIMetrics {
  private implicit val cs: ContextShift[IO] =
    IO.contextShift(scala.concurrent.ExecutionContext.global)

  // Output all memory metrics to the log, do not start unless configured
  private val logReporter = Slf4jReporter
    .forRegistry(VinylDNSMetrics.metricsRegistry)
    .filter(new MetricFilter {
      def matches(name: String, metric: Metric): Boolean =
        name.startsWith("memory")
    })
    .withLoggingLevel(LoggingLevel.INFO)
    .outputTo(LoggerFactory.getLogger("MemStats"))
    .convertRatesTo(TimeUnit.SECONDS)
    .convertDurationsTo(TimeUnit.MILLISECONDS)
    .build()

  def initialize(
      settings: APIMetricsSettings,
      reporter: ScheduledReporter = logReporter
  ): IO[Unit] = IO {
    if (settings.memory.logEnabled) {
      reporter.start(settings.memory.logSeconds, TimeUnit.SECONDS)
    }
  }

  def loadSettings(config: Config): IO[APIMetricsSettings] =
    Blocker[IO].use(
      ConfigSource.fromConfig(config).loadF[IO, APIMetricsSettings](_)
    )
}
Example 8
Source File: Slf4jSink.scala From spark1.52 with Apache License 2.0
package org.apache.spark.metrics.sink

import java.util.Properties
import java.util.concurrent.TimeUnit

import com.codahale.metrics.{Slf4jReporter, MetricRegistry}

import org.apache.spark.SecurityManager
import org.apache.spark.metrics.MetricsSystem

private[spark] class Slf4jSink(
    val property: Properties,
    val registry: MetricRegistry,
    securityMgr: SecurityManager)
  extends Sink {
  val SLF4J_DEFAULT_PERIOD = 10
  val SLF4J_DEFAULT_UNIT = "SECONDS"

  val SLF4J_KEY_PERIOD = "period"
  val SLF4J_KEY_UNIT = "unit"

  val pollPeriod = Option(property.getProperty(SLF4J_KEY_PERIOD)) match {
    case Some(s) => s.toInt
    case None => SLF4J_DEFAULT_PERIOD
  }

  val pollUnit: TimeUnit = Option(property.getProperty(SLF4J_KEY_UNIT)) match {
    case Some(s) => TimeUnit.valueOf(s.toUpperCase())
    case None => TimeUnit.valueOf(SLF4J_DEFAULT_UNIT)
  }

  MetricsSystem.checkMinimalPollingPeriod(pollUnit, pollPeriod)

  val reporter: Slf4jReporter = Slf4jReporter.forRegistry(registry)
    .convertDurationsTo(TimeUnit.MILLISECONDS)
    .convertRatesTo(TimeUnit.SECONDS)
    .build()

  override def start() {
    reporter.start(pollPeriod, pollUnit)
  }

  override def stop() {
    reporter.stop()
  }

  override def report() {
    reporter.report()
  }
}
Example 9
Source File: Slf4jSink.scala From Spark-2.3.1 with Apache License 2.0
package org.apache.spark.metrics.sink

import java.util.{Locale, Properties}
import java.util.concurrent.TimeUnit

import com.codahale.metrics.{MetricRegistry, Slf4jReporter}

import org.apache.spark.SecurityManager
import org.apache.spark.metrics.MetricsSystem

private[spark] class Slf4jSink(
    val property: Properties,
    val registry: MetricRegistry,
    securityMgr: SecurityManager)
  extends Sink {
  val SLF4J_DEFAULT_PERIOD = 10
  val SLF4J_DEFAULT_UNIT = "SECONDS"

  val SLF4J_KEY_PERIOD = "period"
  val SLF4J_KEY_UNIT = "unit"

  val pollPeriod = Option(property.getProperty(SLF4J_KEY_PERIOD)) match {
    case Some(s) => s.toInt
    case None => SLF4J_DEFAULT_PERIOD
  }

  val pollUnit: TimeUnit = Option(property.getProperty(SLF4J_KEY_UNIT)) match {
    case Some(s) => TimeUnit.valueOf(s.toUpperCase(Locale.ROOT))
    case None => TimeUnit.valueOf(SLF4J_DEFAULT_UNIT)
  }

  MetricsSystem.checkMinimalPollingPeriod(pollUnit, pollPeriod)

  val reporter: Slf4jReporter = Slf4jReporter.forRegistry(registry)
    .convertDurationsTo(TimeUnit.MILLISECONDS)
    .convertRatesTo(TimeUnit.SECONDS)
    .build()

  override def start() {
    reporter.start(pollPeriod, pollUnit)
  }

  override def stop() {
    reporter.stop()
  }

  override def report() {
    reporter.report()
  }
}
Example 10
Source File: Slf4jSink.scala From BigDatalog with Apache License 2.0
package org.apache.spark.metrics.sink

import java.util.Properties
import java.util.concurrent.TimeUnit

import com.codahale.metrics.{Slf4jReporter, MetricRegistry}

import org.apache.spark.SecurityManager
import org.apache.spark.metrics.MetricsSystem

private[spark] class Slf4jSink(
    val property: Properties,
    val registry: MetricRegistry,
    securityMgr: SecurityManager)
  extends Sink {
  val SLF4J_DEFAULT_PERIOD = 10
  val SLF4J_DEFAULT_UNIT = "SECONDS"

  val SLF4J_KEY_PERIOD = "period"
  val SLF4J_KEY_UNIT = "unit"

  val pollPeriod = Option(property.getProperty(SLF4J_KEY_PERIOD)) match {
    case Some(s) => s.toInt
    case None => SLF4J_DEFAULT_PERIOD
  }

  val pollUnit: TimeUnit = Option(property.getProperty(SLF4J_KEY_UNIT)) match {
    case Some(s) => TimeUnit.valueOf(s.toUpperCase())
    case None => TimeUnit.valueOf(SLF4J_DEFAULT_UNIT)
  }

  MetricsSystem.checkMinimalPollingPeriod(pollUnit, pollPeriod)

  val reporter: Slf4jReporter = Slf4jReporter.forRegistry(registry)
    .convertDurationsTo(TimeUnit.MILLISECONDS)
    .convertRatesTo(TimeUnit.SECONDS)
    .build()

  override def start() {
    reporter.start(pollPeriod, pollUnit)
  }

  override def stop() {
    reporter.stop()
  }

  override def report() {
    reporter.report()
  }
}