com.codahale.metrics.json.MetricsModule Scala Examples
The following examples show how to use com.codahale.metrics.json.MetricsModule.
Each example notes the project it was taken from and that project's license.
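At its core, com.codahale.metrics.json.MetricsModule is a Jackson Module that teaches an ObjectMapper how to render a MetricRegistry as JSON; its three constructor arguments are the rate unit, the duration unit, and a flag controlling whether raw histogram/timer samples are included. A minimal, self-contained sketch (the object and metric names are illustrative, not taken from any of the projects below):

import java.util.concurrent.TimeUnit

import com.codahale.metrics.MetricRegistry
import com.codahale.metrics.json.MetricsModule
import com.fasterxml.jackson.databind.ObjectMapper

object MetricsJsonDemo extends App {
  val registry = new MetricRegistry()
  registry.counter("demo.requests").inc()  // an illustrative metric to serialize

  // Report rates per second and durations in milliseconds; omit raw samples.
  val mapper = new ObjectMapper().registerModule(
    new MetricsModule(TimeUnit.SECONDS, TimeUnit.MILLISECONDS, false))

  println(mapper.writerWithDefaultPrettyPrinter().writeValueAsString(registry))
}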
Example 1
Source File: MetricsServlet.scala From drizzle-spark with Apache License 2.0
package org.apache.spark.metrics.sink

import java.util.Properties
import java.util.concurrent.TimeUnit
import javax.servlet.http.HttpServletRequest

import com.codahale.metrics.MetricRegistry
import com.codahale.metrics.json.MetricsModule
import com.fasterxml.jackson.databind.ObjectMapper
import org.eclipse.jetty.servlet.ServletContextHandler

import org.apache.spark.{SecurityManager, SparkConf}
import org.apache.spark.ui.JettyUtils._

private[spark] class MetricsServlet(
    val property: Properties,
    val registry: MetricRegistry,
    securityMgr: SecurityManager)
  extends Sink {

  val SERVLET_KEY_PATH = "path"
  val SERVLET_KEY_SAMPLE = "sample"

  val SERVLET_DEFAULT_SAMPLE = false

  val servletPath = property.getProperty(SERVLET_KEY_PATH)

  val servletShowSample = Option(property.getProperty(SERVLET_KEY_SAMPLE)).map(_.toBoolean)
    .getOrElse(SERVLET_DEFAULT_SAMPLE)

  val mapper = new ObjectMapper().registerModule(
    new MetricsModule(TimeUnit.SECONDS, TimeUnit.MILLISECONDS, servletShowSample))

  def getHandlers(conf: SparkConf): Array[ServletContextHandler] = {
    Array[ServletContextHandler](
      createServletHandler(servletPath,
        new ServletParams(request => getMetricsSnapshot(request), "text/json"), securityMgr, conf)
    )
  }

  def getMetricsSnapshot(request: HttpServletRequest): String = {
    mapper.writeValueAsString(registry)
  }

  override def start() { }

  override def stop() { }

  override def report() { }
}
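This is Spark's built-in metrics sink: the handler returned by getHandlers is mounted on the web UI (at /metrics/json in a stock Spark configuration), and every request simply re-serializes the live registry. Setting the sink's "sample" property to true flips the third MetricsModule argument, so histogram and timer entries additionally carry their raw sample values in the JSON.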
Example 2
Source File: MetricsServlet.scala From sparkoscope with Apache License 2.0
The code is identical to Example 1: sparkoscope carries the stock Spark MetricsServlet.scala unchanged.
Example 3
Source File: MetricsServlet.scala From multi-tenancy-spark with Apache License 2.0
The code is identical to Example 1: multi-tenancy-spark also carries the stock Spark MetricsServlet.scala unchanged.
Example 4
Source File: MetricsServlet.scala From iolap with Apache License 2.0
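This copy appears to come from an older Spark lineage in which createServletHandler did not yet take a SparkConf, so getHandlers has no parameters and only SecurityManager is imported.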
package org.apache.spark.metrics.sink

import java.util.Properties
import java.util.concurrent.TimeUnit
import javax.servlet.http.HttpServletRequest

import com.codahale.metrics.MetricRegistry
import com.codahale.metrics.json.MetricsModule
import com.fasterxml.jackson.databind.ObjectMapper
import org.eclipse.jetty.servlet.ServletContextHandler

import org.apache.spark.SecurityManager
import org.apache.spark.ui.JettyUtils._

private[spark] class MetricsServlet(
    val property: Properties,
    val registry: MetricRegistry,
    securityMgr: SecurityManager)
  extends Sink {

  val SERVLET_KEY_PATH = "path"
  val SERVLET_KEY_SAMPLE = "sample"

  val SERVLET_DEFAULT_SAMPLE = false

  val servletPath = property.getProperty(SERVLET_KEY_PATH)

  val servletShowSample = Option(property.getProperty(SERVLET_KEY_SAMPLE)).map(_.toBoolean)
    .getOrElse(SERVLET_DEFAULT_SAMPLE)

  val mapper = new ObjectMapper().registerModule(
    new MetricsModule(TimeUnit.SECONDS, TimeUnit.MILLISECONDS, servletShowSample))

  def getHandlers: Array[ServletContextHandler] = {
    Array[ServletContextHandler](
      createServletHandler(servletPath,
        new ServletParams(request => getMetricsSnapshot(request), "text/json"), securityMgr)
    )
  }

  def getMetricsSnapshot(request: HttpServletRequest): String = {
    mapper.writeValueAsString(registry)
  }

  override def start() { }

  override def stop() { }

  override def report() { }
}
Example 5
Source File: Registry.scala From shield with MIT License
package shield.metrics

import java.util.concurrent.TimeUnit

import com.codahale.metrics.json.MetricsModule
import com.codahale.metrics.jvm.{ThreadStatesGaugeSet, MemoryUsageGaugeSet, GarbageCollectorMetricSet}
import com.codahale.metrics.{JvmAttributeGaugeSet, MetricRegistry}
import com.fasterxml.jackson.databind.ObjectMapper
import spray.http.{ContentTypes, HttpEntity, StatusCodes, HttpResponse}

// todo: Separate metrics per domain
object Registry {
  val metricRegistry = new MetricRegistry()
  metricRegistry.register("jvm.attribute", new JvmAttributeGaugeSet())
  metricRegistry.register("jvm.gc", new GarbageCollectorMetricSet())
  metricRegistry.register("jvm.memory", new MemoryUsageGaugeSet())
  metricRegistry.register("jvm.threads", new ThreadStatesGaugeSet())

  private val mapper = new ObjectMapper()
  mapper.registerModule(new MetricsModule(TimeUnit.SECONDS, TimeUnit.MILLISECONDS, false))
  private val writer = mapper.writerWithDefaultPrettyPrinter()

  def metricsResponse: HttpResponse = HttpResponse(StatusCodes.OK, HttpEntity(
    ContentTypes.`application/json`,
    writer.writeValueAsBytes(metricRegistry)
  ))
}
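A hypothetical spray-routing endpoint serving this response might look like the sketch below; the trait and route names are assumptions, not part of the shield source:

import shield.metrics.Registry
import spray.routing.HttpService

trait MetricsRoute extends HttpService {
  // GET /metrics answers with the pretty-printed JSON snapshot of the registry.
  val metricsRoute = path("metrics") {
    get {
      complete(Registry.metricsResponse)
    }
  }
}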
Example 6
Source File: JsonFileReporter.scala From kyuubi with Apache License 2.0
package yaooqinn.kyuubi.metrics

import java.io.{BufferedWriter, Closeable, IOException, OutputStreamWriter}
import java.util.{Timer, TimerTask}
import java.util.concurrent.TimeUnit

import scala.util.Try
import scala.util.control.NonFatal

import com.codahale.metrics.MetricRegistry
import com.codahale.metrics.json.MetricsModule
import com.fasterxml.jackson.databind.ObjectMapper
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.hadoop.fs.permission.FsPermission

import org.apache.kyuubi.Logging
import org.apache.spark.{KyuubiSparkUtil, SparkConf}
import org.apache.spark.KyuubiConf._

private[metrics] class JsonFileReporter(conf: SparkConf, registry: MetricRegistry)
  extends Closeable with Logging {

  private val jsonMapper = new ObjectMapper().registerModule(
    new MetricsModule(TimeUnit.MILLISECONDS, TimeUnit.MILLISECONDS, false))
  private val timer = new Timer(true)
  private val interval = KyuubiSparkUtil.timeStringAsMs(conf.get(METRICS_REPORT_INTERVAL))
  private val path = conf.get(METRICS_REPORT_LOCATION)
  private val hadoopConf = KyuubiSparkUtil.newConfiguration(conf)

  def start(): Unit = {
    timer.schedule(new TimerTask {
      var bw: BufferedWriter = _
      override def run(): Unit = try {
        val json = jsonMapper.writerWithDefaultPrettyPrinter().writeValueAsString(registry)
        val tmpPath = new Path(path + ".tmp")
        val tmpPathUri = tmpPath.toUri
        val fs = if (tmpPathUri.getScheme == null && tmpPathUri.getAuthority == null) {
          FileSystem.getLocal(hadoopConf)
        } else {
          FileSystem.get(tmpPathUri, hadoopConf)
        }
        fs.delete(tmpPath, true)
        bw = new BufferedWriter(new OutputStreamWriter(fs.create(tmpPath, true)))
        bw.write(json)
        bw.close()
        fs.setPermission(tmpPath, FsPermission.createImmutable(Integer.parseInt("644", 8).toShort))
        val finalPath = new Path(path)
        fs.rename(tmpPath, finalPath)
        fs.setPermission(finalPath,
          FsPermission.createImmutable(Integer.parseInt("644", 8).toShort))
      } catch {
        case NonFatal(e) => error("Error writing metrics to json file: " + path, e)
      } finally {
        if (bw != null) {
          Try(bw.close())
        }
      }
    }, 0, interval)
  }

  override def close(): Unit = {
    timer.cancel()
  }
}
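Two details worth noting: the mapper here reports rates in milliseconds (first MetricsModule argument) rather than the per-second rates used by the servlet examples, and each snapshot is written to path + ".tmp" and then renamed over the final path, so a concurrent reader never observes a half-written file. Driving the reporter is straightforward; a sketch, assuming a configured SparkConf and a populated registry:

val reporter = new JsonFileReporter(conf, registry)
reporter.start()  // schedules a dump every METRICS_REPORT_INTERVAL
// ... on shutdown:
reporter.close()  // cancels the underlying timer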
Example 7
Source File: MetricsServlet.scala From spark1.52 with Apache License 2.0
The code is the same pre-SparkConf variant as Example 4, with one added inline comment (translated from Chinese) above getHandlers:

  // Ultimately builds the ServletContextHandler that serves /metrics/json requests;
  // the actual handling is done by getMetricsSnapshot, which serializes the
  // registry to JSON with Jackson.
  def getHandlers: Array[ServletContextHandler] = {
    Array[ServletContextHandler](
      createServletHandler(servletPath,
        new ServletParams(request => getMetricsSnapshot(request), "text/json"), securityMgr)
    )
  }
Example 8
Source File: MetricsServlet.scala From Spark-2.3.1 with Apache License 2.0
The code is identical to Example 1.
Example 9
Source File: MetricsServlet.scala From BigDatalog with Apache License 2.0
Identical to Example 1 except for the order of names in the org.apache.spark import ({SparkConf, SecurityManager} rather than {SecurityManager, SparkConf}).