scala.collection.JavaConverters.seqAsJavaListConverter Scala Examples
The following examples show how to use scala.collection.JavaConverters.seqAsJavaListConverter.
Each example is taken from an open-source project; the source file, project, and license are noted in the header above each example.
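Before the project examples, here is a minimal, self-contained sketch of what the converter provides (the object and value names are illustrative only, not taken from any project below): importing seqAsJavaListConverter enables an .asJava extension method on any Seq[A], returning a java.util.List[A] view backed by the original sequence rather than a copy.

import scala.collection.JavaConverters.seqAsJavaListConverter

object SeqAsJavaListSketch {
  def main(args: Array[String]): Unit = {
    val labels: Seq[String] = List("dispatcherName", "poolName")

    // .asJava wraps the Seq in a java.util.List view; no elements are copied.
    val javaLabels: java.util.List[String] = labels.asJava

    println(javaLabels.size()) // 2
    println(javaLabels.get(0)) // dispatcherName

    // Because the view is backed by an immutable Seq, Java-side mutation
    // such as javaLabels.add("x") throws UnsupportedOperationException.
  }
}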
Example 1
Source File: ThreadPoolMetrics.scala From prometheus-akka with Apache License 2.0
package com.workday.prometheus.akka

import java.util.Collections
import java.util.concurrent.ThreadPoolExecutor

import scala.collection.JavaConverters.seqAsJavaListConverter
import scala.collection.concurrent.TrieMap

import io.prometheus.client.Collector
import io.prometheus.client.Collector.MetricFamilySamples
import io.prometheus.client.GaugeMetricFamily

object ThreadPoolMetrics extends Collector {

  val map = TrieMap[String, Option[ThreadPoolExecutor]]()
  this.register()

  override def collect(): java.util.List[MetricFamilySamples] = {
    // GaugeMetricFamily expects label names as a java.util.List[String],
    // hence the .asJava conversion.
    val dispatcherNameList = List("dispatcherName").asJava
    val activeThreadCountGauge = new GaugeMetricFamily("akka_dispatcher_threadpoolexecutor_active_thread_count",
      "Akka ThreadPool Dispatcher Active Thread Count", dispatcherNameList)
    val corePoolSizeGauge = new GaugeMetricFamily("akka_dispatcher_threadpoolexecutor_core_pool_size",
      "Akka ThreadPool Dispatcher Core Pool Size", dispatcherNameList)
    val currentPoolSizeGauge = new GaugeMetricFamily("akka_dispatcher_threadpoolexecutor_current_pool_size",
      "Akka ThreadPool Dispatcher Current Pool Size", dispatcherNameList)
    val largestPoolSizeGauge = new GaugeMetricFamily("akka_dispatcher_threadpoolexecutor_largest_pool_size",
      "Akka ThreadPool Dispatcher Largest Pool Size", dispatcherNameList)
    val maxPoolSizeGauge = new GaugeMetricFamily("akka_dispatcher_threadpoolexecutor_max_pool_size",
      "Akka ThreadPool Dispatcher Max Pool Size", dispatcherNameList)
    val completedTaskCountGauge = new GaugeMetricFamily("akka_dispatcher_threadpoolexecutor_completed_task_count",
      "Akka ThreadPoolExecutor Dispatcher Completed Task Count", dispatcherNameList)
    val totalTaskCountGauge = new GaugeMetricFamily("akka_dispatcher_threadpoolexecutor_total_task_count",
      "Akka ThreadPoolExecutor Dispatcher Total Task Count", dispatcherNameList)

    map.foreach { case (dispatcherName, tpeOption) =>
      // Shadows the label-name list above with the per-dispatcher label values.
      val dispatcherNameList = List(dispatcherName).asJava
      tpeOption match {
        case Some(tpe) =>
          activeThreadCountGauge.addMetric(dispatcherNameList, tpe.getActiveCount)
          corePoolSizeGauge.addMetric(dispatcherNameList, tpe.getCorePoolSize)
          currentPoolSizeGauge.addMetric(dispatcherNameList, tpe.getPoolSize)
          largestPoolSizeGauge.addMetric(dispatcherNameList, tpe.getLargestPoolSize)
          maxPoolSizeGauge.addMetric(dispatcherNameList, tpe.getMaximumPoolSize)
          completedTaskCountGauge.addMetric(dispatcherNameList, tpe.getCompletedTaskCount)
          totalTaskCountGauge.addMetric(dispatcherNameList, tpe.getTaskCount)
        case None =>
          activeThreadCountGauge.addMetric(dispatcherNameList, 0)
          corePoolSizeGauge.addMetric(dispatcherNameList, 0)
          currentPoolSizeGauge.addMetric(dispatcherNameList, 0)
          largestPoolSizeGauge.addMetric(dispatcherNameList, 0)
          maxPoolSizeGauge.addMetric(dispatcherNameList, 0)
          completedTaskCountGauge.addMetric(dispatcherNameList, 0)
          totalTaskCountGauge.addMetric(dispatcherNameList, 0)
      }
    }

    val jul = new java.util.ArrayList[MetricFamilySamples]
    jul.add(activeThreadCountGauge)
    jul.add(corePoolSizeGauge)
    jul.add(currentPoolSizeGauge)
    jul.add(largestPoolSizeGauge)
    jul.add(maxPoolSizeGauge)
    jul.add(completedTaskCountGauge)
    jul.add(totalTaskCountGauge)
    Collections.unmodifiableList(jul)
  }

  def add(dispatcherName: String, tpe: ThreadPoolExecutor): Unit = {
    map.put(dispatcherName, Some(tpe))
  }

  def remove(dispatcherName: String): Unit = {
    map.put(dispatcherName, None)
  }
}
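Note the conversion boundary in this example: GaugeMetricFamily's constructor and addMetric both take java.util.List[String] for label names and label values, so each Scala List is passed through .asJava before crossing into the Prometheus Java client.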
Example 2
Source File: ForkJoinPoolMetrics.scala From prometheus-akka with Apache License 2.0
package com.workday.prometheus.akka

import java.util.Collections

import scala.collection.JavaConverters.seqAsJavaListConverter
import scala.collection.concurrent.TrieMap

import io.prometheus.client.Collector
import io.prometheus.client.Collector.MetricFamilySamples
import io.prometheus.client.GaugeMetricFamily

object ForkJoinPoolMetrics extends Collector {

  val map = TrieMap[String, Option[ForkJoinPoolLike]]()
  this.register()

  override def collect(): java.util.List[MetricFamilySamples] = {
    // Label names must be a java.util.List[String], hence .asJava.
    val dispatcherNameList = List("dispatcherName").asJava
    // The "parellelism" spelling below is preserved from the original source.
    val parallelismGauge = new GaugeMetricFamily("akka_dispatcher_forkjoinpool_parellelism",
      "Akka ForkJoinPool Dispatcher Parellelism", dispatcherNameList)
    val poolSizeGauge = new GaugeMetricFamily("akka_dispatcher_forkjoinpool_pool_size",
      "Akka ForkJoinPool Dispatcher Pool Size", dispatcherNameList)
    val activeThreadCountGauge = new GaugeMetricFamily("akka_dispatcher_forkjoinpool_active_thread_count",
      "Akka ForkJoinPool Dispatcher Active Thread Count", dispatcherNameList)
    val runningThreadCountGauge = new GaugeMetricFamily("akka_dispatcher_forkjoinpool_running_thread_count",
      "Akka ForkJoinPool Dispatcher Running Thread Count", dispatcherNameList)
    val queuedTaskCountGauge = new GaugeMetricFamily("akka_dispatcher_forkjoinpool_queued_task_count",
      "Akka ForkJoinPool Dispatcher Queued Task Count", dispatcherNameList)
    val queuedSubmissionCountGauge = new GaugeMetricFamily("akka_dispatcher_forkjoinpool_queued_submission_count",
      "Akka ForkJoinPool Dispatcher Queued Submission Count", dispatcherNameList)
    val stealCountGauge = new GaugeMetricFamily("akka_dispatcher_forkjoinpool_steal_count",
      "Akka ForkJoinPool Dispatcher Steal Count", dispatcherNameList)

    map.foreach { case (dispatcherName, fjpOption) =>
      val dispatcherNameList = List(dispatcherName).asJava
      fjpOption match {
        case Some(fjp) =>
          parallelismGauge.addMetric(dispatcherNameList, fjp.getParallelism)
          poolSizeGauge.addMetric(dispatcherNameList, fjp.getPoolSize)
          activeThreadCountGauge.addMetric(dispatcherNameList, fjp.getActiveThreadCount)
          runningThreadCountGauge.addMetric(dispatcherNameList, fjp.getRunningThreadCount)
          queuedSubmissionCountGauge.addMetric(dispatcherNameList, fjp.getQueuedSubmissionCount)
          queuedTaskCountGauge.addMetric(dispatcherNameList, fjp.getQueuedTaskCount)
          stealCountGauge.addMetric(dispatcherNameList, fjp.getStealCount)
        case None =>
          parallelismGauge.addMetric(dispatcherNameList, 0)
          poolSizeGauge.addMetric(dispatcherNameList, 0)
          activeThreadCountGauge.addMetric(dispatcherNameList, 0)
          runningThreadCountGauge.addMetric(dispatcherNameList, 0)
          queuedSubmissionCountGauge.addMetric(dispatcherNameList, 0)
          queuedTaskCountGauge.addMetric(dispatcherNameList, 0)
          stealCountGauge.addMetric(dispatcherNameList, 0)
      }
    }

    val jul = new java.util.ArrayList[MetricFamilySamples]
    jul.add(parallelismGauge)
    jul.add(poolSizeGauge)
    jul.add(activeThreadCountGauge)
    jul.add(runningThreadCountGauge)
    jul.add(queuedSubmissionCountGauge)
    jul.add(queuedTaskCountGauge)
    jul.add(stealCountGauge)
    Collections.unmodifiableList(jul)
  }

  def add(dispatcherName: String, fjp: ForkJoinPoolLike): Unit = {
    map.put(dispatcherName, Some(fjp))
  }

  def remove(dispatcherName: String): Unit = {
    map.put(dispatcherName, None)
  }
}
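The conversion pattern is identical to Example 1: the Scala label-name and label-value lists are converted with .asJava at the boundary of the Prometheus Java client.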
Example 3
Source File: ParquetCompatibilityTest.scala From BigDatalog with Apache License 2.0
package org.apache.spark.sql.execution.datasources.parquet

import scala.collection.JavaConverters.{collectionAsScalaIterableConverter, mapAsJavaMapConverter, seqAsJavaListConverter}

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{Path, PathFilter}
import org.apache.parquet.hadoop.api.WriteSupport
import org.apache.parquet.hadoop.api.WriteSupport.WriteContext
import org.apache.parquet.hadoop.{ParquetFileReader, ParquetWriter}
import org.apache.parquet.io.api.RecordConsumer
import org.apache.parquet.schema.{MessageType, MessageTypeParser}

import org.apache.spark.sql.QueryTest

// This excerpt was truncated: only writeDirect and a trailing brace survived.
// The enclosing companion object and the DirectWriteSupport helper it references
// are restored below as a minimal reconstruction from the surrounding context.
object ParquetCompatibilityTest {

  // Writes Parquet records directly through a sequence of RecordConsumer callbacks.
  def writeDirect(
      path: String,
      schema: String,
      metadata: Map[String, String],
      recordWriters: (RecordConsumer => Unit)*): Unit = {
    val messageType = MessageTypeParser.parseMessageType(schema)
    val writeSupport = new DirectWriteSupport(messageType, metadata)
    val parquetWriter = new ParquetWriter[RecordConsumer => Unit](new Path(path), writeSupport)
    try recordWriters.foreach(parquetWriter.write) finally parquetWriter.close()
  }

  // Minimal reconstruction of the truncated helper: it hands each record-writing
  // function the underlying RecordConsumer. Note metadata.asJava, where
  // mapAsJavaMapConverter supplies the java.util.Map that WriteContext expects.
  private class DirectWriteSupport(schema: MessageType, metadata: Map[String, String])
    extends WriteSupport[RecordConsumer => Unit] {

    private var recordConsumer: RecordConsumer = _

    override def init(configuration: Configuration): WriteContext =
      new WriteContext(schema, metadata.asJava)

    override def prepareForWrite(recordConsumer: RecordConsumer): Unit =
      this.recordConsumer = recordConsumer

    override def write(recordWriter: RecordConsumer => Unit): Unit =
      recordWriter.apply(recordConsumer)
  }
}
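In this excerpt the visible conversion is mapAsJavaMapConverter, which turns the Scala metadata map into the java.util.Map required by Parquet's WriteContext; seqAsJavaListConverter is imported in the same group and is presumably used elsewhere in the full source file.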
Example 4
Source File: ImplicitsTest.scala From aloha with MIT License
package com.eharmony.aloha.audit.impl.avro

import com.google.common.collect.Lists
import org.junit.Assert.assertEquals
import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.BlockJUnit4ClassRunner

import scala.collection.JavaConverters.seqAsJavaListConverter
import com.eharmony.aloha.audit.impl.avro.Implicits.{RichFlatScore, RichScore}
import java.{lang => jl, util => ju}
import org.apache.avro.generic.GenericRecord

// The enclosing test class declaration was cut off in this excerpt; it is
// restored here so the @Test methods compile inside a JUnit test class.
@RunWith(classOf[BlockJUnit4ClassRunner])
class ImplicitsTest {
  import ImplicitsTest._

  @Test def testAllFieldsAppear(): Unit = {
    val s = filledInScore
    assertEquals(s, s.toFlatScore.toScore)
  }

  @Test def testSameFieldsInGenericRecord(): Unit = {
    val s = filledInScore
    val s1 = s.asInstanceOf[GenericRecord]
    val s2 = s.toFlatScore.asInstanceOf[GenericRecord]
    testStuff(s1, s2, Map(
      "model" -> modelId,
      "value" -> value,
      "errorMsgs" -> errors,
      "missingVarNames" -> missing,
      "prob" -> prob
    ))
  }

  private[this] def testStuff(r1: GenericRecord, r2: GenericRecord, data: Map[String, Any]): Unit = {
    data.foreach { case (k, v) =>
      val v1 = r1.get(k)
      val v2 = r2.get(k)
      assertEquals(s"for r1('$k') = $v1. Expected $v", v, r1.get(k))
      assertEquals(s"for r2('$k') = $v2. Expected $v", v, r2.get(k))
    }
  }
}

object ImplicitsTest {
  private def filledInScore = new Score(modelId, value, subvalues, errors, missing, prob)
  private def modelId = new ModelId(5L, "five")
  private def value: jl.Double = 13d
  private def subvalues = Lists.newArrayList(scr(12L, 8))
  private def errors: ju.List[CharSequence] =
    Lists.newArrayList("one error", "two errors")
  private def missing: ju.List[CharSequence] =
    Lists.newArrayList("some feature", "another feature", "yet another feature")
  private def prob: jl.Float = 1f

  private lazy val score: Score =
    scr(1, 1,
      scr(2L, 2,
        scr(4f, 4),
        scr(5, 5)),
      scr(3d, 3,
        scr(6d, 6),
        scr(7L, 7)))

  private lazy val irregularTree: Score =
    scr(1, 1,
      scr(2L, 2),
      scr(3d, 3,
        scr(5d, 5),
        scr(6L, 6)),
      scr(4d, 4,
        scr(7L, 7)))

  private[this] def scr(value: Any, id: Long, children: Score*): Score = {
    new Score(
      new ModelId(id, ""),
      value,
      Lists.newArrayList(children.asJava),
      java.util.Collections.emptyList(),
      java.util.Collections.emptyList(),
      null
    )
  }
}
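Here .asJava converts the children varargs (a Scala Seq[Score]) into a java.util.List so that Guava's Lists.newArrayList can copy it into the mutable list the Avro-generated Score constructor expects.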
Example 5
Source File: FlatScoreTest.scala From aloha with MIT License
package com.eharmony.aloha.audit.impl.avro

import com.google.common.collect.Lists
import org.junit.Assert.assertEquals
import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.BlockJUnit4ClassRunner
import com.eharmony.aloha.audit.impl.avro.AvroScoreAuditorTest.serializeRoundTrip

import scala.collection.JavaConverters.seqAsJavaListConverter
import java.{util => ju}

@RunWith(classOf[BlockJUnit4ClassRunner])
class FlatScoreTest {
  import FlatScoreTest.flatScore

  @Test def testSerializability(): Unit = {
    val serDeserFS = serializeRoundTrip(FlatScore.getClassSchema, flatScore).head

    // When comparing the records instead of the JSON strings, equality doesn't
    // hold because they are different types. flatScore is a SpecificRecord
    // and SpecificRecord checks if the other value is a SpecificRecord.
    assertEquals(flatScore.toString, serDeserFS.toString)
  }
}

object FlatScoreTest {
  private[this] def empty[A]: ju.List[A] = ju.Collections.emptyList[A]

  // Maps a Seq[A] through the implicit A => B, then copies it into a
  // ju.ArrayList[B] via .asJava and Guava.
  private[this] implicit def toArrayList[A, B](as: Seq[A])(implicit ev: A => B): ju.ArrayList[B] =
    Lists.newArrayList(as.map(ev).asJava)

  private[this] def fsd(value: Any, id: Long, children: Int*): FlatScoreDescendant = {
    new FlatScoreDescendant(
      new ModelId(id, ""),
      value,
      children,
      empty[CharSequence],
      empty[CharSequence],
      null
    )
  }

  private[avro] lazy val flatScore: FlatScore = {
    new FlatScore(new ModelId(1L, ""), 1, Vector(0, 1), empty[CharSequence], empty[CharSequence], null,
      Seq(
        fsd(2L, 2, 2, 3),  // 0
        fsd(3d, 3, 4, 5),  // 1
        fsd(4f, 4),        // 2
        fsd(5, 5),         // 3
        fsd(6d, 6),        // 4
        fsd(7L, 7)         // 5
      )
    )
  }
}
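The implicit toArrayList helper is where the converter does its work in this example: a Scala Seq is mapped through an implicit element conversion, wrapped with .asJava, and copied by Guava into a ju.ArrayList. This lets Scala literals such as Vector(0, 1) and Seq(fsd(...)) flow directly into the Avro-generated FlatScore constructor, which expects Java lists.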