scala.collection.immutable Scala Examples
The following examples show how to use scala.collection.immutable.
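As a one-minute refresher before the examples: collections in this package are persistent, so every "update" returns a new collection and leaves the original untouched. A minimal sketch using only the standard library:

import scala.collection.immutable

val m0 = immutable.HashMap(1 -> "a")
val m1 = m0 + (2 -> "b") // returns a new map; m0 is unchanged
assert(m0.size == 1 && m1.size == 2)

val q = immutable.Queue("x").enqueue("y") // persistent FIFO queue
val (head, rest) = q.dequeue              // ("x", Queue("y"))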
Example 1
Source File: K-Centers.scala From Clustering4Ever with Apache License 2.0
package org.clustering4ever.clustering.kcenters.dataset

// Snippet from the K-Centers implementation; the enclosing class and method head were elided in the original listing.
@annotation.tailrec
def go(cpt: Int, haveAllCentersConverged: Boolean, centers: List[(Int, V)]): List[(Int, V)] = {
  val preUpdatedCenters = data
    .groupByKey( cz => obtainNearestCenterID(cz.v, centers, metric) )(encoderInt)
    .mapGroups(computeCenters)(encoder)
    .collect
    .sortBy(_._1)
    .toList
  val alignedOldCenters = preUpdatedCenters.map{ case (oldClusterID, _) => centers(oldClusterID) }
  val updatedCenters = preUpdatedCenters.zipWithIndex.map{ case ((oldClusterID, center), newClusterID) => (newClusterID, center) }
  val shiftingEnough = areCentersNotMovingEnough(updatedCenters, alignedOldCenters, minShift, metric)
  if (cpt < maxIterations && !shiftingEnough) {
    go(cpt + 1, shiftingEnough, updatedCenters)
  } else {
    updatedCenters
  }
}

immutable.HashMap(go(0, false, centers): _*)
}
}
Example 2
Source File: infos.scala From fusion-data with Apache License 2.0
package mass.connector.sql.schema

import helloscala.common.util._

import scala.collection.immutable

trait SQLSchema {
  def listTable(schemaName: String): immutable.Seq[TableInfo]

  def listColumn(tableName: String, schemaName: String): immutable.Seq[ColumnInfo]
}

trait BaseInfo {
  def schemaName: String
  def tableName: String
}

abstract class InfoHelper(_data: Map[String, AnyRef]) {
  protected def data(name: Symbol) = _data(StringUtils.convertPropertyToUnderscore(name.name))
  protected def data(name: String) = _data(name)

  @inline def asString(name: Symbol): Option[String] = asString(StringUtils.convertPropertyToUnderscore(name.name))
  def asString(name: String): Option[String] = _data.get(name).flatMap(AsString.unapply)

  @inline def asInt(name: Symbol): Option[Int] = asInt(StringUtils.convertPropertyToUnderscore(name.name))
  def asInt(name: String): Option[Int] = _data.get(name).flatMap(AsInt.unapply)

  @inline def asBoolean(name: Symbol): Option[Boolean] = asBoolean(StringUtils.convertPropertyToUnderscore(name.name))
  def asBoolean(name: String): Option[Boolean] = _data.get(name).flatMap(AsBoolean.unapply)

  @inline def asFloat(name: Symbol): Option[Float] = asFloat(StringUtils.convertPropertyToUnderscore(name.name))
  def asFloat(name: String): Option[Float] = _data.get(name).flatMap(AsFloat.unapply)

  @inline def asDouble(name: Symbol): Option[Double] = asDouble(StringUtils.convertPropertyToUnderscore(name.name))
  def asDouble(name: String): Option[Double] = _data.get(name).flatMap(AsDouble.unapply)
}

case class ColumnInfo(
    schemaName: String,
    tableName: String,
    columnName: String,
    ordinalPosition: Int,
    columnDefault: Option[String],
    isNullable: Boolean,
    dataType: String,
    characterMaximumLength: Option[Int],
    characterOctetLength: Option[Int],
    numericPrecision: Option[Int],
    numericPrecisionRadix: Option[Int],
    numericScale: Option[Int],
    datetimePrecision: Option[Int],
    isUpdatable: Option[Boolean])
    extends BaseInfo

case class TableInfo(schemaName: String, tableName: String, tableType: String, isInsertable: Boolean) extends BaseInfo
Example 3
Source File: JdbcFlow.scala From fusion-data with Apache License 2.0
package mass.connector.sql

import java.nio.charset.{ Charset, StandardCharsets }
import java.sql.ResultSet

import akka.NotUsed
import akka.stream.scaladsl.Flow
import akka.util.ByteString
import fusion.jdbc.util.JdbcUtils

import scala.collection.immutable

case class JdbcResultSet(rs: ResultSet, values: immutable.IndexedSeq[AnyRef])

object JdbcFlow {
  def flowToText(valueSeparator: Char = ','): Flow[immutable.IndexedSeq[AnyRef], String, NotUsed] =
    Flow[immutable.IndexedSeq[AnyRef]].map { values =>
      val builder = new java.lang.StringBuilder()
      var i = 0
      while (i < values.length) {
        builder.append(values(i).toString)
        i += 1
        if (i < values.length) {
          builder.append(valueSeparator)
        }
      }
      builder.toString
    }

  def flowToSeq: Flow[ResultSet, immutable.IndexedSeq[AnyRef], NotUsed] =
    Flow[ResultSet].map { rs =>
      val metaData = rs.getMetaData
      (1 to rs.getMetaData.getColumnCount).map { i =>
        val typ = metaData.getColumnType(i)
        if (JdbcUtils.isString(typ)) {
          rs.getString(i)
        } else rs.getObject(i)
      }
    }

  def flowToByteString(
      valueSeparator: Char = ',',
      charset: Charset = StandardCharsets.UTF_8): Flow[immutable.IndexedSeq[AnyRef], ByteString, NotUsed] =
    Flow[immutable.IndexedSeq[AnyRef]].map { values =>
      val builder = ByteString.newBuilder
      var i = 0
      while (i < values.length) {
        builder.putBytes(values(i).toString.getBytes(charset))
        i += 1
        if (i < values.length) {
          builder.putByte(valueSeparator.toByte)
        }
      }
      builder.result()
    }

  def flowJdbcResultSet: Flow[ResultSet, JdbcResultSet, NotUsed] =
    Flow[ResultSet].map { rs =>
      val metaData = rs.getMetaData
      JdbcResultSet(rs, (1 to metaData.getColumnCount).map(i => rs.getObject(i)))
    }
}
Example 4
Source File: Graph1.scala From fusion-data with Apache License 2.0
package example.akkastream.basic

import akka.NotUsed
import akka.actor.ActorSystem
import akka.stream.{ ActorMaterializer, ClosedShape }
import akka.stream.scaladsl.{ Broadcast, Flow, GraphDSL, Merge, RunnableGraph, Sink, Source }

import scala.collection.immutable
import scala.io.StdIn

object Graph1 extends App {
  implicit val system = ActorSystem()
  implicit val mat = ActorMaterializer()

  val graph = g(1 to 2)
  graph.run()

  StdIn.readLine()
  system.terminate()

  def g(data: immutable.Iterable[Int]) =
    RunnableGraph.fromGraph(GraphDSL.create() { implicit b: GraphDSL.Builder[NotUsed] =>
      import GraphDSL.Implicits._
      val in = Source(data)
      val out = Sink.foreach(println)

      val bcast = b.add(Broadcast[Int](2))
      val merge = b.add(Merge[Int](2))

      val f1, f2, f3, f4 = Flow[Int].map(_ + 10)

      in ~> f1 ~> bcast ~> f2 ~> merge ~> f3 ~> out
      bcast ~> f4 ~> merge

      ClosedShape
    })
}
Example 5
Source File: GraphComponent.scala From fusion-data with Apache License 2.0
package example.akkastream.basic

import akka.actor.ActorSystem
import akka.stream.FanInShape.{ Init, Name }
import akka.stream._
import akka.stream.scaladsl.{ Balance, Flow, GraphDSL, Merge, MergePreferred, RunnableGraph, Sink, Source }

import scala.collection.immutable
import scala.io.StdIn

case class PriorityWorkerPoolShape[In, Out](jobsIn: Inlet[In], priorityJobsIn: Inlet[In], resultsOut: Outlet[Out])
    extends Shape {
  override def inlets: immutable.Seq[Inlet[_]] = jobsIn :: priorityJobsIn :: Nil
  override def outlets: immutable.Seq[Outlet[_]] = resultsOut :: Nil

  override def deepCopy(): Shape =
    PriorityWorkerPoolShape(jobsIn.carbonCopy(), priorityJobsIn.carbonCopy(), resultsOut.carbonCopy())
}

case class PriorityWorkerPoolShape2[In, Out](_init: Init[Out] = Name("PriorityWorkerPoolShape2"))
    extends FanInShape[Out](_init) {
  override protected def construct(init: Init[Out]): FanInShape[Out] = PriorityWorkerPoolShape2(init)

  val jobsIn: Inlet[In] = newInlet[In]("jobsIn")
  val priorityJobsIn: Inlet[In] = newInlet[In]("priorityJobsIn")
  // An Outlet[Out] named "out" is created automatically
}

object PriorityWorkerPool {
  def apply[In, Out](worker: Flow[In, Out, Any], workerCount: Int) =
    GraphDSL.create() { implicit b =>
      import GraphDSL.Implicits._

      val priorityMerge = b.add(MergePreferred[In](1))
      val balance = b.add(Balance[In](workerCount))
      val resultsMerge = b.add(Merge[Out](workerCount))

      for (i <- 0 until workerCount)
        balance.out(i) ~> worker ~> resultsMerge.in(i)

      // After merging priority and ordinary jobs, feed them to the balancer
      priorityMerge ~> balance

      PriorityWorkerPoolShape(priorityMerge.in(0), priorityMerge.preferred, resultsMerge.out)
    }
}

object GraphComponent extends App {
  implicit val system = ActorSystem()
  implicit val mat = ActorMaterializer()
  import system.dispatcher

  val worker1 = Flow[String].map("step 1 " + _)
  val worker2 = Flow[String].map("step 2 " + _)

  val g = RunnableGraph.fromGraph(GraphDSL.create() { implicit b =>
    import GraphDSL.Implicits._

    val priorityPool1 = b.add(PriorityWorkerPool(worker1, 4))
    val priorityPool2 = b.add(PriorityWorkerPool(worker2, 2))

    Source(1 to 10).map("job: " + _) ~> priorityPool1.jobsIn
    Source(1 to 10).map("priority job: " + _) ~> priorityPool1.priorityJobsIn

    priorityPool1.resultsOut ~> priorityPool2.jobsIn
    Source(1 to 10).map("one-step, priority " + _) ~> priorityPool2.priorityJobsIn

    priorityPool2.resultsOut ~> Sink.foreach(println)
    ClosedShape
  })

  g.run()

  StdIn.readLine()
  system.terminate()
}
Example 6
Source File: DiscoverConnexComponents.scala From Clustering4Ever with Apache License 2.0
package org.clustering4ever.graph

import scala.collection.{ immutable, mutable }

final def obtainConnexComponents(nodes: immutable.HashSet[Int], neighbors: immutable.HashMap[Int, immutable.HashSet[Int]]): List[List[Int]] = {

  val visited = mutable.HashMap.empty[Int, Int]

  def depthFirstTraverseFunctional(node: Int, clusterID: Int): Unit = {
    val nodeToExplore = immutable.HashSet(node)

    def obtainUnvisitedNeighbors(hs: immutable.HashSet[Int]) = {
      hs.flatMap{ n =>
        val unvisited = neighbors(n).filter( n => !visited.contains(n) )
        visited ++= unvisited.map( uv => (uv, clusterID) )
        unvisited
      }
    }

    @annotation.tailrec
    def go(hs: immutable.HashSet[Int]): immutable.HashSet[Int] =
      if (!hs.isEmpty) go(obtainUnvisitedNeighbors(hs)) else hs

    go(nodeToExplore)
  }

  var clusterID = 0
  nodes.foreach( n =>
    if (!visited.contains(n)) {
      visited += ((n, clusterID))
      depthFirstTraverseFunctional(n, clusterID)
      clusterID += 1
    }
  )

  val labeledNodes = nodes.toList.map( n => (n, visited(n)) )
  val labels = labeledNodes.map(_._2)
  val connexComponents = labels.map( l => labeledNodes.collect{ case (n, cID) if cID == l => n } )

  connexComponents
}
}
Example 7
Source File: HashingFunctions.scala From Clustering4Ever with Apache License 2.0
package org.clustering4ever.hashing

final def hf(v: Array[Double], j: Int): Double = {
  @annotation.tailrec
  def go(s: Double, i: Int): Double = {
    if (i < v.size) go(s + v(i) * hvs(j)._1(i), i + 1)
    else s
  }
  (go(0D, 0) + hvs(j)._2) / w
}

final def obtainBucketPerLevel(v: Array[Double]): immutable.IndexedSeq[Int] = {
  hvs.map{ case (rv, _, hfid) =>
    val bucketID = bucketsLimits.find{ case (th, _) => hf(v, hfid) <= th }
    if (bucketID.isDefined) bucketID.get._2 else buckets
  }
}
}
Example 8
Source File: K-Means.scala From Clustering4Ever with Apache License 2.0
package org.clustering4ever.clustering.kcenters.scala

final def fit[D <: ContinuousDistance, GS[Y] <: GenSeq[Y]](
    data: GS[Array[Double]],
    k: Int,
    metric: D,
    minShift: Double,
    maxIterations: Int
  ): KMeansModel[D] = {
  KMeans(k, metric, minShift, maxIterations, immutable.HashMap.empty[Int, ScalarVector]).fit(scalarToClusterizable(data))
}
}
Example 9
Source File: KPPInitializer.scala From Clustering4Ever with Apache License 2.0
package org.clustering4ever.clustering.kcenters.scala

final def kppInit[
    O,
    V <: GVector[V],
    Cz[Y, Z <: GVector[Z]] <: Clusterizable[Y, Z, Cz],
    D <: Distance[V]
  ](data: GenSeq[Cz[O, V]], metric: D, k: Int): immutable.HashMap[Int, V] = {

  val centers = mutable.ArrayBuffer(data(Random.nextInt(data.size)).v)

  def obtainNearestCenter(v: V): V = centers.minBy(metric.d(_, v))

  @annotation.tailrec
  def go(i: Int): Unit = {
    val preprocessed = data.map{ cz =>
      val toPow2 = metric.d(cz.v, obtainNearestCenter(cz.v))
      (cz.v, toPow2 * toPow2)
    }
    val phi = preprocessed.aggregate(0D)((agg, e) => agg + e._2, _ + _)
    val probabilities = preprocessed.map{ case (v, toPow2) => (v, toPow2 / phi) }.seq
    val shuffled = Random.shuffle(probabilities)
    centers += Stats.obtainMedianFollowingWeightedDistribution[V](shuffled)
    if (i < k - 2) go(i + 1)
  }

  go(0)

  immutable.HashMap(centers.zipWithIndex.map{ case (center, clusterID) => (clusterID, center) }: _*)
}
}
Example 10
Source File: K-Modes.scala From Clustering4Ever with Apache License 2.0
package org.clustering4ever.clustering.kcenters.scala

final def fit[D <: BinaryDistance, GS[Y] <: GenSeq[Y]](
    data: GS[Array[Int]],
    k: Int,
    metric: D,
    maxIterations: Int,
    minShift: Double
  ): KModesModel[D] = {
  KModes(k, metric, minShift, maxIterations).fit(binaryToClusterizable(data))
}
}
Example 11
Source File: ClusterwiseTypes.scala From Clustering4Ever with Apache License 2.0
package org.clustering4ever.spark.clustering.clusterwise

import scala.collection.{ mutable, immutable }
import breeze.linalg.DenseMatrix

trait ClusterwiseTypes {
  final type ClassID = Int
  final type ID = Int
  final type Xvector = Array[Double]
  final type Yvector = Array[Double]
  final type IDXYtest = Seq[(Int, (Xvector, Yvector))]
  final type IDXtest = Seq[(Long, Xvector)]
  final type DSPerClass = Array[(ID, (Xvector, Yvector, ClassID))]
  final type ClassedDS = Array[(Int, DSPerClass)]
  final type IDXDS = Array[mutable.ArrayBuffer[(Int, Xvector)]]
  final type YDS = Array[mutable.ArrayBuffer[Yvector]]
  final type RegPerClass = (Double, DenseMatrix[Double], Array[Double], Array[(Int, Array[Double])])
  final type ClassedDSperGrp = Array[(Int, Array[(Int, Int, Array[(ClassID, Int, Xvector, Yvector)])])]
}
Example 12
Source File: PostgresSchema.scala From fusion-data with Apache License 2.0
package mass.connector.sql.schema

import com.zaxxer.hikari.HikariDataSource
import fusion.jdbc.JdbcTemplate
import fusion.jdbc.util.JdbcUtils
import helloscala.common.util.AsBoolean

import scala.collection.immutable
import scala.util.control.NonFatal

class PostgresSchema private (jdbcTemplate: JdbcTemplate) extends SQLSchema {
  import mass.connector.sql.schema.PostgresSchema._

  override def listTable(schemaName: String): immutable.Seq[TableInfo] =
    jdbcTemplate.listForObject(
      "select * from information_schema.tables where table_schema = ?",
      List(schemaName),
      rs => tableInfo(jdbcTemplate, JdbcUtils.resultSetToMap(rs)))

  override def listColumn(tableName: String, schemaName: String): immutable.Seq[ColumnInfo] =
    jdbcTemplate.listForObject(
      "select * from information_schema.columns where table_schema = ? and table_name = ?",
      List(schemaName, tableName),
      rs => columnInfo(JdbcUtils.resultSetToMap(rs)))
}

object PostgresSchema {
  def listColumn(jdbcTemplate: JdbcTemplate, tableName: String, schemaName: String): immutable.Seq[ColumnInfo] =
    jdbcTemplate.listForObject(
      // JDBC placeholders must not be quoted; the original listing had "= '?'" here,
      // which would compare against the literal string "?" instead of binding the parameter.
      "select * from information_schema.columns where table_schema = ? and table_name = ?",
      List(schemaName, tableName),
      rs => columnInfo(JdbcUtils.resultSetToMap(rs)))

  def tableInfo(jdbcTemplate: JdbcTemplate, _data: Map[String, AnyRef]): TableInfo =
    TableInfo(
      _data("table_schema").toString,
      _data("table_name").toString,
      _data("table_type").toString,
      AsBoolean.unapply(_data("is_insertable_into")).getOrElse(true))

  def columnInfo(_data: Map[String, AnyRef]): ColumnInfo = {
    val helper = new InfoHelper(_data) {}
    try {
      ColumnInfo(
        _data("table_schema").toString,
        _data("table_name").toString,
        _data("column_name").toString,
        helper.asInt('ordinalPosition).get,
        helper.asString("column_default"),
        helper.asBoolean('isNullable).getOrElse(true),
        helper.asString("data_type").getOrElse(""),
        helper.asInt('characterMaximumLength),
        helper.asInt('characterOctetLength),
        helper.asInt('numericPrecision),
        helper.asInt('numericPrecisionRadix),
        helper.asInt('numericScale),
        helper.asInt('datetimePrecision),
        helper.asBoolean('isUpdatable))
    } catch {
      case NonFatal(e) =>
        println(_data("data_type"))
        println(_data)
        e.printStackTrace()
        throw e
    }
  }

  def apply(dataSource: HikariDataSource): PostgresSchema =
    apply(JdbcTemplate(dataSource, true, true, false))

  def apply(jdbcTemplate: JdbcTemplate): PostgresSchema = new PostgresSchema(jdbcTemplate)
}
Example 13
Source File: K-Means.scala From Clustering4Ever with Apache License 2.0
package org.clustering4ever.clustering.kcenters.rdd

final def fit[D <: ContinuousDistance](
    data: RDD[Array[Double]],
    k: Int,
    metric: D,
    minShift: Double,
    maxIterations: Int,
    persistanceLVL: StorageLevel
  ): KMeansModel[D] = {
  KMeans(k, metric, minShift, maxIterations, persistanceLVL).fit(scalarDataWithIDToClusterizable(data.zipWithIndex))
}
}
Example 14
Source File: K-Modes.scala From Clustering4Ever with Apache License 2.0
package org.clustering4ever.clustering.kcenters.rdd

final def fit[D <: BinaryDistance](
    data: RDD[Array[Int]],
    k: Int,
    metric: D,
    minShift: Double,
    maxIterations: Int,
    persistanceLVL: StorageLevel
  ): KModesModel[D] = {
  KModes(k, metric, minShift, maxIterations, persistanceLVL).fit(binaryDataWithIDToClusterizable(data.zipWithIndex))
}
}
Example 15
Source File: DistributedRoughSet.scala From Clustering4Ever with Apache License 2.0
package org.clustering4ever.spark.preprocessing.rst

final def runHeuristic[O, T : ClassTag, V[A] <: GSimpleVector[A, V[A]], Sz[B, C <: GVector[C]] <: Supervizable[B, C, Sz]](data: RDD[Sz[O, V[T]]], columnsOfFeats: Seq[Seq[Int]]): mutable.Buffer[Int] = {

  val nbColumns = columnsOfFeats.size
  val dataBC = sc.broadcast(data.collect.par)

  sc.parallelize(0 until 8888, nbColumns)
    .mapPartitionsWithIndex{ (idxp, _) =>
      val dataPerFeat = dataBC.value.map(_.obtainOneBucket(idxp))
      val originalFeatures = columnsOfFeats(idxp)
      val originalFeatIdByTmpFeatId = originalFeatures.zipWithIndex.map(_.swap).toMap
      val allReductSet = roughSet(dataPerFeat)
      allReductSet(Random.nextInt(allReductSet.size)).map(originalFeatIdByTmpFeatId).toIterator
    }
    .collect
    .toBuffer
}
}
Example 16
Source File: AbstractAppender.scala From rollbar-scala with MIT License
package com.storecove.rollbar.appenders

import com.storecove.rollbar.util.FiniteQueue
import com.storecove.rollbar.{ RollbarNotifier, RollbarNotifierDefaults, RollbarNotifierFactory }
import org.slf4j.MDC

import scala.collection.JavaConversions._
import scala.collection.{ immutable, mutable }

trait AbstractAppender {

  protected val DEFAULT_LOGS_LIMITS = 100

  protected var enabled: Boolean = true
  protected var onlyThrowable: Boolean = true

  protected var url: String = RollbarNotifierDefaults.defaultUrl
  protected var apiKey: String = _
  protected var environment: String = _
  protected var notifyLevelString: String = "ERROR"
  protected var limit: Int = DEFAULT_LOGS_LIMITS

  protected val rollbarNotifier: RollbarNotifier = RollbarNotifierFactory.getNotifier(apiKey, environment)

  protected val logBuffer: FiniteQueue[String] = new FiniteQueue[String](immutable.Queue[String]())

  def setNotifyLevel(level: String): Unit

  protected def notifyLevel: Any = "ERROR"

  def setEnabled(enabled: Boolean): Unit = this.enabled = enabled

  def setOnlyThrowable(onlyThrowable: Boolean): Unit = this.onlyThrowable = onlyThrowable

  def setApiKey(apiKey: String): Unit = {
    this.apiKey = apiKey
    rollbarNotifier.setApiKey(apiKey)
  }

  def setEnvironment(environment: String): Unit = {
    this.environment = environment
    rollbarNotifier.setEnvironment(environment)
  }

  def setUrl(url: String): Unit = {
    this.url = url
    rollbarNotifier.setUrl(url)
  }

  def setLimit(limit: Int): Unit = this.limit = limit

  def getEnabled: Boolean = enabled
  def getOnlyThrowable: Boolean = onlyThrowable
  def getApiKey: String = apiKey
  def getEnvironment: String = environment
  def getUrl: String = url
  def getNotifyLevel: String = notifyLevelString
  def getLimit: Int = limit

  protected def getMDCContext: mutable.Map[String, String] = {
    val mdc = MDC.getCopyOfContextMap
    if (mdc == null) {
      mutable.Map.empty[String, String]
    } else {
      mapAsScalaMap(mdc)
    }
  }
}
Example 17
Source File: Settings.scala From sbt-lagom-descriptor-generator with Apache License 2.0
import bintray.BintrayPlugin.autoImport._
import com.typesafe.sbt.SbtPgp.autoImportImpl.PgpKeys
import com.typesafe.sbt.SbtScalariform
import com.typesafe.sbt.SbtScalariform.ScalariformKeys
import de.heikoseeberger.sbtheader.HeaderKey.headers
import de.heikoseeberger.sbtheader.HeaderPattern
import sbt.Keys._
import sbt.ScriptedPlugin.{ scriptedLaunchOpts, scriptedSettings }
import sbt._
import sbtrelease.ReleasePlugin.autoImport._

import scala.collection.immutable
import scalariform.formatter.preferences.{ AlignSingleLineCaseStatements, DanglingCloseParenthesis, DoubleIndentClassDeclaration, Force }

object Settings {
  def headerLicenseSettings = Seq(
    headers := headers.value ++ Map(
      "scala" -> (
        HeaderPattern.cStyleBlockComment,
        // The license text itself was elided in the original listing.
        """|
           |""".stripMargin
      )
    ),
    scalacOptions ++= List(
      "-unchecked",
      "-deprecation",
      "-language:_",
      "-target:jvm-1.7", // target "jvm-1.8" was not added until scala 2.11.5 (https://issues.scala-lang.org/browse/SI-8966)
      "-encoding", "UTF-8"
    ),
    unmanagedSourceDirectories in Compile := List((scalaSource in Compile).value),
    unmanagedSourceDirectories in Test := List((scalaSource in Test).value),
    unmanagedSourceDirectories in IntegrationTest := List((scalaSource in Test).value)
  )

  def commonScalariformSettings: immutable.Seq[Setting[_]] =
    SbtScalariform.scalariformSettings ++ Seq(
      // Scalariform settings
      ScalariformKeys.preferences := ScalariformKeys.preferences.value
        .setPreference(AlignSingleLineCaseStatements, true)
        .setPreference(AlignSingleLineCaseStatements.MaxArrowIndent, 100)
        .setPreference(DoubleIndentClassDeclaration, true)
        .setPreference(DanglingCloseParenthesis, Force)
    )

  def scriptedTestsSettings =
    scriptedSettings ++ Seq(
      scriptedLaunchOpts += {
        version apply { v => s"-Dproject.version=$v" }
      }.value
    )

  // release-related settings
  def bintraySettings = Seq(
    bintrayOrganization := Some("lagom"),
    bintrayRepository := "sbt-plugin-releases",
    bintrayPackage := "lagom-descriptor-generator-sbt-plugin",
    bintrayReleaseOnPublish := false
  )

  def publishMavenStyleSettings = Seq(
    publishMavenStyle := false
  )

  def releaseSettings: Seq[Setting[_]] = Seq(
    releasePublishArtifactsAction := PgpKeys.publishSigned.value,
    releaseTagName := (version in ThisBuild).value,
    releaseProcess := {
      import ReleaseTransformations._
      Seq[ReleaseStep](
        checkSnapshotDependencies,
        inquireVersions,
        setReleaseVersion,
        commitReleaseVersion,
        tagRelease,
        publishArtifacts,
        releaseStepTask(bintrayRelease in thisProjectRef.value),
        releaseStepCommand("sonatypeRelease"),
        setNextVersion,
        commitNextVersion,
        pushChanges
      )
    }
  )
}
Example 18
Source File: RoutingHandler.scala From service-container with Apache License 2.0
package com.github.vonnagy.service.container.http.routing

import akka.http.scaladsl.marshalling._
import akka.http.scaladsl.model.StatusCodes._
import akka.http.scaladsl.model._
import akka.http.scaladsl.server._
import akka.http.scaladsl.settings.RoutingSettings
import com.github.vonnagy.service.container.http.routing.Rejection.{ DuplicateRejection, NotFoundRejection }
import com.github.vonnagy.service.container.http.{ DefaultMarshallers, RejectionResponse }
import com.github.vonnagy.service.container.log.LoggingAdapter
import com.typesafe.config.Config
import org.json4s.jackson.Serialization

import scala.collection.immutable
import scala.util.control.NonFatal

trait RoutingHandler extends Directives with DefaultMarshallers with LoggingAdapter {

  def conf: Config
  implicit val routeSettings = RoutingSettings(conf)
  implicit val marshaller: ToEntityMarshaller[AnyRef] = jsonMarshaller

  implicit val rejectionHandler = new RejectionHandler {
    val orig = RejectionHandler.newBuilder()
      .handle { case NotFoundRejection(errorMsg) => complete(NotFound, errorMsg) }
      .handle { case DuplicateRejection(errorMsg) => complete(BadRequest, errorMsg) }
      .handle { case MalformedRequestContentRejection(errorMsg, _) => complete(UnprocessableEntity, errorMsg) }
      .handleNotFound { complete((NotFound, "The requested resource could not be found.")) }
      .result
      .seal

    def apply(v1: immutable.Seq[Rejection]): Option[Route] = {
      val originalResult = orig(v1).getOrElse(complete(StatusCodes.InternalServerError))
      Some(mapResponse(transformExceptionRejection) {
        originalResult
      })
    }
  }

  private def transformExceptionRejection(response: HttpResponse): HttpResponse = {
    response.entity match {
      // If the entity isn't Strict (and it definitely will be), don't bother
      // converting, just throw an error, because something's weird.
      case strictEntity: HttpEntity.Strict =>
        val rej = RejectionResponse(response.status.intValue, response.status.defaultMessage,
          strictEntity.data.utf8String)
        response.withEntity(HttpEntity(ContentType(MediaTypes.`application/json`), Serialization.write(rej)))

      case _ =>
        throw new Exception("Unexpected entity type")
    }
  }
}
Example 19
Source File: CnProxyComPlugin.scala From ProxyCrawler with Apache License 2.0
package org.crowdcrawler.proxycrawler.crawler.plugins

import org.crowdcrawler.proxycrawler.ProxyInfo
import org.jsoup.Jsoup

import java.net.URI
import java.nio.charset.Charset
import scala.collection.{ immutable, mutable }
import util.control.Breaks._

private val charNum = immutable.Map(
  "v" -> "3",
  "m" -> "4",
  "a" -> "2",
  "l" -> "9",
  "q" -> "0",
  "b" -> "5",
  "i" -> "7",
  "w" -> "6",
  "r" -> "8",
  "c" -> "1"
)

val seeds: List[URI] = {
  List(
    new URI("http://www.cnproxy.com/proxy1.html"),
    new URI("http://www.cnproxy.com/proxy2.html"),
    new URI("http://www.cnproxy.com/proxy3.html"),
    new URI("http://www.cnproxy.com/proxy4.html"),
    new URI("http://www.cnproxy.com/proxy5.html"),
    new URI("http://www.cnproxy.com/proxy6.html"),
    new URI("http://www.cnproxy.com/proxy7.html"),
    new URI("http://www.cnproxy.com/proxy8.html"),
    new URI("http://www.cnproxy.com/proxy9.html"),
    new URI("http://www.cnproxy.com/proxy10.html"),
    new URI("http://www.cnproxy.com/proxyedu1.html"),
    new URI("http://www.cnproxy.com/proxyedu2.html")
  )
}

private def decryptPort(encrypted: String): Int =
  encrypted.split("\\+").map(str => charNum(str)).mkString.toInt

def extract(html: String): List[ProxyInfo] = {
  val result = mutable.ListBuffer.empty[ProxyInfo]
  val doc = Jsoup.parse(html)
  val rows = doc.select("#proxylisttb > table").get(2).select("tr")

  for (i <- 1 until rows.size()) {
    breakable {
      // skip the first row
      val row = rows.get(i)
      val tds = row.select("td")
      val host = tds.get(0).text
      val port = {
        val pattern = "document.write(\":\"+"
        val original = tds.get(0).html()
        val pos1 = original.indexOf(pattern)
        if (pos1 == -1) break
        val pos2 = original.indexOf(")</script>", pos1)
        if (pos2 == -1) break
        val portStr = original.substring(pos1 + pattern.length, pos2)
        decryptPort(portStr)
      }
      val schema = tds.get(1).text
      val speeds = tds.get(2).text
      val speed = {
        val splitted = speeds.split(",")
        var sum = 0
        for (str <- splitted) {
          val tmp = str.toInt
          sum += tmp
        }
        sum / splitted.length
      }
      val country = tds.get(3).text
      val proxyInfo = ProxyInfo(host, port, schema, speed, country, null)
      result += proxyInfo
    }
  }
  result.toList
}

def next(html: String): List[URI] = List()

override val responseCharset: Charset = Charset.forName("GB2312")
}
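The port decryption above can be traced by hand from the digit table; a standalone sketch of the same logic, with a hypothetical encrypted value:

val charNum = Map("v" -> "3", "m" -> "4", "a" -> "2", "l" -> "9", "q" -> "0",
                  "b" -> "5", "i" -> "7", "w" -> "6", "r" -> "8", "c" -> "1")

def decryptPort(encrypted: String): Int =
  encrypted.split("\\+").map(charNum).mkString.toInt

// Hypothetical input: "r+q+r+q" maps digit-wise to "8080"
assert(decryptPort("r+q+r+q") == 8080)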
Example 20
Source File: ProxyCrawler.scala From ProxyCrawler with Apache License 2.0
package org.crowdcrawler.proxycrawler

import java.io.IOException
import java.net.URI
import java.security.cert.X509Certificate

import com.typesafe.scalalogging.Logger
import org.apache.http.client.methods.HttpGet
import org.apache.http.impl.client.HttpClients
import org.apache.http.ssl.{ TrustStrategy, SSLContexts }
import org.apache.http.conn.ssl.{ NoopHostnameVerifier, SSLConnectionSocketFactory }
import org.apache.http.util.EntityUtils
import org.crowdcrawler.proxycrawler.crawler.plugins.AbstractPlugin
import org.apache.http.HttpHeaders
import org.slf4j.LoggerFactory

import scala.collection.immutable
import scala.collection.mutable

class ProxyCrawler(plugins: List[AbstractPlugin]) {

  // The first entries of this header map were garbled in the original listing; the
  // ACCEPT value is reconstructed around the surviving "*/*;q=0.8" fragment.
  private val DEFAULT_HEADERS = immutable.Map(
    (HttpHeaders.ACCEPT, "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8"),
    (HttpHeaders.ACCEPT_ENCODING, "gzip, deflate, sdch"),
    (HttpHeaders.ACCEPT_LANGUAGE, "en-US,en;q=0.8,zh-CN;q=0.6,zh;q=0.4"),
    (HttpHeaders.CONNECTION, "keep-alive")
  )

  private val CLIENT = {
    // trust all certificates including self-signed certificates
    val sslContext = SSLContexts.custom().loadTrustMaterial(null, new TrustStrategy() {
      def isTrusted(chain: Array[X509Certificate], authType: String) = true
    }).build()
    val connectionFactory = new SSLConnectionSocketFactory(sslContext, NoopHostnameVerifier.INSTANCE)
    HttpClients.custom().setSSLSocketFactory(connectionFactory).build()
  }

  def apply(classNames: String*): ProxyCrawler = {
    val plugins = mutable.ListBuffer.empty[AbstractPlugin]
    for (className <- classNames) {
      val clazz = Class.forName("org.crowdcrawler.proxycrawler.crawler.plugins." + className)
      plugins += clazz.newInstance().asInstanceOf[AbstractPlugin]
    }
    new ProxyCrawler(plugins.toList)
  }

  private def createRequest(uri: URI, headers: immutable.Map[String, String]): HttpGet = {
    val request = new HttpGet(uri)
    for (header <- headers) {
      request.setHeader(header._1, header._2)
    }
    request
  }
}
Example 21
Source File: ClusterSoakSpec.scala From akka-kubernetes-tests with Apache License 2.0
package akka.cluster.soak

import akka.actor.ActorSystem
import akka.discovery.ServiceDiscovery.Resolved
import akka.event.Logging
import akka.http.scaladsl.Http
import akka.http.scaladsl.model.{ HttpRequest, StatusCodes }
import akka.http.scaladsl.unmarshalling.Unmarshal
import akka.kubernetes.soak.Tests.{ ResponseTimeNanos, Target }
import akka.kubernetes.soak.{ StatsJsonSupport, TestResults }
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{ Sink, Source }
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.time.{ Seconds, Span }
import org.scalatest.{ Matchers, WordSpec }
import akka.util.PrettyDuration._

import scala.collection.immutable
import scala.concurrent.Future
import scala.concurrent.duration._

class ClusterSoakSpec(endpoints: Resolved)(implicit system: ActorSystem)
    extends WordSpec
    with StatsJsonSupport
    with ScalaFutures
    with Matchers {

  import system.dispatcher
  implicit val mat = ActorMaterializer()
  implicit override val patienceConfig =
    PatienceConfig(timeout = Span(30, Seconds), interval = Span(2, Seconds))
  val log = Logging(system, getClass)

  "The Clustered service" should {
    "not have had any failures" in {
      val responses: immutable.Seq[TestResults] = Source(endpoints.addresses)
        .mapAsyncUnordered(10) { rt =>
          log.info("Hitting {}", rt.host)
          val request = HttpRequest(uri = s"http://${rt.host}:${rt.port.getOrElse(8080)}/stats")
          for {
            response <- Http().singleRequest(request)
            entity <- response.entity.toStrict(1.second)
            results <- response.status match {
              case StatusCodes.OK =>
                Unmarshal(entity).to[TestResults]
              case unexpected =>
                Future.failed(
                  new RuntimeException(s"Unexpected response code: $unexpected body: ${entity.data.utf8String}")
                )
            }
          } yield results
        }
        .runWith(Sink.seq)
        .futureValue

      log.info("{} nodes tested", responses.size)

      val maxJoinTimes =
        responses.map(_.joiningTime).sorted.reverse.take(5).map(_.nanos.pretty)

      log.info("Max join times: {}", maxJoinTimes)

      val maxResponseTimePerNode: immutable.Seq[(Target, ResponseTimeNanos)] =
        responses.map(_.lastResult.responses.maxBy(_._2))

      val averageResponseTimesPerNode = responses
        .map((eachNode: TestResults) => {
          val total = eachNode.lastResult.responses.map(_._2).sum.nanos
          val count = eachNode.lastResult.responses.size
          total / count
        })
        .sorted
        .reverse

      log.info("All response times: {}", responses)
      log.info("Slowest response times across all node pings: {}",
        maxResponseTimePerNode.sortBy(_._2).reverse.take(5).map(_._2.nanos.pretty))
      log.info("Slowest average response times across all node pings: {}",
        averageResponseTimesPerNode.take(5).map(_.pretty))

      responses.filter(_.testsFailed != 0) shouldEqual Nil

      withClue("Response took longer than 2 seconds. Do some investigation") {
        responses.filter(_.lastResult.responses.exists(_._2.nanos > 2.seconds)) shouldEqual Nil
      }

      withClue("Found unreachable events") {
        responses.filter(_.memberUnreachableEvents != 0) shouldEqual Nil
      }

      withClue("Found downed events") {
        responses.filter(_.memberDownedEvents != 0) shouldEqual Nil
      }
    }
  }
}
Example 22
Source File: team.scala From AckCord with MIT License
package ackcord.data

import scala.collection.immutable

import ackcord.data.raw.PartialUser
import ackcord.util.IntCirceEnumWithUnknown
import enumeratum.values.{ IntEnum, IntEnumEntry }

case class TeamMember(
    membershipState: TeamMembershipState,
    permissions: Seq[String],
    teamId: SnowflakeType[Team],
    user: PartialUser
)

sealed abstract class TeamMembershipState(val value: Int) extends IntEnumEntry
object TeamMembershipState
    extends IntEnum[TeamMembershipState]
    with IntCirceEnumWithUnknown[TeamMembershipState] {
  override def values: immutable.IndexedSeq[TeamMembershipState] = findValues

  case object Invited extends TeamMembershipState(1)
  case object Accepted extends TeamMembershipState(2)
  case class Unknown(i: Int) extends TeamMembershipState(i)

  override def createUnknown(value: Int): TeamMembershipState = Unknown(value)
}
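findValues collects the declared case objects into the immutable.IndexedSeq that backs lookups; a sketch of how raw Discord values resolve, assuming enumeratum's standard withValueOpt is available on the companion:

val known = TeamMembershipState.withValueOpt(2) // Some(Accepted)
// Values Discord introduces later are absent from `values`; decoding falls back to createUnknown
val later = TeamMembershipState.withValueOpt(99)
  .getOrElse(TeamMembershipState.createUnknown(99)) // Unknown(99)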
Example 23
Source File: Maps1.scala From learning-scala with Apache License 2.0
package com.es.scala.basics

import scala.collection.mutable
import scala.collection.immutable

object Maps1 {
  def main(args: Array[String]) {
    // immutable
    val m1 = immutable.Map ("a" -> 1, "b" -> 2)
    println (m1)
    println ("m1(a) : " + m1("a"))  // --> 1
    // println (m1("c"))  // --> exception
    println ("m1.getOrElse(c, -1) : " + m1.getOrElse("c", -1))  // --> -1
    // m1("d") = 10 // error

    // empty mutable hashmap
    val m2 = new mutable.HashMap[String, Int]()

    // add elements
    m2 += "a" -> 1
    m2 += "b" -> 2
    m2 += "c" -> 3
    println("m2 : " + m2)
  }
}
Example 24
Source File: InitIza.scala From izanami with Apache License 2.0
package experiments

import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.http.scaladsl.model._
import akka.http.scaladsl.model.headers.RawHeader
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{ Sink, Source }
import akka.util.ByteString

import scala.collection.immutable
import scala.concurrent.Future

object InitIza extends App {

  implicit val system: ActorSystem = ActorSystem()
  implicit val materializer: ActorMaterializer = ActorMaterializer()

  import system.dispatcher

  private val http = Http()

  private val features = "http://localhost:9000/api/features"
  //private val features = "http://izanami-perfs.cleverapps.io/api/features"

  Source(0 to 2000)
    .mapAsyncUnordered(10) { postFeature }
    .alsoTo(Sink.foreach {
      case (c, s) if c == StatusCodes.Created =>
      case (c, s)                             => println(s"Oups $c $s")
    })
    .runWith(Sink.ignore)
    .onComplete { _ =>
      println("Done")
    }

  private def postFeature(i: Int): Future[(StatusCode, String)] = {
    val headers: immutable.Seq[HttpHeader] = immutable.Seq(
      RawHeader("Izanami-Client-Id", "xxxx"),
      RawHeader("Izanami-Client-Secret", "xxxx")
    )

    val body =
      s"""
         | {
         |   "id": "a:key:$i",
         |   "enabled": true,
         |   "activationStrategy": "NO_STRATEGY"
         | }
       """.stripMargin

    http
      .singleRequest(
        HttpRequest(
          HttpMethods.POST,
          Uri(features),
          headers = headers,
          entity = HttpEntity.Strict(ContentTypes.`application/json`, ByteString(body))
        )
      )
      .flatMap {
        case HttpResponse(code, _, entity, _) =>
          entity.dataBytes.map(_.utf8String).runFold("")((str, acc) => str + acc).map(s => (code, s))
      }
  }
}
Example 25
Source File: S3Brain.scala From sumobot with Apache License 2.0
package com.sumologic.sumobot.brain

import java.io.{ ByteArrayInputStream, ByteArrayOutputStream }
import java.util.Properties

import akka.actor.{ Actor, Props }
import com.amazonaws.auth.{ AWSCredentials, AWSStaticCredentialsProvider }
import com.amazonaws.services.s3.{ AmazonS3Client, AmazonS3ClientBuilder }
import com.amazonaws.services.s3.model.ObjectMetadata
import com.sumologic.sumobot.brain.Brain._

import scala.collection.JavaConverters._
import scala.collection.immutable

object S3Brain {
  def props(credentials: AWSCredentials, bucket: String, s3Key: String): Props =
    Props(classOf[S3Brain], credentials, bucket, s3Key)
}

class S3Brain(credentials: AWSCredentials, bucket: String, s3Key: String) extends Actor {
  private val s3Client = AmazonS3ClientBuilder.standard()
    .withCredentials(new AWSStaticCredentialsProvider(credentials)).build

  private var brainContents: Map[String, String] = loadFromS3()

  override def receive: Receive = {
    case Store(key, value) =>
      brainContents += (key -> value)
      saveToS3(brainContents)

    case Remove(key) =>
      brainContents -= key
      saveToS3(brainContents)

    case Retrieve(key) =>
      brainContents.get(key) match {
        case Some(value) => sender() ! ValueRetrieved(key, value)
        case None => sender() ! ValueMissing(key)
      }

    case ListValues(prefix) =>
      sender() ! ValueMap(brainContents.filter(_._1.startsWith(prefix)))
  }

  private def loadFromS3(): Map[String, String] = {
    if (s3Client.doesBucketExistV2(bucket)) {
      val props = new Properties()
      props.load(s3Client.getObject(bucket, s3Key).getObjectContent)
      immutable.Map(props.asScala.toSeq: _*)
    } else {
      Map.empty
    }
  }

  private def saveToS3(contents: Map[String, String]): Unit = {
    if (!s3Client.doesBucketExistV2(bucket)) {
      s3Client.createBucket(bucket)
    }

    val props = new Properties()
    props.putAll(contents.asJava)
    val out = new ByteArrayOutputStream()
    props.store(out, "")
    out.flush()
    out.close()
    val in = new ByteArrayInputStream(out.toByteArray)
    s3Client.putObject(bucket, s3Key, in, new ObjectMetadata())
  }
}
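The companion's props factory plugs into ordinary actor creation; a minimal sketch, where the credentials, bucket, and key names are illustrative placeholders:

import akka.actor.ActorSystem
import com.amazonaws.auth.BasicAWSCredentials

val system = ActorSystem("sumobot")
val credentials = new BasicAWSCredentials("accessKey", "secretKey") // placeholder values
val brain = system.actorOf(S3Brain.props(credentials, "my-bot-state", "brain.properties"))
brain ! Brain.Store("greeting", "hello") // every change is re-persisted to S3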
Example 26
Source File: GeneratorMixin.scala From courier with Apache License 2.0
package org.coursera.courier.generator

import org.coursera.courier.generator.specs.Definition

import scala.collection.immutable

trait GeneratorMixin {
  def extraClassExpressions(definition: Definition): immutable.Seq[String] = List.empty
  def extraCompanionExpressions(definition: Definition): immutable.Seq[String] = List.empty
}

object NilGeneratorMixin extends GeneratorMixin {
  override def extraClassExpressions(definition: Definition): immutable.Seq[String] = List.empty
  override def extraCompanionExpressions(definition: Definition): immutable.Seq[String] = List.empty
}
Example 27
Source File: package.scala From tapir with Apache License 2.0
package sttp.tapir

import scala.collection.immutable

package object openapi {
  implicit class IterableToListMap[A](xs: Iterable[A]) {
    def toListMap[T, U](implicit ev: A <:< (T, U)): immutable.ListMap[T, U] = {
      val b = immutable.ListMap.newBuilder[T, U]
      for (x <- xs) b += x
      b.result()
    }
  }
}
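Unlike toMap, this builder goes through ListMap, so it preserves encounter order, which keeps generated OpenAPI documents stable. A quick sketch, assuming the implicit class is in scope (it is anywhere inside the sttp.tapir.openapi package):

import scala.collection.immutable

val fields = List("id" -> 1, "name" -> 2, "email" -> 3)
val ordered: immutable.ListMap[String, Int] = fields.toListMap
assert(ordered.keys.toList == List("id", "name", "email")) // insertion order retained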
Example 28
Source File: CsvLine.scala From aloha with MIT License
package com.eharmony.aloha.semantics.compiled.plugin.csv

import scala.collection.immutable

trait CsvLine {
  def e(s: String): EnumConstant
  def b(s: String): Boolean
  def i(s: String): Int
  def l(s: String): Long
  def f(s: String): Float
  def d(s: String): Double
  def s(s: String): String

  def oe(s: String): Option[EnumConstant]
  def ob(s: String): Option[Boolean]
  def oi(s: String): Option[Int]
  def ol(s: String): Option[Long]
  def of(s: String): Option[Float]
  def od(s: String): Option[Double]
  def os(s: String): Option[String]

  def ve(s: String): immutable.IndexedSeq[EnumConstant]
  def vb(s: String): immutable.IndexedSeq[Boolean]
  def vi(s: String): immutable.IndexedSeq[Int]
  def vl(s: String): immutable.IndexedSeq[Long]
  def vf(s: String): immutable.IndexedSeq[Float]
  def vd(s: String): immutable.IndexedSeq[Double]
  def vs(s: String): immutable.IndexedSeq[String]

  def voe(s: String): immutable.IndexedSeq[Option[EnumConstant]]
  def vob(s: String): immutable.IndexedSeq[Option[Boolean]]
  def voi(s: String): immutable.IndexedSeq[Option[Int]]
  def vol(s: String): immutable.IndexedSeq[Option[Long]]
  def vof(s: String): immutable.IndexedSeq[Option[Float]]
  def vod(s: String): immutable.IndexedSeq[Option[Double]]
  def vos(s: String): immutable.IndexedSeq[Option[String]]

  // ove, ..., ovs
  // ovoe, ..., ovos
}
Example 29
Source File: optionalHandler.scala From aloha with MIT License
package com.eharmony.aloha.semantics.compiled.plugin.csv

import scala.collection.immutable
import scala.util.Try

sealed trait OptionalHandler {
  def produceOption[A](fieldName: String, f: Option[String => A], fields: Array[String], missing: String => Boolean): Option[A]
  def produceOptions[A](vals: Array[String], f: Option[String => A], missing: String => Boolean): immutable.IndexedSeq[Option[A]]
}

case class GracefulOptionalHandler(indices: Map[String, Int]) extends OptionalHandler {
  def produceOption[A](
      fieldName: String,
      f: Option[String => A],
      fields: Array[String],
      missing: String => Boolean
  ): Option[A] = {
    for {
      g <- f
      i <- indices.get(fieldName)
      field = fields(i)
      if !missing(field)
      x <- Try { g(field) }.toOption
    } yield x
  }

  def produceOptions[A](vals: Array[String], f: Option[String => A], missing: String => Boolean): immutable.IndexedSeq[Option[A]] = {
    f.map( g =>
      vals.map( v =>
        if (missing(v)) None
        else Try { g(v) }.toOption
      )(scala.collection.breakOut)
    ) getOrElse RepeatedIndexedSeq.fill(vals.length)(None)
  }
}

case class FailFastOptionalHandler(indices: Map[String, Int]) extends OptionalHandler {
  def produceOption[A](
      fieldName: String,
      f: Option[String => A],
      fields: Array[String],
      missing: String => Boolean
  ): Option[A] = {
    f.flatMap{ g =>
      val field = fields(indices(fieldName))
      if (missing(field)) None
      else Option(g(field))
    }
  }

  def produceOptions[A](vals: Array[String], f: Option[String => A], missing: String => Boolean): immutable.IndexedSeq[Option[A]] = {
    f.map( g =>
      vals.map( v =>
        if (missing(v)) None
        else Some(g(v))
      )(scala.collection.breakOut)
    ) getOrElse RepeatedIndexedSeq.fill(vals.length)(None)
  }
}

case class RepeatedIndexedSeq[+A](length: Int, a: A) extends immutable.IndexedSeq[A] {
  def apply(i: Int): A =
    if (0 <= i && i < length) a
    else throw new ArrayIndexOutOfBoundsException(s"index $i not in range 0 ... ${length - 1}")
}

object RepeatedIndexedSeq {
  def fill[A](n: Int)(a: A) = RepeatedIndexedSeq(n, a)
}
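RepeatedIndexedSeq backs the getOrElse fallback above: it represents n copies of one element in constant space instead of allocating n references. A small check of its behavior:

val fallback = RepeatedIndexedSeq.fill(3)(None)
assert(fallback.length == 3)
assert(fallback(2).isEmpty)
// fallback(3) throws ArrayIndexOutOfBoundsException, per apply's bounds check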
Example 30
Source File: BasicDecisionTree.scala From aloha with MIT License
package com.eharmony.aloha.models.tree.decision

import com.eharmony.aloha.audit.Auditor
import com.eharmony.aloha.factory._
import com.eharmony.aloha.id.ModelIdentity
import com.eharmony.aloha.models.tree.Tree
import com.eharmony.aloha.models.{ SubmodelBase, Subvalue }
import com.eharmony.aloha.reflect.RefInfo
import com.eharmony.aloha.semantics.Semantics
import spray.json.{ JsValue, JsonFormat, JsonReader }

import scala.collection.immutable

// The enclosing parser object and its companion were elided in the original listing.
override def commonJsonReader[U, N, A, B <: U](
    factory: SubmodelFactory[U, A],
    semantics: Semantics[A],
    auditor: Auditor[U, N, B])(implicit r: RefInfo[N], jf: JsonFormat[N]): Option[JsonReader[BasicDecisionTree[U, N, A, B]]] = {

  Some(new JsonReader[BasicDecisionTree[U, N, A, B]] {
    override def read(json: JsValue): BasicDecisionTree[U, N, A, B] = {
      val dtAst = json.convertTo(decisionTreeAstJsonFormat[N])
      val mId = getModelId(json)
      val t = Tree[NodeAst[N], immutable.IndexedSeq, Node[A, N]](
        dtAst.nodes, root, id, childIds, treeBuilder[A, N](semantics, dtAst.missingDataOk))
      val dt = BasicDecisionTree(mId.get, t, dtAst.returnBest, auditor)
      dt
    }
  })
}
}

def parser: ModelParser = Parser
}
Example 31
Source File: Cache.scala From AckCord with MIT License
package ackcord

import scala.collection.immutable

import ackcord.gateway.GatewayMessage
import akka.actor.typed.ActorSystem
import akka.stream.scaladsl.{ Sink, Source }
import akka.{ NotUsed, actor => classic }

// The enclosing Cache companion object was elided in the original listing.
def create(
    cacheProcessor: MemoryCacheSnapshot.CacheProcessor = MemoryCacheSnapshot.defaultCacheProcessor,
    parallelism: Int = 4
)(implicit system: ActorSystem[Nothing]): Cache = {
  val (publish, subscribe) = CacheStreams.cacheStreams(cacheProcessor)
  val (gatewayPublish, gatewaySubscribe) = CacheStreams.gatewayEvents[Any]

  // Keep it drained if nothing else is using it
  subscribe.runWith(Sink.ignore)

  Cache(publish, subscribe, gatewayPublish, gatewaySubscribe, parallelism)
}
}
Example 32
Source File: Streamable.scala From swave with Mozilla Public License 2.0
package swave.core

import org.reactivestreams.Publisher

import scala.annotation.implicitNotFound
import scala.collection.immutable
import scala.concurrent.Future
import scala.util.Try
import swave.core.impl.util.RingBuffer
import swave.core.io.Bytes

@implicitNotFound(
  msg = "Don't know how to create a stream from instances of type ${T}. Maybe you'd like to provide an `implicit Streamable[${T}]`?")
//#source-quote
abstract class Streamable[-T] {
  type Out
  def apply(value: T): Spout[Out]
}
//#source-quote

object Streamable {
  type Aux[T, O] = Streamable[T] { type Out = O }

  private val spout = new Streamable[Spout[AnyRef]] {
    type Out = AnyRef
    def apply(value: Spout[AnyRef]) = value
  }
  implicit def forSpout[T]: Aux[Spout[T], T] = spout.asInstanceOf[Aux[Spout[T], T]]

  private val option = new Streamable[Option[AnyRef]] {
    type Out = AnyRef
    def apply(value: Option[AnyRef]) = Spout.fromOption(value)
  }
  implicit def forOption[T]: Aux[Option[T], T] = option.asInstanceOf[Aux[Option[T], T]]

  private val iterable = new Streamable[immutable.Iterable[AnyRef]] {
    type Out = AnyRef
    def apply(value: immutable.Iterable[AnyRef]) = Spout.fromIterable(value)
  }
  implicit def forIterable[T]: Aux[immutable.Iterable[T], T] = iterable.asInstanceOf[Aux[immutable.Iterable[T], T]]

  private val iterator = new Streamable[Iterator[AnyRef]] {
    type Out = AnyRef
    def apply(value: Iterator[AnyRef]) = Spout.fromIterator(value)
  }
  implicit def forIterator[T]: Aux[Iterator[T], T] = iterator.asInstanceOf[Aux[Iterator[T], T]]

  private val publisher = new Streamable[Publisher[AnyRef]] {
    type Out = AnyRef
    def apply(value: Publisher[AnyRef]) = Spout.fromPublisher(value)
  }
  implicit def forPublisher[T]: Aux[Publisher[T], T] = publisher.asInstanceOf[Aux[Publisher[T], T]]

  private val ringBuffer = new Streamable[RingBuffer[AnyRef]] {
    type Out = AnyRef
    def apply(value: RingBuffer[AnyRef]) = Spout.fromRingBuffer(value)
  }
  private[swave] implicit def forRingBuffer[T]: Aux[RingBuffer[T], T] = ringBuffer.asInstanceOf[Aux[RingBuffer[T], T]]

  private val future = new Streamable[Future[AnyRef]] {
    type Out = AnyRef
    def apply(value: Future[AnyRef]) = Spout.fromFuture(value)
  }
  implicit def forFuture[T]: Aux[Future[T], T] = future.asInstanceOf[Aux[Future[T], T]]

  private val tryy = new Streamable[Try[AnyRef]] {
    type Out = AnyRef
    def apply(value: Try[AnyRef]) = Spout.fromTry(value)
  }
  implicit def forTry[T]: Aux[Try[T], T] = tryy.asInstanceOf[Aux[Try[T], T]]

  implicit def forBytes[T](implicit ev: Bytes[T]): Aux[T, Byte] =
    new Streamable[T] {
      type Out = Byte
      def apply(value: T): Spout[Byte] = Spout.fromIterator(ev.toSeq(value).iterator)
    }

  implicit def lazyStreamable[T, O](implicit ev: Streamable.Aux[T, O]): Aux[() ⇒ T, O] =
    new Streamable[() ⇒ T] {
      type Out = O
      def apply(f: () ⇒ T) = ev(f())
    }
}
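Each implicit fixes the dependent Out type through the Aux alias, which is what lets implicit resolution pick the right Spout constructor from a value's static type. A sketch of resolving one instance explicitly, against the definitions above rather than swave's public docs:

import scala.collection.immutable
import swave.core._

val streamable: Streamable.Aux[immutable.Iterable[Int], Int] = Streamable.forIterable[Int]
val spout: Spout[Int] = streamable(List(1, 2, 3)) // delegates to Spout.fromIterable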
Example 33
Source File: Webhook.scala From AckCord with MIT License
package ackcord.data

import scala.collection.immutable

import ackcord.CacheSnapshot
import ackcord.util.IntCirceEnumWithUnknown
import enumeratum.values.{ IntEnum, IntEnumEntry }

// The surrounding Webhook case class was elided in the original listing.
def textGuildChannel(implicit snapshot: CacheSnapshot): Option[TextGuildChannel] =
  guildId
    .flatMap(snapshot.getGuildChannel(_, channelId))
    .orElse(snapshot.getGuildChannel(channelId))
    .collect {
      case gChannel: TextGuildChannel => gChannel
    }
}

sealed abstract class WebhookType(val value: Int) extends IntEnumEntry
object WebhookType extends IntEnum[WebhookType] with IntCirceEnumWithUnknown[WebhookType] {
  override def values: immutable.IndexedSeq[WebhookType] = findValues

  case object Incomming extends WebhookType(1)
  case object ChannelFollower extends WebhookType(2)
  case class Unknown(i: Int) extends WebhookType(i)

  override def createUnknown(value: Int): WebhookType = Unknown(value)
}
Example 34
Source File: Switch.scala From AckCord with MIT License
package ackcord.util

import java.util.concurrent.atomic.AtomicBoolean

import scala.collection.immutable

import akka.stream.stage.{ GraphStage, GraphStageLogic, InHandler, OutHandler }
import akka.stream.{ Attributes, FanInShape2, Inlet, Outlet }

//TODO: Maybe use a third inlet to determine where to listen to
class Switch[A](ref: AtomicBoolean, emitChangeTrue: immutable.Seq[A], emitChangeFalse: immutable.Seq[A])
    extends GraphStage[FanInShape2[A, A, A]] {
  override val shape: FanInShape2[A, A, A] = new FanInShape2[A, A, A]("Switch")

  val in1: Inlet[A] = shape.in0
  val in2: Inlet[A] = shape.in1
  val out: Outlet[A] = shape.out

  override def createLogic(inheritedAttributes: Attributes): GraphStageLogic =
    new GraphStageLogic(shape) with OutHandler {
      private var lastState: Boolean = ref.get()
      private var waitingOther: A = _

      private def activeIn(): Inlet[A] = {
        val newState = ref.get()
        val newIn = if (newState) in1 else in2

        if (lastState != newState) {
          lastState = newState
          emitMultiple(out, if (newState) emitChangeTrue else emitChangeFalse)

          if (waitingOther != null) {
            emit(out, waitingOther)
            waitingOther = null.asInstanceOf[A]
          }

          tryPull(newIn)
        }

        newIn
      }

      private def setInHandler(in: Inlet[A]): Unit = {
        setHandler(
          in,
          new InHandler {
            override def onPush(): Unit = {
              if (activeIn() == in) {
                emit(out, grab(in))
              } else {
                require(waitingOther == null, "Pushed other when a waiting other was already defined")
                waitingOther = grab(in)
              }
            }
          }
        )
      }

      setInHandler(in1)
      setInHandler(in2)

      setHandler(out, this)

      override def onPull(): Unit = pull(activeIn())
    }
}
Example 35
Source File: TypeVar.scala From lift with MIT License
package arithmetic

import lift.arithmetic._
import ir._
import ir.ast.Expr

import scala.collection.{ immutable, mutable }
import scala.language.implicitConversions

class TypeVar private (range: Range, fixedId: Option[Long] = None) extends ExtensibleVar("", range, fixedId) {
  override def copy(r: Range) = new TypeVar(r, Some(id))

  override def cloneSimplified() = new TypeVar(range, Some(id)) with SimplifiedExpr

  override def visitAndRebuild(f: (ArithExpr) => ArithExpr): ArithExpr =
    f(new TypeVar(range.visitAndRebuild(f), Some(id)))

  override lazy val toString = "tv_" + name + "_" + id
}

object TypeVar {
  def apply(range: Range = RangeUnknown) = {
    new TypeVar(range)
  }

  def getTypeVars(expr: Expr): Set[TypeVar] = {
    Expr.visitWithState(immutable.HashSet[TypeVar]())(expr, (inExpr, set) => set ++ getTypeVars(inExpr.t))
  }

  def getTypeVars(t: Type): Set[TypeVar] = {
    val result = new mutable.HashSet[TypeVar]()
    Type.visit(t, (ae: ArithExpr) => result ++= getTypeVars(ae): Unit)
    result.toSet
  }

  def getTypeVars(expr: ArithExpr): Set[TypeVar] = {
    val typeVars = scala.collection.mutable.HashSet[TypeVar]()
    ArithExpr.visit(expr, {
      case tv: TypeVar => typeVars += tv
      case _           =>
    })
    typeVars.toSet
  }
}
Example 36
Source File: Compile.scala From lift with MIT License
package opencl.executor

import ir.TypeChecker
import ir.ast.Lambda
import lift.arithmetic.{ ?, ArithExpr }
import opencl.generator.{ OpenCLGenerator, Verbose, NDRange }

import scala.collection.immutable

// The enclosing Compile object was elided in the original listing.
def apply(f: Lambda,
          localSize0: ArithExpr, localSize1: ArithExpr, localSize2: ArithExpr,
          globalSize1: ArithExpr, globalSize2: ArithExpr, globalSize3: ArithExpr,
          valueMap: immutable.Map[ArithExpr, ArithExpr]): String = {
  // 1. type check
  TypeChecker(f)

  // 2. generate OpenCL kernel
  val kernel = OpenCLGenerator.generate(
    f,
    NDRange(localSize0, localSize1, localSize2),
    NDRange(globalSize1, globalSize2, globalSize3),
    valueMap)

  // 3. print and return kernel code
  if (Verbose()) {
    println("Kernel code:")
    println(kernel)
  }
  kernel
}
}
Example 37
Source File: JobService.scala From fusion-data with Apache License 2.0
package mass.job.service.job

import java.io.File
import java.nio.charset.StandardCharsets
import java.nio.file.{ Files, Path }

import akka.actor.typed.{ ActorRef, ActorSystem }
import akka.actor.typed.scaladsl.AskPattern._
import akka.http.scaladsl.server.directives.FileInfo
import akka.util.Timeout
import javax.inject.{ Inject, Singleton }
import mass.job.service.job.JobActor.CommandReply
import mass.message.job._

import scala.collection.immutable
import scala.concurrent.duration._
import scala.concurrent.{ ExecutionContext, Future }
import scala.reflect.ClassTag

@Singleton
class JobService @Inject() (implicit system: ActorSystem[_]) {
  implicit val timeout: Timeout = Timeout(10.seconds)

  val jobActor: ActorRef[JobActor.Command] = JobActor.init(system)

  def listOption(): Future[JobGetAllOptionResp] = askToJob[JobGetAllOptionResp](JobGetAllOptionReq())

  def uploadFiles(list: immutable.Seq[(FileInfo, File)])(implicit ec: ExecutionContext): Future[JobUploadFilesResp] = {
    askToJob[JobUploadFilesResp](JobUploadFilesReq(list)).andThen {
      case _ => list.foreach { case (_, file) => Files.deleteIfExists(file.toPath) }
    }
  }

  def uploadJobOnZip(fileInfo: FileInfo, file: Path)(implicit ec: ExecutionContext): Future[JobUploadJobResp] = {
    val req = JobUploadJobReq(
      file,
      fileInfo.fileName,
      fileInfo.contentType.charsetOption.map(_.nioCharset()).getOrElse(StandardCharsets.UTF_8))
    askToJob[JobUploadJobResp](req).andThen { case _ => Files.deleteIfExists(file) }
  }

  def updateTrigger(req: JobUpdateReq): Future[JobSchedulerResp] = askToJob[JobSchedulerResp](req)

  def page(req: JobPageReq): Future[JobPageResp] = askToJob[JobPageResp](req)

  def findItemByKey(key: String): Future[JobSchedulerResp] = askToJob[JobSchedulerResp](JobFindReq(key = key))

  def createJob(req: JobCreateReq): Future[JobCreateResp] = askToJob[JobCreateResp](req)

  def updateJob(req: JobUpdateReq): Future[JobSchedulerResp] = askToJob[JobSchedulerResp](req)

  @inline private def askToJob[RESP](req: JobMessage)(implicit tag: ClassTag[RESP]): Future[RESP] =
    jobActor.ask[JobResponse](replyTo => CommandReply(req, replyTo)).mapTo[RESP]
}
Example 38
Source File: EtlGraph.scala From fusion-data with Apache License 2.0
package mass.rdp.etl.graph

import helloscala.common.util.StringUtils
import mass.connector.Connector
import mass.rdp.RdpSystem
import mass.rdp.etl.EtlWorkflowExecution
import mass.rdp.etl.graph.EtlScriptType.EtlScriptType

import scala.collection.immutable

case class EtlConnector(ref: String) {
  require(StringUtils.isNoneBlank(ref), s"ref: $ref must name a data connector")
}

object EtlScriptType extends Enumeration {
  type EtlScriptType = Value
  val sql = Value(1)
  val scala = Value(2)
  val javascript = Value(3)
  val java = Value(4)
}

case class EtlScript(`type`: EtlScriptType, src: Option[String], content: Option[String]) {
  require(src.nonEmpty || content.nonEmpty, s"The src attribute ($src) and the script content ($content) must not both be empty")
}

case class EtlSource(name: String, connector: EtlConnector, script: EtlScript, out: String) {
  require(StringUtils.isNoneBlank(name), "name must not be empty")
  require(StringUtils.isNoneBlank(out), "out must not be empty")
}

case class EtlFlow(name: String, script: EtlScript, outs: Vector[String]) {
  require(StringUtils.isNoneBlank(name), "name must not be empty")
  require(outs.nonEmpty, "outs must not be empty")
}

case class EtlSink(name: String, connector: EtlConnector, script: EtlScript) {
  require(StringUtils.isNoneBlank(name), "name must not be empty")
}

case class EtlGraphSetting(name: String, source: EtlSource, flows: Vector[EtlFlow], sink: EtlSink)

trait EtlGraph {
  def name: String = graphSetting.name

  def graphSource: EtlSource = graphSetting.source

  def graphFlows: Vector[EtlFlow] = graphSetting.flows

  def graphSink: EtlSink = graphSetting.sink

  def graphSetting: EtlGraphSetting

  def run(connectors: immutable.Seq[Connector], rdpSystem: RdpSystem): EtlWorkflowExecution
}
Example 39
Source File: EtlGraphImpl.scala From fusion-data with Apache License 2.0
package mass.rdp.etl.graph

import akka.NotUsed
import akka.stream.scaladsl.{ Sink, Source }
import com.typesafe.scalalogging.StrictLogging
import javax.script.SimpleBindings
import mass.connector.Connector
import mass.connector.sql._
import mass.core.event.{ EventData, EventDataSimple }
import mass.core.script.ScriptManager
import mass.rdp.RdpSystem
import mass.rdp.etl.{ EtlResult, EtlWorkflowExecution, SqlEtlResult }

import scala.collection.immutable
import scala.concurrent.{ Future, Promise }
import scala.util.{ Failure, Success }

case class EtlGraphImpl(graphSetting: EtlGraphSetting) extends EtlGraph with StrictLogging {
  override def run(connectors: immutable.Seq[Connector], rdpSystem: RdpSystem): EtlWorkflowExecution = {
    implicit val ec = rdpSystem.materializer.system.dispatcher
    implicit val mat = rdpSystem.materializer

    def getConnector(name: String): Connector =
      connectors.find(_.name == name) orElse
        rdpSystem.connectorSystem.getConnector(name) getOrElse
        (throw new EtlGraphException(s"connector ref: $name does not exist"))

    val promise = Promise[EtlResult]()

    val source = dataSource(getConnector(graphSource.connector.ref), rdpSystem)
    val sink = dataSink(getConnector(graphSink.connector.ref), rdpSystem)

    graphFlows
      .foldLeft(source)((s, etlFlow) =>
        s.map { event =>
          val engine = ScriptManager.scriptJavascript
          val bindings = new SimpleBindings()
          bindings.put("event", event.asInstanceOf[EventDataSql])
          val data = engine.eval(etlFlow.script.content.get, bindings)

          // TODO Whether to notify an online monitoring system could be configured here
          logger.debug(s"engine: $engine, event: $event, result data: $data")
          EventDataSimple(data)
        })
      .runWith(sink)
      .onComplete {
        case Success(result) => promise.success(SqlEtlResult(result))
        case Failure(e)      => promise.failure(e)
      }

    new EtlWorkflowExecution(promise, () => ())
  }

  private def dataSource(connector: Connector, rdpSystem: RdpSystem): Source[EventData, NotUsed] =
    rdpSystem.streamFactories.get(connector.`type`.toString) match {
      case Some(b) => b.buildSource(connector, graphSource)
      case _       => throw new EtlGraphException(s"unknown Connector: $connector")
    }

  private def dataSink(connector: Connector, rdpSystem: RdpSystem): Sink[EventData, Future[JdbcSinkResult]] =
    rdpSystem.streamFactories.get(connector.`type`.toString) match {
      case Some(b) => b.buildSink(connector, graphSink)
      case _       => throw new EtlGraphException(s"unknown Connector: $connector")
    }
}
Example 40
Source File: EtlWorkflow.scala From fusion-data with Apache License 2.0 | 5 votes |
package mass.rdp.etl

import java.nio.file.Path

import mass.connector.Connector
import mass.rdp.RdpSystem
import mass.rdp.etl.graph.{ EtlGraph, EtlGraphException, EtlGraphImpl, EtlGraphXmlParserFactory }
import mass.core.workflow.Workflow

import scala.collection.immutable
import scala.util.{ Failure, Try }
import scala.xml.{ Elem, XML }

case class EtlWorkflow(connectors: immutable.Seq[Connector], graph: EtlGraph, rdpSystem: RdpSystem)
    extends Workflow[EtlResult]
    with AutoCloseable {
  override def close(): Unit = connectors.foreach(_.close())

  override def run(): EtlWorkflowExecution = graph.run(connectors, rdpSystem)
}

object EtlWorkflow {
  def fromFile(path: Path, rdpSystem: RdpSystem): Try[EtlWorkflow] =
    fromXML(XML.loadFile(path.toFile), rdpSystem)

  def fromString(workflow: String, rdpSystem: RdpSystem): Try[EtlWorkflow] =
    fromXML(XML.loadString(workflow), rdpSystem)

  def fromXML(workflow: Elem, rdpSystem: RdpSystem): Try[EtlWorkflow] = {
    require(workflow.head.label == "workflow", s"workflow must be the root element. elem: $workflow")

    val connectors = (workflow \ "connectors" \ "connector").flatMap(node => rdpSystem.connectorSystem.fromXML(node))
    rdpSystem.graphParserFactories.get("xml") match {
      case Some(factory) =>
        factory
          .asInstanceOf[EtlGraphXmlParserFactory]
          .build((workflow \ "graph").head)
          .parse()
          .map(setting => new EtlWorkflow(connectors, EtlGraphImpl(setting), rdpSystem))
      case _ =>
        Failure(new EtlGraphException("EtlGraphParserFactory type: xml does not exist"))
    }
  }
}
Example 41
Source File: SQLSchemaTest.scala From fusion-data with Apache License 2.0 | 5 votes |
package mass.connector.sql.schema

import fusion.testkit.FusionWordSpecLike
import mass.connector.sql.TestSchema
import org.scalatest.BeforeAndAfterAll

import scala.collection.immutable

class SQLSchemaTest extends FusionWordSpecLike with BeforeAndAfterAll {
  override protected def afterAll(): Unit = {
    TestSchema.postgres.close()
    super.afterAll()
  }

  "schema-postgres" should {
    val schema = PostgresSchema(TestSchema.postgres)
    var tables = immutable.Seq.empty[TableInfo]

    "listTable" in {
      tables = schema.listTable("public")
      tables should not be empty
      val table = tables.head
      table.schemaName shouldBe "public"
      tables.foreach(println)
    }

    "listColumn" in {
      val columns = schema.listColumn(tables.head.tableName, tables.head.schemaName)
      columns should not be empty
      columns.foreach(println)
    }
  }
}
Example 42
Source File: Codecs.scala From akka-grpc with Apache License 2.0 | 5 votes |
package akka.grpc.internal

import akka.http.javadsl.{ model => jm }
import akka.grpc.GrpcServiceException
import akka.grpc.scaladsl.headers.{ `Message-Accept-Encoding`, `Message-Encoding` }
import io.grpc.Status

import scala.collection.immutable
import scala.util.{ Failure, Success, Try }

object Codecs {
  // TODO should this list be made user-extensible?
  val supportedCodecs = immutable.Seq(Gzip, Identity)

  private val supported = supportedCodecs.map(_.name)
  private val byName = supportedCodecs.map(c => c.name -> c).toMap

  def detect(encoding: Option[String]): Try[Codec] =
    encoding
      .map { codec =>
        byName
          .get(codec)
          .map(Success(_))
          .getOrElse(Failure(new GrpcServiceException(
            Status.UNIMPLEMENTED.withDescription(s"Message Encoding $encoding is not supported"))))
      }
      .getOrElse(Success(Identity))
}
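A brief usage sketch for detect as defined above, assuming Gzip.name is "gzip" and Identity.name is "identity"; expected results are shown in comments:

import akka.grpc.internal.Codecs
import scala.util.{ Failure, Success }

// Known encodings resolve to their codec; an absent header defaults to Identity.
Codecs.detect(Some("gzip"))  // Success(Gzip)
Codecs.detect(None)          // Success(Identity)

// Unknown encodings fail with a GrpcServiceException carrying UNIMPLEMENTED.
Codecs.detect(Some("snappy")) match {
  case Success(codec) => println(s"using codec: ${codec.name}")
  case Failure(e)     => println(s"unsupported: ${e.getMessage}")
}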
Example 43
Source File: ScalaMarshallersCodeGenerator.scala From akka-grpc with Apache License 2.0 | 5 votes |
package akka.grpc.gen.scaladsl

import scala.collection.immutable
import akka.grpc.gen.{ BuildInfo, CodeGenerator, Logger }
import com.google.protobuf.compiler.PluginProtos.CodeGeneratorResponse
import protocbridge.Artifact
import templates.ScalaCommon.txt._
import com.github.ghik.silencer.silent

trait ScalaMarshallersCodeGenerator extends ScalaCodeGenerator {
  def name = "akka-grpc-scaladsl-server-marshallers"

  override def perServiceContent = Set(generateMarshalling)

  override def suggestedDependencies =
    (scalaBinaryVersion: CodeGenerator.ScalaBinaryVersion) =>
      Artifact("com.typesafe.akka", s"akka-http_${scalaBinaryVersion.prefix}", BuildInfo.akkaHttpVersion) +:
      super.suggestedDependencies(scalaBinaryVersion)

  def generateMarshalling(
      @silent("never used") logger: Logger,
      service: Service): immutable.Seq[CodeGeneratorResponse.File] = {
    val b = CodeGeneratorResponse.File.newBuilder()
    b.setContent(Marshallers(service).body)
    b.setName(s"${service.packageDir}/${service.name}Marshallers.scala")
    immutable.Seq(b.build)
  }
}

object ScalaMarshallersCodeGenerator extends ScalaMarshallersCodeGenerator
Example 44
Source File: Service.scala From akka-grpc with Apache License 2.0 | 5 votes |
package akka.grpc.gen.scaladsl

import scala.collection.immutable
import scala.collection.JavaConverters._
import com.google.protobuf.Descriptors._
import scalapb.compiler.{ DescriptorImplicits, GeneratorParams }

case class Service(
    descriptor: String,
    packageName: String,
    name: String,
    grpcName: String,
    methods: immutable.Seq[Method],
    serverPowerApi: Boolean,
    usePlayActions: Boolean,
    comment: Option[String] = None) {
  def serializers: Seq[Serializer] = (methods.map(_.deserializer) ++ methods.map(_.serializer)).distinct
  def packageDir = packageName.replace('.', '/')
}

object Service {
  def apply(
      generatorParams: GeneratorParams,
      fileDesc: FileDescriptor,
      serviceDescriptor: ServiceDescriptor,
      serverPowerApi: Boolean,
      usePlayActions: Boolean): Service = {
    implicit val ops = new DescriptorImplicits(generatorParams, fileDesc.getDependencies.asScala.toList :+ fileDesc)
    import ops._

    val serviceClassName = serviceDescriptor.getName

    Service(
      fileDesc.fileDescriptorObject.fullName + ".javaDescriptor",
      fileDesc.scalaPackage.fullName,
      serviceClassName,
      (if (fileDesc.getPackage.isEmpty) "" else fileDesc.getPackage + ".") + serviceDescriptor.getName,
      serviceDescriptor.getMethods.asScala.map(method => Method(method)).toList,
      serverPowerApi,
      usePlayActions,
      serviceDescriptor.comment)
  }
}
Example 45
Source File: JavaClientCodeGenerator.scala From akka-grpc with Apache License 2.0 | 5 votes |
package akka.grpc.gen.javadsl

import scala.collection.immutable
import akka.grpc.gen.{ BuildInfo, CodeGenerator, Logger }
import com.google.protobuf.compiler.PluginProtos.CodeGeneratorResponse
import protocbridge.Artifact
import templates.JavaClient.txt.{ Client, ClientPowerApi }

trait JavaClientCodeGenerator extends JavaCodeGenerator {
  override def name = "akka-grpc-javadsl-client"

  override def perServiceContent: Set[(Logger, Service) => immutable.Seq[CodeGeneratorResponse.File]] =
    super.perServiceContent + generateInterface + generateRaw

  def generateInterface(logger: Logger, service: Service): immutable.Seq[CodeGeneratorResponse.File] = {
    val b = CodeGeneratorResponse.File.newBuilder()
    b.setContent(Client(service).body)
    val clientPath = s"${service.packageDir}/${service.name}Client.java"
    b.setName(clientPath)
    logger.info(s"Generating Akka gRPC Client [${service.packageName}.${service.name}]")
    immutable.Seq(b.build)
  }

  def generateRaw(logger: Logger, service: Service): immutable.Seq[CodeGeneratorResponse.File] = {
    val b = CodeGeneratorResponse.File.newBuilder()
    b.setContent(ClientPowerApi(service).body)
    val clientPath = s"${service.packageDir}/${service.name}ClientPowerApi.java"
    b.setName(clientPath)
    logger.info(s"Generating Akka gRPC Lifted Client interface [${service.packageName}.${service.name}]")
    immutable.Seq(b.build)
  }

  override val suggestedDependencies = (scalaBinaryVersion: CodeGenerator.ScalaBinaryVersion) =>
    Seq(
      Artifact(
        BuildInfo.organization,
        BuildInfo.runtimeArtifactName + "_" + scalaBinaryVersion.prefix,
        BuildInfo.version),
      // TODO: remove grpc-stub dependency once we have an akka-http based client #193
      Artifact("io.grpc", "grpc-stub", BuildInfo.grpcVersion))
}

object JavaClientCodeGenerator extends JavaClientCodeGenerator
Example 46
Source File: JavaServerCodeGenerator.scala From akka-grpc with Apache License 2.0 | 5 votes |
package akka.grpc.gen.javadsl

import scala.collection.immutable
import akka.grpc.gen.{ BuildInfo, CodeGenerator, Logger }
import com.google.protobuf.compiler.PluginProtos.CodeGeneratorResponse
import protocbridge.Artifact
import templates.JavaServer.txt.{ Handler, PowerApiInterface }

class JavaServerCodeGenerator extends JavaCodeGenerator {
  override def name = "akka-grpc-javadsl-server"

  override def perServiceContent: Set[(Logger, Service) => immutable.Seq[CodeGeneratorResponse.File]] =
    super.perServiceContent + generatePlainHandlerFactory + generatePowerHandlerFactory + generatePowerService

  override val suggestedDependencies = (scalaBinaryVersion: CodeGenerator.ScalaBinaryVersion) =>
    Seq(
      Artifact(
        BuildInfo.organization,
        BuildInfo.runtimeArtifactName + "_" + scalaBinaryVersion.prefix,
        BuildInfo.version))

  val generatePlainHandlerFactory: (Logger, Service) => immutable.Seq[CodeGeneratorResponse.File] =
    (logger, service) => {
      val b = CodeGeneratorResponse.File.newBuilder()
      b.setContent(Handler(service, powerApis = false).body)
      val serverPath = s"${service.packageDir}/${service.name}HandlerFactory.java"
      b.setName(serverPath)
      logger.info(s"Generating Akka gRPC service handler for ${service.packageName}.${service.name}")
      immutable.Seq(b.build)
    }

  val generatePowerHandlerFactory: (Logger, Service) => immutable.Seq[CodeGeneratorResponse.File] =
    (logger, service) => {
      if (service.serverPowerApi) {
        val b = CodeGeneratorResponse.File.newBuilder()
        b.setContent(Handler(service, powerApis = true).body)
        val serverPath = s"${service.packageDir}/${service.name}PowerApiHandlerFactory.java"
        b.setName(serverPath)
        logger.info(s"Generating Akka gRPC service power API handler for ${service.packageName}.${service.name}")
        immutable.Seq(b.build)
      } else immutable.Seq.empty
    }

  val generatePowerService: (Logger, Service) => immutable.Seq[CodeGeneratorResponse.File] =
    (logger, service) => {
      if (service.serverPowerApi) {
        val b = CodeGeneratorResponse.File.newBuilder()
        b.setContent(PowerApiInterface(service).body)
        b.setName(s"${service.packageDir}/${service.name}PowerApi.java")
        logger.info(s"Generating Akka gRPC service power interface for [${service.packageName}.${service.name}]")
        immutable.Seq(b.build)
      } else immutable.Seq.empty
    }
}

object JavaServerCodeGenerator extends JavaServerCodeGenerator
Example 47
Source File: JavaCodeGenerator.scala From akka-grpc with Apache License 2.0 | 5 votes |
package akka.grpc.gen.javadsl

import akka.grpc.gen.{ BuildInfo, CodeGenerator, Logger }
import com.google.protobuf.Descriptors._
import com.google.protobuf.compiler.PluginProtos.{ CodeGeneratorRequest, CodeGeneratorResponse }
import protocbridge.Artifact
import templates.JavaCommon.txt.ApiInterface

import scala.collection.JavaConverters._
import scala.collection.immutable
import com.github.ghik.silencer.silent

abstract class JavaCodeGenerator extends CodeGenerator {
  def staticContent(@silent("never used") logger: Logger): Set[CodeGeneratorResponse.File] = Set.empty
  def staticContent(
      @silent("never used") logger: Logger,
      @silent("never used") allServices: Seq[Service]): Set[CodeGeneratorResponse.File] = Set.empty

  override def run(request: CodeGeneratorRequest, logger: Logger): CodeGeneratorResponse = {
    val b = CodeGeneratorResponse.newBuilder

    // generate services code here, the data types we want to leave to scalapb
    val fileDescByName: Map[String, FileDescriptor] =
      request.getProtoFileList.asScala.foldLeft[Map[String, FileDescriptor]](Map.empty) {
        case (acc, fp) =>
          val deps = fp.getDependencyList.asScala.map(acc).toArray
          acc + (fp.getName -> FileDescriptor.buildFrom(fp, deps))
      }

    // Currently per-invocation options, intended to become per-service options eventually
    // https://github.com/akka/akka-grpc/issues/451
    val params = request.getParameter.toLowerCase
    val serverPowerApi = params.contains("server_power_apis") && !params.contains("server_power_apis=false")
    val usePlayActions = params.contains("use_play_actions") && !params.contains("use_play_actions=false")

    val services = (for {
      file <- request.getFileToGenerateList.asScala
      fileDesc = fileDescByName(file)
      serviceDesc <- fileDesc.getServices.asScala
    } yield Service(fileDesc, serviceDesc, serverPowerApi, usePlayActions)).toVector

    for {
      service <- services
      generator <- perServiceContent
      generated <- generator(logger, service)
    } {
      b.addFile(generated)
    }

    staticContent(logger).map(b.addFile)
    staticContent(logger, services).map(b.addFile)
    b.build()
  }

  def generateServiceInterface(service: Service): CodeGeneratorResponse.File = {
    val b = CodeGeneratorResponse.File.newBuilder()
    b.setContent(ApiInterface(service).body)
    b.setName(s"${service.packageDir}/${service.name}.java")
    b.build
  }

  override val suggestedDependencies = (scalaBinaryVersion: CodeGenerator.ScalaBinaryVersion) =>
    Seq(
      Artifact(
        BuildInfo.organization,
        BuildInfo.runtimeArtifactName + "_" + scalaBinaryVersion.prefix,
        BuildInfo.version))
}
Example 48
Source File: Service.scala From akka-grpc with Apache License 2.0 | 5 votes |
package akka.grpc.gen.javadsl

import com.google.protobuf.Descriptors.{ FileDescriptor, ServiceDescriptor }
import scalapb.compiler.{ DescriptorImplicits, GeneratorParams }

import scala.annotation.tailrec
import scala.collection.JavaConverters._
import scala.collection.immutable

final case class Service(
    descriptor: String,
    packageName: String,
    name: String,
    grpcName: String,
    methods: immutable.Seq[Method],
    serverPowerApi: Boolean,
    usePlayActions: Boolean,
    comment: Option[String] = None) {
  def serializers: Seq[Serializer] = (methods.map(_.deserializer) ++ methods.map(_.serializer)).distinct
  def packageDir = packageName.replace('.', '/')
}

object Service {
  def apply(
      fileDesc: FileDescriptor,
      serviceDescriptor: ServiceDescriptor,
      serverPowerApi: Boolean,
      usePlayActions: Boolean): Service = {
    val comment = {
      // Use ScalaPB's implicit classes to avoid replicating the logic for comment extraction
      // Note that this may be problematic if/when ScalaPB uses scala-specific stuff to do that
      implicit val ops =
        new DescriptorImplicits(GeneratorParams(), fileDesc.getDependencies.asScala.toList :+ fileDesc.getFile)
      import ops._
      serviceDescriptor.comment
    }

    val packageName =
      if (fileDesc.getOptions.hasJavaPackage) fileDesc.getOptions.getJavaPackage
      else fileDesc.getPackage

    Service(
      outerClass(fileDesc) + ".getDescriptor()",
      packageName,
      serviceDescriptor.getName,
      (if (fileDesc.getPackage.isEmpty) "" else fileDesc.getPackage + ".") + serviceDescriptor.getName,
      serviceDescriptor.getMethods.asScala.toList.map(method => Method(method)),
      serverPowerApi,
      usePlayActions,
      comment)
  }

  private[javadsl] def basename(name: String): String =
    name.replaceAll("^.*/", "").replaceAll("\\.[^\\.]*$", "")

  private[javadsl] def outerClass(t: FileDescriptor) =
    if (t.toProto.getOptions.hasJavaOuterClassname) t.toProto.getOptions.getJavaOuterClassname
    else {
      val className = Service.toCamelCase(protoName(t))
      if (hasConflictingClassName(t, className)) className + "OuterClass"
      else className
    }

  private def hasConflictingClassName(d: FileDescriptor, className: String): Boolean =
    d.findServiceByName(className) != null ||
    d.findMessageTypeByName(className) != null ||
    d.findEnumTypeByName(className) != null

  private[javadsl] def protoName(t: FileDescriptor) =
    t.getName.replaceAll("\\.proto", "").split("/").last

  private[javadsl] def toCamelCase(name: String): String =
    if (name.isEmpty) ""
    else toCamelCaseRec(name, 0, new StringBuilder(name.length), true)

  @tailrec
  private def toCamelCaseRec(in: String, idx: Int, out: StringBuilder, capNext: Boolean): String = {
    if (idx >= in.length) out.toString
    else {
      val head = in.charAt(idx)
      if (head.isLetter) toCamelCaseRec(in, idx + 1, out.append(if (capNext) head.toUpper else head), false)
      else toCamelCaseRec(in, idx + 1, if (head.isDigit) out.append(head) else out, true)
    }
  }
}
Example 49
Source File: JavaInterfaceCodeGenerator.scala From akka-grpc with Apache License 2.0 | 5 votes |
package akka.grpc.gen.javadsl

import scala.collection.immutable
import akka.grpc.gen.Logger
import com.google.protobuf.compiler.PluginProtos.CodeGeneratorResponse
import templates.JavaCommon.txt.ApiInterface

object JavaInterfaceCodeGenerator extends JavaCodeGenerator {
  override def name = "akka-grpc-javadsl-interface"

  override def perServiceContent: Set[(Logger, Service) => immutable.Seq[CodeGeneratorResponse.File]] =
    super.perServiceContent + generateServiceFile

  val generateServiceFile: (Logger, Service) => immutable.Seq[CodeGeneratorResponse.File] =
    (logger, service) => {
      val b = CodeGeneratorResponse.File.newBuilder()
      b.setContent(ApiInterface(service).body)
      b.setName(s"${service.packageDir}/${service.name}.java")
      logger.info(s"Generating Akka gRPC service interface for [${service.packageName}.${service.name}]")
      immutable.Seq(b.build)
    }
}
Example 50
Source File: GrpcMarshallingSpec.scala From akka-grpc with Apache License 2.0 | 5 votes |
package akka.grpc.scaladsl

import akka.actor.ActorSystem
import akka.grpc.internal.{ AbstractGrpcProtocol, GrpcProtocolNative, Gzip }
import akka.grpc.scaladsl.headers.`Message-Encoding`
import akka.http.scaladsl.model.{ HttpEntity, HttpRequest }
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.Sink
import io.grpc.{ Status, StatusException }
import io.grpc.testing.integration.messages.{ BoolValue, SimpleRequest }
import io.grpc.testing.integration.test.TestService
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

import scala.collection.immutable
import scala.concurrent.{ Await, Future }
import scala.concurrent.duration._

class GrpcMarshallingSpec extends AnyWordSpec with Matchers {
  "The scaladsl GrpcMarshalling" should {
    val message = SimpleRequest(responseCompressed = Some(BoolValue(true)))
    implicit val serializer = TestService.Serializers.SimpleRequestSerializer
    implicit val system = ActorSystem()
    implicit val mat = ActorMaterializer()
    val awaitTimeout = 10.seconds
    val zippedBytes =
      AbstractGrpcProtocol.encodeFrameData(
        AbstractGrpcProtocol.fieldType(Gzip),
        Gzip.compress(serializer.serialize(message)))

    "correctly unmarshal a zipped object" in {
      val request = HttpRequest(
        headers = immutable.Seq(`Message-Encoding`("gzip")),
        entity = HttpEntity.Strict(GrpcProtocolNative.contentType, zippedBytes))

      val marshalled = Await.result(GrpcMarshalling.unmarshal(request), 10.seconds)
      marshalled.responseCompressed should be(Some(BoolValue(true)))
    }

    "correctly unmarshal a zipped stream" in {
      val request = HttpRequest(
        headers = immutable.Seq(`Message-Encoding`("gzip")),
        entity = HttpEntity.Strict(GrpcProtocolNative.contentType, zippedBytes ++ zippedBytes))

      val stream = Await.result(GrpcMarshalling.unmarshalStream(request), 10.seconds)
      val items = Await.result(stream.runWith(Sink.seq), 10.seconds)
      items(0).responseCompressed should be(Some(BoolValue(true)))
      items(1).responseCompressed should be(Some(BoolValue(true)))
    }

    // https://github.com/grpc/grpc/blob/master/doc/compression.md#compression-method-asymmetry-between-peers
    // test case 6
    "fail with INTERNAL when the compressed bit is on but the encoding is identity" in {
      val request = HttpRequest(
        headers = immutable.Seq(`Message-Encoding`("identity")),
        entity = HttpEntity.Strict(GrpcProtocolNative.contentType, zippedBytes))

      assertFailure(GrpcMarshalling.unmarshal(request), Status.Code.INTERNAL, "encoding")
    }

    // https://github.com/grpc/grpc/blob/master/doc/compression.md#compression-method-asymmetry-between-peers
    // test case 6
    "fail with INTERNAL when the compressed bit is on but the encoding is missing" in {
      val request = HttpRequest(entity = HttpEntity.Strict(GrpcProtocolNative.contentType, zippedBytes))

      assertFailure(GrpcMarshalling.unmarshal(request), Status.Code.INTERNAL, "encoding")
    }

    def assertFailure(failure: Future[_], expectedStatusCode: Status.Code, expectedMessageFragment: String): Unit = {
      val e = Await.result(failure.failed, awaitTimeout).asInstanceOf[StatusException]
      e.getStatus.getCode should be(expectedStatusCode)
      e.getStatus.getDescription should include(expectedMessageFragment)
    }
  }
}
Example 51
Source File: AkkaDiscoveryNameResolverProviderSpec.scala From akka-grpc with Apache License 2.0 | 5 votes |
package akka.grpc.internal

import java.net.URI
import java.net.InetSocketAddress
import java.util.{ List => JList }

import scala.concurrent.ExecutionContext.Implicits._
import scala.concurrent.Future
import scala.concurrent.Promise
import scala.concurrent.duration._
import scala.collection.immutable

import io.grpc.Attributes
import io.grpc.NameResolver.Listener
import io.grpc.EquivalentAddressGroup
import akka.actor.ActorSystem
import akka.discovery.Lookup
import akka.discovery.ServiceDiscovery
import akka.discovery.ServiceDiscovery.Resolved
import akka.discovery.ServiceDiscovery.ResolvedTarget
import akka.testkit.TestKit
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.matchers.should.Matchers
import org.scalatest.time.{ Millis, Seconds, Span }
import org.scalatest.wordspec.AnyWordSpecLike

class AkkaDiscoveryNameResolverProviderSpec
    extends TestKit(ActorSystem())
    with AnyWordSpecLike
    with Matchers
    with ScalaFutures {

  implicit override val patienceConfig =
    PatienceConfig(timeout = scaled(Span(2, Seconds)), interval = scaled(Span(5, Millis)))

  "AkkaDiscoveryNameResolverProviderSpec" should {
    "provide a NameResolver that uses the supplied serviceName" in {
      val serviceName = "testServiceName"
      val discovery = new ServiceDiscovery() {
        override def lookup(lookup: Lookup, resolveTimeout: FiniteDuration): Future[Resolved] = {
          lookup.serviceName should be(serviceName)
          Future.successful(Resolved(serviceName, immutable.Seq(ResolvedTarget("10.0.0.3", Some(4312), None))))
        }
      }
      val provider = new AkkaDiscoveryNameResolverProvider(
        discovery,
        443,
        portName = None,
        protocol = None,
        resolveTimeout = 3.seconds)

      val resolver = provider.newNameResolver(new URI("//" + serviceName), null)

      val addressGroupsPromise = Promise[List[EquivalentAddressGroup]]()
      val listener = new Listener() {
        override def onAddresses(addresses: JList[EquivalentAddressGroup], attributes: Attributes): Unit = {
          import scala.collection.JavaConverters._
          addressGroupsPromise.success(addresses.asScala.toList)
        }
        override def onError(error: io.grpc.Status): Unit = ???
      }
      resolver.start(listener)

      val addressGroups = addressGroupsPromise.future.futureValue
      addressGroups.size should be(1)
      val addresses = addressGroups(0).getAddresses()
      addresses.size should be(1)
      val address = addresses.get(0).asInstanceOf[InetSocketAddress]
      address.getHostString() should be("10.0.0.3")
      address.getPort() should be(4312)
    }
  }
}
Example 52
Source File: CodecsSpec.scala From akka-grpc with Apache License 2.0 | 5 votes |
package akka.grpc

import akka.grpc.internal.{ Codecs, Gzip, Identity }
import akka.grpc.scaladsl.headers
import akka.http.scaladsl.model.HttpRequest
import io.grpc.Status
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
import org.scalatest.TryValues

import scala.collection.immutable

class CodecsSpec extends AnyWordSpec with Matchers with TryValues {

  private def accept(encodings: String*): HttpRequest =
    HttpRequest(headers = immutable.Seq(headers.`Message-Accept-Encoding`(encodings.mkString(","))))

  private def enc(encodings: String*): HttpRequest =
    HttpRequest(headers = immutable.Seq(headers.`Message-Encoding`(encodings.mkString(","))))

  "Negotiating message encoding with remote client" should {
    "default to Identity if no encoding provided" in {
      Codecs.negotiate(HttpRequest()) should be(Identity)
    }

    "accept explicit Identity" in {
      Codecs.negotiate(accept(Identity.name)) should be(Identity)
    }

    "accept explicit Gzip" in {
      Codecs.negotiate(accept(Gzip.name)) should be(Gzip)
    }

    "use client preference with multiple known encodings" in {
      Codecs.negotiate(accept(Gzip.name, Identity.name)) should be(Gzip)
      Codecs.negotiate(accept(Identity.name, Gzip.name)) should be(Identity)
    }

    "use first known encoding" in {
      Codecs.negotiate(accept("xxxxx", Gzip.name, Identity.name)) should be(Gzip)
    }

    "use default encoding if unknown encodings specified" in {
      Codecs.negotiate(accept("xxxxx")) should be(Identity)
    }
  }

  "Detecting message encoding from remote" should {
    "default to Identity if not specified" in {
      Codecs.detect(HttpRequest()).success.value should be(Identity)
    }

    "accept explicit Identity" in {
      Codecs.detect(enc(Identity.name)).success.value should be(Identity)
    }

    "accept explicit Gzip" in {
      Codecs.detect(enc(Gzip.name)).success.value should be(Gzip)
    }

    "fail with unknown encoding" in {
      val detected = Codecs.detect(enc("xxxxxxx"))
      detected.failure.exception shouldBe a[GrpcServiceException]
      detected.failure.exception.asInstanceOf[GrpcServiceException].status.getCode should be(
        Status.UNIMPLEMENTED.getCode)
    }
  }
}
Example 53
Source File: ScalaServerCodeGenerator.scala From akka-grpc with Apache License 2.0 | 5 votes |
package akka.grpc.gen.scaladsl

import scala.collection.immutable
import akka.grpc.gen.Logger
import com.google.protobuf.compiler.PluginProtos.CodeGeneratorResponse
import templates.ScalaServer.txt.{ Handler, PowerApiTrait }

class ScalaServerCodeGenerator extends ScalaCodeGenerator {
  override def name = "akka-grpc-scaladsl-server"

  override def perServiceContent =
    super.perServiceContent + generatePlainHandler + generatePowerHandler + generatePowerApiTrait

  val generatePlainHandler: (Logger, Service) => immutable.Seq[CodeGeneratorResponse.File] =
    (logger, service) => {
      val b = CodeGeneratorResponse.File.newBuilder()
      b.setContent(Handler(service, powerApis = false).body)
      b.setName(s"${service.packageDir}/${service.name}Handler.scala")
      logger.info(s"Generating Akka gRPC service handler for ${service.packageName}.${service.name}")
      immutable.Seq(b.build)
    }

  val generatePowerHandler: (Logger, Service) => immutable.Seq[CodeGeneratorResponse.File] =
    (logger, service) => {
      if (service.serverPowerApi) {
        val b = CodeGeneratorResponse.File.newBuilder()
        b.setContent(Handler(service, powerApis = true).body)
        b.setName(s"${service.packageDir}/${service.name}PowerApiHandler.scala")
        logger.info(s"Generating Akka gRPC service power API handler for ${service.packageName}.${service.name}")
        immutable.Seq(b.build)
      } else immutable.Seq.empty
    }

  val generatePowerApiTrait: (Logger, Service) => immutable.Seq[CodeGeneratorResponse.File] =
    (logger, service) => {
      if (service.serverPowerApi) {
        val b = CodeGeneratorResponse.File.newBuilder()
        b.setContent(PowerApiTrait(service).body)
        b.setName(s"${service.packageDir}/${service.name}PowerApi.scala")
        logger.info(s"Generating Akka gRPC service power API interface for ${service.packageName}.${service.name}")
        immutable.Seq(b.build)
      } else immutable.Seq.empty
    }
}

object ScalaServerCodeGenerator extends ScalaServerCodeGenerator
Example 54
Source File: GrpcResponseHelpers.scala From akka-grpc with Apache License 2.0 | 5 votes |
package akka.grpc.internal

import akka.NotUsed
import akka.actor.ActorSystem
import akka.actor.ClassicActorSystemProvider
import akka.annotation.InternalApi
import akka.grpc.{ ProtobufSerializer, Trailers }
import akka.grpc.GrpcProtocol.{ GrpcProtocolWriter, TrailerFrame }
import akka.grpc.scaladsl.{ headers, GrpcExceptionHandler }
import akka.http.scaladsl.model.{ HttpEntity, HttpResponse }
import akka.http.scaladsl.model.HttpEntity.ChunkStreamPart
import akka.stream.Materializer
import akka.stream.scaladsl.Source
import io.grpc.Status

import scala.collection.immutable
import scala.concurrent.{ ExecutionContext, Future }

@InternalApi // consumed from generated classes so cannot be private
object GrpcResponseHelpers {
  def apply[T](e: Source[T, NotUsed])(
      implicit m: ProtobufSerializer[T],
      writer: GrpcProtocolWriter,
      system: ClassicActorSystemProvider): HttpResponse =
    GrpcResponseHelpers(e, Source.single(GrpcEntityHelpers.trailer(Status.OK)))

  def apply[T](e: Source[T, NotUsed], eHandler: ActorSystem => PartialFunction[Throwable, Trailers])(
      implicit m: ProtobufSerializer[T],
      writer: GrpcProtocolWriter,
      system: ClassicActorSystemProvider): HttpResponse =
    GrpcResponseHelpers(e, Source.single(GrpcEntityHelpers.trailer(Status.OK)), eHandler)

  def apply[T](e: Source[T, NotUsed], status: Future[Status])(
      implicit m: ProtobufSerializer[T],
      mat: Materializer,
      writer: GrpcProtocolWriter,
      system: ClassicActorSystemProvider): HttpResponse =
    GrpcResponseHelpers(e, status, GrpcExceptionHandler.defaultMapper _)

  def apply[T](
      e: Source[T, NotUsed],
      status: Future[Status],
      eHandler: ActorSystem => PartialFunction[Throwable, Trailers])(
      implicit m: ProtobufSerializer[T],
      mat: Materializer,
      writer: GrpcProtocolWriter,
      system: ClassicActorSystemProvider): HttpResponse = {
    implicit val ec: ExecutionContext = mat.executionContext
    GrpcResponseHelpers(
      e,
      Source.lazilyAsync(() => status.map(GrpcEntityHelpers.trailer)).mapMaterializedValue(_ => NotUsed),
      eHandler)
  }

  def apply[T](
      e: Source[T, NotUsed],
      trail: Source[TrailerFrame, NotUsed],
      eHandler: ActorSystem => PartialFunction[Throwable, Trailers] = GrpcExceptionHandler.defaultMapper)(
      implicit m: ProtobufSerializer[T],
      writer: GrpcProtocolWriter,
      system: ClassicActorSystemProvider): HttpResponse = {
    response(GrpcEntityHelpers(e, trail, eHandler))
  }

  private def response[T](entity: Source[ChunkStreamPart, NotUsed])(implicit writer: GrpcProtocolWriter) = {
    HttpResponse(
      headers = immutable.Seq(headers.`Message-Encoding`(writer.messageEncoding.name)),
      entity = HttpEntity.Chunked(writer.contentType, entity))
  }

  def status(trailer: Trailers)(implicit writer: GrpcProtocolWriter): HttpResponse =
    response(Source.single(writer.encodeFrame(GrpcEntityHelpers.trailer(trailer.status, trailer.metadata))))
}
Example 55
Source File: GrpcRequestHelpers.scala From akka-grpc with Apache License 2.0 | 5 votes |
package akka.grpc.internal

import akka.actor.ActorSystem
import akka.actor.ClassicActorSystemProvider
import akka.grpc.{ ProtobufSerializer, Trailers }
import akka.grpc.GrpcProtocol.GrpcProtocolWriter
import akka.http.scaladsl.model.HttpEntity.ChunkStreamPart
import akka.stream.scaladsl.Source
import akka.NotUsed
import akka.annotation.InternalApi
import akka.grpc.scaladsl.{ headers, GrpcExceptionHandler }
import akka.http.scaladsl.model.{ HttpEntity, HttpMethods, HttpRequest, Uri }
import io.grpc.Status

import scala.collection.immutable

@InternalApi
object GrpcRequestHelpers {
  def apply[T](
      uri: Uri,
      e: Source[T, NotUsed],
      eHandler: ActorSystem => PartialFunction[Throwable, Trailers] = GrpcExceptionHandler.defaultMapper)(
      implicit m: ProtobufSerializer[T],
      writer: GrpcProtocolWriter,
      system: ClassicActorSystemProvider): HttpRequest =
    request(uri, GrpcEntityHelpers(e, Source.single(GrpcEntityHelpers.trailer(Status.OK)), eHandler))

  private def request[T](uri: Uri, entity: Source[ChunkStreamPart, NotUsed])(
      implicit writer: GrpcProtocolWriter): HttpRequest = {
    HttpRequest(
      uri = uri,
      method = HttpMethods.POST,
      headers = immutable.Seq(
        headers.`Message-Encoding`(writer.messageEncoding.name),
        headers.`Message-Accept-Encoding`(Codecs.supportedCodecs.map(_.name).mkString(","))),
      entity = HttpEntity.Chunked(writer.contentType, entity))
  }
}
Example 56
Source File: headers.scala From akka-grpc with Apache License 2.0 | 5 votes |
package akka.grpc.scaladsl.headers

import akka.annotation.ApiMayChange
import akka.http.scaladsl.model.HttpHeader
import akka.http.scaladsl.model.headers.{ ModeledCustomHeader, ModeledCustomHeaderCompanion }
import akka.http.javadsl.{ model => jm }

import scala.collection.immutable
import scala.util.Try

@ApiMayChange
final class `Message-Accept-Encoding`(override val value: String)
    extends ModeledCustomHeader[`Message-Accept-Encoding`] {
  override def renderInRequests = true
  override def renderInResponses = true
  override val companion = `Message-Accept-Encoding`
  lazy val values: Array[String] = value.split(",")
}

@ApiMayChange
object `Message-Accept-Encoding` extends ModeledCustomHeaderCompanion[`Message-Accept-Encoding`] {
  override val name = "grpc-accept-encoding"
  override def parse(value: String) = Try(new `Message-Accept-Encoding`(value))

  def findIn(headers: Iterable[jm.HttpHeader]): Array[String] =
    headers.find(_.is(name)).map(_.value()).map(_.split(",")).getOrElse(Array.empty)

  // Java API; returns the same split values as the Scala overload above
  def findIn(headers: java.lang.Iterable[jm.HttpHeader]): Array[String] = {
    import scala.collection.JavaConverters._
    findIn(headers.asScala)
  }
}

final class `Status`(code: Int) extends ModeledCustomHeader[`Status`] {
  override def renderInRequests = false
  override def renderInResponses = true
  override val companion = `Status`
  override def value() = code.toString
}

object `Status` extends ModeledCustomHeaderCompanion[`Status`] {
  override val name = "grpc-status"
  override def parse(value: String) = Try(new `Status`(Integer.parseInt(value)))

  def findIn(headers: immutable.Seq[HttpHeader]): Option[Int] =
    headers.find(_.is(name)).map(h => Integer.parseInt(h.value()))
}

// TODO percent-encoding of message?
final class `Status-Message`(override val value: String) extends ModeledCustomHeader[`Status-Message`] {
  override def renderInRequests = false
  override def renderInResponses = true
  override val companion = `Status-Message`
}

object `Status-Message` extends ModeledCustomHeaderCompanion[`Status-Message`] {
  override val name = "grpc-message"
  override def parse(value: String) = Try(new `Status-Message`(value))

  def findIn(headers: immutable.Seq[HttpHeader]): Option[String] =
    headers.find(_.is(name)).map(_.value())
}
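A short usage sketch for the modeled headers above; the header values are illustrative:

import akka.grpc.scaladsl.headers._
import akka.http.scaladsl.model.HttpHeader
import scala.collection.immutable

val hs: immutable.Seq[HttpHeader] = immutable.Seq(
  new `Status`(0),
  new `Status-Message`("OK"),
  new `Message-Accept-Encoding`("gzip,identity"))

`Status`.findIn(hs)          // Some(0)
`Status-Message`.findIn(hs)  // Some("OK")

// Message-Accept-Encoding splits its value on commas.
hs.collectFirst { case h: `Message-Accept-Encoding` => h.values.toList } // Some(List(gzip, identity))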
Example 57
Source File: WebHandler.scala From akka-grpc with Apache License 2.0 | 5 votes |
package akka.grpc.scaladsl

import scala.collection.immutable
import scala.concurrent.Future

import akka.actor.ClassicActorSystemProvider
import akka.annotation.ApiMayChange
import akka.http.javadsl.{ model => jmodel }
import akka.http.scaladsl.model.{ HttpMethods, HttpRequest, HttpResponse }
import akka.http.scaladsl.model.headers._
import akka.http.scaladsl.server.Route
import akka.http.scaladsl.server.directives.MarshallingDirectives.handleWith
import ch.megard.akka.http.cors.scaladsl.CorsDirectives.cors
import ch.megard.akka.http.cors.scaladsl.model.HttpHeaderRange
import ch.megard.akka.http.cors.scaladsl.settings.CorsSettings

@ApiMayChange
object WebHandler {
  // Note: defaultCorsSettings is defined in the original source file but omitted from this excerpt.
  def grpcWebHandler(handlers: PartialFunction[HttpRequest, Future[HttpResponse]]*)(
      implicit as: ClassicActorSystemProvider,
      corsSettings: CorsSettings = defaultCorsSettings): HttpRequest => Future[HttpResponse] = {
    implicit val system = as.classicSystem
    val servicesHandler = ServiceHandler.concat(handlers: _*)
    Route.asyncHandler(cors(corsSettings) {
      handleWith(servicesHandler)
    })
  }
}
Example 58
Source File: GrpcInteropSpec.scala From akka-grpc with Apache License 2.0 | 5 votes |
package akka.grpc.interop

import akka.stream.{ Materializer, SystemMaterializer }
import akka.http.scaladsl.model._
import akka.http.scaladsl.server.RouteResult.Complete
import akka.http.scaladsl.server.{ Directive0, Directives, Route }
import io.grpc.testing.integration.TestServiceHandlerFactory
import io.grpc.testing.integration.test.TestService
import org.scalatest.WordSpec

import scala.collection.immutable

// Generated by our plugin
import io.grpc.testing.integration.test.TestServiceHandler

class GrpcInteropIoWithAkkaSpec extends GrpcInteropTests(IoGrpcJavaServerProvider, AkkaHttpClientProviderScala)
class GrpcInteropAkkaScalaWithIoSpec extends GrpcInteropTests(AkkaHttpServerProviderScala, IoGrpcJavaClientProvider)
class GrpcInteropAkkaScalaWithAkkaSpec extends GrpcInteropTests(AkkaHttpServerProviderScala, AkkaHttpClientProviderScala)
class GrpcInteropAkkaJavaWithIoSpec extends GrpcInteropTests(AkkaHttpServerProviderJava, IoGrpcJavaClientProvider)
class GrpcInteropAkkaJavaWithAkkaSpec extends GrpcInteropTests(AkkaHttpServerProviderJava, AkkaHttpClientProviderScala)

object AkkaHttpServerProviderScala extends AkkaHttpServerProvider {
  import Directives._

  val label: String = "akka-grpc server scala"
  val pendingCases = Set()

  val server = AkkaGrpcServerScala(implicit sys => {
    implicit val ec = sys.dispatcher

    val requestHandler = TestServiceHandler(new TestServiceImpl())

    val route: Route = (pathPrefix(TestService.name) & echoHeaders) { ctx =>
      requestHandler(ctx.request).map(Complete)
    }

    implicit val mat: Materializer = SystemMaterializer(sys).materializer
    Route.asyncHandler(Route.seal(route))
  })

  val echoHeaders: Directive0 = extractRequest.flatMap(request => {
    val initialHeaderToEcho = request.headers.find(_.name() == "x-grpc-test-echo-initial")
    val trailingHeaderToEcho = request.headers.find(_.name() == "x-grpc-test-echo-trailing-bin")

    mapResponseHeaders(h => h ++ initialHeaderToEcho) & mapTrailingResponseHeaders(h => h ++ trailingHeaderToEcho)
  })

  // TODO to be moved to the runtime lib (or even akka-http itself?)
  def mapTrailingResponseHeaders(f: immutable.Seq[HttpHeader] => immutable.Seq[HttpHeader]) =
    mapResponse(response =>
      response.withEntity(response.entity match {
        case HttpEntity.Chunked(contentType, data) => {
          HttpEntity.Chunked(contentType, data.map {
            case chunk: HttpEntity.Chunk    => chunk
            case last: HttpEntity.LastChunk => HttpEntity.LastChunk(last.extension, f(last.trailer))
          })
        }
        case _ =>
          throw new IllegalArgumentException("Trailing response headers are only supported on Chunked responses")
      }))
}

object AkkaHttpServerProviderJava extends AkkaHttpServerProvider {
  val label: String = "akka-grpc server java"

  val pendingCases = Set(
    "custom_metadata"
  )

  val server = new AkkaGrpcServerJava((mat, sys) => {
    TestServiceHandlerFactory.create(new JavaTestServiceImpl(mat), sys)
  })
}

object AkkaHttpClientProviderScala extends AkkaHttpClientProvider {
  val label: String = "akka-grpc scala client tester"

  def client = AkkaGrpcClientScala(settings => implicit sys => new AkkaGrpcClientTester(settings))
}
Example 59
Source File: TestServiceImpl.scala From akka-grpc with Apache License 2.0 | 5 votes |
package akka.grpc.interop

import scala.concurrent.ExecutionContext
import scala.concurrent.Future
import scala.reflect.ClassTag
import scala.collection.immutable

import akka.grpc.scaladsl.GrpcMarshalling
import akka.NotUsed
import akka.actor.ActorSystem
import akka.grpc._
import akka.stream.scaladsl.{ Flow, Source }
import akka.stream.{ Materializer, SystemMaterializer }
import com.google.protobuf.ByteString
import io.grpc.{ Status, StatusRuntimeException }

// Generated by our plugin
import io.grpc.testing.integration.test.TestService
import io.grpc.testing.integration.messages._
import io.grpc.testing.integration.empty.Empty

object TestServiceImpl {
  val parametersToResponseFlow: Flow[ResponseParameters, StreamingOutputCallResponse, NotUsed] =
    Flow[ResponseParameters]
      .map { parameters =>
        StreamingOutputCallResponse(
          Some(Payload(body = ByteString.copyFrom(new Array[Byte](parameters.size)))))
      }
}

class TestServiceImpl(implicit sys: ActorSystem) extends TestService {
  import TestServiceImpl._

  implicit val mat: Materializer = SystemMaterializer(sys).materializer
  implicit val ec: ExecutionContext = sys.dispatcher

  override def emptyCall(req: Empty) = Future.successful(Empty())

  override def unaryCall(req: SimpleRequest): Future[SimpleResponse] = {
    req.responseStatus match {
      case None =>
        Future.successful(SimpleResponse(Some(Payload(ByteString.copyFrom(new Array[Byte](req.responseSize))))))
      case Some(requestStatus) =>
        val responseStatus = Status.fromCodeValue(requestStatus.code).withDescription(requestStatus.message)
        // - Either one of the following works
        Future.failed(new GrpcServiceException(responseStatus))
        // throw new GrpcServiceException(responseStatus)
    }
  }

  override def cacheableUnaryCall(in: SimpleRequest): Future[SimpleResponse] = ???

  override def fullDuplexCall(
      in: Source[StreamingOutputCallRequest, NotUsed]): Source[StreamingOutputCallResponse, NotUsed] =
    in.map(req => {
      req.responseStatus.foreach(reqStatus =>
        throw new GrpcServiceException(Status.fromCodeValue(reqStatus.code).withDescription(reqStatus.message)))
      req
    }).mapConcat(_.responseParameters.to[immutable.Seq]).via(parametersToResponseFlow)

  override def halfDuplexCall(
      in: Source[StreamingOutputCallRequest, NotUsed]): Source[StreamingOutputCallResponse, NotUsed] = ???

  override def streamingInputCall(in: Source[StreamingInputCallRequest, NotUsed]): Future[StreamingInputCallResponse] = {
    in
      .map(_.payload.map(_.body.size).getOrElse(0))
      .runFold(0)(_ + _)
      .map { sum =>
        StreamingInputCallResponse(sum)
      }
  }

  override def streamingOutputCall(in: StreamingOutputCallRequest): Source[StreamingOutputCallResponse, NotUsed] =
    Source(in.responseParameters.to[immutable.Seq]).via(parametersToResponseFlow)

  override def unimplementedCall(in: Empty): Future[Empty] = ???
}
Example 60
Source File: nodes.scala From akka-viz with MIT License | 5 votes |
package scalatags.rx

import java.util.concurrent.atomic.AtomicReference

import org.scalajs.dom
import org.scalajs.dom.Element
import org.scalajs.dom.ext._
import org.scalajs.dom.raw.Comment
import rx._

import scala.collection.immutable
import scala.language.implicitConversions
import scalatags.JsDom.all._
import scalatags.jsdom
import scalatags.rx.ext._

trait RxNodeInstances {

  implicit class rxStringFrag(v: Rx[String])(implicit val ctx: Ctx.Owner) extends jsdom.Frag {
    def render: dom.Text = {
      val node = dom.document.createTextNode(v.now)
      v foreach { s =>
        node.replaceData(0, node.length, s)
      } attachTo node
      node
    }
  }

  implicit class bindRxElement[T <: dom.Element](e: Rx[T])(implicit val ctx: Ctx.Owner) extends Modifier {
    def applyTo(t: Element) = {
      val element = new AtomicReference(e.now)
      t.appendChild(element.get())
      e.triggerLater {
        val current = e.now
        val previous = element getAndSet current
        t.replaceChild(current, previous)
      } attachTo t
    }
  }

  implicit class bindRxElements(e: Rx[immutable.Iterable[Element]])(implicit val ctx: Ctx.Owner) extends Modifier {
    def applyTo(t: Element) = {
      val nonEmpty = e.map { t =>
        if (t.isEmpty) List(new Comment) else t
      }
      val fragments = new AtomicReference(nonEmpty.now)
      nonEmpty.now foreach t.appendChild
      nonEmpty triggerLater {
        val current = e.now
        val previous = fragments getAndSet current
        val i = t.childNodes.indexOf(previous.head)
        if (i < 0) throw new IllegalStateException("Children changed")
        0 to (previous.size - 1) foreach (_ => t.removeChild(t.childNodes.item(i)))
        if (t.childNodes.length > i) {
          val next = t.childNodes.item(i)
          current foreach (t.insertBefore(_, next))
        } else {
          current foreach t.appendChild
        }
      }
    }
  }
}
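A minimal Scala.js usage sketch for the implicit bindings above. The import path scalatags.rx.all._ is an assumption about where these implicits are aggregated; the rest follows the trait's own conversions:

import rx._
import scalatags.JsDom.all._
import scalatags.rx.all._ // assumed aggregation point for the RxNodeInstances implicits

implicit val owner: Ctx.Owner = Ctx.Owner.safe()

// rxStringFrag keeps the rendered text node in sync with the Var.
val label = Var("hello")
val node = div(label).render
label() = "world" // the text inside the div updates in place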
Example 61
Source File: PlayScalaServerCodeGenerator.scala From play-grpc with Apache License 2.0 | 5 votes |
package play.grpc.gen.scaladsl

import scala.collection.immutable
import akka.grpc.gen.Logger
import akka.grpc.gen.scaladsl.ScalaCodeGenerator
import akka.grpc.gen.scaladsl.ScalaServerCodeGenerator
import akka.grpc.gen.scaladsl.Service
import com.google.protobuf.compiler.PluginProtos.CodeGeneratorResponse
import templates.PlayScala.txt._

class PlayScalaServerCodeGenerator extends ScalaCodeGenerator {

  override def name: String = "play-grpc-server-scala"

  override def perServiceContent = super.perServiceContent + generatePlainRouter + generatePowerRouter

  private val generatePlainRouter: (Logger, Service) => immutable.Seq[CodeGeneratorResponse.File] =
    (logger, service) => {
      val b = CodeGeneratorResponse.File.newBuilder()
      if (service.usePlayActions) b.setContent(RouterUsingActions(service, powerApis = false).body)
      else b.setContent(Router(service, powerApis = false).body)
      b.setName(s"${service.packageDir}/Abstract${service.name}Router.scala")
      logger.info(s"Generating Play gRPC service play router for ${service.packageName}.${service.name}")
      immutable.Seq(b.build)
    }

  private val generatePowerRouter: (Logger, Service) => immutable.Seq[CodeGeneratorResponse.File] =
    (logger, service) => {
      if (service.serverPowerApi) {
        val b = CodeGeneratorResponse.File.newBuilder()
        if (service.usePlayActions) b.setContent(RouterUsingActions(service, powerApis = true).body)
        else b.setContent(Router(service, powerApis = true).body)
        b.setName(s"${service.packageDir}/Abstract${service.name}PowerApiRouter.scala")
        logger.info(s"Generating Akka gRPC service power API play router for ${service.packageName}.${service.name}")
        immutable.Seq(b.build)
      } else immutable.Seq.empty
    }
}

object PlayScalaServerCodeGenerator extends PlayScalaServerCodeGenerator
Example 62
Source File: PlayScalaClientCodeGenerator.scala From play-grpc with Apache License 2.0 | 5 votes |
package play.grpc.gen.scaladsl

import scala.collection.immutable
import akka.grpc.gen.Logger
import akka.grpc.gen.scaladsl.ScalaCodeGenerator
import akka.grpc.gen.scaladsl.Service
import com.google.protobuf.compiler.PluginProtos.CodeGeneratorResponse
import templates.PlayScala.txt.AkkaGrpcClientModule
import templates.PlayScala.txt.ClientProvider

import scala.annotation.tailrec

object PlayScalaClientCodeGenerator extends PlayScalaClientCodeGenerator

class PlayScalaClientCodeGenerator extends ScalaCodeGenerator {

  val ClientModuleName = "AkkaGrpcClientModule"

  override def name: String = "play-grpc-client-scala"

  override def perServiceContent = super.perServiceContent + generateClientProvider

  private val generateClientProvider: (Logger, Service) => immutable.Seq[CodeGeneratorResponse.File] =
    (logger, service) => {
      val b = CodeGeneratorResponse.File.newBuilder()
      b.setContent(ClientProvider(service).body)
      b.setName(s"${service.packageName.replace('.', '/')}/${service.name}ClientProvider.scala")
      logger.info(s"Generating Play gRPC play client provider for ${service.packageName}.${service.name}")
      immutable.Seq(b.build)
    }

  override def staticContent(logger: Logger, allServices: Seq[Service]): Set[CodeGeneratorResponse.File] = {
    if (allServices.nonEmpty) {
      val packageName = packageForSharedModuleFile(allServices)
      val b = CodeGeneratorResponse.File.newBuilder()
      b.setContent(AkkaGrpcClientModule(packageName, allServices).body)
      b.setName(s"${packageName.replace('.', '/')}/${ClientModuleName}.scala")
      val set = Set(b.build)
      logger.info(
        s"Generated [${packageName}.${ClientModuleName}] add it to play.modules.enabled and a section " +
        "with Akka gRPC client config under akka.grpc.client.\"servicepackage.ServiceName\" to be able to inject " +
        "client instances.")
      set
    } else Set.empty
  }

  def packageForSharedModuleFile(allServices: Seq[Service]): String =
    // single service or all services in single package - use that
    if (allServices.forall(_.packageName == allServices.head.packageName)) allServices.head.packageName
    else {
      // try to find longest common prefix
      allServices.tail.foldLeft(allServices.head.packageName)((packageName, service) =>
        if (packageName == service.packageName) packageName
        else commonPackage(packageName, service.packageName))
    }

  def commonPackage(a: String, b: String): String = {
    val aPackages = a.split('.')
    val bPackages = b.split('.')

    @tailrec
    def countIdenticalPackage(pos: Int): Int = {
      if ((aPackages.length < pos + 1) || (bPackages.length < pos + 1)) pos
      else if (aPackages(pos) == bPackages(pos)) countIdenticalPackage(pos + 1)
      else pos
    }

    val prefixLength = countIdenticalPackage(0)
    if (prefixLength == 0) "" // no common, use root package
    else aPackages.take(prefixLength).mkString(".")
  }
}
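Since commonPackage is public on the generator object above, the longest-common-prefix logic can be exercised in isolation; a small worked example:

import play.grpc.gen.scaladsl.PlayScalaClientCodeGenerator._

// Identical leading package segments are kept; divergent tails are dropped.
commonPackage("com.example.a", "com.example.b") // "com.example"
commonPackage("com.example", "com.example")     // "com.example"
commonPackage("com.example", "org.other")       // "" (no common prefix, falls back to the root package)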
Example 63
Source File: ReflectHelperSpec.scala From squbs with Apache License 2.0 | 5 votes |
package org.squbs.marshallers.json

import org.scalatest.{ FlatSpec, Matchers }
import org.squbs.marshallers.json.TestData._

import scala.collection.immutable
import scala.reflect.ClassTag
import scala.reflect.runtime.universe._

class ReflectHelperSpec extends FlatSpec with Matchers {

  it should "determine an object is Scala or Java" in {
    ReflectHelper.isJavaClass(fullTeamWithPrivateMembers) shouldBe true
    ReflectHelper.isJavaClass(fullTeam) shouldBe false
  }

  it should "determine a class is Scala or Java" in {
    ReflectHelper.isJavaClass(classOf[TeamWithPrivateMembers]) shouldBe true
    ReflectHelper.isJavaClass(classOf[Team]) shouldBe false
  }

  it should "convert TypeTag to Manifest for any type" in {
    def assertTypeTagToManifestConversion[T](implicit typeTag: TypeTag[T], manifest: Manifest[T]) =
      ReflectHelper.toManifest[T] shouldBe manifest

    assertTypeTagToManifestConversion[Team]
    assertTypeTagToManifestConversion[List[Employee]]
    assertTypeTagToManifestConversion[Map[String, Seq[Employee]]]
  }

  it should "find the right class given a type and TypeTag, with erasure" in {
    ReflectHelper.toClass[Team] shouldBe classOf[Team]
    ReflectHelper.toClass[immutable.Seq[Employee]] shouldBe classOf[immutable.Seq[_]]
  }

  it should "convert TypeTag to ClassTag for any type, with erasure" in {
    ReflectHelper.toClassTag[Team] shouldBe ClassTag[Team](classOf[Team])
    ReflectHelper.toClassTag[immutable.Seq[Employee]] shouldBe ClassTag[immutable.Seq[_]](classOf[immutable.Seq[_]])
  }
}
Example 64
Source File: ExecutorClusterListener.scala From marvin-engine-executor with Apache License 2.0 | 5 votes |
package org.marvin.executor.manager

import akka.cluster.Cluster
import akka.cluster.ClusterEvent._
import akka.actor.{ Actor, ActorLogging, Address }

import scala.collection.immutable

class ExecutorClusterListener(seedNodes: immutable.Seq[Address]) extends Actor with ActorLogging {

  var cluster: Cluster = _

  override def preStart(): Unit = {
    cluster = Cluster(context.system)

    log.info(s"Joining to the cluster ${context.system.name} ...")
    cluster.joinSeedNodes(seedNodes)

    log.info(s"Subscribing to the cluster ${context.system.name} ...")
    cluster.subscribe(self, initialStateMode = InitialStateAsEvents,
      classOf[MemberUp], classOf[MemberEvent], classOf[UnreachableMember])

    log.info(s"Cluster configuration done! :-P")
    log.info(s"Cluster Node Address is ${cluster.selfAddress}")
  }

  override def postStop(): Unit = {
    log.info(s"Leaving cluster ${context.system.name} :-( ...")
    cluster.unsubscribe(self)
    cluster.leave(cluster.selfAddress)
    log.info("Left cluster with success!")
  }

  def receive = {
    case MemberUp(member) =>
      log.info("Member is Up: {}", member.address)

    case UnreachableMember(member) =>
      log.info("Member detected as unreachable: {}", member)

    case MemberRemoved(member, previousStatus) =>
      log.info("Member is Removed: {} after {}", member.address, previousStatus)

    case _: MemberEvent =>
      log.info("Unknown message received ...")
  }
}
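Starting the listener requires the seed-node addresses up front; a minimal sketch in which the system name, host, and port are hypothetical:

import akka.actor.{ ActorSystem, Address, Props }
import org.marvin.executor.manager.ExecutorClusterListener
import scala.collection.immutable

val system = ActorSystem("MarvinExecutorSystem") // name must match the cluster's ActorSystem name
val seedNodes = immutable.Seq(Address("akka.tcp", "MarvinExecutorSystem", "127.0.0.1", 2552))
system.actorOf(Props(new ExecutorClusterListener(seedNodes)), "executor-cluster-listener")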
Example 65
Source File: QueryGuardTest.scala From gimel with Apache License 2.0 | 5 votes |
package com.paypal.gimel.common.query.guard

import scala.collection.immutable
import scala.concurrent.Future
import scala.util.{ Failure, Try }

import org.scalatest.FunSuite

import com.paypal.gimel.common.conf.QueryGuardConfigs
import com.paypal.gimel.common.utilities.spark.SharedSparkSession
import com.paypal.gimel.logger.Logger

class QueryGuardTest extends FunSuite with SharedSparkSession {
  override protected val additionalConfig: Map[String, String] =
    Map(QueryGuardConfigs.JOB_TTL -> "60000")

  import ConcurrentContext._

  private val logger = Logger()

  def startAppAsync(jobSleepTimeoutConfig: Map[Int, Long] = Map.empty, eachRunLength: Int = 10): Unit = {
    val scheduledJobs: immutable.Seq[Future[Unit]] =
      for (jobId <- 0 until jobSleepTimeoutConfig.size) yield {
        executeAsync(performAction(spark, jobSleepTimeoutConfig(jobId), eachRunLength))
      }
    awaitAll(scheduledJobs.toIterator)
  }

  def startAppSync(jobSleepTimeoutConfig: Map[Int, Long] = Map.empty, eachRunLength: Int = 10): Unit = {
    for (jobId <- 0 until jobSleepTimeoutConfig.size) {
      startSparkjob(spark, jobSleepTimeoutConfig(jobId), eachRunLength)
    }
  }

  test("Query guard eviction with all the tasks completing within the scheduled time interval") {
    spark.conf.set(QueryGuardConfigs.DELAY_TTL, "1000")
    val jobSleepTimeoutConfig: Map[Int, Long] = Map(0 -> 5000, 1 -> 4000, 2 -> 500, 3 -> 2500)
    logger.setLogLevel("CONSOLE")
    val queryGuard: QueryGuard = new QueryGuard(spark)
    queryGuard.start()
    startAppAsync(jobSleepTimeoutConfig)
    queryGuard.stop()
  }

  test("Query guard eviction with synchronous timed task execution") {
    spark.conf.set(QueryGuardConfigs.JOB_TTL, "3000")
    spark.conf.set(QueryGuardConfigs.DELAY_TTL, "1000")
    val jobSleepTimeoutConfig: Map[Int, Long] = Map(0 -> 500, 1 -> 2500, 2 -> 1500, 3 -> 2800)
    logger.setLogLevel("CONSOLE")
    val queryGuard: QueryGuard = new QueryGuard(spark)
    queryGuard.start()
    startAppSync(jobSleepTimeoutConfig, 1)
    queryGuard.stop()
  }

  ignore("Ignoring this test") {
    test("Query guard eviction with app fail criteria") {
      spark.conf.set(QueryGuardConfigs.JOB_TTL, "3000")
      spark.conf.set(QueryGuardConfigs.DELAY_TTL, "1000")
      val jobSleepTimeoutConfig: Map[Int, Long] = Map(0 -> 500, 1 -> 2500, 2 -> 1500, 3 -> 4000)
      logger.setLogLevel("CONSOLE")
      val queryGuard: QueryGuard = new QueryGuard(spark)
      queryGuard.start()
      Try {
        startAppSync(jobSleepTimeoutConfig, 1)
      } match {
        case Failure(exception) =>
          logger.error(exception.getMessage)
          assert(
            exception.getMessage
              .contains("cancelled as it reached the max TTL: 3 seconds, with Job start time "))
        case _ => throw new AssertionError("Expected an exception with TTL breach")
      }
      queryGuard.stop()
    }
  }

  test("looping") {
    for {
      cntr <- 1 until 23
      hr = "%02d".format(cntr)
    } {
      println(s" query where hr =$hr")
    }
  }
}
Example 66
Source File: TestStreamlets.scala From cloudflow with Apache License 2.0 | 5 votes |
package cloudflow.streamlets.descriptors

import scala.collection.immutable

import org.apache.avro.SchemaBuilder
import com.typesafe.config.Config

import cloudflow.streamlets._
import cloudflow.streamlets.avro.AvroUtil

case class Coffee(espressos: Int)

object Schemas {
  val coffeeSchema = SchemaBuilder
    .record("Coffee")
    .namespace("cloudflow.sbt")
    .fields()
    .name("expressos")
    .`type`()
    .nullable()
    .intType()
    .noDefault()
    .endRecord()
}

case object TestRuntime extends StreamletRuntime {
  override val name = "test-runtime"
}

trait TestStreamlet extends Streamlet[StreamletContext] {
  override def runtime: StreamletRuntime = TestRuntime
  def logStartRunnerMessage(buildInfo: String): Unit = ???
  override protected def createContext(config: Config): StreamletContext = ???
  override def run(context: StreamletContext): StreamletExecution = ???
}

class CoffeeIngress extends Streamlet[StreamletContext] with TestStreamlet {
  case class TestOutlet(name: String, schemaDefinition: SchemaDefinition) extends Outlet

  override val shape = StreamletShape(TestOutlet("out", AvroUtil.createSchemaDefinition(Schemas.coffeeSchema)))
  override val labels: immutable.IndexedSeq[String] = Vector("test", "coffee")
  override val description: String = "Coffee Ingress Test"
}
Example 67
Source File: StreamletShape.scala From cloudflow with Apache License 2.0 | 5 votes |
package cloudflow.streamlets

import scala.collection.immutable
import scala.annotation.varargs

trait StreamletShape {
  def inlets: immutable.IndexedSeq[Inlet]
  def outlets: immutable.IndexedSeq[Outlet]

  @varargs
  def withInlets(inlet: Inlet, inlets: Inlet*): StreamletShape

  @varargs
  def withOutlets(outlet: Outlet, outlets: Outlet*): StreamletShape
}

private[streamlets] final case class StreamletShapeImpl(
    inlets: immutable.IndexedSeq[Inlet],
    outlets: immutable.IndexedSeq[Outlet])
    extends StreamletShape {

  @varargs
  def withInlets(inlet: Inlet, inlets: Inlet*) =
    copy(inlets = inlet +: inlets.toIndexedSeq)

  @varargs
  def withOutlets(outlet: Outlet, outlets: Outlet*) =
    copy(outlets = outlet +: outlets.toIndexedSeq)
}

object StreamletShape {
  def apply(inlet: Inlet): StreamletShape =
    StreamletShapeImpl(immutable.IndexedSeq(inlet), immutable.IndexedSeq())

  def apply(outlet: Outlet): StreamletShape =
    StreamletShapeImpl(immutable.IndexedSeq(), immutable.IndexedSeq(outlet))

  def apply(inlet: Inlet, outlet: Outlet): StreamletShape =
    StreamletShapeImpl(immutable.IndexedSeq(inlet), immutable.IndexedSeq(outlet))

  @varargs
  def withInlets(inlet: Inlet, inlets: Inlet*): StreamletShapeImpl =
    StreamletShapeImpl(inlet +: inlets.toIndexedSeq, immutable.IndexedSeq())

  @varargs
  def withOutlets(outlet: Outlet, outlets: Outlet*): StreamletShapeImpl =
    StreamletShapeImpl(immutable.IndexedSeq(), outlet +: outlets.toIndexedSeq)

  // Java API
  @varargs
  def createWithInlets(inlet: Inlet, inlets: Inlet*): StreamletShapeImpl =
    withInlets(inlet, inlets: _*)

  // Java API
  @varargs
  def createWithOutlets(outlet: Outlet, outlets: Outlet*): StreamletShapeImpl =
    withOutlets(outlet, outlets: _*)
}
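A brief construction sketch, assuming the definitions above plus hypothetical Inlet/Outlet implementations (their schema definitions are elided with ??? for this sketch):

import cloudflow.streamlets._

// Hypothetical ports; any concrete Inlet/Outlet implementation would do.
case class DemoIn(name: String) extends Inlet { def schemaDefinition: SchemaDefinition = ??? }
case class DemoOut(name: String) extends Outlet { def schemaDefinition: SchemaDefinition = ??? }

val simple = StreamletShape(DemoIn("in"), DemoOut("out")) // one inlet, one outlet
val fanIn =
  StreamletShape.withInlets(DemoIn("in-0"), DemoIn("in-1")).withOutlets(DemoOut("out")) // two inlets, one outlet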
Example 68
Source File: MergeLogic.scala From cloudflow with Apache License 2.0 | 5 votes |
package cloudflow.akkastream.util.scaladsl

import scala.collection.immutable

import akka.NotUsed
import akka.stream._
import akka.stream.ClosedShape
import akka.stream.scaladsl._

import cloudflow.akkastream._
import cloudflow.streamlets._
import cloudflow.akkastream.scaladsl._
import akka.kafka.ConsumerMessage._

  // Excerpt: the enclosing MergeLogic class, which provides inletPorts and outlet, is omitted here.
  override def runnableGraph() = {
    val inlets = inletPorts.map(inlet ⇒ sourceWithCommittableContext[T](inlet)).toList
    val out = committableSink[T](outlet)

    RunnableGraph.fromGraph(GraphDSL.create() { implicit builder: GraphDSL.Builder[NotUsed] ⇒
      import GraphDSL.Implicits._
      val merge = builder.add(Merger.graph(inlets))
      merge ~> out
      ClosedShape
    })
  }
}
Example 69
Source File: StreamletDescriptor.scala From cloudflow with Apache License 2.0 | 5 votes |
package cloudflow.blueprint

import scala.collection.immutable

object StreamletDescriptor {
  private val Server = "server"
}

final case class StreamletDescriptor(
    className: String,
    runtime: StreamletRuntimeDescriptor,
    labels: immutable.IndexedSeq[String],
    description: String,
    inlets: immutable.IndexedSeq[InletDescriptor],
    outlets: immutable.IndexedSeq[OutletDescriptor],
    configParameters: immutable.IndexedSeq[ConfigParameterDescriptor],
    attributes: immutable.IndexedSeq[StreamletAttributeDescriptor] = Vector.empty,
    volumeMounts: immutable.IndexedSeq[VolumeMountDescriptor]) {
  def isIngress: Boolean = inlets.isEmpty && outlets.nonEmpty
  def isServer: Boolean = attributes.exists(_.attributeName == StreamletDescriptor.Server)
  def getAttribute(name: String): Option[StreamletAttributeDescriptor] =
    attributes.find { attrib =>
      attrib.attributeName == name
    }
}

case class StreamletRuntimeDescriptor(name: String) {
  override def toString: String = name
}

sealed trait PortDescriptor {
  def name: String
  def schema: SchemaDescriptor
  def isOutlet: Boolean
}

final case class InletDescriptor(
    name: String,
    schema: SchemaDescriptor) extends PortDescriptor {
  def isOutlet = false
}

final case class OutletDescriptor(
    name: String,
    schema: SchemaDescriptor) extends PortDescriptor {
  def isOutlet = true
}

final case class SchemaDescriptor(
    name: String,
    schema: String,
    fingerprint: String,
    format: String)

final case class StreamletAttributeDescriptor(
    attributeName: String,
    configPath: String)

final case class ConfigParameterDescriptor(
    key: String,
    description: String,
    validationType: String,
    validationPattern: Option[String],
    defaultValue: Option[String])

final case class VolumeMountDescriptor(
    name: String,
    path: String,
    accessMode: String,
    pvcName: String = "" // This string is only used in the operator and will remain empty until deserialized on the operator side
)
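The helper predicates operate purely on the descriptor's fields; a sketch with placeholder values (class name and schema contents are hypothetical):

import cloudflow.blueprint._

val coffeeOutlet = OutletDescriptor("out", SchemaDescriptor("Coffee", "{}", "fingerprint", "avro"))

val descriptor = StreamletDescriptor(
  className = "com.example.CoffeeIngress", // hypothetical
  runtime = StreamletRuntimeDescriptor("akka"),
  labels = Vector("test", "coffee"),
  description = "demo ingress",
  inlets = Vector.empty,
  outlets = Vector(coffeeOutlet),
  configParameters = Vector.empty,
  volumeMounts = Vector.empty)

descriptor.isIngress              // true: no inlets and at least one outlet
descriptor.isServer               // false: no "server" attribute declared
descriptor.getAttribute("server") // None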
Example 70
Source File: ActiveShapeModelIOTests.scala From scalismo with Apache License 2.0 | 5 votes |
package scalismo.io import java.io.File import java.net.URLDecoder import breeze.linalg.{DenseMatrix, DenseVector} import scalismo.ScalismoTestSuite import scalismo.numerics.FixedPointsUniformMeshSampler3D import scalismo.statisticalmodel.MultivariateNormalDistribution import scalismo.statisticalmodel.asm._ import scalismo.utils.Random import scala.collection.immutable class ActiveShapeModelIOTests extends ScalismoTestSuite { implicit val rng = Random(42L) private def createTmpH5File(): File = { val f = File.createTempFile("hdf5file", ".h5") f.deleteOnExit() f } private def createAsm(): ActiveShapeModel = { val statismoFile = new File(URLDecoder.decode(getClass.getResource("/facemodel.h5").getPath, "UTF-8")) val shapeModel = StatismoIO.readStatismoMeshModel(statismoFile).get val (sprofilePoints, _) = new FixedPointsUniformMeshSampler3D(shapeModel.referenceMesh, 100).sample.unzip val pointIds = sprofilePoints.map { point => shapeModel.referenceMesh.pointSet.findClosestPoint(point).id } val dists = for (i <- pointIds.indices) yield new MultivariateNormalDistribution(DenseVector.ones[Double](3) * i.toDouble, DenseMatrix.eye[Double](3) * i.toDouble) val profiles = new Profiles(pointIds.to[immutable.IndexedSeq].zip(dists).map { case (i, d) => Profile(i, d) }) new ActiveShapeModel(shapeModel, profiles, GaussianGradientImagePreprocessor(1), NormalDirectionFeatureExtractor(1, 1)) } describe("An active shape model") { it("can be written to disk and read again") { val originalAsm = createAsm() val h5file = createTmpH5File() ActiveShapeModelIO.writeActiveShapeModel(originalAsm, h5file).get val newAsm = ActiveShapeModelIO.readActiveShapeModel(h5file).get newAsm should equal(originalAsm) h5file.delete() } } }
Example 71
Source File: SearchPointSampler.scala From scalismo with Apache License 2.0 | 5 votes |
package scalismo.statisticalmodel.asm import scalismo.common.PointId import scalismo.geometry.{_3D, Point} import scalismo.mesh.TriangleMesh import scala.collection.immutable trait SearchPointSampler extends Function2[TriangleMesh[_3D], PointId, immutable.Seq[Point[_3D]]] {} case class NormalDirectionSearchPointSampler(numberOfPoints: Int, searchDistance: Float) extends SearchPointSampler { override def apply(mesh: TriangleMesh[_3D], pointId: PointId): immutable.Seq[Point[_3D]] = { val point = mesh.pointSet.point(pointId) val interval = searchDistance * 2 / numberOfPoints val normalUnnormalized = mesh.vertexNormals(pointId) val normal = normalUnnormalized * (1.0 / normalUnnormalized.norm) def samplePointsOnNormal(): immutable.Seq[Point[_3D]] = { for (i <- -numberOfPoints / 2 to numberOfPoints / 2) yield { point + normal * i * interval } } samplePointsOnNormal() } }
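The sampling arithmetic is easy to check in isolation. A minimal sketch with assumed values (numberOfPoints = 4, searchDistance = 2.0f); note that the comprehension yields numberOfPoints + 1 samples, centred on the point itself:

val numberOfPoints = 4
val searchDistance = 2.0f
val interval = searchDistance * 2 / numberOfPoints
val offsets = for (i <- -numberOfPoints / 2 to numberOfPoints / 2) yield i * interval
// offsets == Vector(-2.0f, -1.0f, 0.0f, 1.0f, 2.0f)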
Example 72
Source File: Events.scala From cloud-integration with Apache License 2.0 | 5 votes |
package com.cloudera.spark.cloud.s3.commit

import scala.collection.immutable

// NOTE: the enclosing object and the Event case class were lost in extraction;
// both are reconstructed here (field names assumed from the call site) so the
// factory compiles as-is.
case class Event(year: Int, month: Int, day: Int, dateCode: Int, monthName: String, date: String, value: String)

object Events {

  def event(year: Int, month: Int, day: Int, value: String): Event = {
    new Event(year, month, day,
      day + month * 100 + year * 10000,
      Months(month - 1)._1,
      "%04d-%02d-%02d".format(year, month, day), // fixed: the extracted text had a stray "0" after the month field
      value
    )
  }

  val Months = Array(
    ("Jan", 31),
    ("Feb", 28),
    ("Mar", 31),
    ("Apr", 30),
    ("May", 31),
    ("Jun", 30),
    ("Jul", 31),
    ("Aug", 31),
    ("Sep", 30),
    ("Oct", 31),
    ("Nov", 30),
    ("Dec", 31))
}
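The packed integer date code and the formatted string can be verified independently of the Event class; a quick sketch:

val (year, month, day) = (2017, 1, 31)
val dateCode = day + month * 100 + year * 10000          // 20170131
val dateStr = "%04d-%02d-%02d".format(year, month, day)  // "2017-01-31"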
Example 73
Source File: PlayJavaClientCodeGenerator.scala From play-grpc with Apache License 2.0 | 5 votes |
package play.grpc.gen.javadsl import akka.grpc.gen.Logger import akka.grpc.gen.javadsl.JavaCodeGenerator import akka.grpc.gen.javadsl.Service import play.grpc.gen.scaladsl.PlayScalaClientCodeGenerator import com.google.protobuf.compiler.PluginProtos.CodeGeneratorResponse import templates.PlayJava.txt.AkkaGrpcClientModule import templates.PlayJava.txt.ClientProvider import scala.annotation.tailrec import scala.collection.immutable import play.grpc.gen.scaladsl.PlayScalaClientCodeGenerator object PlayJavaClientCodeGenerator extends PlayJavaClientCodeGenerator class PlayJavaClientCodeGenerator extends JavaCodeGenerator { override def name: String = "play-grpc-client-java" override def perServiceContent = super.perServiceContent + generateClientProvider private val generateClientProvider: (Logger, Service) => immutable.Seq[CodeGeneratorResponse.File] = (logger, service) => { val b = CodeGeneratorResponse.File.newBuilder() b.setContent(ClientProvider(service).body) b.setName(s"${service.packageName.replace('.', '/')}/${service.name}ClientProvider.java") logger.info(s"Generating Play gRPC client provider for ${service.packageName}.${service.name}") immutable.Seq(b.build) } override def staticContent(logger: Logger, allServices: Seq[Service]): Set[CodeGeneratorResponse.File] = { if (allServices.nonEmpty) { val packageName = packageForSharedModuleFile(allServices) val b = CodeGeneratorResponse.File.newBuilder() b.setContent(AkkaGrpcClientModule(packageName, allServices).body) b.setName(s"${packageName.replace('.', '/')}/${PlayScalaClientCodeGenerator.ClientModuleName}.java") val set = Set(b.build) logger.info( s"Generated [${packageName}.${PlayScalaClientCodeGenerator.ClientModuleName}] add it to play.modules.enabled and a section " + "with Akka gRPC client config under akka.grpc.client.\"servicepackage.ServiceName\" to be able to inject " + "client instances.", ) set } else Set.empty } private[play] def packageForSharedModuleFile(allServices: Seq[Service]): String = // single service or all services in single package - use that if (allServices.forall(_.packageName == allServices.head.packageName)) allServices.head.packageName else { // try to find longest common prefix allServices.tail.foldLeft(allServices.head.packageName)((packageName, service) => if (packageName == service.packageName) packageName else PlayScalaClientCodeGenerator.commonPackage(packageName, service.packageName), ) } }
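The fold above relies on PlayScalaClientCodeGenerator.commonPackage, whose implementation is not part of this excerpt; a plausible standalone sketch of such a longest-common-package helper:

def commonPackage(a: String, b: String): String =
  a.split('.')
    .zip(b.split('.'))
    .takeWhile { case (x, y) => x == y }
    .map(_._1)
    .mkString(".")
// commonPackage("com.example.a", "com.example.b") == "com.example"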
Example 74
Source File: DefManipulation.scala From Elysium with MIT License | 5 votes |
package nz.daved.elysium.manipulate

import scala.collection.immutable
import scala.meta._

trait DefManipulation {

  implicit class DefImplicits(defn: Defn.Def) {

    def appendStat(stat: String): Defn.Def =
      appendStat(stat.parse[Stat].get)

    def appendStat(stat: Stat): Defn.Def = {
      defn match {
        case Defn.Def(_,_,_,_,_, Term.Block(stats)) =>
          defn.copy(body = Term.Block(stats :+ stat))
        case Defn.Def(_,_,_,_,_, singleStat) =>
          defn.copy(body = Term.Block(singleStat :: stat :: Nil))
        case _ =>
          abort("appendStat does not support this class pattern")
      }
    }

    def prependStat(stat: String): Defn.Def =
      prependStat(stat.parse[Stat].get)

    def prependStat(stat: Stat): Defn.Def = {
      defn match {
        case Defn.Def(_,_,_,_,_, Term.Block(stats)) =>
          defn.copy(body = Term.Block(stat +: stats))
        case Defn.Def(_,_,_,_,_, singleStat) =>
          defn.copy(body = Term.Block(stat :: singleStat :: Nil))
        case _ =>
          abort("prependStat does not support this class pattern")
      }
    }

    def replaceStats(stats: immutable.Seq[Stat]): Defn.Def =
      defn.copy(body = Term.Block(stats))

    def deleteStats: Defn.Def =
      defn.copy(body = Term.Block(Nil))

    // TODO: Make HasName a typeclass and use that instead
    def rename(n: String): Defn.Def =
      rename(Term.Name(n))

    def rename(n: Term.Name): Defn.Def =
      defn.copy(name = n)
  }
}

object DefManipulation extends DefManipulation
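A hedged usage sketch of the DefImplicits enrichment, assuming scalameta and its quasiquotes are on the classpath:

import scala.meta._
import nz.daved.elysium.manipulate.DefManipulation._

val defn = q"def answer = { 41 }"
val patched = defn.appendStat("println(42)").rename("loudAnswer")
// patched is roughly: def loudAnswer = { 41; println(42) }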
Example 75
Source File: StringTemplate.scala From asura with MIT License | 5 votes |
package asura.core.util

import asura.common.exceptions.InvalidStatusException
import asura.common.util.{LogUtils, StringUtils}
import com.typesafe.scalalogging.Logger
import jodd.util.StringTemplateParser

import scala.collection.{immutable, mutable}

object StringTemplate {

  // NOTE: the parser instance was elided in extraction; presumably it is a plain
  // jodd parser constructed roughly like this:
  private val uriPathParser = new StringTemplateParser()

  def uriPathParse(tpl: String, context: immutable.Map[String, String]): String = {
    uriPathParser.parse(tpl, macroName => {
      context.get(macroName) match {
        case None => throw InvalidStatusException(s"${macroName}: path template variable not found")
        case Some(value) => value
      }
    })
  }
}
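A hedged usage sketch, assuming jodd's default ${...} macro syntax and the parser instance reconstructed above:

StringTemplate.uriPathParse("/users/${id}/orders", Map("id" -> "42"))
// == "/users/42/orders"; a missing key raises InvalidStatusException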
Example 76
Source File: HttpParser.scala From asura with MIT License | 5 votes |
package asura.core.http import akka.http.scaladsl.model.{HttpMethods => AkkaHttpMethods, _} import asura.core.ErrorMessages import asura.core.auth.AuthManager import asura.core.concurrent.ExecutionContextManager.sysGlobal import asura.core.es.model.{Authorization, HttpCaseRequest} import asura.core.runtime.{RuntimeContext, RuntimeMetrics} import scala.collection.immutable import scala.concurrent.Future object HttpParser { def toHttpRequest(cs: HttpCaseRequest, context: RuntimeContext)(implicit metrics: RuntimeMetrics): Future[HttpRequest] = { var method: HttpMethod = null val headers: immutable.Seq[HttpHeader] = HeaderUtils.toHeaders(cs, context) val request = cs.request if (null == request) { method = AkkaHttpMethods.GET } else { method = HttpMethods.toAkkaMethod(request.method) } val uri: Uri = UriUtils.toUri(cs, context) val entity = if (AkkaHttpMethods.GET != method) { EntityUtils.toEntity(cs, context) } else { HttpEntity.Empty } val notAuthoredRequest = HttpRequest(method = method, uri = uri, headers = headers, entity = entity) metrics.renderRequestEnd() val authUsed: Seq[Authorization] = if (null != context.options && null != context.options.getUsedEnv()) { context.options.getUsedEnv().auth } else { Nil } if (null != authUsed && authUsed.nonEmpty) { metrics.renderAuthBegin() authUsed.foldLeft(Future.successful(notAuthoredRequest))((futureRequest, auth) => { for { initialAuthoredRequest <- futureRequest authoredRequest <- { val operator = AuthManager(auth.`type`) if (operator.nonEmpty) { operator.get.authorize(initialAuthoredRequest, auth) } else { ErrorMessages.error_NotRegisteredAuth(auth.`type`).toFutureFail } } } yield authoredRequest }).map(req => { metrics.renderAuthEnd() req }) } else { Future.successful(notAuthoredRequest) } } }
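The authorization foldLeft above is an instance of a general pattern: threading a value through a sequence of asynchronous decorators. A minimal standalone sketch:

import scala.concurrent.{ExecutionContext, Future}

def chain[A](zero: A, steps: Seq[A => Future[A]])(implicit ec: ExecutionContext): Future[A] =
  steps.foldLeft(Future.successful(zero))((acc, step) => acc.flatMap(step))
// here: chain(notAuthoredRequest, authUsed.map(auth => (r: HttpRequest) => authorize(r, auth)))
// where authorize is a hypothetical stand-in for the AuthManager lookup above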
Example 77
Source File: HeaderUtils.scala From asura with MIT License | 5 votes |
package asura.core.http

import akka.http.scaladsl.model.HttpHeader.ParsingResult.{Error, Ok}
import akka.http.scaladsl.model.headers.{Cookie, RawHeader}
import akka.http.scaladsl.model.{ErrorInfo, HttpHeader}
import asura.common.util.StringUtils
import asura.core.es.model.{Environment, HttpCaseRequest}
import asura.core.runtime.RuntimeContext
import asura.core.{CoreConfig, ErrorMessages}
import com.typesafe.scalalogging.Logger

import scala.collection.immutable
import scala.collection.mutable.ArrayBuffer

object HeaderUtils {

  val logger = Logger("HeaderUtils")

  def toHeaders(cs: HttpCaseRequest, context: RuntimeContext): immutable.Seq[HttpHeader] = {
    val headers = ArrayBuffer[HttpHeader]()
    val request = cs.request
    val env = if (null != context.options) context.options.getUsedEnv() else null
    if (null != request) {
      val headerSeq = request.header
      if (null != headerSeq) {
        for (h <- headerSeq if (h.enabled && StringUtils.isNotEmpty(h.key))) {
          HttpHeader.parse(h.key, context.renderSingleMacroAsString(h.value)) match {
            case Ok(header: HttpHeader, errors: List[ErrorInfo]) =>
              if (errors.nonEmpty) logger.warn(errors.mkString(","))
              headers += header
            case Error(error: ErrorInfo) =>
              logger.warn(error.detail)
          }
        }
      }
      val cookieSeq = request.cookie
      if (null != cookieSeq) {
        val cookies = ArrayBuffer[(String, String)]()
        for (c <- cookieSeq if (c.enabled && StringUtils.isNotEmpty(c.key))) {
          cookies += ((c.key, context.renderSingleMacroAsString(c.value)))
        }
        if (cookies.nonEmpty) headers += Cookie(cookies: _*)
      }
    }
    if (null != env && null != env.headers && env.headers.nonEmpty) {
      for (h <- env.headers if (h.enabled && StringUtils.isNotEmpty(h.key))) {
        HttpHeader.parse(h.key, context.renderSingleMacroAsString(h.value)) match {
          case Ok(header: HttpHeader, errors: List[ErrorInfo]) =>
            if (errors.nonEmpty) logger.warn(errors.mkString(","))
            headers += header
          case Error(error: ErrorInfo) =>
            logger.warn(error.detail)
        }
      }
    }
    if (null != env && env.enableProxy) {
      val headerIdentifier = validateProxyVariables(env)
      val dst = StringBuilder.newBuilder
      dst.append("/").append(cs.group).append("/").append(cs.project).append("/").append(env.namespace)
      headers += RawHeader(headerIdentifier, dst.toString)
    }
    headers.toList
  }

  def validateProxyVariables(env: Environment): String = {
    if (!CoreConfig.linkerdConfig.enabled) {
      throw ErrorMessages.error_ProxyDisabled.toException
    }
    if (StringUtils.isEmpty(env.namespace)) {
      throw ErrorMessages.error_EmptyNamespace.toException
    }
    if (StringUtils.isEmpty(env.server)) {
      throw ErrorMessages.error_EmptyProxyServer.toException
    }
    val proxyServerOpt = CoreConfig.linkerdConfig.servers.find(_.tag.equals(env.server))
    // '||', not '&&': with '&&' an unknown server tag would throw NoSuchElementException
    // on proxyServerOpt.get, and an empty headerIdentifier would slip through unchecked.
    if (proxyServerOpt.isEmpty || StringUtils.isEmpty(proxyServerOpt.get.headerIdentifier)) {
      throw ErrorMessages.error_InvalidProxyConfig.toException
    } else {
      proxyServerOpt.get.headerIdentifier
    }
  }

  def isApplicationJson(header: HttpHeader): Boolean = {
    if (header.lowercaseName().equals("content-type")) {
      header.value().contains(HttpContentTypes.JSON)
    } else {
      false
    }
  }
}
Example 78
Source File: ProtoBuffTest.scala From c4proto with Apache License 2.0 | 5 votes |
package ee.cone.c4actor import java.lang.management.ManagementFactory import java.util import java.util.concurrent.{Callable, Executors} import ee.cone.c4actor.AnyAdapter._ import ee.cone.c4actor.AnyOrigProtocol.N_AnyOrig import ee.cone.c4actor.ProtoBuffTestProtocol.{D_TestOrig, D_TestOrigForDecode} import ee.cone.c4di.{c4, c4app} import ee.cone.c4proto._ import scala.collection.immutable import scala.util.Random trait ProtoBuffTestProtocolAppBase @protocol("ProtoBuffTestProtocolApp") object ProtoBuffTestProtocol { @Id(0x1) case class D_TestOrig( @Id(0x2) srcId: String, @Id(0x3) list: List[String], @Id(0x4) byteStr: List[N_AnyOrig] ) @Id(0x5) case class D_TestOrigForDecode( @Id(0x6) srcId: String, @Id(0x7) number: Long ) } @c4app class SeqProtoBuffTestAppBase extends ProtoBuffTestApp @c4app class ParProtoBuffTestAppBase extends ProtoBuffTestApp trait ProtoBuffTestApp extends VMExecutionApp with ExecutableApp with BaseApp with ProtoApp with ProtoBuffTestProtocolApp with AnyOrigProtocolApp class SerializationRunnable(pid: Int, testOrigs: Seq[D_TestOrigForDecode], qAdapterRegistry: QAdapterRegistry) extends Callable[Long] { def call(): Long = { TestCode.test(testOrigs, qAdapterRegistry) } } object TestCode { def test(testOrigs: Seq[D_TestOrigForDecode], qAdapterRegistry: QAdapterRegistry): Long = { val time = System.currentTimeMillis() val encoded: immutable.Seq[N_AnyOrig] = testOrigs.map(encode(qAdapterRegistry)(_)) val testOrigsss: immutable.Seq[D_TestOrig] = encoded.zipWithIndex.map { case (a, b) => D_TestOrig(b.toString, a.toString.split(",").toList, List(a)) } val encoded2: immutable.Seq[N_AnyOrig] = testOrigsss.map(encode(qAdapterRegistry)(_)) val decoded: immutable.Seq[D_TestOrig] = encoded2.map(decode[D_TestOrig](qAdapterRegistry)) // assert (testOrigsss == decoded) val time2 = System.currentTimeMillis() time2 - time } }
Example 79
Source File: FacetIndex.scala From NSDb with Apache License 2.0 | 5 votes |
package io.radicalbit.nsdb.index import io.radicalbit.nsdb.common.protocol.{Bit, DimensionFieldType} import org.apache.lucene.document._ import org.apache.lucene.facet._ import org.apache.lucene.facet.taxonomy.SearcherTaxonomyManager import org.apache.lucene.facet.taxonomy.directory.{DirectoryTaxonomyReader, DirectoryTaxonomyWriter} import org.apache.lucene.index.IndexWriter import org.apache.lucene.search._ import org.apache.lucene.store.Directory import scala.collection.immutable import scala.util.{Failure, Success, Try} abstract class FacetIndex(val directory: Directory, val taxoDirectory: Directory) extends AbstractStructuredIndex { private lazy val searchTaxonomyManager: SearcherTaxonomyManager = new SearcherTaxonomyManager(directory, taxoDirectory, null) def write(bit: Bit)(implicit writer: IndexWriter, taxonomyWriter: DirectoryTaxonomyWriter): Try[Long] protected[this] def facetNamePrefix: String protected[this] def facetName(name: String): String = s"$facetNamePrefix$name" protected[index] def internalResult(query: Query, groupField: String, sort: Option[Sort], limit: Option[Int], valueIndexType: IndexType[_]): Option[FacetResult] protected[index] def result(query: Query, groupField: String, sort: Option[Sort], limit: Option[Int], groupFieldIndexType: IndexType[_], valueIndexType: IndexType[_]): Seq[Bit] override def validateRecord(bit: Bit): Try[immutable.Iterable[Field]] = validateSchemaTypeSupport(bit) .map( se => se.collect { case (_, t) if t.fieldClassType != DimensionFieldType => t } .flatMap(elem => elem.indexType.facetField(elem.name, elem.value))) protected[this] def commonWrite(bit: Bit, facetConfig: Seq[Field] => FacetsConfig, facetField: (Field, String, FacetsConfig) => Field)( implicit writer: IndexWriter, taxonomyWriter: DirectoryTaxonomyWriter): Try[Long] = { val allFields = validateRecord(bit) allFields match { case Success(fields) => val doc = new Document val c = facetConfig(fields.toSeq) fields .filterNot(f => f.name() == "value") .foreach { f => doc.add(f) if (f.isInstanceOf[StringField] || f.isInstanceOf[FloatPoint] || f.isInstanceOf[DoublePoint] || f.isInstanceOf[IntPoint] || f.isInstanceOf[LongPoint]) { val path = if (f.numericValue != null) f.numericValue.toString else f.stringValue doc.add(facetField(f, path, c)) } } Try(writer.addDocument(c.build(taxonomyWriter, doc))).recoverWith { case t => t.printStackTrace() Failure(t) } case Failure(t) => t.printStackTrace() Failure(t) } } }
Example 80
Source File: ScalaCodeGenerator.scala From akka-grpc with Apache License 2.0 | 5 votes |
package akka.grpc.gen.scaladsl import scala.collection.JavaConverters._ import scala.collection.immutable import akka.grpc.gen.{ BuildInfo, CodeGenerator, Logger } import com.google.protobuf.Descriptors._ import com.google.protobuf.compiler.PluginProtos.{ CodeGeneratorRequest, CodeGeneratorResponse } import scalapb.compiler.GeneratorParams import protocbridge.Artifact import com.github.ghik.silencer.silent abstract class ScalaCodeGenerator extends CodeGenerator { // Override this to add generated files per service def perServiceContent: Set[(Logger, Service) => immutable.Seq[CodeGeneratorResponse.File]] = Set.empty // Override these to add service-independent generated files def staticContent(@silent("never used") logger: Logger): Set[CodeGeneratorResponse.File] = Set.empty def staticContent( @silent("never used") logger: Logger, @silent("never used") allServices: Seq[Service]): Set[CodeGeneratorResponse.File] = Set.empty override def suggestedDependencies = (scalaBinaryVersion: CodeGenerator.ScalaBinaryVersion) => Seq( Artifact( BuildInfo.organization, BuildInfo.runtimeArtifactName + "_" + scalaBinaryVersion.prefix, BuildInfo.version)) // generate services code here, the data types we want to leave to scalapb override def run(request: CodeGeneratorRequest, logger: Logger): CodeGeneratorResponse = { val b = CodeGeneratorResponse.newBuilder val fileDescByName: Map[String, FileDescriptor] = request.getProtoFileList.asScala.foldLeft[Map[String, FileDescriptor]](Map.empty) { case (acc, fp) => val deps = fp.getDependencyList.asScala.map(acc).toArray acc + (fp.getName -> FileDescriptor.buildFrom(fp, deps)) } // Currently per-invocation options, intended to become per-service options eventually // https://github.com/akka/akka-grpc/issues/451 val params = request.getParameter.toLowerCase // flags listed in akkaGrpcCodeGeneratorSettings's description val serverPowerApi = params.contains("server_power_apis") && !params.contains("server_power_apis=false") val usePlayActions = params.contains("use_play_actions") && !params.contains("use_play_actions=false") val services = (for { file <- request.getFileToGenerateList.asScala fileDesc = fileDescByName(file) serviceDesc <- fileDesc.getServices.asScala } yield Service( parseParameters(request.getParameter), fileDesc, serviceDesc, serverPowerApi, usePlayActions)).toSeq for { service <- services generator <- perServiceContent generated <- generator(logger, service) } { b.addFile(generated) } staticContent(logger).map(b.addFile) staticContent(logger, services).map(b.addFile) b.build() } // flags listed in akkaGrpcCodeGeneratorSettings's description private def parseParameters(params: String): GeneratorParams = params.split(",").map(_.trim).filter(_.nonEmpty).foldLeft[GeneratorParams](GeneratorParams()) { case (p, "java_conversions") => p.copy(javaConversions = true) case (p, "flat_package") => p.copy(flatPackage = true) case (p, "single_line_to_string") => p.copy(singleLineToProtoString = true) // for backward-compatibility case (p, "single_line_to_proto_string") => p.copy(singleLineToProtoString = true) case (p, "ascii_format_to_string") => p.copy(asciiFormatToString = true) case (p, "no_lenses") => p.copy(lenses = false) case (p, "retain_source_code_info") => p.copy(retainSourceCodeInfo = true) case (x, _) => x } }
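parseParameters is private, but the flag-folding idea is easy to demonstrate on its own with a hypothetical settings class:

case class Settings(flatPackage: Boolean = false, lenses: Boolean = true)

def parse(params: String): Settings =
  params.split(",").map(_.trim).filter(_.nonEmpty).foldLeft(Settings()) {
    case (s, "flat_package") => s.copy(flatPackage = true)
    case (s, "no_lenses")    => s.copy(lenses = false)
    case (s, _)              => s
  }
// parse("flat_package, no_lenses") == Settings(flatPackage = true, lenses = false)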
Example 81
Source File: ScalaTraitCodeGenerator.scala From akka-grpc with Apache License 2.0 | 5 votes |
package akka.grpc.gen.scaladsl import scala.collection.immutable import akka.grpc.gen.Logger import com.google.protobuf.compiler.PluginProtos.CodeGeneratorResponse import templates.ScalaCommon.txt.ApiTrait object ScalaTraitCodeGenerator extends ScalaCodeGenerator { override def name = "akka-grpc-scaladsl-trait" override def perServiceContent = super.perServiceContent + generateServiceFile val generateServiceFile: (Logger, Service) => immutable.Seq[CodeGeneratorResponse.File] = (logger, service) => { val b = CodeGeneratorResponse.File.newBuilder() b.setContent(ApiTrait(service).body) b.setName(s"${service.packageDir}/${service.name}.scala") logger.info(s"Generating Akka gRPC service interface for ${service.packageName}.${service.name}") immutable.Seq(b.build) } }
Example 82
Source File: ScalaClientCodeGenerator.scala From akka-grpc with Apache License 2.0 | 5 votes |
package akka.grpc.gen.scaladsl import scala.collection.immutable import akka.grpc.gen.{ BuildInfo, CodeGenerator, Logger } import com.google.protobuf.compiler.PluginProtos.CodeGeneratorResponse import protocbridge.Artifact import templates.ScalaClient.txt._ trait ScalaClientCodeGenerator extends ScalaCodeGenerator { override def name = "akka-grpc-scaladsl-client" override def perServiceContent = super.perServiceContent + generateStub def generateStub(logger: Logger, service: Service): immutable.Seq[CodeGeneratorResponse.File] = { val b = CodeGeneratorResponse.File.newBuilder() b.setContent(Client(service).body) b.setName(s"${service.packageDir}/${service.name}Client.scala") logger.info(s"Generating Akka gRPC client for ${service.packageName}.${service.name}") immutable.Seq(b.build) } override val suggestedDependencies = (scalaBinaryVersion: CodeGenerator.ScalaBinaryVersion) => // TODO: remove grpc-stub dependency once we have a akka-http based client #193 Artifact("io.grpc", "grpc-stub", BuildInfo.grpcVersion) +: super.suggestedDependencies(scalaBinaryVersion) } object ScalaClientCodeGenerator extends ScalaClientCodeGenerator
Example 83
Source File: BasicShabondiTest.scala From ohara with Apache License 2.0 | 5 votes |
package oharastream.ohara.shabondi import java.util import java.util.concurrent.{ExecutorService, Executors} import com.google.common.util.concurrent.ThreadFactoryBuilder import com.typesafe.scalalogging.Logger import oharastream.ohara.common.data.Row import oharastream.ohara.common.setting.TopicKey import oharastream.ohara.common.util.{CommonUtils, Releasable} import oharastream.ohara.kafka.TopicAdmin import oharastream.ohara.shabondi.common.ShabondiUtils import oharastream.ohara.shabondi.sink.SinkConfig import oharastream.ohara.shabondi.source.SourceConfig import oharastream.ohara.testing.WithBroker import org.junit.After import scala.collection.{immutable, mutable} import scala.concurrent.{ExecutionContext, Future} import scala.jdk.CollectionConverters._ private[shabondi] abstract class BasicShabondiTest extends WithBroker { protected val log = Logger(this.getClass()) protected val brokerProps = testUtil.brokersConnProps protected val topicAdmin: TopicAdmin = TopicAdmin.of(brokerProps) protected val newThreadPool: () => ExecutorService = () => Executors.newCachedThreadPool(new ThreadFactoryBuilder().setNameFormat(this.getClass.getSimpleName + "-").build()) protected val countRows: (util.Queue[Row], Long, ExecutionContext) => Future[Long] = (queue, executionTime, ec) => Future { log.debug("countRows begin...") val baseTime = System.currentTimeMillis() var count = 0L var running = true while (running) { val row = queue.poll() if (row != null) count += 1 else Thread.sleep(100) running = (System.currentTimeMillis() - baseTime) < executionTime } log.debug("countRows done") count }(ec) protected def createTopicKey = TopicKey.of("default", CommonUtils.randomString(5)) protected def createTestTopic(topicKey: TopicKey): Unit = topicAdmin.topicCreator .numberOfPartitions(1) .numberOfReplications(1.toShort) .topicKey(topicKey) .create protected def defaultSourceConfig( sourceToTopics: Seq[TopicKey] = Seq.empty[TopicKey] ): SourceConfig = { import ShabondiDefinitions._ val args = mutable.ArrayBuffer( GROUP_DEFINITION.key + "=" + CommonUtils.randomString(5), NAME_DEFINITION.key + "=" + CommonUtils.randomString(3), SHABONDI_CLASS_DEFINITION.key + "=" + classOf[ShabondiSource].getName, CLIENT_PORT_DEFINITION.key + "=8080", BROKERS_DEFINITION.key + "=" + testUtil.brokersConnProps ) if (sourceToTopics.nonEmpty) args += s"${SOURCE_TO_TOPICS_DEFINITION.key}=${TopicKey.toJsonString(sourceToTopics.asJava)}" val rawConfig = ShabondiUtils.parseArgs(args.toArray) new SourceConfig(rawConfig) } protected def defaultSinkConfig( sinkFromTopics: Seq[TopicKey] = Seq.empty[TopicKey] ): SinkConfig = { import ShabondiDefinitions._ val args = mutable.ArrayBuffer( GROUP_DEFINITION.key + "=" + CommonUtils.randomString(5), NAME_DEFINITION.key + "=" + CommonUtils.randomString(3), SHABONDI_CLASS_DEFINITION.key + "=" + classOf[ShabondiSink].getName, CLIENT_PORT_DEFINITION.key + "=8080", BROKERS_DEFINITION.key + "=" + testUtil.brokersConnProps ) if (sinkFromTopics.nonEmpty) args += s"${SINK_FROM_TOPICS_DEFINITION.key}=${TopicKey.toJsonString(sinkFromTopics.asJava)}" val rawConfig = ShabondiUtils.parseArgs(args.toArray) new SinkConfig(rawConfig) } protected def singleRow(columnSize: Int, rowId: Int = 0): Row = KafkaSupport.singleRow(columnSize, rowId) protected def multipleRows(rowSize: Int): immutable.Iterable[Row] = KafkaSupport.multipleRows(rowSize) @After def tearDown(): Unit = { Releasable.close(topicAdmin) } }
Example 84
Source File: CommonQueries.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved. // SPDX-License-Identifier: Apache-2.0 package com.daml.ledger.on.sql.queries import java.sql.Connection import anorm.SqlParser._ import anorm._ import com.daml.ledger.on.sql.Index import com.daml.ledger.on.sql.queries.Queries._ import com.daml.ledger.participant.state.kvutils.KVOffset import com.daml.ledger.participant.state.kvutils.api.LedgerRecord import com.daml.ledger.validator.LedgerStateOperations.{Key, Value} import scala.collection.{breakOut, immutable} import scala.util.Try trait CommonQueries extends Queries { protected implicit val connection: Connection override final def selectLatestLogEntryId(): Try[Option[Index]] = Try { SQL"SELECT MAX(sequence_no) max_sequence_no FROM #$LogTable" .as(get[Option[Long]]("max_sequence_no").singleOpt) .flatten } override final def selectFromLog( startExclusive: Index, endInclusive: Index, ): Try[immutable.Seq[(Index, LedgerRecord)]] = Try { SQL"SELECT sequence_no, entry_id, envelope FROM #$LogTable WHERE sequence_no > $startExclusive AND sequence_no <= $endInclusive ORDER BY sequence_no" .as((long("sequence_no") ~ getBytes("entry_id") ~ getBytes("envelope")).map { case index ~ entryId ~ envelope => index -> LedgerRecord(KVOffset.fromLong(index), entryId, envelope) }.*) } override final def selectStateValuesByKeys(keys: Seq[Key]): Try[immutable.Seq[Option[Value]]] = Try { val results = SQL"SELECT key, value FROM #$StateTable WHERE key IN ($keys)" .fold(Map.newBuilder[Key, Value], ColumnAliaser.empty) { (builder, row) => builder += row("key") -> row("value") } .fold(exceptions => throw exceptions.head, _.result()) keys.map(results.get)(breakOut) } override final def updateState(stateUpdates: Seq[(Key, Value)]): Try[Unit] = Try { executeBatchSql(updateStateQuery, stateUpdates.map { case (key, value) => Seq[NamedParameter]("key" -> key, "value" -> value) }) } protected val updateStateQuery: String }
Example 85
Source File: TimedQueries.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved. // SPDX-License-Identifier: Apache-2.0 package com.daml.ledger.on.sql.queries import com.daml.ledger.on.sql.Index import com.daml.ledger.participant.state.kvutils.api.LedgerRecord import com.daml.ledger.participant.state.v1.LedgerId import com.daml.ledger.validator.LedgerStateOperations.{Key, Value} import com.daml.metrics.{Metrics, Timed} import scala.collection.immutable import scala.util.Try final class TimedQueries(delegate: Queries, metrics: Metrics) extends Queries { override def selectLatestLogEntryId(): Try[Option[Index]] = Timed.value( metrics.daml.ledger.database.queries.selectLatestLogEntryId, delegate.selectLatestLogEntryId()) override def selectFromLog(start: Index, end: Index): Try[immutable.Seq[(Index, LedgerRecord)]] = Timed.value( metrics.daml.ledger.database.queries.selectFromLog, delegate.selectFromLog(start, end)) override def selectStateValuesByKeys(keys: Seq[Key]): Try[immutable.Seq[Option[Value]]] = Timed.value( metrics.daml.ledger.database.queries.selectStateValuesByKeys, delegate.selectStateValuesByKeys(keys)) override def updateOrRetrieveLedgerId(providedLedgerId: LedgerId): Try[LedgerId] = Timed.value( metrics.daml.ledger.database.queries.updateOrRetrieveLedgerId, delegate.updateOrRetrieveLedgerId(providedLedgerId)) override def insertRecordIntoLog(key: Key, value: Value): Try[Index] = Timed.value( metrics.daml.ledger.database.queries.insertRecordIntoLog, delegate.insertRecordIntoLog(key, value)) override def updateState(stateUpdates: Seq[(Key, Value)]): Try[Unit] = Timed.value( metrics.daml.ledger.database.queries.updateState, delegate.updateState(stateUpdates)) override def truncate(): Try[Unit] = Timed.value(metrics.daml.ledger.database.queries.truncate, delegate.truncate()) }
Example 86
Source File: CommandCompletionSource.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved. // SPDX-License-Identifier: Apache-2.0 package com.daml.ledger.client.services.commands import akka.NotUsed import akka.stream.scaladsl.Source import com.daml.grpc.adapter.ExecutionSequencerFactory import com.daml.grpc.adapter.client.akka.ClientAdapter import com.daml.ledger.api.v1.command_completion_service.{ CompletionStreamRequest, CompletionStreamResponse } import io.grpc.stub.StreamObserver import scala.collection.{breakOut, immutable} object CommandCompletionSource { def toStreamElements( response: CompletionStreamResponse): immutable.Iterable[CompletionStreamElement] = { val completions: Vector[CompletionStreamElement] = response.completions.map(CompletionStreamElement.CompletionElement)(breakOut) response.checkpoint.fold(completions)(cp => completions :+ CompletionStreamElement.CheckpointElement(cp)) } def apply( request: CompletionStreamRequest, stub: (CompletionStreamRequest, StreamObserver[CompletionStreamResponse]) => Unit)( implicit esf: ExecutionSequencerFactory): Source[CompletionStreamElement, NotUsed] = { ClientAdapter .serverStreaming(request, stub) .mapConcat(toStreamElements) .log( "completion at client", { case CompletionStreamElement.CheckpointElement(c) => s"Checkpoint ${c.offset}" case CompletionStreamElement.CompletionElement(c) => s"Completion $c" } ) } }
Example 87
Source File: CommandTrackerShape.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved. // SPDX-License-Identifier: Apache-2.0 package com.daml.ledger.client.services.commands.tracker import akka.stream.{Inlet, Outlet, Shape} import com.daml.ledger.api.v1.command_submission_service.SubmitRequest import com.daml.ledger.api.v1.completion.Completion import com.daml.ledger.api.v1.ledger_offset.LedgerOffset import com.daml.ledger.client.services.commands.CompletionStreamElement import com.daml.util.Ctx import com.google.protobuf.empty.Empty import scala.collection.immutable import scala.util.Try private[tracker] final case class CommandTrackerShape[Context]( submitRequestIn: Inlet[Ctx[Context, SubmitRequest]], submitRequestOut: Outlet[Ctx[(Context, String), SubmitRequest]], commandResultIn: Inlet[Either[Ctx[(Context, String), Try[Empty]], CompletionStreamElement]], resultOut: Outlet[Ctx[Context, Completion]], offsetOut: Outlet[LedgerOffset]) extends Shape { override def inlets: immutable.Seq[Inlet[_]] = Vector(submitRequestIn, commandResultIn) override def outlets: immutable.Seq[Outlet[_]] = Vector(submitRequestOut, resultOut, offsetOut) override def deepCopy(): Shape = CommandTrackerShape[Context]( submitRequestIn.carbonCopy(), submitRequestOut.carbonCopy(), commandResultIn.carbonCopy(), resultOut.carbonCopy(), offsetOut.carbonCopy()) }
Example 88
Source File: DispatcherImpl.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.platform.akkastreams.dispatcher

import java.util.concurrent.atomic.AtomicReference

import akka.NotUsed
import akka.stream.scaladsl.Source
import com.github.ghik.silencer.silent
import org.slf4j.LoggerFactory

import scala.collection.immutable

final class DispatcherImpl[Index: Ordering](
    name: String,
    zeroIndex: Index,
    headAtInitialization: Index)
    extends Dispatcher[Index] {

  private val logger = LoggerFactory.getLogger(getClass)

  require(
    !indexIsBeforeZero(headAtInitialization),
    s"head supplied at Dispatcher initialization $headAtInitialization is before zero index $zeroIndex. " +
      s"This would imply that the ledger end is before the ledger begin, which makes this invalid configuration."
  )

  private sealed abstract class State extends Product with Serializable {
    def getSignalDispatcher: Option[SignalDispatcher]

    def getLastIndex: Index
  }

  // the following silent are due to
  // <https://github.com/scala/bug/issues/4440>
  @silent
  private final case class Running(lastIndex: Index, signalDispatcher: SignalDispatcher)
      extends State {
    override def getLastIndex: Index = lastIndex

    override def getSignalDispatcher: Option[SignalDispatcher] = Some(signalDispatcher)
  }

  @silent
  private final case class Closed(lastIndex: Index) extends State {
    override def getLastIndex: Index = lastIndex

    override def getSignalDispatcher: Option[SignalDispatcher] = None
  }

  // So why not broadcast the actual new index, instead of using a signaller?
  // The reason is if we do that, the new indices race with readHead
  // in a way that makes it hard to start up new subscriptions. In particular,
  // we can tolerate NewIndexSignals being out of order or dropped, maintaining the weaker invariant that,
  // if head is updated, at least one NewIndexSignal eventually arrives.
  private val state = new AtomicReference[State](Running(headAtInitialization, SignalDispatcher()))

  // NOTE: the declaration enclosing the following `apply` was lost in extraction;
  // it belongs to a small helper that tracks the highest index seen so far and
  // emits each newly uncovered interval (reconstruction, name assumed):
  private final class ContinuousRangeEmitter(var max: Index) // var: only ever moves up
      extends (Index => immutable.Iterable[(Index, Index)]) {

    override def apply(newHead: Index): immutable.Iterable[(Index, Index)] =
      if (Ordering[Index].gt(newHead, max)) {
        val intervalBegin = max
        max = newHead
        List(intervalBegin -> newHead)
      } else Nil
  }

  private def indexIsBeforeZero(checkedIndex: Index): Boolean =
    Ordering[Index].gt(zeroIndex, checkedIndex)

  def close(): Unit =
    state.getAndUpdate {
      case Running(idx, _) => Closed(idx)
      case c: Closed => c
    } match {
      case Running(idx, disp) =>
        disp.signal()
        disp.close()
      case c: Closed => ()
    }

  private def closedError: IllegalStateException =
    new IllegalStateException(s"$name: Dispatcher is closed")

}
Example 89
Source File: DropRepeatedSpec.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved. // SPDX-License-Identifier: Apache-2.0 package com.daml.platform.server.api import akka.actor.ActorSystem import akka.pattern.pipe import akka.stream.Materializer import akka.stream.scaladsl.{Sink, Source} import akka.testkit.{TestKit, TestProbe} import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike} import scala.collection.immutable import scala.concurrent.ExecutionContext final class DropRepeatedSpec extends TestKit(ActorSystem(classOf[DropRepeatedSpec].getSimpleName)) with WordSpecLike with Matchers with BeforeAndAfterAll { private[this] implicit val materializer: Materializer = Materializer(system) private[this] implicit val executionContext: ExecutionContext = materializer.executionContext override def afterAll: Unit = { TestKit.shutdownActorSystem(system) } "DropRepeated" should { "drop repeated elements" in { val probe = TestProbe() val input = immutable.Seq(1, 2, 2, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 5) val _ = Source(input) .via(DropRepeated()) .runWith(Sink.seq) .pipeTo(probe.ref) .failed .foreach(fail(_)) probe.expectMsg(Vector(1, 2, 3, 4, 5)) } "does not drop duplicate elements that are not repeated" in { val probe = TestProbe() val input = immutable.Seq(1, 1, 2, 2, 1, 1, 2, 2) val _ = Source(input) .via(DropRepeated()) .runWith(Sink.seq) .pipeTo(probe.ref) .failed .foreach(fail(_)) probe.expectMsg(Vector(1, 2, 1, 2)) } } }
Example 90
Source File: MultiResourceBase.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved. // SPDX-License-Identifier: Apache-2.0 package com.daml.ledger.api.testing.utils import org.scalatest.AsyncTestSuite import scala.collection.immutable trait MultiResourceBase[FixtureId, TestContext] extends MultiFixtureBase[FixtureId, TestContext] with SuiteResource[Map[FixtureId, () => TestContext]] { self: AsyncTestSuite => protected def fixtureIdsEnabled: Set[FixtureId] protected def constructResource(index: Int, fixtureId: FixtureId): Resource[TestContext] override protected lazy val suiteResource: Resource[Map[FixtureId, () => TestContext]] = { MultiResource(fixtureIdsEnabled.zipWithIndex.map { case (backend, idx) => backend -> constructResource(idx, backend) }.toMap) } override protected lazy val fixtures: immutable.Iterable[TestFixture] = fixtureIdsEnabled.map { implementation => TestFixture(implementation, suiteResource.value(implementation)) } } case class MultiResource[FixtureId, TestContext](resources: Map[FixtureId, Resource[TestContext]]) extends Resource[Map[FixtureId, () => TestContext]] { override lazy val value: Map[FixtureId, () => TestContext] = resources.mapValues(r => () => r.value) override def setup(): Unit = resources.foreach(_._2.setup()) override def close(): Unit = resources.foreach(_._2.close()) }
Example 91
Source File: Relation.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved. // SPDX-License-Identifier: Apache-2.0 package com.daml.lf.data import scala.collection.{mutable, immutable} object Relation { // NOTE: this definition and specifically inversion assumes // that the values related to an A are non-empty // we treat // - the empty relation mapping // - and a Map that maps everything to the empty set // as the same // this fits our purposes for the moment type Relation[A, B] = immutable.Map[A, Set[B]] object Relation { def merge[A, B](r: Relation[A, B], pair: (A, Set[B])): Relation[A, B] = r.updated(pair._1, r.getOrElse(pair._1, Set.empty[B]).union(pair._2)) def union[A, B](r1: Relation[A, B], r2: Relation[A, B]): Relation[A, B] = r2.foldLeft(r1)(merge) def diff[A, B](r1: Relation[A, B], r2: Relation[A, B]): Relation[A, B] = r1.map { case (a, bs) => a -> r2.get(a).fold(bs)(bs diff _) } def invert[A, B](relation: Relation[A, B]): Relation[B, A] = { val result = mutable.Map[B, Set[A]]() withDefaultValue Set() relation.foreach { case (a, bs) => bs.foreach(b => result(b) = result(b) + a) } result.toMap } def flatten[A, B](relation: Relation[A, B]): Iterator[(A, B)] = for { kvs <- relation.iterator value <- kvs._2 } yield (kvs._1, value) def mapKeys[A, K, B](r: Relation[A, B])(f: A => K): Relation[K, B] = r.map { case (a, b) => f(a) -> b } } }
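A usage sketch of the helpers (the results follow directly from the definitions above):

import com.daml.lf.data.Relation.Relation._

val r = Map(1 -> Set("a", "b"), 2 -> Set("b"))
invert(r)                    // Map("a" -> Set(1), "b" -> Set(1, 2))
union(r, Map(2 -> Set("c"))) // Map(1 -> Set("a", "b"), 2 -> Set("b", "c"))
flatten(r).toList            // List((1, "a"), (1, "b"), (2, "b"))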
Example 92
Source File: DomainEvent.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved. // SPDX-License-Identifier: Apache-2.0 package com.daml.ledger.client.binding import com.daml.ledger.api.refinements.ApiTypes._ import scala.collection.immutable sealed trait DomainEvent { def witnessParties: immutable.Seq[Party] } final case class DomainCreatedEvent( eventId: EventId, contractId: ContractId, templateId: TemplateId, witnessParties: immutable.Seq[Party], createArguments: CreateArguments, contractData: Contract.OfAny) extends DomainEvent final case class DomainArchivedEvent( eventId: EventId, contractId: ContractId, templateId: TemplateId, witnessParties: immutable.Seq[Party]) extends DomainEvent
Example 93
Source File: generics.scala From spark-tools with Apache License 2.0 | 5 votes |
package io.univalence.autobuild.struct import shapeless.labelled._ import shapeless._ import scala.collection.immutable import scala.language.higherKinds trait TypeName[T] { def name: String } object TypeName { import scala.reflect.runtime.universe.TypeTag implicit def fromTypeTag[T](implicit typeTag: TypeTag[T]): TypeName[T] = new TypeName[T] { override def name: String = typeTag.tpe.toString } } trait PathAwareness[T[_]] { def injectPrefix[A](prefix: String)(t: T[A]): T[A] } object DefaultPathAwareness { implicit def defaultPathAwareness[App[_]] = new PathAwareness[App] { override def injectPrefix[A](prefix: String)(t: App[A]): App[A] = t } } trait FieldsNonRecur[L] { def fieldnames: List[(String, String)] } trait LowPriorityFieldsNonRecur { implicit def caseClassFields[F, G]( implicit gen: LabelledGeneric.Aux[F, G], encode: Lazy[FieldsNonRecur[G]] ): FieldsNonRecur[F] = new FieldsNonRecur[F] { override def fieldnames: List[(String, String)] = encode.value.fieldnames } implicit def hcon[K <: Symbol, H, T <: HList]( implicit key: Witness.Aux[K], tv: TypeName[H], tailEncode: Lazy[FieldsNonRecur[T]] ): FieldsNonRecur[FieldType[K, H] :: T] = new FieldsNonRecur[FieldType[K, H] :: T] { override def fieldnames: List[(String, String)] = (key.value.name, tv.name) :: tailEncode.value.fieldnames } } object FieldsNonRecur extends LowPriorityFieldsNonRecur { implicit def hnil[L <: HNil]: FieldsNonRecur[L] = new FieldsNonRecur[L] { override def fieldnames: List[(String, String)] = Nil } def fieldnames[A](implicit tmr: FieldsNonRecur[A]): Seq[(String, String)] = tmr.fieldnames }
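A hedged usage sketch of the derivation (requires shapeless and scala-reflect on the classpath):

case class Person(name: String, age: Int)

FieldsNonRecur.fieldnames[Person]
// List(("name", "String"), ("age", "Int")): names from LabelledGeneric, type strings from TypeTag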
Example 94
Source File: ZeppelinHelpers.scala From ArchiveSpark with MIT License | 5 votes |
package org.archive.archivespark.util

import org.apache.spark.rdd.RDD

import scala.collection.immutable

object ZeppelinHelpers {
  def table(rdd: RDD[Seq[Any]], cols: String*): String = {
    val table = rdd.map(seq => seq.map(_.toString).mkString("\t")).collect.mkString("\n")
    "%table " + cols.mkString("\t") + "\n" + table
  }

  def table(values: collection.Map[_, _], keyCol: String, valCol: String): String = {
    val table = values.toSeq.map{case (k, v) => s"$k\t$v"}.mkString("\n")
    s"%table $keyCol\t$valCol\n$table"
  }

  def table(values: immutable.Map[_, _], keyCol: String, valCol: String): String =
    // upcast so this delegates to the collection.Map overload; calling
    // table(values.toMap, ...) would re-select this overload and recurse forever
    table(values: collection.Map[_, _], keyCol, valCol)
}
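A quick usage sketch; in a Zeppelin notebook the returned string is printed so the %table directive takes effect:

println(ZeppelinHelpers.table(Map("apples" -> 3, "pears" -> 5), "fruit", "count"))
// prints "%table fruit\tcount\napples\t3\npears\t5" (tab-separated, as Zeppelin expects)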
Example 95
Source File: B3FormatPropagation.scala From opencensus-scala with Apache License 2.0 | 5 votes |
package io.opencensus.scala.http.propagation

import io.opencensus.trace.propagation.TextFormat.{Getter, Setter}
import io.opencensus.trace.{Span, SpanContext, Tracing}

import scala.collection.{immutable, mutable}
import scala.util.Try

trait B3FormatPropagation[Header, Request] extends Propagation[Header, Request] {

  def headerValue(req: Request, key: String): Option[String]
  def createHeader(key: String, value: String): Header

  // NOTE: the b3Format field was elided in extraction; in opencensus-java it is
  // obtained from the propagation component:
  private val b3Format = Tracing.getPropagationComponent.getB3Format

  // NOTE: reconstructed as well (the signature matches the Propagation trait as
  // seen in MockPropagation below); this is what HeaderSetter and the mutable
  // buffer type are for: injecting the span context as B3 headers.
  override def headersWithTracingContext(span: Span): immutable.Seq[Header] = {
    val builder = mutable.ArrayBuffer.empty[Header]
    b3Format.inject(span.getContext, builder, HeaderSetter)
    builder.toList
  }

  override def extractContext(request: Request): Try[SpanContext] =
    Try(b3Format.extract(request, HeaderGetter))

  private type HttpHeaderBuilder = mutable.ArrayBuffer[Header]

  private object HeaderSetter extends Setter[HttpHeaderBuilder] {
    override def put(
        carrier: HttpHeaderBuilder,
        key: String,
        value: String
    ): Unit = {
      carrier += createHeader(key, value)
    }
  }

  private object HeaderGetter extends Getter[Request] {
    override def get(carrier: Request, key: String): String =
      headerValue(carrier, key).orNull
  }
}
Example 96
Source File: MockPropagation.scala From opencensus-scala with Apache License 2.0 | 5 votes |
package io.opencensus.scala.http.testSuite import io.opencensus.scala.http.propagation.Propagation import io.opencensus.trace._ import scala.collection.immutable import scala.util.{Failure, Success, Try} trait MockPropagation[Header, Request] extends Propagation[Header, Request] { def rawHeader(key: String, value: String): Header def path(request: Request): String val requestPathWithoutParent = "/no/parent/context" val fakeTraceId = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" val fakeSpanId = "bbbbbbbbbbbbbbbb" val sampledSpanContext = SpanContext.create( TraceId.fromLowerBase16(fakeTraceId), SpanId.fromLowerBase16(fakeSpanId), TraceOptions.builder().setIsSampled(true).build(), Tracestate.builder.build() ) override def headersWithTracingContext(span: Span): immutable.Seq[Header] = List(rawHeader("X-Mock-Trace", "12345")) override def extractContext(request: Request): Try[SpanContext] = if (path(request) == requestPathWithoutParent) Failure(new Exception("test error")) else Success(sampledSpanContext) }
Example 97
Source File: MatfastSessionState.scala From MatRel with Apache License 2.0 | 5 votes |
package org.apache.spark.sql.matfast import org.apache.spark.sql.matfast.execution.MatfastPlanner import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan import org.apache.spark.sql.execution.datasources.{DataSourceStrategy, FileSourceStrategy} import org.apache.spark.sql.{execution => sparkexecution, _} import org.apache.spark.sql.internal.SessionState import org.apache.spark.sql.matfast.plans.{MatfastOptimizer} import scala.collection.immutable private[matfast] class MatfastSessionState (matfastSession: MatfastSession) extends SessionState(matfastSession) { self => protected[matfast] lazy val matfastConf = new MatfastConf protected[matfast] def getSQLOptimizer = optimizer protected[matfast] lazy val matfastOptimizer: MatfastOptimizer = new MatfastOptimizer protected[matfast] val matfastPlanner: sparkexecution.SparkPlanner = { new MatfastPlanner(matfastSession, conf, experimentalMethods.extraStrategies) } override def executePlan(plan: LogicalPlan) = new execution.QueryExecution(matfastSession, plan) def setConf(key: String, value: String): Unit = { if (key.startsWith("matfast.")) matfastConf.setConfString(key, value) else conf.setConfString(key, value) } def getConf(key: String): String = { if (key.startsWith("matfast.")) matfastConf.getConfString(key) else conf.getConfString(key) } def getConf(key: String, defaultValue: String): String = { if (key.startsWith("matfast.")) conf.getConfString(key, defaultValue) else conf.getConfString(key, defaultValue) } def getAllConfs: immutable.Map[String, String] = { conf.getAllConfs ++ matfastConf.getAllConfs } }
Example 98
Source File: AkkaStreamsTest.scala From intro-to-akka-streams with Apache License 2.0 | 5 votes |
package com.github.dnvriend.streams.flow import akka.{ Done, NotUsed } import akka.stream.scaladsl._ import akka.stream.testkit.scaladsl._ import com.github.dnvriend.streams.util.{ OutputCustomer, InputCustomer } import com.github.dnvriend.streams.TestSpec import scala.collection.immutable import scala.concurrent.Future class AkkaStreamsTest extends TestSpec { lazy val writeCustomersSink = Sink.foreach[OutputCustomer] { (outputCustomer: OutputCustomer) ⇒ log.info("Customer: {}", outputCustomer) } "The Akka Stream Chain" should "execute normally" in { val chain: Future[Done] = inputCustomersSource.via(normalizeFlow).runWith(writeCustomersSink) chain.toTry should be a 'success } it should "process 100 customers" in { var counter = 0 val counterSink = Sink.foreach[OutputCustomer] { _ ⇒ counter += 1 } inputCustomersSource.via(normalizeFlow).runWith(counterSink).toTry should be a 'success counter shouldBe 100 } it should "transform a customer" in { inputCustomersSource .via(normalizeFlow) .runWith(TestSink.probe[OutputCustomer]) .request(1) .expectNext() match { case OutputCustomer(_, _) ⇒ case u ⇒ fail("Unexpected: " + u) } } // Testing Streams // see: http://doc.akka.io/docs/akka-stream-and-http-experimental/1.0-RC1/scala/stream-testkit.html "Probe Sink" should "be testable" in { // Using probe as a Sink allows manual control over demand and assertions over elements coming downstream. // Streams testkit provides a sink that materializes to a TestSubscriber.Probe. Source(1 to 4) .filter(_ % 2 == 0) .map(_ * 2) .runWith(TestSink.probe[Int]) .request(2) .expectNext(4, 8) .expectComplete() } "Probe Source" should "be testable" in { // A source that materializes to TestPublisher.Probe can be used for asserting demand or controlling when stream // is completed or ended with an error. TestSource.probe[Int] .toMat(Sink.cancelled)(Keep.left) .run() .expectCancellation() } "Source" should "be created from Range" in { Source(1 to 2) .map(identity) .runWith(TestSink.probe[Int]) .request(2) .expectNext(1, 2) .expectComplete() } it should "be created from a List" in { Source(List(1, 2)) .runWith(TestSink.probe[Int]) .request(2) .expectNext(1, 2) .expectComplete() } it should "be created from a Vector" in { Source(Vector(1, 2)) .runWith(TestSink.probe[Int]) .request(2) .expectNext(1, 2) .expectComplete() } }
Example 99
Source File: TestSelector.scala From suzaku with Apache License 2.0 | 5 votes |
package perftest import suzaku.ui._ import suzaku.ui.style.StyleClassBase import suzaku.widget.Button import scala.collection.immutable case class TestInstance(name: String, component: ComponentBlueprint) object TestSelector { final case class State private (isRunning: Boolean, currentTest: Int) { def selectTest(idx: Int): State = { println(s"Selecting test $idx") State(isRunning = !(isRunning && idx == currentTest), currentTest = idx) } } final case class CBP private (tests: immutable.Seq[TestInstance]) extends ComponentBlueprint { override def create = new ComponentImpl(this) } final class ComponentImpl(initialBlueprint: CBP) extends Component[CBP, State](initialBlueprint) { override def render(state: State): Blueprint = { import suzaku.ui.layout._ import suzaku.ui.style._ LinearLayout(Direction.Vertical)( LinearLayout()( // draw buttons for each test instance blueprint.tests.zipWithIndex.map { case (test, idx) => Button(label = test.name, onClick = () => modState(s => s.selectTest(idx))) << ( backgroundColor := (if (state.isRunning && state.currentTest == idx) Colors.lightblue else Colors.gray) ) } ) << TestListStyle, if (state.isRunning) { LinearLayout()(blueprint.tests(state.currentTest).component) << TestPanelStyle } else EmptyBlueprint ) } override def initialState: State = State(isRunning = false, 0) } def apply(tests: immutable.Seq[TestInstance]): CBP = CBP(tests) } object TestPanelStyle extends StyleClassBase { import suzaku.ui.style._ def styleDefs = List( padding := 20.px ) } object ButtonStyle extends StyleClassBase { import suzaku.ui.style._ def styleDefs = List( padding := 5.px, margin := 10.px ) } object TestListStyle extends StyleClassBase { import suzaku.ui.style._ def styleDefs = List( widgetStyle := (Button -> List(ButtonStyle)) ) }
Example 100
Source File: WidgetBlueprint.scala From suzaku with Apache License 2.0 | 5 votes |
package suzaku.ui import suzaku.ui.UIProtocol.UIChannel import suzaku.ui.layout.LayoutProperty import suzaku.ui.style.{StyleBaseProperty, StyleClass, StyleClasses, StylePropOrClass, StyleProperty, StyleSeq} import scala.collection.immutable trait WidgetBlueprint extends Blueprint { type P <: WidgetProtocol type This <: WidgetBlueprint type Proxy <: WidgetProxy[P, This] private[suzaku] var _style = immutable.Map.empty[Class[_], StyleProperty] private[suzaku] var _layout = List.empty[LayoutProperty] private[suzaku] var _activeEvents = Set.empty[Int] private[suzaku] var _onClickHandler = Option.empty[(Int, Int, Int) => Unit] private[suzaku] var _onLongClickHandler = Option.empty[(Int, Int, Int) => Unit] private[suzaku] var _onFocusChangeHandler = Option.empty[Boolean => Unit] def createProxy(widgetId: Int, uiChannel: UIChannel): Proxy def children: Seq[Blueprint] = Seq.empty def sameAs(that: This): Boolean = equals(that) && _style == that._style && _layout == that._layout && _activeEvents == that._activeEvents @noinline final def <<(styleProperty: StylePropOrClass*): this.type = { var styleClasses = List.empty[StyleClass] styleProperty.foreach { case StyleSeq(seq) => _style ++= seq.map(p => (p.getClass, p)) case s: StyleBaseProperty => _style += ((s.getClass, s)) case c: StyleClass => styleClasses ::= c case _ => } if (styleClasses.nonEmpty) { val prevClasses = _style.getOrElse(classOf[StyleClasses], StyleClasses(Nil)).asInstanceOf[StyleClasses] _style += (classOf[StyleClasses] -> StyleClasses(prevClasses.styles ::: styleClasses.reverse)) } this } @inline final def withStyle(styleProperty: StylePropOrClass*): this.type = <<(styleProperty: _*) @noinline final def withLayout(layoutProperty: LayoutProperty*): this.type = { _layout = _layout ::: layoutProperty.toList this } @noinline final def withOnClick(handler: (Int, Int, Int) => Unit, disable: Boolean = false): this.type = { if (disable) { _activeEvents = _activeEvents - WidgetExtProtocol.EventType.OnClickEvent _onClickHandler = None } else { _activeEvents = _activeEvents + WidgetExtProtocol.EventType.OnClickEvent _onClickHandler = Some(handler) } this } @noinline final def withOnLongClick(handler: (Int, Int, Int) => Unit, disable: Boolean = false): this.type = { if (disable) { _activeEvents = _activeEvents - WidgetExtProtocol.EventType.OnLongClickEvent _onLongClickHandler = None } else { _activeEvents = _activeEvents + WidgetExtProtocol.EventType.OnLongClickEvent _onLongClickHandler = Some(handler) } this } @noinline final def withOnFocusChange(handler: Boolean => Unit, disable: Boolean = false): this.type = { if (disable) { _activeEvents = _activeEvents - WidgetExtProtocol.EventType.OnFocusChangeEvent _onFocusChangeHandler = None } else { _activeEvents = _activeEvents + WidgetExtProtocol.EventType.OnFocusChangeEvent _onFocusChangeHandler = Some(handler) } this } } trait WidgetProtocolProvider { def widgetProtocol: WidgetProtocol }
Example 101
Source File: ParametrizedFlow.scala From akka_streams_tutorial with MIT License | 5 votes |
package sample.stream_shared_state import akka.Done import akka.actor.{ActorSystem, Cancellable} import akka.stream.scaladsl.{Flow, GraphDSL, Keep, Sink, Source, SourceQueueWithComplete, Zip} import akka.stream.{FlowShape, OverflowStrategy} import scala.collection.immutable import scala.concurrent.Future import scala.concurrent.duration._ import scala.util.{Failure, Success} object ParametrizedFlow extends App { val service = ParameterizedFlowService Thread.sleep(5000) service.update(1.0) Thread.sleep(2000) service.update(1.5) Thread.sleep(2000) service.cancel() Thread.sleep(2000) println(service.result()) } object ParameterizedFlowService { implicit val system = ActorSystem("ParameterizedFlowService") implicit val executionContext = system.dispatcher def update(element: Double): Unit = flow._1._2.offer(element) def cancel(): Boolean = flow._1._1.cancel() def result(): Future[Seq[Double]] = flow._2 val fun = (flowValue: Int, paramValue: Double) => flowValue * paramValue val flow: ((Cancellable, SourceQueueWithComplete[Double]), Future[immutable.Seq[Double]]) = Source.tick(0.seconds, 500.millis, 10) .viaMat(createParamFlow(1, OverflowStrategy.dropBuffer, 0.5)(fun))(Keep.both) .wireTap(x => println(x)) .toMat(Sink.seq)(Keep.both) .run() val done: Future[Done] = flow._1._2.watchCompletion() terminateWhen(done) private def createParamFlow[A, P, O](bufferSize: Int, overflowStrategy: OverflowStrategy, initialParam: P)(fun: (A, P) => O) = Flow.fromGraph(GraphDSL.create(Source.queue[P](bufferSize, overflowStrategy)) { implicit builder => queue => import GraphDSL.Implicits._ val zip = builder.add(Zip[A, P]()) //Interesting use of the extrapolate operator //based on https://doc.akka.io/docs/akka/current/stream/stream-rate.html#understanding-extrapolate-and-expand val extra = builder.add(Flow[P].extrapolate(Iterator.continually(_), Some(initialParam))) val map = builder.add(Flow[(A, P)].map(r => fun(r._1, r._2))) queue ~> extra ~> zip.in1 zip.out ~> map FlowShape(zip.in0, map.out) }) private def terminateWhen(done: Future[_]) = { done.onComplete { case Success(_) => println("Flow Success. About to terminate...") system.terminate() case Failure(e) => println(s"Flow Failure: $e. About to terminate...") system.terminate() } } }
Example 102
Source File: XmlProcessing.scala From akka_streams_tutorial with MIT License | 5 votes |
package alpakka.xml import java.nio.file.Paths import java.util.Base64 import akka.actor.ActorSystem import akka.stream.alpakka.xml.scaladsl.XmlParsing import akka.stream.alpakka.xml.{EndElement, ParseEvent, StartElement, TextEvent} import akka.stream.scaladsl.{FileIO, Sink, Source} import akka.util.ByteString import scala.collection.immutable import scala.concurrent.Future import scala.util.{Failure, Success} object XmlProcessing extends App { implicit val system = ActorSystem("XmlProcessing") implicit val executionContext = system.dispatcher val resultFileName = "testfile_result.jpg" val done = FileIO.fromPath(Paths.get("./src/main/resources/xml_with_base64_embedded.xml")) .via(XmlParsing.parser) .statefulMapConcat(() => { // state val stringBuilder: StringBuilder = StringBuilder.newBuilder var counter: Int = 0 // aggregation function parseEvent: ParseEvent => parseEvent match { case s: StartElement if s.attributes.contains("mediaType") => stringBuilder.clear() val mediaType = s.attributes.head._2 println("mediaType: " + mediaType) immutable.Seq(mediaType) case s: EndElement if s.localName == "embeddedDoc" => val text = stringBuilder.toString println("File content: " + text) //large embedded files are read into memory Source.single(ByteString(text)) .map(each => ByteString(Base64.getMimeDecoder.decode(each.toByteBuffer))) .runWith(FileIO.toPath(Paths.get(s"$counter-$resultFileName"))) counter = counter + 1 immutable.Seq(text) case t: TextEvent => stringBuilder.append(t.text) immutable.Seq.empty case _ => immutable.Seq.empty } }) .runWith(Sink.ignore) terminateWhen(done) def terminateWhen(done: Future[_]) = { done.onComplete { case Success(_) => println("Flow Success. About to terminate...") system.terminate() case Failure(e) => println(s"Flow Failure: $e. About to terminate...") system.terminate() } } }
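statefulMapConcat is the workhorse here: the outer () => ... block runs once per materialization, so the StringBuilder and counter are private to each run of the stream. The pattern in miniature:

import akka.stream.scaladsl.Flow
import scala.collection.immutable

// Numbers each incoming line; `n` is created fresh per materialization.
val numbered = Flow[String].statefulMapConcat { () =>
  var n = 0
  line => {
    n += 1
    immutable.Seq(s"$n: $line")
  }
}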
Example 103
Source File: InputFormatConf.scala From flint with Apache License 2.0 | 5 votes |
package com.twosigma.flint.hadoop import org.apache.hadoop.conf.Configuration import org.apache.hadoop.fs.{ FileSystem, Path } import org.apache.hadoop.io.{ LongWritable, Text, Writable } import org.apache.hadoop.mapreduce.{ InputFormat, InputSplit, Job, RecordReader } import org.apache.hadoop.mapreduce.lib.input.{ FileInputFormat, FileSplit, TextInputFormat } import scala.collection.immutable trait InputFormatConf[K, V] extends Serializable { type IF <: InputFormat[K, V] type Split <: InputSplit with Writable type KExtract <: Extract[K] type VExtract <: Extract[V] def kExtract: KExtract def vExtract: VExtract def makeInputFormat(): IF // I'm unsure if we should WriSer them for them def makeSplits(hadoopConf: Configuration): IndexedSeq[WriSer[Split]] // TODO do we want to require typing of the RecordReader as well? final def createRecordReader(hadoopConf: Configuration, split: Split, inputFormat: IF = makeInputFormat()): RecordReader[K, V] = { val tac = ConfOnlyTAC(hadoopConf) val recordReader = inputFormat.createRecordReader(split, tac) recordReader.initialize(split, tac) recordReader } } case class TextInputFormatConf(file: String, partitions: Int) extends InputFormatConf[LongWritable, Text] { type IF = TextInputFormat type Split = FileSplit // TODO now that we figured out what's up, see if we can't eliminate the need for this... val internalK = Extract.unit[LongWritable] val internalV = Extract.text type KExtract = internalK.type type VExtract = internalV.type override val kExtract: KExtract = internalK override val vExtract: VExtract = internalV def makeInputFormat() = new TextInputFormat() def makeSplits(hadoopConf: Configuration): immutable.IndexedSeq[WriSer[FileSplit]] = { val job = Job.getInstance(hadoopConf) FileInputFormat.setInputPaths(job, file) val path = new Path(file) val len = FileSystem.get(hadoopConf).listStatus(path).head.getLen val size_per = math.round(len / partitions.toDouble) ((0 until partitions - 1).map { p => new FileSplit(path, size_per * p, size_per, null) } :+ { val fin = size_per * (partitions - 1) new FileSplit(path, fin, len - fin, null) }).map(WriSer(_)) } } // TODO do we really get much from having this as its own class? consider just making a def csv method in TextInputFormatConf object CSVInputFormatConf { def apply[V](ifc: InputFormatConf[LongWritable, V] { type Split = FileSplit }): InputFormatConf[LongWritable, V] { type IF = ifc.IF type Split = ifc.Split type KExtract = ifc.KExtract type VExtract = ifc.VExtract } = new InputFormatConf[LongWritable, V] { type IF = ifc.IF type Split = ifc.Split type KExtract = ifc.KExtract type VExtract = ifc.VExtract override val kExtract: KExtract = ifc.kExtract override val vExtract: VExtract = ifc.vExtract override def makeInputFormat() = ifc.makeInputFormat() override def makeSplits(hadoopConf: Configuration) = { val splits = ifc.makeSplits(hadoopConf) splits.headOption.fold(IndexedSeq.empty[WriSer[Split]]) { case WriSer(head) => val rr = createRecordReader(hadoopConf, head) require(rr.nextKeyValue, "csv has no header, first line was empty") val afterHeader = rr.getCurrentKey.get require(rr.nextKeyValue, "first split is empty") WriSer(new FileSplit(head.getPath, afterHeader, head.getLength - afterHeader, null)) +: splits.tail } } } }
Example 104
Source File: Arbiter.scala From Principles-of-Reactive-Programming with GNU General Public License v3.0 | 5 votes |
package kvstore import akka.actor.{ActorRef, Actor} import scala.collection.immutable object Arbiter { case object Join case object JoinedPrimary case object JoinedSecondary case class Replicas(replicas: Set[ActorRef]) } class Arbiter extends Actor { import Arbiter._ var leader: Option[ActorRef] = None var replicas = Set.empty[ActorRef] def receive = { case Join => if (leader.isEmpty) { leader = Some(sender) replicas += sender sender ! JoinedPrimary } else { replicas += sender sender ! JoinedSecondary } leader foreach (_ ! Replicas(replicas)) } }
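A sketch of the handshake, assuming an ActorSystem named system is in scope: the first joiner becomes primary, later joiners become secondaries, and each Join re-sends the full replica set to the leader.

import akka.actor.Props

val arbiter = system.actorOf(Props[Arbiter], "arbiter")
arbiter ! Arbiter.Join // replies JoinedPrimary to the first sender, JoinedSecondary afterwards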
Example 105
Source File: MetadataManagerSpec.scala From flink-jpmml with GNU Affero General Public License v3.0 | 5 votes |
package io.radicalbit.flink.pmml.scala.api.managers import io.radicalbit.flink.pmml.scala.models.control.{AddMessage, DelMessage} import io.radicalbit.flink.pmml.scala.models.core.{ModelId, ModelInfo} import io.radicalbit.flink.pmml.scala.utils.PmmlLoaderKit import org.scalatest.{Matchers, WordSpec} import scala.collection.immutable abstract class MetadataManagerSpec[M: MetadataManager] extends WordSpec with Matchers with PmmlLoaderKit { val modelName = "model" val modelVersion = 1 val modelPath: String = getPMMLSource(Source.KmeansPmml) val modelId: ModelId = ModelId(modelName, modelVersion) val modelInfo = ModelInfo(modelPath) val in = immutable.Map(modelId -> modelInfo) val unknownIn = immutable.Map(ModelId("unknown-id", scala.util.Random.nextLong()) -> modelInfo) def outOnKnown: immutable.Map[ModelId, ModelInfo] = toOut(in) def outOnUnknown: immutable.Map[ModelId, ModelInfo] = toOut(unknownIn) def toOut(in: immutable.Map[ModelId, ModelInfo]): immutable.Map[ModelId, ModelInfo] def controlMessage: M "MetadataManager" should { "manage metadata correctly if targeted model is not already in metadata (Add add metadata, Del returns input)" in { MetadataManager(controlMessage, unknownIn) shouldBe outOnUnknown } "manage metadata correctly if targeted model already exists (Add returns input, Del removes metadata)" in { MetadataManager(controlMessage, in) shouldBe outOnKnown } } } class AddMetadataManagerSpec extends MetadataManagerSpec[AddMessage] { override lazy val controlMessage: AddMessage = AddMessage(modelName, modelVersion, modelPath, System.currentTimeMillis()) override def toOut(in: immutable.Map[ModelId, ModelInfo]): immutable.Map[ModelId, ModelInfo] = in.get(modelId) match { case Some(_) => in case None => in + (modelId -> modelInfo) } } class RemoveMetadataManagerSpec extends MetadataManagerSpec[DelMessage] { override lazy val controlMessage: DelMessage = DelMessage(modelName, modelVersion, System.currentTimeMillis()) override def toOut(in: immutable.Map[ModelId, ModelInfo]): immutable.Map[ModelId, ModelInfo] = in - ModelId(modelName, modelVersion) }
Example 106
Source File: CorsSupportSpec.scala From hydra with Apache License 2.0 | 5 votes |
package hydra.core.http import akka.http.scaladsl.model.HttpMethods import ch.megard.akka.http.cors.scaladsl.model.HttpOriginMatcher import org.scalatest.matchers.should.Matchers import org.scalatest.funspec.AnyFunSpecLike import scala.collection.immutable class CorsSupportSpec extends Matchers with AnyFunSpecLike with CorsSupport { describe("Cors Support") { it("has sensible defaults") { settings.allowCredentials shouldBe false settings.exposedHeaders shouldBe immutable.Seq("Link") settings.allowedMethods shouldBe Seq( HttpMethods.GET, HttpMethods.POST, HttpMethods.HEAD, HttpMethods.OPTIONS ) settings.allowedOrigins shouldBe HttpOriginMatcher.* settings.maxAge shouldBe Some(1800) settings.allowGenericHttpRequests shouldBe true } } }
Example 107
Source File: ClickhouseQueryBuilder.scala From clickhouse-scala-client with GNU Lesser General Public License v3.0 | 5 votes |
package com.crobox.clickhouse.internal import akka.http.scaladsl.model.Uri.Query import akka.http.scaladsl.model.headers.{HttpEncodingRange, RawHeader} import akka.http.scaladsl.model.{HttpMethods, HttpRequest, RequestEntity, Uri} import com.crobox.clickhouse.internal.QuerySettings.ReadQueries import com.crobox.clickhouse.internal.progress.ProgressHeadersAsEventsStage import com.typesafe.config.Config import com.typesafe.scalalogging.LazyLogging import scala.collection.immutable private[clickhouse] trait ClickhouseQueryBuilder extends LazyLogging { private val Headers = { import HttpEncodingRange.apply import akka.http.scaladsl.model.headers.HttpEncodings.{deflate, gzip} import akka.http.scaladsl.model.headers.`Accept-Encoding` immutable.Seq(`Accept-Encoding`(gzip, deflate)) } private val MaxUriSize = 16 * 1024 protected def toRequest(uri: Uri, query: String, queryIdentifier: Option[String], settings: QuerySettings, entity: Option[RequestEntity])(config: Config): HttpRequest = { val urlQuery = uri.withQuery(Query(Query("query" -> query) ++ settings.withFallback(config).asQueryParams: _*)) entity match { case Some(e) => logger.debug(s"Executing clickhouse query [$query] on host [${uri .toString()}] with entity payload of length ${e.contentLengthOption}") HttpRequest( method = HttpMethods.POST, uri = urlQuery, entity = e, headers = Headers ++ queryIdentifier.map(RawHeader(ProgressHeadersAsEventsStage.InternalQueryIdentifier, _)) ) case None if settings.idempotent.contains(true) && settings.readOnly == ReadQueries && urlQuery .toString() .getBytes .length < MaxUriSize => //max url size logger.debug(s"Executing clickhouse idempotent query [$query] on host [${uri.toString()}]") HttpRequest( method = HttpMethods.GET, uri = urlQuery.withQuery( urlQuery .query() .filterNot( _._1 == "readonly" ) //get requests are readonly by default, if we send the readonly flag clickhouse will fail the request ), headers = Headers ++ queryIdentifier.map(RawHeader(ProgressHeadersAsEventsStage.InternalQueryIdentifier, _)) ) case None => logger.debug(s"Executing clickhouse query [$query] on host [${uri.toString()}]") HttpRequest( method = HttpMethods.POST, uri = uri.withQuery(settings.withFallback(config).asQueryParams), entity = query, headers = Headers ++ queryIdentifier.map(RawHeader(ProgressHeadersAsEventsStage.InternalQueryIdentifier, _)) ) } } }
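The GET branch exists because idempotent, read-only queries are safe to retry at the HTTP layer; the SQL then has to travel inside the URL, which is what the 16 KiB MaxUriSize guard protects. A small sketch of that encoding with plain akka-http types:

import akka.http.scaladsl.model.Uri

// The SQL text becomes the `query` parameter (percent-encoded); the code above
// appends the remaining settings the same way via settings.asQueryParams.
val uri = Uri("http://localhost:8123/").withQuery(Uri.Query("query" -> "SELECT 1"))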
Example 108
Source File: HttpRequestConversionSupport.scala From rokku with Apache License 2.0 | 5 votes |
package com.ing.wbaa.rokku.proxy.persistence.serializers import java.net.InetAddress import akka.http.scaladsl.model.HttpHeader.ParsingResult import akka.http.scaladsl.model.{ HttpEntity, HttpHeader, HttpMethod, HttpMethods, HttpProtocol, HttpRequest, RemoteAddress, Uri } import com.ing.wbaa.rokku.proxy.data.{ UserAssumeRole, UserRawJson } import spray.json.DefaultJsonProtocol import scala.collection.immutable trait HttpRequestConversionSupport extends DefaultJsonProtocol { case class SimplifiedRemoteAddress(host: String) { def toRemoteAddr: RemoteAddress = { val a = host.split(":") RemoteAddress(InetAddress.getByName(a(0)), Some(a(1).toInt)) } } case class SimplifiedHttpRequest(method: String, uri: String, headers: List[String], entity: String, httpProtocol: String) implicit val httpRequestF = jsonFormat5(SimplifiedHttpRequest) implicit val userRoleF = jsonFormat1(UserAssumeRole) implicit val userSTSF = jsonFormat5(UserRawJson) implicit val remoteAddressF = jsonFormat1(SimplifiedRemoteAddress) private[persistence] def convertAkkaHeadersToStrings(headers: Seq[HttpHeader]): List[String] = headers.map(h => s"${h.name()}=${h.value()}").toList private def convertStringsToAkkaHeaders(headers: List[String]): immutable.Seq[HttpHeader] = headers.map { p => val kv = p.split("=") HttpHeader.parse(kv(0), kv(1)) match { case ParsingResult.Ok(header, _) => header case ParsingResult.Error(error) => throw new Exception(s"Unable to convert to HttpHeader: ${error.summary}") } } private def httpMethodFrom(m: String): HttpMethod = m match { case "GET" => HttpMethods.GET case "HEAD" => HttpMethods.HEAD case "PUT" => HttpMethods.PUT case "POST" => HttpMethods.POST case "DELETE" => HttpMethods.DELETE } private[persistence] def toAkkaHttpRequest(s: SimplifiedHttpRequest): HttpRequest = HttpRequest( httpMethodFrom(s.method), Uri(s.uri), convertStringsToAkkaHeaders(s.headers), HttpEntity(s.entity), HttpProtocol(s.httpProtocol) ) }
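The name=value round trip goes back through HttpHeader.parse, which re-validates each header; note that kv(1) keeps only the text between the first and second '=', so this persistence format assumes header values without embedded '='. The parse step in isolation:

import akka.http.scaladsl.model.HttpHeader
import akka.http.scaladsl.model.HttpHeader.ParsingResult

val header = HttpHeader.parse("Host", "localhost:8987") match {
  case ParsingResult.Ok(h, _)   => h
  case ParsingResult.Error(err) => throw new Exception(err.summary)
}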
Example 109
Source File: FilterRecursiveMultiDelete.scala From rokku with Apache License 2.0 | 5 votes |
package com.ing.wbaa.rokku.proxy.handler import akka.stream.ActorMaterializer import akka.stream.alpakka.xml.scaladsl.XmlParsing import akka.stream.alpakka.xml.{ EndElement, StartElement, TextEvent } import akka.stream.scaladsl.{ Sink, Source } import akka.util.ByteString import scala.collection.immutable import scala.collection.mutable.ListBuffer import scala.concurrent.Future object FilterRecursiveMultiDelete { def exctractMultideleteObjectsFlow(source: Source[ByteString, Any])(implicit materializer: ActorMaterializer): Future[Seq[String]] = { var isKeyTag = false source .via(XmlParsing.parser) .statefulMapConcat(() => { val keys = new ListBuffer[String] isKeyTag = false parseEvent => parseEvent match { case e: StartElement if e.localName.startsWith("Delete") => keys.clear() immutable.Seq.empty case e: StartElement if e.localName == "Key" => isKeyTag = true immutable.Seq.empty case e: EndElement if e.localName == "Key" => isKeyTag = false immutable.Seq.empty case e: TextEvent => if (isKeyTag) keys.append(e.text) immutable.Seq.empty case e: EndElement if e.localName == "Delete" => immutable.Seq(keys).flatten case _ => immutable.Seq.empty } }).runWith(Sink.seq) } }
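A usage sketch; the XML shape is inferred from the parser above, and the exctractMultideleteObjectsFlow spelling is the method's actual name in the source:

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.Source
import akka.util.ByteString

implicit val system = ActorSystem()
implicit val materializer = ActorMaterializer()

val body = Source.single(ByteString(
  "<Delete><Object><Key>a.txt</Key></Object><Object><Key>b.txt</Key></Object></Delete>"))
val keys = FilterRecursiveMultiDelete.exctractMultideleteObjectsFlow(body)
// Future[Seq[String]] completing with Seq("a.txt", "b.txt")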
Example 110
Source File: FilterRecursiveListBucketHandler.scala From rokku with Apache License 2.0 | 5 votes |
package com.ing.wbaa.rokku.proxy.handler

import java.net.URLDecoder

import akka.NotUsed
import akka.http.scaladsl.model.{ HttpRequest, HttpResponse }
import akka.stream.alpakka.xml.scaladsl.{ XmlParsing, XmlWriting }
import akka.stream.alpakka.xml.{ EndElement, ParseEvent, StartElement, TextEvent }
import akka.stream.scaladsl.Flow
import akka.util.ByteString
import com.ing.wbaa.rokku.proxy.data.{ Read, RequestId, S3Request, User }

import scala.collection.immutable
import scala.collection.mutable.ListBuffer

// Trait declaration restored: the extracted snippet dropped the enclosing type.
// The authorization check is assumed to be implemented elsewhere in the project.
trait FilterRecursiveListBucketHandler {

  protected[this] def isUserAuthorizedForRequest(request: S3Request, user: User)(implicit id: RequestId): Boolean

  protected[this] def filterRecursiveListObjects(user: User, requestS3: S3Request)(implicit id: RequestId): Flow[ByteString, ByteString, NotUsed] = {
    def elementResult(allContentsElements: ListBuffer[ParseEvent], isContentsTag: Boolean, element: ParseEvent): immutable.Seq[ParseEvent] = {
      if (isContentsTag) {
        allContentsElements += element
        immutable.Seq.empty
      } else {
        immutable.Seq(element)
      }
    }

    def isPathOkInRangerPolicy(path: String)(implicit id: RequestId): Boolean = {
      val pathToCheck = normalizePath(path)
      val isUserAuthorized = isUserAuthorizedForRequest(requestS3.copy(s3BucketPath = Some(pathToCheck)), user)
      isUserAuthorized
    }

    def normalizePath(path: String): String = {
      val delimiter = "/"
      val decodedPath = URLDecoder.decode(path, "UTF-8")
      val delimiterIndex = decodedPath.lastIndexOf(delimiter)
      val pathToCheckWithoutLastSlash = if (delimiterIndex > 0) delimiter + decodedPath.substring(0, delimiterIndex) else ""
      val s3BucketName = requestS3.s3BucketPath.getOrElse(delimiter)
      val s3pathWithoutLastDelimiter = if (s3BucketName.length > 1 && s3BucketName.endsWith(delimiter)) s3BucketName.substring(0, s3BucketName.length - 1) else s3BucketName
      s3pathWithoutLastDelimiter + pathToCheckWithoutLastSlash
    }

    Flow[ByteString].via(XmlParsing.parser)
      .statefulMapConcat(() => {
        // state
        val keyTagValue = StringBuilder.newBuilder
        val allContentsElements = new ListBuffer[ParseEvent]
        var isContentsTag = false
        var isKeyTag = false

        // aggregation function
        parseEvent =>
          parseEvent match {
            //catch <Contents> to start collecting elements
            case element: StartElement if element.localName == "Contents" =>
              isContentsTag = true
              allContentsElements.clear()
              allContentsElements += element
              immutable.Seq.empty
            //catch end </Contents> to validate the path in ranger
            case element: EndElement if element.localName == "Contents" =>
              isContentsTag = false
              allContentsElements += element
              if (isPathOkInRangerPolicy(keyTagValue.stripMargin)) {
                allContentsElements.toList
              } else {
                immutable.Seq.empty
              }
            // catch <Key> where the path name to match in ranger is
            case element: StartElement if element.localName == "Key" =>
              keyTagValue.clear()
              isKeyTag = true
              elementResult(allContentsElements, isContentsTag, element)
            //catch end </Key>
            case element: EndElement if element.localName == "Key" =>
              isKeyTag = false
              elementResult(allContentsElements, isContentsTag, element)
            //catch all element text <..>text<\..> but only set the text from <Key>
            case element: TextEvent =>
              if (isKeyTag) keyTagValue.append(element.text)
              elementResult(allContentsElements, isContentsTag, element)
            //just pass through the rest of the elements
            case element =>
              elementResult(allContentsElements, isContentsTag, element)
          }
      })
      .via(XmlWriting.writer)
  }
}
Example 111
Source File: HttpRequestRecorderSpec.scala From rokku with Apache License 2.0 | 5 votes |
package com.ing.wbaa.rokku.proxy.persistence import java.net.InetAddress import akka.actor.{ ActorSystem, PoisonPill, Props } import akka.http.scaladsl.model.HttpHeader.ParsingResult import akka.http.scaladsl.model._ import akka.testkit.{ ImplicitSender, TestKit } import com.ing.wbaa.rokku.proxy.data._ import com.ing.wbaa.rokku.proxy.persistence.HttpRequestRecorder.{ ExecutedRequestCmd, LatestRequests, LatestRequestsResult } import org.scalatest.BeforeAndAfterAll import org.scalatest.diagrams.Diagrams import org.scalatest.wordspec.AnyWordSpecLike import scala.collection.immutable class HttpRequestRecorderSpec extends TestKit(ActorSystem("RequestRecorderTest")) with ImplicitSender with AnyWordSpecLike with Diagrams with BeforeAndAfterAll { override def afterAll: Unit = { TestKit.shutdownActorSystem(system) } private def convertStringsToAkkaHeaders(headers: List[String]): immutable.Seq[HttpHeader] = headers.map { p => val kv = p.split("=") HttpHeader.parse(kv(0), kv(1)) match { case ParsingResult.Ok(header, _) => header case ParsingResult.Error(error) => throw new Exception(s"Unable to convert to HttpHeader: ${error.summary}") } } val requestRecorder = system.actorOf(Props(classOf[HttpRequestRecorder]), "localhost-1") val headers = List("Remote-Address=0:0:0:0:0:0:0:1:58170", "Host=localhost:8987", "X-Amz-Content-SHA256=02502914aca52472205417e4c418ee499ba39ca1b283d99da26e295df2eccf32", "User-Agent=aws-cli/1.16.30 Python/2.7.5 Linux/3.10.0-862.14.4.el7.x86_64 botocore/1.12.20", "Content-MD5=Wf7l+rCPsVw8eqc34kVJ1g==", "Authorization=AWS4-HMAC-SHA256 Credential=6r24619bHVWvrxR5AMHNkGZ6vNRXoGCP/20190704/us-east-1/s3/aws4_request", "SignedHeaders=content-md5;host;x-amz-content-sha256;x-amz-date;x-amz-security-token", "Signature=271dda503da6fcf04cc058cb514b28a6d522a9b712ab553bfb88fb7814ab082f") val httpRequest = HttpRequest( HttpMethods.PUT, Uri("http://127.0.0.1:8010/home/testuser/file34"), convertStringsToAkkaHeaders(headers), HttpEntity.Empty.withContentType(ContentTypes.`application/octet-stream`).toString(), HttpProtocols.`HTTP/1.1` ) val userSTS = User(UserName("okUser"), Set(UserGroup("okGroup")), AwsAccessKey("accesskey"), AwsSecretKey("secretkey"), UserAssumeRole("")) val clientIPAddress = RemoteAddress(InetAddress.getByName("localhost"), Some(1234)) "RequestRecorder" should { "persist Http request event" in { requestRecorder ! ExecutedRequestCmd(httpRequest, userSTS, clientIPAddress) requestRecorder ! LatestRequests(1) expectMsg(LatestRequestsResult(List(ExecutedRequestEvt(httpRequest, userSTS, clientIPAddress)))) requestRecorder ! PoisonPill val requestRecorder1 = system.actorOf(Props(classOf[HttpRequestRecorder]), "localhost-2") requestRecorder1 ! LatestRequests(1) expectMsg(LatestRequestsResult(List(ExecutedRequestEvt(httpRequest, userSTS, clientIPAddress)))) } } }
Example 112
Source File: UnsortedSetDeserializerModule.scala From mango with Apache License 2.0 | 5 votes |
package com.kakao.shaded.jackson.module.scala.deser import scala.collection.generic.GenericCompanion import java.util.AbstractCollection import scala.collection.{immutable, mutable} import com.kakao.shaded.jackson.module.scala.modifiers.SetTypeModifierModule import com.kakao.shaded.jackson.databind.`type`.CollectionLikeType import com.kakao.shaded.jackson.databind.jsontype.TypeDeserializer import com.kakao.shaded.jackson.core.JsonParser import com.kakao.shaded.jackson.databind.deser.std.{StdValueInstantiator, CollectionDeserializer, ContainerDeserializerBase} import com.kakao.shaded.jackson.databind.deser.{ContextualDeserializer, Deserializers, ValueInstantiator} import com.kakao.shaded.jackson.databind.{BeanProperty, JavaType, BeanDescription, DeserializationContext, JsonDeserializer, DeserializationConfig} import com.kakao.shaded.jackson.module.scala.util.CompanionSorter private class SetBuilderWrapper[E](val builder: mutable.Builder[E, _ <: collection.Set[E]]) extends AbstractCollection[E] { override def add(e: E) = { builder += e; true } // Required by AbstractCollection, but the deserializer doesn't care about them. def iterator() = null def size() = 0 } private object UnsortedSetDeserializer { val COMPANIONS = new CompanionSorter[collection.Set]() .add(immutable.HashSet) .add(immutable.ListSet) .add(immutable.Set) .add(mutable.HashSet) .add(mutable.LinkedHashSet) .add(mutable.Set) .toList def companionFor(cls: Class[_]): GenericCompanion[collection.Set] = COMPANIONS find { _._1.isAssignableFrom(cls) } map { _._2 } getOrElse Set def builderFor[A](cls: Class[_]): mutable.Builder[A, collection.Set[A]] = companionFor(cls).newBuilder[A] } private class SetInstantiator(config: DeserializationConfig, valueType: Class[_]) extends StdValueInstantiator(config, valueType) { override def canCreateUsingDefault = true override def createUsingDefault(ctxt: DeserializationContext) = new SetBuilderWrapper[AnyRef](UnsortedSetDeserializer.builderFor[AnyRef](valueType)) } private class UnsortedSetDeserializer(collectionType: JavaType, containerDeserializer: CollectionDeserializer) extends ContainerDeserializerBase[collection.Set[_]](collectionType) with ContextualDeserializer { def this(collectionType: JavaType, valueDeser: JsonDeserializer[Object], valueTypeDeser: TypeDeserializer, valueInstantiator: ValueInstantiator) = this(collectionType, new CollectionDeserializer(collectionType, valueDeser, valueTypeDeser, valueInstantiator)) def createContextual(ctxt: DeserializationContext, property: BeanProperty) = { val newDelegate = containerDeserializer.createContextual(ctxt, property) new UnsortedSetDeserializer(collectionType, newDelegate) } override def getContentType = containerDeserializer.getContentType override def getContentDeserializer = containerDeserializer.getContentDeserializer override def deserialize(jp: JsonParser, ctxt: DeserializationContext): collection.Set[_] = containerDeserializer.deserialize(jp, ctxt) match { case wrapper: SetBuilderWrapper[_] => wrapper.builder.result() } } private object UnsortedSetDeserializerResolver extends Deserializers.Base { private final val SET = classOf[collection.Set[_]] private final val SORTED_SET = classOf[collection.SortedSet[_]] override def findCollectionLikeDeserializer(collectionType: CollectionLikeType, config: DeserializationConfig, beanDesc: BeanDescription, elementTypeDeserializer: TypeDeserializer, elementDeserializer: JsonDeserializer[_]): JsonDeserializer[_] = { val rawClass = collectionType.getRawClass if (!SET.isAssignableFrom(rawClass)) 
null else if (SORTED_SET.isAssignableFrom(rawClass)) null else { val deser = elementDeserializer.asInstanceOf[JsonDeserializer[AnyRef]] val instantiator = new SetInstantiator(config, rawClass) new UnsortedSetDeserializer(collectionType, deser, elementTypeDeserializer, instantiator) } } } trait UnsortedSetDeserializerModule extends SetTypeModifierModule { this += (_ addDeserializers UnsortedSetDeserializerResolver) }
Example 113
Source File: DescTableReponse.scala From XSQL with Apache License 2.0 | 5 votes |
package org.apache.spark.sql.execution.datasources.druid import org.json4s.JsonAST.{JArray, JObject, JValue} import org.json4s.JsonDSL._ import scala.collection.{immutable, mutable} case class DescTableRequest(dataSource: String) { def toJson: JValue = { JObject("queryType" -> "segmentMetadata", "dataSource" -> dataSource) } } case class DescTableResponse(data: Seq[(String, Any)]) object DescTableResponse { def parse(js: JValue): DescTableResponse = { var arr = new mutable.HashMap[String, Any] js match { case JArray(results) => val columns = (results.last \ "columns").asInstanceOf[JObject].values columns.foreach { col => arr += (col._1 -> col._2 .asInstanceOf[immutable.HashMap[String, String]] .get("type") .get) } DescTableResponse(arr.toSeq.sortBy(_._1)) case err @ _ => throw new IllegalArgumentException("Invalid time series response: " + err) } } }
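The request half is small enough to show end to end; a sketch rendering it with json4s-jackson:

import org.json4s.jackson.JsonMethods.{ compact, render }

val json = compact(render(DescTableRequest("wikipedia").toJson))
// {"queryType":"segmentMetadata","dataSource":"wikipedia"}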
Example 114
Source File: S3SnapshotStore.scala From akka-persistence-s3 with MIT License | 5 votes |
package akka.persistence.s3 package snapshot import java.io.ByteArrayInputStream import akka.actor.ActorLogging import akka.persistence.serialization.Snapshot import akka.persistence.{ SelectedSnapshot, SnapshotMetadata, SnapshotSelectionCriteria } import akka.persistence.snapshot.SnapshotStore import akka.serialization.SerializationExtension import com.amazonaws.services.s3.model.{ ObjectMetadata, S3ObjectInputStream, ListObjectsRequest } import com.typesafe.config.Config import scala.collection.JavaConversions._ import scala.collection.immutable import scala.concurrent.Future import scala.util.control.NonFatal case class SerializationResult(stream: ByteArrayInputStream, size: Int) class S3SnapshotStore(config: Config) extends SnapshotStore with ActorLogging with SnapshotKeySupport { import context.dispatcher val settings = new S3SnapshotConfig(config) val s3Client: S3Client = new S3Client { val s3ClientConfig = new S3ClientConfig(context.system.settings.config.getConfig("s3-client")) } private val serializationExtension = SerializationExtension(context.system) private val s3Dispatcher = context.system.dispatchers.lookup("s3-snapshot-store.s3-client-dispatcher") val extensionName = settings.extension override def loadAsync(persistenceId: String, criteria: SnapshotSelectionCriteria): Future[Option[SelectedSnapshot]] = { snapshotMetadatas(persistenceId, criteria) .map(_.sorted.takeRight(settings.maxLoadAttempts)) .flatMap(load) } private def load(metadata: immutable.Seq[SnapshotMetadata]): Future[Option[SelectedSnapshot]] = metadata.lastOption match { case None => Future.successful(None) case Some(md) => s3Client.getObject(settings.bucketName, snapshotKey(md))(s3Dispatcher) .map { obj => val snapshot = deserialize(obj.getObjectContent) Some(SelectedSnapshot(md, snapshot.data)) } recoverWith { case NonFatal(e) => log.error(e, s"Error loading snapshot [${md}]") load(metadata.init) // try older snapshot } } override def saveAsync(metadata: SnapshotMetadata, snapshot: Any): Future[Unit] = { val serialized = serialize(Snapshot(snapshot)) val objectMetadata = new ObjectMetadata() objectMetadata.setContentLength(serialized.size) s3Client.putObject( settings.bucketName, snapshotKey(metadata), serialized.stream, objectMetadata )(s3Dispatcher).map(_ => ()) } override def deleteAsync(metadata: SnapshotMetadata): Future[Unit] = { if (metadata.timestamp == 0L) deleteAsync(metadata.persistenceId, SnapshotSelectionCriteria(metadata.sequenceNr, Long.MaxValue, metadata.sequenceNr, Long.MinValue)) else s3Client.deleteObject(settings.bucketName, snapshotKey(metadata))(s3Dispatcher) } override def deleteAsync(persistenceId: String, criteria: SnapshotSelectionCriteria): Future[Unit] = { val metadatas = snapshotMetadatas(persistenceId, criteria) metadatas.map(list => Future.sequence(list.map(deleteAsync))) } private def snapshotMetadatas(persistenceId: String, criteria: SnapshotSelectionCriteria): Future[List[SnapshotMetadata]] = { s3Client.listObjects( new ListObjectsRequest() .withBucketName(settings.bucketName) .withPrefix(prefixFromPersistenceId(persistenceId)) .withDelimiter("/") )(s3Dispatcher) .map(_.getObjectSummaries.toList.map(s => parseKeyToMetadata(s.getKey)) .filter(m => m.sequenceNr >= criteria.minSequenceNr && m.sequenceNr <= criteria.maxSequenceNr && m.timestamp >= criteria.minTimestamp && m.timestamp <= criteria.maxTimestamp)) } protected def deserialize(inputStream: S3ObjectInputStream): Snapshot = serializationExtension.deserialize(akka.persistence.serialization.streamToBytes(inputStream), 
classOf[Snapshot]).get protected def serialize(snapshot: Snapshot): SerializationResult = { val serialized = serializationExtension.findSerializerFor(snapshot).toBinary(snapshot) SerializationResult(new ByteArrayInputStream(serializationExtension.findSerializerFor(snapshot).toBinary(snapshot)), serialized.size) } }
Example 115
Source File: HttpMetricsDirectives.scala From akka-http-metrics with Apache License 2.0 | 5 votes |
package fr.davit.akka.http.metrics.core.scaladsl.server import akka.http.scaladsl.marshalling.ToEntityMarshaller import akka.http.scaladsl.model.HttpHeader import akka.http.scaladsl.server.Directives._ import akka.http.scaladsl.server.PathMatcher.{Matched, Unmatched} import akka.http.scaladsl.server.directives.BasicDirectives.{mapRequestContext, tprovide} import akka.http.scaladsl.server.directives.RouteDirectives.reject import akka.http.scaladsl.server.util.Tuple import akka.http.scaladsl.server.{Directive, PathMatcher, StandardRoute} import fr.davit.akka.http.metrics.core.HttpMetricsRegistry import fr.davit.akka.http.metrics.core.scaladsl.model.PathLabelHeader import scala.collection.immutable trait HttpMetricsDirectives { def metrics[T <: HttpMetricsRegistry: ToEntityMarshaller](registry: T): StandardRoute = complete(registry) def pathLabeled[L](pm: PathMatcher[L]): Directive[L] = pathPrefixLabeled(pm ~ PathEnd) def pathLabeled[L](pm: PathMatcher[L], label: String): Directive[L] = pathPrefixLabeled(pm ~ PathEnd, label) def pathPrefixLabeled[L](pm: PathMatcher[L]): Directive[L] = rawPathPrefixLabeled(Slash ~ pm) def pathPrefixLabeled[L](pm: PathMatcher[L], label: String): Directive[L] = rawPathPrefixLabeled(Slash ~ pm, label) def rawPathPrefixLabeled[L](pm: PathMatcher[L]): Directive[L] = rawPathPrefixLabeled(pm, None) def rawPathPrefixLabeled[L](pm: PathMatcher[L], label: String): Directive[L] = rawPathPrefixLabeled(pm, Some(label)) private def rawPathPrefixLabeled[L](pm: PathMatcher[L], label: Option[String]): Directive[L] = { implicit val LIsTuple: Tuple[L] = pm.ev extractRequestContext.flatMap { ctx => val pathCandidate = ctx.unmatchedPath.toString pm(ctx.unmatchedPath) match { case Matched(rest, values) => tprovide(values) & mapRequestContext(_ withUnmatchedPath rest) & mapResponseHeaders { headers => var pathHeader = label match { case Some(l) => PathLabelHeader("/" + l) // pm matches additional slash prefix case None => PathLabelHeader(pathCandidate.substring(0, pathCandidate.length - rest.charCount)) } val builder = immutable.Seq.newBuilder[HttpHeader] headers.foreach { case PathLabelHeader(suffix) => pathHeader = PathLabelHeader(pathHeader.value + suffix) case h: HttpHeader => builder += h } builder += pathHeader builder.result() } case Unmatched => reject } } } } object HttpMetricsDirectives extends HttpMetricsDirectives
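A sketch of the labeled matchers in a route: the label stands in for the dynamic segment, so requests for every user id aggregate under a single path dimension in the metrics registry.

import akka.http.scaladsl.server.Directives._
import fr.davit.akka.http.metrics.core.scaladsl.server.HttpMetricsDirectives._

val route = pathLabeled("users" / Segment, "users/:id") { userId =>
  complete(s"user $userId")
}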
Example 116
Source File: UdashFonts.scala From udash-core with Apache License 2.0 | 5 votes |
package io.udash.web.commons.styles.utils import com.avsystem.commons.misc.{AbstractValueEnum, AbstractValueEnumCompanion, EnumCtx, ValueEnumCompanion} import io.udash.css.{CssBase, CssStyle} import scalacss.internal.{AV, FontFace, NonEmptyVector} import scala.collection.immutable final class FontWeight(val value: AV)(implicit enumCtx: EnumCtx) extends AbstractValueEnum object FontWeight extends AbstractValueEnumCompanion[FontWeight] with CssBase { import dsl._ final val Thin: Value = new FontWeight(fontWeight._100) final val Light: Value = new FontWeight(fontWeight._300) final val Regular: Value = new FontWeight(fontWeight._400) final val Medium: Value = new FontWeight(fontWeight._500) final val Bold: Value = new FontWeight(fontWeight._700) final val Black: Value = new FontWeight(fontWeight._900) } final class FontStyle(val value: AV)(implicit enumCtx: EnumCtx) extends AbstractValueEnum object FontStyle extends AbstractValueEnumCompanion[FontStyle] with CssBase { import dsl._ final val Normal: Value = new FontStyle(fontStyle.normal) final val Italic: Value = new FontStyle(fontStyle.italic) } final class FontFileType(val extension: String, val format: Option[String])(implicit enumCtx: EnumCtx) extends AbstractValueEnum object FontFileType extends ValueEnumCompanion[FontFileType] { final val EotFont: Value = new FontFileType("eot", None) final val WoffFont: Value = new FontFileType("woff", Some("format('woff')")) final val Woff2Font: Value = new FontFileType("woff2", Some("format('woff2')")) final val TruetypeFont: Value = new FontFileType("ttf", Some("format('truetype')")) } final case class FontVariation(path: String, fontWeight: FontWeight = FontWeight.Regular, fontStyle: FontStyle = FontStyle.Normal) object FontFamily { val Roboto = "'Roboto', sans-serif" } object UdashFonts extends CssBase { import dsl._ def roboto(fontWeight: FontWeight = FontWeight.Regular, fontStyle: FontStyle = FontStyle.Normal): CssStyle = mixin( fontFamily :=! 
FontFamily.Roboto, fontStyle.value, fontWeight.value ) private val fontFiles = immutable.Seq( FontVariation("/fonts/roboto/Roboto-Black", FontWeight.Black, FontStyle.Normal), FontVariation("/fonts/roboto/Roboto-BlackItalic", FontWeight.Black, FontStyle.Italic), FontVariation("/fonts/roboto/Roboto-Bold", FontWeight.Bold, FontStyle.Normal), FontVariation("/fonts/roboto/Roboto-BoldItalic", FontWeight.Bold, FontStyle.Italic), FontVariation("/fonts/roboto/Roboto-Medium", FontWeight.Medium, FontStyle.Normal), FontVariation("/fonts/roboto/Roboto-MediumItalic", FontWeight.Medium, FontStyle.Italic), FontVariation("/fonts/roboto/Roboto-Regular", FontWeight.Regular, FontStyle.Normal), FontVariation("/fonts/roboto/Roboto-Italic", FontWeight.Regular, FontStyle.Italic), FontVariation("/fonts/roboto/Roboto-Light", FontWeight.Light, FontStyle.Normal), FontVariation("/fonts/roboto/Roboto-LightItalic", FontWeight.Light, FontStyle.Italic), FontVariation("/fonts/roboto/Roboto-Thin", FontWeight.Thin, FontStyle.Normal), FontVariation("/fonts/roboto/Roboto-ThinItalic", FontWeight.Thin, FontStyle.Italic) ) val font: immutable.Map[FontVariation, CssStyle] = fontFiles.map(fontFile => fontFile -> namedFontFace(FontFamily.Roboto, _ => { new FontFace( fontFamily = Some(FontFamily.Roboto), src = NonEmptyVector( fontSrc(fontFile.path, FontFileType.TruetypeFont.extension, FontFileType.TruetypeFont.format) ), fontWeightValue = Some(fontFile.fontWeight.value.value), fontStyleValue = Some(fontFile.fontStyle.value.value) ) } ) ).toMap private def fontSrc(fileUrl: String, fileExt: String, fileFormat: Option[String]) = s"url('$fileUrl.$fileExt')${fileFormat.map(v => s" $v").getOrElse("")}" }
Example 117
Source File: SpringIndirectActorProducer.scala From akka-spring-boot with Apache License 2.0 | 5 votes |
package com.github.scalaspring.akka import akka.actor.{Actor, IndirectActorProducer} import org.springframework.context.ConfigurableApplicationContext import scala.collection.immutable object SpringIndirectActorProducer { def getBeanNameForType(applicationContext: ConfigurableApplicationContext, clazz: Class[_]): String = { val beanNames = applicationContext.getBeanNamesForType(clazz) if (beanNames.length > 1) throw new IllegalArgumentException(s"Multiple beans found for actor class ${clazz.getName} (${beanNames}}). Please use name-based constructor to specify bean name to use.") beanNames.headOption.orElse(throw new IllegalArgumentException(s"No bean defined for actor class ${clazz.getName}")).get } def getTypeForBeanName(applicationContext: ConfigurableApplicationContext, beanName: String): Class[_ <: Actor] = { applicationContext.getBeanFactory.getType(beanName).asInstanceOf[Class[Actor]] } } import SpringIndirectActorProducer._ class SpringIndirectActorProducer(clazz: Class[_ <: Actor], applicationContext: ConfigurableApplicationContext, beanName: String, args: immutable.Seq[AnyRef]) extends IndirectActorProducer { def this(clazz: Class[_ <: Actor], applicationContext: ConfigurableApplicationContext, args: immutable.Seq[AnyRef]) = this(clazz, applicationContext, getBeanNameForType(applicationContext, clazz), args) def this(beanName: String, applicationContext: ConfigurableApplicationContext, args: immutable.Seq[AnyRef]) = this(getTypeForBeanName(applicationContext, beanName), applicationContext, beanName, args) validateActorBeanDefinition protected def validateActorBeanDefinition: Unit = { val beanClass = applicationContext.getBeanFactory.getType(beanName) val beanDefinition = applicationContext.getBeanFactory.getBeanDefinition(beanName) require(actorClass.isAssignableFrom(beanClass), s"""Invalid bean type. Bean "${beanName}" of type ${beanClass.getSimpleName} does not extend ${actorClass.getSimpleName}.""") require(!beanDefinition.isSingleton, s"""Actor beans must be non-singleton. Suggested fix: Annotate ${beanDefinition.getBeanClassName} with the @${classOf[ActorComponent].getSimpleName} annotation to create actor beans with prototype scope.""") // TODO: Validate actor constructor if arguments supplied to enable fail fast (see akka.util.Reflect.findConstructor) } override def actorClass: Class[_ <: Actor] = clazz override def produce(): Actor = { args match { case s if s.isEmpty => applicationContext.getBean(beanName).asInstanceOf[Actor] case _ => applicationContext.getBean(beanName, args: _*).asInstanceOf[Actor] } } }
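A sketch of plugging the producer into Props; MyActor and ctx are hypothetical placeholders for a prototype-scoped actor bean and a ConfigurableApplicationContext:

import akka.actor.Props
import scala.collection.immutable

// MyActor and ctx stand in for a real bean class and Spring context.
val props = Props(classOf[SpringIndirectActorProducer],
  classOf[MyActor], ctx, immutable.Seq.empty[AnyRef])
val ref = system.actorOf(props, "myActor")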
Example 118
Source File: CortexAuthSrv.scala From Cortex with GNU Affero General Public License v3.0 | 5 votes |
package org.thp.cortex.services import javax.inject.{Inject, Singleton} import scala.collection.immutable import scala.concurrent.ExecutionContext import play.api.{Configuration, Logger} import org.elastic4play.services.AuthSrv import org.elastic4play.services.auth.MultiAuthSrv object CortexAuthSrv { private[CortexAuthSrv] lazy val logger = Logger(getClass) def getAuthSrv(authTypes: Seq[String], authModules: immutable.Set[AuthSrv]): Seq[AuthSrv] = ("key" +: authTypes.filterNot(_ == "key")) .flatMap { authType ⇒ authModules .find(_.name == authType) .orElse { logger.error(s"Authentication module $authType not found") None } } } @Singleton class CortexAuthSrv @Inject()( configuration: Configuration, authModules: immutable.Set[AuthSrv], userSrv: UserSrv, implicit override val ec: ExecutionContext ) extends MultiAuthSrv( CortexAuthSrv.getAuthSrv(configuration.getDeprecated[Option[Seq[String]]]("auth.provider", "auth.type").getOrElse(Seq("local")), authModules), ec ) { // Uncomment the following lines if you want to prevent user with key to use password to authenticate // override def authenticate(username: String, password: String)(implicit request: RequestHeader): Future[AuthContext] = // userSrv.get(username) // .transformWith { // case Success(user) if user.key().isDefined ⇒ Future.failed(AuthenticationError("Authentication by password is not permitted for user with key")) // case _: Success[_] ⇒ super.authenticate(username, password) // case _: Failure[_] ⇒ Future.failed(AuthenticationError("Authentication failure")) // } }
Example 119
Source File: SuccessfulOf.scala From naptime with Apache License 2.0 | 5 votes |
package org.coursera.naptime.access.combiner

import org.coursera.common.concurrent.Futures
import org.coursera.naptime.NaptimeActionException
import org.coursera.naptime.access.HeaderAccessControl
import play.api.http.Status
import play.api.mvc.RequestHeader

import scala.collection.immutable
import scala.concurrent.ExecutionContext
import scala.concurrent.Future

// Trait declaration restored: the extracted snippet dropped the enclosing type.
trait SuccessfulOf {
  import SuccessfulOf.badAccessControlsResponse

  def successfulOf[A](
      controls: immutable.Seq[HeaderAccessControl[A]]): HeaderAccessControl[Set[A]] = {
    new HeaderAccessControl[Set[A]] {
      override def run(requestHeader: RequestHeader)(
          implicit ec: ExecutionContext): Future[Either[NaptimeActionException, Set[A]]] = {
        Future
          .traverse(controls) { control =>
            Futures.safelyCall(control.run(requestHeader))
          }
          .map { results =>
            val successes = results.collect { case Right(authentication) => authentication }
            lazy val firstErrorOption = results.collectFirst { case Left(error) => error }
            if (successes.nonEmpty) {
              Right(successes.toSet)
            } else {
              firstErrorOption match {
                case Some(error) => Left(error)
                case None        => badAccessControlsResponse
              }
            }
          }
      }

      override def check(authInfo: Set[A]): Either[NaptimeActionException, Set[A]] = {
        if (authInfo.nonEmpty) Right(authInfo) else badAccessControlsResponse
      }
    }
  }
}

object SuccessfulOf {

  private[combiner] val badAccessControlsResponse = {
    Left(
      NaptimeActionException(
        Status.UNAUTHORIZED,
        Some("auth.perms"),
        Some("Invalid access control configuration")))
  }
}
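A usage sketch: grant access when any one of several authenticators passes; adminAccess and partnerAccess are hypothetical HeaderAccessControl values.

// Both access controls and the Admin auth type are placeholders.
val anyOfThem: HeaderAccessControl[Set[Admin]] =
  successfulOf(immutable.Seq(adminAccess, partnerAccess))
// run() yields Right with every authentication that succeeded, or the first
// error if none did.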
Example 120
Source File: Router.scala From naptime with Apache License 2.0 | 5 votes |
package org.coursera.naptime.router2

import javax.inject.Inject

import com.google.inject.Injector
import org.coursera.naptime.resources.CollectionResource
import play.api.mvc.RequestHeader

import scala.annotation.tailrec
import scala.collection.immutable
import language.experimental.macros

object Router {
  def build[T <: CollectionResource[_, _, _]]: ResourceRouterBuilder = macro MacroImpls.build[T]

  @tailrec
  def routeRequestHelper(
      resourceRouters: Seq[ResourceRouter],
      requestHeader: RequestHeader,
      path: String): Option[RouteAction] = {
    if (resourceRouters.isEmpty) {
      None
    } else {
      val first = resourceRouters.head
      val firstResult = first.routeRequest(path, requestHeader)
      if (firstResult.isDefined) {
        firstResult
      } else {
        routeRequestHelper(resourceRouters.tail, requestHeader, path)
      }
    }
  }

  // Reconstructed wrapper: the extracted snippet lost the method declaration
  // around this call site; the exact signature in the original source may differ.
  def routeRequest(
      resourceRouters: Seq[ResourceRouter],
      requestHeader: RequestHeader,
      path: String): Option[RouteAction] = {
    if (resourceRouters.nonEmpty) {
      routeRequestHelper(resourceRouters, requestHeader, path)
    } else {
      None
    }
  }

  // TODO(saeta): add additional functionality (i.e. listing all resources / metadata, etc.)
  // TODO(saeta): performance test the new router implementation & do reasonable optimizations.
}
Example 121
Source File: SchemaUtilsTest.scala From naptime with Apache License 2.0 | 5 votes |
package org.coursera.naptime import com.linkedin.data.schema.ArrayDataSchema import com.linkedin.data.schema.DataSchema import com.linkedin.data.schema.DataSchemaUtil import com.linkedin.data.schema.MapDataSchema import com.linkedin.data.schema.RecordDataSchema import com.linkedin.data.schema.UnionDataSchema import org.coursera.naptime.ari.graphql.models.MergedCourse import org.junit._ import org.scalatest.junit.AssertionsForJUnit import scala.collection.immutable class SchemaUtilsTest extends AssertionsForJUnit { @Test def testFixupSchema_EmptyOverrides(): Unit = { val schemaToFix = MergedCourse.SCHEMA val overrides = immutable.Map[String, DataSchema]() SchemaUtils.fixupInferredSchemas(schemaToFix, overrides) assert(schemaToFix === MergedCourse.SCHEMA) } @Test def testFixupSchema_RecursiveSchema_EmptyOverrides(): Unit = { val schemaToFix = RecursiveModelBase.SCHEMA val overrides = Map[String, DataSchema]() SchemaUtils.fixupInferredSchemas(schemaToFix, overrides) assert(schemaToFix === RecursiveModelBase.SCHEMA) } @Test def testFixupSchema_RecursiveSchema_WithOverrides(): Unit = { val schemaToFix = RecursiveModelBase.SCHEMA val longDataSchema = DataSchemaUtil.dataSchemaTypeToPrimitiveDataSchema(DataSchema.Type.LONG) val overrides = Map("org.coursera.naptime.RecursiveChild" -> longDataSchema) SchemaUtils.fixupInferredSchemas(schemaToFix, overrides) assert(schemaToFix.getField("recursiveChild").getType === longDataSchema) } @Test def testFixupSchema_ListSchema_WithOverrides(): Unit = { val schemaToFix = RecursiveModelBase.SCHEMA val longDataSchema = DataSchemaUtil.dataSchemaTypeToPrimitiveDataSchema(DataSchema.Type.LONG) val overrides = Map("org.coursera.naptime.RecursiveChild" -> longDataSchema) SchemaUtils.fixupInferredSchemas(schemaToFix, overrides) assert( schemaToFix.getField("recursiveChildren").getType === new ArrayDataSchema(longDataSchema)) } @Test def testFixupSchema_MapSchema_WithOverrides(): Unit = { val schemaToFix = RecursiveModelBase.SCHEMA val longDataSchema = DataSchemaUtil.dataSchemaTypeToPrimitiveDataSchema(DataSchema.Type.LONG) val overrides = Map("org.coursera.naptime.RecursiveChild" -> longDataSchema) SchemaUtils.fixupInferredSchemas(schemaToFix, overrides) assert( schemaToFix.getField("recursiveChildMap").getType === new MapDataSchema(longDataSchema)) } @Test def testFixupSchema_RecursiveListSchema_WithOverrides(): Unit = { val schemaToFix = RecursiveModelBase.SCHEMA val stringDataSchema = DataSchemaUtil.dataSchemaTypeToPrimitiveDataSchema(DataSchema.Type.STRING) val overrides = Map("org.coursera.naptime.StringLikeField" -> stringDataSchema) SchemaUtils.fixupInferredSchemas(schemaToFix, overrides) assert( schemaToFix .getField("nestedChild") .getType .asInstanceOf[ArrayDataSchema] .getItems .asInstanceOf[RecordDataSchema] .getField("stringLikeField") .getType === stringDataSchema) } @Test def testFixupSchema_RecursiveUnionSchema_WithOverrides(): Unit = { val schemaToFix = RecursiveModelBase.SCHEMA val stringDataSchema = DataSchemaUtil.dataSchemaTypeToPrimitiveDataSchema(DataSchema.Type.STRING) val overrides = Map("org.coursera.naptime.StringLikeField" -> stringDataSchema) SchemaUtils.fixupInferredSchemas(schemaToFix, overrides) assert( schemaToFix .getField("unionChild") .getType .asInstanceOf[UnionDataSchema] .getTypeByName("NestedChild") .asInstanceOf[RecordDataSchema] .getField("stringLikeField") .getType === stringDataSchema) } }
Example 122
Source File: AkkaHttpBackend.scala From drunk with Apache License 2.0 | 5 votes |
package com.github.jarlakxen.drunk.backend import scala.collection.immutable import scala.concurrent.{ExecutionContext, Future} import akka.actor.ActorSystem import akka.http.scaladsl.{Http, HttpExt} import akka.http.scaladsl.model.{ContentTypes, HttpEntity, HttpHeader, HttpMethods, HttpRequest, Uri} import akka.stream.ActorMaterializer class AkkaHttpBackend private[AkkaHttpBackend] ( uri: Uri, headers: immutable.Seq[HttpHeader], httpExt: HttpExt )(override implicit val as: ActorSystem, override implicit val mat: ActorMaterializer) extends AkkaBackend { def send(body: String): Future[(Int, String)] = { implicit val ec: ExecutionContext = as.dispatcher val req = HttpRequest(HttpMethods.POST, uri, headers, HttpEntity(ContentTypes.`application/json`, body)) val res = httpExt.singleRequest(req) res.flatMap { hr => val code = hr.status.intValue() val charsetFromHeaders = encodingFromContentType(hr.entity.contentType.toString).getOrElse("utf-8") val decodedResponse = decodeResponse(hr) val stringBody = bodyToString(decodedResponse, charsetFromHeaders) if (code >= 200 && code < 300) { stringBody.map { body => hr.discardEntityBytes() (code, body) } } else { stringBody.flatMap { body => hr.discardEntityBytes() Future.failed(new RuntimeException(s"${uri.toString} return $code with body: $body")) } } } } } object AkkaHttpBackend { val ContentTypeHeader = "Content-Type" def apply( uri: Uri, headers: immutable.Seq[HttpHeader] = Nil, httpExt: Option[HttpExt] = None )(implicit as: ActorSystem, mat: ActorMaterializer): AkkaHttpBackend = { val http = httpExt.getOrElse { Http(as) } new AkkaHttpBackend(uri, headers, http) } }
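A sketch of sending a GraphQL document through the backend; the endpoint URL is illustrative:

import akka.actor.ActorSystem
import akka.http.scaladsl.model.Uri
import akka.stream.ActorMaterializer

implicit val system = ActorSystem()
implicit val materializer = ActorMaterializer()

val backend = AkkaHttpBackend(Uri("http://localhost:8080/graphql"))
val response = backend.send("""{"query":"{ hero { name } }"}""") // Future[(Int, String)]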
Example 123
Source File: AkkaConnectionBackend.scala From drunk with Apache License 2.0 | 5 votes |
package com.github.jarlakxen.drunk.backend import akka.actor.ActorSystem import akka.http.scaladsl.Http.OutgoingConnection import akka.http.scaladsl.model._ import akka.stream.ActorMaterializer import akka.stream.scaladsl.{Flow, Sink, Source} import scala.collection.immutable import scala.concurrent.{ExecutionContext, Future} class AkkaConnectionBackend private[AkkaConnectionBackend] ( uri: Uri, flow: Flow[HttpRequest, HttpResponse, Future[OutgoingConnection]], headers: immutable.Seq[HttpHeader] )(override implicit val as: ActorSystem, override implicit val mat: ActorMaterializer) extends AkkaBackend { def send(body: String): Future[(Int, String)] = { implicit val ec: ExecutionContext = as.dispatcher val req = HttpRequest( method = HttpMethods.POST, uri = uri, headers = headers, entity = HttpEntity(ContentTypes.`application/json`, body) ) val res = Source.single(req).via(flow).runWith(Sink.head) res.flatMap { hr => val code = hr.status.intValue() val charsetFromHeaders = encodingFromContentType(hr.entity.contentType.toString).getOrElse("utf-8") val decodedResponse = decodeResponse(hr) val stringBody = bodyToString(decodedResponse, charsetFromHeaders) if (code >= 200 && code < 300) { stringBody.map { body => hr.discardEntityBytes() (code, body) } } else { stringBody.flatMap { body => hr.discardEntityBytes() Future.failed(new RuntimeException(s"${uri.toString} return $code with body: $body")) } } } } } object AkkaConnectionBackend { def apply(uri: Uri, flow: Flow[HttpRequest, HttpResponse, Future[OutgoingConnection]], headers: immutable.Seq[HttpHeader] = Nil )( implicit as: ActorSystem, mat: ActorMaterializer): AkkaConnectionBackend = new AkkaConnectionBackend(uri, flow, headers) }
Example 124
Source File: ScalaNumberProxy.scala From perf_tester with Apache License 2.0 | 5 votes |
package scala
package runtime

import scala.collection.immutable
import scala.math.ScalaNumericAnyConversions
import immutable.NumericRange
import Proxy.Typed

// Trait declaration restored: the extracted snippet dropped the header of
// FractionalProxy (its supertraits are defined in the same file in the original source).
trait FractionalProxy[T] extends Any with ScalaNumberProxy[T] with RangedProxy[T] {
  protected implicit def integralNum: Integral[T]

  type ResultWithoutStep = Range.Partial[T, NumericRange[T]]
  def isWhole() = false

  @deprecated("use BigDecimal range instead", "2.12.6")
  def until(end: T): ResultWithoutStep = new Range.Partial(NumericRange(self, end, _))
  @deprecated("use BigDecimal range instead", "2.12.6")
  def until(end: T, step: T): NumericRange.Exclusive[T] = NumericRange(self, end, step)
  @deprecated("use BigDecimal range instead", "2.12.6")
  def to(end: T): ResultWithoutStep = new Range.Partial(NumericRange.inclusive(self, end, _))
  @deprecated("use BigDecimal range instead", "2.12.6")
  def to(end: T, step: T): NumericRange.Inclusive[T] = NumericRange.inclusive(self, end, step)
}

trait OrderedProxy[T] extends Any with Ordered[T] with Typed[T] {
  protected def ord: Ordering[T]

  def compare(y: T) = ord.compare(self, y)
}

trait RangedProxy[T] extends Any with Typed[T] {
  type ResultWithoutStep

  def until(end: T): ResultWithoutStep
  def until(end: T, step: T): immutable.IndexedSeq[T]
  def to(end: T): ResultWithoutStep
  def to(end: T, step: T): immutable.IndexedSeq[T]
}
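These proxies back the now-deprecated fractional ranges; the replacement named in the deprecation message looks like this:

// BigDecimal ranges avoid the floating-point step drift of Double ranges.
val r = BigDecimal(0) to BigDecimal(1) by 0.25
r.foreach(println) // 0, 0.25, 0.5, 0.75, 1.0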
Example 125
Source File: Future.scala From perf_tester with Apache License 2.0 | 5 votes |
package akka.compat import akka.annotation.InternalApi import scala.concurrent.{ ExecutionContext, Future ⇒ SFuture } import scala.collection.immutable @InternalApi private[akka] object Future { def fold[T, R](futures: TraversableOnce[SFuture[T]])(zero: R)(op: (R, T) ⇒ R)(implicit executor: ExecutionContext): SFuture[R] = { // This will have performance implications since the elements are copied to a Vector SFuture.foldLeft[T, R](futures.to[immutable.Iterable])(zero)(op)(executor) } def fold[T, R](futures: immutable.Iterable[SFuture[T]])(zero: R)(op: (R, T) ⇒ R)(implicit executor: ExecutionContext): SFuture[R] = SFuture.foldLeft[T, R](futures)(zero)(op)(executor) def reduce[T, R >: T](futures: TraversableOnce[SFuture[T]])(op: (R, T) ⇒ R)(implicit executor: ExecutionContext): SFuture[R] = { // This will have performance implications since the elements are copied to a Vector SFuture.reduceLeft[T, R](futures.to[immutable.Iterable])(op)(executor) } def reduce[T, R >: T](futures: immutable.Iterable[SFuture[T]])(op: (R, T) ⇒ R)(implicit executor: ExecutionContext): SFuture[R] = SFuture.reduceLeft[T, R](futures)(op)(executor) def find[T](futures: TraversableOnce[SFuture[T]])(p: T ⇒ Boolean)(implicit executor: ExecutionContext): SFuture[Option[T]] = { // This will have performance implications since the elements are copied to a Vector SFuture.find[T](futures.to[immutable.Iterable])(p)(executor) } def find[T](futures: immutable.Iterable[SFuture[T]])(p: T ⇒ Boolean)(implicit executor: ExecutionContext): SFuture[Option[T]] = SFuture.find[T](futures)(p)(executor) }
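These wrappers only adapt the pre-2.12 TraversableOnce signatures; the standard-library call they delegate to can be used directly, as in this sketch:

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future

val total = Future.foldLeft(List(Future(1), Future(2), Future(3)))(0)(_ + _)
// Future[Int] completing with 6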
Example 126
Source File: ReflectiveDynamicAccess.scala From perf_tester with Apache License 2.0 | 5 votes |
package akka.actor import scala.collection.immutable import java.lang.reflect.InvocationTargetException import scala.reflect.ClassTag import scala.util.Try class ReflectiveDynamicAccess(val classLoader: ClassLoader) extends DynamicAccess { override def getClassFor[T: ClassTag](fqcn: String): Try[Class[_ <: T]] = Try[Class[_ <: T]]({ val c = Class.forName(fqcn, false, classLoader).asInstanceOf[Class[_ <: T]] val t = implicitly[ClassTag[T]].runtimeClass if (t.isAssignableFrom(c)) c else throw new ClassCastException(t + " is not assignable from " + c) }) override def createInstanceFor[T: ClassTag](clazz: Class[_], args: immutable.Seq[(Class[_], AnyRef)]): Try[T] = Try { val types = args.map(_._1).toArray val values = args.map(_._2).toArray val constructor = clazz.getDeclaredConstructor(types: _*) constructor.setAccessible(true) val obj = constructor.newInstance(values: _*) val t = implicitly[ClassTag[T]].runtimeClass if (t.isInstance(obj)) obj.asInstanceOf[T] else throw new ClassCastException(clazz.getName + " is not a subtype of " + t) } recover { case i: InvocationTargetException if i.getTargetException ne null ⇒ throw i.getTargetException } override def createInstanceFor[T: ClassTag](fqcn: String, args: immutable.Seq[(Class[_], AnyRef)]): Try[T] = getClassFor(fqcn) flatMap { c ⇒ createInstanceFor(c, args) } override def getObjectFor[T: ClassTag](fqcn: String): Try[T] = { val classTry = if (fqcn.endsWith("$")) getClassFor(fqcn) else getClassFor(fqcn + "$") recoverWith { case _ ⇒ getClassFor(fqcn) } classTry flatMap { c ⇒ Try { val module = c.getDeclaredField("MODULE$") module.setAccessible(true) val t = implicitly[ClassTag[T]].runtimeClass module.get(null) match { case null ⇒ throw new NullPointerException case x if !t.isInstance(x) ⇒ throw new ClassCastException(fqcn + " is not a subtype of " + t) case x: T ⇒ x } } recover { case i: InvocationTargetException if i.getTargetException ne null ⇒ throw i.getTargetException } } } }
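A sketch of resolving and instantiating a class by its fully-qualified name; the class is chosen purely for illustration:

import scala.collection.immutable

val dynamicAccess = new ReflectiveDynamicAccess(getClass.getClassLoader)
val instance = dynamicAccess.createInstanceFor[StringBuilder](
  "scala.collection.mutable.StringBuilder", immutable.Seq.empty) // Try[StringBuilder]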
Example 127
Source File: Collections.scala From perf_tester with Apache License 2.0 | 5 votes |
package akka.util import scala.collection.immutable import scala.annotation.tailrec private[akka] object Collections { case object EmptyImmutableSeq extends immutable.Seq[Nothing] { override final def iterator = Iterator.empty override final def apply(idx: Int): Nothing = throw new java.lang.IndexOutOfBoundsException(idx.toString) override final def length: Int = 0 } abstract class PartialImmutableValuesIterable[From, To] extends immutable.Iterable[To] { def isDefinedAt(from: From): Boolean def apply(from: From): To def valuesIterator: Iterator[From] final def iterator: Iterator[To] = { val superIterator = valuesIterator new Iterator[To] { private[this] var _next: To = _ private[this] var _hasNext = false @tailrec override final def hasNext: Boolean = if (!_hasNext && superIterator.hasNext) { // If we need and are able to look for the next value val potentiallyNext = superIterator.next() if (isDefinedAt(potentiallyNext)) { _next = apply(potentiallyNext) _hasNext = true true } else hasNext //Attempt to find the next } else _hasNext // Return if we found one override final def next(): To = if (hasNext) { val ret = _next _next = null.asInstanceOf[To] // Mark as consumed (nice to the GC, don't leak the last returned value) _hasNext = false // Mark as consumed (we need to look for the next value) ret } else throw new java.util.NoSuchElementException("next") } } override lazy val size: Int = iterator.size override def foreach[C](f: To ⇒ C) = iterator foreach f } }
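PartialImmutableValuesIterable fuses a filter and a map over a source iterator without building intermediate collections. A sketch (note the object is private[akka], so this compiles only inside Akka's own packages):

// Keeps the even numbers and renders them as strings: "2", "4".
val evens = new Collections.PartialImmutableValuesIterable[Int, String] {
  override def isDefinedAt(i: Int) = i % 2 == 0
  override def apply(i: Int) = i.toString
  override def valuesIterator = Iterator(1, 2, 3, 4)
}
evens.foreach(println)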
Example 128
Source File: UdpSender.scala From perf_tester with Apache License 2.0 | 5 votes |
package akka.io import java.nio.channels.DatagramChannel import scala.collection.immutable import scala.util.control.NonFatal import akka.dispatch.{ RequiresMessageQueue, UnboundedMessageQueueSemantics } import akka.io.Inet.SocketOption import akka.io.Udp._ import akka.actor._ private[io] class UdpSender( val udp: UdpExt, channelRegistry: ChannelRegistry, commander: ActorRef, options: immutable.Traversable[SocketOption]) extends Actor with ActorLogging with WithUdpSend with RequiresMessageQueue[UnboundedMessageQueueSemantics] { val channel = { val datagramChannel = DatagramChannel.open datagramChannel.configureBlocking(false) val socket = datagramChannel.socket options foreach { _.beforeDatagramBind(socket) } datagramChannel } channelRegistry.register(channel, initialOps = 0) def receive: Receive = { case registration: ChannelRegistration ⇒ options.foreach { case v2: Inet.SocketOptionV2 ⇒ v2.afterConnect(channel.socket) case _ ⇒ } commander ! SimpleSenderReady context.become(sendHandlers(registration)) } override def postStop(): Unit = if (channel.isOpen) { log.debug("Closing DatagramChannel after being stopped") try channel.close() catch { case NonFatal(e) ⇒ log.debug("Error closing DatagramChannel: {}", e) } } }
Example 129
Source File: TcpIncomingConnection.scala From perf_tester with Apache License 2.0 | 5 votes |
package akka.io

import java.nio.channels.SocketChannel
import scala.collection.immutable
import akka.actor.ActorRef
import akka.io.Inet.SocketOption

private[io] class TcpIncomingConnection(
  _tcp: TcpExt,
  _channel: SocketChannel,
  registry: ChannelRegistry,
  bindHandler: ActorRef,
  options: immutable.Traversable[SocketOption],
  readThrottling: Boolean)
  extends TcpConnection(_tcp, _channel, readThrottling) {

  signDeathPact(bindHandler)

  registry.register(channel, initialOps = 0)

  def receive = {
    case registration: ChannelRegistration ⇒ completeConnect(registration, bindHandler, options)
  }
}
Example 130
Source File: Dns.scala From perf_tester with Apache License 2.0 | 5 votes |
package akka.io import java.net.{ Inet4Address, Inet6Address, InetAddress, UnknownHostException } import akka.actor._ import akka.routing.ConsistentHashingRouter.ConsistentHashable import com.typesafe.config.Config import scala.collection.{ breakOut, immutable } abstract class Dns { def cached(name: String): Option[Dns.Resolved] = None def resolve(name: String)(system: ActorSystem, sender: ActorRef): Option[Dns.Resolved] = { val ret = cached(name) if (ret.isEmpty) IO(Dns)(system).tell(Dns.Resolve(name), sender) ret } } object Dns extends ExtensionId[DnsExt] with ExtensionIdProvider { sealed trait Command case class Resolve(name: String) extends Command with ConsistentHashable { override def consistentHashKey = name } case class Resolved(name: String, ipv4: immutable.Seq[Inet4Address], ipv6: immutable.Seq[Inet6Address]) extends Command { val addrOption: Option[InetAddress] = IpVersionSelector.getInetAddress(ipv4.headOption, ipv6.headOption) @throws[UnknownHostException] def addr: InetAddress = addrOption match { case Some(ipAddress) ⇒ ipAddress case None ⇒ throw new UnknownHostException(name) } } object Resolved { def apply(name: String, addresses: Iterable[InetAddress]): Resolved = { val ipv4: immutable.Seq[Inet4Address] = addresses.collect({ case a: Inet4Address ⇒ a })(breakOut) val ipv6: immutable.Seq[Inet6Address] = addresses.collect({ case a: Inet6Address ⇒ a })(breakOut) Resolved(name, ipv4, ipv6) } } def cached(name: String)(system: ActorSystem): Option[Resolved] = { Dns(system).cache.cached(name) } def resolve(name: String)(system: ActorSystem, sender: ActorRef): Option[Resolved] = { Dns(system).cache.resolve(name)(system, sender) } override def lookup() = Dns override def createExtension(system: ExtendedActorSystem): DnsExt = new DnsExt(system) override def get(system: ActorSystem): DnsExt = super.get(system) } class DnsExt(system: ExtendedActorSystem) extends IO.Extension { val Settings = new Settings(system.settings.config.getConfig("akka.io.dns")) class Settings private[DnsExt] (_config: Config) { import _config._ val Dispatcher: String = getString("dispatcher") val Resolver: String = getString("resolver") val ResolverConfig: Config = getConfig(Resolver) val ProviderObjectName: String = ResolverConfig.getString("provider-object") } val provider: DnsProvider = system.dynamicAccess.getClassFor[DnsProvider](Settings.ProviderObjectName).get.newInstance() val cache: Dns = provider.cache val manager: ActorRef = { system.systemActorOf( props = Props(provider.managerClass, this).withDeploy(Deploy.local).withDispatcher(Settings.Dispatcher), name = "IO-DNS") } def getResolver: ActorRef = manager } object IpVersionSelector { def getInetAddress(ipv4: Option[Inet4Address], ipv6: Option[Inet6Address]): Option[InetAddress] = System.getProperty("java.net.preferIPv6Addresses") match { case "true" ⇒ ipv6 orElse ipv4 case _ ⇒ ipv4 orElse ipv6 } }
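A brief sketch (not from the original file) of the companion helpers above: `Resolved.apply` partitions a mixed address list into IPv4 and IPv6, and `IpVersionSelector` prefers IPv4 unless the JVM property `java.net.preferIPv6Addresses` is `"true"`:

import java.net.InetAddress

val addresses = InetAddress.getAllByName("localhost").toList // may hold both 127.0.0.1 and ::1
val resolved = Dns.Resolved("localhost", addresses)

resolved.addr // throws UnknownHostException only if both address lists are empty
IpVersionSelector.getInetAddress(resolved.ipv4.headOption, resolved.ipv6.headOption)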
Example 131
Source File: InetAddressDnsResolver.scala From perf_tester with Apache License 2.0 | 5 votes |
package akka.io import java.net.{ InetAddress, UnknownHostException } import java.security.Security import java.util.concurrent.TimeUnit import akka.actor.Actor import com.typesafe.config.Config import scala.collection.immutable import akka.util.Helpers.Requiring import scala.util.Try class InetAddressDnsResolver(cache: SimpleDnsCache, config: Config) extends Actor { // Controls the cache policy for successful lookups only private final val CachePolicyProp = "networkaddress.cache.ttl" // Deprecated JVM property key, keeping for legacy compatibility; replaced by CachePolicyProp private final val CachePolicyPropFallback = "sun.net.inetaddr.ttl" // Controls the cache policy for negative lookups only private final val NegativeCachePolicyProp = "networkaddress.cache.negative.ttl" // Deprecated JVM property key, keeping for legacy compatibility; replaced by NegativeCachePolicyProp private final val NegativeCachePolicyPropFallback = "sun.net.inetaddr.negative.ttl" // default values (-1 and 0 are magic numbers, trust them) private final val Forever = -1 private final val Never = 0 private final val DefaultPositive = 30 private lazy val cachePolicy: Int = { val n = Try(Security.getProperty(CachePolicyProp).toInt) .orElse(Try(Security.getProperty(CachePolicyPropFallback).toInt)) .getOrElse(DefaultPositive) // default if (n < 0) Forever else n } private lazy val negativeCachePolicy = { val n = Try(Security.getProperty(NegativeCachePolicyProp).toInt) .orElse(Try(Security.getProperty(NegativeCachePolicyPropFallback).toInt)) .getOrElse(0) // default if (n < 0) Forever else n } private def getTtl(path: String, positive: Boolean): Long = config.getString(path) match { case "default" ⇒ (if (positive) cachePolicy else negativeCachePolicy) match { case Never ⇒ Never case n if n > 0 ⇒ TimeUnit.SECONDS.toMillis(n) case _ ⇒ Long.MaxValue // forever if negative } case "forever" ⇒ Long.MaxValue case "never" ⇒ Never case _ ⇒ config.getDuration(path, TimeUnit.MILLISECONDS) .requiring(_ > 0, s"akka.io.dns.$path must be 'default', 'forever', 'never' or positive duration") } val positiveTtl: Long = getTtl("positive-ttl", positive = true) val negativeTtl: Long = getTtl("negative-ttl", positive = false) override def receive = { case Dns.Resolve(name) ⇒ val answer = cache.cached(name) match { case Some(a) ⇒ a case None ⇒ try { val answer = Dns.Resolved(name, InetAddress.getAllByName(name)) if (positiveTtl != Never) cache.put(answer, positiveTtl) answer } catch { case e: UnknownHostException ⇒ val answer = Dns.Resolved(name, immutable.Seq.empty, immutable.Seq.empty) if (negativeTtl != Never) cache.put(answer, negativeTtl) answer } } sender() ! answer } }
Example 132
Source File: SimpleDnsCache.scala From perf_tester with Apache License 2.0 | 5 votes |
package akka.io import java.util.concurrent.atomic.AtomicReference import akka.io.Dns.Resolved import scala.annotation.tailrec import scala.collection.immutable private[io] sealed trait PeriodicCacheCleanup { def cleanup(): Unit } class SimpleDnsCache extends Dns with PeriodicCacheCleanup { import akka.io.SimpleDnsCache._ private val cache = new AtomicReference(new Cache( immutable.SortedSet()(ExpiryEntryOrdering), immutable.Map(), clock _)) private val nanoBase = System.nanoTime() override def cached(name: String): Option[Resolved] = { cache.get().get(name) } protected def clock(): Long = { val now = System.nanoTime() if (now - nanoBase < 0) 0 else (now - nanoBase) / 1000000 } @tailrec private[io] final def put(r: Resolved, ttlMillis: Long): Unit = { val c = cache.get() if (!cache.compareAndSet(c, c.put(r, ttlMillis))) put(r, ttlMillis) } @tailrec override final def cleanup(): Unit = { val c = cache.get() if (!cache.compareAndSet(c, c.cleanup())) cleanup() } } object SimpleDnsCache { private class Cache(queue: immutable.SortedSet[ExpiryEntry], cache: immutable.Map[String, CacheEntry], clock: () ⇒ Long) { def get(name: String): Option[Resolved] = { for { e ← cache.get(name) if e.isValid(clock()) } yield e.answer } def put(answer: Resolved, ttlMillis: Long): Cache = { val until0 = clock() + ttlMillis val until = if (until0 < 0) Long.MaxValue else until0 new Cache( queue + new ExpiryEntry(answer.name, until), cache + (answer.name → CacheEntry(answer, until)), clock) } def cleanup(): Cache = { val now = clock() var q = queue var c = cache while (q.nonEmpty && !q.head.isValid(now)) { val minEntry = q.head val name = minEntry.name q -= minEntry if (c.get(name).filterNot(_.isValid(now)).isDefined) c -= name } new Cache(q, c, clock) } } private case class CacheEntry(answer: Dns.Resolved, until: Long) { def isValid(clock: Long): Boolean = clock < until } private class ExpiryEntry(val name: String, val until: Long) extends Ordered[ExpiryEntry] { def isValid(clock: Long): Boolean = clock < until override def compare(that: ExpiryEntry): Int = -until.compareTo(that.until) } private object ExpiryEntryOrdering extends Ordering[ExpiryEntry] { override def compare(x: ExpiryEntry, y: ExpiryEntry): Int = { x.until.compareTo(y.until) } } }
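A usage sketch (not part of the source; `put` is `private[io]`, so this only compiles inside the `akka.io` package). Entries stay visible until their TTL, measured against the cache's monotonic clock, has elapsed; `cleanup()` evicts whatever has expired:

import scala.collection.immutable

val cache = new SimpleDnsCache
val answer = Dns.Resolved("example.com", immutable.Seq.empty, immutable.Seq.empty)

cache.put(answer, ttlMillis = 60000)
cache.cached("example.com") // Some(answer) for the next 60 seconds
cache.cleanup()             // drops expired entries from both the queue and the map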
Example 133
Source File: HttpClientTestSupport.scala From wix-http-testkit with MIT License | 5 votes |
package com.wix.e2e.http.drivers import java.io.DataOutputStream import java.net.{HttpURLConnection, URL} import akka.http.scaladsl.model.HttpMethods.GET import akka.http.scaladsl.model._ import akka.http.scaladsl.model.headers.`Transfer-Encoding` import akka.stream.scaladsl.Source import com.wix.e2e.http.client.extractors._ import com.wix.e2e.http.info.HttpTestkitVersion import com.wix.e2e.http.matchers.{RequestMatcher, ResponseMatcher} import com.wix.e2e.http.{BaseUri, HttpRequest, RequestHandler} import com.wix.test.random._ import scala.collection.immutable import scala.collection.mutable.ListBuffer trait HttpClientTestSupport { val parameter = randomStrPair val header = randomStrPair val formData = randomStrPair val userAgent = randomStr val cookie = randomStrPair val path = s"$randomStr/$randomStr" val anotherPath = s"$randomStr/$randomStr" val someObject = SomeCaseClass(randomStr, randomInt) val somePort = randomPort val content = randomStr val anotherContent = randomStr val requestData = ListBuffer.empty[String] val bigResponse = 1024 * 1024 def issueChunkedPostRequestWith(content: String, toPath: String)(implicit baseUri: BaseUri) = { val serverUrl = new URL(s"http://localhost:${baseUri.port}/$toPath") val conn = serverUrl.openConnection.asInstanceOf[HttpURLConnection] conn.setRequestMethod("POST") conn.setRequestProperty("Content-Type", "text/plain") conn.setChunkedStreamingMode(0) conn.setDoOutput(true) conn.setDoInput(true) conn.setUseCaches(false) conn.connect() val out = new DataOutputStream(conn.getOutputStream) out.writeBytes(content) out.flush() out.close() conn.disconnect() } } object HttpClientTestResponseHandlers { def handlerFor(path: String, returnsBody: String): RequestHandler = { case r: HttpRequest if r.uri.path.toString.endsWith(path) => HttpResponse(entity = returnsBody) } def unmarshallingAndStoringHandlerFor(path: String, storeTo: ListBuffer[String]): RequestHandler = { case r: HttpRequest if r.uri.path.toString.endsWith(path) => storeTo.append( r.extractAsString ) HttpResponse() } def bigResponseWith(size: Int): RequestHandler = { case HttpRequest(GET, uri, _, _, _) if uri.path.toString().contains("big-response") => HttpResponse(entity = HttpEntity(randomStrWith(size))) } def chunkedResponseFor(path: String): RequestHandler = { case r: HttpRequest if r.uri.path.toString.endsWith(path) => HttpResponse(entity = HttpEntity.Chunked(ContentTypes.`text/plain(UTF-8)`, Source.single(randomStr))) } def alwaysRespondWith(transferEncoding: TransferEncoding, toPath: String): RequestHandler = { case r: HttpRequest if r.uri.path.toString.endsWith(toPath) => HttpResponse().withHeaders(immutable.Seq(`Transfer-Encoding`(transferEncoding))) } val slowRespondingServer: RequestHandler = { case _ => Thread.sleep(500); HttpResponse() } } case class SomeCaseClass(s: String, i: Int) object HttpClientMatchers { import com.wix.e2e.http.matchers.RequestMatchers._ def haveClientHttpTestkitUserAgentWithLibraryVersion: RequestMatcher = haveAnyHeadersOf("User-Agent" -> s"client-http-testkit/$HttpTestkitVersion") } object HttpServerMatchers { import com.wix.e2e.http.matchers.ResponseMatchers._ def haveServerHttpTestkitHeaderWithLibraryVersion: ResponseMatcher = haveAnyHeadersOf("Server" -> s"server-http-testkit/$HttpTestkitVersion") }
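Since `RequestHandler` is the testkit's partial-function alias from request to response (as the `case` syntax above implies), the handlers compose with `orElse`; a minimal sketch using only names defined in this file, with the composed handler then handed to whatever stub server the testkit starts:

import com.wix.e2e.http.RequestHandler
import com.wix.e2e.http.drivers.HttpClientTestResponseHandlers._

val handler: RequestHandler =
  handlerFor("/hello", returnsBody = "world") orElse
    chunkedResponseFor("/stream") orElse
    slowRespondingServer // total fallback: always answers after 500 ms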
Example 134
Source File: KustoAuthenticationTestE2E.scala From azure-kusto-spark with Apache License 2.0 | 5 votes |
package com.microsoft.kusto.spark import java.util.UUID import com.microsoft.azure.kusto.data.{ClientFactory, ConnectionStringBuilder} import com.microsoft.kusto.spark.datasink.{KustoSinkOptions, SinkTableCreationMode} import com.microsoft.kusto.spark.sql.extension.SparkExtension._ import com.microsoft.kusto.spark.utils.KustoQueryUtils import org.apache.spark.sql.SparkSession import org.junit.runner.RunWith import org.scalatest.FlatSpec import org.scalatest.junit.JUnitRunner import scala.collection.immutable @RunWith(classOf[JUnitRunner]) class KustoAuthenticationTestE2E extends FlatSpec { private val spark: SparkSession = SparkSession.builder() .appName("KustoSink") .master(f"local[2]") .getOrCreate() val cluster: String = System.getProperty(KustoSinkOptions.KUSTO_CLUSTER) val database: String = System.getProperty(KustoSinkOptions.KUSTO_DATABASE) val appId: String = System.getProperty(KustoSinkOptions.KUSTO_AAD_APP_ID) val appKey: String = System.getProperty(KustoSinkOptions.KUSTO_AAD_APP_SECRET) val authority: String = System.getProperty(KustoSinkOptions.KUSTO_AAD_AUTHORITY_ID, "microsoft.com") val keyVaultAppId: String = System.getProperty(KustoSinkOptions.KEY_VAULT_APP_ID) val keyVaultAppKey: String = System.getProperty(KustoSinkOptions.KEY_VAULT_APP_KEY) val keyVaultUri: String = System.getProperty(KustoSinkOptions.KEY_VAULT_URI) "keyVaultAuthentication" should "use key vault for authentication and retracting kusto app auth params" taggedAs KustoE2E in { import spark.implicits._ val expectedNumberOfRows = 1000 val timeoutMs: Int = 8 * 60 * 1000 // 8 minutes val rows: immutable.IndexedSeq[(String, Int)] = (1 to expectedNumberOfRows).map(v => (s"row-$v", v)) val prefix = "keyVaultAuthentication" val table = KustoQueryUtils.simplifyName(s"${prefix}_${UUID.randomUUID()}") val engineKcsb = ConnectionStringBuilder.createWithAadApplicationCredentials(s"https://$cluster.kusto.windows.net", appId, appKey, authority) val kustoAdminClient = ClientFactory.createClient(engineKcsb) val df = rows.toDF("name", "value") val conf: Map[String, String] = Map( KustoSinkOptions.KEY_VAULT_URI -> keyVaultUri, KustoSinkOptions.KEY_VAULT_APP_ID -> (if(keyVaultAppId == null) appId else keyVaultAppId), KustoSinkOptions.KEY_VAULT_APP_KEY -> (if(keyVaultAppKey == null) appKey else keyVaultAppKey), KustoSinkOptions.KUSTO_TABLE_CREATE_OPTIONS -> SinkTableCreationMode.CreateIfNotExist.toString ) df.write.kusto(cluster, database, table, conf) val dfResult = spark.read.kusto(cluster, database, table, conf) val result = dfResult.select("name", "value").rdd.collect().sortBy(x => x.getInt(1)) val orig = df.select("name", "value").rdd.collect().sortBy(x => x.getInt(1)) assert(result.diff(orig).isEmpty) } "deviceAuthentication" should "use aad device authentication" taggedAs KustoE2E in { import spark.implicits._ val expectedNumberOfRows = 1000 val timeoutMs: Int = 8 * 60 * 1000 // 8 minutes val rows: immutable.IndexedSeq[(String, Int)] = (1 to expectedNumberOfRows).map(v => (s"row-$v", v)) val prefix = "deviceAuthentication" val table = KustoQueryUtils.simplifyName(s"${prefix}_${UUID.randomUUID()}") val engineKcsb = ConnectionStringBuilder.createWithAadApplicationCredentials(s"https://$cluster.kusto.windows.net", appId, appKey, authority) val kustoAdminClient = ClientFactory.createClient(engineKcsb) val df = rows.toDF("name", "value") val conf: Map[String, String] = Map( KustoSinkOptions.KUSTO_TABLE_CREATE_OPTIONS -> SinkTableCreationMode.CreateIfNotExist.toString ) df.write.kusto(cluster, database, table, conf) 
KustoTestUtils.validateResultsAndCleanup(kustoAdminClient, table, database, expectedNumberOfRows, timeoutMs, tableCleanupPrefix = prefix) } }
Example 135
Source File: ZeppelinAction.scala From intellij-zeppelin with MIT License | 5 votes |
package intellij.zeppelin import com.intellij.openapi.actionSystem.{AnAction, AnActionEvent} import com.intellij.openapi.application.ApplicationManager import com.intellij.openapi.command.{CommandProcessor, UndoConfirmationPolicy} import com.intellij.openapi.editor.{Document, Editor} import com.intellij.openapi.util.Computable import scala.collection.immutable abstract class ZeppelinAction extends AnAction with IdeaDocumentApi { def zeppelin(anActionEvent: AnActionEvent): ZeppelinApi = { ZeppelinConnection.connectionFor(anActionEvent.getProject).api } def findNotebook(editor: Editor): Option[Notebook] = precedingLines(editor).flatMap(x => Notebook.parse(x._2)).headOption def findParagraph(editor: Editor): Option[Paragraph] = precedingLines(editor).flatMap(x => Paragraph.parse(x._2)).headOption private def precedingLines(editor: Editor): immutable.Seq[(Int, String)] = { val currentLine = editor.getCaretModel.getLogicalPosition.line Range(currentLine, 1, -1).map { line => val start = editor.getDocument.getLineStartOffset(line - 1) val end = editor.getDocument.getLineStartOffset(line) (line, editor.getDocument.getCharsSequence.subSequence(start, end).toString) }.map(x => x.copy(_2 = x._2.stripLineEnd)) } def findNote(editor: Editor, line: Int): Option[Notebook] = { val currentLine = editor.getCaretModel.getLogicalPosition.line val linesInReverse = Range(currentLine, 1, -1).map { line => val start = editor.getDocument.getLineStartOffset(line - 1) val end = editor.getDocument.getLineStartOffset(line) editor.getDocument.getCharsSequence.subSequence(start, end).toString }.map(_.stripLineEnd) linesInReverse.flatMap(Notebook.parse).headOption } protected def runWriteAction(anActionEvent: AnActionEvent)(f: Document => Unit): Unit = ApplicationManager.getApplication.runWriteAction{ val document = currentDocument(currentFileIn(anActionEvent.getProject)) new Computable[Unit] { override def compute(): Unit = { CommandProcessor.getInstance().executeCommand( anActionEvent.getProject, new Runnable { override def run(): Unit = { f(document) } }, "Modified from Zeppelin Idea plugin", "ZeppelinIdea", UndoConfirmationPolicy.DEFAULT, document ) } } } }
Example 136
Source File: TryUtil.scala From PureCSV with Apache License 2.0 | 5 votes |
package purecsv.safe

import scala.collection.immutable
import scala.util.{Failure, Success, Try}

package object tryutil {

  implicit class IterableOfTry[A](iter: Iterable[Try[A]]) {
    lazy val getSuccessesAndFailures: (immutable.List[(Int, A)], immutable.List[(Int, Throwable)]) = {
      val successes = scala.collection.mutable.Buffer[(Int, A)]()
      val failures = scala.collection.mutable.Buffer[(Int, Throwable)]()
      iter.zipWithIndex.foreach {
        case (Success(a), i) => successes += (i + 1 -> a)
        case (Failure(f), i) => failures += (i + 1 -> f)
      }
      (successes.toList, failures.toList)
    }
  }

  implicit class IteratorOfTry[A](iter: Iterator[Try[A]]) extends IterableOfTry[A](iter.toIterable)
}
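A quick example (not part of the source) of `getSuccessesAndFailures`; both result lists carry 1-based positions, which is handy for reporting the offending CSV line:

import purecsv.safe.tryutil._
import scala.util.{Failure, Success, Try}

val parsed: List[Try[Int]] =
  List(Success(1), Failure(new NumberFormatException("boom")), Success(3))

val (successes, failures) = parsed.getSuccessesAndFailures
// successes == List((1, 1), (3, 3))
// failures  == List((2, <NumberFormatException: boom>))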
Example 137
Source File: PerfListener.scala From spark-testing-base with Apache License 2.0 | 5 votes |
package com.holdenkarau.spark.testing

import scala.collection.mutable
import scala.collection.immutable
import org.apache.spark.scheduler._
import org.apache.spark.executor.TaskMetrics

// TODO(holden): See if we can make a more attributable listener
// NOTE: the class declaration and the counter fields were truncated in this excerpt;
// they are reconstructed below and may differ slightly from the verbatim original.
class PerfListener extends SparkListener {
  var totalExecutorRunTime = 0L
  var jvmGCTime = 0L
  var recordsRead = 0L
  var recordsWritten = 0L
  var resultSerializationTime = 0L

  override def onTaskEnd(taskEnd: SparkListenerTaskEnd) {
    val info = taskEnd.taskInfo
    val metrics = taskEnd.taskMetrics
    updateMetricsForTask(metrics)
  }

  private def updateMetricsForTask(metrics: TaskMetrics): Unit = {
    totalExecutorRunTime += metrics.executorRunTime
    jvmGCTime += metrics.jvmGCTime
    resultSerializationTime += metrics.resultSerializationTime
    metrics.inputMetrics match {
      case Some(inputMetrics) => recordsRead += inputMetrics.recordsRead
      case _ =>
    }
    metrics.outputMetrics match {
      case Some(outputMetrics) => recordsWritten += outputMetrics.recordsWritten
      case _ =>
    }
  }
}
//end::listener[]
Example 138
Source File: PerfListener.scala From spark-testing-base with Apache License 2.0 | 5 votes |
package com.holdenkarau.spark.testing

import scala.collection.mutable
import scala.collection.immutable
import org.apache.spark.scheduler._
import org.apache.spark.executor.TaskMetrics

// TODO(holden): See if we can make a more attributable listener
// NOTE: as in the previous example, the class declaration and the counter fields were
// truncated in this excerpt and are reconstructed below. This variant targets a Spark
// version where inputMetrics/outputMetrics are no longer Options.
class PerfListener extends SparkListener {
  var totalExecutorRunTime = 0L
  var jvmGCTime = 0L
  var recordsRead = 0L
  var recordsWritten = 0L
  var resultSerializationTime = 0L

  override def onTaskEnd(taskEnd: SparkListenerTaskEnd) {
    val info = taskEnd.taskInfo
    val metrics = taskEnd.taskMetrics
    updateMetricsForTask(metrics)
  }

  private def updateMetricsForTask(metrics: TaskMetrics): Unit = {
    totalExecutorRunTime += metrics.executorRunTime
    jvmGCTime += metrics.jvmGCTime
    resultSerializationTime += metrics.resultSerializationTime
    recordsRead += metrics.inputMetrics.recordsRead
    recordsWritten += metrics.outputMetrics.recordsWritten
  }
}
//end::listener[]
Example 139
Source File: Loader.scala From lagom with Apache License 2.0 | 5 votes |
import play.api.Application import play.api.ApplicationLoader import play.api.BuiltInComponentsFromContext import play.api.libs.ws.ahc.AhcWSComponents import com.softwaremill.macwire._ import router.Routes import com.lightbend.lagom.scaladsl.api._ import com.lightbend.lagom.scaladsl.client.LagomServiceClientComponents import com.lightbend.lagom.scaladsl.devmode.LagomDevModeComponents import scala.collection.immutable class Loader extends ApplicationLoader { def load(context: ApplicationLoader.Context): Application = { new BuiltInComponentsFromContext(context) with LagomServiceClientComponents with AhcWSComponents with LagomDevModeComponents with controllers.AssetsComponents { override lazy val serviceInfo = ServiceInfo( "p", Map( "p" -> immutable.Seq( ServiceAcl.forPathRegex("/p"), ServiceAcl.forPathRegex("/assets/.*") ) ) ) override lazy val router = { val prefix = "/" wire[Routes] } override lazy val httpFilters = Nil lazy val applicationController = wire[controllers.Application] }.application } }
Example 140
Source File: LagomDevModePortAssigner.scala From lagom with Apache License 2.0 | 5 votes |
package com.lightbend.lagom.devmode import scala.collection.immutable object PortAssigner { private[lagom] case class ProjectName(name: String) { def withTls = ProjectName(name + "-tls") } private[lagom] object ProjectName { implicit object OrderingProjectName extends Ordering[ProjectName] { def compare(x: ProjectName, y: ProjectName): Int = x.name.compare(y.name) } } private[lagom] case class PortRange(min: Int, max: Int) { require(min > 0, "Bottom port range must be greater than 0") require(max < Integer.MAX_VALUE, "Upper port range must be smaller than " + Integer.MAX_VALUE) require(min <= max, "Bottom port range must be smaller than the upper port range") val delta: Int = max - min + 1 def includes(value: Int): Boolean = value >= min && value <= max } private[lagom] object Port { final val Unassigned = Port(-1) } private[lagom] case class Port(value: Int) extends AnyVal { def next: Port = Port(value + 1) } def computeProjectsPort( range: PortRange, projectNames: Seq[ProjectName], enableSsl: Boolean ): Map[ProjectName, Port] = { val lagomProjects = projectNames.to[immutable.SortedSet] val projects = // duplicate the project list by adding the tls variant if (enableSsl) lagomProjects.flatMap { plainName => Seq(plainName, plainName.withTls) } else lagomProjects val doubleMessage = if (enableSsl) "The number of ports available must be at least twice the number of projects." else "" require( projects.size <= range.delta, s"""A larger port range is needed, as you have ${lagomProjects.size} Lagom projects and only ${range.delta} |ports available. $doubleMessage |You should increase the range passed for the lagomPortRange build setting. """.stripMargin ) @annotation.tailrec def findFirstAvailablePort(port: Port, unavailable: Set[Port]): Port = { // wrap around if the port's number equal the portRange max limit if (!range.includes(port.value)) findFirstAvailablePort(Port(range.min), unavailable) else if (unavailable(port)) findFirstAvailablePort(port.next, unavailable) else port } @annotation.tailrec def assignProjectPort( projectNames: Seq[ProjectName], assignedPort: Set[Port], unassigned: Vector[ProjectName], result: Map[ProjectName, Port] ): Map[ProjectName, Port] = projectNames match { case Nil if unassigned.nonEmpty => // if we are here there are projects with colliding hash that still need to get their port assigned. As expected, this step is carried out after assigning // a port to all non-colliding projects. val proj = unassigned.head val projectedPort = projectedPortFor(proj) val port = findFirstAvailablePort(projectedPort, assignedPort) assignProjectPort(projectNames, assignedPort + port, unassigned.tail, result + (proj -> port)) case Nil => result case proj +: rest => val projectedPort = projectedPortFor(proj) if (assignedPort(projectedPort)) assignProjectPort(rest, assignedPort, unassigned :+ proj, result) else assignProjectPort(rest, assignedPort + projectedPort, unassigned, result + (proj -> projectedPort)) } def projectedPortFor(name: ProjectName): Port = { val hash = Math.abs(name.hashCode()) val portDelta = hash % range.delta Port(range.min + portDelta) } assignProjectPort(projects.toSeq, Set.empty[Port], Vector.empty[ProjectName], Map.empty[ProjectName, Port]) } }
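A sketch of calling the assigner (not from the source; `ProjectName`, `PortRange` and `Port` are `private[lagom]`, so this only compiles inside that package). Assignment is deterministic: each name hashes to a slot in the range, and a colliding project falls through to the next free port:

val assignments: Map[PortAssigner.ProjectName, PortAssigner.Port] =
  PortAssigner.computeProjectsPort(
    range = PortAssigner.PortRange(49152, 49252),
    projectNames = Seq(PortAssigner.ProjectName("hello"), PortAssigner.ProjectName("greetings")),
    enableSsl = true // also reserves ports for the "-tls" variants
  )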
Example 141
Source File: ServiceRegistrationModule.scala From lagom with Apache License 2.0 | 5 votes |
package com.lightbend.lagom.internal.server import java.net.URI import java.util.function.{ Function => JFunction } import akka.actor.CoordinatedShutdown import akka.Done import akka.NotUsed import com.lightbend.lagom.internal.javadsl.registry.ServiceRegistry import com.lightbend.lagom.internal.javadsl.registry.ServiceRegistryService import com.lightbend.lagom.internal.javadsl.server.ResolvedServices import com.lightbend.lagom.devmode.internal.registry.serviceDnsRecords import com.typesafe.config.Config import javax.inject.Inject import javax.inject.Provider import javax.inject.Singleton import play.api.inject.Binding import play.api.inject.Module import play.api.Configuration import play.api.Environment import play.api.Logger import scala.compat.java8.FutureConverters.CompletionStageOps import scala.concurrent.ExecutionContext import scala.concurrent.Future import scala.collection.JavaConverters._ import scala.collection.immutable class ServiceRegistrationModule extends Module { override def bindings(environment: Environment, configuration: Configuration): Seq[Binding[_]] = Seq( bind[ServiceRegistrationModule.RegisterWithServiceRegistry].toSelf.eagerly(), bind[ServiceRegistrationModule.ServiceConfig].toProvider[ServiceRegistrationModule.ServiceConfigProvider] ) } object ServiceRegistrationModule { class ServiceConfigProvider @Inject() (config: Config) extends Provider[ServiceConfig] { override lazy val get = ServiceConfig(serviceDnsRecords(config)) } case class ServiceConfig(uris: immutable.Seq[URI]) @Singleton private class RegisterWithServiceRegistry @Inject() ( coordinatedShutdown: CoordinatedShutdown, resolvedServices: ResolvedServices, config: ServiceConfig, registry: ServiceRegistry )(implicit ec: ExecutionContext) { private lazy val logger: Logger = Logger(this.getClass()) private val locatableServices = resolvedServices.services.filter(_.descriptor.locatableService) coordinatedShutdown.addTask( CoordinatedShutdown.PhaseBeforeServiceUnbind, "unregister-services-from-service-locator-javadsl" ) { () => Future .sequence(locatableServices.map { service => registry.unregister(service.descriptor.name).invoke().toScala }) .map(_ => Done) } locatableServices.foreach { service => val c = ServiceRegistryService.of(config.uris.asJava, service.descriptor.acls) registry .register(service.descriptor.name) .invoke(c) .exceptionally(new JFunction[Throwable, NotUsed] { def apply(t: Throwable) = { logger .error(s"Service name=[${service.descriptor.name}] couldn't register itself to the service locator.", t) NotUsed.getInstance() } }) } } }
Example 142
Source File: JavaServiceRegistryClient.scala From lagom with Apache License 2.0 | 5 votes |
package com.lightbend.lagom.internal.javadsl.registry

import java.net.URI
import java.util.Optional
import javax.inject.Inject
import javax.inject.Singleton

import com.lightbend.lagom.devmode.internal.registry.AbstractLoggingServiceRegistryClient
import com.lightbend.lagom.javadsl.api.transport.NotFound

import scala.collection.immutable
import scala.compat.java8.FutureConverters._
import scala.compat.java8.OptionConverters
import scala.concurrent.ExecutionContext
import scala.concurrent.Future

@Singleton
private[lagom] class JavaServiceRegistryClient @Inject() (
    registry: ServiceRegistry,
    implicit val ec: ExecutionContext
) extends AbstractLoggingServiceRegistryClient {

  protected override def internalLocateAll(serviceName: String, portName: Option[String]): Future[immutable.Seq[URI]] =
    registry
      .lookup(serviceName, OptionConverters.toJava(portName))
      .invoke()
      .toScala
      .map(immutable.Seq[URI](_))
      .recover { case _: NotFound => Nil }
}
Example 143
Source File: LagomDevModeServiceRegistry.scala From lagom with Apache License 2.0 | 5 votes |
package com.lightbend.lagom.devmode.internal.scaladsl.registry import java.net.URI import akka.NotUsed import akka.util.ByteString import com.lightbend.lagom.devmode.internal.registry.ServiceRegistryClient import com.lightbend.lagom.scaladsl.api.deser.MessageSerializer.NegotiatedDeserializer import com.lightbend.lagom.scaladsl.api.deser.MessageSerializer.NegotiatedSerializer import com.lightbend.lagom.scaladsl.api.deser.MessageSerializer import com.lightbend.lagom.scaladsl.api.deser.StrictMessageSerializer import com.lightbend.lagom.scaladsl.api.transport.MessageProtocol import com.lightbend.lagom.scaladsl.api.transport.Method import com.lightbend.lagom.scaladsl.api.Descriptor import com.lightbend.lagom.scaladsl.api.Service import com.lightbend.lagom.scaladsl.api.ServiceAcl import com.lightbend.lagom.scaladsl.api.ServiceCall import play.api.libs.functional.syntax._ import play.api.libs.json._ import scala.collection.immutable import scala.collection.immutable.Seq trait ServiceRegistry extends Service { def register(name: String): ServiceCall[ServiceRegistryService, NotUsed] def unregister(name: String): ServiceCall[NotUsed, NotUsed] def lookup(name: String, portName: Option[String]): ServiceCall[NotUsed, URI] def registeredServices: ServiceCall[NotUsed, immutable.Seq[RegisteredService]] import Service._ import ServiceRegistry._ def descriptor: Descriptor = { named(ServiceRegistryClient.ServiceName) .withCalls( restCall(Method.PUT, "/services/:id", register _), restCall(Method.DELETE, "/services/:id", this.unregister _), restCall(Method.GET, "/services/:id?portName", lookup _), pathCall("/services", registeredServices) ) .withLocatableService(false) } } object ServiceRegistry { implicit val uriMessageSerializer: MessageSerializer[URI, ByteString] = new StrictMessageSerializer[URI] { private val serializer = new NegotiatedSerializer[URI, ByteString] { override def serialize(message: URI): ByteString = ByteString.fromString(message.toString, "utf-8") override val protocol: MessageProtocol = MessageProtocol.empty.withContentType("text/plain").withCharset("utf-8") } override def serializerForRequest = serializer override def serializerForResponse(acceptedMessageProtocols: Seq[MessageProtocol]) = serializer override def deserializer(protocol: MessageProtocol): NegotiatedDeserializer[URI, ByteString] = new NegotiatedDeserializer[URI, ByteString] { override def deserialize(wire: ByteString) = URI.create(wire.decodeString(protocol.charset.getOrElse("utf-8"))) } } } case class RegisteredService(name: String, url: URI, portName: Option[String]) object RegisteredService { import UriFormat.uriFormat implicit val format: Format[RegisteredService] = Json.format[RegisteredService] } case class ServiceRegistryService(uris: immutable.Seq[URI], acls: immutable.Seq[ServiceAcl]) object ServiceRegistryService { def apply(uri: URI, acls: immutable.Seq[ServiceAcl]): ServiceRegistryService = ServiceRegistryService(Seq(uri), acls) import UriFormat.uriFormat implicit val methodFormat: Format[Method] = (__ \ "name").format[String].inmap(new Method(_), _.name) implicit val serviceAclFormat: Format[ServiceAcl] = (__ \ "method") .formatNullable[Method] .and((__ \ "pathRegex").formatNullable[String]) .apply(ServiceAcl.apply, acl => (acl.method, acl.pathRegex)) implicit val format: Format[ServiceRegistryService] = Json.format[ServiceRegistryService] } object UriFormat { implicit val uriFormat: Format[URI] = implicitly[Format[String]].inmap(URI.create, _.toString) }
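The play-json formats above make registrations wire-friendly; a sketch (not from the source) of serializing one:

import java.net.URI
import play.api.libs.json.Json
import com.lightbend.lagom.scaladsl.api.ServiceAcl
import scala.collection.immutable

val registration = ServiceRegistryService(
  URI.create("http://localhost:59003"),
  immutable.Seq(ServiceAcl.forPathRegex("/api/.*")))

Json.toJson(registration)
// yields something like {"uris":["http://localhost:59003"],"acls":[{"pathRegex":"/api/.*"}]}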
Example 144
Source File: ScalaServiceRegistryClient.scala From lagom with Apache License 2.0 | 5 votes |
package com.lightbend.lagom.devmode.internal.scaladsl.registry

import java.net.URI

import com.lightbend.lagom.devmode.internal.registry.AbstractLoggingServiceRegistryClient
import com.lightbend.lagom.scaladsl.api.transport.NotFound

import scala.collection.immutable
import scala.concurrent.ExecutionContext
import scala.concurrent.Future

private[lagom] class ScalaServiceRegistryClient(registry: ServiceRegistry)(implicit ec: ExecutionContext)
    extends AbstractLoggingServiceRegistryClient {

  protected override def internalLocateAll(serviceName: String, portName: Option[String]): Future[immutable.Seq[URI]] =
    registry
      .lookup(serviceName, portName)
      .invoke()
      .map(immutable.Seq[URI](_))
      .recover { case _: NotFound => Nil }
}
Example 145
Source File: AbstractLoggingServiceRegistryClient.scala From lagom with Apache License 2.0 | 5 votes |
package com.lightbend.lagom.devmode.internal.registry

import java.net.URI

import org.slf4j.Logger
import org.slf4j.LoggerFactory

import scala.collection.immutable
import scala.concurrent.ExecutionContext
import scala.concurrent.Future
import scala.util.Failure
import scala.util.Success

private[lagom] abstract class AbstractLoggingServiceRegistryClient(implicit ec: ExecutionContext)
    extends ServiceRegistryClient {

  protected val log: Logger = LoggerFactory.getLogger(getClass)

  override def locateAll(serviceName: String, portName: Option[String]): Future[immutable.Seq[URI]] = {
    require(
      serviceName != ServiceRegistryClient.ServiceName,
      "The service registry client cannot locate the service registry service itself"
    )
    log.debug("Locating service name=[{}] ...", serviceName)

    val location: Future[immutable.Seq[URI]] = internalLocateAll(serviceName, portName)

    location.onComplete {
      case Success(Nil) =>
        log.warn("serviceName=[{}] was not found. Hint: Maybe it was not started?", serviceName)
      case Success(uris) =>
        log.debug("serviceName=[{}] can be reached at uris=[{}]", serviceName: Any, uris: Any)
      case Failure(e) =>
        log.warn("Service registry replied with an error when looking up serviceName=[{}]", serviceName: Any, e: Any)
    }

    location
  }

  protected def internalLocateAll(serviceName: String, portName: Option[String]): Future[immutable.Seq[URI]]
}
Example 146
Source File: LagomDevModeServiceDiscoverySpec.scala From lagom with Apache License 2.0 | 5 votes |
package com.lightbend.lagom.devmode.internal.registry import java.net.InetAddress import java.net.URI import akka.actor.ActorSystem import akka.discovery.ServiceDiscovery.Resolved import akka.discovery.ServiceDiscovery.ResolvedTarget import akka.testkit.TestKit import org.scalatest.BeforeAndAfterAll import org.scalatest.concurrent.ScalaFutures._ import scala.collection.immutable import scala.concurrent.Future import scala.concurrent.duration._ import org.scalatest.matchers.should.Matchers import org.scalatest.wordspec.AnyWordSpecLike class LagomDevModeServiceDiscoverySpec extends TestKit(ActorSystem("LagomDevModeSimpleServiceDiscoverySpec")) with AnyWordSpecLike with Matchers with BeforeAndAfterAll { private val client = new StaticServiceRegistryClient( Map( "test-service" -> List(URI.create("http://localhost:8080")), "test-service-without-port" -> List(URI.create("http://localhost")) ) ) protected override def afterAll(): Unit = { shutdown(verifySystemShutdown = true) } private val discovery = LagomDevModeServiceDiscovery(system) discovery.setServiceRegistryClient(client) "DevModeSimpleServiceDiscoverySpec" should { "resolve services in the registry" in { val expected = Resolved("test-service", List(ResolvedTarget("localhost", Some(8080), Some(InetAddress.getLocalHost)))) discovery.lookup("test-service", 100.milliseconds).futureValue shouldBe expected } "allow missing ports" in { val expected = Resolved("test-service-without-port", List(ResolvedTarget("localhost", None, Some(InetAddress.getLocalHost)))) discovery.lookup("test-service-without-port", 100.milliseconds).futureValue shouldBe expected } } } private class StaticServiceRegistryClient(registrations: Map[String, List[URI]]) extends ServiceRegistryClient { override def locateAll(serviceName: String, portName: Option[String]): Future[immutable.Seq[URI]] = Future.successful(registrations.getOrElse(serviceName, Nil)) }
Example 147
Source File: FilteredServiceImpl.scala From lagom with Apache License 2.0 | 5 votes |
package docs.scaladsl.mb

import com.lightbend.lagom.scaladsl.api.ServiceCall
import com.lightbend.lagom.scaladsl.persistence.EventStreamElement
import com.lightbend.lagom.scaladsl.persistence.PersistentEntityRegistry
import com.lightbend.lagom.scaladsl.api.broker.Topic
import com.lightbend.lagom.scaladsl.broker.TopicProducer

import scala.collection.immutable

class FilteredServiceImpl(persistentEntityRegistry: PersistentEntityRegistry)
    extends HelloServiceImpl(persistentEntityRegistry) {

  //#filter-events
  override def greetingsTopic(): Topic[GreetingMessage] =
    TopicProducer.singleStreamWithOffset { fromOffset =>
      persistentEntityRegistry
        .eventStream(HelloEventTag.INSTANCE, fromOffset)
        .mapConcat(filterEvents)
    }

  private def filterEvents(ev: EventStreamElement[HelloEvent]) = ev match {
    // Only publish greetings where the message is "Hello".
    case ev @ EventStreamElement(_, GreetingMessageChanged("Hello"), offset) =>
      immutable.Seq((convertEvent(ev), offset))
    case _ => Nil
  }
  //#filter-events

  private def convertEvent(helloEvent: EventStreamElement[HelloEvent]): GreetingMessage = {
    helloEvent.event match {
      case GreetingMessageChanged(msg) => GreetingMessage(msg)
    }
  }
}
Example 148
Source File: ItemAdded.scala From lagom with Apache License 2.0 | 5 votes |
package docs.home.scaladsl.serialization.v2c import com.lightbend.lagom.scaladsl.playjson.JsonMigration import com.lightbend.lagom.scaladsl.playjson.JsonMigrations import com.lightbend.lagom.scaladsl.playjson.JsonSerializerRegistry import play.api.libs.json.JsObject import play.api.libs.json.JsPath import play.api.libs.json.JsString import scala.collection.immutable //#rename case class ItemAdded(shoppingCartId: String, itemId: String, quantity: Int) //#rename object ItemAddedMigration { class ShopSerializerRegistry1 extends JsonSerializerRegistry { override def serializers = Vector.empty //#imperative-migration private val itemAddedMigration = new JsonMigration(2) { override def transform(fromVersion: Int, json: JsObject): JsObject = { if (fromVersion < 2) { val productId = (JsPath \ "productId").read[JsString].reads(json).get json + ("itemId" -> productId) - "productId" } else { json } } } override def migrations = Map[String, JsonMigration]( classOf[ItemAdded].getName -> itemAddedMigration ) //#imperative-migration } class ShopSerializerRegistry2 extends JsonSerializerRegistry { override val serializers = Vector.empty //#transformer-migration val productIdToItemId = JsPath.json .update( (JsPath \ "itemId").json.copyFrom((JsPath \ "productId").json.pick) ) .andThen((JsPath \ "productId").json.prune) override def migrations = Map[String, JsonMigration]( JsonMigrations.transform[ItemAdded]( immutable.SortedMap( 1 -> productIdToItemId ) ) ) //#transformer-migration } }
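A sketch (not from the source) of what the rename migration above does to a version-1 event when it is read back:

import play.api.libs.json.Json

val v1Json = Json.obj("shoppingCartId" -> "cart-1", "productId" -> "p-42", "quantity" -> 2)
// Passing v1Json through the version-2 migration (fromVersion = 1) produces:
// {"shoppingCartId":"cart-1","quantity":2,"itemId":"p-42"}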
Example 149
Source File: ItemAdded.scala From lagom with Apache License 2.0 | 5 votes |
package docs.home.scaladsl.serialization.v2b import com.lightbend.lagom.scaladsl.playjson._ import scala.collection.immutable //#add-mandatory case class ItemAdded(shoppingCartId: String, productId: String, quantity: Int, discount: Double) //#add-mandatory object ItemAddedMigration { object ShopCommands { val serializers = Vector.empty[JsonSerializer[_]] } object ShopEvents { val serializers = Vector.empty[JsonSerializer[_]] } //#imperative-migration class ShopSerializerRegistry extends JsonSerializerRegistry { import play.api.libs.json._ override val serializers = ShopCommands.serializers ++ ShopEvents.serializers private val itemAddedMigration = new JsonMigration(2) { override def transform(fromVersion: Int, json: JsObject): JsObject = { if (fromVersion < 2) { json + ("discount" -> JsNumber(0.0d)) } else { json } } } override def migrations = Map[String, JsonMigration]( classOf[ItemAdded].getName -> itemAddedMigration ) } //#imperative-migration } object ItemAddedMigrationTransformer { object ShopCommands { val serializers = immutable.Seq.empty[JsonSerializer[_]] } object ShopEvents { val serializers = immutable.Seq.empty[JsonSerializer[_]] } //#transformer-migration class ShopSerializerRegistry extends JsonSerializerRegistry { import play.api.libs.json._ override val serializers = ShopCommands.serializers ++ ShopEvents.serializers val addDefaultDiscount = JsPath.json.update((JsPath \ "discount").json.put(JsNumber(0.0d))) override def migrations = Map[String, JsonMigration]( JsonMigrations.transform[ItemAdded]( immutable.SortedMap( 1 -> addDefaultDiscount ) ) ) } //#transformer-migration }
Example 150
Source File: JdbcReadSideQuery.scala From lagom with Apache License 2.0 | 5 votes |
package docs.home.scaladsl.persistence //#imports import scala.collection.immutable import scala.collection.immutable.VectorBuilder import akka.NotUsed import com.lightbend.lagom.scaladsl.api.Service import com.lightbend.lagom.scaladsl.api.ServiceCall import com.lightbend.lagom.scaladsl.persistence.jdbc.JdbcSession //#imports trait JdbcReadSideQuery { trait BlogService extends Service { def getPostSummaries(): ServiceCall[NotUsed, immutable.IndexedSeq[PostSummary]] override def descriptor = ??? } //#service-impl class BlogServiceImpl(jdbcSession: JdbcSession) extends BlogService { import JdbcSession.tryWith override def getPostSummaries() = ServiceCall { request => jdbcSession.withConnection { connection => tryWith(connection.prepareStatement("SELECT id, title FROM blogsummary")) { ps => tryWith(ps.executeQuery()) { rs => val summaries = new VectorBuilder[PostSummary] while (rs.next()) { summaries += PostSummary(rs.getString("id"), rs.getString("title")) } summaries.result() } } } } //#service-impl } }
Example 151
Source File: ServiceDetector.scala From lagom with Apache License 2.0 | 5 votes |
package com.lightbend.lagom.internal.api.tools

import com.lightbend.lagom.internal.spi.ServiceAcl
import com.lightbend.lagom.internal.spi.ServiceDescription
import com.lightbend.lagom.internal.spi.ServiceDiscovery
import com.typesafe.config.ConfigFactory
import play.api._
import play.api.libs.functional.syntax._
import play.api.libs.json._

import scala.collection.JavaConverters._
import scala.collection.immutable
import scala.compat.java8.OptionConverters._

// NOTE: this excerpt was truncated: the original file also defines the object header,
// a logger, two configuration-key constants and implicit JSON writes for
// ServiceDescription. The reconstruction below restores the structure; the
// ServiceDiscoveryKey value is an assumption, not the verbatim original.
object ServiceDetector {

  private val log = Logger(this.getClass)

  private val ServiceDiscoveryKey = "lagom.service-discovery"    // assumed key name
  private val ApplicationLoaderKey = "play.application.loader"   // standard Play key

  def services(classLoader: ClassLoader): String = {
    val config = ConfigFactory.load(classLoader)
    val serviceDiscoveryClassName =
      if (config.hasPath(ServiceDiscoveryKey)) {
        config.getString(ServiceDiscoveryKey)
      } else {
        config.getString(ApplicationLoaderKey)
      }
    services(classLoader, serviceDiscoveryClassName)
  }

  private[tools] def services(classLoader: ClassLoader, serviceDiscoveryClassName: String): String = {
    log.debug("Loading service discovery class: " + serviceDiscoveryClassName)
    val serviceDiscoverClass = classLoader.loadClass(serviceDiscoveryClassName)
    val castServiceDiscoveryClass = serviceDiscoverClass.asSubclass(classOf[ServiceDiscovery])
    val serviceDiscovery = castServiceDiscoveryClass.newInstance()
    val services = serviceDiscovery.discoverServices(classLoader).asScala.toIndexedSeq
    Json.stringify(Json.toJson(services))
  }
}
Example 152
Source File: AbstractEmbeddedPersistentActorSpec.scala From lagom with Apache License 2.0 | 5 votes |
package com.lightbend.lagom.scaladsl.persistence.testkit import akka.actor.ActorRef import akka.actor.ActorSystem import akka.actor.Props import akka.actor.actorRef2Scala import akka.persistence.PersistentActor import akka.testkit.ImplicitSender import akka.testkit.TestKitBase import com.lightbend.lagom.persistence.ActorSystemSpec import com.lightbend.lagom.persistence.PersistenceSpec import scala.collection.immutable import com.lightbend.lagom.scaladsl.playjson.JsonSerializerRegistry import com.lightbend.lagom.scaladsl.playjson.JsonSerializer import scala.concurrent.duration._ object AbstractEmbeddedPersistentActorSpec { final case class Cmd(data: String) final case class Evt(data: String) case object Get final case class State(data: Vector[String] = Vector.empty) { def apply(evt: Evt): State = { copy(data :+ evt.data) } } def props(persistenceId: String): Props = Props(new Persistent(persistenceId)) class Persistent(override val persistenceId: String) extends PersistentActor { var state = State() override def receiveRecover = { case evt: Evt => state = state(evt) } override def receiveCommand = { case Cmd(data) => persist(Evt(data.toUpperCase)) { evt => state = state(evt) } case Get => sender() ! state } } object EmbeddedPersistentActorSerializers extends JsonSerializerRegistry { override def serializers: immutable.Seq[JsonSerializer[_]] = { import play.api.libs.json._ import JsonSerializer.emptySingletonFormat Vector( JsonSerializer(Json.format[Cmd]), JsonSerializer(Json.format[Evt]), JsonSerializer(emptySingletonFormat(Get)), JsonSerializer(Json.format[State]) ) } } } trait AbstractEmbeddedPersistentActorSpec { spec: ActorSystemSpec => import AbstractEmbeddedPersistentActorSpec._ "A persistent actor" must { "store events in the embedded journal" in within(15.seconds) { val p = system.actorOf(props("p1")) println(implicitly[ActorRef]) p ! Get expectMsg(State()) p ! Cmd("a") p ! Cmd("b") p ! Cmd("c") p ! Get expectMsg(State(Vector("A", "B", "C"))) // start another with same persistenceId should recover state val p2 = system.actorOf(props("p1")) p2 ! Get expectMsg(State(Vector("A", "B", "C"))) } } }
Example 153
Source File: TransportExceptionSpec.scala From lagom with Apache License 2.0 | 5 votes |
package com.lightbend.lagom.javadsl.api.transport import java.util import java.util.Optional import com.lightbend.lagom.javadsl.api.deser.DeserializationException import com.lightbend.lagom.javadsl.api.deser.SerializationException import scala.collection.immutable import org.scalatest.matchers.should.Matchers import org.scalatest.wordspec.AnyWordSpec class TransportExceptionSpec extends AnyWordSpec with Matchers { val protocolTextPlain = new MessageProtocol(Optional.of("text/plain"), Optional.of("utf-8"), Optional.empty[String]) val protocolJson = new MessageProtocol(Optional.of("application/json"), Optional.of("utf-8"), Optional.empty[String]) val protocolHtml = new MessageProtocol(Optional.of("text/html"), Optional.of("utf-8"), Optional.empty[String]) val supportedExceptions: immutable.Seq[TransportException] = List( new DeserializationException("some msg - DeserializationException"), new BadRequest("some msg - BadRequest"), new Forbidden("some msg - Forbidden"), new PolicyViolation("some msg - PolicyViolation"), new NotFound("some msg - NotFound"), new NotAcceptable(util.Arrays.asList(protocolJson, protocolTextPlain), protocolHtml), new PayloadTooLarge("some msg - PayloadTooLarge"), new UnsupportedMediaType(protocolTextPlain, protocolJson), new SerializationException("some msg - SerializationException") ) "Lagom-provided TransportExceptions" should { supportedExceptions.foreach { ex => s"be buildable from code and message (${ex.getClass.getName})" in { val reconstructed = TransportException.fromCodeAndMessage(ex.errorCode(), ex.exceptionMessage()) reconstructed.getClass.getName should ===(ex.getClass.getName) reconstructed.exceptionMessage() should ===(ex.exceptionMessage()) } } // TODO: implement roundtrip de/ser tests like in com.lightbend.lagom.scaladsl.api.ExceptionsSpec } }
Example 154
Source File: AdditionalRouter.scala From lagom with Apache License 2.0 | 5 votes |
package com.lightbend.lagom.javadsl.server import java.util import play.api.inject.Injector import play.api.routing.Router import play.core.j.JavaRouterAdapter import scala.collection.JavaConverters._ import scala.collection.immutable trait AdditionalRouter { def prefix: Option[String] def withPrefix(path: String): AdditionalRouter } final case class ClassBased[R <: Router](classType: Class[R], prefix: Option[String]) extends AdditionalRouter { def this(classType: Class[R]) = this(classType, None) def withPrefix(path: String): ClassBased[R] = copy(prefix = AdditionalRouter.appendPrefix(prefix, path)) } final case class InstanceBased(router: Router, prefix: Option[String]) extends AdditionalRouter { def this(classType: Router) = this(classType, None) def withPrefix(path: String): InstanceBased = copy(prefix = AdditionalRouter.appendPrefix(prefix, path)) } object AdditionalRouter { private[lagom] def appendPrefix(prefix: Option[String], newPrefix: String) = { prefix .map(oldPrefix => newPrefix + "/" + oldPrefix) .orElse(Option(newPrefix)) } private[lagom] def wireRouters( injector: Injector, additionalRouters: util.List[AdditionalRouter] ): util.List[Router] = { // modifies the Router in case a prefix is defined // otherwise returns the router as is def applyPrefix(router: Router, prefix: Option[String]): Router = prefix.map(router.withPrefix).getOrElse(router) additionalRouters.asScala .foldLeft(immutable.Seq.empty[Router]) { (routers, ar) => ar match { case ar: InstanceBased => routers :+ applyPrefix(ar.router, ar.prefix) case ar: ClassBased[_] => val ins = injector.instanceOf(ar.classType) routers :+ applyPrefix(ins, ar.prefix) } } .asJava } }
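Note the composition order in `appendPrefix` above: the most recently added prefix goes in front. A small sketch (not from the source; `someRouter` is an assumed `play.api.routing.Router` instance):

val routed = new InstanceBased(someRouter)
  .withPrefix("api") // prefix = Some("api")
  .withPrefix("v1")  // prefix = Some("v1/api"), the newest prefix goes in front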
Example 155
Source File: TestServiceClient.scala From lagom with Apache License 2.0 | 5 votes |
package com.lightbend.lagom.scaladsl.client

import com.lightbend.lagom.scaladsl.api.Descriptor
import com.lightbend.lagom.scaladsl.api.Service
import com.lightbend.lagom.scaladsl.api.ServiceCall
import com.lightbend.lagom.scaladsl.api.broker.Topic

import scala.collection.immutable

object TestServiceClient extends ServiceClientConstructor {

  override def construct[S <: Service](constructor: (ServiceClientImplementationContext) => S): S = {
    constructor(new ServiceClientImplementationContext {
      override def resolve(descriptor: Descriptor): ServiceClientContext = {
        new ServiceClientContext {
          override def createServiceCall[Request, Response](
              methodName: String,
              params: immutable.Seq[Any]
          ): ServiceCall[Request, Response] = {
            TestServiceCall(descriptor, methodName, params)
          }
          override def createTopic[Message](methodName: String): Topic[Message] = {
            TestTopic(descriptor, methodName)
          }
        }
      }
    })
  }
}
Example 156
Source File: TopicProducers.scala From lagom with Apache License 2.0 | 5 votes |
package com.lightbend.internal.broker

import akka.persistence.query.Offset
import akka.stream.scaladsl.Source
import com.lightbend.lagom.scaladsl.api.broker.Subscriber
import com.lightbend.lagom.scaladsl.api.broker.Topic
import com.lightbend.lagom.scaladsl.persistence.AggregateEvent
import com.lightbend.lagom.scaladsl.persistence.AggregateEventTag

import scala.collection.immutable

trait InternalTopic[Message] extends Topic[Message] {
  final override def topicId: Topic.TopicId =
    throw new UnsupportedOperationException("Topic#topicId is not permitted in the service's topic implementation")

  final override def subscribe: Subscriber[Message] =
    throw new UnsupportedOperationException("Topic#subscribe is not permitted in the service's topic implementation.")
}

final class TaggedOffsetTopicProducer[Message, Event <: AggregateEvent[Event]](
    val tags: immutable.Seq[AggregateEventTag[Event]],
    val readSideStream: (AggregateEventTag[Event], Offset) => Source[(Message, Offset), _]
) extends InternalTopic[Message]
Example 157
Source File: Producer.scala From lagom with Apache License 2.0 | 5 votes |
package com.lightbend.lagom.internal.broker.kafka import java.net.URI import akka.actor.ActorSystem import akka.persistence.query.{ Offset => AkkaOffset } import akka.stream.Materializer import akka.stream.scaladsl._ import com.lightbend.lagom.internal.projection.ProjectionRegistry import com.lightbend.lagom.internal.projection.ProjectionRegistryActor.WorkerCoordinates import com.lightbend.lagom.spi.persistence.OffsetStore import org.apache.kafka.common.serialization.Serializer import scala.collection.immutable import scala.concurrent.ExecutionContext import scala.concurrent.Future private[lagom] object Producer { def startTaggedOffsetProducer[Message]( system: ActorSystem, tags: immutable.Seq[String], kafkaConfig: KafkaConfig, locateService: String => Future[Seq[URI]], topicId: String, eventStreamFactory: (String, AkkaOffset) => Source[(Message, AkkaOffset), _], partitionKeyStrategy: Option[Message => String], serializer: Serializer[Message], offsetStore: OffsetStore, projectionRegistry: ProjectionRegistry )(implicit mat: Materializer, ec: ExecutionContext): Unit = { val projectionName = s"kafkaProducer-$topicId" val producerConfig = ProducerConfig(system.settings.config) val topicProducerProps = (coordinates: WorkerCoordinates) => TopicProducerActor.props( coordinates, kafkaConfig, producerConfig, locateService, topicId, eventStreamFactory, partitionKeyStrategy, serializer, offsetStore ) val entityIds = tags.toSet projectionRegistry.registerProjection( projectionName, entityIds, topicProducerProps, producerConfig.role ) } }
Example 158
Source File: CassandraReadSideHandler.scala From lagom with Apache License 2.0 | 5 votes |
package com.lightbend.lagom.internal.scaladsl.persistence.cassandra import akka.persistence.query.Offset import akka.stream.ActorAttributes import akka.stream.scaladsl.Flow import akka.Done import akka.NotUsed import com.datastax.driver.core.BatchStatement import com.datastax.driver.core.BoundStatement import com.lightbend.lagom.internal.persistence.cassandra.CassandraOffsetDao import com.lightbend.lagom.internal.persistence.cassandra.CassandraOffsetStore import com.lightbend.lagom.scaladsl.persistence.ReadSideProcessor.ReadSideHandler import com.lightbend.lagom.scaladsl.persistence._ import com.lightbend.lagom.scaladsl.persistence.cassandra.CassandraSession import org.slf4j.LoggerFactory import scala.collection.immutable import scala.concurrent.ExecutionContext import scala.concurrent.Future import scala.collection.JavaConverters._ private[cassandra] final class CassandraAutoReadSideHandler[Event <: AggregateEvent[Event]]( session: CassandraSession, offsetStore: CassandraOffsetStore, handlers: Map[Class[_ <: Event], CassandraAutoReadSideHandler.Handler[Event]], globalPrepareCallback: () => Future[Done], prepareCallback: AggregateEventTag[Event] => Future[Done], readProcessorId: String, dispatcher: String )(implicit ec: ExecutionContext) extends CassandraReadSideHandler[Event, CassandraAutoReadSideHandler.Handler[Event]]( session, handlers, dispatcher ) { import CassandraAutoReadSideHandler.Handler @volatile private var offsetDao: CassandraOffsetDao = _ protected override def invoke( handler: Handler[Event], element: EventStreamElement[Event] ): Future[immutable.Seq[BoundStatement]] = { for { statements <- handler .asInstanceOf[EventStreamElement[Event] => Future[immutable.Seq[BoundStatement]]] .apply(element) } yield statements :+ offsetDao.bindSaveOffset(element.offset) } protected def offsetStatement(offset: Offset): immutable.Seq[BoundStatement] = immutable.Seq(offsetDao.bindSaveOffset(offset)) override def globalPrepare(): Future[Done] = { globalPrepareCallback.apply() } override def prepare(tag: AggregateEventTag[Event]): Future[Offset] = { for { _ <- prepareCallback.apply(tag) dao <- offsetStore.prepare(readProcessorId, tag.tag) } yield { offsetDao = dao dao.loadedOffset } } }
Example 159
Source File: CassandraReadSideImpl.scala From lagom with Apache License 2.0 | 5 votes |
package com.lightbend.lagom.internal.scaladsl.persistence.cassandra import akka.Done import akka.actor.ActorSystem import com.datastax.driver.core.BoundStatement import com.lightbend.lagom.internal.persistence.cassandra.CassandraOffsetStore import com.lightbend.lagom.scaladsl.persistence.ReadSideProcessor.ReadSideHandler import com.lightbend.lagom.scaladsl.persistence.cassandra.CassandraReadSide.ReadSideHandlerBuilder import com.lightbend.lagom.scaladsl.persistence.cassandra.CassandraReadSide import com.lightbend.lagom.scaladsl.persistence.cassandra.CassandraSession import com.lightbend.lagom.scaladsl.persistence.AggregateEvent import com.lightbend.lagom.scaladsl.persistence.AggregateEventTag import com.lightbend.lagom.scaladsl.persistence.EventStreamElement import scala.collection.immutable import scala.concurrent.Future import scala.reflect.ClassTag private[lagom] final class CassandraReadSideImpl( system: ActorSystem, session: CassandraSession, offsetStore: CassandraOffsetStore ) extends CassandraReadSide { private val dispatcher = system.settings.config.getString("lagom.persistence.read-side.use-dispatcher") implicit val ec = system.dispatchers.lookup(dispatcher) override def builder[Event <: AggregateEvent[Event]](eventProcessorId: String): ReadSideHandlerBuilder[Event] = { new ReadSideHandlerBuilder[Event] { import CassandraAutoReadSideHandler.Handler private var prepareCallback: AggregateEventTag[Event] => Future[Done] = tag => Future.successful(Done) private var globalPrepareCallback: () => Future[Done] = () => Future.successful(Done) private var handlers = Map.empty[Class[_ <: Event], Handler[Event]] override def setGlobalPrepare(callback: () => Future[Done]): ReadSideHandlerBuilder[Event] = { globalPrepareCallback = callback this } override def setPrepare(callback: (AggregateEventTag[Event]) => Future[Done]): ReadSideHandlerBuilder[Event] = { prepareCallback = callback this } override def setEventHandler[E <: Event: ClassTag]( handler: EventStreamElement[E] => Future[immutable.Seq[BoundStatement]] ): ReadSideHandlerBuilder[Event] = { val eventClass = implicitly[ClassTag[E]].runtimeClass.asInstanceOf[Class[Event]] handlers += (eventClass -> handler.asInstanceOf[Handler[Event]]) this } override def build(): ReadSideHandler[Event] = { new CassandraAutoReadSideHandler[Event]( session, offsetStore, handlers, globalPrepareCallback, prepareCallback, eventProcessorId, dispatcher ) } } } }
Example 160
Source File: TestEntityReadSide.scala From lagom with Apache License 2.0 | 5 votes |
package com.lightbend.lagom.scaladsl.persistence.cassandra

import akka.Done
import akka.actor.ActorSystem
import com.datastax.driver.core.BoundStatement
import com.datastax.driver.core.PreparedStatement
import com.lightbend.lagom.scaladsl.persistence.ReadSideProcessor.ReadSideHandler
import com.lightbend.lagom.scaladsl.persistence.AggregateEventTag
import com.lightbend.lagom.scaladsl.persistence.EventStreamElement
import com.lightbend.lagom.scaladsl.persistence.ReadSideProcessor
import com.lightbend.lagom.scaladsl.persistence.TestEntity

import scala.collection.immutable
import scala.concurrent.Future

object TestEntityReadSide {

  class TestEntityReadSideProcessor(system: ActorSystem, readSide: CassandraReadSide, session: CassandraSession)
      extends ReadSideProcessor[TestEntity.Evt] {
    def buildHandler: ReadSideHandler[TestEntity.Evt] = {
      import system.dispatcher

      @volatile var writeStmt: PreparedStatement = null

      def createTable(): Future[Done] =
        session.executeCreateTable(
          "CREATE TABLE IF NOT EXISTS testcounts (id text, count bigint, PRIMARY KEY (id))"
        )

      def prepareWriteStmt(): Future[Done] =
        session.prepare("UPDATE testcounts SET count = ? WHERE id = ?").map { ws =>
          writeStmt = ws
          Done
        }

      def updateCount(element: EventStreamElement[TestEntity.Appended]): Future[immutable.Seq[BoundStatement]] =
        session.selectOne("SELECT count FROM testcounts WHERE id = ?", element.entityId).map { maybeRow =>
          val count = maybeRow match {
            case Some(row) => row.getLong("count")
            case None      => 0L
          }
          Vector(writeStmt.bind(java.lang.Long.valueOf(count + 1L), element.entityId))
        }

      readSide
        .builder[TestEntity.Evt]("testoffsets")
        .setGlobalPrepare(() => createTable())
        .setPrepare(tag => prepareWriteStmt())
        .setEventHandler[TestEntity.Appended](updateCount)
        .build()
    }

    def aggregateTags: Set[AggregateEventTag[TestEntity.Evt]] = TestEntity.Evt.aggregateEventShards.allTags
  }
}

class TestEntityReadSide(system: ActorSystem, session: CassandraSession) {
  import system.dispatcher

  def getAppendCount(entityId: String): Future[Long] =
    session.selectOne("SELECT count FROM testcounts WHERE id = ?", entityId).map {
      case Some(row) => row.getLong("count")
      case None      => 0L
    }
}
Example 161
Source File: ServiceLocatorSessionProvider.scala From lagom with Apache License 2.0 | 5 votes |
package com.lightbend.lagom.internal.persistence.cassandra import java.net.InetSocketAddress import java.net.URI import scala.collection.immutable import scala.concurrent.ExecutionContext import scala.concurrent.Future import scala.concurrent.Promise import scala.concurrent.duration._ import scala.util.control.NoStackTrace import akka.actor.ActorSystem import akka.persistence.cassandra.ConfigSessionProvider import com.typesafe.config.Config import play.api.Logger private[lagom] final class ServiceLocatorSessionProvider(system: ActorSystem, config: Config) extends ConfigSessionProvider(system, config) { private val log = Logger(getClass) override def lookupContactPoints( clusterId: String )(implicit ec: ExecutionContext): Future[immutable.Seq[InetSocketAddress]] = { ServiceLocatorHolder(system).serviceLocatorEventually.flatMap { serviceLocatorAdapter => serviceLocatorAdapter.locateAll(clusterId).map { case Nil => throw new NoContactPointsException(s"No contact points for [$clusterId]") case uris => log.debug(s"Found Cassandra contact points: $uris") // URIs must be all valid uris.foreach { uri => require(uri.getHost != null, s"missing host in $uri for Cassandra contact points $clusterId") require(uri.getPort != -1, s"missing port in $uri for Cassandra contact points $clusterId") } uris.map { uri => new InetSocketAddress(uri.getHost, uri.getPort) } } } } } private[lagom] final class NoContactPointsException(msg: String) extends RuntimeException(msg) with NoStackTrace
Example 162
Source File: HDFSExecutorMetricsReplayListenerBus.scala From sparkoscope with Apache License 2.0 | 5 votes |
package org.apache.spark.scheduler

import java.io.InputStream

import scala.collection.immutable
import scala.collection.mutable.ListBuffer
import scala.io.Source
import scala.util.parsing.json._

import org.apache.spark.internal.Logging

private[spark] class HDFSExecutorMetricsReplayListenerBus extends SparkListenerBus with Logging {

  def replay(
      logDataList: ListBuffer[(InputStream, String)],
      sourceName: String,
      maybeTruncated: Boolean = false): Unit = {
    logDataList.foreach { case (inputStream, fileName) =>
      try {
        for (line <- Source.fromInputStream(inputStream).getLines()) {
          val hashMapParsed = JSON.parseFull(line)
          val hashMap = hashMapParsed match {
            case Some(m: Map[String, Any]) => m
            case _ => new immutable.HashMap[String, Any]
          }
          val hdfsExecutorMetrics = new HDFSExecutorMetrics(
            hashMap("values").asInstanceOf[Map[String, Any]],
            hashMap("host").asInstanceOf[String],
            hashMap("timestamp").asInstanceOf[Double].toLong)
          postToAll(hdfsExecutorMetrics)
        }
      } catch {
        case ex: Exception =>
          logError(ex.toString)
          logWarning(s"Got JsonParseException from log file $fileName")
      }
    }
  }
}
Example 163
Source File: VectorFieldNode.scala From scalismo-ui with GNU General Public License v3.0 | 5 votes |
package scalismo.ui.model

import scalismo.common.{DiscreteDomain, DiscreteField}
import scalismo.geometry._
import scalismo.ui.model.capabilities._
import scalismo.ui.model.properties._

import scala.collection.immutable

class VectorFieldsNode(override val parent: GroupNode) extends SceneNodeCollection[VectorFieldNode] {
  override val name: String = "Vector fields"

  def add(vectorField: DiscreteField[_3D, DiscreteDomain[_3D], EuclideanVector[_3D]], name: String): VectorFieldNode = {
    val node = new VectorFieldNode(this, vectorField, name)
    add(node)
    node
  }

  def addTransformationGlyph(pointCloud: PointCloud, name: String): TransformationGlyphNode = {
    val node = new TransformationGlyphNode(this, pointCloud, name)
    add(node)
    node
  }
}

class VectorFieldNode(override val parent: VectorFieldsNode,
                      val source: DiscreteField[_3D, DiscreteDomain[_3D], EuclideanVector[_3D]],
                      initialName: String)
    extends RenderableSceneNode
    with Removeable
    with Renameable
    with Grouped
    with HasOpacity
    with HasLineWidth
    with HasScalarRange {
  name = initialName

  // we store the vectors as a sequence, as values are defined by iterators, which we cannot
  // traverse twice
  private lazy val vectors: immutable.IndexedSeq[EuclideanVector[_3D]] = source.values.toIndexedSeq

  override val opacity = new OpacityProperty()
  override val lineWidth = new LineWidthProperty()
  override lazy val scalarRange: ScalarRangeProperty = {
    val (min, max) = {
      val norms = vectors.map(_.norm)
      (norms.min.toFloat, norms.max.toFloat)
    }
    new ScalarRangeProperty(ScalarRange(min, max))
  }

  override def group: GroupNode = parent.parent

  override def remove(): Unit = parent.remove(this)
}
Example 164
Source File: SceneNodeCollection.scala From scalismo-ui with GNU General Public License v3.0 | 5 votes |
package scalismo.ui.model import scalismo.ui.model.capabilities.CollapsableView import scala.collection.{immutable, mutable} object SceneNodeCollection { import scala.language.implicitConversions implicit def collectionAsChildNodeSeq[ChildNode <: SceneNode]( collection: SceneNodeCollection[ChildNode] ): immutable.Seq[ChildNode] = collection.children } trait SceneNodeCollection[ChildNode <: SceneNode] extends SceneNode with CollapsableView { private val _items = mutable.ListBuffer.empty[ChildNode] final override def children: List[ChildNode] = _items.toList protected def add(child: ChildNode): Unit = { require(child.parent == this) _items += child publishEvent(SceneNode.event.ChildAdded(this, child)) publishEvent(SceneNode.event.ChildrenChanged(this)) } protected def addToFront(child: ChildNode): Unit = { require(child.parent == this) _items.prepend(child) publishEvent(SceneNode.event.ChildAdded(this, child)) publishEvent(SceneNode.event.ChildrenChanged(this)) } def remove(child: ChildNode): Unit = { require(_items.contains(child)) _items -= child publishEvent(SceneNode.event.ChildRemoved(this, child)) publishEvent(SceneNode.event.ChildrenChanged(this)) } // a collection is hidden in the tree view if it contains less than 2 items. override def isViewCollapsed: Boolean = _items.length < 2 }
Example 165
Source File: FactoryTest.scala From scala-library-compat with Apache License 2.0 | 5 votes |
package test.scala.collection import org.junit.{Assert, Test} import scala.collection.compat._ import scala.collection.{BitSet, immutable, mutable} class FactoryTest { implicitly[Factory[Char, String]] implicitly[Factory[Char, Array[Char]]] implicitly[Factory[Int, collection.BitSet]] implicitly[Factory[Int, mutable.BitSet]] implicitly[Factory[Int, immutable.BitSet]] implicitly[Factory[Nothing, Seq[Nothing]]] def f[A] = implicitly[Factory[A, Stream[A]]] BitSet: Factory[Int, BitSet] Iterable: Factory[Int, Iterable[Int]] immutable.TreeSet: Factory[Int, immutable.TreeSet[Int]] Map: Factory[(Int, String), Map[Int, String]] immutable.TreeMap: Factory[(Int, String), immutable.TreeMap[Int, String]] @Test def streamFactoryPreservesLaziness(): Unit = { val factory = implicitly[Factory[Int, Stream[Int]]] var counter = 0 val source = Stream.continually { counter += 1; 1 } val result = factory.fromSpecific(source) Assert.assertEquals(1, counter) // One element has been evaluated because Stream is not lazy in its head } @Test def factoriesAreReusable(): Unit = { def generically[M[X] <: Iterable[X]](in: M[Int], factory: Factory[Int, M[Int]]): Unit = { val l = Iterator(-3, -2, -1).to(factory) val m = in.iterator.to(factory) Assert.assertEquals(in, m) } generically[List](List(1, 2, 3), List) generically[Seq](Seq(1, 2, 3), Seq) generically[IndexedSeq](IndexedSeq(1, 2, 3), IndexedSeq) generically[Vector](Vector(1, 2, 3), Vector) generically[Set](Set(1, 2, 3), Set) } }
Example 166
Source File: CompanionSrc.scala From scala-library-compat with Apache License 2.0 | 5 votes |
package fix import scala.{collection => c} import scala.collection.{immutable => i, mutable => m} import scala.collection import scala.collection.{ immutable, mutable } import scala.collection.compat._ object CompanionSrc { (null: c.IndexedSeq[Int]).iterableFactory (null: c.Iterable[Int]).iterableFactory (null: c.Seq[Int]).iterableFactory (null: collection.Iterable[Int]).iterableFactory (null: i.HashSet[Int]).iterableFactory (null: i.IndexedSeq[Int]).iterableFactory (null: i.Iterable[Int]).iterableFactory (null: i.LinearSeq[Int]).iterableFactory (null: i.List[Int]).iterableFactory (null: i.ListSet[Int]).iterableFactory (null: i.Queue[Int]).iterableFactory (null: i.Seq[Int]).iterableFactory (null: i.Set[Int]).iterableFactory (null: i.Stream[Int]).iterableFactory (null: immutable.Iterable[Int]).iterableFactory (null: i.Vector[Int]).iterableFactory (null: m.ArrayBuffer[Int]).iterableFactory (null: m.ArraySeq[Int]).iterableFactory (null: m.ArrayStack[Int]).iterableFactory (null: m.Buffer[Int]).iterableFactory (null: m.HashSet[Int]).iterableFactory (null: m.IndexedSeq[Int]).iterableFactory (null: m.Iterable[Int]).iterableFactory (null: m.LinearSeq[Int]).iterableFactory (null: m.LinkedHashSet[Int]).iterableFactory (null: m.Queue[Int]).iterableFactory (null: m.Seq[Int]).iterableFactory (null: m.Set[Int]).iterableFactory (null: mutable.Iterable[Int]).iterableFactory }
Example 167
Source File: ExperimentalSrc.scala From scala-library-compat with Apache License 2.0 | 5 votes |
package fix import scala.collection import scala.collection.immutable import scala.collection.mutable.{Map, Set} // Challenge to make sure the scoping is correct class ExperimentalSrc(iset: immutable.Set[Int], cset: collection.Set[Int], imap: immutable.Map[Int, Int], cmap: collection.Map[Int, Int]) { iset + 1 iset - 2 cset ++ _root_.scala.collection.Set(1) cset -- _root_.scala.collection.Set(2) cmap ++ _root_.scala.collection.Map(2 -> 3) cmap ++ _root_.scala.collection.Map((4, 5)) imap + (2 -> 3) imap + ((4, 5)) // Map.zip imap.zip(List()).toMap List().zip(List()) }
Example 168
Source File: CanBuildFromSrc.scala From scala-library-compat with Apache License 2.0 | 5 votes |
package fix

import scala.language.higherKinds
import scala.collection.immutable
import scala.collection.compat._

object CanBuildFromSrc {

  def f[C0, A, C1[_]](c0: C0)(implicit
      cbf: Factory[Int, C1[Int]],
      cbf2: Factory[String, C1[String]],
      cbf3: BuildFrom[C0, A, C1[A]]): C1[Int] = {

    val b = cbf.newBuilder
    val b2 = cbf.newBuilder
    val b3 = cbf.newBuilder
    val b4 = cbf2.newBuilder
    val b5 = cbf3.newBuilder(c0)
    val b6 = cbf3.newBuilder(c0)

    cbf.fromSpecific(List.empty[Int])
    cbf2.fromSpecific(List.empty[String])
    b.result()
  }

  def kind(implicit cbf: BuildFrom[String, Char, String],
           cbf2: BuildFrom[String, (Int, Boolean), Map[Int, Boolean]]): Unit = {
    cbf.newBuilder("")
    cbf2.newBuilder("")
    ()
  }

  def f2[T, That](implicit cbf: Factory[T, That]): Foo[T, That] = new Foo

  def f3[T, That](implicit cbf: Factory[T, That with immutable.Iterable[_]]): Foo[T, That] = new Foo

  class Foo[T, That](implicit cbf: Factory[T, That]) {
    val b = cbf.newBuilder
  }
}
Example 169
Source File: ExperimentalSrc.scala From scala-library-compat with Apache License 2.0 | 5 votes |
package fix import scala.collection import scala.collection.immutable import scala.collection.mutable.{Map, Set} // Challenge to make sure the scoping is correct class ExperimentalSrc(iset: immutable.Set[Int], cset: collection.Set[Int], imap: immutable.Map[Int, Int], cmap: collection.Map[Int, Int]) { iset + 1 iset - 2 cset + 1 cset - 2 cmap + (2 -> 3) cmap + ((4, 5)) imap + (2 -> 3) imap + ((4, 5)) // Map.zip imap.zip(List()) List().zip(List()) }
Example 170
Source File: CanBuildFromSrc.scala From scala-library-compat with Apache License 2.0 | 5 votes |
package fix import scala.language.higherKinds import scala.collection.immutable import collection.generic.CanBuildFrom object CanBuildFromSrc { def f[C0, A, C1[_]](c0: C0)(implicit cbf: CanBuildFrom[Nothing, Int, C1[Int]], cbf2: CanBuildFrom[Nothing, String, C1[String]], cbf3: CanBuildFrom[C0, A, C1[A]]): C1[Int] = { val b = cbf() val b2 = cbf.apply val b3 = cbf.apply() val b4 = cbf2.apply() val b5 = cbf3(c0) val b6 = cbf3.apply(c0) List.empty[Int].to[C1] List.empty[String].to[C1] b.result() } def kind(implicit cbf: CanBuildFrom[String, Char, String], cbf2: CanBuildFrom[String, (Int, Boolean), Map[Int, Boolean]]): Unit = { cbf("") cbf2("") () } def f2[T, That](implicit cbf: CanBuildFrom[Nothing, T, That]): Foo[T, That] = new Foo def f3[T, That](implicit cbf: CanBuildFrom[Nothing, T, That with immutable.Traversable[_]]): Foo[T, That] = new Foo class Foo[T, That](implicit cbf: CanBuildFrom[Nothing, T, That]) { val b = cbf() } }
Example 171
Source File: LeaseContentionSpec.scala From akka-management with Apache License 2.0 | 5 votes |
package akka.coordination.lease.kubernetes

import java.util.concurrent.Executors

import akka.actor.ActorSystem
import akka.coordination.lease.TimeoutSettings
import akka.coordination.lease.kubernetes.internal.KubernetesApiImpl
import akka.coordination.lease.scaladsl.LeaseProvider
import akka.testkit.TestKit
import com.typesafe.config.ConfigFactory
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}

import scala.collection.immutable
import scala.concurrent.{ExecutionContext, Future}

class LeaseContentionSpec extends TestKit(ActorSystem("LeaseContentionSpec", ConfigFactory.parseString(
  """
    akka.loglevel = INFO
    akka.coordination.lease.kubernetes {
      api-service-host = localhost
      api-service-port = 8080
      namespace = "lease"
      namespace-path = ""
      secure-api-server = false
    }
  """
))) with WordSpecLike with Matchers with ScalaFutures with BeforeAndAfterAll {

  implicit val patience: PatienceConfig = PatienceConfig(testKitSettings.DefaultTimeout.duration)

  // for cleanup
  val k8sApi = new KubernetesApiImpl(system,
    KubernetesSettings(system, TimeoutSettings(system.settings.config.getConfig("akka.coordination.lease.kubernetes"))))
  val lease1 = "contended-lease"
  val lease2 = "contended-lease-2"

  override protected def beforeAll(): Unit = {
    k8sApi.removeLease(lease1).futureValue
    k8sApi.removeLease(lease2).futureValue
  }

  override protected def afterAll(): Unit = {
    TestKit.shutdownActorSystem(system)
  }

  "A lease under contention" should {

    "only allow one client to acquire the lease" in {
      val underTest = LeaseProvider(system)
      val nrClients = 30
      // too many clients = HTTP request queue of the pool fills up
      implicit val ec = ExecutionContext.fromExecutor(Executors.newFixedThreadPool(nrClients))

      // could make this more contended with a countdown latch so they all start at the same time
      val leases: immutable.Seq[(String, Boolean)] = Future.sequence((0 until nrClients).map(i => {
        val clientName = s"client$i"
        val lease = underTest.getLease(lease1, KubernetesLease.configPath, clientName)
        Future {
          lease.acquire()
        }.flatMap(identity).map(granted => (clientName, granted))
      })).futureValue

      val numberGranted = leases.count { case (_, granted) => granted }
      withClue(s"More than one lease granted $leases") {
        numberGranted shouldEqual 1
      }
    }
  }
}
Example 172
Source File: AkkaManagementSettings.scala From akka-management with Apache License 2.0 | 5 votes |
package akka.management

import java.net.InetAddress
import java.util.Optional

import scala.collection.JavaConverters._
import scala.collection.immutable
import scala.compat.java8.OptionConverters._

import akka.annotation.InternalApi
import com.typesafe.config.Config

final class AkkaManagementSettings(val config: Config) {
  private val managementConfig = config.getConfig("akka.management")

  object Http {
    private val cc = managementConfig.getConfig("http")

    val Hostname: String = {
      val hostname = cc.getString("hostname")
      if (hostname == "<hostname>") InetAddress.getLocalHost.getHostAddress
      else if (hostname.trim() == "") InetAddress.getLocalHost.getHostAddress
      else hostname
    }

    val Port: Int = {
      val p = cc.getInt("port")
      require(0 to 65535 contains p, s"akka.management.http.port must be 0 through 65535 (was ${p})")
      p
    }

    val EffectiveBindHostname: String = cc.getString("bind-hostname") match {
      case ""    => Hostname
      case value => value
    }

    val EffectiveBindPort: Int = cc.getString("bind-port") match {
      case "" => Port
      case value =>
        val p = value.toInt
        require(0 to 65535 contains p, s"akka.management.http.bind-port must be 0 through 65535 (was ${p})")
        p
    }

    val BasePath: Option[String] =
      Option(cc.getString("base-path")).flatMap(it => if (it.trim == "") None else Some(it))

    val RouteProviders: immutable.Seq[NamedRouteProvider] = {
      def validFQCN(value: Any) = {
        value != null &&
        value != "null" &&
        value.toString.trim.nonEmpty
      }

      cc.getConfig("routes")
        .root
        .unwrapped
        .asScala
        .collect {
          case (name, value) if validFQCN(value) => NamedRouteProvider(name, value.toString)
        }
        .toList
    }

    val RouteProvidersReadOnly: Boolean = cc.getBoolean("route-providers-read-only")
  }
}

@InternalApi private[akka] object AkkaManagementSettings {

  implicit class HasDefined(val config: Config) {
    def hasDefined(key: String): Boolean =
      config.hasPath(key) && config.getString(key).trim.nonEmpty &&
      config.getString(key) != s"<$key>"

    def optDefinedValue(key: String): Option[String] =
      if (hasDefined(key)) Some(config.getString(key)) else None

    def optValue(key: String): Option[String] =
      config.getString(key) match {
        case ""    => None
        case other => Some(other)
      }
  }
}

final case class NamedRouteProvider(name: String, fullyQualifiedClassName: String)
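To see what these settings resolve to, instantiate the class against a system's configuration; a minimal sketch, assuming akka-management's reference.conf is on the classpath so the akka.management.http keys exist:

import akka.actor.ActorSystem
import akka.management.AkkaManagementSettings

object SettingsUsage extends App {
  val system = ActorSystem("mgmt")
  val settings = new AkkaManagementSettings(system.settings.config)
  // With the defaults, the hostname resolves to the local host address and
  // the bind host/port fall back to the public values.
  println(s"public: ${settings.Http.Hostname}:${settings.Http.Port}")
  println(s"bind:   ${settings.Http.EffectiveBindHostname}:${settings.Http.EffectiveBindPort}")
  system.terminate()
}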
Example 173
Source File: HealthCheckSettings.scala From akka-management with Apache License 2.0 | 5 votes |
package akka.management

import com.typesafe.config.Config
import akka.util.JavaDurationConverters._

import scala.collection.JavaConverters._
import scala.collection.immutable
import scala.concurrent.duration.FiniteDuration

final case class NamedHealthCheck(name: String, fullyQualifiedClassName: String)

object HealthCheckSettings {
  def apply(config: Config): HealthCheckSettings = {
    def validFQCN(value: Any) = {
      value != null &&
      value != "null" &&
      value.toString.trim.nonEmpty
    }

    new HealthCheckSettings(
      config
        .getConfig("readiness-checks")
        .root
        .unwrapped
        .asScala
        .collect {
          case (name, value) if validFQCN(value) => NamedHealthCheck(name, value.toString)
        }
        .toList,
      config
        .getConfig("liveness-checks")
        .root
        .unwrapped
        .asScala
        .collect {
          case (name, value) if validFQCN(value) => NamedHealthCheck(name, value.toString)
        }
        .toList,
      config.getString("readiness-path"),
      config.getString("liveness-path"),
      config.getDuration("check-timeout").asScala
    )
  }
}

final case class HealthCheckSettings(
    readinessChecks: immutable.Seq[NamedHealthCheck],
    livenessChecks: immutable.Seq[NamedHealthCheck],
    readinessPath: String,
    livenessPath: String,
    checkTimeout: FiniteDuration) {
  def getCheckTimeout(): java.time.Duration = checkTimeout.asJava
}
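For illustration, the settings can be parsed from a hand-written config block; the check class name below is a hypothetical FQCN and the field names follow the case class above:

import akka.management.HealthCheckSettings
import com.typesafe.config.ConfigFactory

object HealthCheckSettingsUsage extends App {
  val settings = HealthCheckSettings(ConfigFactory.parseString("""
    readiness-checks { example = "com.example.MyReadinessCheck" }
    liveness-checks {}
    readiness-path = "ready"
    liveness-path = "alive"
    check-timeout = 1s
  """))
  println(settings.readinessChecks)   // List(NamedHealthCheck(example,com.example.MyReadinessCheck))
  println(settings.getCheckTimeout()) // PT1S
}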
Example 174
Source File: ClusterHttpManagementProtocol.scala From akka-management with Apache License 2.0 | 5 votes |
package akka.management.cluster

import scala.collection.immutable

import akka.annotation.InternalApi
import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport
import spray.json.DefaultJsonProtocol
import spray.json.RootJsonFormat

final case class ClusterUnreachableMember(node: String, observedBy: immutable.Seq[String])
final case class ClusterMember(node: String, nodeUid: String, status: String, roles: Set[String])

object ClusterMember {
  implicit val clusterMemberOrdering: Ordering[ClusterMember] = Ordering.by(_.node)
}

final case class ClusterMembers(
    selfNode: String,
    members: Set[ClusterMember],
    unreachable: immutable.Seq[ClusterUnreachableMember],
    leader: Option[String],
    oldest: Option[String],
    oldestPerRole: Map[String, String])

final case class ClusterHttpManagementMessage(message: String)

final case class ShardRegionInfo(shardId: String, numEntities: Int)

final case class ShardDetails(regions: immutable.Seq[ShardRegionInfo])

sealed trait ClusterHttpManagementOperation
case object Down extends ClusterHttpManagementOperation
case object Leave extends ClusterHttpManagementOperation
case object Join extends ClusterHttpManagementOperation

@InternalApi private[akka] object ClusterHttpManagementOperation {
  def fromString(value: String): Option[ClusterHttpManagementOperation] =
    Vector(Down, Leave, Join).find(_.toString.equalsIgnoreCase(value))
}

trait ClusterHttpManagementJsonProtocol extends SprayJsonSupport with DefaultJsonProtocol {
  implicit val clusterUnreachableMemberFormat: RootJsonFormat[ClusterUnreachableMember] =
    jsonFormat2(ClusterUnreachableMember)
  implicit val clusterMemberFormat: RootJsonFormat[ClusterMember] = jsonFormat4(ClusterMember.apply)
  implicit val clusterMembersFormat: RootJsonFormat[ClusterMembers] = jsonFormat6(ClusterMembers)
  implicit val clusterMemberMessageFormat: RootJsonFormat[ClusterHttpManagementMessage] =
    jsonFormat1(ClusterHttpManagementMessage)
  implicit val shardRegionInfoFormat: RootJsonFormat[ShardRegionInfo] = jsonFormat2(ShardRegionInfo)
  implicit val shardDetailsFormat: RootJsonFormat[ShardDetails] = jsonFormat1(ShardDetails)
}
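With the protocol trait mixed in, these case classes serialize straight to JSON; a minimal sketch with hypothetical node addresses:

import akka.management.cluster._
import spray.json._

object ProtocolUsage extends ClusterHttpManagementJsonProtocol with App {
  val self = "akka://cluster@127.0.0.1:2552" // hypothetical address
  val members = ClusterMembers(
    selfNode = self,
    members = Set(ClusterMember(self, "1234", "Up", Set("dc-default"))),
    unreachable = Nil,
    leader = Some(self),
    oldest = Some(self),
    oldestPerRole = Map.empty)
  println(members.toJson.prettyPrint)
  // fromString matches case-insensitively against the operation's toString:
  assert(ClusterHttpManagementOperation.fromString("down") contains Down)
}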
Example 175
Source File: TableLoader.scala From carbondata with Apache License 2.0 | 5 votes |
package org.apache.spark.util

import java.util.Properties

import scala.collection.{immutable, mutable}

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import org.apache.spark.sql._
import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.execution.command.management.CarbonLoadDataCommand

import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.core.datastore.impl.FileFactory
import org.apache.carbondata.core.util.CarbonProperties

// scalastyle:off
object TableLoader {

  def extractOptions(propertiesFile: String): immutable.Map[String, String] = {
    val props = new Properties
    val path = new Path(propertiesFile)
    val fs = path.getFileSystem(FileFactory.getConfiguration)
    props.load(fs.open(path))
    val entries = props.entrySet().iterator()
    val map = new mutable.HashMap[String, String]()
    System.out.println("properties file:")
    while (entries.hasNext) {
      val entry = entries.next()
      System.out.println(s"${entry.getKey}=${entry.getValue}")
      map.put(entry.getKey.asInstanceOf[String], entry.getValue.asInstanceOf[String])
    }
    immutable.Map(map.toSeq: _*)
  }

  def extractStorePath(map: immutable.Map[String, String]): String = {
    map.get(CarbonCommonConstants.STORE_LOCATION) match {
      case Some(path) => path
      case None => throw new Exception(s"${CarbonCommonConstants.STORE_LOCATION} can't be empty")
    }
  }

  def loadTable(spark: SparkSession, dbName: Option[String], tableName: String, inputPaths: String,
      options: scala.collection.immutable.Map[String, String]): Unit = {
    CarbonLoadDataCommand(dbName, tableName, inputPaths, Nil, options, false).run(spark)
  }

  def main(args: Array[String]): Unit = {
    if (args.length < 3) {
      System.err.println("Usage: TableLoader <properties file> <table name> <input files>")
      System.exit(1)
    }
    System.out.println("parameter list:")
    args.foreach(System.out.println)
    val map = extractOptions(TableAPIUtil.escape(args(0)))
    val storePath = extractStorePath(map)
    System.out.println(s"${CarbonCommonConstants.STORE_LOCATION}:$storePath")
    val (dbName, tableName) = TableAPIUtil.parseSchemaName(TableAPIUtil.escape(args(1)))
    System.out.println(s"table name: $dbName.$tableName")
    val inputPaths = TableAPIUtil.escape(args(2))
    val spark = TableAPIUtil.spark(storePath, s"TableLoader: $dbName.$tableName")
    loadTable(spark, Option(dbName), tableName, inputPaths, map)
  }
}
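A hedged sketch of driving the loader from code; the file paths, table name, and the property values below are placeholders and assume a working Spark plus CarbonData environment:

// /tmp/load.properties (hypothetical contents):
//   carbon.storelocation=/tmp/carbon-store
//   delimiter=,
TableLoader.main(Array(
  "/tmp/load.properties", // options file, read by extractOptions
  "default.sales",        // dbName.tableName, split by TableAPIUtil.parseSchemaName
  "/tmp/input/sales.csv"  // input path(s) to load
))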
Example 176
Source File: Precisions.scala From chronicler with Apache License 2.0 | 5 votes |
package com.github.fsanaulla.chronicler.core.enums import enumeratum.EnumEntry import scala.collection.immutable sealed abstract class Precision extends EnumEntry with HasNone { override def toString: String = entryName } object Precisions extends enumeratum.Enum[Precision] { val values: immutable.IndexedSeq[Precision] = findValues case object Nanoseconds extends Precision { override val entryName: String = "ns" } case object Microseconds extends Precision { override val entryName: String = "u" } case object Milliseconds extends Precision { override val entryName: String = "ms" } case object Seconds extends Precision { override val entryName: String = "s" } case object Minutes extends Precision { override val entryName: String = "m" } case object Hours extends Precision { override val entryName: String = "h" } case object None extends Precision { override val isNone: Boolean = true } }
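Because Precision is an enumeratum enum, lookups by the serialized name come for free; a minimal sketch:

import com.github.fsanaulla.chronicler.core.enums.{Precision, Precisions}

object PrecisionUsage extends App {
  val p: Precision = Precisions.withName("ms") // throws on unknown names
  assert(p == Precisions.Milliseconds)
  assert(Precisions.withNameOption("bogus").isEmpty) // total variant returns Option
  assert(Precisions.Seconds.toString == "s") // toString is overridden to the entryName
}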
Example 177
Source File: Epochs.scala From chronicler with Apache License 2.0 | 5 votes |
package com.github.fsanaulla.chronicler.core.enums import enumeratum.EnumEntry import scala.collection.immutable sealed abstract class Epoch extends EnumEntry with HasNone { override def toString: String = entryName } object Epochs extends enumeratum.Enum[Epoch] { val values: immutable.IndexedSeq[Epoch] = findValues case object Nanoseconds extends Epoch { override val entryName: String = "ns" } case object Microseconds extends Epoch { override val entryName: String = "u" } case object Milliseconds extends Epoch { override val entryName: String = "ms" } case object Seconds extends Epoch { override val entryName: String = "s" } case object Minutes extends Epoch { override val entryName: String = "m" } case object Hours extends Epoch { override val entryName: String = "h" } case object None extends Epoch { override val isNone: Boolean = true } }
Example 178
Source File: TestActor.scala From akka-persistence-couchbase with Apache License 2.0 | 5 votes |
package akka.persistence.couchbase import akka.actor.{ActorLogging, ActorRef, Props} import akka.persistence.{DeleteMessagesSuccess, PersistentActor, RecoveryCompleted} import akka.persistence.couchbase.TestActor.{GetLastRecoveredEvent, SaveSnapshot} import akka.persistence.journal.Tagged import scala.collection.immutable object TestActor { def props(persistenceId: String): Props = props(persistenceId, "couchbase-journal.write") def props(persistenceId: String, journalId: String): Props = Props(new TestActor(persistenceId, journalId)) final case class PersistAll(events: immutable.Seq[String]) final case class PersistAllAsync(events: immutable.Seq[String]) final case class DeleteTo(seqNr: Long) final case object SaveSnapshot final case object GetLastRecoveredEvent case object Stop } class TestActor(override val persistenceId: String, override val journalPluginId: String) extends PersistentActor with ActorLogging { var lastDelete: ActorRef = _ var lastRecoveredEvent: String = _ val receiveRecover: Receive = { case evt: String => lastRecoveredEvent = evt case RecoveryCompleted => log.debug("Recovery completed, lastRecoveredEvent: {}", lastRecoveredEvent) } val receiveCommand: Receive = { case cmd: String => persist(cmd) { evt => sender() ! evt + "-done" } case cmd: Tagged => persist(cmd) { evt => val msg = s"${evt.payload}-done" sender() ! msg } case TestActor.PersistAll(events) => val size = events.size val handler = { var count = 0 _: String => { count += 1 if (count == size) sender() ! "PersistAll-done" } } persistAll(events)(handler) case TestActor.PersistAllAsync(events) => val size = events.size val handler = { var count = 0 evt: String => { count += 1 if (count == size) sender() ! "PersistAllAsync-done" } } persistAllAsync(events)(handler) sender() ! "PersistAllAsync-triggered" case TestActor.DeleteTo(seqNr) => lastDelete = sender() deleteMessages(seqNr) case d: DeleteMessagesSuccess => lastDelete ! d case SaveSnapshot => saveSnapshot("dumb-snapshot-body") sender() ! snapshotSequenceNr case GetLastRecoveredEvent => sender() ! lastRecoveredEvent case TestActor.Stop => context.stop(self) } }
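A rough interaction sketch in a TestKit spec, assuming a reachable Couchbase journal behind the default "couchbase-journal.write" plugin id:

import akka.actor.ActorSystem
import akka.testkit.{ImplicitSender, TestKit}

class TestActorUsageSpec extends TestKit(ActorSystem("usage")) with ImplicitSender {
  val ref = system.actorOf(TestActor.props("pid-1"))

  ref ! "evt-1"
  expectMsg("evt-1-done")

  ref ! TestActor.PersistAll(List("evt-2", "evt-3"))
  expectMsg("PersistAll-done")

  ref ! TestActor.GetLastRecoveredEvent // replies with the last event replayed on recovery
}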
Example 179
Source File: BaseTokenIterator.scala From tethys with Apache License 2.0 | 5 votes |
package tethys.readers.tokens import tethys.commons.TokenNode import tethys.commons.TokenNode._ import tethys.readers.tokens.TokenIterator.CopySupport import scala.annotation.tailrec import scala.collection.mutable import scala.collection.immutable trait BaseTokenIterator extends TokenIterator { override def next(): this.type = { nextToken() this } override def skipExpression(): this.type = { val token = currentToken() if(token.isStructStart) skipStructure(1).next() else next() } override def collectExpression(): TokenIterator with CopySupport = { val queue = createTokenNode() match { case (node, 0) => immutable.Queue[TokenNode](node) case (node, _) => collectTokens(1, immutable.Queue.newBuilder[TokenNode] += node) } nextToken()//set pointer after this expression end new QueueIterator(queue) } @tailrec private def skipStructure(started: Int): this.type = { if(started == 0) this else { val token = nextToken() if(token.isStructStart) skipStructure(started + 1) else if(token.isStructEnd) skipStructure(started - 1) else skipStructure(started) } } @tailrec private def collectTokens(started: Int, builder: mutable.Builder[TokenNode, immutable.Queue[TokenNode]]): immutable.Queue[TokenNode] = { if(started == 0) builder.result() else { nextToken() val (node, shift) = createTokenNode() collectTokens(started + shift, builder += node) } } private def createTokenNode(): (TokenNode, Int) = { val token = currentToken() if(token.isArrayStart) ArrayStartNode -> 1 else if(token.isArrayEnd) ArrayEndNode -> -1 else if(token.isObjectStart) ObjectStartNode -> 1 else if(token.isObjectEnd) ObjectEndNode -> -1 else if(token.isNullValue) NullValueNode -> 0 else if(token.isFieldName) FieldNameNode(fieldName()) -> 0 else if(token.isStringValue) StringValueNode(string()) -> 0 else if(token.isNumberValue) number() match { case v: java.lang.Short => ShortValueNode(v) -> 0 case v: java.lang.Integer => IntValueNode(v) -> 0 case v: java.lang.Long => LongValueNode(v) -> 0 case v: java.lang.Float => FloatValueNode(v) -> 0 case v: java.lang.Double => DoubleValueNode(v) -> 0 case n => NumberValueNode(n) -> 0 } else BooleanValueNode(boolean()) -> 0 } }
Example 180
Source File: IncomingMessage.scala From cluster-broccoli with Apache License 2.0 | 5 votes |
package de.frosner.broccoli.websocket

import de.frosner.broccoli.models._
import enumeratum._
import play.api.libs.json._

import scala.collection.immutable

sealed trait IncomingMessage

object IncomingMessage {

  // Message discriminator; PlayJsonEnum supplies the Reads/Writes for Type.
  sealed trait Type extends EnumEntry
  object Type extends Enum[Type] with PlayJsonEnum[Type] {
    override val values: immutable.IndexedSeq[Type] = findValues

    case object AddInstance extends Type
    case object DeleteInstance extends Type
    case object UpdateInstance extends Type
    case object GetInstanceTasks extends Type
  }

  final case class AddInstance(instanceCreation: InstanceCreation) extends IncomingMessage
  final case class DeleteInstance(instanceId: String) extends IncomingMessage
  final case class UpdateInstance(instanceUpdate: InstanceUpdate) extends IncomingMessage
  final case class GetInstanceTasks(instanceId: String) extends IncomingMessage

  implicit val incomingMessageFormat: Format[IncomingMessage] = Format.apply(
    (JsPath \ "messageType").read[Type].flatMap(readsPayload),
    Writes {
      case AddInstance(create)        => write(Type.AddInstance, create)
      case DeleteInstance(instance)   => write(Type.DeleteInstance, instance)
      case UpdateInstance(update)     => write(Type.UpdateInstance, update)
      case GetInstanceTasks(instance) => write(Type.GetInstanceTasks, instance)
    }
  )

  private def readsPayload(`type`: Type): Reads[IncomingMessage] = {
    val payload = JsPath \ "payload"
    `type` match {
      case Type.AddInstance      => payload.read[InstanceCreation].map(AddInstance)
      case Type.DeleteInstance   => payload.read[String].map(DeleteInstance)
      case Type.UpdateInstance   => payload.read[InstanceUpdate].map(UpdateInstance)
      case Type.GetInstanceTasks => payload.read[String].map(GetInstanceTasks)
    }
  }

  private def write[P](`type`: Type, payload: P)(implicit writesP: Writes[P]): JsObject =
    Json.obj("messageType" -> `type`, "payload" -> payload)
}
Example 181
Source File: NomadHttpClientIntegrationSpec.scala From cluster-broccoli with Apache License 2.0 | 5 votes |
package de.frosner.broccoli.nomad

import cats.instances.future._
import com.netaporter.uri.Uri
import com.netaporter.uri.dsl._
import de.frosner.broccoli.nomad.models.{Allocation, Job, WithId}
import de.frosner.broccoli.test.contexts.WSClientContext
import de.frosner.broccoli.test.contexts.docker.BroccoliDockerContext
import de.frosner.broccoli.test.contexts.docker.BroccoliTestService.{Broccoli, Nomad}
import org.scalacheck.Gen
import org.specs2.concurrent.ExecutionEnv
import org.specs2.mutable.Specification
import org.specs2.specification.mutable.ExecutionEnvironment
import play.api.libs.json.Json
import play.api.libs.ws.WSClient

import scala.collection.immutable
import scala.concurrent.blocking
import scala.concurrent.duration._

class NomadHttpClientIntegrationSpec
    extends Specification
    with WSClientContext
    with BroccoliDockerContext
    with ExecutionEnvironment {

  override def broccoliDockerConfig: BroccoliDockerContext.Configuration =
    BroccoliDockerContext.Configuration.services(Broccoli, Nomad)

  private val broccoliApi = "http://localhost:9000/api/v1"

  override def is(implicit executionEnv: ExecutionEnv): Any =
    "The NomadHttpClient" should {
      "get allocations for a running nomad job" >> { wsClient: WSClient =>
        // Generate a random identifier for the instance
        val identifier = Gen.resize(10, Gen.identifier).sample.get
        val client = new NomadHttpClient(Uri.parse("http://localhost:4646"), wsClient)
        (for {
          // Create and start a simple instance to look at its allocations
          _ <- wsClient
            .url(broccoliApi / "instances")
            .post(
              Json.obj("templateId" -> "http-server",
                       "parameters" -> Json.obj(
                         "id" -> identifier
                       )))
            .map(response => {
              // Ensure that the instance was created
              response.status must beEqualTo(201)
              response
            })
          _ <- wsClient
            .url(broccoliApi / "instances" / identifier)
            .post(Json.obj("status" -> "running"))
            .map(response => {
              response.status must beEqualTo(200)
              // Wait until the service is up
              blocking(Thread.sleep(1.seconds.toMillis))
              response
            })
          allocations <- client.getAllocationsForJob(shapeless.tag[Job.Id](identifier)).value
        } yield {
          allocations must beRight(
            (v: WithId[immutable.Seq[Allocation]]) => (v.jobId === identifier) and (v.payload must have length 1))
        }).await(5, broccoliDockerConfig.startupPatience + 2.seconds)
      }
    }
}
Example 182
Source File: MethodNaming.scala From ScalaClean with Apache License 2.0 | 5 votes |
import scala.collection.immutable //more a test that the names are unique and refer appropriately package name1 { object A extends App { x.immutableSeq(Array(getClass)) } object x { def immutableSeq(arr: Array[Class[_]]): immutable.Seq[Class[_]] = ??? def immutableSeq[T](arr: Array[T]): immutable.Seq[T] = ??? } } package name2 { object A extends App { x.immutableSeq(Array(1,2,3)) } object x { def immutableSeq(arr: Array[Class[_]]): immutable.Seq[Class[_]] = ??? def immutableSeq[T](arr: Array[T]): immutable.Seq[T] = ??? } }
Example 183
Source File: ProjectSet.scala From ScalaClean with Apache License 2.0 | 5 votes |
package scalaclean.model.impl import java.nio.file.Path import scalaclean.model._ import scala.collection.immutable import scala.reflect.ClassTag class ProjectSet(projectPropertyPaths: Path*) extends ProjectModel { val projects: List[Project] = projectPropertyPaths.toList map { p => Project(p, this) } val (legacyElements: Map[LegacyElementId, Seq[ElementModelImpl]], elements: Map[ElementId, ElementModelImpl]) = { val (elements, rels: immutable.Seq[BasicRelationshipInfo]) = projects.map(_.read).unzip val elementsMap: Map[LegacyElementId, Seq[ElementModelImpl]] = elements.flatten.groupBy(_.legacySymbol) val modelElements = elements.flatten.toIterator.map(e => e.elementId -> e).toMap def duplicates = { val skipped = elementsMap.filter { case (k, v) => v.size != 1 } val skipped2 = elements.flatten.groupBy(_.elementId).filter { case (k, v) => v.size != 1 } (skipped, skipped2) } if (elements.flatten.size != modelElements.size) { val (orig, newTokens) = duplicates println("Duplicate OLD SYMBOLS ") orig.foreach { case (s, values) => println(s" $s") } println("Duplicate NEW SYMBOLS ") newTokens.foreach { case (s, values) => println(s" $s") values.foreach { v => println(s" $v") } } throw new IllegalStateException("Duplicate elements found") } val relsFrom = rels.reduce(_ + _) val relsTo = relsFrom.byTo relsFrom.complete(modelElements) modelElements.values foreach (_.complete(modelElements, relsFrom = relsFrom, relsTo = relsTo)) ModelReader.finished() (elementsMap, modelElements) } override def legacySymbol[T <: ModelElement](symbol: LegacyElementId)(implicit tpe: ClassTag[T]): T = { val targetSymbol = if (symbol.value.startsWith("G:")) symbol else LegacyElementId("G:" + symbol.value) legacyElements.get(targetSymbol) match { case None => throw new IllegalArgumentException(s"Unknown symbol $symbol") case Some(x) => x.collectFirst {case x: T => x} getOrElse ( throw new IllegalArgumentException(s"Unexpected symbol $symbol - found a $x when expecting a ${tpe.runtimeClass}")) } } override def getLegacySymbol[T <: ModelElement](symbol: LegacyElementId)(implicit tpe: ClassTag[T]): Option[T] = { legacyElements.get(symbol) match { case None => None case Some(x) => x.collectFirst {case x: T => x} orElse ( throw new IllegalArgumentException(s"Unexpected symbol $symbol - found a $x when expecting a ${tpe.runtimeClass}")) } } override def element[T <: ModelElement](id: ElementId)(implicit tpe: ClassTag[T]): T = { elements.get(id) match { case None => throw new IllegalArgumentException(s"Unknown element $id") case Some(x: T) => x case Some(x) => throw new IllegalArgumentException(s"Unexpected element $id - found a $x when expecting a ${tpe.runtimeClass}") } } override def getElement[T <: ModelElement](id: ElementId)(implicit tpe: ClassTag[T]): Option[T] = { elements.get(id) match { case None => None case Some(x: T) => Some(x) case Some(x) => throw new IllegalArgumentException(s"Unexpected element $id - found a $x when expecting a ${tpe.runtimeClass}") } } override def size: Int = elements.size override def allOf[T <: ModelElement : ClassTag]: Iterator[T] = { elements.values.iterator collect { case x: T => x } } }
Example 184
Source File: DynamicLeastShardAllocationStrategy.scala From cloudstate with Apache License 2.0 | 5 votes |
package io.cloudstate.proxy.eventsourced import akka.actor.ActorRef import akka.cluster.sharding.ShardCoordinator.ShardAllocationStrategy import akka.cluster.sharding.ShardRegion.ShardId import scala.collection.immutable import scala.concurrent.Future class DynamicLeastShardAllocationStrategy(rebalanceThreshold: Int, maxSimultaneousRebalance: Int, rebalanceNumber: Int, rebalanceFactor: Double) extends ShardAllocationStrategy with Serializable { def this(rebalanceThreshold: Int, maxSimultaneousRebalance: Int) = this(rebalanceThreshold, maxSimultaneousRebalance, rebalanceThreshold, 0.0) override def allocateShard( requester: ActorRef, shardId: ShardId, currentShardAllocations: Map[ActorRef, immutable.IndexedSeq[ShardId]] ): Future[ActorRef] = { val (regionWithLeastShards, _) = currentShardAllocations.minBy { case (_, v) => v.size } Future.successful(regionWithLeastShards) } override def rebalance(currentShardAllocations: Map[ActorRef, immutable.IndexedSeq[ShardId]], rebalanceInProgress: Set[ShardId]): Future[Set[ShardId]] = if (rebalanceInProgress.size < maxSimultaneousRebalance) { val (_, leastShards) = currentShardAllocations.minBy { case (_, v) => v.size } val mostShards = currentShardAllocations .collect { case (_, v) => v.filterNot(s => rebalanceInProgress(s)) } .maxBy(_.size) val difference = mostShards.size - leastShards.size if (difference > rebalanceThreshold) { val factoredRebalanceLimit = (rebalanceFactor, rebalanceNumber) match { // This condition is to maintain semantic backwards compatibility, from when rebalanceThreshold was also // the number of shards to move. case (0.0, 0) => rebalanceThreshold case (0.0, justAbsolute) => justAbsolute case (justFactor, 0) => math.max((justFactor * mostShards.size).round.toInt, 1) case (factor, absolute) => math.min(math.max((factor * mostShards.size).round.toInt, 1), absolute) } // The ideal number to rebalance to so these nodes have an even number of shards val evenRebalance = difference / 2 val n = math.min(math.min(factoredRebalanceLimit, evenRebalance), maxSimultaneousRebalance - rebalanceInProgress.size) Future.successful(mostShards.sorted.take(n).toSet) } else emptyRebalanceResult } else emptyRebalanceResult private[this] final val emptyRebalanceResult = Future.successful(Set.empty[ShardId]) }
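The strategy plugs into classic cluster sharding at region start-up; a sketch with placeholder entity wiring:

import akka.actor.{ActorSystem, PoisonPill, Props}
import akka.cluster.sharding.{ClusterSharding, ClusterShardingSettings, ShardRegion}
import io.cloudstate.proxy.eventsourced.DynamicLeastShardAllocationStrategy

object ShardingWiring extends App {
  val system = ActorSystem("proxy")
  // Placeholder extractors; real ones derive ids from the message type.
  val extractEntityId: ShardRegion.ExtractEntityId = { case msg => ("entity-1", msg) }
  val extractShardId: ShardRegion.ExtractShardId = _ => "shard-1"

  val region = ClusterSharding(system).start(
    typeName = "eventsourced",
    entityProps = Props.empty, // stand-in for the real entity props
    settings = ClusterShardingSettings(system),
    extractEntityId = extractEntityId,
    extractShardId = extractShardId,
    allocationStrategy = new DynamicLeastShardAllocationStrategy(
      rebalanceThreshold = 1,
      maxSimultaneousRebalance = 10,
      rebalanceNumber = 5,    // move at most 5 shards per pass...
      rebalanceFactor = 0.2), // ...or 20% of the busiest region, whichever is smaller
    handOffStopMessage = PoisonPill)
}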
Example 185
Source File: JavascriptExecutor.scala From graphcool-framework with Apache License 2.0 | 5 votes |
package cool.graph.javascriptEngine

import akka.actor.ActorSystem
import akka.pattern.ask
import akka.util.Timeout
import com.typesafe.jse.Engine.JsExecutionResult
import cool.graph.cuid.Cuid
import cool.graph.javascriptEngine.lib.{Engine, Trireme}

import scala.collection.immutable
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import scala.concurrent.duration._

object JavascriptExecutor {
  implicit val system = ActorSystem("jse-system")
  implicit val timeout = Timeout(5.seconds)

  def execute(program: String): Future[Result] = {
    // note: probably not the way to do this ...
    val engine = system.actorOf(Trireme.props(), s"engine-${Cuid.createCuid()}")

    (engine ? Engine.ExecuteJs(program, immutable.Seq(), timeout.duration))
      .mapTo[JsExecutionResult]
      .map(res => Result(result = res.output.utf8String, error = res.error.utf8String))
  }

  def executeFunction(program: String): Future[Map[String, Any]] = {
    import spray.json._
    import DefaultJsonProtocol._

    // todo: copied from shared.Utils. Extract to own module
    implicit object AnyJsonFormat extends JsonFormat[Any] {
      def write(x: Any) = x match {
        case m: Map[_, _] => JsObject(m.asInstanceOf[Map[String, Any]].mapValues(write))
        case l: List[Any] => JsArray(l.map(write).toVector)
        case n: Int       => JsNumber(n)
        case n: Long      => JsNumber(n)
        case s: String    => JsString(s)
        case true         => JsTrue
        case false        => JsFalse
        case v: JsValue   => v
        case null         => JsNull
        case r            => JsString(r.toString)
      }

      def read(x: JsValue): Any =
        x match {
          case l: JsArray   => l.elements.map(read).toList
          case m: JsObject  => m.fields.mapValues(read) // recurse with read, not write
          case s: JsString  => s.value
          case n: JsNumber  => n.value
          case b: JsBoolean => b.value
          case JsNull       => null
          case _            => sys.error("implement all scalar types!")
        }
    }

    execute(program).map(res => {
      if (!res.error.trim.isEmpty) {
        throw new JsExecutionError(res.error)
      }

      res.result.parseJson.asJsObject.convertTo[Map[String, Any]]
    })
  }
}

case class Result(result: String, error: String)

class JsExecutionError(message: String) extends Error
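Whatever the script prints to standard output becomes the result, so executeFunction conventionally prints a single JSON object; a hedged sketch:

import cool.graph.javascriptEngine.JavascriptExecutor

import scala.concurrent.Await
import scala.concurrent.duration._

object JsUsage extends App {
  val fields: Map[String, Any] = Await.result(
    JavascriptExecutor.executeFunction("""console.log(JSON.stringify({"sum": 1 + 2}))"""),
    10.seconds)
  println(fields("sum")) // 3, decoded from JsNumber as a BigDecimal
}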
Example 186
Source File: PubSubRouter.scala From graphcool-framework with Apache License 2.0 | 5 votes |
package cool.graph.messagebus.pubsub import akka.actor.{Actor, ActorRef, Terminated} import akka.routing._ import cool.graph.messagebus.pubsub.PubSubProtocol.{Envelope, Publish, Subscribe, Unsubscribe} import scala.collection.{immutable, mutable} object PubSubProtocol { case class Subscribe(topic: String, actorRef: ActorRef) case class Publish(topic: String, message: Any) case class Unsubscribe(topic: String, ref: ActorRef) case class Envelope(actualTopic: String, message: Any) } case class PubSubRouterAlt() extends Actor { val pubSubLogic = PubSubRoutingLogic() var router = Router(pubSubLogic, Vector.empty) override def receive: Receive = { case Subscribe(topic, ref) => context.watch(ref) router = router.addRoutee(PubSubRoutee(topic, ref)) case Publish(topic, message) => router.route(Envelope(topic, message), sender()) case Unsubscribe(topic, ref) => router = router.removeRoutee(PubSubRoutee(topic, ref)) case Terminated(a) => router = router.withRoutees(router.routees.collect { case routee @ PubSubRoutee(_, ref) if ref != a => routee }) } } case class PubSubRouter() extends Actor { val subscribers = mutable.HashMap[String, mutable.Set[ActorRef]]() override def receive: Receive = { case Subscribe(topic, ref) => context.watch(ref) subscribers.getOrElseUpdate(topic, mutable.Set.empty) += ref case Publish(topic, message) => subscribers.getOrElse(topic, mutable.Set.empty).foreach(_.tell(message, sender())) case Unsubscribe(topic, ref) => subscribers.getOrElse(topic, mutable.Set.empty).remove(ref) case Terminated(a) => subscribers.values.foreach(_.remove(a)) } } case class PubSubRoutee(topic: String, ref: ActorRef) extends Routee { override def send(message: Any, sender: ActorRef): Unit = { message match { case Envelope(_, payload) => ref.tell(payload, sender) case _ => } } } case class PubSubRoutingLogic() extends RoutingLogic { def select(message: Any, routees: immutable.IndexedSeq[Routee]): Routee = { val pubSubRoutees = routees.collect { case pubSubRoutee: PubSubRoutee => pubSubRoutee } message match { case Envelope(topic, _) => val targets = pubSubRoutees.filter(_.topic == topic) SeveralRoutees(targets.asInstanceOf[immutable.IndexedSeq[Routee]]) case _ => NoRoutee } } }
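A minimal wiring sketch for the mutable-map router; the listener actor is a placeholder:

import akka.actor.{Actor, ActorSystem, Props}
import cool.graph.messagebus.pubsub.PubSubProtocol.{Publish, Subscribe, Unsubscribe}
import cool.graph.messagebus.pubsub.PubSubRouter

object PubSubUsage extends App {
  val system = ActorSystem("pubsub")
  val router = system.actorOf(Props(PubSubRouter()))
  val listener = system.actorOf(Props(new Actor {
    def receive = { case msg => println(s"received: $msg") }
  }))

  router ! Subscribe("topic-a", listener)
  router ! Publish("topic-a", "hello") // listener prints: received: hello
  router ! Unsubscribe("topic-a", listener)
}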
Example 187
Source File: TestActor.scala From akka-persistence-cassandra with Apache License 2.0 | 5 votes |
package akka.persistence.cassandra.query import scala.collection.immutable import akka.actor.Props import akka.persistence.PersistentActor import akka.actor.ActorRef import akka.persistence.DeleteMessagesSuccess import akka.persistence.cassandra.EventWithMetaData import akka.persistence.journal.Tagged object TestActor { def props(persistenceId: String, journalId: String = "akka.persistence.cassandra.journal"): Props = Props(new TestActor(persistenceId, journalId)) final case class PersistAll(events: immutable.Seq[String]) final case class DeleteTo(seqNr: Long) } class TestActor(override val persistenceId: String, override val journalPluginId: String) extends PersistentActor { var lastDelete: ActorRef = _ val receiveRecover: Receive = { case evt: String => } val receiveCommand: Receive = { case cmd: String => persist(cmd) { evt => sender() ! evt + "-done" } case cmd: EventWithMetaData => persist(cmd) { evt => sender() ! s"$evt-done" } case cmd: Tagged => persist(cmd) { evt => val msg = s"${evt.payload}-done" sender() ! msg } case TestActor.PersistAll(events) => val size = events.size val handler = { var count = 0 evt: String => { count += 1 if (count == size) sender() ! "PersistAll-done" } } persistAll(events)(handler) case TestActor.DeleteTo(seqNr) => lastDelete = sender() deleteMessages(seqNr) case d: DeleteMessagesSuccess => lastDelete ! d } }
Example 188
Source File: EventsByTagStressSpec.scala From akka-persistence-cassandra with Apache License 2.0 | 5 votes |
package akka.persistence.cassandra import akka.persistence.cassandra.query.TestActor import akka.persistence.cassandra.query._ import akka.persistence.journal.Tagged import akka.persistence.query.NoOffset import akka.stream.testkit.TestSubscriber import akka.stream.testkit.scaladsl.TestSink import scala.collection.immutable import scala.concurrent.Future class EventsByTagStressSpec extends CassandraSpec(s""" akka.persistence.cassandra { events-by-tag { max-message-batch-size = 25 } } """) { implicit val ec = system.dispatcher val writers = 10 val readers = 20 val messages = 5000 "EventsByTag" must { "work under load" in { val pas = (0 until writers).map { i => system.actorOf(TestActor.props(s"pid$i")) } val eventsByTagQueries: immutable.Seq[(Int, TestSubscriber.Probe[(String, Int)])] = (0 until readers).map { i => val probe = queryJournal .eventsByTag("all", NoOffset) .map(i => { (i.persistenceId, i.event.asInstanceOf[Int]) }) .runWith(TestSink.probe) (i, probe) } system.log.info("Started events by tag queries") val writes: Future[Unit] = Future { system.log.info("Sending messages") (0 until messages).foreach { i => pas.foreach(ref => { ref ! Tagged(i, Set("all")) expectMsg(s"$i-done") }) } system.log.info("Sent messages") } writes.onComplete(result => system.log.info("{}", result)) system.log.info("Reading messages") var latestValues: Map[(Int, String), Int] = Map.empty.withDefault(_ => -1) (0 until messages).foreach { _ => (0 until writers).foreach { _ => eventsByTagQueries.foreach { case (probeNr, probe) => // should be in order per persistence id per probe val (pid, msg) = probe.requestNext() latestValues((probeNr, pid)) shouldEqual (msg - 1) latestValues += (probeNr, pid) -> msg } } } system.log.info("Received all messages {}", latestValues) } } }
Example 189
Source File: Models.scala From ticket-booking-aecor with Apache License 2.0 | 5 votes |
package ru.pavkin.booking.common.models import cats.Order import cats.implicits._ import cats.kernel.{ Eq, Monoid } import enumeratum._ import io.circe.{ Decoder, Encoder } import io.circe.generic.semiauto._ import ru.pavkin.booking.common.json.AnyValCoders._ import scala.collection.immutable case class Money(amount: BigDecimal) extends AnyVal object Money { implicit val monoid: Monoid[Money] = new Monoid[Money] { def empty: Money = Money(0) def combine(x: Money, y: Money): Money = Money(x.amount + y.amount) } } case class BookingKey(value: String) extends AnyVal case class ClientId(value: String) extends AnyVal object ClientId { implicit val eqInstance: Eq[ClientId] = Eq.fromUniversalEquals } case class ConcertId(value: String) extends AnyVal case class Row(num: Int) extends AnyVal case class SeatNumber(num: Int) extends AnyVal case class Seat(row: Row, number: SeatNumber) object Seat { def seat(row: Int, number: Int): Seat = Seat(Row(row), SeatNumber(number)) implicit val order: Order[Seat] = Order.by(s => (s.row.num, s.number.num)) implicit val decoder: Decoder[Seat] = deriveDecoder implicit val encoder: Encoder[Seat] = deriveEncoder } case class Ticket(seat: Seat, price: Money) object Ticket { implicit val decoder: Decoder[Ticket] = deriveDecoder implicit val encoder: Encoder[Ticket] = deriveEncoder } case class PaymentId(value: String) extends AnyVal sealed trait BookingStatus extends EnumEntry object BookingStatus extends Enum[BookingStatus] with CirceEnum[BookingStatus] { case object AwaitingConfirmation extends BookingStatus case object Confirmed extends BookingStatus case object Denied extends BookingStatus case object Canceled extends BookingStatus case object Settled extends BookingStatus def values: immutable.IndexedSeq[BookingStatus] = findValues }
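The cats instances are what downstream code relies on; a small sketch of summing prices with the Monoid and sorting seats with the Order:

import cats.implicits._
import ru.pavkin.booking.common.models._

object ModelsUsage extends App {
  // Monoid[Money] makes totalling ticket prices a one-liner.
  val total = List(Money(10), Money(15.50)).combineAll
  assert(total == Money(25.50))

  // Order[Seat] sorts row-major, then by seat number.
  val sorted = List(Seat.seat(2, 1), Seat.seat(1, 5)).sorted(Seat.order.toOrdering)
  assert(sorted.head == Seat.seat(1, 5))
}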
Example 190
Source File: BookingEventSerializer.scala From ticket-booking-aecor with Apache License 2.0 | 5 votes |
package ru.pavkin.booking.booking.serialization import aecor.data.Enriched import aecor.journal.postgres.PostgresEventJournal import aecor.journal.postgres.PostgresEventJournal.Serializer.TypeHint import cats.data.NonEmptyList import enumeratum.EnumEntry import ru.pavkin.booking.booking.entity._ import ru.pavkin.booking.booking.protobuf.msg import enumeratum._ import cats.syntax.either._ import scala.collection.immutable object BookingEventSerializer extends PostgresEventJournal.Serializer[Enriched[EventMetadata, BookingEvent]] { sealed trait Hint extends EnumEntry object Hint extends Enum[Hint] { case object AA extends Hint case object AB extends Hint case object AC extends Hint case object AD extends Hint case object AE extends Hint case object AF extends Hint case object AG extends Hint def values: immutable.IndexedSeq[Hint] = findValues } import Hint._ def serialize(a: Enriched[EventMetadata, BookingEvent]): (TypeHint, Array[Byte]) = a match { case Enriched(m, BookingPlaced(clientId, concertId, seats)) => AA.entryName -> msg.BookingPlaced(clientId, concertId, seats.toList, m.timestamp).toByteArray case Enriched(m, BookingConfirmed(tickets, expiresAt)) => AB.entryName -> msg.BookingConfirmed(tickets.toList, expiresAt, m.timestamp).toByteArray case Enriched(m, BookingDenied(reason)) => AC.entryName -> msg.BookingDenied(reason, m.timestamp).toByteArray case Enriched(m, BookingCancelled(reason)) => AD.entryName -> msg.BookingCancelled(reason, m.timestamp).toByteArray case Enriched(m, BookingExpired) => AE.entryName -> msg.BookingExpired(m.timestamp).toByteArray case Enriched(m, BookingPaid(paymentId)) => AF.entryName -> msg.BookingPaid(paymentId, m.timestamp).toByteArray case Enriched(m, BookingSettled) => AG.entryName -> msg.BookingSettled(m.timestamp).toByteArray } def deserialize(typeHint: TypeHint, bytes: Array[Byte]): Either[Throwable, Enriched[EventMetadata, BookingEvent]] = Either.catchNonFatal(Hint.withName(typeHint) match { case Hint.AA => val raw = msg.BookingPlaced.parseFrom(bytes) Enriched( EventMetadata(raw.timestamp), BookingPlaced(raw.clientId, raw.concertId, NonEmptyList.fromListUnsafe(raw.seats.toList)) ) case Hint.AB => val raw = msg.BookingConfirmed.parseFrom(bytes) Enriched( EventMetadata(raw.timestamp), BookingConfirmed(NonEmptyList.fromListUnsafe(raw.tickets.toList), raw.expiresAt) ) case Hint.AC => val raw = msg.BookingDenied.parseFrom(bytes) Enriched(EventMetadata(raw.timestamp), BookingDenied(raw.reason)) case Hint.AD => val raw = msg.BookingCancelled.parseFrom(bytes) Enriched(EventMetadata(raw.timestamp), BookingCancelled(raw.reason)) case Hint.AE => val raw = msg.BookingExpired.parseFrom(bytes) Enriched(EventMetadata(raw.timestamp), BookingExpired) case Hint.AF => val raw = msg.BookingPaid.parseFrom(bytes) Enriched(EventMetadata(raw.timestamp), BookingPaid(raw.paymentId)) case Hint.AG => val raw = msg.BookingSettled.parseFrom(bytes) Enriched(EventMetadata(raw.timestamp), BookingSettled) }) }
Example 191
Source File: WebGatewayLoader.scala From Scala-Reactive-Programming with MIT License | 5 votes |
import javax.inject.Inject import com.lightbend.lagom.scaladsl.api.ServiceLocator.NoServiceLocator import com.lightbend.lagom.scaladsl.api.{ServiceAcl, ServiceInfo} import com.lightbend.lagom.scaladsl.client.LagomServiceClientComponents import com.lightbend.lagom.scaladsl.devmode.LagomDevModeComponents import com.packt.publishing.wf.api.WFService import com.packt.publishing.wf.consumer.api.WFConsumerService import com.softwaremill.macwire._ import controllers.{Assets, WFConsumerController, WFController} import play.api.ApplicationLoader.Context import play.api.i18n.I18nComponents import play.api.libs.ws.ahc.AhcWSComponents import play.api._ import play.api.Play.current import play.api.i18n.Messages.Implicits._ import router.Routes import scala.collection.immutable import scala.concurrent.ExecutionContext abstract class WebGateway @Inject()(context: Context) extends BuiltInComponentsFromContext(context) with I18nComponents with AhcWSComponents with LagomServiceClientComponents{ override lazy val serviceInfo: ServiceInfo = ServiceInfo( "wf-frontend", Map( "wf-frontend" -> immutable.Seq(ServiceAcl.forPathRegex("(?!/api/).*")) ) ) override implicit lazy val executionContext: ExecutionContext = actorSystem.dispatcher override lazy val router = { val prefix = "/" wire[Routes] } lazy val wfService = serviceClient.implement[WFService] lazy val wfConsumerService = serviceClient.implement[WFConsumerService] lazy val wfController = wire[WFController] lazy val wfConsumerController = wire[WFConsumerController] lazy val assets = wire[Assets] } class WebGatewayLoader extends ApplicationLoader { override def load(context: Context): Application = context.environment.mode match { case Mode.Dev => new WebGateway(context) with LagomDevModeComponents {}.application case _ => new WebGateway(context) { override def serviceLocator = NoServiceLocator }.application } }
Example 192
Source File: WebGatewayLoader.scala From Scala-Reactive-Programming with MIT License | 5 votes |
import javax.inject.Inject

import com.lightbend.lagom.internal.client.CircuitBreakerMetricsProviderImpl
import com.lightbend.lagom.scaladsl.api.ServiceLocator.NoServiceLocator
import com.lightbend.lagom.scaladsl.api.{ServiceAcl, ServiceInfo}
import com.lightbend.lagom.scaladsl.client.LagomServiceClientComponents
import com.lightbend.lagom.scaladsl.devmode.LagomDevModeComponents
import com.packt.publishing.wf.api.WFService
import com.packt.publishing.wf.consumer.api.WFConsumerService
import com.softwaremill.macwire._
import controllers.{Assets, WFConsumerController, WFController}
import play.api.ApplicationLoader.Context
import play.api.i18n.I18nComponents
import play.api.libs.ws.ahc.AhcWSComponents
import play.api._
import play.api.Play.current
import play.api.i18n.Messages.Implicits._
import router.Routes

import scala.collection.immutable
import scala.concurrent.ExecutionContext

import com.typesafe.conductr.bundlelib.lagom.scaladsl.ConductRApplicationComponents

abstract class WebGateway @Inject()(context: Context)
    extends BuiltInComponentsFromContext(context)
    with I18nComponents
    with AhcWSComponents
    with LagomServiceClientComponents {

  override lazy val serviceInfo: ServiceInfo = ServiceInfo(
    "wf-frontend",
    Map(
      "wf-frontend" -> immutable.Seq(ServiceAcl.forPathRegex("(?!/api/).*"))
    )
  )

  override implicit lazy val executionContext: ExecutionContext = actorSystem.dispatcher

  override lazy val router = {
    val prefix = "/"
    wire[Routes]
  }

  lazy val wfService = serviceClient.implement[WFService]
  lazy val wfConsumerService = serviceClient.implement[WFConsumerService]

  lazy val wfController = wire[WFController]
  lazy val wfConsumerController = wire[WFConsumerController]
  lazy val assets = wire[Assets]
}

class WebGatewayLoader extends ApplicationLoader {
  override def load(context: Context): Application = context.environment.mode match {
    case Mode.Dev =>
      new WebGateway(context) with LagomDevModeComponents {}.application
    case _ =>
      (new WebGateway(context) with ConductRApplicationComponents {
        override lazy val circuitBreakerMetricsProvider = new CircuitBreakerMetricsProviderImpl(actorSystem)
      }).application
  }
}
Example 193
Source File: SerializationUtils.scala From midas with BSD 3-Clause "New" or "Revised" License | 5 votes |
package midas.widgets

import chisel3._

import scala.collection.immutable

object SerializationUtils {
  // Boxed types for different leaf chisel types we currently support
  case class SerializableType(typeString: String)
  val UIntType = SerializableType("UInt")
  val SIntType = SerializableType("SInt")

  case class SerializableField(name: String, tpe: SerializableType, fieldWidth: Int) {
    def regenType(): Data = tpe match {
      case UIntType => UInt(fieldWidth.W)
      case SIntType => SInt(fieldWidth.W)
      case _ => throw new Exception(s"Type string with no associated chisel type: ${tpe}")
    }
  }

  object SerializableField {
    def apply(name: String, field: Data): SerializableField = field match {
      case f: Aggregate =>
        throw new Exception(s"Cannot serialize aggregate types; pass in leaf fields instead.")
      case f: UInt => SerializableField(name, UIntType, f.getWidth)
      case f: SInt => SerializableField(name, SIntType, f.getWidth)
      case _ => throw new Exception(s"Cannot serialize this field type")
    }
  }

  class RegeneratedTargetIO(inputs: Seq[SerializableField], outputs: Seq[SerializableField]) extends Record {
    val inputPorts = inputs.map(field => field.name -> Input(field.regenType))
    val outputPorts = outputs.map(field => field.name -> Output(field.regenType))
    override val elements = immutable.ListMap((inputPorts ++ outputPorts): _*)
    override def cloneType = new RegeneratedTargetIO(inputs, outputs).asInstanceOf[this.type]
  }
}
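The idea above is to box a Chisel leaf port as plain data (name, kind, width) so it can cross a serialization boundary, then regenerate an equivalent hardware type on the other side. A minimal sketch using only the API defined in this file; in real use the regenerated types would be elaborated inside a Module:

import chisel3._
import midas.widgets.SerializationUtils._

// Box a 16-bit UInt, then regenerate an equivalent chisel3 type from the boxed form.
val boxed = SerializableField("data_in", UInt(16.W)) // SerializableField("data_in", UIntType, 16)
val regenerated: Data = boxed.regenType()            // a fresh UInt(16.W)
// Rebuild a Record with one input port and no outputs from the boxed description.
val ioType = new RegeneratedTargetIO(Seq(boxed), Seq.empty)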
Example 194
Source File: AggregateSnapshotRecovery.scala From akka-cqrs with Apache License 2.0 | 5 votes |
package com.productfoundry.akka.cqrs.snapshot

import akka.persistence._
import com.productfoundry.akka.cqrs.Aggregate
import com.productfoundry.akka.cqrs.publish.EventPublication

import scala.collection.immutable

trait AggregateSnapshotRecovery extends PersistentActor {
  this: Aggregate =>

  type SnapshotType <: AggregateStateSnapshot

  def getStateSnapshot(state: S): SnapshotType

  def getStateFromSnapshot(snapshot: SnapshotType): S

  abstract override def receiveCommand: Receive = receiveSnapshotCommand orElse super.receiveCommand

  private def receiveSnapshotCommand: Receive = {
    case SaveSnapshotSuccess(metadata) =>
      log.info("Snapshot saved successfully {}", metadata)

    case SaveSnapshotFailure(metadata, reason) =>
      log.error(reason, "Snapshot save failed {}", metadata)
  }

  abstract override def receiveRecover: Receive = receiveSnapshotRecover orElse super.receiveRecover

  private def receiveSnapshotRecover: Receive = {
    case SnapshotOffer(_, aggregateSnapshot: AggregateSnapshot) =>
      setAggregateSnapshot(aggregateSnapshot)
  }

  def saveAggregateSnapshot(): Unit = {
    super.saveSnapshot(getAggregateSnapshot)
  }

  def setAggregateSnapshot(snapshot: AggregateSnapshot): Unit = {
    // Revision and state
    revisedState = RevisedState(
      snapshot.revision,
      snapshot.stateSnapshotOption.map { stateSnapshot =>
        getStateFromSnapshot(stateSnapshot.asInstanceOf[SnapshotType])
      }
    )

    // Reliable event publisher
    for {
      reliableEventPublisher <- asReliableEventPublisherOption
      reliableEventPublisherSnapshot <- snapshot.reliableEventPublisherSnapshotOption
    } {
      reliableEventPublisher.setReliableEventPublisherSnapshot(reliableEventPublisherSnapshot)
    }

    // At least once delivery
    for {
      atLeastOnceDelivery <- asAtLeastOnceDeliveryOption
      atLeastOnceDeliverySnapshot <- snapshot.atLeastOnceDeliverySnapshotOption
    } {
      val updatedUnconfirmedDeliveries: immutable.Seq[AtLeastOnceDelivery.UnconfirmedDelivery] =
        atLeastOnceDeliverySnapshot.unconfirmedDeliveries.map { unconfirmedDelivery =>
          val updatedMessageOption: Option[Any] = unconfirmedDelivery.message match {
            case publication @ EventPublication(_, Some(confirmDeliveryRequest), _) =>
              // We need to update the target for the confirm delivery request as the actor ref has changed
              Some(publication.copy(confirmationOption = Some(confirmDeliveryRequest.copy(target = context.self))))

            case _ =>
              // Nothing to modify
              None
          }

          updatedMessageOption.fold(unconfirmedDelivery)(updatedMessage =>
            unconfirmedDelivery.copy(message = updatedMessage))
        }

      atLeastOnceDelivery.setDeliverySnapshot(
        atLeastOnceDeliverySnapshot.copy(unconfirmedDeliveries = updatedUnconfirmedDeliveries))
    }
  }
}
Example 195
Source File: PatternElementWriterTest.scala From morpheus with Apache License 2.0 | 5 votes |
package org.opencypher.okapi.neo4j.io

import org.neo4j.driver.v1.Values
import org.opencypher.okapi.api.value.CypherValue.CypherMap
import org.opencypher.okapi.neo4j.io.Neo4jHelpers.Neo4jDefaults.metaPropertyKey
import org.opencypher.okapi.neo4j.io.Neo4jHelpers._
import org.opencypher.okapi.neo4j.io.testing.Neo4jServerFixture
import org.opencypher.okapi.testing.Bag._
import org.opencypher.okapi.testing.BaseTestSuite
import org.scalatest.BeforeAndAfter

import scala.collection.immutable

class PatternElementWriterTest extends BaseTestSuite with Neo4jServerFixture with BeforeAndAfter {

  it("can write nodes") {
    ElementWriter.createNodes(
      inputNodes.toIterator,
      Array(metaPropertyKey, "val1", "val2", "val3", null),
      neo4jConfig,
      Set("Foo", "Bar", "Baz")
    )(rowToListValue)

    val expected = inputNodes.map { node =>
      CypherMap(
        s"n.$metaPropertyKey" -> node(0),
        "n.val1" -> node(1),
        "n.val2" -> node(2),
        "n.val3" -> node(3)
      )
    }.toBag

    val result = neo4jConfig
      .cypherWithNewSession(s"MATCH (n) RETURN n.$metaPropertyKey, n.val1, n.val2, n.val3")
      .map(CypherMap)
      .toBag
    result should equal(expected)
  }

  it("can write relationships") {
    ElementWriter.createRelationships(
      inputRels.toIterator,
      1,
      2,
      Array(metaPropertyKey, null, null, "val3"),
      neo4jConfig,
      "REL",
      None
    )(rowToListValue)

    val expected = inputRels.map { rel =>
      CypherMap(
        s"r.$metaPropertyKey" -> rel(0),
        "r.val3" -> rel(3)
      )
    }.toBag

    val result = neo4jConfig
      .cypherWithNewSession(s"MATCH ()-[r]->() RETURN r.$metaPropertyKey, r.val3")
      .map(CypherMap)
      .toBag
    result should equal(expected)
  }

  override def dataFixture: String = ""

  private def rowToListValue(data: Array[AnyRef]) = Values.value(data.map(Values.value): _*)

  private val numberOfNodes = 10

  val inputNodes: immutable.IndexedSeq[Array[AnyRef]] = (1 to numberOfNodes).map { i =>
    Array[AnyRef](
      i.asInstanceOf[AnyRef],
      i.asInstanceOf[AnyRef],
      i.toString.asInstanceOf[AnyRef],
      (i % 2 == 0).asInstanceOf[AnyRef],
      (i + 1).asInstanceOf[AnyRef]
    )
  }

  val inputRels: immutable.IndexedSeq[Array[AnyRef]] = (2 to numberOfNodes).map { i =>
    Array[AnyRef](
      i.asInstanceOf[AnyRef],
      (i - 1).asInstanceOf[AnyRef],
      i.asInstanceOf[AnyRef],
      (i % 2 == 0).asInstanceOf[AnyRef]
    )
  }
}
Example 196
Source File: JobTimeSpan.scala From sparklens with Apache License 2.0 | 5 votes |
package com.qubole.sparklens.timespan

import com.qubole.sparklens.common.{AggregateMetrics, AppContext}
import org.apache.spark.executor.TaskMetrics
import org.apache.spark.scheduler.TaskInfo
import org.json4s.DefaultFormats
import org.json4s.JsonAST.JValue

import scala.collection.{immutable, mutable}

// Excerpt from the JobTimeSpan class; the surrounding class declaration is elided in this listing.

  private def criticalTime(stageID: Int, data: mutable.HashMap[Int, (Seq[Int], Long)]): Long = {
    // Provide a default (no parent stages, zero time) for stage IDs missing from the map
    val stageData = data.getOrElse(stageID, (List.empty[Int], 0L))
    stageData._2 + {
      if (stageData._1.size == 0) {
        0L
      } else {
        stageData._1.map(x => criticalTime(x, data)).max
      }
    }
  }

  override def getMap(): Map[String, _ <: Any] = {
    implicit val formats = DefaultFormats

    Map(
      "jobID" -> jobID,
      "jobMetrics" -> jobMetrics.getMap,
      "stageMap" -> AppContext.getMap(stageMap)) ++ super.getStartEndTime()
  }
}

object JobTimeSpan {
  def getTimeSpan(json: Map[String, JValue]): mutable.HashMap[Long, JobTimeSpan] = {
    implicit val formats = DefaultFormats
    val map = new mutable.HashMap[Long, JobTimeSpan]

    json.keys.map(key => {
      val value = json.get(key).get.extract[JValue]
      val timeSpan = new JobTimeSpan((value \ "jobID").extract[Long])
      timeSpan.jobMetrics = AggregateMetrics.getAggregateMetrics((value \ "jobMetrics").extract[JValue])
      timeSpan.stageMap = StageTimeSpan.getTimeSpan((value \ "stageMap").extract[immutable.Map[String, JValue]])
      timeSpan.addStartEnd(value)

      map.put(key.toLong, timeSpan)
    })
    map
  }
}
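criticalTime walks the stage dependency graph recursively: a stage's critical time is its own duration plus the largest critical time among its parent stages. The same recursion is restated below as a standalone function on a hand-built map, purely for illustration (identical logic, no Spark types):

import scala.collection.mutable

def criticalTime(stageID: Int, data: mutable.HashMap[Int, (Seq[Int], Long)]): Long = {
  val (parents, time) = data.getOrElse(stageID, (Seq.empty[Int], 0L))
  time + (if (parents.isEmpty) 0L else parents.map(criticalTime(_, data)).max)
}

val stages = mutable.HashMap[Int, (Seq[Int], Long)](
  (1, (Seq.empty[Int], 100L)), // stage 1: no parents, runs 100 ms
  (2, (Seq.empty[Int], 300L)), // stage 2: no parents, runs 300 ms
  (3, (Seq(1, 2), 50L))        // stage 3: waits for stages 1 and 2, then runs 50 ms
)
assert(criticalTime(3, stages) == 350L) // 50 + max(100, 300)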
Example 197
Source File: SingleFloatFamilyMetaExtractorTest.scala From aerosolve with Apache License 2.0 | 5 votes |
package com.airbnb.aerosolve.training.photon.ml.data

import com.airbnb.aerosolve.core.features.Features
import com.airbnb.aerosolve.core.Example
import org.junit.Assert._
import org.junit.Test

import scala.collection.immutable

class SingleFloatFamilyMetaExtractorTest {
  private def mockExampleMetaFields(metaFamily: String): Example = {
    val builder = Features.builder()
    builder.names(Array[String](metaFamily + "_foo", metaFamily + "_bar", metaFamily + "_tar", "randomF_b"))
    builder.values(Array[Object](
      new java.lang.Integer(123),
      new java.lang.Double(100d),
      new java.lang.Long(-321L),
      new java.lang.Integer(20)))
    builder.build().toExample(false)
  }

  @Test
  def testMetaDataExtraction(): Unit = {
    val extractor = new SingleFloatFamilyMetaExtractor()

    val example = mockExampleMetaFields("meta")
    val metaMap = extractor.buildMetaDataMap(example, immutable.Map[String, String]())
    assertEquals(3, metaMap.size())
    assertEquals(123L, metaMap.get("foo"))
    assertEquals(100L, metaMap.get("bar"))
    // Should make all ids >= 0
    assertEquals(321L, metaMap.get("tar"))

    val example2 = mockExampleMetaFields("metav2")
    val metaMap2 = extractor.buildMetaDataMap(example2, immutable.Map[String, String]("metaFamily" -> "metav2"))
    assertEquals(3, metaMap2.size())
    assertEquals(123L, metaMap2.get("foo"))
    assertEquals(100L, metaMap2.get("bar"))
    assertEquals(321L, metaMap2.get("tar"))
  }

  @Test(expected = classOf[IllegalArgumentException])
  def testMissingField(): Unit = {
    val extractor = new SingleFloatFamilyMetaExtractor()
    val example = mockExampleMetaFields("metav2")
    extractor.buildMetaDataMap(example, immutable.Map[String, String]())
  }
}
Example 198
Source File: PlayJavaServerCodeGenerator.scala From play-grpc with Apache License 2.0 | 5 votes |
package play.grpc.gen.javadsl

import scala.collection.immutable
import akka.grpc.gen.Logger
import akka.grpc.gen.javadsl.JavaCodeGenerator
import akka.grpc.gen.javadsl.Service
import com.google.protobuf.compiler.PluginProtos.CodeGeneratorResponse
import templates.PlayJavaServer.txt.Router
import templates.PlayJavaServer.txt.RouterUsingActions

class PlayJavaServerCodeGenerator extends JavaCodeGenerator {
  override def name: String = "play-grpc-server-java"

  override def perServiceContent = super.perServiceContent + generatePlainRouter + generatePowerRouter

  private val generatePlainRouter: (Logger, Service) => immutable.Seq[CodeGeneratorResponse.File] =
    (logger, service) => {
      val b = CodeGeneratorResponse.File.newBuilder()
      if (service.usePlayActions) b.setContent(RouterUsingActions(service, powerApis = false).body)
      else b.setContent(Router(service, powerApis = false).body)
      b.setName(s"${service.packageDir}/Abstract${service.name}Router.java")
      logger.info(s"Generating Play gRPC service play router for ${service.packageName}.${service.name}")
      immutable.Seq(b.build)
    }

  private val generatePowerRouter: (Logger, Service) => immutable.Seq[CodeGeneratorResponse.File] =
    (logger, service) => {
      if (service.serverPowerApi) {
        val b = CodeGeneratorResponse.File.newBuilder()
        if (service.usePlayActions) b.setContent(RouterUsingActions(service, powerApis = true).body)
        else b.setContent(Router(service, powerApis = true).body)
        b.setName(s"${service.packageDir}/Abstract${service.name}PowerApiRouter.java")
        logger.info(s"Generating Akka gRPC service power API play router for ${service.packageName}.${service.name}")
        immutable.Seq(b.build)
      } else immutable.Seq.empty
    }
}

object PlayJavaServerCodeGenerator extends PlayJavaServerCodeGenerator
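Each generator here is just a function of shape (Logger, Service) => immutable.Seq[CodeGeneratorResponse.File] added to the perServiceContent set, so emitting an extra file per service means adding one more function of the same shape. A hedged sketch; the file name and content are illustrative placeholders, not part of play-grpc:

// Hypothetical extra per-service generator, written in the same style as the two above.
private val generateNotes: (Logger, Service) => immutable.Seq[CodeGeneratorResponse.File] =
  (logger, service) => {
    val b = CodeGeneratorResponse.File.newBuilder()
    b.setName(s"${service.packageDir}/${service.name}Notes.txt")
    b.setContent(s"Routers generated for ${service.packageName}.${service.name}")
    logger.info(s"Generating notes stub for ${service.name}")
    immutable.Seq(b.build)
  }
// Folded in alongside the routers:
// override def perServiceContent = super.perServiceContent + generatePlainRouter + generatePowerRouter + generateNotes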