scala.collection.breakOut Scala Examples
The following examples show how to use scala.collection.breakOut.
Each example is taken from an open-source project; the source file, project, and license are noted above it.
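Before the examples, a quick refresher may help. In Scala 2.12 and earlier, breakOut supplies the CanBuildFrom argument of methods like map, flatMap, and collect so that the target collection is built directly, skipping the intermediate collection that a trailing .toMap, .toSet, or .toVector would otherwise create; the expected type at the call site determines what gets built. (breakOut was removed in Scala 2.13.) A minimal sketch, assuming only the standard library:

import scala.collection.breakOut

object BreakOutDemo extends App {
  val words = List("foo", "bar", "baz")

  // Without breakOut: map builds an intermediate List[(String, Int)],
  // which .toMap then copies into a Map.
  val viaToMap: Map[String, Int] = words.map(w => w -> w.length).toMap

  // With breakOut: the expected type Map[String, Int] selects a CanBuildFrom
  // that builds the Map directly, with no intermediate List.
  val direct: Map[String, Int] = words.map(w => w -> w.length)(breakOut)

  assert(viaToMap == direct)
  println(direct) // Map(foo -> 3, bar -> 3, baz -> 3)
}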
Example 1
Source File: DiscoverEsTopology.scala From CM-Well with Apache License 2.0
package cmwell.analytics.util

import akka.actor.ActorSystem
import akka.http.scaladsl.model.HttpRequest
import akka.stream.ActorMaterializer
import com.fasterxml.jackson.databind.JsonNode

import scala.collection.JavaConverters._
import scala.collection.breakOut
import scala.concurrent.ExecutionContextExecutor

case class Shard(indexName: String, shard: Int) {

  // This field is not in the constructor argument list since it is not part of equality.
  private var _downloadAttempt: Int = 0

  def downloadAttempt: Int = _downloadAttempt

  def nextAttempt: Shard = {
    val copy = this.copy()
    copy._downloadAttempt = this._downloadAttempt + 1
    copy
  }
}

case class EsTopology(nodes: Map[String, String], // nodeId -> address
                      shards: Map[Shard, Seq[String]], // shard -> Seq[nodeId]
                      allIndexNames: Set[String])

object DiscoverEsTopology {

  def apply(esContactPoint: String,
            aliases: Seq[String] = Seq.empty)
           (implicit system: ActorSystem,
            executionContext: ExecutionContextExecutor,
            actorMaterializer: ActorMaterializer): EsTopology = {

    // Get a map from node name -> address
    val nodesJson = HttpUtil.jsonResult(HttpRequest(uri = s"http://$esContactPoint/_nodes"), "find es nodes")
    val extractAddress = "inet\\[/(.+)]".r // "inet[/10.204.146.152:9304]"
    val nodes: Map[String, String] = nodesJson.get("nodes").fields.asScala.map { entry =>
      val nodeId = entry.getKey
      val extractAddress(hostPort) = entry.getValue.get("http_address").asText
      nodeId -> hostPort
    }.toMap

    // Find all the shards for all indexes.
    val searchShardsJson = HttpUtil.jsonResult(HttpRequest(uri = s"http://$esContactPoint/_search_shards"), "search shards")

    val shards: Map[Shard, Seq[String]] = searchShardsJson.get("shards").elements.asScala.map { shardLocations: JsonNode =>
      // Sort the shard locations so that the primary is first - we will always try the primary first
      val locations = shardLocations.elements.asScala.toSeq.sortBy(_.findValue("primary").booleanValue).reverse
      assert(locations.nonEmpty)
      assert(locations.head.findValue("primary").booleanValue) // first one is primary node

      val indexName = locations.head.findValue("index").asText
      val shard = locations.head.findValue("shard").asInt
      val nodeIds: Vector[String] = locations.map(_.findValue("node").asText)(breakOut)

      Shard(indexName, shard) -> nodeIds
    }.toMap

    // Get a list of aliases that we want to read from.
    // This is used to filter the list of all shards down to the ones that we want to read from.
    def resolveAlias(alias: String): Set[String] = {
      val aliasesJson = HttpUtil.jsonResult(HttpRequest(uri = s"http://$esContactPoint/$alias/_alias"), s"shards for $alias")
      aliasesJson.fieldNames.asScala.toSet
    }

    val readIndexNames: Set[String] =
      if (aliases.isEmpty)
        resolveAlias("cm_well_all") // Default if no alias or index name specified.
      else
        (Set.empty[String] /: aliases) (_ ++ resolveAlias(_)) // resolve and combine all the index names

    // allIndexNames is useful for validation of parameters to ensure they are all valid index names.
    val allIndexNames: Set[String] = {
      val aliasesJson = HttpUtil.jsonResult(HttpRequest(uri = s"http://$esContactPoint/_all/_alias"), "Get all index names")
      aliasesJson.fieldNames.asScala.toSet
    }

    EsTopology(
      nodes = nodes,
      // Only read shards for indexes that are included in the given aliases or index names.
      shards = shards.filter { case (shard, _) => readIndexNames.contains(shard.indexName) },
      allIndexNames = allIndexNames)
  }
}
Example 2
Source File: PositiveLabelsFunction.scala From aloha with MIT License
package com.eharmony.aloha.dataset.vw.multilabel

import com.eharmony.aloha.AlohaException
import com.eharmony.aloha.dataset.DvProducer
import com.eharmony.aloha.dataset.vw.multilabel.VwMultilabelRowCreator.{determineLabelNamespaces, LabelNamespaces}
import com.eharmony.aloha.reflect.RefInfo
import com.eharmony.aloha.semantics.compiled.CompiledSemantics
import com.eharmony.aloha.semantics.func.GenAggFunc

import scala.collection.breakOut
import scala.util.{Failure, Success, Try}
import scala.collection.{immutable => sci}

private[multilabel] abstract class PositiveLabelsFunction[A, K: RefInfo] { self: DvProducer =>

  private[multilabel] def positiveLabelsFn(
      semantics: CompiledSemantics[A],
      positiveLabels: String
  ): Try[GenAggFunc[A, sci.IndexedSeq[K]]] =
    getDv[A, sci.IndexedSeq[K]](
      semantics, "positiveLabels", Option(positiveLabels), Option(Vector.empty[K]))

  private[multilabel] def labelNamespaces(nss: List[(String, List[Int])]): Try[LabelNamespaces] = {
    val nsNames: Set[String] = nss.map(_._1)(breakOut)
    determineLabelNamespaces(nsNames) match {
      case Some(ns) => Success(ns)

      // If there are so many VW namespaces that all available Unicode characters are taken,
      // then a memory error will probably already have occurred.
      case None => Failure(new AlohaException(
        "Could not find any Unicode characters to use as VW namespaces. Namespaces provided: " +
          nsNames.mkString(", ")
      ))
    }
  }
}
Example 3
Source File: VwMultilabelModelPluginJsonReader.scala From aloha with MIT License
package com.eharmony.aloha.models.vw.jni.multilabel.json

import com.eharmony.aloha.dataset.vw.multilabel.VwMultilabelRowCreator.{LabelNamespaces, determineLabelNamespaces}
import com.eharmony.aloha.models.multilabel.SparsePredictorProducer
import com.eharmony.aloha.models.vw.jni.Namespaces
import com.eharmony.aloha.models.vw.jni.multilabel.VwSparseMultilabelPredictorProducer
import com.eharmony.aloha.util.Logging
import spray.json.{DeserializationException, JsValue, JsonReader}

import scala.collection.breakOut
import scala.collection.immutable.ListMap

case class VwMultilabelModelPluginJsonReader[K](featureNames: Seq[String], numLabelsInTrainingSet: Int)
    extends JsonReader[SparsePredictorProducer[K]]
    with VwMultilabelModelJson
    with Namespaces
    with Logging {

  import VwMultilabelModelPluginJsonReader._

  override def read(json: JsValue): VwSparseMultilabelPredictorProducer[K] = {
    val ast = json.asJsObject(notObjErr(json)).convertTo[VwMultilabelAst]
    val (namespaces, defaultNs, missing) =
      allNamespaceIndices(featureNames, ast.namespaces.getOrElse(ListMap.empty))

    if (missing.nonEmpty)
      info(s"features in namespaces not found in featureNames: $missing")

    val namespaceNames: Set[String] = namespaces.map(_._1)(breakOut)
    val labelAndDummyLabelNss = determineLabelNamespaces(namespaceNames)

    labelAndDummyLabelNss match {
      case Some(LabelNamespaces(labelNs, _)) =>
        VwSparseMultilabelPredictorProducer[K](ast.modelSource, defaultNs, namespaces, labelNs, numLabelsInTrainingSet)
      case _ =>
        throw new DeserializationException(
          "Could not determine label namespace. Found namespaces: " + namespaceNames.mkString(", ")
        )
    }
  }
}

object VwMultilabelModelPluginJsonReader extends Logging {
  private val JsonErrStrLength = 100

  private[multilabel] def notObjErr(json: JsValue): String = {
    val str = json.prettyPrint
    // take is safe even when the pretty-printed JSON is shorter than JsonErrStrLength,
    // whereas substring(0, JsonErrStrLength) would throw StringIndexOutOfBoundsException.
    val substr = str.take(JsonErrStrLength)
    s"JSON object expected. Found " + substr + (if (str.length != substr.length) " ..." else "")
  }
}
Example 4
Source File: ProjectDefinitionUtil.scala From neo-sbt-scalafmt with Apache License 2.0
package sbt

import scala.collection.breakOut

class ProjectDefinitionUtil(project: ProjectDefinition[_]) {

  def sbtFiles = {
    def sbtFiles(addSettings: AddSettings): Set[File] =
      addSettings match {
        case addSettings: AddSettings.SbtFiles =>
          addSettings.files.map(IO.resolve(project.base, _)).filterNot(_.isHidden).toSet
        case addSettings: AddSettings.DefaultSbtFiles =>
          BuildPaths.configurationSources(project.base).filter(addSettings.include).filterNot(_.isHidden).toSet
        case addSettings: AddSettings.Sequence =>
          addSettings.sequence.flatMap(sbtFiles)(breakOut)
        case _ =>
          Set.empty
      }
    sbtFiles(project.auto)
  }
}
Example 5
Source File: ProjectDefinitionUtil.scala From neo-sbt-scalafmt with Apache License 2.0
package sbt

import sbt.internal.AddSettings

import scala.collection.breakOut

class ProjectDefinitionUtil(project: ProjectDefinition[_]) {

  def sbtFiles = {
    def sbtFiles(addSettings: AddSettings): Set[File] =
      addSettings match {
        case addSettings: AddSettings.SbtFiles =>
          addSettings.files.map(IO.resolve(project.base, _)).filterNot(_.isHidden).toSet
        case addSettings: AddSettings.DefaultSbtFiles =>
          BuildPaths.configurationSources(project.base).filter(addSettings.include).filterNot(_.isHidden).toSet
        case addSettings: AddSettings.Sequence =>
          addSettings.sequence.flatMap(sbtFiles)(breakOut)
        case _ =>
          Set.empty
      }
    sbtFiles(AddSettings.defaultSbtFiles)
  }
}
Example 6
Source File: TckEngineAdapter.scala From ingraph with Eclipse Public License 1.0
package ingraph.ire

import ingraph.ire.datatypes.Tuple
import ingraph.compiler.FPlanParser
import ingraph.model.expr.{AbstractReturnItem, EdgeListAttribute, ResolvableName, VertexAttribute}
import ingraph.model.fplan.Production
import org.opencypher.tools.tck.api._
import org.opencypher.tools.tck.values._

import scala.collection.breakOut

class TckEngineAdapter extends Graph {
  val indexer = new Indexer()

  def toCypherPropertyMap(map: Map[String, Any]): CypherPropertyMap = {
    CypherPropertyMap(map.map { case (columnName, value) => columnName -> toCypherValue(value) })
  }

  def toCypherRelationship(id: Long, indexer: Indexer) = {
    val edge = indexer.edgeLookup(id)
    CypherRelationship(edge.`type`, toCypherPropertyMap(edge.properties))
  }

  def toCypherValue(value: Any, attribute: Option[ResolvableName] = None,
                    indexer: Option[Indexer] = None): CypherValue = {
    val isVertex = attribute
      .collect { case a: AbstractReturnItem => a.toAttribute }
      .collect { case a: VertexAttribute => a }
      .isDefined
    val isEdgeListAttribute = attribute
      .collect { case a: AbstractReturnItem => a.toAttribute }
      .collect { case a: EdgeListAttribute => a }
      .isDefined

    value match {
      case null => CypherNull
      case value: java.lang.Boolean => CypherBoolean(value)
      case value: String => CypherString(value)
      case value: Long if !isVertex => CypherInteger(value)
      case value: Long if isVertex => {
        val vertex = indexer.get.vertexLookup(value)
        val properties = toCypherPropertyMap(vertex.properties)
        CypherNode(vertex.labels, properties)
      }
      case value: Seq[Any] if !isEdgeListAttribute =>
        CypherOrderedList(value.map(toCypherValue(_)).toList)
      case value: Seq[Long] if isEdgeListAttribute =>
        CypherOrderedList(value.map(toCypherRelationship(_, indexer.get)).toList)
    }
  }

  override def cypher(query: String, params: Map[String, CypherValue], meta: QueryType): Result = {
    println(meta)
    println(query)
    val result = meta match {
      case InitQuery => {
        val createAdapter = new OneTimeQueryAdapter(query, meta.toString, indexer)
        createAdapter.results()
        CypherValueRecords.empty
      }
      // TODO
      case SideEffectQuery => CypherValueRecords.empty
      case _ => {
        println("++++++++++++++++")
        val readAdapter = new IncrementalQueryAdapter(query, meta.toString, indexer)
        val plan = FPlanParser.parse(query).asInstanceOf[Production]
        val columnNames = plan.outputNames.toSeq
        val resultTuples = readAdapter.results()
        val tupleConversion: Tuple => Map[String, CypherValue] = tuple => {
          val cellInfos = columnNames zip plan.output zip tuple
          cellInfos.map { case ((columnName, attribute), value) =>
            columnName -> toCypherValue(value, Some(attribute), Some(readAdapter.indexer))
          }(breakOut)
        }
        val result = resultTuples.map(tupleConversion).toList
        result.foreach(println)
        readAdapter.close()
        CypherValueRecords(columnNames.toList, result)
      }
    }
    println()
    result
  }

  // TODO lifecycle
  override def close(): Unit = super.close()
}
Example 7
Source File: FsmGraph.scala From akka-viz with MIT License
package akkaviz.frontend.components

import akkaviz.frontend.vis
import akkaviz.frontend.vis.{NetworkData, NetworkOptions}
import org.scalajs.dom.html.Element
import org.scalajs.dom.{Element => domElement}

import scala.collection.breakOut
import scala.scalajs.js

class FsmGraph(parent: Element) {

  private[this] val networkNodes = new vis.DataSet[vis.Node]()
  private[this] val networkEdges = new vis.DataSet[vis.Edge]()
  private[this] val data = NetworkData(networkNodes, networkEdges)
  private[this] val network = new vis.Network(parent, data, fsmNetworkOptions)

  network.on("stabilizationProgress", () => network.fit())
  network.on("stabilized", () => network.fit())

  def displayFsm(transitions: Set[(String, String)]): Unit = {
    if (transitions.isEmpty) {
      parent.style.display = "none"
    } else {
      parent.style.display = "box"
      val allStates = transitions.flatMap { case (from, to) => Set(from, to) }
      val nodes: js.Array[vis.Node] = allStates.map(state => vis.Node(state, simplifyStateName(state), state))(breakOut)
      val edges: js.Array[vis.Edge] = transitions.map { case (from, to) => vis.Edge(s"${from}>${to}", from, to) }(breakOut)
      networkNodes.clear()
      networkNodes.add(nodes)
      networkEdges.clear()
      networkEdges.add(edges)
    }
  }

  @inline
  private[this] def simplifyStateName(state: String): String = {
    state.split('.').last.stripSuffix("$")
  }

  @inline
  private[this] def fsmNetworkOptions: NetworkOptions = {
    val opts = js.Dynamic.literal()
    opts.nodes = js.Dynamic.literal()
    opts.nodes.shape = "dot"
    opts.physics = js.Dynamic.literal()
    opts.physics.solver = "forceAtlas2Based"
    opts.physics.forceAtlas2Based = js.Dynamic.literal()
    opts.physics.forceAtlas2Based.springLength = 100
    opts.asInstanceOf[NetworkOptions]
  }
}
Example 8
Source File: ActorSystems.scala From akka-viz with MIT License
package akkaviz.events

import akka.actor.{ActorPath, ActorRef, ActorSystem}
import akka.viz.ActorCellInstrumentation

import scala.collection.breakOut
import scala.ref.WeakReference
import scala.util.Try

object ActorSystems {

  private[this] val systemReferences = scala.collection.mutable.Map[String, WeakReference[ActorSystem]]()

  def systems: scala.collection.immutable.Map[String, ActorSystem] =
    systemReferences.flatMap {
      case (name, ref) => ref.get.map { system => name -> system }
    }(breakOut)

  def registerSystem(system: ActorSystem): Unit = {
    systemReferences.update(system.name, WeakReference(system))
  }

  def tell(path: String, message: Any): Unit = {
    Try {
      val actorPath = ActorPath.fromString(path)
      systems.get(actorPath.address.system).foreach { system =>
        system.actorSelection(actorPath).tell(message, ActorRef.noSender)
      }
    }
  }

  def refreshActorState(path: String): Unit = {
    tell(path, ActorCellInstrumentation.RefreshInternalStateMsg)
  }
}
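The systems method above also shows breakOut crossing collection kinds: the declared result type immutable.Map lets flatMap over a mutable.Map build the immutable result directly. A minimal sketch of the same mutable-to-immutable conversion, assuming only the standard library (the names are illustrative):

import scala.collection.breakOut
import scala.collection.{immutable, mutable}

object MutableToImmutableDemo extends App {
  val cache = mutable.Map("a" -> 1, "b" -> 2)

  // The declared result type immutable.Map drives breakOut, so map/flatMap over a
  // mutable.Map builds the immutable result directly, with no intermediate copy.
  val snapshot: immutable.Map[String, Int] =
    cache.map { case (k, v) => k -> (v * 10) }(breakOut)

  println(snapshot) // e.g. Map(b -> 20, a -> 10)
}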
Example 9
Source File: SerializerFinder.scala From akka-viz with MIT License
package akkaviz.serialization

import org.clapper.classutil.ClassFinder

import scala.collection.breakOut

trait SerializerFinder {

  private[this] val rm = scala.reflect.runtime.currentMirror

  def findSerializers: List[AkkaVizSerializer] = {
    val finder = ClassFinder()
    val classes = finder.getClasses.filter(_.isConcrete).filter(_.implements(classOf[AkkaVizSerializer].getName))
    classes.flatMap { cls =>
      val clazz = Class.forName(cls.name)
      val classSymbol = rm.classSymbol(clazz)
      if (classSymbol.isModule) {
        Some(rm.reflectModule(classSymbol.asModule).instance.asInstanceOf[AkkaVizSerializer])
      } else {
        val constructors = classSymbol.toType.members.filter(_.isConstructor).map(_.asMethod)
        val constructorMaybe = constructors.filter(_.isPublic).filter(_.paramLists.exists(_.size == 0)).headOption
        constructorMaybe.map { constructor =>
          rm.reflectClass(classSymbol).reflectConstructor(constructor).apply().asInstanceOf[AkkaVizSerializer]
        }
      }
    }(breakOut)
  }
}
Example 10
Source File: ClusteringEvaluatorSpecBase.scala From fotm-info with MIT License
import info.fotm.clustering.Defaults
import info.fotm.domain._

import scala.collection.breakOut

trait ClusteringEvaluatorSpecBase {
  val gen = Defaults.generators(3)
  import gen._

  val teamSize = Defaults.settings(3).teamSize

  val players1500: Seq[CharacterSnapshot] = (1 to teamSize).map(i => genPlayer(1500, 1))
  val players1580: Seq[CharacterSnapshot] = (1 to teamSize).map(i => genPlayer(1580, 1))

  val player1500 = players1500.head
  val player1580 = players1580.head

  val team1500: Team = Team(players1500.map(_.id).toSet)
  val team1580: Team = Team(players1580.map(_.id).toSet)

  val hPlayers1500: Seq[CharacterSnapshot] = (1 to teamSize).map(i => genPlayer(1500, 0))
  val hPlayers1580: Seq[CharacterSnapshot] = (1 to teamSize).map(i => genPlayer(1580, 0))

  val hTeam1500: Team = Team(hPlayers1500.map(_.id).toSet)
  val hTeam1580: Team = Team(hPlayers1580.map(_.id).toSet)

  val aPlayers = players1500 ++ players1580
  val hPlayers = hPlayers1500 ++ hPlayers1580
  val allPlayers: Seq[CharacterSnapshot] = aPlayers ++ hPlayers

  val allTeams = Seq(team1500, team1580, hTeam1500, hTeam1580)

  val ladder: CharacterLadder = CharacterLadder(Axis.all.head, allPlayers.map(p => (p.id, p)).toMap)
}
Example 11
Source File: RealClusterer.scala From fotm-info with MIT License
package info.fotm.clustering

import java.util.UUID

import info.fotm.clustering.Clusterer.Cluster
import info.fotm.util.MathVector

import scala.collection.breakOut

object RealClusterer {

  def sequence(clusterers: RealClusterer*): RealClusterer = new RealClusterer {
    override def clusterize[T](input: Map[T, MathVector], groupSize: Int): Set[Seq[T]] = {
      val init = (Set.empty[Seq[T]], input)
      val (results, _) = clusterers.foldLeft(init) { (rs, c) =>
        val (clusters, currentInput) = rs
        val currentClusters = c.clusterize(currentInput, groupSize)
        (clusters ++ currentClusters, currentInput -- currentClusters.flatten)
      }
      results
    }
  }

  // TODO: works only as long as MathVector has referential equality
  def wrap(clusterer: Clusterer) = new RealClusterer {
    override def clusterize[T](input: Map[T, MathVector], groupSize: Int): Set[Seq[T]] = {
      require(groupSize > 1)
      if (input.size < groupSize) Set.empty
      else if (input.size == groupSize) Set(input.keys.toSeq)
      else if (groupSize == 1) input.keys.map(Seq(_)).toSet
      else {
        val reverseMap = input.map(_.swap)
        val clusters: Set[Cluster] = clusterer.clusterize(input.values.toSeq, groupSize)
        clusters
          .filter(_.size == groupSize) // additional protection against misbehaving clusterers
          // remove overlapping teams (penalize multiplexer and merged algos?)
          .filter(c => clusters.count(_.intersect(c).nonEmpty) == 1)
          .map(vectors => vectors.map(reverseMap))
      }
    }
  }

  lazy val identity = new RealClusterer {
    override def clusterize[T](input: Map[T, MathVector], groupSize: Int): Set[Seq[T]] =
      Set(input.keys.toSeq)
  }
}

trait RealClusterer {
  def clusterize[T](input: Map[T, MathVector], groupSize: Int): Set[Seq[T]]
  // def clusterize(input: Seq[MathVector], groupSize: Int): Seq[Int]
}

trait SeenEnhancer extends RealClusterer {
  val seen = scala.collection.mutable.HashMap.empty[Any, UUID]

  abstract override def clusterize[T](input: Map[T, MathVector], groupSize: Int): Set[Seq[T]] = {
    val inputSeenPreviously: Map[T, MathVector] =
      for { (k, v) <- input if seen.contains(k) } yield (k, v)

    val updateGroups: Map[UUID, Map[T, MathVector]] = inputSeenPreviously.groupBy(kv => seen(kv._1))

    val result: Set[Seq[T]] = updateGroups.flatMap(g => super.clusterize(g._2, groupSize))(breakOut)

    val updateId: UUID = java.util.UUID.randomUUID()
    for ((k, v) <- input) {
      seen(k) = updateId
    }

    result
  }
}
Example 12
Source File: ClusteringEvaluator.scala From fotm-info with MIT License
package info.fotm.clustering

import info.fotm.clustering.ClusteringEvaluatorData.DataPoint
import info.fotm.domain.Domain._
import info.fotm.domain._
import info.fotm.util.Statistics.Metrics
import info.fotm.util.{MathVector, Statistics}

import scala.collection.breakOut
import scala.collection.immutable.Iterable
import scala.util.Random

class ClusteringEvaluator(features: List[Feature[CharacterStatsUpdate]]) extends App {
  type Bucket = Set[CharacterStatsUpdate]
  type BucketFilter = (Bucket => Bucket)

  def findTeamsInUpdate(ladderUpdate: LadderUpdate,
                        clusterer: RealClusterer,
                        bucketFilter: BucketFilter = identity): Set[Team] =
    (for {
      bucket <- splitIntoBuckets(ladderUpdate)
      team <- findTeamsInBucket(bucket, ladderUpdate.current.axis.bracket.size, clusterer, bucketFilter)
    } yield team)(breakOut)

  def splitIntoBuckets(ladderUpdate: LadderUpdate): Iterable[Set[CharacterStatsUpdate]] =
    for {
      (_, factionUpdates) <- ladderUpdate.statsUpdates.groupBy(u => ladderUpdate.current.rows(u.id).view.factionId)
      (winners, losers) = factionUpdates.partition(u =>
        ladderUpdate.current(u.id).season.wins > ladderUpdate.previous(u.id).season.wins)
      bucket <- Seq(winners, losers)
    } yield bucket

  def findTeamsInBucket(inputBucket: Set[CharacterStatsUpdate],
                        teamSize: Int,
                        clusterer: RealClusterer,
                        bucketFilter: BucketFilter = identity): Set[Team] = {
    val bucket = bucketFilter(inputBucket).toSeq // NB! do not remove .toSeq here or .zip below won't work

    if (bucket.isEmpty) Set()
    else {
      val featureVectors: Seq[MathVector] = Feature.normalize(features, bucket)
      val featureMap: Map[CharacterId, MathVector] = bucket.map(_.id).zip(featureVectors)(breakOut)
      val clusters = clusterer.clusterize(featureMap, teamSize)
      clusters.map(ps => Team(ps.toSet))
    }
  }

  def noiseFilter(nLost: Int): BucketFilter =
    bucket => Random.shuffle(bucket.toSeq).drop(nLost / 2).dropRight(nLost / 2).toSet

  def evaluateStep(clusterer: RealClusterer,
                   ladderUpdate: LadderUpdate,
                   games: Set[Game],
                   nLost: Int = 0): Statistics.Metrics = {
    print(".")
    val actualTeamsPlayed: Set[Team] = games.flatMap(g => Seq(g._1, g._2))
    val bucketFilter: BucketFilter = noiseFilter(nLost)
    val teamsFound: Set[Team] = findTeamsInUpdate(ladderUpdate, clusterer, bucketFilter)
    Statistics.calcMetrics(teamsFound, actualTeamsPlayed) // TODO: add noise filtering
  }

  def evaluate(clusterer: RealClusterer, data: Stream[DataPoint]): Double = {
    val stats: Seq[Metrics] = for {
      (ladderUpdate, games) <- data
      noise = 2 * games.head._1.members.size - 1
    } yield evaluateStep(clusterer, ladderUpdate, games, noise)

    val combinedMetrics: Metrics = stats.reduce(_ + _)
    println(s"\n$combinedMetrics")

    Statistics.fScore(0.5)(combinedMetrics)
  }
}
Example 13
Source File: S3KVPersisted.scala From fotm-info with MIT License
package info.fotm.util

import java.io.ByteArrayInputStream

import com.amazonaws.services.s3.AmazonS3Client
import com.amazonaws.services.s3.model.{GetObjectRequest, ObjectListing, ObjectMetadata, S3ObjectInputStream}
import com.amazonaws.util.IOUtils
import com.twitter.bijection.Bijection

import scala.collection.JavaConverters._
import scala.collection.breakOut
import scala.util.Try

class S3KVPersisted[K, V](bucket: String, keyPathBijection: Bijection[K, String])
                         (implicit valueSerializer: Bijection[V, Array[Byte]])
  extends Persisted[Map[K, V]] {

  val s3client = new AmazonS3Client()

  override def save(state: Map[K, V]): Try[Unit] = Try {
    for ((k, v) <- state) {
      val path: String = keyPathBijection(k)
      val bytes = valueSerializer(v)
      val stream = new ByteArrayInputStream(bytes)
      val meta = new ObjectMetadata()
      meta.setContentLength(bytes.length)
      s3client.putObject(bucket, path, stream, meta)
    }
  }

  override def fetch(): Try[Map[K, V]] = Try {
    val listing: ObjectListing = s3client.listObjects(bucket)
    val bucketEntries = listing.getObjectSummaries.asScala.toList
    val s3keys = bucketEntries.map(_.getKey)

    val result: Map[K, V] = (
      for (s3key <- s3keys) yield {
        println(s"Loading $s3key...")
        val request = new GetObjectRequest(bucket, s3key)
        val s3object = s3client.getObject(request)
        val objectData: S3ObjectInputStream = s3object.getObjectContent
        val bytes = IOUtils.toByteArray(objectData)
        objectData.close()
        println(s"Loaded $s3key! Deserializing...")
        val k = keyPathBijection.inverse(s3key)
        val v = valueSerializer.inverse(bytes)
        println(s"Done with $s3key.")
        (k, v)
      })(breakOut)

    result
  }
}
Example 14
Source File: CommonQueries.scala From daml with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.ledger.on.sql.queries

import java.sql.Connection

import anorm.SqlParser._
import anorm._
import com.daml.ledger.on.sql.Index
import com.daml.ledger.on.sql.queries.Queries._
import com.daml.ledger.participant.state.kvutils.KVOffset
import com.daml.ledger.participant.state.kvutils.api.LedgerRecord
import com.daml.ledger.validator.LedgerStateOperations.{Key, Value}

import scala.collection.{breakOut, immutable}
import scala.util.Try

trait CommonQueries extends Queries {
  protected implicit val connection: Connection

  override final def selectLatestLogEntryId(): Try[Option[Index]] = Try {
    SQL"SELECT MAX(sequence_no) max_sequence_no FROM #$LogTable"
      .as(get[Option[Long]]("max_sequence_no").singleOpt)
      .flatten
  }

  override final def selectFromLog(
      startExclusive: Index,
      endInclusive: Index,
  ): Try[immutable.Seq[(Index, LedgerRecord)]] = Try {
    SQL"SELECT sequence_no, entry_id, envelope FROM #$LogTable WHERE sequence_no > $startExclusive AND sequence_no <= $endInclusive ORDER BY sequence_no"
      .as((long("sequence_no") ~ getBytes("entry_id") ~ getBytes("envelope")).map {
        case index ~ entryId ~ envelope =>
          index -> LedgerRecord(KVOffset.fromLong(index), entryId, envelope)
      }.*)
  }

  override final def selectStateValuesByKeys(keys: Seq[Key]): Try[immutable.Seq[Option[Value]]] =
    Try {
      val results = SQL"SELECT key, value FROM #$StateTable WHERE key IN ($keys)"
        .fold(Map.newBuilder[Key, Value], ColumnAliaser.empty) { (builder, row) =>
          builder += row("key") -> row("value")
        }
        .fold(exceptions => throw exceptions.head, _.result())
      keys.map(results.get)(breakOut)
    }

  override final def updateState(stateUpdates: Seq[(Key, Value)]): Try[Unit] = Try {
    executeBatchSql(updateStateQuery, stateUpdates.map {
      case (key, value) => Seq[NamedParameter]("key" -> key, "value" -> value)
    })
  }

  protected val updateStateQuery: String
}
Example 15
Source File: AnalyzeInconsistenciesResult.scala From CM-Well with Apache License 2.0
package cmwell.analytics.main

import java.io.File
import java.nio.charset.StandardCharsets.UTF_8

import cmwell.analytics.data.InfotonAndIndexWithSystemFields
import cmwell.analytics.util.Connector
import org.apache.commons.io.FileUtils
import org.apache.log4j.LogManager
import org.apache.spark.sql.{Column, DataFrame, Row}
import org.rogach.scallop.{ScallopConf, ScallopOption}

import scala.collection.breakOut

object AnalyzeInconsistenciesResult {

  def main(args: Array[String]): Unit = {

    val logger = LogManager.getLogger(AnalyzeInconsistenciesResult.getClass)

    try {

      object Opts extends ScallopConf(args) {

        val in: ScallopOption[String] = opt[String]("in", short = 'i', descr = "The path to read the (parquet) inconsistencies dataset from", required = true)
        val out: ScallopOption[String] = opt[String]("out", short = 'o', descr = "The path to save the (csv) output to", required = true)
        val shell: ScallopOption[Boolean] = opt[Boolean]("spark-shell", short = 's', descr = "Run a Spark shell", required = false, default = Some(false))

        verify()
      }

      Connector(
        appName = "Analyze InfotonAndIndexWithSystemFields Output",
        sparkShell = Opts.shell()
      ).withSparkSessionDo { spark =>

        val ds: DataFrame = spark.read.parquet(Opts.in())

        import org.apache.spark.sql.functions._

        // A column expression that counts the number of failures for each constraint.
        // This will also include null counts, needed to interpret the results.
        val constraints: Seq[(String, Column)] = InfotonAndIndexWithSystemFields.constraints(ds).map {
          case (name, predicate) => name -> sum(when(predicate, 0L).otherwise(1L)).as(name)
        }(breakOut)

        // Compute the failure counts
        val failureCounts: Row = ds.agg(constraints.head._2, constraints.tail.map(_._2): _*).head

        val results = for {
          i <- constraints.indices
          constraintName = constraints(i)._1
          failureCount = if (failureCounts.isNullAt(i)) 0 else failureCounts.getAs[Long](i)
        } yield s"$constraintName,$failureCount"

        FileUtils.write(new File(Opts.out()), "constraint,failures\n" + results.mkString("\n"), UTF_8)
      }
    } catch {
      case ex: Throwable =>
        logger.error(ex.getMessage, ex)
        System.exit(1)
    }
  }
}
Example 16
Source File: JournaledExchangeNodeBootable.scala From akka-exchange with Apache License 2.0
package com.boldradius.akka_exchange.journal.util

import akka.actor.{Props, ActorSystem}
import akka.cluster.Cluster
import com.boldradius.akka_exchange.journal.{SharedJournalFinder, SharedJournal}
import com.boldradius.akka_exchange.util.ExchangeNodeBootable

import scala.collection.breakOut

abstract class JournaledExchangeNodeBootable extends ExchangeNodeBootable {
  val findJournal: Boolean = true

  val persistentJournal = {
    println("Booting up Journal Finder...")
    system.actorOf(
      Props(
        classOf[SharedJournalFinder]
      ),
      SharedJournalFinder.name
    )
  }
}
Example 17
Source File: GrpcCallActionBuilder.scala From gatling-grpc with Apache License 2.0
package com.github.phisgr.gatling.grpc.action

import com.github.phisgr.gatling.grpc.HeaderPair
import com.github.phisgr.gatling.grpc.check.{GrpcCheck, ResponseExtract}
import com.github.phisgr.gatling.grpc.protocol.GrpcProtocol
import io.gatling.commons.validation.Success
import io.gatling.core.action.Action
import io.gatling.core.action.builder.ActionBuilder
import io.gatling.core.check.{MultipleFindCheckBuilder, ValidatorCheckBuilder}
import io.gatling.core.session.{Expression, ExpressionSuccessWrapper, Session}
import io.gatling.core.structure.ScenarioContext
import io.grpc.{CallOptions, Metadata, MethodDescriptor}

import scala.collection.breakOut

case class GrpcCallActionBuilder[Req, Res](
  requestName: Expression[String],
  method: MethodDescriptor[Req, Res],
  payload: Expression[Req],
  callOptions: Expression[CallOptions] = CallOptions.DEFAULT.expressionSuccess,
  reversedHeaders: List[HeaderPair[_]] = Nil,
  checks: List[GrpcCheck[Res]] = Nil,
  protocolOverride: Option[GrpcProtocol] = None,
  isSilent: Boolean = false
) extends ActionBuilder {
  override def build(ctx: ScenarioContext, next: Action): Action = GrpcCallAction(this, ctx, next)

  def callOptions(callOptions: Expression[CallOptions]) = copy(
    callOptions = callOptions
  )

  def callOptions(callOptions: => CallOptions) = copy(
    callOptions = { _: Session => Success(callOptions) }
  )

  def header[T](key: Metadata.Key[T])(value: Expression[T]) = copy(
    reversedHeaders = HeaderPair(key, value) :: reversedHeaders
  )

  private def mapToList[T, U](s: Seq[T])(f: T => U) = s.map[U, List[U]](f)(breakOut)

  def check(checks: GrpcCheck[Res]*) = copy(
    checks = this.checks ::: checks.toList
  )

  // In fact they can be added to checks using .check
  // but the type Res cannot be inferred there
  def extract[X](
    f: Res => Option[X])(
    ts: (ValidatorCheckBuilder[ResponseExtract, Res, X] => GrpcCheck[Res])*
  ) = {
    val e = ResponseExtract.extract(f)
    copy(
      checks = checks ::: mapToList(ts)(_.apply(e))
    )
  }

  def exists[X](f: Res => Option[X]) = extract(f)(_.exists.build(ResponseExtract.materializer))

  def extractMultiple[X](
    f: Res => Option[Seq[X]])(
    ts: (MultipleFindCheckBuilder[ResponseExtract, Res, X] => GrpcCheck[Res])*
  ) = {
    val e = ResponseExtract.extractMultiple[Res, X](f)
    copy(
      checks = checks ::: mapToList(ts)(_.apply(e))
    )
  }

  def target(protocol: GrpcProtocol) = copy(protocolOverride = Some(protocol))

  def silent = copy(isSilent = true)
}
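The private mapToList helper above uses breakOut's other calling convention: when there is no expected type at the call site to drive inference (here the result feeds straight into a list concatenation), the target collection can be pinned with an explicit type application on map. A minimal standalone sketch of the two equivalent forms, assuming only the standard library:

import scala.collection.breakOut

object TypeApplicationDemo extends App {
  val s: Seq[Int] = Seq(1, 2, 3)

  // With an expected type, breakOut's CanBuildFrom is inferred from the annotation:
  val asList1: List[String] = s.map(_.toString)(breakOut)

  // Without one (e.g. deep inside a generic method), the target type can instead
  // be supplied as an explicit type application on map, as mapToList does above:
  val asList2 = s.map[String, List[String]](_.toString)(breakOut)

  assert(asList1 == asList2)
}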
Example 18
Source File: LicenseDetection.scala From sbt-header with Apache License 2.0
package de.heikoseeberger.sbtheader

import sbt.URL

import scala.collection.breakOut

object LicenseDetection {

  private val spdxMapping =
    License.spdxLicenses.map(l => (l.spdxIdentifier, l))(breakOut): Map[String, SpdxLicense]

  def apply(
      licenses: Seq[(String, URL)],
      organizationName: String,
      startYear: Option[String],
      licenseStyle: LicenseStyle = LicenseStyle.Detailed
  ): Option[License] = {
    val licenseName = licenses match {
      case (name, _) :: Nil => Some(name)
      case _                => None
    }

    for {
      name <- licenseName
      license <- spdxMapping.get(name)
      year <- startYear
    } yield license(year, organizationName, licenseStyle)
  }
}
Example 19
Source File: Dns.scala From perf_tester with Apache License 2.0
package akka.io

import java.net.{ Inet4Address, Inet6Address, InetAddress, UnknownHostException }

import akka.actor._
import akka.routing.ConsistentHashingRouter.ConsistentHashable
import com.typesafe.config.Config

import scala.collection.{ breakOut, immutable }

abstract class Dns {
  def cached(name: String): Option[Dns.Resolved] = None

  def resolve(name: String)(system: ActorSystem, sender: ActorRef): Option[Dns.Resolved] = {
    val ret = cached(name)
    if (ret.isEmpty)
      IO(Dns)(system).tell(Dns.Resolve(name), sender)
    ret
  }
}

object Dns extends ExtensionId[DnsExt] with ExtensionIdProvider {
  sealed trait Command

  case class Resolve(name: String) extends Command with ConsistentHashable {
    override def consistentHashKey = name
  }

  case class Resolved(name: String, ipv4: immutable.Seq[Inet4Address], ipv6: immutable.Seq[Inet6Address])
    extends Command {
    val addrOption: Option[InetAddress] = IpVersionSelector.getInetAddress(ipv4.headOption, ipv6.headOption)

    @throws[UnknownHostException]
    def addr: InetAddress = addrOption match {
      case Some(ipAddress) ⇒ ipAddress
      case None            ⇒ throw new UnknownHostException(name)
    }
  }

  object Resolved {
    def apply(name: String, addresses: Iterable[InetAddress]): Resolved = {
      val ipv4: immutable.Seq[Inet4Address] = addresses.collect({ case a: Inet4Address ⇒ a })(breakOut)
      val ipv6: immutable.Seq[Inet6Address] = addresses.collect({ case a: Inet6Address ⇒ a })(breakOut)
      Resolved(name, ipv4, ipv6)
    }
  }

  def cached(name: String)(system: ActorSystem): Option[Resolved] = {
    Dns(system).cache.cached(name)
  }

  def resolve(name: String)(system: ActorSystem, sender: ActorRef): Option[Resolved] = {
    Dns(system).cache.resolve(name)(system, sender)
  }

  override def lookup() = Dns

  override def createExtension(system: ExtendedActorSystem): DnsExt = new DnsExt(system)

  override def get(system: ActorSystem): DnsExt = super.get(system)
}

class DnsExt(system: ExtendedActorSystem) extends IO.Extension {
  val Settings = new Settings(system.settings.config.getConfig("akka.io.dns"))

  class Settings private[DnsExt] (_config: Config) {
    import _config._
    val Dispatcher: String = getString("dispatcher")
    val Resolver: String = getString("resolver")
    val ResolverConfig: Config = getConfig(Resolver)
    val ProviderObjectName: String = ResolverConfig.getString("provider-object")
  }

  val provider: DnsProvider = system.dynamicAccess.getClassFor[DnsProvider](Settings.ProviderObjectName).get.newInstance()
  val cache: Dns = provider.cache

  val manager: ActorRef = {
    system.systemActorOf(
      props = Props(provider.managerClass, this).withDeploy(Deploy.local).withDispatcher(Settings.Dispatcher),
      name = "IO-DNS")
  }

  def getResolver: ActorRef = manager
}

object IpVersionSelector {
  def getInetAddress(ipv4: Option[Inet4Address], ipv6: Option[Inet6Address]): Option[InetAddress] =
    System.getProperty("java.net.preferIPv6Addresses") match {
      case "true" ⇒ ipv6 orElse ipv4
      case _      ⇒ ipv4 orElse ipv6
    }
}
Example 20
Source File: AkkaBuild.scala From perf_tester with Apache License 2.0
package akka

import java.io.{ FileInputStream, InputStreamReader }
import java.util.Properties

import sbt.Keys._
import sbt._

import scala.collection.breakOut

object AkkaBuild {

  val enableMiMa = true

  lazy val buildSettings = Dependencies.Versions ++ Seq(
    organization := "com.typesafe.akka",
    version := "2.5-SNAPSHOT")

  private def allWarnings: Boolean = System.getProperty("akka.allwarnings", "false").toBoolean

  lazy val defaultSettings = Seq[Setting[_]](
    // compile options
    scalacOptions in Compile ++= Seq("-encoding", "UTF-8", "-target:jvm-1.8", "-feature", "-unchecked", "-Xlog-reflective-calls", "-Xlint"),
    scalacOptions in Compile ++= (if (allWarnings) Seq("-deprecation") else Nil),
    // -XDignore.symbol.file suppresses sun.misc.Unsafe warnings
    javacOptions in compile ++= Seq("-encoding", "UTF-8", "-source", "1.8", "-target", "1.8", "-Xlint:unchecked", "-XDignore.symbol.file"),
    javacOptions in compile ++= (if (allWarnings) Seq("-Xlint:deprecation") else Nil),
    javacOptions in doc ++= Seq(),

    crossVersion := CrossVersion.binary,

    ivyLoggingLevel in ThisBuild := UpdateLogging.Quiet,

    licenses := Seq(("Apache License, Version 2.0", url("http://www.apache.org/licenses/LICENSE-2.0"))),
    homepage := Some(url("http://akka.io/")),

    apiURL := Some(url(s"http://doc.akka.io/api/akka/${version.value}"))
  )

  def loadSystemProperties(fileName: String): Unit = {
    import scala.collection.JavaConverters._
    val file = new File(fileName)
    if (file.exists()) {
      println("Loading system properties from file `" + fileName + "`")
      val in = new InputStreamReader(new FileInputStream(file), "UTF-8")
      val props = new Properties
      props.load(in)
      in.close()
      sys.props ++ props.asScala
    }
  }

  def majorMinor(version: String): Option[String] = """\d+\.\d+""".r.findFirstIn(version)
}
Example 21
Source File: PackageIDsGen.scala From daml with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.codegen.lf

import java.io.File

import com.daml.codegen.Util
import com.daml.lf.data.Ref._

import scala.collection.breakOut
import scala.reflect.runtime.universe._

object PackageIDsGen {
  def generate(util: LFUtil): (File, Iterable[Tree]) = {

    val imports: Seq[Tree] = Seq()

    val packageIdsByModule: Map[ModuleName, PackageId] =
      util.iface.typeDecls.keys.map(id => (id.qualifiedName.module, id.packageId))(breakOut)
    val packageIdBindings = packageIdsByModule.toSeq.sortBy(_._1.dottedName) map {
      case (mn, pid) =>
        q"val ${TermName(mn.dottedName)}: _root_.scala.Predef.String = $pid"
    }

    val packageIdsSrc: Tree =
      q"""
        package ${Util.packageNameToRefTree(util.packageName)} {
          private object `Package IDs` {
            ..$packageIdBindings
          }
        }
      """

    val trees: Seq[Tree] = imports :+ packageIdsSrc
    val filePath = util.mkDamlScalaNameFromDirsAndName(Array(), "PackageIDs").toFileName
    filePath.getParentFile.mkdirs()
    (filePath, trees)
  }

  private[lf] def reference(util: LFUtil)(moduleName: ModuleName) =
    q"`Package IDs`.${TermName(moduleName.dottedName)}"
}
Example 22
Source File: Main.scala From daml with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.codegen

import java.io.File
import java.nio.file.Path

import ch.qos.logback.classic.Level
import com.daml.lf.codegen.conf.Conf
import com.typesafe.scalalogging.StrictLogging
import org.slf4j.{Logger, LoggerFactory}
import scalaz.Cord

import scala.collection.breakOut

object Main extends StrictLogging {

  private val codegenId = "Scala Codegen"

  @deprecated("Use codegen front-end: com.daml.codegen.CodegenMain.main", "0.13.23")
  def main(args: Array[String]): Unit =
    Conf.parse(args) match {
      case Some(conf) =>
        generateCode(conf)
      case None =>
        throw new IllegalArgumentException(
          s"Invalid ${codegenId: String} command line arguments: ${args.mkString(" "): String}")
    }

  def generateCode(conf: Conf): Unit = conf match {
    case Conf(darMap, outputDir, decoderPkgAndClass, verbosity, roots) =>
      setGlobalLogLevel(verbosity)
      logUnsupportedEventDecoderOverride(decoderPkgAndClass)
      val (dars, packageName) = darsAndOnePackageName(darMap)
      CodeGen.generateCode(dars, packageName, outputDir.toFile, CodeGen.Novel, roots)
  }

  private def setGlobalLogLevel(verbosity: Level): Unit = {
    LoggerFactory.getLogger(Logger.ROOT_LOGGER_NAME) match {
      case a: ch.qos.logback.classic.Logger =>
        a.setLevel(verbosity)
        logger.info(s"${codegenId: String} verbosity: ${verbosity.toString}")
      case _ =>
        logger.warn(s"${codegenId: String} cannot set requested verbosity: ${verbosity.toString}")
    }
  }

  private def logUnsupportedEventDecoderOverride(mapping: Option[(String, String)]): Unit =
    mapping.foreach {
      case (a, b) =>
        logger.warn(
          s"${codegenId: String} does not allow overriding Event Decoder, skipping: ${a: String} -> ${b: String}")
    }

  private def darsAndOnePackageName(darMap: Map[Path, Option[String]]): (List[File], String) = {
    val dars: List[File] = darMap.keys.map(_.toFile)(breakOut)
    val uniquePackageNames: Set[String] = darMap.values.collect { case Some(x) => x }(breakOut)
    uniquePackageNames.toSeq match {
      case Seq(packageName) =>
        (dars, packageName)
      case _ =>
        throw new IllegalStateException(
          s"${codegenId: String} expects all dars mapped to the same package name, " +
            s"requested: ${format(darMap): String}")
    }
  }

  private def format(map: Map[Path, Option[String]]): String = {
    val cord = map.foldLeft(Cord("{")) { (str, kv) =>
      str ++ kv._1.toFile.getAbsolutePath ++ "->" ++ kv._2.toString ++ ","
    }
    (cord ++ "}").toString
  }
}
Example 23
Source File: AkkaExecutionSequencerPool.scala From daml with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.grpc.adapter

import java.util.concurrent.atomic.AtomicInteger

import akka.Done
import akka.actor.ActorSystem

import scala.collection.breakOut
import scala.concurrent.duration.{DurationInt, FiniteDuration}
import scala.concurrent.{Await, ExecutionContext, Future}

class AkkaExecutionSequencerPool(
    poolName: String,
    actorCount: Int = AkkaExecutionSequencerPool.defaultActorCount,
    terminationTimeout: FiniteDuration = 30.seconds,
)(implicit system: ActorSystem)
    extends ExecutionSequencerFactory {
  require(actorCount > 0)

  private val counter = new AtomicInteger()

  private val pool =
    Array.fill(actorCount)(
      AkkaExecutionSequencer(s"$poolName-${counter.getAndIncrement()}", terminationTimeout))

  override def getExecutionSequencer: ExecutionSequencer =
    pool(counter.getAndIncrement() % actorCount)

  override def close(): Unit =
    Await.result(closeAsync(), terminationTimeout)

  def closeAsync(): Future[Unit] = {
    implicit val ec: ExecutionContext = system.dispatcher
    val eventuallyClosed: Future[Seq[Done]] = Future.sequence(pool.map(_.closeAsync)(breakOut))
    Future.firstCompletedOf(
      Seq(
        system.whenTerminated.map(_ => ()), // Cut it short if the ActorSystem stops.
        eventuallyClosed.map(_ => ()),
      )
    )
  }
}

object AkkaExecutionSequencerPool {
  private val defaultActorCount: Int = Runtime.getRuntime.availableProcessors() * 8
}
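One detail worth noting in closeAsync: the Future[Seq[Done]] annotation propagates through Future.sequence, so breakOut builds the immutable Seq that Future.sequence needs directly from the Array of sequencers. A minimal sketch of that Array-to-Seq step, assuming only the standard library:

import scala.collection.breakOut
import scala.concurrent.{Await, Future}
import scala.concurrent.duration._
import scala.concurrent.ExecutionContext.Implicits.global

object ArrayToSeqDemo extends App {
  val pool: Array[Int] = Array(1, 2, 3)

  // The expected element and collection types flow in from the annotation on
  // `futures`, letting breakOut build an immutable Seq directly from an Array.
  val futures: Seq[Future[Int]] = pool.map(n => Future(n * 2))(breakOut)

  println(Await.result(Future.sequence(futures), 5.seconds)) // List(2, 4, 6)
}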
Example 24
Source File: CustomScoptReaders.scala From daml with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.extractor.config

import com.daml.lf.data.Ref.Party
import scalaz.OneAnd
import scopt.Read
import scopt.Read.reads

import scala.language.higherKinds
import scala.collection.breakOut
import scala.collection.generic.CanBuildFrom

private[extractor] object CustomScoptReaders {
  implicit val partyRead: Read[Party] = reads { s =>
    Party fromString s fold (e => throw new IllegalArgumentException(e), identity)
  }

  implicit val templateConfigRead: Read[TemplateConfig] = reads { s =>
    s.split(':') match {
      case Array(moduleName, entityName) =>
        TemplateConfig(moduleName, entityName)
      case _ =>
        throw new IllegalArgumentException(
          s"Expected TemplateConfig string: '<moduleName>:<entityName>', got: '$s'")
    }
  }

  implicit def nonEmptySeqRead[F[_], A](
      implicit ev: Read[A],
      target: CanBuildFrom[Nothing, A, F[A]]): Read[OneAnd[F, A]] = reads { s =>
    val Array(hd, tl @ _*) = s split Read.sep
    OneAnd(ev reads hd, tl.map(ev.reads)(breakOut))
  }
}
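nonEmptySeqRead exposes breakOut's mechanism most directly: breakOut simply summons an implicit CanBuildFrom[Nothing, A, To], so the abstract target parameter above is precisely what it resolves, which lets the reader work for any target collection F. A minimal sketch of the same trick, assuming only the standard library (parseInts is an illustrative helper, not part of the project):

import scala.language.higherKinds
import scala.collection.breakOut
import scala.collection.generic.CanBuildFrom

object BuildAnyTarget extends App {
  // breakOut resolves whatever CanBuildFrom[Nothing, A, F[A]] is in scope,
  // so the caller's choice of target collection is threaded through implicitly.
  def parseInts[F[_]](s: String)(implicit target: CanBuildFrom[Nothing, Int, F[Int]]): F[Int] =
    s.split(',').map(_.trim.toInt)(breakOut)

  val asVector = parseInts[Vector]("1, 2, 3")
  val asSet = parseInts[Set]("1, 2, 2")
  println(asVector) // Vector(1, 2, 3)
  println(asSet)    // Set(1, 2)
}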
Example 25
Source File: LogAppendingCommitStrategy.scala From daml with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.ledger.validator

import com.daml.ledger.participant.state.kvutils.DamlKvutils.{
  DamlLogEntry,
  DamlLogEntryId,
  DamlStateKey,
  DamlStateValue
}
import com.daml.ledger.participant.state.kvutils.Envelope
import com.daml.ledger.participant.state.kvutils.export.LedgerDataExporter
import com.daml.ledger.participant.state.v1.ParticipantId

import scala.collection.breakOut
import scala.concurrent.{ExecutionContext, Future}

class LogAppendingCommitStrategy[Index](
    ledgerStateOperations: LedgerStateOperations[Index],
    keySerializationStrategy: StateKeySerializationStrategy,
    ledgerDataExporter: LedgerDataExporter = LedgerDataExporter())(
    implicit executionContext: ExecutionContext)
    extends CommitStrategy[Index] {
  override def commit(
      participantId: ParticipantId,
      correlationId: String,
      entryId: DamlLogEntryId,
      entry: DamlLogEntry,
      inputState: Map[DamlStateKey, Option[DamlStateValue]],
      outputState: Map[DamlStateKey, DamlStateValue]): Future[Index] =
    for {
      serializedKeyValuePairs <- Future.successful(outputState.map {
        case (key, value) =>
          (keySerializationStrategy.serializeStateKey(key), Envelope.enclose(value))
      }(breakOut))
      _ = ledgerDataExporter.addToWriteSet(correlationId, serializedKeyValuePairs)
      _ <- if (serializedKeyValuePairs.nonEmpty) {
        ledgerStateOperations.writeState(serializedKeyValuePairs)
      } else {
        Future.unit
      }
      envelopedLogEntry <- Future.successful(Envelope.enclose(entry))
      _ = ledgerDataExporter
        .addToWriteSet(correlationId, List((entryId.toByteString, envelopedLogEntry)))
      index <- ledgerStateOperations
        .appendToLog(
          entryId.toByteString,
          envelopedLogEntry
        )
    } yield index
}
Example 26
Source File: CommandCompletionSource.scala From daml with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.ledger.client.services.commands

import akka.NotUsed
import akka.stream.scaladsl.Source
import com.daml.grpc.adapter.ExecutionSequencerFactory
import com.daml.grpc.adapter.client.akka.ClientAdapter
import com.daml.ledger.api.v1.command_completion_service.{
  CompletionStreamRequest,
  CompletionStreamResponse
}
import io.grpc.stub.StreamObserver

import scala.collection.{breakOut, immutable}

object CommandCompletionSource {

  def toStreamElements(
      response: CompletionStreamResponse): immutable.Iterable[CompletionStreamElement] = {

    val completions: Vector[CompletionStreamElement] =
      response.completions.map(CompletionStreamElement.CompletionElement)(breakOut)
    response.checkpoint.fold(completions)(cp =>
      completions :+ CompletionStreamElement.CheckpointElement(cp))
  }

  def apply(
      request: CompletionStreamRequest,
      stub: (CompletionStreamRequest, StreamObserver[CompletionStreamResponse]) => Unit)(
      implicit esf: ExecutionSequencerFactory): Source[CompletionStreamElement, NotUsed] = {
    ClientAdapter
      .serverStreaming(request, stub)
      .mapConcat(toStreamElements)
      .log(
        "completion at client", {
          case CompletionStreamElement.CheckpointElement(c) => s"Checkpoint ${c.offset}"
          case CompletionStreamElement.CompletionElement(c) => s"Completion $c"
        }
      )
  }
}