akka.util.ByteString Scala Examples
The following examples show how to use akka.util.ByteString.
The original project, source file, and license are noted above each example.
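Before the project examples, here is a short, self-contained sketch (not taken from any of the projects below) of the core ByteString operations that recur throughout: construction, cheap concatenation, decoding, and incremental building.

import akka.util.ByteString

object ByteStringBasics extends App {
  // Construct immutable byte sequences from a String or raw bytes
  val greeting = ByteString("hello, ")
  val subject  = ByteString.fromString("world")

  // Concatenation is cheap: ByteString behaves like an immutable rope
  val joined = greeting ++ subject
  println(joined.utf8String)               // hello, world
  println(joined.decodeString("US-ASCII")) // explicit charset decoding

  // Incremental construction via ByteStringBuilder
  val builder = ByteString.newBuilder
  builder.putBytes("one".getBytes("UTF-8"))
  builder.putByte(','.toByte)
  builder.putBytes("two".getBytes("UTF-8"))
  println(builder.result().utf8String)     // one,two

  // compact() collapses the rope into one contiguous array for repeated reads
  println(joined.compact.toArray.length)   // 12
}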
Example 1
Source File: AkkaHTTPClient.scala From learn-akka with Apache License 2.0
package com.allaboutscala.learn.akka.client

import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.http.scaladsl.model.{MediaTypes, HttpEntity, HttpMethods, HttpRequest}
import akka.http.scaladsl.unmarshalling.Unmarshal
import akka.stream.ActorMaterializer
import akka.util.ByteString
import com.allaboutscala.learn.akka.http.jsonsupport.{Donuts, JsonSupport}

import scala.concurrent.{Await, Future}
import scala.util.{Failure, Success}
import scala.concurrent.duration._

object AkkaHttpClient extends App with JsonSupport {

  implicit val system = ActorSystem("akka-http-donuts-client")
  implicit val materializer = ActorMaterializer()
  implicit val ec = system.dispatcher

  // HTTP GET request
  val donutsUri = "http://localhost:8080/donuts"
  val donutsHttpRequest = HttpRequest(
    uri = donutsUri,
    method = HttpMethods.GET
  )

  val donutsResponse = Http().singleRequest(donutsHttpRequest)
  donutsResponse
    .onComplete {
      case Success(donutsResponse) =>
        println(s"Raw HttpResponse = $donutsResponse")

        // You should not block using Await.result(...); use flatMap or similar future-sequencing mechanics instead
        val donutsF: Future[Donuts] = Unmarshal(donutsResponse).to[Donuts]
        val donuts: Donuts = Await.result(donutsF, 5.second)
        println(s"Unmarshalled HttpResponse to Case Class = $donuts")

      case Failure(e) =>
        println(s"Failed to HTTP GET $donutsUri, error = ${e.getMessage}")
    }

  Thread.sleep(3000)

  // HTTP POST request
  val jsonDonutInput = ByteString("""{"name":"plain donut", "price":1.50}""")
  val httpPostCreateDonut = HttpRequest(
    uri = "http://localhost:8080/create-donut",
    method = HttpMethods.POST,
    entity = HttpEntity(MediaTypes.`application/json`, jsonDonutInput))

  val createDonutF = for {
    response <- Http().singleRequest(httpPostCreateDonut)
    _ = println(s"Akka HTTP request status = ${response.status}")
    if response.status.isSuccess()
    output <- Unmarshal(response).to[String]
  } yield println(s"HTTP POST request output = $output")
  Await.result(createDonutF, 5.second)

  system.terminate()
}
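The comment in this example points out that blocking with Await.result is discouraged. Purely as an illustration (reusing the donutsUri and Donuts definitions from the example above), the GET-and-unmarshal step could be sequenced without blocking like this:

// Hypothetical non-blocking variant of the GET request above
val donutsFuture: Future[Donuts] =
  Http()
    .singleRequest(HttpRequest(uri = donutsUri, method = HttpMethods.GET))
    .flatMap(response => Unmarshal(response).to[Donuts])

donutsFuture.onComplete {
  case Success(donuts) => println(s"Unmarshalled HttpResponse to Case Class = $donuts")
  case Failure(e)      => println(s"Failed to HTTP GET $donutsUri, error = ${e.getMessage}")
}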
Example 2
Source File: VoiceUDPHandler.scala From AckCord with MIT License
package ackcord.voice

import java.net.InetSocketAddress

import scala.concurrent.duration._
import scala.util.{Failure, Success}

import ackcord.data.{RawSnowflake, UserId}
import akka.NotUsed
import akka.actor.typed._
import akka.actor.typed.scaladsl._
import akka.stream.OverflowStrategy
import akka.stream.scaladsl.{Keep, Sink, Source, SourceQueueWithComplete}
import akka.util.ByteString
import org.slf4j.Logger

object VoiceUDPHandler {

  def apply(
      address: String,
      port: Int,
      ssrc: Int,
      serverId: RawSnowflake,
      userId: UserId,
      soundProducer: Source[ByteString, NotUsed],
      soundConsumer: Sink[AudioAPIMessage, NotUsed],
      parent: ActorRef[VoiceHandler.Command]
  ): Behavior[Command] =
    Behaviors
      .supervise(
        Behaviors.setup[Command] { ctx =>
          implicit val system: ActorSystem[Nothing] = ctx.system

          val ((queue, futIp), watchDone) = soundProducer
            .viaMat(
              VoiceUDPFlow
                .flow(
                  new InetSocketAddress(address, port),
                  ssrc,
                  serverId,
                  userId,
                  Source.queue[Option[ByteString]](0, OverflowStrategy.dropBuffer)
                )
                .watchTermination()(Keep.both)
            )(Keep.right)
            .to(soundConsumer)
            .run()

          ctx.pipeToSelf(futIp) {
            case Success(value) => IPDiscoveryResult(value)
            case Failure(e)     => SendExeption(e)
          }
          ctx.pipeToSelf(watchDone)(_ => ConnectionDied)

          handle(ctx, ctx.log, ssrc, queue, parent)
        }
      )
      .onFailure(
        SupervisorStrategy
          .restartWithBackoff(100.millis, 5.seconds, 1D)
          .withResetBackoffAfter(10.seconds)
          .withMaxRestarts(5)
      )

  def handle(
      ctx: ActorContext[Command],
      log: Logger,
      ssrc: Int,
      queue: SourceQueueWithComplete[Option[ByteString]],
      parent: ActorRef[VoiceHandler.Command]
  ): Behavior[Command] = Behaviors.receiveMessage {
    case SendExeption(e) => throw e
    case ConnectionDied  => Behaviors.stopped
    case Shutdown =>
      queue.complete()
      Behaviors.same
    case IPDiscoveryResult(VoiceUDPFlow.FoundIP(localAddress, localPort)) =>
      parent ! VoiceHandler.GotLocalIP(localAddress, localPort)
      Behaviors.same
    case SetSecretKey(key) =>
      queue.offer(key)
      Behaviors.same
  }

  sealed trait Command

  case object Shutdown extends Command

  private case class SendExeption(e: Throwable) extends Command
  private case object ConnectionDied extends Command
  private case class IPDiscoveryResult(foundIP: VoiceUDPFlow.FoundIP) extends Command
  private[voice] case class SetSecretKey(key: Option[ByteString]) extends Command
}
Example 3
Source File: LoadProduct.scala From pfhais with Creative Commons Attribution Share Alike 4.0 International
package com.wegtam.books.pfhais.impure.usecases

import java.util.UUID

import akka.http.scaladsl.Http
import akka.http.scaladsl.model._
import akka.util.ByteString
import com.wegtam.books.pfhais.impure.models._
import com.wegtam.books.pfhais.impure.models.TypeGenerators._
import io.circe.parser._

import scala.collection.immutable._

class LoadProduct extends BaseUseCaseSpec {
  private final val http = Http()

  override protected def afterEach(): Unit = {
    flyway.clean()
    super.afterEach()
  }

  "Loading a Product by ID" when {
    "the ID does exist" must {
      val expectedStatus = StatusCodes.OK
      s"return $expectedStatus and the Product" in {
        genProduct.sample match {
          case None => fail("Could not generate data sample!")
          case Some(p) =>
            for {
              _    <- repo.saveProduct(p)
              rows <- repo.loadProduct(p.id)
              resp <- http.singleRequest(
                HttpRequest(
                  method = HttpMethods.GET,
                  uri = s"$baseUrl/product/${p.id}",
                  headers = Seq(),
                  entity = HttpEntity(
                    contentType = ContentTypes.`application/json`,
                    data = ByteString("")
                  )
                )
              )
              body <- resp.entity.dataBytes.runFold(ByteString(""))(_ ++ _)
            } yield {
              withClue("Seeding product data failed!")(rows must not be(empty))
              resp.status must be(expectedStatus)
              decode[Product](body.utf8String) match {
                case Left(e)  => fail(s"Could not decode response: $e")
                case Right(d) => d mustEqual p
              }
            }
        }
      }
    }

    "the ID does not exist" must {
      val expectedStatus = StatusCodes.NotFound
      s"return $expectedStatus" in {
        val id = UUID.randomUUID
        for {
          resp <- http.singleRequest(
            HttpRequest(
              method = HttpMethods.GET,
              uri = s"$baseUrl/product/$id",
              headers = Seq(),
              entity = HttpEntity(
                contentType = ContentTypes.`application/json`,
                data = ByteString("")
              )
            )
          )
        } yield {
          resp.status must be(expectedStatus)
        }
      }
    }
  }
}
Example 4
Source File: LoadProducts.scala From pfhais with Creative Commons Attribution Share Alike 4.0 International
package com.wegtam.books.pfhais.impure.usecases

import akka.http.scaladsl.Http
import akka.http.scaladsl.model._
import akka.util.ByteString
import cats.implicits._
import com.wegtam.books.pfhais.impure.models._
import com.wegtam.books.pfhais.impure.models.TypeGenerators._
import io.circe.parser._

import scala.collection.immutable._
import scala.concurrent.Future

class LoadProducts extends BaseUseCaseSpec {
  private final val http = Http()

  override protected def afterEach(): Unit = {
    flyway.clean()
    super.afterEach()
  }

  "Loading all products" when {
    "no products exist" must {
      val expectedStatus = StatusCodes.OK
      s"return $expectedStatus and an empty list" in {
        for {
          resp <- http.singleRequest(
            HttpRequest(
              method = HttpMethods.GET,
              uri = s"$baseUrl/products",
              headers = Seq(),
              entity = HttpEntity(
                contentType = ContentTypes.`application/json`,
                data = ByteString("")
              )
            )
          )
          body <- resp.entity.dataBytes.runFold(ByteString(""))(_ ++ _)
        } yield {
          resp.status must be(expectedStatus)
          decode[List[Product]](body.utf8String) match {
            case Left(e)  => fail(s"Could not decode response: $e")
            case Right(d) => d must be(empty)
          }
        }
      }
    }

    "products exist" must {
      val expectedStatus = StatusCodes.OK
      s"return $expectedStatus and a list with all products" in {
        genProducts.sample match {
          case None => fail("Could not generate data sample!")
          case Some(ps) =>
            for {
              _ <- Future.sequence(ps.map(p => repo.saveProduct(p)))
              resp <- http.singleRequest(
                HttpRequest(
                  method = HttpMethods.GET,
                  uri = s"$baseUrl/products",
                  headers = Seq(),
                  entity = HttpEntity(
                    contentType = ContentTypes.`application/json`,
                    data = ByteString("")
                  )
                )
              )
              body <- resp.entity.dataBytes.runFold(ByteString(""))(_ ++ _)
            } yield {
              resp.status must be(expectedStatus)
              decode[List[Product]](body.utf8String) match {
                case Left(e)  => fail(s"Could not decode response: $e")
                case Right(d) => d.sorted mustEqual ps.sorted
              }
            }
        }
      }
    }
  }
}
Example 5
Source File: writableHelperTest.scala From api-first-hand with MIT License
package de.zalando.play.controllers

import akka.util.ByteString
import de.zalando.play.controllers.ResponseWriters.choose
import org.specs2.mutable.Specification
import play.api.http.Writeable
import play.api.mvc.RequestHeader

import scala.concurrent.ExecutionContext.Implicits

// NOTE: the opening of this spec (the Specification class declaration and the
// fixtures `any`, `binaryString` and `catchAll`) did not survive extraction;
// only the excerpt below remains.

  override val custom: Seq[(String, ParserWrapper[_])] = Seq(
    "text/plain" -> any
  )
}

"WrappedBodyParsers" should {
  "find something" in {
    WrappedBodyParsers.anyParser[Any] must_== Nil
  }
  "not find anything for wrong type" in {
    binaryString.anyParser[String] must_== Nil
  }
  "find something for correct type" in {
    binaryString.anyParser[BinaryString].size must_== 1
  }
  "find something for every type if target is 'Any'" in {
    catchAll.anyParser[String].size must_== 1
    catchAll.anyParser[BinaryString].size must_== 1
  }
}
}

object TestEnvironment {
  import Implicits.global

  val transformSeq: Seq[Any] => ByteString = a => ByteString.fromString(a.toString)
  val transformStr: String => ByteString = ByteString.fromString

  val seqText: WriteableWrapper[Seq[Any]] = Writeable(transformSeq, Some("text/plain"))
  val stringText: WriteableWrapper[String] = Writeable(transformStr, Some("text/plain"))

  val reg = Seq(seqText, stringText)
}
Example 6
Source File: writeableHelper.scala From api-first-hand with MIT License
package de.zalando.play.controllers

import akka.util.ByteString
import play.api.http.Writeable
import play.api.libs.json._
import play.api.mvc.Results.{ Redirect, Status }
import play.api.mvc.{ AnyContentAsMultipartFormData, RequestHeader, Results }

import scala.language.implicitConversions

case class WriteableWrapper[T](w: Writeable[T], m: Manifest[T])

object WriteableWrapper {
  implicit def writeable2wrapper[T](w: Writeable[T])(implicit m: Manifest[T]): WriteableWrapper[T] =
    WriteableWrapper(w, m)

  implicit val anyContentAsMultipartFormWritable: Writeable[AnyContentAsMultipartFormData] = {
    MultipartFormDataWritable.singleton.map(_.mdf)
  }
}

object ResponseWriters extends ResponseWritersBase

trait ResponseWritersBase {
  type ContentType = String

  def custom: Seq[WriteableWrapper[_]] = Seq.empty

  case class choose[T](mimeType: ContentType) {
    def apply[R <: Any](registry: Seq[WriteableWrapper[_]] = custom)(implicit m: Manifest[R]): Option[Writeable[R]] =
      registry filter {
        _.w.contentType.exists(_ == mimeType)
      } find { p =>
        m.runtimeClass.isAssignableFrom(p.m.runtimeClass)
      } map {
        _.asInstanceOf[WriteableWrapper[R]]
      } map (_.w)
  }

  implicit val jsonTranslatedParsingErrorWrites = Json.writes[TranslatedParsingError]

  implicit val jsonTranslatedParsingErrorsContainerWrites = Json.writes[TranslatedParsingErrorsContainer]
}

object WrappedBodyParsers extends WrappedBodyParsersBase

trait WrappedBodyParsersBase {
  implicit def parser2parserWrapper[T](p: Parser[T])(implicit m: Manifest[T]): ParserWrapper[T] =
    ParserWrapper(p, m)

  type Parser[T] = ByteString => T

  case class ParserWrapper[T](p: Parser[T], m: Manifest[T])

  val custom: Seq[(String, ParserWrapper[_])] = Seq.empty

  def anyParser[T](implicit manifest: Manifest[T]): Seq[(String, Parser[T])] =
    custom.filter(_._2.m.runtimeClass.isAssignableFrom(manifest.runtimeClass)).map { e =>
      e.copy(_2 = e._2.asInstanceOf[ParserWrapper[T]].p)
    }

  def optionParser[T](implicit manifest: Manifest[T]): Seq[(String, Parser[Option[T]])] =
    anyParser[Option[T]]
}

trait ResultWrapper[ResultT] {
  val emptyByteString = akka.util.CompactByteString.empty

  def statusCode: Int

  def result: ResultT

  def toResultWithWriter(implicit writer: Writeable[ResultT]): play.api.mvc.Result =
    if (statusCode / 100 == 3)
      Redirect(result.toString, statusCode)
    else
      Status(statusCode)(result)

  def writer: String => Option[Writeable[ResultT]]

  def toResult(mimeType: String): Option[play.api.mvc.Result] =
    if (statusCode / 100 == 3)
      Option(Redirect(result.toString, statusCode))
    else
      writer(mimeType).map(Status(statusCode)(result)(_))
}
Example 7
Source File: MultipartFormDataWritable.scala From api-first-hand with MIT License
package de.zalando.play.controllers

import java.nio.file.{ Files, Paths }

import akka.util.ByteString
import play.api.http.{ HeaderNames, Writeable }
import play.api.libs.Files.TemporaryFile
import play.api.mvc.MultipartFormData.FilePart
import play.api.mvc.{ Codec, MultipartFormData }

object MultipartFormDataWritable {

  val boundary = "--------ABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890"

  def formatDataParts(data: Map[String, Seq[String]]): ByteString = {
    val dataParts = data.flatMap {
      case (key, values) =>
        values.map { value =>
          val name = s""""$key""""
          s"--$boundary\r\n${HeaderNames.CONTENT_DISPOSITION}: form-data; name=$name\r\n\r\n$value\r\n"
        }
    }.mkString("")
    val bytes: ByteString = Codec.utf_8.encode(dataParts)
    bytes
  }

  def filePartHeader(file: FilePart[TemporaryFile]): ByteString = {
    val name = s""""${file.key}""""
    val filename = s""""${file.filename}""""
    val contentType = file.contentType.map { ct =>
      s"${HeaderNames.CONTENT_TYPE}: $ct\r\n"
    }.getOrElse("")
    Codec.utf_8.encode(s"--$boundary\r\n${HeaderNames.CONTENT_DISPOSITION}: form-data; name=$name; filename=$filename\r\n$contentType\r\n")
  }

  val singleton = Writeable[MultipartFormData[TemporaryFile]](
    transform = { form: MultipartFormData[TemporaryFile] =>
      formatDataParts(form.dataParts) ++
        form.files.flatMap { file =>
          val fileBytes = Files.readAllBytes(Paths.get(file.ref.file.getAbsolutePath))
          filePartHeader(file) ++ fileBytes ++ Codec.utf_8.encode("\r\n")
        } ++
        Codec.utf_8.encode(s"--$boundary--")
    },
    contentType = Some(s"multipart/form-data; boundary=$boundary")
  )
}
Example 8
Source File: TestHelper.scala From nexus-kg with Apache License 2.0
package ch.epfl.bluebrain.nexus.kg

import java.time.Clock
import java.util.UUID

import akka.stream.Materializer
import akka.stream.scaladsl.Source
import akka.util.ByteString
import ch.epfl.bluebrain.nexus.commons.test.{EitherValues, Randomness}
import ch.epfl.bluebrain.nexus.iam.client.types.Identity.Anonymous
import ch.epfl.bluebrain.nexus.iam.client.types.{AccessControlList, Identity, Permission, ResourceAccessControlList}
import ch.epfl.bluebrain.nexus.kg.config.Schemas.unconstrainedSchemaUri
import ch.epfl.bluebrain.nexus.kg.resources.ResourceF.Value
import ch.epfl.bluebrain.nexus.kg.resources.{Ref, ResId, ResourceF}
import ch.epfl.bluebrain.nexus.kg.storage.AkkaSource
import ch.epfl.bluebrain.nexus.rdf.Iri.AbsoluteIri
import ch.epfl.bluebrain.nexus.rdf.implicits._
import io.circe.Json

trait TestHelper extends EitherValues with Randomness {

  private val clock = Clock.systemUTC()

  val read: Permission  = Permission.unsafe("resources/read")
  val write: Permission = Permission.unsafe("files/write")

  def consume(source: AkkaSource)(implicit mt: Materializer): String = {
    import org.scalatest.concurrent.ScalaFutures._
    source.runFold("")(_ ++ _.utf8String).futureValue
  }

  def produce(string: String, chunkSize: Int = 100): AkkaSource =
    Source(string.grouped(chunkSize).map(ByteString(_)).toList)

  def resourceAcls(acl: AccessControlList): ResourceAccessControlList =
    ResourceAccessControlList(
      url"http://example.com/id",
      1L,
      Set.empty,
      clock.instant(),
      Anonymous,
      clock.instant(),
      Anonymous,
      acl
    )

  def simpleV(
      id: ResId,
      value: Json,
      rev: Long = 1L,
      types: Set[AbsoluteIri] = Set.empty,
      deprecated: Boolean = false,
      schema: Ref = Ref(unconstrainedSchemaUri),
      created: Identity = Anonymous,
      updated: Identity = Anonymous
  )(implicit clock: Clock): ResourceF[Value] =
    ResourceF(
      id,
      rev,
      types,
      deprecated,
      Map.empty,
      None,
      clock.instant(),
      clock.instant(),
      created,
      updated,
      schema,
      Value(value, value.contextValue, value.toGraph(id.value).rightValue)
    )

  def simpleV(res: ResourceF[Json])(implicit clock: Clock) =
    ResourceF(
      res.id,
      res.rev,
      res.types,
      res.deprecated,
      Map.empty,
      None,
      clock.instant(),
      clock.instant(),
      res.createdBy,
      res.updatedBy,
      res.schema,
      Value(res.value, res.value.contextValue, res.value.toGraph(res.id.value).rightValue)
    )

  def genUUID: UUID = UUID.randomUUID()

  def genIri: AbsoluteIri = url"http://example.com/" + genUUID.toString

  private def sourceInChunks(input: String): AkkaSource =
    Source.fromIterator(() => input.grouped(10000).map(ByteString(_)))

  def genSource: AkkaSource = sourceInChunks(genString())
}
Example 9
Source File: CreateDonutTest.scala From learn-akka with Apache License 2.0
package com.allaboutscala.learn.akka.http

import akka.http.scaladsl.model.{HttpEntity, HttpMethods, HttpRequest, MediaTypes}
import akka.http.scaladsl.testkit.ScalatestRouteTest
import akka.util.ByteString
import com.allaboutscala.learn.akka.http.routes.DonutRoutes
import org.scalatest.{Matchers, WordSpec}

class CreateDonutTest extends WordSpec with Matchers with ScalatestRouteTest {

  val donutRoutes = new DonutRoutes().route()

  "Donut API" should {
    "Create a valid Donut when posting JSON to /create-donut path" in {
      val jsonDonutInput = ByteString("""{"name":"plain donut", "price":1.50}""")
      val httpPostCreateDonut = HttpRequest(
        uri = "http://localhost:8080/create-donut",
        method = HttpMethods.POST,
        entity = HttpEntity(MediaTypes.`application/json`, jsonDonutInput))

      httpPostCreateDonut ~> donutRoutes ~> check {
        status.isSuccess() shouldEqual true
        status.intValue() shouldEqual 201
        status.reason shouldEqual "Created"
      }
    }
  }
}
Example 10
Source File: IPDiscoveryFlow.scala From AckCord with MIT License
package ackcord.voice

import java.nio.ByteOrder

import scala.concurrent.{Future, Promise}

import akka.stream.scaladsl.Flow
import akka.stream.stage._
import akka.stream.{Attributes, FlowShape, Inlet, Outlet}
import akka.util.ByteString

class IPDiscoveryFlow(openValve: () => Unit)
    extends GraphStageWithMaterializedValue[FlowShape[ByteString, ByteString], Future[VoiceUDPFlow.FoundIP]] {

  val in: Inlet[ByteString]   = Inlet("IPDiscoveryFlow.in")
  val out: Outlet[ByteString] = Outlet("IPDiscoveryFlow.out")

  override def shape: FlowShape[ByteString, ByteString] = FlowShape(in, out)

  override def createLogicAndMaterializedValue(
      inheritedAttributes: Attributes
  ): (GraphStageLogic, Future[VoiceUDPFlow.FoundIP]) = {
    val promise = Promise[VoiceUDPFlow.FoundIP]
    val logic = new GraphStageLogicWithLogging(shape) with InHandler with OutHandler {
      override def onPush(): Unit = {
        val data = grab(in)
        log.debug(s"Grabbing data for IP discovery $data")
        val byteBuf = data.asByteBuffer.order(ByteOrder.BIG_ENDIAN)
        val tpe     = byteBuf.getShort
        require(tpe == 0x2, s"Was expecting IP discovery result, got $tpe")

        byteBuf.getShort // Length
        byteBuf.getInt   // SSRC

        val nullTermString = new Array[Byte](64)
        byteBuf.get(nullTermString)
        val address = new String(nullTermString, 0, nullTermString.iterator.takeWhile(_ != 0).length)
        val port    = byteBuf.getChar.toInt // Char is unsigned short

        promise.success(VoiceUDPFlow.FoundIP(address, port))
        log.debug("Success doing IP discovery")

        setHandler(
          in,
          new InHandler {
            override def onPush(): Unit = push(out, grab(in))
          }
        )
        openValve()
      }

      override def onPull(): Unit = pull(in)

      override def onUpstreamFailure(ex: Throwable): Unit = {
        promise.tryFailure(new Exception("Connection failed.", ex))
        super.onUpstreamFailure(ex)
      }

      setHandlers(in, out, this)
    }

    (logic, promise.future)
  }
}

object IPDiscoveryFlow {
  def flow(openValve: () => Unit): Flow[ByteString, ByteString, Future[VoiceUDPFlow.FoundIP]] =
    Flow.fromGraph(new IPDiscoveryFlow(openValve))
}
Example 11
Source File: JdbcFlow.scala From fusion-data with Apache License 2.0
package mass.connector.sql

import java.nio.charset.{ Charset, StandardCharsets }
import java.sql.ResultSet

import akka.NotUsed
import akka.stream.scaladsl.Flow
import akka.util.ByteString
import fusion.jdbc.util.JdbcUtils

import scala.collection.immutable

case class JdbcResultSet(rs: ResultSet, values: immutable.IndexedSeq[AnyRef])

object JdbcFlow {

  def flowToText(valueSeparator: Char = ','): Flow[immutable.IndexedSeq[AnyRef], String, NotUsed] =
    Flow[immutable.IndexedSeq[AnyRef]].map { values =>
      val builder = new java.lang.StringBuilder()
      var i = 0
      while (i < values.length) {
        builder.append(values(i).toString)
        i += 1
        if (i < values.length) {
          builder.append(valueSeparator)
        }
      }
      builder.toString
    }

  def flowToSeq: Flow[ResultSet, immutable.IndexedSeq[AnyRef], NotUsed] =
    Flow[ResultSet].map { rs =>
      val metaData = rs.getMetaData
      (1 to rs.getMetaData.getColumnCount).map { i =>
        val typ = metaData.getColumnType(i)
        if (JdbcUtils.isString(typ)) {
          rs.getString(i)
        } else rs.getObject(i)
      }
    }

  def flowToByteString(
      valueSeparator: Char = ',',
      charset: Charset = StandardCharsets.UTF_8): Flow[immutable.IndexedSeq[AnyRef], ByteString, NotUsed] =
    Flow[immutable.IndexedSeq[AnyRef]].map { values =>
      val builder = ByteString.newBuilder
      var i = 0
      while (i < values.length) {
        builder.putBytes(values(i).toString.getBytes(charset))
        i += 1
        if (i < values.length) {
          builder.putByte(valueSeparator.toByte)
        }
      }
      builder.result()
    }

  def flowJdbcResultSet: Flow[ResultSet, JdbcResultSet, NotUsed] =
    Flow[ResultSet].map { rs =>
      val metaData = rs.getMetaData
      JdbcResultSet(rs, (1 to metaData.getColumnCount).map(i => rs.getObject(i)))
    }
}
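These flows compose naturally. As a hedged usage sketch (the rows source, the implicit materializer, and the output path are hypothetical, not part of the original file), result-set rows could be rendered to delimited ByteStrings and streamed to a file like this:

// Hypothetical wiring of the flows above; assumes rows: Source[ResultSet, NotUsed]
// and an implicit Materializer in scope.
import java.nio.file.Paths
import akka.stream.scaladsl.FileIO
import akka.util.ByteString

rows
  .via(JdbcFlow.flowToSeq)             // ResultSet -> IndexedSeq[AnyRef]
  .via(JdbcFlow.flowToByteString(';')) // each row -> ';'-delimited ByteString (no terminator)
  .intersperse(ByteString("\n"))       // add row separators
  .runWith(FileIO.toPath(Paths.get("/tmp/rows.csv")))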
Example 12
Source File: MaterializeValue.scala From fusion-data with Apache License 2.0
package example.akkastream.graph

import akka.NotUsed
import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{ Flow, Keep, RunnableGraph, Sink, Source, Tcp }
import akka.util.ByteString

import scala.concurrent.{ Future, Promise }

object MaterializeValue {
  implicit val system = ActorSystem()
  implicit val mat = ActorMaterializer()
  import system.dispatcher

  case class MyClass(private val p: Promise[Option[Int]], conn: Tcp.OutgoingConnection) extends AutoCloseable {
    override def close(): Unit = p.trySuccess(None)
  }

  // Materializes to Promise[Option[Int]]
  val source: Source[Int, Promise[Option[Int]]] = Source.maybe[Int]

  // Materializes to NotUsed
  val flow1: Flow[Int, Int, NotUsed] = Flow[Int].take(100)

  // Materializes to Promise[Option[Int]] (Keep.left)
  val nestedSource: Source[Int, Promise[Option[Int]]] =
    source.viaMat(flow1)(Keep.left).named("nestedSource")

  // viaMat === via()(Keep.left)
  // val nestedSource2: Source[Int, NotUsed] = source.viaMat(flow1)(Keep.right)

  // Materializes to NotUsed
  val flow2: Flow[Int, ByteString, NotUsed] = Flow[Int].map(i => ByteString(i.toString))

  // Materializes to Future[Tcp.OutgoingConnection] (Keep.right)
  val flow3: Flow[ByteString, ByteString, Future[Tcp.OutgoingConnection]] =
    Tcp().outgoingConnection("localhost", 8080)

  val nestedFlow: Flow[Int, ByteString, Future[Tcp.OutgoingConnection]] =
    flow2.viaMat(flow3)(Keep.right)
  val nestedFlow2: Flow[Int, ByteString, NotUsed] =
    flow2.viaMat(flow3)(Keep.left) // flow2.via(flow3)
  val nestedFlow3: Flow[Int, ByteString, (NotUsed, Future[Tcp.OutgoingConnection])] =
    flow2.viaMat(flow3)(Keep.both)

  // Materializes to Future[String] (Keep.right)
  val sink: Sink[ByteString, Future[String]] =
    Sink.fold[String, ByteString]("")(_ + _.utf8String)

  val nestedSink: Sink[Int, (Future[Tcp.OutgoingConnection], Future[String])] =
    nestedFlow.toMat(sink)(Keep.both)

  def f(p: Promise[Option[Int]], rest: (Future[Tcp.OutgoingConnection], Future[String])): Future[MyClass] = {
    val connFuture = rest._1
    connFuture.map(outConn => MyClass(p, outConn))
  }

  // Materializes to Future[MyClass]
  val runnableGraph: RunnableGraph[Future[MyClass]] =
    nestedSource.toMat(nestedSink)(f)

  val r: RunnableGraph[Promise[Option[Int]]] =
    nestedSource.toMat(nestedSink)(Keep.left)
  val r2: RunnableGraph[(Future[Tcp.OutgoingConnection], Future[String])] =
    nestedSource.toMat(nestedSink)(Keep.right)
}
Example 13
Source File: EchoDemo.scala From fusion-data with Apache License 2.0
package example.akkastream.streamio

import akka.NotUsed
import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{ Flow, Framing, Sink, Source, Tcp }
import akka.util.ByteString
import example.akkastream.streamio.EchoServer.system

import scala.concurrent.Future
import scala.io.StdIn

object EchoServer extends App {
  implicit val system = ActorSystem()
  implicit val mat = ActorMaterializer()

  val connections = Tcp().bind("localhost", 8888)

  connections.runForeach { connection =>
    println(s"New connection from: ${connection.remoteAddress}")

    val echo: Flow[ByteString, ByteString, NotUsed] = Flow[ByteString]
      .via(Framing.delimiter(ByteString("\n"), 256, true))
      .map(_.utf8String)
      .map(_ + "!!!\n")
      .map(ByteString(_))

    connection.handleWith(echo)
  }

  StdIn.readLine()
  system.terminate()
}

object EchoClient extends App {
  implicit val system = ActorSystem()
  implicit val mat = ActorMaterializer()

  val connection = Tcp().outgoingConnection("localhost", 8888)

  val replParser = Flow[String].takeWhile(_ != "q").concat(Source.single("BYE")).map { elem =>
    println(s"send msg: $elem")
    ByteString(s"$elem\n")
  }

  val repl = Flow[ByteString]
    .via(Framing.delimiter(ByteString("\n"), maximumFrameLength = 256, allowTruncation = true))
    .map(_.utf8String)
    .map(text => println("Server: " + text))
    .map(_ => StdIn.readLine("> "))
    .via(replParser)

  val connected: Future[Tcp.OutgoingConnection] = connection.join(repl).run()

  // StdIn.readLine()
  // system.terminate()
}

object EchoDemo {}
Example 14
Source File: UtilitiesSpec.scala From incubator-toree with Apache License 2.0
package org.apache.toree.kernel.protocol.v5.kernel

import akka.util.ByteString
import org.apache.toree.communication.ZMQMessage
import org.apache.toree.kernel.protocol.v5._
import org.scalatest.{FunSpec, Matchers}

class UtilitiesSpec extends FunSpec with Matchers {

  val header: Header = Header(
    "<UUID>", "<STRING>", "<UUID>", "<STRING>", "<FLOAT>"
  )
  val parentHeader : ParentHeader = ParentHeader(
    "<PARENT-UUID>", "<PARENT-STRING>", "<PARENT-UUID>", "<PARENT-STRING>", "<PARENT-FLOAT>"
  )
  val kernelMessage = KernelMessage(
    Seq("<STRING-1>","<STRING-2>").map(x => x.getBytes),
    "<SIGNATURE>", header, parentHeader, Map(), "<STRING>"
  )

  val zmqMessage = ZMQMessage(
    ByteString("<STRING-1>".replaceAll("""\s""", "").getBytes),
    ByteString("<STRING-2>".replaceAll("""\s""", "").getBytes),
    ByteString("<IDS|MSG>".replaceAll("""\s""", "").getBytes),
    ByteString("<SIGNATURE>".replaceAll("""\s""", "").getBytes),
    ByteString(
      """
      {
        "msg_id": "<UUID>",
        "username": "<STRING>",
        "session": "<UUID>",
        "msg_type": "<STRING>",
        "version": "<FLOAT>"
      }
      """.stripMargin.replaceAll("""\s""", "").getBytes),
    ByteString(
      """
      {
        "msg_id": "<PARENT-UUID>",
        "username": "<PARENT-STRING>",
        "session": "<PARENT-UUID>",
        "msg_type": "<PARENT-STRING>",
        "version": "<PARENT-FLOAT>"
      }
      """.stripMargin.replaceAll("""\s""", "").getBytes),
    ByteString("{}".replaceAll("""\s""", "").getBytes),
    ByteString("<STRING>".replaceAll("""\s""", "").getBytes)
  )

  describe("Utilities") {
    describe("implicit #KernelMessageToZMQMessage") {
      it("should correctly convert a kernel message to a ZMQMessage") {
        Utilities.KernelMessageToZMQMessage(kernelMessage) should equal (zmqMessage)
      }
    }

    describe("implicit #ZMQMessageToKernelMessage") {
      it("should correctly convert a ZMQMessage to a kernel message") {
        Utilities.ZMQMessageToKernelMessage(zmqMessage) should equal (kernelMessage)
      }
    }

    describe("implicit conversions should be inverses of each other") {
      it("should convert back to the original message, ZMQ -> Kernel -> ZMQ") {
        Utilities.KernelMessageToZMQMessage(
          Utilities.ZMQMessageToKernelMessage(zmqMessage)
        ) should equal (zmqMessage)
      }
      it("should convert back to the original message, Kernel -> ZMQ -> Kernel") {
        Utilities.ZMQMessageToKernelMessage(
          Utilities.KernelMessageToZMQMessage(kernelMessage)
        ) should equal (kernelMessage)
      }
    }

    describe("implicit #StringToByteString") {
      it("should correctly convert a string to a ByteString") {
        val someString = "some content"
        val expected = ByteString(someString)

        Utilities.StringToByteString(someString) should be (expected)
      }
    }

    describe("implicit #ByteStringToString") {
      it("should correctly convert a ByteString to a string") {
        val expected = "some content"
        val byteString = ByteString(expected)

        Utilities.ByteStringToString(byteString) should be (expected)
      }
    }
  }
}
Example 15
Source File: StdinSpec.scala From incubator-toree with Apache License 2.0
package org.apache.toree.kernel.protocol.v5.kernel.socket

import java.nio.charset.Charset

import akka.actor.{Props, ActorSelection, ActorRef, ActorSystem}
import akka.testkit.{TestProbe, ImplicitSender, TestKit}
import akka.util.ByteString
import org.apache.toree.communication.ZMQMessage
import org.apache.toree.kernel.protocol.v5.kernel.Utilities._
import org.apache.toree.kernel.protocol.v5Test._
import org.apache.toree.kernel.protocol.v5.{KernelMessage, SystemActorType}
import org.apache.toree.kernel.protocol.v5.kernel.ActorLoader
import com.typesafe.config.ConfigFactory
import org.scalatest.mock.MockitoSugar
import org.scalatest.{Matchers, FunSpecLike}
import org.mockito.Mockito._
import org.mockito.Matchers._
import test.utils.MaxAkkaTestTimeout

object StdinSpec {
  val config = """
    akka {
      loglevel = "WARNING"
    }"""
}

class StdinSpec extends TestKit(ActorSystem(
  "StdinActorSpec",
  ConfigFactory.parseString(StdinSpec.config),
  org.apache.toree.Main.getClass.getClassLoader
)) with ImplicitSender with FunSpecLike with Matchers with MockitoSugar {
  describe("Stdin") {
    val socketFactory = mock[SocketFactory]
    val actorLoader = mock[ActorLoader]
    val socketProbe : TestProbe = TestProbe()
    when(socketFactory.Stdin(any(classOf[ActorSystem]), any(classOf[ActorRef]))).thenReturn(socketProbe.ref)

    val relayProbe : TestProbe = TestProbe()
    val relaySelection : ActorSelection = system.actorSelection(relayProbe.ref.path)
    when(actorLoader.load(SystemActorType.KernelMessageRelay)).thenReturn(relaySelection)

    val stdin = system.actorOf(Props(classOf[Stdin], socketFactory, actorLoader))

    describe("#receive") {
      it("( KernelMessage ) should reply with a ZMQMessage via the socket") {
        // Use the implicit to convert the KernelMessage to ZMQMessage
        val MockZMQMessage : ZMQMessage = MockKernelMessage

        stdin ! MockKernelMessage

        socketProbe.expectMsg(MockZMQMessage)
      }

      it("( ZMQMessage ) should forward ZMQ Strings and KernelMessage to Relay") {
        // Use the implicit to convert the KernelMessage to ZMQMessage
        val MockZMQMessage : ZMQMessage = MockKernelMessage

        stdin ! MockZMQMessage

        // Should get the last four (assuming no buffer) strings in UTF-8
        val zmqStrings = MockZMQMessage.frames.map((byteString: ByteString) =>
          new String(byteString.toArray, Charset.forName("UTF-8"))
        ).takeRight(4)

        val kernelMessage: KernelMessage = MockZMQMessage

        relayProbe.expectMsg(MaxAkkaTestTimeout, (zmqStrings, kernelMessage))
      }
    }
  }
}
Example 16
Source File: HeartbeatSpec.scala From incubator-toree with Apache License 2.0
package org.apache.toree.kernel.protocol.v5.kernel.socket

import akka.actor.{ActorRef, ActorSystem, Props}
import akka.testkit.{ImplicitSender, TestKit, TestProbe}
import akka.util.ByteString
import org.apache.toree.communication.ZMQMessage
import com.typesafe.config.ConfigFactory
import org.mockito.Matchers._
import org.mockito.Mockito._
import org.scalatest.mock.MockitoSugar
import org.scalatest.{FunSpecLike, Matchers}
import test.utils.MaxAkkaTestTimeout

object HeartbeatSpec {
  val config = """
    akka {
      loglevel = "WARNING"
    }"""
}

class HeartbeatSpec extends TestKit(
  ActorSystem(
    "HeartbeatActorSpec",
    ConfigFactory.parseString(HeartbeatSpec.config),
    org.apache.toree.Main.getClass.getClassLoader
  )
) with ImplicitSender with FunSpecLike with Matchers with MockitoSugar {
  val SomeMessage: String = "some message"
  val SomeZMQMessage: ZMQMessage = ZMQMessage(ByteString(SomeMessage.getBytes))

  describe("HeartbeatActor") {
    val socketFactory = mock[SocketFactory]
    val probe : TestProbe = TestProbe()
    when(socketFactory.Heartbeat(any(classOf[ActorSystem]), any(classOf[ActorRef]))).thenReturn(probe.ref)

    val heartbeat = system.actorOf(Props(classOf[Heartbeat], socketFactory))

    describe("send heartbeat") {
      it("should receive and send same ZMQMessage") {
        heartbeat ! SomeZMQMessage
        probe.expectMsg(MaxAkkaTestTimeout, SomeZMQMessage)
      }
    }
  }
}
Example 17
Source File: ShellSpec.scala From incubator-toree with Apache License 2.0
package org.apache.toree.kernel.protocol.v5.kernel.socket

import java.nio.charset.Charset

import akka.actor.{ActorSelection, ActorRef, ActorSystem, Props}
import akka.testkit.{ImplicitSender, TestKit, TestProbe}
import akka.util.ByteString
import org.apache.toree.communication.ZMQMessage
import org.apache.toree.kernel.protocol.v5._
import org.apache.toree.kernel.protocol.v5.kernel.{ActorLoader, Utilities}
import org.apache.toree.kernel.protocol.v5Test._
import Utilities._
import com.typesafe.config.ConfigFactory
import org.mockito.Matchers._
import org.mockito.Mockito._
import org.scalatest.mock.MockitoSugar
import org.scalatest.{FunSpecLike, Matchers}
import test.utils.MaxAkkaTestTimeout

object ShellSpec {
  val config = """
    akka {
      loglevel = "WARNING"
    }"""
}

class ShellSpec extends TestKit(
  ActorSystem(
    "ShellActorSpec",
    ConfigFactory.parseString(ShellSpec.config),
    org.apache.toree.Main.getClass.getClassLoader
  )
) with ImplicitSender with FunSpecLike with Matchers with MockitoSugar {
  describe("Shell") {
    val socketFactory = mock[SocketFactory]
    val actorLoader = mock[ActorLoader]
    val socketProbe : TestProbe = TestProbe()
    when(socketFactory.Shell(any(classOf[ActorSystem]), any(classOf[ActorRef]))).thenReturn(socketProbe.ref)

    val relayProbe : TestProbe = TestProbe()
    val relaySelection : ActorSelection = system.actorSelection(relayProbe.ref.path)
    when(actorLoader.load(SystemActorType.KernelMessageRelay)).thenReturn(relaySelection)

    val shell = system.actorOf(Props(classOf[Shell], socketFactory, actorLoader))

    describe("#receive") {
      it("( KernelMessage ) should reply with a ZMQMessage via the socket") {
        // Use the implicit to convert the KernelMessage to ZMQMessage
        val MockZMQMessage : ZMQMessage = MockKernelMessage

        shell ! MockKernelMessage

        socketProbe.expectMsg(MockZMQMessage)
      }

      it("( ZMQMessage ) should forward ZMQ Strings and KernelMessage to Relay") {
        // Use the implicit to convert the KernelMessage to ZMQMessage
        val MockZMQMessage : ZMQMessage = MockKernelMessage

        shell ! MockZMQMessage

        // Should get the last four (assuming no buffer) strings in UTF-8
        val zmqStrings = MockZMQMessage.frames.map((byteString: ByteString) =>
          new String(byteString.toArray, Charset.forName("UTF-8"))
        ).takeRight(4)

        val kernelMessage: KernelMessage = MockZMQMessage

        relayProbe.expectMsg(MaxAkkaTestTimeout, (zmqStrings, kernelMessage))
      }
    }
  }
}
Example 18
Source File: Utilities.scala From incubator-toree with Apache License 2.0
package org.apache.toree.kernel.protocol.v5.kernel

import java.nio.charset.Charset

import akka.util.{ByteString, Timeout}
import org.apache.toree.communication.ZMQMessage
import org.apache.toree.kernel.protocol.v5._
import org.apache.toree.utils.LogLike
import play.api.data.validation.ValidationError
import play.api.libs.json.{JsPath, Json, Reads}

import scala.concurrent.duration._

object Utilities extends LogLike {
  //
  // NOTE: This is brought in to remove feature warnings regarding the use of
  // implicit conversions regarding the following:
  //
  // 1. ByteStringToString
  // 2. ZMQMessageToKernelMessage
  //
  import scala.language.implicitConversions

  implicit val timeout = Timeout(21474835.seconds)

  implicit def ByteStringToString(byteString : ByteString) : String = {
    new String(byteString.toArray, Charset.forName("UTF-8"))
  }

  implicit def StringToByteString(string : String) : ByteString = {
    ByteString(string.getBytes)
  }

  implicit def ZMQMessageToKernelMessage(message: ZMQMessage): KernelMessage = {
    val delimiterIndex: Int =
      message.frames.indexOf(ByteString("<IDS|MSG>".getBytes))
    // TODO Handle the case where there is no delimiter
    val ids: Seq[Array[Byte]] =
      message.frames.take(delimiterIndex).map(
        (byteString : ByteString) => { byteString.toArray }
      )
    val header = Json.parse(message.frames(delimiterIndex + 2)).as[Header]
    // TODO: Investigate better solution than setting parentHeader to null for {}
    val parentHeader = parseAndHandle(message.frames(delimiterIndex + 3),
      ParentHeader.headerReads,
      handler = (valid: ParentHeader) => valid,
      errHandler = _ => null
    )
    val metadata = Json.parse(message.frames(delimiterIndex + 4)).as[Metadata]

    KMBuilder().withIds(ids.toList)
               .withSignature(message.frame(delimiterIndex + 1))
               .withHeader(header)
               .withParentHeader(parentHeader)
               .withMetadata(metadata)
               .withContentString(message.frame(delimiterIndex + 5)).build(false)
  }

  implicit def KernelMessageToZMQMessage(kernelMessage : KernelMessage) : ZMQMessage = {
    val frames: scala.collection.mutable.ListBuffer[ByteString] =
      scala.collection.mutable.ListBuffer()
    kernelMessage.ids.map((id : Array[Byte]) => frames += ByteString.apply(id))
    frames += "<IDS|MSG>"
    frames += kernelMessage.signature
    frames += Json.toJson(kernelMessage.header).toString()
    frames += Json.toJson(kernelMessage.parentHeader).toString()
    frames += Json.toJson(kernelMessage.metadata).toString
    frames += kernelMessage.contentString
    ZMQMessage(frames : _*)
  }

  def parseAndHandle[T, U](json: String, reads: Reads[T], handler: T => U) : U = {
    parseAndHandle(json, reads, handler,
      (invalid: Seq[(JsPath, Seq[ValidationError])]) => {
        logger.error(s"Could not parse JSON, ${json}")
        throw new Throwable(s"Could not parse JSON, ${json}")
      }
    )
  }

  def parseAndHandle[T, U](json: String, reads: Reads[T], handler: T => U,
                           errHandler: Seq[(JsPath, Seq[ValidationError])] => U) : U = {
    Json.parse(json).validate[T](reads).fold(
      errHandler,
      (content: T) => handler(content)
    )
  }
}
Example 19
Source File: RestPi.scala From apache-spark-test with Apache License 2.0
package com.github.dnvriend

import akka.actor.ActorSystem
import akka.event.{ Logging, LoggingAdapter }
import akka.http.scaladsl._
import akka.http.scaladsl.common.{ EntityStreamingSupport, JsonEntityStreamingSupport }
import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport
import akka.http.scaladsl.server.{ Directives, Route }
import akka.stream.scaladsl.{ Flow, Source }
import akka.stream.{ ActorMaterializer, Materializer }
import akka.util.ByteString
import com.github.dnvriend.spark.CalculatePi
import org.apache.spark.SparkContext
import org.apache.spark.sql.SparkSession
import spray.json.DefaultJsonProtocol

import scala.concurrent.{ ExecutionContext, Future }

object RestPi extends App with Directives with SprayJsonSupport with DefaultJsonProtocol {
  implicit val system: ActorSystem = ActorSystem()
  implicit val mat: Materializer = ActorMaterializer()
  implicit val ec: ExecutionContext = system.dispatcher
  implicit val log: LoggingAdapter = Logging(system, this.getClass)

  val spark = SparkSession.builder()
    .config("spark.sql.warehouse.dir", "file:/tmp/spark-warehouse")
    .config("spark.scheduler.mode", "FAIR")
    .config("spark.sql.crossJoin.enabled", "true")
    .master("local") // use as many threads as cores
    .appName("RestPi") // The appName parameter is a name for your application to show on the cluster UI.
    .getOrCreate()

  final case class Pi(pi: Double)

  implicit val piJsonFormat = jsonFormat1(Pi)

  val start = ByteString.empty
  val sep = ByteString("\n")
  val end = ByteString.empty

  implicit val jsonStreamingSupport: JsonEntityStreamingSupport = EntityStreamingSupport.json()
    .withFramingRenderer(Flow[ByteString].intersperse(start, sep, end))
    .withParallelMarshalling(parallelism = 8, unordered = true)

  def sparkContext: SparkContext = spark.newSession().sparkContext

  def calculatePi(num: Long = 1000000, slices: Int = 2): Future[Double] =
    Future(CalculatePi(sparkContext, num, slices)).map(count => slices.toDouble * count / (num - 1))

  val route: Route =
    pathEndOrSingleSlash {
      complete(calculatePi().map(Pi))
    } ~ path("pi" / LongNumber / IntNumber) { (num, slices) =>
      complete(calculatePi(num, slices).map(Pi))
    } ~ path("stream" / "pi" / LongNumber) { num =>
      complete(Source.fromFuture(calculatePi()).map(Pi)
        .flatMapConcat(Source.repeat).take(num))
    }

  Http().bindAndHandle(route, "0.0.0.0", 8008)

  sys.addShutdownHook {
    spark.stop()
    system.terminate()
  }
}
Example 20
Source File: OAuthController.scala From izanami with Apache License 2.0
package controllers

import akka.util.ByteString
import com.auth0.jwt.algorithms.Algorithm
import controllers.dto.error.ApiErrors
import domains.auth.{OAuthModule, OAuthServiceModule, Oauth2Service}
import domains.user.User
import env.{Env, Oauth2Config}
import play.api.libs.json.Json
import play.api.mvc.{AbstractController, ControllerComponents, Cookie}
import zio.{Runtime, ZIO}
import libs.http.HttpContext

class OAuthController(_env: Env, mayBeOauth2Config: Option[Oauth2Config], cc: ControllerComponents)(
    implicit R: HttpContext[OAuthServiceModule]
) extends AbstractController(cc) {

  import libs.http._

  lazy val _config = _env.izanamiConfig.filter match {
    case env.Default(config) => config
    case _                   => throw new RuntimeException("Wrong config")
  }
  lazy val algorithm: Algorithm = Algorithm.HMAC512(_config.sharedKey)

  def appLoginPage() = Action { implicit request =>
    mayBeOauth2Config match {
      case Some(openIdConnectConfig) =>
        val clientId     = openIdConnectConfig.clientId
        val responseType = "code"
        val scope        = openIdConnectConfig.scope.map(s => s"scope=${s}&").getOrElse("")
        val claims       = Option(openIdConnectConfig.claims).filterNot(_.isEmpty).map(v => s"claims=$v&").getOrElse("")
        val queryParam   = if (openIdConnectConfig.useCookie) "" else s"?desc=izanami"
        val redirectUri = if (_env.baseURL.startsWith("http")) {
          s"${_env.baseURL}/${controllers.routes.OAuthController.appCallback().url}${queryParam}"
        } else {
          s"${controllers.routes.OAuthController.appCallback().absoluteURL()}${queryParam}"
        }
        val loginUrl =
          s"${openIdConnectConfig.loginUrl}?${scope}&${claims}client_id=$clientId&response_type=$responseType&redirect_uri=$redirectUri"
        Redirect(loginUrl)
      case None =>
        BadRequest(Json.toJson(ApiErrors.error("Open Id Connect module not configured")))
    }
  }

  def appLogout() = Action { req =>
    val redirectToOpt: Option[String] = req.queryString.get("redirectTo").map(_.last)
    redirectToOpt match {
      case Some(redirectTo) =>
        Redirect(redirectTo).discardingCookies()
      case _ =>
        BadRequest(Json.toJson(ApiErrors.error("Missing parameters")))
    }
  }

  def appCallback() = Action.asyncZio[OAuthServiceModule] { implicit ctx =>
    mayBeOauth2Config match {
      case Some(openIdConnectConfig) =>
        Oauth2Service
          .paCallback(_env.baseURL, openIdConnectConfig)
          .map { user =>
            Redirect(controllers.routes.HomeController.index())
              .withCookies(Cookie(name = _env.cookieName, value = User.buildToken(user, _config.issuer, algorithm)))
          }
          .mapError { err =>
            BadRequest(Json.toJson(ApiErrors.fromErrors(err.toList)))
          }
      case None =>
        ZIO.succeed(BadRequest(Json.toJson(ApiErrors.error("Open Id Connect module not configured"))))
    }
  }
}
Example 21
Source File: InitIza.scala From izanami with Apache License 2.0
package experiments

import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.http.scaladsl.model._
import akka.http.scaladsl.model.headers.RawHeader
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{Sink, Source}
import akka.util.ByteString

import scala.collection.immutable
import scala.concurrent.Future

object InitIza extends App {

  implicit val system: ActorSystem = ActorSystem()
  implicit val materializer: ActorMaterializer = ActorMaterializer()

  import system.dispatcher

  private val http = Http()

  private val features = "http://localhost:9000/api/features"
  //private val features = "http://izanami-perfs.cleverapps.io/api/features"

  Source(0 to 2000)
    .mapAsyncUnordered(10) { postFeature }
    .alsoTo(Sink.foreach {
      case (c, s) if c == StatusCodes.Created =>
      case (c, s)                             => println(s"Oups $c $s")
    })
    .runWith(Sink.ignore)
    .onComplete { _ =>
      println("Done")
    }

  private def postFeature(i: Int): Future[(StatusCode, String)] = {
    val headers: immutable.Seq[HttpHeader] = immutable.Seq(
      RawHeader("Izanami-Client-Id", "xxxx"),
      RawHeader("Izanami-Client-Secret", "xxxx")
    )

    val body = s"""
                  | {
                  |   "id": "a:key:$i",
                  |   "enabled": true,
                  |   "activationStrategy": "NO_STRATEGY"
                  | }
      """.stripMargin

    http
      .singleRequest(
        HttpRequest(
          HttpMethods.POST,
          Uri(features),
          headers = headers,
          entity = HttpEntity.Strict(ContentTypes.`application/json`, ByteString(body))
        )
      )
      .flatMap {
        case HttpResponse(code, _, entity, _) =>
          entity.dataBytes.map(_.utf8String).runFold("")((str, acc) => str + acc).map(s => (code, s))
      }
  }
}
Example 22
Source File: SensorDataFileIngress.scala From pipelines-examples with Apache License 2.0
package pipelines.examples.sensordata

import java.nio.file
import java.nio.file._

import akka.NotUsed
import akka.stream.IOResult
import akka.stream.alpakka.file.scaladsl.Directory
import akka.stream.scaladsl._
import akka.util.ByteString
import pipelines.akkastream._
import pipelines.akkastream.scaladsl._
import pipelines.streamlets._
import pipelines.streamlets.avro._
import spray.json.JsonParser

import scala.concurrent.Future
import scala.concurrent.duration._

class SensorDataFileIngress extends AkkaStreamlet {

  import SensorDataJsonSupport._

  val out = AvroOutlet[SensorData]("out").withPartitioner(RoundRobinPartitioner)
  def shape = StreamletShape.withOutlets(out)

  private val sourceData = VolumeMount("source-data-mount", "/mnt/data", ReadWriteMany)
  override def volumeMounts = Vector(sourceData)

  // Streamlet processing steps
  // 1. Every X seconds
  // 2. Enumerate all files in the mounted path
  // 3. Read each file *)
  // 4. Deserialize file content to a SensorData value *)
  //
  // *) Note that reading and deserializing the file content is done in separate steps for readability only,
  //    in production they should be merged into one step for performance reasons.

  override def createLogic = new RunnableGraphStreamletLogic() {
    val listFiles: NotUsed ⇒ Source[file.Path, NotUsed] = { _ ⇒
      Directory.ls(getMountedPath(sourceData))
    }
    val readFile: Path ⇒ Source[ByteString, Future[IOResult]] = { path: Path ⇒
      FileIO.fromPath(path).via(JsonFraming.objectScanner(Int.MaxValue))
    }
    val parseFile: ByteString ⇒ SensorData = { jsonByteString ⇒
      JsonParser(jsonByteString.utf8String).convertTo[SensorData]
    }

    val emitFromFilesContinuously = Source
      .tick(1.second, 5.second, NotUsed)
      .flatMapConcat(listFiles)
      .flatMapConcat(readFile)
      .map(parseFile)

    def runnableGraph = emitFromFilesContinuously.to(plainSink(out))
  }
}
Example 23
Source File: PlayRequestToRawBody.scala From tapir with Apache License 2.0
package sttp.tapir.server.play

import java.io.ByteArrayInputStream
import java.nio.charset.Charset

import akka.stream.Materializer
import akka.util.ByteString
import play.api.mvc.{RawBuffer, Request}
import play.core.parsers.Multipart
import sttp.model.Part
import sttp.tapir.{RawBodyType, RawPart}
import sttp.tapir.internal._

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future

class PlayRequestToRawBody(serverOptions: PlayServerOptions) {
  def apply[R](bodyType: RawBodyType[R], charset: Option[Charset], request: Request[RawBuffer], body: ByteString)(implicit
      mat: Materializer
  ): Future[R] = {
    bodyType match {
      case RawBodyType.StringBody(defaultCharset) => Future(new String(body.toArray, charset.getOrElse(defaultCharset)))
      case RawBodyType.ByteArrayBody              => Future(body.toArray)
      case RawBodyType.ByteBufferBody             => Future(body.toByteBuffer)
      case RawBodyType.InputStreamBody            => Future(body.toArray).map(new ByteArrayInputStream(_))
      case RawBodyType.FileBody =>
        Future(java.nio.file.Files.write(serverOptions.temporaryFileCreator.create().path, body.toArray))
          .map(p => p.toFile)
      case m: RawBodyType.MultipartBody => multiPartRequestToRawBody(request, m, body)
    }
  }

  private def multiPartRequestToRawBody[R](request: Request[RawBuffer], m: RawBodyType.MultipartBody, body: ByteString)(implicit
      mat: Materializer
  ): Future[Seq[RawPart]] = {
    val bodyParser = serverOptions.playBodyParsers.multipartFormData(
      Multipart.handleFilePartAsTemporaryFile(serverOptions.temporaryFileCreator)
    )
    bodyParser.apply(request).run(body).flatMap {
      case Left(_) =>
        Future.failed(new IllegalArgumentException("Unable to parse multipart form data.")) // TODO
      case Right(value) =>
        val dataParts = value.dataParts.map {
          case (key, value) =>
            apply(
              m.partType(key).get,
              charset(m.partType(key).get),
              request,
              ByteString(value.flatMap(_.getBytes).toArray)
            ).map(body => Part(key, body).asInstanceOf[RawPart])
        }.toSeq

        val fileParts = value.files.map(f => {
          apply(
            m.partType(f.key).get,
            charset(m.partType(f.key).get),
            request,
            ByteString.apply(java.nio.file.Files.readAllBytes(f.ref.path))
          ).map(body =>
            Part(f.key, body, Map(f.key -> f.dispositionType, Part.FileNameDispositionParam -> f.filename), Nil)
              .asInstanceOf[RawPart]
          )
        })

        Future.sequence(dataParts ++ fileParts)
    }
  }
}
Example 24
Source File: StreamingAkkaServer.scala From tapir with Apache License 2.0
package sttp.tapir.examples

import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.http.scaladsl.server.Route
import akka.stream.scaladsl.Source
import akka.util.ByteString
import sttp.client._
import sttp.tapir._
import sttp.tapir.server.akkahttp._

import scala.concurrent.duration._
import scala.concurrent.{Await, Future}

object StreamingAkkaServer extends App {
  // The endpoint: corresponds to GET /receive.
  // We need to provide both the schema of the value (for documentation), as well as the format (media type) of the
  // body. Here, the schema is a `string` and the media type is `text/plain`.
  val streamingEndpoint: Endpoint[Unit, Unit, Source[ByteString, Any], Source[ByteString, Any]] =
    endpoint.get.in("receive").out(streamBody[Source[ByteString, Any]](schemaFor[String], CodecFormat.TextPlain()))

  // converting an endpoint to a route (providing server-side logic); extension method comes from imported packages
  val testStream: Source[ByteString, Any] = Source.repeat("Hello!").take(10).map(s => ByteString(s))
  val streamingRoute: Route = streamingEndpoint.toRoute(_ => Future.successful(Right(testStream)))

  // starting the server
  implicit val actorSystem: ActorSystem = ActorSystem()
  import actorSystem.dispatcher

  val bindAndCheck = Http().bindAndHandle(streamingRoute, "localhost", 8080).map { _ =>
    // testing
    implicit val backend: SttpBackend[Identity, Nothing, NothingT] = HttpURLConnectionBackend()
    val result: String = basicRequest.response(asStringAlways).get(uri"http://localhost:8080/receive").send().body
    println("Got result: " + result)

    assert(result == "Hello!" * 10)
  }

  Await.result(bindAndCheck.transformWith { r => actorSystem.terminate().transform(_ => r) }, 1.minute)
}
Example 25
Source File: SocketWordCountTest.scala From apache-spark-test with Apache License 2.0
package com.github.dnvriend.spark.sstreaming

import akka.Done
import akka.stream.scaladsl.Tcp._
import akka.stream.scaladsl.{ Flow, Sink, Source, Tcp }
import akka.util.ByteString
import com.github.dnvriend.TestSpec
import org.scalatest.Ignore

import scala.collection.immutable._
import scala.concurrent.Future
import scala.concurrent.duration._

@Ignore
class SocketWordCountTest extends TestSpec {

  def withSocketServer(xs: Seq[String])(f: Future[Done] => Unit): Unit = {
    val connections: Source[IncomingConnection, Future[ServerBinding]] =
      Tcp().bind("127.0.0.1", 9999)
    val socketServer = connections.runForeach { connection =>
      println(s"New connection from: ${connection.remoteAddress}")
      val src = Source.cycle(() => xs.iterator).map(txt => ByteString(txt) ++ ByteString("\n"))
        .flatMapConcat(msg => Source.tick(0.seconds, 200.millis, msg))
      val echo = Flow.fromSinkAndSource(Sink.ignore, src)
      connection.handleWith(echo)
    }
    f(socketServer)
  }

  it should "a running word count of text data received via a TCP server" in withSparkSession { spark =>
    withSocketServer(List("apache spark")) { socketServer =>
      import spark.implicits._

      val lines = spark.readStream
        .format("socket")
        .option("host", "localhost")
        .option("port", 9999)
        .load()

      // Split the lines into words
      val words = lines.as[String].flatMap(_.split(" "))

      // Generate running word count
      val wordCounts = words.groupBy("value").count()

      // Start running the query that prints the running counts to the console
      val query = wordCounts.writeStream
        .outputMode("complete")
        .format("console")
        .start()

      query.awaitTermination(10.seconds)
    }
  }
}
Example 26
Source File: CreatePosts.scala From apache-spark-test with Apache License 2.0
package com.github.dnvriend

import java.nio.file.Paths
import java.nio.file.StandardOpenOption._
import java.text.SimpleDateFormat
import java.util.Date

import akka.actor.{ ActorSystem, Terminated }
import akka.stream.scaladsl.{ FileIO, Source }
import akka.stream.{ ActorMaterializer, Materializer }
import akka.util.ByteString
import play.api.libs.json.Json

import scala.concurrent.{ ExecutionContext, Future }
import scala.util.Random

object CreatePosts extends App {
  implicit val system: ActorSystem = ActorSystem()
  implicit val mat: Materializer = ActorMaterializer()
  implicit val ec: ExecutionContext = system.dispatcher

  def terminate: Future[Terminated] = system.terminate()

  sys.addShutdownHook {
    terminate
  }

  object Post {
    implicit val format = Json.format[Post]
  }

  final case class Post(
    commentCount: Int,
    lastActivityDate: String,
    ownerUserId: Long,
    body: String,
    score: Int,
    creationDate: String,
    viewCount: Int,
    title: String,
    tags: String,
    answerCount: Int,
    acceptedAnswerId: Long,
    postTypeId: Long,
    id: Long
  )

  def rng = Random.nextInt(20000)
  def now: String = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSXXX").format(new Date())

  val lorem = "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Nam fringilla magna et pharetra vestibulum."
  val title = " Ut id placerat sapien. Aliquam vel metus orci."

  Source.fromIterator(() => Iterator from 0).map { id =>
    Post(rng, now, rng, List.fill(Random.nextInt(5))(lorem).mkString("\n"), rng, now, rng, s"$rng - $title", title, rng, rng, rng, id)
  }.map(Json.toJson(_).toString)
    .map(json => ByteString(json + "\n"))
    .take(1000000)
    .via(LogProgress.flow())
    .runWith(FileIO.toPath(Paths.get("/tmp/posts.json"), Set(WRITE, TRUNCATE_EXISTING, CREATE)))
    .flatMap { done =>
      println(done)
      terminate
    }.recoverWith {
      case cause: Throwable =>
        cause.printStackTrace()
        terminate
    }
}
Example 27
Source File: CreateZipcodes.scala From apache-spark-test with Apache License 2.0
package com.github.dnvriend

import java.nio.file.Paths
import java.nio.file.StandardOpenOption._

import akka.NotUsed
import akka.actor.{ ActorSystem, Terminated }
import akka.stream.scaladsl.{ FileIO, Source }
import akka.stream.{ ActorMaterializer, Materializer }
import akka.util.ByteString
import play.api.libs.json.Json

import scala.concurrent.{ ExecutionContext, Future }

object CreateZipcodes extends App {
  implicit val system: ActorSystem = ActorSystem()
  implicit val mat: Materializer = ActorMaterializer()
  implicit val ec: ExecutionContext = system.dispatcher

  sys.addShutdownHook {
    terminate
  }

  object Zipcode {
    implicit val format = Json.format[Zipcode]
  }
  final case class Zipcode(value: String)

  val numZips = 50000000

  def zips(range: Range): Source[ByteString, NotUsed] =
    Source(range).flatMapConcat { district =>
      Source('A' to 'Z').flatMapConcat { l1 =>
        Source('A' to 'Z').flatMapConcat { l2 =>
          Source(1 to 399).map(num => f"$district$l1$l2-$num%03d")
        }
      }
    }.map(Zipcode.apply).map(Json.toJson(_).toString).map(json => ByteString(json + "\n"))

  zips(1000 until 2000)
    .merge(zips(2000 until 3000))
    .merge(zips(3000 until 4000))
    .merge(zips(4000 until 5000))
    .merge(zips(5000 until 6000))
    .merge(zips(6000 until 7000))
    .merge(zips(7000 until 8000))
    .merge(zips(8000 until 9000))
    .take(numZips)
    .via(LogProgress.flow(each = 250000))
    .runWith(FileIO.toPath(Paths.get("/tmp/zips.json"), Set(WRITE, TRUNCATE_EXISTING, CREATE)))
    .flatMap { done =>
      println(done)
      terminate
    }.recoverWith {
      case cause: Throwable =>
        cause.printStackTrace()
        terminate
    }

  def terminate: Future[Terminated] = system.terminate()
}
Example 28
Source File: Heartbeat.scala From incubator-toree with Apache License 2.0
package org.apache.toree.kernel.protocol.v5.kernel.socket

import akka.actor.Actor
import akka.util.ByteString
import org.apache.toree.communication.ZMQMessage
import org.apache.toree.utils.LogLike

class Heartbeat(socketFactory : SocketFactory) extends Actor with LogLike {
  logger.debug("Created new Heartbeat actor")
  val socket = socketFactory.Heartbeat(context.system, self)

  override def receive: Receive = {
    case message: ZMQMessage =>
      logger.trace("Heartbeat received message: " +
        message.frames.map((byteString: ByteString) =>
          new String(byteString.toArray)).mkString("\n"))
      socket ! message
  }
}
Example 29
Source File: UpickleCustomizationSupport.scala From akka-http-json with Apache License 2.0 | 5 votes |
package de.heikoseeberger.akkahttpupickle import akka.http.javadsl.common.JsonEntityStreamingSupport import akka.http.scaladsl.common.EntityStreamingSupport import akka.http.scaladsl.marshalling.{ Marshaller, Marshalling, ToEntityMarshaller } import akka.http.scaladsl.model.{ ContentTypeRange, HttpEntity, MediaType, MessageEntity } import akka.http.scaladsl.model.MediaTypes.`application/json` import akka.http.scaladsl.unmarshalling.{ FromEntityUnmarshaller, Unmarshal, Unmarshaller } import akka.http.scaladsl.util.FastFuture import akka.stream.scaladsl.{ Flow, Source } import akka.util.ByteString import UpickleCustomizationSupport._ import scala.collection.immutable.Seq import scala.concurrent.Future import scala.util.Try import scala.util.control.NonFatal // This companion object only exists for binary compatibility as adding methods with default implementations // (including val's as they create synthetic methods) is not compatible. private object UpickleCustomizationSupport { private def jsonStringUnmarshaller(support: UpickleCustomizationSupport) = Unmarshaller.byteStringUnmarshaller .forContentTypes(support.unmarshallerContentTypes: _*) .mapWithCharset { case (ByteString.empty, _) => throw Unmarshaller.NoContentException case (data, charset) => data.decodeString(charset.nioCharset.name) } private def jsonSourceStringMarshaller(support: UpickleCustomizationSupport) = Marshaller.oneOf(support.mediaTypes: _*)(support.sourceByteStringMarshaller) private def jsonStringMarshaller(support: UpickleCustomizationSupport) = Marshaller.oneOf(support.mediaTypes: _*)(Marshaller.stringMarshaller) } implicit def sourceMarshaller[A](implicit writes: apiInstance.Writer[A], support: JsonEntityStreamingSupport = EntityStreamingSupport.json() ): ToEntityMarshaller[SourceOf[A]] = jsonSourceStringMarshaller(this).compose(jsonSource[A]) }
Example 30
Source File: GenCodecSupport.scala From akka-http-json with Apache License 2.0 | 5 votes |
package de.heikoseeberger.akkahttpavsystemgencodec import akka.http.scaladsl.marshalling.{ Marshaller, ToEntityMarshaller } import akka.http.scaladsl.model.ContentTypeRange import akka.http.scaladsl.model.MediaType import akka.http.scaladsl.model.MediaTypes.`application/json` import akka.http.scaladsl.unmarshalling.{ FromEntityUnmarshaller, Unmarshaller } import akka.util.ByteString import com.avsystem.commons.serialization.GenCodec import com.avsystem.commons.serialization.json.{ JsonStringInput, JsonStringOutput } import scala.collection.immutable.Seq object GenCodecSupport extends GenCodecSupport {} trait GenCodecSupport { def unmarshallerContentTypes: Seq[ContentTypeRange] = mediaTypes.map(ContentTypeRange.apply) def mediaTypes: Seq[MediaType.WithFixedCharset] = List(`application/json`) private val jsonStringUnmarshaller = Unmarshaller.byteStringUnmarshaller .forContentTypes(unmarshallerContentTypes: _*) .mapWithCharset { case (ByteString.empty, _) => throw Unmarshaller.NoContentException case (data, charset) => data.decodeString(charset.nioCharset.name) } private val jsonStringMarshaller = Marshaller.oneOf(mediaTypes: _*)(Marshaller.stringMarshaller) implicit def marshaller[A: GenCodec]: ToEntityMarshaller[A] = jsonStringMarshaller.compose(JsonStringOutput.write(_)) }
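A minimal usage sketch (the User case class is an assumption for illustration; GenCodec.materialize is AVSystem's codec-derivation macro):

import com.avsystem.commons.serialization.GenCodec
import de.heikoseeberger.akkahttpavsystemgencodec.GenCodecSupport._

final case class User(name: String, age: Int)
object User {
  // Derive a GenCodec for the case class at compile time.
  implicit val codec: GenCodec[User] = GenCodec.materialize
}
// With the implicits above in scope, an akka-http route can now
// complete(User("alice", 30)) as an application/json entity.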
Example 31
Source File: UserSerializer.scala From akka-http-rest-api with MIT License | 5 votes |
package core.authentication import akka.util.ByteString import redis.ByteStringFormatter object UserSerializer { implicit val byteStringFormatter = new ByteStringFormatter[Identity.User] { def serialize(data: Identity.User): ByteString = { ByteString( data.id + "|" + data.role ) } def deserialize(bs: ByteString): Identity.User = { val r = bs.utf8String.split('|').toList Identity.User(r(0).toLong, r(1).toInt) } } }
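A round-trip sketch of the pipe-delimited format above (Identity.User is assumed to be a case class with a Long id and an Int role, matching the parsing code):

import core.authentication.UserSerializer._

val user = Identity.User(42L, 1)
val bytes: ByteString = byteStringFormatter.serialize(user) // ByteString("42|1")
val back: Identity.User = byteStringFormatter.deserialize(bytes)
assert(back == user)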
Example 32
Source File: LavaplayerSource.scala From AckCord with MIT License | 5 votes |
package ackcord.lavaplayer import scala.concurrent.duration._ import akka.NotUsed import akka.stream.scaladsl.Source import akka.stream.stage.{GraphStage, GraphStageLogic, OutHandler, TimerGraphStageLogicWithLogging} import akka.stream.{Attributes, Outlet, SourceShape} import akka.util.ByteString import com.sedmelluq.discord.lavaplayer.player.AudioPlayer class LavaplayerSource(player: AudioPlayer) extends GraphStage[SourceShape[ByteString]] { val out: Outlet[ByteString] = Outlet("LavaplayerSource.out") override def shape: SourceShape[ByteString] = SourceShape(out) override def createLogic(inheritedAttributes: Attributes): GraphStageLogic = new TimerGraphStageLogicWithLogging(shape) with OutHandler { override def onPull(): Unit = tryPushFrame() override protected def onTimer(timerKey: Any): Unit = timerKey match { case "RetryProvide" => tryPushFrame() } def tryPushFrame(): Unit = { val frame = player.provide() if (frame != null) { push(out, ByteString.fromArray(frame.getData)) //log.debug("Sending data") } else { //log.debug("Scheduling attempt to provide frame") scheduleOnce("RetryProvide", 20.millis) } } setHandler(out, this) } } object LavaplayerSource { def source(player: AudioPlayer): Source[ByteString, NotUsed] = Source.fromGraph(new LavaplayerSource(player)) }
Example 33
Source File: VoiceWsProtocol.scala From AckCord with MIT License | 5 votes |
package ackcord.voice import ackcord.data.DiscordProtocol import ackcord.util.{JsonOption, JsonSome} import akka.util.ByteString import io.circe.syntax._ import io.circe.{derivation, _} //noinspection NameBooleanParameters object VoiceWsProtocol extends DiscordProtocol { implicit val speakingFlagsCodec: Codec[SpeakingFlag] = Codec.from( Decoder[Long].emap(i => Right(SpeakingFlag.fromLong(i))), Encoder[Long].contramap(identity) ) implicit val identifyDataCodec: Codec[IdentifyData] = derivation.deriveCodec(derivation.renaming.snakeCase, false, None) implicit val selectProtocolDataCodec: Codec[SelectProtocolData] = derivation.deriveCodec(derivation.renaming.snakeCase, false, None) implicit val selectProtocolConnectionDataCodec: Codec[SelectProtocolConnectionData] = derivation.deriveCodec(derivation.renaming.snakeCase, false, None) implicit val readyDataCodec: Codec[ReadyData] = derivation.deriveCodec(derivation.renaming.snakeCase, false, None) implicit val sessionDescriptionDataEncoder: Encoder[SessionDescriptionData] = (a: SessionDescriptionData) => { Json.obj("mode" -> a.mode.asJson, "secret_key" -> a.secretKey.toArray.asJson) } implicit val sessionDescriptionDataDecoder: Decoder[SessionDescriptionData] = (c: HCursor) => { for { mode <- c.get[String]("mode") secretKey <- c.get[Seq[Int]]("secret_key") } yield SessionDescriptionData(mode, ByteString(secretKey.map(_.toByte): _*)) } implicit val speakingDataEncoder: Encoder[SpeakingData] = (a: SpeakingData) => JsonOption.removeUndefinedToObj( "speaking" -> JsonSome(a.speaking.asJson), "delay" -> a.delay.map(_.asJson), "ssrc" -> a.ssrc.map(_.asJson), "user_id" -> a.userId.map(_.asJson) ) implicit val speakingDataDecoder: Decoder[SpeakingData] = derivation.deriveDecoder(derivation.renaming.snakeCase, false, None) implicit val resumeDataCodec: Codec[ResumeData] = derivation.deriveCodec(derivation.renaming.snakeCase, false, None) implicit val helloDataCodec: Codec[HelloData] = derivation.deriveCodec(derivation.renaming.snakeCase, false, None) implicit def wsMessageEncoder[Data]: Encoder[VoiceMessage[Data]] = (a: VoiceMessage[Data]) => { val data = a match { case Identify(d) => d.asJson case SelectProtocol(d) => d.asJson case Ready(d) => d.asJson case Heartbeat(d) => d.asJson case SessionDescription(d) => d.asJson case Speaking(d) => d.asJson case HeartbeatACK(d) => d.asJson case Resume(d) => d.asJson case Hello(d) => d.asJson case Resumed => Json.obj() case IgnoreClientDisconnect => Json.obj() case UnknownVoiceMessage(_) => Json.obj() } JsonOption.removeUndefinedToObj( "op" -> JsonSome(a.op.asJson), "d" -> JsonSome(data), "s" -> a.s.map(_.asJson) ) } implicit val wsMessageDecoder: Decoder[VoiceMessage[_]] = (c: HCursor) => { val dCursor = c.downField("d") val op = c.get[VoiceOpCode]("op") def mkMsg[Data: Decoder, B](create: Data => B): Either[DecodingFailure, B] = dCursor.as[Data].map(create) //We use the apply method on the companion object here op.flatMap { case VoiceOpCode.Identify => mkMsg(Identify) case VoiceOpCode.SelectProtocol => mkMsg(SelectProtocol.apply) case VoiceOpCode.Ready => mkMsg(Ready) case VoiceOpCode.Heartbeat => mkMsg(Heartbeat) case VoiceOpCode.SessionDescription => mkMsg(SessionDescription) case VoiceOpCode.Speaking => mkMsg(Speaking.apply) case VoiceOpCode.HeartbeatACK => mkMsg(HeartbeatACK) case VoiceOpCode.Resume => mkMsg(Resume) case VoiceOpCode.Resumed => Right(Resumed) case VoiceOpCode.ClientDisconnect => Right(IgnoreClientDisconnect) //We don't know what to do with this case VoiceOpCode.Hello => mkMsg(Hello) case tpe @ 
VoiceOpCode.Unknown(_) => Right(UnknownVoiceMessage(tpe)) } } }
Example 34
Source File: RTPHeader.scala From AckCord with MIT License | 5 votes |
package ackcord.voice import java.nio.{ByteBuffer, ByteOrder} import akka.util.ByteString final case class RTPHeader(tpe: Byte, version: Byte, sequence: Short, timestamp: Int, ssrc: Int) object RTPHeader { def fromBytes(bytes: ByteString): (RTPHeader, ByteString) = { val (header, extra) = bytes.splitAt(12) val buffer = header.asByteBuffer.order(ByteOrder.BIG_ENDIAN) val tpe = buffer.get() val version = buffer.get() val sequence = buffer.getShort() val timestamp = buffer.getInt() val ssrc = buffer.getInt() //https://tools.ietf.org/html/rfc5285#section-4.2 //Skip over a one-byte-header RTP extension block, if present, to reach the audio payload if (tpe == 0x90.toByte && (extra(0) & 0xFF) == 0xBE && (extra(1) & 0xFF) == 0xDE) { val hlen = ((extra(2) & 0xFF) << 8) | (extra(3) & 0xFF) var i = 4 while (i < hlen + 4) { val b = extra(i) val len = (b & 0x0F) + 1 i += (len + 1) } while (extra(i) == 0) i += 1 val newAudio = extra.drop(i) (RTPHeader(tpe, version, sequence, timestamp, ssrc), newAudio) } else (RTPHeader(tpe, version, sequence, timestamp, ssrc), extra) } def apply(sequence: Short, timestamp: Int, ssrc: Int): RTPHeader = RTPHeader(0x80.toByte, 0x78, sequence, timestamp, ssrc) }
Example 35
Source File: VoiceUDPFlow.scala From AckCord with MIT License | 5 votes |
package ackcord.voice import java.net.InetSocketAddress import java.nio.ByteOrder import scala.concurrent.{Future, Promise} import ackcord.data.{RawSnowflake, UserId} import ackcord.util.UdpConnectedFlow import akka.NotUsed import akka.actor.typed.ActorSystem import akka.stream.scaladsl.{BidiFlow, Concat, Flow, GraphDSL, Keep, Source} import akka.stream.{BidiShape, OverflowStrategy} import akka.util.ByteString object VoiceUDPFlow { val silence = ByteString(0xF8, 0xFF, 0xFE) val SampleRate = 48000 val FrameSize = 960 val FrameTime = 20 def flow[Mat]( remoteAddress: InetSocketAddress, ssrc: Int, serverId: RawSnowflake, userId: UserId, secretKeys: Source[Option[ByteString], Mat] )(implicit system: ActorSystem[Nothing]): Flow[ByteString, AudioAPIMessage.ReceivedData, (Mat, Future[FoundIP])] = NaclBidiFlow .bidiFlow(ssrc, serverId, userId, secretKeys) .atopMat(voiceBidi(ssrc).reversed)(Keep.both) .async .join(Flow[ByteString].buffer(32, OverflowStrategy.backpressure).via(UdpConnectedFlow.flow(remoteAddress))) def voiceBidi(ssrc: Int): BidiFlow[ByteString, ByteString, ByteString, ByteString, Future[FoundIP]] = { implicit val byteOrder: ByteOrder = ByteOrder.BIG_ENDIAN val ipDiscoveryPacket = { val byteBuilder = ByteString.createBuilder byteBuilder.sizeHint(74) byteBuilder.putShort(0x1).putShort(70).putInt(ssrc) byteBuilder.putBytes(new Array[Byte](66)) byteBuilder.result() } val valvePromise = Promise[Unit] val valve = Source.future(valvePromise.future).drop(1).asInstanceOf[Source[ByteString, NotUsed]] val ipDiscoveryFlow = Flow[ByteString] .viaMat(new IPDiscoveryFlow(() => valvePromise.success(())))(Keep.right) BidiFlow .fromGraph(GraphDSL.create(ipDiscoveryFlow) { implicit b => ipDiscovery => import GraphDSL.Implicits._ val voiceIn = b.add(Flow[ByteString]) val ipDiscoverySource = b.add(Source.single(ipDiscoveryPacket) ++ valve) val ipDiscoveryAndThenVoiceData = b.add(Concat[ByteString]()) ipDiscoverySource ~> ipDiscoveryAndThenVoiceData voiceIn ~> ipDiscoveryAndThenVoiceData BidiShape( ipDiscovery.in, ipDiscovery.out, voiceIn.in, ipDiscoveryAndThenVoiceData.out ) }) } case class FoundIP(address: String, port: Int) }
Example 36
Source File: CirceStreamSupport.scala From akka-stream-json with Apache License 2.0 | 5 votes |
package de.knutwalker.akka.stream package support import akka.NotUsed import akka.stream.scaladsl.Flow import akka.util.ByteString import io.circe.CursorOp.DownField import io.circe.jawn.CirceSupportParser._ import io.circe.{ CursorOp, Decoder, DecodingFailure, Encoder, HCursor, Json, Printer } import jawn.AsyncParser object CirceStreamSupport extends CirceStreamSupport trait CirceStreamSupport { def decode[A: Decoder]: Flow[ByteString, A, NotUsed] = JsonStreamParser.flow[Json].map(decodeJson[A]) def decode[A: Decoder](mode: AsyncParser.Mode): Flow[ByteString, A, NotUsed] = JsonStreamParser.flow[Json](mode).map(decodeJson[A]) def encode[A](implicit A: Encoder[A], P: Printer = Printer.noSpaces): Flow[A, String, NotUsed] = Flow[A].map(a => P.pretty(A(a))) case class JsonParsingException(df: DecodingFailure, cursor: HCursor) extends Exception(errorMessage(df.history, cursor, df.message), df) private[knutwalker] def decodeJson[A](json: Json)(implicit decoder: Decoder[A]): A = { val cursor = json.hcursor decoder(cursor) match { case Right(e) => e case Left(f) => throw JsonParsingException(f, cursor) } } private[this] def errorMessage(hist: List[CursorOp], cursor: HCursor, typeHint: String) = { val ac = cursor.replay(hist) if (ac.failed && lastWasDownField(hist)) { s"The field [${CursorOp.opsToPath(hist)}] is missing." } else { s"Could not decode [${ac.focus.getOrElse(Json.Null)}] at [${CursorOp.opsToPath(hist)}] as [$typeHint]." } } private[this] def lastWasDownField(hist: List[CursorOp]) = hist.headOption match { case Some(DownField(_)) => true case _ => false } }
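A minimal decoding sketch (the Item case class, the circe-generic auto derivation, and the in-memory source are assumptions for illustration; AsyncParser.ValueStream tells jawn to expect a stream of whitespace-separated JSON values):

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{Sink, Source}
import akka.util.ByteString
import io.circe.generic.auto._
import jawn.AsyncParser

final case class Item(name: String)

implicit val system = ActorSystem()
implicit val mat = ActorMaterializer()

val items = Source.single(ByteString("""{"name":"a"} {"name":"b"}"""))
  .via(CirceStreamSupport.decode[Item](AsyncParser.ValueStream))
  .runWith(Sink.seq) // Future[Seq[Item]]: Seq(Item("a"), Item("b"))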
Example 37
Source File: ControllerSpec.scala From akka-ddd-cqrs-es-example with MIT License | 5 votes |
package com.github.j5ik2o.bank.adaptor.util import akka.http.scaladsl.model.{ HttpEntity, MediaTypes } import akka.http.scaladsl.testkit.ScalatestRouteTest import akka.testkit.TestKitBase import akka.util.ByteString import com.github.j5ik2o.scalatestplus.db.{ MySQLdConfig, UserWithPassword } import com.wix.mysql.distribution.Version.v5_6_21 import io.circe.Encoder import io.circe.syntax._ import org.scalatest.concurrent.ScalaFutures import org.scalatest.prop.PropertyChecks import org.scalatest.time.{ Millis, Seconds, Span } import org.scalatest.{ BeforeAndAfterAll, FreeSpecLike, Matchers } import scala.concurrent.duration._ object ControllerSpec { implicit class JsonOps[A](val self: A) extends AnyVal { def toEntity(implicit enc: Encoder[A]): HttpEntity.Strict = HttpEntity(MediaTypes.`application/json`, ByteString(self.asJson.noSpaces)) } } abstract class ControllerSpec extends FreeSpecLike with PropertyChecks with Matchers with BeforeAndAfterAll with ScalaFutures with FlywayWithMySQLSpecSupport with ScalatestRouteTest with TestKitBase { override implicit val patienceConfig: PatienceConfig = PatienceConfig(timeout = Span(10, Seconds), interval = Span(200, Millis)) override def afterAll: Unit = cleanUp() override protected lazy val mySQLdConfig: MySQLdConfig = MySQLdConfig( version = v5_6_21, port = Some(12345), userWithPassword = Some(UserWithPassword("bank", "passwd")), timeout = Some((30 seconds) * sys.env.getOrElse("SBT_TEST_TIME_FACTOR", "1").toDouble) ) }
Example 38
Source File: StreamLoader.scala From recogito2 with Apache License 2.0 | 5 votes |
package controllers.admin.authorities import akka.stream.{ActorAttributes, ClosedShape, Materializer, Supervision} import akka.stream.scaladsl._ import akka.util.ByteString import java.io.InputStream import services.entity.EntityRecord import services.entity.builtin.importer.EntityImporter import play.api.Logger import play.api.libs.json.Json import scala.concurrent.{Await, ExecutionContext} import scala.concurrent.duration._ class StreamLoader(implicit materializer: Materializer) { private val BATCH_SIZE = 100 private val decider: Supervision.Decider = { case t: Throwable => t.printStackTrace() Supervision.Stop } def importPlaces(is: InputStream, crosswalk: String => Option[EntityRecord], importer: EntityImporter)(implicit ctx: ExecutionContext) = { val source = StreamConverters.fromInputStream(() => is, 1024) .via(Framing.delimiter(ByteString("\n"), maximumFrameLength = Int.MaxValue, allowTruncation = false)) .map(_.utf8String) val parser = Flow.fromFunction[String, Option[EntityRecord]](crosswalk) .withAttributes(ActorAttributes.supervisionStrategy(decider)) .grouped(BATCH_SIZE) val sink = Sink.foreach[Seq[Option[EntityRecord]]] { records => val toImport = records.flatten Await.result(importer.importRecords(toImport), 60.minutes) } val graph = RunnableGraph.fromGraph(GraphDSL.create() { implicit builder => import GraphDSL.Implicits._ source ~> parser ~> sink ClosedShape }).withAttributes(ActorAttributes.supervisionStrategy(decider)) graph.run() } }
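The crosswalk function turns one line of streamed text into an entity record. A hedged sketch of what a play-json based crosswalk could look like (assuming an implicit Reads[EntityRecord] exists; this is not the project's actual crosswalk):

import play.api.libs.json.{Json, Reads}
import scala.util.Try

def jsonCrosswalk(implicit reads: Reads[EntityRecord]): String => Option[EntityRecord] =
  // Swallow malformed lines as None so the supervision strategy isn't triggered needlessly.
  line => Try(Json.parse(line)).toOption.flatMap(_.asOpt[EntityRecord])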
Example 39
Source File: ByteStringBytes.scala From swave with Mozilla Public License 2.0 | 5 votes |
package swave.compat.akka.impl import java.io.OutputStream import java.nio.charset.{CharacterCodingException, Charset} import java.nio.{ByteBuffer, CharBuffer} import java.util import scala.annotation.tailrec import scala.collection.GenTraversableOnce import akka.util.ByteString import swave.core.io.Bytes import swave.core.macros._ class ByteStringBytes extends Bytes[ByteString] { ///////////////// CONSTRUCTION /////////////////// def empty = ByteString.empty def fill[A: Integral](size: Long)(byte: A) = { requireArg(0 <= size && size <= Int.MaxValue, "`size` must be >= 0 and <= Int.MaxValue") val b = implicitly[Integral[A]].toInt(byte).toByte apply(Array.fill(size.toInt)(b)) } def apply(array: Array[Byte]) = ByteString(array) def apply(bytes: Array[Byte], offset: Int, length: Int) = ByteString(util.Arrays.copyOfRange(bytes, offset, offset + length)) def apply[A: Integral](bytes: A*) = { val integral = implicitly[Integral[A]] val buf = new Array[Byte](bytes.size) @tailrec def rec(ix: Int): ByteString = if (ix < buf.length) { buf(ix) = integral.toInt(bytes(ix)).toByte rec(ix + 1) } else view(buf) rec(0) } def apply(bytes: Vector[Byte]) = ByteString(bytes: _*) def apply(buffer: ByteBuffer) = ByteString(buffer) def apply(bs: GenTraversableOnce[Byte]) = ByteString(bs.toArray) def view(bytes: Array[Byte]) = ByteString(bytes) // no view-like constructor available on ByteStrings def view(bytes: ByteBuffer) = ByteString(bytes) // no view-like constructor available on ByteStrings def encodeString(str: String, charset: Charset) = ByteString(str, charset.name) def encodeStringStrict(str: String, charset: Charset) = try Right(ByteString(charset.newEncoder.encode(CharBuffer.wrap(str)))) catch { case e: CharacterCodingException ⇒ Left(e) } ///////////////// QUERY /////////////////// def size(value: ByteString): Long = value.size.toLong def byteAt(value: ByteString, ix: Long): Byte = { requireArg(0 <= ix && ix <= Int.MaxValue, "`ix` must be >= 0 and <= Int.MaxValue") value(ix.toInt) } def indexOfSlice(value: ByteString, slice: ByteString, startIx: Long): Long = { requireArg(0 <= startIx && startIx <= Int.MaxValue, "`startIx` must be >= 0 and <= Int.MaxValue") value.indexOfSlice(slice, startIx.toInt).toLong } ///////////////// TRANSFORMATION TO ByteString /////////////////// def update(value: ByteString, ix: Long, byte: Byte) = concat(concat(take(value, ix), byte), drop(value, ix + 1)) def concat(value: ByteString, other: ByteString) = value ++ other def concat(value: ByteString, byte: Byte) = value ++ ByteString(byte) def concat(byte: Byte, value: ByteString) = ByteString(byte) ++ value def drop(value: ByteString, n: Long) = { requireArg(0 <= n && n <= Int.MaxValue, "`n` must be >= 0 and <= Int.MaxValue") value.drop(n.toInt) } def take(value: ByteString, n: Long) = { requireArg(0 <= n && n <= Int.MaxValue, "`n` must be >= 0 and <= Int.MaxValue") value.take(n.toInt) } def map(value: ByteString, f: Byte ⇒ Byte) = value.map(f) def reverse(value: ByteString) = value.reverse def compact(value: ByteString) = value.compact ///////////////// TRANSFORMATION TO OTHER TYPES /////////////////// def toArray(value: ByteString) = value.toArray def copyToArray(value: ByteString, xs: Array[Byte], offset: Int) = value.copyToArray(xs, offset) def copyToArray(value: ByteString, sourceOffset: Long, xs: Array[Byte], destOffset: Int, len: Int) = drop(value, sourceOffset).copyToArray(xs, destOffset, len) def copyToBuffer(value: ByteString, buffer: ByteBuffer): Int = value.copyToBuffer(buffer) def copyToOutputStream(value: 
ByteString, s: OutputStream) = { @tailrec def rec(ix: Int, size: Int): Unit = if (ix < size) { s.write(value(ix).toInt); rec(ix + 1, size) } rec(0, value.size) } def toByteBuffer(value: ByteString) = value.toByteBuffer def toIndexedSeq(value: ByteString): IndexedSeq[Byte] = value def toSeq(value: ByteString): Seq[Byte] = value def decodeString(value: ByteString, charset: Charset): Either[CharacterCodingException, String] = try Right(charset.newDecoder.decode(toByteBuffer(value)).toString) catch { case e: CharacterCodingException ⇒ Left(e) } ///////////////// ITERATION /////////////////// def foldLeft[A](value: ByteString, z: A, f: (A, Byte) ⇒ A) = value.foldLeft(z)(f) def foldRight[A](value: ByteString, z: A, f: (Byte, A) ⇒ A) = value.foldRight(z)(f) def foreach(value: ByteString, f: Byte ⇒ Unit) = value.foreach(f) }
Example 40
Source File: AkkaHttpStreamingTest.scala From sttp with Apache License 2.0 | 5 votes |
package sttp.client.akkahttp import akka.actor.ActorSystem import akka.stream.ActorMaterializer import akka.stream.scaladsl.Source import akka.util.ByteString import sttp.client.{NothingT, SttpBackend} import sttp.client.testing.ConvertToFuture import sttp.client.testing.streaming.StreamingTest import scala.concurrent.Future class AkkaHttpStreamingTest extends StreamingTest[Future, Source[ByteString, Any]] { private implicit val actorSystem: ActorSystem = ActorSystem("sttp-test") private implicit val materializer: ActorMaterializer = ActorMaterializer() override implicit val backend: SttpBackend[Future, Source[ByteString, Any], NothingT] = AkkaHttpBackend.usingActorSystem(actorSystem) override implicit val convertToFuture: ConvertToFuture[Future] = ConvertToFuture.future override def bodyProducer(chunks: Iterable[Array[Byte]]): Source[ByteString, Any] = Source.apply(chunks.toList.map(ByteString(_))) override def bodyConsumer(stream: Source[ByteString, Any]): Future[String] = stream.map(_.utf8String).runReduce(_ + _) }
Example 41
Source File: PrometheusController.scala From play-prometheus-filters with MIT License | 5 votes |
package com.github.stijndehaes.playprometheusfilters.controllers import akka.util.ByteString import com.github.stijndehaes.playprometheusfilters.utils.WriterAdapter import javax.inject._ import play.api.mvc._ import io.prometheus.client.CollectorRegistry import io.prometheus.client.exporter.common.TextFormat import org.slf4j.LoggerFactory import play.api.http.HttpEntity class PrometheusController @Inject()(registry: CollectorRegistry, cc: ControllerComponents) extends AbstractController(cc) { private val logger = LoggerFactory.getLogger(classOf[PrometheusController]) def getMetrics = Action { logger.trace("Metrics call received") val samples = new StringBuilder() val writer = new WriterAdapter(samples) TextFormat.write004(writer, registry.metricFamilySamples()) writer.close() Result( header = ResponseHeader(200, Map.empty), body = HttpEntity.Strict(ByteString(samples.toString), Some(TextFormat.CONTENT_TYPE_004)) ) } }
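WriterAdapter is referenced but not shown here; a plausible minimal version (an assumption) that lets Prometheus' TextFormat write into a scala StringBuilder:

import java.io.Writer

class WriterAdapter(builder: StringBuilder) extends Writer {
  // TextFormat.write004 only needs write/flush/close on a java.io.Writer.
  override def write(cbuf: Array[Char], off: Int, len: Int): Unit =
    builder.appendAll(cbuf, off, len)
  override def flush(): Unit = ()
  override def close(): Unit = ()
}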
Example 42
Source File: DataFileIngress.scala From cloudflow with Apache License 2.0 | 5 votes |
package cloudflow.akkastreamsdoc import java.nio.file import java.nio.file._ import akka.NotUsed import akka.stream.IOResult import akka.stream.alpakka.file.scaladsl.Directory import akka.stream.scaladsl._ import akka.util.ByteString import cloudflow.akkastream._ import cloudflow.akkastream.scaladsl._ import cloudflow.streamlets._ import cloudflow.streamlets.avro._ import spray.json.JsonParser import scala.concurrent.Future import scala.concurrent.duration._ class DataFileIngress extends AkkaStreamlet { import JsonSupport._ val out = AvroOutlet[Data]("out").withPartitioner(RoundRobinPartitioner) def shape = StreamletShape.withOutlets(out) private val sourceData = VolumeMount("source-data-mount", "/mnt/data", ReadWriteMany) override def volumeMounts = Vector(sourceData) // Streamlet processing steps // 1. Every X seconds // 2. Enumerate all files in the mounted path // 3. Read each file *) // 4. Deserialize file content to a Data value *) // *) Note that reading and deserializing the file content is done in separate steps for readability only, in production they should be merged into one step for performance reasons. override def createLogic = new RunnableGraphStreamletLogic() { val listFiles: NotUsed ⇒ Source[file.Path, NotUsed] = { _ ⇒ Directory.ls(getMountedPath(sourceData)) } val readFile: Path ⇒ Source[ByteString, Future[IOResult]] = { path: Path ⇒ FileIO.fromPath(path).via(JsonFraming.objectScanner(Int.MaxValue)) } val parseFile: ByteString ⇒ Data = { jsonByteString ⇒ JsonParser(jsonByteString.utf8String).convertTo[Data] } val emitFromFilesContinuously = Source .tick(1.second, 5.second, NotUsed) .flatMapConcat(listFiles) .flatMapConcat(readFile) .map(parseFile) def runnableGraph = emitFromFilesContinuously.to(plainSink(out)) } }
Example 43
Source File: LeaderProxyFilter.scala From metronome with Apache License 2.0 | 5 votes |
package dcos.metronome package api.v1 import akka.util.ByteString import dcos.metronome.api.ApiConfig import mesosphere.marathon.core.election.ElectionService import org.slf4j.LoggerFactory import play.api.http.HttpEntity import play.api.libs.streams.Accumulator import play.api.libs.ws.WSClient import play.api.mvc._ class LeaderProxyFilter(ws: WSClient, electionService: ElectionService, config: ApiConfig) extends EssentialFilter with Results { import LeaderProxyFilter._ import scala.concurrent.ExecutionContext.Implicits.global val log = LoggerFactory.getLogger(getClass) val localHostPort = config.hostnameWithPort val localRoutes = Set("/ping", "/v1/metrics") override def apply(next: EssentialAction): EssentialAction = new EssentialAction { override def apply(request: RequestHeader): Accumulator[ByteString, Result] = { def isProxiedToSelf = request.headers.get(HEADER_VIA).contains(localHostPort) def doNotProxy() = localRoutes(request.path) if (electionService.isLeader || doNotProxy()) { next(request) } else if (isProxiedToSelf) { Accumulator.done(BadRequest("Prevent proxying already proxied request")) } else { electionService.leaderHostPort match { case Some(hostPort) => proxyRequest(request, hostPort) case None => Accumulator.done(ServiceUnavailable("No consistent leadership")) } } } } def proxyRequest(request: RequestHeader, leaderHostPort: String): Accumulator[ByteString, Result] = { log.info(s"Proxy request ${request.path} to $leaderHostPort") val headers = request.headers.headers ++ Seq(HEADER_LEADER -> leaderHostPort, HEADER_VIA -> localHostPort) val scheme = if (request.secure) "https" else "http" Accumulator.source[ByteString].mapFuture { source => ws.url(s"$scheme://$leaderHostPort${request.path}?${request.rawQueryString}") .withMethod(request.method) .withHttpHeaders(headers: _*) .withRequestTimeout(config.leaderProxyTimeout) .withBody(source) .execute() .map { r => val proxyHeaders = Map(HEADER_LEADER -> leaderHostPort, HEADER_VIA -> localHostPort) val responseHeaders = r.headers.map { case (k, v) => k -> v.mkString(", ") } val header = ResponseHeader(r.status, proxyHeaders ++ responseHeaders, Some(r.statusText)) val body = HttpEntity.Strict(r.bodyAsBytes, None) Result(header, body) } } } } object LeaderProxyFilter { val HEADER_VIA = "X-VIA" val HEADER_LEADER = "X-LEADER" }
Example 44
Source File: StatsdExporter.scala From akka-mon with MIT License | 5 votes |
package org.akkamon.core.exporters import java.net.InetSocketAddress import akka.actor.{ActorRef, Actor, Props} import akka.io.{Udp, IO} import akka.util.ByteString import org.akkamon.core.{Config, InstrumentExporter} object StatsdExporter extends InstrumentExporter { def formatEvent(event: TimerEvent): String = s"${event.timer}:${event.value}|ms" def formatEvent(event: CounterEvent): String = s"${event.key}:${event.value}|g" // we sent this as a gauge so statsd keeps track of the values def formatEvent(event: SampledEvent): String = s"${event.key}:${event.value}|c" val instrumentActor = system.actorOf(Props(classOf[SimpleSender], new InetSocketAddress(Config.StatsdHost, Config.StatsdPort))) class SimpleSender(remote: InetSocketAddress) extends Actor { IO(Udp) ! Udp.SimpleSender def receive = { case Udp.SimpleSenderReady => context.become(ready(sender())) } def ready(send: ActorRef): Receive = { case msg: MessageEvent => // do nothing, a message can't be sent to statsd case timer: TimerEvent => send ! Udp.Send(ByteString(formatEvent(timer)), remote) case counter: CounterEvent => send ! Udp.Send(ByteString(formatEvent(counter)), remote) case counter: SampledEvent => send ! Udp.Send(ByteString(formatEvent(counter)), remote) case counterMap: CounterEventMap => counterMap.counts.foreach{ case (key, value) => send ! Udp.Send(ByteString(formatEvent(CounterEvent(key, value))), remote)} } } }
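For reference, the wire strings these formatters produce, one UDP datagram each (the event field shapes are inferred from the formatEvent methods above):

// TimerEvent("db.query", 12)  -> "db.query:12|ms" (statsd timer, milliseconds)
// CounterEvent("requests", 3) -> "requests:3|g"   (gauge, so statsd keeps the value)
// SampledEvent("hits", 1)     -> "hits:1|c"       (plain counter)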
Example 45
Source File: PodLogsExample.scala From skuber with Apache License 2.0 | 5 votes |
package skuber.examples.podlogs import akka.NotUsed import skuber._ import skuber.json.format._ import akka.actor.ActorSystem import akka.stream.ActorMaterializer import akka.stream.scaladsl._ import akka.util.ByteString import skuber.api.client import scala.concurrent.Await import scala.concurrent.duration._ object PodLogExample extends App { def printLogFlow(cntrName: String): Sink[ByteString, NotUsed] = Flow[ByteString] .via(Framing.delimiter( ByteString("\n"), maximumFrameLength = 256, allowTruncation = true)) .map(_.utf8String) .to(Sink.foreach(text => println(s"[${cntrName} logs] $text"))) implicit val system = ActorSystem() implicit val materializer = ActorMaterializer() implicit val dispatcher = system.dispatcher val k8s = client.init( client.defaultK8sConfig.currentContext, client.LoggingConfig(logRequestBasic = false, logResponseBasic = false) ) val helloWorldContainer=Container(name="hello-world", image="busybox", command=List("sh", "-c", "echo Hello World! && echo Goodbye World && sleep 60")) val helloWorldContainer2=Container(name="hello-world2", image="busybox", command=List("sh", "-c", "echo Hello World again! && echo Goodbye World again && sleep 60")) val helloWorldPod=Pod("hello-world", Pod.Spec().addContainer(helloWorldContainer).addContainer(helloWorldContainer2)) val podFut = k8s.create(helloWorldPod) println("Waiting 30 seconds to allow pod initialisation to complete before getting logs...") Thread.sleep(30000) for { pod <- podFut logsSource <- k8s.getPodLogSource("hello-world", Pod.LogQueryParams(containerName = Some("hello-world"), sinceSeconds = Some(9999999))) logsSource1 <- k8s.getPodLogSource("hello-world", Pod.LogQueryParams(containerName = Some("hello-world2"), sinceTime = pod.metadata.creationTimestamp)) donePrinting = logsSource.runWith(printLogFlow("hello-world")) donePrinting1 = logsSource1.runWith(printLogFlow("hello-world2")) } yield (donePrinting, donePrinting1) // allow another 5 seconds for logs to be streamed from the pod to stdout before cleaning up Thread.sleep(5000) Await.result(k8s.delete[Pod]("hello-world"), 5.seconds) k8s.close system.terminate System.exit(0) }
Example 46
Source File: DataCenterAwarenessSpec.scala From squbs with Apache License 2.0 | 5 votes |
package org.squbs.cluster import org.scalatest.{FlatSpec, Matchers} import org.mockito.Mockito._ import akka.actor._ import akka.routing.ActorSelectionRoutee import akka.util.ByteString import org.scalatestplus.mockito.MockitoSugar import org.squbs.cluster.rebalance.{CorrelateRoundRobinRoutingLogic, DataCenterAwareRebalanceLogic, DefaultCorrelation} class DataCenterAwarenessSpec extends FlatSpec with Matchers with MockitoSugar { val myAddress = Address("akka.tcp", "pubsub", "10.100.194.253", 8080) val correlates = Seq(Address("akka.tcp", "pubsub", "10.100.65.147", 8080), Address("akka.tcp", "pubsub", "10.100.98.134", 8080)) val distances = Seq(Address("akka.tcp", "pubsub", "10.210.45.119", 8080), Address("akka.tcp", "pubsub", "10.210.79.201", 8080)) "DefaultCorrelation" should "extract ipv4 subnet domain" in { val mockAddress = Address("akka.tcp", "pubsub", "10.100.194.253", 8080) DefaultCorrelation().common(mockAddress) should equal("[email protected]") } "CorrelateRoundRobinRoutingLogic" should "prefer routees that correlate with itself" in { val routees = (correlates ++ distances).map(address => { val mockActorSelection = mock[ActorSelection] when(mockActorSelection.pathString).thenReturn(address.toString) ActorSelectionRoutee(mockActorSelection) }).toIndexedSeq val logic = CorrelateRoundRobinRoutingLogic(myAddress) logic.select("whatever", routees) match { case ActorSelectionRoutee(selection) => selection.pathString should equal("akka.tcp://[email protected]:8080") } logic.select("whatever", routees) match { case ActorSelectionRoutee(selection) => selection.pathString should equal("akka.tcp://[email protected]:8080") } logic.select("whatever", routees) match { case ActorSelectionRoutee(selection) => selection.pathString should equal("akka.tcp://[email protected]:8080") } } "DefaultDataCenterAwareRebalanceLogic" should "rebalance with correlations in considerations" in { val partitionKey = ByteString("some partition") val partitionsToMembers = Map(partitionKey -> Set.empty[Address]) def size(partitionKey:ByteString) = 2 var compensation = DataCenterAwareRebalanceLogic().compensate(partitionsToMembers, correlates ++ distances, size) compensation.getOrElse(partitionKey, Set.empty) should equal(Set(correlates.head, distances.head)) val morePartition = ByteString("another partition") compensation = DataCenterAwareRebalanceLogic(). compensate(compensation.updated(morePartition, Set.empty), correlates ++ distances, size) compensation.getOrElse(partitionKey, Set.empty) should equal(Set(correlates.head, distances.head)) compensation.getOrElse(morePartition, Set.empty) should equal(Set(correlates.head, distances.head)) val balanced = DataCenterAwareRebalanceLogic().rebalance(compensation, (correlates ++ distances).toSet) balanced.getOrElse(partitionKey, Set.empty) shouldNot equal(balanced.getOrElse(morePartition, Set.empty)) } "DefaultDataCenterAwareRebalanceLogic" should "rebalance after a DC failure recovery" in { val partitionKey = ByteString("some partition") val partitionsToMembers = Map(partitionKey -> Set.empty[Address]) def size(partitionKey:ByteString) = 2 var compensation = DataCenterAwareRebalanceLogic().compensate(partitionsToMembers, correlates ++ distances, size) compensation.getOrElse(partitionKey, Set.empty) should equal(Set(correlates.head, distances.head)) val balanced = DataCenterAwareRebalanceLogic().rebalance(compensation, (correlates ++ distances).toSet) balanced.getOrElse(partitionKey, Set.empty) should have size 2 //unfortunately correlates are gone?! 
compensation = DataCenterAwareRebalanceLogic(). compensate(partitionsToMembers.updated(partitionKey, Set(distances.head)), distances, size) compensation.getOrElse(partitionKey, Set.empty) should equal(distances.toSet) val rebalanced = DataCenterAwareRebalanceLogic().rebalance(compensation, distances.toSet) rebalanced.getOrElse(partitionKey, Set.empty) should equal(distances.toSet) val recovered = DataCenterAwareRebalanceLogic().rebalance(compensation, (correlates ++ distances).toSet) recovered.getOrElse(partitionKey, Set.empty) should have size 2 recovered.getOrElse(partitionKey, Set.empty) shouldNot equal(distances.toSet) correlates.contains(recovered.getOrElse(partitionKey, Set.empty).diff(distances.toSet).head) should equal(true) } }
Example 47
Source File: ZkClusterInitTest.scala From squbs with Apache License 2.0 | 5 votes |
package org.squbs.cluster import akka.util.ByteString import com.typesafe.scalalogging.LazyLogging import org.apache.curator.framework.CuratorFrameworkFactory import org.apache.curator.retry.ExponentialBackoffRetry import org.apache.zookeeper.CreateMode import org.squbs.cluster.test.{ZkClusterMultiActorSystemTestKit, ZkClusterTestHelper} import scala.language.implicitConversions class ZkClusterInitTest extends ZkClusterMultiActorSystemTestKit("ZkClusterInitTest") with LazyLogging with ZkClusterTestHelper { val par1 = ByteString("myPar1") val par2 = ByteString("myPar2") val par3 = ByteString("myPar3") implicit val log = logger implicit def string2ByteArray(s: String): Array[Byte] = s.toCharArray map (c => c.toByte) implicit def byteArray2String(array: Array[Byte]): String = array.map(_.toChar).mkString override def beforeAll(): Unit = { // Don't need to start the cluster for now // We preset the data in Zookeeper instead. val zkClient = CuratorFrameworkFactory.newClient( zkConfig.getString("zkCluster.connectionString"), new ExponentialBackoffRetry(ZkCluster.DEFAULT_BASE_SLEEP_TIME_MS, ZkCluster.DEFAULT_MAX_RETRIES) ) zkClient.start() zkClient.blockUntilConnected() implicit val zkClientWithNS = zkClient.usingNamespace(zkConfig.getString("zkCluster.namespace")) guarantee("/leader", Some(Array[Byte]()), CreateMode.PERSISTENT) guarantee("/members", Some(Array[Byte]()), CreateMode.PERSISTENT) guarantee("/segments", Some(Array[Byte]()), CreateMode.PERSISTENT) guarantee("/segments/segment-0", Some(Array[Byte]()), CreateMode.PERSISTENT) guarantee(s"/segments/segment-0/${keyToPath(par1)}", Some("myPar1"), CreateMode.PERSISTENT) guarantee(s"/segments/segment-0/${keyToPath(par1)}/servants", None, CreateMode.PERSISTENT) guarantee(s"/segments/segment-0/${keyToPath(par1)}/$$size", Some(3), CreateMode.PERSISTENT) guarantee(s"/segments/segment-0/${keyToPath(par2)}", Some("myPar2"), CreateMode.PERSISTENT) guarantee(s"/segments/segment-0/${keyToPath(par2)}/servants", None, CreateMode.PERSISTENT) guarantee(s"/segments/segment-0/${keyToPath(par2)}/$$size", Some(3), CreateMode.PERSISTENT) guarantee(s"/segments/segment-0/${keyToPath(par3)}", Some("myPar3"), CreateMode.PERSISTENT) guarantee(s"/segments/segment-0/${keyToPath(par3)}/servants", None, CreateMode.PERSISTENT) guarantee(s"/segments/segment-0/${keyToPath(par3)}/$$size", Some(3), CreateMode.PERSISTENT) zkClient.close() } "ZkCluster" should "list the partitions" in { startCluster() zkClusterExts foreach { case (_, ext) => ext tell (ZkListPartitions(ext.zkAddress), self) expectMsgType[ZkPartitions](timeout) } } "ZkCluster" should "load persisted partition information and sync across the cluster" in { zkClusterExts foreach { case (_, ext) => ext tell (ZkQueryPartition(par1), self) expectMsgType[ZkPartition](timeout).members should have size 3 } zkClusterExts foreach { case (_, ext) => ext tell (ZkQueryPartition(par2), self) expectMsgType[ZkPartition](timeout).members should have size 3 } zkClusterExts foreach { case (_, ext) => ext tell (ZkQueryPartition(par3), self) expectMsgType[ZkPartition](timeout).members should have size 3 } } "ZkCluster" should "list all the members across the cluster" in { val members = zkClusterExts.map(_._2.zkAddress).toSet zkClusterExts foreach { case (_, ext) => ext tell (ZkQueryMembership, self) expectMsgType[ZkMembership](timeout).members should be (members) } } }
Example 48
Source File: package.scala From squbs with Apache License 2.0 | 5 votes |
package org.squbs import java.net.{URLDecoder, URLEncoder} import java.nio.ByteBuffer import java.nio.charset.Charset import akka.actor.{Address, AddressFromURIString} import akka.util.ByteString import com.typesafe.scalalogging.Logger import org.apache.curator.framework.CuratorFramework import org.apache.zookeeper.CreateMode import org.apache.zookeeper.KeeperException.NodeExistsException import scala.language.implicitConversions import scala.util.Try import scala.util.control.NonFatal import scala.collection.JavaConverters._ package object cluster { trait SegmentationLogic { val segmentsSize:Int def segmentation(partitionKey:ByteString): String = s"segment-${Math.abs(partitionKey.hashCode()) % segmentsSize}" def partitionZkPath(partitionKey:ByteString): String = s"/segments/${segmentation(partitionKey)}/${keyToPath(partitionKey)}" def sizeOfParZkPath(partitionKey:ByteString): String = s"${partitionZkPath(partitionKey)}/$$size" def servantsOfParZkPath(partitionKey:ByteString): String = s"${partitionZkPath(partitionKey)}/servants" } case class DefaultSegmentationLogic(segmentsSize:Int) extends SegmentationLogic def guarantee(path:String, data:Option[Array[Byte]], mode:CreateMode = CreateMode.EPHEMERAL) (implicit zkClient:CuratorFramework, logger:Logger):String = { try{ data match { case None => zkClient.create.withMode(mode).forPath(path) case Some(bytes) => zkClient.create.withMode(mode).forPath(path, bytes) } } catch{ case e: NodeExistsException => if(data.nonEmpty && data.get.length > 0){ zkClient.setData().forPath(path, data.get) } path case NonFatal(e) => logger.info("leader znode creation failed due to %s\n", e) path } } def safelyDiscard(path:String, recursive: Boolean = true)(implicit zkClient: CuratorFramework): String = Try { if(recursive) zkClient.getChildren.forPath(path).asScala.foreach(child => safelyDiscard(s"$path/$child", recursive)) zkClient.delete.forPath(path) path } getOrElse path def keyToPath(name:String):String = URLEncoder.encode(name, "utf-8") def pathToKey(name:String):String = URLDecoder.decode(name, "utf-8") private[cluster] val BYTES_OF_INT = Integer.SIZE / java.lang.Byte.SIZE implicit def intToBytes(integer:Int):Array[Byte] = { val buf = ByteBuffer.allocate(BYTES_OF_INT) buf.putInt(integer) buf.rewind buf.array() } val UTF_8 = Charset.forName("utf-8") implicit class ByteConversions(val bytes: Array[Byte]) extends AnyVal { def toAddress: Option[Address] = Option(bytes) flatMap (b => if (b.length <= 0) None else Some(AddressFromURIString(new String(b, UTF_8)))) def toInt: Int = ByteBuffer.wrap(bytes).getInt def toUtf8: String = new String(bytes, UTF_8) def toByteString: ByteString = ByteString(bytes) def toAddressSet: Set[Address] = Try { new String(bytes, UTF_8).split("[,]").map(seg => AddressFromURIString(seg.trim)).toSet } getOrElse Set.empty } implicit def byteStringToUtf8(bs:ByteString):String = new String(bs.toArray, UTF_8) implicit def addressToBytes(address:Address):Array[Byte] = { address.toString.getBytes(UTF_8) } implicit def addressSetToBytes(members: Set[Address]): Array[Byte] = { members.mkString(",").getBytes(UTF_8) } }
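A quick illustration of how a partition key maps to its ZooKeeper paths under this scheme (the segment number depends on the key's hash; 128 segments assumed):

val logic = DefaultSegmentationLogic(128)
val key = ByteString("myPar1")
logic.segmentation(key)        // e.g. "segment-42" (hash mod 128)
logic.partitionZkPath(key)     // "/segments/segment-42/myPar1"
logic.sizeOfParZkPath(key)     // "/segments/segment-42/myPar1/$size"
logic.servantsOfParZkPath(key) // "/segments/segment-42/myPar1/servants"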
Example 49
Source File: package.scala From squbs with Apache License 2.0 | 5 votes |
package org.squbs.cluster import akka.actor.Address import akka.util.ByteString import scala.annotation.tailrec package object rebalance { trait RebalanceLogic { val spareLeader:Boolean def rebalance(partitionsToMembers: Map[ByteString, Set[Address]], members:Set[Address]): Map[ByteString, Set[Address]] = { val utilization = partitionsToMembers.foldLeft(Map.empty[Address, Seq[ByteString]]){(memoize, assign) => assign._2.foldLeft(memoize){(memoize, member) => memoize.updated(member, memoize.getOrElse(member, Seq.empty) :+ assign._1) } } val ordered = members.toSeq sortWith { (one, two) => utilization.getOrElse(one, Seq.empty).size < utilization.getOrElse(two, Seq.empty).size } @tailrec def rebalanceRecursively(partitionsToMembers: Map[ByteString, Set[Address]], utilization: Map[Address, Seq[ByteString]], ordered:Seq[Address]): Map[ByteString, Set[Address]] = { val overflows = utilization.getOrElse(ordered.last, Seq.empty) val underflow = utilization.getOrElse(ordered.head, Seq.empty) if (overflows.size - underflow.size > 1) { val move = overflows.head val updatedUtil = utilization.updated(ordered.last, overflows.tail).updated(ordered.head, underflow :+ move) var headOrdered = ordered.tail takeWhile { next => updatedUtil.getOrElse(ordered.head, Seq.empty).size < updatedUtil.getOrElse(next, Seq.empty).size } headOrdered = (headOrdered :+ ordered.head) ++ ordered.tail.drop(headOrdered.size) var rearOrdered = headOrdered takeWhile { next => updatedUtil.getOrElse(headOrdered.last, Seq.empty).size > updatedUtil.getOrElse(next, Seq.empty).size } // Drop the headOrdered.last rearOrdered = (rearOrdered :+ headOrdered.last) ++ headOrdered.drop(rearOrdered.size).dropRight(1) rebalanceRecursively(partitionsToMembers.updated(move, partitionsToMembers.getOrElse(move, Set.empty) + ordered.head - ordered.last), updatedUtil, rearOrdered) } else partitionsToMembers } rebalanceRecursively(partitionsToMembers, utilization, ordered) } } class DefaultRebalanceLogic(val spareLeader: Boolean) extends RebalanceLogic object DefaultRebalanceLogic { def apply(spareLeader: Boolean) = new DefaultRebalanceLogic(spareLeader) } }
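A small sketch of the rebalance in action (addresses and keys are illustrative): starting from one member holding both partitions, one partition is expected to move to the idle member:

import akka.actor.Address
import akka.util.ByteString

val a = Address("akka.tcp", "sys", "10.0.0.1", 2551)
val b = Address("akka.tcp", "sys", "10.0.0.2", 2551)
val before = Map(ByteString("p1") -> Set(a), ByteString("p2") -> Set(a))
val after = DefaultRebalanceLogic(spareLeader = false).rebalance(before, Set(a, b))
// expected: one of p1/p2 now maps to Set(b), evening the load to 1 and 1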
Example 50
Source File: package.scala From squbs with Apache License 2.0 | 5 votes |
package org.squbs import java.net.InetSocketAddress import java.nio.channels.ServerSocketChannel import akka.actor.ActorSystem import akka.http.scaladsl.Http import akka.http.scaladsl.model._ import akka.stream.ActorMaterializer import akka.util.ByteString import scala.concurrent.Future package object unicomplex { // Remove this once Akka-Http exposes this test utility. def temporaryServerAddress(interface: String = "127.0.0.1"): InetSocketAddress = { val serverSocket = ServerSocketChannel.open() try { serverSocket.socket.bind(new InetSocketAddress(interface, 0)) val port = serverSocket.socket.getLocalPort new InetSocketAddress(interface, port) } finally serverSocket.close() } def temporaryServerHostnameAndPort(interface: String = "127.0.0.1"): (InetSocketAddress, String, Int) = { val socketAddress = temporaryServerAddress(interface) (socketAddress, socketAddress.getHostName, socketAddress.getPort) } def extractEntityAsString(response: HttpResponse) (implicit am: ActorMaterializer, system: ActorSystem): Future[String] = { import system.dispatcher response.entity.dataBytes.runFold(ByteString(""))(_ ++ _) map(_.utf8String) } def entityAsString(uri: String)(implicit am: ActorMaterializer, system: ActorSystem): Future[String] = { import system.dispatcher get(uri) flatMap extractEntityAsString } def entityAsStringWithHeaders(uri: String)(implicit am: ActorMaterializer, system: ActorSystem): Future[(String, Seq[HttpHeader])] = { import system.dispatcher get(uri) flatMap( response => extractEntityAsString(response) map((_, response.headers))) } def entityAsInt(uri: String)(implicit am: ActorMaterializer, system: ActorSystem): Future[Int] = { import system.dispatcher entityAsString(uri) map (s => s.toInt) } def get(uri: String)(implicit am: ActorMaterializer, system: ActorSystem): Future[HttpResponse] = { Http().singleRequest(HttpRequest(uri = Uri(uri))) } def post(uri: String, e: RequestEntity)(implicit am: ActorMaterializer, system: ActorSystem): Future[HttpResponse] = { Http().singleRequest(HttpRequest(method = HttpMethods.POST, uri = Uri(uri), entity = e)) } def put(uri: String)(implicit am: ActorMaterializer, system: ActorSystem): Future[HttpResponse] = { Http().singleRequest(HttpRequest(method = HttpMethods.PUT, uri = Uri(uri))) } }
Example 51
Source File: StashCubeSvc.scala From squbs with Apache License 2.0 | 5 votes |
package org.squbs.unicomplex.stashcube import akka.actor.{Actor, Stash} import akka.http.scaladsl.model._ import akka.stream.ActorMaterializer import akka.util.ByteString import scala.collection.mutable.ListBuffer class StashCubeSvc extends Actor with Stash { private val msgList = new ListBuffer[String]() implicit val am = ActorMaterializer() import context.dispatcher override def receive: Receive = { case req@HttpRequest(HttpMethods.POST, _, _, _, _) => stash() val resp = HttpResponse(status = StatusCodes.Accepted, entity = HttpEntity("Stashed away!")) sender() ! resp case req@HttpRequest(HttpMethods.PUT, _, _, _, _) => context.become(report) val resp = HttpResponse(status = StatusCodes.Created, entity = HttpEntity("Un-stashed")) sender() ! resp unstashAll() case req@HttpRequest(HttpMethods.GET, _, _, _, _) => val resp = HttpResponse(entity = msgList.toSeq.toString()) sender() ! resp } def report: Receive = { case req@HttpRequest(HttpMethods.POST, _, _, _, _) => req.entity.dataBytes.runFold(ByteString(""))(_ ++ _) map(_.utf8String) foreach(msgList.append(_)) case req@HttpRequest(HttpMethods.GET, _, _, _, _) => val resp = HttpResponse(entity = msgList.toSeq.toString()) sender() ! resp } }
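Sketch of the expected HTTP interaction, derived from the receive blocks above (the actual path depends on how the cube is registered):

// POST -> 202 Accepted "Stashed away!" (request stashed, body not yet read)
// PUT  -> 201 Created "Un-stashed"     (become(report), replay stashed POSTs)
// GET  -> "List(...)"                  (messages appended so far by report)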
Example 52
Source File: package.scala From squbs with Apache License 2.0 | 5 votes |
package org.squbs import akka.actor.ActorSystem import akka.http.scaladsl.Http import akka.http.scaladsl.model.{HttpRequest, HttpResponse, Uri} import akka.stream.ActorMaterializer import akka.util.ByteString import scala.concurrent.Future package object testkit { case object TestPing case object TestPong def entityAsString(uri: String)(implicit am: ActorMaterializer, system: ActorSystem): Future[String] = { import system.dispatcher get(uri) flatMap extractEntityAsString } def get(uri: String)(implicit am: ActorMaterializer, system: ActorSystem): Future[HttpResponse] = { Http().singleRequest(HttpRequest(uri = Uri(uri))) } def extractEntityAsString(response: HttpResponse) (implicit am: ActorMaterializer, system: ActorSystem): Future[String] = { import system.dispatcher response.entity.dataBytes.runFold(ByteString(""))(_ ++ _) map(_.utf8String) } }
Example 53
Source File: ClientFlowHttpsSpec.scala From squbs with Apache License 2.0 | 5 votes |
package org.squbs.httpclient import java.io.InputStream import java.security.{KeyStore, SecureRandom} import javax.net.ssl.{KeyManagerFactory, SSLContext, TrustManagerFactory} import akka.actor.ActorSystem import akka.http.scaladsl.model._ import akka.http.scaladsl.{ConnectionContext, Http} import akka.stream.ActorMaterializer import akka.stream.scaladsl.{Sink, Source} import akka.util.ByteString import com.typesafe.config.ConfigFactory import org.scalatest.{AsyncFlatSpec, BeforeAndAfterAll, Matchers} import org.squbs.resolver.ResolverRegistry import org.squbs.testkit.Timeouts._ import scala.concurrent.{Await, Future} import scala.util.{Success, Try} object ClientFlowHttpsSpec { val config = ConfigFactory.parseString( """ |helloHttps { | type = squbs.httpclient | akka.ssl-config.loose.disableHostnameVerification = true |} """.stripMargin) implicit val system = ActorSystem("ClientFlowHttpsSpec", config) implicit val materializer = ActorMaterializer() ResolverRegistry(system).register[HttpEndpoint]("LocalhostHttpsEndpointResolver") { (name, _) => name match { case "helloHttps" => Some(HttpEndpoint(s"https://localhost:$port", Some(sslContext("exampletrust.jks", "changeit")), None)) case _ => None } } import akka.http.scaladsl.server.Directives._ import system.dispatcher val route = path("hello") { get { complete(HttpEntity(ContentTypes.`text/html(UTF-8)`, "Hello World!")) } } val serverBinding = Await.result(Http().bindAndHandle(route, "localhost", 0, ConnectionContext.https(sslContext("example.com.jks", "changeit"))), awaitMax) val port = serverBinding.localAddress.getPort } class ClientFlowHttpsSpec extends AsyncFlatSpec with Matchers with BeforeAndAfterAll { import ClientFlowHttpsSpec._ override def afterAll: Unit = { serverBinding.unbind() map {_ => system.terminate()} } it should "make a call to Hello Service" in { val clientFlow = ClientFlow[Int]("helloHttps") val responseFuture: Future[(Try[HttpResponse], Int)] = Source.single(HttpRequest(uri = "/hello") -> 42) .via(clientFlow) .runWith(Sink.head) val (Success(response), _) = Await.result(responseFuture, awaitMax) response.status should be (StatusCodes.OK) val entity = response.entity.dataBytes.runFold(ByteString(""))(_ ++ _) map(_.utf8String) entity map { e => e shouldEqual "Hello World!" } } }
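sslContext(...) is used above but not shown; given this file's imports, a plausible implementation (an assumption) loads a JKS store from the classpath for both key and trust material:

def sslContext(jksResource: String, password: String): SSLContext = {
  val ks = KeyStore.getInstance("JKS")
  val in: InputStream = getClass.getClassLoader.getResourceAsStream(jksResource)
  require(in != null, s"$jksResource not found on classpath")
  try ks.load(in, password.toCharArray) finally in.close()
  val kmf = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm)
  kmf.init(ks, password.toCharArray)
  val tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm)
  tmf.init(ks)
  val ctx = SSLContext.getInstance("TLS")
  ctx.init(kmf.getKeyManagers, tmf.getTrustManagers, new SecureRandom)
  ctx
}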
Example 54
Source File: WSPlayHandler.scala From scala-loci with Apache License 2.0 | 5 votes |
package loci package communicator package ws.akka import akka.util.ByteString import play.api.http.websocket.{BinaryMessage, Message, TextMessage} import scala.concurrent.Future import scala.util.Try private object WSPlayHandler { locally(WSPlayHandler) def handleWebSocket[P <: WS]( ws: Future[P], properties: WS.Properties, connectionEstablished: Try[Connection[P]] => Unit) = { new WSAbstractHandler[Message] { def createTextMessage(data: String) = TextMessage(data) def createBinaryMessage(data: ByteString) = BinaryMessage(data) def processMessage(message: Message) = message match { case BinaryMessage(data) => Some(Future successful data) case _ => None } } handleWebSocket (ws, properties, connectionEstablished) } }
Example 55
Source File: ZeromqKernelMessageSocket.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.kernel.protocol.v5.kernel.socket import java.nio.charset.Charset import akka.actor.{ActorSelection, ActorSystem, ActorRef, Actor} import akka.util.ByteString import org.apache.toree.communication.ZMQMessage //import org.apache.toree.kernel.protocol.v5.kernel.ZMQMessage import org.apache.toree.kernel.protocol.v5.KernelMessage import org.apache.toree.kernel.protocol.v5.kernel.Utilities._ import org.apache.toree.utils.MessageLogSupport abstract class ZeromqKernelMessageSocket( actorSocketFunc: (ActorSystem, ActorRef) => ActorRef, actorForwardFunc: () => ActorSelection ) extends Actor with MessageLogSupport { val actorSocketRef = actorSocketFunc(context.system, self) val actorForwardRef = actorForwardFunc() override def receive: Receive = { case message: ZMQMessage => val kernelMessage: KernelMessage = message logMessage(kernelMessage) // Grab the strings to use for signature verification val zmqStrings = message.frames.map((byteString: ByteString) => new String(byteString.toArray, Charset.forName("UTF-8")) ).takeRight(4) // TODO: This assumes NO extra buffers, refactor? // Forward along our message (along with the strings used for // signatures) actorForwardRef ! ((zmqStrings, kernelMessage)) case message: KernelMessage => val zmqMessage: ZMQMessage = message logMessage(message) actorSocketRef ! zmqMessage } }
Example 56
Source File: Utilities.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.kernel.protocol.v5.client import java.nio.charset.Charset import akka.util.{ByteString, Timeout} import org.apache.toree.communication.ZMQMessage import org.apache.toree.kernel.protocol.v5._ import org.apache.toree.kernel.protocol.v5.content.ExecuteRequest import org.apache.toree.utils.LogLike import play.api.data.validation.ValidationError import play.api.libs.json.{JsPath, Json, Reads} import scala.concurrent.duration._ object Utilities extends LogLike { // // NOTE: This is brought in to remove feature warnings regarding the use of // implicit conversions regarding the following: // // 1. ByteStringToString // 2. ZMQMessageToKernelMessage // import scala.language.implicitConversions private val sessionId: UUID = java.util.UUID.randomUUID().toString implicit val timeout = Timeout(21474835.seconds) // Maximum delay implicit def ByteStringToString(byteString : ByteString) : String = { new String(byteString.toArray, Charset.forName("UTF-8")) } implicit def StringToByteString(string : String) : ByteString = { ByteString(string.getBytes) } implicit def ZMQMessageToKernelMessage(message: ZMQMessage): KernelMessage = { val delimiterIndex: Int = message.frames.indexOf(ByteString("<IDS|MSG>".getBytes)) // TODO Handle the case where there is no delimiter val ids: Seq[Array[Byte]] = message.frames.take(delimiterIndex).map( (byteString : ByteString) => { byteString.toArray } ) val header = Json.parse(message.frames(delimiterIndex + 2)).as[Header] val parentHeader = Json.parse(message.frames(delimiterIndex + 3)).validate[ParentHeader].fold[ParentHeader]( // TODO: Investigate better solution than setting parentHeader to null for {} (invalid: Seq[(JsPath, Seq[ValidationError])]) => null, //HeaderBuilder.empty, (valid: ParentHeader) => valid ) val metadata = Json.parse(message.frames(delimiterIndex + 4)).as[Metadata] KMBuilder().withIds(ids.toList) .withSignature(message.frame(delimiterIndex + 1)) .withHeader(header) .withParentHeader(parentHeader) .withMetadata(metadata) .withContentString(message.frame(delimiterIndex + 5)).build(false) } implicit def KernelMessageToZMQMessage(kernelMessage : KernelMessage) : ZMQMessage = { val frames: scala.collection.mutable.ListBuffer[ByteString] = scala.collection.mutable.ListBuffer() kernelMessage.ids.map((id : Array[Byte]) => frames += ByteString.apply(id) ) frames += "<IDS|MSG>" frames += kernelMessage.signature frames += Json.toJson(kernelMessage.header).toString() frames += Json.toJson(kernelMessage.parentHeader).toString() frames += Json.toJson(kernelMessage.metadata).toString frames += kernelMessage.contentString ZMQMessage(frames : _*) } def parseAndHandle[T](json: String, reads: Reads[T], handler: T => Unit) : Unit = { Json.parse(json).validate[T](reads).fold( (invalid: Seq[(JsPath, Seq[ValidationError])]) => logger.error(s"Could not parse JSON, ${json}"), (content: T) => handler(content) ) } def getSessionId = sessionId def toKernelMessage(message: ExecuteRequest): KernelMessage = { // construct a kernel message whose content is an ExecuteRequest val id = java.util.UUID.randomUUID().toString val header = Header( id, "spark", sessionId, MessageType.Incoming.ExecuteRequest.toString, "5.0") KMBuilder().withIds(Seq[Array[Byte]]()).withSignature("").withHeader(header) .withParentHeader(HeaderBuilder.empty).withContentString(message).build } }
Example 57
Source File: HeartbeatClient.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.kernel.protocol.v5.client.socket

import akka.actor.{ActorRef, Actor}
import akka.util.{ByteString, Timeout}
import org.apache.toree.communication.ZMQMessage
import akka.pattern.ask
import org.apache.toree.kernel.protocol.v5.client.ActorLoader
import org.apache.toree.utils.LogLike
import org.apache.toree.kernel.protocol.v5.UUID

import scala.collection.concurrent.{Map, TrieMap}
import scala.concurrent.duration._

object HeartbeatMessage {}

class HeartbeatClient(
  socketFactory: SocketFactory,
  actorLoader: ActorLoader,
  signatureEnabled: Boolean
) extends Actor with LogLike {
  logger.debug("Created new Heartbeat Client actor")
  implicit val timeout = Timeout(1.minute)

  val futureMap: Map[UUID, ActorRef] = TrieMap[UUID, ActorRef]()
  val socket = socketFactory.HeartbeatClient(context.system, self)

  override def receive: Receive = {
    // from Heartbeat
    case message: ZMQMessage =>
      val id = message.frames.map((byteString: ByteString) =>
        new String(byteString.toArray)).mkString("\n")
      logger.info(s"Heartbeat client receive:$id")
      futureMap(id) ! true
      futureMap.remove(id)

    // from SparkKernelClient
    case HeartbeatMessage =>
      import scala.concurrent.ExecutionContext.Implicits.global
      val id = java.util.UUID.randomUUID().toString
      futureMap += (id -> sender)
      logger.info(s"Heartbeat client send: $id")
      val future = socket ? ZMQMessage(ByteString(id.getBytes))
      future.onComplete { // future always times out because server "tells" response
        { case (_) => futureMap.remove(id) }
      }
  }
}
Example 58
Source File: RouterSocketActor.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.communication.actors import akka.actor.{Actor, ActorRef} import akka.util.ByteString import org.apache.toree.communication.{SocketManager, ZMQMessage} import org.apache.toree.utils.LogLike import org.zeromq.ZMQ class RouterSocketActor(connection: String, listener: ActorRef) extends Actor with LogLike { logger.debug(s"Initializing router socket actor for $connection") private val manager: SocketManager = new SocketManager private val socket = manager.newRouterSocket(connection, (message: Seq[Array[Byte]]) => { listener ! ZMQMessage(message.map(ByteString.apply): _*) }) override def postStop(): Unit = { manager.closeSocket(socket) } override def receive: Actor.Receive = { case zmqMessage: ZMQMessage => val frames = zmqMessage.frames.map(byteString => byteString.toArray ) socket.send(frames: _*) } }
Example 59
Source File: RepSocketActor.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.communication.actors import akka.actor.{Actor, ActorRef} import akka.util.ByteString import org.apache.toree.communication.{SocketManager, ZMQMessage} import org.apache.toree.utils.LogLike import org.zeromq.ZMQ class RepSocketActor(connection: String, listener: ActorRef) extends Actor with LogLike { logger.debug(s"Initializing reply socket actor for $connection") private val manager: SocketManager = new SocketManager private val socket = manager.newRepSocket(connection, (message: Seq[Array[Byte]]) => { listener ! ZMQMessage(message.map(ByteString.apply): _*) }) override def postStop(): Unit = { manager.closeSocket(socket) } override def receive: Actor.Receive = { case zmqMessage: ZMQMessage => val frames = zmqMessage.frames.map(byteString => byteString.toArray ) socket.send(frames: _*) } }
Example 60
Source File: SubSocketActor.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.communication.actors import akka.actor.{Actor, ActorRef} import akka.util.ByteString import org.apache.toree.communication.{ZMQMessage, SocketManager} import org.apache.toree.utils.LogLike class SubSocketActor(connection: String, listener: ActorRef) extends Actor with LogLike { logger.debug(s"Initializing subscribe socket actor for $connection") private val manager: SocketManager = new SocketManager private val socket = manager.newSubSocket(connection, (message: Seq[Array[Byte]]) => { listener ! ZMQMessage(message.map(ByteString.apply): _*) }) override def postStop(): Unit = { manager.closeSocket(socket) } override def receive: Actor.Receive = { case _ => } }
Example 61
Source File: DealerSocketActor.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.communication.actors import akka.actor.{Actor, ActorRef} import akka.util.ByteString import org.apache.toree.communication.{ZMQMessage, SocketManager} import org.apache.toree.utils.LogLike import org.zeromq.ZMQ class DealerSocketActor(connection: String, listener: ActorRef) extends Actor with LogLike { logger.debug(s"Initializing dealer socket actor for $connection") private val manager: SocketManager = new SocketManager private val socket = manager.newDealerSocket(connection, (message: Seq[Array[Byte]]) => { listener ! ZMQMessage(message.map(ByteString.apply): _*) }) override def postStop(): Unit = { manager.closeSocket(socket) } override def receive: Actor.Receive = { case zmqMessage: ZMQMessage => val frames = zmqMessage.frames.map(byteString => byteString.toArray ) socket.send(frames: _*) } }
Example 62
Source File: ReqSocketActor.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.communication.actors import akka.actor.{Actor, ActorRef} import akka.util.ByteString import org.apache.toree.communication.{ZMQMessage, SocketManager} import org.apache.toree.utils.LogLike import org.zeromq.ZMQ class ReqSocketActor(connection: String, listener: ActorRef) extends Actor with LogLike { logger.debug(s"Initializing request socket actor for $connection") private val manager: SocketManager = new SocketManager private val socket = manager.newReqSocket(connection, (message: Seq[Array[Byte]]) => { listener ! ZMQMessage(message.map(ByteString.apply): _*) }) override def postStop(): Unit = { manager.closeSocket(socket) } override def receive: Actor.Receive = { case zmqMessage: ZMQMessage => val frames = zmqMessage.frames.map(byteString => byteString.toArray ) socket.send(frames: _*) } }
Example 63
Source File: EntityUtils.scala From asura with MIT License | 5 votes |
package asura.core.http import java.net.URLEncoder import akka.http.scaladsl.model.{ContentType, ContentTypes, HttpEntity, RequestEntity} import akka.util.ByteString import asura.common.util.{LogUtils, StringUtils} import asura.core.es.model.{HttpCaseRequest, KeyValueObject} import asura.core.http.UriUtils.UTF8 import asura.core.runtime.RuntimeContext import asura.core.util.JacksonSupport import com.fasterxml.jackson.core.`type`.TypeReference import com.typesafe.scalalogging.Logger object EntityUtils { val logger = Logger("EntityUtils") def toEntity(cs: HttpCaseRequest, context: RuntimeContext): RequestEntity = { val request = cs.request var contentType: ContentType = ContentTypes.`text/plain(UTF-8)` var byteString: ByteString = ByteString.empty if (StringUtils.isNotEmpty(request.contentType) && null != request.body && request.body.nonEmpty) { request.contentType match { case HttpContentTypes.JSON => contentType = ContentTypes.`application/json` val body = request.body.find(_.contentType == HttpContentTypes.JSON) if (body.nonEmpty) { byteString = ByteString(context.renderBodyAsString(body.get.data)) } case HttpContentTypes.X_WWW_FORM_URLENCODED => contentType = ContentTypes.`application/x-www-form-urlencoded` val body = request.body.find(_.contentType == HttpContentTypes.X_WWW_FORM_URLENCODED) if (body.nonEmpty) { var bodyStr: String = null try { val sb = StringBuilder.newBuilder val params = JacksonSupport.parse(body.get.data, new TypeReference[Seq[KeyValueObject]]() {}) for (pair <- params if (pair.enabled && StringUtils.isNotEmpty(pair.key))) { val rendered = context.renderBodyAsString(pair.value) sb.append(pair.key).append("=").append(URLEncoder.encode(rendered, UTF8)).append("&") } if (sb.nonEmpty) { sb.deleteCharAt(sb.length - 1) } bodyStr = sb.toString } catch { case t: Throwable => val errLog = LogUtils.stackTraceToString(t) logger.warn(errLog) bodyStr = errLog } byteString = ByteString(bodyStr) } case HttpContentTypes.TEXT_PLAIN => contentType = ContentTypes.`text/plain(UTF-8)` val body = request.body.find(_.contentType == HttpContentTypes.TEXT_PLAIN) if (body.nonEmpty) { byteString = ByteString(context.renderBodyAsString(body.get.data)) } case _ => } } HttpEntity(contentType, byteString) } }
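For comparison, the same url-encoded body can be assembled with plain Akka HTTP, without asura's runtime-context rendering; the key/value pairs below are placeholders:

import java.net.URLEncoder

import akka.http.scaladsl.model.{ContentTypes, HttpEntity}
import akka.util.ByteString

val pairs = Seq("name" -> "plain donut", "price" -> "1.50")
val body = pairs.map { case (k, v) => s"$k=${URLEncoder.encode(v, "UTF-8")}" }.mkString("&")
val entity = HttpEntity(ContentTypes.`application/x-www-form-urlencoded`, ByteString(body))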
Example 64
Source File: CompleteChunkSpec.scala From akka-xml-parser with Apache License 2.0 | 5 votes |
package uk.gov.hmrc.akka.xml

import akka.util.ByteString
import org.scalatest.{FlatSpec, Matchers}

import scala.concurrent.duration._
import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.scaladsl._
import akka.stream.testkit.scaladsl.TestSink
import akka.stream.testkit.scaladsl.TestSource

class CompleteChunkSpec extends FlatSpec {

  def createStream() = {
    val as = ActorSystem("CompleteChunkSpec")
    val am = ActorMaterializer()(as)
    val source = TestSource.probe[ByteString](as)
    val sink = TestSink.probe[ParsingData](as)
    val chunk = CompleteChunkStage.parser()
    //source.map(a => {println("<< " + a.decodeString("UTF-8"));a}).via(chunk).alsoTo(Sink.foreach(a => println(">> " + a))).toMat(sink)(Keep.both).run()(am) //Use for debugging
    source.via(chunk).toMat(sink)(Keep.both).run()(am)
  }

  it should "only let whole xml tags through" in {
    //This is our entire test xml: <xml><header><id>Joska</id><aa>Pista</aa><bb>Miska</bb></header></xml>
    val (pub, sub) = createStream()
    sub.request(20)
    pub.sendNext(ByteString("<xml><hea"))
    sub.expectNext(ParsingData(ByteString("<xml>"), Set.empty, 5))
    pub.sendNext(ByteString("der><id>Jo"))
    sub.expectNext(ParsingData(ByteString("<header><id>Jo"), Set.empty, 19))
    pub.sendNext(ByteString("ska</i"))
    sub.expectNext(ParsingData(ByteString("ska"), Set.empty, 22))
    pub.sendNext(ByteString("d><aa>Pista</a"))
    sub.expectNext(ParsingData(ByteString("</id><aa>Pista"), Set(), 36))
    pub.sendNext(ByteString("a><bb>Mis"))
    sub.expectNext(ParsingData(ByteString("</aa><bb>Mis"), Set(), 48))
    pub.sendNext(ByteString("ka</bb></he"))
    sub.expectNext(ParsingData(ByteString("ka</bb>"), Set(), 55))
    pub.sendNext(ByteString("ader></xml>"))
    sub.expectNext(ParsingData(ByteString("</header></xml>"), Set(), 70))
    pub.sendComplete()
    sub.expectNext(ParsingData(ByteString.empty, Set(XMLElement(List(), Map("Stream Size" -> "70"), Some("Stream Size"))), 70))
    sub.expectComplete()
  }
}
Example 65
Source File: XMLParserXMLExtractNamespaceSpec.scala From akka-xml-parser with Apache License 2.0 | 5 votes |
package uk.gov.hmrc.akka.xml import akka.stream.scaladsl.{Keep, Source} import akka.util.ByteString import org.scalatest.{FlatSpec, Matchers} import org.scalatest.concurrent.{Eventually, ScalaFutures} import org.scalatest.mock.MockitoSugar import org.scalatest.time.{Millis, Seconds, Span} class XMLParserXMLExtractNamespaceSpec extends FlatSpec with Matchers with ScalaFutures with MockitoSugar with Eventually with XMLParserFixtures { val f = fixtures implicit override val patienceConfig = PatienceConfig(timeout = Span(5, Seconds), interval = Span(5, Millis)) import f._ behavior of "CompleteChunkStage#parser" it should "Parse and extract several non-default namespaces" in { val testXMLX = <ns5:GovTalkMessage xmlns:ns0="http://www.govtalk.gov.uk/taxation/PAYE/RTI/EmployerPaymentSummary/13-14/2" xmlns:ns2="http://www.govtalk.gov.uk/taxation/PAYE/RTI/EmployerPaymentSummary/15-16/1" xmlns:ns5="http://www.govtalk.gov.uk/CM/envelope" xmlns:ns1="http://www.govtalk.gov.uk/taxation/PAYE/RTI/EmployerPaymentSummary/14-15/1" xmlns:ns3="http://www.govtalk.gov.uk/taxation/PAYE/RTI/EmployerPaymentSummary/16-17/1" xmlns:ns4="http://www.govtalk.gov.uk/taxation/PAYE/RTI/EmployerPaymentSummary/17-18/1" xmlns=""> <ns5:EnvelopeVersion>2.0</ns5:EnvelopeVersion> <ns5:Header></ns5:Header> <ns5:GovTalkDetails></ns5:GovTalkDetails> </ns5:GovTalkMessage> val source = Source(List(ByteString(testXMLX.toString()))) val paths = Seq[XMLInstruction]( XMLExtract(Seq("GovTalkMessage"), Map("xmlns:ns2" -> "http://www.govtalk.gov.uk/taxation/PAYE/RTI/EmployerPaymentSummary/15-16/1")), XMLExtract(Seq("GovTalkMessage"), Map("xmlns:BLABLA" -> "http://www.govtalk.gov.uk/taxation/PAYE/RTI/EmployerPaymentSummary/13-14/2")), XMLExtract(Seq("GovTalkMessage"), Map("xmlns" -> "http://www.govtalk.gov.uk/taxation/PAYE/RTI/EmployerPaymentSummary/17-18/1")), XMLExtract(Seq("GovTalkMessage"), Map("xmlns" -> "http://www.govtalk.gov.uk/CM/envelope")) ) val expected = Set( XMLElement(List("GovTalkMessage"), Map("xmlns:ns2" -> "http://www.govtalk.gov.uk/taxation/PAYE/RTI/EmployerPaymentSummary/15-16/1"), Some("")), XMLElement(List("GovTalkMessage"), Map("xmlns:ns0" -> "http://www.govtalk.gov.uk/taxation/PAYE/RTI/EmployerPaymentSummary/13-14/2"), Some("")), XMLElement(List("GovTalkMessage"), Map("xmlns:ns4" -> "http://www.govtalk.gov.uk/taxation/PAYE/RTI/EmployerPaymentSummary/17-18/1"), Some("")), XMLElement(List("GovTalkMessage"), Map("xmlns:ns5" -> "http://www.govtalk.gov.uk/CM/envelope"), Some("")), XMLElement(List(), Map(CompleteChunkStage.STREAM_SIZE -> "681"), Some(CompleteChunkStage.STREAM_SIZE)) ) whenReady(source.runWith(parseToXMLElements(paths))) { r => r shouldBe expected } whenReady(source.runWith(parseToByteString(paths))) { r => whenReady(source.toMat(collectByteString)(Keep.right).run()) { t => r shouldBe t } } } }
Example 66
Source File: ParsingStageSpec.scala From akka-xml-parser with Apache License 2.0 | 5 votes |
package uk.gov.hmrc.akka.xml

import akka.util.ByteString
import org.scalatest.{FlatSpec, Matchers}

import scala.concurrent.duration._
import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.scaladsl._
import akka.stream.testkit.scaladsl.TestSink
import akka.stream.testkit.scaladsl.TestSource

class ParsingStageSpec extends FlatSpec {

  def createStream(instructions: Seq[XMLInstruction], validationMaxSize: Option[Int] = None) = {
    val as = ActorSystem("CompleteChunkSpec")
    val am = ActorMaterializer()(as)
    val source = TestSource.probe[ParsingData](as)
    val sink = TestSink.probe[(ByteString, Set[XMLElement])](as)
    val chunk = ParsingStage.parser(instructions)
    //source.via(chunk).alsoTo(Sink.foreach(a => println(">> " + a._1.decodeString("UTF-8") + " | " + a._2))).toMat(sink)(Keep.both).run()(am)
    source.via(chunk).toMat(sink)(Keep.both).run()(am)
  }

  def getEmptyResult(in: String): (ByteString, Set[XMLElement]) = {
    (ByteString(in), Set.empty[XMLElement])
  }

  it should "Extract XMLInstruction from a xml broken into pieces (even xml tags are broken up)" in {
    val idHeader = XMLExtract(List("xml", "header", "id"))
    val aaHeader = XMLExtract(List("xml", "header", "aa"))
    //This is our entire test xml: <xml><header><id>Joska</id><aa>Pista</aa><bb>Miska</bb><cc/><dd/></header></xml>
    //The test xml
    val (pub, sub) = createStream(Seq(idHeader, aaHeader))
    sub.request(10)
    pub.sendNext(ParsingData(ByteString("<xml><hea"), Set.empty, 5))
    sub.expectNext(getEmptyResult("<xml><hea"))
    pub.sendNext(ParsingData(ByteString("der><id>Jo"), Set.empty, 19))
    sub.expectNext(getEmptyResult("der><id>Jo"))
    pub.sendNext(ParsingData(ByteString("ska</i"), Set.empty, 26))
    sub.expectNext(getEmptyResult("ska</i"))
    pub.sendNext(ParsingData(ByteString("d><aa>Pista</a"), Set.empty, 32))
    sub.expectNext((ByteString("d><aa>Pista</a"), Set(XMLElement(List("xml", "header", "id"), Map(), Some("Joska")))))
    pub.sendNext(ParsingData(ByteString("a><bb>Mis"), Set.empty, 38))
    sub.expectNext((ByteString("a><bb>Mis"), Set(XMLElement(List("xml", "header", "aa"), Map(), Some("Pista")))))
    pub.sendNext(ParsingData(ByteString("ka</bb><cc/"), Set.empty, 50))
    sub.expectNext(getEmptyResult("ka</bb><cc/"))
    pub.sendNext(ParsingData(ByteString("><dd"), Set.empty, 57))
    sub.expectNext(getEmptyResult("><dd"))
    pub.sendNext(ParsingData(ByteString("/></header></xml>"), Set.empty, 57))
    sub.expectNext(getEmptyResult("/></header></xml>"))
    pub.sendComplete()
  }
}
Example 67
Source File: OutputXMLMatchesInputXMLSpec.scala From akka-xml-parser with Apache License 2.0 | 5 votes |
import akka.stream.scaladsl.{Keep, Source} import akka.util.ByteString import org.scalatest import org.scalatest.concurrent.{Eventually, ScalaFutures} import org.scalatest.mockito.MockitoSugar import org.scalatest.{BeforeAndAfterEach, Matchers} import uk.gov.hmrc.akka.xml._ import uk.gov.hmrc.play.test.UnitSpec import scala.concurrent.ExecutionContext.Implicits.global class OutputXMLMatchesInputXMLSpec extends UnitSpec with BeforeAndAfterEach with Matchers with ScalaFutures with MockitoSugar with Eventually with XMLParserFixtures { val inputXml = "<Address xmlns=\"http://www.govtalk.gov.uk/CM/address\"><Line>Line 1</Line><Line>Line 2</Line><PostCode>Tf3 4NT</PostCode></Address>" val inputXmlWithSelfClosingElement = "<Address xmlns=\"http://www.govtalk.gov.uk/CM/address\"><Line>Line 1</Line><Line>Line 2</Line><Line/><PostCode>Tf3 4NT</PostCode></Address>" val inputXmlWithBlankElement = "<Address xmlns=\"http://www.govtalk.gov.uk/CM/address\"><Line>Line 1</Line><Line>Line 2</Line><Line></Line><PostCode>Tf3 4NT</PostCode></Address>" val f = fixtures def xpathValue(xmlElements: Set[XMLElement], xPath: Seq[String]): Option[String] = xmlElements.collectFirst { case XMLElement(`xPath`, _, Some(xpathValue)) => xpathValue } def parseAndCompare(inputXml: String): scalatest.Assertion = { val inputXmlSource: Source[ByteString, _] = Source.single(ByteString(inputXml)) await( for { parsedXmlElements <- inputXmlSource .via(CompleteChunkStage.parser()) .via(ParsingStage.parser(Seq(XMLExtract(Seq("Address"), Map.empty, true)))) .via(f.flowXMLElements) .toMat(f.collectXMLElements)(Keep.right) .run()(f.mat) parsedXml = xpathValue(parsedXmlElements, Seq("Address")) } yield { val outputXml = parsedXml.get println(s"INPUT XML = $inputXml") println(s"OUTPUT XML = $outputXml") println() outputXml shouldBe inputXml } ) } "The output XML" should { "match the input XML" when { "blank elements *** ARE *** present" in parseAndCompare(inputXmlWithBlankElement) "self closing elements are *** NOT *** present" in parseAndCompare(inputXml) "self closing elements *** ARE *** present" in parseAndCompare(inputXmlWithSelfClosingElement) } } }
Example 68
Source File: MinimumChunk.scala From akka-xml-parser with Apache License 2.0 | 5 votes |
package uk.gov.hmrc.akka.xml import akka.NotUsed import akka.stream.scaladsl.Flow import akka.stream.stage.{GraphStage, GraphStageLogic, InHandler, OutHandler} import akka.stream.{Attributes, FlowShape, Inlet, Outlet} import akka.util.ByteString @deprecated("Use FastParsingStage instead","akka-xml-parser 1.0.0") object MinimumChunk { def parser(minimumChunkSize: Int): Flow[ByteString, ByteString, NotUsed] = { Flow.fromGraph(new StreamingXmlParser(minimumChunkSize)) } private class StreamingXmlParser(minimumChunkSize: Int) extends GraphStage[FlowShape[ByteString, ByteString]] with StreamHelper with ParsingDataFunctions { val in: Inlet[ByteString] = Inlet("Chunking.in") val out: Outlet[ByteString] = Outlet("Chunking.out") override val shape: FlowShape[ByteString, ByteString] = FlowShape(in, out) override def createLogic(inheritedAttributes: Attributes): GraphStageLogic = new GraphStageLogic(shape) { private var buffer = ByteString.empty setHandler(in, new InHandler { override def onPush(): Unit = { val elem = grab(in) buffer ++= elem emitChunk() } override def onUpstreamFinish(): Unit = { emit(out, buffer) completeStage() } }) setHandler(out, new OutHandler { override def onPull(): Unit = { pull(in) } }) private def emitChunk(): Unit = { if (buffer.size > minimumChunkSize) { push(out, buffer) buffer = ByteString.empty } else { pull(in) } } } } }
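A hedged usage sketch of the (deprecated) stage above: small upstream chunks are buffered until the configured minimum size is exceeded, and any remainder is flushed on completion. The actor-system name and sample chunks are placeholders:

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{Sink, Source}
import akka.util.ByteString
import uk.gov.hmrc.akka.xml.MinimumChunk

implicit val system = ActorSystem("minimum-chunk-example")
implicit val mat = ActorMaterializer()

Source(List(ByteString("ab"), ByteString("cd"), ByteString("ef")))
  .via(MinimumChunk.parser(minimumChunkSize = 3))
  .runWith(Sink.foreach(bs => println(bs.utf8String))) // expected: "abcd", then "ef" on completion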
Example 69
Source File: Json4sSupport.scala From service-container with Apache License 2.0 | 5 votes |
package com.github.vonnagy.service.container.http.json import java.lang.reflect.InvocationTargetException import akka.http.scaladsl.marshalling.{Marshaller, ToEntityMarshaller} import akka.http.scaladsl.model.MediaTypes.`application/json` import akka.http.scaladsl.unmarshalling.{FromEntityUnmarshaller, Unmarshaller} import akka.util.ByteString import org.json4s.JsonAST.JValue import org.json4s.{Formats, MappingException, Serialization} implicit def json4sMarshaller[A <: AnyRef]( implicit serialization: Serialization, formats: Formats, shouldWritePretty: ShouldWritePretty = ShouldWritePretty.False ): ToEntityMarshaller[A] = { shouldWritePretty match { case ShouldWritePretty.False => jsonStringMarshaller.compose(serialization.write[A]) case ShouldWritePretty.True => jsonStringMarshaller.compose(serialization.writePretty[A]) } } implicit def json4sJValueMarshaller[A <: JValue]( implicit serialization: Serialization, formats: Formats, shouldWritePretty: ShouldWritePretty = ShouldWritePretty.False ): ToEntityMarshaller[A] = { shouldWritePretty match { case ShouldWritePretty.False => jsonStringMarshaller.compose(serialization.write[A]) case ShouldWritePretty.True => jsonStringMarshaller.compose(serialization.writePretty[A]) } } }
Example 70
Source File: BytesToWatchEventSource.scala From skuber with Apache License 2.0 | 5 votes |
package skuber.api.watch import akka.stream.scaladsl.{JsonFraming, Source} import akka.util.ByteString import play.api.libs.json.{Format, JsError, JsSuccess, Json} import skuber.ObjectResource import skuber.api.client.{K8SException, Status, WatchEvent} import scala.concurrent.ExecutionContext private[api] object BytesToWatchEventSource { def apply[O <: ObjectResource](bytesSource: Source[ByteString, _], bufSize: Int)(implicit ec: ExecutionContext, format: Format[O]): Source[WatchEvent[O], _] = { import skuber.json.format.apiobj.watchEventFormat bytesSource.via( JsonFraming.objectScanner(bufSize) ).map { singleEventBytes => Json.parse(singleEventBytes.utf8String).validate(watchEventFormat[O]) match { case JsSuccess(value, _) => value case JsError(e) => throw new K8SException(Status(message = Some("Error parsing watched object"), details = Some(e.toString))) } } } }
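The heavy lifting here is done by JsonFraming.objectScanner, which re-chunks arbitrary byte boundaries into complete JSON objects before parsing. A standalone sketch; the 1024-byte frame limit and the sample payload are illustrative only:

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{JsonFraming, Sink, Source}
import akka.util.ByteString

implicit val system = ActorSystem("json-framing-example")
implicit val mat = ActorMaterializer()

Source(List(ByteString("""{"type":"ADD"""), ByteString("""ED"}{"type":"DELETED"}""")))
  .via(JsonFraming.objectScanner(1024))
  .map(_.utf8String)
  .runWith(Sink.foreach(println)) // prints the two complete JSON objects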
Example 71
Source File: JsonBodyReadablesSpec.scala From play-ws with Apache License 2.0 | 5 votes |
package play.api.libs.ws import java.nio.charset.Charset import java.nio.charset.StandardCharsets._ import akka.stream.scaladsl.Source import akka.util.ByteString import org.specs2.matcher.MustMatchers import org.specs2.mutable.Specification import play.api.libs.json.JsSuccess import play.api.libs.json.JsValue class JsonBodyReadablesSpec extends Specification with MustMatchers { class StubResponse(byteArray: Array[Byte], charset: Charset = UTF_8) extends StandaloneWSResponse { override def uri: java.net.URI = ??? override def headers: Map[String, Seq[String]] = ??? override def underlying[T]: T = ??? override def status: Int = ??? override def statusText: String = ??? override def cookies: Seq[WSCookie] = ??? override def cookie(name: String): Option[WSCookie] = ??? override def body: String = new String(byteArray, charset) override def bodyAsBytes: ByteString = ByteString.fromArray(byteArray) override def bodyAsSource: Source[ByteString, _] = ??? } "decode encodings correctly" should { "read an encoding of UTF-32BE" in { val readables = new JsonBodyReadables() {} val json = """{"menu": {"id": "file", "value": "File"} }""" val charsetName = "UTF-32BE" val value: JsValue = readables.readableAsJson.transform(new StubResponse(json.getBytes(charsetName), Charset.forName(charsetName))) (value \ "menu" \ "id").validate[String] must beEqualTo(JsSuccess("file")) } "read an encoding of UTF-32LE" in { val readables = new JsonBodyReadables() {} val json = """{"menu": {"id": "file", "value": "File"} }""" val charsetName = "UTF-32LE" val value: JsValue = readables.readableAsJson.transform(new StubResponse(json.getBytes(charsetName), Charset.forName(charsetName))) (value \ "menu" \ "id").validate[String] must beEqualTo(JsSuccess("file")) } "read an encoding of UTF-16BE" in { val readables = new JsonBodyReadables() {} val json = """{"menu": {"id": "file", "value": "File"} }""" val charset = UTF_16BE val value: JsValue = readables.readableAsJson.transform(new StubResponse(json.getBytes(charset), charset)) (value \ "menu" \ "id").validate[String] must beEqualTo(JsSuccess("file")) } "read an encoding of UTF-16LE" in { val readables = new JsonBodyReadables() {} val json = """{"menu": {"id": "file", "value": "File"} }""" val charset = UTF_16LE val value: JsValue = readables.readableAsJson.transform(new StubResponse(json.getBytes(charset), charset)) (value \ "menu" \ "id").validate[String] must beEqualTo(JsSuccess("file")) } "read an encoding of UTF-8" in { val readables = new JsonBodyReadables() {} val json = """{"menu": {"id": "file", "value": "File"} }""" val value: JsValue = readables.readableAsJson.transform(new StubResponse(json.getBytes(UTF_8))) (value \ "menu" \ "id").validate[String] must beEqualTo(JsSuccess("file")) } "read an encoding of UTF-8 with empty object" in { val readables = new JsonBodyReadables() {} val json = "{}" val value: JsValue = readables.readableAsJson.transform(new StubResponse(json.getBytes(UTF_8))) value.toString() must beEqualTo("{}") } "read an encoding of UTF-8 with empty array" in { val readables = new JsonBodyReadables() {} val json = "[]" val value: JsValue = readables.readableAsJson.transform(new StubResponse(json.getBytes(UTF_8))) value.toString() must beEqualTo("[]") } } }
Example 72
Source File: DefaultBodyWritables.scala From play-ws with Apache License 2.0 | 5 votes |
package play.api.libs.ws

import java.io.File
import java.nio.ByteBuffer
import java.util.function.Supplier

import akka.stream.scaladsl.StreamConverters.fromInputStream
import akka.stream.scaladsl.FileIO
import akka.stream.scaladsl.Source
import akka.util.ByteString

import scala.compat.java8.FunctionConverters.asScalaFromSupplier

// The enclosing trait declaration (elided in this listing) is restored here so
// the braces balance; its name follows from the companion object below.
trait DefaultBodyWritables {

  implicit val writeableOf_urlEncodedForm: BodyWritable[Map[String, Seq[String]]] = {
    import java.net.URLEncoder
    BodyWritable(
      formData =>
        InMemoryBody(
          ByteString.fromString(
            formData.flatMap(item => item._2.map(c => s"${item._1}=${URLEncoder.encode(c, "UTF-8")}")).mkString("&")
          )
        ),
      "application/x-www-form-urlencoded"
    )
  }

  implicit val writeableOf_urlEncodedSimpleForm: BodyWritable[Map[String, String]] = {
    writeableOf_urlEncodedForm.map[Map[String, String]](_.map(kv => kv._1 -> Seq(kv._2)))
  }
}

object DefaultBodyWritables extends DefaultBodyWritables
Example 73
Source File: Body.scala From play-ws with Apache License 2.0 | 5 votes |
package play.api.libs.ws import akka.stream.scaladsl.Source import akka.util.ByteString import scala.annotation.implicitNotFound @implicitNotFound( "Cannot find an instance of ${A} to WSBody. Define a BodyWritable[${A}] or extend play.api.libs.ws.ahc.DefaultBodyWritables" ) class BodyWritable[-A](val transform: A => WSBody, val contentType: String) { def map[B](f: B => A): BodyWritable[B] = new BodyWritable(b => transform(f(b)), contentType) } object BodyWritable { def apply[A](transform: (A => WSBody), contentType: String): BodyWritable[A] = new BodyWritable(transform, contentType) }
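Because BodyWritable is contravariant and exposes map, a writable for a wrapper type can be derived from an existing one. A minimal sketch; the Name type is hypothetical, and the String instance is assumed to come from DefaultBodyWritables:

import play.api.libs.ws.BodyWritable

final case class Name(value: String) // hypothetical wrapper type

def nameWritable(implicit sw: BodyWritable[String]): BodyWritable[Name] =
  sw.map[Name](_.value) // reuses the String writable's transform and content type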
Example 74
Source File: JsonUnmarshaller.scala From akka-http-oauth2-client with Apache License 2.0 | 5 votes |
package com.github.dakatsuka.akka.http.oauth2.client.utils import akka.http.scaladsl.model.ContentTypeRange import akka.http.scaladsl.model.MediaTypes.`application/json` import akka.http.scaladsl.unmarshalling.{ FromEntityUnmarshaller, Unmarshaller } import akka.util.ByteString import io.circe.{ jawn, Decoder, Json } trait JsonUnmarshaller { def unmarshallerContentTypes: Seq[ContentTypeRange] = List(`application/json`) implicit def jsonUnmarshaller: FromEntityUnmarshaller[Json] = Unmarshaller.byteStringUnmarshaller .forContentTypes(unmarshallerContentTypes: _*) .map { case ByteString.empty => throw Unmarshaller.NoContentException case data => jawn.parseByteBuffer(data.asByteBuffer).fold(throw _, identity) } implicit def unmarshaller[A: Decoder]: FromEntityUnmarshaller[A] = { def decode(json: Json) = implicitly[Decoder[A]].decodeJson(json).fold(throw _, identity) jsonUnmarshaller.map(decode) } }
Example 75
Source File: CarbonClient.scala From akka-http-metrics with Apache License 2.0 | 5 votes |
package fr.davit.akka.http.metrics.graphite

import java.time.{Clock, Instant}

import akka.NotUsed
import akka.actor.ActorSystem
import akka.event.Logging
import akka.stream.scaladsl.{Flow, Keep, RestartFlow, Sink, Source, Tcp}
import akka.stream.{OverflowStrategy, QueueOfferResult}
import akka.util.ByteString
import fr.davit.akka.http.metrics.core.Dimension

import scala.concurrent.Await
import scala.concurrent.duration.{Duration, _}

object CarbonClient {

  def apply(host: String, port: Int)(implicit system: ActorSystem): CarbonClient =
    new CarbonClient(host, port)
}

class CarbonClient(host: String, port: Int)(implicit system: ActorSystem) extends AutoCloseable {

  private val logger = Logging(system.eventStream, classOf[CarbonClient])
  protected val clock: Clock = Clock.systemUTC()

  private def serialize[T](name: String, value: T, dimensions: Seq[Dimension], ts: Instant): ByteString = {
    val tags = dimensions.map(d => d.key + "=" + d.value).toList
    val taggedMetric = (name :: tags).mkString(";")
    ByteString(s"$taggedMetric $value ${ts.getEpochSecond}\n")
  }

  // TODO read backoff from config
  private def connection: Flow[ByteString, ByteString, NotUsed] =
    RestartFlow.withBackoff(
      minBackoff = 3.seconds,
      maxBackoff = 30.seconds,
      randomFactor = 0.2, // adds 20% "noise" to vary the intervals slightly
      maxRestarts = -1 // keep retrying forever
    )(() => Tcp().outgoingConnection(host, port))

  private val queue = Source
    .queue[ByteString](19, OverflowStrategy.dropHead)
    .via(connection)
    .toMat(Sink.ignore)(Keep.left)
    .run()

  def publish[T](
      name: String,
      value: T,
      dimensions: Seq[Dimension] = Seq.empty,
      ts: Instant = Instant.now(clock)
  ): Unit = {
    // it's reasonable to block until the message is enqueued
    Await.result(queue.offer(serialize(name, value, dimensions, ts)), Duration.Inf) match {
      case QueueOfferResult.Enqueued    => logger.debug("Metric {} enqueued", name)
      case QueueOfferResult.Dropped     => logger.debug("Metric {} dropped", name)
      case QueueOfferResult.Failure(e)  => logger.error(e, s"Failed publishing metric $name")
      case QueueOfferResult.QueueClosed => throw new Exception("Failed publishing metric to closed carbon client")
    }
  }

  override def close(): Unit = {
    queue.complete()
    Await.result(queue.watchCompletion(), Duration.Inf)
  }
}
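A hedged usage sketch of the client above; the host, metric name and value are placeholders (2003 is the conventional Carbon plaintext port):

import akka.actor.ActorSystem
import fr.davit.akka.http.metrics.graphite.CarbonClient

implicit val system: ActorSystem = ActorSystem("carbon-example")

val client = CarbonClient("localhost", 2003)
client.publish("http.requests.count", 42) // sends "metric value timestamp\n" over TCP
client.close()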
Example 76
Source File: ClasspathResources.scala From reactive-activemq with Apache License 2.0 | 5 votes |
package akka.stream.integration import java.io.InputStream import akka.stream.IOResult import akka.stream.scaladsl.{ Source, StreamConverters } import akka.util.ByteString import scala.concurrent.Future import scala.io.{ Source => ScalaIOSource } trait ClasspathResources { def withInputStream[T](fileName: String)(f: InputStream => T): T = { val is: InputStream = fromClasspathAsStream(fileName) try f(is) finally is.close() } def withInputStreamAsText[T](fileName: String)(f: String => T): T = f(fromClasspathAsString(fileName)) def withByteStringSource[T](fileName: String)(f: Source[ByteString, Future[IOResult]] => T): T = withInputStream(fileName) { inputStream => f(StreamConverters.fromInputStream(() => inputStream)) } def streamToString(is: InputStream): String = ScalaIOSource.fromInputStream(is).mkString def fromClasspathAsString(fileName: String): String = streamToString(fromClasspathAsStream(fileName)) def fromClasspathAsStream(fileName: String): InputStream = getClass.getClassLoader.getResourceAsStream(fileName) }
Example 77
Source File: MemoryAttachmentStore.scala From openwhisk with Apache License 2.0 | 5 votes |
package org.apache.openwhisk.core.database.memory import akka.actor.ActorSystem import akka.http.scaladsl.model.ContentType import akka.stream.ActorMaterializer import akka.stream.scaladsl.{Keep, Sink, Source} import akka.util.{ByteString, ByteStringBuilder} import org.apache.openwhisk.common.LoggingMarkers.{ DATABASE_ATTS_DELETE, DATABASE_ATT_DELETE, DATABASE_ATT_GET, DATABASE_ATT_SAVE } import org.apache.openwhisk.common.{Logging, TransactionId} import org.apache.openwhisk.core.database.StoreUtils._ import org.apache.openwhisk.core.database._ import org.apache.openwhisk.core.entity.DocId import scala.collection.concurrent.TrieMap import scala.concurrent.{ExecutionContext, Future} import scala.reflect.ClassTag object MemoryAttachmentStoreProvider extends AttachmentStoreProvider { override def makeStore[D <: DocumentSerializer: ClassTag]()(implicit actorSystem: ActorSystem, logging: Logging, materializer: ActorMaterializer): AttachmentStore = new MemoryAttachmentStore(implicitly[ClassTag[D]].runtimeClass.getSimpleName.toLowerCase) } override protected[core] def readAttachment[T](docId: DocId, name: String, sink: Sink[ByteString, Future[T]])( implicit transid: TransactionId): Future[T] = { val start = transid.started( this, DATABASE_ATT_GET, s"[ATT_GET] '$dbName' finding attachment '$name' of document 'id: $docId'") val f = attachments.get(attachmentKey(docId, name)) match { case Some(Attachment(bytes)) => val r = Source.single(bytes).toMat(sink)(Keep.right).run r.map(t => { transid.finished(this, start, s"[ATT_GET] '$dbName' completed: found attachment '$name' of document '$docId'") t }) case None => transid.finished( this, start, s"[ATT_GET] '$dbName', retrieving attachment '$name' of document '$docId'; not found.") Future.failed(NoDocumentException("Not found on 'readAttachment'.")) } reportFailure( f, start, failure => s"[ATT_GET] '$dbName' internal error, name: '$name', doc: '$docId', failure: '${failure.getMessage}'") } override protected[core] def deleteAttachments(docId: DocId)(implicit transid: TransactionId): Future[Boolean] = { val start = transid.started(this, DATABASE_ATTS_DELETE, s"[ATTS_DELETE] uploading attachment of document '$docId'") val prefix = docId + "/" attachments --= attachments.keySet.filter(_.startsWith(prefix)) transid.finished(this, start, s"[ATTS_DELETE] completed: delete attachment of document '$docId'") Future.successful(true) } override protected[core] def deleteAttachment(docId: DocId, name: String)( implicit transid: TransactionId): Future[Boolean] = { val start = transid.started(this, DATABASE_ATT_DELETE, s"[ATT_DELETE] uploading attachment of document '$docId'") attachments.remove(attachmentKey(docId, name)) transid.finished(this, start, s"[ATT_DELETE] completed: delete attachment of document '$docId'") Future.successful(true) } def attachmentCount: Int = attachments.size def isClosed = closed override def shutdown(): Unit = { closed = true } private def attachmentKey(docId: DocId, name: String) = s"${docId.id}/$name" }
Example 78
Source File: DockerClientWithFileAccess.scala From openwhisk with Apache License 2.0 | 5 votes |
package org.apache.openwhisk.core.containerpool.docker import java.io.File import java.nio.file.Paths import akka.actor.ActorSystem import akka.stream.alpakka.file.scaladsl.FileTailSource import akka.stream.scaladsl.{FileIO, Source => AkkaSource} import akka.util.ByteString import scala.concurrent.ExecutionContext import scala.concurrent.Future import scala.concurrent.blocking import spray.json.DefaultJsonProtocol._ import spray.json._ import org.apache.openwhisk.common.Logging import org.apache.openwhisk.common.TransactionId import org.apache.openwhisk.core.containerpool.ContainerId import org.apache.openwhisk.core.containerpool.ContainerAddress import scala.io.Source import scala.concurrent.duration.FiniteDuration class DockerClientWithFileAccess(dockerHost: Option[String] = None, containersDirectory: File = Paths.get("containers").toFile)( executionContext: ExecutionContext)(implicit log: Logging, as: ActorSystem) extends DockerClient(dockerHost)(executionContext) with DockerApiWithFileAccess { implicit private val ec = executionContext def rawContainerLogs(containerId: ContainerId, fromPos: Long, pollInterval: Option[FiniteDuration]): AkkaSource[ByteString, Any] }
Example 79
Source File: VisualMailboxMetricClient.scala From akka-visualmailbox with Apache License 2.0 | 5 votes |
package de.aktey.akka.visualmailbox import java.net.InetSocketAddress import akka.actor.{Actor, ActorRef, ExtendedActorSystem, Extension, ExtensionId, ExtensionIdProvider, Props} import akka.io.{IO, Udp} import akka.util.ByteString import de.aktey.akka.visualmailbox.packing.Packing object VisualMailboxMetricClient extends ExtensionId[VisualMailboxMetricClient] with ExtensionIdProvider { override def createExtension(system: ExtendedActorSystem): VisualMailboxMetricClient = { new VisualMailboxMetricClient( system, VisualMailboxMetricClientConfig.fromConfig(system.settings.config) ) } override def lookup(): ExtensionId[_ <: Extension] = VisualMailboxMetricClient } class VisualMailboxMetricClient(system: ExtendedActorSystem, config: VisualMailboxMetricClientConfig) extends Extension { private val udpSender = system.systemActorOf( Props(new UdpSender(config.serverAddress)).withDispatcher("de.aktey.akka.visualmailbox.client.dispatcher"), "de-aktey-akka-visualmailbox-sender" ) system.systemActorOf( Props(new VisualMailboxMetricListener(udpSender)).withDispatcher("de.aktey.akka.visualmailbox.client.dispatcher"), "de-aktey-akka-visualmailbox-receiver" ) } class VisualMailboxMetricListener(udpSender: ActorRef) extends Actor { import context._ import concurrent.duration._ var buffer: List[VisualMailboxMetric] = Nil system.eventStream.subscribe(self, classOf[VisualMailboxMetric]) system.scheduler.schedule(1.second, 1.second, self, "flush") @scala.throws[Exception](classOf[Exception]) override def postStop(): Unit = { system.eventStream.unsubscribe(self) } def receive: Receive = { case v: VisualMailboxMetric => buffer ::= v if (buffer.size > 40) self ! "flush" case "flush" if buffer.nonEmpty => udpSender ! Packing.pack(MetricEnvelope(1, Packing.pack(buffer))) buffer = Nil } } class UdpSender(remote: InetSocketAddress) extends Actor { import context._ IO(Udp) ! Udp.SimpleSender def receive = { case Udp.SimpleSenderReady => context.become(ready(sender())) } def ready(send: ActorRef): Receive = { case msg: Array[Byte] => send ! Udp.Send(ByteString(msg), remote) } }
Example 80
Source File: NaptimePlayRouterTest.scala From naptime with Apache License 2.0 | 5 votes |
package org.coursera.naptime.router2 import akka.util.ByteString import com.google.inject.Injector import org.coursera.naptime.resources.RootResource import org.coursera.naptime.schema.Handler import org.coursera.naptime.schema.HandlerKind import org.coursera.naptime.schema.Parameter import org.coursera.naptime.schema.Resource import org.coursera.naptime.schema.ResourceKind import org.junit.Test import org.mockito.Mockito.when import org.mockito.Matchers.any import org.scalatest.junit.AssertionsForJUnit import org.scalatest.mockito.MockitoSugar import play.api.libs.streams.Accumulator import play.api.mvc.EssentialAction import play.api.mvc.RequestHeader import play.api.mvc.RequestTaggingHandler import play.api.mvc.Result import play.api.test.FakeRequest class NaptimePlayRouterTest extends AssertionsForJUnit with MockitoSugar { object FakeHandler extends EssentialAction with RequestTaggingHandler { override def tagRequest(request: RequestHeader): RequestHeader = request override def apply(v1: RequestHeader): Accumulator[ByteString, Result] = ??? } val resourceSchema = Resource( kind = ResourceKind.COLLECTION, name = "fakeResource", version = Some(1L), parentClass = Some(classOf[RootResource].getName), keyType = "java.lang.String", valueType = "FakeModel", mergedType = "FakeResourceModel", handlers = List( Handler( kind = HandlerKind.GET, name = "get", parameters = List(Parameter(name = "id", `type` = "String", attributes = List.empty, default = None)), inputBodyType = None, customOutputBodyType = None, attributes = List.empty)), className = "org.coursera.naptime.FakeResource", attributes = List.empty) val resourceRouter = mock[ResourceRouter] val resourceRouterBuilder = mock[ResourceRouterBuilder] when(resourceRouterBuilder.build(any())).thenReturn(resourceRouter) when(resourceRouterBuilder.schema).thenReturn(resourceSchema) val injector = mock[Injector] val naptimeRoutes = NaptimeRoutes(injector, Set(resourceRouterBuilder)) val router = new NaptimePlayRouter(naptimeRoutes) @Test def simpleRouting(): Unit = { when(resourceRouter.routeRequest(any(), any())).thenReturn(Some(FakeHandler)) val handler = router.handlerFor(FakeRequest()) assert(handler.isDefined) } @Test def simpleRoutingNothing(): Unit = { when(resourceRouter.routeRequest(any(), any())).thenReturn(None) val handler = router.handlerFor(FakeRequest()) assert(handler.isEmpty) } @Test def generateDocumentation(): Unit = { val documentation = router.documentation assert(1 === documentation.length) assert( ( "GET --- GET", "/fakeResource.v1/$id", "[NAPTIME] org.coursera.naptime.FakeResource.get(id: String)") === documentation.head) } }
Example 81
Source File: AkkaHttpCirceSupport.scala From scalanda-v20 with MIT License | 5 votes |
package com.msilb.scalandav20.common import akka.http.scaladsl.marshalling.{Marshaller, ToEntityMarshaller} import akka.http.scaladsl.model.MediaTypes.`application/json` import akka.http.scaladsl.unmarshalling.{FromEntityUnmarshaller, Unmarshaller} import akka.util.ByteString import io.circe._ object AkkaHttpCirceSupport extends AkkaHttpCirceSupport trait AkkaHttpCirceSupport { private val jsonStringUnmarshaller = Unmarshaller.byteStringUnmarshaller .forContentTypes(`application/json`) .mapWithCharset { case (ByteString.empty, _) => throw Unmarshaller.NoContentException case (data, charset) => data.decodeString(charset.nioCharset.name) } private val jsonStringMarshaller = Marshaller.stringMarshaller(`application/json`) implicit def circeUnmarshaller[A](implicit decoder: Decoder[A]): FromEntityUnmarshaller[A] = jsonStringUnmarshaller.map(jawn.decode(_).fold(throw _, identity)) implicit def circeToEntityMarshaller[A](implicit encoder: Encoder[A], printer: Json => String = Printer.noSpaces.copy(dropNullValues = true).pretty): ToEntityMarshaller[A] = jsonStringMarshaller.compose(printer).compose(encoder.apply) }
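With these marshallers in implicit scope, case classes (de)serialize through circe. A minimal route sketch; the Donut type and its auto-derived codecs are assumptions for illustration:

import akka.http.scaladsl.server.Directives._
import com.msilb.scalandav20.common.AkkaHttpCirceSupport._
import io.circe.generic.auto._

final case class Donut(name: String, price: Double) // hypothetical payload

val route =
  post {
    entity(as[Donut]) { donut =>
      complete(donut) // marshalled back to application/json by circeToEntityMarshaller
    }
  }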
Example 82
Source File: AkkaBackend.scala From drunk with Apache License 2.0 | 5 votes |
package com.github.jarlakxen.drunk.backend import java.io.UnsupportedEncodingException import akka.actor.ActorSystem import akka.http.scaladsl.coding.{Deflate, Gzip, NoCoding} import akka.http.scaladsl.model.HttpResponse import akka.http.scaladsl.model.headers.HttpEncodings import akka.stream.ActorMaterializer import akka.util.ByteString import scala.concurrent.{ExecutionContext, Future} trait AkkaBackend { implicit val as: ActorSystem implicit val mat: ActorMaterializer def send(body: String): Future[(Int, String)] protected def encodingFromContentType(ct: String): Option[String] = ct.split(";").map(_.trim.toLowerCase).collectFirst { case s if s.startsWith("charset=") => s.substring(8) } protected def decodeResponse(response: HttpResponse): HttpResponse = { val decoder = response.encoding match { case HttpEncodings.gzip => Gzip case HttpEncodings.deflate => Deflate case HttpEncodings.identity => NoCoding case ce => throw new UnsupportedEncodingException(s"Unsupported encoding: $ce") } decoder.decodeMessage(response) } protected def bodyToString(hr: HttpResponse, charsetFromHeaders: String): Future[String] = { implicit val ec: ExecutionContext = as.dispatcher hr.entity.dataBytes .runFold(ByteString.empty)(_ ++ _) .map(_.decodeString(charsetFromHeaders)) } }
Example 83
Source File: CsvStreamingRoute.scala From akka-http-test with Apache License 2.0 | 5 votes |
package com.github.dnvriend.component.simpleserver.route import akka.http.scaladsl.common.{ CsvEntityStreamingSupport, EntityStreamingSupport } import akka.http.scaladsl.marshalling.{ Marshaller, Marshalling } import akka.http.scaladsl.model.ContentTypes import akka.http.scaladsl.server.{ Directives, Route } import akka.util.ByteString import com.github.dnvriend.component.repository.PersonRepository import com.github.dnvriend.component.simpleserver.dto.http.Person import de.heikoseeberger.akkahttpplayjson.PlayJsonSupport object CsvStreamingRoute extends Directives with PlayJsonSupport { implicit val personAsCsv = Marshaller.strict[Person, ByteString] { person => Marshalling.WithFixedContentType(ContentTypes.`text/csv(UTF-8)`, () => { ByteString(List(person.name.replace(",", "."), person.age, person.married).mkString(",")) }) } implicit val csvStreamingSupport: CsvEntityStreamingSupport = EntityStreamingSupport.csv() def route(dao: PersonRepository): Route = path("stream" / IntNumber) { numberOfPeople => pathEnd { get { complete(dao.people(numberOfPeople)) } } } }
Example 84
Source File: JsonStreamingRoute.scala From akka-http-test with Apache License 2.0 | 5 votes |
package com.github.dnvriend.component.simpleserver.route import akka.event.LoggingAdapter import akka.http.scaladsl.common.{ EntityStreamingSupport, JsonEntityStreamingSupport } import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport import akka.http.scaladsl.server.{ Directives, Route } import akka.stream.Materializer import akka.stream.scaladsl.Flow import akka.util.ByteString import com.github.dnvriend.component.repository.PersonRepository import com.github.dnvriend.component.simpleserver.dto.http.Person import com.github.dnvriend.component.simpleserver.marshaller.Marshallers import scala.concurrent.ExecutionContext object JsonStreamingRoute extends Directives with SprayJsonSupport with Marshallers { val start = ByteString.empty val sep = ByteString("\n") val end = ByteString.empty implicit val jsonStreamingSupport: JsonEntityStreamingSupport = EntityStreamingSupport.json() .withFramingRenderer(Flow[ByteString].intersperse(start, sep, end)) .withParallelMarshalling(parallelism = 8, unordered = true) def route(dao: PersonRepository)(implicit mat: Materializer, ec: ExecutionContext): Route = path("stream" / IntNumber) { numberOfPersons => (get & pathEnd) { complete(dao.people(numberOfPersons)) } } ~ (post & path("stream") & entity(asSourceOf[Person])) { people => val total = people.log("people").runFold(0) { case (c, _) => c + 1 } complete(total.map(n => s"Received $n number of person")) } }
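The withFramingRenderer call above controls how the individual JSON values are joined on the wire; intersperse behaves like mkString for streams. A small standalone sketch of the same framing:

import akka.stream.scaladsl.{Flow, Source}
import akka.util.ByteString

val start = ByteString.empty
val sep = ByteString("\n")
val end = ByteString.empty

val framing = Flow[ByteString].intersperse(start, sep, end)
// Source(List(ByteString("{\"a\":1}"), ByteString("{\"a\":2}"))).via(framing)
// emits {"a":1}, \n, {"a":2} -- i.e. newline-delimited JSON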
Example 85
Source File: InputStreamLineReader.scala From perf_tester with Apache License 2.0 | 5 votes |
package org.perftester.sbtbot.process

import java.io._

import akka.util.ByteString

import scala.annotation.tailrec
import scala.util.Try

class InputStreamLineReader(inputStream: InputStream, name: String) {

  var bs: ByteString = ByteString()
  val bis = new BufferedInputStream(inputStream)
  var limit = 2048
  val byteBuffer = new Array[Byte](limit)
  var isClosed = false

  def close(): Option[String] = {
    val res = if (isClosed) None
    else {
      val remainingOutput = bs.decodeString("UTF-8")
      if (remainingOutput.length > 0) Some(remainingOutput) else None
    }
    bs = ByteString.empty
    res
  }

  def read(): List[String] = {
    if (isClosed) Nil
    else {
      val rawRes = performRead(Nil)
      rawRes.reverse
    }
  }

  @tailrec
  private final def performRead(cur: List[String]): List[String] = {
    val actualRead = if (bis.available > 0) bis.read(byteBuffer) else 0
    if (actualRead == -1) {
      Try(inputStream.close())
      isClosed = true
      val remainingOutput = bs.decodeString("UTF-8")
      if (remainingOutput.length > 0) {
        remainingOutput :: cur
      } else cur
    } else if (actualRead > 0) {
      val byteString = ByteString.fromArray(byteBuffer, 0, actualRead)
      bs ++= byteString
      var activeCurrent = cur
      var offset = bs.indexWhere(_ == '\n')
      while (offset != -1) {
        val (init, rem) = bs.splitAt(offset)
        val lineStr = init.decodeString("UTF-8")
        activeCurrent ::= lineStr
        bs = rem.drop(1) // drop the newline
        offset = bs.indexWhere(_ == '\n')
      }
      performRead(activeCurrent)
    } else {
      cur
    }
  }
}
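A hedged usage sketch of the reader above against an in-memory stream; the sample bytes and the "stdout" name are illustrative:

import java.io.ByteArrayInputStream
import org.perftester.sbtbot.process.InputStreamLineReader

val in = new ByteArrayInputStream("one\ntwo\npartial".getBytes("UTF-8"))
val reader = new InputStreamLineReader(in, "stdout")

println(reader.read())  // List(one, two) -- complete lines only
println(reader.close()) // Some(partial)  -- the trailing, unterminated output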
Example 86
Source File: ServerTestBase.scala From endpoints4s with MIT License | 5 votes |
package endpoints4s.algebra.server import java.nio.charset.StandardCharsets import akka.actor.ActorSystem import akka.http.scaladsl.Http import akka.http.scaladsl.model.headers.`Content-Type` import akka.http.scaladsl.model.{HttpRequest, HttpResponse} import akka.util.ByteString import endpoints4s.algebra import org.scalatest.concurrent.ScalaFutures import org.scalatest.{BeforeAndAfter, BeforeAndAfterAll} import scala.concurrent.duration._ import org.scalatest.matchers.should.Matchers import org.scalatest.wordspec.AnyWordSpec import scala.concurrent.{ExecutionContext, Future} trait ServerTestBase[T <: algebra.Endpoints] extends AnyWordSpec with Matchers with ScalaFutures with BeforeAndAfterAll with BeforeAndAfter { override implicit def patienceConfig: PatienceConfig = PatienceConfig(10.seconds, 10.millisecond) val serverApi: T case class Malformed(errors: Seq[String]) extends DecodedUrl[Nothing] }
Example 87
Source File: ChunkedEntities.scala From endpoints4s with MIT License | 5 votes |
package endpoints4s.play.server import akka.stream.scaladsl.Source import akka.util.ByteString import endpoints4s.algebra import play.api.http.{ContentTypes, HttpChunk, HttpEntity} import play.api.libs.streams.Accumulator import play.api.mvc.BodyParser trait ChunkedJsonEntities extends ChunkedEntities with algebra.ChunkedJsonEntities with JsonEntitiesFromCodecs { def jsonChunksRequest[A](implicit codec: JsonCodec[A] ): RequestEntity[Chunks[A]] = { val decoder = stringCodec(codec) chunkedRequestEntity { byteString => val string = byteString.utf8String decoder .decode(string) .toEither .left .map(errors => new Throwable(errors.mkString(". "))) } } def jsonChunksResponse[A](implicit codec: JsonCodec[A] ): ResponseEntity[Chunks[A]] = { val encoder = stringCodec(codec) chunkedResponseEntity(ContentTypes.JSON, a => ByteString(encoder.encode(a))) } }
Example 88
Source File: JsonEntities.scala From endpoints4s with MIT License | 5 votes |
package endpoints4s.play.server import akka.util.ByteString import endpoints4s.{Codec, Decoder, Encoder, Invalid, algebra} import play.api.mvc.RequestHeader import play.api.http.{ContentTypes, Writeable} trait JsonEntitiesFromEncodersAndDecoders extends algebra.JsonEntities with EndpointsWithCustomErrors { type JsonResponse[A] = Encoder[A, String] type JsonRequest[A] = Decoder[String, A] def jsonRequest[A](implicit decoder: JsonRequest[A]): RequestEntity[A] = JsonEntities.decodeRequest(this)(decoder) def jsonResponse[A](implicit encoder: JsonResponse[A]): ResponseEntity[A] = responseEntityFromWriteable(JsonEntities.encodeResponse(encoder)) } private object JsonEntities { def decodeRequest[A]( endpoints: EndpointsWithCustomErrors )(decoder: Decoder[String, A]): endpoints.RequestEntity[A] = (request: RequestHeader) => { if (request.contentType.exists(_.equalsIgnoreCase("application/json"))) { val decodeJson = (bs: ByteString) => decoder .decode(bs.utf8String) .toEither .left .map(errs => endpoints.handleClientErrors(Invalid(errs))) val bodyParser = endpoints.playComponents.playBodyParsers.byteString .validate(decodeJson)(endpoints.playComponents.executionContext) Some(bodyParser) } else { None } } def encodeResponse[A](encoder: Encoder[A, String]): Writeable[A] = Writeable(a => ByteString(encoder.encode(a)), Some(ContentTypes.JSON)) }
Example 89
Source File: ChunkedEntities.scala From endpoints4s with MIT License | 5 votes |
package endpoints4s.akkahttp.client import akka.http.scaladsl.model.{ContentType, ContentTypes, HttpEntity} import akka.stream.scaladsl.Source import akka.util.ByteString import endpoints4s.algebra import scala.concurrent.Future trait ChunkedJsonEntities extends algebra.ChunkedJsonEntities with ChunkedEntities with JsonEntitiesFromCodecs { def jsonChunksRequest[A](implicit codec: JsonCodec[A] ): RequestEntity[Chunks[A]] = { val encoder = stringCodec(codec) chunkedRequestEntity( ContentTypes.`application/json`, a => ByteString.fromString(encoder.encode(a)) ) } def jsonChunksResponse[A](implicit codec: JsonCodec[A] ): ResponseEntity[Chunks[A]] = { val decoder = stringCodec(codec) chunkedResponseEntity { byteString => val string = byteString.utf8String decoder .decode(string) .toEither .left .map(errors => new Throwable(errors.mkString(". "))) } } }
Example 90
Source File: ChunkedEntities.scala From endpoints4s with MIT License | 5 votes |
package endpoints4s.akkahttp.server import akka.http.scaladsl.marshalling.Marshaller import akka.http.scaladsl.model.{ContentType, ContentTypes, HttpEntity, HttpRequest, MessageEntity} import akka.http.scaladsl.server.Directives import akka.http.scaladsl.unmarshalling.Unmarshaller import akka.stream.scaladsl.Source import akka.util.ByteString import endpoints4s.algebra import scala.concurrent.Future trait ChunkedJsonEntities extends algebra.ChunkedJsonEntities with ChunkedEntities with JsonEntitiesFromCodecs { def jsonChunksRequest[A](implicit codec: JsonCodec[A] ): RequestEntity[Chunks[A]] = { val decoder = stringCodec(codec) chunkedRequestEntity { byteString => val string = byteString.utf8String decoder .decode(string) .toEither .left .map(errors => new Throwable(errors.mkString(". "))) } } def jsonChunksResponse[A](implicit codec: JsonCodec[A] ): ResponseEntity[Chunks[A]] = { val encoder = stringCodec(codec) chunkedResponseEntity( ContentTypes.`application/json`, a => ByteString(encoder.encode(a)) ) } }
Example 91
Source File: request.scala From wix-http-testkit with MIT License | 5 votes |
package com.wix.e2e.http.client.transformers.internals

import java.io.File

import akka.http.scaladsl.model.Uri.Query
import akka.http.scaladsl.model._
import akka.http.scaladsl.model.headers.{Cookie, RawHeader, `User-Agent`}
import akka.util.ByteString
import com.wix.e2e.http.api.Marshaller
import com.wix.e2e.http.client.transformers._
import com.wix.e2e.http.client.transformers.internals.RequestPartOps._
import com.wix.e2e.http.exceptions.UserAgentModificationNotSupportedException
import com.wix.e2e.http.{RequestTransformer, WixHttpTestkitResources}

import scala.xml.Node

trait HttpClientRequestUrlTransformers {
  def withParam(param: (String, String)): RequestTransformer = withParams(param)

  def withParams(params: (String, String)*): RequestTransformer = r =>
    r.copy(uri = r.uri.withQuery( Query(currentParams(r) ++ params: _*) ))

  private def currentParams(r: HttpRequest): Seq[(String, String)] =
    r.uri.rawQueryString
      .map( Query(_).toSeq )
      .getOrElse( Seq.empty )
}

trait HttpClientRequestHeadersTransformers {
  def withHeader(header: (String, String)): RequestTransformer = withHeaders(header)

  def withHeaders(headers: (String, String)*): RequestTransformer =
    appendHeaders( headers.map {
      case (h, _) if h.toLowerCase == "user-agent" => throw new UserAgentModificationNotSupportedException
      case (h, v) => RawHeader(h, v)
    } )

  def withUserAgent(value: String): RequestTransformer = appendHeaders(Seq(`User-Agent`(value)))

  def withCookie(cookie: (String, String)): RequestTransformer = withCookies(cookie)

  def withCookies(cookies: (String, String)*): RequestTransformer =
    appendHeaders( cookies.map(p => Cookie(p._1, p._2)) )

  private def appendHeaders[H <: HttpHeader](headers: Iterable[H]): RequestTransformer = r =>
    r.withHeaders( r.headers ++ headers )
}

trait HttpClientRequestBodyTransformers extends HttpClientContentTypes {

  @deprecated("use `withTextPayload`", since = "Dec18, 2017")
  def withPayload(body: String, contentType: ContentType = TextPlain): RequestTransformer =
    withPayload(ByteString(body).toByteBuffer.array, contentType)

  def withTextPayload(body: String, contentType: ContentType = TextPlain): RequestTransformer =
    withPayload(ByteString(body).toByteBuffer.array, contentType)

  def withPayload(bytes: Array[Byte], contentType: ContentType): RequestTransformer =
    setBody(HttpEntity(contentType, bytes))

  def withPayload(xml: Node): RequestTransformer =
    setBody(HttpEntity(XmlContent, WixHttpTestkitResources.xmlPrinter.format(xml)))

  // todo: enable default marshaller when deprecated `withPayload` is removed
  def withPayload(entity: AnyRef)(implicit marshaller: Marshaller): RequestTransformer =
    withTextPayload(marshaller.marshall(entity), JsonContent)

  def withFormData(formParams: (String, String)*): RequestTransformer =
    setBody(FormData(formParams.toMap).toEntity)

  def withMultipartData(parts: (String, RequestPart)*): RequestTransformer =
    setBody( Multipart.FormData(parts.map {
      case (n, p) => Multipart.FormData.BodyPart(n, p.asBodyPartEntity, p.withAdditionalParams)
    }: _*).toEntity )

  private def setBody(entity: RequestEntity): RequestTransformer = _.copy(entity = entity)
}

object RequestPartOps {

  implicit class `RequestPart --> HttpEntity`(private val r: RequestPart) extends AnyVal {
    def asBodyPartEntity: BodyPartEntity = r match {
      case PlainRequestPart(v, c) => HttpEntity(v).withContentType(c)
      case BinaryRequestPart(b, c, _) => HttpEntity(c, b)
      case FileRequestPart(f, c, _) => HttpEntity.fromPath(c, f.toPath)
      case FileNameRequestPart(p, c, fn) => FileRequestPart(new File(p), c, fn).asBodyPartEntity
    }
  }

  implicit class `RequestPart --> AdditionalParams`(private val r: RequestPart) extends AnyVal {
    def withAdditionalParams: Map[String, String] = r match {
      case _: PlainRequestPart => NoAdditionalParams
      case BinaryRequestPart(_, _, fn) => additionalParams(fn)
      case FileRequestPart(_, _, fn) => additionalParams(fn)
      case FileNameRequestPart(_, _, fn) => additionalParams(fn)
    }

    private def additionalParams(filenameOpt: Option[String]) =
      filenameOpt.map(fn => Map("filename" -> fn))
        .getOrElse( NoAdditionalParams )

    private def NoAdditionalParams = Map.empty[String, String]
  }
}

trait HttpClientRequestTransformersOps {
  implicit class TransformerConcatenation(first: RequestTransformer) {
    def and(second: RequestTransformer): RequestTransformer = first andThen second
  }
}
Example 92
Source File: Test14.scala From incubator-retired-gearpump with Apache License 2.0 | 5 votes |
package org.apache.gearpump.akkastream.example

import java.io.File

import akka.NotUsed
import akka.actor.ActorSystem
import akka.stream._
import akka.stream.scaladsl._
import akka.util.ByteString
import org.apache.gearpump.akkastream.GearpumpMaterializer
import org.apache.gearpump.cluster.main.ArgumentsParser
import org.apache.gearpump.util.AkkaApp

import scala.concurrent._
import scala.concurrent.duration._

object Test14 extends AkkaApp with ArgumentsParser {
  // scalastyle:off println
  override def main(akkaConf: Config, args: Array[String]): Unit = {
    implicit val system = ActorSystem("Test14", akkaConf)
    implicit val materializer = GearpumpMaterializer()

    def lineSink(filename: String): Sink[String, Future[IOResult]] = {
      Flow[String]
        .alsoTo(Sink.foreach(s => println(s"$filename: $s")))
        .map(s => ByteString(s + "\n"))
        .toMat(FileIO.toPath(new File(filename).toPath))(Keep.right)
    }

    val source: Source[Int, NotUsed] = Source(1 to 100)
    val factorials: Source[BigInt, NotUsed] = source.scan(BigInt(1))((acc, next) => acc * next)
    val sink1 = lineSink("factorial1.txt")
    val sink2 = lineSink("factorial2.txt")
    val slowSink2 = Flow[String].via(
      Flow[String].throttle(1, 1.second, 1, ThrottleMode.shaping)
    ).toMat(sink2)(Keep.right)
    val bufferedSink2 = Flow[String].buffer(50, OverflowStrategy.backpressure).via(
      Flow[String].throttle(1, 1.second, 1, ThrottleMode.shaping)
    ).toMat(sink2)(Keep.right)

    val g = RunnableGraph.fromGraph(GraphDSL.create() { implicit b =>
      import GraphDSL.Implicits._
      val bcast = b.add(Broadcast[String](2))
      factorials.map(_.toString) ~> bcast.in
      bcast.out(0) ~> sink1
      bcast.out(1) ~> bufferedSink2
      ClosedShape
    })
    g.run()

    Await.result(system.whenTerminated, 60.minutes)
  }
  // scalastyle:on println
}
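The ByteString-relevant piece above is lineSink: render each element as a newline-terminated ByteString, then let FileIO.toPath write it. A sketch of the same sink on the stock Akka materializer instead of GearpumpMaterializer (the file name is illustrative):

import java.nio.file.Paths
import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{FileIO, Flow, Keep, Source}
import akka.util.ByteString

object LineSinkSketch extends App {
  implicit val system = ActorSystem("line-sink-sketch")
  implicit val materializer = ActorMaterializer()
  import system.dispatcher

  val sink = Flow[String]
    .map(s => ByteString(s + "\n")) // one ByteString per output line
    .toMat(FileIO.toPath(Paths.get("numbers.txt")))(Keep.right)

  Source(1 to 10).map(_.toString).runWith(sink).onComplete(_ => system.terminate())
}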
Example 93
Source File: ReqSocketActor.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.communication.actors import akka.actor.{Actor, ActorRef} import akka.util.ByteString import org.apache.toree.communication.{ZMQMessage, SocketManager} import org.apache.toree.utils.LogLike import org.zeromq.ZMQ class ReqSocketActor(connection: String, listener: ActorRef) extends Actor with LogLike { logger.debug(s"Initializing request socket actor for $connection") private val manager: SocketManager = new SocketManager private val socket = manager.newReqSocket(connection, (message: Seq[Array[Byte]]) => { listener ! ZMQMessage(message.map(ByteString.apply): _*) }) override def postStop(): Unit = { manager.closeSocket(socket) } override def receive: Actor.Receive = { case zmqMessage: ZMQMessage => val frames = zmqMessage.frames.map(byteString => byteString.toArray ) socket.send(frames: _*) } }
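Examples 93 through 97 all lean on the same two conversions: incoming raw ZeroMQ frames (Array[Byte]) are wrapped with ByteString.apply so the actor message is immutable, and outgoing frames are unwrapped again with toArray. A minimal sketch of that round trip (the frame contents are illustrative):

import akka.util.ByteString

object FrameConversionSketch extends App {
  // Raw frames as they would arrive from a socket
  val rawFrames: Seq[Array[Byte]] = Seq("identity".getBytes("UTF-8"), "payload".getBytes("UTF-8"))

  // Wrap for safe actor messaging ...
  val wrapped: Seq[ByteString] = rawFrames.map(ByteString.apply)

  // ... and unwrap on the way back out to the socket
  val unwrapped: Seq[Array[Byte]] = wrapped.map(_.toArray)
  println(unwrapped.map(new String(_, "UTF-8")).mkString(", "))
}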
Example 94
Source File: DealerSocketActor.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.communication.actors import akka.actor.{Actor, ActorRef} import akka.util.ByteString import org.apache.toree.communication.{ZMQMessage, SocketManager} import org.apache.toree.utils.LogLike import org.zeromq.ZMQ class DealerSocketActor(connection: String, listener: ActorRef) extends Actor with LogLike { logger.debug(s"Initializing dealer socket actor for $connection") private val manager: SocketManager = new SocketManager private val socket = manager.newDealerSocket(connection, (message: Seq[Array[Byte]]) => { listener ! ZMQMessage(message.map(ByteString.apply): _*) }) override def postStop(): Unit = { manager.closeSocket(socket) } override def receive: Actor.Receive = { case zmqMessage: ZMQMessage => val frames = zmqMessage.frames.map(byteString => byteString.toArray ) socket.send(frames: _*) } }
Example 95
Source File: SubSocketActor.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.communication.actors import akka.actor.{Actor, ActorRef} import akka.util.ByteString import org.apache.toree.communication.{ZMQMessage, SocketManager} import org.apache.toree.utils.LogLike class SubSocketActor(connection: String, listener: ActorRef) extends Actor with LogLike { logger.debug(s"Initializing subscribe socket actor for $connection") private val manager: SocketManager = new SocketManager private val socket = manager.newSubSocket(connection, (message: Seq[Array[Byte]]) => { listener ! ZMQMessage(message.map(ByteString.apply): _*) }) override def postStop(): Unit = { manager.closeSocket(socket) } override def receive: Actor.Receive = { case _ => } }
Example 96
Source File: RepSocketActor.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.communication.actors import akka.actor.{Actor, ActorRef} import akka.util.ByteString import org.apache.toree.communication.{SocketManager, ZMQMessage} import org.apache.toree.utils.LogLike import org.zeromq.ZMQ class RepSocketActor(connection: String, listener: ActorRef) extends Actor with LogLike { logger.debug(s"Initializing reply socket actor for $connection") private val manager: SocketManager = new SocketManager private val socket = manager.newRepSocket(connection, (message: Seq[Array[Byte]]) => { listener ! ZMQMessage(message.map(ByteString.apply): _*) }) override def postStop(): Unit = { manager.closeSocket(socket) } override def receive: Actor.Receive = { case zmqMessage: ZMQMessage => val frames = zmqMessage.frames.map(byteString => byteString.toArray ) socket.send(frames: _*) } }
Example 97
Source File: RouterSocketActor.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.communication.actors import akka.actor.{Actor, ActorRef} import akka.util.ByteString import org.apache.toree.communication.{SocketManager, ZMQMessage} import org.apache.toree.utils.LogLike import org.zeromq.ZMQ class RouterSocketActor(connection: String, listener: ActorRef) extends Actor with LogLike { logger.debug(s"Initializing router socket actor for $connection") private val manager: SocketManager = new SocketManager private val socket = manager.newRouterSocket(connection, (message: Seq[Array[Byte]]) => { listener ! ZMQMessage(message.map(ByteString.apply): _*) }) override def postStop(): Unit = { manager.closeSocket(socket) } override def receive: Actor.Receive = { case zmqMessage: ZMQMessage => val frames = zmqMessage.frames.map(byteString => byteString.toArray ) socket.send(frames: _*) } }
Example 98
Source File: HeartbeatClient.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.kernel.protocol.v5.client.socket

import akka.actor.{ActorRef, Actor}
import akka.util.{ByteString, Timeout}
import org.apache.toree.communication.ZMQMessage
import akka.pattern.ask
import org.apache.toree.kernel.protocol.v5.client.ActorLoader
import org.apache.toree.utils.LogLike
import org.apache.toree.kernel.protocol.v5.UUID

import scala.collection.concurrent.{Map, TrieMap}
import scala.concurrent.duration._

object HeartbeatMessage {}

class HeartbeatClient(
  socketFactory: SocketFactory,
  actorLoader: ActorLoader,
  signatureEnabled: Boolean
) extends Actor with LogLike {
  logger.debug("Created new Heartbeat Client actor")
  implicit val timeout = Timeout(1.minute)

  val futureMap: Map[UUID, ActorRef] = TrieMap[UUID, ActorRef]()
  val socket = socketFactory.HeartbeatClient(context.system, self)

  override def receive: Receive = {
    // from Heartbeat
    case message: ZMQMessage =>
      val id = message.frames.map((byteString: ByteString) =>
        new String(byteString.toArray)).mkString("\n")
      logger.info(s"Heartbeat client receive:$id")
      futureMap(id) ! true
      futureMap.remove(id)

    // from SparkKernelClient
    case HeartbeatMessage =>
      import scala.concurrent.ExecutionContext.Implicits.global
      val id = java.util.UUID.randomUUID().toString
      futureMap += (id -> sender)
      logger.info(s"Heartbeat client send: $id")
      val future = socket ? ZMQMessage(ByteString(id.getBytes))
      future.onComplete { // future always times out because server "tells" response
        case(_) => futureMap.remove(id)
      }
  }
}
Example 99
Source File: Utilities.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.kernel.protocol.v5.client

import java.nio.charset.Charset

import akka.util.{ByteString, Timeout}
import org.apache.toree.communication.ZMQMessage
import org.apache.toree.kernel.protocol.v5._
import org.apache.toree.kernel.protocol.v5.content.ExecuteRequest
import org.apache.toree.utils.LogLike
import play.api.data.validation.ValidationError
import play.api.libs.json.{JsPath, Json, Reads}

import scala.concurrent.duration._

object Utilities extends LogLike {
  //
  // NOTE: This is brought in to remove feature warnings regarding the use of
  // implicit conversions regarding the following:
  //
  // 1. ByteStringToString
  // 2. ZMQMessageToKernelMessage
  //
  import scala.language.implicitConversions

  private val sessionId: UUID = java.util.UUID.randomUUID().toString

  implicit val timeout = Timeout(21474835.seconds) // Maximum delay

  implicit def ByteStringToString(byteString: ByteString): String = {
    new String(byteString.toArray, Charset.forName("UTF-8"))
  }

  implicit def StringToByteString(string: String): ByteString = {
    ByteString(string.getBytes)
  }

  implicit def ZMQMessageToKernelMessage(message: ZMQMessage): KernelMessage = {
    val delimiterIndex: Int =
      message.frames.indexOf(ByteString("<IDS|MSG>".getBytes))
    // TODO Handle the case where there is no delimiter
    val ids: Seq[Array[Byte]] =
      message.frames.take(delimiterIndex).map(
        (byteString: ByteString) => { byteString.toArray }
      )
    val header = Json.parse(message.frames(delimiterIndex + 2)).as[Header]
    val parentHeader = Json.parse(message.frames(delimiterIndex + 3)).validate[ParentHeader].fold[ParentHeader](
      // TODO: Investigate better solution than setting parentHeader to null for {}
      (invalid: Seq[(JsPath, Seq[ValidationError])]) => null, //HeaderBuilder.empty,
      (valid: ParentHeader) => valid
    )
    val metadata = Json.parse(message.frames(delimiterIndex + 4)).as[Metadata]

    KMBuilder().withIds(ids.toList)
               .withSignature(message.frame(delimiterIndex + 1))
               .withHeader(header)
               .withParentHeader(parentHeader)
               .withMetadata(metadata)
               .withContentString(message.frame(delimiterIndex + 5)).build(false)
  }

  implicit def KernelMessageToZMQMessage(kernelMessage: KernelMessage): ZMQMessage = {
    val frames: scala.collection.mutable.ListBuffer[ByteString] =
      scala.collection.mutable.ListBuffer()
    kernelMessage.ids.map((id: Array[Byte]) => frames += ByteString.apply(id))
    frames += "<IDS|MSG>"
    frames += kernelMessage.signature
    frames += Json.toJson(kernelMessage.header).toString()
    frames += Json.toJson(kernelMessage.parentHeader).toString()
    frames += Json.toJson(kernelMessage.metadata).toString
    frames += kernelMessage.contentString
    ZMQMessage(frames: _*)
  }

  def parseAndHandle[T](json: String, reads: Reads[T], handler: T => Unit): Unit = {
    Json.parse(json).validate[T](reads).fold(
      (invalid: Seq[(JsPath, Seq[ValidationError])]) =>
        logger.error(s"Could not parse JSON, ${json}"),
      (content: T) => handler(content)
    )
  }

  def getSessionId = sessionId

  def toKernelMessage(message: ExecuteRequest): KernelMessage = {
    // construct a kernel message whose content is an ExecuteRequest
    val id = java.util.UUID.randomUUID().toString
    val header = Header(
      id, "spark", sessionId, MessageType.Incoming.ExecuteRequest.toString, "5.0")

    KMBuilder().withIds(Seq[Array[Byte]]()).withSignature("").withHeader(header)
      .withParentHeader(HeaderBuilder.empty).withContentString(message).build
  }
}
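The implicit ByteStringToString and StringToByteString conversions above can also be expressed with ByteString's own helper: decodeString is the built-in equivalent of new String(bytes.toArray, charset). A small sketch (the object name is illustrative):

import java.nio.charset.StandardCharsets
import akka.util.ByteString

object StringConversionSketch extends App {
  val encoded: ByteString = ByteString("kernel message".getBytes)
  val viaArray  = new String(encoded.toArray, StandardCharsets.UTF_8)
  val viaHelper = encoded.decodeString("UTF-8") // ByteString's built-in decoder
  println(viaArray == viaHelper) // true
}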
Example 100
Source File: ZeromqKernelMessageSocket.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.kernel.protocol.v5.kernel.socket

import java.nio.charset.Charset

import akka.actor.{ActorSelection, ActorSystem, ActorRef, Actor}
import akka.util.ByteString
import org.apache.toree.communication.ZMQMessage
//import org.apache.toree.kernel.protocol.v5.kernel.ZMQMessage
import org.apache.toree.kernel.protocol.v5.KernelMessage
import org.apache.toree.kernel.protocol.v5.kernel.Utilities._
import org.apache.toree.utils.MessageLogSupport

abstract class ZeromqKernelMessageSocket(
  actorSocketFunc: (ActorSystem, ActorRef) => ActorRef,
  actorForwardFunc: () => ActorSelection
) extends Actor with MessageLogSupport {
  val actorSocketRef = actorSocketFunc(context.system, self)
  val actorForwardRef = actorForwardFunc()

  override def receive: Receive = {
    case message: ZMQMessage =>
      val kernelMessage: KernelMessage = message
      logMessage(kernelMessage)

      // Grab the strings to use for signature verification
      val zmqStrings = message.frames.map((byteString: ByteString) =>
        new String(byteString.toArray, Charset.forName("UTF-8"))
      ).takeRight(4) // TODO: This assumes NO extra buffers, refactor?

      // Forward along our message (along with the strings used for
      // signatures)
      actorForwardRef ! ((zmqStrings, kernelMessage))

    case message: KernelMessage =>
      val zmqMessage: ZMQMessage = message
      logMessage(message)
      actorSocketRef ! zmqMessage
  }
}
Example 101
Source File: Heartbeat.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.kernel.protocol.v5.kernel.socket import akka.actor.Actor import akka.util.ByteString import org.apache.toree.communication.ZMQMessage import org.apache.toree.utils.LogLike class Heartbeat(socketFactory : SocketFactory) extends Actor with LogLike { logger.debug("Created new Heartbeat actor") val socket = socketFactory.Heartbeat(context.system, self) override def receive: Receive = { case message: ZMQMessage => logger.trace("Heartbeat received message: " + message.frames.map((byteString: ByteString) => new String(byteString.toArray)).mkString("\n")) socket ! message } }
Example 102
Source File: Utilities.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.kernel.protocol.v5.kernel

import java.nio.charset.Charset

import akka.util.{ByteString, Timeout}
import org.apache.toree.communication.ZMQMessage
import org.apache.toree.kernel.protocol.v5._
import org.apache.toree.utils.LogLike
import play.api.data.validation.ValidationError
import play.api.libs.json.{JsPath, Json, Reads}

import scala.concurrent.duration._

object Utilities extends LogLike {
  //
  // NOTE: This is brought in to remove feature warnings regarding the use of
  // implicit conversions regarding the following:
  //
  // 1. ByteStringToString
  // 2. ZMQMessageToKernelMessage
  //
  import scala.language.implicitConversions

  implicit val timeout = Timeout(21474835.seconds)

  implicit def ByteStringToString(byteString: ByteString): String = {
    new String(byteString.toArray, Charset.forName("UTF-8"))
  }

  implicit def StringToByteString(string: String): ByteString = {
    ByteString(string.getBytes)
  }

  implicit def ZMQMessageToKernelMessage(message: ZMQMessage): KernelMessage = {
    val delimiterIndex: Int =
      message.frames.indexOf(ByteString("<IDS|MSG>".getBytes))
    // TODO Handle the case where there is no delimiter
    val ids: Seq[Array[Byte]] =
      message.frames.take(delimiterIndex).map(
        (byteString: ByteString) => { byteString.toArray }
      )
    val header = Json.parse(message.frames(delimiterIndex + 2)).as[Header]
    // TODO: Investigate better solution than setting parentHeader to null for {}
    val parentHeader = parseAndHandle(message.frames(delimiterIndex + 3),
      ParentHeader.headerReads,
      handler = (valid: ParentHeader) => valid,
      errHandler = _ => null
    )
    val metadata = Json.parse(message.frames(delimiterIndex + 4)).as[Metadata]

    KMBuilder().withIds(ids.toList)
               .withSignature(message.frame(delimiterIndex + 1))
               .withHeader(header)
               .withParentHeader(parentHeader)
               .withMetadata(metadata)
               .withContentString(message.frame(delimiterIndex + 5)).build(false)
  }

  implicit def KernelMessageToZMQMessage(kernelMessage: KernelMessage): ZMQMessage = {
    val frames: scala.collection.mutable.ListBuffer[ByteString] =
      scala.collection.mutable.ListBuffer()
    kernelMessage.ids.map((id: Array[Byte]) => frames += ByteString.apply(id))
    frames += "<IDS|MSG>"
    frames += kernelMessage.signature
    frames += Json.toJson(kernelMessage.header).toString()
    frames += Json.toJson(kernelMessage.parentHeader).toString()
    frames += Json.toJson(kernelMessage.metadata).toString
    frames += kernelMessage.contentString
    ZMQMessage(frames: _*)
  }

  def parseAndHandle[T, U](json: String, reads: Reads[T], handler: T => U): U = {
    parseAndHandle(json, reads, handler,
      (invalid: Seq[(JsPath, Seq[ValidationError])]) => {
        logger.error(s"Could not parse JSON, ${json}")
        throw new Throwable(s"Could not parse JSON, ${json}")
      }
    )
  }

  def parseAndHandle[T, U](json: String, reads: Reads[T], handler: T => U,
                           errHandler: Seq[(JsPath, Seq[ValidationError])] => U): U = {
    Json.parse(json).validate[T](reads).fold(
      errHandler,
      (content: T) => handler(content)
    )
  }
}
Example 103
Source File: ShellSpec.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.kernel.protocol.v5.kernel.socket

import java.nio.charset.Charset

import akka.actor.{ActorSelection, ActorRef, ActorSystem, Props}
import akka.testkit.{ImplicitSender, TestKit, TestProbe}
import akka.util.ByteString
import org.apache.toree.communication.ZMQMessage
import org.apache.toree.kernel.protocol.v5._
import org.apache.toree.kernel.protocol.v5.kernel.{ActorLoader, Utilities}
import org.apache.toree.kernel.protocol.v5Test._
import Utilities._
import com.typesafe.config.ConfigFactory
import org.mockito.Matchers._
import org.mockito.Mockito._
import org.scalatest.mock.MockitoSugar
import org.scalatest.{FunSpecLike, Matchers}
import test.utils.MaxAkkaTestTimeout

object ShellSpec {
  val config = """
    akka {
      loglevel = "WARNING"
    }"""
}

class ShellSpec extends TestKit(
  ActorSystem(
    "ShellActorSpec",
    ConfigFactory.parseString(ShellSpec.config),
    org.apache.toree.Main.getClass.getClassLoader
  ))
  with ImplicitSender with FunSpecLike with Matchers with MockitoSugar {

  describe("Shell") {
    val socketFactory = mock[SocketFactory]
    val actorLoader = mock[ActorLoader]
    val socketProbe: TestProbe = TestProbe()
    when(socketFactory.Shell(any(classOf[ActorSystem]), any(classOf[ActorRef])))
      .thenReturn(socketProbe.ref)

    val relayProbe: TestProbe = TestProbe()
    val relaySelection: ActorSelection = system.actorSelection(relayProbe.ref.path)
    when(actorLoader.load(SystemActorType.KernelMessageRelay))
      .thenReturn(relaySelection)

    val shell = system.actorOf(Props(classOf[Shell], socketFactory, actorLoader))

    describe("#receive") {
      it("( KernelMessage ) should reply with a ZMQMessage via the socket") {
        // Use the implicit to convert the KernelMessage to ZMQMessage
        val MockZMQMessage: ZMQMessage = MockKernelMessage
        shell ! MockKernelMessage
        socketProbe.expectMsg(MockZMQMessage)
      }

      it("( ZMQMessage ) should forward ZMQ Strings and KernelMessage to Relay") {
        // Use the implicit to convert the KernelMessage to ZMQMessage
        val MockZMQMessage: ZMQMessage = MockKernelMessage
        shell ! MockZMQMessage

        // Should get the last four (assuming no buffer) strings in UTF-8
        val zmqStrings = MockZMQMessage.frames.map((byteString: ByteString) =>
          new String(byteString.toArray, Charset.forName("UTF-8"))
        ).takeRight(4)
        val kernelMessage: KernelMessage = MockZMQMessage
        relayProbe.expectMsg(MaxAkkaTestTimeout, (zmqStrings, kernelMessage))
      }
    }
  }
}
Example 104
Source File: HeartbeatSpec.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.kernel.protocol.v5.kernel.socket import akka.actor.{ActorRef, ActorSystem, Props} import akka.testkit.{ImplicitSender, TestKit, TestProbe} import akka.util.ByteString import org.apache.toree.communication.ZMQMessage import com.typesafe.config.ConfigFactory import org.mockito.Matchers._ import org.mockito.Mockito._ import org.scalatest.mock.MockitoSugar import org.scalatest.{FunSpecLike, Matchers} import test.utils.MaxAkkaTestTimeout object HeartbeatSpec { val config = """ akka { loglevel = "WARNING" }""" } class HeartbeatSpec extends TestKit( ActorSystem( "HeartbeatActorSpec", ConfigFactory.parseString(HeartbeatSpec.config), org.apache.toree.Main.getClass.getClassLoader )) with ImplicitSender with FunSpecLike with Matchers with MockitoSugar { val SomeMessage: String = "some message" val SomeZMQMessage: ZMQMessage = ZMQMessage(ByteString(SomeMessage.getBytes)) describe("HeartbeatActor") { val socketFactory = mock[SocketFactory] val probe : TestProbe = TestProbe() when(socketFactory.Heartbeat(any(classOf[ActorSystem]), any(classOf[ActorRef]))).thenReturn(probe.ref) val heartbeat = system.actorOf(Props(classOf[Heartbeat], socketFactory)) describe("send heartbeat") { it("should receive and send same ZMQMessage") { heartbeat ! SomeZMQMessage probe.expectMsg(MaxAkkaTestTimeout, SomeZMQMessage) } } } }
Example 105
Source File: StdinSpec.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.kernel.protocol.v5.kernel.socket

import java.nio.charset.Charset

import akka.actor.{Props, ActorSelection, ActorRef, ActorSystem}
import akka.testkit.{TestProbe, ImplicitSender, TestKit}
import akka.util.ByteString
import org.apache.toree.communication.ZMQMessage
import org.apache.toree.kernel.protocol.v5.kernel.Utilities._
import org.apache.toree.kernel.protocol.v5Test._
import org.apache.toree.kernel.protocol.v5.{KernelMessage, SystemActorType}
import org.apache.toree.kernel.protocol.v5.kernel.ActorLoader
import com.typesafe.config.ConfigFactory
import org.scalatest.mock.MockitoSugar
import org.scalatest.{Matchers, FunSpecLike}
import org.mockito.Mockito._
import org.mockito.Matchers._
import test.utils.MaxAkkaTestTimeout

object StdinSpec {
  val config = """
    akka {
      loglevel = "WARNING"
    }"""
}

class StdinSpec extends TestKit(ActorSystem(
  "StdinActorSpec",
  ConfigFactory.parseString(StdinSpec.config),
  org.apache.toree.Main.getClass.getClassLoader
)) with ImplicitSender with FunSpecLike with Matchers with MockitoSugar {
  describe("Stdin") {
    val socketFactory = mock[SocketFactory]
    val actorLoader = mock[ActorLoader]
    val socketProbe: TestProbe = TestProbe()
    when(socketFactory.Stdin(any(classOf[ActorSystem]), any(classOf[ActorRef])))
      .thenReturn(socketProbe.ref)

    val relayProbe: TestProbe = TestProbe()
    val relaySelection: ActorSelection = system.actorSelection(relayProbe.ref.path)
    when(actorLoader.load(SystemActorType.KernelMessageRelay))
      .thenReturn(relaySelection)

    val stdin = system.actorOf(Props(classOf[Stdin], socketFactory, actorLoader))

    describe("#receive") {
      it("( KernelMessage ) should reply with a ZMQMessage via the socket") {
        // Use the implicit to convert the KernelMessage to ZMQMessage
        val MockZMQMessage: ZMQMessage = MockKernelMessage
        stdin ! MockKernelMessage
        socketProbe.expectMsg(MockZMQMessage)
      }

      it("( ZMQMessage ) should forward ZMQ Strings and KernelMessage to Relay") {
        // Use the implicit to convert the KernelMessage to ZMQMessage
        val MockZMQMessage: ZMQMessage = MockKernelMessage
        stdin ! MockZMQMessage

        // Should get the last four (assuming no buffer) strings in UTF-8
        val zmqStrings = MockZMQMessage.frames.map((byteString: ByteString) =>
          new String(byteString.toArray, Charset.forName("UTF-8"))
        ).takeRight(4)
        val kernelMessage: KernelMessage = MockZMQMessage
        relayProbe.expectMsg(MaxAkkaTestTimeout, (zmqStrings, kernelMessage))
      }
    }
  }
}
Example 106
Source File: UtilitiesSpec.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.kernel.protocol.v5.kernel import akka.util.ByteString import org.apache.toree.communication.ZMQMessage import org.apache.toree.kernel.protocol.v5._ import org.scalatest.{FunSpec, Matchers} class UtilitiesSpec extends FunSpec with Matchers { val header: Header = Header( "<UUID>", "<STRING>", "<UUID>", "<STRING>", "<FLOAT>" ) val parentHeader : ParentHeader = ParentHeader( "<PARENT-UUID>", "<PARENT-STRING>", "<PARENT-UUID>", "<PARENT-STRING>", "<PARENT-FLOAT>" ) val kernelMessage = KernelMessage( Seq("<STRING-1>","<STRING-2>").map(x => x.getBytes), "<SIGNATURE>", header, parentHeader, Map(), "<STRING>" ) val zmqMessage = ZMQMessage( ByteString("<STRING-1>".replaceAll("""\s""", "").getBytes), ByteString("<STRING-2>".replaceAll("""\s""", "").getBytes), ByteString("<IDS|MSG>".replaceAll("""\s""", "").getBytes), ByteString("<SIGNATURE>".replaceAll("""\s""", "").getBytes), ByteString( """ { "msg_id": "<UUID>", "username": "<STRING>", "session": "<UUID>", "msg_type": "<STRING>", "version": "<FLOAT>" } """.stripMargin.replaceAll("""\s""", "").getBytes), ByteString( """ { "msg_id": "<PARENT-UUID>", "username": "<PARENT-STRING>", "session": "<PARENT-UUID>", "msg_type": "<PARENT-STRING>", "version": "<PARENT-FLOAT>" } """.stripMargin.replaceAll("""\s""", "").getBytes), ByteString("{}".replaceAll("""\s""", "").getBytes), ByteString("<STRING>".replaceAll("""\s""", "").getBytes) ) describe("Utilities") { describe("implicit #KernelMessageToZMQMessage") { it("should correctly convert a kernel message to a ZMQMessage") { Utilities.KernelMessageToZMQMessage(kernelMessage) should equal (zmqMessage) } } describe("implicit #ZMQMessageToKernelMessage") { it("should correctly convert a ZMQMessage to a kernel message") { Utilities.ZMQMessageToKernelMessage(zmqMessage) should equal (kernelMessage) } } describe("implicit conversions should be inverses of each other") { it("should convert back to the original message, ZMQ -> Kernel -> ZMQ") { Utilities.KernelMessageToZMQMessage( Utilities.ZMQMessageToKernelMessage(zmqMessage) ) should equal (zmqMessage) } it("should convert back to the original message, Kernel -> ZMQ -> Kernel") { Utilities.ZMQMessageToKernelMessage( Utilities.KernelMessageToZMQMessage(kernelMessage) ) should equal (kernelMessage) } } describe("implicit #StringToByteString") { it("should correctly convert a string to a ByteString") { val someString = "some content" val expected = ByteString(someString) Utilities.StringToByteString(someString) should be (expected) } } describe("implicit #ByteStringToString") { it("should correctly convert a ByteString to a string") { val expected = "some content" val byteString = ByteString(expected) Utilities.ByteStringToString(byteString) should be (expected) } } } }
Example 107
Source File: ResumingEventFilter.scala From 006877 with MIT License | 5 votes |
package aia.stream import java.nio.file.{ Path, Paths } import java.nio.file.StandardOpenOption import java.nio.file.StandardOpenOption._ import scala.concurrent.Future import akka.NotUsed import akka.actor.ActorSystem import akka.stream.{ ActorMaterializer, IOResult } import akka.util.ByteString import spray.json._ import com.typesafe.config.{ Config, ConfigFactory } object ResumingEventFilter extends App with EventMarshalling { val config = ConfigFactory.load() val maxLine = config.getInt("log-stream-processor.max-line") if(args.length != 3) { System.err.println("Provide args: input-file output-file state") System.exit(1) } val inputFile = FileArg.shellExpanded(args(0)) val outputFile = FileArg.shellExpanded(args(1)) val filterState = args(2) match { case State(state) => state case unknown => System.err.println(s"Unknown state $unknown, exiting.") System.exit(1) } import akka.stream.scaladsl._ val source: Source[ByteString, Future[IOResult]] = FileIO.fromPath(inputFile) val sink: Sink[ByteString, Future[IOResult]] = FileIO.toPath(outputFile, Set(CREATE, WRITE, APPEND)) val frame: Flow[ByteString, String, NotUsed] = Framing.delimiter(ByteString("\n"), maxLine) .map(_.decodeString("UTF8")) import akka.stream.ActorAttributes import akka.stream.Supervision import LogStreamProcessor.LogParseException val decider : Supervision.Decider = { case _: LogParseException => Supervision.Resume case _ => Supervision.Stop } val parse: Flow[String, Event, NotUsed] = Flow[String].map(LogStreamProcessor.parseLineEx) .collect { case Some(e) => e } .withAttributes(ActorAttributes.supervisionStrategy(decider)) val filter: Flow[Event, Event, NotUsed] = Flow[Event].filter(_.state == filterState) val serialize: Flow[Event, ByteString, NotUsed] = Flow[Event].map(event => ByteString(event.toJson.compactPrint)) implicit val system = ActorSystem() implicit val ec = system.dispatcher val graphDecider : Supervision.Decider = { case _: LogParseException => Supervision.Resume case _ => Supervision.Stop } import akka.stream.ActorMaterializerSettings implicit val materializer = ActorMaterializer( ActorMaterializerSettings(system) .withSupervisionStrategy(graphDecider) ) val composedFlow: Flow[ByteString, ByteString, NotUsed] = frame.via(parse) .via(filter) .via(serialize) val runnableGraph: RunnableGraph[Future[IOResult]] = source.via(composedFlow).toMat(sink)(Keep.right) runnableGraph.run().foreach { result => println(s"Wrote ${result.count} bytes to '$outputFile'.") system.terminate() } }
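Framing.delimiter(ByteString("\n"), maxLine) is what turns the arbitrarily chunked ByteString stream back into lines; chunk boundaries need not coincide with line boundaries. A runnable sketch (the input chunks are illustrative):

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{Framing, Sink, Source}
import akka.util.ByteString

object FramingSketch extends App {
  implicit val system = ActorSystem("framing-sketch")
  implicit val materializer = ActorMaterializer()
  import system.dispatcher

  // Three chunks whose boundaries fall in the middle of lines
  val chunks = Source(List(
    ByteString("first li"),
    ByteString("ne\nsecond line\nthi"),
    ByteString("rd line\n")))

  chunks
    .via(Framing.delimiter(ByteString("\n"), maximumFrameLength = 1024))
    .map(_.decodeString("UTF8"))
    .runWith(Sink.foreach(println)) // prints three complete lines
    .onComplete(_ => system.terminate())
}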
Example 108
Source File: BidiEventFilter.scala From 006877 with MIT License | 5 votes |
package aia.stream import java.nio.file.{ Path, Paths } import java.nio.file.StandardOpenOption import java.nio.file.StandardOpenOption._ import scala.concurrent.Future import akka.NotUsed import akka.actor.ActorSystem import akka.stream.{ ActorMaterializer, IOResult } import akka.stream.scaladsl._ import akka.stream.scaladsl.JsonFraming import akka.util.ByteString import spray.json._ import com.typesafe.config.{ Config, ConfigFactory } object BidiEventFilter extends App with EventMarshalling { val config = ConfigFactory.load() val maxLine = config.getInt("log-stream-processor.max-line") val maxJsonObject = config.getInt("log-stream-processor.max-json-object") if(args.length != 5) { System.err.println("Provide args: input-format output-format input-file output-file state") System.exit(1) } val inputFile = FileArg.shellExpanded(args(2)) val outputFile = FileArg.shellExpanded(args(3)) val filterState = args(4) match { case State(state) => state case unknown => System.err.println(s"Unknown state $unknown, exiting.") System.exit(1) } val inFlow: Flow[ByteString, Event, NotUsed] = if(args(0).toLowerCase == "json") { JsonFraming.objectScanner(maxJsonObject) .map(_.decodeString("UTF8").parseJson.convertTo[Event]) } else { Framing.delimiter(ByteString("\n"), maxLine) .map(_.decodeString("UTF8")) .map(LogStreamProcessor.parseLineEx) .collect { case Some(event) => event } } val outFlow: Flow[Event, ByteString, NotUsed] = if(args(1).toLowerCase == "json") { Flow[Event].map(event => ByteString(event.toJson.compactPrint)) } else { Flow[Event].map{ event => ByteString(LogStreamProcessor.logLine(event)) } } val bidiFlow = BidiFlow.fromFlows(inFlow, outFlow) val source: Source[ByteString, Future[IOResult]] = FileIO.fromPath(inputFile) val sink: Sink[ByteString, Future[IOResult]] = FileIO.toPath(outputFile, Set(CREATE, WRITE, APPEND)) val filter: Flow[Event, Event, NotUsed] = Flow[Event].filter(_.state == filterState) val flow = bidiFlow.join(filter) val runnableGraph: RunnableGraph[Future[IOResult]] = source.via(flow).toMat(sink)(Keep.right) implicit val system = ActorSystem() implicit val ec = system.dispatcher implicit val materializer = ActorMaterializer() runnableGraph.run().foreach { result => println(s"Wrote ${result.count} bytes to '$outputFile'.") system.terminate() } }
Example 109
Source File: LogEntityMarshaller.scala From 006877 with MIT License | 5 votes |
package aia.stream import akka.NotUsed import akka.stream.scaladsl.Framing import akka.stream.scaladsl.JsonFraming import akka.http.scaladsl.model.HttpCharsets._ import akka.http.scaladsl.model.MediaTypes._ import akka.http.scaladsl.model.headers.Accept import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport._ import akka.http.scaladsl.model._ import akka.stream.scaladsl.Source import akka.util.ByteString import spray.json._ import akka.http.scaladsl.marshalling.Marshaller import akka.http.scaladsl.marshalling.ToEntityMarshaller object LogEntityMarshaller extends EventMarshalling { type LEM = ToEntityMarshaller[Source[ByteString, _]] def create(maxJsonObject: Int): LEM = { val js = ContentTypes.`application/json` val txt = ContentTypes.`text/plain(UTF-8)` val jsMarshaller = Marshaller.withFixedContentType(js) { src:Source[ByteString, _] => HttpEntity(js, src) } val txtMarshaller = Marshaller.withFixedContentType(txt) { src:Source[ByteString, _] => HttpEntity(txt, toText(src, maxJsonObject)) } Marshaller.oneOf(jsMarshaller, txtMarshaller) } def toText(src: Source[ByteString, _], maxJsonObject: Int): Source[ByteString, _] = { src.via(LogJson.jsonToLogFlow(maxJsonObject)) } }
Example 110
Source File: GenerateLogFile.scala From 006877 with MIT License | 5 votes |
package aia.stream import java.nio.file.{ Path, Paths } import java.nio.file.StandardOpenOption import java.nio.file.StandardOpenOption._ import java.time.ZonedDateTime import java.time.format.DateTimeFormatter import scala.concurrent.Future import akka.actor.ActorSystem import akka.stream.{ ActorMaterializer, IOResult } import akka.stream.scaladsl._ import akka.util.ByteString object GenerateLogFile extends App { val filePath = args(0) val numberOfLines = args(1).toInt val rnd = new java.util.Random() val sink = FileIO.toPath(FileArg.shellExpanded(filePath), Set(CREATE, WRITE, APPEND)) def line(i: Int) = { val host = "my-host" val service = "my-service" val time = ZonedDateTime.now.format(DateTimeFormatter.ISO_INSTANT) val state = if( i % 10 == 0) "warning" else if(i % 101 == 0) "error" else if(i % 1002 == 0) "critical" else "ok" val description = "Some description of what has happened." val tag = "tag" val metric = rnd.nextDouble() * 100 s"$host | $service | $state | $time | $description | $tag | $metric \n" } val graph = Source.fromIterator{() => Iterator.tabulate(numberOfLines)(line) }.map(l=> ByteString(l)).toMat(sink)(Keep.right) implicit val system = ActorSystem() implicit val ec = system.dispatcher implicit val materializer = ActorMaterializer() graph.run().foreach { result => println(s"Wrote ${result.count} bytes to '$filePath'.") system.terminate() } }
Example 111
Source File: EventUnmarshaller.scala From 006877 with MIT License | 5 votes |
package aia.stream import scala.concurrent.{ ExecutionContext, Future } import akka.NotUsed import akka.stream.scaladsl.Framing import akka.stream.scaladsl.JsonFraming import akka.stream.Materializer import akka.stream.scaladsl.Source import akka.http.scaladsl.model.HttpCharsets._ import akka.http.scaladsl.model.MediaTypes._ import akka.http.scaladsl.model.headers.Accept import akka.http.scaladsl.marshalling._ import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport._ import akka.http.scaladsl.model._ import akka.util.ByteString import spray.json._ import akka.http.scaladsl.unmarshalling.Unmarshaller import akka.http.scaladsl.unmarshalling.Unmarshaller._ object EventUnmarshaller extends EventMarshalling { val supported = Set[ContentTypeRange]( ContentTypes.`text/plain(UTF-8)`, ContentTypes.`application/json` ) def create(maxLine: Int, maxJsonObject: Int) = { new Unmarshaller[HttpEntity, Source[Event, _]] { def apply(entity: HttpEntity)(implicit ec: ExecutionContext, materializer: Materializer): Future[Source[Event, _]] = { val future = entity.contentType match { case ContentTypes.`text/plain(UTF-8)` => Future.successful(LogJson.textInFlow(maxLine)) case ContentTypes.`application/json` => Future.successful(LogJson.jsonInFlow(maxJsonObject)) case other => Future.failed( new UnsupportedContentTypeException(supported) ) } future.map(flow => entity.dataBytes.via(flow))(ec) } }.forContentTypes(supported.toList:_*) } }
Example 112
Source File: Event.scala From 006877 with MIT License | 5 votes |
package aia.stream import java.io.File import java.time.ZonedDateTime import scala.concurrent.Future import akka.NotUsed import akka.util.ByteString import akka.stream.IOResult import akka.stream.scaladsl.{ Source, FileIO, Framing } import scala.concurrent.duration.FiniteDuration case class Event( host: String, service: String, state: State, time: ZonedDateTime, description: String, tag: Option[String] = None, metric: Option[Double] = None ) sealed trait State case object Critical extends State case object Error extends State case object Ok extends State case object Warning extends State object State { def norm(str: String): String = str.toLowerCase def norm(state: State): String = norm(state.toString) val ok = norm(Ok) val warning = norm(Warning) val error = norm(Error) val critical = norm(Critical) def unapply(str: String): Option[State] = { val normalized = norm(str) if(normalized == norm(Ok)) Some(Ok) else if(normalized == norm(Warning)) Some(Warning) else if(normalized == norm(Error)) Some(Error) else if(normalized == norm(Critical)) Some(Critical) else None } } case class LogReceipt(logId: String, written: Long) case class ParseError(logId: String, msg: String)
Example 113
Source File: LogJson.scala From 006877 with MIT License | 5 votes |
package aia.stream import java.nio.file.{ Files, Path } import java.io.File import java.time.ZonedDateTime import scala.concurrent.duration._ import scala.concurrent.ExecutionContext import scala.concurrent.Future import scala.util.{ Success, Failure } import akka.Done import akka.actor._ import akka.util.ByteString import akka.stream.{ ActorAttributes, ActorMaterializer, IOResult } import akka.stream.scaladsl.JsonFraming import akka.stream.scaladsl.{ FileIO, BidiFlow, Flow, Framing, Keep, Sink, Source } import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport._ import akka.http.scaladsl.marshalling.Marshal import akka.http.scaladsl.model._ import akka.http.scaladsl.server.Directives._ import akka.http.scaladsl.server._ import spray.json._ object LogJson extends EventMarshalling with NotificationMarshalling with MetricMarshalling { def textInFlow(maxLine: Int) = { Framing.delimiter(ByteString("\n"), maxLine) .map(_.decodeString("UTF8")) .map(LogStreamProcessor.parseLineEx) .collect { case Some(e) => e } } def jsonInFlow(maxJsonObject: Int) = { JsonFraming.objectScanner(maxJsonObject) .map(_.decodeString("UTF8").parseJson.convertTo[Event]) } def jsonFramed(maxJsonObject: Int) = JsonFraming.objectScanner(maxJsonObject) val jsonOutFlow = Flow[Event].map { event => ByteString(event.toJson.compactPrint) } val notifyOutFlow = Flow[Summary].map { ws => ByteString(ws.toJson.compactPrint) } val metricOutFlow = Flow[Metric].map { m => ByteString(m.toJson.compactPrint) } val textOutFlow = Flow[Event].map{ event => ByteString(LogStreamProcessor.logLine(event)) } def logToJson(maxLine: Int) = { BidiFlow.fromFlows(textInFlow(maxLine), jsonOutFlow) } def jsonToLog(maxJsonObject: Int) = { BidiFlow.fromFlows(jsonInFlow(maxJsonObject), textOutFlow) } def logToJsonFlow(maxLine: Int) = { logToJson(maxLine).join(Flow[Event]) } def jsonToLogFlow(maxJsonObject: Int) = { jsonToLog(maxJsonObject).join(Flow[Event]) } }
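The logToJson/jsonToLog pairs above follow one pattern: BidiFlow.fromFlows wraps a decoding flow and an encoding flow around whatever processing flow join receives. A type-level sketch with trivial stand-in codecs (the names are illustrative, not the book's):

import akka.NotUsed
import akka.stream.scaladsl.{BidiFlow, Flow}
import akka.util.ByteString

object BidiSketch {
  val inbound: Flow[ByteString, String, NotUsed]  = Flow[ByteString].map(_.utf8String)
  val outbound: Flow[String, ByteString, NotUsed] = Flow[String].map(ByteString(_))

  // join plugs a String-to-String stage into the middle, yielding a
  // ByteString-in / ByteString-out flow, in the same shape as jsonToLogFlow above.
  val framed: Flow[ByteString, ByteString, NotUsed] =
    BidiFlow.fromFlows(inbound, outbound).join(Flow[String].map(_.toUpperCase))
}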
Example 114
Source File: HttpFeeRateProvider.scala From bitcoin-s with MIT License | 5 votes |
package org.bitcoins.feeprovider import java.time.{Duration, Instant} import akka.actor.ActorSystem import akka.http.scaladsl.Http import akka.http.scaladsl.model.{HttpRequest, Uri} import akka.util.ByteString import org.bitcoins.core.api.FeeRateApi import org.bitcoins.core.util.TimeUtil import org.bitcoins.core.wallet.fee.FeeUnit import scala.concurrent.{ExecutionContextExecutor, Future} import scala.util.Try object HttpFeeRateProvider { def makeApiCall(uri: Uri)(implicit system: ActorSystem): Future[String] = { implicit val ec: ExecutionContextExecutor = system.dispatcher Http() .singleRequest(HttpRequest(uri = uri)) .flatMap(response => response.entity.dataBytes .runFold(ByteString.empty)(_ ++ _) .map(payload => payload.decodeString(ByteString.UTF_8))) } } abstract class HttpFeeRateProvider extends FeeRateApi { implicit protected val system: ActorSystem protected def uri: Uri protected def converter(str: String): Try[FeeUnit] def getFeeRate: Future[FeeUnit] = { HttpFeeRateProvider .makeApiCall(uri) .flatMap(ret => Future.fromTry(converter(ret)))(system.dispatcher) } } abstract class CachedHttpFeeRateProvider extends HttpFeeRateProvider { private var cachedFeeRateOpt: Option[(FeeUnit, Instant)] = None val cacheDuration: Duration = Duration.ofMinutes(5) private def updateFeeRate(): Future[FeeUnit] = { implicit val ec: ExecutionContextExecutor = system.dispatcher super.getFeeRate.map { feeRate => cachedFeeRateOpt = Some(feeRate, TimeUtil.now) feeRate } } override def getFeeRate: Future[FeeUnit] = { cachedFeeRateOpt match { case None => updateFeeRate() case Some((cachedFeeRate, time)) => val now = TimeUtil.now if (time.plus(cacheDuration).isAfter(now)) { updateFeeRate() } else { Future.successful(cachedFeeRate) } } } }
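runFold(ByteString.empty)(_ ++ _) concatenates every chunk of the response entity into a single ByteString before decoding; ++ is inexpensive because ByteString builds a rope over the chunks rather than copying them. A sketch with an in-memory source standing in for response.entity.dataBytes:

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.Source
import akka.util.ByteString

object FoldBytesSketch extends App {
  implicit val system = ActorSystem("fold-bytes-sketch")
  implicit val materializer = ActorMaterializer()
  import system.dispatcher

  val dataBytes = Source(List(ByteString("hello, "), ByteString("world")))
  dataBytes
    .runFold(ByteString.empty)(_ ++ _) // accumulate the whole body
    .map(_.decodeString("UTF-8"))
    .foreach { body => println(body); system.terminate() }
}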
Example 115
Source File: CustomEntityWithJson.scala From introduction-to-akkahttp with Apache License 2.0 | 5 votes |
package com.shashank.akkahttp.basic.routing import akka.http.scaladsl.model._ import akka.http.scaladsl.server.Directives._ import akka.util.ByteString import com.shashank.akkahttp.basic.routing.CustomEntityWithJsonModels.{Employee, EmployeeList} import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport._ import spray.json._ object CustomEntityWithJsonModels { case class Employee(id:String, name:String, age:Int, department:Option[String]) case class EmployeeList(employees:Array[Employee]) object ServiceJsonProtocol extends DefaultJsonProtocol { implicit val employeeFormat = jsonFormat4(Employee) implicit val employeeListFormat = jsonFormat1(EmployeeList) } } object CustomEntityWithJson extends BaseSpec { def main(args: Array[String]) { import com.shashank.akkahttp.basic.routing.CustomEntityWithJsonModels.ServiceJsonProtocol._ val employeeBuffer = scala.collection.mutable.ArrayBuffer.empty[Employee] val route = path("employee"){ post{ entity(as[Employee]){ employee => complete { if(employeeBuffer.exists(_.id == employee.id)) require(false, s"${employee.id} already exists") employeeBuffer += employee employee } } } ~ get{ complete { EmployeeList(employeeBuffer.toArray) } } } Post("/employee", HttpEntity(MediaTypes.`application/json`, ByteString("""{"id":"1", "name":"Eric", "age":30}"""))) ~> route ~> check { status shouldEqual StatusCodes.OK } Get("/employee") ~> route ~> check { status shouldEqual StatusCodes.OK entityAs[String].parseJson.compactPrint shouldEqual """{"employees":[{"id":"1","name":"Eric","age":30}]}""".parseJson.compactPrint } system.terminate() } }
Example 116
Source File: FileUploadStream.scala From introduction-to-akkahttp with Apache License 2.0 | 5 votes |
package com.shashank.akkahttp.basic.routing

import akka.http.scaladsl.model.{ContentTypes, HttpEntity, Multipart, StatusCodes}
import akka.http.scaladsl.server.Directives._
import akka.stream.scaladsl.Framing
import akka.util.ByteString

import scala.concurrent.Future

object FileUploadStream extends BaseSpec {

  def main(args: Array[String]) {
    val route =
      extractRequestContext { ctx =>
        implicit val materializer = ctx.materializer
        implicit val ec = ctx.executionContext

        fileUpload("csv") {
          case (metadata, byteSource) =>
            // sum the numbers as they arrive so that we can accept any size of file
            val sumF: Future[Int] =
              byteSource.via(Framing.delimiter(ByteString("\n"), 1024))
                .mapConcat(_.utf8String.split(",").toVector)
                .map(_.toInt)
                .runFold(0) { (acc, n) => acc + n }
            onSuccess(sumF) { sum => complete(s"Sum: $sum") }
        }
      }

    //Test file upload stream
    val multipartForm =
      Multipart.FormData(Multipart.FormData.BodyPart.Strict(
        "csv",
        HttpEntity(ContentTypes.`text/plain(UTF-8)`, "2,3,5\n7,11,13,17,23\n29,31,37\n"),
        Map("filename" -> "primes.csv")))

    Post("/", multipartForm) ~> route ~> check {
      status shouldEqual StatusCodes.OK
      responseAs[String] shouldEqual "Sum: 178"
    }

    system.terminate()
  }
}

//File upload direct
//curl --form "[email protected]" http://<host>:<port>
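Since the route's summing pipeline is just a ByteString flow, it can be exercised without HTTP at all. A sketch with the test body hard-coded in place of the uploaded part:

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{Framing, Source}
import akka.util.ByteString

object CsvSumSketch extends App {
  implicit val system = ActorSystem("csv-sum-sketch")
  implicit val materializer = ActorMaterializer()
  import system.dispatcher

  Source.single(ByteString("2,3,5\n7,11,13,17,23\n29,31,37\n"))
    .via(Framing.delimiter(ByteString("\n"), 1024)) // re-frame bytes into lines
    .mapConcat(_.utf8String.split(",").toVector)
    .map(_.toInt)
    .runFold(0)(_ + _)
    .foreach { sum => println(s"Sum: $sum"); system.terminate() } // Sum: 178
}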
Example 117
Source File: UnMarshalling.scala From introduction-to-akkahttp with Apache License 2.0 | 5 votes |
package com.shashank.akkahttp.basic.routing

import akka.actor.ActorSystem
import akka.http.scaladsl.marshalling.Marshal
import akka.http.scaladsl.model.{HttpMethods, HttpRequest, HttpResponse, MessageEntity}
import akka.http.scaladsl.unmarshalling.Unmarshal
import akka.stream.{ActorMaterializer, Materializer}
import akka.util.ByteString

import scala.concurrent.Await
import scala.concurrent.duration._
import scala.concurrent.ExecutionContext.Implicits.global
import spray.json._

object UnMarshalling {

  def main(args: Array[String]) {
    implicit val sys = ActorSystem("IntroductionToAkkaHttp")
    implicit val mat: Materializer = ActorMaterializer()

    //type FromStringUnmarshaller[T] = Unmarshaller[String, T]
    val intFuture = Unmarshal("42").to[Int]
    val int = Await.result(intFuture, 1.second)
    println("int unmarshalling " + int)

    //type FromStringUnmarshaller[T] = Unmarshaller[String, T]
    val boolFuture = Unmarshal("off").to[Boolean]
    val bool = Await.result(boolFuture, 1.second)
    println("off unmarshalling " + bool)

    //type ToEntityMarshaller[T] = Marshaller[T, MessageEntity]
    val string = "Yeah"
    val entityFuture = Marshal(string).to[MessageEntity]
    val entity = Await.result(entityFuture, 1.second) // don't block in non-test code!
    println(entity)

    //type ToResponseMarshaller[T] = Marshaller[T, HttpResponse]
    val errorMsg = "Not found, pal!"
    val responseFuture = Marshal(404 -> errorMsg).to[HttpResponse]
    val response = Await.result(responseFuture, 1.second)
    println(response)

    //type FromEntityUnmarshaller[T] = Unmarshaller[HttpEntity, T]
    val jsonByteString = ByteString("""{"name":"Hello"}""")
    val httpRequest = HttpRequest(HttpMethods.POST, entity = jsonByteString)
    val jsonDataUnmarshalledFuture = Unmarshal(httpRequest).to[String]
    val jsonDataUnmarshalled = Await.result(jsonDataUnmarshalledFuture, 1.second)
    println(jsonDataUnmarshalled)

    sys.terminate()
  }
}
Example 118
Source File: UploadingFileServer.scala From Akka-Cookbook with MIT License | 5 votes |
package com.packt.chapter9

import akka.http.scaladsl.model.Multipart
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server.HttpApp
import akka.http.scaladsl.settings.ServerSettings
import akka.stream.scaladsl.Framing
import akka.util.ByteString
import com.typesafe.config.ConfigFactory

import scala.concurrent.duration._

object UploadingFileServer extends HttpApp {
  val route =
    extractRequestContext { ctx =>
      implicit val materializer = ctx.materializer
      implicit val ec = ctx.executionContext

      path("regularupload") {
        entity(as[Multipart.FormData]) { formData =>
          val partsFuture = formData.parts.mapAsync(1) { b =>
            b.toStrict(2.seconds).map(_.entity.data.utf8String)
          }.runFold(List.empty[String])(_ :+ _)

          onSuccess(partsFuture) { allParts =>
            complete {
              val wholeFile = allParts.mkString
              s"Regular upload: submitted file has ${wholeFile.split("\n").size} lines"
            }
          }
        }
      } ~
      path("streamupload") {
        entity(as[Multipart.FormData]) { formData =>
          val partsFuture = formData.parts.mapAsync(1) { b =>
            b.entity.dataBytes
              .via(Framing.delimiter(ByteString("\n"), 1024, allowTruncation = true))
              .map(_ => 1)
              .runReduce(_ + _)
          }.runReduce(_ + _)

          onSuccess(partsFuture) { lines =>
            complete {
              s"Stream upload: submitted file has $lines lines"
            }
          }
        }
      }
    }
}

object UploadingFileServerApplication extends App {
  UploadingFileServer.startServer("0.0.0.0", 8088, ServerSettings(ConfigFactory.load))
}
Example 119
Source File: EncodingDecodingClientApplication.scala From Akka-Cookbook with MIT License | 5 votes |
package com.packt.chapter9 import akka.actor.ActorSystem import akka.http.scaladsl.Http import akka.http.scaladsl.coding.{Encoder, Gzip, NoCoding} import akka.http.scaladsl.model._ import akka.http.scaladsl.model.headers._ import akka.http.scaladsl.model.headers.HttpEncodings._ import akka.http.scaladsl.model.HttpMethods._ import headers.HttpEncodings import akka.stream.ActorMaterializer import akka.util.ByteString import scala.concurrent.duration._ import scala.concurrent.Future import scala.util.{Failure, Success} object EncodingDecodingClientApplication extends App { implicit val system = ActorSystem() implicit val materializer = ActorMaterializer() import system.dispatcher val http = Http() val uriServer = "http://localhost:8088/" val requests = Seq ( HttpRequest(POST, uriServer, List(`Accept-Encoding`(gzip)), HttpEntity("Hello!")), HttpRequest(POST, uriServer, List(`Content-Encoding`(gzip), `Accept-Encoding`(gzip)), HttpEntity(compress("Hello compressed!", Gzip)) ) ) Future.traverse(requests)(http.singleRequest(_).map(decodeResponse)) andThen { case Success(responses) => responses.foreach(response => response.entity.toStrict(5 seconds).map(_.data.decodeString("UTF-8")).andThen { case Success(content) => println(s"Response: $content") case _ => }) case Failure(e) => println(s"request failed $e") } private def decodeResponse(response: HttpResponse) = { val decoder = response.encoding match { case HttpEncodings.gzip => Gzip case HttpEncodings.identity => NoCoding } decoder.decode(response) } private def compress(input: String, encoder: Encoder): ByteString = encoder.encode(ByteString(input)) }
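The compress helper above is one half of a round trip; the same coder object also decodes. A sketch using the Gzip coder the example imports (decode returns a Future because it runs a small stream internally):

import akka.actor.ActorSystem
import akka.http.scaladsl.coding.Gzip
import akka.stream.ActorMaterializer
import akka.util.ByteString
import scala.concurrent.Await
import scala.concurrent.duration._

object GzipRoundTripSketch extends App {
  implicit val system = ActorSystem("gzip-sketch")
  implicit val materializer = ActorMaterializer()

  val plain = ByteString("Hello compressed!")
  val compressed = Gzip.encode(plain)                         // strict, in-memory encode
  val restored = Await.result(Gzip.decode(compressed), 2.seconds)
  println(restored.utf8String)                                // Hello compressed!
  system.terminate()
}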
Example 120
Source File: ProcessingRabbitMQApplication.scala From Akka-Cookbook with MIT License | 5 votes |
package com.packt.chapter8 import akka.actor.ActorSystem import akka.stream.ActorMaterializer import akka.stream.alpakka.amqp._ import akka.stream.alpakka.amqp.scaladsl.{AmqpSink, AmqpSource} import akka.util.ByteString object ProcessingRabbitMQApplication extends App { implicit val actorSystem = ActorSystem("SimpleStream") implicit val actorMaterializer = ActorMaterializer() val consumerQueueName = "akka_streams_consumer_queue" val consumerQueueDeclaration = QueueDeclaration(consumerQueueName) val sourceDeclarations = Seq(consumerQueueDeclaration) val exchangeName = "akka_streams_exchange" val exchangeDeclaration = ExchangeDeclaration(exchangeName, "direct") val destinationQueueName = "akka_streams_destination_queue" val destinationQueueDeclaration = QueueDeclaration(destinationQueueName) val bindingDeclaration = BindingDeclaration(destinationQueueName, exchangeName) val sinkDeclarations = Seq(exchangeDeclaration, destinationQueueDeclaration, bindingDeclaration) val credentials = AmqpCredentials("guest", "guest") val connectionSetting = AmqpConnectionDetails("127.0.0.1", 5672, Some(credentials)) val amqpSourceConfig = NamedQueueSourceSettings(connectionSetting, consumerQueueName, sourceDeclarations) val rabbitMQSource = AmqpSource(amqpSourceConfig, 1000) val amqpSinkConfig = AmqpSinkSettings(connectionSetting, Some(exchangeName), None, sinkDeclarations) val rabbitMQSink = AmqpSink(amqpSinkConfig) val stream = rabbitMQSource .map(incomingMessage => { val upperCased = incomingMessage.bytes.utf8String.toUpperCase OutgoingMessage(bytes = ByteString(upperCased), immediate = false, mandatory = false, props = None) }) .to(rabbitMQSink) stream.run() }
Example 121
Source File: ModularizingStreamsApplication.scala From Akka-Cookbook with MIT License | 5 votes |
package com.packt.chapter8

import java.nio.file.Paths

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.scaladsl._
import akka.util.ByteString

object ModularizingStreamsApplication extends App {

  implicit val actorSystem = ActorSystem("TransformingStream")
  implicit val actorMaterializer = ActorMaterializer()

  val MaxGroups = 1000

  val path = Paths.get("src/main/resources/gzipped-file.gz")

  // `source`, `gunzip`, the mapper/splitter flows and the sinks referenced
  // below are building blocks defined in the other recipes of this chapter.
  val streamUppercase = source
    .via(gunzip)
    .via(utf8UppercaseMapper)
    .via(splitter)
    .via(punctuationMapper)
    .via(filterEmptyElements)
    .via(wordCountFlow)
    .to(printlnSink)

  val streamLowercase = source
    .via(gunzip)
    .via(utf8LowercaseMapper)
    .via(splitter)
    .via(punctuationMapper)
    .via(filterEmptyElements)
    .via(wordCountFlow)
    .to(printlnSink)

  streamUppercase.run()
  streamLowercase.run()

  // val sourceGunzip = source.via(gunzip)
  // val reusableProcessingFlow = Flow[String].via(splitter)
  //   .via(punctuationMapper)
  //   .via(filterEmptyElements)
  //   .via(wordCountFlow)
  //
  // val streamLowercase = sourceGunzip
  //   .via(utf8LowercaseMapper)
  //   .via(reusableProcessingFlow)
  //   .to(printlnSink)
}
Example 122
Source File: WorkingIOStreamsApplication.scala From Akka-Cookbook with MIT License | 5 votes |
package com.packt.chapter8

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.Tcp.{IncomingConnection, ServerBinding}
import akka.stream.scaladsl._
import akka.util.ByteString

import scala.concurrent.Future

object WorkingIOStreamsApplication extends App {

  implicit val actorSystem = ActorSystem("WorkingIOStreams")
  implicit val actorMaterializer = ActorMaterializer()

  val MaxGroups = 1000

  val connections = Tcp().bind("127.0.0.1", 1234)
  connections.runForeach(connection => connection.handleWith(wordCount))

  val wordCount = Flow[ByteString].map(_.utf8String.toUpperCase)
    .mapConcat(_.split(" ").toList)
    .collect { case w if w.nonEmpty =>
      w.replaceAll("""[\p{Punct}&&[^.]]""", "").replaceAll(System.lineSeparator(), "")
    }
    .groupBy(MaxGroups, identity)
    .map(_ -> 1)
    .reduce((l, r) => (l._1, l._2 + r._2))
    .mergeSubstreams
    .map(x => ByteString(s"[${x._1} => ${x._2}]\n"))
}
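A hypothetical companion client for the server above, assuming it is already listening on 127.0.0.1:1234. The counts only come back once the client finishes writing, since the groupBy/reduce stages need to see the whole stream:

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{Sink, Source, Tcp}
import akka.util.ByteString

object WordCountClientSketch extends App {
  implicit val actorSystem = ActorSystem("WordCountClient")
  implicit val actorMaterializer = ActorMaterializer()
  import actorSystem.dispatcher

  // Send one line, close the writing side, and print the [word => count] replies
  Source.single(ByteString("to be or not to be"))
    .via(Tcp().outgoingConnection("127.0.0.1", 1234)) // ByteString in, ByteString out
    .runWith(Sink.foreach(bytes => print(bytes.utf8String)))
    .onComplete(_ => actorSystem.terminate())
}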
Example 123
Source File: ServiceRegistryInteropSpec.scala From lagom with Apache License 2.0 | 5 votes |
package com.lightbend.lagom.registry.impl import java.net.URI import java.util.Collections import java.util.Optional import akka.actor.ActorSystem import akka.testkit.TestKit import akka.util.ByteString import com.lightbend.lagom.devmode.internal.scaladsl.registry.RegisteredService import com.lightbend.lagom.devmode.internal.scaladsl.registry.ServiceRegistryService import com.lightbend.lagom.internal.javadsl.registry.{ RegisteredService => jRegisteredService } import com.lightbend.lagom.internal.javadsl.registry.{ ServiceRegistryService => jServiceRegistryService } import com.lightbend.lagom.devmode.internal.scaladsl.registry.{ RegisteredService => sRegisteredService } import com.lightbend.lagom.devmode.internal.scaladsl.registry.{ ServiceRegistryService => sServiceRegistryService } import com.lightbend.lagom.javadsl.api.ServiceAcl import com.lightbend.lagom.javadsl.api.deser.MessageSerializer import com.lightbend.lagom.javadsl.api.deser.StrictMessageSerializer import com.lightbend.lagom.javadsl.api.transport.MessageProtocol import com.lightbend.lagom.javadsl.api.transport.Method import com.lightbend.lagom.javadsl.jackson.JacksonSerializerFactory import org.scalatest.BeforeAndAfterAll import org.scalatest.concurrent.Futures import play.api.libs.json.Format import play.api.libs.json.Json import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers class ServiceRegistryInteropSpec extends AnyFlatSpec with Matchers with Futures with BeforeAndAfterAll { val system = ActorSystem() val jacksonSerializerFactory = new JacksonSerializerFactory(system) protected override def afterAll(): Unit = { TestKit.shutdownActorSystem(actorSystem = system, verifySystemShutdown = true) } behavior.of("ServiceRegistry serializers") it should "should interop between java and scala (RegisteredService)" in { val msg = jRegisteredService.of("inventory", URI.create("https://localhost:123/asdf"), Optional.of("https")) roundTrip(msg) should be(msg) } it should "should interop between java and scala when optional fields are empty (RegisteredService)" in { val msg = jRegisteredService.of("inventory", URI.create("https://localhost:123/asdf"), Optional.empty[String]) roundTrip(msg) should be(msg) } it should "should interop between java and scala (ServiceRegistryService)" in { val msg = jServiceRegistryService.of( URI.create("https://localhost:123/asdf"), Collections.singletonList(ServiceAcl.methodAndPath(Method.GET, "/items")) ) roundTrip(msg) should be(msg) } it should "should interop between java and scala when optional fields are empty (ServiceRegistryService)" in { val msg = jServiceRegistryService.of(URI.create("https://localhost:123/asdf"), Collections.emptyList[ServiceAcl]) roundTrip(msg) should be(msg) } private def roundTrip(input: jServiceRegistryService): jServiceRegistryService = { roundTrip( input, jacksonSerializerFactory.messageSerializerFor[jServiceRegistryService](classOf[jServiceRegistryService]), com.lightbend.lagom.scaladsl.playjson.JsonSerializer[ServiceRegistryService].format )(sServiceRegistryService.format) } private def roundTrip(input: jRegisteredService): jRegisteredService = { roundTrip( input, jacksonSerializerFactory.messageSerializerFor[jRegisteredService](classOf[jRegisteredService]), com.lightbend.lagom.scaladsl.playjson.JsonSerializer[RegisteredService].format )(sRegisteredService.format) } private def roundTrip[J, S]( input: J, jacksonSerializer: StrictMessageSerializer[J], playJsonFormatter: Format[S] )(implicit format: Format[S]): J = { val byteString: 
ByteString = jacksonSerializer.serializerForRequest().serialize(input) val scalaValue: S = playJsonFormatter.reads(Json.parse(byteString.toArray)).get val str: String = playJsonFormatter.writes(scalaValue).toString() val jacksonDeserializer: MessageSerializer.NegotiatedDeserializer[J, ByteString] = jacksonSerializer.deserializer( new MessageProtocol(Optional.of("application/json"), Optional.empty[String], Optional.empty[String]) ) jacksonDeserializer.deserialize(ByteString(str)) } }
Example 124
Source File: LagomDevModeServiceRegistry.scala From lagom with Apache License 2.0 | 5 votes |
package com.lightbend.lagom.devmode.internal.scaladsl.registry import java.net.URI import akka.NotUsed import akka.util.ByteString import com.lightbend.lagom.devmode.internal.registry.ServiceRegistryClient import com.lightbend.lagom.scaladsl.api.deser.MessageSerializer.NegotiatedDeserializer import com.lightbend.lagom.scaladsl.api.deser.MessageSerializer.NegotiatedSerializer import com.lightbend.lagom.scaladsl.api.deser.MessageSerializer import com.lightbend.lagom.scaladsl.api.deser.StrictMessageSerializer import com.lightbend.lagom.scaladsl.api.transport.MessageProtocol import com.lightbend.lagom.scaladsl.api.transport.Method import com.lightbend.lagom.scaladsl.api.Descriptor import com.lightbend.lagom.scaladsl.api.Service import com.lightbend.lagom.scaladsl.api.ServiceAcl import com.lightbend.lagom.scaladsl.api.ServiceCall import play.api.libs.functional.syntax._ import play.api.libs.json._ import scala.collection.immutable import scala.collection.immutable.Seq trait ServiceRegistry extends Service { def register(name: String): ServiceCall[ServiceRegistryService, NotUsed] def unregister(name: String): ServiceCall[NotUsed, NotUsed] def lookup(name: String, portName: Option[String]): ServiceCall[NotUsed, URI] def registeredServices: ServiceCall[NotUsed, immutable.Seq[RegisteredService]] import Service._ import ServiceRegistry._ def descriptor: Descriptor = { named(ServiceRegistryClient.ServiceName) .withCalls( restCall(Method.PUT, "/services/:id", register _), restCall(Method.DELETE, "/services/:id", this.unregister _), restCall(Method.GET, "/services/:id?portName", lookup _), pathCall("/services", registeredServices) ) .withLocatableService(false) } } object ServiceRegistry { implicit val uriMessageSerializer: MessageSerializer[URI, ByteString] = new StrictMessageSerializer[URI] { private val serializer = new NegotiatedSerializer[URI, ByteString] { override def serialize(message: URI): ByteString = ByteString.fromString(message.toString, "utf-8") override val protocol: MessageProtocol = MessageProtocol.empty.withContentType("text/plain").withCharset("utf-8") } override def serializerForRequest = serializer override def serializerForResponse(acceptedMessageProtocols: Seq[MessageProtocol]) = serializer override def deserializer(protocol: MessageProtocol): NegotiatedDeserializer[URI, ByteString] = new NegotiatedDeserializer[URI, ByteString] { override def deserialize(wire: ByteString) = URI.create(wire.decodeString(protocol.charset.getOrElse("utf-8"))) } } } case class RegisteredService(name: String, url: URI, portName: Option[String]) object RegisteredService { import UriFormat.uriFormat implicit val format: Format[RegisteredService] = Json.format[RegisteredService] } case class ServiceRegistryService(uris: immutable.Seq[URI], acls: immutable.Seq[ServiceAcl]) object ServiceRegistryService { def apply(uri: URI, acls: immutable.Seq[ServiceAcl]): ServiceRegistryService = ServiceRegistryService(Seq(uri), acls) import UriFormat.uriFormat implicit val methodFormat: Format[Method] = (__ \ "name").format[String].inmap(new Method(_), _.name) implicit val serviceAclFormat: Format[ServiceAcl] = (__ \ "method") .formatNullable[Method] .and((__ \ "pathRegex").formatNullable[String]) .apply(ServiceAcl.apply, acl => (acl.method, acl.pathRegex)) implicit val format: Format[ServiceRegistryService] = Json.format[ServiceRegistryService] } object UriFormat { implicit val uriFormat: Format[URI] = implicitly[Format[String]].inmap(URI.create, _.toString) }
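The URI serializer above pins the charset explicitly with ByteString.fromString(_, "utf-8") and mirrors it in decodeString; keeping the two sides in sync is the whole contract. A minimal sketch of that round trip:

import akka.util.ByteString

object CharsetRoundTripSketch extends App {
  val uri = "https://localhost:123/asdf"
  val onTheWire = ByteString.fromString(uri, "utf-8") // encode with an explicit charset
  println(onTheWire.decodeString("utf-8") == uri)     // true: same charset both ways
}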
Example 125
Source File: MessageSerializerSpec.scala From lagom with Apache License 2.0 | 5 votes |
package com.lightbend.lagom.scaladsl.api.deser

import akka.util.ByteString
import com.lightbend.lagom.scaladsl.api.deser.MessageSerializer._
import com.lightbend.lagom.scaladsl.api.transport.DeserializationException
import com.lightbend.lagom.scaladsl.api.transport.MessageProtocol
import play.api.libs.json._

import scala.collection.immutable.Seq
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class MessageSerializerSpec extends AnyWordSpec with Matchers {

  case class Dummy(prop: Option[String])

  "ByteString-to-PlayJson (via JsValueMessageSerializer)" should {
    "deserialize empty ByteString as JSON null" in {
      val deserializer = JsValueMessageSerializer.deserializer(MessageProtocol.empty)
      deserializer.deserialize(ByteString.empty) shouldBe JsNull
    }
  }

  implicit def optionFormat[T: Format]: Format[Option[T]] = new Format[Option[T]] {
    override def reads(json: JsValue): JsResult[Option[T]] = json.validateOpt[T]

    override def writes(o: Option[T]): JsValue = o match {
      case Some(t) => implicitly[Writes[T]].writes(t)
      case None    => JsNull
    }
  }

  "PlayJson-to-RequestPayload formatters" should {
    implicit val format: Format[Dummy] = Json.format

    "fail when converting JsNull into T" in {
      intercept[JsResultException] {
        JsNull.as[Dummy]
      }
    }

    "convert JSON null to None by default" in {
      val dummy = JsNull.as[Option[Dummy]]
      dummy shouldBe None
    }
  }

  "ByteString-to-RequestPayload (for JSON payloads, using jsValueFormatMessageSerializer)" should {
    "deserialize empty ByteStrings to Option[T] as None" in {
      val serializer = jsValueFormatMessageSerializer(JsValueMessageSerializer, optionFormat[String])
      val out = serializer.deserializer(MessageProtocol.empty).deserialize(ByteString.empty)
      out shouldBe None
    }

    "fail to deserialize an empty ByteString to Dummy(prop: Option[T])" in {
      val format: Format[Dummy] = Json.format
      val serializer = jsValueFormatMessageSerializer(JsValueMessageSerializer, format)
      intercept[DeserializationException] {
        serializer.deserializer(MessageProtocol.empty).deserialize(ByteString.empty)
      }
    }
  }

  "ByteString-to-ByteString" should {
    "serialize any request of type ByteString to the same ByteString" in {
      val serializer = NoopMessageSerializer.serializerForRequest
      val out = serializer.serialize(ByteString("sample string"))
      out shouldBe ByteString("sample string")
    }

    "serialize any response of type ByteString to the same ByteString" in {
      val serializer = NoopMessageSerializer.serializerForResponse(Seq(MessageProtocol.empty))
      val out = serializer.serialize(ByteString("sample string"))
      out shouldBe ByteString("sample string")
    }

    "deserialize any ByteString to the same ByteString" in {
      val deserializer = NoopMessageSerializer.deserializer(MessageProtocol.empty)
      val out = deserializer.deserialize(ByteString("sample string"))
      out shouldBe ByteString("sample string")
    }
  }
}
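The None-by-default behavior the spec relies on comes straight from play-json's validateOpt; a minimal sketch of it outside the test harness:

import play.api.libs.json._

// JsNull.validateOpt[T] succeeds with None, which is what lets an empty
// JSON body deserialize to Option[T] = None in the spec above.
val res: JsResult[Option[String]] = JsNull.validateOpt[String]
assert(res.isSuccess && res.get.isEmpty)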
Example 126
Source File: QrCodesBot.scala From telegram with Apache License 2.0 | 5 votes |
import java.net.URLEncoder import akka.http.scaladsl.Http import akka.http.scaladsl.model.{HttpRequest, Uri} import akka.http.scaladsl.unmarshalling.Unmarshal import akka.util.ByteString import com.bot4s.telegram.api.declarative.Commands import com.bot4s.telegram.api._ import com.bot4s.telegram.future.Polling import com.bot4s.telegram.methods._ import com.bot4s.telegram.models.AkkaInputFile import scala.concurrent.Future class QrCodesBot(token: String) extends AkkaExampleBot(token) with Polling with Commands[Future] with ChatActions[Future] { // Multiple variants onCommand('qr | 'qrcode | 'qr_code) { implicit msg => withArgs { args => val url = "https://api.qrserver.com/v1/create-qr-code/?data=" + URLEncoder.encode(args mkString " ", "UTF-8") for { response <- Http().singleRequest(HttpRequest(uri = Uri(url))) if response.status.isSuccess() bytes <- Unmarshal(response).to[ByteString] photo = AkkaInputFile("qrcode.png", bytes) _ <- uploadingPhoto // Hint the user _ <- request(SendPhoto(msg.source, photo)) } yield () } } }
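Unmarshal(response).to[ByteString] above collapses the chunked response body into a single ByteString. A minimal sketch of the same fold done by hand (the URL is a placeholder):

import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.http.scaladsl.model.HttpRequest
import akka.stream.ActorMaterializer
import akka.util.ByteString
import scala.concurrent.Future

implicit val system = ActorSystem()
implicit val mat = ActorMaterializer()
import system.dispatcher

// Fold every chunk of the response body into one ByteString.
val bytes: Future[ByteString] =
  Http().singleRequest(HttpRequest(uri = "https://example.com"))
    .flatMap(_.entity.dataBytes.runFold(ByteString.empty)(_ ++ _))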
Example 127
Source File: VoiceFileBot.scala From telegram with Apache License 2.0 | 5 votes |
import akka.http.scaladsl.Http
import akka.http.scaladsl.model.{HttpRequest, Uri}
import akka.http.scaladsl.unmarshalling.Unmarshal
import akka.util.ByteString
import cats.instances.future._
import cats.syntax.functor._
import com.bot4s.telegram.api.declarative.Commands
import com.bot4s.telegram.future.Polling
import com.bot4s.telegram.methods._

import scala.concurrent.Future
import scala.util.{Failure, Success}

class VoiceFileBot(token: String) extends AkkaExampleBot(token)
  with Polling
  with Commands[Future] {

  onMessage { implicit msg =>
    using(_.voice) { voice =>
      request(GetFile(voice.fileId)).andThen({
        case Success(file) =>
          file.filePath match {
            case Some(filePath) =>
              // See https://core.telegram.org/bots/api#getfile
              val url = s"https://api.telegram.org/file/bot${token}/${filePath}"
              for {
                res <- Http().singleRequest(HttpRequest(uri = Uri(url)))
                if res.status.isSuccess()
                bytes <- Unmarshal(res).to[ByteString]
                _ <- reply(s"File with ${bytes.size} bytes received.")
              } yield ()
            case None => reply("No file_path was returned")
          }
        case Failure(e) =>
          logger.error("Exception: " + e) // poor man's logging
      }).void
    }
  }
}
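Once the bytes are in hand, persisting them is one more stream stage. A sketch, assuming an implicit materializer is in scope; the target path is hypothetical:

import java.nio.file.Paths
import akka.stream.scaladsl.{FileIO, Source}
import akka.util.ByteString

// Write the downloaded payload to disk as a single-element stream.
def save(bytes: ByteString) =
  Source.single(bytes).runWith(FileIO.toPath(Paths.get("/tmp/voice.oga")))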
Example 128
Source File: AkkaHttpMarshallingSuite.scala From telegram with Apache License 2.0 | 5 votes |
package com.bot4s.telegram.marshalling import akka.http.scaladsl.marshalling.Marshal import akka.http.scaladsl.model.RequestEntity import akka.http.scaladsl.server.Directives._ import akka.http.scaladsl.testkit.ScalatestRouteTest import akka.util.ByteString import com.bot4s.telegram.api.TestUtils import com.bot4s.telegram.marshalling.AkkaHttpMarshalling.underscore_case_marshaller import com.bot4s.telegram.methods.SendDocument import com.bot4s.telegram.models.{AkkaInputFile, InputFile} import org.scalatest.{FunSuite, Matchers} class AkkaHttpMarshallingSuite extends FunSuite with ScalatestRouteTest with Matchers with TestUtils { test("Correctly serialize top-level string members in Akka multipart requests") { val captionWithLineBreak = "this is a line\nand then\t another line" val channelId = "this_is_a_channel" val fileId = "and_a_file_id" val entity = SendDocument(channelId, InputFile(fileId), caption = Some(captionWithLineBreak)) Post("/", Marshal(entity).to[RequestEntity]) ~> { formFields(('caption, 'chat_id, 'document)) { (caption, chat_id, document) => complete(caption + chat_id + document) } } ~> check { responseAs[String] shouldEqual (captionWithLineBreak + channelId + fileId) } } test("Handles AkkaInputFile") { val channelId = "this_is_a_channel" val content = "file content" val entity = SendDocument(channelId, AkkaInputFile("Pepe", ByteString(content))) Post("/", Marshal(entity).to[RequestEntity]) ~> { formFields('document) { document => complete(document) } } ~> check { responseAs[ByteString] shouldEqual ByteString(content) } } }
Example 129
Source File: AkkaBodyUnmarshaller.scala From chronicler with Apache License 2.0 | 5 votes |
package com.github.fsanaulla.chronicler.akka.shared.handlers import akka.http.scaladsl.coding.Gzip import akka.http.scaladsl.model.{HttpCharsets, HttpEntity} import akka.http.scaladsl.unmarshalling.Unmarshaller import akka.stream.Materializer import akka.util.ByteString import com.github.fsanaulla.chronicler.core.alias.ErrorOr import com.github.fsanaulla.chronicler.core.jawn.RichJParser import org.typelevel.jawn.ast.{JParser, JValue} import scala.concurrent.{ExecutionContext, Future} final class AkkaBodyUnmarshaller(compressed: Boolean) extends Unmarshaller[HttpEntity, ErrorOr[JValue]] { override def apply( value: HttpEntity )(implicit ec: ExecutionContext, mat: Materializer ): Future[ErrorOr[JValue]] = { // get encoding from response content type, otherwise use UTF-8 as default val encoding = value.contentType.charsetOption .getOrElse(HttpCharsets.`UTF-8`) .nioCharset() val srcBody = if (compressed) value.dataBytes.via(Gzip.decoderFlow) else value.dataBytes srcBody .runFold(ByteString.empty)(_ ++ _) .map(_.decodeString(encoding)) .map(JParser.parseFromStringEither) } }
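The charset lookup above matters because decodeString is what turns raw bytes back into text; decoding with the wrong charset silently produces mojibake. A small sketch:

import akka.util.ByteString

val body = ByteString.fromString("π ≈ 3.14", "UTF-8")
// The unmarshaller above reads the charset from the Content-Type first
// precisely to avoid this mismatch.
assert(body.decodeString("UTF-8") == "π ≈ 3.14")
assert(body.decodeString("ISO-8859-1") != "π ≈ 3.14")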
Example 130
Source File: AkkaResponseHandler.scala From chronicler with Apache License 2.0 | 5 votes |
package com.github.fsanaulla.chronicler.akka.shared.handlers import akka.http.scaladsl.model.HttpResponse import akka.stream.scaladsl.{Framing, Source} import akka.util.ByteString import com.github.fsanaulla.chronicler.akka.shared.implicits._ import com.github.fsanaulla.chronicler.core.alias.ErrorOr import com.github.fsanaulla.chronicler.core.components.{JsonHandler, ResponseHandler} import com.github.fsanaulla.chronicler.core.either import com.github.fsanaulla.chronicler.core.either.EitherOps import com.github.fsanaulla.chronicler.core.jawn.RichJParser import com.github.fsanaulla.chronicler.core.model.{InfluxReader, ParsingException} import org.typelevel.jawn.ast.{JArray, JParser} import scala.concurrent.{ExecutionContext, Future} import scala.reflect.ClassTag class AkkaResponseHandler( jsonHandler: JsonHandler[Future, HttpResponse] )(implicit ex: ExecutionContext) extends ResponseHandler[Future, HttpResponse](jsonHandler) { final def queryChunkedResultJson(response: HttpResponse): Source[ErrorOr[Array[JArray]], Any] = { response.entity.dataBytes .via(Framing.delimiter(ByteString("\n"), Int.MaxValue)) .map(_.utf8String) .map(JParser.parseFromStringEither) .map( _.flatMapRight( jv => jsonHandler .queryResult(jv) .toRight[Throwable](new ParsingException("Can't extract query result from response")) ) ) } final def queryChunkedResult[T: ClassTag]( response: HttpResponse )(implicit rd: InfluxReader[T] ): Source[ErrorOr[Array[T]], Any] = { queryChunkedResultJson(response) .map(_.flatMapRight { arr => either.array(arr.map(rd.read)) }) } }
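The line framing above is the standard Akka Streams recipe for newline-delimited payloads: chunk boundaries need not align with line boundaries, and Framing re-slices them. A self-contained sketch:

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{Framing, Sink, Source}
import akka.util.ByteString

implicit val system = ActorSystem()
implicit val mat = ActorMaterializer()

val lines = Source(List(ByteString("a\nb"), ByteString("c\nd\n")))
  .via(Framing.delimiter(ByteString("\n"), maximumFrameLength = 1024, allowTruncation = true))
  .map(_.utf8String)
  .runWith(Sink.seq) // completes with Seq("a", "bc", "d")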
Example 131
Source File: CallServiceSpec.scala From microservice-dependency-graph with MIT License | 5 votes |
package io.github.rlazoti.servicestats.services

import akka.http.scaladsl.model._
import akka.http.scaladsl.model.ContentTypes._
import akka.http.scaladsl.testkit.ScalatestRouteTest
import akka.util.ByteString
import org.scalatest._

class CallServiceSpec extends WordSpec with Matchers with ScalatestRouteTest {

  val API = new Object with CallService

  "CallService API" should {

    "Send POST to /calling should add the service call" in {
      val jsonRequest = ByteString(
        s"""
           |{
           |"serviceCaller":"checkout",
           |"serviceCalled":"users",
           |"endpointCalled":"findById"
           |}
        """.stripMargin)

      val postRequest = HttpRequest(
        HttpMethods.POST,
        uri = "/calling",
        entity = HttpEntity(MediaTypes.`application/json`, jsonRequest))

      postRequest ~> API.routes ~> check {
        status.isSuccess() shouldEqual true
      }
    }

    // This test needs a Redis instance running...
    "Send GET to /graphdata?time=01.01 should return the graph nodes and links for that time" in {
      val getRequest = HttpRequest(HttpMethods.GET, uri = "/graphdata?time=01.01")

      getRequest ~> API.routes ~> check {
        status.isSuccess() shouldEqual true
        contentType shouldBe `application/json`
      }
    }

    "Send GET to /graphdata should be rejected by a missing parameter 'time'" in {
      val getRequest = HttpRequest(HttpMethods.GET, uri = "/graphdata")

      getRequest ~> API.routes ~> check {
        handled shouldBe false
        rejections.size shouldBe 1
      }
    }
  }
}
Example 132
Source File: DecoderActor.scala From scredis with Apache License 2.0 | 5 votes |
package scredis.io import akka.actor.{Actor, ActorLogging} import akka.util.ByteString import scredis.PubSubMessage.Message import scredis.exceptions.RedisProtocolException import scredis.protocol.{ErrorResponse, Protocol, Request} import scredis.{PubSubMessage, Subscription} import scala.concurrent.{ExecutionContext, Future} class DecoderActor(subscriptionOption: Option[Subscription]) extends Actor with ActorLogging { import DecoderActor._ def receive: Receive = { case Partition(data, requests, skip) => val buffer = data.asByteBuffer for (i <- 1 to skip) { try { Protocol.decode(buffer) } catch { case e: Throwable => log.error("Could not decode response", e) } } while (requests.hasNext) { val request = requests.next() try { val response = Protocol.decode(buffer) request.complete(response) } catch { case e: Throwable => log.error("Could not decode response", e) request.failure(RedisProtocolException("Could not decode response", e)) } } case SubscribePartition(data) => val buffer = data.asByteBuffer while (buffer.remaining > 0) { try { val result = Protocol.decodePubSubResponse(Protocol.decode(buffer)) result match { case Left(ErrorResponse(message)) => sender ! SubscriberListenerActor.Fail(message) case Right(msgEither) => msgEither match { case Right(m: PubSubMessage.Subscribe) => sender ! SubscriberListenerActor.Complete(m) case Right(m: PubSubMessage.PSubscribe) => sender ! SubscriberListenerActor.Complete(m) case Right(m: PubSubMessage.Unsubscribe) => sender ! SubscriberListenerActor.Complete(m) case Right(m: PubSubMessage.PUnsubscribe) => sender ! SubscriberListenerActor.Complete(m) case Right(m: PubSubMessage.Error) => sender ! SubscriberListenerActor.Complete(m) case Right(m: PubSubMessage.Message) => case Right(m: PubSubMessage.PMessage) => case Left(value) => sender ! SubscriberListenerActor.Confirm(value) } } result match { case Right(Right(message)) => subscriptionOption match { case Some(subscription) => Future {subscription.apply(message)}(ExecutionContext.global) case None => log.error("Received SubscribePartition without any subscription") } case _ => } } catch { case e: Throwable => val msg = data.decodeString("UTF-8").replace("\r\n", "\\r\\n") log.error(s"Could not decode PubSubMessage: $msg", e) } } case x => log.error(s"Received unexpected message: $x") } } object DecoderActor { case class Partition(data: ByteString, requests: Iterator[Request[_]], skip: Int) case class SubscribePartition(data: ByteString) }
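data.asByteBuffer above exposes the ByteString to the java.nio-based protocol decoder; for a non-fragmented ByteString this is a read-only view with no copy. A tiny sketch:

import akka.util.ByteString

val data = ByteString("+PONG\r\n")
val buffer = data.asByteBuffer // read-only; copies only if the ByteString is fragmented
assert(buffer.remaining == data.length)
assert(buffer.get() == '+'.toByte)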
Example 133
Source File: UpdateMessage.scala From tepkin with Apache License 2.0 | 5 votes |
package net.fehmicansaglam.tepkin.protocol.message import akka.util.ByteString import net.fehmicansaglam.bson.BsonDocument case class UpdateMessage(fullCollectionName: String, selector: BsonDocument, update: BsonDocument, upsert: Boolean = false, multi: Boolean = false) extends Message { override val responseTo: Int = 0 override val opCode: Int = 2001 override def encodeBody: ByteString = { val flags = (if (upsert) 0x00000001 else 0x00000000) | (if (multi) 0x00000002 else 0x00000000) ByteString.newBuilder .putInt(0) // ZERO .putBytes(fullCollectionName.getBytes("utf-8")) .putByte(0) .putInt(flags) .append(selector.encode) .append(update.encode) .result() } }
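ByteStringBuilder's put* methods take an implicit java.nio.ByteOrder, presumably supplied here by the Message trait, since BSON is little-endian. A standalone sketch with the byte order made explicit:

import java.nio.ByteOrder
import akka.util.ByteString

implicit val byteOrder: ByteOrder = ByteOrder.LITTLE_ENDIAN

val encoded: ByteString = ByteString.newBuilder
  .putInt(1)                              // 01 00 00 00 in little-endian
  .putBytes("db.coll".getBytes("utf-8"))  // collection name
  .putByte(0)                             // C-string terminator
  .result()

assert(encoded.length == 4 + 7 + 1)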
Example 134
Source File: BsonDocumentReader.scala From tepkin with Apache License 2.0 | 5 votes |
package net.fehmicansaglam.bson.reader import java.nio.ByteBuffer import akka.util.ByteString import net.fehmicansaglam.bson.BsonDocument import net.fehmicansaglam.bson.element.BsonElement import scala.collection.mutable.ArrayBuffer import scala.util.control.Breaks._ object BsonDocumentReader extends Reader[BsonDocument] { private def readElement(buffer: ByteBuffer, code: Byte): Option[BsonElement] = code match { case 0x01 => BsonDoubleReader.read(buffer) case 0x02 => BsonStringReader.read(buffer) case 0x03 => BsonObjectReader.read(buffer) case 0x04 => BsonArrayReader.read(buffer) case 0x05 => BsonBinaryReader.read(buffer) case 0x07 => BsonObjectIdReader.read(buffer) case 0x08 => BsonBooleanReader.read(buffer) case 0x09 => BsonDateTimeReader.read(buffer) case 0x0A => BsonNullReader.read(buffer) case 0x0B => BsonRegexReader.read(buffer) case 0x10 => BsonIntegerReader.read(buffer) case 0x11 => BsonTimestampReader.read(buffer) case 0x12 => BsonLongReader.read(buffer) } override def read(buffer: ByteBuffer): Option[BsonDocument] = { val elements: ArrayBuffer[Option[BsonElement]] = new ArrayBuffer[Option[BsonElement]] val size = buffer.getInt() breakable { while (buffer.hasRemaining) { val code = buffer.get() if (code != 0x00) { elements += readElement(buffer, code) } else { break } } } Some(BsonDocument(elements.flatten: _*)) } def read(array: Array[Byte]): Option[BsonDocument] = read(ByteBuffer.wrap(array)) }
Example 135
Source File: PostgresExtensionsSpec.scala From akka-stream-extensions with Apache License 2.0 | 5 votes |
package com.mfglabs.stream package extensions.postgres import java.io.File import java.util.concurrent.atomic.AtomicLong import akka.actor.ActorSystem import akka.stream.scaladsl._ import akka.util.ByteString import org.scalatest.time._ import org.scalatest._ import concurrent.ScalaFutures import akka.stream._ import scala.util.Try class PostgresExtensionsSpec extends FlatSpec with Matchers with ScalaFutures with BeforeAndAfterAll with DockerTmpDB { self:DockerTmpDB => implicit val as = ActorSystem() implicit val fm = ActorMaterializer() implicit override val patienceConfig = PatienceConfig(timeout = Span(5, Minutes), interval = Span(5, Millis)) "PgStream" should "stream a file to a Postgres table and stream a sql query from a Postgres table" in { val stmt = conn.createStatement() implicit val pgConn = PgStream.sqlConnAsPgConnUnsafe(conn) implicit val blockingEc = ExecutionContextForBlockingOps(scala.concurrent.ExecutionContext.Implicits.global) stmt.execute( s""" create table public.test_postgres( io_id integer, dsp_name text, advertiser_id integer, campaign_id integer, strategy_id integer, day date, impressions integer, clicks integer, post_view_conversions float8, post_click_conversions float8, media_cost float8, total_ad_cost float8, total_cost float8 ) """ ) val insertTable = "test_postgres(io_id, dsp_name, advertiser_id, campaign_id, strategy_id, day, impressions, " + "clicks, post_view_conversions, post_click_conversions, media_cost, total_ad_cost, total_cost)" val nbLinesInserted = new AtomicLong(0L) val futLines = SourceExt .fromFile(new File(getClass.getResource("/report.csv0000_part_00").getPath), maxChunkSize = 5 * 1024 * 1024) .via(FlowExt.rechunkByteStringBySeparator(ByteString("\n"), maximumChunkBytes = 1 * 1024 * 1024)) .via(PgStream.insertStreamToTable("public", insertTable, Map("DELIMITER" -> "','"), pgVersion = self.version, chunkInsertionConcurrency = 2)) .via(FlowExt.fold(0L)(_ + _)) .map { total => nbLinesInserted.set(total) PgStream.getQueryResultAsStream("select * from public.test_postgres", Map("DELIMITER" -> "','"),pgVersion = self.version) } .flatMapConcat(identity) .via(FlowExt.rechunkByteStringBySize(5 * 1024 * 1024)) .via(FlowExt.rechunkByteStringBySeparator(ByteString("\n"), maximumChunkBytes = 1 * 1024 * 1024)) .map(_.utf8String) .runWith(Sink.seq) val futExpectedLines = SourceExt .fromFile(new File(getClass.getResource("/report.csv0000_part_00").getPath), maxChunkSize = 5 * 1024 * 1024) .via(FlowExt.rechunkByteStringBySeparator(ByteString("\n"), maximumChunkBytes = 1 * 1024 * 1024)) .map(_.utf8String) .runWith(Sink.seq) whenReady(futLines zip futExpectedLines) { case (lines, expectedLines) => lines.length shouldEqual expectedLines.length lines.length shouldEqual nbLinesInserted.get lines.sorted.zip(expectedLines.sorted).foreach { case (line, expectedLine) => line.split(",").map { s => Try(s.toDouble).map(BigDecimal(_).setScale(3, BigDecimal.RoundingMode.HALF_UP).toDouble).getOrElse(s) } shouldEqual expectedLine.split(",").map { s => Try(s.toDouble).map(BigDecimal(_).setScale(3, BigDecimal.RoundingMode.HALF_UP).toDouble).getOrElse(s) } } stmt.close() } } }
Example 136
Source File: Controller.scala From eclair with Apache License 2.0 | 5 votes |
package fr.acinq.eclair.tor import java.net.InetSocketAddress import akka.actor.{Actor, ActorLogging, OneForOneStrategy, Props, SupervisorStrategy, Terminated} import akka.io.{IO, Tcp} import akka.util.ByteString import scala.concurrent.ExecutionContext class Controller(address: InetSocketAddress, protocolHandlerProps: Props) (implicit ec: ExecutionContext = ExecutionContext.global) extends Actor with ActorLogging { import Controller._ import Tcp._ import context.system IO(Tcp) ! Connect(address) def receive = { case e@CommandFailed(_: Connect) => e.cause match { case Some(ex) => log.error(ex, "Cannot connect") case _ => log.error("Cannot connect") } context stop self case c: Connected => val protocolHandler = context actorOf protocolHandlerProps protocolHandler ! c val connection = sender() connection ! Register(self) context watch connection context become { case data: ByteString => connection ! Write(data) case CommandFailed(w: Write) => // O/S buffer was full protocolHandler ! SendFailed log.error("Tor command failed") case Received(data) => protocolHandler ! data case _: ConnectionClosed => context stop self case Terminated(actor) if actor == connection => context stop self } } // we should not restart a failing tor session override val supervisorStrategy = OneForOneStrategy(loggingEnabled = true) { case _ => SupervisorStrategy.Escalate } } object Controller { def props(address: InetSocketAddress, protocolHandlerProps: Props)(implicit ec: ExecutionContext = ExecutionContext.global) = Props(new Controller(address, protocolHandlerProps)) case object SendFailed }
Example 137
Source File: HttpClient.scala From heimdallr with Apache License 2.0 | 5 votes |
package chat import scala.util.{Failure, Success, Try} import akka.actor._ import akka.http.scaladsl.Http import akka.http.scaladsl.model._ import akka.http.scaladsl.model.headers.RawHeader import akka.stream.ActorMaterializer import akka.util.ByteString import scala.concurrent.{ExecutionContext, Future} import chat.HttpClient._ import UserActor.CustomResponse object HttpClient { case class HttpClientGet(event: String, path : String) case class HttpClientPost(event: String, path : String, token: String, jsonBody: String) case class HttpClientResponseSuccess(event: String, resHttp: HttpResponse, recipient: ActorRef) case class HttpClientResponseFailure(event: String, reason: String, recipient: ActorRef) } class HttpClient()(implicit system: ActorSystem, mat: ActorMaterializer, dispatcher: ExecutionContext) extends Actor with ActorLogging { def pipeToSelf(event: String, future: Future[HttpResponse], recipient: ActorRef): Future[HttpResponse] = { future andThen { case Success(r) => self ! HttpClientResponseSuccess(event, r, recipient) case Failure(f) => self ! HttpClientResponseFailure(event, f.toString, recipient) } } def post(event: String, path: String, token: String, jsonBody: String, recipient: ActorRef) = { val objectEntity = HttpEntity(ContentTypes.`application/json`, jsonBody) val responseFuture: Future[HttpResponse] = Http().singleRequest(HttpRequest( method = HttpMethods.POST, uri = path, entity = objectEntity ).withHeaders( RawHeader("Authorization", "Token " + token) ) ) pipeToSelf(event, responseFuture, recipient) } def get(event: String, path: String, recipient:ActorRef) = { val responseFuture: Future[HttpResponse] = Http().singleRequest(HttpRequest( method = HttpMethods.GET, uri = path ) ) pipeToSelf(event, responseFuture, recipient) } def receive = { case HttpClientGet(event, path) => get(event, path, sender) case HttpClientPost(event, path, token, jsonBody) => post(event, path, token, jsonBody, sender) // connection success case HttpClientResponseSuccess(event, resp, recipient) => resp match { case HttpResponse(StatusCodes.OK, headers, entity, _) => entity.dataBytes.runFold(ByteString(""))(_ ++ _).foreach { body => log.info("Got response, body: " + body.utf8String) recipient ! CustomResponse(event, 200, body.utf8String) } case resp @ HttpResponse(code, _, _, _) => log.info("Request failed, response code: " + code) resp.discardEntityBytes() recipient ! CustomResponse(event, code.intValue(), s"Request failed, response code: $code") } // connection failure case HttpClientResponseFailure(event, resp, recipient) => log.info("Request failed, reason: " + resp) recipient ! CustomResponse(event, 599, s"Request failed, response code: ${resp}") case x => log.info(s"HttpClient Request failed: ${x}") } override def preStart(): Unit = { } override def preRestart(reason: Throwable, message: Option[Any]): Unit = { preStart() } override def postRestart(reason: Throwable): Unit = { log.info( reason.toString ) } override def postStop(): Unit = { } }
Example 138
Source File: EncodingBenchmark.scala From scala-commons with MIT License | 5 votes |
package com.avsystem.commons package redis import akka.util.{ByteString, ByteStringBuilder} import com.avsystem.commons.redis.protocol.{ArrayMsg, BulkStringMsg, IntegerMsg, NullBulkStringMsg, RedisMsg, SimpleStringMsg} import org.openjdk.jmh.annotations._ @Warmup(iterations = 5) @Measurement(iterations = 20) @Fork(1) @Threads(1) @BenchmarkMode(Array(Mode.Throughput)) @State(Scope.Benchmark) class EncodingBenchmark { private val bsb = new ByteStringBuilder final val msg = ArrayMsg(IndexedSeq( IntegerMsg(12345342323L), IntegerMsg(1231), // BulkStringMsg(ByteString("jkalsjdkflajsdkfhlkasd")), // SimpleStringMsg(ByteString("sjakdlfjaksdhfjakshd")), NullBulkStringMsg )) @Benchmark def encodeBenchmark() = { RedisMsg.encode(msg, bsb) bsb.clear() bsb } } object EncodingBenchmark { def main(args: Array[String]): Unit = { val b = new EncodingBenchmark while (true) { b.encodeBenchmark() } } }
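The benchmark leans on the fact that a ByteStringBuilder can be cleared and reused between iterations instead of reallocating. A minimal sketch of that lifecycle:

import akka.util.ByteString

val bsb = ByteString.newBuilder
bsb.putBytes("hello".getBytes("utf-8"))
val first = bsb.result()
bsb.clear() // reset the builder without creating a new one
bsb.putBytes("world".getBytes("utf-8"))
assert(first.utf8String == "hello" && bsb.result().utf8String == "world")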
Example 139
Source File: connection.scala From scala-commons with MIT License | 5 votes |
package com.avsystem.commons package redis.commands import akka.util.ByteString import com.avsystem.commons.redis._ import com.avsystem.commons.redis.commands.ReplyDecoders._ trait NodeConnectionApi extends ApiSubset { def select(index: Int): Result[Unit] = execute(new Select(index)) private final class Auth(username: Opt[String], password: String) extends RedisUnitCommand with ConnectionCommand { val encoded: Encoded = encoder("AUTH").optAdd(username).add(password).result } private object Quit extends RedisUnitCommand with ConnectionCommand { val encoded: Encoded = encoder("QUIT").result } private final class Select(index: Int) extends RedisUnitCommand with ConnectionCommand { val encoded: Encoded = encoder("SELECT").add(index).result } }
Example 140
Source File: pubsub.scala From scala-commons with MIT License | 5 votes |
package com.avsystem.commons.redis.commands import akka.util.ByteString import com.avsystem.commons.redis.{RawCommandPack, RawCommandPacks, RedisBatch, UnsafeCommand} import com.avsystem.commons.redis.protocol.{BulkStringMsg, ValidRedisMsg} private[redis] sealed abstract class PubSubCommand extends UnsafeCommand private[redis] final class Subscribe(channels: Iterable[String]) extends PubSubCommand { require(channels.nonEmpty, "at least one channel must be specified") val encoded: Encoded = encoder("SUBSCRIBE").add(channels).result } private[redis] final class Psubscribe(patterns: Iterable[String]) extends PubSubCommand { require(patterns.nonEmpty, "at least one pattern must be specified") val encoded: Encoded = encoder("PSUBSCRIBE").add(patterns).result } private[redis] final class Unsubscribe(channels: Iterable[String]) extends PubSubCommand { require(channels.nonEmpty, "at least one channel must be specified") val encoded: Encoded = encoder("UNSUBSCRIBE").add(channels).result } private[redis] final class Punsubscribe(patterns: Iterable[String]) extends PubSubCommand { require(patterns.nonEmpty, "at least one pattern must be specified") val encoded: Encoded = encoder("PUNSUBSCRIBE").add(patterns).result } sealed abstract class PubSubEvent object PubSubEvent { final case class Subscribe(channel: String, subscribed: Int) extends PubSubEvent final case class Psubscribe(pattern: String, subscribed: Int) extends PubSubEvent final case class Unsubscribe(channel: String, subscribed: Int) extends PubSubEvent final case class Punsubscribe(pattern: String, subscribed: Int) extends PubSubEvent final case class Message(channel: String, message: ValidRedisMsg) extends PubSubEvent final case class Pmessage(pattern: String, channel: String, message: ValidRedisMsg) extends PubSubEvent case object ConnectionLost extends PubSubEvent final val MessageStr = BulkStringMsg(ByteString("message")) final val PmessageStr = BulkStringMsg(ByteString("pmessage")) final val SubscribeStr = BulkStringMsg(ByteString("subscribe")) final val PsubscribeStr = BulkStringMsg(ByteString("psubscribe")) final val UnsubscribeStr = BulkStringMsg(ByteString("unsubscribe")) final val PunsubscribeStr = BulkStringMsg(ByteString("punsubscribe")) }
Example 141
Source File: Hash.scala From scala-commons with MIT License | 5 votes |
package com.avsystem.commons package redis import akka.util.ByteString object Hash { val TotalSlots = 16384 private[this] val Crc16Tab = Array[Int]( 0x0000, 0x1021, 0x2042, 0x3063, 0x4084, 0x50A5, 0x60C6, 0x70E7, 0x8108, 0x9129, 0xA14A, 0xB16B, 0xC18C, 0xD1AD, 0xE1CE, 0xF1EF, 0x1231, 0x0210, 0x3273, 0x2252, 0x52B5, 0x4294, 0x72F7, 0x62D6, 0x9339, 0x8318, 0xB37B, 0xA35A, 0xD3BD, 0xC39C, 0xF3FF, 0xE3DE, 0x2462, 0x3443, 0x0420, 0x1401, 0x64E6, 0x74C7, 0x44A4, 0x5485, 0xA56A, 0xB54B, 0x8528, 0x9509, 0xE5EE, 0xF5CF, 0xC5AC, 0xD58D, 0x3653, 0x2672, 0x1611, 0x0630, 0x76D7, 0x66F6, 0x5695, 0x46B4, 0xB75B, 0xA77A, 0x9719, 0x8738, 0xF7DF, 0xE7FE, 0xD79D, 0xC7BC, 0x48C4, 0x58E5, 0x6886, 0x78A7, 0x0840, 0x1861, 0x2802, 0x3823, 0xC9CC, 0xD9ED, 0xE98E, 0xF9AF, 0x8948, 0x9969, 0xA90A, 0xB92B, 0x5AF5, 0x4AD4, 0x7AB7, 0x6A96, 0x1A71, 0x0A50, 0x3A33, 0x2A12, 0xDBFD, 0xCBDC, 0xFBBF, 0xEB9E, 0x9B79, 0x8B58, 0xBB3B, 0xAB1A, 0x6CA6, 0x7C87, 0x4CE4, 0x5CC5, 0x2C22, 0x3C03, 0x0C60, 0x1C41, 0xEDAE, 0xFD8F, 0xCDEC, 0xDDCD, 0xAD2A, 0xBD0B, 0x8D68, 0x9D49, 0x7E97, 0x6EB6, 0x5ED5, 0x4EF4, 0x3E13, 0x2E32, 0x1E51, 0x0E70, 0xFF9F, 0xEFBE, 0xDFDD, 0xCFFC, 0xBF1B, 0xAF3A, 0x9F59, 0x8F78, 0x9188, 0x81A9, 0xB1CA, 0xA1EB, 0xD10C, 0xC12D, 0xF14E, 0xE16F, 0x1080, 0x00A1, 0x30C2, 0x20E3, 0x5004, 0x4025, 0x7046, 0x6067, 0x83B9, 0x9398, 0xA3FB, 0xB3DA, 0xC33D, 0xD31C, 0xE37F, 0xF35E, 0x02B1, 0x1290, 0x22F3, 0x32D2, 0x4235, 0x5214, 0x6277, 0x7256, 0xB5EA, 0xA5CB, 0x95A8, 0x8589, 0xF56E, 0xE54F, 0xD52C, 0xC50D, 0x34E2, 0x24C3, 0x14A0, 0x0481, 0x7466, 0x6447, 0x5424, 0x4405, 0xA7DB, 0xB7FA, 0x8799, 0x97B8, 0xE75F, 0xF77E, 0xC71D, 0xD73C, 0x26D3, 0x36F2, 0x0691, 0x16B0, 0x6657, 0x7676, 0x4615, 0x5634, 0xD94C, 0xC96D, 0xF90E, 0xE92F, 0x99C8, 0x89E9, 0xB98A, 0xA9AB, 0x5844, 0x4865, 0x7806, 0x6827, 0x18C0, 0x08E1, 0x3882, 0x28A3, 0xCB7D, 0xDB5C, 0xEB3F, 0xFB1E, 0x8BF9, 0x9BD8, 0xABBB, 0xBB9A, 0x4A75, 0x5A54, 0x6A37, 0x7A16, 0x0AF1, 0x1AD0, 0x2AB3, 0x3A92, 0xFD2E, 0xED0F, 0xDD6C, 0xCD4D, 0xBDAA, 0xAD8B, 0x9DE8, 0x8DC9, 0x7C26, 0x6C07, 0x5C64, 0x4C45, 0x3CA2, 0x2C83, 0x1CE0, 0x0CC1, 0xEF1F, 0xFF3E, 0xCF5D, 0xDF7C, 0xAF9B, 0xBFBA, 0x8FD9, 0x9FF8, 0x6E17, 0x7E36, 0x4E55, 0x5E74, 0x2E93, 0x3EB2, 0x0ED1, 0x1EF0 ) def slot(key: ByteString): Int = { val tagStart = key.indexOf('{') val bytes = if (tagStart >= 0) { val tagEnd = key.indexOf('}', tagStart + 1) if (tagEnd > tagStart + 1) key.iterator.slice(tagStart + 1, tagEnd) else key.iterator } else key.iterator var crc = 0 while (bytes.hasNext) { crc = ((crc << 8) ^ Crc16Tab(((crc >> 8) ^ bytes.next()) & 0x00FF)) & 0xFFFF } crc & 0x3FFF } }
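The '{...}' handling in slot implements Redis Cluster hash tags: only the tagged substring is hashed, so related keys can be pinned to the same slot. A usage sketch against the object above:

import akka.util.ByteString

// Both keys hash only the "user:1" tag, so they land in the same slot.
val a = Hash.slot(ByteString("{user:1}.name"))
val b = Hash.slot(ByteString("{user:1}.age"))
assert(a == b && a >= 0 && a < Hash.TotalSlots)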
Example 142
Source File: RedisDataGenCodecRoundtripTest.scala From scala-commons with MIT License | 5 votes |
package com.avsystem.commons package redis import akka.util.ByteString import com.avsystem.commons.serialization.{GenCodecRoundtripTest, Input, Output} class RedisDataGenCodecRoundtripTest extends GenCodecRoundtripTest { type Raw = ByteString def writeToOutput(write: Output => Unit): ByteString = { var result: ByteString = null write(new RedisDataOutput(result = _)) result } def createInput(raw: ByteString): Input = new RedisDataInput(raw) }
Example 143
Source File: ConnectionApiSuite.scala From scala-commons with MIT License | 5 votes |
package com.avsystem.commons package redis.commands import akka.util.ByteString import com.avsystem.commons.redis.exception.ErrorReplyException import com.avsystem.commons.redis.{RedisApi, RedisConnectionCommandsSuite} trait ConnectionApiSuite extends RedisConnectionCommandsSuite { import RedisApi.Batches.StringTyped._ apiTest("ECHO") { echo(ByteString("lol")).assertEquals(ByteString("lol")) } apiTest("PING") { ping.assertEquals(bs"PONG") } apiTest("SELECT") { select(1).get } apiTest("SWAPDB") { swapdb(0, 1).get } } class AuthenticationTest extends RedisConnectionCommandsSuite { override def password: Opt[String] = "hassword".opt import RedisApi.Batches.StringTyped._ test("AUTH") { get("key").intercept[ErrorReplyException] auth("hassword").get get("key").assertEquals(Opt.Empty) } }
Example 144
Source File: RedisMsgTest.scala From scala-commons with MIT License | 5 votes |
package com.avsystem.commons package redis.protocol import akka.util.ByteString import com.avsystem.commons.redis.protocol.RedisMsgScalacheck._ import org.scalacheck.Gen import org.scalatest.funsuite.AnyFunSuite import org.scalatestplus.scalacheck.ScalaCheckPropertyChecks import scala.collection.immutable.VectorBuilder class RedisMsgTest extends AnyFunSuite with ScalaCheckPropertyChecks { def splitAtIndices(repr: ByteString, indices: Seq[Int]): Seq[ByteString] = (indices :+ repr.length).foldLeft((0, Vector.empty[ByteString])) { case ((prevIdx, acc), nextIdx) => (nextIdx, acc :+ repr.slice(prevIdx, nextIdx)) }._2 test("encoded and then decoded messages should be equal to the original messages") { val gen = for { redisMsgs <- Gen.buildableOf[Seq[RedisMsg], RedisMsg](redisProtocolMsgGen) splitPoints <- Gen.buildableOf[Seq[Double], Double](Gen.choose(0.0, 1.0)) } yield (redisMsgs, splitPoints) forAll(gen) { case (redisMsgs, splitPoints) => val repr = RedisMsg.encode(redisMsgs) val splitIndices = splitPoints.map(sp => (sp * (repr.size - 1)).toInt).toSet.toVector.sorted val encodedParts = splitAtIndices(repr, splitIndices) val decoded = new VectorBuilder[RedisMsg] val decoder = new RedisMsg.Decoder encodedParts.foreach(bs => decoder.decodeMore(bs)(decoded += _)) val decodedMsgs = decoded.result() assert(decodedMsgs == redisMsgs) } } test("encoded size") { forAll(redisProtocolMsgGen) { msg => assert(RedisMsg.encode(msg).length == RedisMsg.encodedSize(msg)) } } test("simple string encode") { assert(RedisMsg.encode(SimpleStringMsg("asdf")).utf8String == "+asdf\r\n") } test("error encode") { assert(RedisMsg.encode(ErrorMsg("asdf")).utf8String == "-asdf\r\n") } test("bulk string encode") { assert(RedisMsg.encode(BulkStringMsg(ByteString("srsly"))).utf8String == "$5\r\nsrsly\r\n") } test("null bulk string encode") { assert(RedisMsg.encode(NullBulkStringMsg).utf8String == "$-1\r\n") } test("array encode") { assert(RedisMsg.encode(ArrayMsg(Vector(IntegerMsg(42), IntegerMsg(43)))).utf8String == "*2\r\n:42\r\n:43\r\n") } test("null array encode") { assert(RedisMsg.encode(NullArrayMsg).utf8String == "*-1\r\n") } test("integer encode") { assert(RedisMsg.encode(IntegerMsg(-1)).utf8String == ":-1\r\n") } }
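The "$5\r\nsrsly\r\n" expectation follows the RESP wire format directly: '$', the payload length in bytes, CRLF, the payload, CRLF. A hand-rolled sketch of the same framing:

import akka.util.ByteString

val payload = ByteString("srsly")
val encoded = ByteString(s"$$${payload.length}\r\n") ++ payload ++ ByteString("\r\n")
assert(encoded.utf8String == "$5\r\nsrsly\r\n")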
Example 145
Source File: RedisMsgScalacheck.scala From scala-commons with MIT License | 5 votes |
package com.avsystem.commons package redis.protocol import akka.util.ByteString import com.github.ghik.silencer.silent import org.scalacheck.util.Buildable import org.scalacheck.{Arbitrary, Gen, Shrink} object RedisMsgScalacheck { implicit val byteStringBuildable: Buildable[Byte, ByteString] = new Buildable[Byte, ByteString] { def builder: MBuilder[Byte, ByteString] = ByteString.newBuilder } implicit val shrinkSimpleString: Shrink[SimpleStringMsg] = Shrink(ss => Shrink.shrink(ss.string).map(SimpleStringMsg(_))) implicit val shrinkError: Shrink[ErrorMsg] = Shrink(err => Shrink.shrink(err.errorString).map(ErrorMsg(_))) implicit val shrinkBulkString: Shrink[BulkStringMsg] = Shrink(bs => Shrink.shrink(bs.string).map(BulkStringMsg(_))) implicit val shrinkArray: Shrink[ArrayMsg[RedisMsg]] = Shrink(arr => Shrink.shrink(arr.elements).map(ArrayMsg(_))) @silent("deprecated") implicit val shrinkRedisProtocolMsg: Shrink[RedisMsg] = Shrink { case ss: SimpleStringMsg => Shrink.shrink(ss) case er: ErrorMsg => Shrink.shrink(er) case NullBulkStringMsg => Stream.empty case bs: BulkStringMsg => Shrink.shrink(bs) case im: IntegerMsg => Shrink.shrink(im) case NullArrayMsg => Stream.empty case am: ArrayMsg[RedisMsg] => Shrink.shrink(am) } val simpleBytes = (Byte.MinValue.toInt to Byte.MaxValue.toInt) .filter(b => b != '\n'.toInt && b != '\r'.toInt).map(_.toByte) def byteGen = Gen.chooseNum(Byte.MinValue, Byte.MaxValue, '\r'.toByte, '\n'.toByte) def simpleByteGen = Gen.oneOf(simpleBytes) def bytesGen: Gen[ByteString] = Gen.buildableOf[ByteString, Byte](byteGen) def simpleBytesGen: Gen[ByteString] = Gen.buildableOf[ByteString, Byte](simpleByteGen) def simpleStringGen = simpleBytesGen.map(SimpleStringMsg(_)) def errorGen = simpleBytesGen.map(ErrorMsg(_)) def integerGen = Arbitrary.arbitrary[Long].map(IntegerMsg(_)) def bulkStringGen: Gen[RedisMsg] = Gen.sized(s => Gen.choose(-1, s).flatMap { case -1 => Gen.const(NullBulkStringMsg) case n => Gen.buildableOfN[ByteString, Byte](n, byteGen).map(bs => BulkStringMsg(bs)) }) def arrayGen: Gen[RedisMsg] = Gen.sized(s => Gen.choose(-1, s).flatMap { case -1 => Gen.const(NullArrayMsg) case 0 => Gen.const(ArrayMsg(IndexedSeq.empty)) case n => Gen.buildableOfN[IndexedSeq[RedisMsg], RedisMsg](n, Gen.resize(s / n, redisProtocolMsgGen)) .map(els => ArrayMsg(els)) }) def redisProtocolMsgGen: Gen[RedisMsg] = Gen.oneOf(simpleStringGen, errorGen, integerGen, bulkStringGen, arrayGen) }
Example 146
Source File: ApiCustomizationExample.scala From scala-commons with MIT License | 5 votes |
package com.avsystem.commons package redis.examples import akka.actor.ActorSystem import akka.util.ByteString import com.avsystem.commons.redis._ import com.avsystem.commons.serialization.GenCodec object ApiCustomizationExample extends App { implicit val actorSystem: ActorSystem = ActorSystem() val client = new RedisNodeClient // By default, the driver provides textual and binary API variants, e.g. val textualApi = RedisApi.Node.Async.StringTyped(client) val binaryApi = RedisApi.Node.Async.BinaryTyped(client) // As you can see above, both API variants reuse the same client // Textual API uses String as key, hash key and value type def getKeyTextual: Future[Opt[String]] = textualApi.get("key") // Binary API uses ByteString as key, hash key and value type def getKeyBinary: Future[Opt[ByteString]] = binaryApi.get(ByteString("key")) // You can create your own API variants which use different types for keys, hash keys and values // These types must be serializable to binary form (and de-serializable from it), which is expressed by // `RedisDataCodec` typeclass. // By default, `RedisDataCodec` is provided for many simple types and all types which have a `GenCodec` instance. case class Person( name: String, birthYear: Int ) object Person { implicit val codec: GenCodec[Person] = GenCodec.materialize[Person] } val personApi = RedisApi.Node.Async.StringTyped(client).valueType[Person] def storePerson(person: Person): Future[Boolean] = personApi.set(person.name, person) // It is also possible to customize types "on the fly", without having to create completely separate API variant def storePerson2(person: Person): Future[Boolean] = textualApi.valueType[Person].set(person.name, person) }
Example 147
Source File: RedisConnectionClientTest.scala From scala-commons with MIT License | 5 votes |
package com.avsystem.commons package redis import akka.util.ByteString import com.avsystem.commons.redis.config.ConnectionConfig import com.avsystem.commons.redis.exception.{ConnectionFailedException, ConnectionInitializationFailure} import org.scalatest.concurrent.ScalaFutures import org.scalatest.funsuite.AnyFunSuite import org.scalatest.matchers.should.Matchers class RedisConnectionClientTest extends AnyFunSuite with Matchers with ScalaFutures with UsesActorSystem with UsesRedisServer with ByteStringInterpolation { def createClient(initCommands: RedisBatch[Any]): RedisConnectionClient = new RedisConnectionClient(address, config = ConnectionConfig(initCommands)) test("client initialization test") { import RedisApi.Batches.StringTyped._ val client = createClient(select(0) *> clientSetname("name") *> ping) val f1 = client.executeBatch(echo(ByteString("LOL1"))) val f2 = client.executeBatch(echo(ByteString("LOL2"))) val f3 = client.executeBatch(clientGetname) client.initialized.futureValue shouldBe client f1.futureValue shouldBe ByteString("LOL1") f2.futureValue shouldBe ByteString("LOL2") f3.futureValue shouldBe "name".opt } test("client connection failure test") { import RedisApi.Batches.StringTyped._ val client = new RedisConnectionClient(NodeAddress(port = 63498)) val f1 = client.executeBatch(echo(ByteString("LOL1"))) val f2 = client.executeBatch(echo(ByteString("LOL2"))) client.initialized.failed.futureValue shouldBe a[ConnectionFailedException] f1.failed.futureValue shouldBe a[ConnectionFailedException] f2.failed.futureValue shouldBe a[ConnectionFailedException] } test("client initialization failure test") { import RedisApi.Batches.StringTyped._ val client = createClient(clusterInfo) val f1 = client.executeBatch(echo(ByteString("LOL1"))) val f2 = client.executeBatch(echo(ByteString("LOL2"))) client.initialized.failed.futureValue shouldBe a[ConnectionInitializationFailure] f1.failed.futureValue shouldBe a[ConnectionInitializationFailure] f2.failed.futureValue shouldBe a[ConnectionInitializationFailure] } test("api traits usage test") { val api = RedisApi.Connection.Async.StringTyped(createClient(RedisBatch.unit)) val bvApi: api.WithValue[ByteString] = api.valueType[ByteString] api.set("key", "value").futureValue shouldEqual true bvApi.set("key", ByteString.empty).futureValue shouldEqual true api.keyType[ByteString].set(ByteString("key"), "value").futureValue shouldEqual true bvApi.keyType[ByteString].set(ByteString("key"), ByteString.empty).futureValue shouldEqual true } } class RedisTlsConnectionClientTest extends RedisConnectionClientTest with UsesSslContext { override def createClient(initCommands: RedisBatch[Any]): RedisConnectionClient = new RedisConnectionClient(tlsAddress, config = ConnectionConfig(initCommands, () => sslContext.createSSLEngine)) }
Example 148
Source File: TestDebugListener.scala From scala-commons with MIT License | 5 votes |
package com.avsystem.commons package redis import akka.util.ByteString import com.avsystem.commons.redis.actor.RedisConnectionActor.DebugListener import com.avsystem.commons.redis.protocol.RedisMsg class TestDebugListener extends DebugListener { private var receiving = false private val builder = new StringBuilder def clear(): Unit = synchronized { receiving = true builder.clear() } def onSend(data: ByteString) = synchronized { if (receiving) { builder.append("\n") receiving = false } builder.append(RedisMsg.escape(data, quote = false).replaceAllLiterally("\\r\\n", "\\r\\n\n")) } def onReceive(data: ByteString) = synchronized { if (!receiving) { builder.append("\n") receiving = true } builder.append(RedisMsg.escape(data, quote = false).replaceAllLiterally("\\r\\n", "\\r\\n\n")) } def result(): String = synchronized { builder.result() } }
Example 149
Source File: ClusterUtils.scala From scala-commons with MIT License | 5 votes |
package com.avsystem.commons package redis import akka.util.ByteString import scala.io.Source object ClusterUtils { final val SlotKeys = Source.fromInputStream(getClass.getResourceAsStream("/slotkeys.txt")) .getLines().toArray def keyWithSameSlotAs(key: String): String = SlotKeys(Hash.slot(ByteString(key))) def strings(length: Int): Iterator[String] = if (length == 0) Iterator("") else strings(length - 1).flatMap(p => ('a' to 'z').iterator.map(c => p + c)) def findKeyForSlot(slot: Int, length: Int): String = strings(length).find(bs => Hash.slot(ByteString(bs)) == slot) .getOrElse(throw new IllegalArgumentException(s"Could not find key that would map to slot $slot")) def main(args: Array[String]): Unit = { println(SlotKeys(123)) } }
Example 150
Source File: ErrorDirectivesSpec.scala From nexus with Apache License 2.0 | 5 votes |
package ch.epfl.bluebrain.nexus.commons.http.directives import akka.http.scaladsl.marshalling.Marshal import akka.http.scaladsl.model.{HttpEntity, HttpResponse, StatusCodes} import akka.util.ByteString import ch.epfl.bluebrain.nexus.commons.circe.ContextUri import ch.epfl.bluebrain.nexus.commons.http.RdfMediaTypes import ch.epfl.bluebrain.nexus.commons.http.directives.ErrorDirectives._ import ch.epfl.bluebrain.nexus.commons.http.directives.ErrorDirectivesSpec.CustomError import ch.epfl.bluebrain.nexus.rdf.syntax.iri._ import ch.epfl.bluebrain.nexus.util.ActorSystemFixture import io.circe.generic.auto._ import org.scalatest.concurrent.ScalaFutures import org.scalatest.matchers.should.Matchers import org.scalatest.wordspec.AnyWordSpecLike import scala.concurrent.duration._ class ErrorDirectivesSpec extends ActorSystemFixture("ErrorDirectivesSpec") with AnyWordSpecLike with Matchers with ScalaFutures { implicit override val patienceConfig = PatienceConfig(3.seconds, 100.millis) "A ErrorDirectives" should { import system.dispatcher implicit val statusFromJson: StatusFrom[CustomError] = StatusFrom((_: CustomError) => StatusCodes.NotFound) implicit val contextUri: ContextUri = ContextUri(url"http://localhost.com/error/") "marshall error JSON-LD" in { val error = CustomError("some error") val jsonString = s"""{"@context":"${contextUri.value}","message":"${error.message}"}""" Marshal(error).to[HttpResponse].futureValue shouldEqual HttpResponse( status = StatusCodes.NotFound, entity = HttpEntity.Strict(RdfMediaTypes.`application/ld+json`, ByteString(jsonString, "UTF-8")) ) } } } object ErrorDirectivesSpec { final case class CustomError(message: String) }
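HttpEntity.Strict pairs a content type with an in-memory ByteString, which is why the expected response above can be compared byte-for-byte. A minimal sketch:

import akka.http.scaladsl.model.{ContentTypes, HttpEntity}
import akka.util.ByteString

val entity = HttpEntity.Strict(ContentTypes.`application/json`, ByteString("""{"ok":true}"""))
assert(entity.data.utf8String == """{"ok":true}""")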
Example 151
Source File: TestHelper.scala From nexus with Apache License 2.0 | 5 votes |
package ch.epfl.bluebrain.nexus.kg import java.time.Clock import java.util.UUID import akka.stream.Materializer import akka.stream.scaladsl.Source import akka.util.ByteString import ch.epfl.bluebrain.nexus.commons.test.{EitherValues, Randomness} import ch.epfl.bluebrain.nexus.iam.acls.AccessControlList import ch.epfl.bluebrain.nexus.iam.types.Identity.Anonymous import ch.epfl.bluebrain.nexus.iam.types.{Identity, Permission, ResourceF => IamResourceF} import ch.epfl.bluebrain.nexus.kg.config.Schemas.unconstrainedSchemaUri import ch.epfl.bluebrain.nexus.kg.resources.ResourceF.Value import ch.epfl.bluebrain.nexus.kg.resources.{Ref, ResId, ResourceF} import ch.epfl.bluebrain.nexus.kg.storage.AkkaSource import ch.epfl.bluebrain.nexus.rdf.Iri.AbsoluteIri import ch.epfl.bluebrain.nexus.rdf.implicits._ import io.circe.Json trait TestHelper extends EitherValues with Randomness { private val clock = Clock.systemUTC() val read: Permission = Permission.unsafe("resources/read") val write: Permission = Permission.unsafe("files/write") def consume(source: AkkaSource)(implicit mt: Materializer): String = { import org.scalatest.concurrent.ScalaFutures._ source.runFold("")(_ ++ _.utf8String).futureValue } def produce(string: String, chunkSize: Int = 100): AkkaSource = Source(string.grouped(chunkSize).map(ByteString(_)).toList) def resourceAcls(acl: AccessControlList): IamResourceF[AccessControlList] = IamResourceF( url"http://example.com/id", 1L, Set.empty, clock.instant(), Anonymous, clock.instant(), Anonymous, acl ) def simpleV( id: ResId, value: Json, rev: Long = 1L, types: Set[AbsoluteIri] = Set.empty, deprecated: Boolean = false, schema: Ref = Ref(unconstrainedSchemaUri), created: Identity = Anonymous, updated: Identity = Anonymous )(implicit clock: Clock): ResourceF[Value] = ResourceF( id, rev, types, deprecated, Map.empty, None, clock.instant(), clock.instant(), created, updated, schema, Value(value, value.contextValue, value.toGraph(id.value).rightValue) ) def simpleV(res: ResourceF[Json])(implicit clock: Clock) = ResourceF( res.id, res.rev, res.types, res.deprecated, Map.empty, None, clock.instant(), clock.instant(), res.createdBy, res.updatedBy, res.schema, Value(res.value, res.value.contextValue, res.value.toGraph(res.id.value).rightValue) ) def genUUID: UUID = UUID.randomUUID() def genIri: AbsoluteIri = url"http://example.com/" + genUUID.toString private def sourceInChunks(input: String): AkkaSource = Source.fromIterator(() => input.grouped(10000).map(ByteString(_))) def genSource: AkkaSource = sourceInChunks(genString()) }
Example 152
Source File: AttributesComputation.scala From nexus with Apache License 2.0 | 5 votes |
package ch.epfl.bluebrain.nexus.storage.attributes import java.nio.file.{Files, Path} import java.security.MessageDigest import akka.http.scaladsl.model.HttpCharsets.`UTF-8` import akka.http.scaladsl.model.MediaTypes.{`application/octet-stream`, `application/x-tar`} import akka.http.scaladsl.model.{ContentType, MediaType, MediaTypes} import akka.stream.Materializer import akka.stream.scaladsl.{Keep, Sink} import akka.util.ByteString import cats.effect.Effect import cats.implicits._ import ch.epfl.bluebrain.nexus.storage.File.{Digest, FileAttributes} import ch.epfl.bluebrain.nexus.storage.StorageError.InternalError import ch.epfl.bluebrain.nexus.storage._ import org.apache.commons.io.FilenameUtils import scala.concurrent.{ExecutionContext, Future} import scala.util.{Failure, Success, Try} trait AttributesComputation[F[_], Source] { implicit def akkaAttributes[F[_]](implicit ec: ExecutionContext, mt: Materializer, F: Effect[F] ): AttributesComputation[F, AkkaSource] = (path: Path, algorithm: String) => { if (!Files.exists(path)) F.raiseError(InternalError(s"Path not found '$path'")) else Try(MessageDigest.getInstance(algorithm)) match { case Success(msgDigest) => val isDir = Files.isDirectory(path) val source = if (isDir) folderSource(path) else fileSource(path) source .alsoToMat(sinkSize)(Keep.right) .toMat(sinkDigest(msgDigest)) { (bytesF, digestF) => (bytesF, digestF).mapN { case (bytes, digest) => FileAttributes(path.toAkkaUri, bytes, digest, detectMediaType(path, isDir)) } } .run() .to[F] case Failure(_) => F.raiseError(InternalError(s"Invalid algorithm '$algorithm'.")) } } }
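This excerpt elides the sinkSize and sinkDigest helpers it folds the stream into. A hedged sketch of what such sinks could look like; the names and the hex-string result type are assumptions, not the project's actual definitions:

import java.security.MessageDigest
import akka.stream.scaladsl.Sink
import akka.util.ByteString
import scala.concurrent.{ExecutionContext, Future}

// Total byte count of the stream.
def sinkSize: Sink[ByteString, Future[Long]] =
  Sink.fold(0L)((acc, chunk: ByteString) => acc + chunk.size)

// Feed each chunk into the MessageDigest, then hex-encode the final digest.
def sinkDigestHex(md: MessageDigest)(implicit ec: ExecutionContext): Sink[ByteString, Future[String]] =
  Sink
    .fold(md)((digest, chunk: ByteString) => { digest.update(chunk.toArray); digest })
    .mapMaterializedValue(_.map(_.digest().map("%02x".format(_)).mkString))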
Example 153
Source File: IamIdentitiesClient.scala From nexus with Apache License 2.0 | 5 votes |
package ch.epfl.bluebrain.nexus.storage import akka.actor.ActorSystem import akka.http.scaladsl.Http import akka.http.scaladsl.client.RequestBuilding.Get import akka.http.scaladsl.model.HttpRequest import akka.http.scaladsl.model.headers.OAuth2BearerToken import akka.http.scaladsl.unmarshalling.FromEntityUnmarshaller import akka.util.ByteString import cats.effect.{ContextShift, Effect, IO} import cats.implicits._ import ch.epfl.bluebrain.nexus.rdf.implicits._ import ch.epfl.bluebrain.nexus.storage.IamIdentitiesClient.Identity._ import ch.epfl.bluebrain.nexus.storage.IamIdentitiesClient._ import ch.epfl.bluebrain.nexus.storage.IamIdentitiesClientError.IdentitiesSerializationError import ch.epfl.bluebrain.nexus.storage.config.IamClientConfig import de.heikoseeberger.akkahttpcirce.ErrorAccumulatingCirceSupport.{DecodingFailures => AccDecodingFailures} import io.circe.Decoder.Result import io.circe.{Decoder, DecodingFailure, HCursor} import scala.concurrent.ExecutionContext class IamIdentitiesClient[F[_]](config: IamClientConfig)(implicit F: Effect[F], as: ActorSystem) extends JsonLdCirceSupport { private val um: FromEntityUnmarshaller[Caller] = unmarshaller[Caller] implicit private val ec: ExecutionContext = as.dispatcher implicit private val contextShift: ContextShift[IO] = IO.contextShift(ec) def apply()(implicit credentials: Option[AccessToken]): F[Caller] = credentials match { case Some(token) => execute(Get(config.identitiesIri.asAkka).addCredentials(OAuth2BearerToken(token.value))) case None => F.pure(Caller.anonymous) } private def execute(req: HttpRequest): F[Caller] = { IO.fromFuture(IO(Http().singleRequest(req))).to[F].flatMap { resp => if (resp.status.isSuccess()) IO.fromFuture(IO(um(resp.entity))).to[F].recoverWith { case err: AccDecodingFailures => F.raiseError(IdentitiesSerializationError(err.getMessage)) case err: Error => F.raiseError(IdentitiesSerializationError(err.getMessage)) } else IO.fromFuture(IO(resp.entity.dataBytes.runFold(ByteString(""))(_ ++ _).map(_.utf8String))) .to[F] .flatMap { err => F.raiseError(IamIdentitiesClientError.unsafe(resp.status, err)) } } } } object IamIdentitiesClient { final case class Authenticated(realm: String) extends Identity private def decodeAnonymous(hc: HCursor): Result[Subject] = hc.get[String]("@type").flatMap { case "Anonymous" => Right(Anonymous) case _ => Left(DecodingFailure("Cannot decode Anonymous Identity", hc.history)) } private def decodeUser(hc: HCursor): Result[Subject] = (hc.get[String]("subject"), hc.get[String]("realm")).mapN { case (subject, realm) => User(subject, realm) } private def decodeGroup(hc: HCursor): Result[Identity] = (hc.get[String]("group"), hc.get[String]("realm")).mapN { case (group, realm) => Group(group, realm) } private def decodeAuthenticated(hc: HCursor): Result[Identity] = hc.get[String]("realm").map(Authenticated) private val attempts = List[HCursor => Result[Identity]](decodeAnonymous, decodeUser, decodeGroup, decodeAuthenticated) implicit val identityDecoder: Decoder[Identity] = Decoder.instance { hc => attempts.foldLeft(Left(DecodingFailure("Unexpected", hc.history)): Result[Identity]) { case (acc @ Right(_), _) => acc case (_, f) => f(hc) } } } }
Example 154
Source File: TarFlow.scala From nexus with Apache License 2.0 | 5 votes |
package ch.epfl.bluebrain.nexus.storage import java.nio.charset.StandardCharsets.UTF_8 import java.nio.file.{Files, Path} import akka.NotUsed import akka.stream.scaladsl.{FileIO, Flow, Source} import akka.util.ByteString import org.apache.commons.compress.archivers.tar.{TarArchiveEntry, TarConstants} def writer(basePath: Path): Flow[Path, ByteString, NotUsed] = Flow[Path] .flatMapConcat { case path if Files.isRegularFile(path) => val headerSource = Source.single(headerBytes(basePath, path)) val paddingSource = Source.single(padToBoundary(path)) headerSource.concat(FileIO.fromPath(path)).concat(paddingSource) case path => Source.single(headerBytes(basePath, path)) } .concat(Source.single(terminalChunk)) }
Example 155
Source File: TarFlowSpec.scala From nexus with Apache License 2.0 | 5 votes |
package ch.epfl.bluebrain.nexus.storage import java.io.ByteArrayInputStream import java.nio.file.{Files, Path, Paths} import akka.actor.ActorSystem import akka.stream.alpakka.file.scaladsl.Directory import akka.stream.scaladsl.{FileIO, Source} import akka.testkit.TestKit import akka.util.ByteString import ch.epfl.bluebrain.nexus.storage.utils.{EitherValues, IOEitherValues, Randomness} import org.apache.commons.compress.archivers.tar.TarArchiveInputStream import org.apache.commons.io.FileUtils import org.scalatest.matchers.should.Matchers import org.scalatest.wordspec.AnyWordSpecLike import org.scalatest.{BeforeAndAfterAll, Inspectors, OptionValues} import scala.annotation.tailrec class TarFlowSpec extends TestKit(ActorSystem("TarFlowSpec")) with AnyWordSpecLike with Matchers with IOEitherValues with Randomness with EitherValues with OptionValues with Inspectors with BeforeAndAfterAll { val basePath = Files.createTempDirectory("tarflow") val dir1 = basePath.resolve("one") val dir2 = basePath.resolve("two") override def afterAll(): Unit = { super.afterAll() FileUtils.cleanDirectory(basePath.toFile) () } type PathAndContent = (Path, String) "A TarFlow" should { Files.createDirectories(dir1) Files.createDirectories(dir2) def relativize(path: Path): String = basePath.getParent().relativize(path).toString "generate the byteString for a tar file correctly" in { val file1 = dir1.resolve("file1.txt") val file1Content = genString() val file2 = dir1.resolve("file3.txt") val file2Content = genString() val file3 = dir2.resolve("file3.txt") val file3Content = genString() val files = List(file1 -> file1Content, file2 -> file2Content, file3 -> file3Content) forAll(files) { case (file, content) => Source.single(ByteString(content)).runWith(FileIO.toPath(file)).futureValue } val byteString = Directory.walk(basePath).via(TarFlow.writer(basePath)).runReduce(_ ++ _).futureValue val bytes = new ByteArrayInputStream(byteString.toArray) val tar = new TarArchiveInputStream(bytes) @tailrec def readEntries( tar: TarArchiveInputStream, entries: List[PathAndContent] = Nil ): List[PathAndContent] = { val entry = tar.getNextTarEntry if (entry == null) entries else { val data = Array.ofDim[Byte](entry.getSize.toInt) tar.read(data) readEntries(tar, (Paths.get(entry.getName) -> ByteString(data).utf8String) :: entries) } } val directories = List(relativize(basePath) -> "", relativize(dir1) -> "", relativize(dir2) -> "") val untarred = readEntries(tar).map { case (path, content) => path.toString -> content } val expected = files.map { case (path, content) => relativize(path) -> content } ++ directories untarred should contain theSameElementsAs expected } } }
Example 156
Source File: AkkaHttpPrometheusExporter.scala From cloudstate with Apache License 2.0 | 5 votes |
package io.cloudstate.proxy import java.io.OutputStreamWriter import java.util import akka.actor.ActorSystem import akka.http.scaladsl.Http import io.prometheus.client.CollectorRegistry import akka.http.scaladsl.model._ import akka.http.scaladsl.server.Directives._ import akka.stream.Materializer import akka.util.ByteString import io.prometheus.client.exporter.common.TextFormat import scala.concurrent.Future class AkkaHttpPrometheusExporter(metricsPort: Int, registry: CollectorRegistry = CollectorRegistry.defaultRegistry)( implicit system: ActorSystem, mat: Materializer ) { private[this] final val PrometheusContentType = ContentType.parse(TextFormat.CONTENT_TYPE_004).right.get private def routes = get { (path("metrics") | pathSingleSlash) { encodeResponse { parameter(Symbol("name[]").*) { names => complete { val namesSet = new util.HashSet[String]() names.foreach(namesSet.add) val builder = ByteString.newBuilder val writer = new OutputStreamWriter(builder.asOutputStream) TextFormat.write004(writer, registry.filteredMetricFamilySamples(namesSet)) // Very important to flush the writer before we build the byte string! writer.flush() HttpEntity(PrometheusContentType, builder.result()) } } } } } def start(): Future[Http.ServerBinding] = Http().bindAndHandle(routes, "0.0.0.0", metricsPort) }
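builder.asOutputStream is the bridge that lets a java.io writer fill a ByteString, and the flush-before-result ordering the comment above insists on is load-bearing. A minimal sketch:

import java.io.OutputStreamWriter
import akka.util.ByteString

val builder = ByteString.newBuilder
val writer = new OutputStreamWriter(builder.asOutputStream, "UTF-8")
writer.write("metric_total 1.0")
writer.flush() // without this, buffered chars never reach the builder
assert(builder.result().utf8String == "metric_total 1.0")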
Example 157
Source File: BintrayClientTests.scala From scaladex with BSD 3-Clause "New" or "Revised" License | 5 votes |
package ch.epfl.scala.index.bintray import akka.stream.scaladsl.Source import akka.util.ByteString import ch.epfl.scala.index.data.bintray.BintrayClient import org.scalatest._ import play.api.libs.json.JsValue import play.api.libs.ws.{WSCookie, WSResponse} import scala.xml.Elem class BintrayClientTests extends FlatSpec with Matchers { "BintrayClient" should "calculate pagination properly" in { getPagination(startPos = 50, endPos = 59, total = 100) should contain theSameElementsAs List( 60, 70, 80, 90 ) getPagination(startPos = 0, endPos = 49, total = 100) should contain theSameElementsAs List( 50 ) getPagination(startPos = 50, endPos = 99, total = 100) should contain theSameElementsAs Nil getPagination(startPos = 0, endPos = 49, total = 50) should contain theSameElementsAs Nil getPagination(startPos = 0, endPos = 49, total = 51) should contain theSameElementsAs List( 50 ) getPagination(startPos = 0, endPos = 0, total = 10) should contain theSameElementsAs List( 1, 2, 3, 4, 5, 6, 7, 8, 9) } def getPagination(startPos: Int, endPos: Int, total: Int): Seq[Int] = { val wsResponse = wsResponseWithHeaders( Map("X-RangeLimit-Total" -> Seq(total.toString), "X-RangeLimit-StartPos" -> Seq(startPos.toString), "X-RangeLimit-EndPos" -> Seq(endPos.toString)) ) BintrayClient.remainingPages(wsResponse) } private def wsResponseWithHeaders(providedHeaders: Map[String, Seq[String]]) = new WSResponse { override def status: Int = ??? override def statusText: String = ??? override def underlying[T]: T = ??? override def cookies: Seq[WSCookie] = ??? override def cookie(name: String): Option[WSCookie] = ??? override def body: String = ??? override def bodyAsBytes: ByteString = ??? override def bodyAsSource: Source[ByteString, _] = ??? override def allHeaders: Map[String, Seq[String]] = ??? override def xml: Elem = ??? override def json: JsValue = ??? override def headers: Map[String, Seq[String]] = providedHeaders } }
Example 158
Source File: Producer.scala From kinesis-stream with MIT License | 5 votes |
import akka.actor.ActorSystem import akka.stream.ActorMaterializer import akka.stream.scaladsl.{Sink, Source} import akka.util.ByteString import com.amazonaws.auth.DefaultAWSCredentialsProviderChain import com.amazonaws.services.kinesis.producer.KinesisProducerConfiguration import com.contxt.kinesis.ScalaKinesisProducer object Producer extends App { implicit val system = ActorSystem("kinesis-producer") implicit val ec = system.dispatcher implicit val mat = ActorMaterializer() val producer = ScalaKinesisProducer( "activity-test", new KinesisProducerConfiguration() .setRegion("us-east-1") .setCredentialsProvider(new DefaultAWSCredentialsProviderChain)) Source(1 to 10) .map(i => (i.toString, ByteString(s"Data: $i"))) .mapAsync(1) { case (key, data) => producer.send(key, data.toByteBuffer) } .runWith(Sink.foreach(r => println(s"${r.getShardId}-${r.getSequenceNumber.takeRight(10)}"))) .onComplete { case _ => system.terminate() } }
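The producer hands each record to the KPL as a java.nio.ByteBuffer via data.toByteBuffer. A small sketch of the conversion (the payload is made up); note that toByteBuffer copies into a fresh, independent buffer, while asByteBuffer returns a read-only view:

import java.nio.ByteBuffer
import akka.util.ByteString

object ByteBufferSketch extends App {
  val data = ByteString("Data: 1")
  val copy: ByteBuffer = data.toByteBuffer // independent, writable copy
  val view: ByteBuffer = data.asByteBuffer // read-only view of the bytes
  println(copy.remaining()) // 7
  println(view.isReadOnly)  // true
}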
Example 159
Source File: Record.scala From kinesis-stream with MIT License | 5 votes |
package px.kinesis.stream.consumer import java.time.Instant import akka.Done import akka.util.ByteString import px.kinesis.stream.consumer.checkpoint.CheckpointTracker import software.amazon.kinesis.retrieval.KinesisClientRecord import software.amazon.kinesis.retrieval.kpl.ExtendedSequenceNumber import scala.concurrent.Future case class Record( key: String, data: ByteString, sequenceNumber: String, subSequenceNumber: Long, shardId: String, approximateArrivalTimestamp: Instant, markProcessed: () => Future[Done] ) { def extendedSequenceNumber = new ExtendedSequenceNumber(sequenceNumber, subSequenceNumber) } object Record { def from(kinesisRecord: KinesisClientRecord, shardId: String, tracker: CheckpointTracker): Record = { val extendedSequenceNumber = new ExtendedSequenceNumber( kinesisRecord.sequenceNumber(), kinesisRecord.subSequenceNumber() ) val markProcessed: () => Future[Done] = () => tracker.process(shardId, extendedSequenceNumber) Record( kinesisRecord.partitionKey(), ByteString(kinesisRecord.data()), kinesisRecord.sequenceNumber(), kinesisRecord.subSequenceNumber(), shardId, kinesisRecord.approximateArrivalTimestamp(), markProcessed ) } }
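Record.from goes the opposite direction: ByteString(kinesisRecord.data()) copies the remaining bytes of the KCL's ByteBuffer into an immutable ByteString. A minimal sketch (the payload string is made up):

import java.nio.ByteBuffer
import akka.util.ByteString

object FromByteBufferSketch extends App {
  val buffer = ByteBuffer.wrap("payload".getBytes("UTF-8"))
  // ByteString(buffer) drains the buffer's remaining bytes into an immutable
  // copy, so later mutation of the buffer cannot affect the record.
  val data = ByteString(buffer)
  println(data.utf8String) // payload
}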
Example 160
Source File: WolframServiceImpl.scala From lagom-on-kube with Apache License 2.0 | 5 votes |
package me.alexray.wolfram.impl import java.net.URLEncoder import akka.NotUsed import akka.actor.ActorSystem import akka.http.scaladsl.Http import akka.http.scaladsl.model.{HttpRequest, Uri} import akka.http.scaladsl.unmarshalling.Unmarshal import akka.stream.Materializer import akka.util.ByteString import com.lightbend.lagom.scaladsl.api.ServiceCall import me.alexray.wolfram.api.WolframService import play.api.Configuration import scala.concurrent.{ExecutionContext, Future} class WolframServiceImpl(config: Configuration) (implicit system: ActorSystem, mat: Materializer, ec: ExecutionContext) extends WolframService { val appID = config.underlying.getString("wolfram.appid") val apiUrl = s"http://api.wolframalpha.com/v2/" override def query(q: String): ServiceCall[NotUsed, String] = ServiceCall { _ => val url = apiUrl + s"query?appid=$appID&input=" + URLEncoder.encode(q, "UTF-8") for { response <- Http().singleRequest(HttpRequest(uri = Uri(url))) if response.status.isSuccess() data <- Unmarshal(response).to[String] } yield data } override def simple(q: String): ServiceCall[NotUsed, Array[Byte]] = ServiceCall { _ => println(s"question = '$q'") val url = apiUrl + s"simple?appid=$appID&input=" + URLEncoder.encode(q, "UTF-8").replace("+", "%20") println(s"url = '$url'") for { response <- Http().singleRequest(HttpRequest(uri = Uri(url))) if response.status.isSuccess() bytes <- Unmarshal(response).to[ByteString] } yield { println(s"received image ${bytes.size} bytes long") bytes.toArray } } }
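Unmarshal(response).to[ByteString] drains the whole entity stream into one ByteString, after which toArray yields the raw Array[Byte] the service returns. A self-contained sketch with a synthetic response (entity content and names are illustrative):

import akka.actor.ActorSystem
import akka.http.scaladsl.model.{ContentTypes, HttpEntity, HttpResponse}
import akka.http.scaladsl.unmarshalling.Unmarshal
import akka.stream.ActorMaterializer
import akka.util.ByteString

object UnmarshalBytesSketch extends App {
  implicit val system = ActorSystem("unmarshal-sketch")
  implicit val materializer = ActorMaterializer()
  import system.dispatcher

  val response = HttpResponse(entity =
    HttpEntity(ContentTypes.`application/octet-stream`, ByteString(Array[Byte](1, 2, 3))))

  // The predefined ByteString unmarshaller collects all entity chunks.
  Unmarshal(response).to[ByteString].foreach { bytes =>
    println(bytes.toArray.length) // 3
    system.terminate()
  }
}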
Example 161
Source File: DataPostProcessor.scala From CM-Well with Apache License 2.0 | 5 votes |
package cmwell.tools.data.downloader import akka.stream.scaladsl._ import akka.util.{ByteString, ByteStringBuilder} import cmwell.tools.data.utils.akka._ import cmwell.tools.data.utils.chunkers.GroupChunker import cmwell.tools.data.utils.logging.DataToolsLogging import scala.collection.mutable object DataPostProcessor extends DataToolsLogging { def postProcessByFormat(format: String, dataBytes: Source[ByteString, _]) = format match { case "ntriples" | "nquads" => sortBySubjectOfNTuple(dataBytes) case _ => splitByLines(dataBytes) } private def splitByLines(dataBytes: Source[ByteString, _]) = { dataBytes .via(lineSeparatorFrame) .map(_ ++ endl) } private def sortBySubjectOfNTuple(dataBytes: Source[ByteString, _]): Source[ByteString, _] = { dataBytes .via(lineSeparatorFrame) .filter { case line if line.startsWith("_") => badDataLogger.debug("was filtered: {}", line.utf8String) false case _ => true } .fold(mutable.Map.empty[ByteString, ByteStringBuilder]) { (agg, line) => // aggregate each line according to its subject (i.e., bucket) val subject = GroupChunker.extractSubject(line) val builder = agg.getOrElse(subject, new ByteStringBuilder) builder ++= (line ++ endl) agg + (subject -> builder) } .map(_.toMap) .mapConcat(_.map { case (_, ntupleBuilder) => ntupleBuilder.result }) } }
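The fold above keeps one ByteStringBuilder per subject: ++= appends chunks cheaply and result() freezes them into a single immutable ByteString. A minimal sketch of that accumulation (the triples are made-up N-Triples lines):

import akka.util.{ByteString, ByteStringBuilder}

object BuilderAccumulationSketch extends App {
  val endl = ByteString("\n")
  val builder = new ByteStringBuilder
  builder ++= ByteString("<s> <p> <o> .") ++ endl
  builder ++= ByteString("<s> <p2> <o2> .") ++ endl
  // result() produces the concatenated, immutable ByteString.
  print(builder.result().utf8String)
}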
Example 162
Source File: GroupChunkerSpec.scala From CM-Well with Apache License 2.0 | 5 votes |
package cmwell.tools.data.utils.chunkers import akka.stream.scaladsl.Keep import akka.stream.testkit.scaladsl.{TestSink, TestSource} import akka.util.ByteString import cmwell.tools.data.helpers.BaseStreamSpec import scala.concurrent.duration._ class GroupSpecAutoFusingOn extends { val autoFusing = true } with GroupChunkerSpec class GroupSpecAutoFusingOff extends { val autoFusing = false } with GroupChunkerSpec trait GroupChunkerSpec extends BaseStreamSpec { "GroupChunker" should "emit elements when a new group has arrived" in { val (pub, sub) = TestSource.probe[String] .map(x => ByteString(x.toString)) .via(GroupChunker(b => ByteString(b.size), 2.seconds)) // group byte-strings by size .map(_.map(_.utf8String)) .toMat(TestSink.probe[Seq[String]])(Keep.both) .run() sub.request(100) pub.sendNext("hello") pub.sendNext("world") pub.sendNext("nba") pub.sendNext("ibm") pub.sendNext("what") pub.sendNext("is") pub.sendNext("life") pub.sendComplete() sub.expectNext(Seq("hello", "world")) sub.expectNext(Seq("nba", "ibm")) sub.expectNext(Seq("what")) sub.expectNext(Seq("is")) sub.expectNext(Seq("life")) sub.expectComplete() } it should "emit elements when the time threshold has been reached" in { val (pub, sub) = TestSource.probe[String] .map(x => ByteString(x.toString)) .via(GroupChunker(b => ByteString(b.size), 2.seconds)) // group byte-strings by size .map(_.map(_.utf8String)) .toMat(TestSink.probe[Seq[String]])(Keep.both) .run() sub.request(4) pub.sendNext("one") sub.expectNext(Seq("one")) pub.sendNext("two") sub.expectNext(Seq("two")) pub.sendNext("four") pub.sendNext("five") pub.sendComplete() sub.expectNext(Seq("four","five")) sub.expectComplete() } }
Example 163
Source File: DownloaderSpec.scala From CM-Well with Apache License 2.0 | 5 votes |
package cmwell.tools.data.downloader.streams import akka.actor.ActorSystem import akka.http.scaladsl.model.StatusCodes import akka.stream.scaladsl._ import akka.stream.{ActorMaterializer, Materializer} import akka.util.ByteString import org.scalatest._ import org.scalatest.prop._ import com.github.tomakehurst.wiremock.WireMockServer import com.github.tomakehurst.wiremock.client.WireMock import com.github.tomakehurst.wiremock.client.WireMock._ import com.github.tomakehurst.wiremock.core.WireMockConfiguration._ import scala.concurrent.Await import scala.concurrent.duration._ import org.scalatestplus.scalacheck.ScalaCheckPropertyChecks class DownloaderSpec extends PropSpec with ScalaCheckPropertyChecks with Matchers with BeforeAndAfterAll { implicit val system: ActorSystem = ActorSystem("reactive-tools-system") implicit val mat: Materializer = ActorMaterializer() val host = "localhost" val numUuidsPerRequest = 25 override protected def afterAll(): Unit = { system.terminate() super.afterAll() } property ("Download from uuids stream sends request blocks of uuids") { val uuids = Table( ("numUuids", "blocksToSend"), (1 , 1 ), (10 , 1 ), (20 , 1 ), (30 , 2 ), (60 , 3 ) ) forAll(uuids) { (numUuids: Int, expectedBlocksToSend: Int) => // setup wiremock val wireMockServer = new WireMockServer(wireMockConfig().dynamicPort()) wireMockServer.start() val port = wireMockServer.port WireMock.configureFor(host, port) // create sample uuids to download val data = for (x <- 1 to numUuids) yield s"uuid$x\n" // wiremock stubbing stubFor(post(urlPathMatching("/_out.*")) .willReturn(aResponse() .withBody("body") .withStatus(StatusCodes.OK.intValue))) // create uuid input-stream val in = Source.fromIterator(() => data.iterator) .map(ByteString.apply) .runWith(StreamConverters.asInputStream()) // download mock data Await.result ( Downloader.downloadFromUuidInputStream( baseUrl = s"$host:$port", numInfotonsPerRequest = numUuidsPerRequest, in = in) ,30.seconds ) // verifying val numBlocksSent = findAll(postRequestedFor((urlPathMatching("/_out.*")))).size // teardown wiremock wireMockServer.shutdown() wireMockServer.stop() while (wireMockServer.isRunning) {} wireMockServer.resetRequests() numBlocksSent should be (expectedBlocksToSend) } } }
Example 164
Source File: TsvRetrieverFromFile.scala From CM-Well with Apache License 2.0 | 5 votes |
package cmwell.dc.stream import java.io.{BufferedWriter, File, FileWriter} import akka.actor.ActorSystem import akka.stream.{KillSwitch, KillSwitches, Materializer} import akka.stream.Supervision.Decider import akka.stream.contrib.SourceGen import akka.stream.scaladsl.{Flow, Keep, Sink, Source} import akka.util.ByteString import cmwell.dc.LazyLogging import cmwell.dc.stream.MessagesTypesAndExceptions.{DcInfo, InfotonData} import cmwell.dc.stream.TsvRetriever.{logger, TsvFlowOutput} import cmwell.util.resource._ import scala.concurrent.Future import scala.util.{Failure, Success} import scala.concurrent.ExecutionContext.Implicits.global object TsvRetrieverFromFile extends LazyLogging { def apply(dcInfo: DcInfo)(implicit mat: Materializer, system: ActorSystem): Source[InfotonData, (KillSwitch, Future[Seq[Option[String]]])] = { val persistFile = dcInfo.tsvFile.get + ".persist" def appendToPersistFile(str: String): Unit = { val bw = new BufferedWriter(new FileWriter(persistFile, true)) bw.write(str) bw.close() } val linesToDrop = dcInfo.positionKey.fold { if (!new File(persistFile).exists) 0L else using(scala.io.Source.fromFile(persistFile))(_.getLines.toList.last.toLong) }(pos => pos.toLong) val positionKeySink = Flow[InfotonData] .recover { case e: Throwable => InfotonData(null, null, -1) } .scan(linesToDrop) { case (count, InfotonData(null, null, -1)) => { appendToPersistFile("crash at: " + count + "\n" + count.toString + "\n") count } case (count, _) => { val newCount = count + 1 if (newCount % 10000 == 0) appendToPersistFile(newCount.toString + "\n") newCount } } .toMat(Sink.last)( (_, right) => right.map { count => appendToPersistFile(count.toString + "\n") Seq.fill(2)(Option(count.toString)) } ) Source .fromIterator(() => scala.io.Source.fromFile(dcInfo.tsvFile.get).getLines()) .drop { logger.info(s"Dropping $linesToDrop initial lines from file ${dcInfo.tsvFile.get} for sync ${dcInfo.key}") linesToDrop } .viaMat(KillSwitches.single)(Keep.right) .map(line => TsvRetriever.parseTSVAndCreateInfotonDataFromIt(ByteString(line))) .alsoToMat(positionKeySink)(Keep.both) } }
Example 165
Source File: package.scala From CM-Well with Apache License 2.0 | 5 votes |
package cmwell.dc import akka.util.ByteString package object stream { val empty = ByteString("") val endln = ByteString("\n") val tab = ByteString("\t") val space: Char = 32 val newLine: Char = 10 val ii = ByteString("/ii/") val cmwellPrefix = "cmwell:/" val lessThan = ByteString("<") }
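Interning the separators once as ByteString constants avoids re-encoding them for every record, and ++ builds a rope over the shared pieces instead of copying them eagerly. A small sketch of assembling a record from such constants (the field values are made up):

import akka.util.ByteString

object SeparatorSketch extends App {
  val tab = ByteString("\t")
  val endl = ByteString("\n")
  // Building a tab-separated record from pre-allocated separator constants.
  val line = ByteString("/path/to/infoton") ++ tab ++ ByteString("uuid-1234") ++ endl
  print(line.utf8String)
}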
Example 166
Source File: HttpUtil.scala From CM-Well with Apache License 2.0 | 5 votes |
package cmwell.analytics.util import akka.actor.ActorSystem import akka.http.scaladsl.Http import akka.http.scaladsl.model.RequestEntityAcceptance.Tolerated import akka.http.scaladsl.model.{HttpMethod, HttpRequest, HttpResponse} import akka.stream.ActorMaterializer import akka.stream.scaladsl.Sink import akka.util.ByteString import com.fasterxml.jackson.databind.{JsonNode, ObjectMapper} import com.typesafe.config.ConfigFactory import scala.concurrent.duration.{MILLISECONDS, _} import scala.concurrent.{Await, ExecutionContextExecutor, Future} object HttpUtil { private val mapper = new ObjectMapper() private val config = ConfigFactory.load private val ReadTimeout = FiniteDuration(config.getDuration("extract-index-from-es.read-timeout").toMillis, MILLISECONDS) // Elasticsearch uses the POST verb in some places where the request is actually idempotent. // Requests that use POST, but are known to be idempotent, can use this method. // The presence of any non-idempotent request in-flight causes Akka to not retry, and that will tend to result in // entire downloads failing more often. val SAFE_POST = HttpMethod( value = "POST", isSafe = true, isIdempotent = true, requestEntityAcceptance = Tolerated) def resultAsync(request: HttpRequest, action: String) (implicit system: ActorSystem, executionContext: ExecutionContextExecutor, actorMaterializer: ActorMaterializer): Future[ByteString] = Http().singleRequest(request).map { case HttpResponse(status, _, entity, _) if status.isSuccess => entity.dataBytes .fold(ByteString.empty)(_ ++ _) .runWith(Sink.head) case HttpResponse(status, _, entity, _) => val message = Await.result(entity.toStrict(10.seconds).map(_.data), 10.seconds).utf8String throw new RuntimeException(s"HTTP request for $action failed. Status code: $status, message: $message") } .flatMap(identity) def result(request: HttpRequest, action: String, timeout: FiniteDuration = ReadTimeout) (implicit system: ActorSystem, executionContext: ExecutionContextExecutor, actorMaterializer: ActorMaterializer): ByteString = Await.result(resultAsync(request, action), timeout) def jsonResult(request: HttpRequest, action: String, timeout: FiniteDuration = ReadTimeout) (implicit system: ActorSystem, executionContext: ExecutionContextExecutor, actorMaterializer: ActorMaterializer): JsonNode = mapper.readTree(result(request, action, timeout).utf8String) def jsonResultAsync(request: HttpRequest, action: String) (implicit system: ActorSystem, executionContext: ExecutionContextExecutor, actorMaterializer: ActorMaterializer): Future[JsonNode] = resultAsync(request, action).map((bytes: ByteString) => mapper.readTree(bytes.utf8String)) }
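resultAsync collects the streamed entity by folding with ByteString.empty and ++; dataBytes.runFold(ByteString.empty)(_ ++ _) is the more direct spelling of the same fold-plus-Sink.head combination. A minimal sketch (names and chunk contents are illustrative):

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.Source
import akka.util.ByteString

object RunFoldSketch extends App {
  implicit val system = ActorSystem("run-fold-sketch")
  implicit val materializer = ActorMaterializer()
  import system.dispatcher

  // Folding from ByteString.empty concatenates every chunk in order.
  Source(List(ByteString("chunk1"), ByteString("chunk2")))
    .runFold(ByteString.empty)(_ ++ _)
    .foreach { all =>
      println(all.utf8String) // chunk1chunk2
      system.terminate()
    }
}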
Example 167
Source File: EsUtil.scala From CM-Well with Apache License 2.0 | 5 votes |
package cmwell.analytics.util import akka.actor.ActorSystem import akka.http.scaladsl.model.HttpRequest import akka.stream.ActorMaterializer import akka.util.ByteString import scala.concurrent.ExecutionContextExecutor object EsUtil { def countDocumentsInShard(httpAddress: String, shard: Shard, filter: String) (implicit system: ActorSystem, executionContext: ExecutionContextExecutor, actorMaterializer: ActorMaterializer): Long = { val request = HttpRequest( method = HttpUtil.SAFE_POST, uri = s"http://$httpAddress/${shard.indexName}/_count?preference=_shards:${shard.shard}", entity = ByteString(s"{$filter}")) val json = HttpUtil.jsonResult(request, "count documents in shard") json.get("count").asLong } }
Example 168
Source File: ZeroMQWordCount.scala From iolap with Apache License 2.0 | 5 votes |
package org.apache.spark.examples.streaming import akka.actor.ActorSystem import akka.actor.actorRef2Scala import akka.zeromq._ import akka.zeromq.Subscribe import akka.util.ByteString import org.apache.spark.streaming.{Seconds, StreamingContext} import org.apache.spark.streaming.zeromq._ import scala.language.implicitConversions import org.apache.spark.SparkConf // scalastyle:on object ZeroMQWordCount { def main(args: Array[String]) { if (args.length < 2) { System.err.println("Usage: ZeroMQWordCount <zeroMQurl> <topic>") System.exit(1) } StreamingExamples.setStreamingLogLevels() val Seq(url, topic) = args.toSeq val sparkConf = new SparkConf().setAppName("ZeroMQWordCount") // Create the context and set the batch size val ssc = new StreamingContext(sparkConf, Seconds(2)) def bytesToStringIterator(x: Seq[ByteString]): Iterator[String] = x.map(_.utf8String).iterator // For this stream, a zeroMQ publisher should be running. val lines = ZeroMQUtils.createStream(ssc, url, Subscribe(topic), bytesToStringIterator _) val words = lines.flatMap(_.split(" ")) val wordCounts = words.map(x => (x, 1)).reduceByKey(_ + _) wordCounts.print() ssc.start() ssc.awaitTermination() } }
Example 169
Source File: ZeroMQStreamSuite.scala From iolap with Apache License 2.0 | 5 votes |
package org.apache.spark.streaming.zeromq import akka.actor.SupervisorStrategy import akka.util.ByteString import akka.zeromq.Subscribe import org.apache.spark.SparkFunSuite import org.apache.spark.storage.StorageLevel import org.apache.spark.streaming.{Seconds, StreamingContext} import org.apache.spark.streaming.dstream.ReceiverInputDStream class ZeroMQStreamSuite extends SparkFunSuite { val batchDuration = Seconds(1) private val master: String = "local[2]" private val framework: String = this.getClass.getSimpleName test("zeromq input stream") { val ssc = new StreamingContext(master, framework, batchDuration) val publishUrl = "abc" val subscribe = new Subscribe(null.asInstanceOf[ByteString]) val bytesToObjects = (bytes: Seq[ByteString]) => null.asInstanceOf[Iterator[String]] // tests the API, does not actually test data receiving val test1: ReceiverInputDStream[String] = ZeroMQUtils.createStream(ssc, publishUrl, subscribe, bytesToObjects) val test2: ReceiverInputDStream[String] = ZeroMQUtils.createStream( ssc, publishUrl, subscribe, bytesToObjects, StorageLevel.MEMORY_AND_DISK_SER_2) val test3: ReceiverInputDStream[String] = ZeroMQUtils.createStream( ssc, publishUrl, subscribe, bytesToObjects, StorageLevel.MEMORY_AND_DISK_SER_2, SupervisorStrategy.defaultStrategy) // TODO: Actually test data receiving ssc.stop() } }
Example 170
Source File: SecretReader.scala From reactive-lib with Apache License 2.0 | 5 votes |
package com.lightbend.rp.secrets.scaladsl import java.nio.charset.StandardCharsets import akka.actor.ActorSystem import akka.stream.ActorMaterializer import akka.stream.scaladsl.FileIO import akka.util.ByteString import java.nio.file.{ Path, Paths } import scala.concurrent._ object SecretReader { @deprecated("Read from file /rp/secrets/%name%/%key% where %name% is transformed to lowercase, and '-' for non-alphanum", "1.7.0") def get(name: String, key: String)(implicit as: ActorSystem, mat: ActorMaterializer): Future[Option[ByteString]] = { import as.dispatcher sys .env .get(envName(name, key)) .map(data => Future.successful(Some(ByteString(data)))) .getOrElse( FileIO.fromPath(filePath(name, key)) .runFold(ByteString.empty)(_ ++ _) .map(Some(_)) .recover { case _: Throwable => None }) } private[secrets] def envName(namespace: String, name: String): String = s"RP_SECRETS_${namespace}_$name" .toUpperCase .map(c => if (c.isLetterOrDigit) c else '_') private[scaladsl] def filePath(name: String, key: String): Path = Paths .get("/rp") .resolve("secrets") .resolve(name) .resolve(key) }
Example 171
Source File: RedisStore.scala From shield with MIT License | 5 votes |
package shield.kvstore import redis.{ByteStringFormatter, RedisClient} import shield.implicits.FutureUtil import shield.kvstore.protobuf.HttpResponseProtos.ProtoResponse import shield.kvstore.protobuf.HttpResponseProtos.ProtoResponse.ProtoHeader import shield.metrics.Instrumented import spray.http.HttpHeaders.RawHeader import spray.http.parser.HttpParser import spray.httpx.encoding.Gzip import akka.util.{Timeout, ByteString} import spray.http._ import scala.concurrent.{ExecutionContext, Future} import scala.concurrent.duration._ import collection.JavaConversions._ object ProtobufResponseHelper { def apply(response: HttpResponse) : ProtoResponse = { val iterHeaders = response.headers.map(ProtobufHeaderHelper.build) ProtoResponse.newBuilder() .setStatus(response.status.intValue) .setData(com.google.protobuf.ByteString.copyFrom(response.entity.data.toByteArray)) .addAllHeaders(iterHeaders) .build() } } object ProtobufHeaderHelper { def build(header: HttpHeader) : ProtoHeader = { ProtoHeader.newBuilder() .setName(header.lowercaseName) .setValue(header.value) .build() } } class RedisStore(id: String, client: RedisClient)(implicit context: ExecutionContext) extends KVStore with Instrumented { val deserializeTimer = metrics.timer("deserialize", id) val encodeTimer = metrics.timer("encode", id) val serializeTimer = metrics.timer("serialize", id) val serializedSize = metrics.histogram("serialized-size", id) implicit val timeout = Timeout(5.seconds) implicit val responseFormatter = new ByteStringFormatter[HttpResponse] { override def deserialize(bs: ByteString): HttpResponse = { deserializeTimer.time { // todo: single pass over headers to convert to rawheader and grab content type // Protobuf val msg = ProtoResponse.parseFrom(bs.toArray) val scalaHeaders: List[ProtoHeader] = msg.getHeadersList.toList // todo: profiling optimization: parsing these headers is more expensive than deserializing the protobuf val contentType = scalaHeaders.find(_.getName == "content-type").map(ct => HttpParser.parse(HttpParser.ContentTypeHeaderValue, ct.getValue).right.get).getOrElse(ContentTypes.`application/octet-stream`) // even if it's technically http-1.0, we'd just scrub it later HttpResponse(msg.getStatus, HttpEntity(contentType, msg.getData.toByteArray), HttpParser.parseHeaders(scalaHeaders.map(h => RawHeader(h.getName, h.getValue)))._2, HttpProtocols.`HTTP/1.1`) } } override def serialize(rawResponse: HttpResponse): ByteString = { val response = encodeTimer.time(Gzip.encode(rawResponse)) serializeTimer.time { // Protobuf val msg = ProtobufResponseHelper(response) val b = msg.toByteArray serializedSize += b.length ByteString(b) } } } val setGetTimer = timing("setGet", id) def setGet(key: String) : Future[Seq[String]] = setGetTimer { client.smembers[String](key) } val setDeleteTimer = timing("setDelete", id) def setDelete(key: String) : Future[Long] = setDeleteTimer { client.del(key) } val setAddTimer = timing("setAdd", id) def setAdd(key: String, value: String) : Future[Long] = setAddTimer { client.sadd(key, value) } val setRemoveTimer = timing("setRemove", id) def setRemove(key: String, value: String) : Future[Long] = setRemoveTimer { client.srem(key, value) } val keyGetTimer = timing("keyGet", id) def keyGet(key: String) : Future[Option[HttpResponse]] = keyGetTimer { client.get[HttpResponse](key) } val keySetTimer = timing("keySet", id) def keySet(key: String, value: HttpResponse) : Future[Boolean] = keySetTimer { client.set(key, value) } val keyDeleteTimer = timing("keyDelete", id) def keyDelete(key: String) : 
Future[Long] = keyDeleteTimer { client.del(key) } val tokenTimer = timing("tokenRateLimit", id) def tokenRateLimit(key: String, rate: Int, perSeconds: Int) : Future[Boolean] = tokenTimer { val floored = Math.floor(System.currentTimeMillis() / (perSeconds * 1000)).toLong val fullKey = s"rl:$floored:$key" // yes, we're refreshing the expire on the bucket multiple times, but once System.currentTimeMillis // advances enough to increment floored, we'll stop refreshing the expire on this key client.expire(fullKey, perSeconds).andThen(FutureUtil.logFailure("RedisStore::tokenRateLimit-expire")) client.incr(fullKey).map(_ <= rate) } }
Example 172
Source File: ZeroMQStreamSuite.scala From spark1.52 with Apache License 2.0 | 5 votes |
package org.apache.spark.streaming.zeromq import akka.actor.SupervisorStrategy import akka.util.ByteString import akka.zeromq.Subscribe import org.apache.spark.SparkFunSuite import org.apache.spark.storage.StorageLevel import org.apache.spark.streaming.{Seconds, StreamingContext} import org.apache.spark.streaming.dstream.ReceiverInputDStream class ZeroMQStreamSuite extends SparkFunSuite { val batchDuration = Seconds(1) private val master: String = "local[2]" private val framework: String = this.getClass.getSimpleName test("zeromq input stream") { val ssc = new StreamingContext(master, framework, batchDuration) val publishUrl = "abc" val subscribe = new Subscribe(null.asInstanceOf[ByteString]) val bytesToObjects = (bytes: Seq[ByteString]) => null.asInstanceOf[Iterator[String]] // tests the API, does not actually test data receiving val test1: ReceiverInputDStream[String] = ZeroMQUtils.createStream(ssc, publishUrl, subscribe, bytesToObjects) val test2: ReceiverInputDStream[String] = ZeroMQUtils.createStream( ssc, publishUrl, subscribe, bytesToObjects, StorageLevel.MEMORY_AND_DISK_SER_2) val test3: ReceiverInputDStream[String] = ZeroMQUtils.createStream( ssc, publishUrl, subscribe, bytesToObjects, StorageLevel.MEMORY_AND_DISK_SER_2, SupervisorStrategy.defaultStrategy) // TODO: Actually test data receiving ssc.stop() } }
Example 173
Source File: ProtocolSerializationSupport.scala From akka-viz with MIT License | 5 votes |
package akkaviz.server import akka.http.scaladsl.model.ws.{BinaryMessage, Message} import akka.stream.scaladsl.Flow import akka.util.ByteString import akkaviz.protocol trait ProtocolSerializationSupport { def protocolServerMessageToByteString: Flow[protocol.ApiServerMessage, ByteString, Any] = Flow[protocol.ApiServerMessage].map { msg => ByteString(protocol.IO.write(msg)) } def websocketMessageToClientMessage: Flow[Message, protocol.ApiClientMessage, _] = Flow[Message].collect { case BinaryMessage.Strict(msg) => protocol.IO.readClient(msg.asByteBuffer) } } object ProtocolSerializationSupport extends ProtocolSerializationSupport
Example 174
Source File: ProtocolSerializationSupportTest.scala From akka-viz with MIT License | 5 votes |
package akkaviz.server import akka.actor.ActorSystem import akka.http.scaladsl.model.ws.BinaryMessage import akka.stream.ActorMaterializer import akka.stream.scaladsl.{Sink, Source} import akka.util.ByteString import akkaviz.protocol.{IO, Killed, SetEnabled} import org.scalatest.concurrent.ScalaFutures import org.scalatest.{FunSuite, Matchers} import scala.concurrent.duration._ class ProtocolSerializationSupportTest extends FunSuite with ScalaFutures with Matchers { private[this] implicit val system = ActorSystem() private[this] implicit val materializer = ActorMaterializer() override implicit val patienceConfig: PatienceConfig = PatienceConfig(10.seconds) import ProtocolSerializationSupport._ test("websocketMessageToClientMessage") { val msg = SetEnabled(true) val wsMessage = BinaryMessage(ByteString(IO.write(msg))) val res = Source.single(wsMessage).via(websocketMessageToClientMessage).runWith(Sink.head) whenReady(res) { _ shouldBe msg } } test("protocolServerMessageToByteString") { val msg = Killed("ref") val res = Source.single(msg).via(protocolServerMessageToByteString).runWith(Sink.head) whenReady(res) { serialized => IO.readServer(serialized.asByteBuffer) shouldBe msg } } }
Example 175
Source File: Cataloging.scala From reactive-application-development-scala with Apache License 2.0 | 5 votes |
package com.rarebooks.library import java.nio.file.Paths import akka.actor.ActorSystem import akka.stream.ActorMaterializer import akka.stream.scaladsl._ import akka.util.ByteString import akka.NotUsed import scala.concurrent.Await import scala.concurrent.duration.Duration import LibraryProtocol.BookCard object Cataloging extends App { implicit val system = ActorSystem("catalog-loader") implicit val materializer = ActorMaterializer() val file = Paths.get("books.csv") private val framing: Flow[ByteString, ByteString, NotUsed] = Framing.delimiter(ByteString("\n"), maximumFrameLength = 256, allowTruncation = true) private val parsing: ByteString => Array [String] = _.utf8String.split(",") import LibraryProtocol._ val topics = Set(Africa, Asia, Gilgamesh, Greece, Persia, Philosophy, Royalty, Tradition) val topic: String => Set[Topic] = s => Set(topics.find(s == _.toString).getOrElse(Unknown)) private val conversion: Array[String] => BookCard = s => BookCard( isbn = s(0), author = s(1), title = s(2), description = s(3), dateOfOrigin = s(4), topic = topic(s(5)), publisher = s(6), language = s(7), pages = s(8).toInt ) val result = FileIO.fromPath(file) .via(framing) .map(parsing) .map(conversion) .to(Sink.foreach(println(_))) .run() Await.ready(result, Duration.Inf) system.terminate() }
Example 176
Source File: Cataloging.scala From reactive-application-development-scala with Apache License 2.0 | 5 votes |
package com.rarebooks.library import java.nio.file.Paths import akka.actor.ActorSystem import akka.stream.ActorMaterializer import akka.stream.scaladsl._ import akka.util.ByteString import akka.NotUsed import scala.concurrent.Await import scala.concurrent.duration.Duration import LibraryProtocol.BookCard object Cataloging extends App { implicit val system = ActorSystem("catalog-loader") implicit val materializer = ActorMaterializer() val librarian = system.actorOf(Librarian.props(Duration.Zero, 0), "rare-books") val file = Paths.get("books.csv") private val framing: Flow[ByteString, ByteString, NotUsed] = Framing.delimiter(ByteString("\n"), maximumFrameLength = 256, allowTruncation = true) private val parsing: ByteString => Array [String] = _.utf8String.split(",") import LibraryProtocol._ val topics = Set(Africa, Asia, Gilgamesh, Greece, Persia, Philosophy, Royalty, Tradition) val topic: String => Set[Topic] = s => Set(topics.find(s == _.toString).getOrElse(Unknown)) private val conversion: Array[String] => BookCard = s => BookCard( isbn = s(0), author = s(1), title = s(2), description = s(3), dateOfOrigin = s(4), topic = topic(s(5)), publisher = s(6), language = s(7), pages = s(8).toInt ) val result = FileIO.fromPath(file) .via(framing) .map(parsing) .map(conversion) .to(Sink.actorRefWithAck( librarian, LibInit, LibAck, LibComplete, LibError)) .run() Await.ready(result, Duration.Inf) system.terminate() }
Example 177
Source File: GrpcMarshalling.scala From akka-grpc with Apache License 2.0 | 5 votes |
package akka.grpc.javadsl import java.util.concurrent.{ CompletableFuture, CompletionStage } import java.util.Optional import akka.NotUsed import akka.actor.ActorSystem import akka.actor.ClassicActorSystemProvider import akka.grpc._ import akka.grpc.internal.{ CancellationBarrierGraphStage, GrpcResponseHelpers, MissingParameterException } import akka.grpc.GrpcProtocol.{ GrpcProtocolReader, GrpcProtocolWriter } import akka.http.javadsl.model.{ HttpRequest, HttpResponse } import akka.japi.Function import akka.stream.Materializer import akka.stream.javadsl.{ Sink, Source } import akka.util.ByteString import com.github.ghik.silencer.silent object GrpcMarshalling { def negotiated[T]( req: HttpRequest, f: (GrpcProtocolReader, GrpcProtocolWriter) => CompletionStage[T]): Optional[CompletionStage[T]] = GrpcProtocol .negotiate(req) .map { case (maybeReader, writer) => maybeReader.map(reader => f(reader, writer)).fold[CompletionStage[T]](failure, identity) } .fold(Optional.empty[CompletionStage[T]])(Optional.of) def unmarshal[T]( data: Source[ByteString, AnyRef], u: ProtobufSerializer[T], mat: Materializer, reader: GrpcProtocolReader): CompletionStage[T] = data.via(reader.dataFrameDecoder).map(u.deserialize).runWith(Sink.headOption[T], mat).thenCompose[T] { opt => if (opt.isPresent) CompletableFuture.completedFuture(opt.get) else failure(new MissingParameterException()) } def unmarshalStream[T]( data: Source[ByteString, AnyRef], u: ProtobufSerializer[T], @silent("never used") mat: Materializer, reader: GrpcProtocolReader): CompletionStage[Source[T, NotUsed]] = { CompletableFuture.completedFuture[Source[T, NotUsed]]( data .mapMaterializedValue(_ => NotUsed) .via(reader.dataFrameDecoder) .map(japiFunction(u.deserialize)) // In gRPC we signal failure by returning an error code, so we // don't want the cancellation bubbled out .via(new CancellationBarrierGraphStage) .mapMaterializedValue(japiFunction(_ => NotUsed))) } def marshal[T]( e: T, m: ProtobufSerializer[T], writer: GrpcProtocolWriter, system: ClassicActorSystemProvider, eHandler: Function[ActorSystem, Function[Throwable, Trailers]] = GrpcExceptionHandler.defaultMapper) : HttpResponse = marshalStream(Source.single(e), m, writer, system, eHandler) def marshalStream[T]( e: Source[T, NotUsed], m: ProtobufSerializer[T], writer: GrpcProtocolWriter, system: ClassicActorSystemProvider, eHandler: Function[ActorSystem, Function[Throwable, Trailers]] = GrpcExceptionHandler.defaultMapper) : HttpResponse = GrpcResponseHelpers(e.asScala, scalaAnonymousPartialFunction(eHandler))(m, writer, system) private def failure[R](error: Throwable): CompletableFuture[R] = { val future: CompletableFuture[R] = new CompletableFuture() future.completeExceptionally(error) future } }
Example 178
Source File: GrpcProtocolNative.scala From akka-grpc with Apache License 2.0 | 5 votes |
package akka.grpc.internal import akka.grpc.GrpcProtocol._ import akka.http.scaladsl.model.HttpEntity.{ Chunk, ChunkStreamPart, LastChunk } import akka.util.ByteString import com.github.ghik.silencer.silent object GrpcProtocolNative extends AbstractGrpcProtocol("grpc") { override protected def writer(codec: Codec) = AbstractGrpcProtocol.writer(this, codec, encodeFrame(codec, _)) override protected def reader(codec: Codec): GrpcProtocolReader = AbstractGrpcProtocol.reader(codec, decodeFrame) @inline private def decodeFrame(@silent("never used") frameType: Int, data: ByteString) = DataFrame(data) @inline private def encodeFrame(codec: Codec, frame: Frame): ChunkStreamPart = { val compressedFlag = AbstractGrpcProtocol.fieldType(codec) frame match { case DataFrame(data) => Chunk(AbstractGrpcProtocol.encodeFrameData(compressedFlag, codec.compress(data))) case TrailerFrame(headers) => LastChunk(trailer = headers) } } }
Example 179
Source File: GrpcProtocolWeb.scala From akka-grpc with Apache License 2.0 | 5 votes |
package akka.grpc.internal import akka.grpc.GrpcProtocol._ import akka.http.scaladsl.model._ import akka.http.scaladsl.model.HttpEntity.{ Chunk, ChunkStreamPart } import akka.stream.scaladsl.Flow import akka.util.ByteString import io.grpc.{ Status, StatusException } abstract class GrpcProtocolWebBase(subType: String) extends AbstractGrpcProtocol(subType) { protected def postEncode(frame: ByteString): ByteString protected def preDecode(frame: ByteString): ByteString override protected def writer(codec: Codec): GrpcProtocolWriter = AbstractGrpcProtocol.writer(this, codec, frame => encodeFrame(codec, frame)) override protected def reader(codec: Codec): GrpcProtocolReader = AbstractGrpcProtocol.reader(codec, decodeFrame, flow => Flow[ByteString].map(preDecode).via(flow)) @inline private def encodeFrame(codec: Codec, frame: Frame): ChunkStreamPart = { val dataFrameType = AbstractGrpcProtocol.fieldType(codec) val (frameType, data) = frame match { case DataFrame(data) => (dataFrameType, data) case TrailerFrame(trailer) => (ByteString(dataFrameType(0) | 0x80), encodeTrailer(trailer)) } val framed = AbstractGrpcProtocol.encodeFrameData(frameType, codec.compress(data)) Chunk(postEncode(framed)) } @inline private final def decodeFrame(frameHeader: Int, data: ByteString): Frame = { (frameHeader & 0x80) match { case 0 => DataFrame(data) case 0x80 => TrailerFrame(decodeTrailer(data)) case f => throw new StatusException(Status.INTERNAL.withDescription(s"Unknown frame type [$f]")) } } @inline private final def encodeTrailer(trailer: Seq[HttpHeader]): ByteString = ByteString(trailer.mkString("", "\r\n", "\r\n")) @inline private final def decodeTrailer(data: ByteString): List[HttpHeader] = ??? } object GrpcProtocolWebText extends GrpcProtocolWebBase("grpc-web-text") { override final def postEncode(framed: ByteString): ByteString = framed.encodeBase64 override final def preDecode(frame: ByteString): ByteString = frame.decodeBase64 }
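The grpc-web-text variant relies on ByteString's built-in base64 helpers: encodeBase64 on the way out, decodeBase64 on the way in. A round-trip sketch (the frame bytes are made up):

import akka.util.ByteString

object Base64Sketch extends App {
  val frame = ByteString(Array[Byte](0, 0, 0, 0, 1, 42))
  val encoded = frame.encodeBase64
  // decodeBase64 inverts encodeBase64 exactly.
  assert(encoded.decodeBase64 == frame)
  println(encoded.utf8String) // the base64 text representation of the frame
}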
Example 180
Source File: Gzip.scala From akka-grpc with Apache License 2.0 | 5 votes |
package akka.grpc.internal import java.io.{ ByteArrayInputStream, ByteArrayOutputStream } import java.util.zip.{ GZIPInputStream, GZIPOutputStream } import akka.util.ByteString object Gzip extends Codec { override val name: String = "gzip" override def compress(uncompressed: ByteString): ByteString = { val baos = new ByteArrayOutputStream(uncompressed.size) val gzos = new GZIPOutputStream(baos) gzos.write(uncompressed.toArray) gzos.flush() gzos.close() ByteString(baos.toByteArray) } override def uncompress(compressed: ByteString): ByteString = { val gzis = new GZIPInputStream(new ByteArrayInputStream(compressed.toArray)) val baos = new ByteArrayOutputStream(compressed.size) val buffer = new Array[Byte](32 * 1024) var read = gzis.read(buffer) while (read != -1) { baos.write(buffer, 0, read) read = gzis.read(buffer) } ByteString(baos.toByteArray) } }
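A codec must be lossless, so compress followed by uncompress has to return the original bytes. A quick round-trip check (a sketch that assumes the Gzip object above is in scope; the payload is made up):

import akka.util.ByteString

object GzipRoundTripSketch extends App {
  val original = ByteString("hello gzip, hello gzip, hello gzip")
  // compress/uncompress round-trip through the codec defined above.
  val compressed = Gzip.compress(original)
  assert(Gzip.uncompress(compressed) == original)
  println(s"${original.size} bytes in, ${compressed.size} bytes compressed")
}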
Example 181
Source File: MetadataBuilderSpec.scala From akka-grpc with Apache License 2.0 | 5 votes |
package akka.grpc.scaladsl import akka.util.ByteString import org.scalatest.matchers.should.Matchers import org.scalatest.wordspec.AnyWordSpec class MetadataBuilderSpec extends AnyWordSpec with Matchers { import akka.grpc.internal.MetadataImplSpec._ "MetadataBuilder" should { "return empty metadata" in { MetadataBuilder.empty.asList shouldBe empty MetadataBuilder.empty.asMap shouldBe empty } "handle distinct text entries" in { val b = new MetadataBuilder TEXT_ENTRIES.foreach { case (k, v) => b.addText(k, v) } val m = b.build() TEXT_ENTRIES.foreach { case (k, v) => m.getText(k) shouldBe Some(v) } } "handle repeated text entries" in { val b = new MetadataBuilder DUPE_TEXT_VALUES.foreach { v => b.addText(DUPE_TEXT_KEY, v) } val m = b.build() m.getText(DUPE_TEXT_KEY) shouldBe Some(DUPE_TEXT_VALUES.last) val dupeEntries = DUPE_TEXT_VALUES.map(StringEntry) m.asMap(DUPE_TEXT_KEY) shouldBe dupeEntries m.asList.collect { case (k, e) if k == DUPE_TEXT_KEY => e } shouldBe dupeEntries } "throw exception for '-bin' suffix on text key" in { an[IllegalArgumentException] should be thrownBy (new MetadataBuilder).addText("foo-bin", "x") } "throw exception for missing '-bin' suffix on binary key" in { an[IllegalArgumentException] should be thrownBy (new MetadataBuilder).addBinary("foo", ByteString.empty) } "handle distinct binary entries" in { val b = new MetadataBuilder BINARY_ENTRIES.foreach { case (k, v) => b.addBinary(k, v) } val m = b.build() BINARY_ENTRIES.foreach { case (k, v) => m.getBinary(k) shouldBe Some(v) } } "handle repeated binary entries" in { val b = new MetadataBuilder DUPE_BINARY_VALUES.foreach { v => b.addBinary(DUPE_BINARY_KEY, v) } val m = b.build() m.getBinary(DUPE_BINARY_KEY) shouldBe Some(DUPE_BINARY_VALUES.last) val dupeEntries = DUPE_BINARY_VALUES.map(BytesEntry) m.asMap(DUPE_BINARY_KEY) shouldBe dupeEntries m.asList.collect { case (k, e) if k == DUPE_BINARY_KEY => e } shouldBe dupeEntries } } }
Example 182
Source File: AkkaGrpcServerScala.scala From akka-grpc with Apache License 2.0 | 5 votes |
package akka.grpc.interop import java.io.FileInputStream import java.nio.file.{ Files, Paths } import java.security.cert.CertificateFactory import java.security.spec.PKCS8EncodedKeySpec import java.security.{ KeyFactory, KeyStore, SecureRandom } import scala.concurrent.duration._ import akka.actor.ActorSystem import akka.util.ByteString import akka.http.scaladsl.Http.ServerBinding import akka.http.scaladsl.model.{ HttpRequest, HttpResponse } import akka.http.scaladsl.{ Http2, HttpsConnectionContext } import akka.stream.SystemMaterializer import io.grpc.internal.testing.TestUtils import javax.net.ssl.{ KeyManagerFactory, SSLContext } import scala.concurrent.{ Await, Future } case class AkkaGrpcServerScala(serverHandlerFactory: ActorSystem => HttpRequest => Future[HttpResponse]) extends GrpcServer[(ActorSystem, ServerBinding)] { override def start() = { implicit val sys = ActorSystem() implicit val mat = SystemMaterializer(sys).materializer val testService = serverHandlerFactory(sys) val bindingFuture = Http2().bindAndHandleAsync( testService, interface = "127.0.0.1", port = 0, parallelism = 256, // TODO remove once https://github.com/akka/akka-http/pull/2146 is merged connectionContext = serverHttpContext()) val binding = Await.result(bindingFuture, 10.seconds) (sys, binding) } override def stop(binding: (ActorSystem, ServerBinding)) = binding match { case (sys, binding) => sys.log.info("Exception thrown, unbinding") Await.result(binding.unbind(), 10.seconds) Await.result(sys.terminate(), 10.seconds) } private def serverHttpContext() = { val keyEncoded = new String(Files.readAllBytes(Paths.get(TestUtils.loadCert("server1.key").getAbsolutePath)), "UTF-8") .replace("-----BEGIN PRIVATE KEY-----\n", "") .replace("-----END PRIVATE KEY-----\n", "") .replace("\n", "") val decodedKey = ByteString(keyEncoded).decodeBase64.toArray val spec = new PKCS8EncodedKeySpec(decodedKey) val kf = KeyFactory.getInstance("RSA") val privateKey = kf.generatePrivate(spec) val fact = CertificateFactory.getInstance("X.509") val is = new FileInputStream(TestUtils.loadCert("server1.pem")) val cer = fact.generateCertificate(is) val ks = KeyStore.getInstance("PKCS12") ks.load(null) ks.setKeyEntry("private", privateKey, Array.empty, Array(cer)) val keyManagerFactory = KeyManagerFactory.getInstance("SunX509") keyManagerFactory.init(ks, null) val context = SSLContext.getInstance("TLS") context.init(keyManagerFactory.getKeyManagers, null, new SecureRandom) new HttpsConnectionContext(context) } override def getPort(binding: (ActorSystem, ServerBinding)): Int = binding._2.localAddress.getPort }
Example 183
Source File: GraphQLRequestUnmarshaller.scala From graphql-gateway with Apache License 2.0 | 5 votes |
package sangria.gateway.http import java.nio.charset.Charset import akka.http.scaladsl.marshalling.{Marshaller, ToEntityMarshaller} import akka.http.scaladsl.model._ import akka.http.scaladsl.model.headers.Accept import akka.http.scaladsl.server.Directive0 import akka.http.scaladsl.server.Directives._ import akka.http.scaladsl.unmarshalling.{FromEntityUnmarshaller, Unmarshaller} import akka.util.ByteString import sangria.ast.Document import sangria.parser.QueryParser import sangria.renderer.{QueryRenderer, QueryRendererConfig} import scala.collection.immutable.Seq object GraphQLRequestUnmarshaller { val `application/graphql` = MediaType.applicationWithFixedCharset("graphql", HttpCharsets.`UTF-8`, "graphql") def explicitlyAccepts(mediaType: MediaType): Directive0 = headerValuePF { case Accept(ranges) if ranges.exists(range ⇒ !range.isWildcard && range.matches(mediaType)) ⇒ ranges }.flatMap(_ ⇒ pass) def includeIf(include: Boolean): Directive0 = if (include) pass else reject def unmarshallerContentTypes: Seq[ContentTypeRange] = mediaTypes.map(ContentTypeRange.apply) def mediaTypes: Seq[MediaType.WithFixedCharset] = List(`application/graphql`) implicit final def documentMarshaller(implicit config: QueryRendererConfig = QueryRenderer.Compact): ToEntityMarshaller[Document] = Marshaller.oneOf(mediaTypes: _*) { mediaType ⇒ Marshaller.withFixedContentType(ContentType(mediaType)) { json ⇒ HttpEntity(mediaType, QueryRenderer.render(json, config)) } } implicit final val documentUnmarshaller: FromEntityUnmarshaller[Document] = Unmarshaller.byteStringUnmarshaller .forContentTypes(unmarshallerContentTypes: _*) .map { case ByteString.empty ⇒ throw Unmarshaller.NoContentException case data ⇒ import sangria.parser.DeliveryScheme.Throw QueryParser.parse(data.decodeString(Charset.forName("UTF-8"))) } }
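The unmarshaller rejects empty request bodies by matching on ByteString.empty directly; since ByteString.empty is a stable identifier, it is usable as a pattern and is compared with ==. A small sketch of that idiom (the helper is illustrative):

import akka.util.ByteString

object EmptyPatternSketch extends App {
  def describe(bs: ByteString): String = bs match {
    case ByteString.empty => "no content" // stable-identifier pattern
    case data             => s"${data.size} bytes"
  }
  println(describe(ByteString.empty)) // no content
  println(describe(ByteString("{}"))) // 2 bytes
}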
Example 184
Source File: PlayHttpClient.scala From graphql-gateway with Apache License 2.0 | 5 votes |
package sangria.gateway.http.client import akka.util.ByteString import io.circe.parser._ import play.api.libs.ws.{BodyWritable, InMemoryBody, StandaloneWSClient, WSAuthScheme} import sangria.gateway.http.client.HttpClient.Method import sangria.gateway.util.Logging import scala.concurrent.duration._ import scala.concurrent.{ExecutionContext, Future} class PlayHttpClient(ws: StandaloneWSClient)(implicit ec: ExecutionContext) extends play.api.libs.ws.DefaultBodyWritables with HttpClient with Logging { private[this] val json = "application/json" override def request( method: Method.Value, url: String, queryParams: Seq[(String, String)], headers: Seq[(String, String)], body: Option[(String, String)] ) = { val m = mapMethod(method) val baseRequest = ws.url(url).withRequestTimeout(10.minutes).withMethod(m) val withParams = baseRequest.withQueryStringParameters(queryParams: _*).withHttpHeaders(headers: _*) val withBody = body match { case Some((t, content)) if t == json ⇒ withParams.withBody(parse(content).right.get)(BodyWritable(s ⇒ InMemoryBody(ByteString(s.spaces2)), t)) case Some((t, _)) ⇒ throw new IllegalStateException(s"Unhandled body type [$t].") case None ⇒ withParams } val finalRequest = withBody logger.debug(s"Http request: $m $url") finalRequest.execute().map(rsp ⇒ new HttpClient.HttpResponse { override def statusCode = rsp.status override def isSuccessful = rsp.status >= 200 && rsp.status < 300 override def asString = Future.successful(rsp.body) override def debugInfo = s"$method $url" }) } override def oauthClientCredentials(url: String, clientId: String, clientSecret: String, scopes: Seq[String]): Future[HttpClient.HttpResponse] = { val request = ws.url(url) .withMethod("POST") .withAuth(clientId, clientSecret, WSAuthScheme.BASIC) .withBody(Map("grant_type" → Seq("client_credentials"), "scope" → scopes)) logger.debug(s"HTTP OAuth client credentials request: $url") request.execute().map(rsp ⇒ new HttpClient.HttpResponse { override def statusCode = rsp.status override def isSuccessful = rsp.status >= 200 && rsp.status < 300 override def asString = Future.successful(rsp.body) override def debugInfo = s"POST $url" }) } def mapMethod(method: Method.Value) = method match { case Method.Get ⇒ "GET" case Method.Post ⇒ "POST" } }
Example 185
Source File: AkkaHttpClient.scala From graphql-gateway with Apache License 2.0 | 5 votes |
package sangria.gateway.http.client import akka.actor.ActorSystem import akka.http.scaladsl.Http import akka.http.scaladsl.model.HttpHeader.ParsingResult import akka.http.scaladsl.model._ import akka.http.scaladsl.model.Uri.Query import akka.http.scaladsl.model.headers.Location import akka.http.scaladsl.unmarshalling.Unmarshal import akka.stream.Materializer import akka.util.ByteString import sangria.gateway.util.Logging import scala.concurrent.{ExecutionContext, Future} class AkkaHttpClient(implicit system: ActorSystem, mat: Materializer, ec: ExecutionContext) extends HttpClient with Logging { import AkkaHttpClient._ import HttpClient._ override def request(method: Method.Value, url: String, queryParams: Seq[(String, String)] = Seq.empty, headers: Seq[(String, String)] = Seq.empty, body: Option[(String, String)] = None) = { val m = mapMethod(method) val query = Query(queryParams: _*) val hs = headers.map(header) val uri = Uri(url).withQuery(query) val entity = body.fold(HttpEntity.Empty){case (tpe, content) ⇒ HttpEntity(contentType(tpe), ByteString(content))} val request = HttpRequest(m, uri, hs.toVector, entity) val client = Http().singleRequest(_: HttpRequest) val richClient = RichHttpClient.httpClientWithRedirect(client) logger.debug(s"Http request: ${m.value} $url") richClient(request).map(AkkaHttpResponse(m, url, _)) } override def oauthClientCredentials(url: String, clientId: String, clientSecret: String, scopes: Seq[String]): Future[HttpResponse] = throw new IllegalStateException("Not yet implemented, please use play implementation.") private def contentType(str: String) = ContentType.parse(str).fold( errors ⇒ throw ClientError(s"Invalid content type '$str'", errors.map(_.detail)), identity) private def header(nameValue: (String, String)) = HttpHeader.parse(nameValue._1, nameValue._2) match { case ParsingResult.Ok(_, errors) if errors.nonEmpty ⇒ throw ClientError(s"Invalid header '${nameValue._1}'", errors.map(_.detail)) case ParsingResult.Error(error) ⇒ throw ClientError(s"Invalid header '${nameValue._1}'", Seq(error.detail)) case ParsingResult.Ok(h, _) ⇒ h } def mapMethod(method: Method.Value) = method match { case Method.Get ⇒ HttpMethods.GET case Method.Post ⇒ HttpMethods.POST } object RichHttpClient { import akka.http.scaladsl.model.HttpResponse type HttpClient = HttpRequest ⇒ Future[HttpResponse] def redirectOrResult(client: HttpClient)(response: HttpResponse): Future[HttpResponse] = response.status match { case StatusCodes.Found | StatusCodes.MovedPermanently | StatusCodes.SeeOther ⇒ val newUri = response.header[Location].get.uri // Always make sure you consume the response entity streams (of type Source[ByteString,Unit]) by for example connecting it to a Sink (for example response.discardEntityBytes() if you don’t care about the response entity), since otherwise Akka HTTP (and the underlying Streams infrastructure) will understand the lack of entity consumption as a back-pressure signal and stop reading from the underlying TCP connection! 
response.discardEntityBytes() logger.debug(s"Http redirect: ${HttpMethods.GET.value} $newUri") client(HttpRequest(method = HttpMethods.GET, uri = newUri)) case _ ⇒ Future.successful(response) } def httpClientWithRedirect(client: HttpClient): HttpClient = { lazy val redirectingClient: HttpClient = req ⇒ client(req).flatMap(redirectOrResult(redirectingClient)) // recurse to support multiple redirects redirectingClient } } case class ClientError(message: String, errors: Seq[String]) extends Exception(message + ":\n" + errors.map(" * " + _).mkString("\n")) } object AkkaHttpClient { case class AkkaHttpResponse(method: HttpMethod, url: String, response: HttpResponse)(implicit mat: Materializer) extends HttpClient.HttpResponse { def asString = Unmarshal(response).to[String] def statusCode = response.status.intValue() def isSuccessful = response.status.isSuccess() def debugInfo = s"${method.value} $url" } }
Example 186
Source File: AkkaHttpActionAdapterTest.scala From akka-http-pac4j with Mozilla Public License 2.0 | 5 votes |
package com.stackstate.pac4j import akka.http.scaladsl.model.{ContentTypes, HttpEntity, HttpRequest, HttpResponse} import org.scalatest.{Matchers, WordSpecLike} import akka.http.scaladsl.model.StatusCodes._ import akka.util.ByteString import com.stackstate.pac4j.AkkaHttpActionAdapterTest.ActionInt import com.stackstate.pac4j.http.AkkaHttpActionAdapter import com.stackstate.pac4j.store.ForgetfulSessionStorage import org.pac4j.core.exception.http.{ BadRequestAction, ForbiddenAction, FoundAction, HttpAction, NoContentAction, OkAction, StatusAction, UnauthorizedAction } import org.scalatest.concurrent.ScalaFutures class AkkaHttpActionAdapterTest extends WordSpecLike with Matchers with ScalaFutures { "AkkaHttpActionAdapter" should { "convert 200 to OK" in withContext { context => AkkaHttpActionAdapter.adapt(new OkAction(""), context).futureValue.response shouldEqual HttpResponse( OK, Nil, HttpEntity(ContentTypes.`application/octet-stream`, ByteString("")) ) } "convert 401 to Unauthorized" in withContext { context => AkkaHttpActionAdapter.adapt(UnauthorizedAction.INSTANCE, context).futureValue.response shouldEqual HttpResponse(Unauthorized) context.getChanges.cookies.map(_.name) shouldBe List(AkkaHttpWebContext.DEFAULT_COOKIE_NAME) } "convert 302 to SeeOther (to support login flow)" in withContext { context => val r = AkkaHttpActionAdapter.adapt(new FoundAction("/login"), context).futureValue.response r.status shouldEqual SeeOther r.headers.head.value() shouldEqual "/login" context.getChanges.cookies.map(_.name) shouldBe List(AkkaHttpWebContext.DEFAULT_COOKIE_NAME) } "convert 400 to BadRequest" in withContext { context => AkkaHttpActionAdapter.adapt(BadRequestAction.INSTANCE, context).futureValue.response shouldEqual HttpResponse(BadRequest) } "convert 201 to Created" in withContext { context => AkkaHttpActionAdapter.adapt(201.action(), context).futureValue.response shouldEqual HttpResponse(Created) } "convert 403 to Forbidden" in withContext { context => AkkaHttpActionAdapter.adapt(ForbiddenAction.INSTANCE, context).futureValue.response shouldEqual HttpResponse(Forbidden) } "convert 204 to NoContent" in withContext { context => AkkaHttpActionAdapter.adapt(NoContentAction.INSTANCE, context).futureValue.response shouldEqual HttpResponse(NoContent) } "convert 200 to OK with content set from the context" in withContext { context => AkkaHttpActionAdapter.adapt(new OkAction("content"), context).futureValue.response shouldEqual HttpResponse .apply(OK, Nil, HttpEntity(ContentTypes.`application/octet-stream`, ByteString("content"))) } "convert 200 to OK with content type set from the context" in withContext { context => context.setResponseContentType("application/json") AkkaHttpActionAdapter.adapt(new OkAction(""), context).futureValue.response shouldEqual HttpResponse .apply(OK, Nil, HttpEntity(ContentTypes.`application/json`, ByteString(""))) } } def withContext(f: AkkaHttpWebContext => Unit): Unit = { f(AkkaHttpWebContext(HttpRequest(), Seq.empty, new ForgetfulSessionStorage, AkkaHttpWebContext.DEFAULT_COOKIE_NAME)) } } object AkkaHttpActionAdapterTest { implicit class ActionInt(val i: Int) extends AnyVal { def action(): HttpAction = new StatusAction(i) } }
Example 187
Source File: ZeroMQWordCount.scala From BigDatalog with Apache License 2.0 | 5 votes |
// scalastyle:off println package org.apache.spark.examples.streaming import akka.actor.ActorSystem import akka.actor.actorRef2Scala import akka.zeromq._ import akka.zeromq.Subscribe import akka.util.ByteString import org.apache.spark.streaming.{Seconds, StreamingContext} import org.apache.spark.streaming.zeromq._ import scala.language.implicitConversions import org.apache.spark.SparkConf // scalastyle:on object ZeroMQWordCount { def main(args: Array[String]) { if (args.length < 2) { System.err.println("Usage: ZeroMQWordCount <zeroMQurl> <topic>") System.exit(1) } StreamingExamples.setStreamingLogLevels() val Seq(url, topic) = args.toSeq val sparkConf = new SparkConf().setAppName("ZeroMQWordCount") // Create the context and set the batch size val ssc = new StreamingContext(sparkConf, Seconds(2)) def bytesToStringIterator(x: Seq[ByteString]): Iterator[String] = x.map(_.utf8String).iterator // For this stream, a zeroMQ publisher should be running. val lines = ZeroMQUtils.createStream(ssc, url, Subscribe(topic), bytesToStringIterator _) val words = lines.flatMap(_.split(" ")) val wordCounts = words.map(x => (x, 1)).reduceByKey(_ + _) wordCounts.print() ssc.start() ssc.awaitTermination() } } // scalastyle:on println
Example 188
Source File: ZeroMQStreamSuite.scala From BigDatalog with Apache License 2.0 | 5 votes |
package org.apache.spark.streaming.zeromq

import akka.actor.SupervisorStrategy
import akka.util.ByteString
import akka.zeromq.Subscribe

import org.apache.spark.SparkFunSuite
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.dstream.ReceiverInputDStream

class ZeroMQStreamSuite extends SparkFunSuite {

  val batchDuration = Seconds(1)

  private val master: String = "local[2]"
  private val framework: String = this.getClass.getSimpleName

  test("zeromq input stream") {
    val ssc = new StreamingContext(master, framework, batchDuration)
    val publishUrl = "abc"
    val subscribe = new Subscribe(null.asInstanceOf[ByteString])
    val bytesToObjects = (bytes: Seq[ByteString]) => null.asInstanceOf[Iterator[String]]

    // tests the API, does not actually test data receiving
    val test1: ReceiverInputDStream[String] =
      ZeroMQUtils.createStream(ssc, publishUrl, subscribe, bytesToObjects)
    val test2: ReceiverInputDStream[String] = ZeroMQUtils.createStream(
      ssc, publishUrl, subscribe, bytesToObjects, StorageLevel.MEMORY_AND_DISK_SER_2)
    val test3: ReceiverInputDStream[String] = ZeroMQUtils.createStream(
      ssc, publishUrl, subscribe, bytesToObjects,
      StorageLevel.MEMORY_AND_DISK_SER_2, SupervisorStrategy.defaultStrategy)

    // TODO: Actually test data receiving
    ssc.stop()
  }
}
Example 189
Source File: BaseApi.scala From asura with MIT License | 5 votes |
package asura.play.api

import akka.util.ByteString
import asura.common.model.ApiRes
import asura.common.util.JsonUtils
import org.pac4j.core.profile.{CommonProfile, ProfileManager}
import org.pac4j.jwt.credentials.authenticator.JwtAuthenticator
import org.pac4j.play.PlayWebContext
import org.pac4j.play.scala.Security
import play.api.http.{ContentTypes, HttpEntity}
import play.api.mvc._

import scala.collection.JavaConverters.asScalaBuffer
import scala.concurrent.{ExecutionContext, Future}

trait BaseApi extends Security[CommonProfile] {

  import BaseApi._

  def getWsProfile(auth: JwtAuthenticator)(implicit request: RequestHeader): CommonProfile = {
    val token = request.getQueryString("token")
    if (token.nonEmpty) {
      auth.validateToken(token.get)
    } else {
      null
    }
  }

  def getProfiles()(implicit request: RequestHeader): List[CommonProfile] = {
    val webContext = new PlayWebContext(request, playSessionStore)
    val profileManager = new ProfileManager[CommonProfile](webContext)
    val profiles = profileManager.getAll(true)
    asScalaBuffer(profiles).toList
  }

  def getProfileId()(implicit request: RequestHeader): String = {
    getProfiles().head.getId
  }

  implicit class JsonToClass(req: Request[ByteString]) {
    def bodyAs[T <: AnyRef](c: Class[T]): T =
      JsonUtils.parse[T](req.body.decodeString("UTF-8"), c)
  }

  val responseNoCacheHeaders = Seq(
    ("Cache-Control", "no-cache"),
    ("X-Accel-Buffering", "no")
  )
  val DEFAULT_SOURCE_BUFFER_SIZE = 100
}
Example 190
Source File: TelnetDubboProviderActor.scala From asura with MIT License | 5 votes |
package asura.dubbo.actor

import akka.actor.{ActorRef, Props, Status, Terminated}
import akka.util.ByteString
import asura.common.actor.{BaseActor, ErrorActorEvent, NotifyActorEvent, SenderMessage}
import asura.common.util.LogUtils

class TelnetDubboProviderActor(address: String, port: Int) extends BaseActor {

  override def receive: Receive = {
    case SenderMessage(sender) =>
      val providerActor = context.actorOf(TelnetClientActor.props(address, port, self))
      context.watch(providerActor)
      context.become(handleRequest(sender, providerActor))
  }

  def handleRequest(wsActor: ActorRef, providerActor: ActorRef): Receive = {
    case cmd: String =>
      if (cmd == TelnetDubboProviderActor.CMD_EXIT || cmd == TelnetDubboProviderActor.CMD_QUIT) {
        providerActor ! ByteString(TelnetClientActor.CMD_CLOSE)
        wsActor ! NotifyActorEvent(TelnetDubboProviderActor.MSG_BYE)
      } else {
        providerActor ! ByteString(cmd)
      }
    case data: ByteString =>
      wsActor ! NotifyActorEvent(data.utf8String)
    case Terminated(_) =>
      wsActor ! Status.Success
    case Status.Failure(t) =>
      val stackTrace = LogUtils.stackTraceToString(t)
      log.warning(stackTrace)
      wsActor ! ErrorActorEvent(t.getMessage)
      providerActor ! ByteString(TelnetClientActor.CMD_CLOSE)
      wsActor ! Status.Success
  }

  override def postStop(): Unit = {
    log.debug(s"${address}:${port} stopped")
  }
}

object TelnetDubboProviderActor {
  val CMD_QUIT = "quit"
  val CMD_EXIT = "exit"
  val MSG_BYE = "Bye!"

  def props(address: String, port: Int) = Props(new TelnetDubboProviderActor(address, port))
}
Example 191
Source File: InterfaceMethodParamsActor.scala From asura with MIT License | 5 votes |
package asura.dubbo.actor

import akka.actor.{ActorRef, Props, Status}
import akka.pattern.pipe
import akka.util.ByteString
import asura.common.actor.BaseActor
import asura.common.util.LogUtils
import asura.dubbo.DubboConfig
import asura.dubbo.actor.GenericServiceInvokerActor.GetInterfaceMethodParams
import asura.dubbo.model.InterfaceMethodParams
import asura.dubbo.model.InterfaceMethodParams.MethodSignature

import scala.collection.mutable.ArrayBuffer
import scala.concurrent.{ExecutionContext, Future}

class InterfaceMethodParamsActor(invoker: ActorRef, msg: GetInterfaceMethodParams) extends BaseActor {

  implicit val ec: ExecutionContext = context.dispatcher
  private val telnet: ActorRef = context.actorOf(
    TelnetClientActor.props(msg.address, if (msg.port > 0) msg.port else DubboConfig.DEFAULT_PORT, self))

  override def receive: Receive = {
    case telnetData: ByteString =>
      val utf8String = telnetData.utf8String
      if (utf8String.contains(TelnetClientActor.MSG_CONNECT_TO)) {
        log.debug(utf8String)
        if (utf8String.contains(TelnetClientActor.MSG_SUCCESS)) {
          telnet ! ByteString(s"ls -l ${msg.ref}\r\n")
        } else if (utf8String.contains(TelnetClientActor.MSG_FAIL)) {
          Future.failed(new RuntimeException(s"Remote connection to ${msg.address}:${msg.port} failed")) pipeTo invoker
          telnet ! TelnetClientActor.CMD_CLOSE
          context stop self
        } else {
          Future.failed(new RuntimeException(s"Unknown response ${utf8String}")) pipeTo invoker
          telnet ! TelnetClientActor.CMD_CLOSE
          context stop self
        }
      } else if (utf8String.contains("(") && utf8String.contains(")")) {
        getInterfaceMethodParams(msg.ref, utf8String) pipeTo invoker
        telnet ! TelnetClientActor.CMD_CLOSE
      } else {
        Future.failed(new RuntimeException(s"Unknown response: ${utf8String}")) pipeTo invoker
        telnet ! TelnetClientActor.CMD_CLOSE
        context stop self
      }
    case Status.Failure(t) =>
      val stackTrace = LogUtils.stackTraceToString(t)
      log.warning(stackTrace)
      context stop self
  }

  def getInterfaceMethodParams(ref: String, content: String): Future[InterfaceMethodParams] = {
    Future.successful {
      val methods = ArrayBuffer[MethodSignature]()
      content.split("\r\n")
        .filter(!_.startsWith(DubboConfig.DEFAULT_PROMPT))
        .map(signature => {
          val splits = signature.split(" ")
          if (splits.length == 2) {
            val ret = splits(0)
            val secondPart = splits(1)
            val idx = secondPart.indexOf("(")
            val method = secondPart.substring(0, idx)
            val params = secondPart.substring(idx + 1, secondPart.length - 1).split(",")
            methods += MethodSignature(ret, method, params)
          }
        })
      InterfaceMethodParams(ref, methods)
    }
  }

  override def postStop(): Unit = log.debug(s"${self.path} stopped")
}

object InterfaceMethodParamsActor {
  def props(invoker: ActorRef, msg: GetInterfaceMethodParams) = {
    Props(new InterfaceMethodParamsActor(invoker, msg))
  }
}
Example 192
Source File: TelnetClientActor.scala From asura with MIT License | 5 votes |
package asura.dubbo.actor

import java.net.InetSocketAddress

import akka.actor.{ActorRef, Props, Status}
import akka.io.{IO, Tcp}
import akka.util.ByteString
import asura.common.actor.BaseActor
import asura.common.util.LogUtils
import asura.dubbo.DubboConfig

class TelnetClientActor(remote: InetSocketAddress, listener: ActorRef) extends BaseActor {

  import Tcp._
  import context.system

  IO(Tcp) ! Connect(remote)

  override def receive: Receive = {
    case CommandFailed(_: Connect) =>
      listener ! ByteString(s"${TelnetClientActor.MSG_CONNECT_TO} ${remote.getAddress.getHostAddress}:${remote.getPort} ${TelnetClientActor.MSG_FAIL}\r\n")
      context stop self
    case Connected(remote, local) =>
      log.debug(s"local address: ${local}, remote address: ${remote}")
      listener ! ByteString(s"${TelnetClientActor.MSG_CONNECT_TO} ${remote.getAddress.getHostAddress}:${remote.getPort} ${TelnetClientActor.MSG_SUCCESS}\r\n")
      val remoteConnection = sender()
      remoteConnection ! Register(self)
      context.become {
        case data: ByteString =>
          remoteConnection ! Write(data)
        case CommandFailed(_: Write) =>
          listener ! ByteString("write failed\r\n")
        case Received(data) =>
          listener ! data
        case TelnetClientActor.CMD_CLOSE =>
          remoteConnection ! Close
        case _: ConnectionClosed =>
          listener ! ByteString(s"connection to ${remote.getAddress.getHostAddress}:${remote.getPort} closed\r\n")
          context stop self
      }
    case Status.Failure(t) =>
      val stackTrace = LogUtils.stackTraceToString(t)
      log.warning(stackTrace)
      listener ! t.getMessage
      context stop self
  }

  override def postStop(): Unit = log.debug(s"${self.path} stopped")
}

object TelnetClientActor {

  val CMD_CLOSE = "close"
  val MSG_CONNECT_TO = "connect to"
  val MSG_SUCCESS = "success"
  val MSG_FAIL = "fail"

  def props(remote: InetSocketAddress, replies: ActorRef) = {
    Props(new TelnetClientActor(remote, replies))
  }

  def props(address: String, port: Int, replies: ActorRef) = {
    Props(
      new TelnetClientActor(
        new InetSocketAddress(address, if (port > 0) port else DubboConfig.DEFAULT_PORT),
        replies
      )
    )
  }
}
Example 193
Source File: TelnetClientApp.scala From asura with MIT License | 5 votes |
package asura.dubbo.telnet

import akka.actor.{ActorRef, ActorSystem, Props}
import akka.stream.ActorMaterializer
import akka.util.ByteString
import asura.common.actor.BaseActor
import asura.dubbo.actor.TelnetClientActor
import com.typesafe.scalalogging.Logger

object TelnetClientApp {

  val logger = Logger("TelnetClientApp")
  implicit val system = ActorSystem("telnet")
  implicit val ec = system.dispatcher
  implicit val materializer = ActorMaterializer()
  var clientActor: ActorRef = null

  def main(args: Array[String]): Unit = {
    val echoActor = system.actorOf(Props(new Echo()))
    clientActor = system.actorOf(TelnetClientActor.props("127.0.0.1", 20880, echoActor))
  }

  class Echo() extends BaseActor {
    var isLs = false

    override def receive: Receive = {
      case msg: ByteString =>
        log.info(s"from server:${msg.utf8String}")
        if (!isLs) {
          clientActor ! ByteString("ls\r\n")
          isLs = true
        }
    }
  }
}
Example 194
Source File: TelnetEchoApp.scala From asura with MIT License | 5 votes |
package asura.dubbo.telnet

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{Flow, Framing, Tcp}
import akka.util.ByteString
import com.typesafe.scalalogging.Logger

object TelnetEchoApp {

  val logger = Logger("TelnetEchoApp")
  implicit val system = ActorSystem("telnet")
  implicit val ec = system.dispatcher
  implicit val materializer = ActorMaterializer()

  val echo = Flow[ByteString]
    .via(Framing.delimiter(ByteString("\n"), maximumFrameLength = 256, allowTruncation = false))
    .map(_.utf8String)
    .map(txt => {
      logger.info(s"got(${txt.length}):${txt}")
      txt + "\n"
    })
    .map(ByteString(_))

  def main(args: Array[String]): Unit = {
    val connections = Tcp().bind("127.0.0.1", 8888)
    connections runForeach { connection =>
      logger.info(s"New connection from: ${connection.remoteAddress}")
      connection.handleWith(echo)
    }
  }
}
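A quick way to exercise this echo server is a one-shot Akka Streams TCP client. The sketch below is illustrative, not part of the asura sources; it assumes the server above is already listening on 127.0.0.1:8888:

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{Sink, Source, Tcp}
import akka.util.ByteString

object TelnetEchoClientSketch {
  def main(args: Array[String]): Unit = {
    implicit val system = ActorSystem("telnet-echo-client")
    implicit val materializer = ActorMaterializer()
    import system.dispatcher

    Source.single(ByteString("hello echo\n")) // newline-terminated, so the server can frame it
      .via(Tcp().outgoingConnection("127.0.0.1", 8888)) // the address TelnetEchoApp binds
      .runWith(Sink.foreach(bytes => println(bytes.utf8String)))
      .onComplete(_ => system.terminate())
  }
}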
Example 195
Source File: ResponseFormats.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.http.json

import akka.NotUsed
import akka.http.scaladsl.model._
import akka.stream.scaladsl.{Concat, Source, _}
import akka.stream.{FanOutShape2, SourceShape, UniformFanInShape}
import akka.util.ByteString
import com.daml.http.ContractsFetch
import scalaz.syntax.show._
import scalaz.{Show, \/}
import spray.json.DefaultJsonProtocol._
import spray.json._

private[http] object ResponseFormats {
  def errorsJsObject(status: StatusCode, es: String*): JsObject = {
    val errors = es.toJson
    JsObject(statusField(status), ("errors", errors))
  }

  def resultJsObject[A: JsonWriter](a: A): JsObject = {
    resultJsObject(a.toJson)
  }

  def resultJsObject(a: JsValue): JsObject = {
    JsObject(statusField(StatusCodes.OK), ("result", a))
  }

  def resultJsObject[E: Show](
      jsVals: Source[E \/ JsValue, NotUsed],
      warnings: Option[JsValue]): Source[ByteString, NotUsed] = {

    val graph = GraphDSL.create() { implicit b =>
      import GraphDSL.Implicits._

      val partition: FanOutShape2[E \/ JsValue, E, JsValue] = b add ContractsFetch.partition
      val concat: UniformFanInShape[ByteString, ByteString] = b add Concat(3)

      // first produce optional warnings and result element
      warnings match {
        case Some(x) =>
          Source.single(ByteString(s"""{"warnings":${x.compactPrint},"result":[""")) ~> concat.in(0)
        case None =>
          Source.single(ByteString("""{"result":[""")) ~> concat.in(0)
      }

      jsVals ~> partition.in

      // second consume all successes
      partition.out1.zipWithIndex.map(a => formatOneElement(a._1, a._2)) ~> concat.in(1)

      // then consume all failures and produce the status and optional errors
      partition.out0.fold(Vector.empty[E])((b, a) => b :+ a).map {
        case Vector() => ByteString("""],"status":200}""")
        case errors =>
          val jsErrors: Vector[JsString] = errors.map(e => JsString(e.shows))
          ByteString(s"""],"errors":${JsArray(jsErrors).compactPrint},"status":501}""")
      } ~> concat.in(2)

      SourceShape(concat.out)
    }

    Source.fromGraph(graph)
  }

  private def formatOneElement(a: JsValue, index: Long): ByteString = {
    if (index == 0L) ByteString(a.compactPrint)
    else ByteString("," + a.compactPrint)
  }

  def statusField(status: StatusCode): (String, JsNumber) =
    ("status", JsNumber(status.intValue()))
}
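A caller typically folds the streamed ByteString chunks back into one JSON document. The sketch below is illustrative only (resultJsObject is private[http], so a real caller lives inside com.daml.http; the String error type and its scalaz Show instance are stand-ins for whatever the surrounding pipeline uses):

import akka.NotUsed
import akka.actor.ActorSystem
import akka.stream.Materializer
import akka.stream.scaladsl.Source
import akka.util.ByteString
import scalaz.{\/, \/-}
import scalaz.std.string._ // Show[String] stand-in for the real error type
import spray.json.{JsNumber, JsObject, JsValue}
import scala.concurrent.Future

object ResponseFormatsSketch {
  def main(args: Array[String]): Unit = {
    implicit val system: ActorSystem = ActorSystem("sketch")
    implicit val mat: Materializer = Materializer(system)
    import system.dispatcher

    val jsVals: Source[String \/ JsValue, NotUsed] =
      Source.single(\/-(JsObject("a" -> JsNumber(1))))

    val rendered: Future[String] = ResponseFormats
      .resultJsObject(jsVals, None)
      .runFold(ByteString.empty)(_ ++ _)
      .map(_.utf8String) // yields {"result":[{"a":1}],"status":200}

    rendered.foreach { s => println(s); system.terminate() }
  }
}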
Example 196
Source File: PackageManagementService.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.http

import akka.NotUsed
import akka.stream.Materializer
import akka.stream.scaladsl.Source
import akka.util.ByteString
import com.daml.http.util.ProtobufByteStrings
import com.daml.jwt.domain.Jwt

import scala.concurrent.{ExecutionContext, Future}

class PackageManagementService(
    listKnownPackagesFn: LedgerClientJwt.ListPackages,
    getPackageFn: LedgerClientJwt.GetPackage,
    uploadDarFileFn: LedgerClientJwt.UploadDarFile,
)(implicit ec: ExecutionContext, mat: Materializer) {

  def listPackages(jwt: Jwt): Future[Seq[String]] =
    listKnownPackagesFn(jwt).map(_.packageIds)

  def getPackage(jwt: Jwt, packageId: String): Future[admin.GetPackageResponse] =
    getPackageFn(jwt, packageId).map(admin.GetPackageResponse.fromLedgerApi)

  def uploadDarFile(jwt: Jwt, source: Source[ByteString, NotUsed]): Future[Unit] =
    uploadDarFileFn(jwt, ProtobufByteStrings.readFrom(source))
}
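To feed uploadDarFile from disk, the Future[IOResult] that FileIO materializes has to be discarded to match the Source[ByteString, NotUsed] signature. A hypothetical call site (the file name, object, and method are invented for illustration):

import java.nio.file.Paths

import akka.NotUsed
import akka.stream.scaladsl.{FileIO, Source}
import akka.util.ByteString
import com.daml.jwt.domain.Jwt

import scala.concurrent.Future

object UploadSketch {
  def uploadFromDisk(service: PackageManagementService, jwt: Jwt): Future[Unit] = {
    // FileIO.fromPath materializes Future[IOResult]; drop it to get NotUsed
    val dar: Source[ByteString, NotUsed] =
      FileIO.fromPath(Paths.get("my.dar")).mapMaterializedValue(_ => NotUsed)
    service.uploadDarFile(jwt, dar)
  }
}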
Example 197
Source File: AkkaTest.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.ledger.client.testing

import java.util
import java.util.concurrent.{Executors, ScheduledExecutorService}

import akka.NotUsed
import akka.actor.{ActorSystem, Scheduler}
import akka.stream.scaladsl.{Sink, Source}
import akka.stream.Materializer
import akka.util.ByteString
import com.daml.grpc.adapter.{ExecutionSequencerFactory, SingleThreadExecutionSequencerPool}
import com.typesafe.config.{Config, ConfigFactory, ConfigValueFactory}
import com.typesafe.scalalogging.LazyLogging
import org.scalatest.{BeforeAndAfterAll, Suite}

import scala.concurrent.duration._
import scala.concurrent.{Await, ExecutionContextExecutor, Future}
import scala.util.control.NonFatal

trait AkkaTest extends BeforeAndAfterAll with LazyLogging { self: Suite =>

  // TestEventListener is needed for log testing
  private val loggers =
    util.Arrays.asList("akka.event.slf4j.Slf4jLogger", "akka.testkit.TestEventListener")

  protected implicit val sysConfig: Config = ConfigFactory
    .load()
    .withValue("akka.loggers", ConfigValueFactory.fromIterable(loggers))
    .withValue("akka.logger-startup-timeout", ConfigValueFactory.fromAnyRef("30s"))
    .withValue("akka.stdout-loglevel", ConfigValueFactory.fromAnyRef("INFO"))

  protected implicit val system: ActorSystem = ActorSystem("test", sysConfig)
  protected implicit val ec: ExecutionContextExecutor =
    system.dispatchers.lookup("test-dispatcher")
  protected implicit val scheduler: Scheduler = system.scheduler
  protected implicit val schedulerService: ScheduledExecutorService =
    Executors.newSingleThreadScheduledExecutor()
  protected implicit val materializer: Materializer = Materializer(system)
  protected implicit val esf: ExecutionSequencerFactory =
    new SingleThreadExecutionSequencerPool("testSequencerPool")

  protected val timeout: FiniteDuration = 2.minutes
  protected val shortTimeout: FiniteDuration = 5.seconds

  protected def await[T](fun: => Future[T]): T = Await.result(fun, timeout)

  protected def awaitShort[T](fun: => Future[T]): T = Await.result(fun, shortTimeout)

  protected def drain(source: Source[ByteString, NotUsed]): ByteString = {
    val futureResult: Future[ByteString] = source.runFold(ByteString.empty) { (a, b) =>
      a.concat(b)
    }
    awaitShort(futureResult)
  }

  protected def drain[A, B](source: Source[A, B]): Seq[A] = {
    val futureResult: Future[Seq[A]] = source.runWith(Sink.seq)
    awaitShort(futureResult)
  }

  override protected def afterAll(): Unit = {
    try {
      val _ = await(system.terminate())
    } catch {
      case NonFatal(_) => ()
    }
    schedulerService.shutdownNow()
    super.afterAll()
  }
}
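An illustrative suite (not from the daml sources) exercising the ByteString overload of drain defined above; any scalatest style that is also a Suite works here:

import akka.stream.scaladsl.Source
import akka.util.ByteString
import org.scalatest.WordSpec

class DrainExampleSpec extends WordSpec with AkkaTest {
  "drain" should {
    "concatenate all ByteString chunks of a source" in {
      // the monomorphic overload wins resolution and returns a single ByteString
      val bytes: ByteString = drain(Source(List(ByteString("foo"), ByteString("bar"))))
      assert(bytes.utf8String == "foobar")
    }
  }
}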
Example 198
Source File: ChaosActorInterface.scala From eventuate-chaos with Apache License 2.0 | 5 votes |
package com.rbmhtechnology.eventuate.chaos

import akka.actor.ActorRef
import akka.io.Tcp
import akka.pattern.ask
import akka.util.ByteString
import akka.util.Timeout
import com.rbmhtechnology.eventuate.chaos.ChaosActorInterface.HealthCheck
import com.rbmhtechnology.eventuate.chaos.ChaosActorInterface.HealthCheckResult

import scala.concurrent.duration._
import scala.util.Failure
import scala.util.Success

object ChaosActorInterface {
  case class HealthCheck(requester: ActorRef)
  case class HealthCheckResult(state: Int, requester: ActorRef)
}

class ChaosActorInterface(chaosActor: ActorRef) extends ChaosInterface {
  implicit val timeout = Timeout(1.second)

  def handleCommand = {
    case ("persist", None, recv) =>
      val check = HealthCheck(recv)

      (chaosActor ? check).mapTo[HealthCheckResult] onComplete {
        case Success(result) =>
          result.requester ! Tcp.Write(ByteString(result.state.toString))
          result.requester ! Tcp.Close
        case Failure(e) =>
          recv ! Tcp.Close
      }
  }
}
Example 199
Source File: ChaosInterface.scala From eventuate-chaos with Apache License 2.0 | 5 votes |
package com.rbmhtechnology.eventuate.chaos

import java.net.InetSocketAddress

import akka.actor.Actor
import akka.actor.ActorLogging
import akka.actor.ActorRef
import akka.io.IO
import akka.io.Tcp
import akka.util.ByteString

abstract class ChaosInterface extends Actor with ActorLogging {

  val port = 8080
  val endpoint = new InetSocketAddress(port)
  val command = """(?s)(\w+)\s+(\d+).*""".r

  implicit val ec = context.dispatcher

  IO(Tcp)(context.system) ! Tcp.Bind(self, endpoint)
  println(s"Now listening on port $port")

  def handleCommand: PartialFunction[(String, Option[Int], ActorRef), Unit]

  protected def reply(message: String, receiver: ActorRef) = {
    receiver ! Tcp.Write(ByteString(message))
    receiver ! Tcp.Close
  }

  protected def closeOnError(receiver: ActorRef): PartialFunction[Throwable, Unit] = {
    case err: Throwable => receiver ! Tcp.Close
  }

  def receive: Receive = {
    case Tcp.Connected(remote, _) =>
      sender ! Tcp.Register(self)
    case Tcp.Received(bs) =>
      val content = bs.utf8String
      content match {
        case command(c, value) if handleCommand.isDefinedAt(c, Some(value.toInt), sender) =>
          handleCommand(c, Some(value.toInt), sender)
        case c if c.startsWith("quit") =>
          context.system.terminate()
        case c if handleCommand.isDefinedAt(c, None, sender) =>
          handleCommand(c, None, sender)
        case _ =>
          sender ! Tcp.Close
      }
    case Tcp.Closed =>
    case Tcp.PeerClosed =>
  }
}
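Since the interface speaks plain TCP, it can be poked with a bare socket. A hypothetical probe (not part of the eventuate-chaos sources) for the persist command that ChaosActorInterface handles above; note the payload carries no trailing newline, because handleCommand matches the exact string:

import java.net.Socket
import scala.io.Source

object ChaosProbe {
  def main(args: Array[String]): Unit = {
    val socket = new Socket("127.0.0.1", 8080) // the port ChaosInterface binds
    try {
      socket.getOutputStream.write("persist".getBytes("UTF-8"))
      socket.getOutputStream.flush()
      // the interface replies with the state and then closes, so read until EOF
      print(Source.fromInputStream(socket.getInputStream, "UTF-8").mkString)
    } finally socket.close()
  }
}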
Example 200
Source File: TransferTransformFile.scala From streams-tests with Apache License 2.0 | 5 votes |
package com.softwaremill.streams

import java.io.File

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.io.Framing
import akka.stream.scaladsl.{FileIO, Keep}
import akka.util.ByteString
import com.softwaremill.streams.util.TestFiles
import com.softwaremill.streams.util.Timed._

import scala.concurrent.{Await, Future}
import scala.concurrent.duration._
import scalaz.stream.{io, text}

trait TransferTransformFile {
  def run(from: File, to: File): Long
}

object AkkaStreamsTransferTransformFile extends TransferTransformFile {
  private lazy implicit val system = ActorSystem()

  override def run(from: File, to: File) = {
    implicit val mat = ActorMaterializer()

    val r: Future[Long] = FileIO.fromFile(from)
      .via(Framing.delimiter(ByteString("\n"), 1048576))
      .map(_.utf8String)
      .filter(!_.contains("#!@"))
      .map(_.replace("*", "0"))
      .intersperse("\n")
      .map(ByteString(_))
      .toMat(FileIO.toFile(to))(Keep.right)
      .run()

    Await.result(r, 1.hour)
  }

  def shutdown() = {
    system.terminate()
  }
}

object ScalazStreamsTransferTransformFile extends TransferTransformFile {
  override def run(from: File, to: File) = {
    io.linesR(from.getAbsolutePath)
      .filter(!_.contains("#!@"))
      .map(_.replace("*", "0"))
      .intersperse("\n")
      .pipe(text.utf8Encode)
      .to(io.fileChunkW(to.getAbsolutePath))
      .run
      .run

    to.length()
  }
}

object TransferTransformFileRunner extends App {
  def runTransfer(ttf: TransferTransformFile, sizeMB: Int): String = {
    val output = File.createTempFile("fft", "txt")
    try {
      ttf.run(TestFiles.testFile(sizeMB), output).toString
    } finally output.delete()
  }

  val tests = List(
    (ScalazStreamsTransferTransformFile, 10),
    (ScalazStreamsTransferTransformFile, 100),
    (ScalazStreamsTransferTransformFile, 500),
    (AkkaStreamsTransferTransformFile, 10),
    (AkkaStreamsTransferTransformFile, 100),
    (AkkaStreamsTransferTransformFile, 500)
  )

  runTests(tests.map { case (ttf, sizeMB) =>
    (s"${if (ttf == ScalazStreamsTransferTransformFile) "scalaz" else "akka"}, $sizeMB MB",
      () => runTransfer(ttf, sizeMB))
  }, 3)

  AkkaStreamsTransferTransformFile.shutdown()
}
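The Framing.delimiter step is the ByteString-specific heart of the Akka variant: raw file chunks arrive at arbitrary boundaries and are re-cut into lines. A standalone sketch of the same chunk-to-line framing, using the current akka.stream.scaladsl.Framing location rather than the older akka.stream.io one imported above:

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{Framing, Sink, Source}
import akka.util.ByteString

object FramingSketch {
  def main(args: Array[String]): Unit = {
    implicit val system = ActorSystem("framing-sketch")
    implicit val mat = ActorMaterializer()
    import system.dispatcher

    Source.single(ByteString("a*b\nc#!@d\ne*f\n"))
      .via(Framing.delimiter(ByteString("\n"), maximumFrameLength = 1024))
      .map(_.utf8String)          // frames: "a*b", "c#!@d", "e*f"
      .filter(!_.contains("#!@")) // drops the middle line
      .map(_.replace("*", "0"))
      .runWith(Sink.foreach(println)) // prints a0b and e0f
      .onComplete(_ => system.terminate())
  }
}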