akka.stream.IOResult Scala Examples
The following examples show how to use akka.stream.IOResult. IOResult is the value materialized by Akka Streams' file and blocking-stream I/O stages (FileIO, StreamConverters): its count field reports the number of bytes transferred, and, depending on the Akka version, I/O failures surface either by failing the materialized Future[IOResult] or through the result's status field. Each example notes the project it comes from and its license.
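As background for all of the examples: FileIO sources and sinks (and StreamConverters) materialize a Future[IOResult] that completes when the underlying I/O finishes. Here is a minimal, self-contained sketch of that mechanism, assuming Akka 2.6+ (where the implicit ActorSystem provides the materializer); the file name demo.txt is hypothetical:

import java.nio.file.Paths

import akka.actor.ActorSystem
import akka.stream.IOResult
import akka.stream.scaladsl.{ FileIO, Source }
import akka.util.ByteString

import scala.concurrent.Future
import scala.util.{ Failure, Success }

object IOResultDemo extends App {
  implicit val system = ActorSystem("IOResultDemo")
  implicit val ec = system.dispatcher

  // FileIO.toPath materializes a Future[IOResult] carrying the byte count.
  val done: Future[IOResult] =
    Source(List("a", "b", "c"))
      .map(line => ByteString(line + "\n"))
      .runWith(FileIO.toPath(Paths.get("demo.txt")))

  done.onComplete {
    case Success(result) => println(s"Wrote ${result.count} bytes"); system.terminate()
    case Failure(e)      => println(s"I/O failed: $e"); system.terminate()
  }
}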
Example 1
Source File: ClasspathResources.scala From intro-to-akka-streams with Apache License 2.0
package com.github.dnvriend.streams.util

import java.io.InputStream

import akka.NotUsed
import akka.stream.IOResult
import akka.stream.scaladsl.{ Source, StreamConverters }
import akka.util.ByteString

import scala.concurrent.Future
import scala.io.{ Source ⇒ ScalaIOSource }
import scala.util.Try
import scala.xml.pull.{ XMLEvent, XMLEventReader }

trait ClasspathResources {
  def withInputStream[T](fileName: String)(f: InputStream ⇒ T): T = {
    val is = fromClasspathAsStream(fileName)
    try {
      f(is)
    } finally {
      Try(is.close())
    }
  }

  def withXMLEventReader[T](fileName: String)(f: XMLEventReader ⇒ T): T =
    withInputStream(fileName) { is ⇒
      f(new XMLEventReader(ScalaIOSource.fromInputStream(is)))
    }

  def withXMLEventSource[T](fileName: String)(f: Source[XMLEvent, NotUsed] ⇒ T): T =
    withXMLEventReader(fileName) { reader ⇒
      f(Source.fromIterator(() ⇒ reader))
    }

  def withByteStringSource[T](fileName: String)(f: Source[ByteString, Future[IOResult]] ⇒ T): T =
    withInputStream(fileName) { inputStream ⇒
      f(StreamConverters.fromInputStream(() ⇒ inputStream))
    }

  def streamToString(is: InputStream): String =
    ScalaIOSource.fromInputStream(is).mkString

  def fromClasspathAsString(fileName: String): String =
    streamToString(fromClasspathAsStream(fileName))

  def fromClasspathAsStream(fileName: String): InputStream =
    getClass.getClassLoader.getResourceAsStream(fileName)
}
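A hedged usage sketch of the trait above: withByteStringSource hands you a Source[ByteString, Future[IOResult]] backed by a classpath resource. The resource name test.txt is hypothetical, and Akka 2.6+ is assumed.

import akka.actor.ActorSystem

object ClasspathResourcesDemo extends App with ClasspathResources {
  implicit val system = ActorSystem("ClasspathResourcesDemo")
  implicit val ec = system.dispatcher

  // "test.txt" stands in for any resource on the classpath.
  withByteStringSource("test.txt") { source =>
    source.runFold(0L)(_ + _.size).foreach { bytes =>
      println(s"Streamed $bytes bytes from the classpath")
      system.terminate()
    }
  }
}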
Example 2
Source File: SplitWhen.scala From akka_streams_tutorial with MIT License
package sample.stream_shared_state

import java.nio.file.Paths

import akka.NotUsed
import akka.actor.ActorSystem
import akka.stream.IOResult
import akka.stream.scaladsl.{FileIO, Flow, Framing, Keep, Sink, Source}
import akka.util.ByteString
import org.slf4j.{Logger, LoggerFactory}

import scala.concurrent.Future
import scala.util.{Failure, Success}

object SplitWhen extends App {
  val logger: Logger = LoggerFactory.getLogger(this.getClass)
  implicit val system = ActorSystem("SplitWhen")
  implicit val executionContext = system.dispatcher

  val nonLinearCapacityFactor = 100 // raise to see how it scales
  val filename = "splitWhen.csv"

  def genResourceFile() = {
    logger.info(s"Writing resource file: $filename...")

    def fileSink(filename: String): Sink[String, Future[IOResult]] =
      Flow[String]
        .map(s => ByteString(s + "\n"))
        .toMat(FileIO.toPath(Paths.get(filename)))(Keep.right)

    Source.fromIterator(() => (1 to nonLinearCapacityFactor).toList.combinations(2))
      .map(each => s"${each.head},${each.last}")
      .runWith(fileSink(filename))
  }

  val sourceOfLines = FileIO.fromPath(Paths.get(filename))
    .via(Framing.delimiter(ByteString("\n"), maximumFrameLength = 1024, allowTruncation = true)
      .map(_.utf8String))

  val csvToRecord: Flow[String, Record, NotUsed] = Flow[String]
    .map(_.split(",").map(_.trim))
    .map(stringArrayToRecord)

  val terminationHook: Flow[Record, Record, Unit] = Flow[Record]
    .watchTermination() { (_, done) =>
      done.onComplete {
        case Failure(err) => logger.info(s"Flow failed: $err")
        case _            => system.terminate(); logger.info(s"Flow terminated")
      }
    }

  val printSink = Sink.foreach[Vector[Record]](each => println(s"Reached sink: $each"))

  private def stringArrayToRecord(cols: Array[String]) = Record(cols(0), cols(1))

  private def hasKeyChanged = {
    () => {
      var lastRecordKey: Option[String] = None

      currentRecord: Record =>
        lastRecordKey match {
          case Some(currentRecord.key) | None =>
            lastRecordKey = Some(currentRecord.key)
            List((currentRecord, false))
          case _ =>
            lastRecordKey = Some(currentRecord.key)
            List((currentRecord, true))
        }
    }
  }

  genResourceFile().onComplete {
    case Success(_) =>
      logger.info(s"Start processing...")
      sourceOfLines
        .via(csvToRecord)
        .via(terminationHook)
        .statefulMapConcat(hasKeyChanged)   // stateful decision
        .splitWhen(_._2)                    // split when key has changed
        .map(_._1)                          // proceed with payload
        .fold(Vector.empty[Record])(_ :+ _) // sum payload
        .mergeSubstreams                    // better performance, but why?
        .runWith(printSink)
    case Failure(exception) => logger.info(s"Exception: $exception")
  }

  case class Record(key: String, value: String)
}
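To see the statefulMapConcat/splitWhen combination in isolation, here is a minimal sketch with made-up values: each element is paired with a flag marking a key change, splitWhen opens a new substream whenever the flag is true, and mergeSubstreams recombines the folded groups.

import akka.actor.ActorSystem
import akka.stream.scaladsl.Source

object SplitWhenMini extends App {
  implicit val system = ActorSystem("SplitWhenMini")
  import system.dispatcher

  Source(List(1, 1, 2, 2, 2, 3))
    .statefulMapConcat { () =>
      var last: Option[Int] = None
      elem => {
        val changed = last.exists(_ != elem)
        last = Some(elem)
        List((elem, changed))
      }
    }
    .splitWhen(_._2)                 // new substream when the value changes
    .map(_._1)
    .fold(Vector.empty[Int])(_ :+ _) // collect each group
    .mergeSubstreams
    .runForeach(println)             // Vector(1, 1), Vector(2, 2, 2), Vector(3)
    .onComplete(_ => system.terminate())
}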
Example 3
Source File: FileIOEcho.scala From akka_streams_tutorial with MIT License
package alpakka.file

import java.nio.file.Paths

import akka.actor.ActorSystem
import akka.stream.IOResult
import akka.stream.scaladsl.FileIO

import scala.concurrent.Future
import scala.util.{Failure, Success}

object FileIOEcho extends App {
  implicit val system = ActorSystem("FileIOEcho")
  implicit val executionContext = system.dispatcher

  val sourceFileName = "./src/main/resources/testfile.jpg"
  val encFileName = "testfile.enc"
  val resultFileName = "testfile_result.jpg"

  val sourceOrig = FileIO.fromPath(Paths.get(sourceFileName), 3000)
  val sinkEnc = FileIO.toPath(Paths.get(encFileName))

  val doneEnc = sourceOrig
    //.wireTap(each => println(s"Chunk enc: $each"))
    .map(each => each.encodeBase64)
    .runWith(sinkEnc)

  doneEnc.onComplete {
    case Success(_) =>
      val sourceEnc = FileIO.fromPath(Paths.get(encFileName))
      val sinkDec = FileIO.toPath(Paths.get(resultFileName))

      val doneDec = sourceEnc
        //.wireTap(each => println(s"Chunk dec: $each"))
        .map(each => each.decodeBase64)
        .runWith(sinkDec)
      terminateWhen(doneDec)
    case Failure(ex) => println(s"Exception: $ex")
  }

  def terminateWhen(done: Future[IOResult]) = {
    done.onComplete {
      case Success(_) =>
        println(s"Flow Success. Written file: $resultFileName About to terminate...")
        system.terminate()
      case Failure(e) =>
        println(s"Flow Failure: $e. About to terminate...")
        system.terminate()
    }
  }
}
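A detail worth noting: the read chunk size of 3000 bytes is a multiple of 3, so encoding each chunk with encodeBase64 independently and concatenating the results is equivalent to encoding the whole file at once (base64 packs 3 input bytes into 4 output characters). Likewise, the second FileIO.fromPath uses the default 8192-byte chunks, a multiple of 4, so each chunk decodes cleanly on its own. With chunk sizes that do not line up this way, per-chunk encode/decode would corrupt the data.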
Example 4
Source File: ClasspathResources.scala From reactive-activemq with Apache License 2.0
package akka.stream.integration

import java.io.InputStream

import akka.stream.IOResult
import akka.stream.scaladsl.{ Source, StreamConverters }
import akka.util.ByteString

import scala.concurrent.Future
import scala.io.{ Source => ScalaIOSource }

trait ClasspathResources {

  def withInputStream[T](fileName: String)(f: InputStream => T): T = {
    val is: InputStream = fromClasspathAsStream(fileName)
    try f(is) finally is.close()
  }

  def withInputStreamAsText[T](fileName: String)(f: String => T): T =
    f(fromClasspathAsString(fileName))

  def withByteStringSource[T](fileName: String)(f: Source[ByteString, Future[IOResult]] => T): T =
    withInputStream(fileName) { inputStream =>
      f(StreamConverters.fromInputStream(() => inputStream))
    }

  def streamToString(is: InputStream): String =
    ScalaIOSource.fromInputStream(is).mkString

  def fromClasspathAsString(fileName: String): String =
    streamToString(fromClasspathAsStream(fileName))

  def fromClasspathAsStream(fileName: String): InputStream =
    getClass.getClassLoader.getResourceAsStream(fileName)
}
Example 5
Source File: ResumingEventFilter.scala From 006877 with MIT License
package aia.stream

import java.nio.file.{ Path, Paths }
import java.nio.file.StandardOpenOption
import java.nio.file.StandardOpenOption._

import scala.concurrent.Future

import akka.NotUsed
import akka.actor.ActorSystem
import akka.stream.{ ActorMaterializer, IOResult }
import akka.util.ByteString

import spray.json._
import com.typesafe.config.{ Config, ConfigFactory }

object ResumingEventFilter extends App with EventMarshalling {
  val config = ConfigFactory.load()
  val maxLine = config.getInt("log-stream-processor.max-line")

  if(args.length != 3) {
    System.err.println("Provide args: input-file output-file state")
    System.exit(1)
  }

  val inputFile = FileArg.shellExpanded(args(0))
  val outputFile = FileArg.shellExpanded(args(1))
  val filterState = args(2) match {
    case State(state) => state
    case unknown =>
      System.err.println(s"Unknown state $unknown, exiting.")
      System.exit(1)
  }

  import akka.stream.scaladsl._

  val source: Source[ByteString, Future[IOResult]] =
    FileIO.fromPath(inputFile)

  val sink: Sink[ByteString, Future[IOResult]] =
    FileIO.toPath(outputFile, Set(CREATE, WRITE, APPEND))

  val frame: Flow[ByteString, String, NotUsed] =
    Framing.delimiter(ByteString("\n"), maxLine)
      .map(_.decodeString("UTF8"))

  import akka.stream.ActorAttributes
  import akka.stream.Supervision

  import LogStreamProcessor.LogParseException

  val decider : Supervision.Decider = {
    case _: LogParseException => Supervision.Resume
    case _                    => Supervision.Stop
  }

  val parse: Flow[String, Event, NotUsed] =
    Flow[String].map(LogStreamProcessor.parseLineEx)
      .collect { case Some(e) => e }
      .withAttributes(ActorAttributes.supervisionStrategy(decider))

  val filter: Flow[Event, Event, NotUsed] =
    Flow[Event].filter(_.state == filterState)

  val serialize: Flow[Event, ByteString, NotUsed] =
    Flow[Event].map(event => ByteString(event.toJson.compactPrint))

  implicit val system = ActorSystem()
  implicit val ec = system.dispatcher

  val graphDecider : Supervision.Decider = {
    case _: LogParseException => Supervision.Resume
    case _                    => Supervision.Stop
  }

  import akka.stream.ActorMaterializerSettings
  implicit val materializer = ActorMaterializer(
    ActorMaterializerSettings(system)
      .withSupervisionStrategy(graphDecider)
  )

  val composedFlow: Flow[ByteString, ByteString, NotUsed] =
    frame.via(parse)
      .via(filter)
      .via(serialize)

  val runnableGraph: RunnableGraph[Future[IOResult]] =
    source.via(composedFlow).toMat(sink)(Keep.right)

  runnableGraph.run().foreach { result =>
    println(s"Wrote ${result.count} bytes to '$outputFile'.")
    system.terminate()
  }
}
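The Supervision.Decider above resumes the stream on LogParseException instead of failing it, dropping the offending element. A minimal standalone sketch of the same mechanism, using NumberFormatException for illustration:

import akka.actor.ActorSystem
import akka.stream.{ ActorAttributes, Supervision }
import akka.stream.scaladsl.Source

object ResumeDemo extends App {
  implicit val system = ActorSystem("ResumeDemo")

  val decider: Supervision.Decider = {
    case _: NumberFormatException => Supervision.Resume // skip the bad element
    case _                        => Supervision.Stop
  }

  Source(List("1", "two", "3"))
    .map(_.toInt)
    .withAttributes(ActorAttributes.supervisionStrategy(decider))
    .runForeach(println) // prints 1 and 3; "two" is dropped
}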
Example 6
Source File: BidiEventFilter.scala From 006877 with MIT License
package aia.stream

import java.nio.file.{ Path, Paths }
import java.nio.file.StandardOpenOption
import java.nio.file.StandardOpenOption._

import scala.concurrent.Future

import akka.NotUsed
import akka.actor.ActorSystem
import akka.stream.{ ActorMaterializer, IOResult }
import akka.stream.scaladsl._
import akka.stream.scaladsl.JsonFraming
import akka.util.ByteString

import spray.json._
import com.typesafe.config.{ Config, ConfigFactory }

object BidiEventFilter extends App with EventMarshalling {
  val config = ConfigFactory.load()
  val maxLine = config.getInt("log-stream-processor.max-line")
  val maxJsonObject = config.getInt("log-stream-processor.max-json-object")

  if(args.length != 5) {
    System.err.println("Provide args: input-format output-format input-file output-file state")
    System.exit(1)
  }

  val inputFile = FileArg.shellExpanded(args(2))
  val outputFile = FileArg.shellExpanded(args(3))

  val filterState = args(4) match {
    case State(state) => state
    case unknown =>
      System.err.println(s"Unknown state $unknown, exiting.")
      System.exit(1)
  }

  val inFlow: Flow[ByteString, Event, NotUsed] =
    if(args(0).toLowerCase == "json") {
      JsonFraming.objectScanner(maxJsonObject)
        .map(_.decodeString("UTF8").parseJson.convertTo[Event])
    } else {
      Framing.delimiter(ByteString("\n"), maxLine)
        .map(_.decodeString("UTF8"))
        .map(LogStreamProcessor.parseLineEx)
        .collect { case Some(event) => event }
    }

  val outFlow: Flow[Event, ByteString, NotUsed] =
    if(args(1).toLowerCase == "json") {
      Flow[Event].map(event => ByteString(event.toJson.compactPrint))
    } else {
      Flow[Event].map { event =>
        ByteString(LogStreamProcessor.logLine(event))
      }
    }

  val bidiFlow = BidiFlow.fromFlows(inFlow, outFlow)

  val source: Source[ByteString, Future[IOResult]] =
    FileIO.fromPath(inputFile)

  val sink: Sink[ByteString, Future[IOResult]] =
    FileIO.toPath(outputFile, Set(CREATE, WRITE, APPEND))

  val filter: Flow[Event, Event, NotUsed] =
    Flow[Event].filter(_.state == filterState)

  val flow = bidiFlow.join(filter)

  val runnableGraph: RunnableGraph[Future[IOResult]] =
    source.via(flow).toMat(sink)(Keep.right)

  implicit val system = ActorSystem()
  implicit val ec = system.dispatcher
  implicit val materializer = ActorMaterializer()

  runnableGraph.run().foreach { result =>
    println(s"Wrote ${result.count} bytes to '$outputFile'.")
    system.terminate()
  }
}
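The bidiFlow.join(filter) call wraps the Event-level filter in the byte-level codec, producing a plain ByteString-to-ByteString Flow. A minimal sketch of the same shape with a toy String/Int codec (all names are made up):

import akka.NotUsed
import akka.stream.scaladsl.{ BidiFlow, Flow }

object JoinDemo {
  // Decode Strings to Ints on the way in, encode Ints back to Strings on the way out.
  val codec: BidiFlow[String, Int, Int, String, NotUsed] =
    BidiFlow.fromFlows(Flow[String].map(_.toInt), Flow[Int].map(_.toString))

  val doubler: Flow[Int, Int, NotUsed] = Flow[Int].map(_ * 2)

  // join plugs the Int flow into the middle: String => Int => Int => String
  val doubleStrings: Flow[String, String, NotUsed] = codec.join(doubler)
}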
Example 7
Source File: GenerateLogFile.scala From 006877 with MIT License
package aia.stream

import java.nio.file.{ Path, Paths }
import java.nio.file.StandardOpenOption
import java.nio.file.StandardOpenOption._
import java.time.ZonedDateTime
import java.time.format.DateTimeFormatter

import scala.concurrent.Future

import akka.actor.ActorSystem
import akka.stream.{ ActorMaterializer, IOResult }
import akka.stream.scaladsl._
import akka.util.ByteString

object GenerateLogFile extends App {
  val filePath = args(0)
  val numberOfLines = args(1).toInt
  val rnd = new java.util.Random()
  val sink = FileIO.toPath(FileArg.shellExpanded(filePath), Set(CREATE, WRITE, APPEND))

  def line(i: Int) = {
    val host = "my-host"
    val service = "my-service"
    val time = ZonedDateTime.now.format(DateTimeFormatter.ISO_INSTANT)
    val state =
      if( i % 10 == 0) "warning"
      else if(i % 101 == 0) "error"
      else if(i % 1002 == 0) "critical"
      else "ok"
    val description = "Some description of what has happened."
    val tag = "tag"
    val metric = rnd.nextDouble() * 100
    s"$host | $service | $state | $time | $description | $tag | $metric \n"
  }

  val graph = Source.fromIterator { () =>
    Iterator.tabulate(numberOfLines)(line)
  }.map(l => ByteString(l)).toMat(sink)(Keep.right)

  implicit val system = ActorSystem()
  implicit val ec = system.dispatcher
  implicit val materializer = ActorMaterializer()

  graph.run().foreach { result =>
    println(s"Wrote ${result.count} bytes to '$filePath'.")
    system.terminate()
  }
}
Example 8
Source File: Event.scala From 006877 with MIT License
package aia.stream

import java.io.File
import java.time.ZonedDateTime

import scala.concurrent.Future

import akka.NotUsed
import akka.util.ByteString
import akka.stream.IOResult
import akka.stream.scaladsl.{ Source, FileIO, Framing }

import scala.concurrent.duration.FiniteDuration

case class Event(
  host: String,
  service: String,
  state: State,
  time: ZonedDateTime,
  description: String,
  tag: Option[String] = None,
  metric: Option[Double] = None
)

sealed trait State
case object Critical extends State
case object Error extends State
case object Ok extends State
case object Warning extends State

object State {
  def norm(str: String): String = str.toLowerCase
  def norm(state: State): String = norm(state.toString)

  val ok = norm(Ok)
  val warning = norm(Warning)
  val error = norm(Error)
  val critical = norm(Critical)

  def unapply(str: String): Option[State] = {
    val normalized = norm(str)
    if(normalized == norm(Ok)) Some(Ok)
    else if(normalized == norm(Warning)) Some(Warning)
    else if(normalized == norm(Error)) Some(Error)
    else if(normalized == norm(Critical)) Some(Critical)
    else None
  }
}

case class LogReceipt(logId: String, written: Long)
case class ParseError(logId: String, msg: String)
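The State.unapply above lets the companion object be used directly as a pattern, which is how ResumingEventFilter and BidiEventFilter parse their state argument. A quick sketch:

"WARNING" match {
  case State(state) => println(s"Parsed state: $state") // Parsed state: Warning
  case _            => println("Unknown state")
}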
Example 9
Source File: LogJson.scala From 006877 with MIT License
package aia.stream

import java.nio.file.{ Files, Path }
import java.io.File
import java.time.ZonedDateTime

import scala.concurrent.duration._
import scala.concurrent.ExecutionContext
import scala.concurrent.Future
import scala.util.{ Success, Failure }

import akka.Done
import akka.actor._
import akka.util.ByteString

import akka.stream.{ ActorAttributes, ActorMaterializer, IOResult }
import akka.stream.scaladsl.JsonFraming
import akka.stream.scaladsl.{ FileIO, BidiFlow, Flow, Framing, Keep, Sink, Source }

import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport._
import akka.http.scaladsl.marshalling.Marshal
import akka.http.scaladsl.model._
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server._

import spray.json._

object LogJson extends EventMarshalling
    with NotificationMarshalling
    with MetricMarshalling {

  def textInFlow(maxLine: Int) = {
    Framing.delimiter(ByteString("\n"), maxLine)
      .map(_.decodeString("UTF8"))
      .map(LogStreamProcessor.parseLineEx)
      .collect { case Some(e) => e }
  }

  def jsonInFlow(maxJsonObject: Int) = {
    JsonFraming.objectScanner(maxJsonObject)
      .map(_.decodeString("UTF8").parseJson.convertTo[Event])
  }

  def jsonFramed(maxJsonObject: Int) =
    JsonFraming.objectScanner(maxJsonObject)

  val jsonOutFlow = Flow[Event].map { event =>
    ByteString(event.toJson.compactPrint)
  }

  val notifyOutFlow = Flow[Summary].map { ws =>
    ByteString(ws.toJson.compactPrint)
  }

  val metricOutFlow = Flow[Metric].map { m =>
    ByteString(m.toJson.compactPrint)
  }

  val textOutFlow = Flow[Event].map { event =>
    ByteString(LogStreamProcessor.logLine(event))
  }

  def logToJson(maxLine: Int) = {
    BidiFlow.fromFlows(textInFlow(maxLine), jsonOutFlow)
  }

  def jsonToLog(maxJsonObject: Int) = {
    BidiFlow.fromFlows(jsonInFlow(maxJsonObject), textOutFlow)
  }

  def logToJsonFlow(maxLine: Int) = {
    logToJson(maxLine).join(Flow[Event])
  }

  def jsonToLogFlow(maxJsonObject: Int) = {
    jsonToLog(maxJsonObject).join(Flow[Event])
  }
}
Example 10
Source File: DataFileIngress.scala From cloudflow with Apache License 2.0
package cloudflow.akkastreamsdoc

import java.nio.file
import java.nio.file._

import akka.NotUsed
import akka.stream.IOResult
import akka.stream.alpakka.file.scaladsl.Directory
import akka.stream.scaladsl._
import akka.util.ByteString

import cloudflow.akkastream._
import cloudflow.akkastream.scaladsl._
import cloudflow.streamlets._
import cloudflow.streamlets.avro._
import spray.json.JsonParser

import scala.concurrent.Future
import scala.concurrent.duration._

class DataFileIngress extends AkkaStreamlet {
  import JsonSupport._

  val out = AvroOutlet[Data]("out").withPartitioner(RoundRobinPartitioner)
  def shape = StreamletShape.withOutlets(out)

  private val sourceData = VolumeMount("source-data-mount", "/mnt/data", ReadWriteMany)
  override def volumeMounts = Vector(sourceData)

  // Streamlet processing steps
  // 1. Every X seconds
  // 2. Enumerate all files in the mounted path
  // 3. Read each file *)
  // 4. Deserialize file content to a Data value *)
  // *) Note that reading and deserializing the file content is done in separate steps
  //    for readability only; in production they should be merged into one step for
  //    performance reasons.
  override def createLogic = new RunnableGraphStreamletLogic() {
    val listFiles: NotUsed ⇒ Source[file.Path, NotUsed] = { _ ⇒
      Directory.ls(getMountedPath(sourceData))
    }
    val readFile: Path ⇒ Source[ByteString, Future[IOResult]] = { path: Path ⇒
      FileIO.fromPath(path).via(JsonFraming.objectScanner(Int.MaxValue))
    }
    val parseFile: ByteString ⇒ Data = { jsonByteString ⇒
      JsonParser(jsonByteString.utf8String).convertTo[Data]
    }

    val emitFromFilesContinuously = Source
      .tick(1.second, 5.second, NotUsed)
      .flatMapConcat(listFiles)
      .flatMapConcat(readFile)
      .map(parseFile)

    def runnableGraph = emitFromFilesContinuously.to(plainSink(out))
  }
}
Example 11
Source File: SensorDataFileIngress.scala From pipelines-examples with Apache License 2.0
package pipelines.examples.sensordata

import java.nio.file
import java.nio.file._

import akka.NotUsed
import akka.stream.IOResult
import akka.stream.alpakka.file.scaladsl.Directory
import akka.stream.scaladsl._
import akka.util.ByteString

import pipelines.akkastream._
import pipelines.akkastream.scaladsl._
import pipelines.streamlets._
import pipelines.streamlets.avro._
import spray.json.JsonParser

import scala.concurrent.Future
import scala.concurrent.duration._

class SensorDataFileIngress extends AkkaStreamlet {
  import SensorDataJsonSupport._

  val out = AvroOutlet[SensorData]("out").withPartitioner(RoundRobinPartitioner)
  def shape = StreamletShape.withOutlets(out)

  private val sourceData = VolumeMount("source-data-mount", "/mnt/data", ReadWriteMany)
  override def volumeMounts = Vector(sourceData)

  // Streamlet processing steps
  // 1. Every X seconds
  // 2. Enumerate all files in the mounted path
  // 3. Read each file *)
  // 4. Deserialize file content to a SensorData value *)
  // *) Note that reading and deserializing the file content is done in separate steps
  //    for readability only; in production they should be merged into one step for
  //    performance reasons.
  override def createLogic = new RunnableGraphStreamletLogic() {
    val listFiles: NotUsed ⇒ Source[file.Path, NotUsed] = { _ ⇒
      Directory.ls(getMountedPath(sourceData))
    }
    val readFile: Path ⇒ Source[ByteString, Future[IOResult]] = { path: Path ⇒
      FileIO.fromPath(path).via(JsonFraming.objectScanner(Int.MaxValue))
    }
    val parseFile: ByteString ⇒ SensorData = { jsonByteString ⇒
      JsonParser(jsonByteString.utf8String).convertTo[SensorData]
    }

    val emitFromFilesContinuously = Source.tick(1.second, 5.second, NotUsed)
      .flatMapConcat(listFiles)
      .flatMapConcat(readFile)
      .map(parseFile)

    def runnableGraph = emitFromFilesContinuously.to(plainSink(out))
  }
}
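Both file-ingress streamlets above poll a mounted directory on a timer. Stripped of the Cloudflow/Pipelines machinery, the core pattern is a tick source flat-mapped over a directory listing; a minimal sketch assuming a local directory in/ and the akka-stream-alpakka-file dependency:

import java.nio.file.Paths

import akka.NotUsed
import akka.actor.ActorSystem
import akka.stream.alpakka.file.scaladsl.Directory
import akka.stream.scaladsl.Source

import scala.concurrent.duration._

object PollDirectory extends App {
  implicit val system = ActorSystem("PollDirectory")

  // Every 5 seconds, list the directory and emit each path downstream.
  Source.tick(1.second, 5.seconds, NotUsed)
    .flatMapConcat(_ => Directory.ls(Paths.get("in")))
    .runForeach(path => println(s"Found: $path"))
}

Note that each tick re-lists every file currently in the directory, so without deduplication the same file contents are emitted on every poll; both streamlets above share this behavior.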