akka.stream.scaladsl.StreamConverters Scala Examples
The following examples show how to use akka.stream.scaladsl.StreamConverters.
You can go to the original project or source file by following the links above each example.
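Before the examples, here is a minimal sketch of the four converters they rely on, written against the pre-2.6 Akka API (with an explicit ActorMaterializer) that these projects use; the data and system names are placeholders:

import java.io.{ ByteArrayInputStream, ByteArrayOutputStream }

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{ Source, StreamConverters }
import akka.util.ByteString

import scala.concurrent.duration._

object StreamConvertersSketch extends App {
  implicit val system = ActorSystem("sketch")
  implicit val mat    = ActorMaterializer()

  // Blocking InputStream -> Source[ByteString, Future[IOResult]]
  val in = StreamConverters.fromInputStream(() => new ByteArrayInputStream("hello".getBytes))

  // Sink[ByteString, Future[IOResult]] writing to a blocking OutputStream
  val out = StreamConverters.fromOutputStream(() => new ByteArrayOutputStream())

  // Running stream -> blocking InputStream (each read times out after 5 seconds)
  val is = Source.single(ByteString("hello")).runWith(StreamConverters.asInputStream(5.seconds))

  // Running stream -> java.util.stream.Stream
  val js = Source.single(ByteString("hello")).runWith(StreamConverters.asJavaStream())
}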
Example 1
Source File: ProtobufByteStrings.scala From daml with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.http.util

import akka.NotUsed
import akka.stream.Materializer
import akka.stream.scaladsl.{Source, StreamConverters}
import com.google.protobuf

import scala.collection.JavaConverters._

object ProtobufByteStrings {

  def readFrom(source: Source[akka.util.ByteString, NotUsed])(
      implicit mat: Materializer): protobuf.ByteString = {
    val inputStream = source.runWith(StreamConverters.asInputStream())
    protobuf.ByteString.readFrom(inputStream)
  }

  def toSource(a: protobuf.ByteString): Source[akka.util.ByteString, NotUsed] = {
    Source.fromIterator(() =>
      a.asReadOnlyByteBufferList().iterator.asScala.map(x => akka.util.ByteString(x)))
  }
}
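A minimal usage sketch for this helper; the actor system setup and the test value are assumptions, not part of the daml source:

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import com.daml.http.util.ProtobufByteStrings
import com.google.protobuf

object ProtobufByteStringsExample extends App {
  implicit val system = ActorSystem("protobuf-bytestrings")
  implicit val mat    = ActorMaterializer()

  // Round trip: protobuf.ByteString -> Source[akka.util.ByteString, NotUsed] -> protobuf.ByteString
  val original = protobuf.ByteString.copyFromUtf8("hello")
  val copied   = ProtobufByteStrings.readFrom(ProtobufByteStrings.toSource(original))
  assert(copied == original)
}

Note that readFrom blocks the calling thread: asInputStream() materializes a blocking InputStream whose individual reads time out after 5 seconds by default.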
Example 2
Source File: ClasspathResources.scala From intro-to-akka-streams with Apache License 2.0
package com.github.dnvriend.streams.util

import java.io.InputStream

import akka.NotUsed
import akka.stream.IOResult
import akka.stream.scaladsl.{ Source, StreamConverters }
import akka.util.ByteString

import scala.concurrent.Future
import scala.io.{ Source ⇒ ScalaIOSource }
import scala.util.Try
import scala.xml.pull.{ XMLEvent, XMLEventReader }

trait ClasspathResources {

  def withInputStream[T](fileName: String)(f: InputStream ⇒ T): T = {
    val is = fromClasspathAsStream(fileName)
    try {
      f(is)
    } finally {
      Try(is.close())
    }
  }

  def withXMLEventReader[T](fileName: String)(f: XMLEventReader ⇒ T): T =
    withInputStream(fileName) { is ⇒
      f(new XMLEventReader(ScalaIOSource.fromInputStream(is)))
    }

  def withXMLEventSource[T](fileName: String)(f: Source[XMLEvent, NotUsed] ⇒ T): T =
    withXMLEventReader(fileName) { reader ⇒
      f(Source.fromIterator(() ⇒ reader))
    }

  def withByteStringSource[T](fileName: String)(f: Source[ByteString, Future[IOResult]] ⇒ T): T =
    withInputStream(fileName) { inputStream ⇒
      f(StreamConverters.fromInputStream(() ⇒ inputStream))
    }

  def streamToString(is: InputStream): String =
    ScalaIOSource.fromInputStream(is).mkString

  def fromClasspathAsString(fileName: String): String =
    streamToString(fromClasspathAsStream(fileName))

  def fromClasspathAsStream(fileName: String): InputStream =
    getClass.getClassLoader.getResourceAsStream(fileName)
}
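A sketch of using the trait. Note a subtlety: withInputStream closes the underlying InputStream as soon as the callback returns, so the callback must fully consume the source before returning. The resource name below is hypothetical:

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import com.github.dnvriend.streams.util.ClasspathResources

import scala.concurrent.Await
import scala.concurrent.duration._

object ClasspathResourcesExample extends App with ClasspathResources {
  implicit val system = ActorSystem("classpath-resources")
  implicit val mat    = ActorMaterializer()

  val byteCount = withByteStringSource("data/people.csv") { source ⇒
    // Block until the stream is drained: the InputStream is closed when this function returns.
    Await.result(source.runFold(0)(_ + _.length), 5.seconds)
  }
  println(s"read $byteCount bytes")
}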
Example 3
Source File: DatasetExport.scala From daf with BSD 3-Clause "New" or "Revised" License
package controllers

import akka.stream.scaladsl.{ Source, StreamConverters }
import cats.syntax.show.toShow
import daf.dataset.{ DatasetParams, ExtraParams }
import daf.filesystem.{ CsvFileFormat, FileDataFormat, JsonFileFormat, PathInfo, fileFormatShow }
import daf.web.contentType

import scala.concurrent.Future
import scala.util.{ Failure, Success }

trait DatasetExport { this: DatasetController =>

  protected def prepareDirect(params: DatasetParams, targetFormat: FileDataFormat, limit: Option[Int]) = targetFormat match {
    case JsonFileFormat => datasetService.jsonData(params, limit)
    case CsvFileFormat  => datasetService.csvData(params, limit)
    case _              => Failure { new IllegalArgumentException("Unable to prepare download; only CSV and JSON are permitted") }
  }

  protected def prepareFileExport(pathInfo: PathInfo, sourceFormat: FileDataFormat, targetFormat: FileDataFormat, extraParams: ExtraParams, limit: Option[Int] = None) =
    fileExportService.exportFile(pathInfo.path, sourceFormat, targetFormat, extraParams, limit).map { downloadService.openPath }.flatMap {
      case Success(stream) => Future.successful {
        StreamConverters.fromInputStream { () => stream }
      }
      case Failure(error)  => Future.failed { error }
    }

  protected def prepareTableExport(table: String, targetFormat: FileDataFormat, extraParams: ExtraParams, limit: Option[Int] = None) =
    fileExportService.exportTable(table, targetFormat, extraParams, limit).map { downloadService.openPath }.flatMap {
      case Success(stream) => Future.successful {
        StreamConverters.fromInputStream { () => stream }
      }
      case Failure(error)  => Future.failed { error }
    }

  protected def prepareQueryExport(query: String, targetFormat: FileDataFormat) =
    fileExportService.exportQuery(query, targetFormat).map { downloadService.openPath }.flatMap {
      case Success(stream) => Future.successful {
        StreamConverters.fromInputStream { () => stream }
      }
      case Failure(error)  => Future.failed { error }
    }

  protected def respond(data: Source[String, _], fileName: String, targetFormat: FileDataFormat) = Ok.chunked(data).withHeaders(
    CONTENT_DISPOSITION -> s"""attachment; filename="$fileName.${targetFormat.show}"""",
    CONTENT_TYPE        -> contentType(targetFormat)
  )
}
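The pattern the three prepare methods share is worth isolating: wrap a blocking InputStream produced elsewhere into a Source with StreamConverters.fromInputStream, then serve it as a chunked Play response. A condensed sketch of that pattern; the action, file path, and format are illustrative, not daf's actual wiring:

import java.io.FileInputStream

import akka.stream.scaladsl.StreamConverters
import play.api.mvc._

class DownloadController(cc: ControllerComponents) extends AbstractController(cc) {
  def download(path: String, fileName: String) = Action {
    val data = StreamConverters
      .fromInputStream(() => new FileInputStream(path))
      .map(_.utf8String)
    Ok.chunked(data).withHeaders(
      CONTENT_DISPOSITION -> s"""attachment; filename="$fileName.csv"""",
      CONTENT_TYPE        -> "text/csv"
    )
  }
}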
Example 4
Source File: DatasetFunctionsSpec.scala From daf with BSD 3-Clause "New" or "Revised" License
package daf.dataset

import java.io.ByteArrayInputStream

import akka.stream.ActorMaterializer
import akka.stream.scaladsl.StreamConverters
import controllers.modules.TestAbstractModule
import daf.filesystem.MergeStrategy
import daf.instances.{ AkkaInstance, ConfigurationInstance }
import org.scalatest.{ BeforeAndAfterAll, MustMatchers, WordSpecLike }

import scala.concurrent.Await
import scala.concurrent.duration._
import scala.util.Random

class DatasetFunctionsSpec extends TestAbstractModule
  with WordSpecLike
  with MustMatchers
  with BeforeAndAfterAll
  with ConfigurationInstance
  with AkkaInstance {

  implicit lazy val executionContext = actorSystem.dispatchers.lookup("akka.actor.test-dispatcher")

  protected implicit lazy val materializer = ActorMaterializer.create { actorSystem }

  override def beforeAll() = {
    startAkka()
  }

  def data = (1 to 5).map { i =>
    Random.alphanumeric.grouped(20).take(5).map { s => s"$i - ${s.mkString}" }.toStream :+ defaultSeparator
  }

  def stream = MergeStrategy.coalesced {
    data.map { iter =>
      new ByteArrayInputStream(
        iter.mkString(defaultSeparator).getBytes("UTF-8")
      )
    }
  }

  def source = StreamConverters.fromInputStream(() => stream, 5)

  "Source manipulation" must {

    "convert to a string source" in {
      Await.result(
        wrapDefault { asStringSource(source) }.runFold("") { _ + _ },
        5.seconds
      ).split(defaultSeparator).length must be { 25 }
    }

    "convert to a json source" in {
      Await.result(
        wrapJson { asStringSource(source) }.runFold("") { _ + _ },
        5.seconds
      ).split(jsonSeparator).length must be { 25 }
    }
  }
}
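The second argument in fromInputStream(() => stream, 5) is the chunk size in bytes (the default is 8192); the spec uses a tiny chunk size to exercise the downstream merging logic. A standalone sketch of the effect:

import java.io.ByteArrayInputStream

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.StreamConverters

object ChunkSizeSketch extends App {
  implicit val system = ActorSystem("chunk-size")
  implicit val mat    = ActorMaterializer()

  // A 12-byte stream read with chunkSize = 5 arrives as ByteStrings of 5, 5 and 2 bytes.
  StreamConverters
    .fromInputStream(() => new ByteArrayInputStream("hello, world".getBytes("UTF-8")), chunkSize = 5)
    .runForeach(bs => println(s"${bs.length} bytes: ${bs.utf8String}"))
}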
Example 5
Source File: ClasspathResources.scala From reactive-activemq with Apache License 2.0
package akka.stream.integration

import java.io.InputStream

import akka.stream.IOResult
import akka.stream.scaladsl.{ Source, StreamConverters }
import akka.util.ByteString

import scala.concurrent.Future
import scala.io.{ Source => ScalaIOSource }

trait ClasspathResources {

  def withInputStream[T](fileName: String)(f: InputStream => T): T = {
    val is: InputStream = fromClasspathAsStream(fileName)
    try f(is) finally is.close()
  }

  def withInputStreamAsText[T](fileName: String)(f: String => T): T =
    f(fromClasspathAsString(fileName))

  def withByteStringSource[T](fileName: String)(f: Source[ByteString, Future[IOResult]] => T): T =
    withInputStream(fileName) { inputStream =>
      f(StreamConverters.fromInputStream(() => inputStream))
    }

  def streamToString(is: InputStream): String =
    ScalaIOSource.fromInputStream(is).mkString

  def fromClasspathAsString(fileName: String): String =
    streamToString(fromClasspathAsStream(fileName))

  def fromClasspathAsStream(fileName: String): InputStream =
    getClass.getClassLoader.getResourceAsStream(fileName)
}
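One detail both ClasspathResources variants expose in their types: fromInputStream materializes a Future[IOResult], which reports how many bytes were read and whether the stream completed normally. A sketch of inspecting it, using reference.conf since that resource exists on any Akka classpath:

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{ Sink, StreamConverters }

object IOResultSketch extends App {
  implicit val system = ActorSystem("ioresult")
  implicit val mat    = ActorMaterializer()
  import system.dispatcher

  // .to(Sink.ignore).run() keeps the Source's materialized value, the Future[IOResult].
  val ioResult = StreamConverters
    .fromInputStream(() => getClass.getClassLoader.getResourceAsStream("reference.conf"))
    .to(Sink.ignore)
    .run()

  ioResult.foreach(r => println(s"read ${r.count} bytes"))
}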
Example 6
Source File: SparqlUtils.scala From CM-Well with Apache License 2.0
package cmwell.tools.data.sparql.japi

import java.io.InputStream

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{Sink, StreamConverters}
import cmwell.tools.data.sparql.SparqlProcessor
import cmwell.tools.data.utils.akka.{concatByteStrings, endl}
import cmwell.tools.data.utils.chunkers.GroupChunker

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.FiniteDuration

object SparqlUtils {

  def createJavaStreamFromPaths(baseUrl: String,
                                parallelism: Int = 4,
                                isNeedWrapping: Boolean = true,
                                sparqlQuery: String,
                                in: InputStream) = {
    implicit val system = ActorSystem("reactive-sparql-processor")
    implicit val mat = ActorMaterializer()

    SparqlProcessor
      .createSourceFromPathsInputStream(
        baseUrl = baseUrl,
        spQueryParamsBuilder = (p: Seq[String], v: Map[String,String], q: Boolean) =>
          "sp.pid=" + p.head.substring(p.head.lastIndexOf('-') + 1),
        parallelism = parallelism,
        isNeedWrapping = isNeedWrapping,
        sparqlQuery = sparqlQuery,
        in = in
      )
      .map { case (data, _) => data }
      .via(GroupChunker(GroupChunker.formatToGroupExtractor("ntriples")))
      .map(concatByteStrings(_, endl))
      .runWith(StreamConverters.asJavaStream())
  }

  def createJavaOutputStreamFromPaths(baseUrl: String,
                                      parallelism: Int = 4,
                                      isNeedWrapping: Boolean = true,
                                      sparqlQuery: String,
                                      in: InputStream,
                                      timeout: FiniteDuration) = {
    implicit val system = ActorSystem("reactive-sparql-processor")
    implicit val mat = ActorMaterializer()

    SparqlProcessor
      .createSourceFromPathsInputStream(
        baseUrl = baseUrl,
        spQueryParamsBuilder = (p: Seq[String], v: Map[String,String], q: Boolean) =>
          "sp.pid=" + p.head.substring(p.head.lastIndexOf('-') + 1),
        parallelism = parallelism,
        isNeedWrapping = isNeedWrapping,
        sparqlQuery = sparqlQuery,
        in = in
      )
      .map { case (data, _) => data }
      .via(GroupChunker(GroupChunker.formatToGroupExtractor("ntriples")))
      .map(concatByteStrings(_, endl))
      .runWith(StreamConverters.asInputStream(timeout))
  }
}
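Both methods hand a running Akka stream to blocking Java-side consumers: asJavaStream() materializes a java.util.stream.Stream that pulls elements on demand, and asInputStream(timeout) materializes an InputStream whose individual reads block for at most the given timeout. A sketch of consuming the Java stream (the data is made up):

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{ Source, StreamConverters }
import akka.util.ByteString

object AsJavaStreamSketch extends App {
  implicit val system = ActorSystem("as-java-stream")
  implicit val mat    = ActorMaterializer()

  val javaStream: java.util.stream.Stream[ByteString] =
    Source(List(ByteString("a"), ByteString("b"), ByteString("c")))
      .runWith(StreamConverters.asJavaStream())

  // Each pull on the java.util.stream blocks until the Akka stream emits.
  javaStream.forEach(bs => println(bs.utf8String))
}

Also note that SparqlUtils spins up a fresh ActorSystem and materializer on every call and never terminates them; in a long-lived application the system would normally be shared.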
Example 7
Source File: AkkaHttpServerWithMinio.scala From c4proto with Apache License 2.0
package ee.cone.c4gate_akka_s3

import java.util.UUID

import akka.http.scaladsl.model.{HttpMethods, HttpRequest}
import akka.stream.scaladsl.StreamConverters
import com.typesafe.scalalogging.LazyLogging
import ee.cone.c4actor_s3.S3FileStorage
import ee.cone.c4di.c4
import ee.cone.c4gate_akka._

import scala.concurrent.duration._
import scala.concurrent.{ExecutionContext, Future}

@c4("AkkaMinioGatewayApp") final class AkkaMinioRequestPreHandler(
  s3FileStorage: S3FileStorage,
  akkaMat: AkkaMat,
) extends AkkaRequestPreHandler with LazyLogging {

  def handleAsync(
    income: HttpRequest
  )(implicit ec: ExecutionContext): Future[HttpRequest] =
    if (income.method == HttpMethods.PUT) {
      for {
        mat <- akkaMat.get
      } yield {
        val tmpFilename: String = s"tmp/${UUID.randomUUID()}"
        logger debug s"PUT request received; Storing request body to $tmpFilename"
        val is = income.entity.dataBytes.runWith(StreamConverters.asInputStream(5.minutes))(mat) //todo check throw
        logger debug s"Bytes Stream created"
        s3FileStorage.uploadByteStream(tmpFilename, is)
        logger debug s"Uploaded bytestream to $tmpFilename"
        // ? income.headers
        income.withEntity(tmpFilename)
      }
    } else Future.successful(income)
}
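A caveat worth calling out here: asInputStream(5.minutes) sets the timeout for each individual read, not an overall deadline, and the returned InputStream blocks the thread that reads it. Since uploadByteStream consumes the stream synchronously, a variant that pushes the blocking read onto a dispatcher dedicated to blocking I/O might look like this sketch (the helper and parameter names are invented for illustration):

import akka.http.scaladsl.model.RequestEntity
import akka.stream.Materializer
import akka.stream.scaladsl.StreamConverters

import scala.concurrent.duration._
import scala.concurrent.{ ExecutionContext, Future }

// Hypothetical helper: `upload` stands in for a blocking consumer such as
// s3FileStorage.uploadByteStream; `blockingEc` for a dispatcher configured for blocking I/O.
def uploadEntity(entity: RequestEntity, upload: java.io.InputStream => Unit, blockingEc: ExecutionContext)(
    implicit mat: Materializer): Future[Unit] =
  Future {
    val is = entity.dataBytes.runWith(StreamConverters.asInputStream(5.minutes))
    upload(is) // blocks this (dedicated) thread until the entity stream completes
  }(blockingEc)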