akka.stream.scaladsl.FileIO Scala Examples
The following examples show how to use akka.stream.scaladsl.FileIO.
Each example is extracted from an open source project; the project name and license are noted above each example.
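Before the project examples, here is a minimal, self-contained sketch of the two core operations: FileIO.fromPath, which reads a file as a Source[ByteString, Future[IOResult]], and FileIO.toPath, a Sink that writes ByteString chunks to a file. It is not taken from any project below; the file names are placeholders, and it assumes Akka 2.6+, where an implicit classic ActorSystem provides the stream materializer.

import java.nio.file.Paths

import akka.actor.ActorSystem
import akka.stream.IOResult
import akka.stream.scaladsl.FileIO

import scala.concurrent.Future
import scala.util.{Failure, Success}

object FileIOCopy extends App {
  implicit val system: ActorSystem = ActorSystem("FileIOCopy")
  import system.dispatcher

  // Copy in.txt to out.txt (placeholder paths): FileIO.fromPath emits the file
  // contents as ByteString chunks (8 KiB by default); FileIO.toPath writes them
  // and materializes a Future[IOResult] carrying the byte count.
  val done: Future[IOResult] =
    FileIO.fromPath(Paths.get("in.txt"))
      .runWith(FileIO.toPath(Paths.get("out.txt")))

  done.onComplete {
    case Success(result) => println(s"Wrote ${result.count} bytes"); system.terminate()
    case Failure(e)      => println(s"Copy failed: $e"); system.terminate()
  }
}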
Example 1
Source File: AkkaBodyBuilder.scala From chronicler with Apache License 2.0
package com.github.fsanaulla.chronicler.akka.shared.handlers

import java.nio.file.Path

import akka.http.scaladsl.model.{HttpEntity, MediaTypes, RequestEntity}
import akka.stream.scaladsl.FileIO
import com.github.fsanaulla.chronicler.core.alias.ErrorOr
import com.github.fsanaulla.chronicler.core.components.BodyBuilder
import com.github.fsanaulla.chronicler.core.either
import com.github.fsanaulla.chronicler.core.either.EitherOps
import com.github.fsanaulla.chronicler.core.model.{Appender, InfluxWriter, Point}

class AkkaBodyBuilder extends BodyBuilder[RequestEntity] with Appender {

  override def fromFile(filePath: Path, enc: String): RequestEntity =
    HttpEntity(
      MediaTypes.`application/octet-stream`,
      FileIO.fromPath(filePath)
    )

  override def fromString(string: String): RequestEntity =
    HttpEntity(string)

  override def fromStrings(strings: Seq[String]): RequestEntity =
    HttpEntity(strings.mkString("\n"))

  override def fromPoint(point: Point): RequestEntity =
    HttpEntity(point.serialize)

  override def fromPoints(points: Seq[Point]): RequestEntity =
    HttpEntity(points.map(_.serialize).mkString("\n"))

  override def fromT[T](meas: String, t: T)(implicit wr: InfluxWriter[T]): ErrorOr[RequestEntity] =
    wr.write(t).mapRight(append(meas, _))

  override def fromSeqT[T](
      meas: String,
      ts: Seq[T]
  )(implicit wr: InfluxWriter[T]): ErrorOr[RequestEntity] =
    either.seq(ts.map(wr.write)).mapRight(append(meas, _))
}
Example 2
Source File: CreatePosts.scala From apache-spark-test with Apache License 2.0
package com.github.dnvriend

import java.nio.file.Paths
import java.nio.file.StandardOpenOption._
import java.text.SimpleDateFormat
import java.util.Date

import akka.actor.{ ActorSystem, Terminated }
import akka.stream.scaladsl.{ FileIO, Source }
import akka.stream.{ ActorMaterializer, Materializer }
import akka.util.ByteString
import play.api.libs.json.Json

import scala.concurrent.{ ExecutionContext, Future }
import scala.util.Random

object CreatePosts extends App {
  implicit val system: ActorSystem = ActorSystem()
  implicit val mat: Materializer = ActorMaterializer()
  implicit val ec: ExecutionContext = system.dispatcher

  def terminate: Future[Terminated] = system.terminate()

  sys.addShutdownHook {
    terminate
  }

  object Post {
    implicit val format = Json.format[Post]
  }

  final case class Post(
    commentCount: Int,
    lastActivityDate: String,
    ownerUserId: Long,
    body: String,
    score: Int,
    creationDate: String,
    viewCount: Int,
    title: String,
    tags: String,
    answerCount: Int,
    acceptedAnswerId: Long,
    postTypeId: Long,
    id: Long
  )

  def rng = Random.nextInt(20000)

  def now: String = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSXXX").format(new Date())

  val lorem = "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Nam fringilla magna et pharetra vestibulum."
  val title = " Ut id placerat sapien. Aliquam vel metus orci."

  Source.fromIterator(() => Iterator from 0).map { id =>
    Post(rng, now, rng, List.fill(Random.nextInt(5))(lorem).mkString("\n"), rng, now, rng,
      s"$rng - $title", title, rng, rng, rng, id)
  }.map(Json.toJson(_).toString)
    .map(json => ByteString(json + "\n"))
    .take(1000000)
    .via(LogProgress.flow())
    .runWith(FileIO.toPath(Paths.get("/tmp/posts.json"), Set(WRITE, TRUNCATE_EXISTING, CREATE)))
    .flatMap { done =>
      println(done)
      terminate
    }.recoverWith {
      case cause: Throwable =>
        cause.printStackTrace()
        terminate
    }
}
Example 3
Source File: CreateZipcodes.scala From apache-spark-test with Apache License 2.0
package com.github.dnvriend

import java.nio.file.Paths
import java.nio.file.StandardOpenOption._

import akka.NotUsed
import akka.actor.{ ActorSystem, Terminated }
import akka.stream.scaladsl.{ FileIO, Source }
import akka.stream.{ ActorMaterializer, Materializer }
import akka.util.ByteString
import play.api.libs.json.Json

import scala.concurrent.{ ExecutionContext, Future }

object CreateZipcodes extends App {
  implicit val system: ActorSystem = ActorSystem()
  implicit val mat: Materializer = ActorMaterializer()
  implicit val ec: ExecutionContext = system.dispatcher

  sys.addShutdownHook {
    terminate
  }

  object Zipcode {
    implicit val format = Json.format[Zipcode]
  }

  final case class Zipcode(value: String)

  val numZips = 50000000

  def zips(range: Range): Source[ByteString, NotUsed] =
    Source(range).flatMapConcat { district =>
      Source('A' to 'Z').flatMapConcat { l1 =>
        Source('A' to 'Z').flatMapConcat { l2 =>
          Source(1 to 399).map(num => f"$district$l1$l2-$num%03d")
        }
      }
    }.map(Zipcode.apply).map(Json.toJson(_).toString).map(json => ByteString(json + "\n"))

  zips(1000 until 2000)
    .merge(zips(2000 until 3000))
    .merge(zips(3000 until 4000))
    .merge(zips(4000 until 5000))
    .merge(zips(5000 until 6000))
    .merge(zips(6000 until 7000))
    .merge(zips(7000 until 8000))
    .merge(zips(8000 until 9000))
    .take(numZips)
    .via(LogProgress.flow(each = 250000))
    .runWith(FileIO.toPath(Paths.get("/tmp/zips.json"), Set(WRITE, TRUNCATE_EXISTING, CREATE)))
    .flatMap { done =>
      println(done)
      terminate
    }.recoverWith {
      case cause: Throwable =>
        cause.printStackTrace()
        terminate
    }

  def terminate: Future[Terminated] = system.terminate()
}
Example 4
Source File: PackageObjectSpec.scala From nexus-kg with Apache License 2.0
package ch.epfl.bluebrain.nexus.kg.storage

import java.nio.file.Paths
import java.util.UUID

import akka.actor.ActorSystem
import akka.http.scaladsl.model.Uri
import akka.stream.scaladsl.FileIO
import ch.epfl.bluebrain.nexus.kg.resources.ProjectIdentifier.ProjectRef
import ch.epfl.bluebrain.nexus.kg.resources.file.File.Digest
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.flatspec.AnyFlatSpecLike
import org.scalatest.matchers.should.Matchers

class PackageObjectSpec extends AnyFlatSpecLike with Matchers with ScalaFutures {

  "uriToPath" should "convert an Akka Uri that represents a valid file path to a Java Path" in {
    uriToPath("file:///some/path/my%20file.txt") shouldEqual Some(Paths.get("/some/path/my file.txt"))
    uriToPath("s3://some/path") shouldEqual None
    uriToPath("foo") shouldEqual None
  }

  "pathToUri" should "convert a Java Path to an Akka Uri" in {
    pathToUri(Paths.get("/some/path/my file.txt")) shouldEqual Uri("file:///some/path/my%20file.txt")
  }

  "mangle" should "generate a properly mangled path given a file project and UUID" in {
    val projUuid = UUID.fromString("4947db1e-33d8-462b-9754-3e8ae74fcd4e")
    val fileUuid = UUID.fromString("b1d7cda2-1ec0-40d2-b12e-3baf4895f7d7")
    mangle(ProjectRef(projUuid), fileUuid, "my file.jpg") shouldEqual
      "4947db1e-33d8-462b-9754-3e8ae74fcd4e/b/1/d/7/c/d/a/2/my file.jpg"
  }

  "digest" should "properly compute the hash of a given input" in {
    implicit val as: ActorSystem = ActorSystem()

    val filePath = "/storage/s3.json"
    val path     = Paths.get(getClass.getResource(filePath).toURI)
    val input    = FileIO.fromPath(path)
    val algo     = "SHA-256"

    input.runWith(digestSink(algo)(as.dispatcher)).futureValue shouldEqual Digest(
      algo,
      "5602c497e51680bef1f3120b1d6f65d480555002a3290029f8178932e8f4801a"
    )
  }
}
Example 5
Source File: TarFlowSpec.scala From nexus-kg with Apache License 2.0
package ch.epfl.bluebrain.nexus.kg.archives

import java.nio.file.Files
import java.time.{Clock, Instant, ZoneId}

import akka.actor.ActorSystem
import akka.stream.scaladsl.FileIO
import akka.testkit.TestKit
import ch.epfl.bluebrain.nexus.kg.TestHelper
import ch.epfl.bluebrain.nexus.kg.storage.digestSink
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike

import scala.concurrent.duration._

class TarFlowSpec
    extends TestKit(ActorSystem("TarFlowSpec"))
    with AnyWordSpecLike
    with Matchers
    with TestHelper
    with ScalaFutures {

  private implicit val ec    = system.dispatcher
  private implicit val clock = Clock.fixed(Instant.EPOCH, ZoneId.systemDefault())

  override implicit def patienceConfig: PatienceConfig = PatienceConfig(55.second, 150.milliseconds)

  "A TarFlow" should {

    "tar a bunch of sources" in {
      val digest =
        "3fef41c5afe7a7ee11ee9d556a564fb57784cc5247b24c6ca70783f396fa158a1c7952504d3e1aa441de20cf065d740eec454c6ffb7fbc4b6351b950ee51c886"
      val elems = 500
      val contents = List.tabulate(2) { i =>
        val content = (i until (i + elems)).toList.mkString(",") + "\n"
        ArchiveSource(content.length.toLong, s"some/path$i/$i.txt", produce(content))
      }
      val path = Files.createTempFile("test", ".tar")
      TarFlow.write(contents).runWith(FileIO.toPath(path)).futureValue
      FileIO.fromPath(path).runWith(digestSink("SHA-512")).futureValue.value shouldEqual digest
      Files.delete(path)
    }
  }
}
Example 6
Source File: BackupAdminController.scala From recogito2 with Apache License 2.0
package controllers.admin.backup

import akka.actor.ActorSystem
import akka.stream.scaladsl.FileIO
import com.mohiva.play.silhouette.api.Silhouette
import controllers.{BaseAuthController, Security}
import controllers.document.BackupReader
import javax.inject.{Inject, Singleton}
import services.ContentType
import services.annotation.AnnotationService
import services.document.DocumentService
import services.generated.tables.records.DocumentFilepartRecord
import services.user.UserService
import services.user.Roles._
import services.visit.VisitService
import org.joda.time.DateTime
import org.joda.time.format.DateTimeFormat
import org.webjars.play.WebJarsUtil
import play.api.Configuration
import play.api.mvc.{ControllerComponents, ResponseHeader, Result}
import play.api.libs.Files.TemporaryFileCreator
import play.api.http.HttpEntity
import scala.concurrent.{ExecutionContext, Future}
import storage.db.DB
import storage.es.migration.AnnotationMigrationUtil
import transform.tiling.TilingService

@Singleton
class BackupAdminController @Inject() (
    val components: ControllerComponents,
    val config: Configuration,
    val migrationUtil: AnnotationMigrationUtil,
    val users: UserService,
    val visits: VisitService,
    val silhouette: Silhouette[Security.Env],
    implicit val db: DB,
    implicit val tilingService: TilingService,
    implicit val annotations: AnnotationService,
    implicit val documents: DocumentService,
    implicit val ctx: ExecutionContext,
    implicit val system: ActorSystem,
    implicit val tmpFileCreator: TemporaryFileCreator,
    implicit val webJarsUtil: WebJarsUtil
) extends BaseAuthController(components, config, documents, users) with BackupReader {

  def restore = silhouette.SecuredAction(Security.WithRole(Admin)).async { implicit request =>
    request.body.asMultipartFormData.flatMap(_.file("backup")) match {
      case Some(formData) =>
        restoreBackup(formData.ref.path.toFile, runAsAdmin = true, forcedOwner = None).map {
          case (doc, fileparts) => Ok
        }.recover {
          case t: Throwable =>
            t.printStackTrace()
            InternalServerError
        }
      case None => Future.successful(BadRequest)
    }
  }

  def exportVisits = silhouette.SecuredAction(Security.WithRole(Admin)).async { implicit request =>
    visits.scrollExport().map { path =>
      val fmt = DateTimeFormat.forPattern("yyyy-MM-dd")
      val source = FileIO.fromPath(path)
      val filename = s"visits-exported-${fmt.print(DateTime.now)}.csv"
      Result(
        header = ResponseHeader(200, Map("Content-Disposition" -> s"""attachment; filename="${filename}"""")),
        body = HttpEntity.Streamed(source, None, Some("text/csv"))
      )
    }
  }

  def deleteVisitsOlderThan(date: Option[String]) = silhouette.SecuredAction(Security.WithRole(Admin)).async { implicit request =>
    date match {
      case Some(_) =>
        Future.successful(BadRequest("User-provided dates not supported yet."))
      case _ =>
        val cutoffDate = DateTime.now minusMonths 6
        visits.deleteOlderThan(cutoffDate).map { success =>
          if (success) Ok("Done.") else InternalServerError("Something went wrong.")
        }
    }
  }
}
Example 7
Source File: StreamTestSpec.scala From squbs with Apache License 2.0
package org.squbs.unicomplex

import akka.actor._
import akka.http.scaladsl.Http
import akka.http.scaladsl.model._
import akka.pattern._
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.FileIO
import akka.testkit.{ImplicitSender, TestKit}
import com.typesafe.config.ConfigFactory
import org.scalatest.concurrent.Waiters
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}
import org.squbs.lifecycle.GracefulStop
import org.squbs.unicomplex.Timeouts._

import scala.concurrent.Await

object StreamTestSpec {
  val dummyJarsDir = getClass.getClassLoader.getResource("classpaths").getPath

  val classPaths = Array(
    "StreamCube",
    "StreamSvc"
  ) map (dummyJarsDir + "/" + _)

  val config = ConfigFactory.parseString(
    s"""
       |squbs {
       |  actorsystem-name = StreamTestSpec
       |  ${JMX.prefixConfig} = true
       |}
       |default-listener.bind-port = 0
    """.stripMargin
  )

  val boot = UnicomplexBoot(config)
    .createUsing { (name, config) => ActorSystem(name, config) }
    .scanComponents(classPaths)
    .initExtensions
    .start()
}

class StreamTestSpec extends TestKit(StreamTestSpec.boot.actorSystem) with ImplicitSender
    with WordSpecLike with Matchers with BeforeAndAfterAll with Waiters {

  implicit val am = ActorMaterializer()
  import system.dispatcher

  val portBindings = Await.result((Unicomplex(system).uniActor ? PortBindings).mapTo[Map[String, Int]], awaitMax)
  val port = portBindings("default-listener")

  override def afterAll(): Unit = {
    Unicomplex(system).uniActor ! GracefulStop
  }

  "UniComplex" must {

    "upload file with correct parts" in {
      val filePath = StreamTestSpec.getClass.getResource("/classpaths/StreamSvc/dummy.txt").getPath
      val file = new java.io.File(filePath)
      require(file.exists() && file.canRead)
      val chunkSize = 8192
      val responseF = Http().singleRequest(HttpRequest(HttpMethods.POST,
        uri = s"http://127.0.0.1:$port/streamsvc/file-upload",
        entity = HttpEntity(MediaTypes.`application/octet-stream`, FileIO.fromPath(file.toPath, chunkSize))))

      val actualResponseEntity = Await.result(responseF flatMap extractEntityAsString, awaitMax)
      val expectedNumberOfChunks = Math.ceil(file.length.toDouble / chunkSize).toInt
      val expectedResponseEntity = s"Chunk Count: $expectedNumberOfChunks ByteCount: ${file.length}"
      actualResponseEntity should be (expectedResponseEntity)
    }
  }
}
Example 8
Source File: SecretReader.scala From reactive-lib with Apache License 2.0
package com.lightbend.rp.secrets.scaladsl

import java.nio.charset.StandardCharsets

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.FileIO
import akka.util.ByteString
import java.nio.file.{ Path, Paths }

import scala.concurrent._

object SecretReader {
  @deprecated("Read from file /rp/secrets/%name%/%key% where %name% is transformed to lowercase, and '-' for non-alphanum", "1.7.0")
  def get(name: String, key: String)(implicit as: ActorSystem, mat: ActorMaterializer): Future[Option[ByteString]] = {
    import as.dispatcher

    sys
      .env
      .get(envName(name, key))
      .map(data => Future.successful(Some(ByteString(data))))
      .getOrElse(
        FileIO.fromPath(filePath(name, key))
          .runFold(ByteString.empty)(_ ++ _)
          .map(Some(_))
          .recover { case _: Throwable => None })
  }

  private[secrets] def envName(namespace: String, name: String): String =
    s"RP_SECRETS_${namespace}_$name"
      .toUpperCase
      .map(c => if (c.isLetterOrDigit) c else '_')

  private[scaladsl] def filePath(name: String, key: String): Path =
    Paths
      .get("/rp")
      .resolve("secrets")
      .resolve(name)
      .resolve(key)
}
Example 9
Source File: TarFlowSpec.scala From nexus with Apache License 2.0
package ch.epfl.bluebrain.nexus.storage

import java.io.ByteArrayInputStream
import java.nio.file.{Files, Path, Paths}

import akka.actor.ActorSystem
import akka.stream.alpakka.file.scaladsl.Directory
import akka.stream.scaladsl.{FileIO, Source}
import akka.testkit.TestKit
import akka.util.ByteString
import ch.epfl.bluebrain.nexus.storage.utils.{EitherValues, IOEitherValues, Randomness}
import org.apache.commons.compress.archivers.tar.TarArchiveInputStream
import org.apache.commons.io.FileUtils
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike
import org.scalatest.{BeforeAndAfterAll, Inspectors, OptionValues}

import scala.annotation.tailrec

class TarFlowSpec
    extends TestKit(ActorSystem("TarFlowSpec"))
    with AnyWordSpecLike
    with Matchers
    with IOEitherValues
    with Randomness
    with EitherValues
    with OptionValues
    with Inspectors
    with BeforeAndAfterAll {

  val basePath = Files.createTempDirectory("tarflow")
  val dir1     = basePath.resolve("one")
  val dir2     = basePath.resolve("two")

  override def afterAll(): Unit = {
    super.afterAll()
    FileUtils.cleanDirectory(basePath.toFile)
    ()
  }

  type PathAndContent = (Path, String)

  "A TarFlow" should {

    Files.createDirectories(dir1)
    Files.createDirectories(dir2)

    def relativize(path: Path): String = basePath.getParent().relativize(path).toString

    "generate the byteString for a tar file correctly" in {
      val file1        = dir1.resolve("file1.txt")
      val file1Content = genString()
      val file2        = dir1.resolve("file3.txt")
      val file2Content = genString()
      val file3        = dir2.resolve("file3.txt")
      val file3Content = genString()
      val files        = List(file1 -> file1Content, file2 -> file2Content, file3 -> file3Content)
      forAll(files) {
        case (file, content) => Source.single(ByteString(content)).runWith(FileIO.toPath(file)).futureValue
      }
      val byteString = Directory.walk(basePath).via(TarFlow.writer(basePath)).runReduce(_ ++ _).futureValue
      val bytes      = new ByteArrayInputStream(byteString.toArray)
      val tar        = new TarArchiveInputStream(bytes)

      @tailrec def readEntries(
          tar: TarArchiveInputStream,
          entries: List[PathAndContent] = Nil
      ): List[PathAndContent] = {
        val entry = tar.getNextTarEntry
        if (entry == null) entries
        else {
          val data = Array.ofDim[Byte](entry.getSize.toInt)
          tar.read(data)
          readEntries(tar, (Paths.get(entry.getName) -> ByteString(data).utf8String) :: entries)
        }
      }

      val directories = List(relativize(basePath) -> "", relativize(dir1) -> "", relativize(dir2) -> "")
      val untarred    = readEntries(tar).map { case (path, content) => path.toString -> content }
      val expected    = files.map { case (path, content) => relativize(path) -> content } ++ directories
      untarred should contain theSameElementsAs expected
    }
  }
}
Example 10
Source File: TarFlow.scala From nexus with Apache License 2.0
package ch.epfl.bluebrain.nexus.storage

import java.nio.charset.StandardCharsets.UTF_8
import java.nio.file.{Files, Path}

import akka.NotUsed
import akka.stream.scaladsl.{FileIO, Flow, Source}
import akka.util.ByteString
import org.apache.commons.compress.archivers.tar.{TarArchiveEntry, TarConstants}

// Note: this excerpt shows only the writer method. The enclosing object
// (named TarFlow, as used by the TarFlowSpec above) and its private helpers
// headerBytes, padToBoundary and terminalChunk -- which build tar entry
// headers, 512-byte record padding, and the terminating empty records -- are
// elided here.
object TarFlow {

  def writer(basePath: Path): Flow[Path, ByteString, NotUsed] =
    Flow[Path]
      .flatMapConcat {
        case path if Files.isRegularFile(path) =>
          val headerSource  = Source.single(headerBytes(basePath, path))
          val paddingSource = Source.single(padToBoundary(path))
          headerSource.concat(FileIO.fromPath(path)).concat(paddingSource)
        case path =>
          Source.single(headerBytes(basePath, path))
      }
      .concat(Source.single(terminalChunk))
}
Example 11
Source File: PackageObjectSpec.scala From nexus with Apache License 2.0
package ch.epfl.bluebrain.nexus.kg.storage

import java.nio.file.Paths
import java.util.UUID

import akka.actor.ActorSystem
import akka.http.scaladsl.model.Uri
import akka.stream.scaladsl.FileIO
import ch.epfl.bluebrain.nexus.kg.resources.ProjectIdentifier.ProjectRef
import ch.epfl.bluebrain.nexus.kg.resources.file.File.Digest
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.flatspec.AnyFlatSpecLike
import org.scalatest.matchers.should.Matchers

class PackageObjectSpec extends AnyFlatSpecLike with Matchers with ScalaFutures {

  "uriToPath" should "convert an Akka Uri that represents a valid file path to a Java Path" in {
    uriToPath("file:///some/path/my%20file.txt") shouldEqual Some(Paths.get("/some/path/my file.txt"))
    uriToPath("s3://some/path") shouldEqual None
    uriToPath("foo") shouldEqual None
  }

  "pathToUri" should "convert a Java Path to an Akka Uri" in {
    pathToUri(Paths.get("/some/path/my file.txt")) shouldEqual Uri("file:///some/path/my%20file.txt")
  }

  "mangle" should "generate a properly mangled path given a file project and UUID" in {
    val projUuid = UUID.fromString("4947db1e-33d8-462b-9754-3e8ae74fcd4e")
    val fileUuid = UUID.fromString("b1d7cda2-1ec0-40d2-b12e-3baf4895f7d7")
    mangle(ProjectRef(projUuid), fileUuid, "my file.jpg") shouldEqual
      "4947db1e-33d8-462b-9754-3e8ae74fcd4e/b/1/d/7/c/d/a/2/my file.jpg"
  }

  "digest" should "properly compute the hash of a given input" in {
    implicit val as: ActorSystem = ActorSystem()

    val filePath = "/storage/s3.json"
    val path     = Paths.get(getClass.getResource(filePath).toURI)
    val input    = FileIO.fromPath(path)
    val algo     = "SHA-256"

    input.runWith(digestSink(algo)(as.dispatcher)).futureValue shouldEqual Digest(
      algo,
      "5602c497e51680bef1f3120b1d6f65d480555002a3290029f8178932e8f4801a"
    )
  }
}
Example 12
Source File: TarFlowSpec.scala From nexus with Apache License 2.0
package ch.epfl.bluebrain.nexus.kg.archives

import java.nio.file.Files
import java.time.{Clock, Instant, ZoneId}

import akka.actor.ActorSystem
import akka.stream.scaladsl.FileIO
import akka.testkit.TestKit
import ch.epfl.bluebrain.nexus.kg.TestHelper
import ch.epfl.bluebrain.nexus.kg.storage.digestSink
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike

import scala.concurrent.duration._

class TarFlowSpec
    extends TestKit(ActorSystem("TarFlowSpec"))
    with AnyWordSpecLike
    with Matchers
    with TestHelper
    with ScalaFutures {

  implicit private val ec    = system.dispatcher
  implicit private val clock = Clock.fixed(Instant.EPOCH, ZoneId.systemDefault())

  implicit override def patienceConfig: PatienceConfig = PatienceConfig(55.second, 150.milliseconds)

  "A TarFlow" should {

    "tar a bunch of sources" in {
      val digest =
        "3fef41c5afe7a7ee11ee9d556a564fb57784cc5247b24c6ca70783f396fa158a1c7952504d3e1aa441de20cf065d740eec454c6ffb7fbc4b6351b950ee51c886"
      val elems = 500
      val contents = List.tabulate(2) { i =>
        val content = (i until (i + elems)).toList.mkString(",") + "\n"
        ArchiveSource(content.length.toLong, s"some/path$i/$i.txt", produce(content))
      }
      val path = Files.createTempFile("test", ".tar")
      TarFlow.write(contents).runWith(FileIO.toPath(path)).futureValue
      FileIO.fromPath(path).runWith(digestSink("SHA-512")).futureValue.value shouldEqual digest
      Files.delete(path)
    }
  }
}
Example 13
Source File: TransferTransformFile.scala From streams-tests with Apache License 2.0
package com.softwaremill.streams

import java.io.File

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.io.Framing
import akka.stream.scaladsl.{FileIO, Keep}
import akka.util.ByteString
import com.softwaremill.streams.util.TestFiles
import com.softwaremill.streams.util.Timed._

import scala.concurrent.{Await, Future}
import scalaz.stream.{io, text}
import scala.concurrent.duration._

trait TransferTransformFile {
  def run(from: File, to: File): Long
}

object AkkaStreamsTransferTransformFile extends TransferTransformFile {
  private lazy implicit val system = ActorSystem()

  override def run(from: File, to: File) = {
    implicit val mat = ActorMaterializer()

    val r: Future[Long] = FileIO.fromFile(from)
      .via(Framing.delimiter(ByteString("\n"), 1048576))
      .map(_.utf8String)
      .filter(!_.contains("#!@"))
      .map(_.replace("*", "0"))
      .intersperse("\n")
      .map(ByteString(_))
      .toMat(FileIO.toFile(to))(Keep.right)
      .run()

    Await.result(r, 1.hour)
  }

  def shutdown() = {
    system.terminate()
  }
}

object ScalazStreamsTransferTransformFile extends TransferTransformFile {
  override def run(from: File, to: File) = {
    io.linesR(from.getAbsolutePath)
      .filter(!_.contains("#!@"))
      .map(_.replace("*", "0"))
      .intersperse("\n")
      .pipe(text.utf8Encode)
      .to(io.fileChunkW(to.getAbsolutePath))
      .run
      .run

    to.length()
  }
}

object TransferTransformFileRunner extends App {
  def runTransfer(ttf: TransferTransformFile, sizeMB: Int): String = {
    val output = File.createTempFile("fft", "txt")
    try {
      ttf.run(TestFiles.testFile(sizeMB), output).toString
    } finally output.delete()
  }

  val tests = List(
    (ScalazStreamsTransferTransformFile, 10),
    (ScalazStreamsTransferTransformFile, 100),
    (ScalazStreamsTransferTransformFile, 500),
    (AkkaStreamsTransferTransformFile, 10),
    (AkkaStreamsTransferTransformFile, 100),
    (AkkaStreamsTransferTransformFile, 500)
  )

  runTests(tests.map { case (ttf, sizeMB) =>
    (s"${if (ttf == ScalazStreamsTransferTransformFile) "scalaz" else "akka"}, $sizeMB MB",
      () => runTransfer(ttf, sizeMB))
  }, 3)

  AkkaStreamsTransferTransformFile.shutdown()
}
Example 14
Source File: LogJson.scala From 006877 with MIT License
package aia.stream

import java.nio.file.{ Files, Path }
import java.io.File
import java.time.ZonedDateTime

import scala.concurrent.duration._
import scala.concurrent.ExecutionContext
import scala.concurrent.Future
import scala.util.{ Success, Failure }

import akka.Done
import akka.actor._
import akka.util.ByteString
import akka.stream.{ ActorAttributes, ActorMaterializer, IOResult }
import akka.stream.scaladsl.JsonFraming
import akka.stream.scaladsl.{ FileIO, BidiFlow, Flow, Framing, Keep, Sink, Source }
import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport._
import akka.http.scaladsl.marshalling.Marshal
import akka.http.scaladsl.model._
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server._
import spray.json._

object LogJson extends EventMarshalling
    with NotificationMarshalling
    with MetricMarshalling {

  def textInFlow(maxLine: Int) = {
    Framing.delimiter(ByteString("\n"), maxLine)
      .map(_.decodeString("UTF8"))
      .map(LogStreamProcessor.parseLineEx)
      .collect { case Some(e) => e }
  }

  def jsonInFlow(maxJsonObject: Int) = {
    JsonFraming.objectScanner(maxJsonObject)
      .map(_.decodeString("UTF8").parseJson.convertTo[Event])
  }

  def jsonFramed(maxJsonObject: Int) =
    JsonFraming.objectScanner(maxJsonObject)

  val jsonOutFlow = Flow[Event].map { event =>
    ByteString(event.toJson.compactPrint)
  }

  val notifyOutFlow = Flow[Summary].map { ws =>
    ByteString(ws.toJson.compactPrint)
  }

  val metricOutFlow = Flow[Metric].map { m =>
    ByteString(m.toJson.compactPrint)
  }

  val textOutFlow = Flow[Event].map { event =>
    ByteString(LogStreamProcessor.logLine(event))
  }

  def logToJson(maxLine: Int) = {
    BidiFlow.fromFlows(textInFlow(maxLine), jsonOutFlow)
  }

  def jsonToLog(maxJsonObject: Int) = {
    BidiFlow.fromFlows(jsonInFlow(maxJsonObject), textOutFlow)
  }

  def logToJsonFlow(maxLine: Int) = {
    logToJson(maxLine).join(Flow[Event])
  }

  def jsonToLogFlow(maxJsonObject: Int) = {
    jsonToLog(maxJsonObject).join(Flow[Event])
  }
}
Example 15
Source File: Event.scala From 006877 with MIT License
package aia.stream

import java.io.File
import java.time.ZonedDateTime

import scala.concurrent.Future

import akka.NotUsed
import akka.util.ByteString
import akka.stream.IOResult
import akka.stream.scaladsl.{ Source, FileIO, Framing }

import scala.concurrent.duration.FiniteDuration

case class Event(
  host: String,
  service: String,
  state: State,
  time: ZonedDateTime,
  description: String,
  tag: Option[String] = None,
  metric: Option[Double] = None
)

sealed trait State
case object Critical extends State
case object Error    extends State
case object Ok       extends State
case object Warning  extends State

object State {
  def norm(str: String): String = str.toLowerCase
  def norm(state: State): String = norm(state.toString)

  val ok       = norm(Ok)
  val warning  = norm(Warning)
  val error    = norm(Error)
  val critical = norm(Critical)

  def unapply(str: String): Option[State] = {
    val normalized = norm(str)
    if (normalized == norm(Ok)) Some(Ok)
    else if (normalized == norm(Warning)) Some(Warning)
    else if (normalized == norm(Error)) Some(Error)
    else if (normalized == norm(Critical)) Some(Critical)
    else None
  }
}

case class LogReceipt(logId: String, written: Long)
case class ParseError(logId: String, msg: String)
Example 16
Source File: ChunkedEntitiesDocs.scala From endpoints4s with MIT License
package endpoints4s.akkahttp.server

import akka.http.scaladsl.server.Route
import endpoints4s.algebra
import endpoints4s.algebra.JsonStreamingExample

trait ChunkedEntitiesDocs extends algebra.ChunkedEntitiesDocs with ChunkedEntities {

  //#implementation
  import java.nio.file.Paths
  import akka.stream.scaladsl.FileIO

  val logoRoute: Route =
    logo.implementedBy { _ =>
      FileIO.fromPath(Paths.get("/foo/bar/logo.png")).map(_.toArray)
    }
  //#implementation
}

import scala.concurrent.duration.DurationInt

//#json-streaming
import akka.stream.scaladsl.Source
import endpoints4s.akkahttp.server

object JsonStreamingExampleServer
    extends JsonStreamingExample
    with server.Endpoints
    with server.ChunkedJsonEntities
    with server.JsonEntitiesFromSchemas {

  val routes =
    ticks.implementedBy(_ => Source.tick(0.seconds, 1.second, ()))
}
//#json-streaming
Example 17
Source File: ChunkedEntitiesDocs.scala From endpoints4s with MIT License
package endpoints4s.play.server

import akka.stream.scaladsl.Source
import endpoints4s.algebra
import endpoints4s.algebra.JsonStreamingExample

import scala.concurrent.duration.DurationInt

trait ChunkedEntitiesDocs extends algebra.ChunkedEntitiesDocs with ChunkedEntities {

  //#implementation
  import akka.stream.scaladsl.FileIO
  import java.nio.file.Paths

  val logoHandler =
    logo.implementedBy { _ =>
      FileIO.fromPath(Paths.get("/foo/bar/logo.png")).map(_.toArray)
    }
  //#implementation
}

//#json-streaming
import endpoints4s.play.server

class JsonStreamingExampleServer(val playComponents: server.PlayComponents)
    extends JsonStreamingExample
    with server.Endpoints
    with server.ChunkedJsonEntities
    with server.JsonEntitiesFromSchemas {

  val routes = routesFromEndpoints(
    ticks.implementedBy(_ => Source.tick(0.seconds, 1.second, ()))
  )
}
//#json-streaming
Example 18
Source File: AttachmentCtrl.scala From Cortex with GNU Affero General Public License v3.0
package org.thp.cortex.controllers

import java.net.URLEncoder
import java.nio.file.Files
import javax.inject.{Inject, Singleton}

import play.api.http.HttpEntity
import play.api.libs.Files.DefaultTemporaryFileCreator
import play.api.mvc._
import play.api.{mvc, Configuration}

import akka.stream.scaladsl.FileIO
import net.lingala.zip4j.core.ZipFile
import net.lingala.zip4j.model.ZipParameters
import net.lingala.zip4j.util.Zip4jConstants
import org.thp.cortex.models.Roles

import org.elastic4play.Timed
import org.elastic4play.controllers.{Authenticated, Renderer}
import org.elastic4play.models.AttachmentAttributeFormat
import org.elastic4play.services.AttachmentSrv

// Note: the controller's constructor and most members are elided in this
// excerpt; the injected dependencies it uses below (attachmentSrv,
// authenticated, tempFileCreator, and the zip password from configuration)
// are not shown.
@Singleton
class AttachmentCtrl /* injected dependencies elided */ {

  @Timed("controllers.AttachmentCtrl.downloadZip")
  def downloadZip(hash: String, name: Option[String]): Action[AnyContent] = authenticated(Roles.read) { _ ⇒
    if (!name.getOrElse("").intersect(AttachmentAttributeFormat.forbiddenChar).isEmpty)
      BadRequest("File name is invalid")
    else {
      val f = tempFileCreator.create("zip", hash).path
      Files.delete(f)
      val zipFile   = new ZipFile(f.toFile)
      val zipParams = new ZipParameters
      zipParams.setCompressionLevel(Zip4jConstants.DEFLATE_LEVEL_FASTEST)
      zipParams.setEncryptFiles(true)
      zipParams.setEncryptionMethod(Zip4jConstants.ENC_METHOD_STANDARD)
      zipParams.setPassword(password)
      zipParams.setFileNameInZip(name.getOrElse(hash))
      zipParams.setSourceExternalStream(true)
      zipFile.addStream(attachmentSrv.stream(hash), zipParams)

      Result(
        header = ResponseHeader(
          200,
          Map(
            "Content-Disposition"       → s"""attachment; filename="${URLEncoder.encode(name.getOrElse(hash), "utf-8")}.zip"""",
            "Content-Type"              → "application/zip",
            "Content-Transfer-Encoding" → "binary",
            "Content-Length"            → Files.size(f).toString
          )
        ),
        body = HttpEntity.Streamed(FileIO.fromPath(f), Some(Files.size(f)), Some("application/zip"))
      )
    }
  }
}
Example 19
Source File: DefaultBodyWritables.scala From play-ws with Apache License 2.0
package play.api.libs.ws

import java.io.File
import java.nio.ByteBuffer
import java.util.function.Supplier

import akka.stream.scaladsl.StreamConverters.fromInputStream
import akka.stream.scaladsl.FileIO
import akka.stream.scaladsl.Source
import akka.util.ByteString

import scala.compat.java8.FunctionConverters.asScalaFromSupplier

// Note: the trait declaration below is reconstructed from the companion
// object at the end of the excerpt; the other members of the original file
// (including the File/InputStream writables that use FileIO and
// fromInputStream, hence the imports above) are elided.
trait DefaultBodyWritables {

  implicit val writeableOf_urlEncodedForm: BodyWritable[Map[String, Seq[String]]] = {
    import java.net.URLEncoder
    BodyWritable(
      formData =>
        InMemoryBody(
          ByteString.fromString(
            formData.flatMap(item => item._2.map(c => s"${item._1}=${URLEncoder.encode(c, "UTF-8")}")).mkString("&")
          )
        ),
      "application/x-www-form-urlencoded"
    )
  }

  implicit val writeableOf_urlEncodedSimpleForm: BodyWritable[Map[String, String]] = {
    writeableOf_urlEncodedForm.map[Map[String, String]](_.map(kv => kv._1 -> Seq(kv._2)))
  }
}

object DefaultBodyWritables extends DefaultBodyWritables
Example 20
Source File: FileIOEcho.scala From akka_streams_tutorial with MIT License
package alpakka.file

import java.nio.file.Paths

import akka.actor.ActorSystem
import akka.stream.IOResult
import akka.stream.scaladsl.FileIO

import scala.concurrent.Future
import scala.util.{Failure, Success}

object FileIOEcho extends App {
  implicit val system = ActorSystem("FileIOEcho")
  implicit val executionContext = system.dispatcher

  val sourceFileName = "./src/main/resources/testfile.jpg"
  val encFileName = "testfile.enc"
  val resultFileName = "testfile_result.jpg"

  val sourceOrig = FileIO.fromPath(Paths.get(sourceFileName), 3000)
  val sinkEnc = FileIO.toPath(Paths.get(encFileName))

  val doneEnc = sourceOrig
    //.wireTap(each => println(s"Chunk enc: $each"))
    .map(each => each.encodeBase64)
    .runWith(sinkEnc)

  doneEnc.onComplete {
    case Success(_) =>
      val sourceEnc = FileIO.fromPath(Paths.get(encFileName))
      val sinkDec = FileIO.toPath(Paths.get(resultFileName))
      val doneDec = sourceEnc
        //.wireTap(each => println(s"Chunk dec: $each"))
        .map(each => each.decodeBase64)
        .runWith(sinkDec)
      terminateWhen(doneDec)
    case Failure(ex) => println(s"Exception: $ex")
  }

  def terminateWhen(done: Future[IOResult]) = {
    done.onComplete {
      case Success(_) =>
        println(s"Flow Success. Written file: $resultFileName About to terminate...")
        system.terminate()
      case Failure(e) =>
        println(s"Flow Failure: $e. About to terminate...")
        system.terminate()
    }
  }
}
Example 21
Source File: XmlProcessing.scala From akka_streams_tutorial with MIT License
package alpakka.xml

import java.nio.file.Paths
import java.util.Base64

import akka.actor.ActorSystem
import akka.stream.alpakka.xml.scaladsl.XmlParsing
import akka.stream.alpakka.xml.{EndElement, ParseEvent, StartElement, TextEvent}
import akka.stream.scaladsl.{FileIO, Sink, Source}
import akka.util.ByteString

import scala.collection.immutable
import scala.concurrent.Future
import scala.util.{Failure, Success}

object XmlProcessing extends App {
  implicit val system = ActorSystem("XmlProcessing")
  implicit val executionContext = system.dispatcher

  val resultFileName = "testfile_result.jpg"

  val done = FileIO.fromPath(Paths.get("./src/main/resources/xml_with_base64_embedded.xml"))
    .via(XmlParsing.parser)
    .statefulMapConcat(() => {
      // state
      val stringBuilder: StringBuilder = StringBuilder.newBuilder
      var counter: Int = 0

      // aggregation function
      parseEvent: ParseEvent =>
        parseEvent match {
          case s: StartElement if s.attributes.contains("mediaType") =>
            stringBuilder.clear()
            val mediaType = s.attributes.head._2
            println("mediaType: " + mediaType)
            immutable.Seq(mediaType)
          case s: EndElement if s.localName == "embeddedDoc" =>
            val text = stringBuilder.toString
            println("File content: " + text) //large embedded files are read into memory
            Source.single(ByteString(text))
              .map(each => ByteString(Base64.getMimeDecoder.decode(each.toByteBuffer)))
              .runWith(FileIO.toPath(Paths.get(s"$counter-$resultFileName")))
            counter = counter + 1
            immutable.Seq(text)
          case t: TextEvent =>
            stringBuilder.append(t.text)
            immutable.Seq.empty
          case _ =>
            immutable.Seq.empty
        }
    })
    .runWith(Sink.ignore)

  terminateWhen(done)

  def terminateWhen(done: Future[_]) = {
    done.onComplete {
      case Success(_) =>
        println("Flow Success. About to terminate...")
        system.terminate()
      case Failure(e) =>
        println(s"Flow Failure: $e. About to terminate...")
        system.terminate()
    }
  }
}
Example 22
Source File: SplitWhen.scala From akka_streams_tutorial with MIT License
package sample.stream_shared_state

import java.nio.file.Paths

import akka.NotUsed
import akka.actor.ActorSystem
import akka.stream.IOResult
import akka.stream.scaladsl.{FileIO, Flow, Framing, Keep, Sink, Source}
import akka.util.ByteString
import org.slf4j.{Logger, LoggerFactory}

import scala.concurrent.Future
import scala.util.{Failure, Success}

object SplitWhen extends App {
  val logger: Logger = LoggerFactory.getLogger(this.getClass)
  implicit val system = ActorSystem("SplitWhen")
  implicit val executionContext = system.dispatcher

  val nonLinearCapacityFactor = 100 //raise to see how it scales
  val filename = "splitWhen.csv"

  def genResourceFile() = {
    logger.info(s"Writing resource file: $filename...")

    def fileSink(filename: String): Sink[String, Future[IOResult]] =
      Flow[String]
        .map(s => ByteString(s + "\n"))
        .toMat(FileIO.toPath(Paths.get(filename)))(Keep.right)

    Source.fromIterator(() => (1 to nonLinearCapacityFactor).toList.combinations(2))
      .map(each => s"${each.head},${each.last}")
      .runWith(fileSink(filename))
  }

  val sourceOfLines = FileIO.fromPath(Paths.get(filename))
    .via(Framing.delimiter(ByteString("\n"), maximumFrameLength = 1024, allowTruncation = true)
      .map(_.utf8String))

  val csvToRecord: Flow[String, Record, NotUsed] = Flow[String]
    .map(_.split(",").map(_.trim))
    .map(stringArrayToRecord)

  val terminationHook: Flow[Record, Record, Unit] = Flow[Record]
    .watchTermination() { (_, done) =>
      done.onComplete {
        case Failure(err) => logger.info(s"Flow failed: $err")
        case _ => system.terminate(); logger.info(s"Flow terminated")
      }
    }

  val printSink = Sink.foreach[Vector[Record]](each => println(s"Reached sink: $each"))

  private def stringArrayToRecord(cols: Array[String]) = Record(cols(0), cols(1))

  private def hasKeyChanged = {
    () => {
      var lastRecordKey: Option[String] = None

      currentRecord: Record =>
        lastRecordKey match {
          case Some(currentRecord.key) | None =>
            lastRecordKey = Some(currentRecord.key)
            List((currentRecord, false))
          case _ =>
            lastRecordKey = Some(currentRecord.key)
            List((currentRecord, true))
        }
    }
  }

  genResourceFile().onComplete {
    case Success(_) =>
      logger.info(s"Start processing...")
      sourceOfLines
        .via(csvToRecord)
        .via(terminationHook)
        .statefulMapConcat(hasKeyChanged)   // stateful decision
        .splitWhen(_._2)                    // split when key has changed
        .map(_._1)                          // proceed with payload
        .fold(Vector.empty[Record])(_ :+ _) // sum payload
        .mergeSubstreams                    // better performance, but why?
        .runWith(printSink)
    case Failure(exception) => logger.info(s"Exception: $exception")
  }

  case class Record(key: String, value: String)
}
Example 23
Source File: DirManager.scala From daf with BSD 3-Clause "New" or "Revised" License
package it.gov.daf.catalogmanager.listeners

import java.net.URLEncoder

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{ FileIO, Source }
import net.caoticode.dirwatcher.FSListener
import play.api.libs.ws.WSClient
import play.api.libs.ws.ahc.AhcWSClient
import play.api.mvc.MultipartFormData.FilePart
import play.Logger

import scala.concurrent.Future

class DirManager() extends FSListener {

  import java.nio.file.Path
  import scala.concurrent.ExecutionContext.Implicits.global

  val logger = Logger.underlying()

  override def onCreate(ref: Path): Unit = {
    implicit val system = ActorSystem()
    implicit val materializer = ActorMaterializer()
    val wsClient = AhcWSClient()

    val name = ref.getParent.getFileName.toString
    println(name)

    val uri: Option[String] = IngestionUtils.datasetsNameUri.get(name)
    val logicalUri = URLEncoder.encode(uri.get, "UTF-8")
    logger.debug("logicalUri: " + logicalUri)

    call(wsClient)
      .andThen { case _ => wsClient.close() }
      .andThen { case _ => system.terminate() }

    def call(wsClient: WSClient): Future[Unit] = {
      wsClient.url("http://localhost:9001/ingestion-manager/v1/add-datasets/" + logicalUri)
        //.withHeaders("content-type" -> "multipart/form-data")
        .post(Source(FilePart("upfile", name, None, FileIO.fromPath(ref)) :: List())).map { response =>
          val statusText: String = response.statusText
          logger.debug(s"Got a response $statusText")
        }
    }

    logger.debug(s"created $ref")
  }

  override def onDelete(ref: Path): Unit = println(s"deleted $ref")

  override def onModify(ref: Path): Unit = println(s"modified $ref")
}
Example 24
Source File: SemanticRepositorySpecs.scala From daf-semantics with Apache License 2.0
package specs

import org.junit.runner.RunWith

import scala.concurrent.{ Await, Future }
import scala.concurrent.duration.Duration

import play.api.test._
import play.api.http.Status
import play.api.Application
import play.api.inject.guice.GuiceApplicationBuilder
import play.api.libs.ws.WSResponse
import play.api.libs.ws.ahc.AhcWSClient

import org.specs2.runner.JUnitRunner
import org.specs2.mutable.Specification

import play.api.libs.json.Json
//import it.almawave.linkeddata.kb.utils.ConfigHelper

import scala.collection.JavaConversions._
import scala.collection.JavaConverters._

import play.twirl.api.Content
import play.api.test.Helpers._
import play.api.libs.json.JsObject
import java.io.File
import play.api.http.Writeable
import akka.stream.scaladsl.Source
import play.api.mvc.MultipartFormData
import play.api.libs.Files.TemporaryFile
import java.nio.file.Files
import org.asynchttpclient.AsyncHttpClient
import play.api.libs.ws.WS
import akka.util.ByteString
import play.api.mvc.MultipartFormData.DataPart
import play.api.mvc.MultipartFormData.FilePart
import akka.stream.scaladsl.FileIO
import play.api.libs.ws.WSClient

/*
 * TODO: REWRITE
 */
@RunWith(classOf[JUnitRunner])
class SemanticRepositorySpecs extends Specification {

  def application: Application = GuiceApplicationBuilder().build()

  "The semantic repository" should {

    "call kb/v1/contexts to obtain a list of contexts" in {
      new WithServer(app = application, port = 9999) {
        WsTestClient.withClient { implicit client =>
          val response: WSResponse = Await.result[WSResponse](
            client.url(s"http://localhost:${port}/kb/v1/contexts").execute,
            Duration.Inf)
          response.status must be equalTo Status.OK
          response.json.as[Seq[JsObject]].size must be equals 0
          // response.json.as[Seq[JsObject]].size must be greaterThan 0 // if pre-loaded ontologies!
        }
      }
    }

    "call kb/v1/contexts ensuring all contexts have triples" in {
      new WithServer(app = application, port = 9999) {
        WsTestClient.withClient { implicit client =>
          val response: WSResponse = Await.result[WSResponse](
            client.url(s"http://localhost:${port}/kb/v1/contexts").execute,
            Duration.Inf)
          val json_list = response.json.as[Seq[JsObject]]
          forall(json_list)((_) must not beNull)
          forall(json_list)(_.keys must contain("context", "triples"))
          forall(json_list)(item => (item \ "triples").get.as[Int] > 0)
        }
      }
    }
  }
}