scala.reflect.io.Directory Scala Examples
The following examples show how to use scala.reflect.io.Directory, a wrapper around java.io.File (shipped with scala-reflect) that adds recursive filesystem operations such as deleteRecursively.
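Before the project examples, here is a minimal, self-contained sketch of the core API they all rely on; the path and object name are arbitrary. Directory wraps a java.io.File and adds operations such as createDirectory and deleteRecursively, the latter returning false when something could not be removed.

import java.io.File
import scala.reflect.io.Directory

object DirectoryDemo extends App {
  // Wrap a plain java.io.File to get recursive filesystem operations.
  val dir = new Directory(new File("/tmp/directory-demo"))

  dir.createDirectory()   // by default also creates missing parent directories
  println(dir.exists)     // true

  // deleteRecursively returns false if any entry could not be deleted.
  val deleted: Boolean = dir.deleteRecursively()
  println(deleted)
}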
Example 1
Source File: servers.scala From scalatest-embedded-kafka with MIT License
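Teardown helpers for an embedded Kafka cluster with a Confluent Schema Registry: stopping EmbeddedKWithSR shuts down the registry app and the broker, then uses Directory#deleteRecursively to wipe the Kafka log directories when clearLogs is set.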
package net.manub.embeddedkafka.schemaregistry

import io.confluent.kafka.schemaregistry.RestApp
import kafka.server.KafkaServer
import net.manub.embeddedkafka.{ EmbeddedServer, EmbeddedServerWithKafka, EmbeddedZ }

import scala.reflect.io.Directory

// The opening of this wrapper was truncated in the extracted snippet; the
// signature below is an assumption based on the RestApp import and the
// `app` field used by EmbeddedKWithSR.
case class EmbeddedSR(app: RestApp) extends EmbeddedServer {
  override def stop(clearLogs: Boolean = false): Unit = app.stop()
}

case class EmbeddedKWithSR(
    factory: Option[EmbeddedZ],
    broker: KafkaServer,
    app: EmbeddedSR,
    logsDirs: Directory
)(implicit config: EmbeddedKafkaConfigWithSchemaRegistry)
    extends EmbeddedServerWithKafka {

  override def stop(clearLogs: Boolean): Unit = {
    app.stop()

    broker.shutdown()
    broker.awaitShutdown()

    factory.foreach(_.stop(clearLogs))

    if (clearLogs) logsDirs.deleteRecursively()
  }
}
Example 2
Source File: ConfigSpec.scala From wookiee with Apache License 2.0
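A specs2 suite exercising a config-watcher actor: it creates a services/test/conf directory, writes a test.conf file to trigger a ConfigChange message, and finally deletes the whole services tree with Directory(Path(...)).deleteRecursively() once the actor system terminates.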
package com.webtrends.harness

import java.io.{BufferedWriter, File, FileWriter}
import java.util.concurrent.TimeUnit

import akka.actor.{Actor, ActorSystem, Props}
import akka.testkit.TestProbe
import com.typesafe.config.ConfigFactory
import com.webtrends.harness.app.HarnessActor.ConfigChange
import com.webtrends.harness.config.ConfigWatcherActor
import com.webtrends.harness.health.{ComponentState, HealthComponent}
import com.webtrends.harness.service.messages.CheckHealth
import org.specs2.mutable.SpecificationWithJUnit

import scala.concurrent.ExecutionContextExecutor
import scala.concurrent.duration.FiniteDuration
import scala.reflect.io.{Directory, Path}

class ConfigSpec extends SpecificationWithJUnit {
  implicit val dur = FiniteDuration(2, TimeUnit.SECONDS)
  new File("services/test/conf").mkdirs()

  implicit val sys = ActorSystem("system", ConfigFactory.parseString(
    """
      akka.actor.provider = "akka.actor.LocalActorRefProvider"
      services { path = "services" }
    """).withFallback(ConfigFactory.load))

  implicit val ec: ExecutionContextExecutor = sys.dispatcher

  val probe = TestProbe()
  val parent = sys.actorOf(Props(new Actor {
    val child = context.actorOf(ConfigWatcherActor.props, "child")
    def receive = {
      case x if sender == child => probe.ref forward x
      case x                    => child forward x
    }
  }))

  sequential

  "config " should {
    "be in good health" in {
      probe.send(parent, CheckHealth)
      val msg = probe.expectMsgClass(classOf[HealthComponent])
      msg.state equals ComponentState.NORMAL
    }

    "detect changes in config" in {
      val file = new File("services/test/conf/test.conf")
      val bw = new BufferedWriter(new FileWriter(file))
      bw.write("test = \"value\"")
      bw.close()
      val msg = probe.expectMsgClass(classOf[ConfigChange])
      msg.isInstanceOf[ConfigChange]
    }
  }

  step {
    sys.terminate().onComplete { _ =>
      Directory(Path(new File("services"))).deleteRecursively()
    }
  }
}
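The cleanup step shows one of several equivalent conversions: Path(new File("services")) lifts the java.io.File into a scala.reflect.io.Path, and Directory(path) re-wraps it with directory-specific operations; new Directory(new File("services")) would achieve the same thing.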
Example 3
Source File: ManagedPath.scala From zio-rocksdb with Apache License 2.0
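ManagedPath exposes a temporary directory as a ZManaged resource: acquisition calls Files.createTempDirectory, and release deletes the tree through scala.reflect.io.Directory, dying with an IOException if the delete fails.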
package zio.rocksdb.internal

import java.io.IOException
import java.nio.file.{ Files, Path }

import zio.{ Task, UIO, ZIO, ZManaged }

import scala.reflect.io.Directory

object ManagedPath {
  private def createTempDirectory: Task[Path] = Task {
    Files.createTempDirectory("zio-rocksdb")
  }

  private def deleteDirectory(path: Path): UIO[Boolean] = UIO {
    new Directory(path.toFile).deleteRecursively()
  }

  private def deleteDirectoryE(path: Path): UIO[Unit] =
    deleteDirectory(path) >>= {
      case true  => ZIO.unit
      case false => ZIO.die(new IOException("Could not delete path recursively"))
    }

  def apply(): ZManaged[Any, Throwable, Path] =
    createTempDirectory.toManaged(deleteDirectoryE)
}
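A caller typically consumes the resource with use, which runs the release finalizer even if the body fails. A minimal sketch, assuming ZIO 1.x; the body is arbitrary:

import zio.Task

// The temp directory exists only for the lifetime of the `use` block;
// deleteDirectoryE runs afterwards regardless of success or failure.
val program: Task[Boolean] =
  ManagedPath().use { path =>
    Task(path.toFile.exists()) // do real work with `path` here
  }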
Example 4
Source File: EmbeddedServer.scala From embedded-kafka with MIT License
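The plain embedded-kafka variant of the same lifecycle: EmbeddedK bundles an optional ZooKeeper factory with a KafkaServer, and its stop method clears the broker's log directories via Directory(logsDirs.toFile).deleteRecursively.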
package net.manub.embeddedkafka

import java.nio.file.Path

import kafka.server.KafkaServer
import org.apache.zookeeper.server.ServerCnxnFactory

import scala.reflect.io.Directory

// The opening of this wrapper was truncated in the extracted snippet; the
// signature below is an assumption reconstructed from the companion
// object's apply method.
case class EmbeddedK(
    factory: Option[EmbeddedZ],
    broker: KafkaServer,
    logsDirs: Path,
    config: EmbeddedKafkaConfig
) extends EmbeddedServerWithKafka {
  override def stop(clearLogs: Boolean): Unit = {
    broker.shutdown()
    broker.awaitShutdown()

    factory.foreach(_.stop(clearLogs))

    if (clearLogs) {
      val _ = Directory(logsDirs.toFile).deleteRecursively
    }
  }
}

object EmbeddedK {
  def apply(
      broker: KafkaServer,
      logsDirs: Path,
      config: EmbeddedKafkaConfig
  ): EmbeddedK = EmbeddedK(factory = None, broker, logsDirs, config)
}
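The val _ = binding deliberately discards the Boolean returned by deleteRecursively; under compiler flags that warn on discarded non-Unit values (with fatal warnings enabled), a bare call would fail the build.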
Example 5
Source File: WorkspaceLoaderTests.scala From codepropertygraph with Apache License 2.0
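ScalaTest specs for a workspace loader that use new Directory(dir.toJava).deleteRecursively() both to simulate a missing workspace directory before loading and to clean up afterwards; dir.toJava bridges better.files.File to the java.io.File that Directory expects.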
package io.shiftleft.console.workspacehandling

import better.files.Dsl.mkdir
import better.files.File
import org.scalatest.{Matchers, WordSpec}

import scala.reflect.io.Directory

class WorkspaceLoaderTests extends WordSpec with Matchers {

  private val tmpDirPrefix = "workspace-tests"

  "WorkspaceLoader" should {

    "create workspace and workspace directory if nonexistent" in {
      val dir = File.newTemporaryDirectory(tmpDirPrefix)
      new Directory(dir.toJava).deleteRecursively()
      TestLoader().load(dir.path.toString)
      try {
        dir.exists shouldBe true
      } finally {
        new Directory(dir.toJava).deleteRecursively()
      }
    }

    "handle broken project.json gracefully by skipping project" in {
      File.usingTemporaryDirectory(tmpDirPrefix) { tmpDir =>
        mkdir(tmpDir / "1")
        (tmpDir / "1" / "project.json").write("{foo")
        TestLoader().load(tmpDir.path.toString).numberOfProjects shouldBe 0
      }
    }

    "load project correctly" in {
      File.usingTemporaryDirectory(tmpDirPrefix) { tmpDir =>
        val projectName = "foo"
        WorkspaceTests.createFakeProject(tmpDir, projectName)
        val project = TestLoader().loadProject((tmpDir / projectName).path)
        project match {
          case Some(p) =>
            p.name shouldBe "foo"
            p.inputPath shouldBe "foo"
            p.cpg shouldBe None
          case None => fail
        }
      }
    }

    "initialize workspace's project list correctly" in {
      File.usingTemporaryDirectory(tmpDirPrefix) { tmpDir =>
        val projectName = "foo"
        WorkspaceTests.createFakeProject(tmpDir, projectName)
        val workspace = TestLoader().load(tmpDir.toString)
        workspace.numberOfProjects shouldBe 1
      }
    }
  }

  "ProjectFile" should {
    import org.json4s.DefaultFormats
    import org.json4s.native.Serialization.{read => jsonRead, write => jsonWrite}
    implicit val formats: DefaultFormats.type = DefaultFormats

    "be serializable to json" in {
      jsonWrite(ProjectFile("foo", "aname")) shouldBe """{"inputPath":"foo","name":"aname"}"""
    }

    "be deserializable from json" in {
      val projectFile = jsonRead[ProjectFile]("""{"inputPath":"foo","name":"aname"}""")
      projectFile.inputPath shouldBe "foo"
      projectFile.name shouldBe "aname"
    }
  }
}
Example 6
Source File: servers.scala From embedded-kafka-schema-registry with MIT License
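The embedded-kafka-schema-registry successor of Example 1: the teardown is structurally identical, but logsDirs is now a java.nio.file.Path, so it is converted with toFile before Directory can delete it.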
package net.manub.embeddedkafka.schemaregistry

import java.nio.file.Path

import io.confluent.kafka.schemaregistry.rest.SchemaRegistryRestApplication
import kafka.server.KafkaServer
import net.manub.embeddedkafka.{ EmbeddedServer, EmbeddedServerWithKafka, EmbeddedZ }

import scala.reflect.io.Directory

// The opening of this wrapper was truncated in the extracted snippet; the
// signature below is an assumption based on the
// SchemaRegistryRestApplication import and the `app` field used below.
case class EmbeddedSR(app: SchemaRegistryRestApplication) extends EmbeddedServer {
  override def stop(clearLogs: Boolean = false): Unit = app.stop()
}

case class EmbeddedKWithSR(
    factory: Option[EmbeddedZ],
    broker: KafkaServer,
    app: EmbeddedSR,
    logsDirs: Path,
    config: EmbeddedKafkaConfig
) extends EmbeddedServerWithKafka {
  override def stop(clearLogs: Boolean): Unit = {
    app.stop()

    broker.shutdown()
    broker.awaitShutdown()

    factory.foreach(_.stop(clearLogs))

    if (clearLogs) {
      val _ = Directory(logsDirs.toFile).deleteRecursively
    }
  }
}
Example 7
Source File: ExperimentVariantEventLevelDBServiceTest.scala From izanami with Apache License 2.0
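An izanami test suite for LevelDB-backed A/B-testing events: each data store writes under a randomized ./target/leveldb-test/data-N path, and afterAll awaits the registered lifecycle hooks before recursively deleting the tree.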
package specs.leveldb.abtesting

import java.io.File

import domains.abtesting.events.impl.ExperimentVariantEventLevelDBService
import domains.abtesting.AbstractExperimentServiceTest
import domains.abtesting.events.ExperimentVariantEventService
import env.{DbDomainConfig, DbDomainConfigDetails, LevelDbConfig}
import org.scalatest.{BeforeAndAfter, BeforeAndAfterAll}
import test.FakeApplicationLifecycle

import scala.concurrent.duration.DurationInt
import scala.concurrent.{Await, Future}
import scala.util.Random

class ExperimentVariantEventLevelDBServiceTest
    extends AbstractExperimentServiceTest("LevelDb")
    with BeforeAndAfter
    with BeforeAndAfterAll {

  private val lifecycle: FakeApplicationLifecycle = new FakeApplicationLifecycle()

  override def dataStore(name: String): ExperimentVariantEventService.Service =
    ExperimentVariantEventLevelDBService(s"./target/leveldb-test/data-${Random.nextInt(1000)}")

  override protected def afterAll(): Unit = {
    super.afterAll()

    Await.result(Future.traverse(lifecycle.hooks) {
      _.apply()
    }, 5.seconds)

    import scala.reflect.io.Directory
    val directory = new Directory(new File("./target/leveldb-test/"))
    directory.deleteRecursively()
  }
}
Example 8
Source File: LevelDBJsonDataStoreTest.scala From izanami with Apache License 2.0
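The companion izanami suite for the LevelDB JSON data store, with the same hooks-then-delete teardown; here the data lives under ./target/leveldb-storetest, so that is the directory the cleanup must target.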
package specs.leveldb.store

import java.io.File

import env.{DbDomainConfig, DbDomainConfigDetails, InMemory, LevelDbConfig}
import org.scalatest.BeforeAndAfterAll
import store.AbstractJsonDataStoreTest
import test.FakeApplicationLifecycle

import scala.concurrent.{Await, Future}
import scala.concurrent.duration.DurationInt
import scala.util.Random
import store.leveldb._
import store.datastore.JsonDataStore

class LevelDBJsonDataStoreTest extends AbstractJsonDataStoreTest("LevelDb") with BeforeAndAfterAll {

  private val lifecycle: FakeApplicationLifecycle = new FakeApplicationLifecycle()

  override def dataStore(name: String): JsonDataStore.Service =
    LevelDBJsonDataStore(s"./target/leveldb-storetest/data-${Random.nextInt(1000)}")

  override protected def afterAll(): Unit = {
    super.afterAll()

    Await.result(Future.traverse(lifecycle.hooks) {
      _.apply()
    }, 5.seconds)

    import scala.reflect.io.Directory
    // Delete the directory this suite actually writes to; the original
    // snippet deleted ./target/leveldb-test/, which belongs to another suite.
    val directory = new Directory(new File("./target/leveldb-storetest/"))
    directory.deleteRecursively()
  }
}