java.io.FileNotFoundException Scala Examples
The following examples show how to use java.io.FileNotFoundException in Scala.
Each example is drawn from an open-source project; the source file, project, and license are noted above each listing.
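Before the examples, a minimal sketch of the pattern most of them share: validate a path up front and throw java.io.FileNotFoundException with a message that names the missing file. The object name and path below are made up for illustration.

import java.io.{File, FileNotFoundException}

object RequireFile {
  // Fail fast with a message that names the missing path.
  def apply(path: String): File = {
    val f = new File(path)
    if (!f.exists) throw new FileNotFoundException(s"No such file: $path")
    f
  }
}

// RequireFile("/tmp/config.conf") throws FileNotFoundException when the file is absent.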
Example 1
Source File: DirectoryListFileFinder.scala From seahorse with Apache License 2.0
package ai.deepsense.commons.utils

import java.io.{File, FileNotFoundException, IOException}

import scala.util.{Failure, Success, Try}

// Note: the enclosing class declaration was elided when this excerpt was extracted.
def filePredicate(f: File, desc: Option[String]): Boolean

def findFile(): Try[File] = {
  findFile(None)
}

def findFile(desc: String): Try[File] = {
  findFile(Some(desc))
}

def findFile(desc: Option[String]): Try[File] = {
  findPotentialFiles(
    dirsToSearch,
    listFilesInDirectory
    // convert to Try - give a nice message in the exception concerning the dirs, otherwise just Success it
  ).fold(dirs => Failure(
    new IOException(s"Unable to list files in dirs: ${dirs.mkString(", ")}")
  ), Success[Seq[File]]
  ).flatMap(_.find(filePredicate(_, desc))
    .map(Success[File])
    .getOrElse(Failure(
      new FileNotFoundException(
        s"Unable to find file ${desc.map(_ + " ").getOrElse("")}" +
          s"in dirs: ${dirsToSearch.mkString(", ")}")
    ))
  )
}
} // closes the elided enclosing class

object DirectoryListFileFinder {
  type EitherBadDirsOrFiles = Either[Seq[File], Seq[File]]

  def findPotentialFiles(
      dirs: Traversable[File],
      listFilesInDirectory: File => Option[Seq[File]]): EitherBadDirsOrFiles = {
    dirs.map { dir =>
      val files = listFilesInDirectory(dir)
      // if we're unable to list files inside the dir then
      // let's not lose this information by keeping the dir in Left
      files.toRight(dir)
    }.foldLeft(Right(Seq[File]()): EitherBadDirsOrFiles) {
      case (Left(badDirs), Left(badDir)) => Left(badDir +: badDirs)
      case (Left(badDirs), Right(_)) => Left(badDirs)
      case (Right(_), Left(badDir)) => Left(Seq(badDir))
      case (Right(files), Right(files2)) => Right(files ++ files2)
      case _ => ??? // to silence buggy 2.10 non-exhaustive match warning
    }
  }
}
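As a quick illustration of how findPotentialFiles partitions its results, a hypothetical driver (the directory names here are made up):

import java.io.File
import ai.deepsense.commons.utils.DirectoryListFileFinder.findPotentialFiles

val dirs = Seq(new File("/opt/app/lib"), new File("/usr/local/lib"))
// listFilesInDirectory yields None when a directory cannot be listed (listFiles returns null)
val listed = findPotentialFiles(dirs, dir => Option(dir.listFiles()).map(_.toSeq))
listed match {
  case Right(files)  => println(s"Found ${files.size} candidate files")
  case Left(badDirs) => println(s"Could not list: ${badDirs.mkString(", ")}")
}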
Example 2
Source File: CloudSuite.scala From cloud-integration with Apache License 2.0
package com.cloudera.spark.cloud.common

import java.io.{File, FileNotFoundException}

import com.cloudera.spark.cloud.s3.{S3ACommitterConstants, S3AConstants}
import org.apache.hadoop.conf.Configuration
import org.scalatest.concurrent.Eventually
import org.scalatest.{BeforeAndAfter, FunSuite}

import org.apache.spark.LocalSparkContext
import org.apache.spark.internal.Logging

// Note: the enclosing class/trait declaration was elided when this excerpt was extracted.
def loadConfiguration(): Configuration = {
  val config = new Configuration(true)
  getKnownSysprop(SYSPROP_CLOUD_TEST_CONFIGURATION_FILE).foreach { filename =>
    logDebug(s"Configuration property = `$filename`")
    val f = new File(filename)
    if (f.exists()) {
      // unsynced but its only a log statement
      // (note: this guard looks inverted in the excerpt; configLogged starts false, so logInfo never fires)
      if (configLogged) {
        configLogged = true
        logInfo(s"Loading configuration from $f")
      }
      config.addResource(f.toURI.toURL)
    } else {
      throw new FileNotFoundException(s"No file '$filename'" +
        s" declared in property $SYSPROP_CLOUD_TEST_CONFIGURATION_FILE")
    }
  }
  overlayConfiguration(
    config,
    Seq(
      HIVE_TESTS_DISABLED,
      METADATASTORE_AUTHORITATIVE,
      REQUIRED_HADOOP_VERSION,
      SCALE_TEST_ENABLED,
      SCALE_TEST_SIZE_FACTOR,
      S3A_CLIENT_FACTORY_IMPL,
      S3A_COMMITTER_TEST_ENABLED,
      S3A_ENCRYPTION_KEY_1,
      S3A_ENCRYPTION_KEY_2,
      S3A_METADATA_STORE_IMPL,
      S3GUARD_IMPLEMENTATION,
      S3GUARD_TEST_ENABLED
    )
  )
  // setup the committer from any property passed in
  getKnownSysprop(S3A_COMMITTER_NAME).foreach(committer => {
    val binding = S3ACommitterConstants.COMMITTERS_BY_NAME(committer.toLowerCase())
    binding.bind(config)
    logInfo(s"Using committer binding $binding")
  })
  config
}
} // closes the elided enclosing class
Example 3
Source File: OpenApiUtils.scala From gospeak with Apache License 2.0
package gospeak.web.utils

import java.io.{File, FileNotFoundException}

import com.typesafe.config.{ConfigFactory, ConfigRenderOptions}
import play.api.libs.json.{JsValue, Json}
import gospeak.libs.scala.Extensions._

import scala.util.Try

object OpenApiUtils {
  val specPath = "app/gospeak/web/api/swagger/gospeak.openapi.conf"

  def loadSpec(): Try[JsValue] = {
    Try(new File(s"web/$specPath")).filterWith(_.exists(), f => new FileNotFoundException(f.getAbsolutePath))
      .orElse(Try(new File(specPath)).filterWith(_.exists(), f => new FileNotFoundException(f.getAbsolutePath)))
      .flatMap(loadSpec)
  }

  private def loadSpec(file: File): Try[JsValue] = {
    val spec = ConfigFactory.parseFile(file).resolve()
    val json = spec.root().render(ConfigRenderOptions.concise())
    Try(Json.parse(json))
  }
}
Example 4
Source File: SnapshotRemoteImpl.scala From c4proto with Apache License 2.0
package ee.cone.c4gate

import java.io.FileNotFoundException
import java.util.UUID

import com.typesafe.scalalogging.LazyLogging
import ee.cone.c4actor.{Config, NanoTimer, RawSnapshot, RawSnapshotLoader, RawSnapshotLoaderFactory, RemoteSnapshotUtil, SnapshotMaker, SnapshotTask, SnapshotTaskSigner}
import ee.cone.c4di.{c4, c4multi, provide}
import okio.ByteString

import scala.annotation.tailrec

@c4multi("RemoteRawSnapshotLoaderImplApp") final class RemoteRawSnapshotLoaderImpl(baseURL: String)(util: HttpUtil)
  extends RawSnapshotLoader with LazyLogging {
  def load(snapshot: RawSnapshot): ByteString = {
    val tm = NanoTimer()
    val resp = util.get(s"$baseURL/${snapshot.relativePath}", Nil)
    assert(resp.status == 200)
    logger.debug(s"downloaded ${resp.body.size} in ${tm.ms} ms")
    resp.body
  }
}

@c4("MergingSnapshotApp") final class RemoteRawSnapshotLoaderFactory(inner: RemoteRawSnapshotLoaderImplFactory)
  extends RawSnapshotLoaderFactory {
  def create(baseURL: String): RawSnapshotLoader =
    inner.create(baseURL)
}

@c4("RemoteRawSnapshotApp") final class RemoteSnapshotUtilImpl(util: HttpUtil) extends RemoteSnapshotUtil with LazyLogging {
  def authHeaders(signed: String): List[(String, String)] =
    List(("x-r-signed", signed))

  def measure[R](f: =>R): R = {
    val startTime = System.currentTimeMillis
    val res = f
    logger.debug(s"Snapshot request time: ${System.currentTimeMillis - startTime}")
    res
  }

  def request(appURL: String, signed: String): ()=>List[RawSnapshot] = measure{
    val url: String = "/need-snapshot"
    val uuid = UUID.randomUUID().toString
    util.post(s"$appURL$url", ("x-r-response-key",uuid) :: authHeaders(signed))
    () =>
      @tailrec def retry(): HttpResponse =
        try {
          val res = util.get(s"$appURL/response/$uuid",Nil)
          if(res.status!=200) throw new FileNotFoundException
          res
        } catch {
          case e: FileNotFoundException =>
            Thread.sleep(1000)
            retry()
        }
      val headers = retry().headers
      headers.getOrElse("x-r-snapshot-keys",Nil) match {
        case Seq(res) => res.split(",").map(RawSnapshot).toList
        case _ => throw new Exception(headers.getOrElse("x-r-error-message",Nil).mkString(";"))
      }
  }
}

class RemoteSnapshotAppURL(val value: String)

@c4("RemoteRawSnapshotApp") final class DefRemoteSnapshotAppURL(config: Config)
  extends RemoteSnapshotAppURL(config.get("C4HTTP_SERVER"))

@c4("RemoteRawSnapshotApp") final class EnvRemoteRawSnapshotLoader(url: RemoteSnapshotAppURL, factory: RemoteRawSnapshotLoaderImplFactory) {
  @provide def get: Seq[RawSnapshotLoader] =
    List(factory.create(url.value))
}

@c4("RemoteRawSnapshotApp") final class RemoteSnapshotMaker(
  appURL: RemoteSnapshotAppURL,
  util: RemoteSnapshotUtil,
  signer: SnapshotTaskSigner
) extends SnapshotMaker {
  def make(task: SnapshotTask): List[RawSnapshot] =
    util.request(appURL.value, signer.sign(task, System.currentTimeMillis() + 3600*1000))()
}
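Example 4 treats FileNotFoundException as a control-flow signal: the client polls until the snapshot response becomes available. A distilled sketch of that retry idiom, assuming a caller-supplied fetch function (the object and method names below are made up):

import java.io.FileNotFoundException

import scala.annotation.tailrec

object Polling {
  // Retries `fetch` until it stops signalling "not ready" via FileNotFoundException,
  // sleeping between attempts, exactly as the retry() helper above does.
  @tailrec
  def untilFound[A](fetch: () => A, delayMs: Long = 1000): A =
    try fetch()
    catch {
      case _: FileNotFoundException =>
        Thread.sleep(delayMs)
        untilFound(fetch, delayMs)
    }
}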
Example 5
Source File: HdfsUtils.scala From Spark-2.3.1 with Apache License 2.0
package org.apache.spark.streaming.util

import java.io.{FileNotFoundException, IOException}

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs._

private[streaming] object HdfsUtils {

  def getOutputStream(path: String, conf: Configuration): FSDataOutputStream = {
    val dfsPath = new Path(path)
    val dfs = getFileSystemForPath(dfsPath, conf)
    // If the file exists and we have append support, append instead of creating a new file
    val stream: FSDataOutputStream = {
      if (dfs.isFile(dfsPath)) {
        if (conf.getBoolean("dfs.support.append", true) ||
            conf.getBoolean("hdfs.append.support", false) ||
            dfs.isInstanceOf[RawLocalFileSystem]) {
          dfs.append(dfsPath)
        } else {
          throw new IllegalStateException("File exists and there is no append support!")
        }
      } else {
        dfs.create(dfsPath)
      }
    }
    stream
  }

  def getInputStream(path: String, conf: Configuration): FSDataInputStream = {
    val dfsPath = new Path(path)
    val dfs = getFileSystemForPath(dfsPath, conf)
    try {
      dfs.open(dfsPath)
    } catch {
      case _: FileNotFoundException =>
        null
      case e: IOException =>
        // If we are really unlucky, the file may be deleted as we're opening the stream.
        // This can happen as clean up is performed by daemon threads that may be left over from
        // previous runs.
        if (!dfs.isFile(dfsPath)) null else throw e
    }
  }

  def checkState(state: Boolean, errorMsg: => String) {
    if (!state) {
      throw new IllegalStateException(errorMsg)
    }
  }

  def checkFileExists(path: String, conf: Configuration): Boolean = {
    val hdpPath = new Path(path)
    val fs = getFileSystemForPath(hdpPath, conf)
    fs.isFile(hdpPath)
  }
}
Example 6
Source File: ParseCSVwithHTMLSpec.scala From CSYE7200 with MIT License
package edu.neu.coe.csye7200.parse

import java.io.FileNotFoundException

import edu.neu.coe.csye7200.Tag
import org.scalatest.{FlatSpec, Matchers}

import scala.io.Source
import scala.util._

class ParseCSVwithHTMLSpec extends FlatSpec with Matchers {

  behavior of "it"

  it should "work" in {
    val parser = ParseCSVwithHTML(CsvParser())
    val resource = "report.csv"
    val title = "Report"
    val wy: Try[Tag] = parseResource(parser, resource, title)
    wy should matchPattern { case Success(_) => }
    wy.get.toString shouldBe
      s"""
         |<html>
         |<head>
         |<title>Report</title></head>
         |<body>
         |<table border="1">
         |<tr>
         |<th>Name</th>
         |<th>Notes</th></tr>
         |<tr>
         |<td>Robin</td>
         |<td><p>This is Robin</p></td></tr>
         |<tr>
         |<td>Nik</td>
         |<td><p><dir>This is Nik</dir></p></td></tr>
         |<tr>
         |<td>Dino</td>
         |<td><table><tr><th>day</th><th>food</th><tr><td>Sunday</td><td>Mousakka</td></table></td></tr>
         |<tr>
         |<td>Kal</td>
         |<td><ol><li>INFO</li><li>CSYE</li></ol></td></tr>
         |<tr></tr></table></body></html>""".stripMargin
  }

  private def parseResource(parser: ParseCSVwithHTML, resource: String, title: String) =
    Option(getClass.getResource(resource)) match {
      case Some(u) =>
        val source = Source.fromFile(u.toURI)
        val result = parser.parseStreamIntoHTMLTable(source.getLines.toStream, title)
        source.close()
        result
      case None => Failure(new FileNotFoundException(s"cannot get resource $resource"))
    }
}
Example 7
Source File: FileStorage.scala From ratatool with Apache License 2.0
package com.spotify.ratatool.io

import java.io.FileNotFoundException
import java.net.URI
import java.nio.ByteBuffer
import java.nio.channels.SeekableByteChannel

import org.apache.avro.file.SeekableInput
import org.apache.beam.sdk.io.FileSystems
import org.apache.beam.sdk.io.fs.MatchResult.Metadata

import scala.jdk.CollectionConverters._

private[ratatool] object FileStorage {
  def apply(path: String): FileStorage = new FileStorage(path)
  def isLocalUri(uri: URI): Boolean = uri.getScheme == null || uri.getScheme == "file"
  def isGcsUri(uri: URI): Boolean = uri.getScheme == "gs"
  def isHdfsUri(uri: URI): Boolean = uri.getScheme == "hdfs"
}

private[ratatool] class FileStorage(protected[io] val path: String) {

  def exists: Boolean = !FileSystems.`match`(path).metadata.isEmpty

  def listFiles: Seq[Metadata] = FileSystems.`match`(path).metadata().asScala.toList

  def isDone: Boolean = {
    val partPattern = "([0-9]{5})-of-([0-9]{5})".r
    val metadata = try {
      listFiles
    } catch {
      case e: FileNotFoundException => Seq.empty
    }
    val nums = metadata.flatMap { meta =>
      val m = partPattern.findAllIn(meta.resourceId().toString)
      if (m.hasNext) {
        Some(m.group(1).toInt, m.group(2).toInt)
      } else {
        None
      }
    }

    if (metadata.isEmpty) {
      // empty list
      false
    } else if (nums.nonEmpty) {
      // found xxxxx-of-yyyyy pattern
      val parts = nums.map(_._1).sorted
      val total = nums.map(_._2).toSet
      metadata.size == nums.size && // all paths matched
        total.size == 1 && total.head == parts.size && // yyyyy part
        parts.head == 0 && parts.last + 1 == parts.size // xxxxx part
    } else {
      true
    }
  }
}
Example 8
Source File: LogFile.scala From kyuubi with Apache License 2.0
package yaooqinn.kyuubi.operation

import java.io.{BufferedReader, File, FileInputStream, FileNotFoundException, FileOutputStream, InputStreamReader, IOException, PrintStream}
import java.util.ArrayList

import scala.collection.JavaConverters._

import org.apache.commons.io.FileUtils
import org.apache.hadoop.io.IOUtils
import org.apache.kyuubi.Logging
import org.apache.spark.sql.Row

import yaooqinn.kyuubi.KyuubiSQLException

class LogFile private (
    file: File,
    private var reader: Option[BufferedReader],
    writer: PrintStream,
    @volatile private var isRemoved: Boolean = false) extends Logging {

  def this(file: File) = {
    this(file, LogFile.createReader(file, isRemoved = false), new PrintStream(new FileOutputStream(file)))
  }

  private def resetReader(): Unit = {
    reader.foreach(IOUtils.closeStream)
    reader = None
  }

  private def readResults(nLines: Long): Seq[Row] = {
    reader = reader.orElse(LogFile.createReader(file, isRemoved))
    val logs = new ArrayList[Row]()
    reader.foreach { r =>
      var i = 1
      try {
        var line: String = r.readLine()
        while ((i < nLines || nLines <= 0) && line != null) {
          logs.add(Row(line))
          line = r.readLine()
          i += 1
        }
      } catch {
        case e: FileNotFoundException =>
          val operationHandle = file.getName
          val path = file.getAbsolutePath
          val msg = if (isRemoved) {
            s"Operation[$operationHandle] has been closed and the log file $path has been removed"
          } else {
            s"Operation[$operationHandle] Log file $path is not found"
          }
          throw new KyuubiSQLException(msg, e)
      }
    }
    logs.asScala
  }

  def write(msg: String): Unit = {
    writer.print(msg)
  }

  def close(): Unit = synchronized {
    try {
      reader.foreach(_.close())
      writer.close()
      if (!isRemoved) {
        FileUtils.forceDelete(file)
        isRemoved = true
      }
    } catch {
      case e: IOException =>
        error(s"Failed to remove corresponding log file of operation: ${file.getName}", e)
    }
  }
}

object LogFile {

  def createReader(file: File, isRemoved: Boolean): Option[BufferedReader] = try {
    Option(new BufferedReader(new InputStreamReader(new FileInputStream(file))))
  } catch {
    case e: FileNotFoundException =>
      val operationHandle = file.getName
      val path = file.getAbsolutePath
      val msg = if (isRemoved) {
        s"Operation[$operationHandle] has been closed and the log file $path has been removed"
      } else {
        s"Operation[$operationHandle] Log file $path is not found"
      }
      throw new KyuubiSQLException(msg, e)
  }
}
Example 9
Source File: AvroInOutTest.scala From TransmogrifAI with BSD 3-Clause "New" or "Revised" License
package com.salesforce.op.utils.io.avro

import java.io.{File, FileNotFoundException, FileWriter}
import java.nio.file.Paths

import com.salesforce.op.test.TestSparkContext
import com.salesforce.op.utils.io.avro.AvroInOut._
import org.apache.avro.generic.GenericRecord
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.spark.rdd.RDD
import org.junit.runner.RunWith
import org.scalatest.FlatSpec
import org.scalatest.junit.JUnitRunner

@RunWith(classOf[JUnitRunner])
class AvroInOutTest extends FlatSpec with TestSparkContext {
  val avroSchemaPath = s"$testDataDir/PassengerDataAll.avsc"
  val avroFilePath = s"$testDataDir/PassengerDataAll.avro"
  val avroFileRecordCount = 891
  val hdfs: FileSystem = FileSystem.get(sc.hadoopConfiguration)
  lazy val avroTemp: String = tempDir + "/avro-inout-test"

  Spec(AvroInOut.getClass) should "creates RDD from an avro file" in {
    val res = readPathSeq(avroFilePath, withCount = true, deepCopy = true, persist = false)
    res shouldBe a[RDD[_]]
    res.count shouldBe avroFileRecordCount
  }

  it should "creates RDD from a sequence of avro files" in {
    val res = readPathSeq(s"$avroFilePath,$avroFilePath")
    res.count shouldBe avroFileRecordCount*2
  }

  it should "create RDD from a mixed sequence of valid and invalid avro files" in {
    val res = readPathSeq(s"badfile/path1,$avroFilePath,badfile/path2,$avroFilePath,badfile/path3")
    res.count shouldBe avroFileRecordCount*2
  }

  it should "throw an error if passed in avro files are invalid" in {
    val error = intercept[IllegalArgumentException](readPathSeq("badfile/path1,badfile/path2"))
    error.getMessage shouldBe "No valid directory found in path 'badfile/path1,badfile/path2'"
  }

  it should "creates Some(RDD) from an avro file" in {
    val res = read(avroFilePath)
    res.size shouldBe 1
    res.get shouldBe an[RDD[_]]
    res.get.count shouldBe avroFileRecordCount
  }

  it should "create None from an invalid avro file" in {
    val res = read("badfile/path")
    res shouldBe None
  }

  Spec[AvroWriter[_]] should "writeAvro to filesystem" in {
    val avroData = readPathSeq(avroFilePath).asInstanceOf[RDD[GenericRecord]]
    val avroSchema = loadFile(avroSchemaPath)
    val error = intercept[FileNotFoundException](hdfs.listStatus(new Path(avroTemp)))
    error.getMessage shouldBe s"File $avroTemp does not exist"
    AvroWriter(avroData).writeAvro(avroTemp, avroSchema)
    val hdfsFiles = hdfs.listStatus(new Path(avroTemp)) filter (x => x.getPath.getName.contains("part"))
    val res = readPathSeq((for { x <- hdfsFiles } yield avroTemp + "/" + x.getPath.getName).mkString(","))
    res.count shouldBe avroFileRecordCount
  }

  it should "checkPathsExist" in {
    val tmpDir = Paths.get(File.separator, "tmp").toFile
    val f1 = new File(tmpDir, "avroinouttest")
    f1.delete()
    val w = new FileWriter(f1)
    w.write("just checking")
    w.close()
    val f2 = new File(tmpDir, "thisfilecannotexist")
    f2.delete()
    val f3 = new File(tmpDir, "this file cannot exist")
    f3.delete()
    assume(f1.exists && !f2.exists && !f3.exists)

    // check for one dir being invalid in the path amongst two
    selectExistingPaths(s"$f1,$f2") shouldBe f1.toString

    // check if all dirs in the path are invalid then we get an exception
    intercept[IllegalArgumentException] { selectExistingPaths(f2.toString) }

    // also, check if all dirs in the path are invalid ( in a different way ) then we get an exception
    intercept[IllegalArgumentException] { selectExistingPaths(f3.toString) }

    // check for one dir being invalid ( in a different way ) in the path amongst the two dirs in it
    selectExistingPaths(s"$f1,$f3") shouldBe f1.toString

    // check for paths order insensitivity
    selectExistingPaths(s"$f3,$f1") shouldBe f1.toString

    // check for an exception if the path is an empty string
    intercept[IllegalArgumentException] { selectExistingPaths("") }
  }
}
Example 10
Source File: CopyCsvFileTrait.scala From cloud-integration with Apache License 2.0
package com.cloudera.spark.cloud.common

import java.io.{EOFException, FileNotFoundException}

import org.apache.hadoop.fs.Path

// Note: the enclosing trait declaration was elided when this excerpt was extracted.
override def prepareTestCSVFile(): Unit = {
  require(hasCSVTestFile(), "No CSV file")
  require(isFilesystemDefined, "Test FS is not defined; call initFS() first")
  // here the CSV file is copied over
  val source = sourceCSVFilePath.get
  if (source.toUri.getScheme == "wasb") {
    // source is already in Azure
    testCSVFile = sourceCSVFilePath
    deleteTestCSVFile = false
  } else {
    val srcStatus = source.getFileSystem(getConf).getFileStatus(source)
    if (srcStatus.getLen == 0) {
      throw new EOFException(s"File $source is an empty file")
    }
    // need to copy over
    val destFile = path(source.getName)
    testCSVFile = Some(destFile)
    var toCopy = false
    try {
      val status = filesystem.getFileStatus(destFile)
      if (status.getLen != srcStatus.getLen) {
        logInfo(s"Dest file exists, but length of $status != source data $srcStatus")
      } else {
        logInfo(s"Datafile exists; no copy needed: $status")
        toCopy = false
      }
    } catch {
      case _ : FileNotFoundException =>
        toCopy = true
    }
    if (toCopy) {
      copyFile(sourceCSVFilePath.get, destFile, getConf, true)
    }
  }
}
} // closes the elided enclosing trait
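Example 10 uses FileSystem.getFileStatus as an existence probe: a FileNotFoundException means the destination is absent and a copy is needed. A distilled sketch of that Hadoop idiom (the method name needsCopy is made up):

import java.io.FileNotFoundException

import org.apache.hadoop.fs.{FileSystem, Path}

// Returns true when the destination does not exist and therefore needs a copy.
def needsCopy(fs: FileSystem, dest: Path): Boolean =
  try { fs.getFileStatus(dest); false }
  catch { case _: FileNotFoundException => true }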
Example 11
Source File: CompatibilitySuite.scala From sparklens with Apache License 2.0
import java.io.{ByteArrayOutputStream, FileNotFoundException, PrintStream}

import com.qubole.sparklens.TestUtils
import com.qubole.sparklens.app.ReporterApp
import org.scalatest.FunSuite

import scala.util.control.Breaks._

class CompatibilitySuite extends FunSuite {

  test("should be able to report on previously generated sparklens dumps") {
    breakable {
      (1 to 100).foreach(x => { //run for the versions of sparklens output saved
        try {
          val testInput = TestUtils.getFileContents(
            s"${System.getProperty("user.dir")}/src/test/compatibility-files/version-${x}.json")

          val testOut = new ByteArrayOutputStream()
          Console.withOut(new PrintStream(testOut)) {
            ReporterApp.startAnalysersFromString(testInput)
          }
          val testOutput = testOut.toString

          val olderOutput = TestUtils.getFileContents(
            s"${System.getProperty("user.dir")}/src/test/compatibility-files/version-${x}.output")

          olderOutput.split("\n").foreach(line => {
            assert(testOutput.contains(line))
          })
        } catch {
          case e: FileNotFoundException => break
        }
      })
    }
  }
}
Example 12
Source File: HdfsUtils.scala From multi-tenancy-spark with Apache License 2.0
package org.apache.spark.streaming.util

import java.io.{FileNotFoundException, IOException}

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs._

private[streaming] object HdfsUtils {

  def getOutputStream(path: String, conf: Configuration): FSDataOutputStream = {
    val dfsPath = new Path(path)
    val dfs = getFileSystemForPath(dfsPath, conf)
    // If the file exists and we have append support, append instead of creating a new file
    val stream: FSDataOutputStream = {
      if (dfs.isFile(dfsPath)) {
        if (conf.getBoolean("hdfs.append.support", false) || dfs.isInstanceOf[RawLocalFileSystem]) {
          dfs.append(dfsPath)
        } else {
          throw new IllegalStateException("File exists and there is no append support!")
        }
      } else {
        dfs.create(dfsPath)
      }
    }
    stream
  }

  def getInputStream(path: String, conf: Configuration): FSDataInputStream = {
    val dfsPath = new Path(path)
    val dfs = getFileSystemForPath(dfsPath, conf)
    try {
      dfs.open(dfsPath)
    } catch {
      case _: FileNotFoundException =>
        null
      case e: IOException =>
        // If we are really unlucky, the file may be deleted as we're opening the stream.
        // This can happen as clean up is performed by daemon threads that may be left over from
        // previous runs.
        if (!dfs.isFile(dfsPath)) null else throw e
    }
  }

  def checkState(state: Boolean, errorMsg: => String) {
    if (!state) {
      throw new IllegalStateException(errorMsg)
    }
  }

  def checkFileExists(path: String, conf: Configuration): Boolean = {
    val hdpPath = new Path(path)
    val fs = getFileSystemForPath(hdpPath, conf)
    fs.isFile(hdpPath)
  }
}
Example 13
Source File: PersistentDMap.scala From CM-Well with Apache License 2.0
package k.grid.dmap.impl.persistent

import java.io.{File, FileNotFoundException, PrintWriter}

import com.typesafe.scalalogging.LazyLogging
import k.grid.{Config, Grid}
import k.grid.dmap.api._
import play.api.libs.json.Json

import scala.util.{Failure, Success, Try}
import scala.concurrent.duration._
import json.MapDataJsonProtocol._

import scala.concurrent.ExecutionContext.Implicits.global

object PersistentDMap extends DMapFacade {
  override def masterType: DMapActorInit = DMapActorInit(classOf[PersistentMaster], "PersistentMaster")
  override def slaveType: DMapActorInit = DMapActorInit(classOf[PersistentSlave], "PersistentSlave")
}

class PersistentMaster extends DMapMaster {
  override val facade: DMapFacade = PersistentDMap
  override def onStart: Unit = {}
}

class PersistentSlave extends DMapSlave with LazyLogging {

  Grid.system.scheduler.schedule(5.seconds, 1.second, self, WriteData)

  case class MapHolder(m: Map[String, SettingsValue], timestamp: Long)

  case object NewData extends DMapMessage {
    override def act: Unit = {
      hasNewData = true
    }
  }

  case object WriteData extends DMapMessage {
    override def act: Unit = {
      val m = facade.sm
      if (hasNewData) {
        writeMap(MapData(m, lastTimestamp))
        hasNewData = false
      }
    }
  }

  var hasNewData: Boolean = false

  private val dataFile = new File(s"${Grid.persistentDmapDir}/${Config.clusterName}")

  def readMap: Option[MapData] = {
    val content = Try {
      val src = scala.io.Source.fromFile(dataFile)
      val mData = Json.parse(src.getLines().mkString("\n")).as[MapData]
      src.close()
      mData
    } match {
      case Success(c) => Some(c)
      case Failure(e) if e.isInstanceOf[FileNotFoundException] => None
      case Failure(e) => {
        logger.error(e.getMessage, e)
        None
      }
    }
    content
  }

  def writeMap(md: MapData) = {
    val content = Json.stringify(Json.toJson(md))
    new PrintWriter(dataFile) { write(content); close }
  }

  override val facade: DMapFacade = PersistentDMap

  override def onStart: Unit = {
    if (Grid.isController) {
      import java.io.File
      logger.info(s" *** Will use data dir: ${Grid.persistentDmapDir}")
      Try(new File(Grid.persistentDmapDir).mkdir())
      val mdOpt = readMap
      mdOpt.foreach { md =>
        lastTimestamp = md.timestamp
        facade.sm = md.m
      }
    }
  }

  override protected def onUpdate(oldMap: Map[String, SettingsValue], newMap: Map[String, SettingsValue], timestamp: Long): Unit = {
    if (Grid.isController) self ! NewData
  }
}
Example 14
Source File: ParseCSVwithHTMLSpec.scala From CSYE7200_Old with MIT License
package edu.neu.coe.csye7200.parse

import java.io.FileNotFoundException

import org.scalatest.{FlatSpec, Matchers}

import scala.io.Source
import scala.util._

class ParseCSVwithHTMLSpec extends FlatSpec with Matchers {

  behavior of "it"

  it should "work" in {
    val parser = ParseCSVwithHTML(CsvParser())
    val resource = "report.csv"
    val title = "Report"
    val wy: Try[String] = parseResource(parser, resource, title)
    wy should matchPattern { case Success(_) => }
    wy.get shouldBe
      s"""<html><head><title>$title</title></head>
         |<body><table><tr><th>Name</th>
         |<th>Notes</th>
         |</tr>
         |<tr><td>Robin</td>
         |<td><p>This is Robin</p></td>
         |</tr>
         |<tr><td>Nik</td>
         |<td><p><dir>This is Nik</dir></p></td>
         |</tr>
         |<tr><td>Dino</td>
         |<td><table><tr><th>day</th><th>food</th><tr><td>Sunday</td><td>Mousakka</td></table></td>
         |</tr>
         |<tr><td>Kal</td>
         |<td><ol><li>INFO</li><li>CSYE</li></ol></td>
         |</tr>
         |<tr></tr>
         |</table></body></html>
         |""".stripMargin
  }

  private def parseResource(parser: ParseCSVwithHTML, resource: String, title: String) =
    Option(getClass.getResource(resource)) match {
      case Some(u) => Success(parser.parseStreamIntoHTMLTable(Source.fromFile(u.toURI).getLines.toStream, title))
      case None => Failure(new FileNotFoundException(s"cannot get resource $resource"))
    }
}
Example 15
Source File: HadoopFileSystemLogStore.scala From delta with Apache License 2.0
package org.apache.spark.sql.delta.storage

import java.io.{BufferedReader, FileNotFoundException, InputStreamReader}
import java.nio.charset.StandardCharsets.UTF_8
import java.nio.file.FileAlreadyExistsException
import java.util.UUID

import scala.collection.JavaConverters._

import org.apache.commons.io.IOUtils
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileStatus, FileSystem, Path}

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SparkSession

// Note: the enclosing class declaration was elided when this excerpt was extracted.
protected def writeWithRename(
    path: Path,
    actions: Iterator[String],
    overwrite: Boolean = false): Unit = {
  val fs = path.getFileSystem(getHadoopConfiguration)

  if (!fs.exists(path.getParent)) {
    throw new FileNotFoundException(s"No such file or directory: ${path.getParent}")
  }
  if (overwrite) {
    val stream = fs.create(path, true)
    try {
      actions.map(_ + "\n").map(_.getBytes(UTF_8)).foreach(stream.write)
    } finally {
      stream.close()
    }
  } else {
    if (fs.exists(path)) {
      throw new FileAlreadyExistsException(path.toString)
    }
    val tempPath = createTempPath(path)
    var streamClosed = false // This flag is to avoid double close
    var renameDone = false // This flag is to save the delete operation in most of cases.
    val stream = fs.create(tempPath)
    try {
      actions.map(_ + "\n").map(_.getBytes(UTF_8)).foreach(stream.write)
      stream.close()
      streamClosed = true
      try {
        if (fs.rename(tempPath, path)) {
          renameDone = true
        } else {
          if (fs.exists(path)) {
            throw new FileAlreadyExistsException(path.toString)
          } else {
            throw new IllegalStateException(s"Cannot rename $tempPath to $path")
          }
        }
      } catch {
        case _: org.apache.hadoop.fs.FileAlreadyExistsException =>
          throw new FileAlreadyExistsException(path.toString)
      }
    } finally {
      if (!streamClosed) {
        stream.close()
      }
      if (!renameDone) {
        fs.delete(tempPath, false)
      }
    }
  }
}

protected def createTempPath(path: Path): Path = {
  new Path(path.getParent, s".${path.getName}.${UUID.randomUUID}.tmp")
}

override def invalidateCache(): Unit = {}
} // closes the elided enclosing class
Example 16
Source File: ViveknSentimentUtils.scala From spark-nlp with Apache License 2.0
package com.johnsnowlabs.nlp.annotators.sda.vivekn

import java.io.FileNotFoundException

import com.johnsnowlabs.nlp.util.io.ExternalResource
import com.johnsnowlabs.nlp.util.io.ResourceHelper.SourceStream

import scala.collection.mutable.{ListBuffer, Map => MMap}

trait ViveknSentimentUtils {

  def negateSequence(words: Array[String]): Set[String] = {
    val negations = Seq("not", "cannot", "no")
    val delims = Seq("?.,!:;")
    val result = ListBuffer.empty[String]
    var negation = false
    var prev: Option[String] = None
    var pprev: Option[String] = None
    words.foreach( word => {
      val processed = word.toLowerCase
      val negated = if (negation) "not_" + processed else processed
      result.append(negated)
      if (prev.isDefined) {
        val bigram = prev.get + " " + negated
        result.append(bigram)
        if (pprev.isDefined) {
          result.append(pprev.get + " " + bigram)
        }
        pprev = prev
      }
      prev = Some(negated)
      if (negations.contains(processed) || processed.endsWith("n't")) negation = !negation
      if (delims.exists(word.contains)) negation = false
    })
    result.toSet
  }

  def ViveknWordCount(
      er: ExternalResource,
      prune: Int,
      f: List[String] => Set[String],
      left: MMap[String, Long] = MMap.empty[String, Long].withDefaultValue(0),
      right: MMap[String, Long] = MMap.empty[String, Long].withDefaultValue(0)
  ): (MMap[String, Long], MMap[String, Long]) = {
    val regex = er.options("tokenPattern").r
    val prefix = "not_"
    val sourceStream = SourceStream(er.path)
    sourceStream.content.foreach(c => c.foreach(line => {
      val words = regex.findAllMatchIn(line).map(_.matched).toList
      f.apply(words).foreach(w => {
        left(w) += 1
        right(prefix + w) += 1
      })
    }))
    sourceStream.close()
    if (left.isEmpty || right.isEmpty)
      throw new FileNotFoundException("Word count dictionary for vivekn sentiment does not exist or is empty")
    if (prune > 0)
      (left.filter{case (_, v) => v > 1}, right.filter{case (_, v) => v > 1})
    else
      (left, right)
  }
}
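To see what negateSequence produces, a small hypothetical driver (the sentence is made up; words following a negation token are prefixed with not_, and unigrams, bigrams, and trigrams are all emitted):

object NegateSequenceDemo extends App with ViveknSentimentUtils {
  // Prints the generated n-grams; the set includes entries such as
  // "do not", "not_like", "not not_like", and "not_it".
  negateSequence("I do not like it".split(" ")).foreach(println)
}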
Example 17
Source File: ConfigFactoryWrapperSpec.scala From pureconfig with Mozilla Public License 2.0
package pureconfig.backend

import java.io.FileNotFoundException

import com.typesafe.config.{ ConfigException, ConfigFactory }

import pureconfig.BaseSuite
import pureconfig.PathUtils._
import pureconfig.error.{ CannotParse, CannotReadFile }

class ConfigFactoryWrapperSpec extends BaseSuite {

  behavior of "ConfigFactoryWrapper.parseFile"

  it should "return a Left when a file does not exist" in {
    ConfigFactory.parseFile(nonExistingPath.toFile) shouldEqual ConfigFactory.empty
    ConfigFactoryWrapper.parseFile(nonExistingPath) should failLike {
      case CannotReadFile(`nonExistingPath`, Some(reason)) => be(a[FileNotFoundException])(reason)
    }
  }

  it should "return a Left when a file exists but cannot be parsed" in {
    val tmpPath = createTempFile("{foo:")
    intercept[ConfigException](ConfigFactory.parseFile(tmpPath.toFile))
    ConfigFactoryWrapper.parseFile(tmpPath) should failWithType[CannotParse]
  }

  behavior of "ConfigFactoryWrapper.loadFile"

  it should "return a Left when a file does not exist" in {
    ConfigFactory.load(ConfigFactory.parseFile(nonExistingPath.toFile)) shouldEqual ConfigFactory.load(ConfigFactory.empty)
    ConfigFactoryWrapper.loadFile(nonExistingPath) should failLike {
      case CannotReadFile(`nonExistingPath`, Some(reason)) => be(a[FileNotFoundException])(reason)
    }
  }

  it should "return a Left when a file exists but cannot be parsed" in {
    val tmpPath = createTempFile("{foo:")
    intercept[ConfigException](ConfigFactory.load(ConfigFactory.parseFile(tmpPath.toFile)))
    ConfigFactoryWrapper.loadFile(tmpPath) should failWithType[CannotParse]
  }

  it should "return a Left when it finds unresolved placeholders" in {
    val tmpPath = createTempFile(f"""{ foo1: "bla", foo2: $${charlie}}""")
    intercept[ConfigException](ConfigFactory.load(ConfigFactory.parseFile(tmpPath.toFile)))
    ConfigFactoryWrapper.loadFile(tmpPath) should failWithType[CannotParse]
  }
}
Example 18
Source File: TestImplicits.scala From incubator-daffodil with Apache License 2.0
package org.apache.daffodil

import org.junit.Test
import org.apache.daffodil.Implicits._
import org.apache.daffodil.exceptions._
import java.io.FileNotFoundException
import org.junit.Assert._

class TestImplicits {

  @Test def testIntercept1(): Unit = {
    intercept[Abort] {
      Assert.abort("yadda")
    }
  }

  @Test def testIntercept2(): Unit = {
    val e = intercept[InterceptFailedException] {
      intercept[Abort] {
        // this will not cause an abort exception
        2 + 2
      }
    }
    assertTrue(e.getMessage().contains("Failed to intercept"))
  }

  @Test def testIntercept3(): Unit = {
    val e = intercept[InterceptFailedException] {
      intercept[FileNotFoundException] {
        // an exception is caught, but not the right kind
        throw new Exception("yadda")
      }
    }
    assertTrue(e.getMessage().contains("Expected 'java.io.FileNotFoundException'"))
  }
}
Example 19
Source File: RedisWriter.scala From stream-reactor with Apache License 2.0
package com.datamountaineer.streamreactor.connect.redis.sink.writer

import java.io.{File, FileNotFoundException}

import com.datamountaineer.streamreactor.connect.errors.ErrorHandler
import com.datamountaineer.streamreactor.connect.redis.sink.config.RedisSinkSettings
import com.datamountaineer.streamreactor.connect.schemas.ConverterUtil
import com.datamountaineer.streamreactor.connect.sink._
import com.typesafe.scalalogging.StrictLogging
import redis.clients.jedis.Jedis

abstract class RedisWriter extends DbWriter with StrictLogging with ConverterUtil with ErrorHandler {

  var jedis: Jedis = _

  def createClient(sinkSettings: RedisSinkSettings): Unit = {
    val connection = sinkSettings.connectionInfo

    if (connection.isSslConnection) {
      connection.keyStoreFilepath match {
        case Some(path) =>
          if (!new File(path).exists) {
            throw new FileNotFoundException(s"Keystore not found in: $path")
          }
          System.setProperty("javax.net.ssl.keyStorePassword", connection.keyStorePassword.getOrElse(""))
          System.setProperty("javax.net.ssl.keyStore", path)
          System.setProperty("javax.net.ssl.keyStoreType", connection.keyStoreType.getOrElse("jceks"))
        case None =>
      }

      connection.trustStoreFilepath match {
        case Some(path) =>
          if (!new File(path).exists) {
            throw new FileNotFoundException(s"Truststore not found in: $path")
          }
          System.setProperty("javax.net.ssl.trustStorePassword", connection.trustStorePassword.getOrElse(""))
          System.setProperty("javax.net.ssl.trustStore", path)
          System.setProperty("javax.net.ssl.trustStoreType", connection.trustStoreType.getOrElse("jceks"))
        case None =>
      }
    }

    jedis = new Jedis(connection.host, connection.port, connection.isSslConnection)
    connection.password.foreach(p => jedis.auth(p))

    //initialize error tracker
    initialize(sinkSettings.taskRetries, sinkSettings.errorPolicy)
  }

  def close(): Unit = {
    if (jedis != null) {
      jedis.close()
    }
  }
}
Example 20
Source File: IJRepoIdeaResolver.scala From sbt-idea-plugin with Apache License 2.0
package org.jetbrains.sbtidea.download.idea

import org.jetbrains.sbtidea.download.api.Resolver

import java.io.{FileNotFoundException, InputStream}

import org.jetbrains.sbtidea.Keys
import org.jetbrains.sbtidea.Keys._
import org.jetbrains.sbtidea.download.BuildInfo
import org.jetbrains.sbtidea.download.IdeaUpdater.IJ_REPO_OVERRIDE
import org.jetbrains.sbtidea.{PluginLogger => log}
import sbt.{URL, url}

class IJRepoIdeaResolver extends Resolver[IdeaDependency] {

  override def resolve(dep: IdeaDependency): Seq[IdeaArtifact] = {
    val ideaUrl = () => getUrl(dep.buildInfo, ".zip")
    // sources are available only for Community Edition
    val srcJarUrl = () => getUrl(dep.buildInfo.copy(edition = Keys.IntelliJPlatform.IdeaCommunity), "-sources.jar")
    IdeaDistImpl(dep, ideaUrl) ::
      IdeaSourcesImpl(dep, srcJarUrl) :: Nil
  }

  private val defaultBaseURL = "https://www.jetbrains.com/intellij-repository"

  private def getCoordinates(platform: IntelliJPlatform): (String, String) = platform match {
    case IntelliJPlatform.IdeaCommunity => "com/jetbrains/intellij/idea" -> "ideaIC"
    case IntelliJPlatform.IdeaUltimate => "com/jetbrains/intellij/idea" -> "ideaIU"
    case IntelliJPlatform.PyCharmCommunity => "com/jetbrains/intellij/pycharm" -> "pycharmPC"
    case IntelliJPlatform.PyCharmProfessional => "com/jetbrains/intellij/pycharm" -> "pycharmPY"
    case IntelliJPlatform.CLion => "com/jetbrains/intellij/clion" -> "clion"
    case IntelliJPlatform.MPS => "com/jetbrains/mps" -> "mps"
  }

  //noinspection NoTailRecursionAnnotation
  protected def getUrl(platform: BuildInfo, artifactSuffix: String, trySnapshot: Boolean = false): URL = {
    val (repo, buildNumberSuffix) =
      if (trySnapshot) "snapshots" -> "-EAP-SNAPSHOT"
      else if (platform.buildNumber.contains("SNAPSHOT")) "snapshots" -> ""
      else "releases" -> ""

    val (groupId, artifactId) = getCoordinates(platform.edition)
    val urlFormEnv = System.getProperty(IJ_REPO_OVERRIDE)
    val baseURL = if (urlFormEnv != null) {
      log.warn(s"Using non-default IntelliJ repository URL: $urlFormEnv")
      urlFormEnv
    } else defaultBaseURL
    val repoURL = s"$baseURL/$repo/$groupId"
    val build = platform.buildNumber + buildNumberSuffix
    var stream: Option[InputStream] = None
    try {
      val result = url(s"$repoURL/$artifactId/$build/$artifactId-$build$artifactSuffix")
      stream = Some(result.openStream())
      result
    } catch {
      case _: FileNotFoundException if !trySnapshot && !platform.buildNumber.endsWith("SNAPSHOT") =>
        println(s"Can't find $platform in releases, trying snapshots")
        getUrl(platform, artifactSuffix, trySnapshot = true)
    } finally {
      stream.foreach(_.close())
    }
  }
}
Example 21
Source File: DirectoryListFileFinder.scala From seahorse-workflow-executor with Apache License 2.0
package io.deepsense.commons.utils

import java.io.{File, FileNotFoundException, IOException}

import scala.util.{Failure, Success, Try}

// Note: the enclosing class declaration was elided when this excerpt was extracted.
def filePredicate(f: File, desc: Option[String]): Boolean

def findFile(): Try[File] = {
  findFile(None)
}

def findFile(desc: String): Try[File] = {
  findFile(Some(desc))
}

def findFile(desc: Option[String]): Try[File] = {
  findPotentialFiles(
    dirsToSearch,
    listFilesInDirectory
    // convert to Try - give a nice message in the exception concerning the dirs, otherwise just Success it
  ).fold(dirs => Failure(
    new IOException(s"Unable to list files in dirs: ${dirs.mkString(", ")}")
  ), Success[Seq[File]]
  ).flatMap(_.find(filePredicate(_, desc))
    .map(Success[File])
    .getOrElse(Failure(
      new FileNotFoundException(
        s"Unable to find file ${desc.map(_ + " ").getOrElse("")}" +
          s"in dirs: ${dirsToSearch.mkString(", ")}")
    ))
  )
}
} // closes the elided enclosing class

object DirectoryListFileFinder {
  type EitherBadDirsOrFiles = Either[Seq[File], Seq[File]]

  def findPotentialFiles(
      dirs: Traversable[File],
      listFilesInDirectory: File => Option[Seq[File]]): EitherBadDirsOrFiles = {
    dirs.map { dir =>
      val files = listFilesInDirectory(dir)
      // if we're unable to list files inside the dir then
      // let's not lose this information by keeping the dir in Left
      files.toRight(dir)
    }.foldLeft(Right(Seq[File]()): EitherBadDirsOrFiles) {
      case (Left(badDirs), Left(badDir)) => Left(badDir +: badDirs)
      case (Left(badDirs), Right(_)) => Left(badDirs)
      case (Right(_), Left(badDir)) => Left(Seq(badDir))
      case (Right(files), Right(files2)) => Right(files ++ files2)
      case _ => ??? // to silence buggy 2.10 non-exhaustive match warning
    }
  }
}
Example 22
Source File: ForgeInstallSpec.scala From PackUpdate with Apache License 2.0
package at.chaosfield.packupdate.json

import java.io.{File, FileNotFoundException}
import java.net.URI

import scala.xml.XML

import at.chaosfield.packupdate.common.{FileManager, Log, MavenPath}
import org.json4s.jackson.JsonMethods

case class ForgeInstallSpec(
  install: InstallInformation,
  versionInfo: VersionInformation,
  spec: Int = 0
)

case class InstallInformation(
  profileName: String,
  target: String,
  path: MavenPath,
  version: String,
  filePath: String,
  welcome: String,
  minecraft: String,
  mirrorList: URI,
  logo: String,
  modList: String
)

case class VersionInformation(
  id: String,
  `type`: String,
  minecraftArguments: String,
  mainClass: String,
  inheritsFrom: String,
  jar: String,
  libraries: Array[LibraryInformation]
)

case class LibraryInformation(
  name: MavenPath,
  url: Option[URI],
  checksums: Array[String],
  serverreq: Boolean = false,
  clientreq: Boolean = false
) {

  def getPom(mavenPath: MavenPath, log: Log): xml.Elem = {
    var lastException: Option[Exception] = None
    val tryUrls = url.toList ++ LibraryInformation.RepoList
    for (url <- tryUrls) {
      try {
        val pomUrl = url.resolve(mavenPath.getPom.getFilePath).toURL
        val data = FileManager.readStreamToString(FileManager.retrieveUrl(pomUrl, log)._1)
        return XML.loadString(data)
      } catch {
        case e: FileNotFoundException =>
          lastException = Some(e)
          log.debug(s"File not found at $url, trying next...")
      }
    }
    throw lastException.get
  }
}

object LibraryInformation {
  final val RepoList = List("https://repo.maven.apache.org/maven2/", "https://libraries.minecraft.net/").map(new URI(_))
}
Example 23
Source File: Resources.scala From MoVE with Mozilla Public License 2.0
import sbt._
import java.io.FileNotFoundException
import java.io.File

object Resources {

  def getJavaHome: File = {
    val javaHome = Option(System.getenv("JAVA_HOME")).map(_+"/jre").
      orElse(Option(System.getProperty("java.home")))

    javaHome match {
      case Some(str) => file(str)
      case None => throw new FileNotFoundException(
        "$JAVA_HOME is undefined as well as the system property `java.home`." +
          "Setup a environment variable JAVA_HOME")
    }
  }

  def checkExists(file: File): File = {
    if (file.exists()) file
    else throw new FileNotFoundException(s"Can't find needed resource: $file")
  }
}
Example 24
Source File: CLIConfig.scala From sbt-api-builder with MIT License
package apibuilder.sbt

import java.io.{File, FileNotFoundException}
import java.nio.file.{Path, PathMatcher}

import io.circe.Decoder
import io.circe.yaml.parser
import sbt.IO

final case class CLIConfig(organizationFor: Map[String, OrganizationConfig]) extends AnyVal
final case class OrganizationConfig(applicationFor: Map[String, ApplicationConfig]) extends AnyVal
final case class ApplicationConfig(version: String, generators: Seq[GeneratorConfig])
final case class GeneratorConfig(generator: String, maybeTargetPath: Option[Path], pathMatchers: Seq[PathMatcher])

object CLIConfig extends BaseDecoders {

  final def load(f: File): Either[ConfigException, CLIConfig] =
    if (!f.getParentFile.exists) Left(MissingParentDirectory(f))
    else {
      try {
        IO.reader(f) { r =>
          parser
            .parse(r)
            .left
            .map(pf => InvalidContent(pf.message))
            .flatMap(_.as[CLIConfig].left.map(df => InvalidContent(df.message)))
        }
      } catch {
        case _: FileNotFoundException => Left(MissingFile(f))
      }
    }

  implicit final val cliConfigDecoder: Decoder[CLIConfig] = Decoder.instance { c =>
    c.downField("code").as[Map[String, OrganizationConfig]].map(CLIConfig.apply)
  }

  implicit final val organizationConfigDecoder: Decoder[OrganizationConfig] = Decoder.instance { c =>
    c.value.as[Map[String, ApplicationConfig]].map(OrganizationConfig.apply)
  }

  implicit final val applicationConfig: Decoder[ApplicationConfig] = Decoder.instance { c =>
    for {
      version <- c.downField("version").as[String]
      generators <- c.downField("generators").as[Seq[GeneratorConfig]]
    } yield ApplicationConfig(version, generators)
  }

  implicit final val generatorConfigDecoder: Decoder[GeneratorConfig] = Decoder.instance { c =>
    for {
      generator <- c.downField("generator").as[String]
      maybeTargetPath <- c.downField("target").as[Option[Path]]
      pathMatchers <- c.downField("files").as[Seq[PathMatcher]]
    } yield GeneratorConfig(generator, maybeTargetPath, pathMatchers)
  }
}
Example 25
Source File: ScalafmtCoursierPlugin.scala From neo-sbt-scalafmt with Apache License 2.0
package com.lucidchart.scalafmt.coursier

import com.lucidchart.sbt.scalafmt.ScalafmtCorePlugin
import com.lucidchart.sbt.scalafmt.ScalafmtCorePlugin.autoImport._
import coursier._
import java.io.{FileNotFoundException, IOException}
import sbt.KeyRanks._
import sbt.Keys._
import sbt._
import scala.util.control.NonFatal
import scalaz.concurrent.Task

object ScalafmtCoursierPlugin extends AutoPlugin {

  object autoImport {
    val scalafmtCoursierRepositories =
      SettingKey[Seq[Repository]]("scalafmt-coursier-repositories", "Coursier respositories for scalafmt", BTask)
  }
  import autoImport._

  override val projectSettings = Seq(
    externalDependencyClasspath in Scalafmt := {
      val dependencies = (libraryDependencies in Scalafmt).value
        .map(module => Dependency(Module(module.organization, module.name), module.revision))
        .toSet
      val cacheFile = streams.value.cacheDirectory / "dependencies"
      val newHash = dependencies.hashCode
      val cached = try {
        IO.readLines(cacheFile) match {
          case hash +: fileStrings =>
            val files = fileStrings.map(file)
            if (hash.toInt == newHash && files.forall(_.exists)) Some(files) else None
        }
      } catch {
        case _: FileNotFoundException => None
        case NonFatal(e) =>
          streams.value.log.error(e.getLocalizedMessage)
          None
      }
      Attributed.blankSeq(cached.getOrElse {
        synchronized {
          val fetch = Fetch.from(scalafmtCoursierRepositories.value, coursier.Cache.fetch())
          streams.value.log.info(s"Fetching scalafmt for ${Reference.display(thisProjectRef.value)}")
          val resolution = Resolution(dependencies).process.run(fetch).unsafePerformSync
          val result = Task
            .gatherUnordered(resolution.artifacts.map(coursier.Cache.file(_).run))
            .unsafePerformSync
            .map(_.valueOr(error => throw new IOException(error.describe)))
            .filter(_.ext == "jar")
            .sorted
          streams.value.log.info(s"Fetched ${result.size} artifacts for scalafmt")
          IO.writeLines(cacheFile, newHash.toString +: result.map(_.toString))
          result
        }
      })
    },
    scalafmtUseIvy := false
  )

  override val buildSettings = Seq(
    scalafmtCoursierRepositories := Seq(
      coursier.Cache.ivy2Local,
      coursier.MavenRepository("https://repo1.maven.org/maven2")
    )
  )

  override val requires = ScalafmtCorePlugin
  override val trigger = allRequirements
}
Example 26
Source File: DevClusterLauncher.scala From berilia with Apache License 2.0
package com.criteo.dev.cluster

import java.io.{File, FileNotFoundException}
import java.net.URL

import com.criteo.dev.cluster.config.{ConfigLoader, GlobalConfig}

object DevClusterLauncher {

  def main(args: Array[String]) {
    if (args.length == 0) {
      printHelp
      System.exit(1)
    }

    if (System.getenv("USER") == null) {
      println("Required variable USER is not set.")
      System.exit(1)
    }

    val commandMap = CommandRegistry.getCommandMap
    val commandString = args(0).trim()
    val command = commandMap.get(commandString)
    if (command isEmpty) {
      println(s"Invalid command: [$commandString]. Following commands are valid.")
      printHelp
      System.exit(1)
    } else {
      try {
        val argList = args.toList.drop(1)
        val realArgs = argList.filterNot(_.startsWith("--"))
        val conf = ConfigLoader(
          getOption(args, "source").map(getFileURL(_)).getOrElse(getFileURL("source.conf")),
          getOption(args, "target").map(getFileURL(_)).getOrElse(getFileURL("target.conf")),
          getOption(args, "checkpoint").map(getFileURL(_))
        ).value
        command.get.apply(realArgs, conf)
      } catch {
        case e: Exception => {
          e.printStackTrace()
          System.exit(1)
        }
      }
    }
    System.exit(0)
  }

  def getFileURL(path: String): URL = {
    val file = new File(path)
    if (file.exists) file.toURI.toURL
    else throw new FileNotFoundException(s"$path does not exist")
  }

  def getOption(args: Array[String], argName: String): Option[String] =
    args
      .find(_.startsWith(s"--$argName"))
      .flatMap(_.split("=").drop(1).headOption)

  def printHelp(): Unit = {
    println("This tool provides utilities for creating and managing AWS dev instances, " +
      "and utilities such as copying data from gateway, and configuring gateway on local " +
      "machine to the cluster. Use the following commands.\n")
    CommandRegistry.getCommands.foreach( cc => {
      println(s"\033[1m${cc.name} commands\033[0m")
      println()
      cc.actions.filter(_.isHidden == false).foreach(c => {
        println(s"* ${c.command}")
        c.printHelp
        println()
      })
    })
  }
}

object HelpAction extends CliAction[Unit] {
  override def command: String = "help"
  override def usageArgs: List[Any] = List()
  override def help: String = "Gets help"
  override def applyInternal(args: List[String], config: GlobalConfig): Unit = {
    DevClusterLauncher.printHelp
  }
}
Example 27
Source File: HazelCastConnection.scala From stream-reactor with Apache License 2.0
package com.datamountaineer.streamreactor.connect.hazelcast

import java.io.{File, FileNotFoundException}
import java.net.URI
import java.util.{Properties, UUID}

import com.datamountaineer.streamreactor.connect.hazelcast.config.{HazelCastConnectionConfig, HazelCastSocketConfig}
import com.hazelcast.cache.HazelcastCachingProvider
import com.hazelcast.client.HazelcastClient
import com.hazelcast.client.config.{ClientConfig, ClientNetworkConfig, SocketOptions}
import com.hazelcast.config.{GroupConfig, SSLConfig}
import com.hazelcast.core.HazelcastInstance
import javax.cache.{CacheManager, Caching}

import scala.collection.JavaConverters._

object HazelCastConnection {

  def buildClient(config: HazelCastConnectionConfig): HazelcastInstance = {
    val clientConfig = new ClientConfig
    val networkConfig = clientConfig.getNetworkConfig

    if (config.sslEnabled) {
      setSSLOptions(config)
      networkConfig.setSSLConfig(new SSLConfig().setEnabled(true))
    }
    networkConfig.setAddresses(config.members.toList.asJava)

    val groupConfig = new GroupConfig(config.group, config.pass)
    clientConfig.setGroupConfig(groupConfig)
    buildSocketOptions(networkConfig, config.socketConfig)
    clientConfig.setInstanceName(config.group + "-kafka-connect-" + UUID.randomUUID().toString)
    HazelcastClient.newHazelcastClient(clientConfig)
  }

  private def buildSocketOptions(clientNetworkConfig: ClientNetworkConfig, socketConfig: HazelCastSocketConfig): SocketOptions = {
    val socketOptions = clientNetworkConfig.getSocketOptions
    socketOptions.setKeepAlive(socketConfig.keepAlive)
    socketOptions.setTcpNoDelay(socketConfig.tcpNoDelay)
    socketOptions.setReuseAddress(socketConfig.reuseAddress)
    socketOptions.setLingerSeconds(socketConfig.lingerSeconds)
    socketOptions.setBufferSize(socketConfig.bufferSize)
    socketOptions
  }

  def getCacheManager(client: HazelcastInstance, name: String): CacheManager = {
    val instanceName = client.getName()
    val cachingProvider = Caching.getCachingProvider()

    // Create Properties instance pointing to a named HazelcastInstance
    val properties = new Properties()
    properties.setProperty(HazelcastCachingProvider.HAZELCAST_INSTANCE_NAME, instanceName)
    val cacheManagerName = new URI(name)
    val cacheManager = cachingProvider.getCacheManager(cacheManagerName, null, properties)
    cacheManager
  }

  def setSSLOptions(config: HazelCastConnectionConfig) = {
    config.keyStoreLocation match {
      case Some(path) =>
        if (!new File(path).exists) {
          throw new FileNotFoundException(s"Keystore not found in: $path")
        }
        System.setProperty("javax.net.ssl.keyStorePassword", config.keyStorePassword.getOrElse(""))
        System.setProperty("javax.net.ssl.keyStore", path)
        System.setProperty("javax.net.ssl.keyStoreType", config.keyStoreType.getOrElse("jks"))
      case None =>
    }

    config.trustStoreLocation match {
      case Some(path) =>
        if (!new File(path).exists) {
          throw new FileNotFoundException(s"Truststore not found in: $path")
        }
        System.setProperty("javax.net.ssl.trustStorePassword", config.trustStorePassword.getOrElse(""))
        System.setProperty("javax.net.ssl.trustStore", path)
        System.setProperty("javax.net.ssl.trustStoreType", config.trustStoreType.getOrElse("jks"))
      case None =>
    }
  }
}
Example 28
Source File: FileGenerator.scala From avrohugger with Apache License 2.0
package avrohugger
package generators

import avrohugger.format.abstractions.SourceFormat
import avrohugger.input.DependencyInspector
import avrohugger.input.NestedSchemaExtractor
import avrohugger.input.reflectivecompilation.schemagen._
import avrohugger.input.parsers.{ FileInputParser, StringInputParser}
import avrohugger.matchers.TypeMatcher
import avrohugger.stores.{ ClassStore, SchemaStore }

import java.io.{File, FileNotFoundException, IOException}

import org.apache.avro.{ Protocol, Schema }
import org.apache.avro.Schema.Type.ENUM

// Unable to overload this class' methods because outDir uses a default value
private[avrohugger] object FileGenerator {

  def schemaToFile(
    schema: Schema,
    outDir: String,
    format: SourceFormat,
    classStore: ClassStore,
    schemaStore: SchemaStore,
    typeMatcher: TypeMatcher,
    restrictedFields: Boolean): Unit = {
    val topNS: Option[String] = DependencyInspector.getReferredNamespace(schema)
    val topLevelSchemas: List[Schema] =
      NestedSchemaExtractor.getNestedSchemas(schema, schemaStore, typeMatcher)
    // most-nested classes processed first
    topLevelSchemas.reverse.distinct.foreach(schema => {
      // pass in the top-level schema's namespace if the nested schema has none
      val ns = DependencyInspector.getReferredNamespace(schema) orElse topNS
      format.compile(classStore, ns, Left(schema), outDir, schemaStore, typeMatcher, restrictedFields)
    })
  }

  def protocolToFile(
    protocol: Protocol,
    outDir: String,
    format: SourceFormat,
    classStore: ClassStore,
    schemaStore: SchemaStore,
    typeMatcher: TypeMatcher,
    restrictedFields: Boolean): Unit = {
    val ns = Option(protocol.getNamespace)
    format.compile(classStore, ns, Right(protocol), outDir, schemaStore, typeMatcher, restrictedFields)
  }

  def stringToFile(
    str: String,
    outDir: String,
    format: SourceFormat,
    classStore: ClassStore,
    schemaStore: SchemaStore,
    stringParser: StringInputParser,
    typeMatcher: TypeMatcher,
    restrictedFields: Boolean): Unit = {
    val schemaOrProtocols = stringParser.getSchemaOrProtocols(str, schemaStore)
    schemaOrProtocols.foreach(schemaOrProtocol => {
      schemaOrProtocol match {
        case Left(schema) => {
          schemaToFile(schema, outDir, format, classStore, schemaStore, typeMatcher, restrictedFields)
        }
        case Right(protocol) => {
          protocolToFile(protocol, outDir, format, classStore, schemaStore, typeMatcher, restrictedFields)
        }
      }
    })
  }

  def fileToFile(
    inFile: File,
    outDir: String,
    format: SourceFormat,
    classStore: ClassStore,
    schemaStore: SchemaStore,
    fileParser: FileInputParser,
    typeMatcher: TypeMatcher,
    classLoader: ClassLoader,
    restrictedFields: Boolean): Unit = {
    val schemaOrProtocols: List[Either[Schema, Protocol]] =
      fileParser.getSchemaOrProtocols(inFile, format, classStore, classLoader)
    schemaOrProtocols.foreach(schemaOrProtocol => schemaOrProtocol match {
      case Left(schema) => {
        schemaToFile(schema, outDir, format, classStore, schemaStore, typeMatcher, restrictedFields)
      }
      case Right(protocol) => {
        protocolToFile(protocol, outDir, format, classStore, schemaStore, typeMatcher, restrictedFields)
      }
    })
  }
}
Example 29
Source File: DownloadSupportSpec.scala From incubator-toree with Apache License 2.0
package org.apache.toree.utils

import java.io.FileNotFoundException
import java.net.URL

import org.scalatest.{BeforeAndAfter, Matchers, FunSpec}

import scala.io.Source
import scala.tools.nsc.io.File

class DownloadSupportSpec extends FunSpec with Matchers with BeforeAndAfter {
  val downloadDestinationUrl = new URL("file:///tmp/testfile2.ext")

  val testFileContent = "This is a test"
  val testFileName = "/tmp/testfile.txt"

  // Create a test file for downloading
  before {
    File(testFileName).writeAll(testFileContent)
  }

  // Cleanup what we made
  after {
    if (File(testFileName).exists) File(testFileName).delete()
    if (File(downloadDestinationUrl.getPath).exists) File(downloadDestinationUrl.getPath).delete()
  }

  describe("DownloadSupport") {
    describe("#downloadFile( String, String )") {
      it("should download a file to the download directory") {
        val testFileUrl = "file:///tmp/testfile.txt"

        // Create our utility and download the file
        val downloader = new Object with DownloadSupport
        downloader.downloadFile(
          testFileUrl,
          downloadDestinationUrl.getProtocol + "://" + downloadDestinationUrl.getPath)

        // Verify the file contents are what was in the original file
        val downloadedFileContent: String = Source.fromFile(downloadDestinationUrl.getPath).mkString
        downloadedFileContent should be (testFileContent)
      }
    }

    describe("#downloadFile( URL, URL )") {
      it("should download a file to the download directory") {
        val testFileUrl = new URL("file:///tmp/testfile.txt")

        val downloader = new Object with DownloadSupport
        downloader.downloadFile(testFileUrl, downloadDestinationUrl)

        // Verify the file contents are what was in the original file
        val downloadedFileContent: String = Source.fromFile(downloadDestinationUrl.getPath).mkString
        downloadedFileContent should be (testFileContent)
      }

      it("should throw FileNotFoundException if the download URL is bad") {
        val badFilename = "file:///tmp/testbadfile.txt"
        if (File(badFilename).exists) File(badFilename).delete()
        val badFileUrl = new URL(badFilename)

        val downloader = new Object with DownloadSupport
        intercept[FileNotFoundException] {
          downloader.downloadFile(badFileUrl, downloadDestinationUrl)
        }
      }

      it("should throw FileNotFoundException if the download ") {
        val testFileUrl = new URL("file:///tmp/testfile.txt")
        val badDestinationUrl = new URL("file:///tmp/badloc/that/doesnt/exist.txt")

        val downloader = new Object with DownloadSupport
        intercept[FileNotFoundException] {
          downloader.downloadFile(testFileUrl, badDestinationUrl)
        }
      }
    }
  }
}
Example 30
Source File: ParagraphVectorsClassifierExample.scala From dl4scala with MIT License | 5 votes |
package org.dl4scala.examples.nlp.paragraphvectors

import java.io.FileNotFoundException

import org.datavec.api.util.ClassPathResource
import org.deeplearning4j.models.embeddings.inmemory.InMemoryLookupTable
import org.deeplearning4j.models.paragraphvectors.ParagraphVectors
import org.deeplearning4j.models.word2vec.VocabWord
import org.deeplearning4j.text.documentiterator.{FileLabelAwareIterator, LabelAwareIterator}
import org.deeplearning4j.text.tokenization.tokenizer.preprocessor.CommonPreprocessor
import org.deeplearning4j.text.tokenization.tokenizerfactory.{DefaultTokenizerFactory, TokenizerFactory}
import org.dl4scala.examples.nlp.paragraphvectors.tools.{LabelSeeker, MeansBuilder}
import org.slf4j.LoggerFactory

import scala.collection.JavaConverters._

// The original listing is truncated: only the score-logging tail of
// checkUnlabeledData() survives. The class below is a reconstruction, a sketch
// based on the deeplearning4j ParagraphVectorsClassifierExample that this code
// ports, not the verbatim original.
class ParagraphVectorsClassifierExample {

  private val log = LoggerFactory.getLogger(classOf[ParagraphVectorsClassifierExample])

  private var paragraphVectors: ParagraphVectors = _
  private var iterator: LabelAwareIterator = _
  private var tokenizerFactory: TokenizerFactory = _

  def makeParagraphVectors(): Unit = {
    // ClassPathResource.getFile can throw FileNotFoundException, hence the import.
    iterator = new FileLabelAwareIterator.Builder()
      .addSourceFolder(new ClassPathResource("paravec/labeled").getFile)
      .build()
    tokenizerFactory = new DefaultTokenizerFactory()
    tokenizerFactory.setTokenPreProcessor(new CommonPreprocessor())
    paragraphVectors = new ParagraphVectors.Builder()
      .iterate(iterator)
      .tokenizerFactory(tokenizerFactory)
      .build()
    paragraphVectors.fit()
  }

  def checkUnlabeledData(): Unit = {
    val unClassifiedIterator = new FileLabelAwareIterator.Builder()
      .addSourceFolder(new ClassPathResource("paravec/unlabeled").getFile)
      .build()
    val lookupTable = paragraphVectors.getLookupTable.asInstanceOf[InMemoryLookupTable[VocabWord]]
    val meansBuilder = new MeansBuilder(lookupTable, tokenizerFactory)
    val seeker = new LabelSeeker(iterator.getLabelsSource.getLabels, lookupTable)

    while (unClassifiedIterator.hasNextDocument) {
      val document = unClassifiedIterator.nextDocument()
      val scores = seeker.getScores(meansBuilder.documentAsVector(document))
      // The loop below is the fragment that survives from the original listing.
      log.info("Document '" + document.getLabels + "' falls into the following categories: ")
      for (score: (String, Double) <- scores.asScala) {
        log.info(" " + score._1 + ": " + score._2)
      }
    }
  }
}

object ParagraphVectorsClassifierExample extends App {
  val app = new ParagraphVectorsClassifierExample
  app.makeParagraphVectors()
  app.checkUnlabeledData()
}
Example 31
Source File: FileStreamReader.scala From piglet with Apache License 2.0 | 5 votes |
package dbis.piglet.backends.spark import org.apache.spark.storage.StorageLevel import org.apache.spark.streaming.receiver.Receiver import scala.io.Source import java.io.{ FileNotFoundException, IOException } import org.apache.spark.streaming.scheduler._ import org.apache.spark.streaming.StreamingContext class FileStreamReader(file: String, @transient val ssc: StreamingContext) extends Receiver[String](StorageLevel.MEMORY_AND_DISK_2) { def onStart() { // Start the thread that reads data from a file new Thread("FileStreamReader") { override def run() { receive() } }.start() } def onStop() { // There is nothing to do here } private def receive() { try { for (line <- Source.fromFile(file).getLines()) { store(line) //Thread sleep 1000 // for testing } //stop("stopped ...") // stop receiver //ssc.stop() //SparkStream.ssc.stop(true, true) // stop streaming context gracefully } catch { case ex: FileNotFoundException => println(s"Could not find $file file.") case ex: IOException => println(s"Had an IOException during reading $file file") } finally { stop("Stopped Receiver") ssc.stop(true, true) SparkStream.ssc.stop(true, true) //sys.exit() } } } class FileReader(ssc: StreamingContext) { def readFile(file: String) = ssc.receiverStream(new FileStreamReader(file, ssc)) } object FileStreamReader { implicit def customFileStreamReader(ssc: StreamingContext) = new FileReader(ssc) }
Example 32
Source File: TryCatchFinally.scala From Scala-and-Spark-for-Big-Data-Analytics with MIT License | 5 votes |
package com.chapter3.ScalaFP import java.io.IOException import java.io.FileReader import java.io.FileNotFoundException object TryCatch { def main(args: Array[String]) { try { val f = new FileReader("data/data.txt") } catch { case ex: FileNotFoundException => println("File not found exception") case ex: IOException => println("IO Exception") } finally { println("Finally block always executes"); } } }
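The same handling can be expressed without a try/catch statement. A minimal sketch using scala.util.Using (available from Scala 2.13), which also closes the reader; the file path is taken from the example above:

import java.io.{FileNotFoundException, FileReader, IOException}
import scala.util.{Failure, Success, Using}

Using(new FileReader("data/data.txt")) { reader =>
  reader.read() // work with the reader; Using closes it afterwards
} match {
  case Success(firstChar) => println(s"Read character code $firstChar")
  case Failure(_: FileNotFoundException) => println("File not found exception")
  case Failure(_: IOException) => println("IO Exception")
  case Failure(other) => throw other
}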
Example 33
Source File: MemoryRepository.scala From polynote with Apache License 2.0 | 5 votes |
package polynote.testing.repository import java.io.FileNotFoundException import java.net.URI import polynote.kernel.{BaseEnv, GlobalEnv, NotebookRef, TaskB} import polynote.messages._ import polynote.server.repository.NotebookRepository import polynote.testing.kernel.MockNotebookRef import zio.{RIO, Task, UIO, ZIO} import scala.collection.mutable class MemoryRepository extends NotebookRepository { private val notebooks = new mutable.HashMap[String, Notebook]() def notebookExists(path: String): UIO[Boolean] = ZIO.effectTotal(notebooks contains path) def notebookURI(path: String): UIO[Option[URI]] = ZIO.effectTotal(if (notebooks contains path) Option(new URI(s"memory://$path")) else None) def loadNotebook(path: String): Task[Notebook] = ZIO.effectTotal(notebooks.get(path)).get.mapError(err => new FileNotFoundException(path)) def openNotebook(path: String): RIO[BaseEnv with GlobalEnv, NotebookRef] = loadNotebook(path).flatMap(nb => MockNotebookRef(nb, tup => saveNotebook(tup._2))) def saveNotebook(nb: Notebook): UIO[Unit] = ZIO.effectTotal(notebooks.put(nb.path, nb)) def listNotebooks(): UIO[List[String]] = ZIO.effectTotal(notebooks.keys.toList) def createNotebook(path: String, maybeUriOrContent: Option[String]): UIO[String] = ZIO.effectTotal(notebooks.put(path, Notebook(path, ShortList.of(), None))).as(path) def createAndOpen(path: String, notebook: Notebook, version: Int): RIO[BaseEnv with GlobalEnv, NotebookRef] = ZIO.effectTotal(notebooks.put(path, notebook)).flatMap { _ => MockNotebookRef(notebook, tup => saveNotebook(tup._2), version) } def initStorage(): TaskB[Unit] = ZIO.unit def renameNotebook(path: String, newPath: String): Task[String] = loadNotebook(path).map { notebook => notebooks.put(newPath, notebook) notebooks.remove(path) newPath } def copyNotebook(path: String, newPath: String): TaskB[String] = loadNotebook(path).map { notebook => notebooks.put(newPath, notebook) newPath } def deleteNotebook(path: String): TaskB[Unit] = ZIO.effectTotal(notebooks.get(path)).flatMap { case None => ZIO.fail(new FileNotFoundException(path)) case Some(_) => ZIO.effectTotal(notebooks.remove(path)).unit } }
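The loadNotebook line is a compact ZIO 1.x idiom: .get turns a UIO[Option[A]] into an effect that fails when the Option is None, and mapError replaces that failure with a FileNotFoundException. The same pattern in isolation:

import java.io.FileNotFoundException
import zio.{Task, ZIO}

// Fails with FileNotFoundException when the key is absent, succeeds otherwise.
def lookup(entries: Map[String, String], path: String): Task[String] =
  ZIO.effectTotal(entries.get(path)).get.mapError(_ => new FileNotFoundException(path))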
Example 34
Source File: PollingUtils.scala From sttp with Apache License 2.0 | 5 votes |
import java.io.FileNotFoundException import java.net.{ConnectException, URL} import scala.concurrent.TimeoutException import scala.concurrent.duration._ object PollingUtils { def waitUntilServerAvailable(url: URL): Unit = { val connected = poll(5.seconds, 250.milliseconds)({ urlConnectionAvailable(url) }) if (!connected) { throw new TimeoutException(s"Failed to connect to $url") } } def poll(timeout: FiniteDuration, interval: FiniteDuration)(poll: => Boolean): Boolean = { val start = System.nanoTime() def go(): Boolean = { if (poll) { true } else if ((System.nanoTime() - start) > timeout.toNanos) { false } else { Thread.sleep(interval.toMillis) go() } } go() } def urlConnectionAvailable(url: URL): Boolean = { try { url.openConnection() .getInputStream .close() true } catch { case _: ConnectException => false case _: FileNotFoundException => true // on 404 } } }
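A typical use is test setup code that must wait for a locally started server before issuing requests (the URL is hypothetical). Note the FileNotFoundException case above: an HTTP 404 still counts as available, because HttpURLConnection signals 404 by throwing FileNotFoundException, which proves the server answered.

import java.net.URL

PollingUtils.waitUntilServerAvailable(new URL("http://localhost:8080/"))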
Example 35
Source File: ChangedFilesBuilderSpec.scala From mvn_scalafmt with Apache License 2.0 | 5 votes |
package org.antipathy.mvn_scalafmt.builder import java.io.{File, FileNotFoundException} import java.nio.file.Paths import org.apache.maven.plugin.logging.SystemStreamLog import org.scalatest.GivenWhenThen import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers class ChangedFilesBuilderSpec extends AnyFlatSpec with GivenWhenThen with Matchers { behavior of "ChangedFilesBuilder" it should "Identify files that have changed from master" in { val log = new SystemStreamLog val sourceDirs = Seq("src/test/scala", "src/main/scala").map(new File(_)) val sources = new SourceFileSequenceBuilder(log).build(sourceDirs) val changedFiles = Seq( "/mvn_scalafmt/src/main/scala/org/antipathy/mvn_scalafmt/builder/ChangedFilesBuilder.scala", "/mvn_scalafmt/src/main/scala/org/antipathy/mvn_scalafmt/builder/SourceFileSequenceBuilder.scala", "/mvn_scalafmt/src/test/scala/org/antipathy/mvn_scalafmt/builder/ChangedFilesBuilderSpec.scala", "/mvn_scalafmt/src/test/scala/org/antipathy/mvn_scalafmt/builder/LocalConfigBuilderSpec.scala" ).map(x => getAbsolutePathFrom(x)) val changeFunction = () => changedFiles.map(new File(_)) val result = new ChangedFilesBuilder(log, true, "master", changeFunction).build(sources) result should be(changedFiles.map(new File(_))) } it should "return all files if diff is false" in { val log = new SystemStreamLog val sourceDirs = Seq("src/test/scala", "src/main/scala").map(new File(_)) val sources = new SourceFileSequenceBuilder(log).build(sourceDirs) val changeFunction = () => sources val result = new ChangedFilesBuilder(log, false, "master", changeFunction).build(sources) result should be(sources) } it should "re-throw exceptions it encounters" in { val log = new SystemStreamLog val sourceDirs = Seq("src/test/scala", "src/main/scala").map(new File(_)) val sources = new SourceFileSequenceBuilder(log).build(sourceDirs) val changedFiles = Seq( "/mvn_scalafmt/src/main/scala/org/antipathy/mvn_scalafmt/builder/ChangedFilesBuilder.scala", "/mvn_scalafmt/src/main/scala/org/antipathy/mvn_scalafmt/builder/SourceFileSequenceBuilder.scala", "/mvn_scalafmt/src/test/scala/org/antipathy/mvn_scalafmt/builder/ChangedFilesBuilderSpec.scala", "/mvn_scalafmt/src/test/scala/org/antipathy/mvn_scalafmt/builder/LocalConfigBuilderSpec.scala" ).map(x => getAbsolutePathFrom(x)) val changeFunction = () => throw new FileNotFoundException("Ooops") an[FileNotFoundException] should be thrownBy { new ChangedFilesBuilder(log, true, "master", changeFunction).build(sources) } } def getAbsolutePathFrom(path: String): String = Paths.get(path).normalize.toAbsolutePath.toString }
Example 36
Source File: FileDependencies.scala From spatial with MIT License | 5 votes |
package argon package codegen import java.io.FileNotFoundException import utils.io.files trait FileDependencies extends Codegen { var dependencies: List[CodegenDep] = Nil lazy val files_list: Seq[String] = { val url = getClass.getResource("/files_list") if (url == null) throw new FileNotFoundException( "File \"files_list\" is not found, please run bin/update_resources.sh before launching a SpatialApp.") io.Source.fromURL(url).mkString("").split("\n") } sealed trait CodegenDep { def copy(out: String): Unit } case class FileDep(folder: String, name: String, relPath: String = "", outputPath:Option[String] = None) extends CodegenDep { def copy(out: String): Unit = { val outPathApp = outputPath.getOrElse(name) val relPathApp = relPath + outPathApp try { files.copyResource(s"/$folder/$name", s"$out/$relPathApp") } catch {case t: Throwable => bug(s"Error $t") bug(s"Cannot copy dependency:") bug(" src: " + folder + "/" + name) bug(" dst: " + out + relPathApp) throw t } } } case class DirDep(folder: String, name: String, relPath: String = "", outputPath:Option[String] = None) extends CodegenDep { override def copy(out: String): Unit = { val dir = folder + "/" + name // Console.println("Looking at " + dir) def rename(e: String) = { val srcFolder = if (folder.startsWith("./")) folder.split("/") else {"./" + folder}.split("/") val path = e.split("/").drop(srcFolder.length) if (outputPath.isDefined) { val sourceName = folder + "/" + path.dropRight(1).mkString("/") val outputName = outputPath.get + path.last FileDep(sourceName, path.last, relPath, Some(outputName)) } else { val outputName = path.mkString("/") FileDep(folder, outputName, relPath) } } files_list.filter(_.startsWith("./"+dir)) .map{d => rename(d)} .foreach{f => f.copy(out)} } } def copyDependencies(out: String): Unit = { dependencies.foreach{dep => dep.copy(out) } } override protected def postprocess[R](b: Block[R]): Block[R] = { copyDependencies(out) super.postprocess(b) } }
Example 37
Source File: ScannerSpec.scala From GettingWorkDoneWithExtensibleEffects with Apache License 2.0 | 5 votes |
package scan

import java.io.FileNotFoundException
import java.io.IOException
import java.nio.file._

import cats._
import cats.data._
import cats.implicits._
import org.atnos.eff._
import org.atnos.eff.all._
import org.atnos.eff.syntax.all._
import org.atnos.eff.addon.monix._
import org.atnos.eff.addon.monix.task._
import org.atnos.eff.syntax.addon.monix.task._
import org.specs2._

import scala.collection.immutable.SortedSet
import scala.concurrent.duration._
import monix.eval._
import monix.execution.Scheduler.Implicits.global

class ScannerSpec extends mutable.Specification {

  case class MockFilesystem(directories: Map[Directory, List[FilePath]], fileSizes: Map[File, Long]) extends Filesystem {

    def length(file: File) = fileSizes.getOrElse(file, throw new IOException())

    def listFiles(directory: Directory) = directories.getOrElse(directory, throw new IOException())

    def filePath(path: String): FilePath =
      if (directories.keySet.contains(Directory(path)))
        Directory(path)
      else if (fileSizes.keySet.contains(File(path)))
        File(path)
      else
        throw new FileNotFoundException(path)
  }

  val base = Directory("base")
  val base1 = File(s"${base.path}/1.txt")
  val base2 = File(s"${base.path}/2.txt")
  val subdir = Directory(s"${base.path}/subdir")
  val sub1 = File(s"${subdir.path}/1.txt")
  val sub3 = File(s"${subdir.path}/3.txt")
  val directories = Map(
    base -> List(subdir, base1, base2),
    subdir -> List(sub1, sub3)
  )
  val fileSizes = Map(base1 -> 1L, base2 -> 2L, sub1 -> 1L, sub3 -> 3L)
  val fs = MockFilesystem(directories, fileSizes)

  type R = Fx.fx4[Task, Reader[Filesystem, ?], Reader[ScanConfig, ?], Writer[Log, ?]]

  def run[T](program: Eff[R, T], fs: Filesystem) =
    program.runReader(ScanConfig(2)).runReader(fs).taskAttempt.runWriter.runAsync.runSyncUnsafe(3.seconds)

  val expected = Right(new PathScan(SortedSet(FileSize(sub3, 3), FileSize(base2, 2)), 7, 4))
  val expectedLogs = Set(
    Log.info("Scan started on Directory(base)"),
    Log.debug("Scanning directory 'Directory(base)': 1 subdirectories and 2 files"),
    Log.debug("File base/1.txt Size 1 B"),
    Log.debug("File base/2.txt Size 2 B"),
    Log.debug("Scanning directory 'Directory(base/subdir)': 0 subdirectories and 2 files"),
    Log.debug("File base/subdir/1.txt Size 1 B"),
    Log.debug("File base/subdir/3.txt Size 3 B")
  )

  val (actual, logs) = run(Scanner.pathScan(base), fs)

  "Report Format" ! {actual.mustEqual(expected)}

  "Logs messages are emitted (ignores order due to non-deterministic concurrent execution)" ! {
    logs.forall(expectedLogs.contains)
  }
}
Example 38
Source File: ScannerSpec.scala From GettingWorkDoneWithExtensibleEffects with Apache License 2.0 | 5 votes |
package scan import java.io.FileNotFoundException import java.io.IOException import java.nio.file._ import cats._ import cats.data._ import cats.implicits._ import org.atnos.eff._ import org.atnos.eff.all._ import org.atnos.eff.syntax.all._ import org.atnos.eff.addon.monix._ import org.atnos.eff.addon.monix.task._ import org.atnos.eff.syntax.addon.monix.task._ import org.specs2._ import scala.collection.immutable.SortedSet import scala.concurrent.duration._ import monix.eval._ import monix.execution.Scheduler.Implicits.global class ScannerSpec extends mutable.Specification { case class MockFilesystem(directories: Map[Directory, List[FilePath]], fileSizes: Map[File, Long]) extends Filesystem { def length(file: File) = fileSizes.getOrElse(file, throw new IOException()) def listFiles(directory: Directory) = directories.getOrElse(directory, throw new IOException()) def filePath(path: String): FilePath = if (directories.keySet.contains(Directory(path))) Directory(path) else if (fileSizes.keySet.contains(File(path))) File(path) else throw new FileNotFoundException(path) } val base = Directory("base") val linkTarget = File(s"/somewhere/else/7.txt") val base1 = File(s"${base.path}/1.txt") val baseLink = Symlink(s"${base.path}/7.txt", linkTarget) val subdir = Directory(s"${base.path}/subdir") val sub2 = File(s"${subdir.path}/2.txt") val subLink = Symlink(s"${subdir.path}/7.txt", linkTarget) val directories = Map( base -> List(subdir, base1, baseLink), subdir -> List(sub2, subLink) ) val fileSizes = Map(base1 -> 1L, sub2 -> 2L, linkTarget -> 7L) val fs = MockFilesystem(directories, fileSizes) type R = Fx.fx5[Task, Reader[Filesystem, ?], Reader[ScanConfig, ?], Writer[Log, ?], State[Set[FilePath], ?]] def run[T](program: Eff[R, T], fs: Filesystem) = program.runReader(ScanConfig(2)).runReader(fs).evalStateZero[Set[FilePath]].taskAttempt.runWriter[Log].runAsync.runSyncUnsafe(3.seconds) val expected = Right(new PathScan(SortedSet(FileSize(linkTarget, 7), FileSize(sub2, 2)), 10, 3)) val (actual, logs) = run(Scanner.pathScan[R](base), fs) "Report Format" ! {actual.mustEqual(expected)} }
Example 39
Source File: ScannerSpec.scala From GettingWorkDoneWithExtensibleEffects with Apache License 2.0 | 5 votes |
package scan

import java.io.FileNotFoundException
import java.io.IOException
import java.nio.file._

import cats._
import cats.data._
import cats.implicits._
import org.atnos.eff._
import org.atnos.eff.all._
import org.atnos.eff.syntax.all._
import org.atnos.eff.addon.monix._
import org.atnos.eff.addon.monix.task._
import org.atnos.eff.syntax.addon.monix.task._
import org.specs2._

import scala.collection.immutable.SortedSet
import scala.concurrent.duration._
import monix.eval._
import monix.execution.Scheduler.Implicits.global

class ScannerSpec extends mutable.Specification {

  case class MockFilesystem(directories: Map[Directory, List[FilePath]], fileSizes: Map[File, Long]) extends Filesystem {

    def length(file: File) = fileSizes.getOrElse(file, throw new IOException())

    def listFiles(directory: Directory) = directories.getOrElse(directory, throw new IOException())

    def filePath(path: String): FilePath =
      if (directories.keySet.contains(Directory(path)))
        Directory(path)
      else if (fileSizes.keySet.contains(File(path)))
        File(path)
      else
        throw new FileNotFoundException(path)
  }

  val base = Directory("base")
  val base1 = File(s"${base.path}/1.txt")
  val base2 = File(s"${base.path}/2.txt")
  val subdir = Directory(s"${base.path}/subdir")
  val sub1 = File(s"${subdir.path}/1.txt")
  val sub3 = File(s"${subdir.path}/3.txt")
  val directories = Map(
    base -> List(subdir, base1, base2),
    subdir -> List(sub1, sub3)
  )
  val fileSizes = Map(base1 -> 1L, base2 -> 2L, sub1 -> 1L, sub3 -> 3L)
  val fs = MockFilesystem(directories, fileSizes)

  type R = Fx.fx4[Task, Reader[Filesystem, ?], Reader[ScanConfig, ?], Writer[Log, ?]]

  def run[T](program: Eff[R, T], fs: Filesystem) =
    program.runReader(ScanConfig(2)).runReader(fs).taskAttempt.runWriter.runAsync.runSyncUnsafe(3.seconds)

  val expected = Right(new PathScan(SortedSet(FileSize(sub3, 3), FileSize(base2, 2)), 7, 4))
  val expectedLogs = Set(
    Log.info("Scan started on Directory(base)"),
    Log.debug("Scanning directory 'Directory(base)': 1 subdirectories and 2 files"),
    Log.debug("File base/1.txt Size 1 B"),
    Log.debug("File base/2.txt Size 2 B"),
    Log.debug("Scanning directory 'Directory(base/subdir)': 0 subdirectories and 2 files"),
    Log.debug("File base/subdir/1.txt Size 1 B"),
    Log.debug("File base/subdir/3.txt Size 3 B")
  )

  val (actual, logs) = run(Scanner.pathScan(base), fs)

  "Report Format" ! {actual.mustEqual(expected)}

  "Logs messages are emitted (ignores order due to non-deterministic concurrent execution)" ! {
    expectedLogs.forall(logs.contains)
  }
}
Example 40
Source File: ScannerSpec.scala From GettingWorkDoneWithExtensibleEffects with Apache License 2.0 | 5 votes |
package scan

import java.io.FileNotFoundException
import java.io.IOException
import java.nio.file._

import cats._
import cats.data._
import cats.implicits._
import org.atnos.eff._
import org.atnos.eff.all._
import org.atnos.eff.syntax.all._
import org.atnos.eff.addon.monix._
import org.atnos.eff.addon.monix.task._
import org.atnos.eff.syntax.addon.monix.task._
import org.specs2._

import scala.collection.immutable.SortedSet
import scala.concurrent.duration._
import monix.eval._
import monix.execution.Scheduler.Implicits.global

class ScannerSpec extends mutable.Specification {

  case class MockFilesystem(directories: Map[Directory, List[FilePath]], fileSizes: Map[File, Long]) extends Filesystem {

    def length(file: File) = fileSizes.getOrElse(file, throw new IOException())

    def listFiles(directory: Directory) = directories.getOrElse(directory, throw new IOException())

    def filePath(path: String): FilePath =
      if (directories.keySet.contains(Directory(path)))
        Directory(path)
      else if (fileSizes.keySet.contains(File(path)))
        File(path)
      else
        throw new FileNotFoundException(path)
  }

  val base = Directory("base")
  val base1 = File(s"${base.path}/1.txt")
  val base2 = File(s"${base.path}/2.txt")
  val subdir = Directory(s"${base.path}/subdir")
  val sub1 = File(s"${subdir.path}/1.txt")
  val sub3 = File(s"${subdir.path}/3.txt")
  val directories = Map(
    base -> List(subdir, base1, base2),
    subdir -> List(sub1, sub3)
  )
  val fileSizes = Map(base1 -> 1L, base2 -> 2L, sub1 -> 1L, sub3 -> 3L)
  val fs = MockFilesystem(directories, fileSizes)

  type R = Fx.fx4[Task, FilesystemCmd, Reader[ScanConfig, ?], Writer[Log, ?]]

  def run[T](program: Eff[R, T]) =
    program.runReader(ScanConfig(2)).runFilesystemCmds(fs).taskAttempt.runWriter.runAsync.runSyncUnsafe(3.seconds)

  val expected = Right(new PathScan(SortedSet(FileSize(sub3, 3), FileSize(base2, 2)), 7, 4))
  val expectedLogs = Set(
    Log.info("Scan started on Directory(base)"),
    Log.debug("Scanning directory 'Directory(base)': 1 subdirectories and 2 files"),
    Log.debug("File base/1.txt Size 1 B"),
    Log.debug("File base/2.txt Size 2 B"),
    Log.debug("Scanning directory 'Directory(base/subdir)': 0 subdirectories and 2 files"),
    Log.debug("File base/subdir/1.txt Size 1 B"),
    Log.debug("File base/subdir/3.txt Size 3 B")
  )

  val (actual, logs) = run(Scanner.pathScan(base))

  "Report Format" ! {actual.mustEqual(expected)}

  "Logs messages are emitted (ignores order due to non-deterministic concurrent execution)" ! {
    logs.forall(expectedLogs.contains)
  }
}
Example 41
Source File: InferSchemaCommandSuite.scala From HANAVora-Extensions with Apache License 2.0 | 5 votes |
package org.apache.spark.sql.execution import java.io.FileNotFoundException import org.apache.spark.sql.{AnalysisException, GlobalSapSQLContext, Row} import org.scalatest.FunSuite import com.sap.spark.util.TestUtils.{getFileFromClassPath, withTempDirectory} class InferSchemaCommandSuite extends FunSuite with GlobalSapSQLContext { test("Inferring of schema fails on non-existent file") { withTempDirectory { dir => val nonExistentPath = dir.path + "/non-existent" intercept[FileNotFoundException] { sqlc.sql(s"""INFER SCHEMA OF "$nonExistentPath" AS ORC""").collect() } } } // scalastyle:off magic.number test("Inferring of schema works on parquet file") { val personFile = getFileFromClassPath("/pers.parquet") val result = sqlc.sql(s"""INFER SCHEMA OF "$personFile"""").collect().toSet assertResult( Set( Row("name", 1, true, "VARCHAR(*)", null, null, null), Row("age", 2, true, "INTEGER", 32, 2, 0)))(result) } test("Inferring of schema works on orc file") { val personFile = getFileFromClassPath("/pers.orc") val result = sqlc.sql(s"""INFER SCHEMA OF "$personFile"""").collect().toSet assertResult( Set( Row("name", 1, true, "VARCHAR(*)", null, null, null), Row("age", 2, true, "INTEGER", 32, 2, 0)))(result) } // scalastyle:on magic.number test("Inferring of schema fails on invalid file") { val invalidFile = getFileFromClassPath("/simple.csv") intercept[AnalysisException] { sqlc.sql(s"""INFER SCHEMA OF "$invalidFile"""") } } }
Example 42
Source File: BasicWriteStatsTracker.scala From XSQL with Apache License 2.0 | 5 votes |
package org.apache.spark.sql.execution.datasources import java.io.FileNotFoundException import org.apache.hadoop.conf.Configuration import org.apache.hadoop.fs.Path import org.apache.spark.{SparkContext, TaskContext} import org.apache.spark.internal.Logging import org.apache.spark.sql.catalyst.InternalRow import org.apache.spark.sql.execution.SQLExecution import org.apache.spark.sql.execution.metric.{SQLMetric, SQLMetrics} import org.apache.spark.util.SerializableConfiguration class BasicWriteJobStatsTracker( serializableHadoopConf: SerializableConfiguration, @transient val metrics: Map[String, SQLMetric]) extends WriteJobStatsTracker { override def newTaskInstance(): WriteTaskStatsTracker = { new BasicWriteTaskStatsTracker(serializableHadoopConf.value) } override def processStats(stats: Seq[WriteTaskStats]): Unit = { val sparkContext = SparkContext.getActive.get var numPartitions: Long = 0L var numFiles: Long = 0L var totalNumBytes: Long = 0L var totalNumOutput: Long = 0L val basicStats = stats.map(_.asInstanceOf[BasicWriteTaskStats]) basicStats.foreach { summary => numPartitions += summary.numPartitions numFiles += summary.numFiles totalNumBytes += summary.numBytes totalNumOutput += summary.numRows } metrics(BasicWriteJobStatsTracker.NUM_FILES_KEY).add(numFiles) metrics(BasicWriteJobStatsTracker.NUM_OUTPUT_BYTES_KEY).add(totalNumBytes) metrics(BasicWriteJobStatsTracker.NUM_OUTPUT_ROWS_KEY).add(totalNumOutput) metrics(BasicWriteJobStatsTracker.NUM_PARTS_KEY).add(numPartitions) val executionId = sparkContext.getLocalProperty(SQLExecution.EXECUTION_ID_KEY) SQLMetrics.postDriverMetricUpdates(sparkContext, executionId, metrics.values.toList) } } object BasicWriteJobStatsTracker { private val NUM_FILES_KEY = "numFiles" private val NUM_OUTPUT_BYTES_KEY = "numOutputBytes" private val NUM_OUTPUT_ROWS_KEY = "numOutputRows" private val NUM_PARTS_KEY = "numParts" def metrics: Map[String, SQLMetric] = { val sparkContext = SparkContext.getActive.get Map( NUM_FILES_KEY -> SQLMetrics.createMetric(sparkContext, "number of written files"), NUM_OUTPUT_BYTES_KEY -> SQLMetrics.createMetric(sparkContext, "bytes of written output"), NUM_OUTPUT_ROWS_KEY -> SQLMetrics.createMetric(sparkContext, "number of output rows"), NUM_PARTS_KEY -> SQLMetrics.createMetric(sparkContext, "number of dynamic part") ) } }
Example 43
Source File: FileSystemBazelLocalWorkspace.scala From exodus with MIT License | 5 votes |
package com.wixpress.build.bazel

import java.io.FileNotFoundException

import better.files.File
import ThirdPartyPaths._

class FileSystemBazelLocalWorkspace(root: File) extends BazelLocalWorkspace {

  val localWorkspaceName: String = {
    val workspaceFileContent = contentIfExistsOf(root / "WORKSPACE")
    val validWorkspaceWith = """(?s).*workspace\s*\(\s*name\s*=\s*"([^"]+)"\s*\).*""".r

    workspaceFileContent match {
      case Some(validWorkspaceWith(name)) => name
      case _ => ""
    }
  }

  private val ThirdPartyOverridesPath = "bazel_migration/third_party_targets.overrides"

  validate()

  override def overwriteBuildFile(packageName: String, content: String): Unit = {
    val buildFilePath = root / packageName / "BUILD.bazel"
    buildFilePath.createIfNotExists(createParents = true)
    buildFilePath.overwrite(content)
  }

  override def overwriteThirdPartyReposFile(thirdPartyReposContent: String): Unit =
    (root / thirdPartyReposFilePath).overwrite(thirdPartyReposContent)

  override def overwriteThirdPartyImportTargetsFile(thirdPartyGroup: String, content: String): Unit = {
    val targetsFile = root / s"$thirdPartyImportFilesPathRoot/$thirdPartyGroup.bzl"
    content match {
      case "" => if (targetsFile.exists) targetsFile.delete()
      case _ => {
        targetsFile.createIfNotExists(createParents = true)
        targetsFile.overwrite(content)
      }
    }
  }

  override def thirdPartyReposFileContent(): String =
    contentIfExistsOf(root / thirdPartyReposFilePath).getOrElse("")

  override def buildFileContent(packageName: String): Option[String] =
    contentIfExistsOf(root / packageName / "BUILD.bazel")

  override def thirdPartyImportTargetsFileContent(thirdPartyGroup: String): Option[String] =
    contentIfExistsOf(root / thirdPartyImportFilesPathRoot / s"$thirdPartyGroup.bzl")

  override def allThirdPartyImportTargetsFilesContent(): Set[String] = {
    allThirdPartyImportTargetsFiles().values.toSet
  }

  override def allThirdPartyImportTargetsFiles(): Map[File, String] = {
    val thirdPartyLocation = root / thirdPartyImportFilesPathRoot
    thirdPartyLocation.createIfNotExists(asDirectory = true, createParents = true)
    val files = thirdPartyLocation.glob("**/*.bzl")
    val withNoCustomVersions = files.filterNot(f => f.path.startsWith(thirdPartyLocation + "/custom/"))
    withNoCustomVersions.map(f => f -> contentIfExistsOf(f).get).toMap
  }

  override def thirdPartyOverrides(): ThirdPartyOverrides = {
    contentIfExistsOf(root / ThirdPartyOverridesPath)
      .map(ThirdPartyOverridesReader.from)
      .getOrElse(ThirdPartyOverrides.empty)
  }

  private def contentIfExistsOf(filePath: File) =
    if (filePath.exists) Some(filePath.contentAsString) else None

  private def validate(): Unit = {
    if (!root.exists)
      throw new FileNotFoundException(root.pathAsString)
  }
}
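A minimal usage sketch (the repository path is hypothetical); the constructor throws FileNotFoundException, via validate(), if the root directory does not exist:

import better.files.File

val workspace = new FileSystemBazelLocalWorkspace(File("/path/to/bazel/repo"))
println(workspace.localWorkspaceName)               // parsed from the WORKSPACE file
println(workspace.buildFileContent("some/package")) // Option[String]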
Example 44
Source File: ScannerSpec.scala From GettingWorkDoneWithExtensibleEffects with Apache License 2.0 | 5 votes |
package scan import java.io.FileNotFoundException import java.io.IOException import java.nio.file._ import cats._ import cats.data._ import cats.implicits._ import org.atnos.eff._ import org.atnos.eff.all._ import org.atnos.eff.syntax.all._ import org.atnos.eff.addon.monix._ import org.atnos.eff.addon.monix.task._ import org.atnos.eff.syntax.addon.monix.task._ import org.specs2._ import scala.collection.immutable.SortedSet import scala.concurrent.duration._ import monix.eval._ import monix.execution.Scheduler.Implicits.global class ScannerSpec extends mutable.Specification { case class MockFilesystem(directories: Map[Directory, List[FilePath]], fileSizes: Map[File, Long]) extends Filesystem { def length(file: File) = fileSizes.getOrElse(file, throw new IOException()) def listFiles(directory: Directory) = directories.getOrElse(directory, throw new IOException()) def filePath(path: String): FilePath = if (directories.keySet.contains(Directory(path))) Directory(path) else if (fileSizes.keySet.contains(File(path))) File(path) else throw new FileNotFoundException(path) } val base = Directory("base") val base1 = File(s"${base.path}/1.txt") val base2 = File(s"${base.path}/2.txt") val subdir = Directory(s"${base.path}/subdir") val sub1 = File(s"${subdir.path}/1.txt") val sub3 = File(s"${subdir.path}/3.txt") val directories = Map( base -> List(subdir, base1, base2), subdir -> List(sub1, sub3) ) val fileSizes = Map(base1 -> 1L, base2 -> 2L, sub1 -> 1L, sub3 -> 3L) val fs = MockFilesystem(directories, fileSizes) type R = Fx.fx3[Task, Reader[Filesystem, ?], Reader[ScanConfig, ?]] def run[T](program: Eff[R, T], fs: Filesystem) = program.runReader(ScanConfig(2)).runReader(fs).runAsync.attempt.runSyncUnsafe(3.seconds) "file scan" ! { val actual = run(Scanner.pathScan(base), fs) val expected = Right(new PathScan(SortedSet(FileSize(sub3, 3), FileSize(base2, 2)), 7, 4)) actual.mustEqual(expected) } "Error from Filesystem" ! { val emptyFs: Filesystem = MockFilesystem(directories, Map.empty) val actual = runE(Scanner.scanReport(Array("base", "10")), emptyFs) val expected = Left(new IOException().toString) actual.mustEqual(expected) } type E = Fx.fx3[Task, Reader[Filesystem, ?], Either[String, ?]] def runE[T](program: Eff[E, T], fs: Filesystem) = //there are two nested Either in the stack, one from Exceptions and one from errors raised by the program //we convert to a common error type String then flatten program.runReader(fs).runEither.runAsync.attempt.runSyncUnsafe(3.seconds).leftMap(_.toString).flatten "Error - Report with non-numeric input" ! { val actual = runE(Scanner.scanReport(Array("base", "not a number")), fs) val expected = Left("Number of files must be numeric: not a number") actual.mustEqual(expected) } "Error - Report with non-positive input" ! { val actual = runE(Scanner.scanReport(Array("base", "-1")), fs) val expected = Left("Invalid number of files -1") actual.mustEqual(expected) } }
Example 45
Source File: Clause_6_exception.scala From HadoopLearning with MIT License | 5 votes |
package com.c503.scala import java.io.{FileNotFoundException, FileReader, IOException} object Clause_6_exception { def main(args: Array[String]): Unit = { try { val f = new FileReader("input.txt") println(f.getClass.getName) } catch { case ex: FileNotFoundException => { println("Missing file exception") } case bx: IOException => { println("IO Exception") } } finally { println("Exiting finally...") } } }
Example 46
Source File: ServiceSpec.scala From daf with BSD 3-Clause "New" or "Revised" License | 5 votes |
import java.io.{File, FileNotFoundException, IOException} import java.net.ServerSocket import java.util.Base64 import it.gov.daf.entitymanager.Entity import it.gov.daf.entitymanager.client.Entity_managerClient import org.specs2.mutable.Specification import org.specs2.specification.BeforeAfterAll import play.api.Application import play.api.inject.guice.GuiceApplicationBuilder import play.api.libs.ws.ahc.AhcWSClient import play.api.test.WithServer import scala.concurrent.Await import scala.concurrent.duration.Duration import scala.util.{Failure, Random, Try} @SuppressWarnings( Array( "org.wartremover.warts.NonUnitStatements", "org.wartremover.warts.Throw", "org.wartremover.warts.Var" ) ) class ServiceSpec extends Specification with BeforeAfterAll { def getAvailablePort: Int = { try { val socket = new ServerSocket(0) try { socket.getLocalPort } finally { socket.close() } } catch { case e: IOException => throw new IllegalStateException(s"Cannot find available port: ${e.getMessage}", e) } } private def constructTempDir(dirPrefix: String): Try[File] = Try { val rndrange = 10000000 val file = new File(System.getProperty("java.io.tmpdir"), s"$dirPrefix${Random.nextInt(rndrange)}") if (!file.mkdirs()) throw new RuntimeException("could not create temp directory: " + file.getAbsolutePath) file.deleteOnExit() file } private def deleteDirectory(path: File): Boolean = { if (!path.exists()) { throw new FileNotFoundException(path.getAbsolutePath) } var ret = true if (path.isDirectory) path.listFiles().foreach(f => ret = ret && deleteDirectory(f)) ret && path.delete() } var tmpDir: Try[File] = Failure[File](new Exception("")) def application: Application = GuiceApplicationBuilder(). configure("pac4j.authenticator" -> "test"). configure("janusgraph.storage.directory" -> s"${tmpDir.map(_.getCanonicalPath).getOrElse("db")}/berkeleyje"). configure("janusgraph.index.search.directory" -> s"${tmpDir.map(_.getCanonicalPath).getOrElse("db")}/lucene"). build() "The entity_manager" should { "create an entity and retrieve it correctly" in new WithServer(app = application, port = getAvailablePort) { val ws: AhcWSClient = AhcWSClient() val plainCreds = "david:david" val plainCredsBytes = plainCreds.getBytes val base64CredsBytes = Base64.getEncoder.encode(plainCredsBytes) val base64Creds = new String(base64CredsBytes) val client = new Entity_managerClient(ws)(s"http://localhost:$port") val result = Await.result(client.createEntity(s"Basic $base64Creds", Entity("DAVID")), Duration.Inf) val entity = Await.result(client.getEntity(s"Basic $base64Creds", "DAVID"), Duration.Inf) entity must beEqualTo(Entity("DAVID")) } } override def beforeAll(): Unit = tmpDir = constructTempDir("test") override def afterAll(): Unit = tmpDir.foreach(deleteDirectory(_)) }
Example 47
Source File: TestCatalogClient.scala From daf with BSD 3-Clause "New" or "Revised" License | 5 votes |
package controllers import java.io.FileNotFoundException import daf.catalogmanager._ import scala.util.{ Failure, Success } trait TestCatalogClient { this: DatasetController => override protected val catalogClient = new TestCatalogManagerClient } sealed class TestCatalogManagerClient extends CatalogManagerClient("") { private def makeCatalog(id: String) = MetaCatalog( dataschema = DatasetCatalog( avro = null, flatSchema = List.empty, kyloSchema = None ), operational = Operational( theme = "", subtheme = "", logical_uri = id, physical_uri = Some { s"test-dir/$id" }, is_std = true, group_own = "test", group_access = None, std_schema = None, read_type = "", georef = None, input_src = null, ingestion_pipeline = None, storage_info = Some { StorageInfo( hdfs = Some { StorageHdfs(s"test-dir/$id", Some(s"test-dir/$id"), None) }, kudu = None, hbase = None, textdb = None, mongo = None ) }, dataset_type = "" ), dcatapit = null ) override def getById(authorization: String, catalogId: String) = catalogId match { case "path/to/failure" => Failure { new FileNotFoundException("Encountered failure condition") } case other => Success { makeCatalog(other) } } }
Example 48
Source File: ErrorHandlers.scala From daf with BSD 3-Clause "New" or "Revised" License | 5 votes |
package daf.web import java.io.FileNotFoundException import java.lang.reflect.UndeclaredThrowableException import daf.error.InvalidRequestException import it.gov.daf.common.web.ErrorHandler import org.apache.spark.sql.AnalysisException import org.ietf.jgss.GSSException import play.api.mvc.Results object ErrorHandlers { val security: ErrorHandler = { case _: GSSException => Results.Unauthorized } val spark: ErrorHandler = { case _: FileNotFoundException => Results.NotFound case _: AnalysisException => Results.NotFound case error: UndeclaredThrowableException if error.getUndeclaredThrowable.isInstanceOf[AnalysisException] => Results.NotFound } val api: ErrorHandler = { case error: InvalidRequestException => Results.BadRequest { error.getMessage } } }
Example 49
Source File: DockerComposeFileOps.scala From sbt-docker-compose with BSD 3-Clause "New" or "Revised" License | 5 votes |
package com.github.ehsanyou.sbt.docker.compose.io import java.io.FileNotFoundException import java.util.UUID import cats.syntax.either._ import com.github.ehsanyou.sbt.docker.compose.io.DataTypes.ServiceName import com.github.ehsanyou.sbt.docker.compose.io.DataTypes.ServiceWithTag import io.circe.Json import sbt.File import sbt._ trait IDockerComposeFileOps { def store: File def getServices: Seq[ServiceName] def getServicesWithTag: Seq[ServiceWithTag] def withImageTags(tags: Seq[(String, String)]): IDockerComposeFileOps } case class DockerComposeFileOps( jsonAST: Json, cwd: File ) extends IDockerComposeFileOps { def asPrettyYaml: String = Printer.spaces2.pretty(jsonAST) override def getServices: Seq[ServiceName] = jsonAST.hcursor .downField("services") .fields .map(_.map(ServiceName).toSeq) .getOrElse(Seq.empty) override def getServicesWithTag: Seq[ServiceWithTag] = getServices flatMap { service => jsonAST.hcursor .downField("services") .downField(service.name) .get[String]("image") .toOption .map { image => val split = image.split(":").toSeq ServiceWithTag(service.name, split.drop(1).lastOption) } } def withImageTags(tags: Seq[(String, String)]): DockerComposeFileOps = tags.foldLeft(this) { case (acc, (serviceName, tag)) => acc.replaceServiceTag(serviceName, tag) match { case Some(json) => acc.copy(json) case None => acc } } private def replaceServiceTag(serviceName: String, tag: String): Option[Json] = jsonAST.hcursor .downField("services") .downField(serviceName) .downField("image") .withFocus(_.mapString { image => val split = image.split(":").toSeq s"${split.head}:$tag" }) .top override def store: File = { val f: File = cwd / s"docker-compose-modified-${UUID.randomUUID()}.yml" sbt.IO.write(f, asPrettyYaml) f } } object DockerComposeFileOps { def apply(path: String, workingDir: File): DockerComposeFileOps = DcFileReader(path) match { case Right(json) => DockerComposeFileOps(json, workingDir) case Left(err) => throw new FileNotFoundException(err.msg) } }
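A minimal usage sketch (file names hypothetical): load a compose file, retag two services, and write the modified copy into the working directory. The companion's apply throws FileNotFoundException if the path cannot be read:

import sbt._

val ops = DockerComposeFileOps("docker-compose.yml", new File("."))
val modified: File = ops.withImageTags(Seq("web" -> "1.2.3", "db" -> "latest")).store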
Example 50
Source File: HdfsUtils.scala From drizzle-spark with Apache License 2.0 | 5 votes |
package org.apache.spark.streaming.util

import java.io.{FileNotFoundException, IOException}

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs._

private[streaming] object HdfsUtils {

  def getOutputStream(path: String, conf: Configuration): FSDataOutputStream = {
    val dfsPath = new Path(path)
    val dfs = getFileSystemForPath(dfsPath, conf)
    // If the file exists and we have append support, append instead of creating a new file
    val stream: FSDataOutputStream = {
      if (dfs.isFile(dfsPath)) {
        if (conf.getBoolean("hdfs.append.support", false) || dfs.isInstanceOf[RawLocalFileSystem]) {
          dfs.append(dfsPath)
        } else {
          throw new IllegalStateException("File exists and there is no append support!")
        }
      } else {
        dfs.create(dfsPath)
      }
    }
    stream
  }

  def getInputStream(path: String, conf: Configuration): FSDataInputStream = {
    val dfsPath = new Path(path)
    val dfs = getFileSystemForPath(dfsPath, conf)
    try {
      dfs.open(dfsPath)
    } catch {
      case _: FileNotFoundException =>
        null
      case e: IOException =>
        // If we are really unlucky, the file may be deleted as we're opening the stream.
        // This can happen as clean up is performed by daemon threads that may be left over from
        // previous runs.
        if (!dfs.isFile(dfsPath)) null else throw e
    }
  }

  def checkState(state: Boolean, errorMsg: => String) {
    if (!state) {
      throw new IllegalStateException(errorMsg)
    }
  }

  // Restored from the upstream Spark source: every method here depends on this
  // helper, but it was dropped from the original listing.
  def getFileSystemForPath(path: Path, conf: Configuration): FileSystem = {
    // For local file systems, return the raw local file system so that calls to
    // flush() actually flush the stream.
    val fs = path.getFileSystem(conf)
    fs match {
      case localFs: LocalFileSystem => localFs.getRawFileSystem
      case _ => fs
    }
  }

  def checkFileExists(path: String, conf: Configuration): Boolean = {
    val hdpPath = new Path(path)
    val fs = getFileSystemForPath(hdpPath, conf)
    fs.isFile(hdpPath)
  }
}
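A minimal usage sketch (paths hypothetical): create-or-append a stream, write, and read it back. Note that getInputStream returns null, rather than throwing, when the file is absent:

import org.apache.hadoop.conf.Configuration

val conf = new Configuration()
val out = HdfsUtils.getOutputStream("hdfs://namenode:8020/logs/wal-0", conf)
out.writeUTF("record")
out.close()
val in = HdfsUtils.getInputStream("hdfs://namenode:8020/logs/wal-0", conf) // null if missing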
Example 51
Source File: RsaKeysGenerator.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved. // SPDX-License-Identifier: Apache-2.0 package com.daml.jwt import java.io.{File, FileNotFoundException, FileOutputStream} import com.daml.lf.data.TryOps.Bracket.bracket import scalaz.std.option._ import scalaz.syntax.applicative._ import scala.util.{Failure, Success, Try} object RsaKeysGenerator { private val keySize: Int = 2048 def generate(destination: domain.KeyPair[File]): Try[domain.KeyPair[File]] = for { keyPair <- generate_(): Try[domain.KeyPair[Array[Byte]]] publicKeyFile <- writeKey(keyPair.publicKey, destination.publicKey) privateKeyFile <- writeKey(keyPair.privateKey, destination.privateKey) } yield domain.KeyPair(publicKey = publicKeyFile, privateKey = privateKeyFile) def generate(): Try[domain.KeyPair[Seq[Byte]]] = generate_().map(k => k.map(as => as.toSeq)) private def generate_(): Try[domain.KeyPair[Array[Byte]]] = Try { val kpg = java.security.KeyPairGenerator.getInstance("RSA") kpg.initialize(keySize) Option(kpg.generateKeyPair()).flatMap(domainKeyPair) } flatMap { case Some(x) => Success(x) case None => Failure(new IllegalStateException("Cannot generate RSA key pair, null returned")) } private def domainKeyPair(k: java.security.KeyPair): Option[domain.KeyPair[Array[Byte]]] = ^(Option(k.getPublic), Option(k.getPrivate)) { (pub, pvt) => domain.KeyPair(publicKey = pub.getEncoded, privateKey = pvt.getEncoded) } private def writeKey(key: Array[Byte], file: File): Try[File] = bracket(Try(new FileOutputStream(file)))(close).flatMap { ostream => for { encoder <- Try(java.util.Base64.getEncoder) _ <- Try(ostream.write(encoder.encode(key))) _ <- exists(file) } yield file } private def close(a: FileOutputStream): Try[Unit] = Try(a.close()) private def exists(f: File): Try[File] = for { b <- Try(f.exists()) x <- if (b) Success(f) else Failure(new FileNotFoundException(f.getAbsolutePath)) } yield x }
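A minimal usage sketch (file names hypothetical), using the domain.KeyPair wrapper from the same package; the exists check makes a failed write surface as a FileNotFoundException inside the returned Try:

import java.io.File

val result = RsaKeysGenerator.generate(
  domain.KeyPair(publicKey = new File("rsa.pub"), privateKey = new File("rsa.key")))
result.fold(err => sys.error(err.getMessage), pair => println(s"wrote ${pair.publicKey}"))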
Example 52
Source File: IOUtilTest.scala From airframe with Apache License 2.0 | 5 votes |
package wvlet.log.io import java.io.FileNotFoundException import wvlet.log.Spec class IOUtilTest extends Spec { def `find unused port`: Unit = { val port = IOUtil.unusedPort assert(port > 0) } def `find a file`: Unit = { val buildSbt = IOUtil.findPath("build.sbt") assert(buildSbt.isDefined) assert(buildSbt.get.getPath == "build.sbt") val notFound = IOUtil.findPath("non-existing-file-path.xxxxxxx") assert(notFound.isEmpty) } def `read file as a String`: Unit = { val str = IOUtil.readAsString("build.sbt") assert(str.length > 0) } def `throw FileNotFoundException if file is not found`: Unit = { intercept[FileNotFoundException] { IOUtil.readAsString("non-existing-file-path.txt.tmp") } } }
Example 53
Source File: StringInputParser.scala From avrohugger with Apache License 2.0 | 5 votes |
package avrohugger package input package parsers import reflectivecompilation.{ PackageSplitter, Toolbox } import stores.{ SchemaStore, TypecheckDependencyStore } import org.apache.avro.Protocol import org.apache.avro.Schema import org.apache.avro.Schema.Parser import org.apache.avro.SchemaParseException import org.apache.avro.compiler.idl.Idl import org.apache.avro.compiler.idl.ParseException import scala.collection.JavaConverters._ import java.nio.charset.Charset import java.io.FileNotFoundException // tries schema first, then protocol, then idl, then for case class defs class StringInputParser { lazy val schemaParser = new Parser() lazy val typecheckDependencyStore = new TypecheckDependencyStore def getSchemaOrProtocols( inputString: String, schemaStore: SchemaStore): List[Either[Schema, Protocol]] = { def trySchema(str: String): List[Either[Schema, Protocol]] = { try { List(Left(schemaParser.parse(str))) } catch { case notSchema: SchemaParseException => tryProtocol(str) case unknown: Throwable => sys.error("Unexpected exception: " + unknown) } } def tryProtocol(str: String): List[Either[Schema, Protocol]] = { try { List(Right(Protocol.parse(str))) } catch { case notProtocol: SchemaParseException => tryIDL(str) case unknown: Throwable => sys.error("Unexpected exception: " + unknown) } } def tryIDL(str: String): List[Either[Schema, Protocol]] = { try { val bytes = str.getBytes(Charset.forName("UTF-8")) val inStream = new java.io.ByteArrayInputStream(bytes) val idlParser = new Idl(inStream) val protocol = idlParser.CompilationUnit() List(Right(protocol)) } catch { case e: ParseException => { if (e.getMessage.contains("FileNotFoundException")) { sys.error("Imports not supported in String IDLs, only avdl files.") } else tryCaseClass(str, schemaStore) } case unknown: Throwable => sys.error("Unexpected exception: " + unknown) } } def tryCaseClass( str: String, schemaStore: SchemaStore): List[Either[Schema, Protocol]] = { val compilationUnits = PackageSplitter.getCompilationUnits(str) val scalaDocs = ScalaDocParser.getScalaDocs(compilationUnits) val trees = compilationUnits.map(src => Toolbox.toolBox.parse(src)) val treesZippedWithDocs = trees.zip(scalaDocs) val schemas = treesZippedWithDocs.flatMap(treeAndDocs => { val tree = treeAndDocs._1 val docs = treeAndDocs._2 TreeInputParser.parse(tree, docs, schemaStore, typecheckDependencyStore) }) schemas.map(schema => Left(schema)) } // tries schema first, then protocol, then idl, then for case class defs val schemaOrProtocols = trySchema(inputString) schemaOrProtocols } }
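A minimal usage sketch: feeding the parser an Avro record schema as a JSON string (the no-argument SchemaStore constructor is an assumption about avrohugger's internal API):

val parser = new StringInputParser
val schemaJson =
  """{"type":"record","name":"User","fields":[{"name":"id","type":"int"}]}"""
val parsed = parser.getSchemaOrProtocols(schemaJson, new SchemaStore)
// parsed: List[Either[Schema, Protocol]], here a single Left(schema)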
Example 54
Source File: Sourcer.scala From eidos with Apache License 2.0 | 5 votes |
package org.clulab.wm.elasticsearch.utils import java.io.{File, FileNotFoundException} import java.nio.charset.StandardCharsets import org.slf4j.{Logger, LoggerFactory} import scala.io.BufferedSource import scala.io.Source object Sourcer { protected lazy val logger: Logger = LoggerFactory.getLogger(this.getClass) val utf8: String = StandardCharsets.UTF_8.toString def sourceFromResource(path: String): BufferedSource = { val url = Option(Sourcer.getClass.getResource(path)) .getOrElse(throw newFileNotFoundException(path)) logger.info("Sourcing resource " + url.getPath) Source.fromURL(url, utf8) } def sourceFromFile(file: File): BufferedSource = { logger.info("Sourcing file " + file.getPath) Source.fromFile(file, utf8) } def sourceFromFile(path: String): BufferedSource = sourceFromFile(new File(path)) def newFileNotFoundException(path: String): FileNotFoundException = { val message1 = path + " (The system cannot find the path specified" val message2 = message1 + (if (path.startsWith("~")) ". Make sure to not use the tilde (~) character in paths in lieu of the home directory." else "") val message3 = message2 + ")" new FileNotFoundException(message3) } }
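A minimal usage sketch (the file name is hypothetical); the returned BufferedSource is not closed by Sourcer, so the caller should close it:

val source = Sourcer.sourceFromFile("grounding/ontology.txt")
try println(source.getLines().length)
finally source.close()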
Example 55
Source File: Sourcer.scala From eidos with Apache License 2.0 | 5 votes |
package org.clulab.wm.eidos.utils import java.io.{File, FileNotFoundException} import java.nio.charset.StandardCharsets import org.slf4j.{Logger, LoggerFactory} import scala.io.BufferedSource import scala.io.Source object Sourcer { protected lazy val logger: Logger = LoggerFactory.getLogger(this.getClass) val utf8: String = StandardCharsets.UTF_8.toString def sourceFromResource(path: String): BufferedSource = { val url = Option(Sourcer.getClass.getResource(path)) .getOrElse(throw newFileNotFoundException(path)) logger.info("Sourcing resource " + url.getPath) Source.fromURL(url, utf8) } def sourceFromFile(file: File): BufferedSource = { logger.info("Sourcing file " + file.getPath) Source.fromFile(file, utf8) } def sourceFromFile(path: String): BufferedSource = sourceFromFile(new File(path)) def newFileNotFoundException(path: String): FileNotFoundException = { val message1 = path + " (The system cannot find the path specified" val message2 = message1 + (if (path.startsWith("~")) ". Make sure to not use the tilde (~) character in paths in lieu of the home directory." else "") val message3 = message2 + ")" new FileNotFoundException(message3) } }
Example 56
Source File: Sourcer.scala From eidos with Apache License 2.0 | 5 votes |
package org.clulab.wm.wmexchanger.utils import java.io.{File, FileNotFoundException} import java.nio.charset.StandardCharsets import org.slf4j.{Logger, LoggerFactory} import scala.io.BufferedSource import scala.io.Source object Sourcer { protected lazy val logger: Logger = LoggerFactory.getLogger(this.getClass) val utf8: String = StandardCharsets.UTF_8.toString def sourceFromResource(path: String): BufferedSource = { val url = Option(Sourcer.getClass.getResource(path)) .getOrElse(throw newFileNotFoundException(path)) logger.info("Sourcing resource " + url.getPath) Source.fromURL(url, utf8) } def sourceFromFile(file: File): BufferedSource = { logger.info("Sourcing file " + file.getPath) Source.fromFile(file, utf8) } def sourceFromFile(path: String): BufferedSource = sourceFromFile(new File(path)) def newFileNotFoundException(path: String): FileNotFoundException = { val message1 = path + " (The system cannot find the path specified" val message2 = message1 + (if (path.startsWith("~")) ". Make sure to not use the tilde (~) character in paths in lieu of the home directory." else "") val message3 = message2 + ")" new FileNotFoundException(message3) } }
Example 57
Source File: JSONProcessorTest.scala From donut with MIT License | 5 votes |
package report.donut.gherkin.processors import java.io.{File, FileNotFoundException} import report.donut.gherkin.model.StatusConfiguration import org.scalatest.{FlatSpec, Matchers} class JSONProcessorTest extends FlatSpec with Matchers { val rootDir = List("src", "test", "resources", "mix-cucumber-1-and-4").mkString("", File.separator, File.separator) val statusConfiguration = StatusConfiguration(false, false, false, false) behavior of "JSONProcessor" it should "identify valid files in a directory" in { val jsonFiles = JSONProcessor.getValidFiles(new File(rootDir)) jsonFiles.size shouldBe 11 jsonFiles.contains(rootDir + "1.json") shouldBe true jsonFiles.contains(rootDir + "2.json") shouldBe true jsonFiles.contains(rootDir + "3.json") shouldBe true jsonFiles.contains(rootDir + "4.json") shouldBe true jsonFiles.contains(rootDir + "5.json") shouldBe true jsonFiles.contains(rootDir + "6.json") shouldBe true jsonFiles.contains(rootDir + "7.json") shouldBe true jsonFiles.contains(rootDir + "8.json") shouldBe true jsonFiles.contains(rootDir + "9.json") shouldBe true jsonFiles.contains(rootDir + "10.json") shouldBe true jsonFiles.contains(rootDir + "11.json") shouldBe true jsonFiles.contains(rootDir + "empty_json.json") shouldBe false jsonFiles.contains(rootDir + "sample.xml") shouldBe false } it should "include json files only" in { val jsonFiles = JSONProcessor.getValidFiles(new File(rootDir)) jsonFiles.map(name => name.endsWith(".json")).reduce(_ && _) shouldBe true } it should "exclude empty files" in { val jsonFiles = JSONProcessor.getValidFiles(new File(rootDir)) jsonFiles.contains("empty_json.json") shouldBe false } it should "not parse incorrect file" in { intercept[FileNotFoundException] { val features = JSONProcessor.parseJsonFile(rootDir + "test.json") features shouldBe List.empty } } it should "handle all weirdos" in { val weirdos = JSONProcessor.loadFrom(new File("src/test/resources/samples-weirdos")) pending } }
Example 58
Source File: HdfsUtils.scala From sparkoscope with Apache License 2.0 | 5 votes |
package org.apache.spark.streaming.util

import java.io.{FileNotFoundException, IOException}

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs._

private[streaming] object HdfsUtils {

  def getOutputStream(path: String, conf: Configuration): FSDataOutputStream = {
    val dfsPath = new Path(path)
    val dfs = getFileSystemForPath(dfsPath, conf)
    // If the file exists and we have append support, append instead of creating a new file
    val stream: FSDataOutputStream = {
      if (dfs.isFile(dfsPath)) {
        if (conf.getBoolean("hdfs.append.support", false) || dfs.isInstanceOf[RawLocalFileSystem]) {
          dfs.append(dfsPath)
        } else {
          throw new IllegalStateException("File exists and there is no append support!")
        }
      } else {
        dfs.create(dfsPath)
      }
    }
    stream
  }

  def getInputStream(path: String, conf: Configuration): FSDataInputStream = {
    val dfsPath = new Path(path)
    val dfs = getFileSystemForPath(dfsPath, conf)
    try {
      dfs.open(dfsPath)
    } catch {
      case _: FileNotFoundException =>
        null
      case e: IOException =>
        // If we are really unlucky, the file may be deleted as we're opening the stream.
        // This can happen as clean up is performed by daemon threads that may be left over from
        // previous runs.
        if (!dfs.isFile(dfsPath)) null else throw e
    }
  }

  def checkState(state: Boolean, errorMsg: => String) {
    if (!state) {
      throw new IllegalStateException(errorMsg)
    }
  }

  // Restored from the upstream Spark source: every method here depends on this
  // helper, but it was dropped from the original listing.
  def getFileSystemForPath(path: Path, conf: Configuration): FileSystem = {
    // For local file systems, return the raw local file system so that calls to
    // flush() actually flush the stream.
    val fs = path.getFileSystem(conf)
    fs match {
      case localFs: LocalFileSystem => localFs.getRawFileSystem
      case _ => fs
    }
  }

  def checkFileExists(path: String, conf: Configuration): Boolean = {
    val hdpPath = new Path(path)
    val fs = getFileSystemForPath(hdpPath, conf)
    fs.isFile(hdpPath)
  }
}
Example 61
Source File: HdfsUtilsTest.scala From sparta with Apache License 2.0
package com.stratio.sparta.serving.core.utils

import java.io.{FileNotFoundException, InputStream}

import org.apache.hadoop.fs.{FileSystem, _}
import org.junit.runner.RunWith
import org.mockito.Mockito._
import org.scalatest._
import org.scalatest.junit.JUnitRunner
import org.scalatest.mock.MockitoSugar

import scala.util.{Failure, Try}

@RunWith(classOf[JUnitRunner])
class HdfsUtilsTest extends FlatSpec with ShouldMatchers with MockitoSugar {

  val fileSystem: FileSystem = mock[FileSystem]

  val utils = new HdfsUtils(fileSystem, "stratio")

  "hdfs utils" should "getfiles from a path" in {
    val expected = Array(mock[FileStatus])
    when(fileSystem.listStatus(new Path("myTestPath"))).thenReturn(expected)
    val result = utils.getFiles("myTestPath")
    result should be(expected)
  }

  it should "return single file as inputStream" in {
    val expected: InputStream = mock[FSDataInputStream]
    when(fileSystem.open(new Path("testFile"))).thenReturn(expected.asInstanceOf[FSDataInputStream])
    val result: InputStream = utils.getFile("testFile")
    result should be(expected)
  }

  it should "write" in {
    val result = Try(utils.write("from", "to", true)) match {
      case Failure(ex: Throwable) => ex
    }
    result.isInstanceOf[FileNotFoundException] should be(true)
  }

  it should "write without override" in {
    val result = Try(utils.write("from", "to", false)) match {
      case Failure(ex: Throwable) => ex
    }
    result.isInstanceOf[FileNotFoundException] should be(true)
  }
}
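The HdfsUtils class being tested is not part of this example. Below is a hypothetical reconstruction of just the surface the spec exercises, inferred from the mocked calls (a constructor taking a FileSystem and a user name; getFiles, getFile and write); the real Sparta sources may differ.

package com.stratio.sparta.serving.core.utils

import java.io.{File, FileNotFoundException, InputStream}

import org.apache.hadoop.fs.{FileStatus, FileSystem, Path}

// Hypothetical reconstruction: names and signatures are inferred from the
// spec above, not copied from the Sparta sources.
class HdfsUtils(fileSystem: FileSystem, userName: String) {

  def getFiles(path: String): Array[FileStatus] =
    fileSystem.listStatus(new Path(path))

  def getFile(filename: String): InputStream =
    fileSystem.open(new Path(filename))

  // Copies a local file into HDFS; a missing local source surfaces as the
  // FileNotFoundException that the two "write" tests assert on.
  def write(path: String, destPath: String, overwrite: Boolean = false): Unit = {
    val file = new File(path)
    if (!file.exists()) throw new FileNotFoundException(s"$path does not exist")
    fileSystem.copyFromLocalFile(false, overwrite, new Path(file.getAbsolutePath), new Path(destPath))
  }
}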
Example 62
Source File: ImmutableMigrationsSpec.scala From daml with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.ledger.on.sql

import java.io.{BufferedReader, FileNotFoundException}
import java.math.BigInteger
import java.nio.charset.Charset
import java.security.MessageDigest
import java.util

import com.daml.ledger.on.sql.ImmutableMigrationsSpec._
import org.flywaydb.core.Flyway
import org.flywaydb.core.api.configuration.FluentConfiguration
import org.flywaydb.core.internal.resource.LoadableResource
import org.flywaydb.core.internal.scanner.{LocationScannerCache, ResourceNameCache, Scanner}
import org.scalatest.Matchers._
import org.scalatest.WordSpec

import scala.collection.JavaConverters._

class ImmutableMigrationsSpec extends WordSpec {
  "migration files" should {
    "never change, according to their accompanying digest file" in {
      val configuration = Flyway
        .configure()
        .locations(s"classpath:/$migrationsResourcePath")
      val resourceScanner = flywayScanner(configuration)
      val resources = resourceScanner.getResources("", ".sql").asScala.toSeq
      resources.size should be >= 3

      resources.foreach { resource =>
        val migrationFile = resource.getRelativePath
        val digestFile = migrationFile + ".sha256"
        val expectedDigest = readExpectedDigest(migrationFile, digestFile, resourceScanner)
        val currentDigest = computeCurrentDigest(resource, configuration.getEncoding)
        assert(
          currentDigest == expectedDigest,
          s"""The contents of the migration file "$migrationFile" have changed! Migrations are immutable; you must not change their contents or their digest.""",
        )
      }
    }
  }
}

object ImmutableMigrationsSpec {
  private val migrationsResourcePath = "com/daml/ledger/on/sql/migrations"
  private val hashMigrationsScriptPath = "ledger/ledger-on-sql/hash-migrations.sh"

  private def flywayScanner(configuration: FluentConfiguration) =
    new Scanner(
      classOf[Object],
      util.Arrays.asList(configuration.getLocations: _*),
      getClass.getClassLoader,
      configuration.getEncoding,
      new ResourceNameCache,
      new LocationScannerCache,
    )

  private def readExpectedDigest(
      sourceFile: String,
      digestFile: String,
      resourceScanner: Scanner[_],
  ): String = {
    val resource = Option(resourceScanner.getResource(digestFile))
      .getOrElse(throw new FileNotFoundException(
        s"""\"$digestFile\" is missing. If you are introducing a new Flyway migration step, you need to create an SHA-256 digest file by running $hashMigrationsScriptPath."""))
    new BufferedReader(resource.read()).readLine()
  }

  private def computeCurrentDigest(resource: LoadableResource, encoding: Charset): String = {
    val sha256 = MessageDigest.getInstance("SHA-256")
    new BufferedReader(resource.read())
      .lines()
      .forEach(line => sha256.update((line + "\n").getBytes(encoding)))
    val digest = sha256.digest()
    String.format(s"%0${digest.length * 2}x", new BigInteger(1, digest))
  }
}
Example 63
Source File: YamlReaderTest.scala From airframe with Apache License 2.0
package wvlet.airframe.config

import wvlet.airspec.AirSpec
import wvlet.log.io.Resource
import java.io.FileNotFoundException

case class MyConfig(id: Int, fullName: String, port: Int = 8989)
case class DB(accountId: Int, database: String, table: Seq[String])
// Inferred from its usage below; the original test defines this elsewhere
case class ClassConfig(classes: Seq[String], classAssignments: Map[String, String])

class YamlReaderTest extends AirSpec {
  private def findFile(name: String): String = {
    Resource.find(name) match {
      case Some(x) => x.getPath
      case None    => throw new FileNotFoundException(s"${name} is not found")
    }
  }

  val yml: String        = findFile("myconfig.yml")
  val listYml: String    = findFile("list.yml")
  val classesYml: String = findFile("classes.yml")

  def `parse yaml file`: Unit = {
    val m = YamlReader.loadYaml(yml)
    m.contains("default") shouldBe true
    m.contains("staging") shouldBe true
    m.size shouldBe 2
  }

  def `read yaml as objects`: Unit = {
    val m = YamlReader.loadMapOf[MyConfig](yml)
    m.contains("default") shouldBe true
    m.contains("staging") shouldBe true
    m("default") shouldBe MyConfig(1, "default-config", 8989)
    m("staging") shouldBe MyConfig(2, "staging-config", 10000)
  }

  def `read a specific env from yaml`: Unit = {
    val m = YamlReader.load[MyConfig](yml, "staging")
    m shouldBe MyConfig(2, "staging-config", 10000)
  }

  def `throw an exception when the target env is missing`: Unit = {
    intercept[IllegalArgumentException] {
      YamlReader.load[MyConfig](yml, "production")
    }
  }

  def `parse lists in yaml`: Unit = {
    val m = YamlReader.loadYamlList(listYml)
    m.size shouldBe 2
    m(0)("database") shouldBe "mydb"
    m(0)("account_id") shouldBe 1
    m(1)("database") shouldBe "mydb2"
    m(1)("account_id") shouldBe 10

    val s = m.map(p => YamlReader.bind[DB](p))
    s(0) shouldBe DB(1, "mydb", Seq("A"))
    s(1) shouldBe DB(10, "mydb2", Seq("T1", "T2"))
  }

  def `parse map in yaml`: Unit = {
    val m = YamlReader.loadMapOf[ClassConfig](classesYml)
    m.size shouldBe 2
    m("development").classes shouldBe Seq("class1", "class2", "class3")
    m("development").classAssignments shouldBe Map(
      "nobita"  -> "class1",
      "takeshi" -> "class2",
      "suneo"   -> "class3"
    )
  }
}
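The YAML fixtures themselves are not included in the example. A myconfig.yml consistent with the assertions above might look as follows; the full_name key is an assumption, mirroring the snake_case-to-camelCase mapping that the list.yml assertions (account_id -> accountId) rely on. Note that default omits port, so the case-class default of 8989 applies.

default:
  id: 1
  full_name: "default-config"

staging:
  id: 2
  full_name: "staging-config"
  port: 10000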
Example 64
Source File: JavaClassSpecs.scala From kaitai_struct_compiler with GNU General Public License v3.0
package io.kaitai.struct.formats

import java.io.{File, FileNotFoundException, IOError}

import io.kaitai.struct.Log
import io.kaitai.struct.format.{ClassSpec, ClassSpecs}
import io.kaitai.struct.precompile.ErrorInInput

import scala.collection.mutable
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future

class JavaClassSpecs(relPath: String, absPaths: Seq[String], firstSpec: ClassSpec)
  extends ClassSpecs(firstSpec) {

  private val relFiles = mutable.Map[String, ClassSpec]()
  private val absFiles = mutable.Map[String, ClassSpec]()

  override def importRelative(name: String, path: List[String], inFile: Option[String]): Future[Option[ClassSpec]] = Future {
    Log.importOps.info(() => s".. importing relative $name")
    JavaClassSpecs.cached(path, inFile, relFiles, name,
      (_) => JavaKSYParser.fileNameToSpec(s"$relPath/$name.ksy")
    )
  }

  override def importAbsolute(name: String, path: List[String], inFile: Option[String]): Future[Option[ClassSpec]] = Future {
    Log.importOps.info(() => s".. importing absolute $name")
    JavaClassSpecs.cached(path, inFile, absFiles, name, tryAbsolutePaths)
  }

  def tryAbsolutePaths(name: String): ClassSpec = {
    absPaths.foreach { (path) =>
      val fn = s"$path/$name.ksy"
      val f = new File(fn)
      if (f.exists) {
        if (f.canRead) {
          if (f.isFile) {
            return JavaKSYParser.fileNameToSpec(fn)
          } else {
            Log.importOps.warn(() => s".... $fn exists, but is not a regular file, skipping")
          }
        } else {
          Log.importOps.warn(() => s".... $fn exists, but not readable, skipping")
        }
      }
    }
    throw new FileNotFoundException(s"Unable to find '$name' in import search paths, using: $absPaths")
  }
}

object JavaClassSpecs {
  def cached(
    path: List[String],
    inFile: Option[String],
    cacheMap: mutable.Map[String, ClassSpec],
    name: String,
    importOp: (String) => ClassSpec
  ): Option[ClassSpec] = {
    // Have we loaded it previously?
    cacheMap.get(name) match {
      case Some(_) =>
        // Yes, it's already loaded and processed, nothing new here
        Log.importOps.info(() => s".... cached")
        None
      case None =>
        // Nope, let's import it
        try {
          val spec = importOp(name)
          cacheMap(name) = spec
          Some(spec)
        } catch {
          case err: Throwable =>
            throw new ErrorInInput(err, path, inFile)
        }
    }
  }
}
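A hypothetical usage sketch follows; the main.ksy path and the search directory are made up, while JavaKSYParser.fileNameToSpec and importRelative are the same entry points the class itself uses.

package io.kaitai.struct.formats

import scala.concurrent.Await
import scala.concurrent.duration._

object ImportUsageSketch {
  def main(args: Array[String]): Unit = {
    // Hypothetical: parse a top-level spec, then resolve one relative import.
    val firstSpec = JavaKSYParser.fileNameToSpec("formats/main.ksy")
    val specs = new JavaClassSpecs("formats", Seq("/usr/local/share/kaitai"), firstSpec)

    // importRelative returns None when the spec was already cached,
    // Some(spec) when it was freshly parsed.
    val imported = Await.result(
      specs.importRelative("common_header", List("common_header"), Some("main.ksy")),
      10.seconds
    )
    imported.foreach(_ => println("imported a new spec"))
  }
}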
Example 65
Source File: TestAccumulators.scala From spark-distcp with Apache License 2.0
package com.coxautodata.objects

import java.io.FileNotFoundException

import org.apache.spark.sql.SparkSession
import org.scalatest.{FunSpec, Matchers}

class TestAccumulators extends FunSpec with Matchers {

  it("test all accumulator conditions") {

    val spark = SparkSession.builder().master("local[*]").getOrCreate()

    val testCases: Seq[DistCPResult] = List(
      DeleteResult(null, DeleteActionResult.SkippedDoesNotExists),
      DeleteResult(null, DeleteActionResult.SkippedDryRun),
      DeleteResult(null, DeleteActionResult.Deleted),
      DeleteResult(null, DeleteActionResult.Failed(new RuntimeException("test"))),
      DirectoryCopyResult(null, null, CopyActionResult.SkippedAlreadyExists),
      DirectoryCopyResult(null, null, CopyActionResult.SkippedDryRun),
      DirectoryCopyResult(null, null, CopyActionResult.Created),
      DirectoryCopyResult(null, null, CopyActionResult.Failed(new RuntimeException("test"))),
      FileCopyResult(null, null, 1, CopyActionResult.SkippedAlreadyExists),
      FileCopyResult(null, null, 1000, CopyActionResult.SkippedIdenticalFileAlreadyExists),
      FileCopyResult(null, null, 1000000, CopyActionResult.SkippedDryRun),
      FileCopyResult(null, null, 1000000, CopyActionResult.Copied),
      FileCopyResult(null, null, 1000, CopyActionResult.OverwrittenOrUpdated),
      FileCopyResult(null, null, 50000, CopyActionResult.Failed(new FileNotFoundException("test")))
    )

    val acc = new Accumulators(spark)

    testCases.foreach(acc.handleResult)

    acc.getOutputText should be(
      """--Raw data--
        |Data copied: 977 KB (1001000 bytes)
        |Data skipped (already existing files, dry-run and failures): 1 MB (1051001 bytes)
        |--Files--
        |Files copied (new files and overwritten/updated files): 2
        |Files overwritten/updated: 1
        |Skipped files for copying (already existing files, dry-run and failures): 4
        |Failed files during copy: 1
        |--Folders--
        |Folders created: 1
        |Skipped folder creates (already existing folders, dry-run and failures): 3
        |Failed folder creates: 1
        |--Deletes--
        |Successful delete operations: 1
        |Skipped delete operations (files/folders already missing, dry-run and failures): 3
        |Failed delete operations: 1
        |--Exception counts--
        |java.lang.RuntimeException: 2
        |java.io.FileNotFoundException: 1""".stripMargin
    )

    spark.stop()
  }
}
Example 66
Source File: ScannerSpec.scala From GettingWorkDoneWithExtensibleEffects with Apache License 2.0
package scan

import java.io.FileNotFoundException
import java.io.IOException
import java.nio.file._

import cats._
import cats.data._
import cats.implicits._
import org.atnos.eff._
import org.atnos.eff.all._
import org.atnos.eff.syntax.all._
import org.atnos.eff.addon.monix._
import org.atnos.eff.addon.monix.task._
import org.atnos.eff.syntax.addon.monix.task._
import org.specs2._

import scala.collection.immutable.SortedSet
import scala.concurrent.duration._
import monix.eval._
import monix.execution.Scheduler.Implicits.global

class ScannerSpec extends mutable.Specification {

  case class MockFilesystem(directories: Map[Directory, List[FilePath]], fileSizes: Map[File, Long]) extends Filesystem {

    def length(file: File) = fileSizes.getOrElse(file, throw new IOException())

    def listFiles(directory: Directory) = directories.getOrElse(directory, throw new IOException())

    def filePath(path: String): FilePath =
      if (directories.keySet.contains(Directory(path)))
        Directory(path)
      else if (fileSizes.keySet.contains(File(path)))
        File(path)
      else
        throw new FileNotFoundException(path)
  }

  val base = Directory("base")
  val base1 = File(s"${base.path}/1.txt")
  val base2 = File(s"${base.path}/2.txt")
  val subdir = Directory(s"${base.path}/subdir")
  val sub1 = File(s"${subdir.path}/1.txt")
  val sub3 = File(s"${subdir.path}/3.txt")
  val directories = Map(
    base -> List(subdir, base1, base2),
    subdir -> List(sub1, sub3)
  )
  val fileSizes = Map(base1 -> 1L, base2 -> 2L, sub1 -> 1L, sub3 -> 3L)
  val fs = MockFilesystem(directories, fileSizes)

  type R = Fx.fx3[Task, Reader[Filesystem, ?], Reader[ScanConfig, ?]]

  def run[T](program: Eff[R, T], fs: Filesystem) =
    program.runReader(ScanConfig(2)).runReader(fs).runAsync.attempt.runSyncUnsafe(3.seconds)

  "file scan" ! {
    val actual = run(Scanner.pathScan(base), fs)
    val expected = Right(new PathScan(SortedSet(FileSize(sub3, 3), FileSize(base2, 2)), 7, 4))

    actual.mustEqual(expected)
  }

  "Error from Filesystem" ! {
    val emptyFs: Filesystem = MockFilesystem(directories, Map.empty)

    val actual = runE(Scanner.scanReport(Array("base", "10")), emptyFs)
    val expected = ???

    actual.mustEqual(expected)
  }

  type E = Fx.fx3[Task, Reader[Filesystem, ?], Either[String, ?]]

  def runE[T](program: Eff[E, T], fs: Filesystem) =
    // there are two nested Either in the stack, one from Exceptions and one from errors raised by the program
    // we convert to a common error type String then flatten
    program.runReader(fs).runEither.runAsync.attempt.runSyncUnsafe(3.seconds).leftMap(_.toString).flatten

  "Error - Report with non-numeric input" ! {
    val actual = runE(Scanner.scanReport(Array("base", "not a number")), fs)
    val expected = Left("Number of files must be numeric: not a number")

    actual.mustEqual(expected)
  }

  "Error - Report with non-positive input" ! {
    val actual = runE(Scanner.scanReport(Array("base", "-1")), fs)
    val expected = Left("Invalid number of files -1")

    actual.mustEqual(expected)
  }
}
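The ??? above is the exercise's placeholder for the expected result. A plausible value, assuming the message-less IOException thrown by MockFilesystem.length is captured by the Task attempt and stringified by runE's leftMap(_.toString), would be:

val expected = Left("java.io.IOException") // assumption: Throwable.toString with a null message yields just the class name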
Example 67
Source File: ScannerSpec.scala From GettingWorkDoneWithExtensibleEffects with Apache License 2.0
package scan

import java.io.FileNotFoundException
import java.io.IOException
import java.nio.file._

import cats._
import cats.data._
import cats.implicits._
import org.atnos.eff._
import org.atnos.eff.all._
import org.atnos.eff.syntax.all._
import org.atnos.eff.addon.monix._
import org.atnos.eff.addon.monix.task._
import org.atnos.eff.syntax.addon.monix.task._
import org.specs2._

import scala.collection.immutable.SortedSet
import scala.concurrent.duration._
import monix.eval._
import monix.execution.Scheduler.Implicits.global

class ScannerSpec extends mutable.Specification {

  case class MockFilesystem(directories: Map[Directory, List[FilePath]], fileSizes: Map[File, Long]) extends Filesystem {

    def length(file: File) = fileSizes.getOrElse(file, throw new IOException())

    def listFiles(directory: Directory) = directories.getOrElse(directory, throw new IOException())

    def filePath(path: String): FilePath =
      if (directories.keySet.contains(Directory(path)))
        Directory(path)
      else if (fileSizes.keySet.contains(File(path)))
        File(path)
      else
        throw new FileNotFoundException(path)
  }

  val base = Directory("base")
  val base1 = File(s"${base.path}/1.txt")
  val base2 = File(s"${base.path}/2.txt")
  val subdir = Directory(s"${base.path}/subdir")
  val sub1 = File(s"${subdir.path}/1.txt")
  val sub3 = File(s"${subdir.path}/3.txt")
  val directories = Map(
    base -> List(subdir, base1, base2),
    subdir -> List(sub1, sub3)
  )
  val fileSizes = Map(base1 -> 1L, base2 -> 2L, sub1 -> 1L, sub3 -> 3L)
  val fs = MockFilesystem(directories, fileSizes)

  type R = Fx.fx4[Task, Reader[Filesystem, ?], Reader[ScanConfig, ?], Writer[Log, ?]]

  def run[T](program: Eff[R, T], fs: Filesystem) =
    program.runReader(ScanConfig(2)).runReader(fs).taskAttempt.runWriter.runAsync.runSyncUnsafe(3.seconds)

  val expected = Right(new PathScan(SortedSet(FileSize(sub3, 3), FileSize(base2, 2)), 7, 4))
  val expectedLogs = Set(
    Log.info("Scan started on Directory(base)"),
    Log.debug("Scanning directory 'Directory(base)': 1 subdirectories and 2 files"),
    Log.debug("File base/1.txt Size 1 B"),
    Log.debug("File base/2.txt Size 2 B"),
    Log.debug("Scanning directory 'Directory(base/subdir)': 0 subdirectories and 2 files"),
    Log.debug("File base/subdir/1.txt Size 1 B"),
    Log.debug("File base/subdir/3.txt Size 3 B")
  )

  val (actual, logs) = run(Scanner.pathScan(base), fs)

  "Report Format" ! { actual.mustEqual(expected) }

  "Log messages are emitted (ignores order due to non-deterministic concurrent execution)" ! {
    logs.forall(expectedLogs.contains)
  }
}
Example 68
Source File: ScannerSpec.scala From GettingWorkDoneWithExtensibleEffects with Apache License 2.0
package scan

import java.io.FileNotFoundException
import java.io.IOException
import java.nio.file._

import cats._
import cats.data._
import cats.implicits._
import org.atnos.eff._
import org.atnos.eff.all._
import org.atnos.eff.syntax.all._
import org.atnos.eff.addon.monix._
import org.atnos.eff.addon.monix.task._
import org.atnos.eff.syntax.addon.monix.task._
import org.specs2._

import scala.collection.immutable.SortedSet
import scala.concurrent.duration._
import monix.eval._
import monix.execution.Scheduler.Implicits.global

class ScannerSpec extends mutable.Specification {

  case class MockFilesystem(directories: Map[Directory, List[FilePath]], fileSizes: Map[File, Long]) extends Filesystem {

    def length(file: File) = fileSizes.getOrElse(file, throw new IOException())

    def listFiles(directory: Directory) = directories.getOrElse(directory, throw new IOException())

    def filePath(path: String): FilePath =
      if (directories.keySet.contains(Directory(path)))
        Directory(path)
      else if (fileSizes.keySet.contains(File(path)))
        File(path)
      else
        throw new FileNotFoundException(path)
  }

  val base = Directory("base")
  val linkTarget = File(s"/somewhere/else/7.txt")
  val base1 = File(s"${base.path}/1.txt")
  val baseLink = Symlink(s"${base.path}/7.txt", linkTarget)
  val subdir = Directory(s"${base.path}/subdir")
  val sub2 = File(s"${subdir.path}/2.txt")
  val subLink = Symlink(s"${subdir.path}/7.txt", linkTarget)
  val directories = Map(
    base -> List(subdir, base1, baseLink),
    subdir -> List(sub2, subLink)
  )
  val fileSizes = Map(base1 -> 1L, sub2 -> 2L, linkTarget -> 7L)
  val fs = MockFilesystem(directories, fileSizes)

  type R = Fx.fx5[Task, Reader[Filesystem, ?], Reader[ScanConfig, ?], Writer[Log, ?], State[Set[FilePath], ?]]

  def run[T](program: Eff[R, T], fs: Filesystem) =
    program.runReader(ScanConfig(2)).runReader(fs).evalStateZero[Set[FilePath]].taskAttempt.runWriter[Log].runAsync.runSyncUnsafe(3.seconds)

  val expected = Right(new PathScan(SortedSet(FileSize(linkTarget, 7), FileSize(sub2, 2)), 10, 3))

  val (actual, logs) = run(Scanner.pathScan[R](base), fs)

  "Report Format" ! { actual.mustEqual(expected) }
}
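This variant's fx5 stack adds State[Set[FilePath], ?], evidently so each symlink target is counted once: linkTarget is reachable via both baseLink and subLink yet contributes its 7 bytes to the expected total only once. Below is a minimal sketch of such a dedup guard using Eff's state operations; visitOnce is a hypothetical helper, not taken from the exercise code.

import org.atnos.eff._
import org.atnos.eff.all._

object VisitOnceSketch {
  type _visited[R] = State[Set[FilePath], ?] |= R

  // Returns true the first time a path is seen, false afterwards,
  // recording the path in the Set[FilePath] state as it goes.
  def visitOnce[R: _visited](path: FilePath): Eff[R, Boolean] =
    for {
      seen <- get[R, Set[FilePath]]
      _    <- put[R, Set[FilePath]](seen + path)
    } yield !seen.contains(path)
}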
Example 69
Source File: ScannerSpec.scala From GettingWorkDoneWithExtensibleEffects with Apache License 2.0
package scan

import java.io.FileNotFoundException
import java.io.IOException
import java.nio.file._

import cats._
import cats.data._
import cats.implicits._
import org.atnos.eff._
import org.atnos.eff.all._
import org.atnos.eff.syntax.all._
import org.atnos.eff.addon.monix._
import org.atnos.eff.addon.monix.task._
import org.atnos.eff.syntax.addon.monix.task._
import org.specs2._

import scala.collection.immutable.SortedSet
import scala.concurrent.duration._
import monix.eval._
import monix.execution.Scheduler.Implicits.global

class ScannerSpec extends mutable.Specification {

  case class MockFilesystem(directories: Map[Directory, List[FilePath]], fileSizes: Map[File, Long]) extends Filesystem {

    def length(file: File) = fileSizes.getOrElse(file, throw new IOException())

    def listFiles(directory: Directory) = directories.getOrElse(directory, throw new IOException())

    def filePath(path: String): FilePath =
      if (directories.keySet.contains(Directory(path)))
        Directory(path)
      else if (fileSizes.keySet.contains(File(path)))
        File(path)
      else
        throw new FileNotFoundException(path)
  }

  val base = Directory("base")
  val base1 = File(s"${base.path}/1.txt")
  val base2 = File(s"${base.path}/2.txt")
  val subdir = Directory(s"${base.path}/subdir")
  val sub1 = File(s"${subdir.path}/1.txt")
  val sub3 = File(s"${subdir.path}/3.txt")
  val directories = Map(
    base -> List(subdir, base1, base2),
    subdir -> List(sub1, sub3)
  )
  val fileSizes = Map(base1 -> 1L, base2 -> 2L, sub1 -> 1L, sub3 -> 3L)
  val fs = MockFilesystem(directories, fileSizes)

  type R = Fx.fx4[Task, Reader[Filesystem, ?], Reader[ScanConfig, ?], Writer[Log, ?]]

  def run[T](program: Eff[R, T], fs: Filesystem) =
    program.runReader(ScanConfig(2)).runReader(fs).taskAttempt.runWriter.runAsync.runSyncUnsafe(3.seconds)

  val expected = Right(new PathScan(SortedSet(FileSize(sub3, 3), FileSize(base2, 2)), 7, 4))
  val expectedLogs = Set(
    Log.info("Scan started on Directory(base)"),
    Log.debug("Scanning directory 'Directory(base)': 1 subdirectories and 2 files"),
    Log.debug("File base/1.txt Size 1 B"),
    Log.debug("File base/2.txt Size 2 B"),
    Log.debug("Scanning directory 'Directory(base/subdir)': 0 subdirectories and 2 files"),
    Log.debug("File base/subdir/1.txt Size 1 B"),
    Log.debug("File base/subdir/3.txt Size 3 B")
  )

  val (actual, logs) = run(Scanner.pathScan(base), fs)

  "Report Format" ! { actual.mustEqual(expected) }

  "Log messages are emitted (ignores order due to non-deterministic concurrent execution)" ! {
    expectedLogs.forall(logs.contains)
  }
}