scala.util.Properties Scala Examples
The following examples show how to use scala.util.Properties.
Follow the links above each example to view the original project or source file.
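Almost every example below relies on a handful of scala.util.Properties members. As a quick orientation, here is a minimal, self-contained sketch (the environment-variable names are placeholders, not taken from any project below) exercising the members that recur throughout these examples:

import scala.util.Properties

object PropertiesTour extends App {
  // Environment lookups: Option-returning vs. defaulted.
  val maybeHome: Option[String] = Properties.envOrNone("EXAMPLE_HOME")
  val master: String            = Properties.envOrElse("EXAMPLE_MASTER", "local[*]")

  // System properties and platform facts.
  println(Properties.tmpDir)              // the java.io.tmpdir directory, e.g. /tmp
  println(Properties.lineSeparator)       // "\n" on Unix-likes, "\r\n" on Windows
  println(Properties.isWin)               // true only when running on Windows
  println(Properties.versionNumberString) // Scala library version, e.g. "2.12.10"
}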
Example 1
Source File: Launcher.scala From sparkplug with MIT License
package springnz.sparkplug.client

import java.net.{ URLEncoder, InetAddress }

import better.files._
import com.typesafe.config.{ ConfigRenderOptions, Config }
import org.apache.spark.launcher.SparkLauncher
import springnz.sparkplug.util.{ BuilderOps, ConfigUtils, Logging, Pimpers }

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import scala.util.{ Properties, Try }

object Launcher extends Logging {
  import BuilderOps._
  import Pimpers._

  def startProcess(launcher: SparkLauncher): Future[Unit] = {
    val processFuture = Future {
      launcher.launch()
    }.withErrorLog("Failed to launch: ")
    processFuture.flatMap {
      process ⇒ executeProcess(process)
    }
  }

  private def executeProcess(process: Process): Future[Unit] = Future {
    val outStream = scala.io.Source.fromInputStream(process.getInputStream)
    for (line ← outStream.getLines()) {
      log.info(line)
    }
    val errorStream = scala.io.Source.fromInputStream(process.getErrorStream)
    for (line ← errorStream.getLines()) {
      log.info(line)
    }
    process.waitFor()
  }

  def launch(clientAkkaAddress: String,
    jarPath: File,
    mainJarPattern: String,
    mainClass: String,
    sparkConfig: Config,
    akkaRemoteConfig: Option[Config],
    sendJars: Boolean = true): Try[Future[Unit]] = Try {

    val fullExtraJarFolder = jarPath.pathAsString

    val sparkHome = Properties.envOrNone("SPARK_HOME")
    val sparkMaster = Properties.envOrElse("SPARK_MASTER", s"spark://${InetAddress.getLocalHost.getHostAddress}:7077")
    log.debug(s"Spark master set to: $sparkMaster")

    // TODO: enable this functionality (need Spark 1.5 for this)
    // val sparkArgs: Array[String] = config.getString("spark.submit.sparkargs").split(' ')

    if (!sparkMaster.startsWith("local[") && !sparkHome.isDefined)
      throw new RuntimeException("If 'SPARK_MASTER' is not set to local, 'SPARK_HOME' must be set.")

    val appName = mainClass.split('.').last

    val mainJar = jarPath.glob(mainJarPattern).collectFirst { case f ⇒ f.pathAsString }

    val configVars: Seq[(String, String)] = ConfigUtils.configFields(sparkConfig).toSeq

    val akkaRemoteConfigString = akkaRemoteConfig.map { config ⇒
      val configString = config.root().render(ConfigRenderOptions.concise())
      URLEncoder.encode(configString, "UTF-8")
    }

    val launcher = (new SparkLauncher)
      .setIfSome[String](mainJar) { (l, mj) ⇒ l.setAppResource(mj) }
      .setMainClass(mainClass)
      .setAppName(appName)
      .setMaster(sparkMaster)
      .setIfSome[String](sparkHome) { (l, sh) ⇒ l.setSparkHome(sh) }
      .addAppArgs("appName", appName)
      .addAppArgs("clientAkkaAddress", clientAkkaAddress)
      .setIfSome(akkaRemoteConfigString) { (l, config) ⇒ l.addAppArgs("remoteAkkaConfig", config) }
      .setFoldLeft(configVars) { case (launcher, (key, value)) ⇒ launcher.setConf(key, value) }
      .setDeployMode(sparkConfig.getString("spark.deploymode"))

    val extraJarFiles = jarPath.glob("*.jar")
      .map { case f ⇒ f.pathAsString }
      .filterNot(_.contains("/akka-"))

    val launcherWithJars =
      if (sendJars)
        extraJarFiles.foldLeft(launcher) { case (l, jarFile) ⇒ l.addJar(jarFile) }
      else if (extraJarFiles.length == 0)
        launcher
      else launcher
        .setConf(SparkLauncher.DRIVER_EXTRA_CLASSPATH, s"$fullExtraJarFolder/*")
        .setConf(SparkLauncher.EXECUTOR_EXTRA_CLASSPATH, s"$fullExtraJarFolder/*")

    startProcess(launcherWithJars)
  }
}
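The Properties usage to note here is the required-versus-defaulted split: envOrNone for SPARK_HOME, which is only mandatory for non-local masters, and envOrElse for SPARK_MASTER, which has a computable default. A stripped-down sketch of just that validation, with everything else from the example omitted:

import java.net.InetAddress
import scala.util.Properties

def resolveSparkEnv(): (Option[String], String) = {
  val sparkHome = Properties.envOrNone("SPARK_HOME")
  val sparkMaster = Properties.envOrElse(
    "SPARK_MASTER", s"spark://${InetAddress.getLocalHost.getHostAddress}:7077")
  // SPARK_HOME may be absent only when running against a local master.
  if (!sparkMaster.startsWith("local[") && sparkHome.isEmpty)
    throw new RuntimeException("If 'SPARK_MASTER' is not set to local, 'SPARK_HOME' must be set.")
  (sparkHome, sparkMaster)
}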
Example 2
Source File: ServiceTestSpec.scala From lagom with Apache License 2.0
package com.lightbend.lagom.javadsl.testkit

import java.nio.file.Files
import java.nio.file.Path
import java.nio.file.Paths

import javax.inject.Inject
import akka.japi.function.Procedure
import com.google.inject.AbstractModule
import com.lightbend.lagom.javadsl.api.Descriptor
import com.lightbend.lagom.javadsl.api.Service
import com.lightbend.lagom.javadsl.persistence.PersistentEntityRegistry
import com.lightbend.lagom.javadsl.server.ServiceGuiceSupport
import com.lightbend.lagom.javadsl.testkit.ServiceTest.Setup
import com.lightbend.lagom.javadsl.testkit.ServiceTest.TestServer
import play.inject.guice.GuiceApplicationBuilder

import scala.collection.JavaConverters._
import scala.compat.java8.FunctionConverters._
import scala.util.Properties

import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class ServiceTestSpec extends AnyWordSpec with Matchers {
  "ServiceTest" when {
    "started with Cassandra" should {
      "create a temporary directory" in {
        val temporaryFileCountBeforeRun = listTemporaryFiles().size

        withServer(ServiceTest.defaultSetup.withCassandra()) { _ =>
          val temporaryFilesDuringRun = listTemporaryFiles()

          temporaryFilesDuringRun should have size (temporaryFileCountBeforeRun + 1)
        }
      }
    }

    "stopped after starting" should {
      "remove its temporary directory" in {
        val temporaryFileCountBeforeRun = listTemporaryFiles().size

        withServer(ServiceTest.defaultSetup.withCassandra()) { _ =>
          ()
        }

        val temporaryFilesAfterRun = listTemporaryFiles()

        temporaryFilesAfterRun should have size temporaryFileCountBeforeRun
      }
    }

    "started with JDBC" should {
      "start successfully" in {
        withServer(ServiceTest.defaultSetup.withJdbc()) { _ =>
          ()
        }
      }
    }
  }

  def withServer(setup: Setup)(block: TestServer => Unit): Unit = {
    ServiceTest.withServer(setup.configureBuilder((registerService _).asJava), block(_))
  }

  def registerService(builder: GuiceApplicationBuilder): GuiceApplicationBuilder =
    builder.bindings(new TestServiceModule)

  def listTemporaryFiles(): Iterator[Path] = {
    val tmpDir = Paths.get(Properties.tmpDir)
    Files
      .newDirectoryStream(tmpDir, "ServiceTest_*")
      .iterator()
      .asScala
  }
}

trait TestService extends Service {
  import Service._

  final override def descriptor: Descriptor = named("test")
}

class TestServiceImpl @Inject() (persistentEntityRegistry: PersistentEntityRegistry) extends TestService

class TestServiceModule extends AbstractModule with ServiceGuiceSupport {
  override def configure(): Unit = bindService(classOf[TestService], classOf[TestServiceImpl])
}
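Properties.tmpDir is just the java.io.tmpdir system property, which lets the test count Lagom's ServiceTest_* scratch directories before and after a run. The probe in isolation, as a runnable sketch (here the DirectoryStream is eagerly closed, a detail the test above leaves implicit):

import java.nio.file.{ Files, Path, Paths }
import scala.collection.JavaConverters._
import scala.util.Properties

def listTemporaryFiles(glob: String = "ServiceTest_*"): List[Path] = {
  val stream = Files.newDirectoryStream(Paths.get(Properties.tmpDir), glob)
  try stream.iterator().asScala.toList
  finally stream.close()
}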
Example 3
Source File: SendgridEmailService.scala From scala-clippy with Apache License 2.0
package util.email

import com.sendgrid.SendGrid
import com.typesafe.scalalogging.StrictLogging

import scala.concurrent.Future
import scala.util.Properties

class SendgridEmailService(sendgridUsername: String, sendgridPassword: String, emailFrom: String)
    extends EmailService
    with StrictLogging {

  private lazy val sendgrid = new SendGrid(sendgridUsername, sendgridPassword)

  override def send(to: String, subject: String, body: String) = {
    val email = new SendGrid.Email()
    email.addTo(to)
    email.setFrom(emailFrom)
    email.setSubject(subject)
    email.setText(body)

    val response = sendgrid.send(email)
    if (response.getStatus) {
      logger.info(s"Email to $to sent")
    } else {
      logger.error(
        s"Email to $to, subject: $subject, body: $body, not sent: " +
          s"${response.getCode}/${response.getMessage}"
      )
    }

    Future.successful(())
  }
}

object SendgridEmailService extends StrictLogging {

  def createFromEnv(emailFrom: String): Option[SendgridEmailService] =
    for {
      u <- Properties.envOrNone("SENDGRID_USERNAME")
      p <- Properties.envOrNone("SENDGRID_PASSWORD")
    } yield {
      logger.info("Using SendGrid email service")
      new SendgridEmailService(u, p, emailFrom)
    }
}
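createFromEnv is a common construction pattern: sequencing envOrNone calls in a for-comprehension builds the service only when every required variable is present. The same shape reduced to its essentials (Credentials is a placeholder type, not part of the project above):

import scala.util.Properties

final case class Credentials(user: String, password: String)

// Some(...) only if both variables are set; None as soon as either is missing.
def credentialsFromEnv(): Option[Credentials] =
  for {
    u <- Properties.envOrNone("SENDGRID_USERNAME")
    p <- Properties.envOrNone("SENDGRID_PASSWORD")
  } yield Credentials(u, p)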
Example 4
Source File: ExecutorService.scala From sparkplug with MIT License
package springnz.sparkplug.executor

import java.net.{ URLDecoder, URLEncoder }
import java.time.LocalDate

import akka.actor._
import com.typesafe.config.ConfigFactory
import springnz.sparkplug.core._
import springnz.sparkplug.util.Logging

import scala.util.{ Properties, Try }

object Constants {
  val defaultAkkaRemoteConfigSection = "akkaRemote"
  val actorSystemName = "sparkplugExecutorSystem"
  val brokerActorName = "sparkplugRequestBroker"
}

object ExecutorService extends Logging {
  import Constants._

  lazy val defaultRemoteAkkaConfig = ConfigFactory.load.getConfig(s"sparkplug.$defaultAkkaRemoteConfigSection")

  // TODO: proper command line parsing to allow richer config options
  def main(args: Array[String]): Unit = {
    if (args.length < 4)
      throw new IllegalArgumentException(s"Expected at least 4 arguments to ExecutorService. Args = : ${args.toList}")
    val appName = args(1)
    val sparkClientPath = args(3)

    log.info(s"Starting Sparkplug ExecutorService: SparkClient = $sparkClientPath: ${LocalDate.now()}")

    val remoteConfig = if (args.length == 6) {
      val urlEncodedConfig = args(5)
      val configString = URLDecoder.decode(urlEncodedConfig, "UTF-8")
      val config = ConfigFactory.parseString(configString)
      log.info(s"Using akka remote config:\n$configString")
      config
    } else {
      log.info(s"Using default akka remote config from config section 'sparkplug.$defaultAkkaRemoteConfigSection'")
      defaultRemoteAkkaConfig
    }

    import scala.collection.JavaConversions._
    def env = System.getenv().toMap
    log.debug(s"Environment:\n $env")

    val system = ActorSystem(actorSystemName, remoteConfig)

    val executorService = new ExecutorService(appName)
    executorService.start(system, sparkClientPath)
    log.info("Terminating the remote application.")
  }
}

class ExecutorService(appName: String, brokerName: String = Constants.brokerActorName) extends LongLivedExecutor with Logging {

  // Note that the SparkConf inherits all its settings from spark-submit
  override val configurer: Configurer = new LocalConfigurer(appName, Properties.envOrNone("SPARK_MASTER"), None)

  def start(system: ActorSystem, sparkClientPath: String): Try[Unit] = {

    val actorOperation = SparkOperation[Unit] { implicit sparkContext ⇒

      def postStopAction() = {
        log.info("Cancelling any jobs (if any are running).")
        sparkContext.cancelAllJobs()
        log.info("Stopping Spark context.")
        sparkContext.stop()
      }

      log.info("Creating requestBroker for ExecutorService.")
      system.actorOf(Props(new RequestBroker(sparkClientPath, postStopAction)), name = brokerName)
    }

    log.info("Executing container operation (everything happens inside this method).")
    val result = execute(actorOperation)
    log.info("Finished executing container operation (everything happens inside this method).")
    result
  }
}
Example 5
Source File: Dependencies.scala From geotrellis-osm-elevation with Apache License 2.0
import scala.util.Properties

import sbt._

object Dependencies {
  def either(environmentVariable: String, default: String): String =
    Properties.envOrElse(environmentVariable, default)

  private val sprayVersion = Properties.envOrElse("SPRAY_VERSION", "1.3.3")

  // Cloudera's distribution of Spark 1.5 is built with Akka 2.2.x,
  // as opposed to the official release, which is built with Akka 2.3.x.
  // We need to have the spray version match the Akka version of Spark
  // or else MethodNotFound pain will ensue.

  val sprayRouting =
    if (sprayVersion == "1.2.3") {
      "io.spray" % "spray-routing" % sprayVersion
    } else {
      "io.spray" %% "spray-routing" % sprayVersion
    }

  val sprayCan =
    if (sprayVersion == "1.2.3") {
      "io.spray" % "spray-can" % sprayVersion
    } else {
      "io.spray" %% "spray-can" % sprayVersion
    }
}
Example 6
Source File: AlmondPreprocessor.scala From almond with BSD 3-Clause "New" or "Revised" License
package almond.amm

import ammonite.interp.DefaultPreprocessor
import ammonite.util.Name
import fastparse.Parsed

import scala.reflect.internal.Flags
import scala.tools.nsc.{ Global => G }
import scala.util.Properties

object AlmondPreprocessor {

  private[almond] val isAtLeast_2_12_7 = {
    val v = Properties.versionNumberString
    !v.startsWith("2.11.") && (!v.startsWith("2.12.") || {
      v.stripPrefix("2.12.").takeWhile(_.isDigit).toInt >= 7
    })
  }

  def customPprintSignature(ident: String, customMsg: Option[String], modOpt: Option[String], modErrOpt: Option[String]) = {
    val customCode = customMsg.fold("_root_.scala.None")(x => s"""_root_.scala.Some("$x")""")
    val modOptCode = modOpt.fold("_root_.scala.None")(x => s"""_root_.scala.Some($x)""")
    val modErrOptCode = modErrOpt.fold("_root_.scala.None")(x => s"""_root_.scala.Some($x)""")

    s"""_root_.almond
       |  .api
       |  .JupyterAPIHolder
       |  .value
       |  .Internal
       |  .printOnChange($ident, ${fastparse.internal.Util.literalize(ident)}, $customCode, $modOptCode, $modErrOptCode)""".stripMargin
  }
}

class AlmondPreprocessor(
  parse: => String => Either[String, Seq[G#Tree]],
  autoUpdateLazyVals: Boolean,
  autoUpdateVars: Boolean
) extends DefaultPreprocessor(parse) {

  import AlmondPreprocessor._

  val CustomLazyDef = Processor {
    case (_, code, t: G#ValDef)
        if autoUpdateLazyVals &&
          !DefaultPreprocessor.isPrivate(t) &&
          !t.name.decoded.contains("$") &&
          t.mods.hasFlag(Flags.LAZY) =>
      val (code0, modOpt) = fastparse.parse(code, AlmondParsers.PatVarSplitter(_)) match {
        case Parsed.Success((lhs, tpeOpt, rhs), _) if lhs.startsWith("lazy val ") =>
          val mod = Name.backtickWrap(t.name.decoded + "$value")
          val tpePart = tpeOpt.fold("")(t => "[" + t + "]")
          val c =
            s"""val $mod = new _root_.almond.api.internal.Lazy$tpePart(() => $rhs)
               |import $mod.{value => ${Name.backtickWrap(t.name.decoded)}}
               |""".stripMargin
          (c, Some(mod + ".onChange"))
        case _ =>
          (code, None)
      }
      DefaultPreprocessor.Expanded(
        code0,
        Seq(customPprintSignature(Name.backtickWrap(t.name.decoded), Some("[lazy]"), None, modOpt))
      )
  }

  val CustomVarDef = Processor {
    case (_, code, t: G#ValDef)
        if autoUpdateVars &&
          isAtLeast_2_12_7 && // https://github.com/scala/bug/issues/10886
          !DefaultPreprocessor.isPrivate(t) &&
          !t.name.decoded.contains("$") &&
          !t.mods.hasFlag(Flags.LAZY) =>
      val (code0, modOpt) = fastparse.parse(code, AlmondParsers.PatVarSplitter(_)) match {
        case Parsed.Success((lhs, tpeOpt, rhs), _) if lhs.startsWith("var ") =>
          val mod = Name.backtickWrap(t.name.decoded + "$value")
          val tpePart = tpeOpt.fold("")(t => "[" + t + "]")
          val c =
            s"""val $mod = new _root_.almond.api.internal.Modifiable$tpePart($rhs)
               |import $mod.{value => ${Name.backtickWrap(t.name.decoded)}}
               |""".stripMargin
          (c, Some(mod + ".onChange"))
        case _ =>
          (code, None)
      }
      DefaultPreprocessor.Expanded(
        code0,
        Seq(customPprintSignature(Name.backtickWrap(t.name.decoded), None, modOpt, None))
      )
  }

  override val decls = Seq[(String, String, G#Tree) => Option[DefaultPreprocessor.Expanded]](
    CustomLazyDef, CustomVarDef,
    // same as super.decls
    ObjectDef, ClassDef, TraitDef, DefDef, TypeDef, PatVarDef, Import, Expr
  )
}
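isAtLeast_2_12_7 shows Properties.versionNumberString, the version of the Scala library on the classpath (e.g. "2.12.10"), being used to gate a workaround to specific Scala versions. The check on its own:

import scala.util.Properties

// True on 2.12.7+ and on any 2.13+; false on 2.11.x and on 2.12.0 through 2.12.6.
val isAtLeast_2_12_7: Boolean = {
  val v = Properties.versionNumberString
  !v.startsWith("2.11.") &&
    (!v.startsWith("2.12.") || v.stripPrefix("2.12.").takeWhile(_.isDigit).toInt >= 7)
}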
Example 7
Source File: Dependencies.scala From incubator-toree with Apache License 2.0
import sbt._
import sbt.Keys._

import scala.util.Properties

object Dependencies {

  // Libraries
  val akkaActor   = "com.typesafe.akka" %% "akka-actor"   % "2.5.27" // Apache v2
  val akkaSlf4j   = "com.typesafe.akka" %% "akka-slf4j"   % "2.5.27" // Apache v2
  val akkaTestkit = "com.typesafe.akka" %% "akka-testkit" % "2.5.27" // Apache v2

  val clapper = "org.clapper" %% "classutil" % "1.0.12" // BSD 3-clause license, used for detecting plugins

  val commonsExec = "org.apache.commons" % "commons-exec" % "1.3" // Apache v2

  val config = "com.typesafe" % "config" % "1.3.0" // Apache v2

  val coursierVersion = "1.0.3"
  val coursier      = "io.get-coursier" %% "coursier"       % coursierVersion // Apache v2
  val coursierCache = "io.get-coursier" %% "coursier-cache" % coursierVersion // Apache v2

  val ivy = "org.apache.ivy" % "ivy" % "2.4.0" // Apache v2

  // use the same jackson version in test as the one provided at runtime by Spark 2.0.0
  val jacksonDatabind = "com.fasterxml.jackson.core" % "jackson-databind" % "2.6.5" // Apache v2

  val jeroMq = "org.zeromq" % "jeromq" % "0.4.3" // MPL v2

  val joptSimple = "net.sf.jopt-simple" % "jopt-simple" % "4.9" // MIT

  val mockito = "org.mockito" % "mockito-all" % "1.10.19" // MIT

  val playJson = "com.typesafe.play" %% "play-json" % "2.3.10" // Apache v2

  val scalaCompiler = Def.setting { "org.scala-lang" % "scala-compiler" % scalaVersion.value } // BSD 3-clause
  val scalaLibrary  = Def.setting { "org.scala-lang" % "scala-library"  % scalaVersion.value } // BSD 3-clause
  val scalaReflect  = Def.setting { "org.scala-lang" % "scala-reflect"  % scalaVersion.value } // BSD 3-clause
  val scalaTest     = "org.scalatest" %% "scalatest" % "2.2.6" // Apache v2

  val slf4jApi = "org.slf4j" % "slf4j-api" % "1.7.21" // MIT

  val sparkVersion = settingKey[String]("Version of Apache Spark to use in Toree") // defined in root build
  val sparkCore      = Def.setting { "org.apache.spark" %% "spark-core"      % sparkVersion.value } // Apache v2
  val sparkGraphX    = Def.setting { "org.apache.spark" %% "spark-graphx"    % sparkVersion.value } // Apache v2
  val sparkMllib     = Def.setting { "org.apache.spark" %% "spark-mllib"     % sparkVersion.value } // Apache v2
  val sparkRepl      = Def.setting { "org.apache.spark" %% "spark-repl"      % sparkVersion.value } // Apache v2
  val sparkSql       = Def.setting { "org.apache.spark" %% "spark-sql"       % sparkVersion.value } // Apache v2
  val sparkStreaming = Def.setting { "org.apache.spark" %% "spark-streaming" % sparkVersion.value } // Apache v2

  val springCore = "org.springframework" % "spring-core" % "5.2.3.RELEASE" // Apache v2
  val guava      = "com.google.guava"    % "guava"       % "14.0.1"        // Apache v2

  // Projects
  val sparkAll = Def.setting {
    Seq(
      sparkCore.value % "provided" excludeAll (
        // Exclude netty (org.jboss.netty is for 3.2.2.Final only)
        ExclusionRule(organization = "org.jboss.netty", name = "netty")
      ),
      sparkGraphX.value    % "provided",
      sparkMllib.value     % "provided",
      sparkRepl.value      % "provided",
      sparkSql.value       % "provided",
      sparkStreaming.value % "provided"
    )
  }
}
Example 8
Source File: GitHooks.scala From stryker4s with Apache License 2.0
import java.nio.file.Files
import java.nio.file.attribute.PosixFilePermissions

import sbt._
import sbt.internal.util.ManagedLogger

import scala.collection.JavaConverters._
import scala.util.Properties

object GitHooks {
  def apply(hooksSourceDir: File, hooksTargetDir: File, log: ManagedLogger): Unit =
    if (hooksSourceDir.isDirectory && hooksTargetDir.exists()) {
      IO.listFiles(hooksSourceDir)
        .map(hook => (hook, hooksTargetDir / hook.name))
        .filterNot(_._2.exists()) // Don't write if hook already exists
        .foreach {
          case (originalHook, targetHook) =>
            log.info(s"Copying ${originalHook.name} hook to $targetHook")
            Files.copy(originalHook.asPath, targetHook.asPath)
            if (!Properties.isWin)
              targetHook.setPermissions(PosixFilePermissions.fromString("rwxr-xr-x").asScala.toSet)
        }
    }
}
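Properties.isWin is the platform guard here: POSIX file permissions do not exist on Windows, so the chmod step must be skipped there. A minimal standalone sketch of that guard, using plain NIO rather than the sbt file API above (the path argument is a placeholder):

import java.nio.file.{ Files, Paths }
import java.nio.file.attribute.PosixFilePermissions
import scala.util.Properties

def makeExecutable(path: String): Unit =
  // POSIX-permission calls throw UnsupportedOperationException on Windows.
  if (!Properties.isWin)
    Files.setPosixFilePermissions(Paths.get(path), PosixFilePermissions.fromString("rwxr-xr-x"))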
Example 9
Source File: YarnCommandBuilderUtils.scala From Spark-2.3.1 with Apache License 2.0
package org.apache.spark.launcher

import scala.collection.JavaConverters._
import scala.collection.mutable.ListBuffer
import scala.util.Properties

private[spark] object YarnCommandBuilderUtils {

  def quoteForBatchScript(arg: String): String = {
    CommandBuilderUtils.quoteForBatchScript(arg)
  }

  def findJarsDir(sparkHome: String): String = {
    val scalaVer = Properties.versionNumberString
      .split("\\.")
      .take(2)
      .mkString(".")
    CommandBuilderUtils.findJarsDir(sparkHome, scalaVer, true)
  }
}
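The only Properties call here derives the Scala binary version (major.minor) from the full version string, which Spark then uses to locate its jars directory. The derivation in isolation:

import scala.util.Properties

// e.g. "2.12.10" -> "2.12"
val scalaBinaryVersion: String =
  Properties.versionNumberString.split("\\.").take(2).mkString(".")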
Example 10
Source File: TypedDatasetSuite.scala From frameless with Apache License 2.0
package frameless

import org.apache.spark.{ SparkConf, SparkContext }
import org.apache.spark.sql.{ SQLContext, SparkSession }
import org.scalactic.anyvals.PosZInt
import org.scalatest.BeforeAndAfterAll
import org.scalatestplus.scalacheck.Checkers
import org.scalacheck.Prop
import org.scalacheck.Prop._

import scala.util.{ Properties, Try }
import org.scalatest.funsuite.AnyFunSuite

trait SparkTesting { self: BeforeAndAfterAll =>

  val appID: String = new java.util.Date().toString + math.floor(math.random * 10E4).toLong.toString

  val conf: SparkConf = new SparkConf()
    .setMaster("local[*]")
    .setAppName("test")
    .set("spark.ui.enabled", "false")
    .set("spark.app.id", appID)

  private var s: SparkSession = _

  implicit def session: SparkSession = s
  implicit def sc: SparkContext = session.sparkContext
  implicit def sqlContext: SQLContext = session.sqlContext

  override def beforeAll(): Unit = {
    assert(s == null)
    s = SparkSession.builder().config(conf).getOrCreate()
  }

  override def afterAll(): Unit = {
    if (s != null) {
      s.stop()
      s = null
    }
  }
}

class TypedDatasetSuite extends AnyFunSuite with Checkers with BeforeAndAfterAll with SparkTesting {
  // Limit size of generated collections and number of checks to avoid OutOfMemoryError
  implicit override val generatorDrivenConfig: PropertyCheckConfiguration = {
    def getPosZInt(name: String, default: PosZInt) =
      Properties.envOrNone(s"FRAMELESS_GEN_${name}")
        .flatMap(s => Try(s.toInt).toOption)
        .flatMap(PosZInt.from)
        .getOrElse(default)
    PropertyCheckConfiguration(
      sizeRange = getPosZInt("SIZE_RANGE", PosZInt(20)),
      minSize = getPosZInt("MIN_SIZE", PosZInt(0))
    )
  }

  implicit val sparkDelay: SparkDelay[Job] = Job.framelessSparkDelayForJob

  def approximatelyEqual[A](a: A, b: A)(implicit numeric: Numeric[A]): Prop = {
    val da = numeric.toDouble(a)
    val db = numeric.toDouble(b)
    val epsilon = 1E-6
    // Spark has a weird behaviour concerning expressions that should return Inf
    // Most of the time they return NaN instead, for instance stddev of Seq(-7.827553978923477E227, -5.009124275715786E153)
    if ((da.isNaN || da.isInfinity) && (db.isNaN || db.isInfinity)) proved
    else if (
      (da - db).abs < epsilon ||
        (da - db).abs < da.abs / 100)
      proved
    else falsified :| s"Expected $a but got $b, which is more than 1% off and greater than epsilon = $epsilon."
  }
}
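getPosZInt combines envOrNone with Try(...).toOption so that an unset or malformed FRAMELESS_GEN_* variable silently falls back to the default instead of failing the suite. The same helper for plain Int, extracted as a sketch:

import scala.util.{ Properties, Try }

def intFromEnv(name: String, default: Int): Int =
  Properties.envOrNone(name)
    .flatMap(s => Try(s.toInt).toOption)
    .getOrElse(default)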
Example 11
Source File: ApiCache.scala From twitter-stream-ml with GNU General Public License v3.0
package com.giorgioinf.twtml.web

import org.json4s._
import org.json4s.native.Serialization
import org.json4s.native.Serialization.{ write, read }
import org.mashupbots.socko.infrastructure.Logger

import scala.io.Source
import scala.tools.nsc.io.File
import scala.util.{ Properties, Try }

object ApiCache extends Logger {

  private val backupFile = Properties.tmpDir + "/twtml-web.json"

  private var typeStats = Stats()
  private var typeConfig = Config()

  implicit val formats = Serialization.formats(
    ShortTypeHints(List(classOf[Config], classOf[Stats])))

  private def cacheStats(data: Stats) = {
    log.debug("caching stats")
    typeStats = data
  }

  private def cacheConfig(data: Config) = {
    log.debug("caching config")
    typeConfig = data
    backup
  }

  def config(): String = {
    write(typeConfig)
  }

  def stats(): String = {
    write(typeStats)
  }

  def cache(json: String) = {
    val data = read[TypeData](json)
    data match {
      case stat: Stats => cacheStats(stat)
      case conf: Config => cacheConfig(conf)
      case _ => log.error("json not recognized: {}", json)
    }
  }

  def restore() = {
    Try(cache(Source.fromFile(backupFile).mkString))
  }

  def backup() = {
    File(backupFile).writeAll(config)
  }
}
Example 12
Source File: NorthwindDB.scala From morpheus with Apache License 2.0
package org.opencypher.morpheus.util

import org.opencypher.morpheus.api.io.sql.SqlDataSourceConfig
import org.opencypher.morpheus.api.io.util.FileSystemUtils._
import org.opencypher.morpheus.testing.utils.H2Utils._

import scala.io.Source
import scala.util.Properties

object NorthwindDB {

  def init(sqlDataSourceConfig: SqlDataSourceConfig.Jdbc): Unit = {
    withConnection(sqlDataSourceConfig) { connection =>
      connection.execute("DROP SCHEMA IF EXISTS NORTHWIND")
      connection.execute("CREATE SCHEMA NORTHWIND")
      connection.setSchema("NORTHWIND")

      // create the SQL db schema
      connection.execute(readResourceAsString("/northwind/sql/northwind_schema.sql"))
      // populate tables with data
      connection.execute(readResourceAsString("/northwind/sql/northwind_data.sql"))
      // create views that hide problematic columns
      connection.execute(readResourceAsString("/northwind/sql/northwind_views.sql"))
    }
  }

  private def readResourceAsString(name: String): String =
    using(Source.fromFile(getClass.getResource(name).toURI))(_
      .getLines()
      .filterNot(line => line.startsWith("#") || line.startsWith("CREATE INDEX"))
      .mkString(Properties.lineSeparator))
}
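readResourceAsString re-joins the filtered SQL lines with Properties.lineSeparator, the platform line ending (the line.separator system property). The joining step alone:

import scala.util.Properties

def joinSqlLines(lines: Iterator[String]): String =
  lines
    .filterNot(line => line.startsWith("#") || line.startsWith("CREATE INDEX"))
    .mkString(Properties.lineSeparator)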
Example 13
Source File: Config.scala From franklin with Apache License 2.0
package com.azavea.franklin.database

import cats.effect._
import com.zaxxer.hikari.{ HikariConfig, HikariDataSource }
import doobie.util.transactor.Transactor

import scala.util.Properties

object DatabaseConfig {

  var jdbcDriver: String = "org.postgresql.Driver"

  val jdbcNoDBUrl: String =
    Properties.envOrElse(
      "POSTGRES_URL",
      "jdbc:postgresql://localhost/"
    )

  val jdbcDBName: String = Properties.envOrElse("POSTGRES_NAME", "franklin")

  val jdbcUrl: String = jdbcNoDBUrl + jdbcDBName

  val dbUser: String = Properties.envOrElse("POSTGRES_USER", "franklin")
  val dbPassword: String = Properties.envOrElse("POSTGRES_PASSWORD", "franklin")
  val dbStatementTimeout: String = Properties.envOrElse("POSTGRES_STATEMENT_TIMEOUT", "30000")
  val dbMaximumPoolSize: Int = Properties.envOrElse("POSTGRES_DB_POOL_SIZE", "5").toInt

  def nonHikariTransactor[F[_]: Async](databaseName: String)(implicit cs: ContextShift[F]) = {
    Transactor.fromDriverManager[F](
      "org.postgresql.Driver",
      jdbcNoDBUrl + databaseName,
      dbUser,
      dbPassword
    )
  }

  val hikariConfig = new HikariConfig()
  hikariConfig.setPoolName("franklin-pool")
  hikariConfig.setMaximumPoolSize(dbMaximumPoolSize)
  hikariConfig.setConnectionInitSql(
    s"SET statement_timeout = ${dbStatementTimeout};"
  )
  hikariConfig.setJdbcUrl(jdbcUrl)
  hikariConfig.setUsername(dbUser)
  hikariConfig.setPassword(dbPassword)
  hikariConfig.setDriverClassName(jdbcDriver)

  val hikariDS = new HikariDataSource(hikariConfig)
}
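One caveat with the envOrElse(...).toInt idiom used for dbMaximumPoolSize: the default string only covers the unset case, so a POSTGRES_DB_POOL_SIZE that is set but not numeric still throws NumberFormatException when the object initializes. A hedged variant (not what the project does) that also tolerates malformed values:

import scala.util.{ Properties, Try }

val dbMaximumPoolSize: Int =
  Try(Properties.envOrElse("POSTGRES_DB_POOL_SIZE", "5").toInt).getOrElse(5)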
Example 14
Source File: ServiceTestSpec.scala From lagom with Apache License 2.0
package com.lightbend.lagom.scaladsl.testkit

import java.nio.file.Files
import java.nio.file.Path
import java.nio.file.Paths

import com.lightbend.lagom.scaladsl.api.Descriptor
import com.lightbend.lagom.scaladsl.api.Service
import com.lightbend.lagom.scaladsl.persistence.cassandra.CassandraPersistenceComponents
import com.lightbend.lagom.scaladsl.persistence.jdbc.JdbcPersistenceComponents
import com.lightbend.lagom.scaladsl.persistence.PersistenceComponents
import com.lightbend.lagom.scaladsl.persistence.PersistentEntityRegistry
import com.lightbend.lagom.scaladsl.playjson.EmptyJsonSerializerRegistry
import com.lightbend.lagom.scaladsl.playjson.JsonSerializerRegistry
import com.lightbend.lagom.scaladsl.server._
import play.api.db.HikariCPComponents
import play.api.libs.ws.ahc.AhcWSComponents

import scala.collection.JavaConverters._
import scala.util.Properties

import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class ServiceTestSpec extends AnyWordSpec with Matchers {
  "ServiceTest" when {
    "started with Cassandra" should {
      "create a temporary directory" in {
        val temporaryFileCountBeforeRun = listTemporaryFiles().size

        ServiceTest.withServer(ServiceTest.defaultSetup.withCassandra())(new CassandraTestApplication(_)) { _ =>
          val temporaryFilesDuringRun = listTemporaryFiles()

          temporaryFilesDuringRun should have size (temporaryFileCountBeforeRun + 1)
        }
      }
    }

    "stopped after starting" should {
      "remove its temporary directory" in {
        val temporaryFileCountBeforeRun = listTemporaryFiles().size

        ServiceTest.withServer(ServiceTest.defaultSetup.withCassandra())(new CassandraTestApplication(_)) { _ =>
          ()
        }

        val temporaryFilesAfterRun = listTemporaryFiles()

        temporaryFilesAfterRun should have size temporaryFileCountBeforeRun
      }
    }

    "started with JDBC" should {
      "start successfully" in {
        ServiceTest.withServer(ServiceTest.defaultSetup.withJdbc())(new JdbcTestApplication(_)) { _ =>
          ()
        }
      }
    }
  }

  def listTemporaryFiles(): Iterator[Path] = {
    val tmpDir = Paths.get(Properties.tmpDir)
    Files
      .newDirectoryStream(tmpDir, "ServiceTest_*")
      .iterator()
      .asScala
  }
}

trait TestService extends Service {
  import Service._

  final override def descriptor: Descriptor = named("test")
}

class TestServiceImpl(persistentEntityRegistry: PersistentEntityRegistry) extends TestService

class TestApplication(context: LagomApplicationContext)
    extends LagomApplication(context)
    with LocalServiceLocator
    with AhcWSComponents { self: PersistenceComponents =>

  override lazy val jsonSerializerRegistry: JsonSerializerRegistry = EmptyJsonSerializerRegistry

  override lazy val lagomServer: LagomServer = serverFor[TestService](new TestServiceImpl(persistentEntityRegistry))
}

class CassandraTestApplication(context: LagomApplicationContext)
    extends TestApplication(context)
    with CassandraPersistenceComponents

class JdbcTestApplication(context: LagomApplicationContext)
    extends TestApplication(context)
    with JdbcPersistenceComponents
    with HikariCPComponents
Example 15
Source File: Build.scala From lagom with Apache License 2.0
import play.dev.filewatch.FileWatchService
import play.sbt.run.toLoggerProxy
import sbt._

import javax.net.ssl.SSLContext
import javax.net.ssl.HttpsURLConnection
import javax.net.ssl.TrustManager
import javax.net.ssl.X509TrustManager
import java.security.cert.X509Certificate

import scala.annotation.tailrec
import scala.collection.mutable.ListBuffer
import scala.util.Properties

// This is an almost verbatim copy from Play's
// https://github.com/playframework/playframework/blob/master/framework/src/sbt-plugin/src/sbt-test/play-sbt-plugin/generated-keystore/project/Build.scala
// I think some parts could be trimmed but keeping the (almost) verbatim copy will ease future consolidation.
// Changes compared to Play's version:
// - had to replace `path` with `url` in `verifyResourceContains`
object DevModeBuild {

  def jdk7WatchService = Def.setting {
    FileWatchService.jdk7(Keys.sLog.value)
  }

  def jnotifyWatchService = Def.setting {
    FileWatchService.jnotify(Keys.target.value)
  }

  // Using 30 max attempts so that we can give more chances to
  // the file watcher service. This is relevant when using the
  // default JDK watch service, which uses polling.
  val MaxAttempts = 30
  val WaitTime = 500L

  val ConnectTimeout = 10000
  val ReadTimeout = 10000

  private val trustAllManager = {
    val manager = new X509TrustManager() {
      def getAcceptedIssuers: Array[X509Certificate] = null
      def checkClientTrusted(certs: Array[X509Certificate], authType: String): Unit = {}
      def checkServerTrusted(certs: Array[X509Certificate], authType: String): Unit = {}
    }
    Array[TrustManager](manager)
  }

  @tailrec
  def verifyResourceContains(
      url: String,
      status: Int,
      assertions: Seq[String],
      attempts: Int,
      headers: (String, String)*
  ): Unit = {
    println(s"Attempt $attempts at $url")
    val messages = ListBuffer.empty[String]
    try {
      val sc = SSLContext.getInstance("SSL")
      sc.init(null, trustAllManager, null)
      HttpsURLConnection.setDefaultSSLSocketFactory(sc.getSocketFactory)

      val jnUrl = new java.net.URL(url)
      val conn = jnUrl.openConnection().asInstanceOf[java.net.HttpURLConnection]
      conn.setConnectTimeout(ConnectTimeout)
      conn.setReadTimeout(ReadTimeout)
      headers.foreach(h => conn.setRequestProperty(h._1, h._2))

      if (status == conn.getResponseCode) messages += s"Resource at $url returned $status as expected"
      else throw new RuntimeException(s"Resource at $url returned ${conn.getResponseCode} instead of $status")

      val is = if (conn.getResponseCode >= 400) conn.getErrorStream else conn.getInputStream

      // The input stream may be null if there's no body
      val contents = if (is != null) {
        val c = IO.readStream(is)
        is.close()
        c
      } else ""
      conn.disconnect()

      assertions.foreach { assertion =>
        if (contents.contains(assertion)) messages += s"Resource at $url contained $assertion"
        else throw new RuntimeException(s"Resource at $url didn't contain '$assertion':\n$contents")
      }

      messages.foreach(println)
    } catch {
      case e: Exception =>
        println(s"Got exception: $e")
        if (attempts < MaxAttempts) {
          Thread.sleep(WaitTime)
          verifyResourceContains(url, status, assertions, attempts + 1, headers: _*)
        } else {
          messages.foreach(println)
          println(s"After $attempts attempts:")
          throw e
        }
    }
  }
}
Example 16
Source File: FileUtil.scala From bitcoin-s with MIT License
package org.bitcoins.testkit.util

import java.io.File
import java.nio.file.Path

import org.bitcoins.core.util.BitcoinSLogger

import scala.util.Properties

object FileUtil extends BitcoinSLogger {

  def deleteTmpDir(dir: File): Boolean = {
    val isTemp = dir.getPath startsWith Properties.tmpDir
    if (!isTemp) {
      logger.warn(
        s"Directory $dir is not in the system temp dir location! You most likely didn't mean to delete this directory.")
      false
    } else if (!dir.isDirectory) {
      dir.delete()
    } else {
      dir.listFiles().foreach(deleteTmpDir)
      dir.delete()
    }
  }

  def deleteTmpDir(path: Path): Boolean = {
    deleteTmpDir(path.toFile)
  }
}
Example 17
Source File: Main.scala From sns with Apache License 2.0
package me.snov.sns

import akka.actor.ActorSystem
import akka.event.{ Logging, LoggingAdapter }
import akka.http.scaladsl.Http
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server._
import akka.stream.ActorMaterializer
import akka.util.Timeout
import com.typesafe.config.ConfigFactory
import me.snov.sns.actor._
import me.snov.sns.api._
import me.snov.sns.service.FileDbService
import me.snov.sns.util.ToStrict

import scala.concurrent.ExecutionContext
import scala.concurrent.duration._
import scala.util.Properties

object Main extends App with ToStrict {
  implicit val system = ActorSystem("sns")
  implicit val executor: ExecutionContext = system.dispatcher
  implicit val materializer: ActorMaterializer = ActorMaterializer()
  implicit val logger: LoggingAdapter = Logging(system, getClass)
  implicit val timeout = new Timeout(1.second)

  val config = ConfigFactory.load()
  val dbService = new FileDbService(Properties.envOrElse("DB_PATH", config.getString("db.path")))

  val dbActor = system.actorOf(DbActor.props(dbService), name = "DbActor")
  val homeActor = system.actorOf(HomeActor.props, name = "HomeActor")
  val subscribeActor = system.actorOf(SubscribeActor.props(dbActor), name = "SubscribeActor")
  val publishActor = system.actorOf(PublishActor.props(subscribeActor), name = "PublishActor")

  val routes: Route =
    toStrict {
      TopicApi.route(subscribeActor) ~
        SubscribeApi.route(subscribeActor) ~
        PublishApi.route(publishActor) ~
        HealthCheckApi.route ~
        HomeApi.route(homeActor)
    }

  logger.info("SNS v{} is starting", getClass.getPackage.getImplementationVersion)

  Http().bindAndHandle(
    handler = logRequestResult("akka-http-sns")(routes),
    interface = Properties.envOrElse("HTTP_INTERFACE", config.getString("http.interface")),
    port = Properties.envOrElse("HTTP_PORT", config.getString("http.port")).toInt
  )
}
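The configuration here layers Properties.envOrElse over a Typesafe Config default, so an environment variable overrides application.conf without being required. The pattern extracted (assuming an application.conf that defines http.port):

import com.typesafe.config.ConfigFactory
import scala.util.Properties

val config = ConfigFactory.load()
// The environment variable wins; application.conf supplies the fallback.
val httpPort: Int = Properties.envOrElse("HTTP_PORT", config.getString("http.port")).toInt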
Example 18
Source File: AutoVersionPlugin.scala From sbt-autoversion with Apache License 2.0
package autoversion

import autoversion.model.{ Commit, Tag }
import autoversion.model.BumpOrdering.bumpOrdering
import sbt._
import com.typesafe.sbt.{ GitPlugin, SbtGit }
import com.vdurmont.semver4j.Semver
import com.vdurmont.semver4j.Semver.SemverType
import sbtrelease.{ versionFormatError, ReleasePlugin, Version }
import sbtrelease.ReleasePlugin.autoImport.releaseVersion

import scala.util.Properties

object AutoVersionPlugin extends AutoPlugin {

  val autoImport = Keys

  import autoImport._

  override def trigger: PluginTrigger = allRequirements

  override def requires: Plugins = GitPlugin && ReleasePlugin

  override def projectSettings: Seq[Setting[_]] = Seq(
    tagNameCleaner := { _.stripPrefix("v") },
    bugfixRegexes := List("""\[?bugfix\]?.*""", """\[?fix\]?.*""", """\[?patch\]?.*""").map(_.r),
    minorRegexes := List(".*").map(_.r),
    majorRegexes := List("""\[?breaking\]?.*""", """\[?major\]?.*""").map(_.r),
    latestTag := findLatestTag.value,
    unreleasedCommits := listUnreleasedCommits.value,
    suggestedBump := suggestBump.value,
    releaseVersion := { ver =>
      Version(ver).map(v => v.bump(suggestedBump.value).withoutQualifier.string).getOrElse(versionFormatError)
    }
  )

  private lazy val findLatestTag = Def.task {
    val gitTags = runGit("tag", "--list").value
    val versions = gitTags.map(tag => Tag(tag, new Semver(tagNameCleaner.value(tag), SemverType.LOOSE)))
    versions.sortBy(_.version).lastOption
  }

  private lazy val listUnreleasedCommits = Def.taskDyn {
    val tag = latestTag.value.map(tag => s"${tag.raw}...").getOrElse("")
    Def.task {
      val commitListOutput = runGit("log", "--oneline", "--no-decorate", "--color=never", s"${tag}HEAD").value
      commitListOutput.map(Commit.apply).toVector
    }
  }

  private lazy val suggestBump = Def.task {
    val commits = unreleasedCommits.value
    val suggestedBumps =
      commits.flatMap(_.suggestedBump(majorRegexes.value, minorRegexes.value, bugfixRegexes.value))
    if (suggestedBumps.isEmpty)
      throw new RuntimeException("No commit matches either patterns for bugfix, minor or major bumps !")
    else suggestedBumps.max
  }

  private def runGit(args: String*) = Def.task {
    SbtGit.GitKeys.gitRunner
      .value(args: _*)(file("."), Logger.Null)
      .split(Properties.lineSeparator)
      .filter(_.trim.nonEmpty)
  }
}
Example 19
Source File: CodacyConfigurationFile.scala From codacy-analysis-cli with GNU Affero General Public License v3.0
package com.codacy.analysis.core.configuration

import better.files.File
import cats.syntax.show._
import com.codacy.analysis.core.files.Glob
import com.codacy.plugins.api.languages.{ Language, Languages }
import io.circe.generic.auto._
import io.circe.yaml.parser
import io.circe.{ Decoder, Json, _ }
import play.api.libs.json.JsValue

import scala.util.{ Properties, Try }

final case class LanguageConfiguration(extensions: Option[Set[String]])

final case class EngineConfiguration(excludePaths: Option[Set[Glob]],
                                     baseSubDir: Option[String],
                                     extraValues: Option[Map[String, JsValue]])

final case class CodacyConfigurationFile(engines: Option[Map[String, EngineConfiguration]],
                                         excludePaths: Option[Set[Glob]],
                                         languages: Option[Map[Language, LanguageConfiguration]]) {

  lazy val languageCustomExtensions: Map[Language, Set[String]] =
    languages.fold(Map.empty[Language, Set[String]])(_.map {
      case (lang, config) => (lang, config.extensions.getOrElse(Set.empty[String]))
    })
}

class CodacyConfigurationFileLoader {

  val filenames: Set[String] = Set(".codacy.yaml", ".codacy.yml")

  def load(directory: File): Either[String, CodacyConfigurationFile] = {
    search(directory).flatMap(configDir => parse(configDir.contentAsString))
  }

  def search(root: File): Either[String, File] = {
    filenames
      .map(root / _)
      .find(f => f.exists && f.isRegularFile)
      .fold[Either[String, File]](
        Left(s"Could not find Codacy configuration file. Make sure you have a file named like one of ${filenames
          .mkString(", ")}."))(Right(_))
  }

  def parse(yamlString: String): Either[String, CodacyConfigurationFile] = {
    for {
      json <- parser.parse(yamlString).left.map(_.show)
      cursor = HCursor.fromJson(json)
      configurationEither = Decoder[CodacyConfigurationFile].decodeAccumulating(cursor).toEither
      configuration <- configurationEither.left.map(_.toList.map(_.show).mkString(Properties.lineSeparator))
    } yield configuration
  }
}

object CodacyConfigurationFile {

  implicit val globDecoder: Decoder[Glob] = (c: HCursor) => c.as[String].map(Glob)

  implicit val languageKeyDecoder: KeyDecoder[Language] = (languageStr: String) => Languages.fromName(languageStr)

  implicit val decodeEngineConfiguration: Decoder[EngineConfiguration] =
    new Decoder[EngineConfiguration] {
      val engineConfigurationKeys = Set("enabled", "exclude_paths", "base_sub_dir")

      def apply(c: HCursor): Decoder.Result[EngineConfiguration] = {
        val extraKeys =
          c.keys.fold(List.empty[String])(_.to[List]).filter(key => !engineConfigurationKeys.contains(key))
        for {
          excludePaths <- c.downField("exclude_paths").as[Option[Set[Glob]]]
          baseSubDir <- c.downField("base_sub_dir").as[Option[String]]
        } yield {
          val extraToolConfigurations: Map[String, JsValue] = extraKeys.flatMap { extraKey =>
            c.downField(extraKey)
              .as[Json]
              .fold[Option[JsValue]]({ _ =>
                Option.empty
              }, { json =>
                Try(play.api.libs.json.Json.parse(json.noSpaces)).toOption
              })
              .map(value => (extraKey, value))
          }(collection.breakOut)

          EngineConfiguration(excludePaths, baseSubDir, Option(extraToolConfigurations).filter(_.nonEmpty))
        }
      }
    }

  implicit val decodeCodacyConfigurationFile: Decoder[CodacyConfigurationFile] =
    Decoder.forProduct3("engines", "exclude_paths", "languages")(CodacyConfigurationFile.apply)
}
Example 20
Source File: Json.scala From codacy-analysis-cli with GNU Affero General Public License v3.0
package com.codacy.analysis.cli.formatter

import java.io.PrintStream
import java.nio.file.Path

import com.codacy.analysis.core.model.Result
import com.codacy.plugins.api.results
import io.circe.Encoder
import io.circe.generic.auto._
import io.circe.syntax._

import scala.util.Properties

object Json extends FormatterCompanion {
  val name: String = "json"
  def apply(stream: PrintStream): Formatter = new Json(stream)
}

private[formatter] class Json(val stream: PrintStream) extends Formatter {

  private var alreadyPrinted: Boolean = false

  private implicit val categoryEncoder: Encoder[results.Pattern.Category.Value] =
    Encoder.encodeEnumeration(results.Pattern.Category)

  private implicit val levelEncoder: Encoder[results.Result.Level.Value] =
    Encoder.encodeEnumeration(results.Result.Level)

  private implicit val fileEncoder: Encoder[Path] = Encoder[String].contramap(_.toString)

  override def begin(): Unit = {
    stream.print("[")
  }

  override def end(): Unit = {
    stream.print("]")
    stream.print(Properties.lineSeparator)
    stream.flush()
  }

  def add(element: Result): Unit = {
    if (alreadyPrinted) stream.print(",") else alreadyPrinted = true
    stream.print(element.asJson.noSpaces)
  }
}
Example 21
Source File: VirtualScreeningTest.scala From MaRe with Apache License 2.0
package se.uu.it.mare

import java.io.File
import java.util.UUID

import scala.io.Source
import scala.util.Properties

import org.apache.spark.SharedSparkContext
import org.junit.runner.RunWith
import org.scalatest.FunSuite
import org.scalatest.junit.JUnitRunner

private object SDFUtils {
  def parseIDsAndScores(sdf: String): Array[(String, String)] = {
    sdf.split("\\n\\$\\$\\$\\$\\n").map { mol =>
      val lines = mol.split("\\n")
      (lines(0), lines.last)
    }
  }
}

@RunWith(classOf[JUnitRunner])
class VirtualScreeningTest extends FunSuite with SharedSparkContext {

  private val tmpDir = new File(Properties.envOrElse("TMPDIR", "/tmp"))

  test("Virtual Screening") {

    sc.hadoopConfiguration.set("textinputformat.record.delimiter", "\n$$$$\n")
    val mols = sc.textFile(getClass.getResource("sdf/molecules.sdf").getPath)

    // Parallel execution with MaRe
    val hitsParallel = new MaRe(mols)
      .map(
        inputMountPoint = TextFile("/input.sdf", "\n$$$$\n"),
        outputMountPoint = TextFile("/output.sdf", "\n$$$$\n"),
        imageName = "mcapuccini/oe:latest",
        command = "fred -receptor /var/openeye/hiv1_protease.oeb " +
          "-hitlist_size 0 " +
          "-conftest none " +
          "-dock_resolution Low " +
          "-dbase /input.sdf " +
          "-docked_molecule_file /output.sdf")
      .reduce(
        inputMountPoint = TextFile("/input.sdf", "\n$$$$\n"),
        outputMountPoint = TextFile("/output.sdf", "\n$$$$\n"),
        imageName = "mcapuccini/sdsorter:latest",
        command = "sdsorter -reversesort='FRED Chemgauss4 score' " +
          "-keep-tag='FRED Chemgauss4 score' " +
          "-nbest=30 " +
          "/input.sdf " +
          "/output.sdf")
      .rdd.collect.mkString("\n$$$$\n")

    // Serial execution
    val inputFile = new File(getClass.getResource("sdf/molecules.sdf").getPath)
    val dockedFile = new File(tmpDir, "mare_test_" + UUID.randomUUID.toString)
    dockedFile.createNewFile
    dockedFile.deleteOnExit
    val outputFile = new File(tmpDir, "mare_test_" + UUID.randomUUID.toString)
    outputFile.createNewFile
    outputFile.deleteOnExit
    DockerHelper.run(
      imageName = "mcapuccini/oe:latest",
      command = "fred -receptor /var/openeye/hiv1_protease.oeb " +
        "-hitlist_size 0 " +
        "-conftest none " +
        "-dock_resolution Low " +
        "-dbase /input.sdf " +
        "-docked_molecule_file /docked.sdf",
      bindFiles = Seq(inputFile, dockedFile),
      volumeFiles = Seq(new File("/input.sdf"), new File("/docked.sdf")),
      forcePull = false)
    DockerHelper.run(
      imageName = "mcapuccini/sdsorter:latest",
      command = "sdsorter -reversesort='FRED Chemgauss4 score' " +
        "-keep-tag='FRED Chemgauss4 score' " +
        "-nbest=30 " +
        "/docked.sdf " +
        "/output.sdf",
      bindFiles = Seq(dockedFile, outputFile),
      volumeFiles = Seq(new File("/docked.sdf"), new File("/output.sdf")),
      forcePull = false)
    val hitsSerial = Source.fromFile(outputFile).mkString

    // Test
    val parallel = SDFUtils.parseIDsAndScores(hitsParallel)
    val serial = SDFUtils.parseIDsAndScores(hitsSerial)
    assert(parallel.deep == serial.deep)
  }
}
Example 22
Source File: DockerHelperTest.scala From MaRe with Apache License 2.0
package se.uu.it.mare

import scala.util.Properties

import org.junit.runner.RunWith
import org.scalatest.FunSuite
import com.github.dockerjava.core.DefaultDockerClientConfig
import com.github.dockerjava.core.DockerClientBuilder
import com.github.dockerjava.core.command.PullImageResultCallback
import org.scalatest.junit.JUnitRunner

@RunWith(classOf[JUnitRunner])
class DockerHelperTest extends FunSuite {

  // Init Docker client
  private val configBuilder = DefaultDockerClientConfig.createDefaultConfigBuilder()
  if (Properties.envOrNone("DOCKER_HOST") != None) {
    configBuilder.withDockerHost(System.getenv("DOCKER_HOST"))
  }
  if (Properties.envOrNone("DOCKER_TLS_VERIFY") != None) {
    val tlsVerify = System.getenv("DOCKER_TLS_VERIFY") == "1"
    configBuilder.withDockerTlsVerify(tlsVerify)
  }
  if (Properties.envOrNone("DOCKER_CERT_PATH") != None) {
    configBuilder.withDockerCertPath(System.getenv("DOCKER_CERT_PATH"))
  }
  private val config = configBuilder.build
  private val dockerClient = DockerClientBuilder.getInstance(config).build

  test("Map-like Docker run, image not present") {

    // Remove image if present
    val localImgList = dockerClient.listImagesCmd
      .withImageNameFilter("busybox:1")
      .exec
    if (localImgList.size > 0) {
      dockerClient.removeImageCmd("busybox:1")
        .withForce(true)
        .exec
    }

    // Run docker
    val statusCode = DockerHelper.run(
      imageName = "busybox:1",
      command = "true",
      bindFiles = Seq(),
      volumeFiles = Seq(),
      forcePull = false)
    assert(statusCode == 0)
  }

  test("Map-like Docker run, image present") {

    // Pull image
    dockerClient.pullImageCmd("busybox:1")
      .exec(new PullImageResultCallback)
      .awaitSuccess()

    // Run docker
    val statusCode = DockerHelper.run(
      imageName = "busybox:1",
      command = "true",
      bindFiles = Seq(),
      volumeFiles = Seq(),
      forcePull = false)
    assert(statusCode == 0)
  }

  test("Map-like Docker run, force pull") {

    // Pull image
    dockerClient.pullImageCmd("busybox:1")
      .exec(new PullImageResultCallback)
      .awaitSuccess()

    // Run docker
    val statusCode = DockerHelper.run(
      imageName = "busybox:1",
      command = "true",
      bindFiles = Seq(),
      volumeFiles = Seq(),
      forcePull = true)
    assert(statusCode == 0)
  }
}
Example 23
Source File: ScoresApiSupport.scala From avoin-voitto with MIT License
package liigavoitto.scores

import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.http.scaladsl.model.HttpMethods._
import akka.http.scaladsl.model.{ HttpRequest, HttpResponse }
import akka.stream.ActorMaterializer
import liigavoitto.util.Logging
import org.joda.time.format.DateTimeFormat

import scala.concurrent.Await
import scala.concurrent.duration._
import scala.util.{ Failure, Properties, Success, Try }

trait ScoresApiSupport extends Logging {
  implicit val system: ActorSystem
  implicit val ec = system.dispatcher
  implicit val fm = ActorMaterializer()

  val oneHundredMegabytes = 100000000

  val apiUrl = Properties.envOrElse("SCORES_API_URL", "http://scores.api.yle.fi/v0/")
  val scoresAuth = Map[String, String](
    "app_id" -> Properties.envOrElse("SCORES_API_APP_ID", ""),
    "app_key" -> Properties.envOrElse("SCORES_API_APP_KEY", "")
  )

  val dateFormat = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss")
  val timeout = 15.seconds

  protected def get(url: String) = {
    Try {
      val request = HttpRequest(GET, url)
      log.info("REQUEST: " + request)
      Http().singleRequest(request).map(r => getStr(r))
    } match {
      case Success(s) => s
      case Failure(e) =>
        log.warn(s"Failed to get $url: " + e.getMessage)
        e.printStackTrace()
        throw new RuntimeException("Failure: " + e)
    }
  }

  protected def getStr(r: HttpResponse) = {
    Try {
      val entity = Await.result(r.entity.withSizeLimit(oneHundredMegabytes).toStrict(timeout), timeout)
      entity.data.decodeString("UTF-8")
    } match {
      case Success(s) => s
      case Failure(e) => throw new RuntimeException(s"Scores api failure: " + e.getMessage)
    }
  }
}
Example 24
Source File: App.scala From avoin-voitto with MIT License
package liigavoitto

import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.http.scaladsl.server.Directives
import akka.stream.ActorMaterializer
import liigavoitto.api.{ ApiRoutes, BaseRoutes }
import liigavoitto.util.Logging

import scala.util.Properties

object App extends Directives with ApiRoutes with Logging {
  implicit lazy val system = ActorSystem("liiga-voitto")

  lazy val port = Properties.envOrElse("APP_PORT", "45258").toInt

  def main(args: Array[String]) {
    implicit val executionContext = system.dispatcher
    implicit val fm = ActorMaterializer()

    Http().bindAndHandle(routes, "0.0.0.0", port)
    log.info(s"Server online at http://0.0.0.0:$port/")
  }

  val routes = BaseRoutes.baseRoutes ~ apiRoutes ~ localRoute
}
Example 25
Source File: ArticlesV2Transformer.scala From avoin-voitto with MIT License
package liigavoitto.transform

import liigavoitto.journalist.author.AuthorImage
import org.joda.time.format.ISODateTimeFormat

import scala.util.Properties

trait ArticlesV2Transformer {
  val dateFormat = ISODateTimeFormat.dateTimeNoMillis()
  val urlBase = Properties.envOrElse("ARTICLE_BASE_URL", "http://urheilu.test.c1t.yle.fi/urheilu/")

  def getV2Article(createArticle: Article): ArticleV2 = {
    val published = dateFormat.print(createArticle.datePublished)
    val url = urlBase + createArticle.id
    val externalContent = createArticle.externalContent
    val content = getContent(createArticle, externalContent, createArticle.surveyLink, createArticle.footNote)
    val subjects = createArticle.conceptIds.map(c => Subject(c))

    ArticleV2(
      createArticle.id,
      createArticle.language,
      Url(url),
      Headline(createArticle.title, None),
      createArticle.lead,
      Publisher("Yle Urheilu"),
      published,
      published,
      published,
      createArticle.coverage,
      content,
      subjects,
      List(),
      Some(List(author(createArticle.author)).flatten),
      createArticle.properties,
      createArticle.shortSummary
    )
  }

  private def getContent(createArticle: Article, externalContent: List[ExternalContent], surveyLink: Option[String], footNote: Option[String]) = {
    List(
      HeadingBlock(level = 1, text = createArticle.title, "heading"),
      textBlock(createArticle.lead)
    ) ++
      mainExternalContent(externalContent) ++
      (bodyTextBlocks(createArticle.body) ++ createArticle.gameEvents) ++
      createArticle.gameStats ++
      surveyLinkBlock(surveyLink) ++
      footNoteBlock(footNote) ++
      bottomExternalContent(externalContent)
  }

  private def withFootNote(content: List[ContentBlock], footNote: Option[String]) =
    footNote match {
      case Some(t) => content :+ textBlock(t)
      case None => content
    }

  private def mainExternalContent(ec: List[ExternalContent]) =
    ec.filter(_.embedLocation.isEmpty).map(externalContentBlock)

  private def bottomExternalContent(ec: List[ExternalContent]) =
    ec.filter(_.embedLocation.contains("bottom")).map(externalContentBlock)

  private def externalContentBlock(ec: ExternalContent) =
    ExternalContentV2("external-content", ec.html, ec.css, ec.scripts)

  private def surveyLinkBlock(sl: Option[String]) =
    sl.map(f => List(textBlock(f))).getOrElse(List())

  private def footNoteBlock(footNote: Option[String]) =
    footNote.map(f => List(textBlock(f))).getOrElse(List())

  private def textBlock(text: String) = TextBlock(text, "text")

  private def imageBlock(a: AuthorImage) = Some(ImageBlock(a.id, a.aspect, "image", a.alt))

  private def bodyTextBlocks(paragraphs: List[String]) = paragraphs.map(textBlock)

  private def author(a: Option[liigavoitto.journalist.author.Author]) =
    a match {
      case Some(s) => Some(Author("Person", s.id, s.name, s.organization, imageBlock(s.image)))
      case None => None
    }
}
Example 26
Source File: AuthorGenerator.scala From avoin-voitto with MIT License
package liigavoitto.journalist.author

import scala.util.Properties

case class Author(id: String, name: String, organization: String, image: AuthorImage)
case class AuthorImage(id: String, aspect: Double, alt: Option[String])

object AuthorGenerator {

  def getAuthor = Author(authorId, name, organization, image)

  def authorId = Properties.envOrElse("ESCENIC_AUTHOR_ID", "")

  def imageId = ""

  def name = "Voitto-robotti"

  def organization = "Yle"

  def image = AuthorImage(
    imageId,
    500.toDouble / 500.toDouble,
    Some(name)
  )
}