java.nio.file.Path Scala Examples
The following examples show how to use java.nio.file.Path.
Each example links back to the original project and source file it was taken from.
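As a quick orientation before the examples: java.nio.file.Path is the JDK's immutable representation of a file-system path, and most of the code below combines it with java.nio.file.Files for I/O. A minimal sketch of the core operations (not taken from any project below; the paths are placeholders):

import java.nio.file.{Files, Path, Paths}

val base: Path = Paths.get("/tmp", "demo")     // build a path from segments
val child: Path = base.resolve("data.txt")     // append a child segment
Files.createDirectories(base)                  // create missing parent directories
Files.write(child, "hello".getBytes)           // write bytes to a file
val parent: Path = child.getParent             // navigate to the parent path
val name: String = child.getFileName.toString  // last segment as a string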
Example 1
Source File: IntellijVMOptions.scala From sbt-idea-plugin with Apache License 2.0
package org.jetbrains.sbtidea.runIdea

import java.nio.file.Path

import scala.collection.JavaConverters._
import scala.collection.mutable

import org.jetbrains.sbtidea.Keys.IntelliJPlatform

case class IntellijVMOptions(platform: IntelliJPlatform,
                             pluginPath: Path,
                             ideaHome: Path,
                             xmx: Int = 1536,
                             xms: Int = 128,
                             reservedCodeCacheSize: Int = 240,
                             softRefLRUPolicyMSPerMB: Int = 50,
                             gc: String = "-XX:+UseConcMarkSweepGC",
                             gcOpt: String = "-XX:CICompilerCount=2",
                             noPCE: Boolean = false,
                             debug: Boolean = true,
                             debugPort: Int = 5005,
                             suspend: Boolean = false,
                             test: Boolean = false,
                             defaultOptions: Seq[String] = IntellijVMOptions.STATIC_OPTS) {

  private def build: Seq[String] = {
    val buffer = new mutable.ArrayBuffer[String]()
    buffer ++= defaultOptions
    buffer += s"-Xms${xms}m"
    buffer += s"-Xmx${xmx}m"
    buffer += s"-XX:ReservedCodeCacheSize=${reservedCodeCacheSize}m"
    buffer += s"-XX:SoftRefLRUPolicyMSPerMB=$softRefLRUPolicyMSPerMB"
    buffer += gc
    buffer += gcOpt
    val (system, config) =
      if (test)
        (ideaHome.resolve("test-system"), ideaHome.resolve("test-config"))
      else
        (ideaHome.resolve("system"), ideaHome.resolve("config"))
    buffer += s"-Didea.system.path=$system"
    buffer += s"-Didea.config.path=$config"
    buffer += s"-Dplugin.path=$pluginPath"
    if (noPCE) buffer += "-Didea.ProcessCanceledException=disabled"
    if (!test) buffer += "-Didea.is.internal=true"
    if (debug) {
      val suspendValue = if (suspend) "y" else "n"
      buffer += s"-agentlib:jdwp=transport=dt_socket,server=y,suspend=$suspendValue,address=$debugPort"
    }
    if (platform.platformPrefix.nonEmpty)
      buffer += s"-Didea.platform.prefix=${platform.platformPrefix}"
    buffer
  }

  def add(opts: Seq[String]): IntellijVMOptions = copy(defaultOptions = defaultOptions ++ opts)

  def add(opt: String): IntellijVMOptions = copy(defaultOptions = defaultOptions :+ opt)

  def asSeq: Seq[String] = build.filter(_.nonEmpty)

  def asJava: java.util.List[String] = asSeq.asJava
}

object IntellijVMOptions {

  val IDEA_MAIN = "com.intellij.idea.Main"

  val STATIC_OPTS: Seq[String] =
    """-Dsun.io.useCanonPrefixCache=false
      |-ea
      |-Djava.net.preferIPv4Stack=true
      |-XX:+HeapDumpOnOutOfMemoryError
      |-XX:-OmitStackTraceInFastThrow
      |-Dawt.useSystemAAFontSettings=lcd
      |-Dsun.java2d.renderer=sun.java2d.marlin.MarlinRenderingEngine
      |-Dsun.tools.attach.tmp.only=true
      |-Dide.no.platform.update=true
      |-Dkotlinx.coroutines.debug=off
      |-Djdk.attach.allowAttachSelf=true
      |-Djdk.module.illegalAccess.silent=true
      |-XX:MaxJavaStackTraceDepth=10000
      |-Didea.debug.mode=true
      |-Dapple.laf.useScreenMenuBar=true
      |-Duse.linux.keychain=false
      |-Didea.initially.ask.config=true
      |""".stripMargin.split("\n").toSeq
}
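A minimal usage sketch for the case class above. The paths are placeholder assumptions; IntelliJPlatform.IdeaUltimate is one of the platform values used elsewhere in this project:

import java.nio.file.Paths
import org.jetbrains.sbtidea.Keys.IntelliJPlatform

// hypothetical plugin and IDEA home locations
val vmOptions = IntellijVMOptions(
  platform = IntelliJPlatform.IdeaUltimate,
  pluginPath = Paths.get("/tmp/plugin"),
  ideaHome = Paths.get("/tmp/idea-home")
)
// append a custom option, then render the final JVM argument list
val jvmArgs: Seq[String] = vmOptions.add("-Dmy.custom.flag=true").asSeq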
Example 2
Source File: IdeaSources.scala From sbt-idea-plugin with Apache License 2.0
package org.jetbrains.sbtidea.download.idea

import java.net.URL
import java.nio.file.{Files, Path}

import org.jetbrains.sbtidea.download.FileDownloader
import org.jetbrains.sbtidea.{PluginLogger, pathToPathExt}
import sbt._
import org.jetbrains.sbtidea.download.api._

import scala.language.postfixOps

abstract class IdeaSources extends IdeaArtifact {
  override type R = IdeaSources
  override protected def usedInstaller: Installer[IdeaSources] = new Installer[IdeaSources] {
    override def isInstalled(art: IdeaSources)(implicit ctx: InstallContext): Boolean =
      ctx.baseDirectory / "sources.zip" exists
    override def downloadAndInstall(art: IdeaSources)(implicit ctx: InstallContext): Unit = {
      val file = FileDownloader(ctx.baseDirectory.getParent).download(art.dlUrl, optional = true)
      Files.move(file, ctx.baseDirectory.resolve("sources.zip"))
      PluginLogger.info(s"${caller.buildInfo.edition.name} sources installed")
    }
  }
}

class IdeaSourcesImpl(override val caller: AbstractIdeaDependency, dlUrlProvider: () => URL) extends IdeaSources {
  override def dlUrl: URL = dlUrlProvider()
}

object IdeaSourcesImpl {
  def apply(caller: AbstractIdeaDependency, dlUrlProvider: () => URL): IdeaSourcesImpl =
    new IdeaSourcesImpl(caller, dlUrlProvider)
}
Example 3
Source File: CommunityUpdater.scala From sbt-idea-plugin with Apache License 2.0
package org.jetbrains.sbtidea.download

import java.nio.file.Path

import org.jetbrains.sbtidea.download.api._
import org.jetbrains.sbtidea.download.idea.{IdeaDependency, IdeaDist}
import org.jetbrains.sbtidea.download.plugin.{LocalPluginRegistry, PluginDependency, PluginRepoUtils}
import org.jetbrains.sbtidea.Keys.IntellijPlugin
import org.jetbrains.sbtidea.{PluginLogger => log}
import org.jetbrains.sbtidea.download.jbr.JbrDependency

class CommunityUpdater(baseDirectory: Path, ideaBuildInfo: BuildInfo, plugins: Seq[IntellijPlugin], withSources: Boolean = true) {

  implicit protected val context: InstallContext =
    InstallContext(baseDirectory = baseDirectory, downloadDirectory = baseDirectory.getParent)

  implicit protected val remoteRepoApi: PluginRepoUtils = new PluginRepoUtils

  implicit protected val localRegistry: LocalPluginRegistry = new LocalPluginRegistry(baseDirectory)

  protected val ideaDependency: IdeaDependency = IdeaDependency(ideaBuildInfo)

  protected def dependencies: Seq[UnresolvedArtifact] =
    ideaDependency +:
      JbrDependency(baseDirectory, ideaBuildInfo, Seq(ideaDependency)) +:
      plugins.map(pl => PluginDependency(pl, ideaBuildInfo, Seq(ideaDependency)))

  def update(): Unit = topoSort(dependencies).foreach(update)

  def update(dependency: UnresolvedArtifact): Unit = {
    val resolved = dependency.resolve
    val (installed, nonInstalled) = resolved.partition(_.isInstalled)
    val numMissing = nonInstalled.size
    val numInstalled = installed.size
    val numTotal = installed.size + nonInstalled.size
    if (resolved.isEmpty)
      log.warn(s"- Nothing resolved for $dependency")
    else if (nonInstalled.nonEmpty)
      log.info(s"~ Resolving $dependency -> $numMissing/$numTotal new artifacts")
    else
      log.info(s"+ $dependency is up to date: $numInstalled/$numTotal")
    nonInstalled.foreach(_.install)
  }

  private[download] def topoSort(deps: Seq[UnresolvedArtifact]): Seq[UnresolvedArtifact] = {
    val indexed = topoSortImpl(deps)
    indexed
      .sortBy(- _._2)
      .map(_._1)
      .distinct
  }

  private def topoSortImpl(deps: Seq[UnresolvedArtifact], level: Int = 0): Seq[(UnresolvedArtifact, Int)] = {
    deps.map(_ -> level) ++ deps.flatMap(dep => topoSortImpl(dep.dependsOn, level + 1))
  }
}
Example 4
Source File: PluginXmlDetector.scala From sbt-idea-plugin with Apache License 2.0
package org.jetbrains.sbtidea.download

import java.net.URI
import java.nio.file.{FileSystems, Files, Path}
import java.util.Collections
import java.util.function.Predicate

private class PluginXmlDetector extends Predicate[Path] {

  import org.jetbrains.sbtidea.packaging.artifact._

  private val MAP = Collections.emptyMap[String, Any]()
  var result: String = _

  override def test(t: Path): Boolean = {
    if (!t.toString.endsWith(".jar"))
      return false

    val uri = URI.create(s"jar:${t.toUri}")
    try {
      using(FileSystems.newFileSystem(uri, MAP)) { fs =>
        val maybePluginXml = fs.getPath("META-INF", "plugin.xml")
        if (Files.exists(maybePluginXml)) {
          result = new String(Files.readAllBytes(maybePluginXml))
          true
        } else {
          false
        }
      }
    } catch {
      case e: java.util.zip.ZipError => throw new RuntimeException(s"Corrupt zip file: $t", e)
    }
  }
}
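A short driving sketch for the detector. Note the class is private to the org.jetbrains.sbtidea.download package, so code like this would have to live in that package; the jar path is a placeholder:

import java.nio.file.Paths

val detector = new PluginXmlDetector
val jar = Paths.get("/tmp/some-plugin.jar") // hypothetical jar location
if (detector.test(jar))
  println(detector.result) // plugin.xml content read from META-INF inside the jar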
Example 5
Source File: PluginDescriptor.scala From sbt-idea-plugin with Apache License 2.0
package org.jetbrains.sbtidea.download.plugin

import java.io.InputStream
import java.net.URL
import java.nio.file.{Files, Path}

import scala.xml._

case class PluginDescriptor(id: String,
                            name: String,
                            version: String,
                            sinceBuild: String,
                            untilBuild: String,
                            dependsOn: Seq[PluginDescriptor.Dependency] = Seq.empty) {

  def toXMLStr: String = {
    s"""
       |<idea-plugin>
       |  <name>$name</name>
       |  <id>$id</id>
       |  <version>$version</version>
       |  <idea-version since-build="$sinceBuild" until-build="$untilBuild"/>
       |  ${dependsOn.map(dep => s"""<depends optional="${dep.optional}">${dep.id}</depends>""").mkString("\n")}
       |</idea-plugin>
       |""".stripMargin
  }
}

object PluginDescriptor {

  private val OPTIONAL_KEY = "(optional) "
  private val OPTIONAL_ATTR = "optional"

  final case class Dependency(id: String, optional: Boolean)

  def load(str: String): PluginDescriptor =
    load(XML.withSAXParser(createNonValidatingParser).loadString(str))

  def load(url: URL): PluginDescriptor =
    load(XML.withSAXParser(createNonValidatingParser).load(url))

  def load(path: Path): PluginDescriptor =
    load(XML.withSAXParser(createNonValidatingParser).load(Files.newInputStream(path)))

  def load(stream: InputStream): PluginDescriptor =
    load(XML.withSAXParser(createNonValidatingParser).load(stream))

  //noinspection ExistsEquals : scala 2.10
  def load(xml: Elem): PluginDescriptor = {
    val id = (xml \\ "id").text
    val version = (xml \\ "version").text
    val name = (xml \\ "name").text
    val since = (xml \\ "idea-version").headOption.map(_.attributes("since-build").text).getOrElse("")
    val until = (xml \\ "idea-version").headOption.map(_.attributes("until-build").text).getOrElse("")
    val dependencies = (xml \\ "depends").map { node =>
      val id = node.text.replace(OPTIONAL_KEY, "")
      val optional = node.text.contains(OPTIONAL_KEY) ||
        node.attributes.asAttrMap.get(OPTIONAL_ATTR).exists(_ == "true")
      Dependency(id, optional)
    }
    val idOrName = if (id.isEmpty) name else id
    PluginDescriptor(idOrName, name, version, since, until, dependencies)
  }

  private def createNonValidatingParser = {
    val factory = javax.xml.parsers.SAXParserFactory.newInstance()
    factory.setValidating(false)
    factory.setFeature("http://xml.org/sax/features/validation", false)
    factory.setFeature("http://apache.org/xml/features/nonvalidating/load-dtd-grammar", false)
    factory.setFeature("http://apache.org/xml/features/nonvalidating/load-external-dtd", false)
    factory.setFeature("http://xml.org/sax/features/external-general-entities", false)
    factory.setFeature("http://xml.org/sax/features/external-parameter-entities", false)
    factory.newSAXParser()
  }
}
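Because toXMLStr emits the same tags that load(xml: Elem) parses, a descriptor can round-trip through its XML form. A minimal sketch, assuming a plugin.xml file on disk (the path is a placeholder):

import java.nio.file.Paths

val fromDisk = PluginDescriptor.load(Paths.get("plugin.xml")) // hypothetical file
// serialize to XML and parse it back
val roundTripped = PluginDescriptor.load(fromDisk.toXMLStr)
assert(roundTripped.id == fromDisk.id && roundTripped.version == fromDisk.version)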
Example 6
Source File: PluginArtifact.scala From sbt-idea-plugin with Apache License 2.0
package org.jetbrains.sbtidea.download.plugin

import java.net.URL
import java.nio.file.Path

import org.jetbrains.sbtidea.download.api._

trait PluginArtifact extends ResolvedArtifact {
  def caller: PluginDependency
}

case class RemotePluginArtifact(caller: PluginDependency, dlUrl: URL)
                               (implicit private val ctx: InstallContext, repo: PluginRepoApi, localRegistry: LocalPluginRegistryApi)
  extends PluginArtifact with UrlBasedArtifact {
  override type R = RemotePluginArtifact
  override def usedInstaller: RepoPluginInstaller = new RepoPluginInstaller(caller.buildInfo)
}

case class LocalPlugin(caller: PluginDependency, descriptor: PluginDescriptor, root: Path) extends PluginArtifact {
  override type R = LocalPlugin
  override protected def usedInstaller: Installer[LocalPlugin] = new Installer[LocalPlugin] {
    override def isInstalled(art: LocalPlugin)(implicit ctx: InstallContext): Boolean =
      art.root.toFile.exists()
    override def downloadAndInstall(art: LocalPlugin)(implicit ctx: InstallContext): Unit = ()
  }
}
Example 7
Source File: JbrBintrayResolver.scala From sbt-idea-plugin with Apache License 2.0
package org.jetbrains.sbtidea.download.jbr

import java.net.URL
import java.nio.file.Path
import java.util.{Locale, Properties}

import org.jetbrains.sbtidea.download.api.Resolver
import org.jetbrains.sbtidea.download.jbr.JbrDependency.VERSION_AUTO
import org.jetbrains.sbtidea.packaging.artifact.using
import org.jetbrains.sbtidea.{pathToPathExt, PluginLogger => log, _}
import sbt._

class JbrBintrayResolver extends Resolver[JbrDependency] {
  import JbrBintrayResolver._

  override def resolve(dep: JbrDependency): Seq[JbrArtifact] = {
    getJbrVersion(dep)
      .flatMap(buildJbrDlUrl)
      .map(url => JbrArtifact(dep, url))
      .toSeq
  }

  private[jbr] def buildJbrDlUrl(version: String): Option[URL] = splitVersion(version).flatMap {
    case (major, minor) =>
      Some(new URL(s"$BASE_URL/jbr-$major-$platform-$arch-b$minor.tar.gz"))
    case _ =>
      log.error(s"Unexpected version format from $version")
      None
  }

  private[jbr] def getJbrVersion(dep: JbrDependency): Option[String] = dep.buildInfo.jbrVersion match {
    case Some(VERSION_AUTO) => extractVersionFromIdea(dep.ideaRoot)
    case otherVersion@Some(_) => otherVersion
    case None => None
  }

  private def platform: String = System.getProperty("os.name", "").toLowerCase(Locale.ENGLISH) match {
    case value if value.startsWith("win") => "windows"
    case value if value.startsWith("lin") => "linux"
    case value if value.startsWith("mac") => "osx"
    case other => log.error(s"Unsupported jbr os: $other"); ""
  }

  private def arch: String = System.getProperty("os.arch") match {
    case "x86" => "x86"
    case _ => "x64"
  }

  private[jbr] def extractVersionFromIdea(ideaInstallationDir: Path): Option[String] = {
    val dependenciesFile = ideaInstallationDir / "dependencies.txt"
    val props = new Properties()
    using(dependenciesFile.inputStream)(props.load)
    props.getProperty("jdkBuild").lift2Option
  }
}

object JbrBintrayResolver {

  val BASE_URL = "https://cache-redirector.jetbrains.com/jetbrains.bintray.com/intellij-jbr"

  def splitVersion(version: String): Option[(String, String)] = {
    val lastIndexOfB = version.lastIndexOf('b')
    if (lastIndexOfB > -1)
      Some(version.substring(0, lastIndexOfB) -> version.substring(lastIndexOfB + 1))
    else {
      log.error(s"Malformed jbr version: $version")
      None
    }
  }
}
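splitVersion cuts the version string at the last 'b', matching the JBR archive naming scheme visible in the download URL template (jbr-<major>-<os>-<arch>-b<minor>.tar.gz). A worked example, using the same version format as the JBR archive referenced in this project's tests:

JbrBintrayResolver.splitVersion("11_0_5b520.38")
// => Some(("11_0_5", "520.38"))
// buildJbrDlUrl would then produce, on a 64-bit Linux host:
// https://cache-redirector.jetbrains.com/jetbrains.bintray.com/intellij-jbr/jbr-11_0_5-linux-x64-b520.38.tar.gz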
Example 8
Source File: JbrInstaller.scala From sbt-idea-plugin with Apache License 2.0
package org.jetbrains.sbtidea.download.jbr

import java.nio.file.{Files, Path}
import java.util.Properties

import org.jetbrains.sbtidea.download.api._
import org.jetbrains.sbtidea.download.{FileDownloader, NioUtils}
import org.jetbrains.sbtidea.packaging.artifact.using
import org.jetbrains.sbtidea.{pathToPathExt, PluginLogger => log, _}
import org.rauschig.jarchivelib.{ArchiveFormat, ArchiverFactory, CompressionType}
import sbt._

class JbrInstaller extends Installer[JbrArtifact] {
  import JbrInstaller._

  override def isInstalled(art: JbrArtifact)(implicit ctx: InstallContext): Boolean =
    (ctx.baseDirectory / JBR_DIR_NAME).exists

  override def downloadAndInstall(art: JbrArtifact)(implicit ctx: InstallContext): Unit = {
    val file = FileDownloader(ctx.downloadDirectory).download(art.dlUrl)
    install(file)
  }

  private[jbr] def install(dist: Path)(implicit ctx: InstallContext): Unit = {
    val archiver = ArchiverFactory.createArchiver(ArchiveFormat.TAR, CompressionType.GZIP)
    val tmpDir = Files.createTempDirectory(ctx.baseDirectory, "jbr-extract")
    log.info(s"extracting jbr to $tmpDir")
    archiver.extract(dist.toFile, tmpDir.toFile)
    val installPath = ctx.baseDirectory / JBR_DIR_NAME
    val children = tmpDir.list
    if (children.size == 1) {
      NioUtils.delete(installPath)
      Files.move(children.head, installPath)
      NioUtils.delete(tmpDir)
      log.info(s"installed JBR into $installPath")
    } else {
      log.error(s"Unexpected JBR archive structure, expected single directory")
    }
  }
}

object JbrInstaller {
  val JBR_DIR_NAME = "jbr"
}
Example 9
Source File: JbrDependency.scala From sbt-idea-plugin with Apache License 2.0
package org.jetbrains.sbtidea.download.jbr

import java.net.URL
import java.nio.file.Path

import org.jetbrains.sbtidea.download.BuildInfo
import org.jetbrains.sbtidea.download.api._

case class JbrDependency(ideaRoot: Path, buildInfo: BuildInfo, dependsOn: Seq[UnresolvedArtifact] = Seq.empty)
  extends UnresolvedArtifact {
  override type U = JbrDependency
  override type R = JbrArtifact
  override protected def usedResolver: JbrBintrayResolver = new JbrBintrayResolver
  override def toString: String = s"JbrDependency(${buildInfo.jbrVersion})"
}

object JbrDependency {
  val VERSION_AUTO = "__auto__"
}

case class JbrArtifact(caller: JbrDependency, dlUrl: URL) extends ResolvedArtifact with UrlBasedArtifact {
  override type R = JbrArtifact
  override protected def usedInstaller: JbrInstaller = new JbrInstaller
}
Example 10
Source File: IdeaDistInstaller.scala From sbt-idea-plugin with Apache License 2.0
package org.jetbrains.sbtidea.download.idea

import java.nio.file.attribute.PosixFilePermissions
import java.nio.file.{Files, Path}
import java.util.function.Consumer

import org.jetbrains.sbtidea.download.{BuildInfo, FileDownloader, IdeaUpdater, NioUtils}
import org.jetbrains.sbtidea.{PluginLogger => log}
import org.jetbrains.sbtidea.download.api._
import org.jetbrains.sbtidea.pathToPathExt
import sbt._

class IdeaDistInstaller(buildInfo: BuildInfo) extends Installer[IdeaDist] {

  override def isInstalled(art: IdeaDist)(implicit ctx: InstallContext): Boolean =
    IdeaUpdater.isDumbIdea ||
      (ctx.baseDirectory.toFile.exists() && ctx.baseDirectory.toFile.listFiles().nonEmpty)

  override def downloadAndInstall(art: IdeaDist)(implicit ctx: InstallContext): Unit =
    installDist(FileDownloader(ctx.baseDirectory.getParent).download(art.dlUrl))

  private def tmpDir(implicit ctx: InstallContext) =
    ctx.baseDirectory.getParent.resolve(s"${buildInfo.edition.name}-${buildInfo.buildNumber}-TMP")

  private[idea] def downloadArtifact(art: IdeaDist)(implicit ctx: InstallContext): Path =
    FileDownloader(ctx.baseDirectory.getParent).download(art.dlUrl)

  private[idea] def installDist(artifact: Path)(implicit ctx: InstallContext): Path = {
    import sys.process._
    import org.jetbrains.sbtidea.Keys.IntelliJPlatform.MPS

    log.info(s"Extracting ${buildInfo.edition.name} dist to $tmpDir")

    ctx.baseDirectory.toFile.getParentFile.mkdirs() // ensure "sdk" directory exists
    NioUtils.delete(ctx.baseDirectory)
    NioUtils.delete(tmpDir)
    Files.createDirectories(tmpDir)

    if (artifact.getFileName.toString.endsWith(".zip")) {
      val res = sbt.IO.unzip(artifact.toFile, tmpDir.toFile)
      if (res.isEmpty)
        throw new RuntimeException(s"Failed to unzip ${artifact.toFile} - bad archive")
    } else if (artifact.getFileName.toString.endsWith(".tar.gz")) {
      if (s"tar xfz $artifact -C $tmpDir --strip 1".! != 0) {
        throw new RuntimeException(s"Failed to install ${buildInfo.edition.name} dist: tar command failed")
      }
    } else throw new RuntimeException(s"Unexpected dist archive format(not zip or gzip): $artifact")

    if (ctx.baseDirectory.exists) {
      log.warn("IJ install directory already exists, removing...")
      NioUtils.delete(ctx.baseDirectory)
    }

    buildInfo.edition match {
      case MPS if Files.list(tmpDir).count() == 1 => // MPS may add additional folder level to the artifact
        log.info("MPS detected: applying install dir quirks")
        val actualDir = Files.list(tmpDir).iterator().next()
        Files.move(actualDir, ctx.baseDirectory)
        Files.deleteIfExists(tmpDir)
      case _ =>
        Files.move(tmpDir, ctx.baseDirectory)
    }

    fixAccessRights(ctx.baseDirectory)

    NioUtils.delete(artifact)
    log.info(s"Installed ${buildInfo.edition.name}($buildInfo) to ${ctx.baseDirectory}")
    ctx.baseDirectory
  }

  private def fixAccessRights(ideaDir: Path): Unit = {
    if (!System.getProperty("os.name").startsWith("Windows")) {
      val execPerms = PosixFilePermissions.fromString("rwxrwxr-x")
      val binDir = ideaDir.resolve("bin")
      try {
        Files
          .walk(binDir)
          .forEach(new Consumer[Path] {
            override def accept(t: Path): Unit =
              Files.setPosixFilePermissions(t, execPerms)
          })
      } catch {
        case e: Exception =>
          log.warn(s"Failed to fix access rights for $binDir: ${e.getMessage}")
      }
    }
  }
}
Example 11
Source File: CreatePluginsClasspath.scala From sbt-idea-plugin with Apache License 2.0
package org.jetbrains.sbtidea.tasks

import java.nio.file.Path

import org.jetbrains.sbtidea.Keys._
import org.jetbrains.sbtidea.PluginLogger
import org.jetbrains.sbtidea.download.BuildInfo
import org.jetbrains.sbtidea.download.api.InstallContext
import org.jetbrains.sbtidea.download.plugin._
import sbt.Keys._
import sbt._

object CreatePluginsClasspath {

  def apply(ideaBaseDir: Path, ideaBuildInfo: BuildInfo, plugins: Seq[IntellijPlugin], log: PluginLogger, moduleNameHint: String = ""): Classpath = {
    implicit val context: InstallContext = InstallContext(baseDirectory = ideaBaseDir, downloadDirectory = ideaBaseDir)
    implicit val remoteRepoApi: PluginRepoUtils = new PluginRepoUtils
    implicit val localRegistry: LocalPluginRegistry = new LocalPluginRegistry(ideaBaseDir)
    PluginLogger.bind(log)

    val resolved = plugins.map(pl => PluginDependency(pl, ideaBuildInfo, Seq.empty).resolve)
    val allDependencies = resolved.flatten

    val duplicates = resolved
      .filter(_.nonEmpty)
      .map { chain => chain.head -> resolved.filter(_.tail.contains(chain.head)).map(_.head) }
      .groupBy(_._1)
      .map { case (k, v) => k -> v.flatMap { case (_, j) => j } }
      .filter(_._2.nonEmpty)

    duplicates.collect {
      case (LocalPlugin(_, PluginDescriptor(id, _, _, _, _, _), _), parents) =>
        val thisNonOptionalDependency = PluginDescriptor.Dependency(id, optional = false)
        val parentIds = parents.collect {
          case LocalPlugin(_, PluginDescriptor(parentId, _, _, _, _, deps), _) if deps.contains(thisNonOptionalDependency) => parentId
        }
        if (parentIds.nonEmpty)
          log.warn(s"Plugin [$id] is already included by: [${parentIds.mkString(", ")}]${if (moduleNameHint.nonEmpty) s" in project '$moduleNameHint'" else ""}")
    }

    val roots = allDependencies.collect { case LocalPlugin(_, _, root) => root }.distinct

    val pluginsFinder = roots
      .foldLeft(PathFinder.empty) { (pathFinder, pluginRoot) =>
        if (pluginRoot.toFile.isDirectory)
          pathFinder +++ ((pluginRoot.toFile / "lib") * (globFilter("*.jar") -- "asm*.jar"))
        else
          pathFinder +++ pluginRoot.toFile
      }

    pluginsFinder.classpath
  }
}
Example 12
Source File: SearchPluginId.scala From sbt-idea-plugin with Apache License 2.0
package org.jetbrains.sbtidea.tasks

import java.net.URLEncoder
import java.nio.file.Path
import java.util.regex.Pattern

import org.jetbrains.sbtidea.PluginLogger
import org.jetbrains.sbtidea.download.BuildInfo
import org.jetbrains.sbtidea.download.plugin.LocalPluginRegistry
import com.eclipsesource.json._
import scalaj.http.Http

import scala.collection.JavaConverters._

class SearchPluginId(ideaRoot: Path, buildInfo: BuildInfo, useBundled: Boolean = true, useRemote: Boolean = true) {

  private val REPO_QUERY = "https://plugins.jetbrains.com/api/search/plugins?search=%s&build=%s"

  // true if plugin was found in the remote repo
  def apply(query: String): Map[String, (String, Boolean)] = {
    val local = if (useBundled) searchPluginIdLocal(query) else Map.empty
    val remote = if (useRemote) searchPluginIdRemote(query) else Map.empty
    remote ++ local
  }

  private def searchPluginIdLocal(query: String): Map[String, (String, Boolean)] = {
    val pattern = Pattern.compile(query)
    val registry = new LocalPluginRegistry(ideaRoot)
    val allDescriptors = registry.getAllDescriptors
    allDescriptors
      .filter(descriptor => pattern.matcher(descriptor.name).find() || pattern.matcher(descriptor.id).find())
      .map(descriptor => descriptor.id -> (descriptor.name, false))
      .toMap
  }

  // Apparently we can't use json4s when cross-compiling for sbt because there are BOTH no shared versions AND binary compatibility
  private def searchPluginIdRemote(query: String): Map[String, (String, Boolean)] = {
    try {
      val param = URLEncoder.encode(query, "UTF-8")
      val url = REPO_QUERY.format(param, s"${buildInfo.edition.edition}-${buildInfo.getActualIdeaBuild(ideaRoot)}")
      val data = Http(url).asString.body
      val json = Json.parse(data)
      val values = json.asArray().values().asScala.map(_.asObject())
      val names = values.map(_.getString("name", "") -> true)
      val ids = values.map(_.getString("xmlId", ""))
      ids.zip(names).toMap
    } catch {
      case ex: Throwable =>
        PluginLogger.warn(s"Failed to query IJ plugin repo: $ex")
        Map.empty
    }
  }
}
Example 13
Source File: TransformerSerialization.scala From seahorse-workflow-executor with Apache License 2.0
package io.deepsense.deeplang.doperables.spark.wrappers.transformers

import java.nio.file.{Files, Path}

import org.apache.commons.io.FileUtils
import org.scalatest.{BeforeAndAfter, Suite}

import io.deepsense.deeplang.doperables.Transformer
import io.deepsense.deeplang.doperables.dataframe.DataFrame
import io.deepsense.deeplang.{DeeplangIntegTestSupport, ExecutionContext}

trait TransformerSerialization extends Suite with BeforeAndAfter {

  var tempDir: Path = _

  before {
    tempDir = Files.createTempDirectory("writeReadTransformer")
  }

  after {
    FileUtils.deleteDirectory(tempDir.toFile)
  }
}

object TransformerSerialization {

  implicit class TransformerSerializationOps(private val transformer: Transformer) {

    def applyTransformationAndSerialization(path: Path, df: DataFrame)(
        implicit executionContext: ExecutionContext): DataFrame = {
      val result = transformer._transform(executionContext, df)
      val deserialized = loadSerializedTransformer(path)
      val resultFromSerializedTransformer = deserialized._transform(executionContext, df)
      DeeplangIntegTestSupport.assertDataFramesEqual(result, resultFromSerializedTransformer)
      result
    }

    def loadSerializedTransformer(path: Path)(
        implicit executionContext: ExecutionContext): Transformer = {
      val outputPath: Path = path.resolve(this.getClass.getName)
      transformer.save(executionContext, outputPath.toString)
      Transformer.load(executionContext, outputPath.toString)
    }
  }
}
Example 14
Source File: WriteReadTransformerIntegSpec.scala From seahorse-workflow-executor with Apache License 2.0
package io.deepsense.deeplang.doperations

import java.nio.file.{Files, Path}

import org.scalatest.BeforeAndAfter

import io.deepsense.deeplang.doperables.{PythonTransformer, TargetTypeChoices, TypeConverter}
import io.deepsense.deeplang.doperations.exceptions.DeepSenseIOException

class WriteReadTransformerIntegSpec extends WriteReadTransformerIntegTest with BeforeAndAfter {

  val tempDir: Path = Files.createTempDirectory("writeReadTransformer")

  "ReadTransformer" should {
    "read previously written Transformer" in {
      val transformer = new TypeConverter().setTargetType(TargetTypeChoices.BooleanTargetTypeChoice())
      val outputPath: Path = tempDir.resolve("TypeConverter")
      writeReadTransformer(transformer, outputPath.toString)
    }
  }

  "WriteTransformer" should {
    "overwrite the previously written Transformer if the overwrite parameter is set to true" in {
      val transformer1 = new PythonTransformer()
      val transformer2 = new TypeConverter().setTargetType(TargetTypeChoices.BooleanTargetTypeChoice())
      val outputPath: Path = tempDir.resolve("TypeConverter")
      writeTransformer(transformer1, outputPath.toString, overwrite = true)
      writeReadTransformer(transformer2, outputPath.toString)
    }
    "throw an exception if a Transformer with the given name exists and the overwrite parameter is set to false" in {
      val transformer = new TypeConverter().setTargetType(TargetTypeChoices.BooleanTargetTypeChoice())
      val outputPath: Path = tempDir.resolve("TypeConverter")
      writeTransformer(transformer, outputPath.toString, overwrite = true)
      a [DeepSenseIOException] shouldBe thrownBy {
        writeTransformer(transformer, outputPath.toString, overwrite = false)
      }
    }
  }

  after {
    tempDir.toFile.delete()
  }
}
Example 15
Source File: JSDependencyManifest.scala From jsdependencies with BSD 3-Clause "New" or "Revised" License
package org.scalajs.jsdependencies.core

import scala.collection.immutable.{Seq, Traversable}

import java.io._
import java.nio.charset.StandardCharsets
import java.nio.file.{Path, Files}

import org.scalajs.jsdependencies.core.json._

final class JSDependencyManifest(
    val origin: Origin,
    val libDeps: List[JSDependency]) {

  import JSDependencyManifest._

  override def equals(that: Any): Boolean = that match {
    case that: JSDependencyManifest =>
      this.origin == that.origin &&
      this.libDeps == that.libDeps
    case _ =>
      false
  }

  override def hashCode(): Int = {
    import scala.util.hashing.MurmurHash3._
    var acc = HashSeed
    acc = mix(acc, origin.##)
    acc = mixLast(acc, libDeps.##)
    finalizeHash(acc, 2)
  }

  override def toString(): String = {
    val b = new StringBuilder
    b ++= s"JSDependencyManifest(origin=$origin"
    if (libDeps.nonEmpty)
      b ++= s", libDeps=$libDeps"
    b ++= ")"
    b.result()
  }
}

object JSDependencyManifest {

  // "org.scalajs.jsdependencies.core.JSDependencyManifest".##
  private final val HashSeed = -902988673

  final val ManifestFileName = "JS_DEPENDENCIES"

  implicit object JSDepManJSONSerializer extends JSONSerializer[JSDependencyManifest] {
    @inline def optList[T](x: List[T]): Option[List[T]] =
      if (x.nonEmpty) Some(x) else None

    def serialize(x: JSDependencyManifest): JSON = {
      new JSONObjBuilder()
        .fld("origin", x.origin)
        .opt("libDeps", optList(x.libDeps))
        .toJSON
    }
  }

  implicit object JSDepManJSONDeserializer extends JSONDeserializer[JSDependencyManifest] {
    def deserialize(x: JSON): JSDependencyManifest = {
      val obj = new JSONObjExtractor(x)
      new JSDependencyManifest(
          obj.fld[Origin]("origin"),
          obj.opt[List[JSDependency]]("libDeps").getOrElse(Nil))
    }
  }

  def write(dep: JSDependencyManifest, output: Path): Unit = {
    val writer = Files.newBufferedWriter(output, StandardCharsets.UTF_8)
    try {
      write(dep, writer)
    } finally {
      writer.close()
    }
  }

  def write(dep: JSDependencyManifest, writer: Writer): Unit =
    writeJSON(dep.toJSON, writer)

  def read(file: Path): JSDependencyManifest = {
    val reader = Files.newBufferedReader(file, StandardCharsets.UTF_8)
    try {
      read(reader)
    } finally {
      reader.close()
    }
  }

  def read(reader: Reader): JSDependencyManifest =
    fromJSON[JSDependencyManifest](readJSON(reader))
}
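A small sketch of the Path-based read/write API above, using the ManifestFileName constant it defines (the target directory is a placeholder):

import java.nio.file.Paths

val manifestPath = Paths.get("target").resolve(JSDependencyManifest.ManifestFileName)
val manifest = JSDependencyManifest.read(manifestPath) // parse JS_DEPENDENCIES from disk
JSDependencyManifest.write(manifest, manifestPath)     // serialize it back out as JSON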
Example 16
Source File: JReports.scala From libisabelle with Apache License 2.0
package info.hupel.isabelle.japi

import java.nio.file.Path

import scala.collection.JavaConverters._

import info.hupel.isabelle._
import info.hupel.isabelle.api._

final class JMarkup(markup: Markup) {
  def getName(): String = markup._1
  def getProperties(): java.util.Map[String, String] = markup._2.toMap.asJava
}

final class JReports(reports: Reports) {
  def interpret(env: Environment): java.util.Map[Path, JRegions] =
    reports.interpret(env).regions.mapValues(new JRegions(_)).asJava
}

final class JRegion(val region: Region) extends java.lang.Iterable[JMarkup] {
  def iterator(): java.util.Iterator[JMarkup] =
    region.markup.iterator.map(new JMarkup(_)).asJava
  def getBody(): java.util.List[XML.Tree] = region.body.asJava
  def getStart(): Int = region.range.start
  def getEnd(): Int = region.range.end
  def getSubRegions(): JRegions = new JRegions(region.subRegions)
  def subMap(f: java.util.function.Function[JRegion, JRegion]): JRegion =
    new JRegion(region.subMap(r => f.apply(new JRegion(r)).region))
  def subFilter(f: java.util.function.Predicate[JRegion]): JRegion =
    new JRegion(region.subFilter(r => f.test(new JRegion(r))))
}

final class JRegions(val regions: Regions) extends java.lang.Iterable[JRegion] {
  def iterator(): java.util.Iterator[JRegion] =
    regions.items.iterator.map(new JRegion(_)).asJava
  def map(f: java.util.function.Function[JRegion, JRegion]): JRegions =
    new JRegions(regions.map(r => f.apply(new JRegion(r)).region))
  def filter(f: java.util.function.Predicate[JRegion]): JRegions =
    new JRegions(regions.filter(r => f.test(new JRegion(r))))
}
Example 17
Source File: Devel.scala From libisabelle with Apache License 2.0
package info.hupel.isabelle.setup

import java.io.OutputStreamWriter
import java.nio.file.{Files, Path}

import org.eclipse.jgit.api._
import org.eclipse.jgit.lib.TextProgressMonitor
import org.eclipse.jgit.storage.file._
import org.log4s._

trait Devel {
  def init(path: Path): Unit
  def update(path: Path): Unit
}

case class GitDevel(url: String, branch: String) extends Devel {

  private val logger = getLogger

  private val monitor = new TextProgressMonitor(new OutputStreamWriter(Console.err))

  def init(path: Path): Unit = {
    logger.debug(s"Cloning $branch from $url into $path")
    Files.createDirectories(path)
    new CloneCommand()
      .setDirectory(path.toFile)
      .setURI(url)
      .setBranch(branch)
      .setProgressMonitor(monitor)
      .call()
    ()
  }

  def update(path: Path): Unit = {
    logger.debug(s"Fetching $branch from $url into $path")
    val repo = new FileRepositoryBuilder()
      .findGitDir(path.toFile)
      .setup()
      .build()
    new Git(repo).pull()
      .setRemoteBranchName(branch)
      .call()
    ()
  }
}

object Devel {
  val knownDevels: Map[String, Devel] = Map(
    "isabelle-mirror" -> GitDevel("https://github.com/isabelle-prover/mirror-isabelle.git", "master")
  )
}
Example 18
Source File: PluginMock.scala From sbt-idea-plugin with Apache License 2.0
package org.jetbrains.sbtidea.download.plugin

import java.net.URI
import java.nio.file.{FileSystems, Files, Path}

import org.jetbrains.sbtidea.TmpDirUtils
import org.jetbrains.sbtidea.packaging.artifact
import org.jetbrains.sbtidea.Keys._
import org.jetbrains.sbtidea.download.plugin.PluginDescriptor.Dependency

import scala.collection.JavaConverters._

trait PluginMock extends TmpDirUtils {

  implicit class PluginMetaDataExt(metadata: PluginDescriptor) {
    def toPluginId: IntellijPlugin.Id = IntellijPlugin.Id(metadata.id, Some(metadata.version), None)
  }

  protected def createPluginJarMock(metaData: PluginDescriptor): Path = {
    val tmpDir = newTmpDir
    val targetPath = tmpDir.resolve(s"${metaData.name}.jar")
    val targetUri = URI.create("jar:" + targetPath.toUri)
    val opts = Map("create" -> "true").asJava
    artifact.using(FileSystems.newFileSystem(targetUri, opts)) { fs =>
      Files.createDirectory(fs.getPath("/", "META-INF"))
      Files.write(
        fs.getPath("/", "META-INF", "plugin.xml"),
        createPluginXmlContent(metaData).getBytes
      )
    }
    targetPath
  }

  protected def createPluginZipMock(metaData: PluginDescriptor): Path = {
    val tmpDir = newTmpDir
    val targetPath = tmpDir.resolve(s"${metaData.name}.zip")
    val targetUri = URI.create("jar:" + targetPath.toUri)
    val opts = Map("create" -> "true").asJava
    val mainPluginJar = createPluginJarMock(metaData)
    artifact.using(FileSystems.newFileSystem(targetUri, opts)) { fs =>
      val libRoot = fs.getPath("/", metaData.name, "lib")
      Files.createDirectories(libRoot)
      Files.copy(
        mainPluginJar,
        libRoot.resolve(mainPluginJar.getFileName.toString)
      )
    }
    targetPath
  }

  protected def createPluginXmlContent(metaData: PluginDescriptor): String = {
    val depStr = metaData.dependsOn.map {
      case Dependency(id, true) => s"""<depends optional="true">$id</depends>"""
      case Dependency(id, false) => s"<depends>$id</depends>"
    }
    s"""
       |<idea-plugin>
       |  <name>${metaData.name}</name>
       |  <id>${metaData.id}</id>
       |  <version>${metaData.version}</version>
       |  <idea-version since-build="${metaData.sinceBuild}" until-build="${metaData.untilBuild}"/>
       |  ${depStr.mkString("\n")}
       |</idea-plugin>
       |""".stripMargin
  }
}
Example 19
Source File: apiAdapter.scala From sbt-idea-plugin with Apache License 2.0
package sbt.jetbrains.ideaPlugin

import java.io.InputStream
import java.nio.file.{Files, Path}
import java.util.Optional

object apiAdapter {
  type CompileResult = Any
  val Using = sbt.io.Using
  type BuildDependencies = sbt.internal.BuildDependencies

  def projectJarName(project: sbt.Project): String = s"${project.id}.jar"

  def extractAffectedFiles(result: CompileResult): Seq[sbt.File] = Seq.empty

  object SbtCompilationBackCompat {
    type Analysis = sbt.internal.inc.Analysis
    type Relations = sbt.internal.inc.Relations
    type CompileResult = xsbti.compile.CompileResult
    type CompileAnalysis = xsbti.compile.CompileAnalysis
    type PreviousResult = xsbti.compile.PreviousResult
    type ClassFileManager = xsbti.compile.ClassFileManager
    type IncOptions = xsbti.compile.IncOptions

    val Analysis = sbt.internal.inc.Analysis

    implicit class CompileResultExt(val result: PreviousResult) extends AnyVal {
      def getAnalysis: Optional[CompileAnalysis] = result.analysis()
    }

    implicit class IncOptionsExt(val options: IncOptions) extends AnyVal {
      def withClassfileManager(manager: ClassFileManager): IncOptions =
        options.withExternalHooks(options.externalHooks().withExternalClassFileManager(manager))
    }

    object PreviousResult {
      def empty(): PreviousResult =
        xsbti.compile.PreviousResult.create(Optional.empty(), Optional.empty())
    }
  }

  // / method is missing because it's already implemented in sbt 1.3 PathOps
  final class PathExt(val path: Path) extends AnyVal {
    import scala.collection.JavaConverters.asScalaIteratorConverter

    def list: Seq[Path] = Files.list(path).iterator().asScala.toSeq
    def exists: Boolean = Files.exists(path)
    def isDir: Boolean = Files.isDirectory(path)
    def inputStream: InputStream = Files.newInputStream(path)
  }
}
Example 20
Source File: apiAdapter.scala From sbt-idea-plugin with Apache License 2.0
package sbt.jetbrains.ideaPlugin

import java.io.InputStream
import java.nio.file.{Files, Path}

import sbt.File
import sbt.inc._
import java.util.Optional

object apiAdapter {
  type CompileResult = sbt.inc.Analysis
  type BuildDependencies = sbt.BuildDependencies
  val Using = sbt.Using

  def projectJarName(project: sbt.Project): String = s"${project.id}.jar"

  def extractAffectedFiles(initialTimestamp: Long, result: Seq[CompileResult]): Seq[File] = {
    def processCompilation(compileResult: CompileResult): Seq[File] = {
      val lastCompilation = compileResult.compilations.allCompilations
        .find(_.startTime() >= initialTimestamp)
        .getOrElse(return Seq.empty)
      val startTime = lastCompilation.startTime()
      val res = compileResult.stamps.products.collect {
        case (f, s: LastModified) if s.value >= startTime => f
      }.toSeq
      res
    }
    val res = result.flatMap(processCompilation)
    res
  }

  object SbtCompilationBackCompat {
    type Analysis = sbt.inc.Analysis
    type Relations = sbt.inc.Relations
    type CompileResult = sbt.Compiler.CompileResult
    type CompileAnalysis = sbt.inc.Analysis
    type PreviousResult = sbt.Compiler.PreviousAnalysis
    type ClassFileManager = sbt.inc.ClassfileManager
    type IncOptions = sbt.inc.IncOptions

    val Analysis = sbt.inc.Analysis

    implicit class CompileResultExt(val result: PreviousResult) extends AnyVal {
      def getAnalysis: Optional[CompileAnalysis] = Optional.of(result.analysis)
    }

    implicit class IncOptionsExt(val options: IncOptions) extends AnyVal {
      def withClassfileManager(manager: ClassFileManager): IncOptions =
        options.withNewClassfileManager(() => manager)
    }

    object PreviousResult {
      def empty(): PreviousResult =
        sbt.Compiler.PreviousAnalysis(Analysis.Empty, None)
    }
  }

  final class PathExt(val path: Path) extends AnyVal {
    import scala.collection.JavaConverters.asScalaIteratorConverter

    def /(string: String): Path = path.resolve(string)
    def list: Seq[Path] = Files.list(path).iterator().asScala.toSeq
    def exists: Boolean = Files.exists(path)
    def isDir: Boolean = Files.isDirectory(path)
    def inputStream: InputStream = Files.newInputStream(path)
  }
}
Example 21
Source File: ClassShader.scala From sbt-idea-plugin with Apache License 2.0
package org.jetbrains.sbtidea.packaging.artifact

import java.nio.file.{Files, Path, StandardOpenOption}

import org.jetbrains.sbtidea.packaging.PackagingKeys.ShadePattern
import org.pantsbuild.jarjar.{NiceJJProcessor, _}
import org.pantsbuild.jarjar.util.EntryStruct
import sbt.Keys.TaskStreams

class ClassShader(patterns: Seq[ShadePattern])(implicit val streams: TaskStreams) {

  private val processor = new NiceJJProcessor(patterns.map {
    case ShadePattern(pat, res) =>
      val jRule = new Rule()
      jRule.setPattern(pat)
      jRule.setResult(res)
      jRule
  })

  private val entry = new EntryStruct

  if (streams != null)
    streams.log.info(s"Initialized shader with ${patterns.size} patterns")

  def applyShading(from: Path, to: Path)(cont: => Unit): Unit = {
    entry.data = Files.readAllBytes(from)
    entry.name = from.toString.substring(1).replace('\\', '/') // leading '/' cannot be used in ZFS; also fix class names produced under windows
    entry.time = -1
    if (processor.process(entry)) {
      val newPath = to.getFileSystem.getPath(entry.name)
      val parent = newPath.getParent
      if (parent != null && !Files.exists(parent))
        Files.createDirectories(parent)
      Files.write(newPath, entry.data, StandardOpenOption.CREATE)
    }
  }
}

class NoOpClassShader() extends ClassShader(Seq())(null) {
  override def applyShading(from: Path, to: Path)(cont: => Unit): Unit = cont
}
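A hedged construction sketch: ShadePattern pairs a jarjar-style pattern with a result template, and the implicit TaskStreams would come from an sbt task context. The concrete pattern below is illustrative, not taken from the project:

import org.jetbrains.sbtidea.packaging.PackagingKeys.ShadePattern

// remap com.google.** classes into a shaded namespace; "@1" echoes the captured wildcard
def makeShader(implicit ts: sbt.Keys.TaskStreams): ClassShader =
  new ClassShader(Seq(ShadePattern("com.google.**", "shaded.google.@1")))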
Example 22
Source File: DynamicPackager.scala From sbt-idea-plugin with Apache License 2.0
package org.jetbrains.sbtidea.packaging.artifact

import java.nio.file.{FileSystem, Files, Path}

import org.jetbrains.sbtidea.packaging.PackagingKeys.ExcludeFilter._
import sbt.Keys.TaskStreams

class DynamicPackager(myOutput: Path,
                      shader: ClassShader,
                      excludeFilter: ExcludeFilter,
                      incrementalCache: IncrementalCache)
                     (implicit private val streams: TaskStreams)
  extends SimplePackager(myOutput, shader, excludeFilter, incrementalCache) {

  override protected def outputExists(path: Path): Boolean = Files.exists(path)

  override protected def createOutputFS(output: Path): FileSystem = {
    if (!output.toFile.exists())
      Files.createDirectories(output)
    output.getFileSystem
  }

  override protected def createOutput(srcPath: Path, output: Path, outputFS: FileSystem): Path = {
    val srcTranslated = translatePath(srcPath, myOutput.getFileSystem)
    if (srcPath.toString.contains("META-INF"))
      myOutput.getParent.resolve(srcTranslated)
    else
      myOutput.resolve(srcTranslated)
  }

  private def translatePath(path: Path, toFS: FileSystem) = {
    val pathFS = path.getFileSystem
    val pathSeparator = pathFS.getSeparator
    val toSeparator = toFS.getSeparator
    val adapted = path.toString.replace(pathSeparator, toSeparator)
    toFS.getPath(adapted)
  }
}
Example 23
Source File: IncrementalCache.scala From sbt-idea-plugin with Apache License 2.0
package org.jetbrains.sbtidea.packaging.artifact

import java.io.{BufferedOutputStream, ByteArrayInputStream, ObjectInputStream, ObjectOutputStream}
import java.nio.file.{Files, Path}

import sbt.Keys.TaskStreams

import scala.collection.mutable

trait IncrementalCache extends AutoCloseable {
  def fileChanged(in: Path): Boolean
}

class DumbIncrementalCache extends IncrementalCache {
  override def fileChanged(in: Path): Boolean = true
  override def close(): Unit = ()
}

class PersistentIncrementalCache(private val root: Path)(implicit private val streams: TaskStreams) extends IncrementalCache {

  private val FILENAME = "sbtidea.cache"
  private val myFile = root.resolve(FILENAME)
  private val myData = loadOrCreate()

  type Data = mutable.HashMap[String, Long]

  private def loadFromDisk(): Either[String, Data] = {
    if (!Files.exists(myFile) || Files.size(myFile) <= 0)
      return Left("Cache file is empty or doesn't exist")
    val data = Files.readAllBytes(myFile)
    using(new ObjectInputStream(new ByteArrayInputStream(data))) { stream =>
      Right(stream.readObject().asInstanceOf[Data])
    }
  }

  private def loadOrCreate(): Data = loadFromDisk() match {
    case Left(message) =>
      streams.log.info(message)
      new Data()
    case Right(value) => value
  }

  private def saveToDisk(): Unit = {
    import java.nio.file.StandardOpenOption._
    if (!Files.exists(myFile.getParent)) {
      Files.createDirectories(myFile.getParent)
      Files.createFile(myFile)
    }
    using(new ObjectOutputStream(
      new BufferedOutputStream(
        Files.newOutputStream(myFile, CREATE, WRITE, TRUNCATE_EXISTING)))) { stream =>
      stream.writeObject(myData)
    }
  }

  override def close(): Unit = saveToDisk()

  override def fileChanged(in: Path): Boolean = {
    val newTimestamp = Files.getLastModifiedTime(in).toMillis
    val inStr = in.toString
    val lastTimestamp = myData.getOrElseUpdate(inStr, newTimestamp)
    val result = newTimestamp > lastTimestamp
    myData.put(inStr, newTimestamp)
    result
  }
}
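A usage sketch for the persistent variant. The TaskStreams instance would come from an sbt task context, and the cache directory is a placeholder assumption:

import java.nio.file.{Path, Paths}

def changedSince(file: Path)(implicit ts: sbt.Keys.TaskStreams): Boolean = {
  val cache = new PersistentIncrementalCache(Paths.get("target/cache"))
  try cache.fileChanged(file) // compares against the stored timestamp and records the new one
  finally cache.close()       // close() persists the timestamp map to target/cache/sbtidea.cache
}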
Example 24
Source File: DynamicDistBuilder.scala From sbt-idea-plugin with Apache License 2.0
package org.jetbrains.sbtidea.packaging.artifact

import java.nio.file.{Files, Path, Paths, StandardCopyOption}

import org.jetbrains.sbtidea.packaging.PackagingKeys.ExcludeFilter
import org.jetbrains.sbtidea.packaging._
import sbt.File
import sbt.Keys.TaskStreams

class DynamicDistBuilder(stream: TaskStreams, target: File, outputDir: File, private val hints: Seq[File]) extends DistBuilder(stream, target) {

  override def packageJar(to: Path, mappings: Mappings): Unit = {
    val isStatic = mappings.forall(_.metaData.static)
    if (isStatic)
      super.packageJar(to, mappings)
    else {
      val newOutputPath = outputDir.toPath.resolve("classes")
      if (!Files.exists(newOutputPath) || hints.isEmpty)
        packageNoHints(newOutputPath, mappings)
      else
        packageUsingHints(newOutputPath)
    }
  }

  private def packageUsingHints(newOutputPath: Path): Unit = {
    timed(s"Using ${hints.size} hints from previous compilation: $newOutputPath", {
      val key = "classes"
      val offset = key.length + 1
      for (hint <- hints) {
        val hintStr = hint.toString
        val relativisedStr = hintStr.substring(hintStr.indexOf(key) + offset)
        val newRelativePath = Paths.get(relativisedStr)
        val newAbsolutePath = newOutputPath.resolve(newRelativePath)
        if (newAbsolutePath.toFile.getParentFile == null || !newAbsolutePath.toFile.getParentFile.exists())
          Files.createDirectories(newAbsolutePath.getParent)
        Files.copy(hint.toPath, newAbsolutePath, StandardCopyOption.REPLACE_EXISTING)
      }
    })
  }

  private def packageNoHints(newOutputPath: Path, mappings: Mappings): Unit = {
    val packager = new DynamicPackager(newOutputPath, new NoOpClassShader, ExcludeFilter.AllPass, incrementalCache)
    timed(s"classes(${mappings.size}): $newOutputPath",
      packager.mergeIntoOne(mappings.map(_.from.toPath))
    )
  }

  override def patch(to: Path, mappings: Mappings): Unit = {
    streams.log.info(s"Patching has no effect when building dynamic artifact")
  }
}
Example 25
Source File: MappingArtifactBuilder.scala From sbt-idea-plugin with Apache License 2.0
package org.jetbrains.sbtidea.packaging.artifact

import java.nio.file.Path

import org.jetbrains.sbtidea.packaging._
import sbt._

abstract class MappingArtifactBuilder[T] extends ArtifactBuilder[Mappings, T] {

  protected def copySingleJar(mapping: Mapping): Unit

  protected def copyDirs(mappings: Mappings): Unit

  protected def packageJar(to: Path, mappings: Mappings): Unit

  protected def patch(to: Path, mappings: Mappings): Unit = ()

  protected def unknown(mappings: Mappings): Unit = ()

  protected def createResult: T

  protected def mappingFilter(m: Mapping): Boolean = !m.from.toString.endsWith("jar") || !m.to.exists()

  private def preFilterMappings(mappings: Mappings): Mappings = mappings.filter(mappingFilter)

  protected def processMappings(incremental: Seq[(sbt.File, Seq[Mapping])]): Unit = {
    incremental.foreach {
      case (to, Seq(mapping@Mapping(from, _, _))) if to.name.endsWith("jar") && from.name.endsWith("jar") =>
        copySingleJar(mapping)
      case (to, mappings) if to.name.endsWith("jar") =>
        packageJar(to.toPath, mappings)
      case (to, mappings) if to.toString.contains("jar!") =>
        patch(to.toPath, mappings)
      case (_, mapping) =>
        copyDirs(mapping)
      case other =>
        unknown(other._2)
    }
  }

  protected def transformMappings(structure: Mappings): Seq[(sbt.File, Seq[Mapping])] = {
    val filtered = preFilterMappings(structure)
    val (overrides, normal) = filtered.partition(_.to.toString.contains("jar!"))
    val groupedNormal = normal.groupBy(_.to)
    val groupedOverrides = overrides.groupBy(_.to)
    groupedNormal.toSeq ++ groupedOverrides.toSeq
  }

  override def produceArtifact(structure: Mappings): T = {
    val transformed = transformMappings(structure)
    processMappings(transformed)
    createResult
  }
}
Example 26
Source File: IdeaMock.scala From sbt-idea-plugin with Apache License 2.0
package org.jetbrains.sbtidea.download.idea

import java.net.{URI, URL}
import java.nio.file.{Files, Path, Paths}
import java.util.zip.{ZipEntry, ZipInputStream}

import org.jetbrains.sbtidea.download.BuildInfo
import org.jetbrains.sbtidea.packaging.artifact
import org.jetbrains.sbtidea.{Keys, TmpDirUtils}
import org.jetbrains.sbtidea.Keys._
import org.jetbrains.sbtidea.download.jbr.JbrDependency

trait IdeaMock extends TmpDirUtils {
  protected val IDEA_VERSION = "192.5728.12"
  protected val IDEA_EDITION = "IU"
  protected val IDEA_DIST = s"idea$IDEA_EDITION-$IDEA_VERSION.zip"
  protected val IDEA_DIST_PATH = s"/org/jetbrains/sbtidea/download/$IDEA_DIST"
  protected val IDEA_BUILDINFO: BuildInfo =
    BuildInfo(IDEA_VERSION, Keys.IntelliJPlatform.IdeaUltimate, Some(JbrDependency.VERSION_AUTO))
  protected val IDEA_DEP: IdeaDependency = IdeaDependency(IDEA_BUILDINFO)
  protected val IDEA_ART: IdeaDist = IdeaDistImpl(IDEA_DEP, new URL("file:"))

  protected val bundledPlugins: List[Keys.IntellijPlugin] =
    "org.jetbrains.plugins.yaml".toPlugin ::
      "com.intellij.properties".toPlugin :: Nil

  protected def installIdeaMock: Path = {
    val tmpDir = newTmpDir
    val installDir = Files.createDirectory(tmpDir.resolve(IDEA_VERSION))
    val stream = getClass.getResourceAsStream(IDEA_DIST_PATH)
    artifact.using(new ZipInputStream(stream)) { zip =>
      var entry: ZipEntry = zip.getNextEntry
      while (entry != null) {
        val toPath = installDir.resolve(entry.getName)
        if (entry.isDirectory)
          Files.createDirectory(toPath)
        else
          Files.copy(zip, toPath)
        entry = zip.getNextEntry
      }
    }
    installDir
  }

  protected def getDistCopy: Path = Files.copy(getIdeaDistMockPath, newTmpDir.resolve(IDEA_DIST))

  protected def getIdeaDistMockURI: URI = getClass.getResource(IDEA_DIST_PATH).toURI

  protected def getIdeaDistMockPath: Path = Paths.get(getIdeaDistMockURI)
}
Example 27
Source File: Configuration.scala From libisabelle with Apache License 2.0
package info.hupel.isabelle.api

import java.nio.file.Path

final case class Configuration(paths: List[Path], session: String) {
  override def toString: String =
    s"session $session" + (paths match {
      case Nil => ""
      case ps => " at " + ps.mkString(":")
    })
}

object Configuration {
  def simple(session: String): Configuration = Configuration(Nil, session)
}
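A worked example of the toString defined above ("HOL" is just an illustrative session name):

import java.nio.file.Paths

val simple = Configuration.simple("HOL")
// simple.toString == "session HOL"

val located = Configuration(List(Paths.get("sessions")), "HOL")
// located.toString == "session HOL at sessions"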
Example 28
Source File: IntellijPluginResolverTestBase.scala From sbt-idea-plugin with Apache License 2.0
package org.jetbrains.sbtidea.download.plugin

import java.nio.file.{Path, Paths}

import org.jetbrains.sbtidea.Keys._
import org.jetbrains.sbtidea.download.plugin.PluginDescriptor.Dependency
import org.jetbrains.sbtidea.{Keys, download, pathToPathExt}
import org.scalatest.Inside
import sbt._

import scala.language.implicitConversions

abstract class IntellijPluginResolverTestBase extends IntellijPluginInstallerTestBase with Inside {

  protected val pluginA: PluginDescriptor = PluginDescriptor("org.A", "A - bundled", "0", "", "")
  protected val pluginB: PluginDescriptor = PluginDescriptor("org.B", "B - remote", "0", "", "")
  protected val pluginC: PluginDescriptor = PluginDescriptor("org.C", "C - remote", "0", "", "",
    Seq(Dependency("org.A", optional = true), Dependency("org.B", optional = false)))
  protected val pluginD: PluginDescriptor = PluginDescriptor("org.D", "D - remote cyclic", "0", "", "",
    Seq(Dependency("org.E", optional = false), Dependency("org.A", optional = true)))
  protected val pluginE: PluginDescriptor = PluginDescriptor("org.E", "C - remote cyclic", "0", "", "",
    Seq(Dependency("org.D", optional = false), Dependency("org.C", optional = true)))

  protected val descriptorMap: Map[String, PluginDescriptor] =
    Seq(pluginA, pluginB, pluginC, pluginD, pluginE).map(p => p.id -> p).toMap

  protected implicit def descriptor2Plugin(descriptor: PluginDescriptor): PluginDependency =
    PluginDependency(Keys.IntellijPlugin.Id(descriptor.id, None, None),
      IDEA_BUILDINFO,
      descriptor.dependsOn.map(p => plugin2PluginDep(p.id.toPlugin)))

  override protected implicit val localRegistry: LocalPluginRegistryApi = new LocalPluginRegistryApi {
    override def getPluginDescriptor(ideaPlugin: Keys.IntellijPlugin): Either[String, PluginDescriptor] = ideaPlugin match {
      case IntellijPlugin.Url(_) =>
        throw new IllegalArgumentException("url plugin not supported")
      case IntellijPlugin.Id(id, _, _) =>
        descriptorMap.get(id).filterNot(_.name.contains("remote")).toRight("plugin is remote")
      case IntellijPlugin.BundledFolder(name) =>
        descriptorMap.get(name).filterNot(_.name.contains("remote")).toRight("plugin is remote")
    }

    override def isPluginInstalled(ideaPlugin: Keys.IntellijPlugin): Boolean = ideaPlugin match {
      case IntellijPlugin.Url(_) => false
      case IntellijPlugin.Id(id, _, _) => descriptorMap.get(id).exists(_.name.contains("bundled"))
      case IntellijPlugin.BundledFolder(name) => descriptorMap.get(name).exists(_.name.contains("bundled"))
    }

    override def getAllDescriptors: Seq[PluginDescriptor] = descriptorMap.values.toSeq

    override def markPluginInstalled(ideaPlugin: Keys.IntellijPlugin, to: Path): Unit = ()

    override def getInstalledPluginRoot(ideaPlugin: Keys.IntellijPlugin): Path =
      Paths.get("INVALID")
  }

  override protected implicit val repoAPI: PluginRepoApi = new PluginRepoApi {
    override def getRemotePluginXmlDescriptor(idea: download.BuildInfo, pluginId: String, channel: Option[String]): Either[Throwable, PluginDescriptor] =
      descriptorMap.get(pluginId).filter(_.name.contains("remote")).toRight(null)

    override def getPluginDownloadURL(idea: download.BuildInfo, pluginInfo: Keys.IntellijPlugin.Id): URL =
      new URL("file:INVALID")

    override def getLatestPluginVersion(idea: download.BuildInfo, pluginId: String, channel: Option[String]): Either[Throwable, String] =
      throw new IllegalArgumentException
  }
}
Example 29
Source File: IntellijPluginInstallerTestBase.scala From sbt-idea-plugin with Apache License 2.0
package org.jetbrains.sbtidea.download.plugin

import java.nio.file.Path

import org.jetbrains.sbtidea.Keys._
import org.jetbrains.sbtidea.ConsoleLogger
import org.jetbrains.sbtidea.download.api.InstallContext
import org.jetbrains.sbtidea.download.idea.IdeaMock
import org.jetbrains.sbtidea.download.BuildInfo
import org.jetbrains.sbtidea.download.jbr.JbrDependency
import org.scalatest.{FunSuite, Matchers}
import org.jetbrains.sbtidea.pathToPathExt
import sbt._

import scala.language.implicitConversions

trait IntellijPluginInstallerTestBase extends FunSuite with Matchers with IdeaMock with PluginMock with ConsoleLogger {

  protected lazy val ideaRoot: Path = installIdeaMock

  protected val pluginsRoot: Path = ideaRoot / "plugins"

  protected val ideaBuild: BuildInfo = BuildInfo(IDEA_VERSION, IntelliJPlatform.IdeaUltimate, Some(JbrDependency.VERSION_AUTO))

  protected implicit val defaultBuildInfo: BuildInfo = IDEA_BUILDINFO

  protected implicit val localRegistry: LocalPluginRegistryApi = new LocalPluginRegistry(ideaRoot)

  protected implicit val repoAPI: PluginRepoApi = new PluginRepoUtils

  protected def createInstaller(implicit buildInfo: BuildInfo = IDEA_BUILDINFO): RepoPluginInstaller =
    new RepoPluginInstaller(buildInfo)

  protected implicit def plugin2PluginDep(pl: IntellijPlugin)(implicit buildInfo: BuildInfo): PluginDependency =
    PluginDependency(pl, buildInfo)

  protected implicit def plugin2PluginArt(pl: IntellijPlugin): RemotePluginArtifact =
    RemotePluginArtifact(pl, new URL("file:"))

  protected implicit def installContext: InstallContext =
    InstallContext(ideaRoot, ideaRoot.getParent)
}
Example 30
Source File: JbrInstallerTest.scala From sbt-idea-plugin with Apache License 2.0
package org.jetbrains.sbtidea.download.jbr

import java.nio.file.{Files, Path}

import org.jetbrains.sbtidea.download.api.InstallContext
import org.jetbrains.sbtidea.{ConsoleLogger, TmpDirUtils}
import org.jetbrains.sbtidea.download.idea.IdeaMock
import org.scalatest.{FunSuite, Matchers}
import org.jetbrains.sbtidea.pathToPathExt
import sbt._

class JbrInstallerTest extends FunSuite with Matchers with IdeaMock with TmpDirUtils with ConsoleLogger {

  private val jbrFileName = "jbr-11_0_5-linux-x64-b520.38.tar.gz"
  private val jbrMock = s"/org/jetbrains/sbtidea/download/$jbrFileName"

  private def getMockJbrCopy: Path = {
    val tmpDir = Files.createTempDirectory(getClass.getSimpleName)
    val target = tmpDir / jbrFileName
    Files.copy(getClass.getResourceAsStream(jbrMock), target)
    target
  }

  test("detect jbr is not installed") {
    val ideaRoot = installIdeaMock
    implicit val ctx: InstallContext = InstallContext(ideaRoot, ideaRoot / "downloads")
    val jbrArtifact = JbrArtifact(JbrDependency.apply(ideaRoot, IDEA_BUILDINFO), new URL("file:"))
    val installer = new JbrInstaller
    installer.isInstalled(jbrArtifact) shouldBe false
  }

  test("detect jbr is installed") {
    val ideaRoot = installIdeaMock
    Files.createDirectory(ideaRoot / "jbr")
    implicit val ctx: InstallContext = InstallContext(ideaRoot, ideaRoot / "downloads")
    val jbrArtifact = JbrArtifact(JbrDependency.apply(ideaRoot, IDEA_BUILDINFO), new URL("file:"))
    val installer = new JbrInstaller
    installer.isInstalled(jbrArtifact) shouldBe true
  }

  test("jbr is installed") {
    val ideaRoot = installIdeaMock
    implicit val ctx: InstallContext = InstallContext(ideaRoot, ideaRoot / "downloads")
    val installer = new JbrInstaller
    installer.install(getMockJbrCopy)
    ideaRoot.toFile.list should contain ("jbr")
    (ideaRoot / "jbr").toFile.list should contain allElementsOf Seq("lib", "bin", "conf", "include", "legal", "release")
  }
}
Example 31
Source File: TmpDirUtils.scala From sbt-idea-plugin with Apache License 2.0
package org.jetbrains.sbtidea

import java.nio.file.attribute.FileAttribute
import java.nio.file.{Files, Path}

import org.jetbrains.sbtidea.download.NioUtils

trait TmpDirUtils {

  def createTempFile(prefix: String, suffix: String, fileAttributes: FileAttribute[_]*): Path = {
    val res = Files.createTempFile(prefix, suffix, fileAttributes: _*)
    TmpDirUtils.allocatedTmpDirs += res
    res
  }

  def newTmpDir: Path = {
    val dir = Files.createTempDirectory(getClass.getName)
    TmpDirUtils.allocatedTmpDirs += dir
    dir
  }
}

object TmpDirUtils {
  private val allocatedTmpDirs = new scala.collection.mutable.ListBuffer[Path]()

  Runtime.getRuntime.addShutdownHook(new Thread() {
    override def run(): Unit = allocatedTmpDirs.foreach(NioUtils.delete)
  })
}
Example 32
Source File: BuildIndex.scala From sbt-idea-plugin with Apache License 2.0 | 5 votes |
package org.jetbrains.sbtidea.searchableoptions

import java.nio.file.{Files, Path}
import java.util.function.Predicate

import org.jetbrains.sbtidea.Keys.{intellijMainJars, intellijVMOptions}
import org.jetbrains.sbtidea.packaging.PackagingKeys.packageArtifact
import org.jetbrains.sbtidea.packaging._
import org.jetbrains.sbtidea.packaging.artifact.DistBuilder
import org.jetbrains.sbtidea.runIdea.IdeaRunner
import org.jetbrains.sbtidea.{pathToPathExt, PluginLogger, SbtPluginLogger}
import sbt.Keys.{streams, target}
import sbt._

import scala.collection.JavaConverters._

object BuildIndex {

  private val IDX_DIR = "search"
  type IndexElement = (Path, Path) // jar file -> options.xml

  def createTask: Def.Initialize[Task[Unit]] = Def.task {
    implicit val log: PluginLogger = new SbtPluginLogger(streams.value)

    val ideaCP = intellijMainJars.value.map(_.data.toPath)
    val pluginRoot = packageArtifact.value.toPath
    val indexOutputPath = target.value / "searchableOptions"
    val indexerCMD = "traverseUI" :: indexOutputPath.getCanonicalPath :: "true" :: Nil
    val vmOptions = intellijVMOptions.value

    log.info("Building searchable plugin options index...")
    val runner = new IdeaRunner(ideaCP, vmOptions, blocking = true, programArguments = indexerCMD)
    runner.run()

    val indexRoots = getIndexFiles(pluginRoot, indexOutputPath.toPath)
    val indexedMappings = prepareMappings(indexRoots)

    if (indexRoots.isEmpty)
      log.error(s"No options search index built for plugin root: $pluginRoot")

    if (indexedMappings.isEmpty)
      log.error(s"No options search index packaged from given roots: $indexRoots")

    indexedMappings.foreach { case (jar, mapping) =>
      new DistBuilder(streams.value, target.value).patch(jar, Seq(mapping))
    }

    log.info(s"Successfully merged options index")
  }

  private def getIndexFiles(pluginOutputDir: Path, indexOutputDir: Path): Seq[IndexElement] = {
    val predicate = new Predicate[Path] {
      override def test(p: Path): Boolean = p.toString.endsWith("jar")
    }

    val allArtifactJars = Files.walk(pluginOutputDir)
      .filter(predicate)
      .iterator().asScala
      .map(path => path.getFileName.toString -> path)
      .toMap

    val indexesForPlugin: Seq[(Path, Path)] = indexOutputDir
      .list
      .filter(idx => allArtifactJars.contains(idx.getFileName.toString))
      .filter(idx => (idx / IDX_DIR).exists && (idx / IDX_DIR).isDir && (idx / IDX_DIR).list.nonEmpty)
      .foldLeft(Seq.empty[IndexElement]) { (acc, idx) =>
        acc :+ (allArtifactJars(idx.getFileName.toString) -> (idx / IDX_DIR).list.head)
      }

    indexesForPlugin
  }

  private def prepareMappings(indexes: Seq[IndexElement]): Seq[(Path, Mapping)] =
    indexes.map { case (jar, indexXML) =>
      jar -> Mapping(indexXML.toFile,
        new File(s"$jar!/search/searchableOptions.xml"),
        MappingMetaData.EMPTY.copy(kind = MAPPING_KIND.MISC))
    }
}
Example 33
Source File: PluginXmlPatcher.scala From sbt-idea-plugin with Apache License 2.0 | 5 votes |
package org.jetbrains.sbtidea.xml

import java.nio.file.{Files, Path}

import org.jetbrains.sbtidea.PluginLogger
import org.jetbrains.sbtidea.{PluginLogger => log}
import org.jetbrains.sbtidea.Keys.pluginXmlOptions

class PluginXmlPatcher(input: Path, createCopy: Boolean = false) {

  def patch(options: pluginXmlOptions): Path = try {
    val content = new String(Files.readAllBytes(input))
    if (content.isEmpty) {
      log.error(s"Patching failed: $input exists but is empty")
      input
    } else {
      val result = transform(content, options)
      if (!createCopy)
        Files.write(input, result.getBytes)
      else
        Files.write(Files.createTempFile("", "plugin.xml"), result.getBytes)
    }
  } catch {
    case e: Exception =>
      log.error(s"Patching failed: $e")
      input
  }

  private def transform(input: String, options: pluginXmlOptions): String = {
    var content = input
    Option(options.version)
      .foreach(value => content = tag(content, "version", value))
    Option(options.pluginDescription)
      .foreach(value => content = tag(content, "description", value))
    Option(options.changeNotes)
      .foreach(value => content = tag(content, "change-notes", value))

    val ideaVersionTag = (Option(options.sinceBuild), Option(options.untilBuild)) match {
      case (Some(since), Some(until)) => s"""<idea-version since-build="$since" until-build="$until"/>"""
      case (None, Some(until))        => s"""<idea-version until-build="$until"/>"""
      case (Some(since), None)        => s"""<idea-version since-build="$since"/>"""
      case _                          => ""
    }

    if (ideaVersionTag.nonEmpty)
      content = content.replaceAll("<idea-version.+/>", ideaVersionTag)

    content
  }

  private def tag(str: String, name: String, value: String): String =
    if (str.matches(s"(?s)^.*<$name>.+</$name>.*$$"))
      str.replaceAll(s"<$name>.+</$name>", s"<$name>$value</$name>")
    else {
      log.warn(s"$input doesn't have $name tag defined, not patching")
      str
    }
}
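A standalone sketch of the `<idea-version/>` rewrite that `transform` performs above, applying the same regex to an illustrative snippet (not the patcher's public API):

// hypothetical plugin.xml fragment; the regex replaces the whole self-closing tag
val content = """<idea-plugin><idea-version since-build="191.0"/></idea-plugin>"""
val patched = content.replaceAll(
  "<idea-version.+/>",
  """<idea-version since-build="201.0" until-build="203.*"/>""")
println(patched) // <idea-plugin><idea-version since-build="201.0" until-build="203.*"/></idea-plugin>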
Example 34
Source File: package.scala From sbt-idea-plugin with Apache License 2.0 | 5 votes |
package org.jetbrains.sbtidea

import java.net.{HttpURLConnection, URL}
import java.nio.file.{Files, Path}

import com.eclipsesource.json.Json
import org.jetbrains.sbtidea.Keys._
import org.jetbrains.sbtidea.pathToPathExt
import sbt._

package object download {

  case class BuildInfo(buildNumber: String, edition: IntelliJPlatform, jbrVersion: Option[String]) {
    override def toString: String = s"BuildInfo($edition-$buildNumber)"
  }

  def withConnection[V](url: URL)(f: => HttpURLConnection => V): V = {
    var connection: HttpURLConnection = null
    try {
      connection = url.openConnection().asInstanceOf[HttpURLConnection]
      f(connection)
    } finally {
      try {
        if (connection != null) connection.disconnect()
      } catch {
        case e: Exception =>
          println(s"Failed to close connection $url: ${e.getMessage}")
      }
    }
  }

  implicit class BuildInfoOps(val buildInfo: BuildInfo) {
    def getActualIdeaBuild(ideaRoot: Path): String = {
      val productInfo = ideaRoot / "product-info.json"
      if (buildInfo.buildNumber.count(_ == '.') < 2 && productInfo.exists) { // most likely some LATEST-EAP-SNAPSHOT kind of version
        try {
          val content = new String(Files.readAllBytes(productInfo))
          val parsed = Json.parse(content)
          parsed.asObject().getString("buildNumber", buildInfo.buildNumber)
        } catch {
          case _: Throwable => buildInfo.buildNumber
        }
      } else buildInfo.buildNumber
    }
  }
}
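A small sketch of `withConnection` above; the URL is a placeholder, and the connection is disconnected even if the body throws:

import java.net.URL
import org.jetbrains.sbtidea.download._

val status: Int = withConnection(new URL("https://example.com/")) { conn =>
  conn.setRequestMethod("HEAD")
  conn.getResponseCode // withConnection disconnects afterwards
}
println(status)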
Example 35
Source File: JupyterPath.scala From almond with BSD 3-Clause "New" or "Revised" License | 5 votes |
package almond.kernel.util

import java.nio.file.Path

sealed abstract class JupyterPath(val name: String) extends Product with Serializable {
  def paths: Seq[Path]
}

object JupyterPath {

  case object System extends JupyterPath("system") {
    def paths: Seq[Path] = JupyterPaths.systemPaths
  }

  case object Env extends JupyterPath("environment") {
    def paths: Seq[Path] = JupyterPaths.envPaths
  }

  case object User extends JupyterPath("user") {
    def paths: Seq[Path] = Seq(JupyterPaths.userPath)
  }
}
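A quick walk over the ADT above, printing each kernel-spec search location by scope:

import almond.kernel.util.JupyterPath

for (scope <- Seq(JupyterPath.System, JupyterPath.Env, JupyterPath.User))
  println(s"${scope.name}: ${scope.paths.mkString(", ")}")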
Example 36
Source File: DefaultBintrayRepoConnector.scala From releaser with Apache License 2.0 | 5 votes |
package uk.gov.hmrc.releaser.bintray

import java.net.{HttpURLConnection, URL}
import java.nio.file.Path

import play.api.libs.json.{JsValue, Json}
import uk.gov.hmrc.{FileDownloader, Logger, ServiceCredentials}

import scala.util.{Failure, Success, Try}

object BintrayRepoConnector extends Logger {

  def apply(bintrayCreds: ServiceCredentials, workDir: Path): BintrayRepoConnector =
    new DefaultBintrayRepoConnector(workDir, new BintrayHttp(bintrayCreds), new FileDownloader())

  def dryRun(bintrayCreds: ServiceCredentials, workDir: Path) = {
    log.info("Bintray : running in dry-run mode")
    val dryRunHttp = new BintrayHttp(bintrayCreds) {
      override def emptyPost(url: String): Try[Unit] = {
        println("BintrayHttp emptyPost DRY_RUN")
        Success(())
      }
      override def putFile(version: VersionDescriptor, file: Path, url: String): Try[Unit] = {
        println("BintrayHttp putFile DRY_RUN")
        Success(())
      }
    }
    new DefaultBintrayRepoConnector(workDir, dryRunHttp, new FileDownloader())
  }
}

trait BintrayRepoConnector {
  def findJar(jarFileName: String, jarUrl: String, version: VersionDescriptor): Option[Path]
  def publish(version: VersionDescriptor): Try[Unit]
  def downloadFile(url: String, fileName: String): Try[Path]
  def uploadFile(version: VersionDescriptor, filePath: Path, url: String): Try[Unit]
  def verifyTargetDoesNotExist(version: VersionDescriptor): Try[Unit]
  def findFiles(version: VersionDescriptor): Try[List[String]]
  def getRepoMetaData(repoName: String, artefactName: String): Try[Unit]
}

class DefaultBintrayRepoConnector(workDir: Path, bintrayHttp: BintrayHttp, fileDownloader: FileDownloader)
    extends BintrayRepoConnector with Logger {

  def publish(version: VersionDescriptor): Try[Unit] = {
    val url = BintrayPaths.publishUrlFor(version)
    bintrayHttp.emptyPost(url)
  }

  def verifyTargetDoesNotExist(version: VersionDescriptor): Try[Unit] = {
    val url = BintrayPaths.fileListUrlFor(version)
    log.info(s"Bintray : checking to see if $url exists")

    val conn = new URL(url).openConnection().asInstanceOf[HttpURLConnection]
    conn.setRequestMethod("HEAD")
    conn.connect()

    conn.getResponseCode match {
      case 200 => Failure(new IllegalArgumentException(s"${version.artefactName} ${version.version} already exists"))
      case _   => Success(())
    }
  }

  def findJar(jarFileName: String, jarUrl: String, version: VersionDescriptor): Option[Path] = {
    downloadFile(jarUrl, jarFileName) match {
      case Success(x) => Some(x)
      case Failure(_) => None
    }
  }

  def uploadFile(version: VersionDescriptor, filePath: Path, url: String): Try[Unit] =
    bintrayHttp.putFile(version, filePath, url)

  def downloadFile(url: String, fileName: String): Try[Path] = {
    val targetFile = workDir.resolve(fileName)
    fileDownloader.url2File(url, targetFile) map { _ => targetFile }
  }

  def findFiles(version: VersionDescriptor): Try[List[String]] = {
    val url = BintrayPaths.fileListUrlFor(version)
    bintrayHttp.get(url).map { st =>
      val fileNames: Seq[JsValue] = Json.parse(st) \\ "path"
      fileNames.map(_.as[String]).toList
    }
  }

  def getRepoMetaData(repoName: String, artefactName: String): Try[Unit] = {
    val url = BintrayPaths.metadata(repoName, artefactName)
    bintrayHttp.get(url).map(_ => ())
  }
}
Example 37
Source File: BintrayHttp.scala From releaser with Apache License 2.0 | 5 votes |
package uk.gov.hmrc.releaser.bintray

import java.nio.file.Path
import java.util.concurrent.TimeUnit

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import play.api.libs.ws.ning.{NingAsyncHttpClientConfigBuilder, NingWSClient, NingWSClientConfig}
import play.api.libs.ws.{WSAuthScheme, WSClientConfig, WSResponse}
import play.api.mvc.Results
import uk.gov.hmrc.{Logger, ServiceCredentials}

import scala.concurrent.Await
import scala.concurrent.duration.Duration
import scala.util.{Failure, Success, Try}
import scala.concurrent.duration._

class BintrayHttp(creds: ServiceCredentials) extends Logger {

  implicit val system = ActorSystem()
  implicit val materializer = ActorMaterializer()

  private def getTimeoutPropertyOptional(key: String) =
    Option(System.getProperty(key)).map(_.toLong milliseconds)

  def wsClientConfig = NingWSClientConfig(
    wsClientConfig = WSClientConfig(
      connectionTimeout = getTimeoutPropertyOptional("wsclient.timeout.connection").getOrElse(2 seconds),
      idleTimeout = getTimeoutPropertyOptional("wsclient.timeout.idle").getOrElse(2 seconds),
      requestTimeout = getTimeoutPropertyOptional("wsclient.timeout.request").getOrElse(2 seconds)
    )
  )

  val ws = new NingWSClient(new NingAsyncHttpClientConfigBuilder(wsClientConfig).build())

  def apiWs(url: String) = ws.url(url)
    .withAuth(creds.user, creds.pass, WSAuthScheme.BASIC)
    .withHeaders("content-type" -> "application/json")

  def emptyPost(url: String): Try[Unit] = {
    log.info(s"posting file to $url")

    val call = apiWs(url).post(Results.EmptyContent())
    val result: WSResponse = Await.result(call, Duration.apply(5, TimeUnit.MINUTES))

    result.status match {
      case s if s >= 200 && s < 300 => Success(())
      case _ => Failure(new scala.Exception(s"Didn't get expected status code when writing to Bintray. Got status ${result.status}: ${result.body}"))
    }
  }

  def get(url: String): Try[String] = {
    log.info(s"getting file from $url")

    val call = apiWs(url).get()
    val result: WSResponse = Await.result(call, Duration.apply(5, TimeUnit.MINUTES))

    result.status match {
      case s if s >= 200 && s < 300 => Success(result.body)
      case _ => Failure(new scala.Exception(s"Didn't get expected status code when writing to Bintray. Got status ${result.status}: ${result.body}"))
    }
  }

  def putFile(version: VersionDescriptor, file: Path, url: String): Try[Unit] = {
    log.info(s"version $version")
    log.info(s"putting file to $url")

    val call = apiWs(url)
      .withHeaders(
        "X-Bintray-Package" -> version.artefactName,
        "X-Bintray-Version" -> version.version)
      .put(file.toFile)

    val result: WSResponse = Await.result(call, Duration.apply(6, TimeUnit.MINUTES))

    result.status match {
      case s if s >= 200 && s < 300 => Success(())
      case _ => Failure(new scala.Exception(s"Didn't get expected status code when writing to Bintray. Got status ${result.status}: ${result.body}"))
    }
  }
}
Example 38
Source File: Repositories.scala From releaser with Apache License 2.0 | 5 votes |
package uk.gov.hmrc.releaser

import java.nio.file.{Files, Path}

import uk.gov.hmrc.releaser.bintray.{BintrayIvyPaths, BintrayMavenPaths, BintrayPaths}

trait RepoFlavour extends BintrayPaths {
  // def scalaVersion: String
  def releaseCandidateRepo: String
  def releaseRepo: String
  val artefactBuilder: (VersionMapping, Path) => TransformerProvider
}

trait IvyRepo extends RepoFlavour with BintrayIvyPaths {
  // val scalaVersion = "2.10"
  val artefactBuilder = IvyArtefacts.apply _
}

trait MavenRepo extends RepoFlavour with BintrayMavenPaths {
  // val scalaVersion = "2.11"
  val artefactBuilder = MavenArtefacts.apply _
}

object RepoFlavours {
  val mavenRepository: RepoFlavour = new BintrayRepository("release-candidates", "releases") with MavenRepo
  val ivyRepository: RepoFlavour = new BintrayRepository("sbt-plugin-release-candidates", "sbt-plugin-releases") with IvyRepo
}

case class BintrayRepository(releaseCandidateRepo: String, releaseRepo: String)
Example 39
Source File: Artefacts.scala From releaser with Apache License 2.0 | 5 votes |
package uk.gov.hmrc.releaser

import java.nio.file.Path

import uk.gov.hmrc.Logger

import scala.collection.immutable.ListMap

trait TransformerProvider extends Logger {

  def regexTransformers: ListMap[String, Option[Transformer]]

  def transformersForSupportedFiles(filePaths: List[String]): List[(String, Option[Transformer])] = {
    filePaths
      .filter(isRelevantFile)
      .map { f => f -> findTransformer(f).flatten }
  }

  private def isRelevantFile(filePath: String): Boolean = {
    val isRelevant = findTransformer(filePath).isDefined
    if (!isRelevant) log.warn(s"$filePath was ignored because it is an unsupported file type")
    isRelevant
  }

  private def findTransformer(filePath: String): Option[Option[Transformer]] = {
    val fileName = filePath.split("/").last
    regexTransformers
      .find(t => t._1.r.findFirstIn(fileName).isDefined)
      .map(t => {
        val transformer = t._2
        if (transformer.isEmpty) log.warn(s"$filePath was ignored because it is a blacklisted file type")
        transformer
      })
  }
}

object IvyArtefacts {
  def apply(map: VersionMapping, localDir: Path) = new IvyArtefacts(map, localDir)
}

class IvyArtefacts(map: VersionMapping, localDir: Path) extends TransformerProvider {
  val regexTransformers = ListMap(
    map.artefactName + "-javadoc\\.jar" -> None,
    s"ivy\\.xml$$" -> Some(new IvyTransformer),
    s".+\\.jar$$" -> Some(new JarManifestTransformer),
    s".+\\.tgz$$" -> Some(new CopyAndRenameTransformer),
    s".+\\.zip$$" -> Some(new CopyAndRenameTransformer))
}

object MavenArtefacts {
  def apply(map: VersionMapping, localDir: Path) = new MavenArtefacts(map, localDir)
}

class MavenArtefacts(map: VersionMapping, localDir: Path) extends TransformerProvider {
  val regexTransformers = ListMap(
    s".*-javadoc\\.jar$$" -> None,
    s".+\\.pom$$" -> Some(new PomTransformer),
    s".+\\.jar$$" -> Some(new JarManifestTransformer),
    s".+\\.tgz$$" -> Some(new TgzTransformer),
    s".+\\.zip$$" -> Some(new CopyAndRenameTransformer))
}
Example 40
Source File: FileDownloader.scala From releaser with Apache License 2.0 | 5 votes |
package uk.gov.hmrc

import java.net.URL
import java.nio.file.{Files, Path}

import scala.util.{Failure, Success, Try}

class FileDownloader extends Logger {

  import resource._

  def url2File(url: String, targetFile: Path): Try[Path] = {
    if (targetFile.toFile.exists()) {
      log.info(s"not downloading from $url as file already exists")
      Success(targetFile)
    } else {
      log.info(s"downloading $url to $targetFile")
      try {
        managed(new URL(url).openConnection().getInputStream).foreach { in =>
          Files.createDirectories(targetFile.getParent)
          Files.copy(in, targetFile)
        }
        Success(targetFile)
      } catch {
        case e: Exception => Failure(e)
      }
    }
  }
}
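A short sketch of `url2File` above; the URL and target path are placeholders:

import java.nio.file.Paths
import scala.util.{Failure, Success}
import uk.gov.hmrc.FileDownloader

val downloader = new FileDownloader
downloader.url2File("https://example.com/artefact.jar", Paths.get("/tmp/artefact.jar")) match {
  case Success(path) => println(s"saved to $path")
  case Failure(e)    => println(s"download failed: ${e.getMessage}")
}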
Example 41
Source File: CredentialsFinder.scala From releaser with Apache License 2.0 | 5 votes |
package uk.gov.hmrc

import java.nio.file.Path

import scala.io.Source

case class ServiceCredentials(user: String, pass: String)

object CredentialsFinder {

  def findGithubCredsInFile(file: Path): Option[ServiceCredentials] = {
    val conf = new ConfigFile(file)
    conf.get("token") map { t => ServiceCredentials("token", t) }
  }

  def findBintrayCredsInFile(file: Path): Option[ServiceCredentials] = {
    val conf = new ConfigFile(file)
    for (user <- conf.get("user"); pass <- conf.get("password"))
      yield ServiceCredentials(user, pass)
  }
}

class ConfigFile(file: Path) extends Logger {

  private val kvMap: Map[String, String] = {
    try {
      Source.fromFile(file.toFile)
        .getLines().toSeq
        .map(_.split("="))
        .map { case Array(key, value) => key.trim -> value.trim }.toMap
    } catch {
      case e: Exception =>
        log.info(s"error parsing $file ${e.getMessage}")
        Map.empty
    }
  }

  def get(path: String) = kvMap.get(path)
}
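A minimal sketch, assuming `~/.bintray/.credentials` holds `user=...` and `password=...` lines in the `key=value` format that `ConfigFile` above parses:

import java.nio.file.Paths
import uk.gov.hmrc.{CredentialsFinder, ServiceCredentials}

val credsFile = Paths.get(System.getProperty("user.home"), ".bintray", ".credentials")
CredentialsFinder.findBintrayCredsInFile(credsFile) match {
  case Some(ServiceCredentials(user, _)) => println(s"found Bintray credentials for $user")
  case None                              => println("no Bintray credentials found")
}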
Example 42
Source File: ScalaInterpreterParams.scala From almond with BSD 3-Clause "New" or "Revised" License | 5 votes |
package almond

import java.nio.file.Path

import almond.protocol.KernelInfo
import ammonite.interp.{CodeClassWrapper, CodeWrapper}
import ammonite.util.Colors
import coursierapi.{Dependency, Module}

import scala.concurrent.ExecutionContext

final case class ScalaInterpreterParams(
  updateBackgroundVariablesEcOpt: Option[ExecutionContext] = None,
  extraRepos: Seq[String] = Nil,
  extraBannerOpt: Option[String] = None,
  extraLinks: Seq[KernelInfo.Link] = Nil,
  predefCode: String = "",
  predefFiles: Seq[Path] = Nil,
  automaticDependencies: Map[Module, Seq[Dependency]] = Map(),
  automaticVersions: Map[Module, String] = Map(),
  forceMavenProperties: Map[String, String] = Map(),
  mavenProfiles: Map[String, Boolean] = Map(),
  codeWrapper: CodeWrapper = CodeClassWrapper,
  initialColors: Colors = Colors.Default,
  initialClassLoader: ClassLoader = Thread.currentThread().getContextClassLoader,
  metabrowse: Boolean = false,
  metabrowseHost: String = "localhost",
  metabrowsePort: Int = -1,
  lazyInit: Boolean = false,
  trapOutput: Boolean = false,
  disableCache: Boolean = false,
  autoUpdateLazyVals: Boolean = true,
  autoUpdateVars: Boolean = true
)
Example 43
Source File: Display.scala From almond with BSD 3-Clause "New" or "Revised" License | 5 votes |
package almond.display

import java.io.File
import java.net.URL
import java.nio.file.Path
import java.util.{Map => JMap}

import almond.interpreter.api.{DisplayData, OutputHandler}
import jupyter.{Displayer, Displayers}

import scala.collection.JavaConverters._

trait Display {
  def data(): Map[String, String]
  def metadata(): Map[String, String] = Map()
  def displayData(): DisplayData = DisplayData(data(), metadata = metadata())

  def display()(implicit output: OutputHandler): Unit =
    output.display(displayData())

  // registering things with jvm-repr just in case
  Display.registered
}

object Display {

  private lazy val registered: Unit = {
    Displayers.register(
      classOf[Display],
      new Displayer[Display] {
        def display(d: Display): JMap[String, String] = d.data().asJava
      }
    )
  }

  def markdown(content: String)(implicit output: OutputHandler): Unit = Markdown(content).display()
  def html(content: String)(implicit output: OutputHandler): Unit = Html(content).display()
  def latex(content: String)(implicit output: OutputHandler): Unit = Latex(content).display()
  def text(content: String)(implicit output: OutputHandler): Unit = Text(content).display()
  def js(content: String)(implicit output: OutputHandler): Unit = Javascript(content).display()
  def svg(content: String)(implicit output: OutputHandler): Unit = Svg(content).display()

  trait Builder[C, T] {
    protected def build(contentOrUrl: Either[URL, C]): T

    def apply(content: C): T = build(Right(content))
    def from(url: String): T = build(Left(new URL(url)))
    def from(url: URL): T = build(Left(url))
    def fromFile(file: File): T = build(Left(file.toURI.toURL))
    def fromFile(path: Path): T = build(Left(path.toUri.toURL))
    def fromFile(path: String): T = build(Left(new File(path).toURI.toURL))
  }
}
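A hedged sketch of the helpers above as they might be called from an almond kernel, where the interpreter provides the implicit OutputHandler; the rendered content is illustrative:

import almond.display.Display
import almond.interpreter.api.OutputHandler

def renderSummary(rows: Int)(implicit output: OutputHandler): Unit = {
  Display.markdown(s"**Loaded** $rows rows")                          // rendered as Markdown
  Display.html(s"<progress value='$rows' max='100'></progress>")      // rendered as HTML
}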
Example 44
Source File: MetaDataProvider.scala From releaser with Apache License 2.0 | 5 votes |
package uk.gov.hmrc.releaser

import java.nio.file.Path
import java.util.jar.Manifest
import java.util.zip.ZipFile

import org.joda.time.DateTime
import org.joda.time.format.DateTimeFormat
import uk.gov.hmrc.releaser.github.CommitSha

import scala.collection.JavaConversions._
import scala.io.Source
import scala.util.{Failure, Success, Try}

trait MetaDataProvider {
  def fromJarFile(p: Path): Try[ArtefactMetaData]
  def fromCommitManifest(p: Path): Try[ArtefactMetaData]
}

case class ArtefactMetaData(sha: CommitSha, commitAuthor: String, commitDate: DateTime)

class ArtefactMetaDataProvider extends MetaDataProvider {
  import ArtefactMetaDataProvider._

  def fromJarFile(p: Path): Try[ArtefactMetaData] = {
    Try { new ZipFile(p.toFile) }.flatMap { jarFile =>
      jarFile.entries().filter(_.getName == "META-INF/MANIFEST.MF").toList.headOption.map { ze =>
        val man = new Manifest(jarFile.getInputStream(ze))
        ArtefactMetaData(
          man.getMainAttributes.getValue("Git-Head-Rev"),
          man.getMainAttributes.getValue("Git-Commit-Author"),
          gitCommitDateFormat.parseDateTime(man.getMainAttributes.getValue("Git-Commit-Date"))
        )
      }.toTry(new Exception(s"Failed to retrieve manifest from $p"))
    }
  }

  def fromCommitManifest(p: Path): Try[ArtefactMetaData] = {
    Try {
      val map = Source.fromFile(p.toFile)
        .getLines().toSeq
        .map(_.split("="))
        .map { case Array(key, value) => key.trim -> value.trim }.toMap

      ArtefactMetaData(map("sha"), map("author"), gitCommitDateFormat.parseDateTime(map("date")))
    }
  }
}

object ArtefactMetaDataProvider {

  val gitCommitDateFormat = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss.SSSZ")

  implicit class OptionPimp[A](opt: Option[A]) {
    def toTry(e: Exception): Try[A] = opt match {
      case Some(x) => Success(x)
      case None    => Failure(e)
    }
  }
}
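A sketch of reading the commit metadata above from a jar whose manifest carries the Git-* attributes; the path is a placeholder:

import java.nio.file.Paths
import scala.util.{Failure, Success}
import uk.gov.hmrc.releaser.{ArtefactMetaData, ArtefactMetaDataProvider}

new ArtefactMetaDataProvider().fromJarFile(Paths.get("/tmp/lib.jar")) match {
  case Success(ArtefactMetaData(sha, author, date)) => println(s"$sha by $author on $date")
  case Failure(e)                                   => println(e.getMessage)
}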
Example 45
Source File: JupyterPaths.scala From almond with BSD 3-Clause "New" or "Revised" License | 5 votes |
package almond.kernel.util

import java.nio.file.{Path, Paths}

object JupyterPaths {

  // See http://jupyter-client.readthedocs.io/en/5.2.3/kernels.html#kernel-specs

  // FIXME On Windows, rely on https://github.com/soc/directories-jvm/blob/d302b1e93963c81ed511e072a52e95251b5d078b/src/main/java/io/github/soc/directories/Util.java#L110 ?
  // Not sure about the way this gets PROGRAMDATA and APPDATA, on Windows.

  def systemPaths: Seq[Path] =
    OS.current match {
      case _: OS.Unix =>
        Seq(
          Paths.get("/usr/local/share/jupyter/kernels"),
          Paths.get("/usr/share/jupyter/kernels")
        )
      case OS.Windows =>
        Seq(
          Paths.get(sys.env("PROGRAMDATA"), "jupyter", "kernels")
        )
    }

  def userPath: Path =
    OS.current match {
      case OS.Mac     => Paths.get(sys.props("user.home"), "Library", "Jupyter", "kernels")
      case _: OS.Unix => Paths.get(sys.props("user.home"), ".local", "share", "jupyter", "kernels")
      case OS.Windows => Paths.get(sys.env("APPDATA"), "jupyter", "kernels")
    }

  def envPaths: Seq[Path] = {
    val sysPrefixPath = sys.props.get("jupyter.sys.prefix").toSeq.map { prefix =>
      Paths.get(prefix, "share", "jupyter", "kernels")
    }
    val jupyterPathEnv = sys.env.get("JUPYTER_PATH").toSeq.map { prefix =>
      Paths.get(prefix, "kernels")
    }
    val jupyterPathProp = sys.props.get("jupyter.path").toSeq.map { prefix =>
      Paths.get(prefix, "kernels")
    }
    (sysPrefixPath ++ jupyterPathEnv ++ jupyterPathProp).distinct
  }

  def paths: Seq[Path] =
    (Seq(userPath) ++ envPaths ++ systemPaths).distinct
}
Example 46
Source File: TestHelpers.scala From Waves with MIT License | 5 votes |
package com.wavesplatform

import java.io.IOException
import java.nio.file.attribute.BasicFileAttributes
import java.nio.file.{FileVisitResult, Files, Path, SimpleFileVisitor}

import com.wavesplatform.account.Address
import com.wavesplatform.features.BlockchainFeatures
import com.wavesplatform.settings.{FunctionalitySettings, GenesisSettings, GenesisTransactionSettings, WavesSettings}

import scala.concurrent.duration._

object TestHelpers {

  def genesisSettings(balances: Map[Address, Long], blockTimestamp: Long = System.currentTimeMillis()): GenesisSettings = {
    val totalAmount = balances.values.sum
    val transactions = balances.map {
      case (account, amount) =>
        GenesisTransactionSettings(account.stringRepr, amount)
    }.toSeq
    GenesisSettings(blockTimestamp, blockTimestamp, totalAmount, None, transactions, 1000, 60.seconds)
  }

  def enableNG(settings: FunctionalitySettings): FunctionalitySettings =
    settings.copy(
      blockVersion3AfterHeight = 0,
      preActivatedFeatures = settings.preActivatedFeatures ++ Map(BlockchainFeatures.NG.id -> 0)
    )

  def enableNG(settings: WavesSettings): WavesSettings =
    settings.copy(
      blockchainSettings = settings.blockchainSettings.copy(functionalitySettings = enableNG(settings.blockchainSettings.functionalitySettings))
    )

  def deleteRecursively(path: Path): Unit = Files.walkFileTree(
    path,
    new SimpleFileVisitor[Path] {
      override def postVisitDirectory(dir: Path, exc: IOException): FileVisitResult = {
        Option(exc).fold {
          Files.delete(dir)
          FileVisitResult.CONTINUE
        }(throw _)
      }

      override def visitFile(file: Path, attrs: BasicFileAttributes): FileVisitResult = {
        Files.delete(file)
        FileVisitResult.CONTINUE
      }
    }
  )
}
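A small sketch exercising `deleteRecursively` above on a throwaway directory tree:

import java.nio.file.Files
import com.wavesplatform.TestHelpers

val root = Files.createTempDirectory("tree-")
Files.createDirectories(root.resolve("a/b"))
Files.write(root.resolve("a/b/file.txt"), "x".getBytes)
TestHelpers.deleteRecursively(root)   // removes files first, then directories bottom-up
assert(!Files.exists(root))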
Example 47
Source File: VerifierLoggerBenchmark.scala From Waves with MIT License | 5 votes |
package com.wavesplatform.transaction.smart

import java.io.BufferedWriter
import java.nio.file.{Files, Path, Paths}
import java.util.concurrent.TimeUnit

import cats.Id
import com.wavesplatform.account.KeyPair
import com.wavesplatform.common.state.ByteStr
import com.wavesplatform.common.utils._
import com.wavesplatform.lang.v1.compiler.Terms
import com.wavesplatform.lang.v1.compiler.Terms.{CONST_BOOLEAN, EVALUATED}
import com.wavesplatform.lang.v1.evaluator.Log
import com.wavesplatform.lang.v1.evaluator.ctx.impl.waves.Bindings
import com.wavesplatform.state.BinaryDataEntry
import com.wavesplatform.transaction.DataTransaction
import com.wavesplatform.transaction.smart.VerifierLoggerBenchmark.BigLog
import org.openjdk.jmh.annotations._
import org.openjdk.jmh.infra.Blackhole

@OutputTimeUnit(TimeUnit.MILLISECONDS)
@BenchmarkMode(Array(Mode.AverageTime))
@Threads(1)
@Fork(1)
@Warmup(iterations = 10)
@Measurement(iterations = 10)
class VerifierLoggerBenchmark {

  @Benchmark
  def verifierLogged(bh: Blackhole, log: BigLog): Unit = {
    val logs = Verifier.buildLogs("id", log.value)
    bh.consume(log.writer.write(logs))
  }
}

object VerifierLoggerBenchmark {

  @State(Scope.Benchmark)
  class BigLog {

    val resultFile: Path = Paths.get("log.txt")
    val writer: BufferedWriter = Files.newBufferedWriter(resultFile)

    private val dataTx: DataTransaction = DataTransaction
      .selfSigned(1.toByte, KeyPair(Array[Byte]()), (1 to 4).map(i => BinaryDataEntry(s"data$i", ByteStr(Array.fill(1024 * 30)(1)))).toList, 100000000, 0)
      .explicitGet()

    private val dataTxObj: Terms.CaseObj = Bindings.transactionObject(
      RealTransactionWrapper(dataTx, ???, ???, ???).explicitGet(),
      proofsEnabled = true
    )

    val value: (Log[Id], Either[String, EVALUATED]) =
      (
        List.fill(500)("txVal" -> Right(dataTxObj)),
        Right(CONST_BOOLEAN(true))
      )

    @TearDown
    def deleteFile(): Unit = {
      Files.delete(resultFile)
      writer.close()
    }
  }
}
Example 48
Source File: Sourcepath.scala From metabrowse with Apache License 2.0 | 5 votes |
package metabrowse.server

import java.nio.file.Files
import java.nio.file.Path
import java.nio.file.Paths

object Sourcepath {

  def jdkSources(): Option[Path] = {
    for {
      javaHome <- sys.props.get("java.home")
      srcZip = Paths.get(javaHome).getParent.resolve("src.zip")
      if Files.isRegularFile(srcZip)
    } yield srcZip
  }

  private[metabrowse] def coursierFetchCompilerPlugin(
      artifact: String
  ): Path = {
    coursierFetch(List("--intransitive", artifact)).headOption.getOrElse {
      sys.error(artifact)
    }
  }

  private[metabrowse] def coursierFetch(extra: List[String]): List[Path] = {
    sys.process
      .Process(List("coursier", "fetch") ++ extra)
      .!!
      .trim
      .linesIterator
      .map(jar => Paths.get(jar))
      .toList
  }
}
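A quick use of `jdkSources` above; whether it yields anything depends on the local JDK layout (it looks for src.zip next to java.home):

import metabrowse.server.Sourcepath

Sourcepath.jdkSources() match {
  case Some(src) => println(s"JDK sources at $src")
  case None      => println("no src.zip found next to java.home")
}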
Example 49
Source File: KryoSerializerInit.scala From nexus-kg with Apache License 2.0 | 5 votes |
package ch.epfl.bluebrain.nexus.kg.serializers

import java.nio.file.Path

import com.esotericsoftware.kryo.{Kryo, Serializer}
import com.esotericsoftware.kryo.io.{Input, Output}
import io.altoo.akka.serialization.kryo.DefaultKryoInitializer
import io.altoo.akka.serialization.kryo.serializer.scala.ScalaKryo

class PathSerializer extends Serializer[Path] {

  override def write(kryo: Kryo, output: Output, path: Path): Unit =
    output.writeString(path.toString)

  override def read(kryo: Kryo, input: Input, `type`: Class[Path]): Path =
    Path.of(input.readString())
}

class KryoSerializerInit extends DefaultKryoInitializer {

  override def postInit(kryo: ScalaKryo): Unit = {
    super.postInit(kryo)
    kryo.addDefaultSerializer(classOf[Path], classOf[PathSerializer])
    kryo.register(classOf[Path], new PathSerializer)
    ()
  }
}
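A standalone round trip of `PathSerializer` above, driving Kryo directly (the akka-kryo wiring from `KryoSerializerInit` is omitted); the path value is illustrative:

import com.esotericsoftware.kryo.Kryo
import com.esotericsoftware.kryo.io.{Input, Output}
import java.nio.file.{Path, Paths}
import ch.epfl.bluebrain.nexus.kg.serializers.PathSerializer

val kryo = new Kryo()
val serializer = new PathSerializer
val out = new Output(256, -1)               // growable buffer
serializer.write(kryo, out, Paths.get("/tmp/example.txt"))
val in = new Input(out.toBytes)
val back: Path = serializer.read(kryo, in, classOf[Path])  // Path.of inside read needs Java 11+
assert(back == Paths.get("/tmp/example.txt"))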
Example 50
Source File: FileIO.scala From korolev with Apache License 2.0 | 5 votes |
package korolev.effect.io

import java.io.{BufferedReader, FileInputStream, FileOutputStream, FileReader}
import java.nio.file.Path

import korolev.effect.syntax._
import korolev.effect.{Effect, Stream}

object FileIO {

  def readBytes[F[_]: Effect](path: Path): F[LazyBytes[F]] = {
    val inputStream = new FileInputStream(path.toFile)
    LazyBytes.fromInputStream(inputStream)
  }

  def readLines[F[_]: Effect](path: Path): F[Stream[F, String]] = {
    Stream.unfoldResource[F, BufferedReader, Unit, String](
      default = (),
      create = Effect[F].delay(new BufferedReader(new FileReader(path.toFile))),
      loop = (reader, _) => Effect[F].delay {
        ((), Option(reader.readLine()))
      }
    )
  }

  def write[F[_]: Effect](path: Path, append: Boolean = false): Stream[F, Array[Byte]] => F[Unit] = { stream =>
    val outputStream = new FileOutputStream(path.toFile, append)

    def aux(): F[Unit] = {
      stream.pull().flatMap {
        case Some(chunk) =>
          Effect[F]
            .delay(outputStream.write(chunk))
            .after(aux())
            .recover {
              case error =>
                outputStream.close()
                throw error
            }
        case None =>
          Effect[F].delay(outputStream.close())
      }
    }

    aux()
  }
}
Example 51
Source File: JobUtils.scala From fusion-data with Apache License 2.0 | 5 votes |
package mass.job.util

import java.io.File
import java.nio.charset.Charset
import java.nio.file.{Files, Path, StandardCopyOption}
import java.util.zip.ZipFile

import com.typesafe.scalalogging.StrictLogging
import helloscala.common.Configuration
import helloscala.common.util.{DigestUtils, Utils}
import mass.common.util.FileUtils
import mass.core.job.JobConstants
import mass.job.JobSettings
import mass.message.job._
import mass.model.job.{JobItem, JobTrigger}

import scala.concurrent.{ExecutionContext, Future}

object JobUtils extends StrictLogging {

  case class JobZipInternal private (configs: Vector[JobCreateReq], entries: Vector[Path])

  def uploadJob(jobSettings: JobSettings, req: JobUploadJobReq)(implicit ec: ExecutionContext): Future[JobZip] =
    Future {
      val sha256 = DigestUtils.sha256HexFromPath(req.file)
      val dest = jobSettings.jobSavedDir.resolve(sha256.take(2)).resolve(sha256)

      val jobZipInternal = parseJobZip(req.file, req.charset, dest.resolve(JobConstants.DIST)) match {
        case Right(v) => v
        case Left(e)  => throw e
      }

      val zipPath = dest.resolve(req.fileName)
      Files.move(req.file, zipPath, StandardCopyOption.REPLACE_EXISTING)
      JobZip(zipPath, jobZipInternal.configs, jobZipInternal.entries)
    }

  @inline def parseJobZip(file: Path, charset: Charset, dest: Path): Either[Throwable, JobZipInternal] =
    parseJobZip(file.toFile, charset, dest)

  def parseJobZip(file: File, charset: Charset, dest: Path): Either[Throwable, JobZipInternal] = Utils.either {
    import scala.jdk.CollectionConverters._
    import scala.language.existentials

    val zip = new ZipFile(file, charset)
    try {
      val (confEntries, fileEntries) = zip
        .entries()
        .asScala
        .filterNot(entry => entry.isDirectory)
        .span(entry => entry.getName.endsWith(JobConstants.ENDS_SUFFIX) && !entry.isDirectory)
      val configs =
        confEntries.map(confEntry =>
          parseJobConf(FileUtils.getString(zip.getInputStream(confEntry), charset, "\n")) match {
            case Right(config) => config
            case Left(e)       => throw e
          })

      val buf = Array.ofDim[Byte](1024)
      val entryPaths = fileEntries.map { entry =>
        val entryName = entry.getName
        val savePath = dest.resolve(entryName)
        if (!Files.isDirectory(savePath.getParent)) {
          Files.createDirectories(savePath.getParent)
        }
        FileUtils.write(zip.getInputStream(entry), Files.newOutputStream(savePath), buf) // write the zip entry to disk
        savePath
      }

      JobZipInternal(configs.toVector, entryPaths.toVector)
    } finally {
      if (zip ne null) zip.close()
    }
  }

  def parseJobConf(content: String): Either[Throwable, JobCreateReq] = Utils.either {
    val conf = Configuration.parseString(content)
    val jobItem = JobItem(conf.getConfiguration("item"))
    val jobTrigger = JobTrigger(conf.getConfiguration("trigger"))
    JobCreateReq(conf.get[Option[String]]("key"), jobItem, jobTrigger)
  }
}

case class JobZip(zipPath: Path, configs: Vector[JobCreateReq], entries: Vector[Path])
Example 52
Source File: JobMessage.scala From fusion-data with Apache License 2.0 | 5 votes |
package mass.message.job

import java.io.File
import java.nio.charset.Charset
import java.nio.file.Path
import java.time.OffsetDateTime
import java.util.concurrent.TimeUnit

import akka.http.scaladsl.server.directives.FileInfo
import fusion.json.CborSerializable
import helloscala.common.data.{IntValueName, StringValueName, ValueName}
import mass.common.page.{Page, PageResult}
import mass.core.job.JobResult
import mass.model.CommonStatus
import mass.model.job._

import scala.concurrent.duration.FiniteDuration

sealed trait JobMessage extends CborSerializable
sealed trait JobResponse extends CborSerializable

final case class JobErrorResponse(status: Int, message: String) extends JobResponse

final case class ProgramVersionItem(programId: String, versions: Seq[StringValueName])

final case class JobGetAllOptionReq() extends JobMessage
final case class JobGetAllOptionResp(
    program: Seq[StringValueName],
    triggerType: Seq[ValueName[String]],
    programVersion: Seq[ProgramVersionItem],
    jobStatus: Seq[IntValueName])
    extends JobResponse

final case class JobScheduleReq(key: String) extends JobMessage

final case class JobCreateReq(key: Option[String], item: JobItem, trigger: JobTrigger) extends JobMessage
final case class JobCreateResp(schedule: Option[JobSchedule]) extends JobResponse

final case class JobUpdateReq(
    key: String,
    program: Option[Program] = None,
    programOptions: Option[Seq[String]] = None,
    programMain: Option[String] = None,
    programArgs: Option[Seq[String]] = None,
    programVersion: Option[String] = None,
    resources: Option[Map[String, String]] = None,
    data: Option[Map[String, String]] = None,
    description: Option[String] = None,
    dependentJobKeys: Option[Seq[String]] = None,
    name: Option[String] = None,
    triggerType: Option[TriggerType] = None,
    triggerEvent: Option[String] = None,
    startTime: Option[OffsetDateTime] = None,
    endTime: Option[OffsetDateTime] = None,
    // number of repetitions
    repeat: Option[Int] = None,
    // interval between repetitions
    interval: Option[FiniteDuration] = None,
    cronExpress: Option[String] = None,
    failedRetries: Option[Int] = None,
    timeout: Option[FiniteDuration] = None,
    alarmEmails: Option[Seq[String]] = None,
    status: Option[CommonStatus] = None)
    extends JobMessage

final case class JobFindReq(key: String) extends JobMessage
final case class JobSchedulerResp(schedule: Option[JobSchedule]) extends JobResponse

final case class JobPageReq(page: Int = 1, size: Int = 20, key: Option[String] = None) extends Page with JobMessage
final case class JobPageResp(content: Seq[JobSchedule], totalElements: Long, page: Int, size: Int)
    extends PageResult[JobSchedule]
    with JobResponse

final case class JobListReq(key: String) extends JobMessage
final case class JobListResp(items: Seq[JobSchedule]) extends JobResponse

final case class SchedulerJobResult(
    start: OffsetDateTime,
    end: OffsetDateTime,
    exitValue: Int,
    outPath: String,
    errPath: String)
    extends JobResult {
  def runDuration: FiniteDuration =
    FiniteDuration(java.time.Duration.between(start, end).toNanos, TimeUnit.NANOSECONDS).toCoarsest
}

final case class JobUploadJobReq(file: Path, fileName: String, charset: Charset) extends JobMessage
final case class JobUploadJobResp(resps: Seq[JobCreateResp]) extends JobResponse

final case class JobUploadFilesReq(items: Seq[(FileInfo, File)]) extends JobMessage
final case class JobUploadFilesResp(resources: Seq[IntValueName]) extends JobResponse
Example 53
Source File: Releaser.scala From releaser with Apache License 2.0 | 5 votes |
package uk.gov.hmrc.releaser

import java.io.File
import java.nio.file.{Files, Path}

import org.apache.commons.io.FileUtils
import uk.gov.hmrc.releaser.bintray.BintrayRepoConnector
import uk.gov.hmrc.releaser.github.{GithubConnector, Repo}
import uk.gov.hmrc.{CredentialsFinder, Logger}

import scala.util.{Failure, Success, Try}

object ReleaserMain {
  def main(args: Array[String]): Unit = {
    val result = Releaser(args)
    System.exit(result)
  }
}

object Releaser extends Logger {

  import ArgParser._

  def apply(args: Array[String]): Int = {
    parser.parse(args, Config()) match {
      case Some(config) =>
        val githubName = config.githubNameOverride.getOrElse(config.artefactName)
        run(config.artefactName, ReleaseCandidateVersion(config.rcVersion), config.releaseType, githubName, config.releaseNotes, config.dryRun)
      case None => -1
    }
  }

  def run(artefactName: String,
          rcVersion: ReleaseCandidateVersion,
          releaseType: ReleaseType.Value,
          gitHubName: String,
          releaseNotes: Option[String],
          dryRun: Boolean = false): Int = {
    val githubCredsFile = System.getProperty("user.home") + "/.github/.credentials"
    val bintrayCredsFile = System.getProperty("user.home") + "/.bintray/.credentials"

    val githubCredsOpt = CredentialsFinder.findGithubCredsInFile(new File(githubCredsFile).toPath)
    val bintrayCredsOpt = CredentialsFinder.findBintrayCredsInFile(new File(bintrayCredsFile).toPath)

    doReleaseWithCleanup { directories =>
      if (githubCredsOpt.isEmpty) {
        log.info(s"Didn't find github credentials in $githubCredsFile")
        -1
      } else if (bintrayCredsOpt.isEmpty) {
        log.info(s"Didn't find Bintray credentials in $bintrayCredsFile")
        -1
      } else {
        val releaserVersion = getClass.getPackage.getImplementationVersion
        val metaDataProvider = new ArtefactMetaDataProvider()
        val gitHubDetails = if (dryRun) GithubConnector.dryRun(githubCredsOpt.get, releaserVersion) else GithubConnector(githubCredsOpt.get, releaserVersion)
        // pick the dry-run aware connector so --dry-run also applies to Bintray
        val bintrayConnector = if (dryRun) BintrayRepoConnector.dryRun(bintrayCredsOpt.get, directories.workDir) else BintrayRepoConnector(bintrayCredsOpt.get, directories.workDir)

        val coordinator = new Coordinator(directories.stageDir, metaDataProvider, gitHubDetails, bintrayConnector)
        val result = coordinator.start(artefactName, Repo(gitHubName), rcVersion, releaseType, releaseNotes)

        result match {
          case Success(targetVersion) =>
            log.info(s"Releaser successfully released $artefactName $targetVersion")
            0
          case Failure(e) =>
            e.printStackTrace()
            log.info(s"Releaser failed to release $artefactName $rcVersion with error '${e.getMessage}'")
            1
        }
      }
    }
  }

  def doReleaseWithCleanup[T](f: ReleaseDirectories => T): T = {
    val directories = ReleaseDirectories()
    try {
      f(directories)
    } finally {
      log.info("cleaning releaser work directory")
      directories.delete().recover { case t => log.warn(s"failed to delete releaser work directory ${t.getMessage}") }
    }
  }
}

case class ReleaseDirectories(tmpDirectory: Path = Files.createTempDirectory("releaser")) {

  lazy val workDir = Files.createDirectories(tmpDirectory.resolve("work"))
  lazy val stageDir = Files.createDirectories(tmpDirectory.resolve("stage"))

  def delete() = Try {
    FileUtils.forceDelete(tmpDirectory.toFile)
  }
}
Example 54
Source File: FakeBintrayRepoConnector.scala From releaser with Apache License 2.0 | 5 votes |
package uk.gov.hmrc.releaser.bintray

import java.nio.file.{Path, Paths}

import scala.collection.mutable
import scala.util.{Failure, Success, Try}

class FakeBintrayRepoConnector(filesuffix: String = "",
                               jarResource: Option[String],
                               bintrayFiles: Set[String],
                               targetExists: Boolean = false) extends BintrayRepoConnector {

  val downloadedFiles = mutable.Set[String]()
  val uploadedFiles = mutable.Set[(VersionDescriptor, Path, String)]()
  var lastPublishDescriptor: Option[VersionDescriptor] = None

  override def findJar(jarFileName: String, jarUrl: String, version: VersionDescriptor): Option[Path] =
    jarResource.map { x => Paths.get(this.getClass.getResource(filesuffix + x).toURI) }

  override def publish(version: VersionDescriptor): Try[Unit] = {
    lastPublishDescriptor = Some(version)
    Success(())
  }

  override def findFiles(version: VersionDescriptor): Try[List[String]] =
    Success(bintrayFiles.toList ++ jarResource)

  override def downloadFile(url: String, fileName: String): Try[Path] = {
    downloadedFiles.add(url)
    Success {
      val resource = this.getClass.getResource(filesuffix + fileName)
      Paths.get(resource.toURI)
    }
  }

  override def uploadFile(version: VersionDescriptor, filePath: Path, url: String): Try[Unit] = {
    uploadedFiles.add((version, filePath, url))
    Success(())
  }

  override def verifyTargetDoesNotExist(version: VersionDescriptor): Try[Unit] =
    if (targetExists) Failure(new IllegalArgumentException("Failed in test"))
    else Success(())

  override def getRepoMetaData(repoName: String, artefactName: String): Try[Unit] = Success(())
}
Example 55
Source File: TgzTransformerSpec.scala From releaser with Apache License 2.0 | 5 votes |
package uk.gov.hmrc.releaser

import java.io._
import java.nio.file.{Files, Path}

import org.apache.commons.compress.archivers.tar.{TarArchiveEntry, TarArchiveInputStream}
import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream
import org.apache.commons.io.FileUtils
import org.scalatest._

import scala.collection.mutable.ListBuffer
import scala.util.{Failure, Success}

class TgzTransformerSpec extends WordSpec with Matchers with BeforeAndAfterEach with OptionValues with TryValues {

  val tgzPath = new File(this.getClass.getResource("/help-frontend/uk/gov/hmrc/help-frontend_2.11/1.26.0-3-gd7ed03c/help-frontend_2.11-1.26.0-3-gd7ed03c.tgz").toURI).toPath

  var transformer: TgzTransformer = _
  val candidate_1_26_0_3_gd7ed03c = ReleaseCandidateVersion("1.26.0-3-gd7ed03c")
  val release_1_4_0 = ReleaseVersion("1.4.0")

  var tmpDir: Path = _

  override def beforeEach() {
    tmpDir = Files.createTempDirectory("tmp")
    transformer = new TgzTransformer()
    FileUtils.copyFileToDirectory(tgzPath.toFile, tmpDir.toFile)
  }

  override def afterEach() {
    FileUtils.deleteDirectory(tmpDir.toFile)
  }

  "the transformer" should {

    "decompress the tgz, rename the main folder and compress it back" in {

      val inFile = new File(tmpDir.toFile, tgzPath.getFileName.toString).toPath
      val targetFilePath = tmpDir.resolve("help-frontend-1.4.0.tgz")

      val originalTarEntries = listTgzEntries(inFile)
      assertTarEntry(originalTarEntries, "./help-frontend-1.26.0-3-gd7ed03c/")
      assertTarEntry(originalTarEntries, "./help-frontend-1.4.0/", exists = false)
      assertTarEntry(originalTarEntries, "./start-docker.sh", mode = Some(493))

      val outFileTry = transformer(inFile, "help-frontend", candidate_1_26_0_3_gd7ed03c, release_1_4_0, targetFilePath)
      outFileTry match {
        case Success(outFile) =>
          val tarEntries = listTgzEntries(targetFilePath)
          assertTarEntry(tarEntries, "./help-frontend-1.26.0-3-gd7ed03c/", exists = false)
          assertTarEntry(tarEntries, "./help-frontend-1.4.0/")
          assertTarEntry(tarEntries, "./start-docker.sh", mode = Some(493))
        case Failure(e) => fail("Caught exception: " + e.getMessage, e)
      }
    }
  }

  private def listTgzEntries(localTgzFile: Path): List[TarArchiveEntry] = {
    val fin = new BufferedInputStream(new FileInputStream(localTgzFile.toFile))
    val gzIn = new GzipCompressorInputStream(fin)
    val tarIn = new TarArchiveInputStream(gzIn)

    val entries = ListBuffer[TarArchiveEntry]()

    Iterator continually tarIn.getNextTarEntry takeWhile (null !=) foreach { tarEntry =>
      entries += tarEntry
    }

    tarIn.close()

    entries.toList
  }

  private def assertTarEntry(tarEntries: List[TarArchiveEntry], entryName: String, exists: Boolean = true, mode: Option[Int] = None) = {
    val entryOption = tarEntries.find(_.getName == entryName)
    entryOption match {
      case Some(entry) =>
        exists shouldBe true
        mode.foreach { m => m shouldBe entry.getMode }
      case None => exists shouldBe false
    }
  }
}
Example 56
Source File: Scalafmtter.scala From neo-sbt-scalafmt with Apache License 2.0 | 5 votes |
package com.lucidchart.scalafmt.impl

import com.lucidchart.scalafmt.api
import com.lucidchart.scalafmt.api.Dialect
import java.nio.file.Path
import java.util.function
import org.scalafmt
import org.scalafmt.config.ScalafmtConfig
import scala.meta.dialects

class Scalafmtter(config: ScalafmtConfig) extends api.Scalafmtter { self =>

  def formatter(dialect: Dialect) = new function.Function[String, String] {
    private[this] val config = dialect match {
      case Dialect.SBT   => ScalafmtConfigUtil.setDialect(self.config, dialects.Sbt0137)
      case Dialect.SCALA => self.config
    }

    def apply(code: String) = scalafmt.Scalafmt.format(code, config).get
  }

  def includeFile(file: Path) = config.project.matcher.matches(file.toString)

  // Otherwise, this cache hangs on to a lot
  override protected def finalize() =
    try ScalametaUtil.clearCache()
    finally super.finalize()
}
Example 57
Source File: ComDatabricksDataSourceSuite.scala From glow with Apache License 2.0 | 5 votes |
package io.projectglow.sql

import java.nio.file.{Files, Path}

// Sanity check that legacy DataSource names starting with "com.databricks." still work
class ComDatabricksDataSourceSuite extends GlowBaseTest {

  lazy val vcf = s"$testDataHome/CEUTrio.HiSeq.WGS.b37.NA12878.20.21.vcf"
  lazy val bgen = s"$testDataHome/bgen/example.16bits.bgen"

  protected def createTempPath(extension: String): Path = {
    val tempDir = Files.createTempDirectory(s"test-$extension-dir")
    val path = tempDir.resolve(s"test.$extension")
    logger.info(s"Writing $extension to path ${path.toAbsolutePath.toString}")
    path
  }

  case class DataSources(legacyDataSource: String, standardDataSource: String, file: String)

  // Legacy read DataSource, standard read DataSource, file
  val readDataSources: Seq[DataSources] = Seq(
    DataSources("com.databricks.vcf", "vcf", vcf),
    DataSources("com.databricks.bgen", "bgen", bgen)
  )

  gridTest("read")(readDataSources) { rds =>
    val legacyDf = spark.read.format(rds.legacyDataSource).load(rds.file).orderBy("contigName", "start")
    val standardDf = spark.read.format(rds.standardDataSource).load(rds.file).orderBy("contigName", "start")
    assert(legacyDf.collect sameElements standardDf.collect)
  }

  // Legacy write source, standard read DataSource, file
  val writeDataSources: Seq[DataSources] = Seq(
    DataSources("com.databricks.vcf", "vcf", vcf),
    DataSources("com.databricks.bigvcf", "vcf", vcf),
    DataSources("com.databricks.bigbgen", "bgen", bgen)
  )

  gridTest("write")(writeDataSources) { wds =>
    val inputDf = spark.read.format(wds.standardDataSource).load(wds.file).orderBy("contigName", "start")
    val rewrittenFile = createTempPath(wds.standardDataSource).toString
    inputDf.write.format(wds.legacyDataSource).save(rewrittenFile)
    val rewrittenDf = spark.read.format(wds.standardDataSource).load(rewrittenFile).orderBy("contigName", "start")
    assert(inputDf.collect sameElements rewrittenDf.collect)
  }
}
Example 58
Source File: CLIConfig.scala From sbt-api-builder with MIT License | 5 votes |
package apibuilder.sbt

import java.io.{File, FileNotFoundException}
import java.nio.file.{Path, PathMatcher}

import io.circe.Decoder
import io.circe.yaml.parser
import sbt.IO

final case class CLIConfig(organizationFor: Map[String, OrganizationConfig]) extends AnyVal
final case class OrganizationConfig(applicationFor: Map[String, ApplicationConfig]) extends AnyVal
final case class ApplicationConfig(version: String, generators: Seq[GeneratorConfig])
final case class GeneratorConfig(generator: String, maybeTargetPath: Option[Path], pathMatchers: Seq[PathMatcher])

object CLIConfig extends BaseDecoders {

  final def load(f: File): Either[ConfigException, CLIConfig] =
    if (!f.getParentFile.exists) Left(MissingParentDirectory(f))
    else {
      try {
        IO.reader(f) { r =>
          parser
            .parse(r)
            .left
            .map(pf => InvalidContent(pf.message))
            .flatMap(_.as[CLIConfig].left.map(df => InvalidContent(df.message)))
        }
      } catch {
        case _: FileNotFoundException => Left(MissingFile(f))
      }
    }

  implicit final val cliConfigDecoder: Decoder[CLIConfig] = Decoder.instance { c =>
    c.downField("code").as[Map[String, OrganizationConfig]].map(CLIConfig.apply)
  }
  implicit final val organizationConfigDecoder: Decoder[OrganizationConfig] = Decoder.instance { c =>
    c.value.as[Map[String, ApplicationConfig]].map(OrganizationConfig.apply)
  }
  implicit final val applicationConfig: Decoder[ApplicationConfig] = Decoder.instance { c =>
    for {
      version    <- c.downField("version").as[String]
      generators <- c.downField("generators").as[Seq[GeneratorConfig]]
    } yield ApplicationConfig(version, generators)
  }
  implicit final val generatorConfigDecoder: Decoder[GeneratorConfig] = Decoder.instance { c =>
    for {
      generator       <- c.downField("generator").as[String]
      maybeTargetPath <- c.downField("target").as[Option[Path]]
      pathMatchers    <- c.downField("files").as[Seq[PathMatcher]]
    } yield GeneratorConfig(generator, maybeTargetPath, pathMatchers)
  }
}
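For reference, the decoded shape built by hand; the organization, application, and glob values are illustrative, with PathMatcher coming from the default filesystem's glob syntax:

import java.nio.file.{FileSystems, Paths}
import apibuilder.sbt.{ApplicationConfig, CLIConfig, GeneratorConfig, OrganizationConfig}

val config = CLIConfig(Map(
  "my-org" -> OrganizationConfig(Map(        // hypothetical organization key
    "my-app" -> ApplicationConfig(           // hypothetical application key
      version = "0.1.0",
      generators = Seq(GeneratorConfig(
        generator = "scala_models",
        maybeTargetPath = Some(Paths.get("src/main/scala")),
        pathMatchers = Seq(FileSystems.getDefault.getPathMatcher("glob:**/*.scala"))
      ))
    )
  ))
))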
Example 59
Source File: servers.scala From embedded-kafka-schema-registry with MIT License | 5 votes |
package net.manub.embeddedkafka.schemaregistry

import java.nio.file.Path

import io.confluent.kafka.schemaregistry.rest.SchemaRegistryRestApplication
import kafka.server.KafkaServer
import net.manub.embeddedkafka.{
  EmbeddedServer,
  EmbeddedServerWithKafka,
  EmbeddedZ
}

import scala.reflect.io.Directory

case class EmbeddedSR(app: SchemaRegistryRestApplication) extends EmbeddedServer {
  override def stop(clearLogs: Boolean = false): Unit = app.stop()
}

case class EmbeddedKWithSR(
    factory: Option[EmbeddedZ],
    broker: KafkaServer,
    app: EmbeddedSR,
    logsDirs: Path,
    config: EmbeddedKafkaConfig
) extends EmbeddedServerWithKafka {
  override def stop(clearLogs: Boolean): Unit = {
    app.stop()
    broker.shutdown()
    broker.awaitShutdown()

    factory.foreach(_.stop(clearLogs))

    if (clearLogs) {
      val _ = Directory(logsDirs.toFile).deleteRecursively
    }
  }
}
Example 60
Source File: SrcFile.scala From MoVE with Mozilla Public License 2.0 | 5 votes |
package de.thm.move.models

import java.nio.file.{Files, Path}

import de.thm.move.loader.parser.ast._

import scala.collection.JavaConversions._

  def getAfterModel: String = {
    model.annot match {
      case Icon(_, _, start, end) =>
        val endLineIdx = end.line - 1
        val afterLines = lines.drop(endLineIdx + 1)
        val afterChars = lines(endLineIdx).drop(end.column)
        afterChars + "\n" + afterLines.mkString("\n")
      case WithoutIcon(pos) =>
        val endLineIdx = pos.line - 1
        val afterLines = lines.drop(endLineIdx + 1)
        val afterChars = lines(endLineIdx).drop(pos.column)
        // there is "annotation( .. )" but parser killed the closing ")" so add it again
        ")" + afterChars + "\n" + afterLines.mkString("\n")
      case NoAnnotation(pos) =>
        val endLineIdx = pos.line - 1
        val afterLines = lines.drop(endLineIdx + 1)
        val afterChars = lines(endLineIdx).drop(pos.column - 1)
        // there is no annotation: generate closing the newly added annotation( .. )
        ");\n" + afterChars + "\n" + afterLines.mkString("\n")
    }
  }

  def isSame(p: Path): Boolean = {
    val x = Files.getLastModifiedTime(p)
    val xx = lastModifiedTimestamp
    Files.getLastModifiedTime(p) equals lastModifiedTimestamp
  }

  def noExternalChanges: Boolean = isSame(file)
}
Example 61
Source File: Global.scala From MoVE with Mozilla Public License 2.0 | 5 votes |
package de.thm.move

import java.net.URL
import java.nio.charset.Charset
import java.nio.file.{Files, Path, Paths}
import java.util.{Locale, ResourceBundle}

import de.thm.move.config.{Config, ConfigLoader}
import de.thm.move.history.History
import de.thm.move.shortcuts.ShortCutHandler
import de.thm.move.util.CustomResourceBundle

object Global {

  private val configDirectoryName = ".move"
  private val configDirPath = Paths.get(System.getProperty("user.home"), configDirectoryName)

  def zippedUndo[A, B](xs: List[A])(fn: A => B)(exec: A => Unit, undo: A => B => Unit): Unit = {
    val zipped = xs zip xs.map(fn)
    history.execute {
      xs.foreach(exec)
    } {
      zipped.foreach { case (a, b) => undo(a)(b) }
    }
  }
}
Example 62
Source File: RecentlyFilesHandler.scala From MoVE with Mozilla Public License 2.0 | 5 votes |
package de.thm.move.controllers

import java.nio.file.{Files, Path, StandardOpenOption}
import javafx.event.ActionEvent
import javafx.scene.control.MenuItem

import de.thm.move.Global
import de.thm.move.implicits.FxHandlerImplicits._
import de.thm.move.util.JFxUtils
import de.thm.recent._
import spray.json.JsonFormat

class RecentlyFilesHandler(recent: Recent[Path], pathClicked: Path => Unit) {

  private def menuItem(path: Path): MenuItem = {
    val item = new MenuItem(path.toString)
    JFxUtils.addFontIcon(item, "\uf1c9")
    item.setOnAction { _: ActionEvent =>
      incrementPriorityOf(path)
      println(recent.recentValuesByPriority)
      pathClicked(path)
    }
    item
  }

  def incrementPriorityOf(path: Path): Unit =
    recent.incrementPriority(path)

  def getMenuItems: Seq[MenuItem] =
    recent.recentElementsByPriority.map(menuItem)

  def writeTo(outputFile: Path)(implicit pathFormat: JsonFormat[Path]): Unit = {
    val jsonString = recent.toJson
    Files.write(outputFile, jsonString.getBytes(Global.encoding))
  }
}
Example 63
Source File: ResourceUtils.scala From MoVE with Mozilla Public License 2.0 | 5 votes |
package de.thm.move.util

import java.net.URI
import java.nio.file.{Files, Path, Paths}
import java.util.Base64
import javafx.scene.paint.Color

import de.thm.move.Global._

object ResourceUtils {

  def getFilename(uri: URI): String = {
    val uriStr = uri.toString
    uriStr.substring(uriStr.lastIndexOf("/") + 1, uriStr.length)
  }

  def getFilename(p: Path): String =
    p.getFileName.toString

  def asColor(key: String): Option[Color] =
    config.getString(key).map(Color.web)

  def copy(src: URI, target: URI): Unit = {
    val targetPath = Paths.get(target).getParent
    val srcPath = Paths.get(src)
    val filename = srcPath.getFileName
    Files.copy(srcPath, targetPath.resolve(filename))
  }
}
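A quick check of the filename helpers above; the values are illustrative:

import java.net.URI
import java.nio.file.Paths
import de.thm.move.util.ResourceUtils

println(ResourceUtils.getFilename(new URI("http://host/images/icon.png"))) // icon.png
println(ResourceUtils.getFilename(Paths.get("/tmp/icon.png")))             // icon.png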
Example 64
Source File: ScalafmtSbtReporter.scala From sbt-scalafmt with Apache License 2.0 | 5 votes |
package org.scalafmt.sbt

import java.io.PrintWriter
import java.io.OutputStreamWriter
import java.nio.file.Path

import org.scalafmt.interfaces.ScalafmtReporter
import sbt.internal.util.MessageOnlyException
import sbt.util.Logger

import scala.util.control.NoStackTrace

class ScalafmtSbtReporter(log: Logger, out: OutputStreamWriter) extends ScalafmtReporter {

  override def error(file: Path, message: String): Unit = {
    throw new MessageOnlyException(s"$message: $file")
  }

  override def error(file: Path, e: Throwable): Unit = {
    if (e.getMessage != null) {
      error(file, e.getMessage)
    } else {
      throw new FailedToFormat(file.toString, e)
    }
  }

  override def error(file: Path, message: String, e: Throwable): Unit = {
    if (e.getMessage != null) {
      error(file, s"$message: ${e.getMessage()}")
    } else {
      throw new FailedToFormat(file.toString, e)
    }
  }

  override def excluded(file: Path): Unit =
    log.debug(s"file excluded: $file")

  override def parsedConfig(config: Path, scalafmtVersion: String): Unit =
    log.debug(s"parsed config (v$scalafmtVersion): $config")

  override def downloadWriter(): PrintWriter = new PrintWriter(out)
  override def downloadOutputStreamWriter(): OutputStreamWriter = out

  private class FailedToFormat(filename: String, cause: Throwable)
      extends Exception(filename, cause)
      with NoStackTrace
}
Example 65
Source File: BigQueryConfig.scala From scio with Apache License 2.0 | 5 votes |
package com.spotify.scio.bigquery.client

import java.nio.file.{Path, Paths}

import com.google.api.services.bigquery.BigqueryScopes
import com.spotify.scio.CoreSysProps
import com.spotify.scio.bigquery.BigQuerySysProps
import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.TypedRead.QueryPriority

import scala.util.Try

object BigQueryConfig {

  private[this] val PriorityDefault: QueryPriority = QueryPriority.BATCH

  private[this] val DefaultScopes = List(BigqueryScopes.BIGQUERY)

  private[this] val DefaultLocation = "US"

  def location: String = DefaultLocation

  def scopes: Seq[String] = DefaultScopes

  def isCacheEnabled: Boolean =
    BigQuerySysProps.CacheEnabled.valueOption
      .flatMap(x => Try(x.toBoolean).toOption)
      .getOrElse(CacheEnabledDefault)

  def cacheDirectory: Path =
    BigQuerySysProps.CacheDirectory.valueOption.map(Paths.get(_)).getOrElse(CacheDirectoryDefault)

  def connectTimeoutMs: Option[Int] =
    BigQuerySysProps.ConnectTimeoutMs.valueOption.map(_.toInt)

  def readTimeoutMs: Option[Int] =
    BigQuerySysProps.ReadTimeoutMs.valueOption.map(_.toInt)

  def priority: QueryPriority = {
    lazy val isCompilingOrTesting = Thread
      .currentThread()
      .getStackTrace
      .exists { e =>
        e.getClassName.startsWith("scala.tools.nsc.interpreter.") ||
        e.getClassName.startsWith("org.scalatest.tools.")
      }

    BigQuerySysProps.Priority.valueOption.map(_.toUpperCase) match {
      case Some("INTERACTIVE")       => QueryPriority.INTERACTIVE
      case Some("BATCH")             => QueryPriority.BATCH
      case _ if isCompilingOrTesting => QueryPriority.INTERACTIVE
      case _                         => PriorityDefault
    }
  }
}
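An illustrative read of the resolved settings above; the output depends on the bigquery.* system properties in effect:

import com.spotify.scio.bigquery.client.BigQueryConfig

println(BigQueryConfig.priority)       // BATCH unless overridden, INTERACTIVE inside a REPL or test run
println(BigQueryConfig.location)       // US
println(BigQueryConfig.isCacheEnabled)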
Example 66
Source File: FileDownloadDoFnTest.scala From scio with Apache License 2.0 | 5 votes |
package com.spotify.scio.transforms

import java.nio.file.{Files, Path}

import com.spotify.scio.testing._
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Charsets
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.io.{Files => GFiles}

import scala.jdk.CollectionConverters._

class FileDownloadDoFnTest extends PipelineSpec {
  "FileDownloadDoFn" should "work" in {
    val tmpDir = Files.createTempDirectory("filedofn-")
    val files = createFiles(tmpDir, 100)
    runWithContext { sc =>
      val p = sc.parallelize(files.map(_.toUri)).flatMapFile(fn)
      p.keys should containInAnyOrder((1 to 100).map(_.toString))
      p.values.distinct should forAll { f: Path => !Files.exists(f) }
    }
    files.foreach(Files.delete)
    Files.delete(tmpDir)
  }

  it should "support batch" in {
    val tmpDir = Files.createTempDirectory("filedofn-")
    val files = createFiles(tmpDir, 100)
    runWithContext { sc =>
      val p = sc.parallelize(files.map(_.toUri)).flatMapFile(fn, 10, false)
      p.keys should containInAnyOrder((1 to 100).map(_.toString))
      p.values.distinct should forAll { f: Path => !Files.exists(f) }
    }
    files.foreach(Files.delete)
    Files.delete(tmpDir)
  }

  it should "support keeping downloaded files" in {
    val tmpDir = Files.createTempDirectory("filedofn-")
    val files = createFiles(tmpDir, 100)
    runWithContext { sc =>
      val p = sc.parallelize(files.map(_.toUri)).flatMapFile(fn, 10, true)
      p.keys should containInAnyOrder((1 to 100).map(_.toString))
      p.values.distinct should forAll { f: Path =>
        val r = Files.exists(f)
        if (r) {
          Files.delete(f)
        }
        r
      }
    }
    files.foreach(Files.delete)
    Files.delete(tmpDir)
  }

  private def createFiles(dir: Path, n: Int): Seq[Path] =
    (1 to n).map { i =>
      val file = dir.resolve("part-%05d-of-%05d.txt".format(i, n))
      GFiles.asCharSink(file.toFile, Charsets.UTF_8).write(i.toString)
      file
    }

  private def fn(input: Path) = Files.readAllLines(input).asScala.map((_, input))
}
Example 67
Source File: TapsTest.scala From scio with Apache License 2.0 | 5 votes |
package com.spotify.scio.io

import java.io.File
import java.nio.file.{Files, Path}
import java.util.UUID

import com.spotify.scio.CoreSysProps
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

import scala.concurrent.Await
import scala.concurrent.duration._

class TapsTest extends AnyFlatSpec with Matchers {
  val data = Seq("a", "b", "c")

  private def tmpFile: Path =
    new File(new File(CoreSysProps.TmpDir.value), "taps-test-" + UUID.randomUUID()).toPath

  private def writeText(p: Path, data: Seq[String]): Unit = {
    val writer = Files.newBufferedWriter(p)
    data.foreach { s =>
      writer.write(s)
      writer.newLine()
    }
    writer.close()
  }

  "ImmediateTap" should "work with text file" in {
    TapsSysProps.Algorithm.value = "immediate"
    val f = tmpFile
    writeText(f, data)
    val future = Taps().textFile(f.toString)
    future.isCompleted shouldBe true
    future.value.get.isSuccess shouldBe true
    Await.result(future, Duration.Inf).value.toSeq shouldBe data
    Files.delete(f)
  }

  it should "fail missing text file" in {
    TapsSysProps.Algorithm.value = "immediate"
    val f = tmpFile
    val future = Taps().textFile(f.toString)
    future.isCompleted shouldBe true
    future.value.get.isSuccess shouldBe false
  }

  "PollingTap" should "work with text file" in {
    TapsSysProps.Algorithm.value = "polling"
    TapsSysProps.PollingInitialInterval.value = "1000"
    TapsSysProps.PollingMaximumAttempts.value = "1"
    val f = tmpFile
    val future = Taps().textFile(f.toString)
    future.isCompleted shouldBe false
    writeText(f, data)
    val result = Await.result(future, 10.seconds)
    result.value.toSeq shouldBe data
    Files.delete(f)
  }

  it should "fail missing text file" in {
    TapsSysProps.Algorithm.value = "polling"
    TapsSysProps.PollingInitialInterval.value = "1000"
    TapsSysProps.PollingMaximumAttempts.value = "1"
    val f = tmpFile
    val future = Taps().textFile(f.toString)
    future.isCompleted shouldBe false
    Await.ready(future, 10.seconds)
  }
}
Example 68
Source File: BazelRunfiles.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.bazeltools

import java.nio.file.{Path, Paths}

import com.google.devtools.build.runfiles.Runfiles

trait BazelRunfiles {
  private val MainWorkspace = "com_github_digital_asset_daml"

  private val MainWorkspacePath = Paths.get(MainWorkspace)

  private val inBazelEnvironment =
    Set("RUNFILES_DIR", "JAVA_RUNFILES", "RUNFILES_MANIFEST_FILE", "RUNFILES_MANIFEST_ONLY")
      .exists(sys.env.contains)

  def rlocation(path: String): String =
    if (inBazelEnvironment) Runfiles.create.rlocation(MainWorkspace + "/" + path)
    else path

  def rlocation(path: Path): Path =
    if (inBazelEnvironment) {
      val workspacePathString = MainWorkspacePath
        .resolve(path)
        .toString
        .replaceAllLiterally("\\", "/")
      val runfilePath = Option(Runfiles.create.rlocation(workspacePathString))
      Paths.get(runfilePath.getOrElse(throw new IllegalArgumentException(path.toString)))
    } else path

  def requiredResource(name: String): java.io.File = {
    val file = new java.io.File(rlocation(name))
    if (file.exists()) file
    else throw new IllegalStateException(s"File does not exist: ${file.getAbsolutePath}")
  }
}

object BazelRunfiles extends BazelRunfiles
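A minimal usage sketch; the resource path is hypothetical:

object ResolveResource extends App with BazelRunfiles {
  // Under Bazel this resolves through the runfiles tree;
  // outside Bazel it falls back to the plain relative path.
  val dar = requiredResource("ledger/test-common/model-tests.dar")
  println(s"Found: ${dar.getAbsolutePath}")
}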
Example 69
Source File: MetricsReporter.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.platform.configuration

import java.net.{InetAddress, InetSocketAddress}
import java.nio.file.{Path, Paths}

import com.codahale.metrics
import com.codahale.metrics.{MetricRegistry, ScheduledReporter}
import com.daml.platform.sandbox.config.InvalidConfigException
import com.google.common.net.HostAndPort
import scopt.Read

import scala.util.Try

sealed trait MetricsReporter {
  def register(registry: MetricRegistry): ScheduledReporter
}

object MetricsReporter {

  case object Console extends MetricsReporter {
    override def register(registry: MetricRegistry): ScheduledReporter =
      metrics.ConsoleReporter
        .forRegistry(registry)
        .build()
  }

  final case class Csv(directory: Path) extends MetricsReporter {
    override def register(registry: MetricRegistry): ScheduledReporter =
      metrics.CsvReporter
        .forRegistry(registry)
        .build(directory.toFile)
  }

  final case class Graphite(address: InetSocketAddress) extends MetricsReporter {
    override def register(registry: MetricRegistry): ScheduledReporter =
      metrics.graphite.GraphiteReporter
        .forRegistry(registry)
        .build(new metrics.graphite.Graphite(address))
  }

  object Graphite {
    val defaultHost: InetAddress = InetAddress.getLoopbackAddress
    val defaultPort: Int = 2003

    def apply(): Graphite =
      Graphite(new InetSocketAddress(defaultHost, defaultPort))

    def apply(port: Int): Graphite =
      Graphite(new InetSocketAddress(defaultHost, port))
  }

  implicit val metricsReporterRead: Read[MetricsReporter] = Read.reads {
    _.split(":", 2).toSeq match {
      case Seq("console") => Console
      case Seq("csv", directory) => Csv(Paths.get(directory))
      case Seq("graphite") => Graphite()
      case Seq("graphite", address) =>
        Try(address.toInt)
          .map(port => Graphite(port))
          .recover {
            case _: NumberFormatException =>
              //noinspection UnstableApiUsage
              val hostAndPort = HostAndPort
                .fromString(address)
                .withDefaultPort(Graphite.defaultPort)
              Graphite(new InetSocketAddress(hostAndPort.getHost, hostAndPort.getPort))
          }
          .get
      case _ =>
        throw new InvalidConfigException(
          """Must be one of "console", "csv:PATH", or "graphite[:HOST][:PORT]".""")
    }
  }
}
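The implicit `Read` instance is what lets scopt parse reporter CLI values; a quick sketch of the accepted forms (host and paths are illustrative):

import com.daml.platform.configuration.MetricsReporter.metricsReporterRead

metricsReporterRead.reads("console")                      // Console
metricsReporterRead.reads("csv:/var/log/metrics")         // Csv(/var/log/metrics)
metricsReporterRead.reads("graphite")                     // Graphite on localhost:2003
metricsReporterRead.reads("graphite:carbon.example:2003") // Graphite(carbon.example:2003)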
Example 70
Source File: SandboxConfig.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.platform.sandbox.config

import java.io.File
import java.nio.file.Path
import java.time.Duration

import ch.qos.logback.classic.Level
import com.daml.caching.SizedCache
import com.daml.ledger.api.auth.AuthService
import com.daml.ledger.api.tls.TlsConfiguration
import com.daml.ledger.participant.state.v1.SeedService.Seeding
import com.daml.platform.common.LedgerIdMode
import com.daml.platform.configuration.{CommandConfiguration, LedgerConfiguration, MetricsReporter}
import com.daml.platform.services.time.TimeProviderType
import com.daml.ports.Port

final case class SandboxConfig(
    address: Option[String],
    port: Port,
    portFile: Option[Path],
    damlPackages: List[File],
    timeProviderType: Option[TimeProviderType],
    commandConfig: CommandConfiguration,
    ledgerConfig: LedgerConfiguration,
    tlsConfig: Option[TlsConfiguration],
    scenario: Option[String],
    implicitPartyAllocation: Boolean,
    ledgerIdMode: LedgerIdMode,
    maxInboundMessageSize: Int,
    jdbcUrl: Option[String],
    eagerPackageLoading: Boolean,
    logLevel: Option[Level],
    authService: Option[AuthService],
    seeding: Option[Seeding],
    metricsReporter: Option[MetricsReporter],
    metricsReportingInterval: Duration,
    eventsPageSize: Int,
    lfValueTranslationEventCacheConfiguration: SizedCache.Configuration,
    lfValueTranslationContractCacheConfiguration: SizedCache.Configuration,
    profileDir: Option[Path],
    stackTraces: Boolean,
)

object SandboxConfig {
  val DefaultPort: Port = Port(6865)

  val DefaultMaxInboundMessageSize: Int = 4 * 1024 * 1024

  val DefaultEventsPageSize: Int = 1000

  val DefaultTimeProviderType: TimeProviderType = TimeProviderType.WallClock

  val DefaultLfValueTranslationCacheConfiguration: SizedCache.Configuration =
    SizedCache.Configuration.none

  lazy val nextDefault: SandboxConfig =
    SandboxConfig(
      address = None,
      port = DefaultPort,
      portFile = None,
      damlPackages = Nil,
      timeProviderType = None,
      commandConfig = CommandConfiguration.default,
      ledgerConfig = LedgerConfiguration.defaultLocalLedger,
      tlsConfig = None,
      scenario = None,
      implicitPartyAllocation = true,
      ledgerIdMode = LedgerIdMode.Dynamic,
      maxInboundMessageSize = DefaultMaxInboundMessageSize,
      jdbcUrl = None,
      eagerPackageLoading = false,
      logLevel = None, // the default is in logback.xml
      authService = None,
      seeding = Some(Seeding.Strong),
      metricsReporter = None,
      metricsReportingInterval = Duration.ofSeconds(10),
      eventsPageSize = DefaultEventsPageSize,
      lfValueTranslationEventCacheConfiguration = DefaultLfValueTranslationCacheConfiguration,
      lfValueTranslationContractCacheConfiguration = DefaultLfValueTranslationCacheConfiguration,
      profileDir = None,
      stackTraces = true,
    )

  lazy val default: SandboxConfig =
    nextDefault.copy(
      seeding = None,
      ledgerConfig = LedgerConfiguration.defaultLedgerBackedIndex,
    )
}
Example 71
Source File: ApiServerConfig.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.platform.apiserver

import java.io.File
import java.nio.file.Path

import com.daml.ledger.participant.state.v1.ParticipantId
import com.daml.ledger.participant.state.v1.SeedService.Seeding
import com.daml.ledger.api.tls.TlsConfiguration
import com.daml.platform.configuration.IndexConfiguration
import com.daml.ports.Port

case class ApiServerConfig(
    participantId: ParticipantId,
    archiveFiles: List[File],
    port: Port,
    address: Option[String], // This defaults to "localhost" when set to `None`.
    jdbcUrl: String,
    tlsConfig: Option[TlsConfiguration],
    maxInboundMessageSize: Int,
    eventsPageSize: Int = IndexConfiguration.DefaultEventsPageSize,
    portFile: Option[Path],
    seeding: Seeding,
)
Example 72
Source File: Tests.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.ledger.api.testtool

import java.nio.file.Path

import com.daml.ledger.api.testtool
import com.daml.ledger.api.testtool.infrastructure.{BenchmarkReporter, LedgerTestSuite}
import com.daml.ledger.api.testtool.tests._
import org.slf4j.LoggerFactory

object Tests {
  type Tests = Map[String, LedgerTestSuite]

  def performanceTests(path: Option[Path]): Tests = {
    val reporter =
      (key: String, value: Double) =>
        path
          .map(BenchmarkReporter.toFile)
          .getOrElse(BenchmarkReporter.toStream(System.out))
          .addReport(key, value)

    Envelope.values.flatMap { envelope =>
      val throughputKey: String = performanceEnvelopeThroughputTestKey(envelope)
      val latencyKey: String = performanceEnvelopeLatencyTestKey(envelope)
      val transactionSizeKey: String = performanceEnvelopeTransactionSizeTestKey(envelope)
      List(
        throughputKey -> new testtool.tests.PerformanceEnvelope.ThroughputTest(
          logger = LoggerFactory.getLogger(throughputKey),
          envelope = envelope,
          reporter = reporter,
        ),
        latencyKey -> new testtool.tests.PerformanceEnvelope.LatencyTest(
          logger = LoggerFactory.getLogger(latencyKey),
          envelope = envelope,
          reporter = reporter,
        ),
        transactionSizeKey -> new testtool.tests.PerformanceEnvelope.TransactionSizeScaleTest(
          logger = LoggerFactory.getLogger(transactionSizeKey),
          envelope = envelope,
        ),
      )
    }.toMap
  }

  private[this] def performanceEnvelopeThroughputTestKey(envelope: Envelope): String =
    s"PerformanceEnvelope.${envelope.name}.Throughput"

  private[this] def performanceEnvelopeLatencyTestKey(envelope: Envelope): String =
    s"PerformanceEnvelope.${envelope.name}.Latency"

  private[this] def performanceEnvelopeTransactionSizeTestKey(envelope: Envelope): String =
    s"PerformanceEnvelope.${envelope.name}.TransactionSize"

  private[testtool] val PerformanceTestsKeys =
    Envelope.values.flatMap { envelope =>
      List(
        performanceEnvelopeThroughputTestKey(envelope),
        performanceEnvelopeLatencyTestKey(envelope),
        performanceEnvelopeTransactionSizeTestKey(envelope),
      )
    }
}
Example 73
Source File: Config.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.ledger.api.testtool

import java.io.File
import java.nio.file.Path

import com.daml.ledger.api.testtool.infrastructure.PartyAllocationConfiguration
import com.daml.ledger.api.tls.TlsConfiguration

final case class Config(
    participants: Vector[(String, Int)],
    darPackages: List[File],
    mustFail: Boolean,
    verbose: Boolean,
    timeoutScaleFactor: Double,
    loadScaleFactor: Double,
    concurrentTestRuns: Int,
    extract: Boolean,
    tlsConfig: Option[TlsConfiguration],
    excluded: Set[String],
    included: Set[String],
    performanceTests: Set[String],
    performanceTestsReport: Option[Path],
    listTests: Boolean,
    listTestSuites: Boolean,
    allTests: Boolean,
    shuffleParticipants: Boolean,
    partyAllocation: PartyAllocationConfiguration,
)

object Config {
  val default: Config = Config(
    participants = Vector.empty,
    darPackages = Nil,
    mustFail = false,
    verbose = false,
    timeoutScaleFactor = 1.0,
    loadScaleFactor = 1.0,
    concurrentTestRuns = Runtime.getRuntime.availableProcessors(),
    extract = false,
    tlsConfig = None,
    excluded = Set.empty,
    included = Set.empty,
    performanceTests = Set.empty,
    performanceTestsReport = None,
    listTests = false,
    listTestSuites = false,
    allTests = false,
    shuffleParticipants = false,
    partyAllocation = PartyAllocationConfiguration.ClosedWorldWaitingForAllParticipants,
  )
}
Example 74
Source File: Util.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.extractor.helpers

import java.net.URI
import java.nio.file.{Files, Path, Paths}

object Util {

  @annotation.varargs
  def guessRelativeFileLocation(filenames: String*): URI = {
    val uri = guessPath(filenames)
    Paths.get(".").toAbsolutePath.relativize(uri).toUri
  }

  @annotation.varargs
  def guessFileLocation(filenames: String*): URI = guessPath(filenames).toUri

  private def cwd = Paths.get(".").toAbsolutePath

  def guessPath(filenames: Seq[String]): Path = {
    def folders(from: Path): Stream[Path] =
      if (from == null) Stream.empty else from #:: folders(from.getParent)

    def guess(from: Path): Stream[Path] =
      folders(from).flatMap { d =>
        filenames.toStream.map(d.resolve)
      }

    val guesses = guess(cwd)

    guesses
      .find(Files.exists(_))
      .getOrElse(throw new IllegalStateException(s"""Could not find ${filenames
        .mkString(", ")}, having searched:
        |${guesses.mkString("\n")}""".stripMargin))
  }
}
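A usage sketch: `guessPath` walks from the working directory up through every parent, resolving each candidate name against each folder, and returns the first path that exists (the file names below are illustrative):

import com.daml.extractor.helpers.Util

// Throws IllegalStateException listing every searched path if nothing matches.
val confUri = Util.guessFileLocation("extractor.conf", "application.conf")
println(confUri)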
Example 75
Source File: ExtractorConfig.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.extractor.config

import java.nio.file.Path
import java.util.UUID

import scalaz.{OneAnd, Order}
import scalaz.syntax.foldable._
import scalaz.syntax.functor._
import scalaz.std.list._
import scalaz.std.string._
import com.daml.lf.data.Ref.Party
import com.daml.ledger.api.v1.ledger_offset.LedgerOffset
import com.daml.ledger.api.tls.TlsConfiguration
import com.daml.ports.Port

sealed abstract class SnapshotEndSetting

object SnapshotEndSetting {
  case object Head extends SnapshotEndSetting
  case object Follow extends SnapshotEndSetting
  final case class Until(offset: String) extends SnapshotEndSetting
}

final case class ExtractorConfig(
    ledgerHost: String,
    ledgerPort: Port,
    ledgerInboundMessageSizeMax: Int,
    from: LedgerOffset,
    to: SnapshotEndSetting,
    parties: ExtractorConfig.Parties,
    templateConfigs: Set[TemplateConfig],
    tlsConfig: TlsConfiguration,
    accessTokenFile: Option[Path],
    appId: String = s"Extractor-${UUID.randomUUID().toString}"
) {
  def partySpec: String = parties.widen[String] intercalate ","
}

object ExtractorConfig {
  type Parties = OneAnd[List, Party]
}

final case class TemplateConfig(moduleName: String, entityName: String)

object TemplateConfig {
  implicit val templateConfigOrdering: Ordering[TemplateConfig] =
    Ordering.by(TemplateConfig.unapply)
  implicit val templateConfigOrder: Order[TemplateConfig] =
    Order.fromScalaOrdering
}
Example 76
Source File: JsonProtocol.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml

import java.nio.file.{Files, Path}

import com.daml.ledger.api.v1.value.Record
import com.daml.ledger.api.validation.ValueValidator
import com.daml.lf.value.json.ApiCodecCompressed
import spray.json._

object JsonProtocol extends DefaultJsonProtocol {

  private def cannotReadDamlLf(): RuntimeException =
    new UnsupportedOperationException("Reading JSON-encoded DAML-LF value is not supported")

  implicit object RecordJsonFormat extends JsonFormat[Record] {
    override def read(json: JsValue): Record =
      throw cannotReadDamlLf()
    override def write(record: Record): JsValue =
      ApiCodecCompressed.apiValueToJsValue(
        ValueValidator.validateRecord(record).right.get.mapContractId(_.coid)
      )
  }

  private implicit class JsObjectWith(val jsObject: JsObject) extends AnyVal {
    def +(pair: (String, JsValue)): JsObject =
      jsObject.copy(fields = jsObject.fields + pair)
  }

  import Application._

  implicit val createdFormat: RootJsonFormat[CreatedResult] =
    jsonFormat4(CreatedResult.apply)
  implicit val archivedFormat: RootJsonFormat[ArchivedResult] =
    jsonFormat3(ArchivedResult.apply)
  implicit val eventFormat: RootJsonFormat[EventResult] =
    new RootJsonFormat[EventResult] {
      override def read(json: JsValue): Application.EventResult =
        throw cannotReadDamlLf()
      override def write(eventResult: EventResult): JsValue =
        eventResult match {
          case create: CreatedResult =>
            createdFormat
              .write(create)
              .asJsObject + ("type" -> JsString("created"))
          case archive: ArchivedResult =>
            archivedFormat
              .write(archive)
              .asJsObject + ("type" -> JsString("archived"))
        }
    }
  implicit val contractFormat: RootJsonFormat[ContractResult] =
    jsonFormat2(Application.ContractResult.apply)
  implicit val transactionFormat: RootJsonFormat[TransactionResult] =
    jsonFormat2(Application.TransactionResult.apply)

  def saveAsJson[A: JsonWriter](outputFile: Path, a: A): Unit = {
    val _ = Files.write(outputFile, a.toJson.prettyPrint.getBytes())
  }
}
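Because `JsonProtocol` extends spray-json's `DefaultJsonProtocol`, `saveAsJson` accepts any value with a `JsonWriter` in scope; a tiny sketch with a hypothetical output path:

import java.nio.file.Paths
import com.daml.JsonProtocol._

// Map[String, Int] gets its JsonWriter from the inherited DefaultJsonProtocol.
saveAsJson(Paths.get("/tmp/report.json"), Map("created" -> 3, "archived" -> 1))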
Example 77
Source File: Config.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml

import java.nio.file.{Path, Paths}

import scopt.{OptionParser, Read}

object Config {

  private implicit val pathRead: Read[Path] = Read.reads(Paths.get(_))

  private implicit val readTest: Read[MigrationStep.Test] =
    Read.stringRead.map(s =>
      s.split(",", -1) match {
        case Array(Divulgence.ApplicationId, owner, divulgee, suffix) =>
          new Divulgence(owner, divulgee, suffix)
        case Array(KeyTransfer.ApplicationId, owner, receiver, suffix) =>
          new KeyTransfer(owner, receiver, suffix)
        case Array(ProposeAccept.ApplicationId, proposer, accepter, note) =>
          new ProposeAccept(proposer, accepter, note)
        case _ =>
          throw new IllegalArgumentException(s"Illegal test name or parameters '$s'")
    })

  val parser: OptionParser[Config] = new scopt.OptionParser[Config]("migration-step") {
    opt[Path]("dar")
      .action((dar, c) => c.copy(dar = dar))
      .required()
    opt[String]("host")
      .action((host, c) => c.copy(host = host))
      .required()
    opt[Int]("port")
      .action((port, c) => c.copy(port = port))
      .required()
    opt[Path]("output")
      .action((path, c) => c.copy(outputFile = path))
      .required()
    opt[MigrationStep.Test]("test")
      .action((test, c) => c.copy(test = test))
      .required()
  }

  // Null-safety is provided by the CLI parser making all fields required
  val default: Config = Config(null, 0, null, null, null)

  sealed trait Test {
    def host: String
    def port: Int
    def outputFile: Path
  }
}

final case class Config(
    host: String,
    port: Int,
    outputFile: Path,
    dar: Path,
    test: MigrationStep.Test,
) extends Config.Test
Example 78
Source File: MigrationStep.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml

import java.nio.file.Path

import akka.actor.ActorSystem
import akka.stream.Materializer
import com.daml.grpc.adapter.{AkkaExecutionSequencerPool, ExecutionSequencerFactory}
import com.daml.lf.archive.DarReader
import scalaz.syntax.traverse._

import scala.concurrent.{ExecutionContext, Future}
import scala.util.control.NonFatal

object MigrationStep {

  trait Test {
    def execute(packageId: String, config: Config.Test)(
        implicit ec: ExecutionContext,
        esf: ExecutionSequencerFactory,
        mat: Materializer,
    ): Future[Unit]
  }

  private def readPackageId(path: Path): String =
    DarReader().readArchiveFromFile(path.toFile).get.map(_._1.toString).main

  def main(args: Array[String]): Unit = {
    val config = Config.parser.parse(args, Config.default).getOrElse(sys.exit(1))
    val packageId = readPackageId(config.dar)

    implicit val system: ActorSystem = ActorSystem(packageId)
    implicit val sequencer: ExecutionSequencerFactory =
      new AkkaExecutionSequencerPool(packageId)(system)
    implicit val ec: ExecutionContext = system.dispatcher

    val result = config.test.execute(packageId, config)

    result.failed.foreach { case NonFatal(e) => e.printStackTrace(System.err) }
    result.onComplete(_ => system.terminate())
  }
}
Example 79
Source File: Conf.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.lf.codegen.conf

import java.nio.file.{Path, Paths}

import ch.qos.logback.classic.Level
import com.daml.buildinfo.BuildInfo
import scopt.{OptionParser, Read}

final case class Conf(
    darFiles: Map[Path, Option[String]] = Map(),
    outputDirectory: Path,
    decoderPkgAndClass: Option[(String, String)] = None,
    verbosity: Level = Level.ERROR,
    roots: List[String] = Nil
)

object Conf {

  private[conf] final val PackageAndClassRegex =
    """(?:(\p{javaJavaIdentifierStart}\p{javaJavaIdentifierPart}+(?:\.\p{javaJavaIdentifierStart}\p{javaJavaIdentifierPart}+)*)\.)(\p{javaJavaIdentifierStart}\p{javaJavaIdentifierPart}+)""".r

  def parse(args: Array[String]): Option[Conf] =
    parser.parse(args, Conf(Map.empty, Paths.get(".")))

  def parser: OptionParser[Conf] = new scopt.OptionParser[Conf]("codegen") {
    head("codegen", BuildInfo.Version)

    note("Code generator for the DAML ledger bindings.\n")

    arg[(Path, Option[String])]("<DAR-file[=package-prefix]>...")(
      optTupleRead(readPath, Read.stringRead))
      .unbounded()
      .action((p, c) => c.copy(darFiles = c.darFiles + p))
      .required()
      .text("DAR file to use as input of the codegen with an optional, but recommended, package prefix for the generated sources.")

    opt[Path]('o', "output-directory")(readPath)
      .action((p, c) => c.copy(outputDirectory = p))
      .required()
      .text("Output directory for the generated sources")

    opt[(String, String)]('d', "decoderClass")(readClassName)
      .action((className, c) => c.copy(decoderPkgAndClass = Some(className)))
      .text("Fully Qualified Class Name of the optional Decoder utility")

    opt[Level]('V', "verbosity")(readVerbosity)
      .action((l, c) => c.copy(verbosity = l))
      .text("Verbosity between 0 (only show errors) and 4 (show all messages) -- defaults to 0")

    opt[String]('r', "root")(Read.stringRead)
      .unbounded()
      .action((rexp, c) => c.copy(roots = rexp :: c.roots))
      .text("Regular expression for fully-qualified names of templates to generate -- defaults to .*")

    help("help").text("This help text")
  }

  private[conf] val readPath: scopt.Read[Path] = scopt.Read.stringRead.map(s => Paths.get(s))

  val readClassName: scopt.Read[(String, String)] = scopt.Read.stringRead.map {
    case PackageAndClassRegex(p, c) => (p, c)
    case _ =>
      throw new IllegalArgumentException("Expected a Full Qualified Class Name")
  }

  val readVerbosity: scopt.Read[Level] = scopt.Read.stringRead.map {
    case "0" => Level.ERROR
    case "1" => Level.WARN
    case "2" => Level.INFO
    case "3" => Level.DEBUG
    case "4" => Level.TRACE
    case _ =>
      throw new IllegalArgumentException(
        "Expected a verbosity value between 0 (least verbose) and 4 (most verbose)")
  }

  private[conf] def optTupleRead[A: Read, B: Read]: Read[(A, Option[B])] =
    new Read[(A, Option[B])] {
      override def arity: Int = 2

      override def reads: String => (A, Option[B]) = { s: String =>
        s.split('=').toList match {
          case Nil =>
            throw new IllegalArgumentException("Expected a key with an optional value: key[=value]")
          case key :: Nil => (implicitly[Read[A]].reads(key), None)
          case key :: value :: Nil =>
            (implicitly[Read[A]].reads(key), Some(implicitly[Read[B]].reads(value)))
          case _ =>
            throw new IllegalArgumentException("Expected a key with an optional value: key[=value]")
        }
      }
    }
}
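`optTupleRead` implements the `key[=value]` argument syntax; a standalone sketch of the same split logic:

// Mirrors Conf.optTupleRead's parsing for a plain String key/value pair.
def keyValue(s: String): (String, Option[String]) =
  s.split('=').toList match {
    case key :: Nil          => (key, None)
    case key :: value :: Nil => (key, Some(value))
    case _ => throw new IllegalArgumentException("Expected a key with an optional value: key[=value]")
  }

keyValue("model.dar")                 // ("model.dar", None)
keyValue("model.dar=com.example.app") // ("model.dar", Some("com.example.app"))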
Example 80
Source File: Main.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.codegen

import java.io.File
import java.nio.file.Path

import ch.qos.logback.classic.Level
import com.daml.lf.codegen.conf.Conf
import com.typesafe.scalalogging.StrictLogging
import org.slf4j.{Logger, LoggerFactory}
import scalaz.Cord

import scala.collection.breakOut

object Main extends StrictLogging {

  private val codegenId = "Scala Codegen"

  @deprecated("Use codegen front-end: com.daml.codegen.CodegenMain.main", "0.13.23")
  def main(args: Array[String]): Unit =
    Conf.parse(args) match {
      case Some(conf) =>
        generateCode(conf)
      case None =>
        throw new IllegalArgumentException(
          s"Invalid ${codegenId: String} command line arguments: ${args.mkString(" "): String}")
    }

  def generateCode(conf: Conf): Unit = conf match {
    case Conf(darMap, outputDir, decoderPkgAndClass, verbosity, roots) =>
      setGlobalLogLevel(verbosity)
      logUnsupportedEventDecoderOverride(decoderPkgAndClass)
      val (dars, packageName) = darsAndOnePackageName(darMap)
      CodeGen.generateCode(dars, packageName, outputDir.toFile, CodeGen.Novel, roots)
  }

  private def setGlobalLogLevel(verbosity: Level): Unit = {
    LoggerFactory.getLogger(Logger.ROOT_LOGGER_NAME) match {
      case a: ch.qos.logback.classic.Logger =>
        a.setLevel(verbosity)
        logger.info(s"${codegenId: String} verbosity: ${verbosity.toString}")
      case _ =>
        logger.warn(s"${codegenId: String} cannot set requested verbosity: ${verbosity.toString}")
    }
  }

  private def logUnsupportedEventDecoderOverride(mapping: Option[(String, String)]): Unit =
    mapping.foreach {
      case (a, b) =>
        logger.warn(
          s"${codegenId: String} does not allow overriding Event Decoder, skipping: ${a: String} -> ${b: String}")
    }

  private def darsAndOnePackageName(darMap: Map[Path, Option[String]]): (List[File], String) = {
    val dars: List[File] = darMap.keys.map(_.toFile)(breakOut)
    val uniquePackageNames: Set[String] = darMap.values.collect { case Some(x) => x }(breakOut)
    uniquePackageNames.toSeq match {
      case Seq(packageName) =>
        (dars, packageName)
      case _ =>
        throw new IllegalStateException(
          s"${codegenId: String} expects all dars mapped to the same package name, " +
            s"requested: ${format(darMap): String}")
    }
  }

  private def format(map: Map[Path, Option[String]]): String = {
    val cord = map.foldLeft(Cord("{")) { (str, kv) =>
      str ++ kv._1.toFile.getAbsolutePath ++ "->" ++ kv._2.toString ++ ","
    }
    (cord ++ "}").toString
  }
}
Example 81
Source File: UtilTest.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.codegen

import java.io.IOException
import java.nio.file.attribute.BasicFileAttributes
import java.nio.file.{FileVisitResult, Files, Path, SimpleFileVisitor}

import com.daml.lf.data.Ref.{PackageId, QualifiedName}
import com.daml.lf.{iface => I}
import org.scalatest.prop.GeneratorDrivenPropertyChecks
import org.scalatest.{BeforeAndAfterAll, FlatSpec, Matchers}

class UtilTest extends UtilTestHelpers with GeneratorDrivenPropertyChecks {

  val packageInterface =
    I.Interface(packageId = PackageId.assertFromString("abcdef"), typeDecls = Map.empty)
  val scalaPackageParts = Array("com", "digitalasset")
  val scalaPackage: String = scalaPackageParts.mkString(".")
  val util =
    lf.LFUtil(
      scalaPackage,
      I.EnvironmentInterface fromReaderInterfaces packageInterface,
      outputDir.toFile)

  def damlScalaName(damlNameSpace: Array[String], name: String): util.DamlScalaName =
    util.DamlScalaName(damlNameSpace, name)

  behavior of "Util"

  it should "mkDamlScalaName for a Contract named Test" in {
    val result = util.mkDamlScalaNameFromDirsAndName(Array(), "Test")
    result shouldEqual damlScalaName(Array.empty, "Test")
    result.packageName shouldEqual scalaPackage
    result.qualifiedName shouldEqual (scalaPackage + ".Test")
  }

  it should "mkDamlScalaName for a Template names foo.bar.Test" in {
    val result = util.mkDamlScalaName(Util.Template, QualifiedName assertFromString "foo.bar:Test")
    result shouldEqual damlScalaName(Array("foo", "bar"), "Test")
    result.packageName shouldEqual (scalaPackage + ".foo.bar")
    result.qualifiedName shouldEqual (scalaPackage + ".foo.bar.Test")
  }

  "partitionEithers" should "equal scalaz separate in simple cases" in forAll {
    iis: List[Either[Int, Int]] =>
      import scalaz.syntax.monadPlus._, scalaz.std.list._, scalaz.std.either._
      Util.partitionEithers(iis) shouldBe iis.separate
  }
}

abstract class UtilTestHelpers extends FlatSpec with Matchers with BeforeAndAfterAll {

  val outputDir = Files.createTempDirectory("codegenUtilTest")

  override protected def afterAll(): Unit = {
    super.afterAll()
    deleteRecursively(outputDir)
  }

  def deleteRecursively(dir: Path): Unit = {
    Files.walkFileTree(
      dir,
      new SimpleFileVisitor[Path] {
        override def postVisitDirectory(dir: Path, exc: IOException) = {
          Files.delete(dir)
          FileVisitResult.CONTINUE
        }

        override def visitFile(file: Path, attrs: BasicFileAttributes) = {
          Files.delete(file)
          FileVisitResult.CONTINUE
        }
      }
    )
    ()
  }
}
Example 82
Source File: PortFiles.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.ports

import java.nio.file.{Files, Path}

import scalaz.{Show, \/}

import scala.collection.JavaConverters._

object PortFiles {
  sealed abstract class Error extends Serializable with Product
  final case class FileAlreadyExists(path: Path) extends Error
  final case class CannotWriteToFile(path: Path, reason: String) extends Error

  object Error {
    implicit val showInstance: Show[Error] = Show.shows {
      case FileAlreadyExists(path) =>
        s"Port file already exists: ${path.toAbsolutePath: Path}"
      case CannotWriteToFile(path, reason) =>
        s"Cannot write to port file: ${path.toAbsolutePath: Path}, reason: $reason"
    }
  }

  def write(path: Path, port: Port): Error \/ Unit =
    \/.fromTryCatchNonFatal {
      writeUnsafe(path, port)
    }.leftMap {
      case _: java.nio.file.FileAlreadyExistsException => FileAlreadyExists(path)
      case e => CannotWriteToFile(path, e.toString)
    }

  private def writeUnsafe(path: Path, port: Port): Unit = {
    import java.nio.file.StandardOpenOption.CREATE_NEW
    val lines: java.lang.Iterable[String] = List(port.value.toString).asJava
    val created = Files.write(path, lines, CREATE_NEW)
    created.toFile.deleteOnExit()
  }
}
Example 83
Source File: PortFilesSpec.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.ports

import java.nio.file.{Path, Paths}
import java.util.UUID

import com.daml.ports.PortFiles.FileAlreadyExists
import org.scalatest.{FreeSpec, Inside, Matchers}
import scalaz.{-\/, \/-}

class PortFilesSpec extends FreeSpec with Matchers with Inside {

  "Can create a port file with a unique file name" in {
    val path = uniquePath()
    inside(PortFiles.write(path, Port(1024))) {
      case \/-(()) =>
    }
    path.toFile.exists() shouldBe true
  }

  "Cannot create a port file with a nonunique file name" in {
    val path = uniquePath()
    inside(PortFiles.write(path, Port(1024))) {
      case \/-(()) =>
    }
    inside(PortFiles.write(path, Port(1024))) {
      case -\/(FileAlreadyExists(p)) =>
        p shouldBe path
    }
  }

  private def uniquePath(): Path = {
    val fileName = s"${this.getClass.getSimpleName}-${UUID.randomUUID().toString}.dummy"
    Paths.get(fileName)
  }
}
Example 84
Source File: PortLock.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.testing.postgresql

import java.io.RandomAccessFile
import java.nio.channels.{
  ClosedChannelException,
  FileChannel,
  FileLock,
  OverlappingFileLockException
}
import java.nio.file.{Files, Path, Paths}

import com.daml.ports.Port

private[postgresql] object PortLock {

  // We can't use `sys.props("java.io.tmpdir")` because Bazel changes this for each test run.
  // For this to be useful, it needs to be shared across concurrent runs.
  private val portLockDirectory: Path = {
    val tempDirectory =
      if (sys.props("os.name").startsWith("Windows")) {
        Paths.get(sys.props("user.home"), "AppData", "Local", "Temp")
      } else {
        Paths.get("/tmp")
      }
    tempDirectory.resolve(Paths.get("daml", "build", "postgresql-testing", "ports"))
  }

  def lock(port: Port): Either[FailedToLock, Locked] = {
    Files.createDirectories(portLockDirectory)
    val portLockFile = portLockDirectory.resolve(port.toString)
    val file = new RandomAccessFile(portLockFile.toFile, "rw")
    val channel = file.getChannel
    try {
      val lock = channel.tryLock()
      val locked = new Locked(port, lock, channel, file)
      if (lock != null) {
        Right(locked)
      } else {
        locked.unlock()
        Left(FailedToLock(port))
      }
    } catch {
      case _: OverlappingFileLockException =>
        channel.close()
        file.close()
        Left(FailedToLock(port))
    }
  }

  final class Locked(val port: Port, lock: FileLock, channel: FileChannel, file: RandomAccessFile) {
    def unlock(): Unit = {
      try {
        lock.release()
      } catch {
        // ignore
        case _: ClosedChannelException =>
      }
      channel.close()
      file.close()
    }
  }

  case class FailedToLock(port: Port) extends RuntimeException(s"Failed to lock port $port.")
}
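Typical use, from within the `com.daml.testing.postgresql` package (the object is package-private): try to take the lock before binding, and always release it afterwards. The port number here is arbitrary:

import com.daml.ports.Port

PortLock.lock(Port(54321)) match {
  case Right(locked) =>
    try {
      // bind and use locked.port here ...
    } finally locked.unlock()
  case Left(PortLock.FailedToLock(port)) =>
    println(s"Port $port is held by another concurrent test run; pick another.")
}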
Example 85
Source File: AppModule.scala From iotchain with MIT License | 5 votes |
package jbok.app

import java.nio.file.Path

import cats.effect._
import distage._
import doobie.util.transactor.Transactor
import jbok.app.service._
import jbok.app.service.store.doobie.{Doobie, DoobieBlockStore, DoobieTransactionStore}
import jbok.app.service.store.{BlockStore, Migration, TransactionStore}
import jbok.common.config.Config
import jbok.core.CoreModule
import jbok.core.api._
import jbok.core.config.FullConfig

class AppModule[F[_]: TagK](implicit F: ConcurrentEffect[F], cs: ContextShift[F]) extends ModuleDef {
  addImplicit[Bracket[F, Throwable]]

  make[Transactor[F]].fromResource((config: FullConfig) => Doobie.xa[F](config.db))
  make[Unit].fromEffect((config: FullConfig) => Migration.migrate[F](config.db))
  make[TransactionStore[F]].from[DoobieTransactionStore[F]]
  make[BlockStore[F]].from[DoobieBlockStore[F]]

  make[ServiceHelper[F]]
  make[AccountAPI[F]].from[AccountService[F]]
  make[AdminAPI[F]].from[AdminService[F]]
  make[BlockAPI[F]].from[BlockService[F]]
  make[ContractAPI[F]].from[ContractService[F]]
  make[MinerAPI[F]].from[MinerService[F]]
  make[PersonalAPI[F]].from[PersonalService[F]]
  make[TransactionAPI[F]].from[TransactionService[F]]
  make[HttpService[F]]
  make[StoreUpdateService[F]]
  make[FullNode[F]]
}

object AppModule {
  def resource[F[_]: TagK](config: FullConfig = CoreModule.testConfig)(
      implicit F: ConcurrentEffect[F],
      cs: ContextShift[F],
      T: Timer[F]): Resource[F, Locator] =
    Injector().produceF[F](new CoreModule[F](config) ++ new AppModule[F]).toCats

  def resource[F[_]: TagK](path: Path)(
      implicit F: ConcurrentEffect[F],
      cs: ContextShift[F],
      T: Timer[F]): Resource[F, Locator] =
    Resource.liftF(Config[F].read[FullConfig](path)).flatMap(config => resource[F](config))
}
Example 86
Source File: LoggerPlatform.scala From iotchain with MIT License | 5 votes |
package jbok.common.log

import java.nio.file.{Path, Paths}

import cats.effect.Sync
import cats.implicits._
import jbok.common.FileUtil
import scribe.handler.LogHandler
import scribe.writer.FileWriter
import scribe.writer.file.LogPath

import scala.concurrent.duration._

object LoggerPlatform {

  def initConfig[F[_]: Sync](config: LogConfig): F[Unit] = {
    val level = Level.fromName(config.level)
    Logger.setRootLevel(level) >>
      (config.logDir match {
        case "/dev/null" =>
          Logger.setRootHandlers(Logger.consoleHandler(level.some))
        case dir =>
          FileUtil[F].open(Paths.get(config.logDir), create = true, asDirectory = true) >>
            Logger.setRootHandlers(
              Logger.consoleHandler(level.some),
              fileHandler(Paths.get(dir), level.some, config.maxLogs)
            )
      })
  }

  def fileHandler(directory: Path, minimumLevel: Option[Level] = None, maxLogs: Int = 15): LogHandler =
    LogHandler(
      Logger.fileFormatter,
      FileWriter().nio
        .path(LogPath.simple("iotchain.log", directory = directory))
        .rolling(LogPath.daily(prefix = "iotchain", directory = directory))
        .maxLogs(maxLogs, checkRate = 1.seconds),
      minimumLevel.map(Logger.fromJbokLevel)
    )
}
Example 87
Source File: NetworkBuilder.scala From iotchain with MIT License | 5 votes |
package jbok.core.config

import java.net.InetSocketAddress
import java.nio.file.{Path, Paths}

import better.files.File
import cats.effect.IO
import cats.implicits._
import io.circe.syntax._
import jbok.common.config.Config
import jbok.core.keystore.KeyStorePlatform
import jbok.core.models.Address
import jbok.core.peer.PeerUri
import jbok.crypto.signature.KeyPair
import monocle.macros.syntax.lens._

import scala.concurrent.duration._
import scala.sys.process.{stringSeqToProcess, ProcessLogger}

final case class NetworkBuilder(
    base: FullConfig,
    configs: List[FullConfig] = Nil,
) {
  val home = System.getProperty("user.home")
  val root = Paths.get(home).resolve(".jbok")

  def withBlockPeriod(n: Int): NetworkBuilder =
    copy(base = base.lens(_.mining.period).set(n.millis))

  def createCert(ip: String, cn: String, caDir: Path, certDir: Path): IO[String] = IO {
    val path = File(".")
    val projectDir = path.path.toAbsolutePath
    val processLogger = new ProcessLogger {
      override def out(s: => String): Unit = println(s)
      override def err(s: => String): Unit = println(s)
      override def buffer[T](f: => T): T = f
    }

    Seq(
      "bash",
      "-c",
      s"${projectDir.resolve("bin/create-cert.sh")} ${ip} ${cn} ${projectDir.resolve("bin").toAbsolutePath} ${caDir.toAbsolutePath} ${certDir.toAbsolutePath}"
    ).lineStream_!(processLogger)
      .mkString("\n")
  }

  def addNode(keyPair: KeyPair, coinbase: Address, rootPath: Path, host: String): NetworkBuilder = {
    val config = base
      .lens(_.rootPath).set(rootPath.toAbsolutePath.toString)
      .lens(_.peer.host).set(host)
      .lens(_.service.local).set(host)
      .lens(_.service.enableMetrics).set(true)
      // .lens(_.service.secure).set(true)
      .lens(_.mining.enabled).set(true)
      .lens(_.mining.address).set(Address(keyPair))
      .lens(_.mining.coinbase).set(coinbase)
      // .lens(_.ssl.enabled).set(true)
      .lens(_.ssl.trustStorePath).set(rootPath.resolve("cert/cacert.jks").toAbsolutePath.toString)
      .lens(_.ssl.keyStorePath).set(rootPath.resolve("cert/server.jks").toAbsolutePath.toString)
      .lens(_.persist.driver).set("rocksdb")
      .lens(_.persist.path).set(s"${rootPath.resolve("data").toAbsolutePath}")
      .lens(_.log.logDir).set(s"${rootPath.resolve("logs").toAbsolutePath}")
      .lens(_.keystore.dir).set(s"${rootPath.resolve("keystore").toAbsolutePath}")
      .lens(_.db.driver).set("org.sqlite.JDBC")
      .lens(_.db.url).set(s"jdbc:sqlite:${rootPath.resolve(s"service.db")}")

    val keystore = new KeyStorePlatform[IO](config.keystore)
    keystore.importPrivateKey(keyPair.secret.bytes, "changeit").unsafeRunSync()

    createCert(host, host, root.resolve("ca"), rootPath.resolve("cert")).unsafeRunSync()
    copy(configs = config :: configs)
  }

  def build: List[FullConfig] = {
    val reversed = configs.reverse
    val seeds = reversed.map(_.peer).map { peer =>
      PeerUri.fromTcpAddr(new InetSocketAddress(peer.host, peer.port)).uri
    }

    reversed.zipWithIndex.map {
      case (config, i) =>
        config.lens(_.peer.seeds).set(seeds.take(i) ++ seeds.drop(i + 1))
    }
  }

  def dump: IO[Unit] =
    build.traverse_(config =>
      Config[IO].dump(config.asJson, Paths.get(config.rootPath).resolve(s"config.yaml")))
}
Example 88
Source File: LocalImageFiles.scala From BigDL with Apache License 2.0 | 5 votes |
package com.intel.analytics.bigdl.dataset.image

import java.awt.color.ColorSpace
import java.nio.file.{Files, Path}

import org.apache.log4j.Logger

object LocalImageFiles {
  Class.forName("javax.imageio.ImageIO")
  Class.forName("java.awt.color.ICC_ColorSpace")
  // Class.forName("sun.java2d.cmm.lcms.LCMS")
  ColorSpace.getInstance(ColorSpace.CS_sRGB).toRGB(Array[Float](0, 0, 0))

  val logger = Logger.getLogger(getClass)

  private[bigdl] def readPaths(path: Path, hasLabel: Boolean = true): Array[LocalLabeledImagePath] = {
    if (hasLabel) readPathsWithLabel(path) else readPathsNoLabel(path)
  }

  // NOTE: readPathsWithLabel and readPathsNoLabel are defined in the full source
  // but omitted from this excerpt.
}
Example 89
Source File: BGRImgToLocalSeqFile.scala From BigDL with Apache License 2.0 | 5 votes |
package com.intel.analytics.bigdl.dataset.image

import java.nio.ByteBuffer
import java.nio.file.Path

import com.intel.analytics.bigdl.dataset.Transformer
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{Path => hadoopPath}
import org.apache.hadoop.io.{SequenceFile, Text}

import scala.collection.Iterator

object BGRImgToLocalSeqFile {
  def apply(blockSize: Int, baseFileName: Path, hasName: Boolean = false): BGRImgToLocalSeqFile =
    new BGRImgToLocalSeqFile(blockSize, baseFileName, hasName)
}

class BGRImgToLocalSeqFile(blockSize: Int, baseFileName: Path, hasName: Boolean = false)
  extends Transformer[(LabeledBGRImage, String), String] {

  private val conf: Configuration = new Configuration
  private var index = 0
  private val preBuffer: ByteBuffer = ByteBuffer.allocate(4 * 2)

  override def apply(prev: Iterator[(LabeledBGRImage, String)]): Iterator[String] = {
    new Iterator[String] {
      override def hasNext: Boolean = prev.hasNext

      override def next(): String = {
        val fileName = baseFileName + s"_$index.seq"
        val path = new hadoopPath(fileName)
        val writer = SequenceFile.createWriter(
          conf,
          SequenceFile.Writer.file(path),
          SequenceFile.Writer.keyClass(classOf[Text]),
          SequenceFile.Writer.valueClass(classOf[Text]))
        var i = 0
        while (i < blockSize && prev.hasNext) {
          val (image, imageName) = prev.next()

          preBuffer.putInt(image.width())
          preBuffer.putInt(image.height())
          val imageByteData = image.convertToByte()

          val data: Array[Byte] = new Array[Byte](preBuffer.capacity + imageByteData.length)
          System.arraycopy(preBuffer.array, 0, data, 0, preBuffer.capacity)
          System.arraycopy(imageByteData, 0, data, preBuffer.capacity, imageByteData.length)
          preBuffer.clear

          val imageKey =
            if (hasName) s"${imageName}\n${image.label().toInt}"
            else s"${image.label().toInt}"
          writer.append(new Text(imageKey), new Text(data))
          i += 1
        }
        writer.close()
        index += 1
        fileName
      }
    }
  }
}
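Each sequence-file value is two big-endian `Int`s (width, height) followed by the raw BGR bytes; a decoder sketch for reading one such value back:

import java.nio.ByteBuffer

// Inverse of the writer above: [width: Int][height: Int][pixel bytes ...]
def decodeValue(data: Array[Byte]): (Int, Int, Array[Byte]) = {
  val buf = ByteBuffer.wrap(data)
  val width = buf.getInt
  val height = buf.getInt
  val pixels = new Array[Byte](data.length - 8)
  buf.get(pixels)
  (width, height, pixels)
}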
Example 90
Source File: Utils.scala From BigDL with Apache License 2.0 | 5 votes |
package com.intel.analytics.bigdl.models.autoencoder

import java.nio.ByteBuffer
import java.nio.file.{Files, Path}

import com.intel.analytics.bigdl.dataset.ByteRecord
import scopt.OptionParser

object Utils {
  val trainMean = 0.13066047740239436
  val trainStd = 0.30810779333114624

  case class TrainParams(
    folder: String = "./",
    checkpoint: Option[String] = None,
    modelSnapshot: Option[String] = None,
    stateSnapshot: Option[String] = None,
    batchSize: Int = 150,
    maxEpoch: Int = 10,
    graphModel: Boolean = false,
    optimizerVersion: Option[String] = None
  )

  val trainParser = new OptionParser[TrainParams]("BigDL Autoencoder on MNIST") {
    opt[String]('f', "folder")
      .text("where you put the MNIST data")
      .action((x, c) => c.copy(folder = x))

    opt[String]("model")
      .text("model snapshot location")
      .action((x, c) => c.copy(modelSnapshot = Some(x)))

    opt[String]("state")
      .text("state snapshot location")
      .action((x, c) => c.copy(stateSnapshot = Some(x)))

    opt[String]("checkpoint")
      .text("where to cache the model and state")
      .action((x, c) => c.copy(checkpoint = Some(x)))

    opt[Int]('b', "batchSize")
      .text("batch size")
      .action((x, c) => c.copy(batchSize = x))

    opt[Int]('e', "maxEpoch")
      .text("max epoch")
      .action((x, c) => c.copy(maxEpoch = x))

    opt[Unit]('g', "graphModel")
      .text("use graph model")
      .action((x, c) => c.copy(graphModel = true))

    opt[String]("optimizerVersion")
      .text("state optimizer version")
      .action((x, c) => c.copy(optimizerVersion = Some(x)))
  }

  private[bigdl] def load(featureFile: Path, labelFile: Path): Array[ByteRecord] = {
    val labelBuffer = ByteBuffer.wrap(Files.readAllBytes(labelFile))
    val featureBuffer = ByteBuffer.wrap(Files.readAllBytes(featureFile))
    val labelMagicNumber = labelBuffer.getInt()
    require(labelMagicNumber == 2049)
    val featureMagicNumber = featureBuffer.getInt()
    require(featureMagicNumber == 2051)

    val labelCount = labelBuffer.getInt()
    val featureCount = featureBuffer.getInt()
    require(labelCount == featureCount)

    val rowNum = featureBuffer.getInt()
    val colNum = featureBuffer.getInt()

    val result = new Array[ByteRecord](featureCount)
    var i = 0
    while (i < featureCount) {
      val img = new Array[Byte](rowNum * colNum)
      var y = 0
      while (y < rowNum) {
        var x = 0
        while (x < colNum) {
          img(x + y * colNum) = featureBuffer.get()
          x += 1
        }
        y += 1
      }
      result(i) = ByteRecord(img, labelBuffer.get().toFloat + 1.0f)
      i += 1
    }

    result
  }
}
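`load` parses the raw MNIST IDX files (magic number, counts, then row-major pixel bytes) and shifts labels to be 1-based. A usage sketch with the standard MNIST file names, callable from within the `bigdl` package since `load` is package-private:

import java.nio.file.Paths

val records = Utils.load(
  Paths.get("./train-images-idx3-ubyte"),
  Paths.get("./train-labels-idx1-ubyte"))
println(s"Loaded ${records.length} images; first label: ${records.head.label}")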
Example 91
Source File: Domain.scala From zorechka-bot with MIT License | 5 votes |
package com.wix.zorechka

import java.nio.file.Path

import com.wix.zorechka.repos.GitRepo
import org.apache.maven.artifact.versioning.ComparableVersion

case class ForkData(repo: GitRepo, forkDir: Path)

case class Dep(groupId: String, artifactId: String, version: String) {
  def mapKey(): String = s"$groupId:$artifactId"
  def branchKey(): String = s"$artifactId-$version"
  def fullKey(): String = s"$artifactId-$version"
}

object Dep {
  implicit val ordering: Ordering[Dep] = Ordering.by[Dep, ComparableVersion] { dep =>
    new ComparableVersion(dep.version)
  }
}

case class Dummy()
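The `Ordering` delegates to Maven's `ComparableVersion`, so versions compare semantically rather than lexicographically; a quick sketch:

val candidates = List(
  Dep("com.google.guava", "guava", "9.0"),
  Dep("com.google.guava", "guava", "28.2-jre"))

// Plain string ordering would put "9.0" last; ComparableVersion knows 28.2 > 9.0.
candidates.max // Dep(com.google.guava,guava,28.2-jre)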
Example 92
Source File: ResultNotifier.scala From zorechka-bot with MIT License | 5 votes |
package com.wix.zorechka.service

import java.nio.file.{Files, Path}

import com.wix.zorechka.Dep
import com.wix.zorechka.clients.{BuildozerClient, GithubClient}
import zio.console.Console
import zio.{RIO, ZIO}

import scala.collection.JavaConverters._

trait ResultNotifier {
  val notifier: ResultNotifier.Service
}

object ResultNotifier {

  trait Service {
    def notify(forkDir: Path, updatedDeps: List[Dep], unusedDeps: List[PackageDeps]): RIO[GithubClient with BuildozerClient with Console, Unit]
  }

  trait CreatePullRequest extends ResultNotifier {
    override val notifier: Service = new Service {
      def notify(forkDir: Path, updatedDeps: List[Dep], unusedDeps: List[PackageDeps]): ZIO[GithubClient with BuildozerClient with Console, Throwable, Unit] = {
        val (depsDesc, branch) = branchName(updatedDeps)

        for {
          _ <- GithubClient.createBranch(forkDir, branch)
          _ <- ZIO.effect(applyDepUpdates(forkDir, updatedDeps))
          _ <- applyUnusedDeps(forkDir, unusedDeps)
          _ <- GithubClient.stageAllChanges(forkDir)
          _ <- GithubClient.commit(forkDir, s"zorechka found new versions for deps: $depsDesc #pr")
          _ <- GithubClient.push(forkDir, branch)
        } yield ()
      }
    }

    private def applyUnusedDeps(repoDir: Path, unusedDeps: List[PackageDeps]): RIO[BuildozerClient, List[Unit]] = {
      ZIO.collectAll {
        unusedDeps.flatMap { unusedDep =>
          unusedDep.deps.map { dep =>
            BuildozerClient.deleteDep(repoDir, dep.target, dep.dep)
          }
        }
      }
    }

    private def applyDepUpdates(repoDir: Path, deps: List[Dep]): Unit = {
      val regex = """artifact = "(.+)",""".r
      deps.foreach { dep =>
        val file = repoDir
          .resolve("third_party")
          .resolve(dep.groupId.replaceAll("\\.", "_") + ".bzl")

        if (file.toFile.exists()) {
          println(s"Rewriting deps for ${file.toAbsolutePath} to $dep")

          val lines = Files.readAllLines(file)
          val result = lines.asScala.map { line =>
            regex.findFirstMatchIn(line) match {
              case Some(m) if line.contains(s"${dep.groupId}:${dep.artifactId}:") =>
                line.replace(m.group(1), s"${dep.groupId}:${dep.artifactId}:${dep.version}")
              case _ => line
            }
          }
          Files.write(file, result.asJava)
        }
      }
    }

    private def branchName(deps: List[Dep]) = {
      val depsSample = deps.map(_.branchKey()).take(3).mkString("_")
      val depsDesc = (if (depsSample.length > 90) depsSample.substring(0, 90) else depsSample) +
        (if (deps.size > 3) s"_and_${deps.size - 3}_more" else "")
      (depsDesc, s"feature/update-deps-$depsDesc")
    }
  }

  trait PrintPullRequestInfo extends ResultNotifier {
    override val notifier: Service = new Service {
      override def notify(forkDir: Path, updatedDeps: List[Dep], unusedDeps: List[PackageDeps]): RIO[GithubClient with BuildozerClient with Console, Unit] = {
        ZIO.accessM[Console](_.console.putStrLn(
          s"""
             |Going to update:
             |${updatedDeps.mkString("\n")}
             |
             |Going to remove:
             |${unusedDeps.mkString("\n")}
             |""".stripMargin))
      }
    }
  }

  def notify(forkDir: Path, updatedDeps: List[Dep], unusedDeps: List[PackageDeps]): ZIO[ResultNotifier with GithubClient with BuildozerClient with Console, Throwable, Unit] =
    ZIO.accessM[ResultNotifier with GithubClient with BuildozerClient with Console](_.notifier.notify(forkDir, updatedDeps, unusedDeps))
}
Example 93
Source File: BazelClient.scala From zorechka-bot with MIT License | 5 votes |
package com.wix.zorechka.clients

import java.nio.file.Path

import com.wix.zorechka.Dep
import com.wix.zorechka.clients.process.RunProcess
import com.wix.zorechka.clients.process.RunProcess.execCmd
import zio.{RIO, Task, ZIO}

case class BuildPackage(value: String, buildFileHash: String)
case class BuildTarget(target: String, deps: List[String])

trait BazelClient {
  val bazelClient: BazelClient.Service
}

object BazelClient {

  trait Service {
    def allBuildTargets(workDir: Path): Task[List[BuildPackage]]
    def buildTarget(workDir: Path, target: BuildTarget): Task[Unit]
    def foundDeps(repoDir: Path): Task[List[Dep]]
  }

  trait Live extends BazelClient {
    override val bazelClient: Service = new Service {

      override def allBuildTargets(workDir: Path): Task[List[BuildPackage]] = for {
        output <- RunProcess.execCmd(List("bazel", "query", "--noshow_progress", "buildfiles(...)"), workDir)
        packs = output.value.filter(_.startsWith("//")).map(_.split(":")(0))
        packsWithHashes <- ZIO.collectAll(packs.map { pack =>
          val buildFilePath = workDir.resolve(pack.stripPrefix("//")).resolve("BUILD.bazel").toString
          RunProcess.execCmd(List("sha256sum", s"$buildFilePath"), workDir)
            .map { output =>
              output.value.head.split(" ").head
            }.map { hash =>
              BuildPackage(pack, hash)
            }
        })
      } yield packsWithHashes

      override def buildTarget(workDir: Path, target: BuildTarget): Task[Unit] = {
        RunProcess.execCmd(List("bazel", "build", target.target), workDir).unit
      }

      override def foundDeps(repoDir: Path): Task[List[Dep]] = {
        val cmd = List("bazel", "query", "--noimplicit_deps", "--keep_going",
          "deps(kind(scala_library, deps(//...)), 1)", "--output", "build")
        for {
          exec <- execCmd(cmd, repoDir)
        } yield parseQueryOutput(exec.value).filterNot(isIgnored)
      }

      private def parseQueryOutput(lines: List[String]): List[Dep] = {
        val regex = """jars = \["@(.+)//:(.+)-(.+)\.jar"\]""".r
        val deps = lines.flatMap { line =>
          regex.findFirstMatchIn(line).map { m =>
            Dep(
              // cleanup group name by replacing '_' with '.' where needed
              m.group(1).replace("_" + m.group(2)
                .replace('.', '_')
                .replace('-', '_'), ""
              ).replace('_', '.'),
              m.group(2),
              m.group(3))
          }
        }
        deps
      }

      // TODO
      private def isIgnored(dep: Dep): Boolean = {
        dep.groupId.startsWith("com.wixpress")
      }
    }
  }

  def allBuildTargets(workDir: Path): ZIO[BazelClient, Throwable, List[BuildPackage]] =
    ZIO.accessM[BazelClient](_.bazelClient.allBuildTargets(workDir))

  def buildTarget(workDir: Path, target: BuildTarget): RIO[BazelClient, Unit] =
    ZIO.accessM[BazelClient](_.bazelClient.buildTarget(workDir, target))

  def foundDeps(repoDir: Path): RIO[BazelClient, List[Dep]] =
    ZIO.accessM[BazelClient](_.bazelClient.foundDeps(repoDir))
}
Example 94
Source File: GithubClient.scala From zorechka-bot with MIT License | 5 votes |
package com.wix.zorechka.clients

import java.nio.file.Path

import com.wix.zorechka.clients.process.{ClientOutput, RunProcess}
import com.wix.zorechka.repos.GitRepo
import zio.{RIO, Task, ZIO}

trait GithubClient {
  val githubClient: GithubClient.Service
}

object GithubClient {

  trait Service {
    def cloneRepo(repo: GitRepo, destinationDir: Path): Task[ClientOutput]
    def createBranch(workDir: Path, branchName: String): Task[ClientOutput]
    def stageAllChanges(workDir: Path): Task[ClientOutput]
    def commit(workDir: Path, commitMsg: String): Task[ClientOutput]
    def push(workDir: Path, branchName: String): Task[ClientOutput]
  }

  trait Live extends GithubClient {
    val githubClient: GithubClient.Service = new GithubClient.Service {

      def cloneRepo(repo: GitRepo, destinationDir: Path): Task[ClientOutput] =
        RunProcess.execCmd(List("git", "clone", "--recursive", repo.url), destinationDir)

      override def createBranch(workDir: Path, branchName: String): Task[ClientOutput] =
        RunProcess.execCmd(List("git", "checkout", "-b", branchName), workDir)

      override def commit(workDir: Path, commitMsg: String): Task[ClientOutput] =
        RunProcess.execCmd(List("git", "commit", "-m", commitMsg), workDir)

      override def stageAllChanges(workDir: Path): Task[ClientOutput] =
        RunProcess.execCmd(List("git", "add", "-A"), workDir)

      override def push(workDir: Path, branchName: String): Task[ClientOutput] =
        RunProcess.execCmd(List("git", "push", "--set-upstream", "origin", branchName), workDir)
    }
  }

  def cloneRepo(repo: GitRepo, destinationDir: Path): RIO[GithubClient, ClientOutput] =
    ZIO.accessM[GithubClient](_.githubClient.cloneRepo(repo, destinationDir))

  def createBranch(workDir: Path, branchName: String): RIO[GithubClient, ClientOutput] =
    ZIO.accessM[GithubClient](_.githubClient.createBranch(workDir, branchName))

  def stageAllChanges(workDir: Path): RIO[GithubClient, ClientOutput] =
    ZIO.accessM[GithubClient](_.githubClient.stageAllChanges(workDir))

  def commit(workDir: Path, message: String): RIO[GithubClient, ClientOutput] =
    ZIO.accessM[GithubClient](_.githubClient.commit(workDir, message))

  def push(workDir: Path, branchName: String): RIO[GithubClient, ClientOutput] =
    ZIO.accessM[GithubClient](_.githubClient.push(workDir, branchName))
}

object GithubClientLive extends GithubClient.Live
Example 95
Source File: RunProcess.scala From zorechka-bot with MIT License | 5 votes |
package com.wix.zorechka.clients.process

import java.nio.file.Path

import zio.{Task, ZIO}

import scala.collection.mutable.ListBuffer
import scala.sys.process.{Process, ProcessLogger}

case class ClientOutput(value: List[String]) extends AnyVal

object RunProcess {
  def execCmd(command: List[String], workDir: Path, extraEnv: List[(String, String)] = List.empty): Task[ClientOutput] = ZIO.effect {
    val lb = ListBuffer.empty[String]
    val log = new ProcessLogger {
      override def out(s: => String): Unit = {
        println(s)
        lb.append(s)
      }
      override def err(s: => String): Unit = {
        println(s)
        lb.append(s)
      }
      override def buffer[T](f: => T): T = f
    }

    println(command.mkString(" "))
    val exitStatus = Process(command, Some(workDir.toFile), extraEnv: _*).!(log)

    if (exitStatus != 0 && exitStatus != 3)
      throw new IllegalStateException(s"Got status $exitStatus")

    ClientOutput(lb.result())
  }
}
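A usage sketch running a git command through the ZIO runtime (assuming ZIO 1.0's `Runtime.default` is available in this project's ZIO version):

import java.nio.file.Paths
import zio.Runtime

val output = Runtime.default.unsafeRun(
  RunProcess.execCmd(List("git", "status", "--short"), Paths.get(".")))
output.value.foreach(println)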
Example 96
Source File: BuildozerClient.scala From zorechka-bot with MIT License | 5 votes |
package com.wix.zorechka.clients

import java.nio.file.Path

import com.wix.zorechka.clients.process.RunProcess
import zio.{RIO, Task, ZIO}

trait BuildozerClient {
  val buildozerClient: BuildozerClient.Service
}

object BuildozerClient {
  trait Service {
    def packageDeps(workDir: Path, target: BuildPackage): Task[List[BuildTarget]]
    def deleteDep(workDir: Path, target: BuildTarget, dep: String): Task[Unit]
    def addDep(workDir: Path, target: BuildTarget, dep: String): Task[Unit]
  }

  trait Live extends BuildozerClient {
    override val buildozerClient: Service = new Service {
      override def packageDeps(workDir: Path, target: BuildPackage): Task[List[BuildTarget]] = for {
        output <- RunProcess.execCmd(List("buildozer", "print label deps", s"${target.value}:*"), workDir)
      } yield output.value
        .filter(!_.contains("has no attribute"))
        .filter(!_.contains("(missing)"))
        .map(_.split(" ").map(_.stripPrefix("[").stripSuffix("]")).filter(_.nonEmpty).toList)
        .collect { // collect instead of map: skips empty lines rather than failing with a MatchError
          case x :: xs => BuildTarget(x, xs)
        }

      override def deleteDep(workDir: Path, target: BuildTarget, dep: String): Task[Unit] = {
        RunProcess.execCmd(List("buildozer", s"remove deps $dep", target.target), workDir).unit
      }

      override def addDep(workDir: Path, target: BuildTarget, dep: String): Task[Unit] = {
        RunProcess.execCmd(List("buildozer", s"add deps $dep", target.target), workDir).unit
      }
    }
  }

  def packageDeps(workDir: Path, target: BuildPackage): RIO[BuildozerClient, List[BuildTarget]] =
    ZIO.accessM[BuildozerClient](_.buildozerClient.packageDeps(workDir, target))

  def deleteDep(workDir: Path, target: BuildTarget, dep: String): RIO[BuildozerClient, Unit] =
    ZIO.accessM[BuildozerClient](_.buildozerClient.deleteDep(workDir, target, dep))

  def addDep(workDir: Path, target: BuildTarget, dep: String): RIO[BuildozerClient, Unit] =
    ZIO.accessM[BuildozerClient](_.buildozerClient.addDep(workDir, target, dep))
}
Example 97
Source File: IndexStorable.scala From spark-lucenerdd with Apache License 2.0 | 5 votes |
package org.zouzias.spark.lucenerdd.store

import java.nio.file.{Files, Path}

import org.apache.lucene.facet.FacetsConfig
import org.apache.lucene.store._
import org.zouzias.spark.lucenerdd.config.Configurable
import org.zouzias.spark.lucenerdd.logging.Logging

protected def storageMode(directoryPath: Path): Directory = {
  if (Config.hasPath(IndexStoreKey)) {
    val storageMode = Config.getString(IndexStoreKey)

    storageMode match {
      // TODO: FIX: Currently there is a single lock instance for each directory.
      // TODO: Implement better lock handling here
      case "disk" => {
        logInfo(s"Config parameter ${IndexStoreKey} is set to 'disk'")
        logInfo("Lucene index will be stored on disk")
        logInfo(s"Index disk location ${tmpJavaDir}")
        // directoryPath.toFile.deleteOnExit() // Delete on exit
        new MMapDirectory(directoryPath, new SingleInstanceLockFactory)
      }
      case ow =>
        logInfo(s"Config parameter ${IndexStoreKey} is set to ${ow}")
        logInfo("Lucene index will be stored in memory (default)")
        logInfo(
          """
            Quoting from
            http://lucene.apache.org/core/7_5_0/core/org/apache/lucene/store/RAMDirectory.html

            A memory-resident Directory implementation. Locking implementation
            is by default the SingleInstanceLockFactory.

            Warning: This class is not intended to work with huge indexes.
            Everything beyond several hundred megabytes will waste resources
            (GC cycles), because it uses an internal buffer size of 1024 bytes,
            producing millions of byte[1024] arrays.
            This class is optimized for small memory-resident indexes.
            It also has bad concurrency on multithreaded environments.

            It is recommended to materialize large indexes on disk and use
            MMapDirectory, which is a high-performance directory implementation
            working directly on the file system cache of the operating system,
            so copying data to Java heap space is not useful.
          """.stripMargin)
        new RAMDirectory()
    }
  } else {
    logInfo(s"Config parameter ${IndexStoreKey} is not set")
    logInfo("Lucene index will be stored on disk")
    new MMapDirectory(directoryPath, new SingleInstanceLockFactory)
  }
}

override def close(): Unit = {
  IndexDir.close()
  TaxonomyDir.close()
}
}
Example 98
Source File: TestHelpers.scala From matcher with MIT License | 5 votes |
package com.wavesplatform.dex.util

import java.io.IOException
import java.nio.file.attribute.BasicFileAttributes
import java.nio.file.{FileVisitResult, Files, Path, SimpleFileVisitor}

object TestHelpers {
  def deleteRecursively(path: Path): Unit = Files.walkFileTree(
    path,
    new SimpleFileVisitor[Path] {
      override def postVisitDirectory(dir: Path, exc: IOException): FileVisitResult = {
        Option(exc).fold {
          Files.delete(dir)
          FileVisitResult.CONTINUE
        }(throw _)
      }

      override def visitFile(file: Path, attrs: BasicFileAttributes): FileVisitResult = {
        Files.delete(file)
        FileVisitResult.CONTINUE
      }
    }
  )
}
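For illustration, a hypothetical caller (not from the project) that builds a small tree and removes it with the visitor above:

import java.nio.file.Files

object DeleteRecursivelyUsage {
  def main(args: Array[String]): Unit = {
    val dir = Files.createTempDirectory("example")
    Files.createFile(dir.resolve("a.txt"))
    TestHelpers.deleteRecursively(dir) // deletes a.txt first, then the directory itself
  }
}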
Example 99
Source File: ScalapRenderer.scala From jardiff with Apache License 2.0 | 5 votes |
package scala.tools.jardiff

import java.nio.file.{Files, Path}

import scala.tools.scalap.scalax.rules.ScalaSigParserError

class ScalapRenderer(privates: Boolean) extends FileRenderer {
  def outFileExtension: String = ".scalap"

  override def render(in: Path, out: Path): Unit = {
    val classBytes = Files.readAllBytes(in)
    try {
      val main = new scala.tools.scalap.Main
      main.printPrivates = privates
      val decompiled = main.decompileScala(classBytes, in.getFileName.toString == "package.class")
      if (decompiled != "") {
        Files.createDirectories(out.getParent)
        Files.write(out, decompiled.getBytes("UTF-8"))
      }
    } catch {
      case err: ScalaSigParserError =>
        System.err.println("WARN: unable to invoke scalap on: " + in + ": " + err.getMessage)
    }
  }
}
Example 100
Source File: ScalapSigRenderer.scala From jardiff with Apache License 2.0 | 5 votes |
package scala.tools.jardiff

import java.nio.file.{Files, Path}

import scala.tools.scalap.scalax.rules.ScalaSigParserError
import scala.tools.scalap.scalax.rules.scalasig.{ByteCode, ScalaSigAttributeParsers}

class ScalapSigRenderer(privates: Boolean) extends FileRenderer {
  def outFileExtension: String = ".scalap"

  override def render(in: Path, out: Path): Unit = {
    val classBytes = Files.readAllBytes(in)
    try {
      val scalaSig = ScalaSigAttributeParsers.parse(ByteCode(classBytes))
      val main = new scala.tools.scalap.Main
      main.printPrivates = privates
      val decompiled = main.parseScalaSignature(scalaSig, in.getFileName.toString == "package.sig")
      if (decompiled != "") {
        Files.createDirectories(out.getParent)
        Files.write(out, decompiled.getBytes("UTF-8"))
      }
    } catch {
      case err: ScalaSigParserError =>
        System.err.println("WARN: unable to invoke scalap on: " + in + ": " + err.getMessage)
    }
  }
}
Example 101
Source File: AsmTextifyRenderer.scala From jardiff with Apache License 2.0 | 5 votes |
package scala.tools.jardiff

import java.io.PrintWriter
import java.nio.file.{Files, Path}

import scala.collection.JavaConverters._

import org.objectweb.asm.{ClassReader, Opcodes}
import org.objectweb.asm.tree.{ClassNode, FieldNode, InnerClassNode, MethodNode}
import org.objectweb.asm.util.TraceClassVisitor

class AsmTextifyRenderer(code: Boolean, raw: Boolean, privates: Boolean) extends FileRenderer {
  def outFileExtension: String = ".asm"

  override def render(in: Path, out: Path): Unit = {
    val classBytes = Files.readAllBytes(in)
    val rawNode = classFromBytes(classBytes)
    val node = if (raw) rawNode else zapScalaClassAttrs(sortClassMembers(rawNode))
    if (!code)
      node.methods.forEach(_.instructions.clear())
    if (!privates) {
      node.methods.removeIf((m: MethodNode) => isPrivate(m.access))
      node.fields.removeIf((m: FieldNode) => isPrivate(m.access))
      node.innerClasses.removeIf((m: InnerClassNode) => isPrivate(m.access))
    }
    Files.createDirectories(out.getParent)
    val pw = new PrintWriter(Files.newBufferedWriter(out))
    try {
      val trace = new TraceClassVisitor(pw)
      node.accept(trace)
    } finally {
      pw.close()
    }
  }

  private def isPrivate(access: Int): Boolean =
    (access & Opcodes.ACC_PRIVATE) != 0

  def sortClassMembers(node: ClassNode): node.type = {
    node.fields.sort(_.name compareTo _.name)
    node.methods.sort(_.name compareTo _.name)
    node
  }

  private def isScalaSigAnnot(desc: String) =
    List("Lscala/reflect/ScalaSignature", "Lscala/reflect/ScalaLongSignature").exists(desc.contains)

  // drop ScalaSig annotation and class attributes
  private def zapScalaClassAttrs(node: ClassNode): node.type = {
    if (node.visibleAnnotations != null)
      node.visibleAnnotations = node.visibleAnnotations.asScala.filterNot(a => a == null || isScalaSigAnnot(a.desc)).asJava
    node.attrs = null
    node
  }

  private def classFromBytes(bytes: Array[Byte]): ClassNode = {
    val node = new ClassNode()
    new ClassReader(bytes).accept(node, if (raw) 0 else ClassReader.SKIP_DEBUG | ClassReader.SKIP_FRAMES)
    node
  }
}
Example 102
Source File: StandardizationQueryV1.scala From daf-semantics with Apache License 2.0 | 5 votes |
package it.almawave.daf.standardization.v1

import com.typesafe.config.Config
import java.nio.file.Paths
import java.nio.file.Files
import it.almawave.linkeddata.kb.catalog.VocabularyBox
import java.io.FileFilter
import java.io.File
import java.nio.file.Path
import org.slf4j.LoggerFactory

def details(voc_box: VocabularyBox, level: Int, uri: String, lang: String) = {
  val onto_id = detect_ontology(voc_box)

  val query_path: Path = detailsQueryFile(onto_id)
    .map(_.toPath())
    .getOrElse(default_query_details)

  // disabled for too many logs!
  logger.debug(s"daf.standardization> try ${voc_box.id} with details query: ${query_path}")

  val query = new String(Files.readAllBytes(query_path))
  query
    .replace("${vocabularyID}", voc_box.id)
    .replace("${level}", level.toString())
    .replace("${uri}", uri)
    .replace("${lang}", lang)
}
}
Example 103
Source File: ManagedVersions.scala From sbt-slamdata with Apache License 2.0 | 5 votes |
package slamdata

import sbt.util.FileBasedStore
import sbt.internal.util.codec.JValueFormats
import sjsonnew.{BasicJsonProtocol, IsoString}
import sjsonnew.shaded.scalajson.ast.unsafe.{JField, JObject, JString, JValue}
import sjsonnew.support.scalajson.unsafe.{Converter, Parser, PrettyPrinter}

import java.nio.file.Path

final class ManagedVersions private (path: Path) extends BasicJsonProtocol with JValueFormats {

  private[this] val store: FileBasedStore[JValue] =
    new FileBasedStore(path.toFile, Converter)(IsoString.iso(PrettyPrinter.apply, Parser.parseUnsafe))

  def apply(key: String): String =
    get(key).getOrElse(sys.error(s"unable to find string -> string mapping for key '$key'"))

  def get(key: String): Option[String] = {
    safeRead() match {
      case JObject(values) =>
        values.find(_.field == key) match {
          case Some(JField(_, JString(value))) => Some(value)
          case _ => None
        }
      case _ =>
        sys.error(s"unable to parse managed versions store at $path")
    }
  }

  def update(key: String, version: String): Unit = {
    safeRead() match {
      case JObject(values) =>
        var i = 0
        var done = false
        while (i < values.length && !done) {
          if (values(i).field == key) {
            values(i) = JField(key, JString(version))
            done = true
          }
          i += 1
        }

        val values2 = if (!done) {
          val values2 = new Array[JField](values.length + 1)
          System.arraycopy(values, 0, values2, 0, values.length)
          values2(values.length) = JField(key, JString(version))
          values2
        } else {
          values
        }

        store.write(JObject(values2))
      case _ =>
        sys.error(s"unable to parse managed versions store at $path")
    }
  }

  private[this] def safeRead(): JValue = {
    try {
      store.read[JValue]()
    } catch {
      case _: sbt.internal.util.EmptyCacheError =>
        val back = JObject(Array[JField]())
        store.write(back)
        back
    }
  }
}

object ManagedVersions {
  def apply(path: Path): ManagedVersions =
    new ManagedVersions(path)
}
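A usage sketch (assumed, with a hypothetical versions.json path) for the store above:

import java.nio.file.Paths

object ManagedVersionsUsage {
  def main(args: Array[String]): Unit = {
    val versions = ManagedVersions(Paths.get("versions.json"))
    versions.update("scala", "2.12.10") // inserts or overwrites the mapping
    println(versions("scala"))          // "2.12.10"; apply throws if the key is absent
    println(versions.get("missing"))    // None
  }
}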
Example 105
Source File: DirManager.scala From daf with BSD 3-Clause "New" or "Revised" License | 5 votes |
package it.gov.daf.catalogmanager.listeners

import java.net.URLEncoder

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{FileIO, Source}
import net.caoticode.dirwatcher.FSListener
import play.api.libs.ws.WSClient
import play.api.libs.ws.ahc.AhcWSClient
import play.api.mvc.MultipartFormData.FilePart
import play.Logger

import scala.concurrent.Future

class DirManager() extends FSListener {

  import java.nio.file.Path
  import scala.concurrent.ExecutionContext.Implicits.global

  val logger = Logger.underlying()

  override def onCreate(ref: Path): Unit = {
    implicit val system = ActorSystem()
    implicit val materializer = ActorMaterializer()
    val wsClient = AhcWSClient()

    val name = ref.getParent.getFileName.toString
    println(name)
    val uri: Option[String] = IngestionUtils.datasetsNameUri.get(name)
    val logicalUri = URLEncoder.encode(uri.get, "UTF-8")
    logger.debug("logicalUri: " + logicalUri)

    call(wsClient)
      .andThen { case _ => wsClient.close() }
      .andThen { case _ => system.terminate() }

    def call(wsClient: WSClient): Future[Unit] = {
      wsClient.url("http://localhost:9001/ingestion-manager/v1/add-datasets/" + logicalUri)
        //.withHeaders("content-type" -> "multipart/form-data")
        .post(Source(FilePart("upfile", name, None, FileIO.fromPath(ref)) :: List())).map { response =>
          val statusText: String = response.statusText
          logger.debug(s"Got a response $statusText")
        }
    }

    logger.debug(s"created $ref")
  }

  override def onDelete(ref: Path): Unit = println(s"deleted $ref")

  override def onModify(ref: Path): Unit = println(s"modified $ref")
}
Example 106
Source File: JUnitSensor.scala From sonar-scala with GNU Lesser General Public License v3.0 | 5 votes |
package com.mwz.sonar.scala
package junit

import java.io.File
import java.nio.file.{Path, Paths}

import scala.jdk.CollectionConverters._

import cats.instances.list._
import com.mwz.sonar.scala.util.Log
import com.mwz.sonar.scala.util.syntax.SonarConfig._
import com.mwz.sonar.scala.util.syntax.SonarFileSystem._
import com.mwz.sonar.scala.util.syntax.SonarSensorContext._
import org.sonar.api.batch.fs.{FileSystem, InputFile}
import org.sonar.api.batch.sensor.{Sensor, SensorContext, SensorDescriptor}
import org.sonar.api.config.Configuration
import org.sonar.api.measures.CoreMetrics

private[junit] def save(
  context: SensorContext,
  reports: Map[InputFile, JUnitReport]
): Unit = {
  if (reports.nonEmpty)
    log.debug(s"Parsed reports:\n${reports.mkString(", ")}")
  else
    log.info("No test metrics were saved by this sensor.")

  reports.foreach {
    case (file, report) =>
      log.info(s"Saving junit test metrics for $file.")
      context.saveMeasure[Integer](file, CoreMetrics.SKIPPED_TESTS, report.skipped)
      context.saveMeasure[Integer](file, CoreMetrics.TESTS, report.tests - report.skipped)
      context.saveMeasure[Integer](file, CoreMetrics.TEST_ERRORS, report.errors)
      context.saveMeasure[Integer](file, CoreMetrics.TEST_FAILURES, report.failures)
      context.saveMeasure[java.lang.Long](
        file,
        CoreMetrics.TEST_EXECUTION_TIME,
        (report.time * 1000).longValue
      )
  }
}
}

object JUnitSensor {
  val SensorName = "Scala JUnit Sensor"
  val TestsPropertyKey = "sonar.tests"
  val DefaultTests = List(Paths.get("src/test/scala"))
  val ReportsPropertyKey = "sonar.junit.reportPaths"
  val DefaultReportPaths = List(Paths.get("target/test-reports"))

  private[junit] def testPaths(conf: Configuration): List[Path] =
    conf.getPaths(TestsPropertyKey, DefaultTests)

  private[junit] def reportPaths(conf: Configuration): List[Path] =
    conf.getPaths(ReportsPropertyKey, DefaultReportPaths)
}
Example 107
Source File: JUnitReportParser.scala From sonar-scala with GNU Lesser General Public License v3.0 | 5 votes |
package com.mwz.sonar.scala
package junit

import java.io.File
import java.nio.file.Path

import scala.jdk.CollectionConverters._
import scala.util.Try
import scala.xml.{Elem, XML}

import com.mwz.sonar.scala.util.Log
import org.sonar.api.batch.fs.{FilePredicate, FileSystem, InputFile}
import org.sonar.api.scanner.ScannerSide

trait JUnitReportParserAPI {
  private[junit] def resolveFiles(
    tests: List[Path],
    reports: List[JUnitReport]
  ): Map[InputFile, JUnitReport] =
    reports
      .groupBy(_.name)
      .flatMap {
        case (name, reports) =>
          val path: String = name.replace(".", "/")
          val files: List[Path] = tests.map(_.resolve(s"$path.scala"))
          val predicates: List[FilePredicate] =
            files.map(f => fileSystem.predicates.hasPath(f.toString))

          val inputFiles: Iterable[InputFile] =
            fileSystem
              .inputFiles(
                fileSystem.predicates.or(predicates.asJava)
              )
              .asScala

          // report an error when none of the candidate paths resolved to an input file
          if (inputFiles.isEmpty)
            log.error(s"The following files were not found: ${files.mkString(", ")}")

          // Collect all of the input files.
          inputFiles.flatMap(file => reports.headOption.map((file, _)))
      }
}
Example 108
Source File: ScalaPlugin.scala From sonar-scala with GNU Lesser General Public License v3.0 | 5 votes |
package com.mwz.sonar.scala

import java.nio.file.{Path, Paths}

import com.mwz.sonar.scala.util.Log
import com.mwz.sonar.scala.util.syntax.Optionals._
import org.sonar.api.Plugin
import org.sonar.api.config.Configuration
import org.sonar.api.resources.AbstractLanguage
import scalariform.ScalaVersion
import scalariform.lexer.{ScalaLexer, Token}
import scalariform.utils.Utils._

final class ScalaPlugin extends Plugin {
  override def define(context: Plugin.Context): Unit = {
    context.addExtensions(
      // Global configuration.
      classOf[GlobalConfig],
      // Scala.
      classOf[Scala],
      classOf[sensor.ScalaSensor],
      // PR decoration.
      classOf[pr.GlobalIssues],
      classOf[pr.GithubPrReviewJob],
      // Scalastyle.
      classOf[scalastyle.ScalastyleRulesRepository],
      classOf[scalastyle.ScalastyleQualityProfile],
      classOf[scalastyle.ScalastyleChecker],
      classOf[scalastyle.ScalastyleSensor],
      // Scapegoat.
      classOf[scapegoat.ScapegoatRulesRepository],
      classOf[scapegoat.ScapegoatQualityProfile],
      classOf[scapegoat.ScapegoatReportParser],
      classOf[scapegoat.ScapegoatSensor],
      // Built-in quality profiles.
      classOf[qualityprofiles.ScalastyleScapegoatQualityProfile],
      classOf[qualityprofiles.RecommendedQualityProfile],
      // Scoverage.
      classOf[scoverage.ScoverageMeasures],
      classOf[scoverage.ScoverageMetrics],
      classOf[scoverage.ScoverageReportParser],
      classOf[scoverage.ScoverageSensor],
      // JUnit.
      classOf[junit.JUnitReportParser],
      classOf[junit.JUnitSensor]
    )
  }
}
Example 109
Source File: ScoverageReportParser.scala From sonar-scala with GNU Lesser General Public License v3.0 | 5 votes |
package com.mwz.sonar.scala
package scoverage

import java.nio.file.{Path, Paths}

import scala.xml.{Node, XML}

import cats.syntax.semigroup.catsSyntaxSemigroup
import com.mwz.sonar.scala.util.PathUtils
import org.sonar.api.scanner.ScannerSide

private[scoverage] def extractScoverageFromNode(node: Node): Scoverage = {
  val branches = (node \\ "statement")
    .filter(node => !(node \@ "ignored").toBoolean && (node \@ "branch").toBoolean)
  val coveredBranches = branches.filter(statement => (statement \@ "invocation-count").toInt > 0)
  Scoverage(
    statements = (node \@ "statement-count").toInt,
    coveredStatements = (node \@ "statements-invoked").toInt,
    statementCoverage = (node \@ "statement-rate").toDouble,
    branches = branches.size,
    coveredBranches = coveredBranches.size,
    branchCoverage = (node \@ "branch-rate").toDouble
  )
}
}
Example 110
Source File: SonarConfig.scala From sonar-scala with GNU Lesser General Public License v3.0 | 5 votes |
package com.mwz.sonar.scala
package util
package syntax

import java.nio.file.{Path, Paths}

import cats.instances.string._
import cats.syntax.eq._
import com.mwz.sonar.scala.util.syntax.Optionals._
import org.sonar.api.config.Configuration

object SonarConfig {
  implicit final class ConfigOps(private val configuration: Configuration) extends AnyVal {
    @SuppressWarnings(Array("UnusedMethodParameter"))
    def getAs[T <: String](key: String)(implicit ev: T =:= String): Option[String] = {
      configuration
        .get(key)
        .toOption
        .filterNot(_.trim.isEmpty)
    }
  }
}
Example 111
Source File: SonarFileSystem.scala From sonar-scala with GNU Lesser General Public License v3.0 | 5 votes |
package com.mwz.sonar.scala
package util
package syntax

import java.io.File
import java.nio.file.Path

import scala.util.{Failure, Success, Try}

import cats.syntax.flatMap._
import cats.{Monad, MonoidK}
import org.sonar.api.batch.fs.FileSystem

object SonarFileSystem {
  implicit final class FileSystemOps(private val fs: FileSystem) extends AnyVal {
    def resolve[F[_]: Monad: MonoidK](toResolve: F[Path]): F[File] =
      toResolve.flatMap[File] { path =>
        Try(fs.resolvePath(path.toString)) match {
          case Failure(_) => MonoidK[F].empty
          case Success(f) => Monad[F].pure(f)
        }
      }
  }
}
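The spec in Example 113 below exercises this syntax; a condensed sketch of the same idea (imports assumed from this project):

import java.nio.file.Paths
import cats.instances.list._
import cats.instances.option._
import com.mwz.sonar.scala.util.syntax.SonarFileSystem._
import org.sonar.api.batch.fs.internal.DefaultFileSystem

object ResolveUsage {
  def main(args: Array[String]): Unit = {
    val fs = new DefaultFileSystem(Paths.get("./"))
    println(fs.resolve(Option(Paths.get("src/main/scala")))) // Some(file) resolved against the fs base
    println(fs.resolve(List(Paths.get("a"), Paths.get("b")))) // failing entries collapse to MonoidK.empty
  }
}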
Example 112
Source File: ScapegoatReportParser.scala From sonar-scala with GNU Lesser General Public License v3.0 | 5 votes |
package com.mwz.sonar.scala
package scapegoat

import java.nio.file.{Path, Paths}

import scala.xml.XML

import org.sonar.api.scanner.ScannerSide

trait ScapegoatReportParserAPI {
  def parse(scapegoatReportPath: Path): Map[String, Seq[ScapegoatIssue]]
}

override def parse(scapegoatReportPath: Path): Map[String, Seq[ScapegoatIssue]] = {
  val scapegoatXMLReport = XML.loadFile(scapegoatReportPath.toFile)
  val scapegoatIssues = for {
    issue <- scapegoatXMLReport \\ "warning"
    line = (issue \@ "line").toInt
    text = issue \@ "text"
    file = replaceAllDotsButLastWithSlashes(issue \@ "file")
    inspectionId = issue \@ "inspection"
  } yield ScapegoatIssue(line, text, file, inspectionId)

  scapegoatIssues.groupBy(issue => issue.file)
}
}
Example 113
Source File: SonarFileSystemSpec.scala From sonar-scala with GNU Lesser General Public License v3.0 | 5 votes |
package com.mwz.sonar.scala
package util
package syntax

import java.nio.file.{Path, Paths}

import cats.instances.list._
import cats.instances.option._
import com.mwz.sonar.scala.util.syntax.SonarFileSystem._
import org.mockito.ArgumentMatchers._
import org.mockito.Mockito._
import org.scalatest.OptionValues
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
import org.scalatestplus.mockito.MockitoSugar
import org.sonar.api.batch.fs.FileSystem
import org.sonar.api.batch.fs.internal.DefaultFileSystem

class SonarFileSystemSpec extends AnyFlatSpec with Matchers with OptionValues with MockitoSugar {
  it should "attempt to resolve paths" in {
    val fs = new DefaultFileSystem(Paths.get("./"))

    val paths = List(Paths.get("path/1"), Paths.get("path/2"))
    fs.resolve(paths) shouldBe List(
      Paths.get("./").resolve("path/1").toAbsolutePath.normalize.toFile,
      Paths.get("./").resolve("path/2").toAbsolutePath.normalize.toFile
    )

    val path: Option[Path] = Some(Paths.get("another/path"))
    fs.resolve(path).value shouldBe
      Paths.get("./").resolve("another/path").toAbsolutePath.normalize.toFile
  }

  it should "handle exceptions gracefully" in {
    val fs = mock[FileSystem]
    val path = List(Paths.get("path"))

    when(fs.resolvePath(any())).thenThrow(new RuntimeException())

    fs.resolve(path) shouldBe empty
  }
}
Example 114
Source File: fixtures.scala From sonar-scala with GNU Lesser General Public License v3.0 | 5 votes |
package com.mwz.sonar.scala

import java.io.File
import java.nio.file.{Files, Path}

import cats.effect.IO
import cats.effect.concurrent.Ref
import com.mwz.sonar.scala.util.Logger

trait WithFiles {
  def withFiles(paths: String*)(test: Seq[File] => Any): Unit = {
    val tmpDir: Path = Files.createTempDirectory("")
    val files: Seq[File] = paths.map(path => Files.createFile(tmpDir.resolve(path)).toFile)
    try test(files)
    finally {
      files.foreach(f => Files.deleteIfExists(f.toPath))
      Files.deleteIfExists(tmpDir)
    }
  }
}

trait WithTracing {
  def withTracing(test: Ref[IO, List[String]] => Any): Unit =
    test(Ref.unsafe[IO, List[String]](List.empty))
}

trait WithLogging {
  object LogLevel {
    sealed trait Level
    final case object Debug extends Level
    final case object Info extends Level
    final case object Warn extends Level
    final case object Error extends Level
  }

  def withLogging(test: (Ref[IO, List[(LogLevel.Level, String)]], Logger[IO]) => Any): Unit = {
    val logs = Ref.unsafe[IO, List[(LogLevel.Level, String)]](List.empty)
    val logger: Logger[IO] = new Logger[IO] {
      def debug(s: String): IO[Unit] = logs.update((LogLevel.Debug, s) :: _)
      def info(s: String): IO[Unit] = logs.update((LogLevel.Info, s) :: _)
      def warn(s: String): IO[Unit] = logs.update((LogLevel.Warn, s) :: _)
      def error(s: String): IO[Unit] = logs.update((LogLevel.Error, s) :: _)
      def error(s: String, e: Throwable): IO[Unit] = logs.update((LogLevel.Error, s) :: _)
    }
    test(logs, logger)
  }
}
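A hypothetical test (not from the project) showing how WithFiles hands the created files to the test body:

import java.io.File

class WithFilesUsage extends WithFiles {
  def demo(): Unit =
    withFiles("a.txt", "b.txt") { files =>
      assert(files.forall(_.exists)) // both live in one temp directory, cleaned up afterwards
    }
}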
Example 115
Source File: SparkSqlUtils.scala From HadoopLearning with MIT License | 5 votes |
package com.c503.utils

import java.io.{BufferedInputStream, BufferedReader, FileInputStream, InputStreamReader}
import java.nio.file.Path

import com.google.common.io.Resources
import org.apache.log4j.{Level, Logger}
import org.apache.mesos.Protos.Resource
import org.apache.spark.sql.SparkSession

import scala.io.Source

def readSqlByPath(sqlPath: String) = {
  val buf = new StringBuilder
  val path = this.getPathByName(sqlPath)
  val file = Source.fromFile(path)
  for (line <- file.getLines) {
    buf ++= line + "\n"
  }
  file.close
  buf.toString()
}
}
Example 116
Source File: IDEPathHelper.scala From keycloak-benchmark with Apache License 2.0 | 5 votes |
import java.net.URI
import java.nio.file.attribute.{FileAttribute, BasicFileAttributes}
import java.nio.file.{StandardCopyOption, Paths, Files, Path}

import io.gatling.core.util.PathHelper._

class Directories(
    val data: Path,
    val bodies: Path,
    val binaries: Path,
    val results: Path
)

object IDEPathHelper {
  private val uri: URI = getClass.getClassLoader.getResource("gatling.conf").toURI

  val directories: Directories = if (uri.getScheme.startsWith("jar")) {
    val testDir = System.getProperty("test.dir")
    val mainDir: Path = if (testDir != null) {
      val dir = Paths.get(testDir)
      if (dir.exists) {
        if (!dir.isDirectory) {
          throw new IllegalArgumentException(testDir + " is not a directory")
        }
        dir
      } else {
        Files.createDirectory(dir)
      }
    } else {
      Files.createTempDirectory("gatling-")
    }
    System.out.println("Using " + mainDir + " as gatling directory")
    // unpack gatling.conf
    Files.copy(getClass.getResourceAsStream("gatling.conf"), mainDir.resolve("gatling.conf"), StandardCopyOption.REPLACE_EXISTING)
    // using createDirectories to ignore existing
    val directories = new Directories(
      Files.createDirectories(mainDir.resolve("data")),
      Files.createDirectories(mainDir.resolve("bodies")),
      Files.createDirectories(mainDir.resolve("binaries")),
      Files.createDirectories(mainDir.resolve("results")))
    val simulationFile: String = Engine.simulationClass.replace('.', '/') + ".class"
    // unpack simulation
    val targetFile: Path = mainDir.resolve("binaries").resolve(simulationFile)
    Files.createDirectories(targetFile.getParent)
    Files.copy(getClass.getResourceAsStream(simulationFile), targetFile, StandardCopyOption.REPLACE_EXISTING)
    directories
  } else {
    val projectRootDir = RichPath(uri).ancestor(3)
    val mavenResourcesDirectory = projectRootDir / "src" / "test" / "resources"
    val mavenTargetDirectory = projectRootDir / "target"
    new Directories(
      mavenResourcesDirectory / "data",
      mavenResourcesDirectory / "bodies",
      mavenTargetDirectory / "test-classes",
      mavenTargetDirectory / "results")
  }
}
Example 117
Source File: PersistingParser.scala From Converter with GNU General Public License v3.0 | 5 votes |
package org.scalablytyped.converter.internal
package importer

import java.nio.file.Path

import com.olvind.logging.Logger
import org.scalablytyped.converter.internal.ts.{parser, TsParsedFile}

object PersistingParser {
  def apply(
      cacheDirOpt: Option[Path],
      inputFolders: IArray[InFolder],
      logger: Logger[Unit],
  ): InFile => Either[String, TsParsedFile] =
    cacheDirOpt match {
      case Some(cacheDir) =>
        val pf = FileLocking.persistingFunction[(InFile, Array[Byte]), Either[String, TsParsedFile]](
          { case (file, bs) =>
            val shortestRelative =
              inputFolders.map(f => file.path.relativeTo(f.path)).sortBy(_.toString.length).head.toString
            val base = cacheDir resolve s"${BuildInfo.version}" resolve shortestRelative
            (base resolve Digest.of(List(bs)).hexString)
          },
          logger,
        ) { case (inFile, bytes) => parser.parseFileContent(inFile, bytes) }

        inFile => pf((inFile, os.read.bytes(inFile.path)))
      case None =>
        inFile => parser.parseFileContent(inFile, os.read.bytes(inFile.path))
    }
}
Example 118
Source File: Main.scala From Converter with GNU General Public License v3.0 | 5 votes |
package org.scalablytyped.converter

import java.nio.file.Path
import java.util.concurrent.ForkJoinPool

import org.scalablytyped.converter.internal.constants.defaultCacheFolder
import org.scalablytyped.converter.internal.importer.build.BinTrayPublisher
import org.scalablytyped.converter.internal.importer.{withZipFs, Ci, Publisher}
import org.scalablytyped.converter.internal.{constants, files}

import scala.concurrent.ExecutionContext

object Main {
  def main(args: Array[String]): Unit = {
    val Ci.Config(config) = args
    System.setProperty("scala.concurrent.context.numThreads", config.parallelScalas.toString)
    val publishFolder = constants.defaultLocalPublishFolder

    val pool = new ForkJoinPool(config.parallelLibraries)
    val ec = ExecutionContext.fromExecutorService(pool)

    withZipFs.maybe(files.existing(defaultCacheFolder) / "bintray.zip", config.enablePublish) { bintrayPathOpt =>
      val publisher: Publisher =
        if (config.enablePublish)
          BinTrayPublisher(bintrayPathOpt, config.projectName, Some(config.repo), ec) match {
            case Left(err)    => sys.error(err)
            case Right(value) => value
          }
        else BinTrayPublisher.Dummy

      withZipFs(defaultCacheFolder / "npmjs.zip") { npmjsPath =>
        withZipFs.maybe(defaultCacheFolder / "parseCache.zip", config.enableParseCache && config.conserveSpace) {
          parseCachePathOpt =>
            val parseCacheOpt: Option[Path] = parseCachePathOpt orElse {
              if (config.enableParseCache) Some((defaultCacheFolder / "parse").toNIO) else None
            }
            val paths = Ci.Paths(npmjsPath, parseCacheOpt, defaultCacheFolder, publishFolder, defaultCacheFolder / "git")
            val ci = new Ci(config, paths, publisher, pool, ec)
            if (config.benchmark) {
              println(ci.run())
              println(ci.run())
              println(ci.run())
            } else {
              ci.run()
            }
        }
      }
    }
    pool.shutdown()
    System.exit(0)
  }
}
Example 119
Source File: withZipFs.scala From Converter with GNU General Public License v3.0 | 5 votes |
package org.scalablytyped.converter.internal.importer

import java.net.URI
import java.nio.file.Path

object withZipFs {
  import java.nio.file.FileSystems

  def maybe[T](path: os.Path, enable: Boolean)(f: Option[Path] => T): T =
    if (enable) apply(path)(path => f(Some(path)))
    else f(None)

  def apply[T](path: os.Path)(f: Path => T): T = {
    val uri = URI.create(s"jar:file:${path}")
    val env = new java.util.HashMap[String, String]()
    env.put("create", "true")
    val zipfs = FileSystems.newFileSystem(uri, env)
    val root = zipfs.getPath("/")
    try f(root)
    finally zipfs.close()
  }
}
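A usage sketch (assumed): opening an archive as a java.nio FileSystem and writing through its root Path; the zip is created on first use because of the create=true option above, and the archive location is a placeholder:

import java.nio.file.Files

object WithZipFsUsage {
  def main(args: Array[String]): Unit = {
    val zip = os.pwd / "cache.zip" // hypothetical archive location
    withZipFs(zip) { root =>
      Files.write(root.resolve("hello.txt"), "hi".getBytes("UTF-8"))
      println(Files.readAllLines(root.resolve("hello.txt")))
    }
  }
}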
Example 120
Source File: NpmjsFetcher.scala From Converter with GNU General Public License v3.0 | 5 votes |
package org.scalablytyped.converter.internal
package importer
package documentation

import java.nio.file.Path

import com.olvind.logging.Logger
import gigahorse.HttpClient
import gigahorse.support.okhttp.Gigahorse
import org.scalablytyped.converter.internal.stringUtils.encodeURIComponent
import org.scalablytyped.converter.internal.ts.TsIdentLibrarySimple

import scala.concurrent.{ExecutionContext, Future}
import scala.util.{Failure, Success}

case class NpmjsFetcher(cacheDir: Path)(implicit ec: ExecutionContext) extends Npmjs {
  val client: HttpClient = Gigahorse.http(
    Gigahorse.config
      .withMaxConnections(10)
      .withMaxRequestRetry(3),
  )

  override def apply[L](source: Source, logger: Logger[L]): Future[Option[Npmjs.Data]] = {
    val libOpt = source match {
      case Source.StdLibSource(_, _, _) => Some(TsIdentLibrarySimple("typescript"))
      case Source.FromFolder(_, libName) => Some(libName)
      case _ => None
    }

    libOpt match {
      case None => Future.successful(None)
      case Some(lib) =>
        val cacheFile = cacheDir.resolve("/" + lib.`__value`)

        Json.opt[Npmjs.Data](cacheFile) match {
          case Some(x) => Future.successful(Some(x))
          case None =>
            client
              .run(
                Gigahorse.url(s"https://api.npms.io/v2/package/${encodeURIComponent(lib.value)}").get,
                Gigahorse.asString,
              )
              .transform {
                case Failure(th) =>
                  logger.warn(s"Couldn't fetch metadata for $lib", th)
                  Success(None)
                case Success(jsonStr) =>
                  Json.CustomJacksonParser.decode[Npmjs.Data](jsonStr) match {
                    case Left(err) =>
                      logger.error(s"Couldn't decode json for $lib, $jsonStr", err)
                      Success(None)
                    case Right(data) =>
                      files.softWrite(cacheFile)(_.println(jsonStr))
                      Success(Some(data))
                  }
              }
        }
    }
  }
}
Example 121
Source File: ApplySpecMain.scala From eel-sdk with Apache License 2.0 | 5 votes |
package io.eels.cli

import java.io.PrintStream
import java.nio.file.{Path, Paths}

import io.eels.{Constants, SourceParser}
import io.eels.component.hive.{HiveOps, HiveSource, HiveSpec}
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.FileSystem
import org.apache.hadoop.hive.conf.HiveConf
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient

object ApplySpecMain {

  implicit val fs = FileSystem.get(new Configuration)
  implicit val hiveConf = new HiveConf
  implicit val client = new HiveMetaStoreClient(hiveConf)

  def apply(args: Seq[String], out: PrintStream = System.out): Unit = {

    val parser = new scopt.OptionParser[Options]("eel") {
      head("eel apply-spec", Constants.EelVersion)

      opt[String]("dataset") required() action { (source, o) =>
        o.copy(source = source)
      } text "specify dataset, eg hive:database:table"

      opt[String]("spec") required() action { (schema, o) =>
        o.copy(specPath = Paths.get(schema))
      } text "specify path to eel spec"
    }

    parser.parse(args, Options()) match {
      case Some(options) =>
        val builder = SourceParser(options.source).getOrElse(sys.error(s"Unsupported source ${options.source}"))
        val source = builder()
        source match {
          case hive: HiveSource => HiveOps.applySpec(HiveSpec(options.specPath), false)
          case _ => sys.error(s"Unsupported source $source")
        }
      case _ =>
    }
  }

  case class Options(source: String = null, specPath: Path = null)
}
Example 122
Source File: io.scala From sbt-org-policies with Apache License 2.0 | 5 votes |
package sbtorgpolicies

import java.io._
import java.net.URL
import java.nio.charset.Charset
import java.nio.file.Path
import java.nio.file.Paths.get

import cats.syntax.either._
import sbtorgpolicies.exceptions.IOException

import scala.io.Source
import scala.language.implicitConversions

package object io {

  type IOResult[T] = Either[IOException, T]

  object syntax {

    implicit def eitherFilterSyntax[T](either: Either[Throwable, T]): FilteredEitherOps[T] =
      new FilteredEitherOps(either)

    implicit def fileNameSyntax(fileName: String): FileNameOps = new FileNameOps(fileName)

    final class FilteredEitherOps[T](either: Either[Throwable, T]) {
      def withFilter(f: T => Boolean): Either[Throwable, T] = either match {
        case Right(r) if !f(r) =>
          new IllegalStateException("Filter condition has not been satisfied").asLeft[T]
        case _ =>
          either
      }
    }

    final class FileNameOps(filename: String) {
      def toPath: Path = get(filename)

      def toFile: File = new File(filename.fixPath)

      def fixPath: String = filename.replaceAll("/", File.separator)

      def ensureFinalSlash: String =
        filename + (if (filename.endsWith(File.separator)) "" else File.separator)
    }
  }

  object IO {

    def file(path: String): File = new File(path)

    def url(address: String): URL = new URL(address)

    def readLines(file: File): Iterator[String] = Source.fromFile(file).getLines()

    def readBytes(file: File): Array[Byte] = {
      val is: InputStream = new FileInputStream(file)
      val array: Array[Byte] = Stream.continually(is.read).takeWhile(_ != -1).map(_.toByte).toArray
      is.close()
      array
    }

    def write(file: File, content: String, charset: Charset = Charset.forName("UTF-8")): Unit = {
      val writer = new BufferedWriter(
        new OutputStreamWriter(new FileOutputStream(file, false), charset)
      )
      writer.write(content)
      writer.close()
    }

    def relativize(base: File, file: File): Option[String] = {

      def ensureEndingSlash: Option[String] = {
        val path = base.getAbsolutePath
        path.lastOption.map {
          case c if c == File.separatorChar => path
          case _ => path + File.separatorChar
        }
      }

      val baseFileString = if (base.isDirectory) ensureEndingSlash else None
      val pathString = file.getAbsolutePath
      baseFileString flatMap {
        case baseString if pathString.startsWith(baseString) =>
          Some(pathString.substring(baseString.length))
        case _ => None
      }
    }
  }
}
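A small sketch (assumed) of the string-to-Path syntax defined above:

import sbtorgpolicies.io.syntax._

object IoSyntaxUsage {
  def main(args: Array[String]): Unit = {
    val p = "src/main/scala".toPath      // java.nio.file.Path via Paths.get
    println(p.toAbsolutePath)
    println("docs".ensureFinalSlash)     // appends File.separator when missing
  }
}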
Example 123
Source File: KubernetesTestComponents.scala From spark-integration with Apache License 2.0 | 5 votes |
package org.apache.spark.deploy.k8s.integrationtest

import java.nio.file.{Path, Paths}
import java.util.UUID

import scala.collection.mutable
import scala.collection.JavaConverters._

import io.fabric8.kubernetes.client.DefaultKubernetesClient
import org.scalatest.concurrent.Eventually

private[spark] class KubernetesTestComponents(defaultClient: DefaultKubernetesClient) {

  val namespaceOption = Option(System.getProperty("spark.kubernetes.test.namespace"))
  val hasUserSpecifiedNamespace = namespaceOption.isDefined
  val namespace = namespaceOption.getOrElse(UUID.randomUUID().toString.replaceAll("-", ""))
  private val serviceAccountName =
    Option(System.getProperty("spark.kubernetes.test.serviceAccountName"))
      .getOrElse("default")
  val kubernetesClient = defaultClient.inNamespace(namespace)
  val clientConfig = kubernetesClient.getConfiguration

  def createNamespace(): Unit = {
    defaultClient.namespaces.createNew()
      .withNewMetadata()
      .withName(namespace)
      .endMetadata()
      .done()
  }

  def deleteNamespace(): Unit = {
    defaultClient.namespaces.withName(namespace).delete()
    Eventually.eventually(KubernetesSuite.TIMEOUT, KubernetesSuite.INTERVAL) {
      val namespaceList = defaultClient
        .namespaces()
        .list()
        .getItems
        .asScala
      require(!namespaceList.exists(_.getMetadata.getName == namespace))
    }
  }

  def newSparkAppConf(): SparkAppConf = {
    new SparkAppConf()
      .set("spark.master", s"k8s://${kubernetesClient.getMasterUrl}")
      .set("spark.kubernetes.namespace", namespace)
      .set("spark.executor.memory", "500m")
      .set("spark.executor.cores", "1")
      .set("spark.executors.instances", "1")
      .set("spark.app.name", "spark-test-app")
      .set("spark.ui.enabled", "true")
      .set("spark.testing", "false")
      .set("spark.kubernetes.submission.waitAppCompletion", "false")
      .set("spark.kubernetes.authenticate.driver.serviceAccountName", serviceAccountName)
  }
}

private[spark] class SparkAppConf {

  private val map = mutable.Map[String, String]()

  def set(key: String, value: String): SparkAppConf = {
    map.put(key, value)
    this
  }

  def get(key: String): String = map.getOrElse(key, "")

  def setJars(jars: Seq[String]): Unit = set("spark.jars", jars.mkString(","))

  override def toString: String = map.toString

  def toStringArray: Iterable[String] = map.toList.flatMap(t => List("--conf", s"${t._1}=${t._2}"))
}

private[spark] case class SparkAppArguments(
    mainAppResource: String,
    mainClass: String,
    appArgs: Array[String])

private[spark] object SparkAppLauncher extends Logging {

  def launch(
      appArguments: SparkAppArguments,
      appConf: SparkAppConf,
      timeoutSecs: Int,
      sparkHomeDir: Path): Unit = {
    val sparkSubmitExecutable = sparkHomeDir.resolve(Paths.get("bin", "spark-submit"))
    logInfo(s"Launching a spark app with arguments $appArguments and conf $appConf")
    val commandLine = mutable.ArrayBuffer(sparkSubmitExecutable.toFile.getAbsolutePath,
      "--deploy-mode", "cluster",
      "--class", appArguments.mainClass,
      "--master", appConf.get("spark.master")
    ) ++ appConf.toStringArray :+ appArguments.mainAppResource

    if (appArguments.appArgs.nonEmpty) {
      commandLine += appArguments.appArgs.mkString(" ")
    }
    logInfo(s"Launching a spark app with command line: ${commandLine.mkString(" ")}")
    ProcessUtils.executeProcess(commandLine.toArray, timeoutSecs)
  }
}
Example 124
Source File: FileUtil.scala From wookiee with Apache License 2.0 | 5 votes |
package com.webtrends.harness.utils

import java.io.File
import java.nio.file.{FileSystems, Files, Path}

import scala.io.Source

def getSymLink(f: File): File = {
  if (f == null)
    throw new NullPointerException("File must not be null")
  val path = FileSystems.getDefault.getPath(f.getPath)
  if (Files.isSymbolicLink(path)) {
    f.getCanonicalFile
  } else {
    f.getAbsoluteFile
  }
}
}
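For illustration (assuming the surrounding object is named FileUtil, which the excerpt does not show), a hypothetical call:

import java.io.File

object SymLinkUsage {
  def main(args: Array[String]): Unit = {
    // a symlink resolves to its canonical target; a plain file stays absolute
    println(FileUtil.getSymLink(new File("/etc/localtime")))
  }
}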
Example 125
Source File: GlobalWatchService.scala From incubator-retired-iota with Apache License 2.0 | 5 votes |
package org.apache.iota.fey

import java.nio.file.{Files, Path, Paths, WatchEvent}

import akka.actor.{Actor, ActorLogging, ActorRef}
import org.apache.iota.fey.GlobalWatchService.REGISTER_WATCHER_PERFORMER
import org.apache.iota.fey.WatchingDirectories.STOPPED

class GlobalWatchService extends Actor with ActorLogging {

  // WatchService
  var watchThread: Thread = null
  val watchFileTask: GlobalWatchServiceTask = new GlobalWatchServiceTask(self)

  override def preStart(): Unit = {
    startWatcher("PRE-START")
  }

  override def postStop(): Unit = {
    stopWatcher("POST-STOP")
  }

  private def startWatcher(from: String) = {
    log.info(s"Starting Global Watcher from $from")
    watchThread = new Thread(watchFileTask, "FEY_GLOBAL_WATCH_SERVICE_PERFORMERS")
    watchThread.setDaemon(true)
    watchThread.start()
  }

  private def stopWatcher(from: String) = {
    log.info(s"Stopping Global Watcher from $from")
    if (watchThread != null && watchThread.isAlive) {
      watchThread.interrupt()
      watchThread = null
    }
  }

  override def receive: Receive = {
    case REGISTER_WATCHER_PERFORMER(path, file_name, actor, events, loadExists) =>
      registerPath(path, file_name, actor, events, loadExists)
    case STOPPED =>
      stopWatcher("STOPPED-THREAD")
      startWatcher("STOPPED-THREAD")
    case x => log.error(s"Unknown message $x")
  }

  private def broadcastMessageIfFileExists(actor: ActorRef, pathWithFile: String) = {
    val filePath = Paths.get(pathWithFile)
    if (Files.exists(filePath)) {
      log.info(s"File $pathWithFile exists. Broadcasting message to actor ${actor.path.toString}")
      actor ! GlobalWatchService.ENTRY_CREATED(filePath)
    }
  }

  private def registerPath(dir_path: String, file_name: Option[String], actor: ActorRef, events: Array[WatchEvent.Kind[_]], loadExists: Boolean) = {
    WatchingDirectories.actorsInfo.get((dir_path, file_name)) match {
      case Some(info) =>
        val newInfo: Map[WatchEvent.Kind[_], Array[ActorRef]] = events.map(event => {
          info.get(event) match {
            case Some(actors) => (event, (Array(actor) ++ actors))
            case None => (event, Array(actor))
          }
        }).toMap
        WatchingDirectories.actorsInfo.put((dir_path, file_name), info ++ newInfo)
        watchFileTask.watch(Paths.get(dir_path), actor.path.toString, events)
      case None =>
        val tmpEvents: Map[WatchEvent.Kind[_], Array[ActorRef]] = events.map(event => { (event, Array(actor)) }).toMap
        WatchingDirectories.actorsInfo.put((dir_path, file_name), tmpEvents)
        watchFileTask.watch(Paths.get(dir_path), actor.path.toString, events)
    }

    if (file_name.isDefined && loadExists) {
      log.info(s"Checking if file $dir_path/${file_name.get} already exist")
      broadcastMessageIfFileExists(actor, s"$dir_path/${file_name.get}")
    }
  }
}

object GlobalWatchService {
  sealed case class ENTRY_CREATED(path: Path)
  sealed case class ENTRY_MODIFIED(path: Path)
  sealed case class ENTRY_DELETED(path: Path)
  sealed case class REGISTER_WATCHER_PERFORMER(dir_path: String, file_name: Option[String], actor: ActorRef, events: Array[WatchEvent.Kind[_]], loadIfExists: Boolean)
}
Example 126
Source File: WatchServiceReceiver.scala From incubator-retired-iota with Apache License 2.0 | 5 votes |
package org.apache.iota.fey

import java.nio.file.StandardWatchEventKinds._
import java.nio.file.{FileSystems, Path}
import java.io.File

import akka.actor.ActorRef
import org.apache.iota.fey.JsonReceiverActor.JSON_RECEIVED
import play.api.libs.json._

import scala.io.Source

class WatchServiceReceiver(receiverActor: ActorRef) extends JsonReceiver {

  processInitialFiles()

  private val watchService = FileSystems.getDefault.newWatchService()

  def watch(path: Path): Unit =
    path.register(watchService, ENTRY_CREATE, ENTRY_MODIFY)

  def getJsonObject(params: String): Option[JsValue] = {
    try {
      val stringJson = Source.fromFile(params).getLines.mkString
      Option(Json.parse(stringJson))
    } catch {
      case e: Exception =>
        log.error("Could not parse JSON", e)
        None
    }
  }

  override def execute(): Unit = {
    val key = watchService.take()
    val eventsIterator = key.pollEvents().iterator()

    while (eventsIterator.hasNext) {
      val event = eventsIterator.next()
      val relativePath = event.context().asInstanceOf[Path]
      val path = key.watchable().asInstanceOf[Path].resolve(relativePath)

      log.debug(s"${event.kind()} --- $path")
      event.kind() match {
        case (ENTRY_CREATE | ENTRY_MODIFY) if path.toString.endsWith(CONFIG.JSON_EXTENSION) =>
          processJson(path.toString, path.toFile)
        case _ =>
      }
    }

    key.reset()
  }

  private[fey] def processJson(path: String, file: File) = {
    try {
      getJsonObject(path) match {
        case Some(orchestrationJSON) =>
          val valid = validJson(orchestrationJSON)
          if (valid && (orchestrationJSON \ JSON_PATH.COMMAND).as[String].toUpperCase != "DELETE") {
            checkForLocation(orchestrationJSON)
          }
          if (valid) {
            receiverActor ! JSON_RECEIVED(orchestrationJSON, file)
          } else {
            log.warn(s"File $path not processed. Incorrect JSON schema")
          }
        case None =>
      }
    } catch {
      case e: Exception => log.error(s"File $path will not be processed", e)
    }
  }

  private def processInitialFiles() = {
    Utils.getFilesInDirectory(CONFIG.JSON_REPOSITORY)
      .filter(file => file.getName.endsWith(CONFIG.JSON_EXTENSION))
      .foreach(file => {
        processJson(file.getAbsolutePath, file)
      })
  }

  override def exceptionOnRun(e: Exception): Unit = {
    e match {
      case e: InterruptedException =>
      case e: Exception => log.error("Watch Service stopped", e)
    }
    watchService.close()
  }
}
Example 127
Source File: MavenCoordinatesListReader.scala From exodus with MIT License | 5 votes |
package com.wixpress.build.maven

import java.nio.file.{Files, Path}

import scala.io.Source

object MavenCoordinatesListReader {
  def coordinatesIn(filePath: Path): Set[Coordinates] = {
    val lines = Source.fromInputStream(Files.newInputStream(filePath)).getLines().toSet
    coordinatesInText(lines)
  }

  def coordinatesInText(content: Set[String]): Set[Coordinates] = {
    content
      .map(_.trim)
      .filterNot(_.isEmpty)
      .filterNot(_.startsWith("#"))
      .map(l => Coordinates.deserialize(l))
  }
}
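A usage sketch; the group:artifact:version format is an assumption about Coordinates.deserialize, which lives elsewhere in the repository:

object CoordinatesReaderUsage {
  def main(args: Array[String]): Unit = {
    val coords = MavenCoordinatesListReader.coordinatesInText(Set(
      "com.google.guava:guava:25.0-jre",
      "# comment lines and blanks are filtered out",
      ""
    ))
    coords.foreach(println)
  }
}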
Example 128
Source File: BazelRcWriter.scala From exodus with MIT License | 5 votes |
package com.wix.bazel.migrator

import java.nio.file.{Files, Path}

class BazelRcWriter(repoRoot: Path) {

  def write(): Unit = {
    val contents =
      """#
        |# DO NOT EDIT - this line imports shared managed bazel configuration
        |#
        |import %workspace%/tools/bazelrc/.bazelrc.managed.dev.env
        |
        |#
        |# ADDITIONS ONLY UNDER THIS LINE
        |#
        |""".stripMargin
    writeToDisk(contents)
  }

  private def writeToDisk(contents: String): Unit =
    Files.write(repoRoot.resolve(".bazelrc"), contents.getBytes)
}
Example 129
Source File: SourcesPackageWriter.scala From exodus with MIT License | 5 votes |
package com.wix.bazel.migrator

import java.nio.file.{Files, Path, StandardOpenOption}

import com.wix.bazel.migrator.model.{Package, Target}

class SourcesPackageWriter(repoRoot: Path, bazelPackages: Set[Package]) {

  def write(): Unit = {
    bazelPackages
      .flatMap(jvmTargetsAndRelativePathFromMonoRepoRoot)
      .flatMap(sourceDirAndRelativePackagePaths)
      .foreach(writeSourcesTarget)
  }

  private def jvmTargetsAndRelativePathFromMonoRepoRoot(bazelPackage: Package): Set[JvmTargetAndRelativePath] = {
    val r = bazelPackage.targets.collect {
      case jvm: Target.Jvm => (jvm, bazelPackage.relativePathFromMonoRepoRoot)
    }
    r.map(JvmTargetAndRelativePathFromMonoRepoRoot(_))
  }

  def sourceDirAndRelativePackagePaths(jvmTargetAndRelativePath: JvmTargetAndRelativePath): Set[SourceDirPathAndRelativePackagePath] = {
    val basePackagePath = repoRoot.resolve(jvmTargetAndRelativePath.relativePath)
    jvmTargetAndRelativePath.jvm.sources.map { source =>
      val sourceDirPath = basePackagePath.resolve(adjustSource(source))
      SourceDirPathAndRelativePackagePath(sourceDirPath, jvmTargetAndRelativePath.relativePath)
    }
  }

  private def writeSourcesTarget(s: SourceDirPathAndRelativePackagePath) =
    Files.write(
      s.sourceDirBuildPath,
      s.sourcesTarget.getBytes,
      StandardOpenOption.CREATE, StandardOpenOption.APPEND
    )

  private def adjustSource(source: String) = {
    if (source.startsWith("/"))
      source.drop(1)
    else
      source
  }

  private[migrator] case class SourcesTargetAndSourceDirPath(sourceDirBuildPath: Path, sourcesTarget: Array[Byte])

  private[migrator] case class JvmTargetAndRelativePath(jvm: Target.Jvm, relativePath: String)

  private[migrator] object JvmTargetAndRelativePathFromMonoRepoRoot {
    def apply(targetAndRelativePath: (Target.Jvm, String)) =
      JvmTargetAndRelativePath(targetAndRelativePath._1, targetAndRelativePath._2)
  }
}

private[migrator] case class SourceDirPathAndRelativePackagePath(sourceDirPath: Path, relativePackagePath: String) {
  def sourcesTarget: String = {
    if (sourceDirPath.endsWith(relativePackagePath))
      """
        |sources()
        |""".stripMargin
    else
      s"""
         |sources(
         |    visibility = ["//$relativePackagePath:__pkg__"]
         |)
         |""".stripMargin
  }

  def sourceDirBuildPath: Path = sourceDirPath.resolve("BUILD.bazel")
}
Example 130
Source File: DockerImagesWriter.scala From exodus with MIT License | 5 votes |
package com.wix.bazel.migrator

import java.nio.file.{Files, Path}

import com.wix.bazel.migrator.overrides.InternalTargetsOverrides

class DockerImagesWriter(repoRoot: Path, overrides: InternalTargetsOverrides) {
  private val dockerImagesRootPath = repoRoot.resolve("third_party/docker_images")

  def write(): Unit = {
    val images = overrides.targetOverrides.toSeq.flatMap(_.dockerImagesDeps).flatten.map(DockerImage(_)).toSet
    createBzlFile(images)
    createBuildFile(images)
  }

  private def writeToDisk(fileName: String, contents: String): Unit = {
    val filePath = dockerImagesRootPath.resolve(fileName)
    Files.createDirectories(dockerImagesRootPath)
    Files.createFile(filePath)
    Files.write(filePath, contents.getBytes)
  }

  private def createBzlFile(images: Set[DockerImage]): Unit = {
    val header =
      s"""load(
         |  "@io_bazel_rules_docker//container:container.bzl",
         |  "container_pull",
         |  container_repositories = "repositories"
         |)
         |
         |def docker_images():
         |  container_repositories()
         |""".stripMargin
    val contents = images.map(_.toContainerPullRule).mkString("\n\n")
    writeToDisk("docker_images.bzl", header + contents)
  }

  private def createBuildFile(images: Set[DockerImage]): Unit = {
    val header =
      s"""
         |package(default_visibility = ["//visibility:public"])
         |licenses(["reciprocal"])
         |load("@io_bazel_rules_docker//container:container.bzl", "container_image")
         |""".stripMargin
    val contents = images.map(_.toContainerImageRule).mkString("\n\n")
    writeToDisk("BUILD.bazel", header + contents)
  }
}
Example 131
Source File: BazelRcRemoteWriter.scala From exodus with MIT License | 5 votes |
package com.wix.bazel.migrator

import java.nio.file.{Files, Path}

class BazelRcRemoteWriter(repoRoot: Path) {

  def write(): Unit = {
    val contents =
      """# Copyright 2016 The Bazel Authors. All rights reserved.
        |#
        |# Licensed under the Apache License, Version 2.0 (the "License");
        |# you may not use this file except in compliance with the License.
        |# You may obtain a copy of the License at
        |#
        |#    http://www.apache.org/licenses/LICENSE-2.0
        |#
        |# Unless required by applicable law or agreed to in writing, software
        |# distributed under the License is distributed on an "AS IS" BASIS,
        |# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
        |# See the License for the specific language governing permissions and
        |# limitations under the License.
        |
        |# Depending on how many machines are in the remote execution instance, setting
        |# this higher can make builds faster by allowing more jobs to run in parallel.
        |# Setting it too high can result in jobs that timeout, however, while waiting
        |# for a remote machine to execute them.
        |build:remote --jobs=100
        |
        |# (no need to import %workspace%/.bazelrc, bazel does this by default)
        |import %workspace%/tools/bazelrc/.bazelrc.remotesettings
        |
        |# Set various strategies so that all actions execute remotely. Mixing remote
        |# and local execution will lead to errors unless the toolchain and remote
        |# machine exactly match the host machine.
        |build:remote --spawn_strategy=remote
        |build:remote --strategy=Javac=remote
        |build:remote --strategy=Closure=remote
        |build:remote --genrule_strategy=remote
        |build:remote --define=EXECUTOR=remote
        |build:remote --strategy=Scalac=remote
        |test:remote --strategy=Scalac=remote
        |test:remote --test_tmpdir=/tmp
        |
        |# Enable the remote cache so action results can be shared across machines,
        |# developers, and workspaces.
        |build:remote --remote_cache=remotebuildexecution.googleapis.com
        |
        |# Enable remote execution so actions are performed on the remote systems.
        |build:remote --remote_executor=remotebuildexecution.googleapis.com
        |
        |# Set flags for uploading to BES in order to view results in the Bazel Build
        |# Results UI.
        |build:results --bes_backend="buildeventservice.googleapis.com"
        |build:results --bes_timeout=10s
        |""".stripMargin
    writeToDisk(contents)
  }

  private def writeToDisk(contents: String): Unit =
    Files.write(repoRoot.resolve(".bazelrc.remote"), contents.getBytes)
}
Example 132
Source File: RelativePathSupport.scala From exodus with MIT License | 5 votes |
package com.wix.bazel.migrator.analyze

import java.io.IOException
import java.nio.file.{Path, Paths}

import com.fasterxml.jackson.core.{JsonGenerator, JsonParser, JsonToken}
import com.fasterxml.jackson.databind._
import com.fasterxml.jackson.databind.module.SimpleModule

class RelativePathSupportingModule extends SimpleModule {
  addDeserializer(classOf[Path], new RelativePathSupportingDeserializer)
  addSerializer(classOf[Path], new RelativePathSupportingSerializer)
}

class RelativePathSupportingSerializer extends JsonSerializer[Path] {
  @throws[IOException]
  def serialize(value: Path, gen: JsonGenerator, serializers: SerializerProvider): Unit =
    value match {
      case null => gen.writeNull()
      case _ => gen.writeString(value.toString)
    }
}

class RelativePathSupportingDeserializer extends JsonDeserializer[Path] {
  @throws[IOException]
  def deserialize(p: JsonParser, ctxt: DeserializationContext): Path =
    p.getCurrentToken match {
      case JsonToken.VALUE_NULL => null
      case JsonToken.VALUE_STRING => Paths.get(p.readValueAs(classOf[String]))
      case _ => throw ctxt.wrongTokenException(p, JsonToken.VALUE_STRING, "The value of a java.nio.file.Path must be a string")
    }
}
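A round-trip sketch (assumed, not from the migrator) showing the module in action:

import java.nio.file.Paths
import com.fasterxml.jackson.databind.ObjectMapper

object PathJsonUsage {
  def main(args: Array[String]): Unit = {
    val mapper = new ObjectMapper().registerModule(new RelativePathSupportingModule)
    val json = mapper.writeValueAsString(Paths.get("some/relative/path")) // serialized as a JSON string
    println(mapper.readValue(json, classOf[java.nio.file.Path]))          // parsed back into a Path
  }
}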
Example 133
Source File: SourceFileTracer.scala From exodus with MIT License | 5 votes |
package com.wix.bazel.migrator.analyze.jdk

import java.nio.file.{FileSystem, FileSystems, Files, Path}

import com.wix.bazel.migrator.model.SourceModule
import MavenRelativeSourceDirPathFromModuleRoot.PossibleLocation
import com.wix.bazel.migrator.analyze.CodePath

trait SourceFileTracer {
  def traceSourceFile(module: SourceModule, fqn: String, pathToJar: String, testClass: Boolean): CodePath
}

class JavaPSourceFileTracer(repoRoot: Path,
                            processRunner: ProcessRunner = new JavaProcessRunner,
                            fileSystem: FileSystem = FileSystems.getDefault) extends SourceFileTracer {
  private val Command = "javap"

  private def parseFileName(stdOut: String) = {
    val firstLine = stdOut.split("\n")(0)
    firstLine.split('"') match {
      case Array(_, fileName) => fileName
      case _ => throw new RuntimeException(s"Unknown stdout format $stdOut")
    }
  }

  private def findLocationIn(relativePathFromMonoRepoRoot: String,
                             possibleLocations: Set[PossibleLocation],
                             filePath: String): Option[String] =
    possibleLocations.find { location =>
      val possiblePath = repoRoot.resolve(relativePathFromMonoRepoRoot).resolve(location).resolve(filePath)
      Files.exists(possiblePath)
    }

  override def traceSourceFile(module: SourceModule, fqn: String, pathToClasses: String, testClass: Boolean): CodePath = {
    val packagePart = fqn.splitAt(fqn.lastIndexOf('.'))._1.replace('.', '/')
    val cmdArgs = List("-cp", pathToClasses, fqn)
    val runResult = processRunner.run(repoRoot, Command, cmdArgs)
    if (runResult.exitCode != 0) {
      throw new RuntimeException(s"Problem locating the source file of class $fqn in $pathToClasses")
    }
    val filePath = packagePart + "/" + parseFileName(runResult.stdOut)
    val locations = MavenRelativeSourceDirPathFromModuleRoot.getPossibleLocationFor(testClass)
    findLocationIn(module.relativePathFromMonoRepoRoot, locations, filePath) match {
      case Some(location) => CodePath(module, location, filePath)
      case None =>
        throw new RuntimeException(s"Could not find location of $filePath in ${module.relativePathFromMonoRepoRoot}")
    }
  }
}

object MavenRelativeSourceDirPathFromModuleRoot {
  type PossibleLocation = String

  private val mainCodePrefixes = Set("src/main")
  private val testCodePrefixes = Set("src/test", "src/it", "src/e2e")
  private val languages = Set("java", "scala")

  private val ProdCodeLocations: Set[PossibleLocation] =
    mainCodePrefixes.flatMap(prefix => languages.map(language => s"$prefix/$language"))

  private val TestCodeLocations: Set[PossibleLocation] =
    testCodePrefixes.flatMap(prefix => languages.map(language => s"$prefix/$language"))

  def getPossibleLocationFor(testCode: Boolean): Set[PossibleLocation] =
    if (testCode) TestCodeLocations else ProdCodeLocations
}
Example 134
Source File: JDepsCommandImpl.scala From exodus with MIT License | 5 votes |
package com.wix.bazel.migrator.analyze.jdk

import java.nio.file.{Files, Path, Paths}

class JDepsCommandImpl(repoRoot: Path) extends JDepsCommand {

  override def analyzeClassesDependenciesPerJar(jarPath: String, classPath: List[String]): Option[ClassDependencies] = {
    val fileName = Paths.get(jarPath).getFileName.toString
    val dotDirectory = Files.createTempDirectory("dot")
    val classpath = classPath.mkString(":")
    val cmdArgs = List("jdeps", "-dotoutput", dotDirectory.toString, "-v", "-cp", classpath, jarPath)
    val process = (new ProcessBuilder).directory(repoRoot.toFile).command(cmdArgs: _*)
    process.redirectOutput()
    val process1 = process.start()
    val stream = process1.getInputStream
    process1.waitFor()
    val path = dotDirectory.resolve(fileName + ".dot")
    if (Files.exists(path)) Some(ClassDependencies(path)) else None
  }
}
Example 135
Source File: ProcesRunner.scala From exodus with MIT License | 5 votes |
package com.wix.bazel.migrator.analyze.jdk

import java.nio.file.Path

trait ProcessRunner {
  def run(runningDirectory: Path, command: String, args: List[String]): RunResult
}

class JavaProcessRunner extends ProcessRunner {
  override def run(runningDirectory: Path, command: String, args: List[String]): RunResult = {
    val process = (new ProcessBuilder).directory(runningDirectory.toFile).command((command :: args): _*)
    val process1 = process.start()
    process1.waitFor()
    val stdOut = scala.io.Source.fromInputStream(process1.getInputStream).mkString
    val stdErr = scala.io.Source.fromInputStream(process1.getErrorStream).mkString
    RunResult(
      exitCode = process1.exitValue(),
      stdOut = stdOut,
      stdErr = stdErr
    )
  }
}

case class RunResult(exitCode: Int, stdOut: String, stdErr: String)
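A minimal usage sketch (command and working directory are placeholders, not from the original project):

import java.nio.file.Paths

// Hypothetical demo: run `javap -version` in the current directory and print the result.
object ProcessRunnerDemo extends App {
  val runner = new JavaProcessRunner
  val result = runner.run(Paths.get("."), "javap", List("-version"))
  println(s"exit=${result.exitCode}, out=${result.stdOut.trim}")
}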
Example 136
Source File: CachingEagerEvaluatingDependencyAnalyzer.scala From exodus with MIT License | 5 votes |
package com.wix.bazel.migrator.analyze

import java.nio.file.{Files, Path, Paths}
import java.util
import java.util.concurrent.atomic.AtomicInteger

import com.fasterxml.jackson.annotation.JsonTypeInfo
import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import com.wix.bazel.migrator.model._
import com.wixpress.build.maven.MavenScope
import org.slf4j.LoggerFactory

import scala.collection.JavaConverters._
import scala.collection.parallel.ParMap

//this is needed since currently the transformer isn't thread safe but the dependency analyzer is
class CachingEagerEvaluatingDependencyAnalyzer(sourceModules: Set[SourceModule], dependencyAnalyzer: DependencyAnalyzer, performSourceAnalysis: Boolean) extends DependencyAnalyzer {
  private val log = LoggerFactory.getLogger(getClass)
  private val cachePath = Files.createDirectories(Paths.get("./cache"))
  private val objectMapper = new ObjectMapper()
    .registerModule(DefaultScalaModule)
    .registerModule(new RelativePathSupportingModule)
    .registerModule(new SourceModuleSupportingModule(sourceModules))
    .addMixIn(classOf[Target], classOf[TypeAddingMixin])
    .addMixIn(classOf[CodePurpose], classOf[TypeAddingMixin])
    .addMixIn(classOf[TestType], classOf[TypeAddingMixin])
    .addMixIn(classOf[MavenScope], classOf[TypeAddingMixin])

  private val collectionType = objectMapper.getTypeFactory.constructCollectionType(classOf[util.Collection[Code]], classOf[Code])
  private val clean = performSourceAnalysis

  private def cachePathForSourceModule(m: SourceModule) = {
    cachePath.resolve(m.relativePathFromMonoRepoRoot + ".cache")
  }

  private val size = sourceModules.size
  private val counter = new AtomicInteger()
  private val tenthSize = size / 10

  private def initCachePathForSourceModule(p: Path) = Files.createDirectories(p.getParent)

  private def maybeCodeFromCache(p: Path): Option[List[Code]] = {
    if (clean || !Files.exists(p)) return None
    try {
      val value: util.Collection[Code] = objectMapper.readValue(p.toFile, collectionType)
      val codeList = value.asScala.toList
      Some(codeList)
    } catch {
      case e: Exception =>
        log.warn(s"Error reading $p, deleting cache file.")
        log.warn(e.getMessage)
        Files.deleteIfExists(p)
        None
    }
  }

  private def retrieveCodeAndCache(m: SourceModule, cachePath: Path): List[Code] = {
    val codeList = dependencyAnalyzer.allCodeForModule(m)
    Files.deleteIfExists(cachePath)
    initCachePathForSourceModule(cachePath)
    Files.createFile(cachePath)
    try {
      objectMapper.writeValue(cachePath.toFile, codeList)
    } catch {
      case e: InterruptedException =>
        log.warn(s"aborting write to file $cachePath")
        Files.deleteIfExists(cachePath)
        throw e
      case e: Exception =>
        log.warn(s"could not write to file $cachePath")
        log.warn(e.getMessage)
    }
    codeList
  }

  private def calculateMapEntryFor(sourceModule: SourceModule) = {
    printProgress()
    val cachePath = cachePathForSourceModule(sourceModule)
    (sourceModule, maybeCodeFromCache(cachePath).getOrElse(retrieveCodeAndCache(sourceModule, cachePath)))
  }

  private def printProgress(): Unit = {
    if (tenthSize > 0) {
      val currentCount = counter.incrementAndGet()
      if (currentCount % tenthSize == 0) {
        log.info(s"DependencyAnalyzer:allCodeForModule:\t ${currentCount / tenthSize * 10}% done")
      }
    }
  }

  private val allCode: ParMap[SourceModule, List[Code]] = sourceModules.par.map(calculateMapEntryFor).toMap

  override def allCodeForModule(module: SourceModule): List[Code] = allCode(module)
}

@JsonTypeInfo(use = JsonTypeInfo.Id.CLASS, property = "__class")
trait TypeAddingMixin
Example 137
Source File: ZincDependencyAnalyzer.scala From exodus with MIT License | 5 votes |
package com.wix.bazel.migrator.analyze

import java.nio.file.{Path, Paths}

import com.wix.bazel.migrator.model.SourceModule
import com.wix.build.zinc.analysis.{ZincAnalysisParser, ZincCodePath, ZincModuleAnalysis, ZincSourceModule}
import com.wixpress.build.maven.Coordinates

class ZincDependencyAnalyzer(repoPath: Path) extends DependencyAnalyzer {
  private val modules: Map[Coordinates, List[ZincModuleAnalysis]] =
    new ZincAnalysisParser(Paths.get(repoPath.toAbsolutePath.toString)).readModules()

  override def allCodeForModule(module: SourceModule): List[Code] = {
    val emptyDependencies = module.dependencies.copy(directDependencies = Set(), allDependencies = Set())
    // TODO: change type of passed module to not include dependencies!!!
    val strippedModule = module.copy(dependencies = emptyDependencies)
    allCodeForStrippedModule(strippedModule)
  }

  private def allCodeForStrippedModule(strippedModule: SourceModule) = {
    modules.getOrElse(strippedModule.coordinates, Nil).map { moduleAnalysis =>
      Code(toCodePath(strippedModule, moduleAnalysis.codePath), toDependencies(moduleAnalysis))
    }
  }

  private def toCodePath(module: SourceModule, v: ZincCodePath) = {
    CodePath(module, v.relativeSourceDirPathFromModuleRoot, v.filePath)
  }

  private def toDependencies(analysis: ZincModuleAnalysis) = {
    // TODO: figure out runtime deps!!!!!!!
    analysis.dependencies.map(d => {
      Dependency(toCodePath(moduleFrom(d.module), d), isCompileDependency = true)
    })
  }

  private def moduleFrom(m: ZincSourceModule) =
    SourceModule(m.moduleName, m.coordinates)
}
Example 138
Source File: InternalFileDepsOverridesDependencyAnalyzer.scala From exodus with MIT License | 5 votes |
package com.wix.bazel.migrator.analyze

import java.nio.file.Path

import com.wix.bazel.migrator.model.SourceModule
import com.wix.bazel.migrator.overrides.InternalFileDepsOverridesReader
import com.wix.build.maven.analysis.SourceModules

class InternalFileDepsOverridesDependencyAnalyzer(sourceModules: SourceModules, repoRoot: Path) extends DependencyAnalyzer {
  private val internalFileDepsOverrides = InternalFileDepsOverridesReader.from(repoRoot)

  private val compileTimeOverridesAsCode: Map[SourceModule, List[Code]] =
    internalFileDepsOverrides.compileTimeOverrides.map(compileOverridesToCodes).getOrElse(Map.empty)
  private val runtimeOverridesAsCode: Map[SourceModule, List[Code]] =
    internalFileDepsOverrides.runtimeOverrides.map(runtimeOverridesToCodes).getOrElse(Map.empty)
  private val compileAndRuntimeOverridesAsCode = compileTimeOverridesAsCode.foldLeft(runtimeOverridesAsCode) {
    case (acc, cur) => acc + (cur._1 -> (acc.getOrElse(cur._1, List.empty) ++ cur._2))
  }

  override def allCodeForModule(module: SourceModule): List[Code] =
    compileAndRuntimeOverridesAsCode.getOrElse(module, List.empty[Code])

  private def runtimeOverridesToCodes(overrides: Map[String, Map[String, List[String]]]) =
    overridesToCodes(isCompileDependency = false)(overrides)

  private def compileOverridesToCodes(overrides: Map[String, Map[String, List[String]]]) =
    overridesToCodes(isCompileDependency = true)(overrides)

  private def overridesToCodes(isCompileDependency: Boolean)(overrides: Map[String, Map[String, List[String]]]) =
    overrides.map { case (relativePath, moduleDeps) =>
      moduleForRelativePath(relativePath) -> moduleDeps.map { case (codeInModule, codeDeps) =>
        Code(codePathFrom(codeInRepo(relativePath, codeInModule)), codeDeps.map(dependencyOn(isCompileDependency)))
      }.toList
    }

  private def codeInRepo(relativePath: String, codeInModule: String) = {
    val modulePrefix = relativePath match {
      case "" => ""
      case nonEmpty => nonEmpty + "/"
    }
    modulePrefix + codeInModule
  }

  private def moduleForRelativePath(relativeModulePath: String) =
    sourceModules.findByRelativePath(relativeModulePath)
      .getOrElse(throw new IllegalArgumentException(s"Unknown relative module path $relativeModulePath"))

  private def codePathFrom(relativeFilePath: String) = {
    val filePathParts = relativeFilePath.split('/')
    val indexOfSrc = filePathParts.indexOf("src")
    CodePath(
      moduleForRelativePath(filePathParts.slice(0, indexOfSrc).mkString("/")),
      filePathParts.slice(indexOfSrc, indexOfSrc + 3).mkString("/"),
      filePathParts.slice(indexOfSrc + 3, filePathParts.length).mkString("/"))
  }

  private def dependencyOn(isCompileDependency: Boolean)(relativeFilePath: String): Dependency =
    Dependency(codePathFrom(relativeFilePath), isCompileDependency)
}
Example 139
Source File: Manifest.scala From exodus with MIT License | 5 votes |
package com.wix.bazel.migrator

import java.io._
import java.nio.file.{Files, Path}
import java.util.jar

import com.wix.bazel.migrator.Manifest.Attributes

case class Manifest(ImplementationArtifactId: String,
                    ImplementationVersion: String,
                    ImplementationVendorId: String) {

  @throws[IOException]
  def write(dir: Path): Path = {
    val m = new jar.Manifest()
    val attr = m.getMainAttributes
    attr.put(jar.Attributes.Name.MANIFEST_VERSION, "1.0") // mandatory attribute
    pairs foreach (attr.putValue _).tupled
    val file = manifestFileAt(dir)
    val os = Files.newOutputStream(file)
    try {
      m.write(os)
      file
    } finally {
      os.close()
    }
  }

  private def pairs: Seq[(String, String)] = Seq(
    Attributes.ImplementationArtifactId -> ImplementationArtifactId,
    Attributes.ImplementationVersion -> ImplementationVersion,
    Attributes.ImplementationVendorId -> ImplementationVendorId)

  @throws[IOException]
  private def manifestFileAt(dir: Path) = {
    Files.createDirectories(dir)
    dir.resolve("MANIFEST.MF")
  }
}

private object Manifest {
  object Attributes {
    val ImplementationArtifactId = "Implementation-ArtifactId"
    val ImplementationVersion = "Implementation-Version"
    val ImplementationVendorId = "Implementation-Vendor-Id"
  }
}
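A minimal usage sketch (the attribute values below are made up for illustration):

import java.nio.file.Files

// Hypothetical demo: write a MANIFEST.MF into a temp directory and print its path.
object ManifestDemo extends App {
  val manifest = Manifest(
    ImplementationArtifactId = "example-artifact",
    ImplementationVersion = "1.0.0",
    ImplementationVendorId = "com.example")
  println(manifest.write(Files.createTempDirectory("manifest-demo")))
}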
Example 140
Source File: GitIgnoreCleaner.scala From exodus with MIT License | 5 votes |
package com.wix.bazel.migrator

import java.nio.file.{Files, Path}

import scala.collection.JavaConverters._

class GitIgnoreCleaner(repoRoot: Path, blackListItems: Set[String] = GitIgnoreCleaner.DefaultBlackListItems) {
  val gitIgnorePath = repoRoot.resolve(".gitignore")

  def clean() = if (Files.isRegularFile(gitIgnorePath)) {
    // convert once so the comparison below is Seq-to-Seq; comparing the raw
    // java.util.List to a Scala Seq (as the original snippet did) is never equal
    val lines = Files.readAllLines(gitIgnorePath).asScala
    val modified = removeBlackListItems(lines)

    if (lines != modified)
      Files.write(gitIgnorePath, modified.asJava)
  }

  private def removeBlackListItems(lines: Seq[String]): Seq[String] = lines.filterNot(blackListItems)
}

object GitIgnoreCleaner {
  val DefaultBlackListItems = Set(
    "maven"
  )
}
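Usage is a one-liner; a sketch with an assumed repository root:

import java.nio.file.Paths

// Hypothetical demo: strips the default black-listed "maven" entry from .gitignore, if present.
object GitIgnoreCleanerDemo extends App {
  new GitIgnoreCleaner(Paths.get("/path/to/repo")).clean()
}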
Example 141
Source File: BazelRcManagedDevEnvWriter.scala From exodus with MIT License | 5 votes |
package com.wix.bazel.migrator

import java.nio.file.{Files, Path, StandardOpenOption}

class BazelRcManagedDevEnvWriter(repoRoot: Path, defaultOptions: List[String]) {

  private val bazelRcManagedDevEnvPath = repoRoot.resolve("tools/bazelrc/.bazelrc.managed.dev.env")

  def resetFileWithDefaultOptions(): Unit = {
    deleteIfExists()
    appendLines(defaultOptions)
  }

  def appendLine(line: String): Unit = appendLines(List(line))

  def appendLines(lines: List[String]): Unit =
    writeToDisk(lines.mkString("", System.lineSeparator(), System.lineSeparator()))

  private def deleteIfExists(): Unit = Files.deleteIfExists(bazelRcManagedDevEnvPath)

  private def writeToDisk(contents: String): Unit = {
    Files.createDirectories(bazelRcManagedDevEnvPath.getParent)
    Files.write(bazelRcManagedDevEnvPath, contents.getBytes, StandardOpenOption.APPEND, StandardOpenOption.CREATE)
  }
}

object BazelRcManagedDevEnvWriter {
  val defaultExodusOptions: List[String] = List(
    "# fetch",
    "fetch --experimental_multi_threaded_digest=true",
    "",
    "# query",
    "query --experimental_multi_threaded_digest=true",
    "",
    "# test",
    "test --test_tmpdir=/tmp",
    "test --test_output=errors",
    "",
    "# build",
    "build:bazel16uplocal --action_env=PLACE_HOLDER=SO_USING_CONFIG_GROUP_WILL_WORK_BW_CMPTBL",
    "build --strategy=Scalac=worker",
    "build --strict_java_deps=off",
    "build --strict_proto_deps=off",
    "build --experimental_multi_threaded_digest=true",
    "",
    "# this flag makes Bazel keep the analysis cache when test flags such as 'test_arg' (and other 'test_xxx' flags) change",
    "build --trim_test_configuration=true",
    "",
    "# the following flags serve tests but associated with the build command in order to avoid mutual analysis cache",
    "# invalidation between test commands and build commands (see https://github.com/bazelbuild/bazel/issues/7450)",
    "build --test_env=BUILD_TOOL=BAZEL",
    "build --test_env=DISPLAY",
    "build --test_env=LC_ALL=en_US.UTF-8",
  )
}
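A short sketch of driving the writer (the repo root and the extra option are placeholders):

import java.nio.file.Paths

// Hypothetical demo: seed the managed .bazelrc fragment with defaults, then append one more option.
object BazelRcManagedDevEnvDemo extends App {
  val writer = new BazelRcManagedDevEnvWriter(Paths.get("/path/to/repo"), BazelRcManagedDevEnvWriter.defaultExodusOptions)
  writer.resetFileWithDefaultOptions()
  writer.appendLine("build --verbose_failures")
}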
Example 142
Source File: ForcedBinaryDependenciesOverridesReader.scala From exodus with MIT License | 5 votes |
package com.wix.bazel.migrator.overrides

import java.nio.file.Path

import com.wixpress.build.maven.Coordinates

object ForcedBinaryDependenciesOverridesReader {
  //TODO - temp solution implement with json reader
  def from(repoRoot: Path): ForcedBinaryDependenciesOverrides =
    ForcedBinaryDependenciesOverrides(
      // static list that was extracted using buildoscope and build descriptor services
      Set(
        Coordinates.deserialize("com.wixpress.proto:members-area-api:1.0.0"),
        Coordinates.deserialize("com.wixpress.proto:communities-blog-proto:1.0.0"),
        Coordinates.deserialize("com.wixpress.proto:rpc-server-test-app-api:1.0.0"),
        Coordinates.deserialize("com.wixpress.proto:domain-helper-api-proto:1.0.0"),
        Coordinates.deserialize("com.wixpress.proto:promote-seo-api:1.0.0"),
        Coordinates.deserialize("com.wixpress.proto:promote-campaigns-manager-api:1.0.0"),
        Coordinates.deserialize("com.wixpress.proto:promote-home-api:1.0.0"),
        Coordinates.deserialize("com.wixpress.proto:wix-realtime-server-api:1.0.0"),
        Coordinates.deserialize("com.wixpress.proto:one-app-datalib-codegen-testidl:1.0.0"),
        Coordinates.deserialize("com.wixpress.proto:shoutout-email-api:1.0.0"),
        Coordinates.deserialize("com.wixpress.proto:wix-captcharator-api:1.0.0"),
        Coordinates.deserialize("com.wixpress.proto:app-settings-api:1.0.0"),
        Coordinates.deserialize("com.wixpress.proto:experts-beta-server-api:1.0.0"),
        Coordinates.deserialize("com.wixpress.proto:experts-server-api:1.0.0")
      )
    )
}
Example 143
Source File: SourceModulesOverridesReader.scala From exodus with MIT License | 5 votes |
package com.wix.bazel.migrator.overrides

import java.nio.file.{Files, Path}

import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import com.wix.build.maven.analysis.SourceModulesOverrides

object SourceModulesOverridesReader {

  private val mapper = new ObjectMapper().registerModule(DefaultScalaModule)

  def from(repoRoot: Path): SourceModulesOverrides = {
    val overridesPath = repoRoot.resolve("bazel_migration").resolve("source_modules.overrides")
    if (Files.exists(overridesPath))
      mapper.readValue(
        Files.newBufferedReader(overridesPath),
        classOf[SourceModulesOverrides]
      )
    else
      SourceModulesOverrides.empty
  }
}
Example 144
Source File: GeneratedCodeOverridesReader.scala From exodus with MIT License | 5 votes |
package com.wix.bazel.migrator.overrides

import java.nio.file.{Files, Path}

import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.module.scala.DefaultScalaModule

object GeneratedCodeOverridesReader {

  private val mapper = new ObjectMapper()
    .registerModule(DefaultScalaModule)

  def from(repoRoot: Path): GeneratedCodeLinksOverrides = {
    val overridesPath = repoRoot.resolve("bazel_migration").resolve("code_paths.overrides")
    if (Files.exists(overridesPath))
      mapper.readValue(
        Files.newBufferedReader(overridesPath),
        classOf[GeneratedCodeLinksOverrides]
      )
    else
      GeneratedCodeLinksOverrides.empty
  }
}
Example 145
Source File: MavenArchiveTargetsOverridesReader.scala From exodus with MIT License | 5 votes |
package com.wix.bazel.migrator.overrides

import java.nio.file.{Files, Path}

import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.module.scala.DefaultScalaModule

object MavenArchiveTargetsOverridesReader {
  def from(repoRoot: Path): MavenArchiveTargetsOverrides = {
    val overridesPath = repoRoot.resolve("bazel_migration").resolve("maven_archive_targets.overrides")
    if (Files.exists(overridesPath)) {
      val objectMapper = new ObjectMapper().registerModule(DefaultScalaModule)
      objectMapper.readValue(Files.readAllBytes(overridesPath), classOf[MavenArchiveTargetsOverrides])
    } else {
      MavenArchiveTargetsOverrides(Set.empty)
    }
  }
}
Example 146
Source File: InternalTargetOverridesReader.scala From exodus with MIT License | 5 votes |
package com.wix.bazel.migrator.overrides

import java.nio.file.{Files, Path}

import com.fasterxml.jackson.databind.{DeserializationFeature, ObjectMapper}
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import com.wix.bazel.migrator.model.TestType
import com.wix.bazel.migrator.utils.TypeAddingMixin

object InternalTargetOverridesReader {

  private val objectMapper = new ObjectMapper()
    .registerModule(DefaultScalaModule)
    .addMixIn(classOf[TestType], classOf[TypeAddingMixin])
    .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false)

  def from(repoRootPath: Path): InternalTargetsOverrides = {
    val internalTargetsOverrides = repoRootPath.resolve("bazel_migration").resolve("internal_targets.overrides")
    if (Files.isReadable(internalTargetsOverrides)) {
      objectMapper.readValue(Files.newInputStream(internalTargetsOverrides), classOf[InternalTargetsOverrides])
    } else {
      InternalTargetsOverrides()
    }
  }
}
Example 147
Source File: WorkspaceOverridesReader.scala From exodus with MIT License | 5 votes |
package com.wix.bazel.migrator.overrides

import java.nio.file.{Files, Path}

object WorkspaceOverridesReader {
  def from(repoRoot: Path): WorkspaceOverrides = {
    val overridesPath = repoRoot.resolve("bazel_migration").resolve("workspace.suffix.overrides")
    if (Files.exists(overridesPath))
      WorkspaceOverrides(readPath(overridesPath))
    else
      WorkspaceOverrides("")
  }

  private def readPath(path: Path) = new String(Files.readAllBytes(path))
}

case class WorkspaceOverrides(suffix: String)
Example 148
Source File: AdditionalDepsByMavenDepsOverridesReader.scala From exodus with MIT License | 5 votes |
package com.wix.bazel.migrator.overrides

import java.nio.file.{Files, Path}

import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.module.scala.DefaultScalaModule

import scala.util.{Failure, Success, Try}

object AdditionalDepsByMavenDepsOverridesReader {

  private val mapper = new ObjectMapper()
    .registerModule(DefaultScalaModule)

  def from(filepath: Path): AdditionalDepsByMavenDepsOverrides = {
    if (Files.exists(filepath))
      readContentIn(filepath)
    else
      AdditionalDepsByMavenDepsOverrides.empty
  }

  private def readContentIn(filepath: Path) = {
    Try(mapper.readValue(
      Files.newBufferedReader(filepath),
      classOf[AdditionalDepsByMavenDepsOverrides]
    )) match {
      case Success(overrides) => overrides
      case Failure(e) => throw OverrideParsingException(s"cannot parse $filepath", e)
    }
  }
}
Example 149
Source File: InternalFileDepsOverridesReader.scala From exodus with MIT License | 5 votes |
package com.wix.bazel.migrator.overrides

import java.nio.file.{Files, Path}

import com.fasterxml.jackson.databind._
import com.fasterxml.jackson.module.scala.DefaultScalaModule

object InternalFileDepsOverridesReader {

  private val mapper = new ObjectMapper().registerModule(DefaultScalaModule)

  def from(repoRoot: Path): InternalFileDepsOverrides = {
    val overridesPath = repoRoot.resolve("bazel_migration").resolve("internal_file_deps.overrides")
    if (Files.exists(overridesPath))
      mapper.readValue(
        Files.newBufferedReader(overridesPath),
        classOf[InternalFileDepsOverrides]
      )
    else
      InternalFileDepsOverrides.empty
  }
}
Example 150
Source File: PreludeWriter.scala From exodus with MIT License | 5 votes |
package com.wix.bazel.migrator

import java.nio.file.{Files, Path}

import com.wix.bazel.migrator.PreludeWriter._

class PreludeWriter(repoRoot: Path, preludeContent: Seq[String]) {
  def write(): Unit = {
    val path = repoRoot.resolve("tools/build_rules/")
    Files.createDirectories(path)
    writeEmptyBuildFile(path)
    writePrelude(path)
  }

  private def writePrelude(dest: Path): Unit = {
    writeToDisk(dest, "prelude_bazel", preludeContent.mkString(System.lineSeparator))
  }

  private def writeEmptyBuildFile(dest: Path): Unit =
    writeToDisk(dest, "BUILD.bazel", "")

  private def writeToDisk(dest: Path, filename: String, content: String): Unit =
    Files.write(dest.resolve(filename), content.getBytes)
}

object PreludeWriter {
  val ScalaLibraryImport =
    """|load(
       |  "@io_bazel_rules_scala//scala:scala.bzl",
       |  "scala_binary",
       |  "scala_library",
       |  "scala_test",
       |  "scala_macro_library",
       |  "scala_specs2_junit_test",
       |)
    """.stripMargin

  val ScalaImport = """load("@io_bazel_rules_scala//scala:scala_import.bzl", "scala_import",)"""
  val JavaTestImport = """load("@rules_jvm_test_discovery//:java_test_discovery.bzl", "java_test_discovery")"""
  val TestImport = """load("//:tests.bzl", "specs2_unit_test", "specs2_ite2e_test", "specs2_mixed_test")"""
  val Junit5Import = """load("//:junit5.bzl", "java_junit5_test")"""
  val SourcesImport = """load("//:macros.bzl", "sources")"""
}
Example 151
Source File: TemplateOfThirdPartyDepsSkylarkFileWriter.scala From exodus with MIT License | 5 votes |
package com.wix.bazel.migrator

import java.nio.file.{Files, Path}

class TemplateOfThirdPartyDepsSkylarkFileWriter(repoRoot: Path, mavenArchiveMacroPath: String) {

  def write(): Unit = {
    val thirdPartyDepsSkylarkFileContents =
      s"""
         |load("$mavenArchiveMacroPath", "maven_archive", "maven_proto")
         |
         |def third_party_dependencies():
      """.stripMargin

    writeToDisk(thirdPartyDepsSkylarkFileContents)
    createBuildFileIfMissing()
  }

  private def writeToDisk(thirdPartyDepsSkylarkFileContents: String): Unit =
    Files.write(repoRoot.resolve("third_party.bzl"), thirdPartyDepsSkylarkFileContents.getBytes)

  private def createBuildFileIfMissing(): Unit = {
    val buildFilePath = repoRoot.resolve("BUILD.bazel")
    if (!Files.exists(buildFilePath))
      Files.createFile(buildFilePath)
  }
}
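A minimal usage sketch (the repo root and macro load path are placeholders, not taken from the original project):

import java.nio.file.Paths

// Hypothetical demo: emit third_party.bzl plus an empty BUILD.bazel at the repo root.
object ThirdPartyTemplateDemo extends App {
  new TemplateOfThirdPartyDepsSkylarkFileWriter(Paths.get("/path/to/repo"), "@some_workspace//:macros.bzl").write()
}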
Example 152
Source File: BazelRcRemoteSettingsWriter.scala From exodus with MIT License | 5 votes |
package com.wix.bazel.migrator

import java.nio.file.{Files, Path}

class BazelRcRemoteSettingsWriter(repoRoot: Path) {

  def write(): Unit = {
    val contents =
      """# NOTE - THIS FILE IS MANUALLY DUPLICATED INSIDE WAZEL CONTAINER (see BazelRcRemoteSettingsWriter.writeToDisk for explanation)
        |
        |# Remote Build Execution requires a strong hash function, such as SHA256.
        |startup --host_jvm_args=-Dbazel.DigestFunction=SHA256
        |
        |# Set several flags related to specifying the toolchain and java properties.
        |# These flags are duplicated rather than imported from (for example)
        |# %workspace%/configs/debian8_clang/0.2.0/toolchain.bazelrc to make this
        |# bazelrc a standalone file that can be copied more easily.
        |build:rbe_based --host_javabase=@core_server_build_tools//rbe-toolchains/jdk:jdk8
        |build:rbe_based --javabase=@core_server_build_tools//rbe-toolchains/jdk:jdk8
        |build --crosstool_top=@core_server_build_tools//toolchains:crosstool_top
        |build --action_env=BAZEL_DO_NOT_DETECT_CPP_TOOLCHAIN=1
        |build --extra_toolchains=@core_server_build_tools//toolchains:extra_toolchains
        |build --host_platform=@core_server_build_tools//rbe-toolchains/jdk:rbe_ubuntu1604
        |build --platforms=@core_server_build_tools//rbe-toolchains/jdk:rbe_ubuntu1604
        |build:rbe_based --action_env=PLACE_HOLDER=SO_USING_CONFIG_GROUP_WILL_WORK_BW_CMPTBL
        |
        |# Enable encryption.
        |build --tls_enabled=true
        |
        |# Enforce stricter environment rules, which eliminates some non-hermetic
        |# behavior and therefore improves both the remote cache hit rate and the
        |# correctness and repeatability of the build.
        |build --experimental_strict_action_env=true
        |
        |# Set a higher timeout value, just in case.
        |build --remote_timeout=3600
        |
        |# Enable authentication. This will pick up application default credentials by
        |# default. You can use --auth_credentials=some_file.json to use a service
        |# account credential instead.
        |build --auth_enabled=true
        |
        |#The following environment variable is used by bazel integration e2e tests which need to know if we're using the
        |#`remote` configuration and so add custom toolchains which means the tests need to add them as well
        |test --test_env=REMOTE="true"
        |
        |test --test_env=CC
        |
        |build:rbe_based --extra_execution_platforms=@core_server_build_tools//platforms:rbe_small,@core_server_build_tools//platforms:rbe_large,@core_server_build_tools//platforms:rbe_default
        |test:rbe_based --extra_execution_platforms=@core_server_build_tools//platforms:rbe_small,@core_server_build_tools//platforms:rbe_large,@core_server_build_tools//platforms:rbe_default
        |""".stripMargin
    writeToDisk(contents)
  }

  // currently this file is duplicated between the global location (generated by the migrator) and between wazel container.
  // This is because docker cannot ADD files if they're not in the build context (symlinks included)
  // The global file is currently used for the jenkins rbe step AND gcb container (which runs rbe)
  // plan for removing this duplication - once we move to building all our images with docker-rules,
  // move .bazelrc.remotesettings to be a resource for both the gcb-bazel-step container AND for wazel container
  // (NOTE - if jenkins is still alive when this happens, it should also be added to the jenkins execution image)
  private def writeToDisk(contents: String): Unit =
    Files.write(repoRoot.resolve("tools/bazelrc/.bazelrc.remotesettings"), contents.getBytes)
}
Example 153
Source File: SourceModules.scala From exodus with MIT License | 5 votes |
package com.wix.build.maven.analysis

import java.nio.file.Path

import com.wix.bazel.migrator.model.SourceModule
import com.wix.bazel.migrator.overrides.SourceModulesOverridesReader
import com.wixpress.build.maven.AetherMavenDependencyResolver

case class SourceModules(codeModules: Set[SourceModule]) {
  def findByRelativePath(relativePath: String): Option[SourceModule] =
    codeModules.find(_.relativePathFromMonoRepoRoot == relativePath)
}

object SourceModules {
  def apply(repoRoot: Path, dependencyResolver: AetherMavenDependencyResolver) = new SourceModules(
    new MavenBuildSystem(repoRoot, SourceModulesOverridesReader.from(repoRoot), dependencyResolver)
      .modules()
  )

  def of(repoRoot: Path, dependencyResolver: AetherMavenDependencyResolver) = apply(repoRoot, dependencyResolver)
}
Example 154
Source File: MavenCoordinatesListReaderIT.scala From exodus with MIT License | 5 votes |
package com.wix.bazel.migrator.utils

import java.nio.charset.StandardCharsets
import java.nio.file.{Files, NoSuchFileException, Path}

import com.github.marschall.memoryfilesystem.MemoryFileSystemBuilder
import com.wixpress.build.maven.MavenCoordinatesListReader
import com.wixpress.build.maven.MavenMakers.someCoordinates
import org.specs2.mutable.SpecificationWithJUnit
import org.specs2.specification.Scope

//noinspection TypeAnnotation
class MavenCoordinatesListReaderIT extends SpecificationWithJUnit {
  "MavenCoordinatesListReader" should {
    "read file with coordinates" in new Ctx {
      val coordinatesA = someCoordinates("a")
      val coordinatesB = someCoordinates("b")
      val fileContent =
        s"""${coordinatesA.serialized}
           |${coordinatesB.serialized}""".stripMargin
      val filePath: Path = fileWithContent(fileContent)

      MavenCoordinatesListReader.coordinatesIn(filePath) mustEqual Set(coordinatesA, coordinatesB)
    }

    "ignore empty line" in new Ctx {
      val coordinatesA = someCoordinates("a")
      val coordinatesB = someCoordinates("b")
      val fileContent =
        s"""${coordinatesA.serialized}
           |
           |${coordinatesB.serialized}""".stripMargin
      val filePath: Path = fileWithContent(fileContent)

      MavenCoordinatesListReader.coordinatesIn(filePath) mustEqual Set(coordinatesA, coordinatesB)
    }

    "ignore preceding and trailing spaces" in new Ctx {
      val coordinatesA = someCoordinates("a")
      val coordinatesB = someCoordinates("b")
      val fileContent = s" ${coordinatesA.serialized} "
      val filePath: Path = fileWithContent(fileContent)

      MavenCoordinatesListReader.coordinatesIn(filePath) mustEqual Set(coordinatesA)
    }

    "ignore lines that starts with #" in new Ctx {
      val coordinatesA = someCoordinates("a")
      val coordinatesB = someCoordinates("b")
      val fileContent =
        s"""${coordinatesA.serialized}
           |#${coordinatesB.serialized}""".stripMargin
      val filePath: Path = fileWithContent(fileContent)

      MavenCoordinatesListReader.coordinatesIn(filePath) mustEqual Set(coordinatesA)
    }

    "throw exception in case file is missing" in new Ctx {
      MavenCoordinatesListReader.coordinatesIn(fs.getPath("non-existing-file")) must throwA[NoSuchFileException]
    }
  }

  trait Ctx extends Scope {
    val fs = MemoryFileSystemBuilder.newLinux().build()

    def fileWithContent(content: String): Path = {
      val path = Files.createTempFile(fs.getPath("/"), "", ".txt")
      Files.write(path, content.getBytes(StandardCharsets.UTF_8))
    }
  }
}
Example 155
Source File: InternalTargetOverridesReaderIT.scala From exodus with MIT License | 5 votes |
package com.wix.bazel.migrator.overrides

import java.nio.file.Path

import com.fasterxml.jackson.core.JsonProcessingException
import org.specs2.mutable.SpecificationWithJUnit
import org.specs2.specification.Scope

class InternalTargetOverridesReaderIT extends SpecificationWithJUnit {

  "read" should {
    "throw parse exception given invalid overrides json string" in new Context {
      writeOverrides("invl:")

      InternalTargetOverridesReader.from(repoRoot) must throwA[JsonProcessingException]
    }

    "default to no overrides when trying to read an non existent overrides file" in new Context {
      InternalTargetOverridesReader.from(repoRoot).targetOverrides must beEmpty
    }

    "read empty overrides" in new Context {
      val label = "//some/path/to/target:target"
      writeOverrides(
        s"""|{
            |  "targetOverrides" : [ {
            |    "label" : "$label"
            |  } ]
            |}""".stripMargin
      )

      InternalTargetOverridesReader.from(repoRoot) must beEqualTo(
        InternalTargetsOverrides(Set(InternalTargetOverride(label)))
      )
    }

    "read docker image dep from manual json" in new Context {
      val label = "//some/path/to/target:target"
      val dockerImage = "docker-repo/docker-image:t.a.g"
      writeOverrides(
        s"""{
           |  "targetOverrides" : [ {
           |    "label" : "$label",
           |    "dockerImagesDeps" : [ "$dockerImage" ]
           |  } ]
           |}""".stripMargin)

      InternalTargetOverridesReader.from(repoRoot) must beEqualTo(
        InternalTargetsOverrides(Set(InternalTargetOverride(label, dockerImagesDeps = Option(List(dockerImage))))))
    }

    "read block network from manual json" in new Context {
      val label = "//some/path/to/target:target"
      val blockNetwork = false
      writeOverrides(
        s"""{
           |  "targetOverrides" : [ {
           |    "label" : "$label",
           |    "blockNetwork" : $blockNetwork
           |  } ]
           |}""".stripMargin)

      InternalTargetOverridesReader.from(repoRoot) must beEqualTo(
        InternalTargetsOverrides(Set(InternalTargetOverride(label, blockNetwork = Some(blockNetwork)))))
    }
  }

  abstract class Context extends Scope with OverridesReaderITSupport {
    override val overridesPath: Path = setupOverridesPath(repoRoot, "internal_targets.overrides")
  }
}
Example 156
Source File: AdditionalDepsByMavenDepsOverridesReaderIT.scala From exodus with MIT License | 5 votes |
package com.wix.bazel.migrator.overrides

import java.nio.file.Path

import org.specs2.matcher.Matcher
import org.specs2.mutable.SpecificationWithJUnit
import org.specs2.specification.Scope

//noinspection TypeAnnotation
class AdditionalDepsByMavenDepsOverridesReaderIT extends SpecificationWithJUnit {

  "read" should {
    "throw parse exception given invalid overrides json string" in new Context {
      writeOverrides("invl:")

      AdditionalDepsByMavenDepsOverridesReader.from(overridesPath) must throwA[OverrideParsingException]
    }

    "read overrides from manual json" in new Context {
      writeOverrides(
        s"""{ "overrides" : [{
           |    "groupId" : "$groupId",
           |    "artifactId" : "$artifactId",
           |    "additionalDeps": {
           |      "deps" : ["$dependency"],
           |      "runtimeDeps" : ["$runtimeDependency"]
           |    }
           |  }
           |]}""".stripMargin)

      private val expectedOverride = AdditionalDepsByMavenDepsOverride(
        groupId,
        artifactId,
        AdditionalDeps(
          deps = Set(dependency),
          runtimeDeps = Set(runtimeDependency)))

      AdditionalDepsByMavenDepsOverridesReader.from(overridesPath) must containExactly(expectedOverride)
    }

    "read overrides from manual json with only runtime deps" in new Context {
      writeOverrides(
        s"""{ "overrides" : [{
           |    "groupId" : "$groupId",
           |    "artifactId" : "$artifactId",
           |    "additionalDeps": {
           |      "runtimeDeps" : ["$runtimeDependency"]
           |    }
           |  }
           |]}""".stripMargin)

      private val expectedOverride = AdditionalDepsByMavenDepsOverride(
        groupId,
        artifactId,
        AdditionalDeps(
          deps = Set.empty,
          runtimeDeps = Set(runtimeDependency)))

      AdditionalDepsByMavenDepsOverridesReader.from(overridesPath) must containExactly(expectedOverride)
    }.pendingUntilFixed("currently it reads 'deps' field as null, must specify empty array")

    "read overrides from generated json" in new Context {
      val overrides = AdditionalDepsByMavenDepsOverrides(List(AdditionalDepsByMavenDepsOverride(
        groupId,
        artifactId,
        AdditionalDeps(
          deps = Set(dependency),
          runtimeDeps = Set(runtimeDependency)))))
      writeOverrides(objectMapper.writeValueAsString(overrides))

      AdditionalDepsByMavenDepsOverridesReader.from(overridesPath) must beEqualTo(overrides)
    }

    "default to no overrides when trying to read an non existent overrides file" in new Context {
      AdditionalDepsByMavenDepsOverridesReader.from(overridesPath) must haveNoOverrides
    }
  }

  abstract class Context extends Scope with OverridesReaderITSupport {
    val groupId = "some.group"
    val artifactId = "some-artifact"
    val dependency = "//some:dependency"
    val runtimeDependency = "//some/runtime:dependency"
    override val overridesPath: Path = setupOverridesPath(repoRoot, "additional_deps_by_maven.overrides")
  }

  def containExactly(expectedOverride: AdditionalDepsByMavenDepsOverride): Matcher[AdditionalDepsByMavenDepsOverrides] =
    { (_: AdditionalDepsByMavenDepsOverrides).overrides } ^^ contain(exactly(expectedOverride))

  def haveNoOverrides: Matcher[AdditionalDepsByMavenDepsOverrides] =
    { (_: AdditionalDepsByMavenDepsOverrides).overrides } ^^ beEmpty
}
Example 157
Source File: GeneratedCodeLinksOverridesReaderIT.scala From exodus with MIT License | 5 votes |
package com.wix.bazel.migrator.overrides

import java.nio.file.Path

import com.fasterxml.jackson.core.JsonProcessingException
import com.wix.bazel.migrator
import com.wixpress.build.maven.{Coordinates, MavenMakers}
import org.specs2.mutable.SpecificationWithJUnit
import org.specs2.specification.Scope

//noinspection TypeAnnotation
class GeneratedCodeLinksOverridesReaderIT extends SpecificationWithJUnit {

  "read" should {
    "throw parse exception given invalid overrides json string" in new Context {
      writeOverrides("invl:")

      GeneratedCodeOverridesReader.from(repoRoot) must throwA[JsonProcessingException]
    }

    "read overrides from manual json" in new Context {
      val generatedFile = "com/wixpress/Foo.scala"
      val sourceFile = "com/wixpress/foo.proto"
      writeOverrides(
        s"""{
           |  "links" : [ {
           |    "groupId" : "${module.groupId}",
           |    "artifactId" : "${module.artifactId}",
           |    "generatedFile" : "$generatedFile",
           |    "sourceFile" : "$sourceFile"
           |  } ]
           |}""".stripMargin)

      GeneratedCodeOverridesReader.from(repoRoot) must beEqualTo(GeneratedCodeLinksOverrides(Seq(
        GeneratedCodeLink(module.groupId, module.artifactId, generatedFile, sourceFile))))
    }

    "read overrides from generated json" in new Context {
      val overrides = multipleOverrides
      writeOverrides(objectMapper.writeValueAsString(overrides))

      GeneratedCodeOverridesReader.from(repoRoot) must beEqualTo(overrides)
    }

    "default to no overrides when trying to read an non existent overrides file" in new Context {
      GeneratedCodeOverridesReader.from(repoRoot).links must beEmpty
    }
  }

  abstract class Context extends Scope with OverridesReaderITSupport {
    val module: Coordinates = MavenMakers.someCoordinates("some-module")
    override val overridesPath: Path = setupOverridesPath(repoRoot, "code_paths.overrides")

    def multipleOverrides: GeneratedCodeLinksOverrides = {
      val overrides = (1 to 20).map { index =>
        GeneratedCodeLink(
          groupId = module.groupId,
          artifactId = module.artifactId,
          generatedFile = s"com/wixpress/Foo$index.scala",
          sourceFile = s"com/wixpress/foo$index.proto"
        )
      }
      migrator.overrides.GeneratedCodeLinksOverrides(overrides)
    }
  }
}
Example 158
Source File: MavenArchiveTargetsOverridesReaderIT.scala From exodus with MIT License | 5 votes |
package com.wix.bazel.migrator.overrides

import java.nio.file.Path

import com.fasterxml.jackson.core.JsonProcessingException
import com.github.marschall.memoryfilesystem.MemoryFileSystemBuilder
import com.wixpress.build.bazel.OverrideCoordinates
import org.specs2.mutable.SpecificationWithJUnit
import org.specs2.specification.Scope

class MavenArchiveTargetsOverridesReaderIT extends SpecificationWithJUnit {

  "MavenArchiveTargetsOverridesReader" should {
    "return empty set in case override file does not exists" in {
      lazy val fileSystem = MemoryFileSystemBuilder.newLinux().build()
      val repoRoot: Path = fileSystem.getPath("repoRoot")

      MavenArchiveTargetsOverridesReader.from(repoRoot) mustEqual MavenArchiveTargetsOverrides(Set.empty)
    }

    "throw exception in case of invalid json" in new ctx {
      val overridesPath = setupOverridesPath(repoRoot, "maven_archive_targets.overrides")
      writeOverrides("blabla")

      MavenArchiveTargetsOverridesReader.from(repoRoot) must throwA[JsonProcessingException]
    }

    "return empty set in case of empty array in the json" in new ctx {
      val overridesPath = setupOverridesPath(repoRoot, "maven_archive_targets.overrides")
      val json =
        s"""|{
            |  "unpackedOverridesToArchive" : []
            |}""".stripMargin
      writeOverrides(json)

      MavenArchiveTargetsOverridesReader.from(repoRoot) mustEqual MavenArchiveTargetsOverrides(Set.empty)
    }

    "return set of maven archive coordinates to override" in new ctx {
      val overridesPath = setupOverridesPath(repoRoot, "maven_archive_targets.overrides")
      val json =
        s"""{
           |  "unpackedOverridesToArchive": [
           |    {
           |      "groupId": "some-group",
           |      "artifactId": "some-artifact-id"
           |    },
           |    {
           |      "groupId": "another-group",
           |      "artifactId": "another-artifact-id"
           |    }
           |  ]
           |}""".stripMargin
      writeOverrides(json)

      MavenArchiveTargetsOverridesReader.from(repoRoot) mustEqual MavenArchiveTargetsOverrides(Set(
        OverrideCoordinates("some-group", "some-artifact-id"),
        OverrideCoordinates("another-group", "another-artifact-id")))
    }
  }

  trait ctx extends Scope with OverridesReaderITSupport
}
Example 159
Source File: BazelRcWriterIT.scala From exodus with MIT License | 5 votes |
package com.wix.bazel.migrator

import java.nio.file.Path

import org.specs2.matcher.Matcher

class BazelRcWriterIT extends BaseWriterIT {

  "BazelRcWriter" should {
    "create file with the expected content" in new ctx {
      bazelRcWriter.write()
      bazelRcPath must beRegularFile(withContentMatching = ===(expectedBazelRcContent))
    }
  }

  trait ctx extends baseCtx {
    final val bazelRcPath: Path = path(withName = ".bazelrc")
    final val bazelRcWriter: BazelRcWriter = new BazelRcWriter(repoRoot)
    final val expectedBazelRcContent =
      """#
        |# DO NOT EDIT - this line imports shared managed bazel configuration
        |#
        |import %workspace%/tools/bazelrc/.bazelrc.managed.dev.env
        |
        |#
        |# ADDITIONS ONLY UNDER THIS LINE
        |#
        |""".stripMargin
  }

  def contentContainsLine(line: String): Matcher[String] = contentContainsLines(List(line))

  def contentContainsLines(lines: List[String]): Matcher[String] =
    { (_: String).split(System.lineSeparator()).toList } ^^ containAllOf(lines)

  def contentContainsExactlyLines(lines: List[String]): Matcher[String] =
    { (_: String).split(System.lineSeparator()).toList } ^^ containTheSameElementsAs(lines)
}
Example 160
Source File: DockerImagesWriterTest.scala From exodus with MIT License | 5 votes |
package com.wix.bazel.migrator

import java.nio.file.{Files, Path}

import better.files.File
import com.github.marschall.memoryfilesystem.MemoryFileSystemBuilder
import com.wix.bazel.migrator.overrides.{InternalTargetOverride, InternalTargetsOverrides}
import org.specs2.matcher.{Matcher, Scope}
import org.specs2.mutable.SpecificationWithJUnit

class DockerImagesWriterTest extends SpecificationWithJUnit {

  abstract class ctx extends Scope {
    def containExactlyOnce(substr: String): Matcher[String] = {
      { a: String => a.indexOf(substr) must not be_== -1 } and
        { a: String => a.indexOf(substr) must beEqualTo(a.lastIndexOf(substr)) }
    }

    val rootfs: Path = MemoryFileSystemBuilder.newLinux().build().getPath("repo-root")
    val overrideWithDockerImages = InternalTargetOverride("some-label",
      dockerImagesDeps = Option(List("mysql:5.7", "docker-repo.wixpress.com/com.wixpress.whatever/whatever:1.234.5")))

    def overrides: Set[InternalTargetOverride]

    def writer = new DockerImagesWriter(rootfs, InternalTargetsOverrides(overrides))
  }

  "DockerImagesWriter" should {
    "create docker_images.bzl in third_party/docker_images" in new ctx {
      def overrides: Set[InternalTargetOverride] = Set.empty
      writer.write()

      Files.exists(rootfs.resolve("third_party/docker_images/docker_images.bzl")) should beTrue
    }

    "create BUILD.bazel file in third_party/docker_images" in new ctx {
      def overrides: Set[InternalTargetOverride] = Set.empty
      writer.write()

      Files.exists(rootfs.resolve("third_party/docker_images/BUILD.bazel")) should beTrue
    }

    "fill default values in container_pull for short-form image" in new ctx {
      def overrides: Set[InternalTargetOverride] = Set(overrideWithDockerImages)
      writer.write()

      val expected: String =
        s"""|  container_pull(
            |    name = "mysql_5.7",
            |    registry = "index.docker.io",
            |    repository = "library/mysql",
            |    tag = "5.7"
            |  )""".stripMargin
      File(rootfs.resolve("third_party/docker_images/docker_images.bzl")).contentAsString must contain(expected)
    }

    "write values as-is in container_pull for full form image" in new ctx {
      def overrides: Set[InternalTargetOverride] = Set(overrideWithDockerImages)
      writer.write()

      val expected: String =
        s"""|  container_pull(
            |    name = "com.wixpress.whatever_whatever_1.234.5",
            |    registry = "docker-repo.wixpress.com",
            |    repository = "com.wixpress.whatever/whatever",
            |    tag = "1.234.5"
            |  )""".stripMargin
      File(rootfs.resolve("third_party/docker_images/docker_images.bzl")).contentAsString must contain(expected)
    }

    "write container_image in BUILD file" in new ctx {
      def overrides: Set[InternalTargetOverride] = Set(overrideWithDockerImages)
      writer.write()

      val expected: String =
        s"""container_image(name="com.wixpress.whatever_whatever_1.234.5", base="@com.wixpress.whatever_whatever_1.234.5//image")""".stripMargin
      File(rootfs.resolve("third_party/docker_images/BUILD.bazel")).contentAsString must contain(expected)
    }

    "deduplicate images in BUILD file" in new ctx {
      def overrides = Set(overrideWithDockerImages, overrideWithDockerImages.copy(label = "duplicate"))
      writer.write()

      private val fileContent: String = File(rootfs.resolve("third_party/docker_images/BUILD.bazel")).contentAsString
      fileContent must containExactlyOnce("container_image(name=\"mysql_5.7\",")
    }

    "deduplicate images in docker_images.bzl file" in new ctx {
      def overrides = Set(overrideWithDockerImages, overrideWithDockerImages.copy(label = "duplicate"))
      writer.write()

      private val fileContent: String = File(rootfs.resolve("third_party/docker_images/docker_images.bzl")).contentAsString
      fileContent must containExactlyOnce("name = \"mysql_5.7\",")
    }
  }
}
Example 161
Source File: JavaPSourceFileTracerTest.scala From exodus with MIT License | 5 votes |
package com.wix.bazel.migrator.analyze.jdk

import java.nio.file.{FileSystem, Files, Path}

import com.github.marschall.memoryfilesystem.MemoryFileSystemBuilder
import com.wix.bazel.migrator.model.SourceModule
import com.wix.bazel.migrator.model.makers.ModuleMaker._
import org.specs2.matcher.Scope
import org.specs2.mock.Mockito
import org.specs2.mutable.SpecificationWithJUnit
import com.wix.bazel.migrator.analyze.CodePath

class JavaPSourceFileTracerTest extends SpecificationWithJUnit with Mockito {

  "JavaPSourceFileTracerTest" should {
    "return the location of source file given it exists on filesystem" in new ctx {
      override def relativeSourcePath: String = "src/main/java"

      private val file: Path = fullPathToSourceFile
      Files.createDirectories(file.getParent)
      Files.createFile(file)

      processRunner.run(repoRoot, "javap", List("-cp", pathToClasses, fqn)) returns RunResult(
        exitCode = 0,
        stdOut =
          s"""Compiled from "${className}.$fileType"
             |dontcare
             |dontcare
             |""".stripMargin,
        stdErr = ""
      )

      val res = tracer.traceSourceFile(module, fqn = fqn, pathToClasses = pathToClasses, testClass = false)

      res mustEqual CodePath(module, relativeSourcePath, filePath)
    }
  }

  trait ctx extends Scope {
    val fileSystem: FileSystem = MemoryFileSystemBuilder.newLinux().build()
    val repoRoot: Path = fileSystem.getPath("/")
    val moduleName = "foo"
    val module: SourceModule = aModule(moduleName)

    def relativeSourcePath: String

    val javaPackage = "com.wix.example"
    val className = "Example"
    val fileType = "java"
    val filePath = javaPackage.replace('.', '/') + s"/$className.$fileType"

    def fullPathToSourceFile: Path = repoRoot.resolve(module.relativePathFromMonoRepoRoot).resolve(relativeSourcePath).resolve(filePath)

    val processRunner: ProcessRunner = mock[ProcessRunner]
    val tracer = new JavaPSourceFileTracer(repoRoot, processRunner, fileSystem)
    val pathToClasses: String = moduleName + "target/classes"
    val fqn = s"$javaPackage.$className"
  }
}
Example 162
Source File: MavenStandardModulePathsResolverTest.scala From exodus with MIT License | 5 votes |
package com.wix.bazel.migrator.analyze.jdk

import java.nio.file.{FileSystem, Files, Path}

import com.github.marschall.memoryfilesystem.MemoryFileSystemBuilder
import com.wix.bazel.migrator.model.SourceModule
import com.wix.bazel.migrator.model.makers.ModuleMaker.aModule
import org.specs2.matcher.Scope
import org.specs2.mutable.SpecificationWithJUnit

class MavenStandardModulePathsResolverTest extends SpecificationWithJUnit {

  "MavenStandardModulePathsResolver" >> {
    "in case asked for classes modules should" in {
      "return relative path to <relative-module-dir>/target/classes in case it exists" in new ctx {
        val standardPathToClasses: Path = pathToModuleTargetDir.resolve("classes")
        Files.createDirectories(standardPathToClasses)

        pathsResolver.resolveClassesPath(interestingModule) must
          beSome(interestingModule.relativePathFromMonoRepoRoot + "/target/classes")
      }

      "return None in case <relative-module-dir>/target/classes does not exist" in new ctx {
        pathsResolver.resolveClassesPath(interestingModule) must beNone
      }
    }

    "in case asked for test-classes for modules should" in {
      "return relative path to <relative-module-dir>/target/test-classes in case it exists" in new ctx {
        val standardPathToTestClasses: Path = pathToModuleTargetDir.resolve("test-classes")
        Files.createDirectories(standardPathToTestClasses)

        pathsResolver.resolveTestClassesPath(interestingModule) must
          beSome(interestingModule.relativePathFromMonoRepoRoot + "/target/test-classes")
      }

      "return None in case <relative-module-dir>/target/test-classes does not exist" in new ctx {
        pathsResolver.resolveTestClassesPath(interestingModule) must beNone
      }
    }

    "in case asked for jar-path for modules should" in {
      "return relative path to <relative-module-dir>/target/<artifactId>-<version>.jar in case it exists" in new ctx {
        val jarName = s"${interestingModule.coordinates.artifactId}-${interestingModule.coordinates.version}.jar"
        val standardPathToClasses: Path = pathToModuleTargetDir.resolve(jarName)
        Files.createDirectories(standardPathToClasses)

        pathsResolver.resolveJarPath(interestingModule) must
          beSome(interestingModule.relativePathFromMonoRepoRoot + s"/target/$jarName")
      }

      "return None in case <relative-module-dir>/target/classes does not exist" in new ctx {
        pathsResolver.resolveJarPath(interestingModule) must beNone
      }
    }
  }

  trait ctx extends Scope {
    val fileSystem: FileSystem = MemoryFileSystemBuilder.newLinux().build()
    val repoRoot: Path = fileSystem.getPath("/")
    val moduleName = "interesting-module"
    val interestingModule: SourceModule = aModule(moduleName).copy(relativePathFromMonoRepoRoot = moduleName)
    val pathToModule: Path = Files.createDirectories(repoRoot.resolve(interestingModule.relativePathFromMonoRepoRoot))
    val pathsResolver: ModulePathsResolver = new MavenStandardModulesPathsResolver(repoRoot, fileSystem)
    val pathToModuleTargetDir: Path = pathToModule.resolve("target")
  }
}
Example 163
Source File: SourceDirPathAndRelativePackagePathTest.scala From exodus with MIT License | 5 votes |
package com.wix.bazel.migrator

import java.nio.file.Path

import com.github.marschall.memoryfilesystem.MemoryFileSystemBuilder
import org.specs2.matcher.Scope
import org.specs2.mutable.SpecificationWithJUnit

//noinspection TypeAnnotation
class SourceDirPathAndRelativePackagePathTest extends SpecificationWithJUnit {

  "SourceDirPathAndRelativePackagePath" should {
    "serialize sources for target without cycles" in new ctx {
      val somePackage = "path/to/library"
      val sourceDirPath = rootPath.resolve(somePackage)

      SourceDirPathAndRelativePackagePath(sourceDirPath, somePackage).sourcesTarget mustEqual
        """
          |sources()
          |""".stripMargin
    }

    "serialize sources for target with cycles" in new ctx {
      val somePackage = "path/to/library"
      val sourceDirPath = rootPath.resolve(somePackage).resolve("subPackage")

      SourceDirPathAndRelativePackagePath(sourceDirPath, somePackage).sourcesTarget mustEqual
        s"""
           |sources(
           |  visibility = ["//$somePackage:__pkg__"]
           |)
           |""".stripMargin
    }

    "return the path to sources BUILD file" in new ctx {
      val somePackage = "path/to/library"
      val sourceDirPath = rootPath.resolve(somePackage)

      SourceDirPathAndRelativePackagePath(sourceDirPath, somePackage).sourceDirBuildPath mustEqual
        sourceDirPath.resolve("BUILD.bazel")
    }
  }

  trait ctx extends Scope {
    private val fs = MemoryFileSystemBuilder.newLinux().build()
    val rootPath: Path = fs.getPath("/")
  }
}
Example 164
Source File: Chapter10.scala From Learning-Spark-SQL with MIT License | 5 votes |
//Code for Chapter 10 to be executed in Spark shell. For all other code from the BigDL library follow the instructions and commands in the book.
//Note that the code in this Chapter uses Spark 2.1 due to some bugs.
//Execute the following on the command prompt to start the Spark shell
source /Users/aurobindosarkar/Downloads/BigDL-master/scripts/bigdl.sh
Aurobindos-MacBook-Pro-2:spark-2.1.0-bin-hadoop2.7 aurobindosarkar$ bin/spark-shell --properties-file /Users/aurobindosarkar/Downloads/BigDL-master/spark/dist/target/bigdl-0.2.0-SNAPSHOT-spark-2.0.0-scala-2.11.8-mac-dist/conf/spark-bigdl.conf --jars /Users/aurobindosarkar/Downloads/BigDL-master/spark/dist/target/bigdl-0.2.0-SNAPSHOT-spark-2.0.0-scala-2.11.8-mac-dist/lib/bigdl-0.2.0-SNAPSHOT-jar-with-dependencies.jar

import com.intel.analytics.bigdl._
import com.intel.analytics.bigdl.dataset.DataSet
import com.intel.analytics.bigdl.dataset.image.{BytesToGreyImg, GreyImgNormalizer, GreyImgToBatch, GreyImgToSample}
import com.intel.analytics.bigdl.nn.{ClassNLLCriterion, Module}
import com.intel.analytics.bigdl.numeric.NumericFloat
import com.intel.analytics.bigdl.optim._
import com.intel.analytics.bigdl.utils.{Engine, LoggerFilter, T, Table}
import com.intel.analytics.bigdl.nn._
import java.nio.ByteBuffer
import java.nio.file.{Files, Path, Paths}
import com.intel.analytics.bigdl.dataset.ByteRecord
import com.intel.analytics.bigdl.utils.File

val trainData = "/Users/aurobindosarkar/Downloads/mnist/train-images-idx3-ubyte"
val trainLabel = "/Users/aurobindosarkar/Downloads/mnist/train-labels-idx1-ubyte"
val validationData = "/Users/aurobindosarkar/Downloads/mnist/t10k-images-idx3-ubyte"
val validationLabel = "/Users/aurobindosarkar/Downloads/mnist/t10k-labels-idx1-ubyte"

val nodeNumber = 1
val coreNumber = 2
Engine.init
val model = Sequential[Float]()
val classNum = 10
val batchSize = 12
model.add(Reshape(Array(1, 28, 28))).add(SpatialConvolution(1, 6, 5, 5)).add(Tanh()).add(SpatialMaxPooling(2, 2, 2, 2)).add(Tanh()).add(SpatialConvolution(6, 12, 5, 5)).add(SpatialMaxPooling(2, 2, 2, 2)).add(Reshape(Array(12 * 4 * 4))).add(Linear(12 * 4 * 4, 100)).add(Tanh()).add(Linear(100, classNum)).add(LogSoftMax())

def load(featureFile: String, labelFile: String): Array[ByteRecord] = {
  val featureBuffer = ByteBuffer.wrap(Files.readAllBytes(Paths.get(featureFile)))
  val labelBuffer = ByteBuffer.wrap(Files.readAllBytes(Paths.get(labelFile)))
  val labelMagicNumber = labelBuffer.getInt()
  require(labelMagicNumber == 2049)
  val featureMagicNumber = featureBuffer.getInt()
  require(featureMagicNumber == 2051)
  val labelCount = labelBuffer.getInt()
  val featureCount = featureBuffer.getInt()
  require(labelCount == featureCount)
  val rowNum = featureBuffer.getInt()
  val colNum = featureBuffer.getInt()
  val result = new Array[ByteRecord](featureCount)
  var i = 0
  while (i < featureCount) {
    val img = new Array[Byte]((rowNum * colNum))
    var y = 0
    while (y < rowNum) {
      var x = 0
      while (x < colNum) {
        img(x + y * colNum) = featureBuffer.get()
        x += 1
      }
      y += 1
    }
    result(i) = ByteRecord(img, labelBuffer.get().toFloat + 1.0f)
    i += 1
  }
  result
}

val trainMean = 0.13066047740239506
val trainStd = 0.3081078
val trainSet = DataSet.array(load(trainData, trainLabel), sc) -> BytesToGreyImg(28, 28) -> GreyImgNormalizer(trainMean, trainStd) -> GreyImgToBatch(batchSize)
val optimizer = Optimizer(model = model, dataset = trainSet, criterion = ClassNLLCriterion[Float]())
val testMean = 0.13251460696903547
val testStd = 0.31048024
val maxEpoch = 2
val validationSet = DataSet.array(load(validationData, validationLabel), sc) -> BytesToGreyImg(28, 28) -> GreyImgNormalizer(testMean, testStd) -> GreyImgToBatch(batchSize)

optimizer.setEndWhen(Trigger.maxEpoch(2))
optimizer.setState(T("learningRate" -> 0.05, "learningRateDecay" -> 0.0))
optimizer.setCheckpoint("/Users/aurobindosarkar/Downloads/mnist/checkpoint", Trigger.severalIteration(500))
optimizer.setValidation(trigger = Trigger.everyEpoch, dataset = validationSet, vMethods = Array(new Top1Accuracy, new Top5Accuracy[Float], new Loss[Float]))
optimizer.optimize()
model.save("/Users/aurobindosarkar/Downloads/mnist/model")
val model = Module.load[Float]("/Users/aurobindosarkar/Downloads/mnist/model")
val partitionNum = 2
val rddData = sc.parallelize(load(validationData, validationLabel), partitionNum)
val transformer = BytesToGreyImg(28, 28) -> GreyImgNormalizer(testMean, testStd) -> GreyImgToSample()
val evaluationSet = transformer(rddData)
val result = model.evaluate(evaluationSet, Array(new Top1Accuracy[Float]), Some(batchSize))
result.foreach(r => println(s"${r._2} is ${r._1}"))
Example 165
Source File: zip.scala From watr-works with Apache License 2.0 | 5 votes |
package edu.umass.cs.iesl.watr
package corpora
package filesys

import java.nio.file.{DirectoryStream, Files, Path}

import fs2._
import cats.effect._
import cats.implicits._

object zip {

  def dirEntries[F[_]](dir: Path, include: Path => Boolean = _ => true)(implicit F: Effect[F]): fs2.Stream[F, Path] = {
    def useDirStream(dirStream: DirectoryStream[Path]): fs2.Stream[F, Path] = {
      Stream.unfold(dirStream.iterator) { iter =>
        if (iter.hasNext()) Some((iter.next(), iter)) else None
      }
    }

    val closeDirStream = (dirStream: DirectoryStream[Path]) => F.delay(dirStream.close)
    val acquire = F.delay(Files.newDirectoryStream(dir))
    val release = closeDirStream(_)

    Stream.bracket(acquire)(release)
      .flatMap(ds => useDirStream(ds))
      .filter(include)
  }

  def dirEntriesRecursive[F[_]](dir: Path, include: Path => Boolean = _ => true)(implicit F: Effect[F]): Stream[F, Path] =
    dirEntries[F](dir).flatMap { p =>
      val r = if (include(p)) Stream.emit(p) else Stream.empty
      if (Files.isDirectory(p)) r ++ dirEntriesRecursive(p, include)
      else r
    }
}
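A sketch of driving the stream with cats-effect IO, assuming an fs2/cats-effect version where an implicit Effect[IO] is in scope (the directory and filter are illustrative, not from the original project):

import java.nio.file.Paths
import cats.effect.IO
import edu.umass.cs.iesl.watr.corpora.filesys.zip

// Hypothetical demo: list *.txt entries directly under /tmp.
object DirEntriesDemo extends App {
  val txtFiles = zip.dirEntries[IO](Paths.get("/tmp"), _.toString.endsWith(".txt"))
    .compile.toList
    .unsafeRunSync()
  txtFiles.foreach(println)
}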
Example 166
Source File: ManagedPath.scala From zio-rocksdb with Apache License 2.0 | 5 votes |
package zio.rocksdb.internal
package internal

import java.io.IOException
import java.nio.file.{Files, Path}

import zio.{Task, UIO, ZIO, ZManaged}

import scala.reflect.io.Directory

object ManagedPath {
  private def createTempDirectory: Task[Path] = Task {
    Files.createTempDirectory("zio-rocksdb")
  }

  private def deleteDirectory(path: Path): UIO[Boolean] = UIO {
    new Directory(path.toFile).deleteRecursively()
  }

  private def deleteDirectoryE(path: Path): UIO[Unit] =
    deleteDirectory(path) >>= {
      case true  => ZIO.unit
      case false => ZIO.die(new IOException("Could not delete path recursively"))
    }

  def apply(): ZManaged[Any, Throwable, Path] = createTempDirectory.toManaged(deleteDirectoryE)
}
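A sketch of consuming the managed temp directory, assuming a ZIO 1.x runtime (the body of use is illustrative):

import zio.{Runtime, Task}
import zio.rocksdb.internal.internal.ManagedPath

// Hypothetical demo: the directory exists inside `use` and is deleted on release.
object ManagedPathDemo extends App {
  val program: Task[Unit] = ManagedPath().use { path =>
    Task(println(s"working in temp dir: $path"))
  }
  Runtime.default.unsafeRun(program)
}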
Example 167
Source File: HttpRepository.scala From mleap with Apache License 2.0 | 5 votes |
package ml.combust.mleap.executor.repository import java.net.URI import java.nio.file.{Files, Path} import java.util.concurrent.Executors import akka.actor.ActorSystem import com.typesafe.config.{Config, ConfigFactory} import scala.concurrent.{ExecutionContext, Future} import scala.concurrent.duration.TimeUnit object HttpRepositoryConfig { val defaults: Config = ConfigFactory.load().getConfig("ml.combust.mleap.executor.repository-defaults.http") } class HttpRepositoryConfig(_config: Config) { val config: Config = _config.withFallback(HttpRepositoryConfig.defaults) val threads: Int = config.getInt("threads") } class HttpRepository(config: HttpRepositoryConfig) extends Repository { private val threadPool = Executors.newFixedThreadPool(config.threads) implicit val diskEc: ExecutionContext = ExecutionContext.fromExecutor(threadPool) override def downloadBundle(uri: URI): Future[Path] = Future { val tmpFile = Files.createTempFile("mleap", ".bundle.zip") Files.copy(uri.toURL.openStream(), tmpFile) tmpFile } override def canHandle(uri: URI): Boolean = uri.getScheme == "http" || uri.getScheme == "https" override def shutdown(): Unit = threadPool.shutdown() override def awaitTermination(timeout: Long, unit: TimeUnit): Unit = threadPool.awaitTermination(timeout, unit) } object HttpRepositoryProvider extends RepositoryProvider { override def create(config: Config) (implicit system: ActorSystem): HttpRepository = { new HttpRepository(new HttpRepositoryConfig(config)) } }
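A usage sketch, assuming a Typesafe config that supplies the thread count (the key follows the code above; the URL is hypothetical):

import java.net.URI
import scala.concurrent.Await
import scala.concurrent.duration._
import com.typesafe.config.ConfigFactory

val repo = new HttpRepository(new HttpRepositoryConfig(ConfigFactory.parseString("threads = 4")))
val local = Await.result(repo.downloadBundle(URI.create("https://example.com/models/demo.bundle.zip")), 60.seconds)
repo.shutdown()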
Example 168
Source File: MultiRepository.scala From mleap with Apache License 2.0 | 5 votes |
package ml.combust.mleap.executor.repository import java.net.URI import java.nio.file.Path import java.util.concurrent.TimeUnit import akka.actor.ActorSystem import com.typesafe.config.Config import ml.combust.mleap.executor.error.BundleException import scala.collection.JavaConverters._ import scala.concurrent.{Await, Future, Promise} import scala.concurrent.duration._ import scala.util.Try class MultiRepository(repositories: Seq[Repository]) extends Repository { val terminatePromise: Promise[Unit] = Promise[Unit] new Thread { override def run(): Unit = { terminatePromise.complete(Try { for (repository <- repositories) { repository.awaitTermination(Long.MaxValue, TimeUnit.DAYS) } }) } }.start() override def downloadBundle(uri: URI): Future[Path] = { for (repository <- repositories) { if (repository.canHandle(uri)) return repository.downloadBundle(uri) } Future.failed(new BundleException("could not find a repository to download the bundle file")) } override def canHandle(uri: URI): Boolean = { for (repository <- repositories) { if (repository.canHandle(uri)) return true } false } override def shutdown(): Unit = { for (repository <- repositories) { repository.shutdown() } } override def awaitTermination(timeout: Long, unit: TimeUnit): Unit = synchronized { Try(Await.ready(terminatePromise.future, FiniteDuration(timeout, unit))) } } object MultiRepositoryProvider extends RepositoryProvider { override def create(config: Config) (implicit system: ActorSystem): MultiRepository = { val rConfigs = config.getConfigList("repositories") val repositories = for (rConfig <- rConfigs.asScala) yield { Repository.fromConfig(rConfig) } new MultiRepository(repositories) } }
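A usage sketch of the dispatch-by-scheme behaviour, composing the file and HTTP repositories from the neighbouring examples:

import java.net.URI
import com.typesafe.config.ConfigFactory

val http = new HttpRepository(new HttpRepositoryConfig(ConfigFactory.parseString("threads = 2")))
val multi = new MultiRepository(Seq(new FileRepository(), http))
multi.canHandle(URI.create("file:/tmp/model.bundle.zip"))  // true: handled by FileRepository
multi.canHandle(URI.create("https://example.com/m.zip"))   // true: handled by HttpRepository
multi.canHandle(URI.create("ftp://example.com/m.zip"))     // false: downloadBundle would fail with BundleException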
Example 169
Source File: FileRepository.scala From mleap with Apache License 2.0 | 5 votes |
package ml.combust.mleap.executor.repository import java.io.File import java.net.URI import java.nio.file.{Files, Path, StandardCopyOption} import java.util.concurrent.Executors import akka.actor.ActorSystem import com.typesafe.config.{Config, ConfigFactory} import ml.combust.mleap.executor.error.BundleException import scala.concurrent.duration.TimeUnit import scala.concurrent.{ExecutionContext, Future} object FileRepositoryConfig { val defaults: Config = ConfigFactory.load().getConfig("ml.combust.mleap.executor.repository-defaults.file") } class FileRepositoryConfig(_config: Config) { val config: Config = _config.withFallback(FileRepositoryConfig.defaults) val move: Boolean = config.getBoolean("move") val threads: Int = config.getInt("threads") } class FileRepository(config: FileRepositoryConfig) extends Repository { private val threadPool = Executors.newFixedThreadPool(config.threads) implicit val diskEc: ExecutionContext = ExecutionContext.fromExecutor(threadPool) def this() = this(new FileRepositoryConfig(FileRepositoryConfig.defaults)) override def downloadBundle(uri: URI): Future[Path] = Future { if (uri.getPath.isEmpty) { throw new BundleException("file path cannot be empty") } val local = new File(uri.getPath).toPath if (!Files.exists(local)) { throw new BundleException(s"file does not exist $local") } if (config.move) { val tmpFile = Files.createTempFile("mleap", ".bundle.zip") Files.copy(local, tmpFile, StandardCopyOption.REPLACE_EXISTING) tmpFile.toFile.deleteOnExit() tmpFile } else { local } } override def canHandle(uri: URI): Boolean = uri.getScheme == "file" || uri.getScheme == "jar:file" override def shutdown(): Unit = threadPool.shutdown() override def awaitTermination(timeout: Long, unit: TimeUnit): Unit = threadPool.awaitTermination(timeout, unit) } object FileRepositoryProvider extends RepositoryProvider { override def create(tConfig: Config) (implicit system: ActorSystem): Repository = { val config = new FileRepositoryConfig(tConfig) new FileRepository(config) } }
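A usage sketch of the move semantics: with move = true the bundle is copied to a temp file that deletes itself on JVM exit, so callers can consume it destructively (the bundle path below is hypothetical):

import java.net.URI
import scala.concurrent.Await
import scala.concurrent.duration._
import com.typesafe.config.ConfigFactory

val conf = new FileRepositoryConfig(ConfigFactory.parseString("move = true, threads = 2"))
val repo = new FileRepository(conf)
val path = Await.result(repo.downloadBundle(URI.create("file:/tmp/model.bundle.zip")), 30.seconds)
repo.shutdown()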
Example 170
Source File: ClusteringTreeNodeUtil.scala From mleap with Apache License 2.0 | 5 votes |
package org.apache.spark.mllib.clustering.bundle.tree.clustering import java.nio.file.Path import ml.combust.bundle.BundleContext import ml.combust.bundle.tree.cluster.NodeSerializer import org.apache.spark.ml.bundle.SparkBundleContext import org.apache.spark.mllib.clustering.{BisectingKMeansModel, ClusteringTreeNode} object ClusteringTreeNodeUtil { implicit val nodeWrapper = SparkNodeWrapper def write(bisectingKMeansModel: BisectingKMeansModel) (implicit context: BundleContext[SparkBundleContext]): Unit = { NodeSerializer[ClusteringTreeNode](context.file("tree")).write(bisectingKMeansModel.root) } def read() (implicit context: BundleContext[SparkBundleContext]): BisectingKMeansModel = { new BisectingKMeansModel(NodeSerializer[ClusteringTreeNode](context.file("tree")).read().get) } }
Example 171
Source File: NodeSerializer.scala From mleap with Apache License 2.0 | 5 votes |
package ml.combust.bundle.serializer import java.nio.file.{Files, Path} import ml.combust.bundle.BundleContext import ml.combust.bundle.dsl.{Bundle, Node} import ml.combust.bundle.json.JsonSupport._ import spray.json._ import scala.util.Try case class NodeSerializer[Context](bundleContext: BundleContext[Context]) { def read(): Try[Any] = { Try(FormatNodeSerializer.serializer.read(bundleContext.file(Bundle.nodeFile))).flatMap { node => ModelSerializer(bundleContext).readWithModel().flatMap { case (model, m) => Try { val op = bundleContext.bundleRegistry[Context, Any, Any](m.op) op.load(node, model)(bundleContext) } } } } }
Example 172
Source File: FileUtil.scala From mleap with Apache License 2.0 | 5 votes |
package ml.combust.bundle.util import java.io.IOException import java.nio.file.attribute.BasicFileAttributes import java.nio.file.{FileVisitResult, Files, Path, SimpleFileVisitor} object FileUtil { def rmRf(path: Path): Unit = { Files.walkFileTree(path, new SimpleFileVisitor[Path]() { override def visitFile(file: Path, attrs: BasicFileAttributes): FileVisitResult = { Files.delete(file) FileVisitResult.CONTINUE } override def postVisitDirectory(dir: Path, exc: IOException): FileVisitResult = { Files.delete(dir) FileVisitResult.CONTINUE } }) } }
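A usage sketch; walkFileTree visits files before invoking postVisitDirectory on their parent, so the visitor above deletes a tree bottom-up. The call fails if the path does not exist:

import java.nio.file.Paths

FileUtil.rmRf(Paths.get("/tmp/bundle-scratch"))  // deletes files first, then the emptied directories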
Example 173
Source File: S3Repository.scala From mleap with Apache License 2.0 | 5 votes |
package ml.combust.mleap.repository.s3 import java.net.URI import java.nio.file.{Files, Path} import java.util.concurrent.Executors import akka.actor.ActorSystem import com.amazonaws.services.s3.{AmazonS3ClientBuilder, AmazonS3URI} import com.typesafe.config.Config import ml.combust.mleap.executor.repository.{Repository, RepositoryProvider} import scala.concurrent.{ExecutionContext, Future} import scala.concurrent.duration.TimeUnit import scala.util.Try class S3RepositoryConfig(config: Config) { val threads: Int = config.getInt("threads") } class S3Repository(config: S3RepositoryConfig) extends Repository { private val client = AmazonS3ClientBuilder.defaultClient() private val threadPool = Executors.newFixedThreadPool(config.threads) implicit val diskEc: ExecutionContext = ExecutionContext.fromExecutor(threadPool) override def downloadBundle(uri: URI): Future[Path] = Future { val s3Uri = new AmazonS3URI(uri) val bucket = s3Uri.getBucket val key = s3Uri.getKey val tmpFile = Files.createTempFile("mleap", ".bundle.zip") Files.copy(client.getObject(bucket, key).getObjectContent, tmpFile) tmpFile } override def canHandle(uri: URI): Boolean = Try(new AmazonS3URI(uri)).isSuccess override def shutdown(): Unit = threadPool.shutdown() override def awaitTermination(timeout: Long, unit: TimeUnit): Unit = threadPool.awaitTermination(timeout, unit) } class S3RepositoryProvider extends RepositoryProvider { override def create(config: Config) (implicit system: ActorSystem): S3Repository = { new S3Repository(new S3RepositoryConfig(config)) } }
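A usage sketch; AmazonS3ClientBuilder.defaultClient() resolves credentials and region from the default AWS provider chain, so those must already be configured in the environment (bucket and key below are hypothetical):

import java.net.URI
import com.typesafe.config.ConfigFactory

val s3 = new S3Repository(new S3RepositoryConfig(ConfigFactory.parseString("threads = 4")))
s3.canHandle(URI.create("s3://my-bucket/models/demo.bundle.zip"))  // true whenever the URI parses as an S3 URI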
Example 174
Source File: ModelLoader.scala From mleap with Apache License 2.0 | 5 votes |
package ml.combust.mleap.springboot import TypeConverters._ import javax.annotation.PostConstruct import org.slf4j.LoggerFactory import ml.combust.mleap.pb import org.springframework.beans.factory.annotation.{Autowired, Value} import org.springframework.stereotype.Component import scala.collection.JavaConverters._ import java.nio.file.{Files, Path, Paths} import ml.combust.mleap.executor.MleapExecutor import scalapb.json4s.Parser @Component class ModelLoader(@Autowired val mleapExecutor: MleapExecutor, @Autowired val jsonParser: Parser) { @Value("${mleap.model.config:#{null}}") private val modelConfigPath: String = null private val logger = LoggerFactory.getLogger(classOf[ModelLoader]) private val timeout = 60000 @PostConstruct def loadModel(): Unit = { if (modelConfigPath == null) { logger.info("Skipping loading model on startup") return } val configPath = Paths.get(modelConfigPath) if (!Files.exists(configPath)) { logger.warn(s"Model path does not exist: $modelConfigPath") return } val configFiles: List[Path] = if (Files.isDirectory(configPath)) { Files.list(configPath).iterator().asScala.toList } else { List(configPath) } for (configFile <- configFiles) { logger.info(s"Loading model from ${configFile.toString}") val request = new String(Files.readAllBytes(configFile)) mleapExecutor.loadModel(jsonParser.fromJsonString[pb.LoadModelRequest](request))(timeout) } } }
Example 175
Source File: Result.scala From codacy-analysis-cli with GNU Affero General Public License v3.0 | 5 votes |
package com.codacy.analysis.core.model import java.nio.file.Path import com.codacy.plugins.api.metrics.LineComplexity import com.codacy.plugins.api.results import com.codacy.plugins.api.duplication.DuplicationCloneFile sealed trait Result final case class DuplicationClone(cloneLines: String, nrTokens: Int, nrLines: Int, files: Set[DuplicationCloneFile]) extends Result sealed trait ToolResult extends Result final case class FileError(filename: Path, message: String) extends ToolResult final case class Issue(patternId: results.Pattern.Id, filename: Path, message: Issue.Message, level: results.Result.Level, category: Option[results.Pattern.Category], location: Location) extends ToolResult object Issue { final case class Message(text: String) extends AnyVal { override def toString: String = text } } final case class FileMetrics(filename: Path, complexity: Option[Int], loc: Option[Int], cloc: Option[Int], nrMethods: Option[Int], nrClasses: Option[Int], lineComplexities: Set[LineComplexity]) extends Result
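Because ToolResult is sealed, consumers can pattern-match exhaustively over analysis outcomes; a minimal sketch:

def describe(result: ToolResult): String = result match {
  case Issue(patternId, file, message, level, _, _) =>
    s"[$level] $patternId in $file: $message"
  case FileError(file, message) =>
    s"could not analyse $file: $message"
}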
Example 176
Source File: IssuesReportSerializer.scala From codacy-analysis-cli with GNU Affero General Public License v3.0 | 5 votes |
package com.codacy.analysis.core.serializer import java.nio.file.Path import com.codacy.analysis.core.model.IssuesAnalysis.FileResults import com.codacy.analysis.core.model.{Issue, IssuesAnalysis, Location, ToolResult, ToolResults} import com.codacy.plugins.api.results import io.circe.{Encoder, Printer} import io.circe.generic.semiauto.deriveEncoder import io.circe.syntax._ object IssuesReportSerializer { private[IssuesReportSerializer] implicit val levelEncoder: Encoder[results.Result.Level.Value] = Encoder.encodeEnumeration(results.Result.Level) private[IssuesReportSerializer] implicit val categoryEncoder: Encoder[results.Pattern.Category.Value] = Encoder.encodeEnumeration(results.Pattern.Category) private[IssuesReportSerializer] implicit val pathEncoder: Encoder[Path] = Encoder.encodeString.contramap(_.toString) private[IssuesReportSerializer] implicit val toolResultsEncoder: Encoder[ToolResults] = deriveEncoder private[IssuesReportSerializer] implicit val issuesAnalysisEncoder: Encoder[IssuesAnalysis] = deriveEncoder private[IssuesReportSerializer] implicit val issueResultEncoder: Encoder[Issue] = deriveEncoder private[IssuesReportSerializer] implicit val patternIdEncoder: Encoder[results.Pattern.Id] = deriveEncoder private[IssuesReportSerializer] implicit val issueMessageEncoder: Encoder[Issue.Message] = deriveEncoder private[IssuesReportSerializer] implicit val issueLocationEncoder: Encoder[Location] = deriveEncoder private[IssuesReportSerializer] implicit val resultEncoder: Encoder[ToolResult] = deriveEncoder private[IssuesReportSerializer] implicit val fileResultsEncoder: Encoder[FileResults] = deriveEncoder def toJsonString(toolResults: Set[ToolResults]): String = toolResults.asJson.printWith(Printer.noSpaces.copy(dropNullValues = true)) }
Example 177
Source File: Text.scala From codacy-analysis-cli with GNU Affero General Public License v3.0 | 5 votes |
package com.codacy.analysis.cli.formatter import java.io.PrintStream import java.nio.file.Path import com.codacy.analysis.core.model._ import com.codacy.plugins.api.results import com.codacy.plugins.api.duplication.DuplicationCloneFile object Text extends FormatterCompanion { val name: String = "text" def apply(stream: PrintStream): Formatter = new Text(stream) } private[formatter] class Text(val stream: PrintStream) extends Formatter { override def begin(): Unit = { stream.println("Starting analysis ...") stream.flush() } override def end(): Unit = { stream.println("Analysis complete") stream.flush() } override def add(element: Result): Unit = { element match { case Issue(patternId, filename, message, level, category, location) => stream.println(prettyMessage(patternId, filename, message, level, category, location)) stream.flush() case FileError(filename, message) => stream.println(s"Found $message in $filename") stream.flush() case DuplicationClone(_, nrTokens, nrLines, files) => stream.println(prettyMessage(nrTokens, nrLines, files)) stream.flush() case fileMetrics: FileMetrics => stream.println(prettyMessage(fileMetrics)) stream.flush() } } private def prettyMessage(fileMetrics: FileMetrics): String = { val fileMetricsValues = List( fileMetrics.complexity.map(complexityNum => s" CC - $complexityNum"), fileMetrics.loc.map(loc => s" LOC - $loc"), fileMetrics.cloc.map(cloc => s" CLOC - $cloc"), fileMetrics.nrMethods.map(nrMethods => s" #methods - $nrMethods"), fileMetrics.nrClasses.map(nrClasses => s" #classes - $nrClasses")).collect { case Some(namedValue) => namedValue } val coloredMetricsFound = Console.MAGENTA + "Metrics" + Console.RESET val boldFileName = s"${Console.BOLD}${fileMetrics.filename}${Console.RESET}" if (fileMetricsValues.isEmpty) { s"No [$coloredMetricsFound] found in $boldFileName." } else { s"Found [$coloredMetricsFound] in $boldFileName:\n${fileMetricsValues.mkString("\n")}" } } private def prettyMessage(patternId: results.Pattern.Id, filename: Path, message: Issue.Message, level: results.Result.Level, category: Option[results.Pattern.Category], location: Location): String = { val categoryColored = Console.YELLOW + category.fold("")(c => s"/${c.toString}") + Console.RESET val levelColored = levelColor(level) + level + Console.RESET val patternColored = Console.BOLD + patternId + Console.RESET s"Found [$levelColored$categoryColored] `$message` in $filename:$location ($patternColored)" } private def prettyMessage(nrTokens: Int, nrLines: Int, files: Set[DuplicationCloneFile]): String = { val coloredCloneFound = Console.CYAN + "Clone" + Console.RESET val duplicatedFilesMsg = files .groupBy(_.filePath) .map { case (filePath, cloneFiles) => val lineNumbers = cloneFiles.map(cloneFile => s" l. ${cloneFile.startLine} - ${cloneFile.endLine}").mkString("\n") s" ${Console.BOLD}$filePath${Console.RESET}\n$lineNumbers" } .mkString("\n") s"Found [$coloredCloneFound] $nrLines duplicated lines with $nrTokens tokens:\n$duplicatedFilesMsg" } private def levelColor(level: results.Result.Level): String = { level match { case results.Result.Level.Info => Console.BLUE case results.Result.Level.Warn => Console.YELLOW case results.Result.Level.Err => Console.RED } } }
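A usage sketch driving the formatter through its begin/add/end life-cycle (the file name and message are hypothetical):

import java.nio.file.Paths
import com.codacy.analysis.core.model.FileError

val formatter = Text(System.out)
formatter.begin()
formatter.add(FileError(Paths.get("src/Main.scala"), "a hypothetical parse error"))
formatter.end()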
Example 178
Source File: Json.scala From codacy-analysis-cli with GNU Affero General Public License v3.0 | 5 votes |
package com.codacy.analysis.cli.formatter import java.io.PrintStream import java.nio.file.Path import com.codacy.analysis.core.model.Result import com.codacy.plugins.api.results import io.circe.Encoder import io.circe.generic.auto._ import io.circe.syntax._ import scala.util.Properties object Json extends FormatterCompanion { val name: String = "json" def apply(stream: PrintStream): Formatter = new Json(stream) } private[formatter] class Json(val stream: PrintStream) extends Formatter { private var alreadyPrinted: Boolean = false private implicit val categoryEncoder: Encoder[results.Pattern.Category.Value] = Encoder.encodeEnumeration(results.Pattern.Category) private implicit val levelEncoder: Encoder[results.Result.Level.Value] = Encoder.encodeEnumeration(results.Result.Level) private implicit val fileEncoder: Encoder[Path] = Encoder[String].contramap(_.toString) override def begin(): Unit = { stream.print("[") } override def end(): Unit = { stream.print("]") stream.print(Properties.lineSeparator) stream.flush() } def add(element: Result): Unit = { if (alreadyPrinted) stream.print(",") else alreadyPrinted = true stream.print(element.asJson.noSpaces) } }
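This formatter streams a JSON array without buffering: begin emits the opening bracket, each add prints one element (comma-separated via the alreadyPrinted flag), and end closes the array. A usage sketch mirroring the Text example:

import java.nio.file.Paths
import com.codacy.analysis.core.model.FileError

val json = Json(System.out)
json.begin()
json.add(FileError(Paths.get("src/Main.scala"), "a hypothetical parse error"))
json.end()  // emits a one-element JSON array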
Example 179
Source File: MetricsToolExecutor.scala From codacy-analysis-cli with GNU Affero General Public License v3.0 | 5 votes |
package com.codacy.analysis.cli.analysis import java.nio.file.Path import better.files.File import com.codacy.analysis.core import com.codacy.analysis.core.model.FileMetrics import scala.util.{Failure, Success} object MetricsToolExecutor { import com.codacy.analysis.cli.analysis.AnalyseExecutor._ def reduceMetricsToolResultsByFile(metricsResults: Seq[MetricsToolExecutorResult]): Seq[MetricsToolExecutorResult] = { val (successfulMetricsResults, failedMetricsResults) = metricsResults.partition(_.analysisResults.isSuccess) successfulMetricsResults .groupBy(_.language) .values .flatMap { _.foldLeft(Option.empty[MetricsToolExecutorResult]) { case ( Some(metricsExecutorResAcc @ MetricsToolExecutorResult(_, _, Success(fileMetricsAcc))), metricsExecutorRes @ MetricsToolExecutorResult(_, _, Success(fileMetrics))) => val allFiles = metricsExecutorResAcc.files ++ metricsExecutorRes.files val reducedFileMetrics = reduceFileMetricsByFile(fileMetrics ++ fileMetricsAcc) Some(metricsExecutorResAcc.copy(files = allFiles, analysisResults = Success(reducedFileMetrics))) case (_, o) => Some(o) } }(collection.breakOut) ++ failedMetricsResults } private def reduceFileMetricsByFile(fileMetrics: Set[FileMetrics]): Set[FileMetrics] = { fileMetrics .groupBy(_.filename) .flatMap { case (filePath, fMetrics) => fMetrics.reduceOption { (fMetricsAccumulator, fMetricsElement) => FileMetrics( filePath, fMetricsAccumulator.complexity.orElse(fMetricsElement.complexity), fMetricsAccumulator.loc.orElse(fMetricsElement.loc), fMetricsAccumulator.cloc.orElse(fMetricsElement.cloc), fMetricsAccumulator.nrMethods.orElse(fMetricsElement.nrMethods), fMetricsAccumulator.nrClasses.orElse(fMetricsElement.nrClasses), if (fMetricsAccumulator.lineComplexities.nonEmpty) { fMetricsAccumulator.lineComplexities } else { fMetricsElement.lineComplexities }) } }(collection.breakOut) } def calculateMissingFileMetrics( directory: File, metricsResults: Seq[AnalyseExecutor.MetricsToolExecutorResult]): Seq[MetricsToolExecutorResult] = { val fileMetricsByFilePath: Map[Path, FileMetrics] = metricsResults.flatMap { result => result.analysisResults.map(_.map(fileMetrics => (fileMetrics.filename, fileMetrics))).getOrElse(Set.empty) }(collection.breakOut) metricsResults.foldLeft(Seq.empty[MetricsToolExecutorResult]) { case (metricsAccumulator, res @ AnalyseExecutor.MetricsToolExecutorResult(_, _, Success(_))) => metricsAccumulator :+ countMissingLoc(directory, fileMetricsByFilePath, res) case (metricsAccumulator, res @ AnalyseExecutor.MetricsToolExecutorResult(lang, files, Failure(_))) if !metricsResults.exists(r => r.language == lang && r.files == files && r.analysisResults.isSuccess) => metricsAccumulator :+ res :+ countMissingLoc(directory, fileMetricsByFilePath, res) case (metricsAccumulator, res) => metricsAccumulator :+ res } } private def countMissingLoc(directory: File, fileMetricsByFilePath: Map[Path, FileMetrics], metricsRes: AnalyseExecutor.MetricsToolExecutorResult): MetricsToolExecutorResult = { val fileMetrics = metricsRes.files.map { file => fileMetricsByFilePath.get(file) match { case None => FileMetrics( filename = file, nrClasses = None, nrMethods = None, loc = countLoc(directory, file), cloc = None, complexity = None, lineComplexities = Set.empty) case Some(metrics) if metrics.loc.isEmpty => metrics.copy(loc = countLoc(directory, file)) case Some(metrics) => metrics } } metricsRes.copy(analysisResults = Success(fileMetrics)) } private def countLoc(directory: File, file: Path): Option[Int] = { val fileAbsolutePath = (directory / 
file.toString).path.toAbsolutePath.toString core.utils.FileHelper.countLoc(fileAbsolutePath) } }
Example 180
Source File: FileHelper.scala From codacy-analysis-cli with GNU Affero General Public License v3.0 | 5 votes |
package com.codacy.analysis.core.utils import java.nio.file.Path import better.files.File import org.log4s import org.log4s.getLogger import scala.util.Try object FileHelper { private val logger: log4s.Logger = getLogger def relativePath(filename: String): Path = File.currentWorkingDirectory.relativize(File(filename)) def countLoc(filename: String): Option[Int] = { Try(File(filename).lineIterator).fold( { t => logger.error(t)(s"Failed to read file $filename") Option.empty[Int] }, { lines => Some(lines.foldLeft(0) { case (counter, line) if line.trim.length >= 3 => counter + 1 case (counter, _) => counter }) }) } }
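A usage sketch; note that countLoc only counts lines whose trimmed length is at least three characters, so brace-only lines are excluded:

FileHelper.countLoc("build.sbt") match {
  case Some(loc) => println(s"build.sbt: $loc significant lines")
  case None      => println("build.sbt could not be read")
}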
Example 181
Source File: CodacyPluginsAnalyser.scala From codacy-analysis-cli with GNU Affero General Public License v3.0 | 5 votes |
package com.codacy.analysis.core.analysis import java.nio.file.Path import better.files.File import com.codacy.analysis.core.model._ import com.codacy.analysis.core.tools.{DuplicationTool, MetricsTool, Tool} import com.codacy.plugins.api.Source import org.log4s.{Logger, getLogger} import scala.concurrent.duration._ import scala.util.{Failure, Success, Try} class CodacyPluginsAnalyser extends Analyser[Try] { private val logger: Logger = getLogger override def analyse(tool: Tool, directory: File, files: Set[Path], config: Configuration, timeout: Option[Duration] = Option.empty[Duration]): Try[Set[ToolResult]] = { val result = tool.run(directory, files, config, timeout) result match { case Success(res) => logger.info(s"Completed analysis for ${tool.name} with ${res.size} results") case Failure(e) => logger.error(e)(Analyser.Error.ToolExecutionFailure("analysis", tool.name).message) } result } override def metrics(metricsTool: MetricsTool, directory: File, files: Option[Set[Path]], timeout: Option[Duration] = Option.empty[Duration]): Try[Set[FileMetrics]] = { val srcFiles = files.map(_.map(filePath => Source.File(filePath.toString))) val result = metricsTool.run(directory, srcFiles, timeout) result match { case Success(res) => logger.info(s"Completed metrics for ${metricsTool.name} with ${res.size} results") case Failure(e) => logger.error(e)(Analyser.Error.ToolExecutionFailure("metrics", metricsTool.name).message) } result.map(_.to[Set]) } override def duplication(duplicationTool: DuplicationTool, directory: File, files: Set[Path], timeout: Option[Duration] = Option.empty[Duration]): Try[Set[DuplicationClone]] = { val result = duplicationTool.run(directory, files, timeout) result match { case Success(res) => logger.info(s"Completed duplication for ${duplicationTool.name} with ${res.size} results") case Failure(e) => logger.error(e)(Analyser.Error.ToolExecutionFailure("duplication", duplicationTool.name).message) } result.map(_.to[Set]) } } object CodacyPluginsAnalyser extends AnalyserCompanion[Try] { val name: String = "codacy-plugins" override def apply(): Analyser[Try] = new CodacyPluginsAnalyser() object errors { def missingTool(tool: String): Analyser.Error = Analyser.Error.NonExistingToolInput(tool, Tool.allToolShortNames) } }
Example 182
Source File: Analyser.scala From codacy-analysis-cli with GNU Affero General Public License v3.0 | 5 votes |
package com.codacy.analysis.core.analysis import java.nio.file.Path import better.files.File import com.codacy.analysis.core.model.{Configuration, DuplicationClone, FileMetrics, ToolResult} import com.codacy.analysis.core.tools.{DuplicationTool, MetricsTool, Tool} import org.log4s.{Logger, getLogger} import scala.concurrent.duration.Duration import scala.util.Try trait AnalyserCompanion[T[_]] { def name: String def apply(): Analyser[T] } trait Analyser[T[_]] { def analyse(tool: Tool, directory: File, files: Set[Path], config: Configuration, timeout: Option[Duration] = Option.empty[Duration]): T[Set[ToolResult]] def metrics(metricsTool: MetricsTool, directory: File, files: Option[Set[Path]], timeout: Option[Duration] = Option.empty[Duration]): T[Set[FileMetrics]] def duplication(duplicationTool: DuplicationTool, directory: File, files: Set[Path], timeout: Option[Duration] = Option.empty[Duration]): T[Set[DuplicationClone]] } object Analyser { private val logger: Logger = getLogger val defaultAnalyser: AnalyserCompanion[Try] = CodacyPluginsAnalyser val allAnalysers: Set[AnalyserCompanion[Try]] = Set(defaultAnalyser) def apply(name: String): Analyser[Try] = { val builder = allAnalysers.find(_.name.equalsIgnoreCase(name)).getOrElse { logger.warn(s"Could not find analyser for name $name. Using ${defaultAnalyser.name} as fallback.") defaultAnalyser } builder() } sealed trait Error { val message: String } object Error { final case class ToolExecutionFailure(toolType: String, toolName: String) extends Error { override val message: String = s"Failed $toolType for $toolName" } final case class ToolNeedsNetwork(toolName: String) extends Error { override val message: String = s"The tool $toolName needs network access to execute." } final case class NonExistingToolInput(toolName: String, availableTools: Set[String]) extends Error { override val message: String = s"""The selected tool "$toolName" is not supported or does not exist. |The tool should be one of (${availableTools.mkString(", ")})""".stripMargin } case object NoActiveToolInConfiguration extends Error { override val message: String = "No active tool found on the remote configuration" } case object NoToolsFoundForFiles extends Error { override val message: String = "No tools found for files provided" } } }
Example 183
Source File: DuplicationTool.scala From codacy-analysis-cli with GNU Affero General Public License v3.0 | 5 votes |
package com.codacy.analysis.core.tools import java.nio.file.Path import better.files.File import com.codacy.analysis.core.model.DuplicationClone import com.codacy.plugins.api.duplication.DuplicationTool.CodacyConfiguration import com.codacy.plugins.api.languages.Language import com.codacy.plugins.api import com.codacy.plugins.duplication.traits import com.codacy.plugins.runners.{BinaryDockerRunner, DockerRunner} import com.codacy.plugins.utils.PluginHelper import org.log4s.getLogger import scala.concurrent.duration._ import scala.util.Try class DuplicationTool(private val duplicationTool: traits.DuplicationTool, val languageToRun: Language) extends ITool { override def name: String = "duplication" override def supportedLanguages: Set[Language] = duplicationTool.languages.to[Set] def run(directory: File, files: Set[Path], timeout: Option[Duration] = Option.empty[Duration]): Try[Set[DuplicationClone]] = { val dockerRunner = new BinaryDockerRunner[api.duplication.DuplicationClone](duplicationTool) val runner = new traits.DuplicationRunner(duplicationTool, dockerRunner) for { duplicationClones <- runner.run( directory.toJava, CodacyConfiguration(Option(languageToRun), Option.empty), timeout.getOrElse(DockerRunner.defaultRunTimeout), None) clones = filterDuplicationClones(duplicationClones, files) } yield { clones.map(clone => DuplicationClone(clone.cloneLines, clone.nrTokens, clone.nrLines, clone.files.to[Set]))( collection.breakOut): Set[DuplicationClone] } } private def filterDuplicationClones(duplicationClones: List[api.duplication.DuplicationClone], files: Set[Path], minCloneLines: Int = 5): List[api.duplication.DuplicationClone] = { // The duplication files should be more than 1. If it is one, then it means // that the other clone was in an ignored file. This is based on the assumption // that the duplication results will contain more than one entry for files // with duplicated clones with themselves. duplicationClones.collect { case clone if clone.nrLines >= minCloneLines => val commitFileNames = files.map(_.toString) val filteredFiles = filterUnignoredFiles(clone.files, commitFileNames) (clone.copy(files = filteredFiles), filteredFiles.length) }.collect { case (clone, nrCloneFiles) if nrCloneFiles > 1 => clone } } private def filterUnignoredFiles(duplicated: Seq[api.duplication.DuplicationCloneFile], expectedFiles: Set[String]): Seq[api.duplication.DuplicationCloneFile] = { duplicated.collect { case cloneFile if expectedFiles.contains(cloneFile.filePath) => cloneFile } } } object DuplicationToolCollector { private val logger: org.log4s.Logger = getLogger private val availableTools: List[traits.DuplicationTool] = PluginHelper.dockerDuplicationPlugins def fromLanguages(languages: Set[Language]): Set[DuplicationTool] = { languages.flatMap { lang => val collectedTools = availableTools.collect { case tool if tool.languages.contains(lang) => new DuplicationTool(tool, lang) } if (collectedTools.isEmpty) { logger.info(s"No duplication tools found for language ${lang.name}") } collectedTools } } }
Example 184
Source File: TestUtils.scala From codacy-analysis-cli with GNU Affero General Public License v3.0 | 5 votes |
package com.codacy.analysis.core.utils import java.nio.file.attribute.PosixFilePermission import java.nio.file.{Path, Paths} import better.files.File import com.codacy.plugins.api.results import io.circe.Decoder import org.specs2.concurrent.ExecutionEnv import org.specs2.matcher.MatchResult import scala.sys.process.Process object TestUtils { implicit val categoryDecoder: Decoder[results.Pattern.Category.Value] = Decoder.decodeEnumeration(results.Pattern.Category) implicit val levelDecoder: Decoder[results.Result.Level.Value] = Decoder.decodeEnumeration(results.Result.Level) implicit val fileDecoder: Decoder[Path] = Decoder[String].map(Paths.get(_)) implicit val executionEnv: ExecutionEnv = ExecutionEnv.fromGlobalExecutionContext def withClonedRepo[T](gitUrl: String, commitUUid: String)(block: (File, File) => MatchResult[T]): MatchResult[T] = (for { directory <- File.temporaryDirectory() file <- File.temporaryFile() } yield { directory .addPermission(PosixFilePermission.OWNER_READ) .addPermission(PosixFilePermission.GROUP_READ) .addPermission(PosixFilePermission.OTHERS_READ) .addPermission(PosixFilePermission.OWNER_EXECUTE) .addPermission(PosixFilePermission.GROUP_EXECUTE) .addPermission(PosixFilePermission.OTHERS_EXECUTE) Process(Seq("git", "clone", gitUrl, directory.pathAsString)).! Process(Seq("git", "reset", "--hard", commitUUid), directory.toJava).! block(file, directory) }).get() def withTemporaryGitRepo[T](fn: File => MatchResult[T]): MatchResult[T] = { (for { temporaryDirectory <- File.temporaryDirectory() } yield { Process(Seq("git", "init"), temporaryDirectory.toJava).! Process(Seq("git", "commit", "--allow-empty", "-m", "initial commit"), temporaryDirectory.toJava).! fn(temporaryDirectory) }).get } }
Example 185
Source File: JGitSystemReader.scala From sbt-dynver with Apache License 2.0 | 5 votes |
package sbtdynver import java.io.{ File, IOException } import java.net.{ InetAddress, UnknownHostException } import java.nio.file.{ Files, InvalidPathException, Path, Paths } import org.eclipse.jgit.internal.JGitText import org.eclipse.jgit.lib.{ Config, Constants } import org.eclipse.jgit.storage.file.FileBasedConfig import org.eclipse.jgit.util.{ FS, StringUtils, SystemReader } import org.slf4j.LoggerFactory // Copy of org.eclipse.jgit.util.SystemReader.Default with: // * calls to Files.createDirectories guarded by if !Files.isDirectory // necessary because my ~/.config is a symlink to a directory // which Files.createDirectories isn't happy with object JGitSystemReader extends SystemReader { private val LOG = LoggerFactory.getLogger(getClass) lazy val init: Unit = SystemReader.setInstance(this) override lazy val getHostname = { try InetAddress.getLocalHost.getCanonicalHostName catch { case _: UnknownHostException => "localhost" } }.ensuring(_ != null) override def getenv(variable: String): String = System.getenv(variable) override def getProperty(key: String): String = System.getProperty(key) override def getCurrentTime: Long = System.currentTimeMillis override def getTimezone(when: Long): Int = getTimeZone.getOffset(when) / (60 * 1000) override def openUserConfig(parent: Config, fs: FS) = new FileBasedConfig(parent, new File(fs.userHome, ".gitconfig"), fs) override def openSystemConfig(parent: Config, fs: FS): FileBasedConfig = { if (StringUtils.isEmptyOrNull(getenv(Constants.GIT_CONFIG_NOSYSTEM_KEY))) { val configFile = fs.getGitSystemConfig if (configFile != null) return new FileBasedConfig(parent, configFile, fs) } new FileBasedConfig(parent, null, fs) { override def load(): Unit = () // do not load override def isOutdated = false // regular class would bomb here } } override def openJGitConfig(parent: Config, fs: FS): FileBasedConfig = { val xdgPath = getXDGConfigHome(fs) if (xdgPath != null) { var configPath: Path = null try { configPath = xdgPath.resolve("jgit") if (!Files.isDirectory(configPath)) Files.createDirectories(configPath) configPath = configPath.resolve(Constants.CONFIG) return new FileBasedConfig(parent, configPath.toFile, fs) } catch { case e: IOException => LOG.error(JGitText.get.createJGitConfigFailed, configPath: Any, e) } } new FileBasedConfig(parent, new File(fs.userHome, ".jgitconfig"), fs) } private def getXDGConfigHome(fs: FS): Path = { var configHomePath = getenv(Constants.XDG_CONFIG_HOME) if (StringUtils.isEmptyOrNull(configHomePath)) configHomePath = new File(fs.userHome, ".config").getAbsolutePath try { val xdgHomePath = Paths.get(configHomePath) if (!Files.isDirectory(xdgHomePath)) Files.createDirectories(xdgHomePath) xdgHomePath } catch { case e @ (_: IOException | _: InvalidPathException) => LOG.error(JGitText.get.createXDGConfigHomeFailed, configHomePath: Any, e) null } } }
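Installation is a one-liner: forcing the lazy init replaces JGit's default SystemReader before any repository is opened:

import sbtdynver.JGitSystemReader

JGitSystemReader.init  // forces the lazy val, which calls SystemReader.setInstance(this)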
Example 186
Source File: BulkTableWriter.scala From spark-cassandra-stress with Apache License 2.0 | 5 votes |
package com.datastax.bdp.spark.writer import org.apache.spark.SparkContext import org.apache.spark.rdd.RDD import com.datastax.spark.connector._ import com.datastax.spark.connector.writer._ import java.nio.file.{Path, Files} import scala.language.implicitConversions object BulkTableWriter{ implicit def toBulkTableWriter[T](rdd: RDD[T]): BulkTableWriter[T] = new BulkTableWriter(rdd) } class BulkTableWriter[T](rdd: RDD[T]) { def bulkSaveToCassandra(keyspaceName: String, tableName: String, columns: ColumnSelector = AllColumns, writeConf: BulkWriteConf = BulkWriteConf()): Unit = { throw new UnsupportedOperationException } } case class BulkWriteConf(outputDirectory: Option[Path] = None, deleteSource: Boolean = true, bufferSizeInMB: Int = 64)
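The implicit conversion in the companion object is what makes bulkSaveToCassandra appear on any RDD once imported; the method body is still a stub, so calling it throws. A sketch of the intended call-site:

import com.datastax.bdp.spark.writer.BulkTableWriter._

// rdd.bulkSaveToCassandra("my_keyspace", "my_table")  // currently throws UnsupportedOperationException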
Example 187
Source File: ProcessJobRunnerSrv.scala From Cortex with GNU Affero General Public License v3.0 | 5 votes |
package org.thp.cortex.services import java.nio.charset.StandardCharsets import java.nio.file.{Files, Path, Paths} import akka.actor.ActorSystem import javax.inject.{Inject, Singleton} import org.elastic4play.utils.RichFuture import org.thp.cortex.models._ import play.api.Logger import play.api.libs.json.Json import scala.concurrent.duration.FiniteDuration import scala.concurrent.{ExecutionContext, Future} import scala.sys.process.{Process, ProcessLogger, _} import scala.util.Try @Singleton class ProcessJobRunnerSrv @Inject()(implicit val system: ActorSystem) { lazy val logger = Logger(getClass) private val pythonPackageVersionRegex = "^Version: ([0-9]*)\\.([0-9]*)\\.([0-9]*)".r def checkCortexUtilsVersion(pythonVersion: String): Option[(Int, Int, Int)] = Try { (s"pip$pythonVersion" :: "show" :: "cortexutils" :: Nil) .lineStream .collectFirst { case pythonPackageVersionRegex(major, minor, patch) ⇒ (major.toInt, minor.toInt, patch.toInt) } }.getOrElse(None) def run(jobDirectory: Path, command: String, job: Job, timeout: Option[FiniteDuration])(implicit ec: ExecutionContext): Future[Unit] = { val baseDirectory = Paths.get(command).getParent.getParent val output = StringBuilder.newBuilder logger.info(s"Execute $command in $baseDirectory, timeout is ${timeout.fold("none")(_.toString)}") val process = Process(Seq(command, jobDirectory.toString), baseDirectory.toFile) .run(ProcessLogger { s ⇒ logger.info(s" Job ${job.id}: $s") output ++= s }) val execution = Future .apply { process.exitValue() () } .map { _ ⇒ val outputFile = jobDirectory.resolve("output").resolve("output.json") if (!Files.exists(outputFile) || Files.size(outputFile) == 0) { val report = Json.obj("success" → false, "errorMessage" → output.toString) Files.write(outputFile, report.toString.getBytes(StandardCharsets.UTF_8)) } () } .recoverWith { case error ⇒ logger.error(s"Execution of command $command failed", error) Future.apply { val report = Json.obj("success" → false, "errorMessage" → s"${error.getMessage}\n$output") Files.write(jobDirectory.resolve("output").resolve("output.json"), report.toString.getBytes(StandardCharsets.UTF_8)) () } } timeout.fold(execution)(t ⇒ execution.withTimeout(t, killProcess(process))) } def killProcess(process: Process): Unit = { logger.info("Timeout reached, killing process") process.destroy() } }
Example 188
Source File: package.scala From milan with Apache License 2.0 | 5 votes |
package com.amazon.milan import java.io.{File, FileOutputStream} import java.net.URL import java.nio.file.{Files, Path} import com.typesafe.scalalogging.Logger import org.slf4j.LoggerFactory package object tools { private lazy val logger = Logger(LoggerFactory.getLogger("milan")) def addToSbtClasspath(paths: Seq[Path]): Unit = { val urls = paths.map(_.toUri.toURL).toList urls.foreach(url => logger.info(s"Adding {$url} to classpath.")) val classLoader = this.getClass.getClassLoader val addMethod = classLoader.getClass.getDeclaredMethod("add", classOf[Seq[URL]]) addMethod.invoke(classLoader, urls) } def compileApplicationInstance(providerClassName: String, providerParameters: List[(String, String)], compilerClassName: String, compilerParameters: List[(String, String)], outputFile: Path): File = { val providerClass = ClassHelper.loadClass(providerClassName) val provider = providerClass.getConstructors.find(_.getParameterCount == 0) match { case None => throw new Exception(s"Provider class $providerClassName does not have a default constructor.") case Some(constructor) => constructor.newInstance().asInstanceOf[ApplicationInstanceProvider] } val instance = provider.getApplicationInstance(providerParameters) val actualCompilerClassName = KnownCompilers.convertFromKnownCompiler(compilerClassName) val compilerClass = ClassHelper.loadClass(actualCompilerClassName) val compiler = compilerClass.getConstructors.find(_.getParameterCount == 0) match { case None => throw new Exception(s"Compiler class $actualCompilerClassName does not have a default constructor.") case Some(constructor) => constructor.newInstance().asInstanceOf[ApplicationInstanceCompiler] } println(s"Writing generated code to output file '$outputFile'.") Files.createDirectories(outputFile.getParent) val outputStream = new FileOutputStream(outputFile.toFile) try { compiler.compile(instance, compilerParameters, outputStream) outputFile.toFile } finally { outputStream.close() } } }
Example 189
Source File: FlinkGenerator.scala From milan with Apache License 2.0 | 5 votes |
package com.amazon.milan.compiler.flink.generator import java.io.{ByteArrayOutputStream, OutputStream} import java.nio.ByteBuffer import java.nio.charset.StandardCharsets import java.nio.file.{Files, Path, StandardOpenOption} import com.amazon.milan.application.{Application, ApplicationConfiguration, ApplicationInstance} import com.amazon.milan.compiler.flink.internal.FlinkTypeEmitter import com.amazon.milan.lang.StreamGraph import com.amazon.milan.program.{Cycle, StreamExpression} import com.amazon.milan.{Id, SemanticVersion} import com.typesafe.scalalogging.Logger import org.slf4j.LoggerFactory case class GeneratorConfig(preventGenericTypeInformation: Boolean = false) object FlinkGenerator { val default = new FlinkGenerator(GeneratorConfig()) } class FlinkGenerator(classLoader: ClassLoader, generatorConfig: GeneratorConfig) { private val generatorTypeLifter = new FlinkTypeLifter(new FlinkTypeEmitter, this.generatorConfig.preventGenericTypeInformation) private val logger = Logger(LoggerFactory.getLogger(getClass)) def this(generatorConfig: GeneratorConfig) { this(getClass.getClassLoader, generatorConfig) } def generateScala(graph: StreamGraph, appConfig: ApplicationConfiguration, packageName: String, className: String): String = { val application = new Application(Id.newId(), graph, SemanticVersion.ZERO) val instance = new ApplicationInstance(Id.newId(), application, appConfig) this.generateScala(instance, packageName, className) } def generateScala(instance: ApplicationInstance, outputPath: Path, packageName: String, className: String): Unit = { val scalaCode = this.generateScala(instance, packageName, className) val contents = scalaCode.getBytes(StandardCharsets.UTF_8) Files.write(outputPath, contents, StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING) } def generateScala(instance: ApplicationInstance, packageName: String, className: String): String = { val output = new ByteArrayOutputStream() this.generateScala(instance, output, packageName, className) output.flush() StandardCharsets.UTF_8.decode(ByteBuffer.wrap(output.toByteArray)).toString } def generateScala(instance: ApplicationInstance, output: OutputStream, packageName: String, className: String): Unit = { val finalGraph = instance.application.graph.getDereferencedGraph finalGraph.typeCheckGraph() val outputs = new GeneratorOutputs(this.generatorTypeLifter) val context = GeneratorContext.createEmpty(instance.instanceDefinitionId, finalGraph, instance.config, outputs, this.generatorTypeLifter) // Ensure that every data stream is generated. finalGraph .getStreams .foreach(stream => this.ensureStreamIsGenerated(context, stream)) // Close any cycles. finalGraph .getStreams .filter(_.isInstanceOf[Cycle]) .map(_.asInstanceOf[Cycle]) .foreach(context.closeCycle) // Add all sinks at the end. instance.config.dataSinks.foreach(sink => context.generateSink(sink)) val generated = context.output.generateScala(packageName, className) output.write(generated.getBytes(StandardCharsets.UTF_8)) } private def ensureStreamIsGenerated(context: GeneratorContext, stream: StreamExpression): Unit = { context.getOrGenerateDataStream(stream) } }
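A usage sketch, assuming a StreamGraph and ApplicationConfiguration have already been built elsewhere (graph and appConfig below stand in for your own instances):

val scalaSource: String =
  FlinkGenerator.default.generateScala(graph, appConfig, "com.example.generated", "MyFlinkApp")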
Example 190
Source File: Fresh.scala From sbt-fresh with Apache License 2.0 | 5 votes |
package de.heikoseeberger.sbtfresh import de.heikoseeberger.sbtfresh.license.License import java.nio.charset.StandardCharsets.UTF_8 import java.nio.file.{ Files, Path } import java.nio.file.StandardCopyOption.REPLACE_EXISTING import org.eclipse.jgit.api.Git private final class Fresh( buildDir: Path, organization: String, name: String, author: String, license: Option[License] ) { require(organization.nonEmpty, "organization must not be empty!") require(name.nonEmpty, "name must not be empty!") private val packageSegments = { val all = (organization.segments ++ name.segments).map(_.toLowerCase) val tail = all.tail.zip(all).collect { case (s1, s2) if s1 != s2 => s1 } all.head +: tail } def initialCommit(): Unit = { val git = Git.init().setDirectory(buildDir.toFile).call() git.add().addFilepattern(".").call() git.commit().setMessage("Fresh project, created with sbt-fresh").call() } def writeBuildProperties(): Path = write("project/build.properties", Template.buildProperties) def writeBuildSbt(setUpTravis: Boolean, setUpWartremover: Boolean): Path = write( "build.sbt", Template.buildSbt( organization, name, packageSegments, author, license, setUpTravis, setUpWartremover ) ) def writeGitignore(): Path = write(".gitignore", Template.gitignore) def writeLicense(): Unit = license.foreach(l => copy("LICENSE", l.id)) def writeNotice(): Path = write("NOTICE", Template.notice(author)) def writePlugins(setUpTravis: Boolean, setUpWartremover: Boolean): Path = write("project/plugins.sbt", Template.plugins(setUpTravis, setUpWartremover)) def writeReadme(): Path = write("README.md", Template.readme(name, license)) def writeScalafmt(): Path = write(".scalafmt.conf", Template.scalafmtConf) def writeTravisYml(): Path = write(".travis.yml", Template.travisYml) private def write(path: String, content: String) = Files.write(resolve(path), content.getBytes(UTF_8)) private def copy(path: String, name: String) = Files.copy(getClass.getResourceAsStream(s"/$name"), resolve(path), REPLACE_EXISTING) private def resolve(path: String) = { val resolved = buildDir.resolve(path) if (resolved.getParent != null) Files.createDirectories(resolved.getParent) resolved } }
Example 191
Source File: Main.scala From perf_tester with Apache License 2.0 | 5 votes |
package benchmarks import java.nio.file.{Files, Path, Paths} object Main extends App { val startTime = System.currentTimeMillis() val rootPath: Path = Paths.get(args.headOption.getOrElse(".")) val compilerSetup = new CompilerSetup(rootPath, args.drop(3).toList) val N = args.drop(1).headOption.map(_.toInt).getOrElse(2) // TODO change it! val M = args.drop(2).headOption.map(_.toInt).getOrElse(15) val sources = IO.listSourcesIn(rootPath.resolve("sources")).map(_.toString) val removeAt = N - M val profileFile = compilerSetup.outputDir.resolve("profile.txt") // TODO always add this! def runCompilation(n: Int): Long = { val run = new compilerSetup.global.Run val start = System.currentTimeMillis() run.compile(sources) val duration = System.currentTimeMillis() - start Files.move(compilerSetup.currentOutput, compilerSetup.currentOutput.resolveSibling(s"classes_$n")) if (n == removeAt && Files.exists(profileFile)) { Files.move(profileFile, profileFile.resolveSibling("initial-profile.txt")) } duration } println(s"Running benchmark with (N=$N, M=$M) in $rootPath with scalac options: ${compilerSetup.scalacOptions}") val times = (1 to N).map(runCompilation) val total = System.currentTimeMillis() - startTime def asSec(long: Long) = long / 1000.0 def asSec(d: Double) = d / 1000 val overhead = asSec(total - times.sum) val lastMAvg = asSec(times.takeRight(M).sum / M.toDouble) // TODO support cases where M > N val allAvg = asSec(times.sum / N.toDouble) // TODO proper output format println(s"Run $N compilations in ${asSec(total)} with overhead: $overhead.") println(s"Avgs. Last ($M): $lastMAvg, all $allAvg") println(s"Times: ${times.map(asSec)}") }
Example 192
Source File: CompilerSetup.scala From perf_tester with Apache License 2.0 | 5 votes |
package benchmarks import java.io.File import java.nio.file.{Files, Path} import benchmarks.Main.rootPath import scala.reflect.internal.util.Position import scala.tools.nsc.{Global, Settings} import scala.tools.nsc.reporters.Reporter import scala.util.Try import collection.JavaConverters._ case class CompilerSetup(rootPath: Path, providedScalacOptions: List[String]) { val outputDir: Path = rootPath.resolve("output") val currentOutput: Path = outputDir.resolve("classes") val scalacOptions = providedScalacOptions ++ Try(Files.readAllLines(rootPath.resolve("scalac.opts")).asScala.flatMap(_.split(" +"))).getOrElse(Nil) IO.cleanDir(outputDir) Files.createDirectories(currentOutput) val cpJars = IO.jarsIn(rootPath.resolve("cpJars")) val reporter: Reporter = new Reporter { // We are ignoring all override protected def info0(pos: Position, msg: String, severity: this.Severity, force: Boolean): Unit = { // println(s"[$severity] $pos: $msg") // Uncomment for to get compilation messages } } val settings: Settings = new Settings( msg => throw new RuntimeException(s"[ERROR] $msg") ) configure(settings) val global: Global = new Global(settings, reporter) def configure(settings: Settings): Unit = { settings.outputDirs.setSingleOutput(currentOutput.toAbsolutePath.toString) settings.classpath.append(cpJars.mkString(File.pathSeparator)) settings.processArguments(scalacOptions, processAll = true) } }
Example 193
Source File: IO.scala From perf_tester with Apache License 2.0 | 5 votes |
package benchmarks import java.nio.file.{Files, Path} object IO { def cleanDir(dir: Path): Unit = if(Files.exists(dir)) Files.walk(dir, 50).toArray.sortBy(- _.toString.length).foreach(o => Files.delete(o.asInstanceOf[Path])) def jarsIn(path: Path): Seq[Path] = Files.walk(path).toArray().map(_.asInstanceOf[Path].toAbsolutePath) .toList.filter(_.getFileName.toString.endsWith(".jar")) def listSourcesIn(path: Path): List[Path] = { def isSource(p: Path) = { val name = p.getFileName.toString name.endsWith(".scala") || name.endsWith(".java") } val maxDepth = 557 Files.walk(path, maxDepth).toArray.map(_.asInstanceOf[Path].toAbsolutePath).filter(isSource).toList } }
Example 194
Source File: CollectHsMetrics.scala From dagr with MIT License | 5 votes |
package dagr.tasks.picard import java.nio.file.Path import com.fulcrumgenomics.commons.io.PathUtil import dagr.tasks.DagrDef import DagrDef.{PathToBam, PathToFasta, PathToIntervals} import scala.collection.mutable.ListBuffer object CollectHsMetrics { val MetricsExtension = ".hybrid_selection_metrics" val PerTargetExtension = ".per_target_coverage" val PerBaseExtension = ".per_base_coverage" def baitSetName(baitSetIntervals: Path): String = PathUtil.basename(baitSetIntervals.toString, trimExt=true) } class CollectHsMetrics(override val in: PathToBam, override val prefix: Option[Path] = None, ref: PathToFasta, targets: PathToIntervals, baits: Option[PathToIntervals] = None, baitSetName: Option[String] = None, minimumBaseQuality: Option[Int] = None, generatePerBaseCoverage: Boolean = false, generatePerTargetCoverage: Boolean = true) extends PicardTask with PicardMetricsTask { override def metricsExtension: String = CollectHsMetrics.MetricsExtension override protected def addPicardArgs(buffer: ListBuffer[Any]): Unit = { buffer.append("I=" + in) buffer.append("O=" + metricsFile) buffer.append("R=" + ref) buffer.append("TI=" + targets) buffer.append("BI=" + baits.getOrElse(targets)) buffer.append("BAIT_SET_NAME=" + baitSetName.getOrElse(CollectHsMetrics.baitSetName(targets))) buffer.append("LEVEL=ALL_READS") if (generatePerTargetCoverage) { buffer.append("PER_TARGET_COVERAGE=" + metricsFile(extension=CollectHsMetrics.PerTargetExtension, kind=PicardOutput.Text)) } if (generatePerBaseCoverage) { buffer.append("PER_BASE_COVERAGE=" + metricsFile(extension=CollectHsMetrics.PerBaseExtension, kind=PicardOutput.Text)) } minimumBaseQuality.foreach { q => buffer.append("MINIMUM_BASE_QUALITY=" + q) } } }
Example 195
Source File: MarkIlluminaAdapters.scala From dagr with MIT License | 5 votes |
package dagr.tasks.picard import java.nio.file.Path import dagr.core.execsystem.{Cores, Memory} import dagr.core.tasksystem.Pipe import dagr.tasks.DataTypes.SamOrBam import dagr.tasks.DagrDef import DagrDef.PathToBam import scala.collection.mutable.ListBuffer object MarkIlluminaAdapters { val MetricsExtension = ".adapter_metrics" } class MarkIlluminaAdapters(override val in: PathToBam, out: PathToBam, override val prefix: Option[Path], fivePrimeAdapter: Option[String] = None, threePrimeAdapter: Option[String] = None) extends PicardTask with PicardMetricsTask with Pipe[SamOrBam,SamOrBam] { requires(Cores(1), Memory("1G")) override def metricsExtension: String = MarkIlluminaAdapters.MetricsExtension override protected def addPicardArgs(buffer: ListBuffer[Any]): Unit = { buffer.append("I=" + in) buffer.append("O=" + out) buffer.append("M=" + metricsFile) fivePrimeAdapter.foreach(v => buffer.append("FIVE_PRIME_ADAPTER=" + v)) threePrimeAdapter.foreach(v => buffer.append("THREE_PRIME_ADAPTER=" + v)) } }
Example 196
Source File: CollectTargetedPcrMetrics.scala From dagr with MIT License | 5 votes |
package dagr.tasks.picard import java.nio.file.Path import com.fulcrumgenomics.commons.io.PathUtil import dagr.tasks.DagrDef import DagrDef.{PathToBam, PathToFasta, PathToIntervals} import scala.collection.mutable.ListBuffer object CollectTargetedPcrMetrics { def perTargetMetricsPath(metricsFile: Path): Path = { PathUtil.replaceExtension(metricsFile, ".per_target." + PicardOutput.Text) } def metricsExtension: String = ".targeted_pcr_metrics" } class CollectTargetedPcrMetrics(override val in: PathToBam, override val prefix: Option[Path], ref: PathToFasta, targets: PathToIntervals) extends PicardTask with PicardMetricsTask{ override def metricsExtension: String = CollectTargetedPcrMetrics.metricsExtension override protected def addPicardArgs(buffer: ListBuffer[Any]): Unit = { buffer.append("I=" + in) buffer.append("O=" + metricsFile) buffer.append("R=" + ref) buffer.append("TARGET_INTERVALS=" + targets) buffer.append("AMPLICON_INTERVALS=" + targets) buffer.append("LEVEL=ALL_READS") buffer.append("PER_TARGET_COVERAGE=" + CollectTargetedPcrMetrics.perTargetMetricsPath(metricsFile)) } }
Example 197
Source File: CollectGcBiasMetrics.scala From dagr with MIT License | 5 votes |
package dagr.tasks.picard import java.nio.file.Path import dagr.tasks.DagrDef import DagrDef.{PathToBam, PathToFasta} import scala.collection.mutable.ListBuffer object CollectGcBiasMetrics { val HistogramExtension: String = ".gc_bias" val SummaryMetricsExtension: String = ".gc_bias.summary_metrics" val DetailMetricsExtension: String = ".gc_bias.detail_metrics" } class CollectGcBiasMetrics(override val in: PathToBam, override val prefix: Option[Path], ref: PathToFasta) extends PicardTask with PicardMetricsTask { override def metricsExtension: String = "" override protected def addPicardArgs(buffer: ListBuffer[Any]): Unit = { buffer.append("R=" + ref) buffer.append("I=" + in) buffer.append("O=" + metricsFile(CollectGcBiasMetrics.DetailMetricsExtension, PicardOutput.Text)) buffer.append("S=" + metricsFile(CollectGcBiasMetrics.SummaryMetricsExtension, PicardOutput.Text)) buffer.append("CHART=" + metricsFile(CollectGcBiasMetrics.HistogramExtension, PicardOutput.Pdf)) } }
Example 198
Source File: SamtoolsPileup.scala From dagr with MIT License | 5 votes |
package dagr.tasks.samtools import java.nio.file.Path import dagr.core.tasksystem.FixedResources import dagr.tasks.DagrDef import DagrDef.{PathToBam, PathToFasta, PathToIntervals} import scala.collection.mutable.ListBuffer class SamtoolsPileup(ref: PathToFasta, regions: Option[PathToIntervals] = None, bam: PathToBam, out: Option[Path], maxDepth: Int = 5000, minMappingQuality: Int = 1, minBaseQuality: Int = 13) extends SamtoolsTask("mpileup") with FixedResources { override def addSubcommandArgs(buffer: ListBuffer[Any]): Unit = { buffer.append("--fasta-ref", ref.toString) regions.foreach(r => buffer.append("--positions", r.toString)) out.foreach(f => buffer.append("--output", f.toString)) buffer.append("--max-depth", maxDepth.toString) buffer.append("--min-MQ", minMappingQuality.toString) buffer.append("--min-BQ", minBaseQuality.toString) buffer.append(bam) } }
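A construction sketch; the PathToFasta/PathToBam aliases resolve to java.nio.file.Path, so plain Paths.get values work (file names are hypothetical):

import java.nio.file.Paths

val pileup = new SamtoolsPileup(
  ref = Paths.get("ref/human_g1k_v37.fasta"),
  bam = Paths.get("sample01.bam"),
  out = Some(Paths.get("sample01.pileup")))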
Example 199
Source File: VerifyBamId.scala From dagr with MIT License | 5 votes |
package dagr.tasks.misc import java.nio.file.Path import dagr.core.config.Configuration import dagr.core.execsystem.{Cores, Memory} import dagr.core.tasksystem.{FixedResources, ProcessTask} import dagr.tasks.DagrDef import DagrDef.{PathToBam, PathToVcf} import scala.collection.mutable.ListBuffer class VerifyBamId(val vcf: PathToVcf, val bam: PathToBam, val out: Path, val maxDepth: Int = 50, val minMapq: Int = 20, val minQ: Int = 20, val grid: Double = 0.01, val precise: Boolean = true ) extends ProcessTask with Configuration with FixedResources { requires(Cores(1), Memory("1G")) private val verifyBamID: Path = configureExecutable("verifybamid.executable", "verifyBamID") override def args: Seq[Any] = { val buffer = ListBuffer[Any]() buffer ++= verifyBamID :: "--ignoreRG" :: "--chip-none" :: "--noPhoneHome" :: "--self" :: Nil buffer ++= "--vcf" :: vcf :: "--bam" :: bam :: "--out" :: out :: Nil buffer ++= "--maxDepth" :: maxDepth :: "--minMapQ" :: minMapq :: "--minQ" :: minQ :: "--grid" :: grid :: Nil if (precise) buffer += "--precise" buffer.toList } }
Example 200
Source File: DeleteFiles.scala From dagr with MIT License | 5 votes |
package dagr.tasks.misc import java.nio.file.{Path, Files} import java.util.stream.Collectors import com.fulcrumgenomics.commons.CommonsDef._ import dagr.core.tasksystem.SimpleInJvmTask private def delete(path: Path): Unit = { if (Files.isDirectory(path)) { val childStream = Files.list(path) val children = childStream.collect(Collectors.toList()) childStream.close() children.iterator.foreach(this.delete) } Files.deleteIfExists(path) } }
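The excerpt above omits the enclosing task class; only the recursive delete helper survived extraction. A minimal sketch of how it could be wrapped, where the constructor and entry-point signatures are assumptions rather than dagr's actual API:

class DeleteFiles(paths: Seq[Path]) extends SimpleInJvmTask {  // hypothetical signature
  def run(): Unit = paths.foreach(delete)  // assumed entry point

  private def delete(path: Path): Unit = {
    if (Files.isDirectory(path)) {
      val childStream = Files.list(path)
      val children = childStream.collect(Collectors.toList())
      childStream.close()
      children.iterator.foreach(this.delete)  // java iterator enriched via CommonsDef implicits
    }
    Files.deleteIfExists(path)
  }
}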