java.util.zip.ZipFile Scala Examples
The following examples show how to use java.util.zip.ZipFile.
Each example notes its original project, source file, and license.
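As a quick orientation before the examples, here is a minimal sketch of the core ZipFile workflow they all build on: open an archive, enumerate its entries, and read one entry's stream. The archive and entry names are placeholders.

import java.util.zip.ZipFile
import scala.collection.JavaConverters._
import scala.io.Source

val zf = new ZipFile("archive.zip") // placeholder path
try {
  // entries() returns a java.util.Enumeration; asScala makes it iterable
  zf.entries().asScala.foreach(e => println(s"${e.getName} (${e.getSize} bytes)"))
  // getEntry returns null when the entry is absent, hence the Option wrapper
  Option(zf.getEntry("README.txt")).foreach { entry =>
    println(Source.fromInputStream(zf.getInputStream(entry), "UTF-8").mkString)
  }
} finally zf.close()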
Example 1
Source File: AddJar.scala From incubator-toree with Apache License 2.0
package org.apache.toree.magic.builtin

import java.io.{File, PrintStream}
import java.net.{URL, URI}
import java.nio.file.{Files, Paths}
import java.util.zip.ZipFile

import org.apache.toree.magic._
import org.apache.toree.magic.builtin.AddJar._
import org.apache.toree.magic.dependencies._
import org.apache.toree.utils.{ArgumentParsingSupport, DownloadSupport, LogLike, FileUtils}
import com.typesafe.config.Config
import org.apache.hadoop.fs.Path
import org.apache.toree.plugins.annotations.Event

object AddJar {
  val HADOOP_FS_SCHEMES = Set("hdfs", "s3", "s3n", "file")

  private var jarDir: Option[String] = None

  def getJarDir(config: Config): String = {
    jarDir.getOrElse({
      jarDir = Some(
        if (config.hasPath("jar_dir") && Files.exists(Paths.get(config.getString("jar_dir")))) {
          config.getString("jar_dir")
        } else {
          FileUtils.createManagedTempDirectory("toree_add_jars").getAbsolutePath
        }
      )
      jarDir.get
    })
  }
}

class AddJar extends LineMagic with IncludeInterpreter with IncludeOutputStream
  with DownloadSupport with ArgumentParsingSupport with IncludeKernel
  with IncludePluginManager with IncludeConfig with LogLike {

  // Option to force re-downloading of the specified jar
  private val _force = parser.accepts("f", "forces re-download of specified jar")

  // Option to load the jar as a magic extension
  private val _magic = parser.accepts("magic", "loads jar as a magic extension")

  // Lazy because the outputStream is not provided at construction
  private def printStream = new PrintStream(outputStream)

  // … (the opening of the execute method is missing from this snippet) …
      )
    } else {
      downloadFile(
        new URL(jarRemoteLocation),
        new File(downloadLocation).toURI.toURL
      )
    }

    // Report download finished
    printStream.println(s"Finished download of $jarName")
  } else {
    printStream.println(s"Using cached version of $jarName")
  }

  // validate jar file
  if (!isValidJar(fileDownloadLocation)) {
    throw new IllegalArgumentException(s"Jar '$jarName' is not valid.")
  }

  if (_magic) {
    val plugins = pluginManager.loadPlugins(fileDownloadLocation)
    pluginManager.initializePlugins(plugins)
  } else {
    kernel.addJars(fileDownloadLocation.toURI)
  }
}
Example 2
Source File: Zip.scala From coursier with Apache License 2.0

package coursier.launcher.internal

import java.util.zip.{ZipEntry, ZipFile, ZipInputStream}

import scala.collection.JavaConverters._

private[coursier] object Zip {

  def zipEntries(zipStream: ZipInputStream): Iterator[(ZipEntry, Array[Byte])] =
    new Iterator[(ZipEntry, Array[Byte])] {
      var nextEntry = Option.empty[ZipEntry]
      def update() =
        nextEntry = Option(zipStream.getNextEntry)
      update()

      def hasNext = nextEntry.nonEmpty
      def next() = {
        val ent = nextEntry.get
        val data = FileUtil.readFullyUnsafe(zipStream)
        update()
        // ZipInputStream seems not to be fine with custom deflaters, like some recent
        // versions of proguard use. This makes ZipOutputStream not handle some of these
        // entries fine without this.
        // See https://github.com/spring-projects/spring-boot/issues/13720#issuecomment-403428384.
        // Same workaround as https://github.com/spring-projects/spring-boot/issues/13720
        // (https://github.com/spring-projects/spring-boot/commit/a50646b7cc3ad941e748dfb450077e3a73706205#diff-2ff64cd06c0b25857e3e0dfdb6733174R144)
        ent.setCompressedSize(-1L)
        (ent, data)
      }
    }

  def zipEntries(zipFile: ZipFile): Iterator[(ZipEntry, Array[Byte])] =
    zipFile.entries().asScala.map { ent =>
      val data = FileUtil.readFully(zipFile.getInputStream(ent))
      // Doing this like above just in case
      ent.setCompressedSize(-1L)
      (ent, data)
    }
}
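A hypothetical caller of the ZipFile overload above might look like this; the jar path is a placeholder, and since Zip is private[coursier], such code would live inside the coursier package.

import java.util.zip.ZipFile

val zf = new ZipFile("some.jar") // placeholder path
Zip.zipEntries(zf).foreach { case (entry, bytes) =>
  println(s"${entry.getName}: ${bytes.length} bytes")
}
zf.close()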
Example 3
Source File: MainClass.scala From coursier with Apache License 2.0
package coursier.install

import java.io.{File, InputStream}
import java.util.jar.{Manifest => JManifest}
import java.util.zip.ZipFile

object MainClass {

  private def manifestPath = "META-INF/MANIFEST.MF"

  def mainClasses(jars: Seq[File]): Map[(String, String), String] = {

    val metaInfs = jars.flatMap { f =>
      val zf = new ZipFile(f)
      val entryOpt = Option(zf.getEntry(manifestPath))
      entryOpt.map(e => () => zf.getInputStream(e)).toSeq
    }

    val mainClasses = metaInfs.flatMap { f =>
      var is: InputStream = null
      val attributes =
        try {
          is = f()
          new JManifest(is).getMainAttributes
        } finally {
          if (is != null)
            is.close()
        }

      def attributeOpt(name: String) =
        Option(attributes.getValue(name))

      val vendor = attributeOpt("Implementation-Vendor-Id").getOrElse("")
      val title = attributeOpt("Specification-Title").getOrElse("")
      val mainClass = attributeOpt("Main-Class")

      mainClass.map((vendor, title) -> _)
    }

    mainClasses.toMap
  }

  def retainedMainClassOpt(
    mainClasses: Map[(String, String), String],
    mainDependencyOpt: Option[(String, String)]
  ): Option[String] =
    if (mainClasses.size == 1) {
      val (_, mainClass) = mainClasses.head
      Some(mainClass)
    } else {
      // Trying to get the main class of the first artifact
      val mainClassOpt = for {
        (mainOrg, mainName) <- mainDependencyOpt
        mainClass <- mainClasses.collectFirst {
          case ((org, name), mainClass)
              if org == mainOrg &&
                (mainName == name || mainName.startsWith(name + "_")) => // Ignore cross version suffix
            mainClass
        }
      } yield mainClass

      def sameOrgOnlyMainClassOpt = for {
        (mainOrg, mainName) <- mainDependencyOpt
        orgMainClasses = mainClasses.collect {
          case ((org, _), mainClass) if org == mainOrg =>
            mainClass
        }.toSet
        if orgMainClasses.size == 1
      } yield orgMainClasses.head

      mainClassOpt.orElse(sameOrgOnlyMainClassOpt)
    }
}
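To illustrate how the two methods combine, a hedged usage sketch; the jar paths and dependency coordinates are placeholders.

import java.io.File

val jars = Seq(new File("app.jar"), new File("lib_2.13.jar")) // placeholder paths
val classes = MainClass.mainClasses(jars)
// Prefer the main class declared by the main dependency's jar, falling back
// to a unique main class among jars from the same organization.
val mainOpt = MainClass.retainedMainClassOpt(classes, Some(("com.example", "app"))) // placeholder coordinates
println(mainOpt.getOrElse("no unambiguous main class"))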
Example 4
Source File: ARSCFileParser_apktool.scala From Argus-SAF with Apache License 2.0
package org.argus.amandroid.core.parser

import org.argus.jawa.core.util.FileResourceUri
import brut.androlib.res.decoder.ARSCDecoder
import brut.androlib.res.decoder.ARSCDecoder.ARSCData
import brut.androlib.res.data.ResID
import brut.androlib.res.data.ResResSpec
import brut.androlib.res.data.ResPackage
import java.util.regex.Pattern
import org.argus.jawa.core.util._
import java.util.zip.ZipFile

class ARSCFileParser_apktool {
  // final private val TITLE = "ARSCFileParser_apktool"
  private var data: ARSCData = _

  def parse(apkUri: FileResourceUri): Unit = {
    val apkFile = FileUtil.toFilePath(apkUri)
    val zf = new ZipFile(apkFile)
    try {
      val ze = zf.getEntry("resources.arsc")
      if (ze != null) {
        val in = zf.getInputStream(ze)
        this.data = ARSCDecoder.decode(in, false, false)
      } else {} // err_msg_normal(TITLE, "Cannot find resources.arsc file.")
    } finally {
      zf.close()
    }
  }

  def findResource(resourceId: Int): ResResSpec = {
    var result: ResResSpec = null
    val id = new ResID(resourceId)
    if (this.data != null) {
      this.data.getPackages.foreach { pkg =>
        if (pkg.hasResSpec(id)) {
          result = pkg.getResSpec(id)
        }
      }
    }
    result
  }

  def getPackages: Set[ResPackage] = {
    if (this.data != null) {
      data.getPackages.toSet
    } else Set()
  }

  def getData: ARSCData = this.data

  def getGlobalStringPool: Map[Int, String] = {
    val matches: MMap[Int, String] = mmapEmpty
    getPackages.foreach { pkg =>
      val str = pkg.getResTable.toString
      val strs = str.substring(1, str.length() - 1).split(", ")
      val p = Pattern.compile("(.+)\\sstring\\/(.+)")
      strs foreach { str =>
        val m = p.matcher(str)
        if (m.find()) {
          // Accumulate into the outer map. (The original declared a local
          // `var matches` here that shadowed the outer one, so the method
          // always returned an empty map.)
          matches += (Integer.parseInt(m.group(1).substring(2), 16) -> m.group(2))
        }
      }
    }
    matches.toMap
  }
}
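Usage might look like the following sketch, assuming apktool's brut.androlib classes are on the classpath and that FileResourceUri is Argus-SAF's string alias for a file URI; the APK path and resource id are placeholders.

val parser = new ARSCFileParser_apktool
parser.parse("file:/path/to/app.apk") // placeholder file URI
val spec = parser.findResource(0x7f040000) // placeholder resource id
if (spec != null) println(spec.getName)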
Example 5
Source File: ZipUtil.scala From Argus-SAF with Apache License 2.0
package org.argus.jawa.core.util

import java.io.{File, FileOutputStream, InputStream, OutputStream}
import java.util.zip.{ZipEntry, ZipFile}

import scala.collection.JavaConverters._

object ZipUtil {
  val BUFSIZE = 4096
  val buffer = new Array[Byte](BUFSIZE)

  def unZip(source: String, targetFolder: String): Boolean = {
    val zipFile = new ZipFile(source)
    unzipAllFile(zipFile.entries.asScala.toList, getZipEntryInputStream(zipFile), new File(targetFolder))
  }

  def getZipEntryInputStream(zipFile: ZipFile)(entry: ZipEntry): InputStream = zipFile.getInputStream(entry)

  def unzipAllFile(entryList: List[ZipEntry], inputGetter: ZipEntry => InputStream, targetFolder: File): Boolean = {
    entryList match {
      case entry :: entries =>
        if (entry.isDirectory)
          new File(targetFolder, entry.getName).mkdirs
        else
          saveFile(inputGetter(entry), new FileOutputStream(new File(targetFolder, entry.getName)))
        unzipAllFile(entries, inputGetter, targetFolder)
      case _ =>
        true
    }
  }

  def saveFile(fis: InputStream, fos: OutputStream): Unit = {
    writeToFile(bufferReader(fis), fos)
    fis.close()
    fos.close()
  }

  def bufferReader(fis: InputStream)(buffer: Array[Byte]): (Int, Array[Byte]) = (fis.read(buffer), buffer)

  def writeToFile(reader: Array[Byte] => (Int, Array[Byte]), fos: OutputStream): Boolean = {
    val (length, data) = reader(buffer)
    if (length >= 0) {
      fos.write(data, 0, length)
      writeToFile(reader, fos)
    } else true
  }
}
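Usage is a single call; the paths below are placeholders.

ZipUtil.unZip("build/app.zip", "build/extracted")

Note that unzipAllFile walks the entries in archive order, so directory entries are created before the files inside them in well-formed archives.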
Example 6
Source File: Using.scala From Argus-SAF with Apache License 2.0
package org.argus.jawa.core.compiler.compile.io

import java.io.{Closeable, FileInputStream, FileOutputStream, InputStream, OutputStream, File => JavaFile}
import java.io.{BufferedInputStream, BufferedOutputStream, InputStreamReader, OutputStreamWriter}
import java.io.{BufferedReader, BufferedWriter}
import java.util.zip.GZIPInputStream
import java.net.URL
import java.nio.channels.FileChannel
import java.nio.charset.Charset
import java.util.jar.{JarFile, JarInputStream, JarOutputStream}
import java.util.zip.{GZIPOutputStream, ZipEntry, ZipFile, ZipInputStream, ZipOutputStream}

import ErrorHandling.translate

import scala.reflect.{Manifest => SManifest}

abstract class Using[Source, T] {
  protected def open(src: Source): T
  def apply[R](src: Source)(f: T => R): R = {
    val resource = open(src)
    try { f(resource) }
    finally { close(resource) }
  }
  protected def close(out: T): Unit
}

abstract class WrapUsing[Source, T](implicit srcMf: SManifest[Source], targetMf: SManifest[T]) extends Using[Source, T] {
  protected def label[S](m: SManifest[S]): String = m.runtimeClass.getSimpleName
  protected def openImpl(source: Source): T
  protected final def open(source: Source): T =
    translate("Error wrapping " + label(srcMf) + " in " + label(targetMf) + ": ") { openImpl(source) }
}

trait OpenFile[T] extends Using[JavaFile, T] {
  protected def openImpl(file: JavaFile): T
  protected final def open(file: JavaFile): T = {
    val parent = file.getParentFile
    if (parent != null)
      IO.createDirectory(parent)
    openImpl(file)
  }
}

object Using {
  def wrap[Source, T <: Closeable](openF: Source => T)(implicit srcMf: SManifest[Source], targetMf: SManifest[T]): Using[Source, T] =
    wrap(openF, closeCloseable)
  def wrap[Source, T](openF: Source => T, closeF: T => Unit)(implicit srcMf: SManifest[Source], targetMf: SManifest[T]): Using[Source, T] =
    new WrapUsing[Source, T] {
      def openImpl(source: Source): T = openF(source)
      def close(t: T): Unit = closeF(t)
    }

  def resource[Source, T <: Closeable](openF: Source => T): Using[Source, T] =
    resource(openF, closeCloseable)
  def resource[Source, T](openF: Source => T, closeF: T => Unit): Using[Source, T] =
    new Using[Source, T] {
      def open(s: Source): T = openF(s)
      def close(s: T): Unit = closeF(s)
    }

  def file[T <: Closeable](openF: JavaFile => T): OpenFile[T] = file(openF, closeCloseable)
  def file[T](openF: JavaFile => T, closeF: T => Unit): OpenFile[T] =
    new OpenFile[T] {
      def openImpl(file: JavaFile): T = openF(file)
      def close(t: T): Unit = closeF(t)
    }

  private def closeCloseable[T <: Closeable]: T => Unit = _.close()

  def bufferedOutputStream: Using[OutputStream, BufferedOutputStream] = wrap((out: OutputStream) => new BufferedOutputStream(out))
  def bufferedInputStream: Using[InputStream, BufferedInputStream] = wrap((in: InputStream) => new BufferedInputStream(in))
  def fileOutputStream(append: Boolean = false): OpenFile[BufferedOutputStream] = file(f => new BufferedOutputStream(new FileOutputStream(f, append)))
  def fileInputStream: OpenFile[BufferedInputStream] = file(f => new BufferedInputStream(new FileInputStream(f)))
  def urlInputStream: Using[URL, BufferedInputStream] = resource((u: URL) => translate("Error opening " + u + ": ")(new BufferedInputStream(u.openStream)))
  def fileOutputChannel: OpenFile[FileChannel] = file(f => new FileOutputStream(f).getChannel)
  def fileInputChannel: OpenFile[FileChannel] = file(f => new FileInputStream(f).getChannel)
  def fileWriter(charset: Charset = IO.utf8, append: Boolean = false): OpenFile[BufferedWriter] =
    file(f => new BufferedWriter(new OutputStreamWriter(new FileOutputStream(f, append), charset)))
  def fileReader(charset: Charset): OpenFile[BufferedReader] =
    file(f => new BufferedReader(new InputStreamReader(new FileInputStream(f), charset)))
  def urlReader(charset: Charset): Using[URL, BufferedReader] =
    resource((u: URL) => new BufferedReader(new InputStreamReader(u.openStream, charset)))
  def jarFile(verify: Boolean): OpenFile[JarFile] = file(f => new JarFile(f, verify), (_: JarFile).close())
  def zipFile: OpenFile[ZipFile] = file(f => new ZipFile(f), (_: ZipFile).close())
  def streamReader: Using[(InputStream, Charset), InputStreamReader] = wrap {
    (_: (InputStream, Charset)) match { case (in, charset) => new InputStreamReader(in, charset) }
  }
  def gzipInputStream: Using[InputStream, GZIPInputStream] = wrap((in: InputStream) => new GZIPInputStream(in, 8192))
  def zipInputStream: Using[InputStream, ZipInputStream] = wrap((in: InputStream) => new ZipInputStream(in))
  def zipOutputStream: Using[OutputStream, ZipOutputStream] = wrap((out: OutputStream) => new ZipOutputStream(out))
  def gzipOutputStream: Using[OutputStream, GZIPOutputStream] = wrap((out: OutputStream) => new GZIPOutputStream(out, 8192), (_: GZIPOutputStream).finish())
  def jarOutputStream: Using[OutputStream, JarOutputStream] = wrap((out: OutputStream) => new JarOutputStream(out))
  def jarInputStream: Using[InputStream, JarInputStream] = wrap((in: InputStream) => new JarInputStream(in))
  def zipEntry(zip: ZipFile): Using[ZipEntry, InputStream] =
    resource((entry: ZipEntry) =>
      translate("Error opening " + entry.getName + " in " + zip + ": ") { zip.getInputStream(entry) }
    )
}
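A sketch of the loan pattern in action: apply opens the resource, hands it to the function, and closes it even if the body throws. The archive path is a placeholder.

import java.io.{File => JavaFile}
import scala.collection.JavaConverters._

val names = Using.zipFile(new JavaFile("archive.zip")) { zf =>
  zf.entries().asScala.map(_.getName).toList
} // the ZipFile is closed here, whether or not the body threw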
Example 7
Source File: DeployDynamoDBLocal.scala From sbt-dynamodb with MIT License
package com.localytics.sbt.dynamodb

import java.io.FileInputStream
import java.net.URL
import java.util.zip.GZIPInputStream
import java.util.zip.ZipFile

import sbt.File
import sbt.Keys._

import scala.concurrent.duration.Duration
import scala.sys.process._
import scala.util.Try

object DeployDynamoDBLocal {

  private[dynamodb] def validJar(file: File): Boolean = Try(new ZipFile(file)).isSuccess

  private[dynamodb] def validGzip(file: File): Boolean = Try(new GZIPInputStream(new FileInputStream(file)).read()).isSuccess

  def apply(ver: String, url: Option[String], targetDir: File, downloadIfOlderThan: Duration, streamz: TaskStreams): File = {
    val targz = new File(targetDir, s"dynamodb_local_$ver.tar.gz")
    val jar = new File(targetDir, "DynamoDBLocal.jar")

    def isStale(file: File) = ver == "latest" && System.currentTimeMillis - file.lastModified() > downloadIfOlderThan.toMillis

    if (!targetDir.exists()) {
      streamz.log.info(s"Creating DynamoDB Local directory $targetDir")
      targetDir.mkdirs()
    }
    if (!targz.exists() || isStale(targz) || !validGzip(targz)) {
      val remoteFile = url.getOrElse(s"https://s3-us-west-2.amazonaws.com/dynamodb-local/dynamodb_local_$ver.tar.gz")
      streamz.log.info(s"Downloading targz from [$remoteFile] to [${targz.getAbsolutePath}]")
      (new URL(remoteFile) #> targz).!!
    }
    if (!validGzip(targz)) sys.error(s"Invalid gzip file at [${targz.getAbsolutePath}]")
    if (!jar.exists() || !validJar(jar)) {
      streamz.log.info(s"Extracting jar from [${targz.getAbsolutePath}] to [${jar.getAbsolutePath}]")
      Process(Seq("tar", "xzf", targz.getName), targetDir).!!
    }
    if (!validJar(jar)) sys.error(s"Invalid jar file at [${jar.getAbsolutePath}]")
    jar
  }
}
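The validJar check above exploits the fact that ZipFile's constructor fails fast on a missing or corrupt archive. The same idiom works as a standalone helper, sketched here with an explicit close added:

import java.io.File
import java.util.zip.ZipFile
import scala.util.Try

// Opening (and closing) a ZipFile is a cheap way to test that a file
// is a readable zip/jar without extracting anything.
def looksLikeValidZip(file: File): Boolean =
  Try { new ZipFile(file).close() }.isSuccess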
Example 8
Source File: ThriftImporter.scala From diffy with GNU Affero General Public License v3.0
package ai.diffy.scrooge

import com.twitter.scrooge.frontend.{DirImporter, Importer}
import java.io.File
import java.nio.file.Files
import java.util.zip.ZipFile

import scala.collection.JavaConversions._
import scala.io.Source

object ZippedFileImporter {
  def apply(zipFiles: Seq[ZipFile]): Importer = {
    val thriftDir = Files.createTempDirectory("thrift-")
    thriftDir.toFile.deleteOnExit()

    zipFiles foreach { zipFile =>
      zipFile.entries.toList.collect {
        case zipEntry if !zipEntry.isDirectory && zipEntry.getName.endsWith(".thrift") =>
          val data = Source.fromInputStream(zipFile.getInputStream(zipEntry), "UTF-8").mkString

          val newFile = new File(thriftDir.toString + File.separator + zipEntry.getName)
          new File(newFile.getParent).mkdirs()

          Files.write(newFile.toPath, data.getBytes)
      }
    }

    DirImporter(thriftDir.toFile)
  }
}

object FileImporter {
  def apply(files: Seq[File]): Importer = {
    val thriftDir = Files.createTempDirectory("thrift-")
    thriftDir.toFile.deleteOnExit()

    files foreach { file =>
      val newFile = new File(thriftDir.toString + File.separator + file.getName)
      Files.copy(file.toPath, newFile.toPath)
    }

    DirImporter(thriftDir.toFile)
  }
}
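A hypothetical caller, assuming the .thrift definitions ship inside a jar; the path is a placeholder.

import java.util.zip.ZipFile

val importer = ZippedFileImporter(Seq(new ZipFile("thrift-definitions.jar"))) // placeholder path
// importer can now be handed to Scrooge's frontend to resolve include statements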
Example 9
Source File: ThriftDifferenceProxy.scala From diffy with GNU Affero General Public License v3.0
package ai.diffy.proxy

import java.io.File
import java.util.zip.ZipFile

import ai.diffy.analysis.{DifferenceAnalyzer, InMemoryDifferenceCollector, JoinedDifferences}
import ai.diffy.lifter.{MapLifterPool, Message, ThriftLifter}
import ai.diffy.scrooge.ZippedFileImporter
import com.twitter.finagle.thrift.{ClientId, ThriftClientRequest}
import com.twitter.finagle.tracing.NullTracer
import com.twitter.finagle.{Resolver, Thrift, ThriftMux}
import com.twitter.util.{Future, Try}

import scala.collection.JavaConversions._

case class ThriftDifferenceProxy(
    settings: Settings,
    collector: InMemoryDifferenceCollector,
    joinedDifferences: JoinedDifferences,
    analyzer: DifferenceAnalyzer)
  extends DifferenceProxy {

  override type Req = ThriftClientRequest
  override type Rep = Array[Byte]
  override type Srv = ThriftService

  private[this] lazy val clientId = new ClientId(settings.clientId)

  override val proxy = super.proxy

  private[this] val zipfile = new ZipFile(new File(settings.pathToThriftJar))

  private[this] val importer = ZippedFileImporter(Seq(zipfile))

  private[this] val filenames = zipfile.entries.toSeq collect {
    case zipEntry if !zipEntry.isDirectory && zipEntry.getName.endsWith(".thrift") =>
      zipEntry.getName
  }

  val lifter =
    MapLifterPool(
      ThriftLifter.fromImporter(
        importer,
        filenames,
        settings.serviceClass
      )
    )

  override def serviceFactory(serverset: String, label: String) = {
    val client =
      if (settings.enableThriftMux) {
        ThriftMux.client
          .withClientId(clientId)
          .newClient(serverset, label).toService
      } else {
        val config =
          if (settings.useFramedThriftTransport) {
            Thrift.client
          } else {
            Thrift.client.withBufferedTransport
          }
        config
          .withNoAttemptTTwitterUpgrade
          .withTracer(NullTracer)
          .withClientId(clientId)
          .newClient(serverset, label)
          .toService
      }

    ThriftService(client, Resolver.eval(serverset))
  }

  override lazy val server = {
    if (settings.enableThriftMux) {
      ThriftMux.serve(
        settings.servicePort,
        proxy map { req: Array[Byte] => new ThriftClientRequest(req, false) }
      )
    } else {
      val config =
        if (settings.useFramedThriftTransport) {
          Thrift.server
        } else {
          Thrift.server.withBufferedTransport()
        }
      config.withTracer(NullTracer).serve(
        settings.servicePort,
        proxy map { req: Array[Byte] => new ThriftClientRequest(req, false) }
      )
    }
  }

  override def liftRequest(req: ThriftClientRequest): Future[Message] = lifter(req.message)

  override def liftResponse(rep: Try[Array[Byte]]): Future[Message] =
    Future.const(rep) flatMap { lifter(_) }
}
Example 10
Source File: HasBackupValidation.scala From recogito2 with Apache License 2.0
package controllers.document

import collection.JavaConverters._
import java.io.{File, InputStream}
import java.math.BigInteger
import java.security.{DigestInputStream, MessageDigest}
import java.util.zip.ZipFile

import controllers.HasConfig

import scala.concurrent.{ExecutionContext, Future}
import scala.io.Source

object HasBackupValidation {
  class InvalidSignatureException extends RuntimeException
  class InvalidBackupException extends RuntimeException
  class DocumentExistsException extends RuntimeException
}

trait HasBackupValidation { self: HasConfig =>

  protected val ALGORITHM = "SHA-256"
  protected val SECRET = self.config.get[String]("play.http.secret.key")

  private def computeHash(stream: InputStream) = {
    val md = MessageDigest.getInstance(ALGORITHM)
    val din = new DigestInputStream(stream, md)

    // Weird, but din is pure side-effect - consume the stream & din computes the hash
    while (din.read() != -1) { }
    din.close()

    new BigInteger(1, md.digest()).toString(16)
  }

  def computeSignature(metadataHash: String, fileHashes: Seq[String], annotationsHash: String) = {
    val str = SECRET + metadataHash + fileHashes.mkString + annotationsHash
    val md = MessageDigest.getInstance(ALGORITHM).digest(str.getBytes)
    new BigInteger(1, md).toString(16)
  }

  def validateBackup(file: File)(implicit ctx: ExecutionContext): Future[Boolean] = Future {
    scala.concurrent.blocking {
      val zipFile = new ZipFile(file)
      val entries = zipFile.entries.asScala.toSeq.filter(!_.getName.startsWith("__MACOSX"))

      def hash(filename: String) = {
        val entry = entries.filter(_.getName == filename).head
        computeHash(zipFile.getInputStream(entry))
      }

      val expectedSignature = {
        val metadataHash = hash("metadata.json")
        val fileHashes = entries.filter(_.getName.startsWith("parts" + File.separator))
          .map(entry => hash(entry.getName))
        val annotationsHash = hash("annotations.jsonl")
        computeSignature(metadataHash, fileHashes, annotationsHash)
      }

      val storedSignature = {
        val signatureEntry = entries.filter(_.getName == "signature").head
        Source.fromInputStream(zipFile.getInputStream(signatureEntry), "UTF-8").getLines.mkString("\n")
      }

      expectedSignature == storedSignature
    }
  }
}
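The computeHash helper uses a JDK idiom worth calling out: DigestInputStream updates its digest as a side effect of reading, so draining the stream is all it takes. A standalone rendition of the same idiom, assuming SHA-256:

import java.io.InputStream
import java.math.BigInteger
import java.security.{DigestInputStream, MessageDigest}

def sha256Hex(in: InputStream): String = {
  val md = MessageDigest.getInstance("SHA-256")
  val din = new DigestInputStream(in, md)
  // Drain the stream; each read feeds the digest as a side effect
  try { while (din.read() != -1) {} } finally din.close()
  new BigInteger(1, md.digest()).toString(16)
}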
Example 11
Source File: Check.scala From sbt-coursier with Apache License 2.0
import java.io.File
import java.util.zip.ZipFile

import scala.collection.JavaConverters._

object Check {

  def onlyNamespace(ns: String, jar: File, ignoreFiles: Set[String] = Set.empty): Unit = {
    val zf = new ZipFile(jar)
    val unrecognized = zf.entries()
      .asScala
      .map(_.getName)
      .filter { n =>
        !n.startsWith("META-INF/") &&
          !n.startsWith(ns + "/") &&
          n != "reflect.properties" && // scala-reflect adds that
          !ignoreFiles(n)
      }
      .toVector
      .sorted
    for (u <- unrecognized)
      System.err.println(s"Unrecognized: $u")
    assert(unrecognized.isEmpty)
  }
}
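Invocation is a one-liner; here with coursier's own namespace and a placeholder jar path.

import java.io.File

Check.onlyNamespace("coursier", new File("target/coursier-assembly.jar")) // placeholder path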
Example 12
Source File: JobUtils.scala From fusion-data with Apache License 2.0
package mass.job.util

import java.io.File
import java.nio.charset.Charset
import java.nio.file.{Files, Path, StandardCopyOption}
import java.util.zip.ZipFile

import com.typesafe.scalalogging.StrictLogging
import helloscala.common.Configuration
import helloscala.common.util.{DigestUtils, Utils}
import mass.common.util.FileUtils
import mass.core.job.JobConstants
import mass.job.JobSettings
import mass.message.job._
import mass.model.job.{JobItem, JobTrigger}

import scala.concurrent.{ExecutionContext, Future}

object JobUtils extends StrictLogging {
  case class JobZipInternal private (configs: Vector[JobCreateReq], entries: Vector[Path])

  def uploadJob(jobSettings: JobSettings, req: JobUploadJobReq)(implicit ec: ExecutionContext): Future[JobZip] =
    Future {
      val sha256 = DigestUtils.sha256HexFromPath(req.file)
      val dest = jobSettings.jobSavedDir.resolve(sha256.take(2)).resolve(sha256)

      val jobZipInternal = parseJobZip(req.file, req.charset, dest.resolve(JobConstants.DIST)) match {
        case Right(v) => v
        case Left(e)  => throw e
      }

      val zipPath = dest.resolve(req.fileName)
      Files.move(req.file, zipPath, StandardCopyOption.REPLACE_EXISTING)
      JobZip(zipPath, jobZipInternal.configs, jobZipInternal.entries)
    }

  @inline def parseJobZip(file: Path, charset: Charset, dest: Path): Either[Throwable, JobZipInternal] =
    parseJobZip(file.toFile, charset, dest)

  def parseJobZip(file: File, charset: Charset, dest: Path): Either[Throwable, JobZipInternal] = Utils.either {
    import scala.jdk.CollectionConverters._
    import scala.language.existentials

    val zip = new ZipFile(file, charset)
    try {
      val (confEntries, fileEntries) = zip
        .entries()
        .asScala
        .filterNot(entry => entry.isDirectory)
        .span(entry => entry.getName.endsWith(JobConstants.ENDS_SUFFIX) && !entry.isDirectory)
      val configs = confEntries.map(confEntry =>
        parseJobConf(FileUtils.getString(zip.getInputStream(confEntry), charset, "\n")) match {
          case Right(config) => config
          case Left(e)       => throw e
        })

      val buf = Array.ofDim[Byte](1024)
      val entryPaths = fileEntries.map { entry =>
        val entryName = entry.getName
        val savePath = dest.resolve(entryName)
        if (!Files.isDirectory(savePath.getParent)) {
          Files.createDirectories(savePath.getParent)
        }
        FileUtils.write(zip.getInputStream(entry), Files.newOutputStream(savePath), buf) // persist the zip entry to disk
        savePath
      }
      JobZipInternal(configs.toVector, entryPaths.toVector)
    } finally {
      if (zip ne null) zip.close()
    }
  }

  def parseJobConf(content: String): Either[Throwable, JobCreateReq] = Utils.either {
    val conf = Configuration.parseString(content)
    val jobItem = JobItem(conf.getConfiguration("item"))
    val jobTrigger = JobTrigger(conf.getConfiguration("trigger"))
    JobCreateReq(conf.get[Option[String]]("key"), jobItem, jobTrigger)
  }
}

case class JobZip(zipPath: Path, configs: Vector[JobCreateReq], entries: Vector[Path])
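One detail worth noting in parseJobZip: the ZipFile(File, Charset) constructor (JDK 7+) controls how entry names are decoded, which matters for archives produced by non-UTF-8 tools. A minimal sketch with placeholder path and charset:

import java.io.File
import java.nio.charset.Charset
import java.util.zip.ZipFile
import scala.jdk.CollectionConverters._

val zip = new ZipFile(new File("job.zip"), Charset.forName("GBK")) // placeholder path and charset
try zip.entries().asScala.foreach(e => println(e.getName))
finally zip.close()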
Example 13
Source File: MetaDataProvider.scala From releaser with Apache License 2.0
package uk.gov.hmrc.releaser

import java.nio.file.Path
import java.util.jar.Manifest
import java.util.zip.ZipFile

import org.joda.time.DateTime
import org.joda.time.format.DateTimeFormat
import uk.gov.hmrc.releaser.github.CommitSha

import scala.collection.JavaConversions._
import scala.io.Source
import scala.util.{Failure, Success, Try}

trait MetaDataProvider {
  def fromJarFile(p: Path): Try[ArtefactMetaData]
  def fromCommitManifest(p: Path): Try[ArtefactMetaData]
}

case class ArtefactMetaData(sha: CommitSha, commitAuthor: String, commitDate: DateTime)

class ArtefactMetaDataProvider extends MetaDataProvider {
  import ArtefactMetaDataProvider._

  def fromJarFile(p: Path): Try[ArtefactMetaData] = {
    Try { new ZipFile(p.toFile) }.flatMap { jarFile =>
      jarFile.entries().filter(_.getName == "META-INF/MANIFEST.MF").toList.headOption.map { ze =>
        val man = new Manifest(jarFile.getInputStream(ze))
        ArtefactMetaData(
          man.getMainAttributes.getValue("Git-Head-Rev"),
          man.getMainAttributes.getValue("Git-Commit-Author"),
          gitCommitDateFormat.parseDateTime(man.getMainAttributes.getValue("Git-Commit-Date"))
        )
      }.toTry(new Exception(s"Failed to retrieve manifest from $p"))
    }
  }

  def fromCommitManifest(p: Path): Try[ArtefactMetaData] = {
    Try {
      val map = Source.fromFile(p.toFile)
        .getLines().toSeq
        .map(_.split("="))
        .map { case Array(key, value) => key.trim -> value.trim }
        .toMap

      ArtefactMetaData(map("sha"), map("author"), gitCommitDateFormat.parseDateTime(map("date")))
    }
  }
}

object ArtefactMetaDataProvider {
  val gitCommitDateFormat = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss.SSSZ")

  implicit class OptionPimp[A](opt: Option[A]) {
    def toTry(e: Exception): Try[A] = opt match {
      case Some(x) => Success(x)
      case None    => Failure(e)
    }
  }
}
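Worth noting: when only the manifest is needed, java.util.jar.JarFile exposes it directly, which avoids the manual entry scan above. A minimal sketch with a placeholder path:

import java.util.jar.JarFile

val jar = new JarFile("artefact.jar") // placeholder path
try
  // getManifest returns null if the jar has no manifest, hence the Option wrapper
  Option(jar.getManifest).foreach { man =>
    println(man.getMainAttributes.getValue("Git-Head-Rev"))
  }
finally jar.close()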