java.io.PrintStream Scala Examples
The following examples show how to use java.io.PrintStream.
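All of the examples below revolve around the same small API surface: constructing a PrintStream over a file, socket, or in-memory buffer, printing to it, and flushing or closing it. As a baseline, here is a minimal self-contained sketch (the object name is invented for illustration):

import java.io.{ByteArrayOutputStream, PrintStream}

object PrintStreamBasics {
  def main(args: Array[String]): Unit = {
    // A PrintStream over an in-memory buffer, with auto-flush and an explicit charset
    val buffer = new ByteArrayOutputStream()
    val out = new PrintStream(buffer, true, "UTF-8")
    out.println("hello, PrintStream")
    out.close()
    println(buffer.toString("UTF-8")) // prints: hello, PrintStream
  }
}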
Example 1
Source File: Banner.scala From daml with Apache License 2.0

// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.platform.sandbox.banner

import java.io.PrintStream

import scala.io.Source

object Banner {
  def show(out: PrintStream): Unit = {
    val resourceName = "banner.txt"
    if (getClass.getClassLoader.getResource(resourceName) != null)
      out.println(
        Source
          .fromResource(resourceName)
          .getLines
          .mkString("\n"))
    else
      out.println("Banner resource missing from classpath.")
  }
}
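Since show only needs a PrintStream, the banner can be sent to standard output (or any other stream) directly, e.g.:

Banner.show(System.out)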
Example 2
Source File: SinkFunctionExample.scala From examples-scala with Apache License 2.0

package io.github.streamingwithflink.chapter8

import java.io.PrintStream
import java.net.{InetAddress, Socket}

import io.github.streamingwithflink.util.{SensorReading, SensorSource, SensorTimeAssigner}
import org.apache.flink.configuration.Configuration
import org.apache.flink.streaming.api.TimeCharacteristic
import org.apache.flink.streaming.api.functions.sink.{RichSinkFunction, SinkFunction}
import org.apache.flink.streaming.api.scala._

/**
 * Example program that writes sensor readings to a socket.
 *
 * NOTE: Before starting the program, you need to start a process that listens on a socket at localhost:9191.
 * On Linux, you can do that with nc (netcat) with the following command:
 *
 * nc -l localhost 9191
 */
object SinkFunctionExample {

  def main(args: Array[String]): Unit = {
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment

    // use event time for the application
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)
    // configure watermark interval
    env.getConfig.setAutoWatermarkInterval(1000L)

    // ingest sensor stream
    val readings: DataStream[SensorReading] = env
      // SensorSource generates random temperature readings
      .addSource(new SensorSource)
      // assign timestamps and watermarks which are required for event time
      .assignTimestampsAndWatermarks(new SensorTimeAssigner)

    // write the sensor readings to a socket
    readings.addSink(new SimpleSocketSink("localhost", 9191))
      // set parallelism to 1 because only one thread can write to a socket
      .setParallelism(1)

    env.execute()
  }
}

/**
 * Writes a stream of [[SensorReading]] to a socket.
 */
class SimpleSocketSink(val host: String, val port: Int)
    extends RichSinkFunction[SensorReading] {

  var socket: Socket = _
  var writer: PrintStream = _

  override def open(config: Configuration): Unit = {
    // open socket and writer
    socket = new Socket(InetAddress.getByName(host), port)
    writer = new PrintStream(socket.getOutputStream)
  }

  override def invoke(value: SensorReading, ctx: SinkFunction.Context[_]): Unit = {
    // write sensor reading to socket
    writer.println(value.toString)
    writer.flush()
  }

  override def close(): Unit = {
    // close writer and socket
    writer.close()
    socket.close()
  }
}
Example 3
Source File: SparkSQLEnv.scala From drizzle-spark with Apache License 2.0

package org.apache.spark.sql.hive.thriftserver

import java.io.PrintStream

import scala.collection.JavaConverters._

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.internal.Logging
import org.apache.spark.sql.{SparkSession, SQLContext}
import org.apache.spark.sql.hive.{HiveSessionState, HiveUtils}
import org.apache.spark.util.Utils

// (listing truncated on the source page: the enclosing object SparkSQLEnv and its
// sparkContext/sqlContext fields and init method are elided)

  def stop() {
    logDebug("Shutting down Spark SQL Environment")
    // Stop the SparkContext
    if (SparkSQLEnv.sparkContext != null) {
      sparkContext.stop()
      sparkContext = null
      sqlContext = null
    }
  }
}
Example 4
Source File: Distribution.scala From drizzle-spark with Apache License 2.0

package org.apache.spark.util

import java.io.PrintStream

import scala.collection.immutable.IndexedSeq

// (listing truncated on the source page: the Distribution class declaration and its
// statCounter field are elided)

  def summary(out: PrintStream = System.out) {
    // scalastyle:off println
    out.println(statCounter)
    showQuantiles(out)
    // scalastyle:on println
  }
}

private[spark] object Distribution {

  def apply(data: Traversable[Double]): Option[Distribution] = {
    if (data.size > 0) {
      Some(new Distribution(data))
    } else {
      None
    }
  }

  def showQuantiles(out: PrintStream = System.out, quantiles: Traversable[Double]) {
    // scalastyle:off println
    out.println("min\t25%\t50%\t75%\tmax")
    quantiles.foreach{q => out.print(q + "\t")}
    out.println
    // scalastyle:on println
  }
}
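Because showQuantiles on the companion object needs only a PrintStream and the quantile values, it can be exercised directly. A sketch (the numbers are made up, and note the object is private[spark], so the caller must live under the org.apache.spark package):

Distribution.showQuantiles(System.out, Seq(0.1, 2.5, 5.0, 7.5, 9.9))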
Example 5
Source File: PrintLogger.scala From scalismo-faces with Apache License 2.0

package scalismo.faces.sampling.face.loggers

import java.io.PrintStream

import scalismo.sampling.loggers.AcceptRejectLogger
import scalismo.sampling.{DistributionEvaluator, ProposalGenerator}

class VerbosePrintLogger[A](output: PrintStream, prefix: String) extends AcceptRejectLogger[A] {
  private var counter = 0

  override def accept(current: A,
                      sample: A,
                      generator: ProposalGenerator[A],
                      evaluator: DistributionEvaluator[A]): Unit = {
    output.println(s"$counter A ${evaluator.logValue(sample)} $generator $evaluator")
    counter += 1
  }

  override def reject(current: A,
                      sample: A,
                      generator: ProposalGenerator[A],
                      evaluator: DistributionEvaluator[A]): Unit = {
    output.println(s"$counter R ${evaluator.logValue(sample)} $generator $evaluator")
    counter += 1
  }
}
Example 6
Source File: HiveEngineExecutorFactory.scala From Linkis with Apache License 2.0

package com.webank.wedatasphere.linkis.engine.hive.executor

import java.io.PrintStream

import com.webank.wedatasphere.linkis.engine.execute.{EngineExecutor, EngineExecutorFactory}
import com.webank.wedatasphere.linkis.engine.hive.common.HiveUtils
import com.webank.wedatasphere.linkis.engine.hive.exception.HiveSessionStartFailedException
import com.webank.wedatasphere.linkis.server.JMap
import org.apache.hadoop.hive.conf.HiveConf
import org.apache.hadoop.hive.ql.Driver
import org.apache.hadoop.hive.ql.session.SessionState
import org.apache.hadoop.security.UserGroupInformation
import org.slf4j.LoggerFactory
import org.springframework.stereotype.Component

@Component
class HiveEngineExecutorFactory extends EngineExecutorFactory {

  private val logger = LoggerFactory.getLogger(getClass)
  private val HIVE_QUEUE_NAME: String = "mapreduce.job.queuename"
  private val BDP_QUEUE_NAME: String = "wds.linkis.yarnqueue"

  override def createExecutor(options: JMap[String, String]): EngineExecutor = {
    val hiveConf: HiveConf = HiveUtils.getHiveConf
    hiveConf.setVar(HiveConf.ConfVars.HIVEJAR, HiveUtils.jarOfClass(classOf[Driver])
      .getOrElse(throw HiveSessionStartFailedException(40012, "cannot find hive-exec.jar, start session failed!")))
    import scala.collection.JavaConversions._
    options.foreach { case (k, v) => logger.info(s"key is $k, value is $v") }
    options.filter { case (k, v) =>
      k.startsWith("hive.") || k.startsWith("mapreduce.") || k.startsWith("wds.linkis.")
    }.foreach { case (k, v) =>
      logger.info(s"key is $k, value is $v")
      if (BDP_QUEUE_NAME.equals(k)) hiveConf.set(HIVE_QUEUE_NAME, v) else hiveConf.set(k, v)
    }
    val sessionState: SessionState = new SessionState(hiveConf)
    sessionState.out = new PrintStream(System.out, true, "utf-8")
    sessionState.info = new PrintStream(System.out, true, "utf-8")
    sessionState.err = new PrintStream(System.out, true, "utf-8")
    SessionState.start(sessionState)
    val ugi = UserGroupInformation.getCurrentUser
    new HiveEngineExecutor(5000, sessionState, ugi, hiveConf)
  }
}
Example 7
Source File: OutlierFinder.scala From spatial with MIT License

package utils

import java.io.PrintStream

import scala.collection.mutable

class OutlierFinder(val finderName: String) {
  OutlierFinder.set += this

  private val times = new mutable.HashMap[String, mutable.Map[Any, (Long, Long)]]()

  def record[R](name: String, key: Any)(scope: => R): R = {
    if (!times.contains(name)) times += name -> new mutable.HashMap[Any, (Long, Long)]()
    val startTime = System.currentTimeMillis()
    try {
      scope
    } finally {
      val endTime = System.currentTimeMillis()
      val (n, prev) = times(name).getOrElse(key, (0L, 0L))
      times(name) = times(name) + (key -> (n + 1, prev + endTime - startTime))
    }
  }

  def report(stream: PrintStream = Console.out): Unit = {
    times.foreach { case (cat, map) =>
      val avgs = map.toSeq.map { case (name, (n, len)) => (name, len.toDouble / n) }
      val values = avgs.map(_._2)
      val mean = stats.mean(values)
      val stdDev = stats.stdDev(values)
      val outliers = avgs.filter { case (_, avg) => Math.abs(avg - mean) > 2 * stdDev }
      if (outliers.nonEmpty) {
        stream.println(s"$cat")
        stream.println("------")
        stream.println(s"Mean: $mean")
        stream.println(s"StdDev: $stdDev")
        stream.println(s"Outliers: ")
        outliers.foreach { case (name, time) => stream.println(s" $name: $time") }
        stream.println("")
        stream.println("")
      }
    }
    times.foreach { case (cat, map) =>
      val avgs = map.toSeq.map { case (name, (n, len)) => (name, len.toDouble / n) }
      val values = avgs.map(_._2)
      val mean = stats.mean(values)
      val stdDev = stats.stdDev(values)
      stream.println(s"$cat")
      stream.println("------")
      stream.println(s"Mean: $mean")
      stream.println(s"StdDev: $stdDev")
      avgs.sortBy(_._2).foreach { case (name, time) => stream.println(s" $name: $time") }
      stream.println("")
      stream.println("")
    }
  }
}

object OutlierFinder {
  var set: Set[OutlierFinder] = Set.empty

  def report(stream: PrintStream = Console.out): Unit = set.foreach(_.report(stream))
}
Example 8
Source File: Instrument.scala From spatial with MIT License

package utils

import java.io.PrintStream

import utils.implicits.terminal._
import utils.implicits.collections._

import scala.collection.mutable

class Instrument(val top: String = "") {
  var scope: String = top
  val times = new mutable.HashMap[String, Long]()

  def add(i2: Instrument): Unit = {
    i2.times.foreach { case (k, v) => times += k -> (v + times.getOrElse(k, 0L)) }
  }

  @inline def apply[A](name: String)(blk: => A): A = {
    val prev = scope.split('.')
    if (prev.contains(name)) {
      // Don't double count recursive calls (we're already timing this)
      blk
    } else {
      val outerScope = scope
      val fullName = if (scope == "") name else scope + "." + name
      val startTime = System.currentTimeMillis()
      scope = fullName
      try {
        blk
      } finally {
        scope = outerScope
        val endTime = System.currentTimeMillis()
        times(fullName) = times.getOrElse(fullName, 0L) + (endTime - startTime)
      }
    }
  }

  def fix(keys: Iterable[String]): Unit = {
    val cats = keys.map(_.split("."))
    val max = keys.map(_.length).maxOrElse(1)
    for (i <- max until 1 by -1) {
      val cs = cats.flatMap { parts => if (parts.length >= i) Some(parts.take(i)) else None }
      cs.foreach { top =>
        val cat = top.mkString(".")
        val entries = cats.filter(_.startsWith(top)).map(_.mkString("."))
        val total = entries.map { e => times(e) }.sum
        if (!times.contains(cat) || times(cat) < total) times += cat -> total
      }
    }
  }

  private def subcats(cat: String, keys: Iterable[String], depth: Int): Iterable[String] = {
    keys.filter(key => key.startsWith(cat) && key.count(_ == '.') == depth + 1)
        .toSeq.sortBy(key => -times(key))
  }

  private def dumpCategory(cat: String, keys: Iterable[String])(implicit out: PrintStream): Unit = {
    val depth = cat.count(_ == '.')
    val parent = cat.split('.').dropRight(1).mkString(".")
    val time = times.getOrElse(cat, 0L)
    val subs = subcats(cat, keys, depth)
    val parentTime = if (depth == 0) time else times.getOrElse(parent, time)
    out.info(s"$cat: ${time / 1000.0}s (" + "%.2f".format(time.toDouble / parentTime * 100) + "%)")
    subs.foreach(dumpCategory(_, keys))
  }

  private def topKeys: Iterable[String] = top match {
    case "" =>
      val keys = times.keys
      val minDepth = keys.map { key => key.count(_ == '.') }.minOrElse(0)
      keys.filter { key => key.count(_ == '.') == minDepth }
    case t => Seq(t)
  }

  def totalTime: Long = topKeys.map{times.apply}.maxOrElse(0)

  def dump(title: => String, out: PrintStream = Console.out): Unit = {
    fix(times.keySet)
    out.info(title)
    val top = topKeys
    val keys = times.keys ++ top
    top.foreach { cat => dumpCategory(cat, keys)(out) }
  }

  def dumpAll(out: PrintStream = Console.out): Unit = {
    times.foreach { case (name, time) => out.println(s"$name: ${time / 1000.0}s") }
  }

  @inline def reset(): Unit = {
    scope = top
    times.clear()
  }
}
Example 9
Source File: terminal.scala From spatial with MIT License

package utils
package implicits

import java.io.PrintStream

object terminal {

  implicit class PrintReport(out: PrintStream) {
    def log(x: => Any): Unit = out.println(x)
    def log(ctx: Ctx, x: => Any): Unit = log(ctx.toString + ": " + x)
    def log(ctx: Ctx): Unit = log(ctx, showCaret = false)
    def log(ctx: Ctx, showCaret: Boolean): Unit = if (ctx.content.isDefined) {
      log(ctx.content.get)
      if (showCaret) log(" "*(ctx.column-1) + "^") else log("")
    }

    def warn(x: => Any): Unit = out.println(s"[${Console.YELLOW}warn${Console.RESET}] $x")
    def warn(ctx: Ctx, x: => Any): Unit = warn(ctx.toString + ": " + x)
    def warn(ctx: Ctx): Unit = warn(ctx, showCaret = false)
    def warn(ctx: Ctx, showCaret: Boolean): Unit = if (ctx.content.isDefined) {
      warn(ctx.content.get)
      if (showCaret) warn(" "*(ctx.column-1) + "^") else warn("")
    }

    def error(x: => Any): Unit = out.println(s"[${Console.RED}error${Console.RESET}] $x")
    def error(ctx: Ctx, x: => Any): Unit = error(ctx.file + ":" + ctx.line + ": " + x)
    def error(ctx: Ctx): Unit = error(ctx, showCaret = false)
    def error(ctx: Ctx, showCaret: Boolean): Unit = if (ctx.content.isDefined) {
      error(ctx.content.get)
      if (showCaret) error(" "*(ctx.column-1) + "^") else error("")
    }

    def bug(x: => Any): Unit = out.println(s"[${Console.MAGENTA}bug${Console.RESET}] $x")
    def bug(ctx: Ctx, x: => Any): Unit = bug(ctx.file + ":" + ctx.line + ": " + x)
    def bug(ctx: Ctx): Unit = bug(ctx, showCaret = false)
    def bug(ctx: Ctx, showCaret: Boolean): Unit = if (ctx.content.isDefined) {
      bug(ctx.content.get)
      if (showCaret) bug(" "*(ctx.column-1) + "^") else bug("")
    }

    def info(x: => Any): Unit = out.println(s"[${Console.BLUE}info${Console.RESET}] $x")
    def info(ctx: Ctx, x: => Any): Unit = info(ctx.file + ":" + ctx.line + ": " + x)
    def info(ctx: Ctx): Unit = info(ctx, showCaret = false)
    def info(ctx: Ctx, showCaret: Boolean): Unit = if (ctx.content.isDefined) {
      info(ctx.content.get)
      if (showCaret) info(" "*(ctx.column-1) + "^") else info("")
    }
  }
}
Example 10
Source File: CaptureOutputStream.scala From spatial with MIT License

package utils.io

import java.io.{ByteArrayOutputStream, OutputStream, PrintStream}

class CaptureOutputStream extends OutputStream {
  val data = new ByteArrayOutputStream()

  override def write(b: Int): Unit = data.write(b)
  override def write(b: Array[Byte]): Unit = data.write(b)
  override def write(b: Array[Byte], off: Int, len: Int): Unit = data.write(b, off, len)

  def dump: String = new java.lang.String(data.toByteArray, java.nio.charset.StandardCharsets.UTF_8)
}

class CaptureStream(__out: CaptureOutputStream, paired: PrintStream) extends PrintStream(__out) {
  def this(paired: PrintStream) = this(new CaptureOutputStream(), paired)

  def dump: String = __out.dump

  //TODO[5]: For some reason this duplicates the printing
  //override def print(s: String): Unit = { paired.print(s); super.print(s) }
  //override def println(s: String): Unit = { paired.println(s); super.println(s) }
}
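Because CaptureStream is just a PrintStream over an in-memory buffer, a typical use (sketched here, assuming the two classes above are on the classpath) is to swap it in for Console.out and read back whatever was printed:

val capture = new CaptureStream(Console.out)
Console.withOut(capture) {
  println("hello") // redirected into the capture buffer
}
assert(capture.dump.contains("hello"))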
Example 11
Source File: DSEWriterThread.scala From spatial with MIT License

package spatial.dse

import java.io.PrintStream
import java.util.concurrent.BlockingQueue

case class DSEWriterThread(
  threadId:  Int,
  spaceSize: BigInt,
  filename:  String,
  header:    String,
  workQueue: BlockingQueue[Array[String]]
) extends Runnable {

  private var isAlive: Boolean = true
  private var hasTerminated: Boolean = false

  def requestStop(): Unit = { isAlive = false }

  def run(): Unit = {
    val data = new PrintStream(filename)
    data.println(header + ",Timestamp")

    val P = BigDecimal(spaceSize)
    var N = BigDecimal(0)
    var nextNotify = BigDecimal(0); val notifyStep = 5000
    val startTime = System.currentTimeMillis()

    while (isAlive) {
      try {
        val array = workQueue.take()

        if (array.nonEmpty) {
          array.foreach { line => data.println(line) }
          data.flush()

          N += array.length
          if (N > nextNotify) {
            val time = System.currentTimeMillis - startTime
            println(" %.4f".format(100 * (N / P).toFloat) + s"% ($N / $P) Complete after ${time / 1000} seconds")
            nextNotify += notifyStep
          }
        }
        else if (array.isEmpty) requestStop() // Somebody poisoned the work queue!
      }
      catch { case e: Throwable =>
        println(e.getMessage)
        e.getStackTrace.foreach { line => println(" " + line) }
        requestStop()
      }
    }
    data.close()
    hasTerminated = true
  }
}
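A sketch of how the writer thread is driven (the queue contents and file name are invented for illustration): result lines are pushed in batches, and an empty array acts as the poison pill that stops the thread.

import java.util.concurrent.LinkedBlockingQueue

val queue  = new LinkedBlockingQueue[Array[String]]()
val writer = DSEWriterThread(0, BigInt(100), "dse_results.csv", "p1,p2,score", queue)
new Thread(writer).start()

queue.put(Array("8,2,0.75", "16,2,0.81")) // a batch of result lines
queue.put(Array.empty[String])            // empty array poisons the queue and stops the writer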
Example 12
Source File: State.scala From spatial with MIT License

package argon

import java.io.PrintStream

import utils.io.NullOutputStream

import scala.collection.mutable

// (listing truncated on the source page: most of State's fields, such as id, scope,
// cache, pass, log, gen, streams, and the info/warning/error/bug counters, are
// declared in elided code)

class State(val app: DSLRunnable) extends forge.AppState with Serializable {
  var issues: Set[Issue] = Set.empty
  def hasIssues: Boolean = issues.nonEmpty

  def runtimeArgs: Seq[String] = app match {
    case test: DSLTest => test.runtimeArgs.cmds
    case _ => Nil
  }
  def dseModelArgs: Seq[String] = app match {
    case test: DSLTest => test.dseModelArgs.cmds
    case _ => Nil
  }
  def finalModelArgs: Seq[String] = app match {
    case test: DSLTest => test.finalModelArgs.cmds
    case _ => Nil
  }

  def resetErrors(): Unit = errors = 0

  def reset(): Unit = {
    config.reset()
    id = -1
    scope = null
    impure = null
    cache = Map.empty
    globals.reset()
    pass = 1
    logTab = 0
    genTabs.clear()
    log = new PrintStream(new NullOutputStream)
    gen = new PrintStream(new NullOutputStream)
    streams.clear()
    infos = 0
    warnings = 0
    errors = 0
    bugs = 0
    issues = Set.empty
  }

  def copyTo(target: State): Unit = {
    this.config.copyTo(target.config)
    target.id = this.id
    target.scope = this.scope
    target.impure = this.impure
    target.cache = this.cache
    globals.copyTo(target.globals)
    target.pass = this.pass
    target.logTab = this.logTab
    target.genTabs = this.genTabs
    target.log = this.log
    target.gen = this.gen
    target.streams ++= this.streams
    target.infos = this.infos
    target.warnings = this.warnings
    target.errors = this.errors
    target.bugs = this.bugs
  }
}
Example 13
Source File: Warn.scala From spatial with MIT License

package emul

import java.text.SimpleDateFormat
import java.util.Calendar
import java.io.PrintStream

object Warn {
  val now = Calendar.getInstance().getTime
  val fmt = new SimpleDateFormat("dd_MM_yyyy_hh_mm_aa")
  val timestamp = fmt.format(now)

  var warns: Int = 0
  lazy val log = new PrintStream(timestamp + ".log")

  def apply(x: => String): Unit = {
    log.println(x)
    warns += 1
  }

  def close(): Unit = {
    if (warns > 0) {
      println(Warn.warns + " warnings occurred during program execution. See " + Warn.timestamp + ".log for details")
      log.close()
    }
  }
}
Example 14
package emul

import java.io.PrintStream
import java.io.File

object OOB {

  lazy val writeStream = new PrintStream("./logs/writes.log")
  lazy val readStream = new PrintStream("./logs/reads.log")

  def open(): Unit = {
    new File("./logs/").mkdirs()
    writeStream
    readStream
  }

  def close(): Unit = {
    writeStream.close()
    readStream.close()
  }

  def readOrElse[T](mem: String, addr: String, invalid: T, en: Boolean)(rd: => T): T = {
    try {
      val data = rd
      if (en) readStream.println(s"Mem: $mem; Addr: $addr")
      data
    }
    catch { case err: java.lang.ArrayIndexOutOfBoundsException =>
      if (en) readStream.println(s"Mem: $mem; Addr: $addr [OOB]")
      invalid
    }
  }

  def writeOrElse(mem: String, addr: String, data: Any, en: Boolean)(wr: => Unit): Unit = {
    try {
      wr
      if (en) writeStream.println(s"Mem: $mem; Addr: $addr; Data: $data")
    }
    catch { case err: java.lang.ArrayIndexOutOfBoundsException =>
      if (en) writeStream.println(s"Mem: $mem; Addr: $addr; Data: $data [OOB]")
    }
  }
}
Example 15
Source File: FetchSpecMain.scala From eel-sdk with Apache License 2.0

package io.eels.cli

import java.io.PrintStream

import io.eels.{Constants, SourceParser}
import io.eels.component.hive.{HiveSource, HiveSpec}
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.FileSystem
import org.apache.hadoop.hive.conf.HiveConf

object FetchSpecMain {

  implicit val fs = FileSystem.get(new Configuration)
  implicit val hiveConf = new HiveConf

  def apply(args: Seq[String], out: PrintStream = System.out): Unit = {

    val parser = new scopt.OptionParser[Options]("eel") {
      head("eel fetch-spec", Constants.EelVersion)

      opt[String]("dataset") required() action { (source, o) =>
        o.copy(source = source)
      } text "specify dataset, eg hive:database:table"
    }

    parser.parse(args, Options()) match {
      case Some(options) =>
        val builder = SourceParser(options.source).getOrElse(sys.error(s"Unsupported source ${options.source}"))
        val source = builder()
        source match {
          case hive: HiveSource =>
            val spec = hive.spec
            val json = HiveSpec.writeAsJson(spec.copy(tables = spec.tables.filter(_.tableName == hive.tableName)))
            println(json)
          case _ =>
            sys.error(s"Unsupported source $source")
        }
      case _ =>
    }
  }

  case class Options(source: String = null)
}
Example 16
Source File: ApplySpecMain.scala From eel-sdk with Apache License 2.0

package io.eels.cli

import java.io.PrintStream
import java.nio.file.{Path, Paths}

import io.eels.{Constants, SourceParser}
import io.eels.component.hive.{HiveOps, HiveSource, HiveSpec}
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.FileSystem
import org.apache.hadoop.hive.conf.HiveConf
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient

object ApplySpecMain {

  implicit val fs = FileSystem.get(new Configuration)
  implicit val hiveConf = new HiveConf
  implicit val client = new HiveMetaStoreClient(hiveConf)

  def apply(args: Seq[String], out: PrintStream = System.out): Unit = {

    val parser = new scopt.OptionParser[Options]("eel") {
      head("eel apply-spec", Constants.EelVersion)

      opt[String]("dataset") required() action { (source, o) =>
        o.copy(source = source)
      } text "specify dataset, eg hive:database:table"

      opt[String]("spec") required() action { (schema, o) =>
        o.copy(specPath = Paths.get(schema))
      } text "specify path to eel spec"
    }

    parser.parse(args, Options()) match {
      case Some(options) =>
        val builder = SourceParser(options.source).getOrElse(sys.error(s"Unsupported source ${options.source}"))
        val source = builder()
        source match {
          case hive: HiveSource =>
            HiveOps.applySpec(HiveSpec(options.specPath), false)
          case _ =>
            sys.error(s"Unsupported source $source")
        }
      case _ =>
    }
  }

  case class Options(source: String = null, specPath: Path = null)
}
Example 17
Source File: ShowSchemaMain.scala From eel-sdk with Apache License 2.0

package io.eels.cli

import java.io.PrintStream

import io.eels.{Constants, SourceParser}
import io.eels.component.avro.AvroSchemaFn
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.FileSystem
import org.apache.hadoop.hive.conf.HiveConf

object ShowSchemaMain {

  implicit val fs = FileSystem.get(new Configuration)
  implicit val hiveConf = new HiveConf

  def apply(args: Seq[String], out: PrintStream = System.out): Unit = {

    val parser = new scopt.OptionParser[Options]("eel") {
      head("eel schema", Constants.EelVersion)

      opt[String]("source") required() action { (source, o) =>
        o.copy(source = source)
      } text "specify source, eg hive:database:table or parquet:/path/to/file"
    }

    parser.parse(args, Options()) match {
      case Some(options) =>
        val builder = SourceParser(options.source).getOrElse(sys.error(s"Unsupported source ${options.source}"))
        val source = builder()
        val schema = source.schema
        val avroSchema = AvroSchemaFn.toAvro(schema)
        out.println(avroSchema)
      case _ =>
    }
  }

  case class Options(source: String = "")
}
Example 18
Source File: AnalyzeMain.scala From eel-sdk with Apache License 2.0

package io.eels.cli

import java.io.PrintStream

import io.eels.{Constants, SourceParser}
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.FileSystem
import org.apache.hadoop.hive.conf.HiveConf

object AnalyzeMain {

  import scala.concurrent.ExecutionContext.Implicits.global

  implicit val fs = FileSystem.get(new Configuration)
  implicit val hiveConf = new HiveConf

  def apply(args: Seq[String], out: PrintStream = System.out): Unit = {

    val parser = new scopt.OptionParser[Options]("eel") {
      head("eel analyze", Constants.EelVersion)

      opt[String]("dataset") required() action { (source, o) =>
        o.copy(source = source)
      } text "specify dataset, eg hive:database:table"

      opt[Boolean]("reverse") optional() action { (reverse, o) =>
        o.copy(reverse = reverse)
      } text "specify reverse ordering of columns, eg most distinct first"
    }

    parser.parse(args, Options()) match {
      case Some(options) =>
        val builder = SourceParser(options.source).getOrElse(sys.error(s"Unsupported source ${options.source}"))
        val result = builder().counts.toSeq.sortBy(_._2.size)
        val orderedResults = if (options.reverse) result.reverse else result
        for ((columnName, columnCounts) <- orderedResults) {
          println(columnName)
          for ((value, counts) <- columnCounts) {
            println(s"\t$value ($counts)")
          }
        }
      case _ =>
    }
  }

  case class Options(source: String = null, reverse: Boolean = false)
}
Example 19
Source File: ShowSchemaMainTest.scala From eel-sdk with Apache License 2.0

package io.eels.cli

import java.io.{ByteArrayOutputStream, PrintStream}

import org.scalatest.{Matchers, WordSpec}

class ShowSchemaMainTest extends WordSpec with Matchers {

  "SchemaMain" should {
    "display schema for specified avro source" in {
      val baos = new ByteArrayOutputStream
      val out = new PrintStream(baos)
      ShowSchemaMain(Seq("--source", "avro:" + getClass.getResource("/test.avro").getFile), out)
      new String(baos.toByteArray).trim shouldBe """{"type":"record","name":"row","namespace":"namespace","fields":[{"name":"name","type":"string"},{"name":"job","type":"string"},{"name":"location","type":"string"}]}"""
    }
  }
}
Example 20
Source File: SparkSQLEnv.scala From XSQL with Apache License 2.0

package org.apache.spark.sql.hive.thriftserver

import java.io.PrintStream

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.internal.Logging
import org.apache.spark.sql.{SparkSession, SQLContext}
import org.apache.spark.sql.hive.{HiveExternalCatalog, HiveUtils}
import org.apache.spark.sql.internal.StaticSQLConf.CATALOG_IMPLEMENTATION
import org.apache.spark.util.Utils

// (listing truncated on the source page: the enclosing object SparkSQLEnv and its
// sparkContext/sqlContext fields and init method are elided)

  def stop() {
    logDebug("Shutting down Spark SQL Environment")
    // Stop the SparkContext
    if (SparkSQLEnv.sparkContext != null) {
      sparkContext.stop()
      sparkContext = null
      sqlContext = null
    }
  }
}
Example 21
Source File: MiscExpressionsSuite.scala From XSQL with Apache License 2.0

package org.apache.spark.sql.catalyst.expressions

import java.io.PrintStream

import scala.util.Random

import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.types._

class MiscExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {

  test("assert_true") {
    intercept[RuntimeException] {
      checkEvaluation(AssertTrue(Literal.create(false, BooleanType)), null)
    }
    intercept[RuntimeException] {
      checkEvaluation(AssertTrue(Cast(Literal(0), BooleanType)), null)
    }
    intercept[RuntimeException] {
      checkEvaluation(AssertTrue(Literal.create(null, NullType)), null)
    }
    intercept[RuntimeException] {
      checkEvaluation(AssertTrue(Literal.create(null, BooleanType)), null)
    }
    checkEvaluation(AssertTrue(Literal.create(true, BooleanType)), null)
    checkEvaluation(AssertTrue(Cast(Literal(1), BooleanType)), null)
  }

  test("uuid") {
    checkEvaluation(Length(Uuid(Some(0))), 36)
    val r = new Random()
    val seed1 = Some(r.nextLong())
    assert(evaluateWithoutCodegen(Uuid(seed1)) === evaluateWithoutCodegen(Uuid(seed1)))
    assert(evaluateWithGeneratedMutableProjection(Uuid(seed1)) ===
      evaluateWithGeneratedMutableProjection(Uuid(seed1)))
    assert(evaluateWithUnsafeProjection(Uuid(seed1)) ===
      evaluateWithUnsafeProjection(Uuid(seed1)))

    val seed2 = Some(r.nextLong())
    assert(evaluateWithoutCodegen(Uuid(seed1)) !== evaluateWithoutCodegen(Uuid(seed2)))
    assert(evaluateWithGeneratedMutableProjection(Uuid(seed1)) !==
      evaluateWithGeneratedMutableProjection(Uuid(seed2)))
    assert(evaluateWithUnsafeProjection(Uuid(seed1)) !==
      evaluateWithUnsafeProjection(Uuid(seed2)))

    val uuid = Uuid(seed1)
    assert(uuid.fastEquals(uuid))
    assert(!uuid.fastEquals(Uuid(seed1)))
    assert(!uuid.fastEquals(uuid.freshCopy()))
    assert(!uuid.fastEquals(Uuid(seed2)))
  }

  test("PrintToStderr") {
    val inputExpr = Literal(1)
    val systemErr = System.err
    val (outputEval, outputCodegen) = try {
      val errorStream = new java.io.ByteArrayOutputStream()
      System.setErr(new PrintStream(errorStream))
      // check without codegen
      checkEvaluationWithoutCodegen(PrintToStderr(inputExpr), 1)
      val outputEval = errorStream.toString
      errorStream.reset()
      // check with codegen
      checkEvaluationWithGeneratedMutableProjection(PrintToStderr(inputExpr), 1)
      val outputCodegen = errorStream.toString
      (outputEval, outputCodegen)
    } finally {
      System.setErr(systemErr)
    }
    assert(outputCodegen.contains(s"Result of $inputExpr is 1"))
    assert(outputEval.contains(s"Result of $inputExpr is 1"))
  }
}
Example 22
Source File: SapSQLEnv.scala From HANAVora-Extensions with Apache License 2.0

package org.apache.spark.sql.hive.sap.thriftserver

import java.io.PrintStream

import org.apache.spark.scheduler.StatsReportListener
import org.apache.spark.sql.hive.{HiveContext, SapHiveContext}
import org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver
import org.apache.spark.sql.hive.thriftserver.SparkSQLEnv._
import org.apache.spark.util.Utils
import org.apache.spark.{Logging, SparkConf, SparkContext}

import scala.collection.JavaConversions._

object SapSQLEnv extends Logging {

  def init() {
    logDebug("Initializing SapSQLEnv")
    if (hiveContext == null) {
      logInfo("Creating SapSQLContext")
      val sparkConf = new SparkConf(loadDefaults = true)
      val maybeSerializer = sparkConf.getOption("spark.serializer")
      val maybeKryoReferenceTracking = sparkConf.getOption("spark.kryo.referenceTracking")
      // If user doesn't specify the appName, we want to get [SparkSQL::localHostName] instead of
      // the default appName [SparkSQLCLIDriver] in cli or beeline.
      val maybeAppName = sparkConf
        .getOption("spark.app.name")
        .filterNot(_ == classOf[SparkSQLCLIDriver].getName)

      sparkConf
        .setAppName(maybeAppName.getOrElse(s"SparkSQL::${Utils.localHostName()}"))
        .set("spark.serializer",
          maybeSerializer.getOrElse("org.apache.spark.serializer.KryoSerializer"))
        .set("spark.kryo.referenceTracking",
          maybeKryoReferenceTracking.getOrElse("false"))

      sparkContext = new SparkContext(sparkConf)
      sparkContext.addSparkListener(new StatsReportListener())
      hiveContext = new SapHiveContext(sparkContext)

      hiveContext.metadataHive.setOut(new PrintStream(System.out, true, "UTF-8"))
      hiveContext.metadataHive.setInfo(new PrintStream(System.err, true, "UTF-8"))
      hiveContext.metadataHive.setError(new PrintStream(System.err, true, "UTF-8"))

      hiveContext.setConf("spark.sql.hive.version", HiveContext.hiveExecutionVersion)

      if (log.isDebugEnabled) {
        hiveContext.hiveconf.getAllProperties.toSeq.sorted.foreach { case (k, v) =>
          logDebug(s"HiveConf var: $k=$v")
        }
      }
    }
  }
}
Example 23
Source File: StyleChecker.scala From sbt-coursera with BSD 3-Clause "New" or "Revised" License

package ch.epfl.lamp

import sbt.File
import java.io.ByteArrayOutputStream
import java.io.PrintStream
import org.scalastyle._

import Settings._

object StyleChecker {
  val maxResult = 100

  class CustomTextOutput[T <: FileSpec]() extends Output[T] {
    private val messageHelper = new MessageHelper(this.getClass().getClassLoader())

    var fileCount: Int = _
    override def message(m: Message[T]): Unit = m match {
      case StartWork() =>
      case EndWork() =>
      case StartFile(file) =>
        print("Checking file " + file + "...")
        fileCount = 0
      case EndFile(file) =>
        if (fileCount == 0) println(" OK!")
      case StyleError(file, clazz, key, level, args, line, column, customMessage) =>
        report(line, column, messageHelper.text(level.name),
          Output.findMessage(messageHelper, clazz, key, args, customMessage))
      case StyleException(file, clazz, message, stacktrace, line, column) =>
        report(line, column, "error", message)
    }

    private def report(line: Option[Int], column: Option[Int], level: String, message: String) {
      if (fileCount == 0) println("")
      fileCount += 1
      println(" " + fileCount + ". " + level + pos(line, column) + ":")
      println(" " + message)
    }

    private def pos(line: Option[Int], column: Option[Int]): String = line match {
      case Some(line) => " at line " + line + (column match {
        case Some(column) => " character " + column
        case None => ""
      })
      case None => ""
    }
  }

  def score(outputResult: OutputResult) = {
    val penalties = outputResult.errors + outputResult.warnings
    scala.math.max(maxResult - penalties, 0)
  }

  def assess(sources: Seq[File], styleSheetPath: String, courseId: String): (String, Int) = {
    val configFile = new File(styleSheetPath).getAbsolutePath

    val messages = new ScalastyleChecker().checkFiles(
      ScalastyleConfiguration.readFromXml(configFile),
      Directory.getFiles(None, sources))

    val output = new ByteArrayOutputStream()
    val outputResult = Console.withOut(new PrintStream(output)) {
      new CustomTextOutput().output(messages)
    }

    val msg =
      output.toString +
      "Processed " + outputResult.files + " file(s)\n" +
      "Found " + outputResult.errors + " errors\n" +
      "Found " + outputResult.warnings + " warnings\n" +
      (if (outputResult.errors + outputResult.warnings > 0)
        "Consult the style guide at %s/wiki/ScalaStyleGuide".format(baseURL(courseId))
      else "")

    (msg, score(outputResult))
  }
}
Example 24
Source File: LoggerPrintStream.scala From ledger-manager-chrome with MIT License

package co.ledger.wallet.core.utils.logs

import java.io.{OutputStream, PrintStream}

import scala.scalajs.js
import js.Dynamic.global

// (listing truncated on the source page: the LoggerPrintStream class declaration and
// its buffer/flushed/logLevel fields are elided)

  override def flush(): Unit = if (!flushed) {
    doWriteLine(buffer + LineContEnd)
    buffer = LineContStart
    flushed = true
  }

  override def close(): Unit = ()

  private def doWriteLine(line: String): Unit = {
    Logger.log(logLevel, "Global", line)
  }
}

object LoggerPrintStream {
  private final val LineContEnd: String = "\u21A9"
  private final val LineContStart: String = "\u21AA"

  class DummyOutputStream extends OutputStream {
    def write(c: Int): Unit =
      throw new AssertionError("Should not get in JSConsoleBasedPrintStream.DummyOutputStream")
  }

  def init(): Unit = {
    System.setErr(new LoggerPrintStream("E"))
    System.setOut(new LoggerPrintStream("D"))
  }
}
Example 25
Source File: MTLSpecs.scala From shims with Apache License 2.0

package shims.effect

import cats.effect.{ContextShift, IO}
import cats.effect.laws.discipline.{arbitrary, AsyncTests, ConcurrentEffectTests, ConcurrentTests}, arbitrary._
import cats.effect.laws.util.{TestContext, TestInstances}, TestInstances._

import cats.{Eq, Functor, Monad}
import cats.instances.either._
import cats.instances.int._
import cats.instances.option._
import cats.instances.tuple._
import cats.instances.unit._
import cats.syntax.functor._

import scalaz.{EitherT, Kleisli, OptionT, StateT, WriterT}

import org.scalacheck.{Arbitrary, Prop}

import org.specs2.Specification
import org.specs2.scalacheck.Parameters
import org.specs2.specification.core.Fragments

import org.typelevel.discipline.Laws
import org.typelevel.discipline.specs2.Discipline

import scala.concurrent.ExecutionContext
import scala.util.control.NonFatal

import java.io.{ByteArrayOutputStream, PrintStream}

object MTLSpecs extends Specification with Discipline {

  def is =
    br ^ checkAllAsync("OptionT[IO, ?]",
      implicit ctx => ConcurrentTests[OptionT[IO, ?]].concurrent[Int, Int, Int]) ^ br ^
    checkAllAsync("Kleisli[IO, Int, ?]",
      implicit ctx => ConcurrentTests[Kleisli[IO, Int, ?]].concurrent[Int, Int, Int]) ^ br ^
    checkAllAsync("EitherT[IO, Throwable, ?]",
      implicit ctx => ConcurrentEffectTests[EitherT[IO, Throwable, ?]].concurrentEffect[Int, Int, Int]) ^ br ^
    checkAllAsync("StateT[IO, Int, ?]",
      implicit ctx => AsyncTests[StateT[IO, Int, ?]].async[Int, Int, Int]) ^ br ^
    checkAllAsync("WriterT[IO, Int, ?]",
      implicit ctx => ConcurrentEffectTests[WriterT[IO, Int, ?]].concurrentEffect[Int, Int, Int])

  def checkAllAsync(name: String, f: TestContext => Laws#RuleSet)(implicit p: Parameters) = {
    val context = TestContext()
    val ruleSet = f(context)

    Fragments.foreach(ruleSet.all.properties.toList) {
      case (id, prop) =>
        s"$name.$id" ! check(Prop(p => silenceSystemErr(prop(p))), p, defaultFreqMapPretty) ^ br
    }
  }

  implicit def iocsForEC(implicit ec: ExecutionContext): ContextShift[IO] =
    IO.contextShift(ec)

  implicit def optionTArbitrary[F[_], A](implicit arbFA: Arbitrary[F[Option[A]]]): Arbitrary[OptionT[F, A]] =
    Arbitrary(arbFA.arbitrary.map(OptionT.optionT(_)))

  implicit def kleisliArbitrary[F[_], R, A](implicit arbRFA: Arbitrary[R => F[A]]): Arbitrary[Kleisli[F, R, A]] =
    Arbitrary(arbRFA.arbitrary.map(Kleisli(_)))

  implicit def eitherTArbitrary[F[_]: Functor, L, A](implicit arbEA: Arbitrary[F[Either[L, A]]]): Arbitrary[EitherT[F, L, A]] =
    Arbitrary(arbEA.arbitrary.map(fe => EitherT.eitherT(fe.map(_.asScalaz))))

  implicit def stateTArbitrary[F[_]: Monad, S, A](implicit arbSFA: Arbitrary[S => F[(S, A)]]): Arbitrary[StateT[F, S, A]] =
    Arbitrary(arbSFA.arbitrary.map(StateT(_)))

  implicit def writerTArbitrary[F[_], L, A](implicit arbFLA: Arbitrary[F[(L, A)]]): Arbitrary[WriterT[F, L, A]] =
    Arbitrary(arbFLA.arbitrary.map(WriterT(_)))

  implicit def kleisliEq[F[_], A](implicit eqv: Eq[F[A]]): Eq[Kleisli[F, Int, A]] =
    Eq.by(_(42)) // totally random and comprehensive seed

  implicit def stateTEq[F[_]: Monad, S, A](implicit eqv: Eq[F[(Int, A)]]): Eq[StateT[F, Int, A]] =
    Eq.by(_.run(42)) // totally random and comprehensive seed

  // copied from cats-effect
  private def silenceSystemErr[A](thunk: => A): A = synchronized {
    // Silencing System.err
    val oldErr = System.err
    val outStream = new ByteArrayOutputStream()
    val fakeErr = new PrintStream(outStream)
    System.setErr(fakeErr)
    try {
      val result = thunk
      System.setErr(oldErr)
      result
    } catch {
      case NonFatal(e) =>
        System.setErr(oldErr)
        // In case of errors, print whatever was caught
        fakeErr.close()
        val out = outStream.toString("utf-8")
        if (out.nonEmpty) oldErr.println(out)
        throw e
    }
  }
}
Example 26
Source File: Text.scala From codacy-analysis-cli with GNU Affero General Public License v3.0

package com.codacy.analysis.cli.formatter

import java.io.PrintStream
import java.nio.file.Path

import com.codacy.analysis.core.model._
import com.codacy.plugins.api.results
import com.codacy.plugins.api.duplication.DuplicationCloneFile

object Text extends FormatterCompanion {
  val name: String = "text"
  def apply(stream: PrintStream): Formatter = new Text(stream)
}

private[formatter] class Text(val stream: PrintStream) extends Formatter {

  override def begin(): Unit = {
    stream.println("Starting analysis ...")
    stream.flush()
  }

  override def end(): Unit = {
    stream.println("Analysis complete")
    stream.flush()
  }

  override def add(element: Result): Unit = {
    element match {
      case Issue(patternId, filename, message, level, category, location) =>
        stream.println(prettyMessage(patternId, filename, message, level, category, location))
        stream.flush()
      case FileError(filename, message) =>
        stream.println(s"Found $message in $filename")
        stream.flush()
      case DuplicationClone(_, nrTokens, nrLines, files) =>
        stream.println(prettyMessage(nrTokens, nrLines, files))
        stream.flush()
      case fileMetrics: FileMetrics =>
        stream.println(prettyMessage(fileMetrics))
        stream.flush()
    }
  }

  private def prettyMessage(fileMetrics: FileMetrics): String = {
    val fileMetricsValues = List(
      fileMetrics.complexity.map(complexityNum => s" CC - $complexityNum"),
      fileMetrics.loc.map(loc => s" LOC - $loc"),
      fileMetrics.cloc.map(cloc => s" CLOC - $cloc"),
      fileMetrics.nrMethods.map(nrMethods => s" #methods - $nrMethods"),
      fileMetrics.nrClasses.map(nrClasses => s" #classes - $nrClasses")).collect {
      case Some(namedValue) => namedValue
    }
    val coloredMetricsFound = Console.MAGENTA + "Metrics" + Console.RESET
    val boldFileName = s"${Console.BOLD}${fileMetrics.filename}${Console.RESET}"
    if (fileMetricsValues.isEmpty) {
      s"No [$coloredMetricsFound] found in $boldFileName."
    } else {
      s"Found [$coloredMetricsFound] in $boldFileName:\n${fileMetricsValues.mkString("\n")}"
    }
  }

  private def prettyMessage(patternId: results.Pattern.Id,
                            filename: Path,
                            message: Issue.Message,
                            level: results.Result.Level,
                            category: Option[results.Pattern.Category],
                            location: Location): String = {
    val categoryColored = Console.YELLOW + category.fold("")(c => s"/${c.toString}") + Console.RESET
    val levelColored = levelColor(level) + level + Console.RESET
    val patternColored = Console.BOLD + patternId + Console.RESET
    s"Found [$levelColored$categoryColored] `$message` in $filename:$location ($patternColored)"
  }

  private def prettyMessage(nrTokens: Int, nrLines: Int, files: Set[DuplicationCloneFile]): String = {
    val coloredCloneFound = Console.CYAN + "Clone" + Console.RESET
    val duplicatedFilesMsg = files
      .groupBy(_.filePath)
      .map {
        case (filePath, cloneFiles) =>
          val lineNumbers = cloneFiles.map(cloneFile => s" l. ${cloneFile.startLine} - ${cloneFile.endLine}").mkString("\n")
          s" ${Console.BOLD}$filePath${Console.RESET}\n$lineNumbers"
      }
      .mkString("\n")
    s"Found [$coloredCloneFound] $nrLines duplicated lines with $nrTokens tokens:\n$duplicatedFilesMsg"
  }

  private def levelColor(level: results.Result.Level): String = {
    level match {
      case results.Result.Level.Info => Console.BLUE
      case results.Result.Level.Warn => Console.YELLOW
      case results.Result.Level.Err => Console.RED
    }
  }
}
Example 27
Source File: Json.scala From codacy-analysis-cli with GNU Affero General Public License v3.0

package com.codacy.analysis.cli.formatter

import java.io.PrintStream
import java.nio.file.Path

import com.codacy.analysis.core.model.Result
import com.codacy.plugins.api.results
import io.circe.Encoder
import io.circe.generic.auto._
import io.circe.syntax._

import scala.util.Properties

object Json extends FormatterCompanion {
  val name: String = "json"
  def apply(stream: PrintStream): Formatter = new Json(stream)
}

private[formatter] class Json(val stream: PrintStream) extends Formatter {

  private var alreadyPrinted: Boolean = false

  private implicit val categoryEncoder: Encoder[results.Pattern.Category.Value] =
    Encoder.encodeEnumeration(results.Pattern.Category)
  private implicit val levelEncoder: Encoder[results.Result.Level.Value] =
    Encoder.encodeEnumeration(results.Result.Level)
  private implicit val fileEncoder: Encoder[Path] = Encoder[String].contramap(_.toString)

  override def begin(): Unit = {
    stream.print("[")
  }

  override def end(): Unit = {
    stream.print("]")
    stream.print(Properties.lineSeparator)
    stream.flush()
  }

  def add(element: Result): Unit = {
    if (alreadyPrinted) stream.print(",") else alreadyPrinted = true
    stream.print(element.asJson.noSpaces)
  }
}
Example 28
Source File: LogEnv.scala From airframe with Apache License 2.0

package wvlet.log

import java.io.PrintStream
import java.lang.management.ManagementFactory
import javax.management.{InstanceAlreadyExistsException, ObjectName}

import wvlet.log.LogFormatter.SourceCodeLogFormatter

// (listing truncated on the source page: the enclosing LogEnv object declaration and
// some of its members are elided)

  override def getLoggerName(cl: Class[_]): String = {
    var name = cl.getName
    if (name.endsWith("$")) {
      // Remove trailing $ of Scala Object name
      name = name.substring(0, name.length - 1)
    }
    // When class is an anonymous trait
    if (name.contains("$anon$")) {
      import collection.JavaConverters._
      val interfaces = cl.getInterfaces
      if (interfaces != null && interfaces.length > 0) {
        // Use the first interface name instead of the anonymous name
        name = interfaces(0).getName
      }
    }
    name
  }
  override def scheduleLogLevelScan: Unit = {
    LogLevelScanner.scheduleLogLevelScan
  }
  override def stopScheduledLogLevelScan: Unit = {
    LogLevelScanner.stopScheduledLogLevelScan
  }
  override def scanLogLevels: Unit = {
    LogLevelScanner.scanLogLevels
  }
  override def scanLogLevels(loglevelFileCandidates: Seq[String]): Unit = {
    LogLevelScanner.scanLogLevels(loglevelFileCandidates)
  }

  private def onGraalVM: Boolean = {
    // https://www.graalvm.org/sdk/javadoc/index.html?constant-values.html
    val graalVMFlag = Option(System.getProperty("org.graalvm.nativeimage.kind"))
    graalVMFlag.map(p => p == "executable" || p == "shared").getOrElse(false)
  }

  private val mBeanName = new ObjectName("wvlet.log:type=Logger")

  // Register JMX entry upon start-up
  registerJMX

  override def registerJMX: Unit = {
    if (!onGraalVM) {
      // Register the log level configuration interface to JMX
      val mbeanServer = ManagementFactory.getPlatformMBeanServer
      if (!mbeanServer.isRegistered(mBeanName)) {
        try {
          mbeanServer.registerMBean(LoggerJMX, mBeanName)
        } catch {
          case e: InstanceAlreadyExistsException =>
          // this exception can happen as JMX entries can be initialized by different class loaders while running sbt
        }
      }
    }
  }

  override def unregisterJMX: Unit = {
    if (!onGraalVM) {
      val mbeanServer = ManagementFactory.getPlatformMBeanServer
      if (mbeanServer.isRegistered(mBeanName)) {
        mbeanServer.unregisterMBean(mBeanName)
      }
    }
  }
}
Example 29
Source File: Handler.scala From airframe with Apache License 2.0

package wvlet.log

import java.io.{PrintStream, PrintWriter}
import java.util.{logging => jl}

class BufferedLogHandler(formatter: LogFormatter) extends jl.Handler {
  private val buf = Seq.newBuilder[String]

  override def flush(): Unit = {}

  override def publish(record: jl.LogRecord): Unit = synchronized {
    buf += formatter.format(record)
  }

  override def close(): Unit = {
    // do nothing
  }

  def logs: Seq[String] = buf.result()

  def clear: Unit = {
    buf.clear()
  }
}
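Since BufferedLogHandler is an ordinary java.util.logging Handler, it can be attached to any j.u.l logger and its formatted records read back from memory. A sketch, assuming wvlet.log's built-in SourceCodeLogFormatter as the formatter:

import java.util.{logging => jl}
import wvlet.log.{BufferedLogHandler, LogFormatter}

val handler = new BufferedLogHandler(LogFormatter.SourceCodeLogFormatter)
val logger = jl.Logger.getLogger("demo")
logger.addHandler(handler)
logger.info("hello")
handler.logs.foreach(println) // formatted records accumulated in memory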
Example 30
Source File: LogEnv.scala From airframe with Apache License 2.0

package wvlet.log

import java.io.PrintStream

private[log] object LogEnv extends LogEnvBase {
  override def isScalaJS: Boolean = true
  override def defaultLogLevel: LogLevel = LogLevel.INFO
  override def defaultConsoleOutput: PrintStream = Console.out
  override def defaultHandler = JSConsoleLogHandler()

  override def getLoggerName(cl: Class[_]): String = {
    var name = cl.getName
    // In Scala.js we cannot use cl.getInterfaces to find the actual type
    val pos = name.indexOf("$")
    if (pos > 0) {
      // Remove trailing $xxx
      name = name.substring(0, pos)
    }
    name
  }
  override def scheduleLogLevelScan: Unit = {
    // no-op
  }
  override def stopScheduledLogLevelScan: Unit = {
    // no-op
  }
  override def scanLogLevels: Unit = {
    // no-op
  }
  override def scanLogLevels(loglevelFileCandidates: Seq[String]): Unit = {
    // no-op
  }
  override def registerJMX: Unit = {
    // no-op
  }
  override def unregisterJMX: Unit = {
    // no-op
  }
}
Example 31
Source File: FileLoggerSuite.scala From lsp4s with Apache License 2.0

package tests

import java.io.ByteArrayOutputStream
import java.io.PrintStream
import java.nio.charset.StandardCharsets
import java.nio.file.Files

import minitest.SimpleTestSuite

object FileLoggerSuite extends SimpleTestSuite {
  test("logs don't go to stdout") {
    val path = Files.createTempFile("lsp4s", ".log")
    val baos = new ByteArrayOutputStream()
    val writer = scribe.writer.FileWriter().path(_ => path).autoFlush
    Console.withOut(new PrintStream(baos)) {
      val logger = scribe.Logger("lsp4s").orphan().withHandler(writer = writer)
      logger.info("This is info")
      logger.warn("This is warning")
      logger.error("This is error")
    }
    val obtainedOut = baos.toString()
    assert(obtainedOut.isEmpty)
    val obtainedLogs = new String(Files.readAllBytes(path), StandardCharsets.UTF_8)
    List("info", "warning", "error").foreach { message =>
      assert(obtainedLogs.contains(s"This is $message"), obtainedLogs)
    }
  }
}
Example 32
Source File: OapEnv.scala From OAP with Apache License 2.0

package org.apache.spark.sql.hive.thriftserver

import java.io.PrintStream

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.internal.Logging
import org.apache.spark.sql.{SparkSession, SQLContext}
import org.apache.spark.sql.hive.{HiveExternalCatalog, HiveUtils}
import org.apache.spark.sql.oap.listener.OapListener
import org.apache.spark.sql.oap.ui.OapTab
import org.apache.spark.util.Utils

private[spark] object OapEnv extends Logging {
  logDebug("Initializing Oap Env")

  var initialized: Boolean = false
  var sparkSession: SparkSession = _

  // This is to enable certain OAP features, like UI, even
  // in non-Spark SQL CLI/ThriftServer conditions
  def initWithoutCreatingSparkSession(): Unit = synchronized {
    if (!initialized && !Utils.isTesting) {
      val sc = SparkContext.getOrCreate()
      sc.addSparkListener(new OapListener)
      this.sparkSession = SparkSession.getActiveSession.get
      sc.ui.foreach(new OapTab(_))
      initialized = true
    }
  }
}
Example 33
Source File: AddDeps.scala From incubator-toree with Apache License 2.0

package org.apache.toree.magic.builtin

import java.io.{File, PrintStream}
import java.net.URL

import org.apache.toree.dependencies.Credentials
import org.apache.toree.magic._
import org.apache.toree.magic.dependencies._
import org.apache.toree.utils.ArgumentParsingSupport

import scala.util.Try
import org.apache.toree.plugins.annotations.Event

class AddDeps extends LineMagic with IncludeInterpreter
  with IncludeOutputStream with ArgumentParsingSupport
  with IncludeDependencyDownloader with IncludeKernel
{
  private def printStream = new PrintStream(outputStream)

  private val _transitive = parser.accepts(
    "transitive", "Retrieve dependencies recursively"
  )

  private val _verbose = parser.accepts(
    "verbose", "Prints out additional information"
  )

  private val _trace = parser.accepts(
    "trace", "Prints out trace of download progress"
  )

  private val _abortOnResolutionErrors = parser.accepts(
    "abort-on-resolution-errors", "Abort (no downloads) when resolution fails"
  )

  private val _exclude = parser.accepts(
    "exclude", "exclude dependency"
  ).withRequiredArg().ofType(classOf[String])

  private val _repository = parser.accepts(
    "repository", "Adds an additional repository to available list"
  ).withRequiredArg().ofType(classOf[String])

  private val _credentials = parser.accepts(
    "credential", "Adds a credential file to be used to the list"
  ).withRequiredArg().ofType(classOf[String])

  private val _configuration = parser.accepts(
    "ivy-configuration", "Sets the Ivy configuration for the dependency; defaults to \"default\""
  ).withRequiredArg().ofType(classOf[String])

  private val _classifier = parser.accepts(
    "classifier", "Sets the dependency's classifier"
  ).withRequiredArg().ofType(classOf[String])

  @Event(name = "adddeps")
  override def execute(code: String): Unit = {
    val nonOptionArgs = parseArgs(code)
    dependencyDownloader.setPrintStream(printStream)

    val repository = getAll(_repository).getOrElse(Nil)
    val credentials = getAll(_credentials).getOrElse(Nil)

    val excludes = getAll(_exclude).getOrElse(Nil)
    val excludesSet = excludes.map((x: String) => {
      if (x.contains(":")) {
        (x.split(":")(0), x.split(":")(1))
      } else {
        (x, "*")
      }
    }: (String, String)).toSet

    val repositoriesWithCreds = dependencyDownloader.resolveRepositoriesAndCredentials(repository, credentials)

    if (nonOptionArgs.size == 3) {
      // get the jars and hold onto the paths at which they reside
      val uris = dependencyDownloader.retrieve(
        groupId = nonOptionArgs.head,
        artifactId = nonOptionArgs(1),
        version = nonOptionArgs(2),
        transitive = _transitive,
        ignoreResolutionErrors = !_abortOnResolutionErrors,
        extraRepositories = repositoriesWithCreds,
        verbose = _verbose,
        trace = _trace,
        excludes = excludesSet,
        configuration = get(_configuration),
        artifactClassifier = get(_classifier)
      )

      // pass the new Jars to the kernel
      kernel.addJars(uris.filter(_.getPath.endsWith(".jar")): _*)
    } else {
      printHelp(printStream, """%AddDeps my.company artifact-id version""")
    }
  }
}
Example 34
Source File: AddJar.scala From incubator-toree with Apache License 2.0

package org.apache.toree.magic.builtin

import java.io.{File, PrintStream}
import java.net.{URL, URI}
import java.nio.file.{Files, Paths}
import java.util.zip.ZipFile

import org.apache.toree.magic._
import org.apache.toree.magic.builtin.AddJar._
import org.apache.toree.magic.dependencies._
import org.apache.toree.utils.{ArgumentParsingSupport, DownloadSupport, LogLike, FileUtils}
import com.typesafe.config.Config
import org.apache.hadoop.fs.Path
import org.apache.toree.plugins.annotations.Event

object AddJar {
  val HADOOP_FS_SCHEMES = Set("hdfs", "s3", "s3n", "file")

  private var jarDir: Option[String] = None

  def getJarDir(config: Config): String = {
    jarDir.getOrElse({
      jarDir = Some(
        if (config.hasPath("jar_dir") && Files.exists(Paths.get(config.getString("jar_dir")))) {
          config.getString("jar_dir")
        } else {
          FileUtils.createManagedTempDirectory("toree_add_jars").getAbsolutePath
        }
      )
      jarDir.get
    })
  }
}

class AddJar
  extends LineMagic with IncludeInterpreter with IncludeOutputStream
  with DownloadSupport with ArgumentParsingSupport with IncludeKernel
  with IncludePluginManager with IncludeConfig with LogLike
{
  // Option to mark re-downloading of jars
  private val _force = parser.accepts("f", "forces re-download of specified jar")

  // Option to mark re-downloading of jars
  private val _magic = parser.accepts("magic", "loads jar as a magic extension")

  // Lazy because the outputStream is not provided at construction
  private def printStream = new PrintStream(outputStream)

  // (the body of the execute method is partially elided on the source page; the
  // fragment below resumes mid-method, inside the download/cache branch)
      )
    } else {
      downloadFile(
        new URL(jarRemoteLocation),
        new File(downloadLocation).toURI.toURL
      )
    }

    // Report download finished
    printStream.println(s"Finished download of $jarName")
  } else {
    printStream.println(s"Using cached version of $jarName")
  }

  // validate jar file
  if (!isValidJar(fileDownloadLocation)) {
    throw new IllegalArgumentException(s"Jar '$jarName' is not valid.")
  }

  if (_magic) {
    val plugins = pluginManager.loadPlugins(fileDownloadLocation)
    pluginManager.initializePlugins(plugins)
  } else {
    kernel.addJars(fileDownloadLocation.toURI)
  }
  }
}
Example 35
Source File: Truncation.scala From incubator-toree with Apache License 2.0

package org.apache.toree.magic.builtin

import org.apache.toree.magic.LineMagic
import org.apache.toree.magic.dependencies.IncludeOutputStream
import java.io.PrintStream
import org.apache.toree.kernel.api.KernelOptions
import org.apache.toree.plugins.annotations.Event

class Truncation extends LineMagic with IncludeOutputStream {
  private def printStream = new PrintStream(outputStream)

  @Event(name = "truncation")
  override def execute(code: String): Unit = {
    code match {
      case "on" =>
        printStream.println(s"Output WILL be truncated.")
        KernelOptions.noTruncation = false
      case "off" =>
        printStream.println(s"Output will NOT be truncated")
        KernelOptions.noTruncation = true
      case "" =>
        printStream.println(s"Truncation is currently ${if (KernelOptions.noTruncation) "off" else "on"} ")
      case other =>
        printStream.println(s"${other} is not a valid option for the NoTruncation magic.")
    }
  }
}
Example 36
Source File: ShowOutput.scala From incubator-toree with Apache License 2.0

package org.apache.toree.magic.builtin

import java.io.PrintStream

import org.apache.toree.kernel.api.KernelOptions
import org.apache.toree.magic.LineMagic
import org.apache.toree.magic.dependencies.IncludeOutputStream
import org.apache.toree.plugins.annotations.Event

class ShowOutput extends LineMagic with IncludeOutputStream {
  private def printStream = new PrintStream(outputStream)

  @Event(name = "showoutput")
  override def execute(code: String): Unit = {
    code match {
      case "on" =>
        printStream.println(s"Console output WILL be shown.")
        KernelOptions.showOutput = true
      case "off" =>
        printStream.println(s"Console output will NOT be shown.")
        KernelOptions.showOutput = false
      case "" =>
        printStream.println(s"Console output display is currently ${if (KernelOptions.showOutput) "on" else "off"}.")
      case other =>
        printStream.println(s"${other} is not a valid option for the ShowOutput magic.")
    }
  }
}
Example 37
Source File: ShowTypes.scala From incubator-toree with Apache License 2.0

package org.apache.toree.magic.builtin

import org.apache.toree.magic.LineMagic
import org.apache.toree.magic.dependencies.IncludeOutputStream
import java.io.PrintStream
import org.apache.toree.kernel.api.KernelOptions
import org.apache.toree.plugins.annotations.Event

class ShowTypes extends LineMagic with IncludeOutputStream {
  private def printStream = new PrintStream(outputStream)

  @Event(name = "showtypes")
  override def execute(code: String): Unit = {
    code match {
      case "on" =>
        printStream.println(s"Types will be printed.")
        KernelOptions.showTypes = true
      case "off" =>
        printStream.println(s"Types will not be printed")
        KernelOptions.showTypes = false
      case "" =>
        printStream.println(s"ShowTypes is currently ${if (KernelOptions.showTypes) "on" else "off"} ")
      case other =>
        printStream.println(s"${other} is not a valid option for the ShowTypes magic.")
    }
  }
}
Example 38
Source File: JavaScript.scala From incubator-toree with Apache License 2.0

package org.apache.toree.magic.builtin

import java.io.PrintStream

import com.google.common.base.Strings
import org.apache.toree.kernel.protocol.v5.MIMEType
import org.apache.toree.magic._
import org.apache.toree.magic.dependencies.IncludeOutputStream
import org.apache.toree.utils.ArgumentParsingSupport
import org.slf4j.LoggerFactory
import org.apache.toree.plugins.annotations.Event

class JavaScript extends CellMagic with ArgumentParsingSupport
  with IncludeOutputStream {

  // Lazy because the outputStream is not provided at construction
  private def printStream = new PrintStream(outputStream)

  @Event(name = "javascript")
  override def execute(code: String): CellMagicOutput = {
    def printHelpAndReturn: CellMagicOutput = {
      printHelp(printStream, """%JavaScript <string_code>""")
      CellMagicOutput()
    }

    Strings.isNullOrEmpty(code) match {
      case true => printHelpAndReturn
      case false => CellMagicOutput(MIMEType.ApplicationJavaScript -> code)
    }
  }
}
Example 39
Source File: Html.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.magic.builtin

import java.io.PrintStream

import org.apache.toree.kernel.protocol.v5.MIMEType
import org.apache.toree.magic._
import org.apache.toree.magic.dependencies.IncludeOutputStream
import org.apache.toree.utils.ArgumentParsingSupport
import com.google.common.base.Strings
import org.apache.toree.plugins.annotations.Event

class Html extends CellMagic with ArgumentParsingSupport
  with IncludeOutputStream {

  // Lazy because the outputStream is not provided at construction
  private def printStream = new PrintStream(outputStream)

  @Event(name = "html")
  override def execute(code: String): CellMagicOutput = {
    def printHelpAndReturn: CellMagicOutput = {
      printHelp(printStream, """%%Html <string_code>""")
      CellMagicOutput()
    }

    Strings.isNullOrEmpty(code) match {
      case true  => printHelpAndReturn
      case false => CellMagicOutput(MIMEType.TextHtml -> code)
    }
  }
}
Example 40
Source File: StreamState.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.global

import java.io.{InputStream, OutputStream, PrintStream}

// NOTE: this listing is an excerpt -- the StreamState object also defines the
// _inputStream/_outputStream/_errorStream fields and the init()/reset()
// methods referenced below; only withStreams survives here.
object StreamState {
  def withStreams[T](thunk: => T): T = {
    init(_inputStream, _outputStream, _errorStream)

    val returnValue = Console.withIn(_inputStream) {
      Console.withOut(_outputStream) {
        Console.withErr(_errorStream) {
          thunk
        }
      }
    }

    reset()

    returnValue
  }
}
Example 41
Source File: ArgumentParsingSupport.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.utils

import joptsimple.{OptionSpec, OptionParser}

import scala.collection.JavaConverters._
import scala.language.implicitConversions
import java.io.{PrintStream, OutputStream}

trait ArgumentParsingSupport {
  protected lazy val parser = new OptionParser()
  private var options: joptsimple.OptionSet = _
  parser.allowsUnrecognizedOptions()

  def parseArgs(args: String, delimiter: String = " ") = {
    options = parser.parse(args.split(delimiter): _*)
    options.nonOptionArguments().asScala.map(_.toString)
  }

  def printHelp(outputStream: OutputStream, usage: String) = {
    val printStream = new PrintStream(outputStream)
    printStream.println(s"Usage: $usage\n")
    parser.printHelpOn(outputStream)
  }

  implicit def has[T](spec: OptionSpec[T]): Boolean = {
    require(options != null, "Arguments not parsed yet!")
    options.has(spec)
  }

  implicit def get[T](spec: OptionSpec[T]): Option[T] = {
    require(options != null, "Arguments not parsed yet!")
    Some(options.valueOf(spec)).filter(_ != null)
  }

  // NOTE: Cannot be implicit as conflicts with get
  def getAll[T](spec: OptionSpec[T]): Option[List[T]] = {
    require(options != null, "Arguments not parsed yet!")
    Some(options.valuesOf(spec).asScala.toList).filter(_ != null)
  }
}
Example 42
Source File: AvroCodegen.scala From scavro with Apache License 2.0 | 5 votes |
package org.oedura.scavro.plugin

import java.io.PrintStream

import org.apache.avro.tool.{IdlTool, SpecificCompilerTool, Tool}
import sbt._

import scala.collection.JavaConversions._

class AvroCodegen(outputDir: File, tmpDir: File, verbose: Boolean) {
  val compilerTool = new SpecificCompilerTool()
  val idlTool = new IdlTool()
  val outputPath = outputDir.getAbsolutePath

  def run(idlFiles: Seq[File], protocolFiles: Seq[File], schemaFiles: Seq[File]) = {
    idlFiles.foreach(compileIDL)
    protocolFiles.foreach(compileProtocol)
    compileSchema(schemaFiles)
  }

  def outputStream(default: PrintStream) = {
    if (verbose) default else NullOutputStream.getPrintStream
  }

  def runTool(tool: Tool, args: List[String]) = {
    val params = args.toBuffer[String]
    tool.run(System.in, outputStream(System.out), outputStream(System.err), args)
  }

  def getTmpFile(target: File): File = {
    if (tmpDir.isDirectory) {
      val idlFileNameRegex = """(.*)\.avdl""".r
      val tmpFileName = target.getName match {
        case idlFileNameRegex(fname) => s"$fname.avpr"
      }
      tmpDir / tmpFileName
    } else {
      tmpDir
    }
  }

  def compileSchema(input: Seq[File]) = if (input.nonEmpty) {
    println("compile schema: " + input)
    val compilerParams: List[String] = "schema" +: input.toList.map(_.getAbsolutePath) :+ outputPath
    runTool(compilerTool, compilerParams)
  }

  def compileProtocol(input: File) = {
    val compilerParams: List[String] = "protocol" :: input.getAbsolutePath :: outputPath :: Nil
    runTool(compilerTool, compilerParams)
  }

  def compileIDL(input: File) = {
    val tmpFile = getTmpFile(input)
    val idlParams = input.getAbsolutePath :: tmpFile.getAbsolutePath :: Nil
    runTool(idlTool, idlParams)
    compileProtocol(tmpFile)
    tmpFile.delete()
  }
}

object AvroCodegen {
  def apply(outputDir: File, tmpDir: File, verbose: Boolean = false) =
    new AvroCodegen(outputDir, tmpDir, verbose)
}
Example 43
Source File: SparkSQLEnv.scala From sparkoscope with Apache License 2.0 | 5 votes |
package org.apache.spark.sql.hive.thriftserver

import java.io.PrintStream

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.internal.Logging
import org.apache.spark.sql.{SparkSession, SQLContext}
import org.apache.spark.sql.hive.{HiveSessionState, HiveUtils}
import org.apache.spark.util.Utils

// NOTE: this listing is an excerpt -- the SparkSQLEnv object's sparkContext
// and sqlContext fields and its init() method are elided; only stop()
// survives here.
private[hive] object SparkSQLEnv extends Logging {
  def stop() {
    logDebug("Shutting down Spark SQL Environment")
    // Stop the SparkContext
    if (SparkSQLEnv.sparkContext != null) {
      sparkContext.stop()
      sparkContext = null
      sqlContext = null
    }
  }
}
Example 44
Source File: Distribution.scala From sparkoscope with Apache License 2.0 | 5 votes |
package org.apache.spark.util

import java.io.PrintStream

import scala.collection.immutable.IndexedSeq

// NOTE: this listing is an excerpt -- the Distribution class's constructor,
// data fields, statCounter, and instance-level showQuantiles are elided; the
// constructor shape is implied by the companion's apply below.
private[spark] class Distribution(data: Traversable[Double]) {
  def summary(out: PrintStream = System.out) {
    // scalastyle:off println
    out.println(statCounter)
    showQuantiles(out)
    // scalastyle:on println
  }
}

private[spark] object Distribution {

  def apply(data: Traversable[Double]): Option[Distribution] = {
    if (data.size > 0) {
      Some(new Distribution(data))
    } else {
      None
    }
  }

  def showQuantiles(out: PrintStream = System.out, quantiles: Traversable[Double]) {
    // scalastyle:off println
    out.println("min\t25%\t50%\t75%\tmax")
    quantiles.foreach{q => out.print(q + "\t")}
    out.println
    // scalastyle:on println
  }
}
Example 45
Source File: Distribution.scala From SparkCore with Apache License 2.0 | 5 votes |
package org.apache.spark.util

import java.io.PrintStream

import scala.collection.immutable.IndexedSeq

// NOTE: excerpt -- as in Example 44, the Distribution class's other members
// (constructor, statCounter, instance showQuantiles) are elided.
private[spark] class Distribution(data: Traversable[Double]) {
  def summary(out: PrintStream = System.out) {
    out.println(statCounter)
    showQuantiles(out)
  }
}

private[spark] object Distribution {

  def apply(data: Traversable[Double]): Option[Distribution] = {
    if (data.size > 0) {
      Some(new Distribution(data))
    } else {
      None
    }
  }

  def showQuantiles(out: PrintStream = System.out, quantiles: Traversable[Double]) {
    out.println("min\t25%\t50%\t75%\tmax")
    quantiles.foreach{q => out.print(q + "\t")}
    out.println
  }
}
Example 46
Source File: OutputMatchers.scala From sangria with Apache License 2.0 | 5 votes |
package sangria.util

import java.io.{PrintStream, ByteArrayOutputStream}

import org.scalatest.matchers.should.Matchers

trait OutputMatchers extends Matchers {

  def captureStdErr(fn: => Unit) = {
    val output = new ByteArrayOutputStream()
    val printStream = new PrintStream(output)
    val oldErr = System.err

    try {
      System.setErr(printStream)
      fn
    } finally {
      System.setErr(oldErr)
      printStream.flush()
      printStream.close()
    }

    output.toString("UTF-8")
  }

  def captureConsoleOut(fn: => Unit) = {
    val output = new ByteArrayOutputStream()

    Console.withOut(output) {
      fn
    }

    output.toString("UTF-8")
  }
}
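A minimal usage sketch of captureStdErr inside a ScalaTest suite; the suite and test body below are illustrative, not part of the source above:

import org.scalatest.flatspec.AnyFlatSpec
import sangria.util.OutputMatchers

class StdErrCaptureSpec extends AnyFlatSpec with OutputMatchers {
  "captureStdErr" should "return what was written to System.err" in {
    // System.err is temporarily redirected while the block runs
    val captured = captureStdErr {
      System.err.println("boom")
    }
    captured should include("boom")
  }
}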
Example 47
Source File: Runner.scala From avrohugger with Apache License 2.0 | 5 votes |
package avrohugger
package tool

import format.abstractions.SourceFormat
import format.{Scavro, SpecificRecord, Standard}
import java.util.Arrays
import java.util.Map
import java.util.TreeMap
import java.io.{InputStream, PrintStream}
import org.apache.avro.tool.Tool

import scala.util.{Failure, Success, Try}
import scala.collection.JavaConverters._

// NOTE: this listing is an excerpt -- the Runner class header (it is
// constructed as `new Runner(in, out, err)` in the spec below) and its
// toolsMap/maxLen members are elided; only run survives here.
class Runner(in: InputStream, out: PrintStream, err: PrintStream) {
  def run(args: Array[String]): Int = {
    if (args.length != 0) {
      val tool: Tool = toolsMap.get(args(0))
      if (tool != null) {
        val result = Try {
          tool.run(in, out, err, Arrays.asList(args: _*).subList(1, args.length))
        }
        result match {
          case Success(0) => 0
          case Success(exitCode) =>
            err.println("Tool " + args(0) + " failed with exit code " + exitCode)
            exitCode
          case Failure(e) =>
            err.println("Tool " + args(0) + " failed: " + e.toString)
            1
        }
      } else {
        err.println("Unknown tool: " + args(0))
        1
      }
    } else {
      err.println("----------------")
      err.println("Available tools:")
      for (k <- toolsMap.asScala.values) {
        err.printf("%" + maxLen + "s  %s\n", k.getName(), k.getShortDescription())
      }
      1
    }
  }
}
Example 48
Source File: MainSpec.scala From avrohugger with Apache License 2.0 | 5 votes |
import java.io.{ByteArrayOutputStream, PrintStream}

import avrohugger.tool.{Directory, Runner}
import org.specs2._

import scala.collection.JavaConverters._

class MainSpec extends mutable.Specification {

  "tool descriptions fit in 80 characters" in {
    val r: Runner = new Runner(null, null, null)
    val descResults = r.toolsMap.values().asScala.map(t => {
      if (r.maxLen + 2 + t.getShortDescription().length() > 80) true else false
    })
    //make sure there is no tool that didn't pass the desc. length test
    descResults.exists(x => x == true) === false
  }

  "successful runs yield zero exit code" in {
    val errBytes: ByteArrayOutputStream = new ByteArrayOutputStream()
    val r: Runner = new Runner(null, null, new PrintStream(errBytes))
    val exitCode = r.run(Array("generate", "schema",
      Directory.TEST_INPUT_DIR + "/handle.avsc",
      Directory.TEST_OUTPUT_BASE_DIR))
    exitCode === 0
  }

  "not print anything to stderr" in {
    val errBytes: ByteArrayOutputStream = new ByteArrayOutputStream()
    val r: Runner = new Runner(null, null, new PrintStream(errBytes))
    r.run(Array("generate", "schema",
      Directory.TEST_INPUT_DIR + "/handle.avsc",
      Directory.TEST_OUTPUT_BASE_DIR))
    errBytes.toString() === ""
  }

  "invalid tool names yield non-zero exit code" in {
    val errBytes: ByteArrayOutputStream = new ByteArrayOutputStream()
    val r: Runner = new Runner(null, null, new PrintStream(errBytes))
    val exitCode = r.run(Array("no-such-tool", "schema",
      Directory.TEST_INPUT_DIR + "/handle.avsc",
      Directory.TEST_OUTPUT_BASE_DIR))
    exitCode !== 0
  }

  "print something to stderr" in {
    val errBytes: ByteArrayOutputStream = new ByteArrayOutputStream()
    val r: Runner = new Runner(null, null, new PrintStream(errBytes))
    r.run(Array("no-such-tool", "schema",
      Directory.TEST_INPUT_DIR + "/handle.avsc",
      Directory.TEST_OUTPUT_BASE_DIR))
    errBytes.toString() must contain("Unknown tool: no-such-tool")
  }

  "invalid input yield non-zero exit code" in {
    val errBytes: ByteArrayOutputStream = new ByteArrayOutputStream()
    val r: Runner = new Runner(null, null, new PrintStream(errBytes))
    val exitCode = r.run(Array("generate", "schema",
      Directory.TEST_INPUT_DIR + "/invalid.avsc",
      Directory.TEST_OUTPUT_BASE_DIR))
    exitCode !== 0
  }

  "print something to stderr" in {
    val errBytes: ByteArrayOutputStream = new ByteArrayOutputStream()
    val r: Runner = new Runner(null, null, new PrintStream(errBytes))
    r.run(Array("generate", "schema",
      Directory.TEST_INPUT_DIR + "/invalid.avsc",
      Directory.TEST_OUTPUT_BASE_DIR))
    errBytes.toString() must contain("invalid_type")
  }
}
Example 49
Source File: DynaMLSSH.scala From DynaML with Apache License 2.0 | 5 votes |
package io.github.mandar2812.dynaml

import java.io.{InputStream, OutputStream, PrintStream}

import ammonite.ops.Path
import ammonite.runtime.Storage
import ammonite.sshd.{SshServer, SshServerConfig}
import ammonite.sshd.util.Environment
import ammonite.util.{Bind, Colors}

class DynaMLSSH(
  sshConfig: SshServerConfig,
  predef: String = "",
  defaultPredef: Boolean = true,
  wd: os.Path = os.pwd,
  replArgs: Seq[Bind[_]] = Nil,
  classLoader: ClassLoader = DynaMLSSH.getClass.getClassLoader) {

  private lazy val sshd = SshServer(
    sshConfig,
    shellServer = DynaMLSSH.runRepl(
      sshConfig.ammoniteHome,
      predef,
      defaultPredef,
      wd,
      replArgs,
      classLoader
    )
  )

  def port = sshd.getPort
  def start(): Unit = sshd.start()
  def stop(): Unit = sshd.stop()
  def stopImmediately(): Unit = sshd.stop(true)
}

object DynaMLSSH {
  // Actually runs a repl inside of session serving a remote user shell.
  private def runRepl(
    homePath: os.Path,
    predefCode: String,
    defaultPredef: Boolean,
    wd: os.Path,
    replArgs: Seq[Bind[_]],
    replServerClassLoader: ClassLoader
  )(in: InputStream, out: OutputStream): Unit = {
    // since sshd server has it's own customised environment,
    // where things like System.out will output to the
    // server's console, we need to prepare individual environment
    // to serve this particular user's session
    Environment.withEnvironment(Environment(replServerClassLoader, in, out)) {
      try {
        DynaML(
          predefCode = predefCode,
          predefFile = None,
          defaultPredef = defaultPredef,
          storageBackend = new Storage.Folder(homePath),
          wd = wd,
          inputStream = in,
          outputStream = out,
          errorStream = out,
          verboseOutput = false,
          remoteLogging = false,
          colors = Colors.Default
        ).run(replArgs: _*)
      } catch {
        case any: Throwable =>
          val sshClientOutput = new PrintStream(out)
          sshClientOutput.println("What a terrible failure, DynaML just blew up!")
          any.printStackTrace(sshClientOutput)
      }
    }
  }
}
Example 50
Source File: UpdateEclipseClasspaths.scala From incubator-daffodil with Apache License 2.0 | 5 votes |
import scala.xml._
import java.io.PrintStream
import scala.language.reflectiveCalls

// NOTE: this listing is an excerpt -- the enclosing object's other members are
// elided, including the XML pretty-printer `pp`, the parsed classpath node
// `cpNode`, and the classpath file `cpf` used below. The method name
// updateOneClasspath is hypothetical; the original method header was lost
// when the listing was extracted.
object UpdateEclipseClasspaths {

  def updateOneClasspath(): Unit = {
    val fixedCpNode = XML.loadString(pp.format(cpNode))
    val cpes = (fixedCpNode \\ "classpathentry")
    val newEntries = cpes :+
      <classpathentry combineaccessrules="false" kind="src" path="/daffodil-macro-lib"/>
    val newCP =
      <classpath>
        <!-- This file is updated by the UpdateEclipseClasspath app. -->
        { newEntries }
      </classpath>
    writeXMLFile(newCP, cpf.toString)
  }

  def writeXML(xml: Node, out: { def print(s: String): Unit } = System.out): Unit = {
    val formattedSpec = pp.format(xml)
    out.print("<?xml version='1.0' encoding='UTF-8'?>\n")
    out.print("\n")
    out.print(formattedSpec)
    out.print("\n")
  }

  def writeXMLFile(xml: Node, outputFilename: String): Unit = {
    val f = new java.io.File(outputFilename)
    f.getParentFile().mkdirs()
    val ps = new PrintStream(f)
    writeXML(xml, ps)
    ps.close()
  }
}
Example 51
Source File: ProgressLoggerTest.scala From fgbio with MIT License | 5 votes |
package com.fulcrumgenomics.util

import java.io.{ByteArrayOutputStream, PrintStream}
import java.nio.charset.StandardCharsets

import com.fulcrumgenomics.bam.api.SamRecord
import com.fulcrumgenomics.commons.util.Logger
import com.fulcrumgenomics.testing.UnitSpec
import com.fulcrumgenomics.vcf.api.Variant
import org.scalatest.concurrent.PatienceConfiguration.Interval

class ProgressLoggerTest extends UnitSpec {

  private class LoggerHelper extends Logger(this.getClass) {
    private val baos = new ByteArrayOutputStream()
    out = Some(new PrintStream(baos, true, "UTF-8"))
    def lines: IndexedSeq[String] =
      new String(baos.toByteArray, StandardCharsets.UTF_8).split('\n').toIndexedSeq
  }

  // For Scala 2.12 compatibility
  private def emptyIterator[T]: Iterator[T] = Iterator.empty

  private val progressLogger = ProgressLogger(new LoggerHelper())

  "ProgressLoggingIterator" should "wrap a SamRecord, (String, Int), Variant, and Interval" in {
    import com.fulcrumgenomics.util.ProgressLogger.ProgressLoggingIterator

    // Check typing
    emptyIterator[SamRecord].progress(progressLogger)
    emptyIterator[(String, Int)].progress(progressLogger)
    emptyIterator[Variant].progress(progressLogger)
    emptyIterator[Interval].progress(progressLogger)

    // Do an actual test
    val logger = new LoggerHelper()
    val progress = ProgressLogger(logger, unit=2)
    Iterator(("chr1", 1), ("chr2", 2), ("chr3", 3)).progress(progress).foreach(_ => ())
    val lines = logger.lines
    lines.length shouldBe 2
    lines(0) should include("chr2:2")
    lines(1) should include("chr3:3")
  }

  it should "wrap unsupported types" in {
    import com.fulcrumgenomics.util.ProgressLogger.ProgressLoggingIterator

    // Check typing
    emptyIterator[Double].progress(progressLogger)
    emptyIterator[String].progress(progressLogger)

    // Do an actual test
    val logger = new LoggerHelper()
    val progress = ProgressLogger(logger, unit=2)
    Iterator("foo", "bar", "car").progress(progress).foreach(_ => ())
    val lines = logger.lines
    lines.length shouldBe 2
    lines(0) should include("**")
  }

  "TransformedProgressLoggingIterator" should "convert items to a supported type" in {
    import com.fulcrumgenomics.util.ProgressLogger.TransformedProgressLoggingIterator

    emptyIterator[(String, String)].progress(progressLogger, (item: (String, String)) => (item._1, item._2.toInt))

    // Do an actual test
    val logger = new LoggerHelper()
    val progress = ProgressLogger(logger, unit=2)
    Iterator(("chr1", "1"), ("chr2", "2"), ("chr3", "3"))
      .progress(progress, (item: (String, String)) => (item._1, item._2.toInt))
      .foreach(_ => ())
    val lines = logger.lines
    lines.length shouldBe 2
    lines(0) should include("chr2:2")
    lines(1) should include("chr3:3")
  }
}
Example 52
Source File: UnitSpec.scala From fgbio with MIT License | 5 votes |
package com.fulcrumgenomics.testing

import java.io.PrintStream
import java.nio.file.{Files, Path}

import com.fulcrumgenomics.FgBioDef._
import com.fulcrumgenomics.bam.api.{SamRecord, SamSource}
import com.fulcrumgenomics.cmdline.FgBioTool
import com.fulcrumgenomics.commons.reflect.ReflectionUtil
import com.fulcrumgenomics.commons.util.{LazyLogging, LogLevel, Logger}
import com.fulcrumgenomics.sopt.cmdline.CommandLineProgramParser
import com.fulcrumgenomics.sopt.util.ParsingUtil
import com.fulcrumgenomics.util.Io
import com.fulcrumgenomics.vcf.api.{Variant, VcfSource}
import htsjdk.variant.variantcontext.VariantContext
import htsjdk.variant.vcf.VCFFileReader
import org.scalatest.{BeforeAndAfterAll, FlatSpec, Matchers}

import scala.reflect.ClassTag
import scala.reflect.runtime.universe._

// NOTE: this listing is an excerpt -- the UnitSpec trait that gives the file
// its name is elided; only the ErrorLogLevel helper trait survives here.
trait ErrorLogLevel extends UnitSpec with BeforeAndAfterAll {
  private var logLevel = Logger.level

  override protected def beforeAll(): Unit = {
    this.logLevel = Logger.level
    Logger.level  = LogLevel.Error
  }

  override protected def afterAll(): Unit = {
    Logger.level = LogLevel.Info
    Logger.level = this.logLevel
  }
}
Example 53
Source File: ProgressBar.scala From scaladex with BSD 3-Clause "New" or "Revised" License | 5 votes |
package ch.epfl.scala.index
package data

import me.tongfei.progressbar.{ProgressBar => PB, ProgressBarStyle}

import java.io.{PrintStream, ByteArrayOutputStream, OutputStream}

import org.slf4j.Logger

object ProgressBar {
  def apply(title: String, count: Int, logger: Logger): ProgressBar = {
    new ProgressBar(
      new PB(title, count, 1000, System.out, ProgressBarStyle.UNICODE_BLOCK),
      logger,
      count
    )
  }
}

class ProgressBar(inner: PB, logger: Logger, count: Int) {
  var c = 0
  var printed = 0

  def start(): Unit = {
    inner.start()
  }

  def step(): Unit = {
    inner.step()
    c += 1
    print()
  }

  def stepBy(n: Int): Unit = {
    inner.stepBy(n)
    c += n
    print()
  }

  def stop(): Unit = {
    inner.stop()
  }

  private def print(): Unit = {
    val pp = ((c.toDouble / count) * 100).toInt
    if (printed < pp) {
      logger.debug(pp + "%")
      printed = pp
    }
  }
}
Example 54
Source File: BatchSonatypeLogger.scala From coursier with Apache License 2.0 | 5 votes |
package coursier.publish.sonatype.logger

import java.io.PrintStream

final class BatchSonatypeLogger(out: PrintStream, verbosity: Int) extends SonatypeLogger {

  override def listingProfiles(attempt: Int, total: Int): Unit =
    if (verbosity >= 0) {
      val extra =
        if (attempt == 0) ""
        else s" (attempt $attempt / $total)"
      out.println("Listing Sonatype profiles..." + extra)
    }

  override def listedProfiles(errorOpt: Option[Throwable]): Unit = {
    val msgOpt =
      if (errorOpt.isEmpty) {
        if (verbosity >= 1) Some("Listed Sonatype profiles")
        else None
      } else
        Some("Fail to list Sonatype profiles")
    for (msg <- msgOpt)
      out.println(s"$msg")
  }
}
Example 55
Source File: BatchUploadLogger.scala From coursier with Apache License 2.0 | 5 votes |
package coursier.publish.upload.logger

import java.io.PrintStream

import coursier.publish.fileset.FileSet
import coursier.publish.upload.Upload

final class BatchUploadLogger(out: PrintStream, dummy: Boolean, isLocal: Boolean) extends UploadLogger {

  private val processing =
    if (isLocal) {
      if (dummy) "Would have tried to write"
      else "Writing"
    } else {
      if (dummy) "Would have tried to upload"
      else "Uploading"
    }

  override def uploadingSet(id: Object, fileSet: FileSet): Unit =
    out.println(s"$processing ${fileSet.elements.length} files")

  override def uploading(url: String, idOpt: Option[Object], totalOpt: Option[Long]): Unit =
    out.println(s"Uploading $url")

  override def uploaded(url: String, idOpt: Option[Object], errorOpt: Option[Upload.Error]): Unit =
    errorOpt match {
      case None =>
        out.println(s"Uploaded $url")
      case Some(err) =>
        out.println(s"Failed to upload $url: $err")
    }
}
Example 56
Source File: SimpleDownloadLogger.scala From coursier with Apache License 2.0 | 5 votes |
package coursier.publish.download.logger

import java.io.PrintStream

final class SimpleDownloadLogger(out: PrintStream, verbosity: Int) extends DownloadLogger {

  override def downloadingIfExists(url: String): Unit = {
    if (verbosity >= 2)
      out.println(s"Trying to download $url")
  }

  override def downloadedIfExists(url: String, size: Option[Long], errorOpt: Option[Throwable]): Unit =
    if (verbosity >= 2) {
      val msg =
        if (size.isEmpty)
          s"Not found : $url (ignored)"
        else if (errorOpt.isEmpty)
          s"Downloaded $url"
        else
          s"Failed to download $url"
      out.println(msg)
    } else if (verbosity >= 1) {
      if (size.nonEmpty)
        out.println(s"Downloaded $url")
    }
}
Example 57
Source File: BatchDirLogger.scala From coursier with Apache License 2.0 | 5 votes |
package coursier.publish.dir.logger

import java.io.PrintStream
import java.nio.file.Path

final class BatchDirLogger(out: PrintStream, dirName: String, verbosity: Int) extends DirLogger {

  override def reading(dir: Path): Unit =
    if (verbosity >= 0)
      out.println(s"Reading $dirName")

  override def element(dir: Path, file: Path): Unit =
    if (verbosity >= 0)
      out.println(s"Found $file")

  override def read(dir: Path, elements: Int): Unit =
    if (verbosity >= 0)
      out.println(s"Found $elements elements in $dirName")
}
Example 58
Source File: Confirm.scala From coursier with Apache License 2.0 | 5 votes |
package coursier.cli.setup

import java.io.{InputStream, PrintStream}
import java.util.{Locale, Scanner}

import coursier.util.Task
import dataclass.data

import scala.annotation.tailrec

trait Confirm {
  def confirm(message: String, default: Boolean): Task[Boolean]
}

object Confirm {

  @data class ConsoleInput(
    in: InputStream = System.in,
    out: PrintStream = System.err,
    locale: Locale = Locale.getDefault,
    @since
    indent: Int = 0
  ) extends Confirm {
    private val marginOpt = if (indent > 0) Some(" " * indent) else None

    def confirm(message: String, default: Boolean): Task[Boolean] =
      Task.delay {
        val choice = if (default) "[Y/n]" else "[y/N]"
        val message0 = marginOpt match {
          case None => message
          case Some(margin) => message.linesIterator.map(margin + _).mkString(System.lineSeparator())
        }
        out.print(s"$message0 $choice ")

        @tailrec
        def loop(): Boolean = {
          val scanner = new Scanner(in)
          val resp = scanner.nextLine()
          val resp0 = resp
            .filter(!_.isSpaceChar)
            .toLowerCase(locale)
            .distinct

          resp0 match {
            case "y" => true
            case "n" => false
            case "" => default
            case _ =>
              out.print(s"Please answer Y or N. $choice ")
              loop()
          }
        }

        loop()
      }
  }

  @data class YesToAll(
    out: PrintStream = System.err
  ) extends Confirm {
    def confirm(message: String, default: Boolean): Task[Boolean] =
      Task.delay {
        out.println(message + " [Y/n] Y")
        true
      }
  }

  def default: Confirm =
    ConsoleInput()
}
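A minimal usage sketch; this only builds the Task value, and how it is executed depends on coursier's Task runtime, which is not shown here:

import coursier.cli.setup.Confirm
import coursier.util.Task

val prompt: Confirm = Confirm.default // reads stdin, prompts on stderr
val answer: Task[Boolean] = prompt.confirm("Continue with setup?", default = true)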
Example 59
Source File: SetupStep.scala From coursier with Apache License 2.0 | 5 votes |
package coursier.cli.setup

import java.io.PrintStream

import coursier.util.Task

trait SetupStep {
  def banner: String
  def task: Task[Unit]
  def tryRevert: Task[Unit]

  final def fullTask(out: PrintStream): Task[Unit] =
    for {
      _ <- Task.delay(out.println(banner))
      _ <- task
      _ <- Task.delay(out.println())
    } yield ()
}
Example 60
Source File: Util.scala From coursier with Apache License 2.0 | 5 votes |
package coursier.cli

import java.io.PrintStream

import cats.data.ValidatedNel

object Util {

  def prematureExit(msg: String): Nothing = {
    Console.err.println(msg)
    sys.exit(255)
  }

  def prematureExitIf(cond: Boolean)(msg: => String): Unit =
    if (cond)
      prematureExit(msg)

  def exit(msg: String): Nothing = {
    Console.err.println(msg)
    sys.exit(1)
  }

  def exitIf(cond: Boolean)(msg: => String): Unit =
    if (cond)
      exit(msg)

  implicit class ValidatedExitOnError[T](private val validated: ValidatedNel[String, T]) extends AnyVal {
    def exitOnError(errStream: PrintStream = System.err, exitCode: Int = 1): T =
      validated.toEither match {
        case Left(errors) =>
          for (err <- errors.toList)
            errStream.println(err)
          sys.exit(exitCode)
        case Right(t) => t
      }
  }
}
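A minimal usage sketch for exitOnError; the cats syntax import and the sample value are illustrative:

import cats.data.ValidatedNel
import cats.syntax.validated._
import coursier.cli.Util.ValidatedExitOnError

val parsed: ValidatedNel[String, Int] = 42.validNel[String]
val value: Int = parsed.exitOnError() // prints each error to stderr and exits on Invalid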
Example 61
Source File: ConsoleProgressBarTest.scala From Argus-SAF with Apache License 2.0 | 5 votes |
package hu.ssh.progressbar.console

import java.io.{ByteArrayOutputStream, PrintStream}

import com.google.common.base.{Splitter, Strings}
import com.google.common.collect.Iterables
import hu.ssh.progressbar.ConsoleProgressBar
import org.scalatest.{FlatSpec, Matchers}

class ConsoleProgressBarTest extends FlatSpec with Matchers {
  "ProgressBar" should "output as expected" in {
    val outputstream = new ByteArrayOutputStream
    try {
      val progressBar = ConsoleProgressBar.on(new PrintStream(outputstream)).withFormat(":percent")
      progressBar.tick(0)
      assert(getLastOutput(outputstream.toString) == " 0.00")
      progressBar.tick(25)
      assert(getLastOutput(outputstream.toString) == " 25.00")
      progressBar.tick(30)
      assert(getLastOutput(outputstream.toString) == " 55.00")
      progressBar.tick(44)
      assert(getLastOutput(outputstream.toString) == " 99.00")
      progressBar.tickOne()
      assert(getLastOutput(outputstream.toString) == "100.00")
    } finally outputstream.close()
  }

  private def getLastOutput(string: String): String = {
    if (Strings.isNullOrEmpty(string)) return string
    val outputs = Splitter.on(ConsoleProgressBar.CARRIAGE_RETURN).omitEmptyStrings.split(string)
    Iterables.getLast(outputs)
  }
}
Example 62
Source File: ConsoleLogger.scala From odin with Apache License 2.0 | 5 votes |
package io.odin.loggers

import java.io.PrintStream

import cats.effect.{Sync, Timer}
import cats.syntax.all._
import io.odin.formatter.Formatter
import io.odin.{Level, Logger, LoggerMessage}

case class ConsoleLogger[F[_]: Timer](
    formatter: Formatter,
    out: PrintStream,
    err: PrintStream,
    override val minLevel: Level
)(implicit F: Sync[F])
    extends DefaultLogger[F](minLevel) {

  private def println(out: PrintStream, msg: LoggerMessage, formatter: Formatter): F[Unit] =
    F.delay(out.println(formatter.format(msg)))

  def log(msg: LoggerMessage): F[Unit] =
    if (msg.level < Level.Warn) {
      println(out, msg, formatter)
    } else {
      println(err, msg, formatter)
    }
}

object ConsoleLogger {
  def apply[F[_]: Timer: Sync](formatter: Formatter, minLevel: Level): Logger[F] =
    ConsoleLogger(formatter, scala.Console.out, scala.Console.err, minLevel)
}
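A minimal usage sketch; Formatter.default, Logger.info, and the unsafeRunSync call are assumptions about odin's and cats-effect's public API at this version, not shown in the file above:

import cats.effect.{IO, Timer}
import io.odin.{Level, Logger}
import io.odin.formatter.Formatter
import io.odin.loggers.ConsoleLogger

implicit val timer: Timer[IO] = IO.timer(scala.concurrent.ExecutionContext.global)
val logger: Logger[IO] = ConsoleLogger[IO](Formatter.default, Level.Info)
logger.info("hello").unsafeRunSync() // below Warn, so this goes to stdout; warn/error go to stderr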
Example 63
Source File: ConsoleLoggerSpec.scala From odin with Apache License 2.0 | 5 votes |
package io.odin.loggers

import java.io.{ByteArrayOutputStream, PrintStream}

import cats.effect.{IO, Timer}
import cats.syntax.all._
import io.odin.Level._
import io.odin.formatter.Formatter
import io.odin.{Level, LoggerMessage, OdinSpec}

class ConsoleLoggerSpec extends OdinSpec {
  implicit val timer: Timer[IO] = IO.timer(scala.concurrent.ExecutionContext.global)

  it should "route all messages with level <= INFO to stdout" in {
    forAll { (loggerMessage: LoggerMessage, formatter: Formatter) =>
      whenever(loggerMessage.level <= Info) {
        val outBaos = new ByteArrayOutputStream()
        val stdOut = new PrintStream(outBaos)
        val errBaos = new ByteArrayOutputStream()
        val stdErr = new PrintStream(errBaos)

        val consoleLogger = ConsoleLogger[IO](formatter, stdOut, stdErr, Level.Trace)
        consoleLogger.log(loggerMessage).unsafeRunSync()
        outBaos.toString() shouldBe (formatter.format(loggerMessage) + System.lineSeparator())
      }
    }
  }

  it should "route all messages with level >= WARN to stderr" in {
    forAll { (loggerMessage: LoggerMessage, formatter: Formatter) =>
      whenever(loggerMessage.level > Info) {
        val outBaos = new ByteArrayOutputStream()
        val stdOut = new PrintStream(outBaos)
        val errBaos = new ByteArrayOutputStream()
        val stdErr = new PrintStream(errBaos)

        val consoleLogger = ConsoleLogger[IO](formatter, stdOut, stdErr, Level.Trace)
        consoleLogger.log(loggerMessage).unsafeRunSync()
        errBaos.toString() shouldBe (formatter.format(loggerMessage) + System.lineSeparator())
      }
    }
  }
}
Example 64
Source File: ExampleTest.scala From morpheus with Apache License 2.0 | 5 votes |
package org.opencypher.morpheus.examples

import java.io.{ByteArrayOutputStream, PrintStream}
import java.net.URI

import org.junit.runner.RunWith
import org.opencypher.okapi.testing.Bag._
import org.scalatest.{BeforeAndAfterAll, FunSpec, Matchers}
import org.scalatestplus.junit.JUnitRunner

import scala.io.Source

@RunWith(classOf[JUnitRunner])
abstract class ExampleTest extends FunSpec with Matchers with BeforeAndAfterAll {

  private val oldStdOut = System.out

  protected val emptyOutput: String = ""

  protected def validate(app: => Unit, expectedOut: URI): Unit = {
    validate(app, Source.fromFile(expectedOut).mkString)
  }

  protected def validateBag(app: => Unit, expectedOut: URI): Unit = {
    val source = Source.fromFile(expectedOut)
    val expectedLines = source.getLines().toList
    val appLines = capture(app).split(System.lineSeparator())
    withClue(s"${appLines.mkString("\n")} not equal to ${expectedLines.mkString("\n")}") {
      appLines.toBag shouldEqual expectedLines.toBag
    }
  }

  protected def validate(app: => Unit, expectedOut: String): Unit = {
    capture(app) shouldEqual expectedOut
  }

  private def capture(app: => Unit): String = {
    val charset = "UTF-8"
    val outCapture = new ByteArrayOutputStream()
    val printer = new PrintStream(outCapture, true, charset)
    Console.withOut(printer)(app)
    outCapture.toString(charset)
  }

  override protected def afterAll(): Unit = {
    System.setOut(oldStdOut)
    super.afterAll()
  }
}
Example 65
Source File: PrintOptions.scala From morpheus with Apache License 2.0 | 5 votes |
package org.opencypher.okapi.impl.util

import java.io.PrintStream

object PrintOptions {
  private val DEFAULT_MAX_COLUMN_WIDTH: Int = Int.MaxValue

  implicit lazy val out: PrintOptions =
    PrintOptions(stream = Console.out, maxColumnWidth = DEFAULT_MAX_COLUMN_WIDTH)

  lazy val err: PrintOptions =
    PrintOptions(stream = Console.err, maxColumnWidth = DEFAULT_MAX_COLUMN_WIDTH)

  def current(implicit options: PrintOptions): PrintOptions =
    options
}

final case class PrintOptions(stream: PrintStream, maxColumnWidth: Int = PrintOptions.DEFAULT_MAX_COLUMN_WIDTH) {
  def stream(newStream: PrintStream): PrintOptions =
    copy(stream = newStream)

  def maxColumnWidth(maxColumnWidth: Int): PrintOptions =
    copy(maxColumnWidth = maxColumnWidth)
}
Example 66
Source File: Distribution.scala From multi-tenancy-spark with Apache License 2.0 | 5 votes |
package org.apache.spark.util

import java.io.PrintStream

import scala.collection.immutable.IndexedSeq

// NOTE: excerpt -- as in Example 44, the Distribution class's other members
// (constructor, statCounter, instance showQuantiles) are elided.
private[spark] class Distribution(data: Traversable[Double]) {
  def summary(out: PrintStream = System.out) {
    // scalastyle:off println
    out.println(statCounter)
    showQuantiles(out)
    // scalastyle:on println
  }
}

private[spark] object Distribution {

  def apply(data: Traversable[Double]): Option[Distribution] = {
    if (data.size > 0) {
      Some(new Distribution(data))
    } else {
      None
    }
  }

  def showQuantiles(out: PrintStream = System.out, quantiles: Traversable[Double]) {
    // scalastyle:off println
    out.println("min\t25%\t50%\t75%\tmax")
    quantiles.foreach{q => out.print(q + "\t")}
    out.println
    // scalastyle:on println
  }
}
Example 67
Source File: EventHistoryFileReportingSuite.scala From sparklens with Apache License 2.0 | 5 votes |
package com.qubole.sparklens.app

import java.io.{ByteArrayOutputStream, PrintStream}

import com.qubole.sparklens.TestUtils
import org.scalatest.FunSuite

class EventHistoryFileReportingSuite extends FunSuite {

  test("Reporting from sparklens and event-history should be same") {
    val eventHistoryFile = s"${System.getProperty("user.dir")}" +
      s"/src/test/event-history-test-files/local-1532512550423"

    // corresponding sparklens dump is in same location and name, but additional suffix
    val sparklensDump = TestUtils.getFileContents(eventHistoryFile + ".sparklens.json")

    validateOutput(outputFromSparklensDump(sparklensDump),
      outputFromEventHistoryReport(eventHistoryFile))
  }

  private def outputFromSparklensDump(dump: String): String = {
    val out = new ByteArrayOutputStream()
    Console.withOut(new PrintStream(out)) {
      ReporterApp.startAnalysersFromString(dump)
    }
    out.toString
  }

  private def outputFromEventHistoryReport(file: String): String = {
    val out = new ByteArrayOutputStream()
    Console.withOut(new PrintStream(out)) {
      new EventHistoryReporter(file)
    }
    out.toString
  }

  private def validateOutput(file1: String, file2: String) = {
    assert(file1.size == file2.size,
      "output size is different between eventlogs report and sparklens.json report")
    assert(file1.lines.zip(file2.lines).filterNot(x => x._1 == x._2).size == 0,
      "Report lines are not matching between eventlogs report and sparklens.json report")
  }
}
Example 68
Source File: CompatibilitySuite.scala From sparklens with Apache License 2.0 | 5 votes |
import java.io.{ByteArrayOutputStream, FileNotFoundException, PrintStream}

import com.qubole.sparklens.TestUtils
import com.qubole.sparklens.app.ReporterApp
import org.scalatest.FunSuite

import scala.util.control.Breaks._

class CompatibilitySuite extends FunSuite {

  test("should be able to report on previously generated sparklens dumps") {
    breakable {
      // run for the versions of sparklens output saved
      (1 to 100).foreach(x => {
        try {
          val testInput = TestUtils.getFileContents(
            s"${System.getProperty("user.dir")}/src/test/compatibility-files/version-${x}.json")
          val testOut = new ByteArrayOutputStream()
          Console.withOut(new PrintStream(testOut)) {
            ReporterApp.startAnalysersFromString(testInput)
          }
          val testOutput = testOut.toString
          val olderOutput = TestUtils.getFileContents(
            s"${System.getProperty("user.dir")}/src/test/compatibility-files/version-${x}.output")
          olderOutput.split("\n").foreach(line => {
            assert(testOutput.contains(line))
          })
        } catch {
          case e: FileNotFoundException => break
        }
      })
    }
  }
}
Example 69
Source File: ClientInterface.scala From iolap with Apache License 2.0 | 5 votes |
package org.apache.spark.sql.hive.client

import java.io.PrintStream
import java.util.{Map => JMap}

import org.apache.spark.sql.catalyst.analysis.{NoSuchDatabaseException, NoSuchTableException}

private[hive] case class HiveDatabase(
    name: String,
    location: String)

private[hive] abstract class TableType { val name: String }
private[hive] case object ExternalTable extends TableType { override val name = "EXTERNAL_TABLE" }
private[hive] case object IndexTable extends TableType { override val name = "INDEX_TABLE" }
private[hive] case object ManagedTable extends TableType { override val name = "MANAGED_TABLE" }
private[hive] case object VirtualView extends TableType { override val name = "VIRTUAL_VIEW" }

// TODO: Use this for Tables and Partitions
private[hive] case class HiveStorageDescriptor(
    location: String,
    inputFormat: String,
    outputFormat: String,
    serde: String,
    serdeProperties: Map[String, String])

private[hive] case class HivePartition(
    values: Seq[String],
    storage: HiveStorageDescriptor)

private[hive] case class HiveColumn(name: String, hiveType: String, comment: String)

private[hive] case class HiveTable(
    specifiedDatabase: Option[String],
    name: String,
    schema: Seq[HiveColumn],
    partitionColumns: Seq[HiveColumn],
    properties: Map[String, String],
    serdeProperties: Map[String, String],
    tableType: TableType,
    location: Option[String] = None,
    inputFormat: Option[String] = None,
    outputFormat: Option[String] = None,
    serde: Option[String] = None,
    viewText: Option[String] = None) {

  @transient
  private[client] var client: ClientInterface = _

  private[client] def withClient(ci: ClientInterface): this.type = {
    client = ci
    this
  }

  def database: String = specifiedDatabase.getOrElse(sys.error("database not resolved"))

  def isPartitioned: Boolean = partitionColumns.nonEmpty

  def getAllPartitions: Seq[HivePartition] = client.getAllPartitions(this)

  // Hive does not support backticks when passing names to the client.
  def qualifiedName: String = s"$database.$name"
}

// NOTE: this listing is an excerpt -- the ClientInterface trait declares many
// more operations (getAllPartitions is used above); only reset() survives here.
private[hive] trait ClientInterface {
  def reset(): Unit
}
Example 70
Source File: SparkSQLEnv.scala From iolap with Apache License 2.0 | 5 votes |
package org.apache.spark.sql.hive.thriftserver

import java.io.PrintStream

import scala.collection.JavaConversions._

import org.apache.spark.scheduler.StatsReportListener
import org.apache.spark.sql.hive.{HiveShim, HiveContext}
import org.apache.spark.{Logging, SparkConf, SparkContext}
import org.apache.spark.util.Utils

// NOTE: excerpt -- as in Example 43, the object's fields and init() are elided.
private[hive] object SparkSQLEnv extends Logging {
  def stop() {
    logDebug("Shutting down Spark SQL Environment")
    // Stop the SparkContext
    if (SparkSQLEnv.sparkContext != null) {
      sparkContext.stop()
      sparkContext = null
      hiveContext = null
    }
  }
}
Example 71
Source File: Distribution.scala From iolap with Apache License 2.0 | 5 votes |
package org.apache.spark.util

import java.io.PrintStream

import scala.collection.immutable.IndexedSeq

// NOTE: excerpt -- as in Example 44, the Distribution class's other members
// (constructor, statCounter, instance showQuantiles) are elided.
private[spark] class Distribution(data: Traversable[Double]) {
  def summary(out: PrintStream = System.out) {
    out.println(statCounter)
    showQuantiles(out)
  }
}

private[spark] object Distribution {

  def apply(data: Traversable[Double]): Option[Distribution] = {
    if (data.size > 0) {
      Some(new Distribution(data))
    } else {
      None
    }
  }

  def showQuantiles(out: PrintStream = System.out, quantiles: Traversable[Double]) {
    out.println("min\t25%\t50%\t75%\tmax")
    quantiles.foreach{q => out.print(q + "\t")}
    out.println
  }
}
Example 72
Source File: DefaultFunctions.scala From scala-debugger with Apache License 2.0 | 5 votes |
package org.scaladebugger.language.interpreters

import org.scaladebugger.language.models
import java.io.PrintStream

object DefaultFunctions {
  type IntpFuncArgs = Map[String, Any]
  type IntpFuncRet = Any
  type IntpFunc = (IntpFuncArgs) => IntpFuncRet

  private def GetArg(
    m: IntpFuncArgs,
    name: String,
    default: Option[Any] = None
  ): Any = {
    val value = m.get(name)

    if (value.isEmpty && default.isEmpty)
      throw new RuntimeException(s"Missing argument $name!")

    value.orElse(default).get
  }

  private val Condition = (op: String, m: IntpFuncArgs) => {
    val l = GetArg(m, "l", default = Some(models.Undefined))
    val r = GetArg(m, "r", default = Some(models.Undefined))

    op match {
      case "<"  => l.toString.toDouble < r.toString.toDouble
      case "<=" => l.toString.toDouble <= r.toString.toDouble
      case ">"  => l.toString.toDouble > r.toString.toDouble
      case ">=" => l.toString.toDouble >= r.toString.toDouble
      case "==" => l == r
      case "!=" => l != r
    }
  }

  val LessThan = Condition("<", _: IntpFuncArgs)
  val LessThanEqual = Condition("<=", _: IntpFuncArgs)
  val GreaterThan = Condition(">", _: IntpFuncArgs)
  val GreaterThanEqual = Condition(">=", _: IntpFuncArgs)
  val Equal = Condition("==", _: IntpFuncArgs)
  val NotEqual = Condition("!=", _: IntpFuncArgs)

  private val NumberOperation = (op: String, m: IntpFuncArgs) => {
    val l = GetArg(m, "l").toString.toDouble
    val r = GetArg(m, "r").toString.toDouble

    op match {
      case "+" => l + r
      case "-" => l - r
      case "*" => l * r
      case "/" => l / r
      case "%" => l % r
    }
  }

  private val StringOperation = (op: String, m: IntpFuncArgs) => {
    val l = GetArg(m, "l").toString
    val r = GetArg(m, "r").toString

    op match {
      case "++" => l ++ r
    }
  }

  val PlusPlus = StringOperation("++", _: IntpFuncArgs)
  val Plus = NumberOperation("+", _: IntpFuncArgs)
  val Minus = NumberOperation("-", _: IntpFuncArgs)
  val Multiply = NumberOperation("*", _: IntpFuncArgs)
  val Divide = NumberOperation("/", _: IntpFuncArgs)
  val Modulus = NumberOperation("%", _: IntpFuncArgs)

  private val PrintOperation = (out: PrintStream, m: IntpFuncArgs) => {
    val text = GetArg(m, "text").toString
    out.println(text)
  }

  val Print = (p: PrintStream) => PrintOperation(p, _: IntpFuncArgs)
}
Example 73
Source File: CTags.scala From sctags with Apache License 2.0 | 5 votes |
package sctags

import java.io.PrintStream

object CTags {
  private val header = List(
    "!_TAG_FILE_FORMAT\t2\t//",
    "!_TAG_FILE_SORTED\t1\t/0=unsorted, 1=sorted, 2=sorted,casefold/"
  )

  private def formatTags(t: (String, Seq[Tag])) = {
    val file = t._1
    val tags = t._2
    tags.map(tag => {
      val pos = "/^" + tag.pos.content.replace("\\","\\\\") + "$/"
      tag.name + "\t" + file + "\t" + pos + tag.fieldsString
    })
  }

  def generate(tags: Seq[(String, Seq[Tag])], output: PrintStream) {
    val tagStrings = (header ++ tags.flatMap(formatTags _).sorted).toArray
    tagStrings foreach {l => output.println(l)}
  }
}
Example 74
Source File: ETags.scala From sctags with Apache License 2.0 | 5 votes |
package sctags

import java.io.PrintStream

object ETags {
  def formatTags(tags: Seq[Tag]) = {
    val sb = new scala.collection.mutable.StringBuilder
    for (tag <- tags) {
      val pos = tag.pos
      val s = pos.content + "\u007f" + tag.name +
        "\u0001" + pos.line + "," + pos.column + "\n"
      sb append s
    }
    sb.toString
  }

  def generate(files: Seq[(String, Seq[Tag])], output: PrintStream) {
    for ((file, tags) <- files) {
      val content = formatTags(tags)
      output.println("\f")
      output.println(file + "," + content.length)
      output.print(content)
    }
  }
}
Example 75
Source File: SCTags.scala From sctags with Apache License 2.0 | 5 votes |
package sctags

import scala.tools.nsc.{Settings, Global}
import scala.tools.nsc.reporters.StoreReporter

import scala.collection.mutable.ListBuffer

import java.io.File
import java.io.PrintStream

object SCTags extends Parsing with TagGeneration {
  import FileUtils._

  var outputFile: String = "tags"
  var recurse = false
  var etags = false

  def parseOpt(args: List[String]): List[String] = args match {
    case ("-f" | "-o") :: file :: rest => outputFile = file; parseOpt(rest)
    case ("-R" | "--recurse") :: rest => recurse = true; parseOpt(rest)
    case ("-e" | "--etags") :: rest => etags = true; parseOpt(rest)
    case files => files
  }

  def error(str: String) = System.err.println("Error: " + str)

  val settings = new Settings(error)
  val reporter = new StoreReporter
  val compiler = new Global(settings, reporter)

  def run(fnames: Seq[String]) {
    val files = new ListBuffer[File]
    fnames foreach { fname =>
      val file = new File(fname)
      if (file.isDirectory) {
        if (recurse)
          files ++= listFilesRecursive(file, {(f: File) => f.getName.endsWith(".scala")})
        else
          System.err.println("Skipping directory " + fname)
      } else {
        if (file.getName.endsWith(".scala"))
          files += file
        else
          System.err.println("Skipping file " + fname)
      }
    }

    if (files.nonEmpty) {
      val tags = files.map(f => (f.getPath, generateTags(parse(f))))
      val output = outputFile match {
        case "-" => Console.out
        case "tags" if etags => new PrintStream("TAGS")
        case x => new PrintStream(x)
      }
      if (etags) {
        ETags.generate(tags, output)
      } else {
        CTags.generate(tags, output)
      }
    }
  }

  def main(args: Array[String]): Unit = {
    val fnames = parseOpt(args.toList)
    run(fnames)
  }
}
Example 76
Source File: LogFile.scala From kyuubi with Apache License 2.0 | 5 votes |
package yaooqinn.kyuubi.operation

import java.io.{BufferedReader, File, FileInputStream, FileNotFoundException, FileOutputStream, InputStreamReader, IOException, PrintStream}
import java.util.ArrayList

import scala.collection.JavaConverters._

import org.apache.commons.io.FileUtils
import org.apache.hadoop.io.IOUtils
import org.apache.kyuubi.Logging
import org.apache.spark.sql.Row

import yaooqinn.kyuubi.KyuubiSQLException

class LogFile private (
    file: File,
    private var reader: Option[BufferedReader],
    writer: PrintStream,
    @volatile private var isRemoved: Boolean = false) extends Logging {

  def this(file: File) = {
    this(file, LogFile.createReader(file, isRemoved = false), new PrintStream(new FileOutputStream(file)))
  }

  private def resetReader(): Unit = {
    reader.foreach(IOUtils.closeStream)
    reader = None
  }

  private def readResults(nLines: Long): Seq[Row] = {
    reader = reader.orElse(LogFile.createReader(file, isRemoved))

    val logs = new ArrayList[Row]()
    reader.foreach { r =>
      var i = 1
      try {
        var line: String = r.readLine()
        while ((i < nLines || nLines <= 0) && line != null) {
          logs.add(Row(line))
          line = r.readLine()
          i += 1
        }
      } catch {
        case e: FileNotFoundException =>
          val operationHandle = file.getName
          val path = file.getAbsolutePath
          val msg = if (isRemoved) {
            s"Operation[$operationHandle] has been closed and the log file $path has been removed"
          } else {
            s"Operation[$operationHandle] Log file $path is not found"
          }
          throw new KyuubiSQLException(msg, e)
      }
    }
    logs.asScala
  }

  def write(msg: String): Unit = {
    writer.print(msg)
  }

  def close(): Unit = synchronized {
    try {
      reader.foreach(_.close())
      writer.close()
      if (!isRemoved) {
        FileUtils.forceDelete(file)
        isRemoved = true
      }
    } catch {
      case e: IOException =>
        error(s"Failed to remove corresponding log file of operation: ${file.getName}", e)
    }
  }
}

object LogFile {

  def createReader(file: File, isRemoved: Boolean): Option[BufferedReader] = try {
    Option(new BufferedReader(new InputStreamReader(new FileInputStream(file))))
  } catch {
    case e: FileNotFoundException =>
      val operationHandle = file.getName
      val path = file.getAbsolutePath
      val msg = if (isRemoved) {
        s"Operation[$operationHandle] has been closed and the log file $path has been removed"
      } else {
        s"Operation[$operationHandle] Log file $path is not found"
      }
      throw new KyuubiSQLException(msg, e)
  }
}
Example 77
Source File: SparkSQLEnv.scala From spark1.52 with Apache License 2.0 | 5 votes |
package org.apache.spark.sql.hive.thriftserver

import java.io.PrintStream

import scala.collection.JavaConversions._

import org.apache.spark.scheduler.StatsReportListener
import org.apache.spark.sql.hive.HiveContext
import org.apache.spark.{Logging, SparkConf, SparkContext}
import org.apache.spark.util.Utils

// NOTE: excerpt -- as in Example 43, the object's fields and init() are elided.
private[hive] object SparkSQLEnv extends Logging {
  def stop() {
    logDebug("Shutting down Spark SQL Environment")
    // Stop the SparkContext
    if (SparkSQLEnv.sparkContext != null) {
      sparkContext.stop()
      sparkContext = null
      hiveContext = null
    }
  }
}
Example 78
Source File: Distribution.scala From spark1.52 with Apache License 2.0 | 5 votes |
package org.apache.spark.util

import java.io.PrintStream

import scala.collection.immutable.IndexedSeq

// NOTE: excerpt -- as in Example 44, the Distribution class's other members
// (constructor, statCounter, instance showQuantiles) are elided.
private[spark] class Distribution(data: Traversable[Double]) {
  def summary(out: PrintStream = System.out) {
    // scalastyle:off println
    out.println(statCounter)
    showQuantiles(out)
    // scalastyle:on println
  }
}

private[spark] object Distribution {

  def apply(data: Traversable[Double]): Option[Distribution] = {
    if (data.size > 0) {
      Some(new Distribution(data))
    } else {
      None
    }
  }

  def showQuantiles(out: PrintStream = System.out, quantiles: Traversable[Double]) {
    // scalastyle:off println
    out.println("min\t25%\t50%\t75%\tmax")
    quantiles.foreach{q => out.print(q + "\t")}
    out.println
    // scalastyle:on println
  }
}
Example 79
Source File: StyleChecker.scala From big-data-scala-spark with MIT License | 5 votes |
import sbt.File
import java.io.ByteArrayOutputStream
import java.io.PrintStream
import org.scalastyle._
import com.typesafe.config.ConfigFactory

object StyleChecker {

  val maxResult = 100

  class CustomTextOutput[T <: FileSpec](stream: PrintStream) extends Output[T] {
    private val messageHelper = new MessageHelper(ConfigFactory.load())

    var fileCount: Int = _

    override def message(m: Message[T]): Unit = m match {
      case StartWork() =>
      case EndWork() =>
      case StartFile(file) =>
        stream.print("Checking file " + file + "...")
        fileCount = 0
      case EndFile(file) =>
        if (fileCount == 0) stream.println(" OK!")
      case StyleError(file, clazz, key, level, args, line, column, customMessage) =>
        report(line, column, messageHelper.text(level.name),
          Output.findMessage(messageHelper, key, args, customMessage))
      case StyleException(file, clazz, message, stacktrace, line, column) =>
        report(line, column, "error", message)
    }

    private def report(line: Option[Int], column: Option[Int], level: String, message: String) {
      if (fileCount == 0) stream.println("")
      fileCount += 1
      stream.println(" " + fileCount + ". " + level + pos(line, column) + ":")
      stream.println(" " + message)
    }

    private def pos(line: Option[Int], column: Option[Int]): String = line match {
      case Some(lineNumber) => " at line " + lineNumber + (column match {
        case Some(columnNumber) => " character " + columnNumber
        case None => ""
      })
      case None => ""
    }
  }

  def score(outputResult: OutputResult) = {
    val penalties = outputResult.errors + outputResult.warnings
    scala.math.max(maxResult - penalties, 0)
  }

  def assess(sources: Seq[File], styleSheetPath: String): (String, Int) = {
    val configFile = new File(styleSheetPath).getAbsolutePath

    val messages = new ScalastyleChecker().checkFiles(
      ScalastyleConfiguration.readFromXml(configFile),
      Directory.getFiles(None, sources))

    val output = new ByteArrayOutputStream()
    val outputResult = new CustomTextOutput(new PrintStream(output)).output(messages)

    val msg = s"""${output.toString}
                 |Processed ${outputResult.files} file(s)
                 |Found ${outputResult.errors} errors
                 |Found ${outputResult.warnings} warnings
                 |""".stripMargin

    (msg, score(outputResult))
  }
}
Example 82
Source File: SparkSQLEnv.scala From Spark-2.3.1 with Apache License 2.0 | 5 votes |
package org.apache.spark.sql.hive.thriftserver

import java.io.PrintStream

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.internal.Logging
import org.apache.spark.sql.{SparkSession, SQLContext}
import org.apache.spark.sql.hive.{HiveExternalCatalog, HiveUtils}
import org.apache.spark.util.Utils

// NOTE: excerpt -- as in Example 43, the object's fields and init() are elided.
private[hive] object SparkSQLEnv extends Logging {
  def stop() {
    logDebug("Shutting down Spark SQL Environment")
    // Stop the SparkContext
    if (SparkSQLEnv.sparkContext != null) {
      sparkContext.stop()
      sparkContext = null
      sqlContext = null
    }
  }
}
Example 83
Source File: Distribution.scala From Spark-2.3.1 with Apache License 2.0 | 5 votes |
package org.apache.spark.util

import java.io.PrintStream

import scala.collection.immutable.IndexedSeq

// NOTE: excerpt -- as in Example 44, the Distribution class's other members
// (constructor, statCounter, instance showQuantiles) are elided.
private[spark] class Distribution(data: Traversable[Double]) {
  def summary(out: PrintStream = System.out) {
    // scalastyle:off println
    out.println(statCounter)
    showQuantiles(out)
    // scalastyle:on println
  }
}

private[spark] object Distribution {

  def apply(data: Traversable[Double]): Option[Distribution] = {
    if (data.size > 0) {
      Some(new Distribution(data))
    } else {
      None
    }
  }

  def showQuantiles(out: PrintStream = System.out, quantiles: Traversable[Double]) {
    // scalastyle:off println
    out.println("min\t25%\t50%\t75%\tmax")
    quantiles.foreach{q => out.print(q + "\t")}
    out.println
    // scalastyle:on println
  }
}
Example 84
Source File: TeeCommand.scala From shellbase with Apache License 2.0 | 5 votes |
package com.sumologic.shellbase.commands

import java.io.{FileOutputStream, PrintStream}

import com.sumologic.shellbase.ShellCommand
import com.sumologic.shellbase.cmdline.RichCommandLine._
import com.sumologic.shellbase.cmdline.{CommandLineArgument, CommandLineFlag, CommandLineOption}
import org.apache.commons.cli.{CommandLine, Options}
import org.apache.commons.io.output.TeeOutputStream

import scala.util.Try

class TeeCommand(runCommand: String => Boolean)
  extends ShellCommand("tee", "Forks the stdout of a command so it also prints to a file") {

  private val CommandArgument = new CommandLineArgument("command", 0, true)
  private val OutputFileOption = new CommandLineOption("o", "outputFile", false,
    "Filename of output file (defaults to ~/tee.out)")
  private val AppendFileFlag = new CommandLineFlag("a", "append",
    "Append the output to the file rather than overwriting it")

  override def maxNumberOfArguments = 1

  override def addOptions(opts: Options) {
    opts += CommandArgument
    opts += OutputFileOption
    opts += AppendFileFlag
  }

  import com.sumologic.shellbase.ShellBase.SubCommandExtractor

  def execute(cmdLine: CommandLine) = {
    val outputFile = cmdLine.get(OutputFileOption).getOrElse(System.getProperty("user.home") + s"/tee.out")
    val appendFile = cmdLine.checkFlag(AppendFileFlag)

    cmdLine.get(CommandArgument) match {
      case Some(SubCommandExtractor(cmd)) =>
        val fileOut = new FileOutputStream(outputFile, appendFile)
        val newOut = new PrintStream(new TeeOutputStream(Console.out, fileOut))
        val status = Console.withOut(newOut) {
          println(s"Running `$cmd` and outputting to '$outputFile' [append=$appendFile].")
          runCommand(cmd)
        }
        Try(fileOut.close())
        status
      case badCmd =>
        println(s"Usage: tee `<command>`, but found $badCmd.")
        false
    }
  }
}
Example 85
Source File: SparkSQLEnv.scala From BigDatalog with Apache License 2.0 | 5 votes |
package org.apache.spark.sql.hive.thriftserver

import java.io.PrintStream

import scala.collection.JavaConverters._

import org.apache.spark.scheduler.StatsReportListener
import org.apache.spark.sql.hive.HiveContext
import org.apache.spark.{Logging, SparkConf, SparkContext}
import org.apache.spark.util.Utils

// NOTE: excerpt -- as in Example 43, the object's fields and init() are elided.
private[hive] object SparkSQLEnv extends Logging {
  def stop() {
    logDebug("Shutting down Spark SQL Environment")
    // Stop the SparkContext
    if (SparkSQLEnv.sparkContext != null) {
      sparkContext.stop()
      sparkContext = null
      hiveContext = null
    }
  }
}
Example 86
Source File: Distribution.scala From BigDatalog with Apache License 2.0 | 5 votes |
package org.apache.spark.util

import java.io.PrintStream

import scala.collection.immutable.IndexedSeq

// NOTE: excerpt -- as in Example 44, the Distribution class's other members
// (constructor, statCounter, instance showQuantiles) are elided.
private[spark] class Distribution(data: Traversable[Double]) {
  def summary(out: PrintStream = System.out) {
    // scalastyle:off println
    out.println(statCounter)
    showQuantiles(out)
    // scalastyle:on println
  }
}

private[spark] object Distribution {

  def apply(data: Traversable[Double]): Option[Distribution] = {
    if (data.size > 0) {
      Some(new Distribution(data))
    } else {
      None
    }
  }

  def showQuantiles(out: PrintStream = System.out, quantiles: Traversable[Double]) {
    // scalastyle:off println
    out.println("min\t25%\t50%\t75%\tmax")
    quantiles.foreach{q => out.print(q + "\t")}
    out.println
    // scalastyle:on println
  }
}
Example 87
Source File: DecoratorTest.scala From Elysium with MIT License | 5 votes |
package nz.daved.elysium.core

import java.io.{ByteArrayOutputStream, PrintStream}

import org.scalatest.{FlatSpec, Matchers}

class DecoratorTest extends FlatSpec with Matchers {

  "@before" should "deal with anonymous functions passed in" in {
    val out: ByteArrayOutputStream = new ByteArrayOutputStream()
    Console.withOut(new PrintStream(out)) {
      DecoratorTestObject.world()
    }
    out.toString.stripLineEnd shouldBe "hello world"
  }

  "@after" should "deal with anonymous functions passed in" in {
    val out: ByteArrayOutputStream = new ByteArrayOutputStream()
    Console.withOut(new PrintStream(out)) {
      DecoratorTestObject.hello()
    }
    out.toString.stripLineEnd shouldBe "hello world"
  }
}
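The test relies on a generally useful pattern: pointing Console.withOut at a PrintStream over a ByteArrayOutputStream captures everything a block prints. Extracted into a standalone helper (names are illustrative):

import java.io.{ByteArrayOutputStream, PrintStream}

object CaptureStdout {
  // runs `block` with Console.out redirected and returns whatever it printed
  def captureOut(block: => Unit): String = {
    val buffer = new ByteArrayOutputStream()
    Console.withOut(new PrintStream(buffer, true, "UTF-8")) {
      block
    }
    buffer.toString("UTF-8")
  }

  def main(args: Array[String]): Unit = {
    val captured = captureOut { println("hello world") }
    assert(captured.stripLineEnd == "hello world")
  }
}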
Example 88
Source File: Benchmark.scala From scalismo with Apache License 2.0 | 5 votes |
package scalismo.utils

import java.io.PrintStream

object Benchmark {
  def benchmark[R](block: => R, desc: String = "duration", out: PrintStream = System.out): R = {
    val start = System.currentTimeMillis()
    val result = block
    out.println(desc + ": " + (System.currentTimeMillis() - start) + " ms")
    result
  }
}

trait Benchmark {
  def benchmark[R](block: => R, desc: String = "duration", out: PrintStream = System.out): R =
    Benchmark.benchmark(block, desc, out)
}
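Because block is a by-name parameter, it is only evaluated inside the timing bracket, and its result is returned unchanged, so wrapping a call site is non-invasive. A usage sketch (assuming scalismo on the classpath; otherwise the Benchmark object above can be pasted in as-is):

import scalismo.utils.Benchmark

object BenchmarkUsage {
  def main(args: Array[String]): Unit = {
    // the expression is passed by name, so it runs (and is timed) inside benchmark
    val sum = Benchmark.benchmark((1L to 1000000L).sum, "sum of 1..1e6")
    println(sum) // printed after the "sum of 1..1e6: N ms" line
  }
}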
Example 89
Source File: CellExecutor.scala From polynote with Apache License 2.0 | 5 votes |
package polynote.kernel.interpreter

import java.io.{OutputStream, PrintStream}
import java.lang.reflect.InvocationHandler

import polynote.kernel.environment.{CurrentRuntime, CurrentTask, PublishResult, PublishStatus}
import polynote.kernel.util.ResultOutputStream
import polynote.kernel.{BaseEnv, ExecutionStatus, InterpreterEnv, Output, Result, ScalaCompiler, withContextClassLoader}
import polynote.messages.CellID
import polynote.runtime.KernelRuntime
import zio.{RIO, Runtime, Task, UIO, ZIO, ZLayer}
import zio.blocking.Blocking
import zio.internal.{ExecutionMetrics, Executor}

class CellExecutor(publishSync: Result => Unit, classLoader: ClassLoader, blockingExecutor: Executor) extends Executor {

  // we have to make sure that the Java console does the same thing as the Scala console, which is thread-local
  CellExecutor.initJavaConsole

  def yieldOpCount: Int = blockingExecutor.yieldOpCount

  def metrics: Option[ExecutionMetrics] = blockingExecutor.metrics

  def submit(runnable: Runnable): Boolean = {
    blockingExecutor.submit {
      new Runnable {
        def run(): Unit = {
          val console = new PrintStream(new ResultOutputStream(publishSync), true)
          withContextClassLoader(classLoader) {
            try {
              Console.withOut(console) {
                runnable.run()
              }
            } finally {
              console.close()
            }
          }
        }
      }
    }
  }

  def here: Boolean = blockingExecutor.here
}

object CellExecutor {

  // Make sure Java's console uses the thread-local mechanism of the Scala console
  // This way it can reset properly but still obey the Console.withOut mechanism
  lazy val initJavaConsole: Unit = {
    // make sure to initialize Console
    val _ = Console.out
    val dynamicOut = new OutputStream {
      override def write(b: Int): Unit = Console.out.write(b)
    }
    System.setOut(new PrintStream(dynamicOut))
  }

  def layer(classLoader: ClassLoader): ZLayer[BaseEnv with InterpreterEnv, Throwable, Blocking] =
    ZLayer.fromEffect {
      ZIO.mapN(PublishResult.access, ZIO.runtime[Any]) { (publish, runtime) =>
        ZIO.access[Blocking] { hasBlocking =>
          new Blocking.Service {
            override def blockingExecutor: Executor = new CellExecutor(
              result => runtime.unsafeRun(publish.publish1(result)),
              classLoader,
              hasBlocking.get.blockingExecutor)
          }
        }
      }.flatten
    }
}
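The initJavaConsole trick is worth isolating: System.out is a single global PrintStream, while Scala's Console.out is a thread-local that Console.withOut can swap. Routing the former through the latter makes plain System.out.println calls obey per-thread redirection. A standalone sketch:

import java.io.{ByteArrayOutputStream, OutputStream, PrintStream}

object JavaConsoleBridge {
  def main(args: Array[String]): Unit = {
    // forward every byte written to System.out to whatever Console.out currently is
    System.setOut(new PrintStream(new OutputStream {
      override def write(b: Int): Unit = Console.out.write(b)
    }, true))

    val buffer = new ByteArrayOutputStream()
    Console.withOut(new PrintStream(buffer, true)) {
      // a plain Java-style call now lands in the thread-local stream
      System.out.println("captured")
    }
    assert(buffer.toString.startsWith("captured"))
  }
}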
Example 90
Source File: MetricImplicits.scala From Mastering-Spark-for-Data-Science with MIT License | 5 votes |
package io.gzet.timeseries.timely

import java.io.PrintStream
import java.net.Socket
import java.nio.charset.StandardCharsets

import io.gzet.timeseries.SimpleConfig
import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.dstream.DStream
import org.apache.spark.{Logging, Partitioner}

object MetricImplicits extends Logging with SimpleConfig {

  def nonNegativeMod(x: Int, mod: Int): Int = {
    val rawMod = x % mod
    rawMod + (if (rawMod < 0) mod else 0)
  }

  class MetricPartitioner(partitions: Int) extends Partitioner {
    require(partitions >= 0, s"Number of partitions ($partitions) cannot be negative.")

    override def numPartitions: Int = partitions

    override def getPartition(key: Any): Int = {
      val k = key.asInstanceOf[MetricKey]
      nonNegativeMod(k.metricName.hashCode, partitions)
    }
  }

  implicit class Metrics(rdd: RDD[Metric]) {

    val partitions = rdd.partitions.length
    val partitioner = new MetricPartitioner(partitions)

    def publish() = {
      val sSortedMetricRDD = rdd filter { metric =>
        metric.tags.nonEmpty
      } map { metric =>
        (MetricKey(metric.name, metric.time), metric)
      } repartitionAndSortWithinPartitions partitioner

      sSortedMetricRDD.values foreachPartition { it: Iterator[Metric] =>
        val sock = new Socket(timelyHost, timelyPort)
        val writer = new PrintStream(sock.getOutputStream, true, StandardCharsets.UTF_8.name)
        it foreach { metric =>
          writer.println(metric.toPut)
        }
        writer.flush()
      }
    }
  }

  implicit class MetricStream(stream: DStream[Metric]) {
    def publish() = {
      stream foreachRDD { rdd =>
        rdd.publish()
      }
    }
  }
}

case class Metric(name: String, time: Long, value: Double, tags: Map[String, String], viz: Option[String] = None) {
  def toPut = {
    val vizMap = if (viz.isDefined) List("viz" -> viz.get) else List[(String, String)]()
    val strTags = vizMap.union(tags.toList).map({ case (k, v) => s"$k=$v" }).mkString(" ")
    s"put $name $time $value $strTags"
  }
}

case class MetricKey(metricName: String, metricTime: Long)

object MetricKey {
  implicit def orderingByMetricDate[A <: MetricKey]: Ordering[A] = {
    Ordering.by(fk => (fk.metricName, fk.metricTime))
  }
}
Example 91
Source File: TestUtils.scala From cats-effect with Apache License 2.0 | 5 votes |
package cats.effect.internals

import java.io.{ByteArrayOutputStream, OutputStream, PrintStream}
import java.nio.charset.StandardCharsets

import scala.util.control.NonFatal

// Excerpt: the enclosing declaration is reconstructed here for readability;
// the other helpers of cats-effect's internal TestUtils are abridged.
trait TestUtils {

  def catchSystemErrInto[T](outStream: OutputStream)(thunk: => T): T = synchronized {
    val oldErr = System.err
    val fakeErr = new PrintStream(outStream)
    System.setErr(fakeErr)
    try {
      thunk
    } finally {
      System.setErr(oldErr)
      fakeErr.close()
    }
  }
}
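The same stream swap is often wrapped so the captured bytes come back as a String; the synchronized block matters because System.err is a process-wide global and concurrent tests would otherwise race on it. A minimal self-contained variant:

import java.io.{ByteArrayOutputStream, PrintStream}

object CatchErrDemo {
  def catchSystemErr(thunk: => Unit): String = synchronized {
    val buffer = new ByteArrayOutputStream()
    val oldErr = System.err
    System.setErr(new PrintStream(buffer, true))
    try thunk
    finally System.setErr(oldErr)
    buffer.toString
  }

  def main(args: Array[String]): Unit = {
    val err = catchSystemErr { System.err.println("boom") }
    assert(err.stripLineEnd == "boom")
  }
}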
Example 92
Source File: AddDeps.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.magic.builtin

import java.io.{File, PrintStream}
import java.net.URL

import org.apache.toree.dependencies.Credentials
import org.apache.toree.magic._
import org.apache.toree.magic.dependencies._
import org.apache.toree.utils.ArgumentParsingSupport

import scala.util.Try
import org.apache.toree.plugins.annotations.Event

class AddDeps extends LineMagic with IncludeInterpreter
  with IncludeOutputStream with ArgumentParsingSupport
  with IncludeDependencyDownloader with IncludeKernel {

  private def printStream = new PrintStream(outputStream)

  private val _transitive = parser.accepts(
    "transitive", "Retrieve dependencies recursively"
  )

  private val _verbose = parser.accepts(
    "verbose", "Prints out additional information"
  )

  private val _trace = parser.accepts(
    "trace", "Prints out trace of download progress"
  )

  private val _abortOnResolutionErrors = parser.accepts(
    "abort-on-resolution-errors", "Abort (no downloads) when resolution fails"
  )

  private val _exclude = parser.accepts(
    "exclude", "exclude dependency"
  ).withRequiredArg().ofType(classOf[String])

  private val _repository = parser.accepts(
    "repository", "Adds an additional repository to available list"
  ).withRequiredArg().ofType(classOf[String])

  private val _credentials = parser.accepts(
    "credential", "Adds a credential file to be used to the list"
  ).withRequiredArg().ofType(classOf[String])

  private val _configuration = parser.accepts(
    "ivy-configuration", "Sets the Ivy configuration for the dependency; defaults to \"default\""
  ).withRequiredArg().ofType(classOf[String])

  private val _classifier = parser.accepts(
    "classifier", "Sets the dependency's classifier"
  ).withRequiredArg().ofType(classOf[String])

  @Event(name = "adddeps")
  override def execute(code: String): Unit = {
    val nonOptionArgs = parseArgs(code)
    dependencyDownloader.setPrintStream(printStream)

    val repository = getAll(_repository).getOrElse(Nil)
    val credentials = getAll(_credentials).getOrElse(Nil)
    val excludes = getAll(_exclude).getOrElse(Nil)
    val excludesSet = excludes.map((x: String) => {
      if (x.contains(":")) {
        (x.split(":")(0), x.split(":")(1))
      } else {
        (x, "*")
      }
    }: (String, String)).toSet

    val repositoriesWithCreds = dependencyDownloader.resolveRepositoriesAndCredentials(repository, credentials)

    if (nonOptionArgs.size == 3) {
      // get the jars and hold onto the paths at which they reside
      val uris = dependencyDownloader.retrieve(
        groupId = nonOptionArgs.head,
        artifactId = nonOptionArgs(1),
        version = nonOptionArgs(2),
        transitive = _transitive,
        ignoreResolutionErrors = !_abortOnResolutionErrors,
        extraRepositories = repositoriesWithCreds,
        verbose = _verbose,
        trace = _trace,
        excludes = excludesSet,
        configuration = get(_configuration),
        artifactClassifier = get(_classifier)
      )

      // pass the new Jars to the kernel
      kernel.addJars(uris.filter(_.getPath.endsWith(".jar")): _*)
    } else {
      printHelp(printStream, """%AddDeps my.company artifact-id version""")
    }
  }
}
Example 93
Source File: AddJar.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.magic.builtin

import java.io.{File, PrintStream}
import java.net.{URL, URI}
import java.nio.file.{Files, Paths}
import java.util.zip.ZipFile

import org.apache.toree.magic._
import org.apache.toree.magic.builtin.AddJar._
import org.apache.toree.magic.dependencies._
import org.apache.toree.utils.{ArgumentParsingSupport, DownloadSupport, LogLike, FileUtils}
import com.typesafe.config.Config
import org.apache.hadoop.fs.Path
import org.apache.toree.plugins.annotations.Event

object AddJar {
  val HADOOP_FS_SCHEMES = Set("hdfs", "s3", "s3n", "file")

  private var jarDir: Option[String] = None

  def getJarDir(config: Config): String = {
    jarDir.getOrElse({
      jarDir = Some(
        if (config.hasPath("jar_dir") && Files.exists(Paths.get(config.getString("jar_dir")))) {
          config.getString("jar_dir")
        } else {
          FileUtils.createManagedTempDirectory("toree_add_jars").getAbsolutePath
        }
      )
      jarDir.get
    })
  }
}

class AddJar extends LineMagic with IncludeInterpreter
  with IncludeOutputStream with DownloadSupport with ArgumentParsingSupport
  with IncludeKernel with IncludePluginManager with IncludeConfig with LogLike {

  // Option to mark re-downloading of jars
  private val _force = parser.accepts("f", "forces re-download of specified jar")

  // Option to load the jar as a magic extension
  private val _magic = parser.accepts("magic", "loads jar as a magic extension")

  // Lazy because the outputStream is not provided at construction
  private def printStream = new PrintStream(outputStream)

  // NOTE: the start of the `execute` method (argument parsing, URL handling,
  // and the cache check that opens the if/else branches below) is elided in
  // this listing; the excerpt resumes mid-method.
      )
    } else {
      downloadFile(
        new URL(jarRemoteLocation),
        new File(downloadLocation).toURI.toURL
      )
    }

    // Report download finished
    printStream.println(s"Finished download of $jarName")
  } else {
    printStream.println(s"Using cached version of $jarName")
  }

  // validate jar file
  if (!isValidJar(fileDownloadLocation)) {
    throw new IllegalArgumentException(s"Jar '$jarName' is not valid.")
  }

  if (_magic) {
    val plugins = pluginManager.loadPlugins(fileDownloadLocation)
    pluginManager.initializePlugins(plugins)
  } else {
    kernel.addJars(fileDownloadLocation.toURI)
  }
  }
}
Example 94
Source File: Truncation.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.magic.builtin

import org.apache.toree.magic.LineMagic
import org.apache.toree.magic.dependencies.IncludeOutputStream
import java.io.PrintStream
import org.apache.toree.kernel.api.KernelOptions
import org.apache.toree.plugins.annotations.Event

class Truncation extends LineMagic with IncludeOutputStream {

  private def printStream = new PrintStream(outputStream)

  @Event(name = "truncation")
  override def execute(code: String): Unit = {
    code match {
      case "on" =>
        printStream.println(s"Output WILL be truncated.")
        KernelOptions.noTruncation = false
      case "off" =>
        printStream.println(s"Output will NOT be truncated")
        KernelOptions.noTruncation = true
      case "" =>
        printStream.println(s"Truncation is currently ${if (KernelOptions.noTruncation) "off" else "on"} ")
      case other =>
        printStream.println(s"${other} is not a valid option for the NoTruncation magic.")
    }
  }
}
Example 95
Source File: ShowOutput.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.magic.builtin

import java.io.PrintStream

import org.apache.toree.kernel.api.KernelOptions
import org.apache.toree.magic.LineMagic
import org.apache.toree.magic.dependencies.IncludeOutputStream
import org.apache.toree.plugins.annotations.Event

class ShowOutput extends LineMagic with IncludeOutputStream {

  private def printStream = new PrintStream(outputStream)

  @Event(name = "showoutput")
  override def execute(code: String): Unit = {
    code match {
      case "on" =>
        printStream.println(s"Console output WILL be shown.")
        KernelOptions.showOutput = true
      case "off" =>
        printStream.println(s"Console output will NOT be shown.")
        KernelOptions.showOutput = false
      case "" =>
        printStream.println(s"Console output display is currently ${if (KernelOptions.showOutput) "on" else "off"}.")
      case other =>
        printStream.println(s"${other} is not a valid option for the ShowOutput magic.")
    }
  }
}
Example 96
Source File: ShowTypes.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.magic.builtin

import org.apache.toree.magic.LineMagic
import org.apache.toree.magic.dependencies.IncludeOutputStream
import java.io.PrintStream
import org.apache.toree.kernel.api.KernelOptions
import org.apache.toree.plugins.annotations.Event

class ShowTypes extends LineMagic with IncludeOutputStream {

  private def printStream = new PrintStream(outputStream)

  @Event(name = "showtypes")
  override def execute(code: String): Unit = {
    code match {
      case "on" =>
        printStream.println(s"Types will be printed.")
        KernelOptions.showTypes = true
      case "off" =>
        printStream.println(s"Types will not be printed")
        KernelOptions.showTypes = false
      case "" =>
        printStream.println(s"ShowTypes is currently ${if (KernelOptions.showTypes) "on" else "off"} ")
      case other =>
        printStream.println(s"${other} is not a valid option for the ShowTypes magic.")
    }
  }
}
Example 97
Source File: JavaScript.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.magic.builtin

import java.io.PrintStream

import com.google.common.base.Strings
import org.apache.toree.kernel.protocol.v5.MIMEType
import org.apache.toree.magic._
import org.apache.toree.magic.dependencies.IncludeOutputStream
import org.apache.toree.utils.ArgumentParsingSupport
import org.slf4j.LoggerFactory
import org.apache.toree.plugins.annotations.Event

class JavaScript extends CellMagic with ArgumentParsingSupport
  with IncludeOutputStream {

  // Lazy because the outputStream is not provided at construction
  private def printStream = new PrintStream(outputStream)

  @Event(name = "javascript")
  override def execute(code: String): CellMagicOutput = {
    def printHelpAndReturn: CellMagicOutput = {
      printHelp(printStream, """%JavaScript <string_code>""")
      CellMagicOutput()
    }

    Strings.isNullOrEmpty(code) match {
      case true => printHelpAndReturn
      case false => CellMagicOutput(MIMEType.ApplicationJavaScript -> code)
    }
  }
}
Example 98
Source File: Html.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.magic.builtin

import java.io.PrintStream

import org.apache.toree.kernel.protocol.v5.MIMEType
import org.apache.toree.magic._
import org.apache.toree.magic.dependencies.IncludeOutputStream
import org.apache.toree.utils.ArgumentParsingSupport
import com.google.common.base.Strings
import org.apache.toree.plugins.annotations.Event

class Html extends CellMagic with ArgumentParsingSupport
  with IncludeOutputStream {

  // Lazy because the outputStream is not provided at construction
  private def printStream = new PrintStream(outputStream)

  @Event(name = "html")
  override def execute(code: String): CellMagicOutput = {
    def printHelpAndReturn: CellMagicOutput = {
      printHelp(printStream, """%%Html <string_code>""")
      CellMagicOutput()
    }

    Strings.isNullOrEmpty(code) match {
      case true => printHelpAndReturn
      case false => CellMagicOutput(MIMEType.TextHtml -> code)
    }
  }
}
Example 99
Source File: StreamState.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.global

import java.io.{InputStream, OutputStream, PrintStream}

// Excerpt: the stream fields (_inputStream, _outputStream, _errorStream) and
// the init()/reset() helpers of this object are abridged in this listing.
object StreamState {

  def withStreams[T](thunk: => T): T = {
    init(_inputStream, _outputStream, _errorStream)

    val returnValue = Console.withIn(_inputStream) {
      Console.withOut(_outputStream) {
        Console.withErr(_errorStream) {
          thunk
        }
      }
    }

    reset()
    returnValue
  }
}
Example 100
Source File: ArgumentParsingSupport.scala From incubator-toree with Apache License 2.0 | 5 votes |
package org.apache.toree.utils

import joptsimple.{OptionSpec, OptionParser}
import scala.collection.JavaConverters._
import scala.language.implicitConversions
import java.io.{PrintStream, OutputStream}

trait ArgumentParsingSupport {

  protected lazy val parser = new OptionParser()
  private var options: joptsimple.OptionSet = _
  parser.allowsUnrecognizedOptions()

  def parseArgs(args: String, delimiter: String = " ") = {
    options = parser.parse(args.split(delimiter): _*)

    options.nonOptionArguments().asScala.map(_.toString)
  }

  def printHelp(outputStream: OutputStream, usage: String) = {
    val printStream = new PrintStream(outputStream)

    printStream.println(s"Usage: $usage\n")
    parser.printHelpOn(outputStream)
  }

  implicit def has[T](spec: OptionSpec[T]): Boolean = {
    require(options != null, "Arguments not parsed yet!")
    options.has(spec)
  }

  implicit def get[T](spec: OptionSpec[T]): Option[T] = {
    require(options != null, "Arguments not parsed yet!")
    Some(options.valueOf(spec)).filter(_ != null)
  }

  // NOTE: Cannot be implicit as conflicts with get
  def getAll[T](spec: OptionSpec[T]): Option[List[T]] = {
    require(options != null, "Arguments not parsed yet!")
    Some(options.valuesOf(spec).asScala.toList).filter(_ != null)
  }
}
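The printHelp method shows the division of labor: the usage line goes through a PrintStream the caller controls, while joptsimple renders its own option table via printHelpOn. A standalone sketch of the same wiring (object and option names are illustrative):

import java.io.{OutputStream, PrintStream}
import joptsimple.OptionParser

object HelpDemo {
  def main(args: Array[String]): Unit = {
    val parser = new OptionParser()
    parser.accepts("verbose", "Prints out additional information")

    def printHelp(outputStream: OutputStream, usage: String): Unit = {
      val printStream = new PrintStream(outputStream)
      printStream.println(s"Usage: $usage\n")
      parser.printHelpOn(outputStream) // joptsimple writes its own option table
    }

    printHelp(System.out, "%MyMagic <args>")
  }
}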
Example 101
Source File: PrintStreamLogger.scala From almond with BSD 3-Clause "New" or "Revised" License | 5 votes |
package almond.logger.internal

import java.io.PrintStream

import almond.logger.Level

import scala.annotation.tailrec

final class PrintStreamLogger(val level: Level, out: PrintStream) extends ActualLogger {

  def log(level: Level, message: String, exception: Throwable = null): Unit = {

    val b = new StringBuilder
    b ++= level.name
    b += ' '
    b ++= message

    @tailrec
    def addException(ex: Throwable): Unit =
      if (ex != null) {
        b += '\n' // FIXME Not portable
        b ++= ex.toString
        for (elem <- ex.getStackTrace) {
          b ++= "\n  " // FIXME Not portable
          b ++= elem.toString
        }
        addException(ex.getCause)
      }

    addException(exception)

    out.println(b.result())
  }
}
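Level and ActualLogger are almond-specific, but the underlying idea transfers: assemble the whole entry (message plus cause chain and stack frames) in a StringBuilder and emit it with a single println, so concurrent writers cannot interleave inside one entry. A standalone variant:

import java.io.PrintStream

final class SimplePrintStreamLogger(out: PrintStream) {
  def log(levelName: String, message: String, exception: Throwable = null): Unit = {
    val b = new StringBuilder
    b ++= levelName
    b += ' '
    b ++= message
    // walk the cause chain, appending each throwable and its stack frames
    var ex = exception
    while (ex != null) {
      b += '\n'
      b ++= ex.toString
      ex.getStackTrace.foreach(elem => b ++= "\n  " + elem.toString)
      ex = ex.getCause
    }
    out.println(b.result()) // one println per entry keeps entries atomic
  }
}

object SimplePrintStreamLoggerDemo {
  def main(args: Array[String]): Unit =
    new SimplePrintStreamLogger(System.err).log("warn", "something odd", new RuntimeException("boom"))
}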
Example 102
Source File: FunctionOutputStream.scala From almond with BSD 3-Clause "New" or "Revised" License | 5 votes |
package almond.internals

import java.io.{OutputStream, PrintStream}
import java.nio.{ByteBuffer, CharBuffer}
import java.nio.charset.{Charset, CoderResult}

class FunctionOutputStream(
  inputBufferSize: Int,
  outputBufferSize: Int,
  internalCharset: Charset,
  f: String => Unit
) extends OutputStream {

  // not thread-safe

  private val decoder = internalCharset.newDecoder()

  private val inArray = Array.ofDim[Byte](inputBufferSize)
  private val outArray = Array.ofDim[Char](outputBufferSize)

  private val writeBuf = ByteBuffer.wrap(inArray)
  private val out = CharBuffer.wrap(outArray)

  private def flushIfNeeded(): Unit =
    if (!writeBuf.hasRemaining)
      flush()

  def write(b: Int): Unit = {
    writeBuf.put(b.toByte) // hope toByte doesn't box b
    flushIfNeeded()
  }

  override def write(b: Array[Byte], off: Int, len: Int) = {
    var off0 = off
    var len0 = len

    while (len0 > 0) {
      val take = math.min(len0, writeBuf.remaining())
      assert(take > 0)
      writeBuf.put(b, off0, take)
      off0 = off0 + take
      len0 = len0 - take
      flushIfNeeded()
    }
    assert(len0 == 0)
    assert(off0 == off + len)
  }

  override def flush(): Unit = {
    super.flush()

    val readBuf = ByteBuffer.wrap(inArray, 0, writeBuf.position())

    var r: CoderResult = null

    while (r == null || r.isOverflow) {
      if (r != null) {
        readBuf.position(0)
        readBuf.limit(writeBuf.position())
      }

      r = decoder.decode(readBuf, out, false)

      val outLen = out.position()

      if (r.isError || (r.isOverflow && outLen == 0))
        r.throwException()
      else {
        if (outLen > 0) {
          val s = new String(outArray, 0, outLen)
          out.clear()
          f(s)
        }

        val read = readBuf.position()
        val avail = writeBuf.position()
        val remaining = avail - read
        writeBuf.position(remaining)
        if (remaining > 0)
          System.arraycopy(inArray, read, inArray, 0, remaining)
      }
    }
  }

  def printStream(): PrintStream =
    new PrintStream(this, true, internalCharset.name())
}
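FunctionOutputStream adapts a String => Unit callback into an OutputStream, decoding buffered bytes on flush; printStream() wraps it with autoflush enabled, so each println triggers a decode-and-callback cycle. A usage sketch (assuming the class above is on the classpath; the callback here just prefixes captured chunks):

import java.nio.charset.StandardCharsets
import almond.internals.FunctionOutputStream

object FunctionOutputStreamUsage {
  def main(args: Array[String]): Unit = {
    val ps = new FunctionOutputStream(
      inputBufferSize = 128,
      outputBufferSize = 128,
      internalCharset = StandardCharsets.UTF_8,
      s => Console.err.print(s"[captured] $s") // the callback sees decoded chunks
    ).printStream()

    ps.println("hello") // autoflush: the callback fires on each println
  }
}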
Example 103
Source File: CaptureImpl.scala From almond with BSD 3-Clause "New" or "Revised" License | 5 votes |
package almond.internals

import java.io.PrintStream
import java.nio.charset.{Charset, StandardCharsets}

final class CaptureImpl(
  inputBufferSize: Int = 1024,
  outputBufferSize: Int = 1024,
  internalCharset: Charset = StandardCharsets.UTF_8
) extends Capture {

  // not thread-safe

  private var out0: String => Unit = _
  private var err0: String => Unit = _

  val out: PrintStream = new FunctionOutputStream(
    inputBufferSize,
    outputBufferSize,
    internalCharset,
    s => if (out0 != null) out0(s)
  ).printStream()

  val err: PrintStream = new FunctionOutputStream(
    inputBufferSize,
    outputBufferSize,
    internalCharset,
    s => if (err0 != null) err0(s)
  ).printStream()

  def apply[T](
    stdout: String => Unit,
    stderr: String => Unit
  )(
    block: => T
  ): T =
    try {
      out0 = stdout
      err0 = stderr

      Console.withOut(out) {
        Console.withErr(err) {
          val oldOut = System.out
          val oldErr = System.err
          try {
            System.setOut(out)
            System.setErr(err)
            block
          } finally {
            System.setOut(oldOut)
            System.setErr(oldErr)
          }
        }
      }
    } finally {
      out0 = null
      err0 = null
    }
}
Example 104
Source File: NopCapture.scala From almond with BSD 3-Clause "New" or "Revised" License | 5 votes |
package almond.internals

import java.io.PrintStream

final class NopCapture extends Capture {

  val out: PrintStream = new NopOutputStream().printStream()
  val err: PrintStream = new NopOutputStream().printStream()

  def apply[T](stdout: String => Unit, stderr: String => Unit)(block: => T): T =
    Console.withOut(out) {
      Console.withErr(err) {
        val oldOut = System.out
        val oldErr = System.err
        try {
          System.setOut(out)
          System.setErr(err)
          block
        } finally {
          System.setOut(oldOut)
          System.setErr(oldErr)
        }
      }
    }
}
Example 105
Source File: Capture.scala From almond with BSD 3-Clause "New" or "Revised" License | 5 votes |
package almond.internals

import java.io.PrintStream

trait Capture {
  def out: PrintStream
  def err: PrintStream
  def apply[T](stdout: String => Unit, stderr: String => Unit)(block: => T): T
}

object Capture {

  def create(): Capture =
    new CaptureImpl

  def nop(): Capture =
    new NopCapture
}
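Putting the three pieces together: a caller obtains a Capture, and for the duration of the block both Scala's Console streams and Java's System streams feed the supplied callbacks. A usage sketch (assuming almond's internals are accessible from the call site):

import almond.internals.Capture

object CaptureUsage {
  def main(args: Array[String]): Unit = {
    val capture = Capture.create()
    val result = capture(
      stdout = s => Console.err.print(s"out> $s"),
      stderr = s => Console.err.print(s"err> $s")
    ) {
      println("via Console")           // routed through the stdout callback
      System.out.println("via System") // also routed: System.out is swapped too
      42
    }
    assert(result == 42) // the block's result passes through unchanged
  }
}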
Example 106
Source File: ConsoleReporter.scala From drunken-data-quality with Apache License 2.0 | 5 votes |
package de.frosner.ddq.reporters

import java.io.PrintStream

import de.frosner.ddq.constraints.{ConstraintError, ConstraintFailure, ConstraintSuccess}
import de.frosner.ddq.core.CheckResult

// Excerpt: the class declaration is reconstructed here for readability
// (cf. the usage in ConsoleReporterTest below); `stream` is the PrintStream
// the reporter writes to.
case class ConsoleReporter(stream: PrintStream) extends HumanReadableReporter {

  override def report(checkResult: CheckResult, header: String, prologue: String): Unit = {
    stream.println(Console.BLUE + header + Console.RESET)
    stream.println(Console.BLUE + prologue + Console.RESET)
    if (checkResult.constraintResults.nonEmpty) {
      checkResult.constraintResults.foreach {
        case (_, constraintResult) =>
          val color = constraintResult.status match {
            case ConstraintSuccess => Console.GREEN
            case ConstraintFailure => Console.RED
            case ConstraintError(throwable) => Console.YELLOW
          }
          stream.println(color + "- " + constraintResult.message + Console.RESET)
      }
    } else {
      stream.println(Console.BLUE + "Nothing to check!" + Console.RESET)
    }
    stream.println("")
  }
}
Example 107
Source File: MarkdownReporter.scala From drunken-data-quality with Apache License 2.0 | 5 votes |
package de.frosner.ddq.reporters

import java.io.PrintStream

import de.frosner.ddq.core.CheckResult

// Excerpt: the class declaration is reconstructed here for readability
// (cf. the usage in MarkdownReporterTest below); `stream` is the PrintStream
// the reporter writes to.
case class MarkdownReporter(stream: PrintStream) extends HumanReadableReporter {

  override def report(checkResult: CheckResult, header: String, prologue: String): Unit = {
    stream.println(s"**$header**\n")
    stream.println(s"$prologue\n")
    if (checkResult.constraintResults.nonEmpty) {
      checkResult.constraintResults.foreach {
        case (_, constraintResult) =>
          stream.println(s"- *${constraintResult.status.stringValue.toUpperCase}*: " + constraintResult.message)
      }
    } else {
      stream.println("Nothing to check!")
    }
    stream.println("")
  }
}
Example 108
Source File: HumanReadableReporter.scala From drunken-data-quality with Apache License 2.0 | 5 votes |
package de.frosner.ddq.reporters

import java.io.PrintStream

import de.frosner.ddq.core.CheckResult

abstract class HumanReadableReporter extends Reporter {

  override def report(checkResult: CheckResult): Unit = {
    val check = checkResult.check
    val df = check.dataFrame
    report(
      checkResult = checkResult,
      header = s"Checking ${check.name}",
      prologue = s"It has a total number of ${df.columns.length} columns " +
        s"and ${checkResult.numRows} rows."
    )
  }

  protected def report(checkResult: CheckResult, header: String, prologue: String)
}
Example 109
Source File: ZeppelinReporter.scala From drunken-data-quality with Apache License 2.0 | 5 votes |
package de.frosner.ddq.reporters

import java.io.PrintStream

import de.frosner.ddq.constraints.{ConstraintError, ConstraintFailure, ConstraintSuccess}
import de.frosner.ddq.core.CheckResult

// Excerpt: the class declaration and the `reportedSomething` flag are
// reconstructed here for readability; the `inTd` helper (which wraps a string
// in <td> tags) remains elided in this listing.
case class ZeppelinReporter(stream: PrintStream) extends HumanReadableReporter {

  private var reportedSomething = false

  override def report(checkResult: CheckResult, header: String, prologue: String): Unit = {
    if (!reportedSomething) {
      stream.println("%html")
      reportedSomething = true
    }
    stream.println("</p>")
    stream.println(s"<h4>$header</h4>")
    stream.println(s"<h5>$prologue</h5>")
    if (checkResult.constraintResults.nonEmpty) {
      stream.println(s"<table>")
      checkResult.constraintResults.foreach {
        case (_, constraintResult) => {
          val resultString = constraintResult.status match {
            case ConstraintSuccess => "✅"
            case ConstraintFailure => "❌"
            case ConstraintError(throwable) => "✋"
          }
          stream.println(s"<tr>${inTd(resultString)}${inTd(constraintResult.message)}</tr>")
        }
      }
      stream.println(s"</table>")
    } else {
      stream.println("Nothing to check!")
    }
    stream.println("<p hidden>")
  }
}
Example 110
Source File: MarkdownReporterTest.scala From drunken-data-quality with Apache License 2.0 | 5 votes |
package de.frosner.ddq.reporters

import java.io.{ByteArrayOutputStream, PrintStream}

import de.frosner.ddq.constraints._
import de.frosner.ddq.core._
import de.frosner.ddq.testutils.{DummyConstraint, DummyConstraintResult}
import org.apache.spark.sql.DataFrame
import org.mockito.Mockito._
import org.scalatest.mock.MockitoSugar
import org.scalatest.{FlatSpec, Matchers}

class MarkdownReporterTest extends FlatSpec with Matchers with MockitoSugar {

  "A Markdown reporter" should "produce correct output for a check with constraints" in {
    val baos = new ByteArrayOutputStream()
    val markdownReporter = new MarkdownReporter(new PrintStream(baos))

    val df = mock[DataFrame]
    val dfName = "myDf"
    val dfColumns = Array("1", "2")
    val dfCount = 5
    when(df.columns).thenReturn(dfColumns)

    val header = s"Checking $dfName"
    val prologue = s"It has a total number of ${dfColumns.size} columns and $dfCount rows."

    val message1 = "1"
    val status1 = ConstraintSuccess
    val constraint1 = DummyConstraint(message1, status1)
    val result1 = constraint1.fun(df)

    val message2 = "2"
    val status2 = ConstraintFailure
    val constraint2 = DummyConstraint(message2, status2)
    val result2 = constraint2.fun(df)

    val message3 = "3"
    val status3 = ConstraintError(new IllegalArgumentException())
    val constraint3 = DummyConstraint(message3, status3)
    val result3 = DummyConstraintResult(constraint3, message3, status3)

    val constraints = Map[Constraint, ConstraintResult[Constraint]](
      constraint1 -> result1,
      constraint2 -> result2,
      constraint3 -> result3
    )
    val check = Check(df, Some(dfName), Option.empty, constraints.keys.toSeq)

    markdownReporter.report(CheckResult(constraints, check, dfCount))
    val expectedOutput = s"""**$header**

$prologue

- *SUCCESS*: ${result1.message}
- *FAILURE*: ${result2.message}
- *ERROR*: ${result3.message}

"""

    baos.toString shouldBe expectedOutput
  }

  it should "produce correct output for a check without constraint" in {
    val baos = new ByteArrayOutputStream()
    val markdownReporter = new MarkdownReporter(new PrintStream(baos))

    val df = mock[DataFrame]
    val dfName = "myDf"
    val dfColumns = Array("1", "2")
    val dfCount = 5
    when(df.columns).thenReturn(dfColumns)

    val header = s"Checking $dfName"
    val prologue = s"It has a total number of ${dfColumns.size} columns and $dfCount rows."
    val check = Check(df, Some(dfName), Option.empty, Seq.empty)

    markdownReporter.report(CheckResult(Map.empty, check, dfCount))
    val expectedOutput = s"""**$header**

$prologue

Nothing to check!

"""

    baos.toString shouldBe expectedOutput
  }
}
Example 111
Source File: ConsoleReporterTest.scala From drunken-data-quality with Apache License 2.0 | 5 votes |
package de.frosner.ddq.reporters

import java.io.{ByteArrayOutputStream, PrintStream}

import de.frosner.ddq.constraints._
import de.frosner.ddq.core._
import de.frosner.ddq.testutils.{DummyConstraint, DummyConstraintResult}
import org.apache.spark.sql.DataFrame
import org.mockito.Mockito._
import org.scalatest.mock.MockitoSugar
import org.scalatest.{FlatSpec, Matchers}

class ConsoleReporterTest extends FlatSpec with Matchers with MockitoSugar {

  "A Console reporter" should "produce correct output for a check with constraints" in {
    val baos = new ByteArrayOutputStream()
    val consoleReporter = new ConsoleReporter(new PrintStream(baos))

    val df = mock[DataFrame]
    val displayName = "myDf"
    val dfColumns = Array("1", "2")
    val dfCount = 5
    when(df.columns).thenReturn(dfColumns)

    val header = s"Checking $displayName"
    val prologue = s"It has a total number of ${dfColumns.size} columns and $dfCount rows."

    val message1 = "1"
    val status1 = ConstraintSuccess
    val constraint1 = DummyConstraint(message1, status1)
    val result1 = constraint1.fun(df)

    val message2 = "2"
    val status2 = ConstraintFailure
    val constraint2 = DummyConstraint(message2, status2)
    val result2 = constraint2.fun(df)

    val message3 = "3"
    val status3 = ConstraintError(new IllegalArgumentException())
    val constraint3 = DummyConstraint(message3, status3)
    val result3 = DummyConstraintResult(constraint3, message3, status3)

    val constraints = Map[Constraint, ConstraintResult[Constraint]](
      constraint1 -> result1,
      constraint2 -> result2,
      constraint3 -> result3
    )
    val check = Check(df, Some(displayName), Option.empty, constraints.keys.toSeq)

    consoleReporter.report(CheckResult(constraints, check, dfCount))
    val expectedOutput = s"""${Console.BLUE}$header${Console.RESET}
${Console.BLUE}$prologue${Console.RESET}
${Console.GREEN}- ${result1.message}${Console.RESET}
${Console.RED}- ${result2.message}${Console.RESET}
${Console.YELLOW}- ${result3.message}${Console.RESET}

"""

    baos.toString shouldBe expectedOutput
  }

  it should "produce correct output for a check without constraint" in {
    val baos = new ByteArrayOutputStream()
    val consoleReporter = new ConsoleReporter(new PrintStream(baos))

    val df = mock[DataFrame]
    val displayName = "myDf"
    val dfColumns = Array("1", "2")
    val dfCount = 5
    when(df.columns).thenReturn(dfColumns)

    val header = s"Checking $displayName"
    val prologue = s"It has a total number of ${dfColumns.size} columns and $dfCount rows."
    val check = Check(df, Some(displayName), Option.empty, Seq.empty)

    consoleReporter.report(CheckResult(Map.empty, check, dfCount))
    val expectedOutput = s"""${Console.BLUE}$header${Console.RESET}
${Console.BLUE}$prologue${Console.RESET}
${Console.BLUE}Nothing to check!${Console.RESET}

"""

    baos.toString shouldBe expectedOutput
  }
}
Example 112
Source File: SbtPlugin.scala From sbt-diff-project with MIT License | 5 votes |
package jp.ne.opt.sbt.diff

import java.io.PrintStream

import sbt._
import Keys._
import complete.Parsers.spaceDelimited
import com.mayreh.sbt.dependency.SbtPlugin.autoImport.reverseDependency

object SbtPlugin extends AutoPlugin {

  object autoImport extends SbtPluginKeys
  import autoImport._

  override def trigger = allRequirements

  override def projectSettings: Seq[Setting[_]] = Seq(
    gitDiffSeparator in Global := "\n",
    printGitDiffByBaseDirectory in Global := false,
    printGitDiffByAbsolutePath in Global := false,
    printGitDiffToFile in Global := None,
    excludeRootProject in Global := true,
    patternsAffectAllProjects in Global := Seq(
      """.+\.sbt$""",
      """.+project/[^/]+\.scala"""
    ),
    gitDiff := {
      import sys.process._

      val project = thisProject.value
      val files = s"git diff --name-only ${spaceDelimited("<arg>").parsed.mkString(" ")}".lines_!.toList.collect {
        case line if line.trim.nonEmpty => new File(line)
      }

      if (affectsAll(patternsAffectAllProjects.value, files) ||
        files.exists(_.absolutePath.contains(project.base.absolutePath))) {
        project +: (reverseDependency in thisProjectRef).value
      } else {
        Nil
      }
    },
    commands += Command.args("git-diff-all", "<arg>") { (state, args) =>
      val buildRoot = new File(loadedBuild.value.root.getPath)

      val (modifiedState, diffProjects) =
        loadedBuild.value.allProjectRefs.foldLeft(state -> Seq.empty[ResolvedProject]) {
          case ((currentState, projects), (ref, project)) =>
            val (s, resolvedProjects) =
              Project.extract(state).runInputTask(gitDiff in ref, s" ${args.mkString(" ")}", state)
            s -> (projects ++ resolvedProjects)
        }

      val filteredProjects = diffProjects
        .groupBy(_.id)
        .flatMap { case (_, xs) => xs.headOption }
        .filter { project =>
          !excludeRootProject.value || project.base.absolutePath != buildRoot.absolutePath
        }

      printProjects(filteredProjects.toSeq, buildRoot,
        gitDiffSeparator.value,
        printGitDiffByBaseDirectory.value,
        printGitDiffByAbsolutePath.value,
        printGitDiffToFile.value)

      modifiedState
    }
  )

  private[this] def affectsAll(patterns: Seq[String], files: Seq[File]): Boolean =
    files.exists(file => patterns.exists(pattern => file.absolutePath.matches(pattern)))

  private[this] def printProjects(projects: Seq[ResolvedProject],
                                  buildRoot: File,
                                  separator: String,
                                  byBaseDirectory: Boolean,
                                  byAbsolutePath: Boolean,
                                  toFile: Option[File]): Unit = {
    val out = toFile.fold(System.out)(new PrintStream(_))
    projects.headOption.foreach { _ =>
      val str = projects.map { project =>
        if (!byBaseDirectory) {
          project.id
        } else if (byAbsolutePath) {
          project.base.getAbsolutePath
        } else {
          buildRoot.toPath.relativize(project.base.toPath).toString
        }
      }.sorted mkString separator
      out.println(str)
    }
  }
}
Example 113
Source File: Boot.scala From BacklogMigration-Redmine with MIT License | 5 votes |
package com.nulabinc.backlog.r2b.exporter.core

import java.io.PrintStream

import com.google.inject.Guice
import com.nulabinc.backlog.migration.common.conf.ExcludeOption
import com.nulabinc.backlog.migration.common.domain.{BacklogProjectKey, BacklogTextFormattingRule}
import com.nulabinc.backlog.migration.common.utils.{ConsoleOut, Logging}
import com.nulabinc.backlog.r2b.exporter.conf.ExportConfig
import com.nulabinc.backlog.r2b.exporter.modules.RedmineModule
import com.nulabinc.backlog.r2b.exporter.service.ProjectExporter
import com.nulabinc.backlog.r2b.mapping.core.MappingContainer
import com.nulabinc.backlog.r2b.redmine.conf.RedmineApiConfiguration
import com.osinka.i18n.Messages

object Boot extends Logging {

  def execute(apiConfig: RedmineApiConfiguration,
              mappingContainer: MappingContainer,
              backlogProjectKey: BacklogProjectKey,
              backlogTextFormattingRule: BacklogTextFormattingRule,
              exclude: ExcludeOption): PrintStream = {
    try {
      val injector = Guice.createInjector(
        new RedmineModule(apiConfig, mappingContainer, backlogProjectKey, backlogTextFormattingRule, ExportConfig(exclude)))

      ConsoleOut.println(s"""
                            |${Messages("export.start")}
                            |--------------------------------------------------""".stripMargin)

      val projectExporter = injector.getInstance(classOf[ProjectExporter])
      projectExporter.boot(mappingContainer)

      ConsoleOut.println(s"""--------------------------------------------------
                            |${Messages("export.finish")}""".stripMargin)
    } catch {
      case e: Throwable =>
        ConsoleOut.error(s"${Messages("cli.error.unknown")}:${e.getMessage}")
        throw e
    }
  }
}
Example 114
Source File: ConsoleVideoDisplay.scala From jvm-toxcore-c with GNU General Public License v3.0 | 5 votes |
package im.tox.tox4j.av.callbacks.video

import java.io.PrintStream

import im.tox.tox4j.av.data.{ Height, Width }

import scala.util.{ Success, Try }

final case class ConsoleVideoDisplay(width: Width, height: Height) extends VideoDisplay[Seq[String], PrintStream] {

  override protected def canvas: Try[PrintStream] = Success(System.out)

  override protected def displaySent(canvas: PrintStream, frameNumber: Int, senderImage: Seq[String]): Unit = {
    // Don't display the sent image in text mode.
  }

  override protected def displayReceived(canvas: PrintStream, frameNumber: Int, receiverImage: Seq[String]): Unit = {
    canvas.print("\u001b[H\u001b[2J")
    receiverImage.foreach(canvas.println)
  }

  override protected def parse(
    y: Array[Byte], u: Array[Byte], v: Array[Byte],
    yStride: Int, uStride: Int, vStride: Int
  ): Seq[String] = {
    val printable = ".-~:;/<>=()ot%!?@&O8SX$#"
    for (yPos <- 0 until height.value) yield {
      new String(y.slice(yPos * yStride, yPos * yStride + width.value).map {
        case b => printable(((b & 0xff) / 255.0 * (printable.length - 1)).toInt)
      })
    }
  }

  override def close(): Unit = ()
}
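Two details do the heavy lifting here: the ANSI escape \u001b[H\u001b[2J homes the cursor and clears the terminal before each frame, and each byte of the Y (luminance) plane is mapped to a character whose visual density roughly tracks brightness. The mapping in isolation:

object AsciiLuma {
  private val printable = ".-~:;/<>=()ot%!?@&O8SX$#"

  // map one luminance byte (0..255 unsigned) to a character of roughly matching density
  def charFor(lumaByte: Byte): Char =
    printable(((lumaByte & 0xff) / 255.0 * (printable.length - 1)).toInt)

  def main(args: Array[String]): Unit = {
    val row: Array[Byte] = Array(0, 64, 127, -128, -1) // -1 is 255 unsigned
    print("\u001b[H\u001b[2J") // ANSI: cursor home + clear screen
    println(new String(row.map(charFor)))
  }
}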
Example 115
Source File: License.scala From iep-apps with Apache License 2.0 | 5 votes |
import java.io.File
import java.io.PrintStream
import java.time.ZonedDateTime
import java.time.ZoneOffset

import scala.io.Source
import sbt._

// Excerpt: the enclosing object declaration, the `lineSeparator` constant, and
// the `apache2` license-header template (a triple-quoted string whose closing
// delimiter survives below) are abridged in this listing.
object License {

  """.stripMargin.trim

  def findFiles(dir: File): Seq[File] = {
    (dir ** "*.scala").get ++ (dir ** "*.java").get
  }

  def checkLicenseHeaders(log: Logger, srcDir: File): Unit = {
    val badFiles = findFiles(srcDir).filterNot(checkLicenseHeader)
    if (badFiles.nonEmpty) {
      badFiles.foreach { f => log.error(s"bad license header: $f") }
      sys.error(s"${badFiles.size} files with incorrect header, run formatLicenseHeaders to fix")
    } else {
      log.info("all files have correct license header")
    }
  }

  def checkLicenseHeader(file: File): Boolean = {
    val lines = Source.fromFile(file, "UTF-8").getLines().toList
    checkLicenseHeader(lines)
  }

  def checkLicenseHeader(lines: List[String]): Boolean = {
    val header = lines.takeWhile(!_.startsWith("package ")).mkString(lineSeparator)
    header == apache2
  }

  def formatLicenseHeaders(log: Logger, srcDir: File): Unit = {
    findFiles(srcDir).foreach { f => formatLicenseHeader(log, f) }
  }

  def formatLicenseHeader(log: Logger, file: File): Unit = {
    val lines = Source.fromFile(file, "UTF-8").getLines().toList
    if (!checkLicenseHeader(lines)) {
      log.info(s"fixing license header: $file")
      writeLines(file, apache2 :: removeExistingHeader(lines))
    }
  }

  def removeExistingHeader(lines: List[String]): List[String] = {
    val res = lines.dropWhile(!_.startsWith("package "))
    if (res.isEmpty) lines else res
  }

  def writeLines(file: File, lines: List[String]): Unit = {
    val out = new PrintStream(file)
    try lines.foreach(out.println)
    finally out.close()
  }
}
Example 116
Source File: SchrodingerException.scala From aloha with MIT License | 5 votes |
package com.eharmony.aloha.ex

import java.io.{PrintWriter, PrintStream}

import com.eharmony.aloha.AlohaException

// Excerpt: the class header and its auxiliary constructors (including a
// no-arg form and one taking only a Throwable cause, used by the companion
// and tests below) are reconstructed here for readability.  The class throws
// itself from every accessor, so any attempt to inspect it — even printing
// the stack trace — blows up; only safeToString is safe to call.
class SchrodingerException(val message: String, cause: Throwable) extends AlohaException(message, cause) {

  override def fillInStackTrace() = this
  override def getCause() = throw this
  override def getLocalizedMessage() = throw this
  override def getMessage() = throw this
  override def getStackTrace() = throw this
  override def initCause(cause: Throwable) = throw this
  override def printStackTrace() = throw this
  override def printStackTrace(s: PrintStream) = throw this
  override def printStackTrace(s: PrintWriter) = throw this
  override def setStackTrace(stackTrace: Array[StackTraceElement]) = throw this
  override def toString() = throw this

  def safeToString() = {
    val m = Option(message) getOrElse ""
    s"SchrodingerException($m)"
  }
}

object SchrodingerException {
  val Instance = new SchrodingerException
}
Example 117
Source File: SchrodingerExceptionTest.scala From aloha with MIT License | 5 votes |
package com.eharmony.aloha.ex

import org.junit.{Before, Test}
import org.junit.Assert._

import java.io.{PrintWriter, OutputStreamWriter, ByteArrayOutputStream, PrintStream}

class SchrodingerExceptionTest {

  private[this] var ex: SchrodingerException = _

  @Before def before() {
    ex = new SchrodingerException
  }

  @Test def testFillInStackTrace() {
    assertTrue(new SchrodingerException().fillInStackTrace().isInstanceOf[SchrodingerException])
  }

  @Test(expected = classOf[SchrodingerException]) def testGetMessage() {
    ex.getMessage()
  }

  @Test(expected = classOf[SchrodingerException]) def testGetStackTrace() {
    ex.getStackTrace()
  }

  @Test(expected = classOf[SchrodingerException]) def testGetCause() {
    ex.getCause()
  }

  @Test(expected = classOf[SchrodingerException]) def testSetStackTrace() {
    ex.setStackTrace(Array.empty)
  }

  @Test(expected = classOf[SchrodingerException]) def testGetLocalizedMessage() {
    ex.getLocalizedMessage()
  }

  @Test(expected = classOf[SchrodingerException]) def testPrintStackTraceEmpty() {
    ex.printStackTrace()
  }

  @Test(expected = classOf[SchrodingerException]) def testPrintStackTraceStream() {
    val baos = new ByteArrayOutputStream()
    val ps = new PrintStream(baos)
    ex.printStackTrace(ps)
  }

  @Test(expected = classOf[SchrodingerException]) def testPrintStackTraceWriter() {
    val baos = new ByteArrayOutputStream()
    val osw = new OutputStreamWriter(baos)
    val ps = new PrintWriter(osw)
    ex.printStackTrace(ps)
  }

  @Test(expected = classOf[SchrodingerException]) def testInitCause() {
    ex.initCause(new Throwable)
  }

  @Test(expected = classOf[SchrodingerException]) def testToString() {
    ex.toString()
  }

  @Test def testNoThrowForSchrodingerExceptionWithSchrodingerExceptionCause() {
    new SchrodingerException(new SchrodingerException)
  }

  @Test def testNoThrowForSchrodingerExceptionWithExceptionCause() {
    new SchrodingerException(new Exception)
  }

  @Test(expected = classOf[SchrodingerException]) def testThrowForThrowableWithSchrodingerExceptionCause() {
    new Throwable(ex)
  }

  @Test(expected = classOf[SchrodingerException]) def testThrowForExceptionWithSchrodingerExceptionCause() {
    new Exception(ex)
  }

  @Test(expected = classOf[SchrodingerException]) def testThrowForRuntimeExceptionWithSchrodingerExceptionCause() {
    new RuntimeException(ex)
  }
}
Example 118
Source File: ActionLogging.scala From flamy with Apache License 2.0 | 5 votes |
package com.flaminem.flamy.exec.utils

import java.io.{File, PrintStream}

import com.flaminem.flamy.conf.FlamyGlobalContext
import com.flaminem.flamy.exec.utils.Workflow.Status
import com.flaminem.flamy.utils.io.ConcurrentFilePrintStream
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.FileSystem

trait ActionLogging[A <: Action] extends ParallelActionRunner[A] {

  private val LOG_PATH: String = "/../logs.csv"

  private val log: PrintStream =
    new ConcurrentFilePrintStream(
      new File(FlamyGlobalContext.getUniqueRunDir + LOG_PATH),
      FileSystem.get(new Configuration),
      true)

  private def log(v: Action, running: Status) {
    val row = Seq(System.currentTimeMillis, context.getProject, context.getEnvironment, context.dryRun, v.name, running)
    log.println(row.mkString("\t"))
  }

  override protected val workflow = new Workflow[A] with WorkflowLogging

  trait WorkflowLogging extends Workflow[A] {

    override def running(v: A) {
      log(v, Status.RUNNING)
      super.running(v)
    }

    override def successful(v: A) {
      log(v, Status.SUCCESSFUL)
      super.successful(v)
    }

    override def failed(v: A) {
      log(v, Status.FAILED)
      super.failed(v)
    }
  }
}
Example 119
Source File: DataWeaveCLITest.scala From data-weave-native with Apache License 2.0 | 5 votes |
package org.mule.weave.dwnative.cli

import java.io.ByteArrayInputStream
import java.io.ByteArrayOutputStream
import java.io.PrintStream

import org.scalatest.FreeSpec
import org.scalatest.Matchers

import scala.io.Source

class DataWeaveCLITest extends FreeSpec with Matchers {

  "should work with output application/json" in {
    val out = System.out
    try {
      val stream = new ByteArrayOutputStream()
      System.setOut(new PrintStream(stream, true))
      new DataWeaveCLIRunner().run(Array("output application/json --- (1 to 3)[0]"))
      val source = Source.fromBytes(stream.toByteArray, "UTF-8")
      val result = source.mkString
      result.trim shouldBe "1"
    } finally {
      System.setOut(out)
      println("Finish OK 3")
    }
  }

  "should work with simple script and not output" in {
    val defaultOut = System.out
    try {
      val stream = new ByteArrayOutputStream()
      System.setOut(new PrintStream(stream, true))
      new DataWeaveCLIRunner().run(Array("(1 to 3)[0]"))
      val source = Source.fromBytes(stream.toByteArray, "UTF-8")
      val result = source.mkString
      result.trim shouldBe "1"
    } finally {
      System.setOut(defaultOut)
    }
  }

  "should work ok when sending payload from stdin" in {
    val out = System.out
    val in = System.in
    try {
      val input =
        """[
          |  1,
          |  2,
          |  3
          |]
        """.stripMargin.trim
      val stream = new ByteArrayOutputStream()
      System.setOut(new PrintStream(stream, true))
      System.setIn(new ByteArrayInputStream(input.getBytes("UTF-8")))
      new DataWeaveCLIRunner().run(Array("payload[0]"))
      val source = Source.fromBytes(stream.toByteArray, "UTF-8")
      val result = source.mkString.trim
      source.close()
      result.trim shouldBe "1"
    } finally {
      System.setOut(out)
      System.setIn(in)
      println("Finish OK 2")
    }
  }

  "should work with light formats" in {
    val out = System.out
    val in = System.in
    try {
      val input =
        """[{
          |  "a" : 1,
          |  "b" : 2,
          |  "c" : 3
          |}]
        """.stripMargin.trim
      val stream = new ByteArrayOutputStream()
      System.setOut(new PrintStream(stream, true))
      System.setIn(new ByteArrayInputStream(input.getBytes("UTF-8")))
      new DataWeaveCLIRunner().run(Array("input payload json output csv header=false ---payload"))
      val source = Source.fromBytes(stream.toByteArray, "UTF-8")
      val result = source.mkString.trim
      source.close()
      result.trim shouldBe "1,2,3"
    } finally {
      System.setOut(out)
      System.setIn(in)
      println("Finish OK 2")
    }
  }
}
Example 120
Source File: MarathonArgs.scala From reactive-cli with Apache License 2.0 | 5 votes |
package com.lightbend.rp.reactivecli.argparse.marathon

import com.lightbend.rp.reactivecli.argparse.{ GenerateDeploymentArgs, InputArgs, TargetRuntimeArgs }
import com.lightbend.rp.reactivecli.json.JsonTransformExpression
import java.io.PrintStream
import scala.collection.immutable.Seq

// Excerpt: the body of the companion object is abridged in this listing; the
// Output ADT is reconstructed below only as far as the `output` default
// requires (a PipeToStream case wrapping a PrintStream).
object MarathonArgs {
  sealed trait Output
  object Output {
    case class PipeToStream(out: PrintStream) extends Output
    // … other Output cases elided …
  }
}

case class MarathonArgs(
  instances: Int = 1,
  marathonLbHaproxyGroup: String = "external",
  marathonLbHaproxyHosts: Seq[String] = Seq.empty,
  namespace: Option[String] = None,
  output: MarathonArgs.Output = MarathonArgs.Output.PipeToStream(System.out),
  registryForcePull: Boolean = false,
  transformOutput: Option[JsonTransformExpression] = None) extends TargetRuntimeArgs
Example 121
Source File: KubernetesArgs.scala From reactive-cli with Apache License 2.0 | 5 votes |
package com.lightbend.rp.reactivecli.argparse.kubernetes

import com.lightbend.rp.reactivecli.argparse.{ GenerateDeploymentArgs, InputArgs, TargetRuntimeArgs }
import com.lightbend.rp.reactivecli.json.JsonTransformExpression
import com.lightbend.rp.reactivecli.process.kubectl
import com.lightbend.rp.reactivecli.runtime.kubernetes.PodTemplate
import java.io.PrintStream
import scala.concurrent.Future

// Excerpt: the body of the companion object is abridged in this listing; as in
// the MarathonArgs example above, the Output ADT is reconstructed only as far
// as the `output` default requires (a PipeToStream case wrapping a PrintStream).
object KubernetesArgs {
  sealed trait Output
  object Output {
    case class PipeToStream(out: PrintStream) extends Output
    // … other Output cases elided …
  }
}

case class KubernetesArgs(
  generateIngress: Boolean = false,
  generateNamespaces: Boolean = false,
  generatePodControllers: Boolean = false,
  generateServices: Boolean = false,
  transformIngress: Option[JsonTransformExpression] = None,
  transformNamespaces: Option[JsonTransformExpression] = None,
  transformPodControllers: Option[JsonTransformExpression] = None,
  transformServices: Option[JsonTransformExpression] = None,
  namespace: Option[String] = None,
  podControllerArgs: PodControllerArgs = PodControllerArgs(),
  serviceArgs: ServiceArgs = ServiceArgs(),
  ingressArgs: IngressArgs = IngressArgs(),
  output: KubernetesArgs.Output = KubernetesArgs.Output.PipeToStream(System.out)) extends TargetRuntimeArgs