java.io.BufferedReader Scala Examples

The following examples show how to use java.io.BufferedReader from Scala. Each example comes from an open-source project; the source file, project, and license are noted above each listing.
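
Before the project examples, here is a minimal, self-contained sketch of the pattern most of them share: wrap a Reader in a BufferedReader and call readLine() until it returns null. The file name used below is only a placeholder.

import java.io.{BufferedReader, FileReader}

object MinimalBufferedReaderExample {
  def main(args: Array[String]): Unit = {
    // Placeholder path; substitute any text file.
    val reader = new BufferedReader(new FileReader("example.txt"))
    try {
      // readLine() returns null at end of stream, so stop there.
      Iterator.continually(reader.readLine()).takeWhile(_ != null).foreach(println)
    } finally {
      reader.close()
    }
  }
}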
Example 1
Source File: CloudantReceiver.scala    From bahir    with Apache License 2.0
package org.apache.bahir.cloudant

import java.io.{BufferedReader, InputStreamReader}
import java.util.concurrent.TimeUnit

import okhttp3._

import org.apache.spark.SparkConf
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.receiver.Receiver

import org.apache.bahir.cloudant.common._

class CloudantReceiver(sparkConf: SparkConf, cloudantParams: Map[String, String])
    extends Receiver[String](StorageLevel.MEMORY_AND_DISK) {
  // CloudantChangesConfig requires `_changes` endpoint option
  lazy val config: CloudantChangesConfig = {
    JsonStoreConfigManager.getConfig(sparkConf, cloudantParams
      + ("cloudant.endpoint" -> JsonStoreConfigManager.CHANGES_INDEX)
    ).asInstanceOf[CloudantChangesConfig]
  }

  def onStart() {
    // Start the thread that receives data over a connection
    new Thread("Cloudant Receiver") {
      override def run() { receive() }
    }.start()
  }

  private def receive(): Unit = {
    val okHttpClient: OkHttpClient = new OkHttpClient.Builder()
      .connectTimeout(5, TimeUnit.SECONDS)
      .readTimeout(60, TimeUnit.SECONDS)
      .build
    val url = config.getChangesReceiverUrl.toString

    val builder = new Request.Builder().url(url)
    if (config.username != null) {
      val credential = Credentials.basic(config.username, config.password)
      builder.header("Authorization", credential)
    }
    if(config.getSelector != null) {
      val jsonType = MediaType.parse("application/json; charset=utf-8")
      val selector = "{\"selector\":" + config.getSelector + "}"
      val selectorBody = RequestBody.create(jsonType, selector)
      builder.post(selectorBody)
    }

    val request = builder.build
    val response = okHttpClient.newCall(request).execute
    val status_code = response.code

    if (status_code == 200) {
      val changesInputStream = response.body.byteStream
      if (changesInputStream != null) {
        val bufferedReader = new BufferedReader(new InputStreamReader(changesInputStream))
        // In Scala an assignment evaluates to Unit, so the Java-style
        // `while ((x = read()) != null)` idiom would never terminate; read each row explicitly.
        var json = ChangesRowScanner.readRowFromReader(bufferedReader)
        while (json != null) {
          if (!isStopped() && !json.getDoc.has("_deleted")) {
            store(json.getDoc.toString)
          }
          json = ChangesRowScanner.readRowFromReader(bufferedReader)
        }
      }
    } else {
      val errorMsg = "Error retrieving _changes feed " + config.getDbname + ": " + status_code
      reportError(errorMsg, new CloudantException(errorMsg))
    }
  }

  def onStop(): Unit = {
  }
} 
Example 2
Source File: Repl.scala    From ncdbg    with BSD 3-Clause "New" or "Revised" License
package com.programmaticallyspeaking.repl

import java.io.BufferedReader

import javax.script.ScriptException

import scala.collection.mutable.ListBuffer
import scala.util.control.NonFatal
import scala.util.{Failure, Success}

class Repl(reader: BufferedReader) {
  private val engine = new Engine
  private val heldLines = ListBuffer[String]()

  def run(): Unit = {
    loopAround()
  }

  private def loopAround(): Unit = {
    var done = false

    while (!done) {
      showPrompt
      val line = reader.readLine()
      if (line == ":quit") {
        done = true
      } else {
        evaluateLine(line)
      }
    }
  }

  private def evaluateLine(line: String): Unit = {
    val script = (heldLines :+ line).mkString("\n")
    engine.evaluate(script) match {
      case Success(result) =>
        heldLines.clear()
        println(result)

      case Failure(s: ScriptException) if continuationRequired(s) =>
        heldLines += line

      case Failure(NonFatal(t)) =>
        t.printStackTrace(System.out)
        println("")
    }
  }

  private def continuationRequired(s: ScriptException): Boolean = {
    val msg = s.getMessage
    // E.g.: javax.script.ScriptException: <eval>:1:12 Expected } but found eof
    msg.contains("but found eof")
  }

  private def showPrompt = {
    val prompt = if (heldLines.isEmpty) "> " else "| "
    print(prompt)
    System.out.flush()
  }
} 
Example 3
Source File: ScriptIdentificationInRunningAppTest.scala    From ncdbg    with BSD 3-Clause "New" or "Revised" License
package com.programmaticallyspeaking.ncd.nashorn

import java.io.{BufferedReader, InputStreamReader}

import com.programmaticallyspeaking.ncd.testing.StringUtils
import jdk.nashorn.api.scripting.NashornScriptEngineFactory

import scala.concurrent.duration._


class ScriptIdentificationInRunningAppTest extends ScriptAddedTestFixture {

  "Recompilation of eval script should be merged with the original - regardless of observed order" in {
    val script = "'dummy';"

    whenReady(testAddScriptWithWait(script, 500.millis)) { _ =>
      getHost.scripts.filterNot(_.contents.contains("dummy")).map(_.url).distinct should have size 1
    }
  }

  override val scriptExecutor = RunningApp
}

object RunningApp extends App with ScriptExecutorBase {
  println("RunningApp starting. Java version: " + System.getProperty("java.version"))
  val scriptEngine = new NashornScriptEngineFactory().getScriptEngine("--no-syntax-extensions")
  val reader = new BufferedReader(new InputStreamReader(System.in))

  val script1 =
    """function fun() {
      |  return '42';
      |}
      |var i = 0;
      |while (i++ < 10) {
      |  fun();
      |}
    """.stripMargin
  scriptEngine.eval(script1)

  // Stuff required by the test infra
  println(Signals.ready)
  waitForSignal(Signals.go)
  val script = StringUtils.fromBase64(readStdin())
  scriptEngine.eval(script)
  println(Signals.scriptDone)
} 
Example 4
Source File: ScriptExecutor.scala    From ncdbg    with BSD 3-Clause "New" or "Revised" License
package com.programmaticallyspeaking.ncd.nashorn

import java.io.{BufferedReader, InputStreamReader}

import com.programmaticallyspeaking.ncd.testing.StringUtils
import jdk.nashorn.api.scripting.NashornScriptEngineFactory

import scala.util.control.NonFatal

abstract class ScriptExecutorC extends App with ScriptExecutorBase {
  println("ScriptExecutor starting. Java version: " + System.getProperty("java.version"))
  val scriptEngine = new NashornScriptEngineFactory().getScriptEngine(nashornArgs: _*)
  val reader = new BufferedReader(new InputStreamReader(System.in))
  println(Signals.ready)
  waitForSignal(Signals.go)
  println("Got the go signal!")

  scriptEngine.eval(
    """this.createInstance = function (typeName) {
      |  var Type = Java.type(typeName);
      |  if (!Type) throw new Error("No such type: " + typeName);
      |  return new Type();
      |};
    """.stripMargin)

  while (true) {
    println("Awaiting script on stdin...")
    val script = StringUtils.fromBase64(readStdin())
    println("Got script: " + script)
    try {
      scriptEngine.eval(script)
      println("Script evaluation completed without errors")
    } catch {
      case NonFatal(t) =>
        t.printStackTrace(System.err)
    }
    println(Signals.scriptDone)
  }

  protected def nashornArgs: Array[String]
}

object ScriptExecutorNoJava extends ScriptExecutorC {
  override protected def nashornArgs = Array("--no-syntax-extensions", "--no-java")
}

object ScriptExecutor extends ScriptExecutorC {
  override protected def nashornArgs = Array("--no-syntax-extensions")
} 
Example 5
Source File: MultiThreadingTest.scala    From ncdbg    with BSD 3-Clause "New" or "Revised" License
package com.programmaticallyspeaking.ncd.nashorn
import java.io.{BufferedReader, InputStreamReader}
import java.util.concurrent.TimeoutException
import java.util.concurrent.atomic.AtomicInteger

import com.programmaticallyspeaking.ncd.host._
import com.programmaticallyspeaking.ncd.messaging.Observer
import com.programmaticallyspeaking.ncd.testing.{SharedInstanceActorTesting, UnitTest}
import jdk.nashorn.api.scripting.NashornScriptEngineFactory
import org.scalatest.concurrent.{Eventually, ScalaFutures}
import org.scalatest.exceptions.TestFailedException
import org.slf4s.Logging

import scala.concurrent.duration._
import scala.concurrent.{Await, ExecutionContext, Future, Promise}

trait MultiThreadingTestFixture extends UnitTest with Logging with SharedInstanceActorTesting with VirtualMachineLauncher with ScalaFutures with FairAmountOfPatience with Eventually {
  override val scriptExecutor: ScriptExecutorBase = MultiThreadedScriptExecutor
  override implicit val executionContext: ExecutionContext = ExecutionContext.global
}

class MultiThreadingTest extends MultiThreadingTestFixture {
  def location(ln: Int) = ScriptLocation(ln, None)

  "Breakpoint requests from other threads should be ignore in a paused state" in {
    val scriptAddedPromise = Promise[Script]()
    val hitBreakpointPromise = Promise[String]()
    val breakpointCounter = new AtomicInteger()
    val host = getHost
    observeScriptEvents(new Observer[ScriptEvent] {

      override def onNext(item: ScriptEvent): Unit = item match {
        case ScriptAdded(script) =>
          scriptAddedPromise.success(script)
        case hb: HitBreakpoint =>
          breakpointCounter.incrementAndGet()
          hitBreakpointPromise.trySuccess("")
        case _ =>
      }

      override def onError(error: Throwable): Unit = {}

      override def onComplete(): Unit = {}
    })

    whenReady(scriptAddedPromise.future) { script =>
      val scriptLocation = eventually {
        host.getBreakpointLocations(ScriptIdentity.fromId(script.id), location(1), None).headOption.getOrElse(fail(s"No line numbers for script ${script.id}"))
      }
      host.setBreakpoint(ScriptIdentity.fromURL(script.url), scriptLocation, BreakpointOptions.empty)

      try {
        whenReady(hitBreakpointPromise.future) { _ =>
          // Ugly, but wait for a while to see if the counter increases over 1 (which it shouldn't).
          Thread.sleep(200)
          breakpointCounter.get() should be(1)
        }
      } catch {
        case t: TestFailedException if t.getMessage().contains("timeout") =>
          val progress = summarizeProgress()
          throw new TimeoutException("Timed out: " + progress)
      }
    }
  }
}

object MultiThreadedScriptExecutor extends App with ScriptExecutorBase {
  println("MultiThreadedScriptExecutor starting. Java version: " + System.getProperty("java.version"))
  val scriptEngine = new NashornScriptEngineFactory().getScriptEngine("--no-syntax-extensions")
  val reader = new BufferedReader(new InputStreamReader(System.in))
  println(Signals.ready)
  waitForSignal(Signals.go)

  // Used a compiled script here before, stopped working with JDK 10
  var src =
    """(function () {
      |  return Math.floor(5.5);
      |})();
    """.stripMargin

  implicit val ec = ExecutionContext.global

  val futures = (1 to 5).map { _ =>
    Future {
      while (true) {
        scriptEngine.eval(src)
      }
    }
  }

  Await.result(Future.sequence(futures), 30.seconds)
} 
Example 6
Source File: StreamReadingThread.scala    From ncdbg    with BSD 3-Clause "New" or "Revised" License
package com.programmaticallyspeaking.ncd.nashorn

import java.io.{BufferedReader, IOException, InputStream, InputStreamReader}

import scala.util.control.NonFatal

class StreamReadingThread(in: InputStream, appender: (String) => Unit) extends Thread {
  override def run(): Unit = {
    try {
      val reader = new BufferedReader(new InputStreamReader(in))
      var str = ""
      while (str != null) {
        str = reader.readLine()
        Option(str).foreach(appender)
      }
    } catch {
      case _: InterruptedException =>
        // ok
      case ex: IOException if isStreamClosed(ex) =>
        // ok
      case NonFatal(t) =>
        t.printStackTrace(System.err)
    }
  }

  private def isStreamClosed(ex: IOException) = ex.getMessage.toLowerCase == "stream closed"
} 
Example 7
Source File: ScriptExecutorBase.scala    From ncdbg    with BSD 3-Clause "New" or "Revised" License
package com.programmaticallyspeaking.ncd.nashorn

import java.io.BufferedReader

trait ScriptExecutorBase {
  val reader: BufferedReader

  protected def readStdin(): String = reader.readLine()
  protected def waitForSignal(expected: String): Unit = {
    println(s"Awaiting '$expected' signal")
    val signal = readStdin()
    if (signal != expected) {
      println(s"Didn't get '$expected' signal, got: " + signal)
      System.exit(1)
    }
  }
} 
Example 8
Source File: ScannerBenchmark.scala    From better-files    with MIT License
package better.files.benchmarks

import better.files._

import java.io.{BufferedReader, StringReader}

class ScannerBenchmark extends Benchmark {
  val file = File.newTemporaryFile()
  val n    = 1000
  repeat(n) {
    file
      .appendLine(-n to n mkString " ")
      .appendLine("hello " * n)
      .appendLine("world " * n)
  }
  val scanners: Seq[BufferedReader => AbstractScanner] = Seq(
    new JavaScanner(_),
    new StringBuilderScanner(_),
    new CharBufferScanner(_),
    new StreamingScanner(_),
    new IterableScanner(_),
    new IteratorScanner(_),
    new BetterFilesScanner(_),
    new ArrayBufferScanner(_),
    new FastJavaIOScanner2(_),
    new FastJavaIOScanner(_)
  )

  def runTest(scanner: AbstractScanner) = {
    val (_, time) = profile(run(scanner))
    scanner.close()
    info(f"${scanner.getClass.getSimpleName.padTo(25, ' ')}: $time%4d ms")
  }

  def run(scanner: AbstractScanner): Unit =
    repeat(n) {
      assert(scanner.hasNext)
      val ints  = List.fill(2 * n + 1)(scanner.nextInt())
      val line  = "" //scanner.nextLine()
      val words = IndexedSeq.fill(2 * n)(scanner.next())
      (line, ints, words)
    }

  test("scanner") {
    info("Warming up ...")
    scanners foreach { scannerBuilder =>
      val canaryData =
        """
          |10 -23
          |Hello World
          |Hello World
          |19
        """.stripMargin
      val scanner = scannerBuilder(new BufferedReader(new StringReader(canaryData)))
      info(s"Testing ${scanner.getClass.getSimpleName} for correctness")
      assert(scanner.hasNext)
      assert(scanner.nextInt() == 10)
      assert(scanner.nextInt() == -23)
      assert(scanner.next() == "Hello")
      assert(scanner.next() == "World")
      val l = scanner.nextLine()
      assert(l == "Hello World", l)
      assert(scanner.nextInt() == 19)
    //assert(!scanner.hasNext)
    }

    info("Running benchmark ...")
    scanners foreach { scanner =>
      runTest(scanner(file.newBufferedReader))
    }
  }
} 
Example 9
Source File: HadoopFileSystemLogStore.scala    From delta    with Apache License 2.0
package org.apache.spark.sql.delta.storage

import java.io.{BufferedReader, FileNotFoundException, InputStreamReader}
import java.nio.charset.StandardCharsets.UTF_8
import java.nio.file.FileAlreadyExistsException
import java.util.UUID

import scala.collection.JavaConverters._

import org.apache.commons.io.IOUtils
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileStatus, FileSystem, Path}

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SparkSession


  protected def writeWithRename(
      path: Path, actions: Iterator[String], overwrite: Boolean = false): Unit = {
    val fs = path.getFileSystem(getHadoopConfiguration)

    if (!fs.exists(path.getParent)) {
      throw new FileNotFoundException(s"No such file or directory: ${path.getParent}")
    }
    if (overwrite) {
      val stream = fs.create(path, true)
      try {
        actions.map(_ + "\n").map(_.getBytes(UTF_8)).foreach(stream.write)
      } finally {
        stream.close()
      }
    } else {
      if (fs.exists(path)) {
        throw new FileAlreadyExistsException(path.toString)
      }
      val tempPath = createTempPath(path)
      var streamClosed = false // This flag is to avoid double close
      var renameDone = false // This flag is to save the delete operation in most of cases.
      val stream = fs.create(tempPath)
      try {
        actions.map(_ + "\n").map(_.getBytes(UTF_8)).foreach(stream.write)
        stream.close()
        streamClosed = true
        try {
          if (fs.rename(tempPath, path)) {
            renameDone = true
          } else {
            if (fs.exists(path)) {
              throw new FileAlreadyExistsException(path.toString)
            } else {
              throw new IllegalStateException(s"Cannot rename $tempPath to $path")
            }
          }
        } catch {
          case _: org.apache.hadoop.fs.FileAlreadyExistsException =>
            throw new FileAlreadyExistsException(path.toString)
        }
      } finally {
        if (!streamClosed) {
          stream.close()
        }
        if (!renameDone) {
          fs.delete(tempPath, false)
        }
      }
    }
  }

  protected def createTempPath(path: Path): Path = {
    new Path(path.getParent, s".${path.getName}.${UUID.randomUUID}.tmp")
  }

  override def invalidateCache(): Unit = {}
} 
Example 10
Source File: Version.scala    From coursier    with Apache License 2.0
package coursier.cli.publish.version

import java.io.{BufferedReader, File, InputStreamReader}

import caseapp._

object Version extends CaseApp[Options] {

  private def output(cmd: Seq[String], dir: File): String = {
    // adapted from https://stackoverflow.com/a/16714180/3714539
    val b = new ProcessBuilder(cmd: _*).directory(dir)
    val p = b.start()
    val reader = new BufferedReader(new InputStreamReader(p.getInputStream))
    val builder = new StringBuilder
    var line: String = null
    while ({
      line = reader.readLine()
      line != null
    }) {
      builder.append(line)
      builder.append(System.getProperty("line.separator"))
    }
    val retCode = p.waitFor()
    if (retCode == 0)
      builder.toString
    else
      throw new Exception(s"Command ${cmd.mkString(" ")} exited with code $retCode")
  }

  def version(dir: File): Either[String, String] = {

    val tag = output(Seq("git", "describe", "--tags", "--match", "v[0-9]*", "--abbrev=0"), dir).trim

    if (tag.isEmpty)
      Left("No git tag like v[0-9]* found")
    else {
      val dist = output(Seq("git", "rev-list", "--count", s"$tag...HEAD"), dir).trim.toInt // can throw…

      if (dist == 0)
        Right(tag)
      else {

        val previousVersion = tag.stripPrefix("v")

        // Tweak coursier.core.Version.Tokenizer to help here?

        val versionOpt =
          if (previousVersion.forall(c => c == '.' || c.isDigit)) {
            val l = previousVersion.split('.')
            Some((l.init :+ (l.last.toInt + 1).toString).mkString(".") + "-SNAPSHOT")
          } else {
            val idx = previousVersion.indexOf("-M")
            if (idx < 0)
              None
            else {
              Some(previousVersion.take(idx) + "-SNAPSHOT")
            }
          }

        versionOpt.toRight {
          s"Don't know how to handle version $previousVersion"
        }
      }
    }
  }

  def run(options: Options, remainingArgs: RemainingArgs): Unit = {

    val dir = remainingArgs.all match {
      case Seq() => new File(".")
      case Seq(path) => new File(path)
      case other =>
        Console.err.println(s"Too many arguments specified: ${other.mkString(" ")}\nExpected 0 or 1 argument.")
        sys.exit(1)
    }

    version(dir) match {
      case Left(msg) =>
        Console.err.println(msg)
        sys.exit(1)
      case Right(v) =>

        if (options.isSnapshot) {
          val retCode =
            if (v.endsWith("-SNAPSHOT"))
              0
            else
              1
          if (!options.quiet)
            Console.err.println(v)
          sys.exit(retCode)
        } else
          println(v)
    }
  }
} 
Example 11
Source File: IoTest.scala    From fgbio    with MIT License
package com.fulcrumgenomics.util

import java.io.{BufferedInputStream, BufferedReader, FileInputStream, InputStreamReader}
import java.util.zip.GZIPInputStream

import com.fulcrumgenomics.testing.UnitSpec
import htsjdk.samtools.util.BlockCompressedInputStream


class IoTest extends UnitSpec {

  Seq(".bgz", ".bgzip").foreach { ext =>
    it should s"round trip data to a bgzipped file with extension ${ext}" in {
      val text = "This is a stupid little text fragment for compression. Yay compression!"
      val data = Seq.fill(10)(text)
      val f = makeTempFile("test.", ext)
      Io.writeLines(f, data)

      val stream = new BufferedInputStream(new FileInputStream(f.toFile))
      BlockCompressedInputStream.isValidFile(stream) shouldBe true
      val reread = Io.readLines(f).toIndexedSeq

      reread shouldBe data
    }
  }
} 
Example 12
Source File: BenchmarkSubmitter.scala    From Argus-SAF    with Apache License 2.0
package org.argus.jnsaf.submitter

import java.io.{BufferedReader, FileReader}

import org.argus.amandroid.plugin.TaintAnalysisApproach
import org.argus.jawa.core.util._

object BenchmarkSubmitter {
  private def readExpected(expectedPath: String): IMap[String, Int] = {
    val result: MMap[String, Int] = mmapEmpty
    val uri = FileUtil.toUri(expectedPath)
    val file = FileUtil.toFile(uri)
    val rdr: BufferedReader = new BufferedReader(new FileReader(file))
    var line = rdr.readLine()
    while(line != null){
      val entry = line.split("\\s+")
      result(entry(0)) = entry(1).toInt
      line = rdr.readLine()
    }
    rdr.close()
    result.toMap
  }
  def apply(sourcePath: String, address: String, port: Int, expectedFile: String, approach: TaintAnalysisApproach.Value): Unit = {
    val expected = readExpected(expectedFile)
    val analysisResult = ApkSubmitter(sourcePath, address, port, approach)
    case class Compare(var expected: Int = 0, var result: Int = 0)
    val compare: MMap[String, Compare] = mmapEmpty
    expected.foreach { case (name, num) =>
      compare(name) = Compare(expected = num)
    }
    analysisResult.foreach { case (name, res) =>
      var num = 0
      res match {
        case Some(r) =>
          num = r.paths.size
        case None =>
      }
      compare.getOrElseUpdate(name, Compare()).result = num
    }
    println("Taint analysis result:")
    println("Expected\tResult\t\tApk")
    compare.toList.sortBy(_._1).foreach { case (name, comp) =>
      println(s"${comp.expected}\t\t${comp.result}\t\t$name")
    }
  }
} 
Example 13
Source File: ChangesReceiver.scala    From bahir    with Apache License 2.0
package org.apache.bahir.cloudant.internal

import java.io.{BufferedReader, InputStreamReader}
import java.util.concurrent.TimeUnit

import com.google.gson.JsonParser
import okhttp3._

import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.receiver.Receiver

import org.apache.bahir.cloudant.CloudantChangesConfig
import org.apache.bahir.cloudant.common._

class ChangesReceiver(config: CloudantChangesConfig)
  extends Receiver[String](StorageLevel.MEMORY_AND_DISK) {

  def onStart() {
    // Start the thread that receives data over a connection
    new Thread("Cloudant Receiver") {
      override def run() { receive() }
    }.start()
  }

  private def receive(): Unit = {
    val okHttpClient: OkHttpClient = new OkHttpClient.Builder()
        .connectTimeout(5, TimeUnit.SECONDS)
        .readTimeout(60, TimeUnit.SECONDS)
        .build
    val url = config.getChangesReceiverUrl.toString

    val builder = new Request.Builder().url(url)
    if (config.username != null) {
      val credential = Credentials.basic(config.username, config.password)
      builder.header("Authorization", credential)
    }
    if(config.getSelector != null) {
      val jsonType = MediaType.parse("application/json; charset=utf-8")
      val selector = "{\"selector\":" + config.getSelector + "}"
      val selectorBody = RequestBody.create(jsonType, selector)
      builder.post(selectorBody)
    }

    val request = builder.build
    val response = okHttpClient.newCall(request).execute
    val status_code = response.code

    if (status_code == 200) {
      val changesInputStream = response.body.byteStream
      if (changesInputStream != null) {
        val bufferedReader = new BufferedReader(new InputStreamReader(changesInputStream))
        // In Scala an assignment evaluates to Unit, so the Java-style
        // `while ((x = read()) != null)` idiom would never terminate; read each row explicitly.
        var json = ChangesRowScanner.readRowFromReader(bufferedReader)
        while (json != null) {
          if (!isStopped() && !json.getDoc.has("_deleted")) {
            store(json.getDoc.toString)
          }
          json = ChangesRowScanner.readRowFromReader(bufferedReader)
        }
      }
    } else {
      val responseAsJson = new JsonParser().parse(response.body.string)
      val errorMsg = "Error retrieving _changes feed data from database " + "'" +
        config.getDbname + "' with response code " + status_code + ": " + responseAsJson.toString
      reportError(errorMsg, new CloudantException(errorMsg))
      CloudantChangesConfig.receiverErrorMsg = errorMsg
    }
  }

  override def onStop(): Unit = {
  }
} 
Example 14
Source File: Util.scala    From effpi    with MIT License
// Effpi - verified message-passing programs in Dotty
// Copyright 2019 Alceste Scalas and Elias Benussi
// Released under the MIT License: https://opensource.org/licenses/MIT
package effpi.verifier

import java.io.{BufferedReader, InputStreamReader}

package object util {
  
  def runCommand(cmd: String, args: scala.Seq[String],
                 logger: String => Unit = _ => ()): StdOutErrAndTime = {
    logger(s"""Executing: ${cmd} ${args.mkString(" ")}""")
    val pbargs: java.util.List[String] = {
      val lst = new java.util.ArrayList[String]()
      List((cmd +: args):_*).foreach { lst.add(_) }
      lst
    }
    val builder = new ProcessBuilder(pbargs)
    val startTime: Long = System.nanoTime()
    val p = builder.start()

    val outReader = new BufferedReader(new InputStreamReader(p.getInputStream))
    val outStr = {
      Iterator.continually(outReader.readLine()).takeWhile(_ != null).mkString("\n")
    }

    val errReader = new BufferedReader(new InputStreamReader(p.getErrorStream))
    val errStr = {
      Iterator.continually(errReader.readLine()).takeWhile(_ != null).mkString("\n")
    }

    val r = p.waitFor()
    val endTime: Long = System.nanoTime()

    if (r != 0) {
      throw new RuntimeException(
        s"""Command failed with code ${r}: ${cmd} ${args.mkString(" ")}"""
          + s"\nStandard error:\n" + errStr)
    }
    StdOutErrAndTime(outStr, errStr, endTime - startTime)
  }
} 
Example 15
Source File: IO.scala    From scala-commons    with MIT License
package com.avsystem.commons
package testutil

import java.io.{BufferedReader, File, FileWriter, InputStreamReader}

import scala.annotation.tailrec

object IO {
  def readTestResource(path: String): String = {
    val reader = new BufferedReader(new InputStreamReader(getClass.getResourceAsStream(path)))
    val buf = new Array[Char](2048)
    @tailrec def loop(sb: JStringBuilder): String = {
      reader.read(buf) match {
        case -1 => sb.toString
        case count => loop(sb.append(buf, 0, count))
      }
    }
    try loop(new JStringBuilder) finally reader.close()
  }

  def writeTestResource(path: String, data: String): Unit = {
    // assuming working dir used by intellij
    val writer = new FileWriter(s"src/test/resources$path".replaceAllLiterally("/", File.separator))
    try writer.write(data) finally writer.close()
  }
} 
Example 16
Source File: InputReaderDecorator.scala    From Scala-Design-Patterns-Second-Edition    with MIT License
package com.ivan.nikolov.structural.decorator

import java.io.{InputStreamReader, BufferedInputStream, ByteArrayOutputStream, BufferedReader}
import java.nio.charset.Charset
import java.util.Base64
import java.util.zip.GZIPOutputStream

import com.ivan.nikolov.structural.decorator.common.{AdvancedInputReader, InputReader}
import com.typesafe.scalalogging.LazyLogging

abstract class InputReaderDecorator(inputReader: InputReader) extends InputReader {
  override def readLines(): Stream[String] = inputReader.readLines()
}

class CapitalizedInputReader(inputReader: InputReader) extends InputReaderDecorator(inputReader) {
  override def readLines(): Stream[String] = super.readLines().map(_.toUpperCase)
}

class CompressingInputReader(inputReader: InputReader) extends InputReaderDecorator(inputReader) with LazyLogging {
  override def readLines(): Stream[String] = super.readLines().map {
    case line =>
      val text = line.getBytes(Charset.forName("UTF-8"))
      logger.info("Length before compression: {}", text.length.toString)
      val output = new ByteArrayOutputStream()
      val compressor = new GZIPOutputStream(output)
      try {
        compressor.write(text, 0, text.length)
        val outputByteArray = output.toByteArray
        logger.info("Length after compression: {}", outputByteArray.length.toString)
        new String(outputByteArray, Charset.forName("UTF-8"))
      } finally {
        compressor.close()
        output.close()
      }
  }
}

class Base64EncoderInputReader(inputReader: InputReader) extends InputReaderDecorator(inputReader) {
  override def readLines(): Stream[String] = super.readLines().map {
    case line => Base64.getEncoder.encodeToString(line.getBytes(Charset.forName("UTF-8")))
  }
}

object DecoratorExample {
  def main(args: Array[String]): Unit = {
    val stream = new BufferedReader(
      new InputStreamReader(
        new BufferedInputStream(this.getClass.getResourceAsStream("data.txt"))
      )
    )
    try {
      val reader = new CapitalizedInputReader(new AdvancedInputReader(stream))
      reader.readLines().foreach(println)
    } finally {
      stream.close()
    }
  }
}

object DecoratorExampleBig {
  def main(args: Array[String]): Unit = {
    val stream = new BufferedReader(
      new InputStreamReader(
        new BufferedInputStream(this.getClass.getResourceAsStream("data.txt"))
      )
    )
    try {
      val reader = new CompressingInputReader(
        new Base64EncoderInputReader(
          new CapitalizedInputReader(
            new AdvancedInputReader(stream)
          )
        )
      )
      reader.readLines().foreach(println)
    } finally {
      stream.close()
    }
  }
} 
Example 17
package com.ivan.nikolov.structural.decorator

import java.io.{BufferedInputStream, InputStreamReader, BufferedReader, ByteArrayOutputStream}
import java.nio.charset.Charset
import java.util.Base64
import java.util.zip.GZIPOutputStream

import com.ivan.nikolov.structural.decorator.common.{AdvancedInputReader, InputReader}
import com.typesafe.scalalogging.LazyLogging

trait CapitalizedInputReaderTrait extends InputReader {
  abstract override def readLines(): Stream[String] = super.readLines().map(_.toUpperCase)
}

trait CompressingInputReaderTrait extends InputReader with LazyLogging {
  abstract override def readLines(): Stream[String] = super.readLines().map {
    case line =>
      val text = line.getBytes(Charset.forName("UTF-8"))
      logger.info("Length before compression: {}", text.length.toString)
      val output = new ByteArrayOutputStream()
      val compressor = new GZIPOutputStream(output)
      try {
        compressor.write(text, 0, text.length)
        val outputByteArray = output.toByteArray
        logger.info("Length after compression: {}", outputByteArray.length.toString)
        new String(outputByteArray, Charset.forName("UTF-8"))
      } finally {
        compressor.close()
        output.close()
      }
  }
}

trait Base64EncoderInputReaderTrait extends InputReader {
  abstract override def readLines(): Stream[String] = super.readLines().map {
    case line => Base64.getEncoder.encodeToString(line.getBytes(Charset.forName("UTF-8")))
  }
}

object StackableTraitsExample {
  def main(args: Array[String]): Unit = {
    val stream = new BufferedReader(
      new InputStreamReader(
        new BufferedInputStream(this.getClass.getResourceAsStream("data.txt"))
      )
    )
    try {
      val reader = new AdvancedInputReader(stream) with CapitalizedInputReaderTrait
      reader.readLines().foreach(println)
    } finally {
      stream.close()
    }
  }
}

object StackableTraitsBigExample {
  def main(args: Array[String]): Unit = {
    val stream = new BufferedReader(
      new InputStreamReader(
        new BufferedInputStream(this.getClass.getResourceAsStream("data.txt"))
      )
    )
    try {
      val reader = new AdvancedInputReader(stream) with CapitalizedInputReaderTrait with Base64EncoderInputReaderTrait with CompressingInputReaderTrait
      reader.readLines().foreach(println)
    } finally {
      stream.close()
    }
  }
} 
Example 18
Source File: FileReader.scala    From Scala-Design-Patterns-Second-Edition    with MIT License
package com.ivan.nikolov.structural.proxy

import java.io.{BufferedReader, InputStreamReader}
import scala.collection.JavaConverters._

trait FileReader {
  def readFileContents(): String
}

class FileReaderReal(filename: String) extends FileReader {
  val contents = {
    val stream = this.getClass.getResourceAsStream(filename) 
    val reader = new BufferedReader(
      new InputStreamReader(
        stream
      )
    )
    try {
      reader.lines().iterator().asScala.mkString(System.getProperty("line.separator"))
    } finally {
      reader.close()
      stream.close()
    }
  }
  
  System.out.println(s"Finished reading the actual file: $filename")
  
  override def readFileContents(): String = contents
}

class FileReaderProxy(filename: String) extends FileReader {
  private var fileReader: FileReaderReal = null
  
  override def readFileContents(): String = {
    if (fileReader == null) {
      fileReader = new FileReaderReal(filename)
    }
    fileReader.readFileContents()
  }
}

object ProxyExample {
  def main(args: Array[String]): Unit = {
    val fileMap = Map(
      "file1.txt" -> new FileReaderProxy("file1.txt"),
      "file2.txt" -> new FileReaderProxy("file2.txt"),
      "file3.txt" -> new FileReaderProxy("file3.txt"),
      "file4.txt" -> new FileReaderReal("file1.txt")
    )
    System.out.println("Created the map. You should have seen file1.txt read because it wasn't used in a proxy.")
    System.out.println(s"Reading file1.txt from the proxy: ${fileMap("file1.txt").readFileContents()}")
    System.out.println(s"Reading file3.txt from the proxy: ${fileMap("file3.txt").readFileContents()}")
  }
} 
Example 19
Source File: CsvParserExample.scala    From Hands-On-Data-Analysis-with-Scala    with MIT License
package handson.example.csv

import java.io.{BufferedReader, InputStreamReader}
import java.util.function.Consumer

import org.apache.commons.csv.{CSVFormat, CSVPrinter, CSVRecord}

import scala.collection.mutable.ListBuffer


object CsvParserExample {
  def main(args: Array[String]): Unit = {
    val reader = new BufferedReader(
      new InputStreamReader(
        new java.net.URL("https://data.lacity.org/api/views/nxs9-385f/rows.csv?accessType=DOWNLOAD").openStream()
      )
    )
    val csvParser = CSVFormat.RFC4180.withFirstRecordAsHeader().parse(reader)
    val dataConsumer = new DataConsumer
    csvParser.forEach(dataConsumer)
    val allRecords = dataConsumer.buf.toList
    allRecords.take(3).foreach(println)

    val csvPrinter = new CSVPrinter(System.out, CSVFormat.RFC4180.withHeader("fname", "lname", "age"))
    csvPrinter.printRecord("Jon", "Doe", "21")
    csvPrinter.printRecord("James", "Bond", "39")
    csvPrinter.flush()

  }

} 
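Note: the DataConsumer class used above is not included in this listing. A minimal sketch consistent with the imports (a java.util.function.Consumer[CSVRecord] that buffers every record) might look like the following; the field name buf is inferred from the usage above, and the rest is an assumption.

import java.util.function.Consumer
import org.apache.commons.csv.CSVRecord
import scala.collection.mutable.ListBuffer

// Hypothetical helper: collects each CSVRecord handed to it by csvParser.forEach.
class DataConsumer extends Consumer[CSVRecord] {
  val buf: ListBuffer[CSVRecord] = ListBuffer()
  override def accept(record: CSVRecord): Unit = buf += record
}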
Example 20
Source File: MeetupReceiver.scala    From meetup-stream    with Apache License 2.0
package receiver

import org.apache.spark.streaming.receiver.Receiver
import org.apache.spark.storage.StorageLevel
import org.apache.spark.Logging
import com.ning.http.client.AsyncHttpClientConfig
import com.ning.http.client._
import scala.collection.mutable.ArrayBuffer
import java.io.OutputStream
import java.io.ByteArrayInputStream
import java.io.InputStreamReader
import java.io.BufferedReader
import java.io.InputStream
import java.io.PipedInputStream
import java.io.PipedOutputStream

class MeetupReceiver(url: String) extends Receiver[String](StorageLevel.MEMORY_AND_DISK_2) with Logging {
  
  @transient var client: AsyncHttpClient = _
  
  @transient var inputPipe: PipedInputStream = _
  @transient var outputPipe: PipedOutputStream = _  
       
  def onStart() {    
    val cf = new AsyncHttpClientConfig.Builder()
    cf.setRequestTimeout(Integer.MAX_VALUE)
    cf.setReadTimeout(Integer.MAX_VALUE)
    cf.setPooledConnectionIdleTimeout(Integer.MAX_VALUE)      
    client= new AsyncHttpClient(cf.build())
    
    inputPipe = new PipedInputStream(1024 * 1024)
    outputPipe = new PipedOutputStream(inputPipe)
    val producerThread = new Thread(new DataConsumer(inputPipe))
    producerThread.start()
    
    client.prepareGet(url).execute(new AsyncHandler[Unit]{
        
      def onBodyPartReceived(bodyPart: HttpResponseBodyPart) = {
        bodyPart.writeTo(outputPipe)
        AsyncHandler.STATE.CONTINUE        
      }
      
      def onStatusReceived(status: HttpResponseStatus) = {
        AsyncHandler.STATE.CONTINUE
      }
      
      def onHeadersReceived(headers: HttpResponseHeaders) = {
        AsyncHandler.STATE.CONTINUE
      }
            
      def onCompleted = {
        println("completed")
      }
      
      
      def onThrowable(t: Throwable)={
        t.printStackTrace()
      }
        
    })    
    
    
  }

  def onStop() {
    if (Option(client).isDefined) client.close()
    if (Option(outputPipe).isDefined) {
     outputPipe.flush()
     outputPipe.close() 
    }
    if (Option(inputPipe).isDefined) {
     inputPipe.close() 
    }    
  }
  
  class DataConsumer(inputStream: InputStream) extends Runnable 
  {
       
      override
      def run()
      {        
        val bufferedReader = new BufferedReader( new InputStreamReader( inputStream ))
        var input=bufferedReader.readLine()
        while(input!=null){          
          store(input)
          input=bufferedReader.readLine()
        }            
      }  
      
  }

} 
Example 21
Source File: tty_loop.scala    From libisabelle    with Apache License 2.0
package isabelle


import java.io.{IOException, Writer, Reader, InputStreamReader, BufferedReader}


class TTY_Loop(writer: Writer, reader: Reader,
  writer_lock: AnyRef = new Object,
  interrupt: Option[() => Unit] = None)
{
  private val console_output = Future.thread[Unit]("console_output") {
    try {
      var result = new StringBuilder(100)
      var finished = false
      while (!finished) {
        var c = -1
        var done = false
        while (!done && (result.length == 0 || reader.ready)) {
          c = reader.read
          if (c >= 0) result.append(c.asInstanceOf[Char])
          else done = true
        }
        if (result.length > 0) {
          System.out.print(result.toString)
          System.out.flush()
          result.length = 0
        }
        else {
          reader.close()
          finished = true
        }
      }
    }
    catch { case e: IOException => case Exn.Interrupt() => }
  }

  private val console_input = Future.thread[Unit]("console_input") {
    val console_reader = new BufferedReader(new InputStreamReader(System.in))
    def body
    {
      try {
        var finished = false
        while (!finished) {
          console_reader.readLine() match {
            case null =>
              writer.close()
              finished = true
            case line =>
              writer_lock.synchronized {
                writer.write(line)
                writer.write("\n")
                writer.flush()
              }
          }
        }
      }
      catch { case e: IOException => case Exn.Interrupt() => }
    }
    interrupt match {
      case None => body
      case Some(int) => POSIX_Interrupt.handler { int() } { body }
    }
  }

  def join { console_output.join; console_input.join }

  def cancel { console_input.cancel }
} 
Example 22
Source File: ObservableMain.scala    From advanced-scala-code    with Apache License 2.0
object ObservableMain {

  def main(args: Array[String]): Unit = {

    import monix.reactive.Observable
    val linesO = Observable.defer {
      import java.io.{BufferedReader, FileReader}
      val br = new BufferedReader(new FileReader("license.txt"))
      Observable.fromLinesReaderUnsafe(br)
    }

    printStatistics(linesO)
    printStatistics(linesO)

    def printStatistics(linesO: Observable[String]): Unit = {
      val wordsO = linesO.flatMap { line =>
        val arr = line.split("\\W").map(_.toLowerCase)
          .map(_.trim).filter(!_.isEmpty)
        Observable.fromIterable(arr.toIterable)
      }

      val rawResultO = wordsO.foldLeft(Map.empty[String, Int]) { (acc, next) =>
        acc.get(next) match {
          case None => acc + (next -> 1)
          case Some(num) => acc + (next -> (1 + num))
        }
      }

      import monix.reactive.Consumer
      val finalResultT = rawResultO.map { map =>
        map.toList.sortWith( _._2 > _._2).take(5).map(_._1)
      }.consumeWith(Consumer.head)

      import monix.execution.Scheduler.Implicits.global
      val resultCF = finalResultT.runToFuture

      import scala.concurrent.Await
      import scala.concurrent.duration._
      val result = Await.result(resultCF, 30.seconds)
      println(result)
      // List(the, or, of, and, to)
    }

  }

  import cats.kernel.Monoid
  import monix.reactive.Observable
  def alternativeMonoid(wordsO: Observable[String]): Unit = {
    import cats.instances.int.catsKernelStdGroupForInt
    import cats.instances.map.catsKernelStdMonoidForMap

    val listT = wordsO.map(word => Map(word -> 1)).toListL
    val totals = listT.map { data =>
      Monoid[Map[String, Int]].combineAll(data)
    }
    // totalsT: Task[Map[String, Int]]

    val finalResultT = totals.map { data =>
      data.toList.sortWith( _._2 > _._2).take(5).map(_._1)
    }

    import monix.execution.Scheduler.Implicits.global
    import scala.concurrent.Await
    import scala.concurrent.duration._
    val result = Await.result(finalResultT.runToFuture, 30.seconds)
    println(result)
  }
} 
Example 23
Source File: FileIO.scala    From korolev    with Apache License 2.0
package korolev.effect.io

import java.io.{BufferedReader, FileInputStream, FileOutputStream, FileReader}
import java.nio.file.Path

import korolev.effect.syntax._
import korolev.effect.{Effect, Stream}

object FileIO {

  def readBytes[F[_]: Effect](path: Path): F[LazyBytes[F]] = {
    val inputStream = new FileInputStream(path.toFile)
    LazyBytes.fromInputStream(inputStream)
  }

  def readLines[F[_]: Effect](path: Path): F[Stream[F, String]] = {
    Stream.unfoldResource[F, BufferedReader, Unit, String](
      default = (),
      create = Effect[F].delay(new BufferedReader(new FileReader(path.toFile))),
      loop = (reader, _) => Effect[F].delay {
        ((), Option(reader.readLine()))
      }
    )
  }

  
  def write[F[_]: Effect](path: Path, append: Boolean = false): Stream[F, Array[Byte]] => F[Unit] = { stream =>
    val outputStream = new FileOutputStream(path.toFile, append)
    def aux(): F[Unit] = {
      stream.pull().flatMap {
        case Some(chunk) => Effect[F]
          .delay(outputStream.write(chunk))
          .after(aux())
          .recover {
            case error =>
              outputStream.close()
              throw error
          }
        case None =>
          Effect[F].delay(outputStream.close())
      }
    }
    aux()
  }
} 
Example 24
Source File: GenerationContext.scala    From scalingua    with Apache License 2.0
package ru.makkarpov.scalingua.plugin

import java.io.{BufferedReader, DataInputStream, FileInputStream, InputStreamReader}
import java.nio.charset.StandardCharsets

import ru.makkarpov.scalingua.LanguageId
import sbt._

object GenerationContext {
  val HashMarker = "## Hash: ## "
  val ScalaHashPrefix = s"// $HashMarker"
}

case class GenerationContext(pkg: String, implicitCtx: Option[String], lang: LanguageId, hasTags: Boolean,
                             src: File, target: File, log: Logger)
{
  val srcHash = src.hashString

  def mergeContext(ctx: Option[String]): Option[String] = (implicitCtx, ctx) match {
    case (None,    None)    => None
    case (Some(x), None)    => Some(x)
    case (None,    Some(y)) => Some(y)
    case (Some(x), Some(y)) => Some(x + ":" + y)
  }

  def filePrefix = "/" + pkg.replace('.', '/') + (if (pkg.nonEmpty) "/" else "")

  def checkBinaryHash: Boolean = target.exists() && {
    val storedHash = {
      val is = new DataInputStream(new FileInputStream(target))
      try is.readUTF()
      catch {
        case t: Throwable =>
          t.printStackTrace()
          ""
      } finally is.close()
    }

    srcHash == storedHash
  }

  def checkTextHash: Boolean = target.exists() && {
    import GenerationContext.HashMarker

    val storedHash = {
      val rd = new BufferedReader(new InputStreamReader(new FileInputStream(target), StandardCharsets.UTF_8))
      try {
        val l = rd.readLine()
        if ((l ne null) && l.contains(HashMarker)) {
          val idx = l.indexOf(HashMarker)
          l.substring(idx + HashMarker.length)
        } else ""
      } catch {
        case t: Throwable =>
          t.printStackTrace()
          ""
      } finally rd.close()
    }

    srcHash == storedHash
  }
} 
Example 25
package com.chapter3.ScalaFP

import java.io.BufferedReader
import java.io.IOException
import java.io.FileReader

object ScalaExceptionHandling {
  def errorHandler(e: IOException): Unit = {
    println("stop doing something!")
  }

  val file: String = "C:/Exp/input.txt"
  val input = new BufferedReader(new FileReader(file))

  try {
    try {
      for (line <- Iterator.continually(input.readLine()).takeWhile(_ != null)) {
        Console.println(line)
      }
    } finally {
      input.close()
    }
  } catch {
    case e: IOException => errorHandler(e)
  }
}
Example 26
Source File: SparkILoop.scala    From BigDatalog    with Apache License 2.0
package org.apache.spark.repl

import java.io.{BufferedReader, FileReader}

import Predef.{println => _, _}
import scala.util.Properties.{jdkHome, javaVersion, versionString, javaVmName}

import scala.tools.nsc.interpreter.{JPrintWriter, ILoop}
import scala.tools.nsc.Settings
import scala.tools.nsc.util.stringFromStream


  def run(code: String, sets: Settings = new Settings): String = {
    import java.io.{ BufferedReader, StringReader, OutputStreamWriter }

    stringFromStream { ostream =>
      Console.withOut(ostream) {
        val input = new BufferedReader(new StringReader(code))
        val output = new JPrintWriter(new OutputStreamWriter(ostream), true)
        val repl = new SparkILoop(input, output)

        if (sets.classpath.isDefault)
          sets.classpath.value = sys.props("java.class.path")

        repl process sets
      }
    }
  }
  def run(lines: List[String]): String = run(lines.map(_ + "\n").mkString)
} 
Example 27
Source File: CustomReceiver.scala    From BigDatalog    with Apache License 2.0
// scalastyle:off println
package org.apache.spark.examples.streaming

import java.io.{InputStreamReader, BufferedReader, InputStream}
import java.net.Socket

import org.apache.spark.{SparkConf, Logging}
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.receiver.Receiver


  private def receive() {
   var socket: Socket = null
   var userInput: String = null
   try {
     logInfo("Connecting to " + host + ":" + port)
     socket = new Socket(host, port)
     logInfo("Connected to " + host + ":" + port)
     val reader = new BufferedReader(new InputStreamReader(socket.getInputStream(), "UTF-8"))
     userInput = reader.readLine()
     while(!isStopped && userInput != null) {
       store(userInput)
       userInput = reader.readLine()
     }
     reader.close()
     socket.close()
     logInfo("Stopped receiving")
     restart("Trying to connect again")
   } catch {
     case e: java.net.ConnectException =>
       restart("Error connecting to " + host + ":" + port, e)
     case t: Throwable =>
       restart("Error receiving data", t)
   }
  }
}
// scalastyle:on println 
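For context, the typical wiring around a custom receiver; this stands in for the main method the excerpt omits, the host and port are placeholders, and the class header is assumed to take them as constructor arguments.

val sparkConf = new SparkConf().setAppName("CustomReceiver")
val ssc = new StreamingContext(sparkConf, Seconds(1))

// receiverStream registers the receiver; every store(...) call above surfaces here as a record.
val lines = ssc.receiverStream(new CustomReceiver("localhost", 9999))
lines.print()

ssc.start()
ssc.awaitTermination()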
Example 32
Source File: DetectSeriesSpec.scala    From mimir   with Apache License 2.0 5 votes vote down vote up
package mimir.statistics

import java.io.{BufferedReader, File, FileReader, StringReader}
import java.sql.SQLException

import scala.collection.JavaConversions._
import org.specs2.mutable._
import org.specs2.matcher.FileMatchers
import mimir._
import mimir.sql._
import mimir.parser._
import mimir.algebra._
import mimir.optimizer._
import mimir.ctables._
import mimir.exec._
import mimir.util._
import mimir.test._
import mimir.statistics._

object DetectSeriesSpec 
	extends SQLTestSpecification("DetectSeriesTest"){
	
	sequential
	
	def testDetectSeriesof(oper: Operator) = {
	  val (schema, df) = SparkUtils.getDataFrameWithProvFromQuery(db, oper)
    DetectSeries.seriesOf(df, schema, 0.1).collect().toSeq
	}
	
	
	"The DetectSeriesSpec" should {

		"Be able to load DetectSeriesTest1" >> {
			db.loader.loadTable("test/data/DetectSeriesTest1.csv"); ok
		}
		
		"Be able to detect Date and Timestamp type" >> {
			val queryOper = select("SELECT * FROM DetectSeriesTest1")
			val colSeq: Seq[String] = testDetectSeriesof(queryOper).map{_.columnName.toString}
			
			colSeq must have size(4)
			colSeq must contain("TRAN_TS","EXP_DT", "JOIN_DT", "DOB")
		}

		"Be able to create a new schema and detect Date and Timestamp type" >> {
			db.catalog.materializedTableProvider().createStoredTableAs(
					HardTable(Seq(
						ID("JN_DT") -> TDate(), 
						ID("JN_TS") -> TTimestamp()
					), Seq()),
					ID("DetectSeriesTest3"),
					db
        )

			val queryOper = select("SELECT * FROM DetectSeriesTest3")
			val colSeq: Seq[String] = testDetectSeriesof(queryOper).map{_.columnName.toString}
			
			colSeq must have size(2)
			colSeq must contain("JN_DT", "JN_TS")
		}

		"Be able to load DetectSeriesTest2" >> {
			db.loader.loadTable("test/data/DetectSeriesTest2.csv"); ok
		}

		"Be able to detect Date, Timestamp and increasing-decreasing Numeric type" >> {
			val queryOper = select("SELECT * FROM DetectSeriesTest2")
			val colSeq: Seq[String] = testDetectSeriesof(queryOper).map{_.columnName.toString}
			
			colSeq must have size(6)
			colSeq must contain("TRAN_TS","EXP_DT", "JOIN_DT", "DOB", "ROW_ID", "QUALITY")
		}
		
	}	
} 
Example 33
Source File: CustomReceiver.scala    From Spark-2.3.1   with Apache License 2.0 5 votes vote down vote up
// scalastyle:off println
package org.apache.spark.examples.streaming

import java.io.{BufferedReader, InputStreamReader}
import java.net.Socket
import java.nio.charset.StandardCharsets

import org.apache.spark.SparkConf
import org.apache.spark.internal.Logging
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.receiver.Receiver


  private def receive() {
   var socket: Socket = null
   var userInput: String = null
   try {
     logInfo(s"Connecting to $host : $port")
     socket = new Socket(host, port)
     logInfo(s"Connected to $host : $port")
     val reader = new BufferedReader(
       new InputStreamReader(socket.getInputStream(), StandardCharsets.UTF_8))
     userInput = reader.readLine()
     while(!isStopped && userInput != null) {
       store(userInput)
       userInput = reader.readLine()
     }
     reader.close()
     socket.close()
     logInfo("Stopped receiving")
     restart("Trying to connect again")
   } catch {
     case e: java.net.ConnectException =>
       restart(s"Error connecting to $host : $port", e)
     case t: Throwable =>
       restart("Error receiving data", t)
   }
  }
}
// scalastyle:on println 
Example 34
Source File: SaddlExample.scala    From Hands-On-Data-Analysis-with-Scala   with MIT License 5 votes vote down vote up
package handson.example.saddle

import java.io.{BufferedReader, InputStreamReader}

import org.saddle.io._

class SaddleCsvSource(url: String) extends CsvSource {
  val reader = new BufferedReader(new InputStreamReader(new java.net.URL(url).openStream()))
  override def readLine: String = {
    reader.readLine()
  }
}

object SaddlExample {
  def main(args: Array[String]): Unit = {
    val file = new SaddleCsvSource("https://data.lacity.org/api/views/nxs9-385f/rows.csv?accessType=DOWNLOAD")
    val frameOrig = CsvParser.parse(file)
    // Get the header
    val head = frameOrig.rowSlice(0,1).rowAt(0)
    // Remove header row and attach the header back as column names
    val frame = frameOrig.rowSlice(1, frameOrig.numRows).mapColIndex(i => head.at(i).get)
    // Get random sample of 2% of dataset
    val sample = frame.rfilter(_ => scala.util.Random.nextDouble() < 0.02)
    sample.print()
  }
} 
Example 35
Source File: CustomReceiver.scala    From spark1.52   with Apache License 2.0 5 votes vote down vote up
// scalastyle:off println
package org.apache.spark.examples.streaming

import java.io.{InputStreamReader, BufferedReader, InputStream}
import java.net.Socket
import org.apache.spark.{SparkConf, Logging}
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.receiver.Receiver
import java.io.File
import java.io.FileInputStream


  private def receive() {
   var socket: Socket = null
   var userInput: String = null
   try {
     logInfo("Connecting to " + host + ":" + port)
     socket = new Socket(host, port) // connect to the host
     logInfo("Connected to " + host + ":" + port)
     // obtain the input stream of the network connection
     println("isConnected:" + socket.isConnected())
     val socketInput = socket.getInputStream()
     //
     // val inputFile = new File("../data/mllib/als/testCustomReceiver.data")
     // val in = new FileInputStream(inputFile)
     // val in = new FileInputStream(socketInput)
     val reader = new BufferedReader(new InputStreamReader(socketInput, "UTF-8"))
     userInput = reader.readLine()
     while(!isStopped && userInput != null) {
       store(userInput) // store the record
       userInput = reader.readLine() // read the next line
       println("userInput:" + userInput)
     }
     reader.close() // close the stream
     socket.close() // close the connection
     logInfo("Stopped receiving")
     restart("Trying to connect again")
   } catch {
     case e: java.net.ConnectException =>
       restart("Error connecting to " + host + ":" + port, e)
     case t: Throwable =>
       restart("Error receiving data", t)
   }
  }
}
// scalastyle:on println 
Example 36
Source File: LogFile.scala    From kyuubi   with Apache License 2.0 5 votes vote down vote up
package yaooqinn.kyuubi.operation

import java.io.{BufferedReader, File, FileInputStream, FileNotFoundException, FileOutputStream, InputStreamReader, IOException, PrintStream}
import java.util.ArrayList

import scala.collection.JavaConverters._

import org.apache.commons.io.FileUtils
import org.apache.hadoop.io.IOUtils
import org.apache.kyuubi.Logging
import org.apache.spark.sql.Row

import yaooqinn.kyuubi.KyuubiSQLException

class LogFile private (
    file: File,
    private var reader: Option[BufferedReader],
    writer: PrintStream,
    @volatile private var isRemoved: Boolean = false) extends Logging {

  def this(file: File) = {
    this(file,
      LogFile.createReader(file, isRemoved = false),
      new PrintStream(new FileOutputStream(file)))
  }

  private def resetReader(): Unit = {
    reader.foreach(IOUtils.closeStream)
    reader = None
  }

  private def readResults(nLines: Long): Seq[Row] = {
    reader = reader.orElse(LogFile.createReader(file, isRemoved))

    val logs = new ArrayList[Row]()
    reader.foreach { r =>
      var i = 1
      try {
        var line: String = r.readLine()
        while ((i < nLines || nLines <= 0) && line != null) {
          logs.add(Row(line))
          line = r.readLine()
          i += 1
        }
      } catch {
        case e: FileNotFoundException =>
          val operationHandle = file.getName
          val path = file.getAbsolutePath
          val msg = if (isRemoved) {
            s"Operation[$operationHandle] has been closed and the log file $path has been removed"
          } else {
            s"Operation[$operationHandle] Log file $path is not found"
          }
          throw new KyuubiSQLException(msg, e)
      }
    }
    logs.asScala
  }

  
  def write(msg: String): Unit = {
    writer.print(msg)
  }


  def close(): Unit = synchronized {
    try {
      reader.foreach(_.close())
      writer.close()
      if (!isRemoved) {
        FileUtils.forceDelete(file)
        isRemoved = true
      }
    } catch {
      case e: IOException =>
        error(s"Failed to remove corresponding log file of operation: ${file.getName}", e)
    }
  }
}

object LogFile {

  def createReader(file: File, isRemoved: Boolean): Option[BufferedReader] = try {
    Option(new BufferedReader(new InputStreamReader(new FileInputStream(file))))
  } catch {
    case e: FileNotFoundException =>
      val operationHandle = file.getName
      val path = file.getAbsolutePath
      val msg = if (isRemoved) {
        s"Operation[$operationHandle] has been closed and the log file $path has been removed"
      } else {
        s"Operation[$operationHandle] Log file $path is not found"
      }
      throw new KyuubiSQLException(msg, e)
  }
} 
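A small usage sketch with a placeholder path: write appends raw text to the operation log, and close both releases the streams and force-deletes the file. The log file must already exist, because the auxiliary constructor opens it for reading.

import java.io.File

val logPath = new File("/tmp/operation-1234.log")   // placeholder path
logPath.createNewFile()
val log = new LogFile(logPath)
log.write("query accepted\n")
log.write("stage 1 finished\n")
log.close()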
Example 37
Source File: HDFSUtil.scala    From aerosolve   with Apache License 2.0 5 votes vote down vote up
package com.airbnb.common.ml.util

import java.io.{BufferedReader, IOException, InputStreamReader}
import java.net.URI

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}


object HDFSUtil extends ScalaLogging {

  private lazy val hadoopConfiguration = new Configuration()

  
  def lastTaskSucceed(path: String): Boolean = {
    if (dirExists(path)) {
      if (dirExists(path + "/_temporary")) {
        logger.info(s"Deleting partial data for $path.")
        deleteDirWithoutThrow(path)
        false
      } else {
        logger.info(s"$path exists")
        true
      }
    } else {
      logger.info(s"$path does not exist")
      false
    }
  }

  def dirExists(dir: String): Boolean = {
    val path = new Path(dir)
    val hdfs = FileSystem.get(
      new java.net.URI(dir), hadoopConfiguration)

    hdfs.exists(path)
  }

  def deleteDirWithoutThrow(dir: String): Unit = {
    val path = new Path(dir)
    val hdfs = FileSystem.get(
      new java.net.URI(dir), hadoopConfiguration)
    if (hdfs.exists(path)) {
      logger.warn(s"$dir exists, DELETING")
      try {
        hdfs.delete(path, true)
      } catch {
        case e: IOException => logger.error(s" exception $e")
      }
    }
  }

  def createPath(path: String): Unit = {
    val remotePath = new Path(path)
    val remoteFS = remotePath.getFileSystem(hadoopConfiguration)
    remoteFS.mkdirs(new Path(path))
  }

  def readStringFromFile(inputFile : String): String = {
    val fs = FileSystem.get(new URI(inputFile), hadoopConfiguration)
    val path = new Path(inputFile)
    val stream = fs.open(path)
    val reader = new BufferedReader(new InputStreamReader(stream))
    val str = Stream.continually(reader.readLine()).takeWhile(_ != null).mkString("\n")
    str
  }

} 
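A usage sketch with placeholder HDFS paths: lastTaskSucceed treats a leftover _temporary directory as a failed run and cleans it up, and readStringFromFile slurps a whole file into one string.

val outputDir = "hdfs://namenode:8020/jobs/output/2020-01-01"   // placeholder
if (!HDFSUtil.lastTaskSucceed(outputDir)) {
  HDFSUtil.createPath(outputDir)
}
val modelConf = HDFSUtil.readStringFromFile("hdfs://namenode:8020/jobs/config/model.conf")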
Example 38
Source File: CustomReceiver.scala    From iolap   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.examples.streaming

import java.io.{InputStreamReader, BufferedReader, InputStream}
import java.net.Socket

import org.apache.spark.{SparkConf, Logging}
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.receiver.Receiver


  private def receive() {
   var socket: Socket = null
   var userInput: String = null
   try {
     logInfo("Connecting to " + host + ":" + port)
     socket = new Socket(host, port)
     logInfo("Connected to " + host + ":" + port)
     val reader = new BufferedReader(new InputStreamReader(socket.getInputStream(), "UTF-8"))
     userInput = reader.readLine()
     while(!isStopped && userInput != null) {
       store(userInput)
       userInput = reader.readLine()
     }
     reader.close()
     socket.close()
     logInfo("Stopped receiving")
     restart("Trying to connect again")
   } catch {
     case e: java.net.ConnectException =>
       restart("Error connecting to " + host + ":" + port, e)
     case t: Throwable =>
       restart("Error receiving data", t)
   }
  }
} 
Example 39
Source File: CustomReceiver.scala    From multi-tenancy-spark   with Apache License 2.0 5 votes vote down vote up
// scalastyle:off println
package org.apache.spark.examples.streaming

import java.io.{BufferedReader, InputStreamReader}
import java.net.Socket
import java.nio.charset.StandardCharsets

import org.apache.spark.SparkConf
import org.apache.spark.internal.Logging
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.receiver.Receiver


  private def receive() {
   var socket: Socket = null
   var userInput: String = null
   try {
     logInfo("Connecting to " + host + ":" + port)
     socket = new Socket(host, port)
     logInfo("Connected to " + host + ":" + port)
     val reader = new BufferedReader(
       new InputStreamReader(socket.getInputStream(), StandardCharsets.UTF_8))
     userInput = reader.readLine()
     while(!isStopped && userInput != null) {
       store(userInput)
       userInput = reader.readLine()
     }
     reader.close()
     socket.close()
     logInfo("Stopped receiving")
     restart("Trying to connect again")
   } catch {
     case e: java.net.ConnectException =>
       restart("Error connecting to " + host + ":" + port, e)
     case t: Throwable =>
       restart("Error receiving data", t)
   }
  }
}
// scalastyle:on println 
Example 40
Source File: HadoopFSHelpers.scala    From morpheus   with Apache License 2.0 5 votes vote down vote up
package org.opencypher.morpheus.api.io.fs

import java.io.{BufferedReader, BufferedWriter, InputStreamReader, OutputStreamWriter}

import org.apache.hadoop.fs.{FileSystem, Path}
import org.opencypher.morpheus.api.io.util.FileSystemUtils.using

object HadoopFSHelpers {

  implicit class RichHadoopFileSystem(fileSystem: FileSystem) {

    protected def createDirectoryIfNotExists(path: Path): Unit = {
      if (!fileSystem.exists(path)) {
        fileSystem.mkdirs(path)
      }
    }

    def listDirectories(path: String): List[String] = {
      val p = new Path(path)
      createDirectoryIfNotExists(p)
      fileSystem.listStatus(p)
        .filter(_.isDirectory)
        .map(_.getPath.getName)
        .toList
    }

    def deleteDirectory(path: String): Unit = {
      fileSystem.delete(new Path(path),  true)
    }

    def readFile(path: String): String = {
      using(new BufferedReader(new InputStreamReader(fileSystem.open(new Path(path)), "UTF-8"))) { reader =>
        def readLines = Stream.cons(reader.readLine(), Stream.continually(reader.readLine))
        readLines.takeWhile(_ != null).mkString
      }
    }

    def writeFile(path: String, content: String): Unit = {
      val p = new Path(path)
      val parentDirectory = p.getParent
      createDirectoryIfNotExists(parentDirectory)
      using(fileSystem.create(p)) { outputStream =>
        using(new BufferedWriter(new OutputStreamWriter(outputStream, "UTF-8"))) { bufferedWriter =>
          bufferedWriter.write(content)
        }
      }
    }
  }

} 
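The enrichment is activated by importing the object's members; a minimal sketch with placeholder paths follows.

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.FileSystem
import org.opencypher.morpheus.api.io.fs.HadoopFSHelpers._

val fs = FileSystem.get(new Configuration())
fs.writeFile("/tmp/morpheus-example/readme.txt", "hello")   // parent directories are created as needed
val text = fs.readFile("/tmp/morpheus-example/readme.txt")
val dirs = fs.listDirectories("/tmp/morpheus-example")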
Example 41
Source File: Using.scala    From Argus-SAF   with Apache License 2.0 5 votes vote down vote up
package org.argus.jawa.core.compiler.compile.io

import java.io.{Closeable, FileInputStream, FileOutputStream, InputStream, OutputStream, File => JavaFile}
import java.io.{BufferedInputStream, BufferedOutputStream, InputStreamReader, OutputStreamWriter}
import java.io.{BufferedReader, BufferedWriter}
import java.util.zip.GZIPInputStream
import java.net.URL
import java.nio.channels.FileChannel
import java.nio.charset.Charset
import java.util.jar.{JarFile, JarInputStream, JarOutputStream}
import java.util.zip.{GZIPOutputStream, ZipEntry, ZipFile, ZipInputStream, ZipOutputStream}

import ErrorHandling.translate

import scala.reflect.{Manifest => SManifest}

abstract class Using[Source, T]
{
  protected def open(src: Source): T
  def apply[R](src: Source)(f: T => R): R =
  {
    val resource = open(src)
    try { f(resource) }
    finally { close(resource) }
  }
  protected def close(out: T): Unit
}
abstract class WrapUsing[Source, T](implicit srcMf: SManifest[Source], targetMf: SManifest[T]) extends Using[Source, T]
{
  protected def label[S](m: SManifest[S]): String = m.runtimeClass.getSimpleName
  protected def openImpl(source: Source): T
  protected final def open(source: Source): T =
    translate("Error wrapping " + label(srcMf) + " in " + label(targetMf) + ": ") { openImpl(source) }
}
trait OpenFile[T] extends Using[JavaFile, T]
{
  protected def openImpl(file: JavaFile): T
  protected final def open(file: JavaFile): T =
  {
    val parent = file.getParentFile
    if(parent != null)
      IO.createDirectory(parent)
    openImpl(file)
  }
}
object Using
{
  def wrap[Source, T<: Closeable](openF: Source => T)(implicit srcMf: SManifest[Source], targetMf: SManifest[T]): Using[Source,T] =
    wrap(openF, closeCloseable)
  def wrap[Source, T](openF: Source => T, closeF: T => Unit)(implicit srcMf: SManifest[Source], targetMf: SManifest[T]): Using[Source,T] =
    new WrapUsing[Source, T]
    {
      def openImpl(source: Source): T = openF(source)
      def close(t: T): Unit = closeF(t)
    }

  def resource[Source, T <: Closeable](openF: Source => T): Using[Source,T] =
    resource(openF, closeCloseable)
  def resource[Source, T](openF: Source => T, closeF: T => Unit): Using[Source,T] =
    new Using[Source,T]
    {
      def open(s: Source): T = openF(s)
      def close(s: T): Unit = closeF(s)
    }
  def file[T <: Closeable](openF: JavaFile => T): OpenFile[T] = file(openF, closeCloseable)
  def file[T](openF: JavaFile => T, closeF: T => Unit): OpenFile[T] =
    new OpenFile[T]
    {
      def openImpl(file: JavaFile): T = openF(file)
      def close(t: T): Unit = closeF(t)
    }
  private def closeCloseable[T <: Closeable]: T => Unit = _.close()

  def bufferedOutputStream: Using[OutputStream, BufferedOutputStream] = wrap((out: OutputStream) => new BufferedOutputStream(out) )
  def bufferedInputStream: Using[InputStream, BufferedInputStream] = wrap((in: InputStream) => new BufferedInputStream(in) )
  def fileOutputStream(append: Boolean = false): OpenFile[BufferedOutputStream] = file(f => new BufferedOutputStream(new FileOutputStream(f, append)))
  def fileInputStream: OpenFile[BufferedInputStream] = file(f => new BufferedInputStream(new FileInputStream(f)))
  def urlInputStream: Using[URL, BufferedInputStream] = resource((u: URL) => translate("Error opening " + u + ": ")(new BufferedInputStream(u.openStream)))
  def fileOutputChannel: OpenFile[FileChannel] = file(f => new FileOutputStream(f).getChannel)
  def fileInputChannel: OpenFile[FileChannel] = file(f => new FileInputStream(f).getChannel)
  def fileWriter(charset: Charset = IO.utf8, append: Boolean = false): OpenFile[BufferedWriter] =
    file(f => new BufferedWriter(new OutputStreamWriter(new FileOutputStream(f, append), charset)) )
  def fileReader(charset: Charset): OpenFile[BufferedReader] = file(f => new BufferedReader(new InputStreamReader(new FileInputStream(f), charset)) )
  def urlReader(charset: Charset): Using[URL, BufferedReader] = resource((u: URL) => new BufferedReader(new InputStreamReader(u.openStream, charset)))
  def jarFile(verify: Boolean): OpenFile[JarFile] = file(f => new JarFile(f, verify), (_: JarFile).close())
  def zipFile: OpenFile[ZipFile] = file(f => new ZipFile(f), (_: ZipFile).close())
  def streamReader: Using[(InputStream, Charset), InputStreamReader] = wrap{ (_: (InputStream, Charset)) match { case (in, charset) => new InputStreamReader(in, charset) } }
  def gzipInputStream: Using[InputStream, GZIPInputStream] = wrap((in: InputStream) => new GZIPInputStream(in, 8192) )
  def zipInputStream: Using[InputStream, ZipInputStream] = wrap((in: InputStream) => new ZipInputStream(in))
  def zipOutputStream: Using[OutputStream, ZipOutputStream] = wrap((out: OutputStream) => new ZipOutputStream(out))
  def gzipOutputStream: Using[OutputStream, GZIPOutputStream] = wrap((out: OutputStream) => new GZIPOutputStream(out, 8192), (_: GZIPOutputStream).finish())
  def jarOutputStream: Using[OutputStream, JarOutputStream] = wrap((out: OutputStream) => new JarOutputStream(out))
  def jarInputStream: Using[InputStream, JarInputStream] = wrap((in: InputStream) => new JarInputStream(in))
  def zipEntry(zip: ZipFile): Using[ZipEntry, InputStream] = resource((entry: ZipEntry) =>
    translate("Error opening " + entry.getName + " in " + zip + ": ") { zip.getInputStream(entry) } )
} 
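A usage sketch for the combinators above; the file names are placeholders. Each Using value is applied to a source and a function, and the resource is closed when the function returns or throws.

import java.io.{File => JavaFile}
import java.nio.charset.StandardCharsets

val firstLine: String =
  Using.fileReader(StandardCharsets.UTF_8)(new JavaFile("build.sbt")) { reader =>
    reader.readLine()
  }

Using.fileOutputStream(append = true)(new JavaFile("out.log")) { out =>
  out.write("done\n".getBytes(StandardCharsets.UTF_8))
}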
Example 42
Source File: JVMUtil.scala    From Argus-SAF   with Apache License 2.0 5 votes vote down vote up
package org.argus.jawa.core.util

import java.io.{BufferedReader, InputStreamReader}
import java.net.URLClassLoader
import java.text.NumberFormat

 
object JVMUtil {
	def startSecondJVM[C](clazz: Class[C], jvmArgs: List[String], args: List[String], redirectStream: Boolean): Int = {
    val separator = System.getProperty("file.separator")
    val classpath = Thread.currentThread().getContextClassLoader.asInstanceOf[URLClassLoader].getURLs.map(_.getPath()).reduce((c1, c2) => c1 + java.io.File.pathSeparator + c2)
    val path = System.getProperty("java.home") + separator + "bin" + separator + "java"
    val commands: IList[String] = List(path) ::: jvmArgs ::: List("-cp", classpath, clazz.getCanonicalName.stripSuffix("$")) ::: args
    import scala.collection.JavaConverters._
    val processBuilder = new ProcessBuilder(commands.asJava)
    processBuilder.redirectErrorStream(redirectStream)
    val process = processBuilder.start()
    val is = process.getInputStream
    val isr = new InputStreamReader(is)
    val br = new BufferedReader(isr)
    var line = br.readLine()
    while (line != null) {
      println(line)
      line = br.readLine()
    }
    process.waitFor()
  }
  
  def showMemoryUsage(): Unit = {
    val runtime = Runtime.getRuntime
    val format = NumberFormat.getInstance()
    
    val sb = new StringBuilder()
    val maxMemory = runtime.maxMemory()
    val allocatedMemory = runtime.totalMemory()
    val freeMemory = runtime.freeMemory()
    
    sb.append("free memory: " + format.format(freeMemory / 1024 / 1024) + " ")
    sb.append("allocated memory: " + format.format(allocatedMemory / 1024 / 1024) + " ")
    sb.append("max memory: " + format.format(maxMemory / 1024 / 1024) + " ")
    sb.append("total free memory: " + format.format((freeMemory + (maxMemory - allocatedMemory)) / 1024 / 1024) + " ")
    println(sb.toString())
  }
} 
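A sketch of launching a second JVM; ChildMain and LaunchExample are placeholder names, and the child process inherits the parent's classpath.

object ChildMain {
  def main(args: Array[String]): Unit =
    println("hello from the child JVM: " + args.mkString(" "))
}

object LaunchExample {
  def main(args: Array[String]): Unit = {
    // The class name is passed through to the child JVM's command line.
    val exitCode = JVMUtil.startSecondJVM(ChildMain.getClass, List("-Xmx512m"), List("--verbose"), redirectStream = true)
    println(s"child JVM exited with code $exitCode")
    JVMUtil.showMemoryUsage()
  }
}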
Example 43
Source File: LibraryAPISummary.scala    From Argus-SAF   with Apache License 2.0 5 votes vote down vote up
package org.argus.jawa.core

import java.io.{BufferedReader, FileReader, StringReader}

import org.argus.jawa.core.elements.JawaType
import org.argus.jawa.core.util._

trait LibraryAPISummary {
  def isLibraryClass: JawaType => Boolean
}


class NoneLibraryAPISummary extends LibraryAPISummary {
  private val appPackages: MSet[String] = msetEmpty
  private val appPackagePrefixes: MSet[String] = msetEmpty
  private def doLoad(rdr: BufferedReader): Unit = {
    var line = Option(rdr.readLine())
    while(line.isDefined){
      line match {
        case Some(str) =>
          if(str.endsWith(".*"))
            appPackagePrefixes += str.substring(0, str.length - 2)
          else appPackages += str
        case None =>
      }
      line = Option(rdr.readLine())
    }
  }
  def load(filePath: String): Unit = {
    val rdr: BufferedReader = new BufferedReader(new FileReader(filePath))
    doLoad(rdr)
    rdr.close()
  }
  def loadFromString(str: String): Unit = {
    val rdr: BufferedReader = new BufferedReader(new StringReader(str))
    doLoad(rdr)
    rdr.close()
  }
  override def isLibraryClass: JawaType => Boolean = { typ =>
    !appPackages.contains(typ.getPackageName) && !appPackagePrefixes.exists(typ.getPackageName.startsWith)
  }
}

object NoLibraryAPISummary extends LibraryAPISummary {
  override def isLibraryClass: JawaType => Boolean = _ => false
} 
Example 44
Source File: SparkSqlUtils.scala    From HadoopLearning   with MIT License 5 votes vote down vote up
package com.c503.utils

import java.io.{BufferedInputStream, BufferedReader, FileInputStream, InputStreamReader}
import java.nio.file.Path

import com.google.common.io.Resources
import org.apache.log4j.{Level, Logger}
import org.apache.mesos.Protos.Resource
import org.apache.spark.sql.SparkSession

import scala.io.Source


object SparkSqlUtils {

  // getPathByName is defined elsewhere in the original object (not shown in this
  // excerpt); it resolves the SQL file name to a readable path.
  def readSqlByPath(sqlPath: String): String = {
    val buf = new StringBuilder
    val path = this.getPathByName(sqlPath)
    val file = Source.fromFile(path)
    for (line <- file.getLines) {
      buf ++= line + "\n"
    }
    file.close
    buf.toString()
  }


} 
Example 45
Source File: UpdateChecker.scala    From kotlin-plugin   with MIT License 5 votes vote down vote up
package kotlin

import java.io.{InputStreamReader, BufferedReader, StringWriter}

import argonaut._, Argonaut._

import scala.concurrent.Future
import scala.util.{Failure, Success}

object UpdateChecker {
  import scala.concurrent.ExecutionContext.Implicits.global
  type Result = (Set[String],String)
  type Callback[A] = Either[Throwable,Result] => A

  def apply[A](user: String, repo: String, name: String)(result: Callback[A]): Unit = {
    val bintray = new java.net.URL(
      s"https://api.bintray.com/packages/$user/$repo/$name")
    Future {
      val uc = bintray.openConnection()
      val in = new BufferedReader(new InputStreamReader(uc.getInputStream, "utf-8"))
      try {
        val sw = new StringWriter
        val buf = Array.ofDim[Char](8192)
        Stream.continually(in.read(buf, 0, 8192)) takeWhile (
          _ != -1) foreach (sw.write(buf, 0, _))
        sw.toString
      } finally {
        in.close()
      }
    } onComplete {
      case Success(json) =>
        val decoded = json.decode[PackageInfo]
        val res: Either[Throwable, Result] = decoded match {
          case Left(Left(str)) =>
            Left(new IllegalArgumentException(str))
          case Left(Right(cursorHistory)) =>
            Left(new IllegalArgumentException(cursorHistory._1))
          case Right(packageInfo) =>
            Right(packageInfo.versions.toSet -> packageInfo.version)
        }
        result(res)
      case Failure(t) => result(Left(t))
    }
  }

  implicit def PackageInfoCodecJson: CodecJson[PackageInfo] = casecodec3(
    PackageInfo.apply, PackageInfo.unapply)("name", "latest_version", "versions")

  case class PackageInfo(name: String, version: String, versions: List[String])
} 
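A usage sketch with placeholder Bintray coordinates; the callback receives either the set of published versions together with the latest one, or the failure.

UpdateChecker("someuser", "maven", "kotlin-plugin") {
  case Right((versions, latest)) =>
    println(s"latest version: $latest (${versions.size} published)")
  case Left(error) =>
    println("update check failed: " + error.getMessage)
}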
Example 46
Source File: MarkdownPagesEndpoint.scala    From udash-core   with Apache License 2.0 5 votes vote down vote up
package io.udash.web.guide.markdown

import java.io.{BufferedReader, File, FileReader}
import java.time.Instant
import java.util.concurrent.ConcurrentHashMap

import com.avsystem.commons._
import com.vladsch.flexmark.ext.toc.TocExtension
import com.vladsch.flexmark.html.HtmlRenderer
import com.vladsch.flexmark.parser.Parser

import scala.concurrent.{ExecutionContext, Future}

final class MarkdownPagesEndpoint(guideResourceBase: String)(implicit ec: ExecutionContext) extends MarkdownPageRPC {

  private val tocExtension = TocExtension.create
  private val parser = Parser.builder.extensions(JList(tocExtension)).build
  private val renderer = HtmlRenderer.builder.extensions(JList(tocExtension)).build
  private val renderedPages = new ConcurrentHashMap[MarkdownPage, (Future[String], Instant)]

  private def render(file: File): Future[String] = Future {
    val reader = new BufferedReader(new FileReader(file))
    val document = parser.parseReader(reader)
    renderer.render(document)
  }

  override def loadContent(page: MarkdownPage): Future[String] = {
    val (result, _) = renderedPages.compute(page, { (_, cached) =>
      val pageFile = new File(guideResourceBase + page.file)
      cached.opt.filter {
        case (currentRender, renderedInstant) =>
          currentRender.value.exists(_.isSuccess) && renderedInstant.toEpochMilli >= pageFile.lastModified()
      }.getOrElse((render(pageFile), Instant.ofEpochMilli(pageFile.lastModified())))
    })
    result
  }
} 
Example 47
Source File: CustomReceiver.scala    From Learning-Spark-SQL   with MIT License 5 votes vote down vote up
import java.io.{BufferedReader, InputStreamReader}
import java.net.Socket
import java.nio.charset.StandardCharsets

import org.apache.spark.SparkConf
import org.apache.spark.internal.Logging
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.receiver.Receiver


  private def receive() {
   var socket: Socket = null
   var userInput: String = null
   try {
     println("Connecting to " + host + ":" + port)
     socket = new Socket(host, port)
     println("Connected to " + host + ":" + port)
     val reader = new BufferedReader(
       new InputStreamReader(socket.getInputStream(), StandardCharsets.UTF_8))
     userInput = reader.readLine()
     while(!isStopped && userInput != null) {
       store(userInput)
       userInput = reader.readLine()
     }
     reader.close()
     socket.close()
     println("Stopped receiving")
     restart("Trying to connect again")
   } catch {
     case e: java.net.ConnectException =>
       restart("Error connecting to " + host + ":" + port, e)
     case t: Throwable =>
       restart("Error receiving data", t)
   }
  }
} 
Example 48
Source File: InternalHttpClient.scala    From wookiee   with Apache License 2.0 5 votes vote down vote up
package com.webtrends.harness.http

import java.io.{BufferedReader, InputStreamReader}
import java.net.{HttpURLConnection, URLConnection}
import java.util.zip.{GZIPInputStream, InflaterInputStream}


  case class HttpResponseData(
                               statusLine: String,
                               content: String,
                               headers: collection.mutable.Map[String, String]) {

    private val startIndex = statusLine.indexOf(" ") + 1
    val status = statusLine.substring(startIndex, startIndex + 3)

    override def toString: String = {
      val sb = new StringBuilder
      sb.append(statusLine + "\n")
      headers.foreach(m => sb.append(m._1 + "=" + m._2 + "\n"))
      sb.append("\n" + content + "\n")
      sb.toString()
    }

  }
} 
Example 49
Source File: ForgerBoxMerklePathInfoTest.scala    From Sidechains-SDK   with MIT License 5 votes vote down vote up
package com.horizen.validation

import java.io.{BufferedReader, BufferedWriter, FileReader, FileWriter}
import java.lang.{Byte => JByte}
import java.util
import java.util.{ArrayList => JArrayList}

import com.horizen.box.ForgerBox
import com.horizen.fixtures.BoxFixture
import com.horizen.utils.{BytesUtils, ForgerBoxMerklePathInfo, ForgerBoxMerklePathInfoSerializer, MerklePath, Pair}
import com.horizen.vrf.VrfGeneratedDataProvider
import org.junit.Assert.{assertEquals, assertNotEquals, assertTrue}
import org.junit.Test
import org.scalatest.junit.JUnitSuite

class ForgerBoxMerklePathInfoTest extends JUnitSuite with BoxFixture {
  val vrfGenerationSeed = 907
  val vrfGenerationPrefix = "ForgerBoxMerklePathInfoTest"

  //uncomment if you want update vrf related data
  if (false) {
    VrfGeneratedDataProvider.updateVrfPublicKey(vrfGenerationPrefix, vrfGenerationSeed)
  }

  val forgerBox: ForgerBox = getForgerBox(
    getPrivateKey25519("123".getBytes()).publicImage(),
    1000L,
    100L,
    getPrivateKey25519("456".getBytes()).publicImage(),
    VrfGeneratedDataProvider.getVrfPublicKey(vrfGenerationPrefix, vrfGenerationSeed)
  )
  val emptyMerklePath: MerklePath = new MerklePath(new JArrayList())

  val nonEmptyMerklePath: MerklePath = new MerklePath(util.Arrays.asList(
    new Pair[JByte, Array[Byte]](0.toByte, BytesUtils.fromHexString("29d000eee85f08b6482026be2d92d081d6f9418346e6b2e9fe2e9b985f24ed1e")),
    new Pair[JByte, Array[Byte]](1.toByte, BytesUtils.fromHexString("61bfbdf7038dc7f21e2bcf193faef8e6caa8222af016a6ed86b9e9d860f046df"))
  ))

  @Test
  def comparison(): Unit = {
    assertNotEquals("Box merkle path info expected to be different.", emptyMerklePath, nonEmptyMerklePath)
  }

  @Test
  def serialization(): Unit = {
    // Test 1: empty merkle path (single element in merkle tree)
    val boxWithEmptyPath = ForgerBoxMerklePathInfo(forgerBox, emptyMerklePath)
    var boxBytes = boxWithEmptyPath.bytes
    var deserializedBox = ForgerBoxMerklePathInfoSerializer.parseBytes(boxBytes)
    assertEquals("Deserialized box merkle path info hashCode expected to be equal to the original one.", boxWithEmptyPath.hashCode(), deserializedBox.hashCode())
    assertEquals("Deserialized box merkle path info expected to be equal to the original one.", boxWithEmptyPath, deserializedBox)


    // Test 2: non empty merkle path
    val boxWithNonEmptyPath = ForgerBoxMerklePathInfo(forgerBox, nonEmptyMerklePath)
    boxBytes = boxWithNonEmptyPath.bytes
    deserializedBox = ForgerBoxMerklePathInfoSerializer.parseBytes(boxBytes)
    assertEquals("Deserialized box merkle path info hashCode expected to be equal to the original one.", boxWithNonEmptyPath.hashCode(), deserializedBox.hashCode())
    assertEquals("Deserialized box merkle path info expected to be equal to the original one.", boxWithNonEmptyPath, deserializedBox)

    // Set to true and run if you want to update regression data.
    if (false) {
      val out = new BufferedWriter(new FileWriter("src/test/resources/boxmerklepathinfo_hex"))
      out.write(BytesUtils.toHexString(boxBytes))
      out.close()
    }

    // Test 3: try to deserialize broken bytes.
    assertTrue("ForgerBoxMerklePathInfo expected to be not parsed due to broken data.", ForgerBoxMerklePathInfoSerializer.parseBytesTry("broken bytes".getBytes).isFailure)
  }

  @Test
  def serializationRegression(): Unit = {
    var bytes: Array[Byte] = null
    try {
      val classLoader = getClass.getClassLoader
      val file = new FileReader(classLoader.getResource("boxmerklepathinfo_hex").getFile)
      bytes = BytesUtils.fromHexString(new BufferedReader(file).readLine())
    }
    catch {
      case e: Exception =>
        fail(e.toString)
    }

    val boxMerklePathInfoTry = ForgerBoxMerklePathInfoSerializer.parseBytesTry(bytes)
    assertTrue("ForgerBoxMerklePathInfo expected to by parsed.", boxMerklePathInfoTry.isSuccess)

    val boxWithNonEmptyPath = ForgerBoxMerklePathInfo(forgerBox, nonEmptyMerklePath)
    assertEquals("Parsed info is different to original.", boxWithNonEmptyPath, boxMerklePathInfoTry.get)
  }
} 
Example 50
Source File: SigProofTest.scala    From Sidechains-SDK   with MIT License 5 votes vote down vote up
package com.horizen

import java.io.{BufferedReader, File, FileReader}
import java.util.Optional
import java.{lang, util}

import com.horizen.box.WithdrawalRequestBox
import com.horizen.box.data.WithdrawalRequestBoxData
import com.horizen.cryptolibprovider.{SchnorrFunctionsImplZendoo, ThresholdSignatureCircuitImplZendoo}
import com.horizen.proposition.MCPublicKeyHashProposition
import com.horizen.schnorrnative.SchnorrSecretKey
import com.horizen.utils.BytesUtils
import org.junit.Assert.{assertEquals, assertTrue}
import org.junit.{Ignore, Test}

import scala.collection.JavaConverters._
import scala.util.Random

class SigProofTest {
  private val classLoader: ClassLoader = getClass.getClassLoader
  private val sigCircuit: ThresholdSignatureCircuitImplZendoo = new ThresholdSignatureCircuitImplZendoo()
  private val schnorrFunctions: SchnorrFunctionsImplZendoo = new SchnorrFunctionsImplZendoo()

  private def buildSchnorrPrivateKey(index: Int): SchnorrSecretKey = {
    var bytes: Array[Byte] = null
    try {
      val resourceName = "schnorr_sk0"+ index + "_hex"
      val file = new FileReader(classLoader.getResource(resourceName).getFile)
      bytes = BytesUtils.fromHexString(new BufferedReader(file).readLine())
    }
    catch {
      case e: Exception =>
        assertEquals(e.toString(), true, false)
    }

    SchnorrSecretKey.deserialize(bytes)
  }

  //Test will take around 2 minutes, enable for sanity checking of ThresholdSignatureCircuit
  @Ignore
  @Test
  def simpleCheck(): Unit = {
    val keyPairsLen = 7
    val threshold = 5 //hardcoded value

    val keyPairs = (0 until keyPairsLen).view.map(buildSchnorrPrivateKey).map(secret => (secret, secret.getPublicKey))
    val publicKeysBytes: util.List[Array[Byte]] = keyPairs.map(_._2.serializePublicKey()).toList.asJava
    val provingKeyPath = new File(classLoader.getResource("sample_proving_key_7_keys_with_threshold_5").getFile).getAbsolutePath;
    val verificationKeyPath = new File(classLoader.getResource("sample_vk_7_keys_with_threshold_5").getFile).getAbsolutePath;

    val sysConstant = sigCircuit.generateSysDataConstant(publicKeysBytes, threshold)

    val mcBlockHash = Array.fill(32)(Random.nextInt().toByte)
    val previousMcBlockHash = Array.fill(32)(Random.nextInt().toByte)

    val wb: util.List[WithdrawalRequestBox] = Seq(new WithdrawalRequestBox(new WithdrawalRequestBoxData(new MCPublicKeyHashProposition(Array.fill(20)(Random.nextInt().toByte)), 2345), 42)).asJava

    val messageToBeSigned = sigCircuit.generateMessageToBeSigned(wb, mcBlockHash, previousMcBlockHash)

    val emptySigs = List.fill[Optional[Array[Byte]]](keyPairsLen - threshold)(Optional.empty[Array[Byte]]())
    val signatures: util.List[Optional[Array[Byte]]] = (keyPairs
      .map{case (secret, public) => schnorrFunctions.sign(secret.serializeSecretKey(), public.serializePublicKey(), messageToBeSigned)}
      .map(b => Optional.of(b))
      .take(threshold)
      .toList ++ emptySigs)
      .asJava

    val proofAndQuality: utils.Pair[Array[Byte], lang.Long] = sigCircuit.createProof(wb, mcBlockHash, previousMcBlockHash, publicKeysBytes, signatures, threshold, provingKeyPath)

    val result = sigCircuit.verifyProof(wb, mcBlockHash, previousMcBlockHash, proofAndQuality.getValue, proofAndQuality.getKey, sysConstant, verificationKeyPath)

    assertTrue("Proof verification expected to be successfully", result)
  }

} 
Example 51
Source File: RpcMainchainNodeApi.scala    From Sidechains-SDK   with MIT License 5 votes vote down vote up
package com.horizen.mainchain.api

import java.io.{BufferedReader, InputStreamReader}

import com.fasterxml.jackson.databind.{ObjectMapper, SerializationFeature}
import com.horizen.SidechainSettings
import com.horizen.serialization.ApplicationJsonSerializer
import com.horizen.utils.BytesUtils

class RpcMainchainNodeApi(val sidechainSettings: SidechainSettings)
  extends MainchainNodeApi
{

  private lazy val isOsWindows = {
    val osname = System.getProperty("os.name", "generic").toLowerCase()
    osname.contains("win")
  }

  private val clientPath = sidechainSettings.websocket.zencliCommandLine + " " +
    (sidechainSettings.genesisData.mcNetwork match {
      case "regtest" => "-regtest "
      case "testnet" => "-testnet "
      case _ => ""
    })

  private def callRpc(params: String) : String = {
    System.out.println(clientPath + " " + params)
    val process = Runtime.getRuntime.exec(clientPath + " " + params)

    val stdInput: BufferedReader = new BufferedReader(new InputStreamReader(process.getInputStream))

    val stdError: BufferedReader = new BufferedReader(new InputStreamReader(process.getErrorStream))

    val error = stdError.readLine()
    if(error != null)
      throw new IllegalStateException("Error: " + error)

    stdInput.readLine
  }

  private def encloseJsonParameter(parameter: String): String = {
    if (isOsWindows)
      "\"" + parameter.replace("\"", "\\\"") + "\""
    else
      "'" + parameter + "'"
  }

  private def encloseStringParameter(parameter: String): String = {
    "\"" + parameter + "\""
  }

  override def getSidechainInfo: SidechainInfoResponse = {
    val objectMapper = new ObjectMapper()
    val response = callRpc("getscinfo")

    objectMapper.readValue(response, classOf[SidechainInfoResponse])
  }

  override def sendCertificate(certificateRequest: SendCertificateRequest): SendCertificateResponse = {
    val serializer = ApplicationJsonSerializer.getInstance() // TODO: maybe it's better to construct object mapper from scratch
    serializer.setDefaultConfiguration()
    val objectMapper = serializer.getObjectMapper
    objectMapper.disable(SerializationFeature.INDENT_OUTPUT)

    val response = callRpc("send_certificate "
      + encloseStringParameter(BytesUtils.toHexString(certificateRequest.sidechainId)) + " "
      + certificateRequest.epochNumber + " "
      + certificateRequest.quality + " "
      + encloseStringParameter(BytesUtils.toHexString(certificateRequest.endEpochBlockHash)) + " "
      + encloseStringParameter(BytesUtils.toHexString(certificateRequest.proofBytes)) + " "
      + encloseJsonParameter(objectMapper.writeValueAsString(certificateRequest.backwardTransfers)) + " "
      + certificateRequest.fee
      )

    SendCertificateResponse(BytesUtils.fromHexString(response))
  }
} 
Example 52
Source File: ModelSource.scala    From spark-ml-serving   with Apache License 2.0 5 votes vote down vote up
package io.hydrosphere.spark_ml_serving.common

import java.io.{InputStreamReader, BufferedReader}

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{Path, FileSystem}

case class ModelSource(
  root: String,
  fs: FileSystem
) {

  def readFile(path: String): String = {
    val fsPath = filePath(path)
    val reader = new BufferedReader(new InputStreamReader(fs.open(fsPath)))

    val builder      = new StringBuilder()
    var line: String = null
    while ({ line = reader.readLine(); line != null }) {
      builder.append(line + "\n")
    }
    builder.mkString
  }

  def findFile(dir: String, recursive: Boolean, f: String => Boolean): Option[Path] = {
    val dirPath = filePath(dir)
    if (fs.exists(dirPath) & fs.isDirectory(dirPath)) {
      val iter = fs.listFiles(dirPath, recursive)
      while (iter.hasNext) {
        val st = iter.next()
        if (st.isFile && f(st.getPath.getName)) return Some(st.getPath)
      }
      None
    } else {
      None
    }
  }

  def filePath(path: String): Path = {
    new Path(s"$root/$path")
  }

}

object ModelSource {

  def local(path: String): ModelSource = {
    ModelSource(path, FileSystem.getLocal(new Configuration()))
  }

  def hadoop(path: String, conf: Configuration): ModelSource = {
    val fs = FileSystem.get(conf)
    ModelSource(path, fs)
  }

} 
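A usage sketch with placeholder model directories: local wraps the local file system, hadoop resolves against the configured default FS, and findFile scans a subdirectory for the first matching file name.

import org.apache.hadoop.conf.Configuration

val localSource = ModelSource.local("/tmp/models/my-pipeline")          // placeholder path
val metadataJson = localSource.readFile("metadata/part-00000")

val hdfsSource = ModelSource.hadoop("/models/my-pipeline", new Configuration())
val firstStageFile = hdfsSource.findFile("stages", true, name => name.endsWith(".json"))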
Example 53
Source File: ProcessInterpreter.scala    From Linkis   with Apache License 2.0 5 votes vote down vote up
package com.webank.wedatasphere.linkis.engine.Interpreter

import java.io.{BufferedReader, InputStreamReader, PrintWriter}
import java.util.concurrent.TimeUnit

import com.webank.wedatasphere.linkis.common.utils.{Logging, Utils}
import com.webank.wedatasphere.linkis.engine.spark.common.{LineBufferedStream, Starting, State, _}
import com.webank.wedatasphere.linkis.scheduler.executer.{ErrorExecuteResponse, ExecuteResponse, SuccessExecuteResponse}
import org.apache.commons.io.IOUtils
import org.json4s._

import scala.concurrent.duration.Duration
import scala.concurrent.{Await, ExecutionContext, Future}


abstract class ProcessInterpreter(process: Process) extends Interpreter with Logging {

  implicit val executor: ExecutionContext = ExecutionContext.global

  protected[this] var _state: State = Starting()

  protected[this] val stdin = new PrintWriter(process.getOutputStream)
  protected[this] val stdout = new BufferedReader(new InputStreamReader(process.getInputStream()), 1)
  protected[this] val errOut = new LineBufferedStream(process.getErrorStream())

  override def state: State = _state

  override def execute(code: String): ExecuteResponse = {
    if(code == "sc.cancelAllJobs" || code == "sc.cancelAllJobs()") {
      sendExecuteRequest(code)
    }
    _state match {
      case (Dead() | ShuttingDown() | Error() | Success()) =>
        throw new IllegalStateException("interpreter is not running")
      case Idle() =>
        require(state == Idle())
        code match {
          case "SHUTDOWN" =>
            sendShutdownRequest()
            close()
            ErrorExecuteResponse("shutdown",new Exception("shutdown"))
          case _ =>
            _state = Busy()
            sendExecuteRequest(code) match {
              case Some(rep) =>
                _state = Idle()
               // ExecuteComplete(rep)
                SuccessExecuteResponse()
              case None =>
                _state = Error()
                val errorMsg = errOut.lines.mkString(", ")
                throw new Exception(errorMsg)
            }
        }
      case _ => throw new IllegalStateException(s"interpreter is in ${_state} state, cannot do query.")
    }
  }

  Future {
    val exitCode = process.waitFor()
    if (exitCode != 0) {
      errOut.lines.foreach(println)
      println(getClass.getSimpleName+" has stopped with exit code " + process.exitValue)
      _state = Error()
    } else {
      println(getClass.getSimpleName+" has finished.")
      _state = Success()
    }
  }

  protected def waitUntilReady(): Unit

  protected def sendExecuteRequest(request: String): Option[JValue]

  protected def sendShutdownRequest(): Unit = {}


  override def close(): Unit = {
    val future = Future {
      _state match {
        case (Dead() | ShuttingDown() | Success()) =>
          Future.successful()
        case _ =>
          sendShutdownRequest()
      }
    }
    _state = Dead()
    IOUtils.closeQuietly(stdin)
    IOUtils.closeQuietly(stdout)
    errOut.close

    // Give ourselves 10 seconds to tear down the process.
    Utils.tryFinally(Await.result(future, Duration(10, TimeUnit.SECONDS))){
      process.destroy()}
  }

} 
Example 54
Source File: Implicit_1_Class.scala    From HadoopLearning   with MIT License 5 votes vote down vote up
package com.c503.scala

import java.io.{BufferedReader, File, FileReader}

import scala.collection.mutable.ListBuffer


object Implicit_1_Class {

  // Enrichment: adds a `lines` helper to java.io.File that reads the whole file.
  implicit class Files(file: File) {
    def lines: List[String] = {
      val fileReader = new FileReader(file)
      val reader = new BufferedReader(fileReader)
      try {
        var lines = ListBuffer[String]()
        var line = reader.readLine()
        while (line != null) {
          lines = lines :+ line
          line = reader.readLine()
        }
        lines.toList
      } finally {
        if (fileReader != null) {
          fileReader.close()
        }
        if (reader != null) {
          reader.close()
        }
      }
    }
  }

  def main(args: Array[String]): Unit = {

    val file = new File("")
    file.lines.foreach(e => {
      println(e)
    })

  }

} 
Example 55
Source File: BufferedSource.scala    From perf_tester   with Apache License 2.0 5 votes vote down vote up
package scala.io

import java.io.{ InputStream, BufferedReader, InputStreamReader, PushbackReader }
import Source.DefaultBufSize
import scala.collection.{ Iterator, AbstractIterator }


  override def mkString = {
    // Speed up slurping of whole data set in the simplest cases.
    val allReader = decachedReader
    val sb = new StringBuilder
    val buf = new Array[Char](bufferSize)
    var n = 0
    while (n != -1) {
      n = allReader.read(buf)
      if (n>0) sb.appendAll(buf, 0, n)
    }
    sb.result
  }
} 
Example 56
Source File: GetUrlTest.scala    From piflow   with BSD 2-Clause "Simplified" License 5 votes vote down vote up
package cn.piflow.bundle.http

import java.io.{BufferedReader, InputStreamReader, PrintWriter}
import java.net.{HttpURLConnection, InetAddress, URL, URLConnection}

import cn.piflow.Runner
import cn.piflow.conf.bean.FlowBean
import cn.piflow.conf.util.{FileUtil, OptionUtil}
import cn.piflow.util.{PropertyUtil, ServerIpUtil}
import org.apache.http.client.methods.{CloseableHttpResponse, HttpGet}
import org.apache.http.impl.client.HttpClients
import org.apache.http.util.EntityUtils
import org.apache.spark.sql.SparkSession
import org.h2.tools.Server
import org.junit.Test

import scala.util.parsing.json.JSON

class GetUrlTest {

  @Test
  def testFlow(): Unit ={

    //parse flow json
    val file = "src/main/resources/flow/http/getUrl.json"
    val flowJsonStr = FileUtil.fileReader(file)
    val map = OptionUtil.getAny(JSON.parseFull(flowJsonStr)).asInstanceOf[Map[String, Any]]
    println(map)

    //create flow
    val flowBean = FlowBean(map)
    val flow = flowBean.constructFlow()


    val ip = InetAddress.getLocalHost.getHostAddress
    cn.piflow.util.FileUtil.writeFile("server.ip=" + ip, ServerIpUtil.getServerIpFile())
    val h2Server = Server.createTcpServer("-tcp", "-tcpAllowOthers", "-tcpPort","50001").start()
    //execute flow
    val spark = SparkSession.builder()
      .master("local[12]")
      .appName("hive")
      .config("spark.driver.memory", "4g")
      .config("spark.executor.memory", "8g")
      .config("spark.cores.max", "8")
      .config("hive.metastore.uris",PropertyUtil.getPropertyValue("hive.metastore.uris"))
      .enableHiveSupport()
      .getOrCreate()

    val process = Runner.create()
      .bind(classOf[SparkSession].getName, spark)
      .bind("checkpoint.path", "")
      .bind("debug.path","")
      .start(flow);

    process.awaitTermination();
    val pid = process.pid();
    println(pid + "!!!!!!!!!!!!!!!!!!!!!")
    spark.close();
  }

} 
Example 57
Source File: Pathway.scala    From piflow   with BSD 2-Clause "Simplified" License 5 votes vote down vote up
package cn.piflow.bundle.microorganism

import java.io.{BufferedReader, InputStreamReader, OutputStreamWriter}

import cn.piflow.{JobContext, JobInputStream, JobOutputStream, ProcessContext}
import cn.piflow.conf.{ConfigurableStop, Port, StopGroup}
import cn.piflow.conf.bean.PropertyDescriptor
import cn.piflow.conf.util.{ImageUtil, MapUtil}
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FSDataInputStream, FSDataOutputStream, FileSystem, Path}
import org.apache.spark.sql.{DataFrame, SparkSession}
import org.json.JSONObject


class Pathway extends ConfigurableStop{
  override val authorEmail: String = "[email protected]"
  override val description: String = "Parse Pathway data"
  override val inportList: List[String] =List(Port.DefaultPort.toString)
  override val outportList: List[String] = List(Port.DefaultPort.toString)


  var cachePath:String = _
  def setProperties(map: Map[String, Any]): Unit = {
    cachePath=MapUtil.get(map,key="cachePath").asInstanceOf[String]
  }

  override def getPropertyDescriptor(): List[PropertyDescriptor] = {
    var descriptor : List[PropertyDescriptor] = List()
    val cachePath = new PropertyDescriptor().name("cachePath").displayName("cachePath").description("Temporary Cache File Path")
      .defaultValue("/pathway").required(true)
    descriptor = cachePath :: descriptor
    descriptor
  }

  override def getIcon(): Array[Byte] = {
    ImageUtil.getImage("icon/microorganism/Pathway.png")
  }

  override def getGroup(): List[String] = {
    List(StopGroup.MicroorganismGroup)
  }

  override def initialize(ctx: ProcessContext): Unit = {

  }

  override def perform(in: JobInputStream, out: JobOutputStream, pec: JobContext): Unit = {
    val inDf: DataFrame = in.read()
    var pathStr: String =inDf.take(1)(0).get(0).asInstanceOf[String]

    val configuration: Configuration = new Configuration()
    val pathARR: Array[String] = pathStr.split("\\/")
    var hdfsUrl:String=""
    for (x <- (0 until 3)){
      hdfsUrl+=(pathARR(x) +"/")
    }
    configuration.set("fs.defaultFS",hdfsUrl)
    var fs: FileSystem = FileSystem.get(configuration)


    val hdfsPathTemporary = hdfsUrl+cachePath+"/pathwayCache/pathwayCache.json"
    val path: Path = new Path(hdfsPathTemporary)
    if(fs.exists(path)){
      fs.delete(path)
    }
    fs.create(path).close()
    val hdfsWriter: OutputStreamWriter = new OutputStreamWriter(fs.append(path))

    var fdis: FSDataInputStream = null
    var br: BufferedReader = null
    var doc: JSONObject = null
    var hasAnotherSequence:Boolean = true

    inDf.collect().foreach(row => {
      pathStr = row.get(0).asInstanceOf[String]

      fdis = fs.open(new Path(pathStr))
      br = new BufferedReader(new InputStreamReader(fdis))
      var count = 0
      while (hasAnotherSequence) {
          count += 1
          doc = new JSONObject
          hasAnotherSequence = util.KeggPathway.process(br, doc)

          doc.write(hdfsWriter)
          hdfsWriter.write("\n")
        }
      br.close()
      fdis.close()
    })
    hdfsWriter.close()

    val df: DataFrame = pec.get[SparkSession]().read.json(hdfsPathTemporary)
    df.schema.printTreeString()
    println(df.count)

    out.write(df)

  }
} 
Example 58
Source File: PostUrl.scala    From piflow   with BSD 2-Clause "Simplified" License 5 votes vote down vote up
package cn.piflow.bundle.http

import java.io.{BufferedReader, InputStreamReader}
import java.net.URI

import cn.piflow.conf.bean.PropertyDescriptor
import cn.piflow.conf.util.{ImageUtil, MapUtil}
import cn.piflow.conf.{ConfigurableStop, Port, StopGroup}
import cn.piflow.{JobContext, JobInputStream, JobOutputStream, ProcessContext}
import org.apache.commons.httpclient.HttpClient
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FSDataInputStream, FileSystem, Path}
import org.apache.http.client.methods.HttpPost
import org.apache.http.entity.StringEntity
import org.apache.http.impl.client.HttpClients
import org.apache.http.util.EntityUtils
import org.apache.spark.sql.SparkSession


class PostUrl extends ConfigurableStop{
  override val authorEmail: String = "[email protected]"
  override val inportList: List[String] = List(Port.DefaultPort)
  override val outportList: List[String] = List(Port.DefaultPort)
  override val description: String = "Send a POST request to the specified HTTP endpoint"

  var url : String= _
  var jsonPath : String = _


  override def perform(in: JobInputStream, out: JobOutputStream, pec: JobContext): Unit = {
    val spark = pec.get[SparkSession]()

    //read  json from hdfs
    val conf = new Configuration()
    val fs = FileSystem.get(URI.create(jsonPath),conf)
    val stream: FSDataInputStream = fs.open(new Path(jsonPath))
    val bufferReader = new BufferedReader(new InputStreamReader(stream))
    var lineTxt = bufferReader.readLine()
    val buffer = new StringBuffer()
    while (lineTxt != null ){
      buffer.append(lineTxt.mkString)
      lineTxt=bufferReader.readLine()
    }

    // post
    val client = HttpClients.createDefault()
    val httpClient = new HttpClient()
    httpClient.getParams().setContentCharset("utf-8")

    val post = new HttpPost(url)
    post.addHeader("content-Type","application/json")
    post.setEntity(new StringEntity(buffer.toString))
    val response = client.execute(post)
    val entity = response.getEntity
    val str = EntityUtils.toString(entity,"UTF-8")
    println("Code is " + str)

  }


  override def setProperties(map: Map[String, Any]): Unit = {
    url = MapUtil.get(map,key="url").asInstanceOf[String]
    jsonPath = MapUtil.get(map,key="jsonPath").asInstanceOf[String]
  }

  override def getPropertyDescriptor(): List[PropertyDescriptor] = {
    var descriptor : List[PropertyDescriptor] = List()
    val url = new PropertyDescriptor()
      .name("url")
      .displayName("Url")
      .defaultValue("")
      .description("http request address")
      .required(true)
      .example("http://master:8002/flow/start")

    val jsonPath = new PropertyDescriptor()
      .name("jsonPath")
      .displayName("JsonPath")
      .defaultValue("")
      .description("json parameter path for post request")
      .required(true)
        .example("hdfs://master:9000/work/flow.json")

    descriptor = url :: descriptor
    descriptor = jsonPath :: descriptor
    descriptor
  }

  override def getIcon(): Array[Byte] = {
    ImageUtil.getImage("icon/http/PostUrl.png")
  }

  override def getGroup(): List[String] = {
    List(StopGroup.HttpGroup.toString)
  }

  override def initialize(ctx: ProcessContext): Unit = {

  }

} 
Example 59
Source File: Console.scala    From slide-desktop   with GNU General Public License v2.0 5 votes vote down vote up
package gui

import java.awt.{BorderLayout, Insets}
import java.io.{BufferedReader, InputStreamReader}
import javax.swing._

import connections.usb.Adb

class Console extends JFrame {
    private val consoleTextField: JTextArea = new JTextArea()

    {
        this.setTitle("Output")
        this.setBounds(100, 100, 400, 200)

        consoleTextField.setEditable(false)
        consoleTextField.setMargin(new Insets(10, 10, 10, 10))
        consoleTextField.setAlignmentX(0)
        consoleTextField.setCaretPosition(0)

        // Wrap the text area in a scroll pane and add only the scroll pane to the frame.
        val scroll: JScrollPane = new JScrollPane(consoleTextField,
            ScrollPaneConstants.VERTICAL_SCROLLBAR_ALWAYS, ScrollPaneConstants.HORIZONTAL_SCROLLBAR_NEVER)
        this.getContentPane.add(scroll, BorderLayout.CENTER)
    }

    def append(text: String): Unit = {
        if (consoleTextField.getText == "")
            consoleTextField.append(text)
        else
            consoleTextField.append("\n" + text)
    }

    def showConsole(): Unit = this.setVisible(true)

    def runProcess(pr: Process): Unit = {

        var consoleOut: String = null
        var stdInput: BufferedReader = null
        var stdError: BufferedReader = null
        if (pr != null) {
            stdInput = new BufferedReader(new InputStreamReader(pr.getInputStream))
            stdError = new BufferedReader(new InputStreamReader(pr.getErrorStream))
            while ( {
                consoleOut = stdInput.readLine()
                consoleOut != null
            }) {
                this.append(consoleOut)
            }

            var errorOut: String = null
            while ( {
                errorOut = stdError.readLine()
                errorOut != null
            }) {
                this.append(errorOut)
            }
        }

        showConsole()
    }

    def runAdbProcess(pr: Process): Unit = {
        var deviceAvailable: Boolean = false

        var consoleOut: String = null
        var stdInput: BufferedReader = null
        var stdError: BufferedReader = null
        if (Adb.isAdbFilePresent && pr != null) {
            stdInput = new BufferedReader(new InputStreamReader(pr.getInputStream))
            stdError = new BufferedReader(new InputStreamReader(pr.getErrorStream))

            while ( {
                consoleOut = stdInput.readLine()
                consoleOut != null
            }) {
                if (consoleOut.contains("	device")) {
                    deviceAvailable = true
                }
                this.append(consoleOut)
            }

            var errorOut: String = null
            while ( {
                errorOut = stdError.readLine()
                errorOut != null
            }) {
                this.append(errorOut)
            }
        } else {
            this.append("Error: ADB is not installed")
        }

        showConsole()
    }
} 
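A minimal usage sketch for the console above, assuming the adb binary is on the PATH and connections.usb.Adb reports it as present; each line of the process output is appended to the window:

object ConsoleUsageSketch {
  def main(args: Array[String]): Unit = {
    val console = new gui.Console()
    // "adb devices" lists attached devices; its output lines end up in the console window.
    val adbDevices = new ProcessBuilder("adb", "devices").start()
    console.runAdbProcess(adbDevices)
  }
}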
Example 60
Source File: SparkILoop.scala    From drizzle-spark   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.repl

import java.io.BufferedReader

import scala.Predef.{println => _, _}
import scala.tools.nsc.Settings
import scala.tools.nsc.interpreter.{ILoop, JPrintWriter}
import scala.tools.nsc.util.stringFromStream
import scala.util.Properties.{javaVersion, javaVmName, versionString}


  def run(code: String, sets: Settings = new Settings): String = {
    import java.io.{ BufferedReader, StringReader, OutputStreamWriter }

    stringFromStream { ostream =>
      Console.withOut(ostream) {
        val input = new BufferedReader(new StringReader(code))
        val output = new JPrintWriter(new OutputStreamWriter(ostream), true)
        val repl = new SparkILoop(input, output)

        if (sets.classpath.isDefault) {
          sets.classpath.value = sys.props("java.class.path")
        }
        repl process sets
      }
    }
  }
  def run(lines: List[String]): String = run(lines.map(_ + "\n").mkString)
} 
Example 61
Source File: CustomReceiver.scala    From drizzle-spark   with Apache License 2.0 5 votes vote down vote up
// scalastyle:off println
package org.apache.spark.examples.streaming

import java.io.{BufferedReader, InputStreamReader}
import java.net.Socket
import java.nio.charset.StandardCharsets

import org.apache.spark.SparkConf
import org.apache.spark.internal.Logging
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.receiver.Receiver


  private def receive() {
   var socket: Socket = null
   var userInput: String = null
   try {
     logInfo("Connecting to " + host + ":" + port)
     socket = new Socket(host, port)
     logInfo("Connected to " + host + ":" + port)
     val reader = new BufferedReader(
       new InputStreamReader(socket.getInputStream(), StandardCharsets.UTF_8))
     userInput = reader.readLine()
     while(!isStopped && userInput != null) {
       store(userInput)
       userInput = reader.readLine()
     }
     reader.close()
     socket.close()
     logInfo("Stopped receiving")
     restart("Trying to connect again")
   } catch {
     case e: java.net.ConnectException =>
       restart("Error connecting to " + host + ":" + port, e)
     case t: Throwable =>
       restart("Error receiving data", t)
   }
  }
}
// scalastyle:on println 
Example 62
Source File: ProxyServer.scala    From devbox   with Apache License 2.0 5 votes vote down vote up
package cmdproxy

import java.io.BufferedReader
import java.io.InputStreamReader
import java.io.OutputStreamWriter
import java.io.PrintWriter
import java.net.InetAddress
import java.net.ServerSocket
import java.net.Socket

import scala.util.Using

import devbox.logger.FileLogger
import os.RelPath
import ujson.ParseException
import upickle.default.{macroRW, ReadWriter}

case class Request(workingDir: String, cmd: Seq[String])
object Request {
  implicit val rw: ReadWriter[Request] = macroRW
}


  val localDir: Map[os.RelPath, os.Path] = dirMapping.map(_.swap).toMap

  def start(): Unit = {
    logger.info(s"Starting command proxy server, listening at ${socket.getInetAddress}:${socket.getLocalPort}")
    (new Thread("Git Proxy Thread") {
      override def run(): Unit = {
        while (!socket.isClosed) {
          Using(socket.accept()) { handleConnection } recover {
            case e: java.net.SocketException if e.getMessage == "Socket closed" =>
              logger.error(s"Git proxy socket closed")
            case e: Exception =>
              logger.error(s"Error handling request ${e.getMessage}")
          }
        }
      }
    }).start()

  }

  def handleConnection(conn: Socket): Unit = try {
    logger.info(s"Accepting connection from ${conn.getInetAddress}")
    val in = new BufferedReader(new InputStreamReader(conn.getInputStream, ProxyServer.CHARSET_NAME))
    val out = new PrintWriter(new OutputStreamWriter(conn.getOutputStream, ProxyServer.CHARSET_NAME))

    upickle.default.read[Request](in.readLine()) match {
      case Request(dir, args) =>
        val workingDir = localDir
          .collect{case (remote, local) if RelPath(dir).startsWith(remote) =>
            local / RelPath(dir).relativeTo(remote)
          }
          .head

        // being cautious here and only execute "git" commands
        if (args.headOption.contains("git")) {
          logger.info(s"Executing `${args.mkString(" ")}` in $workingDir")

          val proc = os.proc(args).call(
            workingDir,
            mergeErrIntoOut = true,
            stdout = os.ProcessOutput.Readlines(str =>
              out.println(upickle.default.write(Left[String, Int](str)))
            ),
            check = false,
            timeout = 10000
          )

          out.println(upickle.default.write(Right[String, Int](proc.exitCode)))
        } else {
          val msg = s"Not executing non-git commend: `${args.mkString(" ")}`."
          logger.info(msg)
          out.println(upickle.default.write(Right[String, Int](1)))
        }

        out.flush()
    }
  } catch {
    case e: ParseException => logger.error(s"Error parsing incoming json request: ${e.getMessage}")
  }
}

object ProxyServer {
  val DEFAULT_PORT = 20280
  val CHARSET_NAME = "UTF-8"
} 
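A minimal client sketch for the line-oriented protocol above, assuming it is compiled in the same cmdproxy package, the server is listening on ProxyServer.DEFAULT_PORT, and "some/mapped/dir" is a placeholder that must match one of the server's mapped directories:

object ProxyClientSketch {
  def main(args: Array[String]): Unit = {
    import java.io.{BufferedReader, InputStreamReader, OutputStreamWriter, PrintWriter}
    import java.net.Socket

    val socket = new Socket("localhost", ProxyServer.DEFAULT_PORT)
    val out = new PrintWriter(new OutputStreamWriter(socket.getOutputStream, ProxyServer.CHARSET_NAME), true)
    val in = new BufferedReader(new InputStreamReader(socket.getInputStream, ProxyServer.CHARSET_NAME))

    // One request per connection: a single JSON-encoded Request line.
    out.println(upickle.default.write(Request("some/mapped/dir", Seq("git", "status"))))

    // The server streams Left(outputLine) messages and finishes with Right(exitCode).
    var done = false
    while (!done) {
      upickle.default.read[Either[String, Int]](in.readLine()) match {
        case Left(outputLine) => println(outputLine)
        case Right(exitCode)  => println(s"exit code: $exitCode"); done = true
      }
    }
    socket.close()
  }
}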
Example 63
Source File: BuildInfo.scala    From daml   with Apache License 2.0 5 votes vote down vote up
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.buildinfo

import java.io.{BufferedReader, InputStreamReader}

import scala.collection.JavaConverters.asScalaIteratorConverter

object BuildInfo {
  val Version: String =
    Option(getClass.getClassLoader.getResourceAsStream("MVN_VERSION")).fold {
      "{component version not found on classpath}"
    } { is =>
      try {
        val reader = new BufferedReader(new InputStreamReader(is))
        reader.lines.iterator.asScala.mkString.trim
      } finally {
        is.close()
      }
    }
} 
Example 64
Source File: ImmutableMigrationsSpec.scala    From daml   with Apache License 2.0 5 votes vote down vote up
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.ledger.on.sql

import java.io.{BufferedReader, FileNotFoundException}
import java.math.BigInteger
import java.nio.charset.Charset
import java.security.MessageDigest
import java.util

import com.daml.ledger.on.sql.ImmutableMigrationsSpec._
import org.flywaydb.core.Flyway
import org.flywaydb.core.api.configuration.FluentConfiguration
import org.flywaydb.core.internal.resource.LoadableResource
import org.flywaydb.core.internal.scanner.{LocationScannerCache, ResourceNameCache, Scanner}
import org.scalatest.Matchers._
import org.scalatest.WordSpec

import scala.collection.JavaConverters._

class ImmutableMigrationsSpec extends WordSpec {
  "migration files" should {
    "never change, according to their accompanying digest file" in {
      val configuration = Flyway
        .configure()
        .locations(s"classpath:/$migrationsResourcePath")
      val resourceScanner = flywayScanner(configuration)
      val resources = resourceScanner.getResources("", ".sql").asScala.toSeq
      resources.size should be >= 3

      resources.foreach { resource =>
        val migrationFile = resource.getRelativePath
        val digestFile = migrationFile + ".sha256"
        val expectedDigest = readExpectedDigest(migrationFile, digestFile, resourceScanner)
        val currentDigest = computeCurrentDigest(resource, configuration.getEncoding)
        assert(
          currentDigest == expectedDigest,
          s"""The contents of the migration file "$migrationFile" have changed! Migrations are immutable; you must not change their contents or their digest.""",
        )
      }
    }
  }
}

object ImmutableMigrationsSpec {
  private val migrationsResourcePath = "com/daml/ledger/on/sql/migrations"
  private val hashMigrationsScriptPath = "ledger/ledger-on-sql/hash-migrations.sh"

  private def flywayScanner(configuration: FluentConfiguration) =
    new Scanner(
      classOf[Object],
      util.Arrays.asList(configuration.getLocations: _*),
      getClass.getClassLoader,
      configuration.getEncoding,
      new ResourceNameCache,
      new LocationScannerCache,
    )

  private def readExpectedDigest(
      sourceFile: String,
      digestFile: String,
      resourceScanner: Scanner[_],
  ): String = {
    val resource = Option(resourceScanner.getResource(digestFile))
      .getOrElse(throw new FileNotFoundException(
        s"""\"$digestFile\" is missing. If you are introducing a new Flyway migration step, you need to create an SHA-256 digest file by running $hashMigrationsScriptPath."""))
    new BufferedReader(resource.read()).readLine()
  }

  private def computeCurrentDigest(resource: LoadableResource, encoding: Charset): String = {
    val sha256 = MessageDigest.getInstance("SHA-256")
    new BufferedReader(resource.read())
      .lines()
      .forEach(line => sha256.update((line + "\n").getBytes(encoding)))
    val digest = sha256.digest()
    String.format(s"%0${digest.length * 2}x", new BigInteger(1, digest))
  }
} 
Example 65
Source File: plot.scala    From Scientific-Computing-with-Scala   with MIT License 5 votes vote down vote up
import org.jfree.chart._
import org.jfree.data.xy._
import scala.math._
import scala.collection.mutable.{MutableList, Map}
import java.io.{FileReader, BufferedReader}

object AndrewsCurve {
  def readCSVFile(filename: String): Map[String, MutableList[String]] = {
    val file = new FileReader(filename)
    val reader = new BufferedReader(file)
    val csvdata: Map[String, MutableList[String]] = Map()
    try {
      val alldata = new MutableList[Array[String]]
      var line:String = null
      while ({line = reader.readLine(); line} != null) {
        if (line.length != 0) {
          val delimiter: String = ","
          var splitline: Array[String] = line.split(delimiter).map(_.trim)
          alldata += splitline
        }
      }
      val labels = MutableList("sepal length", "sepal width",
        "petal length", "petal width", "class")
      val labelled = labels.zipWithIndex.map {
        case (label, index) => label -> alldata.map(x => x(index))
      }
      for (pair <- labelled) {
        csvdata += pair
      }
    } finally {
      reader.close()
    }
    csvdata
  }

  def andrewsCurve(row: Array[Double]) = (t: Double) => {
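    // Andrews curve: f(t) = x1/sqrt(2) + x2*sin(t) + x3*cos(t) + x4*sin(2t) + ...
    // (i = 0 contributes the constant term; odd indices pair with sine, even indices with cosine)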
    var result: Double = 0.0
    for ((attr, i) <- row.zipWithIndex) {
      if (i == 0) {
        result = result + row(i) / sqrt(2.0)
      } else if (i % 2 != 0) {
        result = result + row(i) * sin(((i + 1) / 2) * t)
      } else {
        result = result + row(i) * cos(((i + 1) / 2) * t)
      }
    }
    result
  }

  def main(args: Array[String]) {
    val data = readCSVFile("iris.csv")
    val x: Array[Double] = Array.tabulate(100) {
      (i: Int) => -Pi + 2.0 * Pi * (i / 100.0)
    }
    val dataset = new DefaultXYDataset
    for (i <- 0 until data("sepal length").size) {
      val x1 = data("sepal length")(i).toDouble
      val x2 = data("sepal width")(i).toDouble
      val x3 = data("petal length")(i).toDouble
      val x4 = data("petal width")(i).toDouble
      val cls = data("class")(i)
      val curve = x.map(andrewsCurve(Array(x1, x2, x3, x4)))
      dataset.addSeries(cls + i, Array(x, curve))
    }
    val frame = new ChartFrame("Andrews Curve",
      ChartFactory.createXYLineChart("Andrews Curve", "x", "y",
      dataset, org.jfree.chart.plot.PlotOrientation.VERTICAL,
      false, false, false))
    frame.pack()
    frame.setVisible(true)
  }
} 
Example 66
Source File: PullExample.scala    From Hands-On-Data-Analysis-with-Scala   with MIT License 5 votes vote down vote up
package handson.example.extract

import java.io.{BufferedReader, InputStreamReader}
import java.util.function.Consumer

import scala.collection.mutable.ListBuffer


class DataConsumer extends Consumer[String] {
  val buf = ListBuffer[String]()
  override def accept(t: String): Unit = {
    buf += t
  }
}
object PullExample {
  def main(args: Array[String]): Unit = {
    val reader = new BufferedReader(
      new InputStreamReader(
        new java.net.URL("https://data.lacity.org/api/views/nxs9-385f/rows.csv?accessType=DOWNLOAD").openStream()
      )
    )
    val dataConsumer = new DataConsumer
    reader.lines().forEach(dataConsumer)
    dataConsumer.buf.toList.take(5).foreach(println)
  }

} 
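The same pull can be written without a java.util.function.Consumer; a short sketch using scala.collection.JavaConverters against the same public dataset URL:

import java.io.{BufferedReader, InputStreamReader}
import scala.collection.JavaConverters._

object PullExampleIteratorSketch {
  def main(args: Array[String]): Unit = {
    val reader = new BufferedReader(
      new InputStreamReader(
        new java.net.URL("https://data.lacity.org/api/views/nxs9-385f/rows.csv?accessType=DOWNLOAD").openStream()
      )
    )
    try {
      // lines() returns a java.util.stream.Stream[String]; iterator().asScala bridges it to a Scala Iterator.
      reader.lines().iterator().asScala.take(5).foreach(println)
    } finally reader.close()
  }
}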
Example 67
Source File: Adapter.scala    From attic-nlp4l   with Apache License 2.0 5 votes vote down vote up
package org.nlp4l.util

import java.io.{FileReader, BufferedReader, File}

import org.nlp4l.core.RawReader
import resource._

trait Adapter {
  def parseCommonOption(parsed: Map[Symbol, String], list: List[String]): Map[Symbol, String] = list match {
    case Nil => parsed
    case "-s" :: value :: tail => parseCommonOption(parsed + ('schema -> value), tail)
    case "-f" :: value :: tail => parseCommonOption(parsed + ('field -> value), tail)
    case "--type" :: value :: tail => parseCommonOption(parsed + ('type -> value), tail)
    case "--tfmode" :: value :: tail => parseCommonOption(parsed + ('tfmode -> value), tail)
    case "--smthterm" :: value :: tail => parseCommonOption(parsed + ('smthterm -> value), tail)
    case "--idfmode" :: value :: tail => parseCommonOption(parsed + ('idfmode -> value), tail)
    case "-o" :: value :: tail => parseCommonOption(parsed + ('outdir -> value), tail)
    case "--features" :: value :: tail => parseCommonOption(parsed + ('features -> value), tail)
    case "--outputSep" :: value :: tail => parseCommonOption(parsed + ('outputSep -> value), tail)
    case "--values" :: value :: tail => parseCommonOption(parsed + ('values -> value), tail)
    case "--valuesSep" :: value :: tail => parseCommonOption(parsed + ('valuesSep -> value), tail)
    case "--boosts" :: value :: tail => parseCommonOption(parsed + ('boosts -> value), tail)
    case value :: tail => parseCommonOption(parsed + ('index -> value), tail)
  }

  def fieldValues(reader: RawReader, docIds: List[Int], fields: Seq[String]): List[Map[String, Seq[String]]] = {
    docIds.map(id => reader.document(id) match {
      case Some(doc) => {
        fields.map(f => (f, doc.getValue(f).getOrElse(List.empty))).toMap
      }
      case _ => Map.empty[String, Seq[String]]
    })
  }

  def readFeatures(featureFile: String): Set[String] = {
    val file = new File(featureFile)
    // build word set from feature file
    val builder = Set.newBuilder[String]
    if (file.exists()) {
      for (input <- managed(new BufferedReader(new FileReader(file)))) {
        def read(): Unit = input.readLine() match {
          case null => ()
          case line => {
            builder += (line.trim)
            read()
          }
        }
        read()
      }
    }
    builder.result()
  }

  def readTermBoosts(boostFile: String): Map[String, Double] = {
    val file = new File(boostFile)
    // build term boosts map
    val builder = Map.newBuilder[String, Double]
    if (file.exists()) {
      for (input <- managed(new BufferedReader(new FileReader(file)))) {
        def read(): Unit = input.readLine() match {
          case null => ()
          case line => {
            val cols = line.split(",")
            builder += (cols(0).trim -> cols(1).trim.toDouble)
            read()
          }
        }
        read()
      }
    }
    builder.result()
  }
} 
Example 68
Source File: index_ceeaus.scala    From attic-nlp4l   with Apache License 2.0 5 votes vote down vote up
import java.io.File
import java.io.FileInputStream
import java.io.InputStreamReader
import java.io.BufferedReader
import java.nio.file.FileSystems
import org.apache.lucene.index._
import org.apache.lucene.search.TermQuery
import org.nlp4l.core.analysis.Analyzer
import org.nlp4l.core._

import scalax.file.Path
import scalax.file.PathSet

val index = "/tmp/index-ceeaus"

def lines(fl: Path, encoding: String): List[String] = {
  val is = new FileInputStream(fl.path)
  val r = new InputStreamReader(is, encoding)
  val br = new BufferedReader(r)
  var result: List[String] = Nil

  try{
    var line = br.readLine()
    while(line != null){
      result = result :+ line
      line = br.readLine()
    }
    result
  }
  finally{
    br.close
    r.close
    is.close
  }
}

def document(fl: Path, ja: Boolean): Document = {
  val ps: Array[String] = fl.path.split(File.separator)
  // for Windows
  // val ps: Array[String] = fl.path.split("\\\\")
  val file = ps(3)
  val typ = ps(2)
  val cat = if(file.indexOf("smk") >= 0) "smk" else "ptj"   // smoking or part time job
  val encoding = if(ja) "sjis" else "UTF-8"
  val body = lines(fl, encoding)
  Document(Set(
    Field("file", file), Field("type", typ), Field("cat", cat),
    Field(if(ja) "body_ja" else "body_en", body)
  ))
}

// delete existing Lucene index
val p = Path(new File(index))
p.deleteRecursively()

// write documents into an index
val schema = SchemaLoader.loadFile("examples/schema/ceeaus.conf")
val writer = IWriter(index, schema)

val c: PathSet[Path] = Path("corpora", "CEEAUS").children()
// write English docs
c.toList.sorted.filter(e => e.name.indexOf("CJEJUS")<0 && e.name.indexOf("PLAIN")<0).foreach( f =>
  f.children().toList.sorted.filter( g => g.name.indexOf("(1)") < 0 && g.name.endsWith(".txt")).foreach(h => writer.write(document(h, false)))
)
// write Japanese docs
c.toList.sorted.filter(e => e.name.indexOf("CJEJUS")>=0).foreach( f =>
  f.children().toList.sorted.filter( g => g.name.indexOf("(1)") < 0 && g.name.endsWith(".txt")).foreach(h => writer.write(document(h, true)))
)
writer.close

// search
val searcher = ISearcher(index)
val results = searcher.search(query=new TermQuery(new Term("body_ja", "喫煙")), rows=10)

results.foreach(doc => {
  printf("[DocID] %d: %s\n", doc.docId, doc.get("file"))
}) 
Example 69
Source File: index_ceeaus_all.scala    From attic-nlp4l   with Apache License 2.0 5 votes vote down vote up
import java.io.File
import java.io.FileInputStream
import java.io.InputStreamReader
import java.io.BufferedReader
import java.nio.file.FileSystems
import org.apache.lucene.index._
import org.apache.lucene.search.TermQuery
import org.nlp4l.core.analysis.Analyzer
import org.nlp4l.core.analysis.AnalyzerBuilder
import org.nlp4l.core._

import scalax.file.Path
import scalax.file.PathSet

val index = "/tmp/index-ceeaus-all"

def lines(fl: Path, encoding: String): List[String] = {
  val is = new FileInputStream(fl.path)
  val r = new InputStreamReader(is, encoding)
  val br = new BufferedReader(r)
  var result: List[String] = Nil

  try{
    var line = br.readLine()
    while(line != null){
      result = result :+ line
      line = br.readLine()
    }
    result
  }
  finally{
    br.close
    r.close
    is.close
  }
}

def document(fl: Path, ja: Boolean): Document = {
  val ps: Array[String] = fl.path.split(File.separator)
  // for Windows
  // val ps: Array[String] = fl.path.split("\\\\")
  val file = ps(3)
  val typ = ps(2)
  val cat = "all"
  val encoding = if(ja) "sjis" else "UTF-8"
  val body = lines(fl, encoding)
  val body_set = if(ja) Set(Field("body_ja", body)) else Set(Field("body_en", body), Field("body_ws", body))
  Document(Set(
    Field("file", file), Field("type", typ), Field("cat", cat)) ++ body_set
  )
}

// delete existing Lucene index
val p = Path(new File(index))
p.deleteRecursively()

// write documents into an index
val schema = SchemaLoader.loadFile("examples/schema/ceeaus.conf")
val writer = IWriter(index, schema)

val c: PathSet[Path] = Path("corpora", "CEEAUS", "PLAIN").children()
// write English docs
c.filter(e => e.name.indexOf("cjejus")<0 && e.name.endsWith(".txt")).toList.sorted.foreach(g => writer.write(document(g, false)))
// write Japanese docs
c.filter(e => e.name.indexOf("cjejus")>=0 && e.name.endsWith(".txt")).toList.sorted.foreach(g => writer.write(document(g, true)))
writer.close

// search test
val searcher = ISearcher(index)
val results = searcher.search(query=new TermQuery(new Term("body_ja", "喫煙")), rows=10)

results.foreach(doc => {
  printf("[DocID] %d: %s\n", doc.docId, doc.get("file"))
})

// search test for ch4
val results2 = searcher.search(query=new TermQuery(new Term("body_ws", "still,")), rows=10)

results2.foreach(doc => {
  printf("[DocID] %d: %s\n", doc.docId, doc.get("file"))
}) 
Example 70
Source File: OpenMLTask.scala    From DynaML   with Apache License 2.0 5 votes vote down vote up
package io.github.mandar2812.dynaml.openml

import java.io.{BufferedReader, FileReader}
import org.openml.apiconnector.xml.Task
import OpenML._
import io.github.mandar2812.dynaml.dataformat.{ARFF, ArffFile}


case class OpenMLTask(t: Task) {

  def inputs(): Array[Task#Input] = t.getInputs

  def getDataSplitsAsStream: Stream[String] = {
    val estimation_procedure_index = inputs().map(_.getName).indexOf("estimation_procedure")

    val splits = inputs()(estimation_procedure_index)
      .getEstimation_procedure.getDataSplits(t.getTask_id)

    val arff = new ArffFile()
    arff.parse(new BufferedReader(new FileReader(splits)))
    ARFF.asStream(arff)
  }

  def getDataAsStream: Stream[String] = {
    val data_index = inputs().map(_.getName).indexOf("source_data")
    val data_id = inputs()(data_index).getData_set.getData_set_id
    val data = dataset(data_id)
    data.getFormat match {
      case "ARFF" =>  ARFF.asStream(loadDataIntoARFF(data_id))
    }
  }

} 
Example 71
Source File: PreprocessForestCover.scala    From DynaML   with Apache License 2.0 5 votes vote down vote up
package io.github.mandar2812.dynaml.examples

import java.io.{BufferedReader, FileInputStream, InputStreamReader}

import com.github.tototoshi.csv.CSVWriter

import scala.util.Random

object FileIterator {
  def apply(file: java.io.File, encoding: String): BufferedReader = {
    new BufferedReader(
      new InputStreamReader(
        new FileInputStream(file), encoding))
  }
}

object PreprocessForestCover {
  def apply(args: String = "") = {
    val iterator:BufferedReader = FileIterator(new java.io.File(args+"covtype.data"),
      "US-ASCII")
    var line = iterator.readLine()
    val writer = CSVWriter.open(args+"cover.csv")
    val writert = CSVWriter.open(args+"covertest.csv")

    println("Outputting train and test csv files ...")
    while(line != null) {

      val row = line.split(',')
      val procrow = Array.tabulate(row.length)((i) => {
        if(i == row.length-1) {
          val label = if(row(i).toDouble == 2.0) 1.0 else -1.0
          label.toString
        } else {
          row(i)
        }
      })

      if(Random.nextDouble() <= 0.9)
      {
        writer.writeRow(procrow)
      } else {
        writert.writeRow(procrow)
      }
      line = iterator.readLine()
    }
    writer.close()
    writert.close()
    println("Done ...")
  }
} 
Example 72
Source File: PreprocessAdult.scala    From DynaML   with Apache License 2.0 5 votes vote down vote up
package io.github.mandar2812.dynaml.examples

import java.io.BufferedReader

import com.github.tototoshi.csv.CSVWriter

import scala.util.Random

class ExtendedString(s:String) {
  def isNumber: Boolean = s.matches("[+-]?\\d+.?\\d+")
}

// and this is the companion object that provides the implicit conversion
object ExtendedString {
  implicit def String2ExtendedString(s:String): ExtendedString = new ExtendedString(s)
}

object PreprocessAdult {
  def apply(args: String = "") = {
    val iterator:BufferedReader = FileIterator(new java.io.File(args+"adult.data"),
      "US-ASCII")
    var line = iterator.readLine()
    val writer = CSVWriter.open(args+"adult.csv")
    val writert = CSVWriter.open(args+"adulttest.csv")

    println("Outputting train and test csv files ...")
    while(line != null) {

      val row = line.split(',').map(_.trim)
      val label = if(row.last == ">50K") 1.0 else -1.0
      val procrow = Array(row(0), row(2), row(4), row(10), row(11), row(12), label.toString)

      if(Random.nextDouble() <= 0.9)
      {
        writer.writeRow(procrow)
      } else {
        writert.writeRow(procrow)
      }
      line = iterator.readLine()
    }
    writer.close()
    writert.close()
    println("Done ...")
  }
} 
Example 73
Source File: PreprocessSusy.scala    From DynaML   with Apache License 2.0 5 votes vote down vote up
package io.github.mandar2812.dynaml.examples

import java.io.{BufferedReader, FileInputStream, InputStreamReader}
import java.util.zip.GZIPInputStream

import com.github.tototoshi.csv.CSVWriter

import scala.util.Random

case class BufferedReaderIterator(reader: BufferedReader) extends Iterator[String] {
  override def hasNext() = reader.ready
  override def next() = reader.readLine()
}

object GzFileIterator {
  def apply(file: java.io.File, encoding: String): BufferedReader = {
    new BufferedReader(
      new InputStreamReader(
        new GZIPInputStream(
          new FileInputStream(file)), encoding))
  }
}

object PreprocessSusy {
  def apply(args: String = "") = {
    val iterator:BufferedReader = GzFileIterator(new java.io.File(args+"SUSY.csv.gz"),
      "US-ASCII")
    var line = iterator.readLine()
    val writer = CSVWriter.open(args+"susy.csv")
    val writert = CSVWriter.open(args+"susytest.csv")

    println("Outputting train and test csv files ...")
    while(line != null && line != "\n") {

      val row = line.split(',').reverse
      val procrow = Array.tabulate(row.length)((i) => {
        if(i == row.length-1) {
          val label = if(row(i).toDouble == 1.0) row(i).toDouble else -1.0
          label.toString
        } else {
          row(i)
        }
      })

      if(Random.nextDouble() <= 0.9)
      {
        writer.writeRow(procrow)
      } else {
        writert.writeRow(procrow)
      }
      line = iterator.readLine()
    }
    writer.close()
    writert.close()
    println("Done ...")
  }
} 
Example 74
Source File: Util.scala    From avrohugger   with Apache License 2.0 5 votes vote down vote up
import java.io.BufferedReader
import java.io.File
import java.io.FileReader
import java.io.IOException

object Util {
  
  def readFile(fileName: String, maxTries: Int = 3): String = {
    def readFile0(count: Int): String = {
      try { // if file is empty, try again, it should be there
        val contents: String = scala.io.Source.fromFile(fileName).mkString
        if (contents.isEmpty && (count < maxTries)) readFile0(count + 1)
        else contents
      } catch { // if file is not found, try again, it should be there
        case e: Throwable =>
          if (count < maxTries) readFile0(count + 1)
          else sys.error("File not found: " + fileName)
      }
    }
    readFile0(0)
  }
  
  
} 
Example 75
Source File: BlockingIoExample.scala    From netty-in-action-scala   with Apache License 2.0 5 votes vote down vote up
package nia.chapter1.scaladsl

import java.io.{ BufferedReader, IOException, InputStreamReader, PrintWriter }
import java.net.ServerSocket


  // #snip
  @throws[IOException]
  def serve(portNumber: Int): Unit = {
    //Create a new ServerSocket to listen for connection requests on the specified port
    val serverSocket = new ServerSocket(portNumber)
    //The call to accept() blocks until a connection is established
    val clientSocket = serverSocket.accept
    //These streams are derived from the socket's input and output streams
    val in = new BufferedReader(new InputStreamReader(clientSocket.getInputStream))
    val out = new PrintWriter(clientSocket.getOutputStream, true)
    var request: String = in.readLine
    var response: String = null
    //Begin the processing loop
    while (request ne null) {
      if ("Done" != request) {
        //The request is passed to the server's processing method
        response = processRequest(request)
        //The server's response is sent to the client
        out.println(response)
        //Continue the processing loop
      }
      request = in.readLine
    }
    // #snip
  }
  private def processRequest(request: String): String = "Processed"
} 
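A minimal counterpart client sketch, assuming serve() above was started on port 8080 (the port number here is an arbitrary choice):

import java.io.{BufferedReader, InputStreamReader, PrintWriter}
import java.net.Socket

object BlockingIoClientSketch {
  def main(args: Array[String]): Unit = {
    val socket = new Socket("localhost", 8080) // must match the port passed to serve()
    val out = new PrintWriter(socket.getOutputStream, true)
    val in = new BufferedReader(new InputStreamReader(socket.getInputStream))

    out.println("hello")   // the server answers every line except "Done"
    println(in.readLine()) // prints "Processed"
    out.println("Done")    // ends the server's processing loop
    socket.close()
  }
}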
Example 76
Source File: plot.scala    From Scientific-Computing-with-Scala   with MIT License 5 votes vote down vote up
import scala.collection.mutable.{MutableList, Map}
import scala.math._
import org.jfree.chart._
import org.jfree.data.xy._
import org.jfree.data.statistics._
import java.io.{FileReader, BufferedReader}
import java.awt.GridLayout
import javax.swing.JFrame
import javax.swing.JPanel

object ScatterPlotMatrix {
  def readCSVFile(filename: String): Map[String, MutableList[String]] = {
    val file = new FileReader(filename)
    val reader = new BufferedReader(file)
    val csvdata: Map[String, MutableList[String]] = Map()
    try {
      val alldata = new MutableList[Array[String]]
      var line:String = null
      while ({line = reader.readLine(); line} != null) {
        if (line.length != 0) {
          val delimiter: String = ","
          var splitline: Array[String] = line.split(delimiter).map(_.trim)
          alldata += splitline
        }
      }
      val labels = MutableList("sepal length", "sepal width",
        "petal length", "petal width", "class")
      val labelled = labels.zipWithIndex.map {
        case (label, index) => label -> alldata.map(x => x(index))
      }
      for (pair <- labelled) {
        csvdata += pair
      }
    } finally {
      reader.close()
    }
    csvdata
  }

  def main(args: Array[String]) {
    val data = readCSVFile("iris.csv")
    val frame = new JFrame("Scatter Plot Matrix")
    frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE)
    frame.setLayout(new GridLayout(4, 4))
    val attributes = List("sepal length", "sepal width", 
      "petal length", "petal width")
    val classes = List("Iris-setosa", "Iris-versicolor", "Iris-virginica")
    for ((a1, i) <- attributes.zipWithIndex) {
      for ((a2, j) <- attributes.zipWithIndex) {
        if (a1 == a2) {
          val dataset = new HistogramDataset();
          dataset.setType(HistogramType.RELATIVE_FREQUENCY);
          val xs = (for (x <- data(a1)) yield { x.toDouble }).toArray
          dataset.addSeries(a1, xs, 11);
          val chart = ChartFactory.createHistogram(null, a1, "frequency",
            dataset, org.jfree.chart.plot.PlotOrientation.VERTICAL,
            false, false, false)
          frame.add(new ChartPanel(chart, 200, 200, 200, 200, 200, 200,
            false, true, true, true, true, true))
        } else {
          val dataset = new DefaultXYDataset
          for (cls <- classes) {
            val xs = (for ((x, index) <- data(a1).zipWithIndex
              if data("class")(index) == cls)
            yield { x.toDouble }).toArray
            val ys = (for ((y, index) <- data(a2).zipWithIndex
              if data("class")(index) == cls)
            yield { y.toDouble }).toArray
            dataset.addSeries(cls, Array(xs, ys))
          }
          val chart = ChartFactory.createScatterPlot(null, 
            a1, a2, dataset, org.jfree.chart.plot.PlotOrientation.VERTICAL, 
            false, false, false)
          frame.add(new ChartPanel(chart, 200, 200, 200, 200, 200, 200,
            false, true, true, true, true, true))
        }
      }
    }
    frame.pack()
    frame.setVisible(true)
  }
} 
Example 77
Source File: plot.scala    From Scientific-Computing-with-Scala   with MIT License 5 votes vote down vote up
import org.jfree.chart._
import org.jfree.data.xy._
import scala.math._
import scala.collection.mutable.{MutableList, Map}
import java.io.{FileReader, BufferedReader}

object ParallelCoordinates {
  def readCSVFile(filename: String): Map[String, MutableList[String]] = {
    val file = new FileReader(filename)
    val reader = new BufferedReader(file)
    val csvdata: Map[String, MutableList[String]] = Map()
    try {
      val alldata = new MutableList[Array[String]]
      var line:String = null
      while ({line = reader.readLine(); line} != null) {
        if (line.length != 0) {
          val delimiter: String = ","
          var splitline: Array[String] = line.split(delimiter).map(_.trim)
          alldata += splitline
        }
      }
      val labels = MutableList("sepal length", "sepal width",
        "petal length", "petal width", "class")
      val labelled = labels.zipWithIndex.map {
        case (label, index) => label -> alldata.map(x => x(index))
      }
      for (pair <- labelled) {
        csvdata += pair
      }
    } finally {
      reader.close()
    }
    csvdata
  }

  def main(args: Array[String]) {
    val data = readCSVFile("iris.csv")
    val dataset = new DefaultXYDataset
    for (i <- 0 until data("sepal length").size) {
      val x = Array(0.0, 1.0, 2.0, 3.0)
      val y1 = data("sepal length")(i).toDouble
      val y2 = data("sepal width")(i).toDouble
      val y3 = data("petal length")(i).toDouble
      val y4 = data("petal width")(i).toDouble
      val y = Array(y1, y2, y3, y4)
      val cls = data("class")(i)
      dataset.addSeries(cls + i, Array(x, y))
    }
    val frame = new ChartFrame("Parallel Coordinates",
      ChartFactory.createXYLineChart("Parallel Coordinates", "x", "y",
      dataset, org.jfree.chart.plot.PlotOrientation.VERTICAL,
      false, false, false))
    frame.pack()
    frame.setVisible(true)
  }
} 
Example 78
Source File: CSVReader.scala    From Scientific-Computing-with-Scala   with MIT License 5 votes vote down vote up
import scala.collection.mutable.{MutableList, Map}
import java.io.{FileReader, BufferedReader}

object CSVReader {
 def main(args: Array[String]) {
   val file = new FileReader("iris.csv")
   val reader = new BufferedReader(file)
   try {
     val alldata = new MutableList[Array[String]]
     var line:String = null
     while ({line = reader.readLine(); line} != null) {
       if (line.length != 0) {
         val delimiter: String = ","
         var splitline: Array[String] = line.split(delimiter).map(_.trim)
         alldata += splitline
       }
     }
     val labels = MutableList("sepal length", "sepal width",
       "petal length", "petal width", "class")
     val labelled = labels.zipWithIndex.map {
       case (label, index) => label -> alldata.map(x => x(index))
     }
     val csvdata: Map[String, MutableList[String]] = Map()
     for (pair <- labelled) {
       csvdata += pair                                           
     }
   }
   finally {
     reader.close()
   }
 }
} 
Example 79
Source File: CustomReceiver.scala    From sparkoscope   with Apache License 2.0 5 votes vote down vote up
// scalastyle:off println
package org.apache.spark.examples.streaming

import java.io.{BufferedReader, InputStreamReader}
import java.net.Socket
import java.nio.charset.StandardCharsets

import org.apache.spark.SparkConf
import org.apache.spark.internal.Logging
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.receiver.Receiver


  private def receive() {
   var socket: Socket = null
   var userInput: String = null
   try {
     logInfo("Connecting to " + host + ":" + port)
     socket = new Socket(host, port)
     logInfo("Connected to " + host + ":" + port)
     val reader = new BufferedReader(
       new InputStreamReader(socket.getInputStream(), StandardCharsets.UTF_8))
     userInput = reader.readLine()
     while(!isStopped && userInput != null) {
       store(userInput)
       userInput = reader.readLine()
     }
     reader.close()
     socket.close()
     logInfo("Stopped receiving")
     restart("Trying to connect again")
   } catch {
     case e: java.net.ConnectException =>
       restart("Error connecting to " + host + ":" + port, e)
     case t: Throwable =>
       restart("Error receiving data", t)
   }
  }
}
// scalastyle:on println 
Example 80
Source File: Build.scala    From lagom   with Apache License 2.0 5 votes vote down vote up
import java.net.HttpURLConnection
import java.io.BufferedReader
import java.io.InputStreamReader

import sbt.IO
import sbt.File

object DevModeBuild {

  def waitForReloads(file: File, count: Int): Unit = {
    waitFor[Int](
      IO.readLines(file).count(_.nonEmpty),
      _ == count,
      actual => s"Expected $count reloads, but only got $actual"
    )
  }

  def waitFor[T](check: => T, assertion: T => Boolean, error: T => String): Unit = {
    var checks = 0
    var actual = check
    while (!assertion(actual) && checks < 10) {
      Thread.sleep(1000)
      actual = check
      checks += 1
    }
    if (!assertion(actual)) {
      throw new RuntimeException(error(actual))
    }
  }

} 
Example 81
Source File: Build.scala    From lagom   with Apache License 2.0 5 votes vote down vote up
import java.net.HttpURLConnection
import java.io.BufferedReader
import java.io.InputStreamReader

import sbt.IO
import sbt.File

object DevModeBuild {

  def waitForReloads(file: File, count: Int): Unit = {
    waitFor[Int](
      IO.readLines(file).count(_.nonEmpty),
      _ == count,
      actual => s"Expected $count reloads, but only got $actual"
    )
  }

  def waitFor[T](check: => T, assertion: T => Boolean, error: T => String): Unit = {
    var checks = 0
    var actual = check
    while (!assertion(actual) && checks < 10) {
      Thread.sleep(1000)
      actual = check
      checks += 1
    }
    if (!assertion(actual)) {
      throw new RuntimeException(error(actual))
    }
  }

} 
Example 82
Source File: ExtractNodes.scala    From tensorframes   with Apache License 2.0 5 votes vote down vote up
package org.tensorframes.dsl

import java.io.{BufferedReader, InputStreamReader, File}
import java.nio.file.Files
import java.nio.charset.StandardCharsets
import org.tensorframes.Logging
import org.scalatest.Matchers

import scala.collection.JavaConverters._

object ExtractNodes extends Matchers with Logging {

  def executeCommand(py: String): Map[String, String] = {
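    // Runs a small Python script that builds the TensorFlow graph described by `py`,
    // prints every graph node delimited by ">>>>>" markers, and parses the output
    // into a map of node name -> node proto text.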
    val content =
      s"""
         |from __future__ import print_function
         |import tensorflow as tf
         |
         |$py
         |g = tf.get_default_graph().as_graph_def()
         |for n in g.node:
         |    print(">>>>>", str(n.name), "<<<<<<")
         |    print(n)
       """.stripMargin
    val f = File.createTempFile("pythonTest", ".py")
    logTrace(s"Created temp file ${f.getAbsolutePath}")
    Files.write(f.toPath, content.getBytes(StandardCharsets.UTF_8))
    // Using the standard python installation in the PATH. It needs to have TensorFlow installed.
    val p = new ProcessBuilder("python", f.getAbsolutePath).start()
    val s = p.getInputStream
    val isr = new InputStreamReader(s)
    val br = new BufferedReader(isr)
    var res: String = ""
    var str: String = ""
    while(str != null) {
      str = br.readLine()
      if (str != null) {
        res = res + "\n" + str
      }
    }

    p.waitFor()
    assert(p.exitValue() === 0, (p.exitValue(),
      {
        println(content)
        s"===========\n$content\n==========="
      }))
    res.split(">>>>>").map(_.trim).filterNot(_.isEmpty).map { b =>
      val zs = b.split("\n")
      val node = zs.head.dropRight(7)
      val rest = zs.tail
      node -> rest.mkString("\n")
    } .toMap
  }

  def compareOutput(py: String, nodes: Operation*): Unit = {
    val g = TestUtilities.buildGraph(nodes.head, nodes.tail:_*)
    val m1 = g.getNodeList.asScala.map { n =>
      n.getName -> n.toString.trim
    } .toMap
    val pym = executeCommand(py)
    logTrace(s"m1 = '$m1'")
    logTrace(s"pym = '$pym'")
    assert((m1.keySet -- pym.keySet).isEmpty, {
      val diff = (m1.keySet -- pym.keySet).toSeq.sorted
      s"Found extra nodes in scala: $diff"
    })
    assert((pym.keySet -- m1.keySet).isEmpty, {
      val diff = (pym.keySet -- m1.keySet).toSeq.sorted
      s"Found extra nodes in python: $diff"
    })
    for (k <- m1.keySet) {
      assert(m1(k) === pym(k),
        s"scala=${m1(k)}\npython=${pym(k)}")
    }
  }
} 
Example 83
Source File: Util.scala    From incubator-retired-gearpump   with Apache License 2.0 5 votes vote down vote up
package org.apache.gearpump.util

import java.io.{BufferedReader, File, FileInputStream, InputStreamReader}
import java.net.{ServerSocket, URI}
import scala.concurrent.forkjoin.ThreadLocalRandom
import scala.sys.process.Process
import scala.util.{Failure, Success, Try}

import com.typesafe.config.{Config, ConfigFactory}

import org.apache.gearpump.cluster.AppJar
import org.apache.gearpump.jarstore.JarStoreClient
import org.apache.gearpump.transport.HostPort

object Util {
  val LOG = LogUtil.getLogger(getClass)
  private val defaultUri = new URI("file:///")
  private val appNamePattern = "^[a-zA-Z_][a-zA-Z0-9_]+$".r.pattern

  def validApplicationName(appName: String): Boolean = {
    appNamePattern.matcher(appName).matches()
  }

  def getCurrentClassPath: Array[String] = {
    val classpath = System.getProperty("java.class.path")
    val classpathList = classpath.split(File.pathSeparator)
    classpathList
  }

  def version: String = {
    val home = System.getProperty(Constants.GEARPUMP_HOME)
    val version = Try {
      val versionFile = new FileInputStream(new File(home, "VERSION"))
      val reader = new BufferedReader(new InputStreamReader(versionFile))
      val version = reader.readLine().replace("version:=", "")
      versionFile.close()
      version
    }
    version match {
      case Success(version) =>
        version
      case Failure(ex) =>
        LOG.error("failed to read VERSION file, " + ex.getMessage)
        "Unknown-Version"
    }
  }

  def startProcess(options: Array[String], classPath: Array[String], mainClass: String,
      arguments: Array[String]): RichProcess = {
    val java = System.getProperty("java.home") + "/bin/java"

    val command = List(java) ++ options ++
      List("-cp", classPath.mkString(File.pathSeparator), mainClass) ++ arguments
    LOG.info(s"Starting executor process java $mainClass ${arguments.mkString(" ")} " +
      s"\n ${options.mkString(" ")}")
    val logger = new ProcessLogRedirector()
    val process = Process(command).run(logger)
    new RichProcess(process, logger)
  }

  
  def resolveJvmSetting(conf: Config): AppJvmSettings = {

    import org.apache.gearpump.util.Constants._

    val appMasterVMArgs = Try(conf.getString(GEARPUMP_APPMASTER_ARGS).split("\\s+")
      .filter(_.nonEmpty)).toOption
    val executorVMArgs = Try(conf.getString(GEARPUMP_EXECUTOR_ARGS).split("\\s+")
      .filter(_.nonEmpty)).toOption

    val appMasterClassPath = Try(
      conf.getString(GEARPUMP_APPMASTER_EXTRA_CLASSPATH)
        .split("[;:]").filter(_.nonEmpty)).toOption

    val executorClassPath = Try(
      conf.getString(GEARPUMP_EXECUTOR_EXTRA_CLASSPATH)
        .split(File.pathSeparator).filter(_.nonEmpty)).toOption

    AppJvmSettings(
      JvmSetting(appMasterVMArgs.getOrElse(Array.empty[String]),
        appMasterClassPath.getOrElse(Array.empty[String])),
      JvmSetting(executorVMArgs
        .getOrElse(Array.empty[String]), executorClassPath.getOrElse(Array.empty[String])))
  }

  def asSubDirOfGearpumpHome(dir: String): File = {
    new File(System.getProperty(Constants.GEARPUMP_HOME), dir)

  }
} 
Example 84
Source File: VaultHelper.scala    From sparta   with Apache License 2.0 5 votes vote down vote up
package com.stratio.sparta.plugin.helper

import java.io.{BufferedReader, InputStreamReader}

import akka.event.slf4j.SLF4JLogging
import org.apache.http.client.HttpClient
import org.apache.http.client.methods.{HttpPost, HttpUriRequest}
import org.apache.http.entity.StringEntity
import org.apache.http.impl.client.HttpClientBuilder

import scala.util.parsing.json.JSON

object VaultHelper extends SLF4JLogging {

  lazy val client: HttpClient = HttpClientBuilder.create().build()
  lazy val jsonTemplate: String = "{ \"token\" : \"_replace_\" }"

  def getTemporalToken(vaultHost: String, token: String): String = {
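    // Asks Vault's sys/wrapping/wrap endpoint to wrap the supplied token in a
    // short-lived (2000s TTL) response-wrapping token and returns that wrapped token.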
    val requestUrl = s"$vaultHost/v1/sys/wrapping/wrap"

    log.debug(s"Requesting temporal token: $requestUrl")

    val post = new HttpPost(requestUrl)

    post.addHeader("X-Vault-Token", token)
    post.addHeader("X-Vault-Wrap-TTL", "2000s")
    post.setEntity(new StringEntity(jsonTemplate.replace("_replace_", token)))

    getContentFromResponse(post, "wrap_info")("token").asInstanceOf[String]
  }

  private def getContentFromResponse(uriRequest: HttpUriRequest,
                                     parentField: String): Map[String, Any] = {
    val response = client.execute(uriRequest)
    val rd = new BufferedReader(new InputStreamReader(response.getEntity.getContent))
    val json = JSON.parseFull(
      Stream.continually(rd.readLine()).takeWhile(_ != null).mkString).get.asInstanceOf[Map[String, Any]]

    log.debug(s"getFrom Vault ${json.mkString("\n")}")
    if (response.getStatusLine.getStatusCode != 200) {
      val errors = json("errors").asInstanceOf[List[String]].mkString("\n")
      throw new RuntimeException(errors)
    } else json(parentField).asInstanceOf[Map[String, Any]]
  }
} 
Example 85
Source File: FileSystem.scala    From ohara   with Apache License 2.0 4 votes vote down vote up
package oharastream.ohara.client.filesystem

import java.io.{BufferedReader, BufferedWriter, IOException, InputStreamReader, OutputStreamWriter}
import java.nio.charset.{Charset, StandardCharsets}

import oharastream.ohara.client.filesystem.ftp.FtpFileSystem
import oharastream.ohara.client.filesystem.hdfs.HdfsFileSystem
import oharastream.ohara.client.filesystem.smb.SmbFileSystem
import oharastream.ohara.common.exception.FileSystemException

trait FileSystem extends oharastream.ohara.kafka.connector.storage.FileSystem {
  
  def readLines(path: String, encode: String = "UTF-8"): Array[String] = {
    val reader = new BufferedReader(new InputStreamReader(open(path), Charset.forName(encode)))
    try Iterator.continually(reader.readLine()).takeWhile(_ != null).toArray
    finally reader.close()
  }

  def wrap[T](f: () => T): T =
    try {
      f()
    } catch {
      case e: IOException           => throw new FileSystemException(e.getMessage, e)
      case e: IllegalStateException => throw new FileSystemException(e.getMessage, e)
    }
}

object FileSystem {
  def hdfsBuilder: HdfsFileSystem.Builder = HdfsFileSystem.builder
  def ftpBuilder: FtpFileSystem.Builder   = FtpFileSystem.builder
  def smbBuilder: SmbFileSystem.Builder   = SmbFileSystem.builder
}