java.util.NoSuchElementException Scala Examples

The following examples show how to use java.util.NoSuchElementException in Scala. Each example is taken from an open-source project; the source file, project, and license are noted above each example.
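For orientation before the project examples, here is a minimal REPL sketch (not taken from any of the projects below) of where NoSuchElementException typically surfaces in Scala and the usual defensive patterns:

import scala.util.Try

// Common ways NoSuchElementException surfaces:
Iterator.empty.next()             // throws NoSuchElementException ("next on empty iterator")
Map("a" -> 1)("b")                // throws NoSuchElementException ("key not found: b")
Option.empty[Int].get             // throws NoSuchElementException ("None.get")

// Common ways to avoid or absorb it:
Map("a" -> 1).get("b")            // None, no exception
Try(Map("a" -> 1)("b")).toOption  // None; Try catches the exception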
Example 1
Source File: NextIteratorSuite.scala    From iolap   with Apache License 2.0
package org.apache.spark.util

import java.util.NoSuchElementException

import scala.collection.mutable.Buffer

import org.scalatest.Matchers

import org.apache.spark.SparkFunSuite

class NextIteratorSuite extends SparkFunSuite with Matchers {
  test("one iteration") {
    val i = new StubIterator(Buffer(1))
    i.hasNext should be (true)
    i.next should be (1)
    i.hasNext should be (false)
    intercept[NoSuchElementException] { i.next() }
  }

  test("two iterations") {
    val i = new StubIterator(Buffer(1, 2))
    i.hasNext should be (true)
    i.next should be (1)
    i.hasNext should be (true)
    i.next should be (2)
    i.hasNext should be (false)
    intercept[NoSuchElementException] { i.next() }
  }

  test("empty iteration") {
    val i = new StubIterator(Buffer())
    i.hasNext should be (false)
    intercept[NoSuchElementException] { i.next() }
  }

  test("close is called once for empty iterations") {
    val i = new StubIterator(Buffer())
    i.hasNext should be (false)
    i.hasNext should be (false)
    i.closeCalled should be (1)
  }

  test("close is called once for non-empty iterations") {
    val i = new StubIterator(Buffer(1, 2))
    i.next should be (1)
    i.next should be (2)
    // close isn't called until we check for the next element
    i.closeCalled should be (0)
    i.hasNext should be (false)
    i.closeCalled should be (1)
    i.hasNext should be (false)
    i.closeCalled should be (1)
  }

  class StubIterator(ints: Buffer[Int])  extends NextIterator[Int] {
    var closeCalled = 0

    override def getNext(): Int = {
      if (ints.size == 0) {
        finished = true
        0
      } else {
        ints.remove(0)
      }
    }

    override def close() {
      closeCalled += 1
    }
  }
} 
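StubIterator above extends Spark's NextIterator, which this page does not show. For context, here is a minimal sketch of its contract, modeled on org.apache.spark.util.NextIterator: subclasses set finished inside getNext(), and the base class guarantees close() runs exactly once.

private[spark] abstract class NextIterator[U] extends Iterator[U] {
  private var gotNext = false
  private var nextValue: U = _
  private var closed = false
  protected var finished = false

  // Subclasses produce the next element here, setting `finished` when exhausted.
  protected def getNext(): U

  // Subclasses release resources here; invoked at most once via closeIfNeeded().
  protected def close(): Unit

  def closeIfNeeded(): Unit = {
    if (!closed) {
      closed = true
      close()
    }
  }

  override def hasNext: Boolean = {
    if (!finished && !gotNext) {
      nextValue = getNext()
      if (finished) closeIfNeeded()
      gotNext = true
    }
    !finished
  }

  override def next(): U = {
    if (!hasNext) throw new NoSuchElementException("End of stream")
    gotNext = false
    nextValue
  }
}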
Example 2
Source File: ExecutionReportJsonProtocol.scala    From seahorse-workflow-executor   with Apache License 2.0
package io.deepsense.models.json.workflow

import java.util.NoSuchElementException

import spray.json._

import io.deepsense.commons.exception.FailureDescription
import io.deepsense.graph.Node
import io.deepsense.graph.nodestate.NodeStatus
import io.deepsense.models.json.graph.NodeStatusJsonProtocol
import io.deepsense.models.workflows._

trait ExecutionReportJsonProtocol
  extends NodeStatusJsonProtocol
  with EntitiesMapJsonProtocol{

  implicit val executionReportJsonFormat: RootJsonFormat[ExecutionReport] =
      new RootJsonFormat[ExecutionReport] {
    override def write(executionReport: ExecutionReport): JsValue = JsObject(
      "resultEntities" -> executionReport.resultEntities.toJson,
      "nodes" -> executionReport.nodesStatuses.toJson,
      "error" -> executionReport.error.toJson
    )

    override def read(json: JsValue): ExecutionReport = {
      val fieldGetter = getField(json.asJsObject.fields) _
      val resultEntities: EntitiesMap = fieldGetter("resultEntities").convertTo[EntitiesMap]
      val nodes: Map[Node.Id, NodeStatus] = fieldGetter("nodes").convertTo[Map[Node.Id, NodeStatus]]
      val error: Option[FailureDescription] =
        fieldGetter("error").convertTo[Option[FailureDescription]]
      ExecutionReport(nodes, resultEntities, error)
    }
  }

  private def getField(fields: Map[String, JsValue])(fieldName: String): JsValue = {
    try {
      fields(fieldName)
    } catch {
      case e: NoSuchElementException =>
        throw new DeserializationException(s"Could not find field: $fieldName", e)
    }
  }
}

object ExecutionReportJsonProtocol extends ExecutionReportJsonProtocol 
Example 3
Source File: Aggregator.scala    From cave   with MIT License
package com.cave.metrics.data.evaluator

import java.util.NoSuchElementException

object Aggregator extends Enumeration {
  type Aggregator = Value

  def toInflux(aggregator: Aggregator) = {
    aggregator match {
      case `p99` => "percentile(value, 99)"
      case `p999` => "percentile(value, 99.9)"
      case `p95` => "percentile(value, 95)"
      case `p90` => "percentile(value, 90)"
      case x => x + "(value)"
    }
  }

  val count, min, max, mean, mode, median, sum, stddev, p99, p999, p95, p90 = Value

  def withNameOpt(name: String): Option[Value] = {
    try {
      Some(withName(name))
    } catch {
      case _: NoSuchElementException => None
    }
  }
} 
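A usage sketch for the code above (the results shown in comments follow from the definitions; they are not output captured from the cave project):

Aggregator.withNameOpt("p99")                          // Some(p99)
Aggregator.withNameOpt("p50")                          // None: withName threw NoSuchElementException
Aggregator.withNameOpt("p99").map(Aggregator.toInflux) // Some("percentile(value, 99)")
Aggregator.withNameOpt("sum").map(Aggregator.toInflux) // Some("sum(value)")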
Example 4
Source File: ModifyOptionAtTest.scala    From quicklens   with Apache License 2.0
package com.softwaremill.quicklens

import java.util.NoSuchElementException

import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

class ModifyOptionAtTest extends AnyFlatSpec with Matchers {

  it should "modify a Option with case class item" in {
    modify(Option(1))(_.at).using(_ + 1) should be(Option(2))
  }

  it should "modify a Option in a case class hierarchy" in {
    case class Foo(a: Int)
    case class Bar(foo: Foo)
    case class BarOpt(maybeBar: Option[Bar])
    case class BazOpt(barOpt: BarOpt)
    modify(BazOpt(BarOpt(Some(Bar(Foo(4))))))(_.barOpt.maybeBar.at.foo.a).using(_ + 1) should be(
      BazOpt(BarOpt(Some(Bar(Foo(5)))))
    )
  }

  it should "crashes on missing key" in {
    an[NoSuchElementException] should be thrownBy modify(Option.empty[Int])(_.at).using(_ + 1)
  }
} 
Example 5
Source File: PipelineAction.scala    From marvin-engine-executor   with Apache License 2.0
package org.marvin.executor.actions

import java.time.LocalDateTime
import java.util.NoSuchElementException

import akka.Done
import akka.actor.{Actor, ActorLogging, ActorRef, Props}
import akka.pattern.ask
import akka.util.Timeout
import org.marvin.artifact.manager.ArtifactSaver
import org.marvin.artifact.manager.ArtifactSaver.SaveToRemote
import org.marvin.exception.MarvinEExecutorException
import org.marvin.executor.actions.PipelineAction.{PipelineExecute, PipelineExecutionStatus}
import org.marvin.executor.proxies.BatchActionProxy
import org.marvin.executor.proxies.EngineProxy.{ExecuteBatch, Reload}
import org.marvin.model._
import org.marvin.util.{JsonUtil, LocalCache}

import scala.collection.mutable.ListBuffer
import scala.concurrent.duration._
import scala.concurrent.{Await, Future}
import scala.util.Success

object PipelineAction {
  case class PipelineExecute(protocol:String, params:String)
  case class PipelineExecutionStatus(protocol:String)
}

class PipelineAction(metadata: EngineMetadata) extends Actor with ActorLogging{
  implicit val ec = context.dispatcher

  var artifactSaver: ActorRef = _
  var cache: LocalCache[BatchExecution] = _

  override def preStart() = {
    artifactSaver = context.actorOf(ArtifactSaver.build(metadata), name = "artifactSaver")
    cache = new LocalCache[BatchExecution](maximumSize = 10000L, defaultTTL = 30.days)
  }

  override def receive  = {
    case PipelineExecute(protocol, params) =>
      implicit val futureTimeout = Timeout(metadata.pipelineTimeout milliseconds)

      log.info(s"Starting to process pipeline process with. Protocol: [$protocol] and Params: [$params].")
      cache.save(protocol, new BatchExecution("pipeline", protocol, LocalDateTime.now, Working))

      try{
        for(actionName <- metadata.pipelineActions){
          val engineActionMetadata = metadata.actionsMap(actionName)
          val _actor: ActorRef = context.actorOf(Props(new BatchActionProxy(engineActionMetadata)), name = actionName.concat("Actor"))
          Await.result((_actor ? Reload(protocol)), futureTimeout.duration)
          Await.result((_actor ? ExecuteBatch(protocol, params)), futureTimeout.duration)
          context stop _actor

          val futures:ListBuffer[Future[Done]] = ListBuffer[Future[Done]]()

          for(artifactName <- engineActionMetadata.artifactsToPersist) {
            futures += (artifactSaver ? SaveToRemote(artifactName, protocol)).mapTo[Done]
          }

          if (!futures.isEmpty) Future.sequence(futures).onComplete{
            case Success(response) =>
              log.info(s"All artifacts from [$actionName] were saved with success!! [$response]")
          }
        }
      }catch {
        case e: Exception =>
          cache.save(protocol, new BatchExecution("pipeline", protocol, LocalDateTime.now, Failed))
          throw e
      }

      cache.save(protocol, new BatchExecution("pipeline", protocol, LocalDateTime.now, Finished))

    case PipelineExecutionStatus(protocol) =>
      log.info(s"Getting pipeline execution status to protocol $protocol.")

      try {
        sender ! JsonUtil.toJson(cache.load(protocol).get)

      }catch {
        case _: NoSuchElementException =>
          sender ! akka.actor.Status.Failure(new MarvinEExecutorException(s"Protocol $protocol not found!"))
      }

    case Done =>
      log.info("Work Done!")

    case _ =>
      log.warning(s"Not valid message !!")

  }
} 
Example 6
Source File: JsonSupport.scala    From akka-stream-json   with Apache License 2.0
package de.knutwalker.akka.http

import de.knutwalker.akka.stream.JsonStreamParser

import akka.http.scaladsl.model.HttpEntity
import akka.http.scaladsl.model.MediaTypes.`application/json`
import akka.http.scaladsl.unmarshalling.{ FromEntityUnmarshaller, Unmarshaller }
import akka.http.scaladsl.util.FastFuture
import akka.stream.scaladsl.Sink
import akka.stream.stage.{ GraphStageLogic, GraphStageWithMaterializedValue, InHandler }
import akka.stream.{ AbruptStageTerminationException, Attributes, Inlet, SinkShape }

import jawn.Facade

import scala.concurrent.{ Future, Promise }
import java.util.NoSuchElementException

object JsonSupport extends JsonSupport {
  private def firstElementSink[J <: AnyRef]: Sink[J, Future[J]] =
    Sink.fromGraph(new FirstElementSinkStage[J])

  private final class FirstElementSinkStage[J <: AnyRef] extends GraphStageWithMaterializedValue[SinkShape[J], Future[J]] {
    private[this] val in: Inlet[J] = Inlet("firstElement.in")

    override val shape: SinkShape[J] = SinkShape.of(in)
    override protected def initialAttributes: Attributes = Attributes.name("firstElement")

    override def createLogicAndMaterializedValue(inheritedAttributes: Attributes): (GraphStageLogic, Future[J]) = {
      val p: Promise[J] = Promise()
      (new GraphStageLogic(shape) with InHandler {
        private[this] var element: J = null.asInstanceOf[J]

        override def preStart(): Unit = pull(in)

        def onPush(): Unit = {
          if (element eq null) {
            element = grab(in)
          }
          pull(in)
        }

        override def onUpstreamFinish(): Unit = {
          val el = element
          element = null.asInstanceOf[J]
          if (el ne null) {
            p.trySuccess(el)
          } else {
            p.tryFailure(new NoSuchElementException("No complete json entity consumed"))
          }
          completeStage()
        }

        override def onUpstreamFailure(ex: Throwable): Unit = {
          element = null.asInstanceOf[J]
          p.tryFailure(ex)
          failStage(ex)
        }

        override def postStop(): Unit = {
          if (!p.isCompleted) {
            p.failure(new AbruptStageTerminationException(this))
            ()
          }
        }

        setHandler(in, this)
      }, p.future)
    }

    override def toString: String = "FirstElementSinkStage"
  }
}

trait JsonSupport {

  implicit def jsonUnmarshaller[J <: AnyRef : Facade]: FromEntityUnmarshaller[J] =
    Unmarshaller.withMaterializer[HttpEntity, J](_ => implicit mat => {
      case HttpEntity.Strict(_, data) => FastFuture(JsonStreamParser.parse[J](data))
      case entity                     => entity.dataBytes.via(JsonStreamParser[J]).runWith(JsonSupport.firstElementSink[J])
    }).forContentTypes(`application/json`)
} 
Example 7
Source File: NextIteratorSuite.scala    From BigDatalog   with Apache License 2.0
package org.apache.spark.util

import java.util.NoSuchElementException

import scala.collection.mutable.Buffer

import org.scalatest.Matchers

import org.apache.spark.SparkFunSuite

class NextIteratorSuite extends SparkFunSuite with Matchers {
  test("one iteration") {
    val i = new StubIterator(Buffer(1))
    i.hasNext should be (true)
    i.next should be (1)
    i.hasNext should be (false)
    intercept[NoSuchElementException] { i.next() }
  }

  test("two iterations") {
    val i = new StubIterator(Buffer(1, 2))
    i.hasNext should be (true)
    i.next should be (1)
    i.hasNext should be (true)
    i.next should be (2)
    i.hasNext should be (false)
    intercept[NoSuchElementException] { i.next() }
  }

  test("empty iteration") {
    val i = new StubIterator(Buffer())
    i.hasNext should be (false)
    intercept[NoSuchElementException] { i.next() }
  }

  test("close is called once for empty iterations") {
    val i = new StubIterator(Buffer())
    i.hasNext should be (false)
    i.hasNext should be (false)
    i.closeCalled should be (1)
  }

  test("close is called once for non-empty iterations") {
    val i = new StubIterator(Buffer(1, 2))
    i.next should be (1)
    i.next should be (2)
    // close isn't called until we check for the next element
    i.closeCalled should be (0)
    i.hasNext should be (false)
    i.closeCalled should be (1)
    i.hasNext should be (false)
    i.closeCalled should be (1)
  }

  class StubIterator(ints: Buffer[Int])  extends NextIterator[Int] {
    var closeCalled = 0

    override def getNext(): Int = {
      if (ints.size == 0) {
        finished = true
        0
      } else {
        ints.remove(0)
      }
    }

    override def close() {
      closeCalled += 1
    }
  }
} 
Example 8
Source File: RowIterator.scala    From BigDatalog   with Apache License 2.0
package org.apache.spark.sql.execution

import java.util.NoSuchElementException

import org.apache.spark.sql.catalyst.InternalRow


abstract class RowIterator {
  /**
   * Advance this iterator by a single row. Returns `false` if this iterator has no more rows
   * and `true` otherwise. If this returns `true`, then the new row can be retrieved by calling
   * [[getRow]].
   */
  def advanceNext(): Boolean

  /**
   * Retrieve the row from this iterator. This method is idempotent. It is illegal to call this
   * method after [[advanceNext()]] has returned `false`.
   */
  def getRow: InternalRow

  /** Convert this RowIterator into a [[scala.collection.Iterator]]. */
  def toScala: Iterator[InternalRow] = new RowIteratorToScala(this)
}

object RowIterator {
  def fromScala(scalaIter: Iterator[InternalRow]): RowIterator = {
    scalaIter match {
      case wrappedRowIter: RowIteratorToScala => wrappedRowIter.rowIter
      case _ => new RowIteratorFromScala(scalaIter)
    }
  }
}

private final class RowIteratorToScala(val rowIter: RowIterator) extends Iterator[InternalRow] {
  private [this] var hasNextWasCalled: Boolean = false
  private [this] var _hasNext: Boolean = false
  override def hasNext: Boolean = {
    // Idempotency:
    if (!hasNextWasCalled) {
      _hasNext = rowIter.advanceNext()
      hasNextWasCalled = true
    }
    _hasNext
  }
  override def next(): InternalRow = {
    if (!hasNext) throw new NoSuchElementException
    hasNextWasCalled = false
    rowIter.getRow
  }
}

private final class RowIteratorFromScala(scalaIter: Iterator[InternalRow]) extends RowIterator {
  private[this] var _next: InternalRow = null
  override def advanceNext(): Boolean = {
    if (scalaIter.hasNext) {
      _next = scalaIter.next()
      true
    } else {
      _next = null
      false
    }
  }
  override def getRow: InternalRow = _next
  override def toScala: Iterator[InternalRow] = scalaIter
} 
Example 9
Source File: MedianHeapSuite.scala    From Spark-2.3.1   with Apache License 2.0
package org.apache.spark.util.collection

import java.util.NoSuchElementException

import org.apache.spark.SparkFunSuite

class MedianHeapSuite extends SparkFunSuite {

  test("If no numbers in MedianHeap, NoSuchElementException is thrown.") {
    val medianHeap = new MedianHeap()
    intercept[NoSuchElementException] {
      medianHeap.median
    }
  }

  test("Median should be correct when size of MedianHeap is even") {
    val array = Array(0, 1, 2, 3, 4, 5, 6, 7, 8, 9)
    val medianHeap = new MedianHeap()
    array.foreach(medianHeap.insert(_))
    assert(medianHeap.size() === 10)
    assert(medianHeap.median === 4.5)
  }

  test("Median should be correct when size of MedianHeap is odd") {
    val array = Array(0, 1, 2, 3, 4, 5, 6, 7, 8)
    val medianHeap = new MedianHeap()
    array.foreach(medianHeap.insert(_))
    assert(medianHeap.size() === 9)
    assert(medianHeap.median === 4)
  }

  test("Median should be correct though there are duplicated numbers inside.") {
    val array = Array(0, 0, 1, 1, 2, 3, 4)
    val medianHeap = new MedianHeap()
    array.foreach(medianHeap.insert(_))
    assert(medianHeap.size === 7)
    assert(medianHeap.median === 1)
  }

  test("Median should be correct when input data is skewed.") {
    val medianHeap = new MedianHeap()
    (0 until 10).foreach(_ => medianHeap.insert(5))
    assert(medianHeap.median === 5)
    (0 until 100).foreach(_ => medianHeap.insert(10))
    assert(medianHeap.median === 10)
    (0 until 1000).foreach(_ => medianHeap.insert(0))
    assert(medianHeap.median === 0)
  }
} 
Example 10
Source File: NextIteratorSuite.scala    From Spark-2.3.1   with Apache License 2.0
package org.apache.spark.util

import java.util.NoSuchElementException

import scala.collection.mutable.Buffer

import org.scalatest.Matchers

import org.apache.spark.SparkFunSuite

class NextIteratorSuite extends SparkFunSuite with Matchers {
  test("one iteration") {
    val i = new StubIterator(Buffer(1))
    i.hasNext should be (true)
    i.next should be (1)
    i.hasNext should be (false)
    intercept[NoSuchElementException] { i.next() }
  }

  test("two iterations") {
    val i = new StubIterator(Buffer(1, 2))
    i.hasNext should be (true)
    i.next should be (1)
    i.hasNext should be (true)
    i.next should be (2)
    i.hasNext should be (false)
    intercept[NoSuchElementException] { i.next() }
  }

  test("empty iteration") {
    val i = new StubIterator(Buffer())
    i.hasNext should be (false)
    intercept[NoSuchElementException] { i.next() }
  }

  test("close is called once for empty iterations") {
    val i = new StubIterator(Buffer())
    i.hasNext should be (false)
    i.hasNext should be (false)
    i.closeCalled should be (1)
  }

  test("close is called once for non-empty iterations") {
    val i = new StubIterator(Buffer(1, 2))
    i.next should be (1)
    i.next should be (2)
    // close isn't called until we check for the next element
    i.closeCalled should be (0)
    i.hasNext should be (false)
    i.closeCalled should be (1)
    i.hasNext should be (false)
    i.closeCalled should be (1)
  }

  class StubIterator(ints: Buffer[Int])  extends NextIterator[Int] {
    var closeCalled = 0

    override def getNext(): Int = {
      if (ints.size == 0) {
        finished = true
        0
      } else {
        ints.remove(0)
      }
    }

    override def close() {
      closeCalled += 1
    }
  }
} 
Example 11
Source File: RowIterator.scala    From Spark-2.3.1   with Apache License 2.0
package org.apache.spark.sql.execution

import java.util.NoSuchElementException

import org.apache.spark.sql.catalyst.InternalRow


abstract class RowIterator {
  /**
   * Advance this iterator by a single row. Returns `false` if this iterator has no more rows
   * and `true` otherwise. If this returns `true`, then the new row can be retrieved by calling
   * [[getRow]].
   */
  def advanceNext(): Boolean

  /**
   * Retrieve the row from this iterator. This method is idempotent. It is illegal to call this
   * method after [[advanceNext()]] has returned `false`.
   */
  def getRow: InternalRow

  /** Convert this RowIterator into a [[scala.collection.Iterator]]. */
  def toScala: Iterator[InternalRow] = new RowIteratorToScala(this)
}

object RowIterator {
  def fromScala(scalaIter: Iterator[InternalRow]): RowIterator = {
    scalaIter match {
      case wrappedRowIter: RowIteratorToScala => wrappedRowIter.rowIter
      case _ => new RowIteratorFromScala(scalaIter)
    }
  }
}

private final class RowIteratorToScala(val rowIter: RowIterator) extends Iterator[InternalRow] {
  private [this] var hasNextWasCalled: Boolean = false
  private [this] var _hasNext: Boolean = false
  override def hasNext: Boolean = {
    // Idempotency:
    if (!hasNextWasCalled) {
      _hasNext = rowIter.advanceNext()
      hasNextWasCalled = true
    }
    _hasNext
  }
  override def next(): InternalRow = {
    if (!hasNext) throw new NoSuchElementException
    hasNextWasCalled = false
    rowIter.getRow
  }
}

private final class RowIteratorFromScala(scalaIter: Iterator[InternalRow]) extends RowIterator {
  private[this] var _next: InternalRow = null
  override def advanceNext(): Boolean = {
    if (scalaIter.hasNext) {
      _next = scalaIter.next()
      true
    } else {
      _next = null
      false
    }
  }
  override def getRow: InternalRow = _next
  override def toScala: Iterator[InternalRow] = scalaIter
} 
Example 12
Source File: NextIteratorSuite.scala    From spark1.52   with Apache License 2.0
package org.apache.spark.util

import java.util.NoSuchElementException

import scala.collection.mutable.Buffer

import org.scalatest.Matchers

import org.apache.spark.SparkFunSuite

class NextIteratorSuite extends SparkFunSuite with Matchers {
  test("one iteration") {//一个迭代器
    val i = new StubIterator(Buffer(1))
    i.hasNext should be (true)
    i.next should be (1)
    i.hasNext should be (false)
    intercept[NoSuchElementException] { i.next() }
  }

  test("two iterations") {//两个迭代器
    val i = new StubIterator(Buffer(1, 2))
    i.hasNext should be (true)
    i.next should be (1)
    i.hasNext should be (true)
    i.next should be (2)
    i.hasNext should be (false)
    intercept[NoSuchElementException] { i.next() }
  }

  test("empty iteration") {//空的迭代器
    val i = new StubIterator(Buffer())
    i.hasNext should be (false)
    intercept[NoSuchElementException] { i.next() }
  }

  test("close is called once for empty iterations") {//关闭为一次为空的迭代器
    val i = new StubIterator(Buffer())
    i.hasNext should be (false)
    i.hasNext should be (false)
    i.closeCalled should be (1)
  }

  test("close is called once for non-empty iterations") {
    val i = new StubIterator(Buffer(1, 2))
    i.next should be (1)
    i.next should be (2)
    // close isn't called until we check for the next element
    i.closeCalled should be (0)
    i.hasNext should be (false)
    i.closeCalled should be (1)
    i.hasNext should be (false)
    i.closeCalled should be (1)
  }

  class StubIterator(ints: Buffer[Int])  extends NextIterator[Int] {
    var closeCalled = 0

    override def getNext(): Int = {
      if (ints.size == 0) {
        finished = true
        0
      } else {
        ints.remove(0)
      }
    }

    override def close() {
      closeCalled += 1
    }
  }
} 
Example 13
Source File: RowIterator.scala    From spark1.52   with Apache License 2.0
package org.apache.spark.sql.execution

import java.util.NoSuchElementException

import org.apache.spark.sql.catalyst.InternalRow


abstract class RowIterator {
  /**
   * Advance this iterator by a single row. Returns `false` if this iterator has no more rows
   * and `true` otherwise. If this returns `true`, then the new row can be retrieved by calling
   * [[getRow]].
   */
  def advanceNext(): Boolean

  /**
   * Retrieve the row from this iterator. This method is idempotent. It is illegal to call this
   * method after [[advanceNext()]] has returned `false`.
   */
  def getRow: InternalRow

  /** Convert this RowIterator into a [[scala.collection.Iterator]]. */
  def toScala: Iterator[InternalRow] = new RowIteratorToScala(this)
}

object RowIterator {
  def fromScala(scalaIter: Iterator[InternalRow]): RowIterator = {
    scalaIter match {
      case wrappedRowIter: RowIteratorToScala => wrappedRowIter.rowIter
      case _ => new RowIteratorFromScala(scalaIter)
    }
  }
}

private final class RowIteratorToScala(val rowIter: RowIterator) extends Iterator[InternalRow] {
  private [this] var hasNextWasCalled: Boolean = false
  private [this] var _hasNext: Boolean = false
  override def hasNext: Boolean = {
    // Idempotency:
    if (!hasNextWasCalled) {
      _hasNext = rowIter.advanceNext()
      hasNextWasCalled = true
    }
    _hasNext
  }
  override def next(): InternalRow = {
    if (!hasNext) throw new NoSuchElementException
    hasNextWasCalled = false
    rowIter.getRow
  }
}

private final class RowIteratorFromScala(scalaIter: Iterator[InternalRow]) extends RowIterator {
  private[this] var _next: InternalRow = null
  override def advanceNext(): Boolean = {
    if (scalaIter.hasNext) {
      _next = scalaIter.next()
      true
    } else {
      _next = null
      false
    }
  }
  override def getRow: InternalRow = _next
  override def toScala: Iterator[InternalRow] = scalaIter
} 
Example 14
Source File: ExecutionReportJsonProtocol.scala    From seahorse   with Apache License 2.0
package ai.deepsense.models.json.workflow

import java.util.NoSuchElementException

import spray.json._

import ai.deepsense.commons.exception.FailureDescription
import ai.deepsense.graph.Node
import ai.deepsense.graph.nodestate.NodeStatus
import ai.deepsense.models.json.graph.NodeStatusJsonProtocol
import ai.deepsense.models.workflows._

trait ExecutionReportJsonProtocol
  extends NodeStatusJsonProtocol
  with EntitiesMapJsonProtocol{

  implicit val executionReportJsonFormat: RootJsonFormat[ExecutionReport] =
      new RootJsonFormat[ExecutionReport] {
    override def write(executionReport: ExecutionReport): JsValue = JsObject(
      "resultEntities" -> executionReport.resultEntities.toJson,
      "nodes" -> executionReport.nodesStatuses.toJson,
      "error" -> executionReport.error.toJson
    )

    override def read(json: JsValue): ExecutionReport = {
      val fieldGetter = getField(json.asJsObject.fields) _
      val resultEntities: EntitiesMap = fieldGetter("resultEntities").convertTo[EntitiesMap]
      val nodes: Map[Node.Id, NodeStatus] = fieldGetter("nodes").convertTo[Map[Node.Id, NodeStatus]]
      val error: Option[FailureDescription] =
        fieldGetter("error").convertTo[Option[FailureDescription]]
      ExecutionReport(nodes, resultEntities, error)
    }
  }

  private def getField(fields: Map[String, JsValue])(fieldName: String): JsValue = {
    try {
      fields(fieldName)
    } catch {
      case e: NoSuchElementException =>
        throw new DeserializationException(s"Could not find field: $fieldName", e)
    }
  }
}

object ExecutionReportJsonProtocol extends ExecutionReportJsonProtocol 
Example 15
Source File: RowIterator.scala    From drizzle-spark   with Apache License 2.0
package org.apache.spark.sql.execution

import java.util.NoSuchElementException

import org.apache.spark.sql.catalyst.InternalRow


abstract class RowIterator {
  /**
   * Advance this iterator by a single row. Returns `false` if this iterator has no more rows
   * and `true` otherwise. If this returns `true`, then the new row can be retrieved by calling
   * [[getRow]].
   */
  def advanceNext(): Boolean

  /**
   * Retrieve the row from this iterator. This method is idempotent. It is illegal to call this
   * method after [[advanceNext()]] has returned `false`.
   */
  def getRow: InternalRow

  /** Convert this RowIterator into a [[scala.collection.Iterator]]. */
  def toScala: Iterator[InternalRow] = new RowIteratorToScala(this)
}

object RowIterator {
  def fromScala(scalaIter: Iterator[InternalRow]): RowIterator = {
    scalaIter match {
      case wrappedRowIter: RowIteratorToScala => wrappedRowIter.rowIter
      case _ => new RowIteratorFromScala(scalaIter)
    }
  }
}

private final class RowIteratorToScala(val rowIter: RowIterator) extends Iterator[InternalRow] {
  private [this] var hasNextWasCalled: Boolean = false
  private [this] var _hasNext: Boolean = false
  override def hasNext: Boolean = {
    // Idempotency:
    if (!hasNextWasCalled) {
      _hasNext = rowIter.advanceNext()
      hasNextWasCalled = true
    }
    _hasNext
  }
  override def next(): InternalRow = {
    if (!hasNext) throw new NoSuchElementException
    hasNextWasCalled = false
    rowIter.getRow
  }
}

private final class RowIteratorFromScala(scalaIter: Iterator[InternalRow]) extends RowIterator {
  private[this] var _next: InternalRow = null
  override def advanceNext(): Boolean = {
    if (scalaIter.hasNext) {
      _next = scalaIter.next()
      true
    } else {
      _next = null
      false
    }
  }
  override def getRow: InternalRow = _next
  override def toScala: Iterator[InternalRow] = scalaIter
} 
Example 16
Source File: NextIteratorSuite.scala    From multi-tenancy-spark   with Apache License 2.0
package org.apache.spark.util

import java.util.NoSuchElementException

import scala.collection.mutable.Buffer

import org.scalatest.Matchers

import org.apache.spark.SparkFunSuite

class NextIteratorSuite extends SparkFunSuite with Matchers {
  test("one iteration") {
    val i = new StubIterator(Buffer(1))
    i.hasNext should be (true)
    i.next should be (1)
    i.hasNext should be (false)
    intercept[NoSuchElementException] { i.next() }
  }

  test("two iterations") {
    val i = new StubIterator(Buffer(1, 2))
    i.hasNext should be (true)
    i.next should be (1)
    i.hasNext should be (true)
    i.next should be (2)
    i.hasNext should be (false)
    intercept[NoSuchElementException] { i.next() }
  }

  test("empty iteration") {
    val i = new StubIterator(Buffer())
    i.hasNext should be (false)
    intercept[NoSuchElementException] { i.next() }
  }

  test("close is called once for empty iterations") {
    val i = new StubIterator(Buffer())
    i.hasNext should be (false)
    i.hasNext should be (false)
    i.closeCalled should be (1)
  }

  test("close is called once for non-empty iterations") {
    val i = new StubIterator(Buffer(1, 2))
    i.next should be (1)
    i.next should be (2)
    // close isn't called until we check for the next element
    i.closeCalled should be (0)
    i.hasNext should be (false)
    i.closeCalled should be (1)
    i.hasNext should be (false)
    i.closeCalled should be (1)
  }

  class StubIterator(ints: Buffer[Int])  extends NextIterator[Int] {
    var closeCalled = 0

    override def getNext(): Int = {
      if (ints.size == 0) {
        finished = true
        0
      } else {
        ints.remove(0)
      }
    }

    override def close() {
      closeCalled += 1
    }
  }
} 
Example 17
Source File: RowIterator.scala    From multi-tenancy-spark   with Apache License 2.0
package org.apache.spark.sql.execution

import java.util.NoSuchElementException

import org.apache.spark.sql.catalyst.InternalRow


abstract class RowIterator {
  /**
   * Advance this iterator by a single row. Returns `false` if this iterator has no more rows
   * and `true` otherwise. If this returns `true`, then the new row can be retrieved by calling
   * [[getRow]].
   */
  def advanceNext(): Boolean

  /**
   * Retrieve the row from this iterator. This method is idempotent. It is illegal to call this
   * method after [[advanceNext()]] has returned `false`.
   */
  def getRow: InternalRow

  /** Convert this RowIterator into a [[scala.collection.Iterator]]. */
  def toScala: Iterator[InternalRow] = new RowIteratorToScala(this)
}

object RowIterator {
  def fromScala(scalaIter: Iterator[InternalRow]): RowIterator = {
    scalaIter match {
      case wrappedRowIter: RowIteratorToScala => wrappedRowIter.rowIter
      case _ => new RowIteratorFromScala(scalaIter)
    }
  }
}

private final class RowIteratorToScala(val rowIter: RowIterator) extends Iterator[InternalRow] {
  private [this] var hasNextWasCalled: Boolean = false
  private [this] var _hasNext: Boolean = false
  override def hasNext: Boolean = {
    // Idempotency:
    if (!hasNextWasCalled) {
      _hasNext = rowIter.advanceNext()
      hasNextWasCalled = true
    }
    _hasNext
  }
  override def next(): InternalRow = {
    if (!hasNext) throw new NoSuchElementException
    hasNextWasCalled = false
    rowIter.getRow
  }
}

private final class RowIteratorFromScala(scalaIter: Iterator[InternalRow]) extends RowIterator {
  private[this] var _next: InternalRow = null
  override def advanceNext(): Boolean = {
    if (scalaIter.hasNext) {
      _next = scalaIter.next()
      true
    } else {
      _next = null
      false
    }
  }
  override def getRow: InternalRow = _next
  override def toScala: Iterator[InternalRow] = scalaIter
} 
Example 18
Source File: EthereumGethSpout.scala    From Raphtory   with Apache License 2.0
package com.raphtory.examples.blockchain.spouts

import java.net.InetAddress
import java.util.NoSuchElementException

import com.raphtory.core.components.Spout.SpoutTrait
import com.raphtory.core.utils.Utils
import com.raphtory.tests.EtherAPITest.baseRequest
import com.raphtory.tests.EtherAPITest.currentBlock
import com.raphtory.tests.EtherAPITest.request

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.Duration
import scala.concurrent.duration.MILLISECONDS
import scala.concurrent.duration.NANOSECONDS
import scala.concurrent.duration.SECONDS
import scala.language.postfixOps
import scala.sys.process._
import scalaj.http.Http
import scalaj.http.HttpRequest
import spray.json._
import akka.http.scaladsl.unmarshalling.Unmarshal
import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport._
import akka.stream.ActorMaterializer
import spray.json.DefaultJsonProtocol._

import scala.concurrent.duration._
import scala.concurrent.Await

case class EthResult(blockHash:Option[String],blockNumber:Option[String],from:Option[String],gas:Option[String],gasPrice:Option[String],hash:Option[String],input:Option[String],nonce:Option[String],r:Option[String],s:Option[String],to:Option[String],transactionIndex:Option[String],v:Option[String],value:Option[String])
case class EthTransaction(id:Option[String],jsonrpc:Option[String],result:EthResult)
class EthereumGethSpout extends SpoutTrait {
  var currentBlock = System.getenv().getOrDefault("SPOUT_ETHEREUM_START_BLOCK_INDEX", "9014194").trim.toInt
  var highestBlock = System.getenv().getOrDefault("SPOUT_ETHEREUM_MAXIMUM_BLOCK_INDEX", "10026447").trim.toInt
  val nodeIP       = System.getenv().getOrDefault("SPOUT_ETHEREUM_IP_ADDRESS", "127.0.0.1").trim
  val nodePort     = System.getenv().getOrDefault("SPOUT_ETHEREUM_PORT", "8545").trim
  val baseRequest  = requestBuilder()

  implicit val materializer = ActorMaterializer()
  implicit val EthFormat = jsonFormat14(EthResult)
  implicit val EthTransactionFormat = jsonFormat3(EthTransaction)
  if (nodeIP.matches(Utils.IPRegex))
    println(s"Connecting to Ethereum RPC \n Address:$nodeIP \n Port:$nodePort")
  else
    println(s"Connecting to Ethereum RPC \n Address:${hostname2Ip(nodeIP)} \n Port:$nodePort")

  override protected def ProcessSpoutTask(message: Any): Unit = message match {
    case StartSpout  => pullNextBlock()
    case "nextBlock" => pullNextBlock()
  }

  def pullNextBlock(): Unit = {
    if (currentBlock > highestBlock)
      return
    try {
      log.debug(s"Trying block $currentBlock")
      val transactionCountHex = executeRequest("eth_getBlockTransactionCountByNumber", "\"0x" + currentBlock.toHexString + "\"")
      val transactionCount = Integer.parseInt(transactionCountHex.fields("result").toString().drop(3).dropRight(1), 16)
      if(transactionCount>0){
        var transactions = "["
        for (i <- 0 until transactionCount)
          transactions = transactions + batchRequestBuilder("eth_getTransactionByBlockNumberAndIndex",s""""0x${currentBlock.toHexString}","0x${i.toHexString}"""")+","
        val transactionBlock = executeBatchRequest(transactions.dropRight(1)+"]")
        val transList = transactionBlock.parseJson.convertTo[List[EthTransaction]]
        transList.foreach(t => { // try/catch needed to skip contract transactions; TODO: include them
          try{sendTuple(s"${t.result.blockNumber.get},${t.result.from.get},${t.result.to.get},${t.result.value.get}")}
          catch {case e:NoSuchElementException =>}

        })

      }
      currentBlock += 1
      AllocateSpoutTask(Duration(1, NANOSECONDS), "nextBlock")
    } catch {
      case e: NumberFormatException => AllocateSpoutTask(Duration(1, SECONDS), "nextBlock")
      case e: Exception             => e.printStackTrace(); AllocateSpoutTask(Duration(1, SECONDS), "nextBlock")
    }
  }


  def batchRequestBuilder(command:String,params:String):String = s"""{"jsonrpc": "2.0", "id":"100", "method": "$command", "params": [$params]}"""
  def executeBatchRequest(data: String) = requestBatch(data).execute().body.toString
  def requestBatch(data: String): HttpRequest = baseRequest.postData(data)
  def requestBuilder() =
    if (nodeIP.matches(Utils.IPRegex))
      Http("http://" + nodeIP + ":" + nodePort).header("content-type", "application/json")
    else
      Http("http://" + hostname2Ip(nodeIP) + ":" + nodePort).header("content-type", "application/json")
  def request(command: String, params: String = ""): HttpRequest =
    baseRequest.postData(s"""{"jsonrpc": "2.0", "id":"100", "method": "$command", "params": [$params]}""")
  def executeRequest(command: String, params: String = "") =
    request(command, params).execute().body.toString.parseJson.asJsObject

  def hostname2Ip(hostname: String): String = InetAddress.getByName(hostname).getHostAddress()

} 
Example 19
Source File: PrimitiveVector.scala    From SparseML   with Apache License 2.0
package org.apache.spark.mllib.sparselr.Utils

import java.util.NoSuchElementException
import scala.reflect.ClassTag


class PrimitiveVector[@specialized(Long, Int, Double) V: ClassTag](initialSize: Int = 64) {
  private var _numElements = 0
  private var _array: Array[V] = _

  // NB: initialized separately from the declaration so the specialized subclass
  // does not allocate its own duplicate array.
  _array = new Array[V](initialSize)

  def size: Int = _numElements

  def iterator: Iterator[V] = new Iterator[V] {
    var index = 0
    override def hasNext: Boolean = index < _numElements
    override def next(): V = {
      if (!hasNext) {
        throw new NoSuchElementException
      }
      val value = _array(index)
      index += 1
      value
    }
  }

  /** Resizes the array, dropping elements if the total length decreases. */
  def resize(newLength: Int): PrimitiveVector[V] = {
    val newArray = new Array[V](newLength)
    _array.copyToArray(newArray)
    _array = newArray
    if (newLength < _numElements) {
      _numElements = newLength
    }
    this
  }
}
Example 20
Source File: NextIteratorSuite.scala    From SparkCore   with Apache License 2.0
package org.apache.spark.util

import java.util.NoSuchElementException

import scala.collection.mutable.Buffer

import org.scalatest.FunSuite
import org.scalatest.Matchers

class NextIteratorSuite extends FunSuite with Matchers {
  test("one iteration") {
    val i = new StubIterator(Buffer(1))
    i.hasNext should be (true)
    i.next should be (1)
    i.hasNext should be (false)
    intercept[NoSuchElementException] { i.next() }
  }

  test("two iterations") {
    val i = new StubIterator(Buffer(1, 2))
    i.hasNext should be (true)
    i.next should be (1)
    i.hasNext should be (true)
    i.next should be (2)
    i.hasNext should be (false)
    intercept[NoSuchElementException] { i.next() }
  }

  test("empty iteration") {
    val i = new StubIterator(Buffer())
    i.hasNext should be (false)
    intercept[NoSuchElementException] { i.next() }
  }

  test("close is called once for empty iterations") {
    val i = new StubIterator(Buffer())
    i.hasNext should be (false)
    i.hasNext should be (false)
    i.closeCalled should be (1)
  }

  test("close is called once for non-empty iterations") {
    val i = new StubIterator(Buffer(1, 2))
    i.next should be (1)
    i.next should be (2)
    // close isn't called until we check for the next element
    i.closeCalled should be (0)
    i.hasNext should be (false)
    i.closeCalled should be (1)
    i.hasNext should be (false)
    i.closeCalled should be (1)
  }

  class StubIterator(ints: Buffer[Int])  extends NextIterator[Int] {
    var closeCalled = 0

    override def getNext() = {
      if (ints.size == 0) {
        finished = true
        0
      } else {
        ints.remove(0)
      }
    }

    override def close() {
      closeCalled += 1
    }
  }
} 
Example 21
Source File: NextIteratorSuite.scala    From sparkoscope   with Apache License 2.0
package org.apache.spark.util

import java.util.NoSuchElementException

import scala.collection.mutable.Buffer

import org.scalatest.Matchers

import org.apache.spark.SparkFunSuite

class NextIteratorSuite extends SparkFunSuite with Matchers {
  test("one iteration") {
    val i = new StubIterator(Buffer(1))
    i.hasNext should be (true)
    i.next should be (1)
    i.hasNext should be (false)
    intercept[NoSuchElementException] { i.next() }
  }

  test("two iterations") {
    val i = new StubIterator(Buffer(1, 2))
    i.hasNext should be (true)
    i.next should be (1)
    i.hasNext should be (true)
    i.next should be (2)
    i.hasNext should be (false)
    intercept[NoSuchElementException] { i.next() }
  }

  test("empty iteration") {
    val i = new StubIterator(Buffer())
    i.hasNext should be (false)
    intercept[NoSuchElementException] { i.next() }
  }

  test("close is called once for empty iterations") {
    val i = new StubIterator(Buffer())
    i.hasNext should be (false)
    i.hasNext should be (false)
    i.closeCalled should be (1)
  }

  test("close is called once for non-empty iterations") {
    val i = new StubIterator(Buffer(1, 2))
    i.next should be (1)
    i.next should be (2)
    // close isn't called until we check for the next element
    i.closeCalled should be (0)
    i.hasNext should be (false)
    i.closeCalled should be (1)
    i.hasNext should be (false)
    i.closeCalled should be (1)
  }

  class StubIterator(ints: Buffer[Int])  extends NextIterator[Int] {
    var closeCalled = 0

    override def getNext(): Int = {
      if (ints.size == 0) {
        finished = true
        0
      } else {
        ints.remove(0)
      }
    }

    override def close() {
      closeCalled += 1
    }
  }
} 
Example 22
Source File: RowIterator.scala    From sparkoscope   with Apache License 2.0
package org.apache.spark.sql.execution

import java.util.NoSuchElementException

import org.apache.spark.sql.catalyst.InternalRow


abstract class RowIterator {
  /**
   * Advance this iterator by a single row. Returns `false` if this iterator has no more rows
   * and `true` otherwise. If this returns `true`, then the new row can be retrieved by calling
   * [[getRow]].
   */
  def advanceNext(): Boolean

  /**
   * Retrieve the row from this iterator. This method is idempotent. It is illegal to call this
   * method after [[advanceNext()]] has returned `false`.
   */
  def getRow: InternalRow

  /** Convert this RowIterator into a [[scala.collection.Iterator]]. */
  def toScala: Iterator[InternalRow] = new RowIteratorToScala(this)
}

object RowIterator {
  def fromScala(scalaIter: Iterator[InternalRow]): RowIterator = {
    scalaIter match {
      case wrappedRowIter: RowIteratorToScala => wrappedRowIter.rowIter
      case _ => new RowIteratorFromScala(scalaIter)
    }
  }
}

private final class RowIteratorToScala(val rowIter: RowIterator) extends Iterator[InternalRow] {
  private [this] var hasNextWasCalled: Boolean = false
  private [this] var _hasNext: Boolean = false
  override def hasNext: Boolean = {
    // Idempotency:
    if (!hasNextWasCalled) {
      _hasNext = rowIter.advanceNext()
      hasNextWasCalled = true
    }
    _hasNext
  }
  override def next(): InternalRow = {
    if (!hasNext) throw new NoSuchElementException
    hasNextWasCalled = false
    rowIter.getRow
  }
}

private final class RowIteratorFromScala(scalaIter: Iterator[InternalRow]) extends RowIterator {
  private[this] var _next: InternalRow = null
  override def advanceNext(): Boolean = {
    if (scalaIter.hasNext) {
      _next = scalaIter.next()
      true
    } else {
      _next = null
      false
    }
  }
  override def getRow: InternalRow = _next
  override def toScala: Iterator[InternalRow] = scalaIter
} 
Example 23
Source File: BufferedIterator.scala    From splash   with Apache License 2.0
package com.memverge.splash

import java.util.NoSuchElementException

import scala.reflect.ClassTag


case class BufferedIterator[T](
    iterator: Iterator[T],
    bufferSize: Int = 512)(implicit m: ClassTag[T]) extends Iterator[T] {
  private var writeIndex: Int = 0
  private val buffer = new Array[T](bufferSize)
  private var readIndex: Int = 0

  fill()

  override def next(): T = {
    if (readIndex == writeIndex && writeIndex < bufferSize) {
      throw new NoSuchElementException("end of iterator")
    }
    val obj = buffer(readIndex)
    readIndex += 1
    if (readIndex == writeIndex) {
      if (writeIndex == bufferSize) {
        readIndex = 0
        writeIndex = 0
        fill()
      }
    }
    obj
  }

  override def hasNext: Boolean = readIndex < writeIndex

  def fill(): Unit = {
    while (iterator.hasNext && writeIndex < bufferSize) {
      val obj = iterator.next()
      if (obj != null) {
        buffer(writeIndex) = obj
        writeIndex += 1
      }
    }
  }

  def bufferLastOpt(): Option[T] = {
    if (writeIndex == 0) {
      None
    } else {
      Some(buffer(writeIndex - 1))
    }
  }
} 
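A usage sketch for BufferedIterator (values are illustrative): it eagerly fills a fixed-size array from the wrapped iterator, refills once the buffer is drained while full, and throws when the source is exhausted:

val buffered = BufferedIterator(Iterator(1, 2, 3, 4, 5, 6), bufferSize = 4)
while (buffered.hasNext) print(s"${buffered.next()} ")  // prints: 1 2 3 4 5 6
buffered.next()  // throws NoSuchElementException("end of iterator")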
Example 24
Source File: VectorIndexerModel.scala    From mleap   with Apache License 2.0
package ml.combust.mleap.core.feature

import java.util.NoSuchElementException

import ml.combust.mleap.core.Model
import ml.combust.mleap.core.annotation.SparkCode
import ml.combust.mleap.core.types.{StructType, TensorType}
import org.apache.spark.ml.linalg.{DenseVector, SparseVector, Vector}


@SparkCode(uri = "https://github.com/apache/spark/blob/v2.4.5/mllib/src/main/scala/org/apache/spark/ml/feature/VectorIndexer.scala")
case class VectorIndexerModel(numFeatures: Int,
                              categoryMaps: Map[Int, Map[Double, Int]],
                              handleInvalid: HandleInvalid = HandleInvalid.Error) extends Model {
  val sortedCatFeatureIndices = categoryMaps.keys.toArray.sorted
  val localVectorMap = categoryMaps
  val localNumFeatures = numFeatures
  val localHandleInvalid = handleInvalid

  def apply(features: Vector): Vector = predict(features)
  def predict(features: Vector): Vector = {
    assert(features.size == localNumFeatures, "VectorIndexerModel expected vector of length" +
      s" $numFeatures but found length ${features.size}")
    features match {
      case dv: DenseVector =>
        var hasInvalid = false
        val tmpv = dv.copy
        localVectorMap.foreach { case (featureIndex: Int, categoryMap: Map[Double, Int]) =>
          try {
            tmpv.values(featureIndex) = categoryMap(tmpv(featureIndex))
          } catch {
            case _: NoSuchElementException =>
              localHandleInvalid match {
                case HandleInvalid.Error =>
                  throw new IllegalArgumentException(s"VectorIndexer encountered invalid value " +
                    s"${tmpv(featureIndex)} on feature index $featureIndex. To handle " +
                    s"or skip invalid value, try setting VectorIndexer.handleInvalid.")
                case HandleInvalid.Keep =>
                  tmpv.values(featureIndex) = categoryMap.size
                case HandleInvalid.Skip =>
                  hasInvalid = true
              }
          }
        }
        if (hasInvalid) null else tmpv
      case sv: SparseVector =>
        // We use the fact that categorical value 0 is always mapped to index 0.
        var hasInvalid = false
        val tmpv = sv.copy
        var catFeatureIdx = 0 // index into sortedCatFeatureIndices
        var k = 0 // index into non-zero elements of sparse vector
        while (catFeatureIdx < sortedCatFeatureIndices.length && k < tmpv.indices.length) {
          val featureIndex = sortedCatFeatureIndices(catFeatureIdx)
          if (featureIndex < tmpv.indices(k)) {
            catFeatureIdx += 1
          } else if (featureIndex > tmpv.indices(k)) {
            k += 1
          } else {
            try {
              tmpv.values(k) = localVectorMap(featureIndex)(tmpv.values(k))
            } catch {
              case _: NoSuchElementException =>
                localHandleInvalid match {
                  case HandleInvalid.Error =>
                    throw new IllegalArgumentException(s"VectorIndexer encountered invalid value " +
                      s"${tmpv.values(k)} on feature index $featureIndex. To handle " +
                      s"or skip invalid value, try setting VectorIndexer.handleInvalid.")
                  case HandleInvalid.Keep =>
                    tmpv.values(k) = localVectorMap(featureIndex).size
                  case HandleInvalid.Skip =>
                    hasInvalid = true
                }
            }
            catFeatureIdx += 1
            k += 1
          }
        }
        if (hasInvalid) null else tmpv
    }
  }

  override def inputSchema: StructType = StructType("input" -> TensorType.Double(localNumFeatures)).get

  override def outputSchema: StructType = StructType("output" -> TensorType.Double(localNumFeatures)).get

} 
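A hypothetical usage sketch for the model above; the category map and input values are invented for illustration, and Vectors is org.apache.spark.ml.linalg.Vectors:

import org.apache.spark.ml.linalg.Vectors

val model = VectorIndexerModel(
  numFeatures = 2,
  categoryMaps = Map(0 -> Map(0.0 -> 0, 5.0 -> 1)))  // feature 0 is categorical

model(Vectors.dense(5.0, 3.2))  // category 5.0 recoded to index 1, yielding [1.0, 3.2]
model(Vectors.dense(7.0, 3.2))  // categoryMap(7.0) throws NoSuchElementException, surfaced
                                // as IllegalArgumentException under HandleInvalid.Error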
Example 25
Source File: RowIterator.scala    From XSQL   with Apache License 2.0
package org.apache.spark.sql.execution

import java.util.NoSuchElementException

import org.apache.spark.sql.catalyst.InternalRow


abstract class RowIterator {
  /**
   * Advance this iterator by a single row. Returns `false` if this iterator has no more rows
   * and `true` otherwise. If this returns `true`, then the new row can be retrieved by calling
   * [[getRow]].
   */
  def advanceNext(): Boolean

  /**
   * Retrieve the row from this iterator. This method is idempotent. It is illegal to call this
   * method after [[advanceNext()]] has returned `false`.
   */
  def getRow: InternalRow

  /** Convert this RowIterator into a [[scala.collection.Iterator]]. */
  def toScala: Iterator[InternalRow] = new RowIteratorToScala(this)
}

object RowIterator {
  def fromScala(scalaIter: Iterator[InternalRow]): RowIterator = {
    scalaIter match {
      case wrappedRowIter: RowIteratorToScala => wrappedRowIter.rowIter
      case _ => new RowIteratorFromScala(scalaIter)
    }
  }
}

private final class RowIteratorToScala(val rowIter: RowIterator) extends Iterator[InternalRow] {
  private [this] var hasNextWasCalled: Boolean = false
  private [this] var _hasNext: Boolean = false
  override def hasNext: Boolean = {
    // Idempotency:
    if (!hasNextWasCalled) {
      _hasNext = rowIter.advanceNext()
      hasNextWasCalled = true
    }
    _hasNext
  }
  override def next(): InternalRow = {
    if (!hasNext) throw new NoSuchElementException
    hasNextWasCalled = false
    rowIter.getRow
  }
}

private final class RowIteratorFromScala(scalaIter: Iterator[InternalRow]) extends RowIterator {
  private[this] var _next: InternalRow = null
  override def advanceNext(): Boolean = {
    if (scalaIter.hasNext) {
      _next = scalaIter.next()
      true
    } else {
      _next = null
      false
    }
  }
  override def getRow: InternalRow = _next
  override def toScala: Iterator[InternalRow] = scalaIter
} 
Example 26
Source File: TestCaseSupport.scala    From parquet4s   with MIT License
package com.github.mjakubowski84.parquet4s

import java.util.NoSuchElementException

import com.github.mjakubowski84.parquet4s.CompatibilityParty.CompatibilityParty
import com.github.mjakubowski84.parquet4s.ParquetWriter.ParquetWriterFactory

import scala.reflect.runtime.universe.TypeTag

object CompatibilityParty {
  sealed trait CompatibilityParty
  case object Spark extends CompatibilityParty
  case object Reader extends CompatibilityParty
  case object Writer extends CompatibilityParty

  val All: Set[CompatibilityParty] = Set(Spark, Reader, Writer)
}

object Case {

  type CaseDef = Case[_ <: Product]

  def apply[T <: Product : TypeTag : ParquetReader : ParquetWriterFactory](
                                                                     description: String,
                                                                     data: Seq[T],
                                                                     compatibilityParties: Set[CompatibilityParty] = CompatibilityParty.All
                                                                   ): Case[T] =
    new Case(
      description = description,
      compatibilityParties = compatibilityParties,
      _data = data,
      _reader = implicitly[ParquetReader[T]],
      _writerFactory = implicitly[ParquetWriterFactory[T]],
      _typeTag = implicitly[TypeTag[T]]
    )
}


class Case[T <: Product](
                          val description: String,
                          val compatibilityParties: Set[CompatibilityParty],
                          _data: Seq[T],
                          _reader: ParquetReader[T],
                          _writerFactory: ParquetWriterFactory[T],
                          _typeTag: TypeTag[T]
                        ) {
  type DataType = T
  def data: Seq[DataType] = _data
  def reader: ParquetReader[DataType] = _reader
  def writerFactory: ParquetWriterFactory[DataType] = _writerFactory
  def typeTag: TypeTag[DataType] = _typeTag
}

trait TestCaseSupport {

  def caseDefinitions: Seq[Case.CaseDef]

  def cases(compatibilityParties: Set[CompatibilityParty] = CompatibilityParty.All): Seq[Case.CaseDef] =
    caseDefinitions.filter { caseDefinition =>
      compatibilityParties.forall(caseDefinition.compatibilityParties.contains)
    }

  def cases(compatibilityParty: CompatibilityParty*): Seq[Case.CaseDef] = cases(compatibilityParty.toSet)

  def only[T: TypeTag]: Case.CaseDef = {
    val targetTpe = implicitly[TypeTag[T]].tpe
    caseDefinitions
      .find(_.typeTag.tpe =:= targetTpe)
      .getOrElse(throw new NoSuchElementException(s"Case $targetTpe is not defined"))
  }

} 
Example 27
Source File: NextIteratorSuite.scala    From drizzle-spark   with Apache License 2.0
package org.apache.spark.util

import java.util.NoSuchElementException

import scala.collection.mutable.Buffer

import org.scalatest.Matchers

import org.apache.spark.SparkFunSuite

class NextIteratorSuite extends SparkFunSuite with Matchers {
  test("one iteration") {
    val i = new StubIterator(Buffer(1))
    i.hasNext should be (true)
    i.next should be (1)
    i.hasNext should be (false)
    intercept[NoSuchElementException] { i.next() }
  }

  test("two iterations") {
    val i = new StubIterator(Buffer(1, 2))
    i.hasNext should be (true)
    i.next should be (1)
    i.hasNext should be (true)
    i.next should be (2)
    i.hasNext should be (false)
    intercept[NoSuchElementException] { i.next() }
  }

  test("empty iteration") {
    val i = new StubIterator(Buffer())
    i.hasNext should be (false)
    intercept[NoSuchElementException] { i.next() }
  }

  test("close is called once for empty iterations") {
    val i = new StubIterator(Buffer())
    i.hasNext should be (false)
    i.hasNext should be (false)
    i.closeCalled should be (1)
  }

  test("close is called once for non-empty iterations") {
    val i = new StubIterator(Buffer(1, 2))
    i.next should be (1)
    i.next should be (2)
    // close isn't called until we check for the next element
    i.closeCalled should be (0)
    i.hasNext should be (false)
    i.closeCalled should be (1)
    i.hasNext should be (false)
    i.closeCalled should be (1)
  }

  class StubIterator(ints: Buffer[Int])  extends NextIterator[Int] {
    var closeCalled = 0

    override def getNext(): Int = {
      if (ints.size == 0) {
        finished = true
        0
      } else {
        ints.remove(0)
      }
    }

    override def close() {
      closeCalled += 1
    }
  }
}