org.scalatest.BeforeAndAfterEach Scala Examples

The following examples show how to use org.scalatest.BeforeAndAfterEach. Each example is taken from an open-source project; the line above each listing names the source file, the project it comes from, and its license.
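Before the project examples, here is a minimal, self-contained sketch of the pattern the trait provides. It is not taken from any of the projects below; the suite and field names are invented, and it assumes the ScalaTest 3.0.x-style FunSuite API used throughout this page. The idea is to override beforeEach/afterEach for per-test setup and teardown, and to delegate to super so stacked fixture traits keep working.

import org.scalatest.{BeforeAndAfterEach, FunSuite}

import scala.collection.mutable

class ScratchBufferSpec extends FunSuite with BeforeAndAfterEach {

  // Shared mutable state that every test should see in a clean state.
  private val buffer = mutable.ListBuffer.empty[String]

  override def beforeEach(): Unit = {
    buffer.clear()     // per-test setup
    super.beforeEach() // keep the chain intact for stackable traits
  }

  override def afterEach(): Unit = {
    try {
      // per-test cleanup would go here
    } finally {
      super.afterEach() // always let the rest of the chain run
    }
  }

  test("the buffer starts empty") {
    assert(buffer.isEmpty)
    buffer += "leftover"
  }

  test("state does not leak between tests") {
    assert(buffer.isEmpty) // beforeEach cleared the previous test's element
  }
}

Running teardown inside a finally block and calling super in both hooks is the convention most of the examples below follow as well, so that multiple stacked traits overriding the same hooks all get to run.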
Example 1
Source File: WireMockHelper.scala    From pertax-frontend   with Apache License 2.0
package util

import com.github.tomakehurst.wiremock.WireMockServer
import com.github.tomakehurst.wiremock.core.WireMockConfiguration.wireMockConfig
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, Suite}

trait WireMockHelper extends BeforeAndAfterAll with BeforeAndAfterEach {
  this: Suite =>

  protected val server: WireMockServer = new WireMockServer(wireMockConfig().dynamicPort())

  override def beforeAll(): Unit = {
    server.start()
    super.beforeAll()
  }

  override def beforeEach(): Unit = {
    server.resetAll()
    super.beforeEach()
  }

  override def afterAll(): Unit = {
    super.afterAll()
    server.stop()
  }
} 
Example 2
Source File: KafkaSpec.scala    From kmq   with Apache License 2.0
package com.softwaremill.kmq.redelivery.infrastructure

import net.manub.embeddedkafka.{EmbeddedKafka, EmbeddedKafkaConfig}
import org.apache.kafka.common.serialization.StringDeserializer
import org.scalatest.{BeforeAndAfterEach, Suite}

trait KafkaSpec extends BeforeAndAfterEach { self: Suite =>

  val testKafkaConfig = EmbeddedKafkaConfig(9092, 2182)
  private implicit val stringDeserializer = new StringDeserializer()

  def sendToKafka(topic: String, message: String): Unit = {
    EmbeddedKafka.publishStringMessageToKafka(topic, message)(testKafkaConfig)
  }

  def consumeFromKafka(topic: String): String = {
    EmbeddedKafka.consumeFirstStringMessageFrom(topic)(testKafkaConfig)
  }

  override def beforeEach(): Unit = {
    super.beforeEach()
    EmbeddedKafka.start()(testKafkaConfig)
  }

  override def afterEach(): Unit = {
    super.afterEach()
    EmbeddedKafka.stop()
  }
} 
Example 3
Source File: SqlUnitTest.scala    From SparkUnitTestingExamples   with Apache License 2.0
package com.cloudera.sa.spark.unittest.sql

import org.apache.spark.sql.Row
import org.apache.spark.sql.hive.HiveContext
import org.apache.spark.{SparkConf, SparkContext}
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, FunSuite}

import scala.collection.mutable

class SqlUnitTest extends FunSuite with
  BeforeAndAfterEach with BeforeAndAfterAll {

  @transient var sc: SparkContext = null
  @transient var hiveContext: HiveContext = null

  override def beforeAll(): Unit = {

    val envMap = Map[String,String](("Xmx", "512m"))

    val sparkConfig = new SparkConf()
    sparkConfig.set("spark.broadcast.compress", "false")
    sparkConfig.set("spark.shuffle.compress", "false")
    sparkConfig.set("spark.shuffle.spill.compress", "false")
    sparkConfig.set("spark.io.compression.codec", "lzf")
    sc = new SparkContext("local[2]", "unit test", sparkConfig)
    hiveContext = new HiveContext(sc)
  }

  override def afterAll(): Unit = {
    sc.stop()
  }

  test("Test table creation and summing of counts") {
    val personRDD = sc.parallelize(Seq(Row("ted", 42, "blue"),
      Row("tj", 11, "green"),
      Row("andrew", 9, "green")))

    hiveContext.sql("create table person (name string, age int, color string)")

    val emptyDataFrame = hiveContext.sql("select * from person limit 0")

    val personDataFrame = hiveContext.createDataFrame(personRDD, emptyDataFrame.schema)
    personDataFrame.registerTempTable("tempPerson")

    val ageSumDataFrame = hiveContext.sql("select sum(age) from tempPerson")

    val localAgeSum = ageSumDataFrame.take(10)

    assert(localAgeSum(0).get(0) == 62, "The sum of age should equal 62 but it equaled " + localAgeSum(0).get(0))
  }
} 
Example 4
Source File: TrackerImplTest.scala    From daml   with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.platform.apiserver.services.tracking

import akka.NotUsed
import akka.stream.OverflowStrategy
import akka.stream.scaladsl.{Keep, Source, SourceQueueWithComplete}
import akka.stream.testkit.TestSubscriber
import akka.stream.testkit.scaladsl.TestSink
import com.daml.ledger.api.testing.utils.{
  AkkaBeforeAndAfterAll,
  IsStatusException,
  TestingException
}
import com.daml.ledger.api.v1.command_service.SubmitAndWaitRequest
import com.daml.ledger.api.v1.commands.Commands
import com.daml.ledger.api.v1.completion.Completion
import com.daml.dec.DirectExecutionContext
import com.google.rpc.status.{Status => RpcStatus}
import io.grpc.Status
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.{BeforeAndAfterEach, Matchers, Succeeded, WordSpec}

import scala.concurrent.ExecutionContext.Implicits.global

class TrackerImplTest
    extends WordSpec
    with Matchers
    with BeforeAndAfterEach
    with ScalaFutures
    with AkkaBeforeAndAfterAll {

  private var sut: Tracker = _
  private var consumer: TestSubscriber.Probe[NotUsed] = _
  private var queue: SourceQueueWithComplete[TrackerImpl.QueueInput] = _

  private def input(cid: Int) = SubmitAndWaitRequest(Some(Commands(commandId = cid.toString)))

  override protected def beforeEach(): Unit = {
    val (q, sink) = Source
      .queue[TrackerImpl.QueueInput](1, OverflowStrategy.dropNew)
      .map { in =>
        in.context.success(Completion(in.value.getCommands.commandId, Some(RpcStatus())))
        NotUsed
      }
      .toMat(TestSink.probe[NotUsed])(Keep.both)
      .run()
    queue = q
    sut = new TrackerImpl(q)
    consumer = sink
  }

  override protected def afterEach(): Unit = {
    consumer.cancel()
    queue.complete()
  }

  "Tracker Implementation" when {

    "input is submitted, and the queue is available" should {

      "work successfully" in {

        val resultF1 = sut.track(input(1))
        consumer.requestNext()
        val resultF = resultF1.flatMap(_ => sut.track(input(2)))(DirectExecutionContext)
        consumer.requestNext()
        whenReady(resultF)(_ => Succeeded)
      }
    }

    "input is submitted, and the queue is backpressuring" should {

      "return a RESOURCE_EXHAUSTED error" in {

        sut.track(input(1))
        whenReady(sut.track(input(2)).failed)(IsStatusException(Status.RESOURCE_EXHAUSTED))
      }
    }

    "input is submitted, and the queue has been completed" should {

      "return an ABORTED error" in {

        queue.complete()
        whenReady(sut.track(input(2)).failed)(IsStatusException(Status.ABORTED))
      }
    }

    "input is submitted, and the queue has failed" should {

      "return an ABORTED error" in {

        queue.fail(TestingException("The queue fails with this error."))
        whenReady(sut.track(input(2)).failed)(IsStatusException(Status.ABORTED))
      }
    }
  }
} 
Example 5
Source File: SuiteResourceManagement.scala    From daml   with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.ledger.api.testing.utils

import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, Suite}

trait SuiteResource[+T] {
  protected def suiteResource: Resource[T]
}


trait SuiteResourceManagement {}

trait SuiteResourceManagementAroundAll
    extends SuiteResource[Any]
    with SuiteResourceManagement
    with BeforeAndAfterAll {
  self: Suite =>

  override protected def beforeAll(): Unit = {
    super.beforeAll()
    suiteResource.setup()
  }

  override protected def afterAll(): Unit = {
    suiteResource.close()
    super.afterAll()
  }
}

trait SuiteResourceManagementAroundEach
    extends SuiteResource[Any]
    with SuiteResourceManagement
    with BeforeAndAfterEach {
  self: Suite =>

  override protected def beforeEach(): Unit = {
    super.beforeEach()
    suiteResource.setup()
  }

  override protected def afterEach(): Unit = {
    suiteResource.close()
    super.afterEach()
  }
} 
Example 6
Source File: DerivedResourceTest.scala    From daml   with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.ledger.api.testing.utils

import org.scalatest.{BeforeAndAfterEach, Matchers, WordSpec}

class DerivedResourceTest extends WordSpec with BeforeAndAfterEach with Matchers {

  private var base: Resource[Int] = _
  private var derived: Resource[(Int, Long)] = _

  private var baseTornDown = false
  private var derivedTornDown = false

  override def beforeEach(): Unit = {
    base = new ManagedResource[Int] {
      override protected def construct(): Int = 1

      override protected def destruct(resource: Int): Unit = baseTornDown = true
    }

    derived = new DerivedResource[Int, Long](base) {
      override protected def construct(source: Int): Long = source.toLong

      override protected def destruct(target: Long): Unit = derivedTornDown = true
    }

    baseTornDown = false
  }

  "Derived and base resources" when {

    "uninitialized" should {

      "throw error when base is accessed" in {
        assertThrows[IllegalStateException](base.value)
      }

      "throw error when derived is accessed" in {
        assertThrows[IllegalStateException](derived.value)
      }
    }

    "initialized" should {

      "allow access to base" in {

        derived.setup()
        base.value shouldEqual 1
      }

      "allow access to derived" in {

        derived.setup()
        derived.value shouldEqual (1 -> 1L)
      }
    }

    "torn down" should {

      "execute destructor method in base" in {
        derived.setup()
        derived.close()
        baseTornDown shouldEqual true
      }

      "execute destructor method in derived" in {
        derived.setup()
        derived.close()
        derivedTornDown shouldEqual true
      }

      "throw error when base is accessed" in {
        derived.setup()
        derived.close()
        assertThrows[IllegalStateException](base.value)
      }

      "throw error when derived is accessed" in {
        derived.setup()
        derived.close()
        assertThrows[IllegalStateException](derived.value)
      }
    }
  }
} 
Example 7
Source File: ServerSubscriberStressTest.scala    From daml   with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.grpc.adapter.server.rs

import com.daml.grpc.adapter.TestExecutionSequencerFactory
import org.reactivestreams.tck.flow.support.HelperPublisher
import org.scalatest.concurrent.AsyncTimeLimitedTests
import org.scalatest.time.Span
import org.scalatest.time.SpanSugar._
import org.scalatest.{Assertion, AsyncWordSpec, BeforeAndAfterEach, Matchers}

import scala.concurrent.ExecutionContext.global
import scala.concurrent.Future

class ServerSubscriberStressTest
    extends AsyncWordSpec
    with BeforeAndAfterEach
    with Matchers
    with AsyncTimeLimitedTests {

  private val elemCount = 10000
  private val testRunCount = 50
  private val expectedElemRange = 0.until(elemCount)

  var serverCallStreamObserver: MockServerCallStreamObserver[Int] = _
  var sut: ServerSubscriber[Int] = _
  var helperPublisher: HelperPublisher[Int] = _

  override protected def beforeEach(): Unit = {
    serverCallStreamObserver = new MockServerCallStreamObserver[Int]
    val executor = TestExecutionSequencerFactory.instance.getExecutionSequencer
    sut = new ServerSubscriber[Int](serverCallStreamObserver, executor)
    helperPublisher = new HelperPublisher[Int](0, elemCount, i => i, global)
  }

  "ServerSubscriber" should {

    for (i <- 1.to(testRunCount)) {

      s"work with $elemCount elements when they are requested one by one (test run #$i)" in {
        helperPublisher.subscribe(sut)
        expectedElemRange.foreach(_ => serverCallStreamObserver.demandResponse())
        verifyExpectedElementsArrivedInOrder()
      }

    }
    for (i <- 1.to(testRunCount)) {

      s"work with $elemCount elements when they are requested in bulk (isReady stays true) (test run #$i)" in {
        helperPublisher.subscribe(sut)
        serverCallStreamObserver.demandResponse(elemCount)
        verifyExpectedElementsArrivedInOrder()
      }
    }
  }

  private def verifyExpectedElementsArrivedInOrder(): Future[Assertion] = {
    serverCallStreamObserver.elementsWhenCompleted.map { receivedElements =>
      receivedElements should contain theSameElementsInOrderAs expectedElemRange
    }
  }

  override def timeLimit: Span = 10.seconds
} 
Example 8
Source File: PostgresAroundEach.scala    From daml   with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.testing.postgresql

import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, Suite}

trait PostgresAroundEach
    extends PostgresAroundSuite
    with BeforeAndAfterAll
    with BeforeAndAfterEach {
  self: Suite =>

  override protected def beforeAll(): Unit = {
    // We start PostgreSQL before calling `super` because _generally_ the database needs to be up
    // before everything else.
    connectToPostgresqlServer()
    super.beforeAll()
  }

  override protected def afterAll(): Unit = {
    super.afterAll()
    disconnectFromPostgresqlServer()
  }

  override protected def beforeEach(): Unit = {
    // We create the database before calling `super` for the same reasons as above.
    createNewDatabase()
    super.beforeEach()
  }

  override protected def afterEach(): Unit = {
    super.afterEach()
    dropDatabase()
  }
} 
Example 9
Source File: ActorSystemMetricsSpec.scala    From prometheus-akka   with Apache License 2.0
package com.workday.prometheus.akka

import scala.collection.JavaConverters._
import scala.concurrent.duration._

import org.scalatest.BeforeAndAfterEach
import org.scalatest.concurrent.Eventually

import com.workday.prometheus.akka.ActorSystemMetrics._

import akka.actor._
import io.prometheus.client.Collector

class ActorSystemMetricsSpec extends TestKitBaseSpec("ActorSystemMetricsSpec") with BeforeAndAfterEach with Eventually {

  override def beforeEach(): Unit = {
    super.beforeEach()
    clearSystemMetrics
  }

  "the actor system metrics" should {
    "count actors" in {
      val trackedActor = system.actorOf(Props[ActorMetricsTestActor])
      eventually(timeout(5 seconds)) {
        findSystemMetricsRecorder(system.name) should not be empty
        val map = findSystemMetricsRecorder(system.name)
        map.getOrElse(ActorCountMetricName, -1.0) shouldEqual 1.0
      }
      system.stop(trackedActor)
      eventually(timeout(5 seconds)) {
        val metrics = findSystemMetricsRecorder(system.name)
        metrics.getOrElse(ActorCountMetricName, -1.0) shouldEqual 0.0
      }
    }
    "count unhandled messages" in {
      val count = findSystemMetricsRecorder(system.name).getOrElse(UnhandledMessageCountMetricName, 0.0)
      val trackedActor = system.actorOf(Props[ActorMetricsTestActor])
      trackedActor ! "unhandled"
      eventually(timeout(5 seconds)) {
        findSystemMetricsRecorder(system.name).getOrElse(UnhandledMessageCountMetricName, -1.0) shouldEqual (count + 1.0)
      }
    }
    "count dead letters" in {
      val count = findSystemMetricsRecorder(system.name).getOrElse(DeadLetterCountMetricName, 0.0)
      val trackedActor = system.actorOf(Props[ActorMetricsTestActor])
      system.stop(trackedActor)
      eventually(timeout(5 seconds)) {
        trackedActor ! "dead"
        findSystemMetricsRecorder(system.name).getOrElse(DeadLetterCountMetricName, -1.0) shouldBe > (count)
      }
    }
  }

  def findSystemMetricsRecorder(name: String): Map[String, Double] = {
    val metrics: List[Collector.MetricFamilySamples] =
      ActorSystemMetrics.actorCount.collect().asScala.toList ++
        ActorSystemMetrics.deadLetterCount.collect().asScala.toList ++
        ActorSystemMetrics.unhandledMessageCount.collect().asScala.toList
    val values = for(samples <- metrics;
      sample <- samples.samples.asScala if sample.labelValues.contains(name))
      yield (sample.name, sample.value)
    values.toMap
  }

  def clearSystemMetrics: Unit = {
    ActorSystemMetrics.actorCount.clear()
  }
} 
Example 10
Source File: TestTableStatsSinglePathMain.scala    From Spark.TableStatsExample   with Apache License 2.0
package com.cloudera.sa.examples.tablestats


import org.apache.spark.{SparkContext, SparkConf}
import org.apache.spark.sql.Row
import org.apache.spark.sql.types.{StringType, LongType, StructField, StructType}
import org.scalatest.{FunSuite, BeforeAndAfterEach, BeforeAndAfterAll}


class TestTableStatsSinglePathMain extends FunSuite with BeforeAndAfterEach with BeforeAndAfterAll {
  test("run table stats on sample data") {

    val sparkConfig = new SparkConf()
    sparkConfig.set("spark.broadcast.compress", "false")
    sparkConfig.set("spark.shuffle.compress", "false")
    sparkConfig.set("spark.shuffle.spill.compress", "false")
    var sc = new SparkContext("local", "test", sparkConfig)
    try {
      val sqlContext = new org.apache.spark.sql.SQLContext(sc)

      val schema =
        StructType(
          Array(
            StructField("id", LongType, true),
            StructField("name", StringType, true),
            StructField("age", LongType, true),
            StructField("gender", StringType, true),
            StructField("height", LongType, true),
            StructField("job_title", StringType, true)
          )
        )

      val rowRDD = sc.parallelize(Array(
        Row(1l, "Name.1", 20l, "M", 6l, "dad"),
        Row(2l, "Name.2", 20l, "F", 5l, "mom"),
        Row(3l, "Name.3", 20l, "F", 5l, "mom"),
        Row(4l, "Name.4", 20l, "M", 5l, "mom"),
        Row(5l, "Name.5", 10l, "M", 4l, "kid"),
        Row(6l, "Name.6", 8l, "M", 3l, "kid")))

      val df = sqlContext.createDataFrame(rowRDD, schema)

      val firstPassStats = TableStatsSinglePathMain.getFirstPassStat(df)

      assertResult(6l)(firstPassStats.columnStatsMap(0).maxLong)
      assertResult(1l)(firstPassStats.columnStatsMap(0).minLong)
      assertResult(21l)(firstPassStats.columnStatsMap(0).sumLong)
      assertResult(3l)(firstPassStats.columnStatsMap(0).avgLong)

      assertResult(2)(firstPassStats.columnStatsMap(3).topNValues.topNCountsForColumnArray.length)

      firstPassStats.columnStatsMap(3).topNValues.topNCountsForColumnArray.foreach { r =>
        if (r._1.equals("M")) {
          assertResult(4l)(r._2)
        } else if (r._1.equals("F")) {
          assertResult(2l)(r._2)
        } else {
          throw new RuntimeException("Unknown gender: " + r._1)
        }
      }
    } finally {
      sc.stop()
    }
  }
} 
Example 11
Source File: TestDB.scala    From slick-jdbc-extension-scala   with MIT License
package com.github.tarao
package slickjdbc
package helper

import scala.language.implicitConversions
import scala.concurrent.duration.Duration
import org.scalatest.{FunSpec, BeforeAndAfterAll, BeforeAndAfterEach}
import slick.jdbc.H2Profile.api.Database

case class Timeout(duration: Duration)
object Timeout {
  implicit val forever: Timeout = Timeout(Duration.Inf)
}

class DBRunner(val db: Database) {
  import scala.concurrent.{Future, Await}
  import slick.driver.H2Driver.api.Database
  import slick.dbio.{DBIOAction, NoStream, Effect}

  def run[R](a: DBIOAction[R, NoStream, Nothing])(implicit
    timeout: Timeout
  ): R = Await.result(db.run(a), timeout.duration)

  def close = db.close
}

object FreshId {
  var id = 0
  def apply() = { id = max; id }
  def max = { id + 1 }
}

trait Repository {
  def db: DBRunner
}

trait TestDB extends BeforeAndAfterAll with BeforeAndAfterEach {
  self: FunSpec =>

  lazy val config = {
    import com.typesafe.config.{ConfigFactory, ConfigValueFactory => V}
    import slick.jdbc.JdbcDataSource

    // Rewrite database name to thread local one so that writing from
    // multiple test threads run parallel won't conflict each other.
    val c = ConfigFactory.load.getConfig("h2memtest")
    val name = "test" + Thread.currentThread.getId
    val url = c.getString("url").replaceFirst("""\btest\b""", name)
    c.withValue("url", V.fromAnyRef(url))
  }

  lazy val db = new DBRunner(Database.forConfig("", config))

  override def beforeAll = {
    import slick.driver.H2Driver.api._

    db.run { sqlu"""
      CREATE TABLE IF NOT EXISTS entry (
        entry_id BIGINT NOT NULL PRIMARY KEY,
        url VARCHAR(2048) NOT NULL UNIQUE
      )
    """ }

    db.run { sqlu"""
      CREATE TABLE IF NOT EXISTS ids (
        id BIGINT NOT NULL PRIMARY KEY
      )
    """ }

    super.beforeAll
  }

  override def afterAll = {
    db.close
    super.afterAll
  }
} 
Example 12
Source File: HiveContextCompatibilitySuite.scala    From drizzle-spark   with Apache License 2.0
package org.apache.spark.sql.hive

import org.scalatest.BeforeAndAfterEach

import org.apache.spark.{SparkConf, SparkContext, SparkFunSuite}


class HiveContextCompatibilitySuite extends SparkFunSuite with BeforeAndAfterEach {

  private var sc: SparkContext = null
  private var hc: HiveContext = null

  override def beforeAll(): Unit = {
    super.beforeAll()
    sc = SparkContext.getOrCreate(new SparkConf().setMaster("local").setAppName("test"))
    HiveUtils.newTemporaryConfiguration(useInMemoryDerby = true).foreach { case (k, v) =>
      sc.hadoopConfiguration.set(k, v)
    }
    hc = new HiveContext(sc)
  }

  override def afterEach(): Unit = {
    try {
      hc.sharedState.cacheManager.clearCache()
      hc.sessionState.catalog.reset()
    } finally {
      super.afterEach()
    }
  }

  override def afterAll(): Unit = {
    try {
      sc = null
      hc = null
    } finally {
      super.afterAll()
    }
  }

  test("basic operations") {
    val _hc = hc
    import _hc.implicits._
    val df1 = (1 to 20).map { i => (i, i) }.toDF("a", "x")
    val df2 = (1 to 100).map { i => (i, i % 10, i % 2 == 0) }.toDF("a", "b", "c")
      .select($"a", $"b")
      .filter($"a" > 10 && $"b" > 6 && $"c")
    val df3 = df1.join(df2, "a")
    val res = df3.collect()
    val expected = Seq((18, 18, 8)).toDF("a", "x", "b").collect()
    assert(res.toSeq == expected.toSeq)
    df3.createOrReplaceTempView("mai_table")
    val df4 = hc.table("mai_table")
    val res2 = df4.collect()
    assert(res2.toSeq == expected.toSeq)
  }

  test("basic DDLs") {
    val _hc = hc
    import _hc.implicits._
    val databases = hc.sql("SHOW DATABASES").collect().map(_.getString(0))
    assert(databases.toSeq == Seq("default"))
    hc.sql("CREATE DATABASE mee_db")
    hc.sql("USE mee_db")
    val databases2 = hc.sql("SHOW DATABASES").collect().map(_.getString(0))
    assert(databases2.toSet == Set("default", "mee_db"))
    val df = (1 to 10).map { i => ("bob" + i.toString, i) }.toDF("name", "age")
    df.createOrReplaceTempView("mee_table")
    hc.sql("CREATE TABLE moo_table (name string, age int)")
    hc.sql("INSERT INTO moo_table SELECT * FROM mee_table")
    assert(
      hc.sql("SELECT * FROM moo_table order by name").collect().toSeq ==
      df.collect().toSeq.sortBy(_.getString(0)))
    val tables = hc.sql("SHOW TABLES IN mee_db").select("tableName").collect().map(_.getString(0))
    assert(tables.toSet == Set("moo_table", "mee_table"))
    hc.sql("DROP TABLE moo_table")
    hc.sql("DROP TABLE mee_table")
    val tables2 = hc.sql("SHOW TABLES IN mee_db").select("tableName").collect().map(_.getString(0))
    assert(tables2.isEmpty)
    hc.sql("USE default")
    hc.sql("DROP DATABASE mee_db CASCADE")
    val databases3 = hc.sql("SHOW DATABASES").collect().map(_.getString(0))
    assert(databases3.toSeq == Seq("default"))
  }

} 
Example 13
Source File: SharedSQLContext.scala    From drizzle-spark   with Apache License 2.0
package org.apache.spark.sql.test

import org.scalatest.BeforeAndAfterEach

import org.apache.spark.{DebugFilesystem, SparkConf}
import org.apache.spark.sql.{SparkSession, SQLContext}



// NOTE: the trait declaration, the shared `_spark` session field and the beforeAll()
// that creates it were truncated in this listing; the skeleton below is a minimal
// reconstruction of that missing context, not the verbatim original.
trait SharedSQLContext extends SQLTestUtils with BeforeAndAfterEach {

  @transient private var _spark: SparkSession = _

  protected implicit def spark: SparkSession = _spark

  protected implicit def sqlContext: SQLContext = _spark.sqlContext

  protected override def afterAll(): Unit = {
    try {
      if (_spark != null) {
        _spark.stop()
        _spark = null
      }
    } finally {
      super.afterAll()
    }
  }

  protected override def beforeEach(): Unit = {
    super.beforeEach()
    DebugFilesystem.clearOpenStreams()
  }

  protected override def afterEach(): Unit = {
    super.afterEach()
    DebugFilesystem.assertNoOpenStreams()
  }
} 
Example 14
Source File: IOEncryptionSuite.scala    From drizzle-spark   with Apache License 2.0
package org.apache.spark.deploy.yarn

import java.io._
import java.nio.charset.StandardCharsets
import java.security.PrivilegedExceptionAction
import java.util.UUID

import org.apache.hadoop.security.{Credentials, UserGroupInformation}
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, Matchers}

import org.apache.spark._
import org.apache.spark.deploy.SparkHadoopUtil
import org.apache.spark.internal.config._
import org.apache.spark.serializer._
import org.apache.spark.storage._

class IOEncryptionSuite extends SparkFunSuite with Matchers with BeforeAndAfterAll
  with BeforeAndAfterEach {
  private[this] val blockId = new TempShuffleBlockId(UUID.randomUUID())
  private[this] val conf = new SparkConf()
  private[this] val ugi = UserGroupInformation.createUserForTesting("testuser", Array("testgroup"))
  private[this] val serializer = new KryoSerializer(conf)

  override def beforeAll(): Unit = {
    System.setProperty("SPARK_YARN_MODE", "true")
    ugi.doAs(new PrivilegedExceptionAction[Unit]() {
      override def run(): Unit = {
        conf.set(IO_ENCRYPTION_ENABLED, true)
        val creds = new Credentials()
        SecurityManager.initIOEncryptionKey(conf, creds)
        SparkHadoopUtil.get.addCurrentUserCredentials(creds)
      }
    })
  }

  override def afterAll(): Unit = {
    SparkEnv.set(null)
    System.clearProperty("SPARK_YARN_MODE")
  }

  override def beforeEach(): Unit = {
    super.beforeEach()
  }

  override def afterEach(): Unit = {
    super.afterEach()
    conf.set("spark.shuffle.compress", false.toString)
    conf.set("spark.shuffle.spill.compress", false.toString)
  }

  test("IO encryption read and write") {
    ugi.doAs(new PrivilegedExceptionAction[Unit] {
      override def run(): Unit = {
        conf.set(IO_ENCRYPTION_ENABLED, true)
        conf.set("spark.shuffle.compress", false.toString)
        conf.set("spark.shuffle.spill.compress", false.toString)
        testYarnIOEncryptionWriteRead()
      }
    })
  }

  test("IO encryption read and write with shuffle compression enabled") {
    ugi.doAs(new PrivilegedExceptionAction[Unit] {
      override def run(): Unit = {
        conf.set(IO_ENCRYPTION_ENABLED, true)
        conf.set("spark.shuffle.compress", true.toString)
        conf.set("spark.shuffle.spill.compress", true.toString)
        testYarnIOEncryptionWriteRead()
      }
    })
  }

  private[this] def testYarnIOEncryptionWriteRead(): Unit = {
    val plainStr = "hello world"
    val outputStream = new ByteArrayOutputStream()
    val serializerManager = new SerializerManager(serializer, conf)
    val wrappedOutputStream = serializerManager.wrapStream(blockId, outputStream)
    wrappedOutputStream.write(plainStr.getBytes(StandardCharsets.UTF_8))
    wrappedOutputStream.close()

    val encryptedBytes = outputStream.toByteArray
    val encryptedStr = new String(encryptedBytes)
    assert(plainStr !== encryptedStr)

    val inputStream = new ByteArrayInputStream(encryptedBytes)
    val wrappedInputStream = serializerManager.wrapStream(blockId, inputStream)
    val decryptedBytes = new Array[Byte](1024)
    val len = wrappedInputStream.read(decryptedBytes)
    val decryptedStr = new String(decryptedBytes, 0, len, StandardCharsets.UTF_8)
    assert(decryptedStr === plainStr)
  }
} 
Example 15
Source File: SharedSparkContext.scala    From drizzle-spark   with Apache License 2.0
package org.apache.spark

import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach}
import org.scalatest.Suite


trait SharedSparkContext extends BeforeAndAfterAll with BeforeAndAfterEach { self: Suite =>

  @transient private var _sc: SparkContext = _

  def sc: SparkContext = _sc

  var conf = new SparkConf(false)

  override def beforeAll() {
    super.beforeAll()
    _sc = new SparkContext(
      "local[4]", "test", conf.set("spark.hadoop.fs.file.impl", classOf[DebugFilesystem].getName))
  }

  override def afterAll() {
    try {
      LocalSparkContext.stop(_sc)
      _sc = null
    } finally {
      super.afterAll()
    }
  }

  protected override def beforeEach(): Unit = {
    super.beforeEach()
    DebugFilesystem.clearOpenStreams()
  }

  protected override def afterEach(): Unit = {
    super.afterEach()
    DebugFilesystem.assertNoOpenStreams()
  }
} 
Example 16
Source File: ResetSystemProperties.scala    From drizzle-spark   with Apache License 2.0
package org.apache.spark.util

import java.util.Properties

import org.apache.commons.lang3.SerializationUtils
import org.scalatest.{BeforeAndAfterEach, Suite}


private[spark] trait ResetSystemProperties extends BeforeAndAfterEach { this: Suite =>
  var oldProperties: Properties = null

  override def beforeEach(): Unit = {
    // We need SerializationUtils.clone instead of `new Properties(System.getProperties())` because
    // the latter does not copy the properties: it creates a new Properties object with the given
    // properties as defaults, and those defaults are not visible through the standard Scala
    // wrapper over Java Properties.
    oldProperties = SerializationUtils.clone(System.getProperties)
    super.beforeEach()
  }

  override def afterEach(): Unit = {
    try {
      super.afterEach()
    } finally {
      System.setProperties(oldProperties)
      oldProperties = null
    }
  }
} 
Example 17
Source File: DiskBlockManagerSuite.scala    From drizzle-spark   with Apache License 2.0
package org.apache.spark.storage

import java.io.{File, FileWriter}

import scala.language.reflectiveCalls

import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach}

import org.apache.spark.{SparkConf, SparkFunSuite}
import org.apache.spark.util.Utils

class DiskBlockManagerSuite extends SparkFunSuite with BeforeAndAfterEach with BeforeAndAfterAll {
  private val testConf = new SparkConf(false)
  private var rootDir0: File = _
  private var rootDir1: File = _
  private var rootDirs: String = _

  var diskBlockManager: DiskBlockManager = _

  override def beforeAll() {
    super.beforeAll()
    rootDir0 = Utils.createTempDir()
    rootDir1 = Utils.createTempDir()
    rootDirs = rootDir0.getAbsolutePath + "," + rootDir1.getAbsolutePath
  }

  override def afterAll() {
    try {
      Utils.deleteRecursively(rootDir0)
      Utils.deleteRecursively(rootDir1)
    } finally {
      super.afterAll()
    }
  }

  override def beforeEach() {
    super.beforeEach()
    val conf = testConf.clone
    conf.set("spark.local.dir", rootDirs)
    diskBlockManager = new DiskBlockManager(conf, deleteFilesOnStop = true)
  }

  override def afterEach() {
    try {
      diskBlockManager.stop()
    } finally {
      super.afterEach()
    }
  }

  test("basic block creation") {
    val blockId = new TestBlockId("test")
    val newFile = diskBlockManager.getFile(blockId)
    writeToFile(newFile, 10)
    assert(diskBlockManager.containsBlock(blockId))
    newFile.delete()
    assert(!diskBlockManager.containsBlock(blockId))
  }

  test("enumerating blocks") {
    val ids = (1 to 100).map(i => TestBlockId("test_" + i))
    val files = ids.map(id => diskBlockManager.getFile(id))
    files.foreach(file => writeToFile(file, 10))
    assert(diskBlockManager.getAllBlocks.toSet === ids.toSet)
  }

  def writeToFile(file: File, numBytes: Int) {
    val writer = new FileWriter(file, true)
    for (i <- 0 until numBytes) writer.write(i)
    writer.close()
  }
} 
Example 18
Source File: TestBooleanCompressSuite.scala    From CarbonDataLearning   with GNU General Public License v3.0
package org.github.xubo245.carbonDataLearning.booleanDataType

import java.io.File

import org.apache.carbondata.core.util.CarbonProperties
import org.apache.spark.sql.Row
import org.apache.spark.sql.test.util.QueryTest
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach}

class TestBooleanCompressSuite extends QueryTest with BeforeAndAfterEach with BeforeAndAfterAll {
  val rootPath = new File(this.getClass.getResource("/").getPath
    + "../..").getCanonicalPath

  override def beforeEach(): Unit = {
    sql("drop table if exists boolean_table")
  }

  override def afterAll(): Unit = {
    sql("drop table if exists boolean_table")
    assert(BooleanFile.deleteFile(randomBoolean))
  }

  val pathOfManyDataType = s"$rootPath/src/test/resources/bool/supportBooleanBigFile.csv"
  val pathOfOnlyBoolean = s"$rootPath/src/test/resources/bool/supportBooleanBigFileOnlyBoolean.csv"
  val randomBoolean = s"$rootPath/src/test/resources/bool/supportRandomBooleanBigFile.csv"
  val trueNum = 10000000

  override def beforeAll(): Unit = {
    assert(BooleanFile.createBooleanFileRandom(randomBoolean, trueNum, 0.4))
    CarbonProperties.getInstance()
      .addProperty("carbon.storelocation", s"$rootPath/target/warehouse/")

  }

  test("test boolean compress rate: random file") {
    sql(
      s"""
         | CREATE TABLE boolean_table(
         | booleanField BOOLEAN
         | )
         | STORED BY 'carbondata'
       """.stripMargin)

    sql(
      s"""
         | LOAD DATA LOCAL INPATH '${randomBoolean}'
         | INTO TABLE boolean_table
         | options('FILEHEADER'='booleanField')
           """.stripMargin)

    sql("select * from boolean_table").show(100)
    sql("select count(*) from boolean_table").show()
    sql("select count(*) from boolean_table where booleanField= true").show()
    sql("select count(*) from boolean_table where booleanField= false").show()
    checkAnswer(
      sql("select count(*) from boolean_table"),
      Row(trueNum))
  }

} 
Example 19
Source File: TestSFObjectWriter.scala    From spark-salesforce   with Apache License 2.0
package com.springml.spark.salesforce

import org.mockito.Mockito._
import org.mockito.Matchers._
import org.scalatest.mock.MockitoSugar
import org.scalatest.{ FunSuite, BeforeAndAfterEach}
import com.springml.salesforce.wave.api.BulkAPI
import org.apache.spark.{ SparkConf, SparkContext}
import com.springml.salesforce.wave.model.{ JobInfo, BatchInfo}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{ Row, DataFrame, SQLContext}
import org.apache.spark.sql.types.{ StructType, StringType, StructField}

class TestSFObjectWriter extends FunSuite with MockitoSugar with BeforeAndAfterEach {
  val contact = "Contact";
  val jobId = "750B0000000WlhtIAC";
  val batchId = "751B0000000scSHIAY";
  val data = "Id,Description\n003B00000067Rnx,123456\n003B00000067Rnw,7890";

  val bulkAPI = mock[BulkAPI](withSettings().serializable())
  val writer = mock[SFObjectWriter]

  var sparkConf: SparkConf = _
  var sc: SparkContext = _

  override def beforeEach() {
    val jobInfo = new JobInfo
    jobInfo.setId(jobId)
    when(bulkAPI.createJob(contact)).thenReturn(jobInfo)

    val batchInfo = new BatchInfo
    batchInfo.setId(batchId)
    batchInfo.setJobId(jobId)
    when(bulkAPI.addBatch(jobId, data)).thenReturn(batchInfo)

    when(bulkAPI.closeJob(jobId)).thenReturn(jobInfo)
    when(bulkAPI.isCompleted(jobId)).thenReturn(true)

    sparkConf = new SparkConf().setMaster("local").setAppName("Test SF Object Update")
    sc = new SparkContext(sparkConf)
  }

  private def sampleDF() : DataFrame = {
    val rowArray = new Array[Row](2)
    val fieldArray = new Array[String](2)

    fieldArray(0) = "003B00000067Rnx"
    fieldArray(1) = "Desc1"
    rowArray(0) = Row.fromSeq(fieldArray)

    val fieldArray1 = new Array[String](2)
    fieldArray1(0) = "001B00000067Rnx"
    fieldArray1(1) = "Desc2"
    rowArray(1) = Row.fromSeq(fieldArray1)

    val rdd = sc.parallelize(rowArray)
    val schema = StructType(
      StructField("id", StringType, true) ::
      StructField("desc", StringType, true) :: Nil)

    val sqlContext = new SQLContext(sc)
    sqlContext.createDataFrame(rdd, schema)
  }

  test ("Write Object to Salesforce") {
    val df = sampleDF();
    val csvHeader = Utils.csvHeadder(df.schema)
    writer.writeData(df.rdd)
    sc.stop()
  }
} 
Example 20
Source File: FacetedLuceneRDDImplicitsSpec.scala    From spark-lucenerdd   with Apache License 2.0
package org.zouzias.spark.lucenerdd.facets

import com.holdenkarau.spark.testing.SharedSparkContext
import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession
import org.scalatest.{BeforeAndAfterEach, FlatSpec, Matchers}
import org.zouzias.spark.lucenerdd.testing.FavoriteCaseClass
import org.zouzias.spark.lucenerdd.{LuceneRDD, LuceneRDDKryoRegistrator}

class FacetedLuceneRDDImplicitsSpec  extends FlatSpec
  with Matchers
  with BeforeAndAfterEach
  with SharedSparkContext {

  var luceneRDD: LuceneRDD[_] = _


  override val conf = LuceneRDDKryoRegistrator.registerKryoClasses(new SparkConf().
    setMaster("local[*]").
    setAppName("test").
    set("spark.ui.enabled", "false").
    set("spark.app.id", appID))

  override def afterEach() {
    luceneRDD.close()
  }


  val elem = Array("fear", "death", "water", "fire", "house")
    .zipWithIndex.map{ case (str, index) =>
    FavoriteCaseClass(str, index, 10L, 12.3F, s"${str}@gmail.com")}


  "FacetedLuceneRDD(case class).count" should "return correct number of elements" in {
    val rdd = sc.parallelize(elem)
    val spark = SparkSession.builder().getOrCreate()
    import spark.implicits._
    val df = rdd.toDF()
    luceneRDD = FacetedLuceneRDD(df)
    luceneRDD.count should equal (elem.size)
  }

  "FacetedLuceneRDD(case class).fields" should "return all fields" in {
    val rdd = sc.parallelize(elem)
    val spark = SparkSession.builder().getOrCreate()
    import spark.implicits._
    val df = rdd.toDF()
    luceneRDD = FacetedLuceneRDD(df)

    luceneRDD.fields().size should equal(5)
    luceneRDD.fields().contains("name") should equal(true)
    luceneRDD.fields().contains("age") should equal(true)
    luceneRDD.fields().contains("myLong") should equal(true)
    luceneRDD.fields().contains("myFloat") should equal(true)
    luceneRDD.fields().contains("email") should equal(true)
  }

  "FacetedLuceneRDD(case class).termQuery" should "correctly search with TermQueries" in {
    val rdd = sc.parallelize(elem)
    val spark = SparkSession.builder().getOrCreate()
    import spark.implicits._
    val df = rdd.toDF()
    luceneRDD = FacetedLuceneRDD(df)

    val results = luceneRDD.termQuery("name", "water")
    results.count() should equal(1)
  }
} 
Example 21
Source File: LucenePrimitiveTypesSpec.scala    From spark-lucenerdd   with Apache License 2.0
package org.zouzias.spark.lucenerdd

import com.holdenkarau.spark.testing.SharedSparkContext
import org.apache.spark.SparkConf
import org.scalatest.{BeforeAndAfterEach, FlatSpec, Matchers}

class LucenePrimitiveTypesSpec extends FlatSpec with Matchers
  with BeforeAndAfterEach
  with SharedSparkContext {

  override val conf = LuceneRDDKryoRegistrator.registerKryoClasses(new SparkConf().
    setMaster("local[*]").
    setAppName("test").
    set("spark.ui.enabled", "false").
    set("spark.app.id", appID))

  def randomString(length: Int): String = scala.util.Random.alphanumeric.take(length).mkString
  val array = (1 to 24).map(randomString(_))

  var luceneRDD: LuceneRDD[_] = _

  override def afterEach() {
    luceneRDD.close()
  }

  

  "LuceneRDD" should "work with RDD[Array[String]]" in {
    val array = Array(Array("aaa", "aaa2"), Array("bbb", "bbb2"),
      Array("ccc", "ccc2"), Array("ddd"), Array("eee"))
    val rdd = sc.parallelize(array)
    luceneRDD = LuceneRDD(rdd)
    luceneRDD.count should be (array.length)
  }

  "LuceneRDD" should "work with RDD[Set[String]]" in {
    val array = Array(Set("aaa", "aaa2"), Set("bbb", "bbb2"),
      Set("ccc", "ccc2"), Set("ddd"), Set("eee"))
    val rdd = sc.parallelize(array)
    luceneRDD = LuceneRDD(rdd)
    luceneRDD.count should be (array.length)
  }

  "LuceneRDD" should "work with RDD[String]" in {
    val array = Array("aaa", "bbb", "ccc", "ddd", "eee")
    val rdd = sc.parallelize(array)
    luceneRDD = LuceneRDD(rdd)
    luceneRDD.count should be (array.length)
  }

  "LuceneRDD" should "work with RDD[Int]" in {
    val array = (1 to 22)
    val rdd = sc.parallelize(array)
    luceneRDD = LuceneRDD(rdd)
    luceneRDD.count should be (array.size)
  }

  "LuceneRDD" should "work with RDD[Float]" in {
    val array: IndexedSeq[Float] = (1 to 22).map(_.toFloat)
    val rdd = sc.parallelize(array)
    luceneRDD = LuceneRDD(rdd)
    luceneRDD.count should be (array.size)
  }

  "LuceneRDD" should "work with RDD[Double]" in {
    val array: IndexedSeq[Double] = (1 to 22).map(_.toDouble)
    val rdd = sc.parallelize(array)
    luceneRDD = LuceneRDD(rdd)
    luceneRDD.count should be (array.size)
  }

  "LuceneRDD" should "work with RDD[Long]" in {
    val array: IndexedSeq[Long] = (1 to 22).map(_.toLong)
    val rdd = sc.parallelize(array)
    luceneRDD = LuceneRDD(rdd)
    luceneRDD.count should equal (array.size)
  }

  "LuceneRDD" should "work with RDD[Map[String, String]]" in {
    val maps = List(Map( "a" -> "hello"), Map("b" -> "world"), Map("c" -> "how are you"))
    val rdd = sc.parallelize(maps)
    luceneRDD = LuceneRDD(rdd)
    luceneRDD.count should equal (maps.size)
    luceneRDD.termQuery("a", "hello").isEmpty() should equal (false)
    luceneRDD.prefixQuery("b", "wor").isEmpty() should equal (false)
    luceneRDD.prefixQuery("a", "no").isEmpty() should equal (true)
  }

  "LuceneRDD" should "work with RDD[String] and ignore null values" in {
    val array = Array("aaa", null, "ccc", null, "eee")
    val rdd = sc.parallelize(array)
    luceneRDD = LuceneRDD(rdd)
    luceneRDD.count should be (array.length)
  }

} 
Example 22
Source File: BlockingLinkageSpec.scala    From spark-lucenerdd   with Apache License 2.0
package org.zouzias.spark.lucenerdd

import com.holdenkarau.spark.testing.SharedSparkContext
import org.apache.lucene.index.Term
import org.apache.lucene.search.{Query, TermQuery}
import org.apache.spark.SparkConf
import org.apache.spark.sql.{Row, SparkSession}
import org.scalatest.{BeforeAndAfterEach, FlatSpec, Matchers}
import org.zouzias.spark.lucenerdd.testing.Person

class BlockingLinkageSpec extends FlatSpec
  with Matchers
  with BeforeAndAfterEach
  with SharedSparkContext {

  override val conf: SparkConf = LuceneRDDKryoRegistrator.registerKryoClasses(new SparkConf().
    setMaster("local[*]").
    setAppName("test").
    set("spark.ui.enabled", "false").
    set("spark.app.id", appID))

  "LuceneRDD.blockEntityLinkage" should "deduplicate elements on unique elements" in {
    val spark = SparkSession.builder().getOrCreate()
    import spark.implicits._

    val peopleLeft: Array[Person] = Array("fear", "death", "water", "fire", "house")
      .zipWithIndex.map { case (str, index) =>
      // placeholder addresses; the original literals were mangled by the page's email obfuscation
      val email = if (index % 2 == 0) "even@example.com" else "odd@example.com"
      Person(str, index, email)
    }

    val peopleRight: Array[Person] = Array("fear", "death", "water", "fire", "house")
      .zipWithIndex.map { case (str, index) =>
      val email = if (index % 2 == 0) "even@example.com" else "odd@example.com"
      Person(str, index, email)
    }

    val leftDF = sc.parallelize(peopleLeft).repartition(2).toDF()
    val rightDF = sc.parallelize(peopleRight).repartition(3).toDF()

    // Define a Lucene Term linker
    val linker: Row => Query = { row =>
      val name = row.getString(row.fieldIndex("name"))
      val term = new Term("name", name)

      new TermQuery(term)
    }


    val linked = LuceneRDD.blockEntityLinkage(leftDF, rightDF, linker,
      Array("email"), Array("email"))

    val (linkedCount, dfCount) = (linked.count, leftDF.count())

    linkedCount should equal(dfCount)

    // Check for correctness
    // Age is a unique index
    linked.collect().foreach { case (row, results) =>
      val leftAge = row.getInt(row.fieldIndex("age"))
      val rightAge = results.headOption.map(x => x.getInt(x.fieldIndex("age")))

      rightAge should contain (leftAge)

    }
  }
} 
Example 23
Source File: LuceneRDDCustomCaseClassImplicitsSpec.scala    From spark-lucenerdd   with Apache License 2.0
package org.zouzias.spark.lucenerdd

import com.holdenkarau.spark.testing.SharedSparkContext
import org.apache.spark.SparkConf
import org.scalatest.{BeforeAndAfterEach, FlatSpec, Matchers}
import org.zouzias.spark.lucenerdd.testing.Person

class LuceneRDDCustomCaseClassImplicitsSpec extends FlatSpec
  with Matchers
  with BeforeAndAfterEach
  with SharedSparkContext {

  var luceneRDD: LuceneRDD[_] = _

  override def afterEach() {
    luceneRDD.close()
  }

  override val conf = LuceneRDDKryoRegistrator.registerKryoClasses(new SparkConf().
    setMaster("local[*]").
    setAppName("test").
    set("spark.ui.enabled", "false").
    set("spark.app.id", appID))

  val elem: Array[Person] = Array("fear", "death", "water", "fire", "house")
    .zipWithIndex.map{ case (str, index) => Person(str, index, s"${str}@gmail.com")}

  "LuceneRDD(case class).count" should "handle nulls properly" in {
    val elemsWithNulls = Array("fear", "death", "water", "fire", "house")
      .zipWithIndex.map{ case (str, index) => Person(str, index, null)}
    val rdd = sc.parallelize(elemsWithNulls)
    luceneRDD = LuceneRDD(rdd)
    luceneRDD.count() should equal (elemsWithNulls.length)
  }

  "LuceneRDD(case class).count" should "return correct number of elements" in {
    val rdd = sc.parallelize(elem)
    luceneRDD = LuceneRDD(rdd)
    luceneRDD.count() should equal (elem.length)
  }

  "LuceneRDD(case class).fields" should "return all fields" in {
    val rdd = sc.parallelize(elem)
    luceneRDD = LuceneRDD(rdd)

    luceneRDD.fields().size should equal(3)
    luceneRDD.fields().contains("name") should equal(true)
    luceneRDD.fields().contains("age") should equal(true)
    luceneRDD.fields().contains("email") should equal(true)
  }

  "LuceneRDD(case class).termQuery" should "correctly search with TermQueries" in {
    val rdd = sc.parallelize(elem)
    luceneRDD = LuceneRDD(rdd)

    val results = luceneRDD.termQuery("name", "water")
    results.count() should equal(1)
  }
} 
Example 24
Source File: ShapeLuceneRDDImplicitsSpec.scala    From spark-lucenerdd   with Apache License 2.0
package org.zouzias.spark.lucenerdd.spatial.shape.implicits

import com.holdenkarau.spark.testing.SharedSparkContext
import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession
import org.scalatest.{BeforeAndAfterEach, FlatSpec, Matchers}
import org.zouzias.spark.lucenerdd.spatial.shape.{ShapeLuceneRDD, _}
import org.zouzias.spark.lucenerdd.testing.LuceneRDDTestUtils
import org.zouzias.spark.lucenerdd._
import org.zouzias.spark.lucenerdd.spatial.shape.context.ContextLoader

class ShapeLuceneRDDImplicitsSpec extends FlatSpec
  with Matchers
  with BeforeAndAfterEach
  with SharedSparkContext
  with ContextLoader
  with LuceneRDDTestUtils {

  val Radius: Double = 5D

  override val conf = ShapeLuceneRDDKryoRegistrator.registerKryoClasses(new SparkConf().
    setMaster("local[*]").
    setAppName("test").
    set("spark.ui.enabled", "false").
    set("spark.app.id", appID))

  "ShapeLuceneRDDImplicits" should "implicitly convert to point" in {

    val rdd = sc.parallelize(cities)
    val shapeRDD = ShapeLuceneRDD(rdd)

    shapeRDD.count should equal(cities.length)
  }

  "ShapeLuceneRDDImplicits" should "implicitly convert to circle" in {

    val circleCities: Array[(((Double, Double), Double), String)]
    = cities.map(convertToCircle)
    val rdd = sc.parallelize(circleCities)
    val shapeRDD = ShapeLuceneRDD(rdd)

    shapeRDD.count should equal(circleCities.length)
  }

  "ShapeLuceneRDDImplicits" should "implicitly convert to rectangle" in {

    val rectangleCities = cities.map(convertToRectangle)
    val rdd = sc.parallelize(rectangleCities)
    val shapeRDD = ShapeLuceneRDD(rdd)

    shapeRDD.count should equal(rectangleCities.length)
  }

  "ShapeLuceneRDDImplicits" should "implicitly convert POINTS from WKT" in {
    val sparkSession = SparkSession.builder().getOrCreate()
    val citiesDF = sparkSession.read.parquet("data/world-cities-points.parquet")
    import sparkSession.implicits._
    val citiesRDD = citiesDF.map(row =>
      (row.getString(2), (row.getString(0), row.getString(1))))

    val total = citiesDF.count()
    total > 0 should equal(true)

    val shapeRDD = ShapeLuceneRDD(citiesRDD)

    shapeRDD.count > 0 should equal(true)
  }

  "ShapeLuceneRDDImplicits" should "implicitly convert BBOX from WKT" in {
    val sparkSession = SparkSession.builder().getOrCreate()
    import sparkSession.implicits._
    val countriesDF = sparkSession.read.parquet("data/countries-bbox.parquet")
    val citiesRDD = countriesDF.map(row =>
      (row.getString(2), (row.getString(0), row.getString(1))))

    val total = countriesDF.count()
    total > 0 should equal(true)

    val shapeRDD = ShapeLuceneRDD(citiesRDD)

    shapeRDD.count > 0 should equal(true)
  }

  "ShapeLuceneRDDImplicits" should "implicitly convert to polygon" in {

    val polygonCities = cities.map(convertToPolygon(_, Radius))
    val rdd = sc.parallelize(polygonCities)
    val shapeRDD = ShapeLuceneRDD(rdd)

    shapeRDD.count should equal(polygonCities.length)
  }

} 
Example 25
Source File: AnalyzersConfigurableSpec.scala    From spark-lucenerdd   with Apache License 2.0
package org.zouzias.spark.lucenerdd.analyzers

import org.apache.lucene.analysis.en.EnglishAnalyzer
import org.apache.lucene.analysis.el.GreekAnalyzer
import org.apache.lucene.analysis.de.GermanAnalyzer
import org.scalatest.{BeforeAndAfterEach, FlatSpec, Matchers}

class AnalyzersConfigurableSpec extends FlatSpec with Matchers
  with BeforeAndAfterEach
  with AnalyzerConfigurable {

  "AnalyzersConfigurable.getAnalyzer" should "return english analyzer with 'en' input" in {
    val englishAnalyzer = getAnalyzer(Some("en"))
    englishAnalyzer shouldNot equal(null)
    englishAnalyzer.isInstanceOf[EnglishAnalyzer] should equal(true)
  }

  "AnalyzersConfigurable.getAnalyzer" should
    "return custom test analyzer with 'org.apache.lucene.analysis.el.GreekAnalyzer'" in {
    val greekAnalyzer = getAnalyzer(Some("org.apache.lucene.analysis.el.GreekAnalyzer"))
    greekAnalyzer shouldNot equal(null)
    greekAnalyzer.isInstanceOf[GreekAnalyzer] should equal(true)
  }

  "AnalyzersConfigurable.getAnalyzer" should
    "return custom test analyzer with 'org.apache.lucene.analysis.de.GermanAnalyzer'" in {
    val deutschAnalyzer = getAnalyzer(Some("org.apache.lucene.analysis.de.GermanAnalyzer"))
    deutschAnalyzer shouldNot equal(null)
    deutschAnalyzer.isInstanceOf[GermanAnalyzer] should equal(true)
  }
} 
Example 26
Source File: LuceneRDDSearchSpec.scala    From spark-lucenerdd   with Apache License 2.0
package org.zouzias.spark.lucenerdd

import com.holdenkarau.spark.testing.SharedSparkContext
import org.apache.spark.SparkConf
import org.scalatest.{BeforeAndAfterEach, FlatSpec, Matchers}
import org.zouzias.spark.lucenerdd.testing.LuceneRDDTestUtils

class LuceneRDDSearchSpec extends FlatSpec
  with Matchers
  with BeforeAndAfterEach
  with LuceneRDDTestUtils
  with SharedSparkContext {

  var luceneRDD: LuceneRDD[_] = _

  override def Radius: Double = 0

  override val conf = LuceneRDDKryoRegistrator.registerKryoClasses(new SparkConf().
    setMaster("local[*]").
    setAppName("test").
    set("spark.ui.enabled", "false").
    set("spark.app.id", appID))

  override def afterEach() {
    luceneRDD.close()
  }


  val First = "_1"

  val array = List("fear", "death", " apologies", "romance", "tree", "fashion", "fascism")

  "LuceneRDD.query" should "use phrase query syntax" in {
    val words = Array("aabaa", "aaacaa", "aadaa", "aaaa", "qwerty")
    val rdd = sc.parallelize(words)
    luceneRDD = LuceneRDD(rdd)
    luceneRDD.query("_1:aadaa").isEmpty() should equal (false)
    luceneRDD.query("_1:aa*").count() should equal (4)
    luceneRDD.query("_1:q*").count() should equal (1)
  }

  "LuceneRDD.count" should "return correct number of elements" in {
    val rdd = sc.parallelize(array)
    luceneRDD = LuceneRDD(rdd)
    luceneRDD.count should equal (array.size)
  }

  "LuceneRDD.termQuery" should "correctly search with TermQueries" in {
    val rdd = sc.parallelize(array)
    luceneRDD = LuceneRDD(rdd)
    val results = luceneRDD.termQuery(First, array(1))
    results.count() should equal (1)
  }

  "LuceneRDD.prefixQuery" should "correctly search with PrefixQueries" in {

    val prefices = Array("aaaabcd", "aaadcb", "aaz", "az", "qwerty")
    val rdd = sc.parallelize(prefices)
    luceneRDD = LuceneRDD(rdd)

    luceneRDD.prefixQuery(First, "a").count() should equal (4)
    luceneRDD.prefixQuery(First, "aa").count() should equal(3)
    luceneRDD.prefixQuery(First, "aaa").count() should equal (2)
    luceneRDD.prefixQuery(First, "aaaa").count() should equal (1)
  }

  "LuceneRDD.fuzzyQuery" should "correctly search with FuzzyQuery" in {
    val rdd = sc.parallelize(array)
    luceneRDD = LuceneRDD(rdd)

    luceneRDD.fuzzyQuery(First, "fear", 1).count() should equal (1)
    luceneRDD.fuzzyQuery(First, "fascsm", 1).count() should equal(1)
    luceneRDD.fuzzyQuery(First, "dath", 1).count() should equal (1)
    luceneRDD.fuzzyQuery(First, "tree", 1).count() should equal (1)
  }

  

  "LuceneRDD.phraseQuery" should "correctly search with PhraseQuery" in {
    val phrases = Array("hello world", "the company name was", "highlight lucene")
    val rdd = sc.parallelize(phrases)
    luceneRDD = LuceneRDD(rdd)

    luceneRDD.phraseQuery(First, "company name", 10).count() should equal (1)
    luceneRDD.phraseQuery(First, "hello world", 10).count() should equal (1)
    luceneRDD.phraseQuery(First, "highlight lucene", 10).count() should equal(1)
  }
} 
Example 27
Source File: BlockingDedupSpec.scala    From spark-lucenerdd   with Apache License 2.0
package org.zouzias.spark.lucenerdd

import com.holdenkarau.spark.testing.SharedSparkContext
import org.apache.lucene.index.Term
import org.apache.lucene.search.{Query, TermQuery}
import org.apache.spark.SparkConf
import org.apache.spark.sql.{Row, SparkSession}
import org.scalatest.{BeforeAndAfterEach, FlatSpec, Matchers}
import org.zouzias.spark.lucenerdd.testing.Person

class BlockingDedupSpec extends FlatSpec
  with Matchers
  with BeforeAndAfterEach
  with SharedSparkContext {

  override val conf: SparkConf = LuceneRDDKryoRegistrator.registerKryoClasses(new SparkConf().
    setMaster("local[*]").
    setAppName("test").
    set("spark.ui.enabled", "false").
    set("spark.app.id", appID))

  "LuceneRDD.blockDedup" should "deduplicate elements on unique elements" in {
    val spark = SparkSession.builder().getOrCreate()
    import spark.implicits._

    val people: Array[Person] = Array("fear", "death", "water", "fire", "house")
      .zipWithIndex.map { case (str, index) =>
      // placeholder addresses; the original literals were mangled by the page's email obfuscation
      val email = if (index % 2 == 0) "even@example.com" else "odd@example.com"
      Person(str, index, email)
    }
    val df = sc.parallelize(people).repartition(2).toDF()

    val linker: Row => Query = { row =>
      val name = row.getString(row.fieldIndex("name"))
      val term = new Term("name", name)

      new TermQuery(term)
    }


    val linked = LuceneRDD.blockDedup(df, linker, Array("email"))

    val (linkedCount, dfCount) = (linked.count, df.count())

    linkedCount should equal(dfCount)

    // Check for correctness
    // Age is a unique index
    linked.collect().foreach { case (row, results) =>
      val leftAge = row.getInt(row.fieldIndex("age"))
      val rightAge = results.headOption.map(x => x.getInt(x.fieldIndex("age")))

      rightAge should contain (leftAge)

    }
  }
} 
Example 28
Source File: LuceneDocToSparkRowpec.scala    From spark-lucenerdd   with Apache License 2.0
package org.zouzias.spark.lucenerdd

import java.io.{Reader, StringReader}

import org.apache.lucene.document.{Document, DoublePoint, Field, FloatPoint, IntPoint, LongPoint, StoredField, TextField}
import org.scalatest.{BeforeAndAfterEach, FlatSpec, Matchers}
import org.zouzias.spark.lucenerdd.models.SparkScoreDoc
import org.zouzias.spark.lucenerdd.models.SparkScoreDoc.{DocIdField, ScoreField, ShardField}

import scala.collection.JavaConverters._

class LuceneDocToSparkRowpec extends FlatSpec
  with Matchers
  with BeforeAndAfterEach {

  val (score: Float, docId: Int, shardIndex: Int) = (1.0f, 1, 2)
  val float: Float = 20.001f
  val double: Double = 10.1000000001D

  def generate_doc(): Document = {
    val doc = new Document()

    // Add long field
    doc.add(new LongPoint("longField", 10))
    doc.add(new StoredField("longField", 10))

    doc.add(new FloatPoint("floatField", float))
    doc.add(new StoredField("floatField", float))

    doc.add(new IntPoint("intField", 9))
    doc.add(new StoredField("intField", 9))

    doc.add(new DoublePoint("doubleField", double))
    doc.add(new StoredField("doubleField", double))

    doc.add(new TextField("textField", "hello world", Field.Store.NO))
    doc.add(new StoredField("textField", "hello world"))

    doc
  }

  private val doc: Document = generate_doc()

  val sparkScoreDoc = SparkScoreDoc(score, docId, shardIndex, doc)


  "SparkScoreDoc.toRow" should "return correct score" in {
    val row = sparkScoreDoc.toRow()
    row.getFloat(row.fieldIndex(ScoreField)) should equal(score)
  }

  "SparkScoreDoc.toRow" should "return correct docId" in {
    val row = sparkScoreDoc.toRow()
    row.getInt(row.fieldIndex(DocIdField)) should equal(docId)
  }

  "SparkScoreDoc.toRow" should "return correct shard number" in {
    val row = sparkScoreDoc.toRow()
    row.getInt(row.fieldIndex(ShardField)) should equal(shardIndex)
  }

  "SparkScoreDoc.toRow" should "return correct number of fields" in {
    val row = sparkScoreDoc.toRow()
    row.getFields().asScala.count(_.fieldType().stored()) should equal(8)
  }

  "SparkScoreDoc.toRow" should "set correctly DoublePoint" in {
    val row = sparkScoreDoc.toRow()
    row.getDouble(row.fieldIndex("doubleField")) should equal(double)
  }

  "SparkScoreDoc.toRow" should "set correctly FloatPoint" in {
    val row = sparkScoreDoc.toRow()
    row.getFloat(row.fieldIndex("floatField")) should equal(float)
  }
} 
Example 29
Source File: LuceneRDDTermVectorsSpec.scala    From spark-lucenerdd   with Apache License 2.0 5 votes vote down vote up
package org.zouzias.spark.lucenerdd

import com.holdenkarau.spark.testing.SharedSparkContext
import org.apache.spark.SparkConf
import org.scalatest.{BeforeAndAfterEach, FlatSpec, Matchers}
import org.zouzias.spark.lucenerdd.testing.LuceneRDDTestUtils

class LuceneRDDTermVectorsSpec extends FlatSpec
  with Matchers
  with BeforeAndAfterEach
  with LuceneRDDTestUtils
  with SharedSparkContext {

  var luceneRDD: LuceneRDD[_] = _

  override def Radius: Double = 0

  override val conf: SparkConf = LuceneRDDKryoRegistrator.registerKryoClasses(new SparkConf().
    setMaster("local[*]").
    setAppName("test").
    set("spark.ui.enabled", "false").
    set("spark.app.id", appID))

  override def afterEach() {
    luceneRDD.close()
  }

  val First = "_1"

  "LuceneRDD.termVectors" should "return valid terms" in {

    val words = Array("To smile or not to smile smile",
      "Don't cry because it's over, smile because it happened",
      "So many books, so little time",
      "A room without books is like a body without a soul",
      "If you tell the truth, you don't have to remember anything")
    val rdd = sc.parallelize(words)

    luceneRDD = LuceneRDD(rdd)

    val terms = luceneRDD.termVectors(First).collect()

    // These terms should exist
    terms.exists(_.term.compareToIgnoreCase("time") == 0) should equal(true)
    terms.exists(_.term.compareToIgnoreCase("room") == 0) should equal(true)
    terms.exists(_.term.compareToIgnoreCase("soul") == 0) should equal(true)
    terms.exists(_.term.compareToIgnoreCase("smile") == 0) should equal(true)

    terms.exists(t => (t.term.compareToIgnoreCase("smile") == 0)
      && t.count == 3) should equal (true)
    terms.exists(t => (t.term.compareToIgnoreCase("becaus") == 0)
      && t.count == 2) should equal (true)
  }
} 
Example 30
Source File: LuceneRDDMoreLikeThisSpec.scala    From spark-lucenerdd   with Apache License 2.0 5 votes vote down vote up
package org.zouzias.spark.lucenerdd

import com.holdenkarau.spark.testing.SharedSparkContext
import org.apache.spark.SparkConf
import scala.collection.JavaConverters._
import org.scalatest.{BeforeAndAfterEach, FlatSpec, Matchers}

import scala.io.Source

class LuceneRDDMoreLikeThisSpec extends FlatSpec
  with Matchers
  with BeforeAndAfterEach
  with SharedSparkContext {

  var luceneRDD: LuceneRDD[_] = _


  override val conf = LuceneRDDKryoRegistrator.registerKryoClasses(new SparkConf().
    setMaster("local[*]").
    setAppName("test").
    set("spark.ui.enabled", "false").
    set("spark.app.id", appID))

  override def afterEach() {
    luceneRDD.close()
  }

  "LuceneRDD.moreLikeThis" should "return relevant documents" in {
    val words: Seq[String] = Source.fromFile("src/test/resources/alice.txt")
      .getLines().map(_.toLowerCase).toSeq
    val rdd = sc.parallelize(words)
    luceneRDD = LuceneRDD(rdd)
    val results = luceneRDD
      .moreLikeThis("_1", "alice adventures wonderland", 1, 1)
      .collect()

    results.length > 0 should equal(true)
    val firstDoc = results.head
    val x = firstDoc.getString(firstDoc.fieldIndex("_1"))

    x.contains("alice") &&
      x.contains("wonderland") &&
      x.contains("adventures") should equal(true)

    val lastDoc = results.last
    val y = lastDoc.getString(lastDoc.fieldIndex("_1"))


      y.contains("alice") &&
        !y.contains("wonderland") &&
        !y.contains("adventures") should equal(true)

  }
} 
Example 31
Source File: SparkFunSuite.scala    From spark-alchemy   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark

// scalastyle:off
import java.io.File

import scala.annotation.tailrec
import org.apache.log4j.{Appender, Level, Logger}
import org.scalatest.{BeforeAndAfter, BeforeAndAfterAll, BeforeAndAfterEach, FunSuite, Outcome, Suite}
import org.apache.spark.internal.Logging
import org.apache.spark.internal.config.Tests.IS_TESTING
import org.apache.spark.util.{AccumulatorContext, Utils}

// NOTE: the original class header and most of the suite body are omitted in this
// listing; a minimal declaration consistent with the imports above is assumed here:
abstract class SparkFunSuite extends FunSuite with BeforeAndAfterAll with BeforeAndAfterEach with Logging {

  protected def withLogAppender(
    appender: Appender,
    loggerName: Option[String] = None,
    level: Option[Level] = None)(
    f: => Unit): Unit = {
    val logger = loggerName.map(Logger.getLogger).getOrElse(Logger.getRootLogger)
    val restoreLevel = logger.getLevel
    logger.addAppender(appender)
    if (level.isDefined) {
      logger.setLevel(level.get)
    }
    try f finally {
      logger.removeAppender(appender)
      if (level.isDefined) {
        logger.setLevel(restoreLevel)
      }
    }
  }
} 
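The listing above keeps only the withLogAppender helper. As a rough usage sketch, a test built on this suite could capture log output as below; the suite name, the in-memory appender and the log message are illustrative, and it is assumed that SparkFunSuite exposes ScalaTest's test(...) syntax.

package org.apache.spark

import org.apache.log4j.{AppenderSkeleton, Level, Logger}
import org.apache.log4j.spi.LoggingEvent

import scala.collection.mutable.ArrayBuffer

class LogCaptureSuite extends SparkFunSuite {

  test("captures warnings emitted inside the block") {
    val events = ArrayBuffer.empty[LoggingEvent]
    // minimal in-memory appender that records every event it receives
    val appender = new AppenderSkeleton {
      override protected def append(event: LoggingEvent): Unit = events += event
      override def close(): Unit = ()
      override def requiresLayout(): Boolean = false
    }
    withLogAppender(appender, level = Some(Level.WARN)) {
      Logger.getRootLogger.warn("something to capture")
    }
    assert(events.exists(_.getRenderedMessage.contains("something to capture")))
  }
}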
Example 32
Source File: StepTest.scala    From incubator-s2graph   with Apache License 2.0 5 votes vote down vote up
package org.apache.s2graph.core.step

import org.scalatest.{BeforeAndAfterEach, FunSuite, Matchers}
import rx.lang.scala.{Observable, Subscription}

class StepTest extends FunSuite with Matchers {

  trait GraphE {
    def id: String
  }

  case class V(id: String) extends GraphE

  case class E(id: String, src: V, tgt: V) extends GraphE

  object GraphModels {
    
    val va = V("V_A")
    val vb = V("V_B")

    val e1 = E("E1", va, vb)
    val e2 = E("E2", vb, va)

    val allVertices = List(va, vb)
    val allEdges = List(e1, e2)
  }

  case class VertexStep(vid: String) extends RxStep[Unit, V] {
    override def apply(in: Unit): Observable[V] = {
      val vertices = GraphModels.allVertices.filter(v => vid == v.id)
      Observable.from(vertices)
    }
  }

  case class EdgeStep(dir: String) extends RxStep[V, E] {
    override def apply(in: V): Observable[E] = {
      val edges = if (dir == "OUT") {
        GraphModels.allEdges.filter(e => in == e.src)
      } else {
        GraphModels.allEdges.filter(e => in == e.tgt)
      }

      Observable.from(edges)
    }
  }

  case class EdgeToVertexStep() extends RxStep[E, V] {
    override def apply(in: E): Observable[V] = {
      Observable.just(in.tgt)
    }
  }

  test("basic step") {
    val v1: RxStep[Unit, V] = VertexStep("V_A")

    val e1: RxStep[V, E] = EdgeStep("OUT")
    val e2 = EdgeStep("IN")

    val g = v1(())
      .flatMap(v => e1(v) ++ e2(v))
      .flatMap(EdgeToVertexStep())
      .flatMap(v => e1(v) ++ e2(v))
      .distinct

    val expected = List(
      E("E1", V("V_A"), V("V_B")),
      E("E2", V("V_B"), V("V_A"))
    ).sortBy(_.id)

    val actual = g.toBlocking.toList.sortBy(_.id)

    println(actual)
    actual shouldBe expected
  }
} 
Example 33
Source File: LabelLabelIndexMutateOptionTest.scala    From incubator-s2graph   with Apache License 2.0 5 votes vote down vote up
package org.apache.s2graph.core.Integrate

import org.apache.s2graph.core._
import org.scalatest.{BeforeAndAfterEach, Tag}
import play.api.libs.json._

class LabelLabelIndexMutateOptionTest extends IntegrateCommon with BeforeAndAfterEach {

  import TestUtil._

  // called once when the tests start
  override def initTestData(): Unit = {
    super.initTestData()

    val insert = "insert"
    val e = "e"
    val weight = "weight"
    val is_hidden = "is_hidden"

    insertEdgesSync(
      toEdge(1, insert, e, 0, 1, testLabelNameLabelIndex),
      toEdge(1, insert, e, 0, 2, testLabelNameLabelIndex),
      toEdge(1, insert, e, 0, 3, testLabelNameLabelIndex)
    )
  }

  def getQuery(ids: Seq[Int], direction: String, indexName: String): Query =
    Query(
      vertices = ids.map(graph.toVertex(testServiceName, testColumnName, _)),
      steps = Vector(
        Step(Seq(QueryParam(testLabelNameLabelIndex, direction = direction, indexName = indexName)))
      )
    )

  
  ignore("index for out direction should drop out direction edge and store degree") {
    val edges = getEdgesSync(getQuery(Seq(0), "out", idxDropOutStoreDegree))
    (edges \ "results").as[Seq[JsValue]].size should be(0)
    (edges \\ "_degree").map(_.as[Long]).sum should be(3)
  }
} 
Example 34
Source File: MatcherSpec.scala    From matcher   with MIT License 5 votes vote down vote up
package com.wavesplatform.dex.actors

import akka.actor.ActorSystem
import akka.testkit.TestKitBase
import com.typesafe.config.ConfigFactory
import com.wavesplatform.dex.domain.utils.ScorexLogging
import com.wavesplatform.dex.settings.loadConfig
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, Suite}

abstract class MatcherSpec(_actorSystemName: String) extends AnyWordSpecLike with MatcherSpecLike {
  protected def actorSystemName: String = _actorSystemName
}

trait MatcherSpecLike extends TestKitBase with Matchers with BeforeAndAfterAll with BeforeAndAfterEach with ScorexLogging {
  this: Suite =>

  protected def actorSystemName: String

  implicit override lazy val system: ActorSystem = ActorSystem(
    actorSystemName,
    loadConfig(ConfigFactory.empty())
  )

  override protected def afterAll(): Unit = {
    super.afterAll()
    shutdown(system)
  }
} 
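As a usage illustration, a concrete suite extends the MatcherSpec base class and relies on TestKitBase for message assertions; the echo actor and the suite name below are hypothetical, and only system, testActor and expectMsg from the traits above are assumed.

package com.wavesplatform.dex.actors

import akka.actor.{Actor, Props}

class EchoActorSpec extends MatcherSpec("echo-actor-spec") {

  "An echo actor" should {
    "send back the message it receives" in {
      val echo = system.actorOf(Props(new Actor {
        def receive: Receive = { case msg => sender() ! msg }
      }))
      echo.tell("ping", testActor) // reply goes to testActor, provided by TestKitBase
      expectMsg("ping")
    }
  }
}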
Example 35
Source File: WithDB.scala    From matcher   with MIT License 5 votes vote down vote up
package com.wavesplatform.dex.db

import java.nio.file.Files

import com.wavesplatform.dex.db.leveldb.LevelDBFactory
import com.wavesplatform.dex.domain.account.Address
import com.wavesplatform.dex.domain.asset.Asset
import com.wavesplatform.dex.util.Implicits._
import com.wavesplatform.dex.util.TestHelpers
import monix.reactive.subjects.Subject
import org.iq80.leveldb.{DB, Options}
import org.scalatest.{BeforeAndAfterEach, Suite}

trait WithDB extends BeforeAndAfterEach { this: Suite =>

  private val path                  = Files.createTempDirectory("lvl").toAbsolutePath
  private var currentDBInstance: DB = _

  def db: DB = currentDBInstance

  protected val ignoreSpendableBalanceChanged: Subject[(Address, Asset), (Address, Asset)] = Subject.empty

  override def beforeEach(): Unit = {
    currentDBInstance = LevelDBFactory.factory.open(path.toFile, new Options().createIfMissing(true))
    super.beforeEach()
  }

  override def afterEach(): Unit =
    try {
      super.afterEach()
      db.close()
    } finally {
      TestHelpers.deleteRecursively(path)
    }

  protected def tempDb(f: DB => Any): Any = {
    val path = Files.createTempDirectory("lvl-temp").toAbsolutePath
    val db   = LevelDBFactory.factory.open(path.toFile, new Options().createIfMissing(true))
    try {
      f(db)
    } finally {
      db.close()
      TestHelpers.deleteRecursively(path)
    }
  }
} 
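WithDB is meant to be mixed into a concrete suite so that every test sees a freshly opened LevelDB instance; a minimal hypothetical usage, assuming the ScalaTest 3.x AnyFunSuite style used elsewhere in this project:

package com.wavesplatform.dex.db

import org.scalatest.funsuite.AnyFunSuite

class WithDBUsageSpec extends AnyFunSuite with WithDB {

  test("values written to the per-test DB can be read back") {
    val key   = "hello".getBytes("UTF-8")
    val value = "world".getBytes("UTF-8")
    db.put(key, value)
    assert(new String(db.get(key), "UTF-8") == "world")
  }
}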
Example 36
Source File: MatcherSuiteBase.scala    From matcher   with MIT License 5 votes vote down vote up
package com.wavesplatform.it

import java.nio.charset.StandardCharsets
import java.util.concurrent.ThreadLocalRandom

import cats.instances.FutureInstances
import com.wavesplatform.dex.asset.DoubleOps
import com.wavesplatform.dex.domain.account.KeyPair
import com.wavesplatform.dex.domain.asset.Asset
import com.wavesplatform.dex.domain.bytes.ByteStr
import com.wavesplatform.dex.domain.utils.ScorexLogging
import com.wavesplatform.dex.it.api.BaseContainersKit
import com.wavesplatform.dex.it.api.node.HasWavesNode
import com.wavesplatform.dex.it.config.{GenesisConfig, PredefinedAccounts, PredefinedAssets}
import com.wavesplatform.dex.it.dex.HasDex
import com.wavesplatform.dex.it.matchers.ItMatchers
import com.wavesplatform.dex.it.test.InformativeTestStart
import com.wavesplatform.dex.it.waves.{MkWavesEntities, ToWavesJConversions}
import com.wavesplatform.dex.test.matchers.DiffMatcherWithImplicits
import com.wavesplatform.dex.waves.WavesFeeConstants
import com.wavesplatform.it.api.ApiExtensions
import org.scalatest.concurrent.Eventually
import org.scalatest.freespec.AnyFreeSpec
import org.scalatest.matchers.should.Matchers
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, CancelAfterFailure}

import scala.concurrent.duration.DurationInt

trait MatcherSuiteBase
    extends AnyFreeSpec
    with Matchers
    with CancelAfterFailure
    with BeforeAndAfterAll
    with BeforeAndAfterEach
    with Eventually
    with BaseContainersKit
    with HasDex
    with HasWavesNode
    with MkWavesEntities
    with ApiExtensions
    with ItMatchers
    with DoubleOps
    with WavesFeeConstants
    with PredefinedAssets
    with PredefinedAccounts
    with DiffMatcherWithImplicits
    with InformativeTestStart
    with FutureInstances
    with ToWavesJConversions
    with ScorexLogging {

  GenesisConfig.setupAddressScheme()

  override protected val moduleName: String = "dex-it"

  override implicit def patienceConfig: PatienceConfig = super.patienceConfig.copy(timeout = 30.seconds, interval = 1.second)

  override protected def beforeAll(): Unit = {
    log.debug(s"Perform beforeAll")
    kafkaServer.foreach { _ =>
      createKafkaTopic(dexRunConfig.getString("waves.dex.events-queue.kafka.topic"))
    }
    wavesNode1.start()
    dex1.start()
  }

  override protected def afterAll(): Unit = {
    log.debug(s"Perform afterAll")
    stopBaseContainers()
    super.afterAll()
  }

  def createAccountWithBalance(balances: (Long, Asset)*): KeyPair = {
    val account = KeyPair(ByteStr(s"account-test-${ThreadLocalRandom.current().nextInt()}".getBytes(StandardCharsets.UTF_8)))

    balances.foreach {
      case (balance, asset) =>
        assert(
          wavesNode1.api.balance(alice, asset) >= balance,
          s"Alice doesn't have enough balance in ${asset.toString} to make a transfer"
        )
        broadcastAndAwait(mkTransfer(alice, account.toAddress, balance, asset))
    }
    account
  }
} 
Example 37
Source File: IntegrationSuiteBase.scala    From matcher   with MIT License 5 votes vote down vote up
package com.wavesplatform.dex.grpc.integration

import com.wavesplatform.dex.asset.DoubleOps
import com.wavesplatform.dex.domain.utils.ScorexLogging
import com.wavesplatform.dex.it.api.BaseContainersKit
import com.wavesplatform.dex.it.api.node.{HasWavesNode, NodeApiExtensions}
import com.wavesplatform.dex.it.config.{GenesisConfig, PredefinedAccounts, PredefinedAssets}
import com.wavesplatform.dex.it.test.InformativeTestStart
import com.wavesplatform.dex.it.waves.{MkWavesEntities, ToWavesJConversions}
import com.wavesplatform.dex.test.matchers.DiffMatcherWithImplicits
import com.wavesplatform.dex.waves.WavesFeeConstants
import org.scalatest.concurrent.Eventually
import org.scalatest.freespec.AnyFreeSpec
import org.scalatest.matchers.should.Matchers
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach}

import scala.concurrent.duration.DurationInt

trait IntegrationSuiteBase
    extends AnyFreeSpec
    with Matchers
    with BeforeAndAfterAll
    with BeforeAndAfterEach
    with Eventually
    with BaseContainersKit
    with HasWavesNode
    with MkWavesEntities
    with WavesFeeConstants
    with NodeApiExtensions
    with PredefinedAssets
    with PredefinedAccounts
    with DoubleOps
    with DiffMatcherWithImplicits
    with InformativeTestStart
    with ToWavesJConversions
    with ScorexLogging {

  GenesisConfig.setupAddressScheme()

  override protected val moduleName: String = "waves-integration-it"

  override implicit def patienceConfig: PatienceConfig = super.patienceConfig.copy(timeout = 30.seconds, interval = 1.second)

  override protected def beforeAll(): Unit = {
    log.debug(s"Perform beforeAll")
    wavesNode1.start()
  }

  override protected def afterAll(): Unit = {
    log.debug(s"Perform afterAll")
    stopBaseContainers()
    super.afterAll()
  }
} 
Example 38
Source File: MakingNestedTableTest.scala    From SparkUnitTestingExamples   with Apache License 2.0 5 votes vote down vote up
package com.cloudera.sa.spark.unittest.sql

import org.apache.spark.sql.Row
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.hive.HiveContext
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, FunSuite}

class MakingNestedTableTest extends FunSuite with
  BeforeAndAfterEach with BeforeAndAfterAll {

  @transient var sc: SparkContext = null
  @transient var hiveContext: HiveContext = null

  override def beforeAll(): Unit = {

    val envMap = Map[String, String](("Xmx", "512m"))

    val sparkConfig = new SparkConf()
    sparkConfig.set("spark.broadcast.compress", "false")
    sparkConfig.set("spark.shuffle.compress", "false")
    sparkConfig.set("spark.shuffle.spill.compress", "false")
    sparkConfig.set("spark.io.compression.codec", "lzf")
    sc = new SparkContext("local[2]", "unit test", sparkConfig)
    hiveContext = new HiveContext(sc)
  }

  override def afterAll(): Unit = {
    sc.stop()
  }

  test("Test table creation and summing of counts") {

    val loanRDD = sc.parallelize(Seq(Row("100", "100000000"),
                                      Row("101", "100000000"),
                                      Row("102", "100000000")))

    val partiesRDD = sc.parallelize(Seq(Row("100", "ted", "42"),
      Row("101", "bob", "42"),
      Row("101", "cat", "42"),
      Row("102", "Jen", "42"),
      Row("102", "Jenny", "42"),
      Row("102", "Ed", "42")))

    //loan
    hiveContext.sql("create table loan (id string, amount string) as parquet")
    val emptyLoanDF = hiveContext.sql("select * from loan limit 0;")
    val loanDF = hiveContext.createDataFrame(loanRDD, emptyLoanDF.schema)
    loanDF.registerTempTable("loanTmp")
    hiveContext.sql("insert into loan select * from loanTmp")

    //parties
    hiveContext.sql("create table party (loan_id string, name string, age string) as parquet")
    val emptyPartyDF = hiveContext.sql("select * from party limit 0;")
    val partyDF = hiveContext.createDataFrame(partiesRDD, emptyPartyDF.schema)
    partyDF.registerTempTable("partyTmp")
    hiveContext.sql("insert into party select * from partyTmp")

    val keyValueParty = hiveContext.sql("select * from party").map(r => {
      //Key Value
      (r.getString(r.fieldIndex("loan_id")), Seq(r))
    }).reduceByKey((a, b) => {
      a ++ b
    })

    val keyValueLoan = hiveContext.sql("select * from loan").map(r => {
      //Key Value
      (r.getString(r.fieldIndex("id")), r.getString(r.fieldIndex("amount")))
    })

    val nestedRDD = keyValueLoan.join(keyValueParty).map(r => {
      val loanId = r._1
      val loanAmount = r._2._1
      val seqOfParties = r._2._2.map(r => {
        Row(r.getString(r.fieldIndex("name")),
        r.getString(r.fieldIndex("age")))
      })

      Row(loanId, loanAmount, seqOfParties)
    })

    hiveContext.sql("create table nested (" +
      "loan_id string, " +
      "amount string, " +
      "party <array<struct<" +
      "  name: String," +
      "  age: String>>" +
      ") as parquet")

    val emptyNestedDF = hiveContext.sql("select * from nested limit 0;")
    val nestedDF = hiveContext.createDataFrame(nestedRDD, emptyNestedDF.schema)
    nestedDF.registerTempTable("nestedTmp")
    hiveContext.sql("insert into nested select * from nestedTmp")


  }
} 
Example 39
Source File: StreamingUnitTest.scala    From SparkUnitTestingExamples   with Apache License 2.0 5 votes vote down vote up
package com.cloudera.sa.spark.unittest.streaming

import org.apache.spark.rdd.RDD
import org.apache.spark.streaming._
import org.apache.spark.streaming.dstream.DStream
import org.apache.spark.{SparkConf, SparkContext}
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, FunSuite}

import scala.collection.mutable.Queue

class StreamingUnitTest extends FunSuite with
BeforeAndAfterEach with BeforeAndAfterAll{

  @transient var sc: SparkContext = null
  @transient var ssc: StreamingContext = null

  override def beforeAll(): Unit = {

    val envMap = Map[String,String](("Xmx", "512m"))

    val sparkConfig = new SparkConf()
    sparkConfig.set("spark.broadcast.compress", "false")
    sparkConfig.set("spark.shuffle.compress", "false")
    sparkConfig.set("spark.shuffle.spill.compress", "false")
    sparkConfig.set("spark.io.compression.codec", "lzf")
    sc = new SparkContext("local[2]", "unit test", sparkConfig)
    ssc = new StreamingContext(sc, Milliseconds(200))
  }

  override def afterAll(): Unit = {
    sc.stop()
  }

  test("Streaming word count") {

    val firstBatchRDD = sc.parallelize(Seq("a", "b", "c"))
    val secondBatchRDD = sc.parallelize(Seq("a", "e"))
    val thirdBatchRDD = sc.parallelize(Seq("b", "c", "e", "f"))
    val forthBatchRDD = sc.parallelize(Seq("a", "e"))

    val queue = new Queue[RDD[String]]

    queue.+=(firstBatchRDD)
    queue.+=(secondBatchRDD)
    queue.+=(thirdBatchRDD)
    queue.+=(forthBatchRDD)

    println(queue)

    val startTime = System.currentTimeMillis()

    val dstream = new TestableQueueInputDStream(ssc, queue, true, sc.makeRDD(Seq[String](), 1))
    //ssc.queueStream(queue)

    dstream.checkpoint(Seconds(100))

    val batchTotals:DStream[(String, Int)] = dstream.map(r => (r, 1)).reduceByKey(_ + _)

    val streamTotals = batchTotals.updateStateByKey(
      (seq:Seq[Int], opt:Option[Int]) => {
        if (!seq.isEmpty) {
          val totalCountForNew = seq.reduce(_ + _)
          if (opt.isEmpty) {
            Option(totalCountForNew)
          } else {
            Option(opt.get + totalCountForNew)
          }
        } else {
          opt
        }
    })

    streamTotals.foreachRDD(rdd => {

    })

    ssc.checkpoint("./tmp")
    ssc.start()
    ssc.awaitTerminationOrTimeout(2000)

    val endTime = System.currentTimeMillis()

    val rddList = streamTotals.slice(new Time(startTime), new Time(endTime))

    rddList(0).collect().foreach(println)
    assert(rddList(0).collect().filter(r => r._1.equals("a"))(0)._2 == 1)
    rddList(1).collect().foreach(println)
    assert(rddList(1).collect().filter(r => r._1.equals("a"))(0)._2  == 2)
    rddList(2).collect().foreach(println)
    assert(rddList(2).collect().filter(r => r._1.equals("a"))(0)._2  == 2)
    rddList(3).collect().foreach(println)
    assert(rddList(3).collect().filter(r => r._1.equals("a"))(0)._2  == 3)
  }
} 
Example 40
Source File: CoreUnitTest.scala    From SparkUnitTestingExamples   with Apache License 2.0 5 votes vote down vote up
package com.cloudera.sa.spark.unittest.core

import org.apache.spark.{SparkConf, SparkContext}
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, FunSuite}

import scala.collection.mutable

class CoreUnitTest extends FunSuite with
BeforeAndAfterEach with BeforeAndAfterAll{

  @transient var sc: SparkContext = null

  override def beforeAll(): Unit = {

    val envMap = Map[String,String](("Xmx", "512m"))

    val sparkConfig = new SparkConf()
    sparkConfig.set("spark.broadcast.compress", "false")
    sparkConfig.set("spark.shuffle.compress", "false")
    sparkConfig.set("spark.shuffle.spill.compress", "false")
    sparkConfig.set("spark.io.compression.codec", "lzf")
    sc = new SparkContext("local[2]", "unit test", sparkConfig)
  }

  override def afterAll(): Unit = {
    sc.stop()
  }

  test("Test word count") {
    val quotesRDD = sc.parallelize(Seq("Courage is not simply one of the virtues, but the form of every virtue at the testing point",
      "We have a very active testing community which people don't often think about when you have open source",
      "Program testing can be used to show the presence of bugs, but never to show their absence",
      "Simple systems are not feasible because they require infinite testing",
      "Testing leads to failure, and failure leads to understanding"))

    val wordCountRDD = quotesRDD.flatMap(r => r.split(' ')).
      map(r => (r.toLowerCase, 1)).
      reduceByKey((a,b) => a + b)

    val wordMap = new mutable.HashMap[String, Int]()
    wordCountRDD.take(100).
      foreach { case (word, count) => wordMap.put(word, count) }
    //Note this is better than foreach(r => wordMap.put(r._1, r._2))

    assert(wordMap.get("to").get == 4, "The word count for 'to' should had been 4 but it was " + wordMap.get("to").get)
    assert(wordMap.get("testing").get == 5, "The word count for 'testing' should had been 5 but it was " + wordMap.get("testing").get)
    assert(wordMap.get("is").get == 1, "The word count for 'is' should had been 1 but it was " + wordMap.get("is").get)
  }
} 
Example 41
Source File: KuduServiceIntegrationTest.scala    From pulse   with Apache License 2.0 5 votes vote down vote up
package io.phdata.pulse.logcollector

import io.phdata.pulse.common.domain.TimeseriesEvent
import org.apache.kudu.test.KuduTestHarness
import org.scalatest.{ BeforeAndAfterEach, FunSuite }

class KuduServiceIntegrationTest extends FunSuite with BeforeAndAfterEach {
  val kuduTestHarness = new KuduTestHarness()

  override def beforeEach(): Unit = {
    super.beforeEach()
    kuduTestHarness.before()
  }

  override def afterEach(): Unit = {
    super.afterEach()
    kuduTestHarness.after()
  }

  ignore("Create a table if it doesn't exist") {
    val client    = kuduTestHarness.getClient
    val service   = new KuduService(kuduTestHarness.getClient)
    val tableName = "footable"

    assert(service.tableCache.isEmpty)

    service.getOrCreateTable(tableName)

    assert(client.tableExists(tableName))

    // Make sure we can add an existing table to the cache
    service.tableCache.clear()
    val table = service.getOrCreateTable(tableName)
    assert(table != null)
  }

  ignore("Write events into Kudu") {
    val tableName = "fooApp"
    val client    = kuduTestHarness.getClient
    val numRows   = 1001

    val events = (1 to numRows).map { n =>
      new TimeseriesEvent(n, "id", "metric", 1.5d)
    }
    val stream = new KuduService(kuduTestHarness.getClient)
    events.foreach(e => stream.save(tableName, List(e)))

    // Sleep until the table is created; 'stream.save' runs asynchronously.
    while (!client.tableExists(tableName)) {
      Thread.sleep(100)
    }

    // Give some time for the row to be inserted
    Thread.sleep(10000)

    val table = client.openTable(tableName)
    val scanner = client
      .newScannerBuilder(table)
      .build()

    var rowCount = 0

    while (scanner.hasMoreRows) {
      rowCount = rowCount + scanner.nextRows().getNumRows
    }

    assertResult(numRows)(rowCount)
  }
} 
Example 42
Source File: SolrAlertTriggerTest.scala    From pulse   with Apache License 2.0 5 votes vote down vote up
package io.phdata.pulse.alertengine.trigger

import io.phdata.pulse.alertengine.{ AlertsDb, TestObjectGenerator }
import io.phdata.pulse.solr.{ BaseSolrCloudTest, TestUtil }
import org.scalatest.BeforeAndAfterEach

class SolrAlertTriggerTest extends BaseSolrCloudTest with BeforeAndAfterEach {
  val CONF_NAME               = "testconf"
  private val applicationName = TestUtil.randomIdentifier()

  override def beforeEach(): Unit = {
    super.beforeEach()
    AlertsDb.reset()
  }

  override def beforeAll(): Unit = {
    super.beforeAll()
    AlertsDb.reset()
    val alias = applicationName + "_all"

    solrClient.setDefaultCollection(alias)
    solrService.createCollection(alias, 1, 1, CONF_NAME, null)

    Seq[Map[String, String]](
      Map("id" -> "1", "level" -> "ERROR", "message" -> "sad"),
      Map("id" -> "2", "level" -> "INFO", "message"  -> "happy"),
      Map("id" -> "3", "level" -> "ERROR", "message" -> "very sad")
    ).foreach(addDocument)

    solrClient.commit(true, true, true)
    // unset the default collection so we are sure it is being set in the request
    solrClient.setDefaultCollection("")
  }

  private def stripVersion(result: Seq[Map[String, Any]]): Seq[Map[String, Any]] = {
    result.collect {
      case doc: Map[String, Any] =>
      assert(doc.contains("_version_"))
      doc - "_version_"
    }
  }

  test("query returns matching documents") {
    val alertRule =
      TestObjectGenerator.alertRule(
        query = "level:ERROR",
        retryInterval = 1,
        resultThreshold = Some(1),
        alertProfiles = List("[email protected]")
      )
    val expectedDocuments = Seq(
      Map("id" -> "1", "level" -> "ERROR", "message" -> "sad"),
      Map("id" -> "3", "level" -> "ERROR", "message" -> "very sad")
    )

    val trigger = new SolrAlertTrigger(solrService)
    val result  = trigger.query(applicationName, alertRule)
    val modifiedResult = stripVersion(result)
    assertResult(expectedDocuments)(modifiedResult)
  }

  test("query returns no documents") {
    val alertRule = TestObjectGenerator.alertRule(query = "level:WARN")

    val trigger = new SolrAlertTrigger(solrService)
    assertResult(Seq.empty)(trigger.query(applicationName, alertRule))
  }

  test("invalid query") {
    val alertRule = TestObjectGenerator.alertRule(query = ":")

    val trigger = new SolrAlertTrigger(solrService)
    assertThrows[Exception](trigger.query(applicationName, alertRule))
  }

} 
Example 43
Source File: SqlAlertTriggerTest.scala    From pulse   with Apache License 2.0 5 votes vote down vote up
package io.phdata.pulse.alertengine.trigger

import java.sql.{ DriverManager, Statement }

import io.phdata.pulse.alertengine.{ AlertsDb, TestObjectGenerator }
import io.phdata.pulse.solr.TestUtil
import org.scalatest.{ BeforeAndAfterAll, BeforeAndAfterEach, FunSuite }

class SqlAlertTriggerTest extends FunSuite with BeforeAndAfterEach with BeforeAndAfterAll {
  private val applicationName: String = "sql_test_" + TestUtil.randomIdentifier()
  private val dbUrl                   = s"jdbc:h2:mem:$applicationName;DB_CLOSE_DELAY=-1"

  override def beforeEach(): Unit = {
    super.beforeEach()
    AlertsDb.reset()
    prepareDatabase()
  }

  override def afterAll(): Unit =
    withStatement(statement => statement.execute("DROP ALL OBJECTS DELETE FILES;"))

  private def withStatement(function: Statement => Unit): Unit = {
    val connection = DriverManager.getConnection(dbUrl)
    try {
      val statement = connection.createStatement()
      try {
        function.apply(statement)
      } finally {
        statement.close()
      }
    } finally {
      connection.close()
    }
  }

  private def prepareDatabase(): Unit =
    withStatement { statement =>
      statement.execute("DROP ALL OBJECTS DELETE FILES;")
      statement.execute(s"""CREATE TABLE $applicationName (
           |id int not null,
           |error boolean not null,
           |message varchar(255) not null
           |);""".stripMargin)
    }

  test("query returns matching documents") {
    withStatement { statement =>
      statement.execute(s"""INSERT INTO $applicationName (id, error, message) VALUES
           |(1, true, 'sad'),
           |(3, true, 'very sad'),
           |(2, false, 'happy');""".stripMargin)
    }
    val alertRule =
      TestObjectGenerator.alertRule(
        query = s"""select * from $applicationName
           |where error = true
           |order by id""".stripMargin,
        retryInterval = 1,
        resultThreshold = Some(1),
        alertProfiles = List("[email protected]")
      )
    val expectedDocuments = Seq(
      Map("id" -> 1, "error" -> true, "message" -> "sad"),
      Map("id" -> 3, "error" -> true, "message" -> "very sad")
    )

    val trigger = new SqlAlertTrigger(dbUrl)
    val result  = trigger.query(applicationName, alertRule)
    assertResult(expectedDocuments)(result)
  }

  test("query returns no documents") {
    val alertRule = TestObjectGenerator.alertRule(query = s"select * from $applicationName")

    val trigger = new SqlAlertTrigger(dbUrl)
    assertResult(Seq.empty)(trigger.query(applicationName, alertRule))
  }

  test("invalid query") {
    val alertRule = TestObjectGenerator.alertRule()

    val trigger = new SqlAlertTrigger(dbUrl)
    assertThrows[Exception](trigger.query(applicationName, alertRule))
  }

  test("connection with options") {
    val alertRule = TestObjectGenerator.alertRule(query = s"select * from $applicationName")

    val trigger = new SqlAlertTrigger(dbUrl, dbOptions = Map("hello" -> "stuff"))
    trigger.query(applicationName, alertRule)
  }

  test("dbUrl null") {
    assertThrows[IllegalArgumentException](new SqlAlertTrigger(null))
  }

  test("dbUrl empty") {
    assertThrows[IllegalArgumentException](new SqlAlertTrigger(""))
  }

} 
Example 44
Source File: LocalSparkContext.scala    From flint   with Apache License 2.0 5 votes vote down vote up
package com.twosigma.flint

import org.apache.spark.SparkContext
import org.scalatest.{ BeforeAndAfterAll, BeforeAndAfterEach }
import org.scalatest.Suite


trait LocalSparkContext extends BeforeAndAfterEach with BeforeAndAfterAll {

  self: Suite =>

  @transient var sc: SparkContext = _

  override def beforeAll() {
    super.beforeAll()
  }

  override def afterEach() {
    resetSparkContext()
    super.afterEach()
  }

  def resetSparkContext(): Unit = {
    LocalSparkContext.stop(sc)
    sc = null
  }
}

object LocalSparkContext {
  def stop(sc: SparkContext) {
    if (sc != null) {
      sc.stop()
    }
    System.clearProperty("spark.driver.port")
  }
} 
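A suite mixing in LocalSparkContext typically creates sc inside each test (or in a beforeEach) and lets afterEach reset it; the sketch below is hypothetical and assumes the older org.scalatest.FunSuite that matches this file's import style:

package com.twosigma.flint

import org.apache.spark.SparkContext
import org.scalatest.FunSuite

class WordLengthSpec extends FunSuite with LocalSparkContext {

  test("computes word lengths with a throwaway SparkContext") {
    sc = new SparkContext("local[2]", "word-length-test")
    val lengths = sc.parallelize(Seq("spark", "flint")).map(_.length).collect()
    assert(lengths.sorted.sameElements(Array(5, 5)))
  }
}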
Example 45
Source File: SKRSpec.scala    From spark-kafka-writer   with Apache License 2.0 5 votes vote down vote up
package com.github.benfradet.spark.kafka.writer

import java.util.concurrent.atomic.AtomicInteger

import org.apache.kafka.common.serialization.{StringDeserializer, StringSerializer}
import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession
import org.apache.spark.streaming.kafka010.{ConsumerStrategies, KafkaUtils, LocationStrategies}
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.scalatest.concurrent.Eventually
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach}

import scala.collection.mutable.ArrayBuffer
import scala.util.Random
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

case class Foo(a: Int, b: String)

trait SKRSpec
  extends AnyWordSpec
  with Matchers
  with BeforeAndAfterEach
  with BeforeAndAfterAll
  with Eventually {

  val sparkConf = new SparkConf()
    .setMaster("local[1]")
    .setAppName(getClass.getSimpleName)

  var ktu: KafkaTestUtils = _
  override def beforeAll(): Unit = {
    ktu = new KafkaTestUtils
    ktu.setup()
  }
  override def afterAll(): Unit = {
    SKRSpec.callbackTriggerCount.set(0)
    if (ktu != null) {
      ktu.tearDown()
      ktu = null
    }
  }

  var topic: String = _
  var ssc: StreamingContext = _
  var spark: SparkSession = _
  override def afterEach(): Unit = {
    if (ssc != null) {
      ssc.stop()
      ssc = null
    }
    if (spark != null) {
      spark.stop()
      spark = null
    }
  }
  override def beforeEach(): Unit = {
    ssc = new StreamingContext(sparkConf, Seconds(1))
    spark = SparkSession.builder
      .config(sparkConf)
      .getOrCreate()
    topic = s"topic-${Random.nextInt()}"
    ktu.createTopics(topic)
  }

  def collect(ssc: StreamingContext, topic: String): ArrayBuffer[String] = {
    val kafkaParams = Map(
      "bootstrap.servers" -> ktu.brokerAddress,
      "auto.offset.reset" -> "earliest",
      "key.deserializer" -> classOf[StringDeserializer],
      "value.deserializer" -> classOf[StringDeserializer],
      "group.id" -> "test-collect"
    )
    val results = new ArrayBuffer[String]
    KafkaUtils.createDirectStream[String, String](
      ssc,
      LocationStrategies.PreferConsistent,
      ConsumerStrategies.Subscribe[String, String](Set(topic), kafkaParams)
    ).map(_.value())
      .foreachRDD { rdd =>
        results ++= rdd.collect()
        ()
      }
    results
  }

  val producerConfig = Map(
    "bootstrap.servers" -> "127.0.0.1:9092",
    "key.serializer" -> classOf[StringSerializer].getName,
    "value.serializer" -> classOf[StringSerializer].getName
  )
}

object SKRSpec {
  val callbackTriggerCount = new AtomicInteger()
} 
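A concrete spec built on SKRSpec only adds behaviours; the harness supplies a fresh topic, StreamingContext and SparkSession for every test. The smoke test below is hypothetical:

package com.github.benfradet.spark.kafka.writer

class HarnessSmokeSpec extends SKRSpec {

  "the SKRSpec harness" should {
    "provide a fresh topic and contexts for every test" in {
      topic should startWith("topic-")
      ssc should not be null
      spark should not be null
    }
  }
}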
Example 46
Source File: ProcessStepTestSupport.scala    From process   with Apache License 2.0 5 votes vote down vote up
package processframework

import akka.pattern.ask
import akka.actor.{ ActorRef, ActorContext, Actor, Props }
import akka.util.Timeout

import scala.concurrent.duration._
import scala.concurrent.Await
import scala.reflect.ClassTag

import akka.testkit.{ TestProbe, TestKit }
import org.scalatest.BeforeAndAfterEach

object ProcessStepTestSupport {
  case object GetStep
  case object ACommand
  case object AnEvent extends Process.Event
}

trait ProcessStepTestSupport[S, PS <: ProcessStep[S]] { this: TestKit with BeforeAndAfterEach ⇒
  implicit val timeout: Timeout = 1 second

  var testProbe: TestProbe = null
  var processActor: ActorRef = null

  override protected def beforeEach(): Unit = {
    testProbe = createTestProbe()
    processActor = createProcessActor()
  }

  def createTestProbe(): TestProbe
  def createProcessStep(executeProbe: TestProbe)(implicit context: ActorContext): PS

  def createProcessActor() = system.actorOf(Props(new Actor {
    val step = createProcessStep(testProbe)

    def receive = {
      case msg if sender() == step        ⇒ testActor forward msg
      case ProcessStepTestSupport.GetStep ⇒ sender() ! step
      case e: Process.Event               ⇒ testActor ! e
    }
  }))

  def processStep()(implicit classTag: ClassTag[PS]): PS =
    Await.result[PS]((processActor ? ProcessStepTestSupport.GetStep).mapTo[PS], 2 seconds)
} 
Example 47
Source File: SharedSparkContext.scala    From tispark   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark

import org.apache.spark.SharedSparkContext._
import org.apache.spark.sql.internal.StaticSQLConf
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, Suite}

trait SharedSparkContext extends BeforeAndAfterAll with BeforeAndAfterEach { self: Suite =>

  protected var _isHiveEnabled: Boolean = false
  protected var conf: SparkConf = new SparkConf(false)

  def sc: SparkContext = _sc

  override protected def beforeAll(): Unit = {
    super.beforeAll()
    if (_sc != null) {
      SharedSparkContext.stop()
    }
    initializeContext()
  }

  protected def initializeContext(): Unit =
    synchronized {
      if (null == _sc) {
        conf.set("spark.sql.test.key", "true")
        if (_isHiveEnabled) {
          conf.set(StaticSQLConf.CATALOG_IMPLEMENTATION, "hive")
        }
        _sc = new SparkContext("local[4]", "tispark-integration-test", conf)
      }
    }

  override protected def afterAll(): Unit = {
    try {
      SharedSparkContext.stop()
    } finally {
      super.afterAll()
    }
  }
}

object SharedSparkContext {

  @transient private var _sc: SparkContext = _

  def stop(): Unit =
    synchronized {
      if (_sc != null) {
        _sc.stop()
        _sc = null
      }
      // To avoid RPC rebinding to the same port, since it doesn't unbind immediately on shutdown
      System.clearProperty("spark.driver.port")
    }

} 
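A derived suite reuses the context started once in beforeAll; the example below is hypothetical and assumes the older org.scalatest.FunSuite consistent with this file's imports:

package org.apache.spark

import org.scalatest.FunSuite

class RangeSumSuite extends FunSuite with SharedSparkContext {

  test("sums a small range on the shared context") {
    val total = sc.parallelize(1 to 100).sum()
    assert(total == 5050.0)
  }
}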
Example 48
Source File: SparkStreamingRedisSuite.scala    From spark-redis   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.redislabs.provider.redis

import com.redislabs.provider.redis.env.Env
import com.redislabs.provider.redis.util.Logging
import org.apache.spark.sql.SparkSession
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.scalatest.{BeforeAndAfterEach, FunSuite}


trait SparkStreamingRedisSuite extends FunSuite with Env with BeforeAndAfterEach with Logging {

  override protected def beforeEach(): Unit = {
    super.beforeEach()
    spark = SparkSession.builder().config(conf).getOrCreate()
    sc = spark.sparkContext
    ssc = new StreamingContext(sc, Seconds(1))
  }

  override protected def afterEach(): Unit = {
    ssc.stop()
    spark.stop
    System.clearProperty("spark.driver.port")
    super.afterEach()
  }

} 
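A derived suite only adds tests; the Env trait (not shown in this listing) is assumed to declare conf, spark, sc and ssc. A hypothetical smoke test:

package com.redislabs.provider.redis

class RedisStreamSmokeTest extends SparkStreamingRedisSuite {

  test("spark and streaming contexts are rebuilt around each test") {
    assert(spark != null)
    assert(ssc != null)
    assert(sc.parallelize(Seq(1, 2, 3)).count() == 3L)
  }
}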
Example 49
Source File: VertxEnvironment.scala    From eventuate   with Apache License 2.0 5 votes vote down vote up
package com.rbmhtechnology.eventuate.adapter.vertx

import java.util.UUID

import akka.testkit.TestKit
import io.vertx.core.Vertx
import org.scalatest.{ BeforeAndAfterEach, Suite }

trait VertxEnvironment extends BeforeAndAfterEach {
  this: TestKit with Suite =>

  var vertx: Vertx = _

  override def beforeEach(): Unit = {
    super.beforeEach()
    vertx = Vertx.vertx()
  }

  def registerEventBusCodec(clazz: Class[_]): Unit = {
    vertx.eventBus().registerDefaultCodec(clazz.asInstanceOf[Class[AnyRef]], AkkaSerializationMessageCodec(clazz))
  }

  def endpointAddress(id: String): String =
    s"vertx-endpoint-$id-${UUID.randomUUID().toString}"
} 
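VertxEnvironment expects to be mixed into an akka TestKit suite (see its TestKit self-type). A hypothetical usage, assuming the older org.scalatest.WordSpecLike; a real suite would also close vertx and shut the actor system down in afterAll:

package com.rbmhtechnology.eventuate.adapter.vertx

import akka.actor.ActorSystem
import akka.testkit.TestKit
import org.scalatest.WordSpecLike

class VertxEnvironmentUsageSpec extends TestKit(ActorSystem("vertx-env-usage"))
  with WordSpecLike
  with VertxEnvironment {

  "VertxEnvironment" must {
    "provide a fresh Vertx instance and unique endpoint addresses" in {
      assert(vertx != null)
      assert(endpointAddress("ping") != endpointAddress("ping")) // UUID suffix differs per call
    }
  }
}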
Example 50
Source File: VertxEventBusProbes.scala    From eventuate   with Apache License 2.0 5 votes vote down vote up
package com.rbmhtechnology.eventuate.adapter.vertx

import akka.actor.ActorSystem
import akka.testkit.{ TestKit, TestProbe }
import com.rbmhtechnology.eventuate.adapter.vertx.utilities.EventBusMessage
import io.vertx.core.eventbus.Message
import org.scalatest.{ BeforeAndAfterEach, Suite }

trait VertxEventBusProbes extends BeforeAndAfterEach {
  this: TestKit with Suite with VertxEnvironment =>

  import VertxHandlerConverters._

  var endpoint1: EventBusEndpoint = _
  var endpoint2: EventBusEndpoint = _

  override def beforeEach(): Unit = {
    super.beforeEach()

    endpoint1 = EventBusEndpoint.withId("1")
    endpoint2 = EventBusEndpoint.withId("2")
  }

  def eventBusProbe(endpoint: String): TestProbe = {
    val probe = TestProbe()
    val handler = (m: Message[String]) => probe.ref ! EventBusMessage(m.body(), m, endpoint)
    vertx.eventBus().consumer[String](endpoint, handler.asVertxHandler)
    probe
  }

  object EventBusEndpoint {
    def apply(address: String): EventBusEndpoint =
      new EventBusEndpoint(address, eventBusProbe(address))

    def withId(id: String): EventBusEndpoint =
      apply(endpointAddress(id))
  }

  case class EventBusEndpoint(address: String, probe: TestProbe)
} 
Example 51
Source File: TestStreamingContext.scala    From gihyo-spark-book-example   with Apache License 2.0 5 votes vote down vote up
package jp.gihyo.spark

import org.scalatest.{BeforeAndAfterEach, Suite}

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.streaming.{StreamingContext, Seconds}
import jp.gihyo.spark.ch06.UserDic

private[spark]
trait TestStreamingContext extends BeforeAndAfterEach { self: Suite =>
  @transient var ssc: StreamingContext = _
  @transient var sc: SparkContext = _
  val master = "local[2]"
  val appN = "StreamingUnitTest"
  val bd = Seconds(1)

  override def beforeEach() {
    super.beforeEach()
    val conf = new SparkConf().setMaster(master)
      .setAppName(appN)
      .set("spark.streaming.clock", "org.apache.spark.util.ManualClock")
      .registerKryoClasses(Array(classOf[UserDic]))

    ssc = new StreamingContext(conf, bd)
    sc = ssc.sparkContext
  }

  override def afterEach() {
    try {
      if (ssc != null) {
        // stop with sc
        ssc.stop(true)
      }
      ssc = null;
    } finally {
      super.afterEach()
    }
  }
} 
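Because the trait is private[spark] to jp.gihyo.spark, a consuming suite has to live in that package; a hypothetical smoke test, assuming the older org.scalatest.FunSuite:

package jp.gihyo.spark

import org.scalatest.FunSuite

class TestStreamingContextUsageSuite extends FunSuite with TestStreamingContext {

  test("a fresh StreamingContext and SparkContext are available in each test") {
    assert(ssc != null)
    assert(sc.parallelize(Seq("a", "b", "c")).count() == 3L)
  }
}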
Example 52
Source File: LoginHandlerSpec.scala    From gatling-imap   with GNU Affero General Public License v3.0 5 votes vote down vote up
package com.linagora.gatling.imap.protocol.command

import akka.actor.ActorSystem
import akka.testkit.TestProbe
import com.linagora.gatling.imap.Fixture.bart
import com.linagora.gatling.imap.protocol.{Command, Response, UserId}
import com.linagora.gatling.imap.{CyrusServer, ImapTestUtils, RunningServer}
import com.sun.mail.imap.protocol.IMAPResponse
import org.scalatest.matchers.{MatchResult, Matcher}
import org.scalatest.{BeforeAndAfterEach, Matchers, WordSpec}
import org.slf4j
import org.slf4j.LoggerFactory

import scala.concurrent.duration._
import scala.concurrent.ExecutionContext.Implicits.global

class LoginHandlerSpec extends WordSpec with ImapTestUtils with BeforeAndAfterEach with Matchers {
  val logger: slf4j.Logger = LoggerFactory.getLogger(this.getClass.getCanonicalName)

  private val server: RunningServer = CyrusServer.start()

  override def beforeEach(): Unit = {
    server.addUser(bart)
  }

  override protected def afterEach(): Unit = {
    system.terminate()
    server.stop()
  }

  implicit lazy val system: ActorSystem = ActorSystem("LoginHandlerSpec")
  "Login handler" should {
    "send the response back when logged in" in {
      val probe = TestProbe()
      val sessionFuture = connect(server.mappedImapPort())
      sessionFuture.onComplete(session => {
        val handler = system.actorOf(LoginHandler.props(session.get))
        probe.send(handler, Command.Login(UserId(1), bart))
      })
      probe.expectMsgPF(1.minute) {
        case Response.LoggedIn(responses) => responses.isOk shouldBe true
      }
    }
  }

  object IMAPResponseMatchers {

    class HasTagMatcher(tag: String) extends Matcher[IMAPResponse] {
      def apply(left: IMAPResponse): MatchResult = {
        val name = left.getTag
        MatchResult(
          name == tag,
          s"""ImapResponse doesn't have tag "$tag"""",
          s"""ImapResponse has tag "$tag""""
        )
      }
    }

    class IsOkMatcher() extends Matcher[IMAPResponse] {
      def apply(left: IMAPResponse): MatchResult = {
        MatchResult(
          left.isOK,
          s"""ImapResponse isn't OK """,
          s"""ImapResponse is OK """
        )
      }
    }

    def isOk = new IsOkMatcher()

    def hasTag(tag: String) = new HasTagMatcher(tag)
  }

} 
Example 53
Source File: ImapSessionsSpec.scala    From gatling-imap   with GNU Affero General Public License v3.0 5 votes vote down vote up
package com.linagora.gatling.imap.protocol.command

import java.util.Properties

import akka.actor.ActorSystem
import akka.testkit.TestProbe
import com.linagora.gatling.imap.Fixture.bart
import com.linagora.gatling.imap.protocol.{Command, ImapProtocol, ImapResponses, ImapSessions, Response, UserId}
import com.linagora.gatling.imap.{CyrusServer, ImapTestUtils, RunningServer}
import org.scalatest.{BeforeAndAfterEach, Matchers, WordSpec}
import org.slf4j.{Logger, LoggerFactory}

import scala.concurrent.duration._

class ImapSessionsSpec extends WordSpec with Matchers with ImapTestUtils with BeforeAndAfterEach {
  val logger: Logger = LoggerFactory.getLogger(this.getClass.getCanonicalName)

  private val server: RunningServer = CyrusServer.start()

  override def beforeEach(): Unit = {
    server.addUser(bart)
  }

  override protected def afterEach(): Unit = {
    system.terminate()
    server.stop()
  }

  implicit lazy val system: ActorSystem = ActorSystem("LoginHandlerSpec")
  "the imap sessions actor" should {
    "log a user in" in {
      val config = new Properties()
      val protocol = ImapProtocol("localhost", server.mappedImapPort(), config)

      val sessions = system.actorOf(ImapSessions.props(protocol))
      val probe = TestProbe()
      val userId = UserId(1)
      probe.send(sessions, Command.Connect(userId))
      probe.expectMsg(10.second, Response.Connected(ImapResponses.empty))
      probe.send(sessions, Command.Login(userId, bart))
      probe.expectMsgPF(10.second) {
        case Response.LoggedIn(responses: ImapResponses) => responses.isOk shouldBe true
      }
    }
  }

} 
Example 54
Source File: OutwatchSpec.scala    From outwatch   with Apache License 2.0 5 votes vote down vote up
package outwatch

import scala.concurrent.Future
import cats.effect.ContextShift
import cats.effect.IO
import monix.execution.Ack.Continue
import monix.execution.ExecutionModel.SynchronousExecution
import monix.execution.schedulers.TrampolineScheduler
import monix.execution.{Cancelable, Scheduler}
import monix.reactive.Observable
import org.scalajs.dom.{document, window}
import org.scalatest.BeforeAndAfterEach
import org.scalatest._
import outwatch.Deprecated.IgnoreWarnings.initEvent
import org.scalatest.flatspec.{ AnyFlatSpec, AsyncFlatSpec }
import org.scalatest.matchers.should.Matchers

trait EasySubscribe {

  implicit class Subscriber[T](obs: Observable[T]) {
    def apply(next: T => Unit)(implicit s: Scheduler): Cancelable = obs.subscribe { t =>
      next(t)
      Continue
    }
  }
}

// TODO: We need this mock until localStorage is implemented in jsdom (https://github.com/tmpvar/jsdom/pull/2076)
trait LocalStorageMock {
  import scala.collection.mutable
  import scala.scalajs.js


  if (js.isUndefined(window.localStorage)) {
    val storageObject = new js.Object {
      private val map = new mutable.HashMap[String, String]

      def getItem(key: String): String = map.getOrElse(key, null)

      def setItem(key: String, value: String): Unit = {
        map += key -> value
      }

      def removeItem(key: String): Unit = {
        map -= key
      }

      def clear(): Unit = map.clear()
    }

    js.Dynamic.global.window.updateDynamic("localStorage")(storageObject)
  }

  def dispatchStorageEvent(key: String, newValue: String, oldValue: String): Unit = {
    if (key == null) window.localStorage.clear()
    else window.localStorage.setItem(key, newValue)

    val event = document.createEvent("Events")
    initEvent(event)("storage", canBubbleArg = true, cancelableArg = false)
    event.asInstanceOf[js.Dynamic].key = key
    event.asInstanceOf[js.Dynamic].newValue = newValue
    event.asInstanceOf[js.Dynamic].oldValue = oldValue
    event.asInstanceOf[js.Dynamic].storageArea = window.localStorage
    window.dispatchEvent(event)
    ()
  }
}

trait OutwatchSpec extends Matchers with BeforeAndAfterEach with EasySubscribe with LocalStorageMock { self: Suite =>

  implicit val scheduler: TrampolineScheduler = TrampolineScheduler(Scheduler.global, SynchronousExecution)
  implicit val cs: ContextShift[IO] = IO.contextShift(scheduler)

  override def beforeEach(): Unit = {

    document.body.innerHTML = ""

    window.localStorage.clear()

    // prepare body with <div id="app"></div>
    val root = document.createElement("div")
    root.id = "app"
    document.body.appendChild(root)
    ()
  }

}

abstract class JSDomSpec extends AnyFlatSpec with OutwatchSpec {
  implicit def executionContext = scheduler
}
abstract class JSDomAsyncSpec extends AsyncFlatSpec with OutwatchSpec {
  override def executionContext = scheduler

  implicit def ioAssertionToFutureAssertion(io: IO[Assertion]): Future[Assertion] = io.unsafeToFuture()
} 
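A spec built on JSDomSpec can rely on the #app root prepared in beforeEach; the sanity check below is hypothetical:

package outwatch

import org.scalajs.dom.document

class MountPointSpec extends JSDomSpec {

  "The prepared test DOM" should "contain the #app root created in beforeEach" in {
    Option(document.getElementById("app")) shouldBe defined
  }
}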
Example 55
Source File: HiveContextCompatibilitySuite.scala    From XSQL   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.sql.hive

import org.scalatest.BeforeAndAfterEach

import org.apache.spark.{SparkConf, SparkContext, SparkFunSuite}


class HiveContextCompatibilitySuite extends SparkFunSuite with BeforeAndAfterEach {

  override protected val enableAutoThreadAudit = false
  private var sc: SparkContext = null
  private var hc: HiveContext = null

  override def beforeAll(): Unit = {
    super.beforeAll()
    sc = SparkContext.getOrCreate(new SparkConf().setMaster("local").setAppName("test"))
    HiveUtils.newTemporaryConfiguration(useInMemoryDerby = true).foreach { case (k, v) =>
      sc.hadoopConfiguration.set(k, v)
    }
    hc = new HiveContext(sc)
  }

  override def afterEach(): Unit = {
    try {
      hc.sharedState.cacheManager.clearCache()
      hc.sessionState.catalog.reset()
    } finally {
      super.afterEach()
    }
  }

  override def afterAll(): Unit = {
    try {
      sc = null
      hc = null
    } finally {
      super.afterAll()
    }
  }

  test("basic operations") {
    val _hc = hc
    import _hc.implicits._
    val df1 = (1 to 20).map { i => (i, i) }.toDF("a", "x")
    val df2 = (1 to 100).map { i => (i, i % 10, i % 2 == 0) }.toDF("a", "b", "c")
      .select($"a", $"b")
      .filter($"a" > 10 && $"b" > 6 && $"c")
    val df3 = df1.join(df2, "a")
    val res = df3.collect()
    val expected = Seq((18, 18, 8)).toDF("a", "x", "b").collect()
    assert(res.toSeq == expected.toSeq)
    df3.createOrReplaceTempView("mai_table")
    val df4 = hc.table("mai_table")
    val res2 = df4.collect()
    assert(res2.toSeq == expected.toSeq)
  }

  test("basic DDLs") {
    val _hc = hc
    import _hc.implicits._
    val databases = hc.sql("SHOW DATABASES").collect().map(_.getString(0))
    assert(databases.toSeq == Seq("default"))
    hc.sql("CREATE DATABASE mee_db")
    hc.sql("USE mee_db")
    val databases2 = hc.sql("SHOW DATABASES").collect().map(_.getString(0))
    assert(databases2.toSet == Set("default", "mee_db"))
    val df = (1 to 10).map { i => ("bob" + i.toString, i) }.toDF("name", "age")
    df.createOrReplaceTempView("mee_table")
    hc.sql("CREATE TABLE moo_table (name string, age int)")
    hc.sql("INSERT INTO moo_table SELECT * FROM mee_table")
    assert(
      hc.sql("SELECT * FROM moo_table order by name").collect().toSeq ==
      df.collect().toSeq.sortBy(_.getString(0)))
    val tables = hc.sql("SHOW TABLES IN mee_db").select("tableName").collect().map(_.getString(0))
    assert(tables.toSet == Set("moo_table", "mee_table"))
    hc.sql("DROP TABLE moo_table")
    hc.sql("DROP TABLE mee_table")
    val tables2 = hc.sql("SHOW TABLES IN mee_db").select("tableName").collect().map(_.getString(0))
    assert(tables2.isEmpty)
    hc.sql("USE default")
    hc.sql("DROP DATABASE mee_db CASCADE")
    val databases3 = hc.sql("SHOW DATABASES").collect().map(_.getString(0))
    assert(databases3.toSeq == Seq("default"))
  }

} 
Example 56
Source File: BufferHolderSparkSubmitSuite.scala    From XSQL   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.sql.catalyst.expressions.codegen

import org.scalatest.{Assertions, BeforeAndAfterEach, Matchers}

import org.apache.spark.{SparkFunSuite, TestUtils}
import org.apache.spark.deploy.SparkSubmitSuite
import org.apache.spark.sql.catalyst.expressions.UnsafeRow
import org.apache.spark.unsafe.array.ByteArrayMethods
import org.apache.spark.util.ResetSystemProperties

// A test for growing the buffer holder to nearly 2GB. Due to the heap size limitation of the Spark
// unit tests JVM, the actually test code is running as a submit job.
class BufferHolderSparkSubmitSuite
  extends SparkFunSuite
    with Matchers
    with BeforeAndAfterEach
    with ResetSystemProperties {

  test("SPARK-22222: Buffer holder should be able to allocate memory larger than 1GB") {
    val unusedJar = TestUtils.createJarWithClasses(Seq.empty)

    val argsForSparkSubmit = Seq(
      "--class", BufferHolderSparkSubmitSuite.getClass.getName.stripSuffix("$"),
      "--name", "SPARK-22222",
      "--master", "local-cluster[1,1,4096]",
      "--driver-memory", "4g",
      "--conf", "spark.ui.enabled=false",
      "--conf", "spark.master.rest.enabled=false",
      "--conf", "spark.driver.extraJavaOptions=-ea",
      unusedJar.toString)
    SparkSubmitSuite.runSparkSubmit(argsForSparkSubmit, "../..")
  }
}

object BufferHolderSparkSubmitSuite extends Assertions {

  def main(args: Array[String]): Unit = {

    val ARRAY_MAX = ByteArrayMethods.MAX_ROUNDED_ARRAY_LENGTH

    val unsafeRow = new UnsafeRow(1000)
    val holder = new BufferHolder(unsafeRow)

    holder.reset()

    assert(intercept[IllegalArgumentException] {
      holder.grow(-1)
    }.getMessage.contains("because the size is negative"))

    // while buffer reuse may happen, this test checks whether the buffer can be grown
    holder.grow(ARRAY_MAX / 2)
    assert(unsafeRow.getSizeInBytes % 8 == 0)

    holder.grow(ARRAY_MAX / 2 + 7)
    assert(unsafeRow.getSizeInBytes % 8 == 0)

    holder.grow(Integer.MAX_VALUE / 2)
    assert(unsafeRow.getSizeInBytes % 8 == 0)

    holder.grow(ARRAY_MAX - holder.totalSize())
    assert(unsafeRow.getSizeInBytes % 8 == 0)

    assert(intercept[IllegalArgumentException] {
      holder.grow(ARRAY_MAX + 1 - holder.totalSize())
    }.getMessage.contains("because the size after growing"))
  }
} 
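The repeated unsafeRow.getSizeInBytes % 8 == 0 assertions above rely on the holder keeping the row buffer word-aligned, i.e. every requested growth is effectively rounded up to a multiple of 8 bytes. A small illustrative sketch of that rounding (roundToWordSize is a made-up helper, not part of BufferHolder's public API):

// Illustration only: mirrors the 8-byte alignment the assertions above depend on.
def roundToWordSize(requested: Int): Int = ((requested + 7) / 8) * 8

assert(roundToWordSize(13) == 16)
assert(roundToWordSize(64) == 64)
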
Example 57
Source File: WholeStageCodegenSparkSubmitSuite.scala    From XSQL   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.sql.execution

import org.scalatest.{Assertions, BeforeAndAfterEach, Matchers}
import org.scalatest.concurrent.TimeLimits

import org.apache.spark.{SparkFunSuite, TestUtils}
import org.apache.spark.deploy.SparkSubmitSuite
import org.apache.spark.internal.Logging
import org.apache.spark.sql.{LocalSparkSession, QueryTest, Row, SparkSession}
import org.apache.spark.sql.functions.{array, col, count, lit}
import org.apache.spark.sql.types.IntegerType
import org.apache.spark.unsafe.Platform
import org.apache.spark.util.ResetSystemProperties

// Due to the need to set driver's extraJavaOptions, this test needs to use actual SparkSubmit.
class WholeStageCodegenSparkSubmitSuite extends SparkFunSuite
  with Matchers
  with BeforeAndAfterEach
  with ResetSystemProperties {

  test("Generated code on driver should not embed platform-specific constant") {
    val unusedJar = TestUtils.createJarWithClasses(Seq.empty)

    // HotSpot JVM specific: Set up a local cluster with the driver/executor using mismatched
    // settings of UseCompressedOops JVM option.
    val argsForSparkSubmit = Seq(
      "--class", WholeStageCodegenSparkSubmitSuite.getClass.getName.stripSuffix("$"),
      "--master", "local-cluster[1,1,1024]",
      "--driver-memory", "1g",
      "--conf", "spark.ui.enabled=false",
      "--conf", "spark.master.rest.enabled=false",
      "--conf", "spark.driver.extraJavaOptions=-XX:-UseCompressedOops",
      "--conf", "spark.executor.extraJavaOptions=-XX:+UseCompressedOops",
      unusedJar.toString)
    SparkSubmitSuite.runSparkSubmit(argsForSparkSubmit, "../..")
  }
}

object WholeStageCodegenSparkSubmitSuite extends Assertions with Logging {

  var spark: SparkSession = _

  def main(args: Array[String]): Unit = {
    TestUtils.configTestLog4j("INFO")

    spark = SparkSession.builder().getOrCreate()

    // Make sure the test is run where the driver and the executors use different object layouts
    val driverArrayHeaderSize = Platform.BYTE_ARRAY_OFFSET
    val executorArrayHeaderSize =
      spark.sparkContext.range(0, 1).map(_ => Platform.BYTE_ARRAY_OFFSET).collect.head.toInt
    assert(driverArrayHeaderSize > executorArrayHeaderSize)

    val df = spark.range(71773).select((col("id") % lit(10)).cast(IntegerType) as "v")
      .groupBy(array(col("v"))).agg(count(col("*")))
    val plan = df.queryExecution.executedPlan
    assert(plan.find(_.isInstanceOf[WholeStageCodegenExec]).isDefined)

    val expectedAnswer =
      Row(Array(0), 7178) ::
        Row(Array(1), 7178) ::
        Row(Array(2), 7178) ::
        Row(Array(3), 7177) ::
        Row(Array(4), 7177) ::
        Row(Array(5), 7177) ::
        Row(Array(6), 7177) ::
        Row(Array(7), 7177) ::
        Row(Array(8), 7177) ::
        Row(Array(9), 7177) :: Nil
    val result = df.collect
    QueryTest.sameRows(result.toSeq, expectedAnswer) match {
      case Some(errMsg) => fail(errMsg)
      case _ =>
    }
  }
} 
Example 58
Source File: SharedSparkSession.scala    From XSQL   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.sql.test

import scala.concurrent.duration._

import org.scalatest.{BeforeAndAfterEach, Suite}
import org.scalatest.concurrent.Eventually

import org.apache.spark.{DebugFilesystem, SparkConf}
import org.apache.spark.sql.{SparkSession, SQLContext}
import org.apache.spark.sql.catalyst.optimizer.ConvertToLocalRelation
import org.apache.spark.sql.internal.SQLConf


  protected override def afterAll(): Unit = {
    try {
      super.afterAll()
    } finally {
      try {
        if (_spark != null) {
          try {
            _spark.sessionState.catalog.reset()
          } finally {
            _spark.stop()
            _spark = null
          }
        }
      } finally {
        SparkSession.clearActiveSession()
        SparkSession.clearDefaultSession()
      }
    }
  }

  protected override def beforeEach(): Unit = {
    super.beforeEach()
    DebugFilesystem.clearOpenStreams()
  }

  protected override def afterEach(): Unit = {
    super.afterEach()
    // Clear all persistent datasets after each test
    spark.sharedState.cacheManager.clearCache()
    // files can be closed from other threads, so wait a bit
    // normally this doesn't take more than 1s
    eventually(timeout(10.seconds), interval(2.seconds)) {
      DebugFilesystem.assertNoOpenStreams()
    }
  }
} 
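A hedged sketch of how a concrete suite typically mixes in a shared-session trait like the one excerpted above; the suite name and query are illustrative, and the trait is assumed to expose a spark SparkSession:

// Hypothetical usage; only the lifecycle behaviour comes from the trait above.
import org.apache.spark.sql.QueryTest

class MySharedSessionSuite extends QueryTest with SharedSparkSession {
  test("runs a trivial query against the shared session") {
    assert(spark.range(3).count() == 3)
  }
}
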
Example 59
Source File: WithSQLContext.scala    From HANAVora-Extensions   with Apache License 2.0 5 votes vote down vote up
package com.sap.spark

import java.util.Locale

import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.hive.HiveContext
import org.scalatest.{BeforeAndAfterEach, Suite}

trait WithSQLContext extends BeforeAndAfterEach {
  self: Suite with WithSparkContext =>

  override def beforeEach(): Unit = {
    try {
      super.beforeEach()
      setUpSQLContext()
    } catch {
      case ex: Throwable =>
        tearDownSQLContext()
        throw ex
    }
  }

  override def afterEach(): Unit = {
    try {
      super.afterEach()
    } finally {
      tearDownSQLContext()
    }
  }

  implicit def sqlContext: SQLContext = _sqlContext
  def sqlc: SQLContext = sqlContext

  var _sqlContext: SQLContext = _

  protected def setUpSQLContext(): Unit =
    _sqlContext = SQLContext.getOrCreate(sc).newSession()


  protected def tearDownSQLContext(): Unit =
    _sqlContext = null

  protected def tableName(name: String): String =
    sqlc match {
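      // Hive stores table identifiers lower-cased, so normalize the expected name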
      
      case _: HiveContext => name.toLowerCase(Locale.ENGLISH)
      case _ => name
    }

} 
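A hedged usage sketch; WithSparkContext (required by the self-type) is assumed to provide the sc that setUpSQLContext reads, and FunSuite is used purely for illustration:

// Hypothetical suite built on the trait above.
import org.scalatest.FunSuite

class MySqlContextSuite extends FunSuite with WithSparkContext with WithSQLContext {
  test("each test gets a fresh SQL context session") {
    assert(sqlContext.range(0, 10).count() == 10)
  }
}
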
Example 60
Source File: SQLRunnerSuite.scala    From HANAVora-Extensions   with Apache License 2.0 5 votes vote down vote up
package com.sap.spark.cli

import java.io.{ByteArrayInputStream, ByteArrayOutputStream, InputStream}

import org.apache.spark.SparkContext
import org.apache.spark.sql.{GlobalSapSQLContext, SQLContext}
import org.scalatest.{BeforeAndAfterEach, FunSuite, ShouldMatchers}



    // good call
    val goodOpts =
      SQLRunner.parseOpts(List("a.sql", "b.sql", "-o", "output.csv"))

    goodOpts.sqlFiles should be(List("a.sql", "b.sql"))
    goodOpts.output should be(Some("output.csv"))

    // bad call
    val badOpts = SQLRunner.parseOpts(List())

    badOpts.sqlFiles should be(List())
    badOpts.output should be(None)

    // ugly call
    val uglyOpts =
      SQLRunner.parseOpts(List("a.sql", "-o", "output.csv", "b.sql"))

    uglyOpts.sqlFiles should be(List("a.sql", "b.sql"))
    uglyOpts.output should be(Some("output.csv"))
  }

  def runSQLTest(input: String, expectedOutput: String): Unit = {
    val inputStream: InputStream = new ByteArrayInputStream(input.getBytes())
    val outputStream = new ByteArrayOutputStream()

    SQLRunner.sql(inputStream, outputStream)

    val output = outputStream.toString
    output should be(expectedOutput)
  }

  test("can run dummy query") {
    val input = "SELECT 1;"
    val output = "1\n"

    runSQLTest(input, output)
  }

  test("can run multiple dummy queries") {
    val input = """
        |SELECT 1;SELECT 2;
        |SELECT 3;
      """.stripMargin

    val output = "1\n2\n3\n"

    runSQLTest(input, output)
  }

  test("can run a basic example with tables") {
    val input = """
                  |SELECT * FROM DEMO_TABLE;
                  |SELECT * FROM DEMO_TABLE LIMIT 1;
                  |DROP TABLE DEMO_TABLE;
                """.stripMargin

    val output = "1,a\n2,b\n3,c\n1,a\n"

    runSQLTest(input, output)
  }

  test("can run an example with comments") {
    val input = """
                  |SELECT * FROM DEMO_TABLE; -- this is the first query
                  |SELECT * FROM DEMO_TABLE LIMIT 1;
                  |-- now let's drop a table
                  |DROP TABLE DEMO_TABLE;
                """.stripMargin

    val output = "1,a\n2,b\n3,c\n1,a\n"

    runSQLTest(input, output)
  }
} 
Example 61
Source File: LocalSparkContext.scala    From streamliner-examples   with Apache License 2.0 5 votes vote down vote up
package test.util

import org.apache.spark.{SparkConf, SparkContext}
import org.scalatest.BeforeAndAfterEach
import org.scalatest._

trait LocalSparkContext extends BeforeAndAfterEach { self: Suite =>

  @transient private var _sc: SparkContext = _

  val _sparkConf = new SparkConf(false)
    .set("spark.ui.showConsoleProgress", "false")

  def sc: SparkContext = _sc

  override def beforeEach() {
    _sc = new SparkContext("local[4]", "test", _sparkConf)
    super.beforeEach()
  }

  override def afterEach() {
    resetSparkContext()
    super.afterEach()
  }

  def resetSparkContext(): Unit = {
    LocalSparkContext.stop(_sc)
    _sc = null
  }

}

object LocalSparkContext {
  def stop(sc: SparkContext) {
    if (sc != null) {
      sc.stop()
    }
    // To avoid Akka rebinding to the same port, since it doesn't unbind immediately on shutdown
    System.clearProperty("spark.driver.port")
  }

  
  def withSpark[T](sc: SparkContext)(f: SparkContext => T): T = {
    try {
      f(sc)
    } finally {
      stop(sc)
    }
  }

} 
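A hedged example of using the loan-pattern helper above outside a ScalaTest suite; the master URL and app name are illustrative:

// Hypothetical standalone usage of LocalSparkContext.withSpark.
import org.apache.spark.{SparkConf, SparkContext}
import test.util.LocalSparkContext

object WithSparkExample {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext("local[2]", "with-spark-example", new SparkConf(false))
    LocalSparkContext.withSpark(sc) { ctx =>
      // withSpark stops the context even if this block throws
      println(ctx.parallelize(1 to 10).sum())
    }
  }
}
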
Example 63
Source File: IntegrationBaseSpec.scala    From vat-api   with Apache License 2.0 5 votes vote down vote up
package support

import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach}
import org.scalatestplus.play.guice.GuiceOneServerPerSuite
import play.api.inject.guice.GuiceApplicationBuilder
import play.api.libs.json.{JsValue, Json}
import play.api.libs.ws.{WSClient, WSRequest, WSResponse}
import play.api.{Application, Environment, Mode}

trait IntegrationBaseSpec extends UnitSpec with WireMockHelper with GuiceOneServerPerSuite
  with BeforeAndAfterEach with BeforeAndAfterAll {

  val mockHost: String = WireMockHelper.host
  val mockPort: String = WireMockHelper.wireMockPort.toString

  lazy val client: WSClient = app.injector.instanceOf[WSClient]

  def servicesConfig: Map[String, Any] = Map(
    "microservice.services.des.host" -> mockHost,
    "microservice.services.des.port" -> mockPort,
    "microservice.services.auth.host" -> mockHost,
    "microservice.services.auth.port" -> mockPort,
    "auditing.consumer.baseUri.port" -> mockPort,
    "microservice.services.non-repudiation.host" -> mockHost,
    "microservice.services.non-repudiation.port" -> mockPort,
    "feature-switch.refactor.enabled" -> true,
    "feature-switch.refactor.prod.enabled" -> false,
    "microservice.services.non-repudiation.maxTimeout" -> 5000
  )

  override implicit lazy val app: Application = new GuiceApplicationBuilder()
    .in(Environment.simple(mode = Mode.Dev))
    .configure(servicesConfig)
    .build()

  override def beforeAll(): Unit = {
    super.beforeAll()
    startWireMock()
  }

  override def afterAll(): Unit = {
    stopWireMock()
    super.afterAll()
  }

  def buildRequest(path: String): WSRequest = client.url(s"http://localhost:$port$path").withFollowRedirects(false)

  def document(response: WSResponse): JsValue = Json.parse(response.body)
} 
Example 64
Source File: TestApplication.scala    From vat-api   with Apache License 2.0 5 votes vote down vote up
package uk.gov.hmrc.vatapi

import com.github.tomakehurst.wiremock.WireMockServer
import com.github.tomakehurst.wiremock.client.WireMock
import com.github.tomakehurst.wiremock.client.WireMock._
import com.github.tomakehurst.wiremock.core.WireMockConfiguration._
import com.github.tomakehurst.wiremock.stubbing.StubMapping
import org.scalamock.scalatest.MockFactory
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach}
import play.api.http.Status._

import scala.concurrent.duration._
import scala.language.postfixOps

trait TestApplication
  extends UnitSpec
    with BeforeAndAfterEach
    with BeforeAndAfterAll
    with MockFactory {

  override implicit val timeout: FiniteDuration = 100 seconds

  val mockPort = 22222
  val mockHost = "localhost"

  protected val wiremockBaseUrl: String = s"http://$mockHost:$mockPort"
  private val wireMockServer = new WireMockServer(wireMockConfig().port(mockPort))

  protected def baseBeforeAll(): StubMapping = {
    wireMockServer.stop()
    wireMockServer.start()
    WireMock.configureFor(mockHost, mockPort)
    // the below stub is here so that the application finds the registration endpoint which is called on startup
    stubFor(post(urlPathEqualTo("/registration")).willReturn(aResponse().withStatus(OK)))
  }

  override def beforeAll(): Unit = {
    super.beforeAll()
    baseBeforeAll()
  }

  override def afterAll(): Unit = {
    super.afterAll()
    wireMockServer.stop()
  }

  override def beforeEach(): Unit = {
    super.beforeEach()
    WireMock.reset()
  }

} 
Example 65
Source File: Mock.scala    From vat-api   with Apache License 2.0 5 votes vote down vote up
package uk.gov.hmrc.vatapi.mocks

import org.mockito.{ArgumentMatchers => Matchers}
import org.mockito.Mockito
import org.mockito.stubbing.OngoingStubbing
import org.mockito.verification.VerificationMode
import org.scalatest.{BeforeAndAfterEach, Suite}
import org.scalatestplus.mockito.MockitoSugar

trait Mock extends MockitoSugar with BeforeAndAfterEach { _: Suite =>

  def any[T]() = Matchers.any[T]()
  def eqTo[T](t: T) = Matchers.eq[T](t)
  def when[T](t: T) = Mockito.when(t)
  def reset[T](t: T) = Mockito.reset(t)

  def verify[T](t: T): T = Mockito.verify(t)
  def verify[T](t: T, mode: VerificationMode): T = Mockito.verify(t, mode)
  def times(n: Int): VerificationMode = Mockito.times(n)
  def never: VerificationMode = Mockito.never()
  def once: VerificationMode = Mockito.times(1)

  implicit class stubbingOps[T](stubbing: OngoingStubbing[T]){
    def returns(t: T) = stubbing.thenReturn(t)
  }
} 
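A hedged sketch of a suite using the helpers above; SomeConnector and the FlatSpec style are illustrative assumptions, only the Mock trait itself comes from the example:

// Hypothetical collaborator and suite exercising the `returns` alias.
import org.scalatest.{FlatSpec, Matchers}

trait SomeConnector { def fetch(id: String): String }

class ConnectorSpec extends FlatSpec with Matchers with Mock {
  "the returns alias" should "behave like thenReturn" in {
    val connector = mock[SomeConnector]
    when(connector.fetch(eqTo("42"))) returns "answer"

    connector.fetch("42") shouldBe "answer"
    verify(connector, once).fetch(any[String]())
  }
}
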
Example 66
Source File: RpcBackendTest.scala    From udash-core   with Apache License 2.0 5 votes vote down vote up
package io.udash.web.guide.demos.rpc

import io.udash.web.SeleniumTest
import org.scalatest.BeforeAndAfterEach

class RpcBackendTest extends SeleniumTest with BeforeAndAfterEach {
  override protected final val url = "/rpc/client-server"

  "RpcBackend view" should {
    "receive ClientId in demo" in {
      val callDemo = findElementById("client-id-demo")
      var response = findElementById("client-id-demo-response")

      callDemo.isEnabled should be(true)
      response.getText.equalsIgnoreCase("???") should be(true)

      callDemo.click()

      eventually {
        response = findElementById("client-id-demo-response")
        response.getText.startsWith("ClientId") should be(true)
        callDemo.isEnabled should be(false)
      }
    }
  }
} 
Example 67
Source File: RestClientServerTest.scala    From udash-core   with Apache License 2.0 5 votes vote down vote up
package io.udash
package web.guide.demos.rest

import io.udash.web.SeleniumTest
import org.openqa.selenium.By.ById
import org.scalatest.{BeforeAndAfterEach, Ignore}

//todo migrate content from udash selenium or remove
@Ignore
class RestClientServerTest extends SeleniumTest with BeforeAndAfterEach {
  val url = "/rest"

  "RestClientServer view" should {
    "receive response in demo" in {
      val callDemo = findElementById("echo-rest-demo")
      val inputDemo = callDemo.findElement(new ById("echo-rest-demo-input"))
      val responseDemo = callDemo.findElement(new ById("echo-rest-demo-response"))
      val queryButton = callDemo.findElement(new ById("echo-rest-demo-query-btn"))
      val headerButton = callDemo.findElement(new ById("echo-rest-demo-header-btn"))
      val urlButton = callDemo.findElement(new ById("echo-rest-demo-url-btn"))
      val bodyButton = callDemo.findElement(new ById("echo-rest-demo-body-btn"))

      eventually {
        responseDemo.getText should be("Response:")
      }

      val request = inputDemo.getAttribute("value")

      queryButton.click()
      eventually {
        responseDemo.getText should be(s"Response:\nQuery:$request")
      }

      headerButton.click()
      eventually {
        responseDemo.getText should be(s"Response:\nHeader:$request")
      }

      urlButton.click()
      eventually {
        responseDemo.getText should be(s"Response:\nURL:$request")
      }

      bodyButton.click()
      eventually {
        responseDemo.getText should be(s"Response:\nBody:$request")
      }
    }
  }
} 
Example 68
Source File: SeleniumTest.scala    From udash-core   with Apache License 2.0 5 votes vote down vote up
package io.udash.web

import java.util.concurrent.TimeUnit

import org.openqa.selenium.firefox.{FirefoxDriver, FirefoxOptions}
import org.openqa.selenium.remote.RemoteWebDriver
import org.openqa.selenium.{Dimension, WebElement}
import org.scalatest.concurrent.Eventually
import org.scalatest.time.{Millis, Seconds, Span}
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach}
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

private trait ServerConfig {
  def init(): Unit
  def createUrl(part: String): String
  def destroy(): Unit
}

// Doesn't launch embedded guide app server
private final class ExternalServerConfig(urlPrefix: String) extends ServerConfig {
  require(!urlPrefix.endsWith("/"))

  override def createUrl(part: String): String = {
    require(part.startsWith("/"))
    urlPrefix + part
  }

  override def init(): Unit = {}
  override def destroy(): Unit = {}
}

// Launches embedded guide server
private final class InternalServerConfig extends ServerConfig {
  private val server = Launcher.createApplicationServer()

  override def init(): Unit = server.start()

  override def destroy(): Unit = server.stop()

  override def createUrl(part: String): String = {
    require(part.startsWith("/"))
    s"http://127.0.0.2:${server.port}$part"
  }
}

abstract class SeleniumTest extends AnyWordSpec with Matchers with BeforeAndAfterAll with BeforeAndAfterEach with Eventually {
  override implicit val patienceConfig: PatienceConfig = PatienceConfig(scaled(Span(10, Seconds)), scaled(Span(50, Millis)))

  protected final val driver: RemoteWebDriver = new FirefoxDriver(new FirefoxOptions().setHeadless(true))
  driver.manage().timeouts().implicitlyWait(200, TimeUnit.MILLISECONDS)
  driver.manage().window().setSize(new Dimension(1440, 800))

  protected final def findElementById(id: String): WebElement = eventually {
    driver.findElementById(id)
  }

  protected def url: String

  private val server: ServerConfig = new InternalServerConfig

  override protected def beforeAll(): Unit = {
    super.beforeAll()
    server.init()
  }

  override protected def beforeEach(): Unit = {
    super.beforeEach()
    driver.get(server.createUrl(url))
  }

  override protected def afterAll(): Unit = {
    super.afterAll()
    server.destroy()
    driver.close()
  }
} 
Example 69
Source File: BrokerResources.scala    From reactive-activemq   with Apache License 2.0 5 votes vote down vote up
package akka.stream.integration

import java.io.InputStream
import java.net.URL

import akka.stream.integration.BrokerResources.{ QueueStat, TopicStat }
import org.scalatest.BeforeAndAfterEach

import scala.xml.NodeSeq

trait BrokerResources extends BeforeAndAfterEach { _: TestSpec =>

  def enableClearQueus: Boolean

  private def callBroker(path: String): InputStream = {
    val amqHost = system.settings.config.getString("amq.host")
    val url = new URL(s"http://$amqHost:8161" + path)
    val urlConnection = url.openConnection()
    val basicAuth = "Basic " + new String(java.util.Base64.getUrlEncoder.encode("admin:admin".getBytes()))
    urlConnection.addRequestProperty("Authorization", basicAuth)
    urlConnection.getInputStream
  }

  // communicate with the broker //
  private def getQueueXmlFromBroker: NodeSeq = {
    import scala.xml.XML
    XML.load(callBroker("/admin/xml/queues.jsp"))
  }

  def getTopicXmlFromBroker: NodeSeq = {
    import scala.xml.XML
    XML.load(callBroker("/admin/xml/topics.jsp"))
  }

  def getQueueStats: List[QueueStat] = (for {
    e ← getQueueXmlFromBroker \\ "queue"
    stat ← e \ "stats"
  } yield QueueStat(
    (e \ "@name").text,
    (stat \ "@size").text.toInt,
    (stat \ "@consumerCount").text.toInt,
    (stat \ "@enqueueCount").text.toInt,
    (stat \ "@dequeueCount").text.toInt
  )).toList

  def getTopicStats: List[TopicStat] = (for {
    e ← getTopicXmlFromBroker \\ "topic"
    stat ← e \ "stats"
  } yield TopicStat(
    (e \ "@name").text,
    (stat \ "@size").text.toInt,
    (stat \ "@consumerCount").text.toInt,
    (stat \ "@enqueueCount").text.toInt,
    (stat \ "@dequeueCount").text.toInt
  )).toList

  def purgeQueues(): Unit = {
    def purgeQueue(destinationName: String): InputStream = {
      val path = s"/api/jolokia/exec/org.apache.activemq:brokerName=localhost,destinationName=$destinationName,destinationType=Queue,type=Broker/purge"
      callBroker(path)
    }
    getQueueList.foreach(purgeQueue)
  }

  def getQueueList: List[String] = (for {
    e ← getQueueXmlFromBroker \\ "queue"
  } yield (e \ "@name").text).toList

  def getQueueStatFor(topic: String): Option[QueueStat] =
    getQueueStats.find(_.name contains topic)

  def getQueueMessageCount(topic: String): Option[Int] = for {
    stat ← getQueueStatFor(topic)
  } yield stat.enqueueCount - stat.dequeueCount

  override protected def beforeEach(): Unit = {
    if (enableClearQueus)
      purgeQueues()
    super.beforeEach()
  }
}

object BrokerResources {
  case class QueueStat(name: String, size: Int, consumerCount: Int, enqueueCount: Int, dequeueCount: Int)
  case class TopicStat(name: String, size: Int, consumerCount: Int, enqueueCount: Int, dequeueCount: Int)
} 
Example 70
Source File: RunCliCmdTests.scala    From openwhisk   with Apache License 2.0 5 votes vote down vote up
package org.apache.openwhisk.common

import java.io.File

import org.junit.runner.RunWith
import org.scalatest.{BeforeAndAfterEach, FlatSpec}
import org.scalatest.junit.JUnitRunner
import common.RunCliCmd
import common.TestUtils._

import scala.collection.mutable.Buffer

@RunWith(classOf[JUnitRunner])
class RunCliCmdTests extends FlatSpec with RunCliCmd with BeforeAndAfterEach {

  case class TestRunResult(code: Int) extends RunResult(code, "", "")
  val defaultRR = TestRunResult(0)

  override def baseCommand = Buffer.empty

  override def runCmd(expectedExitCode: Int,
                      dir: File,
                      env: Map[String, String],
                      fileStdin: Option[File],
                      params: Seq[String]): RunResult = {
    cmdCount += 1
    rr.getOrElse(defaultRR)
  }

  override def beforeEach() = {
    rr = None
    cmdCount = 0
  }

  var rr: Option[TestRunResult] = None // optional run result override per test
  var cmdCount = 0

  it should "retry commands that experience network errors" in {
    Seq(ANY_ERROR_EXIT, DONTCARE_EXIT, NETWORK_ERROR_EXIT).foreach { code =>
      cmdCount = 0

      rr = Some(TestRunResult(NETWORK_ERROR_EXIT))
      noException shouldBe thrownBy {
        cli(Seq.empty, expectedExitCode = code)
      }

      cmdCount shouldBe 3 + 1
    }
  }

  it should "not retry commands if retry is disabled" in {
    rr = Some(TestRunResult(NETWORK_ERROR_EXIT))
    noException shouldBe thrownBy {
      cli(Seq.empty, expectedExitCode = ANY_ERROR_EXIT, retriesOnNetworkError = 0)
    }

    cmdCount shouldBe 1
  }

  it should "not retry commands if failure is not retriable" in {
    Seq(MISUSE_EXIT, ERROR_EXIT, SUCCESS_EXIT).foreach { code =>
      cmdCount = 0

      rr = Some(TestRunResult(code))
      noException shouldBe thrownBy {
        cli(Seq.empty, expectedExitCode = DONTCARE_EXIT, retriesOnNetworkError = 3)
      }

      cmdCount shouldBe 1
    }
  }

} 
Example 71
Source File: RuncClientTests.scala    From openwhisk   with Apache License 2.0 5 votes vote down vote up
package org.apache.openwhisk.core.containerpool.docker.test

import akka.actor.ActorSystem

import scala.concurrent.Future
import org.junit.runner.RunWith
import org.scalatest.FlatSpec
import org.scalatest.junit.JUnitRunner

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.ExecutionContext
import scala.concurrent.duration._
import org.scalatest.Matchers
import org.apache.openwhisk.core.containerpool.docker.RuncClient
import common.{StreamLogging, WskActorSystem}
import org.apache.openwhisk.core.containerpool.ContainerId
import org.apache.openwhisk.common.TransactionId
import org.scalatest.BeforeAndAfterEach
import org.scalatest.concurrent.{IntegrationPatience, ScalaFutures}
import org.apache.openwhisk.common.LogMarker
import org.apache.openwhisk.common.LoggingMarkers.INVOKER_RUNC_CMD

@RunWith(classOf[JUnitRunner])
class RuncClientTests
    extends FlatSpec
    with Matchers
    with StreamLogging
    with BeforeAndAfterEach
    with WskActorSystem
    with ScalaFutures
    with IntegrationPatience {

  override def beforeEach = stream.reset()

  implicit val transid = TransactionId.testing
  val id = ContainerId("Id")

  val runcCommand = "docker-runc"

  
  def verifyLogs(cmd: String, failed: Boolean = false) = {
    logLines.head should include(s"${runcCommand} ${cmd} ${id.asString}")

    // start log marker must be found
    val start = LogMarker.parse(logLines.head)
    start.token.toStringWithSubAction should be(INVOKER_RUNC_CMD(cmd).toStringWithSubAction)

    // end log marker must be found
    val expectedEnd = if (failed) INVOKER_RUNC_CMD(cmd).asError else INVOKER_RUNC_CMD(cmd).asFinish
    val end = LogMarker.parse(logLines.last)
    end.token.toStringWithSubAction shouldBe expectedEnd.toStringWithSubAction
  }

  behavior of "RuncClient"

  Seq("pause", "resume").foreach { cmd =>
    it should s"$cmd a container successfully and create log entries" in {
      val rc = runcClient { Future.successful("") }
      runcProxy(rc, cmd).futureValue
      verifyLogs(cmd)
    }

    it should s"write error markers when $cmd fails" in {
      val rc = runcClient { Future.failed(new RuntimeException()) }
      a[RuntimeException] should be thrownBy runcProxy(rc, cmd).futureValue
      verifyLogs(cmd, failed = true)
    }

  }
} 
Example 72
Source File: WhiskAdminCliTestBase.scala    From openwhisk   with Apache License 2.0 5 votes vote down vote up
package org.apache.openwhisk.core.database

import akka.stream.ActorMaterializer
import common.{StreamLogging, WskActorSystem}
import org.rogach.scallop.throwError
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, FlatSpec, Matchers}
import org.apache.openwhisk.core.cli.{Conf, WhiskAdmin}
import org.apache.openwhisk.core.database.test.DbUtils
import org.apache.openwhisk.core.entity.WhiskAuthStore

import scala.util.Random

trait WhiskAdminCliTestBase
    extends FlatSpec
    with WskActorSystem
    with DbUtils
    with StreamLogging
    with BeforeAndAfterEach
    with BeforeAndAfterAll
    with ScalaFutures
    with Matchers {

  implicit val materializer = ActorMaterializer()
  // Bring the timeout used by ScalaFutures in sync with DbUtils
  implicit override val patienceConfig: PatienceConfig = PatienceConfig(timeout = dbOpTimeout)
  protected val authStore = WhiskAuthStore.datastore()

  // Ensure scallop does not exit upon validation failure
  throwError.value = true

  override def afterEach(): Unit = {
    cleanup()
  }

  override def afterAll(): Unit = {
    println("Shutting down store connections")
    authStore.shutdown()
    super.afterAll()
  }

  protected def randomString(len: Int = 5): String = Random.alphanumeric.take(len).mkString

  protected def resultOk(args: String*): String =
    WhiskAdmin(new Conf(args.toSeq))
      .executeCommand()
      .futureValue
      .right
      .get

  protected def resultNotOk(args: String*): String =
    WhiskAdmin(new Conf(args.toSeq))
      .executeCommand()
      .futureValue
      .left
      .get
      .message
} 
Example 73
Source File: ActivationStoreBehaviorBase.scala    From openwhisk   with Apache License 2.0 5 votes vote down vote up
package org.apache.openwhisk.core.database.test.behavior

import java.time.Instant

import akka.stream.ActorMaterializer
import common.{StreamLogging, WskActorSystem}
import org.apache.openwhisk.common.TransactionId
import org.apache.openwhisk.core.database.{ActivationStore, CacheChangeNotification, UserContext}
import org.apache.openwhisk.core.database.test.behavior.ArtifactStoreTestUtil.storeAvailable
import org.apache.openwhisk.core.entity._
import org.scalatest.concurrent.{IntegrationPatience, ScalaFutures}
import org.scalatest.{BeforeAndAfterEach, FlatSpec, Matchers, Outcome}

import scala.collection.mutable.ListBuffer
import scala.concurrent.Await
import scala.concurrent.duration.Duration
import scala.concurrent.duration.DurationInt
import scala.language.postfixOps
import scala.util.{Random, Try}

trait ActivationStoreBehaviorBase
    extends FlatSpec
    with ScalaFutures
    with Matchers
    with StreamLogging
    with WskActorSystem
    with IntegrationPatience
    with BeforeAndAfterEach {

  protected implicit val materializer: ActorMaterializer = ActorMaterializer()
  protected implicit val notifier: Option[CacheChangeNotification] = None

  def context: UserContext
  def activationStore: ActivationStore
  private val docsToDelete = ListBuffer[(UserContext, ActivationId)]()

  def storeType: String

  protected def transId() = TransactionId(Random.alphanumeric.take(32).mkString)

  override def afterEach(): Unit = {
    cleanup()
    stream.reset()
  }

  override protected def withFixture(test: NoArgTest): Outcome = {
    assume(storeAvailable(storeAvailableCheck), s"$storeType not configured or available")
    val outcome = super.withFixture(test)
    if (outcome.isFailed) {
      println(logLines.mkString("\n"))
    }
    outcome
  }

  protected def storeAvailableCheck: Try[Any] = Try(true)
  //~----------------------------------------< utility methods >

  protected def store(activation: WhiskActivation, context: UserContext)(
    implicit transid: TransactionId,
    notifier: Option[CacheChangeNotification]): DocInfo = {
    val doc = activationStore.store(activation, context).futureValue
    docsToDelete.append((context, ActivationId(activation.docid.asString)))
    doc
  }

  protected def newActivation(ns: String, actionName: String, start: Long): WhiskActivation = {
    WhiskActivation(
      EntityPath(ns),
      EntityName(actionName),
      Subject(),
      ActivationId.generate(),
      Instant.ofEpochMilli(start),
      Instant.ofEpochMilli(start + 1000))
  }

  
  def cleanup()(implicit timeout: Duration = 10 seconds): Unit = {
    implicit val tid: TransactionId = transId()
    docsToDelete.map { e =>
      Try {
        Await.result(activationStore.delete(e._2, e._1), timeout)
      }
    }
    docsToDelete.clear()
  }

} 
Example 74
Source File: ControllerRoutesTests.scala    From openwhisk   with Apache License 2.0 5 votes vote down vote up
package org.apache.openwhisk.core.controller.test

import akka.http.scaladsl.model.StatusCodes._
import akka.http.scaladsl.server.Route
import org.apache.openwhisk.common.AkkaLogging
import org.apache.openwhisk.core.controller.Controller
import org.apache.openwhisk.core.entity.ExecManifest.Runtimes
import org.junit.runner.RunWith
import org.scalatest.BeforeAndAfterEach
import org.scalatest.junit.JUnitRunner
import system.rest.RestUtil
import spray.json._
import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport._
import spray.json.DefaultJsonProtocol._



@RunWith(classOf[JUnitRunner])
class ControllerRoutesTests extends ControllerTestCommon with BeforeAndAfterEach with RestUtil {

  implicit val logger = new AkkaLogging(akka.event.Logging.getLogger(actorSystem, this))

  behavior of "Controller"

  it should "return unhealthy invokers status" in {

    configureBuildInfo()

    val controller =
      new Controller(instance, Runtimes(Set.empty, Set.empty, None), whiskConfig, system, materializer, logger)
    Get("/invokers/ready") ~> Route.seal(controller.internalInvokerHealth) ~> check {
      status shouldBe InternalServerError
      responseAs[JsObject].fields("unhealthy") shouldBe JsString("0/0")
    }
  }

  it should "return ready state true when healthy == total invokers" in {

    val res = Controller.readyState(5, 5, 1.0)
    res shouldBe true
  }

  it should "return ready state false when 0 invokers" in {

    val res = Controller.readyState(0, 0, 0.5)
    res shouldBe false
  }

  it should "return ready state false when threshold < (healthy / total)" in {

    val res = Controller.readyState(7, 3, 0.5)
    res shouldBe false
  }

  private def configureBuildInfo(): Unit = {
    System.setProperty("whisk.info.build-no", "")
    System.setProperty("whisk.info.date", "")
  }

} 
Example 75
Source File: SwaggerRoutesTests.scala    From openwhisk   with Apache License 2.0 5 votes vote down vote up
package org.apache.openwhisk.core.controller.test

import org.junit.runner.RunWith
import org.scalatest.BeforeAndAfterEach
import org.scalatest.junit.JUnitRunner

import akka.http.scaladsl.model.StatusCodes._
import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport._
import akka.http.scaladsl.server.Route
import akka.http.scaladsl.model.Uri

import spray.json._
import spray.json.DefaultJsonProtocol._

import org.apache.openwhisk.core.controller.SwaggerDocs



@RunWith(classOf[JUnitRunner])
class SwaggerRoutesTests extends ControllerTestCommon with BeforeAndAfterEach {

  behavior of "Swagger routes"

  it should "server docs" in {
    implicit val tid = transid()
    val swagger = new SwaggerDocs(Uri.Path.Empty, "infoswagger.json")
    Get("/docs") ~> Route.seal(swagger.swaggerRoutes) ~> check {
      status shouldBe PermanentRedirect
      header("location").get.value shouldBe "docs/index.html?url=/api-docs"
    }

    Get("/api-docs") ~> Route.seal(swagger.swaggerRoutes) ~> check {
      status shouldBe OK
      responseAs[JsObject].fields("swagger") shouldBe JsString("2.0")
    }
  }
} 
Example 76
Source File: InstanceIdAssignerTests.scala    From openwhisk   with Apache License 2.0 5 votes vote down vote up
package org.apache.openwhisk.core.invoker.test

import common.StreamLogging
import org.apache.curator.test.TestingServer
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.{BeforeAndAfterEach, FlatSpec, Matchers}
import org.apache.openwhisk.core.invoker.InstanceIdAssigner

@RunWith(classOf[JUnitRunner])
class InstanceIdAssignerTests extends FlatSpec with Matchers with StreamLogging with BeforeAndAfterEach {
  behavior of "Id Assignment"

  private var zkServer: TestingServer = _

  override protected def beforeEach(): Unit = {
    zkServer = new TestingServer()
  }

  override protected def afterEach(): Unit = {
    zkServer.stop()
  }

  it should "assign fresh id" in {
    val assigner = new InstanceIdAssigner(zkServer.getConnectString)
    assigner.getId("foo") shouldBe 0
  }

  it should "reuse id if exists" in {
    val assigner = new InstanceIdAssigner(zkServer.getConnectString)
    assigner.getId("foo") shouldBe 0
    assigner.getId("bar") shouldBe 1
    assigner.getId("bar") shouldBe 1
  }

} 
Example 77
Source File: GrpcSpec.scala    From mleap   with Apache License 2.0 5 votes vote down vote up
package ml.combust.mleap.grpc.server

import akka.actor.ActorSystem
import akka.stream.{ActorMaterializer, Materializer}
import akka.testkit.TestKit
import io.grpc.{ManagedChannel, Server}
import ml.combust.mleap.executor.service.TransformService
import ml.combust.mleap.executor.testkit.TransformServiceSpec
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach}
import org.scalatest.concurrent.ScalaFutures

import scala.concurrent.duration._
import ml.combust.mleap.grpc.server.TestUtil._

class GrpcSpec extends TestKit(ActorSystem("grpc-server-test"))
  with TransformServiceSpec
  with BeforeAndAfterEach
  with BeforeAndAfterAll
  with ScalaFutures {

  private lazy val server = createServer(system)
  private lazy val channel = inProcessChannel
  private lazy val client = createClient(channel)

  override lazy val transformService: TransformService = {
    server
    client
  }

  override implicit def materializer: Materializer = ActorMaterializer()(system)

  override protected def afterAll(): Unit = {
    server.shutdown()
    channel.shutdown()
    TestKit.shutdownActorSystem(system, 5.seconds, verifySystemShutdown = true)
  }
} 
Example 78
Source File: TestSpec.scala    From spark-distcp   with Apache License 2.0 5 votes vote down vote up
package com.coxautodata

import java.io.ByteArrayInputStream
import java.nio.file.Files

import com.coxautodata.objects.SerializableFileStatus
import com.coxautodata.utils.FileListing
import org.apache.commons.io.{FileUtils, IOUtils}
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, LocalFileSystem, Path}
import org.scalatest.{BeforeAndAfterEach, FunSpec, Matchers}

trait TestSpec extends FunSpec with Matchers with BeforeAndAfterEach {

  var testingBaseDir: java.nio.file.Path = _
  var testingBaseDirName: String = _
  var testingBaseDirPath: Path = _
  var localFileSystem: LocalFileSystem = _

  override def beforeEach(): Unit = {
    super.beforeEach()
    testingBaseDir = Files.createTempDirectory("test_output")
    testingBaseDirName = testingBaseDir.toString
    localFileSystem = FileSystem.getLocal(new Configuration())
    testingBaseDirPath = localFileSystem.makeQualified(new Path(testingBaseDirName))
  }

  override def afterEach(): Unit = {
    super.afterEach()
    FileUtils.deleteDirectory(testingBaseDir.toFile)
  }

  def createFile(relativePath: Path, content: Array[Byte]): SerializableFileStatus = {
    val path = new Path(testingBaseDirPath, relativePath)
    localFileSystem.mkdirs(path.getParent)
    val in = new ByteArrayInputStream(content)
    val out = localFileSystem.create(path)
    IOUtils.copy(in, out)
    in.close()
    out.close()
    SerializableFileStatus(localFileSystem.getFileStatus(path))
  }

  def fileStatusToResult(f: SerializableFileStatus): FileListing = {
    FileListing(f.getPath.toString, if (f.isFile) Some(f.getLen) else None)
  }

} 
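A hedged sketch of a spec built on the helpers above; the relative path and file content are illustrative:

// Hypothetical spec; relies only on members defined in TestSpec above.
import org.apache.hadoop.fs.Path

class CreateFileSpec extends TestSpec {
  describe("createFile") {
    it("writes the content under the temporary test directory") {
      val status = createFile(new Path("data/part-00000"), "hello".getBytes)
      status.getLen shouldBe 5
      localFileSystem.exists(new Path(testingBaseDirPath, "data/part-00000")) shouldBe true
    }
  }
}
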
Example 79
Source File: SaveJSON.scala    From Hands-On-Big-Data-Analytics-with-PySpark   with MIT License 5 votes vote down vote up
package com.tomekl007.chapter_4

import com.tomekl007.UserTransaction
import org.apache.spark.SparkContext
import org.apache.spark.sql.SparkSession
import org.scalatest.Matchers._
import org.scalatest.{BeforeAndAfterEach, FunSuite}

import scala.reflect.io.Path

class SaveJSON extends FunSuite with BeforeAndAfterEach {
  val spark = SparkSession.builder().master("local[2]").getOrCreate()

  private val FileName = "transactions.json"

  override def afterEach() {
    val path = Path(FileName)
    path.deleteRecursively()
  }

  test("should save and load in JSON") {
    //given
    import spark.sqlContext.implicits._
    val rdd = spark.sparkContext
      .makeRDD(List(UserTransaction("a", 100), UserTransaction("b", 200)))
      .toDF()

    //when
    rdd.coalesce(1).write.format("json").save(FileName)

    val fromFile = spark.read.json(FileName)

    fromFile.show()
    assert(fromFile.count() == 2)
  }
} 
Example 80
Source File: SaveCSV.scala    From Hands-On-Big-Data-Analytics-with-PySpark   with MIT License 5 votes vote down vote up
package com.tomekl007.chapter_4

import com.tomekl007.UserTransaction
import org.apache.spark.sql.SparkSession
import org.scalatest.{BeforeAndAfterEach, FunSuite}

import scala.reflect.io.Path

class SaveCSV extends FunSuite with BeforeAndAfterEach {
  val spark = SparkSession.builder().master("local[2]").getOrCreate()

  private val FileName = "transactions.csv"

  override def afterEach() {
    val path = Path(FileName)
    path.deleteRecursively()
  }

  test("should save and load CSV with header") {
    //given
    import spark.sqlContext.implicits._
    val rdd = spark.sparkContext
      .makeRDD(List(UserTransaction("a", 100), UserTransaction("b", 200)))
      .toDF()

    //when
    rdd.coalesce(1)
      .write
      .format("csv")
      .option("header", "true")
      .save(FileName)

    val fromFile = spark.read.option("header", "true").csv(FileName)

    fromFile.show()
    assert(fromFile.count() == 2)
  }

  test("should save and load CSV without header") {
    //given
    import spark.sqlContext.implicits._
    val rdd = spark.sparkContext
      .makeRDD(List(UserTransaction("a", 100), UserTransaction("b", 200)))
      .toDF()

    //when
    rdd.coalesce(1)
      .write
      .format("csv")
      .option("header", "false")
      .save(FileName)

    val fromFile = spark.read.option("header", "false").csv(FileName)

    fromFile.show()
    assert(fromFile.count() == 2)
  }
} 
Example 81
Source File: SavePlainText.scala    From Hands-On-Big-Data-Analytics-with-PySpark   with MIT License 5 votes vote down vote up
package com.tomekl007.chapter_4

import java.io.File

import com.tomekl007.UserTransaction
import org.apache.spark.sql.SparkSession
import org.apache.spark.{Partitioner, SparkContext}
import org.scalatest.{BeforeAndAfterEach, FunSuite}
import org.scalatest.Matchers._

import scala.reflect.io.Path

class SavePlainText extends FunSuite with BeforeAndAfterEach {
  val spark: SparkContext = SparkSession.builder().master("local[2]").getOrCreate().sparkContext

  private val FileName = "transactions.txt"

  override def afterEach() {
    val path = Path(FileName)
    path.deleteRecursively()
  }

  test("should save and load in plain text") {
    //given
    val rdd = spark.makeRDD(List(UserTransaction("a", 100), UserTransaction("b", 200)))

    //when
    rdd.coalesce(1).saveAsTextFile(FileName)

    val fromFile = spark.textFile(FileName)

    fromFile.collect().toList should contain theSameElementsAs List(
      "UserTransaction(a,100)", "UserTransaction(b,200)"
      //note - this is string!
    )
  }
} 
Example 82
Source File: SaveParquet.scala    From Hands-On-Big-Data-Analytics-with-PySpark   with MIT License 5 votes vote down vote up
package com.tomekl007.chapter_4

import com.databricks.spark.avro._
import com.tomekl007.UserTransaction
import org.apache.spark.sql.SparkSession
import org.scalatest.{BeforeAndAfterEach, FunSuite}

import scala.reflect.io.Path

class SaveParquet extends FunSuite with BeforeAndAfterEach {
  val spark = SparkSession.builder().master("local[2]").getOrCreate()

  private val FileName = "transactions.parquet"

  override def afterEach() {
    val path = Path(FileName)
    path.deleteRecursively()
  }

  test("should save and load parquet") {
    //given
    import spark.sqlContext.implicits._
    val rdd = spark.sparkContext
      .makeRDD(List(UserTransaction("a", 100), UserTransaction("b", 200)))
      .toDF()

    //when
    rdd.coalesce(2)
      .write
      .parquet(FileName)

    val fromFile = spark.read.parquet(FileName)

    fromFile.show()
    assert(fromFile.count() == 2)
  }

} 
Example 83
Source File: SaveAvro.scala    From Hands-On-Big-Data-Analytics-with-PySpark   with MIT License 5 votes vote down vote up
package com.tomekl007.chapter_4

import com.tomekl007.UserTransaction
import org.apache.spark.sql.SparkSession
import org.scalatest.{BeforeAndAfterEach, FunSuite}
import com.databricks.spark.avro._

import scala.reflect.io.Path

class SaveAvro extends FunSuite with BeforeAndAfterEach {
  val spark = SparkSession.builder().master("local[2]").getOrCreate()

  private val FileName = "transactions.avro"

  override def afterEach() {
    val path = Path(FileName)
    path.deleteRecursively()
  }

  test("should save and load avro") {
    //given
    import spark.sqlContext.implicits._
    val rdd = spark.sparkContext
      .makeRDD(List(UserTransaction("a", 100), UserTransaction("b", 200)))
      .toDF()

    //when
    rdd.coalesce(2)
      .write
      .avro(FileName)

    val fromFile = spark.read.avro(FileName)

    fromFile.show()
    assert(fromFile.count() == 2)
  }

} 
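The Save* suites above all repeat the same afterEach cleanup of their output file. A hedged sketch of how that pattern could be factored into one small trait (the trait name is made up and is not part of the original examples):

// Hypothetical helper trait extracting the shared cleanup.
import org.scalatest.{BeforeAndAfterEach, Suite}
import scala.reflect.io.Path

trait DeletesOutputFile extends BeforeAndAfterEach { this: Suite =>
  def FileName: String

  override def afterEach(): Unit = {
    try super.afterEach()
    finally Path(FileName).deleteRecursively()
  }
}
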
Example 84
Source File: MDCSupportSpec.scala    From money   with Apache License 2.0 5 votes vote down vote up
package com.comcast.money.core.internal

import com.comcast.money.api.SpanId
import org.scalatest.{ BeforeAndAfterEach, Matchers, OneInstancePerTest, WordSpec }
import org.slf4j.MDC

import scala.collection.JavaConverters._
import scala.collection.mutable

class MDCSupportSpec extends WordSpec with Matchers with BeforeAndAfterEach with OneInstancePerTest {

  val testMDCSupport = new MDCSupport
  val spanId = new SpanId()

  override def beforeEach() = {
    SpanLocal.clear()
  }

  "MDCSupport" should {
    "set the span in MDC when provide" in {
      testMDCSupport.setSpanMDC(Some(spanId))
      MDC.get("moneyTrace") shouldEqual MDCSupport.format(spanId)
    }
    "clear the MDC value when set to None" in {
      testMDCSupport.setSpanMDC(Some(spanId))
      MDC.get("moneyTrace") shouldEqual MDCSupport.format(spanId)

      testMDCSupport.setSpanMDC(None)
      MDC.get("moneyTrace") shouldBe null
    }
    "not be run if tracing is disabled" in {
      val disabled = new MDCSupport(false)
      disabled.setSpanMDC(Some(spanId))
      MDC.get("moneyTrace") shouldBe null
    }
    "not propogate MDC if disabled" in {
      val mdcContext: mutable.Map[_, _] = mutable.HashMap("FINGERPRINT" -> "print")
      val disabled = new MDCSupport(false)
      disabled.propogateMDC(Some(mdcContext.asJava))
      MDC.get("FINGERPRINT") shouldBe null
    }
    "propogate MDC if not disabled" in {
      val mdcContext: mutable.Map[_, _] = mutable.HashMap("FINGERPRINT" -> "print")

      testMDCSupport.propogateMDC(Some(mdcContext.asJava))
      MDC.get("FINGERPRINT") shouldBe "print"
    }
    "clear MDC if given an empty context" in {
      MDC.put("FINGERPRINT", "print")
      testMDCSupport.propogateMDC(None)
      MDC.get("FINGERPRINT") shouldBe null
    }
    "set span name" in {
      testMDCSupport.setSpanNameMDC(Some("foo"))
      MDC.get("spanName") shouldBe "foo"
      testMDCSupport.getSpanNameMDC shouldBe Some("foo")
    }
    "clear span name from MDC when given an empty value" in {
      MDC.put("spanName", "shouldBeRemoved")
      testMDCSupport.setSpanNameMDC(None)
      MDC.get("spanName") shouldBe null
      testMDCSupport.getSpanNameMDC shouldBe None
    }
  }
} 
Example 85
Source File: SpanLocalSpec.scala    From money   with Apache License 2.0 5 votes vote down vote up
package com.comcast.money.core.internal

import com.comcast.money.api.SpanId
import com.comcast.money.core.handlers.TestData
import org.scalatest.mockito.MockitoSugar
import org.scalatest.{ OneInstancePerTest, BeforeAndAfterEach, Matchers, WordSpec }
import org.slf4j.MDC

class SpanLocalSpec extends WordSpec
  with Matchers with OneInstancePerTest with BeforeAndAfterEach with MockitoSugar with TestData {

  override def afterEach() = {
    SpanLocal.clear()
  }

  "SpanLocal" when {
    "an item exists in span local" should {
      "return the span local value" in {
        SpanLocal.push(testSpan)
        SpanLocal.current shouldEqual Some(testSpan)
      }
      "clear the stored value" in {
        SpanLocal.push(testSpan)

        SpanLocal.clear()
        SpanLocal.current shouldEqual None
      }
      "do nothing if trying to push a null value" in {
        SpanLocal.push(testSpan)
        SpanLocal.push(null)
        SpanLocal.current shouldEqual Some(testSpan)
      }
      "add to the existing call stack" in {
        val nested = testSpan.copy(new SpanId())

        SpanLocal.push(testSpan)
        SpanLocal.push(nested)
        SpanLocal.current shouldEqual Some(nested)
      }
      "pop the last added item from the call stack" in {
        val nested = testSpan.copy(new SpanId())
        SpanLocal.push(testSpan)
        SpanLocal.push(nested)

        val popped = SpanLocal.pop()
        popped shouldEqual Some(nested)
        SpanLocal.current shouldEqual Some(testSpan)
      }
      "set the MDC value on push" in {
        SpanLocal.push(testSpan)

        MDC.get("moneyTrace") shouldEqual MDCSupport.format(testSpan.id)
        MDC.get("spanName") shouldEqual testSpan.name
      }
      "remove the MDC value on pop" in {
        SpanLocal.push(testSpan)
        SpanLocal.pop()

        MDC.get("moneyTrace") shouldBe null
        MDC.get("spanName") shouldBe null
      }
      "reset the MDC value on pop" in {
        SpanLocal.push(testSpan)
        SpanLocal.push(childSpan)

        MDC.get("moneyTrace") shouldEqual MDCSupport.format(childSpan.id)
        MDC.get("spanName") shouldEqual childSpan.name

        SpanLocal.pop()

        MDC.get("moneyTrace") shouldEqual MDCSupport.format(testSpan.id)
        MDC.get("spanName") shouldEqual testSpan.name
      }
      "remove the MDC value on clear" in {
        SpanLocal.push(testSpan)

        MDC.get("moneyTrace") shouldEqual MDCSupport.format(testSpan.id)
        MDC.get("spanName") shouldEqual testSpan.name
        SpanLocal.clear()

        MDC.get("moneyTrace") shouldBe null
        MDC.get("spanName") shouldBe null
      }
    }
  }
} 
Example 86
Source File: TraceFriendlyExecutionContextExecutorSpec.scala    From money   with Apache License 2.0 5 votes vote down vote up
package com.comcast.money.core.concurrent

import com.comcast.money.api.SpanId
import com.comcast.money.core.SpecHelpers
import com.comcast.money.core.internal.SpanLocal
import org.mockito.Mockito._
import org.scalatest.mockito.MockitoSugar
import org.scalatest.{ BeforeAndAfterEach, Matchers, OneInstancePerTest, WordSpec }
import org.slf4j.MDC

import scala.concurrent.duration._
import scala.concurrent.{ Await, ExecutionContext, Future }

class TraceFriendlyExecutionContextExecutorSpec extends WordSpec
  with Matchers
  with MockitoSugar
  with OneInstancePerTest
  with ConcurrentSupport
  with SpecHelpers
  with BeforeAndAfterEach {

  import com.comcast.money.core.concurrent.TraceFriendlyExecutionContextExecutor.Implicits.global

  override def beforeEach() = {
    SpanLocal.clear()
    MDC.clear()
  }

  // brings in the implicit executor

  "TraceFriendlyExecutionContext" should {
    "propagate the current trace local value" in {
      val originalSpanId = new SpanId("1", 2L, 3L)
      val originalSpan = testSpan(originalSpanId)
      SpanLocal.push(originalSpan)

      val future = Future {
        SpanLocal.current.get.info.id
      }

      val futureResult = Await.result(future, 100 millis)
      futureResult shouldEqual originalSpanId
    }
    "propagate no span value if none is present" in {
      SpanLocal.clear()

      val future = Future {
        SpanLocal.current
      }

      val futureResult = Await.result(future, 100 millis)
      futureResult shouldEqual None
    }
    "propagate only the latest span id value" in {
      val spanId1 = new SpanId()
      val spanId2 = new SpanId()
      SpanLocal.push(testSpan(spanId1))
      SpanLocal.push(testSpan(spanId2))

      val future = Future {
        SpanLocal.current.get.info.id
      }

      val futureResult = Await.result(future, 100 millis)
      futureResult shouldEqual spanId2
    }
    "delegate reportFailure to the wrapped executor" in {
      val mockExecutionContext = mock[ExecutionContext]
      val traceFriendly = TraceFriendlyExecutionContextExecutor(mockExecutionContext)
      val failure = new IllegalArgumentException()

      traceFriendly.reportFailure(failure)
      verify(mockExecutionContext).reportFailure(failure)
    }
    "propogate MDC data" in {
      MDC.put("FINGERPRINT", "print")
      val future = Future {
        MDC.get("FINGERPRINT")
      }
      MDC.get("FINGERPRINT") shouldEqual "print"
      Await.result(future, 100 millis) shouldEqual "print"
    }

    "Child MDC should not escape to parent " in {
      val future = Future {
        MDC.put("FINGERPRINT", "print")
        MDC.get("FINGERPRINT")
      }
      MDC.get("FINGERPRINT") shouldBe null
      Await.result(future, 100 millis) shouldEqual "print"
    }
  }
} 
Example 87
Source File: AkkaMoneyScope.scala    From money   with Apache License 2.0 5 votes vote down vote up
package com.comcast.money.akka

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.testkit.TestKit
import com.comcast.money.akka.SpanHandlerMatchers.clearHandlerChain
import com.typesafe.config.ConfigFactory
import org.scalatest.{ BeforeAndAfterAll, BeforeAndAfterEach, Matchers, WordSpecLike }

abstract class AkkaMoneyScope extends WordSpecLike with Matchers with BeforeAndAfterAll with BeforeAndAfterEach {

  val configString: String =
    """
      | money {
      |  handling = {
      |    async = false
      |    handlers = [
      |    {
      |      class = "com.comcast.money.akka.CollectingSpanHandler"
      |      log-level = "INFO"
      |    }]
      |  }
      | }""".stripMargin

  implicit val actorSystem: ActorSystem = ActorSystem("MoneyAkkaScope", ConfigFactory.parseString(configString))

  implicit val moneyExtension: MoneyExtension = MoneyExtension(actorSystem)

  implicit val matierializer: ActorMaterializer = ActorMaterializer()

  override def afterAll(): Unit = TestKit.shutdownActorSystem(actorSystem)

  override def beforeEach(): Unit = clearHandlerChain
} 
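A hedged sketch of a test built on the scope above; the stream under test is illustrative and only uses members that AkkaMoneyScope already provides:

// Hypothetical suite; materialization uses the scope's implicit materializer.
import akka.stream.scaladsl.{Sink, Source}
import scala.concurrent.Await
import scala.concurrent.duration._

class StreamScopeSpec extends AkkaMoneyScope {
  "a simple stream" should {
    "complete using the scope's materializer" in {
      val sum = Await.result(Source(1 to 3).runWith(Sink.fold(0)(_ + _)), 2.seconds)
      sum shouldBe 6
    }
  }
}
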
Example 88
Source File: BitmapAnalyzeStatisticsSuite.scala    From OAP   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.sql.execution.datasources.oap.index

import org.apache.hadoop.fs.RawLocalFileSystem
import org.scalatest.BeforeAndAfterEach

import org.apache.spark.sql.{QueryTest, Row}
import org.apache.spark.sql.test.oap.SharedOapContext
import org.apache.spark.util.Utils


trait SharedOapContextWithRawLocalFileSystem extends SharedOapContext {
  oapSparkConf.set("spark.hadoop.fs.file.impl", classOf[RawLocalFileSystem].getName)
}

class BitmapAnalyzeStatisticsSuite extends QueryTest with SharedOapContextWithRawLocalFileSystem
    with BeforeAndAfterEach {
  import testImplicits._

  override def beforeEach(): Unit = {
    val tempDir = Utils.createTempDir()
    val path = tempDir.getAbsolutePath
    sql(s"""CREATE TEMPORARY VIEW oap_test (a INT, b STRING)
            | USING oap
            | OPTIONS (path '$path')""".stripMargin)
  }

  override def afterEach(): Unit = {
    sqlContext.dropTempTable("oap_test")
  }

  test("Bitmap index typical equal test") {
    val data: Seq[(Int, String)] = (1 to 200).map { i => (i, s"this is test $i") }
    data.toDF("key", "value").createOrReplaceTempView("t")
    sql("insert overwrite table oap_test select * from t")
    sql("create oindex idxa on oap_test (a) USING BITMAP")
    checkAnswer(sql(s"SELECT * FROM oap_test WHERE a = 20 OR a = 21"),
      Row(20, "this is test 20") :: Row(21, "this is test 21") :: Nil)
    sql("drop oindex idxa on oap_test")
  }
} 
Example 89
Source File: ClusteredFilterSuite.scala    From OAP   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.sql.execution.datasources.oap.cluster

import org.scalatest.BeforeAndAfterEach

import org.apache.spark.sql.{QueryTest, Row}
import org.apache.spark.sql.test.oap.SharedOapLocalClusterContext
import org.apache.spark.util.Utils

class ClusteredFilterSuite
  extends QueryTest with SharedOapLocalClusterContext with BeforeAndAfterEach {

  import testImplicits._

  private var currentPath: String = _

  override def beforeEach(): Unit = {
    val path = Utils.createTempDir().getAbsolutePath
    currentPath = path
    sql(s"""CREATE TEMPORARY VIEW oap_test (a INT, b STRING)
           | USING oap
           | OPTIONS (path '$path')""".stripMargin)
  }

  override def afterEach(): Unit = {
    sqlContext.dropTempTable("oap_test")
  }

  test("filtering") {
    val data: Seq[(Int, String)] = (1 to 300).map { i => (i, s"this is test $i") }
    data.toDF("key", "value").createOrReplaceTempView("t")
    sql("insert overwrite table oap_test select * from t")
    sql("create oindex index1 on oap_test (a)")

    checkAnswer(sql("SELECT * FROM oap_test WHERE a = 1"),
      Row(1, "this is test 1") :: Nil)

    checkAnswer(sql("SELECT * FROM oap_test WHERE a > 1 AND a <= 3"),
      Row(2, "this is test 2") :: Row(3, "this is test 3") :: Nil)

    checkAnswer(sql("SELECT * FROM oap_test WHERE a <= 2"),
      Row(1, "this is test 1") :: Row(2, "this is test 2") :: Nil)

    checkAnswer(sql("SELECT * FROM oap_test WHERE a >= 300"),
      Row(300, "this is test 300") :: Nil)

    sql("drop oindex index1 on oap_test")
  }

} 
Example 90
Source File: StatisticsTest.scala    From OAP   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.sql.execution.datasources.oap.statistics

import java.io.ByteArrayOutputStream

import scala.collection.mutable.ArrayBuffer

import org.scalatest.BeforeAndAfterEach

import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.BaseOrdering
import org.apache.spark.sql.catalyst.expressions.codegen.GenerateOrdering
import org.apache.spark.sql.execution.datasources.oap.filecache.FiberCache
import org.apache.spark.sql.execution.datasources.oap.index.RangeInterval
import org.apache.spark.sql.execution.datasources.oap.utils.{NonNullKeyReader, NonNullKeyWriter}
import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}
import org.apache.spark.unsafe.Platform
import org.apache.spark.unsafe.memory.MemoryBlock
import org.apache.spark.unsafe.types.UTF8String

abstract class StatisticsTest extends SparkFunSuite with BeforeAndAfterEach {

  protected def rowGen(i: Int): InternalRow = InternalRow(i, UTF8String.fromString(s"test#$i"))

  protected lazy val schema: StructType = StructType(StructField("a", IntegerType)
    :: StructField("b", StringType) :: Nil)
  @transient
  protected lazy val nnkw: NonNullKeyWriter = new NonNullKeyWriter(schema)
  @transient
  protected lazy val nnkr: NonNullKeyReader = new NonNullKeyReader(schema)
  @transient
  protected lazy val ordering: BaseOrdering = GenerateOrdering.create(schema)
  @transient
  protected lazy val partialOrdering: BaseOrdering =
    GenerateOrdering.create(StructType(schema.dropRight(1)))
  protected var out: ByteArrayOutputStream = _

  protected var intervalArray: ArrayBuffer[RangeInterval] = new ArrayBuffer[RangeInterval]()

  override def beforeEach(): Unit = {
    out = new ByteArrayOutputStream(8000)
  }

  override def afterEach(): Unit = {
    out.close()
    intervalArray.clear()
  }

  protected def generateInterval(
      start: InternalRow, end: InternalRow,
      startInclude: Boolean, endInclude: Boolean): Unit = {
    intervalArray.clear()
    intervalArray.append(new RangeInterval(start, end, startInclude, endInclude))
  }

  protected def checkInternalRow(row1: InternalRow, row2: InternalRow): Unit = {
    val res = row1 == row2 // structural comparison of the two rows is sufficient here
    assert(res, s"row1: $row1 does not match $row2")
  }

  protected def wrapToFiberCache(out: ByteArrayOutputStream): FiberCache = {
    val bytes = out.toByteArray
    FiberCache(bytes)
  }
} 
Example 91
Source File: DataFiberReaderWriterSuite.scala    From OAP   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.sql.execution.datasources.oap.io

import org.apache.parquet.it.unimi.dsi.fastutil.ints.IntArrayList
import org.scalatest.BeforeAndAfterEach

import org.apache.spark.SparkFunSuite
import org.apache.spark.internal.Logging
import org.apache.spark.sql.execution.datasources.oap.filecache.{FiberCache, TestFiberCache}
import org.apache.spark.sql.execution.vectorized.Dictionary
import org.apache.spark.sql.test.oap.SharedOapContext

abstract class DataFiberReaderWriterSuite extends SparkFunSuite with SharedOapContext
  with BeforeAndAfterEach with Logging {

  protected val total: Int = 10000
  protected val start: Int = 4096
  protected val num: Int = 4096
  protected val ints: Array[Int] = Array[Int](1, 667, 9999)
  protected val rowIdList = {
    val ret = new IntArrayList(3)
    ints.foreach(ret.add)
    ret
  }

  protected var fiberCache: FiberCache = _
  protected override def afterEach(): Unit = {
    if (fiberCache !== null) {
      new TestFiberCache(fiberCache).free()
      fiberCache = null
    }
  }
  protected def dictionary: Dictionary

} 
Example 92
Source File: FileSkipSuite.scala    From OAP   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.sql.execution.datasources.oap.io

import org.scalatest.BeforeAndAfterEach

import org.apache.spark.sql.QueryTest
import org.apache.spark.sql.test.oap.SharedOapContext
import org.apache.spark.util.Utils

class FileSkipSuite extends QueryTest with SharedOapContext with BeforeAndAfterEach {
  import testImplicits._

  override def beforeEach(): Unit = {
    val path1 = Utils.createTempDir().getAbsolutePath

    sql(s"""CREATE TEMPORARY VIEW oap_test_1 (a INT, b STRING)
           | USING oap
           | OPTIONS (path '$path1')""".stripMargin)
  }

  override def afterEach(): Unit = {
    sqlContext.dropTempTable("oap_test_1")
  }

  test("skip all file (is not null)") {
    val data: Seq[(Int, String)] =
      scala.util.Random.shuffle(1 to 300).map(i => (i, null)).toSeq
    data.toDF("key", "value").createOrReplaceTempView("t")
    sql("insert overwrite table oap_test_1 select * from t")
    val result = sql("SELECT * FROM oap_test_1 WHERE b is not null")
    assert(result.count == 0)
  }

  test("skip all file (equal)") {
    val data: Seq[(Int, String)] =
      scala.util.Random.shuffle(1 to 300).map(i => (i, s"this is test $i")).toSeq
    data.toDF("key", "value").createOrReplaceTempView("t")
    sql("insert overwrite table oap_test_1 select * from t")
    val result1 = sql("SELECT * FROM oap_test_1 WHERE a = 1")
    assert(result1.count == 1)
    val result2 = sql("SELECT * FROM oap_test_1 WHERE a = 500")
    assert(result2.count == 0)
  }

  test("skip all file (lt)") {
    val data: Seq[(Int, String)] =
      scala.util.Random.shuffle(1 to 300).map(i => (i, s"this is test $i")).toSeq
    data.toDF("key", "value").createOrReplaceTempView("t")
    sql("insert overwrite table oap_test_1 select * from t")
    val result1 = sql("SELECT * FROM oap_test_1 WHERE a < 1")
    assert(result1.count == 0)
    val result2 = sql("SELECT * FROM oap_test_1 WHERE a < 2")
    assert(result2.count == 1)
  }

  test("skip all file (lteq)") {
    val data: Seq[(Int, String)] =
      scala.util.Random.shuffle(1 to 300).map(i => (i, s"this is test $i")).toSeq
    data.toDF("key", "value").createOrReplaceTempView("t")
    sql("insert overwrite table oap_test_1 select * from t")
    val result1 = sql("SELECT * FROM oap_test_1 WHERE a <= 0")
    assert(result1.count == 0)
    val result2 = sql("SELECT * FROM oap_test_1 WHERE a <= 1")
    assert(result2.count == 1)
  }

  test("skip all file (gt)") {
    val data: Seq[(Int, String)] =
      scala.util.Random.shuffle(1 to 300).map(i => (i, s"this is test $i")).toSeq
    data.toDF("key", "value").createOrReplaceTempView("t")
    sql("insert overwrite table oap_test_1 select * from t")
    val result1 = sql("SELECT * FROM oap_test_1 WHERE a > 300")
    assert(result1.count == 0)
    val result2 = sql("SELECT * FROM oap_test_1 WHERE a > 2")
    assert(result2.count == 298)
  }

  test("skip all file (gteq)") {
    val data: Seq[(Int, String)] =
      scala.util.Random.shuffle(1 to 300).map(i => (i, s"this is test $i")).toSeq
    data.toDF("key", "value").createOrReplaceTempView("t")
    sql("insert overwrite table oap_test_1 select * from t")
    val result1 = sql("SELECT * FROM oap_test_1 WHERE a >= 300")
    assert(result1.count == 1)
    val result2 = sql("SELECT * FROM oap_test_1 WHERE a >= 500")
    assert(result2.count == 0)
  }
} 
Example 93
Source File: OapSharedSQLContext.scala    From OAP   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.sql.test

import scala.concurrent.duration._

import org.scalatest.{BeforeAndAfterEach, Suite}
import org.scalatest.concurrent.Eventually

import org.apache.spark.{DebugFilesystem, SparkConf}
import org.apache.spark.sql.{SparkSession, SQLContext}
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.oap.OapRuntime


trait OapSharedSQLContext extends SQLTestUtils with OapSharedSparkSession


// The members below belong to the companion OapSharedSparkSession trait; its
// declaration, the shared `_spark` session field and the beforeAll bootstrap were
// elided from this excerpt, so only the teardown and per-test hooks remain.
  protected override def afterAll(): Unit = {
    try {
      super.afterAll()
    } finally {
      try {
        if (_spark != null) {
          try {
            _spark.sessionState.catalog.reset()
          } finally {
            OapRuntime.stop()
            _spark.stop()
            _spark = null
          }
        }
      } finally {
        SparkSession.clearActiveSession()
        SparkSession.clearDefaultSession()
      }
    }
  }

  protected override def beforeEach(): Unit = {
    super.beforeEach()
    DebugFilesystem.clearOpenStreams()
  }

  protected override def afterEach(): Unit = {
    super.afterEach()
    // Clear all persistent datasets after each test
    spark.sharedState.cacheManager.clearCache()
    // files can be closed from other threads, so wait a bit
    // normally this doesn't take more than 1s
    eventually(timeout(10.seconds), interval(2.seconds)) {
      DebugFilesystem.assertNoOpenStreams()
    }
  }
} 
Example 94
Source File: TestContext.scala    From freestyle   with Apache License 2.0 5 votes vote down vote up
package freestyle.free.cache.redis

import _root_.redis.embedded.RedisServer
import _root_.redis.RedisClient
import akka.actor.ActorSystem
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, Suite}

trait RedisTestContext extends BeforeAndAfterAll with BeforeAndAfterEach { self: Suite =>

  private[this] val server: RedisServer = new RedisServer()

  private[this] implicit val actorSystem: ActorSystem =
    ActorSystem.create("testing")
  val client: RedisClient =
    RedisClient(host = "localhost", port = server.getPort)

  override def beforeAll = {
    server.start()
    ()
  }
  override def afterAll = {
    server.stop()
    actorSystem.terminate()
    ()
  }
  override def beforeEach = {
    client.flushdb
    ()
  }
} 
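
A minimal usage sketch (assumed, not part of the freestyle project; the suite name and keys are illustrative): a spec that mixes in RedisTestContext gets an embedded Redis server for the whole run and a flushed keyspace before every test.

package freestyle.free.cache.redis

import scala.concurrent.Await
import scala.concurrent.duration._

import org.scalatest.{FunSuite, Matchers}

class RedisTestContextUsageSpec extends FunSuite with Matchers with RedisTestContext {

  test("a key written in one test is visible within that test") {
    Await.result(client.set("greeting", "hello"), 2.seconds) shouldBe true
    Await.result(client.get[String]("greeting"), 2.seconds) shouldBe Some("hello")
  }

  test("beforeEach flushes the keyspace, so the key is gone here") {
    Await.result(client.get[String]("greeting"), 2.seconds) shouldBe None
  }
}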
Example 95
Source File: BaseResourceIT.scala    From spark-atlas-connector   with Apache License 2.0 5 votes vote down vote up
package com.hortonworks.spark.atlas

import scala.collection.JavaConverters._
import com.sun.jersey.core.util.MultivaluedMapImpl
import org.apache.atlas.AtlasClientV2
import org.apache.atlas.model.SearchFilter
import org.apache.atlas.model.instance.AtlasEntity
import org.apache.atlas.model.typedef.{AtlasStructDef, AtlasTypesDef}
import org.apache.atlas.utils.AuthenticationUtil
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, FunSuite}

abstract class BaseResourceIT extends FunSuite with BeforeAndAfterAll with BeforeAndAfterEach {

  protected var atlasUrls: Array[String] = null
  private var client: AtlasClientV2 = null
  protected val atlasClientConf = new AtlasClientConf
  private var uniquePostfix: Long = 0

  override protected def beforeAll(): Unit = {
    super.beforeAll()

    // set high timeouts so that tests do not fail due to read timeouts while you
    // are stepping through the code in a debugger
    atlasClientConf.set("atlas.client.readTimeoutMSecs", "100000000")
    atlasClientConf.set("atlas.client.connectTimeoutMSecs", "100000000")
    atlasUrls = Array(atlasClientConf.get(AtlasClientConf.ATLAS_REST_ENDPOINT))
  }

  override protected def beforeEach(): Unit = {
    super.beforeEach()

    uniquePostfix = System.currentTimeMillis()
  }

  private def atlasClient(): AtlasClientV2 = {
    if (client == null) {
      if (!AuthenticationUtil.isKerberosAuthenticationEnabled) {
        client = new AtlasClientV2(atlasUrls, Array[String]("admin", "admin"))
      } else {
        client = new AtlasClientV2(atlasUrls: _*)
      }
    }

    client
  }

  protected def getTypeDef(name: String): AtlasStructDef = {
    require(atlasClient != null)

    val searchParams = new MultivaluedMapImpl()
    searchParams.add(SearchFilter.PARAM_NAME, name)
    val searchFilter = new SearchFilter(searchParams)
    val typesDef = atlasClient.getAllTypeDefs(searchFilter)
    if (!typesDef.getClassificationDefs.isEmpty) {
      typesDef.getClassificationDefs.get(0)
    } else if (!typesDef.getEntityDefs.isEmpty) {
      typesDef.getEntityDefs.get(0)
    } else if (!typesDef.getRelationshipDefs.isEmpty) {
      typesDef.getRelationshipDefs.get(0)
    } else {
      null
    }
  }

  protected def updateTypesDef(typesDef: AtlasTypesDef): Unit = {
    require(atlasClient != null)

    atlasClient.updateAtlasTypeDefs(typesDef)
  }

  protected def deleteTypesDef(typesDef: AtlasTypesDef): Unit = {
    require(atlasClient != null)

    atlasClient.deleteAtlasTypeDefs(typesDef)
  }

  protected def getEntity(typeName: String, uniqueAttr: String): AtlasEntity = {
    require(atlasClient != null)

    atlasClient.getEntityByAttribute(typeName,
        Map(org.apache.atlas.AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME -> uniqueAttr).asJava)
      .getEntity
  }

  protected def it(desc: String)(testFn: => Unit): Unit = {
    test(desc) {
      assume(
        sys.env.get("ATLAS_INTEGRATION_TEST").contains("true"),
        "integration test can be run only when env ATLAS_INTEGRATION_TEST is set and local Atlas" +
          " is running")
      testFn
    }
  }

  protected def uniqueName(name: String): String = {
    s"${name}_$uniquePostfix"
  }
} 
Example 96
Source File: UpickleSpec.scala    From incubator-retired-gearpump   with Apache License 2.0 5 votes vote down vote up
package org.apache.gearpump.services.util

import org.scalatest.{BeforeAndAfterEach, FlatSpec, Matchers}
import upickle.default.{read, write}

import org.apache.gearpump.cluster.UserConfig
import org.apache.gearpump.metrics.Metrics.{Counter, MetricType}
import org.apache.gearpump.services.util.UpickleUtil._
import org.apache.gearpump.streaming.ProcessorId
import org.apache.gearpump.streaming.appmaster.{ProcessorSummary, StreamAppMasterSummary}
import org.apache.gearpump.util.Graph

class UpickleSpec extends FlatSpec with Matchers with BeforeAndAfterEach {

  "UserConfig" should "serialize and deserialize with upickle correctly" in {
    val conf = UserConfig.empty.withString("key", "value")
    val serialized = write(conf)
    val deserialized = read[UserConfig](serialized)
    assert(deserialized.getString("key") == Some("value"))
  }

  "Graph" should "be able to serialize/deserialize correctly" in {
    val graph = new Graph[Int, String](List(0, 1), List((0, "edge", 1)))
    val serialized = write(graph)

    val deserialized = read[Graph[Int, String]](serialized)

    graph.getVertices.toSet shouldBe deserialized.getVertices.toSet
    graph.getEdges.toSet shouldBe deserialized.getEdges.toSet
  }

  "MetricType" should "be able to serialize/deserialize correctly" in {
    val metric: MetricType = Counter("counter", 100L)
    val serialized = write(metric)
    val deserialized = read[MetricType](serialized)
    metric shouldBe deserialized
  }

  "StreamingAppMasterDataDetail" should "serialize and deserialize with upickle correctly" in {
    val app = new StreamAppMasterSummary(appId = 0,
      processors = Map.empty[ProcessorId, ProcessorSummary],
      processorLevels = Map.empty[ProcessorId, Int]
    )

    val serialized = write(app)
    val deserialized = read[StreamAppMasterSummary](serialized)
    assert(deserialized == app)
  }
} 
Example 97
Source File: TaskRegistrySpec.scala    From incubator-retired-gearpump   with Apache License 2.0 5 votes vote down vote up
package org.apache.gearpump.streaming.appmaster

import org.apache.gearpump.cluster.scheduler.Resource
import org.apache.gearpump.streaming.appmaster.TaskRegistry.{Accept, Reject, TaskLocation, TaskLocations}
import org.apache.gearpump.streaming.task.TaskId
import org.apache.gearpump.transport.HostPort
import org.scalatest.{BeforeAndAfterEach, FlatSpec, Matchers}
class TaskRegistrySpec extends FlatSpec with Matchers with BeforeAndAfterEach {

  it should "maintain registered tasks" in {
    val task0 = TaskId(0, 0)
    val task1 = TaskId(0, 1)
    val task2 = TaskId(0, 2)

    val register = new TaskRegistry(expectedTasks = List(task0, task1, task2))
    val host1 = HostPort("127.0.0.1:3000")
    val host2 = HostPort("127.0.0.1:3001")

    val executorId = 0
    assert(Accept == register.registerTask(task0, TaskLocation(executorId, host1)))
    assert(Accept == register.registerTask(task1, TaskLocation(executorId, host1)))
    assert(Accept == register.registerTask(task2, TaskLocation(executorId, host2)))

    assert(Reject == register.registerTask(TaskId(100, 0), TaskLocation(executorId, host2)))

    assert(register.isAllTasksRegistered)
    val TaskLocations(taskLocations) = register.getTaskLocations
    val tasksOnHost1 = taskLocations.get(host1).get
    val tasksOnHost2 = taskLocations.get(host2).get
    assert(tasksOnHost1.contains(task0))
    assert(tasksOnHost1.contains(task1))
    assert(tasksOnHost2.contains(task2))

    assert(register.getExecutorId(task0) == Some(executorId))
    assert(register.isTaskRegisteredForExecutor(executorId))

    register.processorExecutors(0) shouldBe Map(
      executorId -> List(task0, task1, task2)
    )

    register.usedResource.resources shouldBe Map(
      executorId -> Resource(3)
    )
  }
} 
Example 98
Source File: ApplicationStatusSpec.scala    From incubator-retired-gearpump   with Apache License 2.0 5 votes vote down vote up
package org.apache.gearpump.cluster

import org.scalatest.{BeforeAndAfterEach, FlatSpec, Matchers}

class ApplicationStatusSpec extends FlatSpec with Matchers with BeforeAndAfterEach {

  "ApplicationStatus" should "check status transition properly" in {
    val pending = ApplicationStatus.PENDING
    assert(!pending.canTransitTo(ApplicationStatus.NONEXIST))
    assert(pending.canTransitTo(ApplicationStatus.PENDING))
    assert(pending.canTransitTo(ApplicationStatus.ACTIVE))
    assert(pending.canTransitTo(ApplicationStatus.SUCCEEDED))

    val active = ApplicationStatus.ACTIVE
    assert(active.canTransitTo(ApplicationStatus.SUCCEEDED))
    assert(active.canTransitTo(ApplicationStatus.PENDING))
    assert(!active.canTransitTo(ApplicationStatus.ACTIVE))
    assert(!active.canTransitTo(ApplicationStatus.NONEXIST))

    val succeed = ApplicationStatus.SUCCEEDED
    assert(!succeed.canTransitTo(ApplicationStatus.NONEXIST))
    assert(!succeed.canTransitTo(ApplicationStatus.SUCCEEDED))
    assert(!succeed.canTransitTo(ApplicationStatus.FAILED))
  }
} 
Example 99
Source File: InMemoryKVServiceSpec.scala    From incubator-retired-gearpump   with Apache License 2.0 5 votes vote down vote up
package org.apache.gearpump.cluster.appmaster

import akka.actor.Props
import akka.testkit.TestProbe
import com.typesafe.config.Config
import org.apache.gearpump.cluster.master.InMemoryKVService
import org.apache.gearpump.cluster.master.InMemoryKVService._
import org.apache.gearpump.cluster.{MasterHarness, TestUtil}
import org.scalatest.{BeforeAndAfterEach, FlatSpec, Matchers}

import scala.concurrent.duration._

class InMemoryKVServiceSpec
  extends FlatSpec with Matchers with BeforeAndAfterEach with MasterHarness {

  override def beforeEach(): Unit = {
    startActorSystem()
  }

  override def afterEach(): Unit = {
    shutdownActorSystem()
  }

  override def config: Config = TestUtil.MASTER_CONFIG

  "KVService" should "get, put, delete correctly" in {
    val system = getActorSystem
    val kvService = system.actorOf(Props(new InMemoryKVService()))
    val group = "group"

    val client = TestProbe()(system)

    client.send(kvService, PutKV(group, "key", 1))
    client.expectMsg(PutKVSuccess)

    client.send(kvService, PutKV(group, "key", 2))
    client.expectMsg(PutKVSuccess)

    client.send(kvService, GetKV(group, "key"))
    client.expectMsg(GetKVSuccess("key", 2))

    client.send(kvService, DeleteKVGroup(group))

    // After DeleteGroup, it no longer accept Get and Put message for this group.
    client.send(kvService, GetKV(group, "key"))
    client.expectNoMsg(3.seconds)

    client.send(kvService, PutKV(group, "key", 3))
    client.expectNoMsg(3.seconds)
  }
} 
Example 100
Source File: AppMasterLauncherSpec.scala    From incubator-retired-gearpump   with Apache License 2.0 5 votes vote down vote up
package org.apache.gearpump.cluster.master

import org.apache.gearpump.cluster.worker.WorkerId

import scala.util.Success

import akka.actor._
import akka.testkit.TestProbe
import com.typesafe.config.Config
import org.scalatest.{BeforeAndAfterEach, FlatSpec, Matchers}

import org.apache.gearpump.cluster.AppMasterToMaster.RequestResource
import org.apache.gearpump.cluster.AppMasterToWorker.{LaunchExecutor, ShutdownExecutor}
import org.apache.gearpump.cluster.MasterToAppMaster.ResourceAllocated
import org.apache.gearpump.cluster.MasterToClient.SubmitApplicationResult
import org.apache.gearpump.cluster.WorkerToAppMaster.ExecutorLaunchRejected
import org.apache.gearpump.cluster.scheduler.{Resource, ResourceAllocation, ResourceRequest}
import org.apache.gearpump.cluster.{MasterHarness, TestUtil}
import org.apache.gearpump.util.ActorSystemBooter._

class AppMasterLauncherSpec extends FlatSpec with Matchers
  with BeforeAndAfterEach with MasterHarness {

  override def config: Config = TestUtil.DEFAULT_CONFIG

  val appId = 1
  val executorId = 2
  var master: TestProbe = null
  var client: TestProbe = null
  var worker: TestProbe = null
  var watcher: TestProbe = null
  var appMasterLauncher: ActorRef = null

  override def beforeEach(): Unit = {
    startActorSystem()
    master = createMockMaster()
    client = TestProbe()(getActorSystem)
    worker = TestProbe()(getActorSystem)
    watcher = TestProbe()(getActorSystem)
    appMasterLauncher = getActorSystem.actorOf(AppMasterLauncher.props(appId, executorId,
      TestUtil.dummyApp, None, "username", master.ref, Some(client.ref)))
    watcher watch appMasterLauncher
    master.expectMsg(RequestResource(appId, ResourceRequest(Resource(1), WorkerId.unspecified)))
    val resource = ResourceAllocated(
      Array(ResourceAllocation(Resource(1), worker.ref, WorkerId(0, 0L))))
    master.reply(resource)
    worker.expectMsgType[LaunchExecutor]
  }

  override def afterEach(): Unit = {
    shutdownActorSystem()
  }

  "AppMasterLauncher" should "launch appmaster correctly" in {
    worker.reply(RegisterActorSystem("systempath"))
    worker.expectMsgType[ActorSystemRegistered]

    worker.expectMsgType[CreateActor]
    worker.reply(ActorCreated(master.ref, "appmaster"))

    client.expectMsg(SubmitApplicationResult(Success(appId)))
    watcher.expectTerminated(appMasterLauncher)
  }

  "AppMasterLauncher" should "reallocate resource if executor launch rejected" in {
    worker.reply(ExecutorLaunchRejected(""))
    master.expectMsg(RequestResource(appId, ResourceRequest(Resource(1), WorkerId.unspecified)))

    val resource = ResourceAllocated(
      Array(ResourceAllocation(Resource(1), worker.ref, WorkerId(0, 0L))))
    master.reply(resource)
    worker.expectMsgType[LaunchExecutor]

    worker.reply(RegisterActorSystem("systempath"))
    worker.expectMsgType[ActorSystemRegistered]

    worker.expectMsgType[CreateActor]
    worker.reply(CreateActorFailed("", new Exception))
    worker.expectMsgType[ShutdownExecutor]
    assert(client.receiveN(1).head.asInstanceOf[SubmitApplicationResult].appId.isFailure)
    watcher.expectTerminated(appMasterLauncher)
  }
} 
Example 101
Source File: ApplicationMetaDataSpec.scala    From incubator-retired-gearpump   with Apache License 2.0 5 votes vote down vote up
package org.apache.gearpump.cluster.master

import org.apache.gearpump.cluster.AppDescription
import org.scalatest.{BeforeAndAfterEach, FlatSpec, Matchers}
import org.apache.gearpump.cluster.appmaster.ApplicationMetaData

class ApplicationMetaDataSpec extends FlatSpec with Matchers with BeforeAndAfterEach {

  "ApplicationMetaData" should "check equal with respect to only appId and attemptId" in {
    val appDescription = AppDescription("app", "AppMaster", null)
    val metaDataA = ApplicationMetaData(0, 0, appDescription, null, null)
    val metaDataB = ApplicationMetaData(0, 0, appDescription, null, null)
    val metaDataC = ApplicationMetaData(0, 1, appDescription, null, null)

    assert(metaDataA == metaDataB)
    assert(metaDataA.hashCode == metaDataB.hashCode)
    assert(metaDataA != metaDataC)
  }
} 
Example 102
Source File: SharedSparkContext.scala    From arangodb-spark-connector   with Apache License 2.0 5 votes vote down vote up
package com.arangodb.spark

import org.scalatest.{ BeforeAndAfterAll, BeforeAndAfterEach }
import org.scalatest.Suite
import org.apache.spark.SparkContext
import org.apache.spark.SparkConf


trait SharedSparkContext extends BeforeAndAfterAll { self: Suite =>

  @transient private var _sc: SparkContext = _
  def sc: SparkContext = _sc
  val conf = new SparkConf(false)
    .setMaster("local")
    .setAppName("test")

  override def beforeAll() {
    super.beforeAll()
    _sc = new SparkContext(conf)
  }

  override def afterAll() {
    try {
      _sc.stop()
      _sc = null
    } finally {
      super.afterAll()
    }
  }

} 
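
As a hedged sketch of how this trait is consumed (the suite below is illustrative, not part of the connector), any test class can mix in SharedSparkContext and reuse the single SparkContext built in beforeAll:

package com.arangodb.spark

import org.scalatest.{FunSuite, Matchers}

class SharedSparkContextUsageSpec extends FunSuite with Matchers with SharedSparkContext {

  test("the shared SparkContext is available to every test") {
    // sc is created once in beforeAll and stopped in afterAll by the trait
    sc.parallelize(1 to 10).sum() shouldBe 55.0
  }
}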
Example 103
Source File: ArangoSparkSSLReadTest.scala    From arangodb-spark-connector   with Apache License 2.0 5 votes vote down vote up
package com.arangodb.spark

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.catalyst.encoders.ExpressionEncoder
import org.scalatest.BeforeAndAfterAll
import org.scalatest.BeforeAndAfterEach
import org.scalatest.FunSuite
import org.scalatest.Matchers
import collection.JavaConverters._
import com.arangodb.ArangoDB
import com.arangodb.ArangoDBException
import com.arangodb.velocypack.VPackBuilder
import com.arangodb.velocypack.ValueType
import scala.reflect.ClassTag
import com.arangodb.spark.rdd.partition.ArangoPartitionerSinglePartition
import org.scalatest.Ignore

@Ignore
class ArangoSparkSSLReadTest extends FunSuite with Matchers with BeforeAndAfterAll with BeforeAndAfterEach with SharedSparkContextSSL {

  val DB = "spark_test_db"
  val COLLECTION = "spark_test_col"
  val arangoDB = new ArangoDB.Builder().build()

  override def beforeAll() {
    super.beforeAll()
    try {
      arangoDB.db(DB).drop()
    } catch {
      case e: ArangoDBException =>
    }
    arangoDB.createDatabase(DB)
    arangoDB.db(DB).createCollection(COLLECTION)
    val documents = sc.parallelize((1 to 100).map { i => TestEntity(i) })
    ArangoSpark.save(documents, COLLECTION, WriteOptions(DB))
  }

  override def afterAll() {
    try {
      arangoDB.db(DB).drop()
      arangoDB.shutdown()
    } finally {
      super.afterAll()
    }
  }

  test("load all documents from collection") {
    val rdd = ArangoSpark.load[TestEntity](sc, COLLECTION, ReadOptions(DB))
    rdd.count() should be(100)
  }

  test("load with custom partionier") {
    val rdd = ArangoSpark.load[TestEntity](sc, COLLECTION, ReadOptions(DB, partitioner = new ArangoPartitionerSinglePartition()))
    rdd.count() should be(100)
  }

  test("load documents from collection with filter statement") {
    val rdd = ArangoSpark.load[TestEntity](sc, COLLECTION, ReadOptions(DB))
    val rdd2 = rdd.filter("doc.test <= 50")
    rdd2.count() should be(50)
  }
} 
Example 104
Source File: ArangoSparkSSLWriteTest.scala    From arangodb-spark-connector   with Apache License 2.0 5 votes vote down vote up
package com.arangodb.spark

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.catalyst.encoders.ExpressionEncoder
import org.scalatest.BeforeAndAfterAll
import org.scalatest.BeforeAndAfterEach
import org.scalatest.FunSuite
import org.scalatest.Matchers

import com.arangodb.ArangoDB
import com.arangodb.ArangoDBException
import com.arangodb.velocypack.VPackBuilder
import com.arangodb.velocypack.ValueType
import org.scalatest.Ignore

@Ignore
class ArangoSparkSSLWriteTest extends FunSuite with Matchers with BeforeAndAfterAll with BeforeAndAfterEach with SharedSparkContextSSL {

  val DB = "spark_test_db"
  val COLLECTION = "spark_test_col"
  val arangoDB = new ArangoDB.Builder().build()

  override def beforeAll() {
    super.beforeAll()
    try {
      arangoDB.db(DB).drop()
    } catch {
      case e: ArangoDBException =>
    }
    arangoDB.createDatabase(DB)
    arangoDB.db(DB).createCollection(COLLECTION)
  }

  override def afterAll() {
    try {
      arangoDB.db(DB).drop()
      arangoDB.shutdown()
    } finally {
      super.afterAll()
    }
  }

  override def afterEach() {
    arangoDB.db(DB).collection(COLLECTION).truncate()
  }

  private def checkDocumentCount(count: Int) {
    arangoDB.db(DB).collection(COLLECTION).count().getCount should be(count)
  }

  test("save RDD to ArangoDB") {
    checkDocumentCount(0)

    val documents = sc.parallelize((1 to 100).map { i => TestEntity(i) })
    ArangoSpark.save(documents, COLLECTION, WriteOptions(DB))

    checkDocumentCount(100)
  }

  test("save RDD[VPackSlice] to ArangoDB") {
    checkDocumentCount(0)

    val documents = sc.parallelize((1 to 100).map { i => new VPackBuilder().add(ValueType.OBJECT).add("test", Integer.valueOf(i)).close().slice() })
    ArangoSpark.save(documents, COLLECTION, WriteOptions(DB))

    checkDocumentCount(100)
  }

  test("save DataFrame to ArangoDB") {
    checkDocumentCount(0)

    val documents = sc.parallelize((1 to 100).map { i => TestEntity(i) })
    val sql: SQLContext = SQLContext.getOrCreate(sc)
    val df = sql.createDataFrame(documents, classOf[TestEntity])
    ArangoSpark.saveDF(df, COLLECTION, WriteOptions(DB))

    checkDocumentCount(100)
  }

  test("save Dataset to ArangoDB") {
    checkDocumentCount(0)

    val documents = sc.parallelize((1 to 100).map { i => TestEntity(i) })
    val sql: SQLContext = SQLContext.getOrCreate(sc)
    val encoder = ExpressionEncoder.javaBean(classOf[TestEntity])
    val ds = sql.createDataset(documents)(encoder)
    ArangoSpark.save(ds, COLLECTION, WriteOptions(DB))

    checkDocumentCount(100)
  }

} 
Example 105
Source File: SharedSparkContextSSL.scala    From arangodb-spark-connector   with Apache License 2.0 5 votes vote down vote up
package com.arangodb.spark

import org.scalatest.{ BeforeAndAfterAll, BeforeAndAfterEach }
import org.scalatest.Suite
import org.apache.spark.SparkContext
import org.apache.spark.SparkConf


trait SharedSparkContextSSL extends BeforeAndAfterAll { self: Suite =>

  @transient private var _sc: SparkContext = _
  def sc: SparkContext = _sc
  val conf = new SparkConf(false)
    .setMaster("local")
    .setAppName("test")
    .set("arangodb.user", "root")
    .set("arangodb.password", "")
    .set("arangodb.hosts", "127.0.0.1:8530")
    .set("arangodb.useSsl", true.toString)
    .set("arangodb.ssl.keyStoreFile", this.getClass().getResource("/example.truststore").getFile())
    .set("arangodb.ssl.passPhrase", "12345678")

  override def beforeAll() {
    super.beforeAll()
    _sc = new SparkContext(conf)
  }

  override def afterAll() {
    try {
      _sc.stop()
      _sc = null
    } finally {
      super.afterAll()
    }
  }

} 
Example 106
Source File: ArangoSparkReadTest.scala    From arangodb-spark-connector   with Apache License 2.0 5 votes vote down vote up
package com.arangodb.spark

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.catalyst.encoders.ExpressionEncoder
import org.scalatest.BeforeAndAfterAll
import org.scalatest.BeforeAndAfterEach
import org.scalatest.FunSuite
import org.scalatest.Matchers
import collection.JavaConverters._
import com.arangodb.ArangoDB
import com.arangodb.ArangoDBException
import com.arangodb.velocypack.VPackBuilder
import com.arangodb.velocypack.ValueType
import scala.reflect.ClassTag
import com.arangodb.spark.rdd.partition.ArangoPartitionerSinglePartition
import org.scalatest.Ignore
import com.arangodb.entity.LoadBalancingStrategy

class ArangoSparkReadTest extends FunSuite with Matchers with BeforeAndAfterAll with BeforeAndAfterEach with SharedSparkContext {

  val DB = "spark_test_db"
  val COLLECTION = "spark_test_col"
  val arangoDB = new ArangoDB.Builder().build()

  override def beforeAll() {
    super.beforeAll()
    try {
      arangoDB.db(DB).drop()
    } catch {
      case e: ArangoDBException =>
    }
    arangoDB.createDatabase(DB)
    arangoDB.db(DB).createCollection(COLLECTION)
    val documents = sc.parallelize((1 to 100).map { i => TestEntity(i) })
    ArangoSpark.save(documents, COLLECTION, WriteOptions(DB))
  }

  override def afterAll() {
    try {
      arangoDB.db(DB).drop()
      arangoDB.shutdown()
    } finally {
      super.afterAll()
    }
  }

  test("load all documents from collection") {
    val rdd = ArangoSpark.load[TestEntity](sc, COLLECTION, ReadOptions(DB))
    rdd.count() should be(100)
  }

  test("load with custom partionier") {
    val rdd = ArangoSpark.load[TestEntity](sc, COLLECTION, ReadOptions(DB, partitioner = new ArangoPartitionerSinglePartition()))
    rdd.count() should be(100)
  }

  test("load documents from collection with filter statement") {
    val rdd = ArangoSpark.load[TestEntity](sc, COLLECTION, ReadOptions(DB))
    val rdd2 = rdd.filter("doc.test <= 50")
    rdd2.count() should be(50)
  }
  
  test("load all documents from collection with load balancing") {
    val rdd = ArangoSpark.load[TestEntity](sc, COLLECTION, ReadOptions(DB).acquireHostList(false).loadBalancingStrategy(LoadBalancingStrategy.ROUND_ROBIN))
    rdd.count() should be(100)
  }

} 
Example 107
Source File: LogAnalyticsStreamingQueryListenerSuite.scala    From spark-monitoring   with MIT License 5 votes vote down vote up
package org.apache.spark.sql.streaming

import java.util.UUID

import org.apache.spark.listeners.ListenerSuite
import org.apache.spark.sql.streaming.StreamingQueryListener.{QueryProgressEvent, QueryStartedEvent, QueryTerminatedEvent}
import org.scalatest.BeforeAndAfterEach

import scala.collection.JavaConversions.mapAsJavaMap

object LogAnalyticsStreamingQueryListenerSuite {
  val queryStartedEvent = new QueryStartedEvent(UUID.randomUUID, UUID.randomUUID, "name")
  val queryTerminatedEvent = new QueryTerminatedEvent(UUID.randomUUID, UUID.randomUUID, None)
  val queryProgressEvent = new QueryProgressEvent(
    new StreamingQueryProgress(
      UUID.randomUUID,
      UUID.randomUUID,
      null,
      ListenerSuite.EPOCH_TIME_AS_ISO8601,
      2L,
      mapAsJavaMap(Map("total" -> 0L)),
      mapAsJavaMap(Map.empty[String, String]),
      Array(new StateOperatorProgress(
        0, 1, 2)),
      Array(
        new SourceProgress(
          "source",
          "123",
          "456",
          678,
          Double.NaN,
          Double.NegativeInfinity
        )
      ),
      new SinkProgress("sink")
    )
  )
}

class LogAnalyticsStreamingQueryListenerSuite extends ListenerSuite
  with BeforeAndAfterEach {

  test("should invoke sendToSink for QueryStartedEvent with full class name") {
    val (json, event) = this.onStreamingQueryListenerEvent(
      LogAnalyticsStreamingQueryListenerSuite.queryStartedEvent
    )

    this.assertEvent(json, event)
  }

  test("should invoke sendToSink for QueryTerminatedEvent with full class name") {
    val (json, event) = this.onStreamingQueryListenerEvent(
      LogAnalyticsStreamingQueryListenerSuite.queryTerminatedEvent
    )

    this.assertEvent(json, event)
  }

  test("should invoke sendToSink for QueryProgressEvent with full class name") {
    val (json, event) = this.onStreamingQueryListenerEvent(
      LogAnalyticsStreamingQueryListenerSuite.queryProgressEvent
    )

    this.assertEvent(json, event)
  }

  test("QueryProgressEvent should have expected SparkEventTime") {
    val (json, _) = this.onStreamingQueryListenerEvent(
      LogAnalyticsStreamingQueryListenerSuite.queryProgressEvent
    )

    this.assertSparkEventTime(
      json,
      (_, value) => assert(value.extract[String] === ListenerSuite.EPOCH_TIME_AS_ISO8601)
    )
  }

  test("QueryStartedEvent should have SparkEventTime") {
    val (json, _) = this.onStreamingQueryListenerEvent(
      LogAnalyticsStreamingQueryListenerSuite.queryStartedEvent
    )
    this.assertSparkEventTime(
      json,
      (_, value) => assert(!value.extract[String].isEmpty)
    )
  }

  test("QueryTerminatedEvent should have SparkEventTime") {
    val (json, _) = this.onStreamingQueryListenerEvent(
      LogAnalyticsStreamingQueryListenerSuite.queryTerminatedEvent
    )
    this.assertSparkEventTime(
      json,
      (_, value) => assert(!value.extract[String].isEmpty)
    )
  }
} 
Example 108
Source File: PerTestSparkContext.scala    From spark-testing-base   with Apache License 2.0 5 votes vote down vote up
package com.holdenkarau.spark.testing

import org.apache.spark._

import org.scalatest.BeforeAndAfterAll
import org.scalatest.BeforeAndAfterEach
import org.scalatest.Suite


trait PerTestSparkContext extends LocalSparkContext with BeforeAndAfterEach
    with SparkContextProvider { self: Suite =>

  override def beforeEach() {
    sc = new SparkContext(conf)
    setup(sc)
    super.beforeEach()
  }

  override def afterEach() {
    super.afterEach()
  }
} 
Example 109
Source File: LagomClientFactorySpec.scala    From lagom   with Apache License 2.0 5 votes vote down vote up
package com.lightbend.lagom.javadsl.client.integration

import akka.actor.Actor
import akka.actor.ActorRef
import akka.actor.ActorSystem
import akka.actor.Props
import com.typesafe.config.ConfigFactory
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.BeforeAndAfterEach

import scala.concurrent.duration._
import scala.concurrent.Await
import akka.pattern._
import akka.stream.SystemMaterializer
import akka.util.Timeout
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

class LagomClientFactorySpec extends AnyFlatSpec with Matchers with BeforeAndAfterEach with ScalaFutures {
  private var system: ActorSystem = _
  private var echoActor: ActorRef = _
  implicit val timeout            = Timeout(5.seconds)

  
  "LagomClientFactory" should "when using a unmanaged actor system, shoudl not terminate it upon closing" in {
    // check that actor system is operational
    (echoActor ? "hey").mapTo[String].futureValue shouldBe "hey"

    LagomClientFactory
    // create a factory by passing the existing ActorSystem
      .create(
        "test",
        this.getClass.getClassLoader,
        system,
        SystemMaterializer(system).materializer
      )
      // closing the factory should not close the existing ActorSystem
      .close()

    // check that actor system is still operational
    (echoActor ? "hey").mapTo[String].futureValue shouldBe "hey"
  }

  protected override def beforeEach(): Unit = {
    system = ActorSystem("test", ConfigFactory.load())
    echoActor = system.actorOf(Props(new EchoActor), "echo")
  }

  class EchoActor extends Actor {
    override def receive: Receive = {
      case s: String => sender() ! s
    }
  }
  protected override def afterEach(): Unit = {
    Await.ready(system.terminate(), 5.seconds)
  }
} 
Example 110
Source File: SparkLaunchTest.scala    From spark-bench   with Apache License 2.0 5 votes vote down vote up
package com.ibm.sparktc.sparkbench.sparklaunch

import com.ibm.sparktc.sparkbench.testfixtures.BuildAndTeardownData
import org.scalatest.{BeforeAndAfterEach, FlatSpec, Matchers}

class SparkLaunchTest extends FlatSpec with Matchers with BeforeAndAfterEach {
  val dataShiznit = new BuildAndTeardownData("multi-spark")

  override def beforeEach(): Unit = {
    super.beforeEach()
    dataShiznit.deleteFolders()
    dataShiznit.createFolders()
    dataShiznit.generateKMeansData(1000, 5, dataShiznit.kmeansFile) // scalastyle:ignore
  }

  override def afterEach(): Unit = {
    dataShiznit.deleteFolders()
  }
  "Launching Spark" should "work" in {
    assert(sys.env.get("SPARK_HOME").nonEmpty)
    val relativePath = "/etc/testConfFile1.conf"
    val resource = getClass.getResource(relativePath)
    val path = resource.getPath
    SparkLaunch.main(Array(path))
  }
} 
Example 111
Source File: SparkLaunchOneSparkContextPerRunTest.scala    From spark-bench   with Apache License 2.0 5 votes vote down vote up
package com.ibm.sparktc.sparkbench.sparklaunch

import com.ibm.sparktc.sparkbench.testfixtures.BuildAndTeardownData
import org.scalatest.{BeforeAndAfterEach, FlatSpec, Matchers}

class SparkLaunchOneSparkContextPerRunTest extends FlatSpec with Matchers with BeforeAndAfterEach {
  val dataShiznit = new BuildAndTeardownData("spark-launch-one-spark-context-per-run")

  override def beforeEach(): Unit = {
    super.beforeEach()
    dataShiznit.createFolders()
    dataShiznit.generateKMeansData(1000, 5, dataShiznit.kmeansFile)
  }

  override def afterEach(): Unit = {
    dataShiznit.deleteFolders()
  }
  "Launching Spark" should "work" in {
    val relativePath = "/etc/craig-test.conf"
    val resource = getClass.getResource(relativePath)
    val path = resource.getPath
    SparkLaunch.main(Array(path))
  }
} 
Example 112
Source File: NotebookSimTest.scala    From spark-bench   with Apache License 2.0 5 votes vote down vote up
package com.ibm.sparktc.sparkbench

import com.ibm.sparktc.sparkbench.cli.CLIKickoff
import com.ibm.sparktc.sparkbench.testfixtures.BuildAndTeardownData
import org.scalatest.{BeforeAndAfterEach, FlatSpec, Matchers}

import scala.io.Source

class NotebookSimTest extends FlatSpec with Matchers with BeforeAndAfterEach with Capturing {
  val dataMaker = new BuildAndTeardownData("notebook-sim-test")

  val giantData = s"${dataMaker.sparkBenchTestFolder}/giant-kmeans-data.parquet"
  val tinyData = s"${dataMaker.sparkBenchTestFolder}/tiny-kmeans-data.parquet"

  override def beforeEach(): Unit = {
    super.beforeEach()
    dataMaker.deleteFolders()
    dataMaker.createFolders()
    dataMaker.generateKMeansData(400000, 50, giantData)
    dataMaker.generateKMeansData(100, 5, tinyData)
  }

  override def afterEach(): Unit = {
    dataMaker.deleteFolders()
  }

  "Simulating two notebook users" should "work" in {
    val relativePath = "/etc/notebook-sim.conf"
    val resource = getClass.getResource(relativePath)
    val path = resource.getPath
    val text = Source.fromFile(path).mkString
    CLIKickoff.main(Array(text))
  }



} 
Example 113
Source File: KMeansWorkloadTest.scala    From spark-bench   with Apache License 2.0 5 votes vote down vote up
package com.ibm.sparktc.sparkbench.workload.ml

import java.io.File

import com.holdenkarau.spark.testing.Utils
import com.ibm.sparktc.sparkbench.testfixtures.SparkSessionProvider
import com.ibm.sparktc.sparkbench.utils.SaveModes
import com.ibm.sparktc.sparkbench.utils.SparkFuncs.{load, writeToDisk}
import org.apache.spark.mllib.util.KMeansDataGenerator
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, Row}
import org.apache.spark.sql.types.{DoubleType, StructField, StructType}
import org.scalatest.{BeforeAndAfterEach, FlatSpec, Matchers}

class KMeansWorkloadTest extends FlatSpec with Matchers with BeforeAndAfterEach {
  private val spark = SparkSessionProvider.spark
  private val fileName = s"/tmp/spark-bench-scalatest/kmeans-${java.util.UUID.randomUUID.toString}.csv"

  override def afterEach() {
    Utils.deleteRecursively(new File(fileName))
  }

  def makeDataFrame(): DataFrame = {
    val data: RDD[Array[Double]] = KMeansDataGenerator.generateKMeansRDD(
      spark.sparkContext, 1, 1, 1, KMeansWorkload.scaling, KMeansWorkload.numOfPartitions
    )
    val schemaString = data.first().indices.map(_.toString).mkString(" ")
    val fields = schemaString.split(" ").map(fieldName => StructField(fieldName, DoubleType, nullable = false))
    val schema = StructType(fields)
    val rowRDD = data.map(arr => Row(arr: _*))
    spark.createDataFrame(rowRDD, schema)
  }

  "reconcileSchema" should "handle a StringType schema and turn it into a DoubleType Schema" in {
    val df2Disk = makeDataFrame()
    writeToDisk(fileName, SaveModes.error, df2Disk, spark, Some("csv"))
    val conf = Map("name" -> "kmeans", "input" -> fileName)
    val work = KMeansWorkload(conf)
    val df = load(spark, fileName)
    val ddf = work.reconcileSchema(df)
    ddf.schema.head.dataType shouldBe DoubleType
  }

  "The load function" should "parse the DataFrame it's given into an RDD[Vector]" in {
    val df = makeDataFrame()
    val conf = Map("name" -> "kmeans", "input" -> "")
    val work = KMeansWorkload(conf)
    val ddf = work.reconcileSchema(df)
    val (_, rdd) = work.loadToCache(ddf, spark)
    rdd.first()
  }

  it should "work even when we've pulled the data from disk" in {
    val df2Disk = makeDataFrame()
    writeToDisk(fileName, SaveModes.error, df2Disk, spark, Some("csv"))
    val conf = Map("name" -> "kmeans", "input" -> fileName)
    val work = KMeansWorkload(conf)
    val df = load(spark, fileName)
    val ddf = work.reconcileSchema(df)
    val (_, rdd) = work.loadToCache(ddf, spark)
    rdd.first()
  }

  "doWorkload" should "work" in {
    val df2Disk = makeDataFrame()
    writeToDisk(fileName, SaveModes.error, df2Disk, spark, Some("csv"))
    val conf = Map("name" -> "kmeans", "input" -> fileName)
    val work = KMeansWorkload(conf)
    val df = load(spark, fileName)
    val ddf = work.reconcileSchema(df)
    work.doWorkload(Some(ddf), spark)
  }
} 
Example 114
Source File: SparkPiTest.scala    From spark-bench   with Apache License 2.0 5 votes vote down vote up
package com.ibm.sparktc.sparkbench.workload.exercise

import com.ibm.sparktc.sparkbench.testfixtures.SparkSessionProvider
import org.scalatest.{BeforeAndAfterEach, FlatSpec, Matchers}

class SparkPiTest extends FlatSpec with Matchers with BeforeAndAfterEach {
  val spark = SparkSessionProvider.spark

  "SparkPi" should "instantiate and run" in {
    val workload = SparkPi(Map("name" -> "sparkpi", "slices" -> 4))
    val res = workload.doWorkload(None, spark).collect
    res.length shouldBe 1
    val row = res(0)
    row.length shouldBe 4
    row.getAs[String]("name") shouldBe "sparkpi"
    row.getAs[Double]("pi_approximate") shouldBe 3.14 +- 1
  }
} 
Example 115
Source File: GraphDataGenTest.scala    From spark-bench   with Apache License 2.0 5 votes vote down vote up
package com.ibm.sparktc.sparkbench.datageneration

import java.io.File

import com.ibm.sparktc.sparkbench.testfixtures.{BuildAndTeardownData, SparkSessionProvider}
import com.ibm.sparktc.sparkbench.utils.SparkBenchException
import org.apache.spark.graphx.GraphLoader
import org.scalatest.{BeforeAndAfterEach, FlatSpec, Matchers}

class GraphDataGenTest extends FlatSpec with Matchers with BeforeAndAfterEach {
  val cool = new BuildAndTeardownData("graph-data-gen")

  val fileName = s"${cool.sparkBenchTestFolder}/${java.util.UUID.randomUUID.toString}.txt"

  var file: File = _

  override def beforeEach() {
    cool.createFolders()
    file = new File(fileName)
  }

  override def afterEach() {
    cool.deleteFolders()
  }

  "GraphDataGeneration" should "generate data correctly with all default options" in {

    val m = Map(
      "name" -> "graph-data-generator",
      "vertices" -> 100,
      "output" -> fileName
    )
    val generator = GraphDataGen(m)
    generator.doWorkload(spark = SparkSessionProvider.spark)
    val res = GraphLoader.edgeListFile(SparkSessionProvider.spark.sparkContext, fileName)

    res.vertices.count() shouldBe m("vertices")
  }

  it should "throw an error for any output format but .txt" in {
    val m1 = Map(
      "name" -> "graph-data-generator",
      "vertices" -> 100,
      "output" -> "/my-cool-file.csv"
    )
    val m2 = Map(
      "name" -> "graph-data-generator",
      "vertices" -> 100,
      "output" -> "/my-cool-file.parquet"
    )
    val m3 = Map(
      "name" -> "graph-data-generator",
      "vertices" -> 100,
      "output" -> "/my-cool-file.tsv"
    )

    a [SparkBenchException] should be thrownBy GraphDataGen(m1)
    a [SparkBenchException] should be thrownBy GraphDataGen(m2)
    a [SparkBenchException] should be thrownBy GraphDataGen(m3)
  }

  it should "throw errors when required values are missing" in {
    // Missing vertices
    val m1 = Map(
      "name" -> "graph-data-generator",
      "output" -> "/my-cool-file.csv"
    )
    // Missing output file name
    val m2 = Map(
      "name" -> "graph-data-generator",
      "vertices" -> 100
    )
    a [SparkBenchException] should be thrownBy GraphDataGen(m1)
    a [SparkBenchException] should be thrownBy GraphDataGen(m2)
  }
} 
Example 116
Source File: LinearRegDataGenTest.scala    From spark-bench   with Apache License 2.0 5 votes vote down vote up
package com.ibm.sparktc.sparkbench.datageneration

import java.io.File

import com.ibm.sparktc.sparkbench.datageneration.mlgenerator.LinearRegressionDataGen
import com.ibm.sparktc.sparkbench.testfixtures.{BuildAndTeardownData, SparkSessionProvider}
import com.ibm.sparktc.sparkbench.utils.SparkBenchException
import com.ibm.sparktc.sparkbench.utils.SparkFuncs.load
import org.scalatest.{BeforeAndAfterEach, FlatSpec, Matchers}

class LinearRegDataGenTest extends FlatSpec with Matchers with BeforeAndAfterEach {
  val cool = new BuildAndTeardownData("linear-reg-datagen")

  val fileName = s"${cool.sparkBenchTestFolder}/${java.util.UUID.randomUUID.toString}.parquet"
  val csvFileName = s"${cool.sparkBenchTestFolder}/${java.util.UUID.randomUUID.toString}.csv"

  var file: File = _

  override def beforeEach() {
    file = new File(fileName)
    cool.createFolders()
  }

  override def afterEach() {
    cool.deleteFolders()
  }

  "LinearRegressionDataGen" should "generate data correctly for Parquet output" in {

    val m = Map(
      "name" -> "kmeans",
      "rows" -> 10,
      "cols" -> 10,
      "output" -> fileName
    )

    val generator = LinearRegressionDataGen(m)

    generator.doWorkload(spark = SparkSessionProvider.spark)

//    val fileList = file.listFiles().toList.filter(_.getName.startsWith("part"))
    val fileContents = load(SparkSessionProvider.spark, fileName, Some("parquet"))
    val length: Long = fileContents.count()

    length shouldBe generator.numRows
  }

  //TODO ultimately fix LabeledPoints being output to CSV. Surely there's a way...
  it should "throw an exception when somebody tries to output to CSV" in {
    a [SparkBenchException] should be thrownBy {
      val m = Map(
        "name" -> "kmeans",
        "rows" -> 10,
        "cols" -> 10,
        "output" -> csvFileName
      )
      val generator = LinearRegressionDataGen(m)
      generator.doWorkload(spark = SparkSessionProvider.spark)
    }
  }

} 
Example 117
Source File: WireMockSupport.scala    From self-assessment-api   with Apache License 2.0 5 votes vote down vote up
package support.wiremock

import com.github.tomakehurst.wiremock.WireMockServer
import com.github.tomakehurst.wiremock.client.WireMock
import com.github.tomakehurst.wiremock.core.WireMockConfiguration.wireMockConfig
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, Suite}

trait WireMockConfig {
  val wiremockHost = "localhost"
  val wiremockPort = 11111
  val wiremockUrl = s"http://$wiremockHost:$wiremockPort"
}

trait WireMockSupport extends WireMockConfig
  with BeforeAndAfterAll
  with BeforeAndAfterEach { _: Suite =>

  private val wireMockServer = new WireMockServer(wireMockConfig().port(wiremockPort))

  override protected def beforeAll(): Unit = {
    super.beforeAll()
    wireMockServer.start()
    WireMock.configureFor(wiremockHost, wiremockPort)
  }

  override protected def afterAll(): Unit = {
    wireMockServer.stop()
    super.afterAll()
  }

  override protected def beforeEach(): Unit = {
    super.beforeEach()
    WireMock.reset()
  }
} 
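
A minimal sketch of a spec built on this trait (the /ping endpoint and the suite are assumptions for illustration, not part of the original test suite): WireMock.reset() in beforeEach clears stubs between tests, so each test declares its own.

package support.wiremock

import scala.io.Source

import com.github.tomakehurst.wiremock.client.WireMock._
import org.scalatest.{Matchers, WordSpec}

class PingEndpointISpec extends WordSpec with Matchers with WireMockSupport {

  "a stubbed endpoint" should {
    "respond with the configured body" in {
      // stubFor talks to the server configured in beforeAll via configureFor
      stubFor(get(urlEqualTo("/ping"))
        .willReturn(aResponse().withStatus(200).withBody("pong")))

      Source.fromURL(s"$wiremockUrl/ping").mkString shouldBe "pong"

      verify(getRequestedFor(urlEqualTo("/ping")))
    }
  }
}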
Example 118
Source File: Mock.scala    From self-assessment-api   with Apache License 2.0 5 votes vote down vote up
package mocks

import org.mockito.{ArgumentMatchers => Matchers}
import org.mockito.Mockito
import org.mockito.stubbing.OngoingStubbing
import org.mockito.verification.VerificationMode
import org.scalatest.{BeforeAndAfterEach, Suite}
import org.scalatestplus.mockito.MockitoSugar

trait Mock extends MockitoSugar with BeforeAndAfterEach { _: Suite =>

  // predefined mocking functions to avoid importing
  def any[T]() = Matchers.any[T]()
  def eqTo[T](t: T) = Matchers.eq[T](t)
  def when[T](t: T) = Mockito.when(t)
  def reset[T](t: T) = Mockito.reset(t)
  def verify[T](mock: T, mode: VerificationMode) = Mockito.verify(mock, mode)
  def times(num: Int) = Mockito.times(num)

  implicit class stubbingOps[T](stubbing: OngoingStubbing[T]){
    def returns(t: T) = stubbing.thenReturn(t)
  }
} 
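
A short, assumed sketch of how these helpers read in practice (the Greeter trait and spec are made up for illustration): the stubbingOps implicit class lets a stub be written as when(...).returns(...).

package mocks

import org.scalatest.{Matchers, WordSpec}

trait Greeter {
  def greet(name: String): String
}

class GreeterSpec extends WordSpec with Matchers with Mock {

  "greet" should {
    "return the stubbed value and be verifiable" in {
      val greeter = mock[Greeter]
      when(greeter.greet(any[String]())) returns "hello"

      greeter.greet("world") shouldBe "hello"
      verify(greeter, times(1)).greet(any[String]())
    }
  }
}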
Example 119
Source File: HiveContextCompatibilitySuite.scala    From sparkoscope   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.sql.hive

import org.scalatest.BeforeAndAfterEach

import org.apache.spark.{SparkConf, SparkContext, SparkFunSuite}


class HiveContextCompatibilitySuite extends SparkFunSuite with BeforeAndAfterEach {

  private var sc: SparkContext = null
  private var hc: HiveContext = null

  override def beforeAll(): Unit = {
    super.beforeAll()
    sc = SparkContext.getOrCreate(new SparkConf().setMaster("local").setAppName("test"))
    HiveUtils.newTemporaryConfiguration(useInMemoryDerby = true).foreach { case (k, v) =>
      sc.hadoopConfiguration.set(k, v)
    }
    hc = new HiveContext(sc)
  }

  override def afterEach(): Unit = {
    try {
      hc.sharedState.cacheManager.clearCache()
      hc.sessionState.catalog.reset()
    } finally {
      super.afterEach()
    }
  }

  override def afterAll(): Unit = {
    try {
      sc = null
      hc = null
    } finally {
      super.afterAll()
    }
  }

  test("basic operations") {
    val _hc = hc
    import _hc.implicits._
    val df1 = (1 to 20).map { i => (i, i) }.toDF("a", "x")
    val df2 = (1 to 100).map { i => (i, i % 10, i % 2 == 0) }.toDF("a", "b", "c")
      .select($"a", $"b")
      .filter($"a" > 10 && $"b" > 6 && $"c")
    val df3 = df1.join(df2, "a")
    val res = df3.collect()
    val expected = Seq((18, 18, 8)).toDF("a", "x", "b").collect()
    assert(res.toSeq == expected.toSeq)
    df3.createOrReplaceTempView("mai_table")
    val df4 = hc.table("mai_table")
    val res2 = df4.collect()
    assert(res2.toSeq == expected.toSeq)
  }

  test("basic DDLs") {
    val _hc = hc
    import _hc.implicits._
    val databases = hc.sql("SHOW DATABASES").collect().map(_.getString(0))
    assert(databases.toSeq == Seq("default"))
    hc.sql("CREATE DATABASE mee_db")
    hc.sql("USE mee_db")
    val databases2 = hc.sql("SHOW DATABASES").collect().map(_.getString(0))
    assert(databases2.toSet == Set("default", "mee_db"))
    val df = (1 to 10).map { i => ("bob" + i.toString, i) }.toDF("name", "age")
    df.createOrReplaceTempView("mee_table")
    hc.sql("CREATE TABLE moo_table (name string, age int)")
    hc.sql("INSERT INTO moo_table SELECT * FROM mee_table")
    assert(
      hc.sql("SELECT * FROM moo_table order by name").collect().toSeq ==
      df.collect().toSeq.sortBy(_.getString(0)))
    val tables = hc.sql("SHOW TABLES IN mee_db").select("tableName").collect().map(_.getString(0))
    assert(tables.toSet == Set("moo_table", "mee_table"))
    hc.sql("DROP TABLE moo_table")
    hc.sql("DROP TABLE mee_table")
    val tables2 = hc.sql("SHOW TABLES IN mee_db").select("tableName").collect().map(_.getString(0))
    assert(tables2.isEmpty)
    hc.sql("USE default")
    hc.sql("DROP DATABASE mee_db CASCADE")
    val databases3 = hc.sql("SHOW DATABASES").collect().map(_.getString(0))
    assert(databases3.toSeq == Seq("default"))
  }

} 
Example 120
Source File: SharedSQLContext.scala    From sparkoscope   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.sql.test

import org.scalatest.BeforeAndAfterEach

import org.apache.spark.{DebugFilesystem, SparkConf}
import org.apache.spark.sql.{SparkSession, SQLContext}


// NOTE: the trait header, the TestSparkSession field and beforeAll() were dropped when this
// example was extracted; the lines below are a minimal, assumed reconstruction (SQLTestUtils
// and TestSparkSession live in the same org.apache.spark.sql.test package).
trait SharedSQLContext extends SQLTestUtils with BeforeAndAfterEach {

  private var _spark: TestSparkSession = null

  protected implicit def spark: SparkSession = _spark

  protected implicit def sqlContext: SQLContext = _spark.sqlContext

  protected override def beforeAll(): Unit = {
    if (_spark == null) {
      _spark = new TestSparkSession(
        new SparkConf().set("spark.hadoop.fs.file.impl", classOf[DebugFilesystem].getName))
    }
    super.beforeAll()
  }

  protected override def afterAll(): Unit = {
    try {
      if (_spark != null) {
        _spark.stop()
        _spark = null
      }
    } finally {
      super.afterAll()
    }
  }

  protected override def beforeEach(): Unit = {
    super.beforeEach()
    DebugFilesystem.clearOpenStreams()
  }

  protected override def afterEach(): Unit = {
    super.afterEach()
    DebugFilesystem.assertNoOpenStreams()
  }
} 
Example 121
Source File: SharedSparkContext.scala    From sparkoscope   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark

import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach}
import org.scalatest.Suite


trait SharedSparkContext extends BeforeAndAfterAll with BeforeAndAfterEach { self: Suite =>

  @transient private var _sc: SparkContext = _

  def sc: SparkContext = _sc

  var conf = new SparkConf(false)

  override def beforeAll() {
    super.beforeAll()
    _sc = new SparkContext(
      "local[4]", "test", conf.set("spark.hadoop.fs.file.impl", classOf[DebugFilesystem].getName))
  }

  override def afterAll() {
    try {
      LocalSparkContext.stop(_sc)
      _sc = null
    } finally {
      super.afterAll()
    }
  }

  protected override def beforeEach(): Unit = {
    super.beforeEach()
    DebugFilesystem.clearOpenStreams()
  }

  protected override def afterEach(): Unit = {
    super.afterEach()
    DebugFilesystem.assertNoOpenStreams()
  }
} 
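
A brief usage sketch for SharedSparkContext (WordCountSuite is hypothetical; the sketch sits in the org.apache.spark package so the trait resolves without an extra import):

package org.apache.spark

import org.scalatest.FunSuite

class WordCountSuite extends FunSuite with SharedSparkContext {

  test("reuses the SparkContext created once in beforeAll") {
    val counts = sc.parallelize(Seq("a", "b", "a"))
      .map(word => (word, 1))
      .reduceByKey(_ + _)
      .collectAsMap()
    assert(counts("a") === 2)
  }
}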
Example 122
Source File: ResetSystemProperties.scala    From sparkoscope   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.util

import java.util.Properties

import org.apache.commons.lang3.SerializationUtils
import org.scalatest.{BeforeAndAfterEach, Suite}


private[spark] trait ResetSystemProperties extends BeforeAndAfterEach { this: Suite =>
  var oldProperties: Properties = null

  override def beforeEach(): Unit = {
    // we need SerializationUtils.clone instead of `new Properties(System.getProperties())` because
    // the latter way of creating a copy does not copy the properties but initializes a new
    // Properties object with the given properties as defaults. Such defaults are then not
    // recognized at all by the standard Scala wrapper over Java Properties.
    oldProperties = SerializationUtils.clone(System.getProperties)
    super.beforeEach()
  }

  override def afterEach(): Unit = {
    try {
      super.afterEach()
    } finally {
      System.setProperties(oldProperties)
      oldProperties = null
    }
  }
} 
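
A short usage sketch for ResetSystemProperties (the suite name and property key are hypothetical; because the trait is private[spark], the sketch lives in the same package):

package org.apache.spark.util

import org.scalatest.FunSuite

class PropertyIsolationSuite extends FunSuite with ResetSystemProperties {

  test("system properties set in a test are rolled back afterwards") {
    System.setProperty("spark.test.flag", "enabled")
    assert(System.getProperty("spark.test.flag") === "enabled")
    // afterEach() reinstalls the snapshot taken in beforeEach(), so the flag
    // does not leak into the next test
  }
}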
Example 123
Source File: DiskBlockManagerSuite.scala    From sparkoscope   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.storage

import java.io.{File, FileWriter}

import scala.language.reflectiveCalls

import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach}

import org.apache.spark.{SparkConf, SparkFunSuite}
import org.apache.spark.util.Utils

class DiskBlockManagerSuite extends SparkFunSuite with BeforeAndAfterEach with BeforeAndAfterAll {
  private val testConf = new SparkConf(false)
  private var rootDir0: File = _
  private var rootDir1: File = _
  private var rootDirs: String = _

  var diskBlockManager: DiskBlockManager = _

  override def beforeAll() {
    super.beforeAll()
    rootDir0 = Utils.createTempDir()
    rootDir1 = Utils.createTempDir()
    rootDirs = rootDir0.getAbsolutePath + "," + rootDir1.getAbsolutePath
  }

  override def afterAll() {
    try {
      Utils.deleteRecursively(rootDir0)
      Utils.deleteRecursively(rootDir1)
    } finally {
      super.afterAll()
    }
  }

  override def beforeEach() {
    super.beforeEach()
    val conf = testConf.clone
    conf.set("spark.local.dir", rootDirs)
    diskBlockManager = new DiskBlockManager(conf, deleteFilesOnStop = true)
  }

  override def afterEach() {
    try {
      diskBlockManager.stop()
    } finally {
      super.afterEach()
    }
  }

  test("basic block creation") {
    val blockId = new TestBlockId("test")
    val newFile = diskBlockManager.getFile(blockId)
    writeToFile(newFile, 10)
    assert(diskBlockManager.containsBlock(blockId))
    newFile.delete()
    assert(!diskBlockManager.containsBlock(blockId))
  }

  test("enumerating blocks") {
    val ids = (1 to 100).map(i => TestBlockId("test_" + i))
    val files = ids.map(id => diskBlockManager.getFile(id))
    files.foreach(file => writeToFile(file, 10))
    assert(diskBlockManager.getAllBlocks.toSet === ids.toSet)
  }

  def writeToFile(file: File, numBytes: Int) {
    val writer = new FileWriter(file, true)
    for (i <- 0 until numBytes) writer.write(i)
    writer.close()
  }
} 
Example 124
Source File: BeforeAndAfterWithContext.scala    From uberdata   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.rpc.netty

import eleflow.uberdata.core.IUberdataContext
import eleflow.uberdata.core.util.ClusterSettings
import org.apache.log4j.{Level, Logger}
import org.apache.spark.{SparkConf, SparkEnv}
import org.scalatest.{BeforeAndAfterEach, Suite}

object TestSparkConf {
  def conf = {
    val sconf = new SparkConf()
    sconf.set("spark.app.name", "teste")
    sconf
  }

  val separator = ","

}


trait BeforeAndAfterWithContext extends BeforeAndAfterEach { this: Suite =>

  val defaultFilePath = "src/test/resources/"
  import TestSparkConf._
  ClusterSettings.master = Some("local[*]")
  conf.set("spark.driver.allowMultipleContexts", "true")
  @transient val context = IUberdataContext.getUC(conf)

  override def beforeEach() = {
    setLogLevels(Level.INFO, Seq("spark", "org.eclipse.jetty", "akka"))
  }

  def setLogLevels(level: org.apache.log4j.Level, loggers: TraversableOnce[String]) = {
    loggers.map { loggerName =>
      val logger = Logger.getLogger(loggerName)
      val prevLevel = logger.getLevel
      logger.setLevel(level)
      loggerName -> prevLevel
    }.toMap
  }

  override def afterEach() = {
    val get = SparkEnv.get
    val rpcEnv =
      if (get != null) {
        Some(get.rpcEnv)
      } else None
    context.clearContext()
    //rpcEnv.foreach(
    //  _.fileServer.asInstanceOf[org.apache.spark.rpc.netty.HttpBasedFileServer].shutdown())


    System.clearProperty("spark.master.port")
  }
} 
Example 125
Source File: TestDBSettings.scala    From scuruto   with MIT License 5 votes vote down vote up
package model

import org.flywaydb.core.Flyway
import org.scalatest.{ BeforeAndAfterEach, Suite }
import scalikejdbc.ConnectionPool
import skinny._
import skinny.exception.DBSettingsException

trait TestDBSettings extends BeforeAndAfterEach with DBSettings { this: Suite =>

  override protected def beforeEach(): Unit = {
    clean()
    dbmigration.DBMigration.migrate()
  }

  private def clean(env: String = SkinnyEnv.Test, poolName: String = ConnectionPool.DEFAULT_NAME.name): Unit = {
    val skinnyEnv = SkinnyEnv.get()
    try {
      System.setProperty(SkinnyEnv.PropertyKey, env)
      DBSettings.initialize()
      try {
        val pool = ConnectionPool.get(Symbol(poolName))
        val flyway = new Flyway
        flyway.setDataSource(pool.dataSource)
        flyway.clean()
      } catch {
        case e: IllegalStateException =>
          throw new DBSettingsException(s"ConnectionPool named $poolName is not found.")
      }
    } finally {
      skinnyEnv.foreach { env => System.setProperty(SkinnyEnv.PropertyKey, env) }
      DBSettings.initialize()
    }
  }

} 
Example 126
Source File: NettyBlockTransferSecuritySuite.scala    From SparkCore   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.network.netty

import java.nio._
import java.util.concurrent.TimeUnit

import scala.concurrent.duration._
import scala.concurrent.{Await, Promise}
import scala.util.{Failure, Success, Try}

import org.apache.commons.io.IOUtils
import org.apache.spark.network.buffer.{ManagedBuffer, NioManagedBuffer}
import org.apache.spark.network.shuffle.BlockFetchingListener
import org.apache.spark.network.{BlockDataManager, BlockTransferService}
import org.apache.spark.storage.{BlockId, ShuffleBlockId}
import org.apache.spark.{SecurityManager, SparkConf}
import org.mockito.Mockito._
import org.scalatest.mock.MockitoSugar
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, FunSuite, ShouldMatchers}

class NettyBlockTransferSecuritySuite extends FunSuite with MockitoSugar with ShouldMatchers {
  test("security default off") {
    val conf = new SparkConf()
      .set("spark.app.id", "app-id")
    testConnection(conf, conf) match {
      case Success(_) => // expected
      case Failure(t) => fail(t)
    }
  }

  test("security on same password") {
    val conf = new SparkConf()
      .set("spark.authenticate", "true")
      .set("spark.authenticate.secret", "good")
      .set("spark.app.id", "app-id")
    testConnection(conf, conf) match {
      case Success(_) => // expected
      case Failure(t) => fail(t)
    }
  }

  test("security on mismatch password") {
    val conf0 = new SparkConf()
      .set("spark.authenticate", "true")
      .set("spark.authenticate.secret", "good")
      .set("spark.app.id", "app-id")
    val conf1 = conf0.clone.set("spark.authenticate.secret", "bad")
    testConnection(conf0, conf1) match {
      case Success(_) => fail("Should have failed")
      case Failure(t) => t.getMessage should include ("Mismatched response")
    }
  }

  test("security mismatch auth off on server") {
    val conf0 = new SparkConf()
      .set("spark.authenticate", "true")
      .set("spark.authenticate.secret", "good")
      .set("spark.app.id", "app-id")
    val conf1 = conf0.clone.set("spark.authenticate", "false")
    testConnection(conf0, conf1) match {
      case Success(_) => fail("Should have failed")
      case Failure(t) => // any funny error may occur, the server will interpret the SASL token as RPC
    }
  }

  test("security mismatch auth off on client") {
    val conf0 = new SparkConf()
      .set("spark.authenticate", "false")
      .set("spark.authenticate.secret", "good")
      .set("spark.app.id", "app-id")
    val conf1 = conf0.clone.set("spark.authenticate", "true")
    testConnection(conf0, conf1) match {
      case Success(_) => fail("Should have failed")
      case Failure(t) => t.getMessage should include ("Expected SaslMessage")
    }
  }

  // NOTE: the testConnection helper that the tests above call was dropped when this example was
  // extracted; the method below is a minimal, assumed reconstruction of the upstream Spark helper.
  private def testConnection(conf0: SparkConf, conf1: SparkConf): Try[ManagedBuffer] = {
    val blockManager = mock[BlockDataManager]
    val blockId = ShuffleBlockId(0, 1, 2)
    val blockBuffer = new NioManagedBuffer(ByteBuffer.wrap("Hello, world!".getBytes))
    when(blockManager.getBlockData(blockId)).thenReturn(blockBuffer)

    // one transfer service per configuration, 1 core each
    val exec0 = new NettyBlockTransferService(conf0, new SecurityManager(conf0), 1)
    val exec1 = new NettyBlockTransferService(conf1, new SecurityManager(conf1), 1)
    exec0.init(blockManager)
    exec1.init(blockManager)

    // fetch a block from exec0 through exec1; the Try carries any SASL/auth failure
    val result = fetchBlock(exec1, exec0, "1", blockId)
    exec0.close()
    exec1.close()
    result
  }

  private def fetchBlock(
      self: BlockTransferService,
      from: BlockTransferService,
      execId: String,
      blockId: BlockId): Try[ManagedBuffer] = {

    val promise = Promise[ManagedBuffer]()

    self.fetchBlocks(from.hostName, from.port, execId, Array(blockId.toString),
      new BlockFetchingListener {
        override def onBlockFetchFailure(blockId: String, exception: Throwable): Unit = {
          promise.failure(exception)
        }

        override def onBlockFetchSuccess(blockId: String, data: ManagedBuffer): Unit = {
          promise.success(data.retain())
        }
      })

    Await.ready(promise.future, FiniteDuration(1000, TimeUnit.MILLISECONDS))
    promise.future.value.get
  }
} 
Example 127
Source File: ResetSystemProperties.scala    From SparkCore   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.util

import java.util.Properties

import org.apache.commons.lang3.SerializationUtils
import org.scalatest.{BeforeAndAfterEach, Suite}


private[spark] trait ResetSystemProperties extends BeforeAndAfterEach { this: Suite =>
  var oldProperties: Properties = null

  override def beforeEach(): Unit = {
    // we need SerializationUtils.clone instead of `new Properties(System.getProperties())` because
    // the latter way of creating a copy does not copy the properties but initializes a new
    // Properties object with the given properties as defaults. Such defaults are then not
    // recognized at all by the standard Scala wrapper over Java Properties.
    oldProperties = SerializationUtils.clone(System.getProperties)
    super.beforeEach()
  }

  override def afterEach(): Unit = {
    try {
      super.afterEach()
    } finally {
      System.setProperties(oldProperties)
      oldProperties = null
    }
  }
} 
Example 128
Source File: DiskBlockManagerSuite.scala    From SparkCore   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.storage

import java.io.{File, FileWriter}

import scala.language.reflectiveCalls

import org.mockito.Mockito.{mock, when}
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, FunSuite}

import org.apache.spark.SparkConf
import org.apache.spark.util.Utils

class DiskBlockManagerSuite extends FunSuite with BeforeAndAfterEach with BeforeAndAfterAll {
  private val testConf = new SparkConf(false)
  private var rootDir0: File = _
  private var rootDir1: File = _
  private var rootDirs: String = _

  val blockManager = mock(classOf[BlockManager])
  when(blockManager.conf).thenReturn(testConf)
  var diskBlockManager: DiskBlockManager = _

  override def beforeAll() {
    super.beforeAll()
    rootDir0 = Utils.createTempDir()
    rootDir1 = Utils.createTempDir()
    rootDirs = rootDir0.getAbsolutePath + "," + rootDir1.getAbsolutePath
  }

  override def afterAll() {
    super.afterAll()
    Utils.deleteRecursively(rootDir0)
    Utils.deleteRecursively(rootDir1)
  }

  override def beforeEach() {
    val conf = testConf.clone
    conf.set("spark.local.dir", rootDirs)
    diskBlockManager = new DiskBlockManager(blockManager, conf)
  }

  override def afterEach() {
    diskBlockManager.stop()
  }

  test("basic block creation") {
    val blockId = new TestBlockId("test")
    val newFile = diskBlockManager.getFile(blockId)
    writeToFile(newFile, 10)
    assert(diskBlockManager.containsBlock(blockId))
    newFile.delete()
    assert(!diskBlockManager.containsBlock(blockId))
  }

  test("enumerating blocks") {
    val ids = (1 to 100).map(i => TestBlockId("test_" + i))
    val files = ids.map(id => diskBlockManager.getFile(id))
    files.foreach(file => writeToFile(file, 10))
    assert(diskBlockManager.getAllBlocks.toSet === ids.toSet)
  }

  def writeToFile(file: File, numBytes: Int) {
    val writer = new FileWriter(file, true)
    for (i <- 0 until numBytes) writer.write(i)
    writer.close()
  }
} 
Example 129
Source File: MultiFilesDataLoagdingTestCase.scala    From carbondata   with Apache License 2.0 5 votes vote down vote up
package org.apache.carbondata.spark.testsuite.dataload

import org.apache.spark.sql.Row
import org.apache.spark.sql.test.util.QueryTest
import org.scalatest.BeforeAndAfterEach


class MultiFilesDataLoagdingTestCase extends QueryTest with BeforeAndAfterEach {

  override def beforeEach {
    sql("DROP TABLE IF EXISTS multifile")
    sql("CREATE TABLE multifile(empno int, empname String, designation string, doj String," +
      "workgroupcategory int, workgroupcategoryname String,deptno int, deptname String," +
      "projectcode int, projectjoindate String,projectenddate String, attendance double," +
      "utilization double,salary double) STORED AS carbondata")
  }

  test("test data loading for multi files and nested folder") {
    val testData = s"$resourcesPath/loadMultiFiles"
    sql(s"LOAD DATA LOCAL INPATH '$testData' into table multifile")
    checkAnswer(
      sql("select count(empno) from multifile"),
      Seq(Row(10))
    )
  }

  test("test data loading multiple files") {
    val testData = s"$resourcesPath/loadMultiFiles/data.csv, $resourcesPath/loadMultiFiles/non-csv"
    sql(s"LOAD DATA LOCAL INPATH '$testData' into table multifile")
    checkAnswer(
      sql("select count(empno) from multifile"),
      Seq(Row(5))
    )
  }

  override def afterEach {
    sql("DROP TABLE IF EXISTS multifile")
  }
} 
Example 130
Source File: TestBooleanCompressSuite.scala    From carbondata   with Apache License 2.0 5 votes vote down vote up
package org.apache.carbondata.spark.testsuite.booleantype.compress

import java.io.{File, PrintWriter}

import scala.util.Random

import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach}

import org.apache.spark.sql.Row
import org.apache.spark.sql.test.util.QueryTest

class TestBooleanCompressSuite extends QueryTest with BeforeAndAfterEach with BeforeAndAfterAll {

  override def beforeEach(): Unit = {
    sql("drop table if exists boolean_table")
  }

  override def afterAll(): Unit = {
    sql("drop table if exists boolean_table")
    assert(deleteFile(randomBoolean))
  }

  val randomBoolean = s"$resourcesPath/bool/supportRandomBooleanBigFile.csv"
  val trueNum = 10000000

  override def beforeAll(): Unit = {
    assert(createBooleanFileRandom(randomBoolean, trueNum, 0.5))
  }

  test("test boolean compress rate: random file") {
    sql(
      s"""
         | CREATE TABLE boolean_table(
         | booleanField BOOLEAN
         | )
         | STORED AS carbondata
       """.stripMargin)

    sql(
      s"""
         | LOAD DATA LOCAL INPATH '${randomBoolean}'
         | INTO TABLE boolean_table
         | options('FILEHEADER'='booleanField')
           """.stripMargin)

    //    Test for compress rate
    //    sql("select * from boolean_table").show(100)
    //    sql("select count(*) from boolean_table").show()
    //    sql("select count(*) from boolean_table where booleanField= true").show()
    //    sql("select count(*) from boolean_table where booleanField= false").show()
    checkAnswer(
      sql("select count(*) from boolean_table"),
      Row(trueNum))
  }

  val randomNumber = 10000
  def createBooleanFileRandom(path: String, totalLines: Int, rate: Double): Boolean = {
    try {
      val write = new PrintWriter(path)
      var d: Double = 0.0
      val random = new Random()
      for (i <- 0 until totalLines) {
        val eachNum = random.nextInt(randomNumber)
        var flag: Boolean = true
        if (eachNum >= randomNumber * rate) {
          flag = false
        }
        write.println(flag)
        d = d + 1
      }

      write.close()
    } catch {
      case _: Exception => assert(false)
    }
    return true
  }

  def deleteFile(path: String): Boolean = {
    try {
      val file = new File(path)
      file.delete()
    } catch {
      case _: Exception => assert(false)
    }
    return true
  }
} 
Example 131
Source File: StoredAsCarbondataSuite.scala    From carbondata   with Apache License 2.0 5 votes vote down vote up
package org.apache.carbondata.sql.commands

import org.apache.spark.sql.Row
import org.apache.spark.sql.test.util.QueryTest
import org.scalatest.BeforeAndAfterEach

import org.apache.carbondata.core.constants.CarbonCommonConstants

class StoredAsCarbondataSuite extends QueryTest with BeforeAndAfterEach {
  override def beforeEach(): Unit = {
    sql("DROP TABLE IF EXISTS carbon_table")
    sql("DROP TABLE IF EXISTS tableSize3")
  }

  override def afterEach(): Unit = {
    sql("DROP TABLE IF EXISTS carbon_table")
    sql("DROP TABLE IF EXISTS tableSize3")
  }

  test("CARBONDATA-2262: Support the syntax of 'STORED AS CARBONDATA', upper case") {
    sql("CREATE TABLE carbon_table(key INT, value STRING) STORED AS CARBONDATA")
    sql("INSERT INTO carbon_table VALUES (28,'Bob')")
    checkAnswer(sql("SELECT * FROM carbon_table"), Seq(Row(28, "Bob")))
  }

  test("CARBONDATA-2262: Support the syntax of 'STORED AS carbondata', lower case") {
    sql("CREATE TABLE carbon_table(key INT, value STRING) STORED AS carbondata")
    sql("INSERT INTO carbon_table VALUES (28,'Bob')")
    checkAnswer(sql("SELECT * FROM carbon_table"), Seq(Row(28, "Bob")))
  }

  test("CARBONDATA-2262: Support the syntax of 'STORED AS carbondata', get data size and index size after minor compaction") {
    sql("CREATE TABLE tableSize3 (empno INT, workgroupcategory STRING, deptno INT, projectcode INT, attendance INT) STORED AS carbondata")
    sql(s"""LOAD DATA LOCAL INPATH '$resourcesPath/data.csv' INTO TABLE tableSize3 OPTIONS ('DELIMITER'= ',', 'QUOTECHAR'= '\"', 'FILEHEADER'='')""")
    sql(s"""LOAD DATA LOCAL INPATH '$resourcesPath/data.csv' INTO TABLE tableSize3 OPTIONS ('DELIMITER'= ',', 'QUOTECHAR'= '\"', 'FILEHEADER'='')""")
    sql(s"""LOAD DATA LOCAL INPATH '$resourcesPath/data.csv' INTO TABLE tableSize3 OPTIONS ('DELIMITER'= ',', 'QUOTECHAR'= '\"', 'FILEHEADER'='')""")
    sql(s"""LOAD DATA LOCAL INPATH '$resourcesPath/data.csv' INTO TABLE tableSize3 OPTIONS ('DELIMITER'= ',', 'QUOTECHAR'= '\"', 'FILEHEADER'='')""")
    sql(s"""LOAD DATA LOCAL INPATH '$resourcesPath/data.csv' INTO TABLE tableSize3 OPTIONS ('DELIMITER'= ',', 'QUOTECHAR'= '\"', 'FILEHEADER'='')""")
    sql(s"""LOAD DATA LOCAL INPATH '$resourcesPath/data.csv' INTO TABLE tableSize3 OPTIONS ('DELIMITER'= ',', 'QUOTECHAR'= '\"', 'FILEHEADER'='')""")
    sql("ALTER TABLE tableSize3 COMPACT 'minor'")
    checkExistence(sql("DESCRIBE FORMATTED tableSize3"), true, CarbonCommonConstants.TABLE_DATA_SIZE)
    checkExistence(sql("DESCRIBE FORMATTED tableSize3"), true, CarbonCommonConstants.TABLE_INDEX_SIZE)
    val res3 = sql("DESCRIBE FORMATTED tableSize3").collect()
      .filter(row => row.getString(0).contains(CarbonCommonConstants.TABLE_DATA_SIZE) ||
        row.getString(0).contains(CarbonCommonConstants.TABLE_INDEX_SIZE))
    assert(res3.length == 2)
    res3.foreach(row => assert(row.getString(1).trim.substring(0, 3).toDouble > 0))
  }

  test("CARBONDATA-2262: Don't Support the syntax of 'STORED AS 'carbondata''") {
    try {
      sql("CREATE TABLE carbon_table(key INT, value STRING) STORED AS 'carbondata'")
    } catch {
      case e: Exception =>
        assert(e.getMessage.contains("mismatched input"))
    }
  }

  test("CARBONDATA-2262: Don't Support the syntax of 'stored by carbondata'") {
    try {
      sql("CREATE TABLE carbon_table(key INT, value STRING) STORED BY carbondata")
    } catch {
      case e: Exception =>
        assert(e.getMessage.contains("mismatched input"))
    }
  }

  test("CARBONDATA-2262: Don't Support the syntax of 'STORED AS  ', null format") {
    try {
      sql("CREATE TABLE carbon_table(key INT, value STRING) STORED AS  ")
    } catch {
      case e: Exception =>
        assert(e.getMessage.contains("no viable alternative at input") ||
        e.getMessage.contains("mismatched input '<EOF>' expecting "))
    }
  }

  test("CARBONDATA-2262: Don't Support the syntax of 'STORED AS carbon'") {
    try {
      sql("CREATE TABLE carbon_table(key INT, value STRING) STORED AS carbon")
    } catch {
      case e: Exception =>
        assert(e.getMessage.contains("Operation not allowed: STORED AS with file format 'carbon'"))
    }
  }
} 
Example 132
Source File: CouchbaseSnapshotSpec.scala    From akka-persistence-couchbase   with Apache License 2.0 5 votes vote down vote up
package akka.persistence.couchbase
import akka.actor.{ActorSystem, PoisonPill}
import akka.persistence.couchbase.TestActor.{GetLastRecoveredEvent, SaveSnapshot}
import akka.stream.ActorMaterializer
import akka.testkit.{TestKit, TestProbe, WithLogCapturing}
import com.typesafe.config.ConfigFactory
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, Matchers, WordSpecLike}

import scala.concurrent.duration._

class CouchbaseSnapshotSpec
    extends TestKit(
      ActorSystem(
        "CouchbaseSnapshotSpec",
        ConfigFactory.parseString("""
            |akka.loggers = ["akka.testkit.SilenceAllTestEventListener"]
          """.stripMargin).withFallback(ConfigFactory.load())
      )
    )
    with WordSpecLike
    with BeforeAndAfterAll
    with Matchers
    with CouchbaseBucketSetup
    with BeforeAndAfterEach
    with WithLogCapturing {
  protected override def afterAll(): Unit = {
    super.afterAll()
    shutdown(system)
  }

  val waitTime = 100.millis
  implicit val materializer = ActorMaterializer()

  "entity" should {
    "recover" in {
      val senderProbe = TestProbe()
      implicit val sender = senderProbe.ref

      {
        val pa1 = system.actorOf(TestActor.props("p1"))
        pa1 ! "p1-evt-1"
        senderProbe.expectMsg("p1-evt-1-done")

        senderProbe.watch(pa1)
        pa1 ! PoisonPill
        senderProbe.expectTerminated(pa1)
      }
      {
        val pa1 = system.actorOf(TestActor.props("p1"))

        pa1 ! GetLastRecoveredEvent
        senderProbe.expectMsg("p1-evt-1")
      }
    }
    "recover after snapshot" in {
      val senderProbe = TestProbe()
      implicit val sender = senderProbe.ref

      {
        val pa1 = system.actorOf(TestActor.props("p2"))
        pa1 ! "p2-evt-1"
        senderProbe.expectMsg("p2-evt-1-done")

        pa1 ! SaveSnapshot
        senderProbe.expectMsgType[Long]

        senderProbe.watch(pa1)
        pa1 ! PoisonPill
        senderProbe.expectTerminated(pa1)
      }
      {
        val pa1 = system.actorOf(TestActor.props("p2"))

        pa1 ! GetLastRecoveredEvent
        senderProbe.expectMsg("p2-evt-1")
      }
    }
  }
} 
Example 133
Source File: BaseAlgorithmTest.scala    From m3d-engine   with Apache License 2.0 5 votes vote down vote up
package com.adidas.utils

import java.util.UUID

import com.adidas.analytics.util.{DFSWrapper, LoadMode}
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import org.apache.spark.sql.types.StructType
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, Suite}
import org.slf4j.{Logger, LoggerFactory}

import scala.io.Source

trait BaseAlgorithmTest extends Suite with BeforeAndAfterAll with BeforeAndAfterEach with HDFSSupport with SparkSupport {

  override val logger: Logger = LoggerFactory.getLogger(getClass)
  override val testAppId: String = UUID.randomUUID().toString
  override val localTestDir: String = "target"
  override val sparkHadoopConf: Option[Configuration] = Some(fs.getConf)

  val hdfsRootTestPath: Path = new Path("hdfs:///tmp/tests")
  val dfs: DFSWrapper = DFSWrapper(spark.sparkContext.hadoopConfiguration)

  override def afterAll(): Unit = {
    spark.stop()
    cluster.shutdown(true)
  }

  override def beforeEach(): Unit = {
    fs.delete(hdfsRootTestPath, true)
    fs.mkdirs(hdfsRootTestPath)
  }

  override def afterEach(): Unit = {
    spark.sqlContext.clearCache()
    spark.sparkContext.getPersistentRDDs.foreach {
      case (_, rdd) => rdd.unpersist(true)
    }
  }

  def resolveResource(fileName: String, withProtocol: Boolean = false): String = {
    val resource = s"${getClass.getSimpleName}/$fileName"
    logger.info(s"Resolving resource $resource")
    val location = getClass.getClassLoader.getResource(resource).getPath
    if (withProtocol) {
      s"file://$location"
    } else {
      location
    }
  }

  def getResourceAsText(fileName: String): String = {
    val resource = s"${getClass.getSimpleName}/$fileName"
    logger.info(s"Reading resource $resource")
    val stream = getClass.getClassLoader.getResourceAsStream(resource)
    Source.fromInputStream(stream).mkString
  }

  def copyResourceFileToHdfs(resource: String, targetPath: Path): Unit = {
    val localResourceRoot = resolveResource("", withProtocol = true)
    val sourcePath = new Path(localResourceRoot, resource)
    logger.info(s"Copying local resource to HDFS $sourcePath -> $targetPath")
    fs.copyFromLocalFile(sourcePath, targetPath)
  }

  
  def createAndLoadParquetTable(
      database: String,
      tableName: String,
      partitionColumns: Option[Seq[String]] = None,
      schema: StructType,
      filePath: String,
      reader: FileReader): Table = {
    val table = createParquetTable(database, tableName, partitionColumns, schema)
    val inputTableDataURI = resolveResource(filePath, withProtocol = true)
    table.write(Seq(inputTableDataURI), reader, LoadMode.OverwritePartitions)
    table
  }
} 
Example 134
Source File: TestHttpServerTest.scala    From fintrospect   with Apache License 2.0 5 votes vote down vote up
package io.fintrospect.testing

import com.twitter.finagle.http.Method.Get
import com.twitter.finagle.http.Status.{Accepted, Conflict}
import com.twitter.finagle.http.{Request, Response, Status}
import com.twitter.finagle.{Http, Service}
import com.twitter.util.{Await, Future}
import io.fintrospect.RouteSpec
import org.scalatest.{BeforeAndAfterEach, FunSpec, Matchers}


class TestHttpServerTest extends FunSpec with Matchers with BeforeAndAfterEach {
  val originalStatus = Conflict

  private val server = new TestHttpServer(9888, RouteSpec().at(Get) bindTo Service.mk { r: Request => Future(Response(originalStatus)) })

  override def beforeEach() = {
    Await.result(server.start())
  }

  override def afterEach() = {
    Await.result(server.stop())
  }

  it("will serve routes that are passed to it") {
    statusShouldBe(originalStatus)
  }

  it("can override status") {
    server.respondWith(Accepted)
    statusShouldBe(Accepted)
  }

  private def statusShouldBe(expected: Status): Unit = {
    Await.result(Http.newService("localhost:9888", "")(Request())).status shouldBe expected
  }
} 
Example 135
Source File: WiremockTestServer.scala    From http-verbs   with Apache License 2.0 5 votes vote down vote up
package uk.gov.hmrc.examples.utils

import com.github.tomakehurst.wiremock.WireMockServer
import com.github.tomakehurst.wiremock.client.WireMock
import org.scalatest.BeforeAndAfterEach
import org.scalatest.wordspec.AnyWordSpecLike


trait WiremockTestServer extends AnyWordSpecLike with BeforeAndAfterEach {

  val wireMockServer = new WireMockServer(20001)

  override protected def beforeEach(): Unit = {
    wireMockServer.start()
    WireMock.configureFor("localhost", 20001)
  }

  override protected def afterEach(): Unit = {
    wireMockServer.stop()
  }
} 
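
A brief usage sketch for WiremockTestServer (PingConnectorSpec and the /ping endpoint are hypothetical; the stubbing DSL comes from WireMock's static client):

package uk.gov.hmrc.examples.utils

import com.github.tomakehurst.wiremock.client.WireMock._

class PingConnectorSpec extends WiremockTestServer {

  "the connector" should {
    "receive the response stubbed on the server started in beforeEach" in {
      stubFor(get(urlEqualTo("/ping"))
        .willReturn(aResponse().withStatus(200).withBody("pong")))

      // exercise the code under test against http://localhost:20001/ping here
      assert(wireMockServer.isRunning)
    }
  }
}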
Example 136
Source File: CommunicationLogging.scala    From scala-commons   with MIT License 5 votes vote down vote up
package com.avsystem.commons
package redis

import org.scalatest.{BeforeAndAfterEach, Suite}


trait CommunicationLogging extends BeforeAndAfterEach { this: Suite =>
  protected val listener = new TestDebugListener

  protected def assertCommunication(comm: String): Unit = {
    assert(listener.result().trim == comm.trim)
  }

  override protected def beforeEach() = {
    super.beforeEach()
    listener.clear()
  }
} 
Example 137
Source File: TestRenaming.scala    From apalache   with Apache License 2.0 5 votes vote down vote up
package at.forsyte.apalache.tla.lir

import at.forsyte.apalache.tla.lir.transformations.impl.TrackerWithListeners
import at.forsyte.apalache.tla.lir.transformations.standard.Renaming
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.{BeforeAndAfterEach, FunSuite}


@RunWith(classOf[JUnitRunner])
class TestRenaming extends FunSuite with BeforeAndAfterEach with TestingPredefs {
  import at.forsyte.apalache.tla.lir.Builder._

  private var renaming = new Renaming(TrackerWithListeners())

  override protected def beforeEach(): Unit = {
    renaming = new Renaming(TrackerWithListeners())
  }

  test("test renaming exists/forall") {
    val original =
        and(
          exists(n_x, n_S, gt(n_x, int(1))),
          forall(n_x, n_T, lt(n_x, int(42))))
    ///
    val expected =
      and(
        exists(name("x_1"), n_S, gt(name("x_1"), int(1))),
        forall(name("x_2"), n_T, lt(name("x_2"), int(42))))
    val renamed = renaming.renameBindingsUnique(original)
    assert(expected == renamed)
  }

  test("test renaming filter") {
    val original =
        cup(
          filter(name("x"), name("S"), eql(name("x"), int(1))),
          filter(name("x"), name("S"), eql(name("x"), int(2)))
        )
    val expected =
      cup(
        filter(name("x_1"), name("S"), eql(name("x_1"), int(1))),
        filter(name("x_2"), name("S"), eql(name("x_2"), int(2))))
    val renamed = renaming.renameBindingsUnique(original)
    assert(expected == renamed)
  }

  test( "Test renaming LET-IN" ) {
    // LET p(t) == \A x \in S . R(t,x) IN \E x \in S . p(x)
    val original =
      letIn(
        exists( n_x, n_S, appOp( name( "p" ), n_x ) ),
        declOp( "p", forall( n_x, n_S, appOp( name( "R" ), name( "t" ), n_x ) ), "t" )
      )

    val expected =
      letIn(
        exists( name( "x_2" ), n_S, appOp( name( "p_1" ), name( "x_2" ) ) ),
        declOp( "p_1", forall( name( "x_1" ), n_S, appOp( name( "R" ), name( "t_1" ), name( "x_1" ) ) ), "t_1" )
      )

    val actual = renaming( original )

    assert(expected == actual)
  }

  test( "Test renaming multiple LET-IN" ) {
    // LET X == TRUE IN X /\ LET X == FALSE IN X
    val original =
      and(
        letIn(
          appOp( name( "X" ) ),
          declOp( "X", trueEx )
        ),
        letIn(
          appOp( name( "X" ) ),
          declOp( "X", falseEx )
        )
      )

    val expected =
      and(
      letIn(
        appOp( name( "X_1" ) ),
        declOp( "X_1", trueEx )
      ),
      letIn(
        appOp( name( "X_2" ) ),
        declOp( "X_2", falseEx )
      )
    )

    val actual = renaming( original )

    assert(expected == actual)
  }

} 
Example 138
Source File: RewriterBase.scala    From apalache   with Apache License 2.0 5 votes vote down vote up
package at.forsyte.apalache.tla.bmcmt

import java.io.{PrintWriter, StringWriter}

import at.forsyte.apalache.tla.bmcmt.types.eager.TrivialTypeFinder
import at.forsyte.apalache.tla.lir.convenience.tla
import org.scalatest.{BeforeAndAfterEach, FunSuite}

class RewriterBase extends FunSuite with BeforeAndAfterEach {
  protected var solverContext: SolverContext = new PreproSolverContext(new Z3SolverContext())
  protected var arena: Arena = Arena.create(solverContext)

  override def beforeEach() {
    solverContext = new PreproSolverContext(new Z3SolverContext(debug = true))
    arena = Arena.create(solverContext)
  }

  override def afterEach() {
    solverContext.dispose()
  }

  protected def create(): SymbStateRewriterAuto = {
    new SymbStateRewriterAuto(solverContext)
  }

  protected def createWithoutCache(): SymbStateRewriter = {
    new SymbStateRewriterImpl(solverContext, new TrivialTypeFinder())
  }

  protected def assertUnsatOrExplain(rewriter: SymbStateRewriter, state: SymbState): Unit = {
    assertOrExplain("UNSAT", rewriter, state, !solverContext.sat())
  }

  protected def assumeTlaEx(rewriter: SymbStateRewriter, state: SymbState): SymbState = {
    val nextState = rewriter.rewriteUntilDone(state.setTheory(BoolTheory()))
    solverContext.assertGroundExpr(nextState.ex)
    assert(solverContext.sat())
    nextState
  }

  protected def assertTlaExAndRestore(rewriter: SymbStateRewriter, state: SymbState): Unit = {
    rewriter.push()
    val nextState = rewriter.rewriteUntilDone(state.setTheory(BoolTheory()))
    assert(solverContext.sat())
    rewriter.push()
    solverContext.assertGroundExpr(nextState.ex)
    assert(solverContext.sat())
    rewriter.pop()
    rewriter.push()
    solverContext.assertGroundExpr(tla.not(nextState.ex))
    assertUnsatOrExplain(rewriter, nextState)
    rewriter.pop()
    rewriter.pop()
  }

  private def assertOrExplain(msg: String, rewriter: SymbStateRewriter,
                              state: SymbState, outcome: Boolean): Unit = {
    if (!outcome) {
      val writer = new StringWriter()
      new SymbStateDecoder(solverContext, rewriter).dumpArena(state, new PrintWriter(writer))
      solverContext.log(writer.getBuffer.toString)
      solverContext.push() // push and pop flush the log output
      solverContext.pop()
      fail("Expected %s, check log.smt for explanation".format(msg))
    }

  }
} 
Example 139
Source File: TestConstAndDefRewriter.scala    From apalache   with Apache License 2.0 5 votes vote down vote up
package at.forsyte.apalache.tla.pp

import at.forsyte.apalache.tla.imp.SanyImporter
import at.forsyte.apalache.tla.imp.src.SourceStore
import at.forsyte.apalache.tla.lir.{SimpleFormalParam, TlaOperDecl}
import at.forsyte.apalache.tla.lir.convenience._
import at.forsyte.apalache.tla.lir.transformations.impl.IdleTracker
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.{BeforeAndAfterEach, FunSuite}

import scala.io.Source

@RunWith(classOf[JUnitRunner])
class TestConstAndDefRewriter extends FunSuite with BeforeAndAfterEach {
  test("override a constant") {
    val text =
      """---- MODULE const ----
        |CONSTANT n
        |OVERRIDE_n == 10
        |A == {n}
        |================================
      """.stripMargin

    val (rootName, modules) = new SanyImporter(new SourceStore)
      .loadFromSource("const", Source.fromString(text))
    val root = modules(rootName)
    val rewritten = new ConstAndDefRewriter(new IdleTracker())(root)
    assert(rewritten.constDeclarations.isEmpty) // no constants anymore
    assert(rewritten.operDeclarations.size == 2)
    val expected_n = TlaOperDecl("n", List(), tla.int(10))
    assert(expected_n == rewritten.operDeclarations.head)
    val expected_A = TlaOperDecl("A", List(), tla.enumSet(tla.appOp(tla.name("n"))))
    assert(expected_A == rewritten.operDeclarations(1))
  }

  // In TLA+, constants may be operators with multiple arguments.
  // We do not support that yet.
  test("override a constant with a unary operator") {
    val text =
      """---- MODULE const ----
        |CONSTANT n
        |OVERRIDE_n(x) == x
        |A == {n}
        |================================
      """.stripMargin

    val (rootName, modules) = new SanyImporter(new SourceStore)
      .loadFromSource("const", Source.fromString(text))
    val root = modules(rootName)
    assertThrows[OverridingError](new ConstAndDefRewriter(new IdleTracker())(root))
  }

  test("overriding a variable with an operator => error") {
    val text =
      """---- MODULE const ----
        |VARIABLE n, m
        |OVERRIDE_n == m
        |A == {n}
        |================================
      """.stripMargin

    val (rootName, modules) = new SanyImporter(new SourceStore)
      .loadFromSource("const", Source.fromString(text))
    val root = modules(rootName)
    assertThrows[OverridingError](new ConstAndDefRewriter(new IdleTracker())(root))
  }

  test("override an operator") {
    val text =
      """---- MODULE op ----
        |BoolMin(S) == CHOOSE x \in S: \A y \in S: x => y
        |OVERRIDE_BoolMin(S) == CHOOSE x \in S: TRUE
        |================================
      """.stripMargin

    val (rootName, modules) = new SanyImporter(new SourceStore)
      .loadFromSource("op", Source.fromString(text))
    val root = modules(rootName)
    val rewritten = new ConstAndDefRewriter(new IdleTracker())(root)
    assert(rewritten.constDeclarations.isEmpty)
    assert(rewritten.operDeclarations.size == 1)
    val expected = TlaOperDecl("BoolMin", List(SimpleFormalParam("S")),
      tla.choose(tla.name("x"), tla.name("S"), tla.bool(true)))
    assert(expected == rewritten.operDeclarations.head)
  }

  test("override a unary operator with a binary operator") {
    val text =
      """---- MODULE op ----
        |BoolMin(S) == CHOOSE x \in S: \A y \in S: x => y
        |OVERRIDE_BoolMin(S, T) == CHOOSE x \in S: x \in T
        |================================
      """.stripMargin

    val (rootName, modules) = new SanyImporter(new SourceStore)
      .loadFromSource("op", Source.fromString(text))
    val root = modules(rootName)
    assertThrows[OverridingError](new ConstAndDefRewriter(new IdleTracker())(root))
  }
} 
Example 140
Source File: TestUniqueNameGenerator.scala    From apalache   with Apache License 2.0 5 votes vote down vote up
package at.forsyte.apalache.tla.pp

import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.{BeforeAndAfterEach, FunSuite}

@RunWith(classOf[JUnitRunner])
class TestUniqueNameGenerator extends FunSuite with BeforeAndAfterEach {
  test("first three") {
    val gen = new UniqueNameGenerator
    assert("t_1" == gen.newName())
    assert("t_2" == gen.newName())
    assert("t_3" == gen.newName())
  }

  test("after 10000") {
    val gen = new UniqueNameGenerator
    for (i <- 1.to(10000)) {
      gen.newName()
    }
    assert("t_7pt" == gen.newName())
  }
} 
Example 141
Source File: PlanningTest.scala    From spark-druid-olap   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.sql.sources.druid.test

import java.util.TimeZone

import com.github.nscala_time.time.Imports._
import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.catalyst.expressions.{Expression, PredicateHelper}
import org.apache.spark.sql.catalyst.plans.logical.Filter
import org.apache.spark.sql.execution.datasources.LogicalRelation
import org.apache.spark.sql.hive.test.sparklinedata.TestHive
import org.apache.spark.sql.sources.druid.DruidPlanner
import org.scalatest.BeforeAndAfterEach
import org.sparklinedata.druid._
import org.sparklinedata.druid.client.test.BaseTest
import org.sparklinedata.druid.metadata.DruidRelationInfo

trait PlanningTestHelper extends PredicateHelper {
  System.setProperty("user.timezone", "UTC")
  TimeZone.setDefault(TimeZone.getTimeZone("UTC"))

  override def splitConjunctivePredicates(condition: Expression): Seq[Expression] = {
    super.splitConjunctivePredicates(condition)
  }
}

abstract class PlanningTest extends BaseTest with BeforeAndAfterEach with PlanningTestHelper {

  val dPlanner = new DruidPlanner(TestHive)
  var tab: DataFrame = _
  var drInfo: DruidRelationInfo = _
  var dqb: DruidQueryBuilder = _
  var iCE: IntervalConditionExtractor = _
  var iCE2: SparkIntervalConditionExtractor = _

  override def beforeAll() = {
    super.beforeAll()
    tab = TestHive.table("orderLineItemPartSupplier")
    drInfo = tab.queryExecution.optimizedPlan.
      asInstanceOf[LogicalRelation].relation.asInstanceOf[DruidRelation].info
  }

  override protected def beforeEach(): Unit = {
    dqb = DruidQueryBuilder(drInfo)
    iCE = new IntervalConditionExtractor(dqb)
    iCE2 = new SparkIntervalConditionExtractor(dqb)
  }

  def validateFilter(filterStr: String,
                     pushedToDruid: Boolean = true,
                     filSpec: Option[FilterSpec] = None,
                     intervals: List[Interval] = List()
                    ): Unit = {
    val q = tab.where(filterStr)
    val filter = q.queryExecution.optimizedPlan.asInstanceOf[Filter]
    val dqbs = dPlanner.translateProjectFilter(
      Some(dqb),
      Seq(),
      splitConjunctivePredicates(filter.condition),
      true
    )
    if (pushedToDruid) {
      assert(dqbs.size == 1)
      val odqb = dqbs(0)
      assert(odqb.filterSpec == filSpec)
      assert(odqb.queryIntervals.intervals == intervals)
    }
  }

} 
Example 142
Source File: PubSubRouterAltSpec.scala    From graphcool-framework   with Apache License 2.0 5 votes vote down vote up
package cool.graph.messagebus.pubsub.inmemory

import akka.actor.Props
import akka.testkit.{TestActorRef, TestKit, TestProbe}
import cool.graph.akkautil.SingleThreadedActorSystem
import cool.graph.messagebus.pubsub.PubSubProtocol.{Publish, Subscribe, Unsubscribe}
import cool.graph.messagebus.pubsub.PubSubRouterAlt
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, Matchers, WordSpecLike}

import scala.concurrent.duration._

class PubSubRouterAltSpec
    extends TestKit(SingleThreadedActorSystem("pubsub-router-spec"))
    with WordSpecLike
    with Matchers
    with BeforeAndAfterAll
    with BeforeAndAfterEach {
  override def afterAll = shutdown(verifySystemShutdown = true)

  "The PubSubRouter implementation" should {
    "subscribe subscribers correctly and route messages" in {
      val routerActor = TestActorRef(Props[PubSubRouterAlt])
      val router      = routerActor.underlyingActor.asInstanceOf[PubSubRouterAlt]
      val probe       = TestProbe()
      val topic       = "testTopic"

      routerActor ! Subscribe(topic, probe.ref)
      router.router.routees.length shouldEqual 1

      routerActor ! Publish(topic, "test")
      probe.expectMsg("test")
      probe.expectNoMsg(max = 1.second)

      routerActor ! Publish("testTopic2", "test2")
      probe.expectNoMsg(max = 1.second)
    }

    "unsubscribe subscribers correctly" in {
      val routerActor = TestActorRef(Props[PubSubRouterAlt])
      val router      = routerActor.underlyingActor.asInstanceOf[PubSubRouterAlt]
      val probe       = TestProbe()
      val topic       = "testTopic"

      routerActor ! Subscribe(topic, probe.ref)
      router.router.routees.length shouldEqual 1

      routerActor ! Unsubscribe(topic, probe.ref)
      router.router.routees.length shouldEqual 0

      routerActor ! Publish(topic, "test")
      probe.expectNoMsg(max = 1.second)
    }

    "handle actor terminations" in {
      val routerActor = TestActorRef(Props[PubSubRouterAlt])
      val router      = routerActor.underlyingActor.asInstanceOf[PubSubRouterAlt]
      val probe       = TestProbe()
      val topic       = "testTopic"

      routerActor ! Subscribe(topic, probe.ref)
      router.router.routees.length shouldEqual 1

      system.stop(probe.ref)
      Thread.sleep(50)
      router.router.routees.length shouldEqual 0
    }
  }
} 
Example 143
Source File: PubSubRouterSpec.scala    From graphcool-framework   with Apache License 2.0 5 votes vote down vote up
package cool.graph.messagebus.pubsub.inmemory

import akka.actor.Props
import akka.testkit.{TestActorRef, TestKit, TestProbe}
import cool.graph.akkautil.SingleThreadedActorSystem
import cool.graph.messagebus.pubsub.PubSubProtocol.{Publish, Subscribe, Unsubscribe}
import cool.graph.messagebus.pubsub.PubSubRouter
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, Matchers, WordSpecLike}

import scala.concurrent.duration._

class PubSubRouterSpec
    extends TestKit(SingleThreadedActorSystem("pubsub-router-spec"))
    with WordSpecLike
    with Matchers
    with BeforeAndAfterAll
    with BeforeAndAfterEach {
  override def afterAll = shutdown(verifySystemShutdown = true)

  "The PubSubRouter implementation" should {
    "subscribe subscribers correctly and route messages" in {
      val routerActor = TestActorRef(Props[PubSubRouter])
      val router      = routerActor.underlyingActor.asInstanceOf[PubSubRouter]
      val probe       = TestProbe()
      val topic       = "testTopic"

      routerActor ! Subscribe(topic, probe.ref)
      router.subscribers.values.map(_.size).sum shouldEqual 1

      routerActor ! Publish(topic, "test")
      probe.expectMsg("test")
      probe.expectNoMsg(max = 1.second)

      routerActor ! Publish("testTopic2", "test2")
      probe.expectNoMsg(max = 1.second)
    }

    "unsubscribe subscribers correctly" in {
      val routerActor = TestActorRef(Props[PubSubRouter])
      val router      = routerActor.underlyingActor.asInstanceOf[PubSubRouter]
      val probe       = TestProbe()
      val topic       = "testTopic"

      routerActor ! Subscribe(topic, probe.ref)
      router.subscribers.values.map(_.size).sum shouldEqual 1

      routerActor ! Unsubscribe(topic, probe.ref)
      router.subscribers.values.map(_.size).sum shouldEqual 0

      routerActor ! Publish(topic, "test")
      probe.expectNoMsg(max = 1.second)
    }

    "handle actor terminations" in {
      val routerActor = TestActorRef(Props[PubSubRouter])
      val router      = routerActor.underlyingActor.asInstanceOf[PubSubRouter]
      val probe       = TestProbe()
      val topic       = "testTopic"

      routerActor ! Subscribe(topic, probe.ref)
      router.subscribers.values.map(_.size).sum shouldEqual 1

      system.stop(probe.ref)
      Thread.sleep(50)
      router.subscribers.values.map(_.size).sum shouldEqual 0
    }
  }
} 
Example 144
Source File: InMemoryAkkaQueueSpec.scala    From graphcool-framework   with Apache License 2.0 5 votes vote down vote up
package cool.graph.messagebus.queue.inmemory

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.testkit.{TestKit, TestProbe}
import cool.graph.messagebus.QueuePublisher
import cool.graph.messagebus.queue.{BackoffStrategy, ConstantBackoff}
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, Matchers, WordSpecLike}

import scala.concurrent.Future
import scala.concurrent.duration._

class InMemoryAkkaQueueSpec
    extends TestKit(ActorSystem("queueing-spec"))
    with WordSpecLike
    with Matchers
    with BeforeAndAfterAll
    with BeforeAndAfterEach
    with ScalaFutures {

  implicit val materializer = ActorMaterializer()

  def withInMemoryQueue[T](backoff: BackoffStrategy = ConstantBackoff(100.millis))(testFn: (InMemoryAkkaQueue[T], TestProbe) => Unit) = {
    val inMemoryQueue = InMemoryAkkaQueue[T](backoff)
    val testProbe     = TestProbe()

    try {
      testFn(inMemoryQueue, testProbe)
    } finally {
      inMemoryQueue.shutdown
    }
  }

  override def afterAll = shutdown(verifySystemShutdown = true)

  "Queue" should {
    "call the onMsg function if a valid message arrives" in {
      withInMemoryQueue[String]() { (queue, probe) =>
        queue.withConsumer((str: String) => { probe.ref ! str; Future.successful(()) })
        queue.publish("test")
        probe.expectMsg("test")
      }
    }

    "increment the message tries correctly on failure" in {
      withInMemoryQueue[String]() { (queue, probe) =>
        queue.withConsumer((str: String) => { probe.ref ! str; Future.failed(new Exception("Kabooom")) })
        queue.publish("test")

        // 5 tries, 5 times the same message (can't check for the tries explicitly here)
        probe.expectMsgAllOf(2.seconds, Vector.fill(5) { "test" }: _*)
        probe.expectNoMsg(1.second)
      }
    }

    "map a type correctly with a MappingQueueConsumer" in {
      withInMemoryQueue[String]() { (queue, probe) =>
        val mapped = queue.map[Int]((str: String) => str.toInt)

        mapped.withConsumer((int: Int) => { probe.ref ! int; Future.successful(()) })
        queue.publish("123")

        probe.expectMsg(123)
      }
    }

    "map a type correctly with a MappingQueuePublisher" in {
      withInMemoryQueue[String]() { (queue: InMemoryAkkaQueue[String], probe) =>
        val mapped: QueuePublisher[Int] = queue.map[Int]((int: Int) => int.toString)

        queue.withConsumer((str: String) => { probe.ref ! str; Future.successful(()) })
        mapped.publish(123)

        probe.expectMsg("123")
      }
    }
  }
} 
Example 145
Source File: RabbitAkkaPubSubTestKitSpec.scala    From graphcool-framework   with Apache License 2.0 5 votes vote down vote up
package cool.graph.messagebus.testkits

import cool.graph.bugsnag.BugSnagger
import cool.graph.messagebus.Conversions
import cool.graph.messagebus.pubsub.{Message, Only}
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, Matchers, WordSpecLike}
import play.api.libs.json.Json

class RabbitAkkaPubSubTestKitSpec extends WordSpecLike with Matchers with BeforeAndAfterAll with BeforeAndAfterEach with ScalaFutures {

  case class TestMessage(id: String, testOpt: Option[Int], testSeq: Seq[String])

  implicit val bugSnagger: BugSnagger = null
  implicit val testMessageFormat      = Json.format[TestMessage]
  implicit val testMarshaller         = Conversions.Marshallers.FromJsonBackedType[TestMessage]()
  implicit val testUnmarshaller       = Conversions.Unmarshallers.ToJsonBackedType[TestMessage]()

  val amqpUri = sys.env.getOrElse("RABBITMQ_URI", sys.error("RABBITMQ_URI required for testing"))
  val testRK  = Only("SomeRoutingKey")

  var testKit: RabbitAkkaPubSubTestKit[TestMessage] = _

  override def beforeEach = {
    testKit = RabbitAkkaPubSubTestKit[TestMessage](amqpUri, "test")
    testKit.start.futureValue
  }

  override def afterEach(): Unit = testKit.stop.futureValue

  "The rabbit pubsub testing kit" should {

    
    "should expect a message correctly" in {
      val testMsg = TestMessage("someId1", None, Seq("1", "2"))

      testKit.publish(testRK, testMsg)
      testKit.expectMsg(Message[TestMessage](testRK.topic, testMsg))
    }

    "should blow up if it expects a message and none arrives" in {
      val testMsg = TestMessage("someId2", None, Seq("1", "2"))

      an[AssertionError] should be thrownBy {
        testKit.expectMsg(Message[TestMessage](testRK.topic, testMsg))
      }
    }

    "should expect no message correctly" in {
      testKit.expectNoMsg()
    }

    "should blow up if no message was expected but one arrives" in {
      val testMsg = TestMessage("someId3", None, Seq("1", "2"))

      testKit.publish(testRK, testMsg)

      an[AssertionError] should be thrownBy {
        testKit.expectNoMsg()
      }
    }

    "should expect a message count correctly" in {
      val testMsg  = TestMessage("someId4", None, Seq("1", "2"))
      val testMsg2 = TestMessage("someId5", Some(123), Seq("2", "1"))

      testKit.publish(testRK, testMsg)
      testKit.publish(testRK, testMsg2)

      testKit.expectMsgCount(2)
    }

    "should blow up if it expects a message count and less arrive" in {
      val testMsg = TestMessage("someId6", None, Seq("1", "2"))

      testKit.publish(testRK, testMsg)

      an[AssertionError] should be thrownBy {
        testKit.expectMsgCount(2)
      }
    }

    "should blow up if it expects a message count and more arrive" in {
      val testMsg  = TestMessage("someId7", None, Seq("1", "2"))
      val testMsg2 = TestMessage("someId8", Some(123), Seq("2", "1"))

      testKit.publish(testRK, testMsg)
      testKit.publish(testRK, testMsg2)

      an[AssertionError] should be thrownBy {
        testKit.expectMsgCount(1)
      }
    }
  }
} 
Example 146
Source File: TheFlashTweetsConsumerSpec.scala    From KafkaPlayground   with GNU General Public License v3.0 5 votes vote down vote up
package com.github.pedrovgs.kafkaplayground.flash

import com.github.pedrovgs.kafkaplayground.flash.elasticsearch.ElasticClient
import com.github.pedrovgs.kafkaplayground.utils.EmbeddedKafkaServer
import org.mockito.Mockito.verify
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.mockito.MockitoSugar
import org.scalatest.{BeforeAndAfterEach, FlatSpec, Matchers}

object TheFlashTweetsConsumerSpec {
  private val anyTopic        = "topic"
  private val anyContent      = "content"
  private val anyOtherContent = "anyOtherContent"
}

class TheFlashTweetsConsumerSpec
    extends FlatSpec
    with Matchers
    with EmbeddedKafkaServer
    with ScalaFutures
    with MockitoSugar
    with BeforeAndAfterEach {

  import TheFlashTweetsConsumerSpec._

  private var elasticClient: ElasticClient = _

  override protected def beforeEach(): Unit = {
    super.beforeEach()
    elasticClient = mock[ElasticClient]
  }

  "TheFlashTweetsConsumer" should "create a new document for the configured index using the messages polled from the kafka cluster" in {
    produceMessage(anyTopic, anyContent)

    givenAElasticConsumer().poll()

    val expectedId = s"topic_0_0"
    verify(elasticClient).insertOrUpdate(expectedId, anyContent)
  }

  it should "send more than a message to elasticsearch" in {
    produceMessage(anyTopic, anyContent)
    produceMessage(anyTopic, anyOtherContent)

    givenAElasticConsumer().poll()

    verify(elasticClient).insertOrUpdate("topic_0_0", anyContent)
    verify(elasticClient).insertOrUpdate("topic_0_1", anyOtherContent)
  }

  private def givenAElasticConsumer() =
    new TheFlashTweetsConsumer(kafkaServerAddress(), anyTopic, elasticClient)

} 
Example 147
Source File: WireMockFixture.scala    From kafka-serialization   with Apache License 2.0 5 votes vote down vote up
package com.ovoenergy.kafka.serialization.testkit

import com.github.tomakehurst.wiremock.WireMockServer
import com.github.tomakehurst.wiremock.client.WireMock
import com.github.tomakehurst.wiremock.core.WireMockConfiguration
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, Suite}

trait WireMockFixture extends BeforeAndAfterAll with BeforeAndAfterEach { self: Suite =>

  private lazy val wireMockServer: WireMockServer = new WireMockServer(WireMockConfiguration.options().dynamicPort())

  val wireMockHost: String = "localhost"
  def wireMockPort: Int = wireMockServer.port()
  def wireMockEndpoint: String = s"http://$wireMockHost:$wireMockPort"

  override protected def beforeAll(): Unit = {
    super.beforeAll()
    wireMockServer.start()
    WireMock.configureFor(wireMockPort)
  }

  override protected def afterAll(): Unit = {
    wireMockServer.shutdown()
    super.afterAll()
  }

  override protected def afterEach(): Unit = {
    resetWireMock()
    super.afterEach()
  }

  override protected def beforeEach(): Unit = {
    super.beforeEach()
    resetWireMock()
  }

  def resetWireMock(): Unit = {
    wireMockServer.resetMappings()
    wireMockServer.resetRequests()
    wireMockServer.resetScenarios()
  }
} 
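
A short usage sketch for WireMockFixture (RegistryClientSpec and the /subjects path are hypothetical, and the WordSpec style from ScalaTest 3.0 is assumed; because the port is dynamic, the code under test should be pointed at wireMockEndpoint rather than a hard-coded URL):

import com.github.tomakehurst.wiremock.client.WireMock._
import com.ovoenergy.kafka.serialization.testkit.WireMockFixture
import org.scalatest.WordSpec

class RegistryClientSpec extends WordSpec with WireMockFixture {

  "the client" should {
    "talk to the dynamically assigned WireMock port" in {
      stubFor(get(urlEqualTo("/subjects"))
        .willReturn(aResponse().withStatus(200).withBody("[]")))

      // build the client under test against s"$wireMockEndpoint/subjects"
      assert(wireMockEndpoint.startsWith("http://localhost:"))
    }
  }
}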
Example 148
Source File: CachedDataSourceTest.scala    From morpheus   with Apache License 2.0 5 votes vote down vote up
package org.opencypher.morpheus.api.io

import org.apache.spark.storage.StorageLevel
import org.opencypher.morpheus.api.io.util.CachedDataSource._
import org.opencypher.morpheus.impl.MorpheusConverters._
import org.opencypher.morpheus.impl.table.SparkTable.DataFrameTable
import org.opencypher.morpheus.testing.MorpheusTestSuite
import org.opencypher.morpheus.testing.fixture.GraphConstructionFixture
import org.opencypher.okapi.api.graph.{Namespace, PropertyGraph}
import org.opencypher.okapi.relational.api.graph.RelationalCypherGraph
import org.opencypher.okapi.relational.impl.graph.ScanGraph
import org.scalatest.BeforeAndAfterEach

class CachedDataSourceTest extends MorpheusTestSuite with GraphConstructionFixture with BeforeAndAfterEach {

  override val testNamespace: Namespace = morpheus.catalog.sessionNamespace
  private val testDataSource = morpheus.catalog.source(testNamespace)

  override protected def beforeEach(): Unit = {
    super.beforeEach()
    testDataSource.store(testGraphName, initGraph(s"CREATE (:A)"))
  }

  override protected def afterEach(): Unit = {
    if (testDataSource.hasGraph(testGraphName)) {
      unpersist(testDataSource.graph(testGraphName).asMorpheus)
      testDataSource.delete(testGraphName)
    }
    super.afterEach()
  }

  it("should cache the graph on first read") {
    val g0 = testDataSource.graph(testGraphName)
    assert(g0, StorageLevel.NONE)

    val cachedDataSource = testDataSource.withCaching
    val g1 = cachedDataSource.graph(testGraphName)
    assert(g1, StorageLevel.MEMORY_AND_DISK)

    assert(g0, StorageLevel.MEMORY_AND_DISK) // side effect for session ds

    cachedDataSource.hasGraph(testGraphName) should equal(true)
    testDataSource.hasGraph(testGraphName) should equal(true)
  }

  it("should cache the graph on first read with specific storage level") {
    val cachedDs = testDataSource.withCaching(StorageLevel.MEMORY_ONLY)
    val g = cachedDs.graph(testGraphName)
    assert(g, StorageLevel.MEMORY_ONLY)
  }

  it("should delete a graph and remove from cache") {
    val cachedDs = testDataSource.withCaching
    val g = cachedDs.graph(testGraphName)
    assert(g, StorageLevel.MEMORY_AND_DISK)

    cachedDs.delete(testGraphName)
    assert(g, StorageLevel.NONE)

    cachedDs.hasGraph(testGraphName) should equal(false)
    testDataSource.hasGraph(testGraphName) should equal(false)
  }

  private def assert(g: PropertyGraph, storageLevel: StorageLevel): Unit = {
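    // every scan table backing the graph must report the expected storage level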
    g.asInstanceOf[ScanGraph[DataFrameTable]].scans
      .map(_.table.df)
      .foreach(_.storageLevel should equal(storageLevel))
  }

  private def unpersist(graph: RelationalCypherGraph[DataFrameTable]): Unit = {
    graph.tables.foreach(_.df.unpersist)
  }
} 
Example 149
Source File: HiveContextCompatibilitySuite.scala    From multi-tenancy-spark   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.sql.hive

import org.scalatest.BeforeAndAfterEach

import org.apache.spark.{SparkConf, SparkContext, SparkFunSuite}


class HiveContextCompatibilitySuite extends SparkFunSuite with BeforeAndAfterEach {

  private var sc: SparkContext = null
  private var hc: HiveContext = null

  override def beforeAll(): Unit = {
    super.beforeAll()
    sc = SparkContext.getOrCreate(new SparkConf().setMaster("local").setAppName("test"))
    HiveUtils.newTemporaryConfiguration(useInMemoryDerby = true).foreach { case (k, v) =>
      sc.hadoopConfiguration.set(k, v)
    }
    hc = new HiveContext(sc)
  }

  override def afterEach(): Unit = {
    try {
      hc.sharedState.cacheManager.clearCache()
      hc.sessionState.catalog.reset()
    } finally {
      super.afterEach()
    }
  }

  override def afterAll(): Unit = {
    try {
      sc = null
      hc = null
    } finally {
      super.afterAll()
    }
  }

  test("basic operations") {
    val _hc = hc
    import _hc.implicits._
    val df1 = (1 to 20).map { i => (i, i) }.toDF("a", "x")
    val df2 = (1 to 100).map { i => (i, i % 10, i % 2 == 0) }.toDF("a", "b", "c")
      .select($"a", $"b")
      .filter($"a" > 10 && $"b" > 6 && $"c")
    val df3 = df1.join(df2, "a")
    val res = df3.collect()
    val expected = Seq((18, 18, 8)).toDF("a", "x", "b").collect()
    assert(res.toSeq == expected.toSeq)
    df3.createOrReplaceTempView("mai_table")
    val df4 = hc.table("mai_table")
    val res2 = df4.collect()
    assert(res2.toSeq == expected.toSeq)
  }

  test("basic DDLs") {
    val _hc = hc
    import _hc.implicits._
    val databases = hc.sql("SHOW DATABASES").collect().map(_.getString(0))
    assert(databases.toSeq == Seq("default"))
    hc.sql("CREATE DATABASE mee_db")
    hc.sql("USE mee_db")
    val databases2 = hc.sql("SHOW DATABASES").collect().map(_.getString(0))
    assert(databases2.toSet == Set("default", "mee_db"))
    val df = (1 to 10).map { i => ("bob" + i.toString, i) }.toDF("name", "age")
    df.createOrReplaceTempView("mee_table")
    hc.sql("CREATE TABLE moo_table (name string, age int)")
    hc.sql("INSERT INTO moo_table SELECT * FROM mee_table")
    assert(
      hc.sql("SELECT * FROM moo_table order by name").collect().toSeq ==
      df.collect().toSeq.sortBy(_.getString(0)))
    val tables = hc.sql("SHOW TABLES IN mee_db").select("tableName").collect().map(_.getString(0))
    assert(tables.toSet == Set("moo_table", "mee_table"))
    hc.sql("DROP TABLE moo_table")
    hc.sql("DROP TABLE mee_table")
    val tables2 = hc.sql("SHOW TABLES IN mee_db").select("tableName").collect().map(_.getString(0))
    assert(tables2.isEmpty)
    hc.sql("USE default")
    hc.sql("DROP DATABASE mee_db CASCADE")
    val databases3 = hc.sql("SHOW DATABASES").collect().map(_.getString(0))
    assert(databases3.toSeq == Seq("default"))
  }

} 
Example 150
Source File: SharedSQLContext.scala    From multi-tenancy-spark   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.sql.test

import org.scalatest.BeforeAndAfterEach

import org.apache.spark.{DebugFilesystem, SparkConf}
import org.apache.spark.sql.{SparkSession, SQLContext}



// The trait declaration, fields and beforeAll() were dropped from this listing; a minimal header is
// assumed here so the remaining overrides compile.
trait SharedSQLContext extends SQLTestUtils with BeforeAndAfterEach {

  protected var _spark: SparkSession = _

  protected override def afterAll(): Unit = {
    try {
      if (_spark != null) {
        _spark.stop()
        _spark = null
      }
    } finally {
      super.afterAll()
    }
  }

  protected override def beforeEach(): Unit = {
    super.beforeEach()
    DebugFilesystem.clearOpenStreams()
  }

  protected override def afterEach(): Unit = {
    super.afterEach()
    DebugFilesystem.assertNoOpenStreams()
  }
} 
Example 151
Source File: SharedSparkContext.scala    From multi-tenancy-spark   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark

import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach}
import org.scalatest.Suite


trait SharedSparkContext extends BeforeAndAfterAll with BeforeAndAfterEach { self: Suite =>

  @transient private var _sc: SparkContext = _

  def sc: SparkContext = _sc

  var conf = new SparkConf(false)

  override def beforeAll() {
    super.beforeAll()
    _sc = new SparkContext(
      "local[4]", "test", conf.set("spark.hadoop.fs.file.impl", classOf[DebugFilesystem].getName))
  }

  override def afterAll() {
    try {
      LocalSparkContext.stop(_sc)
      _sc = null
    } finally {
      super.afterAll()
    }
  }

  protected override def beforeEach(): Unit = {
    super.beforeEach()
    DebugFilesystem.clearOpenStreams()
  }

  protected override def afterEach(): Unit = {
    super.afterEach()
    DebugFilesystem.assertNoOpenStreams()
  }
} 
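
As an illustration, a minimal sketch of a suite mixing in this trait (a hypothetical WordCountSuite; since the trait lives in org.apache.spark, such a suite would sit in Spark's own test sources):

package org.apache.spark

import org.scalatest.FunSuite

class WordCountSuite extends FunSuite with SharedSparkContext {

  test("counts words with the shared SparkContext") {
    // sc is created once in beforeAll and reused by every test in the suite
    val counts = sc.parallelize(Seq("a", "b", "a")).map((_, 1)).reduceByKey(_ + _).collectAsMap()
    assert(counts("a") === 2)
  }
}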
Example 152
Source File: ResetSystemProperties.scala    From multi-tenancy-spark   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.util

import java.util.Properties

import org.apache.commons.lang3.SerializationUtils
import org.scalatest.{BeforeAndAfterEach, Suite}


private[spark] trait ResetSystemProperties extends BeforeAndAfterEach { this: Suite =>
  var oldProperties: Properties = null

  override def beforeEach(): Unit = {
    // We need SerializationUtils.clone instead of `new Properties(System.getProperties())` because
    // the latter approach does not copy the properties; it only initializes a new Properties
    // object with the given properties as defaults, and such defaults are not visible through the
    // standard Scala wrapper over Java Properties.
    oldProperties = SerializationUtils.clone(System.getProperties)
    super.beforeEach()
  }

  override def afterEach(): Unit = {
    try {
      super.afterEach()
    } finally {
      System.setProperties(oldProperties)
      oldProperties = null
    }
  }
} 
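
A minimal sketch of how a suite might rely on this trait (a hypothetical PropertyIsolationSuite; the trait is private[spark], so the suite must live under org.apache.spark):

package org.apache.spark.util

import org.scalatest.{FunSuite, Matchers}

class PropertyIsolationSuite extends FunSuite with Matchers with ResetSystemProperties {

  test("system properties set inside a test do not leak into later tests") {
    System.setProperty("spark.test.sample", "enabled")
    sys.props("spark.test.sample") shouldBe "enabled"
    // afterEach() restores the snapshot taken in beforeEach(), discarding this change
  }
}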
Example 153
Source File: DiskBlockManagerSuite.scala    From multi-tenancy-spark   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.storage

import java.io.{File, FileWriter}

import scala.language.reflectiveCalls

import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach}

import org.apache.spark.{SparkConf, SparkFunSuite}
import org.apache.spark.util.Utils

class DiskBlockManagerSuite extends SparkFunSuite with BeforeAndAfterEach with BeforeAndAfterAll {
  private val testConf = new SparkConf(false)
  private var rootDir0: File = _
  private var rootDir1: File = _
  private var rootDirs: String = _

  var diskBlockManager: DiskBlockManager = _

  override def beforeAll() {
    super.beforeAll()
    rootDir0 = Utils.createTempDir()
    rootDir1 = Utils.createTempDir()
    rootDirs = rootDir0.getAbsolutePath + "," + rootDir1.getAbsolutePath
  }

  override def afterAll() {
    try {
      Utils.deleteRecursively(rootDir0)
      Utils.deleteRecursively(rootDir1)
    } finally {
      super.afterAll()
    }
  }

  override def beforeEach() {
    super.beforeEach()
    val conf = testConf.clone
    conf.set("spark.local.dir", rootDirs)
    diskBlockManager = new DiskBlockManager(conf, deleteFilesOnStop = true)
  }

  override def afterEach() {
    try {
      diskBlockManager.stop()
    } finally {
      super.afterEach()
    }
  }

  test("basic block creation") {
    val blockId = new TestBlockId("test")
    val newFile = diskBlockManager.getFile(blockId)
    writeToFile(newFile, 10)
    assert(diskBlockManager.containsBlock(blockId))
    newFile.delete()
    assert(!diskBlockManager.containsBlock(blockId))
  }

  test("enumerating blocks") {
    val ids = (1 to 100).map(i => TestBlockId("test_" + i))
    val files = ids.map(id => diskBlockManager.getFile(id))
    files.foreach(file => writeToFile(file, 10))
    assert(diskBlockManager.getAllBlocks.toSet === ids.toSet)
  }

  def writeToFile(file: File, numBytes: Int) {
    val writer = new FileWriter(file, true)
    for (i <- 0 until numBytes) writer.write(i)
    writer.close()
  }
} 
Example 154
Source File: SpecBase.scala    From kafka-lag-exporter   with Apache License 2.0 5 votes vote down vote up
package com.lightbend.kafkalagexporter.integration

import akka.actor.typed.ActorSystem
import akka.kafka.testkit.scaladsl.{EmbeddedKafkaLike, ScalatestKafkaSpec}
import com.lightbend.kafkalagexporter.MainApp
import com.lightbend.kafkalagexporter.KafkaClusterManager
import com.typesafe.config.{Config, ConfigFactory}
import net.manub.embeddedkafka.EmbeddedKafkaConfig
import org.scalatest.concurrent.{Eventually, ScalaFutures}
import org.scalatest.{BeforeAndAfterEach, Matchers, WordSpecLike}

import scala.concurrent.Await
import scala.concurrent.duration._

abstract class SpecBase(kafkaPort: Int, val exporterPort: Int)
  extends ScalatestKafkaSpec(kafkaPort)
    with WordSpecLike
    with BeforeAndAfterEach
    with EmbeddedKafkaLike
    with Matchers
    with ScalaFutures
    with Eventually
    with PrometheusUtils
    with LagSim {

  override def createKafkaConfig: EmbeddedKafkaConfig =
    EmbeddedKafkaConfig(kafkaPort,
      zooKeeperPort,
      Map(
        "offsets.topic.replication.factor" -> "1"
      ))

  var kafkaLagExporter: ActorSystem[KafkaClusterManager.Message] = _

  val clusterName = "default"

  val config: Config = ConfigFactory.parseString(s"""
                                            |kafka-lag-exporter {
                                            |  port: $exporterPort
                                            |  clusters = [
                                            |    {
                                            |      name: "$clusterName"
                                            |      bootstrap-brokers: "localhost:$kafkaPort"
                                            |    }
                                            |  ]
                                            |  poll-interval = 5 seconds
                                            |  lookup-table-size = 20
                                            |}""".stripMargin).withFallback(ConfigFactory.load())

  override def beforeEach(): Unit = {
    kafkaLagExporter = MainApp.start(config)
  }

  override def afterEach(): Unit = {
    kafkaLagExporter ! KafkaClusterManager.Stop
    Await.result(kafkaLagExporter.whenTerminated, 10 seconds)
  }
} 
Example 155
Source File: ResetSystemProperties.scala    From iolap   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.util

import java.util.Properties

import org.apache.commons.lang3.SerializationUtils
import org.scalatest.{BeforeAndAfterEach, Suite}

import org.apache.spark.SparkFunSuite


private[spark] trait ResetSystemProperties extends BeforeAndAfterEach { this: Suite =>
  var oldProperties: Properties = null

  override def beforeEach(): Unit = {
    // We need SerializationUtils.clone instead of `new Properties(System.getProperties())` because
    // the latter approach does not copy the properties; it only initializes a new Properties
    // object with the given properties as defaults, and such defaults are not visible through the
    // standard Scala wrapper over Java Properties.
    oldProperties = SerializationUtils.clone(System.getProperties)
    super.beforeEach()
  }

  override def afterEach(): Unit = {
    try {
      super.afterEach()
    } finally {
      System.setProperties(oldProperties)
      oldProperties = null
    }
  }
} 
Example 156
Source File: DiskBlockManagerSuite.scala    From iolap   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.storage

import java.io.{File, FileWriter}

import scala.language.reflectiveCalls

import org.mockito.Mockito.{mock, when}
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach}

import org.apache.spark.{SparkConf, SparkFunSuite}
import org.apache.spark.util.Utils

class DiskBlockManagerSuite extends SparkFunSuite with BeforeAndAfterEach with BeforeAndAfterAll {
  private val testConf = new SparkConf(false)
  private var rootDir0: File = _
  private var rootDir1: File = _
  private var rootDirs: String = _

  val blockManager = mock(classOf[BlockManager])
  when(blockManager.conf).thenReturn(testConf)
  var diskBlockManager: DiskBlockManager = _

  override def beforeAll() {
    super.beforeAll()
    rootDir0 = Utils.createTempDir()
    rootDir1 = Utils.createTempDir()
    rootDirs = rootDir0.getAbsolutePath + "," + rootDir1.getAbsolutePath
  }

  override def afterAll() {
    super.afterAll()
    Utils.deleteRecursively(rootDir0)
    Utils.deleteRecursively(rootDir1)
  }

  override def beforeEach() {
    val conf = testConf.clone
    conf.set("spark.local.dir", rootDirs)
    diskBlockManager = new DiskBlockManager(blockManager, conf)
  }

  override def afterEach() {
    diskBlockManager.stop()
  }

  test("basic block creation") {
    val blockId = new TestBlockId("test")
    val newFile = diskBlockManager.getFile(blockId)
    writeToFile(newFile, 10)
    assert(diskBlockManager.containsBlock(blockId))
    newFile.delete()
    assert(!diskBlockManager.containsBlock(blockId))
  }

  test("enumerating blocks") {
    val ids = (1 to 100).map(i => TestBlockId("test_" + i))
    val files = ids.map(id => diskBlockManager.getFile(id))
    files.foreach(file => writeToFile(file, 10))
    assert(diskBlockManager.getAllBlocks.toSet === ids.toSet)
  }

  def writeToFile(file: File, numBytes: Int) {
    val writer = new FileWriter(file, true)
    for (i <- 0 until numBytes) writer.write(i)
    writer.close()
  }
} 
Example 157
Source File: ArrayEncodingBaseSpec.scala    From quill   with Apache License 2.0 5 votes vote down vote up
package io.getquill.context.sql.encoding

import java.time.LocalDate
import java.util.Date

import io.getquill.{ MappedEncoding, Spec }
import org.scalatest.{ Assertion, BeforeAndAfterEach }

trait ArrayEncodingBaseSpec extends Spec with BeforeAndAfterEach {

  // Support all sql base types and `Seq` implementers
  case class ArraysTestEntity(
    texts:      List[String],
    decimals:   Seq[BigDecimal],
    bools:      Vector[Boolean],
    bytes:      List[Byte],
    shorts:     IndexedSeq[Short],
    ints:       Seq[Int],
    longs:      Seq[Long],
    floats:     Seq[Float],
    doubles:    Seq[Double],
    timestamps: Seq[Date],
    dates:      Seq[LocalDate]
  )

  val e = ArraysTestEntity(List("test"), Seq(BigDecimal(2.33)), Vector(true, true), List(1),
    IndexedSeq(3), Seq(2), Seq(1, 2, 3), Seq(1f, 2f), Seq(4d, 3d),
    Seq(new Date(System.currentTimeMillis())), Seq(LocalDate.now()))

  // casting types can be dangerous so we need to ensure that everything is ok
  def baseEntityDeepCheck(e1: ArraysTestEntity, e2: ArraysTestEntity): Assertion = {
    e1.texts.head mustBe e2.texts.head
    e1.decimals.head mustBe e2.decimals.head
    e1.bools.head mustBe e2.bools.head
    e1.bytes.head mustBe e2.bytes.head
    e1.shorts.head mustBe e2.shorts.head
    e1.ints.head mustBe e2.ints.head
    e1.longs.head mustBe e2.longs.head
    e1.floats.head mustBe e2.floats.head
    e1.doubles.head mustBe e2.doubles.head
    e1.timestamps.head mustBe e2.timestamps.head
    e1.dates.head mustBe e2.dates.head
  }

  // Support Seq encoding basing on MappedEncoding
  case class StrWrap(str: String)
  implicit val strWrapEncode: MappedEncoding[StrWrap, String] = MappedEncoding(_.str)
  implicit val strWrapDecode: MappedEncoding[String, StrWrap] = MappedEncoding(StrWrap.apply)
  case class WrapEntity(texts: Seq[StrWrap])
  val wrapE = WrapEntity(List("hey", "ho").map(StrWrap.apply))
} 
Example 158
Source File: MavenAddManagedDependenciesRuleTest.scala    From RTran   with Apache License 2.0 5 votes vote down vote up
package com.ebay.rtran.maven

import java.io.File

import org.apache.commons.io.FileUtils
import com.ebay.rtran.maven.util.MavenModelUtil
import com.ebay.rtran.maven.util.MavenModelUtil.SimpleDependency
import org.scalatest.{BeforeAndAfterEach, FlatSpecLike, Matchers}


class MavenAddManagedDependenciesRuleTest extends FlatSpecLike with Matchers with BeforeAndAfterEach {

  val projectRoot = new File(getClass.getClassLoader.getResource("mvnproject").getFile)
  val destProjectRoot = new File(projectRoot.getParentFile, projectRoot.getName + "-bak")

  override def beforeEach = {
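    // run every test against a fresh copy of the sample project so POM edits never accumulate across tests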
    FileUtils.deleteQuietly(destProjectRoot)
    FileUtils.copyDirectory(projectRoot, destProjectRoot)
  }

  "MavenAddManagedDependenciesRule" should "be able to add dependencies to dependency management" in {
    val ruleConfig = MavenAddManagedDependenciesRuleConfig(
      Set(
        SimpleDependency("org.slf4j", "slf4j-api", Some("1.7.12")),
        SimpleDependency("com.typesafe.akka", "akka-actor_2.11", Some("2.3.9"))
      )
    )
    val projectCtx = new MavenProjectCtx(destProjectRoot)
    val provider = new MultiModuleMavenModelProvider
    val model = provider create projectCtx
    val rule = new MavenAddManagedDependenciesRule(ruleConfig)
    provider save rule.transform(model)

    val transformed = provider create projectCtx
    val parent = transformed.parents.head
    val dm1 = parent.managedDependencies.values.find(_.getArtifactId == "slf4j-api")
    dm1 should not be None
    dm1.get.getVersion should be ("1.7.12")
    val dm2 = parent.managedDependencies.values.find(_.getArtifactId == "akka-actor_2.11")
    dm2 should not be None
    dm2.get.getVersion should be ("2.4.17")
  }
} 
Example 159
Source File: MavenDependenciesMappingRuleTest.scala    From RTran   with Apache License 2.0 5 votes vote down vote up
package com.ebay.rtran.maven

import java.io.File

import org.apache.commons.io.FileUtils
import com.ebay.rtran.maven.util.MavenModelUtil
import com.ebay.rtran.maven.util.MavenModelUtil.SimpleDependency
import org.scalatest.{BeforeAndAfterEach, FlatSpecLike, Matchers}

import scala.collection.JavaConversions._


class MavenDependenciesMappingRuleTest extends FlatSpecLike with Matchers with BeforeAndAfterEach {
  val projectRoot = new File(getClass.getClassLoader.getResource("mvnproject").getFile)
  val destProjectRoot = new File(projectRoot.getParentFile, projectRoot.getName + "-bak")

  override def beforeEach = {
    FileUtils.deleteQuietly(destProjectRoot)
    FileUtils.copyDirectory(projectRoot, destProjectRoot)
  }

  "MavenDependenciesMappingRule" should "be able to alter dependencies according to mapping" in {
    val ruleConfig = MavenDependenciesMappingRuleConfig(
      Set(SimpleDependency("junit", "junit")),
      Set(SimpleDependency("org.slf4j", "slf4j-api"), SimpleDependency("org.slf4j", "slf4j-log4j12"))
    )
    val projectCtx = new MavenProjectCtx(destProjectRoot)
    val provider = new MultiModuleMavenModelProvider
    val model = provider create projectCtx
    val rule = new MavenDependenciesMappingRule(ruleConfig)
    provider save rule.transform(model)

    val transformed = provider create projectCtx
    transformed.modules foreach { module =>
      module.pomModel.getDependencies.exists(_.getArtifactId == "junit") should be (false)
      module.pomModel.getDependencies.exists(_.getArtifactId == "slf4j-api") should be (true)
      module.pomModel.getDependencies.exists(_.getArtifactId == "slf4j-log4j12") should be (true)
    }
  }

  "MavenDependenciesMappingRule" should "not alter dependencies that don't exist" in {
    val ruleConfig = MavenDependenciesMappingRuleConfig(
      Set(SimpleDependency("org.slf4j", "slf4j-api")),
      Set(SimpleDependency("org.slf4j", "slf4j-log4j12"))
    )
    val projectCtx = new MavenProjectCtx(destProjectRoot)
    val provider = new MultiModuleMavenModelProvider
    val model = provider create projectCtx
    val rule = new MavenDependenciesMappingRule(ruleConfig)
    provider save rule.transform(model)

    val transformed = provider create projectCtx
    transformed.modules foreach { module =>
      module.pomModel.getDependencies.exists(_.getArtifactId == "slf4j-api") should be (false)
      module.pomModel.getDependencies.exists(_.getArtifactId == "slf4j-log4j12") should be (false)
    }
  }

  "MavenDependenciesMappingRule" should "alter dependencies matches that match other condition" in {
    val ruleConfig = MavenDependenciesMappingRuleConfig(
      Set(SimpleDependency("junit", "junit", Some("4.9"))),
      Set(SimpleDependency("org.slf4j", "slf4j-api"), SimpleDependency("org.slf4j", "slf4j-log4j12"))
    )
    val projectCtx = new MavenProjectCtx(destProjectRoot)
    val provider = new MultiModuleMavenModelProvider
    val model = provider create projectCtx
    val rule = new MavenDependenciesMappingRule(ruleConfig)
    provider save rule.transform(model)

    val transformed = provider create projectCtx
    transformed.modules foreach { module =>
      if (module.pomModel.getPackaging == "pom") {
        module.pomModel.getDependencies.exists(_.getArtifactId == "junit") should be (true)
      } else {
        module.pomModel.getDependencies.exists(_.getArtifactId == "junit") should be (false)
        module.pomModel.getDependencies.exists(_.getArtifactId == "slf4j-api") should be (true)
        module.pomModel.getDependencies.exists(_.getArtifactId == "slf4j-log4j12") should be (true)
      }
    }
  }

  "MavenDependenciesMappingRule" should "not alter dependencies if other condition doesn't match" in {
    val ruleConfig = MavenDependenciesMappingRuleConfig(
      Set(SimpleDependency("junit", "junit", scope = Some("compile"))),
      Set(SimpleDependency("org.slf4j", "slf4j-api"), SimpleDependency("org.slf4j", "slf4j-log4j12"))
    )
    val projectCtx = new MavenProjectCtx(destProjectRoot)
    val provider = new MultiModuleMavenModelProvider
    val model = provider create projectCtx
    val rule = new MavenDependenciesMappingRule(ruleConfig)
    provider save rule.transform(model)

    val transformed = provider create projectCtx
    transformed.modules foreach { module =>
      module.pomModel.getDependencies.exists(_.getArtifactId == "junit") should be (true)
      module.pomModel.getDependencies.exists(_.getArtifactId == "slf4j-api") should be (false)
      module.pomModel.getDependencies.exists(_.getArtifactId == "slf4j-log4j12") should be (false)
    }
  }
} 
Example 160
Source File: MavenRemoveDependenciesRuleTest.scala    From RTran   with Apache License 2.0 5 votes vote down vote up
package com.ebay.rtran.maven

import java.io.File

import org.apache.commons.io.FileUtils
import com.ebay.rtran.maven.util.MavenModelUtil
import com.ebay.rtran.maven.util.MavenModelUtil.SimpleDependency
import org.scalatest.{BeforeAndAfterEach, FlatSpecLike, Matchers}

import scala.collection.JavaConversions._


class MavenRemoveDependenciesRuleTest extends FlatSpecLike with Matchers with BeforeAndAfterEach {
  val projectRoot = new File(getClass.getClassLoader.getResource("mvnproject").getFile)
  val destProjectRoot = new File(projectRoot.getParentFile, projectRoot.getName + "-bak")

  override def beforeEach = {
    FileUtils.deleteQuietly(destProjectRoot)
    FileUtils.copyDirectory(projectRoot, destProjectRoot)
  }

  "MavenRemoveDependencies" should "be able to remove dependencies" in {
    val ruleConfig = MavenRemoveDependenciesRuleConfig(
      Set(SimpleDependency("junit", "junit"))
    )
    val projectCtx = new MavenProjectCtx(destProjectRoot)
    val provider = new MultiModuleMavenModelProvider
    val model = provider create projectCtx
    val rule = new MavenRemoveDependenciesRule(ruleConfig)
    provider save rule.transform(model)

    val transformed = provider create projectCtx
    transformed.modules foreach { module =>
      module.pomModel.getDependencies.exists(_.getArtifactId == "junit") should be (false)
    }
  }

  "MavenAddDependenciesRule" should "not remove dependencies that don't exist" in {
    val ruleConfig = MavenRemoveDependenciesRuleConfig(
      Set(SimpleDependency("org.slf4j", "slf4j-api"))
    )
    val projectCtx = new MavenProjectCtx(destProjectRoot)
    val provider = new MultiModuleMavenModelProvider
    val model = provider create projectCtx
    val rule = new MavenRemoveDependenciesRule(ruleConfig)
    val originalSizes = model.modules map (_.pomModel.getDependencies.size)
    provider save rule.transform(model)

    val transformed = provider create projectCtx
    transformed.modules map (_.pomModel.getDependencies.size) should be (originalSizes)
  }

  "MavenRemoveDependencies" should "remove dependencies matches that match other condition" in {
    val ruleConfig = MavenRemoveDependenciesRuleConfig(
      Set(SimpleDependency("junit", "junit", version = Some("4.9")))
    )
    val projectCtx = new MavenProjectCtx(destProjectRoot)
    val provider = new MultiModuleMavenModelProvider
    val model = provider create projectCtx
    val rule = new MavenRemoveDependenciesRule(ruleConfig)
    provider save rule.transform(model)

    val transformed = provider create projectCtx
    transformed.modules foreach { module =>
      if (module.pomModel.getPackaging == "pom") {
        module.pomModel.getDependencies.exists(_.getArtifactId == "junit") should be (true)
      } else {
        module.pomModel.getDependencies.exists(_.getArtifactId == "junit") should be (false)
      }
    }
  }

  "MavenRemoveDependencies" should "not remove dependencies if other condition doesn't match" in {
    val ruleConfig = MavenRemoveDependenciesRuleConfig(
      Set(SimpleDependency("junit", "junit", scope = Some("compile")))
    )
    val projectCtx = new MavenProjectCtx(destProjectRoot)
    val provider = new MultiModuleMavenModelProvider
    val model = provider create projectCtx
    val rule = new MavenRemoveDependenciesRule(ruleConfig)
    provider save rule.transform(model)

    val transformed = provider create projectCtx
    transformed.modules foreach { module =>
      module.pomModel.getDependencies.exists(_.getArtifactId == "junit") should be (true)
    }
  }
} 
Example 161
Source File: MavenPluginsMappingRuleTest.scala    From RTran   with Apache License 2.0 5 votes vote down vote up
package com.ebay.rtran.maven

import java.io.File

import org.apache.commons.io.FileUtils
import org.scalatest.{BeforeAndAfterEach, FlatSpecLike, Matchers}

import scala.collection.JavaConversions._


class MavenPluginsMappingRuleTest extends FlatSpecLike with Matchers with BeforeAndAfterEach {
  val projectRoot = new File(getClass.getClassLoader.getResource("mvnproject").getFile)
  val destProjectRoot = new File(projectRoot.getParentFile, projectRoot.getName + "-bak")

  override def beforeEach = {
    FileUtils.deleteQuietly(destProjectRoot)
    FileUtils.copyDirectory(projectRoot, destProjectRoot)
  }

  "MavenPluginsMappingRule" should "be able to alter both plugins and managed plugins" in {
    val ruleConfig = MavenPluginsMappingRuleConfig(
      List(
        PluginMapping(
          SimplePlugin(Some("com.ebay.rtran.old"), "some-maven-plugin"),
          SimplePlugin(Some("com.ebay.rtran.new"), "some-maven-plugin")
        )
      )
    )
    val projectCtx = new MavenProjectCtx(destProjectRoot)
    val provider = new MultiModuleMavenModelProvider
    val model = provider create projectCtx
    val rule = new MavenPluginsMappingRule(ruleConfig)
    provider save rule.transform(model)

    val transformed = provider create projectCtx
    transformed.parents.head
      .pomModel.getBuild.getPluginManagement.getPlugins
      .exists(_.getGroupId == "com.ebay.rtran.old") should be (false)

    transformed.parents.head
      .pomModel.getBuild.getPluginManagement.getPlugins
      .exists(_.getGroupId == "com.ebay.rtran.new") should be (true)

    transformed.parents.head
      .pomModel.getBuild.getPlugins
      .exists(_.getGroupId == "com.ebay.rtran.old") should be (false)

    transformed.parents.head
      .pomModel.getBuild.getPlugins
      .exists(_.getGroupId == "com.ebay.rtran.new") should be (true)
  }

  "MavenPluginsMappingRule" should "not alter plugins or managed plugins that don't exist" in {
    val ruleConfig = MavenPluginsMappingRuleConfig(
      List(
        PluginMapping(
          SimplePlugin(Some("com.ebay.rtran.old"), "non-exist"),
          SimplePlugin(Some("com.ebay.rtran.new"), "non-exist")
        )
      )
    )
    val projectCtx = new MavenProjectCtx(destProjectRoot)
    val provider = new MultiModuleMavenModelProvider
    val model = provider create projectCtx
    val rule = new MavenPluginsMappingRule(ruleConfig)
    val mpSize = model.parents.head
      .pomModel.getBuild.getPluginManagement.getPlugins.size
    val pluginSize = model.parents.head
      .pomModel.getBuild.getPlugins.size
    provider save rule.transform(model)

    val transformed = provider create projectCtx
    transformed.parents.head
      .pomModel.getBuild.getPluginManagement.getPlugins.size should be (mpSize)

    transformed.parents.head
      .pomModel.getBuild.getPluginManagement.getPlugins
      .exists(_.getGroupId == "com.ebay.rtran.old") should be (true)

    transformed.parents.head
      .pomModel.getBuild.getPlugins.size should be (pluginSize)

    transformed.parents.head
      .pomModel.getBuild.getPlugins
      .exists(_.getGroupId == "com.ebay.rtran.old") should be (true)
  }
} 
Example 162
Source File: MavenRemoveManagedDependenciesRuleTest.scala    From RTran   with Apache License 2.0 5 votes vote down vote up
package com.ebay.rtran.maven

import java.io.File

import com.ebay.rtran.maven.util.MavenModelUtil
import MavenModelUtil.SimpleDependency
import org.apache.commons.io.FileUtils
import org.scalatest.{BeforeAndAfterEach, Matchers, FlatSpecLike}

import scala.collection.JavaConversions._


class MavenRemoveManagedDependenciesRuleTest extends FlatSpecLike with Matchers with BeforeAndAfterEach {
  val projectRoot = new File(getClass.getClassLoader.getResource("mvnproject").getFile)
  val destProjectRoot = new File(projectRoot.getParentFile, projectRoot.getName + "-bak")

  override def beforeEach = {
    FileUtils.deleteQuietly(destProjectRoot)
    FileUtils.copyDirectory(projectRoot, destProjectRoot)
  }

  "MavenRemoveManagedDependenciesRule" should "be able to remove managed dependencies" in {
    val ruleConfig = MavenRemoveManagedDependenciesRuleConfig(
      Set(SimpleDependency("org.eclipse.aether", "aether-spi"))
    )
    val projectCtx = new MavenProjectCtx(destProjectRoot)
    val provider = new MultiModuleMavenModelProvider
    val model = provider create projectCtx
    val rule = new MavenRemoveManagedDependenciesRule(ruleConfig)
    provider save rule.transform(model)

    val transformed = provider create projectCtx
    transformed.parents.head
      .pomModel.getDependencyManagement.getDependencies.exists(_.getArtifactId == "aether-spi") should be (false)
  }

  "MavenRemoveManagedDependenciesRule" should "not remove managed dependencies that don't exist" in {
    val ruleConfig = MavenRemoveManagedDependenciesRuleConfig(
      Set(SimpleDependency("org.slf4j", "slf4j-api"))
    )
    val projectCtx = new MavenProjectCtx(destProjectRoot)
    val provider = new MultiModuleMavenModelProvider
    val model = provider create projectCtx
    val rule = new MavenRemoveManagedDependenciesRule(ruleConfig)
    val originalSize = model.parents.head
      .pomModel.getDependencyManagement.getDependencies.size
    provider save rule.transform(model)

    val transformed = provider create projectCtx
    transformed.parents.head
      .pomModel.getDependencyManagement.getDependencies.size should be (originalSize)
  }

  "MavenRemoveManagedDependenciesRule" should "remove managed dependencies matches that match other condition" in {
    val ruleConfig = MavenRemoveManagedDependenciesRuleConfig(
      Set(SimpleDependency("org.eclipse.aether", "aether-spi", version = Some("1.0.2.v20150114")))
    )
    val projectCtx = new MavenProjectCtx(destProjectRoot)
    val provider = new MultiModuleMavenModelProvider
    val model = provider create projectCtx
    val rule = new MavenRemoveManagedDependenciesRule(ruleConfig)
    provider save rule.transform(model)

    val transformed = provider create projectCtx
    transformed.parents.head
      .pomModel.getDependencyManagement.getDependencies.exists(_.getArtifactId == "aether-spi") should be (false)
  }

  "MavenRemoveManagedDependenciesRule" should "not remove managed dependencies if other condition doesn't match" in {
    val ruleConfig = MavenRemoveManagedDependenciesRuleConfig(
      Set(SimpleDependency("org.eclipse.aether", "aether-spi", version = Some("1.0.3.v20150114")))
    )
    val projectCtx = new MavenProjectCtx(destProjectRoot)
    val provider = new MultiModuleMavenModelProvider
    val model = provider create projectCtx
    val rule = new MavenRemoveManagedDependenciesRule(ruleConfig)
    provider save rule.transform(model)

    val transformed = provider create projectCtx
    transformed.parents.head
      .pomModel.getDependencyManagement.getDependencies.exists(_.getArtifactId == "aether-spi") should be (true)
  }
} 
Example 163
Source File: MavenRemoveRepositoriesRuleTest.scala    From RTran   with Apache License 2.0 5 votes vote down vote up
package com.ebay.rtran.maven

import java.io.File

import org.apache.commons.io.FileUtils
import org.scalatest.{BeforeAndAfterEach, FlatSpecLike, Matchers}


class MavenRemoveRepositoriesRuleTest extends FlatSpecLike with Matchers with BeforeAndAfterEach {
  val projectRoot = new File(getClass.getClassLoader.getResource("mvnproject").getFile)
  val destProjectRoot = new File(projectRoot.getParentFile, projectRoot.getName + "-bak")

  override def beforeEach = {
    FileUtils.deleteQuietly(destProjectRoot)
    FileUtils.copyDirectory(projectRoot, destProjectRoot)
  }

  "MavenRemoveRepositoriesRule" should "remove repository that matches given patterns" in {
    val ruleConfig = MavenRemoveRepositoriesRuleConfig(
      Set(
        ".*/content/repositories/releases[/]?",
        ".*/content/repositories/snapshots[/]?"
      )
    )
    val projectCtx = new MavenProjectCtx(destProjectRoot)
    val provider = new MultiModuleMavenModelProvider
    val rule = new MavenRemoveRepositoriesRule(ruleConfig)
    val model = provider create projectCtx
    provider save (rule transform model)

    val transformed = provider create projectCtx
    transformed.modules foreach { module =>
      module.pomModel.getRepositories.size should be (0)
    }
  }
} 
Example 164
Source File: MavenAddDependenciesRuleTest.scala    From RTran   with Apache License 2.0 5 votes vote down vote up
package com.ebay.rtran.maven

import java.io.File

import org.apache.commons.io.FileUtils
import com.ebay.rtran.maven.util.MavenModelUtil
import com.ebay.rtran.maven.util.MavenModelUtil.SimpleDependency
import org.scalatest.{BeforeAndAfterEach, FlatSpecLike, Matchers}

import scala.collection.JavaConversions._


class MavenAddDependenciesRuleTest extends FlatSpecLike with Matchers with BeforeAndAfterEach {

  val projectRoot = new File(getClass.getClassLoader.getResource("mvnproject").getFile)
  val destProjectRoot = new File(projectRoot.getParentFile, projectRoot.getName + "-bak")

  override def beforeEach = {
    FileUtils.deleteQuietly(destProjectRoot)
    FileUtils.copyDirectory(projectRoot, destProjectRoot)
  }

  "MavenAddDependenciesRule" should "be able to add dependencies" in {
    val ruleConfig = MavenAddDependenciesRuleConfig(
      Set(
        SimpleDependency("org.slf4j", "slf4j-api"),
        SimpleDependency("org.slf4j", "slf4j-log4j12")
      )
    )
    val projectCtx = new MavenProjectCtx(destProjectRoot)
    val provider = new MultiModuleMavenModelProvider
    val model = provider create projectCtx
    val rule = new MavenAddDependenciesRule(ruleConfig)
    provider save rule.transform(model)

    val transformed = provider create projectCtx
    transformed.modules foreach { module =>
      module.pomModel.getDependencies.exists(_.getArtifactId == "slf4j-api") should be (true)
      module.pomModel.getDependencies.exists(_.getArtifactId == "slf4j-log4j12") should be (true)
    }
  }

  "MavenAddDependenciesRule" should "not add dependencies that already exist" in {
    val ruleConfig = MavenAddDependenciesRuleConfig(
      Set(
        SimpleDependency("junit", "junit")
      )
    )
    val projectCtx = new MavenProjectCtx(destProjectRoot)
    val provider = new MultiModuleMavenModelProvider
    val model = provider create projectCtx
    val rule = new MavenAddDependenciesRule(ruleConfig)
    val originalSize = model.modules
      .find(_.pomModel.getPackaging == "pom")
      .map(_.pomModel.getDependencies.size)
    provider save rule.transform(model)

    val transformed = provider create projectCtx
    transformed.modules
      .find(_.pomModel.getPackaging == "pom")
      .map(_.pomModel.getDependencies.size) should be (originalSize)
    transformed.modules foreach { module =>
      module.pomModel.getDependencies.exists(_.getArtifactId == "junit") should be (true)
    }
  }
} 
Example 165
Source File: MavenRemovePluginsRuleTest.scala    From RTran   with Apache License 2.0 5 votes vote down vote up
package com.ebay.rtran.maven

import java.io.File

import org.apache.commons.io.FileUtils
import org.scalatest.{BeforeAndAfterEach, FlatSpecLike, Matchers}

import scala.collection.JavaConversions._


class MavenRemovePluginsRuleTest extends FlatSpecLike with Matchers with BeforeAndAfterEach {
  val projectRoot = new File(getClass.getClassLoader.getResource("mvnproject").getFile)
  val destProjectRoot = new File(projectRoot.getParentFile, projectRoot.getName + "-bak")

  override def beforeEach = {
    FileUtils.deleteQuietly(destProjectRoot)
    FileUtils.copyDirectory(projectRoot, destProjectRoot)
  }

  "MavenRemovePluginsRule" should "be able to remove both plugins and managed plugins" in {
    val ruleConfig = MavenRemoveManagedPluginsRuleConfig(
      Set(SimplePlugin(artifactId = "maven-source-plugin"))
    )
    val projectCtx = new MavenProjectCtx(destProjectRoot)
    val provider = new MultiModuleMavenModelProvider
    val model = provider create projectCtx
    val rule = new MavenRemovePluginsRule(ruleConfig)
    provider save rule.transform(model)

    val transformed = provider create projectCtx
    transformed.parents.head
      .pomModel.getBuild.getPluginManagement.getPlugins
      .exists(_.getArtifactId == "maven-source-plugin") should be (false)

    transformed.parents.head
      .pomModel.getBuild.getPlugins
      .exists(_.getArtifactId == "maven-source-plugin") should be (false)
  }

  "MavenRemovePluginsRule" should "not remove plugins or managed plugins that don't exist" in {
    val ruleConfig = MavenRemoveManagedPluginsRuleConfig(
      Set(SimplePlugin(artifactId = "maven-surefire-plugin"))
    )
    val projectCtx = new MavenProjectCtx(destProjectRoot)
    val provider = new MultiModuleMavenModelProvider
    val model = provider create projectCtx
    val rule = new MavenRemovePluginsRule(ruleConfig)
    val mpSize = model.parents.head.pomModel.getBuild.getPluginManagement.getPlugins.size
    val pluginSize = model.parents.head.pomModel.getBuild.getPlugins.size
    provider save rule.transform(model)

    val transformed = provider create projectCtx
    transformed.parents.head
      .pomModel.getBuild.getPluginManagement.getPlugins.size should be (mpSize)

    transformed.parents.head
      .pomModel.getBuild.getPlugins.size should be (pluginSize)
  }

  "MavenRemovePluginsRule" should "remove both plugins and managed plugins matches that match other condition" in {
    val ruleConfig = MavenRemoveManagedPluginsRuleConfig(
      Set(SimplePlugin(artifactId = "maven-source-plugin", version = Some("2.2.1")))
    )
    val projectCtx = new MavenProjectCtx(destProjectRoot)
    val provider = new MultiModuleMavenModelProvider
    val model = provider create projectCtx
    val rule = new MavenRemovePluginsRule(ruleConfig)
    provider save rule.transform(model)

    val transformed = provider create projectCtx
    transformed.parents.head
      .pomModel.getBuild.getPluginManagement.getPlugins
      .exists(_.getArtifactId == "maven-source-plugin") should be (false)

    transformed.parents.head
      .pomModel.getBuild.getPlugins
      .exists(_.getArtifactId == "maven-source-plugin") should be (false)
  }

  "MavenRemoveManagedPluginsRule" should "not remove plugins or managed plugins if other condition doesn't match" in {
    val ruleConfig = MavenRemoveManagedPluginsRuleConfig(
      Set(SimplePlugin(artifactId = "maven-source-plugin", version = Some("2.2.0")))
    )
    val projectCtx = new MavenProjectCtx(destProjectRoot)
    val provider = new MultiModuleMavenModelProvider
    val model = provider create projectCtx
    val rule = new MavenRemovePluginsRule(ruleConfig)
    provider save rule.transform(model)

    val transformed = provider create projectCtx
    transformed.parents.head
      .pomModel.getBuild.getPluginManagement.getPlugins
      .exists(_.getArtifactId == "maven-source-plugin") should be (true)

    transformed.parents.head
      .pomModel.getBuild.getPlugins
      .exists(_.getArtifactId == "maven-source-plugin") should be (true)
  }
} 
Example 166
Source File: MavenExcludeDependenciesRuleTest.scala    From RTran   with Apache License 2.0 5 votes vote down vote up
package com.ebay.rtran.maven

import java.io.File

import org.apache.commons.io.FileUtils
import org.scalatest.{BeforeAndAfterEach, FlatSpecLike, Matchers}

import scala.collection.JavaConversions._


class MavenExcludeDependenciesRuleTest extends FlatSpecLike with Matchers with BeforeAndAfterEach {

  val projectRoot = new File(getClass.getClassLoader.getResource("mvnproject").getFile)
  val destProjectRoot = new File(projectRoot.getParentFile, projectRoot.getName + "-bak")

  override def beforeEach = {
    FileUtils.deleteQuietly(destProjectRoot)
    FileUtils.copyDirectory(projectRoot, destProjectRoot)
  }

  "MavenExcludeDependenciesRule" should "exclude the dependencies if they are used transitively" in {
    val ruleConfig = MavenExcludeDependenciesRuleConfig(
      Set(SimpleExclusion("org.springframework", "spring-aop"))
    )
    val projectCtx = new MavenProjectCtx(destProjectRoot)
    val provider = new MultiModuleMavenModelProvider
    val model = provider create projectCtx

    val rule = new MavenExcludeDependenciesRule(ruleConfig)
    provider save rule.transform(model)

    val transformed = provider create projectCtx
    transformed.modules foreach { module =>
      if (module.pomModel.getPackaging != "war") {
        module.pomModel.getDependencies.forall(_.getExclusions.size == 0) should be (true)
      } else {
        module.pomModel.getDependencies.exists(_.getExclusions.size > 0) should be (true)
      }
    }
  }

} 
Example 167
Source File: ModifyFilesRuleTest.scala    From RTran   with Apache License 2.0 5 votes vote down vote up
package com.ebay.rtran.generic

import java.io.File

import org.apache.commons.io.FileUtils
import org.scalatest.{BeforeAndAfterEach, FlatSpecLike, Matchers}

import scala.io.Source


class ModifyFilesRuleTest extends FlatSpecLike with Matchers with BeforeAndAfterEach {

  val projectRoot = new File(getClass.getClassLoader.getResource("someproject").getFile)
  val destProjectRoot = new File(projectRoot.getParentFile, projectRoot.getName + "-bak")

  override def beforeEach = {
    FileUtils.deleteQuietly(destProjectRoot)
    FileUtils.copyDirectory(projectRoot, destProjectRoot)
  }

  "ModifyFilesRule" should "modify the file correctly" in {
    val ruleConfig = ModifyFilesRuleConfig(
      "**/fileA.txt",
      None,
      List(
        ContentMapping("hello\\s(.+)\\n", "hallo $1\n"),
        ContentMapping("(.+)\\sBob", "$1 Alice")
      )
    )
    val projectCtx = new GenericProjectCtx(destProjectRoot)
    val provider = new AllFilesModelProvider
    val model = provider create projectCtx
    val rule = new ModifyFilesRule(ruleConfig)
    val result = rule transform model
    val file = result.files.find(_.getName == "fileA.txt")
    file.nonEmpty should be (true)
    Source.fromFile(file.get).getLines.toList should be (List("hallo world", "hi Alice"))
  }

} 
Example 168
Source File: MoveFilesRuleTest.scala    From RTran   with Apache License 2.0 5 votes vote down vote up
package com.ebay.rtran.generic

import java.io.File

import org.apache.commons.io.FileUtils
import org.json4s.jackson.JsonMethods._
import org.scalatest.{BeforeAndAfterEach, FlatSpecLike, Matchers}


class MoveFilesRuleTest extends FlatSpecLike with Matchers with BeforeAndAfterEach {

  val projectRoot = new File(getClass.getClassLoader.getResource("someproject").getFile)
  val destProjectRoot = new File(projectRoot.getParentFile, projectRoot.getName + "-bak")

  override def beforeEach = {
    FileUtils.deleteQuietly(destProjectRoot)
    FileUtils.copyDirectory(projectRoot, destProjectRoot)
  }

  "MoveFilesRule" should "move file to the dest directory" in {
    // The original listing is garbled at this point (a JSON snippet and the Scala config are
    // interleaved); a MoveFilesRuleConfig built from the two visible Move mappings is assumed here.
    val ruleConfig = MoveFilesRuleConfig(
      List(
        Move("**.txt", "otherdirectory/dest"),
        Move("*.txt", "otherdirectory")
      )
    )
    val projectCtx = new GenericProjectCtx(destProjectRoot)
    val provider = new AllFilesModelProvider
    val model = provider create projectCtx
    val rule = new MoveFilesRule(ruleConfig)
    val result = rule transform model
    result.files forall (_.exists) should be (true)
  }

} 
Example 169
Source File: ModifyXMLFilesRuleTest.scala    From RTran   with Apache License 2.0 5 votes vote down vote up
package com.ebay.rtran.generic

import java.io.File

import org.apache.commons.io.FileUtils
import com.ebay.rtran.xml._
import org.scalatest.{BeforeAndAfterEach, FlatSpecLike, Matchers}

import scala.io.Source
import scala.language.postfixOps


class ModifyXMLFilesRuleTest extends FlatSpecLike with Matchers with BeforeAndAfterEach {

  val projectRoot = new File(getClass.getClassLoader.getResource("someproject").getFile)
  val destProjectRoot = new File(projectRoot.getParentFile, projectRoot.getName + "-bak")

  override def beforeEach = {
    FileUtils.deleteQuietly(destProjectRoot)
    FileUtils.copyDirectory(projectRoot, destProjectRoot)
  }

  "ModifyXMLFilesRuleTest" should "able to delete nodes" in {
    val provider = new XMLFilesModelProvider
    val ruleConfig = ModifyXMLFilesRuleConfig(
      Some("***.xml"),
      List(
        ModifyXMLOperation(
          "//person[@name=\'Bob\']/job",
          OperationType.Replace,
          Some("<job>Software Engineer</job>")
        )
      )
    )
    val provider = new XMLFilesModelProvider
    val rule = new ModifyXMLFilesRule(ruleConfig)
    val transformedModel = rule.transform(provider.create(new GenericProjectCtx(destProjectRoot)))
    provider save transformedModel

    val transformedContent = Source.fromFile(new File(destProjectRoot, "somedirectory/someXML.xml")).getLines.mkString("\n")
    transformedContent should include ("Bob")
    transformedContent should include ("Software Engineer")
    transformedContent should not include "Salesman"
  }

} 
Example 170
Source File: RuleEngineTest.scala    From RTran   with Apache License 2.0 5 votes vote down vote up
package com.ebay.rtran.core

import java.io.File

import org.apache.commons.io.FileUtils
import org.json4s.jackson.JsonMethods._
import com.ebay.rtran.core.mock.{MyModifyFileRule, MyProject, MyRenameFileRule, MyRenameFileRuleConfig}
import org.scalatest.{BeforeAndAfterEach, FlatSpecLike, Matchers}

import scala.io.Source
import scala.collection.JavaConversions._


class RuleEngineTest extends FlatSpecLike with Matchers with BeforeAndAfterEach {

  val projectDir = new File(getClass.getClassLoader.getResource("myproject").getFile)
  val backupDir = new File(projectDir.getParentFile, projectDir.getName + "-bak")

  override def beforeEach = {
    FileUtils.copyDirectory(projectDir, backupDir)
  }
  override def afterEach = {
    FileUtils.deleteQuietly(backupDir)
  }

  "RuleEngine" should "execute rules from UpgradeConfiguration" in {
    val engine = new RuleEngine
    val projectRoot = backupDir
    val configuration = JsonUpgradeConfiguration( List(
      JsonRuleConfiguration("ModifyFileRule", None),
      JsonRuleConfiguration("RenameFileRule", Some(parse("""{"newName":"anotherfile"}""")))
    ))
    engine.execute(new MyProject(projectRoot), configuration)
    new File(projectRoot, "somefile").exists should be (false)
    new File(projectRoot, "anotherfile").exists should be (true)
    Source.fromFile(new File(projectRoot, "anotherfile")).getLines.toList should be (List("hi world", "hi Bob"))
  }

  "RuleEngine" should "execute rules from code" in {
    val engine = new RuleEngine
    val projectRoot = backupDir
    engine.execute(
      new MyProject(projectRoot),
      List(
        new MyModifyFileRule(),
        new MyRenameFileRule(MyRenameFileRuleConfig("anotherfile"))
      )
    )
    new File(projectRoot, "somefile").exists should be (false)
    new File(projectRoot, "anotherfile").exists should be (true)
    Source.fromFile(new File(projectRoot, "anotherfile")).getLines.toList should be (List("hi world", "hi Bob"))
  }

} 
Example 171
Source File: ReportAndLogSupportTest.scala    From RTran   with Apache License 2.0 5 votes vote down vote up
package com.ebay.rtran.report

import java.io.File

import com.typesafe.scalalogging.LazyLogging
import org.scalatest.{BeforeAndAfterEach, FlatSpecLike, Matchers}

import scala.io.Source


class ReportAndLogSupportTest extends FlatSpecLike with Matchers with BeforeAndAfterEach with LazyLogging {

  val projectRoot = new File(getClass.getClassLoader.getResource(".").getFile, "testdir")
  projectRoot.mkdirs

  val report = new ReportAndLogSupport {
    override val warnLogPrefix: String = "test-warn-log"
    override val debugLogPrefix: String = "test-debug-log"
    override val reportFilePrefix: String = "test-report"
  }

  "report" should "get all subscribers that implement IReportEventSubscriber" in {
    report.allSubscribers(projectRoot, "com.ebay.rtran.report").size should not be 0
  }

  "report" should "create the logs and report" in {
    report.createReportAndLogs(projectRoot, None) {
      logger.info("This is an info")
      logger.warn("This is a warning")
      logger.debug("Debug this")
    }
    val reportFile = new File(projectRoot, report.reportFilePrefix + ".md")
    reportFile.exists should be (true)
    val warnLog = new File(projectRoot, report.warnLogPrefix + ".log")
    warnLog.exists should be (true)
    Source.fromFile(warnLog).getLines.mkString should include ("This is a warning")
    val debugLog = new File(projectRoot, report.debugLogPrefix + ".log")
    debugLog.exists should be (true)
    val content = Source.fromFile(debugLog).getLines.mkString
    content should include ("This is an info")
    content should include ("This is a warning")
    content should include ("Debug this")

    reportFile.delete
    warnLog.delete
    debugLog.delete
  }
} 
Example 172
Source File: SparkStageParamTest.scala    From TransmogrifAI   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package org.apache.spark.ml

import com.salesforce.op.stages.SparkStageParam
import com.salesforce.op.test.TestSparkContext
import org.apache.spark.ml.feature.StandardScaler
import org.joda.time.DateTime
import org.json4s.JsonDSL._
import org.json4s._
import org.json4s.jackson.JsonMethods.{parse, _}
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.{BeforeAndAfterEach, FlatSpec}


@RunWith(classOf[JUnitRunner])
class SparkStageParamTest extends FlatSpec with TestSparkContext with BeforeAndAfterEach {
  import SparkStageParam._

  var savePath: String = _
  var param: SparkStageParam[StandardScaler] = _
  var stage: StandardScaler = _

  override def beforeEach(): Unit = {
    super.beforeEach()
    savePath = tempDir + "/op-stage-param-test-" + DateTime.now().getMillis
    param = new SparkStageParam[StandardScaler](parent = "test", name = "test", doc = "none")
    // by setting both to be the same, we guarantee that at least one isn't the default value
    stage = new StandardScaler().setWithMean(true).setWithStd(false)
  }

  // easier if test both at the same time
  Spec[SparkStageParam[_]] should "encode and decode properly when is set" in {
    param.savePath = Option(savePath)
    val jsonOut = param.jsonEncode(Option(stage))
    val parsed = parse(jsonOut).asInstanceOf[JObject]
    val updated = parsed ~ ("path" -> savePath) // inject path for decoding

    updated shouldBe JObject(
      "className" -> JString(stage.getClass.getName),
      "uid" -> JString(stage.uid),
      "path" -> JString(savePath)
    )
    val updatedJson = compact(updated)

    param.jsonDecode(updatedJson) match {
      case None => fail("Failed to recover the stage")
      case Some(stageRecovered) =>
        stageRecovered shouldBe a[StandardScaler]
        stageRecovered.uid shouldBe stage.uid
        stageRecovered.getWithMean shouldBe stage.getWithMean
        stageRecovered.getWithStd shouldBe stage.getWithStd
    }
  }

  it should "except out when path is empty" in {
    intercept[RuntimeException](param.jsonEncode(Option(stage))).getMessage shouldBe
      s"Path must be set before Spark stage '${stage.uid}' can be saved"
  }

  it should "have empty path if stage is empty" in {
    param.savePath = Option(savePath)
    val jsonOut = param.jsonEncode(None)
    val parsed = parse(jsonOut)

    parsed shouldBe JObject("className" -> JString(NoClass), "uid" -> JString(NoUID))
    param.jsonDecode(jsonOut) shouldBe None
  }
} 
Example 173
Source File: SparkWrapperParamsTest.scala    From TransmogrifAI   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.salesforce.op.stages.sparkwrappers.generic

import com.salesforce.op.features.types._
import com.salesforce.op.test.TestCommon
import org.apache.spark.ml.feature.{StandardScaler, StandardScalerModel}
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.{BeforeAndAfterEach, FlatSpec}

@RunWith(classOf[JUnitRunner])
class SparkWrapperParamsTest extends FlatSpec with BeforeAndAfterEach with TestCommon {

  private def estimator(sparkMlStageIn: Option[StandardScaler] = None) = {
    new SwUnaryEstimator[Real, Real, StandardScalerModel, StandardScaler](
      inputParamName = "in", outputParamName = "out",
      operationName = "test-op", sparkMlStageIn = sparkMlStageIn
    )
  }

  Spec[SparkWrapperParams[_]] should "have proper default values for path and stage" in {
    val stage = estimator()
    stage.getStageSavePath() shouldBe None
    stage.getSparkMlStage() shouldBe None
  }
  it should "when setting path, it should also set path to the stage param" in {
    val stage = estimator()
    stage.setStageSavePath("/test/path")
    stage.getStageSavePath() shouldBe Some("/test/path")
  }
  it should "allow set/get spark params on a wrapped stage" in {
    val sparkStage = new StandardScaler()
    val stage = estimator(sparkMlStageIn = Some(sparkStage))
    stage.getSparkMlStage() shouldBe Some(sparkStage)
    for {
      sparkStage <- stage.getSparkMlStage()
      withMean = sparkStage.getOrDefault(sparkStage.withMean)
    } {
      withMean shouldBe false
      sparkStage.set[Boolean](sparkStage.withMean, true)
      sparkStage.get(sparkStage.withMean) shouldBe Some(true)
    }
  }

} 
Example 174
Source File: LoadBalanceServiceSuite.scala    From kyuubi   with Apache License 2.0
package yaooqinn.kyuubi.ha

import java.io.IOException

import org.apache.spark.{KyuubiSparkUtil, SparkFunSuite}
import org.scalatest.{BeforeAndAfterEach, Matchers}
import org.scalatest.mock.MockitoSugar

import yaooqinn.kyuubi.SecuredFunSuite
import yaooqinn.kyuubi.server.KyuubiServer
import yaooqinn.kyuubi.service.ServiceException
import yaooqinn.kyuubi.service.State._

class LoadBalanceServiceSuite extends SparkFunSuite
  with ZookeeperFunSuite
  with Matchers
  with SecuredFunSuite
  with MockitoSugar
  with BeforeAndAfterEach {

  private var server: KyuubiServer = _

  private var haService: HighAvailableService = _

  override def beforeEach(): Unit = {
    server = new KyuubiServer()
    haService = new LoadBalanceService(server)
    super.beforeEach()
  }

  override def afterEach(): Unit = {
    if (server != null) {
      server.stop()
    }

    if (haService != null) {
      haService.stop()
    }

    super.afterEach()
  }

  test("Init") {
    haService.getConf should be(null)
    haService.getStartTime should be(0)
    haService.getName should be(classOf[LoadBalanceService].getSimpleName)
    haService.getServiceState should be(NOT_INITED)

    haService.init(conf)
    haService.getConf should be(conf)
    haService.getStartTime should be(0)
    haService.getServiceState should be(INITED)

    tryWithSecurityEnabled {
      val e = intercept[IOException](haService.init(conf))
      e.getMessage should startWith(KyuubiSparkUtil.KEYTAB)
    }
  }

  test("Start") {
    val e1 = intercept[ServiceException](haService.start())
    e1.getCause.getMessage should startWith("Unable to address the server")

    server.init(conf)
    val e2 = intercept[ServiceException](haService.start())
    e2.getCause.getMessage should be("client cannot be null")
    haService.init(conf)
    haService.start()
    haService.getServiceState should be(STARTED)
    haService.getStartTime should not be 0
  }

  test("Stop before init") {
    haService.stop()
  }

  test("Stop after init") {
    haService.init(conf)
    haService.stop()
  }

  test("Stop after start") {
    server.init(conf)
    haService.init(conf)
    haService.start()
    haService.stop()
  }
} 
Example 175
Source File: KyuubiFunSuite.scala    From kyuubi   with Apache License 2.0
package org.apache.kyuubi

// scalastyle:off
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, FunSuite, Outcome}

trait KyuubiFunSuite extends FunSuite
  with BeforeAndAfterAll
  with BeforeAndAfterEach
  with ThreadAudit
  with Logging {
  // scalastyle:on
  override def beforeAll(): Unit = {
    doThreadPreAudit()
    super.beforeAll()
  }

  override def afterAll(): Unit = {
    super.afterAll()
    doThreadPostAudit()
  }

  final override def withFixture(test: NoArgTest): Outcome = {
    val testName = test.text
    val suiteName = this.getClass.getName
    val shortSuiteName = suiteName.replaceAll("org\\.apache\\.kyuubi", "o\\.a\\.k")
    try {
      info(s"\n\n===== TEST OUTPUT FOR $shortSuiteName: '$testName' =====\n")
      test()
    } finally {
      info(s"\n\n===== FINISHED $shortSuiteName: '$testName' =====\n")
    }
  }
} 
Example 176
Source File: RepositoryRDDProviderSpec.scala    From jgit-spark-connector   with Apache License 2.0
package tech.sourced.engine.provider

import java.nio.file.{Path, Paths}
import java.util.UUID

import org.apache.commons.io.FileUtils
import org.scalatest.{BeforeAndAfterEach, FlatSpec, Matchers}
import tech.sourced.engine.util.RepoUtils
import tech.sourced.engine.{BaseSivaSpec, BaseSparkSpec}

class RepositoryRDDProviderSpec extends FlatSpec with Matchers with BeforeAndAfterEach
  with BaseSparkSpec with BaseSivaSpec {

  private var provider: RepositoryRDDProvider = _
  private var tmpPath: Path = _

  override def beforeEach(): Unit = {
    super.beforeEach()
    provider = RepositoryRDDProvider(ss.sparkContext)
    tmpPath = Paths.get(
      System.getProperty("java.io.tmpdir"),
      UUID.randomUUID().toString
    )
  }

  override def afterEach(): Unit = {
    super.afterEach()

    FileUtils.deleteQuietly(tmpPath.toFile)
  }

  "RepositoryRDDProvider" should "retrieve bucketized raw repositories" in {
    tmpPath.resolve("a").toFile.mkdir()
    createRepo(tmpPath.resolve("a").resolve("repo"))

    tmpPath.resolve("b").toFile.mkdir()
    createRepo(tmpPath.resolve("b").resolve("repo"))

    createRepo(tmpPath.resolve("repo"))

    val repos = provider.get(tmpPath.toString, "standard").collect()
    repos.length should be(3)
  }

  it should "retrieve non-bucketized raw repositories" in {
    tmpPath.resolve("a").toFile.mkdir()
    createRepo(tmpPath.resolve("repo"))

    tmpPath.resolve("b").toFile.mkdir()
    createRepo(tmpPath.resolve("repo2"))

    val repos = provider.get(tmpPath.toString, "standard").collect()
    repos.length should be(2)
  }

  it should "retrieve bucketized siva repositories" in {
    val repos = provider.get(resourcePath, "siva").collect()
    repos.length should be(3)
  }

  it should "retrieve non-bucketized siva repositories" in {
    val repos = provider.get(Paths.get(resourcePath, "ff").toString, "siva").collect()
    repos.length should be(1)
  }

  private def createRepo(path: Path) = {
    val repo = RepoUtils.createRepo(path)
    RepoUtils.commitFile(repo, "file.txt", "something something", "some commit")
  }

} 
Example 177
Source File: EmailNotifierIntegrationSpec.scala    From vinyldns   with Apache License 2.0
package vinyldns.api.notifier.email

import com.typesafe.config.{Config, ConfigFactory}
import vinyldns.core.notifier._
import vinyldns.api.MySqlApiIntegrationSpec
import vinyldns.mysql.MySqlIntegrationSpec
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike
import vinyldns.core.domain.batch._
import vinyldns.core.domain.record.RecordType
import vinyldns.core.domain.record.AData
import org.joda.time.DateTime
import vinyldns.core.TestMembershipData._
import java.nio.file.{Files, Path, Paths}
import cats.effect.{IO, Resource}
import scala.collection.JavaConverters._
import org.scalatest.BeforeAndAfterEach
import cats.implicits._

class EmailNotifierIntegrationSpec
    extends MySqlApiIntegrationSpec
    with MySqlIntegrationSpec
    with Matchers
    with AnyWordSpecLike
    with BeforeAndAfterEach {

  import vinyldns.api.domain.DomainValidations._

  val emailConfig: Config = ConfigFactory.load().getConfig("vinyldns.email.settings")

  val targetDirectory = Paths.get("../../docker/email")

  override def beforeEach: Unit =
    deleteEmailFiles(targetDirectory).unsafeRunSync()

  override def afterEach: Unit =
    deleteEmailFiles(targetDirectory).unsafeRunSync()

  "Email Notifier" should {

    "send an email" in {
      val batchChange = BatchChange(
        okUser.id,
        okUser.userName,
        None,
        DateTime.now,
        List(
          SingleAddChange(
            Some("some-zone-id"),
            Some("zone-name"),
            Some("record-name"),
            "a" * HOST_MAX_LENGTH,
            RecordType.A,
            300,
            AData("1.1.1.1"),
            SingleChangeStatus.Complete,
            None,
            None,
            None
          )
        ),
        approvalStatus = BatchChangeApprovalStatus.AutoApproved
      )

      val program = for {
        _ <- userRepository.save(okUser)
        notifier <- new EmailNotifierProvider()
          .load(NotifierConfig("", emailConfig), userRepository)
        _ <- notifier.notify(Notification(batchChange))
        emailFiles <- retrieveEmailFiles(targetDirectory)
      } yield emailFiles

      val files = program.unsafeRunSync()

      files.length should be(1)

    }

  }

  def deleteEmailFiles(path: Path): IO[Unit] =
    for {
      files <- retrieveEmailFiles(path)
      _ <- files.traverse { file =>
        IO(Files.delete(file))
      }
    } yield ()

  def retrieveEmailFiles(path: Path): IO[List[Path]] =
    Resource.fromAutoCloseable(IO(Files.newDirectoryStream(path, "*.eml"))).use { s =>
      IO {
        s.iterator.asScala.toList
      }
    }

} 
Example 178
Source File: BatchChangeHandlerSpec.scala    From vinyldns   with Apache License 2.0
package vinyldns.api.engine

import cats.effect._
import org.joda.time.DateTime
import org.mockito.Matchers.any
import org.mockito.Mockito.{doReturn, verify}
import org.scalatest.BeforeAndAfterEach
import org.scalatest.wordspec.AnyWordSpec
import org.scalatestplus.mockito.MockitoSugar
import vinyldns.api.CatsHelpers
import vinyldns.api.repository.InMemoryBatchChangeRepository
import vinyldns.core.domain.batch._
import vinyldns.core.domain.record._
import vinyldns.core.notifier.{AllNotifiers, Notification, Notifier}

import scala.concurrent.ExecutionContext

class BatchChangeHandlerSpec
    extends AnyWordSpec
    with MockitoSugar
    with BeforeAndAfterEach
    with CatsHelpers {

  implicit val ec: ExecutionContext = scala.concurrent.ExecutionContext.global
  implicit val contextShift: ContextShift[IO] = IO.contextShift(ec)

  private val batchRepo = new InMemoryBatchChangeRepository
  private val mockNotifier = mock[Notifier]
  private val notifiers = AllNotifiers(List(mockNotifier))

  private val addChange = SingleAddChange(
    Some("zoneId"),
    Some("zoneName"),
    Some("recordName"),
    "recordName.zoneName",
    RecordType.A,
    300,
    AData("1.1.1.1"),
    SingleChangeStatus.Complete,
    None,
    Some("recordChangeId"),
    Some("recordSetId"),
    List(),
    "changeId"
  )

  private val completedBatchChange = BatchChange(
    "userId",
    "userName",
    Some("comments"),
    DateTime.now,
    List(addChange),
    Some("ownerGroupId"),
    BatchChangeApprovalStatus.AutoApproved
  )

  override protected def beforeEach(): Unit =
    batchRepo.clear()

  "notify on batch change complete" in {
    doReturn(IO.unit).when(mockNotifier).notify(any[Notification[_]])

    await(batchRepo.save(completedBatchChange))

    BatchChangeHandler
      .process(batchRepo, notifiers, BatchChangeCommand(completedBatchChange.id))
      .unsafeRunSync()

    verify(mockNotifier).notify(Notification(completedBatchChange))
  }

  "notify on failure" in {
    doReturn(IO.unit).when(mockNotifier).notify(any[Notification[_]])

    val partiallyFailedBatchChange =
      completedBatchChange.copy(changes = List(addChange.copy(status = SingleChangeStatus.Failed)))

    await(batchRepo.save(partiallyFailedBatchChange))

    BatchChangeHandler
      .process(batchRepo, notifiers, BatchChangeCommand(partiallyFailedBatchChange.id))
      .unsafeRunSync()

    verify(mockNotifier).notify(Notification(partiallyFailedBatchChange))
  }
} 
Example 179
Source File: MembershipValidationsSpec.scala    From vinyldns   with Apache License 2.0
package vinyldns.api.domain.membership

import cats.scalatest.EitherMatchers
import org.scalatestplus.mockito.MockitoSugar
import org.scalatest.BeforeAndAfterEach
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
import vinyldns.api.ResultHelpers
import vinyldns.core.TestMembershipData._
import vinyldns.core.domain.auth.AuthPrincipal
import vinyldns.api.domain.zone.NotAuthorizedError
import vinyldns.core.domain.membership.User

class MembershipValidationsSpec
    extends AnyWordSpec
    with Matchers
    with MockitoSugar
    with BeforeAndAfterEach
    with ResultHelpers
    with EitherMatchers {

  import vinyldns.api.domain.membership.MembershipValidations._

  "MembershipValidations" should {
    "hasMembersAndAdmins" should {
      "return true when a group has at least one member and one admin" in {
        hasMembersAndAdmins(okGroup) should be(right)
      }

      "return an error when a group has no members" in {
        val badGroup = okGroup.copy(memberIds = Set())
        val error = leftValue(hasMembersAndAdmins(badGroup))
        error shouldBe an[InvalidGroupError]
      }

      "return an error when a group has no admins" in {
        val badGroup = okGroup.copy(adminUserIds = Set())
        val error = leftValue(hasMembersAndAdmins(badGroup))
        error shouldBe an[InvalidGroupError]
      }
    }

    "isAdmin" should {
      "return true when the user is in admin group" in {
        canEditGroup(okGroup, okAuth) should be(right)
      }
      "return true when the user is a super user" in {
        canEditGroup(okGroup, superUserAuth) should be(right)
      }
      "return an error when the user is a support admin only" in {
        val user = User("some", "new", "user", isSupport = true)
        val supportAuth = AuthPrincipal(user, Seq())
        val error = leftValue(canEditGroup(okGroup, supportAuth))
        error shouldBe an[NotAuthorizedError]
      }
      "return an error when the user has no access and is not super" in {
        val user = User("some", "new", "user")
        val nonSuperAuth = AuthPrincipal(user, Seq())
        val error = leftValue(canEditGroup(okGroup, nonSuperAuth))
        error shouldBe an[NotAuthorizedError]
      }
    }

    "canSeeGroup" should {
      "return true when the user is in the group" in {
        canSeeGroup(okGroup.id, okAuth) should be(right)
      }
      "return true when the user is a super user" in {
        canSeeGroup(okGroup.id, superUserAuth) should be(right)
      }
      "return true when the user is a support admin" in {
        val user = User("some", "new", "user", isSupport = true)
        val supportAuth = AuthPrincipal(user, Seq())
        canSeeGroup(okGroup.id, supportAuth) should be(right)
      }
      "return an error when the user has no access and is not super" in {
        val user = User("some", "new", "user")
        val nonSuperAuth = AuthPrincipal(user, Seq())
        val error = leftValue(canSeeGroup(okGroup.id, nonSuperAuth))
        error shouldBe an[NotAuthorizedError]
      }

    }
  }
} 
Example 180
Source File: PrometheusRoutingSpec.scala    From vinyldns   with Apache License 2.0
package vinyldns.api.route

import akka.http.scaladsl.model.{HttpProtocol, HttpResponse, StatusCodes}
import akka.http.scaladsl.testkit.ScalatestRouteTest
import io.prometheus.client.CollectorRegistry
import io.prometheus.client.dropwizard.DropwizardExports
import org.scalatestplus.mockito.MockitoSugar
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
import org.scalatest.BeforeAndAfterEach
import vinyldns.core.VinylDNSMetrics

class PrometheusRoutingSpec
    extends AnyWordSpec
    with ScalatestRouteTest
    with PrometheusRoute
    with BeforeAndAfterEach
    with MockitoSugar
    with Matchers {

  val metricRegistry = VinylDNSMetrics.metricsRegistry

  val collectorRegistry = CollectorRegistry.defaultRegistry

  collectorRegistry.register(new DropwizardExports(metricRegistry))

  "GET /metrics/prometheus" should {
    "return metrics logged in prometheus" in {
      Get("/metrics/prometheus") ~> prometheusRoute ~> check {
        response.status shouldBe StatusCodes.OK
        val resultStatus = responseAs[HttpResponse]
        resultStatus.protocol shouldBe HttpProtocol("HTTP/1.1")
      }
    }
  }
} 
Example 181
Source File: MySqlUserChangeRepositoryIntegrationSpec.scala    From vinyldns   with Apache License 2.0
package vinyldns.mysql.repository

import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach}
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
import scalikejdbc.DB
import vinyldns.core.domain.membership.UserChange.{CreateUser, UpdateUser}
import vinyldns.core.domain.membership.{User, UserChangeRepository}
import vinyldns.mysql.TestMySqlInstance

class MySqlUserChangeRepositoryIntegrationSpec
    extends AnyWordSpec
    with BeforeAndAfterAll
    with BeforeAndAfterEach
    with Matchers {

  private val repo: UserChangeRepository = TestMySqlInstance.userChangeRepository
  private val user: User = User("user-id", "access-key", "secret-key")
  private val createUser = CreateUser(user, "creator-id", user.created)
  private val updateUser =
    UpdateUser(user.copy(userName = "new-username"), "creator-id", user.created, user)

  def clear(): Unit =
    DB.localTx { implicit s =>
      s.executeUpdate("DELETE FROM user_change")
      ()
    }

  override protected def beforeEach(): Unit = clear()

  override protected def afterAll(): Unit = clear()

  "MySqlUserChangeRepo.get" should {
    "get a user change" in {
      repo.save(createUser).unsafeRunSync() shouldBe createUser
      repo.get(createUser.id).unsafeRunSync() shouldBe Some(createUser)
    }

    "return None if user change doesn't exist" in {
      repo.get("does-not-exist").unsafeRunSync() shouldBe None
    }
  }

  "MySqlUserChangeRepo.save" should {
    "successfully save a CreateUser" in {
      repo.save(createUser).unsafeRunSync() shouldBe createUser
    }

    "successfully save an UpdateUser" in {
      repo.save(updateUser).unsafeRunSync() shouldBe updateUser
    }

    "on duplicate key update a user change" in {
      val overwriteCreateUser = createUser.copy(madeByUserId = "overwrite-creator")
      repo.save(createUser).unsafeRunSync() shouldBe createUser
      repo.save(overwriteCreateUser).unsafeRunSync() shouldBe overwriteCreateUser

      repo.get(createUser.id).unsafeRunSync() shouldBe Some(overwriteCreateUser)
    }
  }
} 
Example 182
Source File: AllNotifiersSpec.scala    From vinyldns   with Apache License 2.0
package vinyldns.core.notifier

import cats.scalatest.{EitherMatchers, EitherValues, ValidatedMatchers}
import org.scalatestplus.mockito.MockitoSugar
import org.mockito.Mockito._
import cats.effect.IO
import org.scalatest.BeforeAndAfterEach
import cats.effect.ContextShift
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class AllNotifiersSpec
    extends AnyWordSpec
    with Matchers
    with MockitoSugar
    with EitherValues
    with EitherMatchers
    with ValidatedMatchers
    with BeforeAndAfterEach {

  implicit val cs: ContextShift[IO] = IO.contextShift(scala.concurrent.ExecutionContext.global)

  val mockNotifiers = List.fill(3)(mock[Notifier])

  val notification = Notification("anything")

  override def beforeEach: Unit =
    mockNotifiers.foreach { mock =>
      reset(mock)
      when(mock.notify(notification)).thenReturn(IO.unit)
    }

  "notifier" should {
    "notify all contained notifiers" in {

      val notifier = AllNotifiers(mockNotifiers)

      notifier.notify(notification).unsafeRunSync()

      mockNotifiers.foreach(verify(_).notify(notification))
    }

    "suppress errors from notifiers" in {
      val notifier = AllNotifiers(mockNotifiers)

      when(mockNotifiers(2).notify(notification)).thenReturn(IO.raiseError(new Exception("fail")))

      notifier.notify(notification).unsafeRunSync()

      mockNotifiers.foreach(verify(_).notify(notification))
    }
  }

} 
Example 183
Source File: TaskSchedulerSpec.scala    From vinyldns   with Apache License 2.0
package vinyldns.core.task
import cats.effect.{ContextShift, IO, Timer}
import org.mockito.Mockito
import org.mockito.Mockito._
import org.scalatestplus.mockito.MockitoSugar
import org.scalatest.BeforeAndAfterEach

import scala.concurrent.duration._
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class TaskSchedulerSpec
    extends AnyWordSpec
    with Matchers
    with MockitoSugar
    with BeforeAndAfterEach {

  private implicit val cs: ContextShift[IO] =
    IO.contextShift(scala.concurrent.ExecutionContext.global)
  private implicit val timer: Timer[IO] = IO.timer(scala.concurrent.ExecutionContext.global)

  private val mockRepo = mock[TaskRepository]

  class TestTask(
      val name: String,
      val timeout: FiniteDuration,
      val runEvery: FiniteDuration,
      val checkInterval: FiniteDuration,
      testResult: IO[Unit] = IO.unit
  ) extends Task {
    def run(): IO[Unit] = testResult
  }

  override def beforeEach() = Mockito.reset(mockRepo)

  "TaskScheduler" should {
    "run a scheduled task" in {
      val task = new TestTask("test", 5.seconds, 500.millis, 500.millis)
      val spied = spy(task)
      doReturn(IO.unit).when(mockRepo).saveTask(task.name)
      doReturn(IO.pure(true)).when(mockRepo).claimTask(task.name, task.timeout, task.runEvery)
      doReturn(IO.unit).when(mockRepo).releaseTask(task.name)

      TaskScheduler.schedule(spied, mockRepo).take(1).compile.drain.unsafeRunSync()

      // We run twice because we run once on start up
      verify(spied, times(2)).run()
      verify(mockRepo, times(2)).claimTask(task.name, task.timeout, task.runEvery)
      verify(mockRepo, times(2)).releaseTask(task.name)
    }

    "release the task even on error" in {
      val task =
        new TestTask(
          "test",
          5.seconds,
          500.millis,
          500.millis,
          IO.raiseError(new RuntimeException("fail"))
        )
      doReturn(IO.unit).when(mockRepo).saveTask(task.name)
      doReturn(IO.pure(true)).when(mockRepo).claimTask(task.name, task.timeout, task.runEvery)
      doReturn(IO.unit).when(mockRepo).releaseTask(task.name)

      TaskScheduler.schedule(task, mockRepo).take(1).compile.drain.unsafeRunSync()

      // We release the task twice, once on start and once on the run
      verify(mockRepo, times(2)).releaseTask(task.name)
    }

    "fail to start if the task cannot be saved" in {
      val task = new TestTask("test", 5.seconds, 500.millis, 500.millis)
      val spied = spy(task)
      doReturn(IO.raiseError(new RuntimeException("fail"))).when(mockRepo).saveTask(task.name)

      a[RuntimeException] should be thrownBy TaskScheduler
        .schedule(spied, mockRepo)
        .take(1)
        .compile
        .drain
        .unsafeRunSync()
      verify(spied, never()).run()
    }
  }
} 
Example 184
Source File: ResetSystemProperties.scala    From spark1.52   with Apache License 2.0
package org.apache.spark.util

import java.util.Properties

import org.apache.commons.lang3.SerializationUtils
import org.scalatest.{BeforeAndAfterEach, Suite}

import org.apache.spark.SparkFunSuite


private[spark] trait ResetSystemProperties extends BeforeAndAfterEach { this: Suite =>
  var oldProperties: Properties = null

  override def beforeEach(): Unit = {
    // we need SerializationUtils.clone instead of `new Properties(System.getProperties())` because
    // the latter way of creating a copy does not copy the properties but initializes a new
    // Properties object with the given properties as defaults. Those defaults are then not
    // recognized at all by the standard Scala wrapper over Java Properties.
    oldProperties = SerializationUtils.clone(System.getProperties)
    super.beforeEach()
  }

  override def afterEach(): Unit = {
    try {
      super.afterEach()
    } finally {
      System.setProperties(oldProperties)
      oldProperties = null
    }
  }
} 
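The comment inside beforeEach above relies on a subtle java.util.Properties behavior: passing an existing Properties object to the constructor only installs it as a lookup fallback, it does not copy its entries, so Scala's collection wrapper sees an empty map. Below is a minimal standalone sketch of that difference (not part of any of the projects in this listing; the object name is made up, and plain clone() stands in for SerializationUtils.clone, which additionally deep-copies via serialization):

import java.util.Properties
import scala.collection.JavaConverters._

// Hypothetical demo object, only to illustrate why ResetSystemProperties clones the properties.
object PropertiesCopyDemo extends App {
  val original = new Properties()
  original.setProperty("spark.app.name", "demo")

  // "Copy" via the defaults constructor: the new object stays empty and merely falls back to `original`.
  val viaDefaults = new Properties(original)
  // Real copy: the entries end up in the new table itself.
  val viaClone = original.clone().asInstanceOf[Properties]

  println(viaDefaults.getProperty("spark.app.name")) // demo (found through the defaults fallback)
  println(viaDefaults.asScala.size)                  // 0   (the Scala wrapper ignores defaults)
  println(viaClone.asScala.size)                     // 1
}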
Example 185
Source File: DiskBlockManagerSuite.scala    From spark1.52   with Apache License 2.0
package org.apache.spark.storage

import java.io.{File, FileWriter}

import scala.language.reflectiveCalls

import org.mockito.Mockito.{mock, when}
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach}

import org.apache.spark.{SparkConf, SparkFunSuite}
import org.apache.spark.util.Utils
// DiskBlockManager manages and maintains the mapping between logical blocks and the physical blocks stored on disk.
// In general, a logical block is mapped to a physical file whose name is derived from its BlockId.
class DiskBlockManagerSuite extends SparkFunSuite with BeforeAndAfterEach with BeforeAndAfterAll {
  private val testConf = new SparkConf(false)
  private var rootDir0: File = _
  private var rootDir1: File = _
  private var rootDirs: String = _

  val blockManager = mock(classOf[BlockManager])
  when(blockManager.conf).thenReturn(testConf)
  // DiskBlockManager creates and maintains the logical mapping between logical blocks and physical disk locations;
  // by default, a block is mapped to a single file whose name is given by its BlockId.
  var diskBlockManager: DiskBlockManager = _

  override def beforeAll() {
    super.beforeAll()
    rootDir0 = Utils.createTempDir()
    rootDir1 = Utils.createTempDir()
    rootDirs = rootDir0.getAbsolutePath + "," + rootDir1.getAbsolutePath
  }

  override def afterAll() {
    super.afterAll()
    Utils.deleteRecursively(rootDir0)
    Utils.deleteRecursively(rootDir1)
  }

  override def beforeEach() {
    val conf = testConf.clone
    conf.set("spark.local.dir", rootDirs)
    diskBlockManager = new DiskBlockManager(blockManager, conf)
  }

  override def afterEach() {
    diskBlockManager.stop()
  }

  test("basic block creation") {//基本块的创建
    val blockId = new TestBlockId("test")
    // DiskBlockManager creates and maintains the logical mapping between logical blocks and physical disk locations;
    // by default, a block is mapped to a single file whose name is given by its BlockId.
    val newFile = diskBlockManager.getFile(blockId)
    writeToFile(newFile, 10)
    assert(diskBlockManager.containsBlock(blockId))
    newFile.delete()
    assert(!diskBlockManager.containsBlock(blockId))
  }

  test("enumerating blocks") {//枚举块
    val ids = (1 to 100).map(i => TestBlockId("test_" + i))
    val files = ids.map(id => diskBlockManager.getFile(id))
    files.foreach(file => writeToFile(file, 10))
    assert(diskBlockManager.getAllBlocks.toSet === ids.toSet)
  }

  def writeToFile(file: File, numBytes: Int) {
    val writer = new FileWriter(file, true)
    for (i <- 0 until numBytes) writer.write(i)
    writer.close()
  }
} 
Example 186
Source File: ElasticSearchWriterUtilsSpec.scala    From haystack-traces   with Apache License 2.0
package com.expedia.www.haystack.trace.indexer.unit

import com.expedia.www.haystack.trace.indexer.writers.es.ElasticSearchWriterUtils
import org.scalatest.{BeforeAndAfterEach, FunSpec, GivenWhenThen, Matchers}

class ElasticSearchWriterUtilsSpec extends FunSpec with Matchers with GivenWhenThen with BeforeAndAfterEach {
  var timezone: String = _

  override def beforeEach() {
    timezone = System.getProperty("user.timezone")
    System.setProperty("user.timezone", "CST")
  }

  override def afterEach(): Unit = {
    System.setProperty("user.timezone", timezone)
  }

  describe("elastic search writer") {
    it("should use UTC when generating ES indexes") {
      Given("the system timezone is not UTC")
      System.setProperty("user.timezone", "CST")
      val eventTimeInMicros = System.currentTimeMillis() * 1000

      When("the writer generates the ES indexes")
      val cstName = ElasticSearchWriterUtils.indexName("haystack-traces", 6, eventTimeInMicros)
      System.setProperty("user.timezone", "UTC")
      val utcName = ElasticSearchWriterUtils.indexName("haystack-traces", 6, eventTimeInMicros)

      Then("it should use UTC to get those indexes")
      cstName shouldBe utcName
    }
  }
} 
Example 187
Source File: CustomSchemaTest.scala    From spark-sftp   with Apache License 2.0
package com.springml.spark.sftp

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.types.{IntegerType, LongType, StringType, StructField, _}
import org.scalatest.{BeforeAndAfterEach, FunSuite}


class CustomSchemaTest extends FunSuite with BeforeAndAfterEach {
  var ss: SparkSession = _

  val csvTypesMap = Map("ProposalId" -> IntegerType,
    "OpportunityId" -> StringType,
    "Clicks" -> LongType,
    "Impressions" -> LongType
  )

  val jsonTypesMap = Map("name" -> StringType,
    "age" -> IntegerType
  )

  override def beforeEach() {
    ss = SparkSession.builder().master("local").appName("Custom Schema Test").getOrCreate()
  }

  private def validateTypes(field : StructField, typeMap : Map[String, DataType]) = {
    val expectedType = typeMap(field.name)
    assert(expectedType == field.dataType)
  }

  private def columnArray(typeMap : Map[String, DataType]) : Array[StructField] = {
    val columns = typeMap.map(x => new StructField(x._1, x._2, true))

    // copyToArray into an empty Array[StructField]() copies nothing, so materialize the columns directly
    columns.toArray
  }

  test ("Read CSV with custom schema") {
    val columnStruct = columnArray(csvTypesMap)
    val expectedSchema = StructType(columnStruct)

    val fileLocation = getClass.getResource("/sample.csv").getPath
    val dsr = DatasetRelation(fileLocation, "csv", "false", "true", ",", "\"", "\\", "false", null, expectedSchema, ss.sqlContext)
    val rdd = dsr.buildScan()

    assert(dsr.schema.fields.length == columnStruct.length)
    dsr.schema.fields.foreach(s => validateTypes(s, csvTypesMap))
  }

  test ("Read Json with custom schema") {
    val columnStruct = columnArray(jsonTypesMap)
    val expectedSchema = StructType(columnStruct)

    val fileLocation = getClass.getResource("/people.json").getPath
    val dsr = DatasetRelation(fileLocation, "json", "false", "true", ",", "\"", "\\", "false", null, expectedSchema, ss.sqlContext)
    val rdd = dsr.buildScan()

    assert(dsr.schema.fields.length == columnStruct.length)
    dsr.schema.fields.foreach(s => validateTypes(s, jsonTypesMap))
  }

} 
Example 188
Source File: TestDatasetRelation.scala    From spark-sftp   with Apache License 2.0
package com.springml.spark.sftp

import org.apache.spark.sql.SparkSession
import org.scalatest.{BeforeAndAfterEach, FunSuite}


class TestDatasetRelation extends FunSuite with BeforeAndAfterEach {
  var ss: SparkSession = _

  override def beforeEach() {
    ss = SparkSession.builder().master("local").enableHiveSupport().appName("Test Dataset Relation").getOrCreate()
  }

  test ("Read CSV") {
    val fileLocation = getClass.getResource("/sample.csv").getPath
    val dsr = DatasetRelation(fileLocation, "csv", "false", "true", ",", "\"", "\\", "false", null, null, ss.sqlContext)
    val rdd = dsr.buildScan()
    assert(3 == rdd.count())
  }

  test ("Read CSV using custom delimiter") {
    val fileLocation = getClass.getResource("/sample.csv").getPath
    val dsr = DatasetRelation(fileLocation, "csv", "false", "true", ";", "\"", "\\", "false", null, null, ss.sqlContext)
    val rdd = dsr.buildScan()
    assert(3 == rdd.count())
  }

  test ("Read multiline CSV using custom quote and escape") {
    val fileLocation = getClass.getResource("/sample_quoted_multiline.csv").getPath
    val dsr = DatasetRelation(fileLocation, "csv", "false", "true", ",", "\"", "\\", "true", null, null, ss.sqlContext)
    val rdd = dsr.buildScan()
    assert(3 == rdd.count())
  }


  test ("Read JSON") {
    val fileLocation = getClass.getResource("/people.json").getPath
    val dsr = DatasetRelation(fileLocation, "json", "false", "true", ",", "\"", "\\", "false", null, null, ss.sqlContext)
    val rdd = dsr.buildScan()
    assert(3 == rdd.count())
  }

  test ("Read AVRO") {
    val fileLocation = getClass.getResource("/users.avro").getPath
    val dsr = DatasetRelation(fileLocation, "avro", "false", "true", ",", "\"", "\\", "false", null, null, ss.sqlContext)
    val rdd = dsr.buildScan()
    assert(2 == rdd.count())
  }

  test ("Read parquet") {
    val fileLocation = getClass.getResource("/users.parquet").getPath
    val dsr = DatasetRelation(fileLocation, "parquet", "false", "true", ",", "\"", "\\", "false", null, null, ss.sqlContext)
    val rdd = dsr.buildScan()
    assert(2 == rdd.count())
  }

  test ("Read text file") {
    val fileLocation = getClass.getResource("/plaintext.txt").getPath
    val dsr = DatasetRelation(fileLocation, "txt", "false", "true", ",", "\"", "\\", "false", null, null, ss.sqlContext)
    val rdd = dsr.buildScan()
    assert(3 == rdd.count())
  }

  test ("Read xml file") {
    val fileLocation = getClass.getResource("/books.xml").getPath
    val dsr = DatasetRelation(fileLocation, "xml", "false", "true", ",", "\"", "\\", "false", "book", null, ss.sqlContext)
    val rdd = dsr.buildScan()
    assert(12 == rdd.count())
  }
  test ("Read orc file") {
    val fileLocation = getClass.getResource("/books.orc").getPath
    val dsr = DatasetRelation(fileLocation, "orc", "false", "true", ",", "\"", "\\", "false", "book", null, ss.sqlContext)
    val rdd = dsr.buildScan()
    assert(12 == rdd.count())
  }
} 
Example 189
Source File: HiveContextCompatibilitySuite.scala    From Spark-2.3.1   with Apache License 2.0
package org.apache.spark.sql.hive

import org.scalatest.BeforeAndAfterEach

import org.apache.spark.{SparkConf, SparkContext, SparkFunSuite}


class HiveContextCompatibilitySuite extends SparkFunSuite with BeforeAndAfterEach {

  private var sc: SparkContext = null
  private var hc: HiveContext = null

  override def beforeAll(): Unit = {
    super.beforeAll()
    sc = SparkContext.getOrCreate(new SparkConf().setMaster("local").setAppName("test"))
    HiveUtils.newTemporaryConfiguration(useInMemoryDerby = true).foreach { case (k, v) =>
      sc.hadoopConfiguration.set(k, v)
    }
    hc = new HiveContext(sc)
  }

  override def afterEach(): Unit = {
    try {
      hc.sharedState.cacheManager.clearCache()
      hc.sessionState.catalog.reset()
    } finally {
      super.afterEach()
    }
  }

  override def afterAll(): Unit = {
    try {
      sc = null
      hc = null
    } finally {
      super.afterAll()
    }
  }

  test("basic operations") {
    val _hc = hc
    import _hc.implicits._
    val df1 = (1 to 20).map { i => (i, i) }.toDF("a", "x")
    val df2 = (1 to 100).map { i => (i, i % 10, i % 2 == 0) }.toDF("a", "b", "c")
      .select($"a", $"b")
      .filter($"a" > 10 && $"b" > 6 && $"c")
    val df3 = df1.join(df2, "a")
    val res = df3.collect()
    val expected = Seq((18, 18, 8)).toDF("a", "x", "b").collect()
    assert(res.toSeq == expected.toSeq)
    df3.createOrReplaceTempView("mai_table")
    val df4 = hc.table("mai_table")
    val res2 = df4.collect()
    assert(res2.toSeq == expected.toSeq)
  }

  test("basic DDLs") {
    val _hc = hc
    import _hc.implicits._
    val databases = hc.sql("SHOW DATABASES").collect().map(_.getString(0))
    assert(databases.toSeq == Seq("default"))
    hc.sql("CREATE DATABASE mee_db")
    hc.sql("USE mee_db")
    val databases2 = hc.sql("SHOW DATABASES").collect().map(_.getString(0))
    assert(databases2.toSet == Set("default", "mee_db"))
    val df = (1 to 10).map { i => ("bob" + i.toString, i) }.toDF("name", "age")
    df.createOrReplaceTempView("mee_table")
    hc.sql("CREATE TABLE moo_table (name string, age int)")
    hc.sql("INSERT INTO moo_table SELECT * FROM mee_table")
    assert(
      hc.sql("SELECT * FROM moo_table order by name").collect().toSeq ==
      df.collect().toSeq.sortBy(_.getString(0)))
    val tables = hc.sql("SHOW TABLES IN mee_db").select("tableName").collect().map(_.getString(0))
    assert(tables.toSet == Set("moo_table", "mee_table"))
    hc.sql("DROP TABLE moo_table")
    hc.sql("DROP TABLE mee_table")
    val tables2 = hc.sql("SHOW TABLES IN mee_db").select("tableName").collect().map(_.getString(0))
    assert(tables2.isEmpty)
    hc.sql("USE default")
    hc.sql("DROP DATABASE mee_db CASCADE")
    val databases3 = hc.sql("SHOW DATABASES").collect().map(_.getString(0))
    assert(databases3.toSeq == Seq("default"))
  }

} 
Example 190
Source File: BufferHolderSparkSubmitSuite.scala    From Spark-2.3.1   with Apache License 2.0
package org.apache.spark.sql.catalyst.expressions.codegen

import org.scalatest.{BeforeAndAfterEach, Matchers}

import org.apache.spark.{SparkFunSuite, TestUtils}
import org.apache.spark.deploy.SparkSubmitSuite
import org.apache.spark.sql.catalyst.expressions.UnsafeRow
import org.apache.spark.unsafe.array.ByteArrayMethods
import org.apache.spark.util.ResetSystemProperties

// A test for growing the buffer holder to nearly 2GB. Due to the heap size limitation of the Spark
// unit-test JVM, the actual test code runs as a separate spark-submit job.
class BufferHolderSparkSubmitSuite
  extends SparkFunSuite
    with Matchers
    with BeforeAndAfterEach
    with ResetSystemProperties {

  test("SPARK-22222: Buffer holder should be able to allocate memory larger than 1GB") {
    val unusedJar = TestUtils.createJarWithClasses(Seq.empty)

    val argsForSparkSubmit = Seq(
      "--class", BufferHolderSparkSubmitSuite.getClass.getName.stripSuffix("$"),
      "--name", "SPARK-22222",
      "--master", "local-cluster[2,1,1024]",
      "--driver-memory", "4g",
      "--conf", "spark.ui.enabled=false",
      "--conf", "spark.master.rest.enabled=false",
      "--conf", "spark.driver.extraJavaOptions=-ea",
      unusedJar.toString)
    SparkSubmitSuite.runSparkSubmit(argsForSparkSubmit, "../..")
  }
}

object BufferHolderSparkSubmitSuite {

  def main(args: Array[String]): Unit = {

    val ARRAY_MAX = ByteArrayMethods.MAX_ROUNDED_ARRAY_LENGTH

    val holder = new BufferHolder(new UnsafeRow(1000))

    holder.reset()
    holder.grow(roundToWord(ARRAY_MAX / 2))

    holder.reset()
    holder.grow(roundToWord(ARRAY_MAX / 2 + 8))

    holder.reset()
    holder.grow(roundToWord(Integer.MAX_VALUE / 2))

    holder.reset()
    holder.grow(roundToWord(Integer.MAX_VALUE))
  }

  private def roundToWord(len: Int): Int = {
    ByteArrayMethods.roundNumberOfBytesToNearestWord(len)
  }
} 
Example 191
Source File: SharedSparkSession.scala    From Spark-2.3.1   with Apache License 2.0
package org.apache.spark.sql.test

import scala.concurrent.duration._

import org.scalatest.{BeforeAndAfterEach, Suite}
import org.scalatest.concurrent.Eventually

import org.apache.spark.{DebugFilesystem, SparkConf}
import org.apache.spark.sql.{SparkSession, SQLContext}
import org.apache.spark.sql.internal.SQLConf


  protected override def afterAll(): Unit = {
    try {
      super.afterAll()
    } finally {
      try {
        if (_spark != null) {
          try {
            _spark.sessionState.catalog.reset()
          } finally {
            _spark.stop()
            _spark = null
          }
        }
      } finally {
        SparkSession.clearActiveSession()
        SparkSession.clearDefaultSession()
      }
    }
  }

  protected override def beforeEach(): Unit = {
    super.beforeEach()
    DebugFilesystem.clearOpenStreams()
  }

  protected override def afterEach(): Unit = {
    super.afterEach()
    // Clear all persistent datasets after each test
    spark.sharedState.cacheManager.clearCache()
    // files can be closed from other threads, so wait a bit
    // normally this doesn't take more than 1s
    eventually(timeout(10.seconds), interval(2.seconds)) {
      DebugFilesystem.assertNoOpenStreams()
    }
  }
} 
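The listing above is truncated before the trait declaration, so only the lifecycle overrides survive. For orientation, here is a minimal, self-contained sketch of the same idea built from public APIs only; it is an illustration, not Spark's actual SharedSparkSession, and the trait name plus the use of spark.catalog.clearCache() in place of the internal cache manager are assumptions of the sketch:

import org.apache.spark.sql.SparkSession
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, Suite}

// Hypothetical shared-session trait, sketched for illustration only.
trait LocalSharedSparkSession extends BeforeAndAfterAll with BeforeAndAfterEach { self: Suite =>

  @transient private var _spark: SparkSession = _
  protected def spark: SparkSession = _spark

  override def beforeAll(): Unit = {
    super.beforeAll()
    _spark = SparkSession.builder()
      .master("local[2]")
      .appName("shared-spark-session-test")
      .getOrCreate()
  }

  override def afterEach(): Unit = {
    try {
      // keep tests isolated: drop any cached datasets left behind by the previous test
      spark.catalog.clearCache()
    } finally {
      super.afterEach()
    }
  }

  override def afterAll(): Unit = {
    try {
      if (_spark != null) {
        _spark.stop()
        _spark = null
      }
    } finally {
      SparkSession.clearActiveSession()
      SparkSession.clearDefaultSession()
      super.afterAll()
    }
  }
}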
Example 192
Source File: SharedSparkContext.scala    From Spark-2.3.1   with Apache License 2.0
package org.apache.spark

import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach}
import org.scalatest.Suite


  protected def initializeContext(): Unit = {
    if (null == _sc) {
      _sc = new SparkContext(
        "local[4]", "test", conf.set("spark.hadoop.fs.file.impl", classOf[DebugFilesystem].getName))
    }
  }

  override def beforeAll() {
    super.beforeAll()
    initializeContext()
  }

  override def afterAll() {
    try {
      LocalSparkContext.stop(_sc)
      _sc = null
    } finally {
      super.afterAll()
    }
  }

  protected override def beforeEach(): Unit = {
    super.beforeEach()
    DebugFilesystem.clearOpenStreams()
  }

  protected override def afterEach(): Unit = {
    super.afterEach()
    DebugFilesystem.assertNoOpenStreams()
  }
} 
Example 193
Source File: ResetSystemProperties.scala    From Spark-2.3.1   with Apache License 2.0
package org.apache.spark.util

import java.util.Properties

import org.apache.commons.lang3.SerializationUtils
import org.scalatest.{BeforeAndAfterEach, Suite}


private[spark] trait ResetSystemProperties extends BeforeAndAfterEach { this: Suite =>
  var oldProperties: Properties = null

  override def beforeEach(): Unit = {
    // we need SerializationUtils.clone instead of `new Properties(System.getProperties())` because
    // the latter way of creating a copy does not copy the properties but initializes a new
    // Properties object with the given properties as defaults. Those defaults are then not
    // recognized at all by the standard Scala wrapper over Java Properties.
    oldProperties = SerializationUtils.clone(System.getProperties)
    super.beforeEach()
  }

  override def afterEach(): Unit = {
    try {
      super.afterEach()
    } finally {
      System.setProperties(oldProperties)
      oldProperties = null
    }
  }
} 
Example 194
Source File: DiskBlockManagerSuite.scala    From Spark-2.3.1   with Apache License 2.0
package org.apache.spark.storage

import java.io.{File, FileWriter}
import java.util.UUID

import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach}

import org.apache.spark.{SparkConf, SparkFunSuite}
import org.apache.spark.util.Utils

class DiskBlockManagerSuite extends SparkFunSuite with BeforeAndAfterEach with BeforeAndAfterAll {
  private val testConf = new SparkConf(false)
  private var rootDir0: File = _
  private var rootDir1: File = _
  private var rootDirs: String = _

  var diskBlockManager: DiskBlockManager = _

  override def beforeAll() {
    super.beforeAll()
    rootDir0 = Utils.createTempDir()
    rootDir1 = Utils.createTempDir()
    rootDirs = rootDir0.getAbsolutePath + "," + rootDir1.getAbsolutePath
  }

  override def afterAll() {
    try {
      Utils.deleteRecursively(rootDir0)
      Utils.deleteRecursively(rootDir1)
    } finally {
      super.afterAll()
    }
  }

  override def beforeEach() {
    super.beforeEach()
    val conf = testConf.clone
    conf.set("spark.local.dir", rootDirs)
    diskBlockManager = new DiskBlockManager(conf, deleteFilesOnStop = true)
  }

  override def afterEach() {
    try {
      diskBlockManager.stop()
    } finally {
      super.afterEach()
    }
  }

  test("basic block creation") {
    val blockId = new TestBlockId("test")
    val newFile = diskBlockManager.getFile(blockId)
    writeToFile(newFile, 10)
    assert(diskBlockManager.containsBlock(blockId))
    newFile.delete()
    assert(!diskBlockManager.containsBlock(blockId))
  }

  test("enumerating blocks") {
    val ids = (1 to 100).map(i => TestBlockId("test_" + i))
    val files = ids.map(id => diskBlockManager.getFile(id))
    files.foreach(file => writeToFile(file, 10))
    assert(diskBlockManager.getAllBlocks.toSet === ids.toSet)
  }

  test("SPARK-22227: non-block files are skipped") {
    val file = diskBlockManager.getFile("unmanaged_file")
    writeToFile(file, 10)
    assert(diskBlockManager.getAllBlocks().isEmpty)
  }

  def writeToFile(file: File, numBytes: Int) {
    val writer = new FileWriter(file, true)
    for (i <- 0 until numBytes) writer.write(i)
    writer.close()
  }
} 
Example 195
Source File: SharedDriver.scala    From ingraph   with Eclipse Public License 1.0
package ingraph.compiler.sql.driver

import org.neo4j.driver.v1.{Driver, Session}
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, Suite}

trait SharedDriver[DriverT <: Driver, SessionT <: Session] extends BeforeAndAfterAll with BeforeAndAfterEach {
  this: Suite =>

  var driver: DriverT = _
  var session: SessionT = _

  def initNewDriver(): DriverT

  protected def init(): Unit = {
    driver = initNewDriver()
    // any alternative?
    session = driver.session().asInstanceOf[SessionT]
  }

  override protected def beforeAll(): Unit = {
    super.beforeAll()

    init()
  }

  override protected def beforeEach(): Unit = {
    super.beforeEach()

    if (!session.isOpen)
      init()
  }

  override protected def afterEach(): Unit = {
    if (!session.isOpen)
      driver.close()

    super.afterEach()
  }

  override protected def afterAll(): Unit = {
    session.close()
    driver.close()

    super.afterAll()
  }
} 
Example 196
Source File: InMemoryShellNotificationManagerTest.scala    From shellbase   with Apache License 2.0
package com.sumologic.shellbase.notifications

import com.sumologic.shellbase.CommonWordSpec
import org.junit.runner.RunWith
import org.mockito.Mockito._
import org.scalatest.BeforeAndAfterEach
import org.scalatest.junit.JUnitRunner
import org.scalatest.mock.MockitoSugar

@RunWith(classOf[JUnitRunner])
class InMemoryShellNotificationManagerTest extends CommonWordSpec with BeforeAndAfterEach with MockitoSugar {

  "InMemoryShellNotificationManager" should {
    "provide notification names" in {
      val sut = new InMemoryShellNotificationManager("", Seq(notification1, notification2))
      sut.notifierNames should be(Seq(firstName, secondName))
    }

    "know if a notification is enabled by default" in {
      val sut = new InMemoryShellNotificationManager("", Seq(notification1, notification2), enabledByDefault = false)
      sut.notificationEnabled(firstName) should be(false)
      sut.notificationEnabled(secondName) should be(false)
      sut.notificationEnabled("madeUp") should be(false)

      val sut2 = new InMemoryShellNotificationManager("", Seq(notification1, notification2), enabledByDefault = true)
      sut2.notificationEnabled(firstName) should be(true)
      sut2.notificationEnabled(secondName) should be(true)
      sut2.notificationEnabled("madeUp") should be(true)
    }

    "support enabling and disabling notifications" in {
      val sut = new InMemoryShellNotificationManager("", Seq(notification1, notification2))
      sut.notificationEnabled(firstName) should be(false)
      sut.notificationEnabled(secondName) should be(false)

      sut.enable(firstName)
      sut.notificationEnabled(firstName) should be(true)
      sut.notificationEnabled(secondName) should be(false)

      sut.enable(secondName)
      sut.notificationEnabled(firstName) should be(true)
      sut.notificationEnabled(secondName) should be(true)

      sut.disable(firstName)
      sut.notificationEnabled(firstName) should be(false)
      sut.notificationEnabled(secondName) should be(true)

      sut.disable(secondName)
      sut.notificationEnabled(firstName) should be(false)
      sut.notificationEnabled(secondName) should be(false)
    }

    "only notify enabled notifications" in {
      val notificationString = "test"
      val sut = new InMemoryShellNotificationManager("", Seq(notification1, notification2))

      sut.notify(notificationString)
      verify(notification1, times(0)).notify("", notificationString)
      verify(notification2, times(0)).notify("", notificationString)

      sut.enable(firstName)
      sut.notify(notificationString)
      verify(notification1, times(1)).notify("", notificationString)
      verify(notification2, times(0)).notify("", notificationString)

      sut.enable(secondName)
      sut.notify(notificationString)
      verify(notification1, times(2)).notify("", notificationString)
      verify(notification2, times(1)).notify("", notificationString)

      sut.disable(firstName)
      sut.notify(notificationString)
      verify(notification1, times(2)).notify("", notificationString)
      verify(notification2, times(2)).notify("", notificationString)

    }
  }

  private val firstName = "first"
  private val secondName = "second"

  private var notification1: ShellNotification = _
  private var notification2: ShellNotification = _

  override protected def beforeEach(): Unit = {
    notification1 = mock[ShellNotification]
    notification2 = mock[ShellNotification]

    when(notification1.name).thenReturn(firstName)
    when(notification2.name).thenReturn(secondName)
  }
} 
Example 197
Source File: ResetSystemProperties.scala    From BigDatalog   with Apache License 2.0
package org.apache.spark.util

import java.util.Properties

import org.apache.commons.lang3.SerializationUtils
import org.scalatest.{BeforeAndAfterEach, Suite}

import org.apache.spark.SparkFunSuite


private[spark] trait ResetSystemProperties extends BeforeAndAfterEach { this: Suite =>
  var oldProperties: Properties = null

  override def beforeEach(): Unit = {
    // we need SerializationUtils.clone instead of `new Properties(System.getProperties())` because
    // the latter way of creating a copy does not copy the properties but initializes a new
    // Properties object with the given properties as defaults. Those defaults are then not
    // recognized at all by the standard Scala wrapper over Java Properties.
    oldProperties = SerializationUtils.clone(System.getProperties)
    super.beforeEach()
  }

  override def afterEach(): Unit = {
    try {
      super.afterEach()
    } finally {
      System.setProperties(oldProperties)
      oldProperties = null
    }
  }
} 
Example 198
Source File: DiskBlockManagerSuite.scala    From BigDatalog   with Apache License 2.0
package org.apache.spark.storage

import java.io.{File, FileWriter}

import scala.language.reflectiveCalls

import org.mockito.Mockito.{mock, when}
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach}

import org.apache.spark.{SparkConf, SparkFunSuite}
import org.apache.spark.util.Utils

class DiskBlockManagerSuite extends SparkFunSuite with BeforeAndAfterEach with BeforeAndAfterAll {
  private val testConf = new SparkConf(false)
  private var rootDir0: File = _
  private var rootDir1: File = _
  private var rootDirs: String = _

  val blockManager = mock(classOf[BlockManager])
  when(blockManager.conf).thenReturn(testConf)
  var diskBlockManager: DiskBlockManager = _

  override def beforeAll() {
    super.beforeAll()
    rootDir0 = Utils.createTempDir()
    rootDir1 = Utils.createTempDir()
    rootDirs = rootDir0.getAbsolutePath + "," + rootDir1.getAbsolutePath
  }

  override def afterAll() {
    super.afterAll()
    Utils.deleteRecursively(rootDir0)
    Utils.deleteRecursively(rootDir1)
  }

  override def beforeEach() {
    val conf = testConf.clone
    conf.set("spark.local.dir", rootDirs)
    diskBlockManager = new DiskBlockManager(blockManager, conf)
  }

  override def afterEach() {
    diskBlockManager.stop()
  }

  test("basic block creation") {
    val blockId = new TestBlockId("test")
    val newFile = diskBlockManager.getFile(blockId)
    writeToFile(newFile, 10)
    assert(diskBlockManager.containsBlock(blockId))
    newFile.delete()
    assert(!diskBlockManager.containsBlock(blockId))
  }

  test("enumerating blocks") {
    val ids = (1 to 100).map(i => TestBlockId("test_" + i))
    val files = ids.map(id => diskBlockManager.getFile(id))
    files.foreach(file => writeToFile(file, 10))
    assert(diskBlockManager.getAllBlocks.toSet === ids.toSet)
  }

  def writeToFile(file: File, numBytes: Int) {
    val writer = new FileWriter(file, true)
    for (i <- 0 until numBytes) writer.write(i)
    writer.close()
  }
} 
Example 199
Source File: TalkCtrlSpec.scala    From gospeak   with Apache License 2.0
package gospeak.web.pages.user.talks

import gospeak.core.domain.messages.Message
import gospeak.libs.scala.BasicMessageBus
import gospeak.libs.scala.domain.Page
import gospeak.web.services.MessageSrv
import gospeak.web.testingutils.CtrlSpec
import org.scalatest.BeforeAndAfterEach
import play.api.http.Status
import play.api.test.Helpers._

class TalkCtrlSpec extends CtrlSpec with BeforeAndAfterEach {
  private val params = Page.Params()
  private val messageSrv = new MessageSrv(db.group, db.cfp, db.venue, db.proposal, db.sponsor, db.user)
  private val messageBus = new BasicMessageBus[Message]()
  private val ctrl = new TalkCtrl(cc, silhouette, conf, db.user, db.userRequest, db.talk, db.externalEvent, db.externalProposal, emailSrv, messageSrv, messageBus)

  override def beforeEach(): Unit = db.migrate().unsafeRunSync()

  override def afterEach(): Unit = db.dropTables().unsafeRunSync()

  describe("TalkCtrl") {
    describe("list") {
      it("should return 200") {
        val res = ctrl.list(params).apply(securedReq)
        status(res) shouldBe Status.OK
        contentAsString(res) should include("""<div class="jumbotron">""")
      }
    }
  }
} 
Example 200
Source File: CfpCtrlSpec.scala    From gospeak   with Apache License 2.0
package gospeak.web.pages.published.cfps

import gospeak.core.domain.messages.Message
import gospeak.libs.scala.BasicMessageBus
import gospeak.libs.scala.domain.Page
import gospeak.web.services.MessageSrv
import gospeak.web.testingutils.CtrlSpec
import org.scalatest.BeforeAndAfterEach
import play.api.http.Status
import play.api.test.Helpers._

class CfpCtrlSpec extends CtrlSpec with BeforeAndAfterEach {
  private val params = Page.Params()
  private val messageSrv = new MessageSrv(db.group, db.cfp, db.venue, db.proposal, db.sponsor, db.user)
  private val messageBus = new BasicMessageBus[Message]()
  private val ctrl = new CfpCtrl(cc, silhouette, conf, db.group, db.cfp, db.talk, db.proposal, db.userRequest, db.externalEvent, db.externalCfp, authSrv, emailSrv, messageSrv, messageBus)

  override def beforeEach(): Unit = db.migrate().unsafeRunSync()

  override def afterEach(): Unit = db.dropTables().unsafeRunSync()

  describe("CfpCtrl") {
    it("should display the cfp list page empty") {
      val res = ctrl.list(params).apply(unsecuredReq)
      status(res) shouldBe Status.OK
    }
  }
}