org.scalatest.Outcome Scala Examples

The following examples show how to use org.scalatest.Outcome. You can go to the original project or source file by following the links above each example.
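Most of the examples below follow the same basic pattern: a suite overrides withFixture, lets the test run to produce an Outcome, and then inspects or wraps that Outcome (to log around tests, capture screenshots on failure, cancel unsupported tests, or supply a fixture). The sketch below illustrates the shape of that pattern; the suite name and the println-based logging are illustrative only and not taken from any of the projects listed here.

import org.scalatest.Outcome
import org.scalatest.funsuite.AnyFunSuite

// Minimal sketch: wrap every test, capture its Outcome, and inspect it before returning it to ScalaTest.
class OutcomeLoggingSuite extends AnyFunSuite {

  override def withFixture(test: NoArgTest): Outcome = {
    println(s"starting '${test.name}'")
    val outcome = super.withFixture(test) // runs the test body and yields its Outcome
    if (outcome.isExceptional) println(s"'${test.name}' did not pass: $outcome") // Failed or Canceled
    outcome
  }

  test("addition works") {
    assert(1 + 1 == 2)
  }
}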
Example 1
Source File: SparkFunSuite.scala    From spark1.52   with Apache License 2.0
package org.apache.spark

// scalastyle:off
import org.scalatest.{FunSuite, Outcome}


abstract class SparkFunSuite extends FunSuite with Logging {

  final protected override def withFixture(test: NoArgTest): Outcome = {
    val testName = test.text
    val suiteName = this.getClass.getName
    val shortSuiteName = suiteName.replaceAll("org.apache.spark", "o.a.s")
    try {
      logInfo(s"\n\n===== TEST OUTPUT FOR $shortSuiteName: '$testName' =====\n")
      test() // invoke the no-argument test function
    } finally {
      logInfo(s"\n\n===== FINISHED $shortSuiteName: '$testName' =====\n")
    }
  }

} 
Example 2
Source File: WaitForFundingCreatedInternalStateSpec.scala    From eclair   with Apache License 2.0
package fr.acinq.eclair.channel.states.b

import akka.testkit.{TestFSMRef, TestProbe}
import fr.acinq.bitcoin.{ByteVector32, Satoshi}
import fr.acinq.eclair.TestConstants.{Alice, Bob}
import fr.acinq.eclair.blockchain.{MakeFundingTxResponse, TestWallet}
import fr.acinq.eclair.channel._
import fr.acinq.eclair.channel.states.StateTestsHelperMethods
import fr.acinq.eclair.wire._
import fr.acinq.eclair.{TestConstants, TestKitBaseClass}
import org.scalatest.Outcome
import org.scalatest.funsuite.FixtureAnyFunSuiteLike
import scodec.bits.ByteVector

import scala.concurrent.duration._
import scala.concurrent.{Future, Promise}



class WaitForFundingCreatedInternalStateSpec extends TestKitBaseClass with FixtureAnyFunSuiteLike with StateTestsHelperMethods {

  case class FixtureParam(alice: TestFSMRef[State, Data, Channel], alice2bob: TestProbe, bob2alice: TestProbe, alice2blockchain: TestProbe)

  override def withFixture(test: OneArgTest): Outcome = {
    val noopWallet = new TestWallet {
      override def makeFundingTx(pubkeyScript: ByteVector, amount: Satoshi, feeRatePerKw: Long): Future[MakeFundingTxResponse] = Promise[MakeFundingTxResponse].future  // will never be completed
    }
    val setup = init(wallet = noopWallet)
    import setup._
    val aliceInit = Init(Alice.channelParams.features)
    val bobInit = Init(Bob.channelParams.features)
    within(30 seconds) {
      alice ! INPUT_INIT_FUNDER(ByteVector32.Zeroes, TestConstants.fundingSatoshis, TestConstants.pushMsat, TestConstants.feeratePerKw, TestConstants.feeratePerKw, Alice.channelParams, alice2bob.ref, bobInit, ChannelFlags.Empty, ChannelVersion.STANDARD)
      bob ! INPUT_INIT_FUNDEE(ByteVector32.Zeroes, Bob.channelParams, bob2alice.ref, aliceInit)
      alice2bob.expectMsgType[OpenChannel]
      alice2bob.forward(bob)
      bob2alice.expectMsgType[AcceptChannel]
      bob2alice.forward(alice)
      awaitCond(alice.stateName == WAIT_FOR_FUNDING_INTERNAL)
      withFixture(test.toNoArgTest(FixtureParam(alice, alice2bob, bob2alice, alice2blockchain)))
    }
  }

  test("recv Error") { f =>
    import f._
    alice ! Error(ByteVector32.Zeroes, "oops")
    awaitCond(alice.stateName == CLOSED)
  }

  test("recv CMD_CLOSE") { f =>
    import f._
    alice ! CMD_CLOSE(None)
    awaitCond(alice.stateName == CLOSED)
  }

} 
Example 3
Source File: WaitForFundingSignedStateSpec.scala    From eclair   with Apache License 2.0
package fr.acinq.eclair.channel.states.b

import akka.testkit.{TestFSMRef, TestProbe}
import fr.acinq.bitcoin.{ByteVector32, ByteVector64}
import fr.acinq.eclair.TestConstants.{Alice, Bob}
import fr.acinq.eclair.blockchain._
import fr.acinq.eclair.channel.Channel.TickChannelOpenTimeout
import fr.acinq.eclair.channel._
import fr.acinq.eclair.channel.states.StateTestsHelperMethods
import fr.acinq.eclair.wire.{AcceptChannel, Error, FundingCreated, FundingSigned, Init, OpenChannel}
import fr.acinq.eclair.{TestConstants, TestKitBaseClass}
import org.scalatest.Outcome
import org.scalatest.funsuite.FixtureAnyFunSuiteLike

import scala.concurrent.duration._



class WaitForFundingSignedStateSpec extends TestKitBaseClass with FixtureAnyFunSuiteLike with StateTestsHelperMethods {

  case class FixtureParam(alice: TestFSMRef[State, Data, Channel], alice2bob: TestProbe, bob2alice: TestProbe, alice2blockchain: TestProbe)

  override def withFixture(test: OneArgTest): Outcome = {
    val setup = init()
    import setup._
    val aliceInit = Init(Alice.channelParams.features)
    val bobInit = Init(Bob.channelParams.features)
    within(30 seconds) {
      alice ! INPUT_INIT_FUNDER(ByteVector32.Zeroes, TestConstants.fundingSatoshis, TestConstants.pushMsat, TestConstants.feeratePerKw, TestConstants.feeratePerKw, Alice.channelParams, alice2bob.ref, bobInit, ChannelFlags.Empty, ChannelVersion.STANDARD)
      bob ! INPUT_INIT_FUNDEE(ByteVector32.Zeroes, Bob.channelParams, bob2alice.ref, aliceInit)
      alice2bob.expectMsgType[OpenChannel]
      alice2bob.forward(bob)
      bob2alice.expectMsgType[AcceptChannel]
      bob2alice.forward(alice)
      alice2bob.expectMsgType[FundingCreated]
      alice2bob.forward(bob)
      awaitCond(alice.stateName == WAIT_FOR_FUNDING_SIGNED)
      withFixture(test.toNoArgTest(FixtureParam(alice, alice2bob, bob2alice, alice2blockchain)))
    }
  }

  test("recv FundingSigned with valid signature") { f =>
    import f._
    bob2alice.expectMsgType[FundingSigned]
    bob2alice.forward(alice)
    awaitCond(alice.stateName == WAIT_FOR_FUNDING_CONFIRMED)
    alice2blockchain.expectMsgType[WatchSpent]
    alice2blockchain.expectMsgType[WatchConfirmed]
  }

  test("recv FundingSigned with invalid signature") { f =>
    import f._
    // sending an invalid sig
    alice ! FundingSigned(ByteVector32.Zeroes, ByteVector64.Zeroes)
    awaitCond(alice.stateName == CLOSED)
    alice2bob.expectMsgType[Error]
  }

  test("recv CMD_CLOSE") { f =>
    import f._
    alice ! CMD_CLOSE(None)
    awaitCond(alice.stateName == CLOSED)
  }

  test("recv CMD_FORCECLOSE") { f =>
    import f._
    alice ! CMD_FORCECLOSE
    awaitCond(alice.stateName == CLOSED)
  }

  test("recv INPUT_DISCONNECTED") { f =>
    import f._
    val fundingTx = alice.stateData.asInstanceOf[DATA_WAIT_FOR_FUNDING_SIGNED].fundingTx
    assert(alice.underlyingActor.wallet.asInstanceOf[TestWallet].rolledback.isEmpty)
    alice ! INPUT_DISCONNECTED
    awaitCond(alice.stateName == CLOSED)
    assert(alice.underlyingActor.wallet.asInstanceOf[TestWallet].rolledback.contains(fundingTx))
  }

  test("recv TickChannelOpenTimeout") { f =>
    import f._
    alice ! TickChannelOpenTimeout
    awaitCond(alice.stateName == CLOSED)
  }

} 
Example 4
Source File: WaitForFundingCreatedStateSpec.scala    From eclair   with Apache License 2.0
package fr.acinq.eclair.channel.states.b

import akka.testkit.{TestFSMRef, TestProbe}
import fr.acinq.bitcoin.{ByteVector32, Satoshi}
import fr.acinq.eclair.TestConstants.{Alice, Bob}
import fr.acinq.eclair.blockchain._
import fr.acinq.eclair.channel._
import fr.acinq.eclair.channel.states.StateTestsHelperMethods
import fr.acinq.eclair.transactions.Transactions
import fr.acinq.eclair.wire._
import fr.acinq.eclair.{LongToBtcAmount, TestConstants, TestKitBaseClass, ToMilliSatoshiConversion}
import org.scalatest.funsuite.FixtureAnyFunSuiteLike
import org.scalatest.{Outcome, Tag}

import scala.concurrent.duration._



class WaitForFundingCreatedStateSpec extends TestKitBaseClass with FixtureAnyFunSuiteLike with StateTestsHelperMethods {

  case class FixtureParam(bob: TestFSMRef[State, Data, Channel], alice2bob: TestProbe, bob2alice: TestProbe, bob2blockchain: TestProbe)

  override def withFixture(test: OneArgTest): Outcome = {
    val setup = init()
    import setup._
    val (fundingSatoshis, pushMsat) = if (test.tags.contains("funder_below_reserve")) {
      (1000100 sat, (1000000 sat).toMilliSatoshi) // toLocal = 100 satoshis
    } else {
      (TestConstants.fundingSatoshis, TestConstants.pushMsat)
    }
    val aliceInit = Init(Alice.channelParams.features)
    val bobInit = Init(Bob.channelParams.features)
    within(30 seconds) {
      alice ! INPUT_INIT_FUNDER(ByteVector32.Zeroes, fundingSatoshis, pushMsat, TestConstants.feeratePerKw, TestConstants.feeratePerKw, Alice.channelParams, alice2bob.ref, bobInit, ChannelFlags.Empty, ChannelVersion.STANDARD)
      bob ! INPUT_INIT_FUNDEE(ByteVector32.Zeroes, Bob.channelParams, bob2alice.ref, aliceInit)
      alice2bob.expectMsgType[OpenChannel]
      alice2bob.forward(bob)
      bob2alice.expectMsgType[AcceptChannel]
      bob2alice.forward(alice)
      awaitCond(bob.stateName == WAIT_FOR_FUNDING_CREATED)
      withFixture(test.toNoArgTest(FixtureParam(bob, alice2bob, bob2alice, bob2blockchain)))
    }
  }

  test("recv FundingCreated") { f =>
    import f._
    alice2bob.expectMsgType[FundingCreated]
    alice2bob.forward(bob)
    awaitCond(bob.stateName == WAIT_FOR_FUNDING_CONFIRMED)
    bob2alice.expectMsgType[FundingSigned]
    bob2blockchain.expectMsgType[WatchSpent]
    bob2blockchain.expectMsgType[WatchConfirmed]
  }

  test("recv FundingCreated (funder can't pay fees)", Tag("funder_below_reserve")) { f =>
    import f._
    val fees = Satoshi(Transactions.commitWeight * TestConstants.feeratePerKw / 1000)
    val reserve = Bob.channelParams.channelReserve
    val missing = 100.sat - fees - reserve
    val fundingCreated = alice2bob.expectMsgType[FundingCreated]
    alice2bob.forward(bob)
    val error = bob2alice.expectMsgType[Error]
    assert(error === Error(fundingCreated.temporaryChannelId, s"can't pay the fee: missing=${-missing} reserve=$reserve fees=$fees"))
    awaitCond(bob.stateName == CLOSED)
  }

  test("recv Error") { f =>
    import f._
    bob ! Error(ByteVector32.Zeroes, "oops")
    awaitCond(bob.stateName == CLOSED)
  }

  test("recv CMD_CLOSE") { f =>
    import f._
    bob ! CMD_CLOSE(None)
    awaitCond(bob.stateName == CLOSED)
  }

} 
Example 5
Source File: ScreenCapturingSpec.scala    From renku   with Apache License 2.0
package ch.renku.acceptancetests.tooling

import java.io.File
import java.nio.file.Paths
import java.time.LocalDateTime.now
import java.time.format.DateTimeFormatter.ofPattern

import org.scalatest.{Outcome, TestSuite}
import org.scalatestplus.selenium.{Driver, WebBrowser}

trait ScreenCapturingSpec extends ScreenCapturing {
  this: AcceptanceSpec =>

  override def withFixture(test: NoArgTest): Outcome = {
    val outcome = test()

    if (outcome.isExceptional) {
      saveScreenshot
    }
    outcome
  }
} 
Example 6
Source File: SparkFunSuite.scala    From spark-iforest   with Apache License 2.0
package org.apache.spark

// scalastyle:off
import java.io.File

import org.apache.spark.internal.Logging
import org.apache.spark.util.AccumulatorContext
import org.scalatest.{BeforeAndAfterAll, FunSuite, Outcome}


abstract class SparkFunSuite extends FunSuite with BeforeAndAfterAll with Logging {

  final protected override def withFixture(test: NoArgTest): Outcome = {
    val testName = test.text
    val suiteName = this.getClass.getName
    val shortSuiteName = suiteName.replaceAll("org.apache.spark", "o.a.s")
    try {
      logInfo(s"\n\n===== TEST OUTPUT FOR $shortSuiteName: '$testName' =====\n")
      test()
    } finally {
      logInfo(s"\n\n===== FINISHED $shortSuiteName: '$testName' =====\n")
    }
  }

} 
Example 7
Source File: APSPSpec.scala    From spark-all-pairs-shortest-path   with Apache License 2.0
import org.apache.log4j.{Level, Logger}
import org.apache.spark.mllib.linalg.Matrix
import org.apache.spark.{SparkContext, SparkConf}
import org.apache.spark.mllib.linalg.distributed.{CoordinateMatrix, MatrixEntry}
import org.scalatest.{Outcome, FlatSpec}
import AllPairsShortestPath._
import breeze.linalg.{DenseMatrix => BDM}

class APSPSpec extends FlatSpec {

  val conf = new SparkConf().setAppName("AllPairsShortestPath").setMaster("local[4]").set("spark.driver.allowMultipleContexts", "true")
  val sc = new SparkContext(conf)

  override def withFixture(test: NoArgTest): Outcome = {
    Logger.getLogger("org").setLevel(Level.ERROR)
    Logger.getLogger("akka").setLevel(Level.ERROR)
    test() // invoke the test function
  }

  def fourByFourBlockMatrx = {
    val entries = sc.parallelize(Array(
      (0, 1, 20), (0, 2, 4), (0, 3, 2),
      (1, 0, 2), (1, 2, 1), (1, 3, 3), (2, 0, 1),
      (2, 1, 6), (2, 3, 5), (3, 0, 4), (3, 1, 2), (3, 2, 2))).map { case (i, j, v) => MatrixEntry(i, j, v) }
    val coordMat = new CoordinateMatrix(entries)
    val matA = coordMat.toBlockMatrix(2, 2).cache()
    matA
  }

  def ApspPartitioner = {
    GridPartitioner(fourByFourBlockMatrx.numRowBlocks, fourByFourBlockMatrx.numColBlocks, fourByFourBlockMatrx.blocks.partitions.length)
  }

  def toBreeze(A: Matrix): BDM[Double] = {
    new BDM[Double](A.numRows, A.numCols, A.toArray)
  }

  "The sample 4x4 Block Matrix" should "be valid" in {
    fourByFourBlockMatrx.validate()
  }

  it should "match our APSP matrix" in {
    println(fourByFourBlockMatrx.toLocalMatrix())
    val result = new DistributedBlockFW
    val observed = toBreeze(result.compute(fourByFourBlockMatrx).toLocal())
    val expected = BDM(
      (0.0, 4.0, 4.0, 2.0),
      (2.0, 0.0, 1.0, 3.0),
      (1.0, 5.0, 0.0, 3.0),
      (3.0, 2.0, 2.0, 0.0)
    )
    assert(observed === expected)
  }
} 
Example 8
Source File: SparkFunSuite.scala    From multi-tenancy-spark   with Apache License 2.0
package org.apache.spark

// scalastyle:off
import java.io.File

import org.scalatest.{BeforeAndAfterAll, FunSuite, Outcome}

import org.apache.spark.internal.Logging
import org.apache.spark.util.AccumulatorContext


abstract class SparkFunSuite extends FunSuite with BeforeAndAfterAll with Logging {

  final protected override def withFixture(test: NoArgTest): Outcome = {
    val testName = test.text
    val suiteName = this.getClass.getName
    val shortSuiteName = suiteName.replaceAll("org.apache.spark", "o.a.s")
    try {
      logInfo(s"\n\n===== TEST OUTPUT FOR $shortSuiteName: '$testName' =====\n")
      test()
    } finally {
      logInfo(s"\n\n===== FINISHED $shortSuiteName: '$testName' =====\n")
    }
  }

} 
Example 9
Source File: SparkFunSuite.scala    From iolap   with Apache License 2.0
package org.apache.spark

// scalastyle:off
import org.scalatest.{FunSuite, Outcome}


abstract class SparkFunSuite extends FunSuite with Logging {

  final protected override def withFixture(test: NoArgTest): Outcome = {
    val testName = test.text
    val suiteName = this.getClass.getName
    val shortSuiteName = suiteName.replaceAll("org.apache.spark", "o.a.s")
    try {
      logInfo(s"\n\n===== TEST OUTPUT FOR $shortSuiteName: '$testName' =====\n")
      test()
    } finally {
      logInfo(s"\n\n===== FINISHED $shortSuiteName: '$testName' =====\n")
    }
  }

} 
Example 10
Source File: KyuubiFunSuite.scala    From kyuubi   with Apache License 2.0
package org.apache.kyuubi

// scalastyle:off
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, FunSuite, Outcome}

trait KyuubiFunSuite extends FunSuite
  with BeforeAndAfterAll
  with BeforeAndAfterEach
  with ThreadAudit
  with Logging {
  // scalastyle:on
  override def beforeAll(): Unit = {
    doThreadPreAudit()
    super.beforeAll()
  }

  override def afterAll(): Unit = {
    super.afterAll()
    doThreadPostAudit()
  }

  final override def withFixture(test: NoArgTest): Outcome = {
    val testName = test.text
    val suiteName = this.getClass.getName
    val shortSuiteName = suiteName.replaceAll("org\\.apache\\.kyuubi", "o\\.a\\.k")
    try {
      info(s"\n\n===== TEST OUTPUT FOR $shortSuiteName: '$testName' =====\n")
      test()
    } finally {
      info(s"\n\n===== FINISHED $shortSuiteName: '$testName' =====\n")
    }
  }
} 
Example 11
Source File: H2Sandbox.scala    From redshift-fake-driver   with Apache License 2.0
package jp.ne.opt.redshiftfake

import java.sql.{DriverManager, Connection}
import java.util.Properties

import jp.ne.opt.redshiftfake.util.Loan.using
import org.scalatest.{Outcome, fixture}

trait H2Sandbox { self: fixture.TestSuite =>

  type FixtureParam = Connection

  override def withFixture(test: OneArgTest): Outcome = {
    val url = "jdbc:h2redshift:mem:redshift;MODE=PostgreSQL;DATABASE_TO_UPPER=false"
    val prop = new Properties()
    prop.setProperty("driver", "org.h2.jdbc.FakeH2Driver")
    prop.setProperty("user", "sa")

    Class.forName("org.h2.jdbc.FakeH2Driver")
    using(DriverManager.getConnection(url, prop))(test)
  }
} 
Example 12
Source File: SparkFunSuite.scala    From Spark-2.3.1   with Apache License 2.0
package org.apache.spark

// scalastyle:off
import java.io.File

import org.scalatest.{BeforeAndAfterAll, FunSuite, Outcome}

import org.apache.spark.internal.Logging
import org.apache.spark.util.AccumulatorContext


abstract class SparkFunSuite extends FunSuite with BeforeAndAfterAll with Logging {

  final protected override def withFixture(test: NoArgTest): Outcome = {
    val testName = test.text
    val suiteName = this.getClass.getName
    val shortSuiteName = suiteName.replaceAll("org.apache.spark", "o.a.s")
    try {
      logInfo(s"\n\n===== TEST OUTPUT FOR $shortSuiteName: '$testName' =====\n")
      test()
    } finally {
      logInfo(s"\n\n===== FINISHED $shortSuiteName: '$testName' =====\n")
    }
  }

} 
Example 13
Source File: SparkFunSuite.scala    From BigDatalog   with Apache License 2.0
package org.apache.spark

// scalastyle:off
import org.scalatest.{FunSuite, Outcome}


abstract class SparkFunSuite extends FunSuite with Logging {

  final protected override def withFixture(test: NoArgTest): Outcome = {
    val testName = test.text
    val suiteName = this.getClass.getName
    val shortSuiteName = suiteName.replaceAll("org.apache.spark", "o.a.s")
    try {
      logInfo(s"\n\n===== TEST OUTPUT FOR $shortSuiteName: '$testName' =====\n")
      test()
    } finally {
      logInfo(s"\n\n===== FINISHED $shortSuiteName: '$testName' =====\n")
    }
  }

} 
Example 14
Source File: SparkFunSuite.scala    From yggdrasil   with Apache License 2.0
package org.apache.spark.mllib.util

import org.apache.spark.Logging
import org.scalatest.{FunSuite, Outcome}


abstract class SparkFunSuite extends FunSuite with Logging {

  final protected override def withFixture(test: NoArgTest): Outcome = {
    val testName = test.text
    val suiteName = this.getClass.getName
    val shortSuiteName = suiteName.replaceAll("org.apache.spark", "o.a.s")
    try {
      logInfo(s"\n\n===== TEST OUTPUT FOR $shortSuiteName: '$testName' =====\n")
      test()
    } finally {
      logInfo(s"\n\n===== FINISHED $shortSuiteName: '$testName' =====\n")
    }
  }
} 
Example 15
Source File: SparkFunSuite.scala    From spark-ranking-algorithms   with Apache License 2.0
package org.apache.spark

// scalastyle:off
import org.scalatest.{Outcome, FunSuite}
import org.apache.log4j.{Level, Logger}


abstract class SparkFunSuite extends FunSuite with Logging {

  final protected override def withFixture(test: NoArgTest): Outcome = {
    val testName = test.text
    val suiteName = this.getClass.getName
    val shortSuiteName = suiteName.replaceAll("org.apache.spark", "o.a.s")
    try {
      Logger.getLogger("org").setLevel(Level.OFF)
      Logger.getLogger("akka").setLevel(Level.OFF)

      logInfo(s"\n\n===== TEST OUTPUT FOR $shortSuiteName: '$testName' =====\n")
      test()
    } finally {
      logInfo(s"\n\n===== FINISHED $shortSuiteName: '$testName' =====\n")
    }
  }

} 
Example 16
Source File: SparkFunSuite.scala    From spark-gbtlr   with Apache License 2.0
package org.apache.spark

// scalastyle:off
import java.io.File

import org.apache.spark.internal.Logging
import org.apache.spark.util.AccumulatorContext
import org.scalatest.{BeforeAndAfterAll, FunSuite, Outcome}


abstract class SparkFunSuite extends FunSuite with BeforeAndAfterAll with Logging {

  final protected override def withFixture(test: NoArgTest): Outcome = {
    val testName = test.text
    val suiteName = this.getClass.getName
    val shortSuiteName = suiteName.replaceAll("org.apache.spark", "o.a.s")
    try {
      logInfo(s"\n\n===== TEST OUTPUT FOR $shortSuiteName: '$testName' =====\n")
      test()
    } finally {
      logInfo(s"\n\n===== FINISHED $shortSuiteName: '$testName' =====\n")
    }
  }

} 
Example 17
Source File: SparkFunSuite.scala    From click-through-rate-prediction   with Apache License 2.0
package org.apache.spark

// scalastyle:off
import org.scalatest.{FunSuite, Outcome}


abstract class SparkFunSuite extends FunSuite with Logging {

  final protected override def withFixture(test: NoArgTest): Outcome = {
    val testName = test.text
    val suiteName = this.getClass.getName
    val shortSuiteName = suiteName.replaceAll("org.apache.spark", "o.a.s")
    try {
      logInfo(s"\n\n===== TEST OUTPUT FOR $shortSuiteName: '$testName' =====\n")
      test()
    } finally {
      logInfo(s"\n\n===== FINISHED $shortSuiteName: '$testName' =====\n")
    }
  }

} 
Example 18
Source File: RunEnforcementJobTest.scala    From scio   with Apache License 2.0
package com.spotify.scio.testing

import com.spotify.scio.testing.{JobTest => InnerJobTest}
import org.scalactic.source.Position
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.{Failed, Outcome}

import scala.collection.mutable.ArrayBuffer
import scala.reflect.ClassTag


trait RunEnforcementJobTest extends AnyFlatSpec { this: PipelineSpec =>

  private val tests = ArrayBuffer.empty[InnerJobTest.Builder]

  def JobTest[T: ClassTag]: InnerJobTest.Builder = {
    val jt = InnerJobTest[T]
    tests += jt
    jt
  }

  private[testing] def JobTest[T: ClassTag](enforceRun: Boolean = true): InnerJobTest.Builder = {
    val jt = InnerJobTest[T]
    if (enforceRun) tests += jt
    jt
  }

  def JobTest(className: String): InnerJobTest.Builder = {
    val jt = InnerJobTest(className)
    tests += jt
    jt
  }

  override protected def withFixture(test: NoArgTest): Outcome = {
    // Tests within a Suite are executed sequentially, so we need to clear the tests here.
    // If ParallelTestExecution were enabled, clearing would be obsolete given OneInstancePerTest.
    tests.clear()
    val outcome = super.withFixture(test)
    if (outcome.isSucceeded) {
      val notRun = tests.filterNot(_.wasRunInvoked)
      if (notRun.nonEmpty) {
        val m = notRun.mkString(start = "Missing run(): ", sep = "\nMissing run(): ", end = "")
        Failed(s"Did you forget run()?\n$m")(test.pos.getOrElse(Position.here))
      } else {
        outcome
      }
    } else {
      outcome
    }
  }
} 
Example 19
Source File: SparkContextFixture.scala    From spark-vector   with Apache License 2.0
package com.actian.spark_vector

import org.apache.spark.{ SparkConf, SparkContext }
import org.apache.spark.sql.SparkSession
import org.scalatest.{ fixture, Outcome }


trait SparkContextFixture { this: fixture.Suite =>
  case class FixtureParam(spark: SparkSession) {
    val sc = spark.sparkContext
  }

  // Give test suite opportunity to set up config
  def setupSparkConf(testName: String, sparkConf: SparkConf): Unit = {}

  // And override master setting
  def getMaster(testName: String): String = {
    "local[*]"
  }

  def withFixture(test: OneArgTest): Outcome = {
    val config = new SparkConf(false)
    setupSparkConf(test.name, config)

    // Create context and fixture
    val spark = SparkSession.builder
                            .appName(test.name)
                            .master(getMaster(test.name))
                            .config(config)
                            .getOrCreate()
    val contextFixture = FixtureParam(spark)

    try {
      // Run the test
      withFixture(test.toNoArgTest(contextFixture))
    } finally spark.stop() // shut down spark session
  }

}

object SparkContextFixture {
  // Useful for test suites where a subset of tests require Spark
  def withSpark(appName: String = "test", master: String = "local[*]")(op: SparkSession => Unit): Unit = {
    val config = new SparkConf(false)

    val spark = SparkSession.builder
                            .appName(appName)
                            .master(master)
                            .config(config)
                            .getOrCreate()

    try {
      op(spark)
    } finally {
      // Shut down Spark context after every test
      spark.stop()
    }
  }
} 
Example 20
Source File: ActivationStoreBehaviorBase.scala    From openwhisk   with Apache License 2.0
package org.apache.openwhisk.core.database.test.behavior

import java.time.Instant

import akka.stream.ActorMaterializer
import common.{StreamLogging, WskActorSystem}
import org.apache.openwhisk.common.TransactionId
import org.apache.openwhisk.core.database.{ActivationStore, CacheChangeNotification, UserContext}
import org.apache.openwhisk.core.database.test.behavior.ArtifactStoreTestUtil.storeAvailable
import org.apache.openwhisk.core.entity._
import org.scalatest.concurrent.{IntegrationPatience, ScalaFutures}
import org.scalatest.{BeforeAndAfterEach, FlatSpec, Matchers, Outcome}

import scala.collection.mutable.ListBuffer
import scala.concurrent.Await
import scala.concurrent.duration.Duration
import scala.concurrent.duration.DurationInt
import scala.language.postfixOps
import scala.util.{Random, Try}

trait ActivationStoreBehaviorBase
    extends FlatSpec
    with ScalaFutures
    with Matchers
    with StreamLogging
    with WskActorSystem
    with IntegrationPatience
    with BeforeAndAfterEach {

  protected implicit val materializer: ActorMaterializer = ActorMaterializer()
  protected implicit val notifier: Option[CacheChangeNotification] = None

  def context: UserContext
  def activationStore: ActivationStore
  private val docsToDelete = ListBuffer[(UserContext, ActivationId)]()

  def storeType: String

  protected def transId() = TransactionId(Random.alphanumeric.take(32).mkString)

  override def afterEach(): Unit = {
    cleanup()
    stream.reset()
  }

  override protected def withFixture(test: NoArgTest): Outcome = {
    assume(storeAvailable(storeAvailableCheck), s"$storeType not configured or available")
    val outcome = super.withFixture(test)
    if (outcome.isFailed) {
      println(logLines.mkString("\n"))
    }
    outcome
  }

  protected def storeAvailableCheck: Try[Any] = Try(true)
  //~----------------------------------------< utility methods >

  protected def store(activation: WhiskActivation, context: UserContext)(
    implicit transid: TransactionId,
    notifier: Option[CacheChangeNotification]): DocInfo = {
    val doc = activationStore.store(activation, context).futureValue
    docsToDelete.append((context, ActivationId(activation.docid.asString)))
    doc
  }

  protected def newActivation(ns: String, actionName: String, start: Long): WhiskActivation = {
    WhiskActivation(
      EntityPath(ns),
      EntityName(actionName),
      Subject(),
      ActivationId.generate(),
      Instant.ofEpochMilli(start),
      Instant.ofEpochMilli(start + 1000))
  }

  
  def cleanup()(implicit timeout: Duration = 10 seconds): Unit = {
    implicit val tid: TransactionId = transId()
    docsToDelete.map { e =>
      Try {
        Await.result(activationStore.delete(e._2, e._1), timeout)
      }
    }
    docsToDelete.clear()
  }

} 
Example 21
Source File: FTracingSpec.scala    From opencensus-scala   with Apache License 2.0
package io.opencensus.scala.doobie

import cats.effect.{ContextShift, IO}
import io.opencensus.scala.Tracing
import io.opencensus.scala.http.testSuite.MockTracing
import io.opencensus.trace.{BlankSpan, Status}
import org.scalatest.{OptionValues, Outcome}

import scala.concurrent.ExecutionContext.global
import scala.util.Try
import org.scalatest.flatspec
import org.scalatest.matchers.should.Matchers

class FTracingSpec
    extends flatspec.FixtureAnyFlatSpec
    with Matchers
    with OptionValues {

  implicit val cs: ContextShift[IO] = IO.contextShift(global)

  case class TestInput(fTracing: FTracing[IO], mock: MockTracing)
  override protected def withFixture(test: OneArgTest): Outcome =
    test(clientTracingWithMock())

  override type FixtureParam = TestInput

  behavior of "FTracingSpec"

  it should "start with the correct name" in { f =>
    f.fTracing.traceF(IO(()), "testSpan", None).unsafeRunSync()
    f.mock.startedSpans should have size 1
    f.mock.startedSpans.head.name shouldBe "testSpan"
  }

  it should "trace with parent Span" in { f =>
    val parentSpan = BlankSpan.INSTANCE

    f.fTracing.traceF(IO(()), "testSpan", Some(parentSpan)).unsafeRunSync()
    f.mock.startedSpans should have size 1
    f.mock.startedSpans.head.parentContext.value shouldBe parentSpan.getContext
  }

  it should "stop after normal exit" in { f =>
    f.fTracing.traceF(IO(()), "testSpan", None).unsafeRunSync()
    f.mock.endedSpans should have size 1
    f.mock.endedSpans.head._2.value.getCanonicalCode shouldBe Status.OK.getCanonicalCode
  }

  it should "stop after error" in { f =>
    Try(
      f.fTracing
        .traceF(IO.raiseError(new Exception("TEST")), "testSpan", None)
        .unsafeRunSync()
    )
    f.mock.endedSpans should have size 1
    f.mock.endedSpans.head._2.value.getCanonicalCode shouldBe Status.INTERNAL.getCanonicalCode
  }

  def clientTracingWithMock() = {
    val mockTracing = new MockTracing
    val fTracing = new FTracing[IO] {
      override protected val tracing: Tracing = mockTracing
    }
    TestInput(fTracing, mockTracing)
  }
} 
Example 22
Source File: SparkFunSuite.scala    From spark-alchemy   with Apache License 2.0
package org.apache.spark

// scalastyle:off
import java.io.File

import scala.annotation.tailrec
import org.apache.log4j.{Appender, Level, Logger}
import org.scalatest.{BeforeAndAfter, BeforeAndAfterAll, BeforeAndAfterEach, FunSuite, Outcome, Suite}
import org.apache.spark.internal.Logging
import org.apache.spark.internal.config.Tests.IS_TESTING
import org.apache.spark.util.{AccumulatorContext, Utils}


abstract class SparkFunSuite extends FunSuite with BeforeAndAfterAll with BeforeAndAfterEach with Logging {

  // Temporarily attaches the given appender (and optional level) to a logger for the duration of `f`
  protected def withLogAppender(
    appender: Appender,
    loggerName: Option[String] = None,
    level: Option[Level] = None)(
    f: => Unit): Unit = {
    val logger = loggerName.map(Logger.getLogger).getOrElse(Logger.getRootLogger)
    val restoreLevel = logger.getLevel
    logger.addAppender(appender)
    if (level.isDefined) {
      logger.setLevel(level.get)
    }
    try f finally {
      logger.removeAppender(appender)
      if (level.isDefined) {
        logger.setLevel(restoreLevel)
      }
    }
  }
} 
Example 23
Source File: SparkSolrFunSuite.scala    From spark-solr   with Apache License 2.0
package com.lucidworks.spark

import org.scalatest.{FunSuite, Outcome}


// Suite header elided from this excerpt; assumed here to mix in scala-logging's LazyLogging, which supplies `logger`
abstract class SparkSolrFunSuite extends FunSuite with com.typesafe.scalalogging.LazyLogging {

  final protected override def withFixture(test: NoArgTest): Outcome = {
    val testName = test.text
    val suiteName = this.getClass.getName
    val shortSuiteName = suiteName.replaceAll("com.lucidworks.spark", "c.l.s")
    try {
      logger.info(s"\n\n===== TEST OUTPUT FOR $shortSuiteName: '$testName' =====\n")
      test()
    } finally {
      logger.info(s"\n\n===== FINISHED $shortSuiteName: '$testName' =====\n")
    }
  }
} 
Example 24
Source File: SparkFunSuite.scala    From bisecting-kmeans   with Apache License 2.0
package org.apache.spark

// scalastyle:off

import org.scalatest.{FunSuite, Outcome}


abstract class SparkFunSuite extends FunSuite with Logging {

  final protected override def withFixture(test: NoArgTest): Outcome = {
    val testName = test.text
    val suiteName = this.getClass.getName
    val shortSuiteName = suiteName.replaceAll("org.apache.spark", "o.a.s")
    try {
      logInfo(s"\n\n===== TEST OUTPUT FOR $shortSuiteName: '$testName' =====\n")
      test()
    } finally {
      logInfo(s"\n\n===== FINISHED $shortSuiteName: '$testName' =====\n")
    }
  }

} 
Example 25
Source File: SparkTest.scala    From Spark-Scala-Maven-Example   with MIT License
package net.martinprobson.spark

import java.io.InputStream

import grizzled.slf4j.Logging
import org.apache.spark.sql.SparkSession
import org.scalatest.{Outcome, fixture}

class SparkTest extends fixture.FunSuite with Logging {

  type FixtureParam = SparkSession

  def withFixture(test: OneArgTest): Outcome = {
    val sparkSession = SparkSession.builder
      .appName("Test-Spark-Local")
      .master("local[2]")
      .getOrCreate()
    try {
      withFixture(test.toNoArgTest(sparkSession))
    } finally sparkSession.stop
  }

  test("empsRDD rowcount") { spark =>
    val empsRDD = spark.sparkContext.parallelize(getInputData("/data/employees.json"), 5)
    assert(empsRDD.count === 1000)
  }

  test("titlesRDD rowcount") { spark =>
    val titlesRDD = spark.sparkContext.parallelize(getInputData("/data/titles.json"), 5)
    assert(titlesRDD.count === 1470)
  }

  private def getInputData(name: String): Seq[String] = {
    val is: InputStream = getClass.getResourceAsStream(name)
    scala.io.Source.fromInputStream(is).getLines.toSeq
  }
} 
Example 26
Source File: SparkFunSuite.scala    From gihyo-spark-book-example   with Apache License 2.0
package jp.gihyo.spark

// scalastyle:off
import org.apache.log4j.{Level, Logger}
import org.scalatest.{FunSuite, Outcome}

import org.apache.spark.Logging


abstract class SparkFunSuite extends FunSuite with Logging {

  final protected override def withFixture(test: NoArgTest): Outcome = {
    val testName = test.text
    val suiteName = this.getClass.getName
    val shortSuiteName = suiteName.replaceAll("org.apache.spark", "o.a.s")
    try {
      Logger.getLogger("org").setLevel(Level.OFF)
      Logger.getLogger("akka").setLevel(Level.OFF)

      logInfo(s"\n\n===== TEST OUTPUT FOR $shortSuiteName: '$testName' =====\n")
      test()
    } finally {
      logInfo(s"\n\n===== FINISHED $shortSuiteName: '$testName' =====\n")
    }
  }

} 
Example 27
Source File: TemporaryFolder.scala    From random-projections-at-berlinbuzzwords   with Apache License 2.0
package com.stefansavev

import org.junit.rules.TemporaryFolder
import org.junit.runner.Description
import org.junit.runners.model.Statement
import org.scalatest.{SuiteMixin, Outcome, Suite}

//Reference: http://stackoverflow.com/questions/32160549/using-junit-rule-with-scalatest-e-g-temporaryfolder
trait TemporaryFolderFixture extends SuiteMixin {
  this: Suite =>
  val temporaryFolder = new TemporaryFolder

  abstract override def withFixture(test: NoArgTest) = {
    var outcome: Outcome = null
    val statementBody = () => outcome = super.withFixture(test)
    temporaryFolder(
      new Statement() {
        override def evaluate(): Unit = statementBody()
      },
      Description.createSuiteDescription("JUnit rule wrapper")
    ).evaluate()
    outcome
  }
} 
Example 28
Source File: StandaloneSanityTestSupport.scala    From openwhisk   with Apache License 2.0
package org.apache.openwhisk.standalone

import org.scalatest.{Canceled, Outcome, TestSuite}

trait StandaloneSanityTestSupport extends TestSuite {

  protected def supportedTests: Set[String]

  override def withFixture(test: NoArgTest): Outcome = {
    if (supportedTests.contains(test.name)) {
      super.withFixture(test)
    } else {
      Canceled()
    }
  }
} 
Example 29
Source File: SparkFunSuite.scala    From drizzle-spark   with Apache License 2.0
package org.apache.spark

// scalastyle:off
import java.io.File

import org.scalatest.{BeforeAndAfterAll, FunSuite, Outcome}

import org.apache.spark.internal.Logging
import org.apache.spark.util.AccumulatorContext


abstract class SparkFunSuite extends FunSuite with BeforeAndAfterAll with Logging {

  final protected override def withFixture(test: NoArgTest): Outcome = {
    val testName = test.text
    val suiteName = this.getClass.getName
    val shortSuiteName = suiteName.replaceAll("org.apache.spark", "o.a.s")
    try {
      logInfo(s"\n\n===== TEST OUTPUT FOR $shortSuiteName: '$testName' =====\n")
      test()
    } finally {
      logInfo(s"\n\n===== FINISHED $shortSuiteName: '$testName' =====\n")
    }
  }

} 
Example 30
Source File: SparkFunSuite.scala    From hivemall-spark   with Apache License 2.0
package org.apache.spark

// scalastyle:off
import org.scalatest.{FunSuite, Outcome}


abstract class SparkFunSuite extends FunSuite with Logging {

  final protected override def withFixture(test: NoArgTest): Outcome = {
    val testName = test.text
    val suiteName = this.getClass.getName
    val shortSuiteName = suiteName.replaceAll("org.apache.spark", "o.a.s")
    try {
      logInfo(s"\n\n===== TEST OUTPUT FOR $shortSuiteName: '$testName' =====\n")
      test()
    } finally {
      logInfo(s"\n\n===== FINISHED $shortSuiteName: '$testName' =====\n")
    }
  }

} 
Example 31
Source File: RerunnableClockSuite.scala    From catbird   with Apache License 2.0
package io.catbird.util.effect

import java.time.Instant
import java.util.concurrent.TimeUnit

import cats.effect.Clock
import com.twitter.util.Await
import io.catbird.util.Rerunnable
import org.scalatest.Outcome
import org.scalatest.concurrent.Eventually
import org.scalatest.funsuite.FixtureAnyFunSuite


class RerunnableClockSuite extends FixtureAnyFunSuite with Eventually {

  protected final class FixtureParam {
    def now: Instant = Instant.now()

    val clock: Clock[Rerunnable] = RerunnableClock()
  }

  test("Retrieval of real time") { f =>
    eventually {
      val result = Await.result(
        f.clock.realTime(TimeUnit.MILLISECONDS).map(Instant.ofEpochMilli).run
      )

      assert(java.time.Duration.between(result, f.now).abs().toMillis < 50)
    }
  }

  test("Retrieval of monotonic time") { f =>
    eventually {
      val result = Await.result(
        f.clock.monotonic(TimeUnit.NANOSECONDS).run
      )

      val durationBetween = Math.abs(System.nanoTime() - result)
      assert(TimeUnit.MILLISECONDS.convert(durationBetween, TimeUnit.NANOSECONDS) < 5)
    }
  }

  override protected def withFixture(test: OneArgTest): Outcome = withFixture(test.toNoArgTest(new FixtureParam))
} 
Example 32
Source File: RerunnableContextShiftSuite.scala    From catbird   with Apache License 2.0
package io.catbird.util.effect

import cats.effect.{ ContextShift, IO, Sync }
import com.twitter.util.{ Await, Future, FuturePool }
import io.catbird.util.Rerunnable
import org.scalatest.Outcome
import org.scalatest.funsuite.FixtureAnyFunSuite

class RerunnableContextShiftSuite extends FixtureAnyFunSuite with ThreadPoolNamingSupport {

  protected final class FixtureParam {
    val futurePoolName = "future-pool"
    val otherPoolName = "other-pool"
    val ioPoolName = "io-pool"

    val futurePool = FuturePool.interruptible(newNamedThreadPool(futurePoolName))
    val otherPool = newNamedThreadPool(otherPoolName)
    val ioPool = newNamedThreadPool(ioPoolName)

    def newIO: IO[String] = IO(currentThreadName())

    def newFuture: Future[String] = futurePool(currentThreadName())

    def newRerunnable: Rerunnable[String] = Rerunnable(currentThreadName())
  }

  test("ContextShift[Rerunnable].shift shifts to the pool of the instance") { f =>
    implicit val cs: ContextShift[Rerunnable] =
      RerunnableContextShift.fromExecutionContext(f.ioPool)

    val (poolName1, poolName2, poolName3) =
      (for {
        poolName1 <- Rerunnable.fromFuture(f.newFuture)

        _ <- ContextShift[Rerunnable](cs).shift

        poolName2 <- Sync[Rerunnable].delay(currentThreadName())

        poolName3 <- Rerunnable.fromFuture(f.newFuture)
      } yield (poolName1, poolName2, poolName3)).run.await

    assert(poolName1 == f.futurePoolName)
    assert(poolName2 == f.ioPoolName)
    assert(poolName2 == f.ioPoolName)
  }

  test("ContextShift[Rerunnable].evalOn executes on correct pool and shifts back to previous pool") { f =>
    implicit val cs: ContextShift[Rerunnable] =
      RerunnableContextShift.fromExecutionContext(f.ioPool)

    val (poolName1, poolName2, poolName3) =
      (for {
        poolName1 <- f.newRerunnable

        poolName2 <- ContextShift[Rerunnable].evalOn(f.otherPool)(f.newRerunnable)

        poolName3 <- f.newRerunnable
      } yield (poolName1, poolName2, poolName3)).run.await

    assert(poolName1 == currentThreadName()) // The first rerunnable is not explicitly evaluated on a dedicated pool
    assert(poolName2 == f.otherPoolName)
    assert(poolName3 == f.ioPoolName)
  }

  test("ContextShift[Rerunnable].evalOn executes on correct pool and shifts back to future pool") { f =>
    implicit val cs: ContextShift[Rerunnable] =
      RerunnableContextShift.fromExecutionContext(f.ioPool)

    val (poolName1, poolName2, poolName3) =
      (for {
        poolName1 <- Rerunnable.fromFuture(f.newFuture) // The future was started on a dedicated pool (e.g. netty)

        poolName2 <- ContextShift[Rerunnable].evalOn(f.otherPool)(f.newRerunnable)

        poolName3 <- Rerunnable.fromFuture(f.newFuture)
      } yield (poolName1, poolName2, poolName3)).run.await

    assert(poolName1 == f.futurePoolName)
    assert(poolName2 == f.otherPoolName)
    assert(poolName3 == f.futurePoolName)
  }

  implicit private class FutureAwaitOps[A](future: Future[A]) {
    def await: A = Await.result(future)
  }

  override protected def withFixture(test: OneArgTest): Outcome = withFixture(test.toNoArgTest(new FixtureParam))
} 
Example 33
Source File: ContextShiftingSuite.scala    From catbird   with Apache License 2.0
package io.catbird.util.effect

import cats.effect.{ ContextShift, IO }
import com.twitter.util.{ ExecutorServiceFuturePool, Future, FuturePool }
import org.scalatest.Outcome
import org.scalatest.funsuite.FixtureAnyFunSuite

import scala.concurrent.ExecutionContext

class ContextShiftingSuite extends FixtureAnyFunSuite with ThreadPoolNamingSupport {

  protected final class FixtureParam {
    val ioPoolName = "io-pool"
    val futurePoolName = "future-pool"

    val ioPool = newNamedThreadPool(ioPoolName)

    val futurePool: ExecutorServiceFuturePool = // threadpool of Future (often managed by a library like finagle-http)
      FuturePool(newNamedThreadPool(futurePoolName))

    def newIO: IO[String] = IO(currentThreadName())

    def newFuture: Future[String] = futurePool.apply {
      // Not 100% sure why but this sleep is needed to reproduce the error. There might be an optimization if the
      // Future is already resolved at some point
      Thread.sleep(200)
      currentThreadName()
    }
  }

  test("After resolving the Future with futureToAsync stay on the Future threadpool") { f =>
    implicit val contextShift: ContextShift[IO] = // threadpool of IO (often provided by IOApp)
      IO.contextShift(ExecutionContext.fromExecutor(f.ioPool))

    val (futurePoolName, ioPoolName) = (for {
      futurePoolName <- futureToAsync[IO, String](f.newFuture)

      ioPoolName <- f.newIO
    } yield (futurePoolName, ioPoolName)).start(contextShift).flatMap(_.join).unsafeRunSync()

    assert(futurePoolName == f.futurePoolName)
    assert(ioPoolName == f.futurePoolName) // Uh oh, this is likely not what the user wants
  }

  test("After resolving the Future with futureToAsyncAndShift shift back to the threadpool of ContextShift[F]") { f =>
    implicit val contextShift: ContextShift[IO] = // threadpool of IO (often provided by IOApp)
      IO.contextShift(ExecutionContext.fromExecutor(f.ioPool))

    // If you'd use `futureToAsync` here instead, this whole thing would sometimes stay on the future-pool
    val (futurePoolName, ioPoolName) = (for {
      futurePoolName <- futureToAsyncAndShift[IO, String](f.newFuture)

      ioPoolName <- f.newIO
    } yield (futurePoolName, ioPoolName))
      .start(contextShift) // start the computation on the default threadpool...
      .flatMap(_.join) // ...then block until we have the results
      .unsafeRunSync()

    assert(futurePoolName == f.futurePoolName)
    assert(ioPoolName == f.ioPoolName)
  }

  override protected def withFixture(test: OneArgTest): Outcome = withFixture(test.toNoArgTest(new FixtureParam))
} 
Example 34
Source File: RerunnableTimerSuite.scala    From catbird   with Apache License 2.0
package io.catbird.util.effect

import cats.effect.Timer
import org.scalatest.Outcome
import org.scalatest.funsuite.FixtureAnyFunSuite
import com.twitter.util
import com.twitter.util.{ Await, Future }
import io.catbird.util.Rerunnable

import scala.concurrent.duration._

class RerunnableTimerSuite extends FixtureAnyFunSuite {

  protected final class FixtureParam {
    val twitterTimer: util.Timer = new util.JavaTimer()
  }

  test("A timer can be used to delay execution") { f =>
    implicit val timer: Timer[Rerunnable] = RerunnableTimer(f.twitterTimer)

    val result = Await.result(
      Future.selectIndex(
        Vector(
          for {
            _ <- Timer[Rerunnable].sleep(100.milliseconds).run
            r <- Future.value("slow")
          } yield r,
          Future.value("fast").delayed(util.Duration.fromMilliseconds(50))(f.twitterTimer)
        )
      )
    )

    assert(result == 1) // The first future is delayed for longer, so we expect the second one to win
  }

  override protected def withFixture(test: OneArgTest): Outcome = withFixture(test.toNoArgTest(new FixtureParam))
} 
Example 35
Source File: SparkFunSuite.scala    From graphframes   with Apache License 2.0
package org.graphframes

import org.scalatest.{FunSuite, Outcome}


abstract class SparkFunSuite extends FunSuite with Logging {

  final protected override def withFixture(test: NoArgTest): Outcome = {
    val testName = test.text
    val suiteName = this.getClass.getName
    val shortSuiteName = suiteName.replaceAll("org.apache.spark", "o.a.s")
    try {
      logInfo(s"\n\n===== TEST OUTPUT FOR $shortSuiteName: '$testName' =====\n")
      test()
    } finally {
      logInfo(s"\n\n===== FINISHED $shortSuiteName: '$testName' =====\n")
    }
  }

} 
Example 36
Source File: LivyBaseUnitTestSuite.scala    From incubator-livy   with Apache License 2.0
package org.apache.livy

import org.scalatest.{Outcome, Suite}

trait LivyBaseUnitTestSuite extends Suite with Logging {

  protected override def withFixture(test: NoArgTest): Outcome = {
    val testName = test.name
    val suiteName = this.getClass.getName
    try {
      info(s"\n\n==== TEST OUTPUT FOR $suiteName: '$testName' ====\n")
      test()
    } finally {
      info(s"\n\n==== FINISHED $suiteName: '$testName' ====\n")
    }
  }
} 
Example 37
Source File: SparkFunSuite.scala    From sparkoscope   with Apache License 2.0
package org.apache.spark

// scalastyle:off
import java.io.File

import org.scalatest.{BeforeAndAfterAll, FunSuite, Outcome}

import org.apache.spark.internal.Logging
import org.apache.spark.util.AccumulatorContext


abstract class SparkFunSuite extends FunSuite with BeforeAndAfterAll with Logging {

  final protected override def withFixture(test: NoArgTest): Outcome = {
    val testName = test.text
    val suiteName = this.getClass.getName
    val shortSuiteName = suiteName.replaceAll("org.apache.spark", "o.a.s")
    try {
      logInfo(s"\n\n===== TEST OUTPUT FOR $shortSuiteName: '$testName' =====\n")
      test()
    } finally {
      logInfo(s"\n\n===== FINISHED $shortSuiteName: '$testName' =====\n")
    }
  }

}