org.scalatest.Args Scala Examples

The following examples show how to use org.scalatest.Args. Each example is taken from an open-source project; the project name and license are noted above each code listing.
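All of the examples below share one basic pattern: construct an Args (only a Reporter is required; stopper, filter, configMap, distributor and the remaining fields have defaults) and pass it to Suite.run, which returns a Status. The minimal sketch below shows that pattern in isolation. ExampleSuite and RunWithArgs are hypothetical names used only for this sketch, which assumes ScalaTest 3.1+ (for org.scalatest.funsuite.AnyFunSuite).

import org.scalatest.{Args, Reporter}
import org.scalatest.events.{Event, TestFailed, TestSucceeded}
import org.scalatest.funsuite.AnyFunSuite

// Hypothetical suite used only to demonstrate running with Args.
class ExampleSuite extends AnyFunSuite {
  test("addition works") { assert(1 + 1 == 2) }
}

object RunWithArgs {
  def main(cmdLineArgs: Array[String]): Unit = {
    // Minimal Reporter that prints test outcomes as events arrive.
    val reporter = new Reporter {
      override def apply(event: Event): Unit = event match {
        case t: TestSucceeded => println(s"PASSED: ${t.testName}")
        case t: TestFailed    => println(s"FAILED: ${t.testName}")
        case _                => ()
      }
    }
    // Args requires only the reporter; every other field has a default value.
    val status = new ExampleSuite().run(None, Args(reporter))
    println(s"All tests succeeded: ${status.succeeds()}")
  }
}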
Example 1
Source File: Main.scala    From daml   with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.navigator.test

import scala.collection._
import scala.collection.JavaConverters._
import java.util.concurrent.ConcurrentHashMap

import com.daml.navigator.test.config.Arguments
import com.typesafe.scalalogging.LazyLogging
import org.scalatest.events._
import org.scalatest.{Args, ConfigMap, Reporter}

import scala.util.Try

object Main extends LazyLogging {
  def main(args: Array[String]): Unit = {
    Arguments.parse(args) match {
      case None =>
        System.exit(1)
      case Some(arguments) =>
        val reporter = new LoggerReporter()
        val status = new BrowserTest(arguments)
          .run(None, Args(reporter = reporter, configMap = new ConfigMap(Map.empty[String, Any])))
        val success = Try(status.succeeds()).getOrElse(false)
        val exitCode = if (success) 0 else 1
        val header =
          """
            | 
            |***************************************************************************
            |
            |     Test Results
            |
            |***************************************************************************
            |
          """.stripMargin
        logger.info(header)
        reporter.results.foreach { kv =>
          logger.info(s"${kv._1}")
          kv._2.foreach(logger.info(_))
        }
        val results = reporter.results.toList
        val allTests = results.size
        val failedTests = results.count(kv => kv._2.isDefined)
        val footer =
          s"""
            | 
            |***************************************************************************
            |
            |     All tests: $allTests; tests failed: $failedTests
            |
            |***************************************************************************
            |
          """.stripMargin
        logger.info(footer)
        System.exit(exitCode)
    }
  }
}

class LoggerReporter extends Reporter {

  // Test statuses with optional errors
  val results: concurrent.Map[String, Option[String]] =
    new ConcurrentHashMap[String, Option[String]]().asScala

  override def apply(e: Event): Unit = {
    e match {
      case t: TestSucceeded =>
        results.put(s"  Test succeeded: ${t.testName}", None)
        ()
      case t: TestFailed =>
        results.put(
          s"  Test failed: ${t.testName}",
          t.throwable.map(_.getMessage).map(e => s"      error: $e")
        )
        ()

      case _ => ()
    }

  }

} 
Example 2
Source File: InformativeTestStart.scala    From matcher   with MIT License
package com.wavesplatform.dex.it.test

import java.time.{LocalDateTime, ZoneId}

import com.wavesplatform.dex.it.api.BaseContainersKit
import mouse.any._
import org.scalatest.{Args, Status, Suite}

import scala.util.{Failure, Success}

trait InformativeTestStart extends Suite { self: BaseContainersKit =>

  override protected def runTest(testName: String, args: Args): Status = {

    def print(text: String): Unit = writeGlobalLog(s"---------- [${LocalDateTime.now(ZoneId.of("UTC"))}] $text ----------")

    print(s"Test '$testName' started")

    super.runTest(testName, args) unsafeTap {
      _.whenCompleted {
        case Success(r) => print(s"Test '$testName' ${if (r) "succeeded" else "failed"}")
        case Failure(e) => print(s"Test '$testName' failed with exception '${e.getClass.getSimpleName}'")
      }
    }
  }

  protected def writeGlobalLog(x: String): Unit = {
    log.debug(x)
    knownContainers.get().foreach { _.printDebugMessage(x) }
  }
} 
Example 3
Source File: LeaseTestSuite.scala    From akka-management   with Apache License 2.0
package akka.coordination.lease.kubernetes

import akka.actor.{ ActorSystem, CoordinatedShutdown }
import org.scalatest.{ Args, Filter, Reporter, Stopper }
import org.scalatest.events.{ Event, TestFailed }

import scala.util.{ Failure, Success, Try }

object LeaseTestSuite {

  def main(args: Array[String]): Unit = {
    val as = ActorSystem("LeaseTestSuite")
    val log = as.log
    log.info("Running test")

    val leaseSpec = new LeaseSpec {
      override def system: ActorSystem = as
    }
    @volatile var failed = false

    val reporter = new Reporter() {
      override def apply(event: Event): Unit =
        event match {
          case tf: TestFailed =>
            failed = true
            log.error("TestFailed({}): {}", tf.testName, tf.message)
          case _ =>
        }
    }

    val testSuite = Try(leaseSpec.run(None, Args(reporter, Stopper.default, Filter())))
    log.info("Test complete {}", testSuite)
    testSuite match {
      case Success(_) if !failed =>
        log.info("Test succeeded")
        CoordinatedShutdown(as).run(TestPassedReason)
      case Success(_) if failed =>
        log.info("Test failed, see the logs")
        CoordinatedShutdown(as).run(TestFailedReason)
      case Failure(exception) =>
        log.error(exception, "Test exception")
        CoordinatedShutdown(as).run(TestFailedReason)
    }
  }

} 
Example 4
Source File: TestContainersForAll.scala    From testcontainers-scala   with MIT License
package com.dimafeng.testcontainers.scalatest

import org.scalatest.{Args, CompositeStatus, Status, Suite}


trait TestContainersForAll extends TestContainersSuite { self: Suite =>

  abstract override def run(testName: Option[String], args: Args): Status = {
    if (expectedTestCount(args.filter) == 0) {
      new CompositeStatus(Set.empty)
    } else {
      val containers = startContainers()
      startedContainers = Some(containers)
      try {
        afterContainersStart(containers)
        super.run(testName, args)
      } finally {
        try {
          beforeContainersStop(containers)
        }
        finally {
          try {
            startedContainers.foreach(_.stop())
          }
          finally {
            startedContainers = None
          }
        }
      }
    }
  }

  abstract protected override def runTest(testName: String, args: Args): Status = {
    @volatile var testCalled = false
    @volatile var afterTestCalled = false

    try {
      startedContainers.foreach(beforeTest)

      testCalled = true
      val status = super.runTest(testName, args)

      afterTestCalled = true
      if (!status.succeeds()) {
        val err = new RuntimeException("Test failed")
        startedContainers.foreach(afterTest(_, Some(err)))
      } else {
        startedContainers.foreach(afterTest(_, None))
      }

      status
    }
    catch {
      case e: Throwable =>
        if (testCalled && !afterTestCalled) {
          afterTestCalled = true
          startedContainers.foreach(afterTest(_, Some(e)))
        }

        throw e
    }
  }
} 
Example 5
Source File: TestContainersForEach.scala    From testcontainers-scala   with MIT License
package com.dimafeng.testcontainers.scalatest

import org.scalatest.{Args, Status, Suite}


trait TestContainersForEach extends TestContainersSuite { self: Suite =>

  abstract protected override def runTest(testName: String, args: Args): Status = {
    val containers = startContainers()
    startedContainers = Some(containers)

    @volatile var testCalled = false
    @volatile var afterTestCalled = false

    try {
      afterContainersStart(containers)
      beforeTest(containers)

      testCalled = true
      val status = super.runTest(testName, args)

      afterTestCalled = true
      if (!status.succeeds()) {
        val err = new RuntimeException("Test failed")
        startedContainers.foreach(afterTest(_, Some(err)))
      } else {
        startedContainers.foreach(afterTest(_, None))
      }

      status
    }
    catch {
      case e: Throwable =>
        if (testCalled && !afterTestCalled) {
          afterTestCalled = true
          afterTest(containers, Some(e))
        }

        throw e
    }
    finally {
      try {
        beforeContainersStop(containers)
      }
      finally {
        try {
          startedContainers.foreach(_.stop())
        }
        finally {
          startedContainers = None
        }
      }
    }
  }
} 
Example 6
Source File: MultipleContainersSpec.scala    From testcontainers-scala   with MIT License
package com.dimafeng.testcontainers

import java.util.Optional

import com.dimafeng.testcontainers.MultipleContainersSpec.{InitializableContainer, TestSpec}
import org.mockito.ArgumentMatchers
import org.mockito.ArgumentMatchers.any
import org.mockito.Mockito.verify
import org.scalatest.{Args, FlatSpec, Reporter}
import org.scalatestplus.mockito.MockitoSugar

class MultipleContainersSpec extends BaseSpec[ForEachTestContainer] {
  it should "call all expected methods of the multiple containers" in {
    val container1 = mock[SampleJavaContainer]
    val container2 = mock[SampleJavaContainer]

    val containers = MultipleContainers(new SampleContainer(container1), new SampleContainer(container2))

    new TestSpec({
      assert(1 == 1)
    }, containers).run(None, Args(mock[Reporter]))

    verify(container1).beforeTest(any())
    verify(container1).start()
    verify(container1).afterTest(any(), ArgumentMatchers.eq(Optional.empty()))
    verify(container1).stop()

    verify(container2).beforeTest(any())
    verify(container2).start()
    verify(container2).afterTest(any(), ArgumentMatchers.eq(Optional.empty()))
    verify(container2).stop()
  }

  
  it should "initialize containers lazily in `MultipleContainers` to let second container be depended on start data of the first one" in {
    lazy val container1 = new InitializableContainer("after start value")
    lazy val container2 = new InitializableContainer(container1.value)

    val containers = MultipleContainers(container1, container2)

    new TestSpec({
      assert(1 == 1)
    }, containers).run(None, Args(mock[Reporter]))

    assert(container1.value == "after start value")
    assert(container2.value == "after start value")
  }
}

object MultipleContainersSpec {

  class InitializableContainer(valueToBeSetAfterStart: String) extends SingleContainer[SampleJavaContainer] with MockitoSugar {
    override implicit val container: SampleJavaContainer = mock[SampleJavaContainer]
    var value: String = _

    override def start(): Unit = {
      value = valueToBeSetAfterStart
    }
  }

  class ExampleContainerWithVariable(val variable: String) extends SingleContainer[SampleJavaContainer] with MockitoSugar {
    override implicit val container: SampleJavaContainer = mock[SampleJavaContainer]
  }

  protected class TestSpec(testBody: => Unit, _container: Container) extends FlatSpec with ForEachTestContainer {
    override val container = _container

    it should "test" in {
      testBody
    }
  }

} 
Example 7
Source File: TestNameFixture.scala    From morpheus   with Apache License 2.0
package org.opencypher.okapi.testing

import org.scalatest.{Args, Status}

trait TestNameFixture extends BaseTestSuite {

  
  protected def separator: String

  final def testName: String = __testName.get

  private var __testName: Option[String] = None

  override protected def runTest(testName: String, args: Args): Status = {
    val separatorIndex = testName.indexOf(separator)

    val name = separatorIndex match {
      case -1 => testName
      case _ => testName.substring(separatorIndex + separator.length).trim.stripMargin
    }
    __testName = Some(name)
    try {
      super.runTest(testName, args)
    } finally {
      __testName = None
    }
  }
} 
Example 8
Source File: ControlledParallelSuite.scala    From scala-debugger   with Apache License 2.0
package org.scaladebugger.test.helpers

import java.util.concurrent._
import java.util.concurrent.atomic.AtomicInteger

import ControlledParallelSuite._
import org.scalatest.{Args, Distributor, Status, Suite}

import scala.util.Try

object ControlledParallelSuite {
  lazy val EnvironmentPoolSize: Try[Int] =
    Try(System.getenv("SCALATEST_PARALLEL_TESTS").toInt)
  lazy val DefaultPoolSize: Int = Runtime.getRuntime.availableProcessors() * 2
  def calculatePoolSize(): Int = EnvironmentPoolSize.getOrElse(DefaultPoolSize)

  private val atomicThreadCounter: AtomicInteger = new AtomicInteger

  lazy val threadFactory: ThreadFactory = new ThreadFactory {
    val defaultThreadFactory = Executors.defaultThreadFactory

    def newThread(runnable: Runnable): Thread = {
      val thread = defaultThreadFactory.newThread(runnable)
      thread.setName("ScalaTest-" + atomicThreadCounter.incrementAndGet())
      thread
    }
  }

  import scala.collection.JavaConverters._
  val semaMap: collection.mutable.Map[String, Semaphore] =
    new ConcurrentHashMap[String, Semaphore]().asScala
}


// Note: the enclosing trait declaration was dropped in extraction; it is reconstructed
// minimally here, and the poolSize definition is an assumption based on calculatePoolSize() above.
trait ControlledParallelSuite extends Suite {
  import ControlledParallelSuite._

  protected def poolSize: Int = calculatePoolSize()

  // Limits how many concurrently running blocks share the semaphore identified by `id`.
  def semaSync[T](id: String)(thunk: => T): T = {
    val semaphore = semaMap.getOrElseUpdate(id, new Semaphore(poolSize))

    semaphore.acquire()
    val result = Try(thunk)
    semaphore.release()
    result.get
  }
}
Example 9
Source File: OpEstimatorSpec.scala    From TransmogrifAI   with BSD 3-Clause "New" or "Revised" License
package com.salesforce.op.test

import java.io.File

import com.salesforce.op.features.types._
import com.salesforce.op.stages._
import org.apache.spark.ml.{Estimator, Model}
import org.apache.spark.sql.Dataset
import org.scalactic.Equality
import org.scalatest.events.{Event, TestFailed}
import org.scalatest.{Args, Reporter}

import scala.collection.mutable.ArrayBuffer
import scala.reflect._
import scala.reflect.runtime.universe._


  // Note: this excerpt is truncated; the enclosing abstract class declaration (OpEstimatorSpec,
  // which defines the O and ModelType type parameters used below) was dropped in extraction.
  private def modelSpec(): Unit = {
    // Define transformer spec for the fitted model reusing the same inputs & Spark context
    val modelSpec = new OpTransformerSpec[O, ModelType] {
      override implicit val featureTypeEquality: Equality[O] = OpEstimatorSpec.this.featureTypeEquality
      override implicit val seqEquality: Equality[Seq[O]] = OpEstimatorSpec.this.seqEquality
      lazy val transformer: ModelType = OpEstimatorSpec.this.model
      lazy val inputData: Dataset[_] = OpEstimatorSpec.this.inputData
      lazy val expectedResult: Seq[O] = OpEstimatorSpec.this.expectedResult
      override implicit lazy val spark = OpEstimatorSpec.this.spark
      override def specName: String = "model"
      override def tempDir: File = OpEstimatorSpec.this.tempDir
    }

    // Register all model spec tests
    for {
      testName <- modelSpec.testNames
    } registerTest(testName) {
      // Run test & collect failures
      val failures = ArrayBuffer.empty[TestFailed]
      val reporter = new Reporter {
        def apply(event: Event): Unit = event match {
          case f: TestFailed => failures += f
          case _ =>
        }
      }
      // Note: We set 'runTestInNewInstance = true' to avoid restarting Spark context on every test run
      val args = Args(reporter, runTestInNewInstance = true)
      modelSpec.run(testName = Some(testName), args = args)

      // Propagate the failure if any
      for {failure <- failures.headOption} {
        failure.throwable.map(fail(failure.message, _)).getOrElse(fail(failure.message))
      }
    }
  }

} 
Example 10
Source File: ReportingTestName.scala    From Waves   with MIT License
package com.wavesplatform.it

import com.wavesplatform.http.DebugMessage
import com.wavesplatform.it.api.AsyncHttpApi._
import com.wavesplatform.utils.ScorexLogging
import org.scalatest.{Args, Status, Suite, SuiteMixin}

import scala.concurrent.duration._
import scala.concurrent.{Await, Future}

trait ReportingTestName extends SuiteMixin with ScorexLogging {
  th: Suite with Nodes =>

  abstract override protected def runTest(testName: String, args: Args): Status = {
    print(s"Test '$testName' started")
    val r = super.runTest(testName, args)
    print(s"Test '$testName' ${if (r.succeeds()) "SUCCEEDED" else "FAILED"}")
    r
  }

  private def print(text: String): Unit = {
    import scala.concurrent.ExecutionContext.Implicits.global
    val formatted = s"---------- $text ----------"
    log.debug(formatted)
    try {
      Await.result(Future.traverse(nodes)(_.printDebugMessage(DebugMessage(formatted))), 10.seconds)
    } catch {
      case _: Throwable => ()
    }
  }
} 
Example 11
Source File: DockerBased.scala    From Waves   with MIT License
package com.wavesplatform.it

import com.wavesplatform.it.Docker.DockerNode
import monix.eval.Coeval
import org.scalatest.{Args, BeforeAndAfterAll, Status, Suite}

trait DockerBased extends BeforeAndAfterAll {
  this: Suite with Nodes =>

  protected val dockerSingleton: Coeval[Docker] = Coeval.evalOnce(createDocker)
  final def docker: Docker                      = dockerSingleton()

  abstract override protected def runTest(testName: String, args: Args): Status = {
    def printThreadDump(): Unit = nodes.collect {
      case node: DockerNode =>
        docker.printThreadDump(node)
    }
    val r = super.runTest(testName, args)
    if (!r.succeeds()) printThreadDump()
    r
  }

  protected def createDocker: Docker = Docker(getClass)
  override protected def afterAll(): Unit = {
    super.afterAll()
    docker.close()
  }
} 
Example 12
Source File: GlowBaseTest.scala    From glow   with Apache License 2.0
package io.projectglow.sql

import htsjdk.samtools.util.Log
import org.apache.spark.sql.SparkSession
import org.apache.spark.{DebugFilesystem, SparkConf}
import org.scalatest.concurrent.{AbstractPatienceConfiguration, Eventually}
import org.scalatest.time.{Milliseconds, Seconds, Span}
import org.scalatest.{Args, FunSuite, Status, Tag}

import io.projectglow.Glow
import io.projectglow.SparkTestShim.SharedSparkSessionBase
import io.projectglow.common.{GlowLogging, TestUtils}
import io.projectglow.sql.util.BGZFCodec

abstract class GlowBaseTest
    extends FunSuite
    with SharedSparkSessionBase
    with GlowLogging
    with GlowTestData
    with TestUtils
    with JenkinsTestPatience {

  override protected def sparkConf: SparkConf = {
    super
      .sparkConf
      .set("spark.hadoop.io.compression.codecs", classOf[BGZFCodec].getCanonicalName)
  }

  override def initializeSession(): Unit = ()

  override protected implicit def spark: SparkSession = {
    val sess = SparkSession.builder().config(sparkConf).master("local[2]").getOrCreate()
    Glow.register(sess)
    SparkSession.setActiveSession(sess)
    Log.setGlobalLogLevel(Log.LogLevel.ERROR)
    sess
  }

  protected def gridTest[A](testNamePrefix: String, testTags: Tag*)(params: Seq[A])(
      testFun: A => Unit): Unit = {
    for (param <- params) {
      test(testNamePrefix + s" ($param)", testTags: _*)(testFun(param))
    }
  }

  override def afterEach(): Unit = {
    DebugFilesystem.assertNoOpenStreams()
    eventually {
      assert(spark.sparkContext.getPersistentRDDs.isEmpty)
      assert(spark.sharedState.cacheManager.isEmpty, "Cache not empty.")
    }
    super.afterEach()
  }

  override def runTest(testName: String, args: Args): Status = {
    logger.info(s"Running test '$testName'")
    val res = super.runTest(testName, args)
    if (res.succeeds()) {
      logger.info(s"Done running test '$testName'")
    } else {
      logger.info(s"Done running test '$testName' with a failure")
    }
    res
  }

  protected def withSparkConf[T](configs: Map[String, String])(f: => T): T = {
    val initialConfigValues = configs.keys.map(k => (k, spark.conf.getOption(k)))
    try {
      configs.foreach { case (k, v) => spark.conf.set(k, v) }
      f
    } finally {
      initialConfigValues.foreach {
        case (k, Some(v)) => spark.conf.set(k, v)
        case (k, None) => spark.conf.unset(k)
      }
    }
  }
}


// Note: the trait declaration below was dropped in extraction; it is reconstructed here from
// the JenkinsTestPatience mixin and the AbstractPatienceConfiguration/Eventually imports above.
trait JenkinsTestPatience extends AbstractPatienceConfiguration with Eventually {

  final override implicit val patienceConfig: PatienceConfig =
    if (sys.env.get("JENKINS_HOST").nonEmpty) {
      // increase the timeout on jenkins where parallelizing causes things to be very slow
      PatienceConfig(Span(10, Seconds), Span(50, Milliseconds))
    } else {
      // use the default timeout on local machines so failures don't hang for a long time
      PatienceConfig(Span(5, Seconds), Span(15, Milliseconds))
    }
} 
Example 13
Source File: PipelineSpec.scala    From scio   with Apache License 2.0
package com.spotify.scio.testing

import org.scalatest.{Args, ConfigMap, Status}
import JobTest.BeamOptions
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers


trait PipelineSpec
    extends AnyFlatSpec
    with Matchers
    with SCollectionMatchers
    with PipelineTestUtils
    with RunEnforcementJobTest {
  private val Beam = """beam\.(.*)""".r

  private var beamOpts: BeamOptions = _

  private val aliases =
    Map(
      "flink" ->
        List("runner" -> "FlinkRunner", "flinkMaster" -> "[local]")
    )

  private def getBeamOptions(m: ConfigMap): List[String] =
    m.collect { case (Beam(k), v) => k -> v }
      .flatMap { case (k, v) => aliases.getOrElse(k, List(k -> v)) }
      .map { case (k, v) => s"--$k=$v" }
      .toList

  implicit def beamOptions: BeamOptions = {
    assume(
      beamOpts != null,
      "PipelineSpec#beamOpts is null, are you using JobTest outside of a " +
        "`\"Test\" should \"work\" in {}` block?"
    )
    beamOpts
  }

  override def run(testName: Option[String], args: Args): Status = {
    if (beamOpts == null && !args.runTestInNewInstance && (expectedTestCount(args.filter) > 0)) {
      beamOpts = BeamOptions(getBeamOptions(args.configMap))
    }
    super.run(testName, args)
  }
}