org.scalatest.exceptions.TestFailedException Scala Examples

The following examples show how to use org.scalatest.exceptions.TestFailedException. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example.
Example 1
Source File: NonEmptyListScalaTestInstancesSpec.scala    From cats-scalatest   with Apache License 2.0 5 votes vote down vote up
package cats.scalatest

import org.scalatest.exceptions.TestFailedException
import org.scalatest.enablers.Collecting
import org.scalatest.LoneElement._
import org.scalatest.Inspectors._
import cats.data.NonEmptyList

class NonEmptyListScalaTestInstancesSpec extends TestBase {
  // Brings the Collecting/Inspecting instances for NonEmptyList into scope.
  import NonEmptyListScalaTestInstances._

  // `loneElement` is enabled on NonEmptyList by the imported Collecting instance.
  "loneElement" should {
    "apply an assertion when there is a single element" in {
      val nel: NonEmptyList[Int] = NonEmptyList.one(10)
      nel.loneElement should be <= 10
    }

    "should throw TestFailedException if the NonEmptyList has more elements" in {
      val nel: NonEmptyList[Int] = NonEmptyList.of(10, 16)
      // NOTE(review): asserting an Int loneElement against `thisRecord` (a TestBase fixture)
      // looks type-suspect — confirm this is the intended assertion and not a copy-paste slip.
      // Do not insert lines below this point: `thisLineNumber - 3` depends on the exact
      // distance between the failing assertion and the check.
      val caught =
        intercept[TestFailedException] {
          nel.loneElement should ===(thisRecord)
        }
      if (isJVM)
        caught.failedCodeLineNumber.value should equal(thisLineNumber - 3)
      caught.failedCodeFileName.value should be("NonEmptyListScalaTestInstancesSpec.scala")
    }
  }

  "inspectors" should {
    "state something about all elements" in {
      val nel: NonEmptyList[Int] = NonEmptyList.of(1, 2, 3, 4, 5)
      // `forAll` over a NonEmptyList works via the imported instances.
      forAll(nel)(_ should be > 0)
    }
  }

  "sizeOf" should {
    "return the size of the collection" in {
      val nel: NonEmptyList[Int] = NonEmptyList.of(1, 2)
      // Summons the Collecting instance directly and exercises its sizeOf.
      implicitly[Collecting[Int, NonEmptyList[Int]]].sizeOf(nel) shouldBe 2
    }
  }
}
Example 2
Source File: FitsTypesAndArraySizesSpec.scala    From ScalaWebTest   with Apache License 2.0 5 votes vote down vote up
package org.scalawebtest.integration.json

import org.scalatest.exceptions.TestFailedException
import play.api.libs.json.{JsValue, Json}

class FitsTypesAndArraySizesSpec extends ScalaWebTestJsonBaseSpec with FitsTypeMismatchBehavior {
  path = "/jsonResponse.json.jsp"

  // Re-parses the JSON response on every access, so each assertion sees the current page.
  def dijkstra: JsValue = Json.parse(webDriver.getPageSource)

  "The json response representing Edsger Dijkstra" should "use the correct types" in {
    // Gauge describing only types and array sizes — concrete values are irrelevant here.
    val completeGauge =
      """{
        | "name": "",
        | "firstName": "",
        | "isTuringAwardWinner": true,
        | "theories": ["", ""],
        | "universities": [{"name": "", "begin": 0, "end": 0}, {}, {}, {}]
        |}
      """.stripMargin
    dijkstra fits typesAndArraySizes of completeGauge
  }

  it should "not have only a single entry in universities" in {
    // The response contains four universities, so a one-element gauge must be rejected.
    val singleUniversityGauge =
      """{
        | "universities": [{}]
        |}
      """.stripMargin
    assertThrows[TestFailedException] {
      dijkstra fits typesAndArraySizes of singleUniversityGauge
    }
  }

  it should behave like jsonGaugeFitting(typesAndArraySizes)
}
Example 3
Source File: FitsTypeMismatchBehavior.scala    From ScalaWebTest   with Apache License 2.0 5 votes vote down vote up
package org.scalawebtest.integration.json

import org.scalatest.exceptions.TestFailedException
import play.api.libs.json.Json

//Can only be used in specs which request /jsonResponse.json.jsp
trait FitsTypeMismatchBehavior {
  self: ScalaWebTestJsonBaseSpec =>

  // Shared behavior: whatever gauge type is supplied must reject documents whose
  // property types do not match the response.
  def jsonGaugeFitting(gaugeType: GaugeType): Unit = {
    def dijkstra = Json.parse(webDriver.getPageSource)

    // Expects the gauge under test to reject the given gauge definition.
    def rejects(gauge: String) =
      assertThrows[TestFailedException] {
        dijkstra fits gaugeType of gauge
      }

    "When verifying JSON using fitsTypes or fitsTypesAndArraySizes" should
      "fail when a String is expected, but an Int provided" in {
      rejects("""{"yearOfBirth": ""}""")
    }
    it should "fail when an Int is expected, but a String provided" in {
      rejects("""{"name": 0}""")
    }
    it should "fail when a Boolean is expected, but a String provided" in {
      rejects("""{"name": true}""")
    }
    it should "fail when an Object is expected, but a String provided" in {
      rejects("""{"name": {}}""")
    }
    it should "fail when an Array is expected, but a String provided" in {
      rejects("""{"name": []}""")
    }
    it should "fail when null is expected, but a String provided" in {
      rejects("""{"name": null}""")
    }
    it should "fail when a property is missing" in {
      rejects("""{"thesis": "Communication with an Automatic Computer"}""")
    }
  }
}
Example 4
Source File: FitsValuesSpec.scala    From ScalaWebTest   with Apache License 2.0 5 votes vote down vote up
package org.scalawebtest.integration.json

import org.scalatest.exceptions.TestFailedException
import play.api.libs.json.{JsValue, Json}

class FitsValuesSpec extends ScalaWebTestJsonBaseSpec with FitsTypeMismatchBehavior {
  path = "/jsonResponse.json.jsp"

  // Re-parses the JSON response on every access, so each assertion sees the current page.
  def dijkstra: JsValue = Json.parse(webDriver.getPageSource)

  // Expects the `values` gauge to reject the given gauge definition.
  private def rejectedByValues(gauge: String) =
    assertThrows[TestFailedException] {
      dijkstra fits values of gauge
    }

  "Fits types" should "report success, when the json gauge contains the same values as the response it is tested against" in {
    // A complete gauge whose values all match the response exactly.
    val completeGauge =
      """{
        | "name": "Dijkstra",
        | "firstName": "Edsger",
        | "yearOfBirth": 1930,
        | "isTuringAwardWinner": true,
        | "theories": [
        |   "shortest path",
        |   "graph theory"
        | ],
        | "falseTheories": null
        |}
      """.stripMargin
    dijkstra fits values of completeGauge
  }
  it should "provide the fit synonym" in {
    val universities = dijkstra \ "universities"
    universities fit values of
      """
        | [
        |   { "name": "Universität Leiden","begin": 1948, "end": 1956 },
        |   { "name": "Mathematisch Centrum Amsterdam", "begin": 1951, "end": 1959 },
        |   { "name": "Technische Universiteit Eindhoven", "begin": 1962, "end": 1984 },
        |   { "name": "University of Texas at Austin", "begin": 1984, "end": 1999 }
        | ]
      """.stripMargin
  }
  it should behave like jsonGaugeFitting(values)
  it should "fail when a String doesn't contain the correct value" in {
    rejectedByValues("""{"name": "Turing"}""")
  }
  it should "fail when an Int doesn't contain the correct value" in {
    rejectedByValues("""{"yearOfBirth": 1995}""")
  }
  it should "fail when a Boolean doesn't contain the correct value" in {
    rejectedByValues("""{"isTuringAwardWinner": false}""")
  }
  it should "fail when an array is not complete" in {
    rejectedByValues(
      """{
        | "theories": ["shortest path"]
        |}""".stripMargin
    )
  }
  it should "fail when an array contains a wrong value" in {
    rejectedByValues(
      """{
        | "theories": ["shortest path", "relational algebra"]
        |}""".stripMargin
    )
  }
}
Example 5
Source File: PolynomialExpansionSuite.scala    From iolap   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.ml.feature

import org.apache.spark.ml.param.ParamsSuite
import org.scalatest.exceptions.TestFailedException

import org.apache.spark.SparkFunSuite
import org.apache.spark.mllib.linalg.{DenseVector, SparseVector, Vector, Vectors}
import org.apache.spark.mllib.util.MLlibTestSparkContext
import org.apache.spark.mllib.util.TestingUtils._
import org.apache.spark.sql.Row

class PolynomialExpansionSuite extends SparkFunSuite with MLlibTestSparkContext {

  test("params") {
    ParamsSuite.checkParams(new PolynomialExpansion)
  }

  // Input vectors shared by both expansion tests: sparse/dense mixes, an all-zero
  // vector, and an empty sparse vector.
  private def inputVectors: Array[Vector] = Array(
    Vectors.sparse(3, Seq((0, -2.0), (1, 2.3))),
    Vectors.dense(-2.0, 2.3),
    Vectors.dense(0.0, 0.0, 0.0),
    Vectors.dense(0.6, -1.1, -3.0),
    Vectors.sparse(3, Seq())
  )

  // Runs the transformer over the shared inputs and checks every expanded vector
  // against its expected counterpart (within an absolute tolerance of 1e-1).
  private def checkExpansion(expander: PolynomialExpansion, expected: Array[Vector]): Unit = {
    val df = sqlContext.createDataFrame(inputVectors.zip(expected)).toDF("features", "expected")
    expander.transform(df).select("polyFeatures", "expected").collect().foreach {
      case Row(expanded: DenseVector, expectedVec: DenseVector) =>
        assert(expanded ~== expectedVec absTol 1e-1)
      case Row(expanded: SparseVector, expectedVec: SparseVector) =>
        assert(expanded ~== expectedVec absTol 1e-1)
      case _ =>
        // Dense inputs must stay dense and sparse must stay sparse after expansion.
        throw new TestFailedException("Unmatched data types after polynomial expansion", 0)
    }
  }

  test("Polynomial expansion with default parameter") {
    // Default degree is 2, so a 3-dimensional input expands to 9 terms.
    val twoDegreeExpansion: Array[Vector] = Array(
      Vectors.sparse(9, Array(0, 1, 2, 3, 4), Array(-2.0, 4.0, 2.3, -4.6, 5.29)),
      Vectors.dense(-2.0, 4.0, 2.3, -4.6, 5.29),
      Vectors.dense(new Array[Double](9)),
      Vectors.dense(0.6, 0.36, -1.1, -0.66, 1.21, -3.0, -1.8, 3.3, 9.0),
      Vectors.sparse(9, Array.empty, Array.empty))

    checkExpansion(
      new PolynomialExpansion().setInputCol("features").setOutputCol("polyFeatures"),
      twoDegreeExpansion)
  }

  test("Polynomial expansion with setter") {
    // Degree 3 over a 3-dimensional input yields 19 terms.
    val threeDegreeExpansion: Array[Vector] = Array(
      Vectors.sparse(19, Array(0, 1, 2, 3, 4, 5, 6, 7, 8),
        Array(-2.0, 4.0, -8.0, 2.3, -4.6, 9.2, 5.29, -10.58, 12.17)),
      Vectors.dense(-2.0, 4.0, -8.0, 2.3, -4.6, 9.2, 5.29, -10.58, 12.17),
      Vectors.dense(new Array[Double](19)),
      Vectors.dense(0.6, 0.36, 0.216, -1.1, -0.66, -0.396, 1.21, 0.726, -1.331, -3.0, -1.8,
        -1.08, 3.3, 1.98, -3.63, 9.0, 5.4, -9.9, -27.0),
      Vectors.sparse(19, Array.empty, Array.empty))

    checkExpansion(
      new PolynomialExpansion().setInputCol("features").setOutputCol("polyFeatures").setDegree(3),
      threeDegreeExpansion)
  }
}
Example 6
Source File: EitherTValues.scala    From guardrail   with MIT License 5 votes vote down vote up
package tests.scalatest

import cats.Functor
import cats.data.EitherT
import org.scalactic.source
import org.scalatest._
import org.scalatest.exceptions.{ StackDepthException, TestFailedException }
import scala.language.higherKinds
import scala.language.implicitConversions

trait EitherTValues {

  implicit def convertEitherTToValuable[F[_]: Functor, L, R](eitherT: EitherT[F, L, R]) = new EitherTValuable(eitherT)

  // Adds `leftValue`/`rightValue` accessors to EitherT; the wrong side fails the test
  // at the caller's source position instead of throwing a raw exception.
  class EitherTValuable[F[_]: Functor, L, R](eitherT: EitherT[F, L, R]) {
    // Fails the test at `pos` with no message and no cause.
    private def failAt(pos: source.Position): Nothing =
      throw new TestFailedException((_: StackDepthException) => Option.empty[String], Option.empty[Throwable], pos)

    def leftValue(implicit pos: source.Position): F[L] =
      eitherT.fold(identity, _ => failAt(pos))

    def rightValue(implicit pos: source.Position): F[R] =
      eitherT.fold(_ => failAt(pos), identity)
  }
}
Example 7
Source File: EitherTValues.scala    From guardrail   with MIT License 5 votes vote down vote up
package tests.scalatest

import cats.Functor
import cats.data.EitherT
import org.scalactic.source
import org.scalatest._
import org.scalatest.exceptions.{ StackDepthException, TestFailedException }
import scala.language.higherKinds
import scala.language.implicitConversions

trait EitherTValues {

  implicit def convertEitherTToValuable[F[_]: Functor, L, R](eitherT: EitherT[F, L, R]) = new EitherTValuable(eitherT)

  // Adds `leftValue`/`rightValue` accessors to EitherT; the wrong side fails the test
  // at the caller's source position instead of throwing a raw exception.
  class EitherTValuable[F[_]: Functor, L, R](eitherT: EitherT[F, L, R]) {
    // Fails the test at `pos` with no message and no cause.
    private def failAt(pos: source.Position): Nothing =
      throw new TestFailedException((_: StackDepthException) => Option.empty[String], Option.empty[Throwable], pos)

    def leftValue(implicit pos: source.Position): F[L] =
      eitherT.fold(identity, _ => failAt(pos))

    def rightValue(implicit pos: source.Position): F[R] =
      eitherT.fold(_ => failAt(pos), identity)
  }
}
Example 8
Source File: EitherTValues.scala    From guardrail   with MIT License 5 votes vote down vote up
package tests.scalatest

import cats.Functor
import cats.data.EitherT
import org.scalactic.source
import org.scalatest._
import org.scalatest.exceptions.{ StackDepthException, TestFailedException }
import scala.language.higherKinds
import scala.language.implicitConversions

trait EitherTValues {

  implicit def convertEitherTToValuable[F[_]: Functor, L, R](eitherT: EitherT[F, L, R]) = new EitherTValuable(eitherT)

  // Adds `leftValue`/`rightValue` accessors to EitherT; the wrong side fails the test
  // at the caller's source position instead of throwing a raw exception.
  class EitherTValuable[F[_]: Functor, L, R](eitherT: EitherT[F, L, R]) {
    // Fails the test at `pos` with no message and no cause.
    private def failAt(pos: source.Position): Nothing =
      throw new TestFailedException((_: StackDepthException) => Option.empty[String], Option.empty[Throwable], pos)

    def leftValue(implicit pos: source.Position): F[L] =
      eitherT.fold(identity, _ => failAt(pos))

    def rightValue(implicit pos: source.Position): F[R] =
      eitherT.fold(_ => failAt(pos), identity)
  }
}
Example 9
Source File: PolynomialExpansionSuite.scala    From spark1.52   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.ml.feature

import org.apache.spark.ml.param.ParamsSuite
import org.scalatest.exceptions.TestFailedException

import org.apache.spark.SparkFunSuite
import org.apache.spark.mllib.linalg.{DenseVector, SparseVector, Vector, Vectors}
import org.apache.spark.mllib.util.MLlibTestSparkContext
import org.apache.spark.mllib.util.TestingUtils._
import org.apache.spark.sql.Row

// Tests PolynomialExpansion: parameter metadata plus degree-2 (default) and degree-3 expansions.
class PolynomialExpansionSuite extends SparkFunSuite with MLlibTestSparkContext {

  test("params") { // checks the transformer's Param metadata
    ParamsSuite.checkParams(new PolynomialExpansion)
  }

  test("Polynomial expansion with default parameter") { // expansion with the default degree
    val data = Array(
      Vectors.sparse(3, Seq((0, -2.0), (1, 2.3))),
      Vectors.dense(-2.0, 2.3),
      Vectors.dense(0.0, 0.0, 0.0),
      Vectors.dense(0.6, -1.1, -3.0),
      Vectors.sparse(3, Seq())
    )

    val twoDegreeExpansion: Array[Vector] = Array(
      Vectors.sparse(9, Array(0, 1, 2, 3, 4), Array(-2.0, 4.0, 2.3, -4.6, 5.29)),
      Vectors.dense(-2.0, 4.0, 2.3, -4.6, 5.29),
      Vectors.dense(new Array[Double](9)),
      Vectors.dense(0.6, 0.36, -1.1, -0.66, 1.21, -3.0, -1.8, 3.3, 9.0),
      Vectors.sparse(9, Array.empty, Array.empty))

    val df = sqlContext.createDataFrame(data.zip(twoDegreeExpansion)).toDF("features", "expected")

    val polynomialExpansion = new PolynomialExpansion()
      .setInputCol("features")
      .setOutputCol("polyFeatures")
    // transform() maps the input DataFrame to a new DataFrame with the expanded features
    polynomialExpansion.transform(df).select("polyFeatures", "expected").collect().foreach {
      case Row(expanded: DenseVector, expected: DenseVector) =>
        assert(expanded ~== expected absTol 1e-1)
      case Row(expanded: SparseVector, expected: SparseVector) =>
        assert(expanded ~== expected absTol 1e-1)
      case _ =>
        // dense input must expand to dense, sparse to sparse
        throw new TestFailedException("Unmatched data types after polynomial expansion", 0)
    }
  }
  // expansion configured via the setDegree setter
  test("Polynomial expansion with setter") {
    val data = Array(
      Vectors.sparse(3, Seq((0, -2.0), (1, 2.3))),
      Vectors.dense(-2.0, 2.3),
      Vectors.dense(0.0, 0.0, 0.0),
      Vectors.dense(0.6, -1.1, -3.0),
      Vectors.sparse(3, Seq())
    )

    val threeDegreeExpansion: Array[Vector] = Array(
      Vectors.sparse(19, Array(0, 1, 2, 3, 4, 5, 6, 7, 8),
        Array(-2.0, 4.0, -8.0, 2.3, -4.6, 9.2, 5.29, -10.58, 12.17)),
      Vectors.dense(-2.0, 4.0, -8.0, 2.3, -4.6, 9.2, 5.29, -10.58, 12.17),
      Vectors.dense(new Array[Double](19)),
      Vectors.dense(0.6, 0.36, 0.216, -1.1, -0.66, -0.396, 1.21, 0.726, -1.331, -3.0, -1.8,
        -1.08, 3.3, 1.98, -3.63, 9.0, 5.4, -9.9, -27.0),
      Vectors.sparse(19, Array.empty, Array.empty))

    val df = sqlContext.createDataFrame(data.zip(threeDegreeExpansion)).toDF("features", "expected")

    val polynomialExpansion = new PolynomialExpansion()
      .setInputCol("features")
      .setOutputCol("polyFeatures")
      .setDegree(3)
    // transform() maps the input DataFrame to a new DataFrame with the expanded features
    polynomialExpansion.transform(df).select("polyFeatures", "expected").collect().foreach {
      case Row(expanded: DenseVector, expected: DenseVector) =>
        assert(expanded ~== expected absTol 1e-1)
      case Row(expanded: SparseVector, expected: SparseVector) =>
        assert(expanded ~== expected absTol 1e-1)
      case _ =>
        // dense input must expand to dense, sparse to sparse
        throw new TestFailedException("Unmatched data types after polynomial expansion", 0)
    }
  }
}
Example 10
Source File: FitsValuesIgnoringArrayOrderSpec.scala    From ScalaWebTest   with Apache License 2.0 5 votes vote down vote up
package org.scalawebtest.integration.json

import org.scalatest.exceptions.TestFailedException
import play.api.libs.json.{JsValue, Json}

class FitsValuesIgnoringArrayOrderSpec extends ScalaWebTestJsonBaseSpec with FitsTypeMismatchBehavior {
  path = "/jsonResponse.json.jsp"

  // Re-parses the JSON response on every access, so each assertion sees the current page.
  def dijkstra: JsValue = Json.parse(webDriver.getPageSource)

  // Expects the `valuesIgnoringArrayOrder` gauge to reject the given gauge definition.
  private def rejectedByUnorderedValues(gauge: String) =
    assertThrows[TestFailedException] {
      dijkstra fits valuesIgnoringArrayOrder of gauge
    }

  "Fits types" should "report success, when the json gauge contains the same valuesIgnoringArrayOrder as the response it is tested against" in {
    // "theories" is deliberately listed in a different order than in the response.
    val completeGauge =
      """{
        | "name": "Dijkstra",
        | "firstName": "Edsger",
        | "yearOfBirth": 1930,
        | "isTuringAwardWinner": true,
        | "theories": [
        |   "graph theory",
        |   "shortest path"
        | ],
        | "falseTheories": null
        |}
      """.stripMargin
    dijkstra fits valuesIgnoringArrayOrder of completeGauge
  }
  it should "provide the fit synonym" in {
    val universities = dijkstra \ "universities"
    // The universities are listed in a shuffled order on purpose.
    universities fit valuesIgnoringArrayOrder of
      """
        | [
        |   { "name": "Universität Leiden","begin": 1948, "end": 1956 },
        |   { "name": "University of Texas at Austin", "begin": 1984, "end": 1999 },
        |   { "name": "Technische Universiteit Eindhoven", "begin": 1962, "end": 1984 },
        |   { "name": "Mathematisch Centrum Amsterdam", "begin": 1951, "end": 1959 }
        | ]
      """.stripMargin
  }
  it should behave like jsonGaugeFitting(valuesIgnoringArrayOrder)
  it should "fail when a String doesn't contain the correct value" in {
    rejectedByUnorderedValues("""{"name": "Turing"}""")
  }
  it should "fail when an Int doesn't contain the correct value" in {
    rejectedByUnorderedValues("""{"yearOfBirth": 1995}""")
  }
  it should "fail when a Boolean doesn't contain the correct value" in {
    rejectedByUnorderedValues("""{"isTuringAwardWinner": false}""")
  }
  it should "fail when an array is not complete" in {
    rejectedByUnorderedValues(
      """{
        | "theories": ["shortest path"]
        |}""".stripMargin
    )
  }
  it should "fail when an array contains a wrong value" in {
    rejectedByUnorderedValues(
      """{
        | "theories": ["shortest path", "relational algebra"]
        |}""".stripMargin
    )
  }
}
Example 11
Source File: ValidatedValuesSpec.scala    From cats-scalatest   with Apache License 2.0 5 votes vote down vote up
package cats.scalatest

import org.scalatest.exceptions.TestFailedException
import cats.data.Validated

// Tests the ValidatedValues helpers (`value`, `invalidValue`, `valid`, `invalid`).
// NOTE: several assertions check failedCodeLineNumber via `thisLineNumber - 3`;
// comments below are placed so they never shift lines inside those windows.
class ValidatedValuesSpec extends TestBase {
  import ValidatedValues._

  "value on Validated" should {
    "return the value inside a Validated.Right if that Validated is Validated.Right" in {
      val r: String Validated String = Validated.Valid(thisRecord)
      r.value should ===(thisRecord)
    }

    "should throw TestFailedException if that Validated is a left " in {
      val r: String Validated String = Validated.Invalid(thisTobacconist)
      // Do not insert lines below: the -3 offset counts from the check to the failing line.
      val caught =
        intercept[TestFailedException] {
          r.value should ===(thisRecord)
        }
      if (isJVM)
        caught.failedCodeLineNumber.value should equal(thisLineNumber - 3)
      caught.failedCodeFileName.value should be("ValidatedValuesSpec.scala")
    }
  }

  "invalidValue on Validated" should {
    "return the value if it's invalid" in {
      val r = Validated.Invalid(thisRecord)
      r.invalidValue should ===(thisRecord)
    }

    "throw TestFailedException if the Validated is Valid" in {
      val r = Validated.Valid(thisRecord)
      // Do not insert lines below: the -3 offset counts from the check to the failing line.
      val caught = intercept[TestFailedException] {
        r.invalidValue
      }
      if (isJVM)
        caught.failedCodeLineNumber.value should equal(thisLineNumber - 3)
      caught.failedCodeFileName.value should be("ValidatedValuesSpec.scala")
    }
  }

  "valid on Validated" should {
    "return the valid if it's a Valid" in {
      val r = Validated.Valid(thisRecord)
      r.valid should ===(r)
    }

    "throw TestFailedException if the Validated is Invalid" in {
      val r: String Validated String = Validated.Invalid(thisTobacconist)
      // Do not insert lines below: the -3 offset counts from the check to the failing line.
      val caught =
        intercept[TestFailedException] {
          r.valid should ===(r)
        }
      if (isJVM)
        caught.failedCodeLineNumber.value should equal(thisLineNumber - 3)
      caught.failedCodeFileName.value should be("ValidatedValuesSpec.scala")
    }
  }

  "invalid on Validated" should {
    "return the invalid if it's a Invalid" in {
      val r = Validated.Invalid(thisTobacconist)
      r.invalid should ===(r)
    }

    "throw TestFailedException if the Validated is Valid" in {
      val r: String Validated String = Validated.Valid(thisRecord)
      // Do not insert lines below: the -3 offset counts from the check to the failing line.
      val caught =
        intercept[TestFailedException] {
          r.invalid should ===(r)
        }
      if (isJVM)
        caught.failedCodeLineNumber.value should equal(thisLineNumber - 3)
      caught.failedCodeFileName.value should be("ValidatedValuesSpec.scala")
    }
  }
}
Example 12
Source File: EitherValuesSpec.scala    From cats-scalatest   with Apache License 2.0 5 votes vote down vote up
package cats.scalatest

import org.scalatest.exceptions.TestFailedException
import scala.util.{Either, Left, Right}

// Tests the EitherValues helpers (`value`, `leftValue`).
// NOTE: several assertions check failedCodeLineNumber via `thisLineNumber - 3`;
// comments below are placed so they never shift lines inside those windows.
class EitherValuesSpec extends TestBase {
  import EitherValues._

  "value on Either" should {
    "return the value inside a Right if that Either is Right" in {
      val r: String Either String = Right(thisRecord)
      r.value should ===(thisRecord)
    }

    "should throw TestFailedException if that Either is a left " in {
      val r: String Either String = Left(thisTobacconist)
      // Do not insert lines below: the -3 offset counts from the check to the failing line.
      val caught =
        intercept[TestFailedException] {
          r.value should ===(thisRecord)
        }
      if (isJVM)
        caught.failedCodeLineNumber.value should equal(thisLineNumber - 3)
      caught.failedCodeFileName.value should be("EitherValuesSpec.scala")
    }
  }

  "leftValue on Either" should {
    "return the value if it's left" in {
      val r = Left(thisRecord)
      r.leftValue should ===(thisRecord)
    }

    "throw TestFailedException if the Either is right" in {
      val r = Right(thisRecord)
      // Do not insert lines below: the -3 offset counts from the check to the failing line.
      val caught = intercept[TestFailedException] {
        r.leftValue
      }
      if (isJVM)
        caught.failedCodeLineNumber.value should equal(thisLineNumber - 3)
      caught.failedCodeFileName.value should be("EitherValuesSpec.scala")
    }
  }
}
Example 13
Source File: PolynomialExpansionSuite.scala    From BigDatalog   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.ml.feature

import org.apache.spark.ml.param.ParamsSuite
import org.scalatest.exceptions.TestFailedException

import org.apache.spark.SparkFunSuite
import org.apache.spark.ml.util.DefaultReadWriteTest
import org.apache.spark.mllib.linalg.{DenseVector, SparseVector, Vector, Vectors}
import org.apache.spark.mllib.util.MLlibTestSparkContext
import org.apache.spark.mllib.util.TestingUtils._
import org.apache.spark.sql.Row

class PolynomialExpansionSuite
  extends SparkFunSuite with MLlibTestSparkContext with DefaultReadWriteTest {

  test("params") {
    ParamsSuite.checkParams(new PolynomialExpansion)
  }

  // Input vectors shared by both expansion tests: sparse/dense mixes, an all-zero
  // vector, and an empty sparse vector.
  private def inputVectors: Array[Vector] = Array(
    Vectors.sparse(3, Seq((0, -2.0), (1, 2.3))),
    Vectors.dense(-2.0, 2.3),
    Vectors.dense(0.0, 0.0, 0.0),
    Vectors.dense(0.6, -1.1, -3.0),
    Vectors.sparse(3, Seq())
  )

  // Runs the transformer over the shared inputs and checks every expanded vector
  // against its expected counterpart (within an absolute tolerance of 1e-1).
  private def checkExpansion(expander: PolynomialExpansion, expected: Array[Vector]): Unit = {
    val df = sqlContext.createDataFrame(inputVectors.zip(expected)).toDF("features", "expected")
    expander.transform(df).select("polyFeatures", "expected").collect().foreach {
      case Row(expanded: DenseVector, expectedVec: DenseVector) =>
        assert(expanded ~== expectedVec absTol 1e-1)
      case Row(expanded: SparseVector, expectedVec: SparseVector) =>
        assert(expanded ~== expectedVec absTol 1e-1)
      case _ =>
        // Dense inputs must stay dense and sparse must stay sparse after expansion.
        throw new TestFailedException("Unmatched data types after polynomial expansion", 0)
    }
  }

  test("Polynomial expansion with default parameter") {
    // Default degree is 2, so a 3-dimensional input expands to 9 terms.
    val twoDegreeExpansion: Array[Vector] = Array(
      Vectors.sparse(9, Array(0, 1, 2, 3, 4), Array(-2.0, 4.0, 2.3, -4.6, 5.29)),
      Vectors.dense(-2.0, 4.0, 2.3, -4.6, 5.29),
      Vectors.dense(new Array[Double](9)),
      Vectors.dense(0.6, 0.36, -1.1, -0.66, 1.21, -3.0, -1.8, 3.3, 9.0),
      Vectors.sparse(9, Array.empty, Array.empty))

    checkExpansion(
      new PolynomialExpansion().setInputCol("features").setOutputCol("polyFeatures"),
      twoDegreeExpansion)
  }

  test("Polynomial expansion with setter") {
    // Degree 3 over a 3-dimensional input yields 19 terms.
    val threeDegreeExpansion: Array[Vector] = Array(
      Vectors.sparse(19, Array(0, 1, 2, 3, 4, 5, 6, 7, 8),
        Array(-2.0, 4.0, -8.0, 2.3, -4.6, 9.2, 5.29, -10.58, 12.17)),
      Vectors.dense(-2.0, 4.0, -8.0, 2.3, -4.6, 9.2, 5.29, -10.58, 12.17),
      Vectors.dense(new Array[Double](19)),
      Vectors.dense(0.6, 0.36, 0.216, -1.1, -0.66, -0.396, 1.21, 0.726, -1.331, -3.0, -1.8,
        -1.08, 3.3, 1.98, -3.63, 9.0, 5.4, -9.9, -27.0),
      Vectors.sparse(19, Array.empty, Array.empty))

    checkExpansion(
      new PolynomialExpansion().setInputCol("features").setOutputCol("polyFeatures").setDegree(3),
      threeDegreeExpansion)
  }

  test("read/write") {
    // Round-trips the transformer's params through the persistence layer.
    val t = new PolynomialExpansion()
      .setInputCol("myInputCol")
      .setOutputCol("myOutputCol")
      .setDegree(3)
    testDefaultReadWrite(t)
  }
}
Example 14
Source File: ToFutureWrapper.scala    From sttp   with Apache License 2.0 5 votes vote down vote up
package sttp.client.testing

import scala.concurrent.Future
import scala.language.higherKinds

import org.scalatest.exceptions.TestFailedException

trait ToFutureWrapper {

  implicit final class ConvertToFutureDecorator[F[_], T](wrapped: => F[T]) {
    // Converts the wrapped effect to a Future. If the conversion throws a
    // TestFailedException that wraps a cause, the cause is surfaced as the
    // Future's failure; any other exception propagates unchanged.
    def toFuture()(implicit ctf: ConvertToFuture[F]): Future[T] =
      try ctf.toFuture(wrapped)
      catch {
        case tfe: TestFailedException if tfe.getCause != null =>
          Future.failed(tfe.getCause)
      }
  }
}
Example 15
Source File: TestSpec.scala    From reactive-programming   with Apache License 2.0 5 votes vote down vote up
package com.test

import java.io.IOException
import java.util.UUID

import akka.actor.{ ActorRef, ActorSystem, PoisonPill }
import akka.event.{ Logging, LoggingAdapter }
import akka.testkit.TestProbe
import akka.util.Timeout
import org.scalatest.concurrent.{ Eventually, ScalaFutures }
import org.scalatest.exceptions.TestFailedException
import org.scalatest._
import rx.lang.scala._

import scala.concurrent.duration._
import scala.concurrent.{ ExecutionContextExecutor, Future }
import scala.util.{ Random ⇒ Rnd, Try }

object Random {
  /** Creates a fresh, non-seeded random number generator. */
  def apply(): Rnd = new Rnd
}

trait TestSpec extends FlatSpec with Matchers with ScalaFutures with TryValues with OptionValues with Eventually with BeforeAndAfterAll {
  implicit val system: ActorSystem = ActorSystem("test")
  implicit val ec: ExecutionContextExecutor = system.dispatcher
  val log: LoggingAdapter = Logging(system, this.getClass)
  implicit val pc: PatienceConfig = PatienceConfig(timeout = 50.seconds)
  // Explicit type annotation: implicit members should not rely on inference.
  implicit val timeout: Timeout = Timeout(50.seconds)

  override protected def afterAll(): Unit = {
    system.terminate()
  }

  /**
   * Stops the given actors with a PoisonPill and registers a death watch on each.
   * FIX: `probe` was referenced but never defined in this block — create the
   * TestProbe locally so the watch registrations have an owner.
   */
  def cleanup(actors: ActorRef*): Unit = {
    val probe = TestProbe()
    actors.foreach { (actor: ActorRef) ⇒
      actor ! PoisonPill
      probe watch actor
    }
  }

  implicit class PimpedByteArray(self: Array[Byte]) {
    // Decodes the bytes using the platform default charset.
    def getString: String = new String(self)
  }

  implicit class PimpedFuture[T](self: Future[T]) {
    // Blocks (via ScalaFutures' futureValue, bounded by the implicit PatienceConfig)
    // and captures the outcome as a Try.
    def toTry: Try[T] = Try(self.futureValue)
  }

  implicit class PimpedObservable[T](self: Observable[T]) {
    // Blocks until the observable completes, discarding the emitted values.
    def waitFor: Unit = {
      self.toBlocking.toIterable.last
    }
  }

  implicit class MustBeWord[T](self: T) {
    // Fails the test when the value does not match the given partial function.
    def mustBe(pf: PartialFunction[T, Unit]): Unit =
      if (!pf.isDefinedAt(self)) throw new TestFailedException("Unexpected: " + self, 0)
  }

  object Socket { def apply() = new Socket }
  // Simulated network socket with artificial latency for the exercises.
  class Socket {
    def readFromMemory: Future[Array[Byte]] = Future {
      Thread.sleep(100) // sleep 100 millis
      "fromMemory".getBytes
    }

    // Appends the origin tag to the payload, or fails with an IOException when requested.
    def send(payload: Array[Byte], from: String, failed: Boolean): Future[Array[Byte]] =
      if (failed) Future.failed(new IOException(s"Network error: $from"))
      else {
        Future {
          Thread.sleep(250) // sleep 250 millis, not real life time, but hey
          s"${payload.getString}->$from".getBytes
        }
      }

    def sendToEurope(payload: Array[Byte], failed: Boolean = false): Future[Array[Byte]] =
      send(payload, "fromEurope", failed)

    def sendToUsa(payload: Array[Byte], failed: Boolean = false): Future[Array[Byte]] =
      send(payload, "fromUsa", failed)
  }
}
Example 16
Source File: CoderAssertionsTest.scala    From scio   with Apache License 2.0 5 votes vote down vote up
package com.spotify.scio.testing

import java.io.{InputStream, OutputStream}

import com.spotify.scio.coders.Coder
import com.spotify.scio.testing.CoderAssertions._
import org.apache.beam.sdk.coders.{AtomicCoder, StringUtf8Coder}
import org.scalatest.exceptions.TestFailedException
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

case class Foo(id: String)

class CoderAssertionsTest extends AnyFlatSpec with Matchers {
  // A coder which roundtrips incorrectly
  // (decode appends "wrongBytes", so decode(encode(x)) != x by construction).
  private def incorrectCoder: Coder[Foo] =
    Coder.beam(new AtomicCoder[Foo] {
      override def encode(value: Foo, outStream: OutputStream): Unit =
        StringUtf8Coder.of().encode(value.id, outStream)
      override def decode(inStream: InputStream): Foo =
        Foo(StringUtf8Coder.of().decode(inStream) + "wrongBytes")
    })

  "CoderAssertions" should "support roundtrip" in {
    Foo("bar") coderShould roundtrip()

    an[TestFailedException] should be thrownBy {
      // The local implicit shadows the derived Coder[Foo] inside this block
      // only, so the broken coder is picked up and the roundtrip must fail.
      implicit def coder: Coder[Foo] = incorrectCoder

      Foo("baz") coderShould roundtrip()
    }
  }

  it should "support fallback" in {
    val str = "boom"
    // CharSequence has no derived coder, so a fallback (e.g. Kryo) is expected.
    val cs: java.lang.CharSequence = str
    cs coderShould fallback()

    // String DOES have a dedicated coder, so asserting fallback must fail.
    an[TestFailedException] should be thrownBy {
      str coderShould fallback()
    }
  }

  it should "support notFallback" in {
    val str = "boom"
    str coderShould notFallback()

    an[TestFailedException] should be thrownBy {
      val cs: java.lang.CharSequence = str
      cs coderShould notFallback()
    }
  }

  it should "support coderIsSerializable" in {
    coderIsSerializable[Foo]
    coderIsSerializable(Coder[Foo])

    // Inner class's Coder is not serializable
    // (it captures the enclosing test instance).
    case class InnerCaseClass(id: String)

    an[TestFailedException] should be thrownBy {
      coderIsSerializable[InnerCaseClass]
    }

    an[TestFailedException] should be thrownBy {
      coderIsSerializable(Coder[InnerCaseClass])
    }
  }
}
Example 17
Source File: EitherValues.scala    From lithium   with Apache License 2.0 5 votes vote down vote up
package akka.cluster.swissborg

import org.scalactic.source
import org.scalatest.exceptions.{StackDepthException, TestFailedException}

trait EitherValues {

  implicit def convertEitherToValuable[L, R](either: Either[L, R])(implicit p: source.Position): EitherValuable[L, R] =
    new EitherValuable(either, p)

  /** Adds `rightValue` / `leftValue` extractors that fail the test (with the
    * caller's source position) when the Either holds the other side.
    */
  class EitherValuable[L, R](either: Either[L, R], pos: source.Position) {

    private def failTest(message: String): Nothing =
      throw new TestFailedException((_: StackDepthException) => Some(message), None, pos)

    /** The Right value, or a positioned test failure. */
    def rightValue: R = either.fold(_ => failTest("The Either value is not a Right(_)"), identity)

    /** The Left value, or a positioned test failure. */
    def leftValue: L = either.fold(identity, _ => failTest("The Either value is not a Left(_)"))
  }
}

object EitherValues extends EitherValues
Example 18
Source File: TryMatchersSpec.scala    From warp-core   with MIT License 5 votes vote down vote up
package com.workday.warp.common.utils

import com.workday.warp.common.spec.WarpJUnitSpec
import com.workday.warp.junit.UnitTest
import org.scalatest.exceptions.TestFailedException

import scala.util.Try


  /** Exercises the custom Try matchers: win (success), die (failure) and hold (value). */
  @UnitTest
  def tryMatchers(): Unit = {
    // successful computations satisfy `win` and do not satisfy `die`
    Try(1 + 1) should win
    Try(1 + 1) should not (die)

    // failing computations satisfy `die` and do not satisfy `win`
    Try(throw new RuntimeException) should die
    Try(throw new RuntimeException) should not (win)

    // mismatched expectations raise TestFailedException
    intercept[TestFailedException] { Try(1 + 1) should die }
    intercept[TestFailedException] { Try(throw new RuntimeException) should win }

    // the TestFailedException carries the underlying exception's message
    val failure: TestFailedException = intercept[TestFailedException] {
      Try(throw new RuntimeException("we failed")) should win
    }
    failure.getMessage should include ("we failed")

    // `hold` matches the value wrapped by a successful Try
    Try(1 + 1) should hold (2)
  }
} 
Example 19
Source File: ImplicitsTest.scala    From bitcoin-s   with MIT License 5 votes vote down vote up
package org.bitcoins.testkit

import util.BitcoinSUnitTest
import Implicits._
import org.scalatest.exceptions.TestFailedException

class ImplicitsTest extends BitcoinSUnitTest {

  behavior of "AssertionSeqOps"

  it should "flatten succeeded assertions" in {
    val assertions = List(succeed, assert(true), assert(4 + 4 == 8))
    assertions.toAssertion
  }

  it should "fail to flatten a strict sequence of assertions where one has failed" in {
    // Bug fix: the previous try/catch silently PASSED when no exception was
    // thrown at all. assertThrows fails the test unless a
    // TestFailedException is actually raised.
    assertThrows[TestFailedException] {
      val assertions: List[org.scalatest.Assertion] =
        List(succeed, assert(4 + 4 == 7), assert(true))
      assertions.toAssertion
    }
  }

  it should "fail to flatten a lazy sequence of assertions where one has failed" in {
    // The Stream is lazy, so the failing assert at index 7 is only evaluated
    // when toAssertion forces the elements — which must still throw.
    assertThrows[TestFailedException] {
      val assertions: Stream[org.scalatest.Assertion] =
        (0 until 10).toStream.map { i =>
          if (i == 7) assert(false) else assert(true)
        }

      assertions.toAssertion
    }
  }

  it should "fail to flatten an empty list" in {
    intercept[TestFailedException] {
      val xs = List.empty[org.scalatest.Assertion]
      xs.toAssertion
    }
  }

}
Example 20
package net.manub.embeddedkafka

import org.scalatest.exceptions.TestFailedException

class EmbeddedKafkaWithRunningKafkaSpec
    extends EmbeddedKafkaSpecSupport
    with EmbeddedKafka {

  "the withRunningKafka method" should {
    "start a Kafka broker on port 6001 by default" in {
      withRunningKafka {
        kafkaIsAvailable()
      }
    }

    "start a ZooKeeper instance on port 6000 by default" in {
      withRunningKafka {
        zookeeperIsAvailable()
      }
    }

    "stop Kafka and Zookeeper successfully" when {
      "the enclosed test passes" in {
        withRunningKafka {
          true shouldBe true
        }

        // both servers must be shut down once the block exits normally
        kafkaIsNotAvailable()
        zookeeperIsNotAvailable()
      }

      "the enclosed test fails" in {
        a[TestFailedException] shouldBe thrownBy {
          withRunningKafka {
            true shouldBe false
          }
        }

        // shutdown must also happen when the enclosed assertion throws
        kafkaIsNotAvailable()
        zookeeperIsNotAvailable()
      }
    }

    "start a Kafka broker on a specified port" in {
      // Explicit type annotation: implicit definitions should declare their
      // type (consistent with the later revision of this spec, and required
      // by Scala 3 / recommended by -Xlint).
      implicit val config: EmbeddedKafkaConfig = EmbeddedKafkaConfig(kafkaPort = 12345)

      withRunningKafka {
        kafkaIsAvailable(12345)
      }
    }

    "start a Zookeeper server on a specified port" in {
      implicit val config: EmbeddedKafkaConfig = EmbeddedKafkaConfig(zooKeeperPort = 12345)

      withRunningKafka {
        zookeeperIsAvailable(12345)
      }
    }

  }
}
Example 21
Source File: AssertEqualsSpec.scala    From flint   with Apache License 2.0 5 votes vote down vote up
package com.twosigma.flint.timeseries

import com.twosigma.flint.timeseries.row.Schema
import org.apache.spark.sql.Row
import org.apache.spark.sql.catalyst.expressions.{ GenericRowWithSchema => SqlRow }
import org.apache.spark.sql.types.{ ArrayType, DoubleType }
import org.scalatest.exceptions.TestFailedException

import scala.collection.mutable

class AssertEqualsSpec extends TimeSeriesSuite {
  "TimeSeriesSuite" should "assertEquals for two sql rows of DoubleType correctly" in {
    val schema = Schema("x" -> DoubleType)
    // values within one tenth of the precision compare equal; values ten
    // times beyond it must not
    val reference = new SqlRow(Array(1L, 1.0), schema)
    val withinTolerance = new SqlRow(Array(1L, 1.0 + defaultAdditivePrecision * 0.1), schema)
    val beyondTolerance = new SqlRow(Array(1L, 1.0 + defaultAdditivePrecision * 10.0), schema)
    assertAlmostEquals(reference, withinTolerance)
    intercept[TestFailedException] {
      assertAlmostEquals(reference, beyondTolerance)
    }
  }

  it should "assertEquals for two sql rows of ArrayType(DoubleType) correctly" in {
    val schema = Schema("x" -> ArrayType(DoubleType))
    // ascribed as Row, matching the original overload resolution
    val reference: Row = new SqlRow(Array(1L, mutable.WrappedArray.make(Array(1.0))), schema)
    val withinTolerance: Row = new SqlRow(
      Array(1L, mutable.WrappedArray.make(Array(1.0 + defaultAdditivePrecision * 0.1))), schema
    )
    val beyondTolerance: Row = new SqlRow(
      Array(1L, mutable.WrappedArray.make(Array(1.0 + defaultAdditivePrecision * 10.0))), schema
    )
    assertAlmostEquals(reference, withinTolerance)
    intercept[TestFailedException] {
      assertAlmostEquals(reference, beyondTolerance)
    }
  }

  it should "assertEquals for two sql rows of ArrayType(DoubleType) that contain NaN values correctly" in {
    val schema = Schema("x" -> ArrayType(DoubleType))
    // NaN must compare "almost equal" to NaN, but not to a real number
    val nanRowA = new SqlRow(Array(1L, mutable.WrappedArray.make(Array(Double.NaN))), schema)
    val nanRowB = new SqlRow(Array(1L, mutable.WrappedArray.make(Array(Double.NaN))), schema)
    val numericRow = new SqlRow(Array(1L, mutable.WrappedArray.make(Array(1.0))), schema)
    assertAlmostEquals(nanRowA, nanRowB)
    intercept[TestFailedException] {
      assertAlmostEquals(nanRowA, numericRow)
    }
  }
}
Example 22
Source File: QuickCheckSuite.scala    From Principles-of-Reactive-Programming   with GNU General Public License v3.0 5 votes vote down vote up
package quickcheck

import org.scalatest.FunSuite

import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner

import org.scalatest.prop.Checkers
import org.scalacheck.Arbitrary._
import org.scalacheck.Prop
import org.scalacheck.Prop._

import org.scalatest.exceptions.TestFailedException

// Concrete heap under test: the QuickCheck properties mixed with the real BinomialHeap.
object QuickCheckBinomialHeap extends QuickCheckHeap with BinomialHeap

@RunWith(classOf[JUnitRunner])
class QuickCheckSuite extends FunSuite with Checkers {

  /** Verifies that property `p` does NOT hold: running the checker must fail.
    *
    * Fixed the deprecated procedure syntax (`def f(...) { }`) with an explicit
    * `Unit` result type, and replaced the mutable `var ok` flag with an
    * expression-valued try/catch.
    */
  def checkBogus(p: Prop): Unit = {
    val failedAsExpected =
      try {
        check(p)
        false // the bogus heap satisfied every property — that is the bug
      } catch {
        case _: TestFailedException => true
      }
    assert(failedAsExpected, "A bogus heap should NOT satisfy all properties. Try to find the bug!")
  }

  test("Binomial heap satisfies properties.") {
    check(new QuickCheckHeap with BinomialHeap)
  }

  test("Bogus (1) binomial heap does not satisfy properties.") {
    checkBogus(new QuickCheckHeap with Bogus1BinomialHeap)
  }

  test("Bogus (2) binomial heap does not satisfy properties.") {
    checkBogus(new QuickCheckHeap with Bogus2BinomialHeap)
  }

  test("Bogus (3) binomial heap does not satisfy properties.") {
    checkBogus(new QuickCheckHeap with Bogus3BinomialHeap)
  }

  test("Bogus (4) binomial heap does not satisfy properties.") {
    checkBogus(new QuickCheckHeap with Bogus4BinomialHeap)
  }

  test("Bogus (5) binomial heap does not satisfy properties.") {
    checkBogus(new QuickCheckHeap with Bogus5BinomialHeap)
  }
}
Example 23
Source File: TestUtils.scala    From HANAVora-Extensions   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.sql.currency

import org.scalatest.exceptions.TestFailedException

object TestUtils {

  /** Asserts that `left` compares equal (BigDecimal compareTo semantics, so
    * scale is ignored) to `right`, which may be a BigDecimal or a String that
    * parses to one; any other type is itself a test failure.
    */
  def assertComparesEqual(left: java.math.BigDecimal)(right: Any): Unit =
    right match {
      case bd: java.math.BigDecimal => compareByValue(left, bd)
      case s: String                => compareByValue(left, new java.math.BigDecimal(s))
      case _                        => throw new TestFailedException(s"incompatible types: $left vs. $right", 1)
    }

  // compareTo == 0 treats e.g. 1.0 and 1.00 as equal, unlike equals()
  private def compareByValue(left: java.math.BigDecimal, right: java.math.BigDecimal): Unit =
    if (left.compareTo(right) != 0) {
      throw new TestFailedException(s"$left did not compare equal to $right", 2)
    }
}
Example 24
Source File: RestActionTester.scala    From naptime   with Apache License 2.0 5 votes vote down vote up
package org.coursera.naptime.actions

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.Materializer
import org.coursera.naptime.NaptimeActionException
import org.coursera.naptime.QueryFields
import org.coursera.naptime.QueryIncludes
import org.coursera.naptime.RequestEvidence
import org.coursera.naptime.RequestPagination
import org.coursera.naptime.RestContext
import org.coursera.naptime.RestError
import org.coursera.naptime.RestResponse
import org.junit.After
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.exceptions.TestFailedException
import play.api.test.FakeRequest

import scala.concurrent.ExecutionContext
import scala.util.Try


  protected[this] implicit class RestActionTestOps[AuthType, BodyType, ResponseType](
      action: RestAction[_, AuthType, BodyType, _, _, ResponseType]) {

    /** Runs the action's auth generator/check first, then the action body.
      * A rejected auth short-circuits to RestError without running the body.
      */
    def testAction(ctx: RestContext[AuthType, BodyType]): RestResponse[ResponseType] =
      action.restAuthGenerator.apply(ctx.body).check(ctx.auth) match {
        case Left(error)        => RestError(error)
        case Right(updatedAuth) => awaitResponse(ctx.copyWithAuth(updatedAuth))
      }

    /** Runs the action body without re-checking authentication. */
    def testActionPassAuth(ctx: RestContext[AuthType, BodyType]): RestResponse[ResponseType] =
      awaitResponse(ctx)

    // Shared by both entry points (previously duplicated): executes the
    // action, converts NaptimeActionExceptions to RestError, and unwraps the
    // TestFailedException that futureValue raises on failure so the original
    // cause surfaces to the test.
    private[this] def awaitResponse(ctx: RestContext[AuthType, BodyType]): RestResponse[ResponseType] = {
      val responseFuture = action.safeApply(ctx).recover {
        case e: NaptimeActionException => RestError(e)
      }

      Try(responseFuture.futureValue).recover {
        case e: TestFailedException => e.cause.map(throw _).getOrElse(throw e)
      }.get
    }
  }
} 
Example 25
Source File: EmbeddedKafkaWithRunningKafkaSpec.scala    From embedded-kafka   with MIT License 5 votes vote down vote up
package net.manub.embeddedkafka

import net.manub.embeddedkafka.EmbeddedKafka._
import net.manub.embeddedkafka.EmbeddedKafkaSpecSupport._
import org.scalatest.exceptions.TestFailedException
import net.manub.embeddedkafka.EmbeddedKafkaConfig.{
  defaultKafkaPort,
  defaultZookeeperPort
}

// End-to-end checks for the withRunningKafka loan pattern: the brokers must
// be reachable inside the block and torn down after it, even on failure.
class EmbeddedKafkaWithRunningKafkaSpec extends EmbeddedKafkaSpecSupport {
  "the withRunningKafka method" should {
    "start a Kafka broker on port 6001 by default" in {
      withRunningKafka {
        expectedServerStatus(defaultKafkaPort, Available)
      }
    }

    "start a ZooKeeper instance on port 6000 by default" in {
      withRunningKafka {
        expectedServerStatus(defaultZookeeperPort, Available)
      }
    }

    "stop Kafka and Zookeeper successfully" when {
      "the enclosed test passes" in {
        withRunningKafka {
          true shouldBe true
        }

        // both servers must be gone once the block exits normally
        expectedServerStatus(defaultKafkaPort, NotAvailable)
        expectedServerStatus(defaultZookeeperPort, NotAvailable)
      }

      "the enclosed test fails" in {
        a[TestFailedException] shouldBe thrownBy {
          withRunningKafka {
            true shouldBe false
          }
        }

        // teardown must also run when the enclosed assertion throws
        expectedServerStatus(defaultKafkaPort, NotAvailable)
        expectedServerStatus(defaultZookeeperPort, NotAvailable)
      }
    }

    "start a Kafka broker on a specified port" in {
      implicit val config: EmbeddedKafkaConfig =
        EmbeddedKafkaConfig(kafkaPort = 12345)

      withRunningKafka {
        expectedServerStatus(12345, Available)
      }
    }

    "start a Zookeeper server on a specified port" in {
      implicit val config: EmbeddedKafkaConfig =
        EmbeddedKafkaConfig(zooKeeperPort = 12345)

      withRunningKafka {
        expectedServerStatus(12345, Available)
      }
    }
  }
}
Example 26
Source File: Implicits.scala    From bitcoin-s   with MIT License 5 votes vote down vote up
package org.bitcoins.testkit

import org.scalacheck.Gen
import scala.annotation.tailrec
import org.scalatest.compatible.Assertion
import org.scalatest.exceptions.TestFailedException


    /** Collapses a sequence of assertions into its last element.
      *
      * Every element is forced first, so a failed assertion produced lazily
      * (e.g. inside a Stream) is thrown here rather than silently skipped.
      * An empty sequence is itself a test failure.
      */
    def toAssertion: Assertion =
      if (assertions.isEmpty)
        throw new TestFailedException(
          message = "Cannot turn an empty list into an assertion!",
          failedCodeStackDepth = 0)
      else {
        // force all members so a bad one throws while being evaluated
        assertions.foreach(_ => ())
        assertions.last
      }
  }
} 
Example 27
Source File: TestAsyncUtil.scala    From bitcoin-s   with MIT License 5 votes vote down vote up
package org.bitcoins.testkit.async

import akka.actor.ActorSystem
import org.scalatest.exceptions.{StackDepthException, TestFailedException}

import scala.concurrent.{ExecutionContext, Future}
import scala.concurrent.duration.FiniteDuration

abstract class TestAsyncUtil
    extends org.bitcoins.rpc.util.AsyncUtil
    with Serializable {

  /** Delegates to the base retry loop, then rewrites retry failures into
    * TestFailedExceptions that point at the calling test (see companion).
    */
  override protected def retryUntilSatisfiedWithCounter(
      conditionF: () => Future[Boolean],
      duration: FiniteDuration,
      counter: Int,
      maxTries: Int,
      stackTrace: Array[StackTraceElement])(implicit
      system: ActorSystem): Future[Unit] = {
    val baseRetry = super.retryUntilSatisfiedWithCounter(conditionF,
                                                         duration,
                                                         counter,
                                                         maxTries,
                                                         stackTrace)
    TestAsyncUtil.transformRetryToTestFailure(baseRetry)(system.dispatcher)
  }
}

object TestAsyncUtil extends TestAsyncUtil {

  /** Converts an RpcRetryException failure into a TestFailedException whose
    * source position and stack trace point at the test that triggered the
    * retry rather than at the retry machinery; other failures pass through.
    *
    * Refactored from isInstanceOf/asInstanceOf to an idiomatic pattern match.
    */
  def transformRetryToTestFailure[T](fut: Future[T])(implicit
      ec: ExecutionContext): Future[T] = {
    def transformRetry(err: Throwable): Throwable = err match {
      case retryErr: RpcRetryException =>
        // Drop frames from the retry helpers' own files, keep everything up
        // to the test-suite boundary.
        val relevantStackTrace = retryErr.caller.tail
          .dropWhile(elem => retryErr.internalFiles.contains(elem.getFileName))
          .takeWhile(!_.getFileName.contains("TestSuite"))
        // NOTE(review): assumes at least one relevant frame remains; an empty
        // trace throws NoSuchElementException here (same as before the refactor).
        val stackElement = relevantStackTrace.head
        val pos = org.scalactic.source.Position(stackElement.getFileName,
                                                stackElement.getClassName,
                                                stackElement.getLineNumber)
        val newErr = new TestFailedException({ _: StackDepthException =>
                                               Some(retryErr.message)
                                             },
                                             None,
                                             pos)
        newErr.setStackTrace(relevantStackTrace)
        newErr
      case other => other
    }

    fut.transform((elem: T) => elem, transformRetry)
  }
}
Example 28
Source File: BetterOptionValues.scala    From pertax-frontend   with Apache License 2.0 5 votes vote down vote up
package util

import org.scalactic._
import org.scalatest.exceptions.{StackDepthException, TestFailedException}

object BetterOptionValues {

  /** Like ScalaTest's OptionValues, but reports the caller's source position. */
  implicit class OptionOps[T](val opt: Option[T]) extends AnyVal {

    /** Returns the wrapped value, or fails the test (wrapping the original
      * NoSuchElementException as the cause) when the Option is None.
      */
    def getValue(implicit pos: source.Position): T =
      try opt.get
      catch {
        case cause: NoSuchElementException =>
          throw new TestFailedException(
            (_: StackDepthException) => Some("The Option on which value was invoked was not defined."),
            Some(cause),
            pos)
      }
  }
}
Example 29
Source File: ArtisinalStreamingTest.scala    From spark-testing-base   with Apache License 2.0 5 votes vote down vote up
package com.holdenkarau.spark.testing

import scala.collection.mutable.ArrayBuffer
import scala.collection.mutable.Queue

import org.apache.spark.streaming._
import org.apache.spark.streaming.dstream._
import org.apache.spark._
import org.apache.spark.rdd.RDD
import org.apache.spark.SparkContext._

import org.scalatest.FunSuite
import org.scalatest.exceptions.TestFailedException


// Hand-rolled (artisanal) streaming test: drives a queue-backed DStream and
// polls for results instead of using a streaming test harness.
class ArtisinalStreamingTest extends FunSuite with SharedSparkContext {
  // tag::createQueueStream[]
  // Illustrative snippet (referenced by book tags): builds a queue-backed
  // DStream; the resulting stream is not used further.
  def makeSimpleQueueStream(ssc: StreamingContext) = {
    val input = List(List("hi"), List("happy pandas", "sad pandas"))
      .map(sc.parallelize(_))
    val idstream = ssc.queueStream(Queue(input:_*))
  }
  // end::createQueueStream[]

  // tag::HAPPY_PANDA[]
  test("artisinal streaming test") {
    // 1-second micro-batches over a queue of two input RDDs
    val ssc = new StreamingContext(sc, Seconds(1))
    val input = List(List("hi"), List("happy pandas", "sad pandas"))
      .map(sc.parallelize(_))
    // Note: does not work for windowing or checkpointing
    val idstream = ssc.queueStream(Queue(input:_*))
    val tdstream = idstream.filter(_.contains("pandas"))
    // collected here as each micro-batch's RDD is materialized
    val result = ArrayBuffer[String]()
    tdstream.foreachRDD{(rdd: RDD[String], _) =>
      result ++= rdd.collect()
    }
    val startTime = System.currentTimeMillis()
    // NOTE(review): 60 * 60 * 30 = 108000, compared against milliseconds
    // below, i.e. a 108-second deadline — confirm that was the intent.
    val maxWaitTime = 60 * 60 * 30
    ssc.start()
    // poll until both expected elements arrive or the deadline passes
    while (result.size < 2 && System.currentTimeMillis() - startTime < maxWaitTime) {
      ssc.awaitTerminationOrTimeout(50)
    }
    // keep the shared SparkContext alive for other tests in the suite
    ssc.stop(stopSparkContext = false)
    assert(List("happy pandas", "sad pandas") === result.toList)
  }
  // end::HAPPY_PANDA[]
}
Example 30
Source File: SourcingSpec.scala    From nexus   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.sourcing

import org.scalactic.source
import org.scalatest.{Inspectors, OptionValues, TryValues}
import org.scalatest.exceptions.{StackDepthException, TestFailedException}
import org.scalatest.matchers.should.Matchers

trait SourcingSpec
    extends org.scalatest.wordspec.AnyWordSpecLike
    with Matchers
    with Inspectors
    with OptionValues
    with TryValues {

  /** Adds `rightValue` / `leftValue` extractors that fail the test (with the
    * caller's source position) when the Either holds the other side.
    */
  class EitherValuable[L, R](either: Either[L, R], pos: source.Position) {

    /** The Right value, or a positioned test failure. */
    def rightValue: R =
      either match {
        case Left(_)      => failTest("The Either value is not a Right(_)")
        case Right(value) => value
      }

    /** The Left value, or a positioned test failure. */
    def leftValue: L =
      either match {
        case Right(_)    => failTest("The Either value is not a Left(_)")
        case Left(value) => value
      }

    private def failTest(message: String): Nothing =
      throw new TestFailedException(
        (_: StackDepthException) => Some(message),
        None,
        pos
      )
  }

  implicit def convertEitherToValuable[L, R](either: Either[L, R])(implicit p: source.Position): EitherValuable[L, R] =
    new EitherValuable(either, p)

}
Example 31
Source File: EitherValues.scala    From nexus   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.util

import org.scalactic.source
import org.scalatest.exceptions.{StackDepthException, TestFailedException}

trait EitherValues {

  /** Adds `rightValue` / `leftValue` extractors that fail the test (with the
    * caller's source position) when the Either holds the other side.
    */
  class EitherValuable[L, R](either: Either[L, R], pos: source.Position) {

    /** The Right value, or a positioned test failure. */
    def rightValue: R =
      either.fold(
        _ =>
          throw new TestFailedException(
            (_: StackDepthException) => Some("The Either value is not a Right(_)"),
            None,
            pos
          ),
        identity
      )

    /** The Left value, or a positioned test failure. */
    def leftValue: L =
      either.fold(
        identity,
        _ =>
          throw new TestFailedException(
            (_: StackDepthException) => Some("The Either value is not a Left(_)"),
            None,
            pos
          )
      )
  }

  implicit def convertEitherToValuable[L, R](either: Either[L, R])(implicit p: source.Position): EitherValuable[L, R] =
    new EitherValuable(either, p)

}

object EitherValues extends EitherValues
Example 32
Source File: EitherValues.scala    From nexus   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.storage.utils

import org.scalactic.source
import org.scalatest.exceptions.{StackDepthException, TestFailedException}

trait EitherValues {

  /** Adds `rightValue` / `leftValue` extractors that fail the test (with the
    * caller's source position) when the Either holds the other side.
    */
  class EitherValuable[L, R](either: Either[L, R], pos: source.Position) {

    private def notTheExpectedSide(description: String): Nothing =
      throw new TestFailedException(
        (_: StackDepthException) => Some(description),
        None,
        pos
      )

    /** The Right value, or a positioned test failure. */
    def rightValue: R =
      either.getOrElse(notTheExpectedSide("The Either value is not a Right(_)"))

    /** The Left value, or a positioned test failure. */
    def leftValue: L =
      either.swap.getOrElse(notTheExpectedSide("The Either value is not a Left(_)"))
  }

  implicit def convertEitherToValuable[L, R](either: Either[L, R])(implicit p: source.Position): EitherValuable[L, R] =
    new EitherValuable(either, p)

}

object EitherValues extends EitherValues
Example 33
Source File: MultiThreadingTest.scala    From ncdbg   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.programmaticallyspeaking.ncd.nashorn
import java.io.{BufferedReader, InputStreamReader}
import java.util.concurrent.TimeoutException
import java.util.concurrent.atomic.AtomicInteger

import com.programmaticallyspeaking.ncd.host._
import com.programmaticallyspeaking.ncd.messaging.Observer
import com.programmaticallyspeaking.ncd.testing.{SharedInstanceActorTesting, UnitTest}
import jdk.nashorn.api.scripting.NashornScriptEngineFactory
import org.scalatest.concurrent.{Eventually, ScalaFutures}
import org.scalatest.exceptions.TestFailedException
import org.slf4s.Logging

import scala.concurrent.duration._
import scala.concurrent.{Await, ExecutionContext, Future, Promise}

// Shared fixture: launches the multi-threaded script executor as the VM under
// test and supplies a global execution context for the futures used below.
trait MultiThreadingTestFixture extends UnitTest with Logging with SharedInstanceActorTesting with VirtualMachineLauncher with ScalaFutures with FairAmountOfPatience with Eventually {
  override val scriptExecutor: ScriptExecutorBase = MultiThreadedScriptExecutor
  override implicit val executionContext: ExecutionContext = ExecutionContext.global
}

class MultiThreadingTest extends MultiThreadingTestFixture {
  // Convenience: a ScriptLocation with no column information.
  def location(ln: Int) = ScriptLocation(ln, None)

  // NOTE(review): "be ignore" in the test name is a typo for "be ignored";
  // it is a runtime string, left unchanged here.
  "Breakpoint requests from other threads should be ignore in a paused state" in {
    val scriptAddedPromise = Promise[Script]()
    val hitBreakpointPromise = Promise[String]()
    // counts every HitBreakpoint event; must stay at 1 while paused
    val breakpointCounter = new AtomicInteger()
    val host = getHost
    observeScriptEvents(new Observer[ScriptEvent] {

      override def onNext(item: ScriptEvent): Unit = item match {
        case ScriptAdded(script) =>
          scriptAddedPromise.success(script)
        case hb: HitBreakpoint =>
          breakpointCounter.incrementAndGet()
          // trySuccess: subsequent hits must not fail the already-completed promise
          hitBreakpointPromise.trySuccess("")
        case _ =>
      }

      override def onError(error: Throwable): Unit = {}

      override def onComplete(): Unit = {}
    })

    whenReady(scriptAddedPromise.future) { script =>
      // line-number info may arrive slightly after ScriptAdded, hence eventually
      val scriptLocation = eventually {
        host.getBreakpointLocations(ScriptIdentity.fromId(script.id), location(1), None).headOption.getOrElse(fail(s"No line numbers for script ${script.id}"))
      }
      host.setBreakpoint(ScriptIdentity.fromURL(script.url), scriptLocation, BreakpointOptions.empty)

      try {
        whenReady(hitBreakpointPromise.future) { _ =>
          // Ugly, but wait for a while to see if the counter increases over 1 (which it shouldn't).
          Thread.sleep(200)
          breakpointCounter.get() should be(1)
        }
      } catch {
        // augment a bare timeout failure with a progress summary for debugging
        case t: TestFailedException if t.getMessage().contains("timeout") =>
          val progress = summarizeProgress()
          throw new TimeoutException("Timed out: " + progress)
      }
    }
  }
}

// Standalone JVM under test: five threads evaluate the same script in endless
// loops so the debugger side sees breakpoint activity from multiple threads.
// NOTE(review): extends App (DelayedInit) — acceptable for this throwaway
// harness, but an explicit main method would avoid initialization-order pitfalls.
object MultiThreadedScriptExecutor extends App with ScriptExecutorBase {
  println("MultiThreadedScriptExecutor starting. Java version: " + System.getProperty("java.version"))
  val scriptEngine = new NashornScriptEngineFactory().getScriptEngine("--no-syntax-extensions")
  val reader = new BufferedReader(new InputStreamReader(System.in))
  // handshake with the test harness over stdin/stdout
  println(Signals.ready)
  waitForSignal(Signals.go)

  // Used a compiled script here before, stopped working with JDK 10
  // NOTE(review): never reassigned — could be a val.
  var src =
    """(function () {
      |  return Math.floor(5.5);
      |})();
    """.stripMargin

  implicit val ec = ExecutionContext.global

  // five concurrent evaluation loops; they never terminate on their own
  val futures = (1 to 5).map { _ =>
    Future {
      while (true) {
        scriptEngine.eval(src)
      }
    }
  }

  // keeps the process alive while the debugger test runs (max 30 s)
  Await.result(Future.sequence(futures), 30.seconds)
}
Example 34
Source File: ResponseHeadersSpec.scala    From ScalaWebTest   with Apache License 2.0 5 votes vote down vote up
package org.scalawebtest.integration.response

import org.scalatest.exceptions.TestFailedException
import org.scalawebtest.core.ResponseAccessors
import org.scalawebtest.integration.ScalaWebTestBaseSpec

/** Verifies the response-header accessors against a fixture JSP page. */
class ResponseHeadersSpec extends ScalaWebTestBaseSpec with ResponseAccessors {
  path = "/responseHeaders.jsp"

  "When accessing a website the responseHeaders accessor method" should "provide access to the current response headers" in {
    responseHeaders should not be empty
  }
  it should "provide direct access to specific headers field-value by field-name" in {
    responseHeaderValue("Content-Type") shouldBe "text/html;charset=utf-8"
  }
  it should "respond with a meaningful error message in case we request a response header field-value for a field-name which doesn't exist" in {
    // the lookup is case-sensitive, so lowercase "content-type" is not found
    assertThrows[TestFailedException] {
      responseHeaderValue("content-type") shouldBe "text/html;charset=utf-8"
    }
  }
  it should "merge response header field-values, when multiple entries with the same field-name exist. They have to be merged in order of their occurrence" in {
    responseHeaderValue("Cache-Control") shouldBe "no-cache, no-store"
  }
}
Example 35
Source File: MisfitRelevanceSpec.scala    From ScalaWebTest   with Apache License 2.0 5 votes vote down vote up
package org.scalawebtest.integration.gauge

import org.scalatest.exceptions.TestFailedException
import org.scalawebtest.integration.{AdditionalAssertions, ScalaWebTestBaseSpec}

// Checks that when a gauge does not fit, the failure message singles out the
// most relevant misfit (here: the textarea's title attribute) rather than
// reporting every mismatching node.
class MisfitRelevanceSpec extends ScalaWebTestBaseSpec with AdditionalAssertions {
  path = "/nested.jsp"
  "Misfit relevance" should "return the correct error message" in {
    // the gauge expects title="hobby", while the page has title="hobbies"
    assertThrowsAndTestMessage[TestFailedException](
      fits(<div>
        <select title="friendship book questions">
          <optgroup label="favorite color">
            <option value="green">green</option>
            <option value="red">red</option>
            <option value="blue">blue</option>
            <option value="yellow">yellow</option>
          </optgroup>
        </select>
        <textarea title="hobby"></textarea>
      </div>)
    )(message => {
      // the message must name the misfitting attribute and both values
      message should include("Misfitting Attribute:")
      message should include("[title] in [\n  <textarea title=\"hobbies\"></textarea>\n] with value [hobbies] didn't equal [hobby]")
      message should include regex "(?s)Expected:.*hobby.*Actual:.*hobbies"
    })
  }

}
Example 36
Source File: HtmlElementGaugeSpec.scala    From ScalaWebTest   with Apache License 2.0 5 votes vote down vote up
package org.scalawebtest.integration.gauge

import org.scalatest.exceptions.TestFailedException
import org.scalawebtest.core.gauge.HtmlElementGauge
import org.scalawebtest.integration.ScalaWebTestBaseSpec

// Exercises HtmlElementGauge: each image tile of the gallery overview page is
// verified individually against an XML gauge, in positive and negative form.
class HtmlElementGaugeSpec extends ScalaWebTestBaseSpec with HtmlElementGauge {
  path = "/galleryOverview.jsp"

  // Returns a fresh traversal over the image tiles on every call.
  def images = findAll(CssSelectorQuery("ul div.image_columns"))

  // Gauge describing one gallery tile. NOTE: the XML literal's exact layout is
  // preserved deliberately — whitespace inside it becomes text nodes.
  val imageGauge = <div class="columns image_columns">
    <a href="@regex \/gallery\/image\/\d">
      <figure class="obj_aspect_ratio">
        <noscript>
          <img class="obj_full" src="@regex \/image\/\d\.jpg\?w=600"></img>
        </noscript>
        <img class="obj_full lazyload" srcset="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAMAAAADACAMAAABlApw1AAAABGdBTUEAALGPC/xhBQAAAAFzUkdCAK7OHOkAAAA2UExURUxpcQAAAAAAAGZmZmZmZmZmZmZmZmZmZmZmZmZmZmZmZmZmZgAAAAAAAMMpPhwBA7EjNlcGETVhaCQAAAANdFJOUwD18WUFTTcMFSN7LeaHjN0xAAABYElEQVR42u3XUVKDMBSG0baUkBi0uv/NGoIg+sxM507Pt4L/cHmAy0U6o3vA/u+fQvVHsK7PgZryatgBy/wSqoaYNkDbn/N8C9bcCKugP/9yew/WrWw3WA5QajxALfkASPEAaQcsB4gI6CfYAOM1GuA6HgEpIiABAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAJwGeJzQUwGfJ/T1TMDHCQEAAAAAAAC8LCD8t1D4r1E/NAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAK8KqBEB9QhI8zVYc9oB6zs0DsNbmIZhXN+gHVC7IExj218PgC5ohDilvn8F9BPkUmpNTRGh2iol/xxgu0EjRCrvz78DmqARAtX2tv0boBPu06II0v13/jemI5vt8kv1XgAAAFd6VFh0UmF3IHByb2ZpbGUgdHlwZSBpcHRjAAB4nOPyDAhxVigoyk/LzEnlUgADIwsuYwsTIxNLkxQDEyBEgDTDZAMjs1Qgy9jUyMTMxBzEB8uASKBKLgDqFxF08kI1lQAAAABJRU5ErkJggg==" data-sizes="auto"></img>
      </figure>
    </a>
  </div>

  "The element gauge" should "successfully verify if single elements fit the given gauge" in {
    images.size should be > 5 withClue " - gallery didn't contain the expected amount of images"

    // Each tile must individually match the gauge.
    images.foreach(image => image fits imageGauge)
  }
  it should "work with the fit synonym as well" in {
    images.foreach(image => image fit <div class="columns image_columns"></div>)
  }
  it should "work with negative checks" in {
    images.foreach(image => image doesntFit <div class="noImage"></div>)
  }
  it should "and the doesNotFit synonym" in {
    images.foreach(image => image doesNotFit <div class="noImage"></div>)
  }
  it should "fail if expected to fit but doesn't" in {
    // A gauge that matches no tile must raise a TestFailedException.
    assertThrows[TestFailedException](
      images.foreach(image => image fits <div class="noImage"></div>)
    )
  }
  it should "fail if expected not to fit but does" in {
    assertThrows[TestFailedException](
      images.foreach(image => image doesntFit imageGauge)
    )
  }
  it should "fail if the gauge contains more then one top level element" in {
    // A gauge must have exactly one root element; two siblings are rejected.
    assertThrows[TestFailedException](
      images.next() fits <div class="columns_image"></div> <div class="columns_image"></div>
    )
  }
}