org.scalacheck.Prop.forAll Scala Examples

The following examples show how to use org.scalacheck.Prop.forAll. Each example is taken from an open-source project; the source file, project, and license are noted in the header above it.
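For orientation, here is a minimal, self-contained sketch of the pattern the examples below share: a Properties suite whose properties are built with forAll from explicit generators. The suite name, property name, and invariant here are illustrative only and are not taken from any of the listed projects.

import org.scalacheck.{Gen, Properties}
import org.scalacheck.Prop.forAll

object StringConcatSpec extends Properties("String") {
  // forAll accepts explicit Gen instances (or implicit Arbitrary ones) and a function
  // returning Boolean/Prop; ScalaCheck evaluates it against many generated inputs.
  property("concatenation preserves combined length") =
    forAll(Gen.alphaStr, Gen.alphaStr) { (a: String, b: String) =>
      (a + b).length == a.length + b.length
    }
}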
Example 1
Source File: BloomFiltersSpec.scala    From bloom-filter-scala   with MIT License
package tests.bloomfilter.mutable

import bloomfilter.CanGenerateHashFrom
import bloomfilter.mutable.BloomFilter
import org.scalacheck.Test.Parameters
import org.scalacheck.commands.Commands
import org.scalacheck.{Arbitrary, Gen, Prop, Properties}
import org.scalacheck.Arbitrary.arbitrary
import org.scalacheck.Prop.forAll

class BloomFiltersSpec extends Properties("BloomFilters") {

  val maxNumElems = 10

  def genListOfMaxTenElems[A](implicit aGen: Gen[A]): Gen[List[A]] =
    Gen.posNum[Int] map (_ % maxNumElems) flatMap (i => Gen.listOfN(i, aGen))

  property("union") =
    forAll(genListOfMaxTenElems(arbitrary[Long]), genListOfMaxTenElems(arbitrary[Long])) {
      (leftElements: List[Long], rightElements: List[Long]) =>
        val leftBloomFilter = BloomFilter[Long](maxNumElems, 0.01)
        leftElements foreach leftBloomFilter.add
        val rightBloomFilter = BloomFilter[Long](maxNumElems, 0.01)
        rightElements foreach rightBloomFilter.add
        val unionBloomFilter = leftBloomFilter union rightBloomFilter
        val result = (leftElements ++ rightElements) forall unionBloomFilter.mightContain
        leftBloomFilter.dispose()
        rightBloomFilter.dispose()
        unionBloomFilter.dispose()
        result
    }

  property("intersect") =
    forAll(genListOfMaxTenElems(arbitrary[Long]), genListOfMaxTenElems(arbitrary[Long])) {
      (leftElements: List[Long], rightElements: List[Long]) =>
        val leftBloomFilter = BloomFilter[Long](maxNumElems, 0.01)
        leftElements foreach leftBloomFilter.add
        val rightBloomFilter = BloomFilter[Long](maxNumElems, 0.01)
        rightElements foreach rightBloomFilter.add
        val unionBloomFilter = leftBloomFilter intersect rightBloomFilter
        val intersectElems = leftElements.toSet intersect rightElements.toSet
        val result = intersectElems forall unionBloomFilter.mightContain
        leftBloomFilter.dispose()
        rightBloomFilter.dispose()
        unionBloomFilter.dispose()
        result
    }
} 
Example 2
Source File: TwoOrMoreKeysSuite.scala    From laserdisc   with MIT License
package laserdisc
package refined.types

import org.scalacheck.Prop.forAll

final class TwoOrMoreKeysSuite extends BaseSpec {
  test("TwoOrMoreKeys fails to compile given non literal empty List") {
    assertNoDiff(
      compileErrors("TwoOrMoreKeys(List.empty)"),
      """|error: compile-time refinement only works with literals
         |TwoOrMoreKeys(List.empty)
         |             ^
         |""".stripMargin
    )
  }

  test("TwoOrMoreKeys fails to compile given non literal single element List") {
    assertNoDiff(
      compileErrors("""TwoOrMoreKeys(List(Key("a")))"""),
      """|error: compile-time refinement only works with literals
         |TwoOrMoreKeys(List(Key("a")))
         |             ^
         |""".stripMargin
    )
  }

  test("TwoOrMoreKeys fails to compile given non literal List of two elements") {
    assertNoDiff(
      compileErrors("""TwoOrMoreKeys(List(Key("a"), Key("b")))"""),
      """|error: compile-time refinement only works with literals
         |TwoOrMoreKeys(List(Key("a"), Key("b")))
         |             ^
         |""".stripMargin
    )
  }

  property("TwoOrMoreKeys fails at runtime provided empty List") {
    intercept[IllegalArgumentException](TwoOrMoreKeys.unsafeFrom(List.empty))
  }

  property("TwoOrMoreKeys fails at runtime provided single element List") {
    intercept[IllegalArgumentException](TwoOrMoreKeys.unsafeFrom(List(Key("a"))))
  }

  property("TwoOrMoreKeys refines correctly provided non literal cases of Lists of length > 1") {
    forAll(keyLists.filter(_.size > 1)) { ks =>
      TwoOrMoreKeys.from(ks) onRight (_.value == ks)
    }
  }
} 
Example 3
Source File: SlotSuite.scala    From laserdisc   with MIT License
package laserdisc
package refined.types

import org.scalacheck.Prop.forAll

final class SlotSuite extends BaseSpec {
  test("Slot fails to compile given out of range Int (< 0)") {
    assertNoDiff(
      compileErrors("Slot(-1)"),
      """|error: Left predicate of (!(-1 < 0) && !(-1 > 16383)) failed: Predicate (-1 < 0) did not fail.
         |Slot(-1)
         |    ^
         |""".stripMargin
    )
  }

  test("Slot fails to compile given out of range Int (> 16383)") {
    assertNoDiff(
      compileErrors("Slot(16384)"),
      """|error: Right predicate of (!(16384 < 0) && !(16384 > 16383)) failed: Predicate (16384 > 16383) did not fail.
         |Slot(16384)
         |    ^
         |""".stripMargin
    )
  }

  property("Slot fails at runtime provided non literal cases of out of range Ints (i < 0 | i > 16383)") {
    forAll(ints.filterNot(slotIsValid)) { i =>
      intercept[IllegalArgumentException](Slot.unsafeFrom(i))
    }
  }

  test("NonNegLong compiles given edge cases (0)") {
    Slot(0)
  }

  test("NonNegLong compiles given edge cases (16383)") {
    Slot(16383)
  }
} 
Example 4
Source File: NonZeroIntSuite.scala    From laserdisc   with MIT License
package laserdisc
package refined.types

import org.scalacheck.Prop.forAll

final class NonZeroIntSuite extends BaseSpec {
  test("NonZeroInt fails to compile given 0") {
    assertNoDiff(
      compileErrors("NonZeroInt(0)"),
      """|error: Predicate (0 == 0) did not fail.
         |NonZeroInt(0)
         |          ^
         |""".stripMargin
    )
  }

  property("NonZeroInt refines correctly provided non literal cases of valid Ints (i != 0)") {
    forAll(nonZeroIntGen) { i =>
      NonZeroInt.from(i) onRight (_.value == i)
    }
  }
} 
Example 5
Source File: RESPFrameMixedSpec.scala    From laserdisc   with MIT License
package laserdisc
package protocol

import org.scalacheck.Prop.forAll
import scodec.bits.BitVector

final class RESPFrameMixedSpec extends RESPFrameFixture {

  test(
    "Appending to a non empty mixed frame a bit vector composed of a complete sequence of integers, simple strings, bulk strings and errors gives MoreThanOne with a list of all the complete items"
  ) {
    val nonEmptyFrame = IncompleteFrame(BitVector("$16\r\nTest bulk str".getBytes), 0)
    val inputVector = BitVector(
      "ing\r\n+OK\r\n$0\r\n\r\n+Another simple string\r\n*3\r\n$16\r\nTest bulk string\r\n:100\r\n+A simple string\r\n-Possible error message\r\n*0\r\n:1\r\n:2\r\n*2\r\n$8\r\nAnother1\r\n-An error\r\n:177\r\n+Another simple string\r\n$21\r\nTest bulk string 1 11\r\n*5\r\n$16\r\nTest bulk string\r\n:13\r\n-1234 An error with numbers\r\n:100\r\n+A simple string\r\n-And an error message\r\n".getBytes
    )
    nonEmptyFrame.append(inputVector.toByteBuffer) onRightAll {
      case r @ MoreThanOneFrame(_, _) =>
        assertEquals(
          r.complete,
          Vector(
            CompleteFrame(BitVector("$16\r\nTest bulk string\r\n".getBytes())),
            CompleteFrame(BitVector("+OK\r\n".getBytes())),
            CompleteFrame(BitVector("$0\r\n\r\n".getBytes())),
            CompleteFrame(BitVector("+Another simple string\r\n".getBytes())),
            CompleteFrame(BitVector("*3\r\n$16\r\nTest bulk string\r\n:100\r\n+A simple string\r\n".getBytes)),
            CompleteFrame(BitVector("-Possible error message\r\n".getBytes())),
            CompleteFrame(BitVector("*0\r\n".getBytes())),
            CompleteFrame(BitVector(":1\r\n".getBytes())),
            CompleteFrame(BitVector(":2\r\n".getBytes())),
            CompleteFrame(BitVector("*2\r\n$8\r\nAnother1\r\n-An error\r\n".getBytes)),
            CompleteFrame(BitVector(":177\r\n".getBytes())),
            CompleteFrame(BitVector("+Another simple string\r\n".getBytes())),
            CompleteFrame(BitVector("$21\r\nTest bulk string 1 11\r\n".getBytes())),
            CompleteFrame(
              BitVector(
                "*5\r\n$16\r\nTest bulk string\r\n:13\r\n-1234 An error with numbers\r\n:100\r\n+A simple string\r\n".getBytes
              )
            ),
            CompleteFrame(BitVector("-And an error message\r\n".getBytes()))
          )
        )
      case _ => fail(s"expected a MoreThanOne type")
    }
  }

  test(
    "Appending to a non empty mixed frame a bit vector composed of sequence of integers, simple strings, bulk strings and errors that are not complete gives MoreThanOne with a list of all the complete items plus the remainder"
  ) {
    val nonEmptyFrame = IncompleteFrame(BitVector("$16\r\nTest bulk str".getBytes), 0)
    val inputVector = BitVector(
      "ing\r\n+OK\r\n+Another simple string\r\n-Possible error message\r\n:1\r\n:2\r\n:177\r\n+Another simple string\r\n$21\r\nTest bulk string 1 11\r\n-And an error message\r\n".getBytes
    )
    nonEmptyFrame.append(inputVector.toByteBuffer) onRightAll {
      case r @ MoreThanOneFrame(_, _) =>
        assertEquals(
          r.complete,
          Vector(
            CompleteFrame(BitVector("$16\r\nTest bulk string\r\n".getBytes())),
            CompleteFrame(BitVector("+OK\r\n".getBytes())),
            CompleteFrame(BitVector("+Another simple string\r\n".getBytes())),
            CompleteFrame(BitVector("-Possible error message\r\n".getBytes())),
            CompleteFrame(BitVector(":1\r\n".getBytes())),
            CompleteFrame(BitVector(":2\r\n".getBytes())),
            CompleteFrame(BitVector(":177\r\n".getBytes())),
            CompleteFrame(BitVector("+Another simple string\r\n".getBytes())),
            CompleteFrame(BitVector("$21\r\nTest bulk string 1 11\r\n".getBytes())),
            CompleteFrame(BitVector("-And an error message\r\n".getBytes()))
          )
        )
      case _ => fail(s"expected a MoreThanOne type")
    }
  }

  property("Appending to an empty frame a random sequence of complete messages gives MoreThanOne with all the complete items") {
    forAll { testSet: OneOrMore[ProtocolEncoded] =>
      val vector = BitVector(testSet.value.map(_.encoded).mkString.getBytes)

      EmptyFrame.append(vector.toByteBuffer) onRightAll {
        case MoreThanOneFrame(complete, remainder) =>
          assertEquals(complete.size, testSet.value.size)
          assert(remainder.isEmpty)
        case CompleteFrame(_) => succeed
        case other            => fail(s"expected a MoreThanOne type. Was $other")
      }
    }
  }
} 
Example 6
Source File: ConnectionPSpec.scala    From laserdisc   with MIT License
package laserdisc
package protocol

import org.scalacheck.Prop.forAll

final class ConnectionPSpec extends BaseSpec with ConnectionP {
  property("The Connection protocol using auth roundtrips successfully given non empty password") {
    forAll { key: Key =>
      val protocol = auth(key)
      assertEquals(protocol.encode, Arr(Bulk("AUTH"), Bulk(key.value)))
      assertEquals(protocol.decode(Str(OK.value)), OK)
    }
  }

  property("The Connection protocol using echo roundtrips successfully given any String message") {
    forAll { s: String =>
      val protocol = echo(s)
      assertEquals(protocol.encode, Arr(Bulk("ECHO"), Bulk(s)))
      assertEquals(protocol.decode(Bulk(s)), s)
    }
  }

  property("The Connection protocol using echo roundtrips successfully given any Int message") {
    forAll { i: Int =>
      val protocol = echo(i)
      assertEquals(protocol.encode, Arr(Bulk("ECHO"), Bulk(i)))
      assertEquals(protocol.decode(Bulk(i)), i)
    }
  }

  property("The Connection protocol using ping roundtrips successfully given any String message") {
    forAll { s: String =>
      val protocol = ping(s)
      assertEquals(protocol.encode, Arr(Bulk("PING"), Bulk(s)))
      assertEquals(protocol.decode(Bulk(s)), s)
    }
  }

  property("The Connection protocol using ping roundtrips successfully given any Int message") {
    forAll { i: Int =>
      val protocol = ping(i)
      assertEquals(protocol.encode, Arr(Bulk("PING"), Bulk(i)))
      assertEquals(protocol.decode(Bulk(i)), i)
    }
  }

  property("The Connection protocol using ping roundtrips successfully using val to get back PONG message") {
    val protocol = ping
    assertEquals(protocol.encode, Arr(Bulk("PING")))
    assertEquals(protocol.decode(Str(PONG.value)), PONG)
  }

  property("The Connection protocol using quit roundtrips successfully") {
    val protocol = quit
    assertEquals(protocol.encode, Arr(Bulk("QUIT")))
    assertEquals(protocol.decode(Str(OK.value)), OK)
  }

  property("The Connection protocol using select roundtrips successfully given valid DbIndexes") {
    forAll { dbi: DbIndex =>
      val protocol = select(dbi)
      assertEquals(protocol.encode, Arr(Bulk("SELECT"), Bulk(dbi)))
      assertEquals(protocol.decode(Str(OK.value)), OK)
    }
  }

  property("The Connection protocol using swapdb roundtrips successfully given valid DbIndexes") {
    forAll { (dbi1: DbIndex, dbi2: DbIndex) =>
      val protocol = swapdb(dbi1, dbi2)
      assertEquals(protocol.encode, Arr(Bulk("SWAPDB"), Bulk(dbi1), Bulk(dbi2)))
      assertEquals(protocol.decode(Str(OK.value)), OK)
    }
  }
} 
Example 7
Source File: HyperLogLogPSpec.scala    From laserdisc   with MIT License
package laserdisc
package protocol

import org.scalacheck.Prop.forAll

abstract class HyperLogLogPSpec extends BaseSpec with HyperLogLogP {

  property("The HyperLogLog protocol using pfadd roundtrips successfully given key and elements") {
    forAll { (k: Key, es: OneOrMoreKeys, b: Boolean) =>
      val protocol = pfadd(k, es)
      assertEquals(protocol.encode, Arr(Bulk("PFADD") :: Bulk(k) :: es.value.map(Bulk(_))))
      assertEquals(protocol.decode(boolToNum(b)), b)
    }
  }

  property("The HyperLogLog protocol using pfcount roundtrips successfully given keys") {
    forAll { (ks: OneOrMoreKeys, nni: NonNegInt) =>
      val protocol = pfcount(ks)
      assertEquals(protocol.encode, Arr(Bulk("PFCOUNT") :: ks.value.map(Bulk(_))))
      assertEquals(protocol.decode(Num(nni.value.toLong)), nni)
    }
  }

  property("The HyperLogLog protocol using pfmerge roundtrips successfully given two or more source keys and a destination key") {
    forAll { (sks: TwoOrMoreKeys, dk: Key) =>
      val protocol = pfmerge(sks, dk)
      assertEquals(protocol.encode, Arr(Bulk("PFMERGE") :: Bulk(dk) :: sks.value.map(Bulk(_))))
      assertEquals(protocol.decode(Str(OK.value)), OK)
    }
  }
} 
Example 8
Source File: CirceSpec.scala    From laserdisc   with MIT License
package laserdisc
package interop

import io.circe.generic.semiauto._
import io.circe.{Decoder, Encoder}
import laserdisc.interop.circe._
import org.scalacheck.Prop.forAll
import org.scalacheck.{Arbitrary, Gen}

sealed trait Foo                        extends Product with Serializable
final case class Bar(x: Int)            extends Foo
final case class Baz(y: String, z: Foo) extends Foo

object Foo {
  implicit val decoder: Decoder[Foo] = deriveDecoder
  implicit val encoder: Encoder[Foo] = deriveEncoder
}
object Bar {
  implicit val decoder: Decoder[Bar] = deriveDecoder
  implicit val encoder: Encoder[Bar] = deriveEncoder
}
object Baz {
  implicit val decoder: Decoder[Baz] = deriveDecoder
  implicit val encoder: Encoder[Baz] = deriveEncoder
}

final class CirceSpec extends CirceCheckSettings with EitherTestSyntax {
  private[this] val barGen: Gen[Bar] = Arbitrary.arbitrary[Int].map(Bar.apply)
  private[this] val bazGen: Gen[Baz] = for {
    s   <- Arbitrary.arbitrary[String]
    foo <- fooGen
  } yield Baz(s, foo)
  private[this] def fooGen: Gen[Foo] = Gen.oneOf(barGen, bazGen)

  private[this] implicit val barArbitrary: Arbitrary[Bar] = Arbitrary(barGen)
  private[this] implicit val bazArbitrary: Arbitrary[Baz] = Arbitrary(bazGen)

  property("Circe interop roundtrips with no errors when handling a simple type") {
    forAll { bar: Bar =>
      assertEquals(Read[Bulk, Bar].read(Bulk(bar)), bar)
    }
  }

  property("Circe interop roundtrips with no errors when handling a recursive type") {
    forAll { baz: Baz =>
      assertEquals(Read[Bulk, Baz].read(Bulk(baz)), baz)
    }
  }

  test("Circe interop fails to decode when handling a json that does not respect the contract") {
    Read[Bulk, Bar].read(
      Bulk("""{"i": null}""")
    ) onLeft (e => assertEquals(e.message, "DecodingFailure at .x: Attempt to decode value on failed cursor"))
  }
} 
Example 9
Source File: ApplicativeSpec.scala    From tutorial-cat   with Apache License 2.0
package com.danielasfregola.tutorial.cat.applicative

import com.danielasfregola.tutorial.cat.ArbitraryIntInstances._
import ApplicativeInstances._
import com.danielasfregola.tutorial.cat.functor.FunctorProperties
import org.scalacheck.Prop.forAll
import org.scalacheck.{Arbitrary, Properties}

import scala.reflect._

class MaybeApplicativeSpec extends ApplicativeSpec(maybeApplicative)
class ZeroOrMoreApplicativeSpec extends ApplicativeSpec(zeroOrMoreApplicative)

abstract class ApplicativeSpec[Box[_]](val applicative: Applicative[Box])(implicit val arbitrary: Arbitrary[Box[Int]],
                                                                          tag: ClassTag[Box[_]])
    extends Properties(s"Applicative for $tag")
    with ApplicativeProperties[Box]

trait ApplicativeProperties[Box[_]] extends FunctorProperties[Box] { self: Properties =>

  val applicative: Applicative[Box]
  import applicative._

  val functor = applicative

  val pureIdentity: Box[A => A] = pure(identity)
  val pureF = pure(f)
  val toPureA = { a: A => pure(a)}

  // ap(pure(identity))(box) == box
  property("identity") = forAll { box: Box[A] =>
    ap(pureIdentity)(box) == box
  }

  // ap(pure(f))(pure(a)) == pure(f(a))
  property("homorphism") = forAll { a: A =>
    ap(pureF)(pure(a)) == pure(f(a))
  }

  // {x => pure(x)}(a) == pure(a)
  property("interchange") = forAll { a: A =>
    toPureA(a) == pure(a)
  }

  // pure((h o g o f)(a)) == ap(pure(h o g))(pure(f(a)))
  property("composition") = forAll { a: A =>
    val gH = g andThen h
    val fGH = f andThen gH
    val pureGH = pure(gH)
    val pureFA = pure(f(a))
    pure(fGH(a)) == ap(pureGH)(pureFA)
  }
} 
Example 10
Source File: FunctorSpec.scala    From tutorial-cat   with Apache License 2.0
package com.danielasfregola.tutorial.cat.functor

import com.danielasfregola.tutorial.cat._
import org.scalacheck.Prop.forAll
import org.scalacheck.{Arbitrary, Properties}
import FunctorInstances._
import ArbitraryIntInstances._

import scala.reflect._

class MaybeFunctorSpec extends FunctorSpec(maybeFunctor)
class ZeroOrMoreFunctorSpec extends FunctorSpec(zeroOrMoreFunctor)

abstract class FunctorSpec[Box[_]](val functor: Functor[Box])(implicit val arbitrary: Arbitrary[Box[Int]],
                                                              tag: ClassTag[Box[_]])
    extends Properties(s"Functor for $tag")
    with FunctorProperties[Box]

trait FunctorProperties[Box[_]] extends SimpleCategoryUtils { self: Properties =>

  val functor: Functor[Box]
  import functor._

  implicit def arbitrary: Arbitrary[Box[A]]

  lazy val mapF: Box[A] => Box[B] = map(_)(f)
  lazy val mapG: Box[B] => Box[C] = map(_)(g)
  lazy val mapH: Box[C] => Box[D] = map(_)(h)

  // map_id == id
  property("identity") = forAll { box: Box[A] =>
    map(box)(identity) == box
  }

  // map_(g o f) == (map_g) o (map_f)
  property("composition") = forAll { boxA: Box[A] =>
    val fG = f andThen g
    val mapFG: Box[A] => Box[C] = map(_)(fG)
    mapFG(boxA) == (mapF andThen mapG)(boxA)
  }

  // map_(h o g) o map_f == map_h o map_(g o f)
  property("associativity") = forAll { boxA: Box[A] =>
    val fG = f andThen g
    val mapFG: Box[A] => Box[C] = map(_)(fG)
    val gH = g andThen h
    val mapGH: Box[B] => Box[D] = map(_)(gH)

    (mapF andThen mapGH)(boxA) == (mapFG andThen mapH)(boxA)
  }
} 
Example 11
Source File: MonoidSpec.scala    From tutorial-cat   with Apache License 2.0
package com.danielasfregola.tutorial.cat.monoid

import org.scalacheck.Prop.forAll
import org.scalacheck.{Arbitrary, Properties}

import scala.reflect._

class IntMonoidSpec extends MonoidSpec[Int](MonoidInstances.intMonoid)
class StringMonoidSpec extends MonoidSpec[String](MonoidInstances.stringMonoid)

abstract class MonoidSpec[A: ClassTag](monoid: Monoid[A])(implicit arbitrary: Arbitrary[A]) extends Properties(s"Monoid for ${classTag[A]}") {

  val id = monoid.identity

  // n o id == id o n == n
  property("identity") = forAll { n: A =>
    monoid.compose(n, id) == n &&
    monoid.compose(id, n) == n
  }

  // closure: forall x, y => x o y is still an A
  property("composition") = forAll { (x: A, y: A) =>
    monoid.compose(x, y).isInstanceOf[A]
  }

  // x o (y o z) == (x o y) o z
  property("associativity") = forAll { (x: A, y: A, z: A) =>
    val xY = monoid.compose(x,y)
    val yZ = monoid.compose(y,z)
    monoid.compose(xY, z) == monoid.compose(x, yZ)
  }
} 
Example 12
Source File: MonadSpec.scala    From tutorial-cat   with Apache License 2.0
package com.danielasfregola.tutorial.cat.monad

import com.danielasfregola.tutorial.cat.ArbitraryIntInstances._
import MonadInstances._
import com.danielasfregola.tutorial.cat.applicative.ApplicativeProperties
import org.scalacheck.Prop.forAll
import org.scalacheck.{Arbitrary, Properties}

import scala.reflect._

class MaybeMonadSpec extends MonadSpec(maybeMonad)
class ZeroOrMoreMonadSpec extends MonadSpec(zeroOrMoreMonad)

abstract class MonadSpec[Box[_]](val monad: Monad[Box])(implicit val arbitrary: Arbitrary[Box[Int]],
                                                        tag: ClassTag[Box[_]])
    extends Properties(s"Monad for $tag")
    with MonadProperties[Box]

trait MonadProperties[Box[_]] extends ApplicativeProperties[Box] { self: Properties =>

  val monad: Monad[Box]
  import monad._

  lazy val applicative = monad

  lazy val toPureF = { a: A => pure(f(a)) }
  lazy val toPureG = { b: B => pure(g(b)) }

  // flatMap(pure(a))(f) == f(a)
  property("left identity") = forAll { a: A =>
    flatMap(pure(a))(toPureF) == toPureF(a)
  }

  // flatMap(pure(f(a)))(pure) == pure(f(a))
  property("right identity") = forAll { a: A =>
    flatMap(toPureF(a))(pure) == toPureF(a)
  }

  // flatMap(flatMap(boxA)(f))(g) == flatMap(boxA)(a => flatMap(f(a))(g))
  property("associativity") = forAll { boxA: Box[A] =>
    val left: Box[C] = flatMap(flatMap(boxA)(toPureF))(toPureG)
    val right: Box[C] = flatMap(boxA)(a => flatMap(toPureF(a))(toPureG))
    left == right
  }
} 
Example 13
Source File: NonNegDoubleSuite.scala    From laserdisc   with MIT License
package laserdisc
package refined.types

import org.scalacheck.Prop.forAll

final class NonNegDoubleSuite extends BaseSpec {
  test("NonNegDouble fails to compile given out of range Double (< 0.0D)") {
    assertNoDiff(
      compileErrors("NonNegDouble(-0.00000001D)"),
      """|error: Right predicate of ((-1.0E-8 != NaN) && !(-1.0E-8 < 0.0)) failed: Predicate (-1.0E-8 < 0.0) did not fail.
         |NonNegDouble(-0.00000001D)
         |            ^
         |""".stripMargin
    )
  }

  test("NonNegDouble fails to compile given NaN Double") {
    assertNoDiff(
      compileErrors("NonNegDouble(Double.NaN)"),
      """|error: Left predicate of ((NaN != NaN) && !(NaN < 0.0)) failed: Predicate failed: (NaN != NaN).
         |NonNegDouble(Double.NaN)
         |            ^
         |""".stripMargin
    )
  }

  property("NonNegDouble fails at runtime provided non literal cases of out of range Doubles (d < 0.0D)") {
    forAll(doubles.filterNot(nonNegDoubleIsValid)) { d =>
      intercept[IllegalArgumentException](NonNegDouble.unsafeFrom(d))
    }
  }

  test("NonNegDouble compiles given edge cases (0.0D)") {
    NonNegDouble(0.0d)
  }

  property("NonNegDouble refines correctly provided non literal cases of in range Doubles (d >= 0.0D)") {
    forAll(nonNegDoubleGen) { d =>
      NonNegDouble.from(d) onRight (_.value == d)
    }
  }
} 
Example 14
Source File: BloomFilterSerializationSpec.scala    From bloom-filter-scala   with MIT License
package tests.bloomfilter.mutable._128bit

import java.io._

import bloomfilter.mutable._128bit.BloomFilter
import org.scalacheck.Prop.forAll
import org.scalacheck.{Gen, Properties}
import org.scalatest.Matchers

class BloomFilterSerializationSpec extends Properties("BloomFilter") with Matchers {
  def genListElems[A](max: Long)(implicit aGen: Gen[A]): Gen[List[A]] = {
    Gen.posNum[Int].map(_ % max).flatMap(i => Gen.listOfN(math.min(i, Int.MaxValue).toInt, aGen))
  }

  val gen = for {
    size <- Gen.oneOf[Long](1, 1000)
    indices <- genListElems[Long](size)(Gen.chooseNum(0, size))
  } yield (size, indices)

  property("writeTo & readFrom") = forAll(gen) {
    case (size: Long, indices: List[Long]) =>
      val initial = BloomFilter[Long](size, 0.01)
      indices.foreach(initial.add)

      val file = File.createTempFile("bloomFilterSerialized", ".tmp")
      val out = new BufferedOutputStream(new FileOutputStream(file), 10 * 1000 * 1000)
      initial.writeTo(out)
      out.close()
      val in = new BufferedInputStream(new FileInputStream(file), 10 * 1000 * 1000)
      val sut = BloomFilter.readFrom[Long](in)
      in.close()

      sut.approximateElementCount() shouldEqual initial.approximateElementCount()

      val result = indices.forall(sut.mightContain)

      file.delete()
      initial.dispose()
      sut.dispose()

      result
  }

} 
Example 15
Source File: BloomFilterSerializationSpec.scala    From bloom-filter-scala   with MIT License
package tests.bloomfilter.mutable

import java.io._

import bloomfilter.mutable.BloomFilter
import org.scalacheck.Prop.forAll
import org.scalacheck.{Gen, Properties}
import org.scalatest.Matchers

class BloomFilterSerializationSpec extends Properties("BloomFilter") with Matchers {
  def genListElems[A](max: Long)(implicit aGen: Gen[A]): Gen[List[A]] = {
    Gen.posNum[Int].map(_ % max).flatMap(i => Gen.listOfN(math.min(i, Int.MaxValue).toInt, aGen))
  }

  val gen = for {
    size <- Gen.oneOf[Long](1, 1000 )
    indices <- genListElems[Long](size)(Gen.chooseNum(0, size - 1))
  } yield (size, indices)

  property("writeTo & readFrom") = forAll(gen) {
    case (size: Long, indices: List[Long]) =>
      val initial = BloomFilter[Long](size, 0.01)
      indices.foreach(initial.add)

      val file = File.createTempFile("bloomFilterSerialized", ".tmp")
      val out = new BufferedOutputStream(new FileOutputStream(file), 10 * 1000 * 1000)
      initial.writeTo(out)
      out.close()
      val in = new BufferedInputStream(new FileInputStream(file), 10 * 1000 * 1000)
      val sut = BloomFilter.readFrom[Long](in)
      in.close()

      sut.approximateElementCount() shouldEqual initial.approximateElementCount()

      val result = indices.forall(sut.mightContain)

      file.delete()
      initial.dispose()
      sut.dispose()

      result
  }

  property("supports java serialization") = {
    forAll(gen) {
      case (size, indices) =>
        val initial = BloomFilter[Long](size, 0.01)
        indices.foreach(initial.add)
        val file = File.createTempFile("bloomFilterSerialized", ".tmp")
        val out = new BufferedOutputStream(new FileOutputStream(file), 10 * 1000 * 1000)
        val oos = new ObjectOutputStream(out)
        oos.writeObject(initial)
        oos.close()
        out.close()
        val in = new BufferedInputStream(new FileInputStream(file), 10 * 1000 * 1000)
        val ois = new ObjectInputStream(in)
        val deserialized = ois.readObject()
        ois.close()
        in.close()

        deserialized should not be null
        deserialized should be(a[BloomFilter[Long]])
        val sut = deserialized.asInstanceOf[BloomFilter[Long]]

        sut.numberOfBits shouldEqual initial.numberOfBits
        sut.numberOfHashes shouldEqual initial.numberOfHashes
        sut.approximateElementCount() shouldEqual initial.approximateElementCount()


        val result = indices.forall(sut.mightContain)

        file.delete()
        initial.dispose()
        sut.dispose()

        result
    }
  }

} 
Example 16
Source File: BloomFilterSpec.scala    From bloom-filter-scala   with MIT License
package tests.bloomfilter.mutable

import bloomfilter.CanGenerateHashFrom
import bloomfilter.mutable.BloomFilter
import org.scalacheck.Arbitrary.arbitrary
import org.scalacheck.Prop.forAll
import org.scalacheck.Test.Parameters
import org.scalacheck.commands.Commands
import org.scalacheck.{Arbitrary, Gen, Prop, Properties}

class BloomFilterSpec extends Properties("BloomFilter") {

  property("for Long") = new BloomFilterCommands[Long].property()
  property("for String") = new BloomFilterCommands[String].property()
  property("for Array[Byte]") = new BloomFilterCommands[Array[Byte]].property()


  override def overrideParameters(p: Parameters): Parameters = {
    super.overrideParameters(p).withMinSuccessfulTests(100)
  }

  class BloomFilterCommands[T: Arbitrary](implicit canGenerateHash: CanGenerateHashFrom[T]) extends Commands {
    type Sut = BloomFilter[T]

    case class State(expectedItems: Long, addedItems: Long)

    override def canCreateNewSut(
        newState: State,
        initSuts: Traversable[State],
        runningSuts: Traversable[Sut]): Boolean = {
      initSuts.isEmpty && runningSuts.isEmpty ||
          newState.addedItems > newState.expectedItems ||
          newState.addedItems > 100
    }

    override def destroySut(sut: Sut): Unit =
      sut.dispose()

    override def genInitialState: Gen[State] =
      Gen.chooseNum[Long](1, Int.MaxValue).map(State(_, 0))

    override def newSut(state: State): Sut =
      BloomFilter[T](state.expectedItems, 0.01)

    def initialPreCondition(state: State): Boolean = true

    def genCommand(state: State): Gen[Command] =
      for {
        item <- Arbitrary.arbitrary[T]
      } yield commandSequence(AddItem(item), CheckItem(item))

    case class AddItem(item: T) extends UnitCommand {
      def run(sut: Sut): Unit = sut.synchronized(sut.add(item))
      def nextState(state: State) = state.copy(addedItems = state.addedItems + 1)
      def preCondition(state: State) = true
      def postCondition(state: State, success: Boolean) = success
    }

    case class CheckItem(item: T) extends SuccessCommand {
      type Result = Boolean
      def run(sut: Sut): Boolean = sut.synchronized(sut.mightContain(item))
      def nextState(state: State) = state
      def preCondition(state: State) = true
      def postCondition(state: State, result: Boolean): Prop = result
    }

  }

  private val elemsToAddGen = for {
    numberOfElemsToAdd <- Gen.chooseNum[Int](1, 1000)
    elemsToAdd <- Gen.listOfN(numberOfElemsToAdd, arbitrary[Long])
  } yield elemsToAdd

  // TODO fix elemsToAddGen.filter() below, why Gen.listOfN above generates empty lists?
  property("approximateElementCount") = forAll(elemsToAddGen.filter(x => x.size > 10 && x.toSet.size > 10)) { elemsToAdd: List[Long] =>
    val bf = BloomFilter[Long](elemsToAdd.size * 10, 0.0001)
    elemsToAdd.foreach(bf.add)
    val numberOfUnique = elemsToAdd.toSet.size
    math.abs(bf.approximateElementCount() - numberOfUnique) < numberOfUnique * 0.1
  }

} 
Example 17
Source File: ScenarioRunnerProperties.scala    From cornichon   with Apache License 2.0
package com.github.agourlay.cornichon.core

import com.github.agourlay.cornichon.steps.cats.EffectStep
import com.github.agourlay.cornichon.testHelpers.CommonTesting
import monix.execution.atomic.AtomicBoolean
import org.scalacheck.Prop.forAll
import org.scalacheck.{ Properties, Test }
import org.typelevel.claimant.Claim

class ScenarioRunnerProperties extends Properties("ScenarioRunner") with CommonTesting {

  // keep generated lists short (the default max size is 100)
  override def overrideParameters(p: Test.Parameters): Test.Parameters = super.overrideParameters(p.withMaxSize(10))

  property("a scenario containing only valid steps should not fail") =
    forAll(validStepsGen) { validSteps =>
      val s = Scenario("scenario with valid steps", validSteps)
      val r = awaitTask(ScenarioRunner.runScenario(Session.newEmpty)(s))
      Claim {
        r.isSuccess
      }
    }

  property("a scenario containing at least one invalid step should fail") =
    forAll(validStepsGen, invalidStepGen) { (validSteps, invalidStep) =>
      val s = Scenario("scenario with valid steps", validSteps :+ invalidStep)
      val r = awaitTask(ScenarioRunner.runScenario(Session.newEmpty)(s))
      Claim {
        !r.isSuccess
      }
    }

  property("a scenario stops at the first failed step") =
    forAll(validStepsGen, invalidStepGen) { (validSteps, invalidStep) =>
      val signal = AtomicBoolean(false)
      val signalingEffect = EffectStep.fromSync(
        "effect toggle signal",
        sc => {
          signal.set(true)
          sc.session
        }
      )
      val s = Scenario("scenario with valid steps", validSteps :+ invalidStep :+ signalingEffect)
      val r = awaitTask(ScenarioRunner.runScenario(Session.newEmpty)(s))
      Claim {
        !r.isSuccess && !signal.get()
      }
    }

  property("a scenario containing at least one invalid step should fail and always execute its finally clause") =
    forAll(validStepsGen, invalidStepGen) { (validSteps, invalidStep) =>
      val signal = AtomicBoolean(false)
      val signallingEffect = EffectStep.fromSync(
        "effect toggle signal",
        sc => {
          signal.set(true)
          sc.session
        }
      )
      val context = FeatureContext.empty.copy(finallySteps = signallingEffect :: Nil)
      val s = Scenario("scenario with valid steps", validSteps :+ invalidStep)
      val r = awaitTask(ScenarioRunner.runScenario(Session.newEmpty, context)(s))
      Claim {
        !r.isSuccess && signal.get()
      }
    }

  property("a scenario fails if its finally clause contains invalid steps") =
    forAll(validStepsGen, invalidStepGen) { (validSteps, invalidStep) =>
      val context = FeatureContext.empty.copy(finallySteps = invalidStep :: Nil)
      val s = Scenario("scenario with valid steps", validSteps)
      val r = awaitTask(ScenarioRunner.runScenario(Session.newEmpty, context)(s))
      Claim {
        !r.isSuccess
      }
    }

  property("runScenario runs `main steps` if there is no failure in `beforeSteps`") =
    forAll(validStepsGen) { validSteps =>
      val signal = AtomicBoolean(false)
      val signalingEffect = EffectStep.fromSync(
        "effect toggle signal",
        sc => {
          signal.set(true)
          sc.session
        }
      )
      val context = FeatureContext.empty.copy(beforeSteps = validSteps)
      val s = Scenario("scenario with valid steps", signalingEffect :: Nil)
      val r = awaitTask(ScenarioRunner.runScenario(Session.newEmpty, context)(s))
      Claim {
        r.isSuccess && signal.get()
      }
    }

} 
Example 18
Source File: MessageSerializerTest.scala    From aecor   with MIT License
package aecor.runtime.akkageneric

import aecor.runtime.akkageneric.GenericAkkaRuntime.KeyedCommand
import aecor.runtime.akkageneric.GenericAkkaRuntimeActor.{ Command, CommandResult }
import aecor.runtime.akkageneric.serialization.MessageSerializer
import akka.actor.ActorSystem
import akka.serialization.SerializationExtension
import org.scalacheck.Arbitrary
import org.scalacheck.Arbitrary._
import org.scalacheck.Prop.forAll
import org.scalatest.BeforeAndAfterAll
import org.scalatest.funsuite.AnyFunSuite
import scodec.bits.BitVector

import scala.concurrent.Await
import scala.concurrent.duration._

class MessageSerializerTest extends AnyFunSuite with BeforeAndAfterAll {

  implicit val system: ActorSystem = ActorSystem("test")
  val serialization = SerializationExtension(system)
  implicit val bitVector: Arbitrary[BitVector] = Arbitrary(arbitrary[Array[Byte]].map(BitVector(_)))

  def canSerialize[A <: AnyRef](a: A): Boolean = {
    val ser = serialization.serializerFor(a.getClass)
    assert(ser.isInstanceOf[MessageSerializer])
    val mser = ser.asInstanceOf[MessageSerializer]
    val (man, bytes) = (mser.manifest(a), mser.toBinary(a))
    val out = mser.fromBinary(bytes, man)
    out === a
  }

  test("serialization") {
    forAll { bb: BitVector =>
      canSerialize(Command(bb))
    }
    forAll { bb: BitVector =>
      canSerialize(CommandResult(bb))
    }
    forAll { (key: String, bb: BitVector) =>
      canSerialize(KeyedCommand(key, bb))
    }
  }

  override protected def afterAll(): Unit = {
    Await.result(system.terminate(), 5.seconds)
    ()
  }
} 
Example 19
Source File: ScheduleEventCodecSpec.scala    From aecor   with MIT License
package aecor.tests

import java.time.temporal.{ ChronoField, Temporal }
import java.time.{ Instant, LocalDateTime }

import aecor.runtime.akkapersistence.serialization.{ PersistentDecoder, PersistentEncoder }
import aecor.schedule.ScheduleEvent
import org.scalacheck.{ Arbitrary, Gen, Properties, ScalacheckShapeless }
import org.scalacheck.Prop.forAll

class ScheduleEventCodecSpec extends Properties("ScheduleEventCodec") with ScalacheckShapeless {
  val encoder = PersistentEncoder[ScheduleEvent]
  val decoder = PersistentDecoder[ScheduleEvent]

  // OpenJDK 9+ offers more precise system clock than millisecond.
  // https://bugs.openjdk.java.net/browse/JDK-8068730
  def dropBelowMillis[A <: Temporal](t: A): A =
    t.`with`(ChronoField.MICRO_OF_SECOND, t.getLong(ChronoField.MILLI_OF_SECOND) * 1000L)
      .asInstanceOf[A]

  implicit val arbitraryLocalDateTime = Arbitrary(
    Gen.lzy(Gen.const(dropBelowMillis(LocalDateTime.now())))
  )
  implicit val arbitraryInstant = Arbitrary(Gen.lzy(Gen.const(dropBelowMillis(Instant.now()))))

  property("encode/decode") = forAll { e: ScheduleEvent =>
    val repr = encoder.encode(e)
    val decoded = decoder.decode(repr)
    decoded == Right(e)
  }

} 
Example 20
Source File: TaggingTest.scala    From aecor   with MIT License
package aecor.tests

import aecor.data.{ EventTag, Tagging }
import org.scalacheck.Prop.forAll
import org.scalacheck.{ Gen, Properties }

class TaggingTest extends Properties("Tagging") {
  property("Const Tagging") = {
    val tagging = Tagging.const[Int](EventTag("foo"))
    forAll { x: Int =>
      tagging.tag(x) == Set(EventTag("foo"))
    }
  }

  property("Partitioned Tagging") = forAll(Gen.posNum[Int]) { partitionCount =>
    val tagging = Tagging.partitioned[Int](partitionCount)(EventTag(""))

    forAll { x: Int =>
      tagging.tags.contains(tagging.tag(x).head)
    }

    tagging.tags.size == partitionCount
  }
} 
Example 21
Source File: AvroGeneratorTest.scala    From ratatool   with Apache License 2.0
package com.spotify.ratatool.scalacheck

import com.spotify.ratatool.avro.specific.{RequiredNestedRecord, TestRecord}
import org.apache.beam.sdk.coders.AvroCoder
import org.apache.beam.sdk.util.CoderUtils
import org.scalacheck._
import org.scalacheck.Prop.{AnyOperators, all, forAll, propBoolean}

object AvroGeneratorTest extends Properties("AvroGenerator") {
  property("round trips") = forAll (specificRecordOf[TestRecord]) { m =>
    val coder = AvroCoder.of(classOf[TestRecord])

    val bytes = CoderUtils.encodeToByteArray(coder, m)
    val decoded = CoderUtils.decodeFromByteArray(coder, bytes)
    decoded ?= m
  }

  val richGen = specificRecordOf[TestRecord]
    .amend(Gen.choose(10, 20))(_.getNullableFields.setIntField)
    .amend(Gen.choose(10L, 20L))(_.getNullableFields.setLongField)
    .amend(Gen.choose(10.0f, 20.0f))(_.getNullableFields.setFloatField)
    .amend(Gen.choose(10.0, 20.0))(_.getNullableFields.setDoubleField)
    .amend(Gen.const(true))(_.getNullableFields.setBooleanField)
    .amend(Gen.const("hello"))(_.getNullableFields.setStringField,
      m => s => m.getNullableFields.setUpperStringField(s.toUpperCase))

  val richTupGen = (specificRecordOf[TestRecord], specificRecordOf[TestRecord]).tupled
    .amend2(specificRecordOf[RequiredNestedRecord])(_.setRequiredFields,
      _.setRequiredFields)

  property("support RichAvroGen") = forAll (richGen) { r =>
    all(
      "Int" |:
        r.getNullableFields.getIntField >= 10 && r.getNullableFields.getIntField <= 20,
      "Long" |:
        r.getNullableFields.getLongField >= 10L && r.getNullableFields.getLongField <= 20L,
      "Float" |:
        r.getNullableFields.getFloatField >= 10.0f && r.getNullableFields.getFloatField <= 20.0f,
      "Double" |:
        r.getNullableFields.getDoubleField >= 10.0 && r.getNullableFields.getDoubleField <= 20.0,
      "Boolean" |: r.getNullableFields.getBooleanField == true,
      "String" |: r.getNullableFields.getStringField == "hello",
      "String" |: r.getNullableFields.getUpperStringField == "HELLO"
    )
  }

  property("support RichAvroTupGen") = forAll (richTupGen) { case (a, b) =>
    (a.getRequiredFields.getBooleanField == b.getRequiredFields.getBooleanField
      && a.getRequiredFields.getIntField == b.getRequiredFields.getIntField
      && a.getRequiredFields.getStringField.toString == b.getRequiredFields.getStringField.toString
      && a.getRequiredFields.getLongField == b.getRequiredFields.getLongField)
  }

} 
Example 22
Source File: TableRowGeneratorTest.scala    From ratatool   with Apache License 2.0
package com.spotify.ratatool.scalacheck

import com.google.api.services.bigquery.model.TableRow
import com.spotify.ratatool.Schemas
import org.scalacheck.{Arbitrary, Gen, Properties}
import org.scalacheck.Prop.{propBoolean, all, forAll}

object TableRowGeneratorTest extends Properties("TableRowGenerator") {
  property("round trip") = forAll(tableRowOf(Schemas.tableSchema)) { m =>
    m.setF(m.getF) == m
  }

  val n = "nullable_fields"
  val r = "required_fields"
  val richGen = tableRowOf(Schemas.tableSchema)
    .amend(Gen.choose(10L, 20L))(_.getRecord(n).set("int_field"))
    .amend(Gen.choose(10.0, 20.0))(_.getRecord(n).set("float_field"))
    .amend(Gen.const(true))(_.getRecord(n).set("boolean_field"))
    .amend(Gen.const("hello"))(_.getRecord(n).set("string_field"),
      m => s => m.getRecord(n).set("upper_string_field")(s.asInstanceOf[String].toUpperCase))

  val richTupGen = (tableRowOf(Schemas.tableSchema), tableRowOf(Schemas.tableSchema)).tupled
    .amend2(Gen.choose(10L, 20L))(_.getRecord(r).set("int_field"),
      a => a.getRecord(r).set("int_field"))
    .amend2(Arbitrary.arbString.arbitrary)(_.getRecord(r).set("string_field"),
      a => a.getRecord(r).set("string_field"))
    .amend2(Arbitrary.arbBool.arbitrary)(_.getRecord(r).set("boolean_field"),
      _.getRecord(r).set("boolean_field"))

  property("support RichTableRowGen") = forAll (richGen) { r =>
    val fields = r.get(n).asInstanceOf[java.util.LinkedHashMap[String, Any]]
    val i = fields.get("int_field").asInstanceOf[Long]
    val f = fields.get("float_field").asInstanceOf[Double]
    val b = fields.get("boolean_field").asInstanceOf[Boolean]
    val s = fields.get("string_field").asInstanceOf[String]
    val upper = fields.get("upper_string_field").asInstanceOf[String]
    all(
      "Int"     |: i >= 10L && i <= 20L,
      "Float"   |: f >= 10.0 && f <= 20.0,
      "Boolean" |: b == true,
      "String"  |: s == "hello",
      "String"  |: upper == "HELLO"
    )
  }

  property("support RichTableRowTupGen") = forAll(richTupGen) { case (a, b) =>
    val ar = a.get(r).asInstanceOf[java.util.LinkedHashMap[String, Any]]
    val br = b.get(r).asInstanceOf[java.util.LinkedHashMap[String, Any]]
    (a.get("int_field").asInstanceOf[Long] == b.get("int_field").asInstanceOf[Long]
      && a.get("string_field").asInstanceOf[String] == b.get("string_field").asInstanceOf[String] &&
      a.get("boolean_field").asInstanceOf[Boolean] == b.get("boolean_field").asInstanceOf[Boolean])
  }

} 
Example 23
Source File: ProtoBufGeneratorTest.scala    From ratatool   with Apache License 2.0
package com.spotify.ratatool.scalacheck

import com.spotify.ratatool.proto.Schemas.{OptionalNestedRecord, RequiredNestedRecord, TestRecord}
import org.scalacheck.{Gen, Properties}
import org.scalacheck.Prop.{BooleanOperators, all, forAll}


object ProtoBufGeneratorTest extends Properties("ProtoBufGenerator") {
  property("round trip") = forAll(protoBufOf[TestRecord]) { m =>
    m == TestRecord.parseFrom(m.toByteArray)
  }

  val optionalNestedRecordGen: Gen[OptionalNestedRecord] = protoBufOf[OptionalNestedRecord]
    .map(_.toBuilder)
    .amend(Gen.choose(10, 20))(_.setInt32Field)
    .amend(Gen.choose(10L, 20L))(_.setInt64Field)
    .amend(Gen.choose(10.0f, 20.0f))(_.setFloatField)
    .amend(Gen.choose(10.0, 20.0))(_.setDoubleField)
    .amend(Gen.const(true))(_.setBoolField)
    .amend(Gen.const("hello"))(_.setStringField, m => s => m.setUpperStringField(s.toUpperCase))
    .map(_.build())


  val richGen: Gen[TestRecord] = protoBufOf[TestRecord].map(_.toBuilder)
    .amend(optionalNestedRecordGen)(_.setOptionalFields)
    .map(_.build())

  val richTupGen =
    (protoBufOf[TestRecord].map(_.toBuilder), protoBufOf[TestRecord].map(_.toBuilder)).tupled
    .amend2(protoBufOf[RequiredNestedRecord])(_.setRequiredFields, _.setRequiredFields)
    .map{ case (a, b) => (a.build(), b.build()) }


  property("support RichProtoGen") = forAll (richGen) { r =>
    all(
      "Int" |:
        (r.getOptionalFields.getInt32Field >= 10 && r.getOptionalFields.getInt32Field <= 20),
      "Long" |:
        r.getOptionalFields.getInt64Field >= 10L && r.getOptionalFields.getInt64Field <= 20L,
      "Float" |:
        r.getOptionalFields.getFloatField >= 10.0f && r.getOptionalFields.getFloatField <= 20.0f,
      "Double" |:
        r.getOptionalFields.getDoubleField >= 10.0 && r.getOptionalFields.getDoubleField <= 20.0,
      "Boolean" |: r.getOptionalFields.getBoolField,
      "String" |: r.getOptionalFields.getStringField == "hello",
      "String" |: r.getOptionalFields.getUpperStringField == "HELLO"
    )
  }

  property("support RichProtoTupGen") = forAll (richTupGen) { case (a, b) =>
    (a.getRequiredFields.getBoolField == b.getRequiredFields.getBoolField
      && a.getRequiredFields.getInt32Field == b.getRequiredFields.getInt32Field
      && a.getRequiredFields.getFixed64Field == b.getRequiredFields.getFixed64Field
      && a.getRequiredFields.getStringField == b.getRequiredFields.getStringField
      && a.getRequiredFields.getUint32Field == b.getRequiredFields.getUint32Field)
  }
} 
Example 24
Source File: ExampleAvroGenTest.scala    From ratatool   with Apache License 2.0
package com.spotify.ratatool.examples

import java.util.UUID

import com.spotify.ratatool.avro.specific.{EnumField, ExampleRecord}
import com.spotify.ratatool.examples.scalacheck.ExampleAvroGen
import org.scalacheck.{Gen, Properties}
import org.scalacheck.Prop.{AnyOperators, BooleanOperators, forAll}

import scala.jdk.CollectionConverters._

object ExampleAvroGenTest extends Properties("ExampleAvroGenerator") {
  val gen: Gen[ExampleRecord] = ExampleAvroGen.exampleRecordGen

  property("round trips UUID") = forAll(gen) { m =>
    UUID.fromString(m.getRecordId.toString).toString ?= m.getRecordId.toString
  }

  property("generates valid dependent int") = forAll(gen) { m =>
    (m.getIndependentIntField == 0
      && m.getDependentIntField == Int.MaxValue) :| "Max if indep is 0" ||
    (m.getIndependentIntField != 0
      && m.getDependentIntField == m.getIndependentIntField/2) :| "Half when indep is not 0"
  }

  property("generates valid dependent enum") = forAll(gen) { m =>
    (m.getIndependentStringField.toString.startsWith("Exception") &&
      m.getDependentEnumField == EnumField.Failure) :| "Is Failure on Exception" ||
    (!m.getIndependentStringField.toString.startsWith("Exception") &&
      m.getDependentEnumField == EnumField.Success) :| "Is Success when non-Exception"
  }

  property("double field within bounds") = forAll(gen) { m =>
    m.getBoundedDoubleField <= 1.0 && m.getBoundedDoubleField >= -1.0
  }

  property("map field size within bounds") = forAll(gen) { m =>
    val size = m.getNestedRecordField.getMapField.asScala.size
    size <= 5 && size >= 0
  }

  property("the record id is the same when using amend2 generators") =
    forAll(ExampleAvroGen.exampleRecordAmend2Gen) {
      case (gen1, gen2) => gen1.getRecordId == gen2.getRecordId
    }

  property("the record id is the same when using amend2 for correlated fields") =
    forAll(ExampleAvroGen.correlatedRecordGen) {
      correlatedGen =>
        correlatedGen.getRecordId == correlatedGen.getNestedRecordField.getParentRecordId
    }
} 
Example 25
Source File: OneOrMoreKeysSuite.scala    From laserdisc   with MIT License
package laserdisc
package refined.types

import org.scalacheck.Prop.forAll

final class OneOrMoreKeysSuite extends BaseSpec {
  test("OneOrMoreKeys fails to compile given non literal empty List") {
    assertNoDiff(
      compileErrors("OneOrMoreKeys(List.empty)"),
      """|error: compile-time refinement only works with literals
         |OneOrMoreKeys(List.empty)
         |             ^
         |""".stripMargin
    )
  }

  test("OneOrMoreKeys fails to compile given non literal non empty List") {
    assertNoDiff(
      compileErrors("""OneOrMoreKeys(List(Key("a")))"""),
      """|error: compile-time refinement only works with literals
         |OneOrMoreKeys(List(Key("a")))
         |             ^
         |""".stripMargin
    )
  }

  test("OneOrMoreKeys fails at runtime provided empty List") {
    intercept[IllegalArgumentException](OneOrMoreKeys.unsafeFrom(List.empty))
  }

  property("OneOrMoreKeys refines correctly provided non literal cases of non empty Lists (length > 0)") {
    forAll(keyLists.filter(_.nonEmpty)) { ks =>
      OneOrMoreKeys.from(ks) onRight (_.value == ks)
    }
  }
} 
Example 26
Source File: CodecProperties.scala    From phobos   with Apache License 2.0
package ru.tinkoff.phobos.ast

import org.scalacheck.Prop.forAll
import org.scalacheck.Properties
import ru.tinkoff.phobos.decoding.XmlDecoder
import ru.tinkoff.phobos.encoding.XmlEncoder
import org.scalacheck.{Arbitrary, Gen}

class CodecProperties extends Properties("Ast codecs") {
  import CodecProperties._

  private val encoder = XmlEncoder.fromElementEncoder[XmlEntry]("test")
  private val decoder = XmlDecoder.fromElementDecoder[XmlEntry]("test")

  property("decode(encode(ast)) === ast") = forAll { entry: XmlEntry =>
    decoder.decode(
      encoder.encode(entry)
    ) == Right(entry)
  }

  property("encode(decode(xmlAst)) === xmlAst") = forAll { entry: XmlEntry =>
    val encoded = encoder.encode(entry)

    decoder.decode(encoded).map(encoder.encode(_)) == Right(encoded)
  }
}

object CodecProperties {
  implicit val arbitraryXmlLong: Arbitrary[XmlNumber.Aux[Long]] = Arbitrary(
    Arbitrary.arbitrary[Long].map(XmlNumber.integral))

  implicit val arbitraryXmlDouble: Arbitrary[XmlNumber.Aux[Double]] = Arbitrary(
    Arbitrary.arbitrary[Double].map(XmlNumber.double))

  implicit val arbitraryXmlNumber: Arbitrary[XmlNumber] =
    Arbitrary(
      Gen.oneOf(arbitraryXmlLong.arbitrary, arbitraryXmlDouble.arbitrary)
    )

  implicit val arbitraryXmlBoolean: Arbitrary[XmlBoolean] = Arbitrary(
    Arbitrary.arbitrary[Boolean].map(XmlBoolean.fromBoolean))

  implicit val arbNonEmptyString: Arbitrary[String] = Arbitrary {
    Gen.choose(1, 10).flatMap { n =>
      Gen
        .containerOfN[List, Char](n, Gen.oneOf("abcdefghijklmnopqrstuvwxyz".toList))
        .map(_.mkString)
    }
  }

  implicit val arbitraryXmlText: Arbitrary[XmlText] = Arbitrary(
    arbNonEmptyString.arbitrary.map(XmlText(_))
  )

  implicit val arbitraryXmlLeaf: Arbitrary[XmlLeaf] = Arbitrary(
    Gen.oneOf(arbitraryXmlNumber.arbitrary, arbitraryXmlBoolean.arbitrary, arbitraryXmlText.arbitrary)
  )

  class Depth(val value: Int) extends AnyVal

  def arbitraryXmlNode(depth: Depth): Arbitrary[XmlNode] = Arbitrary {
    val arbInt           = Gen.choose(0, 5)
    def arbNames(n: Int) = Gen.containerOfN[Set, String](n, arbNonEmptyString.arbitrary)

    def arbLeafs(n: Int) =
      for {
        names <- arbNames(n)
        leafs <- Gen.containerOfN[List, XmlLeaf](n, arbitraryXmlLeaf.arbitrary)
      } yield names.toList zip leafs

    val arbNodes: Gen[List[(String, XmlEntry)]] = arbInt.flatMap { n =>
      if (depth.value > 3) arbLeafs(n)
      else {
        val depth2: Depth = new Depth(depth.value + 1)
        val arbEntries = Gen
          .containerOfN[List, XmlEntry](n, arbitraryXmlNode(depth2).arbitrary)

        for {
          names   <- arbNames(n)
          entries <- arbEntries
        } yield names.toList zip entries
      }
    }
    for {
      nAttrs <- arbInt
      attrs  <- arbLeafs(nAttrs)
      nodes  <- arbNodes
    } yield XmlNode(attrs, nodes)
  }

  implicit val arbitraryXmlEntry: Arbitrary[XmlEntry] =
    Arbitrary(arbitraryXmlNode(new Depth(0)).arbitrary)
} 
Example 27
Source File: CodecSpec.scala    From libisabelle   with Apache License 2.0
package info.hupel.isabelle.tests

import scala.math.BigInt

import org.specs2.{ScalaCheck, Specification}
import org.specs2.specification.core.Env

import org.scalacheck._
import org.scalacheck.Prop.forAll

import info.hupel.isabelle._
import info.hupel.isabelle.api.XML

class CodecSpec(val specs2Env: Env) extends Specification with BasicSetup with ScalaCheck { def is = s2"""

  Round-trip property of Codecs

  Values can be converted
    of type Unit                      ${propCodec[Unit]}
    of type Boolean                   ${propCodec[Boolean]}
    of type BigInt                    ${propCodec[BigInt]}
    of type String                    ${propCodec[String]}
    of type (BigInt, BigInt)          ${propCodec[(BigInt, BigInt)]}
    of type (String, Unit)            ${propCodec[(String, Unit)]}
    of type List[Unit]                ${propCodec[List[Unit]]}
    of type List[BigInt]              ${propCodec[List[BigInt]]}
    of type List[String]              ${propCodec[List[String]]}
    of type List[List[String]]        ${propCodec[List[List[String]]]}
    of type List[(BigInt, BigInt)]    ${propCodec[List[(BigInt, BigInt)]]}
    of type (BigInt, List[BigInt])    ${propCodec[(BigInt, List[BigInt])]}
    of type Option[BigInt]            ${propCodec[Option[BigInt]]}
    of type Option[List[BigInt]]      ${propCodec[Option[List[BigInt]]]}
    of type List[Option[BigInt]]      ${propCodec[List[Option[BigInt]]]}
    of type Either[String, BigInt]    ${propCodec[Either[String, BigInt]]}
  """

  def propCodec[A : Codec : Arbitrary] = properties(new Properties("props") {
    property("encode/decode") = forAll { (a: A) =>
      Codec[A].decode(Codec[A].encode(a)) must beRight(a)
    }

    property("YXML") = forAll { (a: A) =>
      val encoded = Codec[A].encode(a)
      XML.fromYXML(encoded.toYXML) must be_===(encoded)
    }
  })

} 
Example 28
Source File: ColumnMetadataTest.scala    From spark-vector   with Apache License 2.0
package com.actian.spark_vector.vector

import java.util.regex.Pattern

import org.apache.spark.sql.types.DecimalType

import org.scalacheck.Arbitrary.arbitrary
import org.scalacheck.Gen
import org.scalacheck.Gen.{choose, identifier}
import org.scalacheck.Prop.{forAll, propBoolean}
import org.scalatest.{FunSuite, Matchers}

import com.actian.spark_vector.test.tags.RandomizedTest

class ColumnMetadataTest extends FunSuite with Matchers {
  // Generate random column metadata and ensure the resultant StructField's are valid
  test("generated", RandomizedTest) {
    forAll(columnMetadataGen)(colMD => {
      assertColumnMetadata(colMD)
    }).check
  }

  val milliSecsPattern = Pattern.compile(".*\\.(S*)")

  def assertColumnMetadata(columnMD: ColumnMetadata): Boolean = {
    val structField = columnMD.structField
    structField.dataType match {
      // For decimal type, ensure the scale and precision match
      case decType: DecimalType =>
        decType.precision should be(columnMD.precision)
        decType.scale should be(columnMD.scale)
      case _ =>
    }
    true
  }

  val columnMetadataGen: Gen[ColumnMetadata] =
    for {
      name <- identifier
      typeName <- VectorTypeGen.vectorJdbcTypeGen
      nullable <- arbitrary[Boolean]
      precision <- choose(0, 20)
      scale <- choose(0, Math.min(20, precision))
    } yield ColumnMetadata(name, typeName, nullable, precision, scale)
} 
Example 29
Source File: StringDiffSpecification.scala    From scala-clippy   with Apache License 2.0 5 votes vote down vote up
package com.softwaremill.clippy

import org.scalacheck.Prop.forAll
import org.scalacheck.Properties

class StringDiffSpecification extends Properties("StringDiff") with TypeNamesGenerators {

  val S     = "S"
  val E     = "E"
  val AddSE = (s: String) => S + s + E

  def innerTypeDiffsCorrectly(fourTypes: List[String]): Boolean = {
    val List(x, y, v, z) = fourTypes
    val expected         = s"$x[$y[$z]]"
    val actual           = s"$x[$v[$z]]"
    val msg              = new StringDiff(expected, actual, AddSE).diff("expected: %s actual: %s")
    msg == s"""expected: $x[$S$y$E[$z]] actual: $x[$S$v$E[$z]]"""
  }

  def twoTypesAreFullyDiff(twoTypes: List[String]): Boolean = {
    val List(x, y) = twoTypes
    new StringDiff(x, y, AddSE).diff("expected: %s actual: %s") == s"""expected: $S$x$E actual: $S$y$E"""
  }

  property("X[Y[Z]] vs X[V[Z]] always gives X[<diff>[Z]] excluding packages") =
    forAll(different(singleTypeName)(4))(innerTypeDiffsCorrectly)

  property("X[Y[Z]] vs X[V[Z]] always gives X[<diff>[Z]] if Y and V have common prefix") =
    forAll(typesWithCommonPrefix(4))(innerTypeDiffsCorrectly)

  property("X[Y[Z]] vs X[V[Z]] always gives X[<diff>[Z]] if Y and V have common suffix") =
    forAll(typesWithCommonSuffix(4))(innerTypeDiffsCorrectly)

  property("A[X] vs B[X] always marks outer as diff for A != B when A and B have common prefix") =
    forAll(typesWithCommonPrefix(2), complexTypeName(maxDepth = 3)) { (outerTypes, x) =>
      val List(a, b) = outerTypes
      val expected   = s"$a[$x]"
      val actual     = s"$b[$x]"
      val msg        = new StringDiff(expected, actual, AddSE).diff("expected: %s actual: %s")
      msg == s"""expected: $S$a$E[$x] actual: $S$b$E[$x]"""
    }

  property("package.A[X] vs package.B[X] always gives package.<diff>A</diff>[X]") =
    forAll(javaPackage, different(singleTypeName)(2), complexTypeName(maxDepth = 3)) { (pkg, outerTypes, x) =>
      val List(a, b) = outerTypes
      val expected   = s"$pkg$a[$x]"
      val actual     = s"$pkg$b[$x]"
      val msg        = new StringDiff(expected, actual, AddSE).diff("expected: %s actual: %s")
      msg == s"""expected: $pkg$S$a$E[$x] actual: $pkg$S$b$E[$x]"""
    }

  property("any complex X vs Y is a full diff when X and Y don't have common suffix nor prefix") =
    forAll(different(complexTypeName(maxDepth = 4))(2).suchThat(noCommonPrefixSuffix))(twoTypesAreFullyDiff)

  property("any single X vs Y is a full diff") = forAll(different(singleTypeName)(2))(twoTypesAreFullyDiff)

  def noCommonPrefixSuffix(twoTypes: List[String]): Boolean = {
    val List(x, y) = twoTypes
    x.head != y.head && x.last != y.last
  }

} 
Example 30
Source File: laws.scala    From newts   with Apache License 2.0 5 votes vote down vote up
package newts.internal

import cats.kernel.Eq
import cats.kernel.laws.OrderLaws
import cats.kernel.laws.discipline.OrderTests
import cats.laws._
import cats.laws.discipline._
import cats.syntax.order._
import org.scalacheck.{Arbitrary, Cogen}
import org.scalacheck.Prop.forAll

object laws {

  trait MaxBoundedLaws[A] extends OrderLaws[A] {

    override implicit def E: MaxBounded[A]

    def maxValue(a: A): IsEq[A] =
      (E.maxValue max a) <-> E.maxValue

  }
  object MaxBoundedLaws {
    def apply[A](implicit ev: MaxBounded[A]): MaxBoundedLaws[A] =
      new MaxBoundedLaws[A] { def E: MaxBounded[A] = ev }
  }


  trait MinBoundedLaws[A] extends OrderLaws[A] {

    override implicit def E: MinBounded[A]

    def minValue(a: A): IsEq[A] =
      (E.minValue min a) <-> E.minValue

  }
  object MinBoundedLaws {
    def apply[A](implicit ev: MinBounded[A]): MinBoundedLaws[A] =
      new MinBoundedLaws[A] { def E: MinBounded[A] = ev }
  }

  object discipline {

    trait MaxBoundedTests[A] extends OrderTests[A] {

      override def laws: MaxBoundedLaws[A]

      def maxBounded(implicit arbA: Arbitrary[A], arbF: Arbitrary[A => A], eqOA: Eq[Option[A]], eqA: Eq[A]): RuleSet =
        new DefaultRuleSet(
          "maxBounded",
          Some(order),
          "maxValue" -> forAll(laws.maxValue _)
        )

    }
    object MaxBoundedTests {
      def apply[A: MaxBounded]: MaxBoundedTests[A] =
        new MaxBoundedTests[A] { def laws: MaxBoundedLaws[A] = MaxBoundedLaws[A] }
    }

    trait MinBoundedTests[A] extends OrderTests[A] {

      override def laws: MinBoundedLaws[A]

      def minBounded(implicit arbA: Arbitrary[A], arbF: Arbitrary[A => A], eqOA: Eq[Option[A]], eqA: Eq[A]): RuleSet =
        new DefaultRuleSet(
          "minBounded",
          Some(order),
          "minValue" -> forAll(laws.maxValue _)
        )

    }
    object MinBoundedTests {
      def apply[A: MinBounded]: MinBoundedTests[A] =
        new MinBoundedTests[A] { def laws: MinBoundedLaws[A] = MinBoundedLaws[A] }
    }

  }

} 
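Stripped of the cats-laws machinery, the maxValue law above just says that the maximum element absorbs any other value under max. A standalone sketch specialised to Int (MaxBoundedSketch is an illustrative name, not part of newts):

import org.scalacheck.Prop.forAll
import org.scalacheck.Properties

object MaxBoundedSketch extends Properties("MaxBounded sketch") {
  // The maxValue law for Int: Int.MaxValue absorbs any other value.
  property("Int.MaxValue max a == Int.MaxValue") = forAll { (a: Int) =>
    math.max(Int.MaxValue, a) == Int.MaxValue
  }
} 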
Example 31
Source File: NewtypeTests.scala    From http4s-poc-api   with MIT License 5 votes vote down vote up
package external

import cats.instances.int._
import cats.instances.list._
import cats.instances.option._
import cats.syntax.eq._
import external.library.newtype
import org.scalacheck.Prop.forAll
import org.scalacheck.Properties

final class NewtypeTests extends Properties("newtype") {
  property("unMk gives the original value") = forAll { i: Int => newtype[Int](i).unMk === i }

  property("unMkF gives the original values in F[_] for List") = forAll { xs: List[Int] =>
    val nt = newtype[Int]
    nt.unMkF(nt.mkF(xs)) === xs
  }

  property("unMkF gives the original values in F[_] for Option") = forAll { os: Option[Int] =>
    val nt = newtype[Int]
    nt.unMkF(nt.mkF(os)) === os
  }
} 
Example 32
Source File: RangeOffsetSuite.scala    From laserdisc   with MIT License 5 votes vote down vote up
package laserdisc
package refined.types

import org.scalacheck.Prop.forAll

final class RangeOffsetSuite extends BaseSpec {
  test("RangeOffset fails to compile given out of range Int (< 0)") {
    assertNoDiff(
      compileErrors("RangeOffset(-1)"),
      """|error: Left predicate of (!(-1 < 0) && !(-1 > 536870911)) failed: Predicate (-1 < 0) did not fail.
         |RangeOffset(-1)
         |           ^
         |""".stripMargin
    )
  }

  test("RangeOffset fails to compile given out of range Int (> 536870911)") {
    assertNoDiff(
      compileErrors("RangeOffset(536870912)"),
      """|error: Right predicate of (!(536870912 < 0) && !(536870912 > 536870911)) failed: Predicate (536870912 > 536870911) did not fail.
         |RangeOffset(536870912)
         |           ^
         |""".stripMargin
    )
  }

  property("RangeOffset fails at runtime provided non literal cases of out of range Ints (i < 0 | i > 536870911)") {
    forAll(ints.filterNot(rangeOffsetIsValid)) { i =>
      intercept[IllegalArgumentException](RangeOffset.unsafeFrom(i))
    }
  }

  test("RangeOffset compiles given edge cases (0)") {
    RangeOffset(0)
  }

  test("RangeOffset compiles given edge cases (536870911)") {
    RangeOffset(536870911)
  }

  property("RangeOffset refines correctly provided non literal cases of in range Ints (0 <= i <= 536870911)") {
    forAll(rangeOffsetGen) { i =>
      RangeOffset.from(i) onRight (_.value == i)
    }
  }
} 
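The laserdisc suites in this listing all exercise the same three behaviours: literal refinement at compile time, unsafeFrom throwing on invalid runtime input, and from refining valid input. A self-contained sketch of the runtime half, using a hypothetical Port smart constructor instead of refined types:

import org.scalacheck.Prop.forAll
import org.scalacheck.{Gen, Properties}

object SmartConstructorSketch extends Properties("SmartConstructor sketch") {
  final case class Port(value: Int)

  // Hypothetical smart constructor: valid range is 0 to 65535.
  def from(i: Int): Either[String, Port] =
    if (i >= 0 && i <= 65535) Right(Port(i)) else Left(s"$i is out of range")

  def unsafeFrom(i: Int): Port =
    from(i).fold(msg => throw new IllegalArgumentException(msg), identity)

  // Out-of-range inputs must be rejected at runtime.
  property("unsafeFrom throws on out-of-range Ints") =
    forAll(Gen.oneOf(Gen.choose(Int.MinValue, -1), Gen.choose(65536, Int.MaxValue))) { i =>
      scala.util.Try(unsafeFrom(i)).isFailure
    }

  // In-range inputs round-trip through the constructor.
  property("from refines in-range Ints") =
    forAll(Gen.choose(0, 65535)) { i =>
      from(i).exists(_.value == i)
    }
} 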
Example 33
Source File: ConnectionNameSuite.scala    From laserdisc   with MIT License 5 votes vote down vote up
package laserdisc
package refined.types

import org.scalacheck.Prop.forAll

final class ConnectionNameSuite extends BaseSpec {
  test("ConnectionName fails to compile given an empty string") {
    assertNoDiff(
      compileErrors("""ConnectionName("")"""),
      """|error: Left predicate of (!isEmpty() && ()) failed: Predicate isEmpty() did not fail.
         |ConnectionName("")
         |              ^
         |""".stripMargin
    )
  }

  test("ConnectionName fails to compile given a space") {
    assertNoDiff(
      compileErrors("""ConnectionName(" ")"""),
      """|error: Right predicate of (!isEmpty( ) && ((!isWhitespace(' ') && !isControl(' ')))) failed: Predicate failed: ((!isWhitespace(' ') && !isControl(' '))).
         |ConnectionName(" ")
         |              ^
         |""".stripMargin
    )
  }

  property("ConnectionName fails at runtime provided non literal cases of strings that contain spaces") {
    forAll(stringsWithSpacesGen.filterNot(connectionNameIsValid)) { s =>
      intercept[IllegalArgumentException](ConnectionName.unsafeFrom(s))
    }
  }

  test("ConnectionName compiles given non empty String with no spaces") {
    ConnectionName("a")
  }

  property("ConnectionName refines correctly provided non literal cases of non empty strings with no spaces") {
    forAll(connectionNameGen) { s =>
      ConnectionName.from(s) onRight (_.value == s)
    }
  }
} 
Example 34
Source File: NodeIdSuite.scala    From laserdisc   with MIT License 5 votes vote down vote up
package laserdisc
package refined.types

import org.scalacheck.Prop.forAll

final class NodeIdSuite extends BaseSpec {
  test("NodeId fails to compile given a non conformant String (length < 40)") {
    assertNoDiff(
      compileErrors("""NodeId("0123456789abcdef0123456789abcdef0123456")"""),
      """|error: Predicate failed: "0123456789abcdef0123456789abcdef0123456".matches("[0-9a-f]{40}").
         |NodeId("0123456789abcdef0123456789abcdef0123456")
         |      ^
         |""".stripMargin
    )
  }

  test("NodeId fails to compile given a non conformant String (length > 40)") {
    assertNoDiff(
      compileErrors("""NodeId("0123456789abcdef0123456789abcdef012345678")"""),
      """|error: Predicate failed: "0123456789abcdef0123456789abcdef012345678".matches("[0-9a-f]{40}").
         |NodeId("0123456789abcdef0123456789abcdef012345678")
         |      ^
         |""".stripMargin
    )
  }

  test("NodeId fails to compile given a non conformant String (uppercase)") {
    assertNoDiff(
      compileErrors("""NodeId("0123456789abcdEf0123456789abcdef01234567")"""),
      """|error: Predicate failed: "0123456789abcdEf0123456789abcdef01234567".matches("[0-9a-f]{40}").
         |NodeId("0123456789abcdEf0123456789abcdef01234567")
         |      ^
         |""".stripMargin
    )
  }

  test("NodeId fails to compile given a non conformant String (invalid chars)") {
    assertNoDiff(
      compileErrors("""NodeId("0123456789abcd&f0123456789abcdef01234567&fghijk")"""),
      """|error: Predicate failed: "0123456789abcd&f0123456789abcdef01234567&fghijk".matches("[0-9a-f]{40}").
         |NodeId("0123456789abcd&f0123456789abcdef01234567&fghijk")
         |      ^
         |""".stripMargin
    )
  }

  property("NodeId fails at runtime provided non literal cases of non conformant Strings") {
    forAll(strings.filterNot(nodeIdIsValid)) { d =>
      intercept[IllegalArgumentException](NodeId.unsafeFrom(d))
    }
  }

  test("NodeId compiles given conformant String") {
    NodeId("0123456789abcdef0123456789abcdef01234567")
  }

  property("NodeId refines correctly provided non literal cases of conformant Strings") {
    forAll(nodeIdGen) { i =>
      NodeId.from(i) onRight (_.value == i)
    }
  }
} 
Example 35
Source File: LongitudeSuite.scala    From laserdisc   with MIT License 5 votes vote down vote up
package laserdisc
package refined.types

import org.scalacheck.Prop.forAll

final class LongitudeSuite extends BaseSpec {
  test("Longitude fails to compile given out of range Double (< -180.0D)") {
    assertNoDiff(
      compileErrors("Longitude(-180.00000001D)"),
      """|error: Left predicate of (!(-180.00000001 < -180.0) && !(-180.00000001 > 180.0)) failed: Predicate (-180.00000001 < -180.0) did not fail.
         |Longitude(-180.00000001D)
         |         ^
         |""".stripMargin
    )
  }

  test("Longitude fails to compile given out of range Double (> 180.0D)") {
    assertNoDiff(
      compileErrors("Longitude(180.00000001D)"),
      """|error: Right predicate of (!(180.00000001 < -180.0) && !(180.00000001 > 180.0)) failed: Predicate (180.00000001 > 180.0) did not fail.
         |Longitude(180.00000001D)
         |         ^
         |""".stripMargin
    )
  }

  property("Longitude fails at runtime provided non literal cases of out of range Doubles (d < -180.0D | d > 180.0D)") {
    forAll(doubles.filterNot(longitudeIsValid)) { d =>
      intercept[IllegalArgumentException](Longitude.unsafeFrom(d))
    }
  }

  test("Longitude compiles given edge cases (-180.0D)") {
    Longitude(-180.0d)
  }

  test("Longitude compiles given edge cases (180.0D)") {
    Longitude(180.0d)
  }

  property("Longitude refines correctly provided non literal cases of in range Doubles (-180.0D <= d <= 180.0D)") {
    forAll(longitudeGen) { l =>
      Longitude.from(l) onRight (_.value == l)
    }
  }
} 
Example 36
Source File: NonZeroLongSuite.scala    From laserdisc   with MIT License 5 votes vote down vote up
package laserdisc
package refined.types

import org.scalacheck.Prop.forAll

final class NonZeroLongSuite extends BaseSpec {
  test("NonZeroLong fails to compile given 0L") {
    assertNoDiff(
      compileErrors("NonZeroLong(0L)"),
      """|error: Predicate (0 == 0) did not fail.
         |NonZeroLong(0L)
         |           ^
         |""".stripMargin
    )
  }

  property("NonZeroLong refines correctly provided non literal cases of valid Longs (l != 0L)") {
    forAll(nonZeroLongGen) { l =>
      NonZeroLong.from(l) onRight (_.value == l)
    }
  }
} 
Example 37
Source File: ExampleTableRowGenTest.scala    From ratatool   with Apache License 2.0 5 votes vote down vote up
package com.spotify.ratatool.examples

import com.google.api.services.bigquery.model.TableRow
import com.spotify.ratatool.examples.scalacheck.ExampleTableRowGen
import com.spotify.ratatool.scalacheck._
import org.scalacheck.{Gen, Properties}
import org.scalacheck.Prop.{AnyOperators, forAll}


object ExampleTableRowGenTest extends Properties("ExampleTableRowGenerator") {
  val gen: Gen[TableRow] = ExampleTableRowGen.tableRowGen
  val listGen: Gen[List[TableRow]] = Gen.listOfN(1000, gen)

  property("generates Foo and Bar more frequently than Fizz and Buzz") = forAll(listGen) { l =>
    val stringFields: Seq[String] = l.flatMap { r =>
      Option(r.getRecord("nullable_record"))
        .map(_.get("frequency_string_field").asInstanceOf[String])
    }

    stringFields.count(s => s == "Foo" || s == "Bar") >
      stringFields.count(s => s == "Fizz" || s == "Buzz")
  }

  property("generates valid dependent bytes") = forAll(gen) { r =>
    val s = r.getRecord("required_record").get("independent_string_field").asInstanceOf[String]
    val b = r.getRecord("required_record").get("dependent_bytes_field").asInstanceOf[Array[Byte]]
    new String(b) ?= s
  }

  property("the record id is the same when using amend2 generators") =
    forAll(ExampleTableRowGen.exampleRecordAmend2Gen) { case (gen1, gen2) =>
      gen1.get("record_id") == gen2.get("record_id")
    }

  property("the record id is the same when using amend2 for correlated fields") =
    forAll(ExampleTableRowGen.correlatedRecordGen) {
      case (correlatedRecord) => correlatedRecord.get("record_id") ==
        correlatedRecord.get("parent_record_id")
    }

} 
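The ?= operator used above comes from Prop.AnyOperators; it asserts equality like ==, but reports both values when the property fails. A minimal standalone sketch:

import org.scalacheck.Prop.{AnyOperators, forAll}
import org.scalacheck.Properties

object AnyOperatorsSketch extends Properties("AnyOperators sketch") {
  // ?= builds a Prop that prints the differing values on failure.
  property("reversing twice is the identity") = forAll { (xs: List[Int]) =>
    xs.reverse.reverse ?= xs
  }
} 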
Example 38
Source File: TwoOrMoreWeightedKeysSuite.scala    From laserdisc   with MIT License 5 votes vote down vote up
package laserdisc
package refined.types

import org.scalacheck.Prop.forAll

final class TwoOrMoreWeightedKeysSuite extends BaseSpec {
  test("TwoOrMoreWeightedKeys fails to compile given non literal empty List") {
    assertNoDiff(
      compileErrors("TwoOrMoreWeightedKeys(List.empty)"),
      """|error: compile-time refinement only works with literals
         |TwoOrMoreWeightedKeys(List.empty)
         |                     ^
         |""".stripMargin
    )
  }

  test("TwoOrMoreWeightedKeys fails to compile given non literal single element List") {
    assertNoDiff(
      compileErrors("""TwoOrMoreWeightedKeys(List(Key("a") -> ValidDouble(42.0D)))"""),
      """|error: compile-time refinement only works with literals
         |TwoOrMoreWeightedKeys(List(Key("a") -> ValidDouble(42.0D)))
         |                     ^
         |""".stripMargin
    )
  }

  test("TwoOrMoreWeightedKeys fails to compile given non literal List of two elements") {
    assertNoDiff(
      compileErrors("""TwoOrMoreWeightedKeys(List(Key("a") -> ValidDouble(42.0D), Key("b") -> ValidDouble(23.0D)))"""),
      """|error: compile-time refinement only works with literals
         |TwoOrMoreWeightedKeys(List(Key("a") -> ValidDouble(42.0D), Key("b") -> ValidDouble(23.0D)))
         |                     ^
         |""".stripMargin
    )
  }

  test("TwoOrMoreWeightedKeys fails at runtime provided empty List") {
    intercept[IllegalArgumentException](TwoOrMoreWeightedKeys.unsafeFrom(List.empty))
  }

  test("TwoOrMoreWeightedKeys fails at runtime provided single element List") {
    intercept[IllegalArgumentException](TwoOrMoreWeightedKeys.unsafeFrom(List(Key("a") -> ValidDouble(42.0d))))
  }

  property("TwoOrMoreWeightedKeys refines correctly provided non literal cases of Lists of length > 1") {
    forAll(wightedKeyLists.filter(_.size > 1)) { ks =>
      TwoOrMoreWeightedKeys.from(ks) onRight (_.value == ks)
    }
  }
} 
Example 39
Source File: GeoHashSuite.scala    From laserdisc   with MIT License 5 votes vote down vote up
package laserdisc
package refined.types

import org.scalacheck.Prop.forAll

final class GeoHashSuite extends BaseSpec {
  test("GeoHash fails to compile given a non conformant String (length < 11)") {
    assertNoDiff(
      compileErrors("""GeoHash("abcdefghij")"""),
      """|error: Predicate failed: "abcdefghij".matches("[a-z0-9]{11}").
         |GeoHash("abcdefghij")
         |       ^
         |""".stripMargin
    )
  }

  test("GeoHash fails to compile given a non conformant String (length > 11)") {
    assertNoDiff(
      compileErrors("""GeoHash("abcdefghijkl")"""),
      """|error: Predicate failed: "abcdefghijkl".matches("[a-z0-9]{11}").
         |GeoHash("abcdefghijkl")
         |       ^
         |""".stripMargin
    )
  }

  test("GeoHash fails to compile given a non conformant String (uppercase)") {
    assertNoDiff(
      compileErrors("""GeoHash("abCdefghijk")"""),
      """|error: Predicate failed: "abCdefghijk".matches("[a-z0-9]{11}").
         |GeoHash("abCdefghijk")
         |       ^
         |""".stripMargin
    )
  }

  test("GeoHash fails to compile given a non conformant String (invalid chars)") {
    assertNoDiff(
      compileErrors("""GeoHash("abcd&fghijk")"""),
      """|error: Predicate failed: "abcd&fghijk".matches("[a-z0-9]{11}").
         |GeoHash("abcd&fghijk")
         |       ^
         |""".stripMargin
    )
  }

  property("GeoHash fails at runtime provided non literal cases of non conformant Strings") {
    forAll(strings.filterNot(geoHashIsValid)) { s =>
      intercept[IllegalArgumentException](GeoHash.unsafeFrom(s))
    }
  }

  test("GeoHash compiles given conformant String") {
    GeoHash("abcd3fgh1jk")
  }

  property("GeoHash refines correctly provided non literal cases of conformant Strings") {
    forAll(geoHashGen) { s =>
      GeoHash.from(s) onRight (_.value == s)
    }
  }
} 
Example 40
Source File: GlobPatternSuite.scala    From laserdisc   with MIT License 5 votes vote down vote up
package laserdisc
package refined.types

import org.scalacheck.Prop.forAll

final class GlobPatternSuite extends BaseSpec {
  test("GlobPattern fails to compile given an empty String") {
    assertNoDiff(
      compileErrors("""GlobPattern("")"""),
      """|error: Predicate failed: "".matches("(\[?[\w\*\?]+\]?)+").
         |GlobPattern("")
         |           ^
         |""".stripMargin
    )
  }

  test("GlobPattern fails to compile given a non conformant String") {
    assertNoDiff(
      compileErrors("""GlobPattern("!")"""),
      """|error: Predicate failed: "!".matches("(\[?[\w\*\?]+\]?)+").
         |GlobPattern("!")
         |           ^
         |""".stripMargin
    )
  }

  property("GlobPattern fails at runtime provided non literal cases of non conformant Strings") {
    forAll(strings.filterNot(globPatternIsValid)) { s =>
      intercept[IllegalArgumentException](GlobPattern.unsafeFrom(s))
    }
  }

  test("GlobPattern compiles given conformant String") {
    GlobPattern("abc*fg?1jk")
    GlobPattern("a[bc*]fg?1jk")
  }

  property("GlobPattern refines correctly provided non literal cases of conformant Strings") {
    forAll(globPatternGen) { s =>
      GlobPattern.from(s) onRight (_.value == s)
    }
  }
} 
Example 41
Source File: ValidDoubleSuite.scala    From laserdisc   with MIT License 5 votes vote down vote up
package laserdisc
package refined.types

import org.scalacheck.Prop.forAll

final class ValidDoubleSuite extends BaseSpec {
  test("ValidDouble fails to compile given Double.NaN") {
    assertNoDiff(
      compileErrors("ValidDouble(Double.NaN)"),
      """|error: Predicate failed: (NaN != NaN).
         |ValidDouble(Double.NaN)
         |           ^
         |""".stripMargin
    )
  }

  test("ValidDouble compiles given edge cases (-1.7976931348623157E308) -> can't use Double.MinValue as not a literal") {
    ValidDouble(-1.7976931348623157e308)
  }

  test("ValidDouble compiles given edge cases (Double.MaxValue)") {
    ValidDouble(Double.MaxValue)
  }

  property("ValidDouble refines correctly provided non literal cases of valid Doubles (d != Double.NaN)") {
    forAll(doubles.filter(validDoubleIsValid)) { d =>
      ValidDouble.from(d) onRight (_.value == d)
    }
  }
} 
Example 42
Source File: StringLengthSuite.scala    From laserdisc   with MIT License 5 votes vote down vote up
package laserdisc
package refined.types

import org.scalacheck.Prop.forAll

final class StringLengthSuite extends BaseSpec {
  test("StringLength fails to compile given out of range Long (< 0L)") {
    assertNoDiff(
      compileErrors("StringLength(-1L)"),
      """|error: Left predicate of (!(-1 < 0) && !(-1 > 4294967295)) failed: Predicate (-1 < 0) did not fail.
         |StringLength(-1L)
         |            ^
         |""".stripMargin
    )
  }

  test("StringLength fails to compile given out of range Long (> 4294967295L)") {
    assertNoDiff(
      compileErrors("StringLength(4294967296L)"),
      """|error: Right predicate of (!(4294967296 < 0) && !(4294967296 > 4294967295)) failed: Predicate (4294967296 > 4294967295) did not fail.
         |StringLength(4294967296L)
         |            ^
         |""".stripMargin
    )
  }

  property("StringLength fails at runtime provided non literal cases of out of range Longs (l < 0L | l > 4294967295L)") {
    forAll(longs.filterNot(stringLengthIsValid)) { l =>
      intercept[IllegalArgumentException](StringLength.unsafeFrom(l))
    }
  }

  test("StringLength compiles given edge cases (0L)") {
    StringLength(0L)
  }

  test("StringLength compiles given edge cases (4294967295L)") {
    StringLength(4294967295L)
  }

  property("StringLength refines correctly provided non literal cases of in range Longs (0L <= l <= 4294967295L)") {
    forAll(stringLengthGen) { l =>
      StringLength.from(l) onRight (_.value == l)
    }
  }
} 
Example 43
Source File: NonZeroDoubleSuite.scala    From laserdisc   with MIT License 5 votes vote down vote up
package laserdisc
package refined.types

import org.scalacheck.Prop.forAll

final class NonZeroDoubleSuite extends BaseSpec {
  test("NonZeroDouble fails to compile given 0.0D") {
    assertNoDiff(
      compileErrors("NonZeroDouble(0.0D)"),
      """|error: Right predicate of ((0.0 != NaN) && !(0.0 == 0.0)) failed: Predicate (0.0 == 0.0) did not fail.
         |NonZeroDouble(0.0D)
         |             ^
         |""".stripMargin
    )
  }

  test("NonZeroDouble fails to compile given NaN") {
    assertNoDiff(
      compileErrors("NonZeroDouble(Double.NaN)"),
      """|error: Left predicate of ((NaN != NaN) && !(NaN == 0.0)) failed: Predicate failed: (NaN != NaN).
         |NonZeroDouble(Double.NaN)
         |             ^
         |""".stripMargin
    )
  }

  property("NonZeroDouble refines correctly provided non literal cases of valid Doubles (d != 0.0D)") {
    forAll(nonZeroDoubleGen) { d =>
      NonZeroDouble.from(d) onRight (_.value == d)
    }
  }
} 
Example 44
Source File: LatitudeSuite.scala    From laserdisc   with MIT License 5 votes vote down vote up
package laserdisc
package refined.types

import org.scalacheck.Prop.forAll

final class LatitudeSuite extends BaseSpec {
  test("Latitude fails to compile given out of range Double (< -85.05112878D)") {
    assertNoDiff(
      compileErrors("Latitude(-85.05112879D)"),
      """|error: Left predicate of (!(-85.05112879 < -85.05112878) && !(-85.05112879 > 85.05112878)) failed: Predicate (-85.05112879 < -85.05112878) did not fail.
         |Latitude(-85.05112879D)
         |        ^
         |""".stripMargin
    )
  }

  test("Latitude fails to compile given out of range Double (> 85.05112878D)") {
    assertNoDiff(
      compileErrors("Latitude(85.05112879D)"),
      """|error: Right predicate of (!(85.05112879 < -85.05112878) && !(85.05112879 > 85.05112878)) failed: Predicate (85.05112879 > 85.05112878) did not fail.
         |Latitude(85.05112879D)
         |        ^
         |""".stripMargin
    )
  }

  property("Latitude fails at runtime provided non literal cases of out of range Doubles (d < -85.05112878D | d > 85.05112878D)") {
    forAll(doubles.filterNot(latitudeIsValid)) { d =>
      intercept[IllegalArgumentException](Latitude.unsafeFrom(d))
    }
  }

  test("Key compiles given edge cases (-85.05112878D)") {
    Latitude(-85.05112878d)
  }

  test("Key compiles given edge cases (85.05112878D)") {
    Latitude(85.05112878d)
  }

  property("Latitude refines correctly provided non literal cases of in range Doubles (-85.05112878D <= d <= 85.05112878D)") {
    forAll(latitudeGen) { l =>
      Latitude.from(l) onRight (_.value == l)
    }
  }
} 
Example 45
Source File: OneOrMoreSuite.scala    From laserdisc   with MIT License 5 votes vote down vote up
package laserdisc
package refined.types

import org.scalacheck.Prop.forAll

final class OneOrMoreSuite extends BaseSpec {
  property("OneOrMore fails at runtime provided empty List") {
    intercept[IllegalArgumentException](OneOrMore.unsafeFrom(List.empty[Int]))
  }

  property("OneOrMore refines correctly provided non literal cases of non empty Lists (length > 0)") {
    forAll(intLists.filter(_.nonEmpty)) { l =>
      OneOrMore.from(l) onRight (_.value == l)
    }
  }
} 
Example 46
Source File: NonNegLongSuite.scala    From laserdisc   with MIT License 5 votes vote down vote up
package laserdisc
package refined.types

import org.scalacheck.Prop.forAll

final class NonNegLongSuite extends BaseSpec {
  test("NonNegLong fails to compile given out of range Longs (< 0L)") {
    assertNoDiff(
      compileErrors("NonNegLong(-1L)"),
      """|error: Predicate (-1 < 0) did not fail.
         |NonNegLong(-1L)
         |          ^
         |""".stripMargin
    )
  }

  property("NonNegLong fails at runtime provided non literal cases of out of range Longs (l < 0L)") {
    forAll(longs.filterNot(nonNegLongIsValid)) { l =>
      intercept[IllegalArgumentException](NonNegLong.unsafeFrom(l))
    }
  }

  test("NonNegLong compiles given edge cases (0L)") {
    NonNegLong(0L)
  }

  property("NonNegLong refines correctly provided non literal cases of in range Longs (l > 0L)") {
    forAll(nonNegLongGen) { l =>
      NonNegLong.from(l) onRight (_.value == l)
    }
  }
} 
Example 47
Source File: NonNegIntSuite.scala    From laserdisc   with MIT License 5 votes vote down vote up
package laserdisc
package refined.types

import org.scalacheck.Prop.forAll

final class NonNegIntSuite extends BaseSpec {
  test("NonNegInt fails to compile given out of range Ints (< 0)") {
    assertNoDiff(
      compileErrors("NonNegInt(-1)"),
      """|error: Predicate (-1 < 0) did not fail.
         |NonNegInt(-1)
         |         ^
         |""".stripMargin
    )
  }

  property("NonNegInt fails at runtime provided non literal cases of out of range Ints (i < 0)") {
    forAll(ints.filterNot(nonNegIntIsValid)) { i =>
      intercept[IllegalArgumentException](NonNegInt.unsafeFrom(i))
    }
  }

  test("NonNegInt compiles given edge cases (0)") {
    NonNegInt(0)
  }

  property("NonNegInt refines correctly provided non literal cases of in range Ints (i > 0)") {
    forAll(nonNegIntGen) { i =>
      NonNegInt.from(i) onRight (_.value == i)
    }
  }
} 
Example 48
Source File: CheckJsIntegerSpecification.scala    From swagger-check   with MIT License 5 votes vote down vote up
package de.leanovate.swaggercheck.shrinkable

import org.scalacheck.Prop.{BooleanOperators, forAll}
import org.scalacheck.{Arbitrary, Properties, Shrink}

object CheckJsIntegerSpecification extends Properties("JsInteger") {
  property("shrink no min/max") = forAll(Arbitrary.arbitrary[BigInt].suchThat(_ != 0)) {
    value =>
      val original = CheckJsInteger(None, None, value)

      val shrink = Shrink.shrink(original)

      shrink.nonEmpty :| "Shrink not empty" && shrink.forall {
        shrinked =>
          if (value < 0)
            shrinked.min.isEmpty && shrinked.max.isEmpty && shrinked.value > value
          else
            shrinked.min.isEmpty && shrinked.max.isEmpty && shrinked.value < value
      } :| "Shrink values valid"
  }

  property("shrink no max") = forAll(
    Arbitrary.arbitrary[BigInt].suchThat(_ != 0),
    Arbitrary.arbitrary[BigInt].suchThat(_ != 0).map(_.abs)) {
    (min, diff) =>
      val value = min + diff
      val original = CheckJsInteger(Some(min), None, value)

      val shrink = Shrink.shrink(original)

      if (value == 0)
        shrink.isEmpty :| "Shrink empty"
      else
        shrink.nonEmpty :| "Shrink not empty" && shrink.forall {
          shrinked =>
            if (value < 0)
              shrinked.min.contains(min) && shrinked.max.isEmpty && shrinked.value > value && shrinked.value >= min
            else
              shrinked.min.contains(min) && shrinked.max.isEmpty && shrinked.value < value && shrinked.value >= min
        } :| "Shrink values valid"
  }

  property("shrink no min") = forAll(
    Arbitrary.arbitrary[BigInt].suchThat(_ != 0),
    Arbitrary.arbitrary[BigInt].suchThat(_ != 0).map(_.abs)) {
    (max, diff) =>
      val value = max - diff
      val original = CheckJsInteger(None, Some(max), value)

      val shrink = Shrink.shrink(original)

      if (value == 0)
        shrink.isEmpty :| "Shrink empty"
      else
        shrink.nonEmpty :| "Shrink not empty" && shrink.forall {
          shrinked =>
            if (value < 0)
              shrinked.max.contains(max) && shrinked.min.isEmpty && shrinked.value > value && shrinked.value <= max
            else
              shrinked.max.contains(max) && shrinked.min.isEmpty && shrinked.value < value && shrinked.value <= max
        } :| "Shrink values valid"
  }


  property("shrink min/max") = forAll(
    Arbitrary.arbitrary[BigInt].suchThat(_ != 0),
    Arbitrary.arbitrary[BigInt].suchThat(_ != 0).map(_.abs),
    Arbitrary.arbitrary[BigInt].suchThat(_ != 0).map(_.abs)
  ) {
    (min, diff1, diff2) =>
      val max = min + diff1 + diff2
      val value = min + diff1
      val original = CheckJsInteger(Some(min), Some(max), value)

      val shrink = Shrink.shrink(original)

      if (value == 0)
        shrink.isEmpty :| "Shrink empty"
      else
        shrink.nonEmpty :| "Shrink not empty" && shrink.forall {
          shrinked =>
            if (value < 0)
              shrinked.min.contains(min) && shrinked.max.contains(max) && shrinked.value > value && shrinked.value <= max
            else
              shrinked.min.contains(min) && shrinked.max.contains(max) && shrinked.value < value && shrinked.value <= max
        } :| "Shrink values valid"
  }
} 
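The :| operator attaches a label to each conjunct so that a failure names the part that broke; the BooleanOperators import supplies the implicit Boolean-to-Prop conversion (newer ScalaCheck versions expose the same conversion as propBoolean). A small standalone sketch:

import org.scalacheck.Prop.{forAll, propBoolean}
import org.scalacheck.Properties

object LabelledPropsSketch extends Properties("Labelled props sketch") {
  // Each conjunct carries its own label, so a failing run reports which law broke.
  property("basic arithmetic laws") = forAll { (a: Int, b: Int) =>
    ((a + b == b + a) :| "addition commutes") &&
      ((a * b == b * a) :| "multiplication commutes")
  }
} 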
Example 49
Source File: SampleDatasetGeneratorTest.scala    From spark-testing-base   with Apache License 2.0 5 votes vote down vote up
package com.holdenkarau.spark.testing

import org.apache.spark.sql.{Dataset, SQLContext}
import org.scalacheck.{Gen, Arbitrary}
import org.scalacheck.Prop.forAll
import org.scalatest.FunSuite
import org.scalatest.prop.Checkers

class SampleDatasetGeneratorTest extends FunSuite
    with SharedSparkContext with Checkers {

  test("test generating Datasets[String]") {
    val sqlContext = new SQLContext(sc)
    import sqlContext.implicits._

    val property =
      forAll(
        DatasetGenerator.genDataset[String](sqlContext)(
          Arbitrary.arbitrary[String])) {
        dataset => dataset.map(_.length).count() == dataset.count()
      }

    check(property)
  }

  test("test generating sized Datasets[String]") {
    val sqlContext = new SQLContext(sc)
    import sqlContext.implicits._

    val property =
      forAll {
        DatasetGenerator.genSizedDataset[(Int, String)](sqlContext) { size =>
          Gen.listOfN(size, Arbitrary.arbitrary[Char]).map(l => (size, l.mkString))
        }
      }{
        dataset =>
          val tuples = dataset.collect()
          val value = dataset.map{ case (_, str) => str.length}
          tuples.forall{ case (size, str) => size == str.length} &&
          value.count() == dataset.count
      }

    check(property)
  }

  test("test generating Datasets[Custom Class]") {
    val sqlContext = new SQLContext(sc)
    import sqlContext.implicits._

    val carGen: Gen[Dataset[Car]] =
      DatasetGenerator.genDataset[Car](sqlContext) {
        val generator: Gen[Car] = for {
          name <- Arbitrary.arbitrary[String]
          speed <- Arbitrary.arbitrary[Int]
        } yield (Car(name, speed))

        generator
    }

    val property =
      forAll(carGen) {
        dataset => dataset.map(_.speed).count() == dataset.count()
      }

    check(property)
  }
}

case class Car(name: String, speed: Int) 
Example 50
Source File: LetterTests.scala    From Muse-CGH   with MIT License 5 votes vote down vote up
package tests

import org.scalacheck.Gen
import org.scalacheck.Prop.forAll
import utilities.{CubicCurve, Vec2}


object LetterTests {
  val vec2Gen = for{
    x <- Gen.choose(0.0,2.0)
    y <- Gen.choose(-2.0,2.0)
  } yield Vec2(x,y)

  val cubicGen = for{
    p0 <- vec2Gen
    p1 <- vec2Gen
    p2 <- vec2Gen
    p3 <- vec2Gen
  } yield CubicCurve(p0,p1,p2,p3)

  val samplesTest = forAll(cubicGen){ c =>
    val samples = c.samples(50)
    samples.head =~= c.p0 && samples.last =~= c.p3
  }

  def main(args: Array[String]) {
    samplesTest.check
  }
} 
Example 51
Source File: DictionaryBasedEncoderSuite.scala    From OAP   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.sql.execution.datasources.oap.io

import org.apache.parquet.bytes.BytesInput
import org.apache.parquet.column.page.DictionaryPage
import org.apache.parquet.column.values.dictionary.PlainValuesDictionary.PlainBinaryDictionary
import org.scalacheck.{Arbitrary, Gen, Properties}
import org.scalacheck.Prop.forAll
import org.scalatest.prop.Checkers

import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.execution.datasources.oap.adapter.PropertiesAdapter
import org.apache.spark.sql.execution.datasources.oap.filecache.StringFiberBuilder
import org.apache.spark.sql.types.StringType
import org.apache.spark.unsafe.types.UTF8String

class DictionaryBasedEncoderCheck extends Properties("DictionaryBasedEncoder") {
  private val rowCountInEachGroup = Gen.choose(1, 1024)
  private val rowCountInLastGroup = Gen.choose(1, 1024)
  private val groupCount = Gen.choose(1, 100)

  property("Encoding/Decoding String Type") = forAll { (values: Array[String]) =>

    forAll(rowCountInEachGroup, rowCountInLastGroup, groupCount) {
      (rowCount, lastCount, groupCount) =>
        if (values.nonEmpty) {
          // This is the 'PLAIN' FiberBuilder to validate the 'Encoding/Decoding'
          // Normally, the test case should be:
          // values => encoded bytes => decoded bytes => decoded values (Using ColumnValues class)
          // Validate if 'values' and 'decoded values' are identical.
          // But ColumnValues only supports reading values from a DataFile. So, we have to use another way
          // to validate.
          val referenceFiberBuilder = StringFiberBuilder(rowCount, 0)
          val fiberBuilder = PlainBinaryDictionaryFiberBuilder(rowCount, 0, StringType)
          !(0 until groupCount).exists { group =>
            // If lastCount > rowCount, assume lastCount = rowCount
            val count =
              if (group < groupCount - 1) {
                rowCount
              } else if (lastCount > rowCount) {
                rowCount
              } else {
                lastCount
              }
            (0 until count).foreach { row =>
              fiberBuilder.append(InternalRow(UTF8String.fromString(values(row % values.length))))
              referenceFiberBuilder
                .append(InternalRow(UTF8String.fromString(values(row % values.length))))
            }
            val bytes = fiberBuilder.build().fiberData
            val dictionary = new PlainBinaryDictionary(
              new DictionaryPage(
                BytesInput.from(fiberBuilder.buildDictionary),
                fiberBuilder.getDictionarySize,
                org.apache.parquet.column.Encoding.PLAIN))
            val fiberParser = PlainDictionaryFiberParser(
              new OapDataFileMetaV1(rowCountInEachGroup = rowCount), dictionary, StringType)
            val parsedBytes = fiberParser.parse(bytes, count)
            val referenceBytes = referenceFiberBuilder.build().fiberData
            referenceFiberBuilder.clear()
            referenceFiberBuilder.resetDictionary()
            fiberBuilder.clear()
            fiberBuilder.resetDictionary()
            assert(parsedBytes.length == referenceBytes.length)
            parsedBytes.zip(referenceBytes).exists(byte => byte._1 != byte._2)
          }
        } else {
          true
        }
    }
  }
}

class DictionaryBasedEncoderSuite extends SparkFunSuite with Checkers {

  test("Check Encoding/Decoding") {
    check(PropertiesAdapter.getProp(new DictionaryBasedEncoderCheck()))
  }
} 
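The nesting above, an outer forAll over arbitrary data and an inner forAll over generated group sizes, is a general pattern. A sketch without the Spark/Parquet machinery (grouped/flatten stands in for the encode/decode pair):

import org.scalacheck.Prop.forAll
import org.scalacheck.{Gen, Properties}

object NestedForAllSketch extends Properties("Nested forAll sketch") {
  // Outer forAll draws the data, inner forAll draws a chunk size for it.
  property("grouped then flattened preserves the data") = forAll { (values: List[String]) =>
    forAll(Gen.choose(1, 16)) { chunkSize =>
      values.grouped(chunkSize).flatten.toList == values
    }
  }
} 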
Example 52
Source File: DeltaByteArrayEncoderSuite.scala    From OAP   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.sql.execution.datasources.oap.io

import org.scalacheck.{Arbitrary, Gen, Properties}
import org.scalacheck.Prop.forAll
import org.scalatest.prop.Checkers

import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.execution.datasources.oap.adapter.PropertiesAdapter
import org.apache.spark.sql.execution.datasources.oap.filecache.StringFiberBuilder
import org.apache.spark.sql.types.StringType
import org.apache.spark.unsafe.types.UTF8String

class DeltaByteArrayEncoderCheck extends Properties("DeltaByteArrayEncoder") {

  private val rowCountInEachGroup = Gen.choose(1, 1024)
  private val rowCountInLastGroup = Gen.choose(1, 1024)
  private val groupCount = Gen.choose(1, 100)

  property("Encoding/Decoding String Type") = forAll { (values: Array[String]) =>

    forAll(rowCountInEachGroup, rowCountInLastGroup, groupCount) {
      (rowCount, lastCount, groupCount) =>
        if (values.nonEmpty) {
          // This is the 'PLAIN' FiberBuilder to validate the 'Encoding/Decoding'
          // Normally, the test case should be:
          // values => encoded bytes => decoded bytes => decoded values (Using ColumnValues class)
          // Validate if 'values' and 'decoded values' are identical.
          // But ColumnValues only supports reading values from a DataFile. So, we have to use another way
          // to validate.
          val referenceFiberBuilder = StringFiberBuilder(rowCount, 0)
          val fiberBuilder = DeltaByteArrayFiberBuilder(rowCount, 0, StringType)
          val fiberParser = DeltaByteArrayDataFiberParser(
            new OapDataFileMetaV1(rowCountInEachGroup = rowCount), StringType)
          !(0 until groupCount).exists { group =>
            // If lastCount > rowCount, assume lastCount = rowCount
            val count = if (group < groupCount - 1) {
              rowCount
            } else if (lastCount > rowCount) {
              rowCount
            } else {
              lastCount
            }
            (0 until count).foreach { row =>
              fiberBuilder.append(InternalRow(UTF8String.fromString(values(row % values.length))))
              referenceFiberBuilder
                .append(InternalRow(UTF8String.fromString(values(row % values.length))))
            }
            val bytes = fiberBuilder.build().fiberData
            val parsedBytes = fiberParser.parse(bytes, count)
            val referenceBytes = referenceFiberBuilder.build().fiberData
            referenceFiberBuilder.clear()
            fiberBuilder.clear()
            assert(parsedBytes.length == referenceBytes.length)
            parsedBytes.zip(referenceBytes).exists(byte => byte._1 != byte._2)
          }
        } else true
    }
  }
}

class DeltaByteArrayEncoderSuite extends SparkFunSuite with Checkers {

  test("Check Encoding/Decoding") {
    check(PropertiesAdapter.getProp(new DeltaByteArrayEncoderCheck()))
  }
} 
Example 53
Source File: P16Check.scala    From S99   with MIT License 5 votes vote down vote up
package jp.co.dwango.s99

import org.scalacheck.{Gen, Properties}
import Gen.listOf, Gen.chooseNum
import org.scalacheck.Prop.forAll

class P16Check extends Properties("P16") {
  property("drop()") =
    forAll(listOf(chooseNum(Int.MinValue, Int.MaxValue)), chooseNum(1, 10)) {
      (s: List[Int], i: Int) =>
        (P16.drop(i, s) == s.zipWithIndex
          .map { case (e, j) => (e, j + 1) }
          .filterNot {
            case (e, j) =>
              j % i == 0
          }
          .map { _._1 })
    }
} 
Example 54
Source File: P08Check.scala    From S99   with MIT License 5 votes vote down vote up
package jp.co.dwango.s99

import org.scalacheck.Prop.forAll
import org.scalacheck.Properties

class P08Check extends Properties("P08") {
  def duplicates(list: List[Int]): Int = {
    @scala.annotation.tailrec
    def loop(list: List[Int], curr: Int, count: Int): Int =
      list match {
        case Nil => count
        case x :: xs =>
          if (curr == x) loop(xs, curr, count + 1) else loop(xs, x, count)
      }
    list match {
      case x :: xs => loop(xs, x, 0)
      case Nil     => 0
    }
  }
  property("compress()") = forAll { (s: List[Int]) =>
    P08.compress(s).length == s.length - duplicates(s)
  }
} 
Example 55
Source File: Assesments.scala    From Learn-Scala-Programming   with MIT License 5 votes vote down vote up
import org.scalacheck.{Arbitrary, Gen, Prop}
import org.scalacheck.Prop.forAll

object Assesments extends App {
  def invariant[T: Ordering: Arbitrary]: Prop =
    forAll((l: List[T]) => l.sorted.length == l.length)

  invariant[Long].check
  invariant[String].check

  def idempotent[T: Ordering: Arbitrary]: Prop =
    forAll((l: List[T]) => l.sorted.sorted == l.sorted)

  idempotent[Long].check
  idempotent[String].check

  def inductive[T: Ordering: Arbitrary]: Prop = {
    def ordered(l: List[T]): Boolean =
      (l.length < 2) ||
        (ordered(l.tail) && implicitly[Ordering[T]].lteq(l.head, l.tail.head))
    forAll((l: List[T]) => ordered(l.sorted))
  }

  inductive[Int].check
  inductive[String].check


  val genListListInt = Gen.listOf(Gen.listOf(Gen.posNum[Int]))
  genListListInt.sample

  val pairGen = for {
    uuid <- Gen.uuid
    function0 <- Gen.function0(Gen.asciiStr)
  } yield (uuid, function0)

  val mapGen = Gen.mapOf(pairGen)
} 
Example 56
Source File: BookDbApiSpecification.scala    From swagger-check   with MIT License 5 votes vote down vote up
package de.leanovate.swaggercheck

import java.util.UUID

import de.leanovate.swaggercheck.fixtures.bookdb.Author
import de.leanovate.swaggercheck.schema.ValidationResultToProp._
import de.leanovate.swaggercheck.simple._
import org.scalacheck.Prop.{BooleanOperators, forAll}
import org.scalacheck.{Arbitrary, Properties, Shrink}
import play.api.libs.json.Json

object BookDbApiSpecification extends Properties("BookDB API") {
  val swaggerChecks = SwaggerChecks(getClass.getClassLoader.getResourceAsStream("bookdb_api.yaml"))

  property("Author is correctly written") = {
    val verifier = swaggerChecks.jsonVerifier("Author")

    forAll(Arbitrary.arbitrary[Author]) {
      author: Author =>
        val json = Json.stringify(Json.toJson(author))

        verifier.verify(json)
    }
  }

  property("Author can be correctly parsed") = {
    val verifier = swaggerChecks.jsonVerifier("Author")

    forAll(swaggerChecks.jsonGenerator("Author")) {
      json =>
        Json.parse(json.minified).validate[Author].isSuccess :| "Json can be deserialized" &&
          verifier.verify(json.minified).isSuccess :| "Json conforms to own schema" &&
          Shrink.shrink(json).forall {
            shrinked =>
              verifier.verify(shrinked.minified).isSuccess
          } :| "All shrinked variants conform to schema"
    }
  }

  property("Request generator POST /author") = {
    val verifier = swaggerChecks.jsonVerifier("Author")

    forAll(swaggerChecks.requestGenerator("POST", "/v1/authors")) {
      request =>
        (request.method == "POST") :| "Method" &&
          (request.path == "/v1/authors") :| "Path" &&
          request.body.isDefined :| "Has body" &&
          verifier.verify(request.body.get.minified).isSuccess :| "Body is author"
    }
  }

  property("Request generator GET /author/{id}") =
    forAll(swaggerChecks.requestGenerator("GET", "/v1/authors/{id}")) {
      request =>
        (request.method == "GET") :| "Method" &&
          request.path.startsWith("/v1/authors") :| "Path" &&
          (UUID.fromString(request.path.substring(12)) ne null) :| "Id is uuid" &&
          request.body.isEmpty :| "Has no body"
    }

  property("Operation verifier") = forAll(swaggerChecks.operationVerifier[SimpleRequest, SimpleResponse](_ == "/v1/authors")) {
    case operationVerifier: SimpleOperationVerifier if operationVerifier.request.method == "GET" =>
      val profileJson = swaggerChecks.jsonGenerator("AuthorsPage")
      val response = SimpleResponse(200, Map.empty, profileJson.sample.get.minified)

      (operationVerifier.request.path == "/v1/authors") :| "Path" &&
        (operationVerifier.request.method == "GET") :| "Method" &&
        operationVerifier.responseVerifier.verify(response).isSuccess :| "Response verifier"
    case operationVerifier: SimpleOperationVerifier =>
      val profileJson = swaggerChecks.jsonGenerator("Author")
      val response = SimpleResponse(201, Map.empty, "")

      (operationVerifier.request.path == "/v1/authors") :| "Path" &&
        (operationVerifier.request.method == "POST") :| "Method" &&
        operationVerifier.responseVerifier.verify(response).isSuccess :| "Response verifier"
  }
} 
Example 57
Source File: UberApiSpecification.scala    From swagger-check   with MIT License 5 votes vote down vote up
package de.leanovate.swaggercheck

import de.leanovate.swaggercheck.fixtures.uber.{UberError, UberProduct}
import de.leanovate.swaggercheck.schema.ValidationResultToProp
import de.leanovate.swaggercheck.simple._
import org.scalacheck.Prop.{BooleanOperators, forAll}
import org.scalacheck.{Arbitrary, Gen, Properties}
import play.api.libs.json.Json
import ValidationResultToProp._

object UberApiSpecification extends Properties("Uber API") {
  val swaggerChecks = SwaggerChecks(getClass.getClassLoader.getResourceAsStream("uber_api.yaml"))

  property("Error can be read") = forAll(swaggerChecks.jsonGenerator("Error")) {
    json =>
      Json.parse(json.minified).validate[UberError].isSuccess
  }

  property("Error can be written") = {
    val verifier = swaggerChecks.jsonVerifier("Error")

    forAll(Arbitrary.arbitrary[UberError]) {
      error: UberError =>
        val json = Json.stringify(Json.toJson(error))

        verifier.verify(json)
    }
  }

  property("Product can be read") = forAll(swaggerChecks.jsonGenerator("Product")) {
    json =>
      Json.parse(json.minified).validate[UberProduct].isSuccess
  }

  property("Product can be written") = {
    val verifier = swaggerChecks.jsonVerifier("Product")

    forAll(Arbitrary.arbitrary[UberProduct]) {
      product: UberProduct =>
        val json = Json.stringify(Json.toJson(product))

        verifier.verify(json)
    }
  }

  property("Request endpoints exists") = forAll(swaggerChecks.requestGenerator[SimpleRequest]()) {
    case SimpleRequest("GET", "/v1/estimates/price", queryParameters, headers, _) =>
      val paramNames = queryParameters.map(_._1).toSet
      (headers.head == "Accept" -> "application/json") :| "Accept header" &&
        paramNames.contains("start_latitude") :| "paramNames contains start_latitude" &&
        paramNames.contains("start_longitude") :| "paramNames contains start_longitude" &&
        paramNames.contains("end_latitude") :| "paramNames contains end_latitude" &&
        paramNames.contains("end_longitude") :| "paramNames contains end_longitude" &&
        (paramNames.size == 4) :| "paramNames size 4"
    case SimpleRequest("GET", "/v1/estimates/time", queryParameters, headers, _) =>
      val paramNames = queryParameters.map(_._1).toSet
      (headers.head == "Accept" -> "application/json") :| "Accept header" &&
        paramNames.contains("start_latitude") :| "paramNames contains start_latitude" &&
        paramNames.contains("start_longitude") :| "paramNames contains start_longitude" &&
        (paramNames.size <= 4) :| "paramNames size 4"
    case SimpleRequest("GET", "/v1/me", queryParameters, headers, _) =>
      (headers.head == "Accept" -> "application/json") :| "Accept header" &&
        queryParameters.isEmpty :| "query parameter is empty"
    case SimpleRequest("GET", "/v1/history", queryParameters, headers, _) =>
      (headers.head == "Accept" -> "application/json") :| "Accept header" &&
        (queryParameters.size <= 2) :| "query parameter is empty"
    case SimpleRequest("GET", "/v1/products", queryParameters, headers, _) =>
      val paramNames = queryParameters.map(_._1).toSet
      (headers.head == "Accept" -> "application/json") :| "Accept header" &&
        paramNames.contains("latitude") :| "paramNames contains latitude" &&
        paramNames.contains("longitude") :| "paramNames contains longitude" &&
        (paramNames.size <= 2) :| "paramNames size 2"
    case _ => false :| "Does not match any request"
  }

  property("Responses can be verified") = {
    val verifier = swaggerChecks.responseVerifier[SimpleResponse]("GET", "/v1/products")
    val okRepsonseGen = Gen.listOf(Arbitrary.arbitrary[UberProduct])
      .map(products => SimpleResponse(200, Map.empty, Json.stringify(Json.toJson(products))))
    val errorResponseGen = for {
      status <- Gen.choose(400, 599)
      error <- Arbitrary.arbitrary[UberError]
    } yield SimpleResponse(status, Map.empty, Json.stringify(Json.toJson(error)))

    forAll(Gen.oneOf(okRepsonseGen, errorResponseGen)) {
      response: SimpleResponse =>
        verifier.verify(response)
    }
  }

  property("Operation verifier") = forAll(swaggerChecks.operationVerifier[SimpleRequest, SimpleResponse](_ == "/v1/me")) {
    operationVerifier: SimpleOperationVerifier =>
      val profileJson = swaggerChecks.jsonGenerator("Profile")
      val response = SimpleResponse(200, Map.empty, profileJson.sample.get.minified)

      (operationVerifier.request.path == "/v1/me") :| "Path" &&
        (operationVerifier.request.method == "GET") :| "Method" &&
        operationVerifier.responseVerifier.verify(response).isSuccess :| "Response verifier"
  }
} 
Example 58
Source File: ValidatingReadsSpecification.scala    From swagger-check   with MIT License 5 votes vote down vote up
package de.leanovate.swaggercheck.schema.play

import de.leanovate.swaggercheck.schema.model.DefaultSchema
import de.leanovate.swaggercheck.schema.play.model.ProductModel
import de.leanovate.swaggercheck.shrinkable.CheckJsValue
import org.scalacheck.Properties
import org.scalacheck.Prop.forAll
import play.api.libs.json.Json
import de.leanovate.swaggercheck.schema.play.Implicits._
import de.leanovate.swaggercheck.schema.gen.GeneratableDefaultSchema._

object ValidatingReadsSpecification extends Properties("ValidatingReads") {
  val schema = Json.parse(getClass.getClassLoader.getResourceAsStream("schema/simple1.json")).as[DefaultSchema]

  val validatingReads = ValidatingReads.validating[Seq[ProductModel]](schema)

  property("any generated can be deserialized") = forAll(schema.generate) {
    json : CheckJsValue =>
      Json.parse(json.minified).validate[Seq[ProductModel]].isSuccess
  }

  property("any generated can be validated") = forAll(schema.generate) {
    json : CheckJsValue =>
      Json.parse(json.minified).validate[Seq[ProductModel]](validatingReads).isSuccess
  }
} 
Example 59
Source File: GeneratorsSpecification.scala    From swagger-check   with MIT License 5 votes vote down vote up
package de.leanovate.swaggercheck.generators

import java.net.{URI, URL}

import de.leanovate.swaggercheck.schema.model.JsonPath
import de.leanovate.swaggercheck.schema.model.formats.StringFormats
import org.scalacheck.Prop.forAll
import org.scalacheck.Properties

import scala.util.Try

object GeneratorsSpecification extends Properties("Generators") {
  property("generate valid urls") = forAll(Generators.url) {
    url =>
      Try(new URL(url)).isSuccess
  }

  property("generate valid uris") = forAll(Generators.uri) {
    url =>
      Try(new URI(url)).isSuccess
  }

  property("generate valid emails") = forAll(Generators.email) {
    email =>
      StringFormats.EmailString.validate(JsonPath(), email).isSuccess
  }
} 
Example 60
Source File: MLScalaCheckTest.scala    From spark-testing-base   with Apache License 2.0 5 votes vote down vote up
package com.holdenkarau.spark.testing

import org.apache.spark.ml.linalg.SQLDataTypes.{MatrixType, VectorType}
import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.types.{StructField, StructType}
import org.scalacheck.Prop.forAll
import org.scalatest.FunSuite
import org.scalatest.prop.Checkers

class MLScalaCheckTest extends FunSuite with SharedSparkContext with Checkers {
  // re-use the spark context
  override implicit def reuseContextIfPossible: Boolean = false

  test("vector generation") {
    val schema = StructType(List(StructField("vector", VectorType)))
    val sqlContext = new SQLContext(sc)
    val dataframeGen = DataframeGenerator.arbitraryDataFrame(sqlContext, schema)

    val property =
      forAll(dataframeGen.arbitrary) {
        dataframe => {
          dataframe.schema === schema && dataframe.count >= 0
        }
      }

    check(property)
  }

  test("matrix generation") {
    val schema = StructType(List(StructField("matrix", MatrixType)))
    val sqlContext = new SQLContext(sc)
    val dataframeGen = DataframeGenerator.arbitraryDataFrame(sqlContext, schema)

    val property =
      forAll(dataframeGen.arbitrary) {
        dataframe => {
          dataframe.schema === schema && dataframe.count >= 0
        }
      }

    check(property)
  }
} 
Example 61
Source File: ResultsSpec.scala    From roc   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package roc
package postgresql

import org.scalacheck.Arbitrary.arbitrary
import org.scalacheck.Prop.forAll
import org.scalacheck.{Arbitrary, Gen}
import org.specs2._
import org.specs2.mock.Mockito
import roc.postgresql.failures.ElementNotFoundFailure

final class ResultsSpec extends Specification with ScalaCheck with Mockito { def is = s2"""

  Row
    get(column) must throw ElementNotFound failure for unknown column name  $columnNotFound
                                                                           """

  val columnNotFound = forAll { sym: Symbol =>
    val row = new Row(List.empty[Element])
    row.get(sym) must throwA[ElementNotFoundFailure]
  }

  lazy val genSymbol: Gen[Symbol] = for {
    str <-  arbitrary[String]
  } yield Symbol(str)
  implicit lazy val arbitrarySymbol: Arbitrary[Symbol] =
    Arbitrary(genSymbol)
} 
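Wrapping a custom Gen in an implicit Arbitrary, as done here for Symbol, is the standard way to feed domain values into forAll. A compact sketch of the same wiring with a hypothetical UserId type:

import org.scalacheck.Prop.forAll
import org.scalacheck.{Arbitrary, Gen, Properties}

object CustomArbitrarySketch extends Properties("Custom Arbitrary sketch") {
  final case class UserId(value: String)

  // Build a Gen for the domain type, then expose it as the implicit Arbitrary.
  val userIdGen: Gen[UserId] = Gen.identifier.map(UserId(_))
  implicit val arbUserId: Arbitrary[UserId] = Arbitrary(userIdGen)

  // forAll now picks up UserId values through the implicit Arbitrary.
  property("UserId values are non-empty") = forAll { (id: UserId) =>
    id.value.nonEmpty
  }
} 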
Example 62
Source File: StartupSpecs.scala    From roc   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package roc
package postgresql

import com.twitter.finagle.client.StackClient
import org.scalacheck.Arbitrary.arbitrary
import org.scalacheck.Prop.forAll
import org.scalacheck.{Arbitrary, Gen}
import org.specs2._
import roc.postgresql.Startup.{Database, Credentials}

final class StartupSpecs extends Specification with ScalaCheck { def is = s2"""

  Database
    must have correct database name $testDatabase

  Credentials
    must have correct username and password $testUserAndPasswd

  Startup
    must have correct database, username, and password              $testStartupClass
    must have correct defaults for username, password, and database $testStartupDefaults
                                                                            """

  val testDatabase = forAll { dbContainer: DbContainer =>
    val database = dbContainer.db
    database.db must_== dbContainer.dbName
  }

  val testUserAndPasswd = forAll { credentialsContainer: CredentialsContainer =>
    val expectedCredentials = Credentials(credentialsContainer.username, 
      credentialsContainer.passwd) 
    credentialsContainer.credentials must_== expectedCredentials
  }

  val testStartupClass = forAll { startupContainer: StartupContainer =>
    val expectedStartup = Startup(startupContainer.username, startupContainer.passwd,
      startupContainer.database)
    startupContainer.startup must_== expectedStartup
  }

  val testStartupDefaults = {
    val expectedStartup = Startup("postgres", "postgres", "postgres")
    Startup(StackClient.defaultParams) must_== expectedStartup
  }

  case class DbContainer(db: Database, dbName: String)
  private lazy val databaseGen: Gen[DbContainer] = for {
    db  <-  arbitrary[String]
  } yield DbContainer(Database(db), db)
  implicit lazy val arbitraryDatabase: Arbitrary[DbContainer] =
    Arbitrary(databaseGen)

  case class CredentialsContainer(credentials: Credentials, username: String, passwd: String)
  private lazy val credentialsContainerGen: Gen[CredentialsContainer] = for {
    username    <-  arbitrary[String]
    password    <-  arbitrary[String]
  } yield CredentialsContainer(Credentials(username, password), username, password)
  implicit lazy val arbitraryCredentialsContainer: Arbitrary[CredentialsContainer] =
    Arbitrary(credentialsContainerGen)

  case class StartupContainer(startup: Startup, username: String, passwd: String, database: String)
  private lazy val startupContainerGen: Gen[StartupContainer] = for {
    username    <-  arbitrary[String]
    passwd      <-  arbitrary[String]
    database    <-  arbitrary[String]
  } yield StartupContainer(Startup(username, passwd, database), username, passwd, database)
  implicit lazy val arbitraryStartupContainer: Arbitrary[StartupContainer] =
    Arbitrary(startupContainerGen)
} 
Example 63
Source File: MessageSpec.scala    From roc   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package roc
package postgresql

import java.nio.charset.StandardCharsets
import java.security.MessageDigest
import org.scalacheck.Arbitrary.arbitrary
import org.scalacheck.Prop.forAll
import org.scalacheck.{Arbitrary, Gen}
import org.specs2._

final class MessagesSpec extends Specification with ScalaCheck { def is = s2"""

  PasswordMessage
    should MD5 encrypt a password with given salt           $pmEncrypt
                                                                            """

  val pmEncrypt = forAll { (user: String, pm: PasswordMessage, salt: Array[Byte]) =>
    val md = MessageDigest.getInstance("MD5")
    md.update((pm.password+ user).getBytes(StandardCharsets.UTF_8))
    val unsaltedHexStr = md.digest().map(x => "%02x".format(x.byteValue)).foldLeft("")(_ + _)
    val saltedBytes = unsaltedHexStr.getBytes ++ salt
    md.reset()
    md.update(saltedBytes)
    val passwd = md.digest().map(x => "%02x".format(x.byteValue)).foldLeft("md5")(_ + _)
    passwd must_== PasswordMessage.encryptMD5Passwd(user, pm.password, salt)
  }
  
  lazy val genByte: Gen[Byte] = arbitrary[Byte]
  lazy val genSalt: Gen[Array[Byte]] = Gen.containerOfN[Array, Byte](4, genByte)
  lazy val genPasswordMessage: Gen[PasswordMessage] = for {
    password    <-  arbitrary[String]
  } yield new PasswordMessage(password)
  implicit lazy val implicitPasswordMessage: Arbitrary[PasswordMessage] = 
    Arbitrary(genPasswordMessage)
} 
Example 64
Source File: PackageSpec.scala    From roc   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package roc
package postgresql

import java.nio.charset.StandardCharsets
import org.scalacheck.Prop.forAll
import org.specs2.{ScalaCheck, Specification}

final class PackageSpec extends Specification with ScalaCheck { def is = s2"""

  Postgresql Package
    should calculate length of C-Style String               $test0
    should calculate length of C-Style Strings              $test1
                                                                           """

  val test0 = forAll { (str: String) =>
    val bytes  = str.getBytes(StandardCharsets.UTF_8)
    val length = bytes.length + 1 // add 1 for null character
    lengthOfCStyleString(str) must_== length
  }

  val test1 = forAll { (xs: List[String]) =>
    val length = xs match {
      case _ :: _ => xs.map(lengthOfCStyleString).reduce(_ + _)
      case Nil    => 0
    }
    lengthOfCStyleStrings(xs) must_== length
  }
} 
Example 65
Source File: FailuresSpec.scala    From roc   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package roc
package postgresql

import cats.data.NonEmptyList
import org.scalacheck.Prop.forAll
import org.scalacheck.Gen
import org.specs2._
import roc.postgresql.failures._

final class FailuresSpec extends Specification with ScalaCheck { def is = s2"""

  Failure
    ReadyForQueryDecodingFailure should have correct message         $readyForQueryDecodingFailure
    UnknownAuthenticationFailure should have correct message         $unknownAuthRequestFailure
    UnsupportedAuthenticationFailure should have correct message     $unsupportedAuthFailure
    PostgresqlStateMachineFailure should have correct message        $postgresqlStateMachineFailure
    UnknownPostgresqlMessageTypeFailure should have correct message  $unknownPostgresqlMessageTypeFailure
    PostgresqlMessageDecodingFailure must have a correct error message   $postgresqlMessageDecodingFailure
                                                                         """

  val readyForQueryDecodingFailure = forAll { c: Char =>
    val msg = s"Received unexpected Char $c from Postgres Server."
    val error = new ReadyForQueryDecodingFailure(c)
    error.getMessage must_== msg
  }

  val unknownAuthRequestFailure = forAll { n: Int =>
    val expectedMessage = s"Unknown Authentication Request Type: $n"
    val error           = new UnknownAuthenticationRequestFailure(n)
    error.getMessage must_== expectedMessage
  }

  val unsupportedAuthFailure = forAll { s: String =>
    val expectedMessage =
      s"Unsupported Authentication Failure. $s authentication is not supported."
    val error           = new UnsupportedAuthenticationFailure(s)
    error.getMessage must_== expectedMessage
  }
  
  val postgresqlStateMachineFailure = forAll { (s1: String, s2: String) =>
    val expectedMessage = s"State Transition from $s1 -> $s2 is undefined."
    val error           = new PostgresqlStateMachineFailure(s1, s2)
    error.getMessage must_== expectedMessage
  }
  
  val unknownPostgresqlMessageTypeFailure = forAll { c: Char =>
    val expectedMessage = s"Unknown Postgresql MessageType '$c'."
    val error           = new UnknownPostgresqlMessageTypeFailure(c)
    error.getMessage must_== expectedMessage
  }

  val postgresqlMessageDecodingFailure = forAll(genNELErrorResponse) { nel: NonEmptyList[String] =>
    val error = new PostgresqlMessageDecodingFailure(nel)
    val expectedMessage = nel.foldLeft("")(_ + _)
    expectedMessage must_== error.getMessage
  }

  private lazy val genErrorResponseString: Gen[String] = Gen.oneOf(
    "Required Severity Level was not present.",
    "Required SQLSTATE Code was not present.",
    "Required Message was not present."
  )
  private lazy val genNELErrorResponse: Gen[NonEmptyList[String]] = for {
    string  <-  genErrorResponseString
  } yield NonEmptyList.of(string)

} 
Example 66
Source File: JsonDecoderSpec.scala    From roc   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package roc
package types

import io.circe.generic.auto._
import io.circe.syntax._
import java.nio.charset.StandardCharsets
import jawn.ast.JParser
import org.scalacheck.Arbitrary.arbitrary
import org.scalacheck.Prop.forAll
import org.scalacheck.{Arbitrary, Gen}
import org.specs2.{ScalaCheck, Specification}
import roc.postgresql.Null
import roc.types.failures.{ElementDecodingFailure, NullDecodedFailure}
import roc.types.{decoders => Decoders}

final class JsonDecoderSpec extends Specification with ScalaCheck { def is = s2"""

  Json Decoder
    must correctly decode Text representation                              $testValidText
    must throw a ElementDecodingFailure when Text decoding invalid Json    $testInvalidText
    must correctly decode Binary representation                            $testValidBinary
    must throw a ElementDecodingFailure when Binary decoding invalid Json  $testInvalidBinary
    must throw a NullDecodedFailure when Null decoding Json                $testNullDecoding

                                                                               """

  private val testValidText = forAll { x: JsonContainer =>
    Decoders.jsonElementDecoder.textDecoder(x.text) must_== x.json
  }

  private val testInvalidText = forAll { x: String =>
    Decoders.jsonElementDecoder.textDecoder(x) must throwA[ElementDecodingFailure]
  }

  private val testValidBinary = forAll { x: BinaryJsonContainer =>
    Decoders.jsonElementDecoder.binaryDecoder(x.binary) must_== x.json
  }

  private val testInvalidBinary = forAll { xs: Array[Byte] =>
    Decoders.jsonElementDecoder.binaryDecoder(xs) must throwA[ElementDecodingFailure]
  }

  private val testNullDecoding =
    Decoders.jsonElementDecoder.nullDecoder(Null('doesnotmatter, -71)) must throwA[NullDecodedFailure]

  case class JsonContainer(text: String, json: Json)
  private lazy val genJsonContainer: Gen[JsonContainer] = for {
    jObject <- arbitrary[JsonObject]
  } yield {
    val text = jObject.asJson.noSpaces
    val json = JParser.parseUnsafe(text)
    new JsonContainer(text, json)
  }
  private implicit lazy val arbitraryJsonContainer: Arbitrary[JsonContainer] = 
    Arbitrary(genJsonContainer)

  case class BinaryJsonContainer(binary: Array[Byte], json: Json)
  private lazy val genBinaryJsonContainer: Gen[BinaryJsonContainer] = for {
    jObject <- arbitrary[JsonObject]
  } yield {
    val text = jObject.asJson.noSpaces
    val json = JParser.parseUnsafe(text)
    val bytes = text.getBytes(StandardCharsets.UTF_8)
    new BinaryJsonContainer(bytes, json)
  }
  private implicit lazy val arbitraryBinaryJsonContainer: Arbitrary[BinaryJsonContainer] =
    Arbitrary(genBinaryJsonContainer)

  case class JsonObject(name: String, first_names: List[String])

  private lazy val genJsonObject: Gen[JsonObject] = for {
    name <- arbitrary[String]
    first_names <- arbitrary[List[String]]
  } yield new JsonObject(name, first_names)
  private implicit lazy val arbitraryJsonObject: Arbitrary[JsonObject] = 
    Arbitrary(genJsonObject)
} 
Example 67
Source File: FailureSpec.scala    From roc   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package roc
package types

import org.scalacheck.Prop.forAll
import org.specs2._
import roc.types.failures._

final class FailureSpec extends Specification with ScalaCheck { def is = s2"""

  BinaryDecodingUnsupportedFailure
    must have correct error message  $testBinaryDecodingFailureErrMsg

  TextDecodingUnsupportedFailure
    must have correct error message  $testTextDecodingFailureErrMsg

  NullDecodedFailure
    must have correct error message  $testNullDecodedFailureErrMsg

  ElementDecodingFailure
    must have correct error message $testElementDecodingFailureErrMsg
                                                                           """

  val testBinaryDecodingFailureErrMsg = forAll { s: String =>
    val failure = new BinaryDecodingUnsupportedFailure(s)
    failure.getMessage() must_== s"Binary decoding of type $s is currently unsupported."
  }

  val testTextDecodingFailureErrMsg = forAll { s: String =>
    val failure = new TextDecodingUnsupportedFailure(s)
    failure.getMessage() must_== s"Text decoding of type $s is currently unsupported."
  }

  val testNullDecodedFailureErrMsg = forAll { s: String =>
    val failure = new NullDecodedFailure(s)
    failure.getMessage() must_== 
      s"A NULL value was decoded for type $s. Hint: use the Option[$s] decoder, or ensure that Postgres cannot return NULL for the requested value."
  }

  val testElementDecodingFailureErrMsg = forAll { (s: String, t: Throwable) =>
    val failure = new ElementDecodingFailure(s, t)
    failure.getMessage() must_== s"Failure to decode $s. ${t.getMessage()}"
  }
} 
Example 68
Source File: TransformBstProperties.scala    From functional-way   with GNU General Public License v3.0 5 votes vote down vote up
import org.scalacheck.{Gen, Properties}
import org.scalacheck.Gen._
import org.scalacheck.Prop.forAll
import org.scalacheck.Arbitrary.arbitrary
import tree.transformBst
import tree.MaybeNode

class TransformBstProperties extends Properties("TransformBST") {

  import tree.Node

  private val leafNodeGen : Gen[Node] = arbitrary[Int].map(v => Node(left = null, right = null, value = v))

  private val nodeGen = for {
    v <- arbitrary[Int]
    left <- genTree
    right <- genTree
  } yield Node(value = v, left = left, right = right)

  private val genTree : Gen[Node] = oneOf(nodeGen, leafNodeGen)

  
  private def isZeroPresent(node : MaybeNode) : Boolean = node match {
    case n: Node => if(n.value == 0) true else {
      isZeroPresent(n.left) || isZeroPresent(n.right)
    }
    case null => false
  }

  //Not a complete test here. But a good one to begin with
  property("transformBst") = forAll(genTree) { (root : Node) =>
    val transformedTreeRoot = transformBst(root)
    isZeroPresent(transformedTreeRoot)
  }

} 
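The recursive genTree above terminates only because oneOf eventually picks the leaf case; a common ScalaCheck idiom makes termination explicit by bounding the recursion with the size parameter. A minimal self-contained sketch, with illustrative names not taken from the functional-way project:

import org.scalacheck.{Gen, Properties}
import org.scalacheck.Prop.forAll

object SizedTreeSketch extends Properties("SizedTreeSketch") {

  sealed trait Tree
  case object Leaf extends Tree
  final case class Node(value: Int, left: Tree, right: Tree) extends Tree

  // Hypothetical generator: recursion is cut off once `maxDepth` reaches zero.
  private def treeGen(maxDepth: Int): Gen[Tree] =
    if (maxDepth <= 0) Gen.const(Leaf: Tree)
    else Gen.oneOf(
      Gen.const(Leaf: Tree),
      for {
        v <- Gen.choose(-100, 100)
        l <- treeGen(maxDepth - 1)
        r <- treeGen(maxDepth - 1)
      } yield (Node(v, l, r): Tree)
    )

  // Tie the depth bound to ScalaCheck's size parameter, capped at 8.
  private val genTree: Gen[Tree] = Gen.sized(size => treeGen(math.min(size, 8)))

  private def depth(t: Tree): Int = t match {
    case Leaf          => 0
    case Node(_, l, r) => 1 + math.max(depth(l), depth(r))
  }

  property("generated trees respect the depth bound") = forAll(genTree) { t =>
    depth(t) <= 8
  }
}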
Example 69
Source File: ProviderLaws.scala    From dtc   with Apache License 2.0 5 votes vote down vote up
package dtc.laws


import cats.Order
import cats.syntax.order._
import dtc.{Provider, TimeZoneId}
import org.scalacheck.Prop.forAll
import org.scalacheck.{Arbitrary, Gen, Prop}

trait ProviderLaws[A] {

  implicit def P: Provider[A]
  implicit def O: Order[A]

  val genA: Gen[A]
  val genTimeZone: Gen[TimeZoneId]

  def twoConsequentNowCalls: Prop = forAll(genTimeZone) { zone: TimeZoneId =>
    val prev = P.currentTime(zone)
    val current = P.currentTime(zone)
    Prop(prev <= current)
  }
}

object ProviderLaws {
  def apply[A](
    gTimeZone: Gen[TimeZoneId])(
    implicit order: Order[A],
    provider: Provider[A],
    arbA: Arbitrary[A]): ProviderLaws[A] = new ProviderLaws[A] {

    implicit def P: Provider[A] = provider
    implicit def O: Order[A] = order

    val genTimeZone: Gen[TimeZoneId] = gTimeZone
    val genA: Gen[A] = arbA.arbitrary
  }
} 
Example 70
Source File: LocalDateTimeLaws.scala    From dtc   with Apache License 2.0 5 votes vote down vote up
package dtc.laws

import java.time.temporal.{ChronoField, ChronoUnit}
import java.time.{LocalDate, LocalTime}

import cats.kernel.laws._
import cats.kernel.laws.discipline.{catsLawsIsEqToProp => p}
import dtc.Local
import dtc.syntax.local._
import org.scalacheck.{Gen, Prop}
import org.scalacheck.Prop.forAll


trait LocalDateTimeLaws[A] {
  implicit def D: Local[A]

  val genLocalDate: Gen[LocalDate]
  val genLocalTime: Gen[LocalTime]

  def constructorConsistency: Prop = forAll(genLocalDate, genLocalTime) { (date: LocalDate, time: LocalTime) =>
    val dt = D.of(date, time)
    p(dt.date <-> date) && (dt.time <-> time.truncatedTo(ChronoUnit.MILLIS))
  }

  def plainConstructorConsistency: Prop = forAll(genLocalDate, genLocalTime) { (date: LocalDate, time: LocalTime) =>
    val dt = D.of(
      date.getYear, date.getMonthValue, date.getDayOfMonth,
      time.getHour, time.getMinute, time.getSecond, time.get(ChronoField.MILLI_OF_SECOND))
    p(dt.date <-> date) && (dt.time <-> time.truncatedTo(ChronoUnit.MILLIS))
  }
}

object LocalDateTimeLaws {
  def apply[A](
    gLocalTime: Gen[LocalTime],
    gLocalDate: Gen[LocalDate])(
    implicit ev: Local[A]): LocalDateTimeLaws[A] = new LocalDateTimeLaws[A] {
    def D: Local[A] = ev
    val genLocalDate: Gen[LocalDate] = gLocalDate
    val genLocalTime: Gen[LocalTime] = gLocalTime
  }
} 
Example 71
Source File: BasicFunctionalitySpec.scala    From chronoscala   with MIT License 5 votes vote down vote up
package jp.ne.opt.chronoscala

import jp.ne.opt.chronoscala.Imports._
import org.scalacheck.Prop.forAll
import org.scalacheck.{Prop, Properties}

object BasicFunctionalitySpec extends Properties("ZonedDateTime") with Gens {

  property("LocalDateTime equality") = Prop.secure {
    forAll(localDateTimeGen) { localDateTime =>
      localDateTime == localDateTime
    }
  }

  property("ZonedDateTime equality") = Prop.secure {
    forAll(zonedDateTimeGen) { zonedDateTime =>
      zonedDateTime == zonedDateTime
    }
  }

  property("OffsetDateTime equality") = Prop.secure {
    forAll(offsetDateTimeGen) { offsetDateTime =>
      offsetDateTime == offsetDateTime
    }
  }

  property("localDateTime < (localDateTime + 1.hour)") = Prop.secure {
    forAll(localDateTimeGen) { localDateTime =>
      localDateTime < (localDateTime + 1.hour)
    }
  }

  property("zonedDateTime < (zonedDateTime + 1.hour)") = Prop.secure {
    forAll(zonedDateTimeGen) { zonedDateTime =>
      zonedDateTime < (zonedDateTime + 1.hour)
    }
  }

  property("offsetDateTime < (offsetDateTime + 1.hour)") = Prop.secure {
    forAll(offsetDateTimeGen) { offsetDateTime =>
      offsetDateTime < (offsetDateTime + 1.hour)
    }
  }
} 
Example 72
Source File: RoleBindingSpec.scala    From sbt-kubeyml   with MIT License 5 votes vote down vote up
package kubeyml.roles

import org.scalacheck.Properties
import org.scalacheck.Prop.forAll
import json_support._
import io.circe.syntax._

class RoleBindingSpec extends Properties("RoleBinding") with KubernetesRoleBinding {

  property("validrolebinding") = forAll(roleBindingGen) {
    case (serviceAccount, roleBinding) =>
      val expectedJson = binding(
        roleBinding.metadata.namespace.value,
        roleBinding.metadata.name.value,
        serviceAccount,
        roleBinding.roleRef.role.metadata.name.value)

      val actualJson = Right(roleBinding.asJson)

      if (expectedJson != actualJson) {
        println(expectedJson)
        println(actualJson)
      }

      expectedJson == actualJson
  }

} 
Example 73
Source File: TraverseHListSuite.scala    From kittens   with Apache License 2.0 5 votes vote down vote up
package cats.sequence

import cats.data._
import cats.derived._
import cats.instances.all._
import org.scalacheck.Prop.forAll
import shapeless._

class TraverseHListSuite extends KittensSuite {

  def optToValidation[T](opt: Option[T]): Validated[String, T] =
    Validated.fromOption(opt, "Nothing Here")

  object headOption extends Poly1 {
    implicit def caseSet[T] = at[Set[T]](_.headOption)
  }

  object optionToValidation extends Poly1 {
    implicit def caseOption[T] = at[Option[T]](optToValidation)
  }

  test("traversing Set with Set => Option")(check {
    forAll { (x: Set[Int], y: Set[String], z: Set[Float]) =>
      val expected = (x.headOption, y.headOption, z.headOption).mapN(_ :: _ :: _ :: HNil)
      (x :: y :: z :: HNil).traverse(headOption) == expected
    }
  })

  test("traversing Option with Option => Validation")(check {
    forAll { (x: Option[Int], y: Option[String], z: Option[Float]) =>
      val expected = (optToValidation(x), optToValidation(y), optToValidation(z)).mapN(_ :: _ :: _ :: HNil)
      (x :: y :: z :: HNil).traverse(optionToValidation) == expected
    }
  })
} 
Example 74
Source File: SQLContextTests.scala    From frameless   with Apache License 2.0 5 votes vote down vote up
package frameless

import org.scalacheck.Prop
import org.scalacheck.Prop.{forAll, _}

class SQLContextTests extends TypedDatasetSuite {
  test("sqlContext") {
    def prop[A: TypedEncoder](data: Vector[A]): Prop = {
      val dataset = TypedDataset.create[A](data)

      dataset.sqlContext =? dataset.dataset.sqlContext
    }

    check(forAll(prop[Int] _))
    check(forAll(prop[String] _))
  }
} 
Example 75
Source File: ColumnsTests.scala    From frameless   with Apache License 2.0 5 votes vote down vote up
package frameless

import org.scalacheck.Prop
import org.scalacheck.Prop.forAll

class ColumnsTests extends TypedDatasetSuite {
  test("columns") {
    def prop(i: Int, s: String, b: Boolean, l: Long, d: Double, by: Byte): Prop = {
      val x1 = X1(i) :: Nil
      val x2 = X2(i, s) :: Nil
      val x3 = X3(i, s, b) :: Nil
      val x4 = X4(i, s, b, l) :: Nil
      val x5 = X5(i, s, b, l, d) :: Nil
      val x6 = X6(i, s, b, l, d, by) :: Nil

      val datasets = Seq(TypedDataset.create(x1), TypedDataset.create(x2),
        TypedDataset.create(x3), TypedDataset.create(x4),
        TypedDataset.create(x5), TypedDataset.create(x6))

      Prop.all(datasets.flatMap { dataset =>
        val columns = dataset.dataset.columns
        dataset.columns.map(col =>
          Prop.propBoolean(columns contains col)
        )
      }: _*)
    }

    check(forAll(prop _))
  }
} 
Example 76
Source File: QueryExecutionTests.scala    From frameless   with Apache License 2.0 5 votes vote down vote up
package frameless

import org.scalacheck.Prop
import org.scalacheck.Prop.{forAll, _}

class QueryExecutionTests extends TypedDatasetSuite {
  test("queryExecution") {
    def prop[A: TypedEncoder](data: Vector[A]): Prop = {
      val dataset = TypedDataset.create[A](data)

      dataset.queryExecution =? dataset.dataset.queryExecution
    }

    check(forAll(prop[Int] _))
    check(forAll(prop[String] _))
  }
} 
Example 77
Source File: CheckpointTests.scala    From frameless   with Apache License 2.0 5 votes vote down vote up
package frameless

import org.scalacheck.Prop
import org.scalacheck.Prop.{forAll, _}


class CheckpointTests extends TypedDatasetSuite {
  test("checkpoint") {
    def prop[A: TypedEncoder](data: Vector[A], isEager: Boolean): Prop = {
      val dataset = TypedDataset.create(data)

      dataset.sparkSession.sparkContext.setCheckpointDir(TEST_OUTPUT_DIR)

      dataset.checkpoint(isEager).run().queryExecution.toString() =?
        dataset.dataset.checkpoint(isEager).queryExecution.toString()
    }

    check(forAll(prop[Int] _))
    check(forAll(prop[String] _))
  }
} 
Example 78
Source File: FlattenTests.scala    From frameless   with Apache License 2.0 5 votes vote down vote up
package frameless

import org.scalacheck.Prop
import org.scalacheck.Prop.forAll
import org.scalacheck.Prop._


class FlattenTests extends TypedDatasetSuite {
  test("simple flatten test") {
    val ds: TypedDataset[(Int,Option[Int])] = TypedDataset.create(Seq((1,Option(1))))
    ds.flattenOption('_2): TypedDataset[(Int,Int)]
  }

  test("different Optional types") {
    def prop[A: TypedEncoder](xs: List[X1[Option[A]]]): Prop = {
      val tds: TypedDataset[X1[Option[A]]] = TypedDataset.create(xs)

      val framelessResults: Seq[Tuple1[A]] = tds.flattenOption('a).collect().run().toVector
      val scalaResults = xs.flatMap(_.a).map(Tuple1(_)).toVector

      framelessResults ?= scalaResults
    }

    check(forAll(prop[Long] _))
    check(forAll(prop[Int] _))
    check(forAll(prop[Char] _))
    check(forAll(prop[String] _))
  }
} 
Example 79
Source File: ColumnTypesTest.scala    From frameless   with Apache License 2.0 5 votes vote down vote up
package frameless
package ops

import org.scalacheck.Prop
import org.scalacheck.Prop.forAll
import shapeless.HNil
import shapeless.::

class ColumnTypesTest extends TypedDatasetSuite {
  test("test summoning") {
    def prop[A: TypedEncoder, B: TypedEncoder, C: TypedEncoder, D: TypedEncoder](data: Vector[X4[A, B, C, D]]): Prop = {
      val d: TypedDataset[X4[A, B, C, D]] = TypedDataset.create(data)
      val hlist = d('a) :: d('b) :: d('c) :: d('d) :: HNil

      type TC[N] = TypedColumn[X4[A,B,C,D], N]

      type IN = TC[A] :: TC[B] :: TC[C] :: TC[D] :: HNil
      type OUT = A :: B :: C :: D :: HNil

      implicitly[ColumnTypes.Aux[X4[A,B,C,D], IN, OUT]]
      Prop.passed // successful compilation implies test correctness
    }

    check(forAll(prop[Int, String, X1[String], Boolean] _))
    check(forAll(prop[Vector[Int], Vector[Vector[String]], X1[String], Option[String]] _))
  }
} 
Example 80
Source File: ExplodeTests.scala    From frameless   with Apache License 2.0 5 votes vote down vote up
package frameless

import frameless.functions.CatalystExplodableCollection
import org.scalacheck.{Arbitrary, Prop}
import org.scalacheck.Prop.forAll
import org.scalacheck.Prop._

import scala.reflect.ClassTag


class ExplodeTests extends TypedDatasetSuite {
  test("simple explode test") {
    val ds = TypedDataset.create(Seq((1,Array(1,2))))
    ds.explode('_2): TypedDataset[(Int,Int)]
  }

  test("explode on vectors/list/seq") {
    def prop[F[X] <: Traversable[X] : CatalystExplodableCollection, A: TypedEncoder](xs: List[X1[F[A]]])(implicit arb: Arbitrary[F[A]], enc: TypedEncoder[F[A]]): Prop = {
      val tds = TypedDataset.create(xs)

      val framelessResults = tds.explode('a).collect().run().toVector
      val scalaResults = xs.flatMap(_.a).map(Tuple1(_)).toVector

      framelessResults ?= scalaResults
    }

    check(forAll(prop[Vector, Long] _))
    check(forAll(prop[Seq, Int] _))
    check(forAll(prop[Vector, Char] _))
    check(forAll(prop[Vector, String] _))
    check(forAll(prop[List, Long] _))
    check(forAll(prop[List, Int] _))
    check(forAll(prop[List, Char] _))
    check(forAll(prop[List, String] _))
  }

  test("explode on arrays") {
    def prop[A: TypedEncoder: ClassTag](xs: List[X1[Array[A]]]): Prop = {
      val tds = TypedDataset.create(xs)

      val framelessResults = tds.explode('a).collect().run().toVector
      val scalaResults = xs.flatMap(_.a).map(Tuple1(_)).toVector

      framelessResults ?= scalaResults
    }

    check(forAll(prop[Long] _))
    check(forAll(prop[Int] _))
    check(forAll(prop[String] _))
  }
} 
Example 81
Source File: ApplicativeLaws.scala    From scalaz-reactive   with Apache License 2.0 5 votes vote down vote up
package scalaz.reactive.laws
import org.scalacheck.{ Gen, Prop }
import org.scalacheck.Prop.forAll
import org.specs2.matcher.{ MatchResult, MustMatchers }
import scalaz.Applicative

class ApplicativeLaws[F[_], A](
  applicative: Applicative[F],
  aGen: Gen[A],
  faGen: Gen[F[A]],
  abGen: Gen[A => A],
  fabGen: Gen[F[A => A]],
  valueForEqGen: Gen[F[A] => A]
)(implicit
  pa: MatchResult[A] => Prop,
  pfa: MatchResult[F[A]] => Prop)
    extends MustMatchers {

  implicit class Equalable(v: F[A]) {
    def valueForEq(f: F[A] => A) = f(v)
  }

  // ap(fa)(point(_)) == fa
  def apIdentityLaw = forAll(faGen, valueForEqGen) { (fa, v) =>
    applicative
      .ap(fa)(applicative.point(identity[A](_)))
      .valueForEq(v) must beEqualTo(fa.valueForEq(v))
  }

  // ap(point(a))(point(ab)) == point(ab(a))
  def apHomomorphismLaw = forAll(aGen, abGen) { (a, ab) =>
    applicative
      .ap(applicative.point(a))(applicative.point(ab)) must
      beEqualTo(applicative.point(ab(a)))
  }

  // ap(point(a))(fab) == ap(fab)(point(_.apply(a)))
  def apInterchangeLaw = forAll(aGen, fabGen) { (a, fab) =>
    applicative.ap(applicative.point(a))(fab) must
      beEqualTo(
        applicative.ap(fab)(applicative.point((x: A => A) => x.apply(a)))
      )
  }

  //map(fa)(ab) == ap(fa)(point(ab))
  def apDerivedMapLaw = forAll(faGen, abGen, valueForEqGen) { (fa, ab, v) =>
    applicative.map(fa)(ab).valueForEq(v) must
      beEqualTo(applicative.ap(fa)(applicative.point(ab)).valueForEq(v))
  }
} 
Example 82
Source File: PilotSpec.scala    From pizza-auth-3   with MIT License 5 votes vote down vote up
package moe.pizza.auth.models

import moe.pizza.auth.models.Pilot.CrestToken
import moe.pizza.eveapi.ApiKey
import org.scalacheck.Properties
import org.scalacheck.Prop.forAll


object PilotSpec extends Properties("Pilot") {

  property("getCrestKeys") = forAll { (charid: Long, token: String) =>
    val p = Pilot(null,
                  null,
                  null,
                  null,
                  null,
                  null,
                  null,
                  null,
                  List("%d:%s".format(charid, token)),
                  null)
    p.getCrestTokens == List(new CrestToken(charid, token))
  }

  property("getApiKeys") = forAll { (id: Int, key: String) =>
    val p = Pilot(null,
                  null,
                  null,
                  null,
                  null,
                  null,
                  null,
                  null,
                  null,
                  List("%d:%s".format(id, key)))
    p.getApiKeys == List(new ApiKey(id, key))
  }

  property("toJson") = forAll {
    (uid: String, status: String, alliance: String, corp: String,
     character: String, email: String, meta: String, groups: List[String]) =>
      val p =
        Pilot(uid,
              Pilot.Status.lookup.getOrElse(status, Pilot.Status.ineligible),
              alliance,
              corp,
              character,
              email,
              Pilot.OM.readTree("{\"meta\": \"%s\"}".format(meta)),
              groups,
              List.empty,
              List.empty)
      val json = p.toJson
      val p2 = Pilot.fromJson(json)
      p2.contains(p)
  }

  property("getCrestKeys badinput") = forAll { (s: String) =>
    val p =
      Pilot(null, null, null, null, null, null, null, null, List(s), null)
    s.contains(":") == !p.getCrestTokens.isEmpty
  }

  property("getApiKeys, badinput") = forAll { (s: String) =>
    val p =
      Pilot(null, null, null, null, null, null, null, null, null, List(s))
    s.contains(":") == !p.getApiKeys.isEmpty
  }

  property("getGroups") = forAll { (g: String, g2: String) =>
    val p = Pilot(null, null, null, null, null, null, null, null, null, null)
    val p2 = p.copy(authGroups = List(g, g2 + "-pending"))
    p2.getGroups == List(g) && p2.getPendingGroups == List(g2)
  }

} 
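The toJson/fromJson check above is an instance of the general encode/decode round-trip property. A minimal self-contained sketch of that pattern, using a hypothetical colon-separated encoding rather than pizza-auth-3's actual serialization:

import org.scalacheck.Properties
import org.scalacheck.Prop.forAll

import scala.util.Try

object RoundTripSketch extends Properties("RoundTripSketch") {

  // Hypothetical encoding: a colon-separated pair, mirroring the "%d:%s" keys above.
  final case class Token(id: Long, secret: String)

  private def encode(t: Token): String = s"${t.id}:${t.secret}"

  private def decode(s: String): Option[Token] = s.split(":", 2) match {
    case Array(id, secret) => Try(id.toLong).toOption.map(Token(_, secret))
    case _                 => None
  }

  // decode(encode(t)) must recover the original token for any id and secret.
  property("encode/decode round trip") = forAll { (id: Long, secret: String) =>
    decode(encode(Token(id, secret))).contains(Token(id, secret))
  }
}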
Example 83
Source File: sbt-test-scalacheck.scala    From scala-course   with GNU General Public License v3.0 5 votes vote down vote up
import org.scalatest.matchers.should.Matchers

import org.scalacheck._
import org.scalacheck.Prop.{forAll,propBoolean}

class SqrtSpecification extends Properties("Sqrt") with Matchers {

  property("math.sqrt should square to give original") =
    forAll { a: Double =>
      (a >= 0.0) ==> {
        val s = math.sqrt(a)
        val tol = 1e-8 * a
        s*s === a +- tol
      }
    }

}

class GammaSpec extends Properties("Gamma") with Matchers {

  import breeze.stats.distributions.Gamma

  val tol = 1e-8
  val big = 1e100

  property("mean") =
    forAll { (a: Double, b: Double) =>
      ((a > tol) && (a < big) && (b > tol) && (b < big)) ==> {
        Gamma(a,b).mean === a*b +- tol
      }
    }

}

class StringSpecification extends Properties("String") with Matchers {

  property("startwith first string") =
    forAll { (a: String, b: String) =>
      (a+b).startsWith(a)
    } 

  property("concatenate bound") =
    forAll { (a: String, b: String) =>
      (a+b).length >= a.length && (a+b).length >= b.length
    }

  property("concatenate length") =
    forAll { (a: String, b: String) =>
      (a+b).length == a.length + b.length
    }

  property("substring") =
    forAll { (a: String, b: String, c: String) =>
      (a+b+c).substring(a.length, a.length+b.length) == b
    }

}

// eof 
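The ==> guards above discard generated values that fail the precondition, which can exhaust ScalaCheck's retry budget when the guard is narrow; constraining the generator itself avoids discards. A minimal sketch of the sqrt property written that way, assuming a simple non-negative Double generator:

import org.scalacheck.{Gen, Properties}
import org.scalacheck.Prop.forAll

object ConstrainedGenSketch extends Properties("ConstrainedGenSketch") {

  // Generate only non-negative doubles so no samples are discarded by a guard.
  private val nonNegDouble: Gen[Double] = Gen.chooseNum(0.0, 1e100)

  property("sqrt squares back within tolerance") = forAll(nonNegDouble) { a =>
    val s   = math.sqrt(a)
    val tol = 1e-8 * math.max(a, 1.0)
    math.abs(s * s - a) <= tol
  }
}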
Example 84
Source File: ModelScalaCheck.scala    From spark-tools   with Apache License 2.0 5 votes vote down vote up
package io.univalence

import io.univalence.centrifuge.Result
import io.univalence.centrifuge._
import org.scalacheck.Prop.forAll
import org.scalacheck.Prop.all
import org.scalacheck.Properties

object ModelScalaCheck extends Properties("String") {

  property("isNotPure") = forAll { a: String =>
    all(
      !Result(Some(a), Vector(Annotation("msg", Some("oF"), Vector("fF"), false, 1))).isPure,
      !Result(None, Vector(Annotation(a, Some("oF"), Vector("fF"), false, 1))).isPure,
      !Result(Some(a), Vector(Annotation(a, Some("oF"), Vector("fF"), false, 1))).isPure,
      !Result(Some("msg"), Vector(Annotation(a, Some("oF"), Vector("fF"), false, 1))).isPure
    )
  }
  property("isPure") = forAll { a: String =>
    Result(Some(a), Vector()).isPure
  }
  property("filter") = forAll { a: String =>
    Result(Some(a), Vector(Annotation(a, Some("oF"), Vector("fF"), false, 1)))
      .filter(_.contains(a)) == Result(
      Some(a),
      Vector(Annotation(a, Some("oF"), Vector("fF"), false, 1))
    )
  }
  property("map") = forAll { a: String =>
    all(
      Result(Some(a), Vector(Annotation("msg", Some("oF"), Vector("fF"), false, 1)))
        .map(_.toString) == Result(
        Some(a),
        Vector(Annotation("msg", Some("oF"), Vector("fF"), false, 1))
      ),
      Result(Some(a), Vector(Annotation(a, Some("oF"), Vector("fF"), false, 1)))
        .map(_.toString) == Result(
        Some(a),
        Vector(Annotation(a, Some("oF"), Vector("fF"), false, 1))
      ),
      Result(Some("msg"), Vector(Annotation(a, Some("oF"), Vector("fF"), false, 1))) == Result(
        Some("msg"),
        Vector(Annotation(a, Some("oF"), Vector("fF"), false, 1))
      )
    )
  }

} 
Example 85
Source File: RecordMatcherSpec.scala    From shapeless-datatype   with Apache License 2.0 5 votes vote down vote up
package shapeless.datatype.record

import org.scalacheck.Prop.{all, forAll, propBoolean}
import org.scalacheck.ScalacheckShapeless._
import org.scalacheck._
import shapeless._
import shapeless.poly._

object RecordMatcherSpec extends Properties("RecordMatcher") {
  import shapeless.datatype.test.Records._
  import shapeless.datatype.test.SerializableUtils._

  // always generate [-π, π] for Double
  implicit val arbDouble = Arbitrary(Gen.chooseNum(-math.Pi, math.Pi))
  // always generate Some[T] for Option[T]
  implicit def arbOption[T](implicit arb: Arbitrary[T]) = Arbitrary(Gen.some(arb.arbitrary))
  // always generate non-empty List[T]
  implicit def arbList[T](implicit arb: Arbitrary[T]) = Arbitrary(Gen.nonEmptyListOf(arb.arbitrary))

  object negate extends ->((x: Double) => -x)
  object inc extends ->((x: Double) => x + 1.0)
  implicit def compareDoubles(x: Double, y: Double) = math.abs(x) == math.abs(y)

  def test[A, L <: HList](original: A, withNegate: A, withInc: A)(implicit
    gen: LabelledGeneric.Aux[A, L],
    mr: MatchRecord[L]
  ): Prop = {
    val t = ensureSerializable(RecordMatcher[A])
    all(
      "equal self" |: t(original, original),
      "equal negate" |: t(original, withNegate),
      "not equal inc" |: !t(original, withInc)
    )
  }

  property("required") = forAll { m: Required =>
    test(m, everywhere(negate)(m), everywhere(inc)(m))
  }
  property("optional") = forAll { m: Optional =>
    test(m, everywhere(negate)(m), everywhere(inc)(m))
  }
  property("repeated") = forAll { m: Repeated =>
    test(m, everywhere(negate)(m), everywhere(inc)(m))
  }
  property("mixed") = forAll { m: Mixed => test(m, everywhere(negate)(m), everywhere(inc)(m)) }
  property("nested") = forAll { m: Nested => test(m, everywhere(negate)(m), everywhere(inc)(m)) }
} 
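The |: labels and all(...) combinator used above make ScalaCheck report which clause of a multi-part property failed. A minimal self-contained sketch of the same idiom on plain strings:

import org.scalacheck.Properties
import org.scalacheck.Prop.{all, forAll, propBoolean}

object LabelledPropsSketch extends Properties("LabelledPropsSketch") {

  // Each clause carries a label that ScalaCheck reports when that clause fails.
  property("string concatenation") = forAll { (a: String, b: String) =>
    val ab = a + b
    all(
      "starts with a"     |: ab.startsWith(a),
      "ends with b"       |: ab.endsWith(b),
      "length is |a|+|b|" |: (ab.length == a.length + b.length)
    )
  }
}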
Example 86
Source File: LensMatcherSpec.scala    From shapeless-datatype   with Apache License 2.0 5 votes vote down vote up
package shapeless.datatype.record

import org.scalacheck.Prop.{all, forAll, propBoolean}
import org.scalacheck.ScalacheckShapeless._
import org.scalacheck._
import shapeless._

object LensMatcherSpec extends Properties("LensMatcher") {
  import shapeless.datatype.test.Records._
  import shapeless.datatype.test.SerializableUtils._

  // always generate alphabet string for String
  implicit val arbString = Arbitrary(Gen.alphaStr)
  // always generate Some[T] for Option[T]
  implicit def arbOption[T](implicit arb: Arbitrary[T]) = Arbitrary(Gen.some(arb.arbitrary))
  // always generate non-empty List[T]
  implicit def arbList[T](implicit arb: Arbitrary[T]) = Arbitrary(Gen.nonEmptyListOf(arb.arbitrary))

  val mL = ensureSerializable(LensMatcher[Nested].on(_ >> 'longField)(math.abs(_) == math.abs(_)))
  val mML = ensureSerializable(
    LensMatcher[Nested].on(_ >> 'mixedField >> 'longField)(math.abs(_) == math.abs(_))
  )
  val mMulti = ensureSerializable(
    LensMatcher[Nested]
      .on(_ >> 'mixedField >> 'longField)(math.abs(_) == math.abs(_))
      .on(_ >> 'mixedField >> 'stringField)(_.toLowerCase == _.toLowerCase)
  )

  val lensL = lens[Nested] >> 'longField
  val lensML = lens[Nested] >> 'mixedField >> 'longField
  val lensMS = lens[Nested] >> 'mixedField >> 'stringField

  property("root") = forAll { m: Nested =>
    all(
      "equal self" |: mL(m, m),
      "equal negate" |: mL(m, lensL.modify(m)(-_)),
      "not equal inc" |: !mL(m, lensL.modify(m)(_ + 1L)),
      "not equal rest" |: !mL(m, lensML.modify(m)(_ + 1L))
    )
  }

  property("nested") = forAll { m: Nested =>
    all(
      "equal self" |: mML(m, m),
      "equal negate" |: mML(m, lensML.modify(m)(-_)),
      "not equal inc" |: !mML(m, lensML.modify(m)(_ + 1L)),
      "not equal rest" |: !mML(m, lensL.modify(m)(_ + 1L))
    )
  }

  property("multi") = forAll { m: Nested =>
    all(
      "equal self" |: mMulti(m, m),
      "equal negate" |: mMulti(m, lensML.modify(m)(-_)),
      "equal upper" |: mMulti(m, lensMS.modify(m)(_.toUpperCase)),
      "equal negate+upper" |: mMulti(m, lensMS.modify(lensML.modify(m)(-_))(_.toUpperCase)),
      "not equal inc" |: !mMulti(m, lensML.modify(m)(_ + 1L)),
      "not equal append" |: !mMulti(m, lensMS.modify(m)(_ + "!")),
      "not equal rest" |: !mMulti(m, lensL.modify(m)(_ + 1L))
    )
  }
} 
Example 87
Source File: TensorFlowTypeSpec.scala    From shapeless-datatype   with Apache License 2.0 5 votes vote down vote up
package shapeless.datatype.tensorflow

import java.net.URI

import org.joda.time.Instant
import org.scalacheck.Prop.{all, forAll}
import org.scalacheck.ScalacheckShapeless._
import org.scalacheck._
import org.tensorflow.example.Example
import shapeless._
import shapeless.datatype.record._

object TensorFlowTypeSpec extends Properties("TensorFlowType") {
  import shapeless.datatype.test.Records._
  import shapeless.datatype.test.SerializableUtils._

  implicit def compareByteArrays(x: Array[Byte], y: Array[Byte]) = java.util.Arrays.equals(x, y)
  implicit def compareIntArrays(x: Array[Int], y: Array[Int]) = java.util.Arrays.equals(x, y)
  implicit def compareDouble(x: Double, y: Double) = x.toFloat == y.toFloat

  def roundTrip[A, L <: HList](m: A)(implicit
    gen: LabelledGeneric.Aux[A, L],
    fromL: FromFeatures[L],
    toL: ToFeatures[L],
    mr: MatchRecord[L]
  ): Prop = {
    val t = ensureSerializable(TensorFlowType[A])
    val f1: SerializableFunction[A, Example] =
      new SerializableFunction[A, Example] {
        override def apply(m: A): Example = t.toExample(m)
      }
    val f2: SerializableFunction[Example, Option[A]] =
      new SerializableFunction[Example, Option[A]] {
        override def apply(m: Example): Option[A] = t.fromExample(m)
      }
    val f3: SerializableFunction[A, Example.Builder] =
      new SerializableFunction[A, Example.Builder] {
        override def apply(m: A): Example.Builder = t.toExampleBuilder(m)
      }
    val f4: SerializableFunction[Example.Builder, Option[A]] =
      new SerializableFunction[Example.Builder, Option[A]] {
        override def apply(m: Example.Builder): Option[A] = t.fromExampleBuilder(m)
      }
    val toFn1 = ensureSerializable(f1)
    val fromFn1 = ensureSerializable(f2)
    val toFn2 = ensureSerializable(f3)
    val fromFn2 = ensureSerializable(f4)
    val copy1 = fromFn1(toFn1(m))
    val copy2 = fromFn2(toFn2(m))
    val rm = RecordMatcher[A]
    all(copy1.exists(rm(_, m)), copy2.exists(rm(_, m)))
  }

  implicit val timestampTensorFlowMappableType = TensorFlowType.at[Instant](
    TensorFlowType.toLongs(_).map(new Instant(_)),
    xs => TensorFlowType.fromLongs(xs.map(_.getMillis))
  )
  property("required") = forAll { m: Required => roundTrip(m) }
  property("optional") = forAll { m: Optional => roundTrip(m) }
  property("repeated") = forAll { m: Repeated => roundTrip(m) }
  property("mixed") = forAll { m: Mixed => roundTrip(m) }
  property("seqs") = forAll { m: Seqs => roundTrip(m) }

  implicit val uriTensorFlowType = TensorFlowType.at[URI](
    TensorFlowType.toStrings(_).map(URI.create),
    xs => TensorFlowType.fromStrings(xs.map(_.toString))
  )
  property("custom") = forAll { m: Custom => roundTrip(m) }
} 
Example 88
Source File: DatastoreTypeSpec.scala    From shapeless-datatype   with Apache License 2.0 5 votes vote down vote up
package shapeless.datatype.datastore

import java.net.URI

import com.google.datastore.v1.Entity
import com.google.datastore.v1.client.DatastoreHelper._
import org.scalacheck.Prop.{all, forAll}
import org.scalacheck.ScalacheckShapeless._
import org.scalacheck._
import shapeless._
import shapeless.datatype.record._

object DatastoreTypeSpec extends Properties("DatastoreType") {
  import shapeless.datatype.test.Records._
  import shapeless.datatype.test.SerializableUtils._

  implicit def compareByteArrays(x: Array[Byte], y: Array[Byte]) = java.util.Arrays.equals(x, y)
  implicit def compareIntArrays(x: Array[Int], y: Array[Int]) = java.util.Arrays.equals(x, y)

  def roundTrip[A, L <: HList](m: A)(implicit
    gen: LabelledGeneric.Aux[A, L],
    fromL: FromEntity[L],
    toL: ToEntity[L],
    mr: MatchRecord[L]
  ): Prop = {
    val t = ensureSerializable(DatastoreType[A])
    val f1: SerializableFunction[A, Entity] =
      new SerializableFunction[A, Entity] {
        override def apply(m: A): Entity = t.toEntity(m)
      }
    val f2: SerializableFunction[Entity, Option[A]] =
      new SerializableFunction[Entity, Option[A]] {
        override def apply(m: Entity): Option[A] = t.fromEntity(m)
      }
    val f3: SerializableFunction[A, Entity.Builder] =
      new SerializableFunction[A, Entity.Builder] {
        override def apply(m: A): Entity.Builder = t.toEntityBuilder(m)
      }
    val f4: SerializableFunction[Entity.Builder, Option[A]] =
      new SerializableFunction[Entity.Builder, Option[A]] {
        override def apply(m: Entity.Builder): Option[A] = t.fromEntityBuilder(m)
      }
    val toFn1 = ensureSerializable(f1)
    val fromFn1 = ensureSerializable(f2)
    val toFn2 = ensureSerializable(f3)
    val fromFn2 = ensureSerializable(f4)
    val copy1 = fromFn1(toFn1(m))
    val copy2 = fromFn2(toFn2(m))
    val rm = RecordMatcher[A]
    all(copy1.exists(rm(_, m)), copy2.exists(rm(_, m)))
  }

  property("required") = forAll { m: Required => roundTrip(m) }
  property("optional") = forAll { m: Optional => roundTrip(m) }
  property("repeated") = forAll { m: Repeated => roundTrip(m) }
  property("mixed") = forAll { m: Mixed => roundTrip(m) }
  property("nested") = forAll { m: Nested => roundTrip(m) }
  property("seqs") = forAll { m: Seqs => roundTrip(m) }

  implicit val uriDatastoreType =
    DatastoreType.at[URI](v => URI.create(v.getStringValue), u => makeValue(u.toString).build())
  property("custom") = forAll { m: Custom => roundTrip(m) }
} 
Example 89
Source File: AvroTypeSpec.scala    From shapeless-datatype   with Apache License 2.0 5 votes vote down vote up
package shapeless.datatype.avro

import java.io.{ByteArrayInputStream, ByteArrayOutputStream}
import java.net.URI
import java.nio.ByteBuffer

import com.google.protobuf.ByteString
import org.apache.avro.Schema
import org.apache.avro.generic.{GenericDatumReader, GenericDatumWriter, GenericRecord}
import org.apache.avro.io.{DecoderFactory, EncoderFactory}
import org.joda.time.Instant
import org.scalacheck.Prop.forAll
import org.scalacheck.ScalacheckShapeless._
import org.scalacheck._
import shapeless._
import shapeless.datatype.record._

import scala.reflect.runtime.universe._

object AvroTypeSpec extends Properties("AvroType") {
  import shapeless.datatype.test.Records._
  import shapeless.datatype.test.SerializableUtils._

  implicit def compareByteArrays(x: Array[Byte], y: Array[Byte]) = java.util.Arrays.equals(x, y)
  implicit def compareIntArrays(x: Array[Int], y: Array[Int]) = java.util.Arrays.equals(x, y)

  def roundTrip[A: TypeTag, L <: HList](m: A)(implicit
    gen: LabelledGeneric.Aux[A, L],
    fromL: FromAvroRecord[L],
    toL: ToAvroRecord[L],
    mr: MatchRecord[L]
  ): Boolean = {
    val t = ensureSerializable(AvroType[A])
    val f1: SerializableFunction[A, GenericRecord] =
      new SerializableFunction[A, GenericRecord] {
        override def apply(m: A): GenericRecord = t.toGenericRecord(m)
      }
    val f2: SerializableFunction[GenericRecord, Option[A]] =
      new SerializableFunction[GenericRecord, Option[A]] {
        override def apply(m: GenericRecord): Option[A] = t.fromGenericRecord(m)
      }
    val toFn = ensureSerializable(f1)
    val fromFn = ensureSerializable(f2)
    val copy = fromFn(roundTripRecord(toFn(m)))
    val rm = RecordMatcher[A]
    copy.exists(rm(_, m))
  }

  def roundTripRecord(r: GenericRecord): GenericRecord = {
    val writer = new GenericDatumWriter[GenericRecord](r.getSchema)
    val baos = new ByteArrayOutputStream()
    val encoder = EncoderFactory.get().binaryEncoder(baos, null)
    writer.write(r, encoder)
    encoder.flush()
    baos.close()
    val bytes = baos.toByteArray

    val reader = new GenericDatumReader[GenericRecord](r.getSchema)
    val bais = new ByteArrayInputStream(bytes)
    val decoder = DecoderFactory.get().binaryDecoder(bais, null)
    reader.read(null, decoder)
  }

  implicit val byteStringAvroType = AvroType.at[ByteString](Schema.Type.BYTES)(
    v => ByteString.copyFrom(v.asInstanceOf[ByteBuffer]),
    v => ByteBuffer.wrap(v.toByteArray)
  )
  implicit val instantAvroType =
    AvroType.at[Instant](Schema.Type.LONG)(v => new Instant(v.asInstanceOf[Long]), _.getMillis)
  property("required") = forAll { m: Required => roundTrip(m) }
  property("optional") = forAll { m: Optional => roundTrip(m) }
  property("repeated") = forAll { m: Repeated => roundTrip(m) }
  property("mixed") = forAll { m: Mixed => roundTrip(m) }
  property("nested") = forAll { m: Nested => roundTrip(m) }
  property("seqs") = forAll { m: Seqs => roundTrip(m) }

  implicit val uriAvroType =
    AvroType.at[URI](Schema.Type.STRING)(v => URI.create(v.toString), _.toString)
  property("custom") = forAll { m: Custom => roundTrip(m) }
} 
Example 90
Source File: BigQueryTypeSpec.scala    From shapeless-datatype   with Apache License 2.0 5 votes vote down vote up
package shapeless.datatype.bigquery

import java.net.URI

import com.fasterxml.jackson.databind.{ObjectMapper, SerializationFeature}
import com.google.api.services.bigquery.model.TableRow
import com.google.common.io.BaseEncoding
import com.google.protobuf.ByteString
import org.joda.time.{Instant, LocalDate, LocalDateTime, LocalTime}
import org.scalacheck.Prop.forAll
import org.scalacheck.ScalacheckShapeless._
import org.scalacheck._
import shapeless._
import shapeless.datatype.record._

import scala.reflect.runtime.universe._

object BigQueryTypeSpec extends Properties("BigQueryType") {
  import shapeless.datatype.test.Records._
  import shapeless.datatype.test.SerializableUtils._

  val mapper = new ObjectMapper().disable(SerializationFeature.FAIL_ON_EMPTY_BEANS)

  implicit def compareByteArrays(x: Array[Byte], y: Array[Byte]) = java.util.Arrays.equals(x, y)
  implicit def compareIntArrays(x: Array[Int], y: Array[Int]) = java.util.Arrays.equals(x, y)

  def roundTrip[A: TypeTag, L <: HList](m: A)(implicit
    gen: LabelledGeneric.Aux[A, L],
    fromL: FromTableRow[L],
    toL: ToTableRow[L],
    mr: MatchRecord[L]
  ): Boolean = {
    BigQuerySchema[A] // FIXME: verify the generated schema
    val t = ensureSerializable(BigQueryType[A])
    val f1: SerializableFunction[A, TableRow] =
      new SerializableFunction[A, TableRow] {
        override def apply(m: A): TableRow = t.toTableRow(m)
      }
    val f2: SerializableFunction[TableRow, Option[A]] =
      new SerializableFunction[TableRow, Option[A]] {
        override def apply(m: TableRow): Option[A] = t.fromTableRow(m)
      }
    val toFn = ensureSerializable(f1)
    val fromFn = ensureSerializable(f2)
    val copy = fromFn(mapper.readValue(mapper.writeValueAsString(toFn(m)), classOf[TableRow]))
    val rm = RecordMatcher[A]
    copy.exists(rm(_, m))
  }

  implicit val byteStringBigQueryMappableType = BigQueryType.at[ByteString]("BYTES")(
    x => ByteString.copyFrom(BaseEncoding.base64().decode(x.toString)),
    x => BaseEncoding.base64().encode(x.toByteArray)
  )
  property("required") = forAll { m: Required => roundTrip(m) }
  property("optional") = forAll { m: Optional => roundTrip(m) }
  property("repeated") = forAll { m: Repeated => roundTrip(m) }
  property("mixed") = forAll { m: Mixed => roundTrip(m) }
  property("nested") = forAll { m: Nested => roundTrip(m) }
  property("seqs") = forAll { m: Seqs => roundTrip(m) }

  implicit val arbDate = Arbitrary(arbInstant.arbitrary.map(i => new LocalDate(i.getMillis)))
  implicit val arbTime = Arbitrary(arbInstant.arbitrary.map(i => new LocalTime(i.getMillis)))
  implicit val arbDateTime = Arbitrary(
    arbInstant.arbitrary.map(i => new LocalDateTime(i.getMillis))
  )

  case class DateTimeTypes(
    instant: Instant,
    date: LocalDate,
    time: LocalTime,
    dateTime: LocalDateTime
  )
  property("date time types") = forAll { m: DateTimeTypes => roundTrip(m) }

  implicit val uriBigQueryType =
    BigQueryType.at[URI]("STRING")(v => URI.create(v.toString), _.toASCIIString)
  property("custom") = forAll { m: Custom => roundTrip(m) }
} 
Example 91
Source File: UtilsGenTest.scala    From sscheck   with Apache License 2.0 5 votes vote down vote up
package es.ucm.fdi.sscheck.gen

import org.scalacheck.{Properties, Gen}
import org.scalacheck.Arbitrary.arbitrary
import org.scalacheck.Prop.{forAll, exists, BooleanOperators}
import Buildables.buildableSeq

object UtilsGenTest extends Properties("UtilsGenTest test") {  
  property("""containerOfNtoM is able to generate sequences of 
		  string with size between N and M strings for both N and M >= 0""")  = 
    forAll (Gen.choose(0, 10), Gen.choose(0, 10)) { (n : Int, m : Int) => 
      val g = UtilsGen.containerOfNtoM(n, m, arbitrary[String]) : Gen[Seq[String]]
      forAll (g) { ( xs : Seq[String]) =>   
       xs.length >= n && xs.length <= m 
      }
    }
  
  property("repN respects its lenght constraints") = 
    forAll (Gen.choose(0, 10), Gen.choose(0, 10)) { (n : Int, xsLen : Int) => 
      val g = UtilsGen.repN(n, Gen.listOfN(xsLen, Gen.alphaStr)) 
      forAll (g) { (xs : Seq[String])  =>
        xs.length == xsLen * n
      }
    }
  
  property("repNtoM respects its lenght constraints") = 
    forAll (Gen.choose(0, 10), Gen.choose(0, 10), Gen.choose(0, 10)) { (n : Int, m : Int, xsLen : Int) => 
      val g = UtilsGen.repNtoM(n, m, Gen.listOfN(xsLen, arbitrary[String])) 
      forAll (g) { (xs : Seq[String])  =>
        val xsLenObs = xs.length
        xsLenObs >= xsLen * n && xsLenObs <= xsLen * m 
      }
    }   
    
  property("""concSeq returns the result of concatenating the sequences 
generated by its arguments""") = {
    // In order for Prop.exists to be effective, we use a small domain.
    // We quantify over all the lengths considered: this is still weak because
    // only a few lengths are considered, but it's better than nothing.
    forAll (Gen.choose(0, 2)) { (xsLen : Int) =>
      // we consider two generators for lists of elements with that size
      val (gxs1, gxs2) = (Gen.listOfN(xsLen, Gen.choose(1, 3)), Gen.listOfN(xsLen, Gen.choose(4, 6)))
      // val g = UtilsGen.concSeq(gxs1, gxs2)
      forAll (UtilsGen.concSeq(gxs1, gxs2)) { (xs : Seq[Int]) =>
        // Prop.exists is not overloaded to support several generators
        // so we have to use zip
        exists (Gen.zip(gxs1, gxs2)) { (xs12 : (List[Int], List[Int])) =>
          xs == xs12._1 ++ xs12._2
        }
      }
    }    
  } 
} 
Example 92
Source File: ReGenTest.scala    From sscheck   with Apache License 2.0 5 votes vote down vote up
package es.ucm.fdi.sscheck.gen

import org.scalacheck.{Properties, Gen}
import org.scalacheck.Arbitrary.arbitrary
import org.scalacheck.Prop.{forAll, BooleanOperators, exists, atLeastOne}

object ReGenTest extends Properties("ReGen regex generators properties") {
 
  property("epsilon generates empty sequences") = 
    forAll (ReGen.epsilon) { (xs : Seq[Int]) =>
  	  xs.length == 0
  	}
  
  property("symbol generates a single element that contains the argument") = 
    forAll { x : String => 
      forAll (ReGen.symbol(x)) { xs : Seq[String] =>
        xs.length == 1 && xs(0) == x
      }
  }
  
   // def alt[A](gs : Gen[Seq[A]]*) : Gen[Seq[A]] = {
  property("alt is equivalent to epsilon if zero generators are provided") = 
    forAll (ReGen.alt()) { (xs : Seq[Int]) =>
      forAll (ReGen.epsilon) { (ys : Seq[Int]) =>
        xs == ys
      }
  }
  
  property("alt works for more than one argument, and generates values for some of the alternatives (weak, existential)") =  {
    val (g1, g2, g3) = (ReGen.symbol(0), ReGen.symbol(1), ReGen.symbol(2))
    forAll (ReGen.alt(g1, g2, g3)) { (xs : Seq[Int]) =>
      atLeastOne(
        exists (g1) { (ys : Seq[Int]) =>
        xs == ys
        },
        exists (g2) { (ys : Seq[Int]) =>
        xs == ys
        },
        exists (g3) { (ys : Seq[Int]) =>
        xs == ys
        }
      )
    }
  }

  // conc and star only have similar weak existential properties
} 
Example 93
Source File: UtilsProp.scala    From sscheck   with Apache License 2.0 5 votes vote down vote up
package es.ucm.fdi.sscheck.prop

import org.scalacheck.{Properties, Gen}
import org.scalacheck.Prop.{forAll, exists, AnyOperators}
import org.scalacheck.Prop
import org.scalatest._
import org.scalatest.Matchers._
import org.specs2.matcher.MatchFailureException
import scala.util.{Try, Success, Failure}
import org.scalacheck.util.Pretty

object UtilsProp {
  def safeProp[P <% Prop](p : => P) : Prop = {
    Try(p) match {
      case Success(pVal) => pVal
      case Failure(t) => t match {
        case _: TestFailedException => Prop.falsified
        case _: MatchFailureException[_] => Prop.falsified
        case _ => Prop.exception(t) 
      }
    }
  }
  
  def safeExists[A, P](g: Gen[A])(f: (A) => P)
                      (implicit pv: (P) => Prop, pp: (A) => Pretty): Prop = {    
    exists(g)((x : A) => safeProp(pv(f(x))))(identity[Prop], pp)
    // This doesn't work for reasons unknown
    // exists(g)((x : A) => Prop.secure(pv(f(x))))(identity[Prop], pp)
  }  
} 
Example 94
Source File: DatasetGeneratorSizeSpecial.scala    From spark-testing-base   with Apache License 2.0 5 votes vote down vote up
package com.holdenkarau.spark.testing

import org.apache.spark.sql.{Dataset, SQLContext}
import org.scalacheck.{Gen, Arbitrary}
import org.scalacheck.Prop.forAll
import org.scalatest.FunSuite
import org.scalatest.prop.Checkers

class DatasetGeneratorSizeSpecial extends FunSuite
    with SharedSparkContext with Checkers {

  test("test generating sized Datasets[Custom Class]") {
    val sqlContext = new SQLContext(sc)
    import sqlContext.implicits._

    // In 2.3 List is fine, however prior to 2.1 the generator returns
    // a concrete sub type which isn't handled well.
    // This works in 1.6.1+ but we only test in 2.0+ because that's easier
    val carGen: Gen[Dataset[Seq[Car]]] =
      DatasetGenerator.genSizedDataset[Seq[Car]](sqlContext) { size =>
        val slowCarsTopNumber = math.ceil(size * 0.1).toInt
        def carGenerator(speed: Gen[Int]): Gen[Car] = for {
          name <- Arbitrary.arbitrary[String]
          speed <- speed
        } yield Car(name, speed)

        val cars: Gen[List[Car]] = for {
          slowCarsNumber: Int <- Gen.choose(0, slowCarsTopNumber)
          slowCars: List[Car] <- Gen.listOfN(slowCarsNumber, carGenerator(Gen.choose(0, 20)))
          normalSpeedCars: List[Car] <- Gen.listOfN(
            size - slowCarsNumber,
            carGenerator(Gen.choose(21, 150))
          )
        } yield {
          slowCars ++ normalSpeedCars
        }
        cars
      }

    val property =
      forAll(carGen.map(_.flatMap(identity))) {
        dataset =>
          val cars = dataset.collect()
          val dataSetSize  = cars.length
          val slowCars = cars.filter(_.speed < 21)
          slowCars.length <= dataSetSize * 0.1 &&
            cars.map(_.speed).length == dataSetSize
      }

    check(property)
  }
}
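The size-driven generation above builds on ScalaCheck's size parameter; stripped of Spark, the underlying idiom can be sketched as follows (illustrative names, not part of spark-testing-base):

import org.scalacheck.{Gen, Properties}
import org.scalacheck.Prop.forAll

object SizedListSketch extends Properties("SizedListSketch") {

  final case class Car(name: String, speed: Int)

  // At most roughly 10% slow cars (speed < 21) in a list of `size` cars.
  private def carsOfSize(size: Int): Gen[List[Car]] = {
    val slowMax = math.ceil(size * 0.1).toInt
    for {
      nSlow <- Gen.choose(0, slowMax)
      slow  <- Gen.listOfN(nSlow, Gen.choose(0, 20).map(Car("slow", _)))
      fast  <- Gen.listOfN(size - nSlow, Gen.choose(21, 150).map(Car("fast", _)))
    } yield slow ++ fast
  }

  // The generator picks up ScalaCheck's size parameter via Gen.sized.
  private val sizedCars: Gen[List[Car]] = Gen.sized(carsOfSize)

  property("at most ~10% of generated cars are slow") = forAll(sizedCars) { cars =>
    cars.count(_.speed < 21) <= math.ceil(cars.length * 0.1).toInt
  }
}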