org.scalacheck.Arbitrary.arbitrary Scala Examples
The following examples show how to use org.scalacheck.Arbitrary.arbitrary, which returns the Gen[A] backing the implicit Arbitrary[A] instance for a type A. Each example is drawn from an open-source project; the source file, originating project, and license are noted above each listing.
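Before the project examples, here is a minimal, self-contained sketch of the pattern they all share: summoning a generator with arbitrary[A], composing generators in a for-comprehension, and registering an implicit Arbitrary instance so forAll can find it. The Point type and property name are illustrative only and do not come from any of the projects below.

import org.scalacheck.{Arbitrary, Gen, Properties}
import org.scalacheck.Arbitrary.arbitrary
import org.scalacheck.Prop.forAll

// Hypothetical domain type, used only for this sketch.
final case class Point(x: Int, y: Int)

object ArbitraryBasics extends Properties("Arbitrary.arbitrary") {
  // arbitrary[A] summons the Gen[A] carried by the implicit Arbitrary[A] in scope.
  val pointGen: Gen[Point] = for {
    x <- arbitrary[Int]
    y <- arbitrary[Int]
  } yield Point(x, y)

  // Registering an implicit Arbitrary[Point] lets forAll derive Point arguments.
  implicit val arbPoint: Arbitrary[Point] = Arbitrary(pointGen)

  property("swapping coordinates twice is the identity") = forAll { (p: Point) =>
    val swap = (q: Point) => Point(q.y, q.x)
    swap(swap(p)) == p
  }
}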
Example 1
Source File: UnwrappedSemiautoDerivedSuite.scala From circe-generic-extras with Apache License 2.0
package io.circe.generic.extras

import cats.Eq
import io.circe._
import io.circe.generic.extras.semiauto._
import io.circe.testing.CodecTests
import org.scalacheck.{ Arbitrary, Gen }
import org.scalacheck.Arbitrary.arbitrary

object UnwrappedSemiautoDerivedSuite {
  case class Foo(value: String)

  object Foo {
    implicit val eq: Eq[Foo] = Eq.fromUniversalEquals
    implicit val encoder: Encoder[Foo] = deriveUnwrappedEncoder
    implicit val decoder: Decoder[Foo] = deriveUnwrappedDecoder
    val codec: Codec[Foo] = deriveUnwrappedCodec
    val fooGen: Gen[Foo] = arbitrary[String].map(Foo(_))
    implicit val arbitraryFoo: Arbitrary[Foo] = Arbitrary(fooGen)
  }
}

class UnwrappedSemiautoDerivedSuite extends CirceSuite {
  import UnwrappedSemiautoDerivedSuite._

  checkLaws("Codec[Foo]", CodecTests[Foo].codec)
  checkLaws("Codec[Foo] via Codec", CodecTests[Foo](Foo.codec, Foo.codec).codec)
  checkLaws("Codec[Foo] via Decoder and Codec", CodecTests[Foo](implicitly, Foo.codec).codec)
  checkLaws("Codec[Foo] via Encoder and Codec", CodecTests[Foo](Foo.codec, implicitly).codec)

  "Semi-automatic derivation" should "encode value classes" in forAll { (s: String) =>
    val foo = Foo(s)
    val expected = Json.fromString(s)

    assert(Encoder[Foo].apply(foo) === expected)
  }

  it should "decode value classes" in forAll { (s: String) =>
    val json = Json.fromString(s)
    val expected = Right(Foo(s))

    assert(Decoder[Foo].decodeJson(json) === expected)
  }

  it should "fail decoding incompatible JSON" in forAll { (i: Int, s: String) =>
    val json = Json.fromInt(i)
    val expected = Left(DecodingFailure("String", List()))

    assert(Decoder[Foo].decodeJson(json) === expected)
  }
}
Example 2
Source File: MapWriterTests.scala From Muse-CGH with MIT License
package tests

import ui.font_editor.Editing
import main.{LetterSeg, MuseChar, MuseCharType}
import org.scalacheck.Arbitrary.arbitrary
import org.scalacheck.Gen
import org.scalacheck.Prop._
import utilities.{CubicCurve, Vec2}

object MapWriterTests {
  import utilities.MapWriter._

  val vec2Gen = for {
    x <- arbitrary[Double]
    y <- arbitrary[Double]
  } yield Vec2(x, y)

  val cubicGen = for {
    p0 <- vec2Gen
    p1 <- vec2Gen
    p2 <- vec2Gen
    p3 <- vec2Gen
  } yield CubicCurve(p0, p1, p2, p3)

  val letterSegGen = for {
    c <- cubicGen
    dots <- Gen.choose(1, 100)
    start <- Gen.choose(0.01, 1.0)
    end <- Gen.choose(0.01, 1.0)
    b1 <- arbitrary[Boolean]
    b2 <- arbitrary[Boolean]
  } yield LetterSeg(c, start, end, b1, b2)

  val letterGen = for {
    segs <- Gen.containerOf[IndexedSeq, LetterSeg](letterSegGen)
    tId <- Gen.choose(0, MuseCharType.maxId)
  } yield MuseChar(segs, MuseCharType(tId))

  val editingGen = for {
    l <- letterGen
    selects <- Gen.containerOf[List, Int](Gen.choose(0, 100))
  } yield Editing(l, selects)

  val vec2Check = forAll(vec2Gen) { v => readOption[Vec2](write(v)) contains v }
  val cubicCheck = forAll(cubicGen) { c => readOption[CubicCurve](write(c)) contains c }
  val letterSegCheck = forAll(letterSegGen) { l => readOption[LetterSeg](write(l)) contains l }
  val letterCheck = forAll(letterGen) { l => readOption[MuseChar](write(l)) contains l }
  val editingCheck = forAll(editingGen) { e => readOption[Editing](write(e)) contains e }

  def main(args: Array[String]) {
    vec2Check.check
    cubicCheck.check
    letterSegCheck.check
    letterCheck.check
    editingCheck.check
  }
}
Example 3
Source File: DecodeErrorProperties.scala From case-classy with Apache License 2.0
package classy

import scala.Predef._

//import _root_.cats.instances.all._
import _root_.cats.kernel.laws._
import org.scalacheck._
import org.scalacheck.Arbitrary.arbitrary
import org.scalacheck.Gen.listOf
import org.scalacheck.Prop._
import org.scalacheck.derive._
import org.scalacheck.{ Shapeless => blackMagic }
import cats._

class DecodeErrorProperties extends Properties("DecodeError") {
  import DecodeError._

  val genLeaf: Gen[DecodeError] = {
    import blackMagic._
    MkArbitrary[LeafDecodeError].arbitrary.arbitrary
  }

  property("and two leaf errors") = forAll(genLeaf, genLeaf)((a, b) =>
    DecodeError.and(a, b) ?= And(a, b))

  property("&& two leaf errors") = forAll(genLeaf, genLeaf)((a, b) =>
    a && b ?= And(a, b))

  property("or two leaf errors") = forAll(genLeaf, genLeaf)((a, b) =>
    DecodeError.or(a, b) ?= Or(a, b))

  property("|| two leaf errors") = forAll(genLeaf, genLeaf)((a, b) =>
    a || b ?= Or(a, b))

  property("and many leaf errors") = forAll(listOf(genLeaf))(errors =>
    errors.length >= 2 ==> (
      errors.reduce(DecodeError.and) ?= And(errors.head, errors.tail)))

  property("&& many leaf errors") = forAll(listOf(genLeaf))(errors =>
    errors.length >= 2 ==> (
      errors.reduce(_ && _) ?= And(errors.head, errors.tail)))

  property("or many leaf errors") = forAll(listOf(genLeaf))(errors =>
    errors.length >= 2 ==> (
      errors.reduce(DecodeError.or) ?= Or(errors.head, errors.tail)))

  property("|| many leaf errors") = forAll(listOf(genLeaf))(errors =>
    errors.length >= 2 ==> (
      errors.reduce(_ || _) ?= Or(errors.head, errors.tail)))

  property("atPath") = forAll(
    arbitrary[String] :| "path",
    arbitrary[String] :| "missing path"
  )((path, missingPath) =>
    Missing.atPath(missingPath).atPath(path) ?= AtPath(path, AtPath(missingPath, Missing)))

  property("atIndex") = forAll(
    arbitrary[Int] :| "index",
    arbitrary[String] :| "missing path"
  )((index, missingPath) =>
    Missing.atPath(missingPath).atIndex(index) ?= AtIndex(index, AtPath(missingPath, Missing)))

  property("AtPath.deepError") = forAll(
    genLeaf :| "deep error",
    arbitrary[String] :| "paths head",
    arbitrary[List[String]] :| "paths tail"
  )((error, pathHead, pathTail) =>
    pathTail.foldLeft(
      error.atPath(pathHead)
    )(_ atPath _).deepError ?= error)

  {
    import blackMagic._
    implicit val arbitraryDecodeError = MkArbitrary[DecodeError].arbitrary

    {
      import DecodeErrorMonoid.and._
      include(GroupLaws[DecodeError].monoid.all, "and ")
    }

    {
      import DecodeErrorMonoid.or._
      include(GroupLaws[DecodeError].monoid.all, "or ")
    }
  }
}
Example 4
Source File: DefaultDecoderChecks.scala From case-classy with Apache License 2.0
package classy
package testing

import predef._
import scala.Predef.{ augmentString, ArrowAssoc, classOf }

import org.scalacheck._
import org.scalacheck.Arbitrary.arbitrary
import org.scalacheck.Prop._
import shapeless.Typeable

import java.util.UUID

object DefaultDecoderChecks {

  implicit class DecoderCheckOps[A, B](val decoder: Decoder[A, B]) extends AnyVal {
    def succeeds(input: A, output: B): Prop =
      decoder(input) ?= output.right
    def failsWrongType(input: A)(implicit ev: Typeable[B]): Prop =
      decoder(input) ?= DecodeError.WrongType(ev.describe, input.toString.some).left
  }

  def section[A, B](decoder: Decoder[A, B], f: B => A = (_: B).toString)(implicit arbB: Arbitrary[B]): Prop =
    forAll(arbB.arbitrary :| "input")((output) => decoder.succeeds(f(output), output))

  def retraction[A: Arbitrary, B: Typeable](decoder: Decoder[A, B])(f: A => Option[B]): Prop =
    forAll(arbitrary[A] :| "input")((input) =>
      f(input).fold(
        decoder.failsWrongType(input))(
        decoder.succeeds(input, _)))

  def catching[A, B](f: A => B): A => Option[B] =
    (a: A) => scala.util.Try(f(a)).toOption

  def properties(name: String)(props: (String, Prop)*): Properties = {
    val properties = new Properties(name)
    props.foreach(p => properties.property(p._1) = p._2)
    properties
  }

  def stringToBooleanProperties(decoder: Decoder[String, Boolean]): Properties =
    properties("String to Boolean")(
      "section String" -> section(decoder),
      "retract Boolean" -> retraction(decoder)(catching(_.toBoolean)))

  def stringToByteProperties(decoder: Decoder[String, Byte]): Properties =
    properties("String to Byte")(
      "section String" -> section(decoder),
      "retract Byte" -> retraction(decoder)(catching(_.toByte)))

  def stringToShortProperties(decoder: Decoder[String, Short]): Properties =
    properties("String to Short")(
      "section String" -> section(decoder),
      "retract Short" -> retraction(decoder)(catching(_.toShort)))

  def stringToIntProperties(decoder: Decoder[String, Int]): Properties =
    properties("String to Int")(
      "section String" -> section(decoder),
      "retract Int" -> retraction(decoder)(catching(_.toInt)))

  def stringToLongProperties(decoder: Decoder[String, Long]): Properties =
    properties("String to Long")(
      "section String" -> section(decoder),
      "retract Long" -> retraction(decoder)(catching(_.toLong)))

  def stringToFloatProperties(decoder: Decoder[String, Float]): Properties =
    properties("String to Float")(
      "section String" -> section(decoder),
      "retract Float" -> retraction(decoder)(catching(_.toFloat)))

  def stringToDoubleProperties(decoder: Decoder[String, Double]): Properties =
    properties("String to Double")(
      "section String" -> section(decoder),
      "retract Double" -> retraction(decoder)(catching(_.toDouble)))

  def stringToUUIDProperties(decoder: Decoder[String, UUID]): Properties =
    properties("String to UUID")(
      "section String" -> section(decoder)(Arbitrary(Gen.uuid)),
      "retract UUID" -> retraction(decoder)(catching(UUID.fromString)))
}
Example 5
Source File: StreamingFormulaDemo1.scala From sscheck with Apache License 2.0
package es.ucm.fdi.sscheck.spark.demo

import org.junit.runner.RunWith
import org.specs2.runner.JUnitRunner
import org.specs2.ScalaCheck
import org.specs2.Specification
import org.specs2.matcher.ResultMatchers
import org.scalacheck.Arbitrary.arbitrary
import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.Duration
import org.apache.spark.streaming.dstream.DStream
import es.ucm.fdi.sscheck.spark.streaming.SharedStreamingContextBeforeAfterEach
import es.ucm.fdi.sscheck.prop.tl.{Formula,DStreamTLProperty}
import es.ucm.fdi.sscheck.prop.tl.Formula._
import es.ucm.fdi.sscheck.gen.{PDStreamGen,BatchGen}

@RunWith(classOf[JUnitRunner])
class StreamingFormulaDemo1
  extends Specification
  with DStreamTLProperty
  with ResultMatchers
  with ScalaCheck {

  // Spark configuration
  override def sparkMaster: String = "local[*]"
  override def batchDuration = Duration(150)
  override def defaultParallelism = 4

  def is = sequential ^ s2"""
    Simple demo Specs2 example for ScalaCheck properties with temporal
    formulas on Spark Streaming programs
      - where a simple property for DStream.count is a success ${countForallAlwaysProp(_.count)}
      - where a faulty implementation of the DStream.count is detected ${countForallAlwaysProp(faultyCount) must beFailing}
    """

  def faultyCount(ds: DStream[Double]): DStream[Long] =
    ds.count.transform(_.map(_ - 1))

  def countForallAlwaysProp(testSubject: DStream[Double] => DStream[Long]) = {
    type U = (RDD[Double], RDD[Long])
    val (inBatch, transBatch) = ((_: U)._1, (_: U)._2)
    val numBatches = 10

    val formula: Formula[U] = always { (u: U) =>
      transBatch(u).count === 1 and
        inBatch(u).count === transBatch(u).first
    } during numBatches

    val gen = BatchGen.always(BatchGen.ofNtoM(10, 50, arbitrary[Double]), numBatches)

    forAllDStream(
      gen)(
      testSubject)(
      formula)
  }.set(minTestsOk = 10).verbose
}
Example 6
Source File: StreamingFormulaDemo2.scala From sscheck with Apache License 2.0
package es.ucm.fdi.sscheck.spark.demo

import org.junit.runner.RunWith
import org.specs2.runner.JUnitRunner
import org.specs2.ScalaCheck
import org.specs2.Specification
import org.specs2.matcher.ResultMatchers
import org.scalacheck.Arbitrary.arbitrary
import org.scalacheck.Gen
import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.Duration
import org.apache.spark.streaming.dstream.DStream
import org.apache.spark.streaming.dstream.DStream._
import scalaz.syntax.std.boolean._
import es.ucm.fdi.sscheck.spark.streaming.SharedStreamingContextBeforeAfterEach
import es.ucm.fdi.sscheck.prop.tl.{Formula,DStreamTLProperty}
import es.ucm.fdi.sscheck.prop.tl.Formula._
import es.ucm.fdi.sscheck.gen.{PDStreamGen,BatchGen}
import es.ucm.fdi.sscheck.gen.BatchGenConversions._
import es.ucm.fdi.sscheck.gen.PDStreamGenConversions._
import es.ucm.fdi.sscheck.matcher.specs2.RDDMatchers._

@RunWith(classOf[JUnitRunner])
class StreamingFormulaDemo2
  extends Specification
  with DStreamTLProperty
  with ResultMatchers
  with ScalaCheck {

  // Spark configuration
  override def sparkMaster: String = "local[*]"
  override def batchDuration = Duration(300)
  override def defaultParallelism = 3
  override def enableCheckpointing = true

  def is = sequential ^ s2"""
    Check process to persistently detect and ban bad users
      - where a stateful implementation extracts the banned users correctly ${checkExtractBannedUsersList(listBannedUsers)}
      - where a trivial implementation ${checkExtractBannedUsersList(statelessListBannedUsers) must beFailing}
    """

  type UserId = Long

  def listBannedUsers(ds: DStream[(UserId, Boolean)]): DStream[UserId] =
    ds.updateStateByKey((flags: Seq[Boolean], maybeFlagged: Option[Unit]) =>
      maybeFlagged match {
        case Some(_) => maybeFlagged
        case None => flags.contains(false) option {()}
      }
    ).transform(_.keys)

  def statelessListBannedUsers(ds: DStream[(UserId, Boolean)]): DStream[UserId] =
    ds.map(_._1)

  def checkExtractBannedUsersList(testSubject: DStream[(UserId, Boolean)] => DStream[UserId]) = {
    val batchSize = 20
    val (headTimeout, tailTimeout, nestedTimeout) = (10, 10, 5)
    val (badId, ids) = (15L, Gen.choose(1L, 50L))
    val goodBatch = BatchGen.ofN(batchSize, ids.map((_, true)))
    val badBatch = goodBatch + BatchGen.ofN(1, (badId, false))
    val gen = BatchGen.until(goodBatch, badBatch, headTimeout) ++
              BatchGen.always(Gen.oneOf(goodBatch, badBatch), tailTimeout)

    type U = (RDD[(UserId, Boolean)], RDD[UserId])
    val (inBatch, outBatch) = ((_: U)._1, (_: U)._2)

    val formula = {
      val badInput = at(inBatch)(_ should existsRecord(_ == (badId, false)))
      val allGoodInputs = at(inBatch)(_ should foreachRecord(_._2 == true))
      val noIdBanned = at(outBatch)(_.isEmpty)
      val badIdBanned = at(outBatch)(_ should existsRecord(_ == badId))

      ( ( allGoodInputs and noIdBanned ) until badIdBanned on headTimeout ) and
      ( always { badInput ==> (always(badIdBanned) during nestedTimeout) } during tailTimeout )
    }

    forAllDStream(
      gen)(
      testSubject)(
      formula)
  }.set(minTestsOk = 10).verbose
}
Example 7
Source File: SimpleStreamingFormulas.scala From sscheck with Apache License 2.0
package es.ucm.fdi.sscheck.spark.simple

import org.junit.runner.RunWith
import org.specs2.runner.JUnitRunner
import org.specs2.matcher.ResultMatchers
import org.scalacheck.Arbitrary.arbitrary
import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.Duration
import org.apache.spark.streaming.dstream.DStream
import es.ucm.fdi.sscheck.spark.streaming.SharedStreamingContextBeforeAfterEach
import es.ucm.fdi.sscheck.prop.tl.{Formula,DStreamTLProperty}
import es.ucm.fdi.sscheck.prop.tl.Formula._
import es.ucm.fdi.sscheck.matcher.specs2.RDDMatchers._
import es.ucm.fdi.sscheck.gen.{PDStreamGen,BatchGen}
import org.scalacheck.Gen
import es.ucm.fdi.sscheck.gen.PDStream
import es.ucm.fdi.sscheck.gen.Batch

@RunWith(classOf[JUnitRunner])
class SimpleStreamingFormulas
  extends org.specs2.Specification
  with DStreamTLProperty
  with org.specs2.ScalaCheck {

  // Spark configuration
  override def sparkMaster: String = "local[*]"
  override def batchDuration = Duration(50)
  override def defaultParallelism = 4

  def is = sequential ^ s2"""
    Simple demo Specs2 example for ScalaCheck properties with temporal
    formulas on Spark Streaming programs
      - Given a stream of integers
        When we filter out negative numbers
        Then we get only numbers greater or equal to zero $filterOutNegativeGetGeqZero
      - where time increments for each batch $timeIncreasesMonotonically
    """

  def filterOutNegativeGetGeqZero = {
    type U = (RDD[Int], RDD[Int])
    val numBatches = 10
    val gen = BatchGen.always(BatchGen.ofNtoM(10, 50, arbitrary[Int]), numBatches)

    val formula = always(nowTime[U]{ (letter, time) =>
      val (_input, output) = letter
      output should foreachRecord {_ >= 0}
    }) during numBatches

    forAllDStream(
      gen)(
      _.filter{ x => !(x < 0)})(
      formula)
  }.set(minTestsOk = 50).verbose

  def timeIncreasesMonotonically = {
    type U = (RDD[Int], RDD[Int])
    val numBatches = 10
    val gen = BatchGen.always(BatchGen.ofNtoM(10, 50, arbitrary[Int]))

    val formula = always(nextTime[U]{ (letter, time) =>
      nowTime[U]{ (nextLetter, nextTime) =>
        time.millis <= nextTime.millis
      }
    }) during numBatches-1

    forAllDStream(
      gen)(
      identity[DStream[Int]])(
      formula)
  }.set(minTestsOk = 10).verbose
}
Example 8
Source File: ScalaCheckStreamingTest.scala From sscheck with Apache License 2.0
package es.ucm.fdi.sscheck.spark.streaming

import org.junit.runner.RunWith
import org.specs2.runner.JUnitRunner
import org.specs2.ScalaCheck
import org.specs2.execute.{AsResult, Result}
import org.scalacheck.{Prop, Gen}
import org.scalacheck.Arbitrary.arbitrary
import org.apache.spark._
import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.{Duration}
import org.apache.spark.streaming.dstream.DStream
import es.ucm.fdi.sscheck.prop.tl.Formula._
import es.ucm.fdi.sscheck.prop.tl.DStreamTLProperty
import es.ucm.fdi.sscheck.matcher.specs2.RDDMatchers._

@RunWith(classOf[JUnitRunner])
class ScalaCheckStreamingTest
  extends org.specs2.Specification
  with DStreamTLProperty
  with org.specs2.matcher.ResultMatchers
  with ScalaCheck {

  override def sparkMaster: String = "local[5]"
  override def batchDuration = Duration(350)
  override def defaultParallelism = 4

  def is = sequential ^ s2"""
    Simple properties for Spark Streaming
      - where the first property is a success $prop1
      - where a simple property for DStream.count is a success ${countProp(_.count)}
      - where a faulty implementation of the DStream.count is detected ${countProp(faultyCount) must beFailing}
    """

  def prop1 = {
    val batchSize = 30
    val numBatches = 10
    val dsgenSeqSeq1 = {
      val zeroSeqSeq = Gen.listOfN(numBatches, Gen.listOfN(batchSize, 0))
      val oneSeqSeq = Gen.listOfN(numBatches, Gen.listOfN(batchSize, 1))
      Gen.oneOf(zeroSeqSeq, oneSeqSeq)
    }
    type U = (RDD[Int], RDD[Int])

    forAllDStream[Int, Int](
      "inputDStream" |: dsgenSeqSeq1)(
      (inputDs: DStream[Int]) => {
        val transformedDs = inputDs.map(_ + 1)
        transformedDs
      })(always ((u: U) => {
        val (inputBatch, transBatch) = u
        inputBatch.count === batchSize and
        inputBatch.count === transBatch.count and
        (inputBatch.intersection(transBatch).isEmpty should beTrue) and
        (inputBatch should foreachRecord(_ == 0) or
          (inputBatch should foreachRecord(_ == 1)))
      }) during numBatches
    )}.set(minTestsOk = 10).verbose

  def faultyCount(ds: DStream[Double]): DStream[Long] =
    ds.count.transform(_.map(_ - 1))

  def countProp(testSubject: DStream[Double] => DStream[Long]) = {
    type U = (RDD[Double], RDD[Long])
    val numBatches = 10

    forAllDStream[Double, Long](
      Gen.listOfN(numBatches, Gen.listOfN(30, arbitrary[Double])))(
      testSubject
      )(always ((u: U) => {
        val (inputBatch, transBatch) = u
        transBatch.count === 1 and
        inputBatch.count === transBatch.first
      }) during numBatches
    )}.set(minTestsOk = 10).verbose
}
Example 9
Source File: ReGenTest.scala From sscheck with Apache License 2.0
package es.ucm.fdi.sscheck.gen

import org.scalacheck.{Properties, Gen}
import org.scalacheck.Arbitrary.arbitrary
import org.scalacheck.Prop.{forAll, BooleanOperators, exists, atLeastOne}

object ReGenTest extends Properties("ReGen regex generators properties") {

  property("epsilon generates empty sequences") =
    forAll(ReGen.epsilon) { (xs: Seq[Int]) =>
      xs.length == 0
    }

  property("symbol generates a single element that contains the argument") =
    forAll { x: String =>
      forAll(ReGen.symbol(x)) { xs: Seq[String] =>
        xs.length == 1 && xs(0) == x
      }
    }

  // def alt[A](gs : Gen[Seq[A]]*) : Gen[Seq[A]] = {
  property("alt is equivalent to epsilon if zero generators are provided") =
    forAll(ReGen.alt()) { (xs: Seq[Int]) =>
      forAll(ReGen.epsilon) { (ys: Seq[Int]) =>
        xs == ys
      }
    }

  property("alt works for more than one argument, and generates values for some of the alternatives (weak, existential)") = {
    val (g1, g2, g3) = (ReGen.symbol(0), ReGen.symbol(1), ReGen.symbol(2))
    forAll(ReGen.alt(g1, g2, g3)) { (xs: Seq[Int]) =>
      atLeastOne(
        exists(g1) { (ys: Seq[Int]) => xs == ys },
        exists(g2) { (ys: Seq[Int]) => xs == ys },
        exists(g3) { (ys: Seq[Int]) => xs == ys }
      )
    }
  }

  // conc and star only have similar weak existential properties
}
Example 10
Source File: UtilsGenTest.scala From sscheck with Apache License 2.0
package es.ucm.fdi.sscheck.gen

import org.scalacheck.{Properties, Gen}
import org.scalacheck.Arbitrary.arbitrary
import org.scalacheck.Prop.{forAll, exists, BooleanOperators}
import Buildables.buildableSeq

object UtilsGenTest extends Properties("UtilsGenTest test") {

  property("""containerOfNtoM is able to generate sequences of string
    with size between N and M strings for both N and M >= 0""") =
    forAll(Gen.choose(0, 10), Gen.choose(0, 10)) { (n: Int, m: Int) =>
      val g = UtilsGen.containerOfNtoM(n, m, arbitrary[String]): Gen[Seq[String]]
      forAll(g) { (xs: Seq[String]) =>
        xs.length >= n && xs.length <= m
      }
    }

  property("repN respects its length constraints") =
    forAll(Gen.choose(0, 10), Gen.choose(0, 10)) { (n: Int, xsLen: Int) =>
      val g = UtilsGen.repN(n, Gen.listOfN(xsLen, Gen.alphaStr))
      forAll(g) { (xs: Seq[String]) =>
        xs.length == xsLen * n
      }
    }

  property("repNtoM respects its length constraints") =
    forAll(Gen.choose(0, 10), Gen.choose(0, 10), Gen.choose(0, 10)) { (n: Int, m: Int, xsLen: Int) =>
      val g = UtilsGen.repNtoM(n, m, Gen.listOfN(xsLen, arbitrary[String]))
      forAll(g) { (xs: Seq[String]) =>
        // The observed length must lie between xsLen * n and xsLen * m.
        val xsLenObs = xs.length
        xsLenObs >= xsLen * n && xsLenObs <= xsLen * m
      }
    }

  property("""concSeq returns the result of concatenating the sequences
    generated by its arguments""") = {
    // In order for Prop.exists to be effective, we use a small domain
    // for all the lengths considered: this is very weak because very few
    // lengths are considered, but it's better than nothing.
    forAll(Gen.choose(0, 2)) { (xsLen: Int) =>
      // we consider two generators for lists of elements with that size
      val (gxs1, gxs2) = (Gen.listOfN(xsLen, Gen.choose(1, 3)),
                          Gen.listOfN(xsLen, Gen.choose(4, 6)))
      forAll(UtilsGen.concSeq(gxs1, gxs2)) { (xs: Seq[Int]) =>
        // Prop.exists is not overloaded to support several generators,
        // so we have to use zip
        exists(Gen.zip(gxs1, gxs2)) { (xs12: (List[Int], List[Int])) =>
          xs == xs12._1 ++ xs12._2
        }
      }
    }
  }
}
Example 11
Source File: SomethingPlain.scala From scala-commons with MIT License
package com.avsystem.commons
package mongo

import com.avsystem.commons.serialization.GenCodec
import org.scalacheck.Arbitrary.arbitrary
import org.scalacheck.Gen

case class SomethingPlain(
  string: String,
  boolean: Boolean,
  int: Int,
  long: Long,
  timestamp: JDate,
  double: Double,
  binary: Bytes,
  list: List[String],
  map: Map[String, String]
)

object SomethingPlain {
  def sizedListOf[T](maxSize: Int, gen: => Gen[T]): Gen[List[T]] = {
    Gen.resize(maxSize, Gen.listOf(gen))
  }

  val stringListGen: Gen[List[String]] = sizedListOf(8, arbitrary[String])

  val entryGen: Gen[(String, String)] = for {
    key <- Gen.alphaStr
    value <- arbitrary[String]
  } yield key -> value

  val gen: Gen[SomethingPlain] = for {
    string <- arbitrary[String]
    boolean <- arbitrary[Boolean]
    int <- arbitrary[Int]
    long <- arbitrary[Long]
    timestamp <- arbitrary[JDate]
    double <- arbitrary[Double]
    binary <- Gen.buildableOf[Array[Byte], Byte](arbitrary[Byte]).map(new Bytes(_))
    list <- stringListGen
    map <- Gen.mapOf(entryGen)
  } yield SomethingPlain(
    string,
    boolean,
    int,
    long,
    timestamp,
    double,
    binary,
    list,
    map
  )

  implicit val codec: GenCodec[SomethingPlain] = GenCodec.materialize
}

case class SomethingComplex(
  embeddedObject: SomethingPlain,
  complexList: List[SomethingPlain],
  nestedList: List[List[String]],
  nestedComplexList: List[List[SomethingPlain]],
  option: Option[Int]
)

object SomethingComplex {
  val sthListGen: Gen[List[SomethingPlain]] = SomethingPlain.sizedListOf(8, SomethingPlain.gen)

  val gen: Gen[SomethingComplex] = for {
    embeddedObject <- SomethingPlain.gen
    complexList <- sthListGen
    nestedList <- SomethingPlain.sizedListOf(5, SomethingPlain.stringListGen)
    nestedComplexList <- SomethingPlain.sizedListOf(5, sthListGen)
    option <- arbitrary[Option[Int]]
  } yield SomethingComplex(
    embeddedObject,
    complexList,
    nestedList,
    nestedComplexList,
    option
  )

  implicit val codec: GenCodec[SomethingComplex] = GenCodec.materialize
}
Example 12
Source File: SerializationTestUtils.scala From scala-commons with MIT License
package com.avsystem.commons
package serialization.json

import java.math.MathContext

import com.avsystem.commons.serialization.HasGenCodec
import org.scalacheck.Arbitrary
import org.scalacheck.Arbitrary.arbitrary

trait SerializationTestUtils {
  private def limitMathContext(bd: BigDecimal) =
    if (bd.mc == MathContext.UNLIMITED) bd(BigDecimal.defaultMathContext) else bd

  case class TestCC(i: Int, l: Long, intAsDouble: Double, b: Boolean, s: String, list: List[Char])
  object TestCC extends HasGenCodec[TestCC] {
    implicit val arb: Arbitrary[TestCC] = Arbitrary(for {
      i <- arbitrary[Int]
      l <- arbitrary[Long]
      b <- arbitrary[Boolean]
      s <- arbitrary[String]
      list <- arbitrary[List[Char]]
    } yield TestCC(i, l, i.toDouble, b, s, list))
  }

  case class NestedTestCC(i: Int, t: TestCC, t2: TestCC)
  object NestedTestCC extends HasGenCodec[NestedTestCC]

  case class DeepNestedTestCC(n: TestCC, l: DeepNestedTestCC)
  object DeepNestedTestCC extends HasGenCodec[DeepNestedTestCC]

  case class CompleteItem(
    unit: Unit,
    string: String,
    char: Char,
    boolean: Boolean,
    byte: Byte,
    short: Short,
    int: Int,
    long: Long,
    float: Float,
    double: Double,
    bigInt: BigInt,
    bigDecimal: BigDecimal,
    binary: Array[Byte],
    list: List[String],
    set: Set[String],
    obj: TestCC,
    map: Map[String, Int]
  )
  object CompleteItem extends HasGenCodec[CompleteItem] {
    implicit val arb: Arbitrary[CompleteItem] = Arbitrary(for {
      u <- arbitrary[Unit]
      str <- arbitrary[String]
      c <- arbitrary[Char]
      bool <- arbitrary[Boolean]
      b <- arbitrary[Byte]
      s <- arbitrary[Short]
      i <- arbitrary[Int]
      l <- arbitrary[Long]
      f <- arbitrary[Float]
      d <- arbitrary[Double]
      bi <- arbitrary[BigInt]
      bd <- arbitrary[BigDecimal].map(limitMathContext)
      binary <- arbitrary[Array[Byte]]
      list <- arbitrary[List[String]]
      set <- arbitrary[Set[String]]
      obj <- arbitrary[TestCC]
      map <- arbitrary[Map[String, Int]]
    } yield CompleteItem(u, str, c, bool, b, s, i, l, f, d, bi, bd, binary, list, set, obj, map))
  }
}
Example 13
Source File: PivotTest.scala From frameless with Apache License 2.0
package frameless
package ops

import frameless.functions.aggregate._
import org.apache.spark.sql.{functions => sparkFunctions}
import org.scalacheck.Arbitrary.arbitrary
import org.scalacheck.Prop._
import org.scalacheck.{Gen, Prop}

class PivotTest extends TypedDatasetSuite {
  def withCustomGenX4: Gen[Vector[X4[String, String, Int, Boolean]]] = {
    val kvPairGen: Gen[X4[String, String, Int, Boolean]] = for {
      a <- Gen.oneOf(Seq("1", "2", "3", "4"))
      b <- Gen.oneOf(Seq("a", "b", "c"))
      c <- arbitrary[Int]
      d <- arbitrary[Boolean]
    } yield X4(a, b, c, d)

    Gen.listOfN(4, kvPairGen).map(_.toVector)
  }

  test("X4[String, String, Int, Boolean] pivot on String") {
    def prop(data: Vector[X4[String, String, Int, Boolean]]): Prop = {
      val d = TypedDataset.create(data)
      val frameless = d.groupBy(d('a)).
        pivot(d('b)).on("a", "b", "c").
        agg(sum(d('c)), first(d('d))).collect().run().toVector

      val spark = d.dataset.groupBy("a")
        .pivot("b", Seq("a", "b", "c"))
        .agg(sparkFunctions.sum("c"), sparkFunctions.first("d")).collect().toVector

      (frameless.map(_._1) ?= spark.map(x => x.getAs[String](0))).&&(
        frameless.map(_._2) ?= spark.map(x => Option(x.getAs[Long](1)))).&&(
        frameless.map(_._3) ?= spark.map(x => Option(x.getAs[Boolean](2)))).&&(
        frameless.map(_._4) ?= spark.map(x => Option(x.getAs[Long](3)))).&&(
        frameless.map(_._5) ?= spark.map(x => Option(x.getAs[Boolean](4)))).&&(
        frameless.map(_._6) ?= spark.map(x => Option(x.getAs[Long](5)))).&&(
        frameless.map(_._7) ?= spark.map(x => Option(x.getAs[Boolean](6))))
    }

    check(forAll(withCustomGenX4)(prop))
  }

  test("Pivot on Boolean") {
    val x: Seq[X3[String, Boolean, Boolean]] =
      Seq(X3("a", true, true), X3("a", true, true), X3("a", true, false))
    val d = TypedDataset.create(x)
    d.groupByMany(d('a)).
      pivot(d('c)).on(true, false).
      agg(count[X3[String, Boolean, Boolean]]()).
      collect().run().toVector ?= Vector(("a", Some(2L), Some(1L))) // two true one false
  }

  test("Pivot with groupBy on two columns, pivot on Long") {
    val x: Seq[X3[String, String, Long]] =
      Seq(X3("a", "x", 1), X3("a", "x", 1), X3("a", "c", 20))
    val d = TypedDataset.create(x)
    d.groupBy(d('a), d('b)).
      pivot(d('c)).on(1L, 20L).
      agg(count[X3[String, String, Long]]()).
      collect().run().toSet ?= Set(("a", "x", Some(2L), None), ("a", "c", None, Some(1L)))
  }

  test("Pivot with cube on two columns, pivot on Long") {
    val x: Seq[X3[String, String, Long]] =
      Seq(X3("a", "x", 1), X3("a", "x", 1), X3("a", "c", 20))
    val d = TypedDataset.create(x)
    d.cube(d('a), d('b))
      .pivot(d('c)).on(1L, 20L)
      .agg(count[X3[String, String, Long]]())
      .collect().run().toSet ?= Set(("a", "x", Some(2L), None), ("a", "c", None, Some(1L)))
  }

  test("Pivot with cube on Boolean") {
    val x: Seq[X3[String, Boolean, Boolean]] =
      Seq(X3("a", true, true), X3("a", true, true), X3("a", true, false))
    val d = TypedDataset.create(x)
    d.cube(d('a)).
      pivot(d('c)).on(true, false).
      agg(count[X3[String, Boolean, Boolean]]()).
      collect().run().toVector ?= Vector(("a", Some(2L), Some(1L)))
  }

  test("Pivot with rollup on two columns, pivot on Long") {
    val x: Seq[X3[String, String, Long]] =
      Seq(X3("a", "x", 1), X3("a", "x", 1), X3("a", "c", 20))
    val d = TypedDataset.create(x)
    d.rollup(d('a), d('b))
      .pivot(d('c)).on(1L, 20L)
      .agg(count[X3[String, String, Long]]())
      .collect().run().toSet ?= Set(("a", "x", Some(2L), None), ("a", "c", None, Some(1L)))
  }

  test("Pivot with rollup on Boolean") {
    val x: Seq[X3[String, Boolean, Boolean]] =
      Seq(X3("a", true, true), X3("a", true, true), X3("a", true, false))
    val d = TypedDataset.create(x)
    d.rollupMany(d('a)).
      pivot(d('c)).on(true, false).
      agg(count[X3[String, Boolean, Boolean]]()).
      collect().run().toVector ?= Vector(("a", Some(2L), Some(1L)))
  }
}
Example 14
Source File: RandomSplitTests.scala From frameless with Apache License 2.0
package frameless

import org.scalacheck.Arbitrary.arbitrary
import org.scalacheck.Prop._
import org.scalacheck.{Arbitrary, Gen}

import scala.collection.JavaConverters._
import org.scalatest.matchers.should.Matchers

class RandomSplitTests extends TypedDatasetSuite with Matchers {
  val nonEmptyPositiveArray: Gen[Array[Double]] =
    Gen.nonEmptyListOf(Gen.posNum[Double]).map(_.toArray)

  test("randomSplit(weight, seed)") {
    def prop[A: TypedEncoder: Arbitrary] =
      forAll(vectorGen[A], nonEmptyPositiveArray, arbitrary[Long]) {
        (data: Vector[A], weights: Array[Double], seed: Long) =>
          val dataset = TypedDataset.create(data)

          dataset.randomSplit(weights, seed).map(_.count().run()) sameElements
            dataset.dataset.randomSplit(weights, seed).map(_.count())
      }

    check(prop[Int])
    check(prop[String])
  }

  test("randomSplitAsList(weight, seed)") {
    def prop[A: TypedEncoder: Arbitrary] =
      forAll(vectorGen[A], nonEmptyPositiveArray, arbitrary[Long]) {
        (data: Vector[A], weights: Array[Double], seed: Long) =>
          val dataset = TypedDataset.create(data)

          dataset.randomSplitAsList(weights, seed).asScala.map(_.count().run()) sameElements
            dataset.dataset.randomSplitAsList(weights, seed).asScala.map(_.count())
      }

    check(prop[Int])
    check(prop[String])
  }
}
Example 15
Source File: AvroRecordPropSpec.scala From affinity with Apache License 2.0
package io.amient.affinity.avro

import java.util.UUID

import io.amient.affinity.avro.record.AvroRecord
import org.scalacheck.Arbitrary.arbitrary
import org.scalacheck.Gen
import org.scalacheck.Gen._
import org.scalatest.prop.PropertyChecks
import org.scalatest.{Matchers, PropSpec}

class AvroRecordPropSpec extends PropSpec with PropertyChecks with Matchers {

  import SimpleEnum._

  property("Case Class constructor default arguments are AvroRecord defaults") {
    val b = SimpleRecord()
    assert(b == SimpleRecord(SimpleKey(0), A, Seq()))
    AvroRecord.read[SimpleRecord](AvroRecord.write(b, b.schema), b.schema) should equal(
      SimpleRecord(SimpleKey(0), A, Seq()))
    val c = Record_Current()
    assert(c == Record_Current(Seq(), Map(), Set()))
    AvroRecord.read[Record_Current](AvroRecord.write(c, c.schema), c.schema) should equal(
      Record_Current(Seq(), Map(), Set()))
  }

  def uuids: Gen[UUID] = for {
    hi <- arbitrary[Long]
    lo <- arbitrary[Long]
  } yield new UUID(hi, lo)

  property("java.lang.UUID can be represented as Avro Bytes") {
    forAll(uuids) { uuid: UUID =>
      val x = AvroUUID(uuid)
      val bytes = AvroRecord.write(x, x.schema)
      val copy = AvroRecord.read[AvroUUID](bytes, x.schema)
      copy.uuid should be(uuid)
    }
  }

  def bases: Gen[SimpleRecord] = for {
    id <- arbitrary[Int]
    side <- Gen.oneOf(SimpleEnum.A, SimpleEnum.B)
    ints <- listOf(arbitrary[Int])
  } yield SimpleRecord(SimpleKey(id), side, ints.map(SimpleKey(_)))

  def composites: Gen[Record_Current] = for {
    nitems <- Gen.choose(1, 2)
    items <- listOfN(nitems, bases)
    keys <- listOfN(nitems, Gen.alphaStr)
    longs <- listOf(arbitrary[Long])
  } yield Record_Current(items, keys.zip(items).toMap, longs.toSet)

  property("AvroRecord.write is fully reversible by AvroRecord.read") {
    forAll(composites) { composite: Record_Current =>
      val bytes = AvroRecord.write(composite, composite.schema)
      AvroRecord.read[Record_Current](bytes, composite.schema) should equal(composite)
    }
  }
}
Example 16
Source File: SemiautoDerivedSuiteInputs.scala From circe-magnolia with Apache License 2.0
package io.circe.magnolia

import cats.kernel.Eq
import io.circe.generic.semiauto._
import io.circe.{Decoder, Encoder}
import org.scalacheck.{Arbitrary, Gen}
import org.scalacheck.Arbitrary.arbitrary

object SemiautoDerivedSuiteInputs {

  sealed trait RecursiveAdtExample
  case class BaseAdtExample(a: String) extends RecursiveAdtExample
  case class NestedAdtExample(r: RecursiveAdtExample) extends RecursiveAdtExample

  object RecursiveAdtExample {
    implicit val eqRecursiveAdtExample: Eq[RecursiveAdtExample] =
      Eq.fromUniversalEquals

    private def atDepth(depth: Int): Gen[RecursiveAdtExample] =
      if (depth < 3)
        Gen.oneOf(
          Arbitrary.arbitrary[String].map(BaseAdtExample(_)),
          atDepth(depth + 1).map(NestedAdtExample(_))
        )
      else Arbitrary.arbitrary[String].map(BaseAdtExample(_))

    implicit val arbitraryRecursiveAdtExample: Arbitrary[RecursiveAdtExample] =
      Arbitrary(atDepth(0))
  }

  case class RecursiveWithOptionExample(o: Option[RecursiveWithOptionExample])

  object RecursiveWithOptionExample {
    implicit val eqRecursiveWithOptionExample: Eq[RecursiveWithOptionExample] =
      Eq.fromUniversalEquals

    private def atDepth(depth: Int): Gen[RecursiveWithOptionExample] =
      if (depth < 3)
        Gen.option(atDepth(depth + 1)).map(RecursiveWithOptionExample(_))
      else Gen.const(RecursiveWithOptionExample(None))

    implicit val arbitraryRecursiveWithOptionExample: Arbitrary[RecursiveWithOptionExample] =
      Arbitrary(atDepth(0))
  }

  case class AnyInt(value: Int) extends AnyVal

  object AnyInt {
    implicit val encodeAnyInt: Encoder[AnyInt] = deriveEncoder
    implicit val decodeAnyInt: Decoder[AnyInt] = deriveDecoder
  }

  case class AnyValInside(v: AnyInt)

  object AnyValInside {
    implicit val eqAnyValInside: Eq[AnyValInside] = Eq.fromUniversalEquals
    implicit val arbitraryAnyValInside: Arbitrary[AnyValInside] =
      Arbitrary(arbitrary[Int].map(i => AnyValInside(AnyInt(i))))
  }

  case class OvergenerationExampleInner(i: Int)
  case class OvergenerationExampleOuter0(i: OvergenerationExampleInner)
  case class OvergenerationExampleOuter1(oi: Option[OvergenerationExampleInner])
}
Example 17
Source File: ConfiguredJsonCodecWithKeySuite.scala From circe-generic-extras with Apache License 2.0
package io.circe.generic.extras

import cats.kernel.Eq
import io.circe.{ Decoder, Encoder }
import io.circe.literal._
import io.circe.testing.CodecTests
import org.scalacheck.{ Arbitrary, Gen }
import org.scalacheck.Arbitrary.arbitrary

object ConfiguredJsonCodecWithKeySuite {
  implicit val customConfig: Configuration =
    Configuration.default.withSnakeCaseMemberNames.withDefaults
      .withDiscriminator("type").withSnakeCaseConstructorNames

  @ConfiguredJsonCodec
  sealed trait ConfigExampleBase
  case class ConfigExampleFoo(thisIsAField: String, a: Int = 0, @JsonKey("myField") b: Double) extends ConfigExampleBase

  object ConfigExampleFoo {
    implicit val eqConfigExampleFoo: Eq[ConfigExampleFoo] = Eq.fromUniversalEquals
    val genConfigExampleFoo: Gen[ConfigExampleFoo] = for {
      thisIsAField <- arbitrary[String]
      a <- arbitrary[Int]
      b <- arbitrary[Double]
    } yield ConfigExampleFoo(thisIsAField, a, b)
    implicit val arbitraryConfigExampleFoo: Arbitrary[ConfigExampleFoo] = Arbitrary(genConfigExampleFoo)
  }

  object ConfigExampleBase {
    implicit val eqConfigExampleBase: Eq[ConfigExampleBase] = Eq.fromUniversalEquals
    val genConfigExampleBase: Gen[ConfigExampleBase] =
      ConfigExampleFoo.genConfigExampleFoo
    implicit val arbitraryConfigExampleBase: Arbitrary[ConfigExampleBase] = Arbitrary(genConfigExampleBase)
  }
}

class ConfiguredJsonCodecWithKeySuite extends CirceSuite {
  import ConfiguredJsonCodecWithKeySuite._

  checkLaws("Codec[ConfigExampleBase]", CodecTests[ConfigExampleBase].codec)

  "ConfiguredJsonCodec" should "support key annotation and configuration" in forAll { (f: String, b: Double) =>
    val foo: ConfigExampleBase = ConfigExampleFoo(f, 0, b)
    val json = json"""{ "type": "config_example_foo", "this_is_a_field": $f, "myField": $b}"""
    val expected = json"""{ "type": "config_example_foo", "this_is_a_field": $f, "a": 0, "myField": $b}"""

    assert(Encoder[ConfigExampleBase].apply(foo) === expected)
    assert(Decoder[ConfigExampleBase].decodeJson(json) === Right(foo))
  }
}
Example 18
Source File: TimeArbitraries.scala From cats-time with MIT License
package io.chrisdavenport.cats.time.instances

import java.time._

import org.scalacheck.{ Arbitrary, Gen }
import org.scalacheck.Arbitrary.arbitrary

object TimeArbitraries {

  implicit def functionArbitrary[B, A: Arbitrary]: Arbitrary[B => A] = Arbitrary {
    for {
      a <- Arbitrary.arbitrary[A]
    } yield { _: B => a }
  }

  implicit val arbitraryZoneId: Arbitrary[ZoneId] = Arbitrary {
    import scala.jdk.CollectionConverters._
    Gen.oneOf(ZoneId.getAvailableZoneIds.asScala.map(ZoneId.of).toSeq)
  }

  implicit val arbitraryZoneOffset: Arbitrary[ZoneOffset] = Arbitrary {
    // Range is specified in `ofTotalSeconds` javadoc.
    Gen.choose(-64800, 64800).map(ZoneOffset.ofTotalSeconds)
  }

  implicit val arbitraryInstant: Arbitrary[Instant] = Arbitrary(
    Gen.choose(Instant.MIN.getEpochSecond, Instant.MAX.getEpochSecond).map(Instant.ofEpochSecond)
  )

  implicit val arbitraryPeriod: Arbitrary[Period] = Arbitrary(
    for {
      years <- arbitrary[Int]
      months <- arbitrary[Int]
      days <- arbitrary[Int]
    } yield Period.of(years, months, days)
  )

  implicit val arbitraryLocalDateTime: Arbitrary[LocalDateTime] = Arbitrary(
    for {
      instant <- arbitrary[Instant]
      zoneId <- arbitrary[ZoneId]
    } yield LocalDateTime.ofInstant(instant, zoneId)
  )

  implicit val arbitraryZonedDateTime: Arbitrary[ZonedDateTime] = Arbitrary(
    for {
      instant <- arbitrary[Instant]
      zoneId <- arbitrary[ZoneId]
    } yield ZonedDateTime.ofInstant(instant, zoneId)
  )

  implicit val arbitraryOffsetDateTime: Arbitrary[OffsetDateTime] = Arbitrary(
    for {
      instant <- arbitrary[Instant]
      zoneId <- arbitrary[ZoneId]
    } yield OffsetDateTime.ofInstant(instant, zoneId)
  )

  implicit val arbitraryLocalDate: Arbitrary[LocalDate] =
    Arbitrary(arbitrary[LocalDateTime].map(_.toLocalDate))

  implicit val arbitraryLocalTime: Arbitrary[LocalTime] =
    Arbitrary(arbitrary[LocalDateTime].map(_.toLocalTime))

  implicit val arbitraryOffsetTime: Arbitrary[OffsetTime] =
    Arbitrary(arbitrary[OffsetDateTime].map(_.toOffsetTime))

  implicit val arbitraryYearMonth: Arbitrary[YearMonth] =
    Arbitrary(arbitrary[LocalDateTime].map(ldt => YearMonth.of(ldt.getYear, ldt.getMonth)))

  implicit val arbitraryYear: Arbitrary[Year] =
    Arbitrary(arbitrary[LocalDateTime].map(ldt => Year.of(ldt.getYear)))

  implicit val arbitraryDuration: Arbitrary[Duration] = Arbitrary(
    for {
      first <- arbitrary[Instant]
      second <- arbitrary[Instant]
    } yield Duration.between(first, second)
  )

  implicit val arbitraryMonthDay: Arbitrary[MonthDay] =
    Arbitrary(arbitrary[LocalDateTime].map(ldt => MonthDay.of(ldt.getMonth, ldt.getDayOfMonth)))

  implicit val arbitraryMonth: Arbitrary[Month] =
    Arbitrary(arbitrary[MonthDay].map(_.getMonth))
}
Example 19
Source File: BloomFilterSpec.scala From bloom-filter-scala with MIT License
package tests.bloomfilter.mutable

import bloomfilter.CanGenerateHashFrom
import bloomfilter.mutable.BloomFilter
import org.scalacheck.Arbitrary.arbitrary
import org.scalacheck.Prop.forAll
import org.scalacheck.Test.Parameters
import org.scalacheck.commands.Commands
import org.scalacheck.{Arbitrary, Gen, Prop, Properties}

class BloomFilterSpec extends Properties("BloomFilter") {

  property("for Long") = new BloomFilterCommands[Long].property()
  property("for String") = new BloomFilterCommands[String].property()
  property("for Array[Byte]") = new BloomFilterCommands[Array[Byte]].property()

  override def overrideParameters(p: Parameters): Parameters = {
    super.overrideParameters(p).withMinSuccessfulTests(100)
  }

  class BloomFilterCommands[T: Arbitrary](implicit canGenerateHash: CanGenerateHashFrom[T]) extends Commands {
    type Sut = BloomFilter[T]

    case class State(expectedItems: Long, addedItems: Long)

    override def canCreateNewSut(
        newState: State,
        initSuts: Traversable[State],
        runningSuts: Traversable[Sut]): Boolean = {
      initSuts.isEmpty && runningSuts.isEmpty ||
        newState.addedItems > newState.expectedItems ||
        newState.addedItems > 100
    }

    override def destroySut(sut: Sut): Unit =
      sut.dispose()

    override def genInitialState: Gen[State] =
      Gen.chooseNum[Long](1, Int.MaxValue).map(State(_, 0))

    override def newSut(state: State): Sut =
      BloomFilter[T](state.expectedItems, 0.01)

    def initialPreCondition(state: State): Boolean = true

    def genCommand(state: State): Gen[Command] =
      for {
        item <- Arbitrary.arbitrary[T]
      } yield commandSequence(AddItem(item), CheckItem(item))

    case class AddItem(item: T) extends UnitCommand {
      def run(sut: Sut): Unit = sut.synchronized(sut.add(item))
      def nextState(state: State) = state.copy(addedItems = state.addedItems + 1)
      def preCondition(state: State) = true
      def postCondition(state: State, success: Boolean) = success
    }

    case class CheckItem(item: T) extends SuccessCommand {
      type Result = Boolean
      def run(sut: Sut): Boolean = sut.synchronized(sut.mightContain(item))
      def nextState(state: State) = state
      def preCondition(state: State) = true
      def postCondition(state: State, result: Boolean): Prop = result
    }
  }

  private val elemsToAddGen = for {
    numberOfElemsToAdd <- Gen.chooseNum[Int](1, 1000)
    elemsToAdd <- Gen.listOfN(numberOfElemsToAdd, arbitrary[Long])
  } yield elemsToAdd

  // TODO fix elemsToAddGen.filter() below, why Gen.listOfN above generates empty lists?
  property("approximateElementCount") =
    forAll(elemsToAddGen.filter(x => x.size > 10 && x.toSet.size > 10)) { elemsToAdd: List[Long] =>
      val bf = BloomFilter[Long](elemsToAdd.size * 10, 0.0001)
      elemsToAdd.foreach(bf.add)
      val numberOfUnique = elemsToAdd.toSet.size
      math.abs(bf.approximateElementCount() - numberOfUnique) < numberOfUnique * 0.1
    }
}
Example 20
Source File: BloomFiltersSpec.scala From bloom-filter-scala with MIT License
package tests.bloomfilter.mutable

import bloomfilter.CanGenerateHashFrom
import bloomfilter.mutable.BloomFilter
import org.scalacheck.Test.Parameters
import org.scalacheck.commands.Commands
import org.scalacheck.{Arbitrary, Gen, Prop, Properties}
import org.scalacheck.Arbitrary.arbitrary
import org.scalacheck.Prop.forAll

class BloomFiltersSpec extends Properties("BloomFilters") {

  val maxNumElems = 10

  def genListOfMaxTenElems[A](implicit aGen: Gen[A]): Gen[List[A]] =
    Gen.posNum[Int] map (_ % maxNumElems) flatMap (i => Gen.listOfN(i, aGen))

  property("union") =
    forAll(genListOfMaxTenElems(arbitrary[Long]), genListOfMaxTenElems(arbitrary[Long])) {
      (leftElements: List[Long], rightElements: List[Long]) =>
        val leftBloomFilter = BloomFilter[Long](maxNumElems, 0.01)
        leftElements foreach leftBloomFilter.add
        val rightBloomFilter = BloomFilter[Long](maxNumElems, 0.01)
        rightElements foreach rightBloomFilter.add
        val unionBloomFilter = leftBloomFilter union rightBloomFilter
        val result = (leftElements ++ rightElements) forall unionBloomFilter.mightContain
        leftBloomFilter.dispose()
        rightBloomFilter.dispose()
        unionBloomFilter.dispose()
        result
    }

  property("intersect") =
    forAll(genListOfMaxTenElems(arbitrary[Long]), genListOfMaxTenElems(arbitrary[Long])) {
      (leftElements: List[Long], rightElements: List[Long]) =>
        val leftBloomFilter = BloomFilter[Long](maxNumElems, 0.01)
        leftElements foreach leftBloomFilter.add
        val rightBloomFilter = BloomFilter[Long](maxNumElems, 0.01)
        rightElements foreach rightBloomFilter.add
        val intersectBloomFilter = leftBloomFilter intersect rightBloomFilter
        val intersectElems = leftElements.toSet intersect rightElements.toSet
        val result = intersectElems forall intersectBloomFilter.mightContain
        leftBloomFilter.dispose()
        rightBloomFilter.dispose()
        intersectBloomFilter.dispose()
        result
    }
}
Example 21
Source File: ResourceProxyHandlerSpec.scala From cosmos with Apache License 2.0
package com.mesosphere.cosmos.handler

import com.mesosphere.Generators.Implicits._
import com.mesosphere.cosmos.error.CosmosException
import com.mesosphere.cosmos.error.GenericHttpError
import io.lemonlabs.uri.Uri
import org.scalacheck.Arbitrary.arbitrary
import org.scalacheck.Gen
import org.scalatest.FreeSpec
import org.scalatest.Matchers
import org.scalatest.prop.PropertyChecks

final class ResourceProxyHandlerSpec extends FreeSpec with PropertyChecks with Matchers {

  type TestData = (Long, Uri)

  "When Content-Length is provided by the upstream server" - {

    "When Content-Length matches the actual content stream length" - {
      // scalastyle:off magic.number
      val genTestData: Gen[TestData] = for {
        actualLength <- Gen.chooseNum(2, 10)
        uri <- arbitrary[Uri]
      } yield {
        (actualLength.toLong, uri)
      }

      "Succeeds if Content-Length is below the limit" in {
        forAll(genTestData) { case (actualLength, uri) =>
          ResourceProxyHandler.validateContentLength(uri, Some(actualLength))
        }
      }

      "Fails if Content-Length is zero" in {
        forAll(genTestData) { case (_, uri) =>
          val exception = intercept[CosmosException](ResourceProxyHandler.validateContentLength(uri, Some(0)))
          assert(exception.error.isInstanceOf[GenericHttpError])
        }
      }
    }
  }

  "Fails when Content-Length is not provided by the upstream server" in {
    val exception = intercept[CosmosException](ResourceProxyHandler.validateContentLength(Uri.parse("/random"), None))
    assert(exception.error.isInstanceOf[GenericHttpError])
  }

  "Parses the filename correctly" in {
    ResourceProxyHandler.getFileNameFromUrl(
      Uri.parse("http://doesntreallymatter.com/c.d")
    ) shouldEqual Some("c.d")

    ResourceProxyHandler.getFileNameFromUrl(
      Uri.parse("http://doesntreallymatter.com/a/b/c.d")
    ) shouldEqual Some("c.d")

    ResourceProxyHandler.getFileNameFromUrl(
      Uri.parse("http://doesntreallymatter.com/a/b/c")
    ) shouldEqual Some("c")

    ResourceProxyHandler.getFileNameFromUrl(
      Uri.parse("http://doesntreallymatter.com/a/b/c/")
    ) shouldEqual Some("c")

    // These should never happen, but just in case.
    ResourceProxyHandler.getFileNameFromUrl(
      Uri.parse("http://doesntreallymatter.com/")
    ) shouldEqual None

    ResourceProxyHandler.getFileNameFromUrl(
      Uri.parse("https://doesntreallymatter.com")
    ) shouldEqual None
  }
}
Example 22
Source File: CopTests.scala From scalaz-deriving with GNU Lesser General Public License v3.0
// Copyright: 2017 - 2020 Sam Halliday
// License: https://opensource.org/licenses/BSD-3-Clause

package scalaz
package iotatests

import scala._, Predef._

import iotaz._

import org.scalacheck._
import org.scalacheck.Arbitrary.arbitrary
import org.scalacheck.Prop._
import org.scalacheck.ScalacheckShapeless._

object CopTests extends Properties("CopTests") {
  import TList.::
  import TList.Op._

  case class One(a: String)
  case class Two(a: Int)
  case class Three(a: Int)

  type OneTwoThreeL = One :: Two :: Three :: TNil
  type ThreeTwoOneL = Three :: Two :: One :: TNil

  // these just need to compile
  Cop.InjectL[One, OneTwoThreeL]
  Cop.InjectL[CopTests.One, OneTwoThreeL]
  Cop.InjectL[_root_.scalaz.iotatests.CopTests.One, OneTwoThreeL]

  def checkInjectL[A, L <: TList](
    gen: Gen[A],
    inj: Cop.InjectL[A, L],
    index: Int
  ): Prop =
    forAll(gen)(v => inj.inj(v) ?= Cop.unsafeApply(index, v))

  property("inject One into OneTwoThreeL") =
    checkInjectL(arbitrary[One], Cop.InjectL[One, OneTwoThreeL], 0)

  property("inject Two into OneTwoThreeL") =
    checkInjectL(arbitrary[Two], Cop.InjectL[Two, OneTwoThreeL], 1)

  property("inject Three into OneTwoThreeL") =
    checkInjectL(arbitrary[Three], Cop.InjectL[Three, OneTwoThreeL], 2)

  property("inject One into ThreeTwoOneL") =
    checkInjectL(arbitrary[One], Cop.InjectL[One, ThreeTwoOneL], 2)

  property("inject Two into ThreeTwoOneL") =
    checkInjectL(arbitrary[Two], Cop.InjectL[Two, ThreeTwoOneL], 1)

  property("inject Three into ThreeTwoOneL") =
    checkInjectL(arbitrary[Three], Cop.InjectL[Three, ThreeTwoOneL], 0)

  property("inject Three into Reverse[ThreeTwoOneL]") =
    checkInjectL(arbitrary[Three], Cop.InjectL[Three, Reverse[ThreeTwoOneL]], 2)

  type First = Int
  type Last = String
  type Y

  type Yuge =
    First ::
      // 20 rows of 15 = 300 filler items
      Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y ::
      Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y ::
      Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y ::
      Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y ::
      Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y ::
      Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y ::
      Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y ::
      Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y ::
      Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y ::
      Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y ::
      Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y ::
      Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y ::
      Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y ::
      Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y ::
      Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y ::
      Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y ::
      Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y ::
      Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y ::
      Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y ::
      Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y :: Y ::
      Last ::
      TNil

  property("inject First into Yuge") =
    checkInjectL(arbitrary[First], Cop.InjectL[First, Yuge], 0)

  property("inject Last into Yuge") =
    checkInjectL(arbitrary[Last], Cop.InjectL[Last, Yuge], 301)
}
Example 23
Source File: HashcodeTests.scala From scalaz-deriving with GNU Lesser General Public License v3.0
// Copyright: 2017 - 2020 Sam Halliday
// License: https://opensource.org/licenses/BSD-3-Clause

package scalaz
package iotatests

import scala._, Predef._

import iotaz._

import org.scalacheck._
import org.scalacheck.Arbitrary.arbitrary
import org.scalacheck.Prop._
import org.scalacheck.ScalacheckShapeless._

object HashcodeTests extends Properties("HashcodeTests") {

  property("keep equals - hashcode contract for CopK") = {
    import TListK.::

    type CC[A] = CopK[List :: TNilK, A]
    val I = CopK.Inject[List, CC]

    forAll(arbitrary[Map[List[Int], String]].suchThat(_.size > 0)) { map =>
      val copMap = map.map { case (k, v) => (I(k), v) }
      val key = copMap.keys.head
      val keyCopy = I(I.prj(key).get)
      copMap.get(keyCopy) ?= copMap.get(key)
    }
  }
}
Example 24
Source File: ConversionsSpec.scala From core with Apache License 2.0
package com.smartbackpackerapp.scraper.sql

import com.smartbackpackerapp.model._
import com.smartbackpackerapp.scraper.model.VisaRequirementsFor
import org.scalacheck.Arbitrary.arbitrary
import org.scalacheck._
import org.scalatest.FunSuite
import org.scalatest.prop.PropertyChecks

class ConversionsSpec extends FunSuite with ConversionsArbitraries with PropertyChecks {

  forAll { (vr: VisaRequirementsFor) =>
    test(s"convert a $vr into a VisaRequirementsDTO") {
      val dto = (vr.from.value, vr.to.value, vr.visaCategory.toString, vr.description)
      assert(vr.toVisaRequirementsDTO == dto)
    }
  }

  forAll { (v: Vaccine) =>
    test(s"convert a $v into a VaccineDTO # ${v.hashCode()}") {
      val dto = (v.disease.value, v.description, v.diseaseCategories.map(_.toString).mkString(","))
      assert(v.toVaccineDTO == dto)
    }
  }
}

trait ConversionsArbitraries {

  implicit val visaCategory: Arbitrary[VisaCategory] = Arbitrary[VisaCategory] {
    val list = List(
      VisaNotRequired, VisaWaiverProgram, AdmissionRefused, TravelBanned,
      VisaRequired, VisaDeFactoRequired, ElectronicVisa, ElectronicVisitor,
      ElectronicTravelAuthority, FreeVisaOnArrival, VisaOnArrival,
      ElectronicVisaPlusVisaOnArrival, OnlineReciprocityFee,
      MainlandTravelPermit, HomeReturnPermitOnly, UnknownVisaCategory
    )
    Gen.oneOf(list)
  }

  implicit val visaRequirementsFor: Arbitrary[VisaRequirementsFor] = Arbitrary[VisaRequirementsFor] {
    for {
      f <- Gen.alphaUpperStr
      t <- Gen.alphaUpperStr
      c <- arbitrary[VisaCategory]
      d <- Gen.alphaStr
    } yield VisaRequirementsFor(CountryCode(f), CountryCode(t), c, d)
  }

  implicit val diseaseCategory: Arbitrary[DiseaseCategory] = Arbitrary[DiseaseCategory] {
    val list = List(
      AvoidNonSterileEquipment, TakeAntimalarialMeds, GetVaccinated,
      AvoidSharingBodyFluids, ReduceExposureToGerms, EatAndDrinkSafely,
      PreventBugBites, KeepAwayFromAnimals, UnknownDiseaseCategory
    )
    Gen.oneOf(list)
  }

  implicit val vaccine: Arbitrary[Vaccine] = Arbitrary[Vaccine] {
    for {
      d <- Gen.alphaStr
      x <- Gen.alphaStr
      c <- Gen.listOf(arbitrary[DiseaseCategory])
    } yield Vaccine(Disease(d), x, c)
  }
}
Example 25
Source File: types.scala From laserdisc with MIT License
package laserdisc

import eu.timepit.refined.types.string.NonEmptyString
import org.scalacheck.Arbitrary
import org.scalacheck.Arbitrary.arbitrary

final case class Foo(x: Int)
object Foo {
  implicit final val fooRead: Bulk ==> Foo = Read.instance {
    case Bulk(ToInt(i)) => Right(Foo(i))
    case Bulk(other)    => Left(RESPDecErr(s"Boom: $other"))
  }
}

final case class Bar(x: String)

final case class Baz(f1: Int, f2: String)
object Baz {
  implicit final val bazArb: Arbitrary[Baz] = Arbitrary {
    for {
      i <- arbitrary[Int]
      s <- arbitrary[String]
    } yield Baz(i, s)
  }
}

private[laserdisc] sealed trait ProtocolEncoded extends Product with Serializable {
  def encoded: String
}
private[laserdisc] final case class ArrEncoded(v: List[ProtocolEncoded]) extends ProtocolEncoded {
  def encoded: String = s"*${v.size}$CRLF${v.map(_.encoded).mkString}"
}
private[laserdisc] final case class EmptyArrEncoded() extends ProtocolEncoded {
  def encoded: String = s"*0$CRLF"
}
private[laserdisc] final case class NullArrEncoded() extends ProtocolEncoded {
  def encoded: String = s"*-1$CRLF"
}
private[laserdisc] final case class EmptyBulkEncoded() extends ProtocolEncoded {
  def encoded: String = s"$$0$CRLF$CRLF"
}
private[laserdisc] final case class BulkEncoded(v: NonEmptyString) extends ProtocolEncoded {
  def encoded: String = s"$$${v.value.getBytes.length}$CRLF${v.value}$CRLF"
}
private[laserdisc] final case class NullBulkEncoded() extends ProtocolEncoded {
  def encoded: String = s"$$-1$CRLF"
}
private[laserdisc] final case class NumEncoded(v: Long) extends ProtocolEncoded {
  def encoded: String = s":$v$CRLF"
}
private[laserdisc] final case class StrEncoded(v: String) extends ProtocolEncoded {
  def encoded: String = s"+$v$CRLF"
}
private[laserdisc] final case class ErrEncoded(v: NonEmptyString) extends ProtocolEncoded {
  def encoded: String = s"-${v.value}$CRLF"
}
Example 26
Source File: NewtsSuite.scala From newts with Apache License 2.0
package newts

import cats.instances.AllInstances
import newts.syntax.AllSyntax
import org.scalacheck.{Arbitrary, Cogen}
import org.scalacheck.Arbitrary.arbitrary
import org.scalatest.prop.GeneratorDrivenPropertyChecks
import org.scalatest.{FunSuite, Matchers}
import org.typelevel.discipline.scalatest.Discipline

trait NewtsSuite extends FunSuite
  with Matchers
  with GeneratorDrivenPropertyChecks
  with Discipline
  with AllSyntax
  with AllInstances
  with cats.syntax.AllSyntax
  with ArbitraryInstances

trait ArbitraryInstances {
  def arbNewtype[S, A: Arbitrary](implicit newtype: Newtype.Aux[S, A]): Arbitrary[S] =
    Arbitrary(arbitrary[A].map(newtype.wrap))

  def cogenNewtype[S, A: Cogen](implicit newtype: Newtype.Aux[S, A]): Cogen[S] =
    Cogen[A].contramap(newtype.unwrap)

  implicit val allArbitrary: Arbitrary[All] = arbNewtype[All, Boolean]
  implicit val anyArbitrary: Arbitrary[Any] = arbNewtype[Any, Boolean]
  implicit def multArbitrary[A: Arbitrary]: Arbitrary[Mult[A]] = arbNewtype[Mult[A], A]
  implicit def dualArbitrary[A: Arbitrary]: Arbitrary[Dual[A]] = arbNewtype[Dual[A], A]
  implicit def firstArbitrary[A: Arbitrary]: Arbitrary[First[A]] = arbNewtype[First[A], A]
  implicit def lastArbitrary[A: Arbitrary]: Arbitrary[Last[A]] = arbNewtype[Last[A], A]
  implicit def firstOptionArbitrary[A: Arbitrary]: Arbitrary[FirstOption[A]] = arbNewtype[FirstOption[A], Option[A]]
  implicit def lastOptionArbitrary[A: Arbitrary]: Arbitrary[LastOption[A]] = arbNewtype[LastOption[A], Option[A]]
  implicit def minArbitrary[A: Arbitrary]: Arbitrary[Min[A]] = arbNewtype[Min[A], A]
  implicit def maxArbitrary[A: Arbitrary]: Arbitrary[Max[A]] = arbNewtype[Max[A], A]
  implicit def zipListArbitrary[A: Arbitrary]: Arbitrary[ZipList[A]] = arbNewtype[ZipList[A], List[A]]
  implicit def backwardsArbitrary[F[_], A](implicit ev: Arbitrary[F[A]]): Arbitrary[Backwards[F, A]] = arbNewtype[Backwards[F, A], F[A]]
  implicit def reverseArbitrary[F[_], A](implicit ev: Arbitrary[F[A]]): Arbitrary[Reverse[F, A]] = arbNewtype[Reverse[F, A], F[A]]

  implicit val allCogen: Cogen[All] = cogenNewtype[All, Boolean]
  implicit val anyCogen: Cogen[Any] = cogenNewtype[Any, Boolean]
  implicit def multCogen[A: Cogen]: Cogen[Mult[A]] = cogenNewtype[Mult[A], A]
  implicit def dualCogen[A: Cogen]: Cogen[Dual[A]] = cogenNewtype[Dual[A], A]
  implicit def firstCogen[A: Cogen]: Cogen[First[A]] = cogenNewtype[First[A], A]
  implicit def lastCogen[A: Cogen]: Cogen[Last[A]] = cogenNewtype[Last[A], A]
  implicit def firstOptionCogen[A: Cogen]: Cogen[FirstOption[A]] = cogenNewtype[FirstOption[A], Option[A]]
  implicit def lastOptionCogen[A: Cogen]: Cogen[LastOption[A]] = cogenNewtype[LastOption[A], Option[A]]
  implicit def minOptionCogen[A: Cogen]: Cogen[Min[A]] = cogenNewtype[Min[A], A]
  implicit def maxOptionCogen[A: Cogen]: Cogen[Max[A]] = cogenNewtype[Max[A], A]
  implicit def zipListCogen[A: Cogen]: Cogen[ZipList[A]] = cogenNewtype[ZipList[A], List[A]]
  implicit def backwardsCogen[F[_], A](implicit ev: Cogen[F[A]]): Cogen[Backwards[F, A]] = cogenNewtype[Backwards[F, A], F[A]]
  implicit def reverseCogen[F[_], A](implicit ev: Cogen[F[A]]): Cogen[Reverse[F, A]] = cogenNewtype[Reverse[F, A], F[A]]
}
Example 27
Source File: ColumnMetadataTest.scala From spark-vector with Apache License 2.0 | 5 votes |
package com.actian.spark_vector.vector

import java.util.regex.Pattern

import org.apache.spark.sql.types.DecimalType
import org.scalacheck.Arbitrary.arbitrary
import org.scalacheck.Gen
import org.scalacheck.Gen.{choose, identifier}
import org.scalacheck.Prop.{forAll, propBoolean}
import org.scalatest.{FunSuite, Matchers}

import com.actian.spark_vector.test.tags.RandomizedTest

class ColumnMetadataTest extends FunSuite with Matchers {
  // Generate random column metadata and ensure the resultant StructField's are valid
  test("generated", RandomizedTest) {
    forAll(columnMetadataGen)(colMD => {
      assertColumnMetadata(colMD)
    }).check
  }

  val milliSecsPattern = Pattern.compile(".*\\.(S*)")

  def assertColumnMetadata(columnMD: ColumnMetadata): Boolean = {
    val structField = columnMD.structField
    structField.dataType match {
      // For decimal type, ensure the scale and precision match
      case decType: DecimalType =>
        decType.precision should be(columnMD.precision)
        decType.scale should be(columnMD.scale)
      case _ =>
    }
    true
  }

  val columnMetadataGen: Gen[ColumnMetadata] =
    for {
      name <- identifier
      typeName <- VectorTypeGen.vectorJdbcTypeGen
      nullable <- arbitrary[Boolean]
      precision <- choose(0, 20)
      scale <- choose(0, Math.min(20, precision))
    } yield ColumnMetadata(name, typeName, nullable, precision, scale)
}
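columnMetadataGen above is a good example of dependent generation: the range of scale depends on the precision drawn just before it, so every generated value satisfies scale <= precision by construction. The same idea in isolation (Span is a hypothetical type for illustration):

import org.scalacheck.Gen

final case class Span(lo: Int, hi: Int)

object SpanGen {
  // hi is drawn from a range that depends on the already-generated lo,
  // so lo <= hi holds for every value by construction; no filtering needed.
  val genSpan: Gen[Span] =
    for {
      lo <- Gen.choose(0, 100)
      hi <- Gen.choose(lo, 100)
    } yield Span(lo, hi)
}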
Example 28
Source File: ArbitraryStrategy.scala From lithium with Apache License 2.0 | 5 votes |
package com.swissborg.lithium

import akka.cluster.swissborg.EitherValues
import cats.effect.Sync
import cats.{Applicative, ApplicativeError, Functor, Semigroupal}
import com.swissborg.lithium.instances.ArbitraryTestInstances._
import com.swissborg.lithium.strategy._
import org.scalacheck.Arbitrary.arbitrary
import org.scalacheck.Gen.chooseNum
import org.scalacheck.{Arbitrary, Gen}

trait ArbitraryStrategy[F] {
  def fromScenario(scenario: Scenario): Arbitrary[F]
}

object ArbitraryStrategy extends EitherValues {
  implicit def keepRefereeArbitraryStrategy[F[_]: Applicative]: ArbitraryStrategy[KeepReferee[F]] =
    new ArbitraryStrategy[KeepReferee[F]] {
      override def fromScenario(scenario: Scenario): Arbitrary[KeepReferee[F]] = Arbitrary {
        val maybeNodes = scenario.worldViews.headOption.map(_.nodes)
        for {
          referee <- Gen.oneOf(maybeNodes.fold(Arbitrary.arbitrary[Node]) { nodes =>
            chooseNum(0, nodes.length - 1).map(nodes.toNonEmptyList.toList.apply)
          }, Arbitrary.arbitrary[Node])
          downIfLessThan <- chooseNum(1, maybeNodes.fold(1)(_.length))
        } yield new strategy.KeepReferee[F](
          KeepRefereeConfig(referee.member.address.toString, downIfLessThan)
        )
      }
    }

  implicit def staticQuorumArbitraryStrategy[F[_]: Sync]: ArbitraryStrategy[StaticQuorum[F]] =
    new ArbitraryStrategy[StaticQuorum[F]] {
      override def fromScenario(scenario: Scenario): Arbitrary[StaticQuorum[F]] = Arbitrary {
        val clusterSize = scenario.clusterSize
        val minQuorumSize = clusterSize / 2 + 1
        for {
          quorumSize <- chooseNum(minQuorumSize, clusterSize)
          role <- arbitrary[String]
        } yield new strategy.StaticQuorum(StaticQuorumConfig(role, quorumSize))
      }
    }

  implicit def keepMajorityArbitraryStrategy[F[_]: ApplicativeError[*[_], Throwable]]
    : ArbitraryStrategy[KeepMajority[F]] =
    new ArbitraryStrategy[KeepMajority[F]] {
      override def fromScenario(scenario: Scenario): Arbitrary[KeepMajority[F]] = Arbitrary {
        for {
          role <- arbitrary[String]
          weaklyUpMembersAllowed <- arbitrary[Boolean]
        } yield new strategy.KeepMajority(KeepMajorityConfig(role), weaklyUpMembersAllowed)
      }
    }

  implicit def keepOldestArbitraryStrategy[F[_]: ApplicativeError[*[_], Throwable]]: ArbitraryStrategy[KeepOldest[F]] =
    new ArbitraryStrategy[KeepOldest[F]] {
      override def fromScenario(scenario: Scenario): Arbitrary[KeepOldest[F]] = Arbitrary {
        for {
          downIfAlone <- arbitrary[Boolean]
          role <- arbitrary[String]
        } yield new strategy.KeepOldest(KeepOldestConfig(downIfAlone, role))
      }
    }

  implicit def downAllArbitraryStrategy[F[_]: Applicative]: ArbitraryStrategy[DownAll[F]] =
    new ArbitraryStrategy[DownAll[F]] {
      override def fromScenario(scenario: Scenario): Arbitrary[DownAll[F]] =
        Arbitrary(Gen.const(new strategy.DownAll[F]()))
    }

  implicit def downIndirectlyConnectedArbitraryStrategy[F[_]: Applicative]: ArbitraryStrategy[IndirectlyConnected[F]] =
    new ArbitraryStrategy[IndirectlyConnected[F]] {
      override def fromScenario(scenario: Scenario): Arbitrary[IndirectlyConnected[F]] =
        Arbitrary(Gen.const(new strategy.IndirectlyConnected[F]()))
    }

  implicit def unionArbitraryStrategy[F[_]: Functor: Semigroupal, Strat1[_[_]], Strat2[_[_]]](
    implicit ev1: Strat1[F] <:< Strategy[F],
    ev2: Strat2[F] <:< Strategy[F],
    arbStrat1: ArbitraryStrategy[Strat1[F]],
    arbStrat2: ArbitraryStrategy[Strat2[F]]
  ): ArbitraryStrategy[Union[F, Strat1, Strat2]] =
    new ArbitraryStrategy[Union[F, Strat1, Strat2]] {
      override def fromScenario(scenario: Scenario): Arbitrary[Union[F, Strat1, Strat2]] = Arbitrary {
        for {
          strat1 <- arbStrat1.fromScenario(scenario).arbitrary
          strat2 <- arbStrat2.fromScenario(scenario).arbitrary
        } yield new Union[F, Strat1, Strat2](strat1, strat2)
      }
    }
}
Example 29
Source File: Simulation.scala From lithium with Apache License 2.0 | 5 votes |
package com.swissborg.lithium

import cats.implicits._
import cats._
import com.swissborg.lithium.strategy._
import com.swissborg.lithium.utils._
import org.scalacheck.Arbitrary
import org.scalacheck.Arbitrary.arbitrary

// NOTE: the Simulation class declaration was missing from this snippet; the
// signature below is an editorial reconstruction inferred from the companion's
// arbSimulation, not the verbatim original.
final case class Simulation[F[_]: Functor, Strat[_[_]], S <: Scenario](strategy: Strat[F], scenario: S)(
  implicit M: Monoid[F[PostResolution]],
  ev: Strat[F] <:< Strategy[F]
) {
  val splitBrainResolved: F[Boolean] = {
    scenario.worldViews
      .foldMap { worldView =>
        strategy.takeDecision(worldView).map(PostResolution.fromDecision(worldView))
      }
      .map(_.isResolved)
  }

  override def toString: String = s"Simulation($strategy, $scenario)"
}

object Simulation {
  implicit def arbSimulation[F[_]: Functor, Strat[_[_]], S <: Scenario: Arbitrary](
    implicit strategy: ArbitraryStrategy[Strat[F]],
    M: Monoid[F[PostResolution]],
    ev: Strat[F] <:< Strategy[F]
  ): Arbitrary[Simulation[F, Strat, S]] = Arbitrary {
    for {
      scenario <- arbitrary[S]
      strategy <- strategy.fromScenario(scenario).arbitrary
    } yield new Simulation[F, Strat, S](strategy, scenario)
  }
}
Example 30
Source File: TransformBstProperties.scala From functional-way with GNU General Public License v3.0 | 5 votes |
import org.scalacheck.{Gen, Properties}
import org.scalacheck.Gen._
import org.scalacheck.Prop.forAll
import org.scalacheck.Arbitrary.arbitrary
import tree.transformBst
import tree.MaybeNode

class TransformBstProperties extends Properties("TransformBST") {

  import tree.Node

  private val leafNodeGen: Gen[Node] =
    arbitrary[Int].map(v => Node(left = null, right = null, value = v))

  // lazy vals plus Gen.lzy break the forward reference between the two
  // mutually recursive generators, which would otherwise be null at init.
  private lazy val nodeGen: Gen[Node] = for {
    v <- arbitrary[Int]
    left <- Gen.lzy(genTree)
    right <- Gen.lzy(genTree)
  } yield Node(value = v, left = left, right = right)

  private lazy val genTree: Gen[Node] = oneOf(nodeGen, leafNodeGen)

  private def isZeroPresent(node: MaybeNode): Boolean = node match {
    case n: Node =>
      if (n.value == 0) true
      else isZeroPresent(n.left) || isZeroPresent(n.right)
    case null => false
  }

  // Not a complete test here. But a good one to begin with
  property("transformBst") = forAll(genTree) { (root: Node) =>
    val transformedTreeRoot = transformBst(root)
    isZeroPresent(transformedTreeRoot)
  }
}
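Generators for recursive structures such as genTree above are easiest to keep safe with an explicit size budget; without one, a branch-heavy run can recurse very deeply. A sketch of the usual Gen.sized/Gen.resize pattern, assuming a simple locally defined node type:

import org.scalacheck.Gen
import org.scalacheck.Arbitrary.arbitrary

final case class SimpleNode(value: Int, children: List[SimpleNode])

object BoundedTreeGen {
  private val leafGen: Gen[SimpleNode] =
    arbitrary[Int].map(SimpleNode(_, Nil))

  // Gen.sized reads the current size budget and Gen.resize halves it at
  // every level, so the recursion is guaranteed to bottom out at a leaf.
  def genTree: Gen[SimpleNode] = Gen.sized { size =>
    if (size <= 1) leafGen
    else
      for {
        value    <- arbitrary[Int]
        width    <- Gen.choose(0, 3)
        children <- Gen.listOfN(width, Gen.resize(size / 2, genTree))
      } yield SimpleNode(value, children)
  }
}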
Example 31
Source File: LfVersionsSpec.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.lf

import org.scalacheck.Arbitrary.arbitrary
import org.scalacheck.{Arbitrary, Gen}
import org.scalatest.prop.GeneratorDrivenPropertyChecks
import org.scalatest.{Matchers, WordSpec}
import scalaz.NonEmptyList
import scalaz.scalacheck.ScalazArbitrary._

class LfVersionsSpec extends WordSpec with Matchers with GeneratorDrivenPropertyChecks {

  case class DummyVersion(value: Int) {
    def protoValue: String = value.toString
  }

  class DummyVersions(versions: NonEmptyList[DummyVersion])
      extends LfVersions[DummyVersion](versions)(_.protoValue)

  case class DummyError(msg: String)

  private def dummyVersionGen: Gen[DummyVersion] = arbitrary[Int].map(DummyVersion)

  implicit private val dummyVersionArb: Arbitrary[DummyVersion] = Arbitrary(dummyVersionGen)

  "LfVersions.acceptedVersions" should {
    "be otherVersions + defaultVersion" in forAll { vs: NonEmptyList[DummyVersion] =>
      val versions = new DummyVersions(vs)
      versions.acceptedVersions should ===(vs.list.toList)
      vs.list.toList.forall(v => versions.acceptedVersions.contains(v)) shouldBe true
    }
  }

  "LfVersions.decodeVersion" should {
    "return failure if passed version value is null, don't throw exception" in {
      val versions = new DummyVersions(NonEmptyList(DummyVersion(1)))
      versions.isAcceptedVersion(null) shouldBe None
    }

    "return failure if passed version value is an empty string, don't throw exception" in {
      val versions = new DummyVersions(NonEmptyList(DummyVersion(1)))
      versions.isAcceptedVersion("") shouldBe None
    }

    "return failure if passed version is not default and not supported" in forAll {
      (vs: NonEmptyList[DummyVersion], version: DummyVersion) =>
        whenever(!vs.list.toList.contains(version)) {
          val versions = new DummyVersions(vs)
          versions.acceptedVersions.contains(version) shouldBe false
          versions.isAcceptedVersion(version.protoValue) shouldBe None
        }
    }

    "return success if passed version is default" in forAll { default: DummyVersion =>
      val versions = new DummyVersions(NonEmptyList(default))
      versions.isAcceptedVersion(default.protoValue) shouldBe Some(default)
    }

    "return success if passed version is one of other versions" in forAll {
      (vs: NonEmptyList[DummyVersion], version: DummyVersion) =>
        val versions = new DummyVersions(version <:: vs)
        versions.acceptedVersions.contains(version) shouldBe true
        versions.isAcceptedVersion(version.protoValue) shouldBe Some(version)
    }
  }
}
Example 32
Source File: NameClashRecordVariantUT.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.codegen

import com.daml.sample.MyMain.NameClashRecordVariant
import NameClashRecordVariant.{NameClashRecordVariantA, NameClashRecordVariantB}
import com.daml.ledger.client.binding.{Primitive => P, Value}
import org.scalacheck.Arbitrary.arbitrary
import org.scalacheck.Gen
import org.scalatest.prop.GeneratorDrivenPropertyChecks
import org.scalatest.{Matchers, WordSpec}

class NameClashRecordVariantUT extends WordSpec with Matchers with GeneratorDrivenPropertyChecks {

  "generated variants have compatible read and write methods" in forAll(nameClashRecordVariantGen) { a1 =>
    val b = Value.encode(a1)
    val a2 = Value.decode[NameClashRecordVariant](b)
    Some(a1) shouldBe a2
  }

  def nameClashRecordVariantGen: Gen[NameClashRecordVariant] =
    Gen.oneOf(nameClashRecordVariantAGen, nameClashRecordVariantBGen)

  def nameClashRecordVariantAGen: Gen[NameClashRecordVariantA] =
    for {
      x <- arbitrary[P.Int64]
      y <- arbitrary[P.Int64]
      z <- arbitrary[P.Int64]
    } yield NameClashRecordVariantA(x, y, z)

  def nameClashRecordVariantBGen: Gen[NameClashRecordVariantB] =
    for {
      x <- arbitrary[P.Int64]
      y <- arbitrary[P.Int64]
      z <- arbitrary[P.Int64]
    } yield NameClashRecordVariantB(x, y, z)
}
Example 33
Source File: DamlDecimalGen.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.ledger.client.binding.encoding

import org.scalacheck.Arbitrary.arbitrary
import org.scalacheck.{Arbitrary, Gen}
import com.daml.ledger.client.binding.{Primitive => P}

// DAML Decimal is DECIMAL(38, 10)
object DamlDecimalGen {

  private val scale = 10

  val MaxDamlDecimal = BigDecimal("9" * 28 + "." + "9" * scale).setScale(scale)
  val MinDamlDecimal = -MaxDamlDecimal

  // this gives us: [-21474836480000000000.0000000000; 21474836470000000000.0000000000]
  // [BigDecimal(scala.Int.MinValue, -10).setScale(10), BigDecimal(scala.Int.MaxValue, -10).setScale(10)]
  private val genDamlDecimal: Gen[BigDecimal] =
    for {
      n <- arbitrary[Int]
      s <- Gen.choose(-scale, scale)
    } yield BigDecimal(n.toLong, s).setScale(scale)

  private val genSpecificDamlDecimal: Gen[BigDecimal] =
    Gen.oneOf(
      MaxDamlDecimal,
      MinDamlDecimal,
      BigDecimal(0).setScale(scale),
      BigDecimal(1).setScale(scale),
      BigDecimal(-1).setScale(scale))

  lazy val arbDamlDecimal: Arbitrary[P.Numeric] = Arbitrary(
    Gen.frequency((10, genDamlDecimal), (5, genSpecificDamlDecimal)))
}
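DamlDecimalGen's Gen.frequency mix is worth copying: purely random values rarely hit boundary cases, so known extremes are folded in with an explicit weight. The same trick for plain Int (names are illustrative):

import org.scalacheck.Gen
import org.scalacheck.Arbitrary.arbitrary

object BoundaryBiasedGen {
  // Edge cases that uniform random generation almost never produces.
  private val edgeCases: Gen[Int] =
    Gen.oneOf(0, 1, -1, Int.MaxValue, Int.MinValue)

  // Two parts unconstrained values to one part known edge cases.
  val biasedInt: Gen[Int] =
    Gen.frequency(2 -> arbitrary[Int], 1 -> edgeCases)
}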
Example 34
Source File: CustomMatcherSpec.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.scalatest

import org.scalacheck.Arbitrary.arbitrary
import org.scalacheck.Gen
import org.scalatest.WordSpec
import org.scalatest.prop.GeneratorDrivenPropertyChecks
import scalaz.{Equal, Show}

class CustomMatcherSpec extends WordSpec with GeneratorDrivenPropertyChecks {

  implicit override val generatorDrivenConfig: PropertyCheckConfiguration =
    PropertyCheckConfiguration(minSuccessful = 10000)

  "make sure it works comparing ints" in {
    import com.daml.scalatest.CustomMatcher._
    import scalaz.std.anyVal._

    CustomMatcherOps(10) should_=== 10
    CustomMatcherOps(10) should_=/= 11

    10 should_=== 10
    10 should_=/= 11
  }

  case class Dummy(a: String, b: Int, c: BigDecimal)

  lazy val genDummy: Gen[Dummy] =
    for {
      a <- arbitrary[String]
      b <- arbitrary[Int]
      c <- arbitrary[BigDecimal]
    } yield Dummy(a, b, c)

  lazy val genPairOfNonEqualDummies: Gen[(Dummy, Dummy)] = {
    def genSetOf2: Gen[Set[Dummy]] =
      Gen.buildableOfN[Set[Dummy], Dummy](2, genDummy).filter(_.size == 2)

    genSetOf2.map(_.toSeq).map {
      case Seq(a, b) => (a, b)
      case a @ _ => sys.error(s"Should never happen: $a")
    }
  }

  implicit val dummyEqual: Equal[Dummy] = Equal.equalA
  implicit val dummyShow: Show[Dummy] = Show.showA

  "make sure it works comparing case classes with custom Show and Equal" in forAll(
    genPairOfNonEqualDummies) {
    case (a, b) =>
      import com.daml.scalatest.CustomMatcher._

      a should_=== a
      a should_=== a.copy()
      a should_=/= b
  }
}
Example 35
Source File: ECDSASignatureSpec.scala From mantis with Apache License 2.0 | 5 votes |
package io.iohk.ethereum.crypto import akka.util.ByteString import io.iohk.ethereum.nodebuilder.SecureRandomBuilder import org.scalacheck.Arbitrary import org.scalacheck.Arbitrary.arbitrary import org.scalatest.prop.PropertyChecks import org.scalatest.{FlatSpec, Matchers} import org.spongycastle.crypto.params.ECPublicKeyParameters import org.spongycastle.util.encoders.Hex class ECDSASignatureSpec extends FlatSpec with Matchers with PropertyChecks with SecureRandomBuilder { "ECDSASignature" should "recover public key correctly for go ethereum transaction" in { val bytesToSign = Hex.decode("5a1465f4683bf2c18fc72c0789239c0f52b3ceac666ca9551cf265a11abe912c") val signatureRandom = ByteString(Hex.decode("f3af65a23fbf207b933d3c962381aa50e0ac19649c59c1af1655e592a8d95401")) val signature = ByteString(Hex.decode("53629a403579f5ce57bcbefba2616b1c6156d308ddcd37372c94943fdabeda97")) val pointSign = 28 val sig = ECDSASignature(BigInt(1, signatureRandom.toArray[Byte]), BigInt(1, signature.toArray[Byte]), pointSign.toByte) sig.publicKey(bytesToSign).isEmpty shouldBe false } it should "fail on case from transaction 74c45d0cf2332cc021bebdfee6b1c1da0b58e8f4154537adb79b025f722920a4" in { val bytesToSign = Hex.decode("2bb3925f178aa22c11435c61899e134fb7b1227016274b5f7b9d85c4469130ba") val signatureRandom = ByteString(Hex.decode("fbe3df0cf030655d817a89936850d1cc00c07c35d3b21be73cfe9a730ea8b753")) val signature = ByteString(Hex.decode("62d73b6a92ac23ff514315fad795bbac6d485481d356329d71467e93c87dfa42")) val pointSign = 0x1f val sig = ECDSASignature(BigInt(1, signatureRandom.toArray[Byte]), BigInt(1, signature.toArray[Byte]), pointSign.toByte) sig.publicKey(bytesToSign).isEmpty shouldBe true } it should "sign message and recover public key" in { forAll(arbitrary[Array[Byte]], Arbitrary.arbitrary[Unit].map(_ => generateKeyPair(secureRandom))) { (message, keys) => val pubKey = keys.getPublic.asInstanceOf[ECPublicKeyParameters].getQ val msg = kec256(message) val signature = ECDSASignature.sign(msg, keys) val recPubKey = signature.publicKey(msg) val result = recPubKey.map(a => ECDSASignature.uncompressedIndicator +: a).map(curve.getCurve.decodePoint).map(_.getEncoded(true)).map(ByteString(_)) val expected = Some(pubKey.getEncoded(true)).map(ByteString(_)) result shouldBe expected } } }
Example 36
Source File: PortTest.scala From jvm-toxcore-api with GNU General Public License v3.0 | 5 votes |
package im.tox.core.network import im.tox.core.typesafe.Equals._ import im.tox.core.ModuleCompanionTest import org.scalacheck.Arbitrary import org.scalacheck.Arbitrary.arbitrary object PortTest { implicit val arbPort: Arbitrary[Port] = Arbitrary(arbitrary[Char].filter(_ =/= 0).map(x => new Port(x))) } @SuppressWarnings(Array("org.wartremover.warts.Equals")) final class PortTest extends ModuleCompanionTest(Port) { test("creation") { for (portNumber <- Port.MinValue to Port.MaxValue) { Port.fromInt(portNumber) match { case None => fail(s"out of range: $portNumber") case Some(port) => assert(port.value == portNumber) } } } test("creation from invalid values") { for (portNumber <- Seq(-1, Int.MinValue, Int.MaxValue, 0x10000)) { assert(Port.fromInt(portNumber).isEmpty) } } }
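arbPort above filters an arbitrary Char, which makes ScalaCheck discard every rejected value; that is fine when rejections are rare (here only 0 is dropped), but a heavy filter can exhaust the generator's retry budget. A sketch contrasting filtering with constructive generation, using a hypothetical PortNumber wrapper:

import org.scalacheck.{Arbitrary, Gen}
import org.scalacheck.Arbitrary.arbitrary

final case class PortNumber(value: Int)

object PortNumberGen {
  // Filtering works, but every rejected value is a discarded test case;
  // acceptable only when the predicate almost always holds (not here).
  val byFilter: Gen[PortNumber] =
    arbitrary[Int].filter(n => n >= 1 && n <= 65535).map(PortNumber(_))

  // Constructive generation never discards and is usually preferable.
  val byChoose: Gen[PortNumber] =
    Gen.choose(1, 65535).map(PortNumber(_))

  implicit val arbPortNumber: Arbitrary[PortNumber] = Arbitrary(byChoose)
}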
Example 37
Source File: EnumModuleCompanionTest.scala From jvm-toxcore-api with GNU General Public License v3.0 | 5 votes |
package im.tox.core.typesafe import im.tox.core.ModuleCompanionTest import org.scalacheck.Arbitrary.arbitrary import org.scalacheck.Gen @SuppressWarnings(Array("org.wartremover.warts.Equals")) abstract class EnumModuleCompanionTest[T, S <: Security](module: EnumModuleCompanion[T, S]) extends ModuleCompanionTest(module) { test("non-empty enum values") { assert(module.values.nonEmpty) } test("distinct ordinals") { val ordinals = module.values.toSeq.map(module.ordinal) assert(ordinals == ordinals.distinct) } }
Example 38
Source File: GenericTree.scala From bonsai with MIT License | 5 votes |
package com.stripe.bonsai import org.scalacheck._ import org.scalacheck.Arbitrary.arbitrary case class GenericTree[A](label: A, children: List[GenericTree[A]]) object GenericTree { def node[A](label: A, children: GenericTree[A]*): GenericTree[A] = GenericTree(label, children.toList) def leaf[A](label: A): GenericTree[A] = GenericTree(label, Nil) implicit def TreeOps[A]: TreeOps[GenericTree[A], A] = new TreeOps[GenericTree[A], A] { type Node = GenericTree[A] def root(t: GenericTree[A]): Option[Node] = Some(t) def children(node: Node): List[Node] = node.children def label(node: Node): A = node.label } def fromTree[A](tree: Tree[A]): Option[GenericTree[A]] = { def mkTree(node: Tree.NodeRef[A]): GenericTree[A] = GenericTree(node.label, node.children.map(mkTree)(collection.breakOut)) tree.root.map(mkTree) } implicit def arbitraryTree[A: Arbitrary]: Arbitrary[GenericTree[A]] = Arbitrary(genTree(arbitrary[A], 4, 7)) def genTree[A](genLabel: Gen[A], maxChildren: Int, maxDepth: Int): Gen[GenericTree[A]] = if (maxDepth == 1) genLabel.map(GenericTree.leaf) else { val k = maxDepth - 1 for { label <- genLabel numChildren <- Gen.choose(0, maxChildren) children <- Gen.listOfN(numChildren, genTree(genLabel, maxChildren, maxDepth - 1)) } yield GenericTree(label, children) } } case class GenericBinTree[A](label: A, children: Option[(GenericBinTree[A], GenericBinTree[A])]) object GenericBinTree { def branch[A](label: A, left: GenericBinTree[A], right: GenericBinTree[A]): GenericBinTree[A] = GenericBinTree(label, Some((left, right))) def leaf[A](label: A): GenericBinTree[A] = GenericBinTree(label, None) implicit def GenericBinTreeOps[A]: FullBinaryTreeOps[GenericBinTree[A], A, A] = new FullBinaryTreeOps[GenericBinTree[A], A, A] { type Node = GenericBinTree[A] def root(t: GenericBinTree[A]): Option[Node] = Some(t) def foldNode[X](node: Node)(f: (A, Node, Node) => X, g: A => X): X = node.children match { case Some((lc, rc)) => f(node.label, lc, rc) case None => g(node.label) } } def fromTree[A](tree: FullBinaryTree[A, A]): Option[GenericBinTree[A]] = { def construct(n: tree.NodeRef): GenericBinTree[A] = n.fold({ (a, lc, rc) => GenericBinTree.branch(a, construct(lc), construct(rc)) }, GenericBinTree.leaf) tree.root.map(construct) } implicit def arbitraryGenericBinTree[A: Arbitrary]: Arbitrary[GenericBinTree[A]] = Arbitrary(genGenericBinTree(arbitrary[A], 9)) def genGenericBinTreeLeaf[A](genLabel: Gen[A]): Gen[GenericBinTree[A]] = genLabel.map(GenericBinTree.leaf) def genGenericBinTreeBranch[A](genLabel: Gen[A], maxDepth: Int): Gen[GenericBinTree[A]] = if (maxDepth == 1) genGenericBinTreeLeaf(genLabel) else for { label <- genLabel left <- genGenericBinTree(genLabel, maxDepth - 1) right <- genGenericBinTree(genLabel, maxDepth - 1) } yield GenericBinTree.branch(label, left, right) def genGenericBinTree[A](genLabel: Gen[A], maxDepth: Int): Gen[GenericBinTree[A]] = Gen.frequency( 7 -> genGenericBinTreeBranch(genLabel, maxDepth), 2 -> genGenericBinTreeLeaf(genLabel)) }
Example 39
Source File: LocalDateTimeTests.scala From dtc with Apache License 2.0 | 5 votes |
package dtc.tests import java.time.{Duration, LocalDateTime, ZoneOffset} import cats.instances.option._ import cats.kernel.laws.discipline.OrderTests import com.fortysevendeg.scalacheck.datetime.jdk8.ArbitraryJdk8.genZonedDateTime import dtc.instances.localDateTime._ import dtc.laws.{DateTimeTests, LocalDateTimeTests, ProviderTests} import org.scalacheck.Arbitrary.arbitrary import org.scalacheck.{Arbitrary, Cogen} import dtc.instances.providers.realLocalDateTimeProvider class JVMLocalDateTimeTests extends DTCSuiteJVM { implicit val arbT: Arbitrary[LocalDateTime] = Arbitrary(genZonedDateTime.map(_.toLocalDateTime)) implicit val cogenT: Cogen[LocalDateTime] = Cogen(_.toEpochSecond(ZoneOffset.UTC)) val overflowSafePairGen = for { dt <- arbitrary[LocalDateTime] dur <- arbitrary[Duration] } yield (dt, dur) val ldtTests = LocalDateTimeTests[LocalDateTime](overflowSafePairGen, genYear) checkAll("java.time.LocalDateTime", DateTimeTests[LocalDateTime](overflowSafePairGen).dateTime) checkAll("java.time.LocalDateTime", ldtTests.localDateTime) checkAll("java.time.LocalDateTime", ldtTests.monthUntilFractionHandling) checkAll("java.time.LocalDateTime", OrderTests[LocalDateTime].order) checkAll("java.time.LocalDateTime", OrderTests[LocalDateTime].partialOrder) checkAll("java.time.LocalDateTime", OrderTests[LocalDateTime].eqv) checkAll("java.time.LocalDateTime", ProviderTests[LocalDateTime](genTimeZone).provider) }
Example 40
Source File: JVMZonedDateTimeTests.scala From dtc with Apache License 2.0 | 5 votes |
package dtc.tests import java.time.temporal.ChronoUnit import java.time.{Duration, ZonedDateTime} import cats.instances.option._ import cats.kernel.laws.discipline.OrderTests import com.fortysevendeg.scalacheck.datetime.jdk8.ArbitraryJdk8 import dtc.{Offset, Zoned} import dtc.laws.{DateTimeTests, ProviderTests, ZonedDateTimeTestData, ZonedDateTimeTests} import dtc.syntax.timeZone._ import org.scalacheck.Arbitrary.arbitrary import org.scalacheck.{Arbitrary, Cogen, Gen} import dtc.instances.providers.realZonedDateTimeProvider abstract class JVMZonedDateTimeTests(instance: Zoned[ZonedDateTime]) extends DTCSuiteJVM { implicit val zonedInstance: Zoned[ZonedDateTime] = instance implicit val arbT: Arbitrary[ZonedDateTime] = ArbitraryJdk8.arbZonedDateTimeJdk8 implicit val cogenT: Cogen[ZonedDateTime] = Cogen(_.toEpochSecond) val overflowSafePairGen: Gen[(ZonedDateTime, Duration)] = for { dt <- arbitrary[ZonedDateTime] dur <- arbitrary[Duration] } yield (dt, dur) def genDateFromPeriod(period: SameZoneOffsetPeriod): Gen[ZonedDateTime] = genDateTimeFromSameOffsetPeriod(period).map(tpl => ZonedDateTime.of(tpl._1, tpl._2, tpl._3.zoneId)) val overflowSafePairGenWithinSameOffset: Gen[(ZonedDateTime, Duration)] = for { period <- arbitrary[SameZoneOffsetPeriod] dateTime <- genDateFromPeriod(period) duration <- genDateFromPeriod(period) .map(other => dateTime.until(other, ChronoUnit.NANOS)) .map(Duration.ofNanos) } yield (dateTime, duration) val genZonedTestDataSuite: Gen[ZonedDateTimeTestData[ZonedDateTime]] = overflowSafePairGen.map { case (date, duration) => val target = date.plus(duration) ZonedDateTimeTestData(date, duration, Offset(date.plus(duration).getOffset.getTotalSeconds), target.toLocalTime, target.toLocalDate) } checkAll("java.time.ZonedDateTime", DateTimeTests[ZonedDateTime](overflowSafePairGen).dateTime) checkAll("java.time.ZonedDateTime", ZonedDateTimeTests[ZonedDateTime]( overflowSafePairGenWithinSameOffset, genZonedTestDataSuite, genYear, genTimeZone ).zonedDateTime) checkAll("java.time.ZonedDateTime", OrderTests[ZonedDateTime].order) checkAll("java.time.ZonedDateTime", OrderTests[ZonedDateTime].partialOrder) checkAll("java.time.ZonedDateTime", OrderTests[ZonedDateTime].eqv) checkAll("java.time.ZonedDateTime", ProviderTests[ZonedDateTime](genTimeZone).provider) } class ZonedDateTimeWithStrictEqualityTests extends JVMZonedDateTimeTests(dtc.instances.zonedDateTime.zonedDateTimeWithStrictEquality) class ZonedDateTimeWithCrossZoneEqualityTests extends JVMZonedDateTimeTests(dtc.instances.zonedDateTime.zonedDateTimeWithCrossZoneEquality)
Example 41
Source File: MomentLocalDateTimeTests.scala From dtc with Apache License 2.0 | 5 votes |
package dtc.tests

import java.time.{LocalDate, LocalTime}

import cats.instances.option._
import cats.kernel.laws.discipline.OrderTests
import dtc.instances.moment._
import dtc.js.MomentLocalDateTime
import dtc.laws.{DateTimeTests, LocalDateTimeTests, ProviderTests}
import org.scalacheck.Arbitrary
import org.scalacheck.Arbitrary.arbitrary
import dtc.instances.moment.providers.realMomentLocalDateTimeProvider

class MomentLocalDateTimeTests extends DTCSuiteJS {

  implicit val arbT: Arbitrary[MomentLocalDateTime] = Arbitrary(for {
    date <- arbitrary[LocalDate]
    time <- arbitrary[LocalTime]
  } yield MomentLocalDateTime.of(date, time))

  implicit val cogenT = cogenMomentDateTime[MomentLocalDateTime]

  val pairGen = overflowSafePairGen.map(t => (MomentLocalDateTime.of(t._1, t._2), t._3))

  val ldtTests = LocalDateTimeTests[MomentLocalDateTime](
    pairGen, genJSValidYear
  )

  checkAll("MomentLocalDateTimeTests", DateTimeTests[MomentLocalDateTime](pairGen).dateTime)
  checkAll("MomentLocalDateTimeTests", ldtTests.localDateTime)
  // see: https://github.com/moment/moment/issues/3029
  // checkAll("MomentLocalDateTimeTests", ldtTests.localDateTime)
  checkAll("MomentLocalDateTimeTests", OrderTests[MomentLocalDateTime].order)
  checkAll("MomentLocalDateTimeTests", OrderTests[MomentLocalDateTime].partialOrder)
  checkAll("MomentLocalDateTimeTests", OrderTests[MomentLocalDateTime].eqv)
  checkAll("MomentLocalDateTimeTests", ProviderTests[MomentLocalDateTime](genTimeZone).provider)
}
Example 42
Source File: MomentZonedDateTimeTests.scala From dtc with Apache License 2.0 | 5 votes |
package dtc.tests import java.time.{Duration, LocalDate, LocalTime} import cats.instances.option._ import cats.kernel.laws.discipline.OrderTests import dtc.{TimeZoneId, Zoned} import dtc.js.MomentZonedDateTime import dtc.laws.{DateTimeTests, ProviderTests, ZonedDateTimeTestData, ZonedDateTimeTests} import org.scalacheck.Arbitrary.arbitrary import org.scalacheck.{Arbitrary, Cogen, Gen} import dtc.instances.moment.providers.realMomentZonedDateTimeProvider abstract class MomentZonedDateTimeTests(instance: Zoned[MomentZonedDateTime]) extends DTCSuiteJS { implicit val zonedInstance: Zoned[MomentZonedDateTime] = instance implicit val arbT: Arbitrary[MomentZonedDateTime] = Arbitrary(for { date <- arbitrary[LocalDate] time <- arbitrary[LocalTime] zone <- arbitrary[TimeZoneId] } yield MomentZonedDateTime.of(date, time, zone)) implicit val cogenT: Cogen[MomentZonedDateTime] = cogenMomentDateTime[MomentZonedDateTime] val pairGen: Gen[(MomentZonedDateTime, Duration)] = for { zone <- arbitrary[TimeZoneId] pair <- overflowSafePairGen } yield (MomentZonedDateTime.of(pair._1, pair._2, zone), pair._3) def genDateFromPeriod(period: SameZoneOffsetPeriod): Gen[MomentZonedDateTime] = genDateTimeFromSameOffsetPeriod(period).map(tpl => MomentZonedDateTime.of(tpl._1, tpl._2, tpl._3)) val overflowSafePairGenWithinSameOffset: Gen[(MomentZonedDateTime, Duration)] = for { period <- arbitrary[SameZoneOffsetPeriod] dateTime <- genDateFromPeriod(period) duration <- genDateFromPeriod(period) .map(other => dateTime.millisecondsUntil(other)) .map(Duration.ofMillis) } yield (dateTime, duration) val genZonedTestDataSuite: Gen[ZonedDateTimeTestData[MomentZonedDateTime]] = pairGen.map { case (date, duration) => val target = date.plus(duration) ZonedDateTimeTestData(date, duration, target.offset, target.toLocalTime, target.toLocalDate) } checkAll("MomentZonedDateTime", DateTimeTests[MomentZonedDateTime](pairGen).dateTime) checkAll("MomentZonedDateTime", ZonedDateTimeTests[MomentZonedDateTime]( overflowSafePairGenWithinSameOffset, genZonedTestDataSuite, genJSValidYear, genTimeZone ).zonedDateTime) checkAll("MomentZonedDateTime", OrderTests[MomentZonedDateTime].order) checkAll("MomentZonedDateTime", OrderTests[MomentZonedDateTime].partialOrder) checkAll("MomentZonedDateTime", OrderTests[MomentZonedDateTime].eqv) checkAll("MomentZonedDateTime", ProviderTests[MomentZonedDateTime](genTimeZone).provider) } class MomentZonedDateTimeWithStrictEqualityTests extends MomentZonedDateTimeTests(dtc.instances.moment.momentZonedWithStrictEquality) class MomentZonedDateTimeWithCrossZoneEqualityTests extends MomentZonedDateTimeTests(dtc.instances.moment.momentZonedWithCrossZoneEquality)
Example 43
Source File: JSDateTests.scala From dtc with Apache License 2.0 | 5 votes |
package dtc.tests import java.time.{LocalDate, LocalTime} import cats.instances.option._ import cats.kernel.laws.discipline.OrderTests import dtc.instances.jsDate._ import dtc.js.JSDate import dtc.laws.{DateTimeTests, LocalDateTimeTests, ProviderTests} import org.scalacheck.Arbitrary.arbitrary import org.scalacheck.{Arbitrary, Cogen} import dtc.instances.providers.realJSDateProvider class JSDateTests extends DTCSuiteJS { implicit val cogenT: Cogen[JSDate] = Cogen(_.jsGetTime.toLong) implicit val arbT: Arbitrary[JSDate] = Arbitrary(for { date <- arbitrary[LocalDate] time <- arbitrary[LocalTime] } yield JSDate.of(date, time)) val pairGen = overflowSafePairGen.map(t => (JSDate.of(t._1, t._2), t._3)) val ldtTests = LocalDateTimeTests[JSDate]( pairGen, genJSValidYear ) checkAll("JSDate", DateTimeTests[JSDate](pairGen).dateTime) checkAll("JSDate", ldtTests.localDateTime) checkAll("JSDate", ldtTests.monthUntilFractionHandling) checkAll("JSDate", OrderTests[JSDate].order) checkAll("JSDate", OrderTests[JSDate].partialOrder) checkAll("JSDate", OrderTests[JSDate].eqv) checkAll("JSDate", ProviderTests[JSDate](genTimeZone).provider) }
Example 44
Source File: RelationTest.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.lf.data

import org.scalacheck.Arbitrary.arbitrary
import org.scalacheck.Gen
import org.scalatest.prop.PropertyChecks
import org.scalatest.{Matchers, PropSpec}

final class RelationTest extends PropSpec with Matchers with PropertyChecks {

  import Relation.Relation._

  // an empty map and a map with exclusively empty values represent
  // the same relationship but the underlying structure is different
  private val nonEmptyRelations: Gen[Map[Int, Set[Char]]] =
    arbitrary[Map[Int, Set[Char]]].suchThat(_.values.forall(_.nonEmpty))

  property("invert andThen invert == identity for non empty relations") {
    forAll(nonEmptyRelations) { nonEmpty: Map[Int, Set[Char]] =>
      nonEmpty shouldEqual invert(invert(nonEmpty))
    }
  }

  property("union commutative") {
    forAll { (m1: Map[Int, Set[Char]], m2: Map[Int, Set[Char]]) =>
      union(m1, m2) shouldEqual union(m2, m1)
    }
  }

  property("union associative") {
    forAll { (m1: Map[Int, Set[Char]], m2: Map[Int, Set[Char]], m3: Map[Int, Set[Char]]) =>
      union(union(m1, m2), m3) shouldEqual union(m1, union(m2, m3))
    }
  }

  property("union has unit") {
    forAll { m: Map[Int, Set[Char]] =>
      union(m, Map.empty[Int, Set[Char]]) shouldEqual m
      union(Map.empty[Int, Set[Char]], m) shouldEqual m
    }
  }

  property("flattening is the inverse of grouping for non empty relations") {
    forAll(nonEmptyRelations) { nonEmpty =>
      flatten(nonEmpty).toSeq.groupBy(_._1).mapValues(_.map(_._2).toSet) shouldEqual nonEmpty
    }
  }

  property("diff is idempotent") {
    forAll { (m1: Map[Int, Set[Char]], m2: Map[Int, Set[Char]]) =>
      diff(m1, m2) shouldEqual diff(diff(m1, m2), m2)
    }
  }

  property("diff by empty doesn't affect non-empty relations") {
    forAll(nonEmptyRelations) { m =>
      diff(m, Map.empty[Int, Set[Char]]) shouldEqual m
    }
  }

  property("diff: no item in the right operand appears in the result") {
    forAll { (m1: Map[Int, Set[Char]], m2: Map[Int, Set[Char]]) =>
      val result = flatten(diff(m1, m2)).toList
      val right = flatten(m2).toList
      result should contain noElementsOf right
    }
  }

  property("diff: items in the result should be a subset of the ones in the left operand") {
    forAll { (m1: Map[Int, Set[Char]], m2: Map[Int, Set[Char]]) =>
      val result = flatten(diff(m1, m2)).toSet
      val left = flatten(m1).toSet
      assert(result.subsetOf(left))
    }
  }

  property("diff is equivalent to flatten-and-diff") {
    forAll { (m1: Map[Int, Set[Char]], m2: Map[Int, Set[Char]]) =>
      flatten(diff(m1, m2)).toSet shouldEqual flatten(m1).toSet.diff(flatten(m2).toSet)
    }
  }
}
Example 45
Source File: JsonDecoderSpec.scala From roc with BSD 3-Clause "New" or "Revised" License | 5 votes |
package roc package types import io.circe.generic.auto._ import io.circe.syntax._ import java.nio.charset.StandardCharsets import jawn.ast.JParser import org.scalacheck.Arbitrary.arbitrary import org.scalacheck.Prop.forAll import org.scalacheck.{Arbitrary, Gen} import org.specs2.{ScalaCheck, Specification} import roc.postgresql.Null import roc.types.failures.{ElementDecodingFailure, NullDecodedFailure} import roc.types.{decoders => Decoders} final class JsonDecoderSpec extends Specification with ScalaCheck { def is = s2""" Json Decoder must correctly decode Text representation $testValidText must throw a ElementDecodingFailure when Text decoding invalid Json $testInvalidText must correctly decode Binary representation $testValidBinary must throw a ElementDecodingFailure when Binary decoding invalid Json $testInvalidBinary must throw a NullDecodedFailure when Null decoding Json $testNullDecoding """ private val testValidText = forAll { x: JsonContainer => Decoders.jsonElementDecoder.textDecoder(x.text) must_== x.json } private val testInvalidText = forAll { x: String => Decoders.jsonElementDecoder.textDecoder(x) must throwA[ElementDecodingFailure] } private val testValidBinary = forAll { x: BinaryJsonContainer => Decoders.jsonElementDecoder.binaryDecoder(x.binary) must_== x.json } private val testInvalidBinary = forAll { xs: Array[Byte] => Decoders.jsonElementDecoder.binaryDecoder(xs) must throwA[ElementDecodingFailure] } private val testNullDecoding = Decoders.jsonElementDecoder.nullDecoder(Null('doesnotmatter, -71)) must throwA[NullDecodedFailure] case class JsonContainer(text: String, json: Json) private lazy val genJsonContainer: Gen[JsonContainer] = for { jObject <- arbitrary[JsonObject] } yield { val text = jObject.asJson.noSpaces val json = JParser.parseUnsafe(text) new JsonContainer(text, json) } private implicit lazy val arbitraryJsonContainer: Arbitrary[JsonContainer] = Arbitrary(genJsonContainer) case class BinaryJsonContainer(binary: Array[Byte], json: Json) private lazy val genBinaryJsonContainer: Gen[BinaryJsonContainer] = for { jObject <- arbitrary[JsonObject] } yield { val text = jObject.asJson.noSpaces val json = JParser.parseUnsafe(text) val bytes = text.getBytes(StandardCharsets.UTF_8) new BinaryJsonContainer(bytes, json) } private implicit lazy val arbitraryBinaryJsonContainer: Arbitrary[BinaryJsonContainer] = Arbitrary(genBinaryJsonContainer) case class JsonObject(name: String, first_names: List[String]) private lazy val genJsonObject: Gen[JsonObject] = for { name <- arbitrary[String] first_names <- arbitrary[List[String]] } yield new JsonObject(name, first_names) private implicit lazy val arbitraryJsonObject: Arbitrary[JsonObject] = Arbitrary(genJsonObject) }
Example 46
Source File: MessageSpec.scala From roc with BSD 3-Clause "New" or "Revised" License | 5 votes |
package roc package postgresql import java.nio.charset.StandardCharsets import java.security.MessageDigest import org.scalacheck.Arbitrary.arbitrary import org.scalacheck.Prop.forAll import org.scalacheck.{Arbitrary, Gen} import org.specs2._ final class MessagesSpec extends Specification with ScalaCheck { def is = s2""" PasswordMessage should MD5 encrypt a password with given salt $pmEncrypt """ val pmEncrypt = forAll { (user: String, pm: PasswordMessage, salt: Array[Byte]) => val md = MessageDigest.getInstance("MD5") md.update((pm.password+ user).getBytes(StandardCharsets.UTF_8)) val unsaltedHexStr = md.digest().map(x => "%02x".format(x.byteValue)).foldLeft("")(_ + _) val saltedBytes = unsaltedHexStr.getBytes ++ salt md.reset() md.update(saltedBytes) val passwd = md.digest().map(x => "%02x".format(x.byteValue)).foldLeft("md5")(_ + _) passwd must_== PasswordMessage.encryptMD5Passwd(user, pm.password, salt) } lazy val genByte: Gen[Byte] = arbitrary[Byte] lazy val genSalt: Gen[Array[Byte]] = Gen.containerOfN[Array, Byte](4, genByte) lazy val genPasswordMessage: Gen[PasswordMessage] = for { password <- arbitrary[String] } yield new PasswordMessage(password) implicit lazy val implicitPasswordMessage: Arbitrary[PasswordMessage] = Arbitrary(genPasswordMessage) }
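genSalt above pins the collection length with Gen.containerOfN, which is the standard way to generate fixed-size data such as salts or keys. A small standalone sketch (names are illustrative):

import org.scalacheck.Gen
import org.scalacheck.Arbitrary.arbitrary

object FixedSizeGens {
  // Exactly sixteen random bytes, e.g. a key or a salt.
  val sixteenBytes: Gen[Array[Byte]] =
    Gen.containerOfN[Array, Byte](16, arbitrary[Byte])

  // The same combinator builds any collection type ScalaCheck can construct.
  val threeWords: Gen[List[String]] =
    Gen.containerOfN[List, String](3, Gen.alphaStr)
}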
Example 47
Source File: StartupSpecs.scala From roc with BSD 3-Clause "New" or "Revised" License | 5 votes |
package roc package postgresql import com.twitter.finagle.client.StackClient import org.scalacheck.Arbitrary.arbitrary import org.scalacheck.Prop.forAll import org.scalacheck.{Arbitrary, Gen} import org.specs2._ import roc.postgresql.Startup.{Database, Credentials} final class StartupSpecs extends Specification with ScalaCheck { def is = s2""" Database must have correct database name $testDatabase Credentials must have correct username and password $testUserAndPasswd Startup must have correct database, username, and password $testStartupClass must have correct defaults for username, password, and database $testStartupDefaults """ val testDatabase = forAll { dbContainer: DbContainer => val database = dbContainer.db database.db must_== dbContainer.dbName } val testUserAndPasswd = forAll { credentialsContainer: CredentialsContainer => val expectedCredentials = Credentials(credentialsContainer.username, credentialsContainer.passwd) credentialsContainer.credentials must_== expectedCredentials } val testStartupClass = forAll { startupContainer: StartupContainer => val expectedStartup = Startup(startupContainer.username, startupContainer.passwd, startupContainer.database) startupContainer.startup must_== expectedStartup } val testStartupDefaults= { val expectedStartup = Startup("postgres", "postgres", "postgres") Startup(StackClient.defaultParams) must_== expectedStartup } case class DbContainer(db: Database, dbName: String) private lazy val databaseGen: Gen[DbContainer] = for { db <- arbitrary[String] } yield DbContainer(Database(db), db) implicit lazy val arbitraryDatabase: Arbitrary[DbContainer] = Arbitrary(databaseGen) case class CredentialsContainer(credentials: Credentials, username: String, passwd: String) private lazy val credentialsContainerGen: Gen[CredentialsContainer] = for { username <- arbitrary[String] password <- arbitrary[String] } yield CredentialsContainer(Credentials(username, password), username, password) implicit lazy val arbitraryCredentialsContainer: Arbitrary[CredentialsContainer] = Arbitrary(credentialsContainerGen) case class StartupContainer(startup: Startup, username: String, passwd: String, database: String) private lazy val startupContainerGen: Gen[StartupContainer] = for { username <- arbitrary[String] passwd <- arbitrary[String] database <- arbitrary[String] } yield StartupContainer(Startup(username, passwd, database), username, passwd, database) implicit lazy val arbitraryStartupContainer: Arbitrary[StartupContainer] = Arbitrary(startupContainerGen) }
Example 48
Source File: ResultsSpec.scala From roc with BSD 3-Clause "New" or "Revised" License | 5 votes |
package roc package postgresql import org.scalacheck.Arbitrary.arbitrary import org.scalacheck.Prop.forAll import org.scalacheck.{Arbitrary, Gen} import org.specs2._ import org.specs2.mock.Mockito import roc.postgresql.failures.ElementNotFoundFailure final class ResultsSpec extends Specification with ScalaCheck with Mockito { def is = s2""" Row get(column) must throw ElementNotFound failure for unknown column name $columnNotFound """ val columnNotFound = forAll { sym: Symbol => val row = new Row(List.empty[Element]) row.get(sym) must throwA[ElementNotFoundFailure] } lazy val genSymbol: Gen[Symbol] = for { str <- arbitrary[String] } yield Symbol(str) implicit lazy val arbitrarySymbol: Arbitrary[Symbol] = Arbitrary(genSymbol) }
Example 49
Source File: DurationFormatSpec.scala From play-json-ops with MIT License | 5 votes |
package play.api.libs.json.ops.v4 import org.scalacheck.Arbitrary.arbitrary import play.api.libs.json.scalacheck.DurationGenerators._ import play.api.libs.json.scalatest.PlayJsonFormatSpec import scala.concurrent.duration._ class FiniteDurationArrayFormatSpec extends PlayJsonFormatSpec[FiniteDuration]( arbitrary[FiniteDuration])(DurationFormat.array.finiteDurationFormat, implicitly, implicitly) with AssertDurationEquality[FiniteDuration] class FiniteDurationStringFormatSpec extends PlayJsonFormatSpec[FiniteDuration]( arbitrary[FiniteDuration])(DurationFormat.string.finiteDurationFormat, implicitly, implicitly) with AssertDurationEquality[FiniteDuration] class DurationArrayFormatSpec extends PlayJsonFormatSpec[Duration]( arbitrary[Duration])(DurationFormat.array.durationFormat, implicitly, implicitly) with AssertDurationEquality[Duration] class DurationStringFormatSpec extends PlayJsonFormatSpec[Duration]( arbitrary[FiniteDuration])(DurationFormat.string.durationFormat, implicitly, implicitly) with AssertDurationEquality[Duration] private[ops] trait AssertDurationEquality[T <: Duration] extends PlayJsonFormatSpec[T] { override protected def assertPostSerializationEquality(expected: T, actual: T): Unit = { if (expected.isFinite()) { assert(actual.isFinite(), s"$actual is not finite and cannot be equal to $expected") assertResult(expected.unit)(actual.unit) assertResult(expected.length)(actual.length) } else if (expected eq Duration.Undefined) { assertResult(Duration.Undefined)(actual) } else { assert(!actual.isFinite(), s"$actual is finite and cannot be equal to $expected") assertResult(expected)(actual) } } }
Example 50
Source File: DurationFormatSpec.scala From play-json-ops with MIT License | 5 votes |
package play.api.libs.json.ops.v4 import org.scalacheck.Arbitrary.arbitrary import play.api.libs.json.scalacheck.DurationGenerators._ import play.api.libs.json.scalatest.PlayJsonFormatSpec import scala.concurrent.duration._ class FiniteDurationArrayFormatSpec extends PlayJsonFormatSpec[FiniteDuration]( arbitrary[FiniteDuration])(DurationFormat.array.finiteDurationFormat, implicitly, implicitly) with AssertDurationEquality[FiniteDuration] class FiniteDurationStringFormatSpec extends PlayJsonFormatSpec[FiniteDuration]( arbitrary[FiniteDuration])(DurationFormat.string.finiteDurationFormat, implicitly, implicitly) with AssertDurationEquality[FiniteDuration] class DurationArrayFormatSpec extends PlayJsonFormatSpec[Duration]( arbitrary[Duration])(DurationFormat.array.durationFormat, implicitly, implicitly) with AssertDurationEquality[Duration] class DurationStringFormatSpec extends PlayJsonFormatSpec[Duration]( arbitrary[FiniteDuration])(DurationFormat.string.durationFormat, implicitly, implicitly) with AssertDurationEquality[Duration] private[ops] trait AssertDurationEquality[T <: Duration] extends PlayJsonFormatSpec[T] { override protected def assertPostSerializationEquality(expected: T, actual: T): Unit = { if (expected.isFinite) { assert(actual.isFinite, s"$actual is not finite and cannot be equal to $expected") assertResult(expected.unit)(actual.unit) assertResult(expected.length)(actual.length) } else if (expected eq Duration.Undefined) { assertResult(Duration.Undefined)(actual) } else { assert(!actual.isFinite, s"$actual is finite and cannot be equal to $expected") assertResult(expected)(actual) } } }
Example 51
Source File: FeaturePropSpec.scala From spark-tda with Apache License 2.0 | 5 votes |
package org.apache.spark.ml.feature import org.apache.spark.ml.linalg.{Vector, Vectors, DenseVector} import org.apache.spark.ml.linalg.SQLDataTypes.VectorType import org.apache.spark.sql.{SparkSession, DataFrame} import org.apache.spark.sql.types.{ StructField, IntegerType, DoubleType, BooleanType, StructType, StringType, ArrayType } import org.scalacheck.{Arbitrary, Gen} import org.scalacheck.Arbitrary.arbitrary import org.scalatest.PropSpec import com.holdenkarau.spark.testing.{ SharedSparkContext, DataframeGenerator, Column } abstract class FeaturePropSpec extends PropSpec with SharedSparkContext with DefaultReadWriteTest { implicit def arbitraryDenseVector: Arbitrary[DenseVector] = Arbitrary { for (arr <- arbitrary[Array[Double]]) yield new DenseVector(arr) } implicit def arbitraryVector: Arbitrary[Vector] = Arbitrary( Gen.frequency( 1 -> arbitrary[DenseVector] )) lazy val spark = SparkSession.builder().getOrCreate() def schema = StructType( List( StructField("integer", IntegerType), StructField("double", DoubleType), StructField("boolean", BooleanType), StructField("string", StringType) )) def integerGen = new Column("integer", Gen.choose(-100, 100)) def doubleGen = new Column("double", Gen.choose(-100.0, 100.0)) def stringGen = new Column("string", Gen.oneOf("A", "BC", "DEF", "GHIJ", "KLMNO")) def dataframeGen = DataframeGenerator.arbitraryDataFrameWithCustomFields( spark.sqlContext, schema)(integerGen, doubleGen, stringGen) def hasDistinctValues(df: DataFrame, columns: String*): Boolean = { columns.foldLeft(true) { (acc, col) => acc && df.select(col).distinct.count() > 1 } } }
Example 52
Source File: KNNPropSpec.scala From spark-tda with Apache License 2.0 | 5 votes |
package org.apache.spark.ml.util.knn import scala.reflect.ClassTag import org.scalacheck.{Arbitrary, Gen} import org.scalacheck.Arbitrary.arbitrary import org.scalacheck.Gen.{choose, oneOf} import org.scalatest.PropSpec import org.apache.spark.ml.linalg.{ CosineDistance, EuclideanDistance, ManhattanDistance, JaccardDistance, HammingDistance } import org.apache.spark.ml.linalg.{Vector, SparseVector, DenseVector, Vectors} import com.holdenkarau.spark.testing.SharedSparkContext abstract class KNNPropSpec extends PropSpec with SharedSparkContext { implicit def arbitraryDenseVector: Arbitrary[DenseVector] = Arbitrary { for (arr <- arbitrary[Array[Double]]) yield new DenseVector(arr) } implicit def arbitrarySparseVector: Arbitrary[SparseVector] = Arbitrary { for (vec <- arbitrary[DenseVector]) yield vec.toSparse } implicit def arbitraryVector: Arbitrary[Vector] = Arbitrary( Gen.frequency( 1 -> arbitrary[DenseVector], 1 -> arbitrary[SparseVector] )) private def arraysOfNM[T: ClassTag](numRows: Int, numCols: Int, gen: Gen[T]): Gen[Array[Array[T]]] = Gen.listOfN(numRows * numCols, gen).map { square => square.toArray.grouped(numCols).toArray } private def vectorsOfNM(numRows: Int, numCols: Int, gen: Gen[Double]): Gen[Array[DenseVector]] = for { arrays <- arraysOfNM(numRows, numCols, gen) } yield arrays.map(arr => new DenseVector(arr)) val treeGen = for { measure <- oneOf(CosineDistance, EuclideanDistance, ManhattanDistance, HammingDistance, JaccardDistance) numVectors <- choose(1, 100) vectors <- vectorsOfNM(numVectors, 2, choose(-10.0, 10.0)) } yield vectors .scanLeft(Seq[Vector]())(_ :+ _) .tail .map( vs => VPTree(vs.map(v => VectorEntry(0L, v)).toIndexedSeq, measure, 10, 10, 10)) }
Example 53
Source File: PackageTest.scala From spark-tda with Apache License 2.0 | 5 votes |
package org.apache.spark.ml.util.interval import org.scalacheck.Gen import org.scalacheck.Arbitrary.arbitrary import org.scalatest.Matchers import org.scalatest.prop.GeneratorDrivenPropertyChecks class PackageTest extends IntervalPropSpec with GeneratorDrivenPropertyChecks with Matchers { val coverGen = for { numberOfSplits <- Gen.posNum[Int] overlapRatio <- Gen.choose(0.0, 1.0) if overlapRatio > 0.0 list <- Gen.listOfN(3, arbitrary[Double]) } yield (numberOfSplits, overlapRatio, list) property("open cover covers all range of specified hypercube") { forAll(coverGen) { case (numberOfSplits, overlapRatio, list) => val sorted = list.sorted val lower = sorted(0) val upper = sorted(2) val point = sorted(1) OpenCover(numberOfSplits, overlapRatio, Vector((lower, upper))) .foldLeft(false) { (acc, open) => acc || (open contains Vector(point)) } should be(true) } } property("closed cover covers all range of specified hypercube") { forAll(coverGen) { case (numberOfSplits, overlapRatio, list) => val sorted = list.sorted val lower = sorted(0) val upper = sorted(2) val point = sorted(1) ClosedCover(numberOfSplits, overlapRatio, Vector((lower, upper))) .foldLeft(false) { (acc, closed) => acc || (closed contains Vector(point)) } should be(true) } } }
Example 54
Source File: IntervalPropSpec.scala From spark-tda with Apache License 2.0 | 5 votes |
package org.apache.spark.ml.util.interval import scala.reflect.ClassTag import org.scalacheck.{Arbitrary, Gen} import org.scalacheck.Arbitrary.arbitrary import org.scalatest.PropSpec abstract class IntervalPropSpec extends PropSpec { implicit def arbitraryOpenEndpoint: Arbitrary[Open] = Arbitrary { for (at <- arbitrary[Double]) yield Open(at) } implicit def arbitraryClosedEndpoint: Arbitrary[Closed] = Arbitrary { for (at <- arbitrary[Double]) yield Closed(at) } implicit def arbitraryUnboundedEndpoint: Arbitrary[Unbounded] = Arbitrary(Unbounded()) implicit def arbitraryEndpoint: Arbitrary[Endpoint] = Arbitrary( Gen.frequency( 4 -> arbitrary[Open], 4 -> arbitrary[Closed], 2 -> arbitrary[Unbounded] )) implicit def arbitraryLowerBound: Arbitrary[LowerBound] = Arbitrary(for (endpoint <- arbitrary[Endpoint]) yield LowerBound(endpoint)) implicit def arbitraryUpperBound: Arbitrary[UpperBound] = Arbitrary(for (endpoint <- arbitrary[Endpoint]) yield UpperBound(endpoint)) implicit def arbitraryBound: Arbitrary[Bound] = Arbitrary(Gen.oneOf(arbitrary[LowerBound], arbitrary[UpperBound])) implicit def arbitraryInterval: Arbitrary[Interval] = Arbitrary { def validate(lhs: Endpoint, rhs: Endpoint) = Interval.validate(LowerBound(lhs), UpperBound(rhs)) || Interval .validate(LowerBound(rhs), UpperBound(lhs)) def interval(lhs: Endpoint, rhs: Endpoint) = if (Interval.validate(LowerBound(lhs), UpperBound(rhs))) new Interval(LowerBound(lhs), UpperBound(rhs)) else new Interval(LowerBound(rhs), UpperBound(lhs)) for { x <- arbitrary[Endpoint] y <- arbitrary[Endpoint] if validate(x, y) } yield interval(x, y) } implicit def arbitrary2DimensionalCube: Arbitrary[Cube] = Arbitrary { for { x <- arbitrary[Interval] y <- arbitrary[Interval] } yield Cube(x, y) } }
Example 55
Source File: HashFunctionsTest.scala From spark-tda with Apache License 2.0 | 5 votes |
package org.apache.spark.mllib.linalg.distributed.impl import org.scalacheck.Gen.{choose, oneOf, listOfN} import org.scalacheck.Arbitrary.arbitrary import org.scalatest.Matchers import org.scalatest.prop.GeneratorDrivenPropertyChecks import org.apache.spark.mllib.linalg.DenseVector class HashFunctionsTest extends ImplPropSpec with GeneratorDrivenPropertyChecks with Matchers { import org.scalactic.Tolerance._ property( "simhash returns hashed vector whose dimension is at most the specified signature length") { forAll(simhashGen) { case (vector, signatureLength, simhash) => val bucket = simhash(0L, 0, vector) assert(bucket === simhash(0L, 0, vector.toSparse)) assert(bucket.signature.length <= signatureLength) } } property( "minhash returns hashed vector whose dimension is the specified signature length") { forAll(minhashGen) { case (vector, signatureLength, minhash) => val bucket = minhash(0L, 0, vector) assert(bucket === minhash(0L, 0, vector.toSparse)) assert(bucket.signature.length === signatureLength) } } property( "pstable returns hashed vector whose dimension is the specified signature length") { forAll(pstableGen) { case (vector, signatureLength, pstableL1, pstableL2) => val bucketL1 = pstableL1(0L, 0, vector) val bucketL2 = pstableL2(0L, 0, vector) assert(bucketL1 === pstableL1(0L, 0, vector.toSparse)) assert(bucketL2 === pstableL2(0L, 0, vector.toSparse)) assert(bucketL1.signature.length === signatureLength) assert(bucketL2.signature.length === signatureLength) } } property( "bit sampling returns hashed vector whose dimension is at most the specified signature length") { forAll(bsampleGen) { case (vector, signatureLength, bsample) => val bucket = bsample(0L, 0, vector) assert(bucket === bsample(0L, 0, vector.toSparse)) assert(bucket.signature.length <= signatureLength) } } }
Example 56
Source File: AmplificationsTest.scala From spark-tda with Apache License 2.0 | 5 votes |
package org.apache.spark.mllib.linalg.distributed.impl

import org.scalacheck.Gen.{choose, oneOf, listOfN}
import org.scalacheck.Arbitrary.arbitrary
import org.scalacheck.Prop.forAllNoShrink
import org.scalatest.Matchers
import org.scalatest.prop.GeneratorDrivenPropertyChecks

import org.apache.spark.mllib.linalg.{DenseVector, CosineDistance, JaccardDistance}
import org.apache.spark.mllib.linalg.distributed.CoordinateMatrix

class AmplificationsTest extends ImplPropSpec with GeneratorDrivenPropertyChecks with Matchers {
  property(
    "or-construction generates the correct number of indexed rows for the given data points") {
    forAllNoShrink(simhashBucketsGen) {
      case (buckets, numVectors) =>
        val or = ORConstruction(CosineDistance)
        val sim = new CoordinateMatrix(or(buckets, numVectors)).toIndexedRowMatrix.rows
          .collect()
        // combine both checks into one Boolean so neither result is discarded
        sim.size === numVectors &&
        sim.forall(s => s.vector.size <= numVectors)
    }
  }

  property(
    "band or-construction generates the correct number of indexed rows for the given data points") {
    forAllNoShrink(minhashBucketsGen) {
      case (buckets, numVectors, numBands) =>
        val bor = BandORConstruction(JaccardDistance, numBands)
        val sim = new CoordinateMatrix(bor(buckets, numVectors)).toIndexedRowMatrix.rows
          .collect()
        sim.size === numVectors &&
        sim.forall(s => s.vector.size <= numVectors)
    }
  }
}
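Both properties above use forAllNoShrink, which disables ScalaCheck's shrinking. That is usually deliberate when the generator establishes an invariant the default shrinker does not know about, so a shrunk counterexample could be nonsense. A minimal sketch of the difference (the property and generator are illustrative):

import org.scalacheck.{Gen, Properties}
import org.scalacheck.Prop.forAllNoShrink

object ShrinkingDemo extends Properties("Shrinking") {
  // The generator guarantees non-emptiness...
  val nonEmptyInts: Gen[List[Int]] = Gen.nonEmptyListOf(Gen.choose(-10, 10))

  // ...but the default List shrinker can shrink toward Nil, reporting a
  // "counterexample" the generator could never have produced. Disabling
  // shrinking keeps the reported value inside the generator's invariant.
  property("head exists") = forAllNoShrink(nonEmptyInts) { xs =>
    xs.headOption.isDefined
  }
}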