org.scalatest.prop.GeneratorDrivenPropertyChecks Scala Examples
The following examples show how to use org.scalatest.prop.GeneratorDrivenPropertyChecks.
Each example is taken from an open-source project; the source file, the project it comes from, and its license are noted above each snippet. A minimal usage sketch follows before the project examples.
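Here is a minimal, self-contained sketch of the two basic usage patterns the trait provides: forAll driven by implicit Arbitrary instances, and forAll driven by an explicit ScalaCheck Gen, with whenever to discard inputs that fail a precondition. It assumes ScalaTest 3.0.x, where the trait still lives at org.scalatest.prop.GeneratorDrivenPropertyChecks (later versions rename it to ScalaCheckDrivenPropertyChecks in the scalatestplus-scalacheck module); the spec class and property names are illustrative only.

    import org.scalacheck.Gen
    import org.scalatest.prop.GeneratorDrivenPropertyChecks
    import org.scalatest.{Matchers, PropSpec}

    // Hypothetical spec for illustration; not taken from any of the projects below.
    class StringPropertiesSpec extends PropSpec with GeneratorDrivenPropertyChecks with Matchers {

      // Pattern 1: forAll derives its generators from implicit Arbitrary
      // instances, so plain parameter types are enough.
      property("string concatenation length is additive") {
        forAll { (a: String, b: String) =>
          (a + b).length shouldEqual a.length + b.length
        }
      }

      // Pattern 2: forAll takes an explicit Gen; whenever discards generated
      // values that do not satisfy the stated precondition.
      property("reversing a non-empty list moves its head to the last position") {
        forAll(Gen.nonEmptyListOf(Gen.alphaChar)) { chars =>
          whenever(chars.nonEmpty) {
            chars.reverse.last shouldEqual chars.head
          }
        }
      }
    }

Most of the examples below follow one of these two shapes, often combined with custom generators built in for-comprehensions over Gen, as in Examples 13 and 19.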
Example 1
Source File: IntervalTest.scala From spark-tda with Apache License 2.0
package org.apache.spark.ml.util.interval

import org.scalatest.Matchers
import org.scalatest.prop.GeneratorDrivenPropertyChecks

class IntervalTest extends IntervalPropSpec with GeneratorDrivenPropertyChecks with Matchers {

  property("intervals intersect if they overlap") {
    forAll { (a: Double, b: Double, c: Double, d: Double) =>
      val es = List(a, b, c, d).sorted
      val la = es(0)
      val lb = es(1)
      val ua = es(2)
      val ub = es(3)
      Interval.greaterEqual(la) intersects Interval.closed(lb, ub) should be(true)
      Interval.closed(la, ua) intersects Interval.closed(lb, ub) should be(true)
      whenever(lb < ua) {
        whenever(la < ua) {
          Interval.open(la, ua) intersects Interval.closed(lb, ub) should be(true)
        }
        whenever(lb < ub) {
          Interval.closed(la, ua) intersects Interval.open(lb, ub) should be(true)
        }
        whenever(la < ua && lb < ub) {
          Interval.open(la, ua) intersects Interval.open(lb, ub) should be(true)
        }
      }
    }
  }

  property("interval intersection is commutative") {
    forAll { (lhs: Interval, rhs: Interval) =>
      (lhs intersects rhs) should equal(rhs intersects lhs)
    }
  }

  property("interval inclusion is transitive") {
    forAll { (lhs: Interval, rhs: Interval, x: Double) =>
      // Assert explicitly; a bare Boolean result would be discarded.
      (!(lhs contains rhs) || !(rhs contains x) || (lhs contains x)) should be(true)
    }
  }
}
Example 2
Source File: LawChecking.scala From curryhoward with Apache License 2.0
package io.chymyst.ch.unit

import io.chymyst.ch.implement
import org.scalacheck.Arbitrary
import org.scalatest.{Assertion, FlatSpec, Matchers}
import org.scalatest.prop.GeneratorDrivenPropertyChecks

trait FMap[F[_]] {
  def f[A, B]: (A ⇒ B) ⇒ F[A] ⇒ F[B]
}

trait FPoint[F[_]] {
  def f[A]: A ⇒ F[A]
}

trait FFlatMap[F[_]] {
  def f[A, B]: (A ⇒ F[B]) ⇒ F[A] ⇒ F[B]
}

trait LawChecking extends FlatSpec with Matchers with GeneratorDrivenPropertyChecks {

  def fEqual[A: Arbitrary, B](f1: A ⇒ B, f2: A ⇒ B): Assertion = {
    forAll { (x: A) ⇒ f1(x) shouldEqual f2(x) }
  }

  private def checkFunctionEquality[A: Arbitrary, B](f1: A ⇒ B, f2: A ⇒ B)(implicit resultsEqual: (B, B) ⇒ Assertion): Assertion = {
    forAll { (x: A) ⇒ resultsEqual(f1(x), f2(x)) }
  }

  // Check equality for higher-order functions of type A ⇒ B ⇒ C.
  def hofEqual[A: Arbitrary, B: Arbitrary, C: Arbitrary](f1: A ⇒ B ⇒ C, f2: A ⇒ B ⇒ C): Assertion =
    checkFunctionEquality[A, B ⇒ C](f1, f2)(implicitly[Arbitrary[A]], (x: B ⇒ C, y: B ⇒ C) ⇒ fEqual(x, y))

  def fmapLawIdentity[A: Arbitrary, F[_]](fmap: FMap[F])(implicit fResultsEqual: (F[A], F[A]) ⇒ Assertion, ev: Arbitrary[F[A]]): Assertion = {
    checkFunctionEquality[F[A], F[A]](fmap.f(identity[A]), identity[F[A]])
  }

  def fmapLawComposition[A: Arbitrary, B: Arbitrary, C: Arbitrary, F[_]](fmap: FMap[F])(implicit fResultsEqual: (F[C], F[C]) ⇒ Assertion, evA: Arbitrary[F[A]], evAB: Arbitrary[A ⇒ B], evBC: Arbitrary[B ⇒ C]): Assertion = {
    forAll { (f: A ⇒ B, g: B ⇒ C) ⇒
      checkFunctionEquality[F[A], F[C]](fmap.f(f) andThen fmap.f(g), fmap.f(f andThen g))
    }
  }

  def fmapPointLaw[A: Arbitrary, B: Arbitrary, F[_]](point: FPoint[F], fmap: FMap[F])(implicit fResultsEqual: (F[B], F[B]) ⇒ Assertion, evAB: Arbitrary[A ⇒ B]): Assertion =
    forAll { (f: A ⇒ B) ⇒
      val point_dot_map = point.f andThen fmap.f(f)
      val f_dot_point = f andThen point.f
      checkFunctionEquality[A, F[B]](point_dot_map, f_dot_point)
    }

  def flatmapPointLaw[A: Arbitrary, B: Arbitrary, F[_]](point: FPoint[F], flatmap: FFlatMap[F])(implicit fResultsEqual: (F[B], F[B]) ⇒ Assertion, evAB: Arbitrary[A ⇒ F[B]], evFB: Arbitrary[F[B]]): Assertion =
    forAll { (f: A ⇒ F[B]) ⇒
      checkFunctionEquality[F[B], F[B]](flatmap.f(point.f), identity)
      checkFunctionEquality(point.f andThen flatmap.f(f), f)
    }

  def flatmapAssocLaw[A: Arbitrary, B: Arbitrary, C: Arbitrary, F[_]](fflatMap: FFlatMap[F])(implicit fResultsEqual: (F[C], F[C]) ⇒ Assertion, evFA: Arbitrary[F[A]], evAB: Arbitrary[A ⇒ F[B]], evBC: Arbitrary[B ⇒ F[C]]): Assertion =
    forAll { (f: A ⇒ F[B], g: B ⇒ F[C]) ⇒
      val x = fflatMap.f(f) andThen fflatMap.f(g)
      val y = fflatMap.f((x: A) ⇒ fflatMap.f(g)(f(x)))
      checkFunctionEquality[F[A], F[C]](x, y)
    }

  def flip[A, B, C]: (A ⇒ B ⇒ C) ⇒ (B ⇒ A ⇒ C) = implement
}
Example 3
Source File: AmplificationsTest.scala From spark-tda with Apache License 2.0
package org.apache.spark.mllib.linalg.distributed.impl

import org.scalacheck.Gen.{choose, oneOf, listOfN}
import org.scalacheck.Arbitrary.arbitrary
import org.scalacheck.Prop.forAllNoShrink
import org.scalatest.Matchers
import org.scalatest.prop.GeneratorDrivenPropertyChecks
import org.apache.spark.mllib.linalg.{DenseVector, CosineDistance, JaccardDistance}
import org.apache.spark.mllib.linalg.distributed.CoordinateMatrix

class AmplificationsTest extends ImplPropSpec with GeneratorDrivenPropertyChecks with Matchers {

  property("or-construction generates the correct number of indexed rows for the given data points") {
    forAllNoShrink(simhashBucketsGen) {
      case (buckets, numVectors) =>
        val or = ORConstruction(CosineDistance)
        val sim = new CoordinateMatrix(or(buckets, numVectors)).toIndexedRowMatrix.rows.collect()
        // Combine both checks so each contributes to the property result
        // (in the original, the first Boolean was silently discarded).
        (sim.size === numVectors) && sim.forall(s => s.vector.size <= numVectors)
    }
  }

  property("band or-construction generates the correct number of indexed rows for the given data points") {
    forAllNoShrink(minhashBucketsGen) {
      case (buckets, numVectors, numBands) =>
        val bor = BandORConstruction(JaccardDistance, numBands)
        val sim = new CoordinateMatrix(bor(buckets, numVectors)).toIndexedRowMatrix.rows.collect()
        (sim.size === numVectors) && sim.forall(s => s.vector.size <= numVectors)
    }
  }
}
Example 4
Source File: ANearestNeighborsTest.scala From spark-tda with Apache License 2.0
package org.apache.spark.mllib.linalg.distributed.impl

import org.scalacheck.Prop.forAllNoShrink
import org.scalatest.Matchers
import org.scalatest.prop.GeneratorDrivenPropertyChecks

class ANearestNeighborsTest extends ImplPropSpec with GeneratorDrivenPropertyChecks with Matchers {

  property("average selectivity of simhash joins increases with more hash tables") {
    forAllNoShrink(simhashJoinGen) {
      case (matrix, joinWithLessTables, joinWithMoreTables) =>
        val lessSelectivity = joinWithLessTables.avgSelectivity(matrix)
        val moreSelectivity = joinWithMoreTables.avgSelectivity(matrix)
        lessSelectivity <= moreSelectivity
    }
  }

  property("average selectivity of minhash joins increases with more hash tables") {
    forAllNoShrink(minhashJoinGen) {
      case (matrix, joinWithLessTables, joinWithMoreTables) =>
        val lessSelectivity = joinWithLessTables.avgSelectivity(matrix)
        val moreSelectivity = joinWithMoreTables.avgSelectivity(matrix)
        lessSelectivity <= moreSelectivity
    }
  }

  property("average selectivity of p-stable L1 joins increases with more hash tables") {
    forAllNoShrink(pstablel1JoinGen) {
      case (matrix, joinWithLessTables, joinWithMoreTables) =>
        val lessSelectivity = joinWithLessTables.avgSelectivity(matrix)
        val moreSelectivity = joinWithMoreTables.avgSelectivity(matrix)
        lessSelectivity <= moreSelectivity
    }
  }

  property("average selectivity of p-stable L2 joins increases with more hash tables") {
    forAllNoShrink(pstablel2JoinGen) {
      case (matrix, joinWithLessTables, joinWithMoreTables) =>
        val lessSelectivity = joinWithLessTables.avgSelectivity(matrix)
        val moreSelectivity = joinWithMoreTables.avgSelectivity(matrix)
        lessSelectivity <= moreSelectivity
    }
  }

  property("average selectivity of bit sampling joins increases with more hash tables") {
    forAllNoShrink(bsampleJoinGen) {
      case (matrix, joinWithLessTables, joinWithMoreTables) =>
        val lessSelectivity = joinWithLessTables.avgSelectivity(matrix)
        val moreSelectivity = joinWithMoreTables.avgSelectivity(matrix)
        lessSelectivity <= moreSelectivity
    }
  }
}
Example 5
Source File: HashFunctionsTest.scala From spark-tda with Apache License 2.0
package org.apache.spark.mllib.linalg.distributed.impl

import org.scalacheck.Gen.{choose, oneOf, listOfN}
import org.scalacheck.Arbitrary.arbitrary
import org.scalatest.Matchers
import org.scalatest.prop.GeneratorDrivenPropertyChecks
import org.apache.spark.mllib.linalg.DenseVector

class HashFunctionsTest extends ImplPropSpec with GeneratorDrivenPropertyChecks with Matchers {
  import org.scalactic.Tolerance._

  property("simhash returns hashed vector whose dimension is at most the specified signature length") {
    forAll(simhashGen) {
      case (vector, signatureLength, simhash) =>
        val bucket = simhash(0L, 0, vector)
        assert(bucket === simhash(0L, 0, vector.toSparse))
        assert(bucket.signature.length <= signatureLength)
    }
  }

  property("minhash returns hashed vector whose dimension is the specified signature length") {
    forAll(minhashGen) {
      case (vector, signatureLength, minhash) =>
        val bucket = minhash(0L, 0, vector)
        assert(bucket === minhash(0L, 0, vector.toSparse))
        assert(bucket.signature.length === signatureLength)
    }
  }

  property("pstable returns hashed vector whose dimension is the specified signature length") {
    forAll(pstableGen) {
      case (vector, signatureLength, pstableL1, pstableL2) =>
        val bucketL1 = pstableL1(0L, 0, vector)
        val bucketL2 = pstableL2(0L, 0, vector)
        assert(bucketL1 === pstableL1(0L, 0, vector.toSparse))
        assert(bucketL2 === pstableL2(0L, 0, vector.toSparse))
        assert(bucketL1.signature.length === signatureLength)
        assert(bucketL2.signature.length === signatureLength)
    }
  }

  property("bit sampling returns hashed vector whose dimension is at most the specified signature length") {
    forAll(bsampleGen) {
      case (vector, signatureLength, bsample) =>
        val bucket = bsample(0L, 0, vector)
        assert(bucket === bsample(0L, 0, vector.toSparse))
        assert(bucket.signature.length <= signatureLength)
    }
  }
}
Example 6
Source File: KNearestNeighborsTest.scala From spark-tda with Apache License 2.0
package org.apache.spark.mllib.linalg.distributed.impl

import org.scalacheck.Prop.forAllNoShrink
import org.scalatest.Matchers
import org.scalatest.prop.GeneratorDrivenPropertyChecks

class KNearestNeighborsTest extends ImplPropSpec with GeneratorDrivenPropertyChecks with Matchers {

  property("cosine distance join joins each feature vector to its k-nearest neighbors") {
    forAllNoShrink(cosineJoinGen) {
      case (numVectors, quantity, matrix, join) =>
        val knn = join(matrix).toIndexedRowMatrix
        // Bind the aggregate result and combine it with the row-count check,
        // so both conditions contribute to the property result (in the
        // original, the aggregate result was silently discarded).
        val allRowsHaveKNeighbors = knn.rows.aggregate(true)(
          (acc, row) => acc && row.vector.toSparse.indices.size == quantity,
          _ && _)
        allRowsHaveKNeighbors && (knn.rows.count == numVectors)
    }
  }

  property("jaccard distance join joins each feature vector to its k-nearest neighbors") {
    forAllNoShrink(jaccardJoinGen) {
      case (numVectors, quantity, matrix, join) =>
        val knn = join(matrix).toIndexedRowMatrix
        val allRowsHaveKNeighbors = knn.rows.aggregate(true)(
          (acc, row) => acc && row.vector.toSparse.indices.size == quantity,
          _ && _)
        allRowsHaveKNeighbors && (knn.rows.count == numVectors)
    }
  }

  property("manhattan distance join joins each feature vector to its k-nearest neighbors") {
    forAllNoShrink(manhattanJoinGen) {
      case (numVectors, quantity, matrix, join) =>
        val knn = join(matrix).toIndexedRowMatrix
        val allRowsHaveKNeighbors = knn.rows.aggregate(true)(
          (acc, row) => acc && row.vector.toSparse.indices.size == quantity,
          _ && _)
        allRowsHaveKNeighbors && (knn.rows.count == numVectors)
    }
  }

  property("euclidean distance join joins each feature vector to its k-nearest neighbors") {
    forAllNoShrink(euclideanJoinGen) {
      case (numVectors, quantity, matrix, join) =>
        val knn = join(matrix).toIndexedRowMatrix
        val allRowsHaveKNeighbors = knn.rows.aggregate(true)(
          (acc, row) => acc && row.vector.toSparse.indices.size == quantity,
          _ && _)
        allRowsHaveKNeighbors && (knn.rows.count == numVectors)
    }
  }

  property("hamming distance join joins each feature vector to its k-nearest neighbors") {
    forAllNoShrink(hammingJoinGen) {
      case (numVectors, quantity, matrix, join) =>
        val knn = join(matrix).toIndexedRowMatrix
        val allRowsHaveKNeighbors = knn.rows.aggregate(true)(
          (acc, row) => acc && row.vector.toSparse.indices.size == quantity,
          _ && _)
        allRowsHaveKNeighbors && (knn.rows.count == numVectors)
    }
  }
}
Example 7
Source File: CoordinateMatrixFunctionsTest.scala From spark-tda with Apache License 2.0
package org.apache.spark.mllib.linalg.distributed

import org.scalacheck.Prop.forAllNoShrink
import org.scalatest.Matchers
import org.scalatest.prop.GeneratorDrivenPropertyChecks
import org.apache.spark.mllib.linalg.distributed.CoordinateMatrixFunctions._

class CoordinateMatrixFunctionsTest extends DistributedPropSpec with GeneratorDrivenPropertyChecks with Matchers {
  import org.scalactic.Tolerance._

  property("hadamard product preserves the size of matrices") {
    forAllNoShrink(coordinateMatrixGen) {
      case (lhs, rhs) =>
        val hprod = lhs hproduct rhs
        // Combine both checks; the original discarded the first Boolean.
        (hprod.numRows == math.max(lhs.numRows, rhs.numRows)) &&
          (hprod.numCols == math.max(lhs.numCols, rhs.numCols))
    }
  }

  property("resulting elements of hadamard product are equal to the product of corresponding elements") {
    forAllNoShrink(coordinateMatrixGen) {
      case (lhs, rhs) =>
        val lhsb = lhs.toBreeze
        val rhsb = rhs.toBreeze
        val hprod = (lhs hproduct rhs).toBreeze
        var assertion = true
        // Iterate over the overlapping region only, and use === with a Spread
        // (plain == against a tolerance value would always be false).
        for (i <- 0 until math.min(lhs.numRows.toInt, rhs.numRows.toInt);
             j <- 0 until math.min(lhs.numCols.toInt, rhs.numCols.toInt)) {
          assertion &= (hprod(i, j) === lhsb(i, j) * rhsb(i, j) +- 0.01)
        }
        assertion
    }
  }
}
Example 8
Source File: ProjectionsTest.scala From spark-tda with Apache License 2.0
package org.apache.spark.mllib.linalg

import java.util.Random

import org.scalacheck.Gen
import org.scalacheck.Prop.forAllNoShrink
import org.scalatest.Matchers
import org.scalatest.prop.GeneratorDrivenPropertyChecks
import org.scalatest.prop.Checkers.check

class ProjectionsTest extends LinalgPropSpec with GeneratorDrivenPropertyChecks with Matchers {
  import org.scalactic.Tolerance._

  val dimGen = for {
    srcDim <- Gen.choose(100, 200)
    dstDim <- Gen.choose(100, 200)
  } yield (srcDim, dstDim)

  property("gaussian random projection has good statistical properties") {
    forAllNoShrink(dimGen) {
      case (srcDim, dstDim) =>
        val projection = GaussianRandomProjection(srcDim, dstDim, new Random())
        // Combine both checks; the original discarded the first Boolean.
        (projection.mean === 0.0 +- 0.01) && (projection.stddev === 1.0 +- 0.01)
    }
  }

  property("cauchy random projection has good statistical properties") {
    forAllNoShrink(dimGen) {
      case (srcDim, dstDim) =>
        val projection = CauchyRandomProjection(srcDim, dstDim)
        projection.median === 0.0 +- 0.01
    }
  }
}
Example 9
Source File: EndpointsTest.scala From spark-tda with Apache License 2.0
package org.apache.spark.ml.util.interval

import org.scalatest.Matchers
import org.scalatest.prop.GeneratorDrivenPropertyChecks

class EndpointsTest extends IntervalPropSpec with GeneratorDrivenPropertyChecks with Matchers {

  property("the above relation on endpoints is transitive") {
    forAll { (x: Endpoint, y: Endpoint, z: Endpoint) =>
      val (u, l) = if (x isAbove y) (x, y) else (y, x)
      whenever(l isAbove z) {
        (u isAbove z) should be(true)
      }
    }
  }

  property("the below relation on endpoints is transitive") {
    forAll { (x: Endpoint, y: Endpoint, z: Endpoint) =>
      val (l, u) = if (x isBelow y) (x, y) else (y, x)
      whenever(u isBelow z) {
        (l isBelow z) should be(true)
      }
    }
  }
}
Example 10
Source File: ProcessManagerSpec.scala From akka-cqrs with Apache License 2.0
package com.productfoundry.akka.cqrs.process

import akka.actor.ActorRef
import com.productfoundry.akka.cqrs._
import com.productfoundry.akka.cqrs.publish.EventPublication
import com.productfoundry.akka.messaging.ConfirmDelivery
import com.productfoundry.support.EntityTestSupport
import org.scalatest.prop.GeneratorDrivenPropertyChecks

class ProcessManagerSpec extends EntityTestSupport with GeneratorDrivenPropertyChecks with Fixtures {

  implicit def DummyProcessManagerIdResolution = DummyProcessManager.idResolution

  implicit def DummyProcessManagerFactory = DummyProcessManager.factory()

  implicit val supervisorFactory = entityContext.entitySupervisorFactory[DummyProcessManager]

  val supervisor: ActorRef = EntitySupervisor.forType[DummyProcessManager]

  "Event publications" must {

    "be received" in new ProcessManagerFixture {
      forAll { commit: Commit =>
        val publications = createUniquePublications(commit)
        publications.foreach { publication =>
          supervisor ! publication
        }
        val events = receiveN(publications.size).map(_.asInstanceOf[AggregateEvent])
        publications.map(_.eventRecord.event) should contain theSameElementsAs events
      }
      expectNoMsg()
    }

    "be confirmed" in new ProcessManagerFixture {
      var nextDeliveryId = 1L
      forAll { commit: Commit =>
        val publications = createUniquePublications(commit)
        publications.foreach { publication =>
          supervisor ! publication.requestConfirmation(nextDeliveryId)
          nextDeliveryId = nextDeliveryId + 1
        }
        if (publications.nonEmpty) {
          val results = receiveN(publications.size * 2)
          val events = results.filter(p => classOf[AggregateEvent].isAssignableFrom(p.getClass))
          publications.map(_.eventRecord.event) should contain theSameElementsAs events
          val confirmations = results.filter(p => classOf[ConfirmDelivery].isAssignableFrom(p.getClass))
          confirmations.size should be(events.size)
        }
      }
      expectNoMsg()
    }

    "be deduplicated" in new ProcessManagerFixture {
      forAll { commit: Commit =>
        val publications = createUniquePublications(commit)
        publications.foreach { publication =>
          supervisor ! publication
          supervisor ! publication
        }
        val events = receiveN(publications.size)
        publications.map(_.eventRecord.event) should contain theSameElementsAs events
      }
      expectNoMsg()
    }
  }

  trait ProcessManagerFixture {
    system.eventStream.subscribe(self, classOf[Any])

    def createUniquePublications(commit: Commit): Seq[EventPublication] = {
      commit.records.map(eventRecord => EventPublication(eventRecord)).groupBy(_.eventRecord.tag).map(_._2.head).toSeq
    }
  }
}
Example 11
Source File: CubeTest.scala From spark-tda with Apache License 2.0
package org.apache.spark.ml.util.interval

import org.scalatest.Matchers
import org.scalatest.GivenWhenThen
import org.scalatest.prop.GeneratorDrivenPropertyChecks

class CubeTest extends IntervalPropSpec with GeneratorDrivenPropertyChecks with GivenWhenThen with Matchers {

  property("cube intersection is commutative") {
    forAll { (lhs: Cube, rhs: Cube) =>
      (lhs intersects rhs) should equal(rhs intersects lhs)
    }
  }

  property("cube inclusion is transitive") {
    forAll { (lhs: Cube, rhs: Cube, x: Double, y: Double) =>
      // Assert explicitly; a bare Boolean result would be discarded.
      (!(lhs contains rhs) || !(rhs contains (x, y)) || (lhs contains (x, y))) should be(true)
    }
  }
}
Example 12
Source File: BoundsTest.scala From spark-tda with Apache License 2.0
package org.apache.spark.ml.util.interval

import org.scalatest.Matchers
import org.scalatest.prop.GeneratorDrivenPropertyChecks

class BoundsTest extends IntervalPropSpec with GeneratorDrivenPropertyChecks with Matchers {

  property("a lower bound contains a lower bound that starts before") {
    forAll { (x: Endpoint, y: Endpoint) =>
      whenever(x isBelow y) {
        (LowerBound(x) contains LowerBound(y)) should be(true)
      }
    }
  }

  property("an upper bound contains an upper bound that starts after") {
    forAll { (x: Endpoint, y: Endpoint) =>
      whenever(x isAbove y) {
        (UpperBound(x) contains UpperBound(y)) should be(true)
      }
    }
  }
}
Example 13
Source File: PackageTest.scala From spark-tda with Apache License 2.0
package org.apache.spark.ml.util.interval

import org.scalacheck.Gen
import org.scalacheck.Arbitrary.arbitrary
import org.scalatest.Matchers
import org.scalatest.prop.GeneratorDrivenPropertyChecks

class PackageTest extends IntervalPropSpec with GeneratorDrivenPropertyChecks with Matchers {

  val coverGen = for {
    numberOfSplits <- Gen.posNum[Int]
    overlapRatio <- Gen.choose(0.0, 1.0) if overlapRatio > 0.0
    list <- Gen.listOfN(3, arbitrary[Double])
  } yield (numberOfSplits, overlapRatio, list)

  property("open cover covers all range of specified hypercube") {
    forAll(coverGen) {
      case (numberOfSplits, overlapRatio, list) =>
        val sorted = list.sorted
        val lower = sorted(0)
        val upper = sorted(2)
        val point = sorted(1)
        OpenCover(numberOfSplits, overlapRatio, Vector((lower, upper))).foldLeft(false) { (acc, open) =>
          acc || (open contains Vector(point))
        } should be(true)
    }
  }

  property("closed cover covers all range of specified hypercube") {
    forAll(coverGen) {
      case (numberOfSplits, overlapRatio, list) =>
        val sorted = list.sorted
        val lower = sorted(0)
        val upper = sorted(2)
        val point = sorted(1)
        ClosedCover(numberOfSplits, overlapRatio, Vector((lower, upper))).foldLeft(false) { (acc, closed) =>
          acc || (closed contains Vector(point))
        } should be(true)
    }
  }
}
Example 14
Source File: TreesTest.scala From spark-tda with Apache License 2.0
package org.apache.spark.ml.util.knn

import org.scalacheck.Prop.forAllNoShrink
import org.scalatest.Matchers
import org.scalatest.prop.GeneratorDrivenPropertyChecks
import org.apache.spark.ml.linalg.{Vector, Vectors, EuclideanDistance}

class TreesTest extends KNNPropSpec with GeneratorDrivenPropertyChecks with Matchers {

  property("VPTree can be constructed with empty data") {
    forAll { (v: Vector) =>
      val tree = VPTree(IndexedSeq.empty[VectorWithId], EuclideanDistance, 0, 0)
      val vector = VectorEntry(0L, v)
      tree.iterator shouldBe empty
      tree.query(vector) shouldBe empty
      tree.numLeaves shouldBe 0
    }
  }

  property("VPTree can be constructed with data not having any duplication") {
    val origin = VectorEntry(0L, Vectors.dense(0, 0))
    val data = (-5 to 5).flatMap { i =>
      (-5 to 5).map { j =>
        VectorEntry(0L, Vectors.dense(i, j))
      }
    }
    List(1, data.size / 2, data.size, data.size * 2).foreach { leafSize =>
      val tree = VPTree(data, EuclideanDistance, 1, 1, leafSize)
      tree.size shouldBe data.size
      tree.iterator.toIterable should contain theSameElementsAs data
      data.foreach(v => tree.query(v, 1).head._1 shouldBe v)
      tree.query(origin, 5).map(_._1.vector) should contain theSameElementsAs Set(
        Vectors.dense(-1, 0),
        Vectors.dense(1, 0),
        Vectors.dense(0, -1),
        Vectors.dense(0, 1),
        Vectors.dense(0, 0)
      )
      tree.query(origin, 9).map(_._1.vector) should contain theSameElementsAs Set(
        Vectors.dense(-1, -1),
        Vectors.dense(-1, 0),
        Vectors.dense(-1, 1),
        Vectors.dense(0, -1),
        Vectors.dense(0, 0),
        Vectors.dense(0, 1),
        Vectors.dense(1, -1),
        Vectors.dense(1, 0),
        Vectors.dense(1, 1)
      )
      tree.numLeaves shouldBe (tree.cardinality / leafSize.toDouble).ceil
    }
  }

  property("VPTree can be constructed with data having duplication") {
    val origin = VectorEntry(0L, Vectors.dense(0, 0))
    val data = (Vectors.dense(2.0, 0.0) +: Array.fill(5)(Vectors.dense(0.0, 1.0))).map(VectorEntry(0L, _))
    val tree = VPTree(data, EuclideanDistance, 6, 6)
    val knn = tree.query(origin, 5)
    tree.numLeaves shouldBe 2
    knn.size shouldBe 5
    knn.map(_._1.vector).toSet should contain theSameElementsAs Array(Vectors.dense(0.0, 1.0))
  }
}
Example 15
Source File: IndicesTest.scala From spark-tda with Apache License 2.0
package org.apache.spark.ml.util.knn

import org.scalacheck.Prop.forAllNoShrink
import org.scalatest.Matchers
import org.scalatest.prop.GeneratorDrivenPropertyChecks
import org.apache.spark.ml.linalg.{Vector, Vectors, EuclideanDistance}

class IndicesTest extends KNNPropSpec with GeneratorDrivenPropertyChecks with Matchers {

  property("TopTrees can be constructed with empty data") {
    forAll { (v: Vector, coverId: Int) =>
      val topTrees = TopTrees(IndexedSeq.empty[(Int, Tree)])
      val vector = VectorEntry(0L, v)
      topTrees.get((coverId, vector)) shouldBe None
      topTrees.isDefinedAt((coverId, vector)) shouldBe false
      intercept[NoSuchElementException] {
        topTrees((coverId, vector))
      }
    }
  }

  property("TopTrees can be constructed with non empty data and maintain its consistency") {
    forAll(treeGen) {
      case (trees) =>
        val indexedTrees = trees.zipWithIndex.map { case (t, i) => (i, t) }
        val topTrees = TopTrees(indexedTrees)
        val indices = indexedTrees
          .flatMap { case (index, tree) => tree.iterator.map(d => (index, d)) }
          .map { case (index, entry) => topTrees((index, entry)) }
          .toSet
        indices should contain theSameElementsAs (0 until topTrees.numIndices).toSet
        (0 until topTrees.numIndices).toSet should contain theSameElementsAs indices
    }
  }
}
Example 16
Source File: PartitionersTest.scala From spark-tda with Apache License 2.0
package org.apache.spark.ml.util.knn

import org.scalacheck.Prop.forAllNoShrink
import org.scalatest.Matchers
import org.scalatest.prop.GeneratorDrivenPropertyChecks
import org.apache.spark.ml.linalg.{Vector, Vectors, EuclideanDistance}

class PartitionersTest extends KNNPropSpec with GeneratorDrivenPropertyChecks with Matchers {

  property("TopTreesPartitioner can be constructed with empty data") {
    forAll { (v: Vector, coverId: Int) =>
      val partitioner = new TopTreesPartitioner(TopTrees(IndexedSeq.empty[(Int, Tree)]))
      val vector = VectorEntry(0L, v)
      intercept[NoSuchElementException] {
        partitioner.getPartition((coverId, vector))
      }
    }
  }

  property("TopTrees can be constructed with non empty data and maintain its consistency") {
    forAll(treeGen) {
      case (trees) =>
        val indexedTrees = trees.zipWithIndex.map { case (t, i) => (i, t) }
        val partitioner = new TopTreesPartitioner(TopTrees(indexedTrees))
        val indices = indexedTrees
          .flatMap { case (index, tree) => tree.iterator.map(d => (index, d)) }
          .map { case (index, entry) => partitioner.getPartition((index, entry)) }
          .toSet
        indices should contain theSameElementsAs (0 until partitioner.numPartitions).toSet
        (0 until partitioner.numPartitions).toSet should contain theSameElementsAs indices
        intercept[IllegalArgumentException] {
          partitioner.getPartition(0)
        }
    }
  }
}
Example 17
Source File: CoverTest.scala From spark-tda with Apache License 2.0
package org.apache.spark.ml.feature

import org.apache.spark.ml.linalg.Vectors
import org.apache.spark.sql.functions.{col, explode, udf}
import org.scalatest.{PropSpec, Matchers, GivenWhenThen}
import org.scalatest.prop.GeneratorDrivenPropertyChecks

class CoverTest extends FeaturePropSpec with GivenWhenThen with GeneratorDrivenPropertyChecks with Matchers {
  val assembler = new VectorAssembler()
    .setInputCols(Array("double", "integer"))
    .setOutputCol("vector")

  property("argument numSplits must be positive") {
    intercept[IllegalArgumentException] {
      val cover = new Cover()
        .setInputCols("double")
        .setOutputCol("cover_ids")
        .setNumSplits(0)
    }
  }

  property("argument overlapRatio must be positive") {
    intercept[IllegalArgumentException] {
      val cover = new Cover()
        .setInputCols("double")
        .setOutputCol("cover_ids")
        .setOverlapRatio(0.0)
    }
  }

  property("cover estimator changes nothing with the original dataframe") {
    val cover = new Cover()
      .setInputCols("double", "integer", "vector")
      .setOutputCol("cover_ids")
    forAll(dataframeGen.arbitrary) { df =>
      val transformed = assembler.transform(df)
      whenever(transformed.count() > 0 && hasDistinctValues(transformed, "double", "integer", "vector")) {
        cover
          .fit(transformed)
          .transform(transformed)
          .drop("cover_ids")
          .except(transformed)
          .count() should be(0)
      }
    }
  }

  property("generated cover covers all range of specified columns") {
    val cover = new Cover()
      .setInputCols("double", "integer", "vector")
      .setOutputCol("cover_ids")
    val uncovered = udf { xs: Seq[Long] => xs.length == 0 }
    forAll(dataframeGen.arbitrary) { df =>
      val transformed = assembler.transform(df)
      whenever(transformed.count() > 0 && hasDistinctValues(transformed, "double", "integer", "vector")) {
        cover
          .fit(transformed)
          .transform(transformed)
          .where(uncovered(col("cover_ids")))
          .count() should be(0)
      }
    }
  }

  property("Cover is readable/writable") {
    val cover = new Cover()
      .setInputCols("double", "integer")
      .setOutputCol("cover_ids")
    testDefaultReadWrite(cover)
  }

  property("CoverModel is readable/writable") {
    val model = new CoverModel("myCoverModel", Vectors.dense(-1.0, 0.0), Vectors.dense(1.0, 10.0))
      .setInputCols("double", "integer")
      .setOutputCol("cover_ids")
    val newModel = testDefaultReadWrite(model)
    assert(newModel.min === model.min)
    assert(newModel.max === model.max)
  }
}
Example 18
Source File: ReebDiagramTest.scala From spark-tda with Apache License 2.0
package org.apache.spark.ml.feature

import org.apache.spark.ml.linalg.{Vectors, EuclideanDistance, Vector}
import org.apache.spark.sql.functions.{col, explode, udf}
import org.scalatest.{PropSpec, Matchers, GivenWhenThen}
import org.scalatest.prop.GeneratorDrivenPropertyChecks

class ReebDiagramTest extends FeaturePropSpec with GivenWhenThen with GeneratorDrivenPropertyChecks with Matchers {
  val assembler = new VectorAssembler()
    .setInputCols(Array("double", "integer"))
    .setOutputCol("vector")
  val cover = new Cover()
    .setExploding(true)
    .setInputCols("double", "integer")
    .setOutputCol("cover_id")

  property("argument topTreeSize must be positive") {
    intercept[IllegalArgumentException] {
      val reeb = new ReebDiagram()
        // .setIdCol("id")
        // .setCoverCol("cover_id")
        // .setFeaturesCol("vector")
        // .setOutputCol("cluster_id")
        .setTopTreeSize(0)
    }
  }

  property("placeholder") {
    val reeb = new ReebDiagram()
      .setK(15)
      .setIdCol("id")
      .setCoverCol("cover_id")
      .setFeaturesCol("vector")
      .setOutputCol("cluster_id")
    forAll(dataframeGen.arbitrary) { df =>
      val assembled = assembler.transform(df)
      whenever(assembled.count() > 0 && hasDistinctValues(assembled, "double", "integer")) {
        val transformed = cover
          .fit(assembled)
          .transform(assembled)
        val result = reeb
          .setTopTreeSize(1)
          .fit(transformed)
          .transform(transformed)
        // result.show()
      }
    }
  }
}
Example 19
Source File: ResizableRingBufferSpec.scala From swave with Mozilla Public License 2.0
package swave.core.util

import org.scalacheck.Gen
import org.scalatest.prop.GeneratorDrivenPropertyChecks
import org.scalatest.{FreeSpec, Matchers}
import swave.core.impl.util.ResizableRingBuffer

class ResizableRingBufferSpec extends FreeSpec with Matchers with GeneratorDrivenPropertyChecks {

  "A ResizableRingBuffer should" - {

    val bufferGen = for {
      bit ← Gen.choose(0, 8)
    } yield new ResizableRingBuffer[String](initialCap = 1, maxCap = 1 << bit)

    "take in exactly `maxAvailable` elements" in {
      forAll(bufferGen) { buf ⇒
        Stream.continually("x").takeWhile(buf.write).toArray.length shouldEqual buf.maxCapacity
      }
    }

    "read back exactly the number of elems previously written" in {
      val gen = for {
        buf ← bufferGen
        count ← Gen.choose(0, buf.maxCapacity)
      } yield (buf, count)

      forAll(gen) {
        case (buf, count) ⇒
          val values = List.tabulate(count)(_.toString)
          values.foreach(s ⇒ buf.write(s) shouldBe true)
          List.fill(count)(buf.read()) shouldEqual values
          buf.isEmpty shouldBe true
          a[NoSuchElementException] should be thrownBy buf.read()
      }
    }

    "pass a simple stress-test" in {
      val gen = for {
        buf ← bufferGen
        opCount ← Gen.choose(5, 50)
        ops ← Gen.listOfN(opCount, Gen.choose(-20, 50))
      } yield (buf, ops)

      forAll(gen) {
        case (buf, ops) ⇒
          val queue = collection.mutable.Queue[String]()
          val ints = Iterator.from(0)
          ops foreach {
            case readCount if readCount < 0 ⇒
              -readCount times {
                buf.isEmpty shouldEqual queue.isEmpty
                if (queue.nonEmpty) queue.dequeue() shouldEqual buf.read()
                else a[NoSuchElementException] should be thrownBy buf.read()
              }
            case writeCount if writeCount > 0 ⇒
              writeCount times {
                val next = ints.next().toString
                if (buf.write(next)) queue.enqueue(next)
              }
            case 0 ⇒ // ignore
          }
      }
    }
  }
}
Example 20
Source File: GetFromVectorTransformerIntegSpec.scala From seahorse-workflow-executor with Apache License 2.0
package io.deepsense.deeplang.doperables

import org.apache.spark.sql.Row
import org.apache.spark.sql.types._
import org.scalatest.Matchers
import org.scalatest.prop.GeneratorDrivenPropertyChecks

import io.deepsense.deeplang._
import io.deepsense.deeplang.doperables.dataframe.DataFrame
import io.deepsense.deeplang.doperables.multicolumn.MultiColumnParams.SingleOrMultiColumnChoices.SingleColumnChoice
import io.deepsense.deeplang.doperables.spark.wrappers.transformers.TransformerSerialization
import io.deepsense.deeplang.doperations.exceptions.ColumnDoesNotExistException
import io.deepsense.deeplang.params.selections._
import io.deepsense.sparkutils.Linalg.Vectors

class GetFromVectorTransformerIntegSpec
  extends DeeplangIntegTestSupport
  with GeneratorDrivenPropertyChecks
  with Matchers
  with TransformerSerialization {

  import DeeplangIntegTestSupport._
  import TransformerSerialization._

  val columns = Seq(
    StructField("id", IntegerType),
    StructField("data", new io.deepsense.sparkutils.Linalg.VectorUDT()))

  def schema: StructType = StructType(columns)

  //         "id"/0  "a"/1
  val row1 = Seq(1, Vectors.dense(1.0, 10.0, 100.0))
  val row2 = Seq(2, Vectors.sparse(3, Seq((0, 2.0), (1, 20.0), (2, 200.0))))
  val row3 = Seq(3, null)
  val data = Seq(row1, row2, row3)
  val dataFrame = createDataFrame(data.map(Row.fromSeq), schema)

  "GetFromVectorTransformer" should {
    val expectedSchema = StructType(Seq(
      StructField("id", IntegerType),
      StructField("data", DoubleType)))
    val transformer = new GetFromVectorTransformer()
      .setIndex(1)
      .setSingleOrMultiChoice(
        SingleColumnChoice().setInputColumn(NameSingleColumnSelection("data")))

    "infer correct schema" in {
      val filteredSchema = transformer._transformSchema(schema)
      filteredSchema shouldBe Some(expectedSchema)
    }

    "select correctly data from vector" in {
      val transformed = transformer._transform(executionContext, dataFrame)
      val expectedData = data.map { r =>
        val vec = r(1)
        if (vec != null) {
          Seq(r.head, vec.asInstanceOf[io.deepsense.sparkutils.Linalg.Vector](1))
        } else {
          Seq(r.head, null)
        }
      }
      val expectedDataFrame = createDataFrame(expectedData.map(Row.fromSeq), expectedSchema)
      assertDataFramesEqual(transformed, expectedDataFrame)
      val projectedBySerializedTransformer = projectedUsingSerializedTransformer(transformer)
      assertDataFramesEqual(transformed, projectedBySerializedTransformer)
    }

    "throw an exception" when {
      "the selected column does not exist" when {
        val transformer = new GetFromVectorTransformer()
          .setIndex(1)
          .setSingleOrMultiChoice(SingleColumnChoice().setInputColumn(
            NameSingleColumnSelection("thisColumnDoesNotExist")))
        "transforming a DataFrame" in {
          intercept[ColumnDoesNotExistException] {
            transformer._transform(executionContext, dataFrame)
          }
        }
        "transforming a schema" in {
          intercept[ColumnDoesNotExistException] {
            transformer._transformSchema(schema)
          }
        }
      }
    }
  }

  private def projectedUsingSerializedTransformer(transformer: Transformer): DataFrame = {
    transformer.loadSerializedTransformer(tempDir)._transform(executionContext, dataFrame)
  }
}
Example 21
Source File: DataFrameSplitterIntegSpec.scala From seahorse-workflow-executor with Apache License 2.0
package io.deepsense.deeplang.doperations

import scala.collection.JavaConverters._

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.Row
import org.apache.spark.sql.types.{IntegerType, StructField, StructType}
import org.scalatest.Matchers
import org.scalatest.prop.GeneratorDrivenPropertyChecks

import io.deepsense.deeplang._
import io.deepsense.deeplang.doperables.dataframe.DataFrame

class DataFrameSplitterIntegSpec
  extends DeeplangIntegTestSupport
  with GeneratorDrivenPropertyChecks
  with Matchers {

  "SplitDataFrame" should {
    "split randomly one df into two df in given range" in {
      val input = Range(1, 100)
      val parameterPairs = List(
        (0.0, 0),
        (0.3, 1),
        (0.5, 2),
        (0.8, 3),
        (1.0, 4))
      for ((splitRatio, seed) <- parameterPairs) {
        val rdd = createData(input)
        val df = executionContext.dataFrameBuilder.buildDataFrame(createSchema, rdd)
        val (df1, df2) = executeOperation(
          executionContext,
          new Split()
            .setSplitMode(
              SplitModeChoice.Random()
                .setSplitRatio(splitRatio)
                .setSeed(seed / 2)))(df)
        validateSplitProperties(df, df1, df2)
      }
    }

    "split conditionally one df into two df in given range" in {
      val input = Range(1, 100)
      val condition = "value > 20"
      val predicate: Int => Boolean = _ > 20
      val (expectedDF1, expectedDF2) =
        (input.filter(predicate), input.filter(!predicate(_)))
      val rdd = createData(input)
      val df = executionContext.dataFrameBuilder.buildDataFrame(createSchema, rdd)
      val (df1, df2) = executeOperation(
        executionContext,
        new Split()
          .setSplitMode(
            SplitModeChoice.Conditional()
              .setCondition(condition)))(df)
      df1.sparkDataFrame.collect().map(_.get(0)) should contain theSameElementsAs expectedDF1
      df2.sparkDataFrame.collect().map(_.get(0)) should contain theSameElementsAs expectedDF2
      validateSplitProperties(df, df1, df2)
    }
  }

  private def createSchema: StructType = {
    StructType(List(
      StructField("value", IntegerType, nullable = false)
    ))
  }

  private def createData(data: Seq[Int]): RDD[Row] = {
    sparkContext.parallelize(data.map(Row(_)))
  }

  private def executeOperation(context: ExecutionContext, operation: DOperation)
                              (dataFrame: DataFrame): (DataFrame, DataFrame) = {
    val operationResult = operation.executeUntyped(Vector[DOperable](dataFrame))(context)
    val df1 = operationResult.head.asInstanceOf[DataFrame]
    val df2 = operationResult.last.asInstanceOf[DataFrame]
    (df1, df2)
  }

  def validateSplitProperties(inputDF: DataFrame, outputDF1: DataFrame, outputDF2: DataFrame): Unit = {
    val dfCount = inputDF.sparkDataFrame.count()
    val df1Count = outputDF1.sparkDataFrame.count()
    val df2Count = outputDF2.sparkDataFrame.count()
    val rowsDf = inputDF.sparkDataFrame.collectAsList().asScala
    val rowsDf1 = outputDF1.sparkDataFrame.collectAsList().asScala
    val rowsDf2 = outputDF2.sparkDataFrame.collectAsList().asScala
    val intersect = rowsDf1.intersect(rowsDf2)
    intersect.size shouldBe 0
    (df1Count + df2Count) shouldBe dfCount
    rowsDf.toSet shouldBe rowsDf1.toSet.union(rowsDf2.toSet)
  }
}
Example 22
Source File: ReceiptRendererTest.scala From apple-of-my-iap with MIT License
package com.meetup.iap.receipt

import com.meetup.iap.AppleApi
import AppleApi.{ReceiptResponse, ReceiptInfo}
import org.scalatest.{Matchers, PropSpec}
import org.scalatest.prop.GeneratorDrivenPropertyChecks
import org.joda.time.{Period, DateTime}

class ReceiptRendererTest extends PropSpec with GeneratorDrivenPropertyChecks with Matchers {

  property("Renderer should produce valid dates") {
    val purchaseDate = new DateTime().withMillis(0).toDate
    val expiresDate = new DateTime().withMillis(0).plus(Period.days(7)).toDate
    val cancellationDate = new DateTime().withMillis(0).plus(Period.days(3)).toDate
    println(s"Orig purchaseDate: $purchaseDate, $expiresDate, $cancellationDate")

    val transactionId = "10022345304"
    val receiptInfo = ReceiptInfo(
      purchaseDate,
      transactionId,
      transactionId,
      purchaseDate,
      expiresDate,
      "123943451",
      isTrialPeriod = false,
      isInIntroOfferPeriod = None,
      Some(cancellationDate),
      1)

    val json = ReceiptRenderer(ReceiptResponse(None, List(receiptInfo)))
    val response = AppleApi.parseResponse(json)

    response.latestInfo.isDefined should equal (true)
    response.latestInfo.map { info =>
      info.purchaseDate should equal (purchaseDate)
      info.expiresDate should equal (expiresDate)
      info.cancellationDate.isDefined should equal (true)
      info.cancellationDate.map(_ should equal (cancellationDate))
    }
  }
}
Example 23
Source File: NewtsSuite.scala From newts with Apache License 2.0
package newts

import cats.instances.AllInstances
import newts.syntax.AllSyntax
import org.scalacheck.{Arbitrary, Cogen}
import org.scalacheck.Arbitrary.arbitrary
import org.scalatest.prop.GeneratorDrivenPropertyChecks
import org.scalatest.{FunSuite, Matchers}
import org.typelevel.discipline.scalatest.Discipline

trait NewtsSuite extends FunSuite
  with Matchers
  with GeneratorDrivenPropertyChecks
  with Discipline
  with AllSyntax
  with AllInstances
  with cats.syntax.AllSyntax
  with ArbitraryInstances

trait ArbitraryInstances {
  def arbNewtype[S, A: Arbitrary](implicit newtype: Newtype.Aux[S, A]): Arbitrary[S] =
    Arbitrary(arbitrary[A].map(newtype.wrap))

  def cogenNewtype[S, A: Cogen](implicit newtype: Newtype.Aux[S, A]): Cogen[S] =
    Cogen[A].contramap(newtype.unwrap)

  implicit val allArbitrary: Arbitrary[All] = arbNewtype[All, Boolean]
  implicit val anyArbitrary: Arbitrary[Any] = arbNewtype[Any, Boolean]
  implicit def multArbitrary[A: Arbitrary]: Arbitrary[Mult[A]] = arbNewtype[Mult[A], A]
  implicit def dualArbitrary[A: Arbitrary]: Arbitrary[Dual[A]] = arbNewtype[Dual[A], A]
  implicit def firstArbitrary[A: Arbitrary]: Arbitrary[First[A]] = arbNewtype[First[A], A]
  implicit def lastArbitrary[A: Arbitrary]: Arbitrary[Last[A]] = arbNewtype[Last[A], A]
  implicit def firstOptionArbitrary[A: Arbitrary]: Arbitrary[FirstOption[A]] = arbNewtype[FirstOption[A], Option[A]]
  implicit def lastOptionArbitrary[A: Arbitrary]: Arbitrary[LastOption[A]] = arbNewtype[LastOption[A], Option[A]]
  implicit def minArbitrary[A: Arbitrary]: Arbitrary[Min[A]] = arbNewtype[Min[A], A]
  implicit def maxArbitrary[A: Arbitrary]: Arbitrary[Max[A]] = arbNewtype[Max[A], A]
  implicit def zipListArbitrary[A: Arbitrary]: Arbitrary[ZipList[A]] = arbNewtype[ZipList[A], List[A]]
  implicit def backwardsArbitrary[F[_], A](implicit ev: Arbitrary[F[A]]): Arbitrary[Backwards[F, A]] = arbNewtype[Backwards[F, A], F[A]]
  implicit def reverseArbitrary[F[_], A](implicit ev: Arbitrary[F[A]]): Arbitrary[Reverse[F, A]] = arbNewtype[Reverse[F, A], F[A]]

  implicit val allCogen: Cogen[All] = cogenNewtype[All, Boolean]
  implicit val anyCogen: Cogen[Any] = cogenNewtype[Any, Boolean]
  implicit def multCogen[A: Cogen]: Cogen[Mult[A]] = cogenNewtype[Mult[A], A]
  implicit def dualCogen[A: Cogen]: Cogen[Dual[A]] = cogenNewtype[Dual[A], A]
  implicit def firstCogen[A: Cogen]: Cogen[First[A]] = cogenNewtype[First[A], A]
  implicit def lastCogen[A: Cogen]: Cogen[Last[A]] = cogenNewtype[Last[A], A]
  implicit def firstOptionCogen[A: Cogen]: Cogen[FirstOption[A]] = cogenNewtype[FirstOption[A], Option[A]]
  implicit def lastOptionCogen[A: Cogen]: Cogen[LastOption[A]] = cogenNewtype[LastOption[A], Option[A]]
  implicit def minOptionCogen[A: Cogen]: Cogen[Min[A]] = cogenNewtype[Min[A], A]
  implicit def maxOptionCogen[A: Cogen]: Cogen[Max[A]] = cogenNewtype[Max[A], A]
  implicit def zipListCogen[A: Cogen]: Cogen[ZipList[A]] = cogenNewtype[ZipList[A], List[A]]
  implicit def backwardsCogen[F[_], A](implicit ev: Cogen[F[A]]): Cogen[Backwards[F, A]] = cogenNewtype[Backwards[F, A], F[A]]
  implicit def reverseCogen[F[_], A](implicit ev: Cogen[F[A]]): Cogen[Reverse[F, A]] = cogenNewtype[Reverse[F, A], F[A]]
}
Example 24
Source File: TraitReferenceSpec.scala From vamp with Apache License 2.0
package io.vamp.model.artifact

import org.scalatest.prop.GeneratorDrivenPropertyChecks
import org.scalatest.{ Matchers, OptionValues, WordSpec }

class TraitReferenceSpec extends WordSpec with Matchers with OptionValues with GeneratorDrivenPropertyChecks {

  "TraitReference" should {
    "convert string to TraitReference via: referenceFor" in {
      forAll("cluster", "group", "name") { (cluster: String, group: String, name: String) ⇒
        whenever(!cluster.contains('.') && !group.contains('.') && !name.contains('.')) {
          val traitReferenceString = s"$cluster.$group.$name"
          TraitReference.referenceFor(traitReferenceString).value should be(TraitReference(cluster, group, name))
        }
      }
    }

    "referenceFor & toReference: referenceFor(x.toReference) should yield same result" in {
      forAll("cluster", "group", "name") { (cluster: String, group: String, name: String) ⇒
        whenever(!cluster.contains('.') && !group.contains('.') && !name.contains('.')) {
          TraitReference.referenceFor(TraitReference(cluster, group, name).reference).value should be(TraitReference(cluster, group, name))
        }
      }
    }
  }
}
Example 25
Source File: XorShiftRandomSpec.scala From swave with Mozilla Public License 2.0
package swave.core.util

import org.scalacheck.Gen
import org.scalatest.prop.GeneratorDrivenPropertyChecks
import org.scalatest.{FreeSpec, Matchers}

class XorShiftRandomSpec extends FreeSpec with Matchers with GeneratorDrivenPropertyChecks {

  "XorShiftRandom" - {

    "nextLong" in {
      val random = XorShiftRandom()
      forAll(Gen.posNum[Long]) { bound ⇒
        random.nextLong(bound) should (be >= 0L and be < bound)
      }
    }

    "nextInt" in {
      val random = XorShiftRandom()
      forAll(Gen.posNum[Int]) { bound ⇒
        random.nextInt(bound) should (be >= 0 and be < bound)
      }
    }

    "nextDouble" in {
      val random = XorShiftRandom()
      forAll { (_: Unit) ⇒
        random.nextDouble() should (be >= 0.0 and be < 1.0)
      }
    }

    "shuffle" in {
      val random = XorShiftRandom()
      val array = Array("0", "1", "2", "3", "4", "5", "6", "7", "8", "9")
      val array2 = java.util.Arrays.copyOf(array, array.length)
      random.shuffle_!(array2)
      array2 should not equal array // will fail once every approx. 10! = 3.628.800 test runs
      array2.sorted shouldEqual array
    }
  }
}
Example 26
Source File: RichRefArraySpec.scala From swave with Mozilla Public License 2.0
package swave.core.util

import org.scalacheck.Gen
import org.scalatest.prop.GeneratorDrivenPropertyChecks
import org.scalatest.{FreeSpec, Matchers}

class RichRefArraySpec extends FreeSpec with Matchers with GeneratorDrivenPropertyChecks {

  "RichRefArray" - {
    val stringArrays = Gen.containerOf[Array, String](Gen.alphaStr)

    "fastIndexOf" in {
      val arrayWithIndex = for {
        arr ← stringArrays
        ix ← Gen.chooseNum(0, arr.length + 1)
      } yield arr.map(Symbol(_)) → ix

      forAll(arrayWithIndex) {
        case (array, ix) ⇒
          val specimen = if (ix < array.length) array(ix) else 'foo
          array.fastIndexOf(specimen) shouldEqual array.indexOf(specimen)
      }
    }

    "reverse_!" in {
      forAll(stringArrays) { array ⇒
        val array2 = array.drop(0)
        array2.reverse_!()
        array2 shouldEqual array.reverse
      }
    }
  }
}
Example 27
Source File: RichListSpec.scala From swave with Mozilla Public License 2.0
package swave.core.util

import org.scalacheck.Gen
import org.scalatest.{FreeSpec, Matchers}
import org.scalatest.prop.GeneratorDrivenPropertyChecks

class RichListSpec extends FreeSpec with Matchers with GeneratorDrivenPropertyChecks {

  "RichList" - {

    "fastReverse" in {
      forAll { (list: List[Int]) ⇒
        list.fastReverse shouldEqual list.reverse
      }
    }

    "remove" in {
      forAll(Gen.choose(0, 5), Gen.choose(0, 4)) { (n: Int, x: Int) ⇒
        val list = List.tabulate(n)(identity)
        list.remove(x) shouldEqual list.filterNot(_ == x)
      }
    }
  }
}
Example 28
Source File: ExpressionEvalHelper.scala From HANAVora-Extensions with Apache License 2.0
package org.apache.spark.sql.catalyst.expressions

//
// Partially backported from Spark 1.5.2.
//

import org.apache.spark.sql.extension.OptimizerFactoryForTests
import org.apache.spark.sql.catalyst.plans.logical.{OneRowRelation, Project}
import org.apache.spark.sql.catalyst.{CatalystTypeConverters, InternalRow}
import org.scalactic.TripleEqualsSupport.Spread
import org.scalatest.FunSuite
import org.scalatest.prop.GeneratorDrivenPropertyChecks

// scalastyle:off

// NOTE: the original listing is truncated at this point; the trait declaration
// and the beginning of `evaluate` were lost in extraction, and other helpers
// referenced below (e.g. checkResult) are not shown. The declaration and
// method header below are reconstructed after the Spark 1.5 ExpressionEvalHelper
// that this file backports.
trait ExpressionEvalHelper extends FunSuite with GeneratorDrivenPropertyChecks {

  protected def evaluate(expression: Expression, inputRow: InternalRow = EmptyRow): Any = {
    expression.foreach {
      case n: Nondeterministic => n.setInitialValues()
      case _ =>
    }
    expression.eval(inputRow)
  }

  protected def generateProject(
      generator: => Projection,
      expression: Expression): Projection = {
    try {
      generator
    } catch {
      case e: Throwable =>
        fail(
          s"""
             |Code generation of $expression failed:
             |$e
             |${e.getStackTraceString}
           """.stripMargin)
    }
  }

  protected def checkEvaluationWithoutCodegen(
      expression: Expression,
      expected: Any,
      inputRow: InternalRow = EmptyRow): Unit = {
    val actual = try evaluate(expression, inputRow) catch {
      case e: Exception => fail(s"Exception evaluating $expression", e)
    }
    if (!checkResult(actual, expected)) {
      val input = if (inputRow == EmptyRow) "" else s", input: $inputRow"
      fail(s"Incorrect evaluation (codegen off): $expression, " +
        s"actual: $actual, " +
        s"expected: $expected$input")
    }
  }

  protected def checkEvaluationWithOptimization(
      expression: Expression,
      expected: Any,
      inputRow: InternalRow = EmptyRow): Unit = {
    val plan = Project(Alias(expression, s"Optimized($expression)")() :: Nil, OneRowRelation)
    val optimizedPlan = OptimizerFactoryForTests.default().execute(plan)
    checkEvaluationWithoutCodegen(optimizedPlan.expressions.head, expected, inputRow)
  }
}
Example 29
Source File: RichLongSpec.scala From swave with Mozilla Public License 2.0
package swave.core.util

import org.scalatest.prop.GeneratorDrivenPropertyChecks
import org.scalatest.{FreeSpec, Matchers}

class RichLongSpec extends FreeSpec with Matchers with GeneratorDrivenPropertyChecks {

  "RichLong" - {
    val longMin = BigDecimal(Long.MinValue)
    val longMax = BigDecimal(Long.MaxValue)

    def bounded(d: BigDecimal) =
      if (d < longMin) Long.MinValue
      else if (d > longMax) Long.MaxValue
      else d.longValue()

    "⊹" in {
      forAll { (x: Long, y: Long) ⇒
        x ⊹ y shouldEqual bounded(BigDecimal(x) + BigDecimal(y))
      }
    }

    "×" in {
      forAll { (x: Long, y: Long) ⇒
        (x × y) shouldEqual bounded(BigDecimal(x) * BigDecimal(y))
      }
    }
  }
}
Example 30
Source File: RingBufferSpec.scala From swave with Mozilla Public License 2.0
package swave.core.util

import org.scalacheck.Gen
import org.scalatest.prop.GeneratorDrivenPropertyChecks
import org.scalatest.{FreeSpec, Matchers}
import swave.core.impl.util.RingBuffer

class RingBufferSpec extends FreeSpec with Matchers with GeneratorDrivenPropertyChecks {

  "A RingBuffer should" - {

    val bufferGen = for {
      bit ← Gen.choose(0, 8)
    } yield new RingBuffer[String](cap = 1 << bit)

    "take in exactly `capacity` elements" in {
      forAll(bufferGen) { buf ⇒
        val a = Stream.continually("x").takeWhile(buf.write).toArray
        a.length shouldEqual buf.capacity
      }
    }

    "read back exactly the number of elems previously written" in {
      val gen = for {
        buf ← bufferGen
        count ← Gen.choose(0, buf.capacity)
      } yield (buf, count)

      forAll(gen) {
        case (buf, count) ⇒
          val values = List.tabulate(count)(_.toString)
          values.foreach(s ⇒ buf.write(s) shouldBe true)
          List.fill(count)(buf.read()) shouldEqual values
          buf.isEmpty shouldBe true
          a[NoSuchElementException] should be thrownBy buf.read()
      }
    }

    "pass a simple stress-test" in {
      val gen = for {
        buf ← bufferGen
        opCount ← Gen.choose(5, 20)
        ops ← Gen.listOfN(opCount, Gen.choose(-10, 20))
      } yield (buf, ops)

      forAll(gen) {
        case (buf, ops) ⇒
          val queue = collection.mutable.Queue[String]()
          val ints = Iterator.from(0)
          ops foreach {
            case readCount if readCount < 0 ⇒
              -readCount times {
                buf.isEmpty shouldEqual queue.isEmpty
                if (queue.nonEmpty) queue.dequeue() shouldEqual buf.read()
                else a[NoSuchElementException] should be thrownBy buf.read()
              }
            case writeCount if writeCount > 0 ⇒
              writeCount times {
                val next = ints.next().toString
                if (buf.write(next)) queue.enqueue(next)
              }
            case 0 ⇒ // ignore
          }
      }
    }
  }
}
Example 31
Source File: GenericDateTimeGeneratorsSpec.scala From scalacheck-ops with Apache License 2.0
package org.scalacheck.ops.time

import org.scalacheck.Arbitrary
import org.scalacheck.ops._
import org.scalatest.FlatSpec
import org.scalatest.prop.GeneratorDrivenPropertyChecks
import org.scalatest.Matchers._

import scala.reflect.ClassTag

private[time] abstract class GenericDateTimeGeneratorsSpec[Generators <: AbstractTimeGenerators](
  protected val gen: Generators
)(implicit classTag: ClassTag[Generators])
  extends FlatSpec
  with GeneratorDrivenPropertyChecks {

  protected val genClassName: String = classTag.runtimeClass.getSimpleName.stripSuffix("$")

  protected def arbInstantType: Arbitrary[gen.InstantType]
  protected def clsTagInstantType: ClassTag[gen.InstantType]
  protected def orderingInstantType: Ordering[gen.InstantType]

  // set the local implicits
  private implicit def localArbDateTimeType: Arbitrary[gen.InstantType] = arbInstantType
  private implicit def localClsTagDateTimeType: ClassTag[gen.InstantType] = clsTagInstantType
  private implicit def localParams: gen.ParamsType = gen.defaultParams
  private implicit def localOrderingInstantType: Ordering[gen.InstantType] = orderingInstantType

  behavior of s"$genClassName.before"

  it should "not generate errors" in {
    forAll() { start: gen.InstantType =>
      val sampleIter = gen.before(start).sampleIterator
      val samples = sampleIter.take(10).toSeq
      assert(samples.forall(_.isDefined))
    }
  }

  it should s"always generate $genClassName instances less than the given instant" in {
    forAll() { start: gen.InstantType =>
      forAll(gen.before(start)) { before: gen.InstantType =>
        before should be <= start
      }
    }
  }

  behavior of s"$genClassName.after"

  it should "not generate errors" in {
    forAll() { start: gen.InstantType =>
      val sampleIter = gen.after(start).sampleIterator
      val samples = sampleIter.take(10).toSeq
      assert(samples.forall(_.isDefined))
    }
  }

  it should s"always generate $genClassName instances greater than the given instant" in {
    forAll() { start: gen.InstantType =>
      forAll(gen.after(start)) { after: gen.InstantType =>
        after should be >= start
      }
    }
  }

  behavior of s"$genClassName.around"

  it should "not generate errors" in {
    forAll() { start: gen.InstantType =>
      val sampleIter = gen.around(start).sampleIterator
      val samples = sampleIter.take(10).toSeq
      assert(samples.forall(_.isDefined))
    }
  }

  it should s"always generate $genClassName that are within the given range of the given time" in {
    forAll() { start: gen.InstantType =>
      forAll(gen.around(start, gen.defaultRange)) { around =>
        around should (
          be >= gen.subtractToFloor(start, gen.defaultRange) and
          be <= gen.addToCeil(start, gen.defaultRange)
        )
      }
    }
  }
}
Example 32
Source File: SerializationRoundTripSpec.scala From sigmastate-interpreter with MIT License
package sigmastate.utxo

import org.ergoplatform.{ErgoLikeTransaction, ErgoBoxCandidate, _}
import org.scalatest.prop.GeneratorDrivenPropertyChecks
import org.scalatest.{PropSpec, Matchers}
import scalan.util.BenchmarkUtil
import sigmastate.helpers.SigmaTestingCommons
import sigmastate.interpreter.{ProverResult, ContextExtension}
import sigmastate.serialization.generators.ObjectGenerators
import debox.{Buffer => DBuffer}
import spire.algebra._
import spire.std.int._

class SerializationRoundTripSpec extends PropSpec
  with GeneratorDrivenPropertyChecks
  with Matchers
  with ObjectGenerators
  with SigmaTestingCommons {

  case class Run(size: Int, time: Long)

  implicit val orderRun = Order.by((r: Run) => r.size)

  property("ErgoBoxCandidate: Serializer round trip benchmark") {
    val runs = DBuffer.empty[Run]
    forAll(MinSuccessful(20)) { t: ErgoBoxCandidate =>
      val (_, time) = BenchmarkUtil.measureTime {
        var i = 0
        while (i < 100) {
          roundTripTest(t)(ErgoBoxCandidate.serializer)
          i += 1
        }
      }
      runs += Run(t.bytesWithNoRef.length, time)
    }
    runs.sort
    for (r <- runs) {
      println(s"Size: ${r.size}, Time: ${r.time}")
    }
  }

  property("ErgoBoxCandidate: Serializer round trip") {
    forAll { t: ErgoBoxCandidate => roundTripTest(t)(ErgoBoxCandidate.serializer) }
    forAll { t: ErgoBoxCandidate => roundTripTestWithPos(t)(ErgoBoxCandidate.serializer) }
  }

  property("ErgoBox: Serializer round trip") {
    forAll { t: ErgoBox => roundTripTest(t)(ErgoBox.sigmaSerializer) }
    forAll { t: ErgoBox => roundTripTestWithPos(t)(ErgoBox.sigmaSerializer) }
  }

  property("ContextExtension: Serializer round trip") {
    forAll { t: ContextExtension => roundTripTest(t)(ContextExtension.serializer) }
    forAll { t: ContextExtension => roundTripTestWithPos(t)(ContextExtension.serializer) }
  }

  property("SerializedProverResult: Serializer round trip") {
    forAll { t: ProverResult => roundTripTest(t)(ProverResult.serializer) }
    forAll { t: ProverResult => roundTripTestWithPos(t)(ProverResult.serializer) }
  }

  property("Input: Serializer round trip") {
    forAll { t: Input => roundTripTest(t)(Input.serializer) }
    forAll { t: Input => roundTripTestWithPos(t)(Input.serializer) }
  }
}
Example 33
Source File: SerializationSpecification.scala From sigmastate-interpreter with MIT License
package sigmastate.serialization

import org.ergoplatform.validation.ValidationSpecification
import org.scalacheck.Gen
import org.scalatest.prop.{PropertyChecks, TableDrivenPropertyChecks, GeneratorDrivenPropertyChecks}
import org.scalatest.{PropSpec, Assertion, Matchers}
import org.scalacheck.Arbitrary._
import sigmastate.Values._
import sigmastate.SType
import sigmastate.serialization.generators._

trait SerializationSpecification extends PropSpec
  with PropertyChecks
  with GeneratorDrivenPropertyChecks
  with TableDrivenPropertyChecks
  with Matchers
  with ObjectGenerators
  with ConcreteCollectionGenerators
  with OpcodesGen
  with TransformerGenerators
  with RelationGenerators
  with ValidationSpecification {

  protected def roundTripTest[V <: Value[_ <: SType]](v: V): Assertion = {
    val bytes = ValueSerializer.serialize(v)
    predefinedBytesTest(v, bytes)
    predefinedBytesTestNotFromZeroElement(bytes, v)
  }

  protected def predefinedBytesTest[V <: Value[_ <: SType]](v: V, bytes: Array[Byte]): Assertion = {
    ValueSerializer.serialize(v) shouldEqual bytes
    val r = SigmaSerializer.startReader(bytes)
    val positionLimitBefore = r.positionLimit
    val dv = ValueSerializer.deserialize(r)
    dv shouldEqual v
    r.positionLimit shouldBe positionLimitBefore
  }

  // Check that pos and consumed are implemented correctly: deserialization
  // must also work when the value does not start at offset zero.
  protected def predefinedBytesTestNotFromZeroElement[V <: Value[_ <: SType]](bytes: Array[Byte], v: V): Assertion = {
    val randomInt = Gen.chooseNum(1, 20).sample.get
    val randomBytes = Gen.listOfN(randomInt, arbByte.arbitrary).sample.get.toArray
    val parsedVal = ValueSerializer.deserialize(randomBytes ++ bytes, randomInt)
    parsedVal shouldEqual v
  }
}
Example 34
Source File: MessageExtractorSuite.scala From sqs-kafka-connect with Apache License 2.0
package com.hivehome.kafka.connect.sqs

import com.amazon.sqs.javamessaging.message.{SQSObjectMessage, SQSBytesMessage, SQSTextMessage}
import org.scalacheck.Gen
import org.scalatest.prop.GeneratorDrivenPropertyChecks
import org.scalatest.{FunSuite, Matchers}

class MessageExtractorSuite extends FunSuite with GeneratorDrivenPropertyChecks with Matchers {

  test("should extract text message") {
    forAll(Gen.alphaStr) { text =>
      val msg = new SQSTextMessage(text)
      val actual = MessageExtractor(msg)
      actual shouldEqual text
    }
  }

  test("should extract bytes message") {
    forAll(Gen.alphaStr) { text =>
      val msg = new SQSBytesMessage()
      msg.writeBytes(text.getBytes)
      val actual = MessageExtractor(msg)
      actual shouldEqual text
    }
  }

  test("should extract object message") {
    forAll(Gen.alphaStr) { text =>
      val msg = new SQSObjectMessage()
      msg.setObject(text)
      val actual = MessageExtractor(msg)
      actual shouldEqual text
    }
  }
}
Example 35
Source File: GetFromVectorTransformerIntegSpec.scala From seahorse with Apache License 2.0 | 5 votes |
package ai.deepsense.deeplang.doperables

import org.apache.spark.sql.Row
import org.apache.spark.sql.types._
import org.scalatest.Matchers
import org.scalatest.prop.GeneratorDrivenPropertyChecks
import ai.deepsense.deeplang._
import ai.deepsense.deeplang.doperables.dataframe.DataFrame
import ai.deepsense.deeplang.doperables.multicolumn.MultiColumnParams.SingleOrMultiColumnChoices.SingleColumnChoice
import ai.deepsense.deeplang.doperables.spark.wrappers.transformers.TransformerSerialization
import ai.deepsense.deeplang.doperations.exceptions.ColumnDoesNotExistException
import ai.deepsense.deeplang.params.selections._
import ai.deepsense.sparkutils.Linalg.Vectors

class GetFromVectorTransformerIntegSpec
  extends DeeplangIntegTestSupport
  with GeneratorDrivenPropertyChecks
  with Matchers
  with TransformerSerialization {

  import DeeplangIntegTestSupport._
  import TransformerSerialization._

  val columns = Seq(
    StructField("id", IntegerType),
    StructField("data", new ai.deepsense.sparkutils.Linalg.VectorUDT()))

  def schema: StructType = StructType(columns)

  // "id"/0 "a"/1
  val row1 = Seq(1, Vectors.dense(1.0, 10.0, 100.0))
  val row2 = Seq(2, Vectors.sparse(3, Seq((0, 2.0), (1, 20.0), (2, 200.0))))
  val row3 = Seq(3, null)
  val data = Seq(row1, row2, row3)
  val dataFrame = createDataFrame(data.map(Row.fromSeq), schema)

  "GetFromVectorTransformer" should {
    val expectedSchema = StructType(Seq(
      StructField("id", IntegerType),
      StructField("data", DoubleType)))
    val transformer = new GetFromVectorTransformer()
      .setIndex(1)
      .setSingleOrMultiChoice(
        SingleColumnChoice().setInputColumn(NameSingleColumnSelection("data")))

    "infer correct schema" in {
      val filteredSchema = transformer._transformSchema(schema)
      filteredSchema shouldBe Some(expectedSchema)
    }

    "select correctly data from vector" in {
      val transformed = transformer._transform(executionContext, dataFrame)
      val expectedData = data.map { r =>
        val vec = r(1)
        if (vec != null) {
          Seq(r.head, vec.asInstanceOf[ai.deepsense.sparkutils.Linalg.Vector](1))
        } else {
          Seq(r.head, null)
        }
      }
      val expectedDataFrame = createDataFrame(expectedData.map(Row.fromSeq), expectedSchema)
      assertDataFramesEqual(transformed, expectedDataFrame)
      val projectedBySerializedTransformer = projectedUsingSerializedTransformer(transformer)
      assertDataFramesEqual(transformed, projectedBySerializedTransformer)
    }

    "throw an exception" when {
      "the selected column does not exist" when {
        val transformer = new GetFromVectorTransformer()
          .setIndex(1)
          .setSingleOrMultiChoice(SingleColumnChoice().setInputColumn(
            NameSingleColumnSelection("thisColumnDoesNotExist")))
        "transforming a DataFrame" in {
          intercept[ColumnDoesNotExistException] {
            transformer._transform(executionContext, dataFrame)
          }
        }
        "transforming a schema" in {
          intercept[ColumnDoesNotExistException] {
            transformer._transformSchema(schema)
          }
        }
      }
    }
  }

  private def projectedUsingSerializedTransformer(transformer: Transformer): DataFrame = {
    transformer.loadSerializedTransformer(tempDir)._transform(executionContext, dataFrame)
  }
}
Example 36
Source File: DataFrameSplitterIntegSpec.scala From seahorse with Apache License 2.0 | 5 votes |
package ai.deepsense.deeplang.doperations

import scala.collection.JavaConverters._

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.Row
import org.apache.spark.sql.types.{IntegerType, StructField, StructType}
import org.scalatest.Matchers
import org.scalatest.prop.GeneratorDrivenPropertyChecks
import ai.deepsense.deeplang._
import ai.deepsense.deeplang.doperables.dataframe.DataFrame

class DataFrameSplitterIntegSpec
  extends DeeplangIntegTestSupport
  with GeneratorDrivenPropertyChecks
  with Matchers {

  "SplitDataFrame" should {
    "split randomly one df into two df in given range" in {
      val input = Range(1, 100)
      val parameterPairs = List(
        (0.0, 0),
        (0.3, 1),
        (0.5, 2),
        (0.8, 3),
        (1.0, 4))
      for ((splitRatio, seed) <- parameterPairs) {
        val rdd = createData(input)
        val df = executionContext.dataFrameBuilder.buildDataFrame(createSchema, rdd)
        val (df1, df2) = executeOperation(
          executionContext,
          new Split()
            .setSplitMode(
              SplitModeChoice.Random()
                .setSplitRatio(splitRatio)
                .setSeed(seed / 2)))(df)
        validateSplitProperties(df, df1, df2)
      }
    }

    "split conditionally one df into two df in given range" in {
      val input = Range(1, 100)
      val condition = "value > 20"
      val predicate: Int => Boolean = _ > 20
      val (expectedDF1, expectedDF2) =
        (input.filter(predicate), input.filter(!predicate(_)))
      val rdd = createData(input)
      val df = executionContext.dataFrameBuilder.buildDataFrame(createSchema, rdd)
      val (df1, df2) = executeOperation(
        executionContext,
        new Split()
          .setSplitMode(
            SplitModeChoice.Conditional()
              .setCondition(condition)))(df)
      df1.sparkDataFrame.collect().map(_.get(0)) should contain theSameElementsAs expectedDF1
      df2.sparkDataFrame.collect().map(_.get(0)) should contain theSameElementsAs expectedDF2
      validateSplitProperties(df, df1, df2)
    }
  }

  private def createSchema: StructType = {
    StructType(List(
      StructField("value", IntegerType, nullable = false)
    ))
  }

  private def createData(data: Seq[Int]): RDD[Row] = {
    sparkContext.parallelize(data.map(Row(_)))
  }

  private def executeOperation(context: ExecutionContext, operation: DOperation)
                              (dataFrame: DataFrame): (DataFrame, DataFrame) = {
    val operationResult = operation.executeUntyped(Vector[DOperable](dataFrame))(context)
    val df1 = operationResult.head.asInstanceOf[DataFrame]
    val df2 = operationResult.last.asInstanceOf[DataFrame]
    (df1, df2)
  }

  def validateSplitProperties(inputDF: DataFrame, outputDF1: DataFrame, outputDF2: DataFrame): Unit = {
    val dfCount = inputDF.sparkDataFrame.count()
    val df1Count = outputDF1.sparkDataFrame.count()
    val df2Count = outputDF2.sparkDataFrame.count()
    val rowsDf = inputDF.sparkDataFrame.collectAsList().asScala
    val rowsDf1 = outputDF1.sparkDataFrame.collectAsList().asScala
    val rowsDf2 = outputDF2.sparkDataFrame.collectAsList().asScala
    val intersect = rowsDf1.intersect(rowsDf2)
    intersect.size shouldBe 0
    (df1Count + df2Count) shouldBe dfCount
    rowsDf.toSet shouldBe rowsDf1.toSet.union(rowsDf2.toSet)
  }
}
Example 37
Source File: LawlessTraversalsSpec.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.lf.data

import ImmArray.ImmArraySeq
import org.scalatest.{Inside, Matchers, WordSpec}
import org.scalatest.prop.GeneratorDrivenPropertyChecks

class LawlessTraversalsSpec
    extends WordSpec
    with Matchers
    with Inside
    with GeneratorDrivenPropertyChecks {
  import LawlessTraversals._

  "traverseEitherStrictly" should {
    "satisfy identity, elementwise" in forAll { xs: Seq[Int] =>
      xs traverseEitherStrictly (Right(_)) should ===(Right(xs))
    }

    "preserve class if the implementation bothered to set it up" in {
      val classySeqs = Seq[Seq[Int]](List(1), Vector(2), ImmArraySeq(3))
      // we need to use patmat, not == or shouldBe, because patmat is stricter
      inside(classySeqs map (_ traverseEitherStrictly (Right(_)))) {
        case Seq(_, Right(List(_)), _) => fail("lists are not vectors")
        case Seq(Right(List(1)), Right(Vector(2)), Right(ImmArraySeq(3))) =>
      }
    }
  }
}
Example 38
Source File: TransactionFilterSpec.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.ledger.javaapi.data

import com.daml.ledger.javaapi.data.Generators._
import org.scalatest.prop.GeneratorDrivenPropertyChecks
import org.scalatest.{FlatSpec, Matchers}

class TransactionFilterSpec extends FlatSpec with Matchers with GeneratorDrivenPropertyChecks {

  implicit override val generatorDrivenConfig: PropertyCheckConfiguration =
    PropertyCheckConfiguration(minSize = 1, sizeRange = 3)

  "TransactionFilter.fromProto" should "convert Protoc-generated instances to data instances" in forAll(
    transactionFilterGen) { transactionFilter =>
    val converted = TransactionFilter.fromProto(transactionFilter)
    TransactionFilter.fromProto(converted.toProto) shouldEqual converted
  }
}
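The generatorDrivenConfig override above is how a whole suite changes its defaults: minSize sets the smallest size hint handed to sized generators, and sizeRange bounds how far above it the hint may grow. A hedged sketch of the same idiom in isolation (the suite and property below are illustrative):

import org.scalatest.{FlatSpec, Matchers}
import org.scalatest.prop.GeneratorDrivenPropertyChecks

class SuiteConfigSketch extends FlatSpec with Matchers with GeneratorDrivenPropertyChecks {
  implicit override val generatorDrivenConfig: PropertyCheckConfiguration =
    PropertyCheckConfiguration(minSize = 1, sizeRange = 3)

  // Sized generators such as the default List generator now receive
  // size hints between 1 and 4 (minSize + sizeRange), so the lists stay small.
  "lists under this configuration" should "stay small" in forAll { xs: List[Int] =>
    xs.length should be <= 4
  }
}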
Example 39
Source File: CommandSpec.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.ledger.javaapi.data

import com.daml.ledger.javaapi.data.Generators._
import org.scalatest.prop.GeneratorDrivenPropertyChecks
import org.scalatest.{FlatSpec, Matchers}

class CommandSpec extends FlatSpec with Matchers with GeneratorDrivenPropertyChecks {

  implicit override val generatorDrivenConfig: PropertyCheckConfiguration =
    PropertyCheckConfiguration(minSize = 1, sizeRange = 3)

  "Command.fromProto" should "convert Protoc-generated instances to data instances" in forAll(
    commandGen) { cmd =>
    val converted = Command.fromProtoCommand(cmd)
    Command.fromProtoCommand(converted.toProtoCommand) shouldEqual converted
  }
}
Example 40
Source File: GetActiveContractsRequestSpec.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.ledger.javaapi.data

import com.daml.ledger.javaapi.data.Generators._
import org.scalatest.prop.GeneratorDrivenPropertyChecks
import org.scalatest.{FlatSpec, Matchers}

class GetActiveContractsRequestSpec
    extends FlatSpec
    with Matchers
    with GeneratorDrivenPropertyChecks {

  "GetActiveContractsRequestSpec.fromProto" should "convert Protoc-generated instances to data instances" in forAll(
    getActiveContractRequestGen) { activeContractRequest =>
    val converted = GetActiveContractsRequest.fromProto(activeContractRequest)
    GetActiveContractsRequest.fromProto(converted.toProto) shouldEqual converted
  }
}
Example 41
Source File: TimestampSpec.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.ledger.javaapi.data

import java.time.Instant

import org.scalacheck.Gen
import org.scalatest.prop.GeneratorDrivenPropertyChecks
import org.scalatest.{FlatSpec, Matchers}

class TimestampSpec extends FlatSpec with Matchers with GeneratorDrivenPropertyChecks {

  behavior of "Timestamp"

  it should "be built from a java.time.Instant" in forAll(
    Gen.oneOf(0L, 1L, 10L, 100L, 1000L, Instant.now().toEpochMilli)) { millis =>
    val instant = java.time.Instant.ofEpochMilli(millis)
    withClue(
      s"input: ${millis}ms instant.getEpochSeconds: ${instant.getEpochSecond} instant.getNanos: ${instant.getNano} issue: ") {
      Timestamp
        .fromInstant(instant)
        .getMicroseconds shouldBe (millis * 1000) // getValue gives back microseconds
    }
  }

  it should "lose nanoseconds when doing TimeStamp.fromInstant(_).toInstant()" in {
    val instant = java.time.Instant.ofEpochSecond(1, 42)
    val timestamp = Timestamp.fromInstant(instant)
    timestamp.toInstant shouldBe Instant.ofEpochSecond(1, 0)
  }
}
Example 42
Source File: EventSpec.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.ledger.javaapi.data

import com.daml.ledger.javaapi.data.Generators._
import org.scalatest.prop.GeneratorDrivenPropertyChecks
import org.scalatest.{FlatSpec, Matchers}

class EventSpec extends FlatSpec with Matchers with GeneratorDrivenPropertyChecks {

  implicit override val generatorDrivenConfig: PropertyCheckConfiguration =
    PropertyCheckConfiguration(minSize = 1, sizeRange = 3)

  "Event.fromProto" should "convert Protoc-generated instances to data instances" in forAll(
    eventGen) { event =>
    val converted = Event.fromProtoEvent(event)
    Event.fromProtoEvent(converted.toProtoEvent) shouldEqual converted
  }

  "CreatedEvents" should "be protected from mutations of the parameters" in forAll(
    createdEventGen) { e =>
    val mutatingWitnesses = new java.util.ArrayList[String](e.getWitnessPartiesList)
    val mutatingSignatories = new java.util.ArrayList[String](e.getSignatoriesList)
    val mutatingObservers = new java.util.ArrayList[String](e.getObserversList)

    val event = new CreatedEvent(
      mutatingWitnesses,
      e.getEventId,
      Identifier.fromProto(e.getTemplateId),
      e.getContractId,
      Record.fromProto(e.getCreateArguments),
      java.util.Optional.empty(),
      java.util.Optional.empty(),
      mutatingSignatories,
      mutatingObservers
    )

    mutatingWitnesses.add("INTRUDER!")
    mutatingSignatories.add("INTRUDER!")
    mutatingObservers.add("INTRUDER!")

    event.getWitnessParties should not contain "INTRUDER!"
    event.getSignatories should not contain "INTRUDER!"
    event.getObservers should not contain "INTRUDER!"
  }

  "CreatedEvents" should "disallow mutation of its mutable fields" in forAll(createdEventGen) { e =>
    val event = new CreatedEvent(
      e.getWitnessPartiesList,
      e.getEventId,
      Identifier.fromProto(e.getTemplateId),
      e.getContractId,
      Record.fromProto(e.getCreateArguments),
      java.util.Optional.empty(),
      java.util.Optional.empty(),
      e.getSignatoriesList,
      e.getObserversList
    )

    an[UnsupportedOperationException] shouldBe thrownBy(event.getWitnessParties.add("INTRUDER!"))
    an[UnsupportedOperationException] shouldBe thrownBy(event.getSignatories.add("INTRUDER!"))
    an[UnsupportedOperationException] shouldBe thrownBy(event.getObservers.add("INTRUDER!"))
    an[UnsupportedOperationException] shouldBe thrownBy(event.getWitnessParties.remove(0))
    an[UnsupportedOperationException] shouldBe thrownBy(event.getSignatories.remove(0))
    an[UnsupportedOperationException] shouldBe thrownBy(event.getObservers.remove(0))
  }
}
Example 43
Source File: SomeArrayEqualsTest.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.lf.speedy

import org.scalatest.prop.GeneratorDrivenPropertyChecks
import org.scalatest.{WordSpec, Matchers}

@SuppressWarnings(Array("org.wartremover.warts.Any"))
class SomeArrayEqualsTest extends WordSpec with Matchers with GeneratorDrivenPropertyChecks {
  import com.daml.lf.speedy.{SomeArrayEquals => SAE}
  import SomeArrayEqualsTest._

  "equals" should {
    "distinguish same-arity but different classes" in {
      case class A() extends SAE
      case class B() extends SAE
      case class C(i: Int) extends SAE
      case class D(i: Int) extends SAE
      val a = A()
      a shouldBe a
      a shouldBe A()
      a should not be B()
      C(42) shouldBe C(42)
      C(42) should not be D(42)
    }

    "distinguish different arity" in {
      case class A() extends SAE
      case class B(i: Int) extends SAE
      A() should not be B(0)
    }

    "case different element data types properly" in forAll { tb: TestBlob =>
      import java.util.Arrays.copyOf
      tb shouldBe tb
      tb.copy() shouldBe tb
      if (tb.ai ne null) tb.copy(ai = copyOf(tb.ai, tb.ai.length)) shouldBe tb
      if (tb.as ne null) tb.copy(as = copyOf(tb.as, tb.as.length)) shouldBe tb
      if (tb.s ne null) tb.copy(s = new String(tb.s)) shouldBe tb
    }

    "detect varying Ints" in forAll { (tb: TestBlob, i: Int) =>
      whenever(i != tb.i) {
        tb.copy(i = i) should not be tb
      }
    }

    "detect varying Strings" in forAll { (tb: TestBlob, s: Option[String]) =>
      whenever(s != Option(tb.s)) {
        tb.copy(s = s.orNull) should not be tb
      }
    }

    "detect varying Int arrays" in forAll { (tb: TestBlob, oai: Option[Array[Int]]) =>
      whenever(oai.fold(tb.ai ne null)(ai => (tb.ai eq null) || !(ai sameElements tb.ai))) {
        tb.copy(ai = oai.orNull) should not be tb
      }
    }

    "detect varying String arrays" in forAll { (tb: TestBlob, oas: Option[Array[String]]) =>
      whenever(oas.fold(tb.as ne null)(as => (tb.as eq null) || !(as sameElements tb.as))) {
        tb.copy(as = oas.orNull) should not be tb
      }
    }
  }
}

object SomeArrayEqualsTest {
  import org.scalacheck.{Gen, Arbitrary}
  import Arbitrary.{arbitrary => arb}

  final case class TestBlob(i: Int, ai: Array[Int], as: Array[String], s: String)
      extends SomeArrayEquals

  val testBlobGen: Gen[TestBlob] =
    arb[(Int, Option[Array[Int]], Option[Array[String]], Option[String])]
      .map { case (i, ai, as, s) => TestBlob(i, ai.orNull, as.orNull, s.orNull) }

  implicit val testBlobArb: Arbitrary[TestBlob] = Arbitrary(testBlobGen)
}
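SomeArrayEqualsTest leans heavily on whenever, which discards generated cases that do not satisfy a precondition rather than failing on them. A small stand-alone sketch of the same guard (the property below is mine, not from the suite):

import org.scalatest.{Matchers, WordSpec}
import org.scalatest.prop.GeneratorDrivenPropertyChecks

class WheneverSketch extends WordSpec with Matchers with GeneratorDrivenPropertyChecks {
  "long division" should {
    "invert multiplication for non-zero divisors" in forAll { (a: Int, b: Int) =>
      // Cases with b == 0 are skipped, not reported as failures.
      // Widening to Long avoids Int overflow in the product.
      whenever(b != 0) {
        (a.toLong * b) / b shouldBe a.toLong
      }
    }
  }
}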
Example 44
Source File: LfVersionsSpec.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.lf

import org.scalacheck.Arbitrary.arbitrary
import org.scalacheck.{Arbitrary, Gen}
import org.scalatest.prop.GeneratorDrivenPropertyChecks
import org.scalatest.{Matchers, WordSpec}
import scalaz.NonEmptyList
import scalaz.scalacheck.ScalazArbitrary._

class LfVersionsSpec extends WordSpec with Matchers with GeneratorDrivenPropertyChecks {

  case class DummyVersion(value: Int) {
    def protoValue: String = value.toString
  }

  class DummyVersions(versions: NonEmptyList[DummyVersion])
      extends LfVersions[DummyVersion](versions)(_.protoValue)

  case class DummyError(msg: String)

  private def dummyVersionGen: Gen[DummyVersion] = arbitrary[Int].map(DummyVersion)

  implicit private val dummyVersionArb: Arbitrary[DummyVersion] = Arbitrary(dummyVersionGen)

  "LfVersions.acceptedVersions" should {
    "be otherVersions + defaultVersion" in forAll { vs: NonEmptyList[DummyVersion] =>
      val versions = new DummyVersions(vs)
      versions.acceptedVersions should ===(vs.list.toList)
      vs.list.toList.forall(v => versions.acceptedVersions.contains(v)) shouldBe true
    }
  }

  "LfVersions.decodeVersion" should {
    "return failure if passed version value is null, don't throw exception" in {
      val versions = new DummyVersions(NonEmptyList(DummyVersion(1)))
      versions.isAcceptedVersion(null) shouldBe None
    }

    "return failure if passed version value is an empty string, don't throw exception" in {
      val versions = new DummyVersions(NonEmptyList(DummyVersion(1)))
      versions.isAcceptedVersion("") shouldBe None
    }

    "return failure if passed version is not default and not supported" in forAll {
      (vs: NonEmptyList[DummyVersion], version: DummyVersion) =>
        whenever(!vs.list.toList.contains(version)) {
          val versions = new DummyVersions(vs)
          versions.acceptedVersions.contains(version) shouldBe false
          versions.isAcceptedVersion(version.protoValue) shouldBe None
        }
    }

    "return success if passed version is default" in forAll { default: DummyVersion =>
      val versions = new DummyVersions(NonEmptyList(default))
      versions.isAcceptedVersion(default.protoValue) shouldBe Some(default)
    }

    "return success if passed version is one of other versions" in forAll {
      (vs: NonEmptyList[DummyVersion], version: DummyVersion) =>
        val versions = new DummyVersions(version <:: vs)
        versions.acceptedVersions.contains(version) shouldBe true
        versions.isAcceptedVersion(version.protoValue) shouldBe Some(version)
    }
  }
}
Example 45
Source File: TryOpsTest.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.lf.data

import org.scalatest.{Matchers, WordSpec}
import org.scalatest.prop.GeneratorDrivenPropertyChecks
import com.daml.lf.data.TryOps.Bracket.bracket

import scala.util.{Failure, Success, Try}

class TryOpsTest extends WordSpec with Matchers with GeneratorDrivenPropertyChecks {

  "bracket should call clean after successful calculation" in forAll { (a: Int, b: Int) =>
    var calls = List.empty[String]

    def clean(x: Int): Try[Unit] = {
      calls = s"clean $x" :: calls
      Success(())
    }

    def add(x: Int)(y: Int): Try[Int] = {
      calls = s"add $x $y" :: calls
      Success(x + y)
    }

    val actual = bracket(Try(a))(clean).flatMap(add(b))

    actual shouldBe Success(a + b)
    calls.reverse shouldBe List(s"add $b $a", s"clean $a")
  }

  "bracket should fail if clean failed" in forAll { (a: Int, b: Int, e: Throwable) =>
    var calls = List.empty[String]

    def clean(x: Int): Try[Unit] = {
      calls = s"clean $x $e" :: calls
      Failure(e)
    }

    def add(x: Int)(y: Int): Try[Int] = {
      calls = s"add $x $y" :: calls
      Success(x + y)
    }

    val actual = bracket(Try(a))(clean).flatMap(add(b))

    actual shouldBe Failure(e)
    calls.reverse shouldBe List(s"add $b $a", s"clean $a $e")
  }

  "bracket should call clean if calculation fails" in forAll { (a: Int, b: Int, e: Throwable) =>
    var calls = List.empty[String]

    def clean(x: Int): Try[Unit] = {
      calls = s"clean $x" :: calls
      Success(())
    }

    def add(x: Int)(y: Int): Try[Int] = {
      calls = s"add $x $y" :: calls
      Failure(e)
    }

    val actual = bracket(Try(a))(clean).flatMap(add(b))

    actual shouldBe Failure(e)
    calls.reverse shouldBe List(s"add $b $a", s"clean $a")
  }

  "bracket should return calculation error if both calculation and clean fail" in forAll {
    (a: Int, b: Int, e1: Throwable, e2: Throwable) =>
      var calls = List.empty[String]

      def clean(x: Int): Try[Unit] = {
        calls = s"clean $x $e2" :: calls
        Failure(e2)
      }

      def add(x: Int)(y: Int): Try[Int] = {
        calls = s"add $x $y" :: calls
        Failure(e1)
      }

      val actual = bracket(Try(a))(clean).flatMap(add(b))

      actual shouldBe Failure(e1)
      calls.reverse shouldBe List(s"add $b $a", s"clean $a $e2")
  }
}
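The four properties above pin down bracket's contract: the cleanup always runs, and a failure in the calculation wins over a failure in the cleanup. A hedged reimplementation sketch of that contract (this is not the daml TryOps source, just one plausible way to satisfy the same laws with a slightly different call shape):

import scala.util.{Failure, Success, Try}

object BracketSketch {
  // Acquire, use, then always attempt release; a use-failure takes precedence
  // over a release-failure, matching the properties tested above.
  def bracket[A, B](acquire: Try[A])(release: A => Try[Unit])(use: A => Try[B]): Try[B] =
    acquire.flatMap { a =>
      val result = use(a)
      (result, release(a)) match {
        case (Success(b), Success(_)) => Success(b)
        case (Success(_), Failure(e)) => Failure(e)
        case (failure, _)             => failure
      }
    }
}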
Example 46
Source File: NameClashRecordVariantUT.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.codegen

import com.daml.sample.MyMain.NameClashRecordVariant
import NameClashRecordVariant.{NameClashRecordVariantA, NameClashRecordVariantB}
import com.daml.ledger.client.binding.{Primitive => P, Value}
import org.scalacheck.Arbitrary.arbitrary
import org.scalacheck.Gen
import org.scalatest.prop.GeneratorDrivenPropertyChecks
import org.scalatest.{Matchers, WordSpec}

class NameClashRecordVariantUT extends WordSpec with Matchers with GeneratorDrivenPropertyChecks {

  "generated variants have compatible read and write methods" in forAll(
    nameClashRecordVariantGen) { a1 =>
    val b = Value.encode(a1)
    val a2 = Value.decode[NameClashRecordVariant](b)
    Some(a1) shouldBe a2
  }

  def nameClashRecordVariantGen: Gen[NameClashRecordVariant] =
    Gen.oneOf(nameClashRecordVariantAGen, nameClashRecordVariantBGen)

  def nameClashRecordVariantAGen: Gen[NameClashRecordVariantA] =
    for {
      x <- arbitrary[P.Int64]
      y <- arbitrary[P.Int64]
      z <- arbitrary[P.Int64]
    } yield NameClashRecordVariantA(x, y, z)

  def nameClashRecordVariantBGen: Gen[NameClashRecordVariantB] =
    for {
      x <- arbitrary[P.Int64]
      y <- arbitrary[P.Int64]
      z <- arbitrary[P.Int64]
    } yield NameClashRecordVariantB(x, y, z)
}
Example 47
Source File: FrontStackSpec.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.lf.data

import com.daml.scalatest.{Unnatural, WordSpecCheckLaws}
import org.scalatest.prop.{GeneratorDrivenPropertyChecks, TableDrivenPropertyChecks}
import org.scalatest.{Matchers, WordSpec}
import scalaz.scalacheck.ScalazProperties
import scalaz.std.anyVal._

class FrontStackSpec
    extends WordSpec
    with Matchers
    with GeneratorDrivenPropertyChecks
    with TableDrivenPropertyChecks
    with WordSpecCheckLaws {
  import DataArbitrary._

  "apply" when {
    "1 element is provided" should {
      "behave the same as prepend" in forAll { x: Int =>
        FrontStack(x) should ===(x +: FrontStack.empty)
      }
    }

    "2 elements are provided" should {
      "behave the same as prepend" in forAll { (x: Int, y: Int) =>
        FrontStack(x, y) should ===(x +: y +: FrontStack.empty)
      }
    }

    "more than 2 elements are provided" should {
      "behave the same as prepend" in forAll { (x: Int, y: Int, z: Int, rest: Seq[Int]) =>
        FrontStack(x, y, z, rest: _*) should ===(
          ImmArray(Seq(x, y, z) ++ rest) ++: FrontStack.empty)
      }
    }

    "a sequence of elements is provided" should {
      "behave the same as prepend" in forAll { xs: Seq[Int] =>
        FrontStack(xs) should ===(ImmArray(xs) ++: FrontStack.empty)
      }
    }
  }

  "++:" should {
    "yield equal results to +:" in forAll { (ia: ImmArray[Int], fs: FrontStack[Int]) =>
      (ia ++: fs) should ===(ia.toSeq.foldRight(fs)(_ +: _))
    }
  }

  "toImmArray" should {
    "yield same elements as iterator" in forAll { fs: FrontStack[Int] =>
      fs.toImmArray should ===(fs.iterator.to[ImmArray])
    }
  }

  "length" should {
    "be tracked accurately during building" in forAll { fs: FrontStack[Int] =>
      fs.length should ===(fs.iterator.length)
    }
  }

  "slowApply" should {
    "throw when out of bounds" in forAll { fs: FrontStack[Int] =>
      an[IndexOutOfBoundsException] should be thrownBy fs.slowApply(-1)
      an[IndexOutOfBoundsException] should be thrownBy fs.slowApply(fs.length)
    }

    "preserve Seq's apply" in forAll { fs: FrontStack[Int] =>
      val expected = Table(
        ("value", "index"),
        fs.toImmArray.toSeq.zipWithIndex: _*
      )
      forEvery(expected) { (value, index) =>
        fs.slowApply(index) should ===(value)
      }
    }
  }

  "toBackStack" should {
    "be retracted by toFrontStack" in forAll { fs: FrontStack[Int] =>
      fs.toBackStack.toFrontStack should ===(fs)
    }
  }

  "Traverse instance" should {
    checkLaws(ScalazProperties.traverse.laws[FrontStack])
  }

  "Equal instance" should {
    checkLaws(ScalazProperties.equal.laws[FrontStack[Unnatural[Int]]])
  }
}
Example 48
Source File: ApiValueToLfValueConverterTest.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.http
package util

import com.daml.lf.data.{Numeric => LfNumeric}
import com.daml.lf.value.test.TypedValueGenerators.genAddend
import com.daml.lf.value.{Value => V}
import com.daml.platform.participant.util.LfEngineToApi.lfValueToApiValue
import org.scalacheck.{Arbitrary, Gen}
import org.scalatest.prop.GeneratorDrivenPropertyChecks
import org.scalatest.{Matchers, WordSpec}
import scalaz.Equal
import scalaz.syntax.bifunctor._
import scalaz.std.option._
import scalaz.std.tuple._

class ApiValueToLfValueConverterTest
    extends WordSpec
    with Matchers
    with GeneratorDrivenPropertyChecks {
  import ApiValueToLfValueConverterTest._

  private[this] implicit val arbCid: Arbitrary[CidSrc] = Arbitrary(
    Gen.alphaStr map (t => V.ContractId.V0 assertFromString ('#' +: t)))

  "apiValueToLfValue" should {
    import ApiValueToLfValueConverter.apiValueToLfValue

    "retract lfValueToApiValue" in forAll(genAddend, minSuccessful(100)) { va =>
      import va.injshrink
      implicit val arbInj: Arbitrary[va.Inj[CidSrc]] = va.injarb
      forAll(minSuccessful(20)) { v: va.Inj[CidSrc] =>
        val vv = va.inj(v)
        val roundTrip =
          lfValueToApiValue(true, vv).right.toOption flatMap (x =>
            apiValueToLfValue(x).toMaybe.toOption)
        assert(Equal[Option[V[Cid]]].equal(roundTrip, Some(vv)))
      }
    }
  }
}

object ApiValueToLfValueConverterTest {

  type Cid = V.ContractId
  type CidSrc = V.ContractId.V0

  // Numerics are normalized when converting from api to lf,
  // so we have to relax numeric equality
  private implicit def eqValue: Equal[V[Cid]] = { (l, r) =>
    V.`Value Equal instance`[Cid]
      .contramap[V[Cid]](
        mapNumeric(_, n => LfNumeric assertFromUnscaledBigDecimal n.stripTrailingZeros))
      .equal(l, r)
  }

  private[this] def mapNumeric[C](fa: V[C], f: LfNumeric => LfNumeric): V[C] = {
    def go(fa: V[C]): V[C] = fa match {
      case V.ValueNumeric(m) => V.ValueNumeric(f(m))
      case _: V.ValueCidlessLeaf | V.ValueContractId(_) => fa
      case r @ V.ValueRecord(_, fields) => r copy (fields = fields map (_ rightMap go))
      case v @ V.ValueVariant(_, _, value) => v copy (value = go(value))
      case s @ V.ValueStruct(fields) => s copy (fields = fields map (_ rightMap go))
      case V.ValueList(fs) => V.ValueList(fs map go)
      case V.ValueOptional(o) => V.ValueOptional(o map go)
      case V.ValueTextMap(m) => V.ValueTextMap(m mapValue go)
      case V.ValueGenMap(m) => V.ValueGenMap(m map (_ bimap (go, go)))
    }
    go(fa)
  }
}
Example 49
Source File: InsertDeleteStepTest.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.http.util

import com.daml.scalatest.FlatSpecCheckLaws
import org.scalatest.prop.GeneratorDrivenPropertyChecks
import org.scalatest.{FlatSpec, Matchers}
import scalaz.scalacheck.ScalazProperties
import scalaz.syntax.semigroup._
import scalaz.{@@, Equal, Tag}

class InsertDeleteStepTest
    extends FlatSpec
    with Matchers
    with FlatSpecCheckLaws
    with GeneratorDrivenPropertyChecks {
  import InsertDeleteStepTest._

  override implicit val generatorDrivenConfig: PropertyCheckConfiguration =
    PropertyCheckConfiguration(minSuccessful = 100)

  behavior of "append monoid"

  checkLaws(ScalazProperties.monoid.laws[IDS])

  behavior of "append"

  it should "never insert a deleted item" in forAll { (x: IDS, y: IDS) =>
    val xy = x |+| y.copy(inserts = y.inserts filterNot Cid.subst(x.deletes.keySet))
    xy.inserts.toSet intersect Cid.subst(xy.deletes.keySet) shouldBe empty
  }

  it should "preserve every left delete" in forAll { (x: IDS, y: IDS) =>
    val xy = x |+| y
    xy.deletes.keySet should contain allElementsOf x.deletes.keySet
  }

  it should "preserve at least right deletes absent in left inserts" in forAll { (x: IDS, y: IDS) =>
    val xy = x |+| y
    // xy.deletes _may_ contain x.inserts; it is semantically irrelevant
    xy.deletes.keySet should contain allElementsOf (y.deletes.keySet -- Cid.unsubst(x.inserts))
  }

  it should "preserve append absent deletes" in forAll { (x: Vector[Cid], y: Vector[Cid]) =>
    val xy = InsertDeleteStep(x, Map.empty[String, Unit]) |+| InsertDeleteStep(y, Map.empty)
    xy.inserts should ===(x ++ y)
  }
}

object InsertDeleteStepTest {
  import org.scalacheck.{Arbitrary, Gen, Shrink}
  import Arbitrary.arbitrary

  type IDS = InsertDeleteStep[Unit, Cid]
  sealed trait Alpha
  type Cid = String @@ Alpha
  val Cid = Tag.of[Alpha]

  implicit val `Alpha arb`: Arbitrary[Cid] = Cid subst Arbitrary(
    Gen.alphaUpperChar map (_.toString))

  private[util] implicit val `test Cid`: InsertDeleteStep.Cid[Cid] = Cid.unwrap

  implicit val `IDS arb`: Arbitrary[IDS] = Arbitrary(arbitrary[(Vector[Cid], Map[Cid, Unit])] map {
    case (is, ds) =>
      InsertDeleteStep(is filterNot ds.keySet, Cid.unsubst[Map[?, Unit], String](ds))
  })

  implicit val `IDS shr`: Shrink[IDS] =
    Shrink.xmap[(Vector[Cid], Map[Cid, Unit]), IDS](
      { case (is, ds) => InsertDeleteStep(is, Cid.unsubst[Map[?, Unit], String](ds)) },
      step => (step.inserts, Cid.subst[Map[?, Unit], String](step.deletes)),
    )

  implicit val `IDS eq`: Equal[IDS] = Equal.equalA
}
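This suite supplies both an Arbitrary and a Shrink for its composite step type, so counterexamples shrink structurally rather than being reported at full size. A reduced sketch of that pairing for a simple wrapper type (Box and its field are hypothetical):

import org.scalacheck.{Arbitrary, Gen, Shrink}

final case class Box(items: Vector[Int])

object BoxInstances {
  implicit val boxArb: Arbitrary[Box] =
    Arbitrary(Gen.containerOf[Vector, Int](Gen.choose(0, 9)).map(Box(_)))

  // Shrink.xmap lifts the stock Vector shrinker through the wrapper, so a
  // failing Box shrinks by shrinking its contents.
  implicit val boxShrink: Shrink[Box] =
    Shrink.xmap[Vector[Int], Box](Box(_), _.items)
}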
Example 50
Source File: LedgerOffsetUtilTest.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.http.util

import com.daml.scalatest.FlatSpecCheckLaws
import com.daml.http.Generators
import com.daml.ledger.api.v1.ledger_offset.LedgerOffset
import org.scalatest.prop.GeneratorDrivenPropertyChecks
import org.scalatest.{FlatSpec, Matchers}
import scalaz.scalacheck.ScalazProperties

class LedgerOffsetUtilTest
    extends FlatSpec
    with Matchers
    with FlatSpecCheckLaws
    with GeneratorDrivenPropertyChecks {

  implicit override val generatorDrivenConfig: PropertyCheckConfiguration =
    PropertyCheckConfiguration(minSuccessful = 100)

  import LedgerOffsetUtilTest._

  behavior of LedgerOffsetUtil.AbsoluteOffsetOrdering.getClass.getSimpleName

  checkLaws(ScalazProperties.order.laws[LedgerOffset.Value.Absolute])
}

object LedgerOffsetUtilTest {
  import org.scalacheck.Arbitrary

  implicit val arbAbsoluteOffset: Arbitrary[LedgerOffset.Value.Absolute] = Arbitrary(
    Generators.absoluteLedgerOffsetVal)

  implicit val scalazOrder: scalaz.Order[LedgerOffset.Value.Absolute] =
    scalaz.Order.fromScalaOrdering(LedgerOffsetUtil.AbsoluteOffsetOrdering)
}
Example 51
Source File: FlowUtilTest.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.http.util

import akka.NotUsed
import akka.actor.ActorSystem
import akka.stream.Materializer
import akka.stream.scaladsl.Source
import org.scalacheck.{Gen, Arbitrary}
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.prop.GeneratorDrivenPropertyChecks
import org.scalatest.{FlatSpec, Matchers}
import scalaz.{-\/, \/, \/-}

import scala.concurrent.Future

class FlowUtilTest
    extends FlatSpec
    with ScalaFutures
    with Matchers
    with GeneratorDrivenPropertyChecks {
  import FlowUtil._

  implicit val asys: ActorSystem = ActorSystem(this.getClass.getSimpleName)
  implicit val materializer: Materializer = Materializer(asys)

  "allowOnlyFirstInput" should "pass 1st message through and replace all others with errors" in forAll(
    nonEmptyVectorOfInts) { xs: Vector[Int] =>
    val error = "Error"
    val errorNum = Math.max(xs.size - 1, 0)
    val expected: Vector[String \/ Int] =
      xs.take(1).map(\/-(_)) ++ Vector.fill(errorNum)(-\/(error))
    val input: Source[String \/ Int, NotUsed] =
      Source.fromIterator(() => xs.toIterator).map(\/-(_))

    val actualF: Future[Vector[String \/ Int]] =
      input
        .via(allowOnlyFirstInput[String, Int](error))
        .runFold(Vector.empty[String \/ Int])(_ :+ _)

    whenReady(actualF) { actual =>
      actual shouldBe expected
    }
  }

  private val nonEmptyVectorOfInts: Gen[Vector[Int]] =
    Gen.nonEmptyBuildableOf[Vector[Int], Int](Arbitrary.arbitrary[Int])
}
Example 52
Source File: ContractStreamStepTest.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.http
package util

import com.daml.scalatest.FlatSpecCheckLaws
import org.scalatest.prop.GeneratorDrivenPropertyChecks
import org.scalatest.prop.TableDrivenPropertyChecks
import org.scalatest.{FlatSpec, Matchers}
import scalaz.scalacheck.ScalaCheckBinding._
import scalaz.scalacheck.ScalazProperties
import scalaz.syntax.apply._
import scalaz.syntax.semigroup._
import scalaz.{@@, Equal, Tag}

class ContractStreamStepTest
    extends FlatSpec
    with FlatSpecCheckLaws
    with Matchers
    with GeneratorDrivenPropertyChecks
    with TableDrivenPropertyChecks {
  import ContractStreamStepTest._, ContractStreamStep._
  import InsertDeleteStepTest._

  override implicit val generatorDrivenConfig: PropertyCheckConfiguration =
    PropertyCheckConfiguration(minSuccessful = 100)

  behavior of "append"

  it should "be associative for valid streams" in forAll(validStreamGen) { csses =>
    whenever(csses.size >= 3) {
      forEvery(
        Table(("a", "b", "c"), csses.sliding(3).map { case Seq(a, b, c) => (a, b, c) }.toSeq: _*)) {
        case (a, b, c) =>
          (a |+| (b |+| c)) should ===((a |+| b) |+| c)
      }
    }
  }

  it should "report the last offset" in forAll { (a: CSS, b: CSS) =>
    def off(css: ContractStreamStep[_, _]) = css match {
      case Acs(_) => None
      case LiveBegin(off) => off.toOption
      case Txn(_, off) => Some(off)
    }

    off(a |+| b) should ===(off(b) orElse off(a))
  }

  it should "preserve append across toInsertDelete" in forAll { (a: CSS, b: CSS) =>
    (a |+| b).toInsertDelete should ===(a.toInsertDelete |+| b.toInsertDelete)
  }

  behavior of "append semigroup"

  checkLaws(ScalazProperties.semigroup.laws[CSS])
}

object ContractStreamStepTest {
  import InsertDeleteStepTest._, InsertDeleteStep.Inserts, ContractStreamStep._
  import org.scalacheck.{Arbitrary, Gen}
  import Arbitrary.arbitrary

  type CSS = ContractStreamStep[Unit, Cid]

  private val offGen: Gen[domain.Offset] = Tag subst Tag.unsubst(arbitrary[String @@ Alpha])
  private val acsGen = arbitrary[Inserts[Cid]] map (Acs(_))
  private val noAcsLBGen = Gen const LiveBegin(LedgerBegin)
  private val postAcsGen = offGen map (o => LiveBegin(AbsoluteBookmark(o)))
  private val txnGen = ^(arbitrary[IDS], offGen)(Txn(_, _))

  private val validStreamGen: Gen[Seq[CSS]] = for {
    beforeAfter <- Gen.zip(
      Gen.containerOf[Vector, CSS](acsGen),
      Gen.containerOf[Vector, CSS](txnGen))
    (acsSeq, txnSeq) = beforeAfter
    liveBegin <- if (acsSeq.isEmpty) noAcsLBGen else postAcsGen
  } yield (acsSeq :+ liveBegin) ++ txnSeq

  private implicit val `CSS eq`: Equal[CSS] = Equal.equalA

  private implicit val `anyCSS arb`: Arbitrary[CSS] =
    Arbitrary(Gen.frequency((4, acsGen), (1, noAcsLBGen), (1, postAcsGen), (4, txnGen)))
}
Example 53
Source File: GeneratorsTest.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.http

import com.daml.http.Generators.genDuplicateApiIdentifiers
import org.scalatest.prop.GeneratorDrivenPropertyChecks
import org.scalatest.{FlatSpec, Matchers}

@SuppressWarnings(Array("org.wartremover.warts.NonUnitStatements"))
class GeneratorsTest extends FlatSpec with Matchers with GeneratorDrivenPropertyChecks {

  implicit override val generatorDrivenConfig: PropertyCheckConfiguration =
    PropertyCheckConfiguration(minSuccessful = 10000)

  import org.scalacheck.Shrink.shrinkAny

  "Generators.genDuplicateApiIdentifiers" should "generate API Identifiers with the same moduleName and entityName" in
    forAll(genDuplicateApiIdentifiers) { ids =>
      ids.size should be >= 2
      val (packageIds, moduleNames, entityNames) =
        ids.foldLeft((Set.empty[String], Set.empty[String], Set.empty[String])) { (b, a) =>
          (b._1 + a.packageId, b._2 + a.moduleName, b._3 + a.entityName)
        }

      packageIds.size shouldBe ids.size
      moduleNames.size shouldBe 1
      entityNames.size shouldBe 1
    }
}
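Note the import of org.scalacheck.Shrink.shrinkAny above: it installs a no-op shrinker, so a failing case is reported exactly as generated. That matters when shrinking would break an invariant the generator guarantees, here the duplicated module and entity names. In isolation the idiom looks like this (the property itself is illustrative):

import org.scalacheck.Shrink.shrinkAny // disable shrinking for all types in scope
import org.scalatest.{FlatSpec, Matchers}
import org.scalatest.prop.GeneratorDrivenPropertyChecks

class NoShrinkSketch extends FlatSpec with Matchers with GeneratorDrivenPropertyChecks {
  "concatenation" should "add lengths" in forAll { (a: String, b: String) =>
    // On failure, ScalaCheck would normally shrink a and b toward "";
    // with shrinkAny in scope the original counterexample is kept.
    (a + b).length shouldBe a.length + b.length
  }
}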
Example 54
Source File: PrimitiveSpec.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.ledger.client.binding

import java.time.{Instant, LocalDate}

import org.scalacheck.Gen
import org.scalatest.{WordSpec, Matchers}
import org.scalatest.prop.GeneratorDrivenPropertyChecks
import shapeless.test.illTyped

import com.daml.ledger.client.binding.{Primitive => P}

class PrimitiveSpec extends WordSpec with Matchers with GeneratorDrivenPropertyChecks {
  import PrimitiveSpec._

  "Primitive types" when {
    "defined concretely" should {
      "have nice companion aliases" in {
        P.List: collection.generic.TraversableFactory[P.List]
      }
    }
    "defined abstractly" should {
      "carry their phantoms" in {
        def check[A, B]() = {
          illTyped(
            "implicitly[P.ContractId[A] =:= P.ContractId[B]]",
            "Cannot prove that .*ContractId\\[A\\] =:= .*ContractId\\[B\\].")
          illTyped(
            "implicitly[P.TemplateId[A] =:= P.TemplateId[B]]",
            "Cannot prove that .*TemplateId\\[A\\] =:= .*TemplateId\\[B\\].")
          illTyped(
            "implicitly[P.Update[A] =:= P.Update[B]]",
            "Cannot prove that .*Update\\[A\\] =:= .*Update\\[B\\].")
        }
        check[Unit, Unit]()
      }
    }
  }

  "Date.fromLocalDate" should {
    import ValueSpec.dateArb

    "pass through existing dates" in forAll { d: P.Date =>
      P.Date.fromLocalDate(d: LocalDate) shouldBe Some(d)
    }

    "be idempotent" in forAll(anyLocalDateGen) { d =>
      val od2 = P.Date.fromLocalDate(d)
      od2 flatMap (P.Date.fromLocalDate(_: LocalDate)) shouldBe od2
    }

    "prove MIN, MAX are valid" in {
      import P.Date.{MIN, MAX}
      P.Date.fromLocalDate(MIN: LocalDate) shouldBe Some(MIN)
      P.Date.fromLocalDate(MAX: LocalDate) shouldBe Some(MAX)
    }
  }

  "Timestamp.discardNanos" should {
    import ValueSpec.timestampArb

    "pass through existing times" in forAll { t: P.Timestamp =>
      P.Timestamp.discardNanos(t: Instant) shouldBe Some(t)
    }

    "be idempotent" in forAll(anyInstantGen) { i =>
      val oi2 = P.Timestamp.discardNanos(i)
      oi2 flatMap (P.Timestamp.discardNanos(_: Instant)) shouldBe oi2
    }

    "prove MIN, MAX are valid" in {
      import P.Timestamp.{MIN, MAX}
      P.Timestamp.discardNanos(MIN: Instant) shouldBe Some(MIN)
      P.Timestamp.discardNanos(MAX: Instant) shouldBe Some(MAX)
    }

    "preapprove values for TimestampConversion.instantToMicros" in forAll(anyInstantGen) { i =>
      P.Timestamp.discardNanos(i) foreach { t =>
        noException should be thrownBy com.daml.api.util.TimestampConversion.instantToMicros(t)
      }
    }
  }
}

object PrimitiveSpec {
  private val anyLocalDateGen: Gen[LocalDate] =
    Gen.choose(LocalDate.MIN.toEpochDay, LocalDate.MAX.toEpochDay) map LocalDate.ofEpochDay

  private val anyInstantGen: Gen[Instant] =
    Gen
      .zip(
        Gen.choose(Instant.MIN.getEpochSecond, Instant.MAX.getEpochSecond),
        Gen.choose(0L, 999999999))
      .map { case (s, n) => Instant.ofEpochSecond(s, n) }
}
Example 55
Source File: GenEncodingSpec.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.ledger.client.binding.encoding

import com.daml.ledger.client.binding.{Primitive => P}
import org.scalatest.{Succeeded, WordSpec}
import org.scalatest.prop.GeneratorDrivenPropertyChecks
import scalaz.Show

class GenEncodingSpec extends WordSpec with GeneratorDrivenPropertyChecks {
  import ShowEncoding.Implicits._

  implicit override val generatorDrivenConfig = PropertyCheckConfiguration(minSuccessful = 10000)

  "P.Text arbitrary Gen should not generate \\u0000, PostgreSQL does not like it" in forAll(
    GenEncoding.postgresSafe.primitive.valueText) { text: P.Text =>
    val show: Show[P.Text] = implicitly
    if (text.forall(_ != '\u0000')) Succeeded
    else
      fail(
        s"P.Text generator produced a string with unexpected character, text: ${show.show(text)}")
  }
}
Example 56
Source File: ShowUnicodeEscapedStringSpec.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.ledger.client.binding.encoding

import com.daml.ledger.client.binding.encoding.EncodingUtil.normalize
import org.apache.commons.text.StringEscapeUtils
import org.scalatest.prop.GeneratorDrivenPropertyChecks
import org.scalatest.{Assertion, Matchers, WordSpec}
import scalaz.Cord

class ShowUnicodeEscapedStringSpec
    extends WordSpec
    with Matchers
    with GeneratorDrivenPropertyChecks {

  implicit override val generatorDrivenConfig = PropertyCheckConfiguration(minSuccessful = 10000)

  "should unicode-escape all non-ascii chars in the format that can compile back to original string" in {
    "scho\u0308n" should !==("schön")
    normalize("scho\u0308n") should ===(normalize("schön"))
    println(ShowUnicodeEscapedString.show("scho\u0308n"))
    ShowUnicodeEscapedString.show("scho\u0308n").toString should ===("\"scho\\u0308n\"")
    println(ShowUnicodeEscapedString.show("schön"))
    ShowUnicodeEscapedString.show("schön").toString should ===("\"sch\\u00F6n\"")
    "sch\u00F6n" should ===("schön")
    "\u00F6" should ===("ö")
  }

  "normalizing unicode string multiple times does not change it" in {
    "scho\u0308n" should !==("schön")
    normalize("scho\u0308n") should ===(normalize("schön"))
    normalize(normalize("scho\u0308n")) should ===(normalize("schön"))
    normalize("scho\u0308n") should ===(normalize(normalize("schön")))
  }

  "ASCII slash should be unicode escaped" in {
    ShowUnicodeEscapedString.show("\\").toString.getBytes should ===("\"\\u005C\"".getBytes)
  }

  "unicode escaped string can be interpreted back to original string one example" in {
    testUnicodeEscapedStringCanBeUnescapedBackToOriginalString("scho\u0308n")
  }

  "unicode escaped zero can be interpreted back" in {
    testUnicodeEscapedStringCanBeUnescapedBackToOriginalString("\u0000")
  }

  "backslash followed by zero caused some issues" in {
    testUnicodeEscapedStringCanBeUnescapedBackToOriginalString("\\\u0000")
  }

  "any unicode escaped string can be interpreted back to original string" in forAll { s: String =>
    testUnicodeEscapedStringCanBeUnescapedBackToOriginalString(s)
  }

  private def testUnicodeEscapedStringCanBeUnescapedBackToOriginalString(s0: String): Assertion = {
    val s1: Cord = ShowUnicodeEscapedString.show(s0)
    val s2: String = StringEscapeUtils.unescapeJava(removeWrappingQuotes(s1.toString))
    s2.getBytes should ===(s0.getBytes)
  }

  private def removeWrappingQuotes(s: String): String = {
    require(s.length >= 2)
    s.substring(1, s.length - 1)
  }
}
Example 57
Source File: ShrinkEncodingSpec.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.ledger.client.binding.encoding

import com.daml.ledger.client.binding.{Primitive => P}
import org.scalatest.WordSpec
import org.scalatest.prop.GeneratorDrivenPropertyChecks

class ShrinkEncodingSpec extends WordSpec with GeneratorDrivenPropertyChecks {

  "ShrinkEncoding.primitive.valueParty should not generate \\u0000, PostgreSQL does not like them" in forAll(
    GenEncoding.primitive.valueParty) { p: P.Party =>
    import EqualityEncoding.Implicits._
    import ShowEncoding.Implicits._
    import com.daml.scalatest.CustomMatcher._
    import scalaz.std.AllInstances._

    val list: List[P.Party] = ShrinkEncoding.primitive.valueParty.shrink(p).toList
    list.forall { a: P.Party =>
      val str: String = P.Party.unwrap(a)
      str.forall(_.isLetterOrDigit)
    } should_=== true

    val lastP: Option[P.Party] = list.lastOption
    lastP should_=== Some(P.Party(""))
  }

  "GenEncoding.primitive.valueParty should not generate \\u0000, PostgreSQL does not like them" in forAll(
    GenEncoding.primitive.valueParty) { p: P.Party =>
    val str: String = P.Party.unwrap(p)
    if (str.contains("\u0000")) {
      fail(s"Party contains illegal chars: ${ShowEncoding.primitive.valueParty.show(p)}")
    }
  }
}
Example 58
Source File: CustomMatcherSpec.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.scalatest

import org.scalacheck.Arbitrary.arbitrary
import org.scalacheck.Gen
import org.scalatest.WordSpec
import org.scalatest.prop.GeneratorDrivenPropertyChecks
import scalaz.{Equal, Show}

class CustomMatcherSpec extends WordSpec with GeneratorDrivenPropertyChecks {

  implicit override val generatorDrivenConfig: PropertyCheckConfiguration =
    PropertyCheckConfiguration(minSuccessful = 10000)

  "make sure it works comparing ints" in {
    import com.daml.scalatest.CustomMatcher._
    import scalaz.std.anyVal._

    CustomMatcherOps(10) should_=== 10
    CustomMatcherOps(10) should_=/= 11

    10 should_=== 10
    10 should_=/= 11
  }

  case class Dummy(a: String, b: Int, c: BigDecimal)

  lazy val genDummy: Gen[Dummy] = for {
    a <- arbitrary[String]
    b <- arbitrary[Int]
    c <- arbitrary[BigDecimal]
  } yield Dummy(a, b, c)

  lazy val genPairOfNonEqualDummies: Gen[(Dummy, Dummy)] = {
    def genSetOf2: Gen[Set[Dummy]] =
      Gen.buildableOfN[Set[Dummy], Dummy](2, genDummy).filter(_.size == 2)

    genSetOf2.map(_.toSeq).map {
      case Seq(a, b) => (a, b)
      case a @ _ => sys.error(s"Should never happen: $a")
    }
  }

  implicit val dummyEqual: Equal[Dummy] = Equal.equalA
  implicit val dummyShow: Show[Dummy] = Show.showA

  "make sure it works comparing case classes with custom Show and Equal" in forAll(
    genPairOfNonEqualDummies) {
    case (a, b) =>
      import com.daml.scalatest.CustomMatcher._

      a should_=== a
      a should_=== a.copy()
      a should_=/= b
  }
}
Example 59
Source File: VoidValueRefSpec.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.ledger.client.binding

import org.scalatest.{WordSpec, Matchers}
import org.scalatest.prop.GeneratorDrivenPropertyChecks

class VoidValueRefSpec extends WordSpec with Matchers with GeneratorDrivenPropertyChecks {

  "VoidValueRef subclasses" should {
    sealed abstract class TestVoid extends VoidValueRef
    // NB: *no special companion is required!*
    object TestVoid
    identity(TestVoid)

    "always have a codec instance" in {
      Value.Encoder[TestVoid]
      Value.Decoder[TestVoid]
    }
  }
}
Example 60
Source File: TemplateSpec.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.ledger.client.binding

import org.scalatest.{WordSpec, Matchers}
import org.scalatest.prop.GeneratorDrivenPropertyChecks
import shapeless.test.illTyped

import com.daml.ledger.client.binding.{Primitive => P}

class TemplateSpec extends WordSpec with Matchers with GeneratorDrivenPropertyChecks {
  import TemplateSpec._
  // avoid importing CNA to test that implicit resolution is working properly

  "Template subclasses" when {
    "having colliding names" should {
      val cna = Tmpls.CNA("id", 1, 2, 3, 4)
      "resolve 'create' properly" in {
        val c = cna.create
        identity[P.Int64](c)
        illTyped("cna.create: Primitive.Update[Tmpls.CNA]", "(?s).*found[^\n]*Int64.*")
        val u = (cna: Template[Tmpls.CNA]).create
        identity[P.Update[P.ContractId[Tmpls.CNA]]](u)
      }
    }
    "a companion is defined" should {
      "resolve Value implicitly" in {
        implicitly[Value[Tmpls.CNA]]
      }
      "resolve the companion implicitly" in {
        implicitly[TemplateCompanion[Tmpls.CNA]]
      }
    }
  }
}

object TemplateSpec {
  object Tmpls {
    final case class CNA(
        id: P.Text,
        template: P.Int64,
        create: P.Int64,
        namedArguments: P.Int64,
        archive: P.Int64)
        extends Template[CNA] {
      protected[this] override def templateCompanion(implicit d: DummyImplicit) = CNA
    }

    // We've already passed the first test, by scalac checking that
    // these don't conflict with inherited names post-erasure.
    object CNA extends TemplateCompanion.Empty[CNA] {
      override protected val onlyInstance = CNA("", 0, 0, 0, 0)
      val id = P.TemplateId("hi", "there", "you")
      val consumingChoices = Set()
    }
  }
}
Example 61
Source File: UtilTest.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.codegen

import com.daml.lf.data.Ref.{QualifiedName, PackageId}

import java.io.IOException
import java.nio.file.attribute.BasicFileAttributes
import java.nio.file.{FileVisitResult, Files, Path, SimpleFileVisitor}

import com.daml.lf.{iface => I}
import org.scalatest.{BeforeAndAfterAll, FlatSpec, Matchers}
import org.scalatest.prop.GeneratorDrivenPropertyChecks

class UtilTest extends UtilTestHelpers with GeneratorDrivenPropertyChecks {

  val packageInterface =
    I.Interface(packageId = PackageId.assertFromString("abcdef"), typeDecls = Map.empty)
  val scalaPackageParts = Array("com", "digitalasset")
  val scalaPackage: String = scalaPackageParts.mkString(".")
  val util =
    lf.LFUtil(
      scalaPackage,
      I.EnvironmentInterface fromReaderInterfaces packageInterface,
      outputDir.toFile)

  def damlScalaName(damlNameSpace: Array[String], name: String): util.DamlScalaName =
    util.DamlScalaName(damlNameSpace, name)

  behavior of "Util"

  it should "mkDamlScalaName for a Contract named Test" in {
    val result = util.mkDamlScalaNameFromDirsAndName(Array(), "Test")
    result shouldEqual damlScalaName(Array.empty, "Test")
    result.packageName shouldEqual scalaPackage
    result.qualifiedName shouldEqual (scalaPackage + ".Test")
  }

  it should "mkDamlScalaName for a Template named foo.bar.Test" in {
    val result = util.mkDamlScalaName(Util.Template, QualifiedName assertFromString "foo.bar:Test")
    result shouldEqual damlScalaName(Array("foo", "bar"), "Test")
    result.packageName shouldEqual (scalaPackage + ".foo.bar")
    result.qualifiedName shouldEqual (scalaPackage + ".foo.bar.Test")
  }

  "partitionEithers" should "equal scalaz separate in simple cases" in forAll {
    iis: List[Either[Int, Int]] =>
      import scalaz.syntax.monadPlus._, scalaz.std.list._, scalaz.std.either._
      Util.partitionEithers(iis) shouldBe iis.separate
  }
}

abstract class UtilTestHelpers extends FlatSpec with Matchers with BeforeAndAfterAll {

  val outputDir = Files.createTempDirectory("codegenUtilTest")

  override protected def afterAll(): Unit = {
    super.afterAll()
    deleteRecursively(outputDir)
  }

  def deleteRecursively(dir: Path): Unit = {
    Files.walkFileTree(
      dir,
      new SimpleFileVisitor[Path] {
        override def postVisitDirectory(dir: Path, exc: IOException) = {
          Files.delete(dir)
          FileVisitResult.CONTINUE
        }

        override def visitFile(file: Path, attrs: BasicFileAttributes) = {
          Files.delete(file)
          FileVisitResult.CONTINUE
        }
      }
    )
    ()
  }
}
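The last property checks Util.partitionEithers against scalaz's separate. A minimal stand-in for such a helper (hypothetical, shown only to make the property concrete) is a single foldRight:

object PartitionEithersSketch {
  // Split eithers into (lefts, rights), preserving the original order.
  def partitionEithers[A, B](xs: List[Either[A, B]]): (List[A], List[B]) =
    xs.foldRight((List.empty[A], List.empty[B])) {
      case (Left(a), (as, bs))  => (a :: as, bs)
      case (Right(b), (as, bs)) => (as, b :: bs)
    }
}

For example, partitionEithers(List(Left(1), Right(2), Left(3))) yields (List(1, 3), List(2)), which is exactly what separate produces for a list of Eithers.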
Example 62
Source File: CodeGenSpec.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml
package codegen

import com.daml.lf.data.ImmArray.ImmArraySeq
import com.daml.lf.data.Ref.Identifier
import com.daml.lf.iface._
import com.daml.lf.value.test.ValueGenerators.idGen

import org.scalatest.{WordSpec, Matchers}
import org.scalatest.prop.GeneratorDrivenPropertyChecks

class CodeGenSpec extends WordSpec with Matchers with GeneratorDrivenPropertyChecks {
  import CodeGen.filterTemplatesBy
  import CodeGenSpec._

  "filterTemplatesBy" should {
    "be identity given empty regexes" in forAll(trivialEnvInterfaceGen) { ei =>
      filterTemplatesBy(Seq.empty)(ei) should ===(ei)
    }

    "delete all templates given impossible regex" in forAll(trivialEnvInterfaceGen) { ei =>
      val noTemplates = ei copy (ei.typeDecls transform {
        case (_, tmpl @ InterfaceType.Template(_, _)) => InterfaceType.Normal(tmpl.`type`)
        case (_, v) => v
      })
      filterTemplatesBy(Seq("(?!a)a".r))(ei) should ===(noTemplates)
    }

    "match the union of regexes, not intersection" in forAll(trivialEnvInterfaceGen) { ei =>
      filterTemplatesBy(Seq("(?s).*".r, "(?!a)a".r))(ei) should ===(ei)
    }
  }
}

object CodeGenSpec {
  import org.scalacheck.{Arbitrary, Gen}
  import Arbitrary.arbitrary

  val trivialEnvInterfaceGen: Gen[EnvironmentInterface] = {
    val fooRec = Record(ImmArraySeq.empty)
    val fooTmpl = InterfaceType.Template(fooRec, DefTemplate(Map.empty, None))
    val fooNorm = InterfaceType.Normal(DefDataType(ImmArraySeq.empty, fooRec))
    implicit val idArb: Arbitrary[Identifier] = Arbitrary(idGen)
    arbitrary[Map[Identifier, Boolean]] map { ids =>
      EnvironmentInterface(ids transform { (_, isTemplate) =>
        if (isTemplate) fooTmpl else fooNorm
      })
    }
  }
}
Example 63
Source File: NamespaceSpec.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.codegen.types

import org.scalatest.{WordSpec, Matchers, Inside}
import org.scalatest.prop.GeneratorDrivenPropertyChecks

import scalaz.std.anyVal._
import scalaz.std.tuple._
import scalaz.std.vector._
import scalaz.syntax.bifunctor._

class NamespaceSpec
    extends WordSpec
    with Matchers
    with Inside
    with GeneratorDrivenPropertyChecks {

  "fromHierarchy" should {
    "be lossless for keysets" in forAll { m: Map[List[Int], Int] =>
      NamespaceSpec
        .paths(Namespace.fromHierarchy(m))
        .collect { case (ns, Some(a)) => (ns, a) }
        .toMap shouldBe m
    }
  }
}

object NamespaceSpec {
  import com.daml.codegen.lf.HierarchicalOutput.`scalaz ==>> future`

  def paths[K, A](n: Namespace[K, A]): Vector[(List[K], A)] =
    n.foldTreeStrict[Vector[(List[K], A)]] { (a, kVecs) =>
      kVecs.foldMapWithKey { (k, vec) =>
        vec.map(_ leftMap (k :: _))
      } :+ ((List(), a))
    }
}
Example 64
Source File: ConverterSpec.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.navigator.model.converter

import org.scalatest.{Matchers, WordSpec}
import org.scalatest.prop.GeneratorDrivenPropertyChecks

class ConverterSpec extends WordSpec with Matchers with GeneratorDrivenPropertyChecks {
  import Converter._

  "sequence" should {
    "satisfy identity, modulo list conversion" in forAll { xs: Vector[Int] =>
      sequence(xs map (Right(_))) shouldBe Right(xs.toList)
    }

    "report the last error encountered" in {
      sequence(Seq(Left(1), Right(2), Left(3))) shouldBe Left(3)
    }
  }
}
Example 65
Source File: RLPSpeedSuite.scala From mantis with Apache License 2.0 | 5 votes |
package io.iohk.ethereum.rlp

import akka.util.ByteString
import io.iohk.ethereum.ObjectGenerators
import io.iohk.ethereum.domain.Block._
import io.iohk.ethereum.domain._
import io.iohk.ethereum.network.p2p.messages.CommonMessages.SignedTransactions._
import io.iohk.ethereum.network.p2p.messages.PV62.BlockBody
import io.iohk.ethereum.utils.Logger
import org.scalacheck.Gen
import org.scalatest.FunSuite
import org.scalatest.prop.{GeneratorDrivenPropertyChecks, PropertyChecks}
import org.spongycastle.util.encoders.Hex

class RLPSpeedSuite
    extends FunSuite
    with PropertyChecks
    with GeneratorDrivenPropertyChecks
    with ObjectGenerators
    with Logger {

  val rounds = 10000

  test("Main") {
    val startBlockSerialization: Long = System.currentTimeMillis
    val block = blockGen.sample.get
    val serializedBlock = doTestSerialize[Block](block, (b: Block) => b.toBytes, rounds)
    val elapsedBlockSerialization = (System.currentTimeMillis() - startBlockSerialization) / 1000f
    log.info(s"Block serializations / sec: (${rounds.toFloat / elapsedBlockSerialization})")

    val blockDeserializationStart: Long = System.currentTimeMillis
    val deserializedBlock: Block = doTestDeserialize(serializedBlock, (b: Array[Byte]) => b.toBlock, rounds)
    val elapsedBlockDeserialization = (System.currentTimeMillis() - blockDeserializationStart) / 1000f
    log.info(s"Block deserializations / sec: (${rounds.toFloat / elapsedBlockDeserialization})")

    val serializationTxStart: Long = System.currentTimeMillis
    val tx = validTransaction
    val serializedTx = doTestSerialize(tx, (stx: SignedTransaction) => stx.toBytes, rounds)
    val elapsedTxSerialization = (System.currentTimeMillis() - serializationTxStart) / 1000f
    log.info(s"TX serializations / sec: (${rounds.toFloat / elapsedTxSerialization})")

    val txDeserializationStart: Long = System.currentTimeMillis
    val deserializedTx: SignedTransaction = doTestDeserialize(serializedTx, (b: Array[Byte]) => b.toSignedTransaction, rounds)
    val elapsedTxDeserialization = (System.currentTimeMillis() - txDeserializationStart) / 1000f
    log.info(s"TX deserializations / sec: (${rounds.toFloat / elapsedTxDeserialization})")
  }

  test("Performance decode") {
    val blockRaw: String = "f8cbf8c7a00000000000000000000000000000000000000000000000000000000000000000a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347940000000000000000000000000000000000000000a02f4399b08efe68945c1cf90ffe85bbe3ce978959da753f9e649f034015b8817da00000000000000000000000000000000000000000000000000000000000000000834000008080830f4240808080a004994f67dc55b09e814ab7ffc8df3686b4afb2bb53e60eae97ef043fe03fb829c0c0"
    val payload: Array[Byte] = Hex.decode(blockRaw)
    val ITERATIONS: Int = 10000000
    log.info("Starting " + ITERATIONS + " decoding iterations...")

    val start1: Long = System.currentTimeMillis
    (1 to ITERATIONS).foreach { _ => RLP.rawDecode(payload) }
    val end1: Long = System.currentTimeMillis
    log.info("Result decode()\t: " + (end1 - start1) + "ms")
  }

  // Warm up for rounds - 1 iterations, then return one final encoding.
  def doTestSerialize[T](toSerialize: T, encode: T => Array[Byte], rounds: Int): Array[Byte] = {
    (1 until rounds).foreach(_ => encode(toSerialize))
    encode(toSerialize)
  }

  def doTestDeserialize[T](serialized: Array[Byte], decode: Array[Byte] => T, rounds: Int): T = {
    (1 until rounds).foreach(_ => decode(serialized))
    decode(serialized)
  }

  val validTransaction = SignedTransaction(
    Transaction(
      nonce = 172320,
      gasPrice = BigInt("50000000000"),
      gasLimit = 90000,
      receivingAddress = Address(Hex.decode("1c51bf013add0857c5d9cf2f71a7f15ca93d4816")),
      value = BigInt("1049756850000000000"),
      payload = ByteString.empty),
    pointSign = 28,
    signatureRandom = ByteString(Hex.decode("cfe3ad31d6612f8d787c45f115cc5b43fb22bcc210b62ae71dc7cbf0a6bea8df")),
    signature = ByteString(Hex.decode("57db8998114fae3c337e99dbd8573d4085691880f4576c6c1f6c5bbfe67d6cf0")),
    chainId = 0x3d.toByte
  ).get

  lazy val blockGen: Gen[Block] = for {
    header <- blockHeaderGen
    uncles <- blockHeaderGen
  } yield Block(
    header = header,
    BlockBody(transactionList = List.fill(10)(validTransaction), uncleNodesList = Seq(uncles)))
}
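Note on the benchmark above: doTestSerialize and doTestDeserialize share the same warm-up-and-measure shape, and the caller divides elapsed milliseconds by 1000f to report operations per second. As a minimal sketch (not part of the mantis codebase; the Throughput object and opsPerSecond name are hypothetical), the pattern can be factored out like this:

object Throughput {
  // Executes `body` `rounds` times and returns operations per second,
  // mirroring the elapsed-millis / 1000f arithmetic in the suite above.
  def opsPerSecond(rounds: Int)(body: => Unit): Float = {
    val start = System.currentTimeMillis
    (1 to rounds).foreach(_ => body)
    val elapsedSeconds = (System.currentTimeMillis - start) / 1000f
    rounds.toFloat / elapsedSeconds
  }
}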
Example 66
Source File: DCollectionGenProperties.scala From kontextfrei with Apache License 2.0 | 5 votes |
package com.danielwestheide.kontextfrei.scalatest

import org.apache.spark.rdd.RDD
import org.scalatest.PropSpecLike
import org.scalatest.prop.GeneratorDrivenPropertyChecks

trait DCollectionGenProperties[DColl[_]]
    extends PropSpecLike
    with GeneratorDrivenPropertyChecks
    with DCollectionGen
    with KontextfreiSpec[DColl] {

  property("Can get arbitrary DCollections") {
    forAll { xs: DColl[String] =>
      ops.count(xs) === ops.collectAsArray(xs).length
    }
  }
}

class DCollectionGenStreamSpec extends DCollectionGenProperties[Stream] with StreamSpec

class DCollectionGenRDDSpec extends DCollectionGenProperties[RDD] with RDDSpec
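The suite above is written once against the abstract DColl[_] type constructor and then instantiated for both Stream and RDD. A minimal sketch of that higher-kinded pattern, with illustrative names rather than kontextfrei's actual API:

trait Ops[F[_]] {
  def unit[A](xs: List[A]): F[A]
  def count[A](fa: F[A]): Long
}

// A local, in-memory instance; an RDD-backed instance would instead
// delegate to SparkContext.parallelize and RDD.count.
object StreamOps extends Ops[Stream] {
  def unit[A](xs: List[A]): Stream[A] = xs.toStream
  def count[A](fa: Stream[A]): Long = fa.size.toLong
}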
Example 67
Source File: CollectingInstancesProperties.scala From kontextfrei with Apache License 2.0 | 5 votes |
package com.danielwestheide.kontextfrei.scalatest

import org.apache.spark.rdd.RDD
import org.scalatest.enablers.Collecting
import org.scalatest.{Inspectors, PropSpec, PropSpecLike}
import org.scalatest.prop.GeneratorDrivenPropertyChecks

trait CollectingInstancesProperties[DColl[_]]
    extends PropSpecLike
    with GeneratorDrivenPropertyChecks
    with KontextfreiSpec[DColl]
    with CollectingInstances {

  property("There is a Collecting instance for DCollection") {
    forAll { (xs: List[String]) =>
      val dcoll = ops.unit(xs)
      Inspectors.forAll(dcoll) { x =>
        assert(xs.contains(x))
      }
    }
  }

  property("Collecting nature of DCollection returns the original size of the input sequence") {
    forAll { (xs: List[String]) =>
      val dcoll = ops.unit(xs)
      assert(implicitly[Collecting[String, DColl[String]]].sizeOf(dcoll) === xs.size)
    }
  }

  property("Collecting nature of DCollection returns Some as loneElement if the input sequence has exactly one element") {
    forAll { (x: String) =>
      val dcoll = ops.unit(List(x))
      assert(implicitly[Collecting[String, DColl[String]]].loneElementOf(dcoll) === Some(x))
    }
  }

  property("Collecting nature of DCollection returns None as loneElement if the input sequence has more than one element") {
    forAll { (xs: List[String]) =>
      whenever(xs.size > 1) {
        val dcoll = ops.unit(xs)
        assert(implicitly[Collecting[String, DColl[String]]].loneElementOf(dcoll).isEmpty)
      }
    }
  }

  property("Collecting nature of DCollection returns None as loneElement if the input sequence is empty") {
    val dcoll = ops.unit(List.empty[String])
    assert(implicitly[Collecting[String, DColl[String]]].loneElementOf(dcoll).isEmpty)
  }
}

class CollectionInstancesStreamSpec extends CollectingInstancesProperties[Stream] with StreamSpec

class CollectionInstancesRDDSpec extends CollectingInstancesProperties[RDD] with RDDSpec
Example 68
Source File: ConfigParserSpec.scala From daml with Apache License 2.0 | 4 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.extractor.config

import com.daml.extractor.config.Generators._
import com.daml.extractor.targets.Target
import org.scalatest.prop.GeneratorDrivenPropertyChecks
import org.scalatest.{FlatSpec, Inside, Matchers}
import scalaz.OneAnd
import scalaz.Scalaz._
import scalaz.scalacheck.ScalazArbitrary._

class ConfigParserSpec
    extends FlatSpec
    with Matchers
    with Inside
    with GeneratorDrivenPropertyChecks {

  behavior of ConfigParser.getClass.getSimpleName

  val requiredArgs = Vector("--party", "Bob")

  it should "parse template configuration" in forAll {
    templateConfigs: OneAnd[List, TemplateConfig] =>
      val args = requiredArgs ++ Vector("--templates", templateConfigUserInput(templateConfigs))
      inside(ConfigParser.parse(args)) {
        case Some((config, _)) =>
          config.templateConfigs should ===(templateConfigs.toSet)
      }
  }

  it should "fail parsing when given duplicate template configurations" in forAll {
    templateConfigs: OneAnd[List, TemplateConfig] =>
      val duplicate = templateConfigs.head
      val args = requiredArgs ++ Vector(
        "--templates",
        templateConfigUserInput(duplicate :: templateConfigs.toList))

      // scopt prints errors to the standard error stream
      val capturedStdErr = new java.io.ByteArrayOutputStream()
      val result: Option[(ExtractorConfig, Target)] =
        Console.withErr(capturedStdErr) {
          ConfigParser.parse(args)
        }
      capturedStdErr.flush()
      capturedStdErr.close()

      result should ===(None)
      val firstLine = capturedStdErr.toString.replaceAllLiterally("\r", "").split('\n')(0)
      firstLine should ===("Error: The list of templates must contain unique elements")
  }
}
Example 69
Source File: CustomScoptReadersSpec.scala From daml with Apache License 2.0 | 4 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.extractor.config

import com.daml.extractor.config.Generators._
import org.scalacheck.{Gen, Shrink}
import org.scalatest.prop.GeneratorDrivenPropertyChecks
import org.scalatest.{FlatSpec, Matchers}
import scopt.Read

class CustomScoptReadersSpec extends FlatSpec with Matchers with GeneratorDrivenPropertyChecks {

  behavior of "CustomScoptReaders"

  implicit def noShrink[A]: Shrink[A] = Shrink.shrinkAny

  it should "parse a TemplateConfig" in forAll(genTemplateConfig) {
    templateConfig: TemplateConfig =>
      val sut = CustomScoptReaders.templateConfigRead
      val input: String = templateConfigUserInput(templateConfig)
      val actual: TemplateConfig = sut.reads(input)
      actual should ===(templateConfig)
  }

  it should "parse a list of TemplateConfigs" in forAll(Gen.nonEmptyListOf(genTemplateConfig)) {
    templateConfigs: List[TemplateConfig] =>
      import CustomScoptReaders.templateConfigRead
      val sut: Read[Seq[TemplateConfig]] = implicitly
      val input: String = templateConfigUserInput(templateConfigs)
      val actual: Seq[TemplateConfig] = sut.reads(input)
      actual.toList should ===(templateConfigs)
  }
}
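The noShrink definition above deserves a note: Shrink.shrinkAny yields an empty shrink stream for every value, so when a property fails, ScalaCheck reports the exact input the generator produced rather than a shrunken variant that genTemplateConfig might never emit. A quick standalone illustration of that behavior (not from the spec):

import org.scalacheck.Shrink

val s: Shrink[String] = Shrink.shrinkAny
// No shrink candidates are offered, so failure reports keep the original value.
assert(s.shrink("some failing input").isEmpty)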
Example 70
Source File: MultiPartySpec.scala From daml with Apache License 2.0 | 4 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.extractor

import java.io.File

import com.daml.bazeltools.BazelRunfiles._
import com.daml.lf.data.Ref.Party
import com.daml.lf.value.test.ValueGenerators.{party => partyGen}
import com.daml.extractor.config.CustomScoptReaders._
import com.daml.extractor.config.ExtractorConfig
import com.daml.extractor.services.ExtractorFixtureAroundAll
import com.daml.ledger.api.testing.utils.SuiteResourceManagementAroundAll
import com.daml.testing.postgresql.PostgresAroundAll
import org.scalacheck.Arbitrary
import org.scalatest._
import org.scalatest.prop.GeneratorDrivenPropertyChecks
import scalaz._
import scalaz.scalacheck.ScalazArbitrary._
import scalaz.std.list._
import scalaz.std.option._
import scalaz.std.string._
import scalaz.syntax.foldable._
import scalaz.syntax.functor._
import scopt.Read

class MultiPartySpec
    extends FlatSpec
    with Suite
    with PostgresAroundAll
    with SuiteResourceManagementAroundAll
    with ExtractorFixtureAroundAll
    with Matchers
    with GeneratorDrivenPropertyChecks {

  override protected def darFile = new File(rlocation("extractor/RecordsAndVariants.dar"))

  override def scenario: Option[String] = Some("RecordsAndVariants:multiParty")

  override def configureExtractor(ec: ExtractorConfig): ExtractorConfig = {
    val ec2 = super.configureExtractor(ec)
    ec2.copy(parties = OneAnd(Party assertFromString "Alice", ec2.parties.toList))
  }

  private[this] implicit def partyArb: Arbitrary[Party] = Arbitrary(partyGen)
  private[this] val readParties = implicitly[Read[ExtractorConfig.Parties]]

  "Party parser" should "permit comma separation" in forAll { parties: OneAnd[List, Party] =>
    readParties.reads(parties.widen[String] intercalate ",") should ===(parties)
  }

  "Party parser" should "permit spaces in parties" in {
    readParties.reads("foo bar,baz quux, a b ") should ===(
      OneAnd("foo bar", List("baz quux", " a b ")))
  }

  "Party parser" should "reject non-comma bad characters" in {
    an[IllegalArgumentException] should be thrownBy {
      readParties reads "amazing!"
    }
  }

  "Contracts" should "contain the visible contracts" in {
    val ticks = getContracts.map { ct =>
      for {
        o <- ct.create_arguments.asObject
        tick <- o("tick")
        n <- tick.asNumber
        i <- n.toInt
      } yield i
    }
    val expected = List(1, 2, 4, 5, 7).map(some)
    ticks should contain theSameElementsAs expected
  }
}
Example 71
Source File: ResponseFormatsTest.scala From daml with Apache License 2.0 | 4 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.http.json

import com.daml.http.util.Collections._

import akka.actor.ActorSystem
import akka.stream.Materializer
import akka.stream.scaladsl.Source
import akka.util.ByteString
import org.scalacheck.Gen
import org.scalatest.compatible.Assertion
import org.scalatest.prop.GeneratorDrivenPropertyChecks
import org.scalatest.{FreeSpec, Inside, Matchers}
import scalaz.syntax.show._
import scalaz.{Show, \/}
import spray.json._

import scala.concurrent.duration._
import scala.concurrent.{Await, ExecutionContext, Future}

class ResponseFormatsTest
    extends FreeSpec
    with Matchers
    with Inside
    with GeneratorDrivenPropertyChecks {

  implicit val asys: ActorSystem = ActorSystem(this.getClass.getSimpleName)
  implicit val mat: Materializer = Materializer(asys)
  implicit val ec: ExecutionContext = asys.dispatcher

  implicit override val generatorDrivenConfig: PropertyCheckConfiguration =
    PropertyCheckConfiguration(minSuccessful = 100)

  "resultJsObject should serialize Source of Errors and JsValues" in forAll(
    Gen.listOf(errorOrJsNumber),
    Gen.option(Gen.nonEmptyListOf(Gen.identifier))) { (input, warnings) =>
    import spray.json.DefaultJsonProtocol._

    val jsValWarnings: Option[JsValue] = warnings.map(_.toJson)
    val (failures, successes): (Vector[JsString], Vector[JsValue]) =
      input.toVector.partitionMap(_.leftMap(e => JsString(e.shows)))

    val jsValSource = Source[DummyError \/ JsValue](input)

    val responseF: Future[ByteString] = ResponseFormats
      .resultJsObject(jsValSource, jsValWarnings)
      .runFold(ByteString.empty)((b, a) => b ++ a)

    val resultF: Future[Assertion] = responseF.map { str =>
      JsonParser(str.utf8String) shouldBe expectedResult(failures, successes, jsValWarnings)
    }

    Await.result(resultF, 10.seconds)
  }

  private def expectedResult(
      failures: Vector[JsValue],
      successes: Vector[JsValue],
      warnings: Option[JsValue]): JsObject = {

    val map1: Map[String, JsValue] = warnings match {
      case Some(x) => Map("warnings" -> x)
      case None => Map.empty
    }

    val map2 =
      if (failures.isEmpty)
        Map[String, JsValue]("result" -> JsArray(successes), "status" -> JsNumber("200"))
      else
        Map[String, JsValue](
          "result" -> JsArray(successes),
          "errors" -> JsArray(failures),
          "status" -> JsNumber("501"))

    JsObject(map1 ++ map2)
  }

  private lazy val errorOrJsNumber: Gen[DummyError \/ JsNumber] = Gen.frequency(
    1 -> dummyErrorGen.map(\/.left),
    5 -> jsNumberGen.map(\/.right)
  )

  private lazy val dummyErrorGen: Gen[DummyError] = Gen.identifier.map(DummyError.apply)

  private lazy val jsNumberGen: Gen[JsNumber] = Gen.posNum[Long].map(JsNumber.apply)
}

final case class DummyError(message: String)

object DummyError {
  implicit val ShowInstance: Show[DummyError] = Show shows { e =>
    s"DummyError(${e.message})"
  }
}
Example 72
Source File: ConsumerEventSpec.scala From reactive-kinesis with Apache License 2.0 | 4 votes |
package com.weightwatchers.reactive.kinesis.models

import java.nio.ByteBuffer
import java.nio.charset.StandardCharsets.UTF_8

import org.joda.time.DateTime
import org.scalacheck.Gen
import org.scalatest.prop.GeneratorDrivenPropertyChecks
import org.scalatest.{FreeSpec, Matchers}

class ConsumerEventSpec extends FreeSpec with Matchers with GeneratorDrivenPropertyChecks {

  "A ConsumerEvent" - {
    "can be read as string" in {
      forAll(Gen.alphaNumStr) { string =>
        val event = ConsumerEvent(
          CompoundSequenceNumber("123", 0),
          ByteBuffer.wrap(string.getBytes(UTF_8)),
          DateTime.now())
        event.payloadAsString(UTF_8) shouldBe string
      }
    }
  }
}
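Distilled from the examples above, a minimal self-contained GeneratorDrivenPropertyChecks suite might look like the following sketch (the class name and property are illustrative, not taken from any project above):

import org.scalacheck.Gen
import org.scalatest.{FlatSpec, Matchers}
import org.scalatest.prop.GeneratorDrivenPropertyChecks

class ReverseSpec extends FlatSpec with Matchers with GeneratorDrivenPropertyChecks {

  // Run each property against 200 generated cases instead of the default.
  implicit override val generatorDrivenConfig: PropertyCheckConfiguration =
    PropertyCheckConfiguration(minSuccessful = 200)

  "List reversal" should "be an involution" in forAll(Gen.listOf(Gen.alphaNumStr)) {
    xs: List[String] =>
      xs.reverse.reverse should ===(xs)
  }
}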