org.scalatest.Assertion Scala Examples
The following examples show how to use org.scalatest.Assertion.
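For context, Assertion is the result type that ScalaTest's assertions and matcher expressions return: a successful check yields the singleton Succeeded, while a failing one throws TestFailedException. Declaring a helper to return Assertion documents that it asserts and lets it plug into ScalaTest's async styles. A minimal, self-contained sketch (suite and helper names are illustrative):

import org.scalatest.Assertion
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

class MinimalSpec extends AnyFlatSpec with Matchers {
  // A reusable check: `shouldBe` returns Assertion, so the helper's
  // return type documents that calling it performs an assertion.
  private def checkDouble(n: Int): Assertion =
    (n * 2) shouldBe n + n

  "doubling" should "equal adding a number to itself" in {
    checkDouble(21)
  }
}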
Example 1
Source File: EffectTestSupport.scala From cats-effect-testing with Apache License 2.0

package cats.effect.testing.scalatest

import cats.effect._
import cats.implicits._
import scala.concurrent.Future
import org.scalatest.{Assertion, Succeeded}

trait EffectTestSupport {
  implicit def syncIoToFutureAssertion(io: SyncIO[Assertion]): Future[Assertion] =
    io.toIO.unsafeToFuture

  implicit def ioToFutureAssertion(io: IO[Assertion]): Future[Assertion] =
    io.unsafeToFuture

  implicit def syncIoUnitToFutureAssertion(io: SyncIO[Unit]): Future[Assertion] =
    io.toIO.as(Succeeded).unsafeToFuture

  implicit def ioUnitToFutureAssertion(io: IO[Unit]): Future[Assertion] =
    io.as(Succeeded).unsafeToFuture
}
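The trait above only defines implicit conversions; a suite consumes them by mixing the trait into an async spec, so that a test body written in IO can be handed back to ScalaTest as the Future[Assertion] it expects. A minimal sketch under that assumption (suite name and test are illustrative; cats-effect 2.x API):

import cats.effect.IO
import cats.effect.testing.scalatest.EffectTestSupport
import org.scalatest.freespec.AsyncFreeSpec
import org.scalatest.matchers.should.Matchers

// Hypothetical suite: the mixed-in conversions turn IO[Assertion]
// into the Future[Assertion] that AsyncFreeSpec expects.
class IoExampleSpec extends AsyncFreeSpec with Matchers with EffectTestSupport {
  "an IO-based test" in {
    IO(1 + 1).map(_ shouldBe 2)
  }
}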
Example 2
Source File: SigmaValidationSettingsSerializerSpec.scala From sigmastate-interpreter with MIT License

package org.ergoplatform.validation

import org.scalatest.Assertion
import sigmastate.helpers.SigmaTestingCommons
import sigmastate.serialization.SerializationSpecification

class SigmaValidationSettingsSerializerSpec extends SerializationSpecification
  with SigmaTestingCommons {

  private def roundtrip(settings: SigmaValidationSettings): Assertion = {
    implicit val set = SigmaValidationSettingsSerializer
    roundTripTest(settings)
    roundTripTestWithPos(settings)
  }

  property("ValidationRules.currentSettings round trip") {
    roundtrip(ValidationRules.currentSettings)
  }

  property("SigmaValidationSettings round trip") {
    forAll(ruleIdGen, statusGen, MinSuccessful(100)) { (ruleId, status) =>
      val vs = ValidationRules.currentSettings.updated(ruleId, status)
      roundtrip(vs)
    }
  }
}
Example 3
Source File: RuleStatusSerializerSpec.scala From sigmastate-interpreter with MIT License

package org.ergoplatform.validation

import org.scalatest.Assertion
import sigmastate.helpers.SigmaTestingCommons
import sigmastate.serialization.{SigmaSerializer, SerializationSpecification}

class RuleStatusSerializerSpec extends SerializationSpecification
  with SigmaTestingCommons {

  private def roundtrip(status: RuleStatus): Assertion = {
    implicit val ser = RuleStatusSerializer
    roundTripTest(status)
    roundTripTestWithPos(status)
  }

  property("RuleStatusSerializer round trip") {
    forAll(statusGen, MinSuccessful(100)) { status =>
      roundtrip(status)
    }
  }

  property("RuleStatusSerializer parse unrecognized status") {
    val unknownCode = 100.toByte
    val someByte = 10.toByte
    val nextByte = 20.toByte
    val bytes = Array[Byte](1, unknownCode, someByte, nextByte)
    val r = SigmaSerializer.startReader(bytes)
    val s = RuleStatusSerializer.parse(r)
    s shouldBe ReplacedRule(0)
    val b = r.getByte()
    b shouldBe nextByte
  }
}
Example 4
Source File: SerializationSpecification.scala From sigmastate-interpreter with MIT License

package sigmastate.serialization

import org.ergoplatform.validation.ValidationSpecification
import org.scalacheck.Gen
import org.scalatest.prop.{PropertyChecks, TableDrivenPropertyChecks, GeneratorDrivenPropertyChecks}
import org.scalatest.{PropSpec, Assertion, Matchers}
import org.scalacheck.Arbitrary._
import sigmastate.Values._
import sigmastate.SType
import sigmastate.serialization.generators._

trait SerializationSpecification extends PropSpec
  with PropertyChecks
  with GeneratorDrivenPropertyChecks
  with TableDrivenPropertyChecks
  with Matchers
  with ObjectGenerators
  with ConcreteCollectionGenerators
  with OpcodesGen
  with TransformerGenerators
  with RelationGenerators
  with ValidationSpecification {

  protected def roundTripTest[V <: Value[_ <: SType]](v: V): Assertion = {
    val bytes = ValueSerializer.serialize(v)
    predefinedBytesTest(v, bytes)
    predefinedBytesTestNotFomZeroElement(bytes, v)
  }

  protected def predefinedBytesTest[V <: Value[_ <: SType]](v: V, bytes: Array[Byte]): Assertion = {
    ValueSerializer.serialize(v) shouldEqual bytes
    val r = SigmaSerializer.startReader(bytes)
    val positionLimitBefore = r.positionLimit
    val dv = ValueSerializer.deserialize(r)
    dv shouldEqual v
    r.positionLimit shouldBe positionLimitBefore
  }

  // check that pos and consumed are being implemented correctly
  protected def predefinedBytesTestNotFomZeroElement[V <: Value[_ <: SType]](bytes: Array[Byte], v: V): Assertion = {
    val randomInt = Gen.chooseNum(1, 20).sample.get
    val randomBytes = Gen.listOfN(randomInt, arbByte.arbitrary).sample.get.toArray
    val parsedVal = ValueSerializer.deserialize(randomBytes ++ bytes, randomInt)
    parsedVal shouldEqual v
  }
}
Example 5
Source File: TypeSerializerSpecification.scala From sigmastate-interpreter with MIT License

package sigmastate.serialization

import org.scalacheck.Arbitrary._
import org.scalatest.Assertion
import sigmastate._

class TypeSerializerSpecification extends SerializationSpecification {

  private def roundtrip[T <: SType](tpe: T, expected: Array[Byte]): Assertion = {
    val w = SigmaSerializer.startWriter()
      .putType(tpe)
    val bytes = w.toBytes
    bytes shouldBe expected
    roundtrip(tpe)
  }

  private def roundtrip[T <: SType](tpe: T): Assertion = {
    val w = SigmaSerializer.startWriter()
      .putType(tpe)
    val bytes = w.toBytes
    val r = SigmaSerializer.startReader(bytes, 0)
    val res = r.getType()
    res shouldBe tpe
    val randomPrefix = arrayGen[Byte].sample.get
    val r2 = SigmaSerializer.startReader(randomPrefix ++ bytes, randomPrefix.length)
    val res2 = r2.getType()
    res2 shouldBe tpe
  }

  property("Codes of embeddable types have correct order") {
    for (i <- TypeSerializer.embeddableIdToType.indices.drop(1))
      i shouldBe TypeSerializer.embeddableIdToType(i).typeCode
  }

  import SCollectionType._; import SOption._; import STuple._

  property("Embeddable type serialization roundtrip") {
    forAll { t: SPredefType =>
      whenever(t.isInstanceOf[SEmbeddable]) {
        val e = t.asInstanceOf[SEmbeddable]
        val tCode = t.typeCode
        roundtrip(t, Array[Byte](tCode))
        roundtrip(SCollection(t), Array[Byte](e.embedIn(CollectionTypeCode)))
        roundtrip(SCollection(SCollection(t)), Array[Byte](e.embedIn(NestedCollectionTypeCode)))
        roundtrip(SOption(t), Array[Byte](e.embedIn(OptionTypeCode)))
        roundtrip(SOption(SCollection(t)), Array[Byte](e.embedIn(OptionCollectionTypeCode)))
        roundtrip(STuple(t, t), Array[Byte](e.embedIn(PairSymmetricTypeCode)))
        if (t != SLong) {
          roundtrip(STuple(t, SBox), Array[Byte](e.embedIn(Pair1TypeCode), SBox.typeCode))
          roundtrip(STuple(SBox, t), Array[Byte](e.embedIn(Pair2TypeCode), SBox.typeCode))
        }
        roundtrip(STuple(SCollection(SLong), t),
          Array[Byte](e.embedIn(Pair2TypeCode), SLong.embedIn(CollectionTypeCode)))
      }
    }
  }

  property("Complex type serialization roundtrip") {
    forAll { t: SPredefType =>
      whenever(t.isInstanceOf[SEmbeddable]) {
        val e = t.asInstanceOf[SEmbeddable]
        val tupCode = e.embedIn(PairSymmetricTypeCode)
        roundtrip(SCollection(STuple(e, e)), Array[Byte](CollectionTypeCode, tupCode))
        roundtrip(SCollection(SOption(e)), Array[Byte](CollectionTypeCode, e.embedIn(OptionTypeCode)))
        roundtrip(SCollection(SCollection(STuple(e, e))), Array[Byte](CollectionTypeCode, CollectionTypeCode, tupCode))
        roundtrip(SCollection(SOption(STuple(e, e))), Array[Byte](CollectionTypeCode, OptionTypeCode, tupCode))
        roundtrip(SOption(STuple(e, e)), Array[Byte](OptionTypeCode, tupCode))
        roundtrip(SOption(SCollection(STuple(e, e))), Array[Byte](OptionTypeCode, CollectionTypeCode, tupCode))
      }
    }
  }

  property("Specific types serialization roundtrip") {
    roundtrip(STuple(SCollection(SLong), SCollection(SLong)),
      Array[Byte](Pair1TypeCode, SLong.embedIn(CollectionTypeCode), SLong.embedIn(CollectionTypeCode)))
    roundtrip(STuple(SCollection(SLong), SOption(SLong)),
      Array[Byte](Pair1TypeCode, SLong.embedIn(CollectionTypeCode), SLong.embedIn(OptionTypeCode)))
    roundtrip(STuple(SLong, SLong, SByte),
      Array[Byte](Pair2TypeCode, SLong.typeCode, SLong.typeCode, SByte.typeCode))
    roundtrip(STuple(SCollection(SLong), SLong, SByte),
      Array[Byte](Pair2TypeCode, SLong.embedIn(CollectionTypeCode), SLong.typeCode, SByte.typeCode))
    roundtrip(STuple(SCollection(SLong), SOption(SLong), SByte),
      Array[Byte](Pair2TypeCode, SLong.embedIn(CollectionTypeCode), SLong.embedIn(OptionTypeCode), SByte.typeCode))
    roundtrip(STuple(SCollection(SCollection(SLong)), SOption(SCollection(SLong)), SByte),
      Array[Byte](Pair2TypeCode, SLong.embedIn(NestedCollectionTypeCode), SLong.embedIn(OptionCollectionTypeCode), SByte.typeCode))
    roundtrip(STuple(SLong, SLong, SByte, SBoolean),
      Array[Byte](PairSymmetricTypeCode, SLong.typeCode, SLong.typeCode, SByte.typeCode, SBoolean.typeCode))
    roundtrip(STuple(SLong, SLong, SByte, SBoolean, SInt),
      Array[Byte](TupleTypeCode, 5, SLong.typeCode, SLong.typeCode, SByte.typeCode, SBoolean.typeCode, SInt.typeCode))
  }

  property("STypeIdent serialization roundtrip") {
    forAll(sTypeIdentGen) { ti =>
      roundtrip(ti)
    }
  }
}
Example 6
Source File: SigSerializerSpecification.scala From sigmastate-interpreter with MIT License

package sigmastate.serialization

import java.util

import org.ergoplatform.{ErgoLikeContext, ErgoLikeTransaction}
import org.scalacheck.{Arbitrary, Gen}
import org.scalatest.Assertion
import sigmastate.Values.{SigmaBoolean, SigmaPropConstant, SigmaPropValue, Value}
import sigmastate._
import sigmastate.basics.DLogProtocol.ProveDlog
import sigmastate.basics.ProveDHTuple
import sigmastate.helpers.{ContextEnrichingTestProvingInterpreter, ErgoLikeContextTesting, ErgoLikeTransactionTesting, SigmaTestingCommons}
import sigmastate.serialization.generators.ObjectGenerators
import sigmastate.utxo.Transformer

import scala.util.Random

class SigSerializerSpecification extends SigmaTestingCommons with ObjectGenerators {
  implicit lazy val IR = new TestingIRContext

  private lazy implicit val arbExprGen: Arbitrary[SigmaBoolean] = Arbitrary(exprTreeGen)

  private lazy val prover = new ContextEnrichingTestProvingInterpreter()

  private lazy val interpreterProveDlogGen: Gen[ProveDlog] =
    Gen.oneOf(prover.dlogSecrets.map(secret => ProveDlog(secret.publicImage.h)))

  private lazy val interpreterProveDHTGen =
    Gen.oneOf(
      prover.dhSecrets
        .map(_.commonInput)
        .map(ci => ProveDHTuple(ci.g, ci.h, ci.u, ci.v)))

  private def exprTreeNodeGen: Gen[SigmaBoolean] = for {
    left <- exprTreeGen
    right <- exprTreeGen
    node <- Gen.oneOf(
      COR(Seq(left, right)),
      CAND(Seq(left, right))
    )
  } yield node

  private def exprTreeGen: Gen[SigmaBoolean] =
    Gen.oneOf(interpreterProveDlogGen, interpreterProveDHTGen, Gen.delay(exprTreeNodeGen))

  private def isEquivalent(expected: ProofTree, actual: ProofTree): Boolean = (expected, actual) match {
    case (NoProof, NoProof) => true
    case (dht1: UncheckedDiffieHellmanTuple, dht2: UncheckedDiffieHellmanTuple) =>
      // `firstMessageOpt` is not serialized
      dht1.copy(commitmentOpt = None) == dht2
    case (sch1: UncheckedSchnorr, sch2: UncheckedSchnorr) =>
      // `firstMessageOpt` is not serialized
      sch1.copy(commitmentOpt = None) == sch2
    case (conj1: UncheckedConjecture, conj2: UncheckedConjecture) =>
      util.Arrays.equals(conj1.challenge, conj2.challenge) &&
        conj1.children.zip(conj2.children).forall(t => isEquivalent(t._1, t._2))
    case _ => false
  }

  private def roundTrip(uncheckedTree: UncheckedTree, exp: SigmaBoolean): Assertion = {
    val bytes = SigSerializer.toBytes(uncheckedTree)
    val parsedUncheckedTree = SigSerializer.parseAndComputeChallenges(exp, bytes)
    isEquivalent(uncheckedTree, parsedUncheckedTree) shouldBe true
  }

  property("SigSerializer no proof round trip") {
    roundTrip(NoProof, TrivialProp.TrueProp)
  }

  property("SigSerializer round trip") {
    forAll { sb: SigmaBoolean =>
      val expr = sb.toSigmaProp
      val challenge = Array.fill(32)(Random.nextInt(100).toByte)

      val ctx = ErgoLikeContextTesting(
        currentHeight = 1,
        lastBlockUtxoRoot = AvlTreeData.dummy,
        minerPubkey = ErgoLikeContextTesting.dummyPubkey,
        boxesToSpend = IndexedSeq(fakeSelf),
        spendingTransaction = ErgoLikeTransactionTesting.dummy,
        self = fakeSelf)

      // get sigma conjectures out of transformers
      val prop = prover.reduceToCrypto(ctx, expr).get._1

      val proof = prover.prove(expr, ctx, challenge).get.proof
      val proofTree = SigSerializer.parseAndComputeChallenges(prop, proof)

      roundTrip(proofTree, prop)
    }
  }
}
Example 7
Source File: ConsistencyLevelUnitTest.scala From scala-cass with MIT License

package com.weather.scalacass

import com.datastax.driver.core.ConsistencyLevel
import com.weather.scalacass.scsession.{ SCBatchStatement, SCStatement }, SCStatement.RightBiasedEither
import com.weather.scalacass.util.CassandraWithTableTester
import org.scalatest.{ Assertion, OptionValues }

object ConsistencyLevelUnitTest {
  val db = "actionsdb"
  val table = "actionstable"
}

class ConsistencyLevelUnitTest extends CassandraWithTableTester(ConsistencyLevelUnitTest.db,
  ConsistencyLevelUnitTest.table,
  List("str varchar", "otherstr varchar", "d double"),
  List("str")) with OptionValues {
  import ConsistencyLevelUnitTest.{ db, table }

  lazy val ss = ScalaSession(ConsistencyLevelUnitTest.db)(client.session)

  case class Query(str: String)
  case class Insert(str: String, otherstr: String, d: Double)
  case class Update(otherstr: String, d: Double)

  val insertValue = Insert("str", "otherstr", 1234.0)
  val queryValue = Query(insertValue.str)
  val updateValue = Update("updatedStr", 4321.0)

  def checkConsistency[T <: SCStatement[_]](statement: T, clOpt: Option[ConsistencyLevel]): Assertion = {
    clOpt match {
      case Some(cl) => statement.toString should include(s"<CONSISTENCY $cl>")
      case None     => statement.toString should not include "<CONSISTENCY"
    }
    val bound = statement.prepareAndBind().right.value
    bound.preparedStatement.getConsistencyLevel shouldBe clOpt.orNull
  }

  def fullCheck[T <: SCStatement[_]](statement: T)(plusConsistency: (T, ConsistencyLevel) => T,
                                                   minusConsistency: T => T,
                                                   cl: ConsistencyLevel): Assertion = {
    val statementWithConsistency = plusConsistency(statement, cl)
    val statementWithNoConsistency = minusConsistency(statement)

    checkConsistency(statement, None)
    checkConsistency(statementWithConsistency, Some(cl))
    checkConsistency(statement, None)
    checkConsistency(statementWithConsistency, Some(cl))
    checkConsistency(statementWithNoConsistency, None)
  }

  "setting consistency" should "work with inserts" in {
    fullCheck(ss.insert(table, insertValue))(_ consistency _, _.defaultConsistency, ConsistencyLevel.ONE)
  }

  it should "work with updates" in {
    fullCheck(ss.update(table, updateValue, queryValue))(_ consistency _, _.defaultConsistency, ConsistencyLevel.LOCAL_ONE)
  }

  it should "work with selects" in {
    fullCheck(ss.selectStar(table, queryValue))(_ consistency _, _.defaultConsistency, ConsistencyLevel.SERIAL)
    fullCheck(ss.select[Update](table, queryValue))(_ consistency _, _.defaultConsistency, ConsistencyLevel.SERIAL)
    fullCheck(ss.selectOneStar(table, queryValue))(_ consistency _, _.defaultConsistency, ConsistencyLevel.SERIAL)
    fullCheck(ss.selectOne[Update](table, queryValue))(_ consistency _, _.defaultConsistency, ConsistencyLevel.SERIAL)
  }

  it should "work with deletes" in {
    fullCheck(ss.deleteRow(table, queryValue))(_ consistency _, _.defaultConsistency, ConsistencyLevel.ANY)
  }

  it should "work with raw" in {
    fullCheck(ss.rawStatement(s"INSERT INTO $db.$table (str, otherstr, d) VALUES (?, ?, ?)"))(_ consistency _, _.defaultConsistency, ConsistencyLevel.LOCAL_QUORUM)
    fullCheck(ss.rawSelectOne(s"SELECT * FROM $db.$table WHERE str=? LIMIT 1"))(_ consistency _, _.defaultConsistency, ConsistencyLevel.LOCAL_SERIAL)
    fullCheck(ss.rawSelect(s"SELECT * FROM $db.$table WHERE str=?"))(_ consistency _, _.defaultConsistency, ConsistencyLevel.LOCAL_SERIAL)
  }

  it should "work with batches" in {
    def checkConsistencyBatch(statement: SCBatchStatement, clOpt: Option[ConsistencyLevel]): Assertion = {
      clOpt match {
        case Some(cl) => statement.toString should include(s"<CONSISTENCY $cl>")
        case None     => statement.toString should not include "<CONSISTENCY"
      }
      val bound = statement.mkBatch.right.value
      bound.getSerialConsistencyLevel shouldBe clOpt.getOrElse(cluster.getConfiguration.getQueryOptions.getSerialConsistencyLevel)
    }

    val statement = ss.batchOf(ss.insert(table, insertValue))
    val statementWithConsistency = statement.consistency(ConsistencyLevel.LOCAL_SERIAL)
    val statementWithNoConsistency = statementWithConsistency.defaultConsistency

    checkConsistencyBatch(statement, None)
    checkConsistencyBatch(statementWithConsistency, Some(ConsistencyLevel.LOCAL_SERIAL))
    checkConsistencyBatch(statement, None)
    checkConsistencyBatch(statementWithConsistency.consistency(ConsistencyLevel.SERIAL), Some(ConsistencyLevel.SERIAL))
    checkConsistencyBatch(statementWithNoConsistency, None)
  }
}
Example 8
Source File: CirisDecoderSpec.scala From fs2-aws with MIT License

package fs2.aws.ciris

import java.util.Date

import cats.effect.{ ContextShift, IO }
import ciris.{ ConfigException, ConfigValue }
import org.scalatest.Assertion
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
import software.amazon.kinesis.common.InitialPositionInStream

import scala.concurrent.ExecutionContext.Implicits.global

class CirisDecoderSpec extends AnyWordSpec with Matchers {

  implicit val cs: ContextShift[IO] = IO.contextShift(global)

  "InitialPositionDecoderSpec" should {

    "when decoding Either[InitialPositionInStream, Date]" can {

      // same package, so `import fs2.aws.ciris._` not necessary here
      def decode(testStr: String): Either[InitialPositionInStream, Date] =
        ConfigValue
          .default(testStr)
          .as[Either[InitialPositionInStream, Date]]
          .load[IO]
          .unsafeRunSync()

      def expectDecodeFailure(testString: String): Assertion =
        intercept[ConfigException] {
          decode(testString)
        }.getMessage should include(
          s"Unable to convert value $testString to InitialPositionInStream"
        )

      "decode supported strings as initial offsets" in {
        decode("LATEST") should equal(Left(InitialPositionInStream.LATEST))
        decode("TRIM_HORIZON") should equal(Left(InitialPositionInStream.TRIM_HORIZON))
        decode("TS_1592404273000") should equal(Right(new Date(1592404273000L)))
      }

      "fail to decode invalid strings" in {
        expectDecodeFailure("FOOBAR")
        expectDecodeFailure("TS_FOO")
        expectDecodeFailure("TS_")
        expectDecodeFailure("_1592404273000")
      }
    }
  }
}
Example 9
Source File: CreatableTests.scala From kubernetes-client with Apache License 2.0

package com.goyeau.kubernetes.client.operation

import cats.Applicative
import cats.implicits._
import com.goyeau.kubernetes.client.KubernetesClient
import com.goyeau.kubernetes.client.Utils.retry
import io.k8s.apimachinery.pkg.apis.meta.v1.ObjectMeta
import org.http4s.Status
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
import org.scalatest.{Assertion, OptionValues}

trait CreatableTests[F[_], Resource <: { def metadata: Option[ObjectMeta] }]
    extends AnyFlatSpec
    with Matchers
    with OptionValues
    with MinikubeClientProvider[F] {

  def namespacedApi(namespaceName: String)(implicit client: KubernetesClient[F]): Creatable[F, Resource]
  def getChecked(namespaceName: String, resourceName: String)(implicit client: KubernetesClient[F]): F[Resource]
  def sampleResource(resourceName: String, labels: Map[String, String] = Map.empty): Resource
  def modifyResource(resource: Resource): Resource
  def checkUpdated(updatedResource: Resource): Assertion

  def createChecked(namespaceName: String, resourceName: String)(
      implicit client: KubernetesClient[F]
  ): F[Resource] = createChecked(namespaceName, resourceName, Map.empty)

  def createChecked(namespaceName: String, resourceName: String, labels: Map[String, String])(
      implicit client: KubernetesClient[F]
  ): F[Resource] = {
    val resource = sampleResource(resourceName, labels)
    for {
      status <- namespacedApi(namespaceName).create(resource)
      _ = status shouldBe Status.Created
      resource <- getChecked(namespaceName, resourceName)
    } yield resource
  }

  "create" should s"create a $resourceName" in usingMinikube { implicit client =>
    createChecked(resourceName.toLowerCase, "create-resource")
  }

  "createOrUpdate" should s"create a $resourceName" in usingMinikube { implicit client =>
    for {
      namespaceName <- Applicative[F].pure(resourceName.toLowerCase)
      resourceName = "create-update-resource"
      status <- namespacedApi(namespaceName).createOrUpdate(sampleResource(resourceName))
      _ = status shouldBe Status.Created
      _ <- getChecked(namespaceName, resourceName)
    } yield ()
  }

  def createOrUpdate(namespaceName: String, resourceName: String)(implicit client: KubernetesClient[F]) =
    for {
      resource <- getChecked(namespaceName, resourceName)
      status <- namespacedApi(namespaceName).createOrUpdate(modifyResource(resource))
      _ = status shouldBe Status.Ok
    } yield ()

  it should s"update a $resourceName already created" in usingMinikube { implicit client =>
    for {
      namespaceName <- Applicative[F].pure(resourceName.toLowerCase)
      resourceName <- Applicative[F].pure("update-resource")
      _ <- createChecked(namespaceName, resourceName)
      _ <- retry(createOrUpdate(namespaceName, resourceName))
      updatedResource <- getChecked(namespaceName, resourceName)
      _ = checkUpdated(updatedResource)
    } yield ()
  }
}
Example 10
Source File: ReplaceableTests.scala From kubernetes-client with Apache License 2.0

package com.goyeau.kubernetes.client.operation

import cats.Applicative
import cats.implicits._
import com.goyeau.kubernetes.client.KubernetesClient
import com.goyeau.kubernetes.client.Utils.retry
import io.k8s.apimachinery.pkg.apis.meta.v1.ObjectMeta
import org.http4s.Status
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
import org.scalatest.{Assertion, OptionValues}

trait ReplaceableTests[F[_], Resource <: { def metadata: Option[ObjectMeta] }]
    extends AnyFlatSpec
    with Matchers
    with OptionValues
    with MinikubeClientProvider[F] {

  def namespacedApi(namespaceName: String)(
      implicit client: KubernetesClient[F]
  ): Replaceable[F, Resource]
  def createChecked(namespaceName: String, resourceName: String)(
      implicit client: KubernetesClient[F]
  ): F[Resource]
  def getChecked(namespaceName: String, resourceName: String)(
      implicit client: KubernetesClient[F]
  ): F[Resource]
  def sampleResource(resourceName: String, labels: Map[String, String] = Map.empty): Resource
  def modifyResource(resource: Resource): Resource
  def checkUpdated(updatedResource: Resource): Assertion

  def replace(namespaceName: String, resourceName: String)(implicit client: KubernetesClient[F]) =
    for {
      resource <- getChecked(namespaceName, resourceName)
      status <- namespacedApi(namespaceName).replace(modifyResource(resource))
      _ = status shouldBe Status.Ok
    } yield ()

  "replace" should s"replace a $resourceName" in usingMinikube { implicit client =>
    for {
      namespaceName <- Applicative[F].pure(resourceName.toLowerCase)
      resourceName <- Applicative[F].pure("some-resource")
      _ <- createChecked(namespaceName, resourceName)
      _ <- retry(replace(namespaceName, resourceName))
      replaced <- getChecked(namespaceName, resourceName)
      _ = checkUpdated(replaced)
    } yield ()
  }

  it should "fail on non existing namespace" in usingMinikube { implicit client =>
    for {
      status <- namespacedApi("non-existing").replace(
        sampleResource("non-existing")
      )
      _ = status should (equal(Status.NotFound).or(
        equal(Status.InternalServerError)
      ))
    } yield ()
  }

  it should s"fail on non existing $resourceName" in usingMinikube { implicit client =>
    for {
      namespaceName <- Applicative[F].pure(resourceName.toLowerCase)
      status <- namespacedApi(namespaceName).replace(
        sampleResource("non-existing")
      )
      _ = status should (equal(Status.NotFound).or(
        equal(Status.InternalServerError)
      ))
    } yield ()
  }
}
Example 11
Source File: AssertingSyntax.scala From cats-effect-testing with Apache License 2.0

package cats.effect.testing.scalatest

import cats.Functor
import cats.effect.Sync
import org.scalatest.{Assertion, Assertions, Succeeded}
import cats.implicits._

// NOTE: the enclosing trait and implicit-class declarations were cut off in the
// extracted snippet; they are reconstructed here (the implicit-class name is
// illustrative) so the surviving `assertThrows` method reads as a complete unit.
trait AssertingSyntax {
  self: Assertions =>

  implicit class EffectAssertingOps[F[_], A](val self: F[A]) {

    def assertThrows[E <: Throwable](implicit F: Sync[F], ct: reflect.ClassTag[E]): F[Assertion] =
      self.attempt.flatMap {
        case Left(t: E) =>
          F.pure(Succeeded: Assertion)
        case Left(t) =>
          F.delay(
            fail(
              s"Expected an exception of type ${ct.runtimeClass.getName} but got an exception: $t"
            )
          )
        case Right(a) =>
          F.delay(
            fail(s"Expected an exception of type ${ct.runtimeClass.getName} but got a result: $a")
          )
      }
  }
}
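With the syntax in scope, an effectful test can assert that an effect fails with a specific exception type. A minimal usage sketch (suite name is illustrative; assumes cats-effect 2.x, where IO#unsafeToFuture bridges back to ScalaTest's async style):

import cats.effect.IO
import org.scalatest.funsuite.AsyncFunSuite

// Hypothetical suite mixing in the syntax defined above.
class AssertThrowsSpec extends AsyncFunSuite with AssertingSyntax {
  test("raiseError surfaces as the expected exception type") {
    IO.raiseError[Int](new IllegalStateException("boom"))
      .assertThrows[IllegalStateException]
      .unsafeToFuture()
  }
}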
Example 12
Source File: IntegrationBeforeAndAfterAll.scala From cosmos with Apache License 2.0

package com.mesosphere.cosmos

import com.google.common.io.CharStreams
import com.mesosphere.cosmos.circe.Decoders.parse
import com.mesosphere.cosmos.http.CosmosRequests
import com.mesosphere.cosmos.test.CosmosIntegrationTestClient.CosmosClient
import com.mesosphere.cosmos.thirdparty.marathon.model.AppId
import io.lemonlabs.uri.dsl._
import com.twitter.finagle.http.Status
import io.circe.jawn.decode
import java.io.InputStreamReader
import org.scalatest.Assertion
import org.scalatest.BeforeAndAfterAll
import org.scalatest.Suite
import org.scalatest.concurrent.Eventually
import scala.concurrent.duration._

trait IntegrationBeforeAndAfterAll extends BeforeAndAfterAll with Eventually { this: Suite =>

  private[this] lazy val logger = org.slf4j.LoggerFactory.getLogger(getClass)

  private[this] val universeUri =
    "https://downloads.mesosphere.com/universe/02493e40f8564a39446d06c002f8dcc8e7f6d61f/repo-up-to-1.8.json"
  private[this] val universeConverterUri =
    "https://universe-converter.mesosphere.com/transform?url=" + universeUri

  override def beforeAll(): Unit = {
    Requests.deleteRepository(Some("Universe"))

    val customPkgMgrResource = s"/${ItObjects.customManagerAppName}.json"

    logger.info(s"Creating marathon app from $customPkgMgrResource")
    Requests
      .postMarathonApp(
        parse(
          Option(this.getClass.getResourceAsStream(customPkgMgrResource)) match {
            case Some(is) =>
              CharStreams.toString(new InputStreamReader(is))
            case _ =>
              throw new IllegalStateException(s"Unable to load classpath resource: $customPkgMgrResource")
          }
        ).toOption.get.asObject.get
      )
    Requests.waitForDeployments()

    Requests.addRepository(
      "Universe",
      universeConverterUri,
      Some(0)
    )

    Requests.addRepository(
      "V5Testpackage",
      ItObjects.V5TestPackage,
      Some(0)
    )

    Requests.addRepository(
      "V4TestUniverse",
      ItObjects.V4TestUniverseConverterURI,
      Some(0)
    )

    // This package is present only in V4TestUniverse and this method ensures that the
    // package collection cache is cleared before starting the integration tests
    val _ = waitUntilCacheReloads()
  }

  override def afterAll(): Unit = {
    Requests.deleteRepository(Some("V4TestUniverse"))
    Requests.deleteRepository(Some("V5Testpackage"))
    val customMgrAppId = AppId(ItObjects.customManagerAppName)
    Requests.deleteMarathonApp(customMgrAppId)
    Requests.waitForMarathonAppToDisappear(customMgrAppId)
    Requests.deleteRepository(None, Some(universeConverterUri))
    val _ = Requests.addRepository("Universe", "https://universe.mesosphere.com/repo")
  }

  private[this] def waitUntilCacheReloads(): Assertion = {
    val packageName = "helloworld-invalid"
    eventually(timeout(2.minutes), interval(10.seconds)) {
      val response = CosmosClient.submit(
        CosmosRequests.packageDescribeV3(rpc.v1.model.DescribeRequest(packageName, None))
      )
      assertResult(Status.Ok)(response.status)
      val Right(actualResponse) = decode[rpc.v3.model.DescribeResponse](response.contentString)
      assert(actualResponse.`package`.name == packageName)
    }
  }
}
Example 13
Source File: FeatureTestBase.scala From TransmogrifAI with BSD 3-Clause "New" or "Revised" License

package com.salesforce.op.test

import com.salesforce.op.features._
import com.salesforce.op.features.types._
import com.salesforce.op.utils.spark.RichDataset.RichDataset
import org.apache.spark.ml.{Estimator, Transformer}
import org.apache.spark.sql.Dataset
import org.scalatest.prop.PropertyChecks
import org.scalatest.{Assertion, Suite}

import scala.reflect.ClassTag
import scala.reflect.runtime.universe.TypeTag

// NOTE: the enclosing trait declaration was cut off in the extracted snippet; it is
// reconstructed here (parent traits are a best guess from the imports) so the members
// below read as a complete unit. Other members of the original trait are elided.
trait FeatureTestBase extends TestSparkContext with PropertyChecks { self: Suite =>

  def testOp[A <: FeatureType : TypeTag, B <: FeatureType : TypeTag,
  C <: FeatureType : TypeTag : FeatureTypeSparkConverter : ClassTag]
  (
    op: FeatureLike[A] => FeatureLike[B] => FeatureLike[C]
  ): BinaryTester[A, B, C] = new BinaryTester[A, B, C] {
    def of(v: (A, B)*): Checker[C] = new Checker[C] {
      def expecting(z: C*): Assertion = {
        val (data, f1, f2) = TestFeatureBuilder[A, B](v)
        val f = op(f1)(f2)
        checkFeature(f, data, expected = z, clue = s"Testing ${f.originStage.operationName} on $v: ")
      }
    }
  }

  sealed abstract class UnaryTester[A <: FeatureType,
  C <: FeatureType : TypeTag : FeatureTypeSparkConverter : ClassTag] {
    def of(x: A*): Checker[C]
  }

  sealed abstract class BinaryTester[A <: FeatureType, B <: FeatureType,
  C <: FeatureType : TypeTag : FeatureTypeSparkConverter : ClassTag] {
    def of(x: A, y: B): Checker[C] = of((x, y))
    def of(x: (A, B)*): Checker[C]
  }

  sealed abstract class Checker[C <: FeatureType : TypeTag : FeatureTypeSparkConverter : ClassTag] {
    def expecting(z: C*): Assertion

    protected def checkFeature(f: FeatureLike[C], data: Dataset[_], clue: String, expected: Seq[C]): Assertion = {
      val transformed = f.originStage match {
        case e: Estimator[_] => e.fit(data).transform(data)
        case t: Transformer => t.transform(data)
      }
      withClue(clue)(
        new RichDataset(transformed).collect[C](f) should contain theSameElementsInOrderAs expected
      )
    }
  }
}
Example 14
Source File: SplitterSummaryAsserts.scala From TransmogrifAI with BSD 3-Clause "New" or "Revised" License

package com.salesforce.op.stages.impl.tuning

import com.salesforce.op.stages.impl.selector.ModelSelectorNames
import org.scalatest.{Assertion, Matchers}

trait SplitterSummaryAsserts {
  self: Matchers =>

  def assertDataBalancerSummary(summary: Option[SplitterSummary])(
    assert: DataBalancerSummary => Assertion
  ): Assertion = summary match {
    case Some(s: DataBalancerSummary) =>
      val meta = s.toMetadata()
      meta.getString(SplitterSummary.ClassName) shouldBe classOf[DataBalancerSummary].getName
      meta.getLong(ModelSelectorNames.Positive) should be >= 0L
      meta.getLong(ModelSelectorNames.Negative) should be >= 0L
      meta.getDouble(ModelSelectorNames.Desired) should be >= 0.0
      meta.getDouble(ModelSelectorNames.UpSample) should be >= 0.0
      meta.getDouble(ModelSelectorNames.DownSample) should be >= 0.0
      assert(s)
    case x => fail(s"Unexpected data balancer summary: $x")
  }

  def assertDataCutterSummary(summary: Option[SplitterSummary])(
    assert: DataCutterSummary => Assertion
  ): Assertion = summary match {
    case Some(s: DataCutterSummary) =>
      val meta = s.toMetadata()
      meta.getString(SplitterSummary.ClassName) shouldBe classOf[DataCutterSummary].getName
      meta.getDoubleArray(ModelSelectorNames.LabelsKept).foreach(_ should be >= 0.0)
      meta.getDoubleArray(ModelSelectorNames.LabelsDropped).foreach(_ should be >= 0.0)
      meta.getDouble(ModelSelectorNames.DownSample) should be >= 0.0
      meta.getLong(ModelSelectorNames.PreSplitterDataCount) should be >= 0L
      meta.getLong(ModelSelectorNames.LabelsDroppedTotal) should be >= 0L
      assert(s)
    case x => fail(s"Unexpected data cutter summary: $x")
  }

  def assertDataSplitterSummary(summary: Option[SplitterSummary])(
    assert: DataSplitterSummary => Assertion
  ): Assertion = summary match {
    case Some(s: DataSplitterSummary) =>
      val meta = s.toMetadata()
      meta.getString(SplitterSummary.ClassName) shouldBe classOf[DataSplitterSummary].getName
      assert(s)
    case x => fail(s"Unexpected data splitter summary: $x")
  }
}
Example 15
Source File: AttributeAsserts.scala From TransmogrifAI with BSD 3-Clause "New" or "Revised" License

package com.salesforce.op.stages.impl.feature

import com.salesforce.op.features.types.OPVector
import org.apache.spark.ml.attribute.AttributeGroup
import org.apache.spark.sql.types.StructField
import org.scalatest.{Assertion, Matchers}

trait AttributeAsserts {
  self: Matchers =>

  final def assertNominal(schema: StructField, expectedNominal: Array[Boolean], output: Array[OPVector]): Assertion = {
    for {
      (x, i) <- output.zipWithIndex
      _ = withClue(s"Output vector $i and expectedNominal arrays are not of the same length:") {
        x.value.size shouldBe expectedNominal.length
      }
      (value, nominal) <- x.value.toArray.zip(expectedNominal)
    } if (nominal) value should (be(0.0) or be(1.0))

    val attributes = AttributeGroup.fromStructField(schema).attributes
    withClue("Field attributes were not set or not as expected:") {
      attributes.map(_.map(_.isNominal).toSeq) shouldBe Some(expectedNominal.toSeq)
    }
  }
}
Example 16
Source File: FeatureSparkTypeTest.scala From TransmogrifAI with BSD 3-Clause "New" or "Revised" License

package com.salesforce.op.features

import com.salesforce.op.features.types.FeatureType
import com.salesforce.op.test.{TestCommon, TestSparkContext}
import org.apache.spark.ml.linalg.SQLDataTypes.VectorType
import org.apache.spark.sql.types._
import org.junit.runner.RunWith
import org.scalatest.{Assertion, FlatSpec}
import org.scalatest.junit.JUnitRunner

import scala.reflect.runtime.universe._

@RunWith(classOf[JUnitRunner])
class FeatureSparkTypeTest extends FlatSpec with TestCommon {

  val primitiveTypes = Seq(
    (DoubleType, weakTypeTag[types.Real], DoubleType),
    (FloatType, weakTypeTag[types.Real], DoubleType),
    (LongType, weakTypeTag[types.Integral], LongType),
    (IntegerType, weakTypeTag[types.Integral], LongType),
    (ShortType, weakTypeTag[types.Integral], LongType),
    (ByteType, weakTypeTag[types.Integral], LongType),
    (DateType, weakTypeTag[types.Date], LongType),
    (TimestampType, weakTypeTag[types.DateTime], LongType),
    (StringType, weakTypeTag[types.Text], StringType),
    (BooleanType, weakTypeTag[types.Binary], BooleanType),
    (VectorType, weakTypeTag[types.OPVector], VectorType)
  )

  val nonNullable = Seq(
    (DoubleType, weakTypeTag[types.RealNN], DoubleType),
    (FloatType, weakTypeTag[types.RealNN], DoubleType)
  )

  private def mapType(v: DataType) = MapType(StringType, v, valueContainsNull = true)
  private def arrType(v: DataType) = ArrayType(v, containsNull = true)

  val collectionTypes = Seq(
    (arrType(LongType), weakTypeTag[types.DateList], arrType(LongType)),
    (arrType(DoubleType), weakTypeTag[types.Geolocation], arrType(DoubleType)),
    (arrType(StringType), weakTypeTag[types.TextList], arrType(StringType)),
    (mapType(StringType), weakTypeTag[types.TextMap], mapType(StringType)),
    (mapType(DoubleType), weakTypeTag[types.RealMap], mapType(DoubleType)),
    (mapType(LongType), weakTypeTag[types.IntegralMap], mapType(LongType)),
    (mapType(BooleanType), weakTypeTag[types.BinaryMap], mapType(BooleanType)),
    (mapType(arrType(StringType)), weakTypeTag[types.MultiPickListMap], mapType(arrType(StringType))),
    (mapType(arrType(DoubleType)), weakTypeTag[types.GeolocationMap], mapType(arrType(DoubleType)))
  )

  Spec(FeatureSparkTypes.getClass) should "assign appropriate feature type tags for valid types and versa" in {
    primitiveTypes.map(scala.Function.tupled(assertTypes()))
  }

  it should "assign appropriate feature type tags for valid non-nullable types and versa" in {
    nonNullable.map(scala.Function.tupled(assertTypes(isNullable = false)))
  }

  it should "assign appropriate feature type tags for collection types and versa" in {
    collectionTypes.map(scala.Function.tupled(assertTypes()))
  }

  it should "error for unsupported types" in {
    val error = intercept[IllegalArgumentException](FeatureSparkTypes.featureTypeTagOf(BinaryType, isNullable = false))
    error.getMessage shouldBe "Spark BinaryType is currently not supported"
  }

  it should "error for unknown types" in {
    val unknownType = NullType
    val error = intercept[IllegalArgumentException](FeatureSparkTypes.featureTypeTagOf(unknownType, isNullable = false))
    error.getMessage shouldBe s"No feature type tag mapping for Spark type $unknownType"
  }

  def assertTypes(
    isNullable: Boolean = true
  )(
    sparkType: DataType,
    featureType: WeakTypeTag[_ <: FeatureType],
    expectedSparkType: DataType
  ): Assertion = {
    FeatureSparkTypes.featureTypeTagOf(sparkType, isNullable) shouldBe featureType
    FeatureSparkTypes.sparkTypeOf(featureType) shouldBe expectedSparkType
  }
}
Example 17
Source File: ToolTestUtilities.scala From scala-debugger with Apache License 2.0

package test

import java.util.concurrent.ArrayBlockingQueue

import org.scaladebugger.api.utils.Logging
import org.scaladebugger.tool.frontend.VirtualTerminal
import org.scalatest.{Assertion, Matchers}

// NOTE: the enclosing declaration and other members were cut off in the extracted
// snippet; the wrapper is reconstructed here so the code reads as a complete unit.
// `nextLine` is declared abstract below because only its use survived the extraction.
trait ToolTestUtilities extends Logging with Matchers {

  // Defined elsewhere in the original trait: polls the terminal for its next output line.
  def nextLine(vt: VirtualTerminal): Option[String]

  def validateNextLine(
    vt: VirtualTerminal,
    text: String,
    success: (String, String) => Assertion = (text, line) => text should be (line),
    fail: String => Assertion = fail(_: String),
    lineLogger: String => Unit = logger.debug(_: String)
  ): Assertion = {
    import scala.reflect.runtime.universe._
    val t = Literal(Constant(text)).toString

    nextLine(vt) match {
      case Some(line) =>
        val l = Literal(Constant(line)).toString
        lineLogger(l)
        success(text, line)
      case None => fail(s"Unable to find desired line in time: '$t'")
    }
  }
}
Example 18
Source File: ArrayEncodingBaseSpec.scala From quill with Apache License 2.0

package io.getquill.context.sql.encoding

import java.time.LocalDate
import java.util.Date

import io.getquill.{ MappedEncoding, Spec }
import org.scalatest.{ Assertion, BeforeAndAfterEach }

trait ArrayEncodingBaseSpec extends Spec with BeforeAndAfterEach {

  // Support all sql base types and `Seq` implementers
  case class ArraysTestEntity(
    texts:      List[String],
    decimals:   Seq[BigDecimal],
    bools:      Vector[Boolean],
    bytes:      List[Byte],
    shorts:     IndexedSeq[Short],
    ints:       Seq[Int],
    longs:      Seq[Long],
    floats:     Seq[Float],
    doubles:    Seq[Double],
    timestamps: Seq[Date],
    dates:      Seq[LocalDate]
  )

  val e = ArraysTestEntity(List("test"), Seq(BigDecimal(2.33)), Vector(true, true), List(1),
    IndexedSeq(3), Seq(2), Seq(1, 2, 3), Seq(1f, 2f), Seq(4d, 3d),
    Seq(new Date(System.currentTimeMillis())), Seq(LocalDate.now()))

  // casting types can be dangerous so we need to ensure that everything is ok
  def baseEntityDeepCheck(e1: ArraysTestEntity, e2: ArraysTestEntity): Assertion = {
    e1.texts.head mustBe e2.texts.head
    e1.decimals.head mustBe e2.decimals.head
    e1.bools.head mustBe e2.bools.head
    e1.bytes.head mustBe e2.bytes.head
    e1.shorts.head mustBe e2.shorts.head
    e1.ints.head mustBe e2.ints.head
    e1.longs.head mustBe e2.longs.head
    e1.floats.head mustBe e2.floats.head
    e1.doubles.head mustBe e2.doubles.head
    e1.timestamps.head mustBe e2.timestamps.head
    e1.dates.head mustBe e2.dates.head
  }

  // Support Seq encoding basing on MappedEncoding
  case class StrWrap(str: String)
  implicit val strWrapEncode: MappedEncoding[StrWrap, String] = MappedEncoding(_.str)
  implicit val strWrapDecode: MappedEncoding[String, StrWrap] = MappedEncoding(StrWrap.apply)

  case class WrapEntity(texts: Seq[StrWrap])
  val wrapE = WrapEntity(List("hey", "ho").map(StrWrap.apply))
}
Example 19
Source File: TypeConverterTest.scala From morpheus with Apache License 2.0

package org.opencypher.okapi.ir.impl.typer

import org.opencypher.okapi.api.types._
import org.opencypher.okapi.testing.BaseTestSuite
import org.opencypher.v9_0.util.{symbols => frontend}
import org.scalatest.Assertion

class TypeConverterTest extends BaseTestSuite {

  test("should convert basic types") {
    frontend.CTBoolean shouldBeConvertedTo CTBoolean
    frontend.CTInteger shouldBeConvertedTo CTInteger
    frontend.CTFloat shouldBeConvertedTo CTFloat
    frontend.CTNumber shouldBeConvertedTo CTNumber
    frontend.CTString shouldBeConvertedTo CTString
    frontend.CTAny shouldBeConvertedTo CTAny
    frontend.CTDate shouldBeConvertedTo CTDate
    frontend.CTLocalDateTime shouldBeConvertedTo CTLocalDateTime
    frontend.CTDuration shouldBeConvertedTo CTDuration
  }

  test("should convert element types") {
    frontend.CTNode shouldBeConvertedTo CTNode
    frontend.CTRelationship shouldBeConvertedTo CTRelationship
    frontend.CTPath shouldBeConvertedTo CTPath
  }

  test("should convert container types") {
    frontend.CTList(frontend.CTInteger) shouldBeConvertedTo CTList(CTInteger)
    frontend.CTMap shouldBeConvertedTo CTMap
  }

  implicit class RichType(t: frontend.CypherType) {
    def shouldBeConvertedTo(other: CypherType): Assertion = {
      TypeConverter.convert(t) should equal(Some(other))
    }
  }
}
Example 20
Source File: toFrontendTypeTest.scala From morpheus with Apache License 2.0

package org.opencypher.okapi.ir.impl.typer

import org.opencypher.okapi.api.types._
import org.opencypher.okapi.testing.BaseTestSuite
import org.opencypher.v9_0.util.{symbols => frontend}
import org.scalatest.Assertion

class toFrontendTypeTest extends BaseTestSuite {

  it("can convert basic types") {
    CTAny shouldBeConvertedTo frontend.CTAny
    CTAny.nullable shouldBeConvertedTo frontend.CTAny
    CTInteger shouldBeConvertedTo frontend.CTInteger
    CTInteger.nullable shouldBeConvertedTo frontend.CTInteger
    CTFloat shouldBeConvertedTo frontend.CTFloat
    CTFloat.nullable shouldBeConvertedTo frontend.CTFloat
    CTNumber shouldBeConvertedTo frontend.CTNumber
    CTNumber.nullable shouldBeConvertedTo frontend.CTNumber
    CTBoolean shouldBeConvertedTo frontend.CTBoolean
    CTBoolean.nullable shouldBeConvertedTo frontend.CTBoolean
    CTString shouldBeConvertedTo frontend.CTString
    CTString.nullable shouldBeConvertedTo frontend.CTString
    CTDate shouldBeConvertedTo frontend.CTDate
    CTDate.nullable shouldBeConvertedTo frontend.CTDate
    CTLocalDateTime shouldBeConvertedTo frontend.CTLocalDateTime
    CTLocalDateTime.nullable shouldBeConvertedTo frontend.CTLocalDateTime
    CTDuration shouldBeConvertedTo frontend.CTDuration
    CTDuration.nullable shouldBeConvertedTo frontend.CTDuration
  }

  test("should convert element types") {
    CTNode shouldBeConvertedTo frontend.CTNode
    CTNode.nullable shouldBeConvertedTo frontend.CTNode
    CTRelationship shouldBeConvertedTo frontend.CTRelationship
    CTRelationship.nullable shouldBeConvertedTo frontend.CTRelationship
    CTPath shouldBeConvertedTo frontend.CTPath
  }

  test("should convert container types") {
    CTList(CTInteger) shouldBeConvertedTo frontend.CTList(frontend.CTInteger)
    CTList(CTInteger).nullable shouldBeConvertedTo frontend.CTList(frontend.CTInteger)
    CTList(CTInteger.nullable) shouldBeConvertedTo frontend.CTList(frontend.CTInteger)
    CTMap shouldBeConvertedTo frontend.CTMap
    CTMap.nullable shouldBeConvertedTo frontend.CTMap
  }

  implicit class RichType(t: CypherType) {
    def shouldBeConvertedTo(other: frontend.CypherType): Assertion = {
      toFrontendType(t) should equal(other)
    }
  }
}
Example 21
Source File: RecordMatchingTestSupport.scala From morpheus with Apache License 2.0

package org.opencypher.morpheus.testing.support

import org.apache.spark.sql.Row
import org.opencypher.morpheus.impl.MorpheusConverters._
import org.opencypher.morpheus.impl.MorpheusRecords
import org.opencypher.morpheus.impl.table.SparkTable.DataFrameTable
import org.opencypher.morpheus.testing.MorpheusTestSuite
import org.opencypher.okapi.api.table.CypherRecords
import org.opencypher.okapi.api.value.CypherValue
import org.opencypher.okapi.api.value.CypherValue._
import org.opencypher.okapi.impl.exception.IllegalArgumentException
import org.opencypher.okapi.ir.api.expr.{Expr, Param}
import org.opencypher.okapi.relational.api.planning.RelationalRuntimeContext
import org.opencypher.okapi.relational.impl.table.RecordHeader
import org.opencypher.okapi.testing.Bag
import org.opencypher.okapi.testing.Bag._
import org.scalatest.Assertion

import scala.collection.JavaConverters._

trait RecordMatchingTestSupport {
  self: MorpheusTestSuite =>

  implicit class RichRow(r: Row) {
    def getCypherValue(expr: Expr, header: RecordHeader)
      (implicit context: RelationalRuntimeContext[DataFrameTable]): CypherValue = {
      expr match {
        case Param(name) => context.parameters(name)
        case _ =>
          header.getColumn(expr) match {
            case None => throw IllegalArgumentException(
              expected = s"column for $expr",
              actual = header.pretty)
            case Some(column) => CypherValue(r.get(r.schema.fieldIndex(column)))
          }
      }
    }
  }

  implicit class RecordMatcher(records: MorpheusRecords) {
    def shouldMatch(expected: CypherMap*): Assertion = {
      records.collect.toBag should equal(Bag(expected: _*))
    }

    def shouldMatch(expectedRecords: MorpheusRecords): Assertion = {
      records.header should equal(expectedRecords.header)
      val actualData = records.toLocalIterator.asScala.toSet
      val expectedData = expectedRecords.toLocalIterator.asScala.toSet
      actualData should equal(expectedData)
    }
  }

  implicit class RichRecords(records: CypherRecords) {
    val morpheusRecords: MorpheusRecords = records.asMorpheus

    def toMaps: Bag[CypherMap] = Bag(morpheusRecords.toCypherMaps.collect(): _*)
  }
}
Example 22
Source File: ReactRefTest.scala From slinky with MIT License

package slinky.core

import org.scalajs.dom
import org.scalajs.dom.html
import slinky.core.facade.React
import slinky.web.ReactDOM
import slinky.web.html.{div, ref}

import scala.concurrent.Promise

import org.scalatest.Assertion
import org.scalatest.funsuite.AsyncFunSuite

class ReactRefTest extends AsyncFunSuite {
  test("Can pass in a ref object to an HTML tag and use it") {
    val elemRef = React.createRef[html.Div]
    ReactDOM.render(
      div(ref := elemRef)("hello!"),
      dom.document.createElement("div")
    )

    assert(elemRef.current.innerHTML == "hello!")
  }

  test("Can pass in a ref object to a Slinky component and use it") {
    val promise: Promise[Assertion] = Promise()
    val ref = React.createRef[TestForceUpdateComponent.Def]

    ReactDOM.render(
      TestForceUpdateComponent(() => promise.success(assert(true))).withRef(ref),
      dom.document.createElement("div")
    )

    ref.current.forceUpdate()

    promise.future
  }

  test("Can use forwardRef to pass down a ref to a lower element") {
    val forwarded = React.forwardRef[String, html.Div](FunctionalComponent((props, rf) => {
      div(ref := rf)(props)
    }))

    val divRef = React.createRef[html.Div]
    ReactDOM.render(
      forwarded("hello").withRef(divRef),
      dom.document.createElement("div")
    )

    assert(divRef.current.innerHTML == "hello")
  }
}
Example 23
Source File: LithiumSpec.scala From lithium with Apache License 2.0

package com.swissborg.lithium

import cats.implicits._
import cats.{Applicative, Functor, Monoid}
import com.swissborg.lithium.ArbitraryStrategy._
import com.swissborg.lithium.instances.ArbitraryTestInstances
import com.swissborg.lithium.strategy._
import com.swissborg.lithium.utils.PostResolution
import org.scalacheck.Arbitrary
import org.scalactic.anyvals._
import org.scalatest.wordspec.AnyWordSpecLike
import org.scalatest.Assertion
import org.scalatestplus.scalacheck.ScalaCheckPropertyChecks
import org.scalatest.matchers.should.Matchers

trait LithiumSpec
    extends AnyWordSpecLike
    with Matchers
    with ScalaCheckPropertyChecks
    with ArbitraryTestInstances {

  implicit override val generatorDrivenConfig: PropertyCheckConfiguration =
    PropertyCheckConfiguration(minSuccessful = PosInt(1000),
                               maxDiscardedFactor = PosZDouble(5),
                               minSize = PosZInt(0),
                               sizeRange = PosZInt(100),
                               workers = PosInt(8))

  final def simulate[F[_]: Functor, Strat[_[_]], S <: Scenario: Arbitrary](name: String)(
    run: F[Assertion] => Assertion
  )(implicit strategy: ArbitraryStrategy[Strat[F]],
    ev: Strat[F] <:< Strategy[F],
    M: Monoid[F[PostResolution]]): Unit =
    name in {
      forAll { simulation: Simulation[F, Strat, S] =>
        run(simulation.splitBrainResolved.map(_ shouldBe true))
      }
    }

  final def simulateWithNonCleanPartitions[F[_]: Applicative, Strat[_[_]], S <: Scenario: Arbitrary](name: String)(
    run: F[Assertion] => Assertion
  )(implicit strategy: ArbitraryStrategy[Strat[F]],
    ev: Strat[F] <:< Strategy[F],
    M: Monoid[F[PostResolution]]): Unit =
    simulate[F, Union[*[_], Strat, IndirectlyConnected], WithNonCleanPartitions[S]](name)(run)
}
Example 24
Source File: TapirJsonPlayTests.scala From tapir with Apache License 2.0

package sttp.tapir.json.play

import org.scalatest.{FlatSpec, Matchers, Assertion}
import play.api.libs.json._
import sttp.tapir._
import sttp.tapir.DecodeResult._

import java.util.Date

object TapirJsonPlayCodec extends TapirJsonPlay

class TapirJsonPlayTests extends FlatSpec with Matchers {
  case class Customer(name: String, yearOfBirth: Int, lastPurchase: Option[Long])

  object Customer {
    implicit val rw: Format[Customer] = Json.format
  }

  val customerDecoder = TapirJsonPlayCodec.readsWritesCodec[Customer]

  // Helper to test encoding then decoding an object is the same as the original
  def testEncodeDecode[T: Format: Schema: Validator](original: T): Assertion = {
    val codec = TapirJsonPlayCodec.readsWritesCodec[T]

    val encoded = codec.encode(original)
    codec.decode(encoded) match {
      case Value(d) =>
        d shouldBe original
      case f: DecodeResult.Failure =>
        fail(f.toString)
    }
  }

  it should "encode and decode Scala case class with non-empty Option elements" in {
    val customer = Customer("Alita", 1985, Some(1566150331L))
    testEncodeDecode(customer)
  }

  it should "encode and decode Scala case class with empty Option elements" in {
    val customer = Customer("Alita", 1985, None)
    testEncodeDecode(customer)
  }

  it should "encode and decode String type" in {
    testEncodeDecode("Hello, World!")
  }

  it should "encode and decode Long type" in {
    testEncodeDecode(1566150331L)
  }

  it should "encode and decode using custom Date serializer" in {
    val d = new Date
    testEncodeDecode(d)
  }

  it should "Fail to encode a badly formatted date" in {
    val codec = TapirJsonPlayCodec.readsWritesCodec[Date]
    val encoded = "\"OOPS-10-10 11:20:49.029\""

    codec.decode(encoded) match {
      case _: DecodeResult.Failure => succeed
      case Value(d)                => fail(s"Should not have been able to decode this date: $d")
    }
  }

  it should "encode to non-prettified Json" in {
    val customer = Customer("Alita", 1985, None)
    val codec = TapirJsonPlayCodec.readsWritesCodec[Customer]
    val expected = """{"name":"Alita","yearOfBirth":1985}"""
    codec.encode(customer) shouldBe expected
  }
}
Example 25
Source File: TapirJsonSprayTests.scala From tapir with Apache License 2.0

package sttp.tapir.json.spray

import org.scalatest.{Assertion, FlatSpec, Matchers}
import sttp.tapir._
import sttp.tapir.DecodeResult._
import spray.json._
import sttp.tapir.Codec.JsonCodec

object TapirJsonSprayCodec extends TapirJsonSpray

class TapirJsonSprayTests extends FlatSpec with Matchers with DefaultJsonProtocol {
  case class Customer(name: String, yearOfBirth: Int, lastPurchase: Option[Long])

  object Customer {
    implicit val rw: JsonFormat[Customer] = jsonFormat3(Customer.apply)
  }

  val customerDecoder: JsonCodec[Customer] = TapirJsonSprayCodec.jsonFormatCodec[Customer]

  // Helper to test encoding then decoding an object is the same as the original
  def testEncodeDecode[T: JsonFormat: Schema: Validator](original: T): Assertion = {
    val codec = TapirJsonSprayCodec.jsonFormatCodec[T]

    val encoded = codec.encode(original)
    codec.decode(encoded) match {
      case Value(d) =>
        d shouldBe original
      case f: DecodeResult.Failure =>
        fail(f.toString)
    }
  }

  it should "encode and decode Scala case class with non-empty Option elements" in {
    val customer = Customer("Alita", 1985, Some(1566150331L))
    testEncodeDecode(customer)
  }

  it should "encode and decode Scala case class with empty Option elements" in {
    val customer = Customer("Alita", 1985, None)
    testEncodeDecode(customer)
  }

  it should "encode and decode String type" in {
    testEncodeDecode("Hello, World!")
  }

  it should "encode and decode Long type" in {
    testEncodeDecode(1566150331L)
  }
}
Example 26
Source File: TapirJsonuPickleTests.scala From tapir with Apache License 2.0

package sttp.tapir.json.upickle

import upickle.default._
import org.scalatest.{Assertion, FlatSpec, Matchers}

import java.util.Date

import sttp.tapir.Codec.JsonCodec
import sttp.tapir._
import sttp.tapir.DecodeResult._

object TapirJsonuPickleCodec extends TapirJsonuPickle

class TapirJsonuPickleTests extends FlatSpec with Matchers {
  case class Customer(name: String, yearOfBirth: Int, lastPurchase: Option[Long])

  object Customer {
    implicit val rw: ReadWriter[Customer] = macroRW
  }

  val customerDecoder: JsonCodec[Customer] = TapirJsonuPickleCodec.readWriterCodec[Customer]

  // Helper to test encoding then decoding an object is the same as the original
  def testEncodeDecode[T: ReadWriter: Schema: Validator](original: T): Assertion = {
    val codec = TapirJsonuPickleCodec.readWriterCodec[T]

    val encoded = codec.encode(original)
    codec.decode(encoded) match {
      case Value(d) =>
        d shouldBe original
      case f: DecodeResult.Failure =>
        fail(f.toString)
    }
  }

  it should "encode and decode Scala case class with non-empty Option elements" in {
    val customer = Customer("Alita", 1985, Some(1566150331L))
    testEncodeDecode(customer)
  }

  it should "encode and decode Scala case class with empty Option elements" in {
    val customer = Customer("Alita", 1985, None)
    testEncodeDecode(customer)
  }

  it should "encode and decode String type" in {
    testEncodeDecode("Hello, World!")
  }

  it should "encode and decode Long type" in {
    testEncodeDecode(1566150331L)
  }

  // Custom Date serialization

  object DateConversionUtil {
    val dateFormatString = "yyyy-MM-dd HH:mm:ss.SSS"

    implicit val rw1 = upickle.default
      .readwriter[String]
      .bimap[Date](
        date => {
          val sdf = new java.text.SimpleDateFormat(dateFormatString)
          sdf.format(date)
        },
        s => {
          val dateFormat = new java.text.SimpleDateFormat(dateFormatString)
          dateFormat.parse(s)
        }
      )
  }

  it should "encode and decode using custom Date serializer" in {
    import DateConversionUtil._
    val d = new Date
    testEncodeDecode(d)
  }

  it should "Fail to encode a badly formatted date" in {
    import DateConversionUtil._

    val codec = TapirJsonuPickleCodec.readWriterCodec[Date]
    val encoded = "\"OOPS-10-10 11:20:49.029\""

    codec.decode(encoded) match {
      case _: DecodeResult.Failure => succeed
      case Value(d)                => fail(s"Should not have been able to decode this date: $d")
    }
  }
}
Example 27
Source File: EnumFormatSpec.scala From scalapb-json4s with Apache License 2.0

package scalapb.json4s

import com.google.protobuf.{InvalidProtocolBufferException, Message}
import jsontest.test.{EnumTest, MyEnum}
import jsontest.test3.EnumTest3
import jsontest.test3.MyTest3.MyEnum3
import org.scalatest.Assertion
import scalapb.GeneratedMessageCompanion
import scalapb.JavaProtoSupport
import com.google.protobuf.util.JsonFormat.{Parser => JavaParser}
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.must.Matchers

class EnumFormatSpec extends AnyFlatSpec with Matchers with JavaAssertions {
  // not ignoring unknown fields:
  "default parser" should "match Java behavior for string enums" in new DefaultParserContext {
    assertFails("""{"enum":"ZAZA"}""", EnumTest)
    assertFails("""{"enum":"ZAZA"}""", EnumTest3)
    assertFails("""{"enum":""}""", EnumTest)
    assertFails("""{"enum":""}""", EnumTest3)
    assertParse("""{"enum":"V1"}""", EnumTest(Some(MyEnum.V1)))
    assertParse("""{"enum":"V1"}""", EnumTest3(MyEnum3.V1))
    assertParse("""{"enum":"0"}""", EnumTest(Some(MyEnum.UNKNOWN)))
    assertParse("""{"enum":"0"}""", EnumTest3())
    assertParse("""{"enum":"1.0"}""", EnumTest(Some(MyEnum.V1)))
    assertParse("""{"enum":"1.0"}""", EnumTest3(MyEnum3.V1))
    assertFails("""{"enum":"1.4"}""", EnumTest)
    assertFails("""{"enum":"1.4"}""", EnumTest3)
    assertFails("""{"enum":"10"}""", EnumTest)
    assertParse("""{"enum":"10"}""", EnumTest3(MyEnum3.Unrecognized(10)))
  }

  "default parser" should "match Java behavior for int enums" in new DefaultParserContext {
    assertFails("""{"enum":10}""", EnumTest)
    assertParse("""{"enum":10}""", EnumTest3(MyEnum3.Unrecognized(10)))
    assertParse("""{"enum":0}""", EnumTest(Some(MyEnum.UNKNOWN)))
    assertParse("""{"enum":0}""", EnumTest3(MyEnum3.UNKNOWN))
    assertParse("""{"enum":0.0}""", EnumTest(Some(MyEnum.UNKNOWN)))
    assertParse("""{"enum":0.0}""", EnumTest3(MyEnum3.UNKNOWN))
    assertFails("""{"enum":0.4}""", EnumTest)
    assertFails("""{"enum":0.4}""", EnumTest3)
    assertParse("""{"enum":1}""", EnumTest(Some(MyEnum.V1)))
    assertParse("""{"enum":1}""", EnumTest3(MyEnum3.V1))
    assertParse("""{"enum":1.0}""", EnumTest(Some(MyEnum.V1)))
    assertParse("""{"enum":1.0}""", EnumTest3(MyEnum3.V1))
    assertFails("""{"enum":-1}""", EnumTest)
    assertParse("""{"enum":-1}""", EnumTest3(MyEnum3.Unrecognized(-1)))
  }

  "ignoring unknown fields parser" should "match Java behavior for strings enums" in new IgnoringUnknownParserContext {
    assertParse("""{"enum":"ZAZA"}""", EnumTest())
    assertParse("""{"enum":"ZAZA"}""", EnumTest3())
    assertParse("""{"enum":""}""", EnumTest())
    assertParse("""{"enum":""}""", EnumTest3())
    assertParse("""{"enum":"V1"}""", EnumTest(Some(MyEnum.V1)))
    assertParse("""{"enum":"V1"}""", EnumTest3(MyEnum3.V1))
    assertParse("""{"enum":"0"}""", EnumTest(Some(MyEnum.UNKNOWN)))
    assertParse("""{"enum":"0"}""", EnumTest3())
    assertParse("""{"enum":"1.0"}""", EnumTest(Some(MyEnum.V1)))
    assertParse("""{"enum":"1.0"}""", EnumTest3(MyEnum3.V1))
    assertParse("""{"enum":"1.4"}""", EnumTest())
    assertParse("""{"enum":"1.4"}""", EnumTest3())
    assertParse("""{"enum":"10"}""", EnumTest())
    assertParse("""{"enum":"10"}""", EnumTest3(MyEnum3.Unrecognized(10)))
  }

  "ignoring unknown fields parser" should "match Java behavior for int enums" in new IgnoringUnknownParserContext {
    assertParse("""{"enum":10}""", EnumTest())
    assertParse("""{"enum":10}""", EnumTest3(MyEnum3.Unrecognized(10)))
    assertParse("""{"enum":0}""", EnumTest(Some(MyEnum.UNKNOWN)))
    assertParse("""{"enum":0}""", EnumTest3(MyEnum3.UNKNOWN))
    assertParse("""{"enum":0.0}""", EnumTest(Some(MyEnum.UNKNOWN)))
    assertParse("""{"enum":0.0}""", EnumTest3(MyEnum3.UNKNOWN))
    assertParse("""{"enum":0.4}""", EnumTest())
    assertParse("""{"enum":0.4}""", EnumTest3()) // extracted page repeated EnumTest() here; EnumTest3 restores the parallel pairing used throughout
    assertParse("""{"enum":1}""", EnumTest(Some(MyEnum.V1)))
    assertParse("""{"enum":1}""", EnumTest3(MyEnum3.V1))
    assertParse("""{"enum":1.0}""", EnumTest(Some(MyEnum.V1)))
    assertParse("""{"enum":1.0}""", EnumTest3(MyEnum3.V1))
    assertParse("""{"enum":-1}""", EnumTest())
    assertParse("""{"enum":-1}""", EnumTest3(MyEnum3.Unrecognized(-1)))
  }

  "Enum" should "be serialized the same way as java" in {
    assertJsonIsSameAsJava(jsontest.test.EnumTest())
    assertJsonIsSameAsJava(jsontest.test.EnumTest(Some(MyEnum.V1)))
  }
}
Example 28
Source File: CoderAssertions.scala From scio with Apache License 2.0 | 5 votes |
package com.spotify.scio.testing import com.spotify.scio.coders.{Coder, CoderMaterializer} import org.apache.beam.sdk.coders.{Coder => BCoder} import org.apache.beam.sdk.options.{PipelineOptions, PipelineOptionsFactory} import org.apache.beam.sdk.util.{CoderUtils, SerializableUtils} import org.scalactic.Equality import org.scalatest.Assertion import org.scalatest.matchers.should.Matchers._ import scala.reflect.ClassTag object CoderAssertions { private lazy val DefaultPipelineOptions = PipelineOptionsFactory.create() implicit class CoderAssertionsImplicits[T](private val value: T) extends AnyVal { def coderShould( coderAssertion: CoderAssertion[T] )(implicit c: Coder[T], eq: Equality[T]): Assertion = coderAssertion.assert(value) } trait CoderAssertion[T] { def assert(value: T)(implicit c: Coder[T], eq: Equality[T]): Assertion } def roundtrip[T](opts: PipelineOptions = DefaultPipelineOptions): CoderAssertion[T] = new CoderAssertion[T] { override def assert(value: T)(implicit c: Coder[T], eq: Equality[T]): Assertion = { val beamCoder = CoderMaterializer.beamWithDefault(c, o = opts) checkRoundtripWithCoder(beamCoder, value) } } def roundtripKryo[T: ClassTag]( opts: PipelineOptions = DefaultPipelineOptions ): CoderAssertion[T] = new CoderAssertion[T] { override def assert(value: T)(implicit c: Coder[T], eq: Equality[T]): Assertion = { val kryoCoder = CoderMaterializer.beamWithDefault(Coder.kryo[T], o = opts) checkRoundtripWithCoder(kryoCoder, value) } } def notFallback[T: ClassTag](opts: PipelineOptions = DefaultPipelineOptions): CoderAssertion[T] = new CoderAssertion[T] { override def assert(value: T)(implicit c: Coder[T], eq: Equality[T]): Assertion = { c should !==(Coder.kryo[T]) val beamCoder = CoderMaterializer.beamWithDefault(c, o = opts) checkRoundtripWithCoder[T](beamCoder, value) } } def fallback[T: ClassTag](opts: PipelineOptions = DefaultPipelineOptions): CoderAssertion[T] = new CoderAssertion[T] { override def assert(value: T)(implicit c: Coder[T], eq: Equality[T]): Assertion = { c should ===(Coder.kryo[T]) roundtripKryo(opts).assert(value) } } def coderIsSerializable[A](implicit c: Coder[A]): Assertion = coderIsSerializable(CoderMaterializer.beamWithDefault(c)) private def coderIsSerializable[A](beamCoder: BCoder[A]): Assertion = noException should be thrownBy SerializableUtils.ensureSerializable(beamCoder) private def checkRoundtripWithCoder[T](beamCoder: BCoder[T], value: T)(implicit eq: Equality[T] ): Assertion = { val bytes = CoderUtils.encodeToByteArray(beamCoder, value) val result = CoderUtils.decodeFromByteArray(beamCoder, bytes) result should ===(value) } }
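A sketch of how these assertions read at a use site. Track is an invented example type, and the spec assumes scio's automatic Coder derivation for case classes:

import com.spotify.scio.testing.CoderAssertions._
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

// assumed example type; scio derives Coder[Track] implicitly
final case class Track(id: Long, title: String)

class TrackCoderSpec extends AnyFlatSpec with Matchers {
  "Coder[Track]" should "roundtrip values" in {
    Track(1L, "Aeroplane") coderShould roundtrip()
  }

  it should "be derived rather than a Kryo fallback" in {
    Track(1L, "Aeroplane") coderShould notFallback()
  }

  it should "be serializable" in {
    coderIsSerializable[Track]
  }
}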
Example 29
Source File: TypeProviderIT.scala From scio with Apache License 2.0 | 5 votes |
package com.spotify.scio.avro.types import org.scalatest.Assertion import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers import scala.annotation.StaticAnnotation import scala.reflect.runtime.universe._ class TypeProviderIT extends AnyFlatSpec with Matchers { @AvroType.fromSchemaFile(""" |https://raw.githubusercontent.com/spotify/scio/master/ |scio-avro/src/test/avro/ |scio-avro-test.avsc """.stripMargin) class FromResourceMultiLine @AvroType.fromSchemaFile( "https://raw.githubusercontent.com/spotify/scio/master/scio-avro/src/test/avro/scio-avro-test.avsc" ) class FromResource "AvroType.fromSchemaFile" should "support reading schema from multiline resource" in { val r = FromResourceMultiLine(1) r.test shouldBe 1 } it should "support reading schema from resource" in { val r = FromResource(2) r.test shouldBe 2 } class Annotation1 extends StaticAnnotation class Annotation2 extends StaticAnnotation @Annotation1 @AvroType.fromSchemaFile( "https://raw.githubusercontent.com/spotify/scio/master/scio-avro/src/test/avro/scio-avro-test.avsc" ) @Annotation2 class FromResourceWithSurroundingAnnotations it should "preserve surrounding user defined annotations" in { containsAllAnnotTypes[FromResourceWithSurroundingAnnotations] } @AvroType.fromSchemaFile( "https://raw.githubusercontent.com/spotify/scio/master/scio-avro/src/test/avro/scio-avro-test.avsc" ) @Annotation1 @Annotation2 class FromResourceWithSequentialAnnotations it should "preserve sequential user defined annotations" in { containsAllAnnotTypes[FromResourceWithSequentialAnnotations] } def containsAllAnnotTypes[T: TypeTag]: Assertion = typeOf[T].typeSymbol.annotations .map(_.tree.tpe) .containsSlice(Seq(typeOf[Annotation1], typeOf[Annotation2])) shouldBe true }
Example 30
Source File: EmbeddedKafkaSpecSupport.scala From embedded-kafka-schema-registry with MIT License | 5 votes |
package net.manub.embeddedkafka.schemaregistry import java.net.{InetAddress, Socket} import net.manub.embeddedkafka.schemaregistry.EmbeddedKafkaSpecSupport.{ Available, NotAvailable, ServerStatus } import org.scalatest.Assertion import org.scalatest.concurrent.{Eventually, IntegrationPatience} import org.scalatest.matchers.should.Matchers import org.scalatest.time.{Milliseconds, Seconds, Span} import org.scalatest.wordspec.AnyWordSpecLike import scala.util.{Failure, Success, Try} trait EmbeddedKafkaSpecSupport extends AnyWordSpecLike with Matchers with Eventually with IntegrationPatience { implicit val config: PatienceConfig = PatienceConfig(Span(1, Seconds), Span(100, Milliseconds)) def expectedServerStatus(port: Int, expectedStatus: ServerStatus): Assertion = eventually { status(port) shouldBe expectedStatus } private def status(port: Int): ServerStatus = { Try(new Socket(InetAddress.getByName("localhost"), port)) match { case Failure(_) => NotAvailable case Success(_) => Available } } } object EmbeddedKafkaSpecSupport { sealed trait ServerStatus case object Available extends ServerStatus case object NotAvailable extends ServerStatus }
Example 31
Source File: EmbeddedKafkaTraitSpec.scala From embedded-kafka-schema-registry with MIT License | 5 votes |
package net.manub.embeddedkafka.schemaregistry import net.manub.embeddedkafka.schemaregistry.EmbeddedKafka._ import net.manub.embeddedkafka.schemaregistry.EmbeddedKafkaSpecSupport.{ Available, NotAvailable } import org.scalatest.Assertion class EmbeddedKafkaTraitSpec extends EmbeddedKafkaSpecSupport { "the withRunningKafka method" should { "start a Schema Registry server on a specified port" in { implicit val config: EmbeddedKafkaConfig = EmbeddedKafkaConfig(schemaRegistryPort = 12345) withRunningKafka { expectedServerStatus(12345, Available) } } } "the withRunningKafkaOnFoundPort method" should { "start a Schema Registry server on an available port if 0" in { val userDefinedConfig: EmbeddedKafkaConfig = EmbeddedKafkaConfig(schemaRegistryPort = 0) withRunningKafkaOnFoundPort(userDefinedConfig) { actualConfig => expectedServerStatus(actualConfig.schemaRegistryPort, Available) } } "start and stop Kafka, Zookeeper, and Schema Registry successfully on non-zero ports" in { val userDefinedConfig = EmbeddedKafkaConfig( kafkaPort = 12345, zooKeeperPort = 12346, schemaRegistryPort = 12347 ) val actualConfig = withRunningKafkaOnFoundPort(userDefinedConfig) { actualConfig => actualConfig shouldBe userDefinedConfig everyServerIsAvailable(actualConfig) actualConfig } noServerIsAvailable(actualConfig) } } private def everyServerIsAvailable(config: EmbeddedKafkaConfig): Assertion = { expectedServerStatus(config.kafkaPort, Available) expectedServerStatus(config.schemaRegistryPort, Available) expectedServerStatus(config.zooKeeperPort, Available) } private def noServerIsAvailable(config: EmbeddedKafkaConfig): Assertion = { expectedServerStatus(config.kafkaPort, NotAvailable) expectedServerStatus(config.schemaRegistryPort, NotAvailable) expectedServerStatus(config.zooKeeperPort, NotAvailable) } }
Example 32
Source File: GraphMatchingTestSupport.scala From morpheus with Apache License 2.0 | 5 votes |
package org.opencypher.morpheus.testing.support import org.opencypher.morpheus.impl.table.SparkTable.DataFrameTable import org.opencypher.morpheus.testing.fixture.{MorpheusSessionFixture, SparkSessionFixture} import org.opencypher.okapi.relational.api.graph.RelationalCypherGraph import org.opencypher.okapi.relational.api.table.RelationalCypherRecords import org.opencypher.okapi.testing.BaseTestSuite import org.scalatest.Assertion import scala.collection.immutable.Map trait GraphMatchingTestSupport { self: BaseTestSuite with SparkSessionFixture with MorpheusSessionFixture => private def getElementIds(records: RelationalCypherRecords[DataFrameTable]): Set[List[Byte]] = { val elementVar = records.header.vars.toSeq match { case Seq(v) => v case other => throw new UnsupportedOperationException(s"Expected records with 1 element, got $other") } records.table.df.select(records.header.column(elementVar)).collect().map(_.getAs[Array[Byte]](0).toList).toSet } private def verify(actual: RelationalCypherGraph[DataFrameTable], expected: RelationalCypherGraph[DataFrameTable]): Assertion = { val expectedNodeIds = getElementIds(expected.nodes("n")) val expectedRelIds = getElementIds(expected.relationships("r")) val actualNodeIds = getElementIds(actual.nodes("n")) val actualRelIds = getElementIds(actual.relationships("r")) expectedNodeIds should equal(actualNodeIds) expectedRelIds should equal(actualRelIds) } implicit class GraphsMatcher(graphs: Map[String, RelationalCypherGraph[DataFrameTable]]) { def shouldMatch(expectedGraphs: RelationalCypherGraph[DataFrameTable]*): Unit = { withClue("expected and actual must have same size") { graphs.size should equal(expectedGraphs.size) } graphs.values.zip(expectedGraphs).foreach { case (actual, expected) => verify(actual, expected) } } } implicit class GraphMatcher(graph: RelationalCypherGraph[DataFrameTable]) { def shouldMatch(expectedGraph: RelationalCypherGraph[DataFrameTable]): Unit = verify(graph, expectedGraph) } }
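At a call site inside such a suite, the two implicit matchers read as follows; building the graphs requires the Morpheus session fixtures, so they are left as placeholders in this sketch:

// inside a BaseTestSuite with SparkSessionFixture with MorpheusSessionFixture
// that also mixes in GraphMatchingTestSupport; the graphs are placeholders
def verifyResult(actual: RelationalCypherGraph[DataFrameTable],
                 expected: RelationalCypherGraph[DataFrameTable]): Unit = {
  actual shouldMatch expected                   // single-graph matcher
  Map("result" -> actual).shouldMatch(expected) // keyed, multi-graph matcher
}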
Example 33
Source File: RideIssueTransactionSuite.scala From Waves with MIT License | 5 votes |
package com.wavesplatform.it.sync.smartcontract import java.nio.charset.StandardCharsets import com.typesafe.config.Config import com.wavesplatform.common.state.ByteStr import com.wavesplatform.common.utils.EitherExt2 import com.wavesplatform.it.NodeConfigs import com.wavesplatform.it.NodeConfigs.Default import com.wavesplatform.it.api.SyncHttpApi._ import com.wavesplatform.it.api.TransactionInfo import com.wavesplatform.it.sync._ import com.wavesplatform.it.transactions.BaseTransactionSuite import com.wavesplatform.lang.v1.estimator.v3.ScriptEstimatorV3 import com.wavesplatform.transaction.smart.script.ScriptCompiler import org.scalatest.{Assertion, CancelAfterFailure} class RideIssueTransactionSuite extends BaseTransactionSuite with CancelAfterFailure { override protected def nodeConfigs: Seq[Config] = NodeConfigs .Builder(Default, 1, Seq.empty) .overrideBase(_.quorum(0)) .buildNonConflicting() val assetName = "Asset name" val assetDescription = "Asset description" val assetQuantity = 2000 val issueCheckV4 = compile( s""" | {-# STDLIB_VERSION 4 #-} | {-# CONTENT_TYPE EXPRESSION #-} | {-# SCRIPT_TYPE ACCOUNT #-} | | match tx { | case i: IssueTransaction => | i.name == "$assetName" && | i.description == "$assetDescription" | | case _ => | throw("unexpected") | } | """.stripMargin ) val issueCheckV3 = compile( s""" | {-# STDLIB_VERSION 3 #-} | {-# CONTENT_TYPE EXPRESSION #-} | {-# SCRIPT_TYPE ACCOUNT #-} | | match tx { | case i: IssueTransaction => | i.name == base64'${ByteStr(assetName.getBytes(StandardCharsets.UTF_8)).base64}' && | i.description == base64'${ByteStr(assetDescription.getBytes(StandardCharsets.UTF_8)).base64}' | | case _ => | throw("unexpected") | } | """.stripMargin ) test("check issuing asset name and description using V3 and V4 script") { assertSuccessIssue(firstAddress, issueCheckV3) assertSuccessIssue(secondAddress, issueCheckV4) } def compile(script: String): String = ScriptCompiler.compile(script, ScriptEstimatorV3).explicitGet()._1.bytes().base64 def assertSuccessIssue(address: String, script: String): Assertion = { val setScriptId = sender.setScript(address, Some(script), setScriptFee, waitForTx = true).id val scriptInfo = sender.addressScriptInfo(address) scriptInfo.script.isEmpty shouldBe false scriptInfo.scriptText.isEmpty shouldBe false scriptInfo.script.get.startsWith("base64:") shouldBe true sender.transactionInfo[TransactionInfo](setScriptId).script.get.startsWith("base64:") shouldBe true val assetId = sender.issue(address, assetName, assetDescription, assetQuantity, fee = issueFee + smartFee, waitForTx = true).id sender.assertAssetBalance(address, assetId, assetQuantity) val asset = sender.assetsDetails(assetId) asset.name shouldBe assetName asset.description shouldBe assetDescription } }
Example 34
Source File: IOAsyncTests.scala From cats-effect with Apache License 2.0 | 5 votes |
package cats.effect import org.scalactic.source.Position import org.scalatest.Assertion import org.scalatest.matchers.should.Matchers import org.scalatest.funsuite.AsyncFunSuite import scala.concurrent.{ExecutionContext, Future, Promise} import scala.concurrent.duration._ import scala.util.{Failure, Success, Try} class IOAsyncTests extends AsyncFunSuite with Matchers { implicit override def executionContext = ExecutionContext.global implicit val timer: Timer[IO] = IO.timer(executionContext) implicit val cs: ContextShift[IO] = IO.contextShift(executionContext) def testEffectOnRunAsync(source: IO[Int], expected: Try[Int])(implicit pos: Position): Future[Assertion] = { val effect = Promise[Int]() val attempt = Promise[Try[Int]]() effect.future.onComplete(attempt.success) val io = source.runAsync { case Right(a) => IO { effect.success(a); () } case Left(e) => IO { effect.failure(e); () } } for (_ <- io.toIO.unsafeToFuture(); v <- attempt.future) yield { v shouldEqual expected } } test("IO.pure#runAsync") { testEffectOnRunAsync(IO.pure(10), Success(10)) } test("IO.apply#runAsync") { testEffectOnRunAsync(IO(10), Success(10)) } test("IO.apply#shift#runAsync") { testEffectOnRunAsync(IO.shift.flatMap(_ => IO(10)), Success(10)) } test("IO.raiseError#runAsync") { val dummy = new RuntimeException("dummy") testEffectOnRunAsync(IO.raiseError(dummy), Failure(dummy)) } test("IO.raiseError#shift#runAsync") { val dummy = new RuntimeException("dummy") testEffectOnRunAsync(IO.shift.flatMap(_ => IO.raiseError(dummy)), Failure(dummy)) } test("IO.sleep(10.ms)") { val io = IO.sleep(10.millis).map(_ => 10) for (r <- io.unsafeToFuture()) yield { r shouldBe 10 } } test("IO.sleep(negative)") { val io = IO.sleep(-10.seconds).map(_ => 10) for (r <- io.unsafeToFuture()) yield { r shouldBe 10 } } }
Example 35
Source File: UsingSpec.scala From Dsl.scala with Apache License 2.0 | 5 votes |
package com.thoughtworks.dsl package domains import com.thoughtworks.dsl.Dsl.{!!, Continuation, reset} import com.thoughtworks.dsl.keywords.{Using, Yield} import org.scalatest.Assertion import org.scalatest.freespec.AnyFreeSpec import org.scalatest.matchers.should.Matchers class UsingSpec extends AnyFreeSpec with Matchers { def successContinuation[Domain](domain: Domain): (Domain !! Throwable) @reset = Continuation.empty(domain) "AutoCloseable" - { "scope" - { "arm" in { var isOpen = false def raii: Stream[Int] !! Throwable !! Assertion = Continuation.apply { !Yield(1) isOpen should be(false) val a = !Using { !Yield(2) new AutoCloseable { isOpen should be(false) isOpen = true def close(): Unit = { isOpen should be(true) isOpen = false } } } !Yield(3) isOpen should be(true) } isOpen should be(false) val myException = new Exception val stream = raii(_ => _ => Stream.empty)(throw _) stream should be(Stream(1, 2, 3)) isOpen should be(false) } } } }
Example 36
Source File: ReactApolloTest.scala From apollo-scalajs with MIT License | 5 votes |
package com.apollographql.scalajs.react import com.apollographql.scalajs.cache.InMemoryCache import com.apollographql.scalajs.link.{HttpLink, HttpLinkOptions} import com.apollographql.scalajs.{ApolloBoostClient, ApolloClient, CurrencyRatesQuery, UnfetchFetch} import org.scalajs.dom.document import org.scalatest.{Assertion, AsyncFunSuite} import slinky.web.ReactDOM import slinky.web.html.div import scala.concurrent.Promise import scala.scalajs.js import scala.scalajs.js.JSON class ReactApolloTest extends AsyncFunSuite { js.Dynamic.global.window.fetch = UnfetchFetch implicit override def executionContext = scala.concurrent.ExecutionContext.Implicits.global test("Can mount an ApolloProvider with a client instance") { assert(!js.isUndefined( ReactDOM.render( ApolloProvider( client = ApolloBoostClient(uri = "https://graphql-currency-rates.glitch.me") )( div() ), document.createElement("div") ) )) } test("Can server-side render data to string based on a query") { val link = new HttpLink(options = HttpLinkOptions(uri = "https://graphql-currency-rates.glitch.me")) val cache = new InMemoryCache() val client = new ApolloClient(options = js.Dynamic.literal(ssrMode = true, link = link, cache = cache)) ReactApolloServer.renderToStringWithData( ApolloProvider(ApolloProvider.Props(client = client))( Query( CurrencyRatesQuery, CurrencyRatesQuery.Variables("USD") ) { d => if (d.data.isDefined) { div(d.data.get.rates.get.head.get.currency.get) } else "" } ) ).toFuture.map { html => assert(html == """<div data-reactroot="">AED</div>""") } } }
Example 37
Source File: ApolloLinkTest.scala From apollo-scalajs with MIT License | 5 votes |
package com.apollographql.scalajs import com.apollographql.scalajs.link.{ApolloLink, GraphQLRequest, HttpLink, HttpLinkOptions} import org.scalatest.{Assertion, AsyncFunSuite} import scala.concurrent.Promise import scala.scalajs.js class ApolloLinkTest extends AsyncFunSuite { js.Dynamic.global.window.fetch = UnfetchFetch implicit override def executionContext = scala.concurrent.ExecutionContext.Implicits.global test("Can perform a query with an HttpLink") { val resultPromise = Promise[Assertion] ApolloLink.execute( new HttpLink(HttpLinkOptions("https://graphql-currency-rates.glitch.me")), GraphQLRequest( gql( """{ | rates(currency: "USD") { | currency | } |}""".stripMargin ) ) ).forEach { res => resultPromise.success(assert(true)) } resultPromise.future } }
Example 38
Source File: EncoderDecoderAssertions.scala From protoless with Apache License 2.0 | 5 votes |
package io.protoless import org.scalactic.Equality import org.scalatest.Assertion import io.protoless.fields.{FieldDecoder, RepeatableFieldDecoder} import io.protoless.messages.{Decoder, Encoder} import io.protoless.tests.ProtolessSuite import io.protoless.tests.samples.{Colors, TestCase} trait EncoderDecoderAssertions { self: ProtolessSuite => implicit protected val colorDecoder: RepeatableFieldDecoder[Colors.Value] = FieldDecoder.decodeEnum(Colors) protected def testEncoding[X](testCase: TestCase[X])(implicit enc: Encoder[X]): Assertion = { val bytes = enc.encodeAsBytes(testCase.source) val origin = testCase.protobuf.toByteArray bytes must ===(origin) } protected def testDecoding[X](testCase: TestCase[X])(implicit dec: Decoder[X], eq: Equality[X]): Assertion = { val bytes = testCase.protobuf.toByteArray val decoded = dec.decode(bytes) decoded match { case Right(res) => res must ===(testCase.source) case Left(err) => err.printStackTrace() fail(err) } } protected def testFullCycle[X](testCase: TestCase[X])(implicit dec: Decoder[X], enc: Encoder[X]): Assertion = { val bytes = enc.encodeAsBytes(testCase.source) val decoded = dec.decode(bytes) decoded match { case Right(res) => val rebytes = enc.encodeAsBytes(res) bytes must ===(rebytes) case Left(err) => err.printStackTrace() fail(err) } } }
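How the three checks are typically combined for one message type. Address and addressCase are illustrative placeholders, and the Encoder/Decoder instances resolve implicitly:

// inside a suite extending ProtolessSuite with EncoderDecoderAssertions
val addressCase: TestCase[Address] = ???  // pairs a value with its protobuf-java twin

testEncoding(addressCase)  // protoless bytes == protobuf-java bytes
testDecoding(addressCase)  // protobuf-java bytes decode back to the source value
testFullCycle(addressCase) // encode, decode, re-encode is byte-stable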
Example 39
Source File: LowLevelListenerWebSocketTest.scala From sttp with Apache License 2.0 | 5 votes |
package sttp.client.testing.websocket import java.util.concurrent.ConcurrentLinkedQueue import org.scalatest.concurrent.{Eventually, IntegrationPatience} import org.scalatest.{Assertion, BeforeAndAfterAll} import sttp.client._ import sttp.client.monad.MonadError import sttp.client.testing.{ConvertToFuture, ToFutureWrapper} import sttp.client.monad.syntax._ import scala.collection.JavaConverters._ import org.scalatest.SuiteMixin import org.scalatest.flatspec.AsyncFlatSpecLike import org.scalatest.matchers.should.Matchers import sttp.client.testing.HttpTest.wsEndpoint // TODO: change to `extends AsyncFlatSpec` when https://github.com/scalatest/scalatest/issues/1802 is fixed trait LowLevelListenerWebSocketTest[F[_], WS, WS_HANDLER[_]] extends SuiteMixin with AsyncFlatSpecLike with Matchers with BeforeAndAfterAll with ToFutureWrapper with Eventually with IntegrationPatience { implicit def backend: SttpBackend[F, Nothing, WS_HANDLER] implicit def convertToFuture: ConvertToFuture[F] private implicit lazy val monad: MonadError[F] = backend.responseMonad def testErrorWhenEndpointIsNotWebsocket: Boolean = true def createHandler(onTextFrame: String => Unit): WS_HANDLER[WS] def sendText(ws: WS, t: String): Unit def sendCloseFrame(ws: WS): Unit it should "send and receive ten messages" in { val n = 10 val received = new ConcurrentLinkedQueue[String]() basicRequest .get(uri"$wsEndpoint/ws/echo") .openWebsocket(createHandler(received.add)) .map { response => (1 to n).foreach { i => val msg = s"test$i" info(s"Sending text message: $msg") sendText(response.result, msg) } eventually { received.asScala.toList shouldBe (1 to n).map(i => s"echo: test$i").toList } sendCloseFrame(response.result) succeed } .toFuture() } it should "receive two messages" in { val received = new ConcurrentLinkedQueue[String]() basicRequest .get(uri"$wsEndpoint/ws/send_and_wait") .openWebsocket(createHandler(received.add)) .map { response => eventually { received.asScala.toList shouldBe List("test10", "test20") } sendCloseFrame(response.result) succeed } .toFuture() } if (testErrorWhenEndpointIsNotWebsocket) { it should "error if the endpoint is not a websocket" in { monad .handleError( basicRequest .get(uri"$wsEndpoint/echo") .openWebsocket(createHandler(_ => ())) .map(_ => fail("An exception should be thrown"): Assertion) ) { case e => (e shouldBe a[SttpClientException.ReadException]).unit } .toFuture() } } override protected def afterAll(): Unit = { backend.close().toFuture() super.afterAll() } }
Example 40
Source File: OkHttpHighLevelMonixWebsocketTest.scala From sttp with Apache License 2.0 | 5 votes |
package sttp.client.okhttp.monix import monix.eval.Task import monix.execution.Scheduler.Implicits.global import org.scalatest.Assertion import sttp.client._ import sttp.client.impl.monix.{TaskMonadAsyncError, convertMonixTaskToFuture} import sttp.client.monad.MonadError import sttp.client.monad.syntax._ import sttp.client.okhttp.WebSocketHandler import sttp.client.okhttp.monix.internal.SendMessageException import sttp.client.testing.ConvertToFuture import sttp.client.testing.websocket.HighLevelWebsocketTest import sttp.client.ws.WebSocket import sttp.client.testing.HttpTest.wsEndpoint import scala.concurrent.duration._ class OkHttpHighLevelMonixWebsocketTest extends HighLevelWebsocketTest[Task, WebSocketHandler] { override implicit val backend: SttpBackend[Task, Nothing, WebSocketHandler] = OkHttpMonixBackend().runSyncUnsafe() override implicit val convertToFuture: ConvertToFuture[Task] = convertMonixTaskToFuture override implicit val monad: MonadError[Task] = TaskMonadAsyncError override def createHandler: Option[Int] => Task[WebSocketHandler[WebSocket[Task]]] = MonixWebSocketHandler(_) it should "error if the endpoint is not a websocket" in { monad .handleError { basicRequest .get(uri"$wsEndpoint/echo") .openWebsocketF(createHandler(None)) .map(_ => fail: Assertion) } { case e: Exception => (e shouldBe a[SttpClientException.ReadException]).unit } .toFuture() } it should "error if incoming messages overflow the buffer" in { basicRequest .get(uri"$wsEndpoint/ws/echo") .openWebsocketF(createHandler(Some(3))) .flatMap { response => val ws = response.result send(ws, 1000) >> eventually(10 millis, 400)(ws.isOpen.map(_ shouldBe false)) } .onErrorRecover { case _: SendMessageException => succeed } .toFuture() } override def eventually[T](interval: FiniteDuration, attempts: Int)(f: => Task[T]): Task[T] = { (Task.sleep(interval) >> f).onErrorRestart(attempts) } }
Example 41
Source File: AsyncHttpClientHighLevelWebsocketTest.scala From sttp with Apache License 2.0 | 5 votes |
package sttp.client.asynchttpclient import java.nio.channels.ClosedChannelException import org.scalatest.Assertion import sttp.client.{SttpClientException, basicRequest} import sttp.client.monad.syntax._ import sttp.client.testing.websocket.HighLevelWebsocketTest import sttp.model.Uri._ import scala.concurrent.duration._ import sttp.client.testing.HttpTest.wsEndpoint abstract class AsyncHttpClientHighLevelWebsocketTest[F[_]] extends HighLevelWebsocketTest[F, WebSocketHandler] { it should "error if the endpoint is not a websocket" in { monad .handleError { basicRequest .get(uri"$wsEndpoint/echo") .openWebsocketF(createHandler(None)) .map(_ => fail(): Assertion) } { case e: Exception => (e shouldBe a[SttpClientException.ReadException]).unit } .toFuture() } it should "error if incoming messages overflow the buffer" in { basicRequest .get(uri"$wsEndpoint/ws/echo") .openWebsocketF(createHandler(Some(3))) .flatMap { response => val ws = response.result send(ws, 4) >> eventually(10.millis, 500) { ws.isOpen.map(_ shouldBe false) } } .handleError { case _: ClosedChannelException => succeed.unit } .toFuture() } }
Example 42
Source File: CodeGenExampleSpec.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved. // SPDX-License-Identifier: Apache-2.0 package com.daml.ledger.client.binding import java.util.UUID import com.daml.ledger.client.binding.{Primitive => P} import com.daml.sample.MyMain.PayOut import org.scalatest.{Assertion, Matchers, WordSpec} class CodeGenExampleSpec extends WordSpec with Matchers { val alice = P.Party("Alice") val bob = P.Party("Bob") val charlie = P.Party("Charlie") "create CallablePayout contract should compile" in { import com.daml.sample.MyMain.CallablePayout val createCommand: P.Update[P.ContractId[CallablePayout]] = CallablePayout(giver = alice, receiver = bob).create sendCommand(createCommand) } "exercise Call choice should compile" in { import com.daml.sample.MyMain.CallablePayout val givenContractId: P.ContractId[CallablePayout] = receiveContractIdFromTheLedger val exerciseCommand: P.Update[P.ContractId[PayOut]] = givenContractId.exerciseCall2(actor = alice) sendCommand(exerciseCommand) } "exercise Transfer choice should compile" in { import com.daml.sample.MyMain.CallablePayout val givenContractId: P.ContractId[CallablePayout] = receiveContractIdFromTheLedger val exerciseCommand: P.Update[P.ContractId[CallablePayout]] = givenContractId.exerciseTransfer(actor = bob, newReceiver = charlie) sendCommand(exerciseCommand) } "create contract with tuple should compile" in { import com.daml.sample.{MyMain, DA} val ct = MyMain.Twoples(alice, DA.Types.Tuple2(1, 2)) val createCommand = ct.create sendCommand(createCommand) } private def sendCommand[T](command: P.Update[P.ContractId[T]]): Assertion = command should not be null private def receiveContractIdFromTheLedger[T]: P.ContractId[T] = P.ContractId(UUID.randomUUID.toString) }
Example 43
Source File: HttpRequestRecorderItTest.scala From rokku with Apache License 2.0 | 5 votes |
package com.ing.wbaa.rokku.proxy.persistence

import akka.Done
import akka.actor.{ActorSystem, Props}
import akka.http.scaladsl.model.HttpRequest
import akka.http.scaladsl.model.Uri.{Authority, Host}
import akka.persistence.cassandra.query.scaladsl.CassandraReadJournal
import akka.persistence.query.PersistenceQuery
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.Sink
import com.amazonaws.services.s3.AmazonS3
import com.ing.wbaa.rokku.proxy.RokkuS3Proxy
import com.ing.wbaa.rokku.proxy.config.{HttpSettings, KafkaSettings, StorageS3Settings}
import com.ing.wbaa.rokku.proxy.data._
import com.ing.wbaa.rokku.proxy.handler.parsers.RequestParser
import com.ing.wbaa.rokku.proxy.handler.{FilterRecursiveListBucketHandler, RequestHandlerS3Cache}
import com.ing.wbaa.rokku.proxy.provider.{AuditLogProvider, MessageProviderKafka, SignatureProviderAws}
import com.ing.wbaa.rokku.proxy.queue.MemoryUserRequestQueue
import com.ing.wbaa.testkit.RokkuFixtures
import org.scalatest.Assertion
import org.scalatest.diagrams.Diagrams
import org.scalatest.wordspec.AsyncWordSpec

import scala.concurrent.duration._
import scala.concurrent.{Await, Future}

class HttpRequestRecorderItTest extends AsyncWordSpec with Diagrams with RokkuFixtures {
  implicit val testSystem: ActorSystem = ActorSystem.create("test-system")
  implicit val mat: ActorMaterializer = ActorMaterializer()

  val rokkuHttpSettings: HttpSettings = new HttpSettings(testSystem.settings.config) {
    override val httpPort: Int = 0
    override val httpBind: String = "127.0.0.1"
  }

  def withS3SdkToMockProxy(testCode: AmazonS3 => Assertion): Future[Assertion] = {
    val proxy: RokkuS3Proxy = new RokkuS3Proxy with RequestHandlerS3Cache with SignatureProviderAws
      with FilterRecursiveListBucketHandler with MessageProviderKafka with AuditLogProvider
      with MemoryUserRequestQueue with RequestParser {
      override implicit lazy val system: ActorSystem = testSystem
      override val httpSettings: HttpSettings = rokkuHttpSettings

      override def isUserAuthorizedForRequest(request: S3Request, user: User)(implicit id: RequestId): Boolean = true

      override def isUserAuthenticated(httpRequest: HttpRequest, awsSecretKey: AwsSecretKey)(implicit id: RequestId): Boolean = true

      override val storageS3Settings: StorageS3Settings = StorageS3Settings(testSystem)
      override val kafkaSettings: KafkaSettings = KafkaSettings(testSystem)

      override def areCredentialsActive(awsRequestCredential: AwsRequestCredential)(implicit id: RequestId): Future[Option[User]] =
        Future(Some(User(UserRawJson("userId", Some(Set("group")), "accesskey", "secretkey", None))))

      def createLineageFromRequest(httpRequest: HttpRequest, userSTS: User, userIPs: UserIps)(implicit id: RequestId): Future[Done] =
        Future.successful(Done)

      override protected def auditEnabled: Boolean = false
      override val requestPersistenceEnabled: Boolean = true
      override val configuredPersistenceId: String = "localhost-1"
    }

    proxy.startup.map { binding =>
      try testCode(getAmazonS3(
        authority = Authority(Host(binding.localAddress.getAddress), binding.localAddress.getPort)
      ))
      finally proxy.shutdown()
    }
  }

  private val CHECKER_PERSISTENCE_ID = "localhost-1"
  val requestRecorder = testSystem.actorOf(Props(classOf[HttpRequestRecorder]), CHECKER_PERSISTENCE_ID)

  val queries = PersistenceQuery(testSystem)
    .readJournalFor[CassandraReadJournal](CassandraReadJournal.Identifier)

  "S3 Proxy" should {
    s"with Request Recorder" that {
      "persists requests in Cassandra" in withS3SdkToMockProxy { sdk =>
        withBucket(sdk) { bucketName =>
          Thread.sleep(6000)
          val storedInCassandraF = queries.currentEventsByPersistenceId(CHECKER_PERSISTENCE_ID, 1L, Long.MaxValue)
            .map(_.event)
            .runWith(Sink.seq)
            .mapTo[Seq[ExecutedRequestEvt]]
          val r = Await.result(storedInCassandraF, 5.seconds).filter(_.httpRequest.getUri().toString.contains(bucketName))
          assert(r.size == 1)
          assert(r.head.userSTS.userName.value == "userId")
        }
      }
    }
  }
}
Example 44
Source File: MetadataAlgebraSpec.scala From hydra with Apache License 2.0 | 5 votes |
package hydra.kafka.algebras

import java.time.Instant

import cats.data.NonEmptyList
import cats.effect.{Concurrent, ContextShift, IO, Sync, Timer}
import cats.implicits._
import hydra.avro.registry.SchemaRegistry
import hydra.core.marshallers.History
import hydra.kafka.algebras.MetadataAlgebra.TopicMetadataContainer
import hydra.kafka.model.ContactMethod.Slack
import hydra.kafka.model.TopicMetadataV2Request.Subject
import hydra.kafka.model.{Public, StreamTypeV2, TopicMetadataV2, TopicMetadataV2Key, TopicMetadataV2Request, TopicMetadataV2Value}
import io.chrisdavenport.log4cats.SelfAwareStructuredLogger
import io.chrisdavenport.log4cats.slf4j.Slf4jLogger
import org.apache.avro.generic.GenericRecord
import org.scalatest.Assertion
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike
import retry.RetryPolicies._
import retry.syntax.all._
import retry.{RetryPolicy, _}

import scala.concurrent.ExecutionContext
import scala.concurrent.duration._

class MetadataAlgebraSpec extends AnyWordSpecLike with Matchers {

  implicit private val contextShift: ContextShift[IO] = IO.contextShift(ExecutionContext.global)
  private implicit val concurrentEffect: Concurrent[IO] = IO.ioConcurrentEffect

  private implicit val policy: RetryPolicy[IO] = limitRetries[IO](5) |+| exponentialBackoff[IO](500.milliseconds)
  private implicit val timer: Timer[IO] = IO.timer(ExecutionContext.global)
  private implicit def noop[A]: (A, RetryDetails) => IO[Unit] = retry.noop[IO, A]

  implicit private def unsafeLogger[F[_]: Sync]: SelfAwareStructuredLogger[F] =
    Slf4jLogger.getLogger[F]

  private implicit class RetryAndAssert[A](boolIO: IO[A]) {
    def retryIfFalse(check: A => Boolean): IO[Assertion] =
      boolIO.map(check).retryingM(identity, policy, noop).map(assert(_))
  }

  private val metadataTopicName = "_internal.metadataTopic"
  private val consumerGroup = "Consumer Group"

  (for {
    kafkaClient <- KafkaClientAlgebra.test[IO]
    schemaRegistry <- SchemaRegistry.test[IO]
    metadata <- MetadataAlgebra.make(metadataTopicName, consumerGroup, kafkaClient, schemaRegistry, consumeMetadataEnabled = true)
  } yield {
    runTests(metadata, kafkaClient)
  }).unsafeRunSync()

  private def runTests(metadataAlgebra: MetadataAlgebra[IO], kafkaClientAlgebra: KafkaClientAlgebra[IO]): Unit = {
    "MetadataAlgebraSpec" should {
      "retrieve none for non-existant topic" in {
        val subject = Subject.createValidated("Non-existantTopic").get
        metadataAlgebra.getMetadataFor(subject).unsafeRunSync() shouldBe None
      }

      "retrieve metadata" in {
        val subject = Subject.createValidated("subject1").get
        val (genericRecordsIO, key, value) = getMetadataGenericRecords(subject)

        (for {
          record <- genericRecordsIO
          _ <- kafkaClientAlgebra.publishMessage(record, metadataTopicName)
          _ <- metadataAlgebra.getMetadataFor(subject).retryIfFalse(_.isDefined)
          metadata <- metadataAlgebra.getMetadataFor(subject)
        } yield metadata shouldBe Some(TopicMetadataContainer(key, value, None, None))).unsafeRunSync()
      }

      "retrieve all metadata" in {
        val subject = Subject.createValidated("subject2").get
        val (genericRecordsIO, key, value) = getMetadataGenericRecords(subject)
        (for {
          record <- genericRecordsIO
          _ <- kafkaClientAlgebra.publishMessage(record, metadataTopicName)
          _ <- metadataAlgebra.getMetadataFor(subject).retryIfFalse(_.isDefined)
          allMetadata <- metadataAlgebra.getAllMetadata
        } yield allMetadata should have length 2).unsafeRunSync()
      }
    }
  }

  private def getMetadataGenericRecords(subject: Subject): (IO[(GenericRecord, Option[GenericRecord])], TopicMetadataV2Key, TopicMetadataV2Value) = {
    val key = TopicMetadataV2Key(subject)
    val value = TopicMetadataV2Value(
      StreamTypeV2.Entity,
      deprecated = false,
      Public,
      NonEmptyList.one(Slack.create("#channel").get),
      Instant.now,
      List(),
      None)
    (TopicMetadataV2.encode[IO](key, Some(value)), key, value)
  }
}
Example 45
Source File: StoreOpsTest.scala From fs2-blobstore with Apache License 2.0 | 5 votes |
package blobstore import java.nio.charset.Charset import java.nio.file.Files import java.util.concurrent.Executors import cats.effect.{Blocker, IO} import cats.effect.laws.util.TestInstances import cats.implicits._ import fs2.Pipe import org.scalatest.Assertion import org.scalatest.flatspec.AnyFlatSpec import implicits._ import org.scalatest.matchers.must.Matchers import scala.collection.mutable.ArrayBuffer import scala.concurrent.ExecutionContext class StoreOpsTest extends AnyFlatSpec with Matchers with TestInstances { implicit val cs = IO.contextShift(ExecutionContext.global) val blocker = Blocker.liftExecutionContext(ExecutionContext.fromExecutor(Executors.newCachedThreadPool)) behavior of "PutOps" it should "buffer contents and compute size before calling Store.put" in { val bytes: Array[Byte] = "AAAAAAAAAA".getBytes(Charset.forName("utf-8")) val store = DummyStore(_.size must be(Some(bytes.length))) fs2.Stream.emits(bytes).covary[IO].through(store.bufferedPut(Path("path/to/file.txt"), blocker)).compile.drain.unsafeRunSync() store.buf.toArray must be(bytes) } it should "upload a file from a nio Path" in { val bytes = "hello".getBytes(Charset.forName("utf-8")) val store = DummyStore(_.size must be(Some(bytes.length))) fs2.Stream.bracket(IO(Files.createTempFile("test-file", ".bin"))) { p => IO(p.toFile.delete).void }.flatMap { p => fs2.Stream.emits(bytes).covary[IO].through(fs2.io.file.writeAll(p, blocker)).drain ++ fs2.Stream.eval(store.put(p, Path("path/to/file.txt"), blocker)) }.compile.drain.unsafeRunSync() store.buf.toArray must be(bytes) } } final case class DummyStore(check: Path => Assertion) extends Store[IO] { val buf = new ArrayBuffer[Byte]() override def put(path: Path): Pipe[IO, Byte, Unit] = { check(path) in => { buf.appendAll(in.compile.toVector.unsafeRunSync()) fs2.Stream.emit(()) } } override def list(path: Path): fs2.Stream[IO, Path] = ??? override def get(path: Path, chunkSize: Int): fs2.Stream[IO, Byte] = ??? override def move(src: Path, dst: Path): IO[Unit] = ??? override def copy(src: Path, dst: Path): IO[Unit] = ??? override def remove(path: Path): IO[Unit] = ??? }
Example 46
Source File: ParserHarness.scala From Converter with GNU General Public License v3.0 | 5 votes |
package org.scalablytyped.converter.internal.ts.parser import org.scalablytyped.converter.internal.{files, InFile} import org.scalatest.Assertion import org.scalatest.matchers.should.Matchers._ import scala.util.parsing.combinator.Parsers object ParserHarness { implicit final class Forcer[T](res: Parsers#ParseResult[T]) { def force: T = res getOrElse sys.error( "Parse error at %s\n".format(res.next.pos.toString) + res.asInstanceOf[Parsers#NoSuccess].msg + "\n" + res.next.pos.longString, ) } def withTsFile[T](resourceName: String)(f: String => T): T = f(files content InFile(os.Path(getClass.getResource(s"/$resourceName").getFile))) def parseAs[T](input: String, parser: String => Parsers#ParseResult[T]): T = parser(input).force def notParse[T](input: String, parser: String => Parsers#ParseResult[T]): Unit = { val res = parser(input) if (res.isEmpty) () else sys.error(s"$input should not have parsed") } def shouldParseAs[T](input: String, parser: String => Parsers#ParseResult[T])(expected: T): Assertion = parseAs(input, parser) should equal(expected) }
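A self-contained demonstration of the harness against a toy grammar; real suites would pass productions of the TypeScript parser instead of IntParser:

import org.scalablytyped.converter.internal.ts.parser.ParserHarness._
import org.scalatest.funsuite.AnyFunSuite

import scala.util.parsing.combinator.RegexParsers

// a toy grammar, just to exercise the harness
object IntParser extends RegexParsers {
  val int: Parser[Int] = """-?\d+""".r ^^ (_.toInt)
  def apply(input: String): ParseResult[Int] = parseAll(int, input)
}

class ParserHarnessDemo extends AnyFunSuite {
  test("force unwraps a successful result") {
    assert(IntParser("42").force === 42)
  }

  test("shouldParseAs compares against an expected value") {
    shouldParseAs("-7", IntParser.apply)(-7)
  }

  test("notParse rejects input the grammar cannot handle") {
    notParse("forty-two", IntParser.apply)
  }
}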
Example 47
Source File: ExchangeTransactionCreatorSpecification.scala From matcher with MIT License | 5 votes |
package com.wavesplatform.dex.model import com.wavesplatform.dex.domain.asset.Asset import com.wavesplatform.dex.domain.bytes.ByteStr import com.wavesplatform.dex.domain.crypto import com.wavesplatform.dex.domain.crypto.Proofs import com.wavesplatform.dex.domain.order.Order import com.wavesplatform.dex.domain.order.OrderOps._ import com.wavesplatform.dex.domain.transaction.ExchangeTransactionV2 import com.wavesplatform.dex.domain.utils.EitherExt2 import com.wavesplatform.dex.{MatcherSpecBase, NoShrink} import org.scalacheck.Gen import org.scalamock.scalatest.PathMockFactory import org.scalatest.matchers.should.Matchers import org.scalatest.prop.TableDrivenPropertyChecks import org.scalatest.wordspec.AnyWordSpec import org.scalatest.{Assertion, BeforeAndAfterAll} import org.scalatestplus.scalacheck.{ScalaCheckPropertyChecks => PropertyChecks} import scala.concurrent.ExecutionContext.Implicits.global class ExchangeTransactionCreatorSpecification extends AnyWordSpec with Matchers with MatcherSpecBase with BeforeAndAfterAll with PathMockFactory with PropertyChecks with NoShrink with TableDrivenPropertyChecks { private def getExchangeTransactionCreator(hasMatcherScript: Boolean = false, hasAssetScripts: Asset => Boolean = _ => false): ExchangeTransactionCreator = { new ExchangeTransactionCreator(MatcherAccount, matcherSettings.exchangeTxBaseFee, hasMatcherScript, hasAssetScripts) } "ExchangeTransactionCreator" should { "create an ExchangeTransactionV2" when { (List(1, 2, 3) ++ List(1, 2, 3)).combinations(2).foreach { case List(counterVersion, submittedVersion) => s"counterVersion=$counterVersion, submittedVersion=$submittedVersion" in { val counter = buy(wavesBtcPair, 100000, 0.0008, matcherFee = Some(2000L), version = counterVersion.toByte) val submitted = sell(wavesBtcPair, 100000, 0.0007, matcherFee = Some(1000L), version = submittedVersion.toByte) val tc = getExchangeTransactionCreator() val oe = mkOrderExecutedRaw(submitted, counter) tc.createTransaction(oe).explicitGet() shouldBe a[ExchangeTransactionV2] } } } "take fee from order executed event" when { "orders are matched fully" in { val preconditions = for { ((_, buyOrder), (_, sellOrder)) <- orderV3MirrorPairGenerator } yield (buyOrder, sellOrder) test(preconditions) } "orders are matched partially" in { val preconditions = for { ((_, buyOrder), (senderSell, sellOrder)) <- orderV3MirrorPairGenerator } yield { val sellOrderWithUpdatedAmount = sellOrder.updateAmount(sellOrder.amount / 2) val newSignature = crypto.sign(senderSell, sellOrderWithUpdatedAmount.bodyBytes()) val correctedSellOrder = sellOrderWithUpdatedAmount.updateProofs(Proofs(Seq(ByteStr(newSignature)))) (buyOrder, correctedSellOrder) } test(preconditions) } def test(preconditions: Gen[(Order, Order)]): Assertion = forAll(preconditions) { case (buyOrder, sellOrder) => val tc = getExchangeTransactionCreator() val oe = mkOrderExecutedRaw(buyOrder, sellOrder) val tx = tc.createTransaction(oe).explicitGet() tx.buyMatcherFee shouldBe oe.submittedExecutedFee tx.sellMatcherFee shouldBe oe.counterExecutedFee } } } }
Example 48
Source File: ArrayUtilSpec.scala From hacktoberfest-scala-algorithms with GNU General Public License v3.0 | 5 votes |
package io.github.sentenza.hacktoberfest.util

import org.scalatest.{Assertion, Matchers, WordSpec, Inspectors}

class ArrayUtilSpec extends WordSpec with Matchers {
  "The ArrayUtil" should {
    "return an array of n elements" in {
      val arrayUtil = new ArrayUtil()
      val length = scala.util.Random.nextInt(1000)
      arrayUtil.buildRandomArray(length).length shouldBe length
    }

    "return an array whose values are capped by the given maximum" in {
      val arrayUtil = new ArrayUtil()
      val length = scala.util.Random.nextInt(1000)
      import Inspectors._
      val maximum = scala.util.Random.nextInt(900)
      forAll(arrayUtil.buildRandomArray(length, maximum)) { elem =>
        elem should be <= maximum
      }
    }
  }
}
Example 49
Source File: testkit.scala From iotchain with MIT License | 5 votes |
package jbok.codec import jbok.codec.rlp.RlpCodec import jbok.codec.rlp.implicits._ import org.scalatest.{Assertion, Matchers} import scodec.bits.{BitVector, ByteVector} import scodec.{Attempt, DecodeResult, Err} object testkit extends testkit trait testkit extends Matchers { def roundtripAndMatch[A](a: A, expected: ByteVector)(implicit c: RlpCodec[A]): Assertion = { roundtrip[A](a) a.encoded.bits.bytes shouldBe expected } def roundtripLen[A](a: A, expectedNumBytes: Int)(implicit c: RlpCodec[A]): Assertion = { roundtrip[A](a) a.encoded.bits.bytes.length shouldBe expectedNumBytes } def roundtrip[A](a: A)(implicit c: RlpCodec[A]): Assertion = roundtrip(c, a) def roundtrip[A](codec: RlpCodec[A], value: A): Assertion = { val encoded = codec.encode(value) encoded.isSuccessful shouldBe true val Attempt.Successful(DecodeResult(decoded, remainder)) = codec.decode(encoded.require) remainder shouldBe BitVector.empty decoded shouldBe value } def roundtripAll[A](codec: RlpCodec[A], as: collection.Iterable[A]): Unit = as foreach { a => roundtrip(codec, a) } def encodeError[A](codec: RlpCodec[A], a: A, err: Err): Assertion = { val encoded = codec.encode(a) encoded shouldBe Attempt.Failure(err) } def shouldDecodeFullyTo[A](codec: RlpCodec[A], buf: BitVector, expected: A): Assertion = { val Attempt.Successful(DecodeResult(actual, rest)) = codec decode buf rest shouldBe BitVector.empty actual shouldBe expected } }
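A short demo of the helpers, assuming jbok.codec.rlp.implicits derives an RlpCodec[String]; 0x83 'd' 'o' 'g' is the canonical RLP encoding of "dog":

import jbok.codec.rlp.implicits._
import jbok.codec.testkit._
import org.scalatest.FunSuite
import scodec.bits._

class RlpTestkitDemo extends FunSuite {
  test("String survives an RLP roundtrip") {
    roundtrip("dog")
  }

  test("and encodes to the canonical RLP bytes") {
    roundtripAndMatch("dog", hex"83646f67")
  }
}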
Example 50
Source File: Http4sRpcSpec.scala From iotchain with MIT License | 5 votes |
package jbok.network.rpc import cats.effect.IO import io.circe.Json import jbok.common.CommonSpec import jbok.network.rpc.http.{Http4sRpcServer, Http4sRpcTransport} import org.http4s.Uri import cats.implicits._ import org.http4s.circe.CirceEntityCodec._ import org.scalatest.Assertion import jbok.codec.impl.circe._ import io.circe.generic.auto._ class Http4sRpcSpec extends CommonSpec { val impl = new TestApiImpl val service = RpcService[IO, Json].mount[TestAPI[IO]](impl) val server = Http4sRpcServer.server[IO](service) def transport(uri: Uri) = new Http4sRpcTransport[IO, Json](uri) def client(uri: Uri) = RpcClient[IO, Json](transport(uri)).use[TestAPI[IO]] def assertIO[A](io1: IO[A], io2: IO[A]): IO[Assertion] = (io1, io2).mapN(_ shouldBe _) "Http4sRpcService" should { "impl service and client" in { val p = server.use { s => val c = client(s.baseUri) assertIO(c.foo, impl.foo) >> assertIO(c.bar, impl.bar) >> assertIO(c.qux("oho", 42), impl.qux("oho", 42)) } p.unsafeRunSync() } } }
Example 51
Source File: CustomMatcher.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved. // SPDX-License-Identifier: Apache-2.0 package com.daml.scalatest import org.scalatest.{Assertion, Succeeded} import scalaz.syntax.show._ import scalaz.{Equal, Show} object CustomMatcher { final implicit class CustomMatcherOps[A](val underlying: A) extends AnyVal { def should_===(other: A)(implicit eqEv: Equal[A], showEv: Show[A]): Assertion = if (eqEv.equal(underlying, other)) Succeeded else reportFailure(underlying, " =/= ", other) def should_=/=(other: A)(implicit eqEv: Equal[A], showEv: Show[A]): Assertion = if (eqEv.equal(underlying, other)) reportFailure(underlying, " === ", other) else Succeeded private def reportFailure(underlying: A, str: String, other: A)( implicit showEv: Show[A]): Assertion = throw CustomMatcherException(s"${underlying.shows}$str${other.shows}") } case class CustomMatcherException(c: String) extends RuntimeException(c) }
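Usage is a one-import affair; scalaz's standard instances supply the Equal and Show evidence the operators require:

import com.daml.scalatest.CustomMatcher._
import org.scalatest.WordSpec
import scalaz.std.anyVal._
import scalaz.std.string._

class CustomMatcherDemo extends WordSpec {
  "should_===" should {
    "succeed on equal values" in { 42 should_=== 42 }
    "have a negated twin" in { "foo" should_=/= "bar" }
    // a failing comparison throws CustomMatcherException showing both values
  }
}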
Example 52
Source File: ShowUnicodeEscapedStringSpec.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved. // SPDX-License-Identifier: Apache-2.0 package com.daml.ledger.client.binding.encoding import com.daml.ledger.client.binding.encoding.EncodingUtil.normalize import org.apache.commons.text.StringEscapeUtils import org.scalatest.prop.GeneratorDrivenPropertyChecks import org.scalatest.{Assertion, Matchers, WordSpec} import scalaz.Cord class ShowUnicodeEscapedStringSpec extends WordSpec with Matchers with GeneratorDrivenPropertyChecks { implicit override val generatorDrivenConfig = PropertyCheckConfiguration(minSuccessful = 10000) "should unicode-escape all non-ascii chars in the format that can compile back to original string" in { "scho\u0308n" should !==("schön") normalize("scho\u0308n") should ===(normalize("schön")) println(ShowUnicodeEscapedString.show("scho\u0308n")) ShowUnicodeEscapedString.show("scho\u0308n").toString should ===("\"scho\\u0308n\"") println(ShowUnicodeEscapedString.show("schön")) ShowUnicodeEscapedString.show("schön").toString should ===("\"sch\\u00F6n\"") "sch\u00F6n" should ===("schön") "\u00F6" should ===("ö") } "normalizing unicode string multiple times does not change it" in { "scho\u0308n" should !==("schön") normalize("scho\u0308n") should ===(normalize("schön")) normalize(normalize("scho\u0308n")) should ===(normalize("schön")) normalize("scho\u0308n") should ===(normalize(normalize("schön"))) } "ASCII slash should be unicode escaped" in { ShowUnicodeEscapedString.show("\\").toString.getBytes should ===("\"\\u005C\"".getBytes) } "unicode escaped string can be interpreted back to original string one example" in { testUnicodeEscapedStringCanBeUnescapedBackToOriginalString("scho\u0308n") } "unicode escaped zero can be interpreted back" in { testUnicodeEscapedStringCanBeUnescapedBackToOriginalString("\u0000") } "backslash followed by zero caused some issues" in { testUnicodeEscapedStringCanBeUnescapedBackToOriginalString("\\\u0000") } "any unicode escaped string can be interpreted back to original string" in forAll { s: String => testUnicodeEscapedStringCanBeUnescapedBackToOriginalString(s) } private def testUnicodeEscapedStringCanBeUnescapedBackToOriginalString(s0: String): Assertion = { val s1: Cord = ShowUnicodeEscapedString.show(s0) val s2: String = StringEscapeUtils.unescapeJava(removeWrappingQuotes(s1.toString)) s2.getBytes should ===(s0.getBytes) } private def removeWrappingQuotes(s: String): String = { require(s.length >= 2) s.substring(1, s.length - 1) } }
Example 53
Source File: AuthenticationProviderSTSItTest.scala From rokku with Apache License 2.0 | 5 votes |
package com.ing.wbaa.rokku.proxy.provider

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import com.amazonaws.services.securitytoken.model.{AssumeRoleRequest, GetSessionTokenRequest}
import com.ing.wbaa.rokku.proxy.config.StsSettings
import com.ing.wbaa.rokku.proxy.data._
import com.ing.wbaa.testkit.awssdk.StsSdkHelpers
import com.ing.wbaa.testkit.oauth.OAuth2TokenRequest
import org.scalatest.Assertion
import org.scalatest.diagrams.Diagrams
import org.scalatest.wordspec.AsyncWordSpec

import scala.concurrent.{ExecutionContext, Future}

class AuthenticationProviderSTSItTest extends AsyncWordSpec with Diagrams
  with AuthenticationProviderSTS
  with StsSdkHelpers
  with OAuth2TokenRequest {
  override implicit val testSystem: ActorSystem = ActorSystem.create("test-system")
  override implicit val system: ActorSystem = testSystem
  override implicit val executionContext: ExecutionContext = testSystem.dispatcher
  override implicit val materializer: ActorMaterializer = ActorMaterializer()(testSystem)

  override val stsSettings: StsSettings = StsSettings(testSystem)

  implicit val requestId: RequestId = RequestId("test")

  private val validKeycloakCredentials = Map(
    "grant_type" -> "password",
    "username" -> "testuser",
    "password" -> "password",
    "client_id" -> "sts-rokku"
  )
  private val userOneKeycloakCredentials = Map(
    "grant_type" -> "password",
    "username" -> "userone",
    "password" -> "password",
    "client_id" -> "sts-rokku"
  )

  def withAwsCredentialsValidInSTS(testCode: AwsRequestCredential => Future[Assertion]): Future[Assertion] = {
    val stsSdk = getAmazonSTSSdk(StsSettings(testSystem).stsBaseUri)
    retrieveKeycloackToken(validKeycloakCredentials).flatMap { keycloakToken =>
      val cred = stsSdk.getSessionToken(new GetSessionTokenRequest()
        .withTokenCode(keycloakToken.access_token))
        .getCredentials

      testCode(AwsRequestCredential(AwsAccessKey(cred.getAccessKeyId), Some(AwsSessionToken(cred.getSessionToken))))
    }
  }

  def withAssumeRoleInSTS(testCode: AwsRequestCredential => Future[Assertion]): Future[Assertion] = {
    val stsSdk = getAmazonSTSSdk(StsSettings(testSystem).stsBaseUri)
    retrieveKeycloackToken(userOneKeycloakCredentials).flatMap { keycloakToken =>
      val assumeRoleReq = new AssumeRoleRequest().withTokenCode(keycloakToken.access_token)
      assumeRoleReq.setRoleArn("arn:aws:iam::account-id:role/admin")
      assumeRoleReq.setRoleSessionName("testRole")
      val cred = stsSdk.assumeRole(assumeRoleReq).getCredentials

      testCode(AwsRequestCredential(AwsAccessKey(cred.getAccessKeyId), Some(AwsSessionToken(cred.getSessionToken))))
    }
  }

  "Authentication Provider STS" should {
    "check authentication" that {
      "succeeds for valid credentials" in {
        withAwsCredentialsValidInSTS { awsCredential =>
          areCredentialsActive(awsCredential).map { userResult =>
            assert(userResult.map(_.userName).contains(UserName("testuser")))
            assert(userResult.map(_.userGroups).head.contains(UserGroup("testgroup")))
            assert(userResult.map(_.userGroups).head.contains(UserGroup("group3")))
            assert(userResult.map(_.userGroups).head.size == 2)
            assert(userResult.exists(_.accessKey.value.length == 32))
            assert(userResult.exists(_.secretKey.value.length == 32))
          }
        }
      }

      "fail when user is not authenticated" in {
        areCredentialsActive(AwsRequestCredential(AwsAccessKey("notauthenticated"), Some(AwsSessionToken("okSessionToken")))).map { userResult =>
          assert(userResult.isEmpty)
        }
      }

      "succeeds for valid role" in {
        withAssumeRoleInSTS { awsCredential =>
          areCredentialsActive(awsCredential).map { roleResult =>
            assert(roleResult.map(_.userRole).contains(UserAssumeRole("admin")))
            assert(roleResult.map(_.userGroups).contains(Set()))
            assert(roleResult.exists(_.accessKey.value.length == 32))
            assert(roleResult.exists(_.secretKey.value.length == 32))
          }
        }
      }
    }
  }
}
Example 54
Source File: CodeGenExampleSpec.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved. // SPDX-License-Identifier: Apache-2.0 package com.daml.ledger.client.binding import java.util.UUID import com.daml.ledger.client.binding.{Primitive => P} import com.daml.sample.Main.PayOut import org.scalatest.{Assertion, Matchers, WordSpec} class CodeGenExampleSpec extends WordSpec with Matchers { val alice = P.Party("Alice") val bob = P.Party("Bob") val charlie = P.Party("Charlie") "create CallablePayout contract should compile" in { import com.daml.sample.Main.CallablePayout val createCommand: P.Update[P.ContractId[CallablePayout]] = CallablePayout(giver = alice, receiver = bob).create sendCommand(createCommand) } "exercise Call choice should compile" in { import com.daml.sample.Main.CallablePayout import com.daml.sample.Main.CallablePayout._ val givenContractId: P.ContractId[CallablePayout] = receiveContractIdFromTheLedger val exerciseCommand: P.Update[P.ContractId[PayOut]] = givenContractId.exerciseCall(actor = alice) sendCommand(exerciseCommand) } "exercise Transfer choice should compile" in { import com.daml.sample.Main.CallablePayout import com.daml.sample.Main.CallablePayout._ val givenContractId: P.ContractId[CallablePayout] = receiveContractIdFromTheLedger val exerciseCommand: P.Update[P.ContractId[CallablePayout]] = givenContractId.exerciseTransfer(actor = bob, $choice_arg = Transfer(newReceiver = charlie)) sendCommand(exerciseCommand) } private def sendCommand[T](command: P.Update[P.ContractId[T]]): Assertion = command should not be null private def receiveContractIdFromTheLedger[T]: P.ContractId[T] = P.ContractId(UUID.randomUUID.toString) }
Example 55
Source File: AuthMatchers.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved. // SPDX-License-Identifier: Apache-2.0 package com.daml.ledger.rxjava import com.daml.grpc.{GrpcException, GrpcStatus} import org.scalatest.{Assertion, Matchers} private[rxjava] trait AuthMatchers { self: Matchers => private def theCausalChainOf(t: Throwable): Iterator[Throwable] = Iterator.iterate(t)(_.getCause).takeWhile(_ != null) private def expectError(predicate: Throwable => Boolean)(call: => Any): Assertion = theCausalChainOf(the[RuntimeException] thrownBy call).filter(predicate) should not be empty def expectUnauthenticated(call: => Any): Assertion = expectError { case GrpcException(GrpcStatus.UNAUTHENTICATED(), _) => true case _ => false }(call) def expectPermissionDenied(call: => Any): Assertion = expectError { case GrpcException(GrpcStatus.PERMISSION_DENIED(), _) => true case _ => false }(call) }
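Because the trait is private[rxjava], a demo has to live in the same package; here the failing call is simulated by throwing directly, where a real suite would make the call against a ledger client:

package com.daml.ledger.rxjava

import io.grpc.Status
import org.scalatest.{Matchers, WordSpec}

class AuthMatchersDemo extends WordSpec with Matchers with AuthMatchers {
  "expectUnauthenticated" should {
    "match UNAUTHENTICATED anywhere in the causal chain" in {
      expectUnauthenticated {
        // wrapped once, to show that the matcher walks getCause
        throw new RuntimeException(Status.UNAUTHENTICATED.asRuntimeException())
      }
    }
  }
}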
Example 56
Source File: ServerSubscriberStressTest.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved. // SPDX-License-Identifier: Apache-2.0 package com.daml.grpc.adapter.server.rs import com.daml.grpc.adapter.TestExecutionSequencerFactory import org.reactivestreams.tck.flow.support.HelperPublisher import org.scalatest.concurrent.AsyncTimeLimitedTests import org.scalatest.time.Span import org.scalatest.time.SpanSugar._ import org.scalatest.{Assertion, AsyncWordSpec, BeforeAndAfterEach, Matchers} import scala.concurrent.ExecutionContext.global import scala.concurrent.Future class ServerSubscriberStressTest extends AsyncWordSpec with BeforeAndAfterEach with Matchers with AsyncTimeLimitedTests { private val elemCount = 10000 private val testRunCount = 50 private val expectedElemRange = 0.until(elemCount) var serverCallStreamObserver: MockServerCallStreamObserver[Int] = _ var sut: ServerSubscriber[Int] = _ var helperPublisher: HelperPublisher[Int] = _ override protected def beforeEach(): Unit = { serverCallStreamObserver = new MockServerCallStreamObserver[Int] val executor = TestExecutionSequencerFactory.instance.getExecutionSequencer sut = new ServerSubscriber[Int](serverCallStreamObserver, executor) helperPublisher = new HelperPublisher[Int](0, elemCount, i => i, global) } "ServerSubscriber" should { for (i <- 1.to(testRunCount)) { s"work with $elemCount elements when they are requested one by one (test run #$i)" in { helperPublisher.subscribe(sut) expectedElemRange.foreach(_ => serverCallStreamObserver.demandResponse()) verifyExpectedElementsArrivedInOrder() } } for (i <- 1.to(testRunCount)) { s"work with $elemCount elements when they are requested in bulk (isReady stays true) (test run #$i)" in { helperPublisher.subscribe(sut) serverCallStreamObserver.demandResponse(elemCount) verifyExpectedElementsArrivedInOrder() } } } private def verifyExpectedElementsArrivedInOrder(): Future[Assertion] = { serverCallStreamObserver.elementsWhenCompleted.map { receivedElements => receivedElements should contain theSameElementsInOrderAs expectedElemRange } } override def timeLimit: Span = 10.seconds }
Example 57
Source File: ResultAssertions.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.grpc.adapter.client

import com.daml.platform.hello.HelloResponse
import com.google.protobuf.ByteString
import io.grpc.{Status, StatusRuntimeException}
import org.scalatest.{Assertion, Matchers}

import scala.util.Random

trait ResultAssertions { self: Matchers =>

  protected def elemCount: Int = 1024
  protected lazy val elemRange: Range = 1.to(elemCount)
  protected lazy val halfCount: Int = elemCount / 2
  protected lazy val halfRange: Range = elemRange.take(halfCount)

  protected def isCancelledException(err: Throwable): Assertion = {
    err shouldBe a[StatusRuntimeException]
    err.asInstanceOf[StatusRuntimeException].getStatus.getCode shouldEqual Status.CANCELLED.getCode
  }

  protected def assertElementsAreInOrder(expectedCount: Long)(
      results: Seq[HelloResponse]
  ): Assertion = {
    results should have length expectedCount
    results.map(_.respInt) shouldEqual (1 to expectedCount.toInt)
  }

  protected def elementsAreSummed(results: Seq[HelloResponse]): Assertion = {
    results should have length 1
    results.foldLeft(0)(_ + _.respInt) shouldEqual elemRange.sum
  }

  protected def everyElementIsDoubled(results: Seq[HelloResponse]): Assertion = {
    results should have length elemCount.toLong
    // the order does matter
    results.map(_.respInt) shouldEqual elemRange.map(_ * 2)
  }

  protected def genPayload(): ByteString = {
    val bytes = new Array[Byte](1024)
    Random.nextBytes(bytes)
    ByteString.copyFrom(bytes)
  }
}
Example 58
Source File: IsStatusException.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.ledger.api.testing.utils

import com.daml.grpc.{GrpcException, GrpcStatus}
import io.grpc.Status
import org.scalatest.{Assertion, Matchers}

import scala.util.control.NonFatal

object IsStatusException extends Matchers {

  def apply(expectedStatusCode: Status.Code)(throwable: Throwable): Assertion = {
    throwable match {
      case GrpcException(GrpcStatus(code, _), _) => code shouldEqual expectedStatusCode
      case NonFatal(other) => fail(s"$other is not a gRPC Status exception.")
    }
  }

  def apply(expectedStatus: Status): Throwable => Assertion =
    apply(expectedStatus.getCode)
}
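A minimal usage sketch, assuming it runs inside a ScalaTest suite: the assertion function returned by IsStatusException can be applied to whatever Throwable the test intercepts. The failing call here is made up for illustration.

import io.grpc.{Status, StatusRuntimeException}

// Intercept a hypothetical call that fails with NOT_FOUND, then assert on its status code.
val thrown: Throwable = intercept[StatusRuntimeException] {
  throw Status.NOT_FOUND.asRuntimeException()
}
IsStatusException(Status.NOT_FOUND)(thrown)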
Example 59
Source File: TlsTest.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.http

import HttpServiceTestFixture.UseTls
import akka.http.scaladsl.model.{StatusCodes, Uri}
import org.scalatest.{Assertion, AsyncFreeSpec, Inside, Matchers}
import spray.json.{JsArray, JsObject}

import scala.concurrent.Future

@SuppressWarnings(Array("org.wartremover.warts.NonUnitStatements"))
class TlsTest
    extends AsyncFreeSpec
    with Matchers
    with Inside
    with AbstractHttpServiceIntegrationTestFuns {

  override def jdbcConfig = None

  override def staticContentConfig = None

  override def useTls = UseTls.Tls

  "connect normally with tls on" in withHttpService { (uri: Uri, _, _) =>
    getRequest(uri = uri.withPath(Uri.Path("/v1/query")))
      .flatMap {
        case (status, output) =>
          status shouldBe StatusCodes.OK
          assertStatus(output, StatusCodes.OK)
          inside(output) {
            case JsObject(fields) =>
              inside(fields.get("result")) {
                case Some(JsArray(vector)) => vector should have size 0L
              }
          }
      }: Future[Assertion]
  }
}
Example 60
Source File: HttpServiceIntegrationTest.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.http

import java.io.File
import java.nio.file.Files

import akka.http.scaladsl.Http
import akka.http.scaladsl.model.{HttpMethods, HttpRequest, StatusCodes, Uri}
import com.daml.http.Statement.discard
import com.daml.http.util.TestUtil.writeToFile
import org.scalacheck.Gen
import org.scalatest.{Assertion, BeforeAndAfterAll}

import scala.concurrent.Future

class HttpServiceIntegrationTest extends AbstractHttpServiceIntegrationTest with BeforeAndAfterAll {

  private val staticContent: String = "static"

  private val staticContentDir: File =
    Files.createTempDirectory("integration-test-static-content").toFile

  override def staticContentConfig: Option[StaticContentConfig] =
    Some(StaticContentConfig(prefix = staticContent, directory = staticContentDir))

  override def jdbcConfig: Option[JdbcConfig] = None

  private val expectedDummyContent: String = Gen
    .listOfN(100, Gen.identifier)
    .map(_.mkString(" "))
    .sample
    .getOrElse(throw new IllegalStateException(s"Cannot create dummy text content"))

  private val dummyFile: File =
    writeToFile(new File(staticContentDir, "dummy.txt"), expectedDummyContent).get
  require(dummyFile.exists)

  override protected def afterAll(): Unit = {
    // clean up temp directory
    discard { dummyFile.delete() }
    discard { staticContentDir.delete() }
    super.afterAll()
  }

  "should serve static content from configured directory" in withHttpService { (uri: Uri, _, _) =>
    Http()
      .singleRequest(
        HttpRequest(
          method = HttpMethods.GET,
          uri = uri.withPath(Uri.Path(s"/$staticContent/${dummyFile.getName}"))))
      .flatMap { resp =>
        discard { resp.status shouldBe StatusCodes.OK }
        val bodyF: Future[String] = getResponseDataBytes(resp, debug = false)
        bodyF.flatMap { body =>
          body shouldBe expectedDummyContent
        }
      }: Future[Assertion]
  }

  "Forwarded" - {
    import Endpoints.Forwarded

    "can 'parse' sample" in {
      Forwarded("for=192.168.0.1;proto=http;by=192.168.0.42").proto should ===(Some("http"))
    }

    "can 'parse' quoted sample" in {
      Forwarded("for=192.168.0.1;proto = \"https\" ;by=192.168.0.42").proto should ===(
        Some("https"))
    }
  }
}
Example 61
Source File: FutureTimeouts.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.platform.akkastreams

import akka.actor.ActorSystem
import com.daml.dec.DirectExecutionContext
import org.scalatest.{Assertion, AsyncWordSpec}

import scala.concurrent.{Future, Promise, TimeoutException}
import scala.concurrent.duration.FiniteDuration
import scala.util.Try
import scala.util.control.NoStackTrace

trait FutureTimeouts { self: AsyncWordSpec =>

  protected def system: ActorSystem

  protected def expectTimeout(f: Future[Any], duration: FiniteDuration): Future[Assertion] = {
    val promise: Promise[Any] = Promise[Any]()

    val cancellable = system.scheduler.scheduleOnce(duration, { () =>
      promise.failure(
        new TimeoutException(s"Future timed out after $duration as expected.") with NoStackTrace)
      ()
    })(system.dispatcher)

    f.onComplete((_: Try[Any]) => cancellable.cancel())(DirectExecutionContext)

    recoverToSucceededIf[TimeoutException](
      Future.firstCompletedOf[Any](List[Future[Any]](f, promise.future))(DirectExecutionContext))
  }
}
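As a rough illustration (the spec name and actor-system wiring are assumptions, not part of the daml codebase), a suite mixing in FutureTimeouts can assert that a never-completing future is reported as a timeout:

package com.daml.platform.akkastreams

import akka.actor.ActorSystem
import org.scalatest.AsyncWordSpec

import scala.concurrent.Promise
import scala.concurrent.duration._

class NeverCompletingFutureSpec extends AsyncWordSpec with FutureTimeouts {
  override protected val system: ActorSystem = ActorSystem("NeverCompletingFutureSpec")

  "a future that never completes" should {
    "be reported as timed out" in {
      // A promise that is never completed: expectTimeout should succeed.
      expectTimeout(Promise[Any]().future, 100.millis)
    }
  }
}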
Example 62
Source File: ReadOnlyServiceCallAuthTests.scala From daml with Apache License 2.0 | 5 votes |
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.platform.sandbox.auth

import org.scalatest.Assertion

import scala.concurrent.Future

trait ReadOnlyServiceCallAuthTests extends ServiceCallWithMainActorAuthTests {

  def successfulBehavior: Future[Any] => Future[Assertion] = expectSuccess(_: Future[Any])

  it should "deny calls with an expired read-only token" in {
    expectUnauthenticated(serviceCallWithToken(canReadAsMainActorExpired))
  }
  it should "allow calls with explicitly non-expired read-only token" in {
    successfulBehavior(serviceCallWithToken(canReadAsMainActorExpiresTomorrow))
  }
  it should "allow calls with read-only token without expiration" in {
    successfulBehavior(serviceCallWithToken(canReadAsMainActor))
  }

  it should "deny calls with an expired read/write token" in {
    expectUnauthenticated(serviceCallWithToken(canActAsMainActorExpired))
  }
  it should "allow calls with explicitly non-expired read/write token" in {
    successfulBehavior(serviceCallWithToken(canActAsMainActorExpiresTomorrow))
  }
  it should "allow calls with read/write token without expiration" in {
    successfulBehavior(serviceCallWithToken(canActAsMainActor))
  }

  it should "allow calls with the correct ledger ID" in {
    successfulBehavior(serviceCallWithToken(canReadAsMainActorActualLedgerId))
  }
  it should "deny calls with a random ledger ID" in {
    expectPermissionDenied(serviceCallWithToken(canReadAsMainActorRandomLedgerId))
  }
  it should "allow calls with the correct participant ID" in {
    successfulBehavior(serviceCallWithToken(canReadAsMainActorActualParticipantId))
  }
  it should "deny calls with a random participant ID" in {
    expectPermissionDenied(serviceCallWithToken(canReadAsMainActorRandomParticipantId))
  }
}
Example 63
Source File: CallbackTest.scala From bitcoin-s with MIT License | 5 votes |
package org.bitcoins.core.api

import org.bitcoins.core.util.FutureUtil
import org.bitcoins.testkit.util.BitcoinSAsyncTest
import org.scalatest.Assertion

import scala.concurrent.Promise
import scala.concurrent.duration._
import scala.util.Success

class CallbackTest extends BitcoinSAsyncTest {

  val testTimeout: FiniteDuration = 10.seconds

  it must "show callbacks being blocked" in {
    val promise = Promise[Assertion]()

    val f1: Callback[Unit] = _ => {
      Thread.sleep(testTimeout.toMillis)
      promise.complete(fail("2nd callback did not start before timeout"))
      FutureUtil.unit
    }
    val f2: Callback[Unit] = _ => {
      promise.complete(Success(succeed))
      FutureUtil.unit
    }
    val handler =
      CallbackHandler[Unit, Callback[Unit]](name = "name", Vector(f1, f2))

    // Start execution of callbacks
    handler.execute(())

    // Return result of the callbacks, f2 should complete first
    promise.future
  }
}
Example 64
Source File: AdditionalAssertions.scala From ScalaWebTest with Apache License 2.0 | 5 votes |
package org.scalawebtest.integration

import org.scalactic.source.Position
import org.scalatest.{Assertion, Succeeded, Suite}

import scala.reflect.ClassTag

trait AdditionalAssertions { self: Suite =>

  def assertThrowsAndTestMessage[T <: AnyRef](f: => Any)(messageTest: String => Assertion)(
      implicit classTag: ClassTag[T], pos: Position): Assertion = {
    val clazz = classTag.runtimeClass
    val threwExpectedException =
      try {
        f
        false
      } catch {
        case u: Throwable =>
          messageTest(u.getMessage)
          if (!clazz.isAssignableFrom(u.getClass)) {
            fail(s"didn't throw expected exception ${clazz.getCanonicalName}")
          } else true
      }
    if (threwExpectedException) {
      Succeeded
    } else {
      fail(s"didn't throw expected exception ${clazz.getCanonicalName}")
    }
  }
}
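A small usage sketch (the suite below is hypothetical): the first argument is the code under test, and the second inspects the thrown exception's message before the type check happens.

import org.scalatest.FunSuite

class DivisionSpec extends FunSuite with AdditionalAssertions {
  test("integer division by zero reports the cause") {
    assertThrowsAndTestMessage[ArithmeticException] {
      1 / 0
    } { message =>
      // The JVM reports integer division by zero as "/ by zero".
      assert(message.contains("/ by zero"))
    }
  }
}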
Example 65
Source File: KNNTest.scala From mmlspark with MIT License | 5 votes |
// Copyright (C) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See LICENSE in project root for information.

package com.microsoft.ml.spark.nn

import com.microsoft.ml.spark.core.test.fuzzing.{EstimatorFuzzing, TestObject}
import org.apache.spark.ml.util.MLReadable
import org.apache.spark.sql.{DataFrame, Row}
import org.scalactic.Equality
import org.scalatest.Assertion

class KNNTest extends EstimatorFuzzing[KNN] with BallTreeTestBase {

  test("matches non spark result") {
    val results = new KNN().setOutputCol("matches")
      .fit(df).transform(testDF)
      .select("matches").collect()
    val sparkResults = results.map(r =>
      r.getSeq[Row](0).map(mr => mr.getDouble(1))
    )
    val tree = BallTree(uniformData, uniformData.indices)
    val nonSparkResults = uniformData.take(5).map(
      point => tree.findMaximumInnerProducts(point, 5)
    )

    sparkResults.zip(nonSparkResults).foreach { case (sr, nsr) =>
      assert(sr === nsr.map(_.distance))
    }
  }

  override def assertDFEq(df1: DataFrame, df2: DataFrame)(implicit eq: Equality[DataFrame]): Assertion = {
    super.assertDFEq(
      df1.select("features", "values", "matches.distance"),
      df2.select("features", "values", "matches.distance")
    )(eq)
  }

  override def testObjects(): Seq[TestObject[KNN]] =
    List(new TestObject(new KNN().setOutputCol("matches"), df, testDF))

  override def reader: MLReadable[_] = KNN

  override def modelReader: MLReadable[_] = KNNModel
}

class ConditionalKNNTest extends EstimatorFuzzing[ConditionalKNN] with BallTreeTestBase {

  test("matches non spark result") {
    val results = new ConditionalKNN().setOutputCol("matches")
      .fit(df).transform(testDF)
      .select("matches").collect()
    val sparkResults = results.map(r =>
      r.getSeq[Row](0).map(mr => (mr.getDouble(1), mr.getInt(2)))
    )
    val tree = ConditionalBallTree(uniformData, uniformData.indices, uniformLabels)
    val nonSparkResults = uniformData.take(5).map(
      point => tree.findMaximumInnerProducts(point, Set(0, 1), 5)
    )

    sparkResults.zip(nonSparkResults).foreach { case (sr, nsr) =>
      assert(sr.map(_._1) === nsr.map(_.distance))
      assert(sr.forall(p => Set(1, 0)(p._2)))
    }
  }

  override def assertDFEq(df1: DataFrame, df2: DataFrame)(implicit eq: Equality[DataFrame]): Assertion = {
    super.assertDFEq(
      df1.select("features", "values", "matches.distance"),
      df2.select("features", "values", "matches.distance")
    )(eq)
  }

  override def testObjects(): Seq[TestObject[ConditionalKNN]] =
    List(new TestObject(new ConditionalKNN().setOutputCol("matches"), df, testDF))

  override def reader: MLReadable[_] = ConditionalKNN

  override def modelReader: MLReadable[_] = ConditionalKNNModel
}
Example 66
Source File: BatchIteratorSuite.scala From mmlspark with MIT License | 5 votes |
// Copyright (C) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See LICENSE in project root for information.

package com.microsoft.ml.spark.stages

import com.microsoft.ml.spark.core.test.base.TestBase
import org.scalatest.Assertion

class BatchIteratorSuite extends TestBase {

  def delayedIterator(n: Int, wait: Int = 5): Iterator[Int] = {
    (1 to n).toIterator.map { x =>
      Thread.sleep(x * wait.toLong)
      x
    }
  }

  def standardIterator(n: Int): Iterator[Int] = (1 to n).toIterator

  def comparePerformance[T](iteratorCons: => Iterator[T],
                            ratioRequired: Double = 2.0,
                            maxBuffer: Int = Integer.MAX_VALUE,
                            numTrials: Int = 5): Assertion = {
    val (dresults, dtime) = getTime(numTrials) {
      new DynamicBufferedBatcher(iteratorCons, maxBuffer).toList
    }
    val (results, time) = getTime(numTrials) {
      iteratorCons.toList.map(x => List(x))
    }
    assert(dresults.head.flatten === results.head.flatten)
    val ratio = dtime / time.toDouble
    println(s"ratio: $ratio, Batched: ${dtime / 1000000}ms, normal: ${time / 1000000}ms")
    assert(ratio < ratioRequired)
  }

  def compareAbsolutePerformance[T](iteratorCons: => Iterator[T],
                                    extraTimeUpperBound: Double = 40,
                                    maxBuffer: Int = Integer.MAX_VALUE): Assertion = {
    val (dresults, dtime) = getTime {
      new DynamicBufferedBatcher(iteratorCons, maxBuffer).toList
    }
    val (results, time) = getTime {
      iteratorCons.toList.map(x => List(x))
    }
    assert(dresults.flatten === results.flatten)
    val ratio = dtime / time.toDouble
    println(s"ratio: $ratio, Batched: ${dtime / 1000000}ms, normal: ${time / 1000000}ms")
    assert(dtime < time + extraTimeUpperBound * 1000000)
  }

  test("Performance and behavior") {
    comparePerformance(delayedIterator(30), numTrials = 1)
    comparePerformance(delayedIterator(10), numTrials = 1)
    comparePerformance(delayedIterator(30), maxBuffer = 10, numTrials = 1)
    comparePerformance(delayedIterator(10), maxBuffer = 10, numTrials = 1)
    compareAbsolutePerformance(standardIterator(300), maxBuffer = 10)
    compareAbsolutePerformance(standardIterator(3000), maxBuffer = 10)
  }

  test("no deadlocks") {
    (1 to 1000).foreach { _ =>
      val l = new DynamicBufferedBatcher((1 to 100).toIterator, 10).toList
      assert(l.flatten === (1 to 100))
    }
  }
}
Example 67
Source File: PartitionConsolidatorSuite.scala From mmlspark with MIT License | 5 votes |
// Copyright (C) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See LICENSE in project root for information.

package com.microsoft.ml.spark.flaky

import com.microsoft.ml.spark.core.test.base.TimeLimitedFlaky
import com.microsoft.ml.spark.core.test.fuzzing.{TestObject, TransformerFuzzing}
import com.microsoft.ml.spark.io.http.PartitionConsolidator
import org.apache.spark.ml.util.MLReadable
import org.apache.spark.sql.catalyst.encoders.RowEncoder
import org.apache.spark.sql.types.{DoubleType, StructType}
import org.apache.spark.sql.{DataFrame, Dataset, Row}
import org.scalatest.Assertion

class PartitionConsolidatorSuite extends TransformerFuzzing[PartitionConsolidator] with TimeLimitedFlaky {

  import session.implicits._

  override val numCores: Option[Int] = Some(2)

  lazy val df: DataFrame = (1 to 1000).toDF("values")

  override val sortInDataframeEquality: Boolean = true

  override def testObjects(): Seq[TestObject[PartitionConsolidator]] = Seq(
    new TestObject(new PartitionConsolidator(), df))

  override def reader: MLReadable[_] = PartitionConsolidator

  def getPartitionDist(df: DataFrame): List[Int] = {
    df.rdd.mapPartitions(it => Iterator(it.length)).collect().toList
  }

  // TODO: figure out what is causing the issue on the build server
  override def testSerialization(): Unit = {}

  override def testExperiments(): Unit = {}

  def basicTest(df: DataFrame): Assertion = {
    val pd1 = getPartitionDist(df)
    val newDF = new PartitionConsolidator().transform(df)
    val pd2 = getPartitionDist(newDF)
    assert(pd1.sum === pd2.sum)
    assert(pd2.max >= pd1.max)
    assert(pd1.length === pd2.length)
  }

  test("basic functionality") {
    basicTest(df)
  }

  test("works with more partitions than cores") {
    basicTest(df.repartition(12))
  }

  test("overheads") {
    val baseDF = (1 to 1000).toDF("values").cache()
    println(baseDF.count())

    def getDF: Dataset[Row] =
      baseDF.map { x => Thread.sleep(10); x }(
        RowEncoder(new StructType().add("values", DoubleType)))

    val t1 = getTime(3)(getDF.foreach(_ => ()))._2
    val t2 = getTime(3)(new PartitionConsolidator().transform(getDF).foreach(_ => ()))._2
    println(t2.toDouble / t1.toDouble)
    assert(t2.toDouble / t1.toDouble < 3.0)
  }

  test("works with more partitions than cores2") {
    basicTest(df.repartition(100))
  }

  test("work with 1 partition") {
    basicTest(df.repartition(1))
  }
}
Example 68
Source File: LoadBalancerTestUtils.scala From scastie with Apache License 2.0 | 5 votes |
package com.olegych.scastie.balancer

import java.time.Instant

import com.olegych.scastie.api._
import org.scalatest.Assertion
import org.scalatest.funsuite.AnyFunSuite

object TestTaskId {
  def apply(i: Int) = TaskId(SnippetId(i.toString, None))
}

case class TestServerRef(id: Int)

case class TestState(state: String, ready: Boolean = true) extends ServerState {
  def isReady: Boolean = ready
}

trait LoadBalancerTestUtils extends AnyFunSuite with TestUtils {
  type TestServer0 = Server[TestServerRef, TestState]
  type TestLoadBalancer0 = LoadBalancer[TestServerRef, TestState]

  @transient private var taskId = 1000
  def add(balancer: TestLoadBalancer0, config: Inputs): TestLoadBalancer0 = synchronized {
    val (_, balancer0) = balancer.add(Task(config, nextIp, TestTaskId(taskId), Instant.now)).get
    taskId += 1
    balancer0
  }

  // Ordering only for debug purposes
  object Multiset {
    def apply[T: Ordering](xs: Seq[T]): Multiset[T] =
      Multiset(xs.groupBy(x => x).map { case (k, vs) => (k, vs.size) })
  }
  case class Multiset[T: Ordering](inner: Map[T, Int]) {
    override def toString: String = {
      val size = inner.values.sum
      inner.toList
        .sortBy { case (k, v) => (-v, k) }
        .map { case (k, v) => s"$k($v)" }
        .mkString("Multiset(", ", ", s") {$size}")
    }
  }

  def assertConfigs(balancer: TestLoadBalancer0)(columns: Seq[String]*): Assertion = {
    assert(
      Multiset(balancer.servers.map(_.currentConfig.sbtConfigExtra)) ==
        Multiset(
          columns.flatten.map(i => sbtConfig(i.toString).sbtConfigExtra)
        )
    )
  }

  @transient private var serverId = 0
  def server(
      c: String,
      mailbox: Vector[Task] = Vector(),
      state: TestState = TestState("default-state")
  ): TestServer0 = synchronized {
    val t = Server(TestServerRef(serverId), sbtConfig(c), state, mailbox)
    serverId += 1
    t
  }

  def servers(columns: Seq[String]*): Vector[TestServer0] = {
    columns.to(Vector).flatten.map(c => server(c))
  }

  @transient private var currentIp = 0
  def nextIp: Ip = synchronized {
    val t = Ip("ip" + currentIp)
    currentIp += 1
    t
  }

  def server(v: Int): TestServerRef = TestServerRef(v)

  def code(code: String) = Inputs.default.copy(code = code)
  def sbtConfig(sbtConfig: String) = Inputs.default.copy(sbtConfigExtra = sbtConfig)

  def history(columns: Seq[String]*): TaskHistory = {
    val records = columns
      .to(Vector)
      .flatten
      .map(i => Task(Inputs.default.copy(code = i.toString), nextIp, TestTaskId(1), Instant.now))
      .reverse
    TaskHistory(Vector(records: _*), maxSize = 20)
  }
}
Example 69
Source File: SafeArgSpec.scala From fs2-rabbit with Apache License 2.0 | 5 votes |
package dev.profunktor.fs2rabbit

import org.scalacheck.Gen
import org.scalatest.Assertion
import org.scalatest.funsuite.AnyFunSuite
import org.scalatestplus.scalacheck.{ScalaCheckPropertyChecks => PropertyChecks}

class SafeArgSpec extends AnyFunSuite with PropertyChecks {

  import arguments._

  def safeArg[A](value: A)(implicit ev: SafeArgument[A]): Assertion = {
    assert(ev.toJavaType(value) != null)
    assert(ev.toObject(value) != null)
  }

  def safeConversion(args: Arguments): Assertion = {
    val converted: java.util.Map[String, Object] = args
    assert(converted != null)
  }

  val tupleGen: Gen[(String, String)] =
    for {
      x <- Gen.alphaStr
      y <- Gen.alphaStr
    } yield (x, y)

  val stringGen: Gen[String] = Gen.alphaStr
  val intGen: Gen[Int] = Gen.posNum[Int]
  val longGen: Gen[Long] = Gen.posNum[Long]
  val doubleGen: Gen[Double] = Gen.posNum[Double]
  val floatGen: Gen[Float] = Gen.posNum[Float]
  val shortGen: Gen[Short] = Gen.posNum[Short]
  val booleanGen: Gen[Boolean] = Gen.oneOf(Seq(true, false))
  val byteGen: Gen[Byte] = intGen.map(_.toByte)
  val dateGen: Gen[java.util.Date] = Gen.calendar.map(_.getTime)
  val bigDecimalGen: Gen[BigDecimal] = longGen.map(BigDecimal.apply)
  val listGen: Gen[List[String]] = Gen.listOf(stringGen)
  val mapGen: Gen[Map[String, String]] = Gen.mapOf(tupleGen)

  forAll(stringGen)(safeArg(_))
  forAll(intGen)(safeArg(_))
  forAll(longGen)(safeArg(_))
  forAll(doubleGen)(safeArg(_))
  forAll(floatGen)(safeArg(_))
  forAll(shortGen)(safeArg(_))
  forAll(booleanGen)(safeArg(_))
  forAll(byteGen)(safeArg(_))
  forAll(dateGen)(safeArg(_))
  forAll(bigDecimalGen)(safeArg(_))
  forAll(listGen)(safeArg(_))
  forAll(mapGen)(safeArg(_))

  test("Arguments conversion") {
    safeConversion(Map("key" -> "value"))
    safeConversion(Map("key" -> true))
    safeConversion(Map("key" -> 123))
    safeConversion(Map("key" -> 456L))
    safeConversion(Map("key" -> 1.87))
    safeConversion(Map("key" -> new java.util.Date()))
    safeConversion(Map("key" -> "value".getBytes.toList))
    safeConversion(Map("key" -> Map("nested" -> "value")))
    safeConversion(Map("key" -> List(1, 2, 3)))
  }
}
Example 70
Source File: AmqpFieldValueSpec.scala From fs2-rabbit with Apache License 2.0 | 5 votes |
package dev.profunktor.fs2rabbit

import java.io.{DataInputStream, DataOutputStream, InputStream, OutputStream}
import java.time.Instant

import com.rabbitmq.client.impl.{ValueReader, ValueWriter}
import dev.profunktor.fs2rabbit.model.AmqpFieldValue._
import dev.profunktor.fs2rabbit.model.{AmqpFieldValue, ShortString}
import org.scalatest.flatspec.AnyFlatSpecLike
import org.scalatest.Assertion
import org.scalatest.matchers.should.Matchers

class AmqpFieldValueSpec extends AnyFlatSpecLike with Matchers with AmqpPropertiesArbitraries {

  it should "convert from and to Java primitive header values" in {
    val intVal = IntVal(1)
    val longVal = LongVal(2L)
    val stringVal = StringVal("hey")
    val arrayVal = ArrayVal(Vector(IntVal(3), IntVal(2), IntVal(1)))

    AmqpFieldValue.unsafeFrom(intVal.toValueWriterCompatibleJava) should be(intVal)
    AmqpFieldValue.unsafeFrom(longVal.toValueWriterCompatibleJava) should be(longVal)
    AmqpFieldValue.unsafeFrom(stringVal.toValueWriterCompatibleJava) should be(stringVal)
    AmqpFieldValue.unsafeFrom("fs2") should be(StringVal("fs2"))
    AmqpFieldValue.unsafeFrom(arrayVal.toValueWriterCompatibleJava) should be(arrayVal)
  }

  it should "preserve the same value after a round-trip through impure and from" in {
    forAll { amqpHeaderVal: AmqpFieldValue =>
      AmqpFieldValue.unsafeFrom(amqpHeaderVal.toValueWriterCompatibleJava) == amqpHeaderVal
    }
  }

  it should "preserve the same values after a round-trip through the Java ValueReader and ValueWriter" in {
    forAll(assertThatValueIsPreservedThroughJavaWriteAndRead _)
  }

  it should "preserve a specific StringVal that previously failed after a round-trip through the Java ValueReader and ValueWriter" in {
    assertThatValueIsPreservedThroughJavaWriteAndRead(StringVal("kyvmqzlbjivLqQFukljghxdowkcmjklgSeybdy"))
  }

  it should "preserve a specific DateVal created from an Instant that has millisecond accuracy after a round-trip through the Java ValueReader and ValueWriter" in {
    val instant = Instant.parse("4000-11-03T20:17:29.57Z")
    val myDateVal = TimestampVal.from(instant)
    assertThatValueIsPreservedThroughJavaWriteAndRead(myDateVal)
  }

  "DecimalVal" should "reject a BigDecimal of an unscaled value with 33 bits..." in {
    DecimalVal.from(BigDecimal(Int.MaxValue) + BigDecimal(1)) should be(None)
  }
  it should "reject a BigDecimal with a scale over octet size" in {
    DecimalVal.from(new java.math.BigDecimal(java.math.BigInteger.valueOf(12345L), 1000)) should be(None)
  }

  // We need to wrap things in a dummy table because the method that would be
  // great to test with ValueReader, readFieldValue, is private, and so we
  // have to call the next best thing, readTable.
  private def wrapInDummyTable(value: AmqpFieldValue): TableVal =
    TableVal(Map(ShortString.unsafeFrom("dummyKey") -> value))

  private def createWriterFromQueue(outputResults: collection.mutable.Queue[Byte]): ValueWriter =
    new ValueWriter({
      new DataOutputStream({
        new OutputStream {
          override def write(b: Int): Unit =
            outputResults.enqueue(b.toByte)
        }
      })
    })

  private def createReaderFromQueue(input: collection.mutable.Queue[Byte]): ValueReader = {
    val inputStream = new InputStream {
      override def read(): Int =
        try {
          val result = input.dequeue()
          // A signed -> unsigned conversion because bytes by default are
          // converted into signed ints, which is bad when the API of read
          // states that negative numbers indicate EOF...
          0xff & result.toInt
        } catch {
          case _: NoSuchElementException => -1
        }

      override def available(): Int = {
        val result = input.size
        result
      }
    }
    new ValueReader(new DataInputStream(inputStream))
  }

  private def assertThatValueIsPreservedThroughJavaWriteAndRead(amqpHeaderVal: AmqpFieldValue): Assertion = {
    val outputResultsAsTable = collection.mutable.Queue.empty[Byte]
    val tableWriter = createWriterFromQueue(outputResultsAsTable)
    tableWriter.writeTable(wrapInDummyTable(amqpHeaderVal).toValueWriterCompatibleJava)

    val reader = createReaderFromQueue(outputResultsAsTable)
    val readValue = reader.readTable()
    AmqpFieldValue.unsafeFrom(readValue) should be(wrapInDummyTable(amqpHeaderVal))
  }
}
Example 71
Source File: WireMockVerify.scala From self-assessment-api with Apache License 2.0 | 5 votes |
package support.wiremock

import com.github.tomakehurst.wiremock.client.WireMock
import com.github.tomakehurst.wiremock.http.HttpHeader
import com.github.tomakehurst.wiremock.matching.RequestPatternBuilder
import com.github.tomakehurst.wiremock.verification.LoggedRequest
import org.scalatest.Assertion
import org.scalatest.matchers.should.Matchers._

import scala.collection.convert.ToScalaImplicits

trait WireMockVerify extends ToScalaImplicits {

  private def allMocks: List[LoggedRequest] =
    WireMock.findAll(RequestPatternBuilder.allRequests()).toList

  def getMockFor(url: String): LoggedRequest = {
    // use equals as well as regex matching to account for query parameter
    // separators not being able to be passed
    val mock: Option[LoggedRequest] =
      allMocks.find(y => y.getUrl.equalsIgnoreCase(url) || y.getUrl.matches(url))
    require(mock.isDefined, s"Trying to verify WireMock stubbing but none found for $url")
    mock.get
  }

  def verifyHeaderExists(mock: LoggedRequest, header: (String, String)): Assertion = {
    val (name, value) = header
    withClue(s"Trying to verify mock for ${mock.getUrl} has header $name, but does not\n") {
      mock.containsHeader(name) shouldBe true
    }
    val mockHeader = mock.header(name)
    withClue(
      s"Trying to verify mock for ${mock.getUrl} has header $name with value $value, but does not in ${getValues(mockHeader)}\n") {
      mockHeader.containsValue(value) shouldBe true
    }
  }

  def getValues(header: HttpHeader): List[String] = {
    if (header.isPresent) header.values().toList
    else List.empty
  }
}
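In a test that has already exercised a stubbed endpoint, verification might look like this minimal sketch (the URL and header values are assumptions for illustration):

// after the code under test has issued a request to the WireMock stub
val logged = getMockFor("/income-tax/submission")
verifyHeaderExists(logged, "Authorization" -> "Bearer test-token")
verifyHeaderExists(logged, "Accept" -> "application/vnd.hmrc.1.0+json")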
Example 72
Source File: JsonDecodeTest.scala From Soteria with MIT License | 5 votes |
package com.leobenkel.soteria.Utils

import com.leobenkel.soteria.Config.SoteriaConfiguration
import com.leobenkel.soteria.Utils.Json.JsonDecode
import com.leobenkel.soteria.Utils.Json.JsonParserHelper._
import com.leobenkel.soteria.{LogTest, ParentTest}
import org.scalatest.Assertion

import scala.io.Source

class JsonDecodeTest extends ParentTest {
  private val soteriaLog: LogTest = new LogTest(this)

  test("Test decode json") {
    val value: Int = 12
    case class MyJson(key: Int)

    implicit val parser: JsonDecode.Parser[MyJson] = (input: Map[String, Any]) => {
      for {
        key <- input.getAsInt("key")
      } yield {
        MyJson(key)
      }
    }

    val ei = JsonDecode.parse[MyJson](s"""
        |{
        |"key": $value
        |}
      """.stripMargin)

    assert(ei.isRight)
    assertEquals(value, ei.right.get.key)
  }

  test("Test decode soteria.json") {
    Map[String, Either[String, SoteriaConfiguration] => Assertion](
      "soteria_succeed_1.json" -> { result =>
        assert(result.isRight)
        val parsed = result.right.get
        assert(parsed.modules.size == 1)
        assert(parsed.modules.head._2.size == 1)
        assert(parsed.modules.head._2.head._2.version == "3.0")
        assert(parsed.scalaCFlags.length == 10)
        assert(parsed.scalaVersions.size == 2)
      },
      "soteria_succeed_2.json" -> { result =>
        assert(result.isRight)
        val parsed = result.right.get
        assert(parsed.modules.size == 1)
        assert(parsed.modules.head._2.size == 1)
        assert(parsed.modules.head._2.head._2.version == "3.0")
        assert(parsed.scalaCFlags.isEmpty)
        assert(parsed.scalaVersions.size == 2)
      },
      "soteria_succeed_3.json" -> { result =>
        assert(result.isRight)
        val parsed = result.right.get
        assert(parsed.modules.isEmpty)
        assert(parsed.scalaCFlags.isEmpty)
        assert(parsed.scalaVersions.size == 2)
      },
      "soteria_fail_no_scalaVersions.json" -> { result =>
        assert(result.isLeft)
        val error = result.left.get
        assert(error.contains("scalaVersions"))
      },
      "soteria_fail_no_version.json" -> { result =>
        assert(result.isLeft)
        val error = result.left.get
        assert(error.contains("version"))
        assert(error.contains("com.orgs"))
        assert(error.contains("name-of-library"))
      },
      "soteria_fail_bad_json.json" -> { result =>
        assert(result.isLeft)
        val error = result.left.get
        assert("Did not parse" == error)
      }
    ).map {
      case (filePath, test) =>
        val file = Source.fromResource(filePath)
        val content = file.mkString
        file.close()
        log.debug(s"Reading '$filePath'")
        val result: Either[String, SoteriaConfiguration] =
          JsonDecode.parse[SoteriaConfiguration](content)(SoteriaConfiguration.parser(soteriaLog))
        test(result)
    }
  }
}
Example 73
Source File: TaxCalculationViewSpec.scala From pertax-frontend with Apache License 2.0 | 5 votes |
package views.html.cards

import config.ConfigDecorator
import org.jsoup.nodes.Document
import org.scalatest.Assertion
import play.api.i18n.Messages
import viewmodels.Message.text
import viewmodels.{Heading, TaxCalculationViewModel, TaxYears, UnderpaidUrl}
import views.html.ViewSpec
import views.html.cards.home.TaxCalculationView

import scala.collection.JavaConverters._

class TaxCalculationViewSpec extends ViewSpec {

  val taxCalculation = injected[TaxCalculationView]

  implicit val configDecorator: ConfigDecorator = injected[ConfigDecorator]

  def hasLink(document: Document, content: String, href: String)(implicit messages: Messages): Assertion =
    document.getElementsMatchingText(content).eachAttr("href").asScala should contain(href)

  "TaxCalculation card" should {

    val previousTaxYear = 2017

    val doc = asDocument(
      taxCalculation(
        TaxCalculationViewModel(
          TaxYears(previousTaxYear, previousTaxYear + 1),
          Heading(
            text("label.you_do_not_owe_any_more_tax", previousTaxYear.toString, previousTaxYear + 1 toString),
            UnderpaidUrl(previousTaxYear)
          ),
          List(text("label.you_have_no_payments_to_make_to_hmrc")),
          Nil
        )).toString)

    "render the given heading correctly" in {
      doc.text() should include(
        Messages("label.you_do_not_owe_any_more_tax", previousTaxYear.toString, previousTaxYear + 1 toString)
      )
    }

    "render the given url correctly" in {
      hasLink(
        doc,
        Messages("label.you_do_not_owe_any_more_tax", previousTaxYear.toString, previousTaxYear + 1 toString),
        configDecorator.underpaidUrl(previousTaxYear)
      )
    }

    "render the given content correctly" in {
      doc.text() should include(Messages("label.you_have_no_payments_to_make_to_hmrc"))
    }
  }
}
Example 74
Source File: TipValidationTest.scala From bitcoin-s with MIT License | 5 votes |
package org.bitcoins.chain.validation

import akka.actor.ActorSystem
import org.bitcoins.chain.models.{BlockHeaderDAO, BlockHeaderDbHelper}
import org.bitcoins.chain.pow.Pow
import org.bitcoins.core.protocol.blockchain.BlockHeader
import org.bitcoins.testkit.chain.{BlockHeaderHelper, ChainDbUnitTest}
import org.scalatest.{Assertion, FutureOutcome}

class TipValidationTest extends ChainDbUnitTest {

  import org.bitcoins.chain.blockchain.Blockchain
  import org.bitcoins.chain.config.ChainAppConfig

  override type FixtureParam = BlockHeaderDAO

  // we're working with mainnet data
  implicit override lazy val appConfig: ChainAppConfig = mainnetAppConfig

  override def withFixture(test: OneArgAsyncTest): FutureOutcome =
    withBlockHeaderDAO(test)

  implicit override val system: ActorSystem = ActorSystem("TipValidationTest")

  behavior of "TipValidation"

  // blocks 566,092 and 566,093
  val newValidTip = BlockHeaderHelper.header1
  val currentTipDb = BlockHeaderHelper.header2Db
  val blockchain = Blockchain.fromHeaders(Vector(currentTipDb))

  it must "connect two blocks with that are valid" in { bhDAO =>
    val newValidTipDb =
      BlockHeaderDbHelper.fromBlockHeader(
        566093,
        currentTipDb.chainWork + Pow.getBlockProof(newValidTip),
        newValidTip)
    val expected = TipUpdateResult.Success(newValidTipDb)
    runTest(newValidTip, expected, blockchain)
  }

  it must "fail to connect two blocks that do not reference prev block hash correctly" in { bhDAO =>
    val badPrevHash = BlockHeaderHelper.badPrevHash
    val expected = TipUpdateResult.BadPreviousBlockHash(badPrevHash)
    runTest(badPrevHash, expected, blockchain)
  }

  it must "fail to connect two blocks with two different POW requirements at the wrong interval" in { bhDAO =>
    val badPOW = BlockHeaderHelper.badNBits
    val expected = TipUpdateResult.BadPOW(badPOW)
    runTest(badPOW, expected, blockchain)
  }

  it must "fail to connect two blocks with a bad nonce" in { bhDAO =>
    val badNonce = BlockHeaderHelper.badNonce
    val expected = TipUpdateResult.BadNonce(badNonce)
    runTest(badNonce, expected, blockchain)
  }

  private def runTest(
      header: BlockHeader,
      expected: TipUpdateResult,
      blockchain: Blockchain): Assertion = {
    val result = TipValidation.checkNewTip(header, blockchain)
    assert(result == expected)
  }
}
Example 75
Source File: StoreOpsTest.scala From fs2-blobstore with Apache License 2.0 | 5 votes |
package blobstore

import java.nio.charset.Charset
import java.nio.file.Files
import java.util.concurrent.Executors

import cats.effect.{Blocker, IO}
import cats.effect.laws.util.TestInstances
import fs2.{Pipe, Stream}
import org.scalatest.Assertion
import org.scalatest.flatspec.AnyFlatSpec
import implicits._
import org.scalatest.matchers.must.Matchers

import scala.collection.mutable.ArrayBuffer
import scala.concurrent.ExecutionContext

class StoreOpsTest extends AnyFlatSpec with Matchers with TestInstances {

  implicit val cs = IO.contextShift(ExecutionContext.global)
  val blocker = Blocker.liftExecutionContext(ExecutionContext.fromExecutor(Executors.newCachedThreadPool))

  behavior of "PutOps"

  it should "buffer contents and compute size before calling Store.put" in {
    val bytes: Array[Byte] = "AAAAAAAAAA".getBytes(Charset.forName("utf-8"))
    val store = DummyStore(_.size must be(Some(bytes.length)))

    Stream
      .emits(bytes)
      .covary[IO]
      .through(store.bufferedPut(Path("path/to/file.txt"), blocker))
      .compile
      .drain
      .unsafeRunSync()
    store.buf.toArray must be(bytes)
  }

  it should "upload a file from a nio Path" in {
    val bytes = "hello".getBytes(Charset.forName("utf-8"))
    val store = DummyStore(_.size must be(Some(bytes.length)))

    Stream
      .bracket(IO(Files.createTempFile("test-file", ".bin"))) { p =>
        IO(p.toFile.delete).void
      }
      .flatMap { p =>
        Stream.emits(bytes).covary[IO].through(fs2.io.file.writeAll(p, blocker)).drain ++
          Stream.eval(store.put(p, Path("path/to/file.txt"), blocker))
      }
      .compile
      .drain
      .unsafeRunSync()
    store.buf.toArray must be(bytes)
  }

  it should "download a file to a nio path" in {
    val bytes = "hello".getBytes(Charset.forName("utf-8"))
    val store = DummyStore(_ => succeed)
    val path = Path("path/to/file.txt")
    Stream.emits(bytes).through(store.put(path)).compile.drain.unsafeRunSync()

    Stream
      .bracket(IO(Files.createTempFile("test-file", ".bin")))(p => IO(p.toFile.delete).void)
      .flatMap { nioPath =>
        Stream.eval(store.get(path, nioPath, blocker)) >> Stream.eval {
          IO {
            Files.readAllBytes(nioPath) mustBe bytes
          }
        }
      }
      .compile
      .drain
      .unsafeRunSync()
  }
}

final case class DummyStore(check: Path => Assertion) extends Store[IO] {
  val buf = new ArrayBuffer[Byte]()

  override def put(path: Path, overwrite: Boolean): Pipe[IO, Byte, Unit] = {
    check(path)
    in => {
      buf.appendAll(in.compile.toVector.unsafeRunSync())
      Stream.emit(())
    }
  }

  override def get(path: Path, chunkSize: Int): Stream[IO, Byte] = Stream.emits(buf)
  override def list(path: Path, recursive: Boolean = false): Stream[IO, Path] = ???
  override def move(src: Path, dst: Path): IO[Unit] = ???
  override def copy(src: Path, dst: Path): IO[Unit] = ???
  override def remove(path: Path): IO[Unit] = ???
  override def putRotate(computePath: IO[Path], limit: Long): Pipe[IO, Byte, Unit] = ???
}
Example 76
Source File: ExampleKafkaStreamsSpec.scala From embedded-kafka with MIT License | 5 votes |
package net.manub.embeddedkafka.streams

import net.manub.embeddedkafka.Codecs._
import net.manub.embeddedkafka.ConsumerExtensions._
import net.manub.embeddedkafka.EmbeddedKafkaConfig
import net.manub.embeddedkafka.streams.EmbeddedKafkaStreams._
import org.apache.kafka.common.serialization.{Serde, Serdes}
import org.apache.kafka.streams.StreamsBuilder
import org.apache.kafka.streams.kstream.{Consumed, KStream, Produced}
import org.scalatest.Assertion
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class ExampleKafkaStreamsSpec extends AnyWordSpec with Matchers {

  implicit val config: EmbeddedKafkaConfig =
    EmbeddedKafkaConfig(kafkaPort = 7000, zooKeeperPort = 7001)

  val (inTopic, outTopic) = ("in", "out")

  val stringSerde: Serde[String] = Serdes.String()

  "A Kafka streams test" should {
    "be easy to run with streams and consumer lifecycle management" in {
      val streamBuilder = new StreamsBuilder
      val stream: KStream[String, String] =
        streamBuilder.stream(inTopic, Consumed.`with`(stringSerde, stringSerde))

      stream.to(outTopic, Produced.`with`(stringSerde, stringSerde))

      runStreams(Seq(inTopic, outTopic), streamBuilder.build()) {
        publishToKafka(inTopic, "hello", "world")
        publishToKafka(inTopic, "foo", "bar")
        publishToKafka(inTopic, "baz", "yaz")
        withConsumer[String, String, Assertion] { consumer =>
          val consumedMessages = consumer.consumeLazily[(String, String)](outTopic)
          consumedMessages.take(2).toList should be(
            Seq("hello" -> "world", "foo" -> "bar")
          )
          val h :: _ = consumedMessages.drop(2).toList
          h should be("baz" -> "yaz")
        }
      }
    }

    "allow support creating custom consumers" in {
      val streamBuilder = new StreamsBuilder
      val stream: KStream[String, String] =
        streamBuilder.stream(inTopic, Consumed.`with`(stringSerde, stringSerde))

      stream.to(outTopic, Produced.`with`(stringSerde, stringSerde))

      runStreams(Seq(inTopic, outTopic), streamBuilder.build()) {
        publishToKafka(inTopic, "hello", "world")
        publishToKafka(inTopic, "foo", "bar")
        withConsumer[String, String, Assertion] { consumer =>
          consumer.consumeLazily[(String, String)](outTopic).take(2) should be(
            Seq("hello" -> "world", "foo" -> "bar")
          )
        }
      }
    }

    "allow for easy string based testing" in {
      val streamBuilder = new StreamsBuilder
      val stream: KStream[String, String] =
        streamBuilder.stream(inTopic, Consumed.`with`(stringSerde, stringSerde))

      stream.to(outTopic, Produced.`with`(stringSerde, stringSerde))

      runStreams(Seq(inTopic, outTopic), streamBuilder.build())(
        withConsumer[String, String, Assertion]({ consumer =>
          publishToKafka(inTopic, "hello", "world")
          val h :: _ = consumer.consumeLazily[(String, String)](outTopic).toList
          h should be("hello" -> "world")
        })
      )(config)
    }
  }
}
Example 77
Source File: EmbeddedKafkaSpecSupport.scala From embedded-kafka with MIT License | 5 votes |
package net.manub.embeddedkafka

import java.net.{InetAddress, Socket}

import net.manub.embeddedkafka.EmbeddedKafkaSpecSupport.{
  Available,
  NotAvailable,
  ServerStatus
}
import org.scalatest.Assertion
import org.scalatest.concurrent.{Eventually, IntegrationPatience}
import org.scalatest.matchers.should.Matchers
import org.scalatest.time.{Milliseconds, Seconds, Span}
import org.scalatest.wordspec.AnyWordSpecLike

import scala.util.{Failure, Success, Try}

trait EmbeddedKafkaSpecSupport
    extends AnyWordSpecLike
    with Matchers
    with Eventually
    with IntegrationPatience {

  implicit val config: PatienceConfig =
    PatienceConfig(Span(1, Seconds), Span(100, Milliseconds))

  def expectedServerStatus(port: Int, expectedStatus: ServerStatus): Assertion =
    eventually {
      status(port) shouldBe expectedStatus
    }

  private def status(port: Int): ServerStatus = {
    Try(new Socket(InetAddress.getByName("localhost"), port)) match {
      case Failure(_) => NotAvailable
      case Success(_) => Available
    }
  }
}

object EmbeddedKafkaSpecSupport {
  sealed trait ServerStatus
  case object Available extends ServerStatus
  case object NotAvailable extends ServerStatus
}
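A minimal sketch of a spec built on this support trait; the class name and fixed ports 6000/6001 are assumptions, and it relies on the withRunningKafka helper from EmbeddedKafka:

package net.manub.embeddedkafka

import net.manub.embeddedkafka.EmbeddedKafka._
import net.manub.embeddedkafka.EmbeddedKafkaSpecSupport.{Available, NotAvailable}

class BrokerReachabilitySpec extends EmbeddedKafkaSpecSupport {
  "an embedded broker" should {
    "be reachable only while it is running" in {
      implicit val kafkaConfig: EmbeddedKafkaConfig =
        EmbeddedKafkaConfig(kafkaPort = 6001, zooKeeperPort = 6000)
      withRunningKafka {
        expectedServerStatus(6001, Available)
      }
      // once the block exits, the broker is stopped
      expectedServerStatus(6001, NotAvailable)
    }
  }
}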
Example 78
Source File: EmbeddedKafkaWithRunningKafkaOnFoundPortSpec.scala From embedded-kafka with MIT License | 5 votes |
package net.manub.embeddedkafka

import net.manub.embeddedkafka.EmbeddedKafka._
import net.manub.embeddedkafka.EmbeddedKafkaSpecSupport.{
  Available,
  NotAvailable
}
import org.scalatest.Assertion

class EmbeddedKafkaWithRunningKafkaOnFoundPortSpec extends EmbeddedKafkaSpecSupport {

  "the withRunningKafkaOnFoundPort method" should {
    "start and stop Kafka and Zookeeper successfully on non-zero ports" in {
      val userDefinedConfig =
        EmbeddedKafkaConfig(kafkaPort = 12345, zooKeeperPort = 12346)
      val actualConfig = withRunningKafkaOnFoundPort(userDefinedConfig) { actualConfig =>
        actualConfig shouldBe userDefinedConfig
        everyServerIsAvailable(actualConfig)
        actualConfig
      }
      noServerIsAvailable(actualConfig)
    }

    "start and stop multiple Kafka and Zookeeper successfully on arbitrary available ports" in {
      val userDefinedConfig =
        EmbeddedKafkaConfig(kafkaPort = 0, zooKeeperPort = 0)
      val actualConfig1 = withRunningKafkaOnFoundPort(userDefinedConfig) { actualConfig1 =>
        everyServerIsAvailable(actualConfig1)
        publishStringMessageToKafka("topic", "message1")(actualConfig1)
        consumeFirstStringMessageFrom("topic")(actualConfig1) shouldBe "message1"
        val actualConfig2 = withRunningKafkaOnFoundPort(userDefinedConfig) { actualConfig2 =>
          everyServerIsAvailable(actualConfig2)
          publishStringMessageToKafka("topic", "message2")(actualConfig2)
          consumeFirstStringMessageFrom("topic")(actualConfig2) shouldBe "message2"
          val allConfigs = Seq(userDefinedConfig, actualConfig1, actualConfig2)
          // Confirm both actual configs are running on separate non-zero ports, but otherwise equal
          allConfigs.map(_.kafkaPort).distinct should have size 3
          allConfigs.map(_.zooKeeperPort).distinct should have size 3
          allConfigs
            .map(config =>
              EmbeddedKafkaConfigImpl(
                kafkaPort = 0,
                zooKeeperPort = 0,
                config.customBrokerProperties,
                config.customProducerProperties,
                config.customConsumerProperties
              )
            )
            .distinct should have size 1
          actualConfig2
        }
        noServerIsAvailable(actualConfig2)
        actualConfig1
      }
      noServerIsAvailable(actualConfig1)
    }

    "work with a simple example using implicits" in {
      val userDefinedConfig = EmbeddedKafkaConfig(kafkaPort = 0, zooKeeperPort = 0)
      withRunningKafkaOnFoundPort(userDefinedConfig) { implicit actualConfig =>
        publishStringMessageToKafka("topic", "message")
        consumeFirstStringMessageFrom("topic") shouldBe "message"
      }
    }
  }

  private def everyServerIsAvailable(config: EmbeddedKafkaConfig): Assertion = {
    expectedServerStatus(config.kafkaPort, Available)
    expectedServerStatus(config.zooKeeperPort, Available)
  }

  private def noServerIsAvailable(config: EmbeddedKafkaConfig): Assertion = {
    expectedServerStatus(config.kafkaPort, NotAvailable)
    expectedServerStatus(config.zooKeeperPort, NotAvailable)
  }
}
Example 79
Source File: LawChecking.scala From curryhoward with Apache License 2.0 | 5 votes |
package io.chymyst.ch.unit

import io.chymyst.ch.implement
import org.scalacheck.Arbitrary
import org.scalatest.{Assertion, FlatSpec, Matchers}
import org.scalatest.prop.GeneratorDrivenPropertyChecks

trait FMap[F[_]] {
  def f[A, B]: (A ⇒ B) ⇒ F[A] ⇒ F[B]
}

trait FPoint[F[_]] {
  def f[A]: A ⇒ F[A]
}

trait FFlatMap[F[_]] {
  def f[A, B]: (A ⇒ F[B]) ⇒ F[A] ⇒ F[B]
}

trait LawChecking extends FlatSpec with Matchers with GeneratorDrivenPropertyChecks {

  def fEqual[A: Arbitrary, B](f1: A ⇒ B, f2: A ⇒ B): Assertion = {
    forAll { (x: A) ⇒ f1(x) shouldEqual f2(x) }
  }

  private def checkFunctionEquality[A: Arbitrary, B](f1: A ⇒ B, f2: A ⇒ B)(implicit resultsEqual: (B, B) ⇒ Assertion): Assertion = {
    forAll { (x: A) ⇒ resultsEqual(f1(x), f2(x)) }
  }

  // Check equality for higher-order functions of type A ⇒ B ⇒ C.
  def hofEqual[A: Arbitrary, B: Arbitrary, C: Arbitrary](f1: A ⇒ B ⇒ C, f2: A ⇒ B ⇒ C): Assertion =
    checkFunctionEquality[A, B ⇒ C](f1, f2)(implicitly[Arbitrary[A]], (x: B ⇒ C, y: B ⇒ C) ⇒ fEqual(x, y))

  def fmapLawIdentity[A: Arbitrary, F[_]](fmap: FMap[F])(implicit fResultsEqual: (F[A], F[A]) ⇒ Assertion, ev: Arbitrary[F[A]]): Assertion = {
    checkFunctionEquality[F[A], F[A]](fmap.f(identity[A]), identity[F[A]])
  }

  def fmapLawComposition[A: Arbitrary, B: Arbitrary, C: Arbitrary, F[_]](fmap: FMap[F])(implicit fResultsEqual: (F[C], F[C]) ⇒ Assertion, evA: Arbitrary[F[A]], evAB: Arbitrary[A ⇒ B], evBC: Arbitrary[B ⇒ C]): Assertion = {
    forAll { (f: A ⇒ B, g: B ⇒ C) ⇒
      checkFunctionEquality[F[A], F[C]](fmap.f(f) andThen fmap.f(g), fmap.f(f andThen g))
    }
  }

  def fmapPointLaw[A: Arbitrary, B: Arbitrary, F[_]](point: FPoint[F], fmap: FMap[F])(implicit fResultsEqual: (F[B], F[B]) ⇒ Assertion, evAB: Arbitrary[A ⇒ B]): Assertion =
    forAll { (f: A ⇒ B) ⇒
      val point_dot_map = point.f andThen fmap.f(f)
      val f_dot_point = f andThen point.f
      checkFunctionEquality[A, F[B]](point_dot_map, f_dot_point)
    }

  def flatmapPointLaw[A: Arbitrary, B: Arbitrary, F[_]](point: FPoint[F], flatmap: FFlatMap[F])(implicit fResultsEqual: (F[B], F[B]) ⇒ Assertion, evAB: Arbitrary[A ⇒ F[B]], evFB: Arbitrary[F[B]]): Assertion =
    forAll { (f: A ⇒ F[B]) ⇒
      checkFunctionEquality[F[B], F[B]](flatmap.f(point.f), identity)
      checkFunctionEquality(point.f andThen flatmap.f(f), f)
    }

  def flatmapAssocLaw[A: Arbitrary, B: Arbitrary, C: Arbitrary, F[_]](fflatMap: FFlatMap[F])(implicit fResultsEqual: (F[C], F[C]) ⇒ Assertion, evFA: Arbitrary[F[A]], evAB: Arbitrary[A ⇒ F[B]], evBC: Arbitrary[B ⇒ F[C]]): Assertion =
    forAll { (f: A ⇒ F[B], g: B ⇒ F[C]) ⇒
      val x = fflatMap.f(f) andThen fflatMap.f(g)
      val y = fflatMap.f((x: A) ⇒ fflatMap.f(g)(f(x)))
      checkFunctionEquality[F[A], F[C]](x, y)
    }

  def flip[A, B, C]: (A ⇒ B ⇒ C) ⇒ (B ⇒ A ⇒ C) = implement
}
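As an illustration, the law-checking helpers can be pointed at Option as the functor under test. Everything in this sketch (the spec class, the Option instance, the equality) is assumed for the example and is not part of the library:

package io.chymyst.ch.unit

import org.scalatest.Assertion

class OptionLawsSpec extends LawChecking {

  // fmap for Option, written against the FMap interface above
  val optionFmap: FMap[Option] = new FMap[Option] {
    override def f[A, B]: (A ⇒ B) ⇒ Option[A] ⇒ Option[B] = f ⇒ _.map(f)
  }

  // Equality of results, expressed as the (F[A], F[A]) ⇒ Assertion the laws expect
  implicit def optionsEqual[A]: (Option[A], Option[A]) ⇒ Assertion =
    (x, y) ⇒ x shouldEqual y

  behavior of "Option"

  it should "satisfy the functor identity law" in {
    fmapLawIdentity[Int, Option](optionFmap)
  }
}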
Example 80
Source File: ControllerTest.scala From Aton with GNU General Public License v3.0 | 5 votes |
package test

import model.json.ResultMessage
import org.scalatest.{Assertion, BeforeAndAfterAll}
import org.scalatest.mockito.MockitoSugar
import org.scalatestplus.play.PlaySpec
import play.api.mvc.Result
import play.api.test.Helpers._
import play.test.WithApplication

import scala.concurrent.{Await, Future}
import scala.concurrent.duration._

// Note: the class declaration and the `application` member were missing from this snippet;
// the wrapper below is a plausible reconstruction from the imports and usages, not the original.
class ControllerTest extends PlaySpec with MockitoSugar with BeforeAndAfterAll {

  val application: WithApplication = new WithApplication() // assumed

  override def afterAll() {
    application.stopPlay()
  }

  def assertFutureResultStatus(future: Future[Result], status: Int) = {
    val result: Result = Await.result(future, 20 seconds)
    if (result.header.status != status) {
      play.Logger.error(contentAsString(future))
    }
    assert(result.header.status == status)
  }

  def assertBodyJsonMessage(future: Future[Result], message: String, emptyExtras: Boolean): Assertion = {
    // val result: Result = Await.result(future, 20 seconds)
    val bodyJson = contentAsJson(future)
    play.Logger.debug(s"BodyJson: $bodyJson")
    val jsResult = bodyJson.validate[ResultMessage]
    assert(jsResult.isSuccess)
    if (!emptyExtras) {
      assert(jsResult.get.extras.nonEmpty)
      if (jsResult.get.extras.isEmpty) {
        play.Logger.debug(jsResult.toString)
      }
    }
    assert(jsResult.get.result === message)
  }

  def assertBodyJsonMessage(future: Future[Result], message: String): Assertion = {
    assertBodyJsonMessage(future, message, emptyExtras = true)
  }
}
Example 81
Source File: AsyncUdashSharedTest.scala From udash-core with Apache License 2.0 | 5 votes |
package io.udash.testing

import org.scalactic.source.Position
import org.scalajs.dom
import org.scalatest.{Assertion, Succeeded}

import scala.concurrent.{ExecutionContext, Future, Promise}
import scala.scalajs.concurrent.JSExecutionContext
import scala.scalajs.js.Date
import scala.util.{Failure, Success}

trait AsyncUdashSharedTest extends AsyncUdashSharedTestBase {
  override implicit def executionContext: ExecutionContext = JSExecutionContext.queue

  override def retrying(code: => Any)(implicit patienceConfig: PatienceConfig, pos: Position): Future[Assertion] = {
    val start = Date.now()
    val p = Promise[Assertion]
    var lastEx: Option[Throwable] = None

    def startTest(): Unit = {
      dom.window.setTimeout(() => {
        if (patienceConfig.timeout.toMillis > Date.now() - start) {
          try {
            code
            p.complete(Success(Succeeded))
          } catch {
            case ex: Throwable =>
              lastEx = Some(ex)
              startTest()
          }
        } else {
          p.complete(Failure(lastEx.getOrElse(RetryingTimeout())))
        }
      }, patienceConfig.interval.toMillis.toDouble)
    }

    startTest()
    p.future
  }
}
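A rough sketch of how retrying is typically consumed (the surrounding suite, matchers in scope, and the timeout value are assumptions): the block is re-run on the JS event loop until it passes or the patience timeout elapses.

// inside a test body of a suite extending AsyncUdashSharedTest, with Matchers mixed in
var rendered = false
dom.window.setTimeout(() => rendered = true, 50)

retrying {
  // retried every `patienceConfig.interval` until it holds or the timeout is hit
  rendered shouldBe true
}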
Example 82
Source File: RokkuFixtures.scala From rokku with Apache License 2.0 | 5 votes |
package com.ing.wbaa.testkit

import java.io.{File, RandomAccessFile}

import com.amazonaws.services.s3.AmazonS3
import com.ing.wbaa.testkit.awssdk.S3SdkHelpers
import org.scalatest.Assertion

import scala.concurrent.{ExecutionContext, Future}
import scala.util.{Random, Try}

trait RokkuFixtures extends S3SdkHelpers {

  def withHomeBucket(s3Client: AmazonS3, objects: Seq[String])(testCode: String => Future[Assertion])(
      implicit exCtx: ExecutionContext): Future[Assertion] = {
    val testBucket = "home"
    Try(s3Client.createBucket(testBucket))
    objects.foreach(obj => s3Client.putObject(testBucket, obj, ""))
    testCode(testBucket).andThen {
      case _ => cleanBucket(s3Client, testBucket)
    }
  }

  private def cleanBucket(s3Client: AmazonS3, bucketName: String) = {
    import scala.collection.JavaConverters._
    s3Client.listObjectsV2(bucketName).getObjectSummaries.asScala.toList.map(_.getKey).foreach { key =>
      s3Client.deleteObject(bucketName, key)
    }
  }
}
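A hedged sketch of a consuming test; the spec class, the S3 client wiring, and the object names are all assumptions for illustration:

package com.ing.wbaa.testkit

import com.amazonaws.services.s3.AmazonS3
import org.scalatest.AsyncWordSpec

import scala.concurrent.Future

class HomeBucketSpec extends AsyncWordSpec with RokkuFixtures {
  // assumed: a client pre-configured to talk to the Rokku proxy under test
  def s3Client: AmazonS3 = ???

  "the home bucket" should {
    "contain the seeded objects" in {
      withHomeBucket(s3Client, Seq("a.txt", "b.txt")) { bucket =>
        Future {
          assert(s3Client.listObjectsV2(bucket).getObjectSummaries.size == 2)
        }
      }
    }
  }
}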