org.scalatest.AsyncFlatSpec Scala Examples
The following examples show how to use org.scalatest.AsyncFlatSpec.
Each example is taken from an open-source project; the source file, project, and license are noted above the code.
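Before the project examples, here is a minimal, self-contained sketch of the AsyncFlatSpec style they all share. The spec name and the toUpperAsync method are illustrative only and do not come from any of the projects below; the key point is that each test body returns a Future, and ScalaTest completes the test when that Future resolves (ScalaTest 3.0.x import path, matching the examples).

import org.scalatest.AsyncFlatSpec

import scala.concurrent.Future

class ToUpperSpec extends AsyncFlatSpec {

  // Hypothetical asynchronous operation, used only for illustration
  def toUpperAsync(s: String): Future[String] = Future.successful(s.toUpperCase)

  // The test body returns a Future[Assertion]; AsyncFlatSpec supplies the implicit ExecutionContext
  "toUpperAsync" should "upper-case its input" in {
    toUpperAsync("hello") map { result =>
      assert(result == "HELLO")
    }
  }
}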
Example 1
Source File: KsmServiceImplTest.scala From kafka-security-manager with MIT License
package com.github.simplesteph.ksm.grpc

import com.github.simplesteph.ksm.notification.DummyNotification
import com.github.simplesteph.ksm.parser.CsvAclParser
import com.github.simplesteph.ksm.source.DummySourceAcl
import com.github.simplesteph.ksm.{AclSynchronizer, DummyAuthorizer}
import com.security.kafka.pb.ksm.OperationTypePb._
import com.security.kafka.pb.ksm.PermissionTypePb._
import com.security.kafka.pb.ksm.ResourceTypePb._
import com.security.kafka.pb.ksm._
import org.scalatest.{AsyncFlatSpec, Matchers}

class KsmServiceImplTest extends AsyncFlatSpec with Matchers {

  val dummySourceAcl = new DummySourceAcl
  val ksmServiceImpl = new KsmServiceImpl(
    new AclSynchronizer(
      new DummyAuthorizer(),
      dummySourceAcl,
      new DummyNotification,
      new CsvAclParser
    )
  )

  "getAllAcls" should "return all Acls" in {
    ksmServiceImpl.getAllAcls(new GetAllAclsRequest) map { getAclResponse =>
      getAclResponse shouldBe GetAllAclsResponse(
        Vector(
          ResourceAndAclPb(
            Some(
              ResourcePb(
                "foo",
                RESOURCE_TYPE_TOPIC,
                PatternTypePb.PATTERN_TYPE_LITERAL
              )
            ),
            Some(
              AclPb(
                Some(KafkaPrincipalPb("User", "alice")),
                PERMISSION_TYPE_ALLOW,
                "*",
                OPERATION_TYPE_READ
              )
            )
          )
        )
      )
    }
  }
}
Example 2
Source File: SmokeTest.scala From aws4s with MIT License
package org.aws4s.core

import cats.effect.IO
import org.aws4s.{Credentials, Region}
import org.http4s.client.blaze.Http1Client
import org.scalatest.{AsyncFlatSpec, Matchers}

abstract class SmokeTest extends AsyncFlatSpec with Matchers {

  final val httpClient  = Http1Client[IO]()
  final val region      = Region.`eu-central-1`
  final val credentials = () => Credentials(getEnvOrDie("AWS_ACCESS_KEY"), getEnvOrDie("AWS_SECRET_KEY"))

  private final def getEnvOrDie(name: String): String =
    Option(System.getenv(name)) match {
      case Some(v) => v
      case None    => throw new RuntimeException(s"ENV variable $name is missing")
    }
}
Example 3
Source File: ZipkinModuleSpec.scala From play-zipkin-tracing with Apache License 2.0
package brave.play.module

import java.util.Collections

import akka.actor.CoordinatedShutdown
import brave.Tracing
import brave.play.{ZipkinTraceService, ZipkinTraceServiceLike}
import org.scalatest.AsyncFlatSpec
import play.api.inject.guice.GuiceApplicationBuilder
import zipkin2.reporter.Sender
import zipkin2.reporter.okhttp3.OkHttpSender

class ZipkinModuleSpec extends AsyncFlatSpec {

  val injector = new GuiceApplicationBuilder()
    .bindings(new ZipkinModule)
    .injector()

  it should "provide an okhttp sender" in {
    val sender = injector.instanceOf[Sender]
    assert(sender.isInstanceOf[OkHttpSender])
  }

  it should "eventually close the sender" in {
    // provisioning the sender so we can tell if it is closed on shutdown
    val sender = injector.instanceOf[Sender]

    // stopping the application should close the sender!
    injector.instanceOf[CoordinatedShutdown].run(CoordinatedShutdown.UnknownReason) map { _ =>
      val thrown = intercept[Exception] {
        sender.sendSpans(Collections.emptyList[Array[Byte]]).execute()
      }
      assert(thrown.getMessage === "closed")
    }
  }

  it should "provide a tracing component" in instanceOfTracing { tracing =>
    assert(Tracing.current() != null)
    assert(Tracing.current() == tracing)
  }

  it should "eventually close the tracing component" in instanceOfTracing { tracing =>
    // stopping the application should close the tracing component!
    injector.instanceOf[CoordinatedShutdown].run(CoordinatedShutdown.UnknownReason) map { _ =>
      assert(Tracing.current() == null)
    }
  }

  private def instanceOfTracing[A](test: Tracing => A): A = {
    val tracing = injector.instanceOf[Tracing]
    try {
      test(tracing)
    } finally {
      // Ensures there is no active Tracing object
      tracing.close()
    }
  }

  it should "provide a zipkin trace service" in {
    // TODO: dies due to missing dispatcher
    val service = injector.instanceOf[ZipkinTraceServiceLike]
    assert(service.isInstanceOf[ZipkinTraceService])
  }
}
Example 4
Source File: ActorTraceSupportSpec.scala From play-zipkin-tracing with Apache License 2.0
package brave.play.actor

import java.util.concurrent.TimeUnit

import akka.actor.{ActorSystem, Props}
import akka.util.Timeout
import org.scalatest.AsyncFlatSpec
import ActorTraceSupport._
import brave.play.{TestZipkinTraceService, ZipkinTraceServiceLike}

class ActorTraceSupportSpec extends AsyncFlatSpec {

  it should "ask pattern" in {
    val system = ActorSystem("mySystem")
    implicit val tracer = new TestZipkinTraceService
    implicit val timeout = Timeout(5, TimeUnit.SECONDS)

    val actor = system.actorOf(Props(classOf[HelloWorldActor], tracer), "test-actor")

    TraceableActorRef(actor) ? HelloWorldMessage("Test", ActorTraceData()) map { result =>
      assert(result == "Received data: Test")
    }

    TimeUnit.SECONDS.sleep(3)

    tracer.tracing.close()
    system.terminate()

    assert(tracer.reporter.spans.length == 2)

    val parent = tracer.reporter.spans.find(_.name == "? - test-actor").get
    val child = tracer.reporter.spans.find(_.name == "test-actor").get

    assert(parent.id == child.parentId)
    assert(parent.id != child.id)
  }
}

class HelloWorldActor(val tracer: ZipkinTraceServiceLike) extends TraceableActor {
  def receive = {
    case m: HelloWorldMessage => sender() ! s"Received data: ${m.message}"
  }
}

case class HelloWorldMessage(message: String, traceData: ActorTraceData) extends TraceMessage
Example 5
Source File: Json4sJacksonSpec.scala From squbs with Apache License 2.0
package org.squbs.marshallers.json

import akka.actor.ActorSystem
import akka.http.scaladsl.marshalling.Marshal
import akka.http.scaladsl.model.{HttpEntity, MediaTypes, MessageEntity}
import akka.http.scaladsl.unmarshalling.Unmarshal
import akka.stream.ActorMaterializer
import org.json4s._
import org.scalatest.{AsyncFlatSpec, BeforeAndAfterAll, Matchers}

class Json4sJacksonSpec extends AsyncFlatSpec with Matchers with BeforeAndAfterAll {

  import de.heikoseeberger.akkahttpjson4s.Json4sSupport._

  implicit val system = ActorSystem("Json4sJacksonSpec")
  implicit val mat = ActorMaterializer()
  implicit val serialization = jackson.Serialization

  "NoTypeHints Example (case class)" should "have correct behaviour of read/write" in {
    implicit val formats = DefaultFormats.withHints(NoTypeHints)
    val playInfo = PlayerInfo("d", "k", 30)
    val entity = HttpEntity(MediaTypes.`application/json`, """{"firstName":"d","lastName":"k","age":30}""")
    Marshal(playInfo).to[MessageEntity] map { _ shouldBe entity }
    Unmarshal(entity).to[PlayerInfo] map { _ shouldBe playInfo }
  }

  "NoTypeHints Example (case class contain the other case class)" should "have correct behaviour of read/write" in {
    implicit val formats = DefaultFormats.withHints(NoTypeHints)
    val name = Player("d", "k")
    val playInfo = PlayerInfo2(name, 30)
    val entity = HttpEntity(MediaTypes.`application/json`, """{"name":{"firstName":"d","lastName":"k"},"age":30}""")
    Marshal(playInfo).to[MessageEntity] map { _ shouldBe entity }
    Unmarshal(entity).to[PlayerInfo2] map { _ shouldBe playInfo }
  }

  "ShortTypeHints Example (inheritance)" should "have correct behaviour of read/write" in {
    implicit val formats = DefaultFormats.withHints(ShortTypeHints(classOf[Dog] :: classOf[Fish] :: Nil))
    val animals = Animals(Dog("pluto") :: Fish(1.2) :: Nil)
    val entity = HttpEntity(MediaTypes.`application/json`,
      """{"animals":[{"jsonClass":"Dog","name":"pluto"},{"jsonClass":"Fish","weight":1.2}]}""")
    Marshal(animals).to[MessageEntity] map { _ shouldBe entity }
    Unmarshal(entity).to[Animals] map { _ shouldBe animals }
  }

  "FullTypeHints Example (inheritance)" should "have correct behaviour of read/write" in {
    implicit val formats = DefaultFormats.withHints(FullTypeHints(classOf[Dog] :: classOf[Fish] :: Nil))
    val animals = Animals(Dog("lucky") :: Fish(3.4) :: Nil)
    val entity = HttpEntity(MediaTypes.`application/json`,
      """{"animals":[{"jsonClass":"org.squbs.marshallers.json.Dog","name":"lucky"},""" +
        """{"jsonClass":"org.squbs.marshallers.json.Fish","weight":3.4}]}""")
    Marshal(animals).to[MessageEntity] map { _ shouldBe entity }
    Unmarshal(entity).to[Animals] map { _ shouldBe animals }
  }

  "Custom Example (inheritance)" should "have correct behaviour of read/write" in {
    implicit val formats = new Formats {
      val dateFormat = DefaultFormats.lossless.dateFormat
      override val typeHints = FullTypeHints(classOf[Fish] :: classOf[Dog] :: Nil)
      override val typeHintFieldName = "$type$"
    }
    val animals = Animals(Dog("lucky") :: Fish(3.4) :: Nil)
    val entity = HttpEntity(MediaTypes.`application/json`,
      """{"animals":[{"$type$":"org.squbs.marshallers.json.Dog","name":"lucky"},""" +
        """{"$type$":"org.squbs.marshallers.json.Fish","weight":3.4}]}""")
    Marshal(animals).to[MessageEntity] map { _ shouldBe entity }
    Unmarshal(entity).to[Animals] map { _ shouldBe animals }
  }

  override protected def afterAll(): Unit = system.terminate()
}

case class Player(firstName: String, lastName: String)
case class PlayerInfo(firstName: String, lastName: String, age: Int)
case class PlayerInfo2(name: Player, age: Int)

trait Animal
case class Dog(name: String) extends Animal
case class Fish(weight: Double) extends Animal
case class Animals(animals: List[Animal])
Example 6
Source File: Json4sNativeSpec.scala From squbs with Apache License 2.0
package org.squbs.marshallers.json

import akka.actor.ActorSystem
import akka.http.scaladsl.marshalling.Marshal
import akka.http.scaladsl.model.{HttpEntity, MediaTypes, MessageEntity}
import akka.http.scaladsl.unmarshalling.Unmarshal
import akka.stream.ActorMaterializer
import org.json4s._
import org.scalatest.{AsyncFlatSpec, BeforeAndAfterAll, Matchers}

class Json4sNativeSpec extends AsyncFlatSpec with Matchers with BeforeAndAfterAll {

  import de.heikoseeberger.akkahttpjson4s.Json4sSupport._

  implicit val system = ActorSystem("Json4sNativeSpec")
  implicit val mat = ActorMaterializer()
  implicit val serialization = native.Serialization

  "NoTypeHints Example (case class)" should "have correct behaviour of read/write" in {
    implicit val formats = DefaultFormats.withHints(NoTypeHints)
    val playInfo = PlayerInfo("d", "k", 30)
    val entity = HttpEntity(MediaTypes.`application/json`, """{"firstName":"d","lastName":"k","age":30}""")
    Marshal(playInfo).to[MessageEntity] map { _ shouldBe entity }
    Unmarshal(entity).to[PlayerInfo] map { _ shouldBe playInfo }
  }

  "NoTypeHints Example (case class contain the other case class)" should "have correct behaviour of read/write" in {
    implicit val formats = DefaultFormats.withHints(NoTypeHints)
    val name = Player("d", "k")
    val playInfo = PlayerInfo2(name, 30)
    val entity = HttpEntity(MediaTypes.`application/json`, """{"name":{"firstName":"d","lastName":"k"},"age":30}""")
    Marshal(playInfo).to[MessageEntity] map { _ shouldBe entity }
    Unmarshal(entity).to[PlayerInfo2] map { _ shouldBe playInfo }
  }

  "ShortTypeHints Example (inheritance)" should "have correct behaviour of read/write" in {
    implicit val formats = DefaultFormats.withHints(ShortTypeHints(classOf[Dog] :: classOf[Fish] :: Nil))
    val animals = Animals(Dog("pluto") :: Fish(1.2) :: Nil)
    val entity = HttpEntity(MediaTypes.`application/json`,
      """{"animals":[{"jsonClass":"Dog","name":"pluto"},{"jsonClass":"Fish","weight":1.2}]}""")
    Marshal(animals).to[MessageEntity] map { _ shouldBe entity }
    Unmarshal(entity).to[Animals] map { _ shouldBe animals }
  }

  "FullTypeHints Example (inheritance)" should "have correct behaviour of read/write" in {
    implicit val formats = DefaultFormats.withHints(FullTypeHints(classOf[Dog] :: classOf[Fish] :: Nil))
    val animals = Animals(Dog("lucky") :: Fish(3.4) :: Nil)
    val entity = HttpEntity(MediaTypes.`application/json`,
      """{"animals":[{"jsonClass":"org.squbs.marshallers.json.Dog","name":"lucky"},""" +
        """{"jsonClass":"org.squbs.marshallers.json.Fish","weight":3.4}]}""")
    Marshal(animals).to[MessageEntity] map { _ shouldBe entity }
    Unmarshal(entity).to[Animals] map { _ shouldBe animals }
  }

  "Custom Example (inheritance)" should "have correct behaviour of read/write" in {
    implicit val formats = new Formats {
      val dateFormat: DateFormat = DefaultFormats.lossless.dateFormat
      override val typeHints = FullTypeHints(classOf[Fish] :: classOf[Dog] :: Nil)
      override val typeHintFieldName = "$type$"
    }
    val animals = Animals(Dog("lucky") :: Fish(3.4) :: Nil)
    val entity = HttpEntity(MediaTypes.`application/json`,
      """{"animals":[{"$type$":"org.squbs.marshallers.json.Dog","name":"lucky"},""" +
        """{"$type$":"org.squbs.marshallers.json.Fish","weight":3.4}]}""")
    Marshal(animals).to[MessageEntity] map { _ shouldBe entity }
    Unmarshal(entity).to[Animals] map { _ shouldBe animals }
  }
}
Example 7
Source File: JacksonMapperSpec.scala From squbs with Apache License 2.0
package org.squbs.marshallers.json

import akka.actor.ActorSystem
import akka.http.scaladsl.marshalling.Marshal
import akka.http.scaladsl.model.{HttpEntity, MediaTypes, MessageEntity}
import akka.http.scaladsl.unmarshalling.Unmarshal
import akka.stream.ActorMaterializer
import com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility
import com.fasterxml.jackson.annotation.PropertyAccessor
import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import org.scalatest.{AsyncFlatSpec, BeforeAndAfterAll, Matchers}
import org.squbs.marshallers.json.TestData._

class JacksonMapperSpec extends AsyncFlatSpec with Matchers with BeforeAndAfterAll {

  import JacksonMapperSupport._

  implicit val system = ActorSystem("JacksonMapperSpec")
  implicit val mat = ActorMaterializer()

  JacksonMapperSupport.setDefaultMapper(new ObjectMapper().registerModule(DefaultScalaModule))

  it should "marshal and unmarshal standard case classes" in {
    val entity = HttpEntity(MediaTypes.`application/json`, fullTeamJson)
    Marshal(fullTeam).to[MessageEntity] map { _ shouldBe entity }
    Unmarshal(entity).to[Team] map { _ shouldBe fullTeam }
  }

  it should "marshal and unmarshal Scala non-case classes" in {
    val entity = HttpEntity(MediaTypes.`application/json`, fullTeamJson)
    Marshal(fullTeamNonCaseClass).to[MessageEntity] map { _ shouldBe entity }
    Unmarshal(entity).to[TeamNonCaseClass] map { _ shouldBe fullTeamNonCaseClass }
  }

  it should "marshal and unmarshal Scala class with Java Bean members" in {
    val entity = HttpEntity(MediaTypes.`application/json`, fullTeamJson)
    Marshal(fullTeamWithBeanMember).to[MessageEntity] map { _ shouldBe entity }
    Unmarshal(entity).to[TeamWithBeanMember] map { _ shouldBe fullTeamWithBeanMember }
  }

  it should "marshal and unmarshal Java Bean with case class members" in {
    val entity = HttpEntity(MediaTypes.`application/json`, fullTeamJson)
    Marshal(fullTeamWithCaseClassMember).to[MessageEntity] map { _ shouldBe entity }
    Unmarshal(entity).to[TeamBeanWithCaseClassMember] map { _ shouldBe fullTeamWithCaseClassMember }
  }

  it should "marshal and unmarshal Java Bean" in {
    val fieldMapper = new ObjectMapper().setVisibility(PropertyAccessor.FIELD, Visibility.ANY)
      .registerModule(DefaultScalaModule)
    JacksonMapperSupport.register[TeamWithPrivateMembers](fieldMapper)
    val entity = HttpEntity(MediaTypes.`application/json`, fullTeamJson)
    Marshal(fullTeamWithPrivateMembers).to[MessageEntity] map { _ shouldBe entity }
    Unmarshal(entity).to[TeamWithPrivateMembers] map { _ shouldBe fullTeamWithPrivateMembers }
  }

  it should "Marshal and unmarshal Jackson annotated Java subclasses" in {
    JacksonMapperSupport.register[PageData](new ObjectMapper)
    val entity = HttpEntity(MediaTypes.`application/json`, pageTestJson)
    Marshal(pageTest).to[MessageEntity] map { _ shouldBe entity }
    Unmarshal(entity).to[PageData] map { _ shouldBe pageTest }
  }
}
Example 8
Source File: DeduplicateSpec.scala From squbs with Apache License 2.0
package org.squbs.streams

import java.util

import akka.actor.ActorSystem
import akka.stream.scaladsl._
import akka.stream.ActorMaterializer
import org.scalatest.{AsyncFlatSpec, Matchers}

class DeduplicateSpec extends AsyncFlatSpec with Matchers {

  implicit val system = ActorSystem("DeduplicateSpec")
  implicit val materializer = ActorMaterializer()

  it should "require duplicateCount >= 2" in {
    an [IllegalArgumentException] should be thrownBy
      Source("a" :: "a" :: "b" :: "b" :: "c" :: "c" :: Nil).via(Deduplicate(1)).runWith(Sink.seq)
  }

  it should "filter consecutive duplicates" in {
    val result = Source("a" :: "a" :: "b" :: "b" :: "c" :: "c" :: Nil).via(Deduplicate(2)).runWith(Sink.seq)
    result map { s => s should contain theSameElementsInOrderAs ("a" :: "b" :: "c" :: Nil) }
  }

  it should "filter unordered duplicates" in {
    val result = Source("a" :: "b" :: "b" :: "c" :: "a" :: "c" :: Nil).via(Deduplicate(2)).runWith(Sink.seq)
    result map { s => s should contain theSameElementsInOrderAs ("a" :: "b" :: "c" :: Nil) }
  }

  it should "filter when identical element count is greater than 2" in {
    val result = Source("a" :: "a" :: "b" :: "b" :: "b" :: "c" :: "a" :: "c" :: "c" :: Nil).
      via(Deduplicate(3)).runWith(Sink.seq)
    result map { s => s should contain theSameElementsInOrderAs ("a" :: "b" :: "c" :: Nil) }
  }

  it should "filter all identical elements when duplicate count is not specified" in {
    val result = Source("a" :: "b" :: "b" :: "c" :: "a" :: "a" :: "a" :: "c" :: Nil).via(Deduplicate()).runWith(Sink.seq)
    result map { s => s should contain theSameElementsInOrderAs ("a" :: "b" :: "c" :: Nil) }
  }

  it should "allow identical elements after duplicate count is reached" in {
    val result = Source("a" :: "a" :: "b" :: "b" :: "b" :: "c" :: "a" :: "c" :: "c" :: Nil).
      via(Deduplicate(2)).runWith(Sink.seq)
    result map { s => s should contain theSameElementsInOrderAs ("a" :: "b" :: "b" :: "c" :: "a" :: "c" :: Nil) }
  }

  it should "remove an element from registry when duplicate count is reached" in {
    val map = new util.HashMap[String, MutableLong]()
    val result = Source("a" :: "a" :: "b" :: "c" :: "c" :: Nil).via(Deduplicate(2, map)).runWith(Sink.seq)
    result map { s =>
      s should contain theSameElementsInOrderAs ("a" :: "b" :: "c" :: Nil)
      map should have size 1
      map.get("b") shouldEqual MutableLong(1)
    }
  }

  it should "work with a custom registry" in {
    val treeMap = new util.TreeMap[String, MutableLong]()
    val result = Source("a" :: "a" :: "b" :: "b" :: "c" :: "c" :: Nil).via(Deduplicate(2, treeMap)).runWith(Sink.seq)
    result map { s =>
      s should contain theSameElementsInOrderAs ("a" :: "b" :: "c" :: Nil)
      treeMap shouldBe empty
    }
  }

  it should "work with a custom key function" in {
    val result = Source((1 -> "a") :: (1 -> "sameKey") :: (2 -> "b") :: (2 -> "b") :: (3 -> "c") :: (3 -> "c") :: Nil).
      via(Deduplicate((element: (Int, String)) => element._1, 2)).runWith(Sink.seq)
    result map { s =>
      s should contain theSameElementsInOrderAs ((1 -> "a") :: (2 -> "b") :: (3 -> "c") :: Nil)
    }
  }
}
Example 9
Source File: ClientFlowIdleTimeoutSpec.scala From squbs with Apache License 2.0
package org.squbs.httpclient

import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.http.scaladsl.model._
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{Sink, Source, TcpIdleTimeoutException}
import com.typesafe.config.ConfigFactory
import org.scalatest.{AsyncFlatSpec, BeforeAndAfterAll, Matchers}
import org.squbs.resolver.ResolverRegistry
import org.squbs.testkit.Timeouts.awaitMax

import scala.concurrent.{Await, Promise}
import scala.util.{Failure, Success}

object ClientFlowIdleTimeoutSpec {

  val config = ConfigFactory.parseString(
    """
      |akka {
      |  loggers = [
      |    "akka.event.Logging$DefaultLogger"
      |  ]
      |
      |  loglevel = "DEBUG"
      |
      |  http {
      |    server {
      |      idle-timeout = 240 s
      |      request-timeout = 120 s
      |    }
      |
      |    client.idle-timeout = 1 s
      |
      |    host-connection-pool.max-retries = 0
      |  }
      |}
    """.stripMargin)

  implicit val system = ActorSystem("ClientFlowIdleTimeoutSpec", config)
  implicit val materializer = ActorMaterializer()

  ResolverRegistry(system).register[HttpEndpoint]("LocalhostEndpointResolver") { (svcName, _) =>
    svcName match {
      case "slow" => Some(HttpEndpoint(s"http://localhost:$port"))
      case _ => None
    }
  }

  import akka.http.scaladsl.server.Directives._
  import system.dispatcher

  val route = path("slow") {
    get {
      val promise = Promise[String]
      // Never completing the promise
      onComplete(promise.future) {
        case Success(value) => complete(HttpEntity(ContentTypes.`text/html(UTF-8)`, "Slow...!"))
        case Failure(ex) => complete(HttpEntity(ContentTypes.`text/html(UTF-8)`, "Slow failed...!"))
      }
    }
  }

  val serverBinding = Await.result(Http().bindAndHandle(route, "localhost", 0), awaitMax)
  val port = serverBinding.localAddress.getPort
}

class ClientFlowIdleTimeoutSpec extends AsyncFlatSpec with Matchers with BeforeAndAfterAll {

  import ClientFlowIdleTimeoutSpec._

  override def afterAll: Unit = {
    serverBinding.unbind() map {_ => system.terminate()}
  }

  it should "drop the connection after idle-timeout and resume the stream with new connections" in {
    val clientFlow = ClientFlow[Int]("slow")

    val result = Source(1 to 10)
      .map(HttpRequest(uri = "/slow") -> _)
      .via(clientFlow)
      .runWith(Sink.seq)

    result map { r =>
      val failures = r.map(_._1).filter(_.isFailure).map(_.failed)
      failures should have size 10
      failures.forall(_.get.isInstanceOf[TcpIdleTimeoutException]) shouldBe true
      r.map(_._2) should contain theSameElementsAs (1 to 10)
    }
  }
}
Example 10
Source File: ClientFlowHttpsSpec.scala From squbs with Apache License 2.0
package org.squbs.httpclient

import java.io.InputStream
import java.security.{KeyStore, SecureRandom}
import javax.net.ssl.{KeyManagerFactory, SSLContext, TrustManagerFactory}

import akka.actor.ActorSystem
import akka.http.scaladsl.model._
import akka.http.scaladsl.{ConnectionContext, Http}
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{Sink, Source}
import akka.util.ByteString
import com.typesafe.config.ConfigFactory
import org.scalatest.{AsyncFlatSpec, BeforeAndAfterAll, Matchers}
import org.squbs.resolver.ResolverRegistry
import org.squbs.testkit.Timeouts._

import scala.concurrent.{Await, Future}
import scala.util.{Success, Try}

object ClientFlowHttpsSpec {

  val config = ConfigFactory.parseString(
    """
      |helloHttps {
      |  type = squbs.httpclient
      |  akka.ssl-config.loose.disableHostnameVerification = true
      |}
    """.stripMargin)

  implicit val system = ActorSystem("ClientFlowHttpsSpec", config)
  implicit val materializer = ActorMaterializer()

  ResolverRegistry(system).register[HttpEndpoint]("LocalhostHttpsEndpointResolver") { (name, _) =>
    name match {
      case "helloHttps" =>
        Some(HttpEndpoint(s"https://localhost:$port", Some(sslContext("exampletrust.jks", "changeit")), None))
      case _ => None
    }
  }

  import akka.http.scaladsl.server.Directives._
  import system.dispatcher

  val route = path("hello") {
    get {
      complete(HttpEntity(ContentTypes.`text/html(UTF-8)`, "Hello World!"))
    }
  }

  val serverBinding = Await.result(
    Http().bindAndHandle(route, "localhost", 0, ConnectionContext.https(sslContext("example.com.jks", "changeit"))),
    awaitMax)
  val port = serverBinding.localAddress.getPort
}

class ClientFlowHttpsSpec extends AsyncFlatSpec with Matchers with BeforeAndAfterAll {

  import ClientFlowHttpsSpec._

  override def afterAll: Unit = {
    serverBinding.unbind() map {_ => system.terminate()}
  }

  it should "make a call to Hello Service" in {
    val clientFlow = ClientFlow[Int]("helloHttps")
    val responseFuture: Future[(Try[HttpResponse], Int)] =
      Source.single(HttpRequest(uri = "/hello") -> 42)
        .via(clientFlow)
        .runWith(Sink.head)

    val (Success(response), _) = Await.result(responseFuture, awaitMax)
    response.status should be (StatusCodes.OK)
    val entity = response.entity.dataBytes.runFold(ByteString(""))(_ ++ _) map (_.utf8String)
    entity map { e => e shouldEqual "Hello World!" }
  }
}
Example 11
Source File: JdbcLedgerDaoH2DatabaseSpec.scala From daml with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.platform.store.dao

import org.scalatest.{AsyncFlatSpec, Matchers}

// Aggregate all specs in a single run to not start a new database fixture for each one
final class JdbcLedgerDaoH2DatabaseSpec
    extends AsyncFlatSpec
    with Matchers
    with JdbcLedgerDaoSuite
    with JdbcLedgerDaoBackendH2Database
    with JdbcLedgerDaoActiveContractsSpec
    with JdbcLedgerDaoCompletionsSpec
    with JdbcLedgerDaoConfigurationSpec
    with JdbcLedgerDaoContractsSpec
    with JdbcLedgerDaoPackagesSpec
    with JdbcLedgerDaoPartiesSpec
    with JdbcLedgerDaoTransactionsSpec
    with JdbcLedgerDaoTransactionTreesSpec
    with JdbcLedgerDaoTransactionsWriterSpec
Example 12
Source File: NotificationSenderAsyncTest.scala From teamcity-slack with MIT License
package com.fpd.teamcity.slack

import com.fpd.teamcity.slack.ConfigManager.BuildSettingFlag
import com.fpd.teamcity.slack.SlackGateway.{MessageSent, SlackChannel, SlackUser}
import com.ullink.slack.simpleslackapi.SlackMessageHandle
import com.ullink.slack.simpleslackapi.replies.SlackMessageReply
import jetbrains.buildServer.messages.Status
import jetbrains.buildServer.users.SUser
import org.scalamock.scalatest.AsyncMockFactory
import org.scalatest.{AsyncFlatSpec, Matchers}

import scala.concurrent.Future
import scala.util.Success

import NotificationSenderTest._

class NotificationSenderAsyncTest extends AsyncFlatSpec with AsyncMockFactory with Matchers {

  "NotificationSender.send" should "send message to channel for non-personal build" in {
    val sent = successfulSent
    val context = new Context {
      def settingFlags = Set(BuildSettingFlag.failure)

      build.getBuildStatus _ when() returns Status.FAILURE
      build.isPersonal _ when() returns false

      gateway.sendMessage _ when(SlackChannel(channelName), *) returns Future.successful(sent)

      val result = sender.send(build, Set(BuildSettingFlag.failure))
    }
    context.result.map(_.head shouldEqual sent)
  }

  "NotificationSender.send" should "send private message to build's owner for personal build" in {
    val sent = successfulSent
    val context = new Context {
      def settingFlags = Set(BuildSettingFlag.failure)

      build.getBuildStatus _ when() returns Status.FAILURE
      build.isPersonal _ when() returns true

      val email = "[email protected]"
      val user = stub[SUser]
      user.getEmail _ when() returns email
      build.getOwner _ when() returns user

      gateway.sendMessage _ when(SlackUser(email), *) returns Future.successful(sent)

      val result = sender.send(build, Set(BuildSettingFlag.failure))
    }
    context.result.map(_.head shouldEqual sent)
  }

  def successfulSent: MessageSent = Success(stub[SlackMessageHandle[SlackMessageReply]])
}
Example 13
Source File: NifiProcessorSpec.scala From daf with BSD 3-Clause "New" or "Revised" License
package it.gov.daf.ingestion.nifi

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import com.typesafe.config.Config
import it.gov.daf.catalogmanager.MetaCatalog
import it.gov.daf.catalogmanager.json._
import it.gov.daf.ingestion.metacatalog.MetaCatalogProcessor
import org.scalatest.{AsyncFlatSpec, Matchers}
import play.api.libs.json._
import play.api.libs.ws.WSResponse
import play.api.libs.ws.ahc.AhcWSClient

import scala.concurrent.Future
import scala.io.Source

class NifiProcessorSpec extends AsyncFlatSpec with Matchers {

  "A Nifi Processor " should "create a nifi pipeline for a correct meta catalog entry" in {

    val in = this.getClass.getResourceAsStream("/data_test.json")
    val sMetaCatalog = Source.fromInputStream(in).getLines().mkString(" ")
    in.close()

    val parsed = Json.parse(sMetaCatalog)
    val metaCatalog: JsResult[MetaCatalog] = Json.fromJson[MetaCatalog](parsed)

    metaCatalog.isSuccess shouldBe true

    implicit val system: ActorSystem = ActorSystem()
    implicit val materializer: ActorMaterializer = ActorMaterializer()
    implicit val wsClient: AhcWSClient = AhcWSClient()
    implicit val config: Config = com.typesafe.config.ConfigFactory.load()
    implicit val ec = system.dispatcher

    def closeAll(): Unit = {
      system.terminate()
      materializer.shutdown()
      wsClient.close()
    }

    val fResult = NifiProcessor(metaCatalog.get).createDataFlow()

    fResult.map { response =>
      println(response)
      closeAll()
      true shouldBe true
    }
  }
}
Example 14
Source File: FastSyncStateStorageActorSpec.scala From mantis with Apache License 2.0
package io.iohk.ethereum.blockchain.sync

import akka.actor.ActorSystem
import akka.pattern._
import akka.testkit.TestActorRef
import akka.util.ByteString
import io.iohk.ethereum.NormalPatience
import io.iohk.ethereum.blockchain.sync.FastSync.SyncState
import io.iohk.ethereum.blockchain.sync.FastSyncStateStorageActor.GetStorage
import io.iohk.ethereum.db.dataSource.EphemDataSource
import io.iohk.ethereum.db.storage.FastSyncStateStorage
import io.iohk.ethereum.domain.BlockHeader
import org.scalatest.concurrent.Eventually
import org.scalatest.{AsyncFlatSpec, Matchers}

class FastSyncStateStorageActorSpec extends AsyncFlatSpec with Matchers with Eventually with NormalPatience {

  "FastSyncStateActor" should "eventually persist a newest state of a fast sync" in {

    val dataSource = EphemDataSource()
    implicit val system = ActorSystem("FastSyncStateActorSpec_System")
    val syncStateActor = TestActorRef(new FastSyncStateStorageActor)
    val maxN = 10

    val targetBlockHeader = BlockHeader(ByteString(""), ByteString(""), ByteString(""), ByteString(""),
      ByteString(""), ByteString(""), ByteString(""), 0, 0, 0, 0, 0, ByteString(""), ByteString(""), ByteString(""))
    syncStateActor ! new FastSyncStateStorage(dataSource)

    (0 to maxN).foreach(n => syncStateActor ! SyncState(targetBlockHeader).copy(downloadedNodesCount = n))

    eventually {
      (syncStateActor ? GetStorage).mapTo[Option[SyncState]].map { syncState =>
        val expected = SyncState(targetBlockHeader).copy(downloadedNodesCount = maxN)
        syncState shouldEqual Some(expected)
      }
    }
  }
}
Example 15
Source File: AuthorizationTest.scala From daml with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.http

import java.nio.file.Files

import akka.actor.ActorSystem
import akka.stream.Materializer
import com.daml.auth.TokenHolder
import com.daml.bazeltools.BazelRunfiles.rlocation
import com.daml.grpc.adapter.{AkkaExecutionSequencerPool, ExecutionSequencerFactory}
import com.daml.http.util.TestUtil.requiredFile
import com.daml.ledger.api.auth.{AuthServiceStatic, Claim, ClaimPublic, Claims}
import com.daml.ledger.client.LedgerClient
import org.scalatest.{AsyncFlatSpec, BeforeAndAfterAll, Matchers}
import org.slf4j.LoggerFactory

import scala.concurrent.ExecutionContext
import scala.util.control.NonFatal

final class AuthorizationTest extends AsyncFlatSpec with BeforeAndAfterAll with Matchers {

  private val dar = requiredFile(rlocation("docs/quickstart-model.dar"))
    .fold(e => throw new IllegalStateException(e), identity)

  private val testId: String = this.getClass.getSimpleName

  implicit val asys: ActorSystem = ActorSystem(testId)
  implicit val mat: Materializer = Materializer(asys)
  implicit val aesf: ExecutionSequencerFactory = new AkkaExecutionSequencerPool(testId)(asys)
  implicit val ec: ExecutionContext = asys.dispatcher

  private val publicToken = "public"
  private val emptyToken = "empty"

  private val mockedAuthService = Option(AuthServiceStatic {
    case `publicToken` => Claims(Seq[Claim](ClaimPublic))
    case `emptyToken` => Claims(Nil)
  })

  private val accessTokenFile = Files.createTempFile("Extractor", "AuthSpec")
  private val tokenHolder = Option(new TokenHolder(accessTokenFile))

  private def setToken(string: String): Unit = {
    val _ = Files.write(accessTokenFile, string.getBytes())
  }

  override protected def afterAll(): Unit = {
    super.afterAll()
    try {
      Files.delete(accessTokenFile)
    } catch {
      case NonFatal(e) =>
        LoggerFactory
          .getLogger(classOf[AuthorizationTest])
          .warn("Unable to delete temporary token file", e)
    }
  }

  protected def withLedger[A] =
    HttpServiceTestFixture
      .withLedger[A](List(dar), testId, Option(publicToken), mockedAuthService) _

  private def packageService(client: LedgerClient): PackageService =
    new PackageService(HttpService.loadPackageStoreUpdates(client.packageClient, tokenHolder))

  behavior of "PackageService against an authenticated sandbox"

  it should "fail immediately if the authorization is insufficient" in withLedger { client =>
    setToken(emptyToken)
    packageService(client).reload.failed.map(_ => succeed)
  }

  it should "succeed if the authorization is sufficient" in withLedger { client =>
    setToken(publicToken)
    packageService(client).reload.map(_ => succeed)
  }
}
Example 16
Source File: JdbcLedgerDaoPackagesSpec.scala From daml with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.platform.store.dao

import org.scalatest.{AsyncFlatSpec, Matchers}

private[dao] trait JdbcLedgerDaoPackagesSpec {
  this: AsyncFlatSpec with Matchers with JdbcLedgerDaoSuite =>

  behavior of "JdbcLedgerDao (packages)"

  it should "upload packages in an idempotent fashion, maintaining existing descriptions" in {
    val firstDescription = "first description"
    val secondDescription = "second description"
    val offset1 = nextOffset()
    val offset2 = nextOffset()
    for {
      firstUploadResult <- ledgerDao
        .storePackageEntry(
          offset1,
          packages
            .map(a => a._1 -> a._2.copy(sourceDescription = Some(firstDescription)))
            .take(1),
          None)
      secondUploadResult <- ledgerDao
        .storePackageEntry(
          offset2,
          packages.map(a => a._1 -> a._2.copy(sourceDescription = Some(secondDescription))),
          None)
      loadedPackages <- ledgerDao.listLfPackages
    } yield {
      firstUploadResult shouldBe PersistenceResponse.Ok
      secondUploadResult shouldBe PersistenceResponse.Ok
      // Note that the order here isn’t fixed.
      loadedPackages.values.flatMap(_.sourceDescription.toList) should contain theSameElementsAs
        Seq(firstDescription) ++ Seq.fill(packages.length - 1)(secondDescription)
    }
  }
}
Example 17
Source File: JdbcLedgerDaoTransactionsWriterSpec.scala From daml with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.platform.store.dao

import org.scalatest.{AsyncFlatSpec, LoneElement, Matchers}

private[dao] trait JdbcLedgerDaoTransactionsWriterSpec extends LoneElement {
  this: AsyncFlatSpec with Matchers with JdbcLedgerDaoSuite =>

  private val ok = io.grpc.Status.Code.OK.value()

  behavior of "JdbcLedgerDao (TransactionsWriter)"

  it should "serialize a valid positive lookupByKey" in {
    val keyValue = s"positive-lookup-by-key"
    for {
      from <- ledgerDao.lookupLedgerEnd()
      (_, create) <- store(txCreateContractWithKey(alice, keyValue))
      createdContractId = nonTransient(create).loneElement
      (_, lookup) <- store(txLookupByKey(alice, keyValue, Some(createdContractId)))
      to <- ledgerDao.lookupLedgerEnd()
      completions <- getCompletions(from, to, defaultAppId, Set(alice))
    } yield {
      completions should contain allOf (
        create.commandId.get -> ok,
        lookup.commandId.get -> ok,
      )
    }
  }

  it should "serialize a valid fetch" in {
    val keyValue = s"valid-fetch"
    for {
      from <- ledgerDao.lookupLedgerEnd()
      (_, create) <- store(txCreateContractWithKey(alice, keyValue))
      createdContractId = nonTransient(create).loneElement
      (_, fetch) <- store(txFetch(alice, createdContractId))
      to <- ledgerDao.lookupLedgerEnd()
      completions <- getCompletions(from, to, defaultAppId, Set(alice))
    } yield {
      completions should contain allOf (
        create.commandId.get -> ok,
        fetch.commandId.get -> ok,
      )
    }
  }
}
Example 18
Source File: GroupContiguousSpec.scala From daml with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.platform.store.dao.events

import akka.stream.scaladsl.{Sink, Source}
import com.daml.ledger.api.testing.utils.AkkaBeforeAndAfterAll
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.prop.PropertyChecks
import org.scalatest.{AsyncFlatSpec, Matchers}

final class GroupContiguousSpec
    extends AsyncFlatSpec
    with Matchers
    with PropertyChecks
    with ScalaFutures
    with AkkaBeforeAndAfterAll {

  behavior of "groupContiguous"

  override def spanScaleFactor: Double = 10 // Give some extra slack on CI

  it should "be equivalent to grouping on inputs with an ordered key" in forAll {
    pairs: List[(Int, String)] =>
      val sortedPairs = pairs.sortBy(_._1)
      val grouped = groupContiguous(Source(sortedPairs))(by = _._1)
      whenReady(grouped.runWith(Sink.seq[Vector[(Int, String)]])) {
        _ should contain theSameElementsAs pairs.groupBy(_._1).values
      }
  }

  it should "be equivalent to grouping on inputs with a contiguous key" in {
    val pairsWithContiguousKeys = List(1 -> "baz", 0 -> "foo", 0 -> "bar", 0 -> "quux")
    val grouped = groupContiguous(Source(pairsWithContiguousKeys))(by = _._1)
    whenReady(grouped.runWith(Sink.seq[Vector[(Int, String)]])) {
      _.map(_.toSet) should contain theSameElementsAs pairsWithContiguousKeys
        .groupBy(_._1)
        .map(_._2.toSet)
    }
  }

  it should "behave as expected when grouping inputs without a contiguous key" in {
    val pairs = List(0 -> "foo", 0 -> "bar", 1 -> "baz", 0 -> "quux")
    val grouped = groupContiguous(Source(pairs))(by = _._1)
    whenReady(grouped.runWith(Sink.seq[Vector[(Int, String)]])) {
      _.map(_.toSet) should contain theSameElementsAs Vector(
        Set(0 -> "foo", 0 -> "bar"),
        Set(1 -> "baz"),
        Set(0 -> "quux"),
      )
    }
  }
}
Example 19
Source File: JdbcLedgerDaoPostgresqlSpec.scala From daml with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.platform.store.dao

import org.scalatest.{AsyncFlatSpec, Matchers}

// Aggregate all specs in a single run to not start a new database fixture for each one
final class JdbcLedgerDaoPostgresqlSpec
    extends AsyncFlatSpec
    with Matchers
    with JdbcLedgerDaoSuite
    with JdbcLedgerDaoBackendPostgresql
    with JdbcLedgerDaoActiveContractsSpec
    with JdbcLedgerDaoCompletionsSpec
    with JdbcLedgerDaoConfigurationSpec
    with JdbcLedgerDaoContractsSpec
    with JdbcLedgerDaoPackagesSpec
    with JdbcLedgerDaoPartiesSpec
    with JdbcLedgerDaoTransactionsSpec
    with JdbcLedgerDaoTransactionTreesSpec
    with JdbcLedgerDaoTransactionsWriterSpec