org.scalatest.matchers.should.Matchers Scala Examples
The following examples show how to use org.scalatest.matchers.should.Matchers.
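Before the examples, here is a minimal, self-contained sketch of how Matchers is typically mixed into a ScalaTest suite. The ExampleSpec class and the values asserted on below are illustrative only and do not come from any of the projects listed:

import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

// Hypothetical spec, for illustration only: mixing Matchers into any
// ScalaTest style trait enables the should-style assertion DSL.
class ExampleSpec extends AnyFlatSpec with Matchers {
  "Matchers" should "provide should-style assertions" in {
    val xs = List(1, 2, 3)
    xs should have size 3                              // collection size
    xs should contain(2)                               // membership
    xs.headOption shouldBe Some(1)                     // equality
    "hello" should startWith("he")                     // string prefix
    an[ArithmeticException] should be thrownBy (1 / 0) // expected exception
  }
}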
Example 1
Source File: PulsarSinkTaskTest.scala From stream-reactor with Apache License 2.0
package com.datamountaineer.streamreactor.connect.pulsar.sink

import java.util

import com.datamountaineer.streamreactor.connect.pulsar.config.PulsarConfigConstants
import org.apache.kafka.common.TopicPartition
import org.apache.kafka.connect.sink.SinkTaskContext
import org.mockito.MockitoSugar
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

import scala.collection.JavaConverters._

class PulsarSinkTaskTest extends AnyWordSpec with Matchers with MockitoSugar {

  val pulsarTopic = "persistent://landoop/standalone/connect/kafka-topic"

  "should start a Sink" in {
    val props = Map(
      PulsarConfigConstants.HOSTS_CONFIG -> "pulsar://localhost:6650",
      PulsarConfigConstants.KCQL_CONFIG -> s"INSERT INTO $pulsarTopic SELECT * FROM kafka_topic BATCH = 10 WITHPARTITIONER = SinglePartition WITHCOMPRESSION = ZLIB WITHDELAY = 1000"
    ).asJava

    val assignment: util.Set[TopicPartition] = new util.HashSet[TopicPartition]
    val partition: TopicPartition = new TopicPartition("kafka_topic", 1)
    // Set topic assignments
    assignment.add(partition)

    val context = mock[SinkTaskContext]
    when(context.assignment()).thenReturn(assignment)
    when(context.configs()).thenReturn(props)

    val task = new PulsarSinkTask()
    task.initialize(context)
    task.start(props)
  }
}
Example 2
Source File: AsyncHttpClientPipedFs2WebsocketsTest.scala From sttp with Apache License 2.0
package sttp.client.asynchttpclient.fs2

import cats.effect.concurrent.Ref
import cats.effect.IO
import cats.implicits._
import fs2._
import sttp.client._
import sttp.client.asynchttpclient.WebSocketHandler
import sttp.client.impl.cats.CatsTestBase
import sttp.client.impl.fs2.Fs2WebSockets
import sttp.client.testing.ToFutureWrapper
import sttp.client.ws.WebSocket
import sttp.model.ws.WebSocketFrame
import sttp.client.testing.HttpTest.wsEndpoint

import scala.collection.immutable.Queue

import org.scalatest.flatspec.AsyncFlatSpec
import org.scalatest.matchers.should.Matchers

class AsyncHttpClientPipedFs2WebsocketsTest extends AsyncFlatSpec with Matchers with ToFutureWrapper with CatsTestBase {
  implicit val backend: SttpBackend[IO, Nothing, WebSocketHandler] = AsyncHttpClientFs2Backend[IO]().unsafeRunSync()

  def createHandler: Option[Int] => IO[WebSocketHandler[WebSocket[IO]]] = Fs2WebSocketHandler[IO](_)

  it should "run a simple echo pipe" in {
    basicRequest
      .get(uri"$wsEndpoint/ws/echo")
      .openWebsocketF(createHandler(None))
      .product(Ref.of[IO, Queue[String]](Queue.empty))
      .flatMap {
        case (response, results) =>
          Fs2WebSockets.handleSocketThroughTextPipe(response.result) { in =>
            val receive = in.evalMap(m => results.update(_.enqueue(m)))
            val send = Stream("Message 1".asRight, "Message 2".asRight, WebSocketFrame.close.asLeft)
            send merge receive.drain
          } >> results.get.map(_ should contain theSameElementsInOrderAs List("echo: Message 1", "echo: Message 2"))
      }
      .toFuture()
  }

  it should "run a simple read-only client" in {
    basicRequest
      .get(uri"$wsEndpoint/ws/send_and_wait")
      .openWebsocketF(createHandler(None))
      .product(Ref.of[IO, Queue[String]](Queue.empty))
      .flatMap {
        case (response, results) =>
          Fs2WebSockets.handleSocketThroughTextPipe(response.result) { in =>
            in.evalMap(m => results.update(_.enqueue(m)).flatMap(_ => results.get.map(_.size)))
              .flatMap {
                case 2 => Stream(None) // terminating the stream
                case _ => Stream.empty // waiting for more messages
              }
              .unNoneTerminate
          } >> results.get.map(_ should contain theSameElementsInOrderAs List("test10", "test20"))
      }
      .toFuture()
  }
}
Example 3
Source File: SttpBackendStubZioTests.scala From sttp with Apache License 2.0
package sttp.client.asynchttpclient.zio

import org.scalatest.concurrent.ScalaFutures
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
import sttp.client._
import sttp.client.testing.SttpBackendStub
import sttp.client.impl.zio._
import sttp.client.monad.MonadError
import sttp.client.ws.{WebSocket, WebSocketEvent}
import sttp.model.Headers
import sttp.model.ws._
import zio._

class SttpBackendStubZioTests extends AnyFlatSpec with Matchers with ScalaFutures {

  "backend stub" should "cycle through responses using a single sent request" in {
    // given
    implicit val b: SttpBackendStub[Task, Nothing, NothingT] = SttpBackendStub(new RIOMonadAsyncError[Any])
      .whenRequestMatches(_ => true)
      .thenRespondCyclic("a", "b", "c")

    // when
    val r = basicRequest.get(uri"http://example.org/a/b/c").send()

    // then
    runtime.unsafeRun(r).body shouldBe Right("a")
    runtime.unsafeRun(r).body shouldBe Right("b")
    runtime.unsafeRun(r).body shouldBe Right("c")
    runtime.unsafeRun(r).body shouldBe Right("a")
  }

  it should "return given web socket response" in {
    val rioMonad: MonadError[zio.Task] = new RIOMonadAsyncError[Any]
    val frame1 = WebSocketFrame.text("initial frame")
    val sentFrame = WebSocketFrame.text("sent frame")

    def webSocket(queue: Queue[WebSocketFrame.Incoming]) = new WebSocket[Task] {
      override def isOpen: zio.Task[Boolean] = Task.succeed(true)
      override def monad: MonadError[zio.Task] = rioMonad
      override def receive: zio.Task[Either[WebSocketEvent.Close, WebSocketFrame.Incoming]] =
        queue.take.map(Right(_))
      override def send(f: WebSocketFrame, isContinuation: Boolean): zio.Task[Unit] =
        f match {
          case t: WebSocketFrame.Text => queue.offer(t).unit
          case _                      => Task.unit
        }
    }

    def makeBackend(queue: Queue[WebSocketFrame.Incoming]) =
      AsyncHttpClientZioBackend.stub
        .whenRequestMatches(_ => true)
        .thenRespondWebSocket(Headers(List.empty), webSocket(queue))

    val test = for {
      queue <- Queue.unbounded[WebSocketFrame.Incoming]
      _ <- queue.offer(frame1)
      backend = makeBackend(queue)
      handler <- ZioWebSocketHandler()
      request = basicRequest.get(uri"http://example.org/a/b/c")
      ws <- backend.openWebsocket(request, handler).map(_.result)
      msg1 <- ws.receive
      _ <- ws.send(sentFrame, false)
      msg2 <- ws.receive
    } yield (msg1, msg2)

    runtime.unsafeRun(test) shouldBe ((Right(frame1), Right(sentFrame)))
  }
}
Example 4
Source File: AkkaHttpWebsocketTest.scala From sttp with Apache License 2.0
package sttp.client.akkahttp

import java.util.concurrent.ConcurrentLinkedQueue

import akka.Done
import akka.http.scaladsl.model.ws.{Message, TextMessage}
import akka.stream.Materializer
import akka.stream.scaladsl._
import org.scalatest.BeforeAndAfterAll
import org.scalatest.concurrent.{Eventually, IntegrationPatience}
import sttp.client._

import scala.collection.JavaConverters._
import scala.concurrent.duration._
import scala.concurrent.{ExecutionContext, Future, Promise}
import scala.util.Success
import org.scalatest.flatspec.AsyncFlatSpec
import org.scalatest.matchers.should.Matchers
import sttp.client.testing.HttpTest.wsEndpoint

class AkkaHttpWebsocketTest
    extends AsyncFlatSpec
    with Matchers
    with BeforeAndAfterAll
    with Eventually
    with IntegrationPatience {
  implicit val ec: ExecutionContext = scala.concurrent.ExecutionContext.global
  implicit val backend: SttpBackend[Future, Nothing, Flow[Message, Message, *]] = AkkaHttpBackend()

  it should "send and receive ten messages" in {
    val received = new ConcurrentLinkedQueue[String]()
    val sink: Sink[Message, Future[Done]] = collectionSink(received)

    val n = 10
    val source: Source[Message, Promise[Option[Message]]] =
      Source((1 to n).map(i => TextMessage(s"test$i"))).concatMat(Source.maybe[Message])(Keep.right)

    val flow: Flow[Message, Message, (Future[Done], Promise[Option[Message]])] =
      Flow.fromSinkAndSourceMat(sink, source)(Keep.both)

    basicRequest.get(uri"$wsEndpoint/ws/echo").openWebsocket(flow).flatMap { r =>
      eventually {
        received.asScala.toList shouldBe (1 to n).map(i => s"echo: test$i").toList
      }

      r.result._2.complete(Success(None)) // the source should now complete
      r.result._1.map(_ => succeed) // the future should be completed once the stream completes (and the ws closes)
    }
  }

  it should "receive two messages" in {
    val received = new ConcurrentLinkedQueue[String]()
    val sink: Sink[Message, Future[Done]] = collectionSink(received)
    val source: Source[Message, Promise[Option[Message]]] = Source.maybe[Message]

    val flow: Flow[Message, Message, Promise[Option[Message]]] =
      Flow.fromSinkAndSourceMat(sink, source)(Keep.right)

    basicRequest.get(uri"$wsEndpoint/ws/send_and_wait").openWebsocket(flow).flatMap { r =>
      eventually {
        received.asScala.toList shouldBe List("test10", "test20")
      }

      r.result.success(None) // closing
      succeed
    }
  }

  it should "error if the endpoint is not a websocket" in {
    basicRequest.get(uri"$wsEndpoint/echo").openWebsocket(Flow.apply[Message]).failed.map { t =>
      t shouldBe a[NotAWebsocketException]
    }
  }

  def collectionSink(queue: ConcurrentLinkedQueue[String]): Sink[Message, Future[Done]] =
    Sink
      .setup[Message, Future[Done]] { (_materializer, _) =>
        Flow[Message]
          // mapping with parallelism 1 so that messages don't get reordered
          .mapAsync(1) {
            case m: TextMessage =>
              implicit val materializer: Materializer = _materializer
              m.toStrict(1.second).map(Some(_))
            case _ => Future.successful(None)
          }
          .collect { case Some(TextMessage.Strict(text)) => text }
          .toMat(Sink.foreach(queue.add))(Keep.right)
      }
      .mapMaterializedValue(_.flatMap(identity))

  override protected def afterAll(): Unit = {
    backend.close()
    super.afterAll()
  }
}
Example 5
Source File: AkkaHttpRouteBackendTest.scala From sttp with Apache License 2.0
package sttp.client.akkahttp

import akka.actor.ActorSystem
import akka.http.scaladsl.server.Route
import akka.stream.ActorMaterializer
import org.scalatest.BeforeAndAfterAll
import sttp.client.{NothingT, SttpBackend}
import sttp.model.StatusCode

import scala.concurrent.duration._
import scala.concurrent.{Await, Future}
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AsyncWordSpec

class AkkaHttpRouteBackendTest extends AsyncWordSpec with Matchers with BeforeAndAfterAll {

  implicit val system: ActorSystem = ActorSystem()
  implicit val materializer: ActorMaterializer = ActorMaterializer()

  override protected def afterAll(): Unit = {
    Await.result(system.terminate(), 5.seconds)
  }

  val backend: SttpBackend[Future, Nothing, NothingT] = {
    AkkaHttpBackend.usingClient(system, http = AkkaHttpClient.stubFromRoute(Routes.route))
  }

  import sttp.client._

  "matched route" should {
    "respond" in {
      backend.send(basicRequest.get(uri"http://localhost/hello")).map { response =>
        response.code shouldBe StatusCode.Ok
        response.body.right.get shouldBe "Hello, world!"
      }
    }
  }

  "unmatched route" should {
    "respond with 404" in {
      backend.send(basicRequest.get(uri"http://localhost/not-matching")).map { response =>
        response.code shouldBe StatusCode.NotFound
        response.body.left.get shouldBe "The requested resource could not be found."
      }
    }
  }
}

object Routes {
  import akka.http.scaladsl.server.Directives._

  val route: Route = pathPrefix("hello") {
    complete("Hello, world!")
  }
}
Example 6
Source File: SttpBackendStubAkkaTests.scala From sttp with Apache License 2.0
package sttp.client.akkahttp

import akka.actor.ActorSystem
import akka.http.scaladsl.model.ws.{Message, TextMessage}
import akka.stream.OverflowStrategy
import akka.stream.scaladsl.{Flow, Keep, Sink, Source}
import org.scalatest.BeforeAndAfterAll
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
import sttp.client._
import sttp.model.Headers

import scala.concurrent.{Await, Future}
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._

class SttpBackendStubAkkaTests extends AnyFlatSpec with Matchers with ScalaFutures with BeforeAndAfterAll {

  implicit val system: ActorSystem = ActorSystem()

  override protected def afterAll(): Unit = {
    Await.result(system.terminate().map(_ => ()), 5.seconds)
  }

  "backend stub" should "cycle through responses using a single sent request" in {
    // given
    implicit val backend = AkkaHttpBackend.stub
      .whenRequestMatches(_ => true)
      .thenRespondCyclic("a", "b", "c")

    // when
    def r = basicRequest.get(uri"http://example.org/a/b/c").send().futureValue

    // then
    r.body shouldBe Right("a")
    r.body shouldBe Right("b")
    r.body shouldBe Right("c")
    r.body shouldBe Right("a")
  }

  it should "use given flow as web socket handler" in {
    // This test is an example of how we can test a client flow.
    // We check the behavior of the client when it is connected to an echo server.
    // The client's responsibility is to send two messages to the server and collect the received messages.
    val useHandler: Flow[Message, Message, Future[Seq[Message]]] => Future[Seq[Message]] = clientFlow => {
      val ((outQueue, clientReceivedMessages), inQueue) = Source
        .queue(1, OverflowStrategy.fail)
        .viaMat(clientFlow)(Keep.both)
        .toMat(Sink.queue())(Keep.both)
        .run()

      def echoMsg(): Future[Unit] =
        inQueue.pull().flatMap {
          case None => echoMsg()
          case Some(msg) =>
            outQueue.offer(TextMessage(s"echo: " + msg.asTextMessage.getStrictText)).map(_ => ())
        }

      (for {
        _ <- outQueue.offer(TextMessage("Hi!"))
        _ <- echoMsg()
        _ <- echoMsg()
        _ = outQueue.complete()
        _ <- outQueue.watchCompletion()
      } yield ()).flatMap(_ => clientReceivedMessages)
    }

    val clientFlow: Flow[Message, Message, Future[Seq[Message]]] = {
      Flow.fromSinkAndSourceMat(
        Sink.seq[Message],
        Source((1 to 2).map(i => TextMessage(s"test$i")))
      )(Keep.left)
    }

    implicit val b = AkkaHttpBackend.stub
      .whenRequestMatches(_ => true)
      .thenHandleOpenWebSocket(Headers(List.empty), useHandler)

    val receivedMessages = basicRequest
      .get(uri"wss://echo.websocket.org")
      .openWebsocket(clientFlow)
      .flatMap(_.result)
      .futureValue
      .toList

    receivedMessages shouldBe List("Hi!", "echo: test1", "echo: test2").map(TextMessage(_))
  }
}
Example 7
Source File: OkHttpSyncDigestAuthProxyManualTest.scala From sttp with Apache License 2.0
package sttp.client.okhttp

import org.scalatest.Ignore
import sttp.client._
import sttp.client.testing.{ConvertToFuture, ToFutureWrapper}
import org.scalatest.freespec.AsyncFreeSpec
import org.scalatest.matchers.should.Matchers

@Ignore
class OkHttpSyncDigestAuthProxyManualTest extends AsyncFreeSpec with Matchers with ToFutureWrapper {
  implicit val backend: SttpBackend[Identity, Nothing, NothingT] =
    new DigestAuthenticationBackend[Identity, Nothing, NothingT](
      OkHttpSyncBackend(options = SttpBackendOptions.httpProxy("localhost", 3128))
    )

  implicit val convertToFuture: ConvertToFuture[Identity] = ConvertToFuture.id

  "complex proxy auth with digest" in {
    val response = basicRequest
      .get(uri"http://httpbin.org/digest-auth/auth/andrzej/test/SHA-512")
      .auth
      .digest("andrzej", "test")
      .proxyAuth
      .digest("kasper", "qweqwe")
      .send()

    response.code.code shouldBe 200
  }
}
Example 8
Source File: IllTypedTests.scala From sttp with Apache License 2.0
package sttp.client.testing.compile

import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

import scala.tools.reflect.ToolBoxError

class IllTypedTests extends AnyFlatSpec with Matchers {
  "compilation" should "fail when trying to stream using the default backend" in {
    val thrown = intercept[ToolBoxError] {
      EvalScala("""
        import sttp.client._

        class MyStream[T]()

        implicit val sttpBackend = HttpURLConnectionBackend()
        basicRequest.get(uri"http://example.com").response(asStream[MyStream[Byte]]).send()
        """)
    }

    thrown.getMessage should include(
      "could not find implicit value for parameter backend: sttp.client.SttpBackend[F,MyStream[Byte],sttp.client.NothingT]"
    )
  }

  "compilation" should "fail when trying to send a request without giving an URL" in {
    val thrown = intercept[ToolBoxError] {
      EvalScala("""
        import sttp.client._

        implicit val sttpBackend = HttpURLConnectionBackend()
        basicRequest.send()
        """)
    }

    thrown.getMessage should include("This is a partial request, the method & url are not specified")
  }
}
Example 9
Source File: CookieRequestTests.scala From sttp with Apache License 2.0
package sttp.client

import sttp.model.{StatusCode, _}
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

class CookieRequestTests extends AnyFlatSpec with Matchers {

  "request cookies" should "be set from a name-value pair" in {
    basicRequest
      .cookie("k", "v")
      .headers
      .find(_.name == HeaderNames.Cookie)
      .map(_.value) should be(Some("k=v"))
  }

  it should "be set from multiple name-value pairs" in {
    basicRequest
      .cookies("k1" -> "v1", "k2" -> "v2")
      .headers
      .find(_.name == HeaderNames.Cookie)
      .map(_.value) should be(Some("k1=v1; k2=v2"))
  }

  it should "add multiple headers if invoked multiple times" in {
    basicRequest
      .cookie("k1", "v1")
      .cookie("k2" -> "v2")
      .cookies("k3" -> "v3", "k4" -> "v4")
      .headers
      .filter(_.name == HeaderNames.Cookie)
      .map(_.value)
      .toList should be(List("k1=v1; k2=v2; k3=v3; k4=v4"))
  }

  it should "set cookies from a response" in {
    val response = Response(
      Right(()),
      StatusCode.Ok,
      "",
      List(Header(HeaderNames.SetCookie, "k1=v1"), Header(HeaderNames.SetCookie, "k2=v2")),
      Nil
    )
    basicRequest
      .cookies(response)
      .headers
      .find(_.name == HeaderNames.Cookie)
      .map(_.value) should be(Some("k1=v1; k2=v2"))
  }
}
Example 10
Source File: ToCurlConverterTestExtension.scala From sttp with Apache License 2.0
package sttp.client

import java.io.File

import org.scalatest.Suite
import sttp.client.internal.SttpFile
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

trait ToCurlConverterTestExtension { suit: Suite with AnyFlatSpec with Matchers =>
  it should "render multipart form data if content is a file" in {
    basicRequest
      .multipartBody(multipartSttpFile("upload", SttpFile.fromPath(new File("myDataSet").toPath)))
      .post(uri"http://localhost")
      .toCurl should include(
      """--form 'upload=@myDataSet'"""
    )
  }
}
Example 11
Source File: LowLevelListenerWebSocketTest.scala From sttp with Apache License 2.0
package sttp.client.testing.websocket

import java.util.concurrent.ConcurrentLinkedQueue

import org.scalatest.concurrent.{Eventually, IntegrationPatience}
import org.scalatest.{Assertion, BeforeAndAfterAll}
import sttp.client._
import sttp.client.monad.MonadError
import sttp.client.testing.{ConvertToFuture, ToFutureWrapper}
import sttp.client.monad.syntax._

import scala.collection.JavaConverters._
import org.scalatest.SuiteMixin
import org.scalatest.flatspec.AsyncFlatSpecLike
import org.scalatest.matchers.should.Matchers
import sttp.client.testing.HttpTest.wsEndpoint

// TODO: change to `extends AsyncFlatSpec` when https://github.com/scalatest/scalatest/issues/1802 is fixed
trait LowLevelListenerWebSocketTest[F[_], WS, WS_HANDLER[_]]
    extends SuiteMixin
    with AsyncFlatSpecLike
    with Matchers
    with BeforeAndAfterAll
    with ToFutureWrapper
    with Eventually
    with IntegrationPatience {
  implicit def backend: SttpBackend[F, Nothing, WS_HANDLER]
  implicit def convertToFuture: ConvertToFuture[F]
  private implicit lazy val monad: MonadError[F] = backend.responseMonad

  def testErrorWhenEndpointIsNotWebsocket: Boolean = true
  def createHandler(onTextFrame: String => Unit): WS_HANDLER[WS]
  def sendText(ws: WS, t: String): Unit
  def sendCloseFrame(ws: WS): Unit

  it should "send and receive ten messages" in {
    val n = 10
    val received = new ConcurrentLinkedQueue[String]()
    basicRequest
      .get(uri"$wsEndpoint/ws/echo")
      .openWebsocket(createHandler(received.add))
      .map { response =>
        (1 to n).foreach { i =>
          val msg = s"test$i"
          info(s"Sending text message: $msg")
          sendText(response.result, msg)
        }
        eventually {
          received.asScala.toList shouldBe (1 to n).map(i => s"echo: test$i").toList
        }
        sendCloseFrame(response.result)
        succeed
      }
      .toFuture()
  }

  it should "receive two messages" in {
    val received = new ConcurrentLinkedQueue[String]()
    basicRequest
      .get(uri"$wsEndpoint/ws/send_and_wait")
      .openWebsocket(createHandler(received.add))
      .map { response =>
        eventually {
          received.asScala.toList shouldBe List("test10", "test20")
        }
        sendCloseFrame(response.result)
        succeed
      }
      .toFuture()
  }

  if (testErrorWhenEndpointIsNotWebsocket) {
    it should "error if the endpoint is not a websocket" in {
      monad
        .handleError(
          basicRequest
            .get(uri"$wsEndpoint/echo")
            .openWebsocket(createHandler(_ => ()))
            .map(_ => fail("An exception should be thrown"): Assertion)
        ) {
          case e => (e shouldBe a[SttpClientException.ReadException]).unit
        }
        .toFuture()
    }
  }

  override protected def afterAll(): Unit = {
    backend.close().toFuture()
    super.afterAll()
  }
}
Example 12
Source File: RequestTests.scala From sttp with Apache License 2.0
package sttp.client

import sttp.model.{HeaderNames, StatusCode}
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

class RequestTests extends AnyFlatSpec with Matchers {

  "content length" should "be automatically set for a string body" in {
    basicRequest
      .body("test")
      .headers
      .find(_.name.equalsIgnoreCase(HeaderNames.ContentLength))
      .map(_.value) should be(Some("4"))
  }

  it should "be automatically set to the number of utf-8 bytes in a string" in {
    basicRequest
      .body("ąęć")
      .headers
      .find(_.name.equalsIgnoreCase(HeaderNames.ContentLength))
      .map(_.value) should be(Some("6"))
  }

  it should "not override an already specified content length" in {
    basicRequest
      .contentLength(10)
      .body("a")
      .headers
      .find(_.name.equalsIgnoreCase(HeaderNames.ContentLength))
      .map(_.value) should be(Some("10"))
  }

  "request timeout" should "use default if not overridden" in {
    basicRequest.options.readTimeout should be(DefaultReadTimeout)
  }

  it should "compile multiple subtype response variants" in {
    val asLeft: ResponseAs[Left[String, String], Nothing] = asStringAlways.map(Left(_))
    val asRight: ResponseAs[Right[String, String], Nothing] = asStringAlways.map(Right(_))

    def myRequest: Request[Either[String, String], Nothing] =
      basicRequest
        .get(uri"https://test.com")
        .response {
          fromMetadata { meta =>
            meta.code match {
              case StatusCode.Ok         => asLeft
              case StatusCode.BadRequest => asRight
            }
          }
        }
  }
}
Example 13
Source File: ToCurlConverterTest.scala From sttp with Apache License 2.0
package sttp.client

import java.io.ByteArrayInputStream
import java.nio.charset.StandardCharsets

import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

class ToCurlConverterTest extends AnyFlatSpec with Matchers with ToCurlConverterTestExtension {
  private val localhost = uri"http://localhost"

  it should "convert base request" in {
    basicRequest
      .get(uri"$localhost")
      .toCurl shouldBe """curl -L --max-redirs 32 -X GET 'http://localhost'"""
  }

  it should "convert request with method to curl" in {
    basicRequest.get(localhost).toCurl should include("-X GET")
    basicRequest.post(localhost).toCurl should include("-X POST")
    basicRequest.put(localhost).toCurl should include("-X PUT")
    basicRequest.delete(localhost).toCurl should include("-X DELETE")
    basicRequest.patch(localhost).toCurl should include("-X PATCH")
    basicRequest.head(localhost).toCurl should include("-X HEAD")
    basicRequest.options(localhost).toCurl should include("-X OPTIONS")
  }

  it should "convert request with header" in {
    basicRequest.header("User-Agent", "myapp").get(localhost).toCurl should include(
      """-H 'User-Agent: myapp'"""
    )
  }

  it should "convert request with body" in {
    basicRequest.body(Map("name" -> "john", "org" -> "sml")).post(localhost).toCurl should include(
      """-H 'Content-Type: application/x-www-form-urlencoded' -H 'Content-Length: 17' -F 'name=john&org=sml'"""
    )
    basicRequest.body("name=john").post(localhost).toCurl should include(
      """-H 'Content-Type: text/plain; charset=utf-8' -H 'Content-Length: 9' --data 'name=john'"""
    )
    basicRequest.body("name=john", StandardCharsets.ISO_8859_1.name()).post(localhost).toCurl should include(
      """-H 'Content-Type: text/plain; charset=ISO-8859-1' -H 'Content-Length: 9' --data 'name=john'"""
    )
    basicRequest.body("name='john'").post(localhost).toCurl should include(
      """-H 'Content-Type: text/plain; charset=utf-8' -H 'Content-Length: 11' --data 'name=\'john\''"""
    )
    basicRequest.body("name=\"john\"").post(localhost).toCurl should include(
      """-H 'Content-Type: text/plain; charset=utf-8' -H 'Content-Length: 11' --data 'name="john"'"""
    )
  }

  it should "convert request with options" in {
    basicRequest.followRedirects(false).get(localhost).toCurl should not include "-L"
    basicRequest.maxRedirects(11).get(localhost).toCurl should include("--max-redirs 11")
  }

  it should "put placeholder when sending binary data" in {
    val testBodyBytes = "this is the body".getBytes("UTF-8")
    val curl = basicRequest
      .post(localhost)
      .body(new ByteArrayInputStream(testBodyBytes))
      .toCurl
    curl should include("--data-binary <PLACEHOLDER>")
  }

  it should "render multipart form data if content is a plain string" in {
    basicRequest.multipartBody(multipart("k1", "v1"), multipart("k2", "v2")).post(localhost).toCurl should include(
      """--form 'k1=v1' --form 'k2=v2'"""
    )
  }
}
Example 14
Source File: SttpBackendOptionsProxyTest.scala From sttp with Apache License 2.0
package sttp.client

import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

class SttpBackendOptionsProxyTest extends AnyFlatSpec with Matchers {

  "ignoreProxy" should "return true for an exact match with nonProxyHosts" in {
    val proxySetting = SttpBackendOptions.Proxy(
      "fakeproxyserverhost",
      8080,
      SttpBackendOptions.ProxyType.Http,
      List("a.nonproxy.host", "localhost", "127.*")
    )

    proxySetting.ignoreProxy("a.nonproxy.host") should be(true)
    proxySetting.ignoreProxy("localhost") should be(true)
  }

  it should "return true for wildcard suffix match" in {
    val proxySetting = SttpBackendOptions.Proxy(
      "fakeproxyserverhost",
      8080,
      SttpBackendOptions.ProxyType.Http,
      List("localhost", "127.*")
    )

    proxySetting.ignoreProxy("127.0.0.1") should be(true)
    proxySetting.ignoreProxy("127.1.0.1") should be(true)
  }

  it should "return true for wildcard prefix match" in {
    val proxySetting = SttpBackendOptions.Proxy(
      "fakeproxyserverhost",
      8080,
      SttpBackendOptions.ProxyType.Http,
      List("localhost", "*.local", "127.*")
    )

    proxySetting.ignoreProxy("sttp.local") should be(true)
    proxySetting.ignoreProxy("www.sttp.local") should be(true)
  }

  it should "return false for others" in {
    val proxySetting = SttpBackendOptions.Proxy(
      "fakeproxyserverhost",
      8080,
      SttpBackendOptions.ProxyType.Http,
      List("localhost", "*.local", "127.*")
    )

    proxySetting.ignoreProxy("sttp.local.com") should be(false)
    proxySetting.ignoreProxy("10.127.0.1") should be(false)
  }
}
Example 15
Source File: RetryWhenDefaultTest.scala From sttp with Apache License 2.0
package sttp.client

import java.io.ByteArrayInputStream

import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
import sttp.client
import sttp.model.StatusCode

class RetryWhenDefaultTest extends AnyFlatSpec with Matchers {
  private val simpleRequest = basicRequest.get(uri"http://localhost")

  it should "not retry 200 response" in {
    RetryWhen.Default(simpleRequest, Right(Response.ok(""))) shouldBe false
  }

  it should "retry 500 response" in {
    RetryWhen.Default(simpleRequest, Right(Response("", StatusCode.InternalServerError))) shouldBe true
  }

  it should "retry connection exceptions" in {
    RetryWhen.Default(simpleRequest, Left(new client.SttpClientException.ConnectException(null))) shouldBe true
  }

  it should "not retry read exceptions" in {
    RetryWhen.Default(simpleRequest, Left(new client.SttpClientException.ReadException(null))) shouldBe false
  }

  it should "not retry input stream bodies" in {
    RetryWhen.Default(
      simpleRequest.body(new ByteArrayInputStream(new Array[Byte](8))),
      Right(Response("", StatusCode.InternalServerError))
    ) shouldBe false
  }
}
Example 16
Source File: FollowRedirectsBackendTest.scala From sttp with Apache License 2.0
package sttp.client

import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers

class FollowRedirectsBackendTest extends AnyFunSuite with Matchers {
  val testData = List(
    ("/x/y/z", true),
    (" /x2/y/z", true),
    ("/x?query=10", true),
    ("/foo%3F?token=xyz&url=http://minio:9000/a/b/c", true),
    ("http://server.com", false),
    ("https://server.com", false),
    (" https://server2.com", false),
    ("HTTP://server.com", false),
    ("httpS://server.com", false)
  )

  for ((uri, isRelative) <- testData) {
    test(s"$uri should ${if (isRelative) "" else "not "}be relative") {
      FollowRedirectsBackend.isRelative(uri) shouldBe isRelative
    }
  }
}
Example 17
Source File: WwwAuthHeaderParserTest.scala From sttp with Apache License 2.0
package sttp.client.internal

import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

class WwwAuthHeaderParserTest extends AnyFlatSpec with Matchers {

  it should "parse header with only quoted values" in {
    WwwAuthHeaderParser
      .parse(
        "Digest realm=\"Digest WF Realm\", qop=\"auth\", nonce=\"MTU3MzQ5MTE3MjQ3NzphY2I5NDgxNjdmODdiZGIwMzU1YTk5OTIxNDU1MmY0ZQ==\""
      )
      .values shouldBe Map(
      "realm" -> "Digest WF Realm",
      "qop" -> "auth",
      "nonce" -> "MTU3MzQ5MTE3MjQ3NzphY2I5NDgxNjdmODdiZGIwMzU1YTk5OTIxNDU1MmY0ZQ=="
    )
  }

  it should "parse header with mixed quoted and unquoted values" in {
    WwwAuthHeaderParser
      .parse(
        "Digest realm=\"[email protected]\", " +
          "nonce=\"399b4061bd576c9d9a22b698bd3f9367\", " +
          "qop=\"auth\", " +
          "opaque=\"47e2037ead3fd3dfe6260991da9e5db7\", " +
          "algorithm=MD5, " +
          "stale=FALSE"
      )
      .values shouldBe Map(
      "realm" -> "[email protected]",
      "nonce" -> "399b4061bd576c9d9a22b698bd3f9367",
      "qop" -> "auth",
      "opaque" -> "47e2037ead3fd3dfe6260991da9e5db7",
      "algorithm" -> "MD5",
      "stale" -> "FALSE"
    )
  }

  it should "parse header without spaces" in {
    WwwAuthHeaderParser
      .parse(
        "Digest realm=\"Digest WF Realm\",qop=\"auth\",nonce=\"MTU3MzQ5MTE3MjQ3NzphY2I5NDgxNjdmODdiZGIwMzU1YTk5OTIxNDU1MmY0ZQ==\""
      )
      .values shouldBe Map(
      "realm" -> "Digest WF Realm",
      "qop" -> "auth",
      "nonce" -> "MTU3MzQ5MTE3MjQ3NzphY2I5NDgxNjdmODdiZGIwMzU1YTk5OTIxNDU1MmY0ZQ=="
    )
  }
}
Example 18
Source File: CancelTest.scala From sttp with Apache License 2.0
package sttp.client.testing

import org.scalatest.BeforeAndAfterAll
import org.scalatest.freespec.AsyncFreeSpecLike
import org.scalatest.matchers.should.Matchers
import sttp.client._
import sttp.client.monad.MonadError
import sttp.client.testing.HttpTest.endpoint

trait CancelTest[F[_], S] extends AsyncFreeSpecLike with Matchers with ToFutureWrapper with BeforeAndAfterAll {
  implicit def backend: SttpBackend[F, S, NothingT]
  implicit def convertToFuture: ConvertToFuture[F]

  def timeoutToNone[T](t: F[T], timeoutMillis: Int): F[Option[T]]

  "cancel" - {
    "a request in progress" in {
      implicit val monad: MonadError[F] = backend.responseMonad
      import sttp.client.monad.syntax._

      val req = basicRequest
        .get(uri"$endpoint/timeout")
        .response(asString)

      val now = monad.eval(System.currentTimeMillis())

      convertToFuture.toFuture(
        now
          .flatMap { start =>
            timeoutToNone(req.send(), 100)
              .map { r =>
                (System.currentTimeMillis() - start) should be < 2000L
                r shouldBe None
              }
          }
      )
    }
  }
}
Example 19
Source File: HttpOriginMatcherSpec.scala From akka-http-cors with Apache License 2.0
package ch.megard.akka.http.cors.scaladsl.model

import akka.http.scaladsl.model.headers.HttpOrigin
import org.scalatest.Inspectors
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class HttpOriginMatcherSpec extends AnyWordSpec with Matchers with Inspectors {
  "The `*` matcher" should {
    "match any Origin" in {
      val origins = Seq(
        "http://localhost",
        "http://192.168.1.1",
        "http://test.com",
        "http://test.com:8080",
        "https://test.com",
        "https://test.com:4433"
      ).map(HttpOrigin.apply)

      forAll(origins) { o => HttpOriginMatcher.*.matches(o) shouldBe true }
    }

    "be printed as `*`" in {
      HttpOriginMatcher.*.toString shouldBe "*"
    }
  }

  "The strict() method" should {
    "build a strict matcher, comparing exactly the origins" in {
      val positives = Seq(
        "http://localhost",
        "http://test.com",
        "https://test.ch:12345",
        "https://*.test.uk.co"
      ).map(HttpOrigin.apply)

      val negatives = Seq(
        "http://localhost:80",
        "https://localhost",
        "http://test.com:8080",
        "https://test.ch",
        "https://abc.test.uk.co"
      ).map(HttpOrigin.apply)

      val matcher = HttpOriginMatcher.strict(positives: _*)

      forAll(positives) { o => matcher.matches(o) shouldBe true }
      forAll(negatives) { o => matcher.matches(o) shouldBe false }
    }

    "build a matcher with a toString() method that is a valid range" in {
      val matcher = HttpOriginMatcher(Seq("http://test.com", "https://test.ch:12345").map(HttpOrigin.apply): _*)
      matcher.toString shouldBe "http://test.com https://test.ch:12345"
    }
  }

  "The apply() method" should {
    "build a matcher accepting sub-domains with wildcards" in {
      val matcher = HttpOriginMatcher(
        Seq(
          "http://test.com",
          "https://test.ch:12345",
          "https://*.test.uk.co",
          "http://*.abc.com:8080",
          "http://*abc.com", // Must start with `*.`
          "http://abc.*.middle.com" // The wildcard can't be in the middle
        ).map(HttpOrigin.apply): _*
      )

      val positives = Seq(
        "http://test.com",
        "https://test.ch:12345",
        "https://sub.test.uk.co",
        "https://sub1.sub2.test.uk.co",
        "http://sub.abc.com:8080"
      ).map(HttpOrigin.apply)

      val negatives = Seq(
        "http://test.com:8080",
        "http://sub.test.uk.co", // must compare the scheme
        "http://sub.abc.com", // must compare the port
        "http://abc.test.com", // no wildcard
        "http://sub.abc.com",
        "http://subabc.com",
        "http://abc.sub.middle.com",
        "http://abc.middle.com"
      ).map(HttpOrigin.apply)

      forAll(positives) { o => matcher.matches(o) shouldBe true }
      forAll(negatives) { o => matcher.matches(o) shouldBe false }
    }

    "build a matcher with a toString() method that is a valid range" in {
      val matcher = HttpOriginMatcher(Seq("http://test.com", "https://*.test.ch:12345").map(HttpOrigin.apply): _*)
      matcher.toString shouldBe "http://test.com https://*.test.ch:12345"
    }
  }
}
Example 20
Source File: FS2CronTest.scala From fs2-cron with Apache License 2.0
package eu.timepit.fs2cron

import cats.effect.{ContextShift, IO, Timer}
import cron4s.Cron
import cron4s.expr.CronExpr

import scala.concurrent.ExecutionContext
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers

class FS2CronTest extends AnyFunSuite with Matchers {
  implicit val timer: Timer[IO] = IO.timer(ExecutionContext.global)
  val evenSeconds: CronExpr = Cron.unsafeParse("*/2 * * ? * *")
  def isEven(i: Int): Boolean = i % 2 == 0

  test("awakeEveryCron") {
    val s1 = awakeEveryCron[IO](evenSeconds) >> evalNow[IO]
    val s2 = s1.map(_.getSecond).take(2).forall(isEven)
    s2.compile.last.map(_.getOrElse(false)).unsafeRunSync()
  }

  test("sleepCron") {
    val s1 = sleepCron[IO](evenSeconds) >> evalNow[IO]
    val s2 = s1.map(_.getSecond).forall(isEven)
    s2.compile.last.map(_.getOrElse(false)).unsafeRunSync()
  }

  test("schedule") {
    implicit val ctxShift: ContextShift[IO] = IO.contextShift(ExecutionContext.global)
    val everySecond: CronExpr = Cron.unsafeParse("* * * ? * *")
    val s1 = schedule(List(everySecond -> evalNow[IO], evenSeconds -> evalNow[IO])).map(_.getSecond)

    val seconds = s1.take(3).compile.toList.unsafeRunSync()
    seconds.count(isEven) shouldBe 2
    seconds.count(!isEven(_)) shouldBe 1
  }
}
Example 21
Source File: NumPyTest.scala From featran with Apache License 2.0
package com.spotify.featran.numpy

import java.io.{ByteArrayOutputStream, OutputStream}

import org.scalatest._
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

class NumPyTest extends AnyFlatSpec with Matchers {
  private def test(f: OutputStream => Unit)(expectedFile: String): Unit = {
    val actual = {
      val baos = new ByteArrayOutputStream()
      f(baos)
      baos.toByteArray
    }
    val expected = {
      val in = this.getClass.getResourceAsStream(expectedFile)
      val out = new ByteArrayOutputStream(math.max(32, in.available()))
      val buf = new Array[Byte](8192)
      var r = in.read(buf)
      while (r != -1) {
        out.write(buf, 0, r)
        r = in.read(buf)
      }
      out.toByteArray
    }
    actual shouldBe expected
  }

  "NumPy" should "work with 1-dimensional arrays" in {
    val a1d = (0 until 10).toArray
    test(NumPy.write(_, a1d))("/a1d-int.npy")
    test(NumPy.write(_, a1d.map(_.toLong)))("/a1d-long.npy")
    test(NumPy.write(_, a1d.map(_.toFloat)))("/a1d-float.npy")
    test(NumPy.write(_, a1d.map(_.toDouble)))("/a1d-double.npy")

    // scalastyle:off no.whitespace.before.left.bracket
    the[IllegalArgumentException] thrownBy {
      test(NumPy.write(_, a1d, Seq(20)))("/a1d-int.npy")
    } should have message "requirement failed: Invalid shape, 20 != 10"
    // scalastyle:on no.whitespace.before.left.bracket
  }

  it should "work with 2-dimensional arrays" in {
    val a2d = (for {
      i <- 0 until 10
      j <- 0 until 5
    } yield i * 10 + j).toArray
    test(NumPy.write(_, a2d, Seq(10, 5)))("/a2d-int.npy")
    test(NumPy.write(_, a2d.map(_.toLong), Seq(10, 5)))("/a2d-long.npy")
    test(NumPy.write(_, a2d.map(_.toFloat), Seq(10, 5)))("/a2d-float.npy")
    test(NumPy.write(_, a2d.map(_.toDouble), Seq(10, 5)))("/a2d-double.npy")

    // scalastyle:off no.whitespace.before.left.bracket
    the[IllegalArgumentException] thrownBy {
      test(NumPy.write(_, a2d, Seq(20, 5)))("/a1d-int.npy")
    } should have message "requirement failed: Invalid shape, 20 * 5 != 50"
    // scalastyle:on no.whitespace.before.left.bracket
  }

  it should "work with iterators" in {
    val a2d = (0 until 10).map(i => (0 until 5).map(j => i * 10 + j).toArray)
    test(NumPy.write(_, a2d.iterator, 10, 5))("/a2d-int.npy")
    test(NumPy.write(_, a2d.iterator.map(_.map(_.toLong)), 10, 5))("/a2d-long.npy")
    test(NumPy.write(_, a2d.iterator.map(_.map(_.toFloat)), 10, 5))("/a2d-float.npy")
    test(NumPy.write(_, a2d.iterator.map(_.map(_.toDouble)), 10, 5))("/a2d-double.npy")

    // scalastyle:off no.whitespace.before.left.bracket
    the[IllegalArgumentException] thrownBy {
      test(NumPy.write(_, a2d.iterator, 10, 10))("/a2d-int.npy")
    } should have message "requirement failed: Invalid row size, expected: 10, actual: 5"

    the[IllegalArgumentException] thrownBy {
      test(NumPy.write(_, a2d.iterator, 20, 5))("/a2d-int.npy")
    } should have message "requirement failed: Invalid number of rows, expected: 20, actual: 10"

    // hit the header.length % 16 == 0 condition
    the[IllegalArgumentException] thrownBy {
      test(NumPy.write(_, a2d.iterator, 1000000000, 50))("/a2d-int.npy")
    } should have message "requirement failed: Invalid row size, expected: 50, actual: 5"
    // scalastyle:on no.whitespace.before.left.bracket
  }
}
Example 22
Source File: SparkTest.scala From featran with Apache License 2.0
package com.spotify.featran.spark

import com.spotify.featran._
import org.apache.spark.SparkContext
import org.scalatest._
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

class SparkTest extends AnyFlatSpec with Matchers {
  import Fixtures._

  "FeatureSpec" should "work with Spark" in {
    val sc = new SparkContext("local[4]", "test")
    sc.setLogLevel("ERROR")
    val f = TestSpec.extract(sc.parallelize(TestData))
    f.featureNames.collect() shouldBe Array(ExpectedNames)
    f.featureValues[Seq[Double]].collect() should contain theSameElementsAs ExpectedValues
    f.featureValues[Map[String, Double]]
      .collect() should contain theSameElementsAs ExpectedMapValues
    sc.stop()
  }

  it should "work with MultiFeatureSpec" in {
    noException shouldBe thrownBy {
      val sc = new SparkContext("local[4]", "test")
      sc.setLogLevel("ERROR")
      val f = RecordSpec.extract(sc.parallelize(Records))
      f.featureNames.collect()
      f.featureValues[Seq[Double]].collect()
      sc.stop()
    }
  }
}
Example 23
Source File: FlinkTest.scala From featran with Apache License 2.0
package com.spotify.featran.flink

import com.spotify.featran._
import org.apache.flink.api.scala._
import org.scalatest._
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

class FlinkTest extends AnyFlatSpec with Matchers {
  import Fixtures._

  "Flink" should "work with FeatureSpec" in {
    val env = ExecutionEnvironment.getExecutionEnvironment
    val f = TestSpec.extract(env.fromCollection(TestData))
    f.featureNames.collect() shouldBe Seq(ExpectedNames)
    f.featureValues[Seq[Double]].collect() should contain theSameElementsAs ExpectedValues
  }

  it should "work with MultiFeatureSpec" in {
    noException shouldBe thrownBy {
      val env = ExecutionEnvironment.getExecutionEnvironment
      val f = RecordSpec.extract(env.fromCollection(Records))
      f.featureNames.collect()
      f.featureValues[Seq[Double]].collect()
    }
  }
}
Example 24
Source File: ScaldingTest.scala From featran with Apache License 2.0
package com.spotify.featran.scalding

import com.spotify.featran._
import com.twitter.scalding._
import org.scalatest._
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

class ScaldingTest extends AnyFlatSpec with Matchers {
  import Fixtures._

  def materialize[T](p: TypedPipe[T]): Iterable[T] =
    p.toIterableExecution.waitFor(Config.default, Local(true)).get

  "FeatureSpec" should "work with Scalding" in {
    val f = TestSpec.extract(TypedPipe.from(TestData))
    materialize(f.featureNames) shouldBe Iterable(ExpectedNames)
    materialize(f.featureValues[Seq[Double]]) should contain theSameElementsAs ExpectedValues
  }

  it should "work with MultiFeatureSpec" in {
    noException shouldBe thrownBy {
      val f = RecordSpec.extract(TypedPipe.from(Records))
      materialize(f.featureNames)
      materialize(f.featureValues[Seq[Double]])
    }
  }
}
Example 25
Source File: MDLPDiscretizerTest.scala From featran with Apache License 2.0
package com.spotify.featran.transformers.mdl

import org.scalatest._
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

class MDLPDiscretizerTest extends AnyFlatSpec with Matchers {
  import com.spotify.featran.transformers.mdl.TestUtility._

  "MDLPDiscretizer" should "work with cars data (maxBins = 10)" in {
    val data = Cars.map(v => (v.origin, v.mpg))
    val result = new MDLPDiscretizer(data).discretize(10).sorted
    val expected = List(Double.NegativeInfinity, 16.1, 21.05, 30.95, Double.PositiveInfinity)
    result.length shouldBe expected.length
    result.zip(expected).map { case (r, e) => r shouldEqual e }
  }

  it should "work with cars data (maxBins = 2)" in {
    val data = Cars.map(v => (v.origin, v.mpg))
    val result = new MDLPDiscretizer(data).discretize(2).sorted
    val expected = List(Double.NegativeInfinity, 21.05, Double.PositiveInfinity)
    result.length shouldBe expected.length
    result.zip(expected).map { case (r, e) => r shouldEqual e }
  }

  it should "work with empty data" in {
    val data = List.empty[(String, Double)]
    val result = new MDLPDiscretizer(data).discretize(2).sorted
    val expected = List(Double.NegativeInfinity, Double.PositiveInfinity)
    result.length shouldBe expected.length
  }
}
Example 26
Source File: ThresholdFinderTest.scala From featran with Apache License 2.0
package com.spotify.featran.transformers.mdl

import org.scalatest._
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

class ThresholdFinderTest extends AnyFlatSpec with Matchers {

  "ThresholdFinder" should "work with nLabels = 3 and feature size = 4" in {
    val finder = new ThresholdFinder(nLabels = 3, stoppingCriterion = 0, maxBins = 100, minBinWeight = 1)
    val feature = Array(
      (5.0f, Array(1L, 2L, 3L)),
      (4.0f, Array(5L, 4L, 20L)),
      (3.5f, Array(3L, 20L, 12L)),
      (3.0f, Array(8L, 18L, 2L))
    )
    val result = finder.findThresholds(feature)
    result shouldBe Seq(Float.NegativeInfinity, 4.0, Float.PositiveInfinity)
  }

  it should "work with duplicates" in {
    val finder = new ThresholdFinder(nLabels = 3, stoppingCriterion = 0, maxBins = 100, minBinWeight = 1)
    val best = finder.bestThreshold(
      List((1.0f, Array.empty, Array.empty, Array.empty)),
      Some(1.0f),
      Array.empty
    )
    best shouldBe empty
  }
}
Example 27
Source File: CaseClassConverterTest.scala From featran with Apache License 2.0
package com.spotify.featran.converters

import com.spotify.featran.transformers.{MDLRecord, WeightedLabel}
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

case class TestData(
  num: Int,
  str: String,
  d: Double,
  l: Long,
  s: List[String],
  b: Boolean
)

case class TestOpt(num: Option[Int])
case class TestDataOpt(num: Option[Int], d: Option[Double])

case class TestAllNatives(
  i: Int = 1,
  s: Short = 1,
  l: Long = 1L,
  d: Double = 1.0,
  io: Option[Int] = Some(1),
  so: Option[Short] = Some(1),
  lo: Option[Long] = Some(1L),
  dopt: Option[Double] = Some(1.0),
  il: List[Int] = List(1),
  sl: List[Short] = List(1),
  ll: List[Long] = List(1L),
  dl: List[Double] = List(1.0)
)

case class TestObjects(
  str: String = "a",
  strs: List[String] = List("a"),
  mdl: MDLRecord[String] = MDLRecord("a", 1.0),
  we: List[WeightedLabel] = List(WeightedLabel("a", 1.0))
)

class CaseClassConverterTest extends AnyFlatSpec with Matchers {
  it should "convert a case class to a spec" in {
    val data = List(
      TestData(1, "a", 1.0, 1L, List("c"), b = true),
      TestData(2, "b", 1.0, 1L, List("d"), b = true)
    )

    val spec = CaseClassConverter.toSpec[TestData]
    val features = spec.extract(data).featureValues[Seq[Double]]
    assert(
      features === List(
        Seq(1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 1.0),
        Seq(2.0, 0.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0)
      )
    )
  }

  it should "convert a simple option" in {
    val data = List(
      TestOpt(Some(1)),
      TestOpt(None)
    )

    val spec = CaseClassConverter.toSpec[TestOpt]
    val features = spec.extract(data).featureValues[Seq[Double]]
    assert(features === List(Seq(1.0), Seq(0.0)))
  }

  it should "convert a case class to a spec with optionals" in {
    val data = List(
      TestDataOpt(Some(1), Some(1.0)),
      TestDataOpt(None, None)
    )

    val spec = CaseClassConverter.toSpec[TestDataOpt]
    val features = spec.extract(data).featureValues[Seq[Double]]
    assert(features === List(Seq(1.0, 1.0), Seq(0.0, 0.0)))
  }

  it should "test all native types" in {
    val data = List(TestAllNatives())
    val spec = CaseClassConverter.toSpec[TestAllNatives]
    val features = spec.extract(data).featureValues[Seq[Double]]
    assert(features === List(0.until(12).toList.map(_ => 1.0)))
  }

  it should "test all object types" in {
    val data = List(TestObjects())
    val spec = CaseClassConverter.toSpec[TestObjects]
    val features = spec.extract(data).featureValues[Seq[Double]]
    assert(features === List(0.until(4).toList.map(_ => 1.0)))
  }
}
Example 28
Source File: SParallelFluxTest.scala From reactor-scala-extensions with Apache License 2.0
package reactor.core.scala.publisher

import org.mockito.IdiomaticMockito
import org.scalatest.freespec.AnyFreeSpec
import org.scalatest.matchers.should.Matchers
import reactor.core.publisher.{Flux => JFlux, ParallelFlux => JParallelFlux}
import reactor.core.scheduler.Schedulers
import reactor.test.StepVerifier

class SParallelFluxTest extends AnyFreeSpec with Matchers with IdiomaticMockito {
  "SParallelFlux" - {
    val data = Seq(1, 2, 3)
    val flux = SFlux.just[Int](data: _*)
    val fluxParallel: SParallelFlux[Int] = flux.parallel()

    ".asJava should convert as Java ParallelFlux" in {
      fluxParallel.asJava shouldBe a[JParallelFlux[_]]
    }

    ".apply should convert Java ParallelFlux into SParallelFlux" in {
      SParallelFlux(JFlux.just(1, 2, 3).parallel()).asJava shouldBe a[JParallelFlux[_]]
    }

    ".filter should filter elements" in {
      StepVerifier.create(fluxParallel.filter((i: Int) => i % 2 == 0))
        .expectNext(2)
        .verifyComplete()
    }

    ".map should map from T to U" in {
      val expected = data.map(_.toString)
      StepVerifier.create(fluxParallel.map(i => i.toString))
        .expectNextMatches((i: String) => expected.contains(i))
        .expectNextMatches((i: String) => expected.contains(i))
        .expectNextMatches((i: String) => expected.contains(i))
        .verifyComplete()
    }

    ".reduce should aggregate the values" - {
      "without initial supplier" in {
        val mono = fluxParallel.reduce(_ + _)
        StepVerifier.create(mono)
          .expectNext(6)
          .verifyComplete()
      }

      "with initial value should aggregate the values with initial one" ignore {
        val parallelFlux = fluxParallel.reduce[String](() => "0", (agg, v) => s"$agg-${v.toString}")
        StepVerifier.create(parallelFlux)
          .expectNext("0-1")
          .expectNext("0-2")
          .expectNext("0-3")
          .expectNext("0")
          .expectNext("0")
          .expectNext("0")
          .expectNext("0")
          .expectNext("0")
          .expectNext("0")
          .expectNext("0")
          .expectNext("0")
          .expectNext("0")
          .expectNext("0")
          .expectNext("0")
          .expectNext("0")
          .expectNext("0")
          .verifyComplete()
      }
    }

    ".sequential should merge the rails" in {
      val expected = data.map(_.toString)
      StepVerifier.create(fluxParallel.map(i => i.toString).sequential())
        .expectNextMatches((i: String) => expected.contains(i))
        .expectNextMatches((i: String) => expected.contains(i))
        .expectNextMatches((i: String) => expected.contains(i))
        .verifyComplete()
    }

    ".runOn should run on different thread" in {
      val scheduler = spy(Schedulers.parallel())
      StepVerifier.create(flux.parallel(2).runOn(scheduler))
        .expectNextMatches((i: Int) => data.contains(i))
        .expectNextMatches((i: Int) => data.contains(i))
        .expectNextMatches((i: Int) => data.contains(i))
        .verifyComplete()

      scheduler.createWorker() wasCalled twice
    }
  }
}
Example 29
Source File: CovariantTest.scala From reactor-scala-extensions with Apache License 2.0
package reactor.core.scala.publisher

import org.scalatest.freespec.AnyFreeSpec
import org.scalatest.matchers.should.Matchers
import reactor.core.scala.publisher.model.{ComponentExt, ContainerExt}

class CovariantTest extends AnyFreeSpec with Matchers {
  "SFlux and SMono" - {
    "covariance should be proven with the following compiled" in {
      new ContainerExt {
        override def component: ComponentExt = ???
        override def components: List[ComponentExt] = ???
        override def componentsFlux: SFlux[ComponentExt] = ???
        override def componentMono: SMono[ComponentExt] = ???
      } shouldBe a[ContainerExt]
    }
  }
}
Example 30
Source File: ExecutionContextSchedulerTest.scala From reactor-scala-extensions with Apache License 2.0
package reactor.core.scala.scheduler

import java.util.concurrent.{Executors, ThreadFactory}

import org.scalatest.freespec.AnyFreeSpec
import org.scalatest.matchers.should.Matchers
import reactor.core.scala.publisher.SMono
import reactor.test.StepVerifier

import scala.concurrent.ExecutionContext

class ExecutionContextSchedulerTest extends AnyFreeSpec with Matchers {
  "ExecutionContextScheduler" - {
    "should create a Scheduler using provided ExecutionContext" - {
      "on SMono" in {
        val executionContext = ExecutionContext.fromExecutorService(
          Executors.newFixedThreadPool(1, new ThreadFactory {
            override def newThread(r: Runnable): Thread = new Thread(r, "THREAD-NAME-SMONO")
          })
        )
        val mono = SMono.just(1)
          .subscribeOn(ExecutionContextScheduler(executionContext))
          .doOnNext(i => Thread.currentThread().getName shouldBe "THREAD-NAME-SMONO")
        StepVerifier.create(mono)
          .expectNext(1)
          .verifyComplete()
      }
    }
  }
}
Example 31
Source File: InformationSpec.scala From perfolation with MIT License
package spec

import org.scalatest.matchers.should.Matchers
import perfolation.unit._
import org.scalatest.wordspec.AnyWordSpec

class InformationSpec extends AnyWordSpec with Matchers {
  "Information" when {
    "using binary" should {
      "validate bytes" in {
        Information.useBinary()
        5.b.bytes should be(5L)
      }
      "validate kilobytes" in {
        5.kb.bytes should be(5120L)
      }
      "validate megabytes" in {
        5.mb.bytes should be(5242880L)
      }
      "validate gigabytes" in {
        5.gb.bytes should be(5368709120L)
      }
      "validate terabytes" in {
        5.tb.bytes should be(5497558138880L)
      }
      "validate petabytes" in {
        5.pb.bytes should be(5629499534213120L)
      }
      "validate exabytes" in {
        5.eb.bytes should be(5764607523034234880L)
      }
      "validate zettabytes" in {
        5.zb.bytes should be(BigInt("5902958103587056517120"))
      }
      "validate yottabytes" in {
        5.yb.bytes should be(BigInt("6044629098073145873530880"))
      }
      "format kilobytes" in {
        5.kb.toString should be("5.00 KiB")
      }
    }
    "using decimal" should {
      "validate bytes" in {
        Information.useDecimal()
        5.b.bytes should be(5L)
      }
      "validate kilobytes" in {
        5.kb.bytes should be(5000L)
      }
      "validate megabytes" in {
        5.mb.bytes should be(5000000L)
      }
      "validate gigabytes" in {
        5.gb.bytes should be(5000000000L)
      }
      "validate terabytes" in {
        5.tb.bytes should be(5000000000000L)
      }
      "validate petabytes" in {
        5.pb.bytes should be(5000000000000000L)
      }
      "validate exabytes" in {
        5.eb.bytes should be(5000000000000000000L)
      }
      "validate zettabytes" in {
        5.zb.bytes should be(BigInt("5000000000000000000000"))
      }
      "validate yottabytes" in {
        5.yb.bytes should be(BigInt("5000000000000000000000000"))
      }
      "format kilobytes" in {
        5.kb.toString should be("5.00 kb")
      }
    }
  }
}
Example 32
Source File: DateFormatSpec.scala From perfolation with MIT License
package tests

import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class DateFormatSpec extends AnyWordSpec with Matchers {
  "Date Formatting" should {
    import perfolation._

    val date1: Long = 1524606965775L
    val hour = (21 - date1.t.timeZoneOffsetHH) % 24

    "retrieve millisecond info" in {
      date1.t.milliseconds should be(date1)
      date1.t.milliOfSecond should be(775)
      date1.t.Q should be(date1.toString)
      date1.t.L should be("775")
    }
    "retrieve seconds info" in {
      date1.t.secondOfMinute should be(5)
      date1.t.secondsOfEpoch should be(date1 / 1000L)
      date1.t.S should be("05")
      date1.t.s.toInt should be(date1 / 1000L)
    }
    "retrieve minutes info" in {
      date1.t.minuteOfHour should be(56)
      date1.t.M should be("56")
    }
    "retrieve hours info" in {
      date1.t.hour24 should be(hour)
      date1.t.hour12 should be(hour % 12)
      date1.t.isAM should be(false)
      date1.t.isPM should be(true)
      date1.t.H should be(hour.toString)
      date1.t.I should be((hour % 12).f(2))
      date1.t.k should be(hour.toString)
      date1.t.l should be((hour % 12).toString)
      date1.t.p should be("pm")
      date1.t.P should be("PM")
    }
    "retrieve days info" in {
      date1.t.dayOfWeek should be(3)
      date1.t.dayOfMonth should be(24)
      date1.t.dayOfYear should be(113)
      date1.t.A should be("Tuesday")
      date1.t.a should be("Tues")
      date1.t.j should be("114")
      date1.t.d should be("24")
      date1.t.e should be("24")
    }
    "retrieve week info" in {}
    "retrieve month info" in {
      date1.t.month should be(3)
      date1.t.B should be("April")
      date1.t.b should be("Apr")
      date1.t.h should be("Apr")
      date1.t.m should be("04")
    }
    "retrieve years info" in {
      date1.t.year should be(2018)
      date1.t.C should be("20")
      date1.t.Y should be("2018")
      date1.t.y should be("18")
    }
  }
}
Example 33
Source File: NumberFormatSpec.scala From perfolation with MIT License
package tests

import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
import perfolation.numeric.{FastNumber, Grouping, RoundingMode}

class NumberFormatSpec extends AnyWordSpec with Matchers {
  "Number Formatting" should {
    import perfolation._

    "use a FastNumber properly" in {
      val fn = new FastNumber
      fn.set(12.345)
      fn.toString should be("12.345")
      fn.setMinimumIntegerDigits(3)
      fn.toString should be("012.345")
      fn.set(1234.567)
      fn.setMaximumIntegerDigits(2)
      fn.toString should be("34.567")
      fn.setMinimumFractionDigits(5)
      fn.toString should be("34.56700")
      fn.setMaximumFractionDigits(2, RoundingMode.HalfUp)
      fn.toString should be("34.57")
      fn.set(123456789.0)
      fn.setMaximumFractionDigits(0, RoundingMode.HalfUp)
      fn.toString should be("123456789")
      fn.group(Grouping.US)
      fn.toString should be("123,456,789")
    }

    "format an integer to two digits" in {
      4.f(2) should be("04")
      40.f(2) should be("40")
      400.f(2) should be("400")
    }

    "format an integer to two fraction digits" in {
      4.f(f = 2) should be("4.00")
      40.f(f = 2) should be("40.00")
      400.f(f = 2) should be("400.00")
    }

    "format a double to two digits" in {
      4.0.f(2) should be("04.00")
      40.0.f(2) should be("40.00")
      400.0.f(2) should be("400.00")
      4.1.f(2) should be("04.10")
      40.1.f(2) should be("40.10")
      400.1.f(2) should be("400.10")
      4.12.f(2) should be("04.12")
      40.12.f(2) should be("40.12")
      400.12.f(2) should be("400.12")
      4.123.f(2) should be("04.12")
      40.123.f(2) should be("40.12")
      400.123.f(2) should be("400.12")
      4.126.f(2) should be("04.13")
    }

    "format a BigDecimal to 0 digits" in {
      BigDecimal(6481415348.78).f(f = 0) should be("6481415349")
      BigDecimal(9999999999.99).f(f = 0) should be("10000000000")
    }

    "format a negative number properly" in {
      (-100.0).f(2) should be("-100.00")
      (-0.5).f(2) should be("-0.50")
      (-0.5).f(2, g = Grouping.US) should be("-0.50")
      (-444).f(g = Grouping.US) should be("-444")
    }
  }
}
Example 34
Source File: StarWarsMutationSpec.scala From sangria-relay with Apache License 2.0
package sangria.relay.starWars

import sangria.execution.Executor
import sangria.parser.QueryParser
import sangria.relay.starWars.StarWarsData.ShipRepo
import sangria.relay.util.AwaitSupport
import sangria.marshalling.InputUnmarshaller.mapVars

import scala.concurrent.ExecutionContext.Implicits.global
import scala.util.Success
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class StarWarsMutationSpec extends AnyWordSpec with Matchers with AwaitSupport {
  "Mutation" should {
    "Correctly mutates the data set" in {
      val Success(doc) = QueryParser.parse(
        """
          mutation AddBWingQuery($input: IntroduceShipInput!) {
            introduceShip(input: $input) {
              ship {
                id
                name
              }
              faction {
                name
              }
              clientMutationId
            }
          }
        """)

      val vars = mapVars(
        "input" -> Map(
          "shipName" -> "B-Wing",
          "factionId" -> "RmFjdGlvbjox",
          "clientMutationId" -> "abcde"
        )
      )

      Executor.execute(StarWarsSchema.schema, doc, variables = vars, userContext = new ShipRepo).await should be(
        Map(
          "data" -> Map(
            "introduceShip" -> Map(
              "ship" -> Map(
                "id" -> "U2hpcDo5",
                "name" -> "B-Wing"
              ),
              "faction" -> Map(
                "name" -> "Alliance to Restore the Republic"
              ),
              "clientMutationId" -> "abcde"
            ))))
    }
  }
}
Example 35
Source File: Base64Spec.scala From sangria-relay with Apache License 2.0 | 5 votes |
package sangria.relay.util

import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class Base64Spec extends AnyWordSpec with Matchers {
  val TestText = "Lorem Ipsum is simply dummy text of the printing and typesetting industry. Lorem Ipsum has been the industry's standard dummy text ever since the 1500s, when an unknown printer took a galley of type and scrambled it to make a type specimen book. It has survived not only five centuries, but also the leap into electronic typesetting, remaining essentially unchanged. It was popularised in the 1960s with the release of Letraset sheets containing Lorem Ipsum passages, and more recently with desktop publishing software like Aldus PageMaker including versions of Lorem Ipsum."
  val TestBase64 = "TG9yZW0gSXBzdW0gaXMgc2ltcGx5IGR1bW15IHRleHQgb2YgdGhlIHByaW50aW5nIGFuZCB0eXBlc2V0dGluZyBpbmR1c3RyeS4gTG9yZW0gSXBzdW0gaGFzIGJlZW4gdGhlIGluZHVzdHJ5J3Mgc3RhbmRhcmQgZHVtbXkgdGV4dCBldmVyIHNpbmNlIHRoZSAxNTAwcywgd2hlbiBhbiB1bmtub3duIHByaW50ZXIgdG9vayBhIGdhbGxleSBvZiB0eXBlIGFuZCBzY3JhbWJsZWQgaXQgdG8gbWFrZSBhIHR5cGUgc3BlY2ltZW4gYm9vay4gSXQgaGFzIHN1cnZpdmVkIG5vdCBvbmx5IGZpdmUgY2VudHVyaWVzLCBidXQgYWxzbyB0aGUgbGVhcCBpbnRvIGVsZWN0cm9uaWMgdHlwZXNldHRpbmcsIHJlbWFpbmluZyBlc3NlbnRpYWxseSB1bmNoYW5nZWQuIEl0IHdhcyBwb3B1bGFyaXNlZCBpbiB0aGUgMTk2MHMgd2l0aCB0aGUgcmVsZWFzZSBvZiBMZXRyYXNldCBzaGVldHMgY29udGFpbmluZyBMb3JlbSBJcHN1bSBwYXNzYWdlcywgYW5kIG1vcmUgcmVjZW50bHkgd2l0aCBkZXNrdG9wIHB1Ymxpc2hpbmcgc29mdHdhcmUgbGlrZSBBbGR1cyBQYWdlTWFrZXIgaW5jbHVkaW5nIHZlcnNpb25zIG9mIExvcmVtIElwc3VtLg=="

  val TestUtf8Text = "Lorem Ipsum ist ein einfacher Demo-Text für die Print- und Schriftindustrie. Lorem Ipsum ist in der Industrie bereits der Standard Demo-Text seit 1500, als ein unbekannter Schriftsteller eine Hand voll Wörter nahm und diese durcheinander warf um ein Musterbuch zu erstellen. Es hat nicht nur 5 Jahrhunderte überlebt, sondern auch in Spruch in die elektronische Schriftbearbeitung geschafft (bemerke, nahezu unverändert). Bekannt wurde es 1960, mit dem erscheinen von \"Letraset\", welches Passagen von Lorem Ipsum enhielt, so wie Desktop Software wie \"Aldus PageMaker\" - ebenfalls mit Lorem Ipsum."
  val TestUtf8Base64 = "TG9yZW0gSXBzdW0gaXN0IGVpbiBlaW5mYWNoZXIgRGVtby1UZXh0IGbDvHIgZGllIFByaW50LSB1bmQgU2NocmlmdGluZHVzdHJpZS4gTG9yZW0gSXBzdW0gaXN0IGluIGRlciBJbmR1c3RyaWUgYmVyZWl0cyBkZXIgU3RhbmRhcmQgRGVtby1UZXh0IHNlaXQgMTUwMCwgYWxzIGVpbiB1bmJla2FubnRlciBTY2hyaWZ0c3RlbGxlciBlaW5lIEhhbmQgdm9sbCBXw7ZydGVyIG5haG0gdW5kIGRpZXNlIGR1cmNoZWluYW5kZXIgd2FyZiB1bSBlaW4gTXVzdGVyYnVjaCB6dSBlcnN0ZWxsZW4uIEVzIGhhdCBuaWNodCBudXIgNSBKYWhyaHVuZGVydGUgw7xiZXJsZWJ0LCBzb25kZXJuIGF1Y2ggaW4gU3BydWNoIGluIGRpZSBlbGVrdHJvbmlzY2hlIFNjaHJpZnRiZWFyYmVpdHVuZyBnZXNjaGFmZnQgKGJlbWVya2UsIG5haGV6dSB1bnZlcsOkbmRlcnQpLiBCZWthbm50IHd1cmRlIGVzIDE5NjAsIG1pdCBkZW0gZXJzY2hlaW5lbiB2b24gIkxldHJhc2V0Iiwgd2VsY2hlcyBQYXNzYWdlbiB2b24gTG9yZW0gSXBzdW0gZW5oaWVsdCwgc28gd2llIERlc2t0b3AgU29mdHdhcmUgd2llICJBbGR1cyBQYWdlTWFrZXIiIC0gZWJlbmZhbGxzIG1pdCBMb3JlbSBJcHN1bS4="

  "Base64" should {
    "encode string" in {
      Base64.encode(TestText) should be(TestBase64)
    }
    "encode bytes" in {
      Base64.encode(TestUtf8Text.getBytes("UTF-8")) should be(TestUtf8Base64)
    }
    "encode UTF-8 string" in {
      Base64.encode(TestUtf8Text) should be(TestUtf8Base64)
    }
    "decode base64 string" in {
      Base64.decode(TestBase64) should be(Some(TestText))
    }
    "decode UTF-8 base64 string" in {
      Base64.decode(TestUtf8Base64) should be(Some(TestUtf8Text))
    }
    "return an empty string for an empty string" in {
      Base64.decode("") should be(Some(""))
    }
    "return None for base64 strings with too few valid bits" in {
      Base64.decode("a3222==") should be(None)
    }
    "return None for base64 strings with invalid characters" in {
      Base64.decode("foobär23") should be(None)
    }
    "return None for base64 strings with wrong 4-byte ending unit" in {
      Base64.decode("TQ=") should be(None)
    }
  }
}
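Assertions on Option results can also use ScalaTest's OptionValues mixin, whose `.value` unwraps a Some and fails with a descriptive message on None. A brief sketch against a stand-in decoder (the `decode` function here is hypothetical, not sangria's Base64):

import org.scalatest.OptionValues
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class OptionValuesSpec extends AnyWordSpec with Matchers with OptionValues {
  // stand-in for a decoder returning Option[String]
  private def decode(s: String): Option[String] =
    if (s.isEmpty) Some("") else None

  "OptionValues" should {
    "unwrap a Some or fail with a descriptive message" in {
      decode("").value should be("")
      decode("not base64!") should be(None)
    }
  }
}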
Example 36
Source File: Test.scala From scalingua with Apache License 2.0 | 5 votes |
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
import ru.makkarpov.scalingua.{LanguageId, Messages, Language}
import ru.makkarpov.scalingua.I18n._

class Test extends AnyFlatSpec with Matchers {
  implicit val messages = Messages.compiled("some.test.pkg")

  it should "provide correct messages for en_US" in {
    implicit val languageId = LanguageId("en-US")

    t"Test" shouldBe "Test"
    tag("test.key") shouldBe "test message"
    ptag("test.plural.key", 1) shouldBe "singular form"
    ptag("test.plural.key", 2) shouldBe "plural forms"
  }

  it should "provide correct messages for ru_RU" in {
    implicit val languageId = LanguageId("ru-RU")

    t"Test" shouldBe "Тест"
    tag("test.key") shouldBe "Тестовое сообщение"
    ptag("test.plural.key", 1) shouldBe "Единственное число"
    ptag("test.plural.key", 2) shouldBe "Почти единственное число"
    ptag("test.plural.key", 5) shouldBe "Множественное число"
  }

  it should "provide correct messages for lazy strings" in {
    val l = lptag("test.plural.key", 2)

    {
      implicit val id = LanguageId("ru-RU")
      l.resolve shouldBe "Почти единственное число"
    }

    {
      implicit val id = LanguageId("en-US")
      l.resolve shouldBe "plural forms"
    }

    {
      implicit val id = LanguageId("xx-YY")
      l.resolve shouldBe "plural forms"
    }
  }
}
Example 37
Source File: Test.scala From scalingua with Apache License 2.0 | 5 votes |
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
import ru.makkarpov.scalingua.{LanguageId, Messages, Language}
import ru.makkarpov.scalingua.I18n._

class Test extends AnyFlatSpec with Matchers {
  implicit val messages = Messages.compiled("some.test.pkg")

  it should "provide correct messages for en_US" in {
    implicit val langId = LanguageId("en-US")

    t("Hello, world!") shouldBe "Hello, world!"
    p("There is %(n) dog!", "There is %(n) dogs!", 7) shouldBe "There is 7 dogs!"
  }

  it should "provide correct messages for ru_RU" in {
    implicit val langId = LanguageId("ru-RU")

    t("Hello, world!") shouldBe "Привет, мир!"
    p("There is %(n) dog!", "There is %(n) dogs!", 7) shouldBe "Здесь 7 собак!"
  }

  it should "provide english messages for absent languages" in {
    implicit val langId = LanguageId("xx-QQ")

    t("Hello, world!") shouldBe "Hello, world!"
    p("There is %(n) dog!", "There is %(n) dogs!", 7) shouldBe "There is 7 dogs!"
  }

  it should "provide correct messages for other countries (ru_XX)" in {
    implicit val langId = LanguageId("ru-XX")

    t("Hello, world!") shouldBe "Привет, мир!"
    p("There is %(n) dog!", "There is %(n) dogs!", 7) shouldBe "Здесь 7 собак!"
  }

  it should "provide correct messages for generic languages (ru)" in {
    implicit val langId = LanguageId("ru")

    t("Hello, world!") shouldBe "Привет, мир!"
    p("There is %(n) dog!", "There is %(n) dogs!", 7) shouldBe "Здесь 7 собак!"
  }

  it should "handle percent signs" in {
    implicit val langId = LanguageId("ru-RU")

    t"Percents! %" shouldBe "Проценты! %"
    t("Percents!! %%") shouldBe "Проценты!! %"

    val x = 1
    t"Percents with variables%: $x, percents%" shouldBe s"Проценты с перменными%: $x, проценты%"
    t"Percents after variable: $x%%" shouldBe s"Проценты после переменной: $x%"

    // Plural:
    p"Look, I have $x percent${S.s}: %!" shouldBe s"Смотри, у меня $x процент: %!"
  }

  it should "reject invalid percent signs" in {
    """
      val x = 123
      t"Test: $x% qweqwe"
    """ shouldNot typeCheck
  }

  it should "escape unicode literals" in {
    implicit val langId = LanguageId("en-US")

    t"Привет, мир!" shouldBe "Привет, мир!"
    t"Weird’quotes" shouldBe "Weird’quotes"
  }
}
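The `shouldNot typeCheck` assertion above is part of Matchers itself: it takes a string literal of Scala code and asserts, when the test is compiled, that the snippet does not type-check. A minimal self-contained illustration of the three compile-time matchers:

import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

class CompileTimeMatchersSpec extends AnyFlatSpec with Matchers {
  it should "assert on compile-time behaviour of code snippets" in {
    "val n: Int = 1" should compile
    "val n: Int = \"one\"" shouldNot typeCheck // well-formed but a type error
    "val n: Int =" shouldNot compile           // not even parseable
  }
}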
Example 38
Source File: Test.scala From scalingua with Apache License 2.0 | 5 votes |
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
import ru.makkarpov.scalingua.{LanguageId, Messages, Language}
import ru.makkarpov.scalingua.I18n._

class Test extends AnyFlatSpec with Matchers {
  implicit val messages = Messages.compiled("ru.makkarpov")
  implicit val languageId = LanguageId("ru-RU")

  it should "correctly translate strings" in {
    t"Hello, world!" shouldBe "Привет, мир!"
    tc("something", "Hello, world!") shouldBe "Привет, мир в контексте something!"
  }

  it should "include contextual strings in Messages" in {
    val lang = messages(languageId)

    lang.singular("Hello, world!") shouldBe "Hello, world!"
    lang.singular("ru.makkarpov", "Hello, world!") shouldBe "Привет, мир!"
    lang.singular("something", "Hello, world!") shouldBe "Hello, world!"
    lang.singular("ru.makkarpov:something", "Hello, world!") shouldBe "Привет, мир в контексте something!"
  }

  it should "reference contextual strings" in {
    implicit val mockLang = new Language {
      override def singular(msgctx: String, msgid: String): String = msgctx + "/" + msgid

      override def plural(msgid: String, msgidPlural: String, n: Long): String = fail
      override def plural(msgctx: String, msgid: String, msgidPlural: String, n: Long): String = fail
      override def singular(msgid: String): String = fail
      override def taggedSingular(tag: String): String = fail
      override def taggedPlural(tag: String, n: Long): String = fail
      override def merge(other: Language): Language = fail

      def fail = throw new IllegalArgumentException("Called an unexpected method")

      override def id: LanguageId = LanguageId("xx-XX")
    }

    t"Hello, world!" shouldBe "ru.makkarpov/Hello, world!"
    tc("context", "Hello, world!") shouldBe "ru.makkarpov:context/Hello, world!"
  }
}
Example 39
Source File: Test.scala From scalingua with Apache License 2.0 | 5 votes |
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
import ru.makkarpov.scalingua.{LanguageId, Messages, Language}
import ru.makkarpov.scalingua.I18n._

// test whether Scalingua is able to compile messages for multiple languages at once:
class Test extends AnyFlatSpec with Matchers {
  implicit val messages = Messages.compiled()

  it should "provide messages for English" in {
    implicit val lang = LanguageId("en-US")
    t"Good evening!" shouldBe "Good evening!"
  }

  it should "provide messages for German" in {
    implicit val lang = LanguageId("de-DE")
    t"Good evening!" shouldBe "Guten Abend!"
  }

  it should "provide messages for Russian" in {
    implicit val lang = LanguageId("ru-RU")
    t"Good evening!" shouldBe "Добрый вечер!"
  }

  it should "provide messages for Chinese" in {
    implicit val lang = LanguageId("zh-ZH")
    t"Good evening!" shouldBe "晚上好!"
  }
}
Example 40
Source File: Test.scala From scalingua with Apache License 2.0 | 5 votes |
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
import ru.makkarpov.scalingua.{LanguageId, Messages, Language}
import ru.makkarpov.scalingua.I18n._

class Test extends AnyFlatSpec with Matchers {
  implicit val languageId = LanguageId("ru-RU")

  val subAMessages = Messages.compiled("subA")
  val subBMessages = Messages.compiled("subB")

  it should "translate SubA separately" in {
    implicit val messages = subAMessages

    SubA.testA shouldBe "Тест первый"
    SubA.testB shouldBe "Тест второй"
    SubB.testA shouldBe "Test A"
    SubB.testB shouldBe "Test B"
  }

  it should "translate SubB separately" in {
    implicit val messages = subBMessages

    SubA.testA shouldBe "Test A"
    SubA.testB shouldBe "Test B"
    SubB.testA shouldBe "Первый тест"
    SubB.testB shouldBe "Второй тест"
  }

  it should "translate merged messages" in {
    implicit val messages = subAMessages.merge(subBMessages)

    SubA.testA shouldBe "Тест первый"
    SubA.testB shouldBe "Тест второй"
    SubB.testA shouldBe "Первый тест"
    SubB.testB shouldBe "Второй тест"
  }
}
Example 41
Source File: PlayTest.scala From scalingua with Apache License 2.0 | 5 votes |
package ru.makkarpov.scalingua.play.test

import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
import ru.makkarpov.scalingua.{Language, Messages, TaggedLanguage}
import ru.makkarpov.scalingua.play.I18n._

class PlayTest extends AnyFlatSpec with Matchers {
  it should "handle HTML translations" in {
    implicit val lang = Language.English

    val x = "\"List<String>\""
    h"A class <code>$x</code> can be used to provide simple list container".body shouldBe
      "A class <code>&quot;List&lt;String&gt;&quot;</code> can be used to provide simple list container"
  }

  it should "handle 'Accept' header" in {
    implicit val messages = new Messages(
      TaggedLanguage.Identity,
      MockEnglishLang("aa-AA"),
      MockEnglishLang("aa-AX"),
      MockEnglishLang("bb-BB"),
      MockEnglishLang("cc-CC")
    )

    import ru.makkarpov.scalingua.play.PlayUtils.{languageFromAccept => f}

    f("") shouldBe Language.English
    f("cc").id.toString shouldBe "cc-CC"
    f("bb").id.toString shouldBe "bb-BB"
    f("aa").id.toString shouldBe "aa-AA"
    f("aa-RR").id.toString shouldBe "aa-AA"
    f("aa-AX").id.toString shouldBe "aa-AX"
    f("bb, cc").id.toString shouldBe "bb-BB"
    f("cc, bb").id.toString shouldBe "cc-CC"
    f("xx, yy, zz") shouldBe Language.English
    f("tt, tt, cc, tt, tt").id.toString shouldBe "cc-CC"
    f("bb, cc; q=0.8").id.toString shouldBe "bb-BB"
    f("cc; q=0.8, bb").id.toString shouldBe "bb-BB"
    f("aa; q=0.2; bb; q=0.4, cc; q=0.8").id.toString shouldBe "cc-CC"
    f("aa; q=0.8, bb; q=0.4, cc; q=0.2").id.toString shouldBe "aa-AA"

    // No exceptions should be thrown on incorrect inputs; English should be returned instead:
    f("111111111") shouldBe Language.English
    f("aa-AA-ww") shouldBe Language.English
    f("aa-AX; q=W") shouldBe Language.English
  }
}
Example 42
Source File: IStringTest.scala From scalingua with Apache License 2.0 | 5 votes |
package ru.makkarpov.scalingua.test

import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
import ru.makkarpov.scalingua.I18n._

class IStringTest extends AnyFlatSpec with Matchers {
  val mockLang1 = new MockLang("1")
  val mockLang2 = new MockLang("2")
  val mockLang3 = new MockLang("3")

  it should "handle internationalized strings when surrounding implicit lang is not present" in {
    val t = lt"Hello, world!"

    t.resolve(mockLang1) shouldBe "{s1:Hello, world!}"
    t.resolve(mockLang2) shouldBe "{s2:Hello, world!}"
  }

  it should "handle internationalized strings when implicit lang is present" in {
    implicit val lang = mockLang3

    val t = lt"12345"

    t.resolve(mockLang1) shouldBe "{s1:12345}"
    t.resolve(mockLang2) shouldBe "{s2:12345}"
    t.resolve shouldBe "{s3:12345}"
  }
}
Example 43
Source File: CustomI18nTest.scala From scalingua with Apache License 2.0 | 5 votes |
package ru.makkarpov.scalingua.test

import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
import ru.makkarpov.scalingua.{I18n, Language, Macros, OutputFormat}

import scala.language.experimental.macros

class CustomI18nTest extends AnyFlatSpec with Matchers {
  case class CStr(s: String)

  implicit val CStrFormat = new OutputFormat[CStr] {
    override def convert(s: String): CStr = CStr(s"C{$s}")
    override def escape(s: String): String = s"[$s]"
  }

  object CustomI18n extends I18n {
    def ct(msg: String, args: (String, Any)*)(implicit lang: Language, outputFormat: OutputFormat[CStr]): CStr =
      macro Macros.singular[CStr]
  }

  implicit val mockLang = new MockLang("")

  import CustomI18n._

  it should "handle custom I18n classes via traits" in {
    t"Hello, world!" shouldBe "{s:Hello, world!}"
  }

  it should "handle custom methods in I18n classes" in {
    ct("Hello, world!").s shouldBe "C{{s:Hello, world!}}"
    ct("Hello, %(what)!", "what" -> "world").s shouldBe "C{{s:Hello, %(what)[[world]]!}}"

    """ ct("Hello, %(x)!", "y" -> 1) """ shouldNot compile
  }
}
Example 44
Source File: CoursierDependencyResolutionTests.scala From sbt-coursier with Apache License 2.0 | 5 votes |
package lmcoursier

import org.scalatest.matchers.should.Matchers
import org.scalatest.propspec.AnyPropSpec
import sbt.librarymanagement.ModuleID
import sbt.librarymanagement.UpdateConfiguration
import sbt.librarymanagement.UnresolvedWarningConfiguration
import sbt.util.Logger
import sbt.librarymanagement.ModuleInfo
import sbt.librarymanagement.ModuleDescriptorConfiguration
import sbt.librarymanagement.Configuration

class CoursierDependencyResolutionTests extends AnyPropSpec with Matchers {
  property("missingOk from passed UpdateConfiguration") {
    val depRes = CoursierDependencyResolution(CoursierConfiguration().withAutoScalaLibrary(false))
    val desc = ModuleDescriptorConfiguration(ModuleID("test", "foo", "1.0"), ModuleInfo("foo"))
      .withDependencies(Vector(
        ModuleID("io.get-coursier", "coursier_2.13", "0.1.53").withConfigurations(Some("compile")),
        ModuleID("org.scala-lang", "scala-library", "2.12.11").withConfigurations(Some("compile"))
      ))
      .withConfigurations(Vector(Configuration.of("Compile", "compile")))
    val module = depRes.moduleDescriptor(desc)
    val logger: Logger = new Logger {
      def log(level: sbt.util.Level.Value, message: => String): Unit =
        System.err.println(s"${level.id} $message")
      def success(message: => String): Unit =
        System.err.println(message)
      def trace(t: => Throwable): Unit =
        System.err.println(s"trace $t")
    }

    depRes.update(module, UpdateConfiguration(), UnresolvedWarningConfiguration(), logger)
      .fold(w => (), rep => sys.error(s"Expected resolution to fail, got report $rep"))

    val report = depRes.update(module, UpdateConfiguration().withMissingOk(true), UnresolvedWarningConfiguration(), logger)
      .fold(w => throw w.resolveException, identity)
  }
}
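The update calls above return an Either, handled here with fold; in plain Matchers tests, ScalaTest's EitherValues mixin offers a terser way to assert on one side. A small illustrative sketch (the `resolve` function is a hypothetical stand-in, not the lm-coursier API):

import org.scalatest.EitherValues
import org.scalatest.matchers.should.Matchers
import org.scalatest.propspec.AnyPropSpec

class EitherValuesSpec extends AnyPropSpec with Matchers with EitherValues {
  property("asserting on either side of an Either") {
    // stand-in for a resolution step that may fail with a message
    def resolve(missingOk: Boolean): Either[String, Int] =
      if (missingOk) Right(42) else Left("unresolved dependency")

    resolve(missingOk = true).value should be(42) // fails descriptively on a Left
    resolve(missingOk = false).left.value should include("unresolved")
  }
}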
Example 45
Source File: IvyXmlTests.scala From sbt-coursier with Apache License 2.0 | 5 votes |
package lmcoursier

import lmcoursier.definitions.{Configuration, Info, Module, ModuleName, Organization, Project}
import org.scalatest.matchers.should.Matchers
import org.scalatest.propspec.AnyPropSpec

class IvyXmlTests extends AnyPropSpec with Matchers {
  property("no truncation") {
    val project = Project(
      Module(Organization("org"), ModuleName("name"), Map()),
      "ver",
      Nil,
      Map(
        // long list of configurations -> no truncation any way
        Configuration("foo") -> (1 to 80).map(n => Configuration("bar" + n))
      ),
      Nil,
      None,
      Nil,
      Info("", "", Nil, Nil, None)
    )

    val content = IvyXml(project, Nil, Nil)
    assert(!content.contains("</conf>"))
  }
}
Example 46
Source File: MacroExpansionJVMSpec.scala From scala-loci with Apache License 2.0 | 5 votes |
package loci
package language
package impl

import Testing._
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

class MacroExpansionJVMSpec extends AnyFlatSpec with Matchers {
  behavior of "Macro Expansion"

  def emptySystem(peer: runtime.PlacedValues) =
    new runtime.System(
      peer,
      None,
      false,
      Map.empty,
      contexts.Immediate.global,
      new runtime.RemoteConnections(
        runtime.Peer.Signature("", List.empty, runtime.Module.Signature("", List.empty)),
        Map.empty),
      Seq.empty,
      Seq.empty)

  it should "correctly compile remote blocks" in {
    @multitier object mod {
      @peer type Peer <: { type Tie <: Single[Peer] }

      val dummyRemote: Remote[Peer] = null

      def method() = on[Peer] { implicit! =>
        val i = 100

        (on[Peer].run { implicit! => identity(42) }).asLocal
        (on[Peer].run sbj { implicit! => remote: Remote[Peer] => identity(42) }).asLocal
        (on[Peer].run.capture(i) { implicit! => identity(i) }).asLocal
        (on[Peer].run.capture(i) sbj { implicit! => remote: Remote[Peer] => identity(i) }).asLocal

        (on(dummyRemote).run { implicit! => identity(42) }).asLocal
        (on(dummyRemote).run sbj { implicit! => remote: Remote[Peer] => identity(42) }).asLocal
        (on(dummyRemote).run.capture(i) { implicit! => identity(i) }).asLocal
        (on(dummyRemote).run.capture(i) sbj { implicit! => remote: Remote[Peer] => identity(i) }).asLocal

        (on(dummyRemote, dummyRemote).run { implicit! => identity(42) }).asLocalFromAll
        (on(dummyRemote, dummyRemote).run sbj { implicit! => remote: Remote[Peer] => identity(42) }).asLocalFromAll
        (on(dummyRemote, dummyRemote).run.capture(i) { implicit! => identity(i) }).asLocalFromAll
        (on(dummyRemote, dummyRemote).run.capture(i) sbj { implicit! => remote: Remote[Peer] => identity(i) }).asLocalFromAll

        on[Peer].run { implicit! => identity(42) }
        on[Peer].run sbj { implicit! => remote: Remote[Peer] => identity(42) }
        on[Peer].run.capture(i) { implicit! => identity(i) }
        on[Peer].run.capture(i) sbj { implicit! => remote: Remote[Peer] => identity(i) }

        on(dummyRemote).run { implicit! => identity(42) }
        on(dummyRemote).run sbj { implicit! => remote: Remote[Peer] => identity(42) }
        on(dummyRemote).run.capture(i) { implicit! => identity(i) }
        on(dummyRemote).run.capture(i) sbj { implicit! => remote: Remote[Peer] => identity(i) }

        on(dummyRemote, dummyRemote).run { implicit! => identity(42) }
        on(dummyRemote, dummyRemote).run sbj { implicit! => remote: Remote[Peer] => identity(42) }
        on(dummyRemote, dummyRemote).run.capture(i) { implicit! => identity(i) }
        on(dummyRemote, dummyRemote).run.capture(i) sbj { implicit! => remote: Remote[Peer] => identity(i) }
      }
    }

    val dummyRemote: Remote[mod.Peer] = null
    val peer = new mod.$loci$peer$Peer {
      def $loci$sys$create = emptySystem(this)
    }

    val methods = peer.getClass.getMethods
    def invoke[T](name: String, arguments: AnyRef*) =
      (methods collectFirst { case method if method.getName == name => method }).get
        .invoke(peer, arguments: _*).asInstanceOf[T]

    invoke[AnyRef]("$loci$anonymous$0") should be (42)
    invoke[Remote[mod.Peer] => AnyRef]("$loci$anonymous$1")(dummyRemote) should be (42)
    invoke[AnyRef]("$loci$anonymous$2", Int.box(200)) should be (200)
    invoke[Remote[mod.Peer] => AnyRef]("$loci$anonymous$3", Int.box(200))(dummyRemote) should be (200)
    invoke[AnyRef]("$loci$anonymous$4") should be (42)
    invoke[Remote[mod.Peer] => AnyRef]("$loci$anonymous$5")(dummyRemote) should be (42)
    invoke[AnyRef]("$loci$anonymous$6", Int.box(200)) should be (200)
    invoke[Remote[mod.Peer] => AnyRef]("$loci$anonymous$7", Int.box(200))(dummyRemote) should be (200)
    invoke[AnyRef]("$loci$anonymous$8") should be (42)
    invoke[Remote[mod.Peer] => AnyRef]("$loci$anonymous$9")(dummyRemote) should be (42)
    invoke[AnyRef]("$loci$anonymous$10", Int.box(200)) should be (200)
    invoke[Remote[mod.Peer] => AnyRef]("$loci$anonymous$11", Int.box(200))(dummyRemote) should be (200)
    invoke[AnyRef]("$loci$anonymous$12") should (be (()) or be (null))
    invoke[Remote[mod.Peer] => AnyRef]("$loci$anonymous$13")(dummyRemote) should (be (()) or be (null))
    invoke[AnyRef]("$loci$anonymous$14", Int.box(200)) should (be (()) or be (null))
    invoke[Remote[mod.Peer] => AnyRef]("$loci$anonymous$15", Int.box(200))(dummyRemote) should (be (()) or be (null))
    invoke[AnyRef]("$loci$anonymous$16") should (be (()) or be (null))
    invoke[Remote[mod.Peer] => AnyRef]("$loci$anonymous$17")(dummyRemote) should (be (()) or be (null))
    invoke[AnyRef]("$loci$anonymous$18", Int.box(200)) should (be (()) or be (null))
    invoke[Remote[mod.Peer] => AnyRef]("$loci$anonymous$19", Int.box(200))(dummyRemote) should (be (()) or be (null))
    invoke[AnyRef]("$loci$anonymous$20") should (be (()) or be (null))
    invoke[Remote[mod.Peer] => AnyRef]("$loci$anonymous$21")(dummyRemote) should (be (()) or be (null))
    invoke[AnyRef]("$loci$anonymous$22", Int.box(200)) should (be (()) or be (null))
    invoke[Remote[mod.Peer] => AnyRef]("$loci$anonymous$23", Int.box(200))(dummyRemote) should (be (()) or be (null))
  }
}
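The `should (be (()) or be (null))` assertions above use Matchers' logical combinators, which let one expectation accept several outcomes. A minimal illustration of `or` and `and`:

import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

class CombinatorSpec extends AnyFlatSpec with Matchers {
  it should "combine matchers with and/or" in {
    val result: AnyRef = null
    // passes if the value is either the unit value or null
    result should (be(()) or be(null))
    // both sides of `and` must hold
    "remote" should (startWith("re") and endWith("te"))
  }
}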
Example 47
Source File: CalcModelSpec.scala From playground-binding.scala with MIT License | 5 votes |
import com.ccm.me.playground.bindingscala.calc._
import org.scalatest._
import org.scalatest.prop._

import scala.collection.immutable._
import scala.util.Success
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

class CalcModelSpec extends AnyFlatSpec with TableDrivenPropertyChecks with Matchers {
  val examples = Table(
    ("tokens", "result"),
    ("", Success(0d)),
    ("0 0", Success(0d)),
    ("5", Success(5d)),
    ("21", Success(21d)),
    ("2 + 3", Success(3d)),
    ("2 + 3 =", Success(5d)),
    ("20 + 30 =", Success(50d)),
    ("2 - 3 =", Success(-1d)),
    ("2 * 3 =", Success(6d)),
    ("2 / 3 =", Success(2d / 3d)),
    ("2 + 3 *", Success(3d)),
    ("2 + 3 * 6", Success(6d)),
    ("2 * 3 +", Success(6d)),
    ("2 + 3 * 1", Success(1d)),
    ("2 + 3 * 1 -", Success(5d)),
    ("2 + 3 * 1 - 5 =", Success(0d)),
    ("2 + 3 * 1 - 5 * 12 =", Success(-55d)),
    ("12 + +", Success(24d)),
    ("12 + + =", Success(48d)),
    ("0012 + 0021 =", Success(33d))
  )

  it should "produce correct calculation" in {
    forAll(examples) { (tokens, result) =>
      val calc = parseTokens(tokens).foldLeft(CalcModel())((calc, token) => calc(token))
      calc.result should equal(result)
    }
  }

  private def parseTokens(s: String): Seq[Token] = {
    s.filter(!_.isSpaceChar).map {
      case n if n.isDigit => Digit(n.asDigit)
      case '.' => Dot()
      case '+' => Plus()
      case '-' => Minus()
      case '*' => Multiply()
      case '/' => Divide()
      case 'c' => Clear()
      case '=' => Result()
      case c @ _ => fail(s"Unexpected char: $c")
    }
  }
}
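TableDrivenPropertyChecks pairs naturally with Matchers: each row of the table becomes one checked example, and a failing row is reported together with its column values. A self-contained sketch of the pattern:

import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
import org.scalatest.prop.TableDrivenPropertyChecks

class TableSpec extends AnyFlatSpec with TableDrivenPropertyChecks with Matchers {
  it should "check every row of the table" in {
    val additions = Table(
      ("a", "b", "sum"), // column headers, used in failure messages
      (1, 2, 3),
      (0, 0, 0),
      (-5, 5, 0)
    )
    forAll(additions) { (a, b, sum) =>
      a + b should equal(sum)
    }
  }
}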
Example 48
Source File: AltSpec.scala From scalaz-deriving with GNU Lesser General Public License v3.0 | 5 votes |
// Copyright: 2017 - 2020 Sam Halliday
// License: https://opensource.org/licenses/BSD-3-Clause

package scalaz

import java.lang.String
import org.scalatest._
import examples.adt._
import examples.recadt._
import examples.recgadt._
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

class AltSpec extends AnyFlatSpec with NonImplicitAssertions {
  import Matchers._

  "products" should "behave as expected" in {
    Default[Faz].default should equal(Faz(false, 0))
  }

  "coproducts" should "behave as expected" in {
    Default[Foo].default should equal(Bar(""))
  }

  // Default for a recursive ADT is a dumb idea. It only works by accident here
  // because of the ordering of the source code case classes! Try using \/- as
  // the choice, or swap the case classes around and watch the world explode
  // with an infinite loop.
  "recursive products" should "behave as expected" in {
    Default[Leaf].default should equal(Leaf(""))
  }

  "recursive coproducts" should "behave as expected" in {
    Default[ATree].default should equal(Leaf(""))
  }

  "recursive GADT products" should "behave as expected" in {
    Default[GLeaf[String]].default should equal(GLeaf(""))
  }

  "recursive GADT coproducts" should "behave as expected" in {
    Default[GTree[String]].default should equal(GLeaf(""))
  }
}
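Note the pattern above: instead of mixing Matchers into the class, the spec imports the members of the Matchers companion object, which keeps the matcher DSL (and its implicit conversions) out of scope except where explicitly imported. Both styles are supported; a minimal sketch of the import style:

import org.scalatest.flatspec.AnyFlatSpec

class ImportStyleSpec extends AnyFlatSpec {
  // bring `should`, `equal`, etc. into scope via the companion object
  import org.scalatest.matchers.should.Matchers._

  "the import style" should "work like the mixin style" in {
    List(1, 2, 3).sum should equal(6)
  }
}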
Example 49
Source File: ModifyOptionAtTest.scala From quicklens with Apache License 2.0 | 5 votes |
package com.softwaremill.quicklens

import java.util.NoSuchElementException

import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

class ModifyOptionAtTest extends AnyFlatSpec with Matchers {
  it should "modify an Option with case class item" in {
    modify(Option(1))(_.at).using(_ + 1) should be(Option(2))
  }

  it should "modify an Option in a case class hierarchy" in {
    case class Foo(a: Int)
    case class Bar(foo: Foo)
    case class BarOpt(maybeBar: Option[Bar])
    case class BazOpt(barOpt: BarOpt)

    modify(BazOpt(BarOpt(Some(Bar(Foo(4))))))(_.barOpt.maybeBar.at.foo.a).using(_ + 1) should be(
      BazOpt(BarOpt(Some(Bar(Foo(5)))))
    )
  }

  it should "crash on missing key" in {
    an[NoSuchElementException] should be thrownBy modify(Option.empty[Int])(_.at).using(_ + 1)
  }
}
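The `an[...] should be thrownBy` syntax above is one of several exception assertions Matchers offers; the `the ... thrownBy` form additionally captures the exception so further assertions can be made on it. A brief illustration:

import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

class ThrownBySpec extends AnyFlatSpec with Matchers {
  it should "assert that code throws" in {
    an[ArithmeticException] should be thrownBy (1 / 0)
    // capture the exception to inspect its message
    the[ArithmeticException] thrownBy (1 / 0) should have message "/ by zero"
  }
}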
Example 50
Source File: ModifyOptionAtOrElseTest.scala From quicklens with Apache License 2.0 | 5 votes |
package com.softwaremill.quicklens

import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

class ModifyOptionAtOrElseTest extends AnyFlatSpec with Matchers {
  it should "modify a Some" in {
    modify(Option(1))(_.atOrElse(3)).using(_ + 1) should be(Option(2))
  }

  it should "modify a None with default" in {
    modify(None: Option[Int])(_.atOrElse(3)).using(_ + 1) should be(Option(4))
  }

  it should "modify an Option in a case class hierarchy" in {
    case class Foo(a: Int)
    case class Bar(foo: Foo)
    case class BarOpt(maybeBar: Option[Bar])
    case class BazOpt(barOpt: BarOpt)

    modify(BazOpt(BarOpt(None)))(_.barOpt.maybeBar.atOrElse(Bar(Foo(5))).foo.a).using(_ + 1) should be(
      BazOpt(BarOpt(Some(Bar(Foo(6)))))
    )
  }
}
Example 51
Source File: ModifyMapIndexTest.scala From quicklens with Apache License 2.0 | 5 votes |
package com.softwaremill.quicklens

import com.softwaremill.quicklens.TestData._
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

class ModifyMapIndexTest extends AnyFlatSpec with Matchers {
  it should "modify a non-nested map with case class item" in {
    modify(m1)(_.index("K1").a5.name).using(duplicate) should be(m1dup)
  }

  it should "modify a non-nested sorted map with case class item" in {
    modify(ms1)(_.index("K1").a5.name).using(duplicate) should be(m1dup)
  }

  it should "modify a non-nested hash map with case class item" in {
    modify(mh1)(_.index("K1").a5.name).using(duplicate) should be(m1dup)
  }

  it should "modify a non-nested listed map with case class item" in {
    modify(ml1)(_.index("K1").a5.name).using(duplicate) should be(m1dup)
  }

  it should "modify a nested map using index" in {
    modify(m2)(_.m3.index("K1").a5.name).using(duplicate) should be(m2dup)
  }

  it should "not modify if there's no such element" in {
    modify(m1)(_.index("K0").a5.name).using(duplicate) should be(m1)
  }
}
Example 52
Source File: SealedTest.scala From quicklens with Apache License 2.0 | 5 votes |
package com.softwaremill.quicklens

import com.softwaremill.quicklens.TestData.duplicate
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

object SealedTestData {
  case class G(p1: Option[P1])

  sealed trait P1 {
    def x: String
    def f: Option[String]
  }
  case class C1(x: String, f: Option[String]) extends P1
  case class C2(x: String, f: Option[String]) extends P1

  val p1: P1 = C2("c2", None)
  val p1dup: P1 = C2("c2c2", None)
  val g1 = G(Some(C1("c1", Some("c2"))))
  val g1dup = G(Some(C1("c1c1", Some("c2"))))
  val g1eachdup = G(Some(C1("c1", Some("c2c2"))))

  sealed trait P2 {
    def x: String
  }
  case class C3(x: String) extends P2
  sealed trait P3 extends P2
  case class C4(x: String) extends P3

  val p2: P2 = C4("c4")
  val p2dup: P2 = C4("c4c4")

  // example from the README
  sealed trait Pet {
    def name: String
  }
  case class Fish(name: String) extends Pet
  sealed trait LeggedPet extends Pet
  case class Cat(name: String) extends LeggedPet
  case class Dog(name: String) extends LeggedPet

  val pets = List[Pet](Fish("Finn"), Cat("Catia"), Dog("Douglas"))
  val juniorPets = List[Pet](Fish("Finn, Jr."), Cat("Catia, Jr."), Dog("Douglas, Jr."))
}

class SealedTest extends AnyFlatSpec with Matchers {
  import SealedTestData._

  it should "modify a field in a sealed trait" in {
    modify(p1)(_.x).using(duplicate) should be(p1dup)
  }

  it should "modify a field in a sealed trait through a Functor" in {
    modify(g1)(_.p1.each.x).using(duplicate) should be(g1dup)
  }

  it should "modify a Functor field in a sealed trait" in {
    modify(g1)(_.p1.each.f.each).using(duplicate) should be(g1eachdup)
  }

  it should "modify a field in a hierarchy of sealed traits" in {
    modify(p2)(_.x).using(duplicate) should be(p2dup)
  }

  it should "modify a list of pets from the example" in {
    modify(pets)(_.each.name).using(_ + ", Jr.") should be(juniorPets)
  }
}
Example 53
Source File: SetToSimpleTest.scala From quicklens with Apache License 2.0 | 5 votes |
package com.softwaremill.quicklens

import com.softwaremill.quicklens.TestData._
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

class SetToSimpleTest extends AnyFlatSpec with Matchers {
  it should "set a new value of a single-nested case class field" in {
    modify(a1)(_.a2.a3.a4.a5.name).setTo("mod") should be(a1mod)
  }

  it should "set a new value in a case class if the condition is true" in {
    modify(a1)(_.a2.a3.a4.a5.name).setToIf(true)("mod") should be(a1mod)
  }

  it should "leave a case class unchanged if the condition is false" in {
    modify(a1)(_.a2.a3.a4.a5.name).setToIf(false)("mod") should be(a1)
  }

  it should "set a new value in a case class if it is defined" in {
    modify(a1)(_.a2.a3.a4.a5.name).setToIfDefined(Some("mod")) should be(a1mod)
  }

  it should "leave a case class unchanged if the value is not defined" in {
    modify(a1)(_.a2.a3.a4.a5.name).setToIfDefined(None) should be(a1)
  }
}
Example 54
Source File: ModifyWhenTest.scala From quicklens with Apache License 2.0 | 5 votes |
package com.softwaremill.quicklens

import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

object ModifyWhenTestData {
  trait Animal
  case class Dog(age: Int) extends Animal
  case class Cat(ages: List[Int]) extends Animal
  case class Zoo(animals: List[Animal])

  val dog: Animal = Dog(4)
  val olderDog: Animal = Dog(5)
  val cat: Animal = Cat(List(3, 12, 13))
  val olderCat: Animal = Cat(List(4, 12, 13))
  val zoo = Zoo(List(dog, cat))
  val olderZoo = Zoo(List(olderDog, olderCat))
}

class ModifyWhenTest extends AnyFlatSpec with Matchers {
  import ModifyWhenTestData._

  it should "modify a field in a subtype" in {
    dog.modify(_.when[Dog].age).using(_ + 1) shouldEqual olderDog
  }

  it should "ignore subtypes other than the selected one" in {
    cat.modify(_.when[Dog].age).using(_ + 1) shouldEqual cat
  }

  it should "modify a Functor field in a subtype" in {
    cat.modify(_.when[Cat].ages.at(0)).using(_ + 1) shouldEqual olderCat
  }

  it should "modify a field in a subtype through a Functor" in {
    zoo
      .modifyAll(
        _.animals.each.when[Dog].age,
        _.animals.each.when[Cat].ages.at(0)
      )
      .using(_ + 1) shouldEqual olderZoo
  }
}
Example 55
Source File: ModifyEachWhereTest.scala From quicklens with Apache License 2.0 | 5 votes |
package com.softwaremill.quicklens

import com.softwaremill.quicklens.TestData._
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

class ModifyEachWhereTest extends AnyFlatSpec with Matchers {
  it should "modify a single-nested optional case class field only if the condition returns true" in {
    modify(x4)(_.x5.eachWhere(_ => true).name).using(duplicate) should be(x4dup)
    modify(x4)(_.x5.eachWhere(_ => false).name).using(duplicate) should be(x4)
  }

  it should "modify a single-nested optional case class field (pimped) only if the condition returns true" in {
    x4.modify(_.x5.eachWhere(_ => true).name).using(duplicate) should be(x4dup)
    x4.modify(_.x5.eachWhere(_ => false).name).using(duplicate) should be(x4)
  }

  it should "not modify an optional case class field if it is none regardless of the condition" in {
    modify(x4none)(_.x5.eachWhere(_ => true).name).using(duplicate) should be(x4none)
    modify(x4none)(_.x5.eachWhere(_ => false).name).using(duplicate) should be(x4none)
  }

  it should "modify only those list elements where the condition returns true" in {
    modify(y1)(_.y2.y3.eachWhere(_.y4.map(_.name) == Some("d2")).y4.each.name)
      .using(duplicate) should be(y1at1dup)
  }

  it should "allow .eachWhere at the end only if the condition returns true" in {
    modify(z1)(_.name.eachWhere(_.startsWith("d"))).using(duplicate) should be(z1dup)
    modify(z1)(_.name.eachWhere(_.startsWith("e"))).using(duplicate) should be(z1)
  }
}
Example 56
Source File: ModifyEachTest.scala From quicklens with Apache License 2.0 | 5 votes |
package com.softwaremill.quicklens

import com.softwaremill.quicklens.TestData._
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

class ModifyEachTest extends AnyFlatSpec with Matchers {
  it should "modify a single-nested optional case class field" in {
    modify(x4)(_.x5.each.name).using(duplicate) should be(x4dup)
  }

  it should "modify a single-nested optional case class field (pimped)" in {
    x4.modify(_.x5.each.name).using(duplicate) should be(x4dup)
  }

  it should "modify multiple deeply-nested optional case class fields" in {
    modify(x1)(_.x2.x3.each.x4.x5.each.name).using(duplicate) should be(x1dup)
  }

  it should "not modify an optional case class field if it is none" in {
    modify(x1none)(_.x2.x3.each.x4.x5.each.name).using(duplicate) should be(x1none)
    modify(x4none)(_.x5.each.name).using(duplicate) should be(x4none)
  }

  it should "modify both lists and options" in {
    modify(y1)(_.y2.y3.each.y4.each.name).using(duplicate) should be(y1dup)
  }

  it should "allow .each at the end" in {
    modify(z1)(_.name.each).using(duplicate) should be(z1dup)
  }
}
Example 57
Source File: ModifyEitherTest.scala From quicklens with Apache License 2.0 | 5 votes |
package com.softwaremill.quicklens

import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

case class Named(name: String)
case class Aged(age: Int)
case class Eithers(e: Either[Named, Aged])
case class MoreEithers(e1: Either[Eithers, MoreEithers], e2: Either[Eithers, MoreEithers])

class ModifyEitherTest extends AnyFlatSpec with Matchers {
  it should "modify a single-nested left case class field" in {
    modify(
      Eithers(Left(Named("boo")))
    )(_.e.eachLeft.name).setTo("moo") should be(
      Eithers(Left(Named("moo")))
    )
  }

  it should "modify a single-nested left case class field (pimped)" in {
    Eithers(Left(Named("boo")))
      .modify(_.e.eachLeft.name)
      .setTo("moo") should be(
      Eithers(Left(Named("moo")))
    )
  }

  it should "modify a single-nested right case class field" in {
    modify(
      Eithers(Right(Aged(23)))
    )(_.e.eachRight.age).setTo(32) should be(
      Eithers(Right(Aged(32)))
    )
  }

  it should "modify a single-nested right case class field (pimped)" in {
    Eithers(Right(Aged(23)))
      .modify(_.e.eachRight.age)
      .setTo(32) should be(
      Eithers(Right(Aged(32)))
    )
  }

  it should "modify multiple deeply-nested either case class fields" in {
    modify(
      MoreEithers(
        e1 = Right(
          MoreEithers(
            e1 = Left(Eithers(Right(Aged(23)))),
            e2 = Left(Eithers(Left(Named("boo"))))
          )
        ),
        e2 = Left(Eithers(Left(Named("boo"))))
      )
    )(_.e1.eachRight.e2.eachLeft.e.eachLeft.name)
      .using(_.toUpperCase) should be(
      MoreEithers(
        e1 = Right(
          MoreEithers(
            e1 = Left(Eithers(Right(Aged(23)))),
            e2 = Left(Eithers(Left(Named("BOO"))))
          )
        ),
        e2 = Left(Eithers(Left(Named("boo"))))
      )
    )
  }

  it should "not modify left case class field if it is right" in {
    modify(
      Eithers(Right(Aged(23)))
    )(_.e.eachLeft.name).setTo("moo") should be(
      Eithers(Right(Aged(23)))
    )
  }

  it should "not modify right case class field if it is left" in {
    modify(
      Eithers(Left(Named("boo")))
    )(_.e.eachRight.age).setTo(33) should be(
      Eithers(Left(Named("boo")))
    )
  }

  it should "allow .eachLeft at the end" in {
    modify(Left("boo"): Either[String, Int])(_.eachLeft)
      .using(_.toUpperCase) should be(Left("BOO"))
  }

  it should "allow .eachRight at the end" in {
    modify(Right(23): Either[String, Int])(_.eachRight).using(_ + 3) should be(Right(26))
  }
}
Example 58
Source File: ModifyLazyTest.scala From quicklens with Apache License 2.0 | 5 votes |
package com.softwaremill.quicklens

import com.softwaremill.quicklens.TestData._
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

class ModifyLazyTest extends AnyFlatSpec with Matchers {
  it should "modify a single-nested case class field" in {
    val ml = modify[A5](_.name).using(duplicate)
    ml(a5) should be(a5dup)
  }

  it should "modify a deeply-nested case class field" in {
    val ml = modify[A1](_.a2.a3.a4.a5.name).using(duplicate)
    ml(a1) should be(a1dup)
  }

  it should "modify several fields" in {
    val ml = modifyAll[B1](_.b2, _.b3.each).using(duplicate)
    ml(b1) should be(b1dupdup)
  }

  it should "modify a case class field if the condition is true" in {
    val ml = modify[A5](_.name).usingIf(true)(duplicate)
    ml(a5) should be(a5dup)
  }

  it should "leave a case class unchanged if the condition is false" in {
    val ml = modify[A5](_.name).usingIf(false)(duplicate)
    ml(a5) should be(a5)
  }
}
Example 59
Source File: ModifyIndexTest.scala From quicklens with Apache License 2.0 | 5 votes |
package com.softwaremill.quicklens

import com.softwaremill.quicklens.TestData._
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

class ModifyIndexTest extends AnyFlatSpec with Matchers {
  it should "modify a non-nested list with case class item" in {
    modify(l1)(_.index(2).a4.a5.name).using(duplicate) should be(l1at2dup)
    modify(l1)(_.index(2))
      .using(a3 => modify(a3)(_.a4.a5.name).using(duplicate)) should be(l1at2dup)
  }

  it should "modify a nested list using index" in {
    modify(ll1)(_.index(2).index(1).name).using(duplicate) should be(ll1at2at1dup)
  }

  it should "modify a nested list using index and each" in {
    modify(ll1)(_.index(2).each.name).using(duplicate) should be(ll1at2eachdup)
    modify(ll1)(_.each.index(1).name).using(duplicate) should be(ll1eachat1dup)
  }

  it should "modify both lists and options" in {
    modify(y1)(_.y2.y3.index(1).y4.each.name).using(duplicate) should be(y1at1dup)
  }

  it should "not modify if given index does not exist" in {
    modify(l1)(_.index(10).a4.a5.name).using(duplicate) should be(l1)
  }
}
Example 60
Source File: ModifySimpleTest.scala From quicklens with Apache License 2.0 | 5 votes |
package com.softwaremill.quicklens

import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
import TestData._

class ModifySimpleTest extends AnyFlatSpec with Matchers {
  it should "modify a single-nested case class field" in {
    modify(a5)(_.name).using(duplicate) should be(a5dup)
  }

  it should "modify a single-nested case class field using apply" in {
    modify(a5)(_.name)(duplicate) should be(a5dup)
  }

  it should "modify a deeply-nested case class field" in {
    modify(a1)(_.a2.a3.a4.a5.name).using(duplicate) should be(a1dup)
  }

  it should "modify several fields" in {
    modifyAll(b1)(_.b2, _.b3.each).using(duplicate) should be(b1dupdup)
  }

  it should "modify a case class field if the condition is true" in {
    modify(a5)(_.name).usingIf(true)(duplicate) should be(a5dup)
  }

  it should "leave a case class unchanged if the condition is false" in {
    modify(a5)(_.name).usingIf(false)(duplicate) should be(a5)
  }

  it should "modify polymorphic case class field" in {
    modify(aPoly)(_.poly).using(duplicate) should be(aPolyDup)
  }

  it should "modify polymorphic case class field using apply" in {
    modify(aPoly)(_.poly)(duplicate) should be(aPolyDup)
  }

  it should "modify polymorphic case class field if condition is true" in {
    modify(aPoly)(_.poly).usingIf(true)(duplicate) should be(aPolyDup)
  }

  it should "leave a polymorphic case class field if condition is false" in {
    modify(aPoly)(_.poly).usingIf(false)(duplicate) should be(aPoly)
  }
}
Example 61
Source File: ModifyPimpTest.scala From quicklens with Apache License 2.0 | 5 votes |
package com.softwaremill.quicklens

import com.softwaremill.quicklens.TestData._
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

class ModifyPimpTest extends AnyFlatSpec with Matchers {
  it should "modify a field once" in {
    a1.modify(_.a2.a3.a4.a5.name).using(duplicate) should be(a1dup)
  }

  it should "modify a deeply-nested case class field" in {
    a1.modify(_.a2.a3.a4.a5.name)
      .using(duplicate)
      .modify(_.a2.a3.a4.a5.name)
      .using(duplicate) should be(a1dupdup)
  }

  it should "modify several fields" in {
    b1.modifyAll(_.b2, _.b3.each).using(duplicate) should be(b1dupdup)
  }

  it should "modify polymorphic case class field" in {
    aPoly.modify(_.poly).using(duplicate) should be(aPolyDup)
  }
}
Example 62
Source File: ModifyAtTest.scala From quicklens with Apache License 2.0 | 5 votes |
package com.softwaremill.quicklens

import com.softwaremill.quicklens.TestData._
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

class ModifyAtTest extends AnyFlatSpec with Matchers {
  it should "modify a non-nested list with case class item" in {
    modify(l1)(_.at(2).a4.a5.name).using(duplicate) should be(l1at2dup)
    modify(l1)(_.at(2))
      .using(a3 => modify(a3)(_.a4.a5.name).using(duplicate)) should be(l1at2dup)
  }

  it should "modify a nested list using at" in {
    modify(ll1)(_.at(2).at(1).name).using(duplicate) should be(ll1at2at1dup)
  }

  it should "modify a nested list using at and each" in {
    modify(ll1)(_.at(2).each.name).using(duplicate) should be(ll1at2eachdup)
    modify(ll1)(_.each.at(1).name).using(duplicate) should be(ll1eachat1dup)
  }

  it should "modify both lists and options" in {
    modify(y1)(_.y2.y3.at(1).y4.each.name).using(duplicate) should be(y1at1dup)
  }

  it should "throw an exception if there's no element at the given index" in {
    an[IndexOutOfBoundsException] should be thrownBy {
      modify(l1)(_.at(10).a4.a5.name).using(duplicate)
    }
  }
}
Example 63
Source File: ModifyOptionIndexTest.scala From quicklens with Apache License 2.0 | 5 votes |
package com.softwaremill.quicklens

import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

class ModifyOptionIndexTest extends AnyFlatSpec with Matchers {
  it should "modify an Option with case class item" in {
    modify(Option(1))(_.index).using(_ + 1) should be(Option(2))
  }

  it should "modify an Option in a case class hierarchy" in {
    case class Foo(a: Int)
    case class Bar(foo: Foo)
    case class BarOpt(maybeBar: Option[Bar])
    case class BazOpt(barOpt: BarOpt)

    modify(BazOpt(BarOpt(Some(Bar(Foo(4))))))(_.barOpt.maybeBar.index.foo.a).using(_ + 1) should be(
      BazOpt(BarOpt(Some(Bar(Foo(5)))))
    )
  }

  it should "not modify on missing key" in {
    modify(Option.empty[Int])(_.index).using(_ + 1) should be(None)
  }
}
Example 64
Source File: ModifyMapAtTest.scala From quicklens with Apache License 2.0 | 5 votes |
package com.softwaremill.quicklens

import com.softwaremill.quicklens.TestData._
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

class ModifyMapAtTest extends AnyFlatSpec with Matchers {
  it should "modify a non-nested map with case class item" in {
    modify(m1)(_.at("K1").a5.name).using(duplicate) should be(m1dup)
  }

  it should "modify a non-nested map with atOrElse" in {
    modify(m1)(_.atOrElse("K1", A4(A5("d4"))).a5.name).using(duplicate) should be(m1dup)
    modify(m1)(_.atOrElse("K1", ???).a5.name).using(duplicate) should be(m1dup)
    modify(m1)(_.atOrElse("K4", A4(A5("d4"))).a5.name).using(duplicate) should be(m1missingdup)
  }

  it should "modify a non-nested sorted map with case class item" in {
    modify(ms1)(_.at("K1").a5.name).using(duplicate) should be(m1dup)
  }

  it should "modify a non-nested hash map with case class item" in {
    modify(mh1)(_.at("K1").a5.name).using(duplicate) should be(m1dup)
  }

  it should "modify a non-nested listed map with case class item" in {
    modify(ml1)(_.at("K1").a5.name).using(duplicate) should be(m1dup)
  }

  it should "modify a nested map using at" in {
    modify(m2)(_.m3.at("K1").a5.name).using(duplicate) should be(m2dup)
  }

  it should "modify a nested map using atOrElse" in {
    modify(m2)(_.m3.atOrElse("K4", A4(A5("d4"))).a5.name).using(duplicate) should be(m2missingdup)
  }

  it should "modify a non-nested map using each" in {
    modify(m1)(_.each.a5.name).using(duplicate) should be(m1dupEach)
  }

  it should "modify a non-nested sorted map using each" in {
    modify(ms1)(_.each.a5.name).using(duplicate) should be(m1dupEach)
  }

  it should "modify a non-nested hash map using each" in {
    modify(mh1)(_.each.a5.name).using(duplicate) should be(m1dupEach)
  }

  it should "modify a non-nested list map using each" in {
    modify(ml1)(_.each.a5.name).using(duplicate) should be(m1dupEach)
  }

  it should "throw an exception if there's no such element" in {
    an[NoSuchElementException] should be thrownBy {
      modify(m1)(_.at("K0").a5.name).using(duplicate)
    }
  }
}
Example 65
Source File: LensTest.scala From quicklens with Apache License 2.0 | 5 votes |
package com.softwaremill.quicklens

import com.softwaremill.quicklens.TestData._
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

class LensTest extends AnyFlatSpec with Matchers {
  it should "create reusable lens of the given type" in {
    val lens = modify(_: A1)(_.a2.a3.a4.a5.name)

    lens(a1).using(duplicate) should be(a1dup)
  }

  it should "compose lens" in {
    val lens_a1_a3 = modify(_: A1)(_.a2.a3)
    val lens_a3_name = modify(_: A3)(_.a4.a5.name)

    (lens_a1_a3 andThenModify lens_a3_name)(a1).using(duplicate) should be(a1dup)
  }
}
Example 66
Source File: LensLazyTest.scala From quicklens with Apache License 2.0 | 5 votes |
package com.softwaremill.quicklens

import com.softwaremill.quicklens.TestData._
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

class LensLazyTest extends AnyFlatSpec with Matchers {
  it should "create reusable lens of the given type" in {
    val lens = modify[A1](_.a2.a3.a4.a5.name)
    val lm = lens.using(duplicate)

    lm(a1) should be(a1dup)
  }

  it should "compose lens" in {
    val lens_a1_a3 = modify[A1](_.a2.a3)
    val lens_a3_name = modify[A3](_.a4.a5.name)
    val lm = (lens_a1_a3 andThenModify lens_a3_name).using(duplicate)

    lm(a1) should be(a1dup)
  }
}
Example 67
Source File: UsingSpec.scala From Dsl.scala with Apache License 2.0 | 5 votes |
package com.thoughtworks.dsl
package domains

import com.thoughtworks.dsl.Dsl.{!!, Continuation, reset}
import com.thoughtworks.dsl.keywords.{Using, Yield}
import org.scalatest.Assertion
import org.scalatest.freespec.AnyFreeSpec
import org.scalatest.matchers.should.Matchers

class UsingSpec extends AnyFreeSpec with Matchers {
  def successContinuation[Domain](domain: Domain): (Domain !! Throwable) @reset = Continuation.empty(domain)

  "AutoCloseable" - {
    "scope" - {
      "arm" in {
        var isOpen = false

        def raii: Stream[Int] !! Throwable !! Assertion = Continuation.apply {
          !Yield(1)
          isOpen should be(false)
          val a = !Using {
            !Yield(2)
            new AutoCloseable {
              isOpen should be(false)
              isOpen = true

              def close(): Unit = {
                isOpen should be(true)
                isOpen = false
              }
            }
          }
          !Yield(3)
          isOpen should be(true)
        }

        isOpen should be(false)

        val myException = new Exception
        val stream = raii(_ => _ => Stream.empty)(throw _)
        stream should be(Stream(1, 2, 3))
        isOpen should be(false)
      }
    }
  }
}
Example 68
Source File: ReturnSpec.scala From Dsl.scala with Apache License 2.0 | 5 votes |
package com.thoughtworks.dsl.keywords

import com.thoughtworks.dsl.Dsl.!!
import org.scalatest.freespec.AnyFreeSpec
import org.scalatest.matchers.should.Matchers

final class ReturnSpec extends AnyFreeSpec with Matchers {
  "return a Stream" in {
    def stream: Stream[Int] = !Return[Int](1)
    stream should be(Stream(1))
  }

  "return the left domain" in {
    def continuation: Int !! String = !Return(42)

    continuation { s =>
      throw new AssertionError(s)
    } should be(42)
  }

  "return the right domain" in {
    def continuation: Int !! String = !Return("right value")

    continuation { s =>
      s should be("right value")
      43
    } should be(43)
  }

  "return the middle domain" - {
    "as the return value" in {
      def continuation: Int !! Double !! String = !Return(1.23)

      continuation { s =>
        throw new AssertionError(s)
      } { d =>
        d should be(1.23)
        43
      } should be(43)
    }

    "then the throw expression will not be executed" in {
      def continuation: Int !! Double !! String = {
        throw !Return(1.23)
      }

      continuation { s =>
        throw new AssertionError(s)
      } { d =>
        d should be(1.23)
        43
      } should be(43)
    }
  }
}
Example 69
Source File: ForEachSpec.scala From Dsl.scala with Apache License 2.0 | 5 votes |
package com.thoughtworks.dsl.keywords

import com.thoughtworks.dsl.Dsl.{!!, reset}
import org.scalatest.freespec.AnyFreeSpec
import org.scalatest.matchers.should.Matchers

class ForEachSpec extends AnyFreeSpec with Matchers {
  "foreach" - {
    "val" in {
      val seq = 1 to 10

      def run(): Unit = {
        val plus100 = Seq {
          !ForEach(seq) + 100
        }
        plus100.length should be(1)
        !ForEach(plus100)
      }

      run()
    }

    "def" in {
      val seq = 1 to 10

      def run(): Unit = {
        def plus100 = Seq {
          !Each(seq) + 100
        }
        plus100.length should be(10)
        !ForEach(plus100)
      }

      run()
    }
  }
}
Example 70
Source File: scalazSpec.scala From Dsl.scala with Apache License 2.0 | 5 votes |
package com.thoughtworks.dsl.domains

import com.thoughtworks.dsl.Dsl.!!
import _root_.scalaz.OptionT
import _root_.scalaz.concurrent.Task
import _root_.scalaz.std.stream._
import com.thoughtworks.dsl.domains.scalaz._
import com.thoughtworks.dsl.keywords.{Monadic, Shift, Yield}
import org.scalatest.freespec.AnyFreeSpec
import org.scalatest.matchers.should.Matchers

class scalazSpec extends AnyFreeSpec with Matchers {
  // The opening tests of this spec are truncated in the source listing;
  // only the tail of an earlier generator test survives:
  //         "Leaving generator"
  //       ))
  //     }
  //   }
  // }

  "Given a monadic expression that contains a Scalaz OptionT" - {
    def myOptionalList: OptionT[Stream, String] = {
      // TODO: Is it possible to have `Yield` expressions here?
      val threadId = !Monadic(Stream(0, 1, 2))
      val subThreadId = !Monadic(OptionT(Stream(Some(10), None, Some(30))))
      val subSubThreadId = !Monadic(OptionT(Stream(Some(100), Some(200), None)))
      OptionT[Stream, String](Stream(Some(s"Fork thread $threadId-$subThreadId-$subSubThreadId")))
    }

    "Then it should skip those elements that contain a None" in {
      myOptionalList.run should be(
        Seq(
          Some("Fork thread 0-10-100"),
          Some("Fork thread 0-10-200"),
          None,
          None,
          Some("Fork thread 0-30-100"),
          Some("Fork thread 0-30-200"),
          None,
          Some("Fork thread 1-10-100"),
          Some("Fork thread 1-10-200"),
          None,
          None,
          Some("Fork thread 1-30-100"),
          Some("Fork thread 1-30-200"),
          None,
          Some("Fork thread 2-10-100"),
          Some("Fork thread 2-10-200"),
          None,
          None,
          Some("Fork thread 2-30-100"),
          Some("Fork thread 2-30-200"),
          None
        ))
    }
  }
}
Example 71
Source File: BangNotationSpec.scala From Dsl.scala with Apache License 2.0 | 5 votes |
package com.thoughtworks.dsl.compilerplugin

import com.thoughtworks.dsl.Dsl.shift
import org.scalatest.freespec.AnyFreeSpec
import org.scalatest.matchers.should.Matchers

class BangNotationSpec extends AnyFreeSpec with Matchers {
  "printf problem" in {
    object IntPlaceholder {
      @shift def unary_! : String = ???
      def cpsApply[Domain](f: String => Domain): Int => Domain = { i: Int =>
        f(i.toString)
      }
    }

    object StringPlaceholder {
      @shift def unary_! : String = ???
      def cpsApply[Domain](f: String => Domain): String => Domain = f
    }

    def f1 = "Hello World!"
    def f2 = "Hello " + !StringPlaceholder + "!"
    def f3 = "The value of " + !StringPlaceholder + " is " + !IntPlaceholder + "."

    f1 should be("Hello World!")
    f2.asInstanceOf[String => String]("World") should be("Hello World!")
    f3.asInstanceOf[String => Int => String]("x")(3) should be("The value of x is 3.")
  }
}
Example 72
Source File: BaseTestsSuite.scala From cats-effect with Apache License 2.0 | 5 votes |
package cats.effect

import cats.effect.internals.TestUtils
import cats.effect.laws.util.{TestContext, TestInstances}
import org.scalactic.source
import org.scalatest.Tag
import org.scalatest.matchers.should.Matchers
import org.scalatest.funsuite.AnyFunSuite
import org.scalatestplus.scalacheck.Checkers
import org.typelevel.discipline.Laws
import org.typelevel.discipline.scalatest.FunSuiteDiscipline

class BaseTestsSuite
    extends AnyFunSuite
    with Matchers
    with Checkers
    with FunSuiteDiscipline
    with TestInstances
    with TestUtils {

  def testAsync[A](name: String, tags: Tag*)(f: TestContext => Unit)(implicit pos: source.Position): Unit =
    // Overriding System.err
    test(name, tags: _*)(silenceSystemErr(f(TestContext())))(pos)

  def checkAllAsync(name: String, f: TestContext => Laws#RuleSet): Unit = {
    val context = TestContext()
    val ruleSet = f(context)

    for ((id, prop) <- ruleSet.all.properties)
      test(name + "." + id) {
        silenceSystemErr(check(prop))
      }
  }
}
Example 73
Source File: IOAsyncTests.scala From cats-effect with Apache License 2.0 | 5 votes |
package cats.effect

import org.scalactic.source.Position
import org.scalatest.Assertion
import org.scalatest.matchers.should.Matchers
import org.scalatest.funsuite.AsyncFunSuite

import scala.concurrent.{ExecutionContext, Future, Promise}
import scala.concurrent.duration._
import scala.util.{Failure, Success, Try}

class IOAsyncTests extends AsyncFunSuite with Matchers {
  implicit override def executionContext = ExecutionContext.global
  implicit val timer: Timer[IO] = IO.timer(executionContext)
  implicit val cs: ContextShift[IO] = IO.contextShift(executionContext)

  def testEffectOnRunAsync(source: IO[Int], expected: Try[Int])(implicit pos: Position): Future[Assertion] = {
    val effect = Promise[Int]()
    val attempt = Promise[Try[Int]]()
    effect.future.onComplete(attempt.success)

    val io = source.runAsync {
      case Right(a) => IO { effect.success(a); () }
      case Left(e)  => IO { effect.failure(e); () }
    }

    for (_ <- io.toIO.unsafeToFuture(); v <- attempt.future) yield {
      v shouldEqual expected
    }
  }

  test("IO.pure#runAsync") {
    testEffectOnRunAsync(IO.pure(10), Success(10))
  }

  test("IO.apply#runAsync") {
    testEffectOnRunAsync(IO(10), Success(10))
  }

  test("IO.apply#shift#runAsync") {
    testEffectOnRunAsync(IO.shift.flatMap(_ => IO(10)), Success(10))
  }

  test("IO.raiseError#runAsync") {
    val dummy = new RuntimeException("dummy")
    testEffectOnRunAsync(IO.raiseError(dummy), Failure(dummy))
  }

  test("IO.raiseError#shift#runAsync") {
    val dummy = new RuntimeException("dummy")
    testEffectOnRunAsync(IO.shift.flatMap(_ => IO.raiseError(dummy)), Failure(dummy))
  }

  test("IO.sleep(10.ms)") {
    val io = IO.sleep(10.millis).map(_ => 10)

    for (r <- io.unsafeToFuture()) yield {
      r shouldBe 10
    }
  }

  test("IO.sleep(negative)") {
    val io = IO.sleep(-10.seconds).map(_ => 10)

    for (r <- io.unsafeToFuture()) yield {
      r shouldBe 10
    }
  }
}
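In an AsyncFunSuite like the one above, each test body produces a Future[Assertion]; the Matchers expression at the end of a map or for-comprehension supplies that assertion, and nothing blocks a thread. A minimal self-contained sketch of the pattern:

import org.scalatest.funsuite.AsyncFunSuite
import org.scalatest.matchers.should.Matchers

import scala.concurrent.Future

class AsyncSketch extends AsyncFunSuite with Matchers {
  test("map a Future into an assertion") {
    val computation = Future(21 * 2) // runs on the suite's execution context
    computation.map { r =>
      r shouldEqual 42 // the last expression becomes the Future[Assertion]
    }
  }
}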
Example 74
Source File: IOJSTests.scala From cats-effect with Apache License 2.0 | 5 votes |
package cats.effect

import org.scalatest.matchers.should.Matchers
import org.scalatest.funsuite.AsyncFunSuite

import scala.concurrent.duration.{FiniteDuration, _}
import scala.concurrent.{ExecutionContext, Future}
import scala.scalajs.js.timers.setTimeout

class IOJSTests extends AsyncFunSuite with Matchers {
  implicit override def executionContext = ExecutionContext.global

  def delayed[A](duration: FiniteDuration)(f: => A): IO[A] =
    IO.async { callback =>
      setTimeout(duration.toMillis.toDouble)(callback(Right(f)))
    }

  test("unsafeToFuture works") {
    delayed(100.millis)(10).unsafeToFuture().map { r =>
      r shouldEqual 10
    }
  }

  test("unsafeRunSync is unsupported for async stuff") {
    Future {
      try {
        delayed(100.millis)(10).unsafeRunSync()
        fail("Expected UnsupportedOperationException")
      } catch {
        case _: UnsupportedOperationException => succeed
      }
    }
  }
}
Example 75
Source File: JvmIOTimerTests.scala From cats-effect with Apache License 2.0 | 5 votes |
package cats.effect
package internals

import java.util.concurrent.{ScheduledThreadPoolExecutor, TimeUnit}
import org.scalatest.matchers.should.Matchers
import org.scalatest.funsuite.AnyFunSuite
import scala.util.control.NonFatal

class JvmIOTimerTests extends AnyFunSuite with Matchers {
  private def withScheduler(props: Map[String, String])(f: ScheduledThreadPoolExecutor => Unit): Unit = {
    val s = IOTimer.mkGlobalScheduler(props)
    try f(s)
    finally {
      try s.shutdownNow()
      catch { case NonFatal(e) => e.printStackTrace() }
    }
  }

  test("global scheduler: default core pool size") {
    withScheduler(Map.empty) { s =>
      s.getCorePoolSize shouldBe 2
    }
  }

  test("global scheduler: custom core pool size") {
    withScheduler(Map("cats.effect.global_scheduler.threads.core_pool_size" -> "3")) { s =>
      s.getCorePoolSize shouldBe 3
    }
  }

  test("global scheduler: invalid core pool size") {
    withScheduler(Map("cats.effect.global_scheduler.threads.core_pool_size" -> "-1")) { s =>
      s.getCorePoolSize shouldBe 2
    }
  }

  test("global scheduler: malformed core pool size") {
    withScheduler(Map("cats.effect.global_scheduler.threads.core_pool_size" -> "banana")) { s =>
      s.getCorePoolSize shouldBe 2
    }
  }

  test("global scheduler: default core thread timeout") {
    withScheduler(Map.empty) { s =>
      s.allowsCoreThreadTimeOut shouldBe false
    }
  }

  test("global scheduler: custom core thread timeout") {
    withScheduler(Map("cats.effect.global_scheduler.keep_alive_time_ms" -> "1000")) { s =>
      s.allowsCoreThreadTimeOut shouldBe true
      s.getKeepAliveTime(TimeUnit.MILLISECONDS) shouldBe 1000
    }
  }

  test("global scheduler: invalid core thread timeout") {
    withScheduler(Map("cats.effect.global_scheduler.keep_alive_time_ms" -> "0")) { s =>
      s.allowsCoreThreadTimeOut shouldBe false
    }
  }

  test("global scheduler: malformed core thread timeout") {
    withScheduler(Map("cats.effect.global_scheduler.keep_alive_time_ms" -> "feral hogs")) { s =>
      s.allowsCoreThreadTimeOut shouldBe false
    }
  }
}
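withScheduler above is an instance of the loan pattern: acquire, lend to a function, and clean up even if cleanup itself throws. A generic sketch under the assumption that the resource is AutoCloseable (withResource is our name, not a cats-effect API):

import scala.util.control.NonFatal

object LoanSketch extends App {
  def withResource[A <: AutoCloseable, B](acquire: => A)(use: A => B): B = {
    val r = acquire
    try use(r)
    finally {
      // Mirror the test above: swallow non-fatal cleanup failures, but log them.
      try r.close()
      catch { case NonFatal(e) => e.printStackTrace() }
    }
  }

  withResource(new java.io.StringReader("ok"))(r => assert(r.read() == 'o'))
}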
Example 76
Source File: IOAppTests.scala From cats-effect with Apache License 2.0 | 5 votes |
package cats
package effect

import scala.concurrent.ExecutionContext
import cats.effect.internals.{IOAppPlatform, TestUtils, TrampolineEC}
import org.scalatest.matchers.should.Matchers
import org.scalatest.funsuite.AsyncFunSuite

class IOAppTests extends AsyncFunSuite with Matchers with TestUtils {
  test("exits with specified code") {
    IOAppPlatform
      .mainFiber(Array.empty, Eval.now(implicitly[ContextShift[IO]]), Eval.now(implicitly[Timer[IO]]))(_ =>
        IO.pure(ExitCode(42))
      )
      .flatMap(_.join)
      .unsafeToFuture()
      .map(_ shouldEqual 42)
  }

  test("accepts arguments") {
    IOAppPlatform
      .mainFiber(Array("1", "2", "3"), Eval.now(implicitly), Eval.now(implicitly))(args =>
        IO.pure(ExitCode(args.mkString.toInt))
      )
      .flatMap(_.join)
      .unsafeToFuture()
      .map(_ shouldEqual 123)
  }

  test("raised error exits with 1") {
    silenceSystemErr {
      IOAppPlatform
        .mainFiber(Array.empty, Eval.now(implicitly), Eval.now(implicitly))(_ => IO.raiseError(new Exception()))
        .flatMap(_.join)
        .unsafeToFuture()
        .map(_ shouldEqual 1)
    }
  }

  implicit override def executionContext: ExecutionContext = TrampolineEC.immediate
  implicit val timer: Timer[IO] = IO.timer(executionContext)
  implicit val cs: ContextShift[IO] = IO.contextShift(executionContext)
}
Example 77
Source File: ArrayStackTests.scala From cats-effect with Apache License 2.0 | 5 votes |
package cats.effect.internals

import org.scalatest.matchers.should.Matchers
import org.scalatest.funsuite.AnyFunSuite

class ArrayStackTests extends AnyFunSuite with Matchers with TestUtils {
  test("push and pop 8 items") {
    val stack = new ArrayStack[String]()
    var times = 0

    while (times < 10) {
      assert(stack.isEmpty, "stack.isEmpty")
      for (i <- 0 until 8) stack.push(i.toString)

      var list = List.empty[String]
      while (!stack.isEmpty) {
        assert(!stack.isEmpty, "!stack.isEmpty")
        list = stack.pop() :: list
      }

      list shouldBe (0 until 8).map(_.toString).toList
      stack.pop().asInstanceOf[AnyRef] shouldBe null
      stack.isEmpty shouldBe true

      times += 1
    }
  }

  test("push and pop 100 items") {
    val stack = new ArrayStack[String]()
    var times = 0

    while (times < 10) {
      assert(stack.isEmpty, "stack.isEmpty")
      for (i <- 0 until 100) stack.push(i.toString)

      var list = List.empty[String]
      while (!stack.isEmpty) {
        assert(!stack.isEmpty, "!stack.isEmpty")
        list = stack.pop() :: list
      }

      list shouldBe (0 until 100).map(_.toString).toList
      stack.pop().asInstanceOf[AnyRef] shouldBe null
      stack.isEmpty shouldBe true

      times += 1
    }
  }

  test("pushAll(stack)") {
    val stack = new ArrayStack[String]()
    val stack2 = new ArrayStack[String]()
    for (i <- 0 until 100) stack2.push(i.toString)

    stack.pushAll(stack2)
    var list = List.empty[String]
    while (!stack.isEmpty) {
      assert(!stack.isEmpty)
      list = stack.pop() :: list
    }

    list shouldBe (0 until 100).map(_.toString).toList.reverse
    stack.pop().asInstanceOf[AnyRef] shouldBe null
    stack.isEmpty shouldBe true
    stack2.isEmpty shouldBe false
  }

  test("pushAll(iterable)") {
    val stack = new ArrayStack[String]()
    val expected = (0 until 100).map(_.toString).toList
    stack.pushAll(expected)

    var list = List.empty[String]
    while (!stack.isEmpty) {
      assert(!stack.isEmpty)
      list = stack.pop() :: list
    }

    list shouldBe expected
    stack.pop().asInstanceOf[AnyRef] shouldBe null
    stack.isEmpty shouldBe true
  }

  test("iterator") {
    val stack = new ArrayStack[String]()
    val expected = (0 until 100).map(_.toString).toList
    for (i <- expected) stack.push(i)
    stack.iteratorReversed.toList shouldBe expected.reverse
  }
}
Example 78
Source File: ForwardCancelableTests.scala From cats-effect with Apache License 2.0 | 5 votes |
package cats.effect.internals

import cats.effect.IO
import org.scalatest.matchers.should.Matchers
import org.scalatest.funsuite.AnyFunSuite

class ForwardCancelableTests extends AnyFunSuite with Matchers {
  test("cancel() after complete") {
    var effect = 0
    val ref = ForwardCancelable()
    ref.complete(IO(effect += 1))
    effect shouldBe 0

    ref.cancel.unsafeRunAsyncAndForget()
    effect shouldBe 1

    // Weak idempotency guarantees (not thread-safe)
    ref.cancel.unsafeRunAsyncAndForget()
    effect shouldBe 1
  }

  test("cancel() before complete") {
    var effect = 0
    val ref = ForwardCancelable()
    ref.cancel.unsafeRunAsyncAndForget()
    effect shouldBe 0

    ref.complete(IO(effect += 1))
    effect shouldBe 1

    intercept[IllegalStateException](ref.complete(IO(effect += 2)))
    // completed task was canceled before error was thrown
    effect shouldBe 3

    ref.cancel.unsafeRunAsyncAndForget()
    effect shouldBe 3
  }

  test("complete twice before cancel") {
    var effect = 0
    val ref = ForwardCancelable()
    ref.complete(IO(effect += 1))
    effect shouldBe 0

    intercept[IllegalStateException](ref.complete(IO(effect += 2)))
    effect shouldBe 2

    ref.cancel.unsafeRunAsyncAndForget()
    effect shouldBe 3
  }
}
Example 79
Source File: LinkedMapTests.scala From cats-effect with Apache License 2.0 | 5 votes |
package cats.effect.internals

import org.scalatest.matchers.should.Matchers
import org.scalatest.funsuite.AnyFunSuite

final class LinkedMapTests extends AnyFunSuite with Matchers {
  test("empty map") {
    val map = LinkedMap.empty[Int, Int]
    map.isEmpty shouldBe true
  }

  test("inserting entries") {
    val ns = (0 until 10).toList
    val map = ns.foldLeft(LinkedMap.empty[Int, Int])((map, i) => map.updated(i, i))
    map.isEmpty shouldBe false
    map.keys.toList shouldBe ns
    map.values.toList shouldBe ns
  }

  test("dequeueing entries") {
    val ns = (0 until 10).toList
    val map = ns.foldLeft(LinkedMap.empty[Int, Int])((map, i) => map.updated(i, i))

    var n = 0
    var acc = map
    while (!acc.isEmpty) {
      val res = acc.dequeue
      res._1 shouldBe n
      n += 1
      acc = res._2
    }
  }

  test("removing entry") {
    val ns = (0 until 10).toList
    val map = ns.foldLeft(LinkedMap.empty[Int, Int])((map, i) => map.updated(i, i))
    val n = 2

    assert(map.keys.exists(_ == n))
    assert(map.values.exists(_ == n))
    map.keys.exists(_ == n) shouldBe true
    map.values.exists(_ == n) shouldBe true

    val map2 = map - n
    map2.keys.exists(_ == n) shouldBe false
    map2.values.exists(_ == n) shouldBe false
  }
}
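The dequeue loop in "dequeueing entries" mirrors the standard library's immutable Queue, where dequeue returns a pair of the head and the remaining collection. A sketch of the same drain loop for comparison (object name is ours):

import scala.collection.immutable.Queue

object DequeueSketch extends App {
  var q = Queue(0, 1, 2)
  var n = 0
  while (q.nonEmpty) {
    val (head, rest) = q.dequeue // (element, rest-of-queue), just like LinkedMap.dequeue above
    assert(head == n)
    n += 1
    q = rest
  }
}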
Example 80
Source File: TrampolineECTests.scala From cats-effect with Apache License 2.0 | 5 votes |
package cats.effect
package internals

import org.scalatest.matchers.should.Matchers
import org.scalatest.funsuite.AnyFunSuite
import cats.effect.internals.TrampolineEC.immediate
import scala.concurrent.ExecutionContext
import cats.effect.internals.IOPlatform.isJVM
import scala.collection.immutable.Queue

class TrampolineECTests extends AnyFunSuite with Matchers with TestUtils {
  implicit val ec: ExecutionContext = immediate

  def executeImmediate(f: => Unit): Unit =
    ec.execute(new Runnable { def run(): Unit = f })

  test("execution should be immediate") {
    var effect = 0

    executeImmediate {
      effect += 1

      executeImmediate {
        effect += 2

        executeImmediate {
          effect += 3
        }
      }
    }

    effect shouldEqual 1 + 2 + 3
  }

  test("concurrent execution") {
    var effect = List.empty[Int]

    executeImmediate {
      executeImmediate { effect = 1 :: effect }
      executeImmediate { effect = 2 :: effect }
      executeImmediate { effect = 3 :: effect }
    }

    effect shouldEqual List(1, 2, 3)
  }

  test("stack safety") {
    var effect = 0
    def loop(n: Int, acc: Int): Unit =
      executeImmediate {
        if (n > 0) loop(n - 1, acc + 1)
        else effect = acc
      }

    val n = if (isJVM) 100000 else 5000
    loop(n, 0)

    effect shouldEqual n
  }

  test("on blocking it should fork") {
    assume(isJVM, "test relevant only for the JVM")
    import scala.concurrent.blocking

    var effects = Queue.empty[Int]
    executeImmediate {
      executeImmediate { effects = effects.enqueue(4) }
      executeImmediate { effects = effects.enqueue(4) }

      effects = effects.enqueue(1)
      blocking { effects = effects.enqueue(2) }
      effects = effects.enqueue(3)
    }

    effects shouldBe Queue(1, 4, 4, 2, 3)
  }

  test("thrown exceptions should get logged to System.err (immediate)") {
    val dummy1 = new RuntimeException("dummy1")
    val dummy2 = new RuntimeException("dummy2")
    var effects = 0

    val output = catchSystemErr {
      executeImmediate {
        executeImmediate(effects += 1)
        executeImmediate(effects += 1)
        executeImmediate {
          executeImmediate(effects += 1)
          executeImmediate(effects += 1)
          throw dummy2
        }
        throw dummy1
      }
    }

    output should include("dummy1")
    output should include("dummy2")
    effects shouldBe 4
  }
}
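The "on blocking it should fork" test leans on scala.concurrent.blocking, a standard-library hint that the enclosed code may block; an ExecutionContext that installs a BlockContext (as TrampolineEC does here) can react by moving work off the current thread. A minimal sketch of marking a blocking region (no forking is claimed outside such a context):

import scala.concurrent.blocking

object BlockingSketch extends App {
  blocking {
    Thread.sleep(10) // a cooperating ExecutionContext may schedule this elsewhere
  }
}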
Example 81
Source File: CancelUtilsTests.scala From cats-effect with Apache License 2.0 | 5 votes |
package cats.effect.internals

import java.io.ByteArrayOutputStream
import cats.effect.IO
import org.scalatest.matchers.should.Matchers
import org.scalatest.funsuite.AnyFunSuite
import scala.util.control.NonFatal

class CancelUtilsTests extends AnyFunSuite with Matchers with TestUtils {
  test("cancelAll works for zero references") {
    CancelUtils.cancelAll().unsafeRunSync()
  }

  test("cancelAll works for one reference") {
    var wasCanceled = false
    CancelUtils.cancelAll(IO { wasCanceled = true }).unsafeRunSync()
    wasCanceled shouldBe true
  }

  test("cancelAll catches error from one reference") {
    val dummy = new RuntimeException("dummy")
    var wasCanceled1 = false
    var wasCanceled2 = false

    val io = CancelUtils.cancelAll(
      IO { wasCanceled1 = true },
      IO(throw dummy),
      IO { wasCanceled2 = true }
    )

    try {
      io.unsafeRunSync()
      fail("should have thrown an exception")
    } catch {
      case `dummy` =>
        wasCanceled1 shouldBe true
        wasCanceled2 shouldBe true
    }
  }

  test("cancelAll catches the first error and logs the rest") {
    val dummy1 = new RuntimeException("dummy1")
    val dummy2 = new RuntimeException("dummy2")
    var wasCanceled1 = false
    var wasCanceled2 = false

    val io = CancelUtils.cancelAll(
      IO { wasCanceled1 = true },
      IO(throw dummy1),
      IO(throw dummy2),
      IO { wasCanceled2 = true }
    )

    val sysErr = new ByteArrayOutputStream()
    try {
      catchSystemErrInto(sysErr) {
        io.unsafeRunSync()
      }
      fail("should have thrown an exception")
    } catch {
      case NonFatal(error) =>
        error shouldBe dummy1
        sysErr.toString("utf-8") should include("dummy2")
        dummy1.getSuppressed shouldBe empty // ensure memory isn't leaked with addSuppressed
        dummy2.getSuppressed shouldBe empty // ensure memory isn't leaked with addSuppressed
    }
  }
}
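Note the backticked pattern `case \`dummy\` =>` above: a stable-identifier pattern that matches the exact exception instance by reference, not by type, so a different RuntimeException would not be swallowed. A minimal sketch of the same trick (object name is ours):

object InstanceMatchSketch extends App {
  val boom = new RuntimeException("boom")
  try throw boom
  catch {
    case `boom` => () // backticks: match this very instance; anything else rethrows
  }
}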
Example 82
Source File: ConcurrentTests.scala From cats-effect with Apache License 2.0 | 5 votes |
package cats.effect

import cats.Eq
import cats.effect.concurrent.Ref
import cats.effect.implicits._
import cats.implicits._
import org.scalatest.compatible.Assertion
import org.scalatest.funsuite.AsyncFunSuite
import org.scalatest.Succeeded
import org.scalatest.matchers.should.Matchers
import scala.concurrent.duration._
import scala.concurrent.{ExecutionContext, Future}

class ConcurrentTests extends AsyncFunSuite with Matchers {
  implicit override def executionContext: ExecutionContext = ExecutionContext.Implicits.global
  implicit val timer: Timer[IO] = IO.timer(executionContext)
  implicit val cs: ContextShift[IO] = IO.contextShift(executionContext)

  private val smallDelay: IO[Unit] = timer.sleep(20.millis)

  private def awaitEqual[A: Eq](t: IO[A], success: A): IO[Unit] =
    t.flatMap(a => if (Eq[A].eqv(a, success)) IO.unit else smallDelay *> awaitEqual(t, success))

  private def run(t: IO[Unit]): Future[Assertion] = t.as(Succeeded).unsafeToFuture()

  test("F.parTraverseN(n)(collection)(f)") {
    val finalValue = 100
    val r = Ref.unsafe[IO, Int](0)
    val list = List.range(0, finalValue)
    val modifies = implicitly[Concurrent[IO]].parTraverseN(3)(list)(_ => IO.shift *> r.update(_ + 1))
    run(IO.shift *> modifies.start *> awaitEqual(r.get, finalValue))
  }

  test("collection.parTraverseN(n)(f)") {
    val finalValue = 100
    val r = Ref.unsafe[IO, Int](0)
    val list = List.range(0, finalValue)
    val modifies = list.parTraverseN(3)(_ => IO.shift *> r.update(_ + 1))
    run(IO.shift *> modifies.start *> awaitEqual(r.get, finalValue))
  }

  test("F.parSequenceN(n)(collection)") {
    val finalValue = 100
    val r = Ref.unsafe[IO, Int](0)
    val list = List.fill(finalValue)(IO.shift *> r.update(_ + 1))
    val modifies = implicitly[Concurrent[IO]].parSequenceN(3)(list)
    run(IO.shift *> modifies.start *> awaitEqual(r.get, finalValue))
  }

  test("collection.parSequenceN(n)") {
    val finalValue = 100
    val r = Ref.unsafe[IO, Int](0)
    val list = List.fill(finalValue)(IO.shift *> r.update(_ + 1))
    val modifies = list.parSequenceN(3)
    run(IO.shift *> modifies.start *> awaitEqual(r.get, finalValue))
  }
}
Example 83
Source File: ExitCodeTests.scala From cats-effect with Apache License 2.0 | 5 votes |
package cats
package effect
package concurrent

import org.scalatest.matchers.should.Matchers
import org.scalatest.funsuite.AnyFunSuite
import org.scalatestplus.scalacheck.Checkers

class ExitCodeTests extends AnyFunSuite with Matchers with Checkers {
  test("fromInt(i) == fromInt(i & 0xff)") {
    check { (i: Int) =>
      ExitCode(i) == ExitCode(i & 0xff)
    }
  }

  test("code is in range from 0 to 255, inclusive") {
    check { (i: Int) =>
      val ec = ExitCode(i)
      ec.code >= 0 && ec.code < 256
    }
  }
}
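The check { ... } calls above come from the Checkers trait: a Boolean-valued function becomes a ScalaCheck property and is run against many generated inputs. A minimal, self-contained sketch of the same pattern on plain integer masking (class and test names are ours):

import org.scalatest.funsuite.AnyFunSuite
import org.scalatestplus.scalacheck.Checkers

class MaskingSpec extends AnyFunSuite with Checkers {
  test("masking with 0xff keeps values in 0..255") {
    check { (i: Int) =>
      val masked = i & 0xff // ScalaCheck supplies arbitrary Ints, including negatives
      masked >= 0 && masked < 256
    }
  }
}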
Example 84
Source File: AsyncTests.scala From cats-effect with Apache License 2.0 | 5 votes |
package cats.effect

import cats.Eq
import cats.effect.concurrent.Ref
import cats.effect.implicits._
import cats.implicits._
import org.scalatest.compatible.Assertion
import org.scalatest.funsuite.AsyncFunSuite
import org.scalatest.Succeeded
import org.scalatest.matchers.should.Matchers
import scala.concurrent.duration._
import scala.concurrent.{ExecutionContext, Future}

class AsyncTests extends AsyncFunSuite with Matchers {
  implicit override def executionContext: ExecutionContext = ExecutionContext.Implicits.global
  implicit val timer: Timer[IO] = IO.timer(executionContext)
  implicit val cs: ContextShift[IO] = IO.contextShift(executionContext)

  private val smallDelay: IO[Unit] = timer.sleep(20.millis)

  private def awaitEqual[A: Eq](t: IO[A], success: A): IO[Unit] =
    t.flatMap(a => if (Eq[A].eqv(a, success)) IO.unit else smallDelay *> awaitEqual(t, success))

  private def run(t: IO[Unit]): Future[Assertion] = t.as(Succeeded).unsafeToFuture()

  test("F.parTraverseN(n)(collection)(f)") {
    val finalValue = 100
    val r = Ref.unsafe[IO, Int](0)
    val list = List.range(0, finalValue)
    val modifies = implicitly[Async[IO]].parTraverseN(3)(list)(_ => IO.shift *> r.update(_ + 1))
    run(IO.shift *> modifies.start *> awaitEqual(r.get, finalValue))
  }

  test("F.parSequenceN(n)(collection)") {
    val finalValue = 100
    val r = Ref.unsafe[IO, Int](0)
    val list = List.fill(finalValue)(IO.shift *> r.update(_ + 1))
    val modifies = implicitly[Async[IO]].parSequenceN(3)(list)
    run(IO.shift *> modifies.start *> awaitEqual(r.get, finalValue))
  }
}
Example 85
Source File: SchedulerTestkit.scala From fusion-data with Apache License 2.0 | 5 votes |
package mass.job

import akka.actor.ActorSystem
import akka.http.scaladsl.testkit.{ RouteTestTimeout, ScalatestRouteTest }
import com.typesafe.scalalogging.StrictLogging
import fusion.inject.guice.testkit.GuiceApplicationTestkit
import fusion.json.jackson.http.JacksonSupport
import org.scalatest.BeforeAndAfterAll
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike

import scala.concurrent.duration._

abstract class SchedulerTestkit
    extends GuiceApplicationTestkit
    with AnyWordSpecLike
    with Matchers
    with BeforeAndAfterAll
    with ScalatestRouteTest
    with StrictLogging {
  override protected def createActorSystem(): ActorSystem = application.classicSystem
  implicit def routeTestTimeout: RouteTestTimeout = RouteTestTimeout(10.seconds)
  protected val jobScheduler: JobScheduler = injectInstance[JobScheduler]
  protected val jacksonSupport: JacksonSupport = injectInstance[JacksonSupport]
}
Example 86
Source File: MassSettingsTest.scala From fusion-data with Apache License 2.0 | 5 votes |
package mass

import helloscala.common.Configuration
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class MassSettingsTest extends AnyWordSpec with Matchers {
  "MassSettings" should {
    val config = Configuration.generateConfig()
    val settings = MassSettings(config)

    "compiles" in {
      settings.compiles.scala212Home should not be empty
      println(settings.compiles.scala212Home)
    }

    "test key" in {
      config.getString("test.key") shouldBe "test.key"
    }
  }
}
Example 87
Source File: RespondeeTests.scala From streamee with Apache License 2.0 | 5 votes |
package io.moia.streamee

import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AsyncWordSpec
import scala.concurrent.duration.DurationInt

final class RespondeeTests extends AsyncWordSpec with AkkaSuite with Matchers {
  import Respondee._

  "A Respondee" should {
    "fail its promise with a TimeoutException if not receiving a Response in time" in {
      val timeout = 100.milliseconds
      val (_, response) = Respondee.spawn[Int](timeout)
      response.future.failed.map {
        case ResponseTimeoutException(t) => t shouldBe timeout
      }
    }

    "successfully complete its promise with the received Response" in {
      val (respondee, response) = Respondee.spawn[Int](1.second)
      respondee ! Response(42)
      response.future.map(_ shouldBe 42)
    }
  }
}
Example 88
Source File: TransactionTest.scala From stream-reactor with Apache License 2.0 | 5 votes |
package com.datamountaineer.streamreactor.connect.blockchain.data

import com.datamountaineer.streamreactor.connect.blockchain.json.JacksonJson
import com.datamountaineer.streamreactor.connect.blockchain.{GetResourcesFromDirectoryFn, Using}
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class TransactionTest extends AnyWordSpec with Matchers with Using {
  "Transaction" should {
    "be initialized from json" in {
      GetResourcesFromDirectoryFn("/transactions").foreach { file =>
        val json = scala.io.Source.fromFile(file).mkString
        val message = JacksonJson.fromJson[BlockchainMessage](json)
        message.x.isDefined shouldBe true
        message.x.get.toSourceRecord("test", 0, None)
      }
    }

    "be returned from a list of json objects" in {
      scala.io.Source.fromFile(getClass.getResource("/transactions_bundle.txt").toURI.getPath)
        .mkString
        .split(';')
        .foreach { json =>
          val msg = JacksonJson.fromJson[BlockchainMessage](json)
          msg.x.isDefined shouldBe true
          msg.x.get.toSourceRecord("test", 0, None)
        }
    }
  }
}
Example 89
Source File: BlockchainSourceTaskTest.scala From stream-reactor with Apache License 2.0 | 5 votes |
package com.datamountaineer.streamreactor.connect.blockchain.source

import java.util

import com.datamountaineer.streamreactor.connect.blockchain.config.BlockchainConfigConstants
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class BlockchainSourceTaskTest extends AnyWordSpec with Matchers {
  "BlockchainSourceTask" should {
    "start and stop on request" ignore {
      val task = new BlockchainSourceTask()
      val map = new util.HashMap[String, String]
      map.put(BlockchainConfigConstants.KAFKA_TOPIC, "sometopic")
      task.start(map)
      //Thread.sleep(50000)
      //val records = task.poll()
      task.stop()
    }
  }
}
Example 90
Source File: SinkRecordToDocumentTest.scala From stream-reactor with Apache License 2.0 | 5 votes |
package com.datamountaineer.streamreactor.connect.azure.documentdb.sink

import com.datamountaineer.streamreactor.connect.azure.documentdb.Json
import com.datamountaineer.streamreactor.connect.azure.documentdb.config.DocumentDbSinkSettings
import com.datamountaineer.streamreactor.connect.errors.NoopErrorPolicy
import com.datamountaineer.streamreactor.connect.schemas.ConverterUtil
import com.microsoft.azure.documentdb.{ConsistencyLevel, Document}
import org.apache.kafka.connect.data.Schema
import org.apache.kafka.connect.sink.SinkRecord
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class SinkRecordToDocumentTest extends AnyWordSpec with Matchers with ConverterUtil {
  private val connection = "https://accountName.documents.azure.com:443/"

  "SinkRecordToDocument" should {
    "convert Kafka Struct to an Azure Document DB Document" in {
      for (i <- 1 to 4) {
        val json = scala.io.Source.fromFile(getClass.getResource(s"/transaction$i.json").toURI.getPath).mkString
        val tx = Json.fromJson[Transaction](json)

        val record = new SinkRecord("topic1", 0, null, null, Transaction.ConnectSchema, tx.toStruct(), 0)

        implicit val settings = DocumentDbSinkSettings(
          connection,
          "secret",
          "database",
          Seq.empty,
          Map("topic1" -> Set.empty[String]),
          Map("topic1" -> Map.empty),
          Map("topic1" -> Set.empty),
          NoopErrorPolicy(),
          ConsistencyLevel.Session,
          false,
          None)

        val (document, _) = SinkRecordToDocument(record)
        val expected = new Document(json)

        //comparing string representation; we have more specific types given the schema
        document.toString shouldBe expected.toString
      }
    }

    "convert String Schema + Json payload to an Azure Document DB Document" in {
      for (i <- 1 to 4) {
        val json = scala.io.Source.fromFile(getClass.getResource(s"/transaction$i.json").toURI.getPath).mkString

        val record = new SinkRecord("topic1", 0, null, null, Schema.STRING_SCHEMA, json, 0)

        implicit val settings = DocumentDbSinkSettings(
          connection,
          "secret",
          "database",
          Seq.empty,
          Map("topic1" -> Set.empty[String]),
          Map("topic1" -> Map.empty),
          Map("topic1" -> Set.empty),
          NoopErrorPolicy(),
          ConsistencyLevel.Session,
          false,
          None)

        val (document, _) = SinkRecordToDocument(record)
        val expected = new Document(json)

        //comparing string representation; we have more specific types given the schema
        document.toString() shouldBe expected.toString
      }
    }

    "convert Schemaless + Json payload to an Azure Document DB Document" in {
      for (i <- 1 to 4) {
        val json = scala.io.Source.fromFile(getClass.getResource(s"/transaction$i.json").toURI.getPath).mkString

        val record = new SinkRecord("topic1", 0, null, null, Schema.STRING_SCHEMA, json, 0)

        implicit val settings = DocumentDbSinkSettings(
          connection,
          "secret",
          "database",
          Seq.empty,
          Map("topic1" -> Set.empty[String]),
          Map("topic1" -> Map.empty),
          Map("topic1" -> Set.empty),
          NoopErrorPolicy(),
          ConsistencyLevel.Session,
          false,
          None)

        val (document, _) = SinkRecordToDocument(record)
        val expected = new Document(json)

        //comparing string representation; we have more specific types given the schema
        document.toString() shouldBe expected.toString
      }
    }
  }
}
Example 91
Source File: KeysExtractorTest.scala From stream-reactor with Apache License 2.0 | 5 votes |
package com.datamountaineer.streamreactor.connect.azure.documentdb.sink

import java.util

import com.datamountaineer.streamreactor.connect.azure.documentdb.Json
import com.sksamuel.avro4s.RecordFormat
import io.confluent.connect.avro.AvroData
import org.apache.kafka.common.config.ConfigException
import org.apache.kafka.connect.data.Struct
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

import scala.collection.JavaConverters._

class KeysExtractorTest extends AnyWordSpec with Matchers {
  private val avroData = new AvroData(4)

  case class WithNested(id: Int, nested: SomeTest)
  case class SomeTest(name: String, value: Double, flags: Seq[Int], map: Map[String, String])

  "KeysExtractor" should {
    "extract keys from JSON" in {
      val json = scala.io.Source.fromFile(getClass.getResource(s"/transaction1.json").toURI.getPath).mkString
      val jvalue = Json.parseJson(json)
      val actual = KeysExtractor.fromJson(jvalue, Set("lock_time", "rbf"))
      actual shouldBe List("lock_time" -> 9223372036854775807L, "rbf" -> true)
    }

    "throw exception when extracting the keys from JSON" in {
      val json = scala.io.Source.fromFile(getClass.getResource(s"/transaction1.json").toURI.getPath).mkString
      val jvalue = Json.parseJson(json)
      intercept[ConfigException] {
        KeysExtractor.fromJson(jvalue, Set("inputs"))
      }
    }

    "extract keys from a Map" in {
      val actual = KeysExtractor.fromMap(Map("key1" -> 12, "key2" -> 10L, "key3" -> "tripple").asJava, Set("key1", "key3"))
      actual shouldBe Set("key1" -> 12, "key3" -> "tripple")
    }

    "extract keys from a Map should throw an exception if the key is another map" in {
      intercept[ConfigException] {
        KeysExtractor.fromMap(Map("key1" -> 12, "key2" -> 10L, "key3" -> Map.empty[String, String]).asJava, Set("key1", "key3"))
      }
    }

    "extract keys from a Map should throw an exception if the key is an array" in {
      intercept[ConfigException] {
        KeysExtractor.fromMap(Map("key1" -> 12, "key2" -> 10L, "key3" -> new util.ArrayList[String]).asJava, Set("key1", "key3"))
      }
    }

    "extract from a struct" in {
      val format = RecordFormat[SomeTest]
      val avro = format.to(SomeTest("abc", 12.5, Seq.empty, Map.empty))
      val struct = avroData.toConnectData(avro.getSchema, avro)
      KeysExtractor.fromStruct(struct.value().asInstanceOf[Struct], Set("name")) shouldBe Set("name" -> "abc")
    }

    "extract from a struct should throw an exception if a key is an array" in {
      val format = RecordFormat[SomeTest]
      val avro = format.to(SomeTest("abc", 12.5, Seq.empty, Map.empty))
      intercept[ConfigException] {
        val struct = avroData.toConnectData(avro.getSchema, avro)
        KeysExtractor.fromStruct(struct.value().asInstanceOf[Struct], Set("flags"))
      }
    }

    "extract from a struct should throw an exception if a key is a map" in {
      val format = RecordFormat[SomeTest]
      val avro = format.to(SomeTest("abc", 12.5, Seq.empty, Map.empty))
      intercept[ConfigException] {
        val struct = avroData.toConnectData(avro.getSchema, avro)
        KeysExtractor.fromStruct(struct.value().asInstanceOf[Struct], Set("map"))
      }
    }

    "extract from a struct should throw an exception if a key is a struct" in {
      val format = RecordFormat[WithNested]
      val avro = format.to(WithNested(1, SomeTest("abc", 12.5, Seq.empty, Map.empty)))
      intercept[ConfigException] {
        val struct = avroData.toConnectData(avro.getSchema, avro)
        KeysExtractor.fromStruct(struct.value().asInstanceOf[Struct], Set("nested"))
      }
    }
  }
}
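A note on intercept, used heavily above: it both asserts that the expected exception type is thrown and returns the instance, so you can assert on its message or fields afterwards. A minimal sketch (object name is ours):

import org.scalatest.Assertions._
import org.scalatest.matchers.should.Matchers._

object InterceptSketch extends App {
  // require(false, msg) throws IllegalArgumentException("requirement failed: msg")
  val e = intercept[IllegalArgumentException](require(false, "bad input"))
  e.getMessage should include("bad input")
}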
Example 92
Source File: MongoSinkConnectorTest.scala From stream-reactor with Apache License 2.0 | 5 votes |
package com.datamountaineer.streamreactor.connect.mongodb.sink

import com.datamountaineer.streamreactor.connect.mongodb.config.MongoConfigConstants
import org.mockito.MockitoSugar
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

import scala.collection.JavaConverters._

class MongoSinkConnectorTest extends AnyWordSpec with Matchers with MockitoSugar {
  "MongoSinkConnector" should {
    "return one task when multiple routes are provided but maxTasks is 1" in {
      val map = Map(
        "topics" -> "topic1, topicA",
        MongoConfigConstants.DATABASE_CONFIG -> "database1",
        MongoConfigConstants.CONNECTION_CONFIG -> "mongodb://localhost:27017",
        MongoConfigConstants.KCQL_CONFIG -> "INSERT INTO collection1 SELECT * FROM topic1; INSERT INTO coll2 SELECT * FROM topicA"
      ).asJava

      val connector = new MongoSinkConnector()
      connector.start(map)
      connector.taskConfigs(1).size() shouldBe 1
    }
  }
}
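These connector tests constantly shuttle between Scala and Java collections, because the Kafka Connect API wants java.util types while the test data is written as Scala Maps. A minimal sketch of the pre-2.13 JavaConverters round trip used throughout (object name is ours):

import scala.collection.JavaConverters._

object ConvertersSketch extends App {
  val props: java.util.Map[String, String] = Map("k" -> "v").asJava // Scala -> Java view
  val back: Map[String, String] = props.asScala.toMap              // Java -> Scala copy
  assert(back("k") == "v")
}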
Example 93
Source File: TestCassandraSinkConnector.scala From stream-reactor with Apache License 2.0 | 5 votes |
package com.datamountaineer.streamreactor.connect.cassandra.sink

import com.datamountaineer.streamreactor.connect.cassandra.TestConfig
import com.datamountaineer.streamreactor.connect.cassandra.config.CassandraConfigConstants
import org.scalatest.BeforeAndAfter
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

import scala.collection.JavaConverters._

class TestCassandraSinkConnector extends AnyWordSpec with BeforeAndAfter with Matchers with TestConfig {
  "Should start a Cassandra Sink Connector" in {
    val props = Map(
      "topics" -> s"$TOPIC1, $TOPIC2",
      CassandraConfigConstants.CONTACT_POINTS -> CONTACT_POINT,
      CassandraConfigConstants.KEY_SPACE -> CASSANDRA_SINK_KEYSPACE,
      CassandraConfigConstants.USERNAME -> USERNAME,
      CassandraConfigConstants.PASSWD -> PASSWD,
      CassandraConfigConstants.KCQL -> QUERY_ALL
    ).asJava

    val connector = new CassandraSinkConnector()
    connector.start(props)
    val taskConfigs = connector.taskConfigs(1)
    taskConfigs.asScala.head.get(CassandraConfigConstants.KCQL) shouldBe QUERY_ALL
    taskConfigs.asScala.head.get(CassandraConfigConstants.CONTACT_POINTS) shouldBe CONTACT_POINT
    taskConfigs.asScala.head.get(CassandraConfigConstants.KEY_SPACE) shouldBe CASSANDRA_SINK_KEYSPACE
    taskConfigs.size() shouldBe 1
    connector.taskClass() shouldBe classOf[CassandraSinkTask]
    //connector.version() shouldBe ""
    connector.stop()
  }
}
Example 94
Source File: TestCassandraConnectionSecure.scala From stream-reactor with Apache License 2.0 | 5 votes |
package com.datamountaineer.streamreactor.connect.cassandra

import com.datamountaineer.streamreactor.connect.cassandra.config.{CassandraConfigConstants, CassandraConfigSink}
import org.scalatest.DoNotDiscover
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

import scala.collection.JavaConverters._

@DoNotDiscover
class TestCassandraConnectionSecure extends AnyWordSpec with Matchers with TestConfig {
  "should return a secured session" in {
    createKeySpace("connection", secure = true, ssl = false)
    val props = Map(
      CassandraConfigConstants.CONTACT_POINTS -> "localhost",
      CassandraConfigConstants.KEY_SPACE -> "connection",
      CassandraConfigConstants.USERNAME -> "cassandra",
      CassandraConfigConstants.PASSWD -> "cassandra",
      CassandraConfigConstants.KCQL -> "INSERT INTO TABLE SELECT * FROM TOPIC"
    ).asJava

    val taskConfig = CassandraConfigSink(props)
    val conn = CassandraConnection(taskConfig)
    val session = conn.session
    session should not be null
    session.getCluster.getConfiguration.getProtocolOptions.getAuthProvider should not be null
    val cluster = session.getCluster
    session.close()
    cluster.close()
  }
}
Example 95
Source File: TestCassandraSourceConnector.scala From stream-reactor with Apache License 2.0 | 5 votes |
package com.datamountaineer.streamreactor.connect.cassandra.source

import com.datamountaineer.streamreactor.connect.cassandra.TestConfig
import com.datamountaineer.streamreactor.connect.cassandra.config.CassandraConfigConstants
import org.scalatest.DoNotDiscover
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

import scala.collection.JavaConverters._

@DoNotDiscover
class TestCassandraSourceConnector extends AnyWordSpec with Matchers with TestConfig {
  "Should start a Cassandra Source Connector" in {
    val props = Map(
      CassandraConfigConstants.CONTACT_POINTS -> CONTACT_POINT,
      CassandraConfigConstants.KEY_SPACE -> CASSANDRA_SOURCE_KEYSPACE,
      CassandraConfigConstants.USERNAME -> USERNAME,
      CassandraConfigConstants.PASSWD -> PASSWD,
      CassandraConfigConstants.KCQL -> IMPORT_QUERY_ALL,
      CassandraConfigConstants.ASSIGNED_TABLES -> ASSIGNED_TABLES,
      CassandraConfigConstants.POLL_INTERVAL -> "1000"
    ).asJava

    val connector = new CassandraSourceConnector()
    connector.start(props)
    val taskConfigs = connector.taskConfigs(1)
    taskConfigs.asScala.head.get(CassandraConfigConstants.KCQL) shouldBe IMPORT_QUERY_ALL
    taskConfigs.asScala.head.get(CassandraConfigConstants.CONTACT_POINTS) shouldBe CONTACT_POINT
    taskConfigs.asScala.head.get(CassandraConfigConstants.KEY_SPACE) shouldBe CASSANDRA_SOURCE_KEYSPACE
    taskConfigs.asScala.head.get(CassandraConfigConstants.ASSIGNED_TABLES) shouldBe ASSIGNED_TABLES
    taskConfigs.size() shouldBe 1
    connector.taskClass() shouldBe classOf[CassandraSourceTask]
    connector.stop()
  }
}
Example 96
Source File: TestCassandraSinkConfig.scala From stream-reactor with Apache License 2.0 | 5 votes |
package com.datamountaineer.streamreactor.connect.cassandra.config

import com.datamountaineer.streamreactor.connect.cassandra.TestConfig
import org.scalatest.BeforeAndAfter
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

import scala.collection.JavaConverters._

class TestCassandraSinkConfig extends AnyWordSpec with BeforeAndAfter with Matchers with TestConfig {
  "A CassandraConfig should return configured for username and password" in {
    val props = Map(
      CassandraConfigConstants.CONTACT_POINTS -> CONTACT_POINT,
      CassandraConfigConstants.KEY_SPACE -> CASSANDRA_SINK_KEYSPACE,
      CassandraConfigConstants.USERNAME -> USERNAME,
      CassandraConfigConstants.PASSWD -> PASSWD,
      CassandraConfigConstants.KCQL -> QUERY_ALL
    ).asJava

    val taskConfig = CassandraConfigSink(props)
    taskConfig.getString(CassandraConfigConstants.CONTACT_POINTS) shouldBe CONTACT_POINT
    taskConfig.getString(CassandraConfigConstants.KEY_SPACE) shouldBe CASSANDRA_SINK_KEYSPACE
    taskConfig.getString(CassandraConfigConstants.USERNAME) shouldBe USERNAME
    taskConfig.getPassword(CassandraConfigConstants.PASSWD).value shouldBe PASSWD
    taskConfig.getString(CassandraConfigConstants.KCQL) shouldBe QUERY_ALL
    taskConfig.getString(CassandraConfigConstants.CONSISTENCY_LEVEL_CONFIG) shouldBe CassandraConfigConstants.CONSISTENCY_LEVEL_DEFAULT
  }

  "A CassandraConfig should return configured for SSL" in {
    val props = Map(
      CassandraConfigConstants.CONTACT_POINTS -> CONTACT_POINT,
      CassandraConfigConstants.KEY_SPACE -> CASSANDRA_SINK_KEYSPACE,
      CassandraConfigConstants.USERNAME -> USERNAME,
      CassandraConfigConstants.PASSWD -> PASSWD,
      CassandraConfigConstants.SSL_ENABLED -> "true",
      CassandraConfigConstants.TRUST_STORE_PATH -> TRUST_STORE_PATH,
      CassandraConfigConstants.TRUST_STORE_PASSWD -> TRUST_STORE_PASSWORD,
      CassandraConfigConstants.KCQL -> QUERY_ALL
    ).asJava

    val taskConfig = CassandraConfigSink(props)
    taskConfig.getString(CassandraConfigConstants.CONTACT_POINTS) shouldBe CONTACT_POINT
    taskConfig.getString(CassandraConfigConstants.KEY_SPACE) shouldBe CASSANDRA_SINK_KEYSPACE
    taskConfig.getString(CassandraConfigConstants.USERNAME) shouldBe USERNAME
    taskConfig.getPassword(CassandraConfigConstants.PASSWD).value shouldBe PASSWD
    taskConfig.getBoolean(CassandraConfigConstants.SSL_ENABLED) shouldBe true
    taskConfig.getString(CassandraConfigConstants.TRUST_STORE_PATH) shouldBe TRUST_STORE_PATH
    taskConfig.getPassword(CassandraConfigConstants.TRUST_STORE_PASSWD).value shouldBe TRUST_STORE_PASSWORD
    //taskConfig.getString(CassandraConfigConstants.EXPORT_MAPPINGS) shouldBe EXPORT_TOPIC_TABLE_MAP
    taskConfig.getString(CassandraConfigConstants.KCQL) shouldBe QUERY_ALL
  }

  "A CassandraConfig should return configured for SSL without client certificate authentication" in {
    val props = Map(
      CassandraConfigConstants.CONTACT_POINTS -> CONTACT_POINT,
      CassandraConfigConstants.KEY_SPACE -> CASSANDRA_SINK_KEYSPACE,
      CassandraConfigConstants.USERNAME -> USERNAME,
      CassandraConfigConstants.PASSWD -> PASSWD,
      CassandraConfigConstants.SSL_ENABLED -> "true",
      CassandraConfigConstants.TRUST_STORE_PATH -> TRUST_STORE_PATH,
      CassandraConfigConstants.TRUST_STORE_PASSWD -> TRUST_STORE_PASSWORD,
      CassandraConfigConstants.USE_CLIENT_AUTH -> "false",
      CassandraConfigConstants.KEY_STORE_PATH -> KEYSTORE_PATH,
      CassandraConfigConstants.KEY_STORE_PASSWD -> KEYSTORE_PASSWORD,
      CassandraConfigConstants.KCQL -> QUERY_ALL
    ).asJava

    val taskConfig = CassandraConfigSink(props)
    taskConfig.getString(CassandraConfigConstants.CONTACT_POINTS) shouldBe CONTACT_POINT
    taskConfig.getString(CassandraConfigConstants.KEY_SPACE) shouldBe CASSANDRA_SINK_KEYSPACE
    taskConfig.getString(CassandraConfigConstants.USERNAME) shouldBe USERNAME
    taskConfig.getPassword(CassandraConfigConstants.PASSWD).value shouldBe PASSWD
    taskConfig.getBoolean(CassandraConfigConstants.SSL_ENABLED) shouldBe true
    taskConfig.getBoolean(CassandraConfigConstants.USE_CLIENT_AUTH) shouldBe false
    taskConfig.getString(CassandraConfigConstants.KEY_STORE_PATH) shouldBe KEYSTORE_PATH
    taskConfig.getPassword(CassandraConfigConstants.KEY_STORE_PASSWD).value shouldBe KEYSTORE_PASSWORD
    taskConfig.getString(CassandraConfigConstants.KCQL) shouldBe QUERY_ALL
  }
}
Example 97
Source File: TestCassandraSourceSettings.scala From stream-reactor with Apache License 2.0 | 5 votes |
package com.datamountaineer.streamreactor.connect.cassandra.config

import com.datamountaineer.streamreactor.connect.cassandra.TestConfig
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

import scala.collection.JavaConverters._

class TestCassandraSourceSettings extends AnyWordSpec with Matchers with TestConfig {
  "CassandraSettings should return setting for a source" in {
    val props = Map(
      CassandraConfigConstants.CONTACT_POINTS -> CONTACT_POINT,
      CassandraConfigConstants.KEY_SPACE -> CASSANDRA_SOURCE_KEYSPACE,
      CassandraConfigConstants.USERNAME -> USERNAME,
      CassandraConfigConstants.PASSWD -> PASSWD,
      CassandraConfigConstants.KCQL -> IMPORT_QUERY_ALL,
      CassandraConfigConstants.ASSIGNED_TABLES -> ASSIGNED_TABLES,
      CassandraConfigConstants.POLL_INTERVAL -> "1000"
    ).asJava

    val taskConfig = CassandraConfigSource(props)
    val settings = CassandraSettings.configureSource(taskConfig).toList
    settings.size shouldBe 2
    settings.head.kcql.getSource shouldBe TABLE1
    settings.head.kcql.getTarget shouldBe TABLE1 //no table mapping provided so should be the table
    settings.head.timestampColType shouldBe TimestampType.NONE
    settings(1).kcql.getSource shouldBe TABLE2
    settings(1).kcql.getTarget shouldBe TOPIC2
    settings(1).timestampColType shouldBe TimestampType.NONE
  }

  "CassandraSettings should return setting for a source with one table" in {
    val map = Map(
      CassandraConfigConstants.CONTACT_POINTS -> CONTACT_POINT,
      CassandraConfigConstants.KEY_SPACE -> CASSANDRA_SINK_KEYSPACE,
      CassandraConfigConstants.USERNAME -> USERNAME,
      CassandraConfigConstants.PASSWD -> PASSWD,
      CassandraConfigConstants.KCQL -> "INSERT INTO cassandra-source SELECT * FROM orders PK created",
      CassandraConfigConstants.POLL_INTERVAL -> "1000"
    )

    val taskConfig = CassandraConfigSource(map.asJava)
    val settings = CassandraSettings.configureSource(taskConfig).toList
    settings.size shouldBe 1
  }
}
Example 98
Source File: MapMessageConverterTest.scala From stream-reactor with Apache License 2.0 | 5 votes |
package com.datamountaineer.streamreactor.connect.sink.converters

import java.util.UUID

import com.datamountaineer.streamreactor.connect.jms.config.{JMSConfig, JMSSettings}
import com.datamountaineer.streamreactor.connect.jms.sink.converters.MapMessageConverter
import com.datamountaineer.streamreactor.connect.{TestBase, Using}
import javax.jms.MapMessage
import org.apache.activemq.ActiveMQConnectionFactory
import org.scalatest.BeforeAndAfterAll
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

import scala.collection.JavaConverters._
import scala.reflect.io.Path

class MapMessageConverterTest extends AnyWordSpec with Matchers with Using with BeforeAndAfterAll with TestBase {
  val converter = new MapMessageConverter()

  val kafkaTopic1 = s"kafka-${UUID.randomUUID().toString}"
  val topicName = UUID.randomUUID().toString
  val queueName = UUID.randomUUID().toString
  val kcqlT = getKCQL(topicName, kafkaTopic1, "TOPIC")
  val kcqlQ = getKCQL(queueName, kafkaTopic1, "QUEUE")
  val props = getProps(s"$kcqlQ;$kcqlT", JMS_URL)
  val config = JMSConfig(props.asJava)
  val settings = JMSSettings(config, true)
  val setting = settings.settings.head

  override def afterAll(): Unit = {
    Path(AVRO_FILE).delete()
  }

  "MapMessageConverter" should {
    "create a JMS MapMessage" in {
      val connectionFactory = new ActiveMQConnectionFactory("vm://localhost?broker.persistent=false")

      using(connectionFactory.createConnection()) { connection =>
        using(connection.createSession(false, 1)) { session =>
          val record = getSinkRecords(kafkaTopic1).head
          val msg = converter.convert(record, session, setting)._2.asInstanceOf[MapMessage]

          Option(msg).isDefined shouldBe true

          msg.getBoolean("boolean") shouldBe true
          msg.getByte("int8") shouldBe 12.toByte
          msg.getShort("int16") shouldBe 12.toShort
          msg.getInt("int32") shouldBe 12
          msg.getLong("int64") shouldBe 12L
          msg.getFloat("float32") shouldBe 12.2f
          msg.getDouble("float64") shouldBe 12.2
          msg.getString("string") shouldBe "foo"
          msg.getBytes("bytes") shouldBe "foo".getBytes()
          val arr = msg.getObject("array")
          arr.asInstanceOf[java.util.List[String]].asScala.toArray shouldBe Array("a", "b", "c")
          val map1 = msg.getObject("map").asInstanceOf[java.util.Map[String, Int]].asScala.toMap
          map1 shouldBe Map("field" -> 1)
          val map2 = msg.getObject("mapNonStringKeys").asInstanceOf[java.util.Map[Int, Int]].asScala.toMap
          map2 shouldBe Map(1 -> 1)
        }
      }
    }
  }
}
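The using(...) helper threaded through these JMS converter tests is the classic loan pattern again, applied to connections and sessions. A sketch of one plausible implementation, assuming an AutoCloseable resource (the project's own Using trait may be more general, e.g. structurally typed over close()):

object UsingSketch extends App {
  def using[A <: AutoCloseable, B](resource: A)(body: A => B): B =
    try body(resource) finally resource.close()

  using(new java.io.StringReader("hi")) { r =>
    assert(r.read() == 'h') // resource is closed on the way out, pass or fail
  }
}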
Example 99
Source File: ObjectMessageConverterTest.scala From stream-reactor with Apache License 2.0 | 5 votes |
package com.datamountaineer.streamreactor.connect.sink.converters

import java.util.UUID

import com.datamountaineer.streamreactor.connect.jms.config.{JMSConfig, JMSSettings}
import com.datamountaineer.streamreactor.connect.jms.sink.converters.ObjectMessageConverter
import com.datamountaineer.streamreactor.connect.{TestBase, Using}
import javax.jms.ObjectMessage
import org.apache.activemq.ActiveMQConnectionFactory
import org.scalatest.BeforeAndAfterAll
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

import scala.collection.JavaConverters._
import scala.reflect.io.Path

class ObjectMessageConverterTest extends AnyWordSpec with Matchers with Using with TestBase with BeforeAndAfterAll {
  val converter = new ObjectMessageConverter()

  val kafkaTopic1 = s"kafka-${UUID.randomUUID().toString}"
  val topicName = UUID.randomUUID().toString
  val queueName = UUID.randomUUID().toString
  val kcqlT = getKCQL(topicName, kafkaTopic1, "TOPIC")
  val kcqlQ = getKCQL(queueName, kafkaTopic1, "QUEUE")
  val props = getProps(s"$kcqlQ;$kcqlT", JMS_URL)
  val config = JMSConfig(props.asJava)
  val settings = JMSSettings(config, true)
  val setting = settings.settings.head

  override def afterAll(): Unit = {
    Path(AVRO_FILE).delete()
  }

  "ObjectMessageConverter" should {
    "create an instance of jms ObjectMessage" in {
      val connectionFactory = new ActiveMQConnectionFactory("vm://localhost?broker.persistent=false")
      using(connectionFactory.createConnection()) { connection =>
        using(connection.createSession(false, 1)) { session =>
          val record = getSinkRecords(kafkaTopic1).head
          val msg = converter.convert(record, session, setting)._2.asInstanceOf[ObjectMessage]

          Option(msg).isDefined shouldBe true

          msg.getBooleanProperty("boolean") shouldBe true
          msg.getByteProperty("int8") shouldBe 12.toByte
          msg.getShortProperty("int16") shouldBe 12.toShort
          msg.getIntProperty("int32") shouldBe 12
          msg.getLongProperty("int64") shouldBe 12L
          msg.getFloatProperty("float32") shouldBe 12.2f
          msg.getDoubleProperty("float64") shouldBe 12.2
          msg.getStringProperty("string") shouldBe "foo"
          msg.getObjectProperty("bytes").asInstanceOf[java.util.List[Byte]].toArray shouldBe "foo".getBytes()
          val arr = msg.getObjectProperty("array")
          arr.asInstanceOf[java.util.List[String]].asScala.toArray shouldBe Array("a", "b", "c")
          val map1 = msg.getObjectProperty("map").asInstanceOf[java.util.Map[String, Int]].asScala.toMap
          map1 shouldBe Map("field" -> 1)
          val map2 = msg.getObjectProperty("mapNonStringKeys").asInstanceOf[java.util.Map[Int, Int]].asScala.toMap
          map2 shouldBe Map(1 -> 1)
        }
      }
    }
  }
}
Example 100
Source File: AvroMessageConverterTest.scala From stream-reactor with Apache License 2.0 | 5 votes |
package com.datamountaineer.streamreactor.connect.sink.converters

import java.nio.ByteBuffer
import java.util.UUID

import com.datamountaineer.streamreactor.connect.jms.config.{JMSConfig, JMSSettings}
import com.datamountaineer.streamreactor.connect.jms.sink.converters.AvroMessageConverter
import com.datamountaineer.streamreactor.connect.sink.AvroDeserializer
import com.datamountaineer.streamreactor.connect.{TestBase, Using}
import io.confluent.connect.avro.AvroData
import javax.jms.BytesMessage
import org.apache.activemq.ActiveMQConnectionFactory
import org.apache.avro.generic.GenericData
import org.apache.avro.util.Utf8
import org.scalatest.BeforeAndAfterAll
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

import scala.collection.JavaConverters._
import scala.reflect.io.Path

class AvroMessageConverterTest extends AnyWordSpec with Matchers with Using with TestBase with BeforeAndAfterAll {
  val converter = new AvroMessageConverter()
  private lazy val avroData = new AvroData(128)

  val kafkaTopic1 = s"kafka-${UUID.randomUUID().toString}"
  val topicName = UUID.randomUUID().toString
  val queueName = UUID.randomUUID().toString
  val kcqlT = getKCQL(topicName, kafkaTopic1, "TOPIC")
  val kcqlQ = getKCQL(queueName, kafkaTopic1, "QUEUE")
  val props = getProps(s"$kcqlQ;$kcqlT", JMS_URL)
  val config = JMSConfig(props.asJava)
  val settings = JMSSettings(config, true)
  val setting = settings.settings.head

  override def afterAll(): Unit = {
    Path(AVRO_FILE).delete()
  }

  "AvroMessageConverter" should {
    "create a BytesMessage with avro payload" in {
      val connectionFactory = new ActiveMQConnectionFactory("vm://localhost?broker.persistent=false")

      using(connectionFactory.createConnection()) { connection =>
        using(connection.createSession(false, 1)) { session =>
          val record = getSinkRecords(kafkaTopic1).head
          val msg = converter.convert(record, session, setting)._2.asInstanceOf[BytesMessage]

          Option(msg).isDefined shouldBe true

          msg.reset()
          val size = msg.getBodyLength
          size > 0 shouldBe true

          val data = new Array[Byte](size.toInt)
          msg.readBytes(data)

          val avroRecord = AvroDeserializer(data, avroData.fromConnectSchema(record.valueSchema()))
          avroRecord.get("int8") shouldBe 12.toByte
          avroRecord.get("int16") shouldBe 12.toShort
          avroRecord.get("int32") shouldBe 12
          avroRecord.get("int64") shouldBe 12L
          avroRecord.get("float32") shouldBe 12.2f
          avroRecord.get("float64") shouldBe 12.2
          avroRecord.get("boolean") shouldBe true
          avroRecord.get("string").toString shouldBe "foo"
          avroRecord.get("bytes").asInstanceOf[ByteBuffer].array() shouldBe "foo".getBytes()
          val array = avroRecord.get("array").asInstanceOf[GenericData.Array[Utf8]]
          val iter = array.iterator()
          new Iterator[String] {
            override def hasNext: Boolean = iter.hasNext
            override def next(): String = iter.next().toString
          }.toSeq shouldBe Seq("a", "b", "c")
          val map = avroRecord.get("map").asInstanceOf[java.util.Map[Utf8, Int]].asScala
          map.size shouldBe 1
          map.keys.head.toString shouldBe "field"
          map.get(map.keys.head) shouldBe Some(1)

          val iterRecord = avroRecord.get("mapNonStringKeys").asInstanceOf[GenericData.Array[GenericData.Record]].iterator()
          iterRecord.hasNext shouldBe true
          val r = iterRecord.next()
          r.get("key") shouldBe 1
          r.get("value") shouldBe 1
        }
      }
    }
  }
}
Example 101
Source File: JsonMessageConverterTest.scala From stream-reactor with Apache License 2.0 | 5 votes |
package com.datamountaineer.streamreactor.connect.sink.converters

import java.util.UUID

import com.datamountaineer.streamreactor.connect.jms.config.{JMSConfig, JMSSettings}
import com.datamountaineer.streamreactor.connect.jms.sink.converters.JsonMessageConverter
import com.datamountaineer.streamreactor.connect.{TestBase, Using}
import javax.jms.TextMessage
import org.apache.activemq.ActiveMQConnectionFactory
import org.apache.kafka.connect.sink.SinkRecord
import org.scalatest.BeforeAndAfterAll
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

import scala.collection.JavaConverters._
import scala.reflect.io.Path

class JsonMessageConverterTest extends AnyWordSpec with Matchers with Using with TestBase with BeforeAndAfterAll {
  val converter = new JsonMessageConverter()

  val kafkaTopic1 = s"kafka-${UUID.randomUUID().toString}"
  val queueName = UUID.randomUUID().toString
  val kcql = getKCQL(queueName, kafkaTopic1, "QUEUE")
  val props = getProps(kcql, JMS_URL)
  val config = JMSConfig(props.asJava)
  val settings = JMSSettings(config, true)
  val setting = settings.settings.head

  override def afterAll(): Unit = {
    Path(AVRO_FILE).delete()
  }

  "JsonMessageConverter" should {
    "create a TextMessage with Json payload" in {
      val connectionFactory = new ActiveMQConnectionFactory("vm://localhost?broker.persistent=false")

      using(connectionFactory.createConnection()) { connection =>
        using(connection.createSession(false, 1)) { session =>
          val schema = getSchema
          val struct = getStruct(schema)

          val record = new SinkRecord(kafkaTopic1, 0, null, null, schema, struct, 1)
          val msg = converter.convert(record, session, setting)._2.asInstanceOf[TextMessage]
          Option(msg).isDefined shouldBe true

          val json = msg.getText
          json shouldBe
            """{"int8":12,"int16":12,"int32":12,"int64":12,"float32":12.2,"float64":12.2,"boolean":true,"string":"foo","bytes":"Zm9v","array":["a","b","c"],"map":{"field":1},"mapNonStringKeys":[[1,1]]}""".stripMargin
        }
      }
    }
  }
}
Example 102
Source File: AvroRecordFieldExtractorMapFnTest.scala From stream-reactor with Apache License 2.0 | 5 votes |
package com.datamountaineer.streamreactor.connect.hbase.avro

import java.nio.file.Paths

import org.apache.avro.Schema
import org.apache.hadoop.hbase.util.Bytes
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class AvroRecordFieldExtractorMapFnTest extends AnyWordSpec with Matchers {

  val schema: Schema = new Schema.Parser().parse(Paths.get(getClass.getResource("/person.avsc").toURI).toFile)

  "AvroRecordFieldExtractorMapFn" should {
    "raise an exception if the given field does not exist in the schema" in {
      intercept[IllegalArgumentException] {
        AvroRecordFieldExtractorMapFn(schema, Seq("wrongField"))
      }
    }

    "raise an exception if the given field is not a primitive" in {
      intercept[IllegalArgumentException] {
        AvroRecordFieldExtractorMapFn(schema, Seq("address"))
      }
    }

    "create the mappings for all the given fields" in {
      val mappings = AvroRecordFieldExtractorMapFn(schema, Seq("firstName", "age"))

      val fnFirstName = mappings("firstName")
      val firstName = "Beaky"
      fnFirstName(firstName) shouldBe Bytes.toBytes(firstName)

      val fnAge = mappings("age")
      val age = 31
      fnAge(age) shouldBe Bytes.toBytes(age)
      intercept[ClassCastException] {
        fnAge(12.4)
      }
    }
  }
}
Example 103
Source File: AvroSchemaFieldsExistFnTest.scala From stream-reactor with Apache License 2.0 | 5 votes |
package com.datamountaineer.streamreactor.connect.hbase.avro

import com.datamountaineer.streamreactor.connect.hbase.PersonAvroSchema
import org.apache.avro.Schema
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class AvroSchemaFieldsExistFnTest extends AnyWordSpec with Matchers {

  val schema: Schema = new Schema.Parser().parse(PersonAvroSchema.schema)

  "AvroSchemaFieldsExistFn" should {
    "raise an exception if the field is not present" in {
      intercept[IllegalArgumentException] {
        AvroSchemaFieldsExistFn(schema, Seq("notpresent"))
      }

      intercept[IllegalArgumentException] {
        AvroSchemaFieldsExistFn(schema, Seq(" lastName"))
      }
    }

    "not raise an exception if the fields are present" in {
      AvroSchemaFieldsExistFn(schema, Seq("lastName", "age", "address"))
    }
  }
}
Example 104
Source File: GenericRowKeyBuilderTest.scala From stream-reactor with Apache License 2.0 | 5 votes |
package com.datamountaineer.streamreactor.connect.hbase

import com.datamountaineer.streamreactor.connect.hbase.BytesHelper._
import org.apache.hadoop.hbase.util.Bytes
import org.apache.kafka.connect.data.Schema
import org.apache.kafka.connect.sink.SinkRecord
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class GenericRowKeyBuilderTest extends AnyWordSpec with Matchers {
  "GenericRowKeyBuilder" should {
    "use the topic, partition and offset to make the key" in {
      val topic = "sometopic"
      val partition = 2
      val offset = 1243L
      val sinkRecord = new SinkRecord(topic, partition, Schema.INT32_SCHEMA, 345, Schema.STRING_SCHEMA, "", offset)

      val keyBuilder = new GenericRowKeyBuilderBytes()
      val expected = Bytes.add(
        Array(topic.fromString(), keyBuilder.delimiterBytes, partition.fromString(), keyBuilder.delimiterBytes, offset.fromString()))

      keyBuilder.build(sinkRecord, Nil) shouldBe expected
    }
  }
}
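HBase's Bytes.add simply concatenates byte arrays, which is why the expected key above is topic, partition and offset joined by the delimiter bytes. A minimal sketch of that concatenation on its own (object name is ours):

import org.apache.hadoop.hbase.util.Bytes

object BytesAddSketch extends App {
  val key = Bytes.add(Bytes.toBytes("sometopic"), Bytes.toBytes("|"), Bytes.toBytes("2"))
  assert(Bytes.toString(key) == "sometopic|2") // plain byte-array concatenation
}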
Example 105
Source File: StructFieldsRowKeyBuilderTest.scala From stream-reactor with Apache License 2.0 | 5 votes |
package com.datamountaineer.streamreactor.connect.hbase

import com.datamountaineer.streamreactor.connect.hbase.BytesHelper._
import org.apache.hadoop.hbase.util.Bytes
import org.apache.kafka.connect.data.{Schema, SchemaBuilder, Struct}
import org.apache.kafka.connect.sink.SinkRecord
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class StructFieldsRowKeyBuilderTest extends AnyWordSpec with Matchers {
  "StructFieldsRowKeyBuilder" should {
    "raise an exception if the field is not present in the struct" in {
      intercept[IllegalArgumentException] {
        val schema = SchemaBuilder.struct().name("com.example.Person")
          .field("firstName", Schema.STRING_SCHEMA)
          .field("age", Schema.INT32_SCHEMA)
          .field("threshold", Schema.OPTIONAL_FLOAT64_SCHEMA).build()

        val struct = new Struct(schema).put("firstName", "Alex").put("age", 30)
        val sinkRecord = new SinkRecord("sometopic", 1, null, null, schema, struct, 1)

        //val field = Field("threshold", "threshold", false)
        StructFieldsRowKeyBuilderBytes(List("threshold")).build(sinkRecord, null)
      }
    }

    "create the row key based on one single field in the struct" in {
      val schema = SchemaBuilder.struct().name("com.example.Person")
        .field("firstName", Schema.STRING_SCHEMA)
        .field("age", Schema.INT32_SCHEMA)
        .field("threshold", Schema.OPTIONAL_FLOAT64_SCHEMA).build()

      val struct = new Struct(schema).put("firstName", "Alex").put("age", 30)

      //val field = Field("firstName", "firstName", true)
      val sinkRecord = new SinkRecord("sometopic", 1, null, null, schema, struct, 1)
      StructFieldsRowKeyBuilderBytes(List("firstName")).build(sinkRecord, null) shouldBe "Alex".fromString
    }

    "create the row key based on more than one field in the struct" in {
      val schema = SchemaBuilder.struct().name("com.example.Person")
        .field("firstName", Schema.STRING_SCHEMA)
        .field("age", Schema.INT32_SCHEMA)
        .field("threshold", Schema.OPTIONAL_FLOAT64_SCHEMA).build()

      val struct = new Struct(schema).put("firstName", "Alex").put("age", 30)

      //val field = Field("firstName", "firstName", true)
      //val field2 = Field("age", "age", true)
      val sinkRecord = new SinkRecord("sometopic", 1, null, null, schema, struct, 1)
      StructFieldsRowKeyBuilderBytes(List("firstName", "age")).build(sinkRecord, null) shouldBe
        Bytes.add("Alex".fromString(), "\n".fromString(), 30.fromInt())
    }
  }
}
Example 106
Source File: AvroRecordRowKeyBuilderTest.scala From stream-reactor with Apache License 2.0 | 5 votes |
package com.datamountaineer.streamreactor.connect.hbase

import com.datamountaineer.streamreactor.connect.hbase.BytesHelper._
import com.datamountaineer.streamreactor.connect.hbase.avro.AvroRecordFieldExtractorMapFn
import org.apache.avro.Schema
import org.apache.avro.generic.GenericRecord
import org.apache.hadoop.hbase.util.Bytes
import org.apache.kafka.connect.sink.SinkRecord
import org.mockito.MockitoSugar
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class AvroRecordRowKeyBuilderTest extends AnyWordSpec with Matchers with MockitoSugar {
  val schema: Schema = new Schema.Parser().parse(PersonAvroSchema.schema)

  "AvroRecordRowKeyBuilder" should {
    "extract the values from the avro record and create the key" in {
      val keys = Seq("firstName", "lastName", "age")
      val rowKeyBuilder = new AvroRecordRowKeyBuilderBytes(AvroRecordFieldExtractorMapFn(schema, keys), keys)

      val sinkRecord = mock[SinkRecord]
      val firstName = "Jack"
      val lastName = "Smith"
      val age = 29

      val record = new GenericRecord {
        val values: Map[String, AnyRef] = Map("firstName" -> firstName, "lastName" -> lastName, "age" -> Int.box(age))

        override def get(key: String): AnyRef = values(key)

        override def put(key: String, v: scala.Any): Unit = sys.error("not supported")

        override def get(i: Int): AnyRef = sys.error("not supported")

        override def put(i: Int, v: scala.Any): Unit = sys.error("not supported")

        override def getSchema: Schema = sys.error("not supported")
      }

      val expectedValue = Bytes.add(
        Array(
          firstName.fromString(),
          rowKeyBuilder.delimBytes,
          lastName.fromString(),
          rowKeyBuilder.delimBytes,
          age.fromInt()))

      rowKeyBuilder.build(sinkRecord, record) shouldBe expectedValue
    }
  }
}
Example 107
Source File: SinkRecordKeyRowKeyBuilderTest.scala From stream-reactor with Apache License 2.0 | 5 votes |
package com.datamountaineer.streamreactor.connect.hbase

import com.datamountaineer.streamreactor.connect.hbase.BytesHelper._
import org.apache.kafka.connect.data.Schema
import org.apache.kafka.connect.sink.SinkRecord
import org.mockito.MockitoSugar
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class SinkRecordKeyRowKeyBuilderTest extends AnyWordSpec with Matchers with MockitoSugar {
  val keyRowKeyBuilder = new SinkRecordKeyRowKeyBuilderBytes()

  "SinkRecordKeyRowKeyBuilder" should {
    "create the right key from the Schema key value - Byte" in {
      val b = 123.toByte
      val sinkRecord = new SinkRecord("", 1, Schema.INT8_SCHEMA, b, Schema.FLOAT64_SCHEMA, Nil, 0)
      keyRowKeyBuilder.build(sinkRecord, "Should not matter") shouldBe Array(b)
    }

    "create the right key from the Schema key value - String" in {
      val s = "somekey"
      val sinkRecord = new SinkRecord("", 1, Schema.STRING_SCHEMA, s, Schema.FLOAT64_SCHEMA, Nil, 0)
      keyRowKeyBuilder.build(sinkRecord, Nil) shouldBe s.fromString()
    }

    "create the right key from the Schema key value - Bytes" in {
      val bArray = Array(23.toByte, 24.toByte, 242.toByte)
      val sinkRecord = new SinkRecord("", 1, Schema.BYTES_SCHEMA, bArray, Schema.FLOAT64_SCHEMA, Nil, 0)
      keyRowKeyBuilder.build(sinkRecord, Nil) shouldBe bArray
    }

    "create the right key from the Schema key value - Boolean" in {
      val bool = true
      val sinkRecord = new SinkRecord("", 1, Schema.BOOLEAN_SCHEMA, bool, Schema.FLOAT64_SCHEMA, Nil, 0)
      keyRowKeyBuilder.build(sinkRecord, Nil) shouldBe bool.fromBoolean()
    }
  }
}
Example 108
Source File: AsyncFunctionLoopTest.scala From stream-reactor with Apache License 2.0 | 5 votes |
package com.landoop.streamreactor.connect.hive

import java.util.concurrent.{CountDownLatch, TimeUnit}

import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers

import scala.concurrent.duration._

class AsyncFunctionLoopTest extends AnyFunSuite with Matchers {
  test("it loops 5 times in 10 seconds with 2s delay") {
    val countDownLatch = new CountDownLatch(5)
    val looper = new AsyncFunctionLoop(2.seconds, "test")({
      countDownLatch.countDown()
    })
    looper.start()
    countDownLatch.await(11000, TimeUnit.MILLISECONDS) shouldBe true
    looper.close()
  }
}
Example 109
Source File: FileUtilsTest.scala From stream-reactor with Apache License 2.0 | 5 votes |
package com.landoop.streamreactor.connect.hive.utils

import java.io.File

import org.apache.kafka.common.config.ConfigException
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers

class FileUtilsTest extends AnyFunSuite with Matchers {
  test("raises an exception if the file does not exist") {
    intercept[ConfigException] {
      FileUtils.throwIfNotExists("does_not_exist.file", "k1")
    }
  }

  test("throws an exception when the path is a directory") {
    val file = new File("dir")
    file.mkdir() shouldBe true
    try {
      intercept[ConfigException] {
        FileUtils.throwIfNotExists(file.getAbsolutePath, "k1")
      }
    } finally {
      file.delete()
    }
  }

  test("returns when the file exists") {
    val file = new File("file1.abc")
    file.createNewFile() shouldBe true
    try {
      FileUtils.throwIfNotExists(file.getAbsolutePath, "k1")
    } catch {
      case _: Throwable => fail("Should not raise an exception")
    } finally {
      file.delete()
    }
  }
}
Example 110
Source File: KeytabSettingsTest.scala From stream-reactor with Apache License 2.0 | 5 votes |
package com.landoop.streamreactor.connect.hive.kerberos

import com.landoop.streamreactor.connect.hive.sink.config.{HiveSinkConfigDefBuilder, SinkConfigSettings}
import org.apache.kafka.common.config.ConfigException
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers

import scala.collection.JavaConverters._

class KeytabSettingsTest extends AnyFunSuite with Matchers with FileCreation {
  test("validate a keytab setting") {
    val file = createFile("keytab1.keytab")
    try {
      val principal = "[email protected]"
      val config = HiveSinkConfigDefBuilder(
        Map(
          "connect.hive.database.name" -> "mydatabase",
          "connect.hive.metastore" -> "thrift",
          "connect.hive.metastore.uris" -> "thrift://localhost:9083",
          "connect.hive.fs.defaultFS" -> "hdfs://localhost:8020",
          "connect.hive.kcql" -> "insert into mytable select a,b,c from mytopic",
          SinkConfigSettings.KerberosKey -> "true",
          SinkConfigSettings.PrincipalKey -> principal,
          SinkConfigSettings.KerberosKeyTabKey -> file.getAbsolutePath
        ).asJava
      )

      val actualSettings = KeytabSettings.from(config, SinkConfigSettings)
      actualSettings shouldBe KeytabSettings(principal, file.getAbsolutePath, None)
    } finally {
      file.delete()
    }
  }

  test("throws an exception when principal is not set") {
    val file = createFile("keytab2.keytab")
    try {
      val principal = "[email protected]"
      val config = HiveSinkConfigDefBuilder(
        Map(
          "connect.hive.database.name" -> "mydatabase",
          "connect.hive.metastore" -> "thrift",
          "connect.hive.metastore.uris" -> "thrift://localhost:9083",
          "connect.hive.fs.defaultFS" -> "hdfs://localhost:8020",
          "connect.hive.kcql" -> "insert into mytable select a,b,c from mytopic",
          SinkConfigSettings.KerberosKey -> "true",
          SinkConfigSettings.KerberosKeyTabKey -> file.getAbsolutePath
        ).asJava
      )

      intercept[ConfigException] {
        KeytabSettings.from(config, SinkConfigSettings)
      }
    } finally {
      file.delete()
    }
  }

  test("throws an exception when the keytab is not present") {
    val principal = "[email protected]"
    val config = HiveSinkConfigDefBuilder(
      Map(
        "connect.hive.database.name" -> "mydatabase",
        "connect.hive.metastore" -> "thrift",
        "connect.hive.metastore.uris" -> "thrift://localhost:9083",
        "connect.hive.fs.defaultFS" -> "hdfs://localhost:8020",
        "connect.hive.kcql" -> "insert into mytable select a,b,c from mytopic",
        SinkConfigSettings.KerberosKey -> "true",
        SinkConfigSettings.PrincipalKey -> principal,
        SinkConfigSettings.KerberosKeyTabKey -> "does_not_exists.keytab"
      ).asJava
    )

    intercept[ConfigException] {
      KeytabSettings.from(config, SinkConfigSettings)
    }
  }
}
Example 111
Source File: MapValueConverterTest.scala From stream-reactor with Apache License 2.0 | 5 votes |
package com.landoop.streamreactor.connect.hive.sink

import com.landoop.json.sql.JacksonJson
import org.apache.kafka.connect.data.{Schema, Struct}
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers

import scala.collection.JavaConverters._

class MapValueConverterTest extends AnyFunSuite with Matchers {
  test("converts nested payload") {
    val json =
      """
        |{
        |  "idType": 3,
        |  "colorDepth": "",
        |  "threshold" : 45.77,
        |  "evars": {
        |    "evars": {
        |      "eVar1": "Tue Aug 27 2019 12:08:10",
        |      "eVar2": 156692207943934897
        |    }
        |  },
        |  "exclude": {
        |    "id": 0,
        |    "value": false
        |  }
        |}
        |""".stripMargin

    val map = JacksonJson.toMap[Any](json)

    val struct = MapValueConverter.convert(map)
    // When Jackson transforms the JSON to a Map, field order is not retained
    struct.schema().fields().asScala.map(_.name()).sorted shouldBe List("idType", "colorDepth", "threshold", "evars", "exclude").sorted

    struct.schema().field("idType").schema() shouldBe Schema.OPTIONAL_INT64_SCHEMA
    struct.schema().field("colorDepth").schema() shouldBe Schema.OPTIONAL_STRING_SCHEMA
    struct.schema().field("threshold").schema() shouldBe Schema.OPTIONAL_FLOAT64_SCHEMA

    struct.schema().field("exclude").schema().`type`() shouldBe Schema.Type.STRUCT
    struct.schema().field("exclude").schema().isOptional shouldBe true

    struct.schema().field("evars").schema().`type`() shouldBe Schema.Type.STRUCT
    struct.schema().field("evars").schema().isOptional shouldBe true
    struct.schema().field("evars").schema().fields().asScala.map(_.name()) shouldBe List("evars")

    val evarsInner = struct.schema().field("evars").schema().field("evars")
    evarsInner.schema().`type`() shouldBe Schema.Type.STRUCT
    evarsInner.schema().isOptional shouldBe true
    evarsInner.schema().fields().asScala.map(_.name()).sorted shouldBe List("eVar1", "eVar2").sorted
    evarsInner.schema().field("eVar1").schema() shouldBe Schema.OPTIONAL_STRING_SCHEMA
    evarsInner.schema().field("eVar2").schema() shouldBe Schema.OPTIONAL_INT64_SCHEMA

    val exclude = struct.schema().field("exclude").schema()
    exclude.schema().`type`() shouldBe Schema.Type.STRUCT
    exclude.schema().isOptional shouldBe true
    exclude.schema().fields().asScala.map(_.name()).sorted shouldBe List("id", "value").sorted
    exclude.schema().field("id").schema() shouldBe Schema.OPTIONAL_INT64_SCHEMA
    exclude.schema().field("value").schema() shouldBe Schema.OPTIONAL_BOOLEAN_SCHEMA

    struct.get("idType") shouldBe 3L
    struct.get("colorDepth") shouldBe ""
    struct.get("threshold") shouldBe 45.77D

    val evarsStruct = struct.get("evars").asInstanceOf[Struct].get("evars").asInstanceOf[Struct]
    evarsStruct.get("eVar1") shouldBe "Tue Aug 27 2019 12:08:10"
    evarsStruct.get("eVar2") shouldBe 156692207943934897L

    val excludeStruct = struct.get("exclude").asInstanceOf[Struct]
    excludeStruct.get("id") shouldBe 0L
    excludeStruct.get("value") shouldBe false
  }
}
Example 112
Source File: StageManagerTest.scala From stream-reactor with Apache License 2.0 | 5 votes |
package com.landoop.streamreactor.connect.hive.sink.staging

import com.landoop.streamreactor.connect.hive.{Offset, Topic, TopicPartition, TopicPartitionOffset}
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class StageManagerTest extends AnyWordSpec with Matchers {

  implicit val conf = new Configuration()
  implicit val fs = FileSystem.getLocal(conf)

  val dir = new Path("stageman")
  fs.mkdirs(dir)

  val manager = new StageManager(DefaultFilenamePolicy)

  "StageManager" should {
    "stage file as hidden" in {
      val stagePath = manager.stage(dir, TopicPartition(Topic("mytopic"), 1))
      stagePath.getName.startsWith(".") shouldBe true
    }

    "delete existing file" in {
      val stagePath = manager.stage(dir, TopicPartition(Topic("mytopic"), 1))
      fs.create(stagePath)

      manager.stage(dir, TopicPartition(Topic("mytopic"), 1))
      fs.exists(stagePath) shouldBe false
    }

    "commit file using offset" in {
      val stagePath = manager.stage(dir, TopicPartition(Topic("mytopic"), 1))
      fs.create(stagePath)

      val tpo = TopicPartitionOffset(Topic("mytopic"), 1, Offset(100))
      val finalPath = manager.commit(stagePath, tpo)
      finalPath.getName shouldBe "streamreactor_mytopic_1_100"
    }
  }
}
Example 113
Source File: DefaultCommitPolicyTest.scala From stream-reactor with Apache License 2.0 | 5 votes |
package com.landoop.streamreactor.connect.hive.sink.staging

import com.landoop.streamreactor.connect.hive.{Offset, Topic, TopicPartitionOffset}
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, LocalFileSystem, Path}
import org.apache.kafka.connect.data.{Schema, SchemaBuilder, Struct}
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

import scala.concurrent.duration._

class DefaultCommitPolicyTest extends AnyWordSpec with Matchers {

  val schema: Schema = SchemaBuilder.struct()
    .field("name", SchemaBuilder.string().required().build())
    .build()

  val struct = new Struct(schema)

  implicit val conf: Configuration = new Configuration()
  implicit val fs: LocalFileSystem = FileSystem.getLocal(conf)
  val tpo = TopicPartitionOffset(Topic("mytopic"), 1, Offset(100))

  private def shouldFlush(policy: CommitPolicy, path: Path, count: Long) = {
    val status = fs.getFileStatus(path)
    policy.shouldFlush(CommitContext(tpo, path, count, status.getLen, status.getModificationTime))
  }

  "DefaultCommitPolicy" should {
    "roll over after interval" in {
      val policy = DefaultCommitPolicy(None, Option(2.seconds), None)
      val path = new Path("foo")
      fs.create(path)

      shouldFlush(policy, path, 10) shouldBe false
      Thread.sleep(2000)
      shouldFlush(policy, path, 10) shouldBe true

      fs.delete(path, false)
    }

    "roll over after file count" in {
      val policy = DefaultCommitPolicy(None, None, Some(9))
      val path = new Path("foo")
      fs.create(path)

      shouldFlush(policy, path, 7) shouldBe false
      shouldFlush(policy, path, 8) shouldBe false
      shouldFlush(policy, path, 9) shouldBe true
      shouldFlush(policy, path, 10) shouldBe true

      fs.delete(path, false)
    }

    "roll over after file size" in {
      val policy = DefaultCommitPolicy(Some(10), None, None)
      val path = new Path("foo")
      val out = fs.create(path)
      shouldFlush(policy, path, 7) shouldBe false
      out.writeBytes("wibble wobble wabble wubble")
      out.close()
      shouldFlush(policy, path, 9) shouldBe true
      fs.delete(path, false)
    }
  }
}
Example 114
Source File: DropPartitionValuesMapperTest.scala From stream-reactor with Apache License 2.0 | 5 votes |
package com.landoop.streamreactor.connect.hive.sink.mapper

import cats.data.NonEmptyList
import com.landoop.streamreactor.connect.hive.{PartitionKey, PartitionPlan, TableName}
import org.apache.kafka.connect.data.{SchemaBuilder, Struct}
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers

import scala.collection.JavaConverters._

class DropPartitionValuesMapperTest extends AnyFunSuite with Matchers {

  test("strip partition values") {
    val schema = SchemaBuilder.struct()
      .field("a", SchemaBuilder.string().required().build())
      .field("p", SchemaBuilder.string().required().build())
      .field("q", SchemaBuilder.string().required().build())
      .field("z", SchemaBuilder.string().required().build())
      .build()

    val plan = PartitionPlan(TableName("foo"), NonEmptyList.of(PartitionKey("p"), PartitionKey("q")))
    val struct = new Struct(schema).put("a", "a").put("p", "p").put("q", "q").put("z", "z")
    val output = new DropPartitionValuesMapper(plan).map(struct)
    output.schema().fields().asScala.map(_.name) shouldBe Seq("a", "z")
  }

  test("handle a partition field missing from the input") {
    val schema = SchemaBuilder.struct()
      .field("a", SchemaBuilder.string().required().build())
      .field("q", SchemaBuilder.string().required().build())
      .field("z", SchemaBuilder.string().required().build())
      .build()

    val plan = PartitionPlan(TableName("foo"), NonEmptyList.of(PartitionKey("p"), PartitionKey("q")))
    val struct = new Struct(schema).put("a", "a").put("q", "q").put("z", "z")
    val output = new DropPartitionValuesMapper(plan).map(struct)
    output.schema().fields().asScala.map(_.name) shouldBe Seq("a", "z")
  }
}
Example 115
Source File: MetastoreSchemaAlignMapperTest.scala From stream-reactor with Apache License 2.0 | 5 votes |
package com.landoop.streamreactor.connect.hive.sink.mapper

import org.apache.kafka.connect.data.{SchemaBuilder, Struct}
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers

import scala.collection.JavaConverters._

class MetastoreSchemaAlignMapperTest extends AnyFunSuite with Matchers {

  test("pad optional missing fields with null") {
    val recordSchema = SchemaBuilder.struct()
      .field("a", SchemaBuilder.string().required().build())
      .field("b", SchemaBuilder.string().required().build())
      .field("c", SchemaBuilder.string().required().build())
      .build()
    val struct = new Struct(recordSchema).put("a", "a").put("b", "b").put("c", "c")

    val metastoreSchema = SchemaBuilder.struct()
      .field("a", SchemaBuilder.string().required().build())
      .field("b", SchemaBuilder.string().required().build())
      .field("c", SchemaBuilder.string().required().build())
      .field("z", SchemaBuilder.string().optional().build())
      .build()

    val output = new MetastoreSchemaAlignMapper(metastoreSchema).map(struct)
    output.schema().fields().asScala.map(_.name) shouldBe Seq("a", "b", "c", "z")
  }

  test("drop fields not specified in metastore") {
    val recordSchema = SchemaBuilder.struct()
      .field("a", SchemaBuilder.string().required().build())
      .field("b", SchemaBuilder.string().required().build())
      .field("c", SchemaBuilder.string().required().build())
      .build()
    val struct = new Struct(recordSchema).put("a", "a").put("b", "b").put("c", "c")

    val metastoreSchema = SchemaBuilder.struct()
      .field("a", SchemaBuilder.string().required().build())
      .field("b", SchemaBuilder.string().required().build())
      .build()

    val output = new MetastoreSchemaAlignMapper(metastoreSchema).map(struct)
    output.schema().fields().asScala.map(_.name) shouldBe Seq("a", "b")
  }
}
Example 116
Source File: HiveSourceConfigTest.scala From stream-reactor with Apache License 2.0 | 5 votes |
package com.landoop.streamreactor.connect.hive.source

import com.landoop.streamreactor.connect.hive.source.config.{HiveSourceConfig, ProjectionField}
import com.landoop.streamreactor.connect.hive.{TableName, Topic}
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class HiveSourceConfigTest extends AnyWordSpec with Matchers {

  "HiveSource" should {
    "populate required table properties from KCQL" in {
      val config = HiveSourceConfig.fromProps(Map(
        "connect.hive.database.name" -> "mydatabase",
        "connect.hive.metastore" -> "thrift",
        "connect.hive.metastore.uris" -> "thrift://localhost:9083",
        "connect.hive.fs.defaultFS" -> "hdfs://localhost:8020",
        "connect.hive.kcql" -> "insert into mytopic select a,b from mytable"
      ))
      val tableConfig = config.tableOptions.head
      tableConfig.topic shouldBe Topic("mytopic")
      tableConfig.tableName shouldBe TableName("mytable")
      tableConfig.projection.get.toList shouldBe Seq(ProjectionField("a", "a"), ProjectionField("b", "b"))
    }

    "populate aliases from KCQL" in {
      val config = HiveSourceConfig.fromProps(Map(
        "connect.hive.database.name" -> "mydatabase",
        "connect.hive.metastore" -> "thrift",
        "connect.hive.metastore.uris" -> "thrift://localhost:9083",
        "connect.hive.fs.defaultFS" -> "hdfs://localhost:8020",
        "connect.hive.kcql" -> "insert into mytopic select a as x,b from mytable"
      ))
      val tableConfig = config.tableOptions.head
      tableConfig.projection.get.toList shouldBe Seq(ProjectionField("a", "x"), ProjectionField("b", "b"))
    }

    "set projection to None for *" in {
      val config = HiveSourceConfig.fromProps(Map(
        "connect.hive.database.name" -> "mydatabase",
        "connect.hive.metastore" -> "thrift",
        "connect.hive.metastore.uris" -> "thrift://localhost:9083",
        "connect.hive.fs.defaultFS" -> "hdfs://localhost:8020",
        "connect.hive.kcql" -> "insert into mytopic select * from mytable"
      ))
      val tableConfig = config.tableOptions.head
      tableConfig.projection shouldBe None
    }

    "set table limit" in {
      val config = HiveSourceConfig.fromProps(Map(
        "connect.hive.database.name" -> "mydatabase",
        "connect.hive.metastore" -> "thrift",
        "connect.hive.metastore.uris" -> "thrift://localhost:9083",
        "connect.hive.fs.defaultFS" -> "hdfs://localhost:8020",
        "connect.hive.kcql" -> "insert into mytopic select a from mytable limit 200"
      ))
      val tableConfig = config.tableOptions.head
      tableConfig.limit shouldBe 200
    }
  }
}
Example 117
Source File: ParquetWriterTest.scala From stream-reactor with Apache License 2.0 | 5 votes |
package com.landoop.streamreactor.connect.hive.parquet

import com.landoop.streamreactor.connect.hive.StructUtils
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.kafka.connect.data.{SchemaBuilder, Struct}
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class ParquetWriterTest extends AnyWordSpec with Matchers {

  implicit val conf = new Configuration()
  implicit val fs = FileSystem.getLocal(conf)

  "ParquetWriter" should {
    "write parquet files" in {
      val schema = SchemaBuilder.struct()
        .field("name", SchemaBuilder.string().required().build())
        .field("title", SchemaBuilder.string().optional().build())
        .field("salary", SchemaBuilder.float64().optional().build())
        .build()
      val users = List(
        new Struct(schema).put("name", "sam").put("title", "mr").put("salary", 100.43),
        new Struct(schema).put("name", "laura").put("title", "ms").put("salary", 429.06)
      )

      val path = new Path("sinktest.parquet")

      val writer = parquetWriter(path, schema, ParquetSinkConfig(overwrite = true))
      users.foreach(writer.write)
      writer.close()

      val reader = parquetReader(path)
      val actual = Iterator.continually(reader.read).takeWhile(_ != null).toList
      reader.close()

      actual.map(StructUtils.extractValues) shouldBe users.map(StructUtils.extractValues)

      fs.delete(path, false)
    }

    "support writing nulls" in {
      val schema = SchemaBuilder.struct()
        .field("name", SchemaBuilder.string().required().build())
        .field("title", SchemaBuilder.string().optional().build())
        .field("salary", SchemaBuilder.float64().optional().build())
        .build()
      val users = List(
        new Struct(schema).put("name", "sam").put("title", null).put("salary", 100.43),
        new Struct(schema).put("name", "laura").put("title", "ms").put("salary", 429.06)
      )

      val path = new Path("sinktest.parquet")

      val writer = parquetWriter(path, schema, ParquetSinkConfig(overwrite = true))
      users.foreach(writer.write)
      writer.close()

      val reader = parquetReader(path)
      val actual = Iterator.continually(reader.read).takeWhile(_ != null).toList
      reader.close()

      actual.map(StructUtils.extractValues) shouldBe users.map(StructUtils.extractValues)

      fs.delete(path, false)
    }
  }
}
Example 118
Source File: HiveSchemaTest.scala From stream-reactor with Apache License 2.0 | 5 votes |
package com.landoop.streamreactor.hive.it

import java.util.concurrent.TimeUnit

import com.landoop.streamreactor.connect.hive.{DatabaseName, TableName}
import org.apache.kafka.connect.data.Schema
import org.scalatest.concurrent.Eventually
import org.scalatest.matchers.should.Matchers
import org.scalatest.time.{Millis, Span}
import org.scalatest.wordspec.AnyWordSpec

import scala.collection.JavaConverters._
import scala.io.Source
import scala.util.Random

class HiveSchemaTest extends AnyWordSpec with Matchers with PersonTestData with Eventually with HiveTests {

  private implicit val patience: PatienceConfig = PatienceConfig(Span(60000, Millis), Span(5000, Millis))

  case class Foo(s: String, l: Long, b: Boolean, d: Double)
  def foo = Foo("string", Random.nextLong, Random.nextBoolean, Random.nextDouble)

  "Hive" should {
    "create correct schema for table" in {

      val topic = createTopic()
      val taskDef = Source.fromInputStream(getClass.getResourceAsStream("/hive_sink_task_no_partitions.json")).getLines().mkString("\n")
        .replace("{{TOPIC}}", topic)
        .replace("{{TABLE}}", topic)
        .replace("{{NAME}}", topic)
      postTask(taskDef)

      val producer = stringStringProducer()
      writeRecords(producer, topic, JacksonSupport.mapper.writeValueAsString(foo), 2000)
      producer.close(30, TimeUnit.SECONDS)

      // wait for some data to have been flushed
      eventually {
        withConn { conn =>
          val stmt = conn.createStatement
          val rs = stmt.executeQuery(s"select count(*) FROM $topic")
          rs.next()
          rs.getLong(1) should be > 0L
        }
      }

      // check that the schema is correct
      val schema = com.landoop.streamreactor.connect.hive.schema(DatabaseName("default"), TableName(topic))
      schema.fields().asScala.map(_.name).toSet shouldBe Set("s", "b", "l", "d")
      schema.field("s").schema().`type`() shouldBe Schema.Type.STRING
      schema.field("l").schema().`type`() shouldBe Schema.Type.INT64
      schema.field("d").schema().`type`() shouldBe Schema.Type.FLOAT64
      schema.field("b").schema().`type`() shouldBe Schema.Type.BOOLEAN

      stopTask(topic)
    }
  }
}
Example 119
Source File: HiveParquetWithPartitionTest.scala From stream-reactor with Apache License 2.0 | 5 votes |
package com.landoop.streamreactor.hive.it

import java.util.concurrent.TimeUnit

import org.apache.hadoop.fs.Path
import org.scalatest.concurrent.Eventually
import org.scalatest.matchers.should.Matchers
import org.scalatest.time.{Millis, Span}
import org.scalatest.wordspec.AnyWordSpec

import scala.io.Source

class HiveParquetWithPartitionTest extends AnyWordSpec with Matchers with PersonTestData with Eventually with HiveTests {

  private implicit val patience: PatienceConfig = PatienceConfig(Span(60000, Millis), Span(5000, Millis))

  "Hive" should {
    "write partitioned records" in {
      val count = 100000L

      val topic = createTopic()
      val taskDef = Source.fromInputStream(getClass.getResourceAsStream("/hive_sink_task_with_partitions.json")).getLines().mkString("\n")
        .replace("{{TOPIC}}", topic)
        .replace("{{TABLE}}", topic)
        .replace("{{NAME}}", topic)
      postTask(taskDef)

      val producer = stringStringProducer()
      writeRecords(producer, topic, JacksonSupport.mapper.writeValueAsString(person), count)
      producer.close(30, TimeUnit.SECONDS)

      // wait for some data to have been flushed
      eventually {
        withConn { conn =>
          val stmt = conn.createStatement
          val rs = stmt.executeQuery(s"select count(*) FROM $topic")
          if (rs.next()) {
            val count = rs.getLong(1)
            println(s"Current count for $topic is $count")
            count should be > 100L
          } else {
            fail()
          }
        }
      }

      // we should see every partition created
      eventually {
        withConn { conn =>
          val stmt = conn.createStatement
          val rs = stmt.executeQuery(s"select distinct state from $topic")
          var count = 0
          while (rs.next()) {
            count = count + 1
          }
          println(s"State count is $count")
          count shouldBe states.length
        }
      }

      // check for the presence of each partition directory
      val table = metastore.getTable("default", topic)
      for (state <- states) {
        fs.exists(new Path(table.getSd.getLocation, s"state=$state")) shouldBe true
      }

      stopTask(topic)
    }
  }
}
Example 120
Source File: HiveSourceTest.scala From stream-reactor with Apache License 2.0 | 5 votes |
package com.landoop.streamreactor.hive.it

import java.util.Collections
import java.util.concurrent.TimeUnit

import org.scalatest.concurrent.Eventually
import org.scalatest.matchers.should.Matchers
import org.scalatest.time.{Millis, Span}
import org.scalatest.wordspec.AnyWordSpec

import scala.io.Source

class HiveSourceTest extends AnyWordSpec with Matchers with PersonTestData with Eventually with HiveTests {

  private implicit val patience: PatienceConfig = PatienceConfig(Span(60000, Millis), Span(5000, Millis))

  "Hive" should {
    "read non partitioned table" in {
      val count = 2000L

      val inputTopic = createTopic()
      val sinkTaskDef = Source.fromInputStream(getClass.getResourceAsStream("/hive_sink_task_no_partitions.json")).getLines().mkString("\n")
        .replace("{{TOPIC}}", inputTopic)
        .replace("{{TABLE}}", inputTopic)
        .replace("{{NAME}}", inputTopic)
      postTask(sinkTaskDef)

      val producer = stringStringProducer()
      writeRecords(producer, inputTopic, JacksonSupport.mapper.writeValueAsString(person), count)
      producer.close(30, TimeUnit.SECONDS)

      // we should now have 2000 records in hive, which we can verify via jdbc
      eventually {
        withConn { conn =>
          val stmt = conn.createStatement
          val rs = stmt.executeQuery(s"select count(*) from $inputTopic")
          rs.next()
          rs.getLong(1) shouldBe count
        }
      }

      stopTask(inputTopic)

      // now we can read them back in
      val outputTopic = createTopic()

      val sourceTaskDef = Source.fromInputStream(getClass.getResourceAsStream("/hive_source_task.json")).getLines().mkString("\n")
        .replace("{{TOPIC}}", outputTopic)
        .replace("{{TABLE}}", inputTopic)
        .replace("{{NAME}}", outputTopic)
      postTask(sourceTaskDef)

      // we should have 2000 records on the outputTopic
      var records = 0L
      val consumer = stringStringConsumer("earliest")
      consumer.subscribe(Collections.singleton(outputTopic))
      eventually {
        records = records + readRecords(consumer, outputTopic, 2, TimeUnit.SECONDS).size
        records shouldBe count
      }

      stopTask(outputTopic)
    }
  }
}
Example 121
Source File: HiveParquetTest.scala From stream-reactor with Apache License 2.0 | 5 votes |
package com.landoop.streamreactor.hive.it

import java.util.concurrent.TimeUnit

import org.scalatest.concurrent.Eventually
import org.scalatest.matchers.should.Matchers
import org.scalatest.time.{Millis, Span}
import org.scalatest.wordspec.AnyWordSpec

import scala.io.Source

class HiveParquetTest extends AnyWordSpec with Matchers with PersonTestData with Eventually with HiveTests {

  private implicit val patience: PatienceConfig = PatienceConfig(Span(30000, Millis), Span(2000, Millis))

  "Hive" should {
    "write records" in {
      val count = 10000L

      val topic = createTopic()
      val taskDef = Source.fromInputStream(getClass.getResourceAsStream("/hive_sink_task_no_partitions.json")).getLines().mkString("\n")
        .replace("{{TOPIC}}", topic)
        .replace("{{TABLE}}", topic)
        .replace("{{NAME}}", topic)
      postTask(taskDef)

      val producer = stringStringProducer()
      writeRecords(producer, topic, JacksonSupport.mapper.writeValueAsString(person), count)
      producer.close(30, TimeUnit.SECONDS)

      // we should now have 10000 records in hive, which we can verify via jdbc
      eventually {
        withConn { conn =>
          val stmt = conn.createStatement
          val rs = stmt.executeQuery(s"select count(*) from $topic")
          rs.next()
          rs.getLong(1) shouldBe count
        }
      }

      stopTask(topic)
    }
  }
}
Example 122
Source File: SubscriptionInfoExtractFnTest.scala From stream-reactor with Apache License 2.0 | 5 votes |
package com.datamountaineer.streamreactor.connect.bloomberg

import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class SubscriptionInfoExtractFnTest extends AnyWordSpec with Matchers {
  "SubscriptionInfoExtractFn" should {
    "handle empty settings" in {
      intercept[IllegalArgumentException] {
        SubscriptionInfoExtractFn("") shouldBe Seq.empty
      }
    }

    "handle one ticker subscription" in {
      SubscriptionInfoExtractFn("ticker1?fields= field1, field2, field3") shouldBe Seq(
        SubscriptionInfo("ticker1", List("FIELD1", "FIELD2", "FIELD3"), "ticker1?fields= field1, field2, field3")
      )
    }

    "handle multiple tickers subscription" in {
      SubscriptionInfoExtractFn("ticker1?fields= field1, field2, field3; ticker2?fields=field1;ticker3?fields=fieldA") shouldBe List(
        SubscriptionInfo("ticker1", List("FIELD1", "FIELD2", "FIELD3"), "ticker1?fields= field1, field2, field3"),
        SubscriptionInfo("ticker2", List("FIELD1"), "ticker2?fields=field1"),
        SubscriptionInfo("ticker3", List("FIELDA"), "ticker3?fields=fieldA")
      )
    }

    "handle missing ? between ticker and fields" in {
      intercept[IllegalArgumentException] {
        SubscriptionInfoExtractFn("ticker field1, field2, field3")
      }
    }

    "handle missing fields for a ticker subscription" in {
      intercept[IllegalArgumentException] {
        SubscriptionInfoExtractFn("ticker1?fields=f1,f2;ticker2?fields=")
      }
    }

    "handle missing fields= for a ticker subscription" in {
      intercept[IllegalArgumentException] {
        SubscriptionInfoExtractFn("ticker1?fields=f1,f2;ticker2?f3")
      }
    }
  }
}
Example 123
Source File: CorrelationIdsExtractorFnTest.scala From stream-reactor with Apache License 2.0 | 5 votes |
package com.datamountaineer.streamreactor.connect.bloomberg

import com.bloomberglp.blpapi.{CorrelationID, Subscription, SubscriptionList}
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class CorrelationIdsExtractorFnTest extends AnyWordSpec with Matchers {
  "CorrelationIdsExtractorFn" should {
    "handle null parameter" in {
      CorrelationIdsExtractorFn(null) shouldBe ""
    }

    "list all the correlation ids" in {
      val subscriptions = new SubscriptionList()
      subscriptions.add(new Subscription("someticker1", new CorrelationID(11)))
      subscriptions.add(new Subscription("someticker2", new CorrelationID(31)))
      CorrelationIdsExtractorFn(subscriptions) shouldBe "11,31"
    }
  }
}
Example 124
Source File: BloombergSubscriptionManagerTest.scala From stream-reactor with Apache License 2.0 | 5 votes |
package com.datamountaineer.streamreactor.connect.bloomberg

import com.bloomberglp.blpapi.Event.EventType
import com.bloomberglp.blpapi._
import org.mockito.MockitoSugar
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

import scala.collection.JavaConverters._

class BloombergSubscriptionManagerTest extends AnyWordSpec with Matchers with MockitoSugar {
  "BloombergSubscriptionManager" should {
    "return null if there are no items in the manager buffer" in {
      val manager = new BloombergSubscriptionManager(Map(1L -> "ticker1"))
      manager.getData shouldBe None
    }

    "ignore non SUBSCRIPTION_DATA events" in {
      val manager = new BloombergSubscriptionManager(Map(1L -> "ticker1"))
      val events = Seq(
        EventType.ADMIN,
        EventType.AUTHORIZATION_STATUS,
        EventType.PARTIAL_RESPONSE,
        EventType.REQUEST,
        EventType.REQUEST_STATUS,
        EventType.RESOLUTION_STATUS,
        EventType.RESPONSE,
        EventType.SERVICE_STATUS,
        EventType.SESSION_STATUS,
        EventType.SUBSCRIPTION_STATUS,
        EventType.TIMEOUT,
        EventType.TOPIC_STATUS,
        EventType.TOKEN_STATUS)

      events.map { et =>
        val ev = mock[Event]
        when(ev.eventType()).thenReturn(et)
        when(ev.iterator()).thenReturn(Seq.empty[Message].iterator.asJava)
        ev
      }.foreach(manager.processEvent(_, null))

      manager.getData shouldBe None
    }

    "return all items in the buffer" in {
      val manager = new BloombergSubscriptionManager(Map(1L -> "ticker1"))
      val correlationId = new CorrelationID(1)

      val msg1 = mock[Message]
      val elem1 = MockElementFn(Seq(MockElementFn(3.15D, "FIELD1")))
      when(msg1.correlationID()).thenReturn(correlationId)
      when(msg1.asElement()).thenReturn(elem1)

      val msg2 = mock[Message]
      val elem2 = MockElementFn(Seq(MockElementFn(value = true, "FIELD2")))
      when(msg2.numElements()).thenReturn(1)
      when(msg2.correlationID()).thenReturn(correlationId)
      when(msg2.asElement()).thenReturn(elem2)

      val ev = mock[Event]
      when(ev.eventType()).thenReturn(Event.EventType.SUBSCRIPTION_DATA)
      when(ev.iterator()).thenReturn(Seq(msg1, msg2).iterator.asJava)

      manager.processEvent(ev, null)

      val data = manager.getData.get
      data.size() shouldBe 2

      data.get(0).data.size() shouldBe 2 // contains the ticker as well
      data.get(0).data.containsKey(BloombergData.SubscriptionFieldKey) shouldBe true
      data.get(0).data.containsKey("FIELD1") shouldBe true
      data.get(0).data.get("FIELD1") shouldBe 3.15D

      data.get(1).data.size() shouldBe 2
      data.get(1).data.containsKey(BloombergData.SubscriptionFieldKey) shouldBe true
      data.get(1).data.containsKey("FIELD2") shouldBe true
      data.get(1).data.get("FIELD2") shouldBe true

      manager.getData shouldBe None
    }
  }
}
Example 125
Source File: TestCustomIndexName.scala From stream-reactor with Apache License 2.0 | 5 votes |
package com.datamountaineer.streamreactor.connect.elastic6.indexname

import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
import org.scalatest.prop.TableDrivenPropertyChecks

class TestCustomIndexName extends AnyFlatSpec with TableDrivenPropertyChecks with Matchers {

  val ValidIndexNames = Table(
    ("Valid index name", "Expectations"),
    ("", Vector()),
    ("abc", Vector(TextFragment("abc"))),
    ("abc{YYYY-MM-dd}", Vector(TextFragment("abc"), DateTimeFragment("YYYY-MM-dd"))),
    ("{YYYY-MM-dd}abc", Vector(DateTimeFragment("YYYY-MM-dd"), TextFragment("abc"))),
    ("{YYYY-MM-dd}abc{HH-MM-ss}", Vector(DateTimeFragment("YYYY-MM-dd"), TextFragment("abc"), DateTimeFragment("HH-MM-ss"))),
    ("{YYYY-MM-dd}{HH-MM-ss}", Vector(DateTimeFragment("YYYY-MM-dd"), DateTimeFragment("HH-MM-ss"))),
    ("abc{}", Vector(TextFragment("abc"))),
    ("{}abc", Vector(TextFragment("abc")))
  )

  val InvalidIndexNames = Table(
    ("Invalid index name"),
    ("}abc"),
    ("abc}"),
    ("abc}def")
  )

  "Custom index name" should "parse a valid String with date time formatting options" in {
    forAll(ValidIndexNames) { case (validIndexName, expectations) =>
      CustomIndexName.parseIndexName(validIndexName) shouldBe CustomIndexName(expectations)
    }
  }

  it should "throw an exception when using invalid index name" in {
    forAll(InvalidIndexNames) { case (invalidIndexName) =>
      intercept[InvalidCustomIndexNameException] {
        CustomIndexName.parseIndexName(invalidIndexName)
      }
    }
  }

  it should "return a valid String from a list of fragments" in new ClockFixture {
    CustomIndexName(
      Vector(DateTimeFragment("YYYY-MM-dd", TestClock), TextFragment("ABC"), DateTimeFragment("HH:mm:ss", TestClock))
    ).toString shouldBe "2016-10-02ABC14:00:00"
  }
}
Example 126
Source File: TestIndexNameFragment.scala From stream-reactor with Apache License 2.0 | 5 votes |
package com.datamountaineer.streamreactor.connect.elastic6.indexname

import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

class TestIndexNameFragment extends AnyFlatSpec with Matchers {

  // "TextFragment" should "return the original text when using getFragment()" in {
  //   forAll(Gen.alphaStr) { someString =>
  //     TextFragment(someString).getFragment shouldBe someString
  //   }
  // }

  "DateTimeFragment" should "return the formatted date when using getFragment()" in new ClockFixture {
    val dateTimeFormat = "YYYY-MM-dd HH:mm:ss"
    val expectedResult = "2016-10-02 14:00:00"

    DateTimeFragment(dateTimeFormat, TestClock).getFragment shouldBe expectedResult
  }
}
Example 127
Source File: CreateIndexTest.scala From stream-reactor with Apache License 2.0 | 5 votes |
package com.datamountaineer.streamreactor.connect.elastic6

import com.datamountaineer.kcql.Kcql
import com.datamountaineer.streamreactor.connect.elastic6.indexname.CreateIndex
import org.joda.time.{DateTime, DateTimeZone}
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class CreateIndexTest extends AnyWordSpec with Matchers {
  "CreateIndex" should {
    "create an index name without suffix when suffix not set" in {
      val kcql = Kcql.parse("INSERT INTO index_name SELECT * FROM topicA")
      CreateIndex.getIndexName(kcql) shouldBe "index_name"
    }

    "create an index name with suffix when suffix is set" in {
      val kcql = Kcql.parse("INSERT INTO index_name SELECT * FROM topicA WITHINDEXSUFFIX=_suffix_{YYYY-MM-dd}")
      val formattedDateTime = new DateTime(DateTimeZone.UTC).toString("YYYY-MM-dd")
      CreateIndex.getIndexName(kcql) shouldBe s"index_name_suffix_$formattedDateTime"
    }
  }
}
Example 128
Source File: ConsumerConfigFactoryTest.scala From stream-reactor with Apache License 2.0 | 5 votes |
package com.datamountaineer.streamreactor.connect.pulsar

import com.datamountaineer.streamreactor.connect.pulsar.config._
import org.apache.pulsar.client.api.SubscriptionType
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

import scala.collection.JavaConverters._

class ConsumerConfigFactoryTest extends AnyWordSpec with Matchers {
  val pulsarTopic = "persistent://landoop/standalone/connect/kafka-topic"

  "should create a config with batch settings" in {
    val config = PulsarSourceConfig(Map(
      PulsarConfigConstants.HOSTS_CONFIG -> "pulsar://localhost:6650",
      PulsarConfigConstants.KCQL_CONFIG -> s"INSERT INTO kafka_topic SELECT * FROM $pulsarTopic BATCH = 10",
      PulsarConfigConstants.THROW_ON_CONVERT_ERRORS_CONFIG -> "true",
      PulsarConfigConstants.POLLING_TIMEOUT_CONFIG -> "500"
    ).asJava)

    val settings = PulsarSourceSettings(config, 1)
    val consumerConfig = ConsumerConfigFactory("test", settings.kcql)
    consumerConfig(pulsarTopic).getReceiverQueueSize shouldBe 10
    consumerConfig(pulsarTopic).getConsumerName shouldBe "test"
  }

  "should create a config with Failover mode" in {
    val config = PulsarSourceConfig(Map(
      PulsarConfigConstants.HOSTS_CONFIG -> "pulsar://localhost:6650",
      PulsarConfigConstants.KCQL_CONFIG -> s"INSERT INTO kafka_topic SELECT * FROM $pulsarTopic BATCH = 10 WITHSUBSCRIPTION = failOver",
      PulsarConfigConstants.THROW_ON_CONVERT_ERRORS_CONFIG -> "true",
      PulsarConfigConstants.POLLING_TIMEOUT_CONFIG -> "500"
    ).asJava)

    val settings = PulsarSourceSettings(config, 2)
    val consumerConfig = ConsumerConfigFactory("test", settings.kcql)
    consumerConfig(pulsarTopic).getReceiverQueueSize shouldBe 10
    consumerConfig(pulsarTopic).getConsumerName shouldBe "test"
    consumerConfig(pulsarTopic).getSubscriptionType shouldBe SubscriptionType.Failover
  }

  "should create a config with exclusive mode" in {
    val config = PulsarSourceConfig(Map(
      PulsarConfigConstants.HOSTS_CONFIG -> "pulsar://localhost:6650",
      PulsarConfigConstants.KCQL_CONFIG -> s"INSERT INTO kafka_topic SELECT * FROM $pulsarTopic BATCH = 10 WITHSUBSCRIPTION = Exclusive",
      PulsarConfigConstants.THROW_ON_CONVERT_ERRORS_CONFIG -> "true",
      PulsarConfigConstants.POLLING_TIMEOUT_CONFIG -> "500"
    ).asJava)

    val settings = PulsarSourceSettings(config, 1)
    val consumerConfig = ConsumerConfigFactory("test", settings.kcql)
    consumerConfig(pulsarTopic).getReceiverQueueSize shouldBe 10
    consumerConfig(pulsarTopic).getConsumerName shouldBe "test"
    consumerConfig(pulsarTopic).getSubscriptionType shouldBe SubscriptionType.Exclusive
  }

  "should create a config with shared mode" in {
    val config = PulsarSourceConfig(Map(
      PulsarConfigConstants.HOSTS_CONFIG -> "pulsar://localhost:6650",
      PulsarConfigConstants.KCQL_CONFIG -> s"INSERT INTO kafka_topic SELECT * FROM $pulsarTopic BATCH = 10 WITHSUBSCRIPTION = shared",
      PulsarConfigConstants.THROW_ON_CONVERT_ERRORS_CONFIG -> "true",
      PulsarConfigConstants.POLLING_TIMEOUT_CONFIG -> "500"
    ).asJava)

    val settings = PulsarSourceSettings(config, 2)
    val consumerConfig = ConsumerConfigFactory("test", settings.kcql)
    consumerConfig(pulsarTopic).getReceiverQueueSize shouldBe 10
    consumerConfig(pulsarTopic).getConsumerName shouldBe "test"
    consumerConfig(pulsarTopic).getSubscriptionType shouldBe SubscriptionType.Shared
  }
}
Example 129
Source File: ProducerConfigFactoryTest.scala From stream-reactor with Apache License 2.0 | 5 votes |
package com.datamountaineer.streamreactor.connect.pulsar

import com.datamountaineer.streamreactor.connect.pulsar.config.{PulsarConfigConstants, PulsarSinkConfig, PulsarSinkSettings}
import org.apache.pulsar.client.api.CompressionType
import org.apache.pulsar.client.api.ProducerConfiguration.MessageRoutingMode
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

import scala.collection.JavaConverters._

class ProducerConfigFactoryTest extends AnyWordSpec with Matchers {
  val pulsarTopic = "persistent://landoop/standalone/connect/kafka-topic"

  "should create a SinglePartition with batching" in {
    val config = PulsarSinkConfig(Map(
      PulsarConfigConstants.HOSTS_CONFIG -> "pulsar://localhost:6650",
      PulsarConfigConstants.KCQL_CONFIG -> s"INSERT INTO $pulsarTopic SELECT * FROM kafka_topic BATCH = 10 WITHPARTITIONER = SinglePartition WITHCOMPRESSION = ZLIB WITHDELAY = 1000"
    ).asJava)

    val settings = PulsarSinkSettings(config)
    val producerConfig = ProducerConfigFactory("test", settings.kcql)
    producerConfig(pulsarTopic).getBatchingEnabled shouldBe true
    producerConfig(pulsarTopic).getBatchingMaxMessages shouldBe 10
    producerConfig(pulsarTopic).getBatchingMaxPublishDelayMs shouldBe 1000
    producerConfig(pulsarTopic).getCompressionType shouldBe CompressionType.ZLIB
    producerConfig(pulsarTopic).getMessageRoutingMode shouldBe MessageRoutingMode.SinglePartition
  }

  "should create a CustomPartition with no batching and no compression" in {
    val config = PulsarSinkConfig(Map(
      PulsarConfigConstants.HOSTS_CONFIG -> "pulsar://localhost:6650",
      PulsarConfigConstants.KCQL_CONFIG -> s"INSERT INTO $pulsarTopic SELECT * FROM kafka_topic WITHPARTITIONER = CustomPartition"
    ).asJava)

    val settings = PulsarSinkSettings(config)
    val producerConfig = ProducerConfigFactory("test", settings.kcql)
    producerConfig(pulsarTopic).getBatchingEnabled shouldBe false
    producerConfig(pulsarTopic).getCompressionType shouldBe CompressionType.NONE
    producerConfig(pulsarTopic).getMessageRoutingMode shouldBe MessageRoutingMode.CustomPartition
  }

  "should create a roundrobin with batching and no compression no delay" in {
    val config = PulsarSinkConfig(Map(
      PulsarConfigConstants.HOSTS_CONFIG -> "pulsar://localhost:6650",
      PulsarConfigConstants.KCQL_CONFIG -> s"INSERT INTO $pulsarTopic SELECT * FROM kafka_topic BATCH = 10 WITHPARTITIONER = ROUNDROBINPARTITION"
    ).asJava)

    val settings = PulsarSinkSettings(config)
    val producerConfig = ProducerConfigFactory("test", settings.kcql)
    producerConfig(pulsarTopic).getBatchingEnabled shouldBe true
    producerConfig(pulsarTopic).getBatchingMaxMessages shouldBe 10
    producerConfig(pulsarTopic).getBatchingMaxPublishDelayMs shouldBe 10
    producerConfig(pulsarTopic).getCompressionType shouldBe CompressionType.NONE
    producerConfig(pulsarTopic).getMessageRoutingMode shouldBe MessageRoutingMode.RoundRobinPartition
  }
}
Example 130
Source File: PulsarWriterTest.scala From stream-reactor with Apache License 2.0 | 5 votes |
package com.datamountaineer.streamreactor.connect.pulsar.sink

import com.datamountaineer.streamreactor.connect.pulsar.ProducerConfigFactory
import com.datamountaineer.streamreactor.connect.pulsar.config.{PulsarConfigConstants, PulsarSinkConfig, PulsarSinkSettings}
import org.apache.kafka.connect.data.{Schema, SchemaBuilder, Struct}
import org.apache.kafka.connect.sink.SinkRecord
import org.apache.pulsar.client.api.{Message, MessageId, Producer, PulsarClient}
import org.mockito.ArgumentMatchers.any
import org.mockito.MockitoSugar
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

import scala.collection.JavaConverters._

class PulsarWriterTest extends AnyWordSpec with MockitoSugar with Matchers {
  val pulsarTopic = "persistent://landoop/standalone/connect/kafka-topic"

  def getSchema: Schema = {
    SchemaBuilder.struct
      .field("int8", SchemaBuilder.int8().defaultValue(2.toByte).doc("int8 field").build())
      .field("int16", Schema.INT16_SCHEMA)
      .field("int32", Schema.INT32_SCHEMA)
      .field("int64", Schema.INT64_SCHEMA)
      .field("float32", Schema.FLOAT32_SCHEMA)
      .field("float64", Schema.FLOAT64_SCHEMA)
      .field("boolean", Schema.BOOLEAN_SCHEMA)
      .field("string", Schema.STRING_SCHEMA)
      .build()
  }

  def getStruct(schema: Schema): Struct = {
    new Struct(schema)
      .put("int8", 12.toByte)
      .put("int16", 12.toShort)
      .put("int32", 12)
      .put("int64", 12L)
      .put("float32", 12.2f)
      .put("float64", 12.2)
      .put("boolean", true)
      .put("string", "foo")
  }

  "should write messages" in {
    val config = PulsarSinkConfig(Map(
      PulsarConfigConstants.HOSTS_CONFIG -> "pulsar://localhost:6650",
      PulsarConfigConstants.KCQL_CONFIG -> s"INSERT INTO $pulsarTopic SELECT * FROM kafka_topic BATCH = 10 WITHPARTITIONER = SinglePartition WITHCOMPRESSION = ZLIB WITHDELAY = 1000"
    ).asJava)

    val schema = getSchema
    val struct = getStruct(schema)
    val record1 = new SinkRecord("kafka_topic", 0, null, null, schema, struct, 1)

    val settings = PulsarSinkSettings(config)
    val producerConfig = ProducerConfigFactory("test", settings.kcql)

    val client = mock[PulsarClient]
    val producer = mock[Producer]
    val messageId = mock[MessageId]

    when(client.createProducer(pulsarTopic, producerConfig(pulsarTopic))).thenReturn(producer)
    when(producer.send(any[Message])).thenReturn(messageId)

    val writer = PulsarWriter(client, "test", settings)
    writer.write(List(record1))
  }
}
Example 131
Source File: PulsarSinkConnectorTest.scala From stream-reactor with Apache License 2.0 | 5 votes |
package com.datamountaineer.streamreactor.connect.pulsar.sink

import com.datamountaineer.streamreactor.connect.pulsar.config.PulsarConfigConstants
import org.apache.kafka.common.config.ConfigException
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

import scala.collection.JavaConverters._

class PulsarSinkConnectorTest extends AnyWordSpec with Matchers {
  val pulsarTopic = "persistent://landoop/standalone/connect/kafka-topic"

  "should start a Connector and split correctly" in {
    val props = Map(
      "topics" -> "kafka_topic",
      PulsarConfigConstants.HOSTS_CONFIG -> "pulsar://localhost:6650",
      PulsarConfigConstants.KCQL_CONFIG -> s"INSERT INTO $pulsarTopic SELECT * FROM kafka_topic BATCH = 10 WITHPARTITIONER = SinglePartition WITHCOMPRESSION = ZLIB WITHDELAY = 1000"
    ).asJava

    val connector = new PulsarSinkConnector()
    connector.start(props)
    val taskConfigs = connector.taskConfigs(1)
    taskConfigs.size shouldBe 1
    connector.taskClass() shouldBe classOf[PulsarSinkTask]
    connector.stop()
  }

  "should throw as topic doesn't match kcql" in {
    val props = Map(
      "topics" -> "bad",
      PulsarConfigConstants.HOSTS_CONFIG -> "pulsar://localhost:6650",
      PulsarConfigConstants.KCQL_CONFIG -> s"INSERT INTO $pulsarTopic SELECT * FROM kafka_topic BATCH = 10 WITHPARTITIONER = SinglePartition WITHCOMPRESSION = ZLIB WITHDELAY = 1000"
    ).asJava

    val connector = new PulsarSinkConnector()
    intercept[ConfigException] {
      connector.start(props)
    }
  }
}
Example 132
Source File: ImplicitLoggingSpec.scala From scribe with MIT License | 5 votes |
package specs

import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class ImplicitLoggingSpec extends AnyWordSpec with Matchers {
  "implicit logger" should {
    "config properly" in {
      ImplicitLoggingTestObject.initialize()
    }
    "properly log a simple message" in {
      val line = Some(19)

      ImplicitLoggingTestObject.doSomething()

      ImplicitLoggingTestObject.testingModifier.records.length should be(1)
      val record = ImplicitLoggingTestObject.testingModifier.records.head
      record.className should be("specs.ImplicitLoggingTestObject")
      record.methodName should be(Some("doSomething"))
      record.line should be(line)
    }
  }
}
Example 133
Source File: AbbreviatorSpec.scala From scribe with MIT License | 5 votes |
package specs

import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
import scribe.util.Abbreviator

class AbbreviatorSpec extends AnyWordSpec with Matchers {
  "Abbreviator" should {
    val className1 = "mainPackage.sub.sample.Bar"
    val className2 = "mainPackage.sub.sample.FooBar"

    "properly abbreviate 26 length" in {
      val s = Abbreviator(className1, 26)
      s should be(className1)
    }
    "properly abbreviate 16 length" in {
      val s = Abbreviator(className1, 16)
      s should be("m.sub.sample.Bar")
    }
    "properly abbreviate 15 length" in {
      val s = Abbreviator(className1, 15)
      s should be("m.s.sample.Bar")
    }
    "properly abbreviate 10 length" in {
      val s = Abbreviator(className1, 10)
      s should be("m.s.s.Bar")
    }
    "properly abbreviate 5 length" in {
      val s = Abbreviator(className1, 5)
      s should be("Bar")
    }
    "properly abbreviate 0 length" in {
      val s = Abbreviator(className1, 0)
      s should be("Bar")
    }
    "properly abbreviate longer class name at 5" in {
      val s = Abbreviator(className2, 5, abbreviateName = true)
      s should be("Fo...")
    }
  }
}
Example 134
Source File: SegmentOfTest.scala From kafka-journal with MIT License | 5 votes |
package com.evolutiongaming.kafka.journal.eventual.cassandra

import cats.Id
import com.evolutiongaming.kafka.journal.Key
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers

class SegmentOfTest extends AnyFunSuite with Matchers {

  for {
    id <- List("id", "ID")
    (segments, segmentNr) <- List(
      (Segments.min, SegmentNr.min),
      (Segments.default, SegmentNr.unsafe(55)),
      (Segments.max, SegmentNr.unsafe(3355)))
  } yield {
    test(s"id: $id, segments: $segments, segmentNr: $segmentNr") {
      val segmentOf = SegmentOf[Id](segments)
      val key = Key(id = id, topic = "topic")
      segmentOf(key) shouldEqual segmentNr
    }
  }
}
Example 135
Source File: SchemaConfigSpec.scala From kafka-journal with MIT License | 5 votes |
package com.evolutiongaming.kafka.journal.eventual.cassandra

import cats.implicits._
import com.evolutiongaming.scassandra.ReplicationStrategyConfig
import com.typesafe.config.ConfigFactory
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers
import pureconfig.ConfigSource

class SchemaConfigSpec extends AnyFunSuite with Matchers {

  test("apply from empty config") {
    val config = ConfigFactory.empty()
    ConfigSource.fromConfig(config).load[SchemaConfig] shouldEqual SchemaConfig.default.asRight
  }

  test("apply from config") {
    val config = ConfigFactory.parseURL(getClass.getResource("schema.conf"))
    val expected = SchemaConfig(
      keyspace = SchemaConfig.Keyspace(
        name = "keyspace",
        replicationStrategy = ReplicationStrategyConfig.Simple(3),
        autoCreate = false),
      journalTable = "table-journal",
      metadataTable = "table-metadata",
      metaJournalTable = "table-meta-journal",
      pointerTable = "table-pointer",
      settingTable = "table-setting",
      locksTable = "table-locks",
      autoCreate = false)
    ConfigSource.fromConfig(config).load[SchemaConfig] shouldEqual expected.asRight
  }
}
Example 136
Source File: EventualCassandraConfigSpec.scala From kafka-journal with MIT License | 5 votes |
package com.evolutiongaming.kafka.journal.eventual.cassandra

import cats.implicits._
import com.typesafe.config.ConfigFactory
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers
import pureconfig.ConfigSource

class EventualCassandraConfigSpec extends AnyFunSuite with Matchers {

  test("apply from empty config") {
    val config = ConfigFactory.empty()
    val expected = EventualCassandraConfig.default
    ConfigSource.fromConfig(config).load[EventualCassandraConfig] shouldEqual expected.asRight
  }

  test("apply from config") {
    val config = ConfigFactory.parseURL(getClass.getResource("eventual-cassandra.conf"))
    val expected = EventualCassandraConfig(
      retries = 1,
      segmentSize = SegmentSize.min)
    ConfigSource.fromConfig(config).load[EventualCassandraConfig] shouldEqual expected.asRight
  }
}
Example 137
Source File: ResultSetSpec.scala From kafka-journal with MIT License | 5 votes |
package com.evolutiongaming.kafka.journal.eventual.cassandra

import cats.effect.concurrent.Ref
import cats.effect.{Concurrent, IO}
import cats.implicits._
import com.evolutiongaming.kafka.journal.IOSuite._
import org.scalatest.funsuite.AsyncFunSuite
import org.scalatest.matchers.should.Matchers

import scala.util.control.NoStackTrace

class ResultSetSpec extends AsyncFunSuite with Matchers {

  for {
    size      <- 0 to 5
    take      <- 1 to 5
    fetchSize <- 1 to 5
  } {
    test(s"size: $size, take: $take, fetchSize: $fetchSize") {
      testF[IO](size = size, take = take, fetchSize = fetchSize).run()
    }
  }

  private def testF[F[_] : Concurrent](size: Int, take: Int, fetchSize: Int) = {

    type Row = Int

    val all = (0 until size).toList

    for {
      fetches <- Ref[F].of(0)
      left    <- Ref[F].of(all)
      fetched <- Ref[F].of(List.empty[Row])
      next     = fetched.modify { rows => (List.empty, rows) }
      fetch    = for {
        _        <- fetches.update(_ + 1)
        toFetch1 <- left.get
        result   <- {
          if (toFetch1.isEmpty) ().pure[F]
          else for {
            taken <- left.modify { rows =>
              val fetched = rows.take(fetchSize)
              val left = rows.drop(fetchSize)
              (left, fetched)
            }
            _ <- fetched.set(taken)
          } yield {}
        }
      } yield result
      resultSet = ResultSet[F, Row](fetch, left.get.map(_.isEmpty), next)
      rows     <- resultSet.take(take.toLong).toList
      fetches  <- fetches.get
    } yield {
      rows shouldEqual all.take(take)

      if (take >= size) {
        val expected = {
          val n = size / fetchSize
          if (size % fetchSize == 0) n else n + 1
        }
        fetches shouldEqual expected
      }
    }
  }

  case object NotImplemented extends RuntimeException with NoStackTrace
}
Example 138
Source File: ExpiryServiceTest.scala From kafka-journal with MIT License | 5 votes |
package com.evolutiongaming.kafka.journal.eventual.cassandra

import java.time.{Instant, LocalDate, ZoneOffset}

import cats.effect.ExitCase
import cats.implicits._
import cats.{Id, catsInstancesForId}
import com.evolutiongaming.kafka.journal.ExpireAfter
import com.evolutiongaming.kafka.journal.ExpireAfter.implicits._
import com.evolutiongaming.kafka.journal.eventual.cassandra.ExpireOn.implicits._
import com.evolutiongaming.kafka.journal.eventual.cassandra.ExpiryService.Action
import com.evolutiongaming.kafka.journal.util.BracketFromMonad
import org.scalatest.FunSuite
import org.scalatest.matchers.should.Matchers

import scala.concurrent.duration._
import scala.util.control.NonFatal

class ExpiryServiceTest extends FunSuite with Matchers {

  import ExpiryServiceTest._

  test("expireOn") {
    val expireAfter = 1.day.toExpireAfter
    val expected = LocalDate.of(2019, 12, 12).toExpireOn
    expireService.expireOn(expireAfter, timestamp) shouldEqual expected
  }

  for {
    (expiry, expireAfter, action) <- List(
      (
        none[Expiry],
        1.minute.toExpireAfter.some,
        Action.update(Expiry(
          1.minute.toExpireAfter,
          LocalDate.of(2019, 12, 11).toExpireOn))),
      (
        none[Expiry],
        1.day.toExpireAfter.some,
        Action.update(Expiry(
          1.day.toExpireAfter,
          LocalDate.of(2019, 12, 12).toExpireOn))),
      (
        Expiry(
          1.day.toExpireAfter,
          LocalDate.of(2019, 12, 11).toExpireOn).some,
        1.day.toExpireAfter.some,
        Action.update(Expiry(
          1.day.toExpireAfter,
          LocalDate.of(2019, 12, 12).toExpireOn))),
      (
        Expiry(
          1.day.toExpireAfter,
          LocalDate.of(2019, 12, 12).toExpireOn).some,
        1.day.toExpireAfter.some,
        Action.ignore),
      (
        Expiry(
          1.day.toExpireAfter,
          LocalDate.of(2019, 12, 12).toExpireOn).some,
        none[ExpireAfter],
        Action.remove))
  } yield {
    test(s"action expiry: $expiry, expireAfter: $expireAfter, action: $action") {
      expireService.action(expiry, expireAfter, timestamp) shouldEqual action
    }
  }
}

object ExpiryServiceTest {

  implicit val bracketId: BracketFromMonad[Id, Throwable] = new BracketFromMonad[Id, Throwable] {

    def F = catsInstancesForId

    def bracketCase[A, B](acquire: Id[A])(use: A => Id[B])(release: (A, ExitCase[Throwable]) => Id[Unit]) = {
      flatMap(acquire) { a =>
        try {
          val b = use(a)
          try release(a, ExitCase.Completed) catch { case NonFatal(_) => }
          b
        } catch {
          case NonFatal(e) =>
            release(a, ExitCase.Error(e))
            raiseError(e)
        }
      }
    }

    def raiseError[A](a: Throwable) = throw a

    def handleErrorWith[A](fa: Id[A])(f: Throwable => Id[A]) = fa
  }

  val timestamp: Instant = Instant.parse("2019-12-11T10:10:10.00Z")

  val zoneId: ZoneOffset = ZoneOffset.UTC

  val expireService: ExpiryService[Id] = ExpiryService[Id](zoneId)
}
Example 139
Source File: FiniteDurationHelperTest.scala From kafka-journal with MIT License | 5 votes |
package com.evolutiongaming.kafka.journal.eventual.cassandra.util

import scala.concurrent.duration._

import com.datastax.driver.core.{Duration => DurationC}
import com.evolutiongaming.kafka.journal.eventual.cassandra.util.FiniteDurationHelper._
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers

class FiniteDurationHelperTest extends AnyFunSuite with Matchers {

  for {
    (finiteDuration, duration) <- List(
      (1.millis,    DurationC.newInstance(0, 0, 1000000)),
      (100.minutes, DurationC.newInstance(0, 0, 6000000000000L)),
      (30.days,     DurationC.newInstance(0, 30, 0)))
  } {
    test(s"$finiteDuration to cassandra Duration") {
      finiteDurationToDuration(finiteDuration) shouldEqual duration
    }

    test(s"$duration to FiniteDuration") {
      durationToFiniteDuration(duration) shouldEqual finiteDuration
    }

    test(s"$finiteDuration to & from cassandra Duration") {
      durationToFiniteDuration(finiteDurationToDuration(finiteDuration)) shouldEqual finiteDuration
    }
  }
}
Example 140
Source File: LocalDateTest.scala From kafka-journal with MIT License | 5 votes |
package com.evolutiongaming.kafka.journal.eventual.cassandra

import java.time.{Instant, ZoneId, LocalDate => LocalDateJ}

import com.datastax.driver.core.LocalDate
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers

class LocalDateTest extends AnyFunSuite with Matchers {

  test("datastax LocalDate to/from java LocalDate") {
    val localDateJ = LocalDateJ.of(2019, 10, 4)
    val localDate = LocalDate.fromYearMonthDay(2019, 10, 4)
    LocalDateJ.ofEpochDay(localDate.getDaysSinceEpoch.toLong) shouldEqual localDateJ
    LocalDate.fromDaysSinceEpoch(localDateJ.toEpochDay.toInt) shouldEqual localDate
  }

  test("Instant to LocalDate") {
    val instant = Instant.parse("2019-10-04T10:10:10.00Z")
    val localDate = LocalDateJ.ofInstant(instant, ZoneId.of("UTC"))
    localDate shouldEqual LocalDateJ.of(2019, 10, 4)
  }
}
Example 141
Source File: CassandraHealthCheckSpec.scala From kafka-journal with MIT License | 5 votes |
package com.evolutiongaming.kafka.journal.eventual.cassandra

import cats.effect.{IO, Resource}
import cats.implicits._
import com.evolutiongaming.catshelper.Log
import com.evolutiongaming.kafka.journal.IOSuite._
import org.scalatest.funsuite.AsyncFunSuite
import org.scalatest.matchers.should.Matchers

import scala.concurrent.duration._
import scala.util.control.NoStackTrace

class CassandraHealthCheckSpec extends AsyncFunSuite with Matchers {

  test("CassandraHealthCheck") {
    val expected = new RuntimeException with NoStackTrace
    val healthCheck = CassandraHealthCheck.of[IO](
      initial = 0.seconds,
      interval = 1.second,
      statement = Resource.pure[IO, IO[Unit]](expected.raiseError[IO, Unit]),
      log = Log.empty[IO])
    val result = for {
      actual <- healthCheck.use { _.error.untilDefinedM }
    } yield {
      actual shouldEqual expected
    }
    result.run()
  }
}
Example 142
Source File: CreateSchemaSpec.scala From kafka-journal with MIT License | 5 votes |
package com.evolutiongaming.kafka.journal.eventual.cassandra

import cats.Id
import cats.data.{NonEmptyList => Nel}
import com.evolutiongaming.scassandra.TableName
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers

class CreateSchemaSpec extends AnyFunSuite with Matchers { self =>

  test("create keyspace and tables") {
    val config = SchemaConfig.default
    val createSchema = CreateSchema[StateT](config, createKeyspace, createTables)
    val initial = State.empty.copy(createTables = true)
    val (state, (schema, fresh)) = createSchema.run(initial)
    state shouldEqual initial.copy(actions = List(Action.CreateTables, Action.CreateKeyspace))
    fresh shouldEqual true
    schema shouldEqual self.schema
  }

  test("not create keyspace and tables") {
    val config = SchemaConfig.default.copy(autoCreate = false)
    val createSchema = CreateSchema[StateT](config, createKeyspace, createTables)
    val initial = State.empty.copy(createTables = true)
    val (state, (schema, fresh)) = createSchema.run(initial)
    state shouldEqual initial.copy(actions = List(Action.CreateKeyspace))
    fresh shouldEqual false
    schema shouldEqual self.schema
  }

  private val schema = Schema(
    journal = TableName(keyspace = "journal", table = "journal"),
    metadata = TableName(keyspace = "journal", table = "metadata"),
    metaJournal = TableName(keyspace = "journal", table = "metajournal"),
    pointer = TableName(keyspace = "journal", table = "pointer"),
    setting = TableName(keyspace = "journal", table = "setting"))

  val createTables: CreateTables[StateT] = new CreateTables[StateT] {
    def apply(keyspace: String, tables: Nel[CreateTables.Table]) = {
      StateT { state =>
        val state1 = state.add(Action.CreateTables)
        (state1, state.createTables)
      }
    }
  }

  val createKeyspace: CreateKeyspace[StateT] = new CreateKeyspace[StateT] {
    def apply(config: SchemaConfig.Keyspace) = {
      StateT { state =>
        val state1 = state.add(Action.CreateKeyspace)
        (state1, ())
      }
    }
  }

  case class State(createTables: Boolean, actions: List[Action]) {
    def add(action: Action): State = copy(actions = action :: actions)
  }

  object State {
    val empty: State = State(createTables = false, actions = Nil)
  }

  type StateT[A] = cats.data.StateT[Id, State, A]

  object StateT {
    def apply[A](f: State => (State, A)): StateT[A] = cats.data.StateT[Id, State, A](f)
  }

  sealed trait Action extends Product

  object Action {
    case object CreateTables extends Action
    case object CreateKeyspace extends Action
  }
}
Example 143
Source File: PartitionsToSegmentsTest.scala From kafka-journal with MIT License | 5 votes |
package com.evolutiongaming.kafka.journal.replicator

import cats.data.{NonEmptySet => Nes}
import cats.implicits._
import com.evolutiongaming.kafka.journal.eventual.cassandra.{SegmentNr, Segments}
import com.evolutiongaming.catshelper.DataHelper._
import com.evolutiongaming.skafka.Partition
import org.scalatest.Succeeded
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers

import scala.collection.immutable.SortedSet
import scala.util.Try

class PartitionsToSegmentsTest extends AnyFunSuite with Matchers {

  for {
    (partitions, partitionNrs, segmentNrs) <- List(
      (20,  Nes.of(0),       SortedSet(0, 20, 40, 60, 80)),
      (30,  Nes.of(0),       SortedSet(0, 30, 60, 90)),
      (100, Nes.of(0, 1, 2), SortedSet(0, 1, 2)),
      (1,   Nes.of(0),       (0 until 100).toSortedSet))
  } {
    test(s"partitions: $partitions, segmentNrs: $segmentNrs, partitionNrs: $partitionNrs") {
      val result = for {
        segmentNrs           <- segmentNrs.toList.traverse { a => SegmentNr.of[Try](a.toLong) }
        partitionNrs         <- partitionNrs.toNel.traverse { a => Partition.of[Try](a) }
        partitionsToSegments <- PartitionsToSegments.of[Try](partitions, Segments.default)
      } yield {
        val actual = partitionsToSegments(partitionNrs.toNes)
        actual shouldEqual segmentNrs.toSortedSet
      }
      result shouldEqual Succeeded.pure[Try]
    }
  }
}
Example 144
Source File: KafkaSingletonTest.scala From kafka-journal with MIT License | 5 votes |
package com.evolutiongaming.kafka.journal.replicator

import cats.data.{NonEmptySet => Nes}
import cats.effect.concurrent.{Deferred, Ref}
import cats.effect.{Concurrent, IO, Resource, Timer}
import cats.implicits._
import com.evolutiongaming.catshelper.Log
import com.evolutiongaming.kafka.journal.IOSuite._
import com.evolutiongaming.skafka.consumer.RebalanceListener
import com.evolutiongaming.skafka.{Partition, TopicPartition}
import com.evolutiongaming.sstream.Stream
import org.scalatest.funsuite.AsyncFunSuite
import org.scalatest.matchers.should.Matchers

import scala.concurrent.duration._

class KafkaSingletonTest extends AsyncFunSuite with Matchers {

  test("allocate & release when partition assigned or revoked") {
    `allocate & release when partition assigned or revoked`[IO]().run()
  }

  private def `allocate & release when partition assigned or revoked`[F[_] : Concurrent : Timer](): F[Unit] = {

    val topic = "topic"

    def consumer(deferred: Deferred[F, RebalanceListener[F]]) = {
      new TopicConsumer[F] {

        def subscribe(listener: RebalanceListener[F]) = deferred.complete(listener)

        def poll = Stream.empty

        def commit = TopicCommit.empty
      }
    }

    def topicPartition(partition: Partition) = TopicPartition(topic, partition)

    val result = for {
      listener  <- Resource.liftF(Deferred[F, RebalanceListener[F]])
      allocated <- Resource.liftF(Ref[F].of(false))
      resource   = Resource.make { allocated.set(true) } { _ => allocated.set(false) }
      singleton <- KafkaSingleton.of(topic, consumer(listener).pure[Resource[F, *]], resource, Log.empty[F])
      listener  <- Resource.liftF(listener.get)
      _         <- Resource.liftF {
        for {
          a <- singleton.get
          _  = a shouldEqual none[Unit]
          a <- allocated.get
          _  = a shouldEqual false
          _ <- listener.onPartitionsAssigned(Nes.of(topicPartition(Partition.max)))
          a <- singleton.get
          _  = a shouldEqual none[Unit]
          a <- allocated.get
          _  = a shouldEqual false
          _ <- listener.onPartitionsAssigned(Nes.of(topicPartition(Partition.min)))
          _ <- Timer[F].sleep(10.millis)
          a <- singleton.get
          _  = a shouldEqual ().some
          a <- allocated.get
          _  = a shouldEqual true
          _ <- listener.onPartitionsRevoked(Nes.of(topicPartition(Partition.max)))
          a <- singleton.get
          _  = a shouldEqual ().some
          a <- allocated.get
          _  = a shouldEqual true
          _ <- listener.onPartitionsRevoked(Nes.of(topicPartition(Partition.min)))
          _ <- Timer[F].sleep(10.millis)
          a <- singleton.get
          _  = a shouldEqual none[Unit]
          a <- allocated.get
          _  = a shouldEqual false
        } yield {}
      }
    } yield {}
    result.use { _ => ().pure[F] }
  }
}
Example 145
Source File: ReplicatorConfigSpec.scala From kafka-journal with MIT License | 5 votes |
package com.evolutiongaming.kafka.journal.replicator

import cats.data.{NonEmptyList => Nel}
import cats.implicits._
import com.typesafe.config.ConfigFactory
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers
import pureconfig.{ConfigReader, ConfigSource}

import scala.concurrent.duration._

class ReplicatorConfigSpec extends AnyFunSuite with Matchers {

  test("apply from empty config") {
    val config = ConfigFactory.empty()
    val expected = ReplicatorConfig.default
    ConfigSource.fromConfig(config).load[ReplicatorConfig] shouldEqual expected.pure[ConfigReader.Result]
  }

  test("apply from config") {
    val config = ConfigFactory.parseURL(getClass.getResource("replicator.conf"))
    val expected = ReplicatorConfig(
      topicPrefixes = Nel.of("prefix1", "prefix2"),
      topicDiscoveryInterval = 1.minute,
      pollTimeout = 200.millis)
    ConfigSource.fromConfig(config).load[ReplicatorConfig] shouldEqual expected.pure[ConfigReader.Result]
  }

  test("apply from config with common kafka") {
    val config = ConfigFactory.parseURL(getClass.getResource("replicator-kafka.conf"))
    val default = ReplicatorConfig.default
    val expected = ReplicatorConfig(
      topicPrefixes = Nel.of("prefix"),
      kafka = default.kafka.copy(
        producer = default.kafka.producer.copy(
          common = default.kafka.producer.common.copy(
            clientId = "clientId".some)),
        consumer = default.kafka.consumer.copy(
          maxPollRecords = 10,
          common = default.kafka.consumer.common.copy(
            clientId = "clientId".some))))
    ConfigSource.fromConfig(config).load[ReplicatorConfig] shouldEqual expected.pure[ConfigReader.Result]
  }

  test("apply from reference.conf") {
    val config = ConfigFactory.load()
    val expected = ReplicatorConfig.default
    ConfigSource
      .fromConfig(config)
      .at("evolutiongaming.kafka-journal.replicator")
      .load[ReplicatorConfig] shouldEqual expected.pure[ConfigReader.Result]
  }
}
Example 146
Source File: TopicCommitTest.scala From kafka-journal with MIT License | 5 votes |
package com.evolutiongaming.kafka.journal.replicator

import java.util.concurrent.TimeUnit

import cats.data.{NonEmptyMap => Nem}
import cats.implicits._
import cats.effect.{Clock, IO}
import com.evolutiongaming.kafka.journal.IOSuite._
import cats.effect.concurrent.{Deferred, Ref}
import com.evolutiongaming.skafka.{Offset, Partition}
import org.scalatest.funsuite.AsyncFunSuite
import org.scalatest.matchers.should.Matchers

import scala.concurrent.duration._

class TopicCommitTest extends AsyncFunSuite with Matchers {

  test("delayed") {

    def commitOf(
      deferred: Deferred[IO, Unit],
      commitsRef: Ref[IO, List[Nem[Partition, Offset]]])(implicit
      clock: Clock[IO]
    ) = {
      val commit = new TopicCommit[IO] {
        def apply(offsets: Nem[Partition, Offset]) = {
          commitsRef.update { offsets :: _ } *> deferred.complete(())
        }
      }
      TopicCommit.delayed(10.millis, commit)
    }

    def clockOf(ref: Ref[IO, FiniteDuration]): Clock[IO] = {
      new Clock[IO] {
        def realTime(unit: TimeUnit): IO[Long] = monotonic(unit)
        def monotonic(unit: TimeUnit): IO[Long] = ref.get.map { _.toUnit(unit).toLong }
      }
    }

    val result = for {
      commitsRef <- Ref[IO].of(List.empty[Nem[Partition, Offset]])
      deferred   <- Deferred[IO, Unit]
      clockRef   <- Ref[IO].of(0.millis)
      clock       = clockOf(clockRef)
      commit     <- commitOf(deferred, commitsRef)(clock)
      _          <- commit(Nem.of((Partition.min, Offset.min)))
      offsets    <- commitsRef.get
      _           = offsets shouldEqual List.empty
      _          <- clockRef.set(20.millis)
      _          <- commit(Nem.of((Partition.unsafe(1), Offset.unsafe(1))))
      _          <- deferred.get
      offsets    <- commitsRef.get
      _           = offsets shouldEqual List(Nem.of((Partition.min, Offset.min), (Partition.unsafe(1), Offset.unsafe(1))))
    } yield {}
    result.run()
  }
}
Example 147
Source File: ReplicatorSpec.scala From kafka-journal with MIT License | 5 votes |
package com.evolutiongaming.kafka.journal.replicator

import cats.effect.{IO, Resource}
import cats.implicits._
import com.evolutiongaming.catshelper.LogOf
import com.evolutiongaming.kafka.journal.replicator.Replicator.Consumer
import com.evolutiongaming.kafka.journal.IOSuite._
import com.evolutiongaming.skafka.Topic
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AsyncWordSpec

import scala.concurrent.duration._
import scala.util.control.NoStackTrace

class ReplicatorSpec extends AsyncWordSpec with Matchers {

  "Replicator" should {

    "fail if any of replicators failed" in {
      implicit val logOf = LogOf.empty[IO]

      val error = new RuntimeException with NoStackTrace

      val consumer = new Consumer[IO] {
        def topics = Set("journal").pure[IO]
      }

      val start = (_: Topic) => Resource.pure[IO, IO[Unit]](error.raiseError[IO, Unit])

      val result = for {
        result <- Replicator.of(
          Replicator.Config(topicDiscoveryInterval = 0.millis),
          Resource.pure[IO, Consumer[IO]](consumer),
          start).use(identity).attempt
      } yield {
        result shouldEqual error.asLeft
      }

      result.run()
    }
  }
}
Example 148
Source File: JournalConfigSpec.scala From kafka-journal with MIT License | 5 votes |
package com.evolutiongaming.kafka.journal

import cats.implicits._
import com.typesafe.config.ConfigFactory
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers
import pureconfig.ConfigSource

import scala.concurrent.duration._

class JournalConfigSpec extends AnyFunSuite with Matchers {

  test("apply from empty config") {
    val config = ConfigFactory.empty()
    ConfigSource.fromConfig(config).load[JournalConfig] shouldEqual JournalConfig.default.asRight
  }

  test("apply from config") {
    val config = ConfigFactory.parseURL(getClass.getResource("journal.conf"))
    val default = JournalConfig.default
    val expected = JournalConfig(
      pollTimeout = 1.millis,
      kafka = default.kafka.copy(
        producer = default.kafka.producer.copy(
          common = default.kafka.producer.common.copy(
            clientId = "clientId".some)),
        consumer = default.kafka.consumer.copy(
          common = default.kafka.consumer.common.copy(
            clientId = "clientId".some))),
      headCache = default.headCache.copy(enabled = false))
    ConfigSource.fromConfig(config).load[JournalConfig] shouldEqual expected.asRight
  }
}
Example 149
Source File: JsonCodecTest.scala From kafka-journal with MIT License | 5 votes |
package com.evolutiongaming.kafka.journal

import cats.implicits._
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers
import play.api.libs.json.{JsString, Json}
import scodec.bits.ByteVector

import scala.util.{Failure, Try}

class JsonCodecTest extends AnyFunSuite with Matchers {

  private val malformed = ByteVector.view(Json.toBytes(JsString("\ud83d\ude18\ud83d")))

  test("JsonCodec.jsoniter") {
    JsonCodec.jsoniter[Try].decode.fromBytes(malformed) should matchPattern { case Failure(_: JournalError) => }
  }

  test("JsonCodec.playJson") {
    JsonCodec.playJson[Try].decode.fromBytes(malformed).isSuccess shouldEqual true
  }

  test("JsonCodec.default") {
    JsonCodec.default[Try].decode.fromBytes(malformed).isSuccess shouldEqual true
  }
}
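The first test uses ScalaTest's matchPattern, which accepts any value matching a partial-function pattern, here Failure of a specific error type, without requiring an equal instance. A self-contained sketch with plain standard-library types (the class name is hypothetical):

import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers

import scala.util.{Failure, Try}

class MatchPatternTest extends AnyFunSuite with Matchers {
  test("matchPattern accepts any failure of the expected type") {
    // Asserts on the shape of the value rather than on equality.
    Try("abc".toInt) should matchPattern { case Failure(_: NumberFormatException) => }
  }
}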
Example 150
Source File: ActionHeaderJsonSpec.scala From kafka-journal with MIT License | 5 votes |
package com.evolutiongaming.kafka.journal

import cats.implicits._
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers
import com.evolutiongaming.kafka.journal.util.PlayJsonHelper._
import play.api.libs.json._

import scala.util.Try

class ActionHeaderJsonSpec extends AnyFunSuite with Matchers {

  val origins = List(Origin("origin").some, none)

  val metadata = List(
    ("metadata", HeaderMetadata(Json.obj(("key", "value")).some)),
    ("none"    , HeaderMetadata.empty),
    ("legacy"  , HeaderMetadata.empty))

  val payloadTypes = List(PayloadType.Binary, PayloadType.Json)

  for {
    origin <- origins
  } {
    val originStr = origin.fold("None")(_.toString)

    for {
      payloadType             <- payloadTypes
      (metadataStr, metadata) <- metadata
    } {
      test(s"Append format, origin: $origin, payloadType: $payloadType, metadata: $metadataStr") {
        val range = SeqRange.unsafe(1, 5)
        val header = ActionHeader.Append(
          range = range,
          origin = origin,
          payloadType = payloadType,
          metadata = metadata)
        verify(header, s"Append-$originStr-$payloadType-$metadataStr")
      }
    }

    test(s"Delete format, origin: $origin") {
      val seqNr = SeqNr.unsafe(3)
      val header = ActionHeader.Delete(seqNr.toDeleteTo, origin)
      verify(header, s"Delete-$originStr")
    }

    test(s"Purge format, origin: $origin") {
      val header = ActionHeader.Purge(origin)
      verify(header, s"Purge-$originStr")
    }

    test(s"Mark format, origin: $origin") {
      val header = ActionHeader.Mark("id", origin)
      verify(header, s"Mark-$originStr")
    }
  }

  test("not supported ActionHeader") {
    val json = Json.obj(("new", Json.obj()))
    json.validate[Option[ActionHeader]] shouldEqual none[ActionHeader].pure[JsResult]
  }

  private def verify(value: ActionHeader, name: String) = {

    def verify(json: JsValue) = {
      val actual = json.validate[Option[ActionHeader]]
      actual shouldEqual value.some.pure[JsResult]
    }

    verify(Json.toJson(value))

    val json = for {
      byteVector <- ByteVectorOf[Try](getClass, s"$name.json")
      json       <- Try { Json.parse(byteVector.toArray) }
    } yield json
    verify(json.get)
  }
}
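The verify helper round-trips a value through its play-json format and also checks it against a committed fixture file. A minimal round-trip sketch of the same idea with a plain case class and a derived format (the Color type is hypothetical):

import play.api.libs.json.{Format, JsSuccess, Json}

object RoundTrip {
  // Hypothetical type with a macro-derived play-json format.
  final case class Color(name: String, rgb: Int)
  implicit val colorFormat: Format[Color] = Json.format[Color]

  // Writing then reading back should yield the original value.
  val ok: Boolean =
    Json.toJson(Color("red", 0xff0000)).validate[Color] == JsSuccess(Color("red", 0xff0000))
}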
Example 151
Source File: SeqNrSpec.scala From kafka-journal with MIT License | 5 votes |
package com.evolutiongaming.kafka.journal

import cats.implicits._
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers

class SeqNrSpec extends AnyFunSuite with Matchers {

  test("of") {
    SeqNr.of[Option](0) shouldEqual none
  }

  test("show") {
    SeqNr.min.show shouldEqual "1"
    SeqNr.max.show shouldEqual "9223372036854775807"
  }

  test("1 max 2") {
    SeqNr.unsafe(1) max SeqNr.unsafe(2) shouldEqual SeqNr.unsafe(2)
  }

  test("2 max 1") {
    SeqNr.unsafe(2) max SeqNr.unsafe(1) shouldEqual SeqNr.unsafe(2)
  }

  test("1 min 2") {
    SeqNr.unsafe(1) min SeqNr.unsafe(2) shouldEqual SeqNr.unsafe(1)
  }

  test("2 min 1") {
    SeqNr.unsafe(2) min SeqNr.unsafe(1) shouldEqual SeqNr.unsafe(1)
  }

  test("min.next") {
    SeqNr.min.next[Option] shouldEqual Some(SeqNr.unsafe(2))
  }

  test("max.next") {
    SeqNr.max.next[Option] shouldEqual None
  }

  test("min.prev") {
    SeqNr.min.prev[Option] shouldEqual None
  }

  test("max.prev") {
    SeqNr.max.prev[Option] shouldEqual SeqNr.unsafe(Long.MaxValue - 1).some
  }

  test("in") {
    SeqNr.min in SeqRange(SeqNr.min, SeqNr.max) shouldEqual true
  }

  test("to") {
    SeqNr.min to SeqNr.max shouldEqual SeqRange(SeqNr.min, SeqNr.max)
  }

  test("to Min") {
    SeqNr.min to SeqNr.min shouldEqual SeqRange(SeqNr.min)
  }
}
Example 152
Source File: PayloadSpec.scala From kafka-journal with MIT License | 5 votes |
package com.evolutiongaming.kafka.journal

import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers
import play.api.libs.json.JsString
import scodec.bits.ByteVector

class PayloadSpec extends AnyFunSuite with Matchers {

  test("apply text") {
    Payload("text") shouldEqual Payload.text("text")
  }

  test("apply binary") {
    Payload(ByteVector.empty) shouldEqual Payload.binary(ByteVector.empty)
  }

  test("apply json") {
    Payload(JsString("json")) shouldEqual Payload.json(JsString("json"))
  }
}
Example 153
Source File: HeadInfoSpec.scala From kafka-journal with MIT License | 5 votes |
package com.evolutiongaming.kafka.journal

import cats.implicits._
import com.evolutiongaming.skafka.Offset
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers

class HeadInfoSpec extends AnyFunSuite with Matchers {

  test("Empty apply Append") {
    HeadInfo.Empty(append(1, 2), Offset.min) shouldEqual appendInfo(0, 2)
  }

  test("Empty apply Delete") {
    HeadInfo.Empty(delete(10), Offset.min) shouldEqual deleteInfo(10)
  }

  test("Empty apply Purge") {
    HeadInfo.Empty(purge, Offset.min) shouldEqual HeadInfo.Purge
  }

  test("Empty apply Mark") {
    HeadInfo.Empty(mark, Offset.min) shouldEqual HeadInfo.Empty
  }

  test("NonEmpty apply Append") {
    appendInfo(0, 1)(append(2, 3), Offset.unsafe(1)) shouldEqual appendInfo(0, 3)
    appendInfo(0, 2, 1.some)(append(3, 4), Offset.unsafe(1)) shouldEqual appendInfo(0, 4, 1.some)
  }

  test("NonEmpty apply Delete") {
    appendInfo(0, 2)(delete(3), Offset.unsafe(1)) shouldEqual appendInfo(0, 2, 2.some)
    appendInfo(0, 2)(delete(1), Offset.unsafe(1)) shouldEqual appendInfo(0, 2, 1.some)
    appendInfo(0, 2, 1.some)(delete(3), Offset.unsafe(1)) shouldEqual appendInfo(0, 2, 2.some)
    appendInfo(0, 2, 2.some)(delete(1), Offset.unsafe(1)) shouldEqual appendInfo(0, 2, 2.some)
  }

  test("NonEmpty apply Purge") {
    appendInfo(0, 2)(purge, Offset.unsafe(1)) shouldEqual HeadInfo.Purge
  }

  test("NonEmpty apply Mark") {
    appendInfo(0, 2)(mark, Offset.unsafe(1)) shouldEqual appendInfo(0, 2)
  }

  test("Delete apply Append") {
    deleteInfo(1)(append(1, 2), Offset.unsafe(1)) shouldEqual appendInfo(1, 2)
    deleteInfo(10)(append(1, 2), Offset.unsafe(1)) shouldEqual appendInfo(1, 2)
    deleteInfo(10)(append(2, 3), Offset.unsafe(1)) shouldEqual appendInfo(1, 3, 1.some)
  }

  test("Delete apply Delete") {
    deleteInfo(1)(delete(2), Offset.unsafe(1)) shouldEqual deleteInfo(2)
    deleteInfo(2)(delete(1), Offset.unsafe(1)) shouldEqual deleteInfo(2)
  }

  test("Delete apply Purge") {
    deleteInfo(1)(purge, Offset.unsafe(1)) shouldEqual HeadInfo.Purge
  }

  test("Delete apply Mark") {
    deleteInfo(1)(mark, Offset.unsafe(1)) shouldEqual deleteInfo(1)
  }

  private def append(from: Int, to: Int) = {
    ActionHeader.Append(
      range = SeqRange.unsafe(from, to),
      origin = None,
      payloadType = PayloadType.Json,
      metadata = HeaderMetadata.empty)
  }

  private def delete(seqNr: Int) = {
    val deleteTo = SeqNr.unsafe(seqNr).toDeleteTo
    ActionHeader.Delete(deleteTo, None)
  }

  private def mark = ActionHeader.Mark("id", None)

  private def purge = ActionHeader.Purge(None)

  private def deleteInfo(seqNr: Int) = {
    val deleteTo = SeqNr.unsafe(seqNr).toDeleteTo
    HeadInfo.Delete(deleteTo)
  }

  private def appendInfo(offset: Int, seqNr: Int, deleteTo: Option[Int] = None) = {
    HeadInfo.Append(
      seqNr = SeqNr.unsafe(seqNr),
      deleteTo = deleteTo.map { deleteTo => SeqNr.unsafe(deleteTo).toDeleteTo },
      offset = Offset.unsafe(offset))
  }
}
Example 154
Source File: EventsTest.scala From kafka-journal with MIT License | 5 votes |
package com.evolutiongaming.kafka.journal

import cats.data.{NonEmptyList => Nel}
import cats.implicits._
import com.evolutiongaming.kafka.journal.Event._
import com.evolutiongaming.kafka.journal.util.ScodecHelper.{nelCodec, _}
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers
import scodec.bits.ByteVector
import scodec.{Attempt, codecs}

import scala.util.Try

class EventsTest extends AnyFunSuite with Matchers {

  test("decode newer version") {
    implicit val jsonCodec = JsonCodec.jsoniter[Try]

    val codec = {
      val eventsCodec = nelCodec(codecs.listOfN(codecs.int32, codecs.variableSizeBytes(codecs.int32, Event.codecEventPayload)))
      val version = ByteVector.fromByte(100)
      (codecs.constant(version) ~> eventsCodec)
        .xmap[Events[Payload]](a => Events(a, PayloadMetadata.empty), _.events)
    }

    val events = Events(Nel.of(Event(SeqNr.min, payload = Payload.text("text").some)), PayloadMetadata.empty)

    val actual = for {
      bits   <- codec.encode(events)
      result <- Events.codecEvents[Payload].decode(bits)
    } yield {
      result.value
    }

    actual shouldEqual events.pure[Attempt]
  }
}
Example 155
Source File: RecordMetadataTest.scala From kafka-journal with MIT License | 5 votes |
package com.evolutiongaming.kafka.journal

import cats.implicits._
import com.evolutiongaming.kafka.journal.ExpireAfter.implicits._
import com.evolutiongaming.kafka.journal.util.PlayJsonHelper._
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers
import play.api.libs.json.{JsResult, Json}

import scala.concurrent.duration._

class RecordMetadataTest extends AnyFunSuite with Matchers {

  for {
    (metadata, json) <- List(
      (
        RecordMetadata.empty,
        Json.obj(
          ("header", Json.obj()),
          ("payload", Json.obj()))),
      (
        RecordMetadata(
          HeaderMetadata(Json.obj(("key0", "value0")).some),
          PayloadMetadata(1.day.toExpireAfter.some, Json.obj(("key1", "value1")).some)),
        Json.obj(
          ("header", Json.obj(
            ("data", Json.obj(("key0", "value0"))))),
          ("payload", Json.obj(
            ("expireAfter", "1 day"),
            ("data", Json.obj(("key1", "value1"))))),
          ("data", Json.obj(("key0", "value0"))))),
      (
        RecordMetadata(
          HeaderMetadata(Json.obj(("key0", "value0")).some),
          PayloadMetadata(none, Json.obj(("key1", "value1")).some)),
        Json.obj(
          ("header", Json.obj(
            ("data", Json.obj(("key0", "value0"))))),
          ("payload", Json.obj(
            ("data", Json.obj(("key1", "value1"))))),
          ("data", Json.obj(("key0", "value0"))))),
      (
        RecordMetadata(
          HeaderMetadata.empty,
          PayloadMetadata(1.day.toExpireAfter.some, Json.obj(("key1", "value1")).some)),
        Json.obj(
          ("header", Json.obj()),
          ("payload", Json.obj(
            ("expireAfter", "1 day"),
            ("data", Json.obj(("key1", "value1"))))))),
      (
        RecordMetadata(
          HeaderMetadata(Json.obj(("key0", "value0")).some),
          PayloadMetadata.empty),
        Json.obj(
          ("header", Json.obj(
            ("data", Json.obj(("key0", "value0"))))),
          ("payload", Json.obj()),
          ("data", Json.obj(("key0", "value0"))))))
  } {
    test(s"formatRecordMetadata reads & writes $json") {
      Json.toJson(metadata) shouldEqual json
      json.validate[RecordMetadata] shouldEqual metadata.pure[JsResult]
    }
  }

  for {
    (metadata, json) <- List(
      (
        RecordMetadata.empty,
        Json.obj()),
      (
        RecordMetadata(
          HeaderMetadata(Json.obj(("key0", "value0")).some),
          PayloadMetadata.empty),
        Json.obj(
          ("data", Json.obj(("key0", "value0"))))),
      (
        RecordMetadata(
          HeaderMetadata(Json.obj(("key0", "value0")).some),
          PayloadMetadata(none, Json.obj(("key1", "value1")).some)),
        Json.obj(
          ("header", Json.obj(
            ("data", Json.obj(("key0", "value0"))))),
          ("payload", Json.obj(
            ("data", Json.obj(("key1", "value1"))))))))
  } {
    test(s"formatRecordMetadata reads $json") {
      json.validate[RecordMetadata] shouldEqual metadata.pure[JsResult]
    }
  }
}
Example 156
Source File: SeqRangeSpec.scala From kafka-journal with MIT License | 5 votes |
package com.evolutiongaming.kafka.journal

import cats.data.{NonEmptyList => Nel}
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers

class SeqRangeSpec extends AnyFunSuite with Matchers {

  private def seqRange(value: Int) = SeqRange(SeqNr.unsafe(value))

  private def seqRange(from: Int, to: Int) = SeqRange(SeqNr.unsafe(from), SeqNr.unsafe(to))

  test("==") {
    seqRange(1) == seqRange(1) shouldEqual true
    seqRange(1) == seqRange(2) shouldEqual false
    seqRange(2) == seqRange(1) shouldEqual false
    seqRange(1, 2) == seqRange(1, 2) shouldEqual true
    seqRange(1, 2) == seqRange(2, 3) shouldEqual false
    seqRange(2, 3) == seqRange(1, 2) shouldEqual false
  }

  test(">") {
    seqRange(1) > seqRange(1) shouldEqual false
    seqRange(1) > seqRange(2) shouldEqual false
    seqRange(2) > seqRange(1) shouldEqual true
    seqRange(1, 2) > seqRange(1, 2) shouldEqual false
    seqRange(1, 2) > seqRange(2, 3) shouldEqual false
    seqRange(1, 2) > seqRange(3, 4) shouldEqual false
    seqRange(2, 3) > seqRange(1, 2) shouldEqual false
    seqRange(3, 4) > seqRange(1, 2) shouldEqual true
  }

  test("<") {
    seqRange(1) < seqRange(1) shouldEqual false
    seqRange(1) < seqRange(2) shouldEqual true
    seqRange(2) < seqRange(1) shouldEqual false
    seqRange(1, 2) < seqRange(1, 2) shouldEqual false
    seqRange(1, 2) < seqRange(2, 3) shouldEqual false
    seqRange(1, 2) < seqRange(3, 4) shouldEqual true
    seqRange(2, 3) < seqRange(1, 2) shouldEqual false
    seqRange(3, 4) < seqRange(1, 2) shouldEqual false
  }

  test("toNel") {
    seqRange(1).toNel.map(_.value) shouldEqual Nel.of(1)
    seqRange(1, 2).toNel.map(_.value) shouldEqual Nel.of(1, 2)
    seqRange(1, 4).toNel.map(_.value) shouldEqual Nel.of(1, 2, 3, 4)
    SeqRange(SeqNr.min).toNel shouldEqual Nel.of(SeqNr.min)
    SeqRange(SeqNr.max).toNel shouldEqual Nel.of(SeqNr.max)
  }

  test("contains") {
    seqRange(1) contains seqRange(1) shouldEqual true
    seqRange(1) contains seqRange(2) shouldEqual false
    seqRange(2) contains seqRange(1) shouldEqual false
    seqRange(1, 2) contains seqRange(1, 2) shouldEqual true
    seqRange(1, 2) contains seqRange(2, 3) shouldEqual false
    seqRange(1, 2) contains seqRange(3, 4) shouldEqual false
    seqRange(2, 3) contains seqRange(1, 2) shouldEqual false
    seqRange(3, 4) contains seqRange(1, 2) shouldEqual false
    seqRange(1, 4) contains seqRange(2, 3) shouldEqual true
  }

  test("intersects") {
    seqRange(1) intersects seqRange(1) shouldEqual true
    seqRange(1) intersects seqRange(2) shouldEqual false
    seqRange(2) intersects seqRange(1) shouldEqual false
    seqRange(1, 2) intersects seqRange(1, 2) shouldEqual true
    seqRange(1, 2) intersects seqRange(2, 3) shouldEqual true
    seqRange(1, 2) intersects seqRange(3, 4) shouldEqual false
    seqRange(2, 3) intersects seqRange(1, 2) shouldEqual true
    seqRange(3, 4) intersects seqRange(1, 2) shouldEqual false
    seqRange(1, 4) intersects seqRange(2, 3) shouldEqual true
    seqRange(2, 3) intersects seqRange(1, 4) shouldEqual true
  }
}
Example 157
Source File: KafkaConfigTest.scala From kafka-journal with MIT License | 5 votes |
package com.evolutiongaming.kafka.journal

import cats.implicits._
import com.evolutiongaming.skafka.CommonConfig
import com.evolutiongaming.skafka.consumer.{AutoOffsetReset, ConsumerConfig}
import com.evolutiongaming.skafka.producer.{Acks, CompressionType, ProducerConfig}
import com.typesafe.config.ConfigFactory
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers
import pureconfig.ConfigSource

import scala.concurrent.duration._

class KafkaConfigTest extends AnyFunSuite with Matchers {

  private implicit val configReader = KafkaConfig.configReader(KafkaConfig.default)

  test("configReader from empty config") {
    val config = ConfigFactory.empty()
    ConfigSource.fromConfig(config).load[KafkaConfig] shouldEqual KafkaConfig.default.asRight
  }

  test("configReader from config") {
    val config = ConfigFactory.parseURL(getClass.getResource("kafka.conf"))
    val expected = KafkaConfig(
      producer = ProducerConfig(
        common = CommonConfig(
          clientId = "clientId".some,
          sendBufferBytes = 1000,
          receiveBufferBytes = 100),
        acks = Acks.All,
        idempotence = true,
        linger = 1.millis,
        compressionType = CompressionType.Lz4),
      ConsumerConfig(
        common = CommonConfig(
          clientId = "clientId".some,
          sendBufferBytes = 100,
          receiveBufferBytes = 1000),
        groupId = "groupId".some,
        autoOffsetReset = AutoOffsetReset.Earliest,
        autoCommit = false,
        maxPollRecords = 1000))
    ConfigSource.fromConfig(config).load[KafkaConfig] shouldEqual expected.asRight
  }
}
Example 158
Source File: PayloadTypeSpec.scala From kafka-journal with MIT License | 5 votes |
package com.evolutiongaming.kafka.journal

import cats.implicits._
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers
import play.api.libs.json.JsString

class PayloadTypeSpec extends AnyFunSuite with Matchers {

  for {
    (ext, payloadType) <- List(
      ("json", PayloadType.Json),
      ("txt" , PayloadType.Text),
      ("bin" , PayloadType.Binary))
  } {
    test(s"$payloadType.ext") {
      payloadType.ext shouldEqual ext
    }
  }

  for {
    (json, expected) <- List(
      ("json"  , PayloadType.Json.some),
      ("text"  , PayloadType.Text.some),
      ("binary", PayloadType.Binary.some),
      ("none"  , none))
  } {
    test(s"reads & writes $json") {
      JsString(json).validate[PayloadType].asOpt shouldEqual expected
    }
  }

  for {
    (json, expected) <- List(
      ("json"  , PayloadType.Json.some),
      ("text"  , PayloadType.Text.some),
      ("binary", none))
  } {
    test(s"TextOrJson reads & writes $json") {
      JsString(json).validate[PayloadType.TextOrJson].asOpt shouldEqual expected
    }
  }

  for {
    (json, expected) <- List(
      ("json"  , PayloadType.Json.some),
      ("text"  , none),
      ("binary", PayloadType.Binary.some))
  } {
    test(s"BinaryOrJson reads & writes $json") {
      JsString(json).validate[PayloadType.BinaryOrJson].asOpt shouldEqual expected
    }
  }
}
Example 159
Source File: TopicPointersTest.scala From kafka-journal with MIT License | 5 votes |
package com.evolutiongaming.kafka.journal.eventual

import com.evolutiongaming.skafka.{Offset, Partition}
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers

class TopicPointersTest extends AnyFunSuite with Matchers {

  test("merge") {

    def topicPointers(values: Map[Int, Int]) = {
      val values1 = values.map { case (partition, offset) =>
        (Partition.unsafe(partition), Offset.unsafe(offset))
      }
      TopicPointers(values1)
    }

    val topicPointers0 = topicPointers(Map((0, 0), (1, 1), (2, 2)))
    val topicPointers1 = topicPointers(Map((0, 2), (1, 1), (2, 0)))

    val expected = topicPointers(Map((0, 2), (1, 1), (2, 2)))
    (topicPointers0 merge topicPointers1) shouldEqual expected
  }
}
Example 160
Source File: EventualPayloadAndTypeSpec.scala From kafka-journal with MIT License | 5 votes |
package com.evolutiongaming.kafka.journal.eventual

import cats.implicits._
import com.evolutiongaming.kafka.journal._
import org.scalatest.EitherValues
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers
import play.api.libs.json.{Json => PlayJson}
import scodec.bits.ByteVector

import scala.util.Try

class EventualPayloadAndTypeSpec extends AnyFunSuite with Matchers with EitherValues {

  implicit val jsonCodec: JsonCodec[Try] = JsonCodec.default[Try]

  private val eventualWrite = EventualWrite.summon[Try, Payload]
  private val eventualRead  = EventualRead.summon[Try, Payload]

  for {
    (name, payload) <- List(
      ("text",   Payload.text("text")),
      ("binary", PayloadBinaryFromStr("binary")),
      ("json",   Payload.json("json"))
    )
  } {
    test(s"toEventual & fromEventual, payload: $name") {
      val actual = for {
        payloadAndType <- eventualWrite(payload)
        actual         <- eventualRead(payloadAndType)
      } yield actual

      actual shouldBe payload.pure[Try]
    }
  }

  test("toEventual: binary") {
    val payload = PayloadBinaryFromStr("binary")
    val eventual = eventualWrite(payload)
    eventual shouldBe EventualPayloadAndType(payload.value.asRight, PayloadType.Binary).pure[Try]
  }

  test("toEventual: text") {
    val payload = Payload.Text("text")
    val eventual = eventualWrite(payload)
    eventual shouldBe EventualPayloadAndType("text".asLeft, PayloadType.Text).pure[Try]
  }

  test("toEventual: json") {
    val payload = Payload.Json(PlayJson.obj("key" -> "value"))
    val eventual = eventualWrite(payload)
    eventual shouldBe EventualPayloadAndType("""{"key":"value"}""".asLeft, PayloadType.Json).pure[Try]
  }

  test("fromEventual: returns an error for payload type binary and payload string") {
    val payloadAndType = EventualPayloadAndType("text".asLeft, PayloadType.Binary)
    val result = eventualRead(payloadAndType).toEither
    result.left.value shouldBe a[JournalError]
    result.left.value.getMessage should include("Bytes expected")
  }

  test("fromEventual: returns an error for payload type text and payload bytes") {
    val payloadAndType = EventualPayloadAndType(ByteVector.empty.asRight, PayloadType.Text)
    val result = eventualRead(payloadAndType).toEither
    result.left.value shouldBe a[JournalError]
    result.left.value.getMessage should include("String expected")
  }

  test("fromEventual: returns an error for payload type json and payload bytes") {
    val payloadAndType = EventualPayloadAndType(ByteVector.empty.asRight, PayloadType.Json)
    val result = eventualRead(payloadAndType).toEither
    result.left.value shouldBe a[JournalError]
    result.left.value.getMessage should include("String expected")
  }
}
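The error-path tests lean on ScalaTest's EitherValues mixin, whose .left.value accessor extracts the Left side of an Either and fails the test with a descriptive message if the value is actually a Right. A minimal sketch of that usage with plain types:

import org.scalatest.EitherValues
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers

class EitherValuesTest extends AnyFunSuite with Matchers with EitherValues {
  test("left.value extracts the error side or fails the test") {
    val result: Either[String, Int] = Left("boom")
    // Fails with a clear message (rather than throwing) if `result` were a Right.
    result.left.value shouldEqual "boom"
    result.left.value should include("boo")
  }
}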
Example 161
Source File: CombinationsSpec.scala From kafka-journal with MIT License | 5 votes |
package com.evolutiongaming.kafka.journal

import cats.data.{NonEmptyList => Nel}
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers

class CombinationsSpec extends AnyFunSuite with Matchers {

  test("Nil") {
    Combinations(List.empty[Int]) shouldEqual Combinations.Type.empty
  }

  test("List(1)") {
    Combinations(List(1)) shouldEqual List(List(Nel.of(1)))
  }

  test("List(1, 2)") {
    Combinations(List(1, 2)) shouldEqual List(
      List(Nel.of(1, 2)),
      List(Nel.of(1), Nel.of(2)))
  }

  test("List(1, 2, 3)") {
    Combinations(List(1, 2, 3)) shouldEqual List(
      List(Nel.of(1, 2, 3)),
      List(Nel.of(1), Nel.of(2, 3)),
      List(Nel.of(1), Nel.of(2), Nel.of(3)),
      List(Nel.of(1, 2), Nel.of(3)))
  }

  test("List(1, 2, 3, 4)") {
    Combinations(List(1, 2, 3, 4)) shouldEqual List(
      List(Nel.of(1, 2, 3, 4)),
      List(Nel.of(1), Nel.of(2, 3, 4)),
      List(Nel.of(1), Nel.of(2), Nel.of(3, 4)),
      List(Nel.of(1, 2), Nel.of(3, 4)),
      List(Nel.of(1), Nel.of(2), Nel.of(3), Nel.of(4)),
      List(Nel.of(1, 2), Nel.of(3), Nel.of(4)),
      List(Nel.of(1), Nel.of(2, 3), Nel.of(4)),
      List(Nel.of(1, 2, 3), Nel.of(4)))
  }
}
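The expected values enumerate the 2^(n-1) ways to cut a sequence of n elements into consecutive non-empty groups (one binary choice per gap between neighbours), though in a different order than the sketch below produces. A small illustrative enumerator of those splits (the function name is hypothetical, not kafka-journal's implementation):

object Compositions {
  // All ways to split a list into consecutive non-empty groups.
  def compositions[A](as: List[A]): List[List[List[A]]] = as match {
    case Nil      => List(Nil)
    case a :: Nil => List(List(List(a)))
    case a :: tail =>
      compositions(tail).flatMap { groups =>
        // Either start a new group with `a`, or prepend `a` to the first group.
        List(List(a) :: groups, (a :: groups.head) :: groups.tail)
      }
  }
}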
Example 162
Source File: OriginSpec.scala From kafka-journal with MIT License | 5 votes |
package com.evolutiongaming.kafka.journal

import akka.actor.ActorSystem
import akka.testkit.TestKit
import cats.effect.IO
import com.evolutiongaming.kafka.journal.IOSuite._
import org.scalatest.funsuite.AsyncFunSuite
import org.scalatest.matchers.should.Matchers

import scala.concurrent.duration._

class OriginSpec extends AsyncFunSuite with Matchers {

  test("hostName") {
    val result = for {
      hostName <- Origin.hostName[IO]
      result    = hostName.isDefined shouldEqual true
    } yield result
    result.run()
  }

  withSystem { system =>

    test("akkaHost") {
      val result = for {
        akkaHost <- Origin.akkaHost[IO](system)
        result    = akkaHost.isDefined shouldEqual false
      } yield result
      result.run()
    }

    test("akkaName") {
      Origin.akkaName(system) shouldEqual Origin("OriginSpec")
    }
  }

  private def withSystem[T](f: ActorSystem => T): T = {
    val system = ActorSystem("OriginSpec")
    try {
      f(system)
    } finally {
      TestKit.shutdownActorSystem(system, 3.seconds)
    }
  }
}
Example 163
Source File: PartitionOffsetSpec.scala From kafka-journal with MIT License | 5 votes |
package com.evolutiongaming.kafka.journal

import cats.data.{NonEmptyList => Nel}
import cats.implicits._
import com.evolutiongaming.skafka.{Offset, Partition}
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers

class PartitionOffsetSpec extends AnyFunSuite with Matchers {

  def partitionOffsetOf(partition: Int, offset: Int): PartitionOffset = {
    PartitionOffset(
      partition = Partition.unsafe(partition),
      offset = Offset.unsafe(offset))
  }

  test("order") {
    val partitionOffsets = Nel.of(
      partitionOffsetOf(partition = 0, offset = 1),
      partitionOffsetOf(partition = 1, offset = 0),
      partitionOffsetOf(partition = 0, offset = 0)
    )
    val expected = Nel.of(
      partitionOffsetOf(partition = 0, offset = 0),
      partitionOffsetOf(partition = 0, offset = 1),
      partitionOffsetOf(partition = 1, offset = 0))
    partitionOffsets.sorted shouldEqual expected
  }

  test("show") {
    val partitionOffset = partitionOffsetOf(partition = 0, offset = 1)
    partitionOffset.show shouldEqual "0:1"
  }
}
Example 164
Source File: FiniteDurationFormatTest.scala From kafka-journal with MIT License | 5 votes |
package com.evolutiongaming.kafka.journal.util

import cats.implicits._
import com.evolutiongaming.kafka.journal.util.PlayJsonHelper._
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers
import play.api.libs.json._

import scala.concurrent.duration._

class FiniteDurationFormatTest extends AnyFunSuite with Matchers {

  for {
    (duration, expected) <- List(
      (1.millis,    "1 millisecond"),
      (100.minutes, "100 minutes"),
      (30.days,     "30 days"))
  } {
    test(s"$duration to/from JsValue") {
      val jsValue = Json.toJson(duration)
      jsValue shouldEqual JsString(expected)
      jsValue.validate[FiniteDuration] shouldEqual duration.pure[JsResult]
    }
  }

  for {
    (json, expected) <- List(
      (JsString("1 min"), 1.minute),
      (JsNumber(2),       2.millis),
      (JsString("30 h"),  30.hours),
      (JsString("1 day"), 1.day))
  } {
    test(s"$json from Duration") {
      json.validate[FiniteDuration] shouldEqual expected.pure[JsResult]
    }
  }

  test("parse error") {
    val expected = JsError("cannot parse FiniteDuration from test: java.lang.NumberFormatException: format error test")
    JsString("test").validate[FiniteDuration] shouldEqual expected
  }
}
Example 165
Source File: ResourceRegistrySpec.scala From kafka-journal with MIT License | 5 votes |
package com.evolutiongaming.kafka.journal.util

import cats.effect._
import cats.effect.concurrent.{Deferred, Ref}
import cats.implicits._
import cats.{Applicative, Foldable}
import com.evolutiongaming.kafka.journal.IOSuite._
import org.scalatest.funsuite.AsyncFunSuite
import org.scalatest.matchers.should.Matchers

import scala.util.control.NoStackTrace

class ResourceRegistrySpec extends AsyncFunSuite with Matchers {

  val error: Throwable = new RuntimeException with NoStackTrace

  for {
    exitCase <- List(
      ExitCase.complete,
      ExitCase.error(error),
      ExitCase.canceled)
  } yield {
    test(s"ResourceRegistry releases resources, exitCase: $exitCase") {
      val result = exitCase match {
        case ExitCase.Completed    => testError(none)
        case ExitCase.Canceled     => testCancel
        case ExitCase.Error(error) => testError(error.some)
      }
      result.run()
    }
  }

  private def testError(error: Option[Throwable]) = {
    val n = 3

    def logic(release: IO[Unit]) = {
      ResourceRegistry.of[IO].use { registry =>
        val resource = Resource.make(().pure[IO]) { _ => release }
        val fa = registry.allocate(resource)
        implicit val monoidUnit = Applicative.monoid[IO, Unit]
        for {
          _ <- Foldable[List].fold(List.fill(n)(fa))
          _ <- error.fold(().pure[IO])(_.raiseError[IO, Unit])
        } yield {}
      }
    }

    for {
      ref      <- Ref.of[IO, Int](0)
      fa        = logic(ref.update(_ + 1))
      result   <- fa.redeem(_.some, _ => none)
      releases <- ref.get
    } yield {
      result shouldEqual error
      releases shouldEqual n
    }
  }

  private def testCancel = {
    for {
      released <- Ref.of[IO, Int](0)
      started  <- Deferred[IO, Unit]
      fiber    <- Concurrent[IO].start {
        ResourceRegistry.of[IO].use { registry =>
          val resource = Resource.make(().pure[IO]) { _ => released.update(_ + 1) }
          for {
            _ <- registry.allocate(resource)
            _ <- started.complete(())
            _ <- IO.never.as(())
          } yield {}
        }
      }
      _        <- started.get
      _        <- fiber.cancel
      released <- released.get
    } yield {
      released shouldEqual 1
    }
  }
}
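The spec relies on a core cats-effect guarantee: the release action of Resource.make runs whether the use block completes, fails, or is cancelled. A minimal cats-effect 2 sketch of that guarantee (the object and value names are illustrative):

import cats.effect.{IO, Resource}
import cats.effect.concurrent.Ref
import cats.implicits._

object ReleaseAlways {
  // Release runs even though `use` fails, so the counter ends at 1.
  val released: IO[Int] = for {
    ref     <- Ref.of[IO, Int](0)
    resource = Resource.make(IO.unit)(_ => ref.update(_ + 1))
    _       <- resource.use(_ => IO.raiseError(new RuntimeException("boom"))).attempt
    n       <- ref.get
  } yield n
}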
Example 166
Source File: GracefulFiberSpec.scala From kafka-journal with MIT License | 5 votes |
package com.evolutiongaming.kafka.journal.util

import cats.effect._
import cats.effect.concurrent.{Deferred, Ref}
import cats.implicits._
import com.evolutiongaming.kafka.journal.IOSuite._
import org.scalatest.funsuite.AsyncFunSuite
import org.scalatest.matchers.should.Matchers

class GracefulFiberSpec extends AsyncFunSuite with Matchers {

  test("GracefulFiber") {
    val result = for {
      deferred <- Deferred[IO, Unit]
      ref      <- Ref.of[IO, Boolean](false)
      fiber    <- GracefulFiber[IO].apply { cancel =>
        Concurrent[IO].start[Unit] {
          val loop = for {
            cancel <- cancel
            _      <- ref.set(cancel)
          } yield {
            if (cancel) ().some else none
          }
          for {
            _ <- deferred.complete(())
            _ <- loop.untilDefinedM
          } yield {}
        }
      }
      _        <- fiber.cancel
      result   <- ref.get
    } yield {
      result shouldEqual true
    }
    result.run()
  }
}
Example 167
Source File: ResourceRefTest.scala From kafka-journal with MIT License | 5 votes |
package com.evolutiongaming.kafka.journal.util

import cats.effect._
import cats.effect.concurrent.Ref
import cats.implicits._
import com.evolutiongaming.kafka.journal.IOSuite._
import org.scalatest.funsuite.AsyncFunSuite
import org.scalatest.matchers.should.Matchers

class ResourceRefTest extends AsyncFunSuite with Matchers {

  test("ResourceRef") {

    def resourceOf[A](a: A, ref: Ref[IO, Boolean]) = {
      Resource.make { ref.set(true).as(a) } { _ => ref.set(false) }
    }

    val result = for {
      ref0 <- Ref[IO].of(false)
      ref1 <- Ref[IO].of(false)
      ref  <- ResourceRef.of(resourceOf(0, ref0)).use { ref =>
        for {
          a <- ref.get
          _  = a shouldEqual 0
          a <- ref0.get
          _  = a shouldEqual true
          _ <- ref.set(resourceOf(1, ref1))
          a <- ref.get
          _  = a shouldEqual 1
          a <- ref0.get
          _  = a shouldEqual false
          a <- ref1.get
          _  = a shouldEqual true
        } yield ref
      }
      a    <- ref1.get
      _     = a shouldEqual false
      _    <- ().pure[IO]
      a    <- ref.get.attempt
      _     = a shouldEqual ResourceReleasedError.asLeft
    } yield {}
    result.run()
  }
}
Example 168
Source File: StartResourceSpec.scala From kafka-journal with MIT License | 5 votes |
package com.evolutiongaming.kafka.journal.util

import cats.effect._
import cats.effect.concurrent.{Deferred, Ref}
import cats.implicits._
import com.evolutiongaming.kafka.journal.IOSuite._
import org.scalatest.funsuite.AsyncFunSuite
import org.scalatest.matchers.should.Matchers

class StartResourceSpec extends AsyncFunSuite with Matchers {

  test("StartResource") {
    val result = for {
      deferred <- Deferred[IO, Unit]
      ref      <- Ref.of[IO, Boolean](false)
      res       = Resource.make(IO.unit)(_ => ref.set(true))
      fiber    <- StartResource(res)(_ => deferred.complete(()) *> IO.never.as(()))
      _        <- deferred.get
      _        <- fiber.cancel
      result   <- ref.get
    } yield {
      result shouldEqual true
    }
    result.run()
  }
}
Example 169
Source File: PersistentBinaryToBytesSpec.scala From kafka-journal with MIT License | 5 votes |
package akka.persistence.kafka.journal

import cats.implicits._
import com.evolutiongaming.kafka.journal.FromBytes.implicits._
import com.evolutiongaming.kafka.journal.ToBytes.implicits._
import com.evolutiongaming.kafka.journal.ByteVectorOf
import com.evolutiongaming.serialization.SerializedMsg
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers
import scodec.bits.ByteVector

import scala.util.{Success, Try}

class PersistentBinaryToBytesSpec extends AnyFunSuite with Matchers {

  import PersistentBinaryToBytesSpec._

  test("toBytes & fromBytes") {

    val expected = PersistentBinary(
      manifest = "persistentManifest".some,
      writerUuid = "writerUuid",
      payload = SerializedMsg(
        identifier = 2,
        manifest = "manifest",
        bytes = "payload".encodeStr))

    def verify(bytes: ByteVector) = {
      val actual = bytes.fromBytes[Try, PersistentBinary]
      actual shouldEqual Success(expected)
    }

    verify(expected.toBytes[Try].get)
    verify(ByteVectorOf[Try](getClass, "PersistentBinary.bin").get)
  }
}

object PersistentBinaryToBytesSpec {

  implicit class StrOps(val self: String) extends AnyVal {

    def encodeStr: ByteVector = {
      ByteVector
        .encodeUtf8(self)
        .fold(throw _, identity)
    }
  }
}
Example 170
Source File: EventSerializerSpec.scala From kafka-journal with MIT License | 5 votes |
package akka.persistence.kafka.journal

import java.io.FileOutputStream

import akka.persistence.PersistentRepr
import akka.persistence.serialization.Snapshot
import cats.effect.{IO, Sync}
import com.evolutiongaming.kafka.journal.FromBytes.implicits._
import com.evolutiongaming.kafka.journal.IOSuite._
import com.evolutiongaming.kafka.journal._
import com.evolutiongaming.kafka.journal.util.CatsHelper._
import org.scalatest.funsuite.AsyncFunSuite
import org.scalatest.matchers.should.Matchers
import play.api.libs.json.JsString
import scodec.bits.ByteVector
import TestJsonCodec.instance
import cats.implicits._

import scala.util.Try

class EventSerializerSpec extends AsyncFunSuite with ActorSuite with Matchers {

  for {
    (name, payloadType, payload) <- List(
      ("PersistentRepr.bin",       PayloadType.Binary, Snapshot("binary")),
      ("PersistentRepr.text.json", PayloadType.Json,   "text"),
      ("PersistentRepr.json",      PayloadType.Json,   JsString("json")))
  } {

    test(s"toEvent & toPersistentRepr, payload: $payload") {
      val persistenceId = "persistenceId"
      val persistentRepr = PersistentRepr(
        payload = payload,
        sequenceNr = 1,
        persistenceId = persistenceId,
        manifest = "manifest",
        writerUuid = "writerUuid")

      val fa = for {
        serializer <- EventSerializer.of[IO](actorSystem)
        event      <- serializer.toEvent(persistentRepr)
        actual     <- serializer.toPersistentRepr(persistenceId, event)
        _          <- Sync[IO].delay { actual shouldEqual persistentRepr }
        payload    <- event.payload.getOrError[IO]("Event.payload is not defined")
        _           = payload.payloadType shouldEqual payloadType
        bytes      <- ByteVectorOf[IO](getClass, name)
      } yield {
        payload match {
          case payload: Payload.Binary => payload.value shouldEqual bytes
          case payload: Payload.Text   => payload.value shouldEqual bytes.fromBytes[Try, String].get
          case payload: Payload.Json   => payload.value shouldEqual JsonCodec.summon[Try].decode.fromBytes(bytes).get
        }
      }
      fa.run()
    }
  }

  def writeToFile[F[_] : Sync](bytes: ByteVector, path: String): F[Unit] = {
    Sync[F].delay {
      val os = new FileOutputStream(path)
      os.write(bytes.toArray)
      os.close()
    }
  }
}
Example 171
Source File: GroupByWeightSpec.scala From kafka-journal with MIT License | 5 votes |
package akka.persistence.kafka.journal

import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers

class GroupByWeightSpec extends AnyFunSuite with Matchers {

  for {
    (weight, value, expected) <- List(
      (10, Nil,              Nil),
      (-1, List(1),          List(List(1))),
      (1,  List(1, 2, 3),    List(List(1), List(2), List(3))),
      (3,  List(1, 2, 3, 4), List(List(1, 2), List(3), List(4))),
      (7,  List(1, 2, 3, 4), List(List(1, 2, 3), List(4))),
      (4,  List(1, 3, 1, 2), List(List(1, 3), List(1, 2))))
  } {
    test(s"group value: $value, weight: $weight") {
      GroupByWeight(value, weight)(identity) shouldEqual expected
    }
  }
}
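The expectations above are consistent with a greedy fold: each item joins the current group while the running weight stays within the limit, otherwise a new group starts, and an item heavier than the limit gets a group of its own. A sketch of that strategy, reproducing the table's results; this is an illustration, not kafka-journal's actual implementation:

object GreedyGroup {
  // Greedily pack consecutive ints into groups whose sums stay <= weight.
  def group(values: List[Int], weight: Int): List[List[Int]] = {
    val grouped = values.foldLeft(List.empty[List[Int]]) {
      case (group :: rest, a) if group.sum + a <= weight => (a :: group) :: rest
      case (acc, a)                                      => List(a) :: acc
    }
    grouped.map(_.reverse).reverse
  }
}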
Example 172
Source File: BatchingSpec.scala From kafka-journal with MIT License | 5 votes |
package akka.persistence.kafka.journal

import akka.persistence.{AtomicWrite, PersistentRepr}
import cats.Id
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers

class BatchingSpec extends AnyFunSuite with Matchers {

  private val atomicWrite = AtomicWrite(List(PersistentRepr(None, persistenceId = "persistenceId")))

  test("disabled") {
    val batching = Batching.disabled[Id]
    batching(List(atomicWrite, atomicWrite)) shouldEqual List(List(atomicWrite), List(atomicWrite))
  }

  test("all") {
    val batching = Batching.all[Id]
    batching(List(atomicWrite, atomicWrite)) shouldEqual List(List(atomicWrite, atomicWrite))
  }

  test("byNumberOfEvents 1") {
    val batching = Batching.byNumberOfEvents[Id](1)
    batching(List(atomicWrite, atomicWrite)) shouldEqual List(List(atomicWrite), List(atomicWrite))
  }

  test("byNumberOfEvents 2") {
    val batching = Batching.byNumberOfEvents[Id](2)
    batching(List(atomicWrite, atomicWrite)) shouldEqual List(List(atomicWrite, atomicWrite))
  }
}
Example 173
Source File: PersistentJsonSpec.scala From kafka-journal with MIT License | 5 votes |
package akka.persistence.kafka.journal

import cats.implicits._
import com.evolutiongaming.kafka.journal.PayloadType
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers
import play.api.libs.json.{JsSuccess, Json}

class PersistentJsonSpec extends AnyFunSuite with Matchers {

  for {
    payloadType <- List(PayloadType.Json.some, PayloadType.Text.some, none)
    manifest    <- List(none, "manifest".some, "".some)
  } {
    test(s"toJson & fromJson, payloadType: $payloadType, manifest: $manifest") {
      val persistent = PersistentJson(
        manifest = manifest,
        writerUuid = "writerUuid",
        payloadType = payloadType,
        payload = "payload")
      val json = Json.toJson(persistent)
      json.validate[PersistentJson[String]] shouldEqual JsSuccess(persistent)
    }
  }
}
Example 174
Source File: ActorSystemRefTest.scala From kafka-journal with MIT License | 5 votes |
package akka.persistence.kafka.journal

import cats.arrow.FunctionK
import cats.effect.{IO, Sync}
import cats.implicits._
import com.evolutiongaming.kafka.journal.ActorSuite
import com.evolutiongaming.kafka.journal.IOSuite._
import org.scalatest.funsuite.AsyncFunSuite
import org.scalatest.matchers.should.Matchers

class ActorSystemRefTest extends AsyncFunSuite with ActorSuite with Matchers {

  import ActorSystemRefTest._

  test("Extension") {
    val result = for {
      ref <- Sync[IO].delay { Extension(actorSystem) }
      ref <- ref.fromFuture[IO].mapK(FunctionK.id).pure[IO]
      a   <- ref.get.start
      _   <- ref.set(0)
      a   <- a.join
      _    = a shouldEqual 0
      a   <- ref.get
      _    = a shouldEqual 0
      a   <- ref.set(0).attempt
      _    = a.isLeft shouldEqual true
    } yield {}
    result.run()
  }
}

object ActorSystemRefTest {

  object Extension extends ActorSystemRef.ExtensionId[Int]
}
Example 175
Source File: KafkaJournalConfigSpec.scala From kafka-journal with MIT License | 5 votes |
package akka.persistence.kafka.journal

import cats.implicits._
import com.evolutiongaming.kafka.journal.Journal.CallTimeThresholds
import com.evolutiongaming.kafka.journal.{JournalConfig, KafkaConfig}
import com.typesafe.config.ConfigFactory
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers
import pureconfig.{ConfigReader, ConfigSource}

import scala.concurrent.duration._

class KafkaJournalConfigSpec extends AnyFunSuite with Matchers {

  test("apply from empty config") {
    val config = ConfigFactory.empty()
    val expected = KafkaJournalConfig.default
    ConfigSource
      .fromConfig(config)
      .load[KafkaJournalConfig] shouldEqual expected.pure[ConfigReader.Result]
  }

  test("apply from config") {
    val config = ConfigFactory.parseURL(getClass.getResource("kafka-journal.conf"))
    val expected = KafkaJournalConfig(
      startTimeout = 1.millis,
      stopTimeout = 2.seconds,
      maxEventsInBatch = 3,
      callTimeThresholds = CallTimeThresholds(
        append = 1.millis,
        read = 2.millis,
        pointer = 3.millis,
        delete = 4.millis),
      journal = JournalConfig(
        headCache = JournalConfig.HeadCache(enabled = false),
        kafka = KafkaConfig("client-id")),
      jsonCodec = KafkaJournalConfig.JsonCodec.Jsoniter,
    )
    ConfigSource
      .fromConfig(config)
      .load[KafkaJournalConfig] shouldEqual expected.pure[ConfigReader.Result]
  }

  test("apply from reference.conf") {
    val config = ConfigFactory.load()
    val expected = KafkaJournalConfig.default
    ConfigSource
      .fromConfig(config)
      .at("evolutiongaming.kafka-journal.persistence.journal")
      .load[KafkaJournalConfig] shouldEqual expected.pure[ConfigReader.Result]
  }
}
Example 176
Source File: ToKeySpec.scala From kafka-journal with MIT License | 5 votes |
package akka.persistence.kafka.journal

import cats.Id
import com.evolutiongaming.kafka.journal.Key
import com.typesafe.config.ConfigFactory
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers

class ToKeySpec extends AnyFunSuite with Matchers {

  test("apply from empty config") {
    val config = ConfigFactory.empty()
    val toKey = ToKey.fromConfig[Id](config)
    toKey("id") shouldEqual Key(topic = "journal", id = "id")
  }

  test("apply from to-key-split config") {
    val config = ConfigFactory.parseURL(getClass.getResource("to-key-split.conf"))
    val toKey = ToKey.fromConfig[Id](config)
    toKey("topic-id") shouldEqual Key(id = "id", topic = "topic")
    toKey("t-o-p-i-c-id") shouldEqual Key(id = "id", topic = "t-o-p-i-c")
    toKey("id") shouldEqual Key(id = "id", topic = "test")
  }

  test("apply from to-key-constant-topic config") {
    val config = ConfigFactory.parseURL(getClass.getResource("to-key-constant-topic.conf"))
    val toKey = ToKey.fromConfig[Id](config)
    toKey("topic-id") shouldEqual Key(topic = "test", id = "topic-id")
    toKey("id") shouldEqual Key(id = "id", topic = "test")
  }
}
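The to-key-split expectations imply the persistence id is cut at its last separator: everything before it becomes the topic, the remainder the id, with a fallback topic when no separator is present. A sketch of that inferred rule (object name and signature are illustrative, not the library's API):

object SplitKey {
  // Split "t-o-p-i-c-id" at the last '-' into (topic, id);
  // fall back to the configured topic when there is no separator.
  def apply(persistenceId: String, fallbackTopic: String): (String, String) =
    persistenceId.lastIndexOf('-') match {
      case -1 => (fallbackTopic, persistenceId)
      case i  => (persistenceId.substring(0, i), persistenceId.substring(i + 1))
    }
}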
Example 177
Source File: EventualPayloadAndTypeSpec.scala From kafka-journal with MIT License | 5 votes |
package com.evolutiongaming.kafka.journal.circe

import cats.implicits._
import com.evolutiongaming.kafka.journal.TestJsonCodec.instance
import com.evolutiongaming.kafka.journal._
import com.evolutiongaming.kafka.journal.circe.Instances._
import com.evolutiongaming.kafka.journal.eventual._
import io.circe.{Json => CirceJson}
import org.scalatest.EitherValues
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers
import play.api.libs.json.{Json => PlayJson}
import scodec.bits.ByteVector

import scala.util.Try

class EventualPayloadAndTypeSpec extends AnyFunSuite with Matchers with EitherValues {

  private val playEventualWrite = EventualWrite.summon[Try, Payload]
  private val circeEventualRead = EventualRead.summon[Try, CirceJson]

  for {
    (playPayload, circePayload) <- List(
      (Payload.json(PlayJson.obj(("key", "value"))), CirceJson.obj("key" -> CirceJson.fromString("value")))
    )
  } {
    test(s"toEventual with Play, fromEventual with Circe") {
      val actual = for {
        payloadAndType <- playEventualWrite(playPayload)
        actual         <- circeEventualRead(payloadAndType)
      } yield actual

      actual shouldBe circePayload.pure[Try]
    }
  }

  for {
    (name, payloadAndType) <- List(
      ("binary", EventualPayloadAndType(ByteVector.empty.asRight, PayloadType.Binary)),
      ("text",   EventualPayloadAndType("text".asLeft, PayloadType.Text))
    )
  } {
    test(s"fromEventual: returns an error for non-json payload type: $name") {
      val result = circeEventualRead(payloadAndType).toEither
      result.left.value shouldBe a[JournalError]
      result.left.value.getMessage should include(payloadAndType.payloadType.toString)
    }
  }

  test("fromEventual: returns an error for payload type json and payload bytes") {
    val payloadAndType = EventualPayloadAndType(ByteVector.empty.asRight, PayloadType.Json)
    val result = circeEventualRead(payloadAndType).toEither
    result.left.value shouldBe a[JournalError]
    result.left.value.getMessage should include("String expected")
  }

  test("fromEventual: returns an error for malformed json") {
    val malformed = "{\"key\": {sss}}"
    val payloadAndType = EventualPayloadAndType(malformed.asLeft, PayloadType.Json)
    val result = circeEventualRead(payloadAndType).toEither
    result.left.value shouldBe a[JournalError]
    result.left.value.getMessage should (include("ParsingFailure") and include("sss"))
  }
}
Example 178
Source File: HelloWorldServiceSpec.scala From lagom with Apache License 2.0 | 5 votes |
package com.example.helloworld.impl

import com.lightbend.lagom.scaladsl.server.LocalServiceLocator
import com.lightbend.lagom.scaladsl.testkit.ServiceTest
import org.scalatest.BeforeAndAfterAll
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AsyncWordSpec
import com.example.helloworld.api._
import org.scalatest.concurrent.Eventually

import scala.concurrent.Await
import scala.concurrent.duration._

class HelloWorldServiceSpec extends AsyncWordSpec with Matchers with BeforeAndAfterAll with Eventually {

  private val server = ServiceTest.startServer(
    ServiceTest.defaultSetup
      .withCassandra()
  ) { ctx =>
    new HelloWorldApplication(ctx) with LocalServiceLocator
  }

  val client: HelloWorldService = server.serviceClient.implement[HelloWorldService]

  override protected def afterAll(): Unit = server.stop()

  "Hello World service" should {

    "say hello" in {
      client.hello("Alice").invoke().map { answer =>
        answer should ===(
          """Hello, Alice!
            |Started reports: default-projected-message
            |Stopped reports: default-projected-message
            |""".stripMargin
        )
      }
    }

    "allow responding with a custom message" in {
      for {
        _      <- client.useGreeting("Bob", "Hi").invoke()
        answer <- client.hello("Bob").invoke()
      } yield {
        answer should ===(
          """Hi, Bob!
            |Started reports: default-projected-message
            |Stopped reports: default-projected-message
            |""".stripMargin
        )
      }

      implicit val patienceConfig: PatienceConfig = PatienceConfig(timeout = 25.seconds, interval = 300.millis)

      eventually {
        client.hello("Bob").invoke().map(_ should ===(
          """Hi, Bob!
            |Started reports: Hi
            |Stopped reports: default-projected-message
            |""".stripMargin
        ))
      }
    }
  }
}
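The Eventually mixin used above retries its block until it stops throwing or the implicit PatienceConfig budget runs out, which is how the spec waits for the projection to catch up. A self-contained sketch of the retry behaviour (class and values are illustrative):

import org.scalatest.concurrent.Eventually
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers

import scala.concurrent.duration._

class EventuallyTest extends AnyFunSuite with Matchers with Eventually {
  test("eventually retries until the assertion passes or patience runs out") {
    implicit val patienceConfig: PatienceConfig =
      PatienceConfig(timeout = 2.seconds, interval = 50.millis)
    var attempts = 0
    eventually {
      attempts += 1
      attempts should be >= 3 // fails twice, passes on the third retry
    }
  }
}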
Example 179
Source File: ServiceRegistryInteropSpec.scala From lagom with Apache License 2.0 | 5 votes |
package com.lightbend.lagom.registry.impl

import java.net.URI
import java.util.Collections
import java.util.Optional

import akka.actor.ActorSystem
import akka.testkit.TestKit
import akka.util.ByteString
import com.lightbend.lagom.devmode.internal.scaladsl.registry.RegisteredService
import com.lightbend.lagom.devmode.internal.scaladsl.registry.ServiceRegistryService
import com.lightbend.lagom.internal.javadsl.registry.{ RegisteredService => jRegisteredService }
import com.lightbend.lagom.internal.javadsl.registry.{ ServiceRegistryService => jServiceRegistryService }
import com.lightbend.lagom.devmode.internal.scaladsl.registry.{ RegisteredService => sRegisteredService }
import com.lightbend.lagom.devmode.internal.scaladsl.registry.{ ServiceRegistryService => sServiceRegistryService }
import com.lightbend.lagom.javadsl.api.ServiceAcl
import com.lightbend.lagom.javadsl.api.deser.MessageSerializer
import com.lightbend.lagom.javadsl.api.deser.StrictMessageSerializer
import com.lightbend.lagom.javadsl.api.transport.MessageProtocol
import com.lightbend.lagom.javadsl.api.transport.Method
import com.lightbend.lagom.javadsl.jackson.JacksonSerializerFactory
import org.scalatest.BeforeAndAfterAll
import org.scalatest.concurrent.Futures
import play.api.libs.json.Format
import play.api.libs.json.Json
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

class ServiceRegistryInteropSpec extends AnyFlatSpec with Matchers with Futures with BeforeAndAfterAll {
  val system                   = ActorSystem()
  val jacksonSerializerFactory = new JacksonSerializerFactory(system)

  protected override def afterAll(): Unit = {
    TestKit.shutdownActorSystem(actorSystem = system, verifySystemShutdown = true)
  }

  behavior.of("ServiceRegistry serializers")

  it should "interop between java and scala (RegisteredService)" in {
    val msg = jRegisteredService.of("inventory", URI.create("https://localhost:123/asdf"), Optional.of("https"))
    roundTrip(msg) should be(msg)
  }

  it should "interop between java and scala when optional fields are empty (RegisteredService)" in {
    val msg = jRegisteredService.of("inventory", URI.create("https://localhost:123/asdf"), Optional.empty[String])
    roundTrip(msg) should be(msg)
  }

  it should "interop between java and scala (ServiceRegistryService)" in {
    val msg = jServiceRegistryService.of(
      URI.create("https://localhost:123/asdf"),
      Collections.singletonList(ServiceAcl.methodAndPath(Method.GET, "/items"))
    )
    roundTrip(msg) should be(msg)
  }

  it should "interop between java and scala when optional fields are empty (ServiceRegistryService)" in {
    val msg = jServiceRegistryService.of(URI.create("https://localhost:123/asdf"), Collections.emptyList[ServiceAcl])
    roundTrip(msg) should be(msg)
  }

  private def roundTrip(input: jServiceRegistryService): jServiceRegistryService = {
    roundTrip(
      input,
      jacksonSerializerFactory.messageSerializerFor[jServiceRegistryService](classOf[jServiceRegistryService]),
      com.lightbend.lagom.scaladsl.playjson.JsonSerializer[ServiceRegistryService].format
    )(sServiceRegistryService.format)
  }

  private def roundTrip(input: jRegisteredService): jRegisteredService = {
    roundTrip(
      input,
      jacksonSerializerFactory.messageSerializerFor[jRegisteredService](classOf[jRegisteredService]),
      com.lightbend.lagom.scaladsl.playjson.JsonSerializer[RegisteredService].format
    )(sRegisteredService.format)
  }

  private def roundTrip[J, S](
      input: J,
      jacksonSerializer: StrictMessageSerializer[J],
      playJsonFormatter: Format[S]
  )(implicit format: Format[S]): J = {
    val byteString: ByteString = jacksonSerializer.serializerForRequest().serialize(input)
    val scalaValue: S          = playJsonFormatter.reads(Json.parse(byteString.toArray)).get
    val str: String            = playJsonFormatter.writes(scalaValue).toString()
    val jacksonDeserializer: MessageSerializer.NegotiatedDeserializer[J, ByteString] = jacksonSerializer.deserializer(
      new MessageProtocol(Optional.of("application/json"), Optional.empty[String], Optional.empty[String])
    )
    jacksonDeserializer.deserialize(ByteString(str))
  }
}
Example 180
Source File: InternalRouterSpec.scala From lagom with Apache License 2.0 | 5 votes |
package com.lightbend.lagom.registry.impl import java.net.URI import java.util import java.util.Collections import com.lightbend.lagom.internal.javadsl.registry.ServiceRegistryService import com.lightbend.lagom.javadsl.api.ServiceAcl import com.lightbend.lagom.javadsl.api.transport.Method import com.lightbend.lagom.registry.impl.ServiceRegistryActor.Found import com.lightbend.lagom.registry.impl.ServiceRegistryActor.Route import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers class InternalRouterSpec extends AnyFlatSpec with Matchers { behavior.of("InternalRouter") it should "find the appropriate URI given the portName" in { val httpUri = new URI("http://localhost.com/pathABC") val httpsUri = new URI("https://localhost.com:123/pathABC") val simpleName = "my-service" val acl = ServiceAcl.methodAndPath(Method.GET, "/pathABC") val srs = ServiceRegistryService.of(util.Arrays.asList(httpUri, httpsUri), Collections.singletonList(acl)) val registry = new InternalRegistry(Map.empty) registry.register(simpleName, srs) val router = new InternalRouter router.rebuild(registry) router.routeFor(Route("GET", "/pathABC", None)) should be(Found(httpUri)) router.routeFor(Route("GET", "/pathABC", Some("http"))) should be(Found(httpUri)) router.routeFor(Route("GET", "/pathABC", Some("https"))) should be(Found(httpsUri)) } }
Example 181
Source File: LagomDevModeServiceDiscoverySpec.scala From lagom with Apache License 2.0 | 5 votes |
package com.lightbend.lagom.devmode.internal.registry import java.net.InetAddress import java.net.URI import akka.actor.ActorSystem import akka.discovery.ServiceDiscovery.Resolved import akka.discovery.ServiceDiscovery.ResolvedTarget import akka.testkit.TestKit import org.scalatest.BeforeAndAfterAll import org.scalatest.concurrent.ScalaFutures._ import scala.collection.immutable import scala.concurrent.Future import scala.concurrent.duration._ import org.scalatest.matchers.should.Matchers import org.scalatest.wordspec.AnyWordSpecLike class LagomDevModeServiceDiscoverySpec extends TestKit(ActorSystem("LagomDevModeSimpleServiceDiscoverySpec")) with AnyWordSpecLike with Matchers with BeforeAndAfterAll { private val client = new StaticServiceRegistryClient( Map( "test-service" -> List(URI.create("http://localhost:8080")), "test-service-without-port" -> List(URI.create("http://localhost")) ) ) protected override def afterAll(): Unit = { shutdown(verifySystemShutdown = true) } private val discovery = LagomDevModeServiceDiscovery(system) discovery.setServiceRegistryClient(client) "DevModeSimpleServiceDiscoverySpec" should { "resolve services in the registry" in { val expected = Resolved("test-service", List(ResolvedTarget("localhost", Some(8080), Some(InetAddress.getLocalHost)))) discovery.lookup("test-service", 100.milliseconds).futureValue shouldBe expected } "allow missing ports" in { val expected = Resolved("test-service-without-port", List(ResolvedTarget("localhost", None, Some(InetAddress.getLocalHost)))) discovery.lookup("test-service-without-port", 100.milliseconds).futureValue shouldBe expected } } } private class StaticServiceRegistryClient(registrations: Map[String, List[URI]]) extends ServiceRegistryClient { override def locateAll(serviceName: String, portName: Option[String]): Future[immutable.Seq[URI]] = Future.successful(registrations.getOrElse(serviceName, Nil)) }
Example 182
Source File: AbstractLoggingServiceRegistryClientSpec.scala From lagom with Apache License 2.0 | 5 votes |
package com.lightbend.lagom.devmode.internal.registry import java.net.URI import scala.concurrent.Future import org.scalatest.matchers.should.Matchers import org.scalatest.wordspec.AsyncWordSpec class AbstractLoggingServiceRegistryClientSpec extends AsyncWordSpec with Matchers { private val client = new AbstractLoggingServiceRegistryClient { override def internalLocateAll(serviceName: String, portName: Option[String]): Future[List[URI]] = serviceName match { case "failing-service" => Future.failed(new IllegalArgumentException("Ignore: expected error")) case "empty-service" => Future.successful(List()) case "successful-service" => Future.successful(List(URI.create("http://localhost:8080"))) } } "AbstractLoggingServiceRegistryClient" when { "internal lookup fails" in { client .locateAll("failing-service", None) .failed .map(_ shouldBe an[IllegalArgumentException]) } "internal lookup has no result" in { client .locateAll("empty-service", None) .map(_ shouldEqual Nil) } "internal lookup has a successful result" in { client .locateAll("successful-service", None) .map(_ shouldEqual List(URI.create("http://localhost:8080"))) } } }
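The failing-service case leans on AsyncWordSpec mapping over Futures rather than blocking. As a standalone reminder, a minimal sketch of the failed-Future assertion pattern used there:

import scala.concurrent.Future
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AsyncWordSpec

class FailedFutureAssertionSpec extends AsyncWordSpec with Matchers {
  "a failed Future" should {
    "expose its Throwable through .failed" in {
      val failing: Future[Int] = Future.failed(new IllegalArgumentException("boom"))
      // .failed inverts the Future so the async spec can assert on the error
      failing.failed.map(_ shouldBe an[IllegalArgumentException])
    }
  }
}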
Example 183
Source File: ProjectionStateSpec.scala From lagom with Apache License 2.0 | 5 votes |
package com.lightbend.lagom.internal.projection

import com.lightbend.lagom.internal.projection.ProjectionRegistryActor.ProjectionName
import com.lightbend.lagom.internal.projection.ProjectionRegistryActor.WorkerCoordinates
import com.lightbend.lagom.projection.Projection
import com.lightbend.lagom.projection.Started
import com.lightbend.lagom.projection.State
import com.lightbend.lagom.projection.Status
import com.lightbend.lagom.projection.Stopped
import com.lightbend.lagom.projection.Worker
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class ProjectionStateSpec extends AnyWordSpec with Matchers {
  private val prj001 = "prj001"
  private val prj002 = "prj002"

  val p1w1 = prj001 + "-workers-1"
  val p1w2 = prj001 + "-workers-2"
  val p1w3 = prj001 + "-workers-3"
  val p2w1 = s"$prj002-workers-1"

  val coordinates001_1 = WorkerCoordinates(prj001, p1w1)
  val coordinates001_2 = WorkerCoordinates(prj001, p1w2)
  val coordinates001_3 = WorkerCoordinates(prj001, p1w3)
  val coordinates002_1 = WorkerCoordinates(prj002, p2w1)

  val nameIndex: Map[ProjectionName, Set[WorkerCoordinates]] = Map(
    prj001 -> Set(coordinates001_1, coordinates001_2, coordinates001_3),
    prj002 -> Set(coordinates002_1)
  )

  val requestedStatus: Map[WorkerCoordinates, Status] = Map(
    coordinates001_1 -> Stopped,
    coordinates001_2 -> Started,
    coordinates001_3 -> Stopped,
    coordinates002_1 -> Started
  )

  val observedStatus: Map[WorkerCoordinates, Status] = Map(
    coordinates001_1 -> Stopped,
    coordinates001_2 -> Stopped,
    coordinates001_3 -> Started,
    coordinates002_1 -> Started
  )

  def findProjection(state: State)(projectionName: String): Option[Projection] =
    state.projections.find(_.name == projectionName)

  def findWorker(state: State)(workerKey: String): Option[Worker] =
    state.projections.flatMap(_.workers).find(_.key == workerKey)

  "ProjectionStateSpec" should {
    "be built from replicated data" in {
      val state = State.fromReplicatedData(nameIndex, requestedStatus, observedStatus, Started, Stopped)
      state.projections.size should equal(2)
      state.projections.flatMap(_.workers).size should equal(4)
      state.projections.flatMap(_.workers).find(_.key == coordinates001_3.asKey) shouldBe Some(
        Worker(p1w3, coordinates001_3.asKey, Stopped, Started)
      )
    }

    "find projection by name" in {
      val state = State.fromReplicatedData(nameIndex, requestedStatus, observedStatus, Started, Stopped)
      findProjection(state)(prj001) should not be None
    }

    "find worker by key" in {
      val state       = State.fromReplicatedData(nameIndex, requestedStatus, observedStatus, Started, Stopped)
      val maybeWorker = findWorker(state)("prj001-prj001-workers-3")
      maybeWorker shouldBe Some(
        Worker(p1w3, coordinates001_3.asKey, Stopped, Started)
      )
    }

    "be built from default values when workers in nameIndex don't have requested or observed values" in {
      val newProjectionName = "new-projection"
      val newWorkerName     = "new-worker-001"
      val newCoordinates    = WorkerCoordinates(newProjectionName, newWorkerName)
      val richIndex = nameIndex ++ Map(
        newProjectionName -> Set(newCoordinates)
      )
      val defaultRequested = Stopped
      val defaultObserved  = Started
      val state =
        State.fromReplicatedData(richIndex, requestedStatus, observedStatus, defaultRequested, defaultObserved)
      val maybeWorker = findWorker(state)(newCoordinates.asKey)
      maybeWorker shouldBe Some(
        Worker(newWorkerName, newCoordinates.asKey, defaultRequested, defaultObserved)
      )
    }
  }
}
Example 184
Source File: PublishServiceSpec.scala From lagom with Apache License 2.0 | 5 votes |
package docs.scaladsl.mb import com.lightbend.lagom.scaladsl.api.broker.Topic import com.lightbend.lagom.scaladsl.server.LagomApplication import com.lightbend.lagom.scaladsl.server.LagomApplicationContext import com.lightbend.lagom.scaladsl.server.LagomServer import com.lightbend.lagom.scaladsl.server.LocalServiceLocator import com.lightbend.lagom.scaladsl.testkit.ServiceTest import com.lightbend.lagom.scaladsl.testkit.TestTopicComponents import play.api.libs.ws.ahc.AhcWSComponents import org.scalatest.matchers.should.Matchers import org.scalatest.wordspec.AsyncWordSpec import akka.NotUsed import akka.Done import akka.stream.scaladsl.Source import akka.stream.testkit.scaladsl.TestSink import akka.stream.testkit.TestSubscriber import akka.stream.testkit.TestSubscriber.Probe abstract class PublishApplication(context: LagomApplicationContext) extends LagomApplication(context) with AhcWSComponents { override lazy val lagomServer = serverFor[service.PublishService](new service.PublishServiceImpl()) } package service { import com.lightbend.lagom.scaladsl.api.Service import com.lightbend.lagom.scaladsl.broker.TopicProducer object PublishService { val TOPIC_NAME = "events" } trait PublishService extends Service { final override def descriptor = { import Service._ named("brokerdocs") .withTopics(topic(PublishService.TOPIC_NAME, events)) .withAutoAcl(true) } def events(): Topic[PubMessage] } case class PubMessage(message: String) object PubMessage { import play.api.libs.json.Format import play.api.libs.json.Json implicit val format: Format[PubMessage] = Json.format[PubMessage] } class PublishServiceImpl() extends PublishService { override def events(): Topic[PubMessage] = TopicProducer.singleStreamWithOffset { offset => Source((1 to 10)).map(i => (PubMessage(s"msg $i"), offset)) } } } class PublishServiceSpec extends AsyncWordSpec with Matchers { import service._ //#topic-test-publishing-into-a-topic "The PublishService" should { "publish events on the topic" in ServiceTest.withServer(ServiceTest.defaultSetup) { ctx => new PublishApplication(ctx) with LocalServiceLocator with TestTopicComponents } { server => implicit val system = server.actorSystem implicit val mat = server.materializer val client: PublishService = server.serviceClient.implement[PublishService] val source = client.events().subscribe.atMostOnceSource source .runWith(TestSink.probe[PubMessage]) .request(1) .expectNext should ===(PubMessage("msg 1")) } } //#topic-test-publishing-into-a-topic }
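The assertion at the end uses akka-stream-testkit's TestSink probe to pull elements on demand. A minimal standalone sketch of that probe pattern, assuming Akka 2.6+ where an implicit ActorSystem provides the materializer:

import akka.actor.ActorSystem
import akka.stream.scaladsl.Source
import akka.stream.testkit.scaladsl.TestSink

implicit val system: ActorSystem = ActorSystem("testsink-sketch")

// Demand three elements, assert them in order, then expect completion.
Source(1 to 3)
  .runWith(TestSink.probe[Int])
  .request(3)
  .expectNext(1, 2, 3)
  .expectComplete()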
Example 185
Source File: PostSpec.scala From lagom with Apache License 2.0 | 5 votes |
package docs.home.scaladsl.persistence //#unit-test import scala.concurrent.Await import scala.concurrent.duration._ import akka.Done import akka.actor.ActorSystem import com.lightbend.lagom.scaladsl.persistence.PersistentEntity.InvalidCommandException import com.lightbend.lagom.scaladsl.playjson.JsonSerializerRegistry import com.lightbend.lagom.scaladsl.testkit.PersistentEntityTestDriver import com.typesafe.config.ConfigFactory import org.scalactic.TypeCheckedTripleEquals import org.scalatest.BeforeAndAfterAll import org.scalatest.matchers.should.Matchers import org.scalatest.wordspec.AnyWordSpecLike class PostSpec extends AnyWordSpecLike with Matchers with BeforeAndAfterAll with TypeCheckedTripleEquals { val system = ActorSystem("PostSpec", JsonSerializerRegistry.actorSystemSetupFor(BlogPostSerializerRegistry)) override def afterAll(): Unit = { Await.ready(system.terminate, 10.seconds) } "Blog Post entity" must { "handle AddPost" in { val driver = new PersistentEntityTestDriver(system, new Post, "post-1") val content = PostContent("Title", "Body") val outcome = driver.run(AddPost(content)) outcome.events should ===(List(PostAdded("post-1", content))) outcome.state.published should ===(false) outcome.state.content should ===(Some(content)) outcome.replies should ===(List(AddPostDone("post-1"))) outcome.issues should be(Nil) } "validate title" in { val driver = new PersistentEntityTestDriver(system, new Post, "post-1") val outcome = driver.run(AddPost(PostContent("", "Body"))) outcome.replies.head.getClass should be(classOf[InvalidCommandException]) outcome.events.size should ===(0) outcome.issues should be(Nil) } "handle ChangeBody" in { val driver = new PersistentEntityTestDriver(system, new Post, "post-1") driver.run(AddPost(PostContent("Title", "Body"))) val outcome = driver.run(ChangeBody("New body 1"), ChangeBody("New body 2")) outcome.events should ===(List(BodyChanged("post-1", "New body 1"), BodyChanged("post-1", "New body 2"))) outcome.state.published should ===(false) outcome.state.content.get.body should ===("New body 2") outcome.replies should ===(List(Done, Done)) outcome.issues should be(Nil) } } } //#unit-test
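Every test above follows the same PersistentEntityTestDriver shape: run one or more commands, then assert on the returned Outcome's events, state, replies and issues. A hedged sketch of that shape on its own, reusing the entity and commands from the spec (the entity id is illustrative):

// Run a command and assert on each Outcome field.
val driver  = new PersistentEntityTestDriver(system, new Post, "post-2")
val content = PostContent("Another title", "Body")
val outcome = driver.run(AddPost(content))

outcome.events should ===(List(PostAdded("post-2", content)))
outcome.replies should ===(List(AddPostDone("post-2")))
outcome.issues should be(Nil) // non-empty issues signal serialization problems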
Example 186
Source File: AkkaDiscoveryServiceLocatorSpec.scala From lagom with Apache License 2.0 | 5 votes |
package com.lightbend.lagom.scaladsl.akka.discovery import akka.NotUsed import com.lightbend.lagom.scaladsl.api.Service import com.lightbend.lagom.scaladsl.api.ServiceCall import com.lightbend.lagom.scaladsl.api.ServiceLocator import com.lightbend.lagom.scaladsl.server.LagomApplication import com.lightbend.lagom.scaladsl.server.LagomApplicationContext import com.lightbend.lagom.scaladsl.server.LagomServer import com.lightbend.lagom.scaladsl.server.LocalServiceLocator import com.lightbend.lagom.scaladsl.testkit.ServiceTest import org.scalatest._ import play.api.libs.ws.WSClient import play.api.libs.ws.ahc.AhcWSComponents import scala.concurrent.Future import org.scalatest.matchers.should.Matchers import org.scalatest.wordspec.AsyncWordSpec class AkkaDiscoveryServiceLocatorSpec extends AsyncWordSpec with Matchers with BeforeAndAfterAll with OptionValues { "ServiceLocator" should { "retrieve registered services" in { val serviceLocator = server.application.serviceLocator serviceLocator.locate("fake-service").map { res => res.value.toString shouldBe "http://fake-service-host:9119" } } } private val server = ServiceTest.startServer(ServiceTest.defaultSetup) { ctx => new LagomTestApplication(ctx) } protected override def afterAll() = server.stop() class LagomTestApplication(ctx: LagomApplicationContext) extends LagomApplication(ctx) with AhcWSComponents with AkkaDiscoveryComponents { override def lagomServer: LagomServer = serverFor[TestService](new TestServiceImpl) } trait TestService extends Service { def hello(name: String): ServiceCall[NotUsed, String] override def descriptor = { import Service._ named("test-service") .withCalls( pathCall("/hello/:name", hello _) ) .withAutoAcl(true) } } class TestServiceImpl extends TestService { override def hello(name: String) = ServiceCall { _ => Future.successful(s"Hello $name") } } }
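The assertion expects fake-service to resolve to http://fake-service-host:9119, which implies a static service mapping in the test configuration (not shown here). As a hedged sketch, configuration-based Akka Discovery can provide such a mapping; the exact keys depend on the discovery method in use:

import com.typesafe.config.ConfigFactory

// Illustrative only: a config-method Akka Discovery block that would map
// fake-service to fake-service-host:9119.
val discoveryConfig = ConfigFactory.parseString(
  """
    |akka.discovery {
    |  method = config
    |  config.services.fake-service {
    |    endpoints = [{ host = "fake-service-host", port = 9119 }]
    |  }
    |}
  """.stripMargin
)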
Example 187
Source File: ActorSystemSpec.scala From lagom with Apache License 2.0 | 5 votes |
package com.lightbend.lagom.persistence import java.lang.reflect.Modifier import akka.actor.ActorSystem import akka.actor.CoordinatedShutdown import akka.actor.setup.ActorSystemSetup import akka.event.Logging import akka.event.LoggingAdapter import akka.testkit.ImplicitSender import akka.testkit.TestKit import com.typesafe.config.Config import com.typesafe.config.ConfigFactory import org.scalactic.CanEqual import org.scalactic.TypeCheckedTripleEquals import org.scalatest.BeforeAndAfterAll import org.scalatest.matchers.should.Matchers import org.scalatest.wordspec.AnyWordSpecLike object ActorSystemSpec { // taken from akka-testkit's AkkaSpec private def testNameFromCallStack(classToStartFrom: Class[_]): String = { def isAbstractClass(className: String): Boolean = { try { Modifier.isAbstract(Class.forName(className).getModifiers) } catch { case _: Throwable => false // yes catch everything, best effort check } } val startFrom = classToStartFrom.getName val filteredStack = Thread.currentThread.getStackTrace.iterator .map(_.getClassName) // drop until we find the first occurrence of classToStartFrom .dropWhile(!_.startsWith(startFrom)) // then continue to the next entry after classToStartFrom that makes sense .dropWhile { case `startFrom` => true case str if str.startsWith(startFrom + "$") => true // lambdas inside startFrom etc case str if isAbstractClass(str) => true case _ => false } if (filteredStack.isEmpty) throw new IllegalArgumentException(s"Couldn't find [${classToStartFrom.getName}] in call stack") // sanitize for actor system name scrubActorSystemName(filteredStack.next()) } // taken from akka-testkit's AkkaSpec private def scrubActorSystemName(name: String): String = { name .replaceFirst("""^.*\.""", "") // drop package name .replaceAll("""\$\$?\w+""", "") // drop scala anonymous functions/classes .replaceAll("[^a-zA-Z_0-9]", "_") } } abstract class ActorSystemSpec(actorSystemFactory: () => ActorSystem) extends TestKit(actorSystemFactory()) with AnyWordSpecLike with Matchers with BeforeAndAfterAll with TypeCheckedTripleEquals with ImplicitSender { def this(testName: String, config: Config) = this(() => ActorSystem(testName, config)) def this(config: Config) = this(ActorSystemSpec.testNameFromCallStack(classOf[ActorSystemSpec]), config) def this(setup: ActorSystemSetup) = this(() => ActorSystem(ActorSystemSpec.testNameFromCallStack(classOf[ActorSystemSpec]), setup)) def this() = this(ConfigFactory.empty()) override def afterAll(): Unit = { shutdown() super.afterAll() } val log: LoggingAdapter = Logging(system, this.getClass) val coordinatedShutdown: CoordinatedShutdown = CoordinatedShutdown(system) // for ScalaTest === compare of Class objects implicit def classEqualityConstraint[A, B]: CanEqual[Class[A], Class[B]] = new CanEqual[Class[A], Class[B]] { def areEqual(a: Class[A], b: Class[B]) = a == b } }
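The three regex replacements in scrubActorSystemName are easiest to read applied to a concrete input; a step-by-step sketch with an illustrative class name:

// Illustrative walk-through of scrubActorSystemName's three replacements.
val raw   = "com.example.MySpec$$anonfun$1"
val step1 = raw.replaceFirst("""^.*\.""", "")      // "MySpec$$anonfun$1" (drop the package)
val step2 = step1.replaceAll("""\$\$?\w+""", "")   // "MySpec" (drop anonymous suffixes)
val step3 = step2.replaceAll("[^a-zA-Z_0-9]", "_") // "MySpec" (nothing left to scrub here)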
Example 188
Source File: ServiceSupport.scala From lagom with Apache License 2.0 | 5 votes |
package com.lightbend.lagom.it import java.util.Collections import java.util.function.{ Function => JFunction } import akka.stream.Materializer import akka.stream.scaladsl.Source import org.scalatest.Inside import play.api.Application import play.api.Configuration import play.api.Environment import play.inject.guice.GuiceApplicationBuilder import scala.concurrent.Await import scala.concurrent.duration._ import scala.reflect.ClassTag import akka.japi.function.Procedure import com.google.inject.Binder import com.google.inject.Module import com.google.inject.TypeLiteral import com.lightbend.lagom.javadsl.testkit.ServiceTest import com.lightbend.lagom.javadsl.testkit.ServiceTest.TestServer import play.api.routing.Router import java.util import com.lightbend.lagom.internal.testkit.EmptyAdditionalRoutersModule import org.scalatest.matchers.should.Matchers import org.scalatest.wordspec.AnyWordSpecLike sealed trait HttpBackend { final val provider: String = s"play.core.server.${codeName}ServerProvider" val codeName: String } case object AkkaHttp extends HttpBackend { val codeName = "AkkaHttp" } case object Netty extends HttpBackend { val codeName = "Netty" } trait ServiceSupport extends AnyWordSpecLike with Matchers with Inside { def withServer( configureBuilder: GuiceApplicationBuilder => GuiceApplicationBuilder )(block: Application => Unit)(implicit httpBackend: HttpBackend): Unit = { val jConfigureBuilder = new JFunction[GuiceApplicationBuilder, GuiceApplicationBuilder] { override def apply(b: GuiceApplicationBuilder): GuiceApplicationBuilder = { configureBuilder(b) .overrides(EmptyAdditionalRoutersModule) .configure("play.server.provider", httpBackend.provider) } } val jBlock = new Procedure[TestServer] { override def apply(server: TestServer): Unit = { block(server.app.asScala()) } } val setup = ServiceTest.defaultSetup.configureBuilder(jConfigureBuilder).withCluster(false) ServiceTest.withServer(setup, jBlock) } def withClient[T: ClassTag]( configureBuilder: GuiceApplicationBuilder => GuiceApplicationBuilder )(block: Application => T => Unit)(implicit httpBackend: HttpBackend): Unit = { withServer(configureBuilder) { application => val client = application.injector.instanceOf[T] block(application)(client) } } implicit def materializer(implicit app: Application): Materializer = app.materializer def consume[T](source: Source[T, _])(implicit mat: Materializer): List[T] = { Await.result(source.runFold(List.empty[T])((list, t) => t :: list), 10.seconds).reverse } }
Example 189
Source File: ScalaSupportSpec.scala From lagom with Apache License 2.0 | 5 votes |
package com.lightbend.lagom.javadsl.api import org.scalatest.Inside import org.scalatest.matchers.should.Matchers import org.scalatest.wordspec.AnyWordSpec class ScalaSupportSpec extends AnyWordSpec with Matchers with Inside { "scala support" should { "resolve a function" in { val method: ScalaServiceSupport.ScalaMethodCall[String] = testMethod _ method.method.getDeclaringClass should ===(this.getClass) method.method.getName should ===("testMethod") } } def testMethod(s: String): String = s }
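The trailing underscore in testMethod _ is plain eta-expansion; the implicit conversion in ScalaServiceSupport captures the expanded method reference. A hedged reminder of eta-expansion by itself, independent of Lagom:

// Plain Scala 2 eta-expansion.
def greet(name: String): String = s"Hello, $name"
val asFunction: String => String = greet _ // the method becomes a function value
asFunction("Alice")                        // "Hello, Alice"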
Example 190
Source File: ConfigurationServiceLocatorSpec.scala From lagom with Apache License 2.0 | 5 votes |
package com.lightbend.lagom.javadsl.api import java.net.URI import java.util.concurrent.TimeUnit import com.typesafe.config.ConfigFactory import scala.compat.java8.OptionConverters._ import org.scalatest.matchers.should.Matchers import org.scalatest.wordspec.AnyWordSpec class ConfigurationServiceLocatorSpec extends AnyWordSpec with Matchers { val serviceLocator = new ConfigurationServiceLocator( ConfigFactory.parseString( """ |lagom.services { | foo = "http://localhost:10001" | bar = "http://localhost:10002" |} """.stripMargin ) ) def locate(serviceName: String) = serviceLocator.locate(serviceName).toCompletableFuture.get(10, TimeUnit.SECONDS).asScala "ConfigurationServiceLocator" should { "return a found service" in { locate("foo") should contain(URI.create("http://localhost:10001")) locate("bar") should contain(URI.create("http://localhost:10002")) } "return none for not found service" in { locate("none") shouldBe None } } }
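Judging by the configuration shape above, the locator resolves each service name as a flat key under lagom.services (a hedged inference, not taken from the implementation). The same lookup can be sketched directly with Typesafe Config:

import com.typesafe.config.ConfigFactory

// The flat key path that corresponds to locating "foo" (sketch).
val config = ConfigFactory.parseString("""lagom.services { foo = "http://localhost:10001" }""")
config.getString("lagom.services.foo") // "http://localhost:10001"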
Example 191
Source File: TransportExceptionSpec.scala From lagom with Apache License 2.0 | 5 votes |
package com.lightbend.lagom.javadsl.api.transport import java.util import java.util.Optional import com.lightbend.lagom.javadsl.api.deser.DeserializationException import com.lightbend.lagom.javadsl.api.deser.SerializationException import scala.collection.immutable import org.scalatest.matchers.should.Matchers import org.scalatest.wordspec.AnyWordSpec class TransportExceptionSpec extends AnyWordSpec with Matchers { val protocolTextPlain = new MessageProtocol(Optional.of("text/plain"), Optional.of("utf-8"), Optional.empty[String]) val protocolJson = new MessageProtocol(Optional.of("application/json"), Optional.of("utf-8"), Optional.empty[String]) val protocolHtml = new MessageProtocol(Optional.of("text/html"), Optional.of("utf-8"), Optional.empty[String]) val supportedExceptions: immutable.Seq[TransportException] = List( new DeserializationException("some msg - DeserializationException"), new BadRequest("some msg - BadRequest"), new Forbidden("some msg - Forbidden"), new PolicyViolation("some msg - PolicyViolation"), new NotFound("some msg - NotFound"), new NotAcceptable(util.Arrays.asList(protocolJson, protocolTextPlain), protocolHtml), new PayloadTooLarge("some msg - PayloadTooLarge"), new UnsupportedMediaType(protocolTextPlain, protocolJson), new SerializationException("some msg - SerializationException") ) "Lagom-provided TransportExceptions" should { supportedExceptions.foreach { ex => s"be buildable from code and message (${ex.getClass.getName})" in { val reconstructed = TransportException.fromCodeAndMessage(ex.errorCode(), ex.exceptionMessage()) reconstructed.getClass.getName should ===(ex.getClass.getName) reconstructed.exceptionMessage() should ===(ex.exceptionMessage()) } } // TODO: implement roundtrip de/ser tests like in com.lightbend.lagom.scaladsl.api.ExceptionsSpec } }
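The loop asserts that every supported exception survives a rebuild from its error code and message; unrolled for a single case, the round trip looks like this (a sketch using only calls from the spec):

// One iteration of the loop above, written out explicitly.
val original      = new NotFound("some msg - NotFound")
val reconstructed = TransportException.fromCodeAndMessage(original.errorCode(), original.exceptionMessage())

reconstructed.getClass.getName should ===(original.getClass.getName)
reconstructed.exceptionMessage() should ===(original.exceptionMessage())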
Example 192
Source File: LagomClientFactorySpec.scala From lagom with Apache License 2.0 | 5 votes |
package com.lightbend.lagom.javadsl.client.integration

import akka.actor.Actor
import akka.actor.ActorRef
import akka.actor.ActorSystem
import akka.actor.Props
import com.typesafe.config.ConfigFactory
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.BeforeAndAfterEach

import scala.concurrent.duration._
import scala.concurrent.Await
import akka.pattern._
import akka.stream.SystemMaterializer
import akka.util.Timeout
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

class LagomClientFactorySpec extends AnyFlatSpec with Matchers with BeforeAndAfterEach with ScalaFutures {
  private var system: ActorSystem = _
  private var echoActor: ActorRef = _
  implicit val timeout            = Timeout(5.seconds)

  "LagomClientFactory" should "not terminate an unmanaged actor system upon closing" in {
    // check that actor system is operational
    (echoActor ? "hey").mapTo[String].futureValue shouldBe "hey"

    LagomClientFactory
    // create a factory by passing the existing ActorSystem
      .create(
        "test",
        this.getClass.getClassLoader,
        system,
        SystemMaterializer(system).materializer
      )
      // closing the factory should not close the existing ActorSystem
      .close()

    // check that actor system is still operational
    (echoActor ? "hey").mapTo[String].futureValue shouldBe "hey"
  }

  protected override def beforeEach(): Unit = {
    system = ActorSystem("test", ConfigFactory.load())
    echoActor = system.actorOf(Props(new EchoActor), "echo")
  }

  class EchoActor extends Actor {
    override def receive: Receive = {
      case s: String => sender() ! s
    }
  }

  protected override def afterEach(): Unit = {
    Await.ready(system.terminate(), 5.seconds)
  }
}
Example 193
Source File: ConfigurationServiceLocatorSpec.scala From lagom with Apache License 2.0 | 5 votes |
package com.lightbend.lagom.javadsl.client import java.net.URI import java.util.concurrent.CompletionStage import java.util.concurrent.TimeUnit import java.util.function.Supplier import com.typesafe.config.ConfigFactory import scala.compat.java8.OptionConverters._ import org.scalatest.matchers.should.Matchers import org.scalatest.wordspec.AnyWordSpec class ConfigurationServiceLocatorSpec extends AnyWordSpec with Matchers { val serviceLocator = new ConfigurationServiceLocator( ConfigFactory.parseString( """ |lagom.services { | foo = "http://localhost:10001" | bar = "http://localhost:10002" |} """.stripMargin ), new CircuitBreakersPanel { override def withCircuitBreaker[T](id: String, body: Supplier[CompletionStage[T]]): CompletionStage[T] = body.get() } ) def locate(serviceName: String) = serviceLocator.locate(serviceName).toCompletableFuture.get(10, TimeUnit.SECONDS).asScala "ConfigurationServiceLocator" should { "return a found service" in { locate("foo") should contain(URI.create("http://localhost:10001")) locate("bar") should contain(URI.create("http://localhost:10002")) } "return none for not found service" in { locate("none") shouldBe None } } }
Example 194
Source File: AdditionalRoutersSpec.scala From lagom with Apache License 2.0 | 5 votes |
package com.lightbend.lagom.scaladsl.it.routers import akka.NotUsed import com.lightbend.lagom.scaladsl.api.ServiceLocator import com.lightbend.lagom.scaladsl.server.LagomApplication import com.lightbend.lagom.scaladsl.server.LagomApplicationContext import com.lightbend.lagom.scaladsl.server.LagomServer import com.lightbend.lagom.scaladsl.server.LocalServiceLocator import com.lightbend.lagom.scaladsl.testkit.ServiceTest import com.lightbend.lagom.scaladsl.testkit.ServiceTest.TestServer import org.scalatest.concurrent.ScalaFutures import play.api.http.DefaultWriteables import play.api.http.HeaderNames import play.api.libs.ws.WSClient import play.api.libs.ws.ahc.AhcWSComponents import play.api.mvc import play.api.mvc._ import play.api.routing.SimpleRouterImpl import play.api.test.FakeHeaders import play.api.test.FakeRequest import play.api.test.Helpers import play.core.j.JavaRouterAdapter import play.api.test.Helpers._ import scala.concurrent.ExecutionContext import scala.concurrent.Future import org.scalatest.matchers.should.Matchers import org.scalatest.wordspec.AnyWordSpec class AdditionalRoutersSpec extends AnyWordSpec with Matchers with ScalaFutures { "A LagomServer " should { "be extensible with a Play Router" in withServer { server => val request = FakeRequest(GET, "/hello/") val result = Helpers.route(server.application.application, request).get.futureValue result.header.status shouldBe OK val body = result.body.consumeData(server.materializer).futureValue.utf8String body shouldBe "hello" } } def withServer(block: TestServer[TestApp] => Unit): Unit = { ServiceTest.withServer(ServiceTest.defaultSetup.withCassandra(false).withCluster(false)) { ctx => new TestApp(ctx) } { server => block(server) } } class TestApp(context: LagomApplicationContext) extends LagomApplication(context) with AhcWSComponents with LocalServiceLocator { override def lagomServer: LagomServer = serverFor[AdditionalRoutersService](new AdditionalRoutersServiceImpl) .additionalRouter(FixedResponseRouter("hello").withPrefix("/hello")) } } object FixedResponseRouter { def apply(msg: String) = new SimpleRouterImpl({ case _ => new Action[Unit] { override def parser: BodyParser[Unit] = mvc.BodyParsers.utils.empty override def apply(request: Request[Unit]): Future[Result] = Future.successful(Results.Ok(msg)) override def executionContext: ExecutionContext = scala.concurrent.ExecutionContext.global } }) }
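FixedResponseRouter builds a Play Router by hand via SimpleRouterImpl and a bare Action implementation. For comparison, a hedged sketch of the same fixed response using Play's SIRD routing DSL, assuming a DefaultActionBuilder is available from the application:

import play.api.mvc._
import play.api.routing.Router
import play.api.routing.sird._

// Same behavior as FixedResponseRouter, expressed with the SIRD DSL (sketch).
def fixedResponseRouter(msg: String, action: DefaultActionBuilder): Router =
  Router.from {
    case GET(p"/") => action(Results.Ok(msg))
  }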
Example 195
Source File: MessageSerializerSpec.scala From lagom with Apache License 2.0 | 5 votes |
package com.lightbend.lagom.scaladsl.api.deser

import akka.util.ByteString
import com.lightbend.lagom.scaladsl.api.deser.MessageSerializer._
import com.lightbend.lagom.scaladsl.api.transport.DeserializationException
import com.lightbend.lagom.scaladsl.api.transport.MessageProtocol
import play.api.libs.json._

import scala.collection.immutable.Seq
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class MessageSerializerSpec extends AnyWordSpec with Matchers {
  case class Dummy(prop: Option[String])

  "ByteString-to-PlayJson (via JsValueMessageSerializer)" should {
    "deserialize empty ByteString as JSON null" in {
      val deserializer = JsValueMessageSerializer.deserializer(MessageProtocol.empty)
      deserializer.deserialize(ByteString.empty) shouldBe JsNull
    }
  }

  implicit def optionFormat[T: Format]: Format[Option[T]] = new Format[Option[T]] {
    override def reads(json: JsValue): JsResult[Option[T]] = json.validateOpt[T]
    override def writes(o: Option[T]): JsValue = o match {
      case Some(t) => implicitly[Writes[T]].writes(t)
      case None    => JsNull
    }
  }

  "PlayJson-to-RequestPayload formatters" should {
    implicit val format: Format[Dummy] = Json.format

    "fail when converting JsNull into T" in {
      intercept[JsResultException] {
        JsNull.as[Dummy]
      }
    }

    "convert JS null to None by default" in {
      val dummy = JsNull.as[Option[Dummy]]
      dummy shouldBe None
    }
  }

  "ByteString-to-RequestPayload (for JSON payloads, using jsValueFormatMessageSerializer)" should {
    "deserialize empty ByteStrings to Option[T] as None" in {
      val serializer = jsValueFormatMessageSerializer(JsValueMessageSerializer, optionFormat[String])
      val out        = serializer.deserializer(MessageProtocol.empty).deserialize(ByteString.empty)
      out shouldBe None
    }

    "fail to deserialize empty ByteString to Dummy(prop: Option[T])" in {
      val format: Format[Dummy] = Json.format
      val serializer            = jsValueFormatMessageSerializer(JsValueMessageSerializer, format)
      intercept[DeserializationException] {
        serializer.deserializer(MessageProtocol.empty).deserialize(ByteString.empty)
      }
    }
  }

  "ByteString-to-ByteString" should {
    "serialize any request of type ByteString to the same ByteString" in {
      val serializer = NoopMessageSerializer.serializerForRequest
      val out        = serializer.serialize(ByteString("sample string"))
      out shouldBe ByteString("sample string")
    }

    "serialize any response of type ByteString to the same ByteString" in {
      val serializer = NoopMessageSerializer.serializerForResponse(Seq(MessageProtocol.empty))
      val out        = serializer.serialize(ByteString("sample string"))
      out shouldBe ByteString("sample string")
    }

    "deserialize any ByteString to the same ByteString" in {
      val deserializer = NoopMessageSerializer.deserializer(MessageProtocol.empty)
      val out          = deserializer.deserialize(ByteString("sample string"))
      out shouldBe ByteString("sample string")
    }
  }
}
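The custom optionFormat above delegates to Play JSON's validateOpt, which is what maps JsNull (and missing values) to None; a short reminder of that behavior on its own:

import play.api.libs.json._

// validateOpt treats JsNull as a successful None (sketch).
val nullResult = JsNull.validateOpt[String]               // JsSuccess(None)
val someResult = Json.parse("\"hi\"").validateOpt[String] // JsSuccess(Some("hi"))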
Example 196
Source File: ScaladslServiceResolverSpec.scala From lagom with Apache License 2.0 | 5 votes |
package com.lightbend.lagom.internal.scaladsl.client import akka.NotUsed import com.lightbend.lagom.scaladsl.api.deser.DefaultExceptionSerializer import com.lightbend.lagom.scaladsl.api.CircuitBreaker import com.lightbend.lagom.scaladsl.api.Descriptor import com.lightbend.lagom.scaladsl.api.Service import com.lightbend.lagom.scaladsl.api.ServiceCall import com.lightbend.lagom.scaladsl.client.TestServiceClient import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers class ScaladslServiceResolverSpec extends AnyFlatSpec with Matchers { behavior.of("ScaladslServiceResolver") it should "setup circuit-breakers for all method calls using default values when nothing is specified" in { assertCircuitBreaking(TestServiceClient.implement[Unspecified], CircuitBreaker.PerNode) } it should "setup circuit-breakers for all method calls using descriptor value when only descriptor's CB is specified" in { assertCircuitBreaking(TestServiceClient.implement[General], CircuitBreaker.identifiedBy("general-cb")) } it should "setup circuit-breakers with each specific CB when each call has a CB described" in { assertCircuitBreaking(TestServiceClient.implement[PerCall], CircuitBreaker.identifiedBy("one-cb")) } // -------------------------------------------------------------------------------------------- private def assertCircuitBreaking(service: Service, expected: CircuitBreaker) = { val resolved = new ScaladslServiceResolver(DefaultExceptionSerializer.Unresolved).resolve(service.descriptor) resolved.calls.head.circuitBreaker should be(Some(expected)) } trait Unspecified extends Service { import Service._ def one: ServiceCall[NotUsed, NotUsed] override def descriptor: Descriptor = { named("Unspecified") .withCalls( namedCall("one", one) ) } } trait General extends Service { import Service._ def one: ServiceCall[NotUsed, NotUsed] override def descriptor: Descriptor = { named("Unspecified") .withCalls( namedCall("one", one) ) .withCircuitBreaker(CircuitBreaker.identifiedBy("general-cb")) } } trait PerCall extends Service { import Service._ def one: ServiceCall[NotUsed, NotUsed] override def descriptor: Descriptor = { named("Unspecified") .withCalls( namedCall("one", one) .withCircuitBreaker(CircuitBreaker.identifiedBy("one-cb")) // overwrites default. ) .withCircuitBreaker(CircuitBreaker.identifiedBy("general-cb")) } } }
Example 197
Source File: ServiceAclResolverSpec.scala From lagom with Apache License 2.0 | 5 votes |
package com.lightbend.lagom.scaladsl.server import akka.NotUsed import akka.stream.scaladsl.Source import com.lightbend.lagom.internal.scaladsl.client.ScaladslServiceResolver import com.lightbend.lagom.scaladsl.api.Service import com.lightbend.lagom.scaladsl.api.ServiceAcl import com.lightbend.lagom.scaladsl.api.ServiceCall import com.lightbend.lagom.scaladsl.api.deser.DefaultExceptionSerializer import com.lightbend.lagom.scaladsl.api.transport.Method import scala.concurrent.Future import org.scalatest.matchers.should.Matchers import org.scalatest.wordspec.AnyWordSpec class ServiceAclResolverSpec extends AnyWordSpec with Matchers { class SomeService extends Service { private def echo[A] = ServiceCall[A, A](Future.successful) def callString: ServiceCall[String, String] = echo def callStreamed: ServiceCall[Source[String, NotUsed], Source[String, NotUsed]] = echo def callNotUsed: ServiceCall[NotUsed, NotUsed] = echo def restCallString: ServiceCall[String, String] = echo def restCallStreamed: ServiceCall[Source[String, NotUsed], Source[String, NotUsed]] = echo def restCallNotUsed: ServiceCall[NotUsed, NotUsed] = echo def withAutoAclTrue: ServiceCall[String, String] = echo def withAutoAclFalse: ServiceCall[String, String] = echo override def descriptor = { import Service._ named("some-service").withCalls( call(callString), call(callStreamed), call(callNotUsed), restCall(Method.PUT, "/restcallstring", restCallString), restCall(Method.PUT, "/restcallstreamed", restCallStreamed), restCall(Method.PUT, "/restcallnotused", restCallNotUsed), call(withAutoAclTrue).withAutoAcl(true), call(withAutoAclFalse).withAutoAcl(false) ) } } val resolver = new ScaladslServiceResolver(DefaultExceptionSerializer.Unresolved) "ScaladslServiceResolver" when { "when auto acl is true" should { val acls = resolver.resolve(new SomeService().descriptor.withAutoAcl(true)).acls "default to POST for service calls with used request messages" in { acls should contain(ServiceAcl.forMethodAndPathRegex(Method.POST, "\\Q/callString\\E")) } "default to GET for streamed service calls" in { acls should contain(ServiceAcl.forMethodAndPathRegex(Method.GET, "\\Q/callStreamed\\E")) } "default to GET for service calls with not used request messages" in { acls should contain(ServiceAcl.forMethodAndPathRegex(Method.GET, "\\Q/callNotUsed\\E")) } "use the specified method and path for rest calls" in { acls should contain(ServiceAcl.forMethodAndPathRegex(Method.PUT, "\\Q/restcallstring\\E")) } "use the specified method for rest calls when the request is streamed" in { acls should contain(ServiceAcl.forMethodAndPathRegex(Method.PUT, "\\Q/restcallstreamed\\E")) } "use the specified method and path for rest calls even when the request is unused" in { acls should contain(ServiceAcl.forMethodAndPathRegex(Method.PUT, "\\Q/restcallnotused\\E")) } "create an acl when an individual method has auto acl set to true" in { acls should contain(ServiceAcl.forMethodAndPathRegex(Method.POST, "\\Q/withAutoAclTrue\\E")) } "not create an acl when an individual method has auto acl set to false" in { acls should not contain ServiceAcl.forMethodAndPathRegex(Method.POST, "\\Q/withAutoAclFalse\\E") } "generate the right number of acls" in { acls should have size 7 } } "auto acl is false" should { val acls = resolver.resolve(new SomeService().descriptor.withAutoAcl(false)).acls "create an acl when an individual method has auto acl set to true" in { acls should contain only ServiceAcl.forMethodAndPathRegex(Method.POST, "\\Q/withAutoAclTrue\\E") } } } }
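The expected ACL regexes are wrapped in \Q...\E, i.e. literal-quoted paths. This matches what java.util.regex.Pattern.quote produces (hedged: inferred from the expectations above rather than from Lagom's implementation):

// Pattern.quote wraps a literal in \Q...\E, matching the expectations above.
java.util.regex.Pattern.quote("/callString") // "\\Q/callString\\E"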
Example 198
Source File: ScaladslKafkaSubscriberSpec.scala From lagom with Apache License 2.0 | 5 votes |
package com.lightbend.lagom.internal.scaladsl.broker.kafka import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers class ScaladslKafkaSubscriberSpec extends AnyFlatSpec with Matchers { behavior.of("ScaladslKafkaSubscriber") it should "create a new subscriber with updated groupId" in { val subscriber = new ScaladslKafkaSubscriber(null, null, ScaladslKafkaSubscriber.GroupId("old"), null, null, null, null)( null, null ) subscriber.withGroupId("newGID") should not be subscriber } }
Example 199
Source File: JdbcPersistenceModuleSpec.scala From lagom with Apache License 2.0 | 5 votes |
package com.lightbend.lagom.javadsl.persistence.jdbc

import javax.inject.Inject
import org.scalatest._
import play.api.PlayException
import play.api.db.DBApi
import play.api.inject.guice.GuiceApplicationBuilder
import play.api.inject.{ bind => playBind }
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AsyncWordSpec

class JdbcPersistenceModuleSpec extends AsyncWordSpec with Matchers with BeforeAndAfterAll {
  "The JdbcPersistenceModule" should {
    "start the service when the database is not available" in {
      // Should be okay to build an application since Lagom configuration
      // enables it without a database being available.
      val app = new GuiceApplicationBuilder()
        .bindings(playBind[DbWrapper].toSelf)
        .configure(
          // Correct configuration, but the database is not available
          "db.default.driver"                             -> "org.h2.Driver",
          "db.default.url"                                -> "jdbc:h2:tcp://localhost/~/notavailable",
          "lagom.cluster.exit-jvm-when-system-terminated" -> "off",
          "lagom.cluster.bootstrap.enabled"               -> "off"
        )
        .build()

      val dbWrapper = app.injector.instanceOf[DbWrapper]
      dbWrapper should not be (null)

      app.stop().map(_ => succeed)
    }

    "fail to start the service when the database is not available and configured to fail fast" in {
      assertThrows[PlayException] {
        new GuiceApplicationBuilder()
          .bindings(playBind[DbWrapper].toSelf)
          .configure(
            // Correct configuration, but the database is not available
            "db.default.driver" -> "org.h2.Driver",
            "db.default.url"    -> "jdbc:h2:tcp://localhost/~/notavailable",
            // And it is configured to fail fast
            "play.db.prototype.hikaricp.initializationFailTimeout" -> "1",
            "lagom.cluster.exit-jvm-when-system-terminated"        -> "off",
            "lagom.cluster.bootstrap.enabled"                      -> "off"
          )
          .build()
      }
    }
  }
}

// So that we can confirm DBApi was created
class DbWrapper @Inject() (val dbApi: DBApi)
Example 200
Source File: STMultiNodeSpec.scala From lagom with Apache License 2.0 | 5 votes |
package com.lightbend.lagom.internal.cluster import akka.remote.testkit.MultiNodeSpecCallbacks import org.scalatest.BeforeAndAfterAll import org.scalatest.matchers.should.Matchers import org.scalatest.wordspec.AnyWordSpecLike trait STMultiNodeSpec extends MultiNodeSpecCallbacks with AnyWordSpecLike with Matchers with BeforeAndAfterAll { override def beforeAll(): Unit = { super.beforeAll() multiNodeSpecBeforeAll() } override def afterAll(): Unit = { multiNodeSpecAfterAll() super.afterAll() } }
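STMultiNodeSpec is only a mixin; it becomes a test when combined with a MultiNodeSpec and a MultiNodeConfig. A hedged sketch of a concrete two-node spec, with TwoNodeConfig and SampleMultiNodeSpec as hypothetical names; such tests are run across JVMs via sbt-multi-jvm:

import akka.remote.testkit.{ MultiNodeConfig, MultiNodeSpec }

// Hypothetical two-node config.
object TwoNodeConfig extends MultiNodeConfig {
  val node1 = role("node1")
  val node2 = role("node2")
}

class SampleMultiNodeSpec extends MultiNodeSpec(TwoNodeConfig) with STMultiNodeSpec {
  override def initialParticipants: Int = roles.size

  "A two-node cluster" must {
    "reach a named barrier on both nodes" in {
      enterBarrier("start")
      succeed
    }
  }
}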