java.util.concurrent.CompletionStage Scala Examples

The following examples show how to use java.util.concurrent.CompletionStage from Scala. Each example is taken from an open-source project; the header above each listing names the source file, the project it comes from, and its license.
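Most of the examples below bridge CompletionStage with Scala's Future, typically through scala-java8-compat's FutureConverters. As a quick orientation, here is a minimal, self-contained sketch of that conversion (the object and method names are illustrative, and it assumes the scala-java8-compat library is on the classpath):

import java.util.concurrent.{ CompletableFuture, CompletionStage }

import scala.compat.java8.FutureConverters._
import scala.concurrent.{ Await, ExecutionContext, Future }
import scala.concurrent.duration._

object CompletionStageInteropSketch {
  implicit val ec: ExecutionContext = ExecutionContext.global

  // Wrap a CompletionStage as a Scala Future so it can be composed with map/flatMap.
  def asScalaFuture[T](stage: CompletionStage[T]): Future[T] = stage.toScala

  // Expose a Scala Future to Java callers as a CompletionStage.
  def asCompletionStage[T](future: Future[T]): CompletionStage[T] = future.toJava

  def main(args: Array[String]): Unit = {
    val stage: CompletionStage[Int] = CompletableFuture.completedFuture(21)
    val doubled: Future[Int] = asScalaFuture(stage).map(_ * 2)
    println(Await.result(doubled, 1.second)) // prints 42
  }
}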
Example 1
Source File: PlayRouter.scala    From play-grpc   with Apache License 2.0
package play.grpc.internal

import java.util.Optional
import java.util.concurrent.CompletionStage

import akka.annotation.InternalApi
import akka.dispatch.Dispatchers
import akka.http.scaladsl.model.HttpRequest
import akka.http.scaladsl.model.HttpResponse
import akka.stream.Materializer
import play.api.inject.Injector
import play.api.mvc.Handler
import play.api.mvc.akkahttp.AkkaHttpHandler
import play.api.routing.Router
import play.api.routing.Router.Routes
import play.mvc.Http

import scala.concurrent.ExecutionContext
import scala.concurrent.Future
import scala.compat.java8.FutureConverters._
import scala.compat.java8.OptionConverters._


  final override def withPrefix(prefix: String): Router =
    if (prefix == "/") this
    else
      throw new UnsupportedOperationException(
        "Prefixing gRPC services is not widely supported by clients, " +
          s"strongly discouraged by the specification and therefore not supported. " +
          s"Attempted to prefix with [$prefix], yet already default prefix known to be [${this.prefix}]. " +
          s"When binding gRPC routers the path in `routes` MUST BE `/`.",
      )

} 
Example 2
Source File: ManagementRouteProviderSettings.scala    From akka-management   with Apache License 2.0
package akka.management.javadsl

import java.util.Optional
import java.util.concurrent.CompletionStage
import java.util.function.{ Function => JFunction }

import akka.annotation.DoNotInherit
import akka.annotation.InternalApi
import akka.http.javadsl.HttpsConnectionContext
import akka.http.javadsl.model.Uri
import akka.http.javadsl.server.directives.SecurityDirectives.ProvidedCredentials
import akka.http.scaladsl.server.Directives.AsyncAuthenticator
import akka.management.scaladsl

object ManagementRouteProviderSettings {
  def create(selfBaseUri: Uri): ManagementRouteProviderSettings = {
    ManagementRouteProviderSettingsImpl(selfBaseUri, None, None, Optional.empty(), readOnly = true)
  }
}


@InternalApi private[akka] final case class ManagementRouteProviderSettingsImpl(
    override val selfBaseUri: Uri,
    javadslAuth: Option[JFunction[Optional[ProvidedCredentials], CompletionStage[Optional[String]]]],
    scaladslAuth: Option[AsyncAuthenticator[String]],
    override val httpsConnectionContext: Optional[HttpsConnectionContext],
    override val readOnly: Boolean
) extends ManagementRouteProviderSettings {

  // There is no public API for defining both so it should not be possible
  require(!(javadslAuth.isDefined && scaladslAuth.isDefined), "Defining both javadsl and scaladsl auth is not allowed")

  override def withAuth(newAuth: JFunction[Optional[ProvidedCredentials], CompletionStage[Optional[String]]])
      : ManagementRouteProviderSettings =
    copy(javadslAuth = Option(newAuth))

  override def withHttpsConnectionContext(
      newHttpsConnectionContext: HttpsConnectionContext): ManagementRouteProviderSettings =
    copy(
      selfBaseUri = selfBaseUri.scheme("https"),
      httpsConnectionContext = Optional.ofNullable(newHttpsConnectionContext))

  def scaladslHttpsConnectionContext: Option[akka.http.scaladsl.HttpsConnectionContext] = {
    if (httpsConnectionContext.isPresent) {
      httpsConnectionContext.get match {
        case ctx: akka.http.scaladsl.HttpsConnectionContext => Option(ctx)
        case other =>
          throw new IllegalStateException(
            "akka.http.javadsl.HttpsConnectionContext should be a " +
            s"akka.http.scaladsl.HttpsConnectionContext, but was [${other.getClass.getName}]")
      }
    } else {
      None
    }
  }

  override def withReadOnly(readOnly: Boolean): ManagementRouteProviderSettings = copy(readOnly = readOnly)

  def asScala: scaladsl.ManagementRouteProviderSettingsImpl =
    scaladsl.ManagementRouteProviderSettingsImpl(
      selfBaseUri = selfBaseUri.asScala,
      scaladslAuth,
      javadslAuth,
      scaladslHttpsConnectionContext,
      readOnly)
} 
Example 3
Source File: ManagementRouteProviderSettings.scala    From akka-management   with Apache License 2.0
package akka.management.scaladsl

import java.util.Optional
import java.util.concurrent.CompletionStage
import java.util.function.{ Function => JFunction }

import akka.annotation.DoNotInherit
import akka.annotation.InternalApi
import akka.http.javadsl.server.directives.SecurityDirectives.ProvidedCredentials
import akka.http.scaladsl.HttpsConnectionContext
import akka.http.scaladsl.model.Uri
import akka.http.scaladsl.server.Directives.AsyncAuthenticator
import akka.management.javadsl

object ManagementRouteProviderSettings {
  def apply(selfBaseUri: Uri, readOnly: Boolean): ManagementRouteProviderSettings = {
    ManagementRouteProviderSettingsImpl(selfBaseUri, None, None, None, readOnly = readOnly)
  }
}


@InternalApi private[akka] final case class ManagementRouteProviderSettingsImpl(
    override val selfBaseUri: Uri,
    scaladslAuth: Option[AsyncAuthenticator[String]],
    javadslAuth: Option[JFunction[Optional[ProvidedCredentials], CompletionStage[Optional[String]]]],
    override val httpsConnectionContext: Option[HttpsConnectionContext],
    override val readOnly: Boolean
) extends ManagementRouteProviderSettings {

  // There is no public API for defining both so it should not be possible
  require(!(javadslAuth.isDefined && scaladslAuth.isDefined), "Defining both javadsl and scaladsl auth is not allowed")

  override def withAuth(newAuth: AsyncAuthenticator[String]): ManagementRouteProviderSettings =
    copy(scaladslAuth = Option(newAuth))

  override def withHttpsConnectionContext(
      newHttpsConnectionContext: HttpsConnectionContext): ManagementRouteProviderSettings =
    copy(selfBaseUri = selfBaseUri.withScheme("https"), httpsConnectionContext = Option(newHttpsConnectionContext))

  def javadslHttpsConnectionContext: Optional[akka.http.javadsl.HttpsConnectionContext] =
    httpsConnectionContext match {
      case None      => Optional.empty()
      case Some(ctx) => Optional.of(ctx) // a scaladsl.HttpsConnectionContext is a javadsl.HttpsConnectionContext
    }

  override def withReadOnly(readOnly: Boolean): ManagementRouteProviderSettings = copy(readOnly = readOnly)

  def asJava: javadsl.ManagementRouteProviderSettingsImpl =
    javadsl.ManagementRouteProviderSettingsImpl(
      selfBaseUri = akka.http.javadsl.model.Uri.create(selfBaseUri),
      javadslAuth,
      scaladslAuth,
      javadslHttpsConnectionContext,
      readOnly)

} 
Example 4
Source File: CouchbaseReadSideHandler.scala    From akka-persistence-couchbase   with Apache License 2.0
package com.lightbend.lagom.internal.javadsl.persistence.couchbase

import java.util.concurrent.CompletionStage

import akka.Done
import akka.japi.Pair
import akka.stream.ActorAttributes
import akka.stream.alpakka.couchbase.javadsl.CouchbaseSession
import akka.stream.javadsl.Flow
import com.lightbend.lagom.internal.javadsl.persistence.OffsetAdapter
import com.lightbend.lagom.internal.persistence.couchbase.{CouchbaseOffsetDao, CouchbaseOffsetStore}
import com.lightbend.lagom.javadsl.persistence.ReadSideProcessor.ReadSideHandler
import com.lightbend.lagom.javadsl.persistence.{AggregateEvent, AggregateEventTag, Offset}
import org.slf4j.LoggerFactory

import scala.compat.java8.FutureConverters._
import scala.concurrent.{ExecutionContext, Future}


private[couchbase] final class CouchbaseReadSideHandler[Event <: AggregateEvent[Event]](
    couchbaseSession: CouchbaseSession,
    offsetStore: CouchbaseOffsetStore,
    handlers: Map[Class[_ <: Event], Handler[Event]],
    globalPrepareCallback: CouchbaseSession => CompletionStage[Done],
    prepareCallback: (CouchbaseSession, AggregateEventTag[Event]) => CompletionStage[Done],
    readProcessorId: String,
    dispatcher: String
)(implicit ec: ExecutionContext)
    extends ReadSideHandler[Event] {
  private val log = LoggerFactory.getLogger(this.getClass)

  @volatile
  private var offsetDao: CouchbaseOffsetDao = _

  protected def invoke(handler: Handler[Event], event: Event, offset: Offset): CompletionStage[Done] =
    handler
      .asInstanceOf[(CouchbaseSession, Event, Offset) => CompletionStage[Done]]
      .apply(couchbaseSession, event, offset)
      .toScala
      .flatMap { _ =>
        val akkaOffset = OffsetAdapter.dslOffsetToOffset(offset)
        offsetDao.bindSaveOffset(akkaOffset).execute(couchbaseSession.asScala, ec)
      }
      .toJava

  override def globalPrepare(): CompletionStage[Done] = globalPrepareCallback.apply(couchbaseSession)

  override def prepare(tag: AggregateEventTag[Event]): CompletionStage[Offset] =
    (for {
      _ <- prepareCallback.apply(couchbaseSession, tag).toScala
      dao <- offsetStore.prepare(readProcessorId, tag.tag)
    } yield {
      offsetDao = dao
      OffsetAdapter.offsetToDslOffset(dao.loadedOffset)
    }).toJava

  override def handle(): Flow[Pair[Event, Offset], Done, _] =
    akka.stream.scaladsl
      .Flow[Pair[Event, Offset]]
      .mapAsync(parallelism = 1) { pair =>
        val Pair(event, offset) = pair
        val eventClass = event.getClass

        val handler =
          handlers.getOrElse(
            // lookup handler
            eventClass,
            // fallback to empty handler if none
            {
              if (log.isDebugEnabled()) log.debug("Unhandled event [{}]", eventClass.getName)
              CouchbaseReadSideHandler.emptyHandler
            }
          )

        invoke(handler, event, offset).toScala
      }
      .withAttributes(ActorAttributes.dispatcher(dispatcher))
      .asJava
} 
Example 5
Source File: CouchbaseReadSideImpl.scala    From akka-persistence-couchbase   with Apache License 2.0
package com.lightbend.lagom.internal.javadsl.persistence.couchbase

import java.util.concurrent.{CompletableFuture, CompletionStage}
import java.util.function.{BiFunction, Function => JFunction}

import akka.Done
import akka.actor.ActorSystem
import akka.dispatch.MessageDispatcher
import akka.stream.alpakka.couchbase.javadsl.CouchbaseSession
import com.lightbend.lagom.internal.persistence.couchbase.CouchbaseOffsetStore
import com.lightbend.lagom.javadsl.persistence.couchbase.CouchbaseReadSide
import com.lightbend.lagom.javadsl.persistence.{AggregateEvent, AggregateEventTag, Offset, ReadSideProcessor}
import javax.inject.{Inject, Singleton}
import play.api.inject.Injector

@Singleton
private[lagom] class CouchbaseReadSideImpl @Inject() (
    system: ActorSystem,
    couchbaseSession: CouchbaseSession,
    offsetStore: CouchbaseOffsetStore,
    injector: Injector
) extends CouchbaseReadSide {
  private val dispatcher = system.settings.config.getString("lagom.persistence.read-side.use-dispatcher")
  private implicit val ec: MessageDispatcher = system.dispatchers.lookup(dispatcher)

  override def builder[Event <: AggregateEvent[Event]](
      readSideId: String
  ): CouchbaseReadSide.ReadSideHandlerBuilder[Event] =
    new CouchbaseReadSide.ReadSideHandlerBuilder[Event] {
      type Handler[E] = CouchbaseReadSideHandler.Handler[E]

      private var globalPrepareCallback: CouchbaseSession => CompletionStage[Done] =
        (_) => CompletableFuture.completedFuture(Done.getInstance())

      private var prepareCallback: (CouchbaseSession, AggregateEventTag[Event]) => CompletionStage[Done] =
        (_, _) => CompletableFuture.completedFuture(Done.getInstance())

      private var handlers = Map.empty[Class[_ <: Event], Handler[Event]]

      override def setGlobalPrepare(
          callback: JFunction[CouchbaseSession, CompletionStage[Done]]
      ): CouchbaseReadSide.ReadSideHandlerBuilder[Event] = {
        globalPrepareCallback = callback.apply
        this
      }

      override def setPrepare(
          callback: BiFunction[CouchbaseSession, AggregateEventTag[Event], CompletionStage[Done]]
      ): CouchbaseReadSide.ReadSideHandlerBuilder[Event] = {
        prepareCallback = callback.apply
        this
      }

      override def setEventHandler[E <: Event](
          eventClass: Class[E],
          handler: CouchbaseReadSide.TriConsumer[CouchbaseSession, E, Offset, CompletionStage[Done]]
      ): CouchbaseReadSide.ReadSideHandlerBuilder[Event] = {
        handlers += (eventClass -> ((cs: CouchbaseSession, event: E, offset: Offset) => handler(cs, event, offset)))
        this
      }

      override def setEventHandler[E <: Event](
          eventClass: Class[E],
          handler: BiFunction[CouchbaseSession, E, CompletionStage[Done]]
      ): CouchbaseReadSide.ReadSideHandlerBuilder[Event] = {
        handlers += (eventClass -> ((cs: CouchbaseSession, event: E, offset: Offset) => handler(cs, event)))
        this
      }

      override def build(): ReadSideProcessor.ReadSideHandler[Event] =
        new CouchbaseReadSideHandler[Event](
          couchbaseSession,
          offsetStore,
          handlers,
          globalPrepareCallback,
          prepareCallback,
          readSideId,
          dispatcher
        )
    }
} 
Example 6
Source File: CouchbaseClusteredPersistentEntitySpec.scala    From akka-persistence-couchbase   with Apache License 2.0
package com.lightbend.lagom.javadsl.persistence.couchbase

import java.util.concurrent.CompletionStage

import akka.persistence.couchbase.CouchbaseClusterConnection
import com.lightbend.lagom.internal.persistence.couchbase.TestConfig
import com.lightbend.lagom.internal.persistence.testkit.AwaitPersistenceInit.awaitPersistenceInit
import com.lightbend.lagom.javadsl.persistence.{ReadSideProcessor, TestEntityReadSide}
import com.lightbend.lagom.javadsl.persistence.multinode.{
  AbstractClusteredPersistentEntityConfig,
  AbstractClusteredPersistentEntitySpec
}
import com.lightbend.lagom.javadsl.persistence.TestEntity.Evt
import com.typesafe.config.Config
import play.api.inject.DefaultApplicationLifecycle

object CouchbaseClusteredPersistentEntityConfig extends AbstractClusteredPersistentEntityConfig {
  override def additionalCommonConfig(databasePort: Int): Config =
    TestConfig.persistenceConfig
}

class CouchbaseClusteredPersistentEntitySpecMultiJvmNode1 extends CouchbaseClusteredPersistentEntitySpec
class CouchbaseClusteredPersistentEntitySpecMultiJvmNode2 extends CouchbaseClusteredPersistentEntitySpec
class CouchbaseClusteredPersistentEntitySpecMultiJvmNode3 extends CouchbaseClusteredPersistentEntitySpec

class CouchbaseClusteredPersistentEntitySpec
    extends AbstractClusteredPersistentEntitySpec(CouchbaseClusteredPersistentEntityConfig) {
  import com.lightbend.lagom.javadsl.persistence.couchbase.CouchbaseClusteredPersistentEntityConfig._

  override protected def atStartup(): Unit = {
    runOn(node1) {
      CouchbaseClusterConnection.connect().cleanUp().close()
      awaitPersistenceInit(system)
    }
    enterBarrier("couchbase-started")

    super.atStartup()
  }

  lazy val defaultApplicationLifecycle = new DefaultApplicationLifecycle

  def testEntityReadSide = injector.instanceOf[TestEntityReadSide]

  override protected def getAppendCount(id: String): CompletionStage[java.lang.Long] =
    testEntityReadSide.getAppendCount(id)

  override protected def readSideProcessor: Class[_ <: ReadSideProcessor[Evt]] =
    classOf[TestEntityReadSide.TestEntityReadSideProcessor]
} 
Example 7
Source File: CouchbaseReadSideSpec.scala    From akka-persistence-couchbase   with Apache License 2.0
package com.lightbend.lagom.javadsl.persistence.couchbase

import java.lang
import java.util.concurrent.CompletionStage

import akka.persistence.couchbase.CouchbaseBucketSetup
import com.lightbend.lagom.internal.javadsl.persistence.couchbase.{
  CouchbasePersistentEntityRegistry,
  CouchbaseReadSideImpl,
  JavadslCouchbaseOffsetStore
}
import com.lightbend.lagom.internal.persistence.ReadSideConfig
import com.lightbend.lagom.javadsl.persistence.{AbstractReadSideSpec, ReadSideProcessor, TestEntity}
import com.typesafe.config.{Config, ConfigFactory}
import play.api.inject.guice.GuiceInjectorBuilder
import com.lightbend.lagom.javadsl.persistence.TestEntityReadSide

object CouchbaseReadSideSpec {
  val defaultConfig: Config = ConfigFactory.parseString("akka.loglevel = INFO")
}

class CouchbaseReadSideSpec
    extends CouchbasePersistenceSpec(CouchbaseReadSideSpec.defaultConfig)
    with AbstractReadSideSpec
    with CouchbaseBucketSetup {
  private lazy val injector = new GuiceInjectorBuilder().build()

  lazy val testSession = couchbaseSession.asJava

  override protected lazy val persistentEntityRegistry = new CouchbasePersistentEntityRegistry(system, injector)

  private lazy val offsetStore = new JavadslCouchbaseOffsetStore(system, testSession, ReadSideConfig())
  private lazy val couchbaseReadSide = new CouchbaseReadSideImpl(system, testSession, offsetStore, injector)

  override def processorFactory(): ReadSideProcessor[TestEntity.Evt] =
    new TestEntityReadSide.TestEntityReadSideProcessor(couchbaseReadSide)

  private lazy val readSide = new TestEntityReadSide(testSession)

  override def getAppendCount(id: String): CompletionStage[lang.Long] = readSide.getAppendCount(id)
} 
Example 8
Source File: package.scala    From akka-persistence-couchbase   with Apache License 2.0
package com.lightbend.lagom.javadsl.persistence

import java.util.concurrent.CompletionStage
import java.util.function.BiConsumer

import akka.actor.ActorRef

package object testkit {

  implicit class pipe[T](val stage: CompletionStage[T]) extends AnyVal {
    def pipeTo(recipient: ActorRef): Unit = {
      stage.whenComplete(new BiConsumer[T, Throwable] {
        override def accept(value: T, e: Throwable): Unit = {
          if (value != null) recipient ! value
          if (e != null) recipient ! e
        }
      })
    }
  }
} 
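The pipe enrichment above forwards either the completed value or the failure of a CompletionStage to an ActorRef. A hypothetical usage sketch, assuming the Lagom testkit package object shown above is on the classpath (the actor system and probe are illustrative):

import java.util.concurrent.CompletableFuture

import akka.actor.ActorSystem
import akka.testkit.TestProbe
import com.lightbend.lagom.javadsl.persistence.testkit._

object PipeToActorSketch {
  def main(args: Array[String]): Unit = {
    implicit val system: ActorSystem = ActorSystem("pipe-sketch")
    val probe = TestProbe()

    // whenComplete fires once the stage finishes and the value is sent to the probe.
    CompletableFuture.completedFuture("done").pipeTo(probe.ref)
    probe.expectMsg("done")

    system.terminate()
  }
}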
Example 9
Source File: PlayServiceConfiguration.scala    From play-soap   with Apache License 2.0
package play.soap

import java.lang.reflect.Method
import java.lang.reflect.ParameterizedType
import java.lang.reflect.Type

import org.apache.cxf.wsdl.service.factory.AbstractServiceConfiguration
import java.util.concurrent.CompletionStage

import scala.concurrent.Future

import java.lang.Boolean.FALSE

private[soap] class PlayServiceConfiguration extends AbstractServiceConfiguration {

  
  override def hasOutMessage(m: Method) = {
    m.getGenericReturnType match {
      case future: ParameterizedType
          if future.getRawType == classOf[Future[_]] ||
            future.getRawType == classOf[CompletionStage[_]] =>
        future.getActualTypeArguments.headOption match {
          case Some(unit) if unit == classOf[Unit] || unit == classOf[Void] => FALSE
          case _                                                            => null
        }
      case _ => null
    }
  }
} 
Example 10
Source File: MessageToDeviceSink.scala    From toketi-iothubreact   with MIT License
// Copyright (c) Microsoft. All rights reserved.

package com.microsoft.azure.iot.iothubreact.sinks

import java.util.concurrent.CompletionStage

import akka.Done
import akka.japi.function.Procedure
import akka.stream.javadsl.{Sink ⇒ JavaSink}
import akka.stream.scaladsl.{Sink ⇒ ScalaSink}
import com.microsoft.azure.iot.iothubreact.config.{ConnectConfiguration, IConnectConfiguration}
import com.microsoft.azure.iot.iothubreact.{Logger, MessageFromDevice, MessageToDevice}
import com.microsoft.azure.sdk.iot.service.{IotHubServiceClientProtocol, ServiceClient}

object MessageToDeviceSink {
  def apply(): MessageToDeviceSink = new MessageToDeviceSink()

  def apply(config: IConnectConfiguration): MessageToDeviceSink = new MessageToDeviceSink(config)
}


class MessageToDeviceSink(config: IConnectConfiguration)
  extends ISink[MessageToDevice]
    with Logger {

  // Parameterless ctor
  def this() = this(ConnectConfiguration())

  private[iothubreact] val protocol     = IotHubServiceClientProtocol.AMQPS
  private[iothubreact] val timeoutMsecs = 15000

  private[this] val connString    = s"HostName=${config.accessHostname};" +
    s"SharedAccessKeyName=${config.accessPolicy};" +
    s"SharedAccessKey=${config.accessKey}"
  private[this] val serviceClient = ServiceClient.createFromConnectionString(connString, protocol)

  log.info("Connecting client to ${} ...", config.accessHostname)
  serviceClient.open()

  def scalaSink(): ScalaSink[MessageToDevice, scala.concurrent.Future[Done]] =
    ScalaSink.foreach[MessageToDevice](
      m ⇒ {
        log.info("Sending message to device {}", m.deviceId)
        serviceClient.sendAsync(m.deviceId, m.message)
      })

  def javaSink(): JavaSink[MessageToDevice, CompletionStage[Done]] =
    JavaSink.foreach[MessageToDevice] {
      JavaSinkProcedure
    }

  // Required for Scala 2.11
  private[this] object JavaSinkProcedure extends Procedure[MessageToDevice] {
    @scala.throws[Exception](classOf[Exception])
    override def apply(m: MessageToDevice): Unit = {
      log.info("Sending message to device " + m.deviceId)
      serviceClient.sendAsync(m.deviceId, m.message)
    }
  }
} 
Example 11
Source File: OffsetSaveSink.scala    From toketi-iothubreact   with MIT License
// Copyright (c) Microsoft. All rights reserved.

package com.microsoft.azure.iot.iothubreact.sinks

import java.util.concurrent.CompletionStage

import akka.Done
import akka.actor.ActorRef
import akka.japi.function.Procedure
import akka.stream.javadsl.{Sink ⇒ JavaSink}
import akka.stream.scaladsl.{Sink ⇒ ScalaSink}
import com.microsoft.azure.iot.iothubreact.checkpointing.CheckpointService.UpdateOffset
import com.microsoft.azure.iot.iothubreact.checkpointing.{CheckpointActorSystem, IOffsetLoader}
import com.microsoft.azure.iot.iothubreact.config.IConfiguration
import com.microsoft.azure.iot.iothubreact.{Logger, MessageFromDevice}

import scala.collection.concurrent.TrieMap
import scala.concurrent.Future

private[iothubreact] final case class OffsetSaveSink(
    config: IConfiguration,
    offsetLoader: IOffsetLoader)
  extends ISink[MessageFromDevice]
    with Logger {

  // The service responsible for writing the offset to the storage
  lazy val checkpointService = (0 until config.connect.iotHubPartitions).map {
    p ⇒
      p → CheckpointActorSystem(config.checkpointing).getCheckpointService(p)
  }(collection.breakOut): Map[Int, ActorRef]

  // The offset stored (value) for each partition (key)
  val current: TrieMap[Int, Long] = TrieMap()

  // Initialize `current` with the offsets in the storage
  offsetLoader.GetSavedOffsets.foreach {
    case (a, c) ⇒ current += a → c.toLong
  }

  def scalaSink(): ScalaSink[MessageFromDevice, scala.concurrent.Future[Done]] = {
    ScalaSink.foreach[MessageFromDevice] {
      doWrite
    }
  }

  def javaSink(): JavaSink[MessageFromDevice, CompletionStage[Done]] = {
    JavaSink.foreach[MessageFromDevice] {
      JavaSinkProcedure
    }
  }

  // Required for Scala 2.11
  private[this] object JavaSinkProcedure extends Procedure[MessageFromDevice] {
    @scala.throws[Exception](classOf[Exception])
    override def apply(m: MessageFromDevice): Unit = {
      doWrite(m)
    }
  }

  private[this] def doWrite(m: MessageFromDevice) = {
    m.runtimeInfo.partitionInfo.partitionNumber.map {
      p =>
        synchronized {
          val os: Long = m.offset.toLong
          val cur: Long = current.getOrElse(p, -1)
          if (os > cur) {
            log.debug(s"Committing offset ${m.offset} on partition ${p}")
            checkpointService(p) ! UpdateOffset(m.offset)
            current += p → os
          } else {
            log.debug(s"Ignoring offset ${m.offset} since it precedes ${cur}")
            Future successful (Done)
          }
        }
    }
  }
} 
Example 12
Source File: JavaInvalidCharacterEscapingTest.scala    From guardrail   with MIT License
package core.Dropwizard

import com.fasterxml.jackson.annotation.JsonProperty
import com.fasterxml.jackson.databind.ObjectMapper
import invalidCharacters.client.dropwizard.invalidCharacters.InvalidCharactersClient
import invalidCharacters.server.dropwizard.definitions.{InvalidCharacters, InvalidCharactersEnum}
import io.netty.buffer.Unpooled
import java.net.{SocketAddress, URI, URLDecoder}
import java.util.concurrent.{CompletableFuture, CompletionStage}
import java.util.function
import org.asynchttpclient.Response.ResponseBuilder
import org.asynchttpclient.netty.EagerResponseBodyPart
import org.asynchttpclient.uri.Uri
import org.asynchttpclient.{HttpResponseStatus, Request, Response}
import org.scalatest.freespec.AnyFreeSpec
import org.scalatest.matchers.must.Matchers
import scala.collection.JavaConverters._

object JavaInvalidCharacterEscapingTest {
  private implicit class RichString(private val s: String) extends AnyVal {
    def dec: String = URLDecoder.decode(s, "UTF-8")
  }

  private object OkStatus extends HttpResponseStatus(Uri.create("http://localhost:1234/foo?foo^bar=query-param")) {
    override def getStatusCode = 200
    override def getStatusText = "OK"
    override def getProtocolName = "HTTP"
    override def getProtocolMajorVersion = 1
    override def getProtocolMinorVersion = 1
    override def getProtocolText = "HTTP/1.1"
    override def getRemoteAddress: SocketAddress = ???
    override def getLocalAddress: SocketAddress = ???
  }
}

class JavaInvalidCharacterEscapingTest extends AnyFreeSpec with Matchers {
  import JavaInvalidCharacterEscapingTest._

  "Invalid characters in Java enums should be escaped" in {
    InvalidCharactersEnum.NORMAL.getName mustBe "normal"
    InvalidCharactersEnum.BANG_MOO_COLON_COW_SEMICOLON.getName mustBe "!moo:cow;"
    InvalidCharactersEnum.POUND_YEAH.getName mustBe "#yeah"
    InvalidCharactersEnum.WEIRD_AT.getName mustBe "weird@"
  }

  "Invalid characters in Java POJO properties should be escaped" in {
    val invChar = new InvalidCharacters.Builder("stuff", InvalidCharactersEnum.POUND_YEAH).build()
    invChar.getCloseSquareBraceMoo mustBe "stuff"
    invChar.getSomeEnumAsteriskCaret mustBe InvalidCharactersEnum.POUND_YEAH

    classOf[InvalidCharacters].getDeclaredField("closeSquareBraceMoo").getAnnotation(classOf[JsonProperty]).value mustBe "]moo"
    classOf[InvalidCharacters].getDeclaredField("someEnumAsteriskCaret").getAnnotation(classOf[JsonProperty]).value mustBe "some-enum*^"
  }

  "Invalid characters in Java operation param names should be escaped" in {
    val httpClient = new function.Function[Request, CompletionStage[Response]] {
      override def apply(request: Request): CompletionStage[Response] = {
        println(request.getUri)
        println(request.getQueryParams.asScala.map(_.getName))
        val qps = request.getQueryParams.asScala.map(p => (p.getName.dec, p.getValue.dec))
        val fps = request.getFormParams.asScala.map(p => (p.getName.dec, p.getValue.dec))
        qps.find(_._1 == "foo^bar").map(_._2) mustBe Some("firstarg")
        fps.find(_._1 == "a*b").map(_._2) mustBe Some("secondarg")
        fps.find(_._1 == "bc?").map(_._2) mustBe Some("thirdarg")
        fps.find(_._1 == "d/c").map(_._2) mustBe Some("fourtharg")
        val response = new ResponseBuilder()
        response.accumulate(OkStatus)
        response.accumulate(new EagerResponseBodyPart(
          Unpooled.copiedBuffer(new ObjectMapper().writeValueAsBytes(new InvalidCharacters.Builder("foo", InvalidCharactersEnum.WEIRD_AT).build())),
          true
        ))
        CompletableFuture.completedFuture(response.build())
      }
    }

    val client = new InvalidCharactersClient.Builder(new URI("http://localhost:1234")).withHttpClient(httpClient).build()
    val response = client.getFoo("firstarg", "secondarg", "thirdarg", "fourtharg").call().toCompletableFuture.get()
    response.fold(
      { invChar =>
        invChar.getCloseSquareBraceMoo mustBe "foo"
        invChar.getSomeEnumAsteriskCaret mustBe invalidCharacters.client.dropwizard.definitions.InvalidCharactersEnum.WEIRD_AT
      }
    )
  }
} 
Example 13
Source File: DropwizardRoundTripTest.scala    From guardrail   with MIT License
package core.Dropwizard

import com.fasterxml.jackson.databind.ObjectMapper
import examples.client.dropwizard.user.{ UserClient, GetUserByNameResponse => GetUserByNameClientResponse }
import examples.server.dropwizard.definitions.User
import examples.server.dropwizard.user.UserHandler._
import examples.server.dropwizard.user._
import helpers.MockHelpers._
import java.util
import java.util.Optional
import java.util.concurrent.{ CompletableFuture, CompletionStage }
import org.asynchttpclient.{ Request, Response }
import org.mockito.{ ArgumentMatchersSugar, MockitoSugar }
import org.scalatest.concurrent.Waiters
import org.scalatest.{ FreeSpec, Matchers }
import scala.compat.java8.FunctionConverters._

class DropwizardRoundTripTest extends FreeSpec with Matchers with Waiters with MockitoSugar with ArgumentMatchersSugar {
  private implicit val mapper = new ObjectMapper

  "Test server" in {
    val USERNAME = "foobar"

    val serverFuture  = new CompletableFuture[GetUserByNameResponse]
    val asyncResponse = mockAsyncResponse(serverFuture)

    val resource = new UserResource(new UserHandler {
      override def createUser(body: User): CompletionStage[CreateUserResponse]                                          = ???
      override def createUsersWithArrayInput(body: util.List[User]): CompletionStage[CreateUsersWithArrayInputResponse] = ???
      override def createUsersWithListInput(body: util.List[User]): CompletionStage[CreateUsersWithListInputResponse]   = ???
      override def loginUser(username: String, password: String): CompletionStage[LoginUserResponse]                    = ???
      override def logoutUser(): CompletionStage[LogoutUserResponse]                                                    = ???
      override def updateUser(username: String, body: User): CompletionStage[UpdateUserResponse]                        = ???
      override def deleteUser(username: String): CompletionStage[DeleteUserResponse]                                    = ???

      override def getUserByName(username: String): CompletionStage[GetUserByNameResponse] = {
        username match {
          case USERNAME =>
            serverFuture.complete(
              GetUserByNameResponse.Ok(
                new User.Builder()
                  .withEmail("[email protected]")
                  .withFirstName("Foo")
                  .withLastName("Bar")
                  .withId(1)
                  .withUsername(USERNAME)
                  .build()
              )
            )
          case "" =>
            serverFuture.complete(GetUserByNameResponse.BadRequest)
          case _ =>
            serverFuture.complete(GetUserByNameResponse.NotFound)
        }
        serverFuture
      }
    })

    val httpClient: Request => CompletionStage[Response] = { request =>
      val userPath = "^/v2/user/([^/]*)$".r
      request.getUri.getPath match {
        case userPath(username) =>
          resource.getUserByName(username, asyncResponse)
          serverFuture.thenApply({ response =>
            val entityBody = response match {
              case r: GetUserByNameResponse.Ok => Some(r.getEntityBody)
              case _                           => None
            }
            mockAHCResponse(request.getUrl, response.getStatusCode, entityBody)
          })
        case _ =>
          CompletableFuture.completedFuture(mockAHCResponse(request.getUrl, 404))
      }
    }

    val client = new UserClient.Builder()
      .withHttpClient(httpClient.asJava)
      .withObjectMapper(mapper)
      .build()

    val w = new Waiter
    client
      .getUserByName(USERNAME)
      .call()
      .whenComplete({ (response, t) =>
        w { t shouldBe null }
        response match {
          case r: GetUserByNameClientResponse.Ok =>
            w {
              r.getValue.getUsername.get shouldBe USERNAME
              r.getValue.getPassword shouldBe Optional.empty
            }
          case _: GetUserByNameClientResponse.BadRequest => w { fail("Got BadRequest") }
          case _: GetUserByNameClientResponse.NotFound   => w { fail("Got NotFound") }
        }
        w.dismiss()
      })
    w.await(dismissals(1))
  }
} 
Example 14
Source File: TestTopicFactory.scala    From lagom   with Apache License 2.0
package com.lightbend.lagom.internal.testkit

import java.util.concurrent.CompletionStage
import javax.inject.Inject

import akka.Done
import akka.stream.Materializer
import akka.stream.javadsl.Flow
import akka.stream.javadsl.Sink
import akka.stream.javadsl.Source
import com.lightbend.lagom.internal.broker.TaggedOffsetTopicProducer
import com.lightbend.lagom.internal.javadsl.api.MethodTopicHolder
import com.lightbend.lagom.internal.javadsl.api.broker.TopicFactory
import com.lightbend.lagom.internal.javadsl.server.ResolvedServices
import com.lightbend.lagom.javadsl.api.Descriptor.TopicCall
import com.lightbend.lagom.javadsl.api.broker.Topic.TopicId
import com.lightbend.lagom.javadsl.api.broker.Message
import com.lightbend.lagom.javadsl.api.broker.Subscriber
import com.lightbend.lagom.javadsl.api.broker.Topic
import com.lightbend.lagom.javadsl.persistence.AggregateEvent
import com.lightbend.lagom.javadsl.persistence.Offset

import scala.collection.JavaConverters._


class TestTopicFactory @Inject() (resolvedServices: ResolvedServices, materializer: Materializer) extends TopicFactory {
  private val topics: Map[TopicId, Any] = resolvedServices.services.flatMap { service =>
    service.descriptor.topicCalls().asScala.map { topicCall =>
      topicCall.topicId -> service.service
    }
  }.toMap

  override def create[Message](topicCall: TopicCall[Message]): Topic[Message] = {
    topics.get(topicCall.topicId()) match {
      case Some(service) =>
        topicCall.topicHolder() match {
          case method: MethodTopicHolder =>
            method.create(service) match {
              case topicProducer: TaggedOffsetTopicProducer[Message, _] =>
                new TestTopic(topicCall, topicProducer)
              case other =>
                throw new IllegalArgumentException(s"Testkit does not know how to handle topic $other")
            }
        }
      case None => throw new IllegalArgumentException(s"$topicCall hasn't been resolved.")
    }
  }

  private class TestTopic[Payload, Event <: AggregateEvent[Event]](
      topicCall: TopicCall[Payload],
      topicProducer: TaggedOffsetTopicProducer[Payload, Event]
  ) extends Topic[Payload] {
    override def topicId = topicCall.topicId

    override def subscribe(): Subscriber[Payload] = new TestSubscriber[Payload](identity)

    private class TestSubscriber[SubscriberPayload](transform: Payload => SubscriberPayload)
        extends Subscriber[SubscriberPayload] {
      override def withGroupId(groupId: String): Subscriber[SubscriberPayload] = this

      override def withMetadata(): Subscriber[Message[SubscriberPayload]] =
        new TestSubscriber(msg => Message.create(transform(msg)))

      override def atMostOnceSource(): Source[SubscriberPayload, _] = {
        val serializer   = topicCall.messageSerializer().serializerForRequest()
        val deserializer = topicCall.messageSerializer().deserializer(serializer.protocol())

        // Create a source for all the tags, and merge them all together.
        // Then, send the flow through a serializer and deserializer, to simulate sending it over the wire.
        Source
          .from(topicProducer.tags)
          .asScala
          .flatMapMerge(topicProducer.tags.size(), { tag =>
            topicProducer.readSideStream.apply(tag, Offset.NONE).asScala.map(_.first)
          })
          .map { message =>
            serializer.serialize(message)
          }
          .map { bytes =>
            deserializer.deserialize(bytes)
          }
          .map(transform)
          .asJava
      }

      override def atLeastOnce(flow: Flow[SubscriberPayload, Done, _]): CompletionStage[Done] = {
        atMostOnceSource().via(flow).runWith(Sink.ignore[Done], materializer)
      }
    }
  }
} 
Example 15
Source File: WebHandler.scala    From akka-grpc   with Apache License 2.0
package akka.grpc.javadsl

import java.util
import java.util.concurrent.CompletionStage

import akka.NotUsed
import akka.actor.ClassicActorSystemProvider
import akka.annotation.ApiMayChange
import akka.grpc.javadsl.ServiceHandler.{ concat, unsupportedMediaType }
import akka.http.javadsl.marshalling.Marshaller
import akka.http.javadsl.model.{ HttpRequest, HttpResponse }
import akka.http.javadsl.server.Route
import akka.http.javadsl.server.directives.RouteAdapter
import akka.http.scaladsl.marshalling.{ ToResponseMarshaller, Marshaller => sMarshaller }
import akka.grpc.scaladsl
import akka.http.scaladsl.server.directives.MarshallingDirectives
import akka.japi.{ Function => JFunction }
import akka.stream.Materializer
import akka.stream.javadsl.{ Keep, Sink, Source }
import akka.util.ConstantFun
import ch.megard.akka.http.cors.javadsl.settings.CorsSettings
import ch.megard.akka.http.cors.javadsl.CorsDirectives

@ApiMayChange
object WebHandler {

  
  def grpcWebHandler(
      handlers: util.List[JFunction[HttpRequest, CompletionStage[HttpResponse]]],
      as: ClassicActorSystemProvider,
      mat: Materializer,
      corsSettings: CorsSettings): JFunction[HttpRequest, CompletionStage[HttpResponse]] = {
    import scala.collection.JavaConverters._
    val servicesHandler = concat(handlers.asScala.toList: _*)
    val servicesRoute = RouteAdapter(MarshallingDirectives.handleWith(servicesHandler.apply(_)))
    val handler = asyncHandler(CorsDirectives.cors(corsSettings, () => servicesRoute), as, mat)
    (req: HttpRequest) =>
      if (scaladsl.ServiceHandler.isGrpcWebRequest(req) || scaladsl.WebHandler.isCorsPreflightRequest(req)) handler(req)
      else unsupportedMediaType
  }

  // Java version of Route.asyncHandler
  private def asyncHandler(
      route: Route,
      as: ClassicActorSystemProvider,
      mat: Materializer): HttpRequest => CompletionStage[HttpResponse] = {
    val sealedFlow =
      route
        .seal()
        .flow(as.classicSystem, mat)
        .toMat(Sink.head[HttpResponse], Keep.right[NotUsed, CompletionStage[HttpResponse]])
    (req: HttpRequest) => Source.single(req).runWith(sealedFlow, mat)
  }
} 
Example 16
Source File: ServiceHandler.scala    From akka-grpc   with Apache License 2.0
package akka.grpc.javadsl

import java.util.concurrent.{ CompletableFuture, CompletionStage }

import akka.annotation.ApiMayChange
import akka.annotation.InternalApi
import akka.grpc.scaladsl.{ ServiceHandler => sServiceHandler }
import akka.http.javadsl.model.{ HttpRequest, HttpResponse, StatusCodes }
// using japi because bindAndHandleAsync expects that
import akka.japi.{ Function => JFunction }

import scala.annotation.varargs

@ApiMayChange
object ServiceHandler {

  
  @varargs
  def handler(handlers: JFunction[HttpRequest, CompletionStage[HttpResponse]]*)
      : JFunction[HttpRequest, CompletionStage[HttpResponse]] = {
    val servicesHandler = concat(handlers: _*)
    (req: HttpRequest) => if (sServiceHandler.isGrpcRequest(req)) servicesHandler(req) else unsupportedMediaType
  }

  private[javadsl] def concat(handlers: JFunction[HttpRequest, CompletionStage[HttpResponse]]*)
      : JFunction[HttpRequest, CompletionStage[HttpResponse]] =
    (req: HttpRequest) =>
      handlers.foldLeft(notFound) { (comp, next) =>
        comp.thenCompose(res => if (res.status == StatusCodes.NOT_FOUND) next.apply(req) else comp)
      }

} 
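The concat helper above folds the handlers with thenCompose, asking the next handler only when the previous one answered 404 Not Found. The same chaining pattern, reduced to plain CompletionStage values with no akka-http types (a standalone sketch; all names are illustrative):

import java.util.concurrent.{ CompletableFuture, CompletionStage }

object ChainHandlersSketch {
  // Try each handler in turn, keeping the first defined result.
  def firstDefined[A, B](handlers: Seq[A => CompletionStage[Option[B]]]): A => CompletionStage[Option[B]] =
    (a: A) =>
      handlers.foldLeft[CompletionStage[Option[B]]](CompletableFuture.completedFuture(Option.empty[B])) { (acc, next) =>
        acc.thenCompose[Option[B]]((res: Option[B]) => if (res.isEmpty) next(a) else acc)
      }

  def main(args: Array[String]): Unit = {
    val miss: String => CompletionStage[Option[String]] = _ => CompletableFuture.completedFuture(None)
    val hit: String => CompletionStage[Option[String]] = s => CompletableFuture.completedFuture(Some(s.toUpperCase))
    // The first handler declines, so the second one is asked.
    println(firstDefined(Seq(miss, hit))("hello").toCompletableFuture.get()) // Some(HELLO)
  }
}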
Example 17
Source File: GrpcMarshalling.scala    From akka-grpc   with Apache License 2.0
package akka.grpc.javadsl

import java.util.concurrent.{ CompletableFuture, CompletionStage }
import java.util.Optional

import akka.NotUsed
import akka.actor.ActorSystem
import akka.actor.ClassicActorSystemProvider
import akka.grpc._
import akka.grpc.internal.{ CancellationBarrierGraphStage, GrpcResponseHelpers, MissingParameterException }
import akka.grpc.GrpcProtocol.{ GrpcProtocolReader, GrpcProtocolWriter }
import akka.http.javadsl.model.{ HttpRequest, HttpResponse }
import akka.japi.Function
import akka.stream.Materializer
import akka.stream.javadsl.{ Sink, Source }
import akka.util.ByteString

import com.github.ghik.silencer.silent

object GrpcMarshalling {

  def negotiated[T](
      req: HttpRequest,
      f: (GrpcProtocolReader, GrpcProtocolWriter) => CompletionStage[T]): Optional[CompletionStage[T]] =
    GrpcProtocol
      .negotiate(req)
      .map {
        case (maybeReader, writer) =>
          maybeReader.map(reader => f(reader, writer)).fold[CompletionStage[T]](failure, identity)
      }
      .fold(Optional.empty[CompletionStage[T]])(Optional.of)

  def unmarshal[T](
      data: Source[ByteString, AnyRef],
      u: ProtobufSerializer[T],
      mat: Materializer,
      reader: GrpcProtocolReader): CompletionStage[T] =
    data.via(reader.dataFrameDecoder).map(u.deserialize).runWith(Sink.headOption[T], mat).thenCompose[T] { opt =>
      if (opt.isPresent) CompletableFuture.completedFuture(opt.get)
      else failure(new MissingParameterException())
    }

  def unmarshalStream[T](
      data: Source[ByteString, AnyRef],
      u: ProtobufSerializer[T],
      @silent("never used") mat: Materializer,
      reader: GrpcProtocolReader): CompletionStage[Source[T, NotUsed]] = {
    CompletableFuture.completedFuture[Source[T, NotUsed]](
      data
        .mapMaterializedValue(_ => NotUsed)
        .via(reader.dataFrameDecoder)
        .map(japiFunction(u.deserialize))
        // In gRPC we signal failure by returning an error code, so we
        // don't want the cancellation bubbled out
        .via(new CancellationBarrierGraphStage)
        .mapMaterializedValue(japiFunction(_ => NotUsed)))
  }

  def marshal[T](
      e: T,
      m: ProtobufSerializer[T],
      writer: GrpcProtocolWriter,
      system: ClassicActorSystemProvider,
      eHandler: Function[ActorSystem, Function[Throwable, Trailers]] = GrpcExceptionHandler.defaultMapper)
      : HttpResponse =
    marshalStream(Source.single(e), m, writer, system, eHandler)

  def marshalStream[T](
      e: Source[T, NotUsed],
      m: ProtobufSerializer[T],
      writer: GrpcProtocolWriter,
      system: ClassicActorSystemProvider,
      eHandler: Function[ActorSystem, Function[Throwable, Trailers]] = GrpcExceptionHandler.defaultMapper)
      : HttpResponse =
    GrpcResponseHelpers(e.asScala, scalaAnonymousPartialFunction(eHandler))(m, writer, system)

  private def failure[R](error: Throwable): CompletableFuture[R] = {
    val future: CompletableFuture[R] = new CompletableFuture()
    future.completeExceptionally(error)
    future
  }
} 
Example 18
Source File: ServerReflection.scala    From akka-grpc   with Apache License 2.0
package akka.grpc.javadsl

import java.util.Collection
import java.util.concurrent.CompletionStage

import akka.actor.ClassicActorSystemProvider
import akka.annotation.ApiMayChange
import akka.grpc.ServiceDescription
import akka.grpc.internal.ServerReflectionImpl
import akka.http.javadsl.model.{ HttpRequest, HttpResponse }

import grpc.reflection.v1alpha.reflection.ServerReflectionHandler

@ApiMayChange(issue = "https://github.com/akka/akka-grpc/issues/850")
object ServerReflection {
  @ApiMayChange(issue = "https://github.com/akka/akka-grpc/issues/850")
  def create(
      objects: Collection[ServiceDescription],
      sys: ClassicActorSystemProvider): akka.japi.Function[HttpRequest, CompletionStage[HttpResponse]] = {
    import scala.collection.JavaConverters._
    val delegate = ServerReflectionHandler.apply(
      ServerReflectionImpl(objects.asScala.map(_.descriptor).toSeq, objects.asScala.map(_.name).toList))(sys)
    import scala.compat.java8.FutureConverters._
    implicit val ec = sys.classicSystem.dispatcher
    request =>
      delegate
        .apply(request.asInstanceOf[akka.http.scaladsl.model.HttpRequest])
        .map(_.asInstanceOf[HttpResponse])
        .toJava
  }
} 
Example 19
Source File: RouteUtils.scala    From akka-grpc   with Apache License 2.0
package akka.grpc.javadsl

import scala.concurrent.ExecutionContext
import java.util.concurrent.CompletionStage

import akka.actor.ClassicActorSystemProvider
import akka.annotation.ApiMayChange
import akka.http.scaladsl
import akka.http.javadsl.model.{ HttpRequest, HttpResponse }
import akka.http.javadsl.server.Route
import akka.http.javadsl.server.directives.RouteAdapter
import akka.http.scaladsl.server.RouteResult
import akka.japi.Function

import scala.compat.java8.FutureConverters


object RouteUtils {

  def toFunction(
      route: Route,
      system: ClassicActorSystemProvider): Function[HttpRequest, CompletionStage[HttpResponse]] = {
    implicit val sys = system.classicSystem
    implicit val ec: ExecutionContext = sys.dispatcher
    val handler = scaladsl.server.Route.asyncHandler(route.asScala)

    (request: HttpRequest) => {
      import FutureConverters._
      handler(request.asInstanceOf[scaladsl.model.HttpRequest]).map(_.asInstanceOf[HttpResponse]).toJava
    }
  }
} 
Example 20
Source File: ChannelUtils.scala    From akka-grpc   with Apache License 2.0
package akka.grpc.internal

import java.util.concurrent.CompletionStage

import akka.Done
import akka.annotation.InternalApi
import akka.event.LoggingAdapter
import io.grpc.{ ConnectivityState, ManagedChannel }

import scala.compat.java8.FutureConverters._
import scala.concurrent.{ Future, Promise }


object ChannelUtils {

  @InternalApi
  private[akka] def monitorChannel(
      ready: Promise[Unit],
      done: Promise[Done],
      channel: ManagedChannel,
      maxConnectionAttempts: Option[Int],
      log: LoggingAdapter): Unit = {
    def monitor(currentState: ConnectivityState, connectionAttempts: Int): Unit = {
      log.debug(s"monitoring with state $currentState and connectionAttempts $connectionAttempts")
      val newAttemptOpt = currentState match {
        case ConnectivityState.TRANSIENT_FAILURE =>
          if (maxConnectionAttempts.contains(connectionAttempts + 1)) {
            val ex = new ClientConnectionException(s"Unable to establish connection after [$maxConnectionAttempts]")
            ready.tryFailure(ex) || done.tryFailure(ex)
            None
          } else Some(connectionAttempts + 1)

        case ConnectivityState.READY =>
          ready.trySuccess(())
          Some(0)

        case ConnectivityState.SHUTDOWN =>
          done.trySuccess(Done)
          None

        case ConnectivityState.IDLE | ConnectivityState.CONNECTING =>
          Some(connectionAttempts)
      }
      newAttemptOpt.foreach { attempts =>
        channel.notifyWhenStateChanged(currentState, () => monitor(channel.getState(false), attempts))
      }
    }
    monitor(channel.getState(false), 0)
  }

} 
Example 21
Source File: InletTap.scala    From cloudflow   with Apache License 2.0
package cloudflow.akkastream.testkit.javadsl

import akka.NotUsed
import akka.actor.ActorSystem
import akka.kafka.ConsumerMessage._
import akka.stream._
import akka.stream.scaladsl._

import cloudflow.streamlets._
import cloudflow.akkastream.testkit._

// The use of Tuple here is OK since the creation of the tuple is handled
// internally by the AkkaStreamletTestKit when creating instances of this class
case class SourceInletTap[T] private[testkit] (inlet: CodecInlet[T], src: akka.stream.javadsl.Source[(T, Committable), NotUsed])
    extends InletTap[T] {
  val portName = inlet.name

  private[testkit] val source = src.asScala
}

case class QueueInletTap[T](inlet: CodecInlet[T])(implicit system: ActorSystem) extends InletTap[T] {
  private val bufferSize = 1024
  private val hub        = BroadcastHub.sink[T](bufferSize)

  // Here we map the materialized value of the Scala queue source to materialize
  // to the Javadsl version of `SourceQueueWithComplete` so Java users can use
  // the `offer` method with native CompletionStages instead of Scala Futures.
  //
  // It is still a `scaladsl.Source[T, SourceQueueWithComplete]` because it will
  // only be used by lower-level Scala code. But the resulting materialized value,
  // e.g. the `SourceQueueWithComplete` is now the `javadsl.SourceQueueWithComplete`.
  private val qSource =
    Source
      .queue[T](bufferSize, OverflowStrategy.backpressure)
      .mapMaterializedValue(new SourceQueueAdapter(_))

  private[testkit] val (q, src) = qSource.toMat(hub)(Keep.both).run()

  val portName = inlet.name
  val source = src.map { t ⇒
    (t, TestCommittableOffset())
  }
  val queue: akka.stream.javadsl.SourceQueueWithComplete[T] = q
}


private[testkit] final class SourceQueueAdapter[T](delegate: SourceQueueWithComplete[T])
    extends akka.stream.javadsl.SourceQueueWithComplete[T] {
  import java.util.concurrent.CompletionStage
  import scala.compat.java8.FutureConverters._
  import akka.Done

  def offer(elem: T): CompletionStage[QueueOfferResult] = delegate.offer(elem).toJava
  def watchCompletion(): CompletionStage[Done]          = delegate.watchCompletion().toJava
  def complete(): Unit                                  = delegate.complete()
  def fail(ex: Throwable): Unit                         = delegate.fail(ex)
} 
Example 22
Source File: DelegatingWebSocketListener.scala    From sttp   with Apache License 2.0
package sttp.client.httpclient

import java.net.http.WebSocket
import java.net.http.WebSocket.Listener
import java.nio.ByteBuffer
import java.util.concurrent.CompletionStage
import java.util.concurrent.atomic.AtomicBoolean

private[httpclient] class DelegatingWebSocketListener[WS_RESULT](
    delegate: Listener,
    onInitialOpen: WebSocket => Unit,
    onInitialError: Throwable => Unit
) extends Listener {
  private val initialised = new AtomicBoolean(false)

  override def onOpen(webSocket: WebSocket): Unit = {
    if (!initialised.getAndSet(true)) {
      onInitialOpen(webSocket)
    }
    delegate.onOpen(webSocket)
  }

  override def onText(webSocket: WebSocket, data: CharSequence, last: Boolean): CompletionStage[_] = {
    delegate.onText(webSocket, data, last)
  }

  override def onBinary(webSocket: WebSocket, data: ByteBuffer, last: Boolean): CompletionStage[_] = {
    delegate.onBinary(webSocket, data, last)
  }

  override def onPing(webSocket: WebSocket, message: ByteBuffer): CompletionStage[_] = {
    delegate.onPing(webSocket, message)
  }

  override def onPong(webSocket: WebSocket, message: ByteBuffer): CompletionStage[_] = {
    delegate.onPong(webSocket, message)
  }

  override def onClose(webSocket: WebSocket, statusCode: Int, reason: String): CompletionStage[_] = {
    delegate.onClose(webSocket, statusCode, reason)
  }
  override def onError(webSocket: WebSocket, error: Throwable): Unit = {
    if (!initialised.getAndSet(true)) {
      onInitialError(error)
    }
    delegate.onError(webSocket, error)
  }
} 
Example 23
Source File: HttpClientLowLevelListenerWebSocketTest.scala    From sttp   with Apache License 2.0
package sttp.client.httpclient

import java.net.http.WebSocket
import java.net.http.WebSocket.Listener
import java.util.concurrent.CompletionStage

import sttp.client.testing.websocket.LowLevelListenerWebSocketTest

abstract class HttpClientLowLevelListenerWebSocketTest[F[_]]
    extends LowLevelListenerWebSocketTest[F, WebSocket, WebSocketHandler] {

  override def testErrorWhenEndpointIsNotWebsocket: Boolean = false

  override def createHandler(_onTextFrame: String => Unit): WebSocketHandler[WebSocket] =
    WebSocketHandler.fromListener(new Listener {
      var accumulator: String = ""
      override def onText(webSocket: WebSocket, data: CharSequence, last: Boolean): CompletionStage[_] = {
        if (last) {
          _onTextFrame(accumulator + data.toString)
          accumulator = ""
        } else {
          accumulator += data.toString
        }
        super.onText(webSocket, data, last)
      }
    })

  override def sendText(ws: WebSocket, t: String): Unit = ws.sendText(t.toString, true).get()

  override def sendCloseFrame(ws: WebSocket): Unit = ws.sendClose(WebSocket.NORMAL_CLOSURE, "").get()
} 
Example 24
Source File: stage.scala    From neotypes   with MIT License
package neotypes
package internal.syntax

import java.util.concurrent.{CompletionException, CompletionStage}

private[neotypes] object stage {
  private final val defaultExHandler: PartialFunction[Throwable, Either[Throwable, Nothing]] = {
    case ex: Throwable => Left(ex)
  }

  implicit class CompletionStageOps[A](private val underlying: CompletionStage[A]) extends AnyVal {
    private final def acceptImpl[B](cb: Either[Throwable, B] => Unit)
                                   (f: A => Either[Throwable, B])
                                   (g: Throwable => Either[Throwable, B]): Unit =
      internal.utils.void(
        underlying.thenAccept(a => cb(f(a))).exceptionally { ex: Throwable =>
          // Execute the function.
          ex match {
            case _: CompletionException => cb(g(ex.getCause))
            case _                      => cb(g(ex))
          }
          // Return null, which is the only value that conforms to Void.
          // See: https://stackoverflow.com/questions/44171262/convert-scala-unit-to-java-void/44172467#44172467
          None.orNull
        }
      )

    def accept[B](cb: Either[Throwable, B] => Unit)
                 (f: A => Either[Throwable, B]): Unit =
      acceptImpl(cb)(f)(defaultExHandler)

    def acceptExceptionally[B](cb: Either[Throwable, B] => Unit)
                              (f: A => Either[Throwable, B])
                              (g: PartialFunction[Throwable, Either[Throwable, B]]): Unit =
      acceptImpl(cb)(f)(g.orElse(defaultExHandler))

    def acceptVoid(cb: Either[Throwable, Unit] => Unit)
                  (implicit ev: A =:= Void): Unit = {
      internal.utils.void(ev)
      acceptImpl(cb)(_ => Right(()))(defaultExHandler)
    }
  }
} 
Example 25
Source File: effects.scala    From izanami   with Apache License 2.0
package libs
import java.util.concurrent.CompletionStage

import scala.concurrent.{Future, Promise}

object effects {

  implicit class CSOps[T](cs: CompletionStage[T]) {
    def toFuture: Future[T] = {
      val p = Promise[T]
      cs.whenComplete((ok, e) => {
        if (e != null) {
          p.failure(e)
        } else {
          p.success(ok)
        }
      })
      p.future
    }
  }

} 
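A hypothetical usage sketch of the CSOps enrichment above, assuming the libs.effects object is on the classpath (the values are illustrative):

import java.util.concurrent.CompletableFuture

import scala.concurrent.Await
import scala.concurrent.duration._

import libs.effects._

object ToFutureSketch {
  def main(args: Array[String]): Unit = {
    // The implicit CSOps class adapts the CompletionStage into a Scala Future.
    val future = CompletableFuture.completedFuture("ok").toFuture
    println(Await.result(future, 1.second)) // prints ok
  }
}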
Example 26
Source File: ProjectionRegistryModule.scala    From lagom   with Apache License 2.0
package com.lightbend.lagom.javadsl.projection

import java.util.concurrent.CompletionStage

import akka.actor.ActorSystem
import akka.annotation.ApiMayChange
import akka.annotation.InternalApi
import com.lightbend.lagom.internal.projection.ProjectionRegistry
import com.lightbend.lagom.internal.projection.ProjectionRegistryActor.WorkerCoordinates
import com.lightbend.lagom.projection.State
import javax.inject.Inject
import javax.inject.Provider
import javax.inject.Singleton
import play.api.Configuration
import play.api.Environment
import play.api.inject.Binding
import play.api.inject.Module

import scala.compat.java8.FutureConverters
import scala.concurrent.ExecutionContext


private class ProjectionsImpl @Inject() (registry: ProjectionRegistry)(
    implicit executionContext: ExecutionContext
) extends Projections {
  import FutureConverters._

  override def getStatus(): CompletionStage[State] =
    registry.getState().toJava

  override def stopAllWorkers(projectionName: String): Unit =
    registry.stopAllWorkers(projectionName)

  override def stopWorker(projectionName: String, tagName: String): Unit =
    registry.stopWorker(WorkerCoordinates(projectionName, tagName))

  override def startAllWorkers(projectionName: String): Unit =
    registry.startAllWorkers(projectionName)

  override def startWorker(projectionName: String, tagName: String): Unit =
    registry.startWorker(WorkerCoordinates(projectionName, tagName))
} 
Example 27
Source File: TimedWriteService.scala    From daml   with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.ledger.participant.state.v1.metrics

import java.util.concurrent.CompletionStage

import com.daml.daml_lf_dev.DamlLf
import com.daml.ledger.api.health.HealthStatus
import com.daml.ledger.participant.state.v1._
import com.daml.lf.data.Time
import com.daml.metrics.{Metrics, Timed}

final class TimedWriteService(delegate: WriteService, metrics: Metrics) extends WriteService {

  override def submitTransaction(
      submitterInfo: SubmitterInfo,
      transactionMeta: TransactionMeta,
      transaction: SubmittedTransaction,
      estimatedInterpretationCost: Long
  ): CompletionStage[SubmissionResult] =
    Timed.completionStage(
      metrics.daml.services.write.submitTransaction,
      delegate.submitTransaction(
        submitterInfo,
        transactionMeta,
        transaction,
        estimatedInterpretationCost,
      ),
    )

  override def uploadPackages(
      submissionId: SubmissionId,
      archives: List[DamlLf.Archive],
      sourceDescription: Option[String]
  ): CompletionStage[SubmissionResult] =
    Timed.completionStage(
      metrics.daml.services.write.uploadPackages,
      delegate.uploadPackages(submissionId, archives, sourceDescription))

  override def allocateParty(
      hint: Option[Party],
      displayName: Option[String],
      submissionId: SubmissionId
  ): CompletionStage[SubmissionResult] =
    Timed.completionStage(
      metrics.daml.services.write.allocateParty,
      delegate.allocateParty(hint, displayName, submissionId))

  override def submitConfiguration(
      maxRecordTime: Time.Timestamp,
      submissionId: SubmissionId,
      config: Configuration
  ): CompletionStage[SubmissionResult] =
    Timed.completionStage(
      metrics.daml.services.write.submitConfiguration,
      delegate.submitConfiguration(maxRecordTime, submissionId, config))

  override def currentHealth(): HealthStatus =
    delegate.currentHealth()
} 
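
A hedged decoration sketch for the wrapper above; the `underlying` WriteService and the Metrics constructor taking a MetricRegistry are assumptions for illustration:

import com.codahale.metrics.MetricRegistry
import com.daml.metrics.Metrics

object TimedWriteServiceUsage {
  // Wraps an existing WriteService so every submission is timed under the configured metrics.
  def timed(underlying: WriteService): WriteService =
    new TimedWriteService(underlying, new Metrics(new MetricRegistry))
}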
Example 28
Source File: Timed.scala    From daml   with Apache License 2.0 5 votes vote down vote up
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.metrics

import java.util.concurrent.CompletionStage

import akka.Done
import akka.stream.scaladsl.{Keep, Source}
import com.codahale.metrics.{Counter, Timer}
import com.daml.dec.DirectExecutionContext

import scala.concurrent.Future

object Timed {

  def value[T](timer: Timer, value: => T): T =
    timer.time(() => value)

  def completionStage[T](timer: Timer, future: => CompletionStage[T]): CompletionStage[T] = {
    val ctx = timer.time()
    future.whenComplete { (_, _) =>
      ctx.stop()
      ()
    }
  }

  def future[T](timer: Timer, future: => Future[T]): Future[T] = {
    val ctx = timer.time()
    val result = future
    result.onComplete(_ => ctx.stop())(DirectExecutionContext)
    result
  }

  def trackedFuture[T](counter: Counter, future: => Future[T]): Future[T] = {
    counter.inc()
    future.andThen { case _ => counter.dec() }(DirectExecutionContext)
  }

  def timedAndTrackedFuture[T](timer: Timer, counter: Counter, future: => Future[T]): Future[T] = {
    Timed.future(timer, trackedFuture(counter, future))
  }

  def source[Out, Mat](timer: Timer, source: => Source[Out, Mat]): Source[Out, Mat] = {
    val ctx = timer.time()
    source
      .watchTermination()(Keep.both[Mat, Future[Done]])
      .mapMaterializedValue {
        case (mat, done) =>
          done.onComplete(_ => ctx.stop())(DirectExecutionContext)
          mat
      }
  }

} 
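
A short, hypothetical sketch of calling the helpers above; the MetricRegistry and the timer name are assumptions for illustration:

import java.util.concurrent.{CompletableFuture, CompletionStage}

import com.codahale.metrics.MetricRegistry

import scala.concurrent.{ExecutionContext, Future}

object TimedUsage {
  private val registry = new MetricRegistry
  private val timer    = registry.timer("submit-transaction")

  // Times a CompletionStage; the timer context is stopped when the stage completes.
  def timedStage(): CompletionStage[Int] =
    Timed.completionStage(timer, CompletableFuture.completedFuture(42))

  // The same idea for a Scala Future.
  def timedFuture()(implicit ec: ExecutionContext): Future[Int] =
    Timed.future(timer, Future(42))
}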
Example 29
Source File: AuthServiceJWT.scala    From daml   with Apache License 2.0 5 votes vote down vote up
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.ledger.api.auth

import java.util.concurrent.{CompletableFuture, CompletionStage}

import com.daml.lf.data.Ref
import com.daml.jwt.{JwtVerifier, JwtVerifierBase}
import com.daml.ledger.api.auth.AuthServiceJWT.Error
import io.grpc.Metadata
import org.slf4j.{Logger, LoggerFactory}
import spray.json._

import scala.collection.mutable.ListBuffer
import scala.util.Try


class AuthServiceJWT(verifier: JwtVerifierBase) extends AuthService {

  protected val logger: Logger = LoggerFactory.getLogger(AuthServiceJWT.getClass)

  override def decodeMetadata(headers: Metadata): CompletionStage[Claims] = {
    decodeAndParse(headers).fold(
      error => {
        logger.warn("Authorization error: " + error.message)
        CompletableFuture.completedFuture(Claims.empty)
      },
      token => CompletableFuture.completedFuture(payloadToClaims(token))
    )
  }

  private[this] def parsePayload(jwtPayload: String): Either[Error, AuthServiceJWTPayload] = {
    import AuthServiceJWTCodec.JsonImplicits._
    Try(JsonParser(jwtPayload).convertTo[AuthServiceJWTPayload]).toEither.left.map(t =>
      Error("Could not parse JWT token: " + t.getMessage))
  }

  private[this] def decodeAndParse(headers: Metadata): Either[Error, AuthServiceJWTPayload] = {
    val bearerTokenRegex = "Bearer (.*)".r

    for {
      headerValue <- Option
        .apply(headers.get(AUTHORIZATION_KEY))
        .toRight(Error("Authorization header not found"))
      token <- bearerTokenRegex
        .findFirstMatchIn(headerValue)
        .map(_.group(1))
        .toRight(Error("Authorization header does not use Bearer format"))
      decoded <- verifier
        .verify(com.daml.jwt.domain.Jwt(token))
        .toEither
        .left
        .map(e => Error("Could not verify JWT token: " + e.message))
      parsed <- parsePayload(decoded.payload)
    } yield parsed
  }

  private[this] def payloadToClaims(payload: AuthServiceJWTPayload): Claims = {
    val claims = ListBuffer[Claim]()

    // Any valid token authorizes the user to use public services
    claims.append(ClaimPublic)

    if (payload.admin)
      claims.append(ClaimAdmin)

    payload.actAs
      .foreach(party => claims.append(ClaimActAsParty(Ref.Party.assertFromString(party))))

    payload.readAs
      .foreach(party => claims.append(ClaimReadAsParty(Ref.Party.assertFromString(party))))

    Claims(
      claims = claims.toList,
      ledgerId = payload.ledgerId,
      participantId = payload.participantId,
      applicationId = payload.applicationId,
      expiration = payload.exp,
    )
  }
}

object AuthServiceJWT {
  final case class Error(message: String)

  def apply(verifier: com.auth0.jwt.interfaces.JWTVerifier) =
    new AuthServiceJWT(new JwtVerifier(verifier))

  def apply(verifier: JwtVerifierBase) =
    new AuthServiceJWT(verifier)
} 
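
A hedged construction sketch; the HMAC secret and the auth0 verifier builder calls are illustrative assumptions, not taken from the original file:

import com.auth0.jwt.JWT
import com.auth0.jwt.algorithms.Algorithm

object AuthServiceJWTUsage {
  // Builds an auth0 verifier for HS256-signed tokens and wraps it in the service above.
  private val verifier = JWT.require(Algorithm.HMAC256("change-me")).build()

  val authService: AuthService = AuthServiceJWT(verifier)
}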
Example 30
Source File: AuthServiceStatic.scala    From daml   with Apache License 2.0 5 votes vote down vote up
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.ledger.api.auth

import java.util.concurrent.{CompletableFuture, CompletionStage}

import io.grpc.Metadata


final class AuthServiceStatic(claims: PartialFunction[String, Claims]) extends AuthService {
  override def decodeMetadata(headers: Metadata): CompletionStage[Claims] = {
    if (headers.containsKey(AUTHORIZATION_KEY)) {
      val authorizationValue = headers.get(AUTHORIZATION_KEY).stripPrefix("Bearer ")
      CompletableFuture.completedFuture(claims.lift(authorizationValue).getOrElse(Claims.empty))
    } else {
      CompletableFuture.completedFuture(Claims.empty)
    }
  }
}

object AuthServiceStatic {
  def apply(claims: PartialFunction[String, Claims]) = new AuthServiceStatic(claims)
} 
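
A hedged wiring sketch for the static variant; the token string and the exact Claims constructor arguments are assumptions inferred from the previous example:

object AuthServiceStaticUsage {
  val authService: AuthService = AuthServiceStatic {
    case "secret-admin-token" =>
      Claims(
        claims = List(ClaimPublic, ClaimAdmin),
        ledgerId = None,
        participantId = None,
        applicationId = None,
        expiration = None
      )
    case _ => Claims.empty
  }
}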
Example 31
Source File: KeyValueParticipantStateWriter.scala    From daml   with Apache License 2.0 5 votes vote down vote up
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.ledger.participant.state.kvutils.api

import java.util.UUID
import java.util.concurrent.CompletionStage

import com.daml.daml_lf_dev.DamlLf
import com.daml.ledger.api.health.HealthStatus
import com.daml.ledger.participant.state.kvutils.DamlKvutils.DamlSubmission
import com.daml.ledger.participant.state.kvutils.{Envelope, KeyValueSubmission}
import com.daml.ledger.participant.state.v1._
import com.daml.lf.data.{Ref, Time}
import com.daml.metrics.Metrics

import scala.compat.java8.FutureConverters

class KeyValueParticipantStateWriter(writer: LedgerWriter, metrics: Metrics) extends WriteService {

  private val keyValueSubmission = new KeyValueSubmission(metrics)

  override def submitTransaction(
      submitterInfo: SubmitterInfo,
      transactionMeta: TransactionMeta,
      transaction: SubmittedTransaction,
      estimatedInterpretationCost: Long,
  ): CompletionStage[SubmissionResult] = {
    val submission =
      keyValueSubmission.transactionToSubmission(
        submitterInfo,
        transactionMeta,
        transaction,
      )
    val metadata = SimpleCommitMetadata(
      estimatedInterpretationCost = Some(estimatedInterpretationCost))
    commit(correlationId = submitterInfo.commandId, submission = submission, metadata = metadata)
  }

  override def uploadPackages(
      submissionId: SubmissionId,
      archives: List[DamlLf.Archive],
      sourceDescription: Option[String]): CompletionStage[SubmissionResult] = {
    val submission = keyValueSubmission
      .archivesToSubmission(
        submissionId,
        archives,
        sourceDescription.getOrElse(""),
        writer.participantId)
    commit(submissionId, submission)
  }

  override def submitConfiguration(
      maxRecordTime: Time.Timestamp,
      submissionId: SubmissionId,
      config: Configuration): CompletionStage[SubmissionResult] = {
    val submission =
      keyValueSubmission
        .configurationToSubmission(maxRecordTime, submissionId, writer.participantId, config)
    commit(submissionId, submission)
  }

  override def allocateParty(
      hint: Option[Party],
      displayName: Option[String],
      submissionId: SubmissionId): CompletionStage[SubmissionResult] = {
    val party = hint.getOrElse(generateRandomParty())
    val submission =
      keyValueSubmission.partyToSubmission(
        submissionId,
        Some(party),
        displayName,
        writer.participantId)
    commit(submissionId, submission)
  }

  override def currentHealth(): HealthStatus = writer.currentHealth()

  private def generateRandomParty(): Ref.Party =
    Ref.Party.assertFromString(s"party-${UUID.randomUUID().toString.take(8)}")

  private def commit(
      correlationId: String,
      submission: DamlSubmission,
      metadata: CommitMetadata = CommitMetadata.Empty,
  ): CompletionStage[SubmissionResult] =
    FutureConverters.toJava(writer.commit(correlationId, Envelope.enclose(submission), metadata))
} 
Example 32
Source File: KeyValueParticipantState.scala    From daml   with Apache License 2.0 5 votes vote down vote up
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.ledger.participant.state.kvutils.api

import java.util.concurrent.CompletionStage

import akka.NotUsed
import akka.stream.Materializer
import akka.stream.scaladsl.Source
import com.daml.daml_lf_dev.DamlLf
import com.daml.ledger.api.health.HealthStatus
import com.daml.ledger.participant.state.v1._
import com.daml.lf.data.Time
import com.daml.metrics.Metrics


class KeyValueParticipantState(
    reader: LedgerReader,
    writer: LedgerWriter,
    metrics: Metrics,
)(implicit materializer: Materializer)
    extends ReadService
    with WriteService {
  private val readerAdapter =
    new KeyValueParticipantStateReader(reader, metrics)
  private val writerAdapter =
    new KeyValueParticipantStateWriter(new TimedLedgerWriter(writer, metrics), metrics)

  override def getLedgerInitialConditions(): Source[LedgerInitialConditions, NotUsed] =
    readerAdapter.getLedgerInitialConditions()

  override def stateUpdates(beginAfter: Option[Offset]): Source[(Offset, Update), NotUsed] =
    readerAdapter.stateUpdates(beginAfter)

  override def submitTransaction(
      submitterInfo: SubmitterInfo,
      transactionMeta: TransactionMeta,
      transaction: SubmittedTransaction,
      estimatedInterpretationCost: Long,
  ): CompletionStage[SubmissionResult] =
    writerAdapter.submitTransaction(
      submitterInfo,
      transactionMeta,
      transaction,
      estimatedInterpretationCost,
    )

  override def submitConfiguration(
      maxRecordTime: Time.Timestamp,
      submissionId: SubmissionId,
      config: Configuration): CompletionStage[SubmissionResult] =
    writerAdapter.submitConfiguration(maxRecordTime, submissionId, config)

  override def uploadPackages(
      submissionId: SubmissionId,
      archives: List[DamlLf.Archive],
      sourceDescription: Option[String]): CompletionStage[SubmissionResult] =
    writerAdapter.uploadPackages(submissionId, archives, sourceDescription)

  override def allocateParty(
      hint: Option[Party],
      displayName: Option[String],
      submissionId: SubmissionId): CompletionStage[SubmissionResult] =
    writerAdapter.allocateParty(hint, displayName, submissionId)

  override def currentHealth(): HealthStatus =
    reader.currentHealth() and writer.currentHealth()
} 
Example 33
Source File: ProtobufScoringController.scala    From mleap   with Apache License 2.0 5 votes vote down vote up
package ml.combust.mleap.springboot

import java.util.concurrent.CompletionStage

import akka.actor.ActorSystem
import com.google.protobuf.ByteString
import ml.combust.mleap.executor._
import ml.combust.mleap.pb.TransformStatus.STATUS_ERROR
import ml.combust.mleap.pb.{BundleMeta, Mleap, Model, TransformFrameResponse}
import ml.combust.mleap.runtime.serialization.{FrameReader, FrameWriter}
import ml.combust.mleap.springboot.TypeConverters._
import org.apache.commons.lang3.exception.ExceptionUtils
import org.slf4j.LoggerFactory
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.http.HttpStatus
import org.springframework.web.bind.annotation._

import scala.compat.java8.FutureConverters._
import scala.concurrent.Future
import scala.util.{Failure, Success}

@RestController
@RequestMapping
class ProtobufScoringController(@Autowired val actorSystem : ActorSystem,
                                @Autowired val mleapExecutor: MleapExecutor) {

  private val executor = actorSystem.dispatcher

  @PostMapping(path = Array("/models"),
    consumes = Array("application/x-protobuf; charset=UTF-8"),
    produces = Array("application/x-protobuf; charset=UTF-8"))
  @ResponseStatus(HttpStatus.ACCEPTED)
  def loadModel(@RequestBody request: Mleap.LoadModelRequest,
                @RequestHeader(value = "timeout", defaultValue = "60000") timeout: Int) : CompletionStage[Mleap.Model] = {
    mleapExecutor
      .loadModel(javaPbToExecutorLoadModelRequest(request))(timeout)
      .map(model => Model.toJavaProto(model))(executor).toJava
  }

  @DeleteMapping(path = Array("/models/{model_name}"),
    consumes = Array("application/x-protobuf; charset=UTF-8"),
    produces = Array("application/x-protobuf; charset=UTF-8"))
  def unloadModel(@PathVariable("model_name") modelName: String,
                  @RequestHeader(value = "timeout", defaultValue = "60000") timeout: Int): CompletionStage[Mleap.Model] =
    mleapExecutor
      .unloadModel(UnloadModelRequest(modelName))(timeout)
      .map(model => Model.toJavaProto(model))(executor).toJava

  @GetMapping(path = Array("/models/{model_name}"),
    consumes = Array("application/x-protobuf; charset=UTF-8"),
    produces = Array("application/x-protobuf; charset=UTF-8"))
  def getModel(@PathVariable("model_name") modelName: String,
               @RequestHeader(value = "timeout", defaultValue = "60000") timeout: Int): CompletionStage[Mleap.Model] =
    mleapExecutor
      .getModel(GetModelRequest(modelName))(timeout)
      .map(model => Model.toJavaProto(model))(executor).toJava

  @GetMapping(path = Array("/models/{model_name}/meta"),
    consumes = Array("application/x-protobuf; charset=UTF-8"),
    produces = Array("application/x-protobuf; charset=UTF-8"))
  def getMeta(@PathVariable("model_name") modelName: String,
              @RequestHeader(value = "timeout", defaultValue = "60000") timeout: Int) : CompletionStage[Mleap.BundleMeta] =
    mleapExecutor
      .getBundleMeta(GetBundleMetaRequest(modelName))(timeout)
      .map(meta => BundleMeta.toJavaProto(meta))(executor).toJava

  @PostMapping(path = Array("/models/transform"),
    consumes = Array("application/x-protobuf; charset=UTF-8"),
    produces = Array("application/x-protobuf; charset=UTF-8"))
  def transform(@RequestBody request: Mleap.TransformFrameRequest,
                @RequestHeader(value = "timeout", defaultValue = "60000") timeout: Int) : CompletionStage[Mleap.TransformFrameResponse] = {
    FrameReader(request.getFormat).fromBytes(request.getFrame.toByteArray) match {
      case Success(frame) =>
        mleapExecutor.transform(TransformFrameRequest(request.getModelName, frame, request.getOptions))(timeout)
        .mapAll {
          case Success(resp) => resp match {
            case Success(frame) => TransformFrameResponse(tag = request.getTag,
              frame = ByteString.copyFrom(FrameWriter(frame, request.getFormat).toBytes().get))
            case Failure(ex) => handleTransformFailure(request.getTag, ex)
          }
          case Failure(ex) => handleTransformFailure(request.getTag, ex)
        }(executor)
        .map(response => TransformFrameResponse.toJavaProto(response))(executor).toJava
      case Failure(ex) => Future {
          TransformFrameResponse.toJavaProto(handleTransformFailure(request.getTag, ex))
        }(executor).toJava
    }
  }

  private def handleTransformFailure(tag: Long, ex: Throwable): TransformFrameResponse = {
    ProtobufScoringController.logger.error("Transform error due to ", ex)
    TransformFrameResponse(tag = tag, status = STATUS_ERROR,
      error = ExceptionUtils.getMessage(ex), backtrace = ExceptionUtils.getStackTrace(ex))
  }
}

object ProtobufScoringController {
  val logger = LoggerFactory.getLogger(classOf[ProtobufScoringController])
} 
Example 34
Source File: LeapFrameScoringController.scala    From mleap   with Apache License 2.0 5 votes vote down vote up
package ml.combust.mleap.springboot

import java.util.concurrent.CompletionStage

import akka.actor.ActorSystem
import ml.combust.mleap.executor.{MleapExecutor, TransformFrameRequest}
import ml.combust.mleap.pb.ErrorTransformResponse
import ml.combust.mleap.pb.TransformStatus.STATUS_ERROR
import ml.combust.mleap.runtime.serialization.{BuiltinFormats, FrameReader, FrameWriter}
import ml.combust.mleap.springboot.TypeConverters._
import org.apache.commons.lang3.exception.ExceptionUtils
import org.json4s.jackson.JsonMethods
import org.slf4j.LoggerFactory
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.web.bind.annotation._

import scala.compat.java8.FutureConverters._
import scala.concurrent.Future
import scala.util.{Failure, Success}
import scalapb.json4s.{Parser, Printer}

@RestController
@RequestMapping
class LeapFrameScoringController(@Autowired val actorSystem : ActorSystem,
                                 @Autowired val mleapExecutor: MleapExecutor,
                                 @Autowired val jsonPrinter: Printer,
                                 @Autowired val jsonParser: Parser) {

  private val executor = actorSystem.dispatcher

  @PostMapping(path = Array("/models/{model_name}/transform"),
    consumes = Array("application/json; charset=UTF-8"),
    produces = Array("application/json; charset=UTF-8"))
  def transformJson(@RequestBody body: Array[Byte],
                @PathVariable("model_name") modelName: String,
                @RequestHeader(value = "timeout", defaultValue = "60000") timeout: Int) : CompletionStage[_] = {
    FrameReader(BuiltinFormats.json).fromBytes(body) match {
      case Success(frame) => mleapExecutor.transform(TransformFrameRequest(modelName, frame, None))(timeout)
        .mapAll {
          case Success(resp) => resp match {
            case Success(frame) => FrameWriter(frame, BuiltinFormats.json).toBytes().get
            case Failure(ex) => JsonMethods.compact(jsonPrinter.toJson(handleTransformFailure(ex)))
          }
          case Failure(ex) => JsonMethods.compact(jsonPrinter.toJson(handleTransformFailure(ex)))

        }(executor)
        .toJava
      case Failure(ex) => Future {
        JsonMethods.compact(jsonPrinter.toJson(handleTransformFailure(ex)))
      }(executor).toJava
    }
  }

  @PostMapping(path = Array("/models/{model_name}/transform"),
    consumes = Array("application/x-protobuf; charset=UTF-8"),
    produces = Array("application/x-protobuf; charset=UTF-8"))
  def transformProto(@RequestBody body: Array[Byte],
                @PathVariable("model_name") modelName: String,
                @RequestHeader(value = "timeout", defaultValue = "60000") timeout: Int) : CompletionStage[_] = {
    FrameReader(BuiltinFormats.binary).fromBytes(body) match {
      case Success(frame) =>
        mleapExecutor.transform(TransformFrameRequest(modelName, frame, None))(timeout)
          .mapAll {
            case Success(resp) => resp match {
              case Success(frame) => FrameWriter(frame, BuiltinFormats.binary).toBytes().get
              case Failure(ex) => ErrorTransformResponse.toJavaProto(handleTransformFailure(ex))
            }
            case Failure(ex) => ErrorTransformResponse.toJavaProto(handleTransformFailure(ex))
          }(executor).toJava
      case Failure(ex) => Future {
        ErrorTransformResponse.toJavaProto(handleTransformFailure(ex))
      }(executor).toJava
    }
  }

  private def handleTransformFailure(ex: Throwable): ErrorTransformResponse = {
    LeapFrameScoringController.logger.error("Transform error due to ", ex)
    ErrorTransformResponse(status = STATUS_ERROR,
      error = ExceptionUtils.getMessage(ex), backtrace = ExceptionUtils.getStackTrace(ex))
  }
}

object LeapFrameScoringController {
  val logger = LoggerFactory.getLogger(classOf[LeapFrameScoringController])
} 
Example 35
Source File: PipeToSupport.scala    From perf_tester   with Apache License 2.0 5 votes vote down vote up
package akka.pattern

import language.implicitConversions
import scala.concurrent.{ Future, ExecutionContext }
import scala.util.{ Failure, Success }
import akka.actor.{ Status, ActorRef, Actor }
import akka.actor.ActorSelection
import java.util.concurrent.CompletionStage
import java.util.function.BiConsumer

trait PipeToSupport {

  final class PipeableFuture[T](val future: Future[T])(implicit executionContext: ExecutionContext) {
    def pipeTo(recipient: ActorRef)(implicit sender: ActorRef = Actor.noSender): Future[T] = {
      future andThen {
        case Success(r) ⇒ recipient ! r
        case Failure(f) ⇒ recipient ! Status.Failure(f)
      }
    }
    def pipeToSelection(recipient: ActorSelection)(implicit sender: ActorRef = Actor.noSender): Future[T] = {
      future andThen {
        case Success(r) ⇒ recipient ! r
        case Failure(f) ⇒ recipient ! Status.Failure(f)
      }
    }
    def to(recipient: ActorRef): PipeableFuture[T] = to(recipient, Actor.noSender)
    def to(recipient: ActorRef, sender: ActorRef): PipeableFuture[T] = {
      pipeTo(recipient)(sender)
      this
    }
    def to(recipient: ActorSelection): PipeableFuture[T] = to(recipient, Actor.noSender)
    def to(recipient: ActorSelection, sender: ActorRef): PipeableFuture[T] = {
      pipeToSelection(recipient)(sender)
      this
    }
  }

  final class PipeableCompletionStage[T](val future: CompletionStage[T])(implicit executionContext: ExecutionContext) {
    def pipeTo(recipient: ActorRef)(implicit sender: ActorRef = Actor.noSender): CompletionStage[T] = {
      future whenComplete new BiConsumer[T, Throwable] {
        override def accept(t: T, ex: Throwable) {
          if (t != null) recipient ! t
          if (ex != null) recipient ! Status.Failure(ex)
        }
      }
    }
    def pipeToSelection(recipient: ActorSelection)(implicit sender: ActorRef = Actor.noSender): CompletionStage[T] = {
      future whenComplete new BiConsumer[T, Throwable] {
        override def accept(t: T, ex: Throwable) {
          if (t != null) recipient ! t
          if (ex != null) recipient ! Status.Failure(ex)
        }
      }
    }
    def to(recipient: ActorRef): PipeableCompletionStage[T] = to(recipient, Actor.noSender)
    def to(recipient: ActorRef, sender: ActorRef): PipeableCompletionStage[T] = {
      pipeTo(recipient)(sender)
      this
    }
    def to(recipient: ActorSelection): PipeableCompletionStage[T] = to(recipient, Actor.noSender)
    def to(recipient: ActorSelection, sender: ActorRef): PipeableCompletionStage[T] = {
      pipeToSelection(recipient)(sender)
      this
    }
  }

  
  implicit def pipeCompletionStage[T](future: CompletionStage[T])(implicit executionContext: ExecutionContext): PipeableCompletionStage[T] = new PipeableCompletionStage(future)
} 
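
An illustrative sketch of the pipe pattern this trait enables; the actor system, actor, and completed stage are assumptions for demonstration:

import java.util.concurrent.CompletableFuture

import akka.actor.{Actor, ActorSystem, Props}
import akka.pattern.PipeToSupport

object PipeUsage extends App with PipeToSupport {
  implicit val system: ActorSystem = ActorSystem("pipe-demo")
  import system.dispatcher

  // A trivial actor that prints whatever it receives.
  val printer = system.actorOf(Props(new Actor {
    def receive = { case msg => println(s"received: $msg") }
  }))

  // The implicit conversion wraps the stage and forwards its result (or failure) to the actor.
  pipeCompletionStage(CompletableFuture.completedFuture("done")).pipeTo(printer)
}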
Example 36
Source File: FutureTimeoutSupport.scala    From perf_tester   with Apache License 2.0 5 votes vote down vote up
package akka.pattern

import scala.concurrent.{ ExecutionContext, Promise, Future }
import akka.actor._
import scala.util.control.NonFatal
import scala.concurrent.duration.FiniteDuration
import java.util.concurrent.CompletionStage
import java.util.concurrent.CompletableFuture
import akka.dispatch.Futures
import java.util.function.BiConsumer

trait FutureTimeoutSupport {
  
  def afterCompletionStage[T](duration: FiniteDuration, using: Scheduler)(value: ⇒ CompletionStage[T])(implicit ec: ExecutionContext): CompletionStage[T] =
    if (duration.isFinite() && duration.length < 1) {
      try value catch { case NonFatal(t) ⇒ Futures.failedCompletionStage(t) }
    } else {
      val p = new CompletableFuture[T]
      using.scheduleOnce(duration) {
        try {
          val future = value
          future.whenComplete(new BiConsumer[T, Throwable] {
            override def accept(t: T, ex: Throwable): Unit = {
              if (t != null) p.complete(t)
              if (ex != null) p.completeExceptionally(ex)
            }
          })
        } catch {
          case NonFatal(ex) ⇒ p.completeExceptionally(ex)
        }
      }
      p
    }
} 
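
A small sketch of delaying a stage with the helper above; the actor system, delay, and value are illustrative assumptions:

import java.util.concurrent.CompletableFuture

import akka.actor.ActorSystem
import akka.pattern.FutureTimeoutSupport

import scala.concurrent.duration._

object TimeoutUsage extends App with FutureTimeoutSupport {
  implicit val system: ActorSystem = ActorSystem("timeout-demo")
  import system.dispatcher

  // The by-name value is only evaluated once the scheduler fires after 500 milliseconds.
  val delayed = afterCompletionStage(500.millis, system.scheduler) {
    CompletableFuture.completedFuture("late result")
  }
  delayed.thenAccept(r => println(r))
}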
Example 37
Source File: ServiceRegistryServiceLocator.scala    From lagom   with Apache License 2.0 5 votes vote down vote up
package com.lightbend.lagom.internal.javadsl.registry

import java.net.URI
import java.util.concurrent.CompletionStage
import java.util.Optional
import java.util.{ List => JList }

import javax.inject.Inject
import javax.inject.Singleton
import com.lightbend.lagom.devmode.internal.registry.ServiceRegistryClient
import com.lightbend.lagom.javadsl.api.Descriptor.Call
import com.lightbend.lagom.javadsl.client.CircuitBreakersPanel
import com.lightbend.lagom.javadsl.client.CircuitBreakingServiceLocator

import scala.collection.JavaConverters._
import scala.compat.java8.FutureConverters._
import scala.compat.java8.OptionConverters._
import scala.concurrent.ExecutionContext

@Singleton
private[lagom] class ServiceRegistryServiceLocator @Inject() (
    circuitBreakers: CircuitBreakersPanel,
    client: ServiceRegistryClient,
    implicit val ec: ExecutionContext
) extends CircuitBreakingServiceLocator(circuitBreakers) {
  override def locateAll(name: String, serviceCall: Call[_, _]): CompletionStage[JList[URI]] =
    // a ServiceLocator doesn't know what a `portName` is, so we default to `None` and the
    // implementation will return any registration without a port name. This means that, in order
    // for these queries to work, any service registered using `http` as the portName will also
    // have to be registered without a port name.
    client.locateAll(name, None).map(_.asJava).toJava

  override def locate(name: String, serviceCall: Call[_, _]): CompletionStage[Optional[URI]] =
    // a ServiceLocator doesn't know what a `portName` is, so we default to `None` and the
    // implementation will return any registration without a port name. This means that, in order
    // for these queries to work, any service registered using `http` as the portName will also
    // have to be registered without a port name.
    client.locateAll(name, None).map(_.headOption.asJava).toJava
} 
Example 38
Source File: NoServiceLocator.scala    From lagom   with Apache License 2.0 5 votes vote down vote up
package com.lightbend.lagom.registry.impl

import java.net.URI
import java.util.Optional
import java.util.concurrent.CompletionStage
import java.util.function.{ Function => JFunction }

import com.lightbend.lagom.javadsl.api.Descriptor.Call
import com.lightbend.lagom.javadsl.api.ServiceLocator


class NoServiceLocator extends ServiceLocator {
  import java.util.concurrent.CompletableFuture

  override def locate(name: String, serviceCall: Call[_, _]): CompletionStage[Optional[URI]] =
    CompletableFuture.completedFuture(Optional.empty())

  override def doWithService[T](
      name: String,
      serviceCall: Call[_, _],
      block: JFunction[URI, CompletionStage[T]]
  ): CompletionStage[Optional[T]] =
    CompletableFuture.completedFuture(Optional.empty())
} 
Example 39
Source File: Retry.scala    From lagom   with Apache License 2.0 5 votes vote down vote up
package com.lightbend.lagom.internal.javadsl.persistence.jpa

import java.util.concurrent.CompletionStage
import java.util.function.Supplier

import akka.actor.Scheduler
import akka.pattern.after

import scala.concurrent.duration.Duration.fromNanos
import scala.concurrent.duration.FiniteDuration
import scala.concurrent.ExecutionContext
import scala.concurrent.Future
import scala.util.control.NonFatal

// With thanks to https://gist.github.com/viktorklang/9414163
private[lagom] class Retry(delay: FiniteDuration, delayFactor: Double, maxRetries: Int) {
  def apply[T](op: => T)(implicit ec: ExecutionContext, s: Scheduler): Future[T] = {
    def iterate(nextDelay: FiniteDuration, remainingRetries: Int): Future[T] =
      Future(op).recoverWith {
        case NonFatal(throwable) if remainingRetries > 0 => {
          onRetry(throwable, nextDelay, remainingRetries)
          after(nextDelay, s)(iterate(finiteMultiply(nextDelay, delayFactor), remainingRetries - 1))
        }
      }

    iterate(delay, maxRetries)
  }

  // For convenient use from Java 8
  def retry[T](op: Supplier[T])(implicit ec: ExecutionContext, s: Scheduler): CompletionStage[T] = {
    import scala.compat.java8.FutureConverters._

    apply(op.get()).toJava
  }

  protected def onRetry(throwable: Throwable, delay: FiniteDuration, remainingRetries: Int): Unit = ()

  private def finiteMultiply(duration: FiniteDuration, factor: Double): FiniteDuration =
    fromNanos((duration.toNanos * factor).toLong)
}

private[lagom] object Retry {
  def apply[T](delay: FiniteDuration, delayFactor: Double, maxRetries: Int)(
      op: => T
  )(implicit ec: ExecutionContext, s: Scheduler): Future[T] =
    (new Retry(delay, delayFactor, maxRetries))(op)
} 
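
A brief usage sketch of the retry helper (setting aside its private[lagom] visibility); the flaky operation and the timing parameters are hypothetical:

import akka.actor.{ActorSystem, Scheduler}

import scala.concurrent.ExecutionContext
import scala.concurrent.duration._
import scala.util.Random

object RetryUsage extends App {
  implicit val system: ActorSystem  = ActorSystem("retry-demo")
  implicit val ec: ExecutionContext = system.dispatcher
  implicit val scheduler: Scheduler = system.scheduler

  // Fails roughly half of the time to exercise the retry loop.
  def flakyCall(): Int = if (Random.nextDouble() < 0.5) throw new RuntimeException("boom") else 42

  // Retries up to 5 times, doubling the delay between attempts (100ms, 200ms, 400ms, ...).
  Retry(100.millis, delayFactor = 2.0, maxRetries = 5)(flakyCall()).foreach(r => println(s"succeeded with $r"))
}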
Example 40
Source File: JpaReadSideImplSpec.scala    From lagom   with Apache License 2.0 5 votes vote down vote up
package com.lightbend.lagom.internal.javadsl.persistence.jpa

import java.lang.Long
import java.util.concurrent.CompletionStage

import com.lightbend.lagom.internal.javadsl.persistence.jdbc.JdbcPersistentEntityRegistry
import com.lightbend.lagom.javadsl.persistence.TestEntity.Evt
import com.lightbend.lagom.javadsl.persistence._
import com.lightbend.lagom.javadsl.persistence.jpa.JpaReadSide
import com.lightbend.lagom.javadsl.persistence.jpa.TestEntityJpaReadSide
import play.api.inject.guice.GuiceInjectorBuilder

import scala.concurrent.duration._

class JpaReadSideImplSpec extends JpaPersistenceSpec with AbstractReadSideSpec {
  private lazy val injector                            = new GuiceInjectorBuilder().build()
  protected override lazy val persistentEntityRegistry = new JdbcPersistentEntityRegistry(system, injector, slick)

  private lazy val jpaReadSide: JpaReadSide = new JpaReadSideImpl(jpa, offsetStore)

  def processorFactory(): ReadSideProcessor[Evt] =
    new TestEntityJpaReadSide.TestEntityJpaReadSideProcessor(jpaReadSide)

  private lazy val readSide = new TestEntityJpaReadSide(jpa)

  def getAppendCount(id: String): CompletionStage[Long] = readSide.getAppendCount(id)

  override def afterAll(): Unit = {
    super.afterAll()
  }
} 
Example 41
Source File: package.scala    From lagom   with Apache License 2.0 5 votes vote down vote up
package com.lightbend.lagom.javadsl.persistence

import java.util.concurrent.CompletionStage
import java.util.function.BiConsumer

import akka.actor.ActorRef

package object testkit {
  implicit class pipe[T](val stage: CompletionStage[T]) extends AnyVal {
    def pipeTo(recipient: ActorRef): Unit = {
      stage.whenComplete(new BiConsumer[T, Throwable] {
        override def accept(value: T, e: Throwable): Unit = {
          if (value != null) recipient ! value
          if (e != null) recipient ! e
        }
      })
    }
  }
} 
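
A minimal sketch of the testkit pipe helper above; the probe, actor system, and completed stage are illustrative assumptions:

import java.util.concurrent.CompletableFuture

import akka.actor.ActorSystem
import akka.testkit.TestProbe

import com.lightbend.lagom.javadsl.persistence.testkit._

object TestkitPipeSketch extends App {
  implicit val system: ActorSystem = ActorSystem("testkit-pipe")
  val probe = TestProbe()

  // Forwards the completed value to the probe's actor reference.
  CompletableFuture.completedFuture("answer").pipeTo(probe.ref)
  probe.expectMsg("answer")

  system.terminate()
}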
Example 42
Source File: ConfigurationServiceLocatorSpec.scala    From lagom   with Apache License 2.0 5 votes vote down vote up
package com.lightbend.lagom.javadsl.client

import java.net.URI
import java.util.concurrent.CompletionStage
import java.util.concurrent.TimeUnit
import java.util.function.Supplier
import com.typesafe.config.ConfigFactory

import scala.compat.java8.OptionConverters._
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class ConfigurationServiceLocatorSpec extends AnyWordSpec with Matchers {
  val serviceLocator = new ConfigurationServiceLocator(
    ConfigFactory.parseString(
      """
        |lagom.services {
        |  foo = "http://localhost:10001"
        |  bar = "http://localhost:10002"
        |}
    """.stripMargin
    ),
    new CircuitBreakersPanel {
      override def withCircuitBreaker[T](id: String, body: Supplier[CompletionStage[T]]): CompletionStage[T] =
        body.get()
    }
  )

  def locate(serviceName: String) =
    serviceLocator.locate(serviceName).toCompletableFuture.get(10, TimeUnit.SECONDS).asScala

  "ConfigurationServiceLocator" should {
    "return a found service" in {
      locate("foo") should contain(URI.create("http://localhost:10001"))
      locate("bar") should contain(URI.create("http://localhost:10002"))
    }
    "return none for not found service" in {
      locate("none") shouldBe None
    }
  }
} 
Example 43
Source File: CassandraReadSideImpl.scala    From lagom   with Apache License 2.0 5 votes vote down vote up
package com.lightbend.lagom.internal.javadsl.persistence.cassandra

import java.util
import java.util.concurrent.CompletableFuture
import java.util.concurrent.CompletionStage
import java.util.function.BiFunction
import java.util.function.Function
import java.util.function.Supplier

import javax.inject.Inject
import javax.inject.Singleton
import akka.Done
import akka.actor.ActorSystem
import com.datastax.driver.core.BoundStatement
import com.lightbend.lagom.internal.javadsl.persistence.ReadSideImpl
import com.lightbend.lagom.internal.persistence.cassandra.CassandraOffsetStore
import com.lightbend.lagom.javadsl.persistence.ReadSideProcessor.ReadSideHandler
import com.lightbend.lagom.javadsl.persistence._
import com.lightbend.lagom.javadsl.persistence.cassandra.CassandraReadSide.ReadSideHandlerBuilder
import com.lightbend.lagom.javadsl.persistence.cassandra.CassandraReadSide
import com.lightbend.lagom.javadsl.persistence.cassandra.CassandraSession
import play.api.inject.Injector


@Singleton
private[lagom] final class CassandraReadSideImpl @Inject() (
    system: ActorSystem,
    session: CassandraSession,
    offsetStore: CassandraOffsetStore,
    readSide: ReadSideImpl,
    injector: Injector
) extends CassandraReadSide {
  private val dispatcher = system.settings.config.getString("lagom.persistence.read-side.use-dispatcher")
  implicit val ec        = system.dispatchers.lookup(dispatcher)

  override def builder[Event <: AggregateEvent[Event]](eventProcessorId: String): ReadSideHandlerBuilder[Event] = {
    new ReadSideHandlerBuilder[Event] {
      import CassandraAutoReadSideHandler.Handler
      private var prepareCallback: AggregateEventTag[Event] => CompletionStage[Done] =
        tag => CompletableFuture.completedFuture(Done.getInstance())
      private var globalPrepareCallback: () => CompletionStage[Done] =
        () => CompletableFuture.completedFuture(Done.getInstance())
      private var handlers = Map.empty[Class[_ <: Event], Handler[Event]]

      override def setGlobalPrepare(callback: Supplier[CompletionStage[Done]]): ReadSideHandlerBuilder[Event] = {
        globalPrepareCallback = () => callback.get
        this
      }

      override def setPrepare(
          callback: Function[AggregateEventTag[Event], CompletionStage[Done]]
      ): ReadSideHandlerBuilder[Event] = {
        prepareCallback = callback.apply
        this
      }

      override def setEventHandler[E <: Event](
          eventClass: Class[E],
          handler: Function[E, CompletionStage[util.List[BoundStatement]]]
      ): ReadSideHandlerBuilder[Event] = {
        handlers += (eventClass -> ((event: E, offset: Offset) => handler(event)))
        this
      }

      override def setEventHandler[E <: Event](
          eventClass: Class[E],
          handler: BiFunction[E, Offset, CompletionStage[util.List[BoundStatement]]]
      ): ReadSideHandlerBuilder[Event] = {
        handlers += (eventClass -> handler.apply _)
        this
      }

      override def build(): ReadSideHandler[Event] = {
        new CassandraAutoReadSideHandler[Event](
          session,
          offsetStore,
          handlers,
          globalPrepareCallback,
          prepareCallback,
          eventProcessorId,
          dispatcher
        )
      }
    }
  }
} 
Example 44
Source File: CassandraClusteredPersistentEntitySpec.scala    From lagom   with Apache License 2.0 5 votes vote down vote up
package com.lightbend.lagom.javadsl.persistence.cassandra

import java.io.File
import java.util.concurrent.CompletionStage

import akka.persistence.cassandra.testkit.CassandraLauncher
import com.lightbend.lagom.internal.persistence.testkit.AwaitPersistenceInit.awaitPersistenceInit
import com.lightbend.lagom.internal.persistence.testkit.PersistenceTestConfig.cassandraConfigOnly
import com.lightbend.lagom.javadsl.persistence.TestEntity.Evt
import com.lightbend.lagom.javadsl.persistence.multinode.AbstractClusteredPersistentEntityConfig
import com.lightbend.lagom.javadsl.persistence.multinode.AbstractClusteredPersistentEntitySpec
import com.lightbend.lagom.javadsl.persistence.ReadSideProcessor
import com.lightbend.lagom.javadsl.persistence.TestEntityReadSide
import com.lightbend.lagom.javadsl.persistence.multinode.AbstractClusteredPersistentEntityConfig.Ports
import com.lightbend.lagom.javadsl.persistence.multinode.AbstractClusteredPersistentEntityConfig.Ports.SpecPorts
import com.typesafe.config.Config

object CassandraClusteredPersistentEntityConfig extends AbstractClusteredPersistentEntityConfig {

  override def specPorts: SpecPorts = Ports.cassandraSpecPorts

  override def additionalCommonConfig: Config = {
    cassandraConfigOnly("ClusteredPersistentEntitySpec", specPorts.database)
      .withFallback(CassandraReadSideSpec.readSideConfig)
  }
}

class CassandraClusteredPersistentEntitySpecMultiJvmNode1 extends CassandraClusteredPersistentEntitySpec
class CassandraClusteredPersistentEntitySpecMultiJvmNode2 extends CassandraClusteredPersistentEntitySpec
class CassandraClusteredPersistentEntitySpecMultiJvmNode3 extends CassandraClusteredPersistentEntitySpec

class CassandraClusteredPersistentEntitySpec
    extends AbstractClusteredPersistentEntitySpec(CassandraClusteredPersistentEntityConfig) {
  import CassandraClusteredPersistentEntityConfig._

  protected override def atStartup(): Unit = {
    // On only one node (node1), start Cassandra & make sure the persistence layers have initialised
    // Node1 is also the only node where cassandra-journal.keyspace-autocreate isn't disabled
    runOn(node1) {
      val cassandraDirectory = new File("target/" + system.name)
      CassandraLauncher.start(
        cassandraDirectory,
        "lagom-test-embedded-cassandra.yaml",
        clean = true,
        port = specPorts.database
      )
      awaitPersistenceInit(system)
    }
    enterBarrier("cassandra-initialised")

    // Now make sure that the other nodes' persistence layers are warmed up
    runOn(node2, node3) {
      awaitPersistenceInit(system)
    }
    enterBarrier("cassandra-accessible")

    super.atStartup()
  }

  protected override def afterTermination(): Unit = {
    super.afterTermination()
    CassandraLauncher.stop()
  }

  def testEntityReadSide = injector.instanceOf[TestEntityReadSide]

  protected override def getAppendCount(id: String): CompletionStage[java.lang.Long] =
    testEntityReadSide.getAppendCount(id)

  protected override def readSideProcessor: Class[_ <: ReadSideProcessor[Evt]] =
    classOf[TestEntityReadSide.TestEntityReadSideProcessor]
} 
Example 45
Source File: JdbcSessionImpl.scala    From lagom   with Apache License 2.0 5 votes vote down vote up
package com.lightbend.lagom.internal.javadsl.persistence.jdbc

import java.util.concurrent.CompletionStage
import javax.inject.Inject
import javax.inject.Singleton

import com.lightbend.lagom.javadsl.persistence.jdbc.JdbcSession
import com.lightbend.lagom.javadsl.persistence.jdbc.JdbcSession.ConnectionFunction

import scala.compat.java8.FutureConverters._


@Singleton
final class JdbcSessionImpl @Inject() (slick: SlickProvider) extends JdbcSession {
  import slick.profile.api._

  override def withConnection[T](block: ConnectionFunction[T]): CompletionStage[T] = {
    slick.db.run {
      SimpleDBIO { ctx =>
        block(ctx.connection)
      }
    }.toJava
  }

  override def withTransaction[T](block: ConnectionFunction[T]): CompletionStage[T] = {
    slick.db.run {
      SimpleDBIO { ctx =>
        block(ctx.connection)
      }.transactionally
    }.toJava
  }
} 
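
A sketch of calling the session above; the SQL statement and table name are assumptions, and the lambda relies on SAM conversion to JdbcSession.ConnectionFunction:

import java.util.concurrent.CompletionStage

import com.lightbend.lagom.javadsl.persistence.jdbc.JdbcSession

object JdbcSessionUsage {
  // Counts rows in a hypothetical table inside a single transaction.
  def countOrders(session: JdbcSession): CompletionStage[Integer] =
    session.withTransaction[Integer] { connection =>
      val rs = connection.prepareStatement("SELECT COUNT(*) FROM orders").executeQuery()
      rs.next()
      Integer.valueOf(rs.getInt(1))
    }
}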
Example 46
Source File: JdbcClusteredPersistentEntitySpec.scala    From lagom   with Apache License 2.0 5 votes vote down vote up
package com.lightbend.lagom.javadsl.persistence.jdbc

import java.util.concurrent.CompletionStage

import com.lightbend.lagom.javadsl.persistence.ReadSideProcessor
import com.lightbend.lagom.javadsl.persistence.TestEntity.Evt
import com.lightbend.lagom.javadsl.persistence.multinode.AbstractClusteredPersistentEntityConfig
import com.lightbend.lagom.javadsl.persistence.multinode.AbstractClusteredPersistentEntityConfig.Ports
import com.lightbend.lagom.javadsl.persistence.multinode.AbstractClusteredPersistentEntityConfig.Ports.SpecPorts
import com.lightbend.lagom.javadsl.persistence.multinode.AbstractClusteredPersistentEntitySpec
import com.typesafe.config.Config
import com.typesafe.config.ConfigFactory
import org.h2.tools.Server

object JdbcClusteredPersistentEntityConfig extends AbstractClusteredPersistentEntityConfig {

  override def specPorts: SpecPorts = Ports.jdbcSpecPorts

  override def additionalCommonConfig: Config = ConfigFactory.parseString(
    s"""
      db.default.driver=org.h2.Driver
      db.default.url="jdbc:h2:tcp://localhost:${specPorts.database}/mem:JdbcClusteredPersistentEntitySpec"
    """
  )
}

class JdbcClusteredPersistentEntitySpecMultiJvmNode1 extends JdbcClusteredPersistentEntitySpec
class JdbcClusteredPersistentEntitySpecMultiJvmNode2 extends JdbcClusteredPersistentEntitySpec
class JdbcClusteredPersistentEntitySpecMultiJvmNode3 extends JdbcClusteredPersistentEntitySpec

class JdbcClusteredPersistentEntitySpec
    extends AbstractClusteredPersistentEntitySpec(JdbcClusteredPersistentEntityConfig) {
  import JdbcClusteredPersistentEntityConfig._

  var h2: Server = _

  protected override def atStartup() {
    runOn(node1) {
      h2 = Server.createTcpServer("-tcpPort", specPorts.database.toString, "-ifNotExists").start()
    }

    enterBarrier("h2-started")

    super.atStartup()
  }

  protected override def afterTermination() {
    super.afterTermination()
    Option(h2).foreach(_.stop())
  }

  def testEntityReadSide = injector.instanceOf[JdbcTestEntityReadSide]

  protected override def getAppendCount(id: String): CompletionStage[java.lang.Long] =
    testEntityReadSide.getAppendCount(id)

  protected override def readSideProcessor: Class[_ <: ReadSideProcessor[Evt]] =
    classOf[JdbcTestEntityReadSide.TestEntityReadSideProcessor]
} 
Example 47
Source File: JdbcReadSideSpec.scala    From lagom   with Apache License 2.0 5 votes vote down vote up
package com.lightbend.lagom.javadsl.persistence.jdbc

import java.lang.Long
import java.util.concurrent.CompletionStage

import com.lightbend.lagom.internal.javadsl.persistence.jdbc.JdbcPersistentEntityRegistry
import com.lightbend.lagom.internal.javadsl.persistence.jdbc.JdbcSessionImpl
import com.lightbend.lagom.javadsl.persistence.TestEntity.Evt
import com.lightbend.lagom.javadsl.persistence._
import play.api.inject.guice.GuiceInjectorBuilder

import scala.concurrent.duration._

class JdbcReadSideSpec extends JdbcPersistenceSpec with AbstractReadSideSpec {
  private lazy val injector                            = new GuiceInjectorBuilder().build()
  protected override lazy val persistentEntityRegistry = new JdbcPersistentEntityRegistry(system, injector, slick)

  override def processorFactory(): ReadSideProcessor[Evt] =
    new JdbcTestEntityReadSide.TestEntityReadSideProcessor(jdbcReadSide)

  protected lazy val session: JdbcSession = new JdbcSessionImpl(slick)
  private lazy val readSide               = new JdbcTestEntityReadSide(session)

  override def getAppendCount(id: String): CompletionStage[Long] = readSide.getAppendCount(id)

  override def afterAll(): Unit = {
    super.afterAll()
  }
} 
Example 48
Source File: TopicStub.scala    From lagom   with Apache License 2.0 5 votes vote down vote up
package com.lightbend.lagom.internal.javadsl.testkit

import java.util.concurrent.CompletionStage

import akka.Done
import akka.actor.ActorRef
import akka.stream.Materializer
import akka.stream.javadsl.Flow
import akka.stream.javadsl.Source
import akka.stream.scaladsl.{ Flow => ScalaFlow }
import com.lightbend.lagom.internal.testkit.InternalSubscriberStub
import com.lightbend.lagom.javadsl.api.broker.Message
import com.lightbend.lagom.javadsl.api.broker.Subscriber
import com.lightbend.lagom.javadsl.api.broker.Topic

import scala.compat.java8.FutureConverters.toJava

private[lagom] class TopicStub[T](val topicId: Topic.TopicId, topicBuffer: ActorRef)(
    implicit materializer: Materializer
) extends Topic[T] {
  // TODO: use ServiceInfo's name as a default value.
  def subscribe = new SubscriberStub("default", topicBuffer, _.getPayload)

  class SubscriberStub[SubscriberPayload](
      groupId: String,
      topicBuffer: ActorRef,
      transform: Message[T] => SubscriberPayload
  )(implicit materializer: Materializer)
      extends InternalSubscriberStub[T, Message](groupId, topicBuffer)(materializer)
      with Subscriber[SubscriberPayload] {
    override def withGroupId(groupId: String): Subscriber[SubscriberPayload] =
      new SubscriberStub(groupId, topicBuffer, transform)

    override def withMetadata(): Subscriber[Message[SubscriberPayload]] =
      new SubscriberStub[Message[SubscriberPayload]](groupId, topicBuffer, msg => msg.withPayload(transform(msg)))

    override def atMostOnceSource(): Source[SubscriberPayload, _] =
      super.mostOnceSource.map(transform).asJava

    override def atLeastOnce(flow: Flow[SubscriberPayload, Done, _]): CompletionStage[Done] =
      toJava(super.leastOnce(ScalaFlow[Message[T]].map(transform).via(flow.asScala)))
  }
} 
Example 49
Source File: TestServiceLocator.scala    From lagom   with Apache License 2.0 5 votes vote down vote up
package com.lightbend.lagom.internal.testkit

import java.net.URI
import java.util.Optional
import java.util.concurrent.CompletionStage

import scala.compat.java8.FutureConverters._
import scala.concurrent.ExecutionContext
import scala.concurrent.Future
import com.lightbend.lagom.javadsl.api.Descriptor
import javax.inject.Inject
import javax.inject.Singleton

import com.lightbend.lagom.javadsl.client.CircuitBreakersPanel
import com.lightbend.lagom.javadsl.client.CircuitBreakingServiceLocator

@Singleton
private[lagom] class TestServiceLocator @Inject() (
    circuitBreakers: CircuitBreakersPanel,
    port: TestServiceLocatorPort,
    implicit val ec: ExecutionContext
) extends CircuitBreakingServiceLocator(circuitBreakers) {
  private val futureUri = port.port.map(p => URI.create("http://localhost:" + p))

  override def locate(name: String, call: Descriptor.Call[_, _]): CompletionStage[Optional[URI]] =
    futureUri.map(uri => Optional.of(uri)).toJava
}

private[lagom] final case class TestServiceLocatorPort(port: Future[Int]) 
Example 50
Source File: FakeTopicAdmin.scala    From ohara   with Apache License 2.0 4 votes vote down vote up
package oharastream.ohara.configurator.fake

import java.util.concurrent.{CompletableFuture, CompletionStage, ConcurrentHashMap}
import java.{lang, util}

import oharastream.ohara.common.setting.TopicKey
import oharastream.ohara.kafka.{TopicAdmin, TopicCreator, TopicDescription, TopicOption}

private[configurator] class FakeTopicAdmin extends TopicAdmin {
  import scala.jdk.CollectionConverters._

  override val connectionProps: String = "Unknown"

  private[this] val cachedTopics = new ConcurrentHashMap[TopicKey, TopicDescription]()

  override def createPartitions(topicKey: TopicKey, numberOfPartitions: Int): CompletionStage[Void] = {
    val previous = cachedTopics.get(topicKey)
    val f        = new CompletableFuture[Void]()
    if (previous == null)
      f.completeExceptionally(
        new NoSuchElementException(
          s"the topic:$topicKey doesn't exist. actual:${cachedTopics.keys().asScala.mkString(",")}"
        )
      )
    else {
      // The fake admin does not actually add partitions; it only re-stores the previous description.
      cachedTopics.put(
        topicKey,
        new TopicDescription(
          previous.topicKey,
          previous.partitionInfos(),
          previous.options
        )
      )
      // Complete the returned future so callers do not hang on the success path.
      f.complete(null)
    }
    f
  }

  override def topicKeys: CompletionStage[util.Set[TopicKey]] = CompletableFuture.completedFuture(cachedTopics.keySet())

  override def topicDescription(key: TopicKey): CompletionStage[TopicDescription] = {
    val topic = cachedTopics.get(key)
    val f     = new CompletableFuture[TopicDescription]()
    if (topic == null) f.completeExceptionally(new NoSuchElementException(s"$key does not exist"))
    else f.complete(topic)
    f
  }

  override def topicCreator(): TopicCreator =
    (_: Int, _: Short, options: util.Map[String, String], topicKey: TopicKey) => {
      val f = new CompletableFuture[Void]()
      // Use containsKey: ConcurrentHashMap.contains checks values, not keys.
      if (cachedTopics.containsKey(topicKey))
        f.completeExceptionally(new IllegalArgumentException(s"$topicKey already exists!"))
      else {
        val topicInfo = new TopicDescription(
          topicKey,
          java.util.List.of(),
          options.asScala
            .map {
              case (key, value) =>
                new TopicOption(
                  key,
                  value,
                  false,
                  false,
                  false
                )
            }
            .toSeq
            .asJava
        )
        if (cachedTopics.putIfAbsent(topicKey, topicInfo) != null)
          throw new RuntimeException(s"the $topicKey already exists in kafka")
        f.complete(null)
      }
      f
    }

  private[this] var _closed = false

  override def close(): Unit = _closed = true

  override def closed(): Boolean = _closed

  override def brokerPorts(): CompletionStage[util.Map[String, Integer]] =
    CompletableFuture.completedFuture(java.util.Map.of())

  override def exist(topicKey: TopicKey): CompletionStage[lang.Boolean] =
    CompletableFuture.completedFuture(cachedTopics.containsKey(topicKey))

  override def deleteTopic(topicKey: TopicKey): CompletionStage[lang.Boolean] = {
    val f       = new CompletableFuture[lang.Boolean]()
    val removed = cachedTopics.remove(topicKey)
    if (removed == null) f.complete(false)
    else f.complete(true)
    f
  }
}