scala.concurrent.ExecutionContext.Implicits.global Scala Examples

The following examples show how to use scala.concurrent.ExecutionContext.Implicits.global. They are taken from open-source projects; each example is headed by its source file, the project it comes from, and that project's license.
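Before the project examples, here is a minimal, self-contained sketch (object and value names are illustrative) of what the import provides: an implicit ExecutionContext that Future.apply and combinators such as map pick up automatically.

import scala.concurrent.{Await, Future}
import scala.concurrent.duration._
import scala.concurrent.ExecutionContext.Implicits.global

object GlobalEcSketch extends App {
  // Future.apply and map both resolve the implicit global ExecutionContext.
  val sum: Future[Int] = Future(21).map(_ * 2)
  // Await is used here only to keep the demo synchronous.
  println(Await.result(sum, 1.second)) // prints 42
}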
Example 1
Source File: TimeLimitedFutureSpec.scala    From gfc-concurrent   with Apache License 2.0
package com.gilt.gfc.concurrent

import java.util.concurrent.TimeoutException
import scala.concurrent.{ Future, Await }
import scala.concurrent.ExecutionContext.Implicits.global
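// This import supplies the implicit ExecutionContext that the Future { ... } blocks in this spec run on.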
import scala.concurrent.duration.Duration
import org.scalatest.{WordSpec, Matchers}

class TimeLimitedFutureSpec extends WordSpec with Matchers {
  import TimeLimitedFutureSpec._

  "RichFuture" when {
    import ScalaFutures._

    "waiting for a result to happen" should {
      "return the completed original Future if it completes before the given timeout" in {
        val now = System.currentTimeMillis
        val future: Future[String] = (Future { Thread.sleep(1000); "Here I am" }).withTimeout(Duration(5, "seconds"))
        val msg: String = Await.result(future, Duration(10, "seconds"))
        val elapsed = (System.currentTimeMillis - now)
        msg should equal ("Here I am")
        elapsed should be (2000L +- 1000L)
      }

      "return the failure of the original Future if it fails before the given timeout" in {
        val now = System.currentTimeMillis
        val future = (Future { Thread.sleep(1000); throw new NullPointerException("That hurts!") }).withTimeout(Duration(5, "seconds"))
        a [NullPointerException] should be thrownBy { Await.result(future, Duration(10, "seconds")) }
        val elapsed = (System.currentTimeMillis - now)
        elapsed should be (2000L +- 1000L)
      }

      "return the timeout of the original Future if it had one and it went off and was shorter than the given one" in {
        val now = System.currentTimeMillis
        val timingOutEarlier = Timeouts.timeout(Duration(1, "seconds"))
        val future = timingOutEarlier.withTimeout(Duration(5, "seconds"))
        a [TimeoutException] should be thrownBy { Await.result(future, Duration(10, "seconds")) }
        val elapsed: Long = (System.currentTimeMillis - now)
        elapsed should be >= 500l
        elapsed should be <= 4000l
      }

      "return the timeout if the original Future does not timeout of its own" in {
        val now = System.currentTimeMillis
        val timingOutLater = Timeouts.timeout(Duration(3, "seconds"))
        val future = timingOutLater.withTimeout(Duration(1, "seconds"))
        a [TimeoutException] should be thrownBy  { Await.result(future, Duration(10, "seconds")) }
        val elapsed: Long = (System.currentTimeMillis - now)
        elapsed should be >= 1000l
        elapsed should be <= 2500l
      }
    }

    // an example of how it could be used
    "used in our most common use case" should {
      "fit nicely" in {
        val call: Future[String] = svcCall(1000).withTimeout(Duration(5000, "milliseconds")).recover {
          case _: TimeoutException => "recover.timeout"
          case other => s"recover.${other.getMessage}"
        }
        Await.result(call, Duration(10, "seconds")) should be ("data-1000")

        val call2: Future[String] = svcCall(5000).withTimeout(Duration(1000, "milliseconds")).recover {
          case _: TimeoutException => "recover.timeout"
          case other => s"recover.${other.getMessage}"
        }
        Await.result(call2, Duration(10, "seconds")) should be ("recover.timeout")
      }
    }
  }
}

object TimeLimitedFutureSpec {
  def svcCall(latency: Long): Future[String] = Future { Thread.sleep(latency); s"data-${latency}" }
} 
Example 2
Source File: Launcher.scala    From sparkplug   with MIT License
package springnz.sparkplug.client

import java.net.{ URLEncoder, InetAddress }

import better.files._
import com.typesafe.config.{ ConfigRenderOptions, Config }
import org.apache.spark.launcher.SparkLauncher
import springnz.sparkplug.util.{ BuilderOps, ConfigUtils, Logging, Pimpers }

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import scala.util.{ Properties, Try }

object Launcher extends Logging {
  import BuilderOps._
  import Pimpers._

  def startProcess(launcher: SparkLauncher): Future[Unit] = {
    val processFuture = Future {
      launcher.launch()
    }.withErrorLog("Failed to launch: ")
    processFuture.flatMap {
      process ⇒ executeProcess(process)
    }
  }

  private def executeProcess(process: Process): Future[Unit] = Future {
    val outStream = scala.io.Source.fromInputStream(process.getInputStream)
    for (line ← outStream.getLines()) {
      log.info(line)
    }
    val errorStream = scala.io.Source.fromInputStream(process.getErrorStream)
    for (line ← errorStream.getLines()) {
      log.info(line)
    }
    process.waitFor()
  }

  def launch(clientAkkaAddress: String,
    jarPath: File,
    mainJarPattern: String,
    mainClass: String,
    sparkConfig: Config,
    akkaRemoteConfig: Option[Config],
    sendJars: Boolean = true): Try[Future[Unit]] = Try {

    val fullExtraJarFolder = jarPath.pathAsString

    val sparkHome = Properties.envOrNone("SPARK_HOME")
    val sparkMaster = Properties.envOrElse("SPARK_MASTER", s"spark://${InetAddress.getLocalHost.getHostAddress}:7077")
    log.debug(s"Spark master set to: $sparkMaster")

    // TODO: enable this functionality (need Spark 1.5 for this)
    //    val sparkArgs: Array[String] = config.getString("spark.submit.sparkargs").split(' ')

    if (!sparkMaster.startsWith("local[") && !sparkHome.isDefined)
      throw new RuntimeException("If 'SPARK_MASTER' is not set to local, 'SPARK_HOME' must be set.")

    val appName = mainClass.split('.').last

    val mainJar = jarPath.glob(mainJarPattern).collectFirst { case f ⇒ f.pathAsString }

    val configVars: Seq[(String, String)] = ConfigUtils.configFields(sparkConfig).toSeq

    val akkaRemoteConfigString = akkaRemoteConfig.map { config ⇒
      val configString = config.root().render(ConfigRenderOptions.concise())
      URLEncoder.encode(configString, "UTF-8")
    }

    val launcher = (new SparkLauncher)
      .setIfSome[String](mainJar) { (l, mj) ⇒ l.setAppResource(mj) }
      .setMainClass(mainClass)
      .setAppName(appName)
      .setMaster(sparkMaster)
      .setIfSome[String](sparkHome) { (l, sh) ⇒ l.setSparkHome(sh) }
      .addAppArgs("appName", appName)
      .addAppArgs("clientAkkaAddress", clientAkkaAddress)
      .setIfSome(akkaRemoteConfigString) { (l, config) ⇒ l.addAppArgs("remoteAkkaConfig", config) }
      .setFoldLeft(configVars) { case (launcher, (key, value)) ⇒ launcher.setConf(key, value) }
      .setDeployMode(sparkConfig.getString("spark.deploymode"))

    val extraJarFiles = jarPath.glob("*.jar")
      .map { case f ⇒ f.pathAsString }
      .filterNot(_.contains("/akka-"))

    val launcherWithJars =
      if (sendJars)
        extraJarFiles.foldLeft(launcher) { case (l, jarFile) ⇒ l.addJar(jarFile) }
      else if (extraJarFiles.length == 0) launcher
      else launcher
        .setConf(SparkLauncher.DRIVER_EXTRA_CLASSPATH, s"$fullExtraJarFolder/*")
        .setConf(SparkLauncher.EXECUTOR_EXTRA_CLASSPATH, s"$fullExtraJarFolder/*")

    startProcess(launcherWithJars)
  }

} 
Example 3
Source File: KVStore.scala    From Freasy-Monad   with MIT License
package examples.cats

import cats.Id
import cats.free.Free
import freasymonad.cats.free

import scala.collection.mutable
import scala.concurrent.duration.Duration
import scala.concurrent.{Await, Future}

@free trait KVStore {
  type KVStoreF[A] = Free[GrammarADT, A]
  sealed trait GrammarADT[A]

  def put[T](key: String, value: T): KVStoreF[Unit]
  def get[T](key: String): KVStoreF[Option[T]]
  def delete(key: String): KVStoreF[Unit]

  def update[T](key: String, f: T => T): KVStoreF[Unit] =
    for {
      vMaybe <- get[T](key)
      _      <- vMaybe.map(v => put[T](key, f(v))).getOrElse(Free.pure(()))
    } yield ()
}

object Main extends App {
  import KVStore.ops._

  def program: KVStoreF[Option[Int]] =
    for {
      _ <- put("wild-cats", 2)
      _ <- update[Int]("wild-cats", _ + 12)
      _ <- put("tame-cats", 5)
      n <- get[Int]("wild-cats")
      _ <- delete("tame-cats")
    } yield n

  val idInterpreter = new KVStore.Interp[Id] {
    val kvs = mutable.Map.empty[String, Any]
    def get[T](key: String): Id[Option[T]] = {
      println(s"get($key)")
      kvs.get(key).map(_.asInstanceOf[T])
    }
    def put[T](key: String, value: T): Id[Unit] = {
      println(s"put($key, $value)")
      kvs(key) = value
    }
    def delete(key: String): Id[Unit] = {
      println(s"delete($key)")
      kvs.remove(key)
    }
  }
  val resId: Id[Option[Int]] = idInterpreter.run(program)

  import cats.implicits.catsStdInstancesForFuture
  import scala.concurrent.ExecutionContext.Implicits.global
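  // Both imports are needed here: the Future-based interpreter below requires an implicit ExecutionContext and cats' Monad[Future] instance.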

  val futureInterpreter = new KVStore.Interp[Future] {
    val kvs = mutable.Map.empty[String, Any]
    def get[T](key: String): Future[Option[T]] = Future {
      println(s"get($key)")
      kvs.get(key).map(_.asInstanceOf[T])
    }
    def put[T](key: String, value: T): Future[Unit] = Future {
      println(s"put($key, $value)")
      kvs(key) = value
    }
    def delete(key: String): Future[Unit] = Future {
      println(s"delete($key)")
      kvs.remove(key)
    }
  }
  val resFuture: Future[Option[Int]] = futureInterpreter.run(program)
  Await.ready(resFuture, Duration.Inf)
} 
Example 4
Source File: ShellClient.scala    From incubator-toree   with Apache License 2.0
package org.apache.toree.kernel.protocol.v5.client.socket

import akka.actor.Actor
import akka.util.Timeout
import org.apache.toree.communication.ZMQMessage
import org.apache.toree.communication.security.SecurityActorType
import org.apache.toree.kernel.protocol.v5.client.{ActorLoader, Utilities}
import org.apache.toree.kernel.protocol.v5.{KernelMessage, UUID}
import Utilities._
import org.apache.toree.kernel.protocol.v5.client.execution.{DeferredExecution, DeferredExecutionManager}
import org.apache.toree.kernel.protocol.v5.content.ExecuteReply

import org.apache.toree.utils.LogLike
import scala.concurrent.Await
import scala.concurrent.duration._
import akka.pattern.ask


class ShellClient(
  socketFactory: SocketFactory,
  actorLoader: ActorLoader,
  signatureEnabled: Boolean
) extends Actor with LogLike {
  logger.debug("Created shell client actor")
  implicit val timeout = Timeout(21474835.seconds)

  val socket = socketFactory.ShellClient(context.system, self)

  def receiveExecuteReply(parentId:String, kernelMessage: KernelMessage): Unit = {
    val deOption: Option[DeferredExecution] = DeferredExecutionManager.get(parentId)
    deOption match {
      case None =>
        logger.warn(s"No deferred execution for parent id ${parentId}")
      case Some(de) =>
        Utilities.parseAndHandle(kernelMessage.contentString,
          ExecuteReply.executeReplyReads, (er: ExecuteReply) => de.resolveReply(er))
    }
  }

  override def receive: Receive = {
    // from shell
    case message: ZMQMessage =>
      logger.debug("Received shell kernel message.")
      val kernelMessage: KernelMessage = message

      // TODO: Validate incoming message signature

      logger.trace(s"Kernel message is ${kernelMessage}")
      receiveExecuteReply(message.parentHeader.msg_id,kernelMessage)

    // from handler
    case message: KernelMessage =>
      logger.trace(s"Sending kernel message ${message}")
      val signatureManager =
        actorLoader.load(SecurityActorType.SignatureManager)

      import scala.concurrent.ExecutionContext.Implicits.global
      val messageWithSignature = if (signatureEnabled) {
        val signatureMessage = signatureManager ? message
        Await.result(signatureMessage, 100.milliseconds)
          .asInstanceOf[KernelMessage]
      } else message

      val zMQMessage: ZMQMessage = messageWithSignature

      socket ! zMQMessage
  }
} 
Example 5
Source File: StdinClient.scala    From incubator-toree   with Apache License 2.0
package org.apache.toree.kernel.protocol.v5.client.socket

import akka.actor.Actor
import org.apache.toree.communication.ZMQMessage
import org.apache.toree.communication.security.SecurityActorType
import org.apache.toree.kernel.protocol.v5.client.ActorLoader
import org.apache.toree.kernel.protocol.v5.{HeaderBuilder, KMBuilder, KernelMessage}
import org.apache.toree.kernel.protocol.v5.content.{InputReply, InputRequest}
import org.apache.toree.utils.LogLike
import org.apache.toree.kernel.protocol.v5.client.Utilities._
import play.api.libs.json.Json

import StdinClient._
import akka.pattern.ask

import scala.concurrent.duration._
import scala.concurrent.Await

object StdinClient {
  case class ResponseFunctionMessage(responseFunction: ResponseFunction)
  type ResponseFunction = (String, Boolean) => String
  val EmptyResponseFunction: ResponseFunction = (_, _) => ""
}


// NOTE: the original listing drops the StdinClient class declaration at this point.
// The header below is reconstructed by analogy with the ShellClient example above;
// the socket and getSessionId members referenced further down were also elided here.
class StdinClient(
  socketFactory: SocketFactory,
  actorLoader: ActorLoader,
  signatureEnabled: Boolean
) extends Actor with LogLike {

  private var responseFunc: ResponseFunction = EmptyResponseFunction

  override def receive: Receive = {
    case responseFunctionMessage: ResponseFunctionMessage =>
      logger.debug("Updating response function")
      this.responseFunc = responseFunctionMessage.responseFunction

    case message: ZMQMessage =>
      logger.debug("Received stdin kernel message")
      val kernelMessage: KernelMessage = message
      val messageType = kernelMessage.header.msg_type

      if (messageType == InputRequest.toTypeString) {
        logger.debug("Message is an input request")

        val inputRequest =
          Json.parse(kernelMessage.contentString).as[InputRequest]
        val value = responseFunc(inputRequest.prompt, inputRequest.password)
        val inputReply = InputReply(value)

        val newKernelMessage = KMBuilder()
          .withParent(kernelMessage)
          .withHeader(HeaderBuilder.empty.copy(
            msg_type = InputReply.toTypeString,
            session = getSessionId
          ))
          .withContentString(inputReply)
          .build

        import scala.concurrent.ExecutionContext.Implicits.global
        val messageWithSignature = if (signatureEnabled) {
          val signatureManager =
            actorLoader.load(SecurityActorType.SignatureManager)
          val signatureMessage = signatureManager ? newKernelMessage
          Await.result(signatureMessage, 100.milliseconds)
            .asInstanceOf[KernelMessage]
        } else newKernelMessage

        val zmqMessage: ZMQMessage = messageWithSignature

        socket ! zmqMessage
      } else {
        logger.debug(s"Unknown message of type $messageType")
      }
  }
} 
Example 6
Source File: SparkKernelClient.scala    From incubator-toree   with Apache License 2.0
package org.apache.toree.kernel.protocol.v5.client

import akka.actor.ActorSystem
import akka.pattern.ask
import akka.util.Timeout
import org.apache.toree.comm._
import org.apache.toree.kernel.protocol.v5._
import org.apache.toree.kernel.protocol.v5.client.execution.{DeferredExecution, ExecuteRequestTuple}
import org.apache.toree.kernel.protocol.v5.client.socket.HeartbeatMessage
import org.apache.toree.kernel.protocol.v5.client.socket.StdinClient.{ResponseFunctionMessage, ResponseFunction}
import org.apache.toree.kernel.protocol.v5.content.ExecuteRequest
import org.apache.toree.utils.LogLike
import scala.concurrent.duration._

import scala.concurrent.ExecutionContext.Implicits.global
import scala.util.{Failure, Success}


// NOTE: the original listing drops the SparkKernelClient class declaration at this point.
// The header below is reconstructed from the members the body uses (actorLoader, actorSystem, commRegistrar, logger).
class SparkKernelClient(
  private val actorLoader: ActorLoader,
  private val actorSystem: ActorSystem,
  private val commRegistrar: CommRegistrar
) extends LogLike {

  val comm = new ClientCommManager(
    actorLoader = actorLoader,
    kmBuilder = KMBuilder(),
    commRegistrar = commRegistrar
  )

  // TODO: hide this? just heartbeat to see if kernel is reachable?
  def heartbeat(failure: () => Unit): Unit = {
    val future = actorLoader.load(SocketType.Heartbeat) ? HeartbeatMessage

    future.onComplete {
      case Success(_) =>
        logger.info("Client received heartbeat.")
      case Failure(_) =>
        failure()
        logger.info("There was an error receiving heartbeat from kernel.")
    }
  }

  def shutdown() = {
    logger.info("Shutting down client")
    actorSystem.terminate()
  }
} 
Example 7
Source File: CodeCompleteHandler.scala    From incubator-toree   with Apache License 2.0
package org.apache.toree.kernel.protocol.v5.handler

import akka.pattern.ask
import org.apache.toree.kernel.protocol.v5._
import org.apache.toree.kernel.protocol.v5.content._
import org.apache.toree.kernel.protocol.v5.kernel.{ActorLoader, Utilities}
import Utilities._
import org.apache.toree.utils.{MessageLogSupport, LogLike}
import play.api.data.validation.ValidationError
import play.api.libs.json.{JsPath, Json}

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import scala.util.Success

class CodeCompleteHandler(actorLoader: ActorLoader)
  extends BaseHandler(actorLoader) with MessageLogSupport
{
  override def process(kernelMessage: KernelMessage): Future[_] = {
    logKernelMessageAction("Generating code completion for", kernelMessage)
    Utilities.parseAndHandle(
      kernelMessage.contentString,
      CompleteRequest.completeRequestReads,
      completeRequest(kernelMessage, _ : CompleteRequest)
    )
  }

  private def completeRequest(km: KernelMessage, cr: CompleteRequest):
                              Future[(Int, List[String])] = {
    val interpreterActor = actorLoader.load(SystemActorType.Interpreter)
    val codeCompleteFuture = ask(interpreterActor, cr).mapTo[(Int, List[String])]
    codeCompleteFuture.onComplete {
      case Success(tuple) =>
        val reply = CompleteReplyOk(tuple._2, tuple._1,
                                    cr.cursor_pos, Metadata())
        val completeReplyType = MessageType.Outgoing.CompleteReply.toString
        logKernelMessageAction("Sending code complete reply for", km)
        actorLoader.load(SystemActorType.KernelMessageRelay) !
          km.copy(
            header = HeaderBuilder.create(completeReplyType),
            parentHeader = km.header,
            contentString = Json.toJson(reply).toString
          )
      case _ =>
        new Exception("Parse error in CodeCompleteHandler")
    }
    codeCompleteFuture
  }
} 
Example 8
Source File: IsCompleteHandler.scala    From incubator-toree   with Apache License 2.0
package org.apache.toree.kernel.protocol.v5.handler

import akka.pattern.ask
import org.apache.toree.kernel.protocol.v5._
import org.apache.toree.kernel.protocol.v5.content._
import org.apache.toree.kernel.protocol.v5.kernel.{ActorLoader, Utilities}
import Utilities._
import org.apache.toree.utils.{MessageLogSupport, LogLike}
import play.api.data.validation.ValidationError
import play.api.libs.json.{JsPath, Json}

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import scala.util.Success

class IsCompleteHandler(actorLoader: ActorLoader)
  extends BaseHandler(actorLoader) with MessageLogSupport
{
  override def process(kernelMessage: KernelMessage): Future[_] = {
    logKernelMessageAction("Determining if code is complete for", kernelMessage)
    Utilities.parseAndHandle(
      kernelMessage.contentString,
      IsCompleteRequest.isCompleteRequestReads,
      isCompleteRequest(kernelMessage, _ : IsCompleteRequest)
    )
  }

  private def isCompleteRequest(km: KernelMessage, cr: IsCompleteRequest):
  Future[(String, String)] = {
    val interpreterActor = actorLoader.load(SystemActorType.Interpreter)
    val codeCompleteFuture = ask(interpreterActor, cr).mapTo[(String, String)]
    codeCompleteFuture.onComplete {
      case Success(tuple) =>
        val reply = IsCompleteReply(tuple._1, tuple._2)
        val isCompleteReplyType = MessageType.Outgoing.IsCompleteReply.toString
        logKernelMessageAction("Sending is complete reply for", km)
        actorLoader.load(SystemActorType.KernelMessageRelay) !
          km.copy(
            header = HeaderBuilder.create(isCompleteReplyType),
            parentHeader = km.header,
            contentString = Json.toJson(reply).toString
          )
      case _ =>
        new Exception("Parse error in CodeCompleteHandler")
    }
    codeCompleteFuture
  }
} 
Example 9
Source File: CommMsgHandler.scala    From incubator-toree   with Apache License 2.0
package org.apache.toree.kernel.protocol.v5.handler

import org.apache.toree.comm.{KernelCommWriter, CommRegistrar, CommWriter, CommStorage}
import org.apache.toree.global.ExecuteRequestState
import org.apache.toree.kernel.protocol.v5.content.CommMsg
import org.apache.toree.kernel.protocol.v5.kernel.{Utilities, ActorLoader}
import org.apache.toree.kernel.protocol.v5.{KMBuilder, KernelMessage}
import org.apache.toree.utils.MessageLogSupport
import play.api.data.validation.ValidationError
import play.api.libs.json.JsPath

import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global


class CommMsgHandler(
  actorLoader: ActorLoader, commRegistrar: CommRegistrar,
  commStorage: CommStorage
) extends BaseHandler(actorLoader) with MessageLogSupport
{
  override def process(kernelMessage: KernelMessage): Future[_] = Future {
    logKernelMessageAction("Initiating Comm Msg for", kernelMessage)

    ExecuteRequestState.processIncomingKernelMessage(kernelMessage)

    val kmBuilder = KMBuilder().withParent(kernelMessage)

    Utilities.parseAndHandle(
      kernelMessage.contentString,
      CommMsg.commMsgReads,
      handler = handleCommMsg(kmBuilder),
      errHandler = handleParseError
    )
  }

  private def handleCommMsg(kmBuilder: KMBuilder)(commMsg: CommMsg) = {
    val commId = commMsg.comm_id
    val data = commMsg.data

    logger.debug(s"Received comm_msg with id '$commId'")

    val commWriter = new KernelCommWriter(actorLoader, kmBuilder, commId)

    commStorage.getCommIdCallbacks(commId) match {
      case None             =>
        logger.warn(s"Received invalid id for Comm Msg: $commId")
      case Some(callbacks)  =>
        logger.debug(s"Executing msg callbacks for id '$commId'")

        // TODO: Should we be checking the return values? Probably not.
        callbacks.executeMsgCallbacks(commWriter, commId, data)
          .filter(_.isFailure).map(_.failed).foreach(throwable => {
            logger.error("Comm msg callback encountered an error!", throwable)
          })
    }
  }

  private def handleParseError(invalid: Seq[(JsPath, Seq[ValidationError])]) = {
    // TODO: Determine proper response for a parse failure
    logger.warn("Parse error for Comm Msg! Not responding!")
  }

} 
Example 10
Source File: ShutdownHandler.scala    From incubator-toree   with Apache License 2.0
package org.apache.toree.kernel.protocol.v5.handler

import org.apache.toree.comm.{CommRegistrar, CommStorage, KernelCommWriter}
import org.apache.toree.kernel.protocol.v5.content.{ShutdownReply, ShutdownRequest, CommOpen}
import org.apache.toree.kernel.protocol.v5.kernel.{ActorLoader, Utilities}
import org.apache.toree.kernel.protocol.v5._
import org.apache.toree.security.KernelSecurityManager
import org.apache.toree.utils.MessageLogSupport
import play.api.data.validation.ValidationError
import play.api.libs.json.JsPath

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future


class ShutdownHandler(
  actorLoader: ActorLoader
) extends BaseHandler(actorLoader) with MessageLogSupport
{
  override def process(kernelMessage: KernelMessage): Future[_] = Future {
    logKernelMessageAction("Initiating Shutdown request for", kernelMessage)

    val kernelInfo = SparkKernelInfo

    val shutdownReply = ShutdownReply(false)

    val replyHeader = Header(
      java.util.UUID.randomUUID.toString,
      "",
      java.util.UUID.randomUUID.toString,
      ShutdownReply.toTypeString,
      kernelInfo.protocolVersion)

    val kernelResponseMessage = KMBuilder()
      .withIds(kernelMessage.ids)
      .withSignature("")
      .withHeader(replyHeader)
      .withParent(kernelMessage)
      .withContentString(shutdownReply).build

    logger.debug("Attempting graceful shutdown.")
    actorLoader.load(SystemActorType.KernelMessageRelay) ! kernelResponseMessage

    // Instruct security manager that exit should be allowed
    KernelSecurityManager.enableRestrictedExit()

    System.exit(0)
  }

} 
Example 11
Source File: CommInfoRequestHandler.scala    From incubator-toree   with Apache License 2.0
package org.apache.toree.kernel.protocol.v5.handler

import org.apache.toree.comm.CommStorage
import org.apache.toree.kernel.protocol.v5._
import org.apache.toree.kernel.protocol.v5.content.CommInfoReply
import org.apache.toree.kernel.protocol.v5.kernel.ActorLoader
import org.apache.toree.utils.MessageLogSupport
import play.api.libs.json.Json

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.{Future, future}


class CommInfoRequestHandler(
                              actorLoader: ActorLoader,
                              commStorage: CommStorage)
  extends BaseHandler(actorLoader) with MessageLogSupport
{

  def buildCommMap(targetName: String) = {
    commStorage.getCommIdsFromTarget(targetName) match {
      case Some(commVector) => {
        commVector.map(x => Map(x -> Map("target_name" -> targetName))).flatten.toMap
      }
      case _ => {
        Map()
      }
    }
  }

  override def process(kernelMessage: KernelMessage): Future[_] = Future {
    logKernelMessageAction("Initiating CommInfo request for", kernelMessage)

    val commMap = (Json.parse(kernelMessage.contentString) \ "target_name").asOpt[String] match {
      case Some(targetName) => {
        buildCommMap(targetName)
      }
      case None => {
        //target_name is missing from the kernel message so return all comms over every target
        commStorage.getTargets().map(buildCommMap(_)).reduce(_ ++ _)
      }
    }
    val commInfoReply = CommInfoReply(commMap.asInstanceOf[Map[String, Map[String, String]]])

    val kernelInfo = SparkKernelInfo

    val replyHeader = Header(
      java.util.UUID.randomUUID.toString,
      "",
      java.util.UUID.randomUUID.toString,
      CommInfoReply.toTypeString,
      kernelInfo.protocolVersion)

    val kernelResponseMessage = KMBuilder()
      .withIds(kernelMessage.ids)
      .withSignature("")
      .withHeader(replyHeader)
      .withParent(kernelMessage)
      .withContentString(commInfoReply).build

    actorLoader.load(SystemActorType.KernelMessageRelay) ! kernelResponseMessage
  }

} 
Example 12
Source File: CommOpenHandler.scala    From incubator-toree   with Apache License 2.0
package org.apache.toree.kernel.protocol.v5.handler

import org.apache.toree.comm.{KernelCommWriter, CommStorage, CommRegistrar, CommWriter}
import org.apache.toree.global.ExecuteRequestState
import org.apache.toree.kernel.protocol.v5.content.CommOpen
import org.apache.toree.kernel.protocol.v5.kernel.{Utilities, ActorLoader}
import org.apache.toree.kernel.protocol.v5.{KMBuilder, KernelMessage}
import org.apache.toree.utils.MessageLogSupport
import play.api.data.validation.ValidationError
import play.api.libs.json.JsPath

import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global


class CommOpenHandler(
  actorLoader: ActorLoader, commRegistrar: CommRegistrar,
  commStorage: CommStorage
) extends BaseHandler(actorLoader) with MessageLogSupport
{
  override def process(kernelMessage: KernelMessage): Future[_] = Future {
    logKernelMessageAction("Initiating Comm Open for", kernelMessage)

    ExecuteRequestState.processIncomingKernelMessage(kernelMessage)

    val kmBuilder = KMBuilder().withParent(kernelMessage)

    Utilities.parseAndHandle(
      kernelMessage.contentString,
      CommOpen.commOpenReads,
      handler = handleCommOpen(kmBuilder),
      errHandler = handleParseError
    )
  }

  private def handleCommOpen(kmBuilder: KMBuilder)(commOpen: CommOpen) = {
    val commId = commOpen.comm_id
    val targetName = commOpen.target_name
    val data = commOpen.data

    logger.debug(
      s"Received comm_open for target '$targetName' with id '$commId'")

    val commWriter = new KernelCommWriter(actorLoader, kmBuilder, commId)

    commStorage.getTargetCallbacks(targetName) match {
      case None             =>
        logger.warn(s"Received invalid target for Comm Open: $targetName")

        commWriter.close()
      case Some(callbacks)  =>
        logger.debug(s"Executing open callbacks for id '$commId'")

        // TODO: Should we be checking the return values? Probably not.
        callbacks.executeOpenCallbacks(commWriter, commId, targetName, data)
          .filter(_.isFailure).map(_.failed).foreach(throwable => {
            logger.error("Comm open callback encountered an error!", throwable)
          })
    }
  }

  private def handleParseError(invalid: Seq[(JsPath, Seq[ValidationError])]) = {
    // TODO: Determine proper response for a parse failure
    logger.warn("Parse error for Comm Open! Not responding!")
  }

} 
Example 13
Source File: KernelInfoRequestHandler.scala    From incubator-toree   with Apache License 2.0
package org.apache.toree.kernel.protocol.v5.handler

import org.apache.toree.kernel.protocol.v5._
import org.apache.toree.kernel.protocol.v5.content.KernelInfoReply
import org.apache.toree.kernel.protocol.v5.kernel.ActorLoader
import org.apache.toree.utils.LogLike

import scala.concurrent._


class KernelInfoRequestHandler(actorLoader: ActorLoader, languageInfo: LanguageInfo)
  extends BaseHandler(actorLoader) with LogLike
{
  def process(kernelMessage: KernelMessage): Future[_] = {
    import scala.concurrent.ExecutionContext.Implicits.global
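    // Imported inside process so the global ExecutionContext is in scope just for the Future below.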
    Future {
      logger.debug("Sending kernel info reply message")

      val kernelInfo = SparkKernelInfo
      val kernelInfoReply = KernelInfoReply(
        kernelInfo.protocolVersion,
        kernelInfo.implementation,
        kernelInfo.implementationVersion,
        languageInfo,
        kernelInfo.banner
      )

      // TODO could we use HeaderBuilder here?
      val replyHeader = Header(
        java.util.UUID.randomUUID.toString,
        "",
        java.util.UUID.randomUUID.toString,
        MessageType.Outgoing.KernelInfoReply.toString,
        kernelInfo.protocolVersion
      )

      val kernelResponseMessage = KMBuilder()
        .withIds(kernelMessage.ids)
        .withSignature("")
        .withHeader(replyHeader)
        .withParent(kernelMessage)
        .withContentString(kernelInfoReply).build

      actorLoader.load(SystemActorType.KernelMessageRelay) ! kernelResponseMessage
    }
  }
} 
Example 14
Source File: CommCloseHandler.scala    From incubator-toree   with Apache License 2.0
package org.apache.toree.kernel.protocol.v5.handler

import org.apache.toree.comm.{KernelCommWriter, CommRegistrar, CommWriter, CommStorage}
import org.apache.toree.global.ExecuteRequestState
import org.apache.toree.kernel.protocol.v5.content.CommClose
import org.apache.toree.kernel.protocol.v5.kernel.{Utilities, ActorLoader}
import org.apache.toree.kernel.protocol.v5.{KMBuilder, KernelMessage}
import org.apache.toree.utils.MessageLogSupport
import play.api.data.validation.ValidationError
import play.api.libs.json.JsPath

import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global


class CommCloseHandler(
  actorLoader: ActorLoader, commRegistrar: CommRegistrar,
  commStorage: CommStorage
) extends BaseHandler(actorLoader) with MessageLogSupport
{
  override def process(kernelMessage: KernelMessage): Future[_] = Future {
    logKernelMessageAction("Initiating Comm Close for", kernelMessage)

    ExecuteRequestState.processIncomingKernelMessage(kernelMessage)

    val kmBuilder = KMBuilder().withParent(kernelMessage)

    Utilities.parseAndHandle(
      kernelMessage.contentString,
      CommClose.commCloseReads,
      handler = handleCommClose(kmBuilder),
      errHandler = handleParseError
    )
  }

  private def handleCommClose(kmBuilder: KMBuilder)(commClose: CommClose) = {
    val commId = commClose.comm_id
    val data = commClose.data

    logger.debug(s"Received comm_close with id '$commId'")

    val commWriter = new KernelCommWriter(actorLoader, kmBuilder, commId)

    commStorage.getCommIdCallbacks(commId) match {
      case None             =>
        logger.warn(s"Received invalid id for Comm Close: $commId")
      case Some(callbacks)  =>
        logger.debug(s"Executing close callbacks for id '$commId'")

        // TODO: Should we be checking the return values? Probably not.
        callbacks.executeCloseCallbacks(commWriter, commId, data)
          .filter(_.isFailure).map(_.failed).foreach(throwable => {
            logger.error("Comm close callback encountered an error!", throwable)
          })
    }
  }

  private def handleParseError(invalid: Seq[(JsPath, Seq[ValidationError])]) = {
    // TODO: Determine proper response for a parse failure
    logger.warn("Parse error for Comm Close! Not responding!")
  }

} 
Example 15
Source File: BrokerTransformer.scala    From incubator-toree   with Apache License 2.0
package org.apache.toree.interpreter.broker

import org.apache.toree.interpreter.InterpreterTypes.ExecuteOutput
import org.apache.toree.interpreter.Results.Result
import org.apache.toree.interpreter.broker.BrokerTypes.CodeResults
import org.apache.toree.interpreter.{ExecuteError, ExecuteFailure, Results}

import scala.concurrent.Future


// NOTE: the enclosing class declaration was dropped in the original listing;
// a minimal header is reconstructed here so the method below has a home.
class BrokerTransformer {
  def transformToInterpreterResult(futureResult: Future[CodeResults]):
    Future[(Result, Either[ExecuteOutput, ExecuteFailure])] =
  {
    import scala.concurrent.ExecutionContext.Implicits.global

    futureResult
      .map(results => (Results.Success, Left(Map("text/plain" -> results))))
      .recover({ case ex: BrokerException =>
        (Results.Error, Right(ExecuteError(
          name = ex.getClass.getName,
          value = ex.getLocalizedMessage,
          stackTrace = ex.getStackTrace.map(_.toString).toList
        )))
      })
  }
} 
Example 16
Source File: GetTicketInfoSpec.scala    From 006877   with MIT License
package com.goticks

import org.scalatest.MustMatchers
import org.scalatest.WordSpec
import scala.concurrent.{Future, Await}

class GetTicketInfoSpec extends WordSpec with MustMatchers {

  object TicketInfoService extends TicketInfoService with MockWebServiceCalls
  import TicketInfoService._
  import scala.concurrent.duration._

  "getTicketInfo" must {
    "return a complete ticket info when all futures are successful" in {
      val ticketInfo = Await.result(getTicketInfo("1234", Location(1d,2d)), 10.seconds)

      ticketInfo.event.isEmpty must be(false)
      ticketInfo.event.foreach( event=> event.name must be("Quasimoto"))
      ticketInfo.travelAdvice.isEmpty must be(false)
      ticketInfo.suggestions.map(_.name) must be (Seq("Madlib", "OhNo", "Flying Lotus"))
    }
    "return an incomplete ticket info when getEvent fails" in {
      val ticketInfo = Await.result(getTicketInfo("4321", Location(1d,2d)), 10.seconds)

      ticketInfo.event.isEmpty must be(true)
      ticketInfo.travelAdvice.isEmpty must be(true)
      ticketInfo.suggestions.isEmpty must be (true)
    }
  }
}

trait MockWebServiceCalls extends WebServiceCalls {
  import com.github.nscala_time.time.Imports._
  import scala.concurrent.ExecutionContext.Implicits.global

  def getEvent(ticketNr: String, location: Location): Future[TicketInfo] = {
      Future {
        if(ticketNr == "1234") {
          TicketInfo(ticketNr, location, event = Some(Event("Quasimoto", Location(4.324218908d,53.12311144d), new DateTime(2013,10,1,22,30))))
        } else throw new Exception("crap")
      }
  }

  def callWeatherXService(ticketInfo: TicketInfo): Future[Option[Weather]] = {
    Future { Some(Weather(30, false)) }
  }

  def callWeatherYService(ticketInfo: TicketInfo): Future[Option[Weather]] = {
    Future { Some(Weather(30, false)) }
  }

  def callTrafficService(origin: Location, destination: Location, time: DateTime): Future[Option[RouteByCar]] = {
    Future {
      Some(RouteByCar("route1", time - (35.minutes), origin, destination, 30.minutes, 5.minutes))
    }
  }

  def callPublicTransportService(origin: Location, destination: Location, time: DateTime): Future[Option[PublicTransportAdvice]] = {
    Future {
      Some(PublicTransportAdvice("public transport route 1", time - (20.minutes), origin, destination, 20.minutes))
    }
  }

  def callSimilarArtistsService(event: Event): Future[Seq[Artist]] = {
    Future {
      Seq(Artist("Madlib", "madlib.com/calendar"), Artist("OhNo", "ohno.com/calendar"), Artist("Flying Lotus", "fly.lo/calendar"))
    }
  }

  def callArtistCalendarService(artist: Artist, nearLocation: Location): Future[Event] = {
    Future {
      Event(artist.name,Location(1d,1d), DateTime.now)
    }
  }
} 
Example 17
Source File: WindowsPluginFrontend.scala    From protoc-bridge   with Apache License 2.0
package protocbridge.frontend

import java.net.ServerSocket
import java.nio.file.{Files, Path, Paths}

import protocbridge.ProtocCodeGenerator

import scala.concurrent.blocking

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future


object WindowsPluginFrontend extends PluginFrontend {

  case class InternalState(batFile: Path)

  override def prepare(plugin: ProtocCodeGenerator): (Path, InternalState) = {
    val ss = new ServerSocket(0)
    val state = createWindowsScript(ss.getLocalPort)

    Future {
      blocking {
        val client = ss.accept()
        val response =
          PluginFrontend.runWithInputStream(plugin, client.getInputStream)
        client.getOutputStream.write(response)
        client.close()
        ss.close()
      }
    }

    (state.batFile, state)
  }

  override def cleanup(state: InternalState): Unit = {
    Files.delete(state.batFile)
  }

  private def createWindowsScript(port: Int): InternalState = {
    val classPath =
      Paths.get(getClass.getProtectionDomain.getCodeSource.getLocation.toURI)
    val classPathBatchString = classPath.toString.replace("%", "%%")
    val batchFile = PluginFrontend.createTempFile(
      ".bat",
      s"""@echo off
          |"${sys
        .props(
          "java.home"
        )}\\bin\\java.exe" -cp "$classPathBatchString" ${classOf[
        BridgeApp
      ].getName} $port
        """.stripMargin
    )
    InternalState(batchFile)
  }
} 
Example 18
Source File: PosixPluginFrontend.scala    From protoc-bridge   with Apache License 2.0
package protocbridge.frontend

import java.nio.file.{Files, Path}

import protocbridge.ProtocCodeGenerator
import java.nio.file.attribute.PosixFilePermission

import scala.concurrent.blocking
import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global
import scala.sys.process._
import java.{util => ju}


object PosixPluginFrontend extends PluginFrontend {
  case class InternalState(
      inputPipe: Path,
      outputPipe: Path,
      tempDir: Path,
      shellScript: Path
  )

  override def prepare(plugin: ProtocCodeGenerator): (Path, InternalState) = {
    val tempDirPath = Files.createTempDirectory("protopipe-")
    val inputPipe = createPipe(tempDirPath, "input")
    val outputPipe = createPipe(tempDirPath, "output")
    val sh = createShellScript(inputPipe, outputPipe)

    Future {
      blocking {
        val fsin = Files.newInputStream(inputPipe)
        val response = PluginFrontend.runWithInputStream(plugin, fsin)
        fsin.close()

        val fsout = Files.newOutputStream(outputPipe)
        fsout.write(response)
        fsout.close()
      }
    }
    (sh, InternalState(inputPipe, outputPipe, tempDirPath, sh))
  }

  override def cleanup(state: InternalState): Unit = {
    Files.delete(state.inputPipe)
    Files.delete(state.outputPipe)
    Files.delete(state.tempDir)
    Files.delete(state.shellScript)
  }

  private def createPipe(tempDirPath: Path, name: String): Path = {
    val pipeName = tempDirPath.resolve(name)
    Seq("mkfifo", "-m", "600", pipeName.toAbsolutePath.toString).!!
    pipeName
  }

  private def createShellScript(inputPipe: Path, outputPipe: Path): Path = {
    val scriptName = PluginFrontend.createTempFile("", s"""|#!/usr/bin/env sh
          |set -e
          |cat /dev/stdin > "$inputPipe"
          |cat "$outputPipe"
      """.stripMargin)
    val perms = new ju.HashSet[PosixFilePermission]
    perms.add(PosixFilePermission.OWNER_EXECUTE)
    perms.add(PosixFilePermission.OWNER_READ)
    Files.setPosixFilePermissions(
      scriptName,
      perms
    )
    scriptName
  }
} 
Example 19
Source File: DbManagement.scala    From bitcoin-s   with MIT License
package org.bitcoins.db

import org.bitcoins.core.util.{BitcoinSLogger, FutureUtil}
import org.flywaydb.core.Flyway
import org.flywaydb.core.api.FlywayException

import scala.concurrent.{ExecutionContext, Future}

trait DbManagement extends BitcoinSLogger {
  _: JdbcProfileComponent[AppConfig] =>
  import profile.api._

  import scala.language.implicitConversions

  
  def migrate(): Int = {
    val module = appConfig.moduleName
    val config =
      Flyway
        .configure()
        .locations(s"classpath:${driverName}/${module}/migration/")
    val flyway = config.dataSource(jdbcUrl, username, password).load

    try {
      flyway.migrate()
    } catch {
      case err: FlywayException =>
        logger.warn(
          s"Failed to apply first round of migrations, attempting baseline and re-apply",
          err)
        //maybe we have an existing database, so attempt to baseline the existing
        //database and then apply migrations again
        flyway.baseline()
        flyway.migrate()
    }
  }
} 
Example 20
Source File: FlushMode.scala    From scribe   with MIT License
package scribe.writer.file

import java.util.concurrent.atomic.{AtomicBoolean, AtomicLong}

import scribe.util.Time

import scala.concurrent.Future
import scala.concurrent.duration._
import scala.concurrent.ExecutionContext.Implicits.global

trait FlushMode {
  def dataWritten(logFile: LogFile, writer: LogFileWriter): Unit
}

object FlushMode {
  object NeverFlush extends FlushMode {
    override def dataWritten(logFile: LogFile, writer: LogFileWriter): Unit = {}
  }

  object AlwaysFlush extends FlushMode {
    override def dataWritten(logFile: LogFile, writer: LogFileWriter): Unit = writer.flush()
  }

  case class AsynchronousFlush(delay: FiniteDuration = 1.second) extends FlushMode {
    private lazy val delayMillis = delay.toMillis
    private lazy val flushing = new AtomicBoolean(false)
    private lazy val dirty = new AtomicBoolean(false)
    private lazy val lastFlush = new AtomicLong(0L)
    private var logFile: LogFile = _

    override def dataWritten(logFile: LogFile, writer: LogFileWriter): Unit = {
      this.logFile = logFile
      if (flushing.compareAndSet(false, true)) {
        flush()
      } else {
        dirty.set(true)
      }
    }

    private def flush(): Unit = Future {
      try {
        val delay = this.delayMillis - (Time() - lastFlush.get())
        if (delay > 0L) {
          Thread.sleep(delay)
        }
        logFile.flush()
      } finally {
        lastFlush.set(Time())
        if (dirty.compareAndSet(true, false)) {
          flush()
        } else {
          flushing.set(false)
        }
      }
    }
  }
} 
Example 21
Source File: UnMarshalling.scala    From introduction-to-akkahttp   with Apache License 2.0
package com.shashank.akkahttp.basic.routing

import akka.actor.ActorSystem
import akka.http.scaladsl.marshalling.Marshal
import akka.http.scaladsl.model.{HttpMethods, HttpRequest, HttpResponse, MessageEntity}
import akka.http.scaladsl.unmarshalling.Unmarshal
import akka.stream.{ActorMaterializer, Materializer}
import akka.util.ByteString

import scala.concurrent.Await
import scala.concurrent.duration._
import scala.concurrent.ExecutionContext.Implicits.global
import spray.json._


object UnMarshalling {

  def main(args: Array[String]) {

    implicit val sys = ActorSystem("IntroductionToAkkaHttp")
    implicit val mat:Materializer = ActorMaterializer()

    //type FromStringUnmarshaller[T] = Unmarshaller[String, T]
    val intFuture = Unmarshal("42").to[Int]
    val int = Await.result(intFuture, 1.second)
    println("int unmarshalling "+int)

    //type FromStringUnmarshaller[T] = Unmarshaller[String, T]
    val boolFuture = Unmarshal("off").to[Boolean]
    val bool = Await.result(boolFuture, 1.second)
    println("off unmarshalling "+bool)

    //type ToEntityMarshaller[T] = Marshaller[T, MessageEntity]
    val string = "Yeah"
    val entityFuture = Marshal(string).to[MessageEntity]
    val entity = Await.result(entityFuture, 1.second) // don't block in non-test code!
    println(entity)

    //type ToResponseMarshaller[T] = Marshaller[T, HttpResponse]
    val errorMsg = "Not found, pal!"
    val responseFuture = Marshal(404 -> errorMsg).to[HttpResponse]
    val response = Await.result(responseFuture, 1.second)
    println(response)


    //type FromEntityUnmarshaller[T] = Unmarshaller[HttpEntity, T]
    val jsonByteString = ByteString("""{"name":"Hello"}""")
    val httpRequest = HttpRequest(HttpMethods.POST, entity = jsonByteString)
    val jsonDataUnmarshalledFuture = Unmarshal(httpRequest).to[String]
    val jsonDataUnmarshalled = Await.result(jsonDataUnmarshalledFuture, 1.second)
    println(jsonDataUnmarshalled)

    sys.terminate()

  }

} 
Example 22
Source File: AnotherServiceImpl.scala    From lagom   with Apache License 2.0
package docs.scaladsl.mb

import akka.Done
import akka.NotUsed
import akka.stream.FlowShape
import akka.stream.scaladsl.Flow
import akka.stream.scaladsl.GraphDSL
import akka.stream.scaladsl.GraphDSL.Implicits._
import akka.stream.scaladsl.Merge
import akka.stream.scaladsl.Partition
import com.lightbend.lagom.scaladsl.api.ServiceCall
import com.lightbend.lagom.scaladsl.api.broker.Message

//#inject-service
class AnotherServiceImpl(helloService: HelloService) extends AnotherService {
  //#inject-service

  //#subscribe-to-topic
  helloService
    .greetingsTopic()
    .subscribe // <-- you get back a Subscriber instance
    .atLeastOnce(
      Flow.fromFunction(doSomethingWithTheMessage)
    )
  //#subscribe-to-topic

  var lastObservedMessage: String = _

  private def doSomethingWithTheMessage(greetingMessage: GreetingMessage): Done = {
    lastObservedMessage = greetingMessage.message
    Done
  }

  import scala.concurrent.ExecutionContext.Implicits.global

  override def foo: ServiceCall[NotUsed, String] = ServiceCall { req =>
    scala.concurrent.Future.successful(lastObservedMessage)
  }

  def subscribeWithMetadata = {
    //#subscribe-to-topic-with-metadata
    import com.lightbend.lagom.scaladsl.api.broker.Message
    import com.lightbend.lagom.scaladsl.broker.kafka.KafkaMetadataKeys

    helloService
      .greetingsTopic()
      .subscribe
      .withMetadata
      .atLeastOnce(
        Flow[Message[GreetingMessage]].map { msg =>
          val greetingMessage = msg.payload
          val messageKey      = msg.messageKeyAsString
          val kafkaHeaders    = msg.get(KafkaMetadataKeys.Headers)
          println(s"Message: $greetingMessage Key: $messageKey Headers: $kafkaHeaders")
          Done
        }
      )
    //#subscribe-to-topic-with-metadata
  }

  def skipMessages = {
    //#subscribe-to-topic-skip-messages
    helloService
      .greetingsTopic()
      .subscribe
      .atLeastOnce(
        Flow[GreetingMessage].map {
          case msg @ GreetingMessage("Kia ora") => doSomethingWithTheMessage(msg)
          case _                                => Done // Skip all messages where the message is not "Kia ora".
        }
      )
    //#subscribe-to-topic-skip-messages
  }
} 
Example 23
Source File: SlickRepos.scala    From lagom   with Apache License 2.0
package docs.home.scaladsl.persistence

import akka.Done
import slick.dbio.DBIOAction
import slick.dbio.Effect
import slick.dbio.Effect.All
import slick.sql.FixedSqlAction

object SlickRepos {
  object Initial {
    // #slick-mapping-initial
    import slick.jdbc.H2Profile.api._

    class PostSummaryRepository {
      class PostSummaryTable(tag: Tag) extends Table[PostSummary](tag, "post_summary") {
        def *      = (postId, title) <> (PostSummary.tupled, PostSummary.unapply)
        def postId = column[String]("post_id", O.PrimaryKey)
        def title  = column[String]("title")
      }

      val postSummaries = TableQuery[PostSummaryTable]

      def selectPostSummaries() = postSummaries.result
    }

    // #slick-mapping-initial
  }

  object WithCreateTable {
    // need to import it first to make table compile
    import scala.concurrent.ExecutionContext.Implicits.global
    import slick.jdbc.H2Profile.api._

    class PostSummaryTable(tag: Tag) extends Table[PostSummary](tag, "post_summary") {
      def *      = (postId, title) <> (PostSummary.tupled, PostSummary.unapply)
      def postId = column[String]("post_id", O.PrimaryKey)
      def title  = column[String]("title")
    }

    // import again, for documentation purpose
    // #slick-mapping-schema
    import scala.concurrent.ExecutionContext.Implicits.global
    import slick.jdbc.H2Profile.api._

    class PostSummaryRepository {
      // table mapping omitted for conciseness
      val postSummaries = TableQuery[PostSummaryTable]

      def createTable = postSummaries.schema.createIfNotExists
    }
    // #slick-mapping-schema
  }

  object Full {
    import scala.concurrent.ExecutionContext.Implicits.global
    import slick.jdbc.H2Profile.api._

    class PostSummaryRepository {
      class PostSummaryTable(tag: Tag) extends Table[PostSummary](tag, "post_summary") {
        def *      = (postId, title) <> (PostSummary.tupled, PostSummary.unapply)
        def postId = column[String]("post_id", O.PrimaryKey)
        def title  = column[String]("title")
      }

      val postSummaries = TableQuery[PostSummaryTable]

      def createTable = postSummaries.schema.createIfNotExists

      // #insert-or-update
      
      def save(postSummary: PostSummary) = {
        postSummaries.insertOrUpdate(postSummary).map(_ => Done)
      }
      // #insert-or-update
    }
  }
} 
Example 24
Source File: ServiceLocatorSessionProviderSpec.scala    From lagom   with Apache License 2.0
package com.lightbend.lagom.internal.persistence.cassandra

import java.net.InetSocketAddress
import java.net.URI

import akka.actor.ActorSystem
import akka.testkit.TestKit
import com.typesafe.config.Config
import com.typesafe.config.ConfigFactory
import org.scalatest.BeforeAndAfterAll

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._
import scala.concurrent.Await
import scala.concurrent.Future
import org.scalatest.matchers.must.Matchers
import org.scalatest.wordspec.AnyWordSpec

class ServiceLocatorSessionProviderSpec extends AnyWordSpec with Matchers with BeforeAndAfterAll {
  val system         = ActorSystem("test")
  val config: Config = ConfigFactory.load()
  val uri            = new URI("http://localhost:8080")

  protected override def afterAll(): Unit = {
    TestKit.shutdownActorSystem(actorSystem = system, verifySystemShutdown = true)
  }

  val locator = new ServiceLocatorAdapter {
    override def locateAll(name: String): Future[List[URI]] = {
      name match {
        case "existing" => Future.successful(List(uri))
        case "absent"   => Future.successful(Nil)
      }
    }
  }

  val providerConfig: Config = config.getConfig("lagom.persistence.read-side.cassandra")
  val provider               = new ServiceLocatorSessionProvider(system, providerConfig)
  ServiceLocatorHolder(system).setServiceLocator(locator)

  "ServiceLocatorSessionProvider" should {
    "Get the address when the contact points exist" in {
      val future = provider.lookupContactPoints("existing")

      Await.result(future, 3.seconds) mustBe Seq(new InetSocketAddress(uri.getHost, uri.getPort))
    }

    "Fail the future when the contact points do not exist" in {
      val future = provider.lookupContactPoints("absent")

      intercept[NoContactPointsException] {
        Await.result(future, 3.seconds)
      }
    }
  }
} 
Example 25
Source File: CassandraTestServer.scala    From lagom   with Apache License 2.0
package com.lightbend.lagom.internal.testkit

import java.nio.file.Files
import java.util.concurrent.TimeUnit

import akka.persistence.cassandra.testkit.CassandraLauncher
import com.google.common.io.MoreFiles
import com.google.common.io.RecursiveDeleteOption
import play.api.Logger
import play.api.inject.ApplicationLifecycle

import scala.concurrent.Future
import scala.util.Try

private[lagom] object CassandraTestServer {
  private val LagomTestConfigResource: String = "lagom-test-embedded-cassandra.yaml"

  private lazy val log = Logger(getClass)

  def run(cassandraDirectoryPrefix: String, lifecycle: ApplicationLifecycle): Int = {

    val cassandraDirectory = Files.createTempDirectory(cassandraDirectoryPrefix)

    // Shut down Cassandra and delete its temporary directory when the application shuts down
    lifecycle.addStopHook { () =>
      import scala.concurrent.ExecutionContext.Implicits.global
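      // Scoped import: the global ExecutionContext runs the Future that deletes the temporary directory below.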
      Try(CassandraLauncher.stop())
      // The ALLOW_INSECURE option is required to remove the files on OSes that don't support SecureDirectoryStream
      // See http://google.github.io/guava/releases/snapshot-jre/api/docs/com/google/common/io/MoreFiles.html#deleteRecursively-java.nio.file.Path-com.google.common.io.RecursiveDeleteOption...-
      Future(MoreFiles.deleteRecursively(cassandraDirectory, RecursiveDeleteOption.ALLOW_INSECURE))
    }

    val t0 = System.nanoTime()

    CassandraLauncher.start(
      cassandraDirectory.toFile,
      LagomTestConfigResource,
      clean = false,
      port = 0,
      CassandraLauncher.classpathForResources(LagomTestConfigResource)
    )

    log.debug(s"Cassandra started in ${TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - t0)} ms")

    CassandraLauncher.randomPort
  }
} 
Example 26
Source File: HListShapeSpec.scala    From slickless   with Apache License 2.0
package slickless

import shapeless.{::, HNil}
import slick.jdbc.H2Profile.api._

import scala.concurrent.ExecutionContext.Implicits.global

class HListShapeSpec extends Spec {

  class Users(tag: Tag) extends Table[Long :: String :: HNil](tag, "users") {
    def id    = column[Long]( "id", O.PrimaryKey, O.AutoInc )
    def email = column[String]("email")

    def * = id :: email :: HNil
  }

  lazy val users = TableQuery[Users]

  "slick tables with hlist mappings" - {
    "should support inserts and selects" in {
      val db = Database.forConfig("h2")

      val action = for {
        _   <- users.schema.create
        _   <- users += 1L :: "[email protected]" :: HNil
        ans <- users.result.head
        _   <- users.schema.drop
      } yield ans

      whenReady(db.run(action)) { _ should equal (1L :: "[email protected]" :: HNil) }
    }
  }
} 
Example 27
Source File: GenShapeSpec.scala    From slickless   with Apache License 2.0
package slickless

import shapeless.{HNil, Generic}
import slick.jdbc.H2Profile.api._

import scala.concurrent.ExecutionContext.Implicits.global

class GenShapeSpec extends Spec {

  case class Address(id: Long, house: Int, street: String)

  class Addresss(tag: Tag) extends Table[Address](tag, "addresses") {
    def id     = column[Long]("id", O.PrimaryKey, O.AutoInc)
    def house  = column[Int]("house")
    def street = column[String]("street")

    def * = (id :: house :: street :: HNil).mappedWith(Generic[Address])
  }

  lazy val addresses = TableQuery[Addresss]

  "slick tables with generic mappings" - {
    "should support inserts and selects" in {
      val db = Database.forConfig("h2")

      val address = Address(1L, 29, "Acacia Road")

      val action = for {
        _   <- addresses.schema.create
        _   <- addresses += address
        ans <- addresses.result.head
        _   <- addresses.schema.drop
      } yield ans

      whenReady(db.run(action)) { _ should equal(address) }
    }
  }
} 
Example 28
Source File: Issue42.scala    From slickless   with Apache License 2.0
import slickless.Spec
import shapeless.{HNil, Generic}
import slick.jdbc.H2Profile.api._
import slickless._
import scala.concurrent.ExecutionContext.Implicits.global

class Issue42 extends Spec {

  case class Department(id: Long, city: String)

  case class Employee(id: Long, dept1: Department, dept2: Department, email: String)

  class Employees(tag: Tag) extends Table[Employee](tag, "emps42") {
    def id              = column[Long]("id", O.PrimaryKey, O.AutoInc)
    def departmentIdA   = column[Long]("dept_a_id")
    def departmentCityA = column[String]("dept_a_city")
    def departmentIdB   = column[Long]("dept_b_id")
    def departmentCityB = column[String]("dept_b_city")
    def email           = column[String]("email")

    def departmentA = (departmentIdA, departmentCityA).mapTo[Department]
    def departmentB = (departmentIdB, departmentCityB).mapTo[Department]

    def * = (id :: departmentA :: departmentB :: email :: HNil).mappedWith(Generic[Employee])
  }

  lazy val emps = TableQuery[Employees]

  "slick tables with nested case class mappings" - {
    "should support inserts and selects" in {
      val db = Database.forConfig("h2")

      val emp = Employee(1L, Department(21L, "Brighton"), Department(22L, "Hove"), "[email protected]")

      val action = for {
        _   <- emps.schema.drop.asTry
        _   <- emps.schema.create
        _   <- emps += emp
        ans <- emps.result.head
        _   <- emps.schema.drop
      } yield ans

      whenReady(db.run(action)) { _ should equal(emp) }
    }
  }
} 
Example 29
Source File: LargeSpec.scala    From slickless   with Apache License 2.0 5 votes vote down vote up
package userapp



import slick.jdbc.H2Profile.api._
import shapeless._
import slickless._

case class Large(
  a: Int, b: Int, c: Int, d: Int,
  e: Int, f: Int, g: Int, h: Int,
  i: Int, j: Int, k: Int, l: Int,
  m: Int, n: Int, o: Int, p: Int,
  q: Int, r: Int, s: Int, t: Int,
  u: Int, v: Int, w: Int, x: Int,
  y: Int, z: Int
)

class LargeTable(tag: Tag) extends Table[Large](tag, "large") {
  def a = column[Int]("a")
  def b = column[Int]("b")
  def c = column[Int]("c")
  def d = column[Int]("d")
  def e = column[Int]("e")
  def f = column[Int]("f")
  def g = column[Int]("g")
  def h = column[Int]("h")
  def i = column[Int]("i")
  def j = column[Int]("j")
  def k = column[Int]("k")
  def l = column[Int]("l")
  def m = column[Int]("m")
  def n = column[Int]("n")
  def o = column[Int]("o")
  def p = column[Int]("p")
  def q = column[Int]("q")
  def r = column[Int]("r")
  def s = column[Int]("s")
  def t = column[Int]("t")
  def u = column[Int]("u")
  def v = column[Int]("v")
  def w = column[Int]("w")
  def x = column[Int]("x")
  def y = column[Int]("y")
  def z = column[Int]("z")

  def * = (
    a :: b :: c :: d ::
    e :: f :: g :: h ::
    i :: j :: k :: l ::
    m :: n :: o :: p ::
    q :: r :: s :: t ::
    u :: v :: w :: x ::
    y :: z :: HNil
  ).mappedWith(Generic[Large])
}

import scala.concurrent.ExecutionContext.Implicits.global

class LargeSpec extends slickless.Spec {

  "slick tables with >22 column mappings" - {
    "should support inserts and selects" in {
      val LargeTable = TableQuery[LargeTable]

      val db = Database.forConfig("h2")

      val large = Large(
         1,  2,  3,  4, 
         5,  6,  7,  8, 
         9, 10, 11, 12, 
        13, 14, 15, 16, 
        17, 18, 19, 20, 
        21, 22, 23, 24, 
        25, 26
      )

      val action = for {
        _   <- LargeTable.schema.create
        _   <- LargeTable += large
        ans <- LargeTable.result.head
        _   <- LargeTable.schema.drop
      } yield ans

      whenReady(db.run(action)) { _ should equal(large) }
    }
  }
} 
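The HList projection is what makes this 26-column mapping possible: Scala 2 tuples stop at 22 elements, so the usual (a, b, ...).mapTo[Large] projection is not available here. A minimal sketch of the shapeless Generic round-trip that mappedWith relies on, with purely illustrative values:

import shapeless._

val gen = Generic[Large]

// Case class -> HList -> case class, without loss.
val asHList = gen.to(Large(
   1,  2,  3,  4,  5,  6,  7,  8,  9, 10, 11, 12, 13,
  14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26))
val back: Large = gen.from(asHList)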
Example 30
Source File: CrystallisationResource.scala    From self-assessment-api   with Apache License 2.0 5 votes vote down vote up
package router.resources

import javax.inject.Inject
import play.api.libs.json.{JsNull, JsValue}
import play.api.mvc.{Action, AnyContent, BodyParser, ControllerComponents}
import router.constants.Versions
import router.constants.Versions._
import router.services.{CrystallisationService, Service}
import uk.gov.hmrc.auth.core.AuthConnector
import scala.concurrent.ExecutionContext.Implicits.global

class CrystallisationResource @Inject()(service: CrystallisationService,
                                        val cc: ControllerComponents,
                                        val authConnector: AuthConnector) extends BaseResource(cc, authConnector) with Service {

  def post(param: Any*): Action[JsValue] = AuthAction.async(parse.json) {
    implicit request =>
      withJsonBody[JsValue] {
        service.post(_).map {
          case Left(error) => buildErrorResponse(error)
          case Right(apiResponse) => buildResponse(apiResponse)
        }
      }
  }

  // Note that the intent endpoint requires empty JSON (i.e. {}) in V1, whereas V2 requires a completely empty body,
  // so we parse accordingly, surfacing the empty body as JsNull
  private val jsonOrEmptyParser: BodyParser[JsValue] = parse.using { request =>

    if (Versions.getFromRequest(request).contains(VERSION_1))
      parse.json
    else
      parse.empty.map(_ => JsNull)
  }

  def intent(param: Any*): Action[JsValue] = AuthAction.async(jsonOrEmptyParser) { implicit request =>
    withJsonBody[JsValue] { body =>
      val serviceOutcome = body match {
        case JsNull => service.postEmpty
        case json => service.post(json)
      }

      serviceOutcome.map {
        case Left(error) => buildErrorResponse(error)
        case Right(apiResponse) => buildResponse(apiResponse)
      }
    }
  }

  def get(param: Any*): Action[AnyContent] = {
    AuthAction.async {
      implicit request =>
        service.get().map{
          case Left(error) => buildErrorResponse(error)
          case Right(apiResponse) => buildResponse(apiResponse)
        }
    }
  }
} 
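The version-dependent parser above follows a general Play pattern: parse.using inspects the RequestHeader and selects a concrete BodyParser per request. Here is a stripped-down sketch of that pattern in isolation; the Accept-header check is an assumption for illustration, whereas the real resource delegates to Versions.getFromRequest.

import play.api.libs.json.{JsNull, JsValue}
import play.api.mvc.{BodyParser, PlayBodyParsers}

def jsonOrEmpty(parse: PlayBodyParsers): BodyParser[JsValue] =
  parse.using { request =>
    if (request.headers.get("Accept").exists(_.contains("version=1.0")))
      parse.json                    // V1: body must be JSON (at least {})
    else
      parse.empty.map(_ => JsNull)  // V2: body must be empty; surface it as JsNull
  }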
Example 31
Source File: SelfEmploymentEopsDeclarationResourceSpec.scala    From self-assessment-api   with Apache License 2.0 5 votes vote down vote up
package router.resources

import mocks.services.MockSelfEmploymentEopsDeclarationService
import play.api.mvc.Result
import play.api.test.FakeRequest
import router.errors.{ErrorCode, IncorrectAPIVersion, UnsupportedAPIVersion}
import support.ResourceSpec
import uk.gov.hmrc.http.HttpResponse

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future

class SelfEmploymentEopsDeclarationResourceSpec extends ResourceSpec
  with MockSelfEmploymentEopsDeclarationService {

  class Setup {
    val resource = new SelfEmploymentEopsDeclarationResource(
      service = mockSelfEmploymentEopsDeclarationService,
      authConnector = mockAuthConnector,
      cc = controllerComponents
    )
    mockAuthAction
  }

  val request = FakeRequest()

  "post" should {
    "return a 204 with the response headers" when {
      "the service returns a HttpResponse containing a 204 with no json response body" in new Setup {
        MockSelfEmploymentEopsDeclarationService.post()
          .returns(Future.successful(Right(HttpResponse(NO_CONTENT, None, testHeaderResponse))))

        val result: Future[Result] = resource.post("","","","")(FakeRequest().withBody(requestJson))
        status(result) shouldBe NO_CONTENT
        headers(result) shouldBe testHeader
        contentType(result) shouldBe None
      }
    }

    "return a 200 with a json response body and response headers" when {
      "the service returns a HttpResponse containing a 200 with a json response body" in new Setup {
        MockSelfEmploymentEopsDeclarationService.post()
          .returns(Future.successful(Right(HttpResponse(OK, Some(responseJson), testHeaderResponse))))

        val result: Future[Result] = resource.post("","","","")(FakeRequest().withBody(requestJson))
        status(result) shouldBe OK
        headers(result) shouldBe testHeader
        contentType(result) shouldBe Some(JSON)
        contentAsJson(result) shouldBe responseJson
      }
    }

    "return a 406 with a json response body representing the error" when {
      "the service returns an IncorrectAPIVersion response" in new Setup {
        MockSelfEmploymentEopsDeclarationService.post()
          .returns(Future.successful(Left(IncorrectAPIVersion)))

        val result: Future[Result] = resource.post("","","","")(FakeRequest().withBody(requestJson))
        status(result) shouldBe NOT_ACCEPTABLE
        contentType(result) shouldBe Some(JSON)
        contentAsJson(result) shouldBe ErrorCode.invalidAcceptHeader.asJson
      }
    }

    "return a 404 with a json response body representing the error" when {
      "the service returns an UnsupportedAPIVersion response" in new Setup {
        MockSelfEmploymentEopsDeclarationService.post()
          .returns(Future.successful(Left(UnsupportedAPIVersion)))

        val result: Future[Result] = resource.post("","","","")(FakeRequest().withBody(requestJson))
        status(result) shouldBe NOT_FOUND
        contentType(result) shouldBe Some(JSON)
        contentAsJson(result) shouldBe ErrorCode.notFound.asJson
      }
    }
  }

} 
Example 32
Source File: PropertyEopsDeclarationResourceSpec.scala    From self-assessment-api   with Apache License 2.0 5 votes vote down vote up
package router.resources

import mocks.services.MockPropertyEopsDeclarationService
import play.api.mvc.Result
import play.api.test.FakeRequest
import router.errors.{ErrorCode, IncorrectAPIVersion, UnsupportedAPIVersion}
import support.ResourceSpec
import uk.gov.hmrc.http.HttpResponse

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future

class PropertyEopsDeclarationResourceSpec extends ResourceSpec
  with MockPropertyEopsDeclarationService {

  class Setup {
    val resource = new PropertyEopsDeclarationResource(
      service = mockPropertyEopsDeclarationService,
      authConnector = mockAuthConnector,
      cc = controllerComponents
    )
    mockAuthAction
  }

  val request = FakeRequest()

  "post" should {
    "return a 204 with the response headers" when {
      "the service returns a HttpResponse containing a 204 with no json response body" in new Setup {
        MockPropertyEopsDeclarationService.post()
          .returns(Future.successful(Right(HttpResponse(NO_CONTENT, None, testHeaderResponse))))

        val result: Future[Result] = resource.post("","","")(FakeRequest().withBody(requestJson))
        status(result) shouldBe NO_CONTENT
        headers(result) shouldBe testHeader
        contentType(result) shouldBe None
      }
    }

    "return a 200 with a json response body and response headers" when {
      "the service returns a HttpResponse containing a 200 with a json response body" in new Setup {
        MockPropertyEopsDeclarationService.post()
          .returns(Future.successful(Right(HttpResponse(OK, Some(responseJson), testHeaderResponse))))

        val result: Future[Result] = resource.post("","","")(FakeRequest().withBody(requestJson))
        status(result) shouldBe OK
        headers(result) shouldBe testHeader
        contentType(result) shouldBe Some(JSON)
        contentAsJson(result) shouldBe responseJson
      }
    }

    "return a 406 with a json response body representing the error" when {
      "the service returns an IncorrectAPIVersion response" in new Setup {
        MockPropertyEopsDeclarationService.post()
          .returns(Future.successful(Left(IncorrectAPIVersion)))

        val result: Future[Result] = resource.post("","","")(FakeRequest().withBody(requestJson))
        status(result) shouldBe NOT_ACCEPTABLE
        contentType(result) shouldBe Some(JSON)
        contentAsJson(result) shouldBe ErrorCode.invalidAcceptHeader.asJson
      }
    }

    "return a 404 with a json response body representing the error" when {
      "the service returns an UnsupportedAPIVersion response" in new Setup {
        MockPropertyEopsDeclarationService.post()
          .returns(Future.successful(Left(UnsupportedAPIVersion)))

        val result: Future[Result] = resource.post("","","")(FakeRequest().withBody(requestJson))
        status(result) shouldBe NOT_FOUND
        contentType(result) shouldBe Some(JSON)
        contentAsJson(result) shouldBe ErrorCode.notFound.asJson
      }
    }
  }

} 
Example 33
Source File: TaxCalcResourceSpec.scala    From self-assessment-api   with Apache License 2.0 5 votes vote down vote up
package router.resources

import mocks.services.MockTaxCalcService
import play.api.mvc.Result
import play.api.test.FakeRequest
import router.errors.{ErrorCode, IncorrectAPIVersion, UnsupportedAPIVersion}
import support.ResourceSpec
import uk.gov.hmrc.http.HttpResponse

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future

class TaxCalcResourceSpec extends ResourceSpec
  with MockTaxCalcService {

  class Setup {
    val resource = new TaxCalcResource(
      service = mockTaxCalcService,
      authConnector = mockAuthConnector,
      cc = controllerComponents
    )
    mockAuthAction
  }

  "get" should {
    "return a 200 with the response headers" when {
      "the service returns a HttpResponse containing a 200 with no json response body" in new Setup {
        MockTaxCalcService.get()
          .returns(Future.successful(Right(HttpResponse(OK, None, testHeaderResponse))))

        val result: Future[Result] = resource.get("","")(FakeRequest())
        status(result) shouldBe OK
        headers(result) shouldBe testHeader
        contentType(result) shouldBe None
      }
    }

    "return a 200 with a json response body and response headers" when {
      "the service returns a HttpResponse containing a 200 with a json response body" in new Setup {
        MockTaxCalcService.get()
          .returns(Future.successful(Right(HttpResponse(OK, Some(responseJson), testHeaderResponse))))

        val result: Future[Result] = resource.get("","")(FakeRequest())
        status(result) shouldBe OK
        headers(result) shouldBe testHeader
        contentType(result) shouldBe Some(JSON)
        contentAsJson(result) shouldBe responseJson
      }
    }

    "return a 406 with a json response body representing the error" when {
      "the service returns an IncorrectAPIVersion response" in new Setup {
        MockTaxCalcService.get()
          .returns(Future.successful(Left(IncorrectAPIVersion)))

        val result: Future[Result] = resource.get("","")(FakeRequest())
        status(result) shouldBe NOT_ACCEPTABLE
        contentType(result) shouldBe Some(JSON)
        contentAsJson(result) shouldBe ErrorCode.invalidAcceptHeader.asJson
      }
    }

    "return a 404 with a json response body representing the error" when {
      "the service returns an UnsupportedAPIVersion response" in new Setup {
        MockTaxCalcService.get()
          .returns(Future.successful(Left(UnsupportedAPIVersion)))

        val result: Future[Result] = resource.get("","")(FakeRequest())
        status(result) shouldBe NOT_FOUND
        contentType(result) shouldBe Some(JSON)
        contentAsJson(result) shouldBe ErrorCode.notFound.asJson
      }
    }
  }

} 
Example 34
Source File: PropertyEopsObligationsResourceSpec.scala    From self-assessment-api   with Apache License 2.0 5 votes vote down vote up
package router.resources

import mocks.services.MockTaxCalcService
import play.api.mvc.Result
import play.api.test.FakeRequest
import router.errors.{ErrorCode, IncorrectAPIVersion, UnsupportedAPIVersion}
import support.ResourceSpec
import uk.gov.hmrc.http.HttpResponse

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future

class PropertyEopsObligationsResourceSpec extends ResourceSpec
  with MockTaxCalcService {

  class Setup {
    val resource = new TaxCalcResource(
      service = mockTaxCalcService,
      authConnector = mockAuthConnector,
      cc = controllerComponents
    )
    mockAuthAction
  }

  "get" should {
    "return a 200 with the response headers" when {
      "the service returns a HttpResponse containing a 200 with no json response body" in new Setup {
        MockTaxCalcService.get()
          .returns(Future.successful(Right(HttpResponse(OK, None, testHeaderResponse))))

        val result: Future[Result] = resource.get("","")(FakeRequest())
        status(result) shouldBe OK
        headers(result) shouldBe testHeader
        contentType(result) shouldBe None
      }
    }

    "return a 200 with a json response body and response headers" when {
      "the service returns a HttpResponse containing a 200 with a json response body" in new Setup {
        MockTaxCalcService.get()
          .returns(Future.successful(Right(HttpResponse(OK, Some(responseJson), testHeaderResponse))))

        val result: Future[Result] = resource.get("","")(FakeRequest())
        status(result) shouldBe OK
        headers(result) shouldBe testHeader
        contentType(result) shouldBe Some(JSON)
        contentAsJson(result) shouldBe responseJson
      }
    }

    "return a 406 with a json response body representing the error" when {
      "the service returns an IncorrectAPIVersion response" in new Setup {
        MockTaxCalcService.get()
          .returns(Future.successful(Left(IncorrectAPIVersion)))

        val result: Future[Result] = resource.get("","")(FakeRequest())
        status(result) shouldBe NOT_ACCEPTABLE
        contentType(result) shouldBe Some(JSON)
        contentAsJson(result) shouldBe ErrorCode.invalidAcceptHeader.asJson
      }
    }

    "return a 404 with a json response body representing the error" when {
      "the service returns an UnsupportedAPIVersion response" in new Setup {
        MockTaxCalcService.get()
          .returns(Future.successful(Left(UnsupportedAPIVersion)))

        val result: Future[Result] = resource.get("","")(FakeRequest())
        status(result) shouldBe NOT_FOUND
        contentType(result) shouldBe Some(JSON)
        contentAsJson(result) shouldBe ErrorCode.notFound.asJson
      }
    }
  }

} 
Example 35
Source File: TaxCalcConnectorSpec.scala    From self-assessment-api   with Apache License 2.0 5 votes vote down vote up
package router.connectors

import mocks.MockHttp
import mocks.config.MockAppConfig
import mocks.httpParser.MockSelfAssessmentHttpParser
import play.api.http.Status
import play.api.libs.json.Json
import router.httpParsers.SelfAssessmentHttpParser.SelfAssessmentOutcome
import support.UnitSpec
import uk.gov.hmrc.http.HttpResponse

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future


class TaxCalcConnectorSpec extends UnitSpec
  with MockHttp
  with MockAppConfig
  with MockSelfAssessmentHttpParser {

  class Setup {
    object TestConnector extends TaxCalcConnector(
      mockHttp,
      mockSelfAssessmentHttpParser,
      mockAppConfig
    )
    MockAppConfig.taxCalcUrl returns taxCalcUrl
  }

  lazy val taxCalcUrl = "test-sa-api-url"
  val path = "/2.0/test-path"

  "get" should {
    "return a HttpResponse" when {
      "a successful HttpResponse is returned" in new Setup {
        val response  = HttpResponse(Status.OK, Some(Json.obj()))

        MockSelfAssessmentHttpParser.read.returns(Right(response))
        MockHttp.GET[SelfAssessmentOutcome](s"$taxCalcUrl$path").returns(Future.successful(Right(response)))
        await(TestConnector.get(path)(hc)) shouldBe Right(response)
      }
    }
  }
} 
Example 36
Source File: CrystallisationConnectorSpec.scala    From self-assessment-api   with Apache License 2.0 5 votes vote down vote up
package router.connectors


import mocks.MockHttp
import mocks.config.MockAppConfig
import mocks.httpParser.MockSelfAssessmentHttpParser
import play.api.http.Status
import play.api.libs.json.{JsValue, Json}
import router.httpParsers.SelfAssessmentHttpParser.SelfAssessmentOutcome
import support.UnitSpec
import uk.gov.hmrc.http.HttpResponse

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future

class CrystallisationConnectorSpec extends UnitSpec
  with MockHttp
  with MockAppConfig
  with MockSelfAssessmentHttpParser {


  class Setup {

    object TestConnector extends CrystallisationConnector(
      mockHttp,
      mockSelfAssessmentHttpParser,
      mockAppConfig
    )

    MockAppConfig.crystallisationApiUrl returns crystallisationApiUrl
  }

  lazy val crystallisationApiUrl = "test-di-api-url"
  val path = "/2.0/test-path"

  "post" should {
    "return a HttpResponse" when {
      "a successful HttpResponse is returned" in new Setup {
        val response = HttpResponse(Status.NO_CONTENT)
        val requestJson = Json.obj("test" -> "request json")

        MockSelfAssessmentHttpParser.read.returns(Right(response))
        MockHttp.POST[JsValue, SelfAssessmentOutcome](s"$crystallisationApiUrl$path", requestJson)
          .returns(Future.successful(Right(response)))
        await(TestConnector.post(path, requestJson)(hc)) shouldBe Right(response)
      }
    }
  }

  "postEmpty" should {
    "return a HttpResponse" when {
      "a successful HttpResponse is returned" in new Setup {
        val response = HttpResponse(Status.NO_CONTENT)

        MockSelfAssessmentHttpParser.read.returns(Right(response))
        MockHttp.POSTEmpty[SelfAssessmentOutcome](s"$crystallisationApiUrl$path")
          .returns(Future.successful(Right(response)))
        await(TestConnector.postEmpty(path)(hc)) shouldBe Right(response)
      }
    }
  }

  "retrieve" should {
    "return a HttpResponse" when {
      "a successful HttpResponse is returned" in new Setup {
        val response  = HttpResponse(Status.OK, Some(Json.obj()))

        MockSelfAssessmentHttpParser.read.returns(Right(response))
        MockHttp.GET[SelfAssessmentOutcome](s"$crystallisationApiUrl$path")
          .returns(Future.successful(Right(response)))
        await(TestConnector.get(path)(hc)) shouldBe Right(response)
      }
    }
  }
} 
Example 37
Source File: DividendsConnectorSpec.scala    From self-assessment-api   with Apache License 2.0 5 votes vote down vote up
package router.connectors

import mocks.MockHttp
import mocks.config.MockAppConfig
import mocks.httpParser.MockSelfAssessmentHttpParser
import play.api.http.Status
import play.api.libs.json.{JsValue, Json}
import router.httpParsers.SelfAssessmentHttpParser.SelfAssessmentOutcome
import support.UnitSpec
import uk.gov.hmrc.http.HttpResponse

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future

class DividendsConnectorSpec extends UnitSpec
  with MockHttp
  with MockAppConfig
  with MockSelfAssessmentHttpParser {

  class Setup {
    object TestConnector extends DividendsConnector(
      mockHttp,
      mockSelfAssessmentHttpParser,
      mockAppConfig
    )
    MockAppConfig.dividendsApiUrl returns dividendsApiUrl
  }

  lazy val dividendsApiUrl = "test-di-api-url"
  val path = "/2.0/test-path"

  "amend" should {
    "return a HttpResponse" when {
      "a successful HttpResponse is returned" in new Setup {
        val response  = HttpResponse(Status.OK, Some(Json.obj()))
        val requestJson = Json.obj("test" -> "request json")

        MockSelfAssessmentHttpParser.read.returns(Right(response))
        MockHttp.PUT[JsValue, SelfAssessmentOutcome](s"$dividendsApiUrl$path", requestJson).returns(Future.successful(Right(response)))
        await(TestConnector.put(path, requestJson)(hc)) shouldBe Right(response)
      }
    }
  }

  "retrieve" should {
    "return a HttpResponse" when {
      "a successful HttpResponse is returned" in new Setup {
        val response  = HttpResponse(Status.OK, Some(Json.obj()))

        MockSelfAssessmentHttpParser.read.returns(Right(response))
        MockHttp.GET[SelfAssessmentOutcome](s"$dividendsApiUrl$path").returns(Future.successful(Right(response)))
        await(TestConnector.get(path)(hc)) shouldBe Right(response)
      }
    }
  }
} 
Example 38
Source File: CharitableGivingConnectorSpec.scala    From self-assessment-api   with Apache License 2.0 5 votes vote down vote up
package router.connectors

import mocks.MockHttp
import mocks.config.MockAppConfig
import mocks.httpParser.MockSelfAssessmentHttpParser
import play.api.http.Status
import play.api.libs.json.{JsValue, Json}
import router.httpParsers.SelfAssessmentHttpParser.SelfAssessmentOutcome
import support.UnitSpec
import uk.gov.hmrc.http.HttpResponse

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future

class CharitableGivingConnectorSpec extends UnitSpec
  with MockHttp
  with MockAppConfig
  with MockSelfAssessmentHttpParser {

  class Setup {
    object TestConnector extends CharitableGivingConnector(
      mockHttp,
      mockSelfAssessmentHttpParser,
      mockAppConfig
    )
    MockAppConfig.cgApiUrl returns cgApiUrl
  }

  lazy val cgApiUrl = "test-cg-api-url"
  val path = "/2.0/test-path"

  "amend" should {
    "return a HttpResponse" when {
      "a successful HttpResponse is returned" in new Setup {
        val response  = HttpResponse(Status.OK, Some(Json.obj()))
        val requestJson = Json.obj("test" -> "request json")

        MockSelfAssessmentHttpParser.read.returns(Right(response))
        MockHttp.PUT[JsValue, SelfAssessmentOutcome](s"$cgApiUrl$path", requestJson).returns(Future.successful(Right(response)))
        await(TestConnector.put(path, requestJson)(hc)) shouldBe Right(response)
      }
    }
  }

  "retrieve" should {
    "return a HttpResponse" when {
      "a successful HttpResponse is returned" in new Setup {
        val response  = HttpResponse(Status.OK, Some(Json.obj()))

        MockSelfAssessmentHttpParser.read.returns(Right(response))
        MockHttp.GET[SelfAssessmentOutcome](s"$cgApiUrl$path").returns(Future.successful(Right(response)))
        await(TestConnector.get(path)(hc)) shouldBe Right(response)
      }
    }
  }
} 
Example 39
Source File: SavingsAccountsConnectorSpec.scala    From self-assessment-api   with Apache License 2.0 5 votes vote down vote up
package router.connectors

import mocks.MockHttp
import mocks.config.MockAppConfig
import mocks.httpParser.MockSelfAssessmentHttpParser
import play.api.http.Status
import play.api.libs.json.{JsValue, Json}
import router.httpParsers.SelfAssessmentHttpParser.SelfAssessmentOutcome
import support.UnitSpec
import uk.gov.hmrc.http.HttpResponse

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future

class SavingsAccountsConnectorSpec extends UnitSpec
  with MockHttp
  with MockAppConfig
  with MockSelfAssessmentHttpParser {

  class Setup {
    object TestConnector extends SavingsAccountConnector(
      mockHttp,
      mockSelfAssessmentHttpParser,
      mockAppConfig
    )
    MockAppConfig.savingsAccountsApiUrl returns savingsAccountsApiUrl
  }

  lazy val savingsAccountsApiUrl = "test-di-api-url"
  val path = "/2.0/test-path"

  "create" should {
    "return a HttpResponse" when {
      "a successful HttpResponse is returned" in new Setup {
        val response  = HttpResponse(Status.CREATED, Some(Json.obj()))
        val requestJson = Json.obj("test" -> "request json")

        MockSelfAssessmentHttpParser.read.returns(Right(response))
        MockHttp.POST[JsValue, SelfAssessmentOutcome](s"$savingsAccountsApiUrl$path", requestJson).returns(Future.successful(Right(response)))
        await(TestConnector.post(path, requestJson)(hc)) shouldBe Right(response)
      }
    }
  }

  "retrieve" should {
    "return a HttpResponse" when {
      "a successful HttpResponse is returned" in new Setup {
        val response  = HttpResponse(Status.OK, Some(Json.obj()))

        MockSelfAssessmentHttpParser.read.returns(Right(response))
        MockHttp.GET[SelfAssessmentOutcome](s"$savingsAccountsApiUrl$path").returns(Future.successful(Right(response)))
        await(TestConnector.get(path)(hc)) shouldBe Right(response)
      }
    }
  }

  "amend" should {
    "return a HttpResponse" when {
      "a successful HttpResponse is returned" in new Setup {
        val response  = HttpResponse(Status.NO_CONTENT)
        val requestJson = Json.obj("test" -> "request json")

        MockSelfAssessmentHttpParser.read.returns(Right(response))
        MockHttp.PUT[JsValue, SelfAssessmentOutcome](s"$savingsAccountsApiUrl$path", requestJson).returns(Future.successful(Right(response)))
        await(TestConnector.put(path, requestJson)(hc)) shouldBe Right(response)
      }
    }
  }

} 
Example 40
Source File: SelfEmploymentConnectorSpec.scala    From self-assessment-api   with Apache License 2.0 5 votes vote down vote up
package router.connectors

import mocks.MockHttp
import mocks.config.MockAppConfig
import mocks.httpParser.MockSelfAssessmentHttpParser
import play.api.http.Status
import play.api.libs.json.{JsValue, Json}
import router.httpParsers.SelfAssessmentHttpParser.SelfAssessmentOutcome
import support.UnitSpec
import uk.gov.hmrc.http.HttpResponse

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future

class SelfEmploymentConnectorSpec extends UnitSpec
  with MockHttp
  with MockAppConfig
  with MockSelfAssessmentHttpParser {

  class Setup {
    object TestConnector extends SelfEmploymentConnector(
      mockHttp,
      mockSelfAssessmentHttpParser,
      mockAppConfig
    )
    MockAppConfig.selfEmploymentUrl returns selfEmploymentUrl
  }

  lazy val selfEmploymentUrl = "test-sa-api-url"
  val path = "/2.0/test-path"

  "post" should {
    "return an HttpResponse" when {
      "a successful HttpResponse with no content is returned" in new Setup {
        val response  = HttpResponse(Status.NO_CONTENT)
        val requestJson = Json.obj("test" -> "request json")

        MockHttp.POST[JsValue, SelfAssessmentOutcome](s"$selfEmploymentUrl$path", requestJson).returns(Future.successful(Right(response)))
        await(TestConnector.post(path, requestJson)(hc)) shouldBe Right(response)
      }
    }
  }
} 
Example 41
Source File: PropertyConnectorSpec.scala    From self-assessment-api   with Apache License 2.0 5 votes vote down vote up
package router.connectors


import mocks.MockHttp
import mocks.config.MockAppConfig
import mocks.httpParser.MockSelfAssessmentHttpParser
import play.api.http.Status
import play.api.libs.json.{JsValue, Json}
import router.httpParsers.SelfAssessmentHttpParser.SelfAssessmentOutcome
import support.UnitSpec
import uk.gov.hmrc.http.HttpResponse

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future

class PropertyConnectorSpec extends UnitSpec
  with MockHttp
  with MockAppConfig
  with MockSelfAssessmentHttpParser {

  class Setup {
    object TestConnector extends PropertyConnector(
      mockHttp,
      mockSelfAssessmentHttpParser,
      mockAppConfig
    )
    MockAppConfig.propertyUrl returns propertyUrl
  }

  lazy val propertyUrl = "test-sa-api-url"
  val path = "/2.0/test-path"

  "get" should {
    "return a HttpResponse" when {
      "a successful HttpResponse is returned" in new Setup {
        val response  = HttpResponse(Status.OK, Some(Json.obj()))

        MockSelfAssessmentHttpParser.read.returns(Right(response))
        MockHttp.GET[SelfAssessmentOutcome](s"$propertyUrl$path").returns(Future.successful(Right(response)))
        await(TestConnector.get(path)(hc)) shouldBe Right(response)
      }
    }
  }

  "post" should {
    "return an HttpResponse" when {
      "a successful HttpResponse with no content is returned" in new Setup {
        val response = HttpResponse(Status.NO_CONTENT)
        val requestJson = Json.obj("test" -> "request json")

        MockHttp.POST[JsValue, SelfAssessmentOutcome](s"$propertyUrl$path", requestJson).returns(Future.successful(Right(response)))
        await(TestConnector.post(path, requestJson)(hc)) shouldBe Right(response)
      }
    }
  }
} 
Example 42
Source File: SelfAssessmentConnectorSpec.scala    From self-assessment-api   with Apache License 2.0 5 votes vote down vote up
package router.connectors

import mocks.MockHttp
import mocks.config.MockAppConfig
import mocks.httpParser.MockSelfAssessmentHttpParser
import play.api.http.Status
import play.api.libs.json.Json
import router.httpParsers.SelfAssessmentHttpParser.SelfAssessmentOutcome
import support.UnitSpec
import uk.gov.hmrc.http.HttpResponse

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future

class SelfAssessmentConnectorSpec extends UnitSpec
  with MockHttp
  with MockAppConfig
  with MockSelfAssessmentHttpParser {

  class Setup {
    object TestConnector extends SelfAssessmentConnector(
      mockHttp,
      mockSelfAssessmentHttpParser,
      mockAppConfig
    )
    MockAppConfig.saApiUrl returns saApiUrl
  }

  lazy val saApiUrl = "test-sa-api-url"
  val path = "/test-path"

  "get" should {
    "return a HttpResponse" when {
      "a successful HttpResponse is returned" in new Setup {
        val response  = HttpResponse(Status.OK, Some(Json.obj()))

        MockSelfAssessmentHttpParser.read.returns(Right(response))
        MockHttp.GET[SelfAssessmentOutcome](s"$saApiUrl$path").returns(Future.successful(Right(response)))
        await(TestConnector.get(path)(hc)) shouldBe Right(response)
      }
    }
  }
} 
Example 43
Source File: TodoPresenter.scala    From udash-demos   with GNU General Public License v3.0 5 votes vote down vote up
package io.udash.todo.views.todo

import io.udash._
import io.udash.logging.CrossLogging
import io.udash.todo._
import io.udash.todo.rpc.model.{Todo => STodo}
import io.udash.todo.storage.TodoStorage

import scala.util.{Failure, Success}

import scala.concurrent.ExecutionContext.Implicits.global

class TodoPresenter(model: ModelProperty[TodoViewModel], todoStorage: TodoStorage)
  extends Presenter[TodoState] with CrossLogging {

  private val todos = model.subSeq(_.todos)

  // Toggle button state update listener
  private val toggleButtonListener = todos.listen { todos =>
    model.subProp(_.toggleAllChecked).set(todos.forall(_.completed))
  }

  // Load from storage
  todoStorage.load() onComplete {
    case Success(response) =>
      updateTodos(response)

      // Persist the todo list on every change
      todos.listen { v =>
        todoStorage.store(v.map(todo => STodo(todo.name, todo.completed)))
      }
    case Failure(ex) =>
      logger.error("Cannot load todos from server!")
  }

  // Refresh the local todo list whenever the storage reports an update
  private val todosPersistListener = todoStorage.listen { todos =>
    updateTodos(todos)
  }

  override def handleState(state: TodoState): Unit = {
    model.subProp(_.todosFilter).set(state.filter)
  }

  override def onClose(): Unit = {
    super.onClose()
    toggleButtonListener.cancel()
    todosPersistListener.cancel()
  }

  def addTodo(): Unit = {
    val nameProperty: Property[String] = model.subProp(_.newTodoName)
    val name = nameProperty.get.trim
    if (name.nonEmpty) {
      todos.append(Todo(name))
      nameProperty.set("")
    }
  }

  def startItemEdit(item: ModelProperty[Todo], nameEditor: Property[String]): Unit = {
    nameEditor.set(item.subProp(_.name).get)
    item.subProp(_.editing).set(true)
  }

  def cancelItemEdit(item: ModelProperty[Todo]): Unit =
    item.subProp(_.editing).set(false)

  def endItemEdit(item: ModelProperty[Todo], nameEditor: Property[String]): Unit = {
    val name = nameEditor.get.trim
    if (item.subProp(_.editing).get && name.nonEmpty) {
      item.subProp(_.name).set(name)
      item.subProp(_.editing).set(false)
    } else if (name.isEmpty) {
      deleteItem(item.get)
    }
  }

  def deleteItem(item: Todo): Unit =
    todos.remove(item)

  def clearCompleted(): Unit =
    todos.set(todos.get.filter(TodosFilter.Active.matcher))

  def setItemsCompleted(): Unit =
    CallbackSequencer().sequence {
      val targetValue = !model.subProp(_.toggleAllChecked).get
      todos.elemProperties.foreach(p => p.asModel.subProp(_.completed).set(targetValue))
    }

  private def updateTodos(updated: Seq[STodo]): Unit =
    todos.set(updated.map(todo => Todo(name = todo.title, completed = todo.completed)))
} 
Example 44
Source File: ExposedRpcInterfaces.scala    From udash-demos   with GNU General Public License v3.0 5 votes vote down vote up
package io.udash.todo.rpc

import io.udash.rpc._
import io.udash.todo.rpc.model.Todo
import io.udash.todo.services.TodoStorage

import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global

class ExposedRpcInterfaces(todoStorage: TodoStorage) extends MainServerRPC {
  override def store(todos: Seq[Todo]): Future[Boolean] = Future {
    if (todoStorage.store(todos)) {
      ClientRPC(AllClients).storeUpdated(todos)
      true
    } else false
  }

  override def load(): Future[Seq[Todo]] = Future {
    todoStorage.load()
  }
} 
Example 45
Source File: ContactFormPresenter.scala    From udash-demos   with GNU General Public License v3.0 5 votes vote down vote up
package io.udash.demos.rest.views.contact

import io.udash._
import io.udash.core.Presenter
import io.udash.demos.rest.model.{Contact, ContactId}
import io.udash.demos.rest.{ContactFormState, ApplicationContext, IndexState}
import org.scalajs.dom

import scala.concurrent.ExecutionContext.Implicits.global
import scala.util.{Failure, Success}

class ContactFormPresenter(model: ModelProperty[ContactEditorModel]) extends Presenter[ContactFormState] {
  import ApplicationContext._

  override def handleState(state: ContactFormState): Unit = {
    state match {
      case ContactFormState(None) =>
        model.subProp(_.loaded).set(true)
        model.subProp(_.loadingText).set("")

        model.subProp(_.isNewContact).set(true)
        model.subProp(_.firstName).set("")
        model.subProp(_.lastName).set("")
        model.subProp(_.phone).set("")
        model.subProp(_.email).set("")
      case ContactFormState(Some(id)) =>
        model.subProp(_.loaded).set(false)
        model.subProp(_.loadingText).set("Loading contact data...")
        model.subProp(_.isNewContact).set(false)

        loadContactData(id)
    }
  }

  def loadContactData(id: ContactId): Unit = {
    restServer.contacts(id).load() onComplete {
      case Success(contact) =>
        model.subProp(_.loaded).set(true)
        model.subProp(_.id).set(id)
        model.subProp(_.firstName).set(contact.firstName)
        model.subProp(_.lastName).set(contact.lastName)
        model.subProp(_.phone).set(contact.phone)
        model.subProp(_.email).set(contact.email)
      case Failure(ex) =>
        model.subProp(_.loadingText).set(s"Problem with contact details loading: $ex")
    }
  }

  def createContact(): Unit = {
    restServer.contacts().create(Contact(
      ContactId(-1),
      model.subProp(_.firstName).get,
      model.subProp(_.lastName).get,
      model.subProp(_.phone).get,
      model.subProp(_.email).get
    )) onComplete {
      case Success(contact) =>
        applicationInstance.goTo(IndexState)
      case Failure(ex) =>
        dom.window.alert(s"Contact creation failed: $ex")
    }
  }

  def updateContact(): Unit =
    restServer.contacts(model.subProp(_.id).get).update(Contact(
      model.subProp(_.id).get,
      model.subProp(_.firstName).get,
      model.subProp(_.lastName).get,
      model.subProp(_.phone).get,
      model.subProp(_.email).get
    )) onComplete {
      case Success(contact) =>
        applicationInstance.goTo(IndexState)
      case Failure(ex) =>
        dom.window.alert(s"Contact update failed: $ex")
    }
} 
Example 46
Source File: IndexPresenter.scala    From udash-demos   with GNU General Public License v3.0 5 votes vote down vote up
package io.udash.demos.rest.views.index

import io.udash._
import io.udash.demos.rest.IndexState
import io.udash.demos.rest.model.{Contact, ContactId, PhoneBookId, PhoneBookInfo}
import org.scalajs.dom

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import scala.util.{Failure, Success}

class IndexPresenter(model: ModelProperty[IndexViewModel]) extends Presenter[IndexState.type] {
  import io.udash.demos.rest.ApplicationContext._

  override def handleState(state: IndexState.type): Unit =
    refresh()

  def removeContact(id: ContactId): Unit = {
    restServer.contacts(id).remove() onComplete {
      case Success(removedContact) =>
        model.subSeq(_.contacts.elements).remove(removedContact)
        refreshPhoneBooksSizes(model.subModel(_.books))
      case Failure(ex) =>
        dom.window.alert(s"Contact removing failed! ($ex)")
    }
  }

  def removePhoneBook(id: PhoneBookId): Unit = {
    restServer.phoneBooks(id).remove() onComplete {
      case Success(_) =>
        val elements = model.subSeq(_.books.elements)
        val removed = elements.get.find(_.id == id)
        removed.foreach(elements.remove)
      case Failure(ex) =>
        dom.window.alert(s"Phone book removing failed! ($ex)")
    }
  }

  def refresh(): Unit = {
    refreshPhoneBooks(model.subModel(_.books), restServer.phoneBooks().load(), "Loading phone books...")
    refreshContacts(model.subModel(_.contacts), restServer.contacts().load(), "Loading contacts...")
  }

  private def refreshContacts(model: ModelProperty[DataLoadingModel[Contact]], elements: Future[Seq[Contact]], loadingText: String) : Unit = {
    model.subProp(_.loaded).set(false)
    model.subProp(_.loadingText).set(loadingText)

    elements onComplete {
      case Success(elems) =>
        model.subProp(_.loaded).set(true)
        model.subSeq(_.elements).set(elems)
      case Failure(ex) =>
        model.subProp(_.loadingText).set(s"Error: $ex")
    }
  }

  private def refreshPhoneBooks(model: ModelProperty[DataLoadingModel[PhoneBookExtInfo]], elements: Future[Seq[PhoneBookInfo]], loadingText: String) : Unit = {
    model.subProp(_.loaded).set(false)
    model.subProp(_.loadingText).set(loadingText)

    elements onComplete {
      case Success(elems) =>
        model.subProp(_.loaded).set(true)
        model.subSeq(_.elements).clear()

        elems.foreach { el =>
          model.subSeq(_.elements).append(
            PhoneBookExtInfo(el.id, el.name, el.description, 0)
          )
        }

        refreshPhoneBooksSizes(model)
      case Failure(ex) =>
        model.subProp(_.loadingText).set(s"Error: $ex")
    }
  }

  private def refreshPhoneBooksSizes(model: ModelProperty[DataLoadingModel[PhoneBookExtInfo]]): Unit = {
    model.subSeq(_.elements).elemProperties.foreach { el =>
      val element = el.asModel
      restServer.phoneBooks(el.get.id).contacts().count() onComplete {
        case Success(count) =>
          element.subProp(_.contactsCount).set(count)
        case Failure(ex) =>
          dom.window.alert(s"Contacts count for book ${el.get.id} loading failed: $ex")
          element.subProp(_.contactsCount).set(-1)
      }
    }
  }
} 
Example 47
Source File: IndexPresenter.scala    From udash-demos   with GNU General Public License v3.0 5 votes vote down vote up
package io.udash.demos.files.views.index

import io.udash._
import io.udash.demos.files.{ApplicationServerContexts, IndexState}
import io.udash.logging.CrossLogging

import scala.concurrent.ExecutionContext.Implicits.global
import scala.util.{Failure, Success}

class IndexPresenter(model: ModelProperty[UploadViewModel]) extends Presenter[IndexState.type] with CrossLogging {
  import io.udash.demos.files.ApplicationContext._

  private val uploader = new FileUploader(Url(ApplicationServerContexts.uploadContextPrefix))

  rpcService.listenStorageUpdate(() => reloadUploadedFiles())

  override def handleState(state: IndexState.type): Unit = {
    reloadUploadedFiles()
  }

  def uploadSelectedFiles(): Unit = {
    uploader
      .upload("files", model.subSeq(_.selectedFiles).get)
      .listen(model.subProp(_.state).set(_))
  }

  def reloadUploadedFiles(): Unit = {
    serverRpc.loadUploadedFiles() onComplete {
      case Success(files) =>
        model.subProp(_.uploadedFiles).set(files)
      case Failure(ex) =>
        logger.error(ex.getMessage)
    }
  }
} 
Example 48
Source File: DemoFileUploadServlet.scala    From udash-demos   with GNU General Public License v3.0 5 votes vote down vote up
package io.udash.demos.files.jetty

import scala.concurrent.ExecutionContext.Implicits.global
import java.io.{File, InputStream}
import java.nio.file.Files
import java.util.UUID

import io.udash.demos.files.UploadedFile
import io.udash.demos.files.rpc.ClientRPC
import io.udash.demos.files.services.FilesStorage
import io.udash.rpc._

class DemoFileUploadServlet(uploadDir: String) extends FileUploadServlet(Set("file", "files")) {
  new File(uploadDir).mkdir()

  override protected def handleFile(name: String, content: InputStream): Unit = {
    val targetName: String = s"${UUID.randomUUID()}_${name.replaceAll("[^a-zA-Z0-9.-]", "_")}"
    val targetFile = new File(uploadDir, targetName)
    Files.copy(content, targetFile.toPath)
    FilesStorage.add(
      UploadedFile(name, targetName, targetFile.length())
    )

    // Notify clients
    ClientRPC(AllClients).fileStorageUpdated()
  }
} 
Example 49
Source File: UserRegistrationService.scala    From scala-play-realworld-example-app   with MIT License 5 votes vote down vote up
package users.services

import authentication.api.SecurityUserCreator
import commons.exceptions.ValidationException
import commons.repositories.DateTimeProvider
import commons.utils.DbioUtils
import authentication.models.{NewSecurityUser, SecurityUserId}
import users.models.{User, UserId, UserRegistration}
import users.repositories.UserRepo
import play.api.Configuration
import slick.dbio.DBIO

import scala.concurrent.ExecutionContext.Implicits.global

private[users] class UserRegistrationService(userRegistrationValidator: UserRegistrationValidator,
                                             securityUserCreator: SecurityUserCreator,
                                             dateTimeProvider: DateTimeProvider,
                                             userRepo: UserRepo,
                                             config: Configuration) {

  private val defaultImage = Some(config.get[String]("app.defaultImage"))

  def register(userRegistration: UserRegistration): DBIO[User] = {
    for {
      _ <- validate(userRegistration)
      user <- doRegister(userRegistration)
    } yield user
  }

  private def validate(userRegistration: UserRegistration) = {
    userRegistrationValidator.validate(userRegistration)
      .flatMap(violations => DbioUtils.fail(violations.isEmpty, new ValidationException(violations)))
  }

  private def doRegister(userRegistration: UserRegistration) = {
    val newSecurityUser = NewSecurityUser(userRegistration.email, userRegistration.password)
    for {
      securityUser <- securityUserCreator.create(newSecurityUser)
      now = dateTimeProvider.now
      user = User(UserId(-1), securityUser.id, userRegistration.username, userRegistration.email, null, defaultImage,
        now, now)
      savedUser <- userRepo.insertAndGet(user)
    } yield savedUser
  }
} 
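The for-comprehensions above desugar to DBIO.flatMap and DBIO.map, both of which take an implicit ExecutionContext; that is the only reason the global one is imported in this service. A minimal, self-contained sketch of the same composition style, where the lookup is a stand-in rather than part of the real repository API:

import slick.dbio.DBIO
import scala.concurrent.ExecutionContext.Implicits.global

def lookupName(id: Long): DBIO[String] = DBIO.successful(s"user-$id")

// flatMap/map on DBIO need the implicit ExecutionContext in scope.
val program: DBIO[String] = for {
  first  <- lookupName(1L)
  second <- lookupName(2L)
} yield s"$first and $second"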
Example 50
Source File: ContextPassingSpec.scala    From sangria   with Apache License 2.0 5 votes vote down vote up
package sangria.execution

import sangria.parser.QueryParser
import sangria.schema._
import sangria.util.FutureResultSupport

import scala.util.Success

import scala.concurrent.ExecutionContext.Implicits.global
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class ContextPassingSpec extends AnyWordSpec with Matchers with FutureResultSupport {
  trait ColorComponent {
    def color = "green"
  }

  trait NameComponent {
    def name = "foo"
  }

  trait PersonComponent {
    this: NameComponent =>

    def fullName = name + " bar"
  }

  class Cake extends ColorComponent with NameComponent with PersonComponent

  val ColorType = ObjectType("Color", fields[ColorComponent with NameComponent, Unit](
    Field("colorName", StringType, resolve = _.ctx.color),
    Field("name", StringType, resolve = _.ctx.name)))

  val NameType = ObjectType("Name", fields[NameComponent, Unit](
    Field("name", StringType, resolve = _.ctx.name)))

  val PersonType = ObjectType("Person", fields[PersonComponent, Unit](
    Field("fullName", StringType, resolve = _.ctx.fullName),
    Field("name", NameType, resolve = _ => ())))

  def colorField[Ctx <: ColorComponent with NameComponent] =
    Field("color", ColorType, None, resolve = (ctx: Context[Ctx, Unit]) => ())

  val QueryType = ObjectType("Query", fields[Cake, Unit](
    colorField,
    Field("person", PersonType, resolve = _ => ())
  ))

  val schema = Schema(QueryType)

  "Context" should {
    "should respect inheritance" in {
      val Success(doc) = QueryParser.parse("""
        {
          color {name, colorName}
          person {
            name {name}
            fullName
          }
        }
        """)

      Executor.execute(schema, doc, userContext = new Cake).await should be (Map(
        "data" -> Map(
          "color" -> Map(
            "name" -> "foo",
            "colorName" -> "green"),
          "person" -> Map(
            "name" -> Map("name" -> "foo"),
            "fullName" -> "foo bar"))))
    }
  }

} 
Example 51
Source File: FutureResultSupport.scala    From sangria   with Apache License 2.0 5 votes vote down vote up
package sangria.util

import sangria.execution.{ErrorWithResolver, QueryAnalysisError}
import sangria.marshalling.ResultMarshallerForType

import language.postfixOps
import scala.concurrent.{Await, ExecutionContext, Future}
import scala.concurrent.duration._
import scala.concurrent.ExecutionContext.Implicits.global

trait FutureResultSupport {
  implicit class FutureResult[T](f: Future[T]) {
    def await = Await.result(f, 10 seconds)
    def await(duration: Duration) = Await.result(f, duration)

    def awaitAndRecoverQueryAnalysis(implicit m: ResultMarshallerForType[T]): T = Await.result(recoverQueryAnalysis, 10 seconds)

    def recoverQueryAnalysis(implicit m: ResultMarshallerForType[T]): Future[T] = f.recover {
      case analysisError: QueryAnalysisError => analysisError.resolveError(m.marshaller).asInstanceOf[T]
    }

    def awaitAndRecoverQueryAnalysisScala(implicit ev: T =:= Any) = Await.result(recoverQueryAnalysisScala, 10 seconds)

    def recoverQueryAnalysisScala(implicit ev: T =:= Any) = f.recover {
      case analysisError: ErrorWithResolver => analysisError.resolveError
    }
  }


  object sync {
    val executionContext = ExecutionContext.fromExecutor(new java.util.concurrent.Executor {
      def execute(command: Runnable) = command.run()
    })
  }
} 
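The sync.executionContext above is a same-thread executor: every task runs inline on the calling thread, which keeps test execution deterministic at the cost of parallelism. A small illustration of the idea; the names are only for the example.

import scala.concurrent.{ExecutionContext, Future}

val sameThreadEc: ExecutionContext = ExecutionContext.fromExecutor(
  new java.util.concurrent.Executor {
    def execute(command: Runnable): Unit = command.run()  // run inline, no thread pool
  })

// Completed by the time Future.apply returns, because the body ran inline.
val f = Future(Thread.currentThread().getName)(sameThreadEc)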
Example 52
Source File: SchemaDefinitionSpec.scala    From sangria   with Apache License 2.0 5 votes vote down vote up
package sangria.schema

import sangria.ast
import sangria.execution.Executor
import sangria.validation.StringCoercionViolation
import sangria.introspection.{IntrospectionParser, introspectionQuery}
import sangria.util.FutureResultSupport
import sangria.marshalling.queryAst._
import sangria.parser.DeliveryScheme.Throw

import scala.concurrent.ExecutionContext.Implicits.global
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class SchemaDefinitionSpec extends AnyWordSpec with Matchers with FutureResultSupport {
  "Schema" should {
    "collect all reachable types in `additionalTypes`" in {
      val CustomScalarType = ScalarType[String]("CustomScalar",
        coerceOutput = valueOutput,
        coerceUserInput = {
          case s: String => Right(s)
          case _ => Left(StringCoercionViolation)
        },
        coerceInput = {
          case ast.StringValue(s, _, _, _, _) => Right(s)
          case _ => Left(StringCoercionViolation)
        })

      val NamedType = InterfaceType("Named", fields[Unit, Unit](
        Field("name", OptionType(StringType), resolve = _ => None),
        Field("custom", OptionType(CustomScalarType), resolve = _ => None)))

      val DogType = ObjectType("Dog", interfaces[Unit, Unit](NamedType), fields[Unit, Unit](
        Field("barks", OptionType(BooleanType), resolve = _ => None)))

      val CatType = ObjectType("Cat", interfaces[Unit, Unit](NamedType), fields[Unit, Unit](
        Field("meows", OptionType(BooleanType), resolve = _ => None)))

      val queryType = ObjectType("Query", fields[Unit, Unit](
        Field("foo", OptionType(StringType), resolve = _ => None)))

      val schema = Schema(queryType, additionalTypes = DogType :: CatType :: Nil)

      val introspection = IntrospectionParser.parse(Executor.execute(schema, introspectionQuery).await)

      val fromIntro = introspection.types.map(_.name).toSet

      schema.types.keySet should be (fromIntro)

      List(schema.types.keySet, fromIntro) foreach { typeNames =>
        typeNames should (
          contain("Named") and
          contain("Dog") and
          contain("Cat") and
          contain("CustomScalar"))
      }
    }
  }
} 
Example 53
Source File: ParallelIteratorExecutor.scala    From codepropertygraph   with Apache License 2.0 5 votes vote down vote up
package io.shiftleft.passes

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.Duration
import scala.concurrent.{Await, Future}

class ParallelIteratorExecutor[T](iterator: Iterator[T]) {
  def map[D](func: T => D): Iterator[D] = {
    val futures = Future.traverse(iterator) { element =>
      Future {
        func(element)
      }
    }
    Await.result(futures, Duration.Inf)
  }
} 
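A usage sketch for the helper above, with illustrative values: the mapping function runs in parallel on the global ExecutionContext, and results keep the input order because Future.traverse preserves it.

val squares = new ParallelIteratorExecutor(Iterator(1, 2, 3, 4)).map(x => x * x)
println(squares.toList)  // List(1, 4, 9, 16)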
Example 54
Source File: TestSegmentReadingForMultiThreading.scala    From carbondata   with Apache License 2.0 5 votes vote down vote up
package org.apache.carbondata.spark.testsuite.segmentreading

import java.util.concurrent.TimeUnit

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.Duration
import scala.concurrent.{Await, Future}

import org.apache.spark.sql.{CarbonUtils, Row}
import org.apache.spark.sql.test.util.QueryTest
import org.scalatest.BeforeAndAfterAll



class TestSegmentReadingForMultiThreading extends QueryTest with BeforeAndAfterAll {

  override def beforeAll: Unit = {
    sql("DROP TABLE IF EXISTS carbon_table_MulTI_THread")
    sql(
      "CREATE TABLE carbon_table_MulTI_THread (empno int, empname String, designation String, doj " +
      "Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, deptname " +
      "String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance " +
      "int,utilization int,salary int) STORED AS carbondata")
    sql(
      s"LOAD DATA LOCAL INPATH '$resourcesPath/data.csv' INTO TABLE carbon_table_MulTI_THread " +
      "OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')")
    sql(
      s"LOAD DATA LOCAL INPATH '$resourcesPath/data1.csv' INTO TABLE carbon_table_MulTI_THread " +
      "OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')")
    sql(
      s"LOAD DATA LOCAL INPATH '$resourcesPath/data.csv' INTO TABLE carbon_table_MulTI_THread " +
      "OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')")
    sql(
      s"LOAD DATA LOCAL INPATH '$resourcesPath/data1.csv' INTO TABLE carbon_table_MulTI_THread " +
      "OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')")
  }

  test("test multithreading for segment reading") {


    CarbonUtils.threadSet("carbon.input.segments.default.carbon_table_MulTI_THread", "1,2,3")
    val df = sql("select count(empno) from carbon_table_MulTI_THread")
    checkAnswer(df, Seq(Row(30)))

    val four = Future {
      CarbonUtils.threadSet("carbon.input.segments.default.carbon_table_MulTI_THread", "1,3")
      val df = sql("select count(empno) from carbon_table_MulTI_THread")
      checkAnswer(df, Seq(Row(20)))
    }

    val three = Future {
      CarbonUtils.threadSet("carbon.input.segments.default.carbon_table_MulTI_THread", "0,1,2")
      val df = sql("select count(empno) from carbon_table_MulTI_THread")
      checkAnswer(df, Seq(Row(30)))
    }


    val one = Future {
      CarbonUtils.threadSet("carbon.input.segments.default.carbon_table_MulTI_THread", "0,2")
      val df = sql("select count(empno) from carbon_table_MulTI_THread")
      checkAnswer(df, Seq(Row(20)))
    }

    val two = Future {
      CarbonUtils.threadSet("carbon.input.segments.default.carbon_table_MulTI_THread", "1")
      val df = sql("select count(empno) from carbon_table_MulTI_THread")
      checkAnswer(df, Seq(Row(10)))
    }
    Await.result(Future.sequence(Seq(one, two, three, four)), Duration(300, TimeUnit.SECONDS))
  }

  override def afterAll: Unit = {
    sql("DROP TABLE IF EXISTS carbon_table_MulTI_THread")
    CarbonUtils.threadUnset("carbon.input.segments.default.carbon_table_MulTI_THread")
  }
} 
Example 55
Source File: SubscriptionManagementSpec.scala    From chronicler   with Apache License 2.0 5 votes vote down vote up
package com.github.fsanaulla.chronicler.ahc.management

import com.github.fsanaulla.chronicler.core.duration._
import com.github.fsanaulla.chronicler.core.enums.{Destination, Destinations}
import com.github.fsanaulla.chronicler.core.model.Subscription
import com.github.fsanaulla.chronicler.testing.it.{DockerizedInfluxDB, Futures}
import org.scalatest.{FlatSpec, Matchers}

import scala.concurrent.ExecutionContext.Implicits.global


class SubscriptionManagementSpec
  extends FlatSpec
  with Matchers
  with Futures
  with DockerizedInfluxDB {

  override def afterAll(): Unit = {
    influx.close()
    super.afterAll()
  }

  val subName                       = "subs"
  val dbName                        = "async_subs_spec_db"
  val rpName                        = "subs_rp"
  val destType: Destination         = Destinations.ANY
  val newDestType: Destination      = Destinations.ALL
  val hosts                         = Array("udp://h1.example.com:9090", "udp://h2.example.com:9090")
  val subscription                  = Subscription(rpName, subName, destType, hosts)
  val newSubscription: Subscription = subscription.copy(destType = newDestType)

  val duration: String = 1.hours + 30.minutes

  lazy val influx: AhcManagementClient =
    InfluxMng(host, port, Some(creds))

  "Subscription API" should "create subscription" in {

    influx.createDatabase(dbName).futureValue.right.get shouldEqual 200

    influx
      .createRetentionPolicy(rpName, dbName, duration, 1, Some(duration))
      .futureValue
      .right
      .get shouldEqual 200

    influx.showDatabases().futureValue.right.get.contains(dbName) shouldEqual true

    influx
      .createSubscription(subName, dbName, rpName, destType, hosts)
      .futureValue
      .right
      .get shouldEqual 200

    val subscr = influx.showSubscriptionsInfo.futureValue.right.get.headOption
      .flatMap(_.subscriptions.headOption)
      .get

    subscr.subsName shouldEqual subscription.subsName
    subscr.addresses shouldEqual subscription.addresses
    subscr.destType shouldEqual subscription.destType
    subscr.addresses.toList shouldEqual subscription.addresses.toList
  }

  it should "drop subscription" in {
    influx.dropSubscription(subName, dbName, rpName).futureValue.right.get shouldEqual 200

    influx.showSubscriptionsInfo.futureValue.right.get shouldEqual Nil

    influx.dropRetentionPolicy(rpName, dbName).futureValue.right.get shouldEqual 200

    influx.dropDatabase(dbName).futureValue.right.get shouldEqual 200
  }
} 
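The chronicler specs above all follow the same assertion style: each client call returns a Future of an Either, which is unwrapped with futureValue (blocking with a patience config) and then .right.get before the shouldEqual check. A minimal sketch of that style against a stubbed response, assuming the project's Futures helper behaves like ScalaTest's ScalaFutures (used directly here):

import org.scalatest.concurrent.ScalaFutures
import org.scalatest.{FlatSpec, Matchers}

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future

class EitherFutureSketchSpec extends FlatSpec with Matchers with ScalaFutures {

  // Stand-in for a chronicler call such as influx.createDatabase(...).
  def stubbedStatus: Future[Either[Throwable, Int]] = Future(Right(200))

  "futureValue" should "block on the Future and expose the Either for assertion" in {
    // Mirrors the `.futureValue.right.get shouldEqual 200` chains in the specs above.
    stubbedStatus.futureValue.right.get shouldEqual 200
  }
}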
Example 56
Source File: RetentionPolicyManagerSpec.scala    From chronicler   with Apache License 2.0 5 votes vote down vote up
package com.github.fsanaulla.chronicler.ahc.management

import com.github.fsanaulla.chronicler.core.duration._
import com.github.fsanaulla.chronicler.core.model.RetentionPolicyInfo
import com.github.fsanaulla.chronicler.testing.it.{DockerizedInfluxDB, Futures}
import org.scalatest.{FlatSpec, Matchers}

import scala.concurrent.ExecutionContext.Implicits.global


class RetentionPolicyManagerSpec
  extends FlatSpec
  with Matchers
  with Futures
  with DockerizedInfluxDB {

  override def afterAll(): Unit = {
    influx.close()
    super.afterAll()
  }

  val rpDB = "db"

  lazy val influx: AhcManagementClient =
    InfluxMng(host, port, Some(creds))

  "Retention policy API" should "create retention policy" in {
    influx.createDatabase(rpDB).futureValue.right.get shouldEqual 200

    influx.showDatabases().futureValue.right.get.contains(rpDB) shouldEqual true

    influx
      .createRetentionPolicy("test", rpDB, 2.hours, 2, Some(2.hours), default = true)
      .futureValue
      .right
      .get shouldEqual 200

    influx
      .showRetentionPolicies(rpDB)
      .futureValue
      .right
      .get
      .contains(RetentionPolicyInfo("test", "2h0m0s", "2h0m0s", 2, default = true)) shouldEqual true

  }

  it should "drop retention policy" in {
    influx.dropRetentionPolicy("autogen", rpDB).futureValue.right.get shouldEqual 200

    influx.showRetentionPolicies(rpDB).futureValue.right.get shouldEqual Seq(
      RetentionPolicyInfo("test", "2h0m0s", "2h0m0s", 2, default = true)
    )
  }

  it should "update retention policy" in {
    influx.updateRetentionPolicy("test", rpDB, Some(3.hours)).futureValue.right.get shouldEqual 200

    influx.showRetentionPolicies(rpDB).futureValue.right.get shouldEqual Seq(
      RetentionPolicyInfo("test", "3h0m0s", "2h0m0s", 2, default = true)
    )
  }

  it should "clean up everything" in {
    influx.dropRetentionPolicy("test", rpDB).futureValue.right.get shouldEqual 200

    influx.showRetentionPolicies(rpDB).futureValue.right.get.toList shouldEqual Nil

    influx.dropDatabase(rpDB).futureValue.right.get shouldEqual 200

    influx.showDatabases().futureValue.right.get.contains(rpDB) shouldEqual false
  }
} 
Example 57
Source File: AuthenticationSpec.scala    From chronicler   with Apache License 2.0 5 votes vote down vote up
package com.github.fsanaulla.chronicler.ahc.management

import com.github.fsanaulla.chronicler.core.enums.Privileges
import com.github.fsanaulla.chronicler.core.model.{InfluxException, UserPrivilegesInfo}
import com.github.fsanaulla.chronicler.testing.it.{DockerizedInfluxDB, Futures}
import org.scalatest.{FlatSpec, Matchers}

import scala.concurrent.ExecutionContext.Implicits.global


class AuthenticationSpec extends FlatSpec with Matchers with Futures with DockerizedInfluxDB {

  override def afterAll(): Unit = {
    influx.close()
    authInflux.close()
    super.afterAll()
  }

  val userDB    = "db"
  val userName  = "some_user"
  val userPass  = "some_user_pass"
  val userNPass = "some_new_user_pass"

  val admin     = "admin"
  val adminPass = "admin"

  lazy val influx: AhcManagementClient =
    InfluxMng(host, port)

  lazy val authInflux: AhcManagementClient =
    InfluxMng(host = host, port = port, credentials = Some(creds))

  "Authenticated User Management API" should "create admin user " in {
    influx.showUsers.futureValue.left.get shouldBe a[InfluxException]
  }

  it should "create database" in {
    authInflux.createDatabase(userDB).futureValue.right.get shouldEqual 200
  }

  it should "create user" in {
    authInflux.createUser(userName, userPass).futureValue.right.get shouldEqual 200
    authInflux.showUsers.futureValue.right.get.exists(_.username == userName) shouldEqual true
  }

  it should "set user password" in {
    authInflux.setUserPassword(userName, userNPass).futureValue.right.get shouldEqual 200
  }

  it should "set user privileges" in {
    authInflux
      .setPrivileges(userName, userDB, Privileges.READ)
      .futureValue
      .right
      .get shouldEqual 200
  }

  it should "get user privileges" in {
    val userPrivs = authInflux.showUserPrivileges(userName).futureValue.right.get

    userPrivs.length shouldEqual 1
    userPrivs.exists { upi =>
      upi.database == userDB && upi.privilege == Privileges.withName("READ")
    } shouldEqual true
  }

  it should "revoke user privileges" in {
    authInflux
      .revokePrivileges(userName, userDB, Privileges.READ)
      .futureValue
      .right
      .get shouldEqual 200
    authInflux.showUserPrivileges(userName).futureValue.right.get shouldEqual Array(
      UserPrivilegesInfo(userDB, Privileges.NO_PRIVILEGES)
    )
  }

  it should "drop user" in {
    authInflux.dropUser(userName).futureValue.right.get shouldEqual 200
    authInflux.dropUser(admin).futureValue.right.get shouldEqual 200
  }
} 
Example 58
Source File: UserManagementSpec.scala    From chronicler   with Apache License 2.0 5 votes vote down vote up
package com.github.fsanaulla.chronicler.ahc.management

import com.github.fsanaulla.chronicler.core.enums.Privileges
import com.github.fsanaulla.chronicler.core.model.{UserInfo, UserPrivilegesInfo}
import com.github.fsanaulla.chronicler.testing.it.{DockerizedInfluxDB, Futures}
import org.scalatest.{FlatSpec, Matchers}

import scala.concurrent.ExecutionContext.Implicits.global


class UserManagementSpec extends FlatSpec with Matchers with Futures with DockerizedInfluxDB {

  override def afterAll(): Unit = {
    influx.close()
    super.afterAll()
  }

  val userDB    = "db"
  val userName  = "Martin"
  val userPass  = "pass"
  val userNPass = "new_pass"

  val admin     = "Admin"
  val adminPass = "admin_pass"

  lazy val influx: AhcManagementClient =
    InfluxMng(host, port, Some(creds))

  "User Management API" should "create user" in {
    influx.createDatabase(userDB).futureValue.right.get shouldEqual 200

    influx.createUser(userName, userPass).futureValue.right.get shouldEqual 200
    influx.showUsers.futureValue.right.get
      .contains(UserInfo(userName, isAdmin = false)) shouldEqual true
  }

  it should "create admin" in {
    influx.createAdmin(admin, adminPass).futureValue.right.get shouldEqual 200
    influx.showUsers.futureValue.right.get
      .contains(UserInfo(admin, isAdmin = true)) shouldEqual true
  }

  it should "show user privileges" in {
    influx.showUserPrivileges(admin).futureValue.right.get shouldEqual Nil
  }

  it should "set user password" in {
    influx.setUserPassword(userName, userNPass).futureValue.right.get shouldEqual 200
  }

  it should "set privileges" in {
    influx.setPrivileges(userName, userDB, Privileges.READ).futureValue.right.get shouldEqual 200
    influx
      .setPrivileges("unknown", userDB, Privileges.READ)
      .futureValue
      .left
      .get
      .getMessage shouldEqual "user not found"

    influx.showUserPrivileges(userName).futureValue.right.get shouldEqual Array(
      UserPrivilegesInfo(userDB, Privileges.READ)
    )
  }

  it should "revoke privileges" in {
    influx.revokePrivileges(userName, userDB, Privileges.READ).futureValue.right.get shouldEqual 200
    influx.showUserPrivileges(userName).futureValue.right.get shouldEqual Array(
      UserPrivilegesInfo(userDB, Privileges.NO_PRIVILEGES)
    )
  }

  it should "disable admin" in {
    influx.disableAdmin(admin).futureValue.right.get shouldEqual 200
    influx.showUsers.futureValue.right.get
      .contains(UserInfo(admin, isAdmin = false)) shouldEqual true
  }

  it should "make admin" in {
    influx.makeAdmin(admin).futureValue.right.get shouldEqual 200
    influx.showUsers.futureValue.right.get
      .contains(UserInfo(admin, isAdmin = true)) shouldEqual true
  }

  it should "drop users" in {
    influx.dropUser(userName).futureValue.right.get shouldEqual 200
    influx.dropUser(admin).futureValue.right.get shouldEqual 200
  }
} 
Example 59
Source File: SystemManagementSpec.scala    From chronicler   with Apache License 2.0 5 votes vote down vote up
package com.github.fsanaulla.chronicler.ahc.io.it

import com.github.fsanaulla.chronicler.ahc.io.{AhcIOClient, InfluxIO}
import com.github.fsanaulla.chronicler.testing.it.{DockerizedInfluxDB, Futures}
import org.scalatest.{FlatSpec, Matchers}

import scala.concurrent.ExecutionContext.Implicits.global


class SystemManagementSpec extends FlatSpec with Matchers with Futures with DockerizedInfluxDB {

  override def afterAll(): Unit = {
    influx.close()
    super.afterAll()
  }

  lazy val influx: AhcIOClient =
    InfluxIO(host, port, Some(creds))

  it should "ping InfluxDB" in {
    val result = influx.ping.futureValue.right.get
    result.build shouldEqual "OSS"
    result.version shouldEqual version
  }
} 
Example 60
Source File: CompressionSpec.scala    From chronicler   with Apache License 2.0 5 votes vote down vote up
package com.github.fsanaulla.chronicler.ahc.io.it

import java.nio.file.Paths

import com.github.fsanaulla.chronicler.ahc.io.InfluxIO
import com.github.fsanaulla.chronicler.ahc.management.InfluxMng
import com.github.fsanaulla.chronicler.ahc.shared.Uri
import com.github.fsanaulla.chronicler.core.alias.Id
import com.github.fsanaulla.chronicler.core.api.DatabaseApi
import com.github.fsanaulla.chronicler.testing.it.DockerizedInfluxDB
import org.asynchttpclient.Response
import org.scalatest.concurrent.{Eventually, IntegrationPatience, ScalaFutures}
import org.scalatest.{FlatSpec, Matchers}

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future

class CompressionSpec
  extends FlatSpec
  with Matchers
  with DockerizedInfluxDB
  with ScalaFutures
  with Eventually
  with IntegrationPatience {

  override def afterAll(): Unit = {
    mng.close()
    io.close()
    super.afterAll()
  }

  val testDB = "db"

  lazy val mng =
    InfluxMng(host, port, Some(creds), None)

  lazy val io =
    InfluxIO(host, port, Some(creds), compress = true)

  lazy val db: DatabaseApi[Future, Id, Response, Uri, String] =
    io.database(testDB)

  it should "ping database" in {
    eventually {
      io.ping.futureValue.right.get.version shouldEqual version
    }
  }

  it should "write data from file" in {
    mng.createDatabase(testDB).futureValue.right.get shouldEqual 200

    db.writeFromFile(Paths.get(getClass.getResource("/large_batch.txt").getPath))
      .futureValue
      .right
      .get shouldEqual 204

    db.readJson("SELECT * FROM test1").futureValue.right.get.length shouldEqual 10000
  }
} 
Example 61
Source File: SubscriptionManagementSpec.scala    From chronicler   with Apache License 2.0 5 votes vote down vote up
package com.github.fsanaulla.chronicler.akka

import _root_.akka.actor.ActorSystem
import _root_.akka.testkit.TestKit
import com.github.fsanaulla.chronicler.akka.management.{AkkaManagementClient, InfluxMng}
import com.github.fsanaulla.chronicler.core.duration._
import com.github.fsanaulla.chronicler.core.enums.{Destination, Destinations}
import com.github.fsanaulla.chronicler.core.model.Subscription
import com.github.fsanaulla.chronicler.testing.it.{DockerizedInfluxDB, Futures}
import org.scalatest.{FlatSpecLike, Matchers}

import scala.concurrent.ExecutionContext.Implicits.global


class SubscriptionManagementSpec
  extends TestKit(ActorSystem())
  with FlatSpecLike
  with Matchers
  with Futures
  with DockerizedInfluxDB {

  override def afterAll(): Unit = {
    influx.close()
    TestKit.shutdownActorSystem(system)
    super.afterAll()
  }

  val subName                       = "subs"
  val dbName                        = "async_subs_spec_db"
  val rpName                        = "subs_rp"
  val destType: Destination         = Destinations.ANY
  val newDestType: Destination      = Destinations.ALL
  val hosts                         = Array("udp://h1.example.com:9090", "udp://h2.example.com:9090")
  val subscription                  = Subscription(rpName, subName, destType, hosts)
  val newSubscription: Subscription = subscription.copy(destType = newDestType)

  val duration: String = 1.hours + 30.minutes

  lazy val influx: AkkaManagementClient =
    InfluxMng(host, port, Some(creds))

  "Subscription API" should "create subscription" in {

    influx.createDatabase(dbName).futureValue.right.get shouldEqual 200

    influx
      .createRetentionPolicy(rpName, dbName, duration, 1, Some(duration))
      .futureValue
      .right
      .get shouldEqual 200

    influx.showDatabases().futureValue.right.get.contains(dbName) shouldEqual true

    influx
      .createSubscription(subName, dbName, rpName, destType, hosts)
      .futureValue
      .right
      .get shouldEqual 200

    val subscr = influx.showSubscriptionsInfo.futureValue.right.get.headOption
      .flatMap(_.subscriptions.headOption)
      .get

    subscr.subsName shouldEqual subscription.subsName
    subscr.addresses shouldEqual subscription.addresses
    subscr.destType shouldEqual subscription.destType
    subscr.addresses.toList shouldEqual subscription.addresses.toList
  }

  it should "drop subscription" in {
    influx.dropSubscription(subName, dbName, rpName).futureValue.right.get shouldEqual 200

    influx.showSubscriptionsInfo.futureValue.right.get shouldEqual Nil

    influx.dropRetentionPolicy(rpName, dbName).futureValue.right.get shouldEqual 200

    influx.dropDatabase(dbName).futureValue.right.get shouldEqual 200

    influx.close() shouldEqual {}
  }
} 
Example 62
Source File: SystemManagementSpec.scala    From chronicler   with Apache License 2.0 5 votes vote down vote up
package com.github.fsanaulla.chronicler.akka

import _root_.akka.actor.ActorSystem
import _root_.akka.testkit.TestKit
import com.github.fsanaulla.chronicler.akka.management.{AkkaManagementClient, InfluxMng}
import com.github.fsanaulla.chronicler.testing.it.{DockerizedInfluxDB, Futures}
import org.scalatest.{FlatSpecLike, Matchers}

import scala.concurrent.ExecutionContext.Implicits.global


class SystemManagementSpec
  extends TestKit(ActorSystem())
  with FlatSpecLike
  with Matchers
  with Futures
  with DockerizedInfluxDB {

  override def afterAll(): Unit = {
    influx.close()
    TestKit.shutdownActorSystem(system)
    super.afterAll()
  }

  lazy val influx: AkkaManagementClient =
    InfluxMng(host, port, Some(creds))

  it should "ping InfluxDB" in {
    val result = influx.ping.futureValue.right.get
    result.build shouldEqual "OSS"
    result.version shouldEqual version
  }
} 
Example 63
Source File: RetentionPolicyManagerSpec.scala    From chronicler   with Apache License 2.0 5 votes vote down vote up
package com.github.fsanaulla.chronicler.akka

import _root_.akka.actor.ActorSystem
import _root_.akka.testkit.TestKit
import com.github.fsanaulla.chronicler.akka.management.{AkkaManagementClient, InfluxMng}
import com.github.fsanaulla.chronicler.core.duration._
import com.github.fsanaulla.chronicler.core.model.RetentionPolicyInfo
import com.github.fsanaulla.chronicler.testing.it.{DockerizedInfluxDB, Futures}
import org.scalatest.{FlatSpecLike, Matchers}

import scala.concurrent.ExecutionContext.Implicits.global
import scala.language.postfixOps


class RetentionPolicyManagerSpec
  extends TestKit(ActorSystem())
  with FlatSpecLike
  with Matchers
  with Futures
  with DockerizedInfluxDB {

  override def afterAll(): Unit = {
    influx.close()
    TestKit.shutdownActorSystem(system)
    super.afterAll()
  }

  val rpDB = "db"

  lazy val influx: AkkaManagementClient =
    InfluxMng(host, port, Some(creds))

  "Retention policy API" should "create retention policy" in {
    influx.createDatabase(rpDB).futureValue.right.get shouldEqual 200

    influx.showDatabases().futureValue.right.get.contains(rpDB) shouldEqual true

    influx
      .createRetentionPolicy("test", rpDB, 2 hours, 2, Some(2 hours), default = true)
      .futureValue
      .right
      .get shouldEqual 200

    influx
      .showRetentionPolicies(rpDB)
      .futureValue
      .right
      .get
      .contains(RetentionPolicyInfo("test", "2h0m0s", "2h0m0s", 2, default = true)) shouldEqual true

  }

  it should "drop retention policy" in {
    influx.dropRetentionPolicy("autogen", rpDB).futureValue.right.get shouldEqual 200

    influx.showRetentionPolicies(rpDB).futureValue.right.get shouldEqual Seq(
      RetentionPolicyInfo("test", "2h0m0s", "2h0m0s", 2, default = true)
    )
  }

  it should "update retention policy" in {
    influx.updateRetentionPolicy("test", rpDB, Some(3 hours)).futureValue.right.get shouldEqual 200

    influx.showRetentionPolicies(rpDB).futureValue.right.get shouldEqual Seq(
      RetentionPolicyInfo("test", "3h0m0s", "2h0m0s", 2, default = true)
    )
  }

  it should "clean up everything" in {
    influx.dropRetentionPolicy("test", rpDB).futureValue.right.get shouldEqual 200

    influx.showRetentionPolicies(rpDB).futureValue.right.get.toList shouldEqual Nil

    influx.dropDatabase(rpDB).futureValue.right.get shouldEqual 200

    influx.showDatabases().futureValue.right.get.contains(rpDB) shouldEqual false
  }

  it should "clear up after all" in {
    influx.close() shouldEqual {}
  }
} 
Example 64
Source File: AuthenticationSpec.scala    From chronicler   with Apache License 2.0 5 votes vote down vote up
package com.github.fsanaulla.chronicler.akka

import _root_.akka.actor.ActorSystem
import _root_.akka.testkit.TestKit
import com.github.fsanaulla.chronicler.akka.management.{AkkaManagementClient, InfluxMng}
import com.github.fsanaulla.chronicler.core.enums.Privileges
import com.github.fsanaulla.chronicler.core.model.{InfluxException, UserPrivilegesInfo}
import com.github.fsanaulla.chronicler.testing.it.{DockerizedInfluxDB, Futures}
import org.scalatest.{FlatSpecLike, Matchers}

import scala.concurrent.ExecutionContext.Implicits.global


class AuthenticationSpec
  extends TestKit(ActorSystem())
  with FlatSpecLike
  with Matchers
  with Futures
  with DockerizedInfluxDB {

  override def afterAll(): Unit = {
    influx.close()
    authInflux.close()
    TestKit.shutdownActorSystem(system)
    super.afterAll()
  }

  val userDB    = "db"
  val userName  = "some_user"
  val userPass  = "some_user_pass"
  val userNPass = "some_new_user_pass"

  val admin     = "admin"
  val adminPass = "admin"

  lazy val influx: AkkaManagementClient =
    InfluxMng(host, port)

  lazy val authInflux: AkkaManagementClient =
    InfluxMng(host = host, port = port, credentials = Some(creds))

  "AuthenticationUserManagement" should "create admin user " in {
    influx.showUsers.futureValue.left.get shouldBe a[InfluxException]
  }

  it should "create database" in {
    authInflux.createDatabase(userDB).futureValue.right.get shouldEqual 200
  }

  it should "create user" in {
    authInflux.createUser(userName, userPass).futureValue.right.get shouldEqual 200
    authInflux.showUsers.futureValue.right.get.exists(_.username == userName) shouldEqual true
  }

  it should "set user password" in {
    authInflux.setUserPassword(userName, userNPass).futureValue.right.get shouldEqual 200
  }

  it should "set user privileges" in {
    authInflux
      .setPrivileges(userName, userDB, Privileges.READ)
      .futureValue
      .right
      .get shouldEqual 200
  }

  it should "get user privileges" in {
    val userPrivs = authInflux.showUserPrivileges(userName).futureValue.right.get

    userPrivs.length shouldEqual 1
    userPrivs.exists { upi =>
      upi.database == userDB && upi.privilege == Privileges.withName("READ")
    } shouldEqual true
  }

  it should "revoke user privileges" in {
    authInflux
      .revokePrivileges(userName, userDB, Privileges.READ)
      .futureValue
      .right
      .get shouldEqual 200
    authInflux.showUserPrivileges(userName).futureValue.right.get shouldEqual Array(
      UserPrivilegesInfo(userDB, Privileges.NO_PRIVILEGES)
    )
  }

  it should "drop user" in {
    authInflux.dropUser(userName).futureValue.right.get shouldEqual 200
    authInflux.dropUser(admin).futureValue.right.get shouldEqual 200

    authInflux.close() shouldEqual {}
    influx.close() shouldEqual {}
  }
} 
Example 65
Source File: UserManagementSpec.scala    From chronicler   with Apache License 2.0 5 votes vote down vote up
package com.github.fsanaulla.chronicler.akka

import _root_.akka.actor.ActorSystem
import _root_.akka.testkit.TestKit
import com.github.fsanaulla.chronicler.akka.management.{AkkaManagementClient, InfluxMng}
import com.github.fsanaulla.chronicler.core.enums.Privileges
import com.github.fsanaulla.chronicler.core.model.{UserInfo, UserPrivilegesInfo}
import com.github.fsanaulla.chronicler.testing.it.{DockerizedInfluxDB, Futures}
import org.scalatest.{FlatSpecLike, Matchers}

import scala.concurrent.ExecutionContext.Implicits.global


class UserManagementSpec
  extends TestKit(ActorSystem())
  with FlatSpecLike
  with Matchers
  with Futures
  with DockerizedInfluxDB {

  override def afterAll(): Unit = {
    influx.close()
    TestKit.shutdownActorSystem(system)
    super.afterAll()
  }

  val userDB    = "db"
  val userName  = "Martin"
  val userPass  = "pass"
  val userNPass = "new_pass"

  val admin     = "Admin"
  val adminPass = "admin_pass"

  lazy val influx: AkkaManagementClient =
    InfluxMng(host, port, Some(creds))

  "User Management API" should "create user" in {
    influx.createDatabase(userDB).futureValue.right.get shouldEqual 200

    influx.createUser(userName, userPass).futureValue.right.get shouldEqual 200
    influx.showUsers.futureValue.right.get
      .contains(UserInfo(userName, isAdmin = false)) shouldEqual true
  }

  it should "create admin" in {
    influx.createAdmin(admin, adminPass).futureValue.right.get shouldEqual 200
    influx.showUsers.futureValue.right.get
      .contains(UserInfo(admin, isAdmin = true)) shouldEqual true
  }

  it should "show user privileges" in {
    influx.showUserPrivileges(admin).futureValue.right.get shouldEqual Nil
  }

  it should "set user password" in {
    influx.setUserPassword(userName, userNPass).futureValue.right.get shouldEqual 200
  }

  it should "set privileges" in {
    influx.setPrivileges(userName, userDB, Privileges.READ).futureValue.right.get shouldEqual 200
    influx
      .setPrivileges("unknown", userDB, Privileges.READ)
      .futureValue
      .left
      .get
      .getMessage shouldEqual "user not found"

    influx.showUserPrivileges(userName).futureValue.right.get shouldEqual Array(
      UserPrivilegesInfo(userDB, Privileges.READ)
    )
  }

  it should "revoke privileges" in {
    influx.revokePrivileges(userName, userDB, Privileges.READ).futureValue.right.get shouldEqual 200
    influx.showUserPrivileges(userName).futureValue.right.get shouldEqual Array(
      UserPrivilegesInfo(userDB, Privileges.NO_PRIVILEGES)
    )
  }

  it should "disable admin" in {
    influx.disableAdmin(admin).futureValue.right.get shouldEqual 200
    influx.showUsers.futureValue.right.get
      .contains(UserInfo(admin, isAdmin = false)) shouldEqual true
  }

  it should "make admin" in {
    influx.makeAdmin(admin).futureValue.right.get shouldEqual 200
    influx.showUsers.futureValue.right.get
      .contains(UserInfo(admin, isAdmin = true)) shouldEqual true
  }

  it should "drop users" in {
    influx.dropUser(userName).futureValue.right.get shouldEqual 200
    influx.dropUser(admin).futureValue.right.get shouldEqual 200

    influx.close() shouldEqual {}
  }
} 
Example 66
Source File: SystemManagementSpec.scala    From chronicler   with Apache License 2.0 5 votes vote down vote up
package com.github.fsanaulla.chronicler.akka

import _root_.akka.actor.ActorSystem
import _root_.akka.testkit.TestKit
import com.github.fsanaulla.chronicler.akka.io.{AkkaIOClient, InfluxIO}
import com.github.fsanaulla.chronicler.testing.it.{DockerizedInfluxDB, Futures}
import org.scalatest.{FlatSpecLike, Matchers}

import scala.concurrent.ExecutionContext.Implicits.global


class SystemManagementSpec
  extends TestKit(ActorSystem())
  with FlatSpecLike
  with Matchers
  with Futures
  with DockerizedInfluxDB {

  override def afterAll(): Unit = {
    io.close()
    TestKit.shutdownActorSystem(system)
    super.afterAll()
  }

  lazy val io: AkkaIOClient =
    InfluxIO(host, port, Some(creds))

  it should "ping InfluxDB" in {
    val result = io.ping.futureValue.right.get
    result.build shouldEqual "OSS"
    result.version shouldEqual version
  }
} 
Example 67
Source File: MeasurementApiSpec.scala    From chronicler   with Apache License 2.0 5 votes vote down vote up
package com.github.fsanaulla.chronicler.akka

import _root_.akka.actor.ActorSystem
import _root_.akka.testkit.TestKit
import com.github.fsanaulla.chronicler.akka.SampleEntitys._
import com.github.fsanaulla.chronicler.akka.io.{AkkaIOClient, InfluxIO}
import com.github.fsanaulla.chronicler.akka.management.{AkkaManagementClient, InfluxMng}
import com.github.fsanaulla.chronicler.akka.shared.InfluxConfig
import com.github.fsanaulla.chronicler.testing.it.{DockerizedInfluxDB, FakeEntity, Futures}
import org.scalatest.{FlatSpecLike, Matchers}

import scala.concurrent.ExecutionContext.Implicits.global


class MeasurementApiSpec
  extends TestKit(ActorSystem())
  with FlatSpecLike
  with Matchers
  with Futures
  with DockerizedInfluxDB {

  override def afterAll(): Unit = {
    mng.close()
    io.close()
    TestKit.shutdownActorSystem(system)
    super.afterAll()
  }

  val db       = "db"
  val measName = "meas"

  lazy val influxConf =
    InfluxConfig(host, port, credentials = Some(creds), compress = false, None)

  lazy val mng: AkkaManagementClient =
    InfluxMng(host, port, credentials = Some(creds))

  lazy val io: AkkaIOClient = InfluxIO(influxConf)
  lazy val meas: io.Measurement[FakeEntity] =
    io.measurement[FakeEntity](db, measName)

  it should "write single point" in {
    mng.createDatabase(db).futureValue.right.get shouldEqual 200

    meas.write(singleEntity).futureValue.right.get shouldEqual 204

    meas.read(s"SELECT * FROM $measName").futureValue.right.get shouldEqual Seq(singleEntity)
  }

  it should "bulk write" in {
    meas.bulkWrite(multiEntitys).futureValue.right.get shouldEqual 204

    meas.read(s"SELECT * FROM $measName").futureValue.right.get.length shouldEqual 3

    mng.close() shouldEqual {}
    io.close() shouldEqual {}
  }
} 
Example 68
Source File: Main.scala    From chronicler   with Apache License 2.0 5 votes vote down vote up
package com.github.fsanaulla.chronicler.example.ahc.management

import com.github.fsanaulla.chronicler.ahc.management.InfluxMng

import scala.concurrent.ExecutionContext.Implicits.global
import scala.util.{Failure, Success}

object Main {

  def main(args: Array[String]): Unit = {

    val host   = args.headOption.getOrElse("localhost")
    val influx = InfluxMng(host)

    val result = for {
      // write record to Influx
      _ <- influx.createDatabase("db")
      // retrieve written record from Influx
      databases <- influx.showDatabases()
      // close
      _ = influx.close()
    } yield databases

    result.onComplete {
      case Success(Right(dbs)) => dbs.foreach(println)
      case Success(Left(err))  => println(s"Can't retrieve boys coz of: $err")
      case Failure(exception)  => println(s"Execution error: $exception")
    }
  }
} 
Example 69
Source File: Main.scala    From chronicler   with Apache License 2.0 5 votes vote down vote up
package com.github.fsanaulla.chronicler.example.ahc.io

import com.github.fsanaulla.chronicler.ahc.io.InfluxIO
import com.github.fsanaulla.chronicler.macros.annotations.{field, tag}
import com.github.fsanaulla.chronicler.macros.auto._

import scala.concurrent.ExecutionContext.Implicits.global
import scala.util.{Failure, Success}

object Main {

  def main(args: Array[String]): Unit = {
    final case class Boy(@tag name: String, @field age: Int)

    val t      = Boy("f", 1)
    val host   = args.headOption.getOrElse("localhost")
    val influx = InfluxIO(host)
    val meas   = influx.measurement[Boy]("db", "cpu")

    val result = for {
      // write record to Influx
      _ <- meas.write(t)
      // retrieve written record from Influx
      boys <- meas.read("SELECT * FROM cpu")
      // close client
      _ = influx.close()
    } yield boys

    result.onComplete {
      case Success(Right(boys)) => boys.foreach(b => println(b.name))
      case Success(Left(err))   => println(s"Can't retrieve boys coz of: $err")
      case Failure(exception)   => println(s"Execution error: $exception")
    }
  }
} 
Example 70
Source File: FutureUtilsSpec.scala    From akka-persistence-couchbase   with Apache License 2.0 5 votes vote down vote up
package akka.persistence.couchbase.internal

import org.scalatest.concurrent.ScalaFutures
import org.scalatest.{Matchers, WordSpec}
import org.scalatest.Inspectors._

import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global

class FutureUtilsSpec extends WordSpec with Matchers with ScalaFutures {
  "The future utils" must {
    "allow for sequential traversal" in {
      @volatile var counter = -1

      val result = FutureUtils
        .traverseSequential(0 to 1000)(
          n =>
            Future {
              counter += 1
              (n, counter)
            }
        )
        .futureValue

      forAll(result) {
        case (n, c) =>
          n should ===(c)
      }
    }
  }
} 
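The spec above exercises FutureUtils.traverseSequential from akka-persistence-couchbase's internals; its implementation is not shown here. The usual way to chain Futures strictly one after another is a foldLeft over the input, so a hypothetical helper with the same shape (not the library's actual code) might look like this:

import scala.concurrent.{ExecutionContext, Future}

object SequentialTraversalSketch {
  // Runs f over the elements one at a time: each step only starts once the previous Future completes.
  // Hypothetical stand-in for FutureUtils.traverseSequential, not the library's implementation.
  def traverseSequential[A, B](in: Seq[A])(f: A => Future[B])(implicit ec: ExecutionContext): Future[Seq[B]] =
    in.foldLeft(Future.successful(Vector.empty[B])) { (acc, a) =>
      acc.flatMap(done => f(a).map(done :+ _))
    }
}

Because each f(a) only starts inside the flatMap, the counter check in the test would hold; Future.traverse over a strict collection, by contrast, kicks off all the Futures up front.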
Example 71
Source File: ReservationProjectionSpec.scala    From ddd-leaven-akka-v2   with MIT License 5 votes vote down vote up
package ecommerce.sales.view

import java.sql.Date

import com.typesafe.config.{Config, ConfigFactory}
import ecommerce.sales._
import ecommerce.sales.ReservationStatus.{Confirmed, Opened}
import org.joda.time.DateTime.now
import org.scalatest._
import pl.newicom.dddd.messaging.event.OfficeEventMessage
import pl.newicom.dddd.office.CaseRef

import scala.concurrent.ExecutionContext.Implicits.global

class ReservationProjectionSpec extends WordSpecLike with Matchers with ViewTestSupport {

  override def config: Config = ConfigFactory.load()

  val dao = new ReservationDao
  val projection = new ReservationProjection(dao)

  "ReservationProjection" should {
    "consume ReservationCreated event" in {
      // When
      projection.consume(ReservationCreated(new ReservationId("reservation-1"), "client-1")).run()

      // Then
      val reservation = dao.byId("reservation-1").result
      assert(reservation.map(_.status) == Some(Opened))
    }
  }

  "ReservationProjection" should {
    "consume ReservationConfirmed event" in {
      // Given

      dao.createOrUpdate(ReservationView("reservation-1", "client-1", Opened, new Date(now.getMillis))).run()

      // When
      projection.consume(ReservationConfirmed(new ReservationId("reservation-1"), "client-1", Money(10))).run()

      // Then
      val reservation = dao.byId("reservation-1").result
      assert(reservation.map(_.status) == Some(Confirmed))
    }
  }

  override def ensureSchemaDropped = dao.ensureSchemaDropped

  override def ensureSchemaCreated = dao.ensureSchemaCreated

  implicit def toEventMessage(event: ReservationCreated): OfficeEventMessage = OfficeEventMessage(CaseRef(event.reservationId.value, ReservationOfficeId, None), event)
  implicit def toEventMessage(event: ReservationConfirmed): OfficeEventMessage = OfficeEventMessage(CaseRef(event.reservationId.value, ReservationOfficeId, None), event)

} 
Example 72
Source File: ShipmentProjectionSpec.scala    From ddd-leaven-akka-v2   with MIT License 5 votes vote down vote up
package ecommerce.sales.view

import com.typesafe.config.{Config, ConfigFactory}
import ecommerce.shipping.{ShipmentCreated, ShipmentId, ShippingOfficeId}
import ecommerce.shipping.ShippingStatus.Waiting
import ecommerce.shipping.view.{ShipmentDao, ShipmentProjection}
import org.scalatest._
import pl.newicom.dddd.messaging.event.OfficeEventMessage
import pl.newicom.dddd.office.CaseRef

import scala.concurrent.ExecutionContext.Implicits.global

class ShipmentProjectionSpec extends WordSpecLike with Matchers with ViewTestSupport {

  override def config: Config = ConfigFactory.load()

  val dao = new ShipmentDao
  val projection = new ShipmentProjection(dao)

  "ShipmentProjection" should {
    "consume ShipmentCreated event" in {
      // When
      projection.consume(ShipmentCreated(new ShipmentId("shipment-1"), "order-1")).run()

      // Then
      assert(dao.byId("shipment-1").result.get.status == Waiting)
    }
  }

  override def ensureSchemaDropped = dao.ensureSchemaDropped
  override def ensureSchemaCreated = dao.ensureSchemaCreated

  implicit def toEventMessage(event: ShipmentCreated): OfficeEventMessage = OfficeEventMessage(CaseRef(event.shipmentId.value, ShippingOfficeId, None), event)

} 
Example 73
Source File: Transaction.scala    From scredis   with Apache License 2.0 5 votes vote down vote up
package scredis

import scredis.protocol.Request
import scredis.protocol.requests.TransactionRequests.{Exec, Multi}

import scala.concurrent.ExecutionContext.Implicits.global
import scala.util.{Failure, Success}

private[scredis] final case class Transaction (requests: Seq[Request[_]]) {
  val execRequest = Exec(requests.map(_.decode))
  private val future = execRequest.future
  
  future.onComplete {
    case Success(results) =>
      var i = 0
      requests.foreach { request =>
        if (!request.future.isCompleted) {
          results.apply(i) match {
            case Success(x) => request.success(x)
            case Failure(e) => request.failure(e)
          }
        }
        i += 1
      }
    case Failure(e) => requests.foreach { request =>
      if (!request.future.isCompleted) {
        request.failure(e)
      }
    }
  }
  
  override def toString: String = requests.mkString("Transaction(", ", ", ")")
}

private[scredis] object Transaction {
  val MultiRequest = Multi()
} 
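The key idea in Example 73 is that every queued Request carries its own promise: when the EXEC reply arrives, the transaction walks the per-command results and completes each pending request individually, and a failed EXEC fails all of them. A minimal sketch of that fan-out with plain Promises (names are illustrative, not scredis API):

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.{Future, Promise}
import scala.util.{Failure, Success, Try}

object FanOutResultsSketch extends App {
  // One promise per queued command, as in Transaction above.
  val pending: Seq[Promise[Int]] = Seq.fill(3)(Promise[Int]())

  // Stand-in for the decoded EXEC reply: one Try per queued command.
  val execReply: Future[Seq[Try[Int]]] =
    Future(Seq(Success(1), Failure(new RuntimeException("boom")), Success(3)))

  execReply.onComplete {
    case Success(results) =>
      // Complete each pending promise with its own result, as Transaction does for each request.
      pending.zip(results).foreach { case (p, r) => p.complete(r) }
    case Failure(e) =>
      // A failed EXEC fails every queued command.
      pending.foreach(_.failure(e))
  }
}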
Example 74
Source File: RedisClusterSpec.scala    From scredis   with Apache License 2.0 5 votes vote down vote up
package scredis

import org.scalatest.concurrent.ScalaFutures
import org.scalatestplus.scalacheck.ScalaCheckDrivenPropertyChecks

import scala.concurrent.ExecutionContext.Implicits.global
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class RedisClusterSpec extends AnyWordSpec
  with Matchers
  with ScalaFutures
  with ScalaCheckDrivenPropertyChecks {

  val keys = org.scalacheck.Arbitrary.arbString.arbitrary

  // we assume there is a local cluster started on ports 7000 - 7005
  // see testing.md
  lazy val cluster = RedisCluster(Server("localhost",7000))

  val badSeed1 = Server("localhost",7777)
  val badSeed2 = Server("localhost",2302)
  val badSeeds = List(badSeed1, badSeed2, Server("localhost",7003))

  "connection to cluster" should {
    "work for a single valid seed node" in {
      val info = cluster.clusterInfo().futureValue

      info("cluster_state") should be ("ok")
      info("cluster_known_nodes").toInt should be (6) // 6 total nodes
      info("cluster_size").toInt should be (3) // 3 master nodes
    }

    "work when some of the seed nodes are offline" in {
      val badServers = RedisCluster(badSeeds)

      val info = badServers.clusterInfo().futureValue
      info("cluster_state") should be ("ok")
      info("cluster_known_nodes").toInt should be (6) // 6 total nodes
      info("cluster_size").toInt should be (3) // 3 master nodes
    }
  }

  "writes to cluster" should {
    "be readable" in {
      forAll { (key:String, value: String) =>
        whenever (value.nonEmpty) {
          val res = for {
            _ <- cluster.set(key, value)
            g <- cluster.get(key)
          } yield g.get
          res.futureValue should be(value)
        }
      }
    }

    "be idempotent" in {
      forAll { (key:String, value: String) =>
        whenever (value.nonEmpty) {
          val res = for {
            _ <- cluster.set(key, value)
            g1 <- cluster.get(key)
            _ <- cluster.set(key, value)
            g2 <- cluster.get(key)
          } yield (g1.get,g2.get)
          res.futureValue should be(value,value)
        }
      }
    }
  }

  // TODO basic test for each supported / unsupported command

} 
Example 75
Source File: MultipartFormDataWritable.scala    From play-swagger   with MIT License 5 votes vote down vote up
package de.zalando.play.controllers

import java.nio.file.{Files, Paths}

import play.api.http.{HeaderNames, Writeable}
import play.api.libs.Files.TemporaryFile
import play.api.mvc.MultipartFormData.FilePart
import play.api.mvc.{Codec, MultipartFormData}


object MultipartFormDataWritable {
  import scala.concurrent.ExecutionContext.Implicits.global

  val boundary = "--------ABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890"

  def formatDataParts(data: Map[String, Seq[String]]): Array[Byte] = {
    val dataParts = data.flatMap { case (key, values) =>
      values.map { value =>
        val name = s""""$key""""
        s"--$boundary\r\n${HeaderNames.CONTENT_DISPOSITION}: form-data; name=$name\r\n\r\n$value\r\n"
      }
    }.mkString("")
    Codec.utf_8.encode(dataParts)
  }

  def filePartHeader(file: FilePart[TemporaryFile]): Array[Byte] = {
    val name = s""""${file.key}""""
    val filename = s""""${file.filename}""""
    val contentType = file.contentType.map { ct =>
      s"${HeaderNames.CONTENT_TYPE}: $ct\r\n"
    }.getOrElse("")
    Codec.utf_8.encode(s"--$boundary\r\n${HeaderNames.CONTENT_DISPOSITION}: form-data; name=$name; filename=$filename\r\n$contentType\r\n")
  }

  val singleton = Writeable[MultipartFormData[TemporaryFile]](
    transform = { form: MultipartFormData[TemporaryFile] =>
      formatDataParts(form.dataParts) ++
        form.files.flatMap { file =>
          val fileBytes = Files.readAllBytes(Paths.get(file.ref.file.getAbsolutePath))
          filePartHeader(file) ++ fileBytes ++ Codec.utf_8.encode("\r\n")
        } ++
        Codec.utf_8.encode(s"--$boundary--")
    },
    contentType = Some(s"multipart/form-data; boundary=$boundary")
  )
} 
Example 76
Source File: KVStore.scala    From Freasy-Monad   with MIT License 5 votes vote down vote up
package examples.scalaz

import scalaz._
import scalaz.Id.Id
import freasymonad.scalaz._

import scala.collection.mutable
import scala.concurrent.duration.Duration
import scala.concurrent.{Await, Future}

@free trait KVStore {
  type KVStoreF[A] = Free[GrammarADT, A]
  sealed trait GrammarADT[A]

  def put[T](key: String, value: T): KVStoreF[Unit]
  def get[T](key: String): KVStoreF[Option[T]]
  def delete(key: String): KVStoreF[Unit]

  def update[T](key: String, f: T => T): KVStoreF[Unit] =
    for {
      vMaybe <- get[T](key)
      _      <- vMaybe.map(v => put[T](key, f(v))).getOrElse(Free.pure(()))
    } yield ()
}

object Main extends App {
  import KVStore.ops._

  def program: KVStoreF[Option[Int]] =
    for {
      _ <- put("wild-cats", 2)
      _ <- update[Int]("wild-cats", _ + 12)
      _ <- put("tame-cats", 5)
      n <- get[Int]("wild-cats")
      _ <- delete("tame-cats")
    } yield n

  val idInterpreter = new KVStore.Interp[Id] {
    val kvs = mutable.Map.empty[String, Any]
    def get[T](key: String): Id[Option[T]] = {
      println(s"get($key)")
      kvs.get(key).map(_.asInstanceOf[T])
    }
    def put[T](key: String, value: T): Id[Unit] = {
      println(s"put($key, $value)")
      kvs(key) = value
    }
    def delete(key: String): Id[Unit] = {
      println(s"delete($key)")
      kvs.remove(key)
    }
  }
  val resId: Id[Option[Int]] = idInterpreter.run(program)

  import scalaz.std.scalaFuture.futureInstance
  import scala.concurrent.ExecutionContext.Implicits.global

  val futureInterpreter = new KVStore.Interp[Future] {
    val kvs = mutable.Map.empty[String, Any]
    def get[T](key: String): Future[Option[T]] = Future {
      println(s"get($key)")
      kvs.get(key).map(_.asInstanceOf[T])
    }
    def put[T](key: String, value: T): Future[Unit] = Future {
      println(s"put($key, $value)")
      kvs(key) = value
    }
    def delete(key: String): Future[Unit] = Future {
      println(s"delete($key)")
      kvs.remove(key)
    }
  }
  val resFuture: Future[Option[Int]] = futureInterpreter.run(program)
  Await.ready(resFuture, Duration.Inf)
} 
Example 77
Source File: Master.scala    From Scala-Design-Patterns-Second-Edition   with MIT License 5 votes vote down vote up
package com.ivan.nikolov.scheduler.actors

import java.time.LocalDateTime
import java.util.concurrent.TimeUnit

import akka.actor.{Props, Cancellable, Actor}
import akka.routing.RoundRobinPool
import com.ivan.nikolov.scheduler.actors.messages.{Work, Schedule, Done}
import com.ivan.nikolov.scheduler.config.job.{Daily, Hourly}
import com.typesafe.scalalogging.LazyLogging

import scala.concurrent.duration.Duration
import scala.collection.mutable.ListBuffer
import scala.concurrent.ExecutionContext.Implicits.global

class Master(numWorkers: Int, actorFactory: ActorFactory) extends Actor with LazyLogging {
  val cancelables = ListBuffer[Cancellable]()
  
  val router = context.actorOf(
    Props(actorFactory.createWorkerActor()).withRouter(RoundRobinPool(numWorkers)),
    "scheduler-master-worker-router"
  )
  
  override def receive: Receive = {
    case Done(name, command, jobType, success) =>
      if (success) {
        logger.info("Successfully completed {} ({}).", name, command)
      } else {
        logger.error("Failure! Command {} ({}) returned a non-zero result code.", name, command)
      }
    case Schedule(configs) => 
      configs.foreach {
        case config =>
          val cancellable = this.context.system.scheduler.schedule(
            config.timeOptions.getInitialDelay(LocalDateTime.now(), config.frequency),
            config.frequency match {
              case Hourly => Duration.create(1, TimeUnit.HOURS)
              case Daily => Duration.create(1, TimeUnit.DAYS)
            },
            router,
            Work(config.name, config.command, config.jobType)
          )
          cancelables += cancellable // keep the handle so postStop() can cancel it
          logger.info("Scheduled: {}", config)
      }
  }
  
  override def postStop(): Unit = {
    cancelables.foreach(_.cancel())
  }
} 
Example 79
Source File: GenericHandler.scala    From tap   with Apache License 2.0 5 votes vote down vote up
package controllers.handlers


import io.heta.tap.data.results.StringResult
import play.api.Logger
import play.api.libs.json.{JsValue, Json}

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import scala.reflect.runtime.universe._
import scala.util.Try

trait GenericHandler {

  def queryTime(start:Long):Int = (System.currentTimeMillis() - start).toInt

  def validJson(parameters:Option[String]):Option[JsValue] = parameters.flatMap(p => Try(Json.parse(p)).toOption)

  def extractParameter[A:TypeTag](paramName:String,parameters:Option[String]):Option[Any] = {
    val jsParams = validJson(parameters)
    Logger.debug(s"JSON: $jsParams")
    jsParams.flatMap { jp =>
      val result = Try((jp \ paramName).toOption).toOption.flatten
      typeOf[A] match {
        case t if t =:= typeOf[String] => Try(result.map(_.as[String])).toOption.flatten // scalastyle:ignore
        case t if t =:= typeOf[Double] => Try(result.map(_.as[Double])).toOption.flatten // scalastyle:ignore
        case t if t =:= typeOf[Int] => Try(result.map(_.as[Int])).toOption.flatten       // scalastyle:ignore
        case _ => None
      }
    }
  }

  def dummyResult(text:String):Future[StringResult] = Future {
    StringResult("This features is not implemented yet")
  }


} 
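extractParameter above dispatches on a runtime TypeTag to decide how to read the JSON value for the requested type. A hypothetical, Play-free sketch of the same typeOf[A] dispatch (parseAs is made up for illustration, not part of TAP):

import scala.reflect.runtime.universe._
import scala.util.Try

object TypeTagDispatchSketch extends App {
  // Picks a parser based on the requested type, mirroring the typeOf[A] match in extractParameter above.
  def parseAs[A: TypeTag](raw: String): Option[Any] = typeOf[A] match {
    case t if t =:= typeOf[String] => Some(raw)
    case t if t =:= typeOf[Int]    => Try(raw.toInt).toOption
    case t if t =:= typeOf[Double] => Try(raw.toDouble).toOption
    case _                         => None
  }

  println(parseAs[Int]("42"))      // Some(42)
  println(parseAs[Double]("oops")) // None
}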
Example 80
Source File: GraphQlController.scala    From tap   with Apache License 2.0 5 votes vote down vote up
package controllers

import javax.inject.Inject
import models.GraphqlSchema
import models.graphql.GraphqlActions
import play.api.Logger
import play.api.libs.json.{JsObject, JsValue, Json}
import play.api.mvc.{Action, AnyContent, InjectedController, Result}
import sangria.ast.Document
import sangria.execution.{ErrorWithResolver, Executor, QueryAnalysisError}
import sangria.marshalling.playJson.{PlayJsonInputUnmarshallerJObject, PlayJsonResultMarshaller}
import sangria.parser.{QueryParser, SyntaxError}
import sangria.schema.Schema
import views.GraphiqlPage

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import scala.util.{Failure, Success}


class GraphQlController @Inject() (assets: AssetsFinder, gqlSchema: GraphqlSchema, actions: GraphqlActions) extends InjectedController {

  val schema:Schema[GraphqlActions,Unit] = gqlSchema.create

  def graphiql:Action[AnyContent] = Action {
    request => Logger.info("Got Any content request from:" + request.remoteAddress)
    //Ok(views.html.graphiql(assets))
    Ok(GraphiqlPage.render("Explore TAP with GraphiQL"))
  }

  def graphql:Action[JsValue] = Action.async(parse.json) { request =>
    val query = (request.body \ "query").as[String]
    val operation = (request.body \ "operationName").asOpt[String]
    val variables = (request.body \ "variables").asOpt[JsObject].getOrElse(Json.obj())
    Logger.info(s"Query received from ${request.remoteAddress} >>> ${operation.getOrElse("No query")}")
    Logger.info(s"Variables: $variables")
    process(query,operation,variables)
  }

  def process(query:String,name:Option[String],variables:JsObject):Future[Result] = QueryParser.parse(query) match {
    case Success(queryAst) => executeGraphQLQuery(queryAst, name, variables)
    case Failure(error: SyntaxError) => Future.successful(BadRequest(error.getMessage))
    case _ => Future.successful(BadRequest("There was a problem with the request to TAP graphql."))
  }

  def executeGraphQLQuery(query: Document, name: Option[String], vars: JsObject):Future[Result] = {
     Executor.execute(schema, query, actions, operationName = name, variables = vars)
      .map(Ok(_))
      .recover {
        case error: QueryAnalysisError => BadRequest(error.resolveError)
        case error: ErrorWithResolver => InternalServerError(error.resolveError)
      }

  }
} 
Example 81
Source File: HttpTimeoutSpec.scala    From http-verbs   with Apache License 2.0 5 votes vote down vote up
package uk.gov.hmrc.play.http

import java.net.{ServerSocket, URI}
import java.util.concurrent.TimeoutException

import org.scalatest.concurrent.ScalaFutures
import org.scalatest.BeforeAndAfterAll
import org.scalatest.wordspec.AnyWordSpecLike
import org.scalatest.matchers.should.Matchers
import org.webbitserver.handler.{DelayedHttpHandler, StringHttpHandler}
import org.webbitserver.netty.NettyWebServer
import play.api.Play
import play.api.test.FakeApplication
import uk.gov.hmrc.http.HeaderCarrier
import uk.gov.hmrc.play.http.ws.WSHttp
import uk.gov.hmrc.play.test.TestHttpCore

import scala.concurrent.Await
import scala.concurrent.duration.DurationInt
import scala.concurrent.ExecutionContext.Implicits.global

class HttpTimeoutSpec extends AnyWordSpecLike with Matchers with ScalaFutures with BeforeAndAfterAll {

  lazy val fakeApplication = FakeApplication(additionalConfiguration = Map("ws.timeout.request" -> "1000"))

  override def beforeAll() {
    super.beforeAll()
    Play.start(fakeApplication)
  }

  override def afterAll() {
    super.afterAll()
    Play.stop(fakeApplication)
  }

  "HttpCalls" should {

    "be gracefully timeout when no response is received within the 'timeout' frame" in {
      val http = new WSHttp with TestHttpCore

      // get an unused port
      val ss = new ServerSocket(0)
      ss.close()
      val publicUri = URI.create(s"http://localhost:${ss.getLocalPort}")
      val ws        = new NettyWebServer(global, ss.getLocalSocketAddress, publicUri)
      try {
        //starts web server
        ws.add(
          "/test",
          new DelayedHttpHandler(global, 2000, new StringHttpHandler("application/json", "{name:'pong'}")))
        ws.start().get()

        implicit val hc = HeaderCarrier()

        val start = System.currentTimeMillis()
        intercept[TimeoutException] {
          //make request to web server
          Await.result(http.doPost(s"$publicUri/test", "{name:'ping'}", Seq()), 5.seconds)
        }
        val diff = (System.currentTimeMillis() - start).toInt
        // there is test execution delay around 700ms
        diff should be >= 1000
        diff should be < 2500
      } finally {
        ws.stop()
      }
    }
  }
} 
Example 82
Source File: JavafxBoot.scala    From eclair   with Apache License 2.0 5 votes vote down vote up
package fr.acinq.eclair

import java.io.File

import akka.actor.ActorSystem
import fr.acinq.eclair.gui.{FxApp, FxPreloader}
import grizzled.slf4j.Logging
import javafx.application.Application
import scala.concurrent.ExecutionContext.Implicits.global

object JavafxBoot extends App with Logging {
  try {
    val datadir = new File(System.getProperty("eclair.datadir", System.getProperty("user.home") + "/.eclair"))
    val config = NodeParams.loadConfiguration(datadir)
    val headless = System.getProperty("eclair.headless") != null

    if (headless) {
      implicit val system = ActorSystem("eclair-node-gui", config)
      val setup = new Setup(datadir)
      setup.bootstrap.map { kit =>
        Boot.startApiServiceIfEnabled(kit)
      }
    } else {
      System.setProperty("javafx.preloader", classOf[FxPreloader].getName)
      Application.launch(classOf[FxApp], datadir.getAbsolutePath)
    }
  } catch {
    case t: Throwable =>
      val errorMsg = if (t.getMessage != null) t.getMessage else t.getClass.getSimpleName
      System.err.println(s"fatal error: $errorMsg")
      logger.error(s"fatal error: $errorMsg", t)
      System.exit(1)
  }
} 
Example 83
Source File: Autoprobe.scala    From eclair   with Apache License 2.0 5 votes vote down vote up
package fr.acinq.eclair.payment.send

import akka.actor.{Actor, ActorLogging, ActorRef, Props}
import fr.acinq.bitcoin.Crypto.PublicKey
import fr.acinq.eclair.crypto.Sphinx.DecryptedFailurePacket
import fr.acinq.eclair.payment.{PaymentEvent, PaymentFailed, RemoteFailure}
import fr.acinq.eclair.router.Announcements
import fr.acinq.eclair.router.Router.{Data, PublicChannel}
import fr.acinq.eclair.wire.IncorrectOrUnknownPaymentDetails
import fr.acinq.eclair.{LongToBtcAmount, NodeParams, randomBytes32, secureRandom}

import scala.concurrent.duration._


class Autoprobe(nodeParams: NodeParams, router: ActorRef, paymentInitiator: ActorRef) extends Actor with ActorLogging {

  import Autoprobe._

  import scala.concurrent.ExecutionContext.Implicits.global

  // refresh our map of channel_updates regularly from the router
  context.system.scheduler.schedule(0 seconds, ROUTING_TABLE_REFRESH_INTERVAL, router, Symbol("data"))

  override def receive: Receive = {
    case routingData: Data =>
      scheduleProbe()
      context become main(routingData)
  }

  def main(routingData: Data): Receive = {
    case routingData: Data =>
      context become main(routingData)

    case TickProbe =>
      pickPaymentDestination(nodeParams.nodeId, routingData) match {
        case Some(targetNodeId) =>
          val paymentHash = randomBytes32 // we don't even know the preimage (this needs to be a secure random!)
          log.info(s"sending payment probe to node=$targetNodeId payment_hash=$paymentHash")
          paymentInitiator ! PaymentInitiator.SendPaymentRequest(PAYMENT_AMOUNT_MSAT, paymentHash, targetNodeId, maxAttempts = 1)
        case None =>
          log.info(s"could not find a destination, re-scheduling")
          scheduleProbe()
      }

    case paymentResult: PaymentEvent =>
      paymentResult match {
        case PaymentFailed(_, _, _ :+ RemoteFailure(_, DecryptedFailurePacket(targetNodeId, IncorrectOrUnknownPaymentDetails(_, _))), _) =>
          log.info(s"payment probe successful to node=$targetNodeId")
        case _ =>
          log.info(s"payment probe failed with paymentResult=$paymentResult")
      }
      scheduleProbe()
  }

  def scheduleProbe() = context.system.scheduler.scheduleOnce(PROBING_INTERVAL, self, TickProbe)

}

object Autoprobe {

  def props(nodeParams: NodeParams, router: ActorRef, paymentInitiator: ActorRef) = Props(classOf[Autoprobe], nodeParams, router, paymentInitiator)

  val ROUTING_TABLE_REFRESH_INTERVAL = 10 minutes

  val PROBING_INTERVAL = 20 seconds

  val PAYMENT_AMOUNT_MSAT = (100 * 1000) msat // this is below dust_limit so there won't be an output in the commitment tx

  object TickProbe

  def pickPaymentDestination(nodeId: PublicKey, routingData: Data): Option[PublicKey] = {
    // we only pick direct peers with enabled public channels
    val peers = routingData.channels
      .collect {
        case (shortChannelId, c@PublicChannel(ann, _, _, Some(u1), _, _))
          if c.getNodeIdSameSideAs(u1) == nodeId && Announcements.isEnabled(u1.channelFlags) && routingData.channels.exists(_._1 == shortChannelId) => ann.nodeId2 // we only consider outgoing channels that are enabled and announced
      }
    if (peers.isEmpty) {
      None
    } else {
      peers.drop(secureRandom.nextInt(peers.size)).headOption
    }
  }

} 
Example 84
Source File: BatchingClient.scala    From eclair   with Apache License 2.0 5 votes vote down vote up
package fr.acinq.eclair.blockchain.bitcoind.rpc

import akka.actor.{Actor, ActorLogging, ActorRef, Status}
import akka.pattern.pipe
import fr.acinq.eclair.blockchain.bitcoind.rpc.BatchingClient.Pending

import scala.collection.immutable.Queue

class BatchingClient(rpcClient: BasicBitcoinJsonRPCClient) extends Actor with ActorLogging {

  import scala.concurrent.ExecutionContext.Implicits.global

  override def receive: Receive = {
    case request: JsonRPCRequest =>
      // immediately process isolated request
      process(queue = Queue(Pending(request, sender)))
  }

  def waiting(queue: Queue[Pending], processing: Seq[Pending]): Receive = {
    case request: JsonRPCRequest =>
      // there is already a batch in flight, just add this request to the queue
      context become waiting(queue :+ Pending(request, sender), processing)

    case responses: Seq[JsonRPCResponse]@unchecked =>
      log.debug("got {} responses", responses.size)
      // let's send back answers to the requestors
      require(responses.size == processing.size, s"responses=${responses.size} != processing=${processing.size}")
      responses.zip(processing).foreach {
        case (JsonRPCResponse(result, None, _), Pending(_, requestor)) => requestor ! result
        case (JsonRPCResponse(_, Some(error), _), Pending(_, requestor)) => requestor ! Status.Failure(JsonRPCError(error))
      }
      process(queue)

    case s@Status.Failure(t) =>
      log.error(t, s"got exception for batch of ${processing.size} requests")
      // let's fail all requests
      processing.foreach { case Pending(_, requestor) => requestor ! s }
      process(queue)
  }

  def process(queue: Queue[Pending]) = {
    // do we have queued requests?
    if (queue.isEmpty) {
      log.debug("no more requests, going back to idle")
      context become receive
    } else {
      val (batch, rest) = queue.splitAt(BatchingClient.BATCH_SIZE)
      log.debug(s"sending {} request(s): {} (queue={})", batch.size, batch.groupBy(_.request.method).map(e => e._1 + "=" + e._2.size).mkString(" "), queue.size)
      rpcClient.invoke(batch.map(_.request)) pipeTo self
      context become waiting(rest, batch)
    }
  }

}

object BatchingClient {

  val BATCH_SIZE = 50

  case class Pending(request: JsonRPCRequest, requestor: ActorRef)

} 
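
A minimal, self-contained sketch of the same queue/in-flight batching pattern as BatchingClient above, written against a stubbed asynchronous backend instead of bitcoind; all names here (BatchDemo, Req, Resp, invoke) are illustrative and not part of eclair, and failure handling is omitted.

import akka.actor.{Actor, ActorRef, ActorSystem, Props}
import akka.pattern.pipe
import scala.collection.immutable.Queue
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future

object BatchDemo {

  case class Req(n: Int)
  case class Resp(n: Int)

  val BatchSize = 10

  // stand-in for rpcClient.invoke: answers a whole batch asynchronously
  def invoke(batch: Seq[Req]): Future[Seq[Resp]] = Future(batch.map(r => Resp(r.n * 2)))

  class Batcher extends Actor {

    override def receive: Receive = idle

    def idle: Receive = {
      case r: Req => process(Queue(r -> sender()))
    }

    def waiting(queue: Queue[(Req, ActorRef)], processing: Seq[(Req, ActorRef)]): Receive = {
      case r: Req =>
        // a batch is already in flight, just enqueue
        context become waiting(queue :+ (r -> sender()), processing)
      case responses: Seq[Resp]@unchecked =>
        // send each answer back to the actor that asked for it
        responses.zip(processing).foreach { case (resp, (_, requestor)) => requestor ! resp }
        process(queue)
    }

    def process(queue: Queue[(Req, ActorRef)]): Unit =
      if (queue.isEmpty) context become idle
      else {
        val (batch, rest) = queue.splitAt(BatchSize)
        invoke(batch.map(_._1)) pipeTo self // the completed Seq[Resp] comes back as a message
        context become waiting(rest, batch)
      }
  }

  def main(args: Array[String]): Unit = {
    val system = ActorSystem("batch-demo")
    val batcher = system.actorOf(Props(new Batcher))
    // at most BatchSize requests are in flight at any time; replies go to the sender
    // (here there is no sender, so they end up in dead letters)
    (1 to 25).foreach(i => batcher ! Req(i))
  }
}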
Example 85
Source File: ThroughputSpec.scala    From eclair   with Apache License 2.0 5 votes vote down vote up
package fr.acinq.eclair.channel

import java.util.UUID
import java.util.concurrent.CountDownLatch
import java.util.concurrent.atomic.AtomicLong

import akka.actor.{Actor, ActorRef, ActorSystem, Props}
import akka.testkit.TestProbe
import fr.acinq.bitcoin.{ByteVector32, Crypto}
import fr.acinq.eclair.TestConstants.{Alice, Bob}
import fr.acinq.eclair._
import fr.acinq.eclair.blockchain._
import fr.acinq.eclair.blockchain.bitcoind.ZmqWatcher
import fr.acinq.eclair.payment.relay.{CommandBuffer, Relayer}
import fr.acinq.eclair.wire.{Init, UpdateAddHtlc}
import org.scalatest.funsuite.AnyFunSuite

import scala.concurrent.duration._
import scala.util.Random

class ThroughputSpec extends AnyFunSuite {
  ignore("throughput") {
    implicit val system = ActorSystem("test")
    val pipe = system.actorOf(Props[Pipe], "pipe")
    val blockCount = new AtomicLong()
    val blockchain = system.actorOf(ZmqWatcher.props(blockCount, new TestBitcoinClient()), "blockchain")
    val paymentHandler = system.actorOf(Props(new Actor() {
      val random = new Random()

      context.become(run(Map()))

      override def receive: Receive = ???

      def run(h2r: Map[ByteVector32, ByteVector32]): Receive = {
        case ('add, tgt: ActorRef) =>
          val r = randomBytes32
          val h = Crypto.sha256(r)
          tgt ! CMD_ADD_HTLC(1 msat, h, CltvExpiry(1), TestConstants.emptyOnionPacket, Upstream.Local(UUID.randomUUID()))
          context.become(run(h2r + (h -> r)))

        case ('sig, tgt: ActorRef) => tgt ! CMD_SIGN

        case htlc: UpdateAddHtlc if h2r.contains(htlc.paymentHash) =>
          val r = h2r(htlc.paymentHash)
          sender ! CMD_FULFILL_HTLC(htlc.id, r)
          context.become(run(h2r - htlc.paymentHash))
      }
    }), "payment-handler")
    val registerA = TestProbe()
    val registerB = TestProbe()
    val commandBufferA = system.actorOf(Props(new CommandBuffer(Alice.nodeParams, registerA.ref)))
    val commandBufferB = system.actorOf(Props(new CommandBuffer(Bob.nodeParams, registerB.ref)))
    val relayerA = system.actorOf(Relayer.props(Alice.nodeParams, TestProbe().ref, registerA.ref, commandBufferA, paymentHandler))
    val relayerB = system.actorOf(Relayer.props(Bob.nodeParams, TestProbe().ref, registerB.ref, commandBufferB, paymentHandler))
    val wallet = new TestWallet
    val alice = system.actorOf(Channel.props(Alice.nodeParams, wallet, Bob.nodeParams.nodeId, blockchain, relayerA, None), "a")
    val bob = system.actorOf(Channel.props(Bob.nodeParams, wallet, Alice.nodeParams.nodeId, blockchain, relayerB, None), "b")
    val aliceInit = Init(Alice.channelParams.features)
    val bobInit = Init(Bob.channelParams.features)
    alice ! INPUT_INIT_FUNDER(ByteVector32.Zeroes, TestConstants.fundingSatoshis, TestConstants.pushMsat, TestConstants.feeratePerKw, TestConstants.feeratePerKw, Alice.channelParams, pipe, bobInit, ChannelFlags.Empty, ChannelVersion.STANDARD)
    bob ! INPUT_INIT_FUNDEE(ByteVector32.Zeroes, Bob.channelParams, pipe, aliceInit)

    val latch = new CountDownLatch(2)
    val listener = system.actorOf(Props(new Actor {
      override def receive: Receive = {
        case ChannelStateChanged(_, _, _, _, NORMAL, _) => latch.countDown()
      }
    }), "listener")
    system.eventStream.subscribe(listener, classOf[ChannelEvent])

    pipe ! (alice, bob)
    latch.await()

    var i = new AtomicLong(0)
    val random = new Random()

    def msg = random.nextInt(100) % 5 match {
      case 0 | 1 | 2 | 3 => 'add
      case 4 => 'sig
    }

    import scala.concurrent.ExecutionContext.Implicits.global
    system.scheduler.schedule(0 seconds, 50 milliseconds, new Runnable() {
      override def run(): Unit = paymentHandler ! (msg, alice)
    })
    system.scheduler.schedule(5 seconds, 70 milliseconds, new Runnable() {
      override def run(): Unit = paymentHandler ! (msg, bob)
    })

    Thread.sleep(Long.MaxValue)
  }
} 
Example 86
Source File: FuzzyPipe.scala    From eclair   with Apache License 2.0 5 votes vote down vote up
package fr.acinq.eclair.channel

import akka.actor.{Actor, ActorLogging, ActorRef, Stash}
import fr.acinq.eclair.Features
import fr.acinq.eclair.channel.Commitments.msg2String
import fr.acinq.eclair.wire.{Init, LightningMessage}
import scodec.bits.ByteVector

import scala.concurrent.duration._
import scala.util.Random


class FuzzyPipe(fuzzy: Boolean) extends Actor with Stash with ActorLogging {

  import scala.concurrent.ExecutionContext.Implicits.global

  def receive = {
    case (a: ActorRef, b: ActorRef) =>
      unstashAll()
      context become connected(a, b, 10)

    case _ => stash()
  }

  def stayOrDisconnect(a: ActorRef, b: ActorRef, countdown: Int): Unit = {
    if (!fuzzy) context become connected(a, b, countdown - 1) // fuzzy mode disabled, we never disconnect
    else if (countdown > 1) context become connected(a, b, countdown - 1)
    else {
      log.debug("DISCONNECTED")
      a ! INPUT_DISCONNECTED
      b ! INPUT_DISCONNECTED
      context.system.scheduler.scheduleOnce(100 millis, self, 'reconnect)
      context become disconnected(a, b)
    }
  }

  def connected(a: ActorRef, b: ActorRef, countdown: Int): Receive = {
    case msg: LightningMessage if sender() == a =>
      log.debug(f"A ---${msg2String(msg)}%-6s--> B")
      b forward msg
      stayOrDisconnect(a, b, countdown)
    case msg: LightningMessage if sender() == b =>
      log.debug(f"A <--${msg2String(msg)}%-6s--- B")
      a forward msg
      stayOrDisconnect(a, b, countdown)
  }

  def disconnected(a: ActorRef, b: ActorRef): Receive = {
    case msg: LightningMessage if sender() == a =>
      // dropped
      log.info(f"A ---${msg2String(msg)}%-6s-X")
    case msg: LightningMessage if sender() == b =>
      // dropped
      log.debug(f"  X-${msg2String(msg)}%-6s--- B")
    case 'reconnect =>
      log.debug("RECONNECTED")
      val dummyInit = Init(Features.empty)
      a ! INPUT_RECONNECTED(self, dummyInit, dummyInit)
      b ! INPUT_RECONNECTED(self, dummyInit, dummyInit)
      context become connected(a, b, Random.nextInt(40))
  }
} 
Example 87
Source File: SmoothFeeProviderSpec.scala    From eclair   with Apache License 2.0 5 votes vote down vote up
package fr.acinq.eclair.blockchain.fee

import org.scalatest.funsuite.AnyFunSuite

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._
import scala.concurrent.{Await, Future}


class SmoothFeeProviderSpec extends AnyFunSuite {
  test("smooth fee rates") {
    val rates = Array(
      FeeratesPerKB(100, 200, 300, 400, 500, 600, 650),
      FeeratesPerKB(200, 300, 400, 500, 600, 700, 750),
      FeeratesPerKB(300, 400, 500, 600, 700, 800, 850),
      FeeratesPerKB(300, 400, 500, 600, 700, 800, 850),
      FeeratesPerKB(300, 400, 500, 600, 700, 800, 850)
    )
    val provider = new FeeProvider {
      var index = 0

      override def getFeerates: Future[FeeratesPerKB] = {
        val rate = rates(index)
        index = (index + 1) % rates.length
        Future.successful(rate)
      }
    }

    val smoothProvider = new SmoothFeeProvider(provider, windowSize = 3)
    val f = for {
      rate1 <- smoothProvider.getFeerates
      rate2 <- smoothProvider.getFeerates
      rate3 <- smoothProvider.getFeerates
      rate4 <- smoothProvider.getFeerates
      rate5 <- smoothProvider.getFeerates
    } yield (rate1, rate2, rate3, rate4, rate5)

    val (rate1, rate2, rate3, rate4, rate5) = Await.result(f, 5 seconds)
    assert(rate1 == rates(0))
    assert(rate2 == SmoothFeeProvider.smooth(Seq(rates(0), rates(1))))
    assert(rate3 == SmoothFeeProvider.smooth(Seq(rates(0), rates(1), rates(2))))
    assert(rate3 == FeeratesPerKB(200, 300, 400, 500, 600, 700, 750))
    assert(rate4 == SmoothFeeProvider.smooth(Seq(rates(1), rates(2), rates(3))))
    assert(rate5 == rates(4)) // since the last 3 values are the same
  }
} 
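
The expected values asserted above are just per-field averages over a window of at most windowSize samples. A standalone sketch of that averaging step, using a hypothetical 7-field record rather than eclair's FeeratesPerKB:

object SmoothingSketch {

  // hypothetical 7-field feerate record, mirroring the positional constructor used above
  final case class Rates(a: Long, b: Long, c: Long, d: Long, e: Long, f: Long, g: Long)

  // average each field over the window
  def smooth(window: Seq[Rates]): Rates = {
    def avg(field: Rates => Long): Long = window.map(field).sum / window.size
    Rates(avg(_.a), avg(_.b), avg(_.c), avg(_.d), avg(_.e), avg(_.f), avg(_.g))
  }

  def main(args: Array[String]): Unit = {
    val window = Seq(
      Rates(100, 200, 300, 400, 500, 600, 650),
      Rates(200, 300, 400, 500, 600, 700, 750),
      Rates(300, 400, 500, 600, 700, 800, 850))
    println(smooth(window)) // Rates(200,300,400,500,600,700,750), matching the rate3 assertion
  }
}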
Example 88
Source File: DbFeeProviderSpec.scala    From eclair   with Apache License 2.0 5 votes vote down vote up
package fr.acinq.eclair.blockchain.fee

import akka.util.Timeout
import fr.acinq.eclair.TestConstants
import fr.acinq.eclair.db.sqlite.SqliteFeeratesDb
import org.scalatest.funsuite.AnyFunSuite

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._
import scala.concurrent.{Await, Future}


class DbFeeProviderSpec extends AnyFunSuite {

  val feerates1: FeeratesPerKB = FeeratesPerKB(100, 200, 300, 400, 500, 600, 700)

  test("db fee provider saves feerates in database") {
    val sqlite = TestConstants.sqliteInMemory()
    val db = new SqliteFeeratesDb(sqlite)
    val provider = new DbFeeProvider(db, new ConstantFeeProvider(feerates1))

    assert(db.getFeerates().isEmpty)
    assert(Await.result(provider.getFeerates, Timeout(30 seconds).duration) == feerates1)
    assert(db.getFeerates().get == feerates1)
  }
} 
Example 89
Source File: SecurityController.scala    From cluster-broccoli   with Apache License 2.0 5 votes vote down vote up
package de.frosner.broccoli.controllers

import javax.inject.Inject

import cats.data.{EitherT, OptionT}
import cats.instances.future._
import cats.syntax.either._
import com.mohiva.play.silhouette.api.util.Credentials
import com.mohiva.play.silhouette.impl.providers.CredentialsProvider
import de.frosner.broccoli.services.{SecurityService, WebSocketService}
import jp.t2v.lab.play2.auth.{BroccoliSimpleAuthorization, LoginLogout}
import play.api.{Environment, Logger}
import play.api.cache.CacheApi
import play.api.data.Forms._
import play.api.data._
import play.api.libs.json.Json
import play.api.mvc.{Action, AnyContent, Controller, Results}

import scala.concurrent.Future

case class SecurityController @Inject()(
    override val securityService: SecurityService,
    override val cacheApi: CacheApi,
    override val playEnv: Environment,
    webSocketService: WebSocketService
) extends Controller
    with LoginLogout
    with BroccoliSimpleAuthorization {

  private val log = Logger(getClass)

  import scala.concurrent.ExecutionContext.Implicits.global

  // https://www.playframework.com/documentation/2.5.x/ScalaForms
  val loginForm = Form {
    mapping(
      SecurityController.UsernameFormKey -> text,
      SecurityController.PasswordFormKey -> text
    )(Credentials.apply)(Credentials.unapply)
  }

  def login: Action[AnyContent] = Action.async { implicit request =>
    getSessionId(request).map(id => (id, webSocketService.closeConnections(id))) match {
      case Some((id, true)) => log.info(s"Removing websocket connection of $id due to another login")
      case _                =>
    }
    (for {
      credentials <- EitherT.fromEither[Future](
        loginForm.bindFromRequest().fold(Function.const(Results.BadRequest.asLeft), _.asRight))
      login <- OptionT(securityService.authenticate(credentials)).toRight(Results.Unauthorized)
      result <- EitherT.right(gotoLoginSucceeded(login.providerKey))
      user <- OptionT(resolveUser(login.providerKey)).toRight(Results.Unauthorized)
    } yield {
      val userResult = Results.Ok(Json.toJson(user))
      result.copy(
        header = result.header.copy(
          headers = userResult.header.headers
            .get("Content-Type")
            .map { contentType =>
              result.header.headers.updated("Content-Type", contentType)
            }
            .getOrElse(result.header.headers)
        ),
        body = userResult.body
      )
    }).merge
  }

  def logout = Action.async(parse.empty) { implicit request =>
    gotoLogoutSucceeded.andThen {
      case tryResult =>
        getSessionId(request).map(id => (id, webSocketService.closeConnections(id))) match {
          case Some((id, true))  => log.info(s"Removing websocket connection of $id due to logout")
          case Some((id, false)) => log.info(s"There was no websocket connection for session $id")
          case None              => log.info(s"No session available to logout from")
        }
    }
  }

  def verify = StackAction(parse.empty) { implicit request =>
    Ok(loggedIn.name)
  }

}

object SecurityController {

  val UsernameFormKey = "username"
  val PasswordFormKey = "password"

} 
Example 90
Source File: BroccoliWebsocketSecurity.scala    From cluster-broccoli   with Apache License 2.0 5 votes vote down vote up
package jp.t2v.lab.play2.auth

import de.frosner.broccoli.auth.Account.anonymous
import de.frosner.broccoli.auth.AuthMode
import de.frosner.broccoli.conf
import de.frosner.broccoli.controllers.AuthConfigImpl
import de.frosner.broccoli.services.SecurityService
import jp.t2v.lab.play2.stackc.{RequestAttributeKey, RequestWithAttributes}
import play.api.Logger
import play.api.libs.iteratee.{Enumerator, Iteratee}
import play.api.mvc._

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future

trait BroccoliWebsocketSecurity extends AsyncAuth with AuthConfigImpl {

  self: Controller with AuthConfig =>

  protected def log: Logger

  val securityService: SecurityService

  private[auth] case object AuthKey extends RequestAttributeKey[User]

  def withSecurity[A](req: RequestHeader)(
      f: (Option[AuthenticityToken], User, RequestHeader) => (Iteratee[A, _], Enumerator[A]))
    : Future[Either[Result, (Iteratee[A, _], Enumerator[A])]] =
    securityService.authMode match {
      case AuthMode.Conf =>
        val maybeToken = extractToken(req)
        val tokenString = maybeToken.getOrElse("<session ID missing>")
        val maybeUser = restoreUser(req, scala.concurrent.ExecutionContext.Implicits.global)
        maybeUser
          .recover {
            case exception =>
              log.info(s"Authenticating the following session failed (session probably outdated): $tokenString") // TODO log level
              (None, identity[Result] _) // don't follow IntelliJ's recommendation here!
          }
          .flatMap {
            // TODO do we need the updater here? can we even use cookies or should we authenticate for each new WS connection?
            case (Some(user), updater) =>
              log.info(s"Successfully authenticated session $tokenString of $user") // TODO log level
              Future.successful(Right(f(maybeToken, user, req)))
            case (None, _) =>
              log.info(s"Websocket to ${req.remoteAddress} not established because of missing authentication") // TODO log level
              authenticationFailed(req).map(result => Left(result))
          }
      case AuthMode.None =>
        Future.successful(Right(f(None, anonymous, req)))
    }

  implicit def loggedIn(implicit req: RequestWithAttributes[_]): User = securityService.authMode match {
    case AuthMode.Conf => req.get(AuthKey).get
    case AuthMode.None => anonymous
  }

} 
Example 91
Source File: NomadServiceSpec.scala    From cluster-broccoli   with Apache License 2.0 5 votes vote down vote up
package de.frosner.broccoli.services

import java.util.concurrent.TimeUnit

import de.frosner.broccoli.controllers.ServiceMocks
import de.frosner.broccoli.nomad.NomadConfiguration
import de.frosner.broccoli.nomad.models.Job.jobFormat
import de.frosner.broccoli.nomad.models._
import org.mockito.Mockito._
import org.specs2.mutable.Specification
import play.api.libs.json._
import play.api.mvc._
import play.api.routing.sird._
import play.api.test._
import play.core.server.Server
import squants.information.InformationConversions._
import squants.time.FrequencyConversions._

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.{Await, Future}
import scala.concurrent.duration.Duration

class NomadServiceSpec extends Specification with ServiceMocks {

  sequential

  "Requesting services for specific job" should {

    "ask consul for the services that nomad returns" in {
      val service = Service("my-service")
      val resources = Resources(
        shapeless.tag[Resources.CPU](20.megahertz),
        shapeless.tag[Resources.Memory](1.gigabytes)
      )
      val task = Task(shapeless.tag[Task.Name]("foo"), resources, Some(Seq(service)))
      val taskGroup = TaskGroup(Seq(task))
      val job = Job(Seq(taskGroup))
      val jobId = "my-job"
      Server.withRouter() {
        case GET(p"/v1/job/my-job") =>
          Action {
            Results.Ok(Json.toJson(job))
          }
      } { implicit port =>
        WsTestClient.withClient { client =>
          val configuration = NomadConfiguration(url = s"http://localhost:$port")
          val nomadService = new NomadService(configuration, client)
          val result = Await.result(nomadService.requestServices(jobId), Duration(5, TimeUnit.SECONDS))
          result === Seq(service.name)
        }
      }
    }

    "not explode when receiving tasks without services" in {
      val service1 = Service("my-service")
      val resources = Resources(
        shapeless.tag[Resources.CPU](100.megahertz),
        shapeless.tag[Resources.Memory](100.megabytes)
      )
      val task1 = Task(shapeless.tag[Task.Name]("foo1"), resources, Some(Seq(service1)))
      val task2 = Task(shapeless.tag[Task.Name]("foo2"), resources, None)
      val taskGroup1 = TaskGroup(Seq(task1))
      val taskGroup2 = TaskGroup(Seq(task2))
      val job = Job(Seq(taskGroup1, taskGroup2))
      val jobId = "my-job"
      Server.withRouter() {
        case GET(p"/v1/job/my-job") =>
          Action {
            Results.Ok(Json.toJson(job))
          }
      } { implicit port =>
        WsTestClient.withClient { client =>
          val configuration = NomadConfiguration(url = s"http://localhost:$port")
          val nomadService = new NomadService(configuration, client)
          val result = Await.result(nomadService.requestServices(jobId), Duration(5, TimeUnit.SECONDS))
          result === Seq(service1.name)
        }
      }
    }

  }

} 
Example 92
Source File: WsTestUtil.scala    From cluster-broccoli   with Apache License 2.0 5 votes vote down vote up
package de.frosner.broccoli.controllers

import java.util.concurrent.TimeUnit

import de.frosner.broccoli.services.WebSocketService.Msg
import play.api.libs.iteratee.{Enumerator, Input, Iteratee}
import play.api.mvc.Result
import play.api.test.WithApplication

import scala.concurrent.{Await, Future}
import scala.concurrent.duration.FiniteDuration
import scala.concurrent.ExecutionContext.Implicits.global

object WsTestUtil {

  private val waitTime = FiniteDuration(1, TimeUnit.SECONDS)

  case class Incoming(iteratee: Iteratee[Msg, _]) {

    def feed(message: Msg): Incoming =
      Incoming(Await.result(iteratee.feed(Input.El(message)), waitTime))

    def end: Incoming =
      Incoming(Await.result(iteratee.feed(Input.EOF), waitTime))

  }

  case class Outgoing(enum: Enumerator[Msg]) {
    private val messages = enum(Iteratee.fold(List[Msg]()) { (l, jsValue) =>
      jsValue :: l
    }).flatMap(_.run)

    def get: List[Msg] =
      Await.result(messages, waitTime)
  }

  def wrapConnection(connection: => Future[Either[Result, (Iteratee[Msg, _], Enumerator[Msg])]])
    : Either[Result, (Incoming, Outgoing)] = {
    val future = connection.map {
      _.right.map {
        case (iteratee, enumerator) => (Incoming(iteratee), Outgoing(enumerator))
      }
    }
    Await.result(future, waitTime)
  }

} 
Example 93
Source File: MasterSlaveClientExample.scala    From scala-commons   with MIT License 5 votes vote down vote up
package com.avsystem.commons
package redis.examples

import akka.actor.ActorSystem
import com.avsystem.commons.redis._

// Global execution context is used for the sake of simplicity of this example,
// think well if this is what you actually want.
import scala.concurrent.ExecutionContext.Implicits.global


object MasterSlaveClientExample extends App {
  // The driver is implemented using Akka IO, so we need actor system
  implicit val actorSystem: ActorSystem = ActorSystem()
  // The client is the object that actually talks to Redis, but does not expose Redis API
  // Assuming Redis Sentinel is working locally on its default port, 26379
  val client = new RedisMasterSlaveClient("mymaster")
  // API object exposes API to access individual Redis commands. The API variant we're using here is:
  // - node level: commands specific to single Redis connection are excluded (e.g. CLIENT SETNAME)
  // - asynchronous: API object uses the client to actually execute commands and returns results as Futures
  // - stringly-typed: Redis keys, hash keys and values used in method signatures are typed as Strings
  val api = RedisApi.Node.Async.StringTyped(client)

  // execute GET method and register callback
  api.get("key").onComplete {
    case Success(Opt(value)) => println(s"Got value $value")
    case Success(Opt.Empty) => println(s"Got no value")
    case Failure(t) => t.printStackTrace()
  }
} 
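
The comment above points out that the global execution context is only used for simplicity. If a dedicated pool is preferred, it can be supplied implicitly instead of importing Implicits.global; a minimal, standard-library-only sketch (the pool size of 4 is arbitrary):

import java.util.concurrent.Executors

import scala.concurrent.{ExecutionContext, ExecutionContextExecutorService}

object DedicatedEc {

  // replaces scala.concurrent.ExecutionContext.Implicits.global within this scope
  implicit val ec: ExecutionContextExecutorService =
    ExecutionContext.fromExecutorService(Executors.newFixedThreadPool(4))

  // callbacks such as api.get("key").onComplete { ... } registered in this scope
  // now run on the dedicated pool; release the threads when done
  def shutdown(): Unit = ec.shutdown()
}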
Example 94
Source File: ConnectionClientExample.scala    From scala-commons   with MIT License 5 votes vote down vote up
package com.avsystem.commons
package redis.examples

import akka.actor.ActorSystem
import com.avsystem.commons.redis._

// Global execution context is used for the sake of simplicity of this example,
// think well if this is what you actually want.
import scala.concurrent.ExecutionContext.Implicits.global


object ConnectionClientExample extends App {
  implicit val actorSystem: ActorSystem = ActorSystem()

  // Connection client only uses a single, non-reconnectable connection
  val client = new RedisConnectionClient
  // but exposes API to manipulate that connection
  val api = RedisApi.Connection.Async.StringTyped(client)

  // for example, we can execute CLIENT GETNAME, which we cannot execute using RedisNodeClient
  api.clientGetname.onComplete {
    case Success(Opt(name)) => println(s"Connection name is $name")
    case Success(Opt.Empty) => println("No connection name")
    case Failure(t) => t.printStackTrace()
  }
} 
Example 95
Source File: NodeClientExample.scala    From scala-commons   with MIT License 5 votes vote down vote up
package com.avsystem.commons
package redis.examples

import akka.actor.ActorSystem
import com.avsystem.commons.redis._

// Global execution context is used for the sake of simplicity of this example,
// think well if this is what you actually want.
import scala.concurrent.ExecutionContext.Implicits.global


object NodeClientExample extends App {
  // The driver is implemented using Akka IO, so we need actor system
  implicit val actorSystem: ActorSystem = ActorSystem()
  // The client is the object that actually talks to Redis, but does not expose Redis API
  val client = new RedisNodeClient
  // API object exposes API to access individual Redis commands. The API variant we're using here is:
  // - node level: commands specific to single Redis connection are excluded (e.g. CLIENT SETNAME)
  // - asynchronous: API object uses the client to actually execute commands and returns results as Futures
  // - stringly-typed: Redis keys, hash keys and values used in method signatures are typed as Strings
  val api = RedisApi.Node.Async.StringTyped(client)

  // execute GET method and register callback
  api.get("key").onComplete {
    case Success(Opt(value)) => println(s"Got value $value")
    case Success(Opt.Empty) => println(s"Got no value")
    case Failure(t) => t.printStackTrace()
  }
} 
Example 96
Source File: ClusterClientExample.scala    From scala-commons   with MIT License 5 votes vote down vote up
package com.avsystem.commons
package redis.examples

import akka.actor.ActorSystem
import com.avsystem.commons.redis._

// Global execution context is used for the sake of simplicity of this example,
// think well if this is what you actually want.
import scala.concurrent.ExecutionContext.Implicits.global


object ClusterClientExample extends App {
  implicit val actorSystem: ActorSystem = ActorSystem()

  // The cluster client asks seed nodes about cluster state (by default local Redis instance is the only seed node)
  // and then uses separate RedisNodeClients to connect individually to every master mentioned in cluster state
  // that holds some data.
  val client = new RedisClusterClient
  // We can only execute keyed commands on cluster deployment
  val api = RedisApi.Keyed.Async.StringTyped(client)

  // Appropriate master node is automatically chosen for execution based on hash of the key and current cluster slot mapping
  api.get("key").onComplete {
    case Success(Opt(value)) => println(s"Got value $value")
    case Success(Opt.Empty) => println(s"Got no value")
    case Failure(t) => t.printStackTrace()
  }
} 
Example 97
Source File: ForkJoinCreation.scala    From effpi   with MIT License 5 votes vote down vote up
// Effpi - verified message-passing programs in Dotty
// Copyright 2019 Alceste Scalas and Elias Benussi
// Released under the MIT License: https://opensource.org/licenses/MIT
package effpi.benchmarks.effpi

import scala.concurrent.duration.Duration
import scala.concurrent.{ Future, Promise, Await }
import scala.concurrent.ExecutionContext.Implicits.global

import effpi.actor.ActorRef
import effpi.actor.dsl._
import effpi.process._
import effpi.process.dsl.{Yielding, pyield, Loop, Rec, rec => prec, loop => ploop}
import effpi.system._

object ForkJoinCreation {

  implicit val timeout: Duration = Duration.Inf

  case class Message(msg: String)

  type SimpleActor = Read[Message, PNil]

  val simpleActor = Behavior[Message, SimpleActor] {
    read { _ =>
      nil
    }
  }

  type Sender = Rec[RecAt, (SendTo[ActorRef[Message], Message] >>: Loop[RecAt] | PNil)]

  def sender(receivers: Array[ActorRef[Message]])(endTimePromise: Promise[Long]) = Behavior[Nothing, Sender] {
    var i = 0
    prec(RecA) {
      if (i < receivers.length) {
        send(receivers(i), Message("Hello, World!")) >> {
          i += 1
          ploop(RecA)
        }
      } else {
        endTimePromise.success(System.nanoTime())
        nil
      }
    }
  }

  def bench(numActors: Int, psC: () => ProcessSystem) = {

    implicit val ps = psC()
    val endTimePromise = Promise[Long]()
    val endTimeFuture = endTimePromise.future

    val startTime = System.nanoTime()

    val simpleActorsRef = (1 to numActors).toArray.map{ _ => Actor.spawn(simpleActor)}
    Actor.spawn(sender(simpleActorsRef)(endTimePromise))(ps)

    val endTime = Await.result(endTimeFuture, Duration.Inf)
    val creationDuration = endTime - startTime
    ps.kill()
    creationDuration
  }

} 
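
The benchmarks in this group all use the same measurement plumbing: a Promise is handed to the code under test, completed with System.nanoTime() at the interesting moment, and the caller blocks on the corresponding Future. A minimal standalone sketch of that hand-off (names are illustrative only):

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.Duration
import scala.concurrent.{Await, Future, Promise}

object PromiseHandOff {

  def main(args: Array[String]): Unit = {
    val done = Promise[Long]()

    // some concurrent work completes the promise exactly once
    Future {
      Thread.sleep(100)
      done.success(System.nanoTime())
    }

    // the caller blocks on the matching future to read the measurement out
    val t = Await.result(done.future, Duration.Inf)
    println(s"work finished at $t ns")
  }
}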
Example 98
Source File: CountingActor.scala    From effpi   with MIT License 5 votes vote down vote up
// Effpi - verified message-passing programs in Dotty
// Copyright 2019 Alceste Scalas and Elias Benussi
// Released under the MIT License: https://opensource.org/licenses/MIT
package effpi.benchmarks.effpi

import scala.concurrent.duration.Duration
import scala.concurrent.{ Future, Promise, Await }
import scala.concurrent.ExecutionContext.Implicits.global

import effpi.actor.ActorRef
import effpi.actor.dsl._
import effpi.process._
import effpi.process.dsl.{Yielding, pyield, Loop, Rec, rec => prec, loop => ploop}
import effpi.system._

object CountingActor {

  implicit val timeout: Duration = Duration.Inf

  sealed trait CounterAction
  object CounterAction {
    final case class Add(num: Int) extends CounterAction
    final case class Cheque(replyTo: ActorRef[Sum]) extends CounterAction
  }

  case class Sum(sum: Int)

  type Counter = Rec[RecAt, Read[CounterAction, (Loop[RecAt] | SendTo[ActorRef[Sum], Sum])]]

  val counter = Behavior[CounterAction, Counter] {
    var counter = 0
    prec(RecA) {
      read {
        case CounterAction.Add(num) =>
          counter += num
          ploop(RecA)
        case CounterAction.Cheque(replyTo) =>
          send(replyTo, Sum(counter))
      }
    }
  }

  type Contributor = Rec[RecAt, SendTo[ActorRef[CounterAction], CounterAction] >>: (Loop[RecAt] | Read[Sum, PNil])]

  def contributor(elemNum: Int, counter: ActorRef[CounterAction])(startTimePromise: Promise[Long], endTimePromise: Promise[Long]) = Behavior[Sum, Contributor] {
    startTimePromise.success(System.nanoTime())
    var num = 0
    prec(RecA) {
      if (num < elemNum) {
        num += 1
        send(counter, CounterAction.Add(num)) >>
          ploop(RecA)
      } else {
        send(counter, CounterAction.Cheque(self)) >>
        read { x: Sum => x match {
            case Sum(sum) =>
              endTimePromise.success(System.nanoTime())
              // println(s"The sum of all numbers is $sum")
              nil
          }}
      }
    }
  }

  def bench(numMessages: Int, psC: () => ProcessSystem): Long = {

    implicit val ps = psC()
    val startTimePromise = Promise[Long]()
    val startTimeFuture = startTimePromise.future

    val endTimePromise = Promise[Long]()
    val endTimeFuture = endTimePromise.future

    val counterRef = Actor.spawn(counter)
    Actor.spawn(contributor(numMessages, counterRef)(startTimePromise, endTimePromise))

    val startTime = Await.result(startTimeFuture, Duration.Inf)
    val endTime = Await.result(endTimeFuture, Duration.Inf)
    val countingDuration = endTime - startTime
    ps.kill()
    countingDuration
  }

} 
Example 99
Source File: ForkJoinThroughput.scala    From effpi   with MIT License 5 votes vote down vote up
// Effpi - verified message-passing programs in Dotty
// Copyright 2019 Alceste Scalas and Elias Benussi
// Released under the MIT License: https://opensource.org/licenses/MIT
package effpi.benchmarks.effpi

import scala.concurrent.duration.Duration
import scala.concurrent.{ Future, Promise }
import scala.concurrent.ExecutionContext.Implicits.global

import effpi.actor.ActorRef
import effpi.actor.dsl._
import effpi.process._
import effpi.process.dsl.{Yielding, pyield, Loop, Rec, rec => prec, loop => ploop}
import effpi.system._

object ForkJoinThroughput {

  implicit val timeout: Duration = Duration.Inf

  case class Message(msg: String)

  sealed abstract class RecAt2[A] extends RecVar[A]("InfiniteActorLoop")
  case object RecA2 extends RecAt2[Unit]

  type SimpleActor = Rec[RecAt, (Read[Message, Loop[RecAt]] | PNil)]

  def simpleActor(maxMsgs: Int) = Behavior[Message, SimpleActor] {
    var count = 0
    prec(RecA) {
      if (count < maxMsgs) {
        read { _: Message =>
          count += 1
          ploop(RecA)
        }
      } else {
        nil
      }
    }
  }

  def bench(param: (Int, Int), psC: () => ProcessSystem) = {

    val (numActors, numMessages) = param
    implicit val ps = psC()
    val startTimePromise = Promise[Long]()
    val startTimeFuture = startTimePromise.future

    val endTimePromise = Promise[Long]()
    val endTimeFuture = endTimePromise.future

    val simpleActorsRef = (1 to numActors).map{ _ => Actor.spawn(simpleActor(numMessages))}

    val startTime = System.nanoTime()
    (1 to numMessages).foreach { n =>
      // println(n)
      simpleActorsRef.foreach { simpleActor =>
        simpleActor.send(Message("Hello World!"))
      }
    }
    val endTime = System.nanoTime()

    val throughputDuration = endTime - startTime
    ps.kill()
    throughputDuration
  }

} 
Example 100
Source File: ForkJoinCreation.scala    From effpi   with MIT License 5 votes vote down vote up
// Effpi - verified message-passing programs in Dotty
// Copyright 2019 Alceste Scalas and Elias Benussi
// Released under the MIT License: https://opensource.org/licenses/MIT
package effpi.benchmarks.akka

import akka.NotUsed
import akka.actor.typed.scaladsl.{ Behaviors, MutableBehavior, ActorContext}
import akka.actor.typed.{ ActorRef, ActorSystem, Behavior, DispatcherSelector, Terminated }

import scala.concurrent.Future
import scala.concurrent.duration._
import scala.concurrent.{ Future, Promise, Await }
import scala.concurrent.ExecutionContext.Implicits.global

object ForkJoinCreation {

  case class Message(msg: String)

  val simpleActor = Behaviors.receive[Message] { (ctx, msg) =>
    Behaviors.stopped
  }

  def mainActor(
    durationPromise: Promise[Long], numActors: Int
  ): Behavior[akka.NotUsed] =
    Behaviors.setup { ctx =>

      val startTime = System.nanoTime()

      val simpleActorRefs = (1 to numActors).toList.map { id =>
        ctx.spawn(simpleActor, "simple" + id)
      }

      simpleActorRefs.foreach { simpleActorRef =>
        simpleActorRef ! Message("Hello World!")
      }

      val endTime = System.nanoTime()

      durationPromise.success(endTime - startTime)
      Behaviors.stopped
    }

  def bench(params: Int): Long = {
    val durationPromise = Promise[Long]()
    val durationFuture = durationPromise.future
    val system = ActorSystem(
      mainActor(durationPromise, params), "ForkJoinCreationDemo")
    Await.result(system.whenTerminated, Duration.Inf)
    val duration = Await.result(durationFuture, Duration.Inf)
    duration
  }

} 
Example 101
Source File: PingPong.scala    From effpi   with MIT License 5 votes vote down vote up
// Effpi - verified message-passing programs in Dotty
// Copyright 2019 Alceste Scalas and Elias Benussi
// Released under the MIT License: https://opensource.org/licenses/MIT
package effpi.benchmarks.akka

import akka.NotUsed
import akka.actor.typed.scaladsl.{ Behaviors, MutableBehavior, ActorContext}
import akka.actor.typed.{ ActorRef, ActorSystem, Behavior, DispatcherSelector, Terminated }
import scala.concurrent.Future
import scala.concurrent.duration._
import scala.concurrent.{ Future, Promise, Await }
import scala.concurrent.ExecutionContext.Implicits.global

object PingPong {

  final case class Ping(iterations: Int, replyTo: ActorRef[Pong])

  case class Pong(iterations: Int, pingTo: ActorRef[Ping])

  val pong = Behaviors.receive[Ping] { (ctx, msg) =>
    msg.replyTo ! Pong(msg.iterations - 1, ctx.self)
    Behaviors.same
  }

  def ping(startTimePromise: Promise[Long], endTimePromise: Promise[Long], expectedIterations: Int) = Behaviors.receive[Pong] { (ctx, pong) =>
    if (pong.iterations == 0) {
      endTimePromise.success(System.nanoTime())
      Behaviors.stopped
    } else {
      if (expectedIterations == pong.iterations) {
        startTimePromise.success(System.nanoTime())
      }
      pong.pingTo ! Ping(pong.iterations, ctx.self)
      Behaviors.same
    }
  }

  def mainActor(
    durationPromise: Promise[Long],
    numPairs: Int,
    numIterations: Int
  ): Behavior[akka.NotUsed] =
    Behaviors.setup { ctx =>

      val (startTimePromises, startTimeFutures): (List[Promise[Long]], List[Future[Long]]) = (1 to numPairs).toList.map { _ =>
        val startTimePromise = Promise[Long]()
        val startTimeFuture = startTimePromise.future

        (startTimePromise, startTimeFuture)
      }.unzip

      val (endTimePromises, endTimeFutures): (List[Promise[Long]], List[Future[Long]]) = (1 to numPairs).toList.map { _ =>
        val endTimePromise = Promise[Long]()
        val endTimeFuture = endTimePromise.future

        (endTimePromise, endTimeFuture)
      }.unzip

      // val refs = (1 to numPairs).toList.map { id =>
      val refs = startTimePromises.zip(endTimePromises).zipWithIndex.map { (promises, id) =>
        val (sPromise, ePromise) = promises
        val pongRef = ctx.spawn(pong, "pong" + id)
        val pingRef = ctx.spawn(ping(sPromise, ePromise, numIterations), "ping" + id)
        ctx.watch(pingRef)
        (pingRef, pongRef)
      }
      refs.foreach { (pingRef, pongRef) => pingRef ! Pong(numIterations, pongRef) }

      val startTimes = Await.result(Future.sequence(startTimeFutures), Duration.Inf)
      val startTime = startTimes.min
      val endTimes = Await.result(Future.sequence(endTimeFutures), Duration.Inf)
      val endTime = endTimes.max
      durationPromise.success(endTime - startTime)
      val pingPongDuration = endTime - startTime

      var terminatedProcesses = 0
      Behaviors.receiveSignal {
        case (_, Terminated(ref)) =>
          terminatedProcesses = terminatedProcesses + 1
          // stop only once every ping/pong pair has terminated
          if (terminatedProcesses == numPairs) {
            Behaviors.stopped
          } else {
            Behaviors.same
          }
        case (_, _) =>
          Behaviors.empty
      }
    }

  def bench(params: (Int, Int)): Long = {
    val (numPairs, numIterations) = params
    val durationPromise = Promise[Long]()
    val durationFuture = durationPromise.future
    val system = ActorSystem(
      mainActor(durationPromise, numPairs, numIterations), "PingPongDemo")
    Await.result(system.whenTerminated, Duration.Inf)
    val duration = Await.result(durationFuture, Duration.Inf)
    duration
  }
} 
Example 102
Source File: CountingActor.scala    From effpi   with MIT License 5 votes vote down vote up
// Effpi - verified message-passing programs in Dotty
// Copyright 2019 Alceste Scalas and Elias Benussi
// Released under the MIT License: https://opensource.org/licenses/MIT
package effpi.benchmarks.akka

import akka.NotUsed
import akka.actor.typed.scaladsl.{ Behaviors, MutableBehavior, ActorContext}
import akka.actor.typed.{ ActorRef, ActorSystem, Behavior, DispatcherSelector, Terminated }

import scala.concurrent.Future
import scala.concurrent.duration._
import scala.concurrent.{ Future, Promise, Await }
import scala.concurrent.ExecutionContext.Implicits.global

object CountingActor {

  sealed trait CounterAction
  object CounterAction {
    final case class Add(num: Int, p: Promise[Int]) extends CounterAction
    final case class Cheque(replyTo: ActorRef[Sum]) extends CounterAction
  }

  case class Sum(sum: Int)

  val counter = Behaviors.setup[CounterAction] { ctx =>
    new MutableCounter(ctx)
  }

  class MutableCounter(
    ctx: ActorContext[CounterAction]
  ) extends MutableBehavior[CounterAction] {
    var counter = 0

    override def onMessage(msg: CounterAction): Behavior[CounterAction] = {
      msg match {
        case CounterAction.Add(num, p) =>
          counter += 1
          p.success(num)
          Behaviors.same
        case CounterAction.Cheque(replyTo) =>
          replyTo ! Sum(counter)
          Behaviors.stopped
      }
    }
  }

  def sink(endTimePromise: Promise[Long]) = Behaviors.receive[Sum] { (ctx, msg) =>
    endTimePromise.success(System.nanoTime())
    Behaviors.stopped
  }

  def mainActor(
    durationPromise: Promise[Long],
    numMessages: Int
  ): Behavior[akka.NotUsed] =
    Behaviors.setup { ctx =>

      val endTimePromise = Promise[Long]()
      val endTimeFuture = endTimePromise.future

      val sinkRef = ctx.spawn(sink(endTimePromise), "sink")
      ctx.watch(sinkRef)
      val counterRef = ctx.spawn(counter, "counter")

      val startTime = System.nanoTime()
      val futs = (1 to numMessages).toList.map { num =>
        val p = Promise[Int]()
        val f = p.future
        counterRef ! CounterAction.Add(num, p)
        f
      }

      Await.result(Future.sequence(futs), Duration.Inf)

      counterRef ! CounterAction.Cheque(sinkRef)

      val endTime = Await.result(endTimeFuture, Duration.Inf)
      val countingDuration = endTime - startTime
      durationPromise.success(countingDuration)

      Behaviors.receiveSignal {
        case (_, Terminated(ref)) =>
          Behaviors.stopped
        case (_, _) =>
          Behaviors.empty
      }
    }

  def bench(params: Int): Long = {
    val durationPromise = Promise[Long]()
    val durationFuture = durationPromise.future
    val system = ActorSystem(
      mainActor(durationPromise, params), "CountingActorDemo")
    Await.result(system.whenTerminated, Duration.Inf)
    val duration = Await.result(durationFuture, Duration.Inf)
    duration
  }
} 
Example 103
Source File: ForkJoinThroughput.scala    From effpi   with MIT License 5 votes vote down vote up
// Effpi - verified message-passing programs in Dotty
// Copyright 2019 Alceste Scalas and Elias Benussi
// Released under the MIT License: https://opensource.org/licenses/MIT
package effpi.benchmarks.akka

import akka.NotUsed
import akka.actor.typed.scaladsl.{ Behaviors, MutableBehavior, ActorContext}
import akka.actor.typed.{ ActorRef, ActorSystem, Behavior, DispatcherSelector, Terminated }
import scala.concurrent.Future
import scala.concurrent.duration._
import scala.concurrent.{ Future, Promise, Await }
import scala.concurrent.ExecutionContext.Implicits.global

object ForkJoinThroughput {

  case class Message(msg: String)

  def receiver(maxMsgs: Int) = Behaviors.setup[Message] { ctx =>
    new MutableSimpleActor(ctx, maxMsgs)
  }

  class MutableSimpleActor(
    ctx: ActorContext[Message],
    maxMsgs: Int
  ) extends MutableBehavior[Message] {
    var count = 0

    override def onMessage(msg: Message): Behavior[Message] = {
      count +=1
      if (count < maxMsgs) {
        Behaviors.same
      } else {
        Behaviors.stopped
      }
    }
  }

  def mainActor(
    durationPromise: Promise[Long],
    numActors: Int,
    numMessages: Int
  ): Behavior[akka.NotUsed] =
    Behaviors.setup { ctx =>

      val receiversRef = (1 to numActors).map{ id => ctx.spawn(receiver(numMessages), "receiver" + id)}

      val startTime = System.nanoTime()

      (1 to numMessages).foreach { n =>
        receiversRef.foreach { simpleActor =>
          simpleActor ! Message("Hello World!")
        }
      }

      val endTime = System.nanoTime()

      durationPromise.success(endTime - startTime)
      Behaviors.stopped
    }

  def bench(params: (Int, Int)): Long = {
    val (numActors, numMessages) = params
    val durationPromise = Promise[Long]()
    val durationFuture = durationPromise.future
    val system = ActorSystem(
      mainActor(durationPromise, numActors, numMessages),
      "ForkJoinCreationDemo")
    Await.result(system.whenTerminated, Duration.Inf)
    val duration = Await.result(durationFuture, Duration.Inf)
    duration
  }
} 
Example 104
Source File: ReconnectingActor.scala    From scastie   with Apache License 2.0 5 votes vote down vote up
package com.olegych.scastie.util

import akka.actor.{Actor, ActorContext, ActorLogging, Cancellable}
import akka.remote.DisassociatedEvent
import com.olegych.scastie.api.ActorConnected

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._

case class ReconnectInfo(serverHostname: String, serverAkkaPort: Int, actorHostname: String, actorAkkaPort: Int)

trait ActorReconnecting extends Actor with ActorLogging {

  private var tryReconnectCallback: Option[Cancellable] = None

  def reconnectInfo: Option[ReconnectInfo]

  def tryConnect(context: ActorContext): Unit

  def onConnected(): Unit = {}

  def onDisconnected(): Unit = {}

  private def setupReconnectCallback(context: ActorContext): Unit = {
    if (reconnectInfo.isDefined) {
      tryReconnectCallback.foreach(_.cancel())
      tryReconnectCallback = Some(
        context.system.scheduler.schedule(0.seconds, 10.seconds) {
          log.info("Reconnecting to server")
          tryConnect(context)
        }
      )
    }
  }

  override def preStart(): Unit =
    try {
      context.system.eventStream.subscribe(self, classOf[DisassociatedEvent])
      setupReconnectCallback(context)
    } finally super.preStart()

  val reconnectBehavior: Receive = {
    case ActorConnected =>
      log.info("Connected to server")
      tryReconnectCallback.foreach(_.cancel())
      tryReconnectCallback = None
      onConnected()

    case ev: DisassociatedEvent => {
      println("DisassociatedEvent " + ev)

      val isServerHostname =
        reconnectInfo
          .map(info => ev.remoteAddress.host.contains(info.serverHostname))
          .getOrElse(false)

      val isServerAkkaPort =
        reconnectInfo
          .map(info => ev.remoteAddress.port.contains(info.serverAkkaPort))
          .getOrElse(false)

      if (isServerHostname && isServerAkkaPort && ev.inbound) {
        log.warning("Disconnected from server")
        onDisconnected()
        setupReconnectCallback(context)
      }
    }
  }
} 
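
A hypothetical actor mixing in the ActorReconnecting trait above; the remote actor path, the "dispatcher" name and the handshake message are placeholders, not scastie's actual wiring.

import akka.actor.ActorContext
import com.olegych.scastie.util.{ActorReconnecting, ReconnectInfo}

class WorkerClient(info: ReconnectInfo) extends ActorReconnecting {

  override def reconnectInfo: Option[ReconnectInfo] = Some(info)

  // placeholder: greet a (hypothetical) dispatcher actor on the server
  override def tryConnect(context: ActorContext): Unit = {
    val path = s"akka.tcp://Server@${info.serverHostname}:${info.serverAkkaPort}/user/dispatcher"
    context.actorSelection(path) ! "hello"
  }

  override def onConnected(): Unit = log.info("connected, ready to receive work")

  // reconnectBehavior handles ActorConnected and DisassociatedEvent, the rest is ours
  override def receive: Receive = reconnectBehavior orElse {
    case msg => log.info(s"got $msg")
  }
}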
Example 105
Source File: RestResource.scala    From pizza-auth-3   with MIT License 5 votes vote down vote up
package moe.pizza.auth.webapp.rest

import moe.pizza.auth.config.ConfigFile.ConfigFile
import moe.pizza.auth.graphdb.EveMapDb
import moe.pizza.auth.interfaces.{PilotGrader, UserDatabase, BroadcastService}
import BroadcastService._
import moe.pizza.auth.tasks.Update
import moe.pizza.crestapi.CrestApi
import org.http4s.{HttpService, _}
import org.http4s.dsl.{Root, _}
import org.http4s.server._
import org.http4s.server.staticcontent.ResourceService
import org.http4s.server.syntax.ServiceOps
import org.joda.time.DateTime
import play.twirl.api.Html
import moe.pizza.eveapi._
import scala.concurrent.ExecutionContext.Implicits.global
import org.http4s.twirl._
import scala.concurrent.Future
import scala.util.Try
import scalaz._
import Scalaz._
import scala.util.{Success => TSuccess}
import scala.util.{Failure => TFailure}
import scala.concurrent.duration._
import scala.concurrent.Await
import moe.pizza.crestapi.character.location.Types.Location
import org.slf4j.LoggerFactory
import io.circe.generic.auto._
import io.circe.syntax._
import org.http4s.circe._
import io.circe.Json
import io.circe.generic.JsonCodec

class RestResource(fullconfig: ConfigFile,
                   graders: PilotGrader,
                   portnumber: Int = 9021,
                   ud: UserDatabase,
                   crestapi: Option[CrestApi] = None,
                   eve: Option[EVEAPI] = None,
                   mapper: Option[EveMapDb] = None,
                   updater: Option[Update] = None,
                   broadcasters: List[BroadcastService] =
                     List.empty[BroadcastService]) {

  case class ApiError(`type`: String, message: String)

  case class PingRequest(message: String, from: String, to: String)
  case class PingResponse(total: Int)

  def resource = HttpService {

    case req @ GET -> Root / "api" / "v1" / "ping" / "group" / group => {
      req.decode[Json] { p =>
        p.as[PingRequest] match {
          case Left(failure) =>
            BadRequest(ApiError(
              "bad_post_body",
              "Unable to process your post body, please format it correctly").asJson)
          case Right(pingreq) =>
            val users = ud.getUsers(s"authgroup=${group}")
            val templatedMessage = templates.txt.broadcast(pingreq.message,
                                                           pingreq.to,
                                                           pingreq.from,
                                                           DateTime.now())
            val sendreqs =
              ud.sendGroupAnnouncement(broadcasters,
                                       templatedMessage.toString(),
                                       pingreq.from,
                                       users)
            val r = Await.result(Future.sequence(sendreqs), 2 seconds).sum
            Ok(PingResponse(r).asJson)
          }
      }
    }

  }

} 
Example 106
Source File: XmppBroadcastService.scala    From pizza-auth-3   with MIT License 5 votes vote down vote up
package moe.pizza.auth.adapters

import moe.pizza.auth.bots.XmppBot
import moe.pizza.auth.interfaces.BroadcastService
import moe.pizza.auth.models.Pilot

import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global

class XmppBroadcastService(host: String, password: String)
    extends BroadcastService {

  def getJabberServer(u: Pilot): String = u.accountStatus match {
    case Pilot.Status.internal => host
    case Pilot.Status.ally => s"allies.$host"
    case Pilot.Status.ineligible => s"public.$host"
    case _ => "none"
  }

  val xmppbot = new XmppBot
  xmppbot.connect("pingbot", host, password)

  override def sendAnnouncement(msg: String,
                                from: String,
                                to: List[Pilot]): Future[Int] = {
    Future {
      to.map { p =>
        xmppbot.sendMessage(s"${p.uid}@${getJabberServer(p)}", msg)
        1
      }.sum
    }
  }
} 
Example 107
Source File: WebappTestSupports.scala    From pizza-auth-3   with MIT License 5 votes vote down vote up
package moe.pizza.auth.webapp

import java.net.{Socket, InetSocketAddress, ServerSocket}

import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.dataformat.yaml.YAMLFactory
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import moe.pizza.auth.config.ConfigFile.ConfigFile

import scala.concurrent.{Future, Await}
import scala.io.Source
import scala.util.Try
import scala.concurrent.duration._
import scala.concurrent.ExecutionContext.Implicits.global


object WebappTestSupports {
  val OM = new ObjectMapper(new YAMLFactory())
  OM.registerModule(DefaultScalaModule)

  def readTestConfig(): ConfigFile = {
    val config = Source
      .fromURL(getClass.getResource("/config.yml"))
      .getLines()
      .mkString("\n")
    val conf = OM.readValue[ConfigFile](config, classOf[ConfigFile])
    conf
  }

} 
Example 108
Source File: AjaxClient.scala    From scalajs-spa-tutorial   with Apache License 2.0 5 votes vote down vote up
package spatutorial.client.services

import java.nio.ByteBuffer

import boopickle.Default._
import org.scalajs.dom

import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global
import scala.scalajs.js.typedarray._

object AjaxClient extends autowire.Client[ByteBuffer, Pickler, Pickler] {
  override def doCall(req: Request): Future[ByteBuffer] = {
    dom.ext.Ajax.post(
      url = "/api/" + req.path.mkString("/"),
      data = Pickle.intoBytes(req.args),
      responseType = "arraybuffer",
      headers = Map("Content-Type" -> "application/octet-stream")
    ).map(r => TypedArrayBuffer.wrap(r.response.asInstanceOf[ArrayBuffer]))
  }

  override def read[Result: Pickler](p: ByteBuffer) = Unpickle[Result].fromBytes(p)
  override def write[Result: Pickler](r: Result) = Pickle.intoBytes(r)
} 
Example 109
Source File: Application.scala    From scalajs-spa-tutorial   with Apache License 2.0 5 votes vote down vote up
package controllers

import java.nio.ByteBuffer

import boopickle.Default._
import com.google.inject.Inject
import play.api.{Configuration, Environment}
import play.api.mvc._
import services.ApiService
import spatutorial.shared.Api

import scala.concurrent.ExecutionContext.Implicits.global

object Router extends autowire.Server[ByteBuffer, Pickler, Pickler] {
  override def read[R: Pickler](p: ByteBuffer) = Unpickle[R].fromBytes(p)
  override def write[R: Pickler](r: R) = Pickle.intoBytes(r)
}

class Application @Inject() (implicit val config: Configuration, env: Environment) extends Controller {
  val apiService = new ApiService()

  def index = Action {
    Ok(views.html.index("SPA tutorial"))
  }

  def autowireApi(path: String) = Action.async(parse.raw) {
    implicit request =>
      println(s"Request path: $path")

      // get the request body as ByteString
      val b = request.body.asBytes(parse.UNLIMITED).get

      // call Autowire route
      Router.route[Api](apiService)(
        autowire.Core.Request(path.split("/"), Unpickle[Map[String, ByteBuffer]].fromBytes(b.asByteBuffer))
      ).map(buffer => {
        val data = Array.ofDim[Byte](buffer.remaining())
        buffer.get(data)
        Ok(data)
      })
  }

  def logging = Action(parse.anyContent) {
    implicit request =>
      request.body.asJson.foreach { msg =>
        println(s"CLIENT - $msg")
      }
      Ok("")
  }
} 
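
The AjaxClient from the previous example and the Router above meet in a trait that is cross-compiled for the JVM and Scala.js. A hypothetical minimal version of that shared trait (the real tutorial's Api has more methods):

package spatutorial.shared

// hypothetical minimal shared API, visible to both the Play controller and the Scala.js client
trait Api {
  def welcomeMsg(name: String): String
}

On the client, autowire's call() macro then serialises the arguments with boopickle and routes the request through AjaxClient.doCall as a POST under /api/:

import autowire._
import scala.concurrent.ExecutionContext.Implicits.global

import spatutorial.client.services.AjaxClient
import spatutorial.shared.Api

object ClientUsage {
  def demo(): Unit =
    AjaxClient[Api].welcomeMsg("SPA user").call().foreach(println)
}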
Example 110
Source File: LegalModal.scala    From ProductWebUI   with Apache License 2.0 5 votes vote down vote up
package client.modals

import shared.models.{ EmailValidationModel, UserModel }
import client.LGMain.{ Loc }
import org.scalajs.dom._
import scala.util.{ Failure, Success }
import scalacss.ScalaCssReact._
import japgolly.scalajs.react._
import japgolly.scalajs.react.vdom.prefix_<^._
import client.components.Bootstrap._
import client.components._
import client.css._
import scala.concurrent.ExecutionContext.Implicits.global
import scala.language.reflectiveCalls
import org.querki.jquery._

object LegalModal { //TodoForm
  @inline private def bss = GlobalStyles.bootstrapStyles

  case class Props(submitHandler: (Boolean, Boolean, Boolean, Boolean, Boolean, Boolean) => Callback)
  case class State(legal: Boolean = false, showPrivacyPolicyModal: Boolean = false,
    showTermsOfServicesForm: Boolean = false, showEndUserAgreementModal: Boolean = false, showTrademarksModal: Boolean = false, showCopyrightModal: Boolean = false)

  class Backend(t: BackendScope[Props, State]) {
    def submitForm(e: ReactEventI) = {
      e.preventDefault()
      t.modState(s => s.copy(legal = true))
    }

    def hide = {
      console.log("hide")
      // instruct Bootstrap to hide the modal
      $(t.getDOMNode()).modal("hide")
    }

    def showPrivacyPolicy(e: ReactEventI) = {
      console.log("in showPrivacyPolicy ")
      t.modState(s => s.copy(showPrivacyPolicyModal = true))
    }
    def showTrademarks(e: ReactEventI) = {
      console.log("in tradeMarks ")
      t.modState(s => s.copy(showTrademarksModal = true))
    }
    def showCopyright(e: ReactEventI) = {
      console.log("in tradeMarks ")
      t.modState(s => s.copy(showCopyrightModal = true))
    }
    def showEndUserAgreement(e: ReactEventI) = {
      t.modState(s => s.copy(showEndUserAgreementModal = true))
    }
    def showTermsOfServices(e: ReactEventI) = {
      t.modState(s => s.copy(showTermsOfServicesForm = true))
    }

    def formClosed(state: State, props: Props): Callback = {
      // call parent handler with the new item and whether form was OK or cancelled
      //println("form closed")
//      println("state.showTrademarksModal : " + state.showTrademarksModal)
      props.submitHandler(state.legal, state.showPrivacyPolicyModal, state.showTermsOfServicesForm, state.showEndUserAgreementModal, state.showTrademarksModal, state.showCopyrightModal)
    }
    def render(s: State, p: Props) = {
      // log.debug(s"User is ${if (s.item.id == "") "adding" else "editing"} a todo")
      val headerText = "Legal"
      Modal(
        Modal.Props(
          // header contains a cancel button (X)
          header = hide => <.span(<.button(^.tpe := "button", bss.close, ^.onClick --> hide, Icon.close), <.div(DashBoardCSS.Style.modalHeaderText)(headerText)),
          closed = () => formClosed(s, p)
        ),
        <.form(^.onSubmit ==> submitForm)(
          <.div(^.className := "row", DashBoardCSS.Style.MarginLeftchkproduct)(
            <.ul()(
              <.li()(<.button(^.tpe := "button", ^.className := "btn",DashBoardCSS.Style.btnDefault, FooterCSS.Style.legalModalBtn, "Privacy Policy", ^.onClick ==> showPrivacyPolicy)),
              <.li()(<.button(^.tpe := "button", ^.className := "btn",DashBoardCSS.Style.btnDefault, FooterCSS.Style.legalModalBtn, "End User Agreement", ^.onClick ==> showEndUserAgreement)),
              <.li()(<.button(^.tpe := "button", ^.className := "btn-link",DashBoardCSS.Style.btnDefault, FooterCSS.Style.legalModalBtn, "Terms of Service", ^.onClick ==> showTermsOfServices)),
              <.li()(<.button(^.tpe := "button", ^.className := "btn",DashBoardCSS.Style.btnDefault, FooterCSS.Style.legalModalBtn, "Trademarks and Credits", ^.onClick ==> showTrademarks)),
              <.li()(<.button(^.tpe := "button", ^.className := "btn",DashBoardCSS.Style.btnDefault, FooterCSS.Style.legalModalBtn, "Copyright", ^.onClick ==> showCopyright))
            )
          ),
          <.div(bss.modal.footer, DashBoardCSS.Style.marginTop5p, DashBoardCSS.Style.marginLeftRight)()
        ) //submitform
      )

    }
  }
  private val component = ReactComponentB[Props]("LegalModal")
    .initialState_P(p => State())
    .renderBackend[Backend]
    .componentDidUpdate(scope => Callback {
      if (scope.currentState.legal || scope.currentState.showPrivacyPolicyModal || scope.currentState.showTermsOfServicesForm || scope.currentState.showEndUserAgreementModal
        || scope.currentState.showTrademarksModal || scope.currentState.showCopyrightModal) {
        scope.$.backend.hide
      }
    })
    .build
  def apply(props: Props) = component(props)
} 
Example 111
Source File: ConnectionsUtils.scala    From ProductWebUI   with Apache License 2.0 5 votes vote down vote up
package client.utils

import client.components.{ConnectionsSelectize, LabelsSelectize}
import client.handler._
import client.modules.AppModule
import client.services.{CoreApi, LGCircuit}
import shared.dtos._
import shared.models._
import org.scalajs.dom._
import client.sessionitems.SessionItems.{MessagesViewItems, ProfilesViewItems, ProjectsViewItems}

import scala.concurrent.ExecutionContext.Implicits.global
import scala.scalajs.js.timers._
import scala.util.{Failure, Success}
import diode.AnyAction._
import client.sessionitems.SessionItems

import scala.scalajs.js.JSON

//scalastyle:off
object ConnectionsUtils {

  def getCnxnForReq(cnxn: Seq[Connection], viewName: String): Seq[Connection] = {
    if (cnxn.isEmpty) {
      LGCircuit.zoom(_.connections.connectionsResponse).value.map(_.connection) ++ Seq(getSelfConnnection(viewName))
    } else {
      cnxn ++ Seq(getSelfConnnection(viewName))
    }
  }

  def getNameImgFromJson(jsonBlob: String): (String, String) = {
    val json = JSON.parse(jsonBlob)
    val name = json.name.asInstanceOf[String]
    val imgSrc = if (jsonBlob.contains("imgSrc")) json.imgSrc.asInstanceOf[String] else ""
    (name, imgSrc)
  }

  def getCnxnFromRes(cnxn: ConnectionProfileResponse): ConnectionsModel = {
    val (name, imgSrc) = getNameImgFromJson(cnxn.jsonBlob)
    ConnectionsModel(cnxn.sessionURI, cnxn.connection,
      name, imgSrc)
  }

  def getConnectionsModel(response: String): Seq[ConnectionsModel] = {

    try {
      val connections = upickle.default.read[Seq[ApiResponse[ConnectionProfileResponse]]](response)
      connections.map(e => getCnxnFromRes(e.content))
        .sortBy(_.name)
    } catch {
      case e:Exception =>
        Nil
    }
  }

  def getCnxnFromNot(cnxn: ConnectNotification): ConnectionsModel = {
    val (name, imgSrc) = ConnectionsUtils.getNameImgFromJson(cnxn.introProfile)
    ConnectionsModel("", cnxn.connection, name, imgSrc)
  }

  // #todo think about better structure for the label prolog
  //

} 
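Note: getNameImgFromJson above relies on Scala.js dynamic typing: JSON.parse returns a js.Dynamic, so fields are read by name and cast back into typed Scala at the boundary. A stripped-down sketch of that access pattern (Scala.js only; the field name mirrors the example):

import scala.scalajs.js.JSON

object JsonBlobSketch {
  // Reads the `name` field out of a JSON blob, mirroring getNameImgFromJson above.
  def nameFrom(jsonBlob: String): String = {
    val parsed = JSON.parse(jsonBlob)   // js.Dynamic: members are resolved at runtime
    parsed.name.asInstanceOf[String]    // cast at the edge back into typed Scala code
  }
}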
Example 112
Source File: MessagesHandler.scala    From ProductWebUI   with Apache License 2.0 5 votes vote down vote up
package client.handler

import client.modules.AppModule
import diode._
import diode.data._
import shared.models.MessagePost
import client.rootmodel.MessagesRootModel
import client.logger
import client.services.{CoreApi, LGCircuit}
import diode.util.{Retry, RetryPolicy}
import client.utils.{AppUtils, ConnectionsUtils, ContentUtils}
import org.widok.moment.Moment
import shared.dtos.{CancelSubscribeRequest, Expression, ExpressionContent, SubscribeRequest}

import scala.concurrent.ExecutionContext.Implicits.global
import scala.util.{Failure, Success, Try}

// scalastyle:off
case class RefreshMessages(potResult: Pot[MessagesRootModel] = Empty, retryPolicy: RetryPolicy = Retry(5))
  extends PotActionRetriable[MessagesRootModel, RefreshMessages] {
  override def next(value: Pot[MessagesRootModel], newRetryPolicy: RetryPolicy): RefreshMessages = RefreshMessages(value, newRetryPolicy)
}

case class ClearMessages()


class MessagesHandler[M](modelRW: ModelRW[M, Pot[MessagesRootModel]]) extends ActionHandler(modelRW) {
  //  var labelFamily = LabelsUtils.getLabelProlog(Nil)

  override def handle: PartialFunction[Any, ActionResult[M]] = {

    case action: RefreshMessages =>
      val updateF = action.effectWithRetry {
        CoreApi.sessionPing(LGCircuit.zoom(_.session.messagesSessionUri).value)
      } { messagesResponse =>
        LGCircuit.dispatch(RefreshMessages())
        val currentVal = if (value.nonEmpty) value.get.messagesModelList else Nil
        val msg = currentVal ++ ContentUtils
          .processRes(messagesResponse)
          .filterNot(_.pageOfPosts.isEmpty)
          .flatMap(content => Try(upickle.default.read[MessagePost](content.pageOfPosts(0))).toOption)
        MessagesRootModel(msg.sortWith((x, y) => Moment(x.created).isAfter(Moment(y.created))))
      }
      action.handleWith(this, updateF)(PotActionRetriable.handler())

    case ClearMessages() =>
      updated(Pot.empty)

  }
} 
Example 113
Source File: ProfilesHandler.scala    From ProductWebUI   with Apache License 2.0 5 votes vote down vote up
package client.handler

import diode.data.{Empty, Pot, PotActionRetriable}
import client.rootmodel.ProfilesRootModel
import shared.models.{MessagePost, ProfilesPost}
import client.modules.AppModule
import diode.{ActionHandler, ActionResult, ModelRW}
import org.scalajs.dom.window
import client.services.{CoreApi, LGCircuit}
import diode.util.{Retry, RetryPolicy}
import client.sessionitems.SessionItems
import client.utils.ContentUtils
import org.widok.moment.Moment

import scala.concurrent.ExecutionContext.Implicits.global
import scala.util.Try


// Actions
case class RefreshProfiles(potResult: Pot[ProfilesRootModel] = Empty, retryPolicy: RetryPolicy = Retry(3))
  extends PotActionRetriable[ProfilesRootModel, RefreshProfiles] {
  override def next(value: Pot[ProfilesRootModel], newRetryPolicy: RetryPolicy) = RefreshProfiles(value, newRetryPolicy)
}

case class ClearProfiles()

class ProfilesHandler[M](modelRW: ModelRW[M, Pot[ProfilesRootModel]]) extends ActionHandler(modelRW) {
  override def handle: PartialFunction[Any, ActionResult[M]] = {
    case action: RefreshProfiles =>
      val updateF = action.effectWithRetry(CoreApi.sessionPing(LGCircuit.zoom(_.session.profileSessionUri).value)) { profilesResponse =>
        LGCircuit.dispatch(RefreshProfiles())
        val currentProfile = if (value.nonEmpty) value.get.profilesList else Nil
        val updatedProfiles = currentProfile ++ ContentUtils
          .processRes(profilesResponse)
          .filterNot(_.pageOfPosts.isEmpty)
          .flatMap(content => Try(upickle.default.read[ProfilesPost](content.pageOfPosts(0))).toOption)
        ProfilesRootModel(updatedProfiles.sortWith((x, y) => Moment(x.created).isAfter(Moment(y.created))))
      }
      action.handleWith(this, updateF)(PotActionRetriable.handler())

    case ClearProfiles() =>
      updated(Pot.empty)
  }
} 
Example 114
Source File: ProjectsHandler.scala    From ProductWebUI   with Apache License 2.0 5 votes vote down vote up
package client.handler

import client.modules.AppModule
import diode.{ActionHandler, ActionResult, Effect, ModelRW}
import diode.data._
import shared.models.{ProfilesPost, ProjectsPost}
import org.scalajs.dom.window
import client.rootmodel.ProjectsRootModel
import client.services.{CoreApi, LGCircuit}
import diode.util.{Retry, RetryPolicy}
import client.sessionitems.SessionItems
import client.utils.ContentUtils
import org.widok.moment.Moment

import scala.concurrent.ExecutionContext.Implicits.global
import scala.util.Try

// Actions
case class RefreshProjects(potResult: Pot[ProjectsRootModel] = Empty, retryPolicy: RetryPolicy = Retry(3))
    extends PotActionRetriable[ProjectsRootModel, RefreshProjects] {
  override def next(value: Pot[ProjectsRootModel], newRetryPolicy: RetryPolicy) = RefreshProjects(value, newRetryPolicy)
}

case class ClearProjects()

class ProjectsHandler[M](modelRW: ModelRW[M, Pot[ProjectsRootModel]]) extends ActionHandler(modelRW) {
  override def handle: PartialFunction[Any, ActionResult[M]] = {
    case action: RefreshProjects =>
//      val labels = window.sessionStorage.getItem(SessionItems.ProjectsViewItems.CURRENT_PROJECTS_LABEL_SEARCH)
      val updateF = action.effectWithRetry(CoreApi.sessionPing(LGCircuit.zoom(_.session.projectSessionUri).value)) { res =>
        LGCircuit.dispatch(RefreshProjects())
        val currentProjects = if (value.nonEmpty) value.get.projectsModelList else Nil
        val proj = currentProjects ++
          ContentUtils
            .processRes(res)
            .filterNot(_.pageOfPosts.isEmpty)
            .flatMap(content => Try(upickle.default.read[ProjectsPost](content.pageOfPosts(0))).toOption)
        ProjectsRootModel(proj.sortWith((x, y) => Moment(x.created).isAfter(Moment(y.created))))
      }
      action.handleWith(this, updateF)(PotActionRetriable.handler())

    case ClearProjects() =>
      updated(Pot.empty)
  }
} 
Example 115
Source File: UserHandler.scala    From ProductWebUI   with Apache License 2.0 5 votes vote down vote up
package client.handler

import client.services.{CoreApi, LGCircuit}
import diode.{ActionHandler, ActionResult, ModelRW}
import org.scalajs.dom.window
import shared.dtos.UpdateUserRequest
import shared.models.UserModel
import client.logger
import diode.AnyAction._
import scala.util.{Failure, Success}
import scala.concurrent.ExecutionContext.Implicits.global

// scalastyle:off
case class LoginUser(userModel: UserModel)

case class LogoutUser()

case class PostUserUpdate(updateUserRequest: UpdateUserRequest)

case class ToggleAvailablity()

class UserHandler[M](modelRW: ModelRW[M, UserModel]) extends ActionHandler(modelRW) {
  //  val messageLoader = "#messageLoader"
  override def handle: PartialFunction[Any, ActionResult[M]] = {
    case LoginUser(userModel) =>
      updated(userModel.copy(isAvailable = true, isLoggedIn = true))

    case LogoutUser() =>
      // todo: Cancel all subscribe request for all sessions
      window.sessionStorage.clear()
      window.location.href = "/"
      updated(UserModel(email = "", name = "", imgSrc = "", isLoggedIn = false))

    case PostUserUpdate(req) =>
      var count = 1
      post()
      def post(): Unit = CoreApi.updateUserRequest(req).onComplete {
        case Success(response) =>
          logger.log.debug("user update request sent successfully")
        case Failure(response) =>
          if (count == 3) {
            //            logger.log.error("user update error")
            LGCircuit.dispatch(ShowServerError(response.toString))
          } else {
            count = count + 1
            post()
          }
      }
      if (req.jsonBlob.imgSrc != null) {
        updated(value.copy(imgSrc = req.jsonBlob.imgSrc))
      } else noChange

    case ToggleAvailablity() =>
      updated(value.copy(isAvailable = !value.isAvailable))

  }
} 
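Note: the PostUserUpdate handler above retries the update by re-invoking post() from the Failure branch of onComplete, giving up after three attempts. The same idea can be expressed as a small reusable helper built on recoverWith; this is only an illustrative sketch, not part of the project.

import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global

object FutureRetry {
  // Re-runs `op` until it succeeds or `attempts` is exhausted; the last failure is returned.
  def retry[T](attempts: Int)(op: () => Future[T]): Future[T] =
    op().recoverWith {
      case _ if attempts > 1 => retry(attempts - 1)(op)
    }
}

// e.g. FutureRetry.retry(3)(() => CoreApi.updateUserRequest(req))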
Example 116
Source File: Application.scala    From ProductWebUI   with Apache License 2.0 5 votes vote down vote up
package controllers

import java.nio.ByteBuffer

import boopickle.Default._
import com.google.inject.Inject
import play.api.mvc._
import play.api.{Configuration, Environment}
import play.api.Environment._
import services.ApiService
import shared.Api

import scala.concurrent.ExecutionContext.Implicits.global

object Router extends autowire.Server[ByteBuffer, Pickler, Pickler] {
  override def read[R: Pickler](p: ByteBuffer) = Unpickle[R].fromBytes(p)

  override def write[R: Pickler](r: R) = Pickle.intoBytes(r)
}

class Application @Inject()(implicit val config: Configuration, env: Environment) extends Controller {
  val apiService = new ApiService()

  def index = Action {

    Ok(views.html.index("LivelyGig"))
    //    Ok(views.html.index("Welcome to Synereo - the decentralized and distributed social network"))
  }

  def logging = Action(parse.anyContent) {
    implicit request =>
      request.body.asJson.foreach { msg =>
        println(s"Application - CLIENT - $msg")
      }
      Ok("")
  }

  def autowireApi(path: String) = Action.async(parse.raw) {
    implicit request =>
      println(s"Request path: $path")

      // get the request body as ByteString
      val b = request.body.asBytes(parse.UNLIMITED).get

      // call Autowire route
      Router.route[Api](apiService)(
        autowire.Core.Request(path.split("/"), Unpickle[Map[String, ByteBuffer]].fromBytes(b.asByteBuffer))
      ).map(buffer => {
        val data = Array.ofDim[Byte](buffer.remaining())
        buffer.get(data)
        Ok(data)
      })
  }
} 
Example 117
Source File: ConnectionHandler.scala    From ProductWebUI   with Apache License 2.0 5 votes vote down vote up
package synereo.client.handlers

import diode.{ActionHandler, ActionResult, ModelRW}
import synereo.client.rootmodels.ConnectionsRootModel
import shared.dtos.{Connection, Content, IntroConfirmReq}
import shared.models.ConnectionsModel
import synereo.client.logger
import synereo.client.services.{CoreApi, SYNEREOCircuit}
import diode.AnyAction._
import scala.concurrent.ExecutionContext.Implicits.global
import scala.util.{Failure, Success}

// Actions
//scalastyle:off
case class UpdateConnections(newConnectionModel: Seq[ConnectionsModel])

class ConnectionHandler[M](modelRW: ModelRW[M, ConnectionsRootModel]) extends ActionHandler(modelRW) {
  override def handle: PartialFunction[Any, ActionResult[M]] = {

    case UpdateConnections(newConnectionsModel) =>
      val cnxnModelMod = if (value.connectionsResponse.nonEmpty) {
        value.connectionsResponse ++ newConnectionsModel.filterNot(e =>
          value.connectionsResponse.exists(p => e.connection.source == p.connection.target || e.connection.target == p.connection.target))

      } else {
        newConnectionsModel
      }
      updated(ConnectionsRootModel(cnxnModelMod))
  }
} 
Example 118
Source File: SearchesHandler.scala    From ProductWebUI   with Apache License 2.0 5 votes vote down vote up
package synereo.client.handlers

import diode.{ActionHandler, ActionResult, ModelRW}
import shared.models.Label
import synereo.client.rootmodels.SearchesRootModel
import shared.dtos.{Connection, LabelPost, SubscribeRequest}
import synereo.client.logger
import synereo.client.services.{CoreApi, SYNEREOCircuit}

import scala.concurrent.ExecutionContext.Implicits.global
import scala.scalajs.js.JSConverters._
import scala.scalajs.js.JSON
import scala.util.{Failure, Success}
import diode.AnyAction._
import synereo.client.facades.PrologParser

object SearchesModelHandler {
  def getSearchesModel(listOfLabels: Seq[String]): SearchesRootModel = {

    try {
      val labelsArray = PrologParser.StringToLabel(listOfLabels.toJSArray)
      val model = upickle.default.read[Seq[Label]](JSON.stringify(labelsArray))
      SearchesRootModel(searchesModel = model)
    } catch {
      case e: Exception =>
        logger.log.error("error in method getsearchesModel")
        SearchesRootModel(Nil)
    }
  }

  def leaf(text: String ) = "\"leaf(text(\\\"" + s"${text}" + "\\\"),display(color(\\\"\\\"),image(\\\"\\\")))\""

}

case class CreateLabels(labelStrSeq: Seq[String])

case class AddLabel(label: Label)

case class UpdatePrevSearchLabel(labelStr: String)

case class UpdatePrevSearchCnxn(cnxns: Seq[Connection])


// scalastyle:off
class SearchesHandler[M](modelRW: ModelRW[M, SearchesRootModel]) extends ActionHandler(modelRW) {
  override def handle: PartialFunction[Any, ActionResult[M]] = {
    case CreateLabels(labelStrSeq: Seq[String]) =>
      try {
        updated(SearchesModelHandler.getSearchesModel(labelStrSeq))
        //        noChange
      } catch {
        case e: Exception =>
          println(s" exception in Create Label action $e")
          noChange
      }

    case AddLabel(label: Label) =>
      updated(value.copy(searchesModel = value.searchesModel :+ label))

    case UpdatePrevSearchLabel(labelStr) =>
      updated(value.copy(previousSearchLabel = labelStr))

    case UpdatePrevSearchCnxn(cnxns) =>
      updated(value.copy(previousSearchCnxn = cnxns))

  }

} 
Example 119
Source File: MessagesHandler.scala    From ProductWebUI   with Apache License 2.0 5 votes vote down vote up
package synereo.client.handlers

import diode._
import diode.data._
import shared.models.MessagePost
import synereo.client.rootmodels.MessagesRootModel
import diode.util.{Retry, RetryPolicy}
import shared.dtos._
import synereo.client.logger
import synereo.client.services.{CoreApi, SYNEREOCircuit}

import scala.concurrent.ExecutionContext.Implicits.global
import scala.util.{Failure, Success, Try}
import diode.AnyAction._
import org.widok.moment.Moment
import synereo.client.utils.{AppUtils, ConnectionsUtils, ContentUtils}

// Actions
//scalastyle:off
case class RefreshMessages(potResult: Pot[MessagesRootModel] = Empty, retryPolicy: RetryPolicy = Retry(3))
  extends PotActionRetriable[MessagesRootModel, RefreshMessages] {
  override def next(value: Pot[MessagesRootModel], newRetryPolicy: RetryPolicy): RefreshMessages = RefreshMessages(value, newRetryPolicy)
}

//case class StoreCnxnAndLabels(slctzId: String, sessionUriName: String)

case class ClearMessages()


class MessagesHandler[M](modelRW: ModelRW[M, Pot[MessagesRootModel]]) extends ActionHandler(modelRW) {

  override def handle: PartialFunction[Any, ActionResult[M]] = {
    
    case action: RefreshMessages =>
      val updateF = action.effectWithRetry {
        CoreApi.sessionPing(SYNEREOCircuit.zoom(_.sessionRootModel.sessionUri).value)
      } { messagesResponse =>
        SYNEREOCircuit.dispatch(RefreshMessages())
        val currentVal = if (value.nonEmpty) value.get.messagesModelList else Nil
        val msg = currentVal ++ ContentUtils
          .processRes(messagesResponse)
          .filterNot(_.pageOfPosts.isEmpty)
          .flatMap(content => Try(upickle.default.read[MessagePost](content.pageOfPosts(0))).toOption)
          .map(ConnectionsUtils.getSenderReceivers)
        MessagesRootModel(msg.sortWith((x, y) => Moment(x.created).isAfter(Moment(y.created))))
      }
      action.handleWith(this, updateF)(PotActionRetriable.handler())

    case ClearMessages() =>
      updated(Pot.empty)
  }
} 
Example 120
Source File: playajaxclient.scala    From ProductWebUI   with Apache License 2.0 5 votes vote down vote up
package synereo.client.services

import boopickle.Pickler
import java.nio.ByteBuffer

import boopickle.Default._
import org.scalajs.dom

import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global
import scala.scalajs.js.typedarray._

object PlayAjaxClient extends autowire.Client[ByteBuffer, Pickler, Pickler] {
  override def doCall(req: Request): Future[ByteBuffer] = {
    dom.ext.Ajax.post(
      url = "/api/" + req.path.mkString("/"),
      data = Pickle.intoBytes(req.args),
      responseType = "arraybuffer",
      headers = Map("Content-Type" -> "application/octet-stream")
    ).map(r => TypedArrayBuffer.wrap(r.response.asInstanceOf[ArrayBuffer]))
  }

  override def read[Result: Pickler](p: ByteBuffer) = Unpickle[Result].fromBytes(p)
  override def write[Result: Pickler](r: Result) = Pickle.intoBytes(r)
} 
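Note: with the client above in place, a shared Api method is invoked through autowire's call() macro and the returned Future is handled on the global execution context. A usage sketch follows; the Api method name getAllProjects is hypothetical and only for illustration.

import autowire._
import scala.concurrent.ExecutionContext.Implicits.global
import scala.util.{Failure, Success}
import shared.Api
import synereo.client.services.PlayAjaxClient

object ApiCallSketch {
  def loadProjects(sessionUri: String): Unit =
    PlayAjaxClient[Api].getAllProjects(sessionUri).call().onComplete {
      case Success(projects) => println(s"received ${projects.size} projects")
      case Failure(error)    => println(s"API call failed: $error")
    }
}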
Example 121
Source File: CSVOutput.scala    From subsearch   with GNU General Public License v2.0 5 votes vote down vote up
package com.gilazaria.subsearch.output

import com.gilazaria.subsearch.model.Record
import com.gilazaria.subsearch.utils.{File, TimeUtils}

import scala.collection.SortedSet
import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global

class CSVOutput(private val file: Option[File]) extends Output {
  override def print(string: String): Unit = {}

  private var saveToFileFuture: Future[Unit] = Future(Unit)
  override def printRecords(records: SortedSet[Record]) = {
    if (file.isDefined) {
      saveToFileFuture = saveToFileFuture.map {
        _ =>
          val lines = records.map(record => s"${TimeUtils.timestampNow},${record.name},${record.recordType},${record.data}")
          file.get.write(lines.mkString("\n") + "\n")
      }
    }
  }

  override def writingToFileFuture: Future[Unit] = {
    saveToFileFuture
  }
}

object CSVOutput {
  def create(fileOption: Option[File]): Option[CSVOutput] =
    if (fileOption.isDefined) Some(new CSVOutput(fileOption))
    else None
} 
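Note: CSVOutput serialises its file writes by chaining every append onto the previous Future held in saveToFileFuture, so writes execute one after another on the global execution context instead of interleaving. The pattern in isolation, with illustrative names:

import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global

// Each call to append schedules its write after the previously scheduled one completes.
// Like CSVOutput above, this assumes append is invoked from a single caller at a time.
class SerializedWriter(write: String => Unit) {
  private var pending: Future[Unit] = Future.successful(())

  def append(line: String): Unit =
    pending = pending.map(_ => write(line))

  // Expose the tail of the chain so callers can wait for all writes to finish.
  def allWritesDone: Future[Unit] = pending
}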
Example 122
Source File: Output.scala    From subsearch   with GNU General Public License v2.0 5 votes vote down vote up
package com.gilazaria.subsearch.output

import com.gilazaria.subsearch.model.Record
import com.gilazaria.subsearch.utils.TimeUtils

import scala.collection.SortedSet
import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global

trait Output {
  def print(string: String)
  def println(): Unit = println("")
  def println(string: String): Unit = print(s"$string\n")

  def printSuccess(string: String) = printSuccessWithoutTime(prependTime(string))
  def printStatus(string: String) = printStatusWithoutTime(prependTime(string))
  def printInfo(string: String) = printInfoWithoutTime(prependTime(string))
  def printError(string: String) = printErrorWithoutTime(prependTime(string))

  def printSuccessWithoutTime(string: String) = println(string)
  def printStatusWithoutTime(string: String) = println(string)
  def printInfoWithoutTime(string: String) = println(string)
  def printErrorWithoutTime(string: String) = println(string)

  def printSuccessDuringScan(string: String) = printSuccess(string)
  def printStatusDuringScan(string: String) = printStatus(string)
  def printInfoDuringScan(string: String) = printInfo(string)
  def printErrorDuringScan(string: String) = printError(string)

  // Utility

  final def prependTime(string: String): String =
    s"${TimeUtils.currentTimePretty} $string"

  def writingToFileFuture: Future[Unit] = Future(Unit)

  // Application Specific

  def printHeader(header: String) = {
    println(header)
    println()
  }

  def printConfig(config: List[(String, String)], separator: String) = {
    val string: String =
      config
        .map((tuple: (String, String)) => tuple._1 + tuple._2)
        .mkString(separator)

    println(string)
    println()
  }

  def printTarget(text: String, hostname: String) = {
    println(s"$text$hostname")
    println()
  }

  def printTaskCompleted(text: String) = {
    println()
    printStatusWithoutTime(text)
    println()
  }

  def printTaskFailed(text: String) = {
    println()
    printErrorWithoutTime(text)
    println()
  }

  def printPausingThreads(text: String) = {}

  def printPauseOptions(text: String) = {}

  def printInvalidPauseOptions(text: String) = {}

  def printLastRequest(text: String) = {}

  def printLastRequest() = {}

  def printRecords(records: SortedSet[Record])

  def printRecordsDuringScan(records: SortedSet[Record]) = printRecords(records)
} 
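Note: Output leaves print and printRecords abstract and defaults writingToFileFuture to an already-completed Future. A minimal console-only implementation might look like the sketch below; it assumes Record exposes the name and recordType fields seen in the other subsearch examples.

import com.gilazaria.subsearch.model.Record
import com.gilazaria.subsearch.output.Output
import scala.collection.SortedSet

// Smallest useful Output: everything goes straight to stdout and nothing is written to disk,
// so the inherited writingToFileFuture (an already-completed Future) is left as-is.
class ConsoleOnlyOutput extends Output {
  override def print(string: String): Unit = Console.print(string)

  override def printRecords(records: SortedSet[Record]): Unit =
    records.foreach(record => println(s"${record.recordType}:  ${record.name}"))
}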
Example 123
Source File: StandardOutput.scala    From subsearch   with GNU General Public License v2.0 5 votes vote down vote up
package com.gilazaria.subsearch.output

import com.gilazaria.subsearch.model.{Record, RecordType}
import com.gilazaria.subsearch.utils.File

import scala.collection.SortedSet
import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global

class StandardOutput(private val file: Option[File], private val verbose: Boolean) extends Output {
  private var saveToFileFuture: Future[Unit] = Future(Unit)

  override def print(string: String): Unit =  {
    if (file.isDefined) {
      saveToFileFuture = saveToFileFuture.map {
        _ => file.get.write(string)
      }
    }
  }

  override def writingToFileFuture: Future[Unit] = {
    saveToFileFuture
  }

  override def printRecords(records: SortedSet[Record]) = {
    if (verbose) printRecordsVerbose(records)
    else printRecordsNormal(records)
  }

  protected def printRecordsVerbose(records: SortedSet[Record]) = {
    val lines: List[String] =
      records
        .map(_.name)
        .toList
        .flatMap {
          subdomain =>
            val subdomainRecords: SortedSet[Record] = records.filter(_.name == subdomain)
            val recordTypes: SortedSet[RecordType] = subdomainRecords.map(_.recordType)

            recordTypes.flatMap {
              recordType =>
                subdomainRecords.filter(_.recordType == recordType).map {
                  case Record(_, _, data) =>
                    val msg = formatRecordTypeAndSubdomainForPrinting(recordType, subdomain)

                    if (recordType.isOneOf("A", "AAAA", "CNAME", "NS", "SRV"))
                      s"$msg  ->  $data"
                    else if (recordType.stringValue == "MX")
                      s"$msg  @@  $data"
                    else
                      s"$msg  --  $data"
                }
            }
        }

    if (lines.nonEmpty)
      println(lines.mkString("\n"))
  }

  protected def formatRecordTypeAndSubdomainForPrinting(recordType: RecordType, subdomain: String): String =
    prependTime(f"${recordType.toString}%-7s:  $subdomain")

  protected def printRecordsNormal(records: SortedSet[Record]) = {
    val lines: List[String] =
      records
        .map(_.name)
        .toList
        .map(subdomain => (subdomain, records.filter(_.name == subdomain).map(_.recordType)))
        .map((data: (String, SortedSet[RecordType])) => s"${data._2.mkString(", ")}:  ${data._1}")

    if (lines.nonEmpty)
      printSuccess(lines.mkString("\n"))
  }
}

object StandardOutput {
  def create(fileOption: Option[File], verbose: Boolean): Option[StandardOutput] =
    if (fileOption.isDefined) Some(new StandardOutput(fileOption, verbose))
    else None
} 
Example 124
Source File: PauseHandler.scala    From subsearch   with GNU General Public License v2.0 5 votes vote down vote up
package com.gilazaria.subsearch.core.subdomainscanner

import com.gilazaria.subsearch.core.subdomainscanner.DispatcherMessage.{PauseScanning, ResumeScanning}
import com.gilazaria.subsearch.output.Logger
import com.gilazaria.subsearch.utils.TimeUtils
import akka.actor.ActorRef
import akka.pattern.ask
import scala.concurrent.ExecutionContext.Implicits.global
import sun.misc.{Signal, SignalHandler}

import scala.concurrent.Await

object PauseHandler {
  def create(dispatcher: ActorRef, logger: Logger): PauseHandler =
    new PauseHandler(List("INT"), dispatcher, logger)

  case class InterruptException(msg: String) extends Exception(msg)
  case class ContinueException(msg: String) extends Exception(msg)
}

class PauseHandler(signalNames: List[String], dispatcher: ActorRef, logger: Logger) extends SignalHandler {
  import PauseHandler.{InterruptException, ContinueException}

  private val signalMap = signalNames.map(name => (name, Signal.handle(new Signal(name), this))).toMap

  private var pausingCalled: Boolean = false

  override def handle(signal: Signal) = {
    if (pausingCalled)
      forceExit()
    else
      pausingCalled = true

    implicit val timeout = TimeUtils.akkaAskTimeout
    Await.result(dispatcher ? PauseScanning, TimeUtils.awaitDuration)

    try {
      while (true) {
        logger.logPauseOptions()

        val option: String = System.console.readLine().toLowerCase

        if (option == "e")
          throw new InterruptException("Exited the program.")
        else if (option == "c")
          throw new ContinueException("Continuing the scan.")
        else
          logger.logInvalidPauseOption()
      }
    } catch {
      case InterruptException(msg) =>
        exit()
      case ContinueException(msg) =>
        resume()
    }
  }

  private def forceExit() = {
    logger.logScanForceCancelled()
    System.exit(0)
  }

  private def exit() = {
    logger.logScanCancelled()
    logger.completedLoggingFuture.andThen { case _ => System.exit(0) }
  }

  private def resume() = {
    dispatcher ! ResumeScanning
    pausingCalled = false
  }
} 
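Note: in exit() above, andThen attaches a shutdown side effect that runs once completedLoggingFuture finishes, without altering the future's value; the callback runs on the imported global execution context. A small self-contained sketch of that combinator:

import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global
import scala.util.{Failure, Success}

object AndThenSketch {
  // Returns the same future, with a side effect attached for both outcomes.
  def logged[T](f: Future[T]): Future[T] =
    f.andThen {
      case Success(_) => println("work finished, safe to shut down")
      case Failure(e) => println(s"work failed: ${e.getMessage}")
    }
}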
Example 125
Source File: TestVersionFile.scala    From ohara   with Apache License 2.0 5 votes vote down vote up
package oharastream.ohara.it.script

import oharastream.ohara.common.util.{Releasable, VersionUtils}
import oharastream.ohara.it.{ContainerPlatform, IntegrationTest, ServiceKeyHolder}
import org.junit.{After, Test}
import org.scalatest.matchers.should.Matchers._

import scala.concurrent.ExecutionContext.Implicits.global


  @Test
  def testBroker(): Unit = testVersion(s"oharastream/broker:${VersionUtils.VERSION}", Set("ohara"))

  private[this] def testVersion(imageName: String, expectedStrings: Set[String]): Unit = platform.nodeNames.foreach {
    hostname =>
      val key           = serviceKeyHolder.generateClusterKey()
      val containerName = s"${key.group()}-${key.name()}"
      val versionString: String = result(
        containerClient.containerCreator
          .imageName(imageName)
          .command("-v")
          .name(containerName)
          .nodeName(hostname)
          .create()
          .flatMap(_ => containerClient.log(containerName).map(_.head._2))
      )
      expectedStrings.foreach(s => versionString should include(s))
  }

  @After
  def releaseConfigurator(): Unit = {
    Releasable.close(serviceKeyHolder)
    Releasable.close(resourceRef)
  }
} 
Example 126
Source File: ServiceKeyHolder.scala    From ohara   with Apache License 2.0 5 votes vote down vote up
package oharastream.ohara.it

import java.util.concurrent.TimeUnit

import oharastream.ohara.agent.container.ContainerClient
import oharastream.ohara.common.setting.ObjectKey
import oharastream.ohara.common.util.{CommonUtils, Releasable}
import com.typesafe.scalalogging.Logger

import scala.collection.mutable
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.Duration
import scala.concurrent.{Await, Future}


      if (!finalClose || !KEEP_CONTAINERS)
        result(client.containers())
          .filter(
            container =>
              clusterKey.exists(key => container.name.contains(key.group()) && container.name.contains(key.name()))
          )
          .filterNot(container => excludedNodes.contains(container.nodeName))
          .foreach { container =>
            try {
              println(s"[-----------------------------------${container.name}-----------------------------------]")
              // Before 10 minutes container log. Avoid the OutOfMemory of Java heap
              val containerLogs = try result(client.log(container.name, Option(600)))
              catch {
                case e: Throwable =>
                  s"failed to fetch the logs for container:${container.name}. caused by:${e.getMessage}"
              }
              println(containerLogs)
              println("[------------------------------------------------------------------------------------]")
              result(client.forceRemove(container.name))
            } catch {
              case e: Throwable =>
                LOG.error(s"failed to remove container ${container.name}", e)
            }
          } finally Releasable.close(client)
} 
Example 127
Source File: TestQueryConfiguratorLog.scala    From ohara   with Apache License 2.0 5 votes vote down vote up
package oharastream.ohara.it.client

import java.util.concurrent.TimeUnit

import oharastream.ohara.client.configurator.LogApi
import oharastream.ohara.it.{ContainerPlatform, WithRemoteConfigurator}
import oharastream.ohara.it.category.ClientGroup
import org.junit.Test
import org.junit.experimental.categories.Category
import org.scalatest.matchers.should.Matchers._

import scala.concurrent.ExecutionContext.Implicits.global

@Category(Array(classOf[ClientGroup]))
class TestQueryConfiguratorLog(platform: ContainerPlatform)
    extends WithRemoteConfigurator(platform: ContainerPlatform) {
  @Test
  def test(): Unit = {
    val log = result(LogApi.access.hostname(configuratorHostname).port(configuratorPort).log4Configurator())
    log.logs.size shouldBe 1
    log.logs.head.hostname.length should not be 0
    log.logs.head.value.length should not be 0

    val logOf1Second = result(LogApi.access.hostname(configuratorHostname).port(configuratorPort).log4Configurator(1)).logs.head.value
    TimeUnit.SECONDS.sleep(6)
    val logOf6Second = result(LogApi.access.hostname(configuratorHostname).port(configuratorPort).log4Configurator(6)).logs.head.value
    withClue(s"logOf1Second:$logOf1Second\nlogOf6Second:$logOf6Second") {
      // it counts on timer so the "=" is legal :)
      logOf1Second.length should be <= logOf6Second.length
    }
  }
} 
Example 128
Source File: TestListNode.scala    From ohara   with Apache License 2.0 5 votes vote down vote up
package oharastream.ohara.it.client

import oharastream.ohara.client.configurator.NodeApi
import oharastream.ohara.it.category.ClientGroup
import oharastream.ohara.it.{ContainerPlatform, WithRemoteConfigurator}
import org.junit.Test
import org.junit.experimental.categories.Category
import org.scalatest.matchers.should.Matchers._

import scala.concurrent.ExecutionContext.Implicits.global

@Category(Array(classOf[ClientGroup]))
class TestListNode(platform: ContainerPlatform) extends WithRemoteConfigurator(platform: ContainerPlatform) {
  @Test
  def test(): Unit = {
    val services =
      result(NodeApi.access.hostname(configuratorHostname).port(configuratorPort).list()).flatMap(_.services)
    services should not be Seq.empty
    services.find(_.name == NodeApi.CONFIGURATOR_SERVICE_NAME) should not be None
  }
} 
Example 129
Source File: TestNodeResource.scala    From ohara   with Apache License 2.0 5 votes vote down vote up
package oharastream.ohara.it.client

import oharastream.ohara.client.configurator.NodeApi
import oharastream.ohara.it.category.ClientGroup
import oharastream.ohara.it.{ContainerPlatform, WithRemoteConfigurator}
import org.junit.Test
import org.junit.experimental.categories.Category
import org.scalatest.matchers.should.Matchers._

import scala.concurrent.ExecutionContext.Implicits.global

@Category(Array(classOf[ClientGroup]))
class TestNodeResource(platform: ContainerPlatform) extends WithRemoteConfigurator(platform: ContainerPlatform) {
  private[this] def nodeApi: NodeApi.Access = NodeApi.access.hostname(configuratorHostname).port(configuratorPort)

  @Test
  def testResources(): Unit = {
    val nodes = result(nodeApi.list())
    nodes should not be Seq.empty
    nodes.foreach { node =>
      nodes.exists(_.hostname == node.hostname) shouldBe true
      node.resources should not be Seq.empty
      node.resources.size should be >= 1
      node.resources.foreach { resource =>
        resource.value.toInt should be >= 1
        resource.name.isEmpty shouldBe false
        resource.unit.isEmpty shouldBe false
      }
    }
  }

  @Test
  def testStatus(): Unit = {
    val nodes = result(nodeApi.list())
    nodes should not be Seq.empty
    nodes.foreach { node =>
      nodes.exists(_.hostname == node.hostname) shouldBe true
      node.state shouldBe NodeApi.State.AVAILABLE
      node.error shouldBe None
    }
  }
} 
Example 130
Source File: TestVerifyNode.scala    From ohara   with Apache License 2.0 5 votes vote down vote up
package oharastream.ohara.agent.docker

import java.util
import java.util.concurrent.TimeUnit

import oharastream.ohara.agent.{DataCollie, ServiceCollie}
import oharastream.ohara.client.configurator.NodeApi.{Node, State}
import oharastream.ohara.common.rule.OharaTest
import oharastream.ohara.common.util.{CommonUtils, Releasable}
import oharastream.ohara.testing.service.SshdServer
import org.junit.{After, Test}
import org.scalatest.matchers.should.Matchers._

import scala.concurrent.Await
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.Duration


class TestVerifyNode extends OharaTest {
  private[this] var errorMessage: String = _
  private[this] val sshServer = SshdServer.local(
    0,
    java.util.Map.of(
      "docker info --format '{{json .}}'",
      (_: String) =>
        if (errorMessage != null)
          throw new IllegalArgumentException(errorMessage)
        else util.List.of("""
                        |  {
                        |    "NCPU": 1,
                        |    "MemTotal": 1024
                        |  }
                        |""".stripMargin)
    )
  )

  private[this] val node = Node(
    hostname = sshServer.hostname(),
    port = sshServer.port(),
    user = sshServer.user(),
    password = sshServer.password(),
    services = Seq.empty,
    state = State.AVAILABLE,
    error = None,
    lastModified = CommonUtils.current(),
    resources = Seq.empty,
    tags = Map.empty
  )

  private[this] val collie = ServiceCollie.dockerModeBuilder.dataCollie(DataCollie(Seq(node))).build

  @Test
  def happyCase(): Unit = Await.result(collie.verifyNode(node), Duration(30, TimeUnit.SECONDS))

  @Test
  def badCase(): Unit = {
    errorMessage = CommonUtils.randomString()
    intercept[Exception] {
      Await.result(collie.verifyNode(node), Duration(30, TimeUnit.SECONDS))
    }.getMessage should include("unavailable")
  }

  @After
  def tearDown(): Unit = {
    Releasable.close(collie)
    Releasable.close(sshServer)
  }
} 
Example 131
Source File: TestK8SClientVerify.scala    From ohara   with Apache License 2.0 5 votes vote down vote up
package oharastream.ohara.agent.k8s

import java.util.concurrent.TimeUnit

import oharastream.ohara.agent.fake.FakeK8SClient
import oharastream.ohara.agent.{DataCollie, ServiceCollie}
import oharastream.ohara.client.configurator.NodeApi.Node
import oharastream.ohara.common.rule.OharaTest
import org.junit.Test
import org.scalatest.matchers.should.Matchers._

import scala.concurrent.Await
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.Duration

class TestK8SClientVerify extends OharaTest {
  private[this] val dataCollie: DataCollie = DataCollie(Seq.empty)

  private[this] def node: Node = Node("ohara", "user", "password")

  @Test
  def testMockK8sClientVerifyNode1(): Unit = {
    val fakeK8SClient = new FakeK8SClient(true, Option(K8SStatusInfo(true, "")), "")
    val serviceCollie: ServiceCollie =
      ServiceCollie.k8sModeBuilder.dataCollie(dataCollie).k8sClient(fakeK8SClient).build()
    Await.result(
      serviceCollie.verifyNode(node),
      Duration(30, TimeUnit.SECONDS)
    ) shouldBe "ohara node is running."
  }

  @Test
  def testMockK8sClientVerifyNode2(): Unit = {
    val fakeK8SClient = new FakeK8SClient(true, Option(K8SStatusInfo(false, "node failed.")), "")
    val serviceCollie: ServiceCollie =
      ServiceCollie.k8sModeBuilder.dataCollie(dataCollie).k8sClient(fakeK8SClient).build()
    intercept[IllegalStateException] {
      Await.result(
        serviceCollie.verifyNode(node),
        Duration(30, TimeUnit.SECONDS)
      )
    }.getMessage shouldBe "ohara node doesn't running container. cause: node failed."
  }

  @Test
  def testMockK8sClientVerifyNode3(): Unit = {
    val fakeK8SClient = new FakeK8SClient(false, Option(K8SStatusInfo(false, "failed")), "")
    val serviceCollie: ServiceCollie =
      ServiceCollie.k8sModeBuilder.dataCollie(dataCollie).k8sClient(fakeK8SClient).build()
    intercept[IllegalStateException] {
      Await.result(
        serviceCollie.verifyNode(node),
        Duration(30, TimeUnit.SECONDS)
      )
    }.getMessage shouldBe "ohara node doesn't running container. cause: failed"
  }

  @Test
  def testMockK8SClientVerifyNode4(): Unit = {
    val fakeK8SClient = new FakeK8SClient(false, None, "")
    val serviceCollie: ServiceCollie =
      ServiceCollie.k8sModeBuilder.dataCollie(dataCollie).k8sClient(fakeK8SClient).build()
    intercept[IllegalStateException] {
      Await.result(
        serviceCollie.verifyNode(node),
        Duration(30, TimeUnit.SECONDS)
      )
    }.getMessage shouldBe "ohara node doesn't running container. cause: ohara node doesn't exists."
  }
} 
Example 132
Source File: TestK8SServiceCollieImpl.scala    From ohara   with Apache License 2.0 5 votes vote down vote up
package oharastream.ohara.agent.k8s

import java.util.concurrent.TimeUnit

import oharastream.ohara.agent.DataCollie
import oharastream.ohara.agent.fake.FakeK8SClient
import oharastream.ohara.client.configurator.NodeApi
import oharastream.ohara.client.configurator.NodeApi.{Node, Resource}
import oharastream.ohara.common.rule.OharaTest
import org.junit.Test

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.{Await, ExecutionContext, Future}
import org.scalatest.matchers.should.Matchers._

import scala.concurrent.duration.Duration

class TestK8SServiceCollieImpl extends OharaTest {
  @Test
  def testResource(): Unit = {
    val nodeCache  = (1 to 3).map(x => Node(s"node$x", "user", "password"))
    val dataCollie = DataCollie(nodeCache)

    val k8sClient = new FakeK8SClient(false, None, "container1") {
      override def resources()(
        implicit executionContext: ExecutionContext
      ): Future[Map[String, Seq[NodeApi.Resource]]] =
        Future.successful {
          Map(
            "node1" -> Seq(Resource.cpu(8, Option(2.0)), Resource.memory(1024 * 1024 * 1024 * 100, Option(5.0))),
            "node2" -> Seq(Resource.cpu(8, Option(1.0)), Resource.memory(1024 * 1024 * 1024 * 100, Option(5.0))),
            "node3" -> Seq(Resource.cpu(8, Option(3.0)), Resource.memory(1024 * 1024 * 1024 * 100, Option(5.0)))
          )
        }
    }

    val k8sServiceCollieImpl = new K8SServiceCollieImpl(dataCollie, k8sClient)
    val resource             = result(k8sServiceCollieImpl.resources())
    resource.size shouldBe 3
    val nodeNames = resource.keys.toSeq
    nodeNames(0) shouldBe "node1"
    nodeNames(1) shouldBe "node2"
    nodeNames(2) shouldBe "node3"

    val node1Resource: Seq[Resource] =
      resource.filter(x => x._1 == "node1").flatMap(x => x._2).toSeq

    node1Resource(0).name shouldBe "CPU"
    node1Resource(0).unit shouldBe "cores"
    node1Resource(0).used.get shouldBe 2.0
    node1Resource(0).value shouldBe 8

    node1Resource(1).name shouldBe "Memory"
    node1Resource(1).unit shouldBe "bytes"
    node1Resource(1).used.get shouldBe 5.0
    node1Resource(1).value shouldBe 1024 * 1024 * 1024 * 100
  }

  @Test
  def testEmptyResource(): Unit = {
    val nodeCache  = (1 to 3).map(x => Node(s"node$x", "user", "password"))
    val dataCollie = DataCollie(nodeCache)

    val k8sClient = new FakeK8SClient(false, None, "container1") {
      override def resources()(
        implicit executionContext: ExecutionContext
      ): Future[Map[String, Seq[NodeApi.Resource]]] =
        Future.successful(Map.empty)
    }

    val k8sServiceCollieImpl = new K8SServiceCollieImpl(dataCollie, k8sClient)
    val resource             = result(k8sServiceCollieImpl.resources())
    resource.size shouldBe 0
  }

  private[this] def result[T](future: Future[T]): T = Await.result(future, Duration(10, TimeUnit.SECONDS))
} 
Example 133
Source File: TestRemoteFolderHandler.scala    From ohara   with Apache License 2.0 5 votes vote down vote up
package oharastream.ohara.agent

import java.util.concurrent.TimeUnit

import oharastream.ohara.client.configurator.NodeApi.Node
import oharastream.ohara.common.rule.OharaTest
import oharastream.ohara.common.util.{CommonUtils, Releasable}
import oharastream.ohara.testing.service.SshdServer
import org.junit.{After, Test}
import org.scalatest.matchers.should.Matchers._

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.Duration
import scala.concurrent.{Await, Future}

class TestRemoteFolderHandler extends OharaTest {
  private[this] val server   = SshdServer.local(0)
  private[this] val hostname = server.hostname()
  private[this] val dataCollie = DataCollie(
    Seq(
      Node(
        hostname = hostname,
        port = server.port(),
        user = server.user(),
        password = server.password()
      )
    )
  )
  private[this] val folderHandler = RemoteFolderHandler.builder().dataCollie(dataCollie).build()

  @Test
  def testFolderNotExists(): Unit =
    result(folderHandler.exist(server.hostname(), "/home/ohara100")) shouldBe false

  @Test
  def testCreateFolderAndDelete(): Unit = {
    val path = s"/tmp/${CommonUtils.randomString(5)}"
    result(folderHandler.create(hostname, path)) shouldBe true
    result(folderHandler.exist(hostname, path)) shouldBe true
    // file exists so it does nothing
    result(folderHandler.create(hostname, path)) shouldBe false
    result(folderHandler.delete(hostname, path)) shouldBe true
    result(folderHandler.delete(hostname, path)) shouldBe false
  }

  @Test
  def testListFolder(): Unit = {
    result(folderHandler.list(hostname, "/tmp")).size should not be 0
    val path = s"/tmp/${CommonUtils.randomString(5)}"
    result(folderHandler.create(hostname, path)) shouldBe true
    result(folderHandler.list(hostname, "/tmp")) should contain(path)
  }

  @Test
  def testInspectFolder(): Unit = {
    val folderInfo = result(folderHandler.inspect(hostname, "/tmp"))
    folderInfo.name shouldBe "tmp"
    folderInfo.permission shouldBe FolderPermission.READWRITE
    folderInfo.size should be > 0L
    folderInfo.uid should be >= 0
  }

  private[this] def result[T](f: Future[T]): T = Await.result(f, Duration(120, TimeUnit.SECONDS))

  @After
  def tearDown(): Unit = Releasable.close(server)
} 
Example 134
Source File: TestDefaultValuesAutoComplete.scala    From ohara   with Apache License 2.0 5 votes vote down vote up
package oharastream.ohara.configurator

import java.util.concurrent.{ConcurrentHashMap, TimeUnit}

import oharastream.ohara.client.configurator.{ConnectorApi, WorkerApi}
import oharastream.ohara.testing.WithBrokerWorker
import org.junit.Test
import org.scalatest.matchers.should.Matchers._

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.Duration
import scala.concurrent.{Await, Future}

class TestDefaultValuesAutoComplete extends WithBrokerWorker {
  private[this] def result[T](f: Future[T]): T = Await.result(f, Duration(20, TimeUnit.SECONDS))

  private[this] val configurator =
    Configurator.builder.fake(testUtil().brokersConnProps(), testUtil().workersConnProps()).build()

  private[this] val workerClusterInfo = result(
    WorkerApi.access.hostname(configurator.hostname).port(configurator.port).list()
  ).head

  private[this] val connectorApi = ConnectorApi.access.hostname(configurator.hostname).port(configurator.port)

  @Test
  def testDefaultValuesForPerfSource(): Unit = {
    val connector = result(
      connectorApi.request
        .workerClusterKey(workerClusterInfo.key)
        .className("oharastream.ohara.connector.perf.PerfSource")
        .create()
    )
    connector.settings.keySet should contain("perf.batch")
    connector.settings.keySet should contain("perf.frequency")
    connector.settings.keySet should contain("perf.cell.length")
  }

  @Test
  def testDefaultValuesForConsoleSink(): Unit = {
    val connector = result(
      connectorApi.request
        .workerClusterKey(workerClusterInfo.key)
        .className("oharastream.ohara.connector.console.ConsoleSink")
        .create()
    )
    connector.settings.keySet should contain("console.sink.frequence")
    connector.settings.keySet should contain("console.sink.row.divider")

    // Exercises the Java-to-Scala collection converters on an empty map; the result is discarded.
    val a = new ConcurrentHashMap[String, String]()
    import scala.jdk.CollectionConverters._
    a.elements().asScala.toSeq
} 
Example 135
Source File: TestFakeConnectorAdmin.scala    From ohara   with Apache License 2.0 5 votes vote down vote up
package oharastream.ohara.configurator
import java.util.concurrent.TimeUnit

import oharastream.ohara.client.configurator.ConnectorApi.State
import oharastream.ohara.common.rule.OharaTest
import oharastream.ohara.common.setting.{ConnectorKey, TopicKey}
import oharastream.ohara.common.util.CommonUtils
import oharastream.ohara.configurator.fake.FakeConnectorAdmin
import org.junit.Test
import org.scalatest.matchers.should.Matchers._

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.Duration
import scala.concurrent.{Await, Future}
class TestFakeConnectorAdmin extends OharaTest {
  private[this] def result[T](f: Future[T]): T = Await.result(f, Duration(20, TimeUnit.SECONDS))
  @Test
  def testControlConnector(): Unit = {
    val connectorKey = ConnectorKey.of(CommonUtils.randomString(5), CommonUtils.randomString(5))
    val topicKey     = TopicKey.of(CommonUtils.randomString(5), CommonUtils.randomString(5))
    val className    = CommonUtils.randomString(10)
    val fake         = new FakeConnectorAdmin()
    result(
      fake
        .connectorCreator()
        .connectorKey(connectorKey)
        .topicKey(topicKey)
        .numberOfTasks(1)
        .className(className)
        .create()
    )

    result(fake.exist(connectorKey)) shouldBe true

    result(fake.status(connectorKey)).connector.state shouldBe State.RUNNING.name

    result(fake.pause(connectorKey))
    result(fake.status(connectorKey)).connector.state shouldBe State.PAUSED.name

    result(fake.resume(connectorKey))
    result(fake.status(connectorKey)).connector.state shouldBe State.RUNNING.name

    result(fake.delete(connectorKey))
    result(fake.exist(connectorKey)) shouldBe false
  }
} 
Example 136
Source File: TestInspectWorkerCluster.scala    From ohara   with Apache License 2.0 5 votes vote down vote up
package oharastream.ohara.configurator

import java.util.concurrent.TimeUnit

import oharastream.ohara.client.configurator.{InspectApi, WorkerApi}
import oharastream.ohara.common.util.Releasable
import oharastream.ohara.testing.WithBrokerWorker
import org.junit.{After, Test}
import org.scalatest.matchers.should.Matchers._

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.Duration
import scala.concurrent.{Await, Future}

class TestInspectWorkerCluster extends WithBrokerWorker {
  private[this] val configurator =
    Configurator.builder.fake(testUtil().brokersConnProps(), testUtil().workersConnProps()).build()

  private[this] val workerClusterInfo = result(
    WorkerApi.access.hostname(configurator.hostname).port(configurator.port).list()
  ).head
  private[this] def inspectApi = InspectApi.access.hostname(configurator.hostname).port(configurator.port)

  private[this] def result[T](f: Future[T]): T = Await.result(f, Duration(20, TimeUnit.SECONDS))

  @Test
  def inspectWithoutKey(): Unit = {
    val info = result(inspectApi.workerInfo())
    info.imageName shouldBe WorkerApi.IMAGE_NAME_DEFAULT
    info.settingDefinitions.size shouldBe WorkerApi.DEFINITIONS.size
    info.settingDefinitions.foreach { definition =>
      definition shouldBe WorkerApi.DEFINITIONS.find(_.key() == definition.key()).get
    }
  }

  @Test
  def inspectWithKey(): Unit = {
    val info = result(inspectApi.workerInfo(workerClusterInfo.key))
    info.imageName shouldBe WorkerApi.IMAGE_NAME_DEFAULT
    info.settingDefinitions.size shouldBe WorkerApi.DEFINITIONS.size
    info.settingDefinitions.foreach { definition =>
      definition shouldBe WorkerApi.DEFINITIONS.find(_.key() == definition.key()).get
    }
  }

  @After
  def tearDown(): Unit = Releasable.close(configurator)
} 
Example 137
Source File: TestResponseFromUnsupportedApis.scala    From ohara   with Apache License 2.0 5 votes vote down vote up
package oharastream.ohara.configurator

import java.util.concurrent.TimeUnit

import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport._
import akka.http.scaladsl.model.{HttpMethod, HttpMethods, HttpRequest}
import akka.http.scaladsl.unmarshalling.Unmarshal
import oharastream.ohara.client.configurator.ErrorApi
import oharastream.ohara.common.rule.OharaTest
import oharastream.ohara.common.util.{CommonUtils, Releasable}
import org.junit.{After, Test}
import org.scalatest.matchers.should.Matchers._

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.Duration
import scala.concurrent.{Await, Future}

class TestResponseFromUnsupportedApis extends OharaTest {
  private[this] val configurator = Configurator.builder.fake().build()

  private[this] implicit val actorSystem: ActorSystem = ActorSystem("Executor-TestResponseFromUnsupportedApis")

  private[this] val expectedMessage = oharastream.ohara.configurator.route.apiUrl

  private[this] def result[T](f: Future[T]): T = Await.result(f, Duration(20, TimeUnit.SECONDS))

  @Test
  def testGet(): Unit = sendRequest(HttpMethods.GET, CommonUtils.randomString()).apiUrl.get shouldBe expectedMessage

  @Test
  def testPut(): Unit = sendRequest(HttpMethods.PUT, CommonUtils.randomString()).apiUrl.get shouldBe expectedMessage

  @Test
  def testDelete(): Unit =
    sendRequest(HttpMethods.DELETE, CommonUtils.randomString()).apiUrl.get shouldBe expectedMessage

  @Test
  def testPost(): Unit = sendRequest(HttpMethods.POST, CommonUtils.randomString()).apiUrl.get shouldBe expectedMessage

  private[this] def sendRequest(method: HttpMethod, postfix: String): ErrorApi.Error =
    result(
      Http()
        .singleRequest(HttpRequest(method, s"http://${configurator.hostname}:${configurator.port}/$postfix"))
        .flatMap { response =>
          if (response.status.isSuccess()) Future.failed(new AssertionError())
          else Unmarshal(response.entity).to[ErrorApi.Error]
        }
    )

  @After
  def tearDown(): Unit = {
    Releasable.close(configurator)
    result(actorSystem.terminate())
  }
} 
Example 138
Source File: TestListManyPipelines.scala    From ohara   with Apache License 2.0 5 votes vote down vote up
package oharastream.ohara.configurator

import java.util.concurrent.TimeUnit

import oharastream.ohara.client.configurator.{BrokerApi, ConnectorApi, PipelineApi, TopicApi, WorkerApi}
import oharastream.ohara.common.util.{CommonUtils, Releasable}
import oharastream.ohara.testing.WithBrokerWorker
import org.junit.{After, Test}
import org.scalatest.matchers.should.Matchers._

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.Duration
import scala.concurrent.{Await, Future}
class TestListManyPipelines extends WithBrokerWorker {
  private[this] val configurator =
    Configurator.builder.fake(testUtil().brokersConnProps(), testUtil().workersConnProps()).build()

  private[this] val workerClusterInfo = result(
    WorkerApi.access.hostname(configurator.hostname).port(configurator.port).list()
  ).head

  private[this] def result[T](f: Future[T]): T = Await.result(f, Duration(20, TimeUnit.SECONDS))

  private[this] val numberOfPipelines = 30
  @Test
  def test(): Unit = {
    val topic = result(
      TopicApi.access
        .hostname(configurator.hostname)
        .port(configurator.port)
        .request
        .name(CommonUtils.randomString(10))
        .brokerClusterKey(
          result(BrokerApi.access.hostname(configurator.hostname).port(configurator.port).list()).head.key
        )
        .create()
    )

    val connector = result(
      ConnectorApi.access
        .hostname(configurator.hostname)
        .port(configurator.port)
        .request
        .name(CommonUtils.randomString(10))
        .className("oharastream.ohara.connector.perf.PerfSource")
        .topicKey(topic.key)
        .numberOfTasks(1)
        .workerClusterKey(workerClusterInfo.key)
        .create()
    )

    val pipelines = (0 until numberOfPipelines).map { _ =>
      result(
        PipelineApi.access
          .hostname(configurator.hostname)
          .port(configurator.port)
          .request
          .name(CommonUtils.randomString(10))
          .endpoint(connector)
          .endpoint(topic)
          .create()
      )
    }

    val listPipeline =
      Await.result(
        PipelineApi.access.hostname(configurator.hostname).port(configurator.port).list(),
        Duration(20, TimeUnit.SECONDS)
      )
    pipelines.size shouldBe listPipeline.size
    pipelines.foreach(p => listPipeline.exists(_.name == p.name) shouldBe true)
  }

  @After
  def tearDown(): Unit = Releasable.close(configurator)
} 
Example 139
Source File: TestNodeNameUpperCaseRoute.scala    From ohara   with Apache License 2.0 5 votes vote down vote up
package oharastream.ohara.configurator.route

import java.util.concurrent.TimeUnit

import oharastream.ohara.client.configurator.NodeApi
import oharastream.ohara.common.rule.OharaTest
import oharastream.ohara.common.util.{CommonUtils, Releasable}
import oharastream.ohara.configurator.Configurator
import org.junit.{After, Test}
import org.scalatest.matchers.should.Matchers._

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.Duration
import scala.concurrent.{Await, Future}

class TestNodeNameUpperCaseRoute extends OharaTest {
  private[this] val numberOfCluster = 1
  private[this] val configurator =
    Configurator.builder.fake(numberOfCluster, numberOfCluster, "zookeepercluster").build()
  private[this] val nodeApi                    = NodeApi.access.hostname(configurator.hostname).port(configurator.port)
  private[this] def result[T](f: Future[T]): T = Await.result(f, Duration(20, TimeUnit.SECONDS))

  @Test
  def testAddNodeNameLowerCase(): Unit = {
    val name = CommonUtils.randomString(10).toLowerCase
    result(nodeApi.request.nodeName(name).port(22).user("b").password("c").create()).name shouldBe name
  }

  @Test
  def testAddNodeNameUpperCase1(): Unit = {
    val name = CommonUtils.randomString(10).toUpperCase
    result(nodeApi.request.nodeName(name).port(22).user("b").password("c").create())
  }

  @Test
  def testAddNodeNameUpperCase2(): Unit = {
    val name = "HOST1.test"
    result(nodeApi.request.nodeName(name).port(22).user("b").password("c").create())
  }

  @Test
  def testAddNodeNameUpperCase3(): Unit = {
    val name = "aaa-Node1.test"
    result(nodeApi.request.nodeName(name).port(22).user("b").password("c").create())
  }

  @After
  def tearDown(): Unit = Releasable.close(configurator)
} 
Example 140
Source File: TestValidationRoute.scala    From ohara   with Apache License 2.0 5 votes vote down vote up
package oharastream.ohara.configurator.route

import oharastream.ohara.client.configurator.{ValidationApi, WorkerApi}
import oharastream.ohara.common.rule.OharaTest
import oharastream.ohara.common.setting.TopicKey
import oharastream.ohara.common.util.{CommonUtils, Releasable}
import oharastream.ohara.configurator.{Configurator, FallibleSink}
import org.junit.{After, Test}
import org.scalatest.matchers.should.Matchers._

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.Duration
import scala.concurrent.{Await, Future}
class TestValidationRoute extends OharaTest {
  private[this] val configurator = Configurator.builder.fake().build()

  private[this] val wkCluster = result(WorkerApi.access.hostname(configurator.hostname).port(configurator.port).list()).head

  private[this] def result[T](f: Future[T]): T = Await.result(f, Duration("20 seconds"))
  @Test
  def validateConnector(): Unit = {
    val className = classOf[FallibleSink].getName
    val response = result(
      ValidationApi.access
        .hostname(configurator.hostname)
        .port(configurator.port)
        .connectorRequest
        .name(CommonUtils.randomString(10))
        .className(className)
        .topicKey(TopicKey.of(CommonUtils.randomString(5), CommonUtils.randomString(5)))
        .workerClusterKey(wkCluster.key)
        .verify()
    )
    response.className.get() shouldBe className
  }

  @After
  def tearDown(): Unit = Releasable.close(configurator)
} 
Example 141
Source File: TestPrivateRoute.scala    From ohara   with Apache License 2.0 5 votes vote down vote up
package oharastream.ohara.configurator.route

import java.util.concurrent.TimeUnit

import oharastream.ohara.client.configurator.PrivateApi.Deletion
import oharastream.ohara.client.configurator.{PrivateApi, WorkerApi}
import oharastream.ohara.common.rule.OharaTest
import oharastream.ohara.configurator.Configurator
import org.junit.Test
import org.scalatest.matchers.should.Matchers._

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.Duration
import scala.concurrent.{Await, Future}

class TestPrivateRoute extends OharaTest {
  private[this] val workerCount  = 2
  private[this] val configurator = Configurator.builder.fake(1, workerCount).build()

  private[this] def result[T](f: Future[T]): T = Await.result(f, Duration(20, TimeUnit.SECONDS))

  private[this] val workerApi = WorkerApi.access.hostname(configurator.hostname).port(configurator.port)

  @Test
  def testDeletion(): Unit = {
    val workers = result(workerApi.list())
    val group   = workers.head.group
    val kind    = workers.head.kind
    workers.size shouldBe workerCount
    result(
      workerApi.request
        .group(group)
        .nodeNames(workers.head.nodeNames)
        .brokerClusterKey(workers.head.brokerClusterKey)
        .create()
    )

    result(workerApi.list()).size shouldBe workers.size + 1

    // we use the same group to create a new worker cluster
    result(workerApi.list()).groupBy(_.group).size shouldBe workerCount

    result(
      PrivateApi.delete(
        hostname = configurator.hostname,
        port = configurator.port,
        deletion = Deletion(groups = Set(group), kinds = Set(kind))
      )
    )

    val latestWorkers = result(workerApi.list())
    latestWorkers.size shouldBe workers.size - 1

    // delete again
    result(
      PrivateApi.delete(
        hostname = configurator.hostname,
        port = configurator.port,
        deletion = Deletion(groups = Set(group), kinds = Set(kind))
      )
    )
    result(workerApi.list()).size shouldBe latestWorkers.size

    // delete group without kind
    result(
      PrivateApi.delete(
        hostname = configurator.hostname,
        port = configurator.port,
        deletion = Deletion(groups = Set(latestWorkers.head.group), kinds = Set.empty)
      )
    )
    result(workerApi.list()).size shouldBe latestWorkers.size
  }
} 
Example 142
Source File: TestContainerRoute.scala    From ohara   with Apache License 2.0 5 votes vote down vote up
package oharastream.ohara.configurator.route

import oharastream.ohara.client.configurator._
import oharastream.ohara.common.rule.OharaTest
import oharastream.ohara.common.setting.ObjectKey
import oharastream.ohara.common.util.{CommonUtils, Releasable}
import oharastream.ohara.configurator.Configurator
import org.junit.{After, Before, Test}
import org.scalatest.matchers.should.Matchers._

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.{Await, Future}
import scala.concurrent.duration.Duration

class TestContainerRoute extends OharaTest {
  private[this] val configurator = Configurator.builder.fake(0, 0).build()
  private[this] val containerApi = ContainerApi.access.hostname(configurator.hostname).port(configurator.port)
  private[this] val brokerApi    = BrokerApi.access.hostname(configurator.hostname).port(configurator.port)
  private[this] val workerApi    = WorkerApi.access.hostname(configurator.hostname).port(configurator.port)

  private[this] val zkClusterKey = ObjectKey.of("default", CommonUtils.randomString(10))
  private[this] val bkClusterKey = ObjectKey.of("default", CommonUtils.randomString(10))
  private[this] val wkClusterKey = ObjectKey.of("default", CommonUtils.randomString(10))

  private[this] val nodeNames: Set[String] = Set("n0", "n1")

  private[this] def result[T](f: Future[T]): T = Await.result(f, Duration("20 seconds"))
  @Before
  def setup(): Unit = {
    val nodeApi = NodeApi.access.hostname(configurator.hostname).port(configurator.port)

    nodeNames.isEmpty shouldBe false
    nodeNames.foreach { n =>
      result(nodeApi.request.nodeName(n).port(22).user("user").password("pwd").create())
    }

    val zk = result(
      ZookeeperApi.access
        .hostname(configurator.hostname)
        .port(configurator.port)
        .request
        .key(zkClusterKey)
        .nodeNames(nodeNames)
        .create()
    )
    zk.key shouldBe zkClusterKey
    result(ZookeeperApi.access.hostname(configurator.hostname).port(configurator.port).start(zk.key))

    val bk = result(brokerApi.request.key(bkClusterKey).zookeeperClusterKey(zkClusterKey).nodeNames(nodeNames).create())
    result(brokerApi.start(bk.key))

    val wk = result(workerApi.request.key(wkClusterKey).brokerClusterKey(bkClusterKey).nodeNames(nodeNames).create())
    result(workerApi.start(wk.key))
  }

  @Test
  def testGetContainersOfBrokerCluster(): Unit = {
    val containerGroups = result(containerApi.get(bkClusterKey))
    containerGroups.size should not be 0
    containerGroups.foreach(group => {
      group.clusterKey shouldBe bkClusterKey
      group.clusterType shouldBe "broker"
      group.containers.size should not be 0
    })
  }

  @Test
  def testGetContainersOfWorkerCluster(): Unit = {
    val containerGroups = result(containerApi.get(wkClusterKey))
    containerGroups.size should not be 0
    containerGroups.foreach(group => {
      group.clusterKey shouldBe wkClusterKey
      group.clusterType shouldBe "worker"
      group.containers.size should not be 0
    })
  }

  @After
  def tearDown(): Unit = Releasable.close(configurator)
} 
Example 143
Source File: TestClusterNameUpperCaseRoute.scala    From ohara   with Apache License 2.0 5 votes vote down vote up
package oharastream.ohara.configurator.route

import oharastream.ohara.client.configurator.{NodeApi, ZookeeperApi}
import oharastream.ohara.common.rule.OharaTest
import oharastream.ohara.common.util.{CommonUtils, Releasable}
import oharastream.ohara.configurator.Configurator
import org.junit.{After, Test}
import org.scalatest.matchers.should.Matchers._
import spray.json.DeserializationException

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.Duration
import scala.concurrent.{Await, Future}

class TestClusterNameUpperCaseRoute extends OharaTest {
  private[this] val numberOfCluster = 1
  private[this] val configurator =
    Configurator.builder.fake(numberOfCluster, numberOfCluster, "zk").build()
  private[this] val nodeApi      = NodeApi.access.hostname(configurator.hostname).port(configurator.port)
  private[this] val zookeeperApi = ZookeeperApi.access.hostname(configurator.hostname).port(configurator.port)

  private[this] def result[T](f: Future[T]): T = Await.result(f, Duration("20 seconds"))
  @Test
  def testAddZookeeper(): Unit = {
    result(nodeApi.request.nodeName("host1").port(22).user("b").password("c").create())

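    // upper-case characters are not accepted in cluster names, so creation must fail during deserialization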
    an[DeserializationException] should be thrownBy result(
      zookeeperApi.request.name(s"ZK-${CommonUtils.randomString(10)}").nodeName("host1").create()
    )
  }

  @After
  def tearDown(): Unit = Releasable.close(configurator)
} 
Example 144
Source File: TestObjectRoute.scala    From ohara   with Apache License 2.0 5 votes vote down vote up
package oharastream.ohara.configurator.route

import java.util.concurrent.TimeUnit

import oharastream.ohara.client.configurator.ObjectApi
import oharastream.ohara.client.configurator.ObjectApi.ObjectInfo
import oharastream.ohara.common.rule.OharaTest
import oharastream.ohara.common.setting.ObjectKey
import oharastream.ohara.common.util.{CommonUtils, Releasable}
import oharastream.ohara.configurator.Configurator
import org.junit.{After, Test}
import org.scalatest.matchers.should.Matchers._
import spray.json.JsString

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.Duration
import scala.concurrent.{Await, Future}
class TestObjectRoute extends OharaTest {
  private[this] val configurator = Configurator.builder.fake(1, 1).build()

  private[this] val objectApi = ObjectApi.access.hostname(configurator.hostname).port(configurator.port)

  private[this] def result[T](f: Future[T]): T = Await.result(f, Duration(20, TimeUnit.SECONDS))

  private[this] def create(): ObjectInfo = {
    val key = ObjectKey.of("g", "n")
    val settings = Map(
      CommonUtils.randomString() -> JsString(CommonUtils.randomString()),
      CommonUtils.randomString() -> JsString(CommonUtils.randomString())
    )
    val objectInfo = result(objectApi.request.key(key).settings(settings).create())
    objectInfo.key shouldBe key
    settings.foreach {
      case (k, v) => objectInfo.settings(k) shouldBe v
    }
    objectInfo
  }

  @Test
  def testCreate(): Unit = create()

  @Test
  def testGet(): Unit = {
    val objectInfo = create()
    objectInfo shouldBe result(objectApi.get(objectInfo.key))
  }

  @Test
  def testGetNothing(): Unit =
    an[IllegalArgumentException] should be thrownBy result(objectApi.get(ObjectKey.of(CommonUtils.randomString(), "n")))

  @Test
  def testList(): Unit = {
    val objectInfo = create()
    objectInfo shouldBe result(objectApi.list()).head
  }

  @Test
  def testDelete(): Unit = {
    val objectInfo = create()
    result(objectApi.delete(objectInfo.key))
    result(objectApi.list()) shouldBe Seq.empty
  }

  @Test
  def testUpdate(): Unit = {
    val objectInfo = create()
    val settings = Map(
      CommonUtils.randomString() -> JsString(CommonUtils.randomString()),
      CommonUtils.randomString() -> JsString(CommonUtils.randomString())
    )
    val updated = result(objectApi.request.key(objectInfo.key).settings(settings).update())
    settings.foreach {
      case (k, v) => updated.settings(k) shouldBe v
    }
    objectInfo.settings.foreach {
      case (k, v) =>
        if (k == "lastModified") updated.settings(k) should not be v
        else updated.settings(k) shouldBe v
    }
  }

  @After
  def tearDown(): Unit = Releasable.close(configurator)
} 
Example 145
Source File: ConnectorTestUtils.scala    From ohara   with Apache License 2.0 5 votes vote down vote up
package oharastream.ohara.connector

import java.util.concurrent.TimeUnit

import oharastream.ohara.client.configurator.ConnectorApi.State
import oharastream.ohara.client.filesystem.FileSystem
import oharastream.ohara.client.kafka.ConnectorAdmin
import oharastream.ohara.common.exception.NoSuchFileException
import oharastream.ohara.common.setting.ConnectorKey
import oharastream.ohara.common.util.CommonUtils
import oharastream.ohara.testing.OharaTestUtils
import org.apache.kafka.connect.connector.Connector
import org.scalatest.matchers.should.Matchers._

import scala.jdk.CollectionConverters._
import scala.concurrent.Await
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.Duration

object ConnectorTestUtils {
  private[this] val TIMEOUT = java.time.Duration.ofSeconds(60)

  def assertFailedConnector(testingUtil: OharaTestUtils, connectorKey: ConnectorKey): Unit =
    assertFailedConnector(testingUtil.workersConnProps(), connectorKey)

  def assertFailedConnector(workersConnProps: String, connectorKey: ConnectorKey): Unit = CommonUtils.await(
    () => {
      val client = ConnectorAdmin(workersConnProps)
      try Await.result(client.status(connectorKey), Duration(20, TimeUnit.SECONDS)).connector.state == State.FAILED.name
      catch {
        case _: Throwable => false
      }
    },
    TIMEOUT
  )

  def checkConnector(testingUtil: OharaTestUtils, connectorKey: ConnectorKey): Unit =
    checkConnector(testingUtil.workersConnProps(), connectorKey)

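  // Polls the worker cluster until the connector and all of its tasks report RUNNING, or the 60-second timeout expires.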
  def checkConnector(workersConnProps: String, connectorKey: ConnectorKey): Unit =
    CommonUtils.await(
      () => {
        val connectorAdmin = ConnectorAdmin(workersConnProps)
        try {
          Await.result(connectorAdmin.activeConnectors(), Duration(10, TimeUnit.SECONDS)).contains(connectorKey)
          val status = Await.result(connectorAdmin.status(connectorKey), Duration(10, TimeUnit.SECONDS))
          status.connector.state == State.RUNNING.name && status.tasks.nonEmpty && status.tasks
            .forall(_.state == State.RUNNING.name)
        } catch {
          case _: Throwable => false
        }
      },
      TIMEOUT
    )

  def nonexistentFolderShouldFail(
    fileSystem: FileSystem,
    connectorClass: Class[_ <: Connector],
    props: Map[String, String],
    path: String
  ): Unit = {
    fileSystem.delete(path, true)
    intercept[NoSuchFileException] {
      val connector = connectorClass.getDeclaredConstructor().newInstance()
      try connector.start(props.asJava)
      finally connector.stop()
    }.getMessage should include("doesn't exist")
  }

  def fileShouldFail(
    fileSystem: FileSystem,
    connectorClass: Class[_ <: Connector],
    props: Map[String, String],
    path: String
  ): Unit = {
    fileSystem.delete(path, true)
    val output = fileSystem.create(path)
    try output.write("fileShouldFail".getBytes)
    finally output.close()
    intercept[IllegalArgumentException] {
      val connector = connectorClass.getDeclaredConstructor().newInstance()
      try connector.start(props.asJava)
      finally connector.stop()
    }.getMessage should include("NOT folder")
  }
} 
Example 146
Source File: TestPerfDefinition.scala    From ohara   with Apache License 2.0 5 votes vote down vote up
package oharastream.ohara.connector.perf

import java.util.concurrent.TimeUnit

import oharastream.ohara.client.kafka.ConnectorAdmin
import oharastream.ohara.common.setting.SettingDef.{Necessary, Permission, Reference}
import oharastream.ohara.common.setting.{ConnectorKey, SettingDef, TopicKey}
import oharastream.ohara.common.util.CommonUtils
import oharastream.ohara.kafka.connector.json.ConnectorDefUtils
import oharastream.ohara.testing.WithBrokerWorker
import org.junit.Test
import org.scalatest.matchers.should.Matchers._

import scala.jdk.CollectionConverters._
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.Duration
import scala.concurrent.{Await, Future}
class TestPerfDefinition extends WithBrokerWorker {
  private[this] val perfSource                 = new PerfSource
  private[this] val connectorAdmin             = ConnectorAdmin(testUtil().workersConnProps())
  private[this] def result[T](f: Future[T]): T = Await.result(f, Duration(20, TimeUnit.SECONDS))

  @Test
  def checkBatch(): Unit = {
    val definition = perfSource.settingDefinitions().get(PERF_BATCH_KEY)
    definition.necessary() should not be Necessary.REQUIRED
    definition.defaultInt() shouldBe PERF_BATCH_DEFAULT
    definition.permission() shouldBe Permission.EDITABLE
    definition.internal() shouldBe false
    definition.reference() shouldBe Reference.NONE
    definition.valueType() shouldBe SettingDef.Type.INT
  }

  @Test
  def checkFrequence(): Unit = {
    val definition = perfSource.settingDefinitions().get(PERF_FREQUENCY_KEY)
    definition.necessary() should not be Necessary.REQUIRED
    definition.defaultDuration() shouldBe java.time.Duration.ofMillis(PERF_FREQUENCY_DEFAULT.toMillis)
    definition.permission() shouldBe Permission.EDITABLE
    definition.internal() shouldBe false
    definition.reference() shouldBe Reference.NONE
    definition.valueType() shouldBe SettingDef.Type.DURATION
  }

  @Test
  def testSource(): Unit = {
    val topicKey = TopicKey.of(CommonUtils.randomString(5), CommonUtils.randomString(5))
    val response = result(
      connectorAdmin
        .connectorValidator()
        .connectorKey(ConnectorKey.of(CommonUtils.randomString(5), CommonUtils.randomString(5)))
        .numberOfTasks(1)
        .topicKey(topicKey)
        .connectorClass(classOf[PerfSource])
        .run()
    )

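    // topic names, connector class and worker cluster key are required; the number of tasks is optional and columns are not required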
    response.settings().size should not be 0
    response
      .settings()
      .asScala
      .filter(_.definition().key() == ConnectorDefUtils.TOPIC_NAMES_DEFINITION.key())
      .head
      .definition()
      .necessary() shouldBe Necessary.REQUIRED
    response
      .settings()
      .asScala
      .filter(_.definition().key() == ConnectorDefUtils.CONNECTOR_CLASS_DEFINITION.key())
      .head
      .definition()
      .necessary() shouldBe Necessary.REQUIRED
    response
      .settings()
      .asScala
      .filter(_.definition().key() == ConnectorDefUtils.NUMBER_OF_TASKS_DEFINITION.key())
      .head
      .definition()
      .necessary() shouldBe Necessary.OPTIONAL
    response
      .settings()
      .asScala
      .filter(_.definition().key() == ConnectorDefUtils.COLUMNS_DEFINITION.key())
      .head
      .definition()
      .necessary() should not be Necessary.REQUIRED
    response
      .settings()
      .asScala
      .filter(_.definition().key() == ConnectorDefUtils.WORKER_CLUSTER_KEY_DEFINITION.key())
      .head
      .definition()
      .necessary() shouldBe Necessary.REQUIRED
    response.errorCount() shouldBe 0
  }
} 
Example 147
Source File: TestFileInfoApi.scala    From ohara   with Apache License 2.0 5 votes vote down vote up
package oharastream.ohara.client.configurator

import java.io.File

import oharastream.ohara.client.configurator.FileInfoApi.FileInfo
import oharastream.ohara.common.rule.OharaTest
import oharastream.ohara.common.util.CommonUtils
import org.junit.Test
import org.scalatest.matchers.should.Matchers._
import spray.json.JsString

import scala.concurrent.ExecutionContext.Implicits.global
class TestFileInfoApi extends OharaTest {
  private[this] def access: FileInfoApi.Access = FileInfoApi.access.hostname(CommonUtils.hostname()).port(22)

  @Test
  def nullKeyInGet(): Unit =
    an[NullPointerException] should be thrownBy access.get(null)

  @Test
  def nullKeyInDelete(): Unit =
    an[NullPointerException] should be thrownBy access.delete(null)

  @Test
  def emptyName(): Unit = an[IllegalArgumentException] should be thrownBy access.request.name("")

  @Test
  def nullName(): Unit = an[NullPointerException] should be thrownBy access.request.name(null)

  @Test
  def emptyGroup(): Unit = an[IllegalArgumentException] should be thrownBy access.request.group("")

  @Test
  def nullGroup(): Unit = an[NullPointerException] should be thrownBy access.request.group(null)

  @Test
  def nullFile(): Unit = an[NullPointerException] should be thrownBy access.request.file(null)

  @Test
  def nonexistentFile(): Unit =
    an[IllegalArgumentException] should be thrownBy access.request.file(new File(CommonUtils.randomString(5)))

  @Test
  def nullTags(): Unit = an[NullPointerException] should be thrownBy access.request.tags(null)

  @Test
  def emptyTags(): Unit = access.request.tags(Map.empty)

  @Test
  def bytesMustBeEmptyAfterSerialization(): Unit = {
    val bytes = CommonUtils.randomString().getBytes()
    val fileInfo = new FileInfo(
      group = CommonUtils.randomString(),
      name = CommonUtils.randomString(),
      lastModified = CommonUtils.current(),
      bytes = bytes,
      url = None,
      classInfos = Seq.empty,
      tags = Map("a" -> JsString("b"))
    )

    val copy = FileInfoApi.FILE_INFO_FORMAT.read(FileInfoApi.FILE_INFO_FORMAT.write(fileInfo))
    copy.group shouldBe fileInfo.group
    copy.name shouldBe fileInfo.name
    copy.lastModified shouldBe fileInfo.lastModified
    copy.bytes shouldBe Array.empty
    copy.url shouldBe fileInfo.url
    copy.tags shouldBe fileInfo.tags
  }

  @Test
  def nullUrlShouldBeRemoved(): Unit = {
    val fileInfo = new FileInfo(
      group = CommonUtils.randomString(),
      name = CommonUtils.randomString(),
      lastModified = CommonUtils.current(),
      bytes = Array.emptyByteArray,
      url = None,
      classInfos = Seq.empty,
      tags = Map("a" -> JsString("b"))
    )
    FileInfoApi.FILE_INFO_FORMAT.write(fileInfo).asJsObject.fields should not contain "url"
  }
} 
Example 148
Source File: Test873.scala    From ohara   with Apache License 2.0 5 votes vote down vote up
package oharastream.ohara.client.kafka

import java.util.concurrent.TimeUnit

import akka.actor.ActorSystem
import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport._
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.{Http, server}
import oharastream.ohara.client.kafka.WorkerJson.{ConnectorCreationResponse, KafkaConnectorTaskId, _}
import oharastream.ohara.common.rule.OharaTest
import oharastream.ohara.common.setting.ConnectorKey
import oharastream.ohara.common.util.CommonUtils
import oharastream.ohara.kafka.connector.json.Creation
import org.junit.Test
import org.scalatest.matchers.should.Matchers._

import scala.concurrent.{Await, Future}
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.Duration
import scala.jdk.CollectionConverters._


class Test873 extends OharaTest {
  private[this] def result[T](f: Future[T]): T = Await.result(f, Duration(60, TimeUnit.SECONDS))

  @Test
  def testCreateConnector(): Unit = {
    val className = CommonUtils.randomString()
    val settings = Map(
      CommonUtils.randomString() -> CommonUtils.randomString()
    )
    val tasks = Seq(
      KafkaConnectorTaskId(
        connector = CommonUtils.randomString(),
        task = 10
      )
    )
    val server = toServer {
      path("connectors") {
        post {
          entity(as[Creation]) { req =>
            complete(
              ConnectorCreationResponse(
                name = req.name(),
                config = req.configs().asScala.toMap,
                tasks = tasks
              )
            )
          }
        }
      }
    }

    try {
      val connectorKey = ConnectorKey.of(CommonUtils.randomString(5), CommonUtils.randomString(5))
      val client       = ConnectorAdmin(s"${server.hostname}:${server.port}")
      val response = result(
        client.connectorCreator().connectorKey(connectorKey).settings(settings).className(className).create()
      )
      response.name shouldBe connectorKey.connectorNameOnKafka()
      response.tasks shouldBe tasks
      settings.foreach {
        case (k, v) =>
          response.config(k) shouldBe v
      }
    } finally server.close()
  }

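  // Binds the route to a random local port and exposes the hostname/port plus a close() that unbinds and terminates the actor system.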
  private[this] def toServer(route: server.Route): SimpleServer = {
    implicit val system: ActorSystem = ActorSystem("my-system")
    val server                       = Await.result(Http().bindAndHandle(route, "localhost", 0), Duration(30, TimeUnit.SECONDS))
    new SimpleServer {
      override def hostname: String = server.localAddress.getHostString
      override def port: Int        = server.localAddress.getPort
      override def close(): Unit = {
        Await.result(server.unbind(), Duration(30, TimeUnit.SECONDS))
        Await.result(system.terminate(), Duration(30, TimeUnit.SECONDS))
      }
    }
  }
} 
Example 149
Source File: SimpleRowSourceTask.scala    From ohara   with Apache License 2.0 5 votes vote down vote up
package oharastream.ohara.client.kafka

import java.util
import java.util.concurrent.LinkedBlockingQueue
import java.util.concurrent.atomic.AtomicBoolean

import oharastream.ohara.common.data.{Row, Serializer}
import oharastream.ohara.common.setting.TopicKey
import oharastream.ohara.common.util.Releasable
import oharastream.ohara.kafka.Consumer
import oharastream.ohara.kafka.connector.{RowSourceRecord, RowSourceTask, TaskSetting}

import scala.jdk.CollectionConverters._
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
class SimpleRowSourceTask extends RowSourceTask {
  private[this] var settings: TaskSetting                = _
  private[this] val queue                                = new LinkedBlockingQueue[RowSourceRecord]
  private[this] val closed                               = new AtomicBoolean(false)
  private[this] var consumer: Consumer[Row, Array[Byte]] = _
  override protected def run(settings: TaskSetting): Unit = {
    this.settings = settings
    this.consumer = Consumer
      .builder()
      .connectionProps(settings.stringValue(SimpleRowSourceConnector.BROKER))
      .groupId(settings.name)
      .topicKeys(java.util.Set.copyOf(TopicKey.toTopicKeys(settings.stringValue(SimpleRowSourceConnector.INPUT))))
      .offsetFromBegin()
      .keySerializer(Serializer.ROW)
      .valueSerializer(Serializer.BYTES)
      .build()
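    // the polling loop runs on the imported global execution context until terminate() flips the closed flag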
    Future {
      try while (!closed.get) {
        consumer
          .poll(java.time.Duration.ofSeconds(2))
          .asScala
          .filter(_.key.isPresent)
          .map(_.key.get)
          .flatMap(
            row => settings.topicKeys().asScala.map(topic => RowSourceRecord.builder().row(row).topicKey(topic).build())
          )
          .foreach(r => queue.put(r))
      } finally Releasable.close(consumer)
    }
  }

  override protected def pollRecords(): util.List[RowSourceRecord] =
    Iterator.continually(queue.poll()).takeWhile(_ != null).toSeq.asJava

  override protected def terminate(): Unit = {
    closed.set(true)
    consumer.wakeup()
  }
} 
Example 150
Source File: TestConnectorValidator.scala    From ohara   with Apache License 2.0 5 votes vote down vote up
package oharastream.ohara.client.kafka

import oharastream.ohara.common.rule.OharaTest
import org.junit.Test
import org.scalatest.matchers.should.Matchers._
import scala.concurrent.ExecutionContext.Implicits.global
class TestConnectorValidator extends OharaTest {
  
  private[this] val notWorkingClient = ConnectorAdmin("localhost:2222")

  @Test
  def ignoreClassName(): Unit =
    an[NoSuchElementException] should be thrownBy notWorkingClient.connectorValidator().run()

  @Test
  def nullSettingKey(): Unit =
    an[NullPointerException] should be thrownBy notWorkingClient.connectorValidator().setting(null, "asdsad")

  @Test
  def emptySettingKey(): Unit =
    an[IllegalArgumentException] should be thrownBy notWorkingClient.connectorValidator().setting("", "asdsad")

  @Test
  def nullSettingValue(): Unit =
    an[NullPointerException] should be thrownBy notWorkingClient.connectorValidator().setting("asdsad", null)

  @Test
  def emptySettingValue(): Unit =
    an[IllegalArgumentException] should be thrownBy notWorkingClient.connectorValidator().setting("asdsad", "")

  @Test
  def nullSettings(): Unit =
    an[NullPointerException] should be thrownBy notWorkingClient.connectorValidator().settings(null)

  @Test
  def emptySettings(): Unit =
    an[IllegalArgumentException] should be thrownBy notWorkingClient.connectorValidator().settings(Map.empty)

  @Test
  def nullSchema(): Unit =
    an[NullPointerException] should be thrownBy notWorkingClient.connectorValidator().columns(null)

  @Test
  def illegalNumberOfTasks(): Unit =
    an[IllegalArgumentException] should be thrownBy notWorkingClient.connectorValidator().numberOfTasks(-1)

  @Test
  def nullClass(): Unit =
    an[NullPointerException] should be thrownBy notWorkingClient
      .connectorValidator()
      .connectorClass(null.asInstanceOf[Class[_]])

  @Test
  def nullClassName(): Unit =
    an[NullPointerException] should be thrownBy notWorkingClient
      .connectorValidator()
      .className(null.asInstanceOf[String])

  @Test
  def emptyClassName(): Unit =
    an[IllegalArgumentException] should be thrownBy notWorkingClient.connectorValidator().className("")

  @Test
  def nullTopicKey(): Unit =
    an[NullPointerException] should be thrownBy notWorkingClient.connectorValidator().topicKey(null)

  @Test
  def nullTopicKeys(): Unit =
    an[NullPointerException] should be thrownBy notWorkingClient.connectorValidator().topicKeys(null)

  @Test
  def emptyTopicKeys(): Unit =
    an[IllegalArgumentException] should be thrownBy notWorkingClient.connectorValidator().topicKeys(Set.empty)
} 
Example 151
Source File: LifetimeAllowanceService.scala    From pertax-frontend   with Apache License 2.0 5 votes vote down vote up
package services

import com.kenshoo.play.metrics.Metrics
import com.google.inject.{Inject, Singleton}
import metrics.HasMetrics
import models.LtaProtections
import play.api.Mode.Mode
import play.api.{Configuration, Environment, Logger}
import services.http.SimpleHttp
import uk.gov.hmrc.domain.Nino
import uk.gov.hmrc.http.{HeaderCarrier, HttpReads}
import uk.gov.hmrc.play.bootstrap.config.ServicesConfig

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
@Singleton
class LifetimeAllowanceService @Inject()(
  environment: Environment,
  configuration: Configuration,
  val simpleHttp: SimpleHttp,
  val metrics: Metrics,
  servicesConfig: ServicesConfig)
    extends HasMetrics {

  val mode: Mode = environment.mode
  val runModeConfiguration: Configuration = configuration
  lazy val lifetimeAllowanceUrl = servicesConfig.baseUrl("pensions-lifetime-allowance")

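  // Calls the LTA service and maps a 2xx response to the protections count, logging a warning and returning None otherwise.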
  def getCount(nino: Nino)(implicit hc: HeaderCarrier, rds: HttpReads[LtaProtections]): Future[Option[Int]] =
    withMetricsTimer("has-lta-response") { t =>
      simpleHttp.get[Option[Int]](
        lifetimeAllowanceUrl + s"/protect-your-lifetime-allowance/individuals/$nino/protections/count")(
        onComplete = {
          case r if r.status >= 200 && r.status < 300 =>
            t.completeTimerAndIncrementSuccessCounter()
            Some((r.json.as[LtaProtections]).count)

          case r =>
            t.completeTimerAndIncrementFailedCounter()
            Logger.warn(
              s"Unexpected ${r.status} response getting lifetime allowance protections count from LTA service")
            None
        },
        onError = {
          case e =>
            t.completeTimerAndIncrementFailedCounter()
            Logger.warn("Error getting lifetime allowance protections count from LTA service", e)
            None
        }
      )
    }

  def hasLtaProtection(nino: Nino)(implicit hc: HeaderCarrier, rds: HttpReads[LtaProtections]): Future[Boolean] =
    getCount(nino) map {
      case (Some(0) | None) => false
      case _                => true
    }
} 
Example 152
Source File: HomePageCachingHelper.scala    From pertax-frontend   with Apache License 2.0 5 votes vote down vote up
package controllers.controllershelpers

import com.google.inject.Inject
import services.LocalSessionCache
import uk.gov.hmrc.http.HeaderCarrier
import uk.gov.hmrc.http.cache.client.CacheMap

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future

class HomePageCachingHelper @Inject()(
  val sessionCache: LocalSessionCache
) {

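  // Reads the "urBannerDismissed" flag from the session cache, defaulting to false when no entry exists.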
  def hasUserDismissedUrInvitation[T](implicit hc: HeaderCarrier): Future[Boolean] =
    sessionCache.fetch() map {
      case Some(cacheMap) => cacheMap.getEntry[Boolean]("urBannerDismissed").getOrElse(false)
      case None           => false
    }

  def storeUserUrDismissal()(implicit hc: HeaderCarrier): Future[CacheMap] =
    sessionCache.cache("urBannerDismissed", true)
} 
Example 153
Source File: FormPartialServiceSpec.scala    From pertax-frontend   with Apache License 2.0 5 votes vote down vote up
package services

import com.codahale.metrics.Timer
import com.kenshoo.play.metrics.Metrics
import config.ConfigDecorator
import org.mockito.Matchers._
import org.mockito.Mockito._
import org.scalatestplus.mockito.MockitoSugar
import play.api.{Configuration, Environment}
import play.twirl.api.Html
import services.partials.FormPartialService
import uk.gov.hmrc.crypto.ApplicationCrypto
import uk.gov.hmrc.play.bootstrap.config.ServicesConfig
import uk.gov.hmrc.play.bootstrap.filters.frontend.crypto.SessionCookieCrypto
import uk.gov.hmrc.play.bootstrap.http.DefaultHttpClient
import uk.gov.hmrc.play.partials.HtmlPartial
import util.BaseSpec
import util.Fixtures._

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future

class FormPartialServiceSpec extends BaseSpec {

  trait LocalSetup {
    val servicesConfig = app.injector.instanceOf[ServicesConfig]
    val timer = MockitoSugar.mock[Timer.Context]
    val formPartialService: FormPartialService = new FormPartialService(
      injected[Environment],
      injected[Configuration],
      MockitoSugar.mock[DefaultHttpClient],
      MockitoSugar.mock[Metrics],
      MockitoSugar.mock[ConfigDecorator],
      injected[SessionCookieCrypto],
      servicesConfig
    ) {
      override val metricsOperator: MetricsOperator = MockitoSugar.mock[MetricsOperator]
      when(metricsOperator.startTimer(any())) thenReturn timer
    }
  }

  "Calling FormPartialServiceSpec" should {

    "return form list for National insurance" in new LocalSetup {

      when(formPartialService.http.GET[HtmlPartial](any())(any(), any(), any())) thenReturn
        Future.successful[HtmlPartial](HtmlPartial.Success(Some("Title"), Html("<title/>")))

      formPartialService.getNationalInsurancePartial(buildFakeRequestWithAuth("GET")).map(p => p shouldBe "<title/>")
      verify(formPartialService.http, times(1)).GET[Html](any())(any(), any(), any())
    }

    "return form list for Self-assessment" in new LocalSetup {

      when(formPartialService.http.GET[HtmlPartial](any())(any(), any(), any())) thenReturn
        Future.successful[HtmlPartial](HtmlPartial.Success(Some("Title"), Html("<title/>")))

      formPartialService.getSelfAssessmentPartial(buildFakeRequestWithAuth("GET")).map(p => p shouldBe "<title/>")
      verify(formPartialService.http, times(1)).GET[Html](any())(any(), any(), any())
    }

  }

} 
Example 154
Source File: PaperlessPreferencesControllerSpec.scala    From pertax-frontend   with Apache License 2.0 5 votes vote down vote up
package controllers

import config.ConfigDecorator
import controllers.auth.requests.UserRequest
import controllers.auth.{AuthJourney, WithActiveTabAction, WithBreadcrumbAction}
import models.{ActivatedOnlineFilerSelfAssessmentUser, NonFilerSelfAssessmentUser}
import org.mockito.Matchers._
import org.mockito.Mockito._
import org.scalatestplus.mockito.MockitoSugar
import play.api.i18n.MessagesApi
import play.api.mvc.{ActionBuilder, MessagesControllerComponents, Request, Result}
import play.api.test.FakeRequest
import play.api.test.Helpers._
import play.twirl.api.Html
import services.partials.PreferencesFrontendPartialService
import uk.gov.hmrc.auth.core.ConfidenceLevel
import uk.gov.hmrc.auth.core.retrieve.Credentials
import uk.gov.hmrc.domain.SaUtr
import uk.gov.hmrc.play.partials.HtmlPartial
import uk.gov.hmrc.renderer.TemplateRenderer
import util.UserRequestFixture.buildUserRequest
import util.{ActionBuilderFixture, BaseSpec, BetterOptionValues, LocalPartialRetriever, Tools}

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.{ExecutionContext, Future}

class PaperlessPreferencesControllerSpec extends BaseSpec with MockitoSugar {
  import BetterOptionValues._

  override implicit lazy val app = localGuiceApplicationBuilder().build()

  val mockPreferencesFrontendPartialService = mock[PreferencesFrontendPartialService]
  val mockAuthJourney = mock[AuthJourney]

  def controller: PaperlessPreferencesController =
    new PaperlessPreferencesController(
      mockPreferencesFrontendPartialService,
      mockAuthJourney,
      injected[WithActiveTabAction],
      injected[WithBreadcrumbAction],
      injected[MessagesControllerComponents],
      injected[Tools]
    )(mock[LocalPartialRetriever], injected[ConfigDecorator], injected[TemplateRenderer], injected[ExecutionContext]) {}

  "Calling PaperlessPreferencesController.managePreferences" should {
    "Redirect to  preferences-frontend manage paperless url when a user is logged in using GG" in {

      when(mockAuthJourney.authWithPersonalDetails).thenReturn(new ActionBuilderFixture {
        override def invokeBlock[A](request: Request[A], block: UserRequest[A] => Future[Result]): Future[Result] =
          block(
            buildUserRequest(request = request)
          )
      })

      val r = controller.managePreferences(FakeRequest())
      status(r) shouldBe SEE_OTHER

      val redirectUrl = redirectLocation(r).getValue
      val configDecorator = app.injector.instanceOf[ConfigDecorator]
      redirectUrl should include regex s"${configDecorator.preferencesFrontendService}/paperless/check-settings\\?returnUrl=.*\\&returnLinkText=.*"
    }

    "Return 400 for Verify users" in {

      when(mockAuthJourney.authWithPersonalDetails).thenReturn(new ActionBuilderFixture {
        override def invokeBlock[A](request: Request[A], block: UserRequest[A] => Future[Result]): Future[Result] =
          block(
            buildUserRequest(
              credentials = Credentials("", "Verify"),
              confidenceLevel = ConfidenceLevel.L500,
              request = request
            ))
      })

      val r = controller.managePreferences(FakeRequest())
      status(r) shouldBe BAD_REQUEST
    }
  }
} 
Example 155
Source File: EnrolmentsConnectorSpec.scala    From pertax-frontend   with Apache License 2.0 5 votes vote down vote up
package connectors

import models._
import org.joda.time.DateTime
import org.mockito.Matchers.{any, eq => eqTo}
import org.mockito.Mockito.when
import org.scalatest.EitherValues
import org.scalatest.Inspectors.forAll
import org.scalatest.concurrent.ScalaFutures
import org.scalatestplus.mockito.MockitoSugar
import play.api.http.Status._
import play.api.libs.json.{JsObject, JsResultException, Json}
import uk.gov.hmrc.http.{HttpException, HttpResponse}
import uk.gov.hmrc.play.bootstrap.http.DefaultHttpClient
import util.BaseSpec

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future

class EnrolmentsConnectorSpec extends BaseSpec with MockitoSugar with ScalaFutures with EitherValues {

  val http = mock[DefaultHttpClient]
  val connector = new EnrolmentsConnector(http, config)
  val baseUrl = config.enrolmentStoreProxyUrl

  "getAssignedEnrolments" should {
    val utr = "1234500000"
    val url = s"$baseUrl/enrolment-store/enrolments/IR-SA~UTR~$utr/users"

    "Return the error message for a BAD_REQUEST response" in {
      when(http.GET[HttpResponse](eqTo(url))(any(), any(), any()))
        .thenReturn(Future.successful(HttpResponse(BAD_REQUEST)))

      connector.getUserIdsWithEnrolments(utr).futureValue.left.value should include(BAD_REQUEST.toString)
    }

    "NO_CONTENT response should return no enrolments" in {
      when(http.GET[HttpResponse](eqTo(url))(any(), any(), any()))
        .thenReturn(Future.successful(HttpResponse(NO_CONTENT)))

      connector.getUserIdsWithEnrolments(utr).futureValue.right.value shouldBe Seq.empty
    }

    "query users with no principal enrolment returns empty enrolments" in {
      val json = Json.parse("""
                              |{
                              |    "principalUserIds": [],
                              |     "delegatedUserIds": []
                              |}""".stripMargin)

      when(http.GET[HttpResponse](eqTo(url))(any(), any(), any()))
        .thenReturn(Future.successful(HttpResponse(OK, Some(json))))

      connector.getUserIdsWithEnrolments(utr).futureValue.right.value shouldBe Seq.empty
    }

    "query users with an assigned enrolment returns two principalIds" in {
      val json = Json.parse("""
                              |{
                              |    "principalUserIds": [
                              |       "ABCEDEFGI1234567",
                              |       "ABCEDEFGI1234568"
                              |    ],
                              |    "delegatedUserIds": [
                              |     "dont care"
                              |    ]
                              |}""".stripMargin)

      when(http.GET[HttpResponse](eqTo(url))(any(), any(), any()))
        .thenReturn(Future.successful(HttpResponse(OK, Some(json))))

      val expected = Seq("ABCEDEFGI1234567", "ABCEDEFGI1234568")

      connector.getUserIdsWithEnrolments(utr).futureValue.right.value shouldBe expected
    }
  }
} 
Example 156
Source File: PayApiConnectorSpec.scala    From pertax-frontend   with Apache License 2.0 5 votes vote down vote up
package connectors

import models.{CreatePayment, PaymentRequest}
import org.mockito.Matchers.{any, eq => eqTo}
import org.mockito.Mockito.when
import org.scalatest.concurrent.ScalaFutures
import org.scalatestplus.mockito.MockitoSugar
import play.api.http.Status._
import play.api.libs.json.{JsResultException, Json}
import uk.gov.hmrc.http.HttpResponse
import uk.gov.hmrc.play.bootstrap.http.DefaultHttpClient
import util.BaseSpec

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future

class PayApiConnectorSpec extends BaseSpec with MockitoSugar with ScalaFutures {

  val http = mock[DefaultHttpClient]
  val connector = new PayApiConnector(http, config)
  val paymentRequest = PaymentRequest(config, "some utr")
  val postUrl = config.makeAPaymentUrl

  "createPayment" should {
    "parse the json payload for a successful CREATED response" in {
      val json = Json.obj(
        "journeyId" -> "exampleJourneyId",
        "nextUrl"   -> "testNextUrl"
      )

      when(
        http.POST[PaymentRequest, HttpResponse](eqTo(postUrl), eqTo(paymentRequest), any())(any(), any(), any(), any()))
        .thenReturn(Future.successful(HttpResponse(CREATED, Some(json))))

      connector.createPayment(paymentRequest).futureValue shouldBe Some(
        CreatePayment("exampleJourneyId", "testNextUrl"))
    }

    "Returns a None when the status code is not CREATED" in {
      when(
        http.POST[PaymentRequest, HttpResponse](eqTo(postUrl), eqTo(paymentRequest), any())(any(), any(), any(), any()))
        .thenReturn(Future.successful(HttpResponse(BAD_REQUEST)))

      connector.createPayment(paymentRequest).futureValue shouldBe None
    }

    "Throws a JsResultException when given bad json" in {
      val badJson = Json.obj("abc" -> "invalidData")

      when(
        http.POST[PaymentRequest, HttpResponse](eqTo(postUrl), eqTo(paymentRequest), any())(any(), any(), any(), any()))
        .thenReturn(Future.successful(HttpResponse(CREATED, Some(badJson))))

      val f = connector.createPayment(paymentRequest)
      whenReady(f.failed) { e =>
        e shouldBe a[JsResultException]
      }
    }
  }
} 
Example 157
Source File: TrackerImplTest.scala    From daml   with Apache License 2.0 5 votes vote down vote up
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.platform.apiserver.services.tracking

import akka.NotUsed
import akka.stream.OverflowStrategy
import akka.stream.scaladsl.{Keep, Source, SourceQueueWithComplete}
import akka.stream.testkit.TestSubscriber
import akka.stream.testkit.scaladsl.TestSink
import com.daml.ledger.api.testing.utils.{
  AkkaBeforeAndAfterAll,
  IsStatusException,
  TestingException
}
import com.daml.ledger.api.v1.command_service.SubmitAndWaitRequest
import com.daml.ledger.api.v1.commands.Commands
import com.daml.ledger.api.v1.completion.Completion
import com.daml.dec.DirectExecutionContext
import com.google.rpc.status.{Status => RpcStatus}
import io.grpc.Status
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.{BeforeAndAfterEach, Matchers, Succeeded, WordSpec}

import scala.concurrent.ExecutionContext.Implicits.global

class TrackerImplTest
    extends WordSpec
    with Matchers
    with BeforeAndAfterEach
    with ScalaFutures
    with AkkaBeforeAndAfterAll {

  private var sut: Tracker = _
  private var consumer: TestSubscriber.Probe[NotUsed] = _
  private var queue: SourceQueueWithComplete[TrackerImpl.QueueInput] = _

  private def input(cid: Int) = SubmitAndWaitRequest(Some(Commands(commandId = cid.toString)))

  override protected def beforeEach(): Unit = {
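    // a 1-element queue that completes each submission's promise immediately and drops new items when it is full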
    val (q, sink) = Source
      .queue[TrackerImpl.QueueInput](1, OverflowStrategy.dropNew)
      .map { in =>
        in.context.success(Completion(in.value.getCommands.commandId, Some(RpcStatus())))
        NotUsed
      }
      .toMat(TestSink.probe[NotUsed])(Keep.both)
      .run()
    queue = q
    sut = new TrackerImpl(q)
    consumer = sink
  }

  override protected def afterEach(): Unit = {
    consumer.cancel()
    queue.complete()
  }

  "Tracker Implementation" when {

    "input is submitted, and the queue is available" should {

      "work successfully" in {

        val resultF1 = sut.track(input(1))
        consumer.requestNext()
        val resultF = resultF1.flatMap(_ => sut.track(input(2)))(DirectExecutionContext)
        consumer.requestNext()
        whenReady(resultF)(_ => Succeeded)
      }
    }

    "input is submitted, and the queue is backpressuring" should {

      "return a RESOURCE_EXHAUSTED error" in {

        sut.track(input(1))
        whenReady(sut.track(input(2)).failed)(IsStatusException(Status.RESOURCE_EXHAUSTED))
      }
    }

    "input is submitted, and the queue has been completed" should {

      "return an ABORTED error" in {

        queue.complete()
        whenReady(sut.track(input(2)).failed)(IsStatusException(Status.ABORTED))
      }
    }

    "input is submitted, and the queue has failed" should {

      "return an ABORTED error" in {

        queue.fail(TestingException("The queue fails with this error."))
        whenReady(sut.track(input(2)).failed)(IsStatusException(Status.ABORTED))
      }
    }
  }
} 
Example 158
Source File: ValidatorTestUtils.scala    From daml   with Apache License 2.0 5 votes vote down vote up
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.ledger.api.validation

import brave.propagation
import com.daml.lf.data.Ref
import com.daml.ledger.api.domain
import com.daml.ledger.api.messages.transaction
import io.grpc.Status.Code
import io.grpc.StatusRuntimeException
import org.scalatest._

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
trait ValidatorTestUtils extends Matchers with Inside with OptionValues { self: Suite =>

  protected val traceIdHigh = 1L
  protected val traceId = 2L
  protected val spanId = 3L
  protected val parentSpanId = Some(4L)
  protected val sampled = true
  protected val includedModule = "includedModule"
  protected val includedTemplate = "includedTemplate"
  protected val expectedLedgerId = "expectedLedgerId"
  protected val packageId = Ref.PackageId.assertFromString("packageId")
  protected val absoluteOffset = Ref.LedgerString.assertFromString("42")
  protected val party = Ref.Party.assertFromString("party")
  protected val verbose = false
  protected val eventId = "eventId"
  protected val transactionId = "42"
  protected val offsetOrdering = Ordering.by[domain.LedgerOffset.Absolute, Int](_.value.toInt)
  protected val ledgerEnd = domain.LedgerOffset.Absolute(Ref.LedgerString.assertFromString("1000"))

  protected def hasExpectedFilters(req: transaction.GetTransactionsRequest) = {
    val filtersByParty = req.filter.filtersByParty
    filtersByParty should have size 1
    inside(filtersByParty.headOption.value) {
      case (p, filters) =>
        p shouldEqual party
        filters shouldEqual domain.Filters(
          Some(domain.InclusiveFilters(Set(Ref.Identifier(
            Ref.PackageId.assertFromString(packageId),
            Ref.QualifiedName(
              Ref.DottedName.assertFromString(includedModule),
              Ref.DottedName.assertFromString(includedTemplate))
          )))))
    }
  }

  protected def hasExpectedTraceContext(req: transaction.GetTransactionsRequest) = {
    inside(req.traceContext.value) {
      case e => isExpectedTraceContext(e)
    }
  }

  protected def isExpectedTraceContext(e: propagation.TraceContext) = {
    e.traceIdHigh() shouldEqual traceIdHigh
    e.traceId() shouldEqual traceId
    e.spanId() shouldEqual spanId
    Option(e.parentId()) shouldEqual parentSpanId
    e.sampled() shouldEqual sampled
  }

  protected def requestMustFailWith(
      request: Future[_],
      code: Code,
      description: String): Future[Assertion] = {
    val f = request.map(Right(_)).recover { case ex: StatusRuntimeException => Left(ex) }
    f.map(inside(_)(isError(code, description)))
  }

  protected def requestMustFailWith(
      request: Either[StatusRuntimeException, _],
      code: Code,
      description: String): Assertion = {
    inside(request)(isError(code, description))
  }
  protected def isError(expectedCode: Code, expectedDescription: String)
    : PartialFunction[Either[StatusRuntimeException, _], Assertion] = {

    case Left(err) =>
      err.getStatus should have('code (expectedCode))
      err.getStatus should have('description (expectedDescription))

  }

} 
Example 159
Source File: LedgerReader.scala    From daml   with Apache License 2.0 5 votes vote down vote up
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.ledger.service

import com.daml.lf.archive.Reader
import com.daml.lf.data.Ref.{Identifier, PackageId}
import com.daml.lf.iface.reader.InterfaceReader
import com.daml.lf.iface.{DefDataType, Interface}
import com.daml.daml_lf_dev.DamlLf
import com.daml.ledger.api.v1.package_service.GetPackageResponse
import com.daml.ledger.client.services.pkg.PackageClient
import scalaz.Scalaz._
import scalaz._

import scala.collection.immutable.Map
import scala.concurrent.Future

object LedgerReader {

  type Error = String

  // PackageId -> Interface
  type PackageStore = Map[String, Interface]

  val UpToDate: Future[Error \/ Option[PackageStore]] =
    Future.successful(\/-(None))

  // FIXME Find a more suitable execution context for these helpers
  import scala.concurrent.ExecutionContext.Implicits.global

  
  def loadPackageStoreUpdates(client: PackageClient, token: Option[String])(
      loadedPackageIds: Set[String]): Future[Error \/ Option[PackageStore]] =
    for {
      newPackageIds <- client.listPackages(token).map(_.packageIds.toList)
      diffIds = newPackageIds.filterNot(loadedPackageIds): List[String] // keeping the order
      result <- if (diffIds.isEmpty) UpToDate
      else load(client, diffIds, token)
    } yield result

  private def load(
      client: PackageClient,
      packageIds: List[String],
      token: Option[String]): Future[Error \/ Some[PackageStore]] =
    packageIds
      .traverse(client.getPackage(_, token))
      .map(createPackageStoreFromArchives)
      .map(_.map(Some(_)))

  private def createPackageStoreFromArchives(
      packageResponses: List[GetPackageResponse]): Error \/ PackageStore = {
    packageResponses
      .traverseU { packageResponse: GetPackageResponse =>
        decodeInterfaceFromPackageResponse(packageResponse).map { interface =>
          (interface.packageId, interface)
        }
      }
      .map(_.toMap)
  }

  private def decodeInterfaceFromPackageResponse(
      packageResponse: GetPackageResponse): Error \/ Interface = {
    import packageResponse._
    \/.fromTryCatchNonFatal {
      val cos = Reader.damlLfCodedInputStream(archivePayload.newInput)
      val payload = DamlLf.ArchivePayload.parseFrom(cos)
      val (errors, out) =
        InterfaceReader.readInterface(PackageId.assertFromString(hash) -> payload)
      if (!errors.empty) \/.left("Errors reading LF archive:\n" + errors.toString)
      else \/.right(out)
    }.leftMap(_.getLocalizedMessage).join
  }

  def damlLfTypeLookup(packageStore: () => PackageStore)(id: Identifier): Option[DefDataType.FWT] =
    for {
      iface <- packageStore().get(id.packageId.toString)
      ifaceType <- iface.typeDecls.get(id.qualifiedName)
    } yield ifaceType.`type`
} 
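The FIXME above flags ExecutionContext.Implicits.global as a stop-gap for these package-loading helpers. A minimal sketch of supplying a dedicated pool instead (the object, helper and fake client are hypothetical, not part of the daml codebase):

import java.util.concurrent.Executors

import scala.concurrent.duration.Duration
import scala.concurrent.{Await, ExecutionContext, Future}

object DedicatedEcSketch {
  // Hypothetical: a small fixed pool instead of ExecutionContext.Implicits.global.
  private val pool = Executors.newFixedThreadPool(4)
  private implicit val ec: ExecutionContext = ExecutionContext.fromExecutorService(pool)

  // Mirrors the "which package ids are new" step above, with the execution context supplied explicitly.
  def newPackageIds(list: () => Future[List[String]], loaded: Set[String]): Future[List[String]] =
    list().map(_.filterNot(loaded))

  def main(args: Array[String]): Unit = {
    val fakeClient = () => Future.successful(List("p1", "p2", "p3"))
    println(Await.result(newPackageIds(fakeClient, Set("p2")), Duration.Inf)) // List(p1, p3)
    pool.shutdown()
  }
}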
Example 160
Source File: AkkaClientCompatibilityCheck.scala    From daml   with Apache License 2.0 5 votes vote down vote up
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.grpc.adapter.operation

import akka.actor.ActorSystem
import akka.stream.scaladsl.Sink
import akka.stream.{Materializer, ThrottleMode}
import com.daml.grpc.adapter.ExecutionSequencerFactory
import com.daml.grpc.adapter.client.ResultAssertions
import com.daml.grpc.adapter.client.akka.ClientAdapter
import com.daml.platform.hello.HelloRequest
import com.daml.platform.hello.HelloServiceGrpc.HelloServiceStub
import io.grpc.{ClientCall, MethodDescriptor}
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.{Matchers, WordSpec}

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._

trait AkkaClientCompatibilityCheck {
  self: WordSpec with Matchers with ScalaFutures with ResultAssertions =>

  implicit protected def system: ActorSystem

  implicit protected def materializer: Materializer

  implicit protected def esf: ExecutionSequencerFactory

  def akkaClientCompatible(helloStub: => HelloServiceStub): Unit = {

    def getCall[Req, Resp](call: MethodDescriptor[Req, Resp]): ClientCall[Req, Resp] =
      helloStub.getChannel.newCall(call, helloStub.getCallOptions)

    "respond with the correct number of elements and correct content in 1-* setup" in {
      val elemsF = ClientAdapter
        .serverStreaming(HelloRequest(elemCount), helloStub.serverStreaming)
        .runWith(Sink.seq)

      whenReady(elemsF)(assertElementsAreInOrder(elemCount.toLong))
    }

    "tolerate rematerialization of the same response source in 1-* setup" in {
      val source = ClientAdapter
        .serverStreaming(HelloRequest(elemCount), helloStub.serverStreaming)
      val elemsF1 = source.runWith(Sink.seq)
      val elemsF2 = source.runWith(Sink.seq)

      whenReady(for {
        elems1 <- elemsF1
        elems2 <- elemsF2
      } yield elems1 -> elems2)({
        case (elems1, elems2) =>
          val check = assertElementsAreInOrder(elemCount.toLong) _
          check(elems1)
          check(elems2)
      })
    }

    "respond with the correct number of elements and correct content in 1-* setup when back-pressured" in {
      val elemsF = ClientAdapter
        .serverStreaming(HelloRequest(elemCount), helloStub.serverStreaming)
        .throttle(100, 1.second, 16, ThrottleMode.shaping)
        .runWith(Sink.seq)

      whenReady(elemsF)(assertElementsAreInOrder(elemCount.toLong))
    }

    "handle cancellation in 1-* setup" in {
      val elemsF = ClientAdapter
        .serverStreaming(HelloRequest(elemCount), helloStub.serverStreaming)
        .take(halfCount.toLong)
        .runWith(Sink.seq)

      whenReady(elemsF)(assertElementsAreInOrder(halfCount.toLong))
    }

  }
} 
Example 161
Source File: AkkaImplementation.scala    From daml   with Apache License 2.0 5 votes vote down vote up
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.grpc.adapter.utils.implementations

import java.util.concurrent.atomic.AtomicInteger

import akka.stream.Materializer
import akka.stream.scaladsl.{Flow, Source}
import com.daml.grpc.adapter.ExecutionSequencerFactory
import com.daml.grpc.adapter.server.akka.ServerAdapter
import com.daml.grpc.sampleservice.Responding
import com.daml.platform.hello.HelloServiceGrpc.HelloService
import com.daml.platform.hello.{HelloRequest, HelloResponse, HelloServiceGrpc}
import io.grpc.stub.StreamObserver
import io.grpc.{BindableService, ServerServiceDefinition}

import scala.concurrent.ExecutionContext.Implicits.global

class AkkaImplementation(
    implicit executionSequencerFactory: ExecutionSequencerFactory,
    materializer: Materializer,
) extends HelloService
    with Responding
    with BindableService {

  private val serverStreamingCalls = new AtomicInteger()

  def getServerStreamingCalls: Int = serverStreamingCalls.get()

  override def bindService(): ServerServiceDefinition =
    HelloServiceGrpc.bindService(this, global)

  override def serverStreaming(
      request: HelloRequest,
      responseObserver: StreamObserver[HelloResponse],
  ): Unit =
    Source
      .single(request)
      .via(Flow[HelloRequest].mapConcat(responses))
      .runWith(ServerAdapter.toSink(responseObserver))
      .onComplete(_ => serverStreamingCalls.incrementAndGet())

} 
Example 162
Source File: SdkClient.scala    From iotchain   with MIT License 5 votes vote down vote up
package jbok.sdk

import java.net.URI

import cats.effect.{Clock, IO}
import io.circe.Json
import io.circe.syntax._
import io.circe.parser._
import jbok.network.http.HttpTransport
import jbok.network.rpc.{RpcClient, RpcRequest}

import scala.concurrent.ExecutionContext.Implicits.global
import scala.scalajs.js
import scala.scalajs.js.JSConverters._
import scala.scalajs.js.Promise
import scala.scalajs.js.annotation.{JSExportAll, JSExportTopLevel}
import scala.scalajs.js.JSON

@JSExportAll
final class SdkClient(val uri: URI, val client: RpcClient[IO, Json]) {
  def fetch(api: String, method: String, params: js.UndefOr[js.Any]): Promise[String] = {
    val json = params.toOption match {
      case Some(a) => parse(JSON.stringify(a)).getOrElse(Json.Null)
      case None => Json.Null
    }
    val request = RpcRequest(List(api, method), json)
    client.transport.fetch(request).map(_.asJson.spaces2).unsafeToFuture().toJSPromise
  }
}

@JSExportTopLevel("SdkClient")
@JSExportAll
object SdkClient {
  implicit val clock: Clock[IO] = Clock.create[IO]

  def http(url: String): SdkClient = {
    val transport = HttpTransport[IO](url)
    val client    = RpcClient(transport)
    new SdkClient(new URI(url), client)
  }
} 
Example 163
Source File: EnvironmentTest.scala    From nanotest-strawman   with Apache License 2.0 5 votes vote down vote up
package example.tests

import verify.TestSuite
import scala.concurrent.Future
import scala.util.Random

object EnvironmentTest extends TestSuite[Int] {
  def setup(): Int = {
    Random.nextInt(100) + 1
  }

  def tearDown(env: Int): Unit = {
    assert(env > 0)
  }

  override def setupSuite() = {}

  override def tearDownSuite() = {}

  test("simple test") { env =>
    assert(env == env)
  }

  testAsync("asynchronous test") { env =>
    import scala.concurrent.ExecutionContext.Implicits.global

    Future(env).map(_ + 1).map { result =>
      assert(result == env + 1)
    }
  }
} 
Example 164
Source File: SimpleTest.scala    From nanotest-strawman   with Apache License 2.0 5 votes vote down vote up
package example.tests

import verify.BasicTestSuite
import scala.concurrent.Future

object SimpleTest extends BasicTestSuite {
  test("ignored test") {
    ignore()
  }

  test("ignore test with reason") {
    ignore("test was ignored with a message")
  }

  test("canceled test") {
    cancel()
  }

  test("canceled test with reason") {
    cancel("test was canceled with a message")
  }

  test("simple assert") {
    def hello: String = "hello"
    assert(hello == "hello")
  }

  test("assert with hint") {
    def hello: String = "hello"
    assert(hello == "hello", "assertion with hint is failing")
  }

  // test("failing test") {
  //   case class Person(name: String = "Fred", age: Int = 42) {
  //     def say(words: String*) = words.mkString(" ")
  //   }
  //   assert(Person().say("ping", "poing") == "pong pong")
  // }

  test("assert equals with nulls") {
    val s: String = null

    intercept[AssertionError] {
      assert(s == "dummy")
    }
  }

  test("intercept") {
    class DummyException extends RuntimeException
    def test = 1

    intercept[DummyException] {
      if (test != 2) throw new DummyException
    }
  }

  testAsync("asynchronous test") {
    import scala.concurrent.ExecutionContext.Implicits.global

    Future(1).map(_ + 1).map { result =>
      assert(result == 2)
    }
  }

  test("intercept failure") {
    class DummyException extends RuntimeException

    intercept[AssertionError] {
      intercept[DummyException] {
        def hello(x: Int) = x + 1
        if (hello(1) != 2) throw new DummyException
      }
    }
  }

  test("fail()") {
    def x = 1
    intercept[AssertionError] { if (x == 1) fail() }
  }

  test("fail(reason)") {
    def x = 1
    val isSuccess = try {
      if (x == 1) fail("dummy")
      false
    } catch {
      case ex: AssertionError =>
        ex.getMessage == "dummy"
    }

    assert(isSuccess)
  }
} 
Example 165
Source File: AsyncStorageExample.scala    From scalajs-react-native   with Apache License 2.0 5 votes vote down vote up
package chandu0101.scalajs.rn.examples.uiexplorer.apis

import chandu0101.scalajs.rn.ReactNativeComponentB
import chandu0101.scalajs.rn.apis.{AsyncStorage, AsyncStorageException}
import chandu0101.scalajs.rn.components._
import chandu0101.scalajs.rn.examples.uiexplorer.{UIExample, UIExplorerBlock, UIExplorerPage}
import japgolly.scalajs.react.BackendScope
import chandu0101.scalajs.rn.styles.NativeStyleSheet

import scala.async.Async._
import scala.concurrent.ExecutionContext.Implicits.global
import scala.scalajs.js

object AsyncStorageExample extends UIExample {

  val STORAGE_KEY = "@AsyncStorageExample:key"
  val COLORS = js.Array("red", "orange", "yellow", "green", "blue")

  case class State(selectedValue: String = COLORS.head, messages: js.Array[String] = js.Array())

  class Backend(t: BackendScope[_, State]) {

    def appendMessage(message: String) = {
      t.modState(s => s.copy(messages = s.messages.+:(message)))
    }

    val saveError: PartialFunction[Throwable, _] = {
      case (ex: Throwable) => {
        appendMessage(s"AsyncStorage Error ${ex.asInstanceOf[AsyncStorageException].err.message.toString}")
      }
    }

    def onValueChange(selectedValue: String) : Unit = {
      t.modState(_.copy(selectedValue = selectedValue))
      async {
        val result = await(AsyncStorage.setItem(STORAGE_KEY, selectedValue))
        appendMessage(s"Saved selection to disk ${selectedValue}")
      }.recover(saveError)
    }

    def removeStorage : Unit = async{
      val result = await(AsyncStorage.removeItem(STORAGE_KEY))
      appendMessage(s"Selection Removed from Disk")
    }.recover(saveError)
  }

  val component = ReactNativeComponentB[Unit]("AsyncStorageExample")
    .initialState(State())
    .backend(new Backend(_))
    .render((P, S, B) => {
    UIExplorerPage(
     UIExplorerBlock("Basics - getItem, setItem, removeItem")(
       View()(
           PickerIOS(selectedValue = S.selectedValue, onValueChange = B.onValueChange _)(
             COLORS.map(v => PickerItemIOS(key = v, value = v, label = v))
         ),
         Text()("Selected : ",
           Text(style = styles.getColorStyle(S.selectedValue))(S.selectedValue)
         ),
         Text()(" "),
         Text(onPress = B.removeStorage _)("Press here to remove from storage"),
         Text()(" "),
         Text()("Messages : "),
         S.messages.map(m => Text()(m))
       )
     )
    )
  }).componentDidMount(scope => {
     async {
       val result = await(AsyncStorage.getItem(STORAGE_KEY))
       if (result != null) {
         scope.modState(_.copy(selectedValue = result))
         scope.backend.appendMessage(s"Recovered selection from disk : ${result}")
       } else {
         scope.backend.appendMessage(s"Initialized with no selection on disk")
       }
     }.recover(scope.backend.saveError)
  })
  .buildNative

  object styles extends NativeStyleSheet {

   def getColorStyle(c : String) = style(color := c)
  }

  override def title: String = "AsyncStorage"

  override def description: String = "Asynchronous local disk storage."
} 
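The component above drives AsyncStorage through scala-async's async/await blocks, which the macro rewrites into Future combinators, so the globally imported ExecutionContext is what actually runs the continuations, and recover gives one place to handle failures from any awaited step. A minimal, self-contained sketch of that shape, with readValue as a hypothetical stand-in for a storage read:

import scala.async.Async.{async, await}
import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global

object AsyncAwaitSketch {
  // Hypothetical stand-in for a storage read such as AsyncStorage.getItem
  def readValue(key: String): Future[String] = Future(s"value-of-$key")

  val greeting: Future[String] = async {
    val value = await(readValue("selection"))   // suspends the block without blocking a thread
    s"recovered: $value"
  }.recover { case _: Throwable => "no selection on disk" }
}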
Example 166
Source File: CartDaoSpec.scala    From Scala-Programming-Projects   with MIT License 5 votes vote down vote up
import dao.CartDao
import io.fscala.shopping.shared.{Cart, ProductInCart}
import org.scalatest.Matchers._
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.RecoverMethods._
import org.scalatestplus.play._
import org.scalatestplus.play.guice.GuiceOneAppPerSuite
import play.api.Application

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future

class CartDaoSpec extends PlaySpec with ScalaFutures with GuiceOneAppPerSuite {

  "CartDao" should {
    val app2dao = Application.instanceCache[CartDao]
    "be empty on database creation" in {
      val dao: CartDao = app2dao(app)

      dao.all().futureValue shouldBe empty
    }

    "accept to add new cart" in {
      val dao: CartDao = app2dao(app)
      val user = "userAdd"

      val expected = Set(
        Cart(user, "ALD1", 1),
        Cart(user, "BEO1", 5)
      )
      val noise = Set(
        Cart("userNoise", "ALD2", 10)
      )
      val allCarts = expected ++ noise

      val insertFutures = allCarts.map(dao.insert)

      whenReady(Future.sequence(insertFutures)) { _ =>
        dao.cart4(user).futureValue should contain theSameElementsAs expected
        dao.all().futureValue.size should equal(allCarts.size)
      }
    }
    "error thrown when adding a cart with same user and productCode" in {
      val dao: CartDao = app2dao(app)
      val user = "userAdd"

      val expected = Set(
        Cart(user, "ALD1", 1),
        Cart(user, "BEO1", 5)
      )
      val noise = Set(
        Cart(user, "ALD1", 10)
      )
      val allCarts = expected ++ noise


      val insertFutures = allCarts.map(dao.insert)

      recoverToSucceededIf[org.h2.jdbc.JdbcSQLException]{
        Future.sequence(insertFutures)
      }
    }

    "accept to remove a product in a cart" in {
      val dao: CartDao = app2dao(app)
      val user = "userRmv"
      val initial = Vector(
        Cart(user, "ALD1", 1),
        Cart(user, "BEO1", 5)
      )
      val expected = Vector(Cart(user, "ALD1", 1))

      whenReady(Future.sequence(initial.map(dao.insert(_)))) { _ =>
        dao.remove(ProductInCart(user, "BEO1")).futureValue
        dao.cart4(user).futureValue should contain theSameElementsAs (expected)
      }
    }

    "accept to update quantities of an item in a cart" in {
      val dao: CartDao = app2dao(app)
      val user = "userUpd"
      val initial = Vector(Cart(user, "ALD1", 1))
      val expected = Vector(Cart(user, "ALD1", 5))

      whenReady(Future.sequence(initial.map(dao.insert(_)))) { _ =>
        dao.update(Cart(user, "ALD1", 5)).futureValue
        dao.cart4(user).futureValue should contain theSameElementsAs (expected)
      }
    }
  }
} 
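Each test above kicks off a batch of inserts and joins them with Future.sequence before asserting, so the assertions only run once every row has been written. A minimal sketch of that join using only the standard library, with Await standing in for ScalaTest's whenReady and insert as a hypothetical DAO call:

import scala.concurrent.{Await, Future}
import scala.concurrent.duration._
import scala.concurrent.ExecutionContext.Implicits.global

object SequenceSketch extends App {
  // Hypothetical stand-in for dao.insert
  def insert(row: Int): Future[Int] = Future(row)

  val inserts: Set[Future[Int]] = Set(1, 2, 3).map(insert)
  val all: Future[Set[Int]] = Future.sequence(inserts)   // completes when every insert has completed

  Await.result(all, 5.seconds)                            // safe to query and assert after this point
}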
Example 167
Source File: PostgresInstanceSpec.scala    From fuuid   with MIT License 5 votes vote down vote up
package io.chrisdavenport.fuuid.doobie.postgres

import cats.effect.{ContextShift, IO}
import cats.implicits._
import doobie._
import doobie.implicits._
import doobie.postgres.implicits._
import doobie.specs2._
import io.chrisdavenport.fuuid.FUUID
import io.chrisdavenport.fuuid.doobie.implicits._
import io.chrisdavenport.testcontainersspecs2.ForAllTestContainer
import com.dimafeng.testcontainers.GenericContainer
import org.testcontainers.containers.wait.strategy.LogMessageWaitStrategy
import java.time.Duration
import java.time.temporal.ChronoUnit.SECONDS
import org.specs2._
import scala.concurrent.ExecutionContext.Implicits.global

class PostgresInstanceSpec extends mutable.Specification with IOChecker with ForAllTestContainer {
  sequential
  implicit val contextShiftIO: ContextShift[IO] = IO.contextShift(global)

  override lazy val container = GenericContainer(
    "postgres",
    List(5432),
    Map(
      "POSTGRES_DB" -> dbName,
      "POSTGRES_USER" -> dbUserName,
      "POSTGRES_PASSWORD" -> dbPassword
    ),
    waitStrategy = new LogMessageWaitStrategy()
      .withRegEx(".*database system is ready to accept connections.*\\s")
      .withTimes(2)
      .withStartupTimeout(Duration.of(60, SECONDS))
  )

  lazy val driverName = "org.postgresql.Driver"
  lazy val jdbcUrl = s"jdbc:postgresql://${container.container.getContainerIpAddress()}:${container.container.getMappedPort(5432)}/${dbName}"
  lazy val dbUserName = "user"
  lazy val dbPassword = "password"
  lazy val dbName = "db"

  lazy val transactor = Transactor.fromDriverManager[IO](
    driverName,
    jdbcUrl,
    dbUserName,
    dbPassword
  )


  override def afterStart(): Unit = {
    sql"""
    CREATE TABLE IF NOT EXISTS PostgresInstanceSpec (
      id   UUID NOT NULL
    )
    """.update.run.transact(transactor).void.unsafeRunSync()
  }

  def insertId(fuuid: FUUID): Update0 = {
    sql"""INSERT into PostgresInstanceSpec (id) VALUES ($fuuid)""".update
  }
  val fuuid = FUUID.randomFUUID[IO].unsafeRunSync

  check(sql"SELECT id from PostgresInstanceSpec".query[FUUID])
  check(insertId(fuuid))

} 
Example 168
Source File: PostgresTraversalSpec.scala    From fuuid   with MIT License 5 votes vote down vote up
package io.chrisdavenport.fuuid.doobie.postgres

import cats.effect.{ContextShift, IO}
import cats.implicits._
import doobie._
import doobie.implicits._
import doobie.postgres.implicits._
import io.chrisdavenport.fuuid.doobie.implicits._
import io.chrisdavenport.fuuid._
import io.chrisdavenport.testcontainersspecs2.ForAllTestContainer
import com.dimafeng.testcontainers.GenericContainer
import org.testcontainers.containers.wait.strategy.LogMessageWaitStrategy
import java.time.Duration
import java.time.temporal.ChronoUnit.SECONDS
import org.specs2._
import scala.concurrent.ExecutionContext.Implicits.global

class PostgresTraversalSpec extends mutable.Specification
  with ScalaCheck with FUUIDArbitraries with ForAllTestContainer {
  sequential
  implicit val contextShiftIO: ContextShift[IO] = IO.contextShift(global)

  override lazy val container = GenericContainer(
    "postgres",
    List(5432),
    Map(
      "POSTGRES_DB" -> dbName,
      "POSTGRES_USER" -> dbUserName,
      "POSTGRES_PASSWORD" -> dbPassword
    ),
    waitStrategy = new LogMessageWaitStrategy()
      .withRegEx(".*database system is ready to accept connections.*\\s")
      .withTimes(2)
      .withStartupTimeout(Duration.of(60, SECONDS))
  )

  lazy val driverName = "org.postgresql.Driver"
  lazy val jdbcUrl = s"jdbc:postgresql://${container.container.getContainerIpAddress()}:${container.container.getMappedPort(5432)}/${dbName}"
  lazy val dbUserName = "user"
  lazy val dbPassword = "password"
  lazy val dbName = "db"

  lazy val transactor = Transactor.fromDriverManager[IO](
    driverName,
    jdbcUrl,
    dbUserName,
    dbPassword
  )

  // lazy val transactor = Transactor.fromDriverManager[IO](
  //   "org.postgresql.Driver",
  //   "jdbc:postgresql:world",
  //   "postgres", ""
  // )

  override def afterStart(): Unit = {
    sql"""
    CREATE TABLE IF NOT EXISTS PostgresTraversalSpec (
      id   UUID NOT NULL
    )
    """.update.run.transact(transactor).void.unsafeRunSync()
  }

  def queryBy(fuuid: FUUID): Query0[FUUID] = {
    sql"""SELECT id from PostgresTraversalSpec where id = ${fuuid}""".query[FUUID]
  }

  def insertId(fuuid: FUUID): Update0 = {
    sql"""INSERT into PostgresTraversalSpec (id) VALUES ($fuuid)""".update
  }

  "Doobie Postgres Meta" should {
    "traverse input and then extraction" in prop { fuuid: FUUID =>

      val action = for {
        _ <- insertId(fuuid).run.transact(transactor)
        fuuid <- queryBy(fuuid).unique.transact(transactor)
      } yield fuuid

      action.unsafeRunSync must_=== fuuid
    }
    "fail on a non-present value" in prop { fuuid: FUUID =>
      queryBy(fuuid)
        .unique
        .transact(transactor)
        .attempt
        .map(_.isLeft)
        .unsafeRunSync must_=== true
    }
  }

} 
Example 169
Source File: H2TraversalSpec.scala    From fuuid   with MIT License 5 votes vote down vote up
package io.chrisdavenport.fuuid.doobie.h2

import cats.effect.{ContextShift, IO}
import cats.implicits._
import doobie._
import doobie.h2.implicits._
import doobie.implicits._
import io.chrisdavenport.fuuid.doobie.implicits._
import io.chrisdavenport.fuuid._
import org.specs2.ScalaCheck
import org.specs2.mutable.Specification
import org.specs2.specification.BeforeAll
import scala.concurrent.ExecutionContext.Implicits.global

class H2TraversalSpec extends Specification
  with BeforeAll with ScalaCheck with FUUIDArbitraries {

  implicit val contextShiftIO: ContextShift[IO] = IO.contextShift(global)

  lazy val transactor: Transactor[IO] =
    Transactor.fromDriverManager[IO](
      driver = "org.h2.Driver",
      url = "jdbc:h2:mem:testH2Table;DB_CLOSE_DELAY=-1",
      user = "sa",
      pass = ""
    )

  def beforeAll(): Unit = {
    sql"""
    CREATE TABLE testH2Table (
      id   UUID NOT NULL
    )
    """.update.run.transact(transactor).void.unsafeRunSync
  }

  def queryBy(fuuid: FUUID): Query0[FUUID] = {
    sql"""SELECT id from testH2Table where id = ${fuuid}""".query[FUUID]
  }

  def insertId(fuuid: FUUID): Update0 = {
    sql"""INSERT into testH2Table (id) VALUES ($fuuid)""".update
  }

  "Doobie H2 Meta" should {

    "traverse input and then extraction" in prop { fuuid: FUUID =>

      val action = for {
        _ <- insertId(fuuid).run.transact(transactor)
        fuuid <- queryBy(fuuid).unique.transact(transactor)
      } yield fuuid

      action.unsafeRunSync must_=== fuuid
    }

    "fail on a non-present value" in prop { fuuid: FUUID =>
      queryBy(fuuid)
        .unique
        .transact(transactor)
        .attempt
        .map(_.isLeft)
        .unsafeRunSync must_=== true
    }
  }

} 
Example 170
Source File: H2InstanceSpec.scala    From fuuid   with MIT License 5 votes vote down vote up
package io.chrisdavenport.fuuid.doobie.h2

import cats.effect.{ContextShift, IO}
import cats.syntax.functor._
import doobie._
import doobie.h2.implicits._
import doobie.implicits._
import doobie.specs2._
import io.chrisdavenport.fuuid.FUUID
import io.chrisdavenport.fuuid.doobie.implicits._
import org.specs2.mutable.Specification
import org.specs2.specification.BeforeAll
import scala.concurrent.ExecutionContext.Implicits.global

class H2InstanceSpec extends Specification with IOChecker with BeforeAll {

  implicit val contextShiftIO: ContextShift[IO] = IO.contextShift(global)

  lazy val transactor: Transactor[IO] =
    Transactor.fromDriverManager[IO](
      driver = "org.h2.Driver",
      url = "jdbc:h2:mem:test;DB_CLOSE_DELAY=-1",
      user = "sa",
      pass = ""
    )

  def beforeAll(): Unit = {
    sql"CREATE TABLE test (id UUID NOT NULL)".update.run.transact(transactor).void.unsafeRunSync
  }

  def insertId(fuuid: FUUID): Update0 = {
    sql"""INSERT into test (id) VALUES ($fuuid)""".update
  }

  val fuuid = FUUID.randomFUUID[IO].unsafeRunSync

  check(sql"SELECT id from test".query[FUUID])
  check(insertId(fuuid))

} 
Example 171
Source File: KnownNodesManager.scala    From mantis   with Apache License 2.0 5 votes vote down vote up
package io.iohk.ethereum.network

import java.net.URI

import akka.actor.{Actor, ActorLogging, Props, Scheduler}
import io.iohk.ethereum.db.storage.KnownNodesStorage
import io.iohk.ethereum.network.KnownNodesManager.KnownNodesManagerConfig

import scala.concurrent.duration._
import scala.concurrent.ExecutionContext.Implicits.global

class KnownNodesManager(
    config: KnownNodesManagerConfig,
    knownNodesStorage: KnownNodesStorage,
    externalSchedulerOpt: Option[Scheduler] = None)
  extends Actor with ActorLogging {

  import KnownNodesManager._

  private def scheduler = externalSchedulerOpt getOrElse context.system.scheduler

  var knownNodes: Set[URI] = knownNodesStorage.getKnownNodes()

  var toAdd: Set[URI] = Set.empty

  var toRemove: Set[URI] = Set.empty

  scheduler.schedule(config.persistInterval, config.persistInterval, self, PersistChanges)

  override def receive: Receive = {
    case AddKnownNode(uri) =>
      if (!knownNodes.contains(uri)) {
        knownNodes += uri
        toAdd += uri
        toRemove -= uri
      }

    case RemoveKnownNode(uri) =>
      if (knownNodes.contains(uri)) {
        knownNodes -= uri
        toAdd -= uri
        toRemove += uri
      }

    case GetKnownNodes =>
      sender() ! KnownNodes(knownNodes)

    case PersistChanges =>
      persistChanges()
  }

  private def persistChanges(): Unit = {
    log.debug(s"Persisting ${knownNodes.size} known nodes.")
    if (knownNodes.size > config.maxPersistedNodes) {
      val toAbandon = knownNodes.take(knownNodes.size - config.maxPersistedNodes)
      toRemove ++= toAbandon
      toAdd --= toAbandon
    }
    if (toAdd.nonEmpty || toRemove.nonEmpty) {
      knownNodesStorage.updateKnownNodes(
        toAdd = toAdd,
        toRemove = toRemove)
      toAdd = Set.empty
      toRemove = Set.empty
    }
  }

}

object KnownNodesManager {
  def props(config: KnownNodesManagerConfig, knownNodesStorage: KnownNodesStorage): Props =
    Props(new KnownNodesManager(config, knownNodesStorage))

  case class AddKnownNode(uri: URI)
  case class RemoveKnownNode(uri: URI)
  case object GetKnownNodes
  case class KnownNodes(nodes: Set[URI])

  private case object PersistChanges

  case class KnownNodesManagerConfig(persistInterval: FiniteDuration, maxPersistedNodes: Int)

  object KnownNodesManagerConfig {
    def apply(etcClientConfig: com.typesafe.config.Config): KnownNodesManagerConfig = {
      val knownNodesManagerConfig = etcClientConfig.getConfig("network.known-nodes")
      KnownNodesManagerConfig(
        persistInterval = knownNodesManagerConfig.getDuration("persist-interval").toMillis.millis,
        maxPersistedNodes = knownNodesManagerConfig.getInt("max-persisted-nodes"))
    }
  }
} 
Example 172
Source File: NetService.scala    From mantis   with Apache License 2.0 5 votes vote down vote up
package io.iohk.ethereum.jsonrpc

import akka.actor.ActorRef
import akka.agent.Agent
import akka.util.Timeout
import io.iohk.ethereum.jsonrpc.NetService.NetServiceConfig
import io.iohk.ethereum.network.PeerManagerActor
import io.iohk.ethereum.utils.ServerStatus.{Listening, NotListening}
import io.iohk.ethereum.utils.{Config, NodeStatus}

import scala.concurrent.Future
import scala.concurrent.duration._
import scala.concurrent.ExecutionContext.Implicits.global

object NetService {
  case class VersionRequest()
  case class VersionResponse(value: String)

  case class ListeningRequest()
  case class ListeningResponse(value: Boolean)

  case class PeerCountRequest()
  case class PeerCountResponse(value: Int)

  case class NetServiceConfig(peerManagerTimeout: FiniteDuration)

  object NetServiceConfig {
    def apply(etcClientConfig: com.typesafe.config.Config): NetServiceConfig = {
      val netServiceConfig = etcClientConfig.getConfig("network.rpc.net")
      NetServiceConfig(
        peerManagerTimeout = netServiceConfig.getDuration("peer-manager-timeout").toMillis.millis)
    }
  }
}

class NetService(nodeStatusHolder: Agent[NodeStatus], peerManager: ActorRef, config: NetServiceConfig) {
  import NetService._

  def version(req: VersionRequest): ServiceResponse[VersionResponse] =
    Future.successful(Right(VersionResponse(Config.Network.peer.networkId.toString)))

  def listening(req: ListeningRequest): ServiceResponse[ListeningResponse] = {
    Future.successful {
      Right(
        nodeStatusHolder().serverStatus match {
          case _: Listening => ListeningResponse(true)
          case NotListening => ListeningResponse(false)
        }
      )
    }
  }

  def peerCount(req: PeerCountRequest): ServiceResponse[PeerCountResponse] = {
    import akka.pattern.ask
    implicit val timeout = Timeout(config.peerManagerTimeout)

    (peerManager ? PeerManagerActor.GetPeers)
      .mapTo[PeerManagerActor.Peers]
      .map { peers => Right(PeerCountResponse(peers.handshaked.size)) }
  }

} 
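peerCount above uses the classic akka ask pattern: the ? operator returns an untyped Future[Any], mapTo narrows it, and the final map runs on the imported global execution context. A minimal sketch of the same pattern against a hypothetical counter actor:

import akka.actor.{Actor, ActorSystem, Props}
import akka.pattern.ask
import akka.util.Timeout
import scala.concurrent.Future
import scala.concurrent.duration._
import scala.concurrent.ExecutionContext.Implicits.global

case object GetCount
// Hypothetical actor used only for this sketch
class Counter extends Actor {
  override def receive: Receive = { case GetCount => sender() ! 42 }
}

object AskSketch extends App {
  val system  = ActorSystem("ask-sketch")
  val counter = system.actorOf(Props(new Counter))
  implicit val timeout: Timeout = Timeout(3.seconds)

  val count: Future[Int] = (counter ? GetCount).mapTo[Int]
  count.foreach(c => println(s"count = $c"))   // runs on the global ExecutionContext
}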
Example 173
Source File: JsonRpcServer.scala    From mantis   with Apache License 2.0 5 votes vote down vote up
package io.iohk.ethereum.jsonrpc.server

import java.security.SecureRandom

import akka.actor.ActorSystem
import akka.http.scaladsl.model.StatusCodes
import akka.http.scaladsl.model.headers.HttpOriginRange
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server.{MalformedRequestContentRejection, RejectionHandler, Route}
import ch.megard.akka.http.cors.javadsl.CorsRejection
import ch.megard.akka.http.cors.scaladsl.CorsDirectives._
import ch.megard.akka.http.cors.scaladsl.settings.CorsSettings
import de.heikoseeberger.akkahttpjson4s.Json4sSupport
import io.iohk.ethereum.jsonrpc.{JsonRpcController, JsonRpcErrors, JsonRpcRequest, JsonRpcResponse}
import io.iohk.ethereum.utils.Logger
import org.json4s.JsonAST.JInt
import org.json4s.{DefaultFormats, native}

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future

trait JsonRpcServer extends Json4sSupport {
  val jsonRpcController: JsonRpcController

  implicit val serialization = native.Serialization

  implicit val formats = DefaultFormats

  def corsAllowedOrigins: HttpOriginRange

  val corsSettings = CorsSettings.defaultSettings.copy(
    allowGenericHttpRequests = true,
    allowedOrigins = corsAllowedOrigins
  )

  implicit def myRejectionHandler: RejectionHandler =
    RejectionHandler.newBuilder()
      .handle {
        case _: MalformedRequestContentRejection =>
          complete((StatusCodes.BadRequest, JsonRpcResponse("2.0", None, Some(JsonRpcErrors.ParseError), JInt(0))))
        case _: CorsRejection =>
          complete(StatusCodes.Forbidden)
      }
      .result()

  val route: Route = cors(corsSettings) {
    (pathEndOrSingleSlash & post) {
      entity(as[JsonRpcRequest]) { request =>
        handleRequest(request)
      } ~ entity(as[Seq[JsonRpcRequest]]) { request =>
        handleBatchRequest(request)
      }
    }
  }

  
  def run(): Unit

  private def handleRequest(request: JsonRpcRequest) = {
    complete(jsonRpcController.handleRequest(request))
  }

  private def handleBatchRequest(requests: Seq[JsonRpcRequest]) = {
    complete(Future.sequence(requests.map(request => jsonRpcController.handleRequest(request))))
  }
}

object JsonRpcServer extends Logger {

  def apply(jsonRpcController: JsonRpcController, config: JsonRpcServerConfig, secureRandom: SecureRandom)
           (implicit actorSystem: ActorSystem): Either[String, JsonRpcServer] = config.mode match {
    case "http" => Right(new JsonRpcHttpServer(jsonRpcController, config)(actorSystem))
    case "https" => Right(new JsonRpcHttpsServer(jsonRpcController, config, secureRandom)(actorSystem))
    case _ => Left(s"Cannot start JSON RPC server: Invalid mode ${config.mode} selected")
  }

  trait JsonRpcServerConfig {
    val mode: String
    val enabled: Boolean
    val interface: String
    val port: Int
    val certificateKeyStorePath: Option[String]
    val certificateKeyStoreType: Option[String]
    val certificatePasswordFile: Option[String]
    val corsAllowedOrigins: HttpOriginRange
  }


} 
Example 174
Source File: JsonRpcHttpsServer.scala    From mantis   with Apache License 2.0 5 votes vote down vote up
package io.iohk.ethereum.jsonrpc.server

import java.io.{File, FileInputStream}
import java.security.{KeyStore, SecureRandom}
import javax.net.ssl.{KeyManagerFactory, SSLContext, TrustManagerFactory}

import akka.actor.ActorSystem
import akka.http.scaladsl.model.headers.HttpOriginRange
import akka.http.scaladsl.{ConnectionContext, Http}
import akka.stream.ActorMaterializer
import io.iohk.ethereum.jsonrpc.JsonRpcController
import io.iohk.ethereum.jsonrpc.server.JsonRpcHttpsServer.HttpsSetupResult
import io.iohk.ethereum.jsonrpc.server.JsonRpcServer.JsonRpcServerConfig
import io.iohk.ethereum.utils.Logger

import scala.concurrent.ExecutionContext.Implicits.global
import scala.io.Source
import scala.util.{Failure, Success, Try}

class JsonRpcHttpsServer(val jsonRpcController: JsonRpcController, config: JsonRpcServerConfig,
                         secureRandom: SecureRandom)(implicit val actorSystem: ActorSystem)
  extends JsonRpcServer with Logger {

  def run(): Unit = {
    implicit val materializer = ActorMaterializer()

    val maybeSslContext = validateCertificateFiles(config.certificateKeyStorePath, config.certificateKeyStoreType, config.certificatePasswordFile).flatMap{
      case (keystorePath, keystoreType, passwordFile) =>
        val passwordReader = Source.fromFile(passwordFile)
        try {
          val password = passwordReader.getLines().mkString
          obtainSSLContext(keystorePath, keystoreType, password)
        } finally {
          passwordReader.close()
        }
    }

    val maybeHttpsContext = maybeSslContext.map(sslContext => ConnectionContext.https(sslContext))

    maybeHttpsContext match {
      case Right(httpsContext) =>
        Http().setDefaultServerHttpContext(httpsContext)
        val bindingResultF = Http().bindAndHandle(route, config.interface, config.port, connectionContext = httpsContext)

        bindingResultF onComplete {
          case Success(serverBinding) => log.info(s"JSON RPC HTTPS server listening on ${serverBinding.localAddress}")
          case Failure(ex) => log.error("Cannot start JSON HTTPS RPC server", ex)
        }
      case Left(error) => log.error(s"Cannot start JSON HTTPS RPC server due to: $error")
    }
  }

  
  private def validateCertificateFiles(maybeKeystorePath: Option[String],
                                       maybeKeystoreType: Option[String],
                                       maybePasswordFile: Option[String]): HttpsSetupResult[(String, String, String)] =
    (maybeKeystorePath, maybeKeystoreType, maybePasswordFile) match {
      case (Some(keystorePath), Some(keystoreType), Some(passwordFile)) =>
        val keystoreDirMissing = !new File(keystorePath).isFile
        val passwordFileMissing = !new File(passwordFile).isFile
        if(keystoreDirMissing && passwordFileMissing)
          Left("Certificate keystore path and password file configured but files are missing")
        else if(keystoreDirMissing)
          Left("Certificate keystore path configured but file is missing")
        else if(passwordFileMissing)
          Left("Certificate password file configured but file is missing")
        else
          Right((keystorePath, keystoreType, passwordFile))
      case _ =>
        Left("HTTPS requires: certificate-keystore-path, certificate-keystore-type and certificate-password-file to be configured")
    }

  override def corsAllowedOrigins: HttpOriginRange = config.corsAllowedOrigins
}

object JsonRpcHttpsServer {
  type HttpsSetupResult[T] = Either[String, T]
} 
Example 175
Source File: JsonRpcHttpServer.scala    From mantis   with Apache License 2.0 5 votes vote down vote up
package io.iohk.ethereum.jsonrpc.server

import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.http.scaladsl.model.headers.HttpOriginRange
import akka.stream.ActorMaterializer
import io.iohk.ethereum.jsonrpc._
import io.iohk.ethereum.jsonrpc.server.JsonRpcServer.JsonRpcServerConfig
import io.iohk.ethereum.utils.Logger

import scala.concurrent.ExecutionContext.Implicits.global
import scala.util.{Failure, Success}

class JsonRpcHttpServer(val jsonRpcController: JsonRpcController, config: JsonRpcServerConfig)
                       (implicit val actorSystem: ActorSystem)
  extends JsonRpcServer with Logger {

  def run(): Unit = {
    implicit val materializer = ActorMaterializer()

    val bindingResultF = Http(actorSystem).bindAndHandle(route, config.interface, config.port)

    bindingResultF onComplete {
      case Success(serverBinding) => log.info(s"JSON RPC HTTP server listening on ${serverBinding.localAddress}")
      case Failure(ex) => log.error("Cannot start JSON HTTP RPC server", ex)
    }
  }

  override def corsAllowedOrigins: HttpOriginRange = config.corsAllowedOrigins
} 
Example 176
Source File: BlacklistSupport.scala    From mantis   with Apache License 2.0 5 votes vote down vote up
package io.iohk.ethereum.blockchain.sync

import scala.concurrent.duration.FiniteDuration
import akka.actor.{Actor, ActorLogging, Cancellable, Scheduler}
import io.iohk.ethereum.network.PeerId

import scala.concurrent.ExecutionContext.Implicits.global

trait BlacklistSupport {
  selfActor: Actor with ActorLogging =>

  import BlacklistSupport._

  def scheduler: Scheduler

  var blacklistedPeers: Seq[(PeerId, Cancellable)] = Nil

  def blacklist(peerId: PeerId, duration: FiniteDuration, reason: String): Unit = {
    undoBlacklist(peerId)
    log.debug(s"Blacklisting peer ($peerId), $reason")
    val unblacklistCancellable = scheduler.scheduleOnce(duration, self, UnblacklistPeer(peerId))
    blacklistedPeers :+= (peerId, unblacklistCancellable)
  }

  def undoBlacklist(peerId: PeerId): Unit = {
    blacklistedPeers.find(_._1 == peerId).foreach(_._2.cancel())
    blacklistedPeers = blacklistedPeers.filterNot(_._1 == peerId)
  }

  def isBlacklisted(peerId: PeerId): Boolean =
    blacklistedPeers.exists(_._1 == peerId)

  def handleBlacklistMessages: Receive = {
    case UnblacklistPeer(ref) => undoBlacklist(ref)
  }
}

object BlacklistSupport {
  private case class UnblacklistPeer(peerId: PeerId)
} 
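The unblacklist timer above is the reason this trait imports the global context: Scheduler.scheduleOnce takes its executor as an implicit parameter, and cancelling the returned Cancellable, as undoBlacklist does, withdraws the pending work. A minimal sketch of that schedule-and-cancel pattern, assuming a running ActorSystem:

import akka.actor.ActorSystem
import scala.concurrent.duration._
import scala.concurrent.ExecutionContext.Implicits.global

object ScheduleSketch extends App {
  val system = ActorSystem("schedule-sketch")

  // The imported global ExecutionContext satisfies scheduleOnce's implicit executor parameter.
  val unblacklist = system.scheduler.scheduleOnce(5.seconds) {
    println("peer is no longer blacklisted")
  }

  // Cancelling before the delay elapses drops the pending task, like undoBlacklist above.
  unblacklist.cancel()
  system.terminate()
}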
Example 177
Source File: PeerRequestHandler.scala    From mantis   with Apache License 2.0 5 votes vote down vote up
package io.iohk.ethereum.blockchain.sync

import scala.concurrent.ExecutionContext.Implicits.global
import scala.reflect.ClassTag
import akka.actor._
import io.iohk.ethereum.network.{EtcPeerManagerActor, Peer}
import io.iohk.ethereum.network.PeerEventBusActor.PeerEvent.{MessageFromPeer, PeerDisconnected}
import io.iohk.ethereum.network.PeerEventBusActor.SubscriptionClassifier.{MessageClassifier, PeerDisconnectedClassifier}
import io.iohk.ethereum.network.PeerEventBusActor.{PeerSelector, Subscribe, Unsubscribe}
import io.iohk.ethereum.network.p2p.{Message, MessageSerializable}

import scala.concurrent.duration.FiniteDuration

class PeerRequestHandler[RequestMsg <: Message, ResponseMsg <: Message : ClassTag]
    (peer: Peer, responseTimeout: FiniteDuration, etcPeerManager: ActorRef, peerEventBus: ActorRef, requestMsg: RequestMsg, responseMsgCode: Int)
    (implicit scheduler: Scheduler, toSerializable: RequestMsg => MessageSerializable)
  extends Actor with ActorLogging {

  import PeerRequestHandler._

  val initiator: ActorRef = context.parent

  val timeout: Cancellable = scheduler.scheduleOnce(responseTimeout, self, Timeout)

  val startTime: Long = System.currentTimeMillis()

  private def subscribeMessageClassifier = MessageClassifier(Set(responseMsgCode), PeerSelector.WithId(peer.id))

  def timeTakenSoFar(): Long = System.currentTimeMillis() - startTime

  override def preStart(): Unit = {
    etcPeerManager ! EtcPeerManagerActor.SendMessage(toSerializable(requestMsg), peer.id)
    peerEventBus ! Subscribe(PeerDisconnectedClassifier(PeerSelector.WithId(peer.id)))
    peerEventBus ! Subscribe(subscribeMessageClassifier)
  }

  override def receive: Receive = {
    case MessageFromPeer(responseMsg: ResponseMsg, _) => handleResponseMsg(responseMsg)
    case Timeout => handleTimeout()
    case PeerDisconnected(peerId) if peerId == peer.id => handleTerminated()
  }

  def handleResponseMsg(responseMsg: ResponseMsg): Unit = {
    cleanupAndStop()
    initiator ! ResponseReceived(peer, responseMsg, timeTaken = timeTakenSoFar())
  }

  def handleTimeout(): Unit = {
    cleanupAndStop()
    initiator ! RequestFailed(peer, "request timeout")
  }

  def handleTerminated(): Unit = {
    cleanupAndStop()
    initiator ! RequestFailed(peer, "connection closed")
  }

  def cleanupAndStop(): Unit = {
    timeout.cancel()
    peerEventBus ! Unsubscribe()
    context stop self
  }
}

object PeerRequestHandler {
  def props[RequestMsg <: Message,
            ResponseMsg <: Message : ClassTag]
  (peer: Peer, responseTimeout: FiniteDuration, etcPeerManager: ActorRef, peerEventBus: ActorRef, requestMsg: RequestMsg, responseMsgCode: Int)
  (implicit scheduler: Scheduler, toSerializable: RequestMsg => MessageSerializable): Props =
    Props(new PeerRequestHandler(peer, responseTimeout, etcPeerManager, peerEventBus, requestMsg, responseMsgCode))

  case class RequestFailed(peer: Peer, reason: String)
  case class ResponseReceived[T](peer: Peer, response: T, timeTaken: Long)

  private case object Timeout
} 
Example 178
Source File: PeerListSupport.scala    From mantis   with Apache License 2.0 5 votes vote down vote up
package io.iohk.ethereum.blockchain.sync

import akka.actor.{Actor, ActorLogging, ActorRef, Scheduler}
import io.iohk.ethereum.network.{EtcPeerManagerActor, Peer, PeerId}
import io.iohk.ethereum.network.EtcPeerManagerActor.PeerInfo
import io.iohk.ethereum.network.PeerEventBusActor.PeerEvent.PeerDisconnected
import io.iohk.ethereum.network.PeerEventBusActor.SubscriptionClassifier.PeerDisconnectedClassifier
import io.iohk.ethereum.network.PeerEventBusActor.{PeerSelector, Subscribe, Unsubscribe}
import io.iohk.ethereum.utils.Config.SyncConfig

import scala.concurrent.duration._
import scala.concurrent.ExecutionContext.Implicits.global

trait PeerListSupport {
  self: Actor with ActorLogging with BlacklistSupport =>

  def etcPeerManager: ActorRef
  def peerEventBus: ActorRef
  def syncConfig: SyncConfig
  def scheduler: Scheduler

  var handshakedPeers: Map[Peer, PeerInfo] = Map.empty

  scheduler.schedule(0.seconds, syncConfig.peersScanInterval, etcPeerManager, EtcPeerManagerActor.GetHandshakedPeers)(global, context.self)

  def removePeer(peerId: PeerId): Unit = {
    peerEventBus ! Unsubscribe(PeerDisconnectedClassifier(PeerSelector.WithId(peerId)))
    handshakedPeers.find(_._1.id == peerId).foreach { case (peer, _) => undoBlacklist(peer.id) }
    handshakedPeers = handshakedPeers.filterNot(_._1.id == peerId)
  }

  def peersToDownloadFrom: Map[Peer, PeerInfo] =
    handshakedPeers.filterNot { case (p, s) => isBlacklisted(p.id) }

  def handlePeerListMessages: Receive = {
    case EtcPeerManagerActor.HandshakedPeers(peers) =>
      peers.keys.filterNot(handshakedPeers.contains).foreach { peer =>
        peerEventBus ! Subscribe(PeerDisconnectedClassifier(PeerSelector.WithId(peer.id)))
      }
      handshakedPeers = peers

    case PeerDisconnected(peerId) if handshakedPeers.exists(_._1.id == peerId) =>
      removePeer(peerId)
  }
} 
Example 179
Source File: NetServiceSpec.scala    From mantis   with Apache License 2.0 5 votes vote down vote up
package io.iohk.ethereum.jsonrpc

import java.net.InetSocketAddress

import akka.actor.ActorSystem
import akka.agent.Agent
import akka.testkit.TestProbe
import io.iohk.ethereum.{NormalPatience, crypto}
import io.iohk.ethereum.jsonrpc.NetService._
import io.iohk.ethereum.network.{Peer, PeerActor, PeerManagerActor}
import io.iohk.ethereum.nodebuilder.SecureRandomBuilder
import io.iohk.ethereum.utils.{NodeStatus, ServerStatus}
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.{FlatSpec, Matchers}

import scala.concurrent.duration._
import scala.concurrent.ExecutionContext.Implicits.global

class NetServiceSpec extends FlatSpec with Matchers with ScalaFutures with NormalPatience with SecureRandomBuilder {

  "NetService" should "return handshaked peer count" in new TestSetup {
    val resF = netService.peerCount(PeerCountRequest())

    peerManager.expectMsg(PeerManagerActor.GetPeers)
    peerManager.reply(PeerManagerActor.Peers(Map(
      Peer(new InetSocketAddress(1), testRef, false) -> PeerActor.Status.Handshaked,
      Peer(new InetSocketAddress(2), testRef, false) -> PeerActor.Status.Handshaked,
      Peer(new InetSocketAddress(3), testRef, false) -> PeerActor.Status.Connecting)))

    resF.futureValue shouldBe Right(PeerCountResponse(2))
  }

  it should "return listening response" in new TestSetup {
    netService.listening(ListeningRequest()).futureValue shouldBe Right(ListeningResponse(true))
  }

  it should "return version response" in new TestSetup {
    netService.version(VersionRequest()).futureValue shouldBe Right(VersionResponse("1"))
  }

  trait TestSetup {
    implicit val system = ActorSystem("Testsystem")

    val testRef = TestProbe().ref

    val peerManager = TestProbe()

    val nodeStatus = NodeStatus(crypto.generateKeyPair(secureRandom), ServerStatus.Listening(new InetSocketAddress(9000)),
      discoveryStatus = ServerStatus.NotListening)
    val netService = new NetService(Agent(nodeStatus), peerManager.ref, NetServiceConfig(5.seconds))
  }

} 
Example 180
Source File: JSONRPCWebSocket.scala    From scala-json-rpc   with MIT License 5 votes vote down vote up
package io.github.shogowada.scala.jsonrpc.example.e2e.websocket

import io.github.shogowada.scala.jsonrpc.JSONRPCServerAndClient
import io.github.shogowada.scala.jsonrpc.Types.JSONSender
import io.github.shogowada.scala.jsonrpc.serializers.UpickleJSONSerializer
import org.eclipse.jetty.websocket.api.{Session, WebSocketAdapter}

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import scala.util.Try

class JSONRPCWebSocket extends WebSocketAdapter {
  private var serverAndClient: JSONRPCServerAndClient[UpickleJSONSerializer] = _

  override def onWebSocketConnect(session: Session): Unit = {
    super.onWebSocketConnect(session)

    val jsonSender: JSONSender = (json: String) => {
      Try(session.getRemote.sendString(json)).fold(
        throwable => Future.failed(throwable),
        _ => Future(None)
      )
    }

    // Create an independent server and client for each WebSocket session.
    // This is to make sure we clean up all the caches (e.g. promised response, etc)
    // on each WebSocket session.
    serverAndClient = JSONRPCModule.createJSONRPCServerAndClient(jsonSender)
  }

  override def onWebSocketText(message: String): Unit = {
    serverAndClient.receiveAndSend(message)
  }
} 
Example 181
Source File: TodoRepositoryAPIImpl.scala    From scala-json-rpc   with MIT License 5 votes vote down vote up
package io.github.shogowada.scala.jsonrpc.example.e2e.websocket

import java.util.UUID

import io.github.shogowada.scala.jsonrpc.DisposableFunction1

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future

class TodoRepositoryAPIImpl extends TodoRepositoryAPI {

  var todos: Seq[Todo] = Seq()
  var observersById: Map[String, DisposableFunction1[TodoEvent, Future[Unit]]] = Map()

  override def add(description: String): Future[Todo] = this.synchronized {
    val todo = Todo(id = UUID.randomUUID().toString, description)
    todos = todos :+ todo

    notify(TodoEvent(todo, TodoEventTypes.Add))

    Future(todo)
  }

  override def remove(id: String): Future[Unit] = this.synchronized {
    val index = todos.indexWhere(todo => todo.id == id)
    if (index >= 0) {
      val todo = todos(index)
      todos = todos.patch(index, Seq(), 1)
      notify(TodoEvent(todo, TodoEventTypes.Remove))
    }
    Future()
  }

  override def register(observer: DisposableFunction1[TodoEvent, Future[Unit]]): Future[String] = this.synchronized {
    val id = UUID.randomUUID().toString
    observersById = observersById + (id -> observer)

    todos.map(todo => TodoEvent(todo, TodoEventTypes.Add))
        .foreach(todoEvent => notify(id, observer, todoEvent))

    Future(id)
  }

  override def unregister(observerId: String): Future[Unit] = this.synchronized {
    observersById.get(observerId).foreach(observer => {
      observersById = observersById - observerId
      observer.dispose()
    })
    Future()
  }

  private def notify(todoEvent: TodoEvent): Unit = {
    observersById.foreach {
      case (id, observer) => notify(id, observer, todoEvent)
    }
  }

  private def notify(observerId: String, observer: DisposableFunction1[TodoEvent, Future[Unit]], todoEvent: TodoEvent): Unit = {
    observer(todoEvent)
        .failed // Probably connection is lost.
        .foreach(_ => unregister(observerId))
  }
} 
Example 182
Source File: Main.scala    From scala-json-rpc   with MIT License 5 votes vote down vote up
package io.github.shogowada.scala.jsonrpc.example.e2e.websocket

import java.io.IOException

import io.github.shogowada.scala.jsonrpc.JSONRPCServerAndClient
import io.github.shogowada.scala.jsonrpc.Types.JSONSender
import io.github.shogowada.scala.jsonrpc.client.JSONRPCClient
import io.github.shogowada.scala.jsonrpc.serializers.UpickleJSONSerializer
import io.github.shogowada.scala.jsonrpc.server.JSONRPCServer
import io.github.shogowada.scalajs.reactjs.ReactDOM
import io.github.shogowada.scalajs.reactjs.VirtualDOM._
import org.scalajs.dom
import org.scalajs.dom.WebSocket

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.{Future, Promise}
import scala.scalajs.js.JSApp
import scala.util.{Failure, Try}

object Main extends JSApp {
  override def main(): Unit = {
    val futureWebSocket = createFutureWebSocket()
    val serverAndClient = createServerAndClient(futureWebSocket)

    val mountNode = dom.document.getElementById("mount-node")
    ReactDOM.render(
      <((new TodoListView(serverAndClient.createAPI[TodoRepositoryAPI])) ()).empty,
      mountNode
    )
  }

  private def createFutureWebSocket(): Future[WebSocket] = {
    val promisedWebSocket: Promise[WebSocket] = Promise()
    val webSocket = new dom.WebSocket(webSocketUrl)

    webSocket.onopen = (_: dom.Event) => {
      promisedWebSocket.success(webSocket)
    }

    webSocket.onerror = (event: dom.Event) => {
      promisedWebSocket.failure(new IOException(event.toString))
    }

    promisedWebSocket.future
  }

  private def webSocketUrl: String = {
    val location = dom.window.location
    val protocol = location.protocol match {
      case "http:" => "ws:"
      case "https:" => "wss:"
    }
    s"$protocol//${location.host}/jsonrpc"
  }

  private def createServerAndClient(futureWebSocket: Future[WebSocket]): JSONRPCServerAndClient[UpickleJSONSerializer] = {
    val jsonSerializer = UpickleJSONSerializer()

    val server = JSONRPCServer(jsonSerializer)

    val jsonSender: JSONSender = (json: String) => {
      futureWebSocket
          .map(webSocket => Try(webSocket.send(json)))
          .flatMap(tried => tried.fold(
            throwable => Future.failed(throwable),
            _ => Future(None)
          ))
    }
    val client = JSONRPCClient(jsonSerializer, jsonSender)

    val serverAndClient = JSONRPCServerAndClient(server, client)

    futureWebSocket.foreach(webSocket => {
      webSocket.onmessage = (event: dom.MessageEvent) => {
        val message = event.data.toString
        serverAndClient.receiveAndSend(message).onComplete {
          case Failure(throwable) => {
            println("Failed to send response", throwable)
          }
          case _ =>
        }
      }
    })

    serverAndClient
  }
} 
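createFutureWebSocket above turns a callback-style API into a Future by completing a Promise from the onopen and onerror handlers; everything downstream then composes with map and foreach on the global context. A minimal sketch of the same bridge using only the standard library, with connect as a hypothetical callback-based API:

import scala.concurrent.{Future, Promise}
import scala.concurrent.ExecutionContext.Implicits.global

object PromiseBridgeSketch extends App {
  // Hypothetical callback-based API standing in for the WebSocket handlers
  def connect(onSuccess: String => Unit, onError: Throwable => Unit): Unit =
    onSuccess("connected")

  def connectAsFuture(): Future[String] = {
    val promised = Promise[String]()
    connect(
      onSuccess = conn => promised.trySuccess(conn),
      onError = err => promised.tryFailure(err)
    )
    promised.future
  }

  // Subsequent work is chained on the global execution context once a callback fires.
  connectAsFuture().foreach(conn => println(s"got: $conn"))
}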
Example 183
Source File: JSONRPCModule.scala    From scala-json-rpc   with MIT License 5 votes vote down vote up
package io.github.shogowada.scala.jsonrpc.example.e2e

import io.github.shogowada.scala.jsonrpc.serializers.CirceJSONSerializer
import io.github.shogowada.scala.jsonrpc.server.JSONRPCServer

import scala.concurrent.ExecutionContext.Implicits.global

object JSONRPCModule {
  lazy val loggerAPI: LoggerAPI = new LoggerAPIImpl

  lazy val jsonRPCServer: JSONRPCServer[CirceJSONSerializer] = {
    val server = JSONRPCServer(CirceJSONSerializer())
    server.bindAPI[CalculatorAPI](new CalculatorAPIImpl)
    server.bindAPI[EchoAPI](new EchoAPIImpl)
    server.bindAPI[LoggerAPI](loggerAPI)
    server
  }
} 
Example 184
Source File: APIImpl.scala    From scala-json-rpc   with MIT License 5 votes vote down vote up
package io.github.shogowada.scala.jsonrpc.example.e2e

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future

class CalculatorAPIImpl extends CalculatorAPI {
  override def add(lhs: Int, rhs: Int): Future[Int] = {
    Future(lhs + rhs)
  }

  override def subtract(lhs: Int, rhs: Int): Future[Int] = {
    Future(lhs - rhs)
  }
}

class EchoAPIImpl extends EchoAPI {
  override def echo(message: String): Future[String] = {
    Future(message) // It just returns the message as is
  }
}

class LoggerAPIImpl extends LoggerAPI {
  var logs: Seq[String] = Seq()

  override def log(message: String): Unit = this.synchronized {
    logs = logs :+ message
    println(message) // It logs the message
  }

  override def getAllLogs(): Future[Seq[String]] = {
    Future(logs)
  }
} 
Example 185
Source File: JSONRPCServlet.scala    From scala-json-rpc   with MIT License 5 votes vote down vote up
package io.github.shogowada.scala.jsonrpc.example.e2e

import org.scalatra._

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._
import scala.concurrent.{Await, Future}

class JSONRPCServlet extends ScalatraServlet {
  post("/") {
    val server = JSONRPCModule.jsonRPCServer
    val futureResult: Future[ActionResult] = server.receive(request.body).map {
      case Some(responseJSON) => Ok(responseJSON) // For JSON-RPC request, we return response.
      case None => NoContent() // For JSON-RPC notification, we do not return response.
    }
    Await.result(futureResult, 1.minutes)
  }
} 
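Scalatra's post action here is synchronous, so the asynchronous result of server.receive has to be materialized with Await.result before the servlet can answer the request. A minimal sketch of that bridge, independent of Scalatra, with handle as a hypothetical request handler:

import scala.concurrent.{Await, Future}
import scala.concurrent.duration._
import scala.concurrent.ExecutionContext.Implicits.global

object AwaitBridgeSketch extends App {
  // Hypothetical stand-in for server.receive
  def handle(body: String): Future[Option[String]] = Future(Some(body.toUpperCase))

  // Blocks the calling (request) thread until the Future completes or the timeout expires.
  val response: Option[String] = Await.result(handle("""{"method":"echo"}"""), 1.minute)
  println(response)
}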
Example 186
Source File: Main.scala    From scala-json-rpc   with MIT License 5 votes vote down vote up
package io.github.shogowada.scala.jsonrpc.example.e2e

import io.github.shogowada.scala.jsonrpc.client.JSONRPCClient
import io.github.shogowada.scala.jsonrpc.serializers.CirceJSONSerializer
import io.github.shogowada.scalajs.reactjs.ReactDOM
import io.github.shogowada.scalajs.reactjs.VirtualDOM._
import io.github.shogowada.scalajs.reactjs.elements.ReactElement
import org.scalajs.dom

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import scala.scalajs.js.JSApp

class App(
    calculatorAPI: CalculatorAPI,
    echoAPI: EchoAPI,
    loggerAPI: LoggerAPI
) {
  def apply(): ReactElement =
    <.div()(
      <((new Calculator(calculatorAPI)) ()).empty,
      <((new Echo(echoAPI)) ()).empty,
      <((new Logger(loggerAPI)) ()).empty
    )
}

object Main extends JSApp {
  override def main(): Unit = {
    val jsonSender: (String) => Future[Option[String]] =
      (json: String) => {
        val NoContentStatus = 204
        dom.ext.Ajax
            .post(url = "/jsonrpc", data = json)
            .map(response => {
              if (response.status == NoContentStatus) {
                None
              } else {
                Option(response.responseText)
              }
            })
      }

    val client = JSONRPCClient(CirceJSONSerializer(), jsonSender)

    val calculatorAPI = client.createAPI[CalculatorAPI]
    val echoAPI = client.createAPI[EchoAPI]
    val loggerAPI = client.createAPI[LoggerAPI]

    val mountNode = dom.document.getElementById("mount-node")
    ReactDOM.render((new App(calculatorAPI, echoAPI, loggerAPI)) (), mountNode)
  }
} 
Example 187
Source File: Logger.scala    From scala-json-rpc   with MIT License 5 votes vote down vote up
package io.github.shogowada.scala.jsonrpc.example.e2e

import io.github.shogowada.scalajs.reactjs.React
import io.github.shogowada.scalajs.reactjs.VirtualDOM._
import io.github.shogowada.scalajs.reactjs.events.{FormSyntheticEvent, SyntheticEvent}
import org.scalajs.dom.raw.HTMLInputElement

import scala.concurrent.ExecutionContext.Implicits.global
import scala.util.Success

object Logger {
  case class State(log: String, logs: Seq[String])

  type Self = React.Self[Unit, State]
}

class Logger(loggerAPI: LoggerAPI) {

  import Logger._

  def apply() = reactClass

  private lazy val reactClass = React.createClass[Unit, State](
    getInitialState = (self) => State("", Seq()),
    render = (self) =>
      <.div()(
        <.h2()("Logger"),
        <.form(^.onSubmit := onLog(self))(
          <.input(
            ^.id := ElementIds.LoggerLogText,
            ^.value := self.state.log,
            ^.onChange := onChange(self)
          )(),
          <.button(
            ^.id := ElementIds.LoggerLog,
            ^.`type` := "submit"
          )("Log")
        ),
        <.form(^.onSubmit := onGetLogs(self))(
          <.button(
            ^.id := ElementIds.LoggerGetLogs,
            ^.`type` := "submit"
          )("Get Logs")
        ),
        <.div(^.id := ElementIds.LoggerLogs)(
          self.state.logs.map(log => {
            <.div()(log)
          })
        )
      ).asReactElement
  )

  private def onChange(self: Self) =
    (event: FormSyntheticEvent[HTMLInputElement]) => {
      val log = event.target.value

      self.setState(_.copy(log = log))
    }

  private def onLog(self: Self) =
    (event: SyntheticEvent) => {
      event.preventDefault()

      loggerAPI.log(self.state.log)

      self.setState(_.copy(log = ""))
    }

  private def onGetLogs(self: Self) =
    (event: SyntheticEvent) => {
      event.preventDefault()

      loggerAPI.getAllLogs().onComplete {
        case Success(logs) => self.setState(_.copy(logs = logs))
        case _ =>
      }
    }
} 
Example 188
Source File: Calculator.scala    From scala-json-rpc   with MIT License 5 votes vote down vote up
package io.github.shogowada.scala.jsonrpc.example.e2e

import io.github.shogowada.scalajs.reactjs.React
import io.github.shogowada.scalajs.reactjs.VirtualDOM._
import io.github.shogowada.scalajs.reactjs.events.{FormSyntheticEvent, SyntheticEvent}
import org.scalajs.dom.raw.HTMLInputElement

import scala.concurrent.ExecutionContext.Implicits.global
import scala.util.Success

object Calculator {
  case class State(lhs: Int, rhs: Int, added: Option[Int], subtracted: Option[Int])

  type Self = React.Self[Unit, State]
}

class Calculator(calculatorAPI: CalculatorAPI) {

  import Calculator._

  def apply() = reactClass

  private lazy val reactClass = React.createClass[Unit, State](
    getInitialState = (self) => Calculator.State(0, 0, None, None),
    render = (self) =>
      <.div()(
        <.h2()("Calculator"),
        <.form(^.onSubmit := onSubmit(self))(
          <.input(
            ^.id := ElementIds.CalculatorLhs,
            ^.onChange := onLhsChange(self),
            ^.value := self.state.lhs
          )(),
          <.input(
            ^.id := ElementIds.CalculatorRhs,
            ^.onChange := onRhsChange(self),
            ^.value := self.state.rhs
          )(),
          <.button(
            ^.id := ElementIds.CalculatorCalculate,
            ^.`type` := "submit"
          )("Calculate")
        ),
        <.div(^.id := ElementIds.CalculatorAdded)(
          s"${self.state.lhs} + ${self.state.rhs} = ${self.state.added.getOrElse("?")}"
        ),
        <.div(^.id := ElementIds.CalculatorSubtracted)(
          s"${self.state.lhs} - ${self.state.rhs} = ${self.state.subtracted.getOrElse("?")}"
        )
      ).asReactElement
  )

  private def onLhsChange(self: Self) =
    (event: FormSyntheticEvent[HTMLInputElement]) => {
      val value = event.target.value
      self.setState(_.copy(
        lhs = value.toInt,
        added = None,
        subtracted = None
      ))
    }

  private def onRhsChange(self: Self) =
    (event: FormSyntheticEvent[HTMLInputElement]) => {
      val value = event.target.value
      self.setState(_.copy(
        rhs = value.toInt,
        added = None,
        subtracted = None
      ))
    }

  private def onSubmit(self: Self) =
    (event: SyntheticEvent) => {
      event.preventDefault()

      val lhs = self.state.lhs
      val rhs = self.state.rhs

      calculatorAPI.add(lhs, rhs).onComplete {
        case Success(added) if lhs == self.state.lhs && rhs == self.state.rhs => {
          self.setState(_.copy(added = Some(added)))
        }
        case _ =>
      }

      calculatorAPI.subtract(lhs, rhs).onComplete {
        case Success(subtracted) if lhs == self.state.lhs && rhs == self.state.rhs => {
          self.setState(_.copy(subtracted = Some(subtracted)))
        }
        case _ =>
      }
    }
} 
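
Note: onLhsChange and onRhsChange call value.toInt directly, which throws NumberFormatException for empty or non-numeric input. A small defensive helper (not part of the original project) could be substituted for value.toInt in both handlers:

import scala.util.Try

object SafeInt {
  // Returns 0 for empty or non-numeric input instead of throwing NumberFormatException.
  def parse(value: String): Int = Try(value.trim.toInt).getOrElse(0)
}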
Example 189
Source File: Echo.scala    From scala-json-rpc   with MIT License 5 votes vote down vote up
package io.github.shogowada.scala.jsonrpc.example.e2e

import io.github.shogowada.scalajs.reactjs.React
import io.github.shogowada.scalajs.reactjs.VirtualDOM._
import io.github.shogowada.scalajs.reactjs.events.FormSyntheticEvent
import org.scalajs.dom.raw.HTMLInputElement

import scala.concurrent.ExecutionContext.Implicits.global
import scala.util.Success

object Echo {
  case class State(text: String, echoedText: Option[String])

  type Self = React.Self[Unit, State]
}

class Echo(echoAPI: EchoAPI) {

  import Echo._

  def apply() = reactClass

  private lazy val reactClass = React.createClass[Unit, State](
    getInitialState = (self) => State(text = "", echoedText = Some("")),
    render = (self) =>
      <.div()(
        <.h2()("Echo"),
        <.label(^.`for` := ElementIds.EchoText)("I say:"),
        <.input(
          ^.id := ElementIds.EchoText,
          ^.value := self.state.text,
          ^.onChange := onChange(self)
        )(),
        <.label(^.`for` := ElementIds.EchoEchoedText)("Server says:"),
        <.span(^.id := ElementIds.EchoEchoedText)(self.state.echoedText.getOrElse(""))
      ).asReactElement
  )

  private def onChange(self: Self) =
    (event: FormSyntheticEvent[HTMLInputElement]) => {
      val text = event.target.value

      self.setState(_.copy(
        text = text,
        echoedText = None
      ))

      echoAPI.echo(text).onComplete {
        case Success(echoedText) if self.state.text == text => self.setState(_.copy(echoedText = Some(echoedText)))
        case _ =>
      }
    }
} 
Example 190
Source File: PasswordInfoDAO.scala    From play-silhouette-4.0-slick-postgres-seed   with Apache License 2.0 5 votes vote down vote up
package models.daos

import javax.inject.Inject

import com.mohiva.play.silhouette.api.LoginInfo
import com.mohiva.play.silhouette.api.util.PasswordInfo
import com.mohiva.play.silhouette.persistence.daos.DelegableAuthInfoDAO
import models.daos.PasswordInfoDAO._
import models.tables.{ DbPasswordInfo, PasswordInfoTable }
import play.api.db.slick.DatabaseConfigProvider
import slick.backend.DatabaseConfig
import slick.driver.JdbcProfile
import slick.jdbc.JdbcBackend
import slick.lifted.TableQuery

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future

class PasswordInfoDAO @Inject() (
  protected val dbConfigProvider: DatabaseConfigProvider,
  userDAO: UserDAO) extends DelegableAuthInfoDAO[PasswordInfo] {

  val dbConfig: DatabaseConfig[JdbcProfile] = dbConfigProvider.get[JdbcProfile]
  val db: JdbcBackend#DatabaseDef = dbConfig.db

  import dbConfig.driver.api._

  def passwordInfoQuery(loginInfo: LoginInfo): Query[PasswordInfoTable, DbPasswordInfo, Seq] = {
    for {
      dbLoginInfo <- userDAO.loginInfoQuery(loginInfo)
      dbPasswordInfo <- passwordInfos.filter(_.loginInfoId === dbLoginInfo.id)
    } yield dbPasswordInfo
  }

  def find(loginInfo: LoginInfo): Future[Option[PasswordInfo]] = {
    db.run(passwordInfoQuery(loginInfo).result.headOption).map { dbPasswordInfoOption =>
      dbPasswordInfoOption.map {
        dbPasswordInfo => PasswordInfo(dbPasswordInfo.hasher, dbPasswordInfo.password, dbPasswordInfo.salt)
      }
    }
  }

  def add(loginInfo: LoginInfo, authInfo: PasswordInfo): Future[PasswordInfo] = {
    db.run(userDAO.loginInfoQuery(loginInfo).result.headOption.map { dbLoginInfoOption =>
      dbLoginInfoOption.map {
        dbLoginInfo =>
          {
            val dbPasswordInfo = DbPasswordInfo(authInfo.hasher, authInfo.password, authInfo.salt, dbLoginInfo.id.get)
            db.run(passwordInfos += dbPasswordInfo)
          }
      }
    }).map { _ =>
      authInfo
    }
  }

  def update(loginInfo: LoginInfo, authInfo: PasswordInfo): Future[PasswordInfo] = {
    db.run(userDAO.loginInfoQuery(loginInfo).result.headOption.map { dbLoginInfoOption =>
      dbLoginInfoOption.map {
        dbLoginInfo =>
          {
            db.run {
              passwordInfos.filter(_.loginInfoId === dbLoginInfo.id)
                .map(p => (p.hasher, p.password, p.salt))
                .update((authInfo.hasher, authInfo.password, authInfo.salt))
                .transactionally
            }
          }
      }
    }).map { _ =>
      authInfo
    }
  }

  def save(loginInfo: LoginInfo, authInfo: PasswordInfo): Future[PasswordInfo] = {
    db.run(userDAO.loginInfoQuery(loginInfo).result.headOption.map { dbLoginInfoOption =>
      dbLoginInfoOption.map {
        dbLoginInfo =>
          {
            val dbPasswordInfo = DbPasswordInfo(authInfo.hasher, authInfo.password, authInfo.salt, dbLoginInfo.id.get)
            db.run(passwordInfoQuery(loginInfo).insertOrUpdate(dbPasswordInfo).transactionally)
          }
      }
    }).map { _ =>
      authInfo
    }
  }

  def remove(loginInfo: LoginInfo): Future[Unit] = {
    db.run(passwordInfoQuery(loginInfo).delete).map(_ => ())
  }
}

object PasswordInfoDAO {

  private val passwordInfos = TableQuery[PasswordInfoTable]

} 
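
Note: in add, update and save above, the inner db.run(...) is started inside a map on the outer query and is never awaited, so the returned Future can complete before the write has actually been applied. A sketch of add (not the seed project's code) that composes the lookup and the insert into a single transactional DBIO, reusing the imports already present in the class, could look like this:

  def add(loginInfo: LoginInfo, authInfo: PasswordInfo): Future[PasswordInfo] = {
    val action = userDAO.loginInfoQuery(loginInfo).result.headOption.flatMap {
      case Some(dbLoginInfo) =>
        // The insert runs in the same transaction as the lookup.
        passwordInfos += DbPasswordInfo(authInfo.hasher, authInfo.password, authInfo.salt, dbLoginInfo.id.get)
      case None =>
        DBIO.failed(new IllegalStateException(s"No LoginInfo found for $loginInfo"))
    }
    db.run(action.transactionally).map(_ => authInfo)
  }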
Example 191
Source File: PasswordInfoDAO.scala    From crm-seed   with Apache License 2.0 5 votes vote down vote up
package com.dataengi.crm.identities.daos

import com.google.inject.Singleton
import com.mohiva.play.silhouette.api.LoginInfo
import com.mohiva.play.silhouette.api.util.PasswordInfo
import com.mohiva.play.silhouette.persistence.daos.DelegableAuthInfoDAO

import scala.collection.mutable
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future

trait PasswordInfoDAO extends DelegableAuthInfoDAO[PasswordInfo]

@Singleton
class InMemoryPasswordInfoDAOImpl extends PasswordInfoDAO {

  val passwords = mutable.HashMap.empty[LoginInfo, PasswordInfo]

  
  def remove(loginInfo: LoginInfo): Future[Unit] = {
    Future.successful(
      passwords.remove(loginInfo)
    )
  }
} 
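
Note: DelegableAuthInfoDAO[PasswordInfo] also requires find, add, update and save, which are not shown in this excerpt. A plausible completion over the same in-memory HashMap (inferred, not copied from the project) would be:

  def find(loginInfo: LoginInfo): Future[Option[PasswordInfo]] =
    Future.successful(passwords.get(loginInfo))

  def add(loginInfo: LoginInfo, authInfo: PasswordInfo): Future[PasswordInfo] =
    Future.successful { passwords += loginInfo -> authInfo; authInfo }

  def update(loginInfo: LoginInfo, authInfo: PasswordInfo): Future[PasswordInfo] =
    add(loginInfo, authInfo)

  def save(loginInfo: LoginInfo, authInfo: PasswordInfo): Future[PasswordInfo] =
    add(loginInfo, authInfo)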
Example 192
Source File: CounterEtlFunctionsSpec.scala    From incubator-s2graph   with Apache License 2.0 5 votes vote down vote up
package org.apache.s2graph.counter.loader.core

import com.typesafe.config.ConfigFactory
import org.apache.s2graph.core.schema.{Label, Service}
import org.apache.s2graph.core.types.HBaseType
import org.apache.s2graph.core.{S2Graph, Management}
import org.apache.s2graph.counter.models.DBModel
import org.scalatest.{BeforeAndAfterAll, FlatSpec, Matchers}

import scala.concurrent.ExecutionContext.Implicits.global

class CounterEtlFunctionsSpec extends FlatSpec with BeforeAndAfterAll with Matchers {
  val config = ConfigFactory.load()
  val cluster = config.getString("hbase.zookeeper.quorum")
  DBModel.initialize(config)

  val graph = new S2Graph(config)(global)
  val management = new Management(graph)

  override def beforeAll: Unit = {
    management.createService("test", cluster, "test", 1, None, "gz")
    management.createLabel("test_case", "test", "src", "string", "test", "tgt", "string", true, "test", Nil, Nil, "weak", None, None, HBaseType.DEFAULT_VERSION, false, "gz")
  }

  override def afterAll: Unit = {
    Label.delete(Label.findByName("test_case", false).get.id.get)
    Service.delete(Service.findByName("test", false).get.id.get)
  }

  "CounterEtlFunctions" should "parsing log" in {
    val data =
      """
        |1435107139287	insert	e	aaPHfITGUU0B_150212123559509	abcd	test_case	{"cateid":"100110102","shopid":"1","brandid":""}
        |1435106916136	insert	e	Tgc00-wtjp2B_140918153515441	efgh	test_case	{"cateid":"101104107","shopid":"2","brandid":""}
      """.stripMargin.trim.split('\n')
    val items = {
      for {
        line <- data
        item <- CounterEtlFunctions.parseEdgeFormat(line)
      } yield {
        item.action should equal("test_case")
        item
      }
    }

    items should have size 2
  }
} 
Example 193
Source File: GraphOperation.scala    From incubator-s2graph   with Apache License 2.0 5 votes vote down vote up
package org.apache.s2graph.counter.core.v2

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import com.typesafe.config.Config
import org.apache.http.HttpStatus
import org.apache.s2graph.counter.config.S2CounterConfig
import org.apache.s2graph.counter.core.v2.ExactStorageGraph._
import org.asynchttpclient.DefaultAsyncHttpClientConfig
import org.slf4j.LoggerFactory
import play.api.libs.json.{JsObject, JsValue, Json}
import scala.concurrent.Await
import scala.concurrent.duration._

class GraphOperation(config: Config) {
  // using play-ws without play app
  implicit val materializer = ActorMaterializer.create(ActorSystem(getClass.getSimpleName))
  private val builder = new DefaultAsyncHttpClientConfig.Builder()
  private val wsClient = new play.api.libs.ws.ning.NingWSClient(builder.build)
  private val s2config = new S2CounterConfig(config)
  val s2graphUrl = s2config.GRAPH_URL
  private[counter] val log = LoggerFactory.getLogger(this.getClass)

  import scala.concurrent.ExecutionContext.Implicits.global

  def createLabel(json: JsValue): Boolean = {
    // fix counter label's schemaVersion
    val newJson = json.as[JsObject] ++ Json.obj("schemaVersion" -> "v2")
    val future = wsClient.url(s"$s2graphUrl/graphs/createLabel").post(newJson).map { resp =>
      resp.status match {
        case HttpStatus.SC_OK =>
          true
        case _ =>
          throw new RuntimeException(s"failed createLabel. errCode: ${resp.status} body: ${resp.body} query: $json")
      }
    }

    Await.result(future, 10.seconds)
  }

  def deleteLabel(label: String): Boolean = {
    val future = wsClient.url(s"$s2graphUrl/graphs/deleteLabel/$label").put("").map { resp =>
      resp.status match {
        case HttpStatus.SC_OK =>
          true
        case _ =>
          throw new RuntimeException(s"failed deleteLabel. errCode: ${resp.status} body: ${resp.body}")
      }
    }

    Await.result(future, 10.seconds)
  }
} 
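
Note: a minimal usage sketch, assuming the loaded Config contains whatever key S2CounterConfig reads the graph URL from, and that the JSON body follows S2Graph's createLabel REST API (the label payload below is a placeholder, not taken from this snippet):

import com.typesafe.config.ConfigFactory
import play.api.libs.json.Json

object GraphOperationExample extends App {
  val op = new GraphOperation(ConfigFactory.load())

  // The real payload must contain the fields expected by S2Graph's createLabel endpoint.
  val created = op.createLabel(Json.obj("label" -> "friend_of"))
  println(s"createLabel succeeded: $created")

  op.deleteLabel("friend_of")
}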
Example 194
Source File: ExperimentController.scala    From incubator-s2graph   with Apache License 2.0 5 votes vote down vote up
package org.apache.s2graph.rest.play.controllers

import org.apache.s2graph.core.rest.RestHandler
import play.api.mvc._

import scala.concurrent.ExecutionContext.Implicits.global

object ExperimentController extends Controller {
  private val rest: RestHandler = org.apache.s2graph.rest.play.Global.s2rest

  import ApplicationController._

  def experiments() = experiment("", "", "")
  def experiment(accessToken: String, experimentName: String, uuid: String) = withHeaderAsync(jsonText) { request =>
    val body = request.body
    val res = rest.doPost(request.uri, body, request.headers)
    res.body.map { case js =>
      val headers = res.headers :+ ("result_size" -> rest.calcSize(js).toString)
      jsonResponse(js, headers: _*)
    } recoverWith ApplicationController.requestFallback(body)
  }
} 
Example 195
Source File: HistoryMessagesBatchSender.scala    From matcher   with MIT License 5 votes vote down vote up
package com.wavesplatform.dex.history

import akka.actor.{Actor, Cancellable}
import com.wavesplatform.dex.history.HistoryRouter.{HistoryMsg, StopAccumulate}

import scala.collection.mutable
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._
import scala.reflect.ClassTag

abstract class HistoryMessagesBatchSender[M <: HistoryMsg: ClassTag] extends Actor {

  val batchLinger: Long
  val batchEntries: Long

  def createAndSendBatch(batchBuffer: Iterable[M]): Unit

  private val batchBuffer: mutable.Set[M] = mutable.Set.empty[M]

  private def scheduleStopAccumulating: Cancellable = context.system.scheduler.scheduleOnce(batchLinger.millis, self, StopAccumulate)

  private def sendBatch(): Unit = {
    if (batchBuffer.nonEmpty) {
      createAndSendBatch(batchBuffer)
      batchBuffer.clear()
    }
  }

  def receive: Receive = awaitingHistoryMessages

  private def awaitingHistoryMessages: Receive = {
    case msg: M =>
      context become accumulateBuffer(scheduleStopAccumulating)
      batchBuffer += msg
  }

  private def accumulateBuffer(scheduledStop: Cancellable): Receive = {
    case msg: M =>
      if (batchBuffer.size == batchEntries) {
        scheduledStop.cancel()
        sendBatch()
        context become accumulateBuffer(scheduleStopAccumulating)
      }

      batchBuffer += msg

    case StopAccumulate => sendBatch(); context become awaitingHistoryMessages
  }
} 
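
Note: a hypothetical concrete subclass (SavedEventMsg and EventRepository are invented for illustration and are not part of the matcher project) showing how the three abstract members are typically supplied:

class SavedEventsBatchSender(repository: EventRepository)
    extends HistoryMessagesBatchSender[SavedEventMsg] {

  val batchLinger: Long  = 500  // flush at most every 500 ms...
  val batchEntries: Long = 100  // ...or as soon as 100 messages have accumulated

  def createAndSendBatch(batchBuffer: Iterable[SavedEventMsg]): Unit =
    repository.saveAll(batchBuffer.toSeq)
}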
Example 196
Source File: ExchangeTransactionCreatorSpecification.scala    From matcher   with MIT License 5 votes vote down vote up
package com.wavesplatform.dex.model

import com.wavesplatform.dex.domain.asset.Asset
import com.wavesplatform.dex.domain.bytes.ByteStr
import com.wavesplatform.dex.domain.crypto
import com.wavesplatform.dex.domain.crypto.Proofs
import com.wavesplatform.dex.domain.order.Order
import com.wavesplatform.dex.domain.order.OrderOps._
import com.wavesplatform.dex.domain.transaction.ExchangeTransactionV2
import com.wavesplatform.dex.domain.utils.EitherExt2
import com.wavesplatform.dex.{MatcherSpecBase, NoShrink}
import org.scalacheck.Gen
import org.scalamock.scalatest.PathMockFactory
import org.scalatest.matchers.should.Matchers
import org.scalatest.prop.TableDrivenPropertyChecks
import org.scalatest.wordspec.AnyWordSpec
import org.scalatest.{Assertion, BeforeAndAfterAll}
import org.scalatestplus.scalacheck.{ScalaCheckPropertyChecks => PropertyChecks}

import scala.concurrent.ExecutionContext.Implicits.global

class ExchangeTransactionCreatorSpecification
    extends AnyWordSpec
    with Matchers
    with MatcherSpecBase
    with BeforeAndAfterAll
    with PathMockFactory
    with PropertyChecks
    with NoShrink
    with TableDrivenPropertyChecks {

  private def getExchangeTransactionCreator(hasMatcherScript: Boolean = false,
                                            hasAssetScripts: Asset => Boolean = _ => false): ExchangeTransactionCreator = {
    new ExchangeTransactionCreator(MatcherAccount, matcherSettings.exchangeTxBaseFee, hasMatcherScript, hasAssetScripts)
  }

  "ExchangeTransactionCreator" should {
    "create an ExchangeTransactionV2" when {
      (List(1, 2, 3) ++ List(1, 2, 3)).combinations(2).foreach {
        case List(counterVersion, submittedVersion) =>
          s"counterVersion=$counterVersion, submittedVersion=$submittedVersion" in {
            val counter   = buy(wavesBtcPair, 100000, 0.0008, matcherFee = Some(2000L), version = counterVersion.toByte)
            val submitted = sell(wavesBtcPair, 100000, 0.0007, matcherFee = Some(1000L), version = submittedVersion.toByte)

            val tc = getExchangeTransactionCreator()
            val oe = mkOrderExecutedRaw(submitted, counter)

            tc.createTransaction(oe).explicitGet() shouldBe a[ExchangeTransactionV2]
          }
      }
    }

    "take fee from order executed event" when {
      "orders are matched fully" in {
        val preconditions = for { ((_, buyOrder), (_, sellOrder)) <- orderV3MirrorPairGenerator } yield (buyOrder, sellOrder)
        test(preconditions)
      }

      "orders are matched partially" in {
        val preconditions = for { ((_, buyOrder), (senderSell, sellOrder)) <- orderV3MirrorPairGenerator } yield {
          val sellOrderWithUpdatedAmount = sellOrder.updateAmount(sellOrder.amount / 2)
          val newSignature               = crypto.sign(senderSell, sellOrderWithUpdatedAmount.bodyBytes())
          val correctedSellOrder         = sellOrderWithUpdatedAmount.updateProofs(Proofs(Seq(ByteStr(newSignature))))

          (buyOrder, correctedSellOrder)
        }

        test(preconditions)
      }

      def test(preconditions: Gen[(Order, Order)]): Assertion = forAll(preconditions) {
        case (buyOrder, sellOrder) =>
          val tc = getExchangeTransactionCreator()
          val oe = mkOrderExecutedRaw(buyOrder, sellOrder)
          val tx = tc.createTransaction(oe).explicitGet()

          tx.buyMatcherFee shouldBe oe.submittedExecutedFee
          tx.sellMatcherFee shouldBe oe.counterExecutedFee
      }
    }
  }
} 
Example 197
Source File: it.scala    From matcher   with MIT License 5 votes vote down vote up
package com.wavesplatform

import com.wavesplatform.dex.domain.account.{KeyPair, PublicKey}
import com.wavesplatform.dex.domain.asset.AssetPair
import com.wavesplatform.dex.domain.order.{Order, OrderType}
import com.wavesplatform.dex.waves.WavesFeeConstants._
import com.wavesplatform.it.api.MatcherCommand
import org.scalacheck.Gen

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.{DurationInt, FiniteDuration}
import scala.concurrent.{Await, Future}
import scala.util.Random
import scala.util.control.NonFatal

package object it {

  
  def executeCommands(xs: Seq[MatcherCommand], ignoreErrors: Boolean = true, timeout: FiniteDuration = 3.minutes): Int = {
    Await.result(Future.sequence(xs.map(executeCommand(_, ignoreErrors))), timeout).sum
  }

  private def executeCommand(x: MatcherCommand, ignoreErrors: Boolean): Future[Int] =
    try x match {
      case MatcherCommand.Place(api, order) => api.tryPlace(order).map(_.fold(_ => 0, _ => 1))
      case MatcherCommand.Cancel(api, owner, order) =>
        api.tryCancel(owner, order).map(_.fold(_ => 0, _ => 1))
    } catch {
      case NonFatal(e) =>
        if (ignoreErrors) Future.successful(0)
        else Future.failed(e)
    }

  def orderGen(matcher: PublicKey,
               trader: KeyPair,
               assetPairs: Seq[AssetPair],
               types: Seq[OrderType] = Seq(OrderType.BUY, OrderType.SELL)): Gen[Order] = {
    val ts = System.currentTimeMillis()
    for {
      assetPair      <- Gen.oneOf(assetPairs)
      tpe            <- Gen.oneOf(types)
      amount         <- Gen.choose(10, 100)
      price          <- Gen.choose(10, 100)
      orderVersion   <- Gen.choose[Byte](1, 3)
      expirationDiff <- Gen.choose(600000, 6000000)
    } yield {
      if (tpe == OrderType.BUY)
        Order.buy(
          trader,
          matcher,
          assetPair,
          amount,
          price * Order.PriceConstant,
          ts,
          ts + expirationDiff,
          matcherFee,
          orderVersion
        )
      else
        Order.sell(
          trader,
          matcher,
          assetPair,
          amount,
          price * Order.PriceConstant,
          ts,
          ts + expirationDiff,
          matcherFee,
          orderVersion
        )
    }
  }

  def choose[T](xs: IndexedSeq[T]): T = xs(Random.nextInt(xs.size))
} 
Example 198
Source File: CanExtractInstances.scala    From matcher   with MIT License 5 votes vote down vote up
package com.wavesplatform.dex.it.fp

import cats.{Id, MonadError}

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future

trait CanExtractInstances extends CanExtractInstances1


trait CanExtractInstances2 {

  implicit val future = new CanExtract[Future] {
    override def extract[ErrorT, ResultT](f: => Future[Either[ErrorT, ResultT]]): Future[ResultT] = f.map {
      case Left(e)  => throw new RuntimeException(s"Can't extract: $e")
      case Right(r) => r
    }
  }

  implicit val id = new CanExtract[Id] {
    override def extract[ErrorT, ResultT](f: => Id[Either[ErrorT, ResultT]]): Id[ResultT] = f match {
      case Left(e)  => throw new RuntimeException(s"Can't extract: $e")
      case Right(r) => r
    }
  }
} 
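
Note: the CanExtract typeclass itself is not shown here; reconstructed from the two instances above, its shape is roughly:

trait CanExtract[F[_]] {
  // Unwraps a successful Either or fails the effect with an exception.
  def extract[ErrorT, ResultT](f: => F[Either[ErrorT, ResultT]]): F[ResultT]
}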
Example 199
Source File: Philosopher.scala    From didactic-computing-machine   with GNU Affero General Public License v3.0 5 votes vote down vote up
package DiningPhilosophers

import DiningPhilosophers.ForkMessages._
import DiningPhilosophers.PhilosopherMessages._
import akka.actor.{Actor, ActorLogging, ActorRef, ActorSystem, Props}
import scala.concurrent.duration.{DurationInt, FiniteDuration}
import scala.concurrent.ExecutionContext.Implicits.global

class Philosopher(val leftFork: ActorRef, val rightFork: ActorRef) extends Actor with ActorLogging {

  def name = self.path.name

  private val eatingTime = 2500.millis
  private val thinkingTime = 5000.millis
  private val retryTime = 10.millis


  def thinkFor(duration: FiniteDuration) = {
    context.system.scheduler.scheduleOnce(duration, self, Eat)
    context.become(thinking)
  }

  def thinking: Receive = {
    case Eat =>
      log.info(s"Philosopher ${self.path.name} wants to eat")
      leftFork ! Take
      rightFork ! Take
      context.become(hungry)
  }

  def hungry: Receive = {
    case ForkBeingUsed => handleForkBeingUsed()
    case ForkTaken =>
      log.info(s"Philosopher ${self.path.name} found one fork to be taken by other philosopher")
      context.become(waitingForOtherFork)
  }

  def waitingForOtherFork: Receive = {
    case ForkBeingUsed => handleForkBeingUsed()
    case ForkTaken =>
      log.info(s"Philosopher ${self.path.name} starts to eat")
      context.system.scheduler.scheduleOnce(eatingTime, self, Think)
      context.become(eating)
  }

  def eating: Receive = {
    case Think =>
      log.info(s"Philosopher ${self.path.name} starts to think")
      leftFork ! Put
      rightFork ! Put
      thinkFor(thinkingTime)
  }

  def handleForkBeingUsed(): Unit = {
    log.info(s"Philosopher ${self.path.name} found one fork to be in use")
    
    leftFork ! Put
    rightFork ! Put
    thinkFor(retryTime)
  }

  def receive = {
    case Think =>
      log.info(s"Philosopher ${self.path.name} started thinking")
      thinkFor(thinkingTime)

  }
} 
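
Note: a wiring sketch, assuming a Fork actor defined elsewhere in the project that replies to Take with ForkTaken or ForkBeingUsed and accepts Put (the actor names and the count of five are illustrative):

package DiningPhilosophers

import DiningPhilosophers.PhilosopherMessages._
import akka.actor.{ActorSystem, Props}

object DiningPhilosophersApp extends App {
  val system = ActorSystem("dining-philosophers")

  // Five forks and five philosophers arranged in a ring; neighbours share a fork.
  val forks = (1 to 5).map(i => system.actorOf(Props[Fork], s"fork-$i"))
  val philosophers = (1 to 5).map { i =>
    system.actorOf(Props(new Philosopher(forks(i - 1), forks(i % 5))), s"philosopher-$i")
  }

  // Everyone starts out thinking.
  philosophers.foreach(_ ! Think)
}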
Example 200
Source File: NominatimLookup.scala    From daf-semantics   with Apache License 2.0 5 votes vote down vote up
package examples.nominatim

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Await
import scala.concurrent.duration.Duration
import play.api.libs.ws.ahc.AhcWSClient
import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.databind.JsonNode
import clients.HTTPClient

// SEE: Prefix.cc Lookup - http://prefix.cc/foaf.file.json

class NominatimLookup {

  val http = HTTPClient

  def start() {
    http.start()
  }

  def stop() {
    http.stop()
  }

  def nominatim(address: String) = {

    val url = "http://nominatim.openstreetmap.org/search"

    val parameters = Map(
      "q" -> address,
      "addressdetails" -> "1",
      "format" -> "json",
      "limit" -> "4",
      "dedupe" -> "1",
      "extratags" -> "1",
      "namedetails" -> "1").toList

    val ret = http.ws.url(url)
      .withQueryString(parameters: _*)
      .get()
      .map { response =>
        response.status match {
          case 200 => response.body
          case _   => "{}"
        }

      }

    ret

  }

}

object MainNominatimLookup extends App {

  import scala.collection.JavaConversions._
  import scala.collection.JavaConverters._

  val nominatim = new NominatimLookup
  nominatim.start()

  val json_mapper = new ObjectMapper
  val json_reader = json_mapper.reader()

  val result = Await.ready(nominatim.nominatim("135 pilkington avenue, birmingham"), Duration.Inf)
    .value.get.get

  val json_list: List[JsonNode] = json_reader.readTree(result).elements().toList

  // simulated output...
  if (json_list.size > 0) {
    println(s"RESULTS [${json_list.size}]")
    json_list
      .zipWithIndex
      .foreach {
        case (node, i) =>
          println(s"result ${i + 1}")
          println(node.get("place_id"))
          println(node.get("address").get("road").asText() + ", " + node.get("address").get("house_number").asText())
      }
  } else {
    println("cannot find results...")
  }

  nominatim.stop()

}
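
Note: Await.ready(...).value.get.get works, but Await.result returns the body directly; inside MainNominatimLookup the same result could be obtained with:

  val result: String =
    Await.result(nominatim.nominatim("135 pilkington avenue, birmingham"), Duration.Inf)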