com.typesafe.scalalogging.LazyLogging Scala Examples

The following examples show how to use com.typesafe.scalalogging.LazyLogging. Each example is drawn from an open-source project; the source file, project, and license are noted in the heading above it.
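Before the project examples, here is a minimal, self-contained sketch of the trait itself (the class and messages are illustrative and not taken from any project below). Mixing in LazyLogging gives the class a protected logger field, backed by SLF4J and created lazily on first access:

import com.typesafe.scalalogging.LazyLogging

class GreetingService extends LazyLogging {

  // `logger` is provided by LazyLogging; because it is a lazy val, instances
  // that never log never pay the cost of constructing the underlying logger.
  def greet(name: String): String = {
    logger.debug(s"Computing greeting for $name")
    s"Hello, $name"
  }
}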
Example 1
Source File: ComponentsFixture.scala    From daml   with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.navigator.test

import java.util.concurrent.atomic.AtomicReference

import com.daml.navigator.test.config.Arguments
import com.daml.navigator.test.runner.{HeadNavigator, PackagedDamlc, PackagedSandbox}
import com.typesafe.scalalogging.LazyLogging

import scala.io.Source
import scala.util.{Failure, Success, Try}

class ComponentsFixture(
    val args: Arguments,
    val navigatorPort: Int,
    val sandboxPort: Int,
    val scenario: String
) extends LazyLogging {

  // Callbacks that tear down the processes started by this fixture
  private val killProcs: AtomicReference[List[Unit => Unit]] = new AtomicReference(List.empty)

  private val onlineUrl = s"http://localhost:$navigatorPort/api/about"

  private def get(
      url: String,
      connectTimeout: Int = 1000,
      readTimeout: Int = 1000,
      requestMethod: String = "GET"
  ): String = {
    import java.net.{URL, HttpURLConnection}
    val connection = (new URL(url)).openConnection.asInstanceOf[HttpURLConnection]
    connection.setConnectTimeout(connectTimeout)
    connection.setReadTimeout(readTimeout)
    connection.setRequestMethod(requestMethod)
    val inputStream = connection.getInputStream
    val content = Source.fromInputStream(inputStream).mkString
    if (inputStream != null) inputStream.close()
    content
  }

  def startup(): Try[Unit] = {
    if (args.startComponents) {
      logger.info("Starting the sandbox and the Navigator")
      for {
        (darFile, tempFiles) <- Try(PackagedDamlc.run(args.damlPath))
        sandbox <- Try(PackagedSandbox.runAsync(sandboxPort, darFile, scenario))
        _ = killProcs.updateAndGet(s => sandbox :: s)
        navigator <- Try(
          HeadNavigator.runAsync(args.navConfPAth, args.navigatorDir, navigatorPort, sandboxPort))
        _ = killProcs.updateAndGet(s => navigator :: s)
      } yield { () }
    } else {
      Success(())
    }
  }

  private def retry[R](action: => R, maxRetries: Int, delayMillis: Int): Try[R] = {
    def retry0(count: Int): Try[R] = {
      Try(action) match {
        case Success(r) => Success(r)
        case Failure(e) =>
          if (count > maxRetries) {
            logger.error(
              s"Navigator is not available after $maxRetries retries with $delayMillis millis interval.")
            Failure(e)
          } else {
            logger.info(s"Navigator is not available yet, waiting $delayMillis millis ")
            Thread.sleep(delayMillis.toLong)
            retry0(count + 1)
          }
      }
    }

    retry0(0)
  }

  def waitForNavigator(): Try[Unit] = {
    logger.info(s"Waiting for the Navigator to start up (waiting for $onlineUrl)")
    retry({ get(onlineUrl); () }, 120, 1000)
  }

  def shutdown(): Unit = {
    killProcs.getAndUpdate(procs => {
      procs.foreach(killAction => Try { killAction(()) })
      List.empty
    })
    ()
  }
} 
Example 2
Source File: AkkaTest.scala    From daml   with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.ledger.client.testing

import java.util
import java.util.concurrent.{Executors, ScheduledExecutorService}

import akka.NotUsed
import akka.actor.{ActorSystem, Scheduler}
import akka.stream.scaladsl.{Sink, Source}
import akka.stream.Materializer
import akka.util.ByteString
import com.daml.grpc.adapter.{ExecutionSequencerFactory, SingleThreadExecutionSequencerPool}
import com.typesafe.config.{Config, ConfigFactory, ConfigValueFactory}
import com.typesafe.scalalogging.LazyLogging
import org.scalatest.{BeforeAndAfterAll, Suite}

import scala.concurrent.duration._
import scala.concurrent.{Await, ExecutionContextExecutor, Future}
import scala.util.control.NonFatal

trait AkkaTest extends BeforeAndAfterAll with LazyLogging { self: Suite =>
  // TestEventListener is needed for log testing
  private val loggers =
    util.Arrays.asList("akka.event.slf4j.Slf4jLogger", "akka.testkit.TestEventListener")
  protected implicit val sysConfig: Config = ConfigFactory
    .load()
    .withValue("akka.loggers", ConfigValueFactory.fromIterable(loggers))
    .withValue("akka.logger-startup-timeout", ConfigValueFactory.fromAnyRef("30s"))
    .withValue("akka.stdout-loglevel", ConfigValueFactory.fromAnyRef("INFO"))
  protected implicit val system: ActorSystem = ActorSystem("test", sysConfig)
  protected implicit val ec: ExecutionContextExecutor =
    system.dispatchers.lookup("test-dispatcher")
  protected implicit val scheduler: Scheduler = system.scheduler
  protected implicit val schedulerService: ScheduledExecutorService =
    Executors.newSingleThreadScheduledExecutor()
  protected implicit val materializer: Materializer = Materializer(system)
  protected implicit val esf: ExecutionSequencerFactory =
    new SingleThreadExecutionSequencerPool("testSequencerPool")
  protected val timeout: FiniteDuration = 2.minutes
  protected val shortTimeout: FiniteDuration = 5.seconds

  protected def await[T](fun: => Future[T]): T = Await.result(fun, timeout)

  protected def awaitShort[T](fun: => Future[T]): T = Await.result(fun, shortTimeout)

  protected def drain(source: Source[ByteString, NotUsed]): ByteString = {
    val futureResult: Future[ByteString] = source.runFold(ByteString.empty) { (a, b) =>
      a.concat(b)
    }
    awaitShort(futureResult)
  }

  protected def drain[A, B](source: Source[A, B]): Seq[A] = {
    val futureResult: Future[Seq[A]] = source.runWith(Sink.seq)
    awaitShort(futureResult)
  }

  override protected def afterAll(): Unit = {
    try {
      val _ = await(system.terminate())
    } catch {
      case NonFatal(_) => ()
    }
    schedulerService.shutdownNow()
    super.afterAll()
  }
} 
Example 3
Source File: RetryLogger.scala    From daml   with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.ledger.client.binding.retrying

import java.time.Instant

import com.daml.ledger.api.v1.command_submission_service.SubmitRequest
import com.daml.ledger.client.binding.log.Labels.{
  ERROR_CODE,
  ERROR_DETAILS,
  ERROR_MESSAGE,
  WORKFLOW_ID
}
import com.google.rpc.status.Status
import com.typesafe.scalalogging.LazyLogging

object RetryLogger extends LazyLogging {

  def logFatal(request: SubmitRequest, status: Status, nrOfRetries: Int): Unit = {
    logger.warn(
      s"Encountered fatal error when submitting command after $nrOfRetries retries, therefore retry halted: " +
        format(request, status))
  }

  def logStopRetrying(
      request: SubmitRequest,
      status: Status,
      nrOfRetries: Int,
      firstSubmissionTime: Instant): Unit = {
    logger.warn(
      s"Retrying of command stopped after $nrOfRetries retries. Attempting since $firstSubmissionTime: " +
        format(request, status)
    )
  }

  def logNonFatal(request: SubmitRequest, status: Status, nrOfRetries: Int): Unit = {
    logger.warn(
      s"Encountered non-fatal error when submitting command after $nrOfRetries retries, therefore will retry: " +
        format(request, status)
    )
  }

  private def format(request: SubmitRequest, status: Status): String =
    format(
      (BIM, request.commands.map(_.commandId)),
      (PARTY, request.commands.map(_.party)),
      (WORKFLOW_ID, request.commands.map(_.workflowId)),
      (ERROR_CODE, status.code),
      (ERROR_MESSAGE, status.message),
      (ERROR_DETAILS, status.details.mkString(","))
    )

  @SuppressWarnings(Array("org.wartremover.warts.JavaSerializable", "org.wartremover.warts.Any"))
  private def format(fs: (String, Any)*): String = fs.map(f => s"${f._1} = ${f._2}").mkString(", ")

  private val PARTY = "party"
  private val BIM = "bim"
} 
Example 4
Source File: RetryHelper.scala    From daml   with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.ledger.client.binding.retrying

import java.lang.Math.floor

import akka.actor.Scheduler
import akka.pattern.after
import com.daml.ledger.client.binding.config.IRetryConfig
import com.typesafe.scalalogging.LazyLogging

import scala.concurrent.duration.FiniteDuration
import scala.concurrent.{ExecutionContext, Future}
import scala.util.control.NonFatal

object RetryHelper extends LazyLogging {

  // Not shown in the extracted snippet: this alias is assumed here so the code
  // compiles standalone. A retry strategy decides, per failure, whether to retry.
  type RetryStrategy = PartialFunction[Throwable, Boolean]

  val always: RetryStrategy = {
    case NonFatal(_) => true
  }

  def retry[T](retryConfig: Option[(Scheduler, IRetryConfig)])(retryStrategy: RetryStrategy)(
      f: => Future[T])(implicit ec: ExecutionContext): Future[T] = {
    retryConfig match {
      case None =>
        f
      case Some(rc) =>
        implicit val scheduler: Scheduler = rc._1
        retry(Option(rc._2))(retryStrategy)(f)
    }
  }

  def retry[T](retryConfig: Option[IRetryConfig])(retryStrategy: RetryStrategy)(
      f: => Future[T])(implicit ec: ExecutionContext, s: Scheduler): Future[T] = {
    retryConfig match {
      case None =>
        f
      case Some(rc) =>
        val maxAttempts = floor(rc.timeout / rc.interval).toInt
        retry(maxAttempts, rc.interval)(retryStrategy)(f)
    }
  }

  def retry[T](maxAttempts: Int, delay: FiniteDuration)(retryStrategy: RetryStrategy)(
      f: => Future[T])(implicit ec: ExecutionContext, s: Scheduler): Future[T] = {

    def shouldRetry(n: Int, e: Throwable): Boolean =
      n > 0 && retryStrategy.applyOrElse(e, (_: Throwable) => false)

    val remainingAttempts = maxAttempts - 1 // the next line will trigger a future evaluation

    f.recoverWith {
      case NonFatal(e) if shouldRetry(remainingAttempts, e) =>
        logWarning(remainingAttempts, e)
        after(delay, s)(retry(remainingAttempts, delay)(retryStrategy)(f))
    }
  }

  private def logWarning(remainingAttempts: Int, e: Throwable): Unit = {
    logger.warn(
      s"Retrying after failure. Attempts remaining: $remainingAttempts. Error: ${e.getMessage}")
  }
} 
Example 5
Source File: ScalaUtil.scala    From daml   with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.ledger.client.binding.util

import java.util.concurrent.{ScheduledExecutorService, ScheduledFuture, TimeUnit}

import com.typesafe.scalalogging.LazyLogging

import scala.concurrent.duration._
import scala.concurrent.{ExecutionContext, Future, Promise, TimeoutException}

object ScalaUtil {

  implicit class FutureOps[T](val future: Future[T]) extends LazyLogging {

    def timeout(
        name: String,
        failTimeout: FiniteDuration = 1.minute,
        warnTimeout: FiniteDuration = 30.seconds)(
        implicit ec: ExecutionContext,
        scheduler: ScheduledExecutorService): Future[T] = {

      val promise = Promise[T]()

      @SuppressWarnings(Array("org.wartremover.warts.JavaSerializable"))
      val warningTask = schedule(warnTimeout) {
        logger.warn("Function {} takes more than {}", name, warnTimeout)
      }

      val errorTask = schedule(failTimeout) {
        val error = new TimeoutException(s"Function call $name took more than $failTimeout")
        promise.tryFailure(error)
        ()
      }

      future.onComplete { outcome =>
        warningTask.cancel(false)
        errorTask.cancel(false)
        promise.tryComplete(outcome)
      }

      promise.future
    }

    private def schedule(timeout: FiniteDuration)(f: => Unit)(
        implicit scheduler: ScheduledExecutorService): ScheduledFuture[_] = {

      val runnable = new Runnable {
        override def run(): Unit = f
      }

      scheduler.schedule(runnable, timeout.toMillis, TimeUnit.MILLISECONDS)
    }

    def timeoutWithDefaultWarn(name: String, failTimeout: FiniteDuration)(
        implicit ec: ExecutionContext,
        scheduler: ScheduledExecutorService): Future[T] = timeout(name, failTimeout, 10.seconds)

  }

} 
Example 6
Source File: Main.scala    From daml   with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.navigator.test

import scala.collection._
import scala.collection.JavaConverters._
import java.util.concurrent.ConcurrentHashMap

import com.daml.navigator.test.config.Arguments
import com.typesafe.scalalogging.LazyLogging
import org.scalatest.events._
import org.scalatest.{Args, ConfigMap, Reporter}

import scala.util.Try

object Main extends LazyLogging {
  def main(args: Array[String]): Unit = {
    Arguments.parse(args) match {
      case None =>
        System.exit(1)
      case Some(arguments) =>
        val reporter = new LoggerReporter()
        val status = new BrowserTest(arguments)
          .run(None, Args(reporter = reporter, configMap = new ConfigMap(Map.empty[String, Any])))
        val success = Try(status.succeeds()).getOrElse(false)
        val exitCode = if (success) 0 else 1
        val header =
          """
            | 
            |***************************************************************************
            |
            |     Test Results
            |
            |***************************************************************************
            |
          """.stripMargin
        logger.info(header)
        reporter.results.foreach { kv =>
          logger.info(s"${kv._1}")
          kv._2.foreach(logger.info(_))
        }
        val results = reporter.results.toList
        val allTests = results.size
        val failedTests = results.count(kv => kv._2.isDefined)
        val footer =
          s"""
            | 
            |***************************************************************************
            |
            |     All tests: $allTests; tests failed: $failedTests
            |
            |***************************************************************************
            |
          """.stripMargin
        logger.info(footer)
        System.exit(exitCode)
    }
  }
}

class LoggerReporter extends Reporter {

  // Test statuses with optional errors
  val results: concurrent.Map[String, Option[String]] =
    new ConcurrentHashMap[String, Option[String]]().asScala

  override def apply(e: Event): Unit = {
    e match {
      case t: TestSucceeded =>
        results.put(s"  Test succeeded: ${t.testName}", None)
        ()
      case t: TestFailed =>
        results.put(
          s"  Test failed: ${t.testName}",
          t.throwable.map(_.getMessage).map(e => s"      error: $e")
        )
        ()

      case _ => ()
    }

  }

} 
Example 7
Source File: Arguments.scala    From daml   with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.navigator.test.config

import com.typesafe.scalalogging.LazyLogging
import scopt.OptionParser

final case class Arguments(
    navigatorPort: Int = 7500,
    damlPath: String = "",
    navConfPAth: String = "",
    navigatorDir: String = "",
    startComponents: Boolean = false,
    browserStackUser: String = "",
    browserStackKey: String = ""
)

object Arguments extends LazyLogging {

  private val argumentParser =
    new OptionParser[Arguments]("navigator-test") {

      opt[Int]("navigator-port")
        .text("the port the tests will run against, 7500 by default")
        .action((navigatorPort, arguments) => arguments.copy(navigatorPort = navigatorPort))

      opt[String]("daml-path")
        .text(
          "the path to the daml file picked up by the sandbox - if sandbox command is not provided")
        .action((damlPath, arguments) => arguments.copy(damlPath = damlPath))

      opt[String]("nav-conf-path")
        .text(
          "the path to the navigator config file picked up by the backend - if navigator command is not provided")
        .action((navConfPAth, arguments) => arguments.copy(navConfPAth = navConfPAth))

      opt[String]("navigator-dir")
        .text("the folder to run the frontend from if navigator command is not provided")
        .action((navigatorDir, arguments) => arguments.copy(navigatorDir = navigatorDir))

      opt[String]("browserstack-user")
        .text("username to run BrowserStack Automate tests")
        .action(
          (browserStackUser, arguments) => arguments.copy(browserStackUser = browserStackUser))

      opt[String]("browserstack-key")
        .text("api key to run BrowserStack Automate tests")
        .action((browserStackKey, arguments) => arguments.copy(browserStackKey = browserStackKey))

    }

  def parse(arguments: Array[String]): Option[Arguments] = {
    argumentParser.parse(arguments, Arguments()) match {
      case None => None
      case Some(args) =>
        if (args.navigatorDir != "" && args.navConfPAth != "" && args.damlPath != "") {
          Some(args.copy(startComponents = true))
        } else {
          Some(args)
        }
    }
  }

  def showUsage(): Unit =
    argumentParser.showUsage()
} 
Example 8
Source File: HeadNavigator.scala    From daml   with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.navigator.test.runner

import java.io.File

import com.typesafe.scalalogging.LazyLogging
import com.daml.navigator.test.runner.Runner.LazyProcessLogger


object HeadNavigator extends LazyLogging {
  def findNavigatorJar(navigatorDir: String): File = {
    val navigatorPattern = """navigator-.*jar""".r

    val distDir = new File(s"$navigatorDir/dist")
    require(
      distDir.exists && distDir.isDirectory,
      s"Navigator dist directory does not exist. Run the navigator build first.")

    val files = distDir.listFiles
      .filter(f => f.isFile && f.canRead)
      .filter(f => navigatorPattern.findFirstIn(f.getName).isDefined)
    require(
      files.length > 0,
      s"No navigator jar file found in $distDir. Run the navigator build first.")
    require(
      files.length < 2,
      s"Multiple navigator jar files found in $distDir: ${files.mkString(" ")}. Delete old ones.")
    files(0)
  }

  def runAsync(
      navConfPAth: String,
      navigatorDir: String,
      navigatorPort: Int,
      sandboxPort: Int): Unit => Unit = {
    val navigatorJar = findNavigatorJar(navigatorDir)
    val commands = List(
      "java",
      "-jar",
      navigatorJar.toString,
      "server",
      "localhost",
      s"$sandboxPort",
      "--config-file",
      navConfPAth,
      "--port",
      s"$navigatorPort"
    )
    val process = Runner.executeAsync(commands, Some(new LazyProcessLogger("[navigator] ")))

    val shutdown = (_: Unit) => {
      if (process.isAlive()) {
        logger.info("Shutting down Navigator")
        process.destroy()
      }
    }

    shutdown
  }
} 
Example 9
Source File: PackagedSandbox.scala    From daml   with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.navigator.test.runner

import java.io.File

import com.daml.navigator.test.runner.Runner.LazyProcessLogger
import com.typesafe.scalalogging.LazyLogging


object PackagedSandbox {

  class SandboxContext(
      val host: String,
      val port: Int,
      val dars: List[File],
      logbackConfig: File,
      scenario: String)
      extends LazyLogging {

    import sys.process._

    require(logbackConfig.exists, s"Logback config does not exist: $logbackConfig")

    private val sandboxJar = locateSandboxJar()
    private var sandboxProcess: Option[Process] = None

    private def locateSandboxJar(): File = {
      val jarKey = "com.daml.sandbox.jar"
      Option(System.getProperty(jarKey))
        .map { path =>
          val sandboxJar = new File(path).getAbsoluteFile
          require(sandboxJar.exists, s"Sandbox JAR does not exist: $sandboxJar")
          sandboxJar
        }
        .getOrElse(throw new IllegalStateException(
          s"Cannot start Sandbox, '$jarKey' system property is not set"))
    }

    def start(): Unit = {
      val command = List(
        "java",
        // s"-Dlogback.configurationFile=${logbackConfig.getAbsolutePath}",
        "-jar",
        sandboxJar.toString,
        "--port",
        s"$port",
        "--scenario",
        scenario
      ) ++ dars.map(_.toString)

      sandboxProcess = Some(Runner.executeAsync(command, Some(new LazyProcessLogger("[sandbox] "))))
    }

    def shutdown(): Unit = {
      if (sandboxProcess.exists(_.isAlive())) {
        logger.info("Shutting down sandbox process")
        sandboxProcess.foreach(_.destroy())
        sandboxProcess = None
      }
    }
  }

  object SandboxContext {

    def apply(
        port: Int,
        dars: List[File],
        sbtConfig: String = "it",
        scenario: String): SandboxContext =
      new SandboxContext("127.0.0.1", port, dars, logbackConfig(sbtConfig), scenario)

    def logbackConfig(sbtConfig: String) = new File(s"src/$sbtConfig/resources/logback-sandbox.xml")

  }

  def runAsync(port: Int, dars: List[File], scenario: String): Unit => Unit = {
    val context = SandboxContext(port, dars, "main", scenario)
    context.start()

    sys addShutdownHook context.shutdown()
    _ =>
      context.shutdown()
  }

} 
Example 10
Source File: SortingPager.scala    From daml   with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.navigator.query

import com.daml.navigator.dotnot.{OnTreeReady, PropertyCursor}
import com.daml.navigator.model._
import com.daml.navigator.query.project._
import com.typesafe.scalalogging.LazyLogging

import scala.annotation.tailrec

sealed abstract class SortingPager[N <: Node[_]](
    criteria: List[SortCriterion],
    project: OnTreeReady[N, ProjectValue, DamlLfTypeLookup],
    ps: DamlLfTypeLookup
) extends PagerDecorator[N]
    with LazyLogging {

  final override def decorate(page: Page[N], ledger: Ledger): Page[N] =
    page.copy(rows = sort(page.rows), sortedLike = criteria)

  def sort(rows: Seq[N]): Seq[N] =
    rows.sortBy(projectAll)(ordering)

  private val ordering =
    new Ordering[List[Option[ProjectValue]]] {
      val optionProjectValueOrdering = Ordering[Option[ProjectValue]]

      override def compare(l1: List[Option[ProjectValue]], l2: List[Option[ProjectValue]]): Int = {
        @tailrec
        def loop(
            l1: List[Option[ProjectValue]],
            l2: List[Option[ProjectValue]],
            criteria: List[SortCriterion]): Int = {
          l1 match {
            case Nil => 0
            case x1 :: xs1 =>
              val x2 :: xs2 = l2
              val c :: cs = criteria
              optionProjectValueOrdering.compare(x1, x2) match {
                case 0 => loop(xs1, xs2, cs)
                case x =>
                  c.direction match {
                    case SortDirection.DESCENDING => x * (-1)
                    case SortDirection.ASCENDING => x
                  }
              }
          }
        }
        loop(l1, l2, criteria)
      }
    }

  private def projectAll(node: N): List[Option[ProjectValue]] =
    criteria.map(project(node))

  private def project(node: N)(criterion: SortCriterion): Option[ProjectValue] = {
    val cursor = PropertyCursor.fromString(criterion.field)
    project.run(node, cursor, "", ps) match {
      case Left(failure) =>
        logger.error(s"Cannot project $node with criterion $criterion: $failure. Using None.")
        None
      case Right(value) =>
        Some(value)
    }
  }
}

final class ContractSorter(
    val criteria: List[SortCriterion],
    ps: DamlLfTypeLookup,
    val delegate: Pager[Contract])
    extends SortingPager[Contract](criteria, contractProject, ps)

final class TemplateSorter(
    val criteria: List[SortCriterion],
    ps: DamlLfTypeLookup,
    val delegate: Pager[Template])
    extends SortingPager[Template](criteria, templateProject, ps)

final class CommandSorter(
    val criteria: List[SortCriterion],
    ps: DamlLfTypeLookup,
    val delegate: Pager[Command])
    extends SortingPager[Command](criteria, commandProject, ps) 
Example 11
Source File: FilteringPager.scala    From daml   with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.navigator.query

import com.daml.navigator.dotnot.{OnTreeReady, PropertyCursor}
import com.daml.navigator.model._
import com.daml.navigator.query.filter._
import com.typesafe.scalalogging.LazyLogging

sealed abstract class FilteringPager[N <: Node[_]](
    criterion: FilterCriterionBase,
    filter: OnTreeReady[N, Boolean, DamlLfTypeLookup],
    ps: DamlLfTypeLookup
) extends PagerDecorator[N]
    with LazyLogging {

  final override def decorate(page: Page[N], ledger: Ledger): Page[N] = {
    val included = page.rows.filter(isIncluded)
    page.copy(total = included.size, rows = included)
  }

  private[query] def isIncluded(node: N): Boolean = {
    def loop(criterion: FilterCriterionBase): Boolean = {
      criterion match {
        case AndFilterCriterion(criteria) =>
          criteria.forall(loop)
        case OrFilterCriterion(criteria) =>
          criteria.exists(loop)
        case criterion: FilterCriterion =>
          matchCriterion(node)(criterion)
      }
    }
    loop(criterion)
  }

  private[query] def matchCriterion(node: N)(criterion: FilterCriterion): Boolean = {
    val cursor = PropertyCursor.fromString(criterion.field)
    filter.run(node, cursor, criterion.value, ps) match {
      case Left(failure) =>
        logger.error(
          s"Cannot match $node and " +
            s"criterion $criterion: $failure. Excluding it.")
        false
      case Right(isMatching) =>
        isMatching
    }
  }
}

final class ContractFilter(
    criterion: FilterCriterionBase,
    ps: DamlLfTypeLookup,
    val delegate: Pager[Contract])
    extends FilteringPager[Contract](criterion, contractFilter, ps)

final class TemplateFilter(
    criterion: FilterCriterionBase,
    ps: DamlLfTypeLookup,
    val delegate: Pager[Template])
    extends FilteringPager[Template](criterion, templateFilter, ps)

final class CommandFilter(
    criterion: FilterCriterionBase,
    ps: DamlLfTypeLookup,
    val delegate: Pager[Command])
    extends FilteringPager[Command](criterion, commandFilter, ps) 
Example 12
Source File: RetryHelper.scala    From daml   with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.navigator.util

import java.lang.Math.floor

import akka.actor.Scheduler
import akka.pattern.after
import com.daml.grpc.GrpcException
import com.typesafe.scalalogging.LazyLogging

import scala.concurrent.duration._
import scala.concurrent.{ExecutionContext, Future}
import scala.util.control.NonFatal

// The object header was lost in extraction; as in Example 4, the members below
// live in a RetryHelper object. IRetryConfig (an interval/timeout configuration
// trait from the navigator codebase) is assumed to be in scope.
object RetryHelper extends LazyLogging {

  // Assumed alias, as in Example 4, so the snippet compiles standalone
  type RetryStrategy = PartialFunction[Throwable, Boolean]

  val always: RetryStrategy = {
    case NonFatal(_) => true
  }

  val failFastOnPermissionDenied: RetryStrategy = {
    case GrpcException.PERMISSION_DENIED() => false
    case NonFatal(_) => true
  }

  def retry[T](retryConfig: Option[(Scheduler, IRetryConfig)])(retryStrategy: RetryStrategy)(
      f: => Future[T])(implicit ec: ExecutionContext): Future[T] = {
    retryConfig match {
      case None =>
        f
      case Some(rc) =>
        implicit val scheduler: Scheduler = rc._1
        retry(Option(rc._2))(retryStrategy)(f)
    }
  }

  def retry[T](retryConfig: Option[IRetryConfig])(retryStrategy: RetryStrategy)(
      f: => Future[T])(implicit ec: ExecutionContext, s: Scheduler): Future[T] = {
    retryConfig match {
      case None =>
        f
      case Some(rc) =>
        val maxAttempts = floor(rc.timeout / rc.interval).toInt
        retry(maxAttempts, rc.interval)(retryStrategy)(f)
    }
  }

  def retry[T](maxAttempts: Int, delay: FiniteDuration)(retryStrategy: RetryStrategy)(
      f: => Future[T])(implicit ec: ExecutionContext, s: Scheduler): Future[T] = {

    def shouldRetry(n: Int, e: Throwable): Boolean =
      n > 0 && retryStrategy.applyOrElse(e, (_: Throwable) => false)

    val remainingAttempts = maxAttempts - 1 // the next line will trigger a future evaluation

    f.recoverWith {
      case NonFatal(e) if shouldRetry(remainingAttempts, e) =>
        logWarning(remainingAttempts, e)
        after(delay, s)(retry(remainingAttempts, delay)(retryStrategy)(f))
    }
  }

  private def logWarning(remainingAttempts: Int, e: Throwable): Unit = {
    logger.warn(
      s"Retrying after failure. Attempts remaining: $remainingAttempts. Error: ${e.getMessage}")
  }
} 
Example 13
Source File: GraphQLHandler.scala    From daml   with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.navigator

import akka.actor.ActorRef
import akka.http.scaladsl.model.StatusCode
import akka.http.scaladsl.model.StatusCodes._
import com.daml.navigator.graphql._
import com.daml.navigator.graphql.SprayMarshallers._
import com.daml.navigator.model.PartyState
import com.daml.navigator.store.Store.StoreException
import com.typesafe.scalalogging.LazyLogging
import sangria.ast.Document
import sangria.execution._
import sangria.parser.QueryParser
import sangria.renderer.SchemaRenderer
import sangria.schema.Schema
import spray.json._

import scala.concurrent.{ExecutionContext, Future}
import scala.util.Try

case class ParseResult(ast: Document, operationName: Option[String], variables: JsValue)


trait GraphQLHandler {
  def schema: Schema[GraphQLContext, Unit]
  def parse(request: String): Try[ParseResult]
  def parse(request: JsValue): Try[ParseResult]
  def executeQuery(parsed: ParseResult, party: PartyState): Future[(StatusCode, JsValue)]
  def renderSchema: String
}

object GraphQLHandler {
  type ParseQuery = JsValue => Try[ParseResult]
  type ExecuteQuery = (ParseResult, PartyState) => Future[(StatusCode, JsValue)]
  type CustomEndpoints = Set[CustomEndpoint[_]]
}

case class DefaultGraphQLHandler(
    customEndpoints: GraphQLHandler.CustomEndpoints,
    platformStore: Option[ActorRef])(
    implicit executionContext: ExecutionContext
) extends GraphQLHandler
    with LazyLogging {

  def schema: Schema[GraphQLContext, Unit] = new GraphQLSchema(customEndpoints).QuerySchema

  def parse(request: String): Try[ParseResult] =
    Try(request.parseJson).flatMap(parse)

  def parse(request: JsValue): Try[ParseResult] =
    for {
      fields <- Try(request.asJsObject.fields)
      JsString(query) <- Try(fields("query"))
      operationName = fields.get("operationName").collect {
        case JsString(value) => value
      }
      vars: JsValue = fields.get("variables") match {
        case Some(obj: JsObject) => obj
        case _ => JsObject.empty
      }
      ast <- QueryParser.parse(query)
    } yield ParseResult(ast, operationName, vars)

  def executeQuery(parsed: ParseResult, party: PartyState): Future[(StatusCode, JsValue)] = {
    platformStore.fold[Future[(StatusCode, JsValue)]](
      Future.successful(InternalServerError -> JsString("Platform store not available"))
    )(store => {
      val context = GraphQLContext(party, store)
      Executor
        .execute(
          schema,
          parsed.ast,
          context,
          variables = parsed.variables,
          operationName = parsed.operationName,
          exceptionHandler = ExceptionHandler {
            case (_, StoreException(message)) => HandledException(message)
          }
        )
        .map(OK -> _)
        .recover {
          case error: QueryAnalysisError =>
            logger.warn(s"GraphQL analysis error ${error.getMessage}.")
            BadRequest -> error.resolveError
          case error: ErrorWithResolver =>
            logger.error("Failed to execute GraphQL query", error)
            InternalServerError -> error.resolveError
        }
    })
  }

  def renderSchema: String = SchemaRenderer.renderSchema(schema)
} 
Example 14
Source File: BigQueryImporter.scala    From pg2bq   with MIT License
package com.powerspace.pg2bq

import com.google.cloud.bigquery.JobInfo.WriteDisposition
import com.google.cloud.bigquery._
import com.typesafe.scalalogging.LazyLogging
import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}

class BigQueryImporter(spark: SparkSession, tmpBucket: String, dataset: String) extends LazyLogging with DataImporter {

  val bigquery: BigQuery = BigQueryOptions.getDefaultInstance.getService

  // ensure the dataset exists or create it
  getOrCreateDataset(dataset)

  override def createOrOverride(df: DataFrame, tableName: String): Unit = {
    saveIntoGcs(df, tableName)
    loadFromGcsToBq(tableName)
  }

  private def loadFromGcsToBq(tableName: String): Unit = {
    val configuration = LoadJobConfiguration
      .builder(TableId.of(dataset, tableName), s"gs://$tmpBucket/$tableName/*.avro")
      .setFormatOptions(FormatOptions.avro())
      .setWriteDisposition(WriteDisposition.WRITE_TRUNCATE)
      .build()

    val job = bigquery.create(JobInfo.newBuilder(configuration).build())

    logger.info(s"Importing $tableName from bucket $tmpBucket to dataset $dataset...")
    job.waitFor()
    logger.info(s"$tableName import done!")
  }

  private def saveIntoGcs(df: DataFrame, tableName: String): Unit = {
    df.write
      .mode(SaveMode.Overwrite)
      .format("com.databricks.spark.avro")
      .save(s"gs://$tmpBucket/$tableName")
  }

  def getOrCreateDataset(datasetName: String): Dataset = {
    scala.Option(bigquery.getDataset(datasetName)) match {
      case Some(ds) =>
        logger.info(s"Dataset $datasetName already exists.")
        ds
      case None =>
        logger.info(s"Dataset $datasetName does not exist, creating...")
        val ds = bigquery.create(DatasetInfo.of(datasetName))
        logger.info(s"Dataset $datasetName created!")
        ds
    }
  }

} 
Example 15
Source File: Server.scala    From opencensus-scala   with Apache License 2.0
package io.opencensus.scala.examples.akka.http

import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server.Route
import com.typesafe.scalalogging.LazyLogging
import io.opencensus.scala.akka.http.TracingDirective._
import io.opencensus.trace.AttributeValue
import org.slf4j.bridge.SLF4JBridgeHandler

import scala.util.{Failure, Success}

object Server extends App with LazyLogging {
  // Forward java.util.Logging to slf4j
  SLF4JBridgeHandler.removeHandlersForRootLogger()
  SLF4JBridgeHandler.install()

  implicit val system: ActorSystem = ActorSystem()
  import system.dispatcher

  val routes: Route = traceRequest { span =>
    complete {
      val attrValue = AttributeValue.stringAttributeValue("test")
      span.putAttribute("my-attribute", attrValue)
      "Hello opencensus"
    }
  }

  logger.info("Binding...")
  Http().bindAndHandle(routes, "0.0.0.0", 8080).onComplete {
    case Success(bound) =>
      logger.info(s"Bound to ${bound.localAddress}")
    case Failure(e) =>
      logger.error("Failed to bind", e)
  }
} 
Example 16
Source File: TracingDirective.scala    From opencensus-scala   with Apache License 2.0
package io.opencensus.scala.akka.http

import akka.http.scaladsl.model.{HttpHeader, HttpRequest}
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server.{Directive0, Directive1, ExceptionHandler}
import com.typesafe.scalalogging.LazyLogging
import io.opencensus.scala.Tracing
import io.opencensus.scala.akka.http.propagation.AkkaB3FormatPropagation
import io.opencensus.scala.akka.http.trace.EndSpanResponse
import io.opencensus.scala.akka.http.trace.HttpExtractors._
import io.opencensus.scala.http.{HttpAttributes, ServiceAttributes, ServiceData}
import io.opencensus.scala.http.propagation.Propagation
import io.opencensus.trace.{Span, Status}

import scala.util.control.NonFatal

trait TracingDirective extends LazyLogging {

  protected def tracing: Tracing
  protected def propagation: Propagation[HttpHeader, HttpRequest]

  def traceRequestForServiceNoSpan(serviceData: ServiceData): Directive0 =
    traceRequest(serviceData).map(_ => ())

  private def traceRequest(serviceData: ServiceData): Directive1[Span] =
    extractRequest.flatMap { req =>
      val span = buildSpan(req, serviceData)
      recordSuccess(span) & recordException(span) & provide(span)
    }

  private def buildSpan(req: HttpRequest, serviceData: ServiceData): Span = {
    val name = req.uri.path.toString()

    val span = propagation
      .extractContext(req)
      .fold(
        { error =>
          logger.debug("Extracting of parent context failed", error)
          tracing.startSpan(name)
        },
        tracing.startSpanWithRemoteParent(name, _)
      )

    ServiceAttributes.setAttributesForService(span, serviceData)
    HttpAttributes.setAttributesForRequest(span, req)
    span
  }

  private def recordSuccess(span: Span) =
    mapResponse(EndSpanResponse.forServer(tracing, _, span))

  private def recordException(span: Span) =
    handleExceptions(ExceptionHandler {
      case NonFatal(ex) =>
        tracing.endSpan(span, Status.INTERNAL)
        throw ex
    })
}

object TracingDirective extends TracingDirective {
  override protected def tracing: Tracing = Tracing
  override protected def propagation: Propagation[HttpHeader, HttpRequest] =
    AkkaB3FormatPropagation
} 
Example 17
Source File: StatsClient.scala    From opencensus-scala   with Apache License 2.0
package io.opencensus.scala.akka.http

import akka.http.scaladsl.model.{HttpRequest, HttpResponse}
import com.typesafe.scalalogging.LazyLogging
import io.opencensus.scala.Stats
import io.opencensus.scala.akka.http.stats.HttpStats
import io.opencensus.scala.akka.http.utils.ExecuteAfterResponse

import scala.concurrent.{ExecutionContext, Future}

trait StatsClient extends HttpStats with LazyLogging {

  def recorded(
      doRequest: HttpRequest => Future[HttpResponse],
      routeName: String
  )(
      implicit ec: ExecutionContext
  ): HttpRequest => Future[HttpResponse] = req => {
    val start = System.currentTimeMillis()

    doRequest(req).map(response =>
      ExecuteAfterResponse.onComplete(
        response,
        onFinish = () =>
          measureClientRoundtripLatency(
            routeName,
            req.method,
            response.status,
            (System.currentTimeMillis() - start).toDouble
          ).fold(
            error => logger.warn("Failed to measure server latency", error),
            identity
          ),
        onFailure = _ => ()
      )
    )
  }
}

object StatsClient extends StatsClient {
  override private[http] val stats = Stats
} 
Example 18
Source File: StatsDirective.scala    From opencensus-scala   with Apache License 2.0
package io.opencensus.scala.akka.http

import akka.http.scaladsl.model.HttpRequest
import akka.http.scaladsl.server.Directive0
import akka.http.scaladsl.server.Directives._
import com.typesafe.scalalogging.LazyLogging
import io.opencensus.scala.Stats
import io.opencensus.scala.akka.http.stats.HttpStats
import io.opencensus.scala.akka.http.utils.ExecuteAfterResponse

trait StatsDirective extends HttpStats with LazyLogging {

  def recordRequest(routeName: String): Directive0 =
    extractRequest.flatMap { req =>
      val startTime = System.currentTimeMillis()

      record(req, routeName, startTime)
    }

  private def record(req: HttpRequest, routeName: String, startTime: Long) =
    mapResponse(res =>
      ExecuteAfterResponse
        .onComplete(
          res,
          onFinish = () =>
            measureServerLatency(
              routeName,
              req.method,
              res.status,
              (System.currentTimeMillis() - startTime).toDouble
            ).fold(
              error => logger.warn("Failed to measure server latency", error),
              identity
            ),
          onFailure = _ => ()
        )
    )
}

object StatsDirective extends StatsDirective {
  override private[http] val stats = Stats
} 
Example 19
Source File: Stackdriver.scala    From opencensus-scala   with Apache License 2.0
package io.opencensus.scala.trace.exporters

import com.google.auth.oauth2.GoogleCredentials
import com.typesafe.scalalogging.LazyLogging
import io.opencensus.exporter.trace.stackdriver.{
  StackdriverTraceConfiguration,
  StackdriverTraceExporter
}
import io.opencensus.scala.StackdriverTraceExporterConfig

import scala.jdk.CollectionConverters._

private[scala] object Stackdriver extends LazyLogging {

  def init(config: StackdriverTraceExporterConfig): Unit = {
    log(config)
    StackdriverTraceExporter.createAndRegister(buildConfig(config))
  }

  private def buildConfig(
      config: StackdriverTraceExporterConfig
  ): StackdriverTraceConfiguration = {
    import config._

    val stackdriverConfig = StackdriverTraceConfiguration
      .builder()
      .setProjectId(projectId)

    val googleCredentials =
      credentialsFile
        .fold(GoogleCredentials.getApplicationDefault)(path =>
          GoogleCredentials.fromStream(this.getClass.getResourceAsStream(path))
        )
        .createScoped(
          Set(
            "https://www.googleapis.com/auth/cloud-platform",
            "https://www.googleapis.com/auth/trace.append"
          ).asJava
        )
    stackdriverConfig.setCredentials(googleCredentials)
    stackdriverConfig.build()
  }

  private def log(config: StackdriverTraceExporterConfig): Unit = {
    import config._

    val credentialsLogInfo = credentialsFile
      .map(path => s"with credentials file $path")
      .getOrElse("")

    logger.info(
      s"Enabling StackdriverTraceExporter with project id $projectId" + credentialsLogInfo
    )
  }
} 
Example 20
Source File: Instana.scala    From opencensus-scala   with Apache License 2.0
package io.opencensus.scala.trace.exporters

import com.typesafe.scalalogging.LazyLogging
import io.opencensus.exporter.trace.instana.{
  InstanaExporterConfiguration,
  InstanaTraceExporter
}
import io.opencensus.scala.InstanaTraceExporterConfig

private[scala] object Instana extends LazyLogging {

  def init(config: InstanaTraceExporterConfig): Unit = {
    logger.info(
      s"Enabling InstanaTraceExporter with agent endpoint ${config.agentEndpoint}"
    )
    InstanaTraceExporter.createAndRegister(
      InstanaExporterConfiguration
        .builder()
        .setAgentEndpoint(config.agentEndpoint)
        .build()
    )
  }
} 
Example 21
Source File: Zipkin.scala    From opencensus-scala   with Apache License 2.0
package io.opencensus.scala.trace.exporters

import com.typesafe.scalalogging.LazyLogging
import io.opencensus.exporter.trace.zipkin.{
  ZipkinExporterConfiguration,
  ZipkinTraceExporter
}
import io.opencensus.scala.ZipkinTraceExporterConfig

private[scala] object Zipkin extends LazyLogging {

  def init(config: ZipkinTraceExporterConfig): Unit = {
    logger.info(
      s"Enabling ZipkinTraceExporter with url ${config.v2Url} " +
        s"and service name ${config.serviceName}"
    )
    ZipkinTraceExporter.createAndRegister(
      ZipkinExporterConfiguration
        .builder()
        .setV2Url(config.v2Url)
        .setServiceName(config.serviceName)
        .build()
    )
  }
} 
Example 22
Source File: Main.scala    From kafka-configurator   with BSD 3-Clause "New" or "Revised" License
package com.sky.kafka.configurator

import cats.implicits._
import com.sky.BuildInfo
import com.sky.kafka.configurator.error.ConfiguratorFailure
import com.typesafe.scalalogging.LazyLogging
import org.zalando.grafter._

import scala.util.{Failure, Success, Try}

object Main extends LazyLogging {

  def main(args: Array[String]): Unit = {
    logger.info(s"Running ${BuildInfo.name} ${BuildInfo.version} with args: ${args.mkString(", ")}")

    run(args, sys.env) match {
      case Success((errors, infoLogs)) =>
        errors.foreach(e => logger.warn(s"${e.getMessage}. Cause: ${e.getCause.getMessage}"))
        infoLogs.foreach(msg => logger.info(msg))
        if (errors.isEmpty) System.exit(0) else System.exit(1)
      case Failure(t) =>
        logger.error(t.getMessage)
        System.exit(1)
    }
  }

  def run(args: Array[String], envVars: Map[String, String]): Try[(List[ConfiguratorFailure], List[String])] =
    ConfigParsing.parse(args, envVars).flatMap { conf =>
      val app = KafkaConfiguratorApp.reader(conf)
      val result = app.configureTopicsFrom(conf.files.toList)
      stop(app)
      result
    }

  private def stop(app: KafkaConfiguratorApp): Unit =
    Rewriter.stop(app).value.foreach {
      case StopOk(msg) => logger.debug(s"Component stopped: $msg")
      case StopError(msg, ex) => logger.warn(s"Error whilst stopping component: $msg", ex)
      case StopFailure(msg) => logger.warn(s"Failure whilst stopping component: $msg")
    }
} 
Example 23
Source File: TopicConfigurator.scala    From kafka-configurator   with BSD 3-Clause "New" or "Revised" License
package com.sky.kafka.configurator

import cats.Eq
import cats.data.Reader
import cats.instances.int._
import cats.instances.vector._
import cats.instances.try_._
import cats.syntax.eq._
import com.sky.kafka.configurator.error.{ReplicationChangeFound, TopicNotFound}
import com.typesafe.scalalogging.LazyLogging

import scala.util.control.NonFatal
import scala.util.{Failure, Success}

case class TopicConfigurator(topicReader: TopicReader, topicWriter: TopicWriter) extends LazyLogging {

  def configure(topic: Topic): Logger[Unit] =
    topicReader.fetch(topic.name) match {
      case Success(currentTopic) =>
        updateTopic(currentTopic, topic)
      case Failure(TopicNotFound(_)) =>
        topicWriter.create(topic)
          .withLog(s"Topic ${topic.name} was not found, so it has been created")
      case Failure(NonFatal(t)) =>
        Failure(t).asWriter
    }

  private def updateTopic(oldTopic: Topic, newTopic: Topic): Logger[Unit] = {

    def ifDifferent[T: Eq](oldValue: T, newValue: T)(updateOperation: (Topic, Topic) => Logger[Unit])(messageIfSame: String): Logger[Unit] =
      if (oldValue =!= newValue)
        updateOperation(oldTopic, newTopic)
      else
        Success(()).withLog(messageIfSame)

    import TopicConfigurator._

    for {
      _ <- ifDifferent(oldTopic.replicationFactor, newTopic.replicationFactor)(failReplicationChange)(s"Replication factor unchanged for ${newTopic.name}.")
      _ <- ifDifferent(oldTopic.partitions, newTopic.partitions)(updatePartitions)(s"No change in number of partitions for ${newTopic.name}")
      _ <- ifDifferent(oldTopic.config, newTopic.config)(updateConfig)(s"No change in config for ${newTopic.name}")
    } yield ()
  }

  private def failReplicationChange(oldTopic: Topic, newTopic: Topic): Logger[Unit] =
    Failure(ReplicationChangeFound).asWriter

  private def updatePartitions(oldTopic: Topic, newTopic: Topic): Logger[Unit] =
    topicWriter
      .updatePartitions(newTopic.name, newTopic.partitions)
      .withLog(s"Updated topic ${newTopic.name} from ${oldTopic.partitions} to ${newTopic.partitions} partition(s)")

  private def updateConfig(oldTopic: Topic, newTopic: Topic): Logger[Unit] =
    topicWriter
      .updateConfig(newTopic.name, newTopic.config)
      .withLog(s"Updated configuration of topic ${newTopic.name}")
}

object TopicConfigurator {
  def reader: Reader[AppConfig, TopicConfigurator] = KafkaTopicAdmin.reader
    .map(kafkaAdminClient => TopicConfigurator(kafkaAdminClient, kafkaAdminClient))

  private implicit val topicConfigIsContained: Eq[Map[String, String]] = Eq.instance { case (left, right) =>
    left.toList.forall(right.toList.contains(_)) || right.toList.forall(left.toList.contains(_))
  }
} 
Example 24
Source File: LogCollector.scala    From pulse   with Apache License 2.0
package io.phdata.pulse.logcollector

import java.io.FileInputStream
import java.util.Properties
import java.util.concurrent.TimeUnit

import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.stream.{ ActorMaterializer, Materializer }
import com.typesafe.scalalogging.LazyLogging
import io.phdata.pulse.common.SolrService
import io.phdata.pulse.solr.SolrProvider
import org.apache.kudu.client.KuduClient.KuduClientBuilder

import scala.concurrent.duration.Duration
import scala.concurrent.{ Await, Future }
import scala.util.{ Failure, Success }


// The object header was lost in extraction; main and the methods below live in
// an object along these lines:
object LogCollector extends LazyLogging {

  def main(args: Array[String]): Unit = {
    System.getProperty("java.security.auth.login.config") match {
      case null =>
        logger.info(
          "java.security.auth.login.config is not set, continuing without kerberos authentication")
      case _ =>
        KerberosContext.scheduleKerberosLogin(0, 9, TimeUnit.HOURS)
    }

    // start(args) belongs after the match, not inside it, so it runs in both cases
    start(args)
  }

  private def start(args: Array[String]): Unit = {
    val cliParser = new LogCollectorCliParser(args)

    val solrService = SolrProvider.create(cliParser.zkHosts().split(",").toList)
    val solrStream  = new SolrCloudStream(solrService)

    val kuduClient =
      cliParser.kuduMasters.toOption.map(masters =>
        KerberosContext.runPrivileged(new KuduClientBuilder(masters).build()))

    val kuduService =
      kuduClient.map(client => KerberosContext.runPrivileged(new KuduService(client)))

    val routes = new LogCollectorRoutes(solrStream, kuduService)

    cliParser.mode() match {
      case "kafka" => {
        kafka(solrService, cliParser.kafkaProps(), cliParser.topic())
      }
      case _ => {
        http(cliParser.port(), routes)
      }
    }
  }

  // Starts Http Service
  def http(port: Int, routes: LogCollectorRoutes): Future[Unit] = {
    implicit val actorSystem: ActorSystem   = ActorSystem()
    implicit val ec                         = actorSystem.dispatchers.lookup("akka.actor.http-dispatcher")
    implicit val materializer: Materializer = ActorMaterializer.create(actorSystem)

    val httpServerFuture = Http().bindAndHandle(routes.routes, "0.0.0.0", port)(materializer) map {
      binding =>
        logger.info(s"Log Collector interface bound to: ${binding.localAddress}")
    }

    httpServerFuture.onComplete {
      case Success(v) => ()
      case Failure(ex) => {
        logger.error("HTTP server failed, exiting. ", ex)
        System.exit(1)
      }
    }

    Await.ready(
      httpServerFuture,
      Duration.Inf
    )
  }

  // Starts Kafka Consumer
  def kafka(solrService: SolrService, kafkaProps: String, topic: String): Unit = {

    val solrCloudStream = new SolrCloudStream(solrService)

    val kafkaConsumer      = new PulseKafkaConsumer(solrCloudStream)
    val kafkaConsumerProps = new Properties()

    kafkaConsumerProps.load(new FileInputStream(kafkaProps))

    kafkaConsumer.read(kafkaConsumerProps, topic)
  }
} 
Example 25
Source File: LogCollectorRoutes.scala    From pulse   with Apache License 2.0
package io.phdata.pulse.logcollector

import akka.http.scaladsl.common.EntityStreamingSupport
import akka.http.scaladsl.model.{ ContentTypes, HttpEntity, StatusCodes }
import akka.http.scaladsl.server.Directives._
import com.typesafe.scalalogging.LazyLogging
import io.phdata.pulse.common.domain.{ LogEvent, TimeseriesRequest }

// Note: most of this class was lost in extraction; only one POST route survives.
// The class header below is a minimal reconstruction matching the call site in
// Example 24 (new LogCollectorRoutes(solrStream, kuduService)); the JSON
// unmarshalling support behind as[TimeseriesRequest] was also lost.
class LogCollectorRoutes(solrStream: SolrCloudStream, kuduService: Option[KuduService])
    extends LazyLogging {

  val route =
    post {
      // create a streaming Source from the incoming json string
      entity(as[TimeseriesRequest]) { eventRequest =>
        kuduService
          .map { client =>
            eventRequest.payload.map(event => client.save(eventRequest.table_name, List(event)))
          }
          .getOrElse(complete(StatusCodes.NotImplemented))

        complete(HttpEntity(ContentTypes.`application/json`, "ok"))
      }
    }
}
Example 26
Source File: KerberosContext.scala    From pulse   with Apache License 2.0
package io.phdata.pulse.logcollector

import java.security.PrivilegedAction
import java.util.concurrent.TimeUnit

import com.typesafe.scalalogging.LazyLogging
import javax.security.auth.Subject
import javax.security.auth.login.LoginContext
import monix.execution.Cancelable
import monix.execution.Scheduler.{ global => scheduler }

object KerberosContext extends LazyLogging {

  lazy private val loginContext = new LoginContext("Client")
  private var useKerberos       = false

  def scheduleKerberosLogin(initialDelay: Long, delay: Long, timeUnit: TimeUnit): Cancelable = {
    useKerberos = true
    val runnableLogin = new Runnable {
      def run(): Unit =
        login()
    }
    scheduler.scheduleWithFixedDelay(initialDelay, delay, timeUnit, runnableLogin)
  }

  def runPrivileged[W](work: => W): W =
    if (useKerberos) {
      Subject.doAs(
        loginContext.getSubject,
        new PrivilegedAction[W]() {
          override def run: W = {
            logger.debug("Privileged block started")
            val result = work
            logger.debug("Privileged block complete")
            result
          }
        }
      )
    } else {
      logger.debug("Kerberos disabled. To enable kerberos call the `scheduleKerberosLogin` method.")
      work
    }

  private def login(): Unit = {
    loginContext.login()
    logger.info(s"Logged in with kerberos configuration:\n$getSubject")
  }

  private def getSubject: String =
    if (loginContext.getSubject == null) {
      throw new Exception("Subject for LoginContext is null")
    } else {
      loginContext.getSubject.toString
    }
} 
Example 27
Source File: KuduService.scala    From pulse   with Apache License 2.0
package io.phdata.pulse.logcollector

import java.util.{ ArrayList, Collections }

import com.typesafe.scalalogging.LazyLogging
import io.phdata.pulse.common.domain.TimeseriesEvent
import org.apache.kudu.client.SessionConfiguration.FlushMode
import org.apache.kudu.client.{ CreateTableOptions, KuduClient, KuduException, KuduTable }
import org.apache.kudu.{ ColumnSchema, Schema, Type }

import scala.collection.concurrent

object TimeseriesEventColumns {
  val TIMESTAMP = "ts"
  val KEY       = "key"
  val TAG       = "tag"
  val VALUE     = "value"
}

// The class header and other members (e.g. the save method used in Example 25)
// were lost in extraction; getOrCreateTable belongs to a class along these
// lines, which caches open Kudu tables by name:
class KuduService(client: KuduClient) extends LazyLogging {

  private val tableCache: concurrent.Map[String, KuduTable] =
    scala.collection.concurrent.TrieMap()

  private[logcollector] def getOrCreateTable(tableName: String): KuduTable =
    KerberosContext.runPrivileged {
      if (tableCache.contains(tableName)) {
        tableCache(tableName)
      } else if (!client.tableExists(tableName)) {
        logger.info(s"Kudu table not found: $tableName")
        val columns = new ArrayList[ColumnSchema]
        columns.add(
          new ColumnSchema.ColumnSchemaBuilder(TimeseriesEventColumns.TIMESTAMP,
                                               Type.UNIXTIME_MICROS).key(true).build)
        columns.add(
          new ColumnSchema.ColumnSchemaBuilder(TimeseriesEventColumns.KEY, Type.STRING)
            .key(true)
            .build)
        columns.add(
          new ColumnSchema.ColumnSchemaBuilder(TimeseriesEventColumns.TAG, Type.STRING)
            .key(true)
            .build)
        columns.add(
          new ColumnSchema.ColumnSchemaBuilder(TimeseriesEventColumns.VALUE, Type.DOUBLE)
            .key(false)
            .build)
        val schema = new Schema(columns)
        val opts = new CreateTableOptions()
          .setRangePartitionColumns(Collections.singletonList(TimeseriesEventColumns.TIMESTAMP))
          .addHashPartitions(Collections.singletonList(TimeseriesEventColumns.KEY), 4)
        val table = client.createTable(tableName, schema, opts)
        tableCache.put(tableName, table)
        logger.info(s"Created Kudu table $tableName")
        table
      } else {
        val table = client.openTable(tableName)
        tableCache.put(tableName, table)
        table
      }
    }
} 
Example 28
Source File: PulseKafkaConsumer.scala    From pulse   with Apache License 2.0 5 votes vote down vote up
package io.phdata.pulse.logcollector

import java.util.{ Collections, Properties }

import org.apache.kafka.clients.consumer.KafkaConsumer
import com.typesafe.scalalogging.LazyLogging
import spray.json.JsonParser.ParsingException

import scala.collection.JavaConverters._
import spray.json._


class PulseKafkaConsumer(solrCloudStream: SolrCloudStream) extends JsonSupport with LazyLogging {
  val MAX_TIMEOUT = 100

  def read(consumerProperties: Properties, topic: String): Unit = {
    val consumer = new KafkaConsumer[String, String](consumerProperties)

    consumer.subscribe(Collections.singletonList(topic))

    while (true) {
      try {
        val records = consumer.poll(MAX_TIMEOUT)
        for (record <- records.asScala) {
          logger.trace("KAFKA: Consuming " + record.value() + " from topic: " + topic)
          val logEventMap = record.value().parseJson.convertTo[Map[String, String]]
          solrCloudStream.put(logEventMap.getOrElse("application", ""), logEventMap)
          // TODO: Add proper error handling when application isn't supplied
        }
      } catch {
        case p: ParsingException => logger.error("Error parsing message from kafka broker", p)
        case e: Exception        => logger.error("Error consuming messages from kafka broker", e)
      }
    }
  }
} 
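A sketch of the consumer properties that `read` expects; the broker address, group id and topic are placeholders, and the string deserializers match the KafkaConsumer[String, String] constructed above:

import java.util.Properties

val consumerProperties = new Properties()
consumerProperties.put("bootstrap.servers", "localhost:9092") // placeholder broker
consumerProperties.put("group.id", "pulse-log-collector")     // placeholder group
consumerProperties.put("key.deserializer",
  "org.apache.kafka.common.serialization.StringDeserializer")
consumerProperties.put("value.deserializer",
  "org.apache.kafka.common.serialization.StringDeserializer")

// new PulseKafkaConsumer(solrCloudStream).read(consumerProperties, "pulse-logs")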
Example 29
Source File: NotificationMain.scala    From pulse   with Apache License 2.0 5 votes vote down vote up
package io.phdata.pulse.alertengine.notification

import com.typesafe.scalalogging.LazyLogging
import io.phdata.pulse.alertengine.{ AlertRule, MailAlertProfile, TriggeredAlert }


object NotificationMain extends LazyLogging {
  def main(args: Array[String]): Unit = {
    println(
      s"args: <smtp server> <smtp server port> <smtp-tls> <username> <password> <email addresses>")

    val password = if (args(4) == "") None else Some(args(4))
    val service =
      new MailNotificationService(args(0), args(1).toLong, args(3), password, args(2).toBoolean)
    val triggeredAlert =
      TriggeredAlert(AlertRule("query", 1, None, List()), "", Seq(), 1)
    val mailProfile = MailAlertProfile("b", List(args(5)))
    service.notify(Seq(triggeredAlert), mailProfile)
  }
} 
Example 30
Source File: MailNotificationService.scala    From pulse   with Apache License 2.0 5 votes vote down vote up
package io.phdata.pulse.alertengine.notification

import com.typesafe.scalalogging.LazyLogging
import io.phdata.pulse.alertengine.{ MailAlertProfile, TriggeredAlert }

class MailNotificationService(smtpServer: String,
                              port: Long = 25,
                              username: String,
                              password: Option[String],
                              use_smtp_tls: Boolean)
    extends LazyLogging {
  def notify(alerts: Iterable[TriggeredAlert], profile: MailAlertProfile): Unit = {
    logger.info(s"starting notification for profile ${profile.name}")
    val mailer = new Mailer(smtpServer, port, username, password, use_smtp_tls)
    for (alert <- alerts) {
      val formattedBody    = NotificationFormatter.formatMessage(alert)
      val formattedSubject = NotificationFormatter.formatSubject(alert)
      logger.info(s"sending alert")
      mailer.sendMail(profile.addresses, formattedSubject, formattedBody)
    }
  }
} 
Example 31
Source File: SlackNotificationService.scala    From pulse   with Apache License 2.0 5 votes vote down vote up
package io.phdata.pulse.alertengine.notification

import com.typesafe.scalalogging.LazyLogging
import io.phdata.pulse.alertengine.{ SlackAlertProfile, TriggeredAlert }
import org.apache.http.client.methods.HttpPost
import org.apache.http.entity.StringEntity
import org.apache.http.impl.client.DefaultHttpClient

class SlackNotificationService() extends LazyLogging {
  def notify(alerts: Iterable[TriggeredAlert], profile: SlackAlertProfile): Unit =
    for (alert <- alerts) {
      val formattedBody    = NotificationFormatter.formatMessage(alert)
      val formattedSubject = NotificationFormatter.formatSubject(alert)
      sendSlackMsg(profile.url, formattedSubject + formattedBody)
    }

  def sendSlackMsg(url: String, message: String): Unit = {
    val httpClient = new DefaultHttpClient()
    try {
      val httpPost = new HttpPost(url)
      // A raw line break is not valid inside a JSON string literal, so the
      // separator is escaped instead of embedded directly.
      val jsonStr = s"""{"text": "$message\\n---------------------------------------------"}"""
      val entity = new StringEntity(jsonStr)
      httpPost.setEntity(entity)
      httpPost.setHeader("Content-type", "application/json")
      httpClient.execute(httpPost)
    } catch {
      case e: Exception =>
        logger.error(s"Error sending slack message $message", e)
    } finally {
      httpClient.getConnectionManager.shutdown()
    }
  }
} 
Example 32
Source File: Mailer.scala    From pulse   with Apache License 2.0 5 votes vote down vote up
package io.phdata.pulse.alertengine.notification

import javax.mail._
import java.util.{ Date, Properties }
import javax.mail.internet.{ InternetAddress, MimeMessage }
import com.typesafe.scalalogging.LazyLogging

class Mailer(smtpServer: String,
             port: Long = 25,
             username: String,
             password: Option[String],
             use_smtp_tls: Boolean)
    extends LazyLogging {
  private val props = new Properties()
  props.put("mail.smtp.host", smtpServer)
  props.put("mail.smtp.port", port.toString)
  if (use_smtp_tls) {
    props.put("mail.smtp.starttls.enable", "true")
  }

  def sendMail(addresses: List[String], subject: String, body: String): Unit = {
    val session = password.fold {
      logger.info("no password supplied, skipping authentication")
      props.put("mail.smtp.auth", "false")
      Session.getInstance(props)
    } { password =>
      logger.info("authenticating with password")
      val auth = new Authenticator {
        override def getPasswordAuthentication = new PasswordAuthentication(username, password)
      }
      props.put("mail.smtp.auth", "true")
      Session.getInstance(props, auth)
    }

    val message: Message = new MimeMessage(session)
    message.setFrom(new InternetAddress(username))
    message.setSentDate(new Date())
    addresses.foreach { a =>
      message.addRecipients(Message.RecipientType.TO,
                            InternetAddress.parse(a).asInstanceOf[Array[Address]])
    }
    message.setSubject(subject)
    message.setText(body)
    Transport.send(message)
  }
} 
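A hedged usage sketch; the host, credentials and addresses are placeholders:

val mailer = new Mailer(
  smtpServer = "smtp.example.com",  // placeholder host
  port = 587,
  username = "alerts@example.com",  // also used as the From address above
  password = Some("secret"),        // None skips SMTP authentication
  use_smtp_tls = true)

mailer.sendMail(
  addresses = List("oncall@example.com"),
  subject = "Test alert",
  body = "Hello from the alert engine")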
Example 33
Source File: Logging.scala    From gatling-amqp   with MIT License 5 votes vote down vote up
package io.gatling.amqp.infra

import akka.actor._
import pl.project13.scala.rainbow._
import com.typesafe.scalalogging.LazyLogging

trait Logging extends Actor with LazyLogging {
  protected lazy val className = getClass.getSimpleName
  protected val log = logger // gap between LazyLogging and ActorLogging
  protected def stopMessage: String = ""

  override def preStart(): Unit = {
    super.preStart()
    log.info(s"amqp: Start actor `$className'".yellow)
  }

  override def postStop(): Unit = {
    log.info(s"amqp: Stop actor `$className' $stopMessage".yellow)
    super.postStop()
  }
} 
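A minimal actor mixing in the trait above; the actor name and message handling are illustrative:

class AmqpPublisher extends Logging {
  override protected def stopMessage: String = "(queue drained)"

  override def receive: Receive = {
    case msg => log.info(s"received: $msg")
  }
}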
Example 34
Source File: ClickhouseSink.scala    From clickhouse-scala-client   with GNU Lesser General Public License v3.0 5 votes vote down vote up
package com.crobox.clickhouse.stream

import akka.Done
import akka.stream.scaladsl.{Flow, Keep, Sink}
import com.crobox.clickhouse.ClickhouseClient
import com.crobox.clickhouse.internal.QuerySettings
import com.typesafe.config.Config
import com.typesafe.scalalogging.LazyLogging

import scala.concurrent.duration._
import scala.concurrent.{ExecutionContext, Future}

case class ClickhouseIndexingException(msg: String, cause: Throwable, payload: Seq[String], table: String)
    extends RuntimeException(msg, cause)
case class Insert(table: String, jsonRow: String)

object ClickhouseSink extends LazyLogging {

  def insertSink(config: Config, client: ClickhouseClient, indexerName: Option[String] = None)(
      implicit ec: ExecutionContext,
      settings: QuerySettings = QuerySettings()
  ): Sink[Insert, Future[Done]] = {
    val indexerGeneralConfig = config.getConfig("crobox.clickhouse.indexer")
    val mergedIndexerConfig = indexerName
      .flatMap(
        theIndexName =>
          if (indexerGeneralConfig.hasPath(theIndexName))
            Some(indexerGeneralConfig.getConfig(theIndexName).withFallback(indexerGeneralConfig))
          else None
      )
      .getOrElse(indexerGeneralConfig)
    Flow[Insert]
      .groupBy(Int.MaxValue, _.table)
      .groupedWithin(mergedIndexerConfig.getInt("batch-size"),
                     mergedIndexerConfig.getDuration("flush-interval").getSeconds.seconds)
      .mapAsyncUnordered(mergedIndexerConfig.getInt("concurrent-requests"))(inserts => {
        val table       = inserts.head.table
        val insertQuery = s"INSERT INTO $table FORMAT JSONEachRow"
        val payload     = inserts.map(_.jsonRow)
        val payloadSql  = payload.mkString("\n")
        client.execute(insertQuery, payloadSql) recover {
          case ex =>
            throw ClickhouseIndexingException("failed to index", ex, payload, table)
        } map (_ => inserts)
      })
      .mergeSubstreams
      .toMat(Sink.ignore)(Keep.right)
  }
} 
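A usage sketch for the sink; the client wiring, table name and JSON row are assumptions, and the indexer settings are expected under `crobox.clickhouse.indexer` in the supplied config:

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.Source

implicit val system: ActorSystem = ActorSystem()
implicit val materializer: ActorMaterializer = ActorMaterializer()
import system.dispatcher

val client: ClickhouseClient = ??? // assumed to be constructed from application config

val done = Source
  .single(Insert("my_db.events", """{"ts":1577836800,"value":1.0}"""))
  .runWith(ClickhouseSink.insertSink(system.settings.config, client))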
Example 35
Source File: ClusterConnectionFlow.scala    From clickhouse-scala-client   with GNU Lesser General Public License v3.0 5 votes vote down vote up
package com.crobox.clickhouse.balancing.discovery.cluster

import akka.actor.{ActorSystem, Cancellable}
import akka.http.scaladsl.Http
import akka.http.scaladsl.model.Uri
import akka.http.scaladsl.settings.ConnectionPoolSettings
import akka.stream.Materializer
import akka.stream.scaladsl.Source
import com.crobox.clickhouse.balancing.discovery.ConnectionManagerActor.Connections
import com.crobox.clickhouse.internal.QuerySettings.ReadQueries
import com.crobox.clickhouse.internal.{ClickhouseHostBuilder, ClickhouseQueryBuilder, ClickhouseResponseParser, QuerySettings}
import com.typesafe.scalalogging.LazyLogging

import scala.concurrent.duration._
import scala.concurrent.{ExecutionContext, Future}

private[clickhouse] object ClusterConnectionFlow
    extends ClickhouseQueryBuilder
    with ClickhouseResponseParser
    with LazyLogging {

  def clusterConnectionsFlow(
      targetHost: => Future[Uri],
      scanningInterval: FiniteDuration,
      cluster: String
  )(implicit system: ActorSystem,
    materializer: Materializer,
    ec: ExecutionContext): Source[Connections, Cancellable] = {
    val http = Http(system)
    val settings = ConnectionPoolSettings(system)
      .withMaxConnections(1)
      .withMinConnections(1)
      .withMaxOpenRequests(2)
      .withMaxRetries(3)
      .withUpdatedConnectionSettings(
        _.withIdleTimeout(scanningInterval.plus(1.second))
      )
    Source
      .tick(0.millis, scanningInterval, {})
      .mapAsync(1)(_ => targetHost)
      .mapAsync(1)(host => {
        val query = s"SELECT host_address FROM system.clusters WHERE cluster='$cluster'"
        val request =
          toRequest(host, query, None, QuerySettings(readOnly = ReadQueries, idempotent = Some(true)), None)(
            system.settings.config
          )
        processClickhouseResponse(http.singleRequest(request, settings = settings), query, host, None)
          .map(splitResponse)
          .map(_.toSet.filter(_.nonEmpty))
          .map(result => {
            if (result.isEmpty) {
              throw new IllegalArgumentException(
                s"Could not determine clickhouse cluster hosts for cluster $cluster and host $host. " +
                s"This could indicate that you are trying to use the cluster balancer to connect to a non cluster based clickhouse server. " +
                s"Please use the `SingleHostQueryBalancer` in that case."
              )
            }
            Connections(result.map(ClickhouseHostBuilder.toHost(_, Some(8123))))
          })
      })
  }
} 
Example 36
Source File: HostBalancer.scala    From clickhouse-scala-client   with GNU Lesser General Public License v3.0 5 votes vote down vote up
package com.crobox.clickhouse.balancing

import akka.actor.ActorSystem
import akka.http.scaladsl.model._
import akka.stream.Materializer
import com.crobox.clickhouse.balancing.Connection.{BalancingHosts, ClusterAware, ConnectionType, SingleHost}
import com.crobox.clickhouse.balancing.discovery.ConnectionManagerActor
import com.crobox.clickhouse.balancing.discovery.health.ClickhouseHostHealth
import com.crobox.clickhouse.internal.ClickhouseHostBuilder
import com.typesafe.config.Config
import com.typesafe.scalalogging.LazyLogging

import scala.collection.JavaConverters._
import scala.concurrent.duration._
import scala.concurrent.{ExecutionContext, Future}

trait HostBalancer extends LazyLogging {

  def nextHost: Future[Uri]

}

object HostBalancer extends ClickhouseHostBuilder {

  def apply(
      optionalConfig: Option[Config] = None
  )(implicit system: ActorSystem, materializer: Materializer, ec: ExecutionContext): HostBalancer = {
    val config = optionalConfig.getOrElse(system.settings.config)
    val connectionConfig = config.getConfig("connection")
    val connectionType           = ConnectionType(connectionConfig.getString("type"))
    val connectionHostFromConfig = extractHost(connectionConfig)
    connectionType match {
      case SingleHost => SingleHostBalancer(connectionHostFromConfig)
      case BalancingHosts =>
        val manager = system.actorOf(
          ConnectionManagerActor
            .props(ClickhouseHostHealth.healthFlow(_))
        )
        MultiHostBalancer(connectionConfig
                            .getConfigList("hosts")
                            .asScala
                            .toSet
                            .map((config: Config) => extractHost(config)),
                          manager)
      case ClusterAware =>
        val manager = system.actorOf(
          ConnectionManagerActor.props(ClickhouseHostHealth.healthFlow(_))
        )
        ClusterAwareHostBalancer(
          connectionHostFromConfig,
          connectionConfig.getString("cluster"),
          manager,
          connectionConfig.getDuration("scanning-interval").getSeconds.seconds
        )(system,
          config.getDuration("host-retrieval-timeout").getSeconds.seconds,
          ec,
          materializer)
    }
  }

  def extractHost(connectionConfig: Config): Uri =
    toHost(connectionConfig.getString("host"),
           if (connectionConfig.hasPath("port")) Option(connectionConfig.getInt("port")) else None)
} 
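A sketch of the `connection` configuration block the factory reads; the keys mirror the getString/getConfigList calls above, while the `type` string values are assumptions:

import com.typesafe.config.ConfigFactory

val balancerConfig = ConfigFactory.parseString(
  """connection {
    |  type = "balancing-hosts"   // or "single-host" / "cluster-aware" (assumed names)
    |  hosts = [
    |    { host = "ch1.example.com", port = 8123 },
    |    { host = "ch2.example.com", port = 8123 }
    |  ]
    |}""".stripMargin)

// val balancer = HostBalancer(Some(balancerConfig)) // needs implicit system/materializer/ec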
Example 37
Source File: ClickhouseQueryBuilder.scala    From clickhouse-scala-client   with GNU Lesser General Public License v3.0 5 votes vote down vote up
package com.crobox.clickhouse.internal

import akka.http.scaladsl.model.Uri.Query
import akka.http.scaladsl.model.headers.{HttpEncodingRange, RawHeader}
import akka.http.scaladsl.model.{HttpMethods, HttpRequest, RequestEntity, Uri}
import com.crobox.clickhouse.internal.QuerySettings.ReadQueries
import com.crobox.clickhouse.internal.progress.ProgressHeadersAsEventsStage
import com.typesafe.config.Config
import com.typesafe.scalalogging.LazyLogging

import scala.collection.immutable

private[clickhouse] trait ClickhouseQueryBuilder extends LazyLogging {

  private val Headers = {
    import HttpEncodingRange.apply
    import akka.http.scaladsl.model.headers.HttpEncodings.{deflate, gzip}
    import akka.http.scaladsl.model.headers.`Accept-Encoding`
    immutable.Seq(`Accept-Encoding`(gzip, deflate))
  }
  private val MaxUriSize = 16 * 1024

  protected def toRequest(uri: Uri,
                          query: String,
                          queryIdentifier: Option[String],
                          settings: QuerySettings,
                          entity: Option[RequestEntity])(config: Config): HttpRequest = {
    val urlQuery = uri.withQuery(Query(Query("query" -> query) ++ settings.withFallback(config).asQueryParams: _*))
    entity match {
      case Some(e) =>
        logger.debug(s"Executing clickhouse query [$query] on host [${uri
          .toString()}] with entity payload of length ${e.contentLengthOption}")
        HttpRequest(
          method = HttpMethods.POST,
          uri = urlQuery,
          entity = e,
          headers = Headers ++ queryIdentifier.map(RawHeader(ProgressHeadersAsEventsStage.InternalQueryIdentifier, _))
        )
      case None
          if settings.idempotent.contains(true) && settings.readOnly == ReadQueries && urlQuery
            .toString()
            .getBytes
            .length < MaxUriSize => //max url size
        logger.debug(s"Executing clickhouse idempotent query [$query] on host [${uri.toString()}]")
        HttpRequest(
          method = HttpMethods.GET,
          uri = urlQuery.withQuery(
            urlQuery
              .query()
              .filterNot(
                _._1 == "readonly"
              ) //get requests are readonly by default, if we send the readonly flag clickhouse will fail the request
          ),
          headers = Headers ++ queryIdentifier.map(RawHeader(ProgressHeadersAsEventsStage.InternalQueryIdentifier, _))
        )
      case None =>
        logger.debug(s"Executing clickhouse query [$query] on host [${uri.toString()}]")
        HttpRequest(
          method = HttpMethods.POST,
          uri = uri.withQuery(settings.withFallback(config).asQueryParams),
          entity = query,
          headers = Headers ++ queryIdentifier.map(RawHeader(ProgressHeadersAsEventsStage.InternalQueryIdentifier, _))
        )
    }
  }

} 
Example 38
Source File: QueryProgress.scala    From clickhouse-scala-client   with GNU Lesser General Public License v3.0 5 votes vote down vote up
package com.crobox.clickhouse.internal.progress
import akka.NotUsed
import akka.stream.scaladsl.{BroadcastHub, Keep, RunnableGraph, Source, SourceQueueWithComplete}
import akka.stream.{ActorAttributes, OverflowStrategy, Supervision}
import com.typesafe.scalalogging.LazyLogging
import spray.json._
import spray.json.DefaultJsonProtocol._
import scala.util.{Failure, Success, Try}

object QueryProgress extends LazyLogging {

  sealed trait QueryProgress
  case object QueryAccepted                                 extends QueryProgress
  case object QueryFinished                                 extends QueryProgress
  case object QueryRejected                                 extends QueryProgress
  case class QueryFailed(cause: Throwable)                  extends QueryProgress
  case class QueryRetry(cause: Throwable, retryNumber: Int) extends QueryProgress

  case class ClickhouseQueryProgress(identifier: String, progress: QueryProgress)
  case class Progress(rowsRead: Long, bytesRead: Long, rowsWritten: Long, bytesWritten: Long, totalRows: Long) extends QueryProgress

  def queryProgressStream: RunnableGraph[(SourceQueueWithComplete[String], Source[ClickhouseQueryProgress, NotUsed])] =
    Source
      .queue[String](1000, OverflowStrategy.dropHead)
      .map[Option[ClickhouseQueryProgress]](queryAndProgress => {
        queryAndProgress.split("\n", 2).toList match {
          case queryId :: ProgressHeadersAsEventsStage.AcceptedMark :: Nil =>
            Some(ClickhouseQueryProgress(queryId, QueryAccepted))
          case queryId :: progressJson :: Nil =>
            Try {
              progressJson.parseJson match {
                case JsObject(fields) if fields.size == 3 =>
                  ClickhouseQueryProgress(
                    queryId,
                    Progress(
                        fields("read_rows").convertTo[String].toLong,
                        fields("read_bytes").convertTo[String].toLong,
                        0,
                        0,
                        fields("total_rows").convertTo[String].toLong
                    )
                  )
                case JsObject(fields) if fields.size == 5 =>
                  ClickhouseQueryProgress(
                    queryId,
                    Progress(
                      fields("read_rows").convertTo[String].toLong,
                      fields("read_bytes").convertTo[String].toLong,
                      fields("written_rows").convertTo[String].toLong,
                      fields("written_bytes").convertTo[String].toLong,
                      fields("total_rows_to_read").convertTo[String].toLong
                    )
                  )
                case _ => throw new IllegalArgumentException(s"Cannot extract progress from $progressJson")
              }
            } match {
              case Success(value) => Some(value)
              case Failure(exception) =>
                logger.warn(s"Failed to parse json $progressJson", exception)
                None
            }
          case other @ _ =>
            logger.warn(s"Could not get progress from $other")
            None

        }
      })
      .collect {
        case Some(progress) => progress
      }
      .withAttributes(ActorAttributes.supervisionStrategy({
        case ex @ _ =>
          logger.warn("Detected failure in the query progress stream, resuming operation.", ex)
          Supervision.Resume
      }))
      .toMat(BroadcastHub.sink)(Keep.both)
} 
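A usage sketch that materializes the stream and feeds it one accepted-query event; the query id is a placeholder:

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.Sink

implicit val system: ActorSystem = ActorSystem()
implicit val materializer: ActorMaterializer = ActorMaterializer()

// Materializes the input queue and the broadcast source of parsed progress events.
val (queue, progressSource) = QueryProgress.queryProgressStream.run()

progressSource.runWith(Sink.foreach(println))

// Each queued element is "<queryId>\n<payload>"; the accepted mark comes from the progress stage.
queue.offer("query-1\n" + ProgressHeadersAsEventsStage.AcceptedMark)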
Example 39
Source File: RokkuS3Proxy.scala    From rokku   with Apache License 2.0 5 votes vote down vote up
package com.ing.wbaa.rokku.proxy

import akka.Done
import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.http.scaladsl.server.Directives._
import akka.stream.ActorMaterializer
import com.ing.wbaa.rokku.proxy.api.{ AdminService, HealthService, PostRequestActions, ProxyServiceWithListAllBuckets }
import com.ing.wbaa.rokku.proxy.config.HttpSettings
import com.typesafe.scalalogging.LazyLogging

import scala.concurrent.{ ExecutionContext, Future }
import scala.util.{ Failure, Success }

trait RokkuS3Proxy extends LazyLogging with ProxyServiceWithListAllBuckets with PostRequestActions with HealthService with AdminService {

  protected[this] implicit def system: ActorSystem
  implicit val materializer: ActorMaterializer = ActorMaterializer()(system)

  protected[this] def httpSettings: HttpSettings

  protected[this] implicit val executionContext: ExecutionContext = system.dispatcher

  // The routes we serve.
  final val allRoutes = adminRoute ~ healthRoute ~ proxyServiceRoute

  // Details about the server binding.
  lazy val startup: Future[Http.ServerBinding] =
    Http(system).bindAndHandle(allRoutes, httpSettings.httpBind, httpSettings.httpPort)
      .andThen {
        case Success(binding) => logger.info(s"Proxy service started listening: ${binding.localAddress}")
        case Failure(reason)  => logger.error("Proxy service failed to start.", reason)
      }

  def shutdown(): Future[Done] = {
    startup.flatMap(_.unbind)
      .andThen {
        case Success(_)      => logger.info("Proxy service stopped.")
        case Failure(reason) => logger.error("Proxy service failed to stop.", reason)
      }
  }
} 
Example 40
Source File: HealthService.scala    From rokku   with Apache License 2.0 5 votes vote down vote up
package com.ing.wbaa.rokku.proxy.api

import akka.http.scaladsl.model.StatusCodes
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server.{ Route, StandardRoute }
import com.ing.wbaa.rokku.proxy.data.HealthCheck.{ RGWListBuckets, S3ListBucket }
import com.ing.wbaa.rokku.proxy.handler.radosgw.RadosGatewayHandler
import com.ing.wbaa.rokku.proxy.provider.aws.S3Client
import com.typesafe.scalalogging.LazyLogging
import java.util.concurrent.ConcurrentHashMap

import scala.collection.JavaConverters._
import scala.collection.mutable
import scala.concurrent.{ ExecutionContext, Future }
import scala.util.{ Failure, Success, Try }

object HealthService {
  private def timestamp: Long = System.currentTimeMillis()

  private val statusMap = new ConcurrentHashMap[Long, StandardRoute]()

  private def clearStatus(): Unit = statusMap.clear()

  private def addStatus(probeResult: StandardRoute): StandardRoute = statusMap.put(timestamp, probeResult)

  private def getCurrentStatusMap: Future[mutable.Map[Long, StandardRoute]] = Future.successful(statusMap.asScala)

  private def getRouteStatus(implicit ec: ExecutionContext): Future[Option[StandardRoute]] = getCurrentStatusMap.map(_.headOption.map { case (_, r) => r })

}

trait HealthService extends RadosGatewayHandler with S3Client with LazyLogging {

  protected[this] implicit def executionContext: ExecutionContext

  import HealthService.{ addStatus, getCurrentStatusMap, clearStatus, getRouteStatus, timestamp }

  private lazy val interval = storageS3Settings.hcInterval

  private def updateStatus: Future[StandardRoute] = Future {
    clearStatus()
    storageS3Settings.hcMethod match {
      case RGWListBuckets => addStatus(execProbe(listAllBuckets _))
      case S3ListBucket   => addStatus(execProbe(listBucket _))
    }
  }
  private def updateStatusAndGet: Future[Option[StandardRoute]] =
    for {
      _ <- updateStatus
      s <- getRouteStatus
    } yield s

  def getStatus(currentTime: Long): Future[Option[StandardRoute]] =
    getCurrentStatusMap.flatMap(_ match {
      case m if m.isEmpty =>
        logger.debug("Status cache empty, running probe")
        updateStatusAndGet
      case m => m.keys.map {
        case entryTime if (entryTime + interval) < currentTime =>
          logger.debug("Status entry expired, renewing")
          updateStatusAndGet
        case _ =>
          logger.debug("Serving status from cache")
          Future.successful(m.map { case (_, r) => r }.headOption)
      }.head
    })

  private def execProbe[A](p: () => A): StandardRoute =
    Try {
      p()
    } match {
      case Success(_)  => complete("pong")
      case Failure(ex) => complete(StatusCodes.InternalServerError -> s"storage not available - $ex")
    }

  final val healthRoute: Route =
    path("ping") {
      get {
        onComplete(getStatus(timestamp)) {
          case Success(opt) => opt.getOrElse(complete(StatusCodes.InternalServerError -> "Failed to read status cache"))
          case Failure(e)   => complete(StatusCodes.InternalServerError -> "Failed to read status cache " + e.getMessage)
        }
      }
    }
} 
Example 41
Source File: S3Request.scala    From rokku   with Apache License 2.0 5 votes vote down vote up
package com.ing.wbaa.rokku.proxy.data

import akka.http.scaladsl.model.RemoteAddress.Unknown
import akka.http.scaladsl.model.Uri.Path
import akka.http.scaladsl.model.{ HttpMethod, MediaType, MediaTypes, RemoteAddress }
import com.ing.wbaa.rokku.proxy.util.S3Utils
import com.typesafe.scalalogging.LazyLogging


case class S3Request(
    credential: AwsRequestCredential,
    s3BucketPath: Option[String],
    s3Object: Option[String],
    accessType: AccessType,
    clientIPAddress: RemoteAddress = Unknown,
    headerIPs: HeaderIPs = HeaderIPs(),
    mediaType: MediaType = MediaTypes.`text/plain`
) {
  def userIps: UserIps = UserIps(clientIPAddress, headerIPs)
}

object S3Request extends LazyLogging {

  def apply(credential: AwsRequestCredential, path: Path, httpMethod: HttpMethod,
      clientIPAddress: RemoteAddress, headerIPs: HeaderIPs, mediaType: MediaType): S3Request = {

    val pathString = path.toString()
    val s3path = S3Utils.getS3PathWithoutBucketName(pathString)
    val s3Object = S3Utils.getS3FullObjectPath(pathString)

    val accessType = httpMethod.value match {
      case "GET"    => Read(httpMethod.value)
      case "HEAD"   => Head(httpMethod.value)
      case "PUT"    => Put(httpMethod.value)
      case "POST"   => Post(httpMethod.value)
      case "DELETE" => Delete(httpMethod.value)
      case _ =>
        logger.debug("HttpMethod not supported")
        NoAccess
    }

    S3Request(credential, s3path, s3Object, accessType, clientIPAddress, headerIPs, mediaType)
  }
} 
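A construction sketch showing how the apply method classifies a request; the credential is left abstract since AwsRequestCredential is defined elsewhere in the project:

import akka.http.scaladsl.model.{ HttpMethods, MediaTypes, RemoteAddress }
import akka.http.scaladsl.model.Uri.Path

val credential: AwsRequestCredential = ??? // placeholder

val s3Request = S3Request(
  credential,
  Path("/my-bucket/my-object"),
  HttpMethods.GET,
  RemoteAddress.Unknown,
  HeaderIPs(),
  MediaTypes.`text/plain`)
// s3Request.accessType is Read("GET"); the bucket path and object are parsed from the path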
Example 42
Source File: HttpRequestRecorder.scala    From rokku   with Apache License 2.0 5 votes vote down vote up
package com.ing.wbaa.rokku.proxy.persistence

import akka.http.scaladsl.model.{ HttpRequest, RemoteAddress }
import akka.persistence.{ PersistentActor, RecoveryCompleted, SaveSnapshotFailure, SaveSnapshotSuccess, SnapshotOffer }
import com.ing.wbaa.rokku.proxy.data.User
import com.ing.wbaa.rokku.proxy.persistence.HttpRequestRecorder.{ ExecutedRequestCmd, LatestRequests, LatestRequestsResult, Shutdown }
import com.typesafe.config.ConfigFactory
import com.typesafe.scalalogging.LazyLogging

sealed trait Evt
case class ExecutedRequestEvt(httpRequest: HttpRequest, userSTS: User, clientIPAddress: RemoteAddress) extends Evt

object HttpRequestRecorder {
  case class ExecutedRequestCmd(httpRequest: HttpRequest, userSTS: User, clientIPAddress: RemoteAddress)
  case class LatestRequests(amount: Int)
  case class LatestRequestsResult(requests: List[ExecutedRequestEvt])
  case object Shutdown
}

case class CurrentRequestsState(requests: List[ExecutedRequestEvt] = Nil) {
  def add(e: ExecutedRequestEvt): CurrentRequestsState = {
    // Trim the oldest 100 entries once the log exceeds 200; the original computed
    // the trimmed copy but discarded it, so the list grew without bound.
    val trimmed = if (size > 200) requests.dropRight(100) else requests
    copy(e :: trimmed)
  }
  def getRequests(n: Int = 100): List[ExecutedRequestEvt] = this.requests.reverse.take(n)
  def size: Int = requests.size
}

class HttpRequestRecorder extends PersistentActor with LazyLogging {
  var state: CurrentRequestsState = CurrentRequestsState()
  val snapShotInterval = ConfigFactory.load().getInt("rokku.requestPersistence.snapshotInterval")

  private def updateState(e: ExecutedRequestEvt) = state = state.add(e)

  override def persistenceId: String = ConfigFactory.load().getString("rokku.requestPersistence.persistenceId")

  override def receiveRecover: Receive = {
    case e: ExecutedRequestEvt => {
      logger.debug("No snapshot, replying event sequence {}", lastSequenceNr)
      updateState(e)
    }
    case SnapshotOffer(metadata, snapshot: CurrentRequestsState) => {
      logger.debug("Received snapshot offer, timestamp: {} for persistenceId: {} ", metadata.timestamp, metadata.persistenceId)
      state = snapshot
    }
    case RecoveryCompleted => logger.debug("Actor State recovery completed!")
  }

  override def receiveCommand: Receive = {
    case SaveSnapshotSuccess(metadata)  => logger.debug("Snapshot saved successfully, seq: {}", metadata.sequenceNr)

    case SaveSnapshotFailure(_, reason) => logger.error("Failed to save snapshot, reason: {}", reason.getMessage)

    case rc: ExecutedRequestCmd =>
      persist(ExecutedRequestEvt(rc.httpRequest, rc.userSTS, rc.clientIPAddress)) { e =>
        logger.debug("Received event for event sourcing {} from user: {}", e.httpRequest.uri, e.userSTS.userName)
        updateState(e)
        if (lastSequenceNr % snapShotInterval == 0 && lastSequenceNr != 0)
          saveSnapshot(state)
      }

    case get: LatestRequests => sender() ! LatestRequestsResult(state.getRequests(get.amount))

    case Shutdown            => context.stop(self)

    case _                   => logger.debug("{} got an unsupported message type", HttpRequestRecorder.getClass.getName)
  }

} 
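A usage sketch; it assumes an Akka persistence journal is configured and the rokku.requestPersistence.* settings are present:

import akka.actor.{ ActorSystem, Props }

val system = ActorSystem()
val recorder = system.actorOf(Props(new HttpRequestRecorder), "httpRequestRecorder")

// recorder ! ExecutedRequestCmd(httpRequest, userSTS, clientIPAddress) // payload elided
recorder ! HttpRequestRecorder.LatestRequests(10) // replies with LatestRequestsResult
recorder ! HttpRequestRecorder.Shutdown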
Example 43
Source File: CustomV2Signer.scala    From rokku   with Apache License 2.0 5 votes vote down vote up
package com.ing.wbaa.rokku.proxy.provider.aws

import com.amazonaws.SignableRequest
import com.amazonaws.auth._
import com.amazonaws.services.s3.Headers
import com.amazonaws.services.s3.internal.RestUtils
import com.typesafe.scalalogging.LazyLogging

// we need custom class to override date of request. in V2 there is no direct method like in v4
sealed class CustomV2Signer(httpVerb: String, resourcePath: String, additionalQueryParamsToSign: java.util.Set[String] = null)
  extends AbstractAWSSigner with LazyLogging {

  override def addSessionCredentials(request: SignableRequest[_], credentials: AWSSessionCredentials): Unit =
    request.addHeader("x-amz-security-token", credentials.getSessionToken)

  override def sign(request: SignableRequest[_], credentials: AWSCredentials): Unit = {

    if (resourcePath == null) throw new UnsupportedOperationException("Cannot sign a request using a dummy S3Signer instance with " + "no resource path")
    if (credentials == null || credentials.getAWSSecretKey == null) {
      logger.debug("Canonical string will not be signed, as no AWS Secret Key was provided")
      return
    }
    val sanitizedCredentials = sanitizeCredentials(credentials)
    if (sanitizedCredentials.isInstanceOf[AWSSessionCredentials]) addSessionCredentials(request, sanitizedCredentials.asInstanceOf[AWSSessionCredentials])

    // since we need to append resourcePath with queryParams we skip encodedResourcePath from original class
    // instead of generating new date here, we copy date from original request to avoid drift
    request.addHeader(Headers.DATE, request.getHeaders.get("Date"))

    val canonicalString = RestUtils.makeS3CanonicalString(httpVerb, resourcePath, request, null, additionalQueryParamsToSign)
    logger.debug("Calculated string to sign:\n\"" + canonicalString + "\"")

    val signature = super.signAndBase64Encode(canonicalString, sanitizedCredentials.getAWSSecretKey, SigningAlgorithm.HmacSHA1)

    request.addHeader("Authorization", "AWS " + sanitizedCredentials.getAWSAccessKeyId + ":" + signature)
  }
} 
Example 44
Source File: KerberosLoginProvider.scala    From rokku   with Apache License 2.0 5 votes vote down vote up
package com.ing.wbaa.rokku.proxy.provider

import java.io.File

import com.ing.wbaa.rokku.proxy.config.KerberosSettings
import com.typesafe.scalalogging.LazyLogging
import org.apache.commons.lang.StringUtils
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.security.UserGroupInformation

import scala.util.{ Failure, Success, Try }

trait KerberosLoginProvider extends LazyLogging {

  protected[this] def kerberosSettings: KerberosSettings

  loginUserFromKeytab(kerberosSettings.keytab, kerberosSettings.principal)

  private def loginUserFromKeytab(keytab: String, principal: String): Unit = {

    if (StringUtils.isNotBlank(keytab) && StringUtils.isNotBlank(principal)) {
      if (!new File(keytab).exists()) {
        logger.info("keytab file does not exist {}", keytab)
      } else {
        Try {
          UserGroupInformation.setConfiguration(new Configuration())
          UserGroupInformation.loginUserFromKeytab(principal, keytab)
        } match {
          case Success(_)         => logger.info("kerberos credentials provided {}", UserGroupInformation.getLoginUser)
          case Failure(exception) => logger.error("kerberos login error {}", exception)
        }
      }
    } else {
      logger.info("kerberos credentials are not provided")
    }
  }

} 
Example 45
Source File: AbstractIpCidrMatcher.scala    From rokku   with Apache License 2.0 5 votes vote down vote up
package com.ing.wbaa.ranger.plugin.conditionevaluator

import com.typesafe.scalalogging.LazyLogging
import org.apache.commons.net.util.SubnetUtils
import org.apache.ranger.plugin.conditionevaluator.RangerAbstractConditionEvaluator
import org.apache.ranger.plugin.policyengine.RangerAccessRequest

import scala.util.{ Failure, Success, Try }


// Note: the example listing drops the class declaration; a plausible reconstruction
// follows (`cidrs` and `_allowAny` would be populated from the Ranger policy
// condition elsewhere in the original file):
abstract class AbstractIpCidrMatcher extends RangerAbstractConditionEvaluator with LazyLogging {

  import scala.collection.JavaConverters._

  protected var cidrs: Seq[SubnetUtils#SubnetInfo] = Seq.empty
  protected var _allowAny: Boolean = false

  override def isMatched(request: RangerAccessRequest): Boolean = {
    val addresses = request.getRemoteIPAddress +: request.getForwardedAddresses.asScala.toList
    logger.debug(s"Checking whether IpAddresses (${addresses}) match any CIDR range")

    if (_allowAny) {
      logger.debug("Always matches! (allowAny flag is true)")
      true
    } else addresses.foldLeft(zero) { (a, b) =>
      combine(a, isRemoteAddressInCidrRange(b))
    }
  }

  protected val zero: Boolean
  protected def combine(a: Boolean, b: Boolean): Boolean

  private def isRemoteAddressInCidrRange(remoteIpAddress: String): Boolean = {
    val remoteIpInCidr = cidrs
      .exists { cidr =>
        val inRange = cidr.isInRange(remoteIpAddress)
        if (inRange) logger.debug(s"RemoteIpAddress $remoteIpAddress matches CIDR range ${cidr.getCidrSignature}")
        inRange
      }

    if (!remoteIpInCidr) logger.warn("RemoteIpAddress {} doesn't match any CIDR range", remoteIpAddress)
    remoteIpInCidr
  }
} 
Example 46
Source File: ImapExpungeScenario.scala    From gatling-imap   with GNU Affero General Public License v3.0 5 votes vote down vote up
package com.linagora.gatling.imap.scenario

import java.util.Calendar

import com.linagora.gatling.imap.PreDef._
import com.typesafe.scalalogging.LazyLogging
import io.gatling.core.Predef._
import io.gatling.core.feeder.FeederBuilder
import io.gatling.core.scenario.Simulation
import io.gatling.core.session.{Expression, _}
import io.gatling.core.structure.ScenarioBuilder

import scala.collection.immutable.Seq
import scala.concurrent.duration._
import scala.util.Random

object ImapExpungeScenario extends Simulation with LazyLogging {
  private val numberOfMailInInbox = Integer.getInteger("numberOfMailInInbox", 1000).intValue()
  private val percentageOfMailToExpunge = Integer.getInteger("percentageOfMailToExpunge", 20).toFloat
  private val maxDurationInMinutes = Integer.getInteger("maxDuration", 15).toFloat minutes

  logger.trace(s"numberOfMailInInbox $numberOfMailInInbox")
  logger.trace(s"percentageOfMailToExpunge $percentageOfMailToExpunge")

  private def getRandomDeleted(): Boolean = Random.nextFloat() < (percentageOfMailToExpunge / 100.0)

  def flagsWithRandomDeletion: Expression[Session] = (session: Session) => {
    session.set("flags",
      if (getRandomDeleted())
        Some(Seq("\\Flagged", "\\Deleted"))
      else
        Some(Seq("\\Flagged"))
    )
  }

  private val populateMailbox = exec(imap("append").append("INBOX", "${flags}", Option.empty[Calendar],
    """From: [email protected]
      |To: [email protected]
      |Subject: test subject
      |
      |Test content
      |abcdefghijklmnopqrstuvwxyz
      |0123456789""".stripMargin).check(ok))

  private val populateInbox = repeat(numberOfMailInInbox)(exec(flagsWithRandomDeletion).pause(5 millisecond).exec(populateMailbox))

  def apply(feeder: FeederBuilder): ScenarioBuilder = scenario("Imap")
    .feed(feeder)
    .pause(1 second)
    .exec(imap("Connect").connect()).exitHereIfFailed
    .exec(imap("login").login("${username}", "${password}").check(ok))
    .exec(imap("select").select("INBOX").check(ok))
    .exec(populateInbox)
    .exec(imap("expunge").expunge().check(ok))

} 
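A sketch of wiring the scenario into a Gatling simulation; the feeder file and injection profile are assumptions, and the imap protocol configuration is omitted:

import io.gatling.core.Predef._
import scala.concurrent.duration._

class ImapExpungeSimulation extends Simulation {
  // Hypothetical CSV feeder supplying the "${username}"/"${password}" session attributes.
  private val feeder = csv("users.csv").circular

  setUp(
    ImapExpungeScenario(feeder).inject(atOnceUsers(10))
  ).maxDuration(15.minutes)
}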
Example 47
Source File: ArtifactSourceBackedMustacheResolver.scala    From rug   with GNU General Public License v3.0 5 votes vote down vote up
package com.atomist.project.common.template

import java.io.{Reader, StringReader}

import com.atomist.source.ArtifactSource
import com.github.mustachejava.resolver.DefaultResolver
import com.typesafe.scalalogging.LazyLogging

class ArtifactSourceBackedMustacheResolver(artifactSource: ArtifactSource)
  extends DefaultResolver
    with LazyLogging{

  override def getReader(resourceName: String): Reader = {
    logger.debug(s"Need to return Reader for $resourceName")
    artifactSource.findFile(resourceName) match {
      case Some(f) => new StringReader(f.content)
      case _ => new StringReader(resourceName)
    }
  }
} 
Example 48
Source File: AbstractInMemAntlrGrammar.scala    From rug   with GNU General Public License v3.0 5 votes vote down vote up
package com.atomist.tree.content.text.grammar.antlr

import com.atomist.rug.RugRuntimeException
import com.typesafe.scalalogging.LazyLogging
import org.antlr.v4.Tool
import org.antlr.v4.tool.{ANTLRMessage, ANTLRToolListener}
import org.snt.inmemantlr.GenericParser
import org.snt.inmemantlr.tool.ToolCustomizer
import org.stringtemplate.v4.ST

import scala.collection.mutable.ListBuffer

case class ParserSetup(
                        grammars: Seq[String],
                        parser: GenericParser,
                        production: String
                      )


// Note: the example listing drops the abstract class declaration; a plausible reconstruction:
abstract class AbstractInMemAntlrGrammar extends ToolCustomizer with LazyLogging {

  protected val errorStore = new ErrorStoringToolListener

  protected def setup: ParserSetup

  private def compileGrammar(parser: GenericParser): Unit = {
    try {
      parser.compile()
    }
    catch {
      case t: Throwable =>
        logger.warn(s"Encountered Antlr exception ${t.getMessage}", t)
    }
    finally {
      if (errorStore.hasErrors)
        throw new RugRuntimeException(null, errorStore.toMessage, null)
    }
  }

  protected val config: ParserSetup = setup

  logger.debug(s"Compiling grammar-----\n$config\n-----")
  compileGrammar(config.parser)

  override def customize(tool: Tool): Unit = {
    errorStore.setTool(tool)
    tool.addListener(errorStore)
  }
}

@FunctionalInterface
trait ToolListenerCreator {

  def createListener(tool: Tool): ANTLRToolListener
}

class ErrorStoringToolListener extends ANTLRToolListener {

  private var tool: Tool = _

  private val _errors = new ListBuffer[String]

  def setTool(t: Tool): Unit = {
    this.tool = t
  }

  def errors: Seq[String] = _errors

  private def toSingleLineIfNecessary(msg: String) =
    if (tool.errMgr.formatWantsSingleLineMessage)
      msg.replace('\n', ' ')
    else msg

  override def info(msg: String) {
    val toShow = toSingleLineIfNecessary(msg)
  }

  override def error(msg: ANTLRMessage) {
    val msgST: ST = tool.errMgr.getMessageTemplate(msg)
    val outputMsg: String = msgST.render
    _errors.append(toSingleLineIfNecessary(outputMsg))
  }

  override def warning(msg: ANTLRMessage) {
    val msgST: ST = tool.errMgr.getMessageTemplate(msg)
    val outputMsg: String = msgST.render
  }

  def hasErrors: Boolean = _errors.nonEmpty

  def toMessage: String = {
    errors.mkString("\n")
  }
} 
Example 49
Source File: LinkedJsonGraphDeserializer.scala    From rug   with GNU General Public License v3.0 5 votes vote down vote up
package com.atomist.tree.marshal

import com.atomist.graph.GraphNode
import com.atomist.rug.ts.Cardinality
import com.atomist.tree.{SimpleTerminalTreeNode, TreeNode}
import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import com.fasterxml.jackson.module.scala.experimental.ScalaObjectMapper
import com.typesafe.scalalogging.LazyLogging


// Note: the example listing drops the object declaration and its constants;
// the declarations below are assumptions inferred from how they are used in `nodeify`:
object LinkedJsonGraphDeserializer extends LazyLogging {

  private val mapper = new ObjectMapper() with ScalaObjectMapper
  mapper.registerModule(DefaultScalaModule)

  // Property keys of the wire format (names assumed)
  private val NodeId = "nodeId"
  private val StartNodeId = "startNodeId"
  private val EndNodeId = "endNodeId"
  private val Type = "type"
  private val CardinalityStr = "cardinality"
  private val SpecialProperties = Set(NodeId, Type)

  def fromJson(json: String): GraphNode = {
    val l = toListOfMaps(json)
    nodeify(l)
  }

  private def toListOfMaps(json: String): List[Map[String, Object]] = {
    mapper.readValue(json, classOf[List[Map[String, Object]]])
  }

  private def nodeify(l: List[Map[String, Object]]): GraphNode = {
    // Pass 1: Get all nodes individually and put them in a map
    var idToNode: Map[String, LinkableContainerTreeNode] = Map()
    val nodes: Seq[LinkableContainerTreeNode] =
      for {
        m <- l
        if !m.contains(StartNodeId)
      } yield {
        val nodeType: String = m.get(Type) match {
          case Some(l: Seq[_]) => l.last.toString
          case None => throw new IllegalArgumentException(s"Type is required")
          case _ => ???
        }
        val nodeTags: Set[String] = m.get(Type) match {
          case Some(l: Seq[_]) => l.map(_.toString).toSet
          case None => throw new IllegalArgumentException(s"Type is required")
          case _ => ???
        }
        val nodeName = nodeType
        val simpleFields =
          for {
            k <- m.keys
            if !SpecialProperties.contains(k)
          } yield {
            val nodeValue = m.get(k) match {
              case Some(s: String) => s
              case Some(ns) => ns.toString
              case None => null
            }
            SimpleTerminalTreeNode(k, nodeValue)
          }
        val ctn = new LinkableContainerTreeNode(nodeName, nodeTags + TreeNode.Dynamic, simpleFields.toSeq)
        val nodeId: String = requiredStringEntry(m, NodeId)
        idToNode += (nodeId -> ctn)
        ctn
      }

    // Create the linkages
    for {
      m <- l
      if m.contains(StartNodeId)
    } {
      val startNodeId: String = requiredStringEntry(m, StartNodeId)
      val endNodeId: String = requiredStringEntry(m, EndNodeId)
      val cardinality: Cardinality =
        Cardinality(defaultedStringEntry(m, CardinalityStr, Cardinality.One2One))
      val link: String = requiredStringEntry(m, Type)
      logger.debug(s"Creating link from $startNodeId to $endNodeId")
      idToNode.get(startNodeId) match {
        case Some(parent) => parent.link(
          idToNode.getOrElse(endNodeId,
            throw new IllegalArgumentException(s"Cannot link to end node $endNodeId: not found")),
          link,
          cardinality)
        case None =>
          throw new IllegalArgumentException(s"Cannot link to start node $startNodeId: not found")
      }
    }

    if (nodes.nonEmpty) nodes.head else new EmptyContainerGraphNode
  }

  private def requiredStringEntry(m: Map[String,Any], key: String): String =
    m.get(key) match {
      case None => throw new IllegalArgumentException(s"Property [$key] was required, but not found in map with keys [${m.keySet.mkString(",")}]")
      case Some(s: String) => s
      case Some(x) => x.toString
    }

  private def defaultedStringEntry(m: Map[String,Any], key: String, default: String): String =
    m.get(key) match {
      case None => default
      case Some(s: String) => s
      case Some(x) => x.toString
    }
} 
Example 50
Source File: ApplicationPropertiesToApplicationYamlEditor.scala    From rug   with GNU General Public License v3.0 5 votes vote down vote up
package com.atomist.rug.kind.java.spring

import _root_.java.util

import com.atomist.param.{Parameter, ParameterValues, Tag}
import com.atomist.project.edit._
import com.atomist.rug.kind.java.ExtractApplicationProperties
import com.atomist.rug.kind.java.support.JavaAssertions
import com.atomist.source.{ArtifactSource, FileArtifact, StringFileArtifact}
import com.atomist.tree.content.project.{ConfigValue, Configuration}
import com.atomist.util.yaml.{MapToYamlStringSerializer, PropertiesToMapStructureParser}
import com.typesafe.scalalogging.LazyLogging


object ApplicationPropertiesToApplicationYamlEditor
  extends ProjectEditor
    with LazyLogging {

  import JavaAssertions.ApplicationPropertiesFilePath

  val ApplicationYamlPath = "src/main/resources/application.yml"

  private val configExtractor = new ExtractApplicationProperties(source = ApplicationPropertiesFilePath)

  override def modify(as: ArtifactSource, pmi: ParameterValues): ModificationAttempt = {
    as.findFile(ApplicationPropertiesFilePath).map(f => {
      val config = configExtractor(f)
      val applicationYaml: FileArtifact = StringFileArtifact(ApplicationYamlPath, toYamlString(config))
      val result = as + applicationYaml - ApplicationPropertiesFilePath
      SuccessfulModification(result)
    }).getOrElse(FailedModificationAttempt(s"Did not find application.properties file at $ApplicationPropertiesFilePath in ${as.id}"))
  }

  override def applicability(as: ArtifactSource): Applicability =
    Applicability(JavaAssertions.isSpring(as) && JavaAssertions.hasApplicationProperties(as), "Checked Spring and application.properties")

  override def description: String = "Atomist Core Editor: Convert application.properties to application.yml (application.properties->application.yml)"

  override def name: String = "ApplicationProperties2Yaml"

  override def tags: Seq[Tag] = Seq(
    Tag("spring", "Spring Framework"), Tag("spring-boot", "Spring Boot")
  )

  override def parameters: Seq[Parameter] = Seq()

  def toYamlString(cvs: Configuration): String = {
    logger.debug(s"Parsing configuration $cvs to YAML")

    val yamlMap = new util.HashMap[String, Object]()

    cvs.configurationValues foreach ((configurationValue: ConfigValue) => {
      PropertiesToMapStructureParser.populateYamlForPeriodScopedProperty(configurationValue.name, configurationValue.value, yamlMap)
    })

    MapToYamlStringSerializer.toYamlString(yamlMap)
  }
} 
Example 51
Source File: AddClassAnnotationEditor.scala    From rug   with GNU General Public License v3.0 5 votes vote down vote up
package com.atomist.rug.kind.java

import com.atomist.param.ParameterValues
import com.atomist.source.ArtifactSource
import com.atomist.util.lang.{JavaConstants, JavaHelpers, JavaParserUtils}
import com.github.javaparser.JavaParser
import com.github.javaparser.ast.CompilationUnit
import com.github.javaparser.ast.body.ClassOrInterfaceDeclaration
import com.typesafe.scalalogging.LazyLogging

import scala.collection.JavaConverters._


// TODO this supports only marker annotations: Need superclass to generalize
class AddClassAnnotationEditor(selector: ClassOrInterfaceDeclaration => Boolean,
                               annotationPackageName: Option[String],
                               annotationName: String,
                               javaSourcePath: String = JavaConstants.DefaultBaseSourcePath)
  extends JavaParserProjectEditor("AddClassAnnotation", javaSourcePath)
    with LazyLogging {

  private def annotationFqn: String =
    if (annotationPackageName.isEmpty)
      annotationName
    else
      annotationPackageName.get + "." + annotationName

  // Could pull into superclass, using Parser
  override def meetsPostcondition(as: ArtifactSource): Boolean = {
    val annotatedFiles = as.files
      .filter(JavaHelpers.isJavaSourceArtifact)
      .exists(f => {
        val cu = JavaParser.parse(f.inputStream())
        cu.getTypes.asScala.exists(_.getAnnotations.asScala.exists(_.getNameAsString.equals(annotationName)))
      })
    annotatedFiles
  }

  override protected def maybeModifyCompilationUnit(cu: CompilationUnit, poa: ParameterValues): Option[CompilationUnit] = {
    val modifiedTypes: Traversable[ClassOrInterfaceDeclaration] = cu.getTypes.asScala.collect {
      case coit: ClassOrInterfaceDeclaration if selector(coit) && JavaTypeType.annotationAddedTo(coit, annotationName) =>
        coit
    }

    if (modifiedTypes.nonEmpty) {
      if (annotationPackageName.isDefined)
        JavaParserUtils.addImportsIfNeeded(Seq(annotationFqn), cu)

      Some(cu)
    } else
      None
  }

  override def description: String = s"Add @$annotationFqn annotation to class"
} 
Example 52
Source File: GitHubJavaParserExtractor.scala    From rug   with GNU General Public License v3.0 5 votes vote down vote up
package com.atomist.rug.kind.java

import java.io.InputStreamReader
import java.util.{List => JList}

import com.atomist.source.{ArtifactSourceException, FileArtifact}
import com.github.javaparser.ast.CompilationUnit
import com.github.javaparser.{JavaParser, ParseException}
import com.typesafe.scalalogging.LazyLogging

case class FileAndCompilationUnit(file: FileArtifact, compilationUnit: CompilationUnit)

import com.atomist.util.Utils.withCloseable

import scala.collection.JavaConverters._


object GitHubJavaParserExtractor extends Function[JList[FileArtifact], Seq[FileAndCompilationUnit]] with LazyLogging {

  override def apply(javaFiles: JList[FileArtifact]): Seq[FileAndCompilationUnit] = {
    javaFiles.asScala.map(f => {
      logger.debug(s"Looking at Java artifact $f using $this")
      withCloseable(f.inputStream())(is =>
        withCloseable(new InputStreamReader(is))(reader => {
          try {
            FileAndCompilationUnit(f, JavaParser.parse(reader))
          } catch {
            case pex: ParseException =>
              throw new ArtifactSourceException(s"Parsing error in ${f.path},content was\n${f.content}", pex)
          }
        })
      )
    })
  }
} 
Example 53
Source File: JavaSourceType.scala    From rug   with GNU General Public License v3.0 5 votes vote down vote up
package com.atomist.rug.kind.java

import com.atomist.graph.GraphNode
import com.atomist.rug.kind.core.{FileArtifactBackedMutableView, ProjectMutableView}
import com.atomist.rug.spi._
import com.typesafe.scalalogging.LazyLogging


class JavaSourceType
  extends Type
    with ReflectivelyTypedType
    with LazyLogging {

  override def description = "Java source file"

  override def runtimeClass: Class[JavaSourceMutableView] = classOf[JavaSourceMutableView]

  override def findAllIn(context: GraphNode): Option[Seq[MutableView[_]]] = context match {
      case pv: ProjectMutableView =>
        Some(JavaProjectMutableView(pv).javaSourceViews)
      case fmv: FileArtifactBackedMutableView if fmv.path.endsWith(JavaSourceType.JavaExtension) =>
        val jpv = JavaProjectMutableView(fmv.parent)
        Some(Seq(new JavaSourceMutableView(fmv.currentBackingObject, jpv)))
      case _ => None
    }
}

object JavaSourceType {

  val JavaExtension = ".java"

  val FieldAlias = "JavaField"
} 
Example 54
Source File: ServiceLoaderBackedExtensionProvider.scala    From rug   with GNU General Public License v3.0 5 votes vote down vote up
package com.atomist.util

import java.util.ServiceLoader

import com.atomist.rug.RugRuntimeException
import com.typesafe.scalalogging.LazyLogging

import scala.collection.JavaConverters._
import scala.reflect.{ClassTag, _}


class ServiceLoaderBackedExtensionProvider[T: ClassTag](val keyProvider: T => String)
  extends LazyLogging {

  // The following can be cached as it creates issues in shared class loader hierarchies
  def providerMap: Map[String, T] = {
    logger.debug(s"Loading providers of type ${classTag[T].runtimeClass.getName} and class loader ${Thread.currentThread().getContextClassLoader}")
    ServiceLoader.load(classTag[T].runtimeClass).asScala.map {
      case t: T =>
        val key = keyProvider.apply(t)
        logger.debug(s"Registered provider '$key' with class '${t.getClass}'")
        key -> t
      case wtf =>
        throw new RugRuntimeException("Extension", s"Provider class ${wtf.getClass} must implement ${classTag[T].runtimeClass.getName} interface", null)
    }.toMap
  }
} 
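A usage sketch with a hypothetical extension point; implementations would be discovered through META-INF/services entries on the classpath:

trait GreetingProvider {
  def name: String
  def greet(): String
}

val providers = new ServiceLoaderBackedExtensionProvider[GreetingProvider](_.name)
providers.providerMap.get("english").foreach(p => println(p.greet()))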
Example 55
Source File: RugCompilerTest.scala    From rug   with GNU General Public License v3.0 5 votes vote down vote up
package com.atomist.rug

import com.atomist.source._
import com.typesafe.scalalogging.LazyLogging

object RugCompilerTest extends LazyLogging {

  val JavaAndText: ArtifactSource = new SimpleFileBasedArtifactSource("name",
    Seq(
      StringFileArtifact("pom.xml", "<maven></maven"),
      StringFileArtifact("message.txt", "// I'm talkin' about ethics"),
      StringFileArtifact("/src/main/java/Dog.java",
        """class Dog {}""".stripMargin)
    )
  )

  def show(as: ArtifactSource): Unit = {
    as.allFiles.foreach(f => {
      logger.debug(f.path + "\n" + f.content + "\n\n")
    })
  }
} 
Example 56
Source File: PomUsageTest.scala    From rug   with GNU General Public License v3.0 5 votes vote down vote up
package com.atomist.rug.kind.pom

import com.atomist.project.edit.SuccessfulModification
import com.atomist.rug.kind.java.JavaTypeUsageTest
import com.typesafe.scalalogging.LazyLogging
import org.scalatest.{FlatSpec, Matchers}

class PomUsageTest extends FlatSpec with Matchers with LazyLogging {

  import com.atomist.rug.TestUtils._

  it should "update an existing property" in {
    val ed = editorInSideFile(this, "UpdateProperty.ts")
    ed.modify(JavaTypeUsageTest.NewSpringBootProject) match {
      case _: SuccessfulModification =>
      case _ => ???
    }
  }

  it should "add a new dependency" in {
    val ed = editorInSideFile(this, "AddOrReplaceDependency.ts")
    ed.modify(JavaTypeUsageTest.NewSpringBootProject) match {
      case success: SuccessfulModification =>
      case _ => ???
    }
  }

} 
Example 57
Source File: ApplicationPropertiesToApplicationYamlEditorTest.scala    From rug   with GNU General Public License v3.0 5 votes vote down vote up
package com.atomist.rug.kind.java.spring

import com.atomist.param.SimpleParameterValues
import com.atomist.parse.java.spring.ApplicationYamlAssertions
import com.atomist.project.edit.SuccessfulModification
import com.atomist.rug.kind.java.ExtractApplicationProperties
import com.atomist.rug.kind.java.support.JavaAssertions
import com.atomist.source.{ArtifactSource, ArtifactSourceUtils, EmptyArtifactSource, StringFileArtifact}
import com.atomist.tree.content.project.Configuration
import com.typesafe.scalalogging.LazyLogging
import org.scalatest.{FlatSpec, Matchers}

class ApplicationPropertiesToApplicationYamlEditorTest extends FlatSpec with Matchers with LazyLogging {

  val eap = new ExtractApplicationProperties(JavaAssertions.ApplicationPropertiesFilePath)

  val SpringDocsSampleFile = StringFileArtifact(JavaAssertions.ApplicationPropertiesFilePath,
    """
      |spring.application.name=cruncher
      |spring.datasource.driverClassName=com.mysql.jdbc.Driver
      |spring.datasource.url=jdbc:mysql://localhost/test
      |server.port=9000
    """.stripMargin
  )

  val SpringDocsOutputYamlFile = StringFileArtifact(ApplicationYamlAssertions.ApplicationYamlFilePath,
    """spring:
      |  application:
      |    name: cruncher
      |  datasource:
      |    driverClassName: com.mysql.jdbc.Driver
      |    url: jdbc:mysql://localhost/test
      |server:
      |  port: 9000
      |""".stripMargin
  )

  val SpringDocsSampleArtifactSource = EmptyArtifactSource("") + SpringDocsSampleFile

  "ApplicationPropertiesToApplicationYamlEditor" should "not be applicable to empty ArtifactSource" in {
    assert(ApplicationPropertiesToApplicationYamlEditor.applicability(new EmptyArtifactSource("")).canApply === false)
  }

  it should "transform Spring docs sample" in testAgainst(SpringDocsSampleArtifactSource)

  it should "delete application.properties" in {
    val result = testAgainst(SpringDocsSampleArtifactSource)
    result.findFile(JavaAssertions.ApplicationPropertiesFilePath) should not be defined
  }

  it should "construct an application.yml" in {
    val result = testAgainst(SpringDocsSampleArtifactSource)
    result.findFile(ApplicationYamlAssertions.ApplicationYamlFilePath) should be(defined)
  }

  it should "construct an application.yaml with the correct contents" in {
    val result = testAgainst(SpringDocsSampleArtifactSource)
    val aYaml = result.findFile(ApplicationPropertiesToApplicationYamlEditor.ApplicationYamlPath)
    aYaml.get.content shouldBe SpringDocsOutputYamlFile.content
  }

  private def testAgainst(as: ArtifactSource): ArtifactSource = {
    // Read config first for comparison
    // Wouldn't normally call get without checking, but if it fails the test that's fine
    val config = eap(as.findFile(JavaAssertions.ApplicationPropertiesFilePath).get)

    val mr = ApplicationPropertiesToApplicationYamlEditor.modify(as, SimpleParameterValues.Empty)
    mr match {
      case sma: SuccessfulModification =>
        val aYaml = sma.result.findFile(ApplicationPropertiesToApplicationYamlEditor.ApplicationYamlPath)
        aYaml should be(defined)
        // TODO assertions about ayml
        logger.debug(aYaml.get.content)
        validateYamlRepresentationOfConfiguration(aYaml.get.content, config)
        sma.result
      case whatInGodsHolyNameAreYouBlatheringAbout =>
        logger.debug(ArtifactSourceUtils.prettyListFiles(as))
        fail(s"Unexpected modification result $whatInGodsHolyNameAreYouBlatheringAbout")
    }
  }

  private def validateYamlRepresentationOfConfiguration(yamlString: String, config: Configuration): Unit = {
    logger.debug(s"Config length = ${config.configurationValues.size}, yaml=[$yamlString]")
    // yamlString should not equal ""
    // compare to expected yaml
  }
} 
Example 58
Source File: PropertiesUsageTest.scala    From rug   with GNU General Public License v3.0
package com.atomist.rug.kind.properties

import com.atomist.param.SimpleParameterValues
import com.atomist.project.edit.SuccessfulModification
import com.atomist.rug.TestUtils
import com.atomist.rug.kind.java.JavaTypeUsageTest
import com.typesafe.scalalogging.LazyLogging
import org.scalatest.{FlatSpec, Matchers}

class PropertiesUsageTest extends FlatSpec with Matchers with LazyLogging {

  lazy val ed = TestUtils.editorInSideFile(this, "SetProperty.ts")

  it should "update an existing property" in {
    ed.modify(JavaTypeUsageTest.NewSpringBootProject,
      SimpleParameterValues("value", "server.port")) match {
      case _: SuccessfulModification =>
      case _ => ???
    }
  }

  it should "create a new property" in {
    ed.modify(JavaTypeUsageTest.NewSpringBootProject,
      SimpleParameterValues("value", "server.portlet")) match {
      case _: SuccessfulModification =>
      case _ => ???
    }
  }

} 
Example 59
Source File: ServiceLoaderCompilerRegistryTest.scala    From rug   with GNU General Public License v3.0
package com.atomist.rug.compiler

import com.atomist.rug.ts.TypeScriptBuilder
import com.atomist.source.{SimpleFileBasedArtifactSource, StringFileArtifact}
import com.typesafe.scalalogging.LazyLogging
import org.scalatest.{FlatSpec, Matchers}

class ServiceLoaderCompilerRegistryTest extends FlatSpec with Matchers with LazyLogging {

  it should "compile a simple editor" in {
    val as = SimpleFileBasedArtifactSource(StringFileArtifact(".atomist/Thing.ts",
      """
        |import {Editor} from '@atomist/rug/operations/Decorators'
        |import {Project} from '@atomist/rug/model/Core'
        |
        |@Editor("Nothing special")
        |class TestEditor  {
        |    edit(p: Project) {
        |       // Do nothing
        |    }
        |}
      """.stripMargin))

    val compiled = TypeScriptBuilder.compileWithModel(as)
    for (f <- compiled.allFiles) {
      logger.debug(f.path)
    }
  }
} 
Example 60
Source File: GenCodecServer.scala    From udash-core   with Apache License 2.0
package io.udash.web.guide.demos.rpc

import com.typesafe.scalalogging.LazyLogging
import io.udash.web.guide.demos.rpc.GenCodecServerRPC.{DemoCaseClass, DemoClass, Fruit}

import scala.concurrent.Future

class GenCodecServer extends GenCodecServerRPC with LazyLogging {
  import io.udash.web.Implicits._

  override def sendInt(el: Int): Future[Int] = Future {
    logger.debug(el.toString)
    el
  }

  override def sendDouble(el: Double): Future[Double] = Future {
    logger.debug(el.toString)
    el
  }

  override def sendSealedTrait(el: Fruit): Future[Fruit] = Future {
    logger.debug(el.toString)
    el
  }

  override def sendString(el: String): Future[String] = Future {
    logger.debug(el.toString)
    el
  }

  override def sendMap(el: Map[String, Int]): Future[Map[String, Int]] = Future {
    logger.debug(el.toString)
    el
  }

  override def sendClass(el: DemoClass): Future[DemoClass] = Future {
    logger.debug(el.toString)
    el
  }

  override def sendSeq(el: Seq[String]): Future[Seq[String]] = Future {
    logger.debug(el.toString)
    el
  }

  override def sendCaseClass(el: DemoCaseClass): Future[DemoCaseClass] = Future {
    logger.debug(el.toString)
    el
  }
} 
Example 61
Source File: Launcher.scala    From udash-core   with Apache License 2.0
package io.udash.web

import com.typesafe.config.ConfigFactory
import com.typesafe.scalalogging.LazyLogging
import io.udash.web.server.ApplicationServer

object Launcher extends LazyLogging {
  def main(args: Array[String]): Unit = {
    val startTime = System.nanoTime

    createApplicationServer().start()
    
    val duration: Long = (System.nanoTime - startTime) / 1000000000
    logger.info(s"Udash Homepage & Dev's Guide started in ${duration}s.")
  }


  private[udash] def createApplicationServer(): ApplicationServer = {
    val serverConfig = ConfigFactory.load().getConfig("ui.server")
    new ApplicationServer(
      port = serverConfig.getInt("port"),
      homepageResourceBase = serverConfig.getString("homepageResourceBase"),
      guideResourceBase = serverConfig.getString("guideResourceBase")
    )
  }
} 
Example 62
Source File: CommandRunner.scala    From ScalaStan   with BSD 3-Clause "New" or "Revised" License
package com.cibo.scalastan

import java.io.File

import com.typesafe.scalalogging.LazyLogging

trait CommandRunner extends LazyLogging {
  def runCommand(dir: File, command: Seq[String]): Int = {
    val pb = new ProcessBuilder(command: _*).directory(dir).redirectErrorStream(true)
    val process = pb.start()
    io.Source.fromInputStream(process.getInputStream).getLines.foreach { line =>
      logger.info(line)
    }
    process.waitFor()
  }
} 
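A minimal usage sketch for the trait above (the object name and the "ls -l" command are illustrative): mix CommandRunner into any object and call runCommand; merged stdout/stderr is logged line by line.

import java.io.File

object CommandRunnerExample extends CommandRunner {
  def main(args: Array[String]): Unit = {
    // Run "ls -l" in the current directory; runCommand logs each output line at INFO.
    val exitCode = runCommand(new File("."), Seq("ls", "-l"))
    println(s"Process exited with code $exitCode")
  }
}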
Example 63
Source File: JsonSerde.scala    From kafka-streams-scala   with Apache License 2.0
package com.github.aseigneurin.kafka.serialization.scala

import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import com.typesafe.scalalogging.LazyLogging

import scala.reflect.{ClassTag, classTag}

class JsonSerde[T >: Null : ClassTag] extends BaseSerde[T] with LazyLogging {

  val mapper = new ObjectMapper
  mapper.registerModule(DefaultScalaModule)

  override def deserialize(topic: String, data: Array[Byte]): T = data match {
    case null => null
    case _ =>
      try {
        mapper.readValue(data, classTag[T].runtimeClass.asInstanceOf[Class[T]])
      } catch {
        case e: Exception =>
          val jsonStr = new String(data, "UTF-8")
          logger.warn(s"Failed parsing ${jsonStr}", e)
          null
      }
  }

  override def serialize(topic: String, obj: T): Array[Byte] = {
    mapper.writeValueAsBytes(obj)
  }

} 
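A round-trip usage sketch; the Click payload type is hypothetical, and binding it assumes jackson-module-scala can handle the case class (any reference type with a ClassTag satisfies T >: Null).

// Hypothetical payload type for illustration.
case class Click(page: String, count: Int)

object JsonSerdeExample {
  def main(args: Array[String]): Unit = {
    val serde = new JsonSerde[Click]
    val bytes = serde.serialize("clicks", Click("/home", 3))
    val back  = serde.deserialize("clicks", bytes) // Click("/home", 3); null if parsing fails
    println(back)
  }
}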
Example 64
Source File: DataApi.scala    From scala-for-beginners   with Apache License 2.0
package com.allaboutscala.donutstore.data

import com.allaboutscala.donutstore.common.{Donut, Donuts}
import com.typesafe.scalalogging.LazyLogging

import scala.collection.concurrent.TrieMap
import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global


// The original class declaration was stripped when this example was extracted;
// the class name below is a hypothetical reconstruction, assuming a DataApi
// trait declaring these method signatures exists elsewhere in the project.
class DonutStoreDataApi extends DataApi with LazyLogging {

  private val donutDatabase = TrieMap.empty[String, Donut]

  override def createDonut(donut: Donut): Future[String] = Future {
    logger.info(s"Create donut = $donut")
    val donutExists = donutDatabase.putIfAbsent(donut.name, donut)
    donutExists match {
      case Some(d) => s"${d.name} already exists in database."
      case None => s"${donut.name} has been added to the database."
    }
  }

  override def fetchDonuts(): Future[Donuts] = Future {
    logger.info("Fetching all donuts")
    Donuts(donutDatabase.values.toSeq)
  }

  override def updateDonutIngredients(donut: Donut): Future[String] = Future {
    logger.info(s"Updating ingredients = ${donut.ingredients} for donutName = ${donut.name}")
    val someDonut = donutDatabase.get(donut.name)
    someDonut match {
      case Some(d) =>
        donutDatabase.replace(d.name, donut)
        s"Updated donut ingredients for donutName = ${donut.name}"

      case None =>
        s"Donut ${donut.name} does not exist in database. The update operation was not run."
    }
  }

  override def deleteDonut(donutName: String): Future[String] = Future {
    logger.info(s"Deleting donut = $donutName")
    val someDonut = donutDatabase.get(donutName)
    someDonut match {
      case Some(d) =>
        donutDatabase.remove(d.name)
        s"Deleted $d from database."

      case None =>
        s"$donutName does not exist in database. The delete operation was not run."
    }
  }
} 
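A usage sketch for the API above; DonutStoreDataApi is the reconstructed class name, and the Donut(name, ingredients) constructor shape is an assumption based on the fields the example reads.

import scala.concurrent.Await
import scala.concurrent.duration._

object DataApiExample {
  def main(args: Array[String]): Unit = {
    val api = new DonutStoreDataApi
    // Donut(name, ingredients) is an assumed constructor shape.
    println(Await.result(api.createDonut(Donut("glazed", Seq("flour", "sugar"))), 5.seconds))
    println(Await.result(api.fetchDonuts(), 5.seconds))
  }
}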
Example 65
Source File: DonutStoreHttpController.scala    From scala-for-beginners   with Apache License 2.0
package com.allaboutscala.donutstore.httpserver

import akka.http.scaladsl.Http
import com.typesafe.scalalogging.LazyLogging

import scala.io.StdIn
import scala.util.{Failure, Success}



trait DonutStoreHttpController extends LazyLogging {
  this: DonutStoreServices =>

  def startAndBind(): Unit = {
    logger.info("Initializing and binding Akka HTTP server")
    val httpServerFuture = Http().bindAndHandle(donutApiRoutes, cfg.httpServer.ip, cfg.httpServer.port)
    httpServerFuture.onComplete {
      case Success(binding) =>
        logger.info(s"Akka Http Server is bound to ${binding.localAddress}")
        logger.info(s"To stop the server, press the [Enter] key in IntelliJ's console.")

      case Failure(e) =>
        logger.error(s"Akka Http server failed to bind to ${cfg.httpServer.ip}:${cfg.httpServer.port}",e)
        system.terminate()
    }

    // pressing enter key will kill the server
    StdIn.readLine()
    for {
      serverBinding <- httpServerFuture
      _             <- serverBinding.unbind()
      terminated    <- system.terminate()
    } yield logger.info(s"Akka Http server was terminated = $terminated")
  }
} 
Example 66
Source File: DataApi.scala    From scala-for-beginners   with Apache License 2.0
package com.allaboutscala.donutstore.data


import com.allaboutscala.donutstore.common.{Donut, Donuts}
import com.typesafe.scalalogging.LazyLogging

import scala.collection.concurrent.TrieMap
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future


// The original class declaration was stripped when this example was extracted;
// the class name below is a hypothetical reconstruction, assuming a DataApi
// trait declaring these method signatures exists elsewhere in the project.
class DonutStoreDataApi extends DataApi with LazyLogging {

  private val donutDatabase = TrieMap.empty[String, Donut]

  override def createDonut(donut: Donut): Future[String] = Future {
    logger.info(s"Create donut = $donut")
    val donutExists = donutDatabase.putIfAbsent(donut.name, donut)
    donutExists match {
      case Some(d) => s"${d.name} already exists in database."
      case None => s"${donut.name} has been added to the database."
    }
  }

  override def fetchDonuts(): Future[Donuts] = Future {
    logger.info("Fetching all donuts")
    Donuts(donutDatabase.values.toSeq)
  }

  override def updateDonutIngredients(donut: Donut): Future[String] = Future {
    logger.info(s"Updating ingredients = ${donut.ingredients} for donutName = ${donut.name}")
    val someDonut = donutDatabase.get(donut.name)
    someDonut match {
      case Some(d) =>
        donutDatabase.replace(d.name, donut)
        s"Updated donut ingredients for donutName = ${donut.name}"

      case None =>
        s"Donut ${donut.name} does not exist in database. The update operation was not run."
    }
  }

  override def deleteDonut(donutName: String): Future[String] = Future {
    logger.info("Deleting donut = $donutName")
    val someDonut = donutDatabase.get(donutName)
    someDonut match {
      case Some(d) =>
        donutDatabase.remove(d.name)
        s"Deleted ${d.name} from database."

      case None =>
        s"$donutName does not exist in database. The delete operation was not run."
    }
  }
} 
Example 68
Source File: Decode.scala    From scala-stellar-sdk   with Apache License 2.0
package stellar.sdk.model.xdr

import java.io.EOFException
import java.nio.ByteBuffer
import java.nio.charset.StandardCharsets
import java.time.Instant

import cats.Eval
import cats.data.{IndexedStateT, State}
import com.typesafe.scalalogging.LazyLogging

import scala.util.Try

trait Decode extends LazyLogging {

  private def decode[T](bs: Seq[Byte], len: Int)(decoder: Seq[Byte] => T): (Seq[Byte], T) = {
    if (bs.length < len) throw new EOFException("Insufficient data remains to parse.")
    val t = decoder(bs.take(len))
    logger.trace(s"Dropping {} to make {}", len, t)
    bs.drop(len) -> t
  }

  val int: State[Seq[Byte], Int] = State[Seq[Byte], Int] { bs =>
    decode(bs, 4) { in => ByteBuffer.wrap(in.toArray).getInt }
  }

  val long: State[Seq[Byte], Long] = State[Seq[Byte], Long] { bs =>
    decode(bs, 8) { in => ByteBuffer.wrap(in.toArray).getLong }
  }

  val instant: State[Seq[Byte], Instant] = long.map(Instant.ofEpochSecond)

  val bool: State[Seq[Byte], Boolean] = int.map(_ == 1)

  def bytes(len: Int): State[Seq[Byte], Seq[Byte]] = State[Seq[Byte], Seq[Byte]] { bs =>
    decode(bs, len) { _.take(len) }
  }

  val bytes: State[Seq[Byte], Seq[Byte]] = for {
    len <- int
    bs <- bytes(len)
  } yield bs

  def padded(multipleOf: Int = 4): State[Seq[Byte], Seq[Byte]] = for {
    len <- int
    bs <- bytes(len)
    _ <- bytes((multipleOf - (len % multipleOf)) % multipleOf)
  } yield bs

  val string: State[Seq[Byte], String] = padded().map(_.toArray).map(new String(_, StandardCharsets.UTF_8))

  def switch[T](zero: State[Seq[Byte], T], others: State[Seq[Byte], T]*): IndexedStateT[Eval, Seq[Byte], Seq[Byte], T] = int.flatMap {
    case 0 => zero
    case n => Try(others(n - 1)).getOrElse {
      throw new IllegalArgumentException(s"No parser defined for discriminant $n")
    }
  }

  // TODO (jem) - All switches should use this instead and Discriminators should be held in the parent (switcher not switchee).
  def switchInt[T](zero: State[Seq[Byte], T], others: State[Seq[Byte], T]*): State[Seq[Byte], (T, Int)] = int.flatMap {
    case 0 => zero.map(_ -> 0)
    case n => Try(others(n - 1).map(_ -> n)).getOrElse {
      throw new IllegalArgumentException(s"No parser defined for discriminant $n")
    }
  }

  def opt[T](parseT: State[Seq[Byte], T]): State[Seq[Byte], Option[T]] = bool.flatMap {
    case true => parseT.map(Some(_))
    case false => State.pure(None)
  }

  def arr[T](parseT: State[Seq[Byte], T]): State[Seq[Byte], Seq[T]] = int.flatMap(seq(_, parseT))

  // $COVERAGE-OFF$
  // For debugging XDR only.
  def log[T](t: T): State[Seq[Byte], Unit] = State[Seq[Byte], Unit] { bs =>
    logger.debug("{}\n", t)
    bs -> ()
  }
  // $COVERAGE-ON$

  def seq[T](qty: Int, parseT: State[Seq[Byte], T]): State[Seq[Byte], Seq[T]] = {
    (0 until qty).foldLeft(State.pure[Seq[Byte], Seq[T]](Seq.empty[T])) { case (state, _) =>
      for {
        ts <- state
        t <- parseT
      } yield ts :+ t
    }
  }

  def drop[T](parse: State[Seq[Byte], _])(t: T): State[Seq[Byte], T] = for {
    _ <- parse
  } yield t

  def widen[A, W, O <: W](s: State[A, O]): State[A, W] = s.map(w => w: W)
} 
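A sketch of how these State-based parsers compose: running two int parsers in sequence over eight bytes yields the decoded pair plus the unconsumed remainder (the object name and input are illustrative).

object DecodeExample extends Decode {
  def main(args: Array[String]): Unit = {
    val input: Seq[Byte] = Array[Byte](0, 0, 0, 1, 0, 0, 0, 2).toSeq
    // run returns an Eval of (remaining bytes, decoded value); .value forces it
    val (remaining, pair) = (for { a <- int; b <- int } yield (a, b)).run(input).value
    println(pair)           // (1,2)
    println(remaining.size) // 0
  }
}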
Example 69
Source File: FederationServer.scala    From scala-stellar-sdk   with Apache License 2.0
package stellar.sdk

import java.net.HttpURLConnection.HTTP_NOT_FOUND

import com.typesafe.scalalogging.LazyLogging
import okhttp3.{Headers, HttpUrl, OkHttpClient, Request}
import org.json4s.native.{JsonMethods, Serialization}
import org.json4s.{Formats, NoTypeHints}
import stellar.sdk.inet.RestException
import stellar.sdk.model.response.{FederationResponse, FederationResponseDeserialiser}

import scala.concurrent.{ExecutionContext, Future}
import scala.util.{Failure, Success, Try}

case class FederationServer(base: HttpUrl) extends LazyLogging {

  implicit val formats: Formats = Serialization.formats(NoTypeHints) + FederationResponseDeserialiser
  private val client = new OkHttpClient()
  private val headers = Headers.of(
    "X-Client-Name", BuildInfo.name,
    "X-Client-Version", BuildInfo.version)

  def byName(name: String)(implicit ec: ExecutionContext): Future[Option[FederationResponse]] =
    fetchFederationResponse(base.newBuilder()
      .addQueryParameter("q", name)
      .addQueryParameter("type", "name")
      .build(), _.copy(address = name))

  def byAccount(account: PublicKey)(implicit ec: ExecutionContext): Future[Option[FederationResponse]] =
    fetchFederationResponse(base.newBuilder()
      .addQueryParameter("q", account.accountId)
      .addQueryParameter("type", "id")
      .build(), _.copy(account = account))


  private def fetchFederationResponse(url: HttpUrl, fillIn: FederationResponse => FederationResponse)
                                     (implicit ec: ExecutionContext): Future[Option[FederationResponse]] =
    Future(client.newCall(new Request.Builder().url(url).headers(headers).build()).execute())
      .map { response =>
        response.code() match {
          case HTTP_NOT_FOUND => None
          case e if e >= 500 => throw RestException(response.body().string())
          case _ =>
            Try(response.body().string())
              .map(JsonMethods.parse(_))
              .map(_.extract[FederationResponse])
              .map(fillIn)
              .map(validate) match {
              case Success(fr) => Some(fr)
              case Failure(t) => throw RestException("Could not parse document as FederationResponse.", t)
            }
        }
      }


  private def validate(fr: FederationResponse): FederationResponse = {
    if (fr.account == null) throw RestException("Document did not contain account_id")
    if (fr.address == null) throw RestException("Document did not contain stellar_address")
    fr
  }
}

object FederationServer {
  def apply(uriString: String): FederationServer = new FederationServer(HttpUrl.parse(uriString))
} 
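A usage sketch; the federation endpoint and the Stellar address are illustrative.

import scala.concurrent.ExecutionContext.Implicits.global

object FederationLookupExample {
  def main(args: Array[String]): Unit = {
    val server = FederationServer("https://fed.example.org/federation")
    server.byName("alice*example.org").foreach {
      case Some(response) => println(s"Resolved: $response")
      case None           => println("No such federation record")
    }
  }
}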
Example 70
Source File: TransactionLedgerEntriesSpec.scala    From scala-stellar-sdk   with Apache License 2.0
package stellar.sdk.model.ledger

import com.typesafe.scalalogging.LazyLogging
import org.specs2.mutable.Specification

import scala.util.{Failure, Try}

class TransactionLedgerEntriesSpec extends Specification with LedgerEntryGenerators with LazyLogging {

  "a ledger entry" should {
    "serde to/from XDR" >> prop { entries: TransactionLedgerEntries =>
      val triedEntries = Try(TransactionLedgerEntries.decode.run(entries.encode).value._2)
      triedEntries match {
        case Failure(_) => logger.error(s"Failed to decode $entries")
        case _ =>
      }
      triedEntries must beSuccessfulTry(entries)
    }
  }

} 
Example 71
Source File: LedgerEntrySpec.scala    From scala-stellar-sdk   with Apache License 2.0
package stellar.sdk.model.ledger

import com.typesafe.scalalogging.LazyLogging
import org.specs2.mutable.Specification

import scala.util.{Failure, Try}

class LedgerEntrySpec extends Specification with LedgerEntryGenerators with LazyLogging {

  "a ledger entry" should {
    "serde to/from XDR" >> prop { entry: LedgerEntry =>
      val triedEntry = Try(LedgerEntry.decode.run(entry.encode).value._2)
      triedEntry match {
        case Failure(_) => logger.error(s"Failed to decode $entry")
        case _ =>
      }
      triedEntry must beSuccessfulTry(entry)
    }
  }

} 
Example 72
Source File: LedgerEntryChangeSpec.scala    From scala-stellar-sdk   with Apache License 2.0
package stellar.sdk.model.ledger

import com.typesafe.scalalogging.LazyLogging
import org.specs2.mutable.Specification

import scala.util.{Failure, Try}

class LedgerEntryChangeSpec extends Specification with LedgerEntryGenerators with LazyLogging {

  "a ledger entry change" should {
    "serde to/from XDR" >> prop { change: LedgerEntryChange =>
      val triedChange = Try(LedgerEntryChange.decode.run(change.encode).value._2)
      triedChange match {
        case Failure(_) => logger.error(s"Failed to decode $change")
        case _ =>
      }
      triedChange must beSuccessfulTry(change)
    }
  }

} 
Example 73
Source File: BackpropClassifierTest.scala    From swiftlearner   with BSD 2-Clause "Simplified" License
package com.danylchuk.swiftlearner.nn.backprop

import com.danylchuk.swiftlearner.MemoryTesting
import com.danylchuk.swiftlearner.data.{FisherIris, Mnist}
import com.typesafe.scalalogging.LazyLogging
import org.specs2.mutable.Specification


class BackpropClassifierTest extends Specification with LazyLogging with MemoryTesting {
  "BackpropClassifier" should {
    "classify the flowers from the Fisher Iris dataset" >> {
      val (trainingSet, testSet) = FisherIris.trainingAndTestDataFloat()

      // Normalize the input values to speed up learning
      def normalize(x: Float): Float = (x - 25) / 25

      val start = System.currentTimeMillis
      val classifier = new BackpropClassifier(trainingSet, 3, 5000, normalize = normalize)

      val accuracy = (for ((species, params) <- testSet) yield {
        classifier.predict(params) == species
      }).count { x: Boolean => x } / testSet.size.toDouble

      val time = (System.currentTimeMillis - start) / 1000.0
      logger.info(s"Fisher Iris backprop time: ${time}s")

      accuracy must be_>(0.8)  // 0.96 is typical
    }

    "classify the handwritten digits from the MNIST dataset" >> {
      val seed = Some(0L)
      val nHidden = 70
      val nRepeat = 1  // increase for better results
      val learnRate = 1.0f
      val expectedAccuracy = 0.87  // 0.95 with nRepeat=20

      val (trainingSet, testSet) = Mnist.trainingAndTestDataFloat()

      // Careful normalization is essential with this simple network.
      def scale(x: Float): Float = x / 256.0f // source byte range to (0; 1)
      val mean = trainingSet.flatMap(_._2).take(10000).map(scale).sum / 10000.0f
      def normalize(x: Float): Float = scale(x) - mean // balance around 0 for stability

      logger.info("creating the classifier")

      val start = System.currentTimeMillis

      val classifier = new BackpropClassifier(trainingSet, nHidden, nRepeat, learnRate, normalize, seed)

      logger.info("checking the accuracy")
      val accuracy = (for ((digit, params) <- testSet) yield {
        classifier.predict(params) == digit
      }).count { x: Boolean => x } / testSet.size.toDouble

      val time = (System.currentTimeMillis - start) / 1000.0
      logger.info(s"MNIST backprop time: ${time}s")

      accuracy must be_>(expectedAccuracy)
    }

    "use memory sparingly in predict" >> skipped {
      val vector = Seq.tabulate(100)(_.toFloat)
      val classifier = new BackpropClassifier(Seq((0, vector)), 10, 1)
      classifier.predict(vector) // allocate the fixed structures and train

      countAllocatedRepeat(10) {
        classifier.predict(vector)
      } must_== 0L  // passes
    }
  }
} 
Example 74
Source File: PerceptronTest.scala    From swiftlearner   with BSD 2-Clause "Simplified" License
package com.danylchuk.swiftlearner.nn.perceptron

import com.danylchuk.swiftlearner.coll.TraversableOp._
import com.typesafe.scalalogging.LazyLogging
import org.specs2.execute.Result
import org.specs2.mutable.Specification


class PerceptronTest extends Specification with LazyLogging {
  def learnAndTest(examples: Seq[(Vector[Double], Boolean)], times: Int) = {
    val p = new Perceptron(Vector(0.0, 0.0))
    val learned = p.learn(examples.repeat(times))
    Result.foreach(examples) { example =>
      learned.f(example._1) must_== example._2
    }
  }

  "Perceptron" should {
    "calculate the activation function correctly" >> {
      val p = new Perceptron(Vector(2.0, 3.0), -25.0)
      p.f(Vector(4.0, 5.0)) must beFalse
      p.f(Vector(4.0, 6.0)) must beTrue
    }

    "learn the AND function" >> {
      val AndExamples = Seq(
        (Vector[Double](0, 0), false),
        (Vector[Double](0, 1), false),
        (Vector[Double](1, 0), false),
        (Vector[Double](1, 1), true))

      learnAndTest(AndExamples, 10)
    }

    "learn the OR function" >> {
      val OrExamples = Seq(
        (Vector[Double](0, 0), false),
        (Vector[Double](0, 1), true),
        (Vector[Double](1, 0), true),
        (Vector[Double](1, 1), true))

      learnAndTest(OrExamples, 10)
    }
  }
} 
Example 75
Source File: SoftmaxTest.scala    From swiftlearner   with BSD 2-Clause "Simplified" License
package com.danylchuk.swiftlearner.softmax

import com.danylchuk.swiftlearner.data.{FisherIris, Mnist}
import com.typesafe.scalalogging.LazyLogging
import org.specs2.matcher.DataTables
import org.specs2.mutable.Specification


class SoftmaxTest extends Specification with LazyLogging with DataTables {
  val randomSeed = Some(0L)

  "Softmax" should {
    "operate on primitives" >> {
      val examples = Vector((Array(1.0f, 1.0f), Array(1.0f)),
                            (Array(1.0f, 0.0f), Array(0.0f)),
                            (Array(0.0f, 1.0f), Array(0.0f)),
                            (Array(0.0f, 0.0f), Array(0.0f)))
      val softmax = Softmax.withRandomWeights(2, 1, 0.1f, 10)
      val trained = softmax.learnSeq(examples)

      "elementClass" |
        trained.weights(0).getClass |
        trained.target(0)(0).getClass |
        trained.input(0)(0).getClass |
        trained.lineOut(0)(0).getClass |
        trained.predicted(0)(0).getClass |> { elementClass =>
        elementClass.isPrimitive must beTrue
      }
    }

    "classify the flowers from the Fisher Iris dataset" >> {
      val (trainingSet, testSet) = FisherIris.trainingAndTestDataFloat(randomSeed)

      // Normalize the input values to speed up learning
      def normalize(x: Float): Float = (x - 25.0f) / 25.0f

      val classifier = new SoftmaxClassifier(trainingSet, 1, 0.5f,
        normalize = normalize, randomSeed = randomSeed)

      val accuracy = (for ((species, params) <- testSet) yield {
        val predicted = classifier.predict(params)
        logger.trace(s"Predicted: $predicted; actual: $species")
        predicted == species
      }).count { x: Boolean => x } / testSet.size.toFloat

      accuracy must be_>(0.9f)  // 0.94 with the current settings and seed
    }

    "classify the handwritten digits from the MNIST dataset" >> {
      val nRepeat = 50  // usually reaches a minimum and stops much sooner
      // val learnSpeed = 0.01  // for not normalized stable: 0.01 => 0.855
      val learnSpeed = 0.1f  // for normalized naive
      val batchSize = 1  // 1 works best most of the time
      val useStable = false  // "false" achieves better accuracy for normalized inputs
      val stuckIterationLimit = 300  // Increase to 100000 for better results
      val numberOfExamplesToLoad = 30000  // Increase to 60000 for the full training set
      val expectedAccuracy = 0.7f  // 0.92 with stuckIterationLimit = 100000 and full training set

      val (trainingSet, testSet) = Mnist.shuffledTrainingAndTestDataFloat(numberOfExamplesToLoad, randomSeed = randomSeed)

      val start = System.currentTimeMillis

      def normalize(x: Float): Float = x / 256.0f  // source byte range to (0.0; 1.0)

      logger.info("creating the classifier")
      val classifier = new SoftmaxClassifier(trainingSet, nRepeat,
        learnSpeed, stuckIterationLimit, batchSize, normalize, randomSeed, useStable)

      logger.info("checking the accuracy")
      val accuracyData = for ((digit, params) <- testSet) yield {
        val predicted = classifier.predict(params)
        logger.trace(s"Predicted: $predicted; actual: $digit")
        (predicted == digit, predicted, digit)
      }

      val overallAccuracy = accuracyData.count { x: (Boolean, Int, Int) => x._1 } / testSet.size.toFloat

      val time = (System.currentTimeMillis - start) / 1000.0
      logger.debug(s"MNIST softmax time: ${time}s")

      overallAccuracy must be_>(expectedAccuracy)
    }
  }
} 
Example 76
Source File: EntityConstraints.scala    From eidos   with Apache License 2.0
package org.clulab.wm.eidos.extraction

import com.typesafe.scalalogging.LazyLogging
import org.clulab.odin.Mention
import org.clulab.wm.eidos.utils.TagSet

import scala.annotation.tailrec
import scala.util.matching.Regex


object EntityConstraints extends LazyLogging {

  val COORD_DEPS: Set[Regex] = Set("^conj_".r, "^cc".r)

  // Ensure final token of mention span is valid
  def validFinalTag(mention: Mention, tagSet: TagSet): Boolean =
    mention.tags.isEmpty || tagSet.isValidFinal(mention.tags.get.last)

  // Limit entity mentions to at most n tokens
  def withinMaxLength(mention: Mention, n: Int): Boolean = mention.words.size <= n

  // Check if brackets and braces match
  def matchingBrackets(mention: Mention): Boolean =
    matchingBrackets(mention.words)

  def matchingBrackets(words: Seq[String]): Boolean =
    TagSet.BRACKETS.forall(pair => matchingBrackets(words, pair._1, pair._2))

  // Each of the brackets is on a different "channel" so that ([)] is valid.
  // Otherwise, a stack of outstanding unmatched brackets is required.
  def matchingBrackets(words: Seq[String], opening: String, closing: String): Boolean = {


    @tailrec
    def matchingBrackets(index: Int, extraOpening: Int): Boolean = {
      if (extraOpening < 0)
        false // too many closing without opening
      else if (index >= words.length)
        extraOpening == 0 // if it is just right
      else if (words(index) == opening)
        matchingBrackets(index + 1, extraOpening + 1)
      else if (words(index) == closing)
        matchingBrackets(index + 1, extraOpening - 1)
      else
        matchingBrackets(index + 1, extraOpening)
    }


    matchingBrackets(0, 0)
  }

  // Decide if the sentence element is a conjunction using just the POS tag
  def isConjunction(i: Int, mention: Mention, tagSet: TagSet): Boolean =
      if (i > 0 && tagSet.isAnyAdjective(mention.sentenceObj.tags.get(i - 1))) false
      else tagSet.isCoordinating(mention.sentenceObj.tags.get(i))
} 
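Because each bracket pair is checked on its own channel, interleaved pairs still count as matched; a quick sketch against the public three-argument overload (the object name is illustrative):

object BracketExamples {
  import org.clulab.wm.eidos.extraction.EntityConstraints

  def main(args: Array[String]): Unit = {
    println(EntityConstraints.matchingBrackets(Seq("(", "[", ")", "]"), "(", ")")) // true: "[" and "]" are ordinary tokens on this channel
    println(EntityConstraints.matchingBrackets(Seq("(", "foo", ")"), "(", ")"))    // true
    println(EntityConstraints.matchingBrackets(Seq("(", "foo"), "(", ")"))         // false: unmatched "("
  }
}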
Example 77
Source File: AbstractAkkaConnection.scala    From scredis   with Apache License 2.0
package scredis.io

import java.util.concurrent.{CountDownLatch, TimeUnit}

import akka.actor._
import com.typesafe.scalalogging.LazyLogging
import scredis.protocol.Request
import scredis.protocol.requests.ConnectionRequests.{Auth, Quit, Select}
import scredis.protocol.requests.ServerRequests.{ClientSetName, Shutdown}

import scala.concurrent.duration._

abstract class AbstractAkkaConnection(
  protected val system: ActorSystem,
  val host: String,
  val port: Int,
  @volatile protected var passwordOpt: Option[String],
  @volatile protected var database: Int,
  @volatile protected var nameOpt: Option[String],
  protected val decodersCount: Int,
  protected val receiveTimeoutOpt: Option[FiniteDuration],
  protected val connectTimeout: FiniteDuration,
  protected val maxWriteBatchSize: Int,
  protected val tcpSendBufferSizeHint: Int,
  protected val tcpReceiveBufferSizeHint: Int,
  protected val akkaListenerDispatcherPath: String,
  protected val akkaIODispatcherPath: String,
  protected val akkaDecoderDispatcherPath: String
) extends Connection with LazyLogging {
  
  private val shutdownLatch = new CountDownLatch(1)
  
  @volatile protected var isShuttingDown = false
  
  override implicit val dispatcher = system.dispatcher
  
  protected val listenerActor: ActorRef
  
  protected def updateState(request: Request[_]): Unit = request match {
    case Auth(password) => if (password.isEmpty) {
      passwordOpt = None
    } else {
      passwordOpt = Some(password)
    }
    case Select(db) => database = db
    case ClientSetName(name) => if (name.isEmpty) {
      nameOpt = None
    } else {
      nameOpt = Some(name)
    }
    case Quit() | Shutdown(_) => isShuttingDown = true
    case _            =>
  }
  
  protected def getPasswordOpt: Option[String] = passwordOpt
  protected def getDatabase: Int = database
  protected def getNameOpt: Option[String] = nameOpt
  
  protected def watchTermination(): Unit = system.actorOf(
    Props(
      classOf[WatchActor],
      listenerActor,
      shutdownLatch
    )
  )
  
  
  def awaitTermination(timeout: Duration = Duration.Inf): Unit = {
    if (timeout.isFinite) {
      shutdownLatch.await(timeout.toMillis, TimeUnit.MILLISECONDS)
    } else {
      shutdownLatch.await()
    }
  }
  
}

class WatchActor(actor: ActorRef, shutdownLatch: CountDownLatch) extends Actor {
  def receive: Receive = {
    case Terminated(_) => {
      shutdownLatch.countDown()
      context.stop(self)
    }
  }
  context.watch(actor)
} 
Example 78
Source File: PubSubGrpcClientBase.scala    From akka-cloudpubsub   with Apache License 2.0
package com.qubit.pubsub.client.grpc

import com.google.api.services.pubsub.PubsubScopes
import com.google.auth.oauth2.GoogleCredentials
import com.google.pubsub.v1.{PublisherGrpc, SubscriberGrpc}
import com.qubit.pubsub.client.PubSubApiConfig
import com.typesafe.scalalogging.LazyLogging
import io.grpc._
import io.grpc.auth.MoreCallCredentials
import io.grpc.netty.{NegotiationType, NettyChannelBuilder}

trait PubSubGrpcClientBase extends LazyLogging {
  def apiConfig: PubSubApiConfig

  private lazy val grpcChannel = createGrpcChannel

  protected def createPublisherStub =
    PublisherGrpc
      .newFutureStub(grpcChannel)
      .withCallCredentials(callCredentials)

  protected def createSubscriberStub =
    SubscriberGrpc
      .newFutureStub(grpcChannel)
      .withCallCredentials(callCredentials)

  private def createGrpcChannel: Channel = {
    logger.info("Creating gRPC channel for: [{}]", apiConfig)
    NettyChannelBuilder
      .forAddress(apiConfig.apiHost, apiConfig.apiPort)
      .negotiationType(if (apiConfig.tlsEnabled) NegotiationType.TLS
      else NegotiationType.PLAINTEXT)
      .build()
  }

  private def callCredentials: CallCredentials = {
    val credentials =
      GoogleCredentials.getApplicationDefault.createScoped(PubsubScopes.all())
    MoreCallCredentials.from(credentials)
  }
}

object PubSubGrpcClientBase {

  sealed trait PubSubRequest extends Product with Serializable {
    def project: String
  }

  final case class ListTopics(override val project: String)
      extends PubSubRequest

} 
Example 79
Source File: RetryPolicyDefaults.scala    From akka-cloudpubsub   with Apache License 2.0
package com.qubit.pubsub.client.retry

import java.util.concurrent.Executors

import com.gilt.gfc.concurrent.ThreadFactoryBuilder
import com.typesafe.scalalogging.LazyLogging
import io.grpc.{Status, StatusRuntimeException}

import scala.concurrent.ExecutionContext
import scala.util.Failure

object RetryPolicyDefaults extends LazyLogging {
  import atmos.dsl._
  import Slf4jSupport._

  import scala.concurrent.duration._

  private val unrecoverableErrorCodes = Set(Status.Code.PERMISSION_DENIED,
                                            Status.Code.UNAUTHENTICATED,
                                            Status.Code.INVALID_ARGUMENT)
  private val rateLimitingErrorCodes =
    Set(Status.Code.RESOURCE_EXHAUSTED, Status.Code.UNAVAILABLE)

  val retryPolicy = retryFor {
    10.attempts
  } using selectedBackoff {
    case Failure(sre: StatusRuntimeException)
        if rateLimitingErrorCodes.contains(sre.getStatus.getCode) =>
      linearBackoff { 50.seconds }
    case _ =>
      exponentialBackoff { 30.seconds } randomized 10.second -> 100.seconds
  } monitorWith {
    logger.underlying
  } onError {
    case sre: StatusRuntimeException
        if unrecoverableErrorCodes.contains(sre.getStatus.getCode) =>
      stopRetrying
  }

  val retryExecCtx = ExecutionContext.fromExecutor(
    Executors.newFixedThreadPool(
      10,
      ThreadFactoryBuilder("retry-pool", "retry-worker").build()
    ))
} 
Example 80
Source File: Retry.scala    From akka-cloudpubsub   with Apache License 2.0
package com.qubit.pubsub

import com.typesafe.scalalogging.LazyLogging

import scala.concurrent.duration.{FiniteDuration, _}
import scala.util.{Random, Try}

object Retry extends LazyLogging {

  def apply[T](operationName: String,
               operation: () => Try[T],
               maxRetries: Int,
               retryDelay: FiniteDuration,
               minJitterSecs: Int,
               maxJitterSecs: Int): Try[T] = {
    var successful: Boolean = false
    var attemptNum: Int = 1
    var attemptResult: Try[T] = operation()

    while ((!successful) && (attemptNum <= maxRetries)) {
      if (attemptResult.isFailure) {
        logger.warn(s"Attempt failed for operation [$operationName]",
                    attemptResult.failed.get)
        val retryWaitTime = (retryDelay * attemptNum) + (Random.nextInt(
            maxJitterSecs - minJitterSecs) + minJitterSecs).seconds
        attemptNum += 1

        logger.info(
          s"Retrying [$operationName] in ${retryWaitTime.toSeconds} seconds (attempt $attemptNum)")
        Try(Thread.sleep(retryWaitTime.toMillis))
        attemptResult = operation()
      } else {
        successful = true
      }
    }
    attemptResult
  }

} 
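A usage sketch of the helper above; the operation name, URL, and tuning values are illustrative.

import scala.concurrent.duration._
import scala.util.Try

object RetryExample {
  def main(args: Array[String]): Unit = {
    val result: Try[String] = Retry(
      operationName = "fetch-greeting",
      operation     = () => Try(scala.io.Source.fromURL("http://example.com").mkString),
      maxRetries    = 3,
      retryDelay    = 2.seconds, // grows linearly with the attempt number, plus 1-5s of jitter
      minJitterSecs = 1,
      maxJitterSecs = 5
    )
    println(result)
  }
}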
Example 81
Source File: Post.scala    From Scala-Design-Patterns-Second-Edition   with MIT License
package com.ivan.nikolov.behavioral.observer

import com.typesafe.scalalogging.LazyLogging

import scala.collection.mutable.ListBuffer

case class Post(user: User, text: String) extends Observable[Post] {
  
  val comments = ListBuffer[Comment]()
  
  def addComment(comment: Comment): Unit = {
    comments.+=:(comment)
    notifyObservers()
  }
}

case class Comment(user: User, text: String)

case class User(name: String) extends Observer[Post] {
  override def handleUpdate(subject: Post): Unit = {
    System.out.println(s"Hey, I'm ${name}. The post got some new comments: ${subject.comments}")
  }
}

object PostExample extends LazyLogging {
  def main(args: Array[String]): Unit = {
    val userIvan = User("Ivan")
    val userMaria = User("Maria")
    val userJohn = User("John")
    
    logger.info("Create a post")
    val post = Post(userIvan, "This is a post about the observer design pattern")
    logger.info("Add a comment")
    post.addComment(Comment(userIvan, "I hope you like the post!"))

    logger.info("John and Maria subscribe to the comments.")
    post.addObserver(userJohn)
    post.addObserver(userMaria)

    logger.info("Add a comment")
    post.addComment(Comment(userIvan, "Why are you so quiet? Do you like it?"))
    logger.info("Add a comment")
    post.addComment(Comment(userMaria, "It is amazing! Thanks!"))
  }
} 
Example 83
Source File: Master.scala    From Scala-Design-Patterns-Second-Edition   with MIT License
package com.ivan.nikolov.scheduler.actors

import java.time.LocalDateTime
import java.util.concurrent.TimeUnit

import akka.actor.{Props, Cancellable, Actor}
import akka.routing.RoundRobinPool
import com.ivan.nikolov.scheduler.actors.messages.{Work, Schedule, Done}
import com.ivan.nikolov.scheduler.config.job.{Daily, Hourly}
import com.typesafe.scalalogging.LazyLogging

import scala.concurrent.duration.Duration
import scala.collection.mutable.ListBuffer
import scala.concurrent.ExecutionContext.Implicits.global

class Master(numWorkers: Int, actorFactory: ActorFactory) extends Actor with LazyLogging {
  val cancelables = ListBuffer[Cancellable]()
  
  val router = context.actorOf(
    Props(actorFactory.createWorkerActor()).withRouter(RoundRobinPool(numWorkers)),
    "scheduler-master-worker-router"
  )
  
  override def receive: Receive = {
    case Done(name, command, jobType, success) =>
      if (success) {
        logger.info("Successfully completed {} ({}).", name, command)
      } else {
        logger.error("Failure! Command {} ({}) returned a non-zero result code.", name, command)
      }
    case Schedule(configs) => 
      configs.foreach {
        case config =>
          val cancellable = this.context.system.scheduler.schedule(
            config.timeOptions.getInitialDelay(LocalDateTime.now(), config.frequency),
            config.frequency match {
              case Hourly => Duration.create(1, TimeUnit.HOURS)
              case Daily => Duration.create(1, TimeUnit.DAYS)
            },
            router,
            Work(config.name, config.command, config.jobType)
          )
          cancelables += cancellable
          logger.info("Scheduled: {}", config)
      }
  }
  
  override def postStop(): Unit = {
    cancelables.foreach(_.cancel())
  }
} 
Example 84
Source File: Worker.scala    From Scala-Design-Patterns-Second-Edition   with MIT License
package com.ivan.nikolov.scheduler.actors

import akka.actor.Actor
import com.ivan.nikolov.scheduler.actors.messages.{Done, Work}
import com.ivan.nikolov.scheduler.config.job.{Sql, Console}
import com.ivan.nikolov.scheduler.dao.DaoService
import com.typesafe.scalalogging.LazyLogging

import sys.process._

class Worker(daoService: DaoService) extends Actor with LazyLogging {
  
  private def doWork(work: Work): Unit = {
    work.jobType match {
      case Console =>
        val result = work.command.! // note - the ! are different methods
        sender ! Done(work.name, work.command, work.jobType, result == 0)
      case Sql =>
        val connection = daoService.getConnection()
        try {
          val statement = connection.prepareStatement(work.command)
          val result: List[String] = daoService.executeSelect(statement) {
            case rs =>
              val metadata = rs.getMetaData
              val numColumns = metadata.getColumnCount
              daoService.readResultSet(rs) {
                case row =>
                  (1 to numColumns).map {
                    case i =>
                      row.getObject(i)
                  }.mkString("\t")
              }
          }
          logger.info("Sql query results: ")
          result.foreach(r => logger.info(r))
          sender ! Done(work.name, work.command, work.jobType, true)
        } finally {
          connection.close()
        }
    }
  }
  
  override def receive: Receive = {
    case w @ Work(name, command, jobType) => doWork(w)
  }
} 
Example 85
Source File: Scheduler.scala    From Scala-Design-Patterns-Second-Edition   with MIT License
package com.ivan.nikolov.scheduler

import akka.actor.{Props, ActorSystem}
import com.ivan.nikolov.scheduler.actors.messages.Schedule
import com.typesafe.scalalogging.LazyLogging

import scala.concurrent.Await
import scala.concurrent.duration.Duration

object Scheduler extends LazyLogging {
  import com.ivan.nikolov.scheduler.registry.ComponentRegistry._
  def main(args: Array[String]): Unit = {
    logger.info("Running migrations before doing anything else.")
    migrationService.runMigrations()
    logger.info("Migrations done!")
    
    val system = ActorSystem("scheduler")
    
    val master = system.actorOf(
      Props(actorFactory.createMasterActor()),
      "scheduler-master"
    )
    
    sys.addShutdownHook({
      logger.info("Awaiting actor system termination.")
      // not great...
      Await.result(system.terminate(), Duration.Inf)
      logger.info("Actor system terminated. Bye!")
    })
    
    master ! Schedule(jobConfigReaderService.readJobConfigs())
    logger.info("Started! Use CTRL+C to exit.")
  }
} 
Example 86
package com.ivan.nikolov.scheduler.services

import java.io.File

import com.ivan.nikolov.scheduler.config.app.AppConfigComponent
import com.ivan.nikolov.scheduler.config.job.{JobTypeSerializer, JobFrequencySerializer, JobConfig}
import com.ivan.nikolov.scheduler.io.IOServiceComponent
import com.typesafe.scalalogging.LazyLogging
import org.json4s._
import org.json4s.jackson.JsonMethods._

trait JobConfigReaderServiceComponent {
  this: AppConfigComponent
    with IOServiceComponent =>
  
  val jobConfigReaderService: JobConfigReaderService
  
  class JobConfigReaderService() extends LazyLogging {
    private val customSerializers = List(
      JobFrequencySerializer,
      JobTypeSerializer
    )
    implicit val formats = DefaultFormats ++ customSerializers + JobConfig.jobConfigFieldSerializer
    
    def readJobConfigs(): List[JobConfig] =
      ioService.getAllFilesWithExtension(appConfigService.configPath, appConfigService.configExtension).flatMap {
        case path => try {
          val config = parse(FileInput(new File(path))).extract[JobConfig]
          Some(config)
        } catch {
          case ex: Throwable =>
            logger.error("Error reading config: {}", path, ex)
            None
        }
        
      }
  }
} 
Example 93
Source File: DataDownloader.scala    From Scala-Design-Patterns-Second-Edition   with MIT License 5 votes vote down vote up
package com.ivan.nikolov.structural.facade

import com.typesafe.scalalogging.LazyLogging

trait DataDownloader extends LazyLogging {
  
  def download(url: String): Array[Byte] = {
    logger.info("Downloading from: {}", url)
    Thread.sleep(5000)
//    {
//      "name": "Ivan",
//      "age": 26
//    }
//    the string below is the Base64 encoded Json above.
    "ew0KICAgICJuYW1lIjogIkl2YW4iLA0KICAgICJhZ2UiOiAyNg0KfQ==".getBytes
  }
} 
Example 94
package com.ivan.nikolov.structural.decorator

import java.io.{BufferedInputStream, InputStreamReader, BufferedReader, ByteArrayOutputStream}
import java.nio.charset.Charset
import java.util.Base64
import java.util.zip.GZIPOutputStream

import com.ivan.nikolov.structural.decorator.common.{AdvancedInputReader, InputReader}
import com.typesafe.scalalogging.LazyLogging

trait CapitalizedInputReaderTrait extends InputReader {
  abstract override def readLines(): Stream[String] = super.readLines().map(_.toUpperCase)
}

trait CompressingInputReaderTrait extends InputReader with LazyLogging {
  abstract override def readLines(): Stream[String] = super.readLines().map {
    case line =>
      val text = line.getBytes(Charset.forName("UTF-8"))
      logger.info("Length before compression: {}", text.length.toString)
      val output = new ByteArrayOutputStream()
      val compressor = new GZIPOutputStream(output)
      try {
        compressor.write(text, 0, text.length)
        val outputByteArray = output.toByteArray
        logger.info("Length after compression: {}", outputByteArray.length.toString)
        new String(outputByteArray, Charset.forName("UTF-8"))
      } finally {
        compressor.close()
        output.close()
      }
  }
}

trait Base64EncoderInputReaderTrait extends InputReader {
  abstract override def readLines(): Stream[String] = super.readLines().map {
    case line => Base64.getEncoder.encodeToString(line.getBytes(Charset.forName("UTF-8")))
  }
}

object StackableTraitsExample {
  def main(args: Array[String]): Unit = {
    val stream = new BufferedReader(
      new InputStreamReader(
        new BufferedInputStream(this.getClass.getResourceAsStream("data.txt"))
      )
    )
    try {
      val reader = new AdvancedInputReader(stream) with CapitalizedInputReaderTrait
      reader.readLines().foreach(println)
    } finally {
      stream.close()
    }
  }
}

object StackableTraitsBigExample {
  def main(args: Array[String]): Unit = {
    val stream = new BufferedReader(
      new InputStreamReader(
        new BufferedInputStream(this.getClass.getResourceAsStream("data.txt"))
      )
    )
    try {
      val reader = new AdvancedInputReader(stream) with CapitalizedInputReaderTrait with Base64EncoderInputReaderTrait with CompressingInputReaderTrait
      reader.readLines().foreach(println)
    } finally {
      stream.close()
    }
  }
} 
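The common InputReader and AdvancedInputReader types live in a separate file; a minimal sketch matching how they are used above, assuming AdvancedInputReader simply streams lines from a BufferedReader:

// sketch of the shared decorator interface assumed by the stackable traits above
import java.io.BufferedReader

trait InputReader {
  def readLines(): Stream[String]
}

class AdvancedInputReader(reader: BufferedReader) extends InputReader {
  override def readLines(): Stream[String] =
    Stream.continually(reader.readLine()).takeWhile(_ != null)
}

With stackable traits, trait linearization means each super call moves one mixin to the left, so in StackableTraitsBigExample every line is capitalized first, then Base64-encoded, then compressed.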
Example 95
Source File: InputReaderDecorator.scala    From Scala-Design-Patterns-Second-Edition   with MIT License 5 votes vote down vote up
package com.ivan.nikolov.structural.decorator

import java.io.{InputStreamReader, BufferedInputStream, ByteArrayOutputStream, BufferedReader}
import java.nio.charset.Charset
import java.util.Base64
import java.util.zip.GZIPOutputStream

import com.ivan.nikolov.structural.decorator.common.{AdvancedInputReader, InputReader}
import com.typesafe.scalalogging.LazyLogging

abstract class InputReaderDecorator(inputReader: InputReader) extends InputReader {
  override def readLines(): Stream[String] = inputReader.readLines()
}

class CapitalizedInputReader(inputReader: InputReader) extends InputReaderDecorator(inputReader) {
  override def readLines(): Stream[String] = super.readLines().map(_.toUpperCase)
}

class CompressingInputReader(inputReader: InputReader) extends InputReaderDecorator(inputReader) with LazyLogging {
  override def readLines(): Stream[String] = super.readLines().map {
    case line =>
      val text = line.getBytes(Charset.forName("UTF-8"))
      logger.info("Length before compression: {}", text.length.toString)
      val output = new ByteArrayOutputStream()
      val compressor = new GZIPOutputStream(output)
      try {
        compressor.write(text, 0, text.length)
        val outputByteArray = output.toByteArray
        logger.info("Length after compression: {}", outputByteArray.length.toString)
        new String(outputByteArray, Charset.forName("UTF-8"))
      } finally {
        compressor.close()
        output.close()
      }
  }
}

class Base64EncoderInputReader(inputReader: InputReader) extends InputReaderDecorator(inputReader) {
  override def readLines(): Stream[String] = super.readLines().map {
    case line => Base64.getEncoder.encodeToString(line.getBytes(Charset.forName("UTF-8")))
  }
}

object DecoratorExample {
  def main(args: Array[String]): Unit = {
    val stream = new BufferedReader(
      new InputStreamReader(
        new BufferedInputStream(this.getClass.getResourceAsStream("data.txt"))
      )
    )
    try {
      val reader = new CapitalizedInputReader(new AdvancedInputReader(stream))
      reader.readLines().foreach(println)
    } finally {
      stream.close()
    }
  }
}

object DecoratorExampleBig {
  def main(args: Array[String]): Unit = {
    val stream = new BufferedReader(
      new InputStreamReader(
        new BufferedInputStream(this.getClass.getResourceAsStream("data.txt"))
      )
    )
    try {
      val reader = new CompressingInputReader(
        new Base64EncoderInputReader(
          new CapitalizedInputReader(
            new AdvancedInputReader(stream)
          )
        )
      )
      reader.readLines().foreach(println)
    } finally {
      stream.close()
    }
  }
} 
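Both decorator variants apply the transformations in the same order: in DecoratorExampleBig the innermost wrapper (CapitalizedInputReader) runs first, so each line is capitalized, then Base64-encoded, and compressed last, matching the stackable-traits linearization in the previous example.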
Example 99
Source File: ConnectionPoolActor.scala    From scala-commons   with MIT License 5 votes vote down vote up
package com.avsystem.commons
package redis.actor

import akka.actor.{Actor, ActorRef, Props}
import com.avsystem.commons.redis.NodeAddress
import com.avsystem.commons.redis.actor.ConnectionPoolActor._
import com.avsystem.commons.redis.config.{ConnectionConfig, NodeConfig}
import com.avsystem.commons.redis.exception.RedisException
import com.typesafe.scalalogging.LazyLogging

import scala.annotation.tailrec

class ConnectionPoolActor(address: NodeAddress, config: NodeConfig, queue: JDeque[QueuedConn])
  extends Actor with LazyLogging {

  import context._

  private val connections = new MHashSet[ActorRef]

  if (config.maxBlockingIdleTime.isFinite) {
    val interval = config.blockingCleanupInterval
    system.scheduler.scheduleWithFixedDelay(interval, interval, self, Cleanup)
  }

  def receive: Receive = {
    case CreateNewConnection if connections.size < config.maxBlockingPoolSize =>
      logger.info(s"Creating new blocking connection to $address")
      val connConfig: ConnectionConfig = config.blockingConnectionConfigs(connections.size)
      val props = Props(new RedisConnectionActor(address, connConfig))
      val connection = connConfig.actorName.fold(actorOf(props))(actorOf(props, _))
      connections += connection
      connection ! RedisConnectionActor.Open(mustInitiallyConnect = false, Promise[Unit]())
      sender() ! NewConnection(connection)
    case CreateNewConnection =>
      sender() ! Full
    case Cleanup =>
      cleanup(System.nanoTime(), config.maxBlockingIdleTime.toNanos)
    case Close(cause, stopSelf) =>
      connections.foreach(_ ! RedisConnectionActor.Close(cause, stopSelf))
      if (stopSelf) {
        stop(self)
      }
  }

  private def cleanup(nowNanos: Long, maxIdleNanos: Long): Unit = {
    @tailrec def loop(dequeue: Boolean): Unit = {
      val last = if (dequeue) queue.pollLast() else queue.peekLast()
      last match {
        case QueuedConn(conn, enqueuedAt) =>
          val stale = (nowNanos - enqueuedAt) > maxIdleNanos
          if (!dequeue && stale) {
            loop(dequeue = true)
          } else if (dequeue && stale) {
            conn ! RedisConnectionActor.Close(new RedisException("Idle blocking connection closed"), stop = true)
            context.stop(conn)
            connections.remove(conn)
            loop(dequeue = false)
          } else if (dequeue && !stale) {
            // unlikely case: we dequeued a different element than the one we peeked at
            queue.addLast(last)
          }
        case null =>
      }
    }
    loop(dequeue = false)
  }
}
object ConnectionPoolActor {
  final case class QueuedConn(conn: ActorRef, enqueuedAt: Long)

  case object CreateNewConnection
  final case class Close(cause: Throwable, stop: Boolean)
  case object Cleanup

  final case class NewConnection(connection: ActorRef)
  case object Full
} 
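JDeque, MHashSet, and Promise are not imported explicitly above; they come in through the chained com.avsystem.commons package object. A rough sketch of what those aliases stand for (assumed, not the library's literal definitions):

// assumed expansions of the commons package-object aliases used above
object CommonsAliasesSketch {
  type JDeque[E] = java.util.Deque[E]
  type MHashSet[A] = scala.collection.mutable.HashSet[A]
  val Promise = scala.concurrent.Promise
}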
Example 100
Source File: RemoveVersion.scala    From databus-maven-plugin   with GNU Affero General Public License v3.0 5 votes vote down vote up
package org.dbpedia.databus

import java.util

import better.files.File
import com.typesafe.scalalogging.LazyLogging
import org.apache.maven.plugin.{AbstractMojo, MojoExecutionException}
import org.apache.maven.plugins.annotations.{LifecyclePhase, Mojo, Parameter}



@Mojo(name = "rm", requiresOnline = true, threadSafe = true)
class RemoveVersion extends Operations {


  @throws[MojoExecutionException]
  override def execute(): Unit = {

    getLog.info("the following version folders will be deleted:")

    if (isParent()) {
      skipmodules.skipmodules = true
      modules.forEach(m => {
        getLog.info(
          s"""${listFiles(m).size} files in ${File(s"$m/$version").toJava.getAbsoluteFile}""".stripMargin)
      })

      getLog.info("proceed? [y/N]")
      val c: String = scala.io.StdIn.readLine()
      if (c.trim.equalsIgnoreCase("y")) {
        getLog.info("deleting")
        modules.forEach(m => {
          //DELETE
          val vdir = File(s"$m/$version")
          vdir.delete(true)
          getLog.info(s"${!vdir.isDirectory} $vdir")
        })


      }else {
        println(s"aborted, read '$c'")
      }


    } else {
      if (!skipmodules.skipmodules) {
        getLog.info(
          s"""##########
             |databus:rm works only on group to delete current version of all artifacts, use:
             |rm -r $artifactId/$version
           """.stripMargin)
      }
    }
  }

} 
Example 101
Source File: Operations.scala    From databus-maven-plugin   with GNU Affero General Public License v3.0 5 votes vote down vote up
package org.dbpedia.databus

import java.util

import better.files.File
import com.typesafe.scalalogging.LazyLogging
import org.apache.maven.plugin.AbstractMojo
import org.apache.maven.plugins.annotations.Parameter

object skipmodules {
  var skipmodules = false
}

abstract class Operations extends AbstractMojo with LazyLogging with Properties {

  @Parameter(
    property = "modules",
    defaultValue = "${project.modules}"
  )
  val modules: util.ArrayList[String] = new util.ArrayList[String]

  def listFiles(): List[File] = {
    val dir = File(s"$version")
    if (dir.isDirectory) {
      dir.list.toList
    } else {
      List[File]()
    }
  }

  def listFiles(artifact: String): List[File] = {
    val dir = File(s"$artifact/$version")
    if (dir.isDirectory) {
      dir.list.toList
    } else {
      List[File]()
    }

  }

} 
Example 102
Source File: PlatformDiscovery.scala    From Conseil   with Apache License 2.0 5 votes vote down vote up
package tech.cryptonomic.conseil.api.routes.platform.discovery

import akka.http.scaladsl.model.StatusCodes
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server.Route
import com.typesafe.scalalogging.LazyLogging
import endpoints.akkahttp
import endpoints.algebra.Documentation
import tech.cryptonomic.conseil.api.metadata.MetadataService
import tech.cryptonomic.conseil.common.generic.chain.DataTypes.AttributesValidationError
import tech.cryptonomic.conseil.common.metadata.{EntityPath, NetworkPath, PlatformPath}


  val route: Route =
    concat(
      platformsRoute,
      networksRoute,
      entitiesRoute,
      attributesRoute,
      attributesValuesRoute,
      attributesValuesWithFilterRoute
    )

  override def validatedAttributes[A](
      response: A => Route,
      invalidDocs: Documentation
  ): Either[List[AttributesValidationError], A] => Route = {
    case Left(errors) =>
      complete(StatusCodes.BadRequest -> s"Errors: \n${errors.mkString("\n")}")
    case Right(success) =>
      response(success)
  }
} 
Example 103
Source File: TezosApi.scala    From Conseil   with Apache License 2.0 5 votes vote down vote up
package tech.cryptonomic.conseil.api.routes.platform

import akka.actor.ActorSystem
import com.typesafe.scalalogging.LazyLogging
import tech.cryptonomic.conseil.api.config.ConseilConfiguration
import tech.cryptonomic.conseil.api.metadata.MetadataService
import tech.cryptonomic.conseil.api.routes.platform.data.ApiDataRoutes
import tech.cryptonomic.conseil.api.routes.platform.data.tezos.{TezosDataOperations, TezosDataRoutes}
import tech.cryptonomic.conseil.common.config.MetadataConfiguration

import scala.concurrent.ExecutionContext

class TezosApi(metadataOverrides: MetadataConfiguration, server: ConseilConfiguration)(
    implicit system: ActorSystem
) extends Api
    with LazyLogging {

  implicit private val apiDispatcher: ExecutionContext =
    system.dispatchers.lookup("akka.http.dispatcher")

  private lazy val dataOperations: TezosDataOperations = new TezosDataOperations()

  override def dataEndpoint(metadataService: MetadataService): ApiDataRoutes =
    TezosDataRoutes(metadataService, metadataOverrides, dataOperations, server.maxQueryResultSize)
} 
Example 104
Source File: Conseil.scala    From Conseil   with Apache License 2.0 5 votes vote down vote up
package tech.cryptonomic.conseil.api

import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.stream.ActorMaterializer
import com.typesafe.scalalogging.LazyLogging
import de.heikoseeberger.akkahttpcirce.FailFastCirceSupport
import tech.cryptonomic.conseil.api.config.{ConseilAppConfig, ConseilConfiguration}
import tech.cryptonomic.conseil.api.util.Retry.retry
import tech.cryptonomic.conseil.common.config.Platforms.PlatformsConfiguration
import tech.cryptonomic.conseil.common.config._

import scala.concurrent.duration._
import scala.concurrent.{Await, ExecutionContext, ExecutionContextExecutor}
import scala.language.postfixOps
import scala.util.Failure

object Conseil extends App with LazyLogging with ConseilAppConfig with FailFastCirceSupport with ConseilMainOutput {

  loadApplicationConfiguration(args) match {
    case Left(errors) =>
    //nothing to do
    case Right(config) =>
      implicit val system: ActorSystem = ActorSystem("conseil-system")
      implicit val materializer: ActorMaterializer = ActorMaterializer()
      implicit val executionContext: ExecutionContextExecutor = system.dispatcher

      val retries = if (config.failFast.on) Some(0) else None

      val serverBinding =
        retry(maxRetry = retries, deadline = Some(config.server.startupDeadline fromNow))(ConseilApi.create(config)).andThen {
          case Failure(error) =>
            logger.error(
              "The server was not started correctly, I failed to create the required Metadata service",
              error
            )
            Await.ready(system.terminate(), 10.seconds)
        }.flatMap(
          runServer(_, config.server, config.platforms, config.verbose)
        )

      sys.addShutdownHook {
        serverBinding
          .flatMap(_.unbind().andThen { case _ => logger.info("Server stopped...") })
          .andThen {
            case _ => system.terminate()
          }
          .onComplete(_ => logger.info("We're done here, nothing else to see"))
      }

  }

  
  def runServer(
      api: ConseilApi,
      server: ConseilConfiguration,
      platforms: PlatformsConfiguration,
      verbose: VerboseOutput
  )(implicit executionContext: ExecutionContext, system: ActorSystem, mat: ActorMaterializer) = {
    val bindingFuture = Http().bindAndHandle(api.route, server.hostname, server.port)
    displayInfo(server)
    if (verbose.on) displayConfiguration(platforms)
    bindingFuture

  }
} 
Example 105
Source File: Lorre.scala    From Conseil   with Apache License 2.0 5 votes vote down vote up
package tech.cryptonomic.conseil.indexer

import com.typesafe.scalalogging.LazyLogging
import tech.cryptonomic.conseil.common.config.Platforms.{TezosConfiguration, UnknownPlatformConfiguration}
import tech.cryptonomic.conseil.indexer.config.LorreAppConfig.LORRE_FAILURE_IGNORE_VAR
import tech.cryptonomic.conseil.indexer.config.LorreAppConfig
import tech.cryptonomic.conseil.indexer.logging.LorreInfoLogging
import tech.cryptonomic.conseil.indexer.tezos.TezosIndexer

import scala.concurrent.Await
import scala.concurrent.duration._


object Lorre extends App with LazyLogging with LorreAppConfig with LorreInfoLogging {

  //reads all configuration at startup, will only complete if all values are found
  val config = loadApplicationConfiguration(args)

  //stop if conf is not available
  config.left.foreach { _ =>
    sys.exit(1)
  }

  //unsafe call, will only be reached if config is a Right, otherwise the merge will fail
  val LorreAppConfig.CombinedConfiguration(
    lorreConf,
    platformConf,
    callsConf,
    streamingClientConf,
    batchingConf,
    verbose
  ) = config.merge

  //whatever happens we try to clean up
  sys.addShutdownHook(shutdown())

  //creates the indexer based on the given configuration, which is picked based on platform and network from argument variables
  val indexer = platformConf match {
    case conf: TezosConfiguration =>
      logger.info("Initializing indexer for Tezos Blockchain.")
      TezosIndexer.fromConfig(lorreConf, conf, callsConf, streamingClientConf, batchingConf)
    case _: UnknownPlatformConfiguration =>
      logger.error("Could not initialize indexer. Unsupported platform has been read from configuration file.")
      sys.exit(1)
  }

  try {
    //displaying information for better debugging
    displayInfo(indexer.platform.name, platformConf.network)
    if (verbose.on)
      displayConfiguration(
        indexer.platform,
        platformConf,
        lorreConf,
        (LORRE_FAILURE_IGNORE_VAR, sys.env.get(LORRE_FAILURE_IGNORE_VAR))
      )

    //actual start of the indexer
    indexer.start()
  } finally shutdown()

  private def shutdown(): Unit = {
    logger.info("Doing clean-up.")
    Await.result(indexer.stop(), 10.seconds)
    logger.info("All things closed.")
  }

} 
Example 106
Source File: TezosFeeOperations.scala    From Conseil   with Apache License 2.0 5 votes vote down vote up
package tech.cryptonomic.conseil.indexer.tezos

import com.typesafe.scalalogging.LazyLogging
import slick.dbio.DBIOAction
import tech.cryptonomic.conseil.indexer.tezos.{TezosDatabaseOperations => TezosDb}

import scala.concurrent.{ExecutionContext, Future}
import scala.util.{Failure, Success}


  def processTezosAverageFees(numberOfFeesAveraged: Int)(implicit ex: ExecutionContext): Future[Option[Int]] = {
    logger.info("Processing latest Tezos fee data...")
    val computeAndStore = for {
      fees <- DBIOAction.sequence(operationKinds.map(TezosDb.calculateAverageFees(_, numberOfFeesAveraged)))
      dbWrites <- TezosDb.writeFees(fees.collect { case Some(fee) => fee })
    } yield dbWrites

    db.run(computeAndStore).andThen {
      case Success(Some(written)) => logger.info("Wrote {} average fees to the database.", written)
      case Success(None) => logger.info("Wrote average fees to the database.")
      case Failure(e) => logger.error("Could not write average fees to the database because", e)
    }

  }

} 
Example 107
Source File: BlocksProcessor.scala    From Conseil   with Apache License 2.0 5 votes vote down vote up
package tech.cryptonomic.conseil.indexer.tezos.processing

import com.typesafe.scalalogging.LazyLogging
import scala.util.{Failure, Success, Try}
import scala.concurrent.{ExecutionContext, Future}
import slick.jdbc.PostgresProfile.api._
import cats.implicits._

import tech.cryptonomic.conseil.indexer.tezos.{
  TezosNamesOperations,
  TezosNodeOperator,
  TezosGovernanceOperations,
  TezosDatabaseOperations => TezosDb
}
import tech.cryptonomic.conseil.common.tezos.TezosTypes.{Block, Voting}
import tech.cryptonomic.conseil.common.tezos.TezosTypes.Syntax._
import tech.cryptonomic.conseil.indexer.tezos.michelson.contracts.TokenContracts
import tech.cryptonomic.conseil.indexer.tezos.michelson.contracts.TNSContract


  private def processBlocksForGovernance(bakerRollsByBlock: Map[Block, List[Voting.BakerRolls]])(
      implicit ec: ExecutionContext
  ): Future[Unit] =
    TezosGovernanceOperations
      .extractGovernanceAggregations(db, nodeOperator)(bakerRollsByBlock)
      .flatMap(aggregates => db.run(TezosDb.insertGovernance(aggregates)))
      .void

} 
Example 108
Source File: TezosNamesOperations.scala    From Conseil   with Apache License 2.0 5 votes vote down vote up
package tech.cryptonomic.conseil.indexer.tezos

import cats.Show
import cats.implicits._
import com.typesafe.scalalogging.LazyLogging
import slick.dbio.DBIO
import tech.cryptonomic.conseil.common.tezos.TezosOptics.Operations.extractAppliedTransactions
import tech.cryptonomic.conseil.common.tezos.TezosTypes.{AccountId, Block, ContractId, ScriptId}
import tech.cryptonomic.conseil.indexer.tezos.michelson.contracts.TNSContract
import tech.cryptonomic.conseil.indexer.tezos.michelson.contracts.TNSContract.{
  BigMapId,
  LookupMapReference,
  Name,
  NameRecord
}
import tech.cryptonomic.conseil.common.util.JsonUtil.JsonString

import scala.concurrent.{ExecutionContext, Future}


  private val extractNameMapsReferences: List[Block] => Map[Block, List[LookupMapReference]] =
    blocks =>
      blocks.map { b =>
        val refs = extractAppliedTransactions(b).filter {
          case Left(op) => tnsContracts.isKnownRegistrar(op.destination)
          case Right(op) => tnsContracts.isKnownRegistrar(op.destination)
        }.map(tnsContracts.readLookupMapReference).flattenOption

        b -> refs

      }.toMap

} 
Example 109
Source File: TransitionOrder.scala    From apalache   with Apache License 2.0 5 votes vote down vote up
package at.forsyte.apalache.tla.assignments

import at.forsyte.apalache.tla.lir.UID
import at.forsyte.apalache.tla.lir.src.{SourceLocation, SourcePosition, SourceRegion}
import at.forsyte.apalache.tla.lir.storage.SourceLocator
import com.typesafe.scalalogging.LazyLogging

class TransitionOrder( sourceLocator : SourceLocator ) extends LazyLogging {
  
  private def seqLocLT( a : Seq[SourceLocation], b : Seq[SourceLocation] ) : Boolean =
    lexCmpSeqs( locCmp )( a, b ) < 0

  private def getSortedLocs( s : SymbTrans ) : Seq[SourceLocation] = {
    def findLoc(uid: UID): SourceLocation = {
      sourceLocator.sourceOf(uid) match {
        case Some(loc) => loc
        case None =>
          // degrading without throwing an exception
          logger.warn(s"Missing source location for UID = $UID")
          SourceLocation("unknown", new SourceRegion(SourcePosition(1), SourcePosition(2)))
      }
    }

    s._1 map findLoc sortWith locLT
  }

  private def transLT( a: SymbTrans, b: SymbTrans ): Boolean = seqLocLT( getSortedLocs( a ), getSortedLocs( b ) )
} 
Example 110
Source File: DfsStrategy.scala    From apalache   with Apache License 2.0 5 votes vote down vote up
package at.forsyte.apalache.tla.bmcmt.search

import at.forsyte.apalache.tla.bmcmt.CheckerInput
import at.forsyte.apalache.tla.bmcmt.search.SearchStrategy.{BacktrackOnce, Command, Finish}
import com.typesafe.scalalogging.LazyLogging

import scala.util.Random

class DfsStrategy(input: CheckerInput, stepsBound: Int, randomize: Boolean) extends SearchStrategy with LazyLogging {
  var stepNo = 0
  var terminate = false
  var unexploredIndices: Seq[Seq[Int]] = Seq()

  override def getCommand: Command = {
    if (terminate) {
      SearchStrategy.Finish()
    } else if (stepNo > stepsBound) {
      logger.debug(s"DFS: backtrack, bound reached")
      unexploredIndices = Seq() +: unexploredIndices // add an empty sequence for the response handler to pop off the stack
      BacktrackOnce()
    } else if (stepNo == 0) {
      if (unexploredIndices.isEmpty) {
        val allIndices = shuffleIfNeeded(input.initTransitions.indices)
        unexploredIndices = Seq(allIndices.tail)
        // start with the head
        val hd = allIndices.head
        logger.debug(s"DFS: step $stepNo, transition $hd")
        SearchStrategy.NextStep(stepNo, Seq(hd))
      } else {
        assert(unexploredIndices.length == 1)
        if (unexploredIndices.head.isEmpty) {
          // all transitions at level 0 were enumerated, which means that all bounded paths were enumerated
          Finish()
        } else {
          val (hd, tl) = (unexploredIndices.head.head, unexploredIndices.head.tail)
          unexploredIndices = Seq(tl)
          // explore the next transition
          logger.debug(s"DFS: step $stepNo, transition $hd")
          SearchStrategy.NextStep(stepNo, Seq(hd), popContext = true)
        }
      }
    } else { // step > 0
      if (unexploredIndices.length - 1 < stepNo) {
        // explore all transitions at this depth
        val allIndices = shuffleIfNeeded(input.nextTransitions.indices)
        unexploredIndices = allIndices.tail +: unexploredIndices
        // start with the head
        val hd = allIndices.head
        logger.debug(s"DFS: step $stepNo, transition $hd")
        SearchStrategy.NextStep(stepNo, Seq(hd))
      } else {
        val unexplored = unexploredIndices.head
        if (unexplored.isEmpty) {
          // all transitions at this level were enumerated, backtrack
          logger.debug(s"DFS: backtrack from step $stepNo")
          BacktrackOnce()
        } else {
          val (hd, tl) = (unexplored.head, unexplored.tail)
          unexploredIndices = tl +: unexploredIndices.tail
          // explore the i-th transition
          logger.debug(s"DFS: step $stepNo, transition $hd")
          SearchStrategy.NextStep(stepNo, Seq(hd), popContext = true)
        }
      }
    }
  }

  override def registerResponse(response: SearchStrategy.Response): Unit = {
    response match {
      case SearchStrategy.NextStepFired() =>
        logger.debug(s"DFS response: fired")
        stepNo += 1

      case SearchStrategy.Backtracked() =>
        logger.debug(s"DFS response: backtracked")
        stepNo -= 1
        unexploredIndices = unexploredIndices.tail

      case SearchStrategy.NextStepDisabled() =>
        logger.debug(s"DFS response: disabled")
        () // nothing to do, just continue at this level
    }
  }

  private def shuffleIfNeeded(transitions: Seq[Int]): Seq[Int] = {
    if (randomize) {
      Random.shuffle(transitions)
    } else {
      transitions
    }
  }
} 
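A hypothetical driver loop for the command/response protocol above, assuming the commands are case classes as the pattern constructions in DfsStrategy suggest; the transition-firing step is a placeholder for the real SMT machinery:

// hedged sketch: driving DfsStrategy until it issues Finish
import at.forsyte.apalache.tla.bmcmt.search.SearchStrategy._

def drive(strategy: DfsStrategy): Unit = {
  var finished = false
  while (!finished) {
    strategy.getCommand match {
      case NextStep(stepNo, transitionNos, _) =>
        // fire one of transitionNos at depth stepNo (placeholder),
        // then report whether the step was enabled
        strategy.registerResponse(NextStepFired())
      case BacktrackOnce() =>
        strategy.registerResponse(Backtracked())
      case Finish() =>
        finished = true
    }
  }
}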
Example 111
Source File: VCGenerator.scala    From apalache   with Apache License 2.0 5 votes vote down vote up
package at.forsyte.apalache.tla.bmcmt

import at.forsyte.apalache.tla.lir._
import at.forsyte.apalache.tla.lir.convenience.tla
import at.forsyte.apalache.tla.lir.oper.TlaBoolOper
import at.forsyte.apalache.tla.lir.transformations.TransformationTracker
import at.forsyte.apalache.tla.lir.transformations.standard.DeepCopy
import at.forsyte.apalache.tla.pp.NormalizedNames
import com.typesafe.scalalogging.LazyLogging


  def gen(module: TlaModule, invName: String): TlaModule = {
    module.declarations.find(_.name == invName) match {
      case Some(inv: TlaOperDecl) if inv.formalParams.isEmpty =>
        new TlaModule(module.name, module.declarations ++ introConditions(inv.body))

      case Some(decl: TlaOperDecl) =>
        val message = s"Expected a nullary operator $invName, found ${decl.formalParams.length} arguments"
        throw new MalformedTlaError(message, decl.body)

      case Some(decl) =>
        val message = s"Expected a nullary operator $invName, found ${decl.getClass.getSimpleName}"
        throw new MalformedTlaError(message, NullEx)

      case None =>
        throw new MalformedTlaError(s"Invariant candidate $invName not found", NullEx)
    }
  }

  private def introConditions(inputInv: TlaEx): Seq[TlaOperDecl] = {
    def mapToDecls(smallInv: TlaEx, index: Int): Seq[TlaOperDecl] = {
      val deepCopy = DeepCopy(tracker)
      val positive = TlaOperDecl(NormalizedNames.VC_INV_PREFIX + index, List(), deepCopy(smallInv))
      val negative = TlaOperDecl(NormalizedNames.VC_NOT_INV_PREFIX + index, List(), tla.not(deepCopy(smallInv)))
      Seq(positive, negative)
    }

    val fragments = splitInv(Seq(), inputInv)
    logger.info(s"  > VCGen produced ${fragments.length} verification condition(s)")
    fragments.zipWithIndex.flatMap { case (e, i) => mapToDecls(e, i) }
  }

  private def splitInv(universalsRev: Seq[(String, TlaEx)], inv: TlaEx): Seq[TlaEx] = {
    inv match {
      case OperEx(TlaBoolOper.forall, NameEx(varName), set, pred) =>
        // \A x \in S: B /\ C is equivalent to (\A x \in S: B) /\ (\A x \in S: C)
        splitInv((varName, set) +: universalsRev, pred)

      case OperEx(TlaBoolOper.and, args @ _*) =>
        // we split A /\ B into the set {A, B}
        args.flatMap(splitInv(universalsRev, _))

      case _ =>
        // nothing to split, just add quantifiers that were collected on the way
        Seq(decorateWithUniversals(universalsRev, inv))
    }
  }

  private def decorateWithUniversals(universalsRev: Seq[(String, TlaEx)], expr: TlaEx): TlaEx = {
    universalsRev match {
      case Nil =>
        expr

      case (name, set) :: tail =>
        decorateWithUniversals(tail, tla.forall(NameEx(name), set, expr))
    }
  }
} 
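For intuition: an invariant of the form \A x \in S: P /\ Q is split into two verification conditions. A sketch using the same convenience builder (P, Q, S are illustrative names, and tla.name is assumed to build a NameEx):

// illustrative input: Inv == \A x \in S: P /\ Q
val inv = tla.forall(
  tla.name("x"),
  tla.name("S"),
  tla.and(tla.name("P"), tla.name("Q"))
)
// splitInv(Seq(), inv) yields the fragments
//   \A x \in S: P   and   \A x \in S: Q
// and introConditions emits a positive and a negated declaration for each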
Example 112
Source File: SkolemizationMarker.scala    From apalache   with Apache License 2.0 5 votes vote down vote up
package at.forsyte.apalache.tla.bmcmt.analyses

import at.forsyte.apalache.tla.lir._
import at.forsyte.apalache.tla.lir.oper.{BmcOper, TlaBoolOper, TlaControlOper}
import at.forsyte.apalache.tla.lir.transformations.{TlaExTransformation, TransformationTracker}
import com.google.inject.Inject
import com.typesafe.scalalogging.LazyLogging
import at.forsyte.apalache.tla.lir.convenience._


class SkolemizationMarker @Inject()(tracker: TransformationTracker)
    extends TlaExTransformation with LazyLogging {

  override def apply(e: TlaEx): TlaEx = { transform(e) }

  def transform: TlaExTransformation = tracker.track {
    case OperEx(TlaBoolOper.exists, name, set, pred) =>
      OperEx(BmcOper.skolem, tla.exists(name, set, transform(pred)))

    case OperEx(TlaBoolOper.forall, name, set, pred) =>
      // it is fine to skolemize existentials under \A, as \A is translated into a conjunction
      tla.forall(name, set, transform(pred))

    case ex @ OperEx(TlaBoolOper.not, _) =>
      ex // stop here. This should be a leaf (and rare) expression, as we are dealing with the NNF.

    case OperEx(TlaBoolOper.and, args@_*) =>
      tla.and(args map transform :_*)

    case OperEx(TlaBoolOper.or, args@_*) =>
      tla.or(args map transform :_*)

    case OperEx(TlaControlOper.ifThenElse, cond, left, right) =>
      // try to identify existentials in the both arms
      tla.ite(cond, transform(left), transform(right))
      // We omit skolemization of the existentials in the predicate,
      // as the predicate is used in both the negated and non-negated forms.
      // Effectively, IF-THEN-ELSE requires both \E and \A forms

    case LetInEx(body, defs @ _*) =>
      // at this point, we only have nullary let-in definitions
      def mapDef(df: TlaOperDecl) = {
        TlaOperDecl(df.name, df.formalParams, transform(df.body))
      }
      LetInEx(transform(body), defs map mapDef :_*)

    case OperEx(oper, args @ _*) =>
      // try to descend in the children, which may contain Boolean operations, e.g., { \E x \in S: P }
      OperEx(oper, args map transform :_*)

    case terminal =>
      terminal // terminal expression, stop here
  }

} 
Example 113
Source File: HintFinder.scala    From apalache   with Apache License 2.0 5 votes vote down vote up
package at.forsyte.apalache.tla.bmcmt.analyses

import at.forsyte.apalache.tla.lir._
import at.forsyte.apalache.tla.lir.oper.TlaBoolOper
import com.google.inject.Inject
import com.typesafe.scalalogging.LazyLogging


class HintFinder @Inject()(val hintsStore: FormulaHintsStoreImpl) extends LazyLogging {
  def introHints(ex: TlaEx): Unit = ex match {
    case OperEx(TlaBoolOper.exists, _, _, quantifiedEx) =>
      introHints(quantifiedEx)

    case OperEx(TlaBoolOper.forall, _, _, quantifiedEx) =>
      introHints(quantifiedEx)

    case OperEx(TlaBoolOper.and, args@_*) =>
      hintsStore.store.put(ex.ID, FormulaHintsStore.HighAnd())
      args foreach introHints

    case _ =>
      () // do not explore any further
  }
} 
Example 114
Source File: SubstTranslator.scala    From apalache   with Apache License 2.0 5 votes vote down vote up
package at.forsyte.apalache.tla.imp

import at.forsyte.apalache.tla.imp.src.SourceStore
import at.forsyte.apalache.tla.lir._
import at.forsyte.apalache.tla.lir.oper.{TlaActionOper, TlaTempOper}
import com.typesafe.scalalogging.LazyLogging
import tla2sany.semantic._


class SubstTranslator(sourceStore: SourceStore, context: Context) extends LazyLogging {

  def translate(substInNode: SubstInNode, body: TlaEx): TlaEx = {
    subExpr(mkRenaming(substInNode), body)
  }

  private def subExpr(renaming: Map[String, TlaEx], ex: TlaEx): TlaEx = {
    def subRec(ex: TlaEx): TlaEx = ex match {
      case NameEx(name) =>
        renaming.getOrElse(name, NameEx(name))

      case LetInEx( body, defs@_* ) =>
        def subDecl( d : TlaOperDecl ) = d.copy( body = subRec( d.body ) )
        LetInEx( subRec(body), defs map subDecl :_*)

      case OperEx(op, args@_*) =>
        if (renaming.nonEmpty
          && Seq(TlaActionOper.enabled, TlaActionOper.composition, TlaTempOper.leadsTo).exists(_.name == op.name)) {
          // TODO: find out how to deal with ENABLED and other tricky operators
          logger.warn("Substitution of %s needs care. The current implementation may fail to work.".format(op.name))
        }
        OperEx(op, args map subRec: _*)

      case d => d
    }

    subRec(ex)
  }

  private def mkRenaming(substInNode: SubstInNode): Map[String, TlaEx] = {
    val exprTranslator = ExprOrOpArgNodeTranslator(sourceStore, context, OutsideRecursion())

    def eachSubst(s: Subst): (String, TlaEx) = {
      val replacement = exprTranslator.translate(s.getExpr)
      // TODO: what if a constant happens to be an operator?
      if (s.getOp.getKind != ASTConstants.ConstantDeclKind && s.getOp.getKind != ASTConstants.VariableDeclKind) {
        throw new SanyImporterException("Expected a substituted name %s to be a CONSTANT or a VARIABLE, found kind %d"
          .format(s.getOp.getName, s.getOp.getKind))
      }
      // As all declarations have unique names, it should be sufficient to map the name to the expression.
      // SANY should have checked the syntactic and semantic rules for the substitution.
      s.getOp.getName.toString -> replacement
    }

    Map(substInNode.getSubsts map eachSubst: _*)
  }
}

object SubstTranslator {
  def apply(sourceStore: SourceStore, context: Context) : SubstTranslator = {
    new SubstTranslator(sourceStore, context)
  }
} 
Example 115
Source File: ConstAndDefRewriter.scala    From apalache   with Apache License 2.0 5 votes vote down vote up
package at.forsyte.apalache.tla.pp

import at.forsyte.apalache.tla.lir._
import at.forsyte.apalache.tla.lir.oper.TlaOper
import at.forsyte.apalache.tla.lir.transformations.standard.{ModuleByExTransformer, ReplaceFixed}
import at.forsyte.apalache.tla.lir.transformations.{TlaModuleTransformation, TransformationTracker}
import com.typesafe.scalalogging.LazyLogging


class ConstAndDefRewriter(tracker: TransformationTracker) extends TlaModuleTransformation with LazyLogging {
  override def apply(mod: TlaModule): TlaModule = {
    val overrides = findOverrides(mod.operDeclarations)

    def transformDef: TlaDecl => TlaDecl = {
      case TlaConstDecl(name) if overrides.contains(name) =>
        val overridingDef = overrides(name)
        if (overridingDef.formalParams.nonEmpty) {
          val nargs = overridingDef.formalParams.size
          val msg = s"Replacing constant $name with an operator body that has $nargs parameters"
          logger.error(msg)
          logger.error("If you need support for n-ary CONSTANTS, write a feature request.")
          throw new OverridingError(msg, overridingDef.body)
        } else {
          logger.info(s"Replaced CONSTANT $name with OVERRIDE_$name")
          TlaOperDecl(name, List(), overridingDef.body)
        }

      case df @ TlaOperDecl(name, dfParams, _) if overrides.contains(name) =>
        val overridingDef = overrides(name)
        if (overridingDef.formalParams.size != dfParams.size) {
          val odNargs = overridingDef.formalParams.size
          val dfNargs = dfParams.size
          val msg = s"Replacing operator $name ($dfNargs) with an operator ${overridingDef.name} that has $odNargs parameters"
          throw new OverridingError(msg, overridingDef.body)
        } else {
          logger.info(s"Replaced operator $name with OVERRIDE_$name")
          TlaOperDecl(name, overridingDef.formalParams, overridingDef.body)
        }

      case df @ TlaVarDecl(name) if overrides.contains(name) =>
        val msg = s"Trying to replace variable $name with an operator OVERRIDE_$name. Use INSTANCE ... WITH"
        throw new OverridingError(msg, NullEx)

      case df => df // keep the definition intact
    }

    // substitute the constant definitions and operator definitions with the replacement operators
    val transformed = mod.declarations map transformDef
    val filtered = transformed filter (!_.name.startsWith(ConstAndDefRewriter.OVERRIDE_PREFIX))

    // Importantly, for every constant c, replace NameEx(c) with OperEx(TlaOper.apply, replacement).
    // This is needed as we distinguish the operator calls from constant and variable use.

    def replaceConstWithCall(mod: TlaModule, name: String): TlaModule = {
      val xform = ReplaceFixed(NameEx(name), OperEx(TlaOper.apply, NameEx(name)), tracker)
      val moduleXform = ModuleByExTransformer(xform)
      moduleXform(mod)
    }

    val replacedConsts = mod.declarations.collect { case TlaConstDecl(name) if overrides.contains(name) => name }
    val replaced = replacedConsts.foldLeft(new TlaModule(mod.name, filtered))(replaceConstWithCall)
    replaced
  }

  private def findOverrides(defs: Seq[TlaDecl]): Map[String, TlaOperDecl] = {
    val overrides =
      defs.collect {
        case df: TlaOperDecl if df.name.startsWith(ConstAndDefRewriter.OVERRIDE_PREFIX) =>
          df.name.substring(ConstAndDefRewriter.OVERRIDE_PREFIX.length) -> df
      }

    Map(overrides :_*)
  }
}

object ConstAndDefRewriter {
  val OVERRIDE_PREFIX = "OVERRIDE_"
} 
Example 116
Source File: PassChainExecutor.scala    From apalache   with Apache License 2.0 5 votes vote down vote up
package at.forsyte.apalache.infra.passes

import com.google.inject.Inject
import com.google.inject.name.Named
import com.typesafe.scalalogging.LazyLogging



class PassChainExecutor @Inject()(val options: WriteablePassOptions,
                                  @Named("InitialPass") val initialPass: Pass)
  extends LazyLogging {

  def run(): Option[Pass] = {
    def exec(seqNo: Int, passToRun: Pass): Option[Pass] = {
      logger.info("PASS #%d: %s".format(seqNo, passToRun.name))
      val result = passToRun.execute()
      val outcome = if (result) "[OK]" else "[FAIL]"
      logger.debug("PASS #%d: %s %s".format(seqNo, passToRun.name, outcome))
      if (!result) {
        None // return the negative result
      } else {
        val nextPass = passToRun.next()
        if (nextPass.isDefined) {
          exec(1 + seqNo, nextPass.get) // call the next pass in line
        } else {
          Some(passToRun) // finished
        }
      }
    }

    exec(0, initialPass)
  }
} 
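The executor is assembled via Guice (note the @Inject and @Named annotations); a rough usage sketch, with the module configuration elided:

// hedged sketch: obtaining and running the executor from a Guice injector
import com.google.inject.Guice

val injector = Guice.createInjector( /* module binding WriteablePassOptions and the "InitialPass" Pass */ )
val executor = injector.getInstance(classOf[PassChainExecutor])

executor.run() match {
  case Some(lastPass) => println(s"All passes succeeded; finished with ${lastPass.name}")
  case None           => println("A pass reported failure")
}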
Example 117
Source File: SeedElasticSearch.scala    From scaladex   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package ch.epfl.scala.index.data.elastic

import akka.actor.ActorSystem
import akka.stream.Materializer

import build.info.BuildInfo

import ch.epfl.scala.index.data.DataPaths
import ch.epfl.scala.index.data.github.GithubDownload
import ch.epfl.scala.index.data.maven.PomsReader
import ch.epfl.scala.index.data.ProgressBar
import ch.epfl.scala.index.data.project._
import ch.epfl.scala.index.search.DataRepository

import com.typesafe.scalalogging.LazyLogging

import scala.concurrent.duration._
import scala.concurrent.{Await, ExecutionContext}
import scala.util.Success
import ch.epfl.scala.index.model.Project
import ch.epfl.scala.index.model.Release
import ch.epfl.scala.index.model.release.ScalaDependency

class SeedElasticSearch(
    paths: DataPaths,
    githubDownload: GithubDownload,
    dataRepository: DataRepository
)(
    implicit val ec: ExecutionContext
) extends LazyLogging {

  def run(): Unit = {

    val resetIndex = for {
      _ <- dataRepository.deleteAll()
      _ = logger.info("creating index")
      _ <- dataRepository.create()
    } yield ()

    Await.result(resetIndex, Duration.Inf)

    logger.info("loading update data")
    val projectConverter = new ProjectConvert(paths, githubDownload)
    val allData = projectConverter.convertAll(PomsReader.loadAll(paths), Map())

    var count = 0
    allData.foreach {
      case (project, releases, dependencies) =>
        logger.info(s"indexing ${project.reference}")
        val indexProjectF = dataRepository.insertProject(project)
        val indexReleasesF = dataRepository.insertReleases(releases)
        val indexDependenciesF = dataRepository.insertDependencies(dependencies)

        val indexAll = for {
          _ <- indexProjectF
          releasesResult <- indexReleasesF
          dependenciesResult <- indexDependenciesF
        } yield {
          if (releasesResult.hasFailures || dependenciesResult.hasFailures) {
            logger.error(s"indexing projects ${project.reference} failed")
            releasesResult.failures.foreach(p => logger.error(p.failureMessage))
            dependenciesResult.failures.foreach(
              p => logger.error(p.failureMessage)
            )
          }
        }
        Await.result(indexAll, Duration.Inf)
        count += 1
    }
    logger.info(s"$count projects indexed")
  }

}

object SeedElasticSearch {
  def run(dataPaths: DataPaths)(implicit sys: ActorSystem,
                                mat: Materializer): Unit = {
    import sys.dispatcher
    for (dataRepository <- DataRepository.open(BuildInfo.baseDirectory)) {
      val githubDownload = new GithubDownload(dataPaths)
      val seed =
        new SeedElasticSearch(dataPaths, githubDownload, dataRepository)
      seed.run()
    }
  }
} 
Example 118
Source File: NodeActor.scala    From ForestFlow   with Apache License 2.0 5 votes vote down vote up
package ai.forestflow.serving.cluster

import java.io.File

import akka.actor.{Actor, ActorLogging, ActorRef, Props, Timers}
import akka.cluster.Cluster
import akka.cluster.pubsub.DistributedPubSub
import akka.cluster.pubsub.DistributedPubSubMediator.Subscribe
import ai.forestflow.domain.CleanupLocalStorage
import org.apache.commons.io.FileUtils
import com.typesafe.scalalogging.LazyLogging
import ai.forestflow.utils.ThrowableImplicits._

import scala.util.{Failure, Success, Try}

/**
 * This actor is responsible for node-level (host-level) stuff that should be done on a per-node basis.
 * A good example of this is file system cleanup tasks.
 */
object NodeActor extends LazyLogging {
  
  def props(): Props =
    Props(new NodeActor)
      .withDispatcher("blocking-io-dispatcher")

  def cleanupLocalStorage(path: String): Unit = {
    val localDir = new File(path)
    val localDirExists = localDir.exists()
    logger.info(s"Cleaning up local storage: Local Directory: $localDir , exists? $localDirExists")
    if (localDirExists)
      Try(FileUtils.deleteDirectory(localDir)) match {
        case Success(_) => logger.info(s"Local Directory $localDir cleaned up successfully")
        case Failure(ex) => logger.error(s"Local Directory $localDir cleanup failed! Reason: ${ex.printableStackTrace}")
      }
  }
}

class NodeActor extends Actor
  with ActorLogging
  with Timers {

  
  implicit val cluster: Cluster = Cluster(context.system)
  val mediator: ActorRef = DistributedPubSub(context.system).mediator

  mediator ! Subscribe(classOf[CleanupLocalStorage].getSimpleName, self)

  override def receive: Receive = {
    case CleanupLocalStorage(path) =>
      NodeActor.cleanupLocalStorage(path)
  }
} 
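Because the actor subscribes to a topic named after the message class, any cluster member can broadcast a cleanup. A sketch; the ActorSystem in scope and the path are assumptions:

// hedged sketch: asking every NodeActor in the cluster to clean a local path
import akka.cluster.pubsub.DistributedPubSub
import akka.cluster.pubsub.DistributedPubSubMediator.Publish

val mediator = DistributedPubSub(system).mediator // system: ActorSystem, assumed in scope
mediator ! Publish(classOf[CleanupLocalStorage].getSimpleName, CleanupLocalStorage("/tmp/forestflow-models"))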
Example 119
Source File: CustomTypeMappers.scala    From ForestFlow   with Apache License 2.0 5 votes vote down vote up
package ai.forestflow.scalapb

import akka.actor.ActorRef
import com.typesafe.scalalogging.LazyLogging
import scalapb.TypeMapper

object CustomTypeMappers {

  object ActorTypeMappers extends LazyLogging {
    import ai.forestflow.akka.extensions.ExternalAddress
    import ai.forestflow.startup.ActorSystemStartup

    def serializeAkkaDefault(ref: ActorRef): String =
      ref.path.toSerializationFormatWithAddress(ExternalAddress(ActorSystemStartup.system).addressForAkka) // toSerializationFormat //

    def deserializeAkkaDefault(refString: String): ActorRef =
      ExternalAddress(ActorSystemStartup.system).akkaActorRefFromString(refString)


    implicit val actorRefTypeMapper: TypeMapper[String, akka.actor.ActorRef] =
      TypeMapper[String, akka.actor.ActorRef] {
        from =>
          // logger.trace(s"Deserializing actor ref string: $from to ${deserializeAkkaDefault(from)}")
          deserializeAkkaDefault(from) } {
        ref =>
          // logger.trace(s"Serializing actor ref $ref to ${serializeAkkaDefault(ref)}")
          serializeAkkaDefault(ref)
      }
  }

} 
Example 120
Source File: PahoConnectionListener.scala    From gatling-mqtt-protocol   with Apache License 2.0 5 votes vote down vote up
package com.github.jeanadrien.gatling.mqtt.client

import akka.actor.ActorRef
import com.typesafe.scalalogging.LazyLogging
import org.eclipse.paho.client.mqttv3.{IMqttDeliveryToken, MqttCallback, MqttMessage}


class PahoConnectionListener(actor : ActorRef) extends MqttCallback with LazyLogging {
    override def deliveryComplete(token : IMqttDeliveryToken) : Unit = {
        // nop
    }

    override def messageArrived(
        topic : String, message : MqttMessage
    ) : Unit = {
        val payload = message.getPayload
        logger.trace(s"Paho listener receives: topic=${topic}, body length=${payload.length}")

        actor ! MqttCommands.OnPublish(topic, payload)
    }

    override def connectionLost(cause : Throwable) : Unit = {
        logger.debug(s"Client has been disconnected: ${cause.getMessage}")
        // reconnection could be handled here
    }
} 
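Hooking the listener into an Eclipse Paho client takes one call; a sketch in which the broker URL, topic, and listener actor are assumptions:

// hedged sketch: registering the callback on a Paho MQTT client
import org.eclipse.paho.client.mqttv3.MqttClient

val client = new MqttClient("tcp://localhost:1883", MqttClient.generateClientId())
client.setCallback(new PahoConnectionListener(listenerActor)) // listenerActor: ActorRef, assumed
client.connect()
client.subscribe("some/topic") // published messages now reach MqttCommands.OnPublish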
Example 121
Source File: MutationCallbackSchemaExecutor.scala    From graphcool-framework   with Apache License 2.0 5 votes vote down vote up
package cool.graph.deprecated.actions

import com.typesafe.scalalogging.LazyLogging
import cool.graph.client.ClientInjector
import cool.graph.client.database.{DeferredResolverProvider, SimpleManyModelDeferredResolver, SimpleToManyDeferredResolver}
import cool.graph.cuid.Cuid.createCuid
import cool.graph.deprecated.actions.schemas.{ActionUserContext, MutationMetaData}
import cool.graph.shared.models.{Model, Project}
import cool.graph.shared.schema.JsonMarshalling._
import sangria.execution.Executor
import sangria.parser.QueryParser
import sangria.schema.Schema
import spray.json.{JsObject, JsString}

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import scala.util.{Failure, Success}

case class Event(id: String, url: String, payload: Option[JsObject])

class MutationCallbackSchemaExecutor(project: Project,
                                     model: Model,
                                     schema: Schema[ActionUserContext, Unit],
                                     nodeId: String,
                                     fragment: String,
                                     url: String,
                                     mutationId: String)(implicit injector: ClientInjector)
    extends LazyLogging {
  def execute: Future[Event] = {
    implicit val inj = injector.toScaldi

    val dataFut = QueryParser.parse(fragment) match {
      case Success(queryAst) =>
        Executor.execute(
          schema,
          queryAst,
          deferredResolver = new DeferredResolverProvider(
            new SimpleToManyDeferredResolver,
            new SimpleManyModelDeferredResolver,
            skipPermissionCheck = true
          ),
          userContext = ActionUserContext(
            requestId = "",
            project = project,
            nodeId = nodeId,
            mutation = MutationMetaData(id = mutationId, _type = "Create"),
            log = (x: String) => logger.info(x)
          )
        )
      case Failure(error) =>
        Future.successful(JsObject("error" -> JsString(error.getMessage)))
    }

    dataFut
      .map {
        case JsObject(dataMap) => Event(id = createCuid(), url = url, payload = Some(dataMap("data").asJsObject))
        case json              => sys.error(s"Must only receive JsObjects here. But got instead: ${json.compactPrint}")
      }

  }
} 
Example 122
Source File: ProjectLockdownMiddleware.scala    From graphcool-framework   with Apache License 2.0 5 votes vote down vote up
package cool.graph.client

import com.typesafe.scalalogging.LazyLogging
import cool.graph.shared.errors.CommonErrors.{MutationsNotAllowedForProject, QueriesNotAllowedForProject}
import cool.graph.RequestContextTrait
import cool.graph.shared.models.Project
import sangria.ast.{OperationDefinition, OperationType}
import sangria.execution._

case class ProjectLockdownMiddleware(project: Project) extends Middleware[RequestContextTrait] with LazyLogging {

  override type QueryVal = Unit

  override def beforeQuery(context: MiddlewareQueryContext[RequestContextTrait, _, _]): Unit = {
    val isQuery: Boolean = context.queryAst.definitions.exists {
      case x: OperationDefinition => x.operationType == OperationType.Query || x.operationType == OperationType.Subscription
      case _                      => false
    }

    val isMutation: Boolean = context.queryAst.definitions.exists {
      case x: OperationDefinition => x.operationType == OperationType.Mutation
      case _                      => false
    }

    if (isQuery && !project.allowQueries) {
      throw new QueriesNotAllowedForProject(project.id)
    }

    if (isMutation && !project.allowMutations) {
      throw new MutationsNotAllowedForProject(project.id)
    }

    ()
  }

  override def afterQuery(queryVal: Unit, context: MiddlewareQueryContext[RequestContextTrait, _, _]): Unit = ()
} 
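A sketch of how such a middleware is typically attached to a sangria executor; the schema, query AST, request context, and project are assumed to be in scope:

import sangria.execution.Executor
import sangria.marshalling.sprayJson._

import scala.concurrent.ExecutionContext.Implicits.global

// the middleware vetoes queries or mutations before field resolution begins
val result = Executor.execute(
  schema,
  queryAst,
  userContext = requestContext,
  middleware = ProjectLockdownMiddleware(project) :: Nil
)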
Example 123
Source File: ApiMetricsMiddleware.scala    From graphcool-framework   with Apache License 2.0 5 votes vote down vote up
package cool.graph.client.metrics

import akka.actor.{ActorRef, ActorSystem}
import akka.stream.ActorMaterializer
import com.typesafe.scalalogging.LazyLogging
import cool.graph.RequestContextTrait
import cool.graph.client.ApiFeatureMetric
import cool.graph.shared.externalServices.TestableTime
import sangria.execution._

class ApiMetricsMiddleware(
    testableTime: TestableTime,
    apiMetricActor: ActorRef
)(
    implicit system: ActorSystem,
    materializer: ActorMaterializer
) extends Middleware[RequestContextTrait]
    with LazyLogging {

  def afterQuery(queryVal: QueryVal, context: MiddlewareQueryContext[RequestContextTrait, _, _]) = {
    (context.ctx.requestIp, context.ctx.projectId, context.ctx.clientId) match {
      case (requestIp, Some(projectId), clientId) =>
        // todo: generate list of features

        apiMetricActor ! ApiFeatureMetric(requestIp, testableTime.DateTime, projectId, clientId, context.ctx.listFeatureMetrics, context.ctx.isFromConsole)
      case _ => logger.warn("missing data for FieldMetrics")
    }
  }

  override type QueryVal = Unit

  override def beforeQuery(context: MiddlewareQueryContext[RequestContextTrait, _, _]): Unit = ()
} 
Example 124
Source File: ActionWebhookForCreateDataItemSync.scala    From graphcool-framework   with Apache License 2.0 5 votes vote down vote up
package cool.graph.client.mutactions

import com.typesafe.scalalogging.LazyLogging
import cool.graph.Types.Id
import cool.graph._
import cool.graph.client.ClientInjector
import cool.graph.client.schema.simple.SimpleSchemaModelObjectTypeBuilder
import cool.graph.deprecated.actions.schemas.CreateSchema
import cool.graph.deprecated.actions.{Event, MutationCallbackSchemaExecutor}
import cool.graph.shared.errors.UserAPIErrors.UnsuccessfulSynchronousMutationCallback
import cool.graph.shared.errors.{SystemErrors, UserFacingError}
import cool.graph.shared.models.{Action, Model, Project}

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future

case class ActionWebhookForCreateDataItemSync(model: Model, project: Project, nodeId: Id, action: Action, mutationId: Id, requestId: String)(
    implicit injector: ClientInjector)
    extends ActionWebhookMutaction
    with LazyLogging {

  override def execute: Future[MutactionExecutionResult] = {

    val webhookCaller = injector.webhookCaller
    implicit val inj  = injector.toScaldi

    val payload: Future[Event] =
      new MutationCallbackSchemaExecutor(
        project,
        model,
        new CreateSchema(model = model, modelObjectTypes = new SimpleSchemaModelObjectTypeBuilder(project = project), project = project).build(),
        nodeId,
        action.triggerMutationModel.get.fragment,
        action.handlerWebhook.get.url,
        mutationId
      ).execute

    payload
      .flatMap(
        payload =>
          webhookCaller
            .call(payload.url, payload.payload.map(_.compactPrint).getOrElse(""))
            .map {
              case true  => MutactionExecutionSuccess()
              case false => throw UnsuccessfulSynchronousMutationCallback()
          })
      .recover {
        case x: UserFacingError => throw x
        case x =>
          SystemErrors.UnknownExecutionError(x.getMessage, x.getStackTrace.map(_.toString).mkString(", "))
      }
  }
} 
Example 125
Source File: ActionWebhookForCreateDataItemAsync.scala    From graphcool-framework   with Apache License 2.0 5 votes vote down vote up
package cool.graph.client.mutactions

import com.typesafe.scalalogging.LazyLogging
import cool.graph.Types.Id
import cool.graph._
import cool.graph.client.ClientInjector
import cool.graph.client.schema.simple.SimpleSchemaModelObjectTypeBuilder
import cool.graph.deprecated.actions.schemas.CreateSchema
import cool.graph.deprecated.actions.{Event, MutationCallbackSchemaExecutor}
import cool.graph.messagebus.QueuePublisher
import cool.graph.shared.errors.SystemErrors
import cool.graph.shared.models.{Action, Model, Project}
import cool.graph.webhook.Webhook
import scaldi._

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future

case class ActionWebhookForCreateDataItemAsync(model: Model, project: Project, nodeId: Id, action: Action, mutationId: Id, requestId: String)(
    implicit injector: ClientInjector)
    extends ActionWebhookMutaction
    with LazyLogging {

  override def execute: Future[MutactionExecutionResult] = {

    val webhookPublisher = injector.webhookPublisher
    implicit val inj     = injector.toScaldi

    val payload: Future[Event] =
      new MutationCallbackSchemaExecutor(
        project,
        model,
        new CreateSchema(model = model, modelObjectTypes = new SimpleSchemaModelObjectTypeBuilder(project = project), project = project).build(),
        nodeId,
        action.triggerMutationModel.get.fragment,
        action.handlerWebhook.get.url,
        mutationId
      ).execute

    payload.onSuccess {
      case event: Event =>
        val whPayload = event.payload.map(p => p.compactPrint).getOrElse("")
        webhookPublisher.publish(Webhook(project.id, "", requestId, event.url, whPayload, event.id, Map.empty))
    }

    payload.map(_ => MutactionExecutionSuccess()).recover {
      case x => SystemErrors.UnknownExecutionError(x.getMessage, x.getStackTrace.map(_.toString).mkString(", "))
    }
  }
} 
Example 126
Source File: S3UpdateFileName.scala    From graphcool-framework   with Apache License 2.0 5 votes vote down vote up
package cool.graph.client.mutactions

import com.typesafe.scalalogging.LazyLogging
import cool.graph._
import cool.graph.client.ClientInjector
import cool.graph.client.database.DataResolver
import cool.graph.client.files.FileUploader
import cool.graph.shared.errors.UserAPIErrors.DataItemDoesNotExist
import cool.graph.shared.models.{Model, Project}

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import scala.util.{Failure, Success, Try}

case class S3UpdateFileName(model: Model, project: Project, fileId: String, newName: String, resolver: DataResolver)(implicit injector: ClientInjector)
    extends Mutaction
    with LazyLogging {

  var fileSecret: Option[String] = None

  override def execute: Future[MutactionExecutionResult] = {

    val uploader = new FileUploader(project)

    uploader.setFilename(project, fileSecret.get, newName)

    Future.successful(MutactionExecutionSuccess())
  }

  override def verify(): Future[Try[MutactionVerificationSuccess]] = {
    resolver.resolveByUnique(model, "id", fileId) map {
      case None => Failure(DataItemDoesNotExist(model.id, fileId))
      case Some(node) =>
        fileSecret = node.getOption[String]("secret")

        Success(MutactionVerificationSuccess())
    }
  }
} 
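The mutaction only works if verify() runs first, since that is what populates fileSecret. A sketch of the expected call order, assuming model, project, resolver, and an implicit ClientInjector in scope (file id and name are illustrative):

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import scala.util.{Failure, Success}

val rename = S3UpdateFileName(model, project, fileId = "some-file-id", newName = "report.pdf", resolver)

val result = rename.verify().flatMap {
  case Success(_)   => rename.execute     // safe: fileSecret was populated by verify()
  case Failure(err) => Future.failed(err) // e.g. DataItemDoesNotExist
}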
Example 127
Source File: PublishSubscriptionEvent.scala    From graphcool-framework   with Apache License 2.0 5 votes vote down vote up
package cool.graph.client.mutactions

import com.typesafe.scalalogging.LazyLogging
import cool.graph.JsonFormats.AnyJsonFormat
import cool.graph._
import cool.graph.client.ClientInjector
import cool.graph.deprecated.actions.EventJsonProtocol
import cool.graph.messagebus.pubsub.Only
import cool.graph.shared.models.Project
import spray.json._

import scala.concurrent.Future

case class PublishSubscriptionEvent(project: Project, value: Map[String, Any], mutationName: String)(implicit injector: ClientInjector)
    extends Mutaction
    with LazyLogging {
  import EventJsonProtocol._

  val publisher = injector.sssEventsPublisher

  override def execute: Future[MutactionExecutionResult] = {
    val topic = Only(s"subscription:event:${project.id}:$mutationName")

    publisher.publish(topic, value.toJson.compactPrint)
    Future.successful(MutactionExecutionSuccess())
  }
} 
Example 128
Source File: ActionWebhookForDeleteDataItemAsync.scala    From graphcool-framework   with Apache License 2.0 5 votes vote down vote up
package cool.graph.client.mutactions

import com.typesafe.scalalogging.LazyLogging
import cool.graph.Types.Id
import cool.graph._
import cool.graph.client.ClientInjector
import cool.graph.client.schema.simple.SimpleSchemaModelObjectTypeBuilder
import cool.graph.deprecated.actions.schemas._
import cool.graph.deprecated.actions.{Event, MutationCallbackSchemaExecutor}
import cool.graph.messagebus.QueuePublisher
import cool.graph.shared.errors.SystemErrors
import cool.graph.shared.models.{Action, Model, Project}
import cool.graph.webhook.Webhook
import scaldi._

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import scala.util.Success

case class ActionWebhookForDeleteDataItemAsync(model: Model, project: Project, nodeId: Id, action: Action, mutationId: Id, requestId: String)(
    implicit injector: ClientInjector)
    extends ActionWebhookForDeleteDataItem
    with LazyLogging {

  // note: as the node is being deleted, we need to resolve the query before executing this mutaction.
  // This differs from the normal execution flow for mutactions, so please be careful!
  var data: Option[Webhook]               = None
  var prepareDataError: Option[Exception] = None

  def prepareData: Future[Event] = {
    implicit val inj = injector.toScaldi

    val payload: Future[Event] =
      new MutationCallbackSchemaExecutor(
        project,
        model,
        new DeleteSchema(model = model, modelObjectTypes = new SimpleSchemaModelObjectTypeBuilder(project = project), project = project).build(),
        nodeId,
        action.triggerMutationModel.get.fragment,
        action.handlerWebhook.get.url,
        mutationId
      ).execute

    payload.andThen({
      case Success(event) =>
        val whPayload = event.payload.map(p => p.compactPrint).getOrElse("")
        data = Some(Webhook(project.id, "", requestId, event.url, whPayload, event.id, Map.empty))
    })
  }

  override def execute: Future[MutactionExecutionResult] = {

    prepareDataError match {
      case Some(x) =>
        // surface the error captured during prepareData instead of discarding it
        Future.successful(
          SystemErrors.UnknownExecutionError(x.getMessage, x.getStackTrace.map(_.toString).mkString(", ")))

      case None =>
        require(data.nonEmpty, "prepareData should be invoked and awaited before executing this mutaction")
        val webhookPublisher = injector.webhookPublisher
        webhookPublisher.publish(data.get)
        Future.successful(MutactionExecutionSuccess())
    }
  }
} 
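As the comment above warns, prepareData must complete before execute is called, because once the node is deleted the fragment can no longer be resolved. A sketch of the intended sequencing, with the constructor arguments assumed to be in scope:

import scala.concurrent.ExecutionContext.Implicits.global

val mutaction = ActionWebhookForDeleteDataItemAsync(model, project, nodeId, action, mutationId, requestId)

// resolve the payload while the node still exists, then publish the webhook
val result = mutaction.prepareData.flatMap(_ => mutaction.execute)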
Example 129
Source File: ActionWebhookForUpdateDataItemAsync.scala    From graphcool-framework   with Apache License 2.0 5 votes vote down vote up
package cool.graph.client.mutactions

import com.typesafe.scalalogging.LazyLogging
import cool.graph.Types.Id
import cool.graph._
import cool.graph.client.ClientInjector
import cool.graph.client.schema.simple.SimpleSchemaModelObjectTypeBuilder
import cool.graph.deprecated.actions.schemas._
import cool.graph.deprecated.actions.{Event, MutationCallbackSchemaExecutor}
import cool.graph.messagebus.QueuePublisher
import cool.graph.shared.errors.SystemErrors
import cool.graph.shared.models.{Action, Model, Project}
import cool.graph.webhook.Webhook
import scaldi._

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future

case class ActionWebhookForUpdateDataItemAsync(model: Model,
                                               project: Project,
                                               nodeId: Id,
                                               action: Action,
                                               updatedFields: List[String],
                                               mutationId: Id,
                                               requestId: String,
                                               previousValues: DataItem)(implicit injector: ClientInjector)
    extends ActionWebhookMutaction
    with LazyLogging {

  import cool.graph.deprecated.actions.EventJsonProtocol._

  override def execute: Future[MutactionExecutionResult] = {

    val webhookPublisher = injector.webhookPublisher
    implicit val inj     = injector.toScaldi

    val payload: Future[Event] =
      new MutationCallbackSchemaExecutor(
        project,
        model,
        new UpdateSchema(
          model = model,
          modelObjectTypes = new SimpleSchemaModelObjectTypeBuilder(project = project),
          project = project,
          updatedFields = updatedFields,
          previousValues = previousValues
        ).build(),
        nodeId,
        action.triggerMutationModel.get.fragment,
        action.handlerWebhook.get.url,
        mutationId
      ).execute

    payload.onSuccess {
      case event: Event =>
        val whPayload = event.payload.map(p => p.compactPrint).getOrElse("")
        webhookPublisher.publish(Webhook(project.id, "", requestId, event.url, whPayload, event.id, Map.empty))
    }

    payload.map(_ => MutactionExecutionSuccess()).recover {
      case x => SystemErrors.UnknownExecutionError(x.getMessage, x.getStackTrace.map(_.toString).mkString(", "))
    }
  }
} 
Example 130
Source File: ActionWebhookForDeleteDataItemSync.scala    From graphcool-framework   with Apache License 2.0 5 votes vote down vote up
package cool.graph.client.mutactions

import com.typesafe.scalalogging.LazyLogging
import cool.graph.Types.Id
import cool.graph.shared.errors.UserAPIErrors.UnsuccessfulSynchronousMutationCallback
import cool.graph._
import cool.graph.client.ClientInjector
import cool.graph.deprecated.actions.schemas._
import cool.graph.client.database.DataResolver
import cool.graph.client.schema.simple.SimpleSchemaModelObjectTypeBuilder
import cool.graph.deprecated.actions.{Event, MutationCallbackSchemaExecutor}
import cool.graph.shared.models.{Action, Model, Project}
import cool.graph.shared.errors.SystemErrors
import cool.graph.webhook.WebhookCaller
import scaldi._

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import scala.util.Success

abstract class ActionWebhookForDeleteDataItem extends ActionWebhookMutaction {
  def prepareData: Future[Event]
}

case class ActionWebhookForDeleteDataItemSync(model: Model, project: Project, nodeId: Id, action: Action, mutationId: Id, requestId: String)(
    implicit injector: ClientInjector)
    extends ActionWebhookForDeleteDataItem
    with LazyLogging {

  // note: as the node is being deleted, we need to resolve the query before executing this mutaction.
  // This differs from the normal execution flow for mutactions, so please be careful!
  def prepareData: Future[Event] = {

    implicit val inj = injector.toScaldi
    val payload: Future[Event] =
      new MutationCallbackSchemaExecutor(
        project,
        model,
        new DeleteSchema(model = model, modelObjectTypes = new SimpleSchemaModelObjectTypeBuilder(project = project), project = project).build(),
        nodeId,
        action.triggerMutationModel.get.fragment,
        action.handlerWebhook.get.url,
        mutationId
      ).execute

    payload.andThen({ case Success(x) => data = Some(x) })
  }

  var data: Option[Event]                 = None
  var prepareDataError: Option[Exception] = None

  override def execute: Future[MutactionExecutionResult] = {

    prepareDataError match {
      case Some(x) =>
        // surface the error captured during prepareData instead of discarding it
        Future.successful(
          SystemErrors.UnknownExecutionError(x.getMessage, x.getStackTrace.map(_.toString).mkString(", ")))

      case None =>
        data match {
          case None =>
            sys.error("prepareData should be invoked and awaited before executing this mutaction")

          case Some(event) =>
            val webhookCaller = injector.webhookCaller

            webhookCaller
              .call(event.url, event.payload.map(_.compactPrint).getOrElse(""))
              .map {
                case true  => MutactionExecutionSuccess()
                case false => throw UnsuccessfulSynchronousMutationCallback()
              }
        }
    }
  }
} 
Example 131
Source File: ActionWebhookForUpdateDataItemSync.scala    From graphcool-framework   with Apache License 2.0 5 votes vote down vote up
package cool.graph.client.mutactions

import com.typesafe.scalalogging.LazyLogging
import cool.graph.Types.Id
import cool.graph._
import cool.graph.client.ClientInjector
import cool.graph.client.schema.simple.SimpleSchemaModelObjectTypeBuilder
import cool.graph.deprecated.actions.schemas._
import cool.graph.deprecated.actions.{Event, MutationCallbackSchemaExecutor}
import cool.graph.shared.errors.UserAPIErrors.UnsuccessfulSynchronousMutationCallback
import cool.graph.shared.errors.{SystemErrors, UserFacingError}
import cool.graph.shared.models.{Action, Model, Project}
import cool.graph.webhook.WebhookCaller
import scaldi._

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future

case class ActionWebhookForUpdateDataItemSync(model: Model,
                                              project: Project,
                                              nodeId: Id,
                                              action: Action,
                                              updatedFields: List[String],
                                              mutationId: Id,
                                              requestId: String,
                                              previousValues: DataItem)(implicit injector: ClientInjector)
    extends ActionWebhookMutaction
    with LazyLogging {

  override def execute: Future[MutactionExecutionResult] = {

    val webhookCaller = injector.webhookCaller
    implicit val inj  = injector.toScaldi

    val payload: Future[Event] =
      new MutationCallbackSchemaExecutor(
        project,
        model,
        new UpdateSchema(
          model = model,
          modelObjectTypes = new SimpleSchemaModelObjectTypeBuilder(project = project),
          project = project,
          updatedFields = updatedFields,
          previousValues = previousValues
        ).build(),
        nodeId,
        action.triggerMutationModel.get.fragment,
        action.handlerWebhook.get.url,
        mutationId
      ).execute

    payload
      .flatMap(
        payload =>
          webhookCaller
            .call(payload.url, payload.payload.map(_.compactPrint).getOrElse(""))
            .map {
              case true  => MutactionExecutionSuccess()
              case false => throw UnsuccessfulSynchronousMutationCallback()
          })
      .recover {
        case x: UserFacingError => throw x
        case x                  => SystemErrors.UnknownExecutionError(x.getMessage, x.getStackTrace.map(_.toString).mkString(", "))
      }
  }
} 
Example 132
Source File: FieldMetrics.scala    From graphcool-framework   with Apache License 2.0 5 votes vote down vote up
package cool.graph

import sangria.execution._
import sangria.schema._
import spray.json.DefaultJsonProtocol._
import spray.json._
import com.typesafe.scalalogging.LazyLogging
import cool.graph.shared.logging.{LogData, LogKey}

import scala.collection.concurrent.TrieMap

class FieldMetricsMiddleware
    extends Middleware[RequestContextTrait]
    with MiddlewareAfterField[RequestContextTrait]
    with MiddlewareErrorField[RequestContextTrait]
    with LazyLogging {

  type QueryVal = TrieMap[String, List[Int]]
  type FieldVal = Long

  def beforeQuery(context: MiddlewareQueryContext[RequestContextTrait, _, _]) =
    TrieMap()
  def afterQuery(queryVal: QueryVal, context: MiddlewareQueryContext[RequestContextTrait, _, _]) = {

    import TimingProtocol._

    val total  = queryVal.foldLeft(0)(_ + _._2.sum)
    val sumMap = queryVal.toMap.mapValues(_.sum) + ("__total" -> total)
//    logger.info(
//      LogData(
//        key = LogKey.RequestMetricsFields,
//        requestId = context.ctx.requestId,
//        clientId = Some(context.ctx.clientId),
//        projectId = context.ctx.projectId,
//        payload = Some(sumMap)
//      ).json)
  }

  def beforeField(queryVal: QueryVal, mctx: MiddlewareQueryContext[RequestContextTrait, _, _], ctx: Context[RequestContextTrait, _]) =
    continue(System.currentTimeMillis())

  def afterField(queryVal: QueryVal,
                 fieldVal: FieldVal,
                 value: Any,
                 mctx: MiddlewareQueryContext[RequestContextTrait, _, _],
                 ctx: Context[RequestContextTrait, _]) = {
    val key  = ctx.parentType.name + "." + ctx.field.name
    val list = queryVal.getOrElse(key, Nil)

    queryVal.update(key, list :+ (System.currentTimeMillis() - fieldVal).toInt)
    None
  }

  def fieldError(queryVal: QueryVal,
                 fieldVal: FieldVal,
                 error: Throwable,
                 mctx: MiddlewareQueryContext[RequestContextTrait, _, _],
                 ctx: Context[RequestContextTrait, _]) = {
    val key    = ctx.parentType.name + "." + ctx.field.name
    val list   = queryVal.getOrElse(key, Nil)
    val errors = queryVal.getOrElse("ERROR", Nil)

    queryVal.update(key, list :+ (System.currentTimeMillis() - fieldVal).toInt)
    queryVal.update("ERROR", errors :+ 1)
  }
} 
Example 133
Source File: PermissionSchemaResolver.scala    From graphcool-framework   with Apache License 2.0 5 votes vote down vote up
package cool.graph.shared.queryPermissions

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import com.typesafe.scalalogging.LazyLogging
import cool.graph.client.UserContext
import cool.graph.client.database.DeferredTypes.ManyModelExistsDeferred
import cool.graph.client.schema.simple.SimpleSchemaModelObjectTypeBuilder
import cool.graph.shared.{ApiMatrixFactory, models}
import cool.graph.shared.models.Project
import sangria.execution.Executor
import sangria.introspection.introspectionQuery
import sangria.schema.{Context, Field, ObjectType, Schema}
import scaldi.{Injectable, Injector}
import spray.json.JsObject

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future

class PermissionSchemaResolver(implicit inj: Injector) extends Injectable with LazyLogging {

  import sangria.marshalling.sprayJson._

  def resolve(project: Project): Future[String] = {

    implicit val system       = inject[ActorSystem](identified by "actorSystem")
    implicit val materializer = inject[ActorMaterializer](identified by "actorMaterializer")

    val permissionSchema = PermissionSchemaResolver.permissionSchema(project)

    Executor
      .execute(
        schema = permissionSchema,
        queryAst = introspectionQuery,
        userContext = new UserContext(
          project = project,
          authenticatedRequest = None,
          requestId = "PermissionSchemaResolver-request-id",
          requestIp = "PermissionSchemaResolver-request-ip",
          clientId = "PermissionSchemaResolver-client-id",
          log = (_) => (),
          queryAst = Some(introspectionQuery)
        )
      )
      .map { response =>
        val JsObject(fields) = response
        fields("data").compactPrint
      }
  }
}

object PermissionSchemaResolver extends Injectable {
  def permissionSchema(project: Project)(implicit inj: Injector): Schema[UserContext, Unit] = {
    val apiMatrix      = inject[ApiMatrixFactory].create(project)
    val includedModels = project.models.filter(model => apiMatrix.includeModel(model.name))
    val schemaBuilder  = new SimpleSchemaModelObjectTypeBuilder(project, None)

    def getConnectionArguments(model: models.Model) = {
      schemaBuilder.mapToListConnectionArguments(model)
    }

    def resolveGetAllItemsQuery(model: models.Model, ctx: Context[UserContext, Unit]): sangria.schema.Action[UserContext, Boolean] = {
      val arguments = schemaBuilder.extractQueryArgumentsFromContext(model, ctx)

      ManyModelExistsDeferred(model, arguments)
    }

    def getModelField(model: models.Model): Field[UserContext, Unit] = {
      Field(
        s"Some${model.name.capitalize}Exists",
        fieldType = sangria.schema.BooleanType,
        arguments = getConnectionArguments(model),
        resolve = (ctx) => {
          resolveGetAllItemsQuery(model, ctx)
        }
      )
    }

    val query    = ObjectType("Query", includedModels.map(getModelField))
    val mutation = None

    Schema(query, mutation)
  }
} 
Example 134
Source File: ActionSchemaResolver.scala    From graphcool-framework   with Apache License 2.0 5 votes vote down vote up
package cool.graph.system

import com.typesafe.scalalogging.LazyLogging
import cool.graph.DataItem
import cool.graph.Types.Id
import cool.graph.client.schema.simple.SimpleSchemaModelObjectTypeBuilder
import cool.graph.deprecated.actions.schemas._
import cool.graph.shared.ApiMatrixFactory
import cool.graph.shared.models.{ActionTriggerMutationModelMutationType, ActionTriggerMutationRelationMutationType, ActionTriggerType, Project}
import sangria.execution.Executor
import sangria.introspection.introspectionQuery
import sangria.marshalling.sprayJson._
import sangria.schema.Schema
import scaldi.{Injectable, Injector}
import spray.json.JsObject

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future

case class ActionSchemaPayload(
    triggerType: ActionTriggerType.Value,
    mutationModel: Option[ActionSchemaPayloadMutationModel],
    mutationRelation: Option[ActionSchemaPayloadMutationRelation]
)

case class ActionSchemaPayloadMutationModel(
    modelId: Id,
    mutationType: ActionTriggerMutationModelMutationType.Value
)

case class ActionSchemaPayloadMutationRelation(
    relationId: Id,
    mutationType: ActionTriggerMutationRelationMutationType.Value
)

class ActionSchemaResolver(implicit inj: Injector) extends Injectable with LazyLogging {

  def resolve(project: Project, payload: ActionSchemaPayload): Future[String] = {
    val apiMatrix = inject[ApiMatrixFactory].create(project)

    payload.triggerType match {
      case ActionTriggerType.MutationModel =>
        val model = apiMatrix.filterModel(project.getModelById_!(payload.mutationModel.get.modelId))

        model match {
          case None =>
            Future.successful(JsObject.empty.prettyPrint)
          case Some(model) =>
            val modelObjectTypes = new SimpleSchemaModelObjectTypeBuilder(project)

            val schema: Schema[ActionUserContext, Unit] =
              payload.mutationModel.get.mutationType match {
                case ActionTriggerMutationModelMutationType.Create =>
                  new CreateSchema(model = model, modelObjectTypes = modelObjectTypes, project = project).build()
                case ActionTriggerMutationModelMutationType.Update =>
                  new UpdateSchema(model = model,
                                   modelObjectTypes = modelObjectTypes,
                                   project = project,
                                   updatedFields = List(),
                                   previousValues = DataItem("dummy", Map())).build()
                case ActionTriggerMutationModelMutationType.Delete =>
                  new DeleteSchema(model = model, modelObjectTypes = modelObjectTypes, project = project).build()
              }

            Executor
              .execute(
                schema = schema,
                queryAst = introspectionQuery,
                userContext = ActionUserContext(
                  requestId = "",
                  project = project,
                  nodeId = model.id,
                  mutation = MutationMetaData(id = "", _type = ""),
                  log = (x: String) => logger.info(x)
                )
              )
              .map { response =>
                val JsObject(fields) = response
                fields("data").compactPrint
              }
        }
    }
  }
} 
Example 135
Source File: SearchProviderAlgolia.scala    From graphcool-framework   with Apache License 2.0 5 votes vote down vote up
package cool.graph.system.schema.types

import com.typesafe.scalalogging.LazyLogging
import cool.graph.client.schema.simple.SimpleSchemaModelObjectTypeBuilder
import cool.graph.shared.algolia.schemas.AlgoliaSchema
import cool.graph.shared.algolia.AlgoliaContext
import cool.graph.shared.models
import cool.graph.system.SystemUserContext
import cool.graph.system.schema.types.AlgoliaSyncQuery.AlgoliaSyncQueryContext
import sangria.execution.Executor
import sangria.introspection.introspectionQuery
import sangria.marshalling.sprayJson._
import sangria.relay.{Connection, ConnectionArgs, Node}
import sangria.schema._
import scaldi.{Injectable, Injector}
import spray.json.JsObject

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future

object SearchProviderAlgolia {
  case class SearchProviderAlgoliaContext(project: models.Project, algolia: models.SearchProviderAlgolia) extends Node with models.Integration {
    override val id              = algolia.id
    override val subTableId      = algolia.subTableId
    override val isEnabled       = algolia.isEnabled
    override val name            = algolia.name
    override val integrationType = algolia.integrationType
  }
  lazy val Type: ObjectType[SystemUserContext, SearchProviderAlgoliaContext] =
    ObjectType(
      "SearchProviderAlgolia",
      "This is a SearchProviderAlgolia",
      interfaces[SystemUserContext, SearchProviderAlgoliaContext](nodeInterface, Integration.Type),
      () =>
        idField[SystemUserContext, SearchProviderAlgoliaContext] ::
          fields[SystemUserContext, SearchProviderAlgoliaContext](
          Field("applicationId", StringType, resolve = _.value.algolia.applicationId),
          Field("apiKey", StringType, resolve = _.value.algolia.apiKey),
          Field(
            "algoliaSyncQueries",
            algoliaSyncQueryConnection,
            arguments = Connection.Args.All,
            resolve = ctx =>
              Connection.connectionFromSeq(ctx.value.algolia.algoliaSyncQueries
                                             .sortBy(_.id.toString)
                                             .map(s => AlgoliaSyncQueryContext(ctx.value.project, s)),
                                           ConnectionArgs(ctx))
          ),
          Field(
            "algoliaSchema",
            StringType,
            arguments = List(Argument("modelId", IDType)),
            resolve = ctx => {
              val modelId =
                ctx.args.raw.get("modelId").get.asInstanceOf[String]
              ctx.ctx.getSearchProviderAlgoliaSchema(ctx.value.project, modelId)
            }
          )
      )
    )
}

class SearchProviderAlgoliaSchemaResolver(implicit inj: Injector) extends Injectable with LazyLogging {
  def resolve(project: models.Project, modelId: String): Future[String] = {
    val model = project.getModelById_!(modelId)
    Executor
      .execute(
        schema = new AlgoliaSchema(
          project = project,
          model = model,
          modelObjectTypes = new SimpleSchemaModelObjectTypeBuilder(project)
        ).build(),
        queryAst = introspectionQuery,
        userContext = AlgoliaContext(
          project = project,
          requestId = "",
          nodeId = "",
          log = (x: String) => logger.info(x)
        )
      )
      .map { response =>
        val JsObject(fields) = response
        fields("data").compactPrint
      }
  }
} 
Example 136
Source File: RemoraApp.scala    From remora   with MIT License 5 votes vote down vote up
import java.io.IOException
import java.net.ConnectException
import java.util.concurrent.{TimeUnit, TimeoutException}

import akka.actor.ActorSystem
import akka.stream.{ActorMaterializer, ActorMaterializerSettings, Supervision}
import com.amazonaws.services.cloudwatch.{AmazonCloudWatchAsync, AmazonCloudWatchAsyncClientBuilder}
import com.blacklocus.metrics.CloudWatchReporterBuilder
import com.codahale.metrics.jvm.{GarbageCollectorMetricSet, MemoryUsageGaugeSet, ThreadStatesGaugeSet}
import com.typesafe.scalalogging.LazyLogging
import config.{KafkaSettings, MetricsSettings}
import kafka.admin.RemoraKafkaConsumerGroupService
import reporter.RemoraDatadogReporter

import scala.concurrent.duration._
import scala.util.control.NonFatal

object RemoraApp extends App with nl.grons.metrics.scala.DefaultInstrumented with LazyLogging {

  private val actorSystemName: String = "remora"
  implicit val actorSystem = ActorSystem(actorSystemName)

  metricRegistry.registerAll(new GarbageCollectorMetricSet)
  metricRegistry.registerAll(new MemoryUsageGaugeSet)
  metricRegistry.registerAll(new ThreadStatesGaugeSet)

  lazy val decider: Supervision.Decider = {
    case _: IOException | _: ConnectException | _: TimeoutException => Supervision.Restart
    case NonFatal(err: Throwable) =>
      actorSystem.log.error(err, "Unhandled Exception in Stream: {}", err.getMessage)
      Supervision.Stop
  }

  implicit val materializer = ActorMaterializer(
    ActorMaterializerSettings(actorSystem).withSupervisionStrategy(decider))(actorSystem)

  implicit val executionContext = actorSystem.dispatchers.lookup("kafka-consumer-dispatcher")
  val kafkaSettings = KafkaSettings(actorSystem.settings.config)
  val consumer = new RemoraKafkaConsumerGroupService(kafkaSettings)
  val kafkaClientActor = actorSystem.actorOf(KafkaClientActor.props(consumer), name = "kafka-client-actor")

  Api(kafkaClientActor).start()

  val metricsSettings = MetricsSettings(actorSystem.settings.config)

  if (metricsSettings.registryOptions.enabled) {
    val exportConsumerMetricsToRegistryActor =
      actorSystem.actorOf(ExportConsumerMetricsToRegistryActor.props(kafkaClientActor),
        name = "export-consumer-metrics-actor")
    actorSystem.scheduler.schedule(0 second, metricsSettings.registryOptions.intervalSeconds second, exportConsumerMetricsToRegistryActor, "export")
  }

  if (metricsSettings.cloudWatch.enabled) {
    logger.info("Reporting metricsRegistry to Cloudwatch")
    val amazonCloudWatchAsync: AmazonCloudWatchAsync = AmazonCloudWatchAsyncClientBuilder.defaultClient

    new CloudWatchReporterBuilder()
      .withNamespace(metricsSettings.cloudWatch.name)
      .withRegistry(metricRegistry)
      .withClient(amazonCloudWatchAsync)
      .build()
      .start(metricsSettings.cloudWatch.intervalMinutes, TimeUnit.MINUTES)
  }

  if (metricsSettings.dataDog.enabled) {
    logger.info(s"Reporting metricsRegistry to Datadog at ${metricsSettings.dataDog.agentHost}:${metricsSettings.dataDog.agentPort}")
    val datadogReporter = new RemoraDatadogReporter(metricRegistry, metricsSettings.dataDog)
    datadogReporter.startReporter()
  }

} 
Example 137
Source File: Runner.scala    From sansible   with MIT License 5 votes vote down vote up
package ansible

import scala.io.Source
import scala.sys.process.{Process, ProcessIO}

import ansible.IniEncode._
import ansible.IniEncoders._
import better.files.File
import com.typesafe.scalalogging.LazyLogging

object Runner extends LazyLogging {
  def runPlaybook(inv: Inventory)(pb: Playbook, opts: Option[String] = None): Unit = {
    val invFile = File.newTemporaryFile("ansible-inventory")
    val pbFile  = File.newTemporaryFile("ansible-playbook", ".yml")

    val pio = new ProcessIO(
      _ => (),
      out => Source.fromInputStream(out).getLines.foreach(println),
      err => Source.fromInputStream(err).getLines.foreach(System.err.println)
    )

    val cmd = s"ansible-playbook ${opts.getOrElse("")} -i ${invFile.path} ${pbFile.path}"
    val env = Seq("ANSIBLE_FORCE_COLOR" -> "true")

    // write the inventory and playbook before launching ansible-playbook,
    // so the process never reads a file that has not been written yet
    invFile.write(inv.iniEncode)
    pbFile.write(YAML.fromPlaybook(pb))
    logger.info(cmd)

    val process  = Process(cmd, cwd = None, env: _*).run(pio)
    val exitCode = process.exitValue()
    logger.info(s"run completed with exit code: $exitCode")
    process.destroy()
  }
} 
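A sketch of invoking the runner, assuming an inventory and playbook already assembled with sansible's DSL:

// `inventory` and `playbook` are assumed to exist; opts is passed straight to ansible-playbook
Runner.runPlaybook(inventory)(playbook, opts = Some("--check"))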
Example 138
Source File: Logging.scala    From akka-dddd-template   with Apache License 2.0 5 votes vote down vote up
package com.boldradius.util

import akka.actor.{Actor, ActorLogging}
import com.typesafe.scalalogging.LazyLogging
import scala.language.implicitConversions

trait ALogging extends ActorLogging{  this: Actor =>

  implicit def toLogging[V](v: V) : FLog[V] = FLog(v)

  case class FLog[V](v : V)  {
    def logInfo(f: V => String): V = {log.info(f(v)); v}
    def logDebug(f: V => String): V = {log.debug(f(v)); v}
    def logError(f: V => String): V = {log.error(f(v)); v}
    def logWarn(f: V => String): V = {log.warning(f(v)); v}
    def logTest(f: V => String): V = {println(f(v)); v}
  }
}
trait LLogging extends LazyLogging{

  implicit def toLogging[V](v: V) : FLog[V] = FLog(v)

  case class FLog[V](v : V)  {
    def logInfo(f: V => String): V = {logger.info(f(v)); v}
    def logDebug(f: V => String): V = {logger.debug(f(v)); v}
    def logError(f: V => String): V = {logger.error(f(v)); v}
    def logWarn(f: V => String): V = {logger.warn(f(v)); v}
    def logTest(f: V => String): V = {println(f(v)); v}
  }
} 
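The implicit conversion exists so a value can be logged mid-expression and passed along unchanged. A small sketch using LLogging (names are illustrative):

object PriceCalculator extends LLogging {
  def total(prices: List[Double]): Double =
    prices.sum
      .logDebug(s => s"raw sum: $s")          // logs, then returns the sum unchanged
      .logInfo(t => f"total charged: $t%.2f")
}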
Example 139
Source File: package.scala    From CM-Well   with Apache License 2.0 5 votes vote down vote up
package cmwell.util

import com.typesafe.scalalogging.LazyLogging

import scala.xml._
import scala.xml.transform._


package object xml extends LazyLogging {
  private def addChild(n: Node, newChild: Node) = n match {
    case Elem(prefix, label, attribs, scope, child @ _*) =>
      Elem(prefix, label, attribs, scope, true, child ++ newChild: _*)
    case _ => {
      logger.error("Can only add children to elements!")
      n
    }
  }

  private class AddChildrenTo(label: String, newChild: Node) extends RewriteRule {
    override def transform(n: Node) = n match {
      case n @ Elem(_, `label`, _, _, _*) => addChild(n, newChild)
      case other                          => other
    }
  }

  def addChildInXml(xmlDoc: Node, parentNameInXmlDoc: String, childToAddUnderParent: Node): Node =
    new RuleTransformer(new AddChildrenTo(parentNameInXmlDoc, childToAddUnderParent)).transform(xmlDoc).head
} 
Example 140
Source File: SimpleScheduler.scala    From CM-Well   with Apache License 2.0 5 votes vote down vote up
package cmwell.util.concurrent

import java.util.concurrent.{ScheduledExecutorService, ScheduledFuture, ScheduledThreadPoolExecutor}

import com.typesafe.scalalogging.LazyLogging

import scala.concurrent.{ExecutionContext, Future, Promise}
import scala.concurrent.duration.{Duration, FiniteDuration}
import scala.util.Try


object SimpleScheduler extends LazyLogging {
  private[this] lazy val timer = {
    val executor = new ScheduledThreadPoolExecutor(1)
    executor.setRemoveOnCancelPolicy(true)
    executor.asInstanceOf[ScheduledExecutorService]
  }

  //method is private, since we must keep work done at the expense of our timer thread as limited as possible.
  //this method can be used if and only if we know `body` is a safe and small job.
  private[util] def scheduleInstant[T](duration: FiniteDuration)(body: => T) = {
    val p = Promise[T]()
    val cancellable = timer.schedule(
      new Runnable {
        override def run(): Unit = {
          // body must not be expensive to compute since it will be run in our only timer thread expense.
          p.complete(Try(body))
        }
      },
      duration.toMillis,
      java.util.concurrent.TimeUnit.MILLISECONDS
    )
    p.future -> Cancellable(cancellable)
  }

  def scheduleAtFixedRate(initialDelay: FiniteDuration, period: FiniteDuration, mayInterruptIfRunning: Boolean = false)(
    task: => Any
  )(implicit executionContext: ExecutionContext): Cancellable = {
    // memoize runnable task
    val runnable: Runnable = new Runnable {
      override def run(): Unit = Try(task).failed.foreach { err =>
        logger.error("schedueled task failed", err)
      }
    }

    val cancellable = timer.scheduleAtFixedRate(new Runnable {
      override def run(): Unit = executionContext.execute(runnable)
    }, initialDelay.toMillis, period.toMillis, java.util.concurrent.TimeUnit.MILLISECONDS)

    Cancellable(cancellable, mayInterruptIfRunning)
  }

  def schedule[T](duration: FiniteDuration)(body: => T)(implicit executionContext: ExecutionContext): Future[T] = {
    val p = Promise[T]()
    timer.schedule(
      new Runnable {
        override def run(): Unit = {
          // body may be expensive to compute, and must not run at the expense of our only timer thread,
          // so we compute the task inside a `Future` running on the given executionContext.
          p.completeWith(Future(body)(executionContext))
        }
      },
      duration.toMillis,
      java.util.concurrent.TimeUnit.MILLISECONDS
    )
    p.future
  }

  def scheduleFuture[T](duration: Duration)(body: => Future[T]): Future[T] = {
    val p = Promise[T]()
    timer.schedule(new Runnable {
      override def run(): Unit = p.completeWith(body)
    }, duration.toMillis, java.util.concurrent.TimeUnit.MILLISECONDS)
    p.future
  }
}

object Cancellable {
  def apply(scheduledFuture: ScheduledFuture[_], mayInterruptIfRunning: Boolean = false)=
    new Cancellable { override def cancel(): Boolean = scheduledFuture.cancel(mayInterruptIfRunning) }
}

trait Cancellable {
  def cancel(): Boolean
} 
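Typical use of the public API; the task bodies here are placeholders:

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._

// run once, computed off the timer thread on the given execution context
val answer = SimpleScheduler.schedule(5.seconds) { 40 + 2 }

// run repeatedly until cancelled
val ticker = SimpleScheduler.scheduleAtFixedRate(0.seconds, 1.minute) {
  println("polling...")
}
// ... later
ticker.cancel()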
Example 141
Source File: NoncesManager.scala    From CM-Well   with Apache License 2.0 5 votes vote down vote up
package security

import akka.actor.Actor
import com.typesafe.scalalogging.LazyLogging


class NoncesManager extends Actor with LazyLogging {
  private var nonces = Set.empty[String]

  override def receive: Receive = {
    case AddNonce(nonce)                      => nonces += nonce
    case ConsumeNonce(nonce) if nonces(nonce) => nonces -= nonce; sender ! NonceConsumed
    case ConsumeNonce(nonce)                  => sender ! NonceNotExist
  }
}

case class AddNonce(nonce: String)
case class ConsumeNonce(nonce: String)

trait NonceStatus
case object NonceNotExist extends NonceStatus
case object NonceConsumed extends NonceStatus 
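A sketch of one-shot nonce consumption via the ask pattern; the actor system name, nonce value, and timeout are assumptions:

import akka.actor.{ActorSystem, Props}
import akka.pattern.ask
import akka.util.Timeout

import scala.concurrent.duration._

implicit val timeout: Timeout = 3.seconds
val system = ActorSystem("security")
val nonces = system.actorOf(Props(new NoncesManager), "nonces")

nonces ! AddNonce("n-123")
val first  = (nonces ? ConsumeNonce("n-123")).mapTo[NonceStatus] // NonceConsumed
val second = (nonces ? ConsumeNonce("n-123")).mapTo[NonceStatus] // NonceNotExist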
Example 142
Source File: SparqlTriggeredProcessorMonitor.scala    From CM-Well   with Apache License 2.0 5 votes vote down vote up
package actions

import com.typesafe.scalalogging.LazyLogging
import k.grid.{Grid, WhoAreYou, WhoIAm}
import akka.pattern._
import akka.util.Timeout
import cmwell.ctrl.checkers.StpChecker.{RequestStats, ResponseStats, Row, Table}
import cmwell.domain.{FileContent, FileInfoton, SystemFields, VirtualInfoton}
import cmwell.ws.Settings
import org.joda.time.{DateTime, DateTimeZone}

import scala.concurrent.Future
import scala.concurrent.duration._
import scala.concurrent.ExecutionContext.Implicits.global


object SparqlTriggeredProcessorMonitor extends LazyLogging {
  implicit val timeout = Timeout(30.seconds)

  def getAddress =
    (stpManager ? WhoAreYou)
      .mapTo[WhoIAm]
      .map(_.address)
      .recover { case err: Throwable => "NA" }

  def stpManager = Grid.serviceRef("sparql-triggered-processor-manager")

  def jobsDataToTuple(lines: Iterable[Row]) = for (line <- lines) yield MarkdownTuple(line.toSeq: _*)

  def generateTables(path: String, dc: String, isAdmin: Boolean): Future[Option[VirtualInfoton]] = {
    val jobsDataFuture = (stpManager ? RequestStats(isAdmin))
      .mapTo[ResponseStats]
      .map { case ResponseStats(tables) => tables }

    val future = for {
      address <- getAddress
      tables <- jobsDataFuture

    } yield {
      val title =
        s"""
          |# Sparql Triggered Processor Monitor<br>
          |## Current host: $address  <br>
        """.stripMargin

      val tablesFormattedData = tables.map { table =>

        val mdTable = MarkdownTable(
          header = MarkdownTuple(table.header.toSeq: _*),
          body = jobsDataToTuple(table.body).toSeq
        )

        s"""
         |${table.title.mkString("### ", "<br>\n### ", "<br>")}
         |${mdTable.get} <hr>""".stripMargin
      }

      title + "\n\n" + tablesFormattedData.mkString("\n\n")
    }
    future.map { content =>
      Some(
        VirtualInfoton(
          FileInfoton(SystemFields(path, new DateTime(DateTimeZone.UTC), "VirtualInfoton", dc, None, "", "http"),
            None, content = Some(FileContent(content.getBytes("utf-8"), "text/x-markdown")))
        )
      )
    }
  }
} 
Example 143
Source File: InfotonValidator.scala    From CM-Well   with Apache License 2.0 5 votes vote down vote up
package logic
import cmwell.domain._
import com.typesafe.scalalogging.LazyLogging


object InfotonValidator extends LazyLogging {
  def isInfotonNameValid(path: String): Boolean = {
    val noSlash = if (path.startsWith("/")) path.dropWhile(_ == '/') else path
    val valid   = !noSlash.matches("_(.)*|(ii|zz|proc)(/(.)*)?")
    if (!valid) logger.warn(s"validation failed for infoton path: $path")
    valid
  }

  type Fields[K] = Map[K, Set[FieldValue]]
  def validateValueSize[K](fields: Fields[K]): Unit =
    if (fields.exists { case (_, s) => s.exists(_.size > cmwell.ws.Settings.maxValueWeight) })
      throw new IllegalArgumentException("uploaded infoton, contains a value heavier than 16K.")
} 
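For instance (paths are illustrative):

InfotonValidator.isInfotonNameValid("/example.org/people/jane") // true
InfotonValidator.isInfotonNameValid("/proc/node-health")        // false: reserved prefix, a warning is logged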
Example 144
Source File: ServicesRoutesCache.scala    From CM-Well   with Apache License 2.0 5 votes vote down vote up
package logic.services

import akka.actor.ActorSystem
import cmwell.domain.{DeletedInfoton, Infoton, ObjectInfoton}
import cmwell.fts.PathFilter
import cmwell.ws.Settings
import com.typesafe.scalalogging.LazyLogging
import javax.inject.{Inject, Singleton}
import logic.CRUDServiceFS

import scala.concurrent.{ExecutionContext, Future}
import scala.util.{Failure, Success, Try}

@Singleton
class ServicesRoutesCache @Inject()(crudService: CRUDServiceFS)(implicit ec: ExecutionContext, sys: ActorSystem) extends LazyLogging {
  private var services: Map[String, ServiceDefinition] = Map.empty
  private val (initialDelay, interval) = Settings.servicesRoutesCacheInitialDelay -> Settings.servicesRoutesCacheRefreshInterval

  sys.scheduler.schedule(initialDelay, interval)(populate())

  def find(path: String): Option[ServiceDefinition] =
    services.values.find(sd => path.startsWith(sd.route))

  def list: Set[String] = services.values.map(_.route).toSet

  def populate(): Future[Unit] = {
    //TODO use consume API, don't get everything each time
    crudService.search(Some(PathFilter("/meta/services", descendants = false)), withData = true, withDeleted = true).andThen {
      case Success(sr) =>
        val toAddOrUpdate = sr.infotons.collect { case oi: ObjectInfoton => oi }
        val toRemove = sr.infotons.collect { case di: DeletedInfoton => di }

        toAddOrUpdate.map(deserialize).
          collect { case Success(se) => se }.
          foreach { se => services += se.infotonPath -> se.serviceDefinition }

        toRemove.foreach(services -= _.systemFields.path)

      case Failure(t) => logger.error("Could not load Services from /meta/services", t)
    }.map(_ => ())
  }

  private def deserialize(infoton: Infoton): Try[ServiceEntry] = Try {
    val fields = infoton.fields.getOrElse(throw new RuntimeException(s"Infoton with no fields was not expected (path=${infoton.systemFields.path})"))

    def field(name: String): String = fields(name).head.value.toString

    val route = field("route")
    field("type.lzN1FA") match {
      case "cmwell://meta/sys#Redirection" =>
        val sourcePattern = field("sourcePattern")
        val replacement = field("replacement")
        val replaceFunc = (input: String) => sourcePattern.r.replaceAllIn(input, replacement)
        ServiceEntry(infoton.systemFields.path, RedirectionService(route, sourcePattern, replaceFunc))
      case "cmwell://meta/sys#Source" => ??? //TODO implement the unimplemented
      case "cmwell://meta/sys#Binary" => ??? //TODO implement the unimplemented
      case other => throw new RuntimeException(s"Infoton with type $other was not expected (path=${infoton.systemFields.path})")
    }
  }

  case class ServiceEntry(infotonPath: String, serviceDefinition: ServiceDefinition)
} 
Example 145
Source File: LogLevelHandler.scala    From CM-Well   with Apache License 2.0 5 votes vote down vote up
package controllers

import cmwell.ctrl.config.Jvms
import com.typesafe.scalalogging.LazyLogging
import k.grid.Grid
import k.grid.monitoring.{MonitorActor, SetNodeLogLevel}
import play.api.mvc._
import security.AuthUtils
import javax.inject._

import filters.Attrs

import scala.util.Try

@Singleton
class LogLevelHandler @Inject()(authUtils: AuthUtils) extends InjectedController with LazyLogging {
  def handleSetLogLevel = Action { implicit req =>
    val tokenOpt = authUtils.extractTokenFrom(req)
    if (authUtils.isOperationAllowedForUser(security.Admin, tokenOpt))
      setLogLevel(req)
    else
      Forbidden("not authorized")
  }

  private def setLogLevel(req: Request[AnyContent]) = {
    val validLogLevels = SetNodeLogLevel.lvlMappings.keySet
    val roleMapping = Map("WEB" -> Jvms.WS, "BG" -> Jvms.BG, "CTRL" -> Jvms.CTRL, "CW" -> Jvms.CW, "DC" -> Jvms.DC)
    val validComponents = roleMapping.keySet
    val validHosts = Grid.availableMachines

    val lvlStr = req.getQueryString("lvl")
    val component = req.getQueryString("comp").map(_.toUpperCase())
    val host = req.getQueryString("host")
    val duration = req.getQueryString("duration").orElse(Some("10"))

    (lvlStr, component, host, duration) match {
      case (Some(l), _, _, _) if !validLogLevels.contains(l.toUpperCase) =>
        BadRequest(s"Bad log level provided, the valid log levels are ${validLogLevels.mkString(", ")}.")
      case (_, Some(c), _, _) if !validComponents.contains(c.toUpperCase) =>
        BadRequest(s"Bad component provided, the valid components are ${validComponents.mkString(", ")}.")
      case (_, _, Some(h), _) if !validHosts.contains(h.toUpperCase) =>
        BadRequest(s"Bad host provided, the valid hosts are ${validHosts.mkString(", ")}.")
      case (_, _, _, Some(d)) if Try(d.toInt).isFailure =>
        BadRequest(
          s"Bad duration provided, please provide a positive int, or 0 if you wish to keep this log level indefinitely."
        )
      case (None, _, _, _) =>
        BadRequest(s"No log level provided, the valid log levels are ${validLogLevels.mkString(", ")}")
      case _ =>
        val lvl = lvlStr.flatMap(SetNodeLogLevel.levelTranslator)

        lvl.foreach { level =>
          val members = {
            val f1 = host
              .map { h =>
                Grid.jvmsAll.filter(_.host == h)
              }
              .getOrElse(Grid.jvmsAll)

            val f2 = component
              .map(c => roleMapping(c))
              .map { c =>
                f1.filter(h => h.identity.isDefined && h.identity.get == c)
              }
              .getOrElse(f1)
            f2
          }

          logger.info(s"Changing the log level of [${members.mkString(", ")}] to $level")

          members.foreach { member =>
            Grid.selectActor(MonitorActor.name, member) ! SetNodeLogLevel(level, duration.map(_.toInt))
          }
        }
        Ok("Done!")
    }
  }
} 
Example 146
Source File: BackPressureToggler.scala    From CM-Well   with Apache License 2.0 5 votes vote down vote up
package controllers

import cmwell.ws.Settings
import com.typesafe.scalalogging.LazyLogging
import javax.inject._
import k.grid.dmap.api.SettingsString
import k.grid.dmap.impl.persistent.PersistentDMap
import play.api.mvc.InjectedController
import security.AuthUtils

@Singleton
class BackPressureToggler @Inject()(authUtils: AuthUtils) extends InjectedController with LazyLogging {

  val BACKPRESSURE_TRIGGER = "cmwell.ws.pushbackpressure.trigger"

  def handleBackpressure = Action { implicit req =>
    val tokenOpt = authUtils.extractTokenFrom(req)
    if (authUtils.isOperationAllowedForUser(security.Admin, tokenOpt)) {
      val thresholdFactor = req.getQueryString("pbp")
      thresholdFactor.map(_.toLowerCase) match {
        case Some("enable") =>
          PersistentDMap.set(BACKPRESSURE_TRIGGER, SettingsString("enable"))
          Ok(s"Changed backpressure trigger to enable")
        case Some("disable") =>
          PersistentDMap.set(BACKPRESSURE_TRIGGER, SettingsString("disable"))
          Ok(s"Changed backpressure trigger to disable")
        case Some("block") =>
          PersistentDMap.set(BACKPRESSURE_TRIGGER, SettingsString("block"))
          Ok(s"Changed backpressure trigger to block")
        case None =>
          val curValOpt = PersistentDMap.get(BACKPRESSURE_TRIGGER).flatMap(_.as[String])
          curValOpt match {
            case Some(v) => Ok(s"Please provide the parameter 'pbp'. The current value is: '$v'")
            case None =>
              Ok(s"Please provide the parameter 'pbp'. No value is set; defaulting to ${Settings.pushbackpressure}")
          }
        case Some(unknown) => BadRequest(s"value [$unknown] is invalid. valid values are: [enable,disable,block]")
      }
    } else Forbidden("not authorized")
  }

  def get: String = PersistentDMap.get(BACKPRESSURE_TRIGGER).fold[String](Settings.pushbackpressure) {
    case SettingsString(v) => v
    case unknown           => s"invalid unknown state: $unknown"
  }
} 
Example 147
Source File: TrafficHandler.scala    From CM-Well   with Apache License 2.0 5 votes vote down vote up
package trafficshaping

import com.typesafe.scalalogging.LazyLogging
import k.grid.dmap.impl.persistent.PersistentDMap
import play.api.mvc.{Action, Controller, InjectedController}
import DMapKeys._
import k.grid.dmap.api.SettingsLong
import security.AuthUtils
import javax.inject._

import filters.Attrs

import scala.concurrent.duration._
import scala.concurrent.{ExecutionContext, Future}


@Singleton
class TrafficHandler @Inject()(authUtils: AuthUtils)(implicit ec: ExecutionContext)
    extends InjectedController
    with LazyLogging {
  // todo: remove this.
  def handleTimeout = Action.async { implicit originalRequest =>
    cmwell.util.concurrent.delayedTask(5.seconds)(Future.successful(Ok)).flatMap(identity)
  }

  def handleThresholdFactor = Action { implicit req =>
    val tokenOpt = authUtils.extractTokenFrom(req)
    if (authUtils.isOperationAllowedForUser(security.Admin, tokenOpt)) {
      val thresholdFactor = req.getQueryString("tf").map(_.toLong)
      thresholdFactor match {
        case Some(l) =>
          PersistentDMap.set(THRESHOLD_FACTOR, SettingsLong(l))
          Ok(s"Changed Threshold factor to $l")
        case None =>
          val curValOpt = PersistentDMap.get(THRESHOLD_FACTOR).flatMap(_.as[Long])
          curValOpt match {
            case Some(curVal) if curVal > 0L =>
              Ok(s"""Please provide the parameter "tf". The current value is: $curVal""")
            case _ => Ok(s"""Traffic shaping is disabled. Please provide the parameter "tf" in order to activate it.""")
          }
      }
    } else Forbidden("not authorized")
  }
} 
Example 148
Source File: CongestionAnalyzer.scala    From CM-Well   with Apache License 2.0 5 votes vote down vote up
package trafficshaping

import akka.actor.Actor
import akka.actor.Actor.Receive
import cmwell.ws.Settings
import cmwell.ws.Settings._
import com.typesafe.scalalogging.LazyLogging
import k.grid.dmap.api.SettingsLong
import k.grid.dmap.impl.persistent.PersistentDMap

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._


case object GetTrafficData
case class TrafficData(requestors: Map[String, RequestorCounter])

object CongestionAnalyzer {
  val name = "CongestionAnalyzer"
  var penalizeTopUsers = 3
}

class CongestionAnalyzer extends Actor with LazyLogging {
  import Settings._
  import DMapKeys._

  val numOfCpus = Runtime.getRuntime.availableProcessors()
  def getThresholdFactor: Long = {
    PersistentDMap
      .get(THRESHOLD_FACTOR)
      .map {
        case SettingsLong(l) => l
        case _               => 0L
      }
      .getOrElse(0L)
  }

  case object AnalyzeCongestion

  @throws[Exception](classOf[Exception])
  override def preStart(): Unit = {
    context.system.scheduler.schedule(0.seconds, checkFrequency.seconds, self, AnalyzeCongestion)
  }

  override def receive: Receive = {
    case AnalyzeCongestion =>
      val thresholdFactor = getThresholdFactor
      val threshold = checkFrequency.seconds.toMillis * thresholdFactor

      TrafficShaper.lastRequests.toVector
        .sortBy(_._2.requestsTime)
        .takeRight(CongestionAnalyzer.penalizeTopUsers)
        .foreach {
          case (k, v) =>
            if (v.requestsTime > threshold) {
              v.penalty = v.penalty.next
              logger.info(s"The user $k is getting ${v.penalty}.")
            } else v.penalty = v.penalty.prev
            v.reset
        }

      TrafficShaper.lastRequests = TrafficShaper.lastRequests.filter {
        case (k, v) => v.penalty != NoPenalty || v.requestsTime > 0L
      }

    case GetTrafficData =>
      sender ! TrafficData(TrafficShaper.getRequestors)
  }
} 
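For intuition about the threshold arithmetic above (the numbers are made up; the real ones come from cmwell.ws.Settings and the DMap):

// assumed: checkFrequency = 20 (seconds), stored thresholdFactor = 5
// threshold = 20.seconds.toMillis * 5 = 100000 ms of accumulated request time per check window
// a user whose requestsTime exceeds this gets its penalty escalated via penalty.next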
Example 149
Source File: AddFormatParameterIfOnlyAcceptHeaderProvidedFilter.scala    From CM-Well   with Apache License 2.0
package filters

import akka.stream.Materializer
import com.typesafe.scalalogging.LazyLogging
import javax.inject._
import play.api.http.MediaType
import play.api.mvc.{Filter, RequestHeader, Result}

import scala.concurrent.Future

class AddFormatParameterIfOnlyAcceptHeaderProvidedFilter @Inject()(implicit val mat: Materializer) extends Filter {
  //MediaType2Format
  private[this] val mt2f: PartialFunction[String, PartialFunction[String, String]] = (mt: String) =>
    mt match {
      case "text" => {
        case "yaml"       => "yaml"
        case "json"       => "json"
        case "rdf+n3"     => "n3"
        case "n3"         => "n3"
        case "plain"      => "ntriples"
        case "ntriples"   => "ntriples"
        case "turtle"     => "ttl"
        case "ttl"        => "ttl"
        case "rdf+turtle" => "ttl"
        case "rdf+ttl"    => "ttl"
        case "n-quads"    => "nquads"
      }
      case "application" => {
        case "json"     => "jsonl"
        case "ld+json"  => "jsonld"
        case "rdf+xml"  => "rdfxml"
        case "x-nquads" => "nquads"
      }
      //    case "xml" => {
      //      case "rdf" => "rdfxml"
      //    }
  }

  private def formatToValidType(mt: MediaType): String = mt2f(mt.mediaType)(mt.mediaSubType)

  private def isCMWellAccepted(mt: MediaType): Boolean =
    mt2f.isDefinedAt(mt.mediaType) && mt2f(mt.mediaType).isDefinedAt(mt.mediaSubType)

  override def apply(next: (RequestHeader) => Future[Result])(request: RequestHeader): Future[Result] = {
    val withFormat =
      if ((request.getQueryString("format").isDefined || request.acceptedTypes.isEmpty) && Set("post", "get")(
            request.method.toLowerCase
          )) request
      else
        request.acceptedTypes.find(isCMWellAccepted(_)) match {
          case Some(mt) =>
            val newTarget = request.target.withQueryString(request.target.queryMap + ("format" -> Seq(formatToValidType(mt))))
            request.withTarget(newTarget)
          case None     => request
        }
    next(withFormat)
  }
} 
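The nested partial functions above give a two-level dispatch: isCMWellAccepted probes isDefinedAt on both levels before formatToValidType applies them, so no MatchError can escape. A minimal self-contained sketch of the same idiom (names and mappings here are illustrative, not the filter's API):

val outer: PartialFunction[String, PartialFunction[String, String]] = {
  case "text" => {
    case "turtle" => "ttl"
    case "plain"  => "ntriples"
  }
}

def accepted(tpe: String, sub: String): Boolean =
  outer.isDefinedAt(tpe) && outer(tpe).isDefinedAt(sub)

accepted("text", "turtle")  // true  -> outer("text")("turtle") == "ttl"
accepted("image", "png")    // false -> the first level does not match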
Example 150
Source File: IngesterMain.scala    From CM-Well   with Apache License 2.0
package cmwell.tools.data.ingester

import java.io.FileInputStream
import java.util.zip.GZIPInputStream

import akka.stream.scaladsl.Sink
import cmwell.tools.data.utils.akka.stats.IngesterStats
//import cmwell.tools.data.sparql.SparqlProcessorMain.Opts.opt
import cmwell.tools.data.utils.ArgsManipulations._
import cmwell.tools.data.utils.akka.Implicits._
import cmwell.tools.data.utils.akka._
import cmwell.tools.data.utils.ops._
import com.typesafe.scalalogging.LazyLogging
import org.rogach.scallop.ScallopConf

import scala.concurrent.ExecutionContext.Implicits.global

object IngesterMain extends App with LazyLogging {
  object Opts extends ScallopConf(args) {
    version(s"cm-well ingester ${getVersionFromManifest()} (c) 2015")

    val host = opt[String]("host", descr = "cm-well host name", required = true)
    val format = opt[String]("format", descr = "input format (e.g. ntriples, nquads, jsonld)", required = true)
    val file = opt[String]("file", descr = "input file path", default = None)
    val gzip = opt[Boolean]("gzip", descr = "is input file gzipped", default = Some(false))
    val token = opt[String]("token", descr = "cm-well write permission token", default = None)
    val replaceMode =
      opt[Boolean]("with-replace-mode", descr = "replace-mode parameter in cm-well", default = Some(false))
    val force = opt[Boolean]("force", descr = "force parameter in cm-well", default = Some(false))
    val priority = opt[Boolean]("priority", default = Some(false), descr = "ingest data in priority mode")
    val numConnections = opt[Int]("num-connections", descr = "number of http connections to open")

    dependsOnAll(gzip, List(file))
    verify()
  }

  val start = System.currentTimeMillis()

  var totalIngestedBytes = 0L
  var ingestedBytesInWindow = 0
  var ingestedInfotonsInWindow = 0
  var totalIngestedInfotons = 0L
  var totalFailedInfotons = 0L
  var lastTime = start
  var nextPrint = 0L
  var lastMessageSize = 0
  val windowSizeMillis = 1000

  val formatter = java.text.NumberFormat.getNumberInstance

  // resize akka http connection pool
  Opts.numConnections.toOption.foreach { numConnections =>
    System.setProperty("akka.http.host-connection-pool.max-connections", numConnections.toString)
  }

  val inputStream = if (Opts.file.isSupplied) {
    val inputFile = new FileInputStream(Opts.file())
    if (Opts.gzip()) {
      new GZIPInputStream(inputFile)
    } else {
      inputFile
    }
  } else {
    System.in
  }

  val result = Ingester
    .fromInputStream(
      baseUrl = formatHost(Opts.host()),
      format = Opts.format(),
      writeToken = Opts.token.toOption,
      replaceMode = Opts.replaceMode(),
      force = Opts.force(),
      isPriority = Opts.priority(),
      in = inputStream
    )
    .via(IngesterStats(isStderr = true))
    .runWith(Sink.ignore)

  // actor system is still alive, will be destroyed when finished
  result.onComplete { x =>
    System.err.println("\n")
    System.err.println(s"finished: $x")
    cleanup()
  }
} 
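Given the Scallop options declared above, a typical invocation would look like the following (the host, file path, and launcher name are placeholders, not taken from the project):

// ingester --host cmwell.example.org --format ntriples --file /tmp/dump.nt.gz --gzip --priority
// omitting --file reads from stdin, per the inputStream fallback above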
Example 151
Source File: Publisher.scala    From CM-Well   with Apache License 2.0
package cmwell.rts

import akka.actor.{Actor, ActorRef, ActorSelection}
import cmwell.domain.Infoton
import k.grid.Grid
import com.typesafe.scalalogging.LazyLogging


case class AddSubscriber(subscriber: String, rule: Rule)
case class RemoveSubscriber(subscriber: String)

case class Publish(i: Vector[Infoton])
case class PublishOne(uuid: String)
//case class Publish(s : String)

class PublishAgent extends Actor with LazyLogging {
  // need a mapping rule (q) -> actor (label)
  var rules: Map[String, Rule] = Map.empty[String, Rule]
  var subMap: Map[String, ActorSelection] = Map.empty[String, ActorSelection]

  def publish_data(subscriber: String, i: Infoton): Unit = {
    // send
    val a = subMap(subscriber)
    logger.debug(s"Send data to $subscriber [$a]")
    a ! PublishOne(i.uuid)
  }

  def receive = {
    // add a rule to the internal map
    case AddSubscriber(subscriber: String, rule: Rule) =>
      val addr = sender().path.address
      val path = s"akka.tcp://${addr.system}@${addr.host.getOrElse("")}:${addr.port.getOrElse(0)}/user/$subscriber"
      rules += (subscriber -> rule)
      subMap += (subscriber -> context.actorSelection(path))
      logger.debug(s"AddRule rules [${rules}] sub map [${subMap}]")
    // remove the rule from the internal map
    case RemoveSubscriber(subscriber: String) =>
      rules -= (subscriber)
      subMap -= (subscriber)
      logger.debug(s"RemoveRule ${subscriber} rules [${rules}] sub map [${subMap}]")

    // publishes the infotons according to each subscriber's rule
    case Publish(infotonVec: Vector[Infoton]) => {
      logger.debug(s"in actor $infotonVec")
      // first lets calc
      infotonVec.foreach { i =>
        rules.foreach {
          case (subscriber, rule) =>
            rule match {
              case NoFilter =>
                publish_data(subscriber, i)
              case PathFilter(path) =>
                if (path.check(i.systemFields.path))
                  publish_data(subscriber, i)
              case MatchFilter(f) =>
                if (i.fields.isDefined && f.check(i.fields.get))
                  publish_data(subscriber, i)
              case PMFilter(p, m) =>
                if (p.check(i.systemFields.path) && i.fields.isDefined && m.check(i.fields.get))
                  publish_data(subscriber, i)
            }
        }
      }
    }
    case _ =>
      logger.debug("Error")
  }

}

object Publisher {
  val publishAgentActor: ActorRef = Grid.create(classOf[PublishAgent], "publisher")
  val p: Publisher = new Publisher(publishAgentActor)
  def init: Unit = {}
  def publish(i: Vector[Infoton]): Unit = p.publish(i)
}

class Publisher(val publishAgentActor: ActorRef) {

  def publish(i: Vector[Infoton]): Unit = {
    // no block call here
    publishAgentActor ! Publish(i)
  }

} 
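Publishing is fire-and-forget: the tell (!) returns immediately and PublishAgent fans the infotons out per subscriber rule. A minimal usage sketch (the infotons value is assumed to be produced elsewhere):

Publisher.init                       // forces creation of the "publisher" actor
val infotons: Vector[Infoton] = ???  // produced elsewhere, e.g. by an ingest flow
Publisher.publish(infotons)          // non-blocking; each subscriber's Rule decides delivery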
Example 152
Source File: GridMonitor.scala    From CM-Well   with Apache License 2.0
package k.grid.monitoring

import akka.util.Timeout
import cmwell.util.concurrent._
import akka.actor.Actor
import akka.actor.Actor.Receive
import akka.pattern.ask
import k.grid._
import k.grid.service.LocalServiceManager
import k.grid.service.messages.{RegisterServices, ServiceInstantiationRequest}
import scala.concurrent.Future
import scala.concurrent.duration._
import com.typesafe.scalalogging.LazyLogging
import scala.concurrent.ExecutionContext.Implicits.global


case object GetMembersInfo
case class MembersInfo(m: Map[GridJvm, JvmInfo])

object GridMonitor extends LazyLogging {
  implicit val timeout = Timeout(15.seconds)
  lazy private val sel = Grid.selectSingleton(GridMonitor.name, None, Grid.seedMembers.head)
  def name = "GridMonitor"
  def init = Grid.createSingleton(classOf[GridMonitor], name, None)
  def getMembersInfo: Future[MembersInfo] = {
    logger.info("[GridMonitor] getMembersInfo")
    (sel ? GetMembersInfo).mapTo[MembersInfo]
  }
}

case class MemInfoKey(host: String, name: String)
class GridMonitor extends Actor with LazyLogging {
  private implicit val timeout = Timeout(15.seconds)

  private[this] var membersInfo = Map.empty[MemInfoKey, (GridJvm, JvmInfo)]
  private case object SendInfoRequests
  private case class UpdateMembersInfoMap(m: Map[GridJvm, JvmInfo])

  @throws[Exception](classOf[Exception])
  override def preStart(): Unit = {
    context.system.scheduler.schedule(0.seconds, 10.seconds, self, SendInfoRequests)

  }

  override def receive: Receive = {
    case UpdateMembersInfoMap(m) =>
      membersInfo = membersInfo ++ m.map {
        case (k, v) => MemInfoKey(k.host, k.identity.map(_.name).getOrElse("")) -> (k, v)
      }
    case SendInfoRequests => {
      logger.info("SendInfoRequests")
      val jvms = Grid.jvmsAll
      val futures = jvms.map { jvm =>
        ((Grid.selectActor(ClientActor.name, jvm) ? GetClientInfo).mapTo[JvmInfo].map(jvm -> _)).recover {
          case _ => {
            val inf = membersInfo.get(MemInfoKey(jvm.host, jvm.identity.map(_.name).getOrElse(""))) match {
              case Some((gridJvm, jvmInfo)) => jvmInfo.copy(status = Stopped)
              case None                     => JvmInfo(ClientMember, Stopped, -1, 0L, Set.empty[MemoryInfo], Set.empty[GcInfo], "NA", "")
            }
            jvm -> inf
          }
        }
      }

      val future = successes(futures).map(_.toMap)
      future.foreach(m => self ! UpdateMembersInfoMap(m))
    }
    case GetMembersInfo =>
      logger.info("Got GetMembersInfo")
      sender ! MembersInfo(membersInfo.map { case (k1, (k2, v)) => k2 -> v })
  }
} 
Example 153
Source File: MonitorActor.scala    From CM-Well   with Apache License 2.0
package k.grid.monitoring

import akka.actor.{Actor, Cancellable}
import ch.qos.logback.classic.{Level, Logger}
import com.typesafe.scalalogging.LazyLogging
import org.slf4j.LoggerFactory
import akka.pattern.pipe
import scala.concurrent.duration._
import scala.concurrent.ExecutionContext.Implicits.global

object MonitorActor {
  val name = "MonitorActor"
}

object SetNodeLogLevel {
  val lvlMappings = Map(
    "OFF" -> Level.OFF,
    "ERROR" -> Level.ERROR,
    "WARN" -> Level.WARN,
    "INFO" -> Level.INFO,
    "DEBUG" -> Level.DEBUG,
    "TRACE" -> Level.TRACE,
    "ALL" -> Level.ALL
  )

  def levelTranslator(lvl: String): Option[Level] = {
    lvlMappings.get(lvl.toUpperCase)
  }
}

case object GetNodeLogLevel
case class NodeLogLevel(lvl: String)
case class SetNodeLogLevel(level: Level, levelDuration: Option[Int] = Some(10))

class MonitorActor extends Actor with LazyLogging {
  private[this] var originalLogLevel: Level = _
  private val editableLogger = "ROOT"
  private[this] var scheduledLogLevelReset: Cancellable = _

  @throws[Exception](classOf[Exception])
  override def preStart(): Unit = {
    //FIXME: what if logger is not logback? use jmx or write more defensive code
    originalLogLevel = {
      val l = LoggerFactory.getLogger(editableLogger)
      val f = l.getClass.getProtectionDomain.getCodeSource.getLocation.getFile
      l.info("logger is loaded from: " + f)
      l.asInstanceOf[ch.qos.logback.classic.Logger].getLevel
    }
  }

  override def receive: Receive = {
    case PingChildren =>
      MonitorUtil.pingChildren.pipeTo(sender)

    case SetNodeLogLevel(lvl, duration) =>
      if (scheduledLogLevelReset != null) {
        scheduledLogLevelReset.cancel()
        scheduledLogLevelReset = null
      }

      logger.info(s"Setting $editableLogger to log level $lvl")
      duration.foreach { d =>
        logger.info(s"Scheduling $editableLogger to be in level $originalLogLevel in $d minutes")
        scheduledLogLevelReset =
          context.system.scheduler.scheduleOnce(d.minutes, self, SetNodeLogLevel(originalLogLevel, None))
      }

      LoggerFactory.getLogger(editableLogger).asInstanceOf[ch.qos.logback.classic.Logger].setLevel(lvl)
      //change also the log level of the akka logger
      val akkaLoggerName = "akka"
      LoggerFactory.getLogger(akkaLoggerName) match {
        case akkaLogger: Logger => akkaLogger.setLevel(lvl) // a typed pattern never matches null, so no null check is needed
        case _                  => // not a logback Logger; nothing to change
      }
    case GetNodeLogLevel =>
      val lvl = LoggerFactory.getLogger(editableLogger).asInstanceOf[ch.qos.logback.classic.Logger].getLevel
      sender ! NodeLogLevel(lvl.toString)
  }
} 
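A hedged usage sketch for the log-level protocol above: sending SetNodeLogLevel temporarily changes the ROOT logger, and the scheduled reset restores the original level after the given number of minutes. Grid and GridJvm come from k.grid as in the neighboring examples; the target jvm handle is an assumption:

import ch.qos.logback.classic.Level
import k.grid.{Grid, GridJvm}

val jvm: GridJvm = ???  // some grid member, obtained elsewhere
Grid.selectActor(MonitorActor.name, jvm) ! SetNodeLogLevel(Level.DEBUG, Some(10))
Grid.selectActor(MonitorActor.name, jvm) ! GetNodeLogLevel  // the actor replies NodeLogLevel("DEBUG") to the sender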
Example 154
Source File: PersistentDMap.scala    From CM-Well   with Apache License 2.0
package k.grid.dmap.impl.persistent

import java.io.{File, FileNotFoundException, PrintWriter}

import com.typesafe.scalalogging.LazyLogging
import k.grid.{Config, Grid}
import k.grid.dmap.api._
import play.api.libs.json.Json

import scala.util.{Failure, Success, Try}
import scala.concurrent.duration._
import json.MapDataJsonProtocol._
import scala.concurrent.ExecutionContext.Implicits.global

object PersistentDMap extends DMapFacade {
  override def masterType: DMapActorInit = DMapActorInit(classOf[PersistentMaster], "PersistentMaster")

  override def slaveType: DMapActorInit = DMapActorInit(classOf[PersistentSlave], "PersistentSlave")
}

class PersistentMaster extends DMapMaster {
  override val facade: DMapFacade = PersistentDMap
  override def onStart: Unit = {}
}

class PersistentSlave extends DMapSlave with LazyLogging {

  Grid.system.scheduler.schedule(5.seconds, 1.second, self, WriteData)

  case class MapHolder(m: Map[String, SettingsValue], timestamp: Long)

  case object NewData extends DMapMessage {
    override def act: Unit = {
      hasNewData = true
    }
  }

  case object WriteData extends DMapMessage {
    override def act: Unit = {
      val m = facade.sm
      if (hasNewData) {
        writeMap(MapData(m, lastTimestamp))
        hasNewData = false
      }

    }
  }

  var hasNewData: Boolean = false

  private val dataFile = new File(s"${Grid.persistentDmapDir}/${Config.clusterName}")

  def readMap: Option[MapData] = {
    val content = Try {
      val src = scala.io.Source.fromFile(dataFile)
      val mData = Json.parse(src.getLines().mkString("\n")).as[MapData]
      src.close()
      mData
    } match {
      case Success(c)                                          => Some(c)
      case Failure(e) if e.isInstanceOf[FileNotFoundException] => None
      case Failure(e) => {
        logger.error(e.getMessage, e)
        None
      }
    }
    content
  }

  def writeMap(md: MapData) = {
    val content = Json.stringify(Json.toJson(md))
    new PrintWriter(dataFile) { write(content); close }
  }

  override val facade: DMapFacade = PersistentDMap
  override def onStart: Unit = {

    if (Grid.isController) {
      import java.io.File
      logger.info(s" *** Will use data dir: ${Grid.persistentDmapDir}")
      Try(new File(Grid.persistentDmapDir).mkdir())

      val mdOpt = readMap

      mdOpt.foreach { md =>
        lastTimestamp = md.timestamp
        facade.sm = md.m
      }
    }
  }

  override protected def onUpdate(oldMap: Map[String, SettingsValue],
                                  newMap: Map[String, SettingsValue],
                                  timestamp: Long): Unit = {
    if (Grid.isController)
      self ! NewData
  }
} 
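Read/write usage mirrors what TrafficHandler above does with THRESHOLD_FACTOR; a minimal sketch (the key string is illustrative, real callers use constants such as those in DMapKeys):

import k.grid.dmap.api.SettingsLong

PersistentDMap.set("threshold.factor", SettingsLong(5L))  // master replicates; PersistentSlave persists to disk
val cur: Option[Long] = PersistentDMap.get("threshold.factor").flatMap(_.as[Long])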
Example 155
Source File: RegistrationCoordinator.scala    From CM-Well   with Apache License 2.0
package k.grid.registration

import akka.actor.Actor
import akka.actor.Actor.Receive
import akka.pattern.ask
import scala.concurrent.duration._
import com.typesafe.scalalogging.LazyLogging
import k.grid.{Grid, GridJvm}
import k.grid.registration.messages.{GridTopology, RegistrationPing}
import scala.concurrent.ExecutionContext.Implicits.global


object RegistrationCoordinator {
  val name = "GossipCoordinator"

  def init = {
    Grid.createSingleton(classOf[RegistrationCoordinator], RegistrationCoordinator.name, None)
    Grid.selectSingleton(RegistrationCoordinator.name, None)
  }

}
class RegistrationCoordinator extends Actor with LazyLogging {
  private case class GridJvmContainer(gj: GridJvm, ts: Long)
  private case object ClearIdles

  private def currentSeconds = System.currentTimeMillis() / 1000
  private[this] var jvmSet = Set.empty[GridJvmContainer]
  @throws[Exception](classOf[Exception])
  override def preStart(): Unit = {
    context.system.scheduler.schedule(0.seconds, 5.seconds, self, ClearIdles)
  }

  override def receive: Receive = {
    case RegistrationPing(jvm) =>
      jvmSet = jvmSet + GridJvmContainer(jvm, currentSeconds)
      sender ! GridTopology(jvmSet.map(_.gj))
    case ClearIdles =>
      val currentTime = currentSeconds
      jvmSet = jvmSet.filter(jvm => currentTime - jvm.ts < 30)
  }
} 
Example 156
Source File: LocalRegistrationManager.scala    From CM-Well   with Apache License 2.0
package k.grid.registration

import scala.concurrent.duration._
import akka.actor.Actor
import akka.actor.Actor.Receive
import com.typesafe.scalalogging.LazyLogging
import k.grid._
import k.grid.registration.messages.{GridTopology, RegistrationPing}
import scala.concurrent.ExecutionContext.Implicits.global


object LocalRegistrationManager {
  val name = "LocalGossipManager"
  // todo: change to Map[(Host, IdentityName),GridJvm]
  private[LocalRegistrationManager] var _jvms = Set.empty[GridJvm]
  private[LocalRegistrationManager] var _regFails = 0

  def registrationFailure = _regFails == Config.possibleRegFails

  def jvms = _jvms
}
class LocalRegistrationManager extends Actor with LazyLogging {
  private case object SendGossipPing
  private case object IncreaseRegFails

  val isController = Grid.isController
  def registrationCoordinator = Grid.selectSingleton(RegistrationCoordinator.name, None, Grid.clusterProxy)
  @throws[Exception](classOf[Exception])
  override def preStart(): Unit = {
    context.system.scheduler.schedule(5.seconds, 5.seconds, self, SendGossipPing)
    context.system.scheduler.schedule(30.seconds, 30.seconds, self, IncreaseRegFails)

  }

  override def receive: Receive = {
    case SendGossipPing => registrationCoordinator ! RegistrationPing(Grid.thisMember)
    case GridTopology(jvms) =>
      LocalRegistrationManager._regFails = 0

      val jvmsJoined = jvms -- LocalRegistrationManager._jvms
      val jvmsLeft = LocalRegistrationManager._jvms -- jvms

      LocalRegistrationManager._jvms = jvms

      // send the data to the client actor so it can forward it to its subscribers.
      Grid.selectActor(ClientActor.name, Grid.thisMember) ! JvmMembershipReport(jvmsJoined, jvmsLeft)

      logger.debug(s"Current jvms: $jvms")
    case IncreaseRegFails =>
      LocalRegistrationManager._regFails = Math.min(LocalRegistrationManager._regFails + 1, Config.possibleRegFails)
  }
} 
Example 157
Source File: DummyService.scala    From CM-Well   with Apache License 2.0
package k.grid.testgrid

import akka.actor.Actor
import akka.actor.Actor.Receive
import com.typesafe.scalalogging.LazyLogging
import k.grid.Grid
import k.grid.dmap.api.MapData
import k.grid.dmap.impl.persistent.PersistentDMap
import scala.concurrent.duration._
import scala.concurrent.ExecutionContext.Implicits.global


case class DummyMessage(str: String)
case class WriteToPersistentDMap(md: MapData, delay: FiniteDuration = 1.seconds)

class DummyService extends Actor with LazyLogging {

  @throws[Exception](classOf[Exception])
  override def preStart(): Unit = {
    logger.info(" *** Starting DummyService")
  }

  override def receive: Receive = {
    case msg @ DummyMessage(str) => {
      logger.info(s" *** DummyService Received $msg")
      sender ! str
    }

    case msg @ WriteToPersistentDMap(md, delay) => {
      logger.info(s" *** DummyService Received $msg")
      Grid.system.scheduler.scheduleOnce(delay) {
        md.m.foreach { tuple =>
          PersistentDMap.set(tuple._1, tuple._2)
        }
      }
    }
  }
} 
Example 158
Source File: AggregationTests.scala    From CM-Well   with Apache License 2.0
package cmwell.it

import com.typesafe.scalalogging.LazyLogging
import org.scalatest.{AsyncFunSpec, Inspectors, Matchers}
import play.api.libs.json.{JsValue, _}

import scala.collection.mutable.ArrayBuffer
import scala.concurrent.duration._

class AggregationTests extends AsyncFunSpec with Matchers with Inspectors with Helpers with LazyLogging {

  describe("Agg API should") {
    val agg = scala.io.Source.fromURL(this.getClass.getResource("/agg/aggnames_293846.nq"))
    val ingestAgg = {
      Http.post(_in, agg.mkString, Some("text/nquads;charset=UTF-8"), List("format" -> "nquads"), tokenHeader)
    }.map { res =>
        withClue(res) {
          res.status should be(200)
          jsonSuccessPruner(Json.parse(res.payload)) shouldEqual jsonSuccess
        }
    }

    agg.close()


    val path = cmw / "test.agg.org" / "Test201903_05_1501_11" / "testStatsApiTerms"

    val aggForIntField = executeAfterCompletion(ingestAgg) {
      spinCheck(100.millis, true)(Http.get(
        uri = path,
        queryParams = List("op" -> "stats", "format" -> "json", "ap" -> "type:term,field::$http://qa.test.rfnt.com/v1.1/testns/num$,size:3")))
      { r =>
        (Json.parse(r.payload) \ "AggregationResponse" \\ "buckets": @unchecked) match {
          case n: collection.IndexedSeq[JsValue] => (r.status == 200) && n.forall(jsonval=> jsonval.as[JsArray].value.size == 3)
        }
      }.map { res =>
        withClue(res) {
          res.status should be(200)
          val total = (Json.parse(res.payload) \ "AggregationResponse" \\ "buckets").map(jsonval=> jsonval.as[JsArray].value.size)
          total should equal (ArrayBuffer(3))
        }
      }
    }


    val aggForExactTextField = executeAfterCompletion(ingestAgg) {
      spinCheck(100.millis, true)(Http.get(
        uri = path,
        queryParams = List("op" -> "stats", "format" -> "json", "ap" -> "type:term,field::$http://qa.test.rfnt.com/v1.1/testns/Test_Data$,size:2")))
      { r =>
        (Json.parse(r.payload) \ "AggregationResponse" \\ "buckets": @unchecked) match {
          case n: collection.IndexedSeq[JsValue] => (r.status == 200) && n.forall(jsonval=> jsonval.as[JsArray].value.size == 2)
        }
      }.map { res =>
        withClue(res) {
          res.status should be(200)
          val total = (Json.parse(res.payload) \ "AggregationResponse" \\ "buckets").map(jsonval=> jsonval.as[JsArray].value.size)
          total should equal (ArrayBuffer(2))
        }
      }
    }


    val badQueryNonExactTextMatch = executeAfterCompletion(ingestAgg) {
      spinCheck(100.millis, true)(Http.get(
        uri = path,
        queryParams = List("op" -> "stats", "format" -> "json", "ap" -> "type:term,field:$http://qa.test.rfnt.com/v1.1/testns/Test_Data$,size:2")))
      { r =>
        Json.parse(r.payload).toString()
          .contains("Stats API does not support non-exact value operator for text fields. Please use :: instead of :") && r.status == 400
      }.map { res =>
        withClue(res) {
          res.status should be(400)
          val result = (Json.parse(res.payload) \ "error").as[String]
          result should include ("Stats API does not support non-exact value operator for text fields. Please use :: instead of :")
        }
      }
    }



    it("ingest aggnames data successfully")(ingestAgg)
    it("get stats for int field")(aggForIntField)
    it("get exact stats for string field")(aggForExactTextField)
    it("get stats for non exact string field should be bad response")(badQueryNonExactTextMatch)

  }

} 
Example 159
Source File: ServicesRoutesTests.scala    From CM-Well   with Apache License 2.0
package cmwell.it

import com.typesafe.scalalogging.LazyLogging
import org.scalatest.{AsyncFunSpec, Matchers}

import scala.concurrent.duration.DurationInt

class ServicesRoutesTests extends AsyncFunSpec with Matchers with Helpers with LazyLogging {

  import cmwell.util.http.SimpleResponse.Implicits.UTF8StringHandler

  private def putTextualFileInfoton(path: String, payload: String) = {
    Http.post(path, payload, Some("text/plain"),
      headers = tokenHeader :+ "X-CM-Well-Type" -> "File").flatMap { _ =>
      spinCheck(100.millis, true)(Http.get(path))(_.status == 200)
    }
  }

  private def refreshCache() = Http.get(cmw / "_services-cache", List("op" -> "refresh"))

  describe("Services Routes") {
    val ingestServiceInfotonsAndRefreshCache = {
      val totalServices = 2
      val data =
        """@prefix nn:    <cmwell://meta/nn#> .
          |@prefix sys:   <cmwell://meta/sys#> .
          |@prefix o:     <cmwell://meta/services/> .
          |
          |o:PermID  a                 sys:Redirection ;
          |        nn:route            "/permid/" ;
          |        nn:sourcePattern    "/permid/(.*)" ;
          |        nn:replacement      "/graph.link/PID-$1" .
          |
          |
          |o:TempRoute  a              sys:Redirection ;
          |        nn:route            "/temp/" ;
          |        nn:sourcePattern    "/temp/(.*)" ;
          |        nn:replacement      "/graph.link/temp/$1" .
          |
          |        """.stripMargin


      Http.post(_in, data, Some("text/rdf+turtle;charset=UTF-8"), List("format" -> "ttl"), tokenHeader).flatMap { _ =>
        spinCheck(100.millis, true)(Http.get(cmw / "meta" / "services", List("op" -> "stream")))(_.payload.trim().lines.size == totalServices)
      }.flatMap(_ => refreshCache())
    }

    it("Should get an Infoton via Service Route of Redirection type") {
      // testing redirection: GET /permid/XYZ should return content of /graph.link/PID-XYZ

      val payload = "Hello World 789"
      val actualPath = cmw / "graph.link" / "PID-789"
      val ingestTargetInfoton = putTextualFileInfoton(actualPath, payload)

      (ingestTargetInfoton zip ingestServiceInfotonsAndRefreshCache).flatMap { _ =>
        Http.get(cmw / "permid" / "789").map(_.payload should be(payload))
      }
    }

    it("Should remove a service route") {
      val tempRouteServiceInfotonPath = cmw / "meta" / "services" / "TempRoute"
      val virtualPath = cmw / "temp" / "1234"
      val actualPath = cmw / "graph.link" / "temp" / "1234"

      val ingestTargetInfoton = putTextualFileInfoton(actualPath, payload = "Foo Bar")

      (ingestTargetInfoton zip ingestServiceInfotonsAndRefreshCache).flatMap { _ =>
        Http.get(virtualPath).map(_.status should be(200))
      }.flatMap { _ =>
        Http.delete(tempRouteServiceInfotonPath, headers = tokenHeader).flatMap { _ =>
          spinCheck(100.millis, true)(Http.get(cmw / "meta" / "services", List("op" -> "stream")))(!_.payload.contains("TempRoute"))
        }
      }.flatMap { _ =>
        refreshCache()
      }.flatMap { _ =>
        Http.get(virtualPath).map(_.status should be(404))
      }
    }
  }
} 
Example 160
Source File: MetaNsTests.scala    From CM-Well   with Apache License 2.0
package cmwell.it

import cmwell.it.fixture.NSHashesAndPrefixes
import cmwell.util.formats.JsonEncoder
import cmwell.util.http.SimpleResponse
import com.typesafe.scalalogging.LazyLogging
import org.scalatest.{AsyncFunSpec, Matchers}

import scala.concurrent.duration._

class MetaNsTests extends AsyncFunSpec with Matchers with Helpers with NSHashesAndPrefixes with LazyLogging {
  describe("CM-Well /meta/ns") {

    it("should make sure all meta was uploaded and indexed") {
      val urls = metaNsPaths.mkString("", "\n", "\n")

      spinCheck(100.millis, true)(Http.post(_out, urls, Some("text/plain;charset=UTF-8"), List("format" -> "json"), tokenHeader)){
        case SimpleResponse(_, _, (_, bag)) => {
          JsonEncoder.decodeBagOfInfotons(bag) match {
            case Some(boi) => boi.infotons.length == metaNsPaths.size
            case None => false
          }
        }
      }
      .map {
        case res@SimpleResponse(_, _, (_, bag)) => {
          val infotons = JsonEncoder.decodeBagOfInfotons(bag) match {
            case Some(boi) => boi.infotons
            case None => throw new RuntimeException(s"got bad response: $res")
          }

          withClue {
            val s1 = infotons.map(_.systemFields.path).toSet
            val s2 = metaNsPaths
            s"""
             |s1 (${s1.size}) = infotons.length (${infotons.length})
             |s2 (${s2.size}) = listOfUrls.size (${metaNsPaths.size})
             |s1 &~ s2 = ${s1 &~ s2}
             |s2 &~ s1 = ${s2 &~ s1}
             |""".stripMargin }(infotons.length should be(metaNsPaths.size))
        }
      }
    }
  }
} 
Example 161
Source File: FileInfotonTests.scala    From CM-Well   with Apache License 2.0
package cmwell.it

import java.nio.charset.StandardCharsets

import com.typesafe.scalalogging.LazyLogging
import org.scalatest.{AsyncFunSpec, Matchers, TryValues}
import play.api.libs.json._

import scala.concurrent.duration.DurationInt
import scala.io.Source

class FileInfotonTests extends AsyncFunSpec with Matchers with TryValues with Helpers with LazyLogging {
  describe("file infoton") {
    val path = cmt / "InfoFile4"
    val fileStr = Source.fromURL(this.getClass.getResource("/article.txt")).mkString
    val j = Json.obj("Offcourse" -> Seq("I can do it"),"I'm" -> Seq("a spellbinder"))

    val f0 = Http.post(path, fileStr, Some("text/plain;charset=UTF-8"), Nil, ("X-CM-WELL-TYPE" -> "FILE") :: tokenHeader).map { res =>
      withClue(res){
        Json.parse(res.payload) should be(jsonSuccess)
      }
    }
    val f1 = f0.flatMap {_ => spinCheck(100.millis, true)(Http.get(path)){res =>
      new String(res.payload, StandardCharsets.UTF_8) == fileStr && res.contentType.takeWhile(_ != ';') == "text/plain"}
      .map { res =>
        withClue(res) {
          new String(res.payload, StandardCharsets.UTF_8) should be(fileStr)
          res.contentType.takeWhile(_ != ';') should be("text/plain")
        }
      }}
    val f2 = f1.flatMap(_ => Http.post(path, Json.stringify(j), None, Nil, ("X-CM-WELL-TYPE" -> "FILE_MD") :: tokenHeader)).map {res =>
      withClue(res) {
        Json.parse(res.payload) should be(jsonSuccess)
      }
    }
    val f3 = f2.flatMap(_ => spinCheck(100.millis, true)(Http.get(path, List("format" -> "json"))){
      res =>
        val jsonResult = Json.parse(res.payload).transform(fieldsSorter andThen (__ \ 'fields).json.pick)
        jsonResult match {
          case JsSuccess(value, _) => value == j
          case JsError(_) => false
        }
    }.map{ res =>
        withClue(res) {
          Json
            .parse(res.payload)
            .transform(fieldsSorter andThen (__ \ 'fields).json.pick)
            .get shouldEqual j
        }
      }
    )
    val f4 = f3.flatMap(_ => Http.delete(uri = path, headers = tokenHeader).map { res =>
       withClue(res) {
         Json.parse(res.payload) should be(jsonSuccess)
       }
    })
    val lenna = cmt / "lenna"
    val f5 = {
      val lennaInputStream = this.getClass.getResource("/Lenna.png").openStream()
      Http.post(lenna / "Lenna.png", () => lennaInputStream, Some("image/png"), Nil, ("X-CM-WELL-TYPE" -> "FILE") :: tokenHeader).transform { res =>
        // first, close the stream
        lennaInputStream.close()
        withClue(res)(res.map { r =>
          Json.parse(r.payload) should be(jsonSuccess)
        })
      }
    }
    val f6 = spinCheck(100.millis,true,1.minute)(Http.get(lenna,List("op" -> "search","qp" -> "content.mimeType:image/png", "format" -> "json"))){ res =>
        res.status match {
          case 503 => Recoverable
          case 200 => {
            val j = Json.parse(res.payload) \ "results"
            (j \ "total": @unchecked) match {
              case JsDefined(JsNumber(n)) => n.intValue == 1
            }
          }
          case _ => UnRecoverable
        }
      }.map { res =>
      withClue(res) {
        val j = Json.parse(res.payload) \ "results"
        (j \ "infotons": @unchecked) match {
          case JsDefined(JsArray(arr)) => (arr.head \ "system" \ "path": @unchecked) match {
            case JsDefined(JsString(lennaPath)) =>
              lennaPath shouldEqual "/cmt/cm/test/lenna/Lenna.png"
          }
        }
      }
    }

    it("should put File infoton")(f0)
    it("should get previously inserted file with text/plain mimetype")(f1)
    it("should put file infoton metadata")(f2)
    it("should get file infoton metadata")(f3)
    it("should delete file infoton")(f4)
    it("should upload Lenna.png image")(f5)
    it("should search by content.mimeType")(f6)
  }
} 
Example 162
Source File: DataToolsLogging.scala    From CM-Well   with Apache License 2.0
package cmwell.tools.data.utils.logging

import com.typesafe.scalalogging.{LazyLogging, Logger}
import org.slf4j.LoggerFactory

case class LabelId(id: String)

trait DataToolsLogging {
  private[data] lazy val redLogger = Logger(LoggerFactory.getLogger("tools-red-logger"))
  private[data] lazy val badDataLogger = Logger(LoggerFactory.getLogger("tools-bad-data"))

  val label: Option[String] = None

  protected lazy val logger: Logger = {
    val loggerName = if (label.isEmpty) getClass.getName else s"${getClass.getName} [${label.get}]"
    Logger(LoggerFactory.getLogger(loggerName))
  }
} 
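Mixing the trait in yields a logger named after the concrete class, with the optional label appended in brackets; a hypothetical consumer:

class Downloader extends DataToolsLogging {
  override val label: Option[String] = Some("job-42")  // logger name becomes "...Downloader [job-42]"
  def run(): Unit = logger.info("starting download")
}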
Example 163
Source File: TaskExecutorActor.scala    From CM-Well   with Apache License 2.0
package cmwell.ctrl.tasks

import akka.actor.{Actor, ActorRef}
import akka.actor.Actor.Receive
import akka.util.Timeout
import com.typesafe.scalalogging.LazyLogging
import scala.concurrent.duration._
import scala.util.{Failure, Success}
import scala.concurrent.ExecutionContext.Implicits.global


trait TaskStatus
case object InProgress extends TaskStatus
case object Complete extends TaskStatus
case object Failed extends TaskStatus

object TaskExecutorActor {
  val name = "TaskExecutorActor"
}

class TaskExecutorActor extends Actor with LazyLogging {
  implicit val timeout = Timeout(15.seconds)
  private var status: TaskStatus = _
  private var s: ActorRef = _
  private case class TaskFinished(status: TaskStatus)
  @throws[Exception](classOf[Exception])
  override def preStart(): Unit = {}

  override def receive: Receive = {
    case t: Task => {
      logger.info(s"Starting task: $t")
      s = sender()
      status = InProgress
      t.exec.onComplete {
        case Success(tr) => {
          tr match {
            case TaskSuccessful =>
              self ! TaskFinished(Complete)
              s ! TaskSuccessful
            case TaskFailed =>
              self ! TaskFinished(Failed)
              s ! TaskFailed
          }
        }
        case Failure(err) =>
          self ! TaskFinished(Failed)
          s ! TaskFailed
      }
    }
    case TaskFinished(stat) => {
      status = stat
    }
  }
} 
Example 164
Source File: AddNode.scala    From CM-Well   with Apache License 2.0
package cmwell.ctrl.tasks

import akka.actor.ActorSelection
import akka.actor.FSM.Failure
import akka.util.Timeout
import cmwell.ctrl.checkers.{ComponentState, GreenStatus, YellowStatus}
import cmwell.ctrl.commands._
import cmwell.ctrl.hc._
import cmwell.ctrl.server.CommandActor
import com.typesafe.scalalogging.LazyLogging
import k.grid.Grid

import scala.concurrent.{Future, Promise}
import scala.concurrent.duration._
import akka.pattern.ask

import scala.util.Success
import scala.concurrent.ExecutionContext.Implicits.global


case class AddNode(node: String) extends Task with LazyLogging {
  implicit val timeout = Timeout(15.seconds)

  private def startElasticsearch(cmd: ActorSelection, prom: Promise[Unit]): Unit = {
    logger.info(s"Starting Elasticsearch on node $node")
    cmd ! StartElasticsearch
    Grid.system.scheduler.scheduleOnce(60.seconds) {
      (HealthActor.ref ? GetElasticsearchDetailedStatus).mapTo[ElasticsearchGridStatus].map { f =>
        f.getStatesMap.get(node) match {
          case Some(s) =>
            if (s.getColor == GreenStatus || s.getColor == YellowStatus) prom.success(())
            else startElasticsearch(cmd, prom)
          case None => startElasticsearch(cmd, prom)
        }
      }
    }
  }

  private def startCassandra(cmd: ActorSelection, prom: Promise[Unit]): Unit = {
    logger.info(s"Starting Cassandra on node $node")
    cmd ! StartCassandra
    Grid.system.scheduler.scheduleOnce(60.seconds) {
      (HealthActor.ref ? GetCassandraDetailedStatus).mapTo[CassandraGridStatus].map { f =>
        f.getStatesMap.get(node) match {
          case Some(s) =>
            if (s.getColor == GreenStatus) prom.success(())
            else startCassandra(cmd, prom)
          case None => startCassandra(cmd, prom)
        }
      }
    }
  }

  override def exec: Future[TaskResult] = {
    val cmd = CommandActor.select(node)
    val esPromise = Promise[Unit]
    val casPromise = Promise[Unit]

    startElasticsearch(cmd, esPromise)
    startCassandra(cmd, casPromise)

    val esCancelable = cancel(esPromise, 24.hours)
    val casCancelable = cancel(casPromise, 24.hours)
    val esFuture = esPromise.future
    val casFuture = casPromise.future

    // cancel the 24-hour timeout cancelables once the corresponding futures succeed
    esFuture.foreach(x => esCancelable.cancel())
    casFuture.foreach(x => casCancelable.cancel())

    val fut = for {
      esStarted <- esFuture
      casStarted <- casFuture
    } yield {
      logger.info("Starting CM-WELL components")
      cmd ! StartKafka
      cmd ! StartBg
      cmd ! StartWebserver
      cmd ! StartCw
      cmd ! StartDc
    }

    fut.map(r => TaskSuccessful).recover { case err: Throwable => TaskFailed }
  }
} 
Example 165
Source File: ProcUtil.scala    From CM-Well   with Apache License 2.0
package cmwell.ctrl.utils

import com.typesafe.scalalogging.LazyLogging

import scala.util.{Failure, Success, Try}
import scala.sys.process._
import scala.language.postfixOps


object ProcUtil extends LazyLogging {

  def executeCommand(com: String): Try[String] = {
    logger.debug(s"executing $com")
    Try(Seq("bash", "-c", com) !!)
  }

  def rsync(from: String, user: String, host: String, path: String, flags: String = "-Pvaz"): Try[String] = {
    _rsync(from, s"$user@$host:$path", flags)
  }

  private def _rsync(from: String, to: String, flags: String = "-Pvaz"): Try[String] = {
    Try(Seq("rsync", flags, from, to) !!)
  }

  def checkIfProcessRun(processName: String): Int = {
    executeCommand(s"ps aux | grep java | grep -v starter | grep -v grep | grep $processName | wc -l") match {
      case Success(str) => str.trim.toInt
      case Failure(err) => 0
    }
  }

} 
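A short usage sketch (the command strings are illustrative):

import scala.util.{Failure, Success}

ProcUtil.executeCommand("uptime") match {
  case Success(out) => println(out.trim)
  case Failure(err) => println(s"command failed: ${err.getMessage}")
}

val wsCount = ProcUtil.checkIfProcessRun("ws")  // number of matching java processes; 0 if the command fails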
Example 166
Source File: CtrlServer.scala    From CM-Well   with Apache License 2.0
package cmwell.ctrl.server

import akka.util.Timeout
import cmwell.ctrl.config.Config._
import com.typesafe.scalalogging.LazyLogging
import k.grid._
import uk.org.lidalia.sysoutslf4j.context.SysOutOverSLF4J

import scala.concurrent.duration._
import scala.language.postfixOps


object CtrlServer extends App with LazyLogging {
  logger.info("Starting CtrlServer process")
  //SLF4J initialization is not thread safe, so it is "initialized" by writing a log line and only then calling sendSystemOutAndErrToSLF4J.
  //Without this, an error appears on stderr and some log lines at the beginning are lost.
  SysOutOverSLF4J.sendSystemOutAndErrToSLF4J()

  implicit val timeout = Timeout(3 seconds)
  Grid.setGridConnection(GridConnection(memberName = "ctrl"))
  Grid.join
  Grid.create(classOf[CommandActor], commandActorName)
} 
Example 167
Source File: ComponentController.scala    From CM-Well   with Apache License 2.0
package cmwell.ctrl.controllers

import akka.actor.ActorSelection
import cmwell.ctrl.controllers.CassandraController._
import cmwell.ctrl.config.Config
import cmwell.ctrl.utils.ProcUtil
import com.typesafe.scalalogging.LazyLogging
import k.grid.Grid

import scala.concurrent.{blocking, Future}
import scala.util.{Failure, Success}
import scala.concurrent.duration._
import scala.concurrent.ExecutionContext.Implicits.global


abstract class ComponentController(startScriptLocation: String, psIdentifier: String, dirIdentifier: Set[String]) {
  object ComponentControllerLogger extends LazyLogging {
    lazy val l = logger
  }

  protected val startScriptPattern: String = "start[0-9]*.sh"

  def getStartScriptLocation = startScriptLocation

  def getStartScripts(location: String): Set[String] = {
    ProcUtil.executeCommand(s"ls -1 $location/ | grep $startScriptPattern") match {
      case Success(str) =>
        str.trim.split("\n").toSet
      case Failure(err) => Set.empty[String]
    }
  }

  def getDataDirs(location: String, id: String): Set[String] = {
    ProcUtil.executeCommand(s"ls -1 $location | grep $id[0-9]*") match {
      case Success(str) =>
        str.trim.split("\n").toSet
      case Failure(err) => Set.empty[String]
    }
  }

  private def doStart: Unit = {
    getStartScripts(startScriptLocation).foreach { sScript =>
      val runScript = s"HAL=9000 $startScriptLocation/$sScript"
      ProcUtil.executeCommand(runScript)
    }
  }

  def start: Unit = {
    Future {
      blocking {
        // the blocking hint belongs inside the Future so it marks the worker
        // thread that actually runs the shell commands (as stop and restart do)
        doStart
      }
    }
  }

  private def doStop(forceKill: Boolean = false, tries: Int = 5): Unit = {
    val cmd =
      s"ps aux | grep $psIdentifier | egrep -v 'grep|starter' | awk '{print $$2}' | xargs kill ${if (forceKill) "-9"
      else ""}"
    ComponentControllerLogger.l.info(s"executing $cmd")
    ProcUtil.executeCommand(cmd)
    val isDead =
      ProcUtil.executeCommand(s"ps aux | grep $psIdentifier | egrep -v 'grep|starter' | awk '{print $$2}'").get.isEmpty
    if (!isDead) {
      if (tries > 1) doStop(false, tries - 1) else doStop(true, tries - 1)
    }

  }
  def stop: Unit = {
    Future {
      blocking {
        doStop()
      }
    }
  }

  def restart: Unit = {
    Future {
      blocking {
        doStop()
        doStart
      }
    }
  }

  def clearData: Unit = {
    Future {
      blocking {
        dirIdentifier.foreach { id =>
          getDataDirs(s"${Config.cmwellHome}/data/", id).foreach { dir =>
            ProcUtil.executeCommand(s"rm -rf ${Config.cmwellHome}/data/$dir/")
          }
        }
      }
    }
  }
} 
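Concrete controllers such as CassandraController or WebserverController supply the constructor parameters; a hypothetical subclass for illustration:

object ExampleController
    extends ComponentController(
      startScriptLocation = s"${Config.cmwellHome}/app/example", // hypothetical install path
      psIdentifier = "example-proc",                             // matched against ps output in doStop
      dirIdentifier = Set("example")                             // data dirs removed by clearData
    )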
Example 168
Source File: WebChecker.scala    From CM-Well   with Apache License 2.0
package cmwell.ctrl.checkers

import cmwell.ctrl.config.Config
import cmwell.ctrl.controllers.WebserverController
import cmwell.util.http.{SimpleHttpClient => Http}
import com.typesafe.scalalogging.LazyLogging

import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._
import scala.concurrent.duration.FiniteDuration


object WebChecker extends Checker with RestarterChecker with LazyLogging {

  override val storedStates: Int = 10

  override def restartAfter: FiniteDuration = 10.minutes

  override def doRestart: Unit = {
    WebserverController.restart
    logger.warn("Webservice was restarted.")
  }

  private val req = s"http://${Config.webAddress}:${Config.webPort}"

  override def check: Future[ComponentState] = {
    val res = Http.get(req)
    val startTime = System.currentTimeMillis()
    res
      .map { x =>
        val now = System.currentTimeMillis()
        if (x.status < 400 || x.status == 503) WebOk((now - startTime).toInt)
        else {
          logger.warn(s"WebChecker.check: got a bad response when GET $req. response = $x")
          WebBadCode(x.status, (now - startTime).toInt)
        }
      }
      .recover {
        case _: Throwable =>
          WebDown()
      }
  }
} 
Example 169
Source File: Checker.scala    From CM-Well   with Apache License 2.0
package cmwell.ctrl.checkers

import com.typesafe.scalalogging.LazyLogging

import scala.concurrent.Future
import scala.concurrent.duration.{Duration, FiniteDuration}


case class StateStore(size: Int) extends LazyLogging {
  private[this] val s = new scala.collection.mutable.Queue[ComponentState]()

  def add(cs: ComponentState): Unit = {
    s.enqueue(cs)
    if (s.size > size) s.dequeue()
    logger.debug(s"StateStore: $s, max-size: $size, current-size: ${s.size}")
  }

  def getLastStates(num: Int): Vector[ComponentState] = {
    s.reverse.take(num).toVector
  }

  def reset: Unit = {
    s.clear()
  }
}

trait Checker {
  val storedStates: Int = 5
  private lazy val states = StateStore(storedStates)
  def check: Future[ComponentState]
  def storeState(cs: ComponentState): Unit = {
    states.add(cs)
  }

  def getLastStates(num: Int) = states.getLastStates(num)
  def resetStates = states.reset
}

trait RestarterChecker {
  def restartAfter: FiniteDuration
  def doRestart: Unit
} 
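A quick sketch of StateStore's bounded-queue semantics, assuming cs1..cs4 are ComponentState values produced by some checker:

val store = StateStore(3)
Seq(cs1, cs2, cs3, cs4).foreach(store.add)  // cs1 is dequeued once the size cap of 3 is exceeded
store.getLastStates(2)                      // Vector(cs4, cs3): newest first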
Example 170
Source File: CassandraChecker.scala    From CM-Well   with Apache License 2.0
package cmwell.ctrl.checkers

import cmwell.ctrl.ddata.DData
import cmwell.ctrl.hc.HealthActor
import cmwell.ctrl.utils.ProcUtil
import cmwell.ctrl.config.Config._
import com.typesafe.scalalogging.LazyLogging

import scala.util.{Failure, Success}
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent._


object CassandraChecker extends Checker with LazyLogging {
  override val storedStates: Int = 10

  private def getCassandraStatus(host: String = ""): ComponentState = {
    val pingIp = DData.getPingIp //HealthActor.getPingIp
    val com =
      if (host != "")
        s"$cmwellHome/conf/cas/cassandra-status-viewer $host"
      else if (pingIp != "")
        s"$cmwellHome/conf/cas/cassandra-status-viewer $pingIp"
      else
        s"$cmwellHome/conf/cas/cassandra-status-viewer"

    ProcUtil.executeCommand(com) match {
      case Success(res) =>
        val stats = res
          .split("\n")
          .map { r =>
            val t = r.split(" ")
            t(1) -> t(0)
          }
          .toMap

        val racks = res
          .split("\n")
          .map { r =>
            val t = r.split(" ")
            t(1) -> t(2)
          }
          .toMap
        val racksReversed = racks.groupBy { _._2 }.map { case (key, value) => (key, value.unzip._1) }
        CassandraOk(stats, racksReversed)
      case Failure(err) =>
        logger.error("Could not parse cassandra-status-viewer response", err)
        CassandraDown()
    }
  }

  override def check: Future[ComponentState] = {
    blocking {
      Future {
        getCassandraStatus()
      }
    }
  }

  def getReachableHost(hosts: Set[String]): Option[String] = {
    hosts.collectFirst {
      case host if (getCassandraStatus(host).isInstanceOf[CassandraOk]) => host
    }
  }
} 
Example 171
Source File: ElasticsearchChecker.scala    From CM-Well   with Apache License 2.0
package cmwell.ctrl.checkers

import cmwell.ctrl.config.Config
import cmwell.ctrl.utils.ProcUtil
import cmwell.util.http.{SimpleHttpClient => Http}
import com.typesafe.scalalogging.LazyLogging

import scala.concurrent.Future
import play.api.libs.json.{JsValue, Json}

import scala.concurrent.ExecutionContext.Implicits.global


object ElasticsearchChecker extends Checker with LazyLogging {
  override val storedStates: Int = 10
  override def check: Future[ComponentState] = {
    val url = s"http://${Config.pingIp}:9201/_cluster/health"
    val res = Http.get(url)
    val hasMaster = ProcUtil.checkIfProcessRun("es-master") > 0
    res
      .map { r =>
        if (r.status == 200) {
          val json: JsValue = Json.parse(r.payload)
          val status = json.\("status").as[String]
          val n = (json \ "number_of_nodes").as[Int]
          val d = (json \ "number_of_data_nodes").as[Int]
          val p = (json \ "active_primary_shards").as[Int]
          val s = (json \ "active_shards").as[Int](implicitly)
          status match {
            case "green"  => ElasticsearchGreen(n, d, p, s, hasMaster)
            case "yellow" => ElasticsearchYellow(n, d, p, s, hasMaster)
            case "red"    => ElasticsearchRed(n, d, p, s, hasMaster)
            case _        => throw new Exception("Bad status")
          }
        } else
          ElasticsearchBadCode(r.status, hasMaster)
      }
      .recover {
        case e: Throwable => {
          logger.error("ElasticsearchChecker check failed with an exception: ", e)
          ElasticsearchDown(hasMaster)
        }
      }
  }
} 
Example 172
Source File: SystemChecker.scala    From CM-Well   with Apache License 2.0
package cmwell.ctrl.checkers

import cmwell.ctrl.utils.ProcUtil
import com.typesafe.scalalogging.LazyLogging
import scala.concurrent.Future
import java.net._
import scala.collection.JavaConverters._
import scala.concurrent.ExecutionContext.Implicits.global
import scala.util.{Failure, Success}


case class DiskUsage(name: String, usage: Float)
object SystemChecker extends Checker with LazyLogging {

  private def deviceUsage: Set[DiskUsage] = {
    val resTry =
      ProcUtil.executeCommand("""df -h | awk '{print $5 " " $6}' | awk -F "% " '{print $1 " " $2}' | tail -n+2""")
    resTry match {
      case Success(res) =>
        res.trim
          .split("\n")
          .map { t =>
            val vals = t.split(" ")
            DiskUsage(vals(1), vals(0).toFloat)
          }
          .toSet
      case Failure(err) =>
        logger.error("Couldn't retrieve disk usage", err)
        Set.empty[DiskUsage]
    }
  }

  override val storedStates: Int = 10
  override def check: Future[ComponentState] = {
//    val usages = deviceUsage
//    usages.foreach {
//      usage =>
//        val name = usage.name
//        val usagePercent = usage.usage
//        logger.info(s"DiskUsage: $name $usagePercent")
//    }

    val interfaces = NetworkInterface.getNetworkInterfaces.asScala
      .flatMap(_.getInetAddresses.asScala.toList)
      .filter(addr => addr != null && addr.getHostAddress.matches("""\d+\.\d+\.\d+\.\d+""")) // dots escaped: match only dotted-quad IPv4 addresses
      .toVector
    val name = InetAddress.getLocalHost().getHostName().split('.')(0)
    Future.successful(SystemResponse(interfaces.map(inet => inet.getHostAddress), name))
  }
} 
Example 173
Source File: DebugStage.scala    From CM-Well   with Apache License 2.0
package cmwell.dc.stream.akkautils

import akka.stream.{Attributes, FlowShape, Inlet, Outlet}
import akka.stream.stage.{GraphStage, GraphStageLogic, InHandler, OutHandler}
import com.typesafe.scalalogging.LazyLogging


object DebugStage {
  def apply[A](name: String): DebugStage[A] = new DebugStage(name)
}

class DebugStage[A](name: String) extends GraphStage[FlowShape[A, A]] with LazyLogging {
  val in = Inlet[A]("DebugStage.in")
  val out = Outlet[A]("DebugStage.out")
  override val shape = FlowShape.of(in, out)
  override def createLogic(attr: Attributes): GraphStageLogic =
    new GraphStageLogic(shape) {
      setHandler(
        in,
        new InHandler {
          override def onPush(): Unit = {
            logger.info(s"[$name]: grabbing element")
            val elem = grab(in)
            logger.info(s"[$name]: pushing the grabbed element $elem")
            push(out, elem)
          }
          override def onUpstreamFinish(): Unit = {
            logger.info(s"[$name]: onUpstreamFinish")
            super.onUpstreamFinish()
          }
          override def onUpstreamFailure(ex: Throwable): Unit = {
            logger.info(s"[$name]: onUpstreamFailure")
            super.onUpstreamFailure(ex)
          }
        }
      )
      setHandler(
        out,
        new OutHandler {
          override def onPull(): Unit = {
            logger.info(s"[$name]: pulling element")
            pull(in)
          }
          override def onDownstreamFinish(): Unit = {
            logger.info(s"[$name]: onDownstreamFinish")
            super.onDownstreamFinish()
          }
        }
      )
    }
} 
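Wiring the stage into a stream traces each element as it is grabbed, pushed, and pulled; a minimal sketch, assuming an implicit Materializer (and its ActorSystem) is in scope:

import akka.stream.scaladsl.{Sink, Source}

Source(1 to 3)
  .via(DebugStage[Int]("numbers"))  // logs "grabbing element" / "pushing ..." / "pulling element"
  .runWith(Sink.ignore)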
Example 174
Source File: Runner.scala    From CM-Well   with Apache License 2.0
package cmwell.bg

import cmwell.common.ZStoreOffsetsService
import cmwell.driver.Dao
import cmwell.fts.FTSService
import cmwell.irw.IRWService
import cmwell.zstore.ZStore
import com.typesafe.config.{ConfigFactory, ConfigValueFactory}
import com.typesafe.scalalogging.LazyLogging
import k.grid.service.ServiceTypes
import k.grid.{Grid, GridConnection}
import uk.org.lidalia.sysoutslf4j.context.SysOutOverSLF4J

import scala.collection.JavaConverters._
import scala.concurrent.duration._
import scala.language.postfixOps


        neighborhoodPartitions.foldLeft(basicServiceTypes) { (st,par) =>
          st.add(s"BGActor$par", classOf[CMWellBGActor], par,
            config.withValue("cmwell.bg.persist.commands.partition", ConfigValueFactory.fromAnyRef(par))
              .withValue("cmwell.bg.index.commands.partition", ConfigValueFactory.fromAnyRef(par)),
            irwService, ftsService, zStore, offsetsService
          )
        }
      }

      Grid.setGridConnection(GridConnection(memberName = "bg", labels = Set("bg")))
      Grid.declareServices(serviceTypes)

      Grid.joinClient

      Thread.sleep(60000)
    } catch {
      case t: Throwable =>
        logger.error(s"BG Process failed to start thus exiting. Reason:\n${cmwell.common.exception.getStackTrace(t)}")
        sys.exit(1)
    }
  }
} 
Example 175
Source File: RefsEnricher.scala    From CM-Well   with Apache License 2.0
package cmwell.bg.imp

import akka.NotUsed
import akka.stream.FlowShape
import akka.stream.contrib.PartitionWith
import akka.stream.scaladsl.{Flow, GraphDSL, Merge, Partition}
import cmwell.bg.BGMetrics
import cmwell.common.formats.BGMessage
import cmwell.common._
import cmwell.zstore.ZStore
import com.typesafe.scalalogging.LazyLogging

import scala.concurrent.ExecutionContext

object RefsEnricher extends LazyLogging {

  def toSingle(bgm: BGMetrics, irwReadConcurrency: Int, zStore: ZStore)
              (implicit ec: ExecutionContext): Flow[BGMessage[Command], BGMessage[SingleCommand], NotUsed] = {

    Flow.fromGraph(GraphDSL.create() { implicit b =>
      import GraphDSL.Implicits._

      // CommandRef goes left, all rest go right
      // update metrics for each type of command
      val commandsPartitioner = b.add(PartitionWith[BGMessage[Command], BGMessage[CommandRef], BGMessage[Command]] {
        case bgm @ BGMessage(_, CommandRef(_)) => Left(bgm.asInstanceOf[BGMessage[CommandRef]])
        case bgm => Right(bgm)
      })

      val commandRefsFetcher = Flow[BGMessage[CommandRef]].mapAsync(irwReadConcurrency) {
        case bgMessage @ BGMessage(_, CommandRef(ref)) => {
          zStore.get(ref).map { payload =>
            bgMessage.copy(message = CommandSerializer.decode(payload))
          }
        }
      }

      val singleCommandsMerge = b.add(Merge[BGMessage[Command]](2))

      commandsPartitioner.out0 ~> commandRefsFetcher ~> singleCommandsMerge.in(0)

      commandsPartitioner.out1 ~> singleCommandsMerge.in(1)

      FlowShape(commandsPartitioner.in, singleCommandsMerge.out.map {
        bgMessage => {
          // cast to SingleCommand while updating metrics
          bgMessage.message match {
            case wc: WriteCommand           => bgm.writeCommandsCounter += 1
                                               bgm.infotonCommandWeightHist += wc.infoton.weight
            case oc: OverwriteCommand       => bgm.overrideCommandCounter += 1
                                               bgm.infotonCommandWeightHist += oc.infoton.weight
            case _: UpdatePathCommand       => bgm.updatePathCommandsCounter += 1
            case _: DeletePathCommand       => bgm.deletePathCommandsCounter += 1
            case _: DeleteAttributesCommand => bgm.deleteAttributesCommandsCounter += 1
            case unknown                    => logger.error(s"unknown command [$unknown]")
          }
          bgm.commandMeter.mark()
          bgMessage.copy(message = bgMessage.message.asInstanceOf[SingleCommand])
        }
      }.outlet)
    })
  }
} 
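A sketch of wiring this flow into a stream; the import locations of Command and SingleCommand are assumed from the wildcard import above, and irwReadConcurrency = 8 is an arbitrary illustrative value:

import akka.NotUsed
import akka.stream.scaladsl.Source
import cmwell.bg.BGMetrics
import cmwell.common.{Command, SingleCommand}
import cmwell.common.formats.BGMessage
import cmwell.zstore.ZStore

import scala.concurrent.ExecutionContext

// Hypothetical wiring: dereference CommandRef payloads before downstream processing.
def enrich(commands: Source[BGMessage[Command], NotUsed], bgm: BGMetrics, zStore: ZStore)
          (implicit ec: ExecutionContext): Source[BGMessage[SingleCommand], NotUsed] =
  commands.via(RefsEnricher.toSingle(bgm, irwReadConcurrency = 8, zStore))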
Example 176
Source File: IotHubSinkTask.scala    From toketi-kafka-connect-iothub   with MIT License
package com.microsoft.azure.iot.kafka.connect.sink

import java.util

import com.microsoft.azure.iot.kafka.connect.source.JsonSerialization
import com.microsoft.azure.sdk.iot.service.{DeliveryAcknowledgement, Message}
import com.typesafe.scalalogging.LazyLogging
import org.apache.kafka.clients.consumer.OffsetAndMetadata
import org.apache.kafka.common.TopicPartition
import org.apache.kafka.connect.sink.{SinkRecord, SinkTask}

import scala.collection.JavaConverters._

class IotHubSinkTask extends SinkTask with LazyLogging with JsonSerialization {

  // Protected for testing purposes
  protected     var messageSender     : Option[MessageSender]                = None
  protected     var acknowledgement   : DeliveryAcknowledgement              = DeliveryAcknowledgement.None
  private[this] var isClosing         : Boolean                              = false

  override def stop(): Unit = {
    logger.info("Stopping IotHubSink Task")
    if (this.messageSender.isDefined && !this.isClosing) {
      this.messageSender.synchronized {
        if (!this.isClosing) {
          this.isClosing = true
          logger.info("Closing IotHub clients")
          this.messageSender.get.close()
        }
      }
    }
  }

  override def put(records: util.Collection[SinkRecord]): Unit = {
    if (this.messageSender.isDefined && !this.isClosing) {
      this.messageSender.synchronized {
        if (!this.isClosing) {
          logger.info(s"Received ${records.size()} messages to be sent to devices. ")
          for (record: SinkRecord ← records.asScala) {
            val c2DMessage = C2DMessageConverter.validateSchemaAndGetMessage(record)
            this.sendMessage(c2DMessage)
          }
          logger.info(s"Started tasks to send ${records.size()} messages to devices.")
        }
      }
    } else {
      logger.info(s"Unable to send messages to devices - MessageSender is undefined " +
        s"= ${messageSender.isEmpty.toString}, isClosing = ${this.isClosing.toString}")
    }
  }

  private def sendMessage(c2DMessage: C2DMessage): Unit = {
    logger.info(s"Sending c2d message ${c2DMessage.toString}")
    val message = new Message(c2DMessage.message)
    message.setMessageId(c2DMessage.messageId)
    message.setDeliveryAcknowledgement(acknowledgement)
    if (c2DMessage.expiryTime.isDefined) {
      message.setExpiryTimeUtc(c2DMessage.expiryTime.get)
    }
    this.messageSender.get.sendMessage(c2DMessage.deviceId, message)
  }

  override def flush(offsets: util.Map[TopicPartition, OffsetAndMetadata]): Unit = {}

  override def start(props: util.Map[String, String]): Unit = {
    logger.info("Starting IotHub Sink")
    val connectionString = props.get(IotHubSinkConfig.IotHubConnectionString)
    this.messageSender = Some(this.getMessageSender(connectionString))
    this.acknowledgement =
      DeliveryAcknowledgement.valueOf(props.get(IotHubSinkConfig.IotHubMessageDeliveryAcknowledgement))
  }

  protected def getMessageSender(connectionString: String): MessageSender = {
    new IotHubMessageSender(connectionString)
  }

  override def version(): String = getClass.getPackage.getImplementationVersion
} 
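Since getMessageSender is protected precisely for testing, a test double can capture messages instead of calling IoT Hub. A sketch, with MessageSender's signatures inferred from the call sites above rather than confirmed:

import com.microsoft.azure.sdk.iot.service.Message

// Hypothetical recording sender; sendMessage/close signatures are inferred from usage above.
class RecordingSender extends MessageSender {
  @volatile var sent: List[(String, Message)] = Nil
  override def sendMessage(deviceId: String, message: Message): Unit = sent ::= (deviceId -> message)
  override def close(): Unit = ()
}

class TestableIotHubSinkTask(sender: MessageSender) extends IotHubSinkTask {
  override protected def getMessageSender(connectionString: String): MessageSender = sender
}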
Example 177
Source File: IotHubSinkConnector.scala    From toketi-kafka-connect-iothub   with MIT License
package com.microsoft.azure.iot.kafka.connect.sink

import java.util

import com.microsoft.azure.iot.kafka.connect.source.JsonSerialization
import com.typesafe.scalalogging.LazyLogging
import org.apache.kafka.common.config.{ConfigDef, ConfigException}
import org.apache.kafka.connect.connector.Task
import org.apache.kafka.connect.errors.ConnectException
import org.apache.kafka.connect.sink.SinkConnector

import scala.collection.JavaConverters._

class IotHubSinkConnector extends SinkConnector with LazyLogging with JsonSerialization {

  private[this] var props: Map[String, String] = _

  override def taskClass(): Class[_ <: Task] = classOf[IotHubSinkTask]

  override def taskConfigs(maxTasks: Int): util.List[util.Map[String, String]] = {
    (1 to maxTasks).map(_ => this.props.asJava).toList.asJava
  }

  override def stop(): Unit = {
    logger.info("Stopping IotHubSinkConnector")
  }

  override def config(): ConfigDef = IotHubSinkConfig.configDef

  override def start(props: util.Map[String, String]): Unit = {

    logger.info("Starting IotHubSinkConnector")

    try {
      val iotHubSinkConfig = IotHubSinkConfig.getConfig(props)
      this.props = Map[String, String](
        IotHubSinkConfig.IotHubConnectionString -> iotHubSinkConfig.getString(IotHubSinkConfig.IotHubConnectionString),
        IotHubSinkConfig.IotHubMessageDeliveryAcknowledgement →
          iotHubSinkConfig.getString(IotHubSinkConfig.IotHubMessageDeliveryAcknowledgement)
      )
    } catch {
      case ex: ConfigException ⇒ throw new ConnectException("Could not start IotHubSinkConnector due to a " +
        "configuration exception", ex)
    }
  }

  override def version(): String = getClass.getPackage.getImplementationVersion
} 
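A sketch of starting the connector directly with its two required properties; the connection string is a placeholder, and "None" matches the DeliveryAcknowledgement value used in the sink task above:

import scala.collection.JavaConverters._

val connector = new IotHubSinkConnector
connector.start(Map(
  IotHubSinkConfig.IotHubConnectionString -> "HostName=<hub>;SharedAccessKeyName=<name>;SharedAccessKey=<key>",
  IotHubSinkConfig.IotHubMessageDeliveryAcknowledgement -> "None"
).asJava)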
Example 178
Source File: IotHubPartitionSource.scala    From toketi-kafka-connect-iothub   with MIT License
// Copyright (c) Microsoft. All rights reserved.

package com.microsoft.azure.iot.kafka.connect.source

import java.util.{Collections, Map}

import com.typesafe.scalalogging.LazyLogging
import org.apache.kafka.connect.data.Struct
import org.apache.kafka.connect.errors.ConnectException
import org.apache.kafka.connect.source.SourceRecord

import scala.collection.mutable.ListBuffer
import scala.util.control.NonFatal

class IotHubPartitionSource(val dataReceiver: DataReceiver,
    val partition: String,
    val topic: String,
    val batchSize: Int,
    val eventHubName: String,
    val sourcePartition: Map[String, String])
  extends LazyLogging
    with JsonSerialization {

  def getRecords: List[SourceRecord] = {

    logger.debug(s"Polling for data from eventHub $eventHubName partition $partition")
    val list = ListBuffer.empty[SourceRecord]
    try {
      val messages: Iterable[IotMessage] = this.dataReceiver.receiveData(batchSize)

      if (messages.isEmpty) {
        logger.debug(s"Finished processing all messages from eventHub $eventHubName " +
          s"partition ${this.partition}")
      } else {
        logger.debug(s"Received ${messages.size} messages from eventHub $eventHubName " +
          s"partition ${this.partition} (requested $batchSize batch)")

        for (msg: IotMessage <- messages) {

          val kafkaMessage: Struct = IotMessageConverter.getIotMessageStruct(msg)
          val sourceOffset = Collections.singletonMap("EventHubOffset",
            kafkaMessage.getString(IotMessageConverter.offsetKey))
          val sourceRecord = new SourceRecord(sourcePartition, sourceOffset, this.topic, kafkaMessage.schema(),
            kafkaMessage)
          list += sourceRecord
        }
      }
    } catch {
      case NonFatal(e) =>
        val errorMsg = s"Error while getting SourceRecords for eventHub $eventHubName " +
          s"partition $partition. Exception - ${e.toString} Stack trace - ${e.printStackTrace()}"
        logger.error(errorMsg)
        throw new ConnectException(errorMsg, e)
    }
    logger.debug(s"Obtained ${list.length} SourceRecords from IotHub")
    list.toList
  }
} 
Example 179
Source File: BackPressuredWebSocketActor.scala    From monix-sample   with Apache License 2.0
package engine

import akka.actor.{Actor, ActorRef, Props}
import com.typesafe.scalalogging.LazyLogging
import engine.BackPressuredWebSocketActor._
import monix.execution.Scheduler
import monix.execution.rstreams.SingleAssignmentSubscription
import monix.reactive.Observable
import org.reactivestreams.{Subscriber, Subscription}
import play.api.libs.json._

import scala.concurrent.duration._
import scala.util.Try


class BackPressuredWebSocketActor[T: Writes]
  (producer: Observable[T], out: ActorRef)(implicit s: Scheduler)
  extends Actor with LazyLogging {

  def receive: Receive = {
    case JsNumber(nr) if nr > 0 =>
      Try(nr.toLongExact).foreach(subscription.request)
  }

  private[this] val subscription = SingleAssignmentSubscription()

  def now(): Long =
    System.currentTimeMillis()

  override def preStart(): Unit = {
    super.preStart()

    val source = {
      val initial = Observable.evalOnce(initMessage(now()))
      val obs = initial ++ producer.map(x => Json.toJson(x))
      val timeout = obs.debounceRepeated(5.seconds).map(_ => keepAliveMessage(now()))

      Observable
        .merge(obs, timeout)
        .whileBusyDropEventsAndSignal(nr => onOverflow(nr, now()))
    }

    source.toReactivePublisher.subscribe(new Subscriber[JsValue] {
      def onSubscribe(s: Subscription): Unit = {
        subscription := s
      }

      def onNext(json: JsValue): Unit = {
        out ! json
      }

      def onError(t: Throwable): Unit = {
        logger.warn(s"Error while serving a web-socket stream", t)
        out ! Json.obj(
          "event" -> "error",
          "type" -> t.getClass.getName,
          "message" -> t.getMessage,
          "timestamp" -> now())

        context.stop(self)
      }

      def onComplete(): Unit = {
        out ! Json.obj("event" -> "complete", "timestamp" -> now())
        context.stop(self)
      }
    })
  }

  override def postStop(): Unit = {
    subscription.cancel()
    super.postStop()
  }
}

object BackPressuredWebSocketActor {

  def initMessage(now: Long) = {
    Json.obj("event" -> "init", "timestamp" -> now)
  }

  // Missing from the excerpt; reconstructed from the call sites in preStart above,
  // with payload shapes assumed to follow the init/complete/error events.
  def keepAliveMessage(now: Long) = {
    Json.obj("event" -> "keep-alive", "timestamp" -> now)
  }

  def onOverflow(dropped: Long, now: Long) = {
    Json.obj("event" -> "overflow", "dropped" -> dropped, "timestamp" -> now)
  }
}
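A sketch of attaching the actor to a Play WebSocket endpoint using the Play 2.4-style acceptWithActor API; producer: Observable[JsValue] and an implicit monix Scheduler are assumed to be in scope:

import akka.actor.Props
import play.api.libs.json.JsValue
import play.api.mvc.WebSocket

def stream = WebSocket.acceptWithActor[JsValue, JsValue] { _ => out =>
  Props(new BackPressuredWebSocketActor[JsValue](producer, out))
}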
Example 180
Source File: ErrorInfo.scala    From shaclex   with MIT License
package es.weso.schema
import cats.Show
import com.typesafe.scalalogging.LazyLogging
import io.circe.JsonObject._
import io.circe.{ Decoder, Encoder, Json }

case class ErrorInfo(msg: String) {
  def show: String = msg
}

object ErrorInfo extends LazyLogging {
  implicit val showErrorInfo = new Show[ErrorInfo] {
    override def show(e: ErrorInfo): String = e.show
  }

  implicit val encodeErrorInfo: Encoder[ErrorInfo] = new Encoder[ErrorInfo] {
    final def apply(e: ErrorInfo): Json = Json.fromJsonObject(
      singleton("type", Json.fromString("ErrorInfo")).
        add("error", Json.fromString(e.msg)))
  }

  implicit val decodeErrorInfo: Decoder[ErrorInfo] = Decoder.instance { c =>
    logger.debug(s"Decoding error info: $c")
    for {
      msg <- c.get[String]("error")
    } yield ErrorInfo(msg)
  }

} 
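The encoder and decoder round-trip through the "error" field; a quick check:

import io.circe.parser.decode
import io.circe.syntax._

val json = ErrorInfo("node does not conform to shape").asJson
// {"type":"ErrorInfo","error":"node does not conform to shape"}
val back = decode[ErrorInfo](json.noSpaces)
// Right(ErrorInfo("node does not conform to shape"))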
Example 181
Source File: MavenDependenciesMappingRule.scala    From RTran   with Apache License 2.0
package com.ebay.rtran.maven

import com.ebay.rtran.maven.util.MavenModelUtil
import MavenModelUtil._
import com.typesafe.scalalogging.LazyLogging
import com.ebay.rtran.api.{IProjectCtx, IRule, IRuleConfig}

import scala.collection.JavaConversions._


class MavenDependenciesMappingRule(ruleConfig: MavenDependenciesMappingRuleConfig)
  extends IRule[MultiModuleMavenModel] with LazyLogging {

  override def transform(model: MultiModuleMavenModel): MultiModuleMavenModel = {
    model.modules filter { module =>
      ruleConfig.packageTypes match {
        case Some(set) => set contains module.pomModel.getPackaging
        case None => true
      }
    } foreach { module =>
      val matches = for {
        dep <- module.resolvedDependencies
        from <- ruleConfig.from
        if from matches dep
      } yield dep
      if (matches.size == ruleConfig.from.size) {
        // remove `from`
        (for {
          toBeRemoved <- ruleConfig.from
          dep <- module.pomModel.getDependencies
          if toBeRemoved.key == dep.key
        } yield dep) foreach module.pomModel.removeDependency
        // add `to`
        ruleConfig.to filterNot {dep =>
          module.resolvedDependencies.exists(_.key == dep.key)
        } foreach (module.pomModel.addDependency(_))
        logger.info("{} mapped {} to {} in {}", id, ruleConfig.from, ruleConfig.to, module.pomFile)
        logger.info("Rule {} was applied to 1 files", id)
      }
    }
    model
  }

  override def isEligibleFor(projectCtx: IProjectCtx) = projectCtx.isInstanceOf[MavenProjectCtx]
}

case class MavenDependenciesMappingRuleConfig(from: Set[SimpleDependency],
                                              to: Set[SimpleDependency],
                                              packageTypes: Option[Set[String]] = None) extends IRuleConfig 
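A usage sketch; the SimpleDependency constructor shown is assumed (only its key and matches members appear above), and model stands for an existing MultiModuleMavenModel:

// Hypothetical configuration: swap commons-logging for jcl-over-slf4j in war modules.
val rule = new MavenDependenciesMappingRule(MavenDependenciesMappingRuleConfig(
  from = Set(SimpleDependency("commons-logging", "commons-logging")),
  to = Set(SimpleDependency("org.slf4j", "jcl-over-slf4j")),
  packageTypes = Some(Set("war"))
))
val transformed = rule.transform(model)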
Example 182
Source File: MavenExcludeDependenciesRule.scala    From RTran   with Apache License 2.0
package com.ebay.rtran.maven

import java.io.File

import com.ebay.rtran.maven.util.{MavenUtil, MavenModelUtil}
import MavenModelUtil._
import MavenUtil._
import com.typesafe.scalalogging.LazyLogging
import org.apache.maven.model.Dependency
import org.eclipse.aether.artifact.Artifact
import org.eclipse.aether.util.filter.ExclusionsDependencyFilter
import org.eclipse.aether.{graph => aether}
import com.ebay.rtran.api.{IProjectCtx, IRule, IRuleConfig}

import scala.collection.JavaConversions._
import scala.util.{Failure, Success, Try}


class MavenExcludeDependenciesRule(ruleConfig: MavenExcludeDependenciesRuleConfig)
  extends IRule[MultiModuleMavenModel] with LazyLogging {

  override def transform(model: MultiModuleMavenModel): MultiModuleMavenModel = {
    var changes = Set.empty[File]
    val modules = model.modules map { module =>
      implicit val props = module.properties
      val managedDependencies = module.managedDependencies.values.toList
      // exclude from dependencyManagement
      Option(module.pomModel.getDependencyManagement).map(_.getDependencies.toList) getOrElse List.empty foreach {md =>
        val transitives = MavenUtil.getTransitiveDependencies(resolve(md), managedDependencies)
        val exclusions = ruleConfig.exclusions filter { exclusion =>
          transitives.exists(d => d.getGroupId == exclusion.groupId && d.getArtifactId == exclusion.artifactId)
        }
        if (exclusions.nonEmpty) {
          changes += module.pomFile
          logger.info("{} excluded {} from {} in {}", id, exclusions, md, module.pomFile)
        }
        exclusions foreach (md.addExclusion(_))
      }
      // exclude from the dependencies that has explicit version
      module.pomModel.getDependencies.filter(dep => Option(dep.getVersion).nonEmpty) foreach {dep =>
        val transitives = getTransitiveDependencies(resolve(dep), managedDependencies)
        val exclusions = ruleConfig.exclusions filter { exclusion =>
          transitives.exists(d => d.getGroupId == exclusion.groupId && d.getArtifactId == exclusion.artifactId)
        }
        if (exclusions.nonEmpty) {
          changes += module.pomFile
          logger.info("{} excluded {} from {} in {}", id, exclusions, dep, module.pomFile)
        }
        exclusions foreach (dep.addExclusion(_))
      }
      module
    }
    logger.info("Rule {} was applied to {} files", id, changes.size.toString)
    model.copy(modules = modules)
  }

  override def isEligibleFor(projectCtx: IProjectCtx) = projectCtx.isInstanceOf[MavenProjectCtx]
}

case class MavenExcludeDependenciesRuleConfig(exclusions: Set[SimpleExclusion]) extends IRuleConfig

case class SimpleExclusion(groupId: String, artifactId: String) 
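SimpleExclusion is defined just above, so the configuration can be built directly; model again stands for an existing MultiModuleMavenModel:

val rule = new MavenExcludeDependenciesRule(MavenExcludeDependenciesRuleConfig(
  Set(SimpleExclusion("commons-logging", "commons-logging"))
))
val transformed = rule.transform(model)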
Example 183
Source File: MavenAddManagedDependenciesRule.scala    From RTran   with Apache License 2.0
package com.ebay.rtran.maven

import com.ebay.rtran.maven.util.MavenModelUtil
import MavenModelUtil.{SimpleDependency, _}
import com.typesafe.scalalogging.LazyLogging
import org.apache.maven.model.DependencyManagement
import com.ebay.rtran.api.{IProjectCtx, IRule, IRuleConfig}

import scala.collection.JavaConversions._


class MavenAddManagedDependenciesRule(ruleConfig: MavenAddManagedDependenciesRuleConfig)
  extends IRule[MultiModuleMavenModel] with LazyLogging {

  override def transform(model: MultiModuleMavenModel): MultiModuleMavenModel = {
    val altered = model.parents map { parent =>
      val dependencyManagement = Option(parent.pomModel.getDependencyManagement) getOrElse new DependencyManagement
      ruleConfig.dependencies filterNot {dep =>
        dependencyManagement.getDependencies.exists(_.key == dep.key)
      } foreach {dep =>
        dependencyManagement.addDependency(dep)
        logger.info("{} added managed dependency {} to {}", id, dep, parent.pomFile)
      }

      parent.pomModel.setDependencyManagement(dependencyManagement)
      parent
    }
    logger.info("Rule {} was applied to {} files", id, altered.size.toString)
    model.copy(modules = model.subModules ++ altered)
  }

  override def isEligibleFor(projectCtx: IProjectCtx) = projectCtx.isInstanceOf[MavenProjectCtx]
}

case class MavenAddManagedDependenciesRuleConfig(dependencies: Set[SimpleDependency]) extends IRuleConfig 
Example 184
Source File: MavenRemoveManagedDependenciesRule.scala    From RTran   with Apache License 2.0
package com.ebay.rtran.maven

import java.io.File

import com.ebay.rtran.maven.util.MavenModelUtil
import MavenModelUtil.SimpleDependency
import com.typesafe.scalalogging.LazyLogging
import com.ebay.rtran.api.{IRule, IRuleConfig}

import scala.collection.JavaConversions._
import scala.util.Try


class MavenRemoveManagedDependenciesRule(ruleConfig: MavenRemoveManagedDependenciesRuleConfig)
  extends IRule[MultiModuleMavenModel] with LazyLogging {

  override def transform(model: MultiModuleMavenModel): MultiModuleMavenModel = {
    var changes = Set.empty[File]
    val modules = model.modules map { module =>
      val matches = for {
        md <- Try(module.pomModel.getDependencyManagement.getDependencies.toList) getOrElse List.empty
        resolvedMd <- module.managedDependencies.values
        toBeRemoved <- ruleConfig.dependencies
        if (toBeRemoved matches resolvedMd) && (md.getManagementKey == resolvedMd.getManagementKey)
      } yield md

      // Only count the file as changed when something was actually removed
      if (matches.nonEmpty) {
        Option(module.pomModel.getDependencyManagement) foreach { dm =>
          matches foreach dm.removeDependency
          changes += module.pomFile
        }
      }
      module
    }
    logger.info("Rule {} was applied to {} files", id, changes.size.toString)
    model.copy(modules = modules)
  }
}

case class MavenRemoveManagedDependenciesRuleConfig(dependencies: Set[SimpleDependency]) extends IRuleConfig 
Example 185
Source File: MavenAddDependenciesRule.scala    From RTran   with Apache License 2.0
package com.ebay.rtran.maven

import java.io.File

import com.ebay.rtran.maven.util.MavenModelUtil
import MavenModelUtil._
import com.typesafe.scalalogging.LazyLogging
import com.ebay.rtran.api.{IProjectCtx, IRule, IRuleConfig}


class MavenAddDependenciesRule(ruleConfig: MavenAddDependenciesRuleConfig)
  extends IRule[MultiModuleMavenModel] with LazyLogging {

  override def transform(model: MultiModuleMavenModel): MultiModuleMavenModel = {
    var changes = Set.empty[File]
    model.modules filter { module =>
      ruleConfig.packageTypes match {
        case Some(set) => set contains module.pomModel.getPackaging
        case None => true
      }
    } foreach { module =>
      ruleConfig.dependencies filterNot {dep =>
        module.resolvedDependencies.exists(_.key == dep.key)
      } foreach {dep =>
        logger.info("{} added dependency {} to {}", id, dep, module.pomFile)
        module.pomModel.addDependency(dep)
        changes += module.pomFile
      }
    }
    logger.info("Rule {} was applied to {} files", id, changes.size.toString)
    model
  }

  override def isEligibleFor(projectCtx: IProjectCtx) = projectCtx.isInstanceOf[MavenProjectCtx]
}

case class MavenAddDependenciesRuleConfig(dependencies: Set[SimpleDependency],
                                          packageTypes: Option[Set[String]] = None) extends IRuleConfig 
Example 186
Source File: MavenRemoveDependenciesRule.scala    From RTran   with Apache License 2.0
package com.ebay.rtran.maven

import java.io.File

import com.ebay.rtran.maven.util.MavenModelUtil
import MavenModelUtil._
import com.typesafe.scalalogging.LazyLogging
import com.ebay.rtran.api.{IProjectCtx, IRule, IRuleConfig}

import scala.collection.JavaConversions._


class MavenRemoveDependenciesRule(ruleConfig: MavenRemoveDependenciesRuleConfig)
  extends IRule[MultiModuleMavenModel] with LazyLogging {

  override def transform(model: MultiModuleMavenModel): MultiModuleMavenModel = {
    var changes = Set.empty[File]
    model.modules filter { module =>
      ruleConfig.packageTypes match {
        case Some(set) => set contains module.pomModel.getPackaging
        case None => true
      }
    } foreach { module =>
      (for {
        toBeRemoved <- ruleConfig.dependencies
        resolvedDep <- module.resolvedDependencies
        dep <- module.pomModel.getDependencies
        if (toBeRemoved matches resolvedDep) && (resolvedDep.key == dep.key)
      } yield dep) foreach { dep =>
        logger.info("{} removed dependency {} from {}", id, dep, module.pomFile)
        module.pomModel.removeDependency(dep)
        changes += module.pomFile
      }
      module
    }
    logger.info("Rule {} was applied to {} files", id, changes.size.toString)
    model
  }

  override def isEligibleFor(projectCtx: IProjectCtx) = projectCtx.isInstanceOf[MavenProjectCtx]
}

case class MavenRemoveDependenciesRuleConfig(dependencies: Set[SimpleDependency],
                                             packageTypes: Option[Set[String]] = None) extends IRuleConfig 
Example 187
Source File: MavenRemoveRepositoriesRule.scala    From RTran   with Apache License 2.0
package com.ebay.rtran.maven

import com.typesafe.scalalogging.LazyLogging
import com.ebay.rtran.api.IRule

import scala.collection.JavaConversions._


class MavenRemoveRepositoriesRule(ruleConfig: MavenRemoveRepositoriesRuleConfig)
  extends IRule[MultiModuleMavenModel] with LazyLogging {

  override def transform(model: MultiModuleMavenModel): MultiModuleMavenModel = {
    val modules = model.modules map { module =>
      val toBeRemoved = for {
        repo <- module.pomModel.getRepositories
        pattern <- ruleConfig.repoPatterns
        if repo.getUrl matches pattern
      } yield repo

      toBeRemoved foreach module.pomModel.removeRepository
      if (toBeRemoved.nonEmpty) logger.info("Rule {} was applied to 1 file", id)

      module
    }
    model.copy(modules = modules)
  }
}

case class MavenRemoveRepositoriesRuleConfig(repoPatterns: Set[String]) 
Example 188
Source File: MavenRemovePluginsRule.scala    From RTran   with Apache License 2.0
package com.ebay.rtran.maven

import java.io.File

import com.typesafe.scalalogging.LazyLogging
import org.apache.maven.model.Plugin
import com.ebay.rtran.api.{IRule, IRuleConfig}

import scala.collection.JavaConversions._
import scala.util.Try


class MavenRemovePluginsRule(ruleConfig: MavenRemoveManagedPluginsRuleConfig)
  extends IRule[MultiModuleMavenModel] with LazyLogging {

  override def transform(model: MultiModuleMavenModel): MultiModuleMavenModel = {
    var changes = Set.empty[File]
    val modules = model.modules map { module =>
      val managedPlugins = for {
        mp <- Try(module.pomModel.getBuild.getPluginManagement.getPlugins.toList) getOrElse List.empty
        toBeRemoved <- ruleConfig.plugins
        if toBeRemoved matches mp
      } yield mp

      // Only count the file as changed when a managed plugin was actually removed;
      // Option is null-safe here, whereas Try(...) can wrap a null pluginManagement
      if (managedPlugins.nonEmpty) {
        Option(module.pomModel.getBuild).flatMap(b => Option(b.getPluginManagement)) foreach { pm =>
          managedPlugins foreach pm.removePlugin
          changes += module.pomFile
        }
      }

      val plugins = for {
        p <- Try(module.pomModel.getBuild.getPlugins.toList) getOrElse List.empty
        toBeRemoved <- ruleConfig.plugins
        if toBeRemoved matches p
      } yield p

      if (plugins.nonEmpty) {
        Option(module.pomModel.getBuild) foreach { b =>
          plugins foreach b.removePlugin
          changes += module.pomFile
        }
      }
      module
    }
    logger.info("Rule {} was applied to {} files", id, changes.size.toString)
    model.copy(modules = modules)
  }
}

case class MavenRemoveManagedPluginsRuleConfig(plugins: Set[SimplePlugin]) extends IRuleConfig

case class SimplePlugin(groupId: Option[String] = None, artifactId: String, version: Option[String] = None) {
  def matches(plugin: Plugin): Boolean = {
    groupId.getOrElse(plugin.getGroupId) == plugin.getGroupId &&
      artifactId == plugin.getArtifactId &&
      version.getOrElse(plugin.getVersion) == plugin.getVersion
  }
} 
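Because unset fields fall back to the plugin's own values, they behave as wildcards; for example:

import org.apache.maven.model.Plugin

val p = new Plugin()
p.setGroupId("org.apache.maven.plugins")
p.setArtifactId("maven-war-plugin")
p.setVersion("2.6")

SimplePlugin(artifactId = "maven-war-plugin").matches(p) // true: groupId and version act as wildcards
SimplePlugin(artifactId = "maven-war-plugin", version = Some("3.0")).matches(p) // false: version differs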
Example 189
Source File: FilePathMatcher.scala    From RTran   with Apache License 2.0
package com.ebay.rtran.generic.util

import java.io.File
import java.nio.file.{FileSystems, PathMatcher}

import com.typesafe.scalalogging.LazyLogging
import org.apache.commons.io.FileUtils
import org.mozilla.universalchardet.CharsetListener

import scala.util.Try


object FilePathMatcher {

  def apply(rootDir: File, pathPattern: String): Try[PathMatcher] = Try {
    val trimmedPattern = new String(pathPattern.trim.toCharArray.dropWhile(_ == '/')).trim
    // Normalize Windows path separators so the glob pattern works on all platforms
    val path = rootDir.getAbsolutePath.replaceAll("\\\\", "/")
    FileSystems.getDefault.getPathMatcher(s"glob:$path/$trimmedPattern")
  }
}

object EncodingDetector extends LazyLogging {

  val DEFAULT_ENCODING = "UTF-8"

  def guessEncoding(file: File) = {
    val bytes = FileUtils.readFileToByteArray(file)
    val dummyListener = new CharsetListener {
      override def report(charset: String): Unit = {}
    }
    val detector = new org.mozilla.universalchardet.UniversalDetector(dummyListener)
    detector.handleData(bytes, 0, bytes.length)
    detector.dataEnd()
    val encoding = Option(detector.getDetectedCharset) getOrElse DEFAULT_ENCODING
    logger.debug("Detected encoding {} for {}", detector.getDetectedCharset, file)
    detector.reset()
    (encoding, bytes)
  }
} 
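A usage sketch (the paths are placeholders):

import java.io.File

FilePathMatcher(new File("/workspace/project"), "**/pom.xml") foreach { matcher =>
  matcher.matches(new File("/workspace/project/module/pom.xml").toPath) // true
}

val (encoding, bytes) = EncodingDetector.guessEncoding(new File("/workspace/project/README.md"))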
Example 190
Source File: ModelProviderRegistry.scala    From RTran   with Apache License 2.0
package com.ebay.rtran.core

import com.typesafe.config.Config
import com.typesafe.scalalogging.LazyLogging
import com.ebay.rtran.api.{IModel, IModelProvider, IProjectCtx}

import scala.util.{Failure, Success, Try}
import scala.collection.JavaConversions._


object ModelProviderRegistry extends LazyLogging {

  val PATH_TO_MODEL_PROVIDER = "rtran.model-providers"

  private[this] var modelProviders = loadModelProviders(UpgraderMeta.configs)

  def findProvider(modelClass: Class[_ <: IModel], projectCtxClass: Class[_ <: IProjectCtx]) =
    modelProviders get (modelClass, projectCtxClass) orElse {
      modelProviders find {
        case ((mclass, pclass), provider) => mclass == modelClass && pclass.isAssignableFrom(projectCtxClass)
      } map (_._2)
    }

  def providers = modelProviders.values

  private[rtran] def registerProvider[T <: IModelProvider[IModel, IProjectCtx]](provider: T): Unit = {
    modelProviders += (provider.runtimeModelClass, provider.runtimeProjectCtxClass) -> provider
  }

  private def loadModelProviders(configs: Iterator[Config]) = {
    var providers = Map.empty[(Class[_ <: IModel], Class[_ <: IProjectCtx]), IModelProvider[IModel, IProjectCtx]]
    configs.filter(_.hasPath(PATH_TO_MODEL_PROVIDER)).flatMap(_.getStringList(PATH_TO_MODEL_PROVIDER)) foreach {className =>
      loadModelProvider(className) match {
        case Success(provider) if providers contains (provider.runtimeModelClass, provider.runtimeProjectCtxClass) =>
          val modelClass = provider.runtimeModelClass
          val projectCtxClass = provider.runtimeProjectCtxClass
          if (providers((modelClass, projectCtxClass)).getClass == provider.getClass) {
            logger.warn("Get duplicated model provider definition for {}", provider.getClass)
          } else {
            logger.warn("Model provider {} already exists for {}", provider.getClass, (modelClass, projectCtxClass))
          }
        case Success(provider) =>
          providers += (provider.runtimeModelClass, provider.runtimeProjectCtxClass) -> provider
        case Failure(e) =>
          logger.error("Failed to create provider instance {}, {}", className, e)
      }
    }
    providers
  }

  private def loadModelProvider(className: String) = Try {
    Class.forName(className).asSubclass(classOf[IModelProvider[IModel, IProjectCtx]]).newInstance
  }

} 
Example 191
Source File: RuleRegistry.scala    From RTran   with Apache License 2.0
package com.ebay.rtran.core

import com.ebay.rtran.api.{IModel, IRule, IRuleConfigFactory}
import com.typesafe.config.Config
import com.typesafe.scalalogging.LazyLogging

import scala.collection.JavaConversions._
import scala.collection.mutable
import scala.language.postfixOps
import scala.util.{Failure, Success, Try}

case class Rule(name: String, var additionalProperties: Map[String, String], configFactory: Option[IRuleConfigFactory[_]])

object RuleRegistry extends LazyLogging {

  val PATH_TO_RULES = "rtran.rules"

  private[this] lazy val ruleDefinitions = loadRuleDefinitions(UpgraderMeta.configs)
  private[this] val metadataCache = new mutable.HashMap[IRule[_ <:IModel], Map[String, String]]

  def findRuleMetadata(rule: IRule[_ <:IModel]) = metadataCache get rule

  def saveRuleMetadata(rule: IRule[_ <:IModel], mm: Map[String, String]) = metadataCache put (rule, mm)

  def findRuleDefinition(name: String) = ruleDefinitions get name

  def findRuleDefinitionByClazz(name: String) = ruleDefinitions.find(p => p._2._1.getName == name).map(r => r._2)

  def hasRule(name: String) = ruleDefinitions contains name

  def rules = ruleDefinitions.keySet

  private def loadRuleDefinitions(configs: Iterator[Config]) = {
    var definitions = Map.empty[String, (Class[_ <: IRule[IModel]], Rule)]
    configs.filter(_.hasPath(PATH_TO_RULES)).map(_.getConfig(PATH_TO_RULES)) foreach {config =>
      config.entrySet.map(_.getKey.split("\\.")(0)).toSet[String] foreach { key =>
        if (definitions contains key) {
          logger.warn("Definition of rule: {} already exists to {}", key, definitions(key))
        } else {
          loadRuleDefinition(Try(config.getConfig(key))) match {
            case Success(definition) => definitions += key -> definition
            case Failure(e) => logger.error("Failed to get rule class binding for {}, {}", key, e)
          }
        }
      }
    }
    definitions
  }

  private def loadRuleDefinition(ruleDef: Try[Config]) = ruleDef map { c =>
    val ruleClass = Class.forName(c.getString("class")).asSubclass(classOf[IRule[IModel]])
    val jira = Try(c.getString("jira")).toOption
    val desc = Try(c.getString("description")).toOption
    val sinceDate = Try(c.getString("since_date")).toOption
    val configFactory = Try(c.getString("config-factory")) map { className =>
      Class.forName(className).asSubclass(classOf[IRuleConfigFactory[_]]).newInstance
    } toOption

    val mm = new scala.collection.mutable.HashMap[String, String]
    jira.map(f => mm += ("jira" -> f))
    desc.map(f => mm += ("description" -> f))
    sinceDate.map(f => mm += ("since_date" -> f))
    (ruleClass, Rule(ruleClass.getName,  mm.toMap, configFactory))
  }

} 
Example 192
Source File: UpgradeConfiguration.scala    From RTran   with Apache License 2.0
package com.ebay.rtran.core

import com.fasterxml.jackson.databind.JsonNode
import com.typesafe.scalalogging.LazyLogging
import org.json4s.JsonAST.JValue
import org.json4s.jackson.JsonMethods._
import com.ebay.rtran.api.{IModel, IRule, IRuleConfigFactory}
import org.json4s.DefaultFormats

import scala.util.{Failure, Success, Try}


trait RuleProducer {
  val ruleInstances: List[_ <: IRule[_ <: IModel]]
}

trait UpgradeConfiguration extends RuleProducer {
  val ruleConfigs: List[JsonRuleConfiguration]
}

case class JsonRuleConfiguration(name: String, metadata: Option[JValue] = None, config: Option[JValue] = None)

case class JsonUpgradeConfiguration(ruleConfigs: List[JsonRuleConfiguration])
  extends UpgradeConfiguration with JsonRuleProducer

trait JsonRuleProducer extends RuleProducer with LazyLogging {self: UpgradeConfiguration =>

  lazy val ruleInstances = ruleConfigs map {
    case JsonRuleConfiguration(name, metadata, configOpt) =>
      logger.info("Creating instance for {} with config {}", name, configOpt)
      implicit val formats = DefaultFormats

      //copy settings from metadata to Rule Registry
      RuleRegistry.findRuleDefinition(name) flatMap { case (ruleClass, rule) =>
        val properties = metadata.map(json => json.extract[Map[String, Any]])
        val configFactory = (rule.configFactory getOrElse DefaultJsonRuleConfigFactory)
          .asInstanceOf[IRuleConfigFactory[JsonNode]]
        configOpt map { config =>
          Try(JsonConfigurableRuleFactory.createRuleWithConfig(ruleClass, configFactory, asJsonNode(config)))
        } getOrElse Try(JsonConfigurableRuleFactory.createRule(ruleClass)) match {
          case Success(instance) =>
            properties.map(m => m.mapValues(_.toString)).map(m => RuleRegistry.saveRuleMetadata(instance, m))
            Some(instance)
          case Failure(e) =>
            logger.warn(e.getMessage)
            None
        }
      }
  } collect {
    case Some(instance) => instance
  }

} 
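A sketch of building a configuration programmatically; the rule name must match a key under rtran.rules, so "MavenAddDependenciesRule" here is illustrative:

import org.json4s.jackson.JsonMethods.parse

val configuration = JsonUpgradeConfiguration(List(
  JsonRuleConfiguration(
    name = "MavenAddDependenciesRule",
    config = Some(parse("""{"dependencies": [{"groupId": "org.slf4j", "artifactId": "slf4j-api"}]}"""))
  )
))
val rules = configuration.ruleInstances // instantiated through RuleRegistry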
Example 193
Source File: RuleEngine.scala    From RTran   with Apache License 2.0
package com.ebay.rtran.core

import com.ebay.rtran.api.{IModel, IProjectCtx, IRule}
import com.typesafe.scalalogging.LazyLogging

import scala.collection.JavaConversions._
import scala.concurrent.duration._
import scala.util.{Failure, Success, Try}


class RuleEngine extends LazyLogging {

  def execute[P <: IProjectCtx](projectCtx: P, configuration: UpgradeConfiguration): Unit = {
    execute(projectCtx, configuration.ruleInstances)
  }

  def execute[P <: IProjectCtx](projectCtx: P, rules: java.util.List[_ <: IRule[_ <: IModel]]): Unit = {
    rules foreach { rule =>
      logger.info("Executing rule {} ...", rule.id)
      val start = Deadline.now
      executeRule(rule.asInstanceOf[IRule[IModel]], projectCtx)
      val elapsed = (Deadline.now - start).toMillis
      logger.info("Executed rule {} in {} ms", rule.id, elapsed.toString)
    }
  }

  private def executeRule(rule: IRule[IModel], projectCtx: IProjectCtx) = {
    ModelProviderRegistry.findProvider(rule.rutimeModelClass, projectCtx.getClass) match {
      case Some(provider) =>
        val model = provider create projectCtx
        val result = Try(rule transform model) match {
          case Success(newModel) => newModel
          case Failure(e) =>
            logger.error("Failed to execute rule {} on model {}, {}", rule.id, model, e)
            throw e
        }
        provider save result
      case None => logger.error("Cannot find provider for {} used in rule {}", rule.rutimeModelClass, rule.id)
    }
  }

} 
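Putting the pieces together, a sketch of a run, assuming a projectCtx for which a model provider is registered and a configuration built as in the UpgradeConfiguration example above:

val engine = new RuleEngine
engine.execute(projectCtx, configuration) // each rule is timed and logged as it runs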
Example 194
Source File: ReportTest.scala    From RTran   with Apache License 2.0
package com.ebay.rtran.report

import java.io.{File, FileOutputStream, OutputStream}
import java.util.Optional
import com.typesafe.scalalogging.LazyLogging
import com.ebay.rtran.report.api.IReportEventSubscriber
import org.scalatest.{FlatSpecLike, Matchers}


class ReportTest extends FlatSpecLike with Matchers with LazyLogging {

  "Report" should "accept log event correctly" in {
    val subscriber = new TestSubscriber
    val outputStream = new FileOutputStream("test")
    Report.createReport(outputStream, subscribers = List(subscriber))(testFunc())
    subscriber.getCount should be (3)
    new File("test").delete
  }

  "Report" should "work in concurrent mode" in {
    val subscribers = (1 to 10) map (_ => new TestSubscriber)

    subscribers foreach {sub =>
      new Thread(new TestThread(sub)).start()
    }

    val waitPeriod = 1000

    Thread.sleep(waitPeriod)

    subscribers foreach (_.getCount should be (3))
  }

  class TestThread(subscriber: IReportEventSubscriber[_]) extends Runnable {
    override def run(): Unit = {
      val file = new File(s"test-${System.nanoTime}")
      val outputStream = new FileOutputStream(file)
      Report.createReport(outputStream, subscribers = List(subscriber))(testFunc())
      file.delete
    }
  }

  def testFunc(): Unit = {
    val str = "hello"
    val number = 2000
    logger.info("String {}", str)
    logger.info(s"number ${number + 2}")
    logger.info("String {} number {}", str, number.toString)
  }

  class TestSubscriber extends IReportEventSubscriber[Int] {
    import scala.compat.java8.OptionConverters._

    private var count = 0

    def getCount = count

    override def filter(event: scala.Any): Optional[Int] = Some(1).asJava

    override def dumpTo(outputStream: OutputStream): Unit = {}

    override def doAccept(event: Int): Unit = count += event
  }

} 
Example 195
Source File: ReportAndLogSupportTest.scala    From RTran   with Apache License 2.0
package com.ebay.rtran.report

import java.io.File

import com.typesafe.scalalogging.LazyLogging
import org.scalatest.{BeforeAndAfterEach, FlatSpecLike, Matchers}

import scala.io.Source


class ReportAndLogSupportTest extends FlatSpecLike with Matchers with BeforeAndAfterEach with LazyLogging {

  val projectRoot = new File(getClass.getClassLoader.getResource(".").getFile, "testdir")
  projectRoot.mkdirs

  val report = new ReportAndLogSupport {
    override val warnLogPrefix: String = "test-warn-log"
    override val debugLogPrefix: String = "test-debug-log"
    override val reportFilePrefix: String = "test-report"
  }

  "report" should "get all subscribers that implement IReportEventSubscriber" in {
    report.allSubscribers(projectRoot, "com.ebay.rtran.report").size should not be 0
  }

  "report" should "create the logs and report" in {
    report.createReportAndLogs(projectRoot, None) {
      logger.info("This is an info")
      logger.warn("This is a warning")
      logger.debug("Debug this")
    }
    val reportFile = new File(projectRoot, report.reportFilePrefix + ".md")
    reportFile.exists should be (true)
    val warnLog = new File(projectRoot, report.warnLogPrefix + ".log")
    warnLog.exists should be (true)
    Source.fromFile(warnLog).getLines.mkString should include ("This is a warning")
    val debugLog = new File(projectRoot, report.debugLogPrefix + ".log")
    debugLog.exists should be (true)
    val content = Source.fromFile(debugLog).getLines.mkString
    content should include ("This is an info")
    content should include ("This is a warning")
    content should include ("Debug this")

    reportFile.delete
    warnLog.delete
    debugLog.delete
  }
} 
Example 196
Source File: MemoryStore.scala    From shield   with MIT License
package shield.kvstore

import java.util.concurrent.atomic.AtomicInteger

import com.googlecode.concurrentlinkedhashmap.ConcurrentLinkedHashMap
import com.typesafe.scalalogging.LazyLogging
import shield.metrics.Instrumented
import spray.http.{MediaType, HttpResponse}
import scala.concurrent.{ExecutionContext, Future}
import scala.collection.concurrent

class LazyWrapper[A](builder: => A) {
  lazy val value : A = builder
}

class MemoryStore(id: String, maxHashCapacity: Int, maxKeyCapacity: Int, maxLimitCapacity: Int)(implicit context: ExecutionContext) extends KVStore with LazyLogging with Instrumented {
  def getMillis():Long = System.currentTimeMillis
  private val setStore = new ConcurrentLinkedHashMap.Builder[String, LazyWrapper[TrieSet[String]]]
    .initialCapacity(1000)
    .maximumWeightedCapacity(Math.max(1000, maxHashCapacity))
    .build()
  // todo: tweak capacity - can we do by memory size? (weigher to weigh by memory footprint)
  private val keyStore = new ConcurrentLinkedHashMap.Builder[String, HttpResponse]
    .initialCapacity(1000)
    .maximumWeightedCapacity(Math.max(1000, maxKeyCapacity))
    .build()
  private val limitStore = new ConcurrentLinkedHashMap.Builder[String, AtomicInteger]
    .initialCapacity(1000)
    .maximumWeightedCapacity(Math.max(1000, maxLimitCapacity))
    .build()

  // todo: profiling optimization - triesets are expensive to build.  Is there a better data structure we can use?
  private def getOrSet[V](set: ConcurrentLinkedHashMap[String, V], key: String, default: V) = set.putIfAbsent(key, default) match {
    case null => default
    case existing => existing
  }

  val setGetTimer = timing("setGet", id)
  def setGet(key: String) : Future[Seq[String]] = setGetTimer {
    Future.successful(getOrSet(setStore, key, new LazyWrapper[TrieSet[String]](TrieSet[String]())).value.toSeq)
  }
  val setDeleteTimer = timing("setDelete", id)
  def setDelete(key: String) : Future[Long] = setDeleteTimer {
    setStore.remove(key)
    // todo: implement these according to the same semantics as RedisStore
    Future.successful(0L)
  }
  val setAddTimer = timing("setAdd", id)
  def setAdd(key: String, value: String) : Future[Long] = setAddTimer {
    getOrSet(setStore, key, new LazyWrapper[TrieSet[String]](TrieSet[String]())).value += value
    Future.successful(0L)
  }
  val setRemoveTimer = timing("setRemove", id)
  def setRemove(key: String, value: String) : Future[Long] = setRemoveTimer {
    getOrSet(setStore, key, new LazyWrapper[TrieSet[String]](TrieSet[String]())).value -= value
    Future.successful(0L)
  }
  val keyGetTimer = timing("keyGet", id)
  def keyGet(key: String) : Future[Option[HttpResponse]] = keyGetTimer {
    Future.successful(Option(keyStore.get(key)))
  }
  val keySetTimer = timing("keySet", id)
  def keySet(key: String, value: HttpResponse) : Future[Boolean] = keySetTimer {
    keyStore.put(key, value)
    Future.successful(true)
  }
  val keyDeleteTimer = timing("keyDelete", id)
  def keyDelete(key: String) : Future[Long] = keyDeleteTimer {
    keyStore.remove(key)
    Future.successful(0L)
  }

  val tokenTimer = timing("tokenRateLimit", id)
  def tokenRateLimit(key: String, rate: Int, perSeconds: Int) : Future[Boolean] = tokenTimer {
    // we could set up a concurrent system for actively pruning expired entries or....
    // we could just let them get evicted via lru policy
    val floored = Math.floor(getMillis() / (perSeconds * 1000)).toLong
    val fullKey = s"rl:$floored:$key"
    val counter = getOrSet(limitStore, fullKey, new AtomicInteger(0))
    // doesn't matter if we increment over the count (ie count rate limited requests), since it won't spill
    // over to the next bucket
    Future.successful(counter.incrementAndGet() <= rate)
  }
} 
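A usage sketch of the rate limiter; since buckets are keyed by wall-clock window, expired buckets simply age out of the LRU:

import scala.concurrent.ExecutionContext.Implicits.global

val store = new MemoryStore("local", maxHashCapacity = 10000, maxKeyCapacity = 10000, maxLimitCapacity = 10000)
store.tokenRateLimit("client-1", rate = 100, perSeconds = 60) foreach { allowed =>
  if (!allowed) println("client-1 is rate limited")
}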
Example 197
Source File: HttpImplicits.scala    From shield   with MIT License
package shield.implicits

import com.typesafe.scalalogging.LazyLogging
import spray.http.HttpHeaders.RawHeader
import spray.http.Uri.Path
import spray.http._

import scala.language.implicitConversions

// todo: Do not like/want.  Do this better
class ImplicitHttpResponse(msg: HttpResponse) extends LazyLogging {
   def withAdditionalHeaders(header: HttpHeader*) : HttpResponse = {
      msg.withHeaders(header.toList ++ msg.headers)
   }

   def withReplacedHeaders(header: HttpHeader*) : HttpResponse = {
      val headerNames = header.map(_.lowercaseName).toSet
      msg.withHeaders(header.toList ++ msg.headers.filterNot(h => headerNames.contains(h.lowercaseName)))
   }

   def withStrippedHeaders(headers: Set[String]) : HttpResponse = {
      msg.withHeaders(msg.headers.filterNot(h => headers.exists(s => s.toLowerCase.equals(h.lowercaseName))))
   }
}

class ImplicitHttpRequest(msg: HttpRequest) extends LazyLogging {
   def withAdditionalHeaders(header: HttpHeader*) : HttpRequest = {
      msg.withHeaders(header.toList ++ msg.headers)
   }

   def withReplacedHeaders(header: HttpHeader*) : HttpRequest = {
      val headerNames = header.map(_.lowercaseName).toSet
      msg.withHeaders(header.toList ++ msg.headers.filterNot(h => headerNames.contains(h.lowercaseName)))
   }

   def withStrippedHeaders(headers: Set[String]) : HttpRequest = {
      msg.withHeaders(msg.headers.filterNot(h => headers.contains(h.lowercaseName)))
   }

   def withTrustXForwardedFor(trustProxies : Int) : HttpRequest = {
      val forwardedList:Array[String] = msg.headers.find(_.lowercaseName == "x-forwarded-for").map(_.value.split(",")).getOrElse(Array())
      val remoteHeader = msg.headers.find(_.lowercaseName == "remote-address").map(_.value).getOrElse("127.0.0.1")
      val combinedList = (forwardedList :+ remoteHeader).reverse //List containing [Remote-Address header, most recent x-forwarded-for, 2nd most recent x-forwarded-for, etc]
      val clientAddress =  RawHeader("client-address", combinedList(if(trustProxies < combinedList.length) trustProxies else combinedList.length-1).trim)
      withReplacedHeaders(clientAddress)
   }

   def withTrustXForwardedProto(trustProto : Boolean) : HttpRequest = {
      if (trustProto) {
         val proto = msg.headers.find(_.lowercaseName == "x-forwarded-proto").map(_.value).getOrElse(msg.uri.scheme)
         try {
            msg.copy(uri = msg.uri.copy(scheme = proto))
         } catch {
            case e: spray.http.IllegalUriException =>
               logger.error("Received invalid protocol \"" + proto + "\" in the 'X-Forwarded-Proto' header, using original request.",e)
               msg
         }
      } else {
         msg
      }
   }

   def withStrippedExtensions(extensions : Set[String]) : HttpRequest = {
      val trimmedPath = getExtension(msg.uri.path.toString) match {
         case (path, Some(extension)) if extensions.contains(extension) => Path(path)
         case (path, _) => msg.uri.path
      }

      msg.copy(uri = msg.uri.copy(path = trimmedPath))
   }

   protected def getExtension(path: String) : (String, Option[String]) = {
      val extensionPos = path.lastIndexOf('.')
      val lastDirSeparator = path.lastIndexOf('/')

      if (lastDirSeparator < extensionPos) {
         val t = path.splitAt(extensionPos)

         t._1 -> Some(t._2.toLowerCase())
      } else {
         path -> None
      }
   }
}

object HttpImplicits {
   implicit def toHttpMethod(s: String) : HttpMethod = HttpMethods.getForKey(s.toUpperCase).get
   implicit def betterResponse(response: HttpResponse) : ImplicitHttpResponse = new ImplicitHttpResponse(response)
   implicit def betterRequest(request: HttpRequest) : ImplicitHttpRequest = new ImplicitHttpRequest(request)
} 
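With the implicits imported, the helpers chain directly on spray messages; a sketch:

import shield.implicits.HttpImplicits._
import spray.http._

val request = HttpRequest(uri = Uri("http://example.com/data.json"))
  .withTrustXForwardedFor(1) // derive client-address, trusting one proxy hop
  .withStrippedExtensions(Set(".json")) // normalize /data.json to /data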
Example 198
Source File: FutureUtil.scala    From shield   with MIT License
package shield.implicits


import akka.pattern.CircuitBreakerOpenException
import com.typesafe.scalalogging.LazyLogging
import shield.metrics.Instrumented

import scala.util.{Failure, Try}

object FutureUtil extends LazyLogging with Instrumented {
  def logFailure[A](identifier: String, handlers: PartialFunction[Throwable, Unit] = PartialFunction.empty) : PartialFunction[Try[A], Unit] = {
    val h : PartialFunction[Throwable, Unit] = handlers orElse {
      case _: CircuitBreakerOpenException => metrics.meter("CircuitOpenException", identifier).mark()
      case throwable => logger.error(s"Failed future '$identifier'", throwable)
    }

    {
      case Failure(r) => h(r)
    }
  }
} 
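Typical usage attaches the handler with andThen, as SimpleHttpCache does below; fetchProfile is a placeholder future:

import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global

def fetchProfile(id: String): Future[String] = Future(s"profile-$id")

fetchProfile("42").andThen(FutureUtil.logFailure("ProfileService::fetchProfile"))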
Example 199
Source File: SimpleHttpCache.scala    From shield   with MIT License
package shield.proxying

import com.typesafe.scalalogging.LazyLogging
import shield.implicits.FutureUtil
import shield.kvstore.KVStore
import spray.http.{HttpMethods, HttpResponse}

import scala.concurrent.{Future, ExecutionContext}

class SimpleHttpCache(store: KVStore)(implicit context: ExecutionContext) extends HttpCache with LazyLogging {

  // todo: reasonable timeouts
  def lookup(request: ParsedRequest, cacheKey: String) : Future[HttpResponse] = {
    store.keyGet(s"response|$cacheKey").flatMap {
      case None => Future.failed(CacheLookupError)
      case Some(response) => Future.successful(response)
    }
  }

  def safeSave(request: ParsedRequest, rawResponse: HttpResponse, cacheKey: String) = {
    val response = ParsedResponse.parse(rawResponse)

    // don't bother storing it if it will require validation
    val canStore = (
      request.method == HttpMethods.GET
        && (!request.cache_control_no_store && !response.cache_control_no_store)
        && !response.cache_control_private
        && (!request.authorization || response.cache_control_public)
        && response.canDetermineExpiration
        && !response.mustValidate
      )

    // ss4.4 - Invalidation
    if (!request.method.isSafe && rawResponse.status.isSuccess) {
      // todo: delete other cache entries using response's location and content-location header values
      store.keyDelete(s"response|$cacheKey").andThen(FutureUtil.logFailure("SimpleHttpCache::invalidateOnUnsafe"))
    }

    if (canStore) {
      store.keySet(s"response|$cacheKey", rawResponse).andThen(FutureUtil.logFailure("SimpleHttpCache::saveFullResponse"))
    }
  }
} 
Example 200
Source File: LensManager.scala    From mimir   with Apache License 2.0
package mimir.lenses

import java.sql._

import mimir.Database
import mimir.algebra._
import mimir.ctables._
import mimir.sql._
import mimir.models._
import mimir.util.JDBCUtils
import mimir.util.ExperimentalOptions
import com.typesafe.scalalogging.LazyLogging

class LensManager(db: Database) extends LazyLogging {

  val lensTypes = Map[ID,((Database,ID,String,Operator,Seq[Expression]) => 
                              (Operator,TraversableOnce[Model]))](
    ID("MISSING_VALUE")     -> MissingValueLens.create _,
    ID("DOMAIN")            -> MissingValueLens.create _,
    ID("KEY_REPAIR")        -> RepairKeyLens.create _,
    ID("REPAIR_KEY")        -> RepairKeyLens.create _,
    ID("COMMENT")           -> CommentLens.create _,
    ID("MISSING_KEY")       -> MissingKeyLens.create _,
    ID("PICKER")            -> PickerLens.create _,
    ID("GEOCODE")           -> GeocodingLens.create _
  )

  def init(): Unit =
  {
    // no-op for now.
  }

  def create(
    t: ID, 
    name: ID, 
    query: Operator, 
    args: Seq[Expression],
    humanReadableName: Option[String] = None
  ): Unit =
  {
    logger.debug(s"Create Lens: $name ($humanReadableName)")
    val constructor =
      lensTypes.get(t) match {
        case Some(impl) => impl
        case None => throw new SQLException("Invalid Lens Type '"+t+"'")
      }

    // Construct the appropriate lens
    val (view, models) = constructor(db, name, humanReadableName.getOrElse(name.id), query, args)

    // Create a lens query
    db.views.create(name, view)

    // Persist the associated models
    for(model <- models){
      db.models.persist(model, ID("LENS:",name))
    }
  }

  def drop(name: ID, ifExists: Boolean = false): Unit =
  {
    db.views.drop(name, ifExists)
    db.models.dropOwner(ID("LENS:",name))
  }
}
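A sketch of creating and dropping a lens; db: Database and a parsed query: Operator are assumed, as is the db.lenses accessor for this manager:

// Hypothetical: wrap a query in a missing-value lens behind a named view.
db.lenses.create(
  ID("MISSING_VALUE"),
  ID("SALES_CLEANED"),
  query,
  Seq.empty,
  humanReadableName = Some("Sales (cleaned)")
)
db.lenses.drop(ID("SALES_CLEANED"))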