org.scalatest.Tag Scala Examples

The following examples show how to use org.scalatest.Tag. Each example is taken from an open-source Scala project; the source file, project name, and license are listed above the code.
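
Before the project-specific examples, here is a minimal sketch of the usual pattern: define an object that extends Tag, attach it to individual tests with taggedAs, and include or exclude those tests at run time. The DatabaseTest tag, the com.example package, and the spec below are illustrative only (they do not come from any of the projects shown here), and the snippet uses the ScalaTest 3.1+ style AnyFlatSpec.

import org.scalatest.Tag
import org.scalatest.flatspec.AnyFlatSpec

// Runners match tags by the string passed to the Tag constructor, not by the object name.
object DatabaseTest extends Tag("com.example.tags.DatabaseTest")

class UserRepositorySpec extends AnyFlatSpec {

  // Runs only when the DatabaseTest tag is included (or when no tag filter is given).
  "UserRepository" should "persist a user" taggedAs DatabaseTest in {
    assert(1 + 1 == 2) // stand-in for a real database round trip
  }

  // Untagged test: excluded only if an include filter such as -n is in effect.
  it should "capitalize a user name" in {
    assert("ada".capitalize == "Ada")
  }
}

Tagged tests can then be selected with ScalaTest's runner arguments, for example -n com.example.tags.DatabaseTest to run only the tagged tests or -l com.example.tags.DatabaseTest to exclude them; under sbt this looks like testOnly *UserRepositorySpec -- -n com.example.tags.DatabaseTest.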
Example 1
Source File: Tags.scala    From scredis   with Apache License 2.0
package scredis.tags

import org.scalatest.Tag

object V100 extends Tag("V100")
object V101 extends Tag("V101")
object V120 extends Tag("V120")
object V200 extends Tag("V200")
object V220 extends Tag("V220")
object V2212 extends Tag("V2212")
object V223 extends Tag("V223")
object V240 extends Tag("V240")
object V260 extends Tag("V260")
object V269 extends Tag("V269")
object V2612 extends Tag("V2612")
object V280 extends Tag("V280")
object V287 extends Tag("V287")
object V289 extends Tag("V289")
object V2812 extends Tag("V2812")
object V2813 extends Tag("V2813")
object V2950 extends Tag("V2950")
object V300 extends Tag("V300") 
Example 2
Source File: GlowBaseTest.scala    From glow   with Apache License 2.0
package io.projectglow.sql

import htsjdk.samtools.util.Log
import org.apache.spark.sql.SparkSession
import org.apache.spark.{DebugFilesystem, SparkConf}
import org.scalatest.concurrent.{AbstractPatienceConfiguration, Eventually}
import org.scalatest.time.{Milliseconds, Seconds, Span}
import org.scalatest.{Args, FunSuite, Status, Tag}

import io.projectglow.Glow
import io.projectglow.SparkTestShim.SharedSparkSessionBase
import io.projectglow.common.{GlowLogging, TestUtils}
import io.projectglow.sql.util.BGZFCodec

abstract class GlowBaseTest
    extends FunSuite
    with SharedSparkSessionBase
    with GlowLogging
    with GlowTestData
    with TestUtils
    with JenkinsTestPatience {

  override protected def sparkConf: SparkConf = {
    super
      .sparkConf
      .set("spark.hadoop.io.compression.codecs", classOf[BGZFCodec].getCanonicalName)
  }

  override def initializeSession(): Unit = ()

  override protected implicit def spark: SparkSession = {
    val sess = SparkSession.builder().config(sparkConf).master("local[2]").getOrCreate()
    Glow.register(sess)
    SparkSession.setActiveSession(sess)
    Log.setGlobalLogLevel(Log.LogLevel.ERROR)
    sess
  }

  protected def gridTest[A](testNamePrefix: String, testTags: Tag*)(params: Seq[A])(
      testFun: A => Unit): Unit = {
    for (param <- params) {
      test(testNamePrefix + s" ($param)", testTags: _*)(testFun(param))
    }
  }

  override def afterEach(): Unit = {
    DebugFilesystem.assertNoOpenStreams()
    eventually {
      assert(spark.sparkContext.getPersistentRDDs.isEmpty)
      assert(spark.sharedState.cacheManager.isEmpty, "Cache not empty.")
    }
    super.afterEach()
  }

  override def runTest(testName: String, args: Args): Status = {
    logger.info(s"Running test '$testName'")
    val res = super.runTest(testName, args)
    if (res.succeeds()) {
      logger.info(s"Done running test '$testName'")
    } else {
      logger.info(s"Done running test '$testName' with a failure")
    }
    res
  }

  protected def withSparkConf[T](configs: Map[String, String])(f: => T): T = {
    val initialConfigValues = configs.keys.map(k => (k, spark.conf.getOption(k)))
    try {
      configs.foreach { case (k, v) => spark.conf.set(k, v) }
      f
    } finally {
      initialConfigValues.foreach {
        case (k, Some(v)) => spark.conf.set(k, v)
        case (k, None) => spark.conf.unset(k)
      }
    }
  }
}

// Patience configuration mixed into GlowBaseTest above; timeouts are longer when running on Jenkins.
trait JenkinsTestPatience extends AbstractPatienceConfiguration with Eventually {

  final override implicit val patienceConfig: PatienceConfig =
    if (sys.env.get("JENKINS_HOST").nonEmpty) {
      // increase the timeout on jenkins where parallelizing causes things to be very slow
      PatienceConfig(Span(10, Seconds), Span(50, Milliseconds))
    } else {
      // use the default timeout on local machines so failures don't hang for a long time
      PatienceConfig(Span(5, Seconds), Span(15, Milliseconds))
    }
} 
Example 3
Source File: BaseTestsSuite.scala    From cats-effect   with Apache License 2.0
package cats.effect

import cats.effect.internals.TestUtils
import cats.effect.laws.util.{TestContext, TestInstances}
import org.scalactic.source
import org.scalatest.Tag
import org.scalatest.matchers.should.Matchers
import org.scalatest.funsuite.AnyFunSuite
import org.scalatestplus.scalacheck.Checkers
import org.typelevel.discipline.Laws
import org.typelevel.discipline.scalatest.FunSuiteDiscipline

class BaseTestsSuite
    extends AnyFunSuite
    with Matchers
    with Checkers
    with FunSuiteDiscipline
    with TestInstances
    with TestUtils {

  
  def testAsync[A](name: String, tags: Tag*)(f: TestContext => Unit)(implicit pos: source.Position): Unit =
    // Overriding System.err
    test(name, tags: _*)(silenceSystemErr(f(TestContext())))(pos)

  def checkAllAsync(name: String, f: TestContext => Laws#RuleSet): Unit = {
    val context = TestContext()
    val ruleSet = f(context)

    for ((id, prop) <- ruleSet.all.properties)
      test(name + "." + id) {
        silenceSystemErr(check(prop))
      }
  }
} 
Example 4
Source File: RIntegrationSpec.scala    From piglet   with Apache License 2.0
package dbis.piglet

import dbis.piglet.parser.PigParser.parseScript
import dbis.piglet._
import dbis.piglet.op._
import dbis.piglet.schema._
import org.scalatest.{FlatSpec, Matchers}
import org.ddahl.jvmr.RInScala
import scala.io.Source
import org.scalatest.Tag

object RTest extends Tag("R")

class RIntegrationSpec extends FlatSpec with Matchers {
  def checkForWorkingR(): Boolean = {
    try {
      val R = RInScala()
      true
    } catch {
      case e: Exception => false
    }
  }

//  "The R integration" 
  it should "allow to invoke a R script" taggedAs(RTest) in {
    if (checkForWorkingR) {
      val R = RInScala()
      R.x = Array(10.0, 20.0, 30.0)
      R.y = Array(5.0, 6.0, 7.0)
      R.eval("res <- x + y")
      val res = R.toVector[Double]("res")
      res should be(Array(15.0, 26.0, 37.0))
    }
    else
      assume(false, "R not enabled, no test performed")

  }

  it should "run DBSCAN in R" taggedAs(RTest) in {
    if (checkForWorkingR) {
    
    val source = Source.fromFile("./src/test/resources/cluster-data.csv")
    val input = source.getLines().map(line => line.split(","))
    val matrix = input.map(v => v.map(s => s.toDouble))

    val script =
      """
        |library(fpc);
        |db = dbscan(inp, eps=.3, MinPts=5);
        |cluster = cbind(inp, data.frame(db$cluster + 1L))
        |res = data.matrix(cluster)
        |""".stripMargin
    val R = RInScala()
    R.inp = matrix.toArray
    R.eval(script)
    val res = R.toMatrix[Double]("res")
    res.length should be (75)
    println(res)
    }
    else
      assume(false, "R not enabled, no test performed")
  }

//  "The parser"
  it should "accept the SCRIPT statement" taggedAs(RTest) in  {
    parseScript("""a = RSCRIPT b USING 'library(fpc); res <- dbscan($_, eps=0.42, MinPts=5)';""")
  }

} 
Example 5
Source File: HDFSSpec.scala    From piglet   with Apache License 2.0
package dbis.piglet

import java.io.File
import dbis.piglet.tools.HDFSService
import org.scalatest.{Matchers, FlatSpec}
import org.scalatest.Tag
import dbis.piglet.tools.HdfsCommand

object HdfsTest extends Tag("hdfs")


class HDFSSpec extends FlatSpec with Matchers {
  
  "The HDFS service" should "create a HDFS directoy" taggedAs HdfsTest in {
    if (HDFSService.isInitialized) {
      HDFSService.createDirectory("/data/blubs")
      // check whether the directory exists
      HDFSService.exists("/data/blubs") should be(true)
    }
    else
      assume(false, "HDFS not enabled, no test performed")
  }

  it should "copy a file to HDFS" taggedAs HdfsTest in {
    if (HDFSService.isInitialized) {
      HDFSService.copyToRemote("LICENSE", "/data/blubs/LICENSE") should be(true)
      // check whether the file exists
      HDFSService.exists("/data/blubs/LICENSE") should be(true)

      HDFSService.copyToLocal("/data/blubs/LICENSE", "LICENSE-COPY")
      val localFile = new File("LICENSE-COPY")
      localFile.exists() should be(true)

      // cleanup
      HDFSService.removeFile("/data/blubs/LICENSE")
      localFile.delete()
    }
    else
      assume(false, "HDFS not enabled, no test performed")
  }

  it should "remove a directory from HDFS" taggedAs HdfsTest in {
    if (HDFSService.isInitialized) {
      HDFSService.removeDirectory("/data/blubs", true) should be(true)
      // check that the file doesn't exist anymore
      HDFSService.exists("/data/blubs") should be(false)
    }
    else
      assume(false, "HDFS not enabled, no test performed")
  }

  it should "process HDFS commands" taggedAs HdfsTest in {
    if (HDFSService.isInitialized) {
      HDFSService.process(HdfsCommand.MKDIR, List("/data/blubs"))
      HDFSService.exists("/data/blubs") should be(true)

      HDFSService.process(HdfsCommand.COPYTOREMOTE, List("LICENSE", "/data/blubs/LICENSE"))
      HDFSService.exists("/data/blubs/LICENSE") should be(true)

      HDFSService.process(HdfsCommand.COPYTOLOCAL, List("/data/blubs/LICENSE", "LICENSE-COPY"))
      val localFile = new File("LICENSE-COPY")
      localFile.exists() should be(true)
      HDFSService.process(HdfsCommand.RM, List("-r", "/data/blubs"))
      localFile.delete()
      HDFSService.exists("/data/blubs") should be(false)

    }
    else
      assume(false, "HDFS not enabled, no test performed")

  }
} 
Example 6
Source File: JsonSchemaSpec.scala    From cosmos   with Apache License 2.0
package com.mesosphere.cosmos.jsonschema

import com.github.fge.jsonschema.main.JsonSchemaFactory
import io.circe.Json
import io.circe.JsonObject
import io.circe.jawn.parse
import io.circe.syntax._
import org.scalatest.FreeSpec
import org.scalatest.Tag
import scala.io.Source
import scala.util.Right

class JsonSchemaSpec extends FreeSpec {

  private[this] implicit val jsf = JsonSchemaFactory.byDefault()

  "JsonSchema should" - {
    "be able to validate a document against a schema" - {
      // the draft v4 json schema itself should be able to validate itself
      val jsonSchemaDraftV4String = classpathJsonString("/draftv4/schema")

      "as io.circe.JsonObject" in {
        val Right(parsedJson: Json) = parse(jsonSchemaDraftV4String)
        val xor = JsonSchema.jsonMatchesSchema(parsedJson, parsedJson)
        assert(xor.isRight)
      }

      "as io.circe.Json" in {
        val Right(parsedJson: Json) = parse(jsonSchemaDraftV4String)
        val jObject: JsonObject = parsedJson.asObject.get
        val xor = JsonSchema.jsonObjectMatchesSchema(jObject, jObject)
        assert(xor.isRight)
      }
    }

    "be able to extract default property values from a schema" - {
      val expected = JsonObject.fromMap(Map(
        "prop1" -> 57.asJson,
        "prop2" -> Json.obj(
          "sub1" -> "ta-da".asJson
        )
      ))

      "when schema does not use definition refs" in {
        val s = classpathJsonString("/com/mesosphere/cosmos/jsonschema/no-definition-ref-used.json")
        val Right(schema) = parse(s)
        val defaults = JsonSchema.extractDefaultsFromSchema(schema.asObject.get)
        assertResult(expected)(defaults)
      }

      "when schema does use definition refs" taggedAs Tag("https://mesosphere.atlassian.net/browse/DCOS-10455") ignore {
        val s = classpathJsonString("/com/mesosphere/cosmos/jsonschema/definition-ref-used.json")
        val Right(schema) = parse(s)
        val defaults = JsonSchema.extractDefaultsFromSchema(schema.asObject.get)
        assertResult(expected)(defaults)
      }

    }
  }

  private[this] def classpathJsonString(resourceName: String): String = {
    Option(this.getClass.getResourceAsStream(resourceName)) match {
      case Some(is) => Source.fromInputStream(is).mkString
      case _ => throw new IllegalStateException(s"Unable to load classpath resource: $resourceName")
    }
  }

} 
Example 7
Source File: PerfTest.scala    From scala-cass   with MIT License
package com.weather.scalacass

import org.scalatest.Tag
import com.weather.scalacass.util.CassandraUnitTester
import syntax._

object LongRunning extends Tag("LongRunning")

class PerfTest extends CassandraUnitTester {
  val db = "perfdb"
  val table = "perftable"

  ignore should "be decent" taggedAs LongRunning in {
    val th = ichi.bench.Thyme.warmed(verbose = print)
    session.execute(s"CREATE KEYSPACE $db WITH replication = {'class': 'SimpleStrategy', 'replication_factor': 1};")
    session.execute(s"CREATE TABLE $db.$table (str varchar, str2 varchar, str3 varchar, str4 varchar, PRIMARY KEY ((str)))")
    def n = java.util.UUID.randomUUID.toString
    session.execute(s"INSERT INTO $db.$table (str, str2, str3, str4) VALUES (?,?,?,?)", n, n, n, n)
    val row = session.execute(s"SELECT * FROM $db.$table").one()

    th.pbenchOffWarm(title = "compare implicit and native get")(th.Warm(List.fill(100000)(row.as[String]("str"))), 2048, "withImplicit")(th.Warm(List.fill(100000)(if (row.isNull("str")) throw new IllegalArgumentException(s"""Cassandra: "str" was not defined in ${row.getColumnDefinitions.getTable("str")}""") else row.getString("str"))), 2048, "native")

    th.pbenchOffWarm(title = "compare implicit and native getAs")(th.Warm(List.fill(100000)(row.getAs[String]("str"))), 2048, "with implicit")(th.Warm(List.fill(100000)(if (row.getColumnDefinitions.contains("str") && !row.isNull("str")) Some(row.getString("str")) else None)), 2048, "native")

    case class Strings(str: String, str2: String, str3: String, str4: Option[String])
    def g(name: String) = if (row.isNull("str")) throw new IllegalArgumentException(s"""Cassandra: "str" was not defined in ${row.getColumnDefinitions.getTable("str")}""") else row.getString("str")
    th.pbenchOffWarm(title = "compare implicit and native case class as")(th.Warm(List.fill(100000)(row.as[Strings])), 2048, "with implicit")(th.Warm(List.fill(100000)(Strings(g("str"), g("str2"), g("str3"), if (row.getColumnDefinitions.contains("str") && !row.isNull("str")) Some(row.getString("str")) else None))), 2048, "native")

    def fAs() = {
      implicit val c: CCCassFormatDecoder[Strings] = shapeless.cachedImplicit
      th.pbenchOffWarm(title = "compare implicit and native case class as with cachedImplicit")(th.Warm(List.fill(100000)(row.as[Strings])), 2048, "with implicit")(th.Warm(List.fill(100000)(Strings(g("str"), g("str2"), g("str3"), if (row.getColumnDefinitions.contains("str") && !row.isNull("str")) Some(row.getString("str")) else None))), 2048, "native")
    }

    fAs()

    def ga(name: String) = if (row.getColumnDefinitions.contains(name) && !row.isNull(name)) Some(row.getString(name)) else None
    def getAs = for {
      s1 <- ga("str")
      s2 <- ga("str2")
      s3 <- ga("str3")
      s4 = ga("str4")
    } yield Strings(s1, s2, s3, s4)
    th.pbenchOffWarm(title = "compare implicit and native case class getAs")(th.Warm(List.fill(100000)(row.getAs[Strings])), 2048, "with implicit")(th.Warm(List.fill(100000)(getAs)), 2048, "native")

    def fgetAs() = {
      implicit val c: CCCassFormatDecoder[Strings] = shapeless.cachedImplicit
      th.pbenchOffWarm(title = "compare implicit and native case class getAs with cachedImplicit")(th.Warm(List.fill(100000)(row.getAs[Strings])), 2048, "with cachedImplicit")(th.Warm(List.fill(100000)(getAs)), 2038, "native")
    }

    fgetAs()
  }
} 
Example 8
Source File: HiveVersionSuite.scala    From Spark-2.3.1   with Apache License 2.0
package org.apache.spark.sql.hive.client

import org.apache.hadoop.conf.Configuration
import org.scalactic.source.Position
import org.scalatest.Tag

import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.hive.HiveUtils

private[client] abstract class HiveVersionSuite(version: String) extends SparkFunSuite {
  protected var client: HiveClient = null

  protected def buildClient(hadoopConf: Configuration): HiveClient = {
    // Hive changed the default of datanucleus.schema.autoCreateAll from true to false and
    // hive.metastore.schema.verification from false to true since 2.0
    // For details, see the JIRA HIVE-6113 and HIVE-12463
    if (version == "2.0" || version == "2.1") {
      hadoopConf.set("datanucleus.schema.autoCreateAll", "true")
      hadoopConf.set("hive.metastore.schema.verification", "false")
    }
    HiveClientBuilder
      .buildClient(version, hadoopConf, HiveUtils.formatTimeVarsForHiveClient(hadoopConf))
  }

  override def suiteName: String = s"${super.suiteName}($version)"

  override protected def test(testName: String, testTags: Tag*)(testFun: => Any)
      (implicit pos: Position): Unit = {
    super.test(s"$version: $testName", testTags: _*)(testFun)
  }
} 
Example 9
Source File: ToolConstants.scala    From scala-debugger   with Apache License 2.0
package test

import org.scalatest.Tag
import org.scalatest.time.{Milliseconds, Seconds, Span}


object ToolConstants {
  val DefaultMaxInputQueueSize = 50000
  val DefaultMaxOutputQueueSize = 50000
  val AccumulationTimeout = Span(500, Milliseconds)
  val EventuallyTimeout = Span(10, Seconds)
  val EventuallyInterval = Span(5, Milliseconds)
  val NewInputLineTimeout = Span(10, Seconds)
  val NextOutputLineTimeout = Span(5, Seconds)
  val NoWindows = Tag("NoWindows")
} 
Example 10
Source File: WaitForFundingCreatedStateSpec.scala    From eclair   with Apache License 2.0
package fr.acinq.eclair.channel.states.b

import akka.testkit.{TestFSMRef, TestProbe}
import fr.acinq.bitcoin.{ByteVector32, Satoshi}
import fr.acinq.eclair.TestConstants.{Alice, Bob}
import fr.acinq.eclair.blockchain._
import fr.acinq.eclair.channel._
import fr.acinq.eclair.channel.states.StateTestsHelperMethods
import fr.acinq.eclair.transactions.Transactions
import fr.acinq.eclair.wire._
import fr.acinq.eclair.{LongToBtcAmount, TestConstants, TestKitBaseClass, ToMilliSatoshiConversion}
import org.scalatest.funsuite.FixtureAnyFunSuiteLike
import org.scalatest.{Outcome, Tag}

import scala.concurrent.duration._



class WaitForFundingCreatedStateSpec extends TestKitBaseClass with FixtureAnyFunSuiteLike with StateTestsHelperMethods {

  case class FixtureParam(bob: TestFSMRef[State, Data, Channel], alice2bob: TestProbe, bob2alice: TestProbe, bob2blockchain: TestProbe)

  override def withFixture(test: OneArgTest): Outcome = {
    val setup = init()
    import setup._
    val (fundingSatoshis, pushMsat) = if (test.tags.contains("funder_below_reserve")) {
      (1000100 sat, (1000000 sat).toMilliSatoshi) // toLocal = 100 satoshis
    } else {
      (TestConstants.fundingSatoshis, TestConstants.pushMsat)
    }
    val aliceInit = Init(Alice.channelParams.features)
    val bobInit = Init(Bob.channelParams.features)
    within(30 seconds) {
      alice ! INPUT_INIT_FUNDER(ByteVector32.Zeroes, fundingSatoshis, pushMsat, TestConstants.feeratePerKw, TestConstants.feeratePerKw, Alice.channelParams, alice2bob.ref, bobInit, ChannelFlags.Empty, ChannelVersion.STANDARD)
      bob ! INPUT_INIT_FUNDEE(ByteVector32.Zeroes, Bob.channelParams, bob2alice.ref, aliceInit)
      alice2bob.expectMsgType[OpenChannel]
      alice2bob.forward(bob)
      bob2alice.expectMsgType[AcceptChannel]
      bob2alice.forward(alice)
      awaitCond(bob.stateName == WAIT_FOR_FUNDING_CREATED)
      withFixture(test.toNoArgTest(FixtureParam(bob, alice2bob, bob2alice, bob2blockchain)))
    }
  }

  test("recv FundingCreated") { f =>
    import f._
    alice2bob.expectMsgType[FundingCreated]
    alice2bob.forward(bob)
    awaitCond(bob.stateName == WAIT_FOR_FUNDING_CONFIRMED)
    bob2alice.expectMsgType[FundingSigned]
    bob2blockchain.expectMsgType[WatchSpent]
    bob2blockchain.expectMsgType[WatchConfirmed]
  }

  test("recv FundingCreated (funder can't pay fees)", Tag("funder_below_reserve")) { f =>
    import f._
    val fees = Satoshi(Transactions.commitWeight * TestConstants.feeratePerKw / 1000)
    val reserve = Bob.channelParams.channelReserve
    val missing = 100.sat - fees - reserve
    val fundingCreated = alice2bob.expectMsgType[FundingCreated]
    alice2bob.forward(bob)
    val error = bob2alice.expectMsgType[Error]
    assert(error === Error(fundingCreated.temporaryChannelId, s"can't pay the fee: missing=${-missing} reserve=$reserve fees=$fees"))
    awaitCond(bob.stateName == CLOSED)
  }

  test("recv Error") { f =>
    import f._
    bob ! Error(ByteVector32.Zeroes, "oops")
    awaitCond(bob.stateName == CLOSED)
  }

  test("recv CMD_CLOSE") { f =>
    import f._
    bob ! CMD_CLOSE(None)
    awaitCond(bob.stateName == CLOSED)
  }

} 
Example 11
Source File: BasicAppSpec.scala    From sbt-docker-compose   with BSD 3-Clause "New" or "Revised" License
import org.scalatest._
import scala.Console._
import scala.sys.process._
import scalaj.http.Http
import org.scalatest.Tag
import org.scalatest.concurrent._
import org.scalatest.exceptions._
import java.io.{ByteArrayOutputStream, PrintWriter}

class BasicAppSpec extends fixture.FunSuite with fixture.ConfigMapFixture with Eventually with IntegrationPatience with Matchers {

  // The configMap passed to each test case will contain the connection information for the running Docker Compose
  // services. The key into the map is "serviceName:containerPort" and it will return "host:hostPort" which is the
  // Docker Compose generated endpoint that can be connected to at runtime. You can use this endpoint to connect to
  // for testing. Each service will also inject a "serviceName:containerId" key with the value equal to the container id.
  // You can use this to emulate service failures by killing and restarting the container.
  val basicServiceName = "basic"
  val basicServiceHostKey = s"$basicServiceName:8080"
  val basicServiceContainerIdKey = s"$basicServiceName:containerId"

  test("Validate that the Docker Compose endpoint returns a success code and the string 'Hello, World!'") {
    configMap =>{
      println(configMap)
      val hostInfo = getHostInfo(configMap)
      val containerId = getContainerId(configMap)

      println(s"Attempting to connect to: $hostInfo, container id is $containerId")

      eventually {
        val output = Http(s"http://$hostInfo").asString
        output.isSuccess shouldBe true
        output.body should include ("Hello, World!")
      }
    }
  }

  test("Validate presence of docker config information in system properties") {
    configMap =>
      Option(System.getProperty(basicServiceHostKey)) shouldBe defined
  }

  def getHostInfo(configMap: ConfigMap): String = getContainerSetting(configMap, basicServiceHostKey)
  def getContainerId(configMap: ConfigMap): String = getContainerSetting(configMap, basicServiceContainerIdKey)

  def getContainerSetting(configMap: ConfigMap, key: String): String = {
    if (configMap.keySet.contains(key)) {
      configMap(key).toString
    }
    else {
      throw new TestFailedException(s"Cannot find the expected Docker Compose service key '$key' in the configMap", 10)
    }
  }
} 
Example 12
Source File: BasicAppSpec.scala    From sbt-docker-compose   with BSD 3-Clause "New" or "Revised" License
import org.scalatest._
import scala.Console._
import scala.sys.process._
import scalaj.http.Http
import org.scalatest.Tag
import org.scalatest.concurrent._
import org.scalatest.exceptions._
import java.io.{ByteArrayOutputStream, PrintWriter}

//You can define a specific tag to indicate which test should be run against the Docker Compose instance
object DockerComposeTag extends Tag("DockerComposeTag")

class BasicAppSpec extends fixture.FunSuite with fixture.ConfigMapFixture with Eventually with IntegrationPatience with Matchers {

  // The configMap passed to each test case will contain the connection information for the running Docker Compose
  // services. The key into the map is "serviceName:containerPort" and it will return "host:hostPort" which is the
  // Docker Compose generated endpoint that can be connected to at runtime. You can use this endpoint to connect to
  // for testing. Each service will also inject a "serviceName:containerId" key with the value equal to the container id.
  // You can use this to emulate service failures by killing and restarting the container.
  val basicServiceName = "basic"
  val basicServiceHostKey = s"$basicServiceName:8080"
  val basicServiceContainerIdKey = s"$basicServiceName:containerId"

  test("Validate that the Docker Compose endpoint returns a success code and the string 'Hello, World!'", DockerComposeTag) {
    configMap =>{
      println(configMap)
      val hostInfo = getHostInfo(configMap)
      val containerId = getContainerId(configMap)

      println(s"Attempting to connect to: $hostInfo, container id is $containerId")

      eventually {
        val output = Http(s"http://$hostInfo").asString
        output.isSuccess shouldBe true
        output.body should include ("Hello, World!")
      }
    }
  }

  test("Example Untagged Test. Will not be run.") {
    configMap =>
  }

  test("Validate presence of docker config information in system properties", DockerComposeTag) {
    configMap =>
      Option(System.getProperty(basicServiceHostKey)) shouldBe defined
  }

  def getHostInfo(configMap: ConfigMap): String = getContainerSetting(configMap, basicServiceHostKey)
  def getContainerId(configMap: ConfigMap): String = getContainerSetting(configMap, basicServiceContainerIdKey)

  def getContainerSetting(configMap: ConfigMap, key: String): String = {
    if (configMap.keySet.contains(key)) {
      configMap(key).toString
    }
    else {
      throw new TestFailedException(s"Cannot find the expected Docker Compose service key '$key' in the configMap", 10)
    }
  }
} 
Example 13
Source File: LabelLabelIndexMutateOptionTest.scala    From incubator-s2graph   with Apache License 2.0
package org.apache.s2graph.core.Integrate

import org.apache.s2graph.core._
import org.scalatest.{BeforeAndAfterEach, Tag}
import play.api.libs.json._

class LabelLabelIndexMutateOptionTest extends IntegrateCommon with BeforeAndAfterEach {

  import TestUtil._

  // called by start test, once
  override def initTestData(): Unit = {
    super.initTestData()

    val insert = "insert"
    val e = "e"
    val weight = "weight"
    val is_hidden = "is_hidden"

    insertEdgesSync(
      toEdge(1, insert, e, 0, 1, testLabelNameLabelIndex),
      toEdge(1, insert, e, 0, 2, testLabelNameLabelIndex),
      toEdge(1, insert, e, 0, 3, testLabelNameLabelIndex)
    )
  }

  def getQuery(ids: Seq[Int], direction: String, indexName: String): Query =
    Query(
      vertices = ids.map(graph.toVertex(testServiceName, testColumnName, _)),
      steps = Vector(
        Step(Seq(QueryParam(testLabelNameLabelIndex, direction = direction, indexName = indexName)))
      )
    )

  
  ignore("index for out direction should drop out direction edge and store degree") {
    val edges = getEdgesSync(getQuery(Seq(0), "out", idxDropOutStoreDegree))
    (edges \ "results").as[Seq[JsValue]].size should be(0)
    (edges \\ "_degree").map(_.as[Long]).sum should be(3)
  }
} 
Example 14
Source File: TnViewCreatorTest.scala    From TopNotch   with Apache License 2.0
package com.bfm.topnotch.tnview

import com.bfm.topnotch.SparkApplicationTester
import com.bfm.topnotch.TnTestHelper
import org.scalatest.{Matchers, Tag}

/**
 * The tests for [[com.bfm.topnotch.tnview.TnViewCreator TnViewCreator]].
 */
class TnViewCreatorTest extends SparkApplicationTester with Matchers {
  import TnTestHelper._

  lazy val tnViewCreator = new TnViewCreator(spark)
  lazy val df = spark.read.parquet(getClass.getResource("currentLoans.parquet").getFile).cache

  /**
   * The tags
   */
  object viewCreatorTag extends Tag("TnViewCreator")

  /**
   * The tests for TnViewCreator
   */
  "TnViewCreator" should "do nothing when given Select *" taggedAs viewCreatorTag in {
    dfEquals(tnViewCreator.createView(Seq(df), TnViewParams(Seq("testTable"), "select * from testTable")), df)
  }

  it should "filter out all but the rows with poolNum == 1 when given the SQL where poolNum = 1" taggedAs viewCreatorTag in {
    dfEquals(tnViewCreator.createView(Seq(df), TnViewParams(Seq("testTable"), "select * from testTable where poolNum = 1")),
      df.where("poolNum = 1"))
  }
} 
Example 15
Source File: TnWriterTest.scala    From TopNotch   with Apache License 2.0
package com.bfm.topnotch.tnengine

import java.net.URL

import com.bfm.topnotch.SparkApplicationTester
import org.scalatest.{Tag, Matchers}

/**
 * The tests for [[com.bfm.topnotch.tnengine.TnWriter TnWriter]].
 */
class TnWriterTest extends SparkApplicationTester with Matchers {

  /**
   * The tags
   */
  object getWriterTag extends Tag("getWriter")

  lazy val fileReader = new TnFileReader
  lazy val engine = new TnEngine(spark)

  "getWriter" should "return an HDFS writer when given a config missing the io namespace" taggedAs (getWriterTag) in {
    engine.getWriter(fileReader.readConfiguration("src/test/resources/com/bfm/topnotch/tnengine/emptyPlan.json")) shouldBe a [TnHDFSWriter]
  }

  it should "return an HDFS writer when given HDFS as the config string with no path" taggedAs (getWriterTag) in {
    engine.getWriter(fileReader.readConfiguration("src/test/resources/com/bfm/topnotch/tnengine/writer/hdfsNoFile.json")) shouldBe a [TnHDFSWriter]
  }

  it should "return an HDFS writer with a non-default destination when given hdfs as the writer with a destination string" taggedAs (getWriterTag) in {
    val writer = engine.getWriter(fileReader.readConfiguration("src/test/resources/com/bfm/topnotch/tnengine/writer/hdfsWithFile.json"))
    writer shouldBe a [TnHDFSWriter]
    writer.asInstanceOf[TnHDFSWriter].dest.get shouldBe "/user/testUser/"
  }

  it should "return an Hbase writer when given HBase as the config string" taggedAs (getWriterTag) in {
    engine.getWriter(fileReader.readConfiguration("src/test/resources/com/bfm/topnotch/tnengine/writer/hbase.json")) shouldBe a [TnHBaseWriter]
  }

  it should "return a REST writer with a non-default URL when given rest as the writer with a destination string" taggedAs (getWriterTag) in {
    val writer = engine.getWriter(fileReader.readConfiguration("src/test/resources/com/bfm/topnotch/tnengine/writer/rest.json"))
    writer shouldBe a [TnRESTWriter]
    writer.asInstanceOf[TnRESTWriter].dest shouldBe "http://www.testurl.com"
  }
} 
Example 16
Source File: TnReaderTest.scala    From TopNotch   with Apache License 2.0
package com.bfm.topnotch.tnengine

import org.json4s._
import org.json4s.native.Serialization
import com.bfm.topnotch.SparkApplicationTester
import org.scalatest.{Matchers, Tag}

/**
 * The tests for [[com.bfm.topnotch.tnengine.TnReader TnReader]].
  * Note that most testing is done by the tests for [[com.bfm.topnotch.tnengine.TnEngine TnEngine]]
 */
class TnReaderTest extends SparkApplicationTester with Matchers {

  object getReaderTag extends Tag("getReader")
  object readerVariableTag extends Tag("readerVariables")
  object jarReaderTag extends Tag("jarReader")
  implicit val formats = Serialization.formats(NoTypeHints)

  "getReader" should "throw an IllegalArgumentException when the plan doesn't exist" taggedAs(getReaderTag) in {
    intercept[IllegalArgumentException] {
      val fileReader = new TnFileReader
      fileReader.readConfiguration("src/test/resources/com/bfm/DOESNTEXIST")
    }
  }

  it should "replace variables in a configuration" taggedAs(readerVariableTag) in {
    val fileReader = new TnFileReader(Map("var1" -> "true", "var2" -> "false"))
    val replacedAST = fileReader.readConfiguration("src/test/resources/com/bfm/topnotch/tnengine/cliReplacementTest.json")
    replacedAST \ "trueToBeReplaced" should not equal(JNothing)
    (replacedAST \ "replaceThisValue").extract[String] should equal("false")
  }
} 
Example 17
Source File: ParentTest.scala    From Soteria   with MIT License
package com.leobenkel.soteria

import org.apache.commons.logging.{Log, LogFactory}
import org.scalactic.source.Position
import org.scalatest.{FunSuite, Tag}


trait ParentTest extends FunSuite {
  lazy val log: Log = LogFactory.getLog(this.getClass)

  protected def assertEquals[T](
    expected: T,
    result:   T
  )(
    implicit pos: Position
  ): Unit = {
    assertResult(expected)(result)
    ()
  }

  override protected def test(
    testName: String,
    testTags: Tag*
  )(
    testFun: => Any
  )(
    implicit pos: Position
  ): Unit = {
    super.test(testName, testTags: _*) {
      log.debug(s">>> Starting - $testName")
      testFun
    }
  }

  def time[R](block: => R): (R, Long) = {
    val t0 = System.nanoTime()
    val result = block
    val t1 = System.nanoTime()
    val time_ns: Long = t1 - t0
    (result, time_ns)
  }
} 
Example 18
Source File: ChainFixtureTag.scala    From bitcoin-s   with MIT License
package org.bitcoins.testkit.chain.fixture

import org.scalatest.Tag


sealed abstract class ChainFixtureTag(name: String) extends Tag(name)

object ChainFixtureTag {
  case object Empty extends ChainFixtureTag("Empty")

  case object GenisisBlockHeaderDAO
      extends ChainFixtureTag("GenisisBlockHeaderDAO")

  case object PopulatedBlockHeaderDAO
      extends ChainFixtureTag("PopulatedBlockHeaderDAO")

  case object GenisisChainHandler extends ChainFixtureTag("GenisisChainHandler")

  case object PopulatedChainHandler
      extends ChainFixtureTag("PopulatedChainHandler")

  case object BitcoindZmqChainHandlerWithBlock
      extends ChainFixtureTag("BitcoindZmqChainHandlerWithBlock")

  val defaultTag: ChainFixtureTag = ChainFixtureTag.Empty

  def from(tag: String): ChainFixtureTag = {
    tag match {
      case Empty.name                   => Empty
      case GenisisBlockHeaderDAO.name   => GenisisBlockHeaderDAO
      case PopulatedBlockHeaderDAO.name => PopulatedBlockHeaderDAO
      case GenisisChainHandler.name     => GenisisChainHandler
      case PopulatedChainHandler.name   => PopulatedChainHandler
      case BitcoindZmqChainHandlerWithBlock.name =>
        BitcoindZmqChainHandlerWithBlock
      case _: String =>
        throw new IllegalArgumentException(s"$tag is not a valid tag")
    }
  }
} 
Example 19
Source File: WeightedMinHashSpec.scala    From gemini   with GNU General Public License v3.0
package tech.sourced.gemini

import org.scalatest.{FlatSpec, Matchers}
import scala.io.Source
import org.scalatest.Tag

// Tag to mark which tests depend on Python
object PythonDep extends Tag("tech.sourced.tags.PythonDep")

class WeightedMinHashSpec extends FlatSpec with Matchers {

  "WeightedMinHash constructor" should "initialize correctly" taggedAs(PythonDep) in {
    val mg = new WeightedMinHash(2, 4, 1)

    mg.rs.length should be(4)
    mg.lnCs.length should be(4)
    mg.betas.length should be(4)
    mg.sampleSize should be(4)
  }

  def readCSV(filename: String): Array[Array[Float]] = {
    Source
      .fromFile(s"src/test/resources/weighted-minhash/csv/${filename}")
      .getLines()
      .map(_.split(",").map(_.trim.toFloat))
      .toArray
  }

  "WeightedMinHash hash" should "hash tiny data" taggedAs(PythonDep) in {
    val input = readCSV("tiny-data.csv")

    val rs = readCSV("tiny-rs.csv")
    val lnCs = readCSV("tiny-ln_cs.csv")
    val betas = readCSV("tiny-betas.csv")

    input.zipWithIndex.foreach {
      case (v, i) =>
        val wmh = new WeightedMinHash(v.length, 128, rs, lnCs, betas)
        val hashes = wmh.hash(v)
        val realHashes = readCSV(s"tiny-hashes-${i}.csv").map(_.map(_.toLong))

        hashes should be(realHashes)
    }
  }

  "WeightedMinHash hash" should "hash big data" taggedAs(PythonDep) in {
    val input = readCSV("big-data.csv")

    val rs = readCSV("big-rs.csv")
    val lnCs = readCSV("big-ln_cs.csv")
    val betas = readCSV("big-betas.csv")

    input.zipWithIndex.foreach {
      case (v, i) =>
        val wmh = new WeightedMinHash(v.length, 128, rs, lnCs, betas)
        val hashes = wmh.hash(v)
        val realHashes = readCSV(s"big-hashes-${i}.csv").map(_.map(_.toLong))

        hashes should be(realHashes)
    }
  }
} 
Example 20
Source File: PlayJsonFormatSpec.scala    From play-json-ops   with MIT License
package play.api.libs.json.scalatest

import org.scalacheck.ops._
import org.scalacheck.{Arbitrary, Gen, Shrink}
import org.scalatest.Tag
import org.scalatest.flatspec.AnyFlatSpecLike
import play.api.libs.json.Format
import play.api.libs.json.scalacheck.PlayJsonFormatTests

import scala.reflect.ClassTag
import scala.testing.scalatest.ScalaTestBridge


class PlayJsonFormatSpec[T](examples: Seq[T])(implicit playFormat: Format[T], clsTag: ClassTag[T], shrink: Shrink[T])
  extends PlayJsonFormatTests[T](examples, playFormat, clsTag, shrink)
  with AnyFlatSpecLike
  with ScalaTestBridge {

  override def registerTest(testText: String, testTags: Tag*)(testFun: => Unit): Unit = {
    super[AnyFlatSpecLike].registerTest(testText, testTags: _*)(testFun)
  }

  def this(gen: Gen[T], samples: Int)(implicit playFormat: Format[T], clsTag: ClassTag[T], shrink: Shrink[T]) =
    this(gen.toIterator.take(samples).toSeq)

  def this(gen: Gen[T])(implicit playFormat: Format[T], clsTag: ClassTag[T], shrink: Shrink[T]) = this(gen, 100)

  def this(samples: Int)(implicit playFormat: Format[T], clsTag: ClassTag[T], shrink: Shrink[T], arb: Arbitrary[T]) =
    this(arb.arbitrary, samples)

  def this()(implicit playFormat: Format[T], clsTag: ClassTag[T], shrink: Shrink[T], arb: Arbitrary[T]) =
    this(arb.arbitrary)
} 
Example 21
Source File: ScalaTestBridge.scala    From play-json-ops   with MIT License
package scala.testing.scalatest

import org.scalatest.{Suite, Tag}

import scala.testing.TestSuiteBridge


trait ScalaTestBridge extends TestSuiteBridge with Suite {

  override protected def assertEqual[T](left: T, right: T): Unit = assert(left == right)

  override def fail(): Nothing                                  = super[TestSuiteBridge].fail()
  override def fail(message: String): Nothing                   = super[TestSuiteBridge].fail(message)
  override def fail(message: String, cause: Throwable): Nothing = super[TestSuiteBridge].fail(message, cause)
  override def fail(cause: Throwable): Nothing                  = super[TestSuiteBridge].fail(cause)

  override protected def doFail(optReason: Option[String], optCause: Option[Throwable]): Nothing = {
    (optReason, optCause) match {
      case (Some(reason), Some(cause)) => super[Suite].fail(reason, cause)
      case (Some(reason), None)        => super[Suite].fail(reason)
      case (None, Some(cause))         => super[Suite].fail(cause)
      case (None, None)                => super[Suite].fail()
    }
  }

  def registerTest(testText: String, testTags: Tag*)(testFun: => Unit): Unit

  override protected[testing] def registerTests(tests: Map[String, () => Unit]): Unit = {
    for ((name, test) <- tests) {
      registerTest(name)(test())
    }
  }

  // register the tests after the Suite has been initialized
  registerTests()
} 
Example 22
Source File: HiveVersionSuite.scala    From XSQL   with Apache License 2.0
package org.apache.spark.sql.hive.client

import org.apache.hadoop.conf.Configuration
import org.scalactic.source.Position
import org.scalatest.Tag

import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.hive.HiveUtils

private[client] abstract class HiveVersionSuite(version: String) extends SparkFunSuite {
  override protected val enableAutoThreadAudit = false
  protected var client: HiveClient = null

  protected def buildClient(
      hadoopConf: Configuration,
      sharesHadoopClasses: Boolean = true): HiveClient = {
    // Hive changed the default of datanucleus.schema.autoCreateAll from true to false and
    // hive.metastore.schema.verification from false to true since 2.0
    // For details, see the JIRA HIVE-6113 and HIVE-12463
    if (version == "2.0" || version == "2.1" || version == "2.2" || version == "2.3") {
      hadoopConf.set("datanucleus.schema.autoCreateAll", "true")
      hadoopConf.set("hive.metastore.schema.verification", "false")
    }
    HiveClientBuilder.buildClient(
      version,
      hadoopConf,
      HiveUtils.formatTimeVarsForHiveClient(hadoopConf),
      sharesHadoopClasses = sharesHadoopClasses)
  }

  override def suiteName: String = s"${super.suiteName}($version)"

  override protected def test(testName: String, testTags: Tag*)(testFun: => Any)
      (implicit pos: Position): Unit = {
    super.test(s"$version: $testName", testTags: _*)(testFun)
  }
} 
Example 23
Source File: TestSetup.scala    From incubator-retired-iota   with Apache License 2.0
package org.apache.iota.fey

import java.io.File
import java.nio.file.Paths

import org.apache.commons.io.FileUtils
import org.scalatest.Tag

object TestSetup {

  private var runSetup = true

  val configTest = getClass.getResource("/test-fey-configuration.conf")

  def setup(): Unit = {
    if(runSetup){
      println("SETTING UP ...")
      createFeyTmpDirectoriesForTest()
      copyTestActorToTmp()
      copyJSONstoTmp()
      runSetup = false
    }
  }

  private def copyTestActorToTmp(): Unit = {
    copyResourceFileToLocal("/fey-test-actor.jar",s"${CONFIG.JAR_REPOSITORY}/fey-test-actor.jar")
  }

  private def copyJSONstoTmp(): Unit = {
    copyResourceFileToLocal("/json/valid-json.json",s"${CONFIG.JSON_REPOSITORY}/valid-json.json.not")
    copyResourceFileToLocal("/json/invalid-json.json",s"${CONFIG.JSON_REPOSITORY}/invalid-json.json.not")
  }

  private def copyResourceFileToLocal(resourcePath: String, destination: String): Unit = {
    val resourceFile = getClass.getResource(resourcePath)
    val dest = new File(destination)
    FileUtils.copyURLToFile(resourceFile, dest)
  }

  private def createFeyTmpDirectoriesForTest(): Unit = {
    var file = new File(s"/tmp/fey/test/checkpoint")
    file.mkdirs()
    file = new File(s"/tmp/fey/test/json")
    file.mkdirs()
    file = new File(s"/tmp/fey/test/json/watchtest")
    file.mkdirs()
    file = new File(s"/tmp/fey/test/jars")
    file.mkdirs()
    file = new File(s"/tmp/fey/test/jars/dynamic")
    file.mkdirs()
  }

}

object SlowTest extends Tag("org.apache.iota.fey.SlowTest") 
Example 24
Source File: KuduComponentTest.scala    From eel-sdk   with Apache License 2.0
package io.eels.component.kudu

import io.eels.Row
import io.eels.datastream.DataStream
import io.eels.schema._
import org.scalatest.{FlatSpec, Matchers, Tag}

object Kudu extends Tag("kudu")

class KuduComponentTest extends FlatSpec with Matchers {

  "kudu" should "support end to end sink to source" taggedAs Kudu in {

    val schema = StructType(
      Field("planet", StringType, nullable = false, key = true),
      Field("position", StringType, nullable = true)
    )

    val ds = DataStream.fromValues(
      schema,
      Seq(
        Vector("earth", 3),
        Vector("saturn", 6)
      )
    )

    val master = "localhost:7051"
    ds.to(KuduSink(master, "mytable"))

    val rows = KuduSource(master, "mytable").toDataStream().collect
    rows shouldBe Seq(
      Row(schema, Vector("earth", "3")),
      Row(schema, Vector("saturn", "6"))
    )
  }

  it should "support all basic types" taggedAs Kudu in {

    val schema = StructType(
      Field("planet", StringType, nullable = false, key = true),
      Field("position", ByteType.Signed, nullable = false),
      Field("volume", DoubleType, nullable = false),
      Field("bytes", BinaryType, nullable = false),
      Field("gas", BooleanType, nullable = false),
      Field("distance", LongType.Signed, nullable = false)
    )

    val data = Array("earth", 3: Byte, 4515135988.632, Array[Byte](1, 2, 3), false, 83000000)

    val ds = DataStream.fromValues(schema, Seq(data))

    val master = "localhost:7051"
    ds.to(KuduSink(master, "mytable2"))

    val rows = KuduSource(master, "mytable2").toDataStream().collect
    val values = rows.head.values.toArray
    data(3) = data(3).asInstanceOf[Array[Byte]].toList
    values shouldBe data
  }
}