org.scalatest.WordSpec Scala Examples

The following examples show how to use org.scalatest.WordSpec. Each example is taken from an open-source project; the source file, project, and license are noted above each listing.
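For orientation, here is a minimal WordSpec sketch (a hypothetical StackSpec, with a plain List standing in for the system under test) showing the when/should/in nesting that the examples below build on:

import org.scalatest.{Matchers, WordSpec}

class StackSpec extends WordSpec with Matchers {
  "a stack" when {
    "empty" should {
      "have size 0" in {
        // a List stands in for the real system under test
        List.empty[Int].size shouldBe 0
      }
    }
  }
}

Mixing in Matchers is what provides the shouldBe / should syntax used throughout the examples.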
Example 1
Source File: TimeLimitedFutureSpec.scala    From gfc-concurrent   with Apache License 2.0
package com.gilt.gfc.concurrent

import java.util.concurrent.TimeoutException
import scala.concurrent.{ Future, Await }
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.Duration
import org.scalatest.{WordSpec, Matchers}

class TimeLimitedFutureSpec extends WordSpec with Matchers {
  import TimeLimitedFutureSpec._

  "RichFuture" when {
    import ScalaFutures._

    "waiting for a result to happen" should {
      "return the completed original Future if it completes before the given timeout" in {
        val now = System.currentTimeMillis
        val future: Future[String] = (Future { Thread.sleep(1000); "Here I am" }).withTimeout(Duration(5, "seconds"))
        val msg: String = Await.result(future, Duration(10, "seconds"))
        val elapsed = (System.currentTimeMillis - now)
        msg should equal ("Here I am")
        elapsed should be (2000L +- 1000L)
      }

      "return the failure of the original Future if it fails before the given timeout" in {
        val now = System.currentTimeMillis
        val future = (Future { Thread.sleep(1000); throw new NullPointerException("That hurts!") }).withTimeout(Duration(5, "seconds"))
        a [NullPointerException] should be thrownBy { Await.result(future, Duration(10, "seconds")) }
        val elapsed = (System.currentTimeMillis - now)
        elapsed should be (2000L +- 1000L)
      }

      "return the timeout of the original Future if it had one and it went off and was shorter than the given one" in {
        val now = System.currentTimeMillis
        val timingOutEarlier = Timeouts.timeout(Duration(1, "seconds"))
        val future = timingOutEarlier.withTimeout(Duration(5, "seconds"))
        a [TimeoutException] should be thrownBy { Await.result(future, Duration(10, "seconds")) }
        val elapsed: Long = (System.currentTimeMillis - now)
        elapsed should be >= 500L
        elapsed should be <= 4000L
      }

      "return the timeout if the original Future does not timeout of its own" in {
        val now = System.currentTimeMillis
        val timingOutLater = Timeouts.timeout(Duration(3, "seconds"))
        val future = timingOutLater.withTimeout(Duration(1, "seconds"))
        a [TimeoutException] should be thrownBy  { Await.result(future, Duration(10, "seconds")) }
        val elapsed: Long = (System.currentTimeMillis - now)
        elapsed should be >= 1000L
        elapsed should be <= 2500L
      }
    }

    // an example of how it could be used
    "used in our most common use case" should {
      "fit nicely" in {
        val call: Future[String] = svcCall(1000).withTimeout(Duration(5000, "milliseconds")).recover {
          case _: TimeoutException => "recover.timeout"
          case other => s"recover.${other.getMessage}"
        }
        Await.result(call, Duration(10, "seconds")) should be ("data-1000")

        val call2: Future[String] = svcCall(5000).withTimeout(Duration(1000, "milliseconds")).recover {
          case _: TimeoutException => "recover.timeout"
          case other => s"recover.${other.getMessage}"
        }
        Await.result(call2, Duration(10, "seconds")) should be ("recover.timeout")
      }
    }
  }
}

object TimeLimitedFutureSpec {
  def svcCall(latency: Long): Future[String] = Future { Thread.sleep(latency); s"data-${latency}" }
} 
Example 2
Source File: AvroParquetSourceTest.scala    From eel-sdk   with Apache License 2.0
package io.eels.component.parquet

import java.nio.file.Paths

import io.eels.component.parquet.avro.AvroParquetSource
import io.eels.component.parquet.util.ParquetLogMute
import io.eels.schema._
import org.apache.avro.SchemaBuilder
import org.apache.avro.generic.{GenericData, GenericRecord}
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.parquet.avro.AvroParquetWriter
import org.scalatest.{Matchers, WordSpec}

class AvroParquetSourceTest extends WordSpec with Matchers {
  ParquetLogMute()

  private implicit val conf = new Configuration()
  private implicit val fs = FileSystem.get(conf)

  private val personFile = Paths.get(getClass.getResource("/io/eels/component/parquet/person.avro.pq").toURI)
  private val resourcesDir = personFile.getParent

  "AvroParquetSource" should {
    "read schema" in {
      val people = AvroParquetSource(personFile)
      people.schema shouldBe StructType(
        Field("name", StringType, nullable = false),
        Field("job", StringType, nullable = false),
        Field("location", StringType, nullable = false)
      )
    }
    "read parquet files" in {
      val people = AvroParquetSource(personFile.toAbsolutePath()).toDataStream().toSet.map(_.values)
      people shouldBe Set(
        Vector("clint eastwood", "actor", "carmel"),
        Vector("elton john", "musician", "pinner")
      )
    }
    "read multiple parquet files using file expansion" in {
      import io.eels.FilePattern._
      val people = AvroParquetSource(s"${resourcesDir.toUri.toString}/*.pq").toDataStream().toSet.map(_.values)
      people shouldBe Set(
        Vector("clint eastwood", "actor", "carmel"),
        Vector("elton john", "musician", "pinner"),
        Vector("clint eastwood", "actor", "carmel"),
        Vector("elton john", "musician", "pinner")
      )
    }
    // todo add merge to parquet source
    "merge schemas" ignore {

      try {
        fs.delete(new Path("merge1.pq"), false)
      } catch {
        case _: Throwable =>
      }
      try {
        fs.delete(new Path("merge2.pq"), false)
      } catch {
        case _: Throwable =>
      }

      val schema1 = SchemaBuilder.builder().record("schema1").fields().requiredString("a").requiredDouble("b").endRecord()
      val schema2 = SchemaBuilder.builder().record("schema2").fields().requiredInt("a").requiredBoolean("c").endRecord()

      val writer1 = AvroParquetWriter.builder[GenericRecord](new Path("merge1.pq")).withSchema(schema1).build()
      val record1 = new GenericData.Record(schema1)
      record1.put("a", "aaaaa")
      record1.put("b", 124.3)
      writer1.write(record1)
      writer1.close()

      val writer2 = AvroParquetWriter.builder[GenericRecord](new Path("merge2.pq")).withSchema(schema2).build()
      val record2 = new GenericData.Record(schema2)
      record2.put("a", 111)
      record2.put("c", true)
      writer2.write(record2)
      writer2.close()

      ParquetSource(new Path("merge*")).schema shouldBe
        StructType(
          Field("a", StringType, nullable = false),
          Field("b", DoubleType, nullable = false),
          Field("c", BooleanType, nullable = false)
        )

      fs.delete(new Path(".merge1.pq.crc"), false)
      fs.delete(new Path(".merge2.pq.crc"), false)
      fs.delete(new Path("merge1.pq"), false)
      fs.delete(new Path("merge2.pq"), false)
    }
  }
} 
Example 3
Source File: SparkCassBulkWriterSpec.scala    From Spark2Cassandra   with Apache License 2.0
package com.github.jparkie.spark.cassandra

import com.datastax.driver.core.querybuilder.QueryBuilder
import com.datastax.spark.connector.AllColumns
import com.datastax.spark.connector.writer.{ RowWriterFactory, SqlRowWriter }
import com.github.jparkie.spark.cassandra.client.SparkCassSSTableLoaderClientManager
import com.github.jparkie.spark.cassandra.conf.{ SparkCassServerConf, SparkCassWriteConf }
import com.holdenkarau.spark.testing.SharedSparkContext
import org.apache.spark.sql.{ Row, SQLContext }
import org.scalatest.{ MustMatchers, WordSpec }

import scala.collection.JavaConverters._

class SparkCassBulkWriterSpec extends WordSpec with MustMatchers with CassandraServerSpecLike with SharedSparkContext {
  val testKeyspace = "test_keyspace"
  val testTable = "test_table"

  override def beforeAll(): Unit = {
    super.beforeAll()

    getCassandraConnector.withSessionDo { currentSession =>
      createKeyspace(currentSession, testKeyspace)

      currentSession.execute(
        s"""CREATE TABLE $testKeyspace.$testTable (
            |  test_key BIGINT PRIMARY KEY,
            |  test_value VARCHAR
            |);
         """.stripMargin
      )
    }
  }

  "SparkCassBulkWriter" must {
    "write() successfully" in {
      val sqlContext = new SQLContext(sc)

      import sqlContext.implicits._

      implicit val testRowWriterFactory: RowWriterFactory[Row] = SqlRowWriter.Factory

      val testCassandraConnector = getCassandraConnector
      val testSparkCassWriteConf = SparkCassWriteConf()
      val testSparkCassServerConf = SparkCassServerConf(
        // See https://github.com/jsevellec/cassandra-unit/blob/master/cassandra-unit/src/main/resources/cu-cassandra.yaml
        storagePort = 7010
      )

      val testSparkCassBulkWriter = SparkCassBulkWriter(
        testCassandraConnector,
        testKeyspace,
        testTable,
        AllColumns,
        testSparkCassWriteConf,
        testSparkCassServerConf
      )

      val testRDD = sc.parallelize(1 to 25)
        .map(currentNumber => (currentNumber.toLong, s"Hello World: $currentNumber!"))
      val testDataFrame = testRDD.toDF("test_key", "test_value")

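      // runJob executes the bulk writer's write function once per partition of the
      // DataFrame's underlying RDD.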
      sc.runJob(testDataFrame.rdd, testSparkCassBulkWriter.write _)

      getCassandraConnector.withSessionDo { currentSession =>
        val queryStatement = QueryBuilder.select("test_key", "test_value")
          .from(testKeyspace, testTable)
          .limit(25)

        val resultSet = currentSession.execute(queryStatement)

        val outputSet = resultSet.all.asScala
          .map(currentRow => (currentRow.getLong("test_key"), currentRow.getString("test_value")))
          .toMap

        for (currentNumber <- 1 to 25) {
          val currentKey = currentNumber.toLong

          outputSet(currentKey) mustEqual s"Hello World: $currentNumber!"
        }
      }

      SparkCassSSTableLoaderClientManager.evictAll()
    }
  }
} 
Example 4
Source File: SparkCassSSTableLoaderClientSpec.scala    From Spark2Cassandra   with Apache License 2.0
package com.github.jparkie.spark.cassandra.client

import com.github.jparkie.spark.cassandra.CassandraServerSpecLike
import com.github.jparkie.spark.cassandra.conf.SparkCassServerConf
import org.apache.cassandra.tools.BulkLoadConnectionFactory
import org.scalatest.{ MustMatchers, WordSpec }

class SparkCassSSTableLoaderClientSpec extends WordSpec with MustMatchers with CassandraServerSpecLike {
  val testKeyspace = "test_keyspace"
  val testTable = "test_table"

  override def beforeAll(): Unit = {
    super.beforeAll()

    getCassandraConnector.withSessionDo { currentSession =>
      createKeyspace(currentSession, testKeyspace)

      currentSession.execute(
        s"""CREATE TABLE $testKeyspace.$testTable (
           |  test_key VARCHAR PRIMARY KEY,
           |  test_value BIGINT
           |);
         """.stripMargin
      )
    }
  }

  "SparkCassSSTableLoaderClient" must {
    "initialize successfully" in {
      getCassandraConnector.withSessionDo { currentSession =>
        val testSession = currentSession
        val testSparkCassServerConf = SparkCassServerConf()
        val testSparkCassSSTableLoaderClient = new SparkCassSSTableLoaderClient(testSession, testSparkCassServerConf)

        testSparkCassSSTableLoaderClient.init(testKeyspace)
      }
    }

    "ensure tables contain TableIdentifier(testKeyspace, testTable)" in {
      getCassandraConnector.withSessionDo { currentSession =>
        val testSession = currentSession
        val testSparkCassServerConf = SparkCassServerConf()
        val testSparkCassSSTableLoaderClient = new SparkCassSSTableLoaderClient(testSession, testSparkCassServerConf)

        testSparkCassSSTableLoaderClient.init(testKeyspace)

        assert(testSparkCassSSTableLoaderClient.tables
          .contains(SparkCassSSTableLoaderClient.TableIdentifier(testKeyspace, testTable)))
      }
    }

    "retrieve CFMetaData" in {
      getCassandraConnector.withSessionDo { currentSession =>
        val testSession = currentSession
        val testSparkCassServerConf = SparkCassServerConf()
        val testSparkCassSSTableLoaderClient = new SparkCassSSTableLoaderClient(testSession, testSparkCassServerConf)

        testSparkCassSSTableLoaderClient.init(testKeyspace)

        val outputCFMetaData = testSparkCassSSTableLoaderClient.getCFMetaData(testKeyspace, testTable)
        outputCFMetaData.ksName mustEqual testKeyspace
        outputCFMetaData.cfName mustEqual testTable
      }
    }

    "getConnectionFactory successfully" in {
      getCassandraConnector.withSessionDo { currentSession =>
        val testSession = currentSession
        val testSparkCassServerConf = SparkCassServerConf()
        val testSparkCassSSTableLoaderClient = new SparkCassSSTableLoaderClient(testSession, testSparkCassServerConf)

        testSparkCassSSTableLoaderClient.init(testKeyspace)

        val outputConnectionFactory = testSparkCassSSTableLoaderClient
          .getConnectionFactory

        assert(outputConnectionFactory.isInstanceOf[BulkLoadConnectionFactory])
      }
    }

    "close session on stop()" in {
      val testSession = getCassandraConnector.openSession()
      val testSparkCassServerConf = SparkCassServerConf()
      val testSparkCassSSTableLoaderClient = new SparkCassSSTableLoaderClient(testSession, testSparkCassServerConf)

      testSparkCassSSTableLoaderClient.stop()

      assert(testSession.isClosed)
    }
  }
} 
Example 5
Source File: SparkCassSSTableLoaderClientManagerSpec.scala    From Spark2Cassandra   with Apache License 2.0
package com.github.jparkie.spark.cassandra.client

import com.github.jparkie.spark.cassandra.CassandraServerSpecLike
import com.github.jparkie.spark.cassandra.conf.SparkCassServerConf
import org.scalatest.{ MustMatchers, WordSpec }

class SparkCassSSTableLoaderClientManagerSpec extends WordSpec with MustMatchers with CassandraServerSpecLike {
  "SparkCassSSTableLoaderClientManager" must {
    "return one SparkCassSSTableLoaderClient in getClient()" in {
      val testSparkCassServerConf = SparkCassServerConf()

      SparkCassSSTableLoaderClientManager.getClient(getCassandraConnector, testSparkCassServerConf)
      SparkCassSSTableLoaderClientManager.getClient(getCassandraConnector, testSparkCassServerConf)
      SparkCassSSTableLoaderClientManager.getClient(getCassandraConnector, testSparkCassServerConf)

      assert(SparkCassSSTableLoaderClientManager.internalClients.size == 1)

      SparkCassSSTableLoaderClientManager.evictAll()
    }

    "evictAll() ensures all sessions are stopped and internalClients is empty" in {
      val testSparkCassServerConf = SparkCassServerConf()

      val outputClient = SparkCassSSTableLoaderClientManager.getClient(getCassandraConnector, testSparkCassServerConf)

      SparkCassSSTableLoaderClientManager.evictAll()

      assert(outputClient.session.isClosed)
      assert(SparkCassSSTableLoaderClientManager.internalClients.isEmpty)
    }
  }
} 
Example 6
Source File: SparkCassDataFrameFunctionsSpec.scala    From Spark2Cassandra   with Apache License 2.0
package com.github.jparkie.spark.cassandra.sql

import com.holdenkarau.spark.testing.SharedSparkContext
import org.apache.spark.sql.SQLContext
import org.scalatest.{ MustMatchers, WordSpec }

class SparkCassDataFrameFunctionsSpec extends WordSpec with MustMatchers with SharedSparkContext {
  "Package com.github.jparkie.spark.cassandra.sql" must {
    "lift DataFrame into SparkCassDataFrameFunctions" in {
      val sqlContext = new SQLContext(sc)

      import sqlContext.implicits._

      val testRDD = sc.parallelize(1 to 25)
        .map(currentNumber => (currentNumber.toLong, s"Hello World: $currentNumber!"))
      val testDataFrame = testRDD.toDF("test_key", "test_value")

      // If internalSparkContext is available, the DataFrame was lifted.
      testDataFrame.internalSparkContext
    }
  }
} 
Example 7
Source File: SparkCassRDDFunctionsSpec.scala    From Spark2Cassandra   with Apache License 2.0
package com.github.jparkie.spark.cassandra.rdd

import com.holdenkarau.spark.testing.SharedSparkContext
import org.scalatest.{ MustMatchers, WordSpec }

class SparkCassRDDFunctionsSpec extends WordSpec with MustMatchers with SharedSparkContext {
  "Package com.github.jparkie.spark.cassandra.rdd" must {
    "lift RDD into SparkCassRDDFunctions" in {
      val testRDD = sc.parallelize(1 to 25)
        .map(currentNumber => (currentNumber.toLong, s"Hello World: $currentNumber!"))

      // If internalSparkContext is available, RDD was lifted.
      testRDD.internalSparkContext
    }
  }
} 
Example 8
Source File: SparkCassWriteConfSpec.scala    From Spark2Cassandra   with Apache License 2.0
package com.github.jparkie.spark.cassandra.conf

import org.apache.cassandra.dht.{ ByteOrderedPartitioner, Murmur3Partitioner, RandomPartitioner }
import org.apache.spark.SparkConf
import org.scalatest.{ MustMatchers, WordSpec }

class SparkCassWriteConfSpec extends WordSpec with MustMatchers {
  "SparkCassWriteConf" must {
    "be extracted from SparkConf successfully" in {
      val inputSparkConf = new SparkConf()
        .set("spark.cassandra.bulk.write.partitioner", "org.apache.cassandra.dht.ByteOrderedPartitioner")
        .set("spark.cassandra.bulk.write.throughput_mb_per_sec", "1")
        .set("spark.cassandra.bulk.write.connection_per_host", "2")

      val outputSparkCassWriteConf = SparkCassWriteConf.fromSparkConf(inputSparkConf)

      outputSparkCassWriteConf.partitioner mustEqual "org.apache.cassandra.dht.ByteOrderedPartitioner"
      outputSparkCassWriteConf.throughputMiBPS mustEqual 1
      outputSparkCassWriteConf.connectionsPerHost mustEqual 2
    }

    "set defaults when no properties set in SparkConf" in {
      val inputSparkConf = new SparkConf()

      val outputSparkCassWriteConf = SparkCassWriteConf.fromSparkConf(inputSparkConf)

      outputSparkCassWriteConf.partitioner mustEqual
        SparkCassWriteConf.SPARK_CASSANDRA_BULK_WRITE_PARTITIONER.default
      outputSparkCassWriteConf.throughputMiBPS mustEqual
        SparkCassWriteConf.SPARK_CASSANDRA_BULK_WRITE_THROUGHPUT_MB_PER_SEC.default
      outputSparkCassWriteConf.connectionsPerHost mustEqual
        SparkCassWriteConf.SPARK_CASSANDRA_BULK_WRITE_CONNECTIONS_PER_HOST.default
    }

    "reject invalid partitioner in SparkConf" in {
      val inputSparkConf = new SparkConf()
        .set("spark.cassandra.bulk.write.partitioner", "N/A")

      intercept[IllegalArgumentException] {
        SparkCassWriteConf.fromSparkConf(inputSparkConf)
      }
    }

    "getIPartitioner() correctly per partitioner" in {
      val sparkCassWriteConf1 = SparkCassWriteConf("org.apache.cassandra.dht.Murmur3Partitioner")
      assert(sparkCassWriteConf1.getIPartitioner.isInstanceOf[Murmur3Partitioner])
      val sparkCassWriteConf2 = SparkCassWriteConf("org.apache.cassandra.dht.RandomPartitioner")
      assert(sparkCassWriteConf2.getIPartitioner.isInstanceOf[RandomPartitioner])
      val sparkCassWriteConf3 = SparkCassWriteConf("org.apache.cassandra.dht.ByteOrderedPartitioner")
      assert(sparkCassWriteConf3.getIPartitioner.isInstanceOf[ByteOrderedPartitioner])
    }
  }
} 
Example 9
Source File: FilePatternTest.scala    From eel-sdk   with Apache License 2.0
package io.eels

import java.nio.file.Files

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}
import org.scalatest.{Matchers, WordSpec}

class FilePatternTest extends WordSpec with Matchers {

  implicit val fs = FileSystem.get(new Configuration())

  "FilePattern" should {
    "detect single hdfs path without name server" ignore {
      FilePattern("hdfs:///mypath").toPaths() shouldBe List(new Path("hdfs:///mypath"))
    }
    "detect single hdfs path with name server" ignore {
      FilePattern("hdfs://nameserver/mypath").toPaths() shouldBe List(new Path("hdfs://nameserver/mypath"))
    }
    "detect absolute local file" in {
      FilePattern("file:///absolute/file").toPaths() shouldBe List(new Path("file:///absolute/file"))
    }
    "detect relative local file" in {
      FilePattern("file:///local/file").toPaths() shouldBe List(new Path("file:///local/file"))
    }
    "detect relative local file expansion" in {
      val dir = Files.createTempDirectory("filepatterntest")
      val files = List("a", "b", "c").map { it =>
        dir.resolve(it)
      }
      val hdfsPaths = files.map { it =>
        new Path(it.toUri)
      }
      files.foreach(file => Files.createFile(file))
      FilePattern(dir.toUri.toString() + "/*").toPaths().toSet shouldBe hdfsPaths.toSet
      files.foreach(Files.deleteIfExists)
      Files.deleteIfExists(dir)
    }

    // not working on Windows
    "detect relative local file expansion with schema" in {
      val dir = Files.createTempDirectory("filepatterntest")
      val files = List("a", "b", "c").map { it =>
        dir.resolve(it)
      }
      val hdfsPaths = files.map { it =>
        new Path(it.toUri)
      }
      files.foreach(file => Files.createFile(file))
      FilePattern(dir.toUri.toString() + "/*").toPaths().toSet shouldBe hdfsPaths.toSet
      files.foreach(Files.deleteIfExists)
      Files.deleteIfExists(dir)
    }

    "use filter if supplied" in {
      val dir = Files.createTempDirectory("filepatterntest")
      val files = List("a", "b", "c").map { it => dir.resolve(it) }
      files.foreach { it => Files.createFile(it) }
      val a = FilePattern(dir.toAbsolutePath().toString() + "/*")
        .withFilter(_.toString().endsWith("a"))
        .toPaths.toSet
      a shouldBe Set(new Path("file:///" + dir.resolve("a")))
      files.foreach { it => Files.deleteIfExists(it) }
      Files.deleteIfExists(dir)
    }
  }
} 
Example 10
Source File: ToSetActionTest.scala    From eel-sdk   with Apache License 2.0
package io.eels

import io.eels.datastream.DataStream
import io.eels.schema._
import org.scalatest.{Matchers, WordSpec}

class ToSetActionTest extends WordSpec with Matchers {

  "ToSetPlan" should {
    "createReader set from frame" in {
      val schema = StructType(
        Field("name"),
        Field("location")
      )
      val ds = DataStream.fromValues(
        schema,
        Seq(
          List("sam", "aylesbury"),
          List("sam", "aylesbury"),
          List("sam", "aylesbury"),
          List("jam", "aylesbury"),
          List("jam", "aylesbury"),
          List("jam", "aylesbury"),
          List("ham", "buckingham")
        )
      )
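      // toSet materialises the stream and removes duplicate rows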
      ds.toSet shouldBe Set(
        Row(ds.schema, "sam", "aylesbury"),
        Row(ds.schema, "jam", "aylesbury"),
        Row(ds.schema, "ham", "buckingham")
      )
    }
  }
} 
Example 11
Source File: GroupedDataStreamTest.scala    From eel-sdk   with Apache License 2.0
package io.eels

import io.eels.datastream.DataStream
import io.eels.schema.{Field, IntType, LongType, StructType}
import org.scalatest.{Matchers, WordSpec}

class GroupedDataStreamTest extends WordSpec with Matchers {

  val schema = StructType(
    Field("artist"),
    Field("year", IntType()),
    Field("album"),
    Field("sales", LongType())
  )
  val ds = DataStream.fromRows(schema,
    Row(schema, Vector("Elton John", 1969, "Empty Sky", 1433)),
    Row(schema, Vector("Elton John", 1971, "Madman Across the Water", 7636)),
    Row(schema, Vector("Elton John", 1972, "Honky Château", 2525)),
    Row(schema, Vector("Elton John", 1973, "Goodbye Yellow Brick Road", 4352)),
    Row(schema, Vector("Elton John", 1975, "Rock of the Westies", 5645)),
    Row(schema, Vector("Kate Bush", 1978, "The Kick Inside", 2577)),
    Row(schema, Vector("Kate Bush", 1978, "Lionheart", 745)),
    Row(schema, Vector("Kate Bush", 1980, "Never for Ever", 7444)),
    Row(schema, Vector("Kate Bush", 1982, "The Dreaming", 8253)),
    Row(schema, Vector("Kate Bush", 1985, "Hounds of Love", 2495))
  )

  "grouped operations" should {
    "support sum" ignore {
      ds.groupBy("artist").sum("sales").toDataStream.toSet.map(_.values) shouldBe
        Set(Vector("Elton John", 21591), Vector("Kate Bush", 21514.0))
    }
    "support count" ignore {
      ds.groupBy("artist").count("album").toDataStream.toSet.map(_.values) shouldBe
        Set(Vector("Elton John", 5), Vector("Kate Bush", 5))
    }
    "support avg" ignore {
      ds.groupBy("artist").avg("sales").toDataStream.toSet.map(_.values) shouldBe
        Set(Vector("Elton John", 4318.2), Vector("Kate Bush", 4302.8))
    }
    "support min" ignore {
      ds.groupBy("artist").min("year").toDataStream.toSet.map(_.values) shouldBe
        Set(Vector("Elton John", 1969), Vector("Kate Bush", 1978))
    }
    "support max" ignore {
      ds.groupBy("artist").max("year").toDataStream.toSet.map(_.values) shouldBe
        Set(Vector("Elton John", 1975), Vector("Kate Bush", 1985))
    }
    "support multiple aggregations" ignore {
      ds.groupBy("artist").avg("year").sum("sales").toDataStream.toSet.map(_.values) shouldBe
        Set(Vector("Elton John", 1972.0, 21591.0), Vector("Kate Bush", 1980.6, 21514.0))
    }
    "support aggregations on entire dataset" ignore {
      ds.aggregated().avg("year").sum("sales").toDataStream.toSet.map(_.values) shouldBe
        Set(Vector(1976.3, 43105.0))
    }
  }
} 
Example 12
Source File: ListenerTest.scala    From eel-sdk   with Apache License 2.0
package io.eels

import java.util.concurrent.{CountDownLatch, TimeUnit}

import io.eels.component.csv.{CsvSink, CsvSource}
import io.eels.datastream.DataStream
import io.eels.schema.StructType
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}
import org.scalatest.{Matchers, WordSpec}

import scala.util.Random

class ListenerTest extends WordSpec with Matchers {

  implicit val conf = new Configuration()
  implicit val fs = FileSystem.get(conf)

  val schema = StructType("a", "b", "c", "d", "e")
  val rows = List.fill(1000)(Row(schema, Random.nextBoolean(), Random.nextFloat(), Random.nextGaussian(), Random.nextLong(), Random.nextString(10)))
  val ds = DataStream.fromRows(schema, rows)

  val path = new Path("listener_test.csv")

  "DataStream" should {
    "support user's listeners" in {

      val latch = new CountDownLatch(1000)
      fs.delete(path, false)

      ds.listener(new Listener {
        override def onNext(value: Row): Unit = latch.countDown()
        override def onError(e: Throwable): Unit = ()
        override def onComplete(): Unit = ()
      }).to(CsvSink(path))

      latch.await(20, TimeUnit.SECONDS) shouldBe true

      fs.delete(path, false)
    }
    "propagate errors in listeners" in {

      class TestSink extends Sink {
        override def open(schema: StructType): SinkWriter = new SinkWriter {
          override def close(): Unit = ()
          override def write(row: Row): Unit = ()
        }
      }

      try {
        ds.listener(new Listener {
          override def onNext(value: Row): Unit = sys.error("boom")
          override def onError(e: Throwable): Unit = ()
          override def onComplete(): Unit = ()
        }).to(new TestSink)
        assert(false)
      } catch {
        case _: Throwable =>
      }
    }
  }

  "Source.toDataStream" should {
    "call on next for each row" in {

      val latch = new CountDownLatch(1000)

      fs.delete(path, false)
      ds.to(CsvSink(path))

      CsvSource(path).toDataStream(new Listener {
        override def onNext(value: Row): Unit = latch.countDown()
        override def onError(e: Throwable): Unit = ()
        override def onComplete(): Unit = ()
      }).collect

      latch.await(5, TimeUnit.SECONDS) shouldBe true
      fs.delete(path, false)
    }
    "call on complete once finished" in {

      val latch = new CountDownLatch(1001)

      fs.delete(path, false)
      ds.to(CsvSink(path))

      CsvSource(path).toDataStream(new Listener {
        override def onNext(value: Row): Unit = latch.countDown()
        override def onError(e: Throwable): Unit = ()
        override def onComplete(): Unit = latch.countDown()
      }).collect

      latch.await(5, TimeUnit.SECONDS) shouldBe true
      fs.delete(path, false)
    }
  }
} 
Example 13
Source File: RowUtilsTest.scala    From eel-sdk   with Apache License 2.0
package io.eels

import java.util.Date

import io.eels.schema.{BooleanType, DateType, DoubleType, Field, IntType, StringType, StructType}
import org.scalatest.{Matchers, WordSpec}

class RowUtilsTest extends WordSpec with Matchers {
  "RowUtils.rowAlign" should {
    "rowAlign should reorder in line with target schema" in {
      val row = Row(StructType(Field("a"), Field("b"), Field("c")), "aaaa", "bbb", "ccc")
      val targetSchema = StructType(Field("c"), Field("b"))
      RowUtils.rowAlign(row, targetSchema) shouldBe Row(StructType(Field("c"), Field("b")), "ccc", "bbb")
    }
    "rowAlign should lookup missing data" in {
      val row = Row(StructType(Field("a"), Field("b"), Field("c")), "aaaa", "bbb", "ccc")
      val targetSchema = StructType(Field("c"), Field("d"))
      RowUtils.rowAlign(row, targetSchema, Map("d" -> "ddd")) shouldBe Row(StructType(Field("c"), Field("d")), "ccc", "ddd")
    }
    "rowAlign should throw an error if a field is missing" in {
      val row = Row(StructType(Field("a"), Field("b"), Field("c")), "aaaa", "bbb", "ccc")
      val targetSchema = StructType(Field("c"), Field("d"))
      intercept[RuntimeException] {
        RowUtils.rowAlign(row, targetSchema)
      }
    }
  }

  "RowUtils.coerce" should {
    "coerce values to match types" in {
      val schema = StructType(Field("a", StringType), Field("b", DoubleType), Field("c", BooleanType), Field("d", DateType), Field("e", IntType.Signed))
      val row = Row(schema, Vector(1, "1.4", "true", "1123123244493", "1"))
      RowUtils.coerce(row) shouldBe Row(schema, "1", 1.4D, true, new Date(1123123244493L), 1)
    }
  }
} 
Example 14
Source File: AvroSourceTest.scala    From eel-sdk   with Apache License 2.0
package io.eels.component.avro

import java.nio.file.Paths

import com.typesafe.config.ConfigFactory
import io.eels.schema.{Field, StructType}
import org.apache.avro.util.Utf8
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.FileSystem
import org.scalatest.{Matchers, WordSpec}

class AvroSourceTest extends WordSpec with Matchers {

  private implicit val conf = new Configuration()
  private implicit val fs = FileSystem.get(new Configuration())

  "AvroSource" should {
    "read schema" in {
      val people = AvroSource(Paths.get(getClass.getResource("/test.avro").toURI).toAbsolutePath)
      people.schema shouldBe StructType(Field("name", nullable = false), Field("job", nullable = false), Field("location", nullable = false))
    }
    "read strings as java.lang.String when eel.avro.java.string is true" in {
      System.setProperty("eel.avro.java.string", "true")
      ConfigFactory.invalidateCaches()
      val people = AvroSource(Paths.get(getClass.getResource("/test.avro").toURI).toAbsolutePath).toDataStream().toSet
      people.map(_.values) shouldBe Set(
        List("clint eastwood", "actor", "carmel"),
        List("elton john", "musician", "pinner"),
        List("issac newton", "scientist", "heaven")
      )
      System.setProperty("eel.avro.java.string", "false")
      ConfigFactory.invalidateCaches()
    }
    "read strings as utf8 when eel.avro.java.string is false" in {
      System.setProperty("eel.avro.java.string", "false")
      ConfigFactory.invalidateCaches()
      val people = AvroSource(Paths.get(getClass.getResource("/test.avro").toURI).toAbsolutePath).toDataStream().toSet
      people.map(_.values) shouldBe Set(
        List(new Utf8("clint eastwood"), new Utf8("actor"), new Utf8("carmel")),
        List(new Utf8("elton john"), new Utf8("musician"), new Utf8("pinner")),
        List(new Utf8("issac newton"), new Utf8("scientist"), new Utf8("heaven"))
      )
      System.setProperty("eel.avro.java.string", "true")
      ConfigFactory.invalidateCaches()
    }
  }
} 
Example 15
Source File: ConverterTest.scala    From eel-sdk   with Apache License 2.0
package io.eels.component.avro

import org.apache.avro.SchemaBuilder
import org.scalatest.{Matchers, WordSpec}

class ConverterTest extends WordSpec with Matchers {

  "Converter" should {
    "convert to long" in {
      AvroSerializer(SchemaBuilder.builder().longType()).serialize("123") shouldBe 123L
      AvroSerializer(SchemaBuilder.builder().longType()).serialize(14555) shouldBe 14555L
    }
    "convert to String" in {
      AvroSerializer(SchemaBuilder.builder().stringType()).serialize(123L) shouldBe "123"
      AvroSerializer(SchemaBuilder.builder().stringType).serialize(124) shouldBe "124"
      AvroSerializer(SchemaBuilder.builder().stringType).serialize("Qweqwe") shouldBe "Qweqwe"
    }
    "convert to boolean" in {
      AvroSerializer(SchemaBuilder.builder().booleanType).serialize(true) shouldBe true
      AvroSerializer(SchemaBuilder.builder().booleanType).serialize(false) shouldBe false
      AvroSerializer(SchemaBuilder.builder().booleanType).serialize("true") shouldBe true
      AvroSerializer(SchemaBuilder.builder().booleanType()).serialize("false") shouldBe false
    }
    "convert to Double" in {
      AvroSerializer(SchemaBuilder.builder().doubleType).serialize("213.4") shouldBe 213.4d
      AvroSerializer(SchemaBuilder.builder().doubleType).serialize("345.11") shouldBe 345.11d
      AvroSerializer(SchemaBuilder.builder().doubleType()).serialize(345) shouldBe 345.0
    }
  }
} 
Example 16
Source File: AvroSchemaFnsTest.scala    From eel-sdk   with Apache License 2.0
package io.eels.component.avro

import java.util

import io.eels.schema._
import org.apache.avro.SchemaBuilder
import org.codehaus.jackson.node.NullNode
import org.scalatest.{Matchers, WordSpec}

import scala.collection.JavaConverters._

class AvroSchemaFnsTest extends WordSpec with Matchers {

  "toAvro" should {
    "use a union of [null, type] for a nullable column" in {
      val schema = StructType(Field("a", StringType, true))
      val fields = AvroSchemaFns.toAvroSchema(schema).getFields.asScala
      fields.head.schema().getType shouldBe org.apache.avro.Schema.Type.UNION
      fields.head.schema().getTypes.get(0).getType shouldBe org.apache.avro.Schema.Type.NULL
      fields.head.schema().getTypes.get(1).getType shouldBe org.apache.avro.Schema.Type.STRING
    }
    "set default type of NullNode for a nullable column" in {
      val schema = StructType(Field("a", StringType, true))
      val fields = AvroSchemaFns.toAvroSchema(schema).getFields
      fields.get(0).defaultValue() shouldBe NullNode.getInstance()
    }
    "not set a default value for a non null column" in {
      val schema = StructType(Field("a", IntType(true), false))
      val fields = AvroSchemaFns.toAvroSchema(schema).getFields
      (fields.get(0).defaultVal() == null) shouldBe true
      fields.get(0).schema().getType shouldBe org.apache.avro.Schema.Type.INT
    }
  }

  "fromAvroSchema" should {
    "convert avro unions [null, string] to nullable columns" in {
      val avro = SchemaBuilder.record("dummy").fields().optionalString("str").endRecord()
      AvroSchemaFns.fromAvroSchema(avro) shouldBe StructType(Field("str", StringType, true))
    }
    "convert avro unions [null, double] to nullable double columns" in {
      val union = org.apache.avro.Schema.createUnion(util.Arrays.asList(SchemaBuilder.builder().doubleType(), SchemaBuilder.builder().nullType()))
      val avro = SchemaBuilder.record("dummy").fields().name("u").`type`(union).noDefault().endRecord()
      AvroSchemaFns.fromAvroSchema(avro) shouldBe StructType(Field("u", DoubleType, true))
    }
  }
} 
Example 17
Source File: AvroSinkTest.scala    From eel-sdk   with Apache License 2.0
package io.eels.component.avro

import io.eels.Row
import io.eels.datastream.DataStream
import io.eels.schema.{ArrayType, Field, MapType, StringType, StructType}
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}
import org.scalatest.{Matchers, WordSpec}

class AvroSinkTest extends WordSpec with Matchers {

  private implicit val conf = new Configuration()
  private implicit val fs = FileSystem.get(new Configuration())

  private val ds = DataStream.fromValues(
    StructType("name", "job", "location"),
    Seq(
      List("clint eastwood", "actor", "carmel"),
      List("elton john", "musician", "pinner"),
      List("issac newton", "scientist", "heaven")
    )
  )

  "AvroSink" should {
    "write to avro" in {
      val path = new Path("avro.test")
      fs.delete(path, false)
      ds.to(AvroSink(path))
      fs.delete(path, false)
    }
    "support overwrite option" in {
      val path = new Path("overwrite_test", ".avro")
      fs.delete(path, false)
      ds.to(AvroSink(path))
      ds.to(AvroSink(path).withOverwrite(true))
      fs.delete(path, false)
    }
    "write lists and maps" in {
      val ds = DataStream.fromValues(
        StructType(
          Field("name"),
          Field("movies", ArrayType(StringType)),
          Field("characters", MapType(StringType, StringType))
        ),
        Seq(
          List(
            "clint eastwood",
            List("fistful of dollars", "high plains drifters"),
            Map("preacher" -> "high plains", "no name" -> "good bad ugly")
          )
        )
      )

      val path = new Path("array_map_avro", ".avro")
      fs.delete(path, false)
      ds.to(AvroSink(path))
      AvroSource(path).toDataStream().collect shouldBe Seq(
        Row(
          ds.schema,
          Seq(
            "clint eastwood",
            List("fistful of dollars", "high plains drifters"),
            Map("preacher" -> "high plains", "no name" -> "good bad ugly")
          )
        )
      )

      fs.delete(path, true)
    }
  }
} 
Example 18
Source File: AvroSchemaMergeTest.scala    From eel-sdk   with Apache License 2.0
package io.eels.component.avro

import org.apache.avro.SchemaBuilder
import org.scalatest.{Matchers, WordSpec}

class AvroSchemaMergeTest extends WordSpec with Matchers {
  "AvroSchemaMerge" should {
    "merge all fields" in {
      val schema1 = SchemaBuilder.record("record1").fields().nullableString("str1", "moo").requiredFloat("f").endRecord()
      val schema2 = SchemaBuilder.record("record2").fields().nullableString("str2", "foo").requiredFloat("g").endRecord()
      AvroSchemaMerge("finalname", "finalnamespace", List(schema1, schema2)) shouldBe
          SchemaBuilder.record("finalname").namespace("finalnamespace")
              .fields()
              .nullableString("str1", "moo")
              .requiredFloat("f")
              .nullableString("str2", "foo")
              .requiredFloat("g")
              .endRecord()
    }

    "drop duplicates" in {
      val schema1 = SchemaBuilder.record("record1").fields().nullableString("str1", "moo").requiredFloat("f").endRecord()
      val schema2 = SchemaBuilder.record("record2").fields().nullableString("str2", "foo").requiredFloat("f").endRecord()
      AvroSchemaMerge("finalname", "finalnamespace", List(schema1, schema2)) shouldBe
          SchemaBuilder.record("finalname").namespace("finalnamespace")
              .fields()
              .nullableString("str1", "moo")
              .requiredFloat("f")
              .nullableString("str2", "foo")
              .endRecord()
    }
  }
} 
Example 19
Source File: AvroSerializerTest.scala    From eel-sdk   with Apache License 2.0
package io.eels.component.avro

import io.eels.schema.{ArrayType, Field, IntType, StructType}
import io.eels.Row
import org.apache.avro.SchemaBuilder
import org.scalatest.{Matchers, WordSpec}
import scala.collection.JavaConverters._

class AvroSerializerTest extends WordSpec with Matchers {

  private val avroSchema = SchemaBuilder.record("row").fields().requiredString("s").requiredLong("l").requiredBoolean("b").endRecord()
  private val serializer = new RowSerializer(avroSchema)

  "AvroRecordMarshaller" should {
    "createReader field from values in row" in {
      val eelSchema = StructType(Field("s"), Field("l"), Field("b"))
      val record = serializer.serialize(Row(eelSchema, "a", 1L, false))
      record.get("s") shouldBe "a"
      record.get("l") shouldBe 1L
      record.get("b") shouldBe false
    }
    "only accept rows with same number of values as schema fields" in {
      intercept[IllegalArgumentException] {
        val eelSchema = StructType(Field("a"), Field("b"))
        serializer.serialize(Row(eelSchema, "a", 1L))
      }
      intercept[IllegalArgumentException] {
        val eelSchema = StructType(Field("a"), Field("b"), Field("c"), Field("d"))
        serializer.serialize(Row(eelSchema, "1", "2", "3", "4"))
      }
    }
    "support rows with a different ordering to the write schema" in {
      val eelSchema = StructType(Field("l"), Field("b"), Field("s"))
      val record = serializer.serialize(Row(eelSchema, 1L, false, "a"))
      record.get("s") shouldBe "a"
      record.get("l") shouldBe 1L
      record.get("b") shouldBe false
    }
    "convert strings to longs" in {
      val record = serializer.serialize(Row(AvroSchemaFns.fromAvroSchema(avroSchema), "1", "2", "true"))
      record.get("l") shouldBe 2L
    }
    "convert strings to booleans" in {
      val record = serializer.serialize(Row(AvroSchemaFns.fromAvroSchema(avroSchema), "1", "2", "true"))
      record.get("b") shouldBe true
    }
    "convert longs to strings" in {
      val record = serializer.serialize(Row(AvroSchemaFns.fromAvroSchema(avroSchema), 1L, "2", "true"))
      record.get("s") shouldBe "1"
    }
    "convert booleans to strings" in {
      val record = serializer.serialize(Row(AvroSchemaFns.fromAvroSchema(avroSchema), true, "2", "true"))
      record.get("s") shouldBe "true"
    }
    "support arrays" in {
      val schema = StructType(Field("a", ArrayType(IntType.Signed)))
      val serializer = new RowSerializer(AvroSchemaFns.toAvroSchema(schema))
      val record = serializer.serialize(Row(schema, Array(1, 2)))
      record.get("a").asInstanceOf[java.util.List[_]].asScala.toList shouldBe List(1, 2)
    }
    "support lists" in {
      val schema = StructType(Field("a", ArrayType(IntType.Signed)))
      val serializer = new RowSerializer(AvroSchemaFns.toAvroSchema(schema))
      val record = serializer.serialize(Row(schema, Array(1, 2)))
      record.get("a").asInstanceOf[java.util.List[_]].asScala.toList shouldBe List(1, 2)
    }
    "support sets" in {
      val schema = StructType(Field("a", ArrayType(IntType(true))))
      val serializer = new RowSerializer(AvroSchemaFns.toAvroSchema(schema))
      val record = serializer.serialize(Row(schema, Set(1, 2)))
      record.get("a").asInstanceOf[java.util.List[_]].asScala.toList shouldBe List(1, 2)
    }
    "support iterables" in {
      val schema = StructType(Field("a", ArrayType(IntType(true))))
      val serializer = new RowSerializer(AvroSchemaFns.toAvroSchema(schema))
      val record = serializer.serialize(Row(schema, Iterable(1, 2)))
      record.get("a").asInstanceOf[java.util.List[_]].asScala.toList shouldBe List(1, 2)
    }
  }
} 
Example 20
Source File: AvroDeserializerTest.scala    From eel-sdk   with Apache License 2.0
package io.eels.component.avro

import com.typesafe.config.ConfigFactory
import io.eels.Row
import io.eels.schema._
import org.apache.avro.generic.GenericData
import org.scalatest.{Matchers, WordSpec}

class AvroDeserializerTest extends WordSpec with Matchers {

  private val config = ConfigFactory.parseString(""" eel.avro.fillMissingValues = true """)

  "toRow" should {
    "create eel row from supplied avro record" in {
      val schema = StructType(Field("a", nullable = false), Field("b", nullable = false), Field("c", nullable = false))
      val record = new GenericData.Record(AvroSchemaFns.toAvroSchema(schema))
      record.put("a", "aaaa")
      record.put("b", "bbbb")
      record.put("c", "cccc")
      val row = new AvroDeserializer(true).toRow(record)
      row.schema shouldBe schema
      row shouldBe Row(schema, "aaaa", "bbbb", "cccc")
    }
    "support arrays" in {
      val schema = StructType(Field("a"), Field("b", ArrayType(BooleanType)))
      val record = new GenericData.Record(AvroSchemaFns.toAvroSchema(schema))
      record.put("a", "aaaa")
      record.put("b", Array(true, false))
      new AvroDeserializer().toRow(record).values.head shouldBe "aaaa"
      new AvroDeserializer().toRow(record).values.last.asInstanceOf[Array[Boolean]].toList shouldBe List(true, false)
    }
  }
} 
Example 21
Source File: GenericJdbcDialectTest.scala    From eel-sdk   with Apache License 2.0
package io.eels.component.jdbc

import java.sql.Types
import javax.sql.rowset.RowSetMetaDataImpl

import io.eels.component.jdbc.dialect.GenericJdbcDialect
import io.eels.schema._
import org.scalatest.{Matchers, WordSpec}

class GenericJdbcDialectTest extends WordSpec with Matchers {

  "GenericJdbcDialect.fromJdbcType" should {
    "convert int field to signed IntType" in {
      val meta = new RowSetMetaDataImpl
      meta.setColumnCount(1)
      meta.setColumnType(1, Types.INTEGER)
      new GenericJdbcDialect().fromJdbcType(1, meta) shouldBe IntType.Signed
    }
    "convert bigint field to signed LongType" in {
      val meta = new RowSetMetaDataImpl
      meta.setColumnCount(1)
      meta.setColumnType(1, Types.BIGINT)
      new GenericJdbcDialect().fromJdbcType(1, meta) shouldBe LongType.Signed
    }
  }

  "GenericJdbcDialect" should {
    "convert int field to int type" in {
      new GenericJdbcDialect().toJdbcType(Field("a", IntType(true), false)) shouldBe "int"
    }
    "convert Boolean field to int type" in {
      new GenericJdbcDialect().toJdbcType(Field("b", BooleanType, false)) shouldBe "boolean"
    }
    "convert short field to SMALLINT type" in {
      new GenericJdbcDialect().toJdbcType(Field("a", ShortType.Signed, false)) shouldBe "smallint"
    }
    "convert String field to text" in {
      new GenericJdbcDialect().toJdbcType(Field("a", StringType, false)) shouldBe "text"
    }
    "convert varchar field to VARCHAR using size" in {
      new GenericJdbcDialect().toJdbcType(Field("a", VarcharType(242), false)) shouldBe "varchar(242)"
    }
  }
} 
Example 22
Source File: HashPartitionStrategyTest.scala    From eel-sdk   with Apache License 2.0
package io.eels.component.jdbc

import java.sql.DriverManager

import org.scalatest.{Matchers, WordSpec}

class HashPartitionStrategyTest extends WordSpec with Matchers {

  Class.forName("org.h2.Driver")

  private val db = "hash_test"
  private val uri = s"jdbc:h2:mem:$db"
  private val conn = DriverManager.getConnection(uri)
  conn.createStatement().executeUpdate("create table hash_test (a integer)")
  for (k <- 0 until 20) {
    conn.createStatement().executeUpdate(s"insert into hash_test (a) values ($k)")
  }

  "HashPartitionStrategy" should {
    "return correct number of ranges" in {
      JdbcSource(() => DriverManager.getConnection(uri), "select * from hash_test")
        .withPartitionStrategy(HashPartitionStrategy("mod(a)", 10))
        .parts().size shouldBe 10
    }
    "return full and non overlapping data" in {
      JdbcSource(() => DriverManager.getConnection(uri), "select * from hash_test")
        .withPartitionStrategy(HashPartitionStrategy("mod(a, 10)", 10))
        .toDataStream().collect.flatMap(_.values).toSet shouldBe
        Vector.tabulate(20) { k => k }.toSet
    }
  }
} 
Example 23
Source File: JdbcSourceTest.scala    From eel-sdk   with Apache License 2.0
package io.eels.component.jdbc

import java.sql.DriverManager

import io.eels.schema._
import org.scalatest.{Matchers, WordSpec}

class JdbcSourceTest extends WordSpec with Matchers {

  Class.forName("org.h2.Driver")

  "JdbcSource" should {
    "read schema" in {
      val conn = DriverManager.getConnection("jdbc:h2:mem:test")
      conn.createStatement().executeUpdate("create table mytable (a integer, b bit, c bigint)")
      conn.createStatement().executeUpdate("insert into mytable (a,b,c) values ('1','2','3')")
      conn.createStatement().executeUpdate("insert into mytable (a,b,c) values ('4','5','6')")
      JdbcSource("jdbc:h2:mem:test", "select * from mytable").schema shouldBe
        StructType(
          Field("A", IntType(true), true),
          Field("B", BooleanType, true),
          Field("C", LongType.Signed, true)
        )
    }
    "use supplied query" in {
      val conn = DriverManager.getConnection("jdbc:h2:mem:test3")
      conn.createStatement().executeUpdate("create table mytable (a integer, b bit, c bigint)")
      conn.createStatement().executeUpdate("insert into mytable (a,b,c) values ('1','2','3')")
      conn.createStatement().executeUpdate("insert into mytable (a,b,c) values ('4','5','6')")
      JdbcSource(() => DriverManager.getConnection("jdbc:h2:mem:test3"), "select * from mytable where a=4").toDataStream().size shouldBe 1
      val a = JdbcSource("jdbc:h2:mem:test3", "select a,c from mytable where a=4").toDataStream().toVector
      a.head.values.head shouldBe 4
      a.head.values(1) shouldBe 6L
    }
    "read decimal precision and scale" in {
      val conn = DriverManager.getConnection("jdbc:h2:mem:decimal")
      conn.createStatement().executeUpdate("create table mytable (a decimal(15,5))")
      conn.createStatement().executeUpdate("insert into mytable (a) values (1.234)")
      val schema = JdbcSource(() => DriverManager.getConnection("jdbc:h2:mem:decimal"), "select * from mytable").schema
      schema shouldBe
        StructType(Vector(Field("A",DecimalType(Precision(15),Scale(5)))))
    }
    "read numeric precision and scale" in {
      val conn = DriverManager.getConnection("jdbc:h2:mem:numeric")
      conn.createStatement().executeUpdate("create table mytable (a numeric(3,2))")
      conn.createStatement().executeUpdate("insert into mytable (a) values (1.234)")
      val schema = JdbcSource(() => DriverManager.getConnection("jdbc:h2:mem:numeric"), "select * from mytable").schema
      schema shouldBe
        StructType(Vector(Field("A",DecimalType(Precision(3),Scale(2)))))
    }
    "read from jdbc" in {
      val conn = DriverManager.getConnection("jdbc:h2:mem:test4")
      conn.createStatement().executeUpdate("create table mytable (a integer, b bit, c bigint)")
      conn.createStatement().executeUpdate("insert into mytable (a,b,c) values ('1','2','3')")
      conn.createStatement().executeUpdate("insert into mytable (a,b,c) values ('4','5','6')")
      JdbcSource("jdbc:h2:mem:test4", "select * from mytable").toDataStream().size shouldBe 2
    }
    "support bind" in {
      val conn = DriverManager.getConnection("jdbc:h2:mem:test5")
      conn.createStatement().executeUpdate("create table mytable (a integer, b bit, c bigint)")
      conn.createStatement().executeUpdate("insert into mytable (a,b,c) values ('1','2','3')")
      conn.createStatement().executeUpdate("insert into mytable (a,b,c) values ('4','5','6')")
      JdbcSource("jdbc:h2:mem:test5", "select * from mytable where a=?").withBind { it =>
        it.setLong(1, 4)
      }.toDataStream().size shouldBe 1
    }
  }
} 
Example 24
Source File: RangePartitionStrategyTest.scala    From eel-sdk   with Apache License 2.0
package io.eels.component.jdbc

import java.sql.DriverManager

import org.scalatest.{Matchers, WordSpec}

import scala.util.Random

class RangePartitionStrategyTest extends WordSpec with Matchers {

  Class.forName("org.h2.Driver")

  private val conn = DriverManager.getConnection("jdbc:h2:mem:rangetest")
  conn.createStatement().executeUpdate("create table bucket_test (a integer)")
  for (k <- 0 until 20) {
    conn.createStatement().executeUpdate(s"insert into bucket_test (a) values (${Random.nextInt(10000)})")
  }

  "BucketPartitionStrategy" should {
    "generate evenly spaced ranges" in {
      RangePartitionStrategy("a", 10, 2, 29).ranges shouldBe List((2, 4), (5, 7), (8, 10), (11, 13), (14, 16), (17, 19), (20, 22), (23, 25), (26, 27), (28, 29))
      RangePartitionStrategy("a", 2, 2, 30).ranges shouldBe List((2, 16), (17, 30))
      RangePartitionStrategy("a", 1, 4, 5).ranges shouldBe List((4, 5))
      RangePartitionStrategy("a", 1, 4, 4).ranges shouldBe List((4, 4))
      RangePartitionStrategy("a", 6, 1, 29).ranges shouldBe List((1, 5), (6, 10), (11, 15), (16, 20), (21, 25), (26, 29))
    }
    "return correct number of ranges" in {
      JdbcSource(() => DriverManager.getConnection("jdbc:h2:mem:rangetest"), "select * from bucket_test")
        .withPartitionStrategy(RangePartitionStrategy("a", 4, 0, 10000))
        .parts().size shouldBe 4
    }
    "return full and non overlapping data" in {
      JdbcSource(() => DriverManager.getConnection("jdbc:h2:mem:rangetest"), "select * from bucket_test")
        .withPartitionStrategy(RangePartitionStrategy("a", 4, 0, 10000))
        .toDataStream().collect.size shouldBe 20
    }
  }
} 
Example 25
Source File: JsonSourceTest.scala    From eel-sdk   with Apache License 2.0
package io.eels.component.json

import io.eels.Row
import io.eels.schema.{ArrayType, Field, StringType, StructType}
import org.scalatest.{Matchers, WordSpec}

class JsonSourceTest extends WordSpec with Matchers {

  "JsonSource" should {
    "read multiple json docs from a file" in {
      val schema = StructType(Field("name"), Field("location"))
      JsonSource(() => getClass.getResourceAsStream("/io/eels/component/json/test.json")).toDataStream().toSet shouldBe
        Set(
          Row(schema, "sammy", "aylesbury"),
          Row(schema, "ant", "greece")
        )
    }
    "return schema for nested fields" in {
      JsonSource(() => getClass.getResourceAsStream("/io/eels/component/json/nested.json")).schema shouldBe
        StructType(
          Field("name", StringType),
          Field("alias", StringType),
          Field("friends", ArrayType(
            StructType(
              Field("name", StringType),
              Field("location", StringType)
            )
          ))
        )
    }
    "support all primitives" in {
      val schema = StructType(Field("int"), Field("double"), Field("long"), Field("boolean"))
      JsonSource(() => getClass.getResourceAsStream("/io/eels/component/json/prims.json")).toDataStream().toSet shouldBe
        Set(
          Row(schema, 145342, 369.235195, 10151589328923L, true)
        )
    }
    "support maps" in {
      val schema = StructType(Field("name"), Field("location"), Field("skills", StructType(Field("karate"), Field("chess"), Field("100m"))))
      JsonSource(() => getClass.getResourceAsStream("/io/eels/component/json/maps.json")).toDataStream().toSet shouldBe
        Set(
          Row(schema, Seq("sammy", "aylesbury", Map("karate" -> "black belt", "chess" -> "grandmaster", "100m" -> 9.23)))
        )
    }
    "support arrays" in {
      val schema = StructType(Field("name"), Field("skills", ArrayType(StringType)))
      JsonSource(() => getClass.getResourceAsStream("/io/eels/component/json/arrays.json")).toDataStream().toSet shouldBe
        Set(
          Row(schema, Seq("sammy", Seq("karate", "chess", "running")))
        )
    }
  }
} 
Example 26
Source File: JsonSinkTest.scala    From eel-sdk   with Apache License 2.0
package io.eels.component.json

import io.eels.datastream.DataStream
import io.eels.schema.{Field, StructType}
import org.apache.commons.io.IOUtils
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}
import org.scalatest.{Matchers, WordSpec}

class JsonSinkTest extends WordSpec with Matchers {

  val path = new Path("test.json")
  implicit val fs: FileSystem = FileSystem.get(new Configuration())

  "JsonSink" should {
    "write multiple json docs to a file" in {
      if (fs.exists(path))
        fs.delete(path, false)

      val schema = StructType(Field("name"), Field("location"))
      val ds = DataStream.fromValues(
        schema,
        Seq(
          Vector("sam", "aylesbury"),
          Vector("jam", "aylesbury"),
          Vector("ham", "buckingham")
        )
      )

      ds.to(JsonSink(path))
      val input = IOUtils.toString(fs.open(path))
      input should include("""{"name":"sam","location":"aylesbury"}""")
      input should include("""{"name":"jam","location":"aylesbury"}""")
      input should include("""{"name":"ham","location":"buckingham"}""")

      fs.delete(path, false)
    }
    "support arrays" in {
      if (fs.exists(path))
        fs.delete(path, false)

      val schema = StructType(Field("name"), Field("skills"))
      val frame = DataStream.fromValues(
        schema,
        Seq(Vector("sam", Array("karate", "kung fu")))
      )

      frame.to(JsonSink(path))
      val input = IOUtils.toString(fs.open(path))
      input.trim shouldBe """{"name":"sam","skills":["karate","kung fu"]}"""

      fs.delete(path, false)
    }
    "support maps" in {
      if (fs.exists(path))
        fs.delete(path, false)

      val schema = StructType(Field("name"), Field("locations"))
      val frame = DataStream.fromValues(
        schema,
        Seq(Vector("sam", Map("home" -> "boro", "work" -> "london")))
      )

      frame.to(JsonSink(path))
      val input = IOUtils.toString(fs.open(path))
      input.trim shouldBe """{"name":"sam","locations":{"home":"boro","work":"london"}}"""

      fs.delete(path, false)
    }
    "support structs" in {

      case class Foo(home: String, work: String)

      if (fs.exists(path))
        fs.delete(path, false)

      val schema = StructType(Field("name"), Field("locations"))
      val frame = DataStream.fromValues(
        schema,
        Seq(Vector("sam", Foo("boro", "london")))
      )

      frame.to(JsonSink(path))
      val input = IOUtils.toString(fs.open(path))
      input.trim shouldBe """{"name":"sam","locations":{"home":"boro","work":"london"}}"""

      fs.delete(path, false)
    }
  }
} 
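Each clause above repeats the exists/delete preamble and a trailing delete. A hedged refactoring sketch that centralises the cleanup with BeforeAndAfterEach, reusing the same path and FileSystem fields (an editorial suggestion with made-up class name, not code from eel-sdk):

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}
import org.scalatest.{BeforeAndAfterEach, Matchers, WordSpec}

class JsonSinkCleanupSketch extends WordSpec with Matchers with BeforeAndAfterEach {

  val path = new Path("test.json")
  implicit val fs: FileSystem = FileSystem.get(new Configuration())

  // Runs around every test, replacing the per-clause exists/delete boilerplate.
  override def beforeEach(): Unit = if (fs.exists(path)) fs.delete(path, false)
  override def afterEach(): Unit = if (fs.exists(path)) fs.delete(path, false)

  "JsonSink (sketch)" should {
    "start each test with a clean target file" in {
      fs.exists(path) shouldBe false
    }
  }
}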
Example 27
Source File: SequenceSourceTest.scala    From eel-sdk   with Apache License 2.0 5 votes vote down vote up
package io.eels.component.sequence

import io.eels.Row
import io.eels.datastream.DataStream
import io.eels.schema.{Field, StringType, StructType}
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import org.scalatest.{Matchers, WordSpec}

class SequenceSourceTest extends WordSpec with Matchers {

  private implicit val conf = new Configuration()

  private val schema = StructType(Field("name"), Field("location"))
  private val ds = DataStream.fromValues(
    schema,
    Seq(
      Vector("name", "location"),
      Vector("sam", "aylesbury"),
      Vector("jam", "aylesbury"),
      Vector("ham", "buckingham")
    )
  )

  "SequenceSource" should {
    "read sequence files" in {
      val schema = StructType(
        Field("a", StringType),
        Field("b", StringType),
        Field("c", StringType),
        Field("d", StringType)
      )
      val path = new Path(getClass.getResource("/test.seq").getFile)
      val rows = SequenceSource(path).toDataStream().toSet
      rows shouldBe Set(
        Row(schema, "1", "2", "3", "4"),
        Row(schema, "5", "6", "7", "8")
      )
    }
    "read header as schema" in {
      val path = new Path(getClass.getResource("/test.seq").getFile)
      SequenceSource(path).schema shouldBe StructType(
        Field("a", StringType),
        Field("b", StringType),
        Field("c", StringType),
        Field("d", StringType)
      )
    }
  }
} 
Example 28
Source File: SequenceSinkTest.scala    From eel-sdk   with Apache License 2.0 5 votes vote down vote up
package io.eels.component.sequence

import io.eels.datastream.DataStream
import io.eels.schema.StructType
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.hadoop.io.{BytesWritable, IntWritable, SequenceFile}
import org.scalatest.{Matchers, WordSpec}

class SequenceSinkTest extends WordSpec with Matchers {

  private val ds = DataStream.fromValues(
    StructType("a", "b", "c", "d"),
    Seq(
      List("1", "2", "3", "4"),
      List("5", "6", "7", "8")
    )
  )

  "SequenceSink" should {
    "write sequence files" in {

      implicit val conf = new Configuration
      implicit val fs = FileSystem.get(conf)

      val path = new Path("seqsink.seq")
      if (fs.exists(path))
        fs.delete(path, true)

      ds.to(SequenceSink(path))

      val reader = new SequenceFile.Reader(new Configuration, SequenceFile.Reader.file(path))

      val k = new IntWritable
      val v = new BytesWritable

      val set = for (_ <- 1 to 3) yield {
        reader.next(k, v)
        new String(v.copyBytes)
      }

      set.toSet shouldBe Set(
        "a,b,c,d",
        "1,2,3,4",
        "5,6,7,8"
      )

      reader.close()

      fs.delete(path, true)
    }
  }
} 
Example 29
Source File: AvroParquetSinkTest.scala    From eel-sdk   with Apache License 2.0 5 votes vote down vote up
package io.eels.component.parquet

import io.eels.Row
import io.eels.component.parquet.avro.{AvroParquetSink, AvroParquetSource}
import io.eels.component.parquet.util.ParquetLogMute
import io.eels.datastream.DataStream
import io.eels.schema.{Field, StringType, StructType}
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}
import org.scalatest.{Matchers, WordSpec}

class AvroParquetSinkTest extends WordSpec with Matchers {
  ParquetLogMute()

  private val schema = StructType(
    Field("name", StringType, nullable = false),
    Field("job", StringType, nullable = false),
    Field("location", StringType, nullable = false)
  )
  private val ds = DataStream.fromValues(
    schema,
    Seq(
      Vector("clint eastwood", "actor", "carmel"),
      Vector("elton john", "musician", "pinner")
    )
  )

  private implicit val conf = new Configuration()
  private implicit val fs = FileSystem.get(new Configuration())
  private val path = new Path("test.pq")

  "ParquetSink" should {
    "write schema" in {
      if (fs.exists(path))
        fs.delete(path, false)
      ds.to(AvroParquetSink(path))
      val people = ParquetSource(path)
      people.schema shouldBe StructType(
        Field("name", StringType, false),
        Field("job", StringType, false),
        Field("location", StringType, false)
      )
      fs.delete(path, false)
    }
    "write data" in {
      if (fs.exists(path))
        fs.delete(path, false)
      ds.to(AvroParquetSink(path))
      AvroParquetSource(path).toDataStream().toSet.map(_.values) shouldBe
        Set(
          Vector("clint eastwood", "actor", "carmel"),
          Vector("elton john", "musician", "pinner")
        )
      fs.delete(path, false)
    }
    "support overwrite" in {

      val path = new Path("overwrite_test.pq")
      fs.delete(path, false)

      val schema = StructType(Field("a", StringType))
      val ds = DataStream.fromRows(schema,
        Row(schema, Vector("x")),
        Row(schema, Vector("y"))
      )

      ds.to(AvroParquetSink(path))
      ds.to(AvroParquetSink(path).withOverwrite(true))
      fs.delete(path, false)
    }
  }
} 
Example 30
Source File: AvroParquetReaderFnTest.scala    From eel-sdk   with Apache License 2.0 5 votes vote down vote up
package io.eels.component.parquet

import java.util.UUID

import io.eels.component.avro.AvroSchemaFns
import io.eels.component.parquet.avro.AvroParquetReaderFn
import io.eels.schema.{DoubleType, Field, LongType, StructType}
import org.apache.avro.SchemaBuilder
import org.apache.avro.generic.{GenericData, GenericRecord}
import org.apache.avro.util.Utf8
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.parquet.avro.AvroParquetWriter
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpec}

class AvroParquetReaderFnTest extends WordSpec with Matchers with BeforeAndAfterAll {

  private implicit val conf = new Configuration()
  private implicit val fs = FileSystem.get(new Configuration())

  private val path = new Path(UUID.randomUUID().toString())

  override def afterAll(): Unit = {
    val fs = FileSystem.get(new Configuration())
    fs.delete(path, false)
  }

  private val avroSchema = SchemaBuilder.record("com.chuckle").fields()
    .requiredString("str").requiredLong("looong").requiredDouble("dooble").endRecord()

  private val writer = AvroParquetWriter.builder[GenericRecord](path)
    .withSchema(avroSchema)
    .build()

  private val record = new GenericData.Record(avroSchema)
  record.put("str", "wibble")
  record.put("looong", 999L)
  record.put("dooble", 12.34)
  writer.write(record)
  writer.close()

  val schema = StructType(Field("str"), Field("looong", LongType(true), true), Field("dooble", DoubleType, true))

  "AvroParquetReaderFn" should {
    "support projections on doubles" in {

      val reader = AvroParquetReaderFn(path, None, Option(AvroSchemaFns.toAvroSchema(schema.removeField("looong"))))
      val record = reader.read()
      reader.close()

      record.get("str").asInstanceOf[Utf8].toString shouldBe "wibble"
      record.get("dooble") shouldBe 12.34
    }
    "support projections on longs" in {

      val reader = AvroParquetReaderFn(path, None, Option(AvroSchemaFns.toAvroSchema(schema.removeField("str"))))
      val record = reader.read()
      reader.close()

      record.get("looong") shouldBe 999L
    }
    "support full projections" in {

      val reader = AvroParquetReaderFn(path, None, Option(AvroSchemaFns.toAvroSchema(schema)))
      val record = reader.read()
      reader.close()

      record.get("str").asInstanceOf[Utf8].toString shouldBe "wibble"
      record.get("looong") shouldBe 999L
      record.get("dooble") shouldBe 12.34

    }
    "support non projections" in {

      val reader = AvroParquetReaderFn(path, None, None)
      val group = reader.read()
      reader.close()

      group.get("str").asInstanceOf[Utf8].toString shouldBe "wibble"
      group.get("looong") shouldBe 999L
      group.get("dooble") shouldBe 12.34

    }
  }
} 
Example 31
Source File: CsvSourceTest.scala    From eel-sdk   with Apache License 2.0 5 votes vote down vote up
package io.eels.component.csv

import java.nio.file.Paths

import io.eels.schema.{Field, StringType, StructType}
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.FileSystem
import org.scalatest.{Matchers, WordSpec}

class CsvSourceTest extends WordSpec with Matchers {

  implicit val conf = new Configuration()
  implicit val fs = FileSystem.getLocal(conf)

  "CsvSource" should {
    "read schema" in {
      val file = getClass.getResource("/io/eels/component/csv/csvtest.csv").toURI()
      val path = Paths.get(file)
      CsvSource(path).schema shouldBe StructType(
        Field("a", StringType, true),
        Field("b", StringType, true),
        Field("c", StringType, true)
      )
    }
    "support null cell value option as null" in {
      val file = getClass.getResource("/io/eels/component/csv/csvwithempty.csv").toURI()
      val path = Paths.get(file)
      CsvSource(path).withNullValue(null).toDataStream().toSet.map(_.values) shouldBe
        Set(Vector("1", null, "3"))
    }
    "support null cell value replacement value" in {
      val file = getClass.getResource("/io/eels/component/csv/csvwithempty.csv").toURI()
      val path = Paths.get(file)
      CsvSource(path).withNullValue("foo").toDataStream().toSet.map(_.values) shouldBe
        Set(Vector("1", "foo", "3"))
    }
    "read from path" in {
      val file = getClass.getResource("/io/eels/component/csv/csvtest.csv").toURI()
      val path = Paths.get(file)
      CsvSource(path).withHeader(Header.FirstRow).toDataStream().size shouldBe 3
      CsvSource(path).withHeader(Header.None).toDataStream().size shouldBe 4
    }
    "allow specifying manual schema" in {
      val file = getClass.getResource("/io/eels/component/csv/csvtest.csv").toURI()
      val path = Paths.get(file)
      val schema = StructType(
        Field("test1", StringType, true),
        Field("test2", StringType, true),
        Field("test3", StringType, true)
      )
      CsvSource(path).withSchema(schema).toDataStream().schema shouldBe schema
    }
    "support reading header" in {
      val file = getClass.getResource("/io/eels/component/csv/csvtest.csv").toURI()
      val path = Paths.get(file)
      CsvSource(path).withHeader(Header.FirstRow).toDataStream().collect.map(_.values).toSet shouldBe
        Set(Vector("e", "f", "g"), Vector("1", "2", "3"), Vector("4", "5", "6"))
    }
    "support skipping header" in {
      val file = getClass.getResource("/io/eels/component/csv/csvtest.csv").toURI()
      val path = Paths.get(file)
      CsvSource(path).withHeader(Header.None).toDataStream().toSet.map(_.values) shouldBe
        Set(Vector("a", "b", "c"), Vector("e", "f", "g"), Vector("1", "2", "3"), Vector("4", "5", "6"))
    }
    "support delimiters" in {
      val file = getClass.getResource("/io/eels/component/csv/psv.psv").toURI()
      val path = Paths.get(file)
      CsvSource(path).withDelimiter('|').toDataStream().collect.map(_.values).toSet shouldBe
        Set(Vector("e", "f", "g"))
      CsvSource(path).withDelimiter('|').withHeader(Header.None).toDataStream().toSet.map(_.values) shouldBe
        Set(Vector("a", "b", "c"), Vector("e", "f", "g"))
    }
    "support comments for headers" in {
      val file = getClass.getResource("/io/eels/component/csv/comments.csv").toURI()
      val path = Paths.get(file)
      CsvSource(path).withHeader(Header.FirstComment).schema shouldBe StructType(
        Field("a", StringType, true),
        Field("b", StringType, true),
        Field("c", StringType, true)
      )
      CsvSource(path).withHeader(Header.FirstComment).toDataStream().toSet.map(_.values) shouldBe
        Set(Vector("1", "2", "3"), Vector("e", "f", "g"), Vector("4", "5", "6"))
    }
    "terminate if asking for first comment but no comments" in {
      val file = getClass.getResource("/io/eels/component/csv/csvtest.csv").toURI()
      val path = Paths.get(file)
      CsvSource(path).withHeader(Header.FirstComment).schema shouldBe StructType(
        Field("", StringType, true)
      )
    }
    "support skipping corrupt rows" ignore {
      val file = getClass.getResource("/io/eels/component/csv/corrupt.csv").toURI()
      val path = Paths.get(file)
      CsvSource(path).withHeader(Header.FirstRow).toDataStream().toVector.map(_.values) shouldBe
        Vector(Vector("1", "2", "3"))
    }
  }
} 
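Taken together, the header and skip-header assertions fully determine the fixture, so csvtest.csv presumably contains these four lines (an inference from the expected row sets, not the repository file):

a,b,c
e,f,g
1,2,3
4,5,6

With Header.FirstRow the first line becomes the schema (a, b, c) and three data rows remain; with Header.None all four lines are data.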
Example 32
Source File: CsvSourceTypeConversionTest.scala    From eel-sdk   with Apache License 2.0 5 votes vote down vote up
package io.eels.component.csv

import java.io.ByteArrayInputStream
import java.nio.charset.StandardCharsets

import io.eels.schema._
import org.scalatest.{Ignore, Matchers, WordSpec}

@Ignore
class CsvSourceTypeConversionTest extends WordSpec with Matchers {
  "CsvSource" should {
    "read schema" in {
      val exampleCsvString =
        """A,B,C,D
          |1,2.2,3,foo
          |4,5.5,6,bar
        """.stripMargin

      val stream = new ByteArrayInputStream(exampleCsvString.getBytes(StandardCharsets.UTF_8))
      val schema = new StructType(Vector(
        Field("A", IntType.Signed),
        Field("B", DoubleType),
        Field("C", IntType.Signed),
        Field("D", StringType)
      ))
      val source = new CsvSource(() => stream)
        .withSchema(schema)
      
      source.schema.fields.foreach(println)
      val ds = source.toDataStream()
      val firstRow = ds.iterator.toIterable.head
      val firstRowA = firstRow.get("A")
      println(firstRowA) // prints 1 as expected
      println(firstRowA.getClass.getTypeName) // prints java.lang.String
      assert(firstRowA == 1) // this assertion will fail because firstRowA is not an Int
    }
  }
} 
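The suite is @Ignore'd because, as its inline comments note, CsvSource hands cells back as strings even when the schema declares IntType. A minimal workaround sketch; the firstRowA value below is a stand-in for what ds.iterator returns, used purely for illustration:

object CsvCoercionSketch extends App {
  // Stand-in for the cell CsvSource hands back: typed as Any, backed by a String.
  val firstRowA: Any = "1"
  // Explicit coercion restores the Int the declared schema promised.
  val a: Int = firstRowA.toString.toInt
  assert(a == 1) // passes, unlike the ignored assertion above
}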
Example 33
Source File: StructTypeInferrerTest.scala    From eel-sdk   with Apache License 2.0 5 votes vote down vote up
package io.eels.schema

import java.nio.file.Paths

import io.eels.component.csv.{CsvSource, Header}
import io.eels.{DataTypeRule, SchemaInferrer}
import org.scalatest.{Matchers, WordSpec}

class StructTypeInferrerTest extends WordSpec with Matchers {

  val file = getClass.getResource("/io/eels/component/csv/csvtest.csv").toURI()
  val path = Paths.get(file)

  "SchemaInferrer" should {
    "use rules to infer column types" in {
      val inferrer = SchemaInferrer(StringType, DataTypeRule("a", IntType(true), false), DataTypeRule("b", BooleanType))
      CsvSource(path).withHeader(Header.FirstRow).withSchemaInferrer(inferrer).schema shouldBe StructType(
        Field("a", IntType(true), false),
        Field("b", BooleanType, true),
        Field("c", StringType, true)
      )
    }
  }
} 
Example 34
Source File: DecimalTypeTest.scala    From eel-sdk   with Apache License 2.0 5 votes vote down vote up
package io.eels.schema

import org.scalatest.{Matchers, WordSpec}

class DecimalTypeTest extends WordSpec with Matchers {

  "decimal type" should {
    "match on wildcard" in {
      DecimalType(Precision(2), Scale(1)) matches DecimalType(Precision(-1), Scale(1)) shouldBe true
      DecimalType(Precision(2), Scale(1)) matches DecimalType(Precision(2), Scale(-1)) shouldBe true
      DecimalType(Precision(2), Scale(1)) matches DecimalType(Precision(-1), Scale(-1)) shouldBe true
      DecimalType(Precision(2), Scale(1)) matches DecimalType.Wildcard shouldBe true
    }
    "match on values" in {
      DecimalType(Precision(2), Scale(1)) matches DecimalType(Precision(2), Scale(1)) shouldBe true
      DecimalType(Precision(2), Scale(1)) matches DecimalType(Precision(3), Scale(3)) shouldBe false
      DecimalType(Precision(2), Scale(1)) matches DecimalType(Precision(2), Scale(2)) shouldBe false
    }
  }
} 
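The wildcard assertions suggest that a precision or scale of -1 matches anything. A minimal sketch of that rule, assuming Precision and Scale are plain Int wrappers; it illustrates the semantics the spec encodes, not eel-sdk's actual implementation:

case class Precision(value: Int)
case class Scale(value: Int)

case class DecimalType(precision: Precision, scale: Scale) {
  // -1 in either position acts as "match anything".
  def matches(other: DecimalType): Boolean =
    (other.precision.value == -1 || other.precision.value == precision.value) &&
      (other.scale.value == -1 || other.scale.value == scale.value)
}

object DecimalType {
  val Wildcard = DecimalType(Precision(-1), Scale(-1))
}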
Example 35
Source File: ShowSchemaMainTest.scala    From eel-sdk   with Apache License 2.0 5 votes vote down vote up
package io.eels.cli

import java.io.{ByteArrayOutputStream, PrintStream}

import org.scalatest.{Matchers, WordSpec}

class ShowSchemaMainTest extends WordSpec with Matchers {

  "SchemaMain" should {
    "display schema for specified avro source" in {
      val baos = new ByteArrayOutputStream
      val out = new PrintStream(baos)
      ShowSchemaMain(Seq("--source", "avro:" + getClass.getResource("/test.avro").getFile), out)
      new String(baos.toByteArray).trim shouldBe """{"type":"record","name":"row","namespace":"namespace","fields":[{"name":"name","type":"string"},{"name":"job","type":"string"},{"name":"location","type":"string"}]}"""
    }
  }
} 
Example 36
Source File: HiveDatasetUriTest.scala    From eel-sdk   with Apache License 2.0 5 votes vote down vote up
package io.eels.component.hive

import org.scalatest.{WordSpec, Matchers}

class HiveDatasetUriTest extends WordSpec with Matchers {

  "HiveDatasetUri" should {
    "match string" in {
      HiveDatasetUri("hive:mydb:mytab") shouldBe HiveDatasetUri("mydb", "mytab")
    }
    "unapply string" in {
      "hive:mydb:mytab" match {
        case HiveDatasetUri(db, table) =>
          db shouldBe "mydb"
          table shouldBe "mytab"
        case _ => sys.error("failure")
      }
    }
  }
} 
Example 37
Source File: StateSpec.scala    From learning-fpinscala   with MIT License 5 votes vote down vote up
package com.satansk.fpinscala.state

import org.scalatest.{Matchers, WordSpec}


class StateSpec extends WordSpec with Matchers {
  import com.satansk.fpinscala.state.RNG._

  val rng = SimpleRNG(1)

  "SimpleRNG" should {
    "generates same random number given the same RNG" in {
      val (v1, _) = rng.nextInt
      val (v2, _) = rng.nextInt
      v1 shouldEqual v2
    }

    "generates different random numbers given different RNGs" in {
      val (v1, r1) = rng.nextInt
      val (v2, _) = r1.nextInt
      v1 should not equal v2
    }
  }

  "randomPairSame" should {
    "generates a pair with same value" in {
      val (v1, v2) = randomPairSame(rng)
      v1 shouldEqual v2
    }
  }

  "randomPair" should {
    "generates a pair with different values" in {
      val (v1, v2) = randomPair(rng)
      v1 should not equal v2
    }
  }

  "double" should {
    "generates double in [0, 1)" in {
      double(rng)._1 shouldEqual 1.7916224896907806E-4
    }
  }

  "ints" should {
    "generates a list of ints with the given number" in {
      ints(1)(rng)._1 shouldEqual List(384748)
      ints(2)(rng)._1 shouldEqual List(-1151252339, 384748)
      ints(3)(rng)._1 shouldEqual List(-549383847, -1151252339, 384748)
    }
  }

  "ints2" should {
    "generates a list of ints with the given number" in {
      ints2(1)(rng)._1 shouldEqual List(384748)
      ints2(2)(rng)._1 shouldEqual List(384748, -1151252339)
      ints2(3)(rng)._1 shouldEqual List(384748, -549383847, -1151252339)
    }
  }

  "doubleViaMap" should {
    "generates double in [0, 1)" in {
      doubleViaMap(rng)._1 shouldEqual 1.7916224896907806E-4
    }
  }

  "map2" should {
    "both" in {
      both(nonNegativeInt, double)(rng)._1 shouldEqual (384748, 0.5360936461947858)
    }
  }

  "intsViaSequence" should {
    "generates a list of ints with the given number" in {
      intsViaSequence(1)(rng)._1 shouldEqual List(384748)
      intsViaSequence(2)(rng)._1 shouldEqual List(-1151252339, 384748)
      intsViaSequence(3)(rng)._1 shouldEqual List(-549383847, -1151252339, 384748)
    }
  }

  "nonNegativeLessThan" should {
    "generates random numbers in [0, n)" in {
      nonNegativeLessThan(1)(rng)._1 shouldEqual 0
      nonNegativeLessThan(10)(rng)._1 shouldEqual 8
    }
  }
} 
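The exact expected values (384748 and friends) only make sense because SimpleRNG is a pure, deterministic generator. For context, the canonical "FP in Scala" implementation looks like this (a sketch; the repository behind the import may differ in detail):

trait RNG { def nextInt: (Int, RNG) }

case class SimpleRNG(seed: Long) extends RNG {
  def nextInt: (Int, RNG) = {
    // Linear congruential step, same constants as java.util.Random.
    val newSeed = (seed * 0x5DEECE66DL + 0xBL) & 0xFFFFFFFFFFFFL
    val nextRNG = SimpleRNG(newSeed)
    val n = (newSeed >>> 16).toInt // drop the low bits, keep the high 32
    (n, nextRNG)
  }
}

Because the next state is returned rather than mutated, calling nextInt twice on the same rng necessarily yields the same value, which is exactly what the first clause asserts.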
Example 38
Source File: TreeSpec.scala    From learning-fpinscala   with MIT License 5 votes vote down vote up
package com.satansk.fpinscala.datastructures

import org.scalatest.{Matchers, WordSpec}


class TreeSpec extends WordSpec with Matchers {
  import com.satansk.fpinscala.datastructures.Tree._

  "sizet" should {
    "return sum of leaf and branch" in {
      sizet(Branch(Leaf(1), Leaf(2))) shouldEqual 3
      sizet(Leaf(2)) shouldEqual 1
    }
  }

  "maximum" should {
    "return the max element in the tree" in {
      maximum(Branch(Leaf(1), Leaf(2))) shouldEqual 2
      maximum(Branch(Branch(Leaf(1), Leaf(2)), Branch(Leaf(40), Leaf(0)))) shouldEqual 40
      maximum(Branch(Branch(Leaf(1), Leaf(2)), Leaf(40))) shouldEqual 40
    }
  }

  "depth" should {
    "return the max depth of a tree" in {
      depth(Branch(Leaf(1), Leaf(2))) shouldEqual 1
      depth(Branch(Branch(Leaf(1), Leaf(2)), Leaf(40))) shouldEqual 2
    }
  }

  "map" should {
    "apply f to all element" in {
      map(Leaf(1))(_ ⇒ "A") shouldEqual Leaf("A")
      map(Branch(Branch(Leaf(1), Leaf(2)), Leaf(40)))(_ + 1) shouldEqual Branch(Branch(Leaf(2), Leaf(3)), Leaf(41))
    }
  }

  "fold" should {
    "be able to form size" in {
      sizeViaFold(Branch(Leaf(1), Leaf(2))) shouldEqual 3
      sizeViaFold(Leaf(2)) shouldEqual 1
    }

    "be able to form maximum" in {
      maximumViaFold(Branch(Leaf(1), Leaf(2))) shouldEqual 2
      maximumViaFold(Branch(Branch(Leaf(1), Leaf(2)), Branch(Leaf(40), Leaf(0)))) shouldEqual 40
      maximumViaFold(Branch(Branch(Leaf(1), Leaf(2)), Leaf(40))) shouldEqual 40
    }

    "be able to form depth" in {
      depthViaFold(Branch(Leaf(1), Leaf(2))) shouldEqual 1
      depthViaFold(Branch(Branch(Leaf(1), Leaf(2)), Leaf(40))) shouldEqual 2
    }

    "be able to form map" in {
      mapViaFold(Leaf(1))(_ ⇒ "A") shouldEqual Leaf("A")
      mapViaFold(Branch(Branch(Leaf(1), Leaf(2)), Leaf(40)))(_ + 1) shouldEqual Branch(Branch(Leaf(2), Leaf(3)), Leaf(41))

      mapViaFold2(Leaf(1))(_ ⇒ "A") shouldEqual Leaf("A")
      mapViaFold2(Branch(Branch(Leaf(1), Leaf(2)), Leaf(40)))(_ + 1) shouldEqual Branch(Branch(Leaf(2), Leaf(3)), Leaf(41))
    }
  }

} 
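For context, a minimal sketch of the ADT and the fold the clauses above exercise, assuming the standard "FP in Scala" shape behind the import (the repository's code may differ in detail):

sealed trait Tree[+A]
case class Leaf[A](value: A) extends Tree[A]
case class Branch[A](left: Tree[A], right: Tree[A]) extends Tree[A]

object TreeSketch {
  // One traversal to rule them all: f folds a leaf, g combines two branches.
  def fold[A, B](t: Tree[A])(f: A => B)(g: (B, B) => B): B = t match {
    case Leaf(v)      => f(v)
    case Branch(l, r) => g(fold(l)(f)(g), fold(r)(f)(g))
  }

  // Every leaf and branch counts as one node: Branch(Leaf(1), Leaf(2)) gives 3.
  def sizeViaFold[A](t: Tree[A]): Int = fold(t)(_ => 1)(1 + _ + _)

  // Depth of a lone leaf is 0; each branch adds one level.
  def depthViaFold[A](t: Tree[A]): Int = fold(t)(_ => 0)((l, r) => 1 + (l max r))
}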
Example 39
Source File: EitherSpec.scala    From learning-fpinscala   with MIT License 5 votes vote down vote up
package com.satansk.fpinscala.errorhandling

import org.scalatest.{Matchers, WordSpec}


class EitherSpec extends WordSpec with Matchers {
  import com.satansk.fpinscala.errorhandling.EitherT._

  "map" should {
    "work fine with Left and Right" in {
      Left("xxx") map (x ⇒ x) shouldEqual Left("xxx")
      Right(10) map (x ⇒ x * 3) shouldEqual Right(30)
    }
  }

  "flatMap" should {
    "work fine with Left and Right" in {
      Left("xxx") flatMap (x ⇒ x) shouldEqual Left("xxx")
      Right(10) flatMap (x ⇒ Right(x * 3)) shouldEqual Right(30)
    }
  }

  "orElse" should {
    "return the default Either on Left" in {
      Left("xxx") orElse Right(10) shouldEqual Right(10)
    }
    "return the original Either on Right" in {
      Right(10) orElse Right(-10) shouldEqual Right(10)
    }
  }

  "map2" should {
    "be able to convert parameters to Option" in {
      (Right("A") map2 Right(101)) (_ + _) shouldEqual Right("A101")
      (Right("A") map2 Left("xxx")) (_ + _) shouldEqual Left("xxx")
    }
  }

  "map2ViaFor" should {
    "be able to convert parameters to Option" in {
      (Right("A") map2ViaFor Right(101)) (_ + _) shouldEqual Right("A101")
      (Right("A") map2ViaFor Left("xxx")) (_ + _) shouldEqual Left("xxx")
    }
  }

  "sequence" should {
    "return Some(Nil) if the given Option list contains None" in {
      sequence(Left("xxx") :: Nil) shouldEqual Left("xxx")
      sequence(List(Right("A"), Right("B"), Left(101), Left(102))) shouldEqual Left(101)
    }
    "convert List[Option] to Option[List]" in {
      sequence(List(Right("A"), Right("B"), Right(101))) shouldEqual Right(List("A", "B", 101))
    }
  }

  "sequence2" should {
    "return Some(Nil) if the given Option list contains None" in {
      sequence2(Left("xxx") :: Nil) shouldEqual Left("xxx")
      sequence2(List(Right("A"), Right("B"), Left(101), Left(102))) shouldEqual Left(101)
    }
    "convert List[Option] to Option[List]" in {
      sequence2(List(Right("A"), Right("B"), Right(101))) shouldEqual Right(List("A", "B", 101))
    }
  }

  "traverse" should {
    "work fine with empty list" in {
      traverse(Nil)(Right(_)) shouldEqual Right(Nil)
    }
    "work fine on non empty list" in {
      traverse(List(1, 2, 3, 4))(Right(_)) shouldEqual Right(List(1, 2, 3, 4))
    }
    "return None if one element get None after applied f" in {
      traverse(List(1, 2, 3, 4))(x ⇒ if (x % 2 == 0) Right(x) else Left(x)) shouldEqual Left(1)
    }
  }

  "traverse2" should {
    "work fine with empty list" in {
      traverse2(Nil)(Right(_)) shouldEqual Right(Nil)
    }
    "work fine on non empty list" in {
      traverse2(List(1, 2, 3, 4))(Right(_)) shouldEqual Right(List(1, 2, 3, 4))
    }
    "return None if one element get None after applied f" in {
      traverse2(List(1, 2, 3, 4))(x ⇒ if (x % 2 == 0) Right(x) else Left(x)) shouldEqual Left(1)
    }
  }

} 
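A hedged sketch of traverse with the behaviour the spec pins down, namely that the first Left encountered wins. It is written against Scala's built-in right-biased Either (2.12+) rather than the repository's EitherT:

object EitherTraverseSketch {
  def traverse[E, A, B](as: List[A])(f: A => Either[E, B]): Either[E, List[B]] =
    as.foldRight[Either[E, List[B]]](Right(Nil)) { (a, acc) =>
      for { b <- f(a); bs <- acc } yield b :: bs
    }

  // sequence is just traverse with the identity function.
  def sequence[E, A](es: List[Either[E, A]]): Either[E, List[A]] =
    traverse(es)(identity)
}

// EitherTraverseSketch.traverse(List(1, 2, 3, 4))(x => if (x % 2 == 0) Right(x) else Left(x))
// evaluates to Left(1), mirroring the last assertion above.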
Example 40
Source File: ImplicitsSpec.scala    From amadou   with Apache License 2.0 5 votes vote down vote up
package com.mediative.amadou

import org.scalatest.{WordSpec, Matchers}

object SparkImplicitsSpec {
  case class Account(id: Int, name: String, externalId: String)
}

class SparkImplicitsSpec extends WordSpec with Matchers with SparkJobSuiteBase {
  import SparkImplicitsSpec._

  "SparkColumnOps.isDefined" should {
    "filter values which are null, empty or the string 'null'" in {
      import spark.implicits._

      val accounts = List(
        Account(0, "Account #0", ""),
        Account(1, "Account #1", "00000000001"),
        Account(2, "Account #2", "null"),
        Account(3, "Account #3", "Not null"),
        Account(4, "Account #4", null),
        Account(5, "Account #5", "00000000002")
      )
      val data = spark.createDataset(accounts)
      data.count shouldBe 6

      val filtered = data.filter($"externalId".isDefined)
      filtered.collect.toList shouldBe List(
        Account(1, "Account #1", "00000000001"),
        Account(3, "Account #3", "Not null"),
        Account(5, "Account #5", "00000000002")
      )
    }
  }

  "SparkColumnOps.nullify" should {
    "turn 'null' string values into null" in {
      import spark.implicits._

      val accounts = List(
        Account(0, "Account #0", ""),
        Account(1, "Account #1", "00000000001"),
        Account(2, "Account #2", "null"),
        Account(3, "Account #3", "Not null"),
        Account(4, "Account #4", null),
        Account(5, "Account #5", "00000000002")
      )
      val data = spark.createDataset(accounts)
      data.count shouldBe 6

      val filtered = data
        .select(
          $"id",
          $"name".nullify as "name",
          $"externalId".nullify as "externalId"
        )
        .as[Account]

      filtered.collect.toList shouldBe List(
        Account(0, "Account #0", ""),
        Account(1, "Account #1", "00000000001"),
        Account(2, "Account #2", null),
        Account(3, "Account #3", "Not null"),
        Account(4, "Account #4", null),
        Account(5, "Account #5", "00000000002")
      )
    }
  }
} 
Example 41
Source File: ConfigLoaderSpec.scala    From amadou   with Apache License 2.0 5 votes vote down vote up
package com.mediative.amadou

import org.scalatest.{WordSpec, Matchers}
import com.typesafe.config.ConfigFactory
import java.util.Properties

object ConfigLoaderSpec {
  case class Database(url: String, properties: Properties)
}

class ConfigLoaderSpec extends WordSpec with Matchers with ConfigLoader {
  import ConfigLoaderSpec.Database

  "propertiesValueReader" should {
    "load from given path" in {
      val config =
        ConfigFactory.parseString("""
        database {
          url = "jdbc:postgresql:testdb"
          properties = src/test/resources/config-reader-spec.properties
        }
      """)
      val db = config.as[Database]("database")
      db.properties.size should be(2)
      db.properties.getProperty("user") should be("john")
      db.properties.getProperty("pass") should be("secret")
    }

    "be empty when no path is given" in {
      val config = ConfigFactory.parseString("""
        database.url = "jdbc:postgresql:testdb"
      """)
      val db     = config.as[Database]("database")
      db.properties.isEmpty should be(true)
    }

    "fail when given path does not exist" in {
      val config =
        ConfigFactory.parseString("""
        database {
          url = "jdbc:postgresql:testdb"
          properties = src/test/resources/doesn-not-exists.properties
        }
      """)

      the[java.io.FileNotFoundException] thrownBy {
        config.as[Database]("database")
      } should have message "src/test/resources/doesn-not-exists.properties (No such file or directory)"
    }
  }
} 
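The first test implies the fixture's exact contents: src/test/resources/config-reader-spec.properties presumably holds just these two entries (inferred from the size and getProperty assertions, not copied from the repository):

user=john
pass=secret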
Example 42
Source File: EvaluatorSpec.scala    From flink-jpmml   with GNU Affero General Public License v3.0 5 votes vote down vote up
package io.radicalbit.flink.pmml.scala.api

import io.radicalbit.flink.pmml.scala.api.exceptions.EmptyEvaluatorException
import io.radicalbit.flink.pmml.scala.utils.{PmmlEvaluatorKit, PmmlLoaderKit}
import org.jpmml.evaluator.ModelEvaluatorFactory
import org.scalatest.{Matchers, WordSpec}

class EvaluatorSpec extends WordSpec with Matchers with PmmlEvaluatorKit with PmmlLoaderKit {

  private val pmmlModel = ModelEvaluatorFactory.newInstance.newModelEvaluator(getPMMLResource(Source.KmeansPmml))
  private val pmmlEvaluator = Evaluator(pmmlModel)

  private val emptyEvaluator =
    Evaluator.empty

  "Evaluator" should {

    "have right box called PmmlEvaluator and with right model" in {
      pmmlEvaluator shouldBe PmmlEvaluator(pmmlModel)
    }

    "have right empty box called EmptyEvaluator and with right value" in {
      emptyEvaluator shouldBe EmptyEvaluator
    }

    "have a model method that return the pmml model" in {
      pmmlEvaluator.model shouldBe pmmlModel
    }

    "have a getOrElse method that return pmml model" in {
      emptyEvaluator.getOrElse(pmmlModel) shouldBe pmmlModel
      pmmlEvaluator.getOrElse(pmmlModel) shouldBe pmmlModel
    }

    "throw an EmptyEvaluatorException if call model on emptyEvaluator" in {
      an[EmptyEvaluatorException] should be thrownBy emptyEvaluator.model
    }
  }
} 
Example 43
Source File: MetadataManagerSpec.scala    From flink-jpmml   with GNU Affero General Public License v3.0 5 votes vote down vote up
package io.radicalbit.flink.pmml.scala.api.managers

import io.radicalbit.flink.pmml.scala.models.control.{AddMessage, DelMessage}
import io.radicalbit.flink.pmml.scala.models.core.{ModelId, ModelInfo}
import io.radicalbit.flink.pmml.scala.utils.PmmlLoaderKit
import org.scalatest.{Matchers, WordSpec}

import scala.collection.immutable

abstract class MetadataManagerSpec[M: MetadataManager] extends WordSpec with Matchers with PmmlLoaderKit {

  val modelName = "model"
  val modelVersion = 1
  val modelPath: String = getPMMLSource(Source.KmeansPmml)

  val modelId: ModelId = ModelId(modelName, modelVersion)
  val modelInfo = ModelInfo(modelPath)

  val in = immutable.Map(modelId -> modelInfo)
  val unknownIn = immutable.Map(ModelId("unknown-id", scala.util.Random.nextLong()) -> modelInfo)

  def outOnKnown: immutable.Map[ModelId, ModelInfo] = toOut(in)
  def outOnUnknown: immutable.Map[ModelId, ModelInfo] = toOut(unknownIn)

  def toOut(in: immutable.Map[ModelId, ModelInfo]): immutable.Map[ModelId, ModelInfo]
  def controlMessage: M

  "MetadataManager" should {

    "manage metadata correctly if targeted model is not already in metadata (Add add metadata, Del returns input)" in {
      MetadataManager(controlMessage, unknownIn) shouldBe outOnUnknown
    }

    "manage metadata correctly if targeted model already exists (Add returns input, Del removes metadata)" in {
      MetadataManager(controlMessage, in) shouldBe outOnKnown
    }

  }

}

class AddMetadataManagerSpec extends MetadataManagerSpec[AddMessage] {
  override lazy val controlMessage: AddMessage =
    AddMessage(modelName, modelVersion, modelPath, System.currentTimeMillis())

  override def toOut(in: immutable.Map[ModelId, ModelInfo]): immutable.Map[ModelId, ModelInfo] =
    in.get(modelId) match {
      case Some(_) => in
      case None => in + (modelId -> modelInfo)
    }
}

class RemoveMetadataManagerSpec extends MetadataManagerSpec[DelMessage] {
  override lazy val controlMessage: DelMessage = DelMessage(modelName, modelVersion, System.currentTimeMillis())

  override def toOut(in: immutable.Map[ModelId, ModelInfo]): immutable.Map[ModelId, ModelInfo] =
    in - ModelId(modelName, modelVersion)
} 
Example 44
Source File: PredictionSpec.scala    From flink-jpmml   with GNU Affero General Public License v3.0 5 votes vote down vote up
package io.radicalbit.flink.pmml.scala.models.prediction

import io.radicalbit.flink.pmml.scala.api.exceptions._
import org.jpmml.evaluator.EvaluationException
import org.scalatest.{Matchers, WordSpec}

import scala.util.Try

class PredictionSpec extends WordSpec with Matchers {

  private def throwableFunc[E <: Exception](exception: E)(f: PartialFunction[Throwable, Prediction]) =
    Try(throw exception).recover(f).get

  "Prediction" should {

    "extract prediction if the extraction is Success" in {
      Prediction.extractPrediction(Try(2.0)) shouldBe Prediction(Score(2.0))
    }

    "testing prediction getOrElse EmptyScore" in {
      val emptyPrediction: Prediction = Prediction(Target.empty)
      emptyPrediction.value.getOrElse(-1.0) shouldBe -1.0
    }

    "tesing prediction getOrElse with Score" in {
      val prediction: Prediction = Prediction(Score(3.0))
      prediction.value.getOrElse(-1.0) shouldBe 3.0
    }

    "extract empty prediction if the extraction is Failure" in {
      Prediction.extractPrediction(Try(2 / 0)) shouldBe Prediction(EmptyScore)
    }

    "return None if onFailedPrediction is active and JPMMLExtractionException" in {
      throwableFunc(new JPMMLExtractionException("")) {
        case e: Throwable => Prediction.onFailedPrediction(e)
      } shouldBe Prediction(EmptyScore)
    }

    "return None if onFailedPrediction is active and InputPreparationException" in {
      throwableFunc(new InputPreparationException("")) {
        case e: Throwable => Prediction.onFailedPrediction(e)
      } shouldBe Prediction(EmptyScore)
    }

    "return None if onFailedPrediction is active and InputValidationException" in {
      throwableFunc(new InputValidationException("")) {
        case e: Throwable => Prediction.onFailedPrediction(e)
      } shouldBe Prediction(EmptyScore)
    }

    "return None if onFailedPrediction is active and EvaluationException" in {
      throwableFunc(new EvaluationException) {
        case e: Throwable => Prediction.onFailedPrediction(e)
      } shouldBe Prediction(EmptyScore)
    }

    "return None if onFailedPrediction is active and ClassCastException" in {
      throwableFunc(new ClassCastException) {
        case e: Throwable => Prediction.onFailedPrediction(e)
      } shouldBe Prediction(EmptyScore)
    }

    "return None if onFailedPrediction is active and whatever Exception" in {
      throwableFunc(new Exception) {
        case e: Throwable => Prediction.onFailedPrediction(e)
      } shouldBe Prediction(EmptyScore)
    }

  }

} 
Example 45
Source File: TargetSpec.scala    From flink-jpmml   with GNU Affero General Public License v3.0 5 votes vote down vote up
package io.radicalbit.flink.pmml.scala.models.prediction

import org.scalatest.{Matchers, WordSpec}

class TargetSpec extends WordSpec with Matchers {
  private val target = Target(3.0)
  private val emptyTarget = Target.empty

  "Target" should {

    "have right box called Score and with right value" in {
      target shouldBe Score(3.0)
    }

    "have right empty box called EmptyScore and with right value " in {
      emptyTarget shouldBe EmptyScore
    }

    "have a get method" in {
      target.get shouldBe 3.0
    }

    "throw a NoSuchElementException if get on emptyTarget" in {
      an[NoSuchElementException] should be thrownBy emptyTarget.get
    }

    "have a getOrElse method and return vale if is score" in {
      target.getOrElse(-1.0) shouldBe 3.0
    }

    "have a getOrElse method and return empty value if is empty score" in {
      emptyTarget.getOrElse(-1.0) shouldBe -1.0
    }

  }
} 
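A minimal sketch of the ADT these assertions describe, assuming an Option-like sealed hierarchy (an illustration of the contract, not flink-jpmml's actual code):

sealed trait Target {
  def get: Double = this match {
    case Score(v)   => v
    case EmptyScore => throw new NoSuchElementException("EmptyScore.get")
  }
  def getOrElse(default: Double): Double = this match {
    case Score(v)   => v
    case EmptyScore => default
  }
}

case class Score(value: Double) extends Target
case object EmptyScore extends Target

object Target {
  def apply(value: Double): Target = Score(value)
  def empty: Target = EmptyScore
}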
Example 46
Source File: EndpointFilterSpec.scala    From eventuate   with Apache License 2.0 5 votes vote down vote up
package com.rbmhtechnology.eventuate

import com.rbmhtechnology.eventuate.EndpointFilters.sourceFilters
import com.rbmhtechnology.eventuate.EndpointFilters.targetAndSourceFilters
import com.rbmhtechnology.eventuate.EndpointFilters.targetFilters
import com.rbmhtechnology.eventuate.EndpointFilters.targetOverwritesSourceFilters
import com.rbmhtechnology.eventuate.ReplicationFilter.NoFilter
import org.scalatest.Matchers
import org.scalatest.WordSpec

object EndpointFilterSpec {
  def newFilter: ReplicationFilter = new ReplicationFilter {
    override def apply(event: DurableEvent): Boolean = true
  }
  val targetFilter = newFilter
  val sourceFilter = newFilter

  val targetLogId = "targetLogId"
  val sourceLogName = "sourceLogName"
}

class EndpointFilterSpec extends WordSpec with Matchers {
  import EndpointFilterSpec._

  "EndpointFilters" must {
    "and source and target filters" in {
      val endpointFilters = targetAndSourceFilters(Map(targetLogId -> targetFilter), Map(sourceLogName -> sourceFilter))

      endpointFilters.filterFor(targetLogId, sourceLogName) should be(targetFilter and sourceFilter)
      endpointFilters.filterFor("", sourceLogName) should be(sourceFilter)
      endpointFilters.filterFor(targetLogId, "") should be(targetFilter)
      endpointFilters.filterFor("", "") should be(NoFilter)
    }
    "overwrite source by target filters" in {
      val endpointFilters = targetOverwritesSourceFilters(Map(targetLogId -> targetFilter), Map(sourceLogName -> sourceFilter))

      endpointFilters.filterFor(targetLogId, sourceLogName) should be(targetFilter)
      endpointFilters.filterFor("", sourceLogName) should be(sourceFilter)
      endpointFilters.filterFor(targetLogId, "") should be(targetFilter)
      endpointFilters.filterFor("", "") should be(NoFilter)
    }
    "use source filters only" in {
      val endpointFilters = sourceFilters(Map(sourceLogName -> sourceFilter))

      endpointFilters.filterFor(targetLogId, sourceLogName) should be(sourceFilter)
      endpointFilters.filterFor(targetLogId, "") should be(NoFilter)
    }
    "use target filters only" in {
      val endpointFilters = targetFilters(Map(targetLogId -> targetFilter))

      endpointFilters.filterFor(targetLogId, sourceLogName) should be(targetFilter)
      endpointFilters.filterFor("", sourceLogName) should be(NoFilter)
    }
  }
} 
Example 47
Source File: BinaryPayloadManifestFilterSpec.scala    From eventuate   with Apache License 2.0 5 votes vote down vote up
package com.rbmhtechnology.eventuate

import com.google.protobuf.ByteString
import org.scalatest.Matchers
import org.scalatest.WordSpec

object BinaryPayloadManifestFilterSpec {
  def durableEventWithBinaryPayloadManifest(manifest: Option[String]): DurableEvent =
    DurableEvent(BinaryPayload(ByteString.EMPTY, 0, manifest, isStringManifest = true), "emitterId")
}

class BinaryPayloadManifestFilterSpec extends WordSpec with Matchers {

  import BinaryPayloadManifestFilterSpec._

  "BinaryPayloadManifestFilter" must {
    "pass BinaryPayloads with matching manifest" in {
      BinaryPayloadManifestFilter("a.*".r).apply(durableEventWithBinaryPayloadManifest(Some("abc"))) should be(true)
    }
    "filter BinaryPayloads with partially matching manifest" in {
      BinaryPayloadManifestFilter("b".r).apply(durableEventWithBinaryPayloadManifest(Some("abc"))) should be(false)
    }
    "filter BinaryPayloads with non-matching manifest" in {
      BinaryPayloadManifestFilter("a.*".r).apply(durableEventWithBinaryPayloadManifest(Some("bc"))) should be(false)
    }
    "filter BinaryPayloads without manifest" in {
      BinaryPayloadManifestFilter("a.*".r).apply(durableEventWithBinaryPayloadManifest(None)) should be(false)
    }
    "filter other payload" in {
      BinaryPayloadManifestFilter("a.*".r).apply(DurableEvent("payload", "emitterId")) should be(false)
    }
  }
} 
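Note that "b".r does match inside "abc", yet the event is still filtered, so the filter must require the manifest to match in full. A minimal sketch of that semantics; BinaryPayloadSketch is a stand-in for eventuate's BinaryPayload, reduced to the one field that matters here:

import scala.util.matching.Regex

case class BinaryPayloadSketch(manifest: Option[String])

object ManifestFilterSketch {
  def passes(regex: Regex, payload: Any): Boolean = payload match {
    // pattern.matcher(m).matches() demands a full match, which is why
    // "b".r does not pass "abc" in the partial-match test above.
    case BinaryPayloadSketch(Some(m)) => regex.pattern.matcher(m).matches()
    case _                            => false // no manifest, or not a binary payload
  }
}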
Example 48
Source File: RecoverySpecCassandra.scala    From eventuate   with Apache License 2.0 5 votes vote down vote up
package com.rbmhtechnology.eventuate

import com.rbmhtechnology.eventuate.ReplicationIntegrationSpec.replicationConnection
import com.rbmhtechnology.eventuate.utilities._
import org.scalatest.Matchers
import org.scalatest.WordSpec

class RecoverySpecCassandra extends WordSpec with Matchers with MultiLocationSpecCassandra {
  "ReplicationEndpoint recovery" must {
    "leave the index in an consistent state" in { // test for issue #393
      def newLocationA = location("A")
      val locationA1 = newLocationA
      val locationB = location("B")

      def newEndpointA(l: Location) = l.endpoint(Set("L1"), Set(replicationConnection(locationB.port)), activate = false)
      val endpointA1 = newEndpointA(locationA1)
      locationB.endpoint(Set("L1"), Set(replicationConnection(locationA1.port)))

      val logA = endpointA1.target("L1")
      write(logA, List("1"), Some("A1"))

      endpointA1.recover().await
      locationA1.terminate().await

      val locationA2 = newLocationA
      val endpointA2 = newEndpointA(locationA2)
      locationA2.listener(endpointA2.logs("L1"), Some("A1")).waitForMessage("1")
    }
  }

} 
Example 49
Source File: DeleteEventsSpecLeveldb.scala    From eventuate   with Apache License 2.0 5 votes vote down vote up
package com.rbmhtechnology.eventuate

import com.rbmhtechnology.eventuate.log.EventLogWriter
import com.rbmhtechnology.eventuate.utilities.AwaitHelper
import com.typesafe.config.ConfigFactory

import org.scalatest.{ Matchers, WordSpec }

object DeleteEventsSpecLeveldb {
  def emitter(endpoint: ReplicationEndpoint, logName: String): EventLogWriter =
    new EventLogWriter(s"${endpoint.id}_Emitter", endpoint.logs(logName))(endpoint.system)

  val config = ConfigFactory.parseString(
    """
      |eventuate.log.replication.retry-delay = 1s
      |eventuate.log.replication.remote-read-timeout = 2s
      |eventuate.log.recovery.remote-operation-retry-max = 10
      |eventuate.log.recovery.remote-operation-retry-delay = 1s
      |eventuate.log.recovery.remote-operation-timeout = 1s
    """.stripMargin)

  val L1 = "L1"
}

class DeleteEventsSpecLeveldb extends WordSpec with Matchers with MultiLocationSpecLeveldb {
  import DeleteEventsSpecLeveldb._
  import ReplicationIntegrationSpec.replicationConnection

  "Deleting events" must {
    "not replay deleted events on restart" in {
      def newLocationA = location("A", customConfig = DeleteEventsSpecLeveldb.config)
      def newEndpointA(l: Location) = l.endpoint(Set(L1), Set(), activate = false)

      val locationA1 = newLocationA
      val endpointA1 = newEndpointA(locationA1)

      val listenerA = locationA1.listener(endpointA1.logs(L1))
      val emitterA = emitter(endpointA1, L1)

      emitterA.write(0 to 5)
      listenerA.waitForMessage(5)

      endpointA1.delete(L1, 3, Set.empty).await shouldBe 3
      locationA1.terminate().await

      val locationA2 = newLocationA
      def endpointA2 = newEndpointA(locationA2)

      locationA2.listener(endpointA2.logs(L1)).expectMsgAllOf(3 to 5: _*)
    }
  }

  "Conditionally deleting events" must {
    "keep event available for corresponding remote log" in {
      val locationA = location("A", customConfig = DeleteEventsSpecLeveldb.config)
      val locationB = location("B", customConfig = DeleteEventsSpecLeveldb.config)
      val locationC = location("C", customConfig = DeleteEventsSpecLeveldb.config)

      val endpointA = locationA.endpoint(Set(L1), Set(replicationConnection(locationB.port), replicationConnection(locationC.port)), activate = false)
      val endpointB = locationB.endpoint(Set(L1), Set(replicationConnection(locationA.port)), activate = false)
      val endpointC = locationC.endpoint(Set(L1), Set(replicationConnection(locationA.port)), activate = false)

      val emitterA = emitter(endpointA, L1)

      val listenerA = locationA.listener(endpointA.logs(L1))
      val listenerB = locationB.listener(endpointB.logs(L1))
      val listenerC = locationC.listener(endpointC.logs(L1))

      emitterA.write(0 to 5)
      listenerA.waitForMessage(5)

      endpointA.delete(L1, 3, Set(endpointB.id, endpointC.id)).await shouldBe 3

      endpointA.activate()
      endpointB.activate()
      listenerB.expectMsgAllOf(0 to 5: _*)

      endpointC.activate()
      listenerC.expectMsgAllOf(0 to 5: _*)
    }
  }
} 
Example 50
Source File: PersistOnEventWithRecoverySpecLeveldb.scala    From eventuate   with Apache License 2.0 5 votes vote down vote up
package com.rbmhtechnology.eventuate

import java.util.UUID

import akka.actor.Actor
import akka.actor.ActorRef
import akka.actor.Props
import akka.testkit.TestProbe
import com.rbmhtechnology.eventuate.ReplicationIntegrationSpec.replicationConnection
import com.rbmhtechnology.eventuate.utilities._
import org.apache.commons.io.FileUtils
import org.scalatest.Matchers
import org.scalatest.WordSpec

import scala.concurrent.duration.DurationInt

object PersistOnEventWithRecoverySpecLeveldb {
  class OnBEmitRandomActor(val eventLog: ActorRef, probe: TestProbe) extends EventsourcedActor with PersistOnEvent {

    override def id = getClass.getName

    override def onCommand = Actor.emptyBehavior

    override def onEvent = {
      case "A"          =>
      case "B"          => persistOnEvent(UUID.randomUUID().toString)
      case uuid: String => probe.ref ! uuid
    }
  }

  def persistOnEventProbe(locationA1: Location, log: ActorRef) = {
    val probe = locationA1.probe
    locationA1.system.actorOf(Props(new OnBEmitRandomActor(log, probe)))
    probe
  }

  val noMsgTimeout = 100.millis
}

class PersistOnEventWithRecoverySpecLeveldb extends WordSpec with Matchers with MultiLocationSpecLeveldb {
  import RecoverySpecLeveldb._
  import PersistOnEventWithRecoverySpecLeveldb._

  override val logFactory: String => Props =
    id => SingleLocationSpecLeveldb.TestEventLog.props(id, batching = true)

  "An EventsourcedActor with PersistOnEvent" must {
    "not re-attempt persistence on successful write after reordering of events through disaster recovery" in {
      val locationB = location("B", customConfig = RecoverySpecLeveldb.config)
      def newLocationA = location("A", customConfig = RecoverySpecLeveldb.config)
      val locationA1 = newLocationA

      val endpointB = locationB.endpoint(Set("L1"), Set(replicationConnection(locationA1.port)))
      def newEndpointA(l: Location, activate: Boolean) = l.endpoint(Set("L1"), Set(replicationConnection(locationB.port)), activate = activate)
      val endpointA1 = newEndpointA(locationA1, activate = true)

      val targetA = endpointA1.target("L1")
      val logDirA = logDirectory(targetA)
      val targetB = endpointB.target("L1")
      val a1Probe = persistOnEventProbe(locationA1, targetA.log)

      write(targetA, List("A"))
      write(targetB, List("B"))
      val event = a1Probe.expectMsgClass(classOf[String])
      assertConvergence(Set("A", "B", event), endpointA1, endpointB)

      locationA1.terminate().await
      FileUtils.deleteDirectory(logDirA)

      val locationA2 = newLocationA
      val endpointA2 = newEndpointA(locationA2, activate = false)
      endpointA2.recover().await

      val a2Probe = persistOnEventProbe(locationA2, endpointA2.logs("L1"))
      a2Probe.expectMsg(event)
      a2Probe.expectNoMsg(noMsgTimeout)
      assertConvergence(Set("A", "B", event), endpointA2, endpointB)
    }
  }
} 
Example 51
Source File: ConsulCoordinationSpec.scala    From constructr-consul   with Apache License 2.0 5 votes vote down vote up
package com.tecsisa.constructr.coordination.consul

import akka.Done
import akka.actor.{ ActorSystem, AddressFromURIString }
import akka.testkit.{ TestDuration, TestProbe }
import com.typesafe.config.ConfigFactory
import org.scalatest.{ BeforeAndAfterAll, Matchers, WordSpec }
import scala.concurrent.duration.{ Duration, DurationInt, FiniteDuration }
import scala.concurrent.{ Await, Awaitable }
import scala.util.Random

object ConsulCoordinationSpec {

  private val coordinationHost = {
    val dockerHostPattern = """tcp://(\S+):\d{1,5}""".r
    sys.env
      .get("DOCKER_HOST")
      .collect { case dockerHostPattern(address) => address }
      .getOrElse("127.0.0.1")
  }
}

class ConsulCoordinationSpec extends WordSpec with Matchers with BeforeAndAfterAll {
  import ConsulCoordinationSpec._

  private implicit val system = {
    val config =
      ConfigFactory
        .parseString(s"constructr.coordination.host = $coordinationHost")
        .withFallback(ConfigFactory.load())
    ActorSystem("default", config)
  }

  private val address1 = AddressFromURIString("akka.tcp://default@a:2552")
  private val address2 = AddressFromURIString("akka.tcp://default@b:2552")

  "ConsulCoordination" should {
    "correctly interact with consul" in {
      val coordination = new ConsulCoordination(randomString(), system)

      // Getting nodes
      resultOf(coordination.getNodes()) shouldBe 'empty

      // Lock (ttl >= 10s)
      resultOf(coordination.lock(address1, 10.seconds)) shouldBe true
      resultOf(coordination.lock(address1, 10.seconds)) shouldBe true
      resultOf(coordination.lock(address2, 10.seconds)) shouldBe false

      // Add self
      resultOf(coordination.addSelf(address1, 10.seconds)) shouldBe Done
      resultOf(coordination.getNodes()) shouldBe Set(address1)

      // Refresh
      resultOf(coordination.refresh(address1, 10.seconds)) shouldBe Done
      resultOf(coordination.getNodes()) shouldBe Set(address1)

      val probe = TestProbe()
      import probe._
      awaitAssert(
        resultOf(coordination.getNodes()) shouldBe 'empty,
        25.seconds // Wait until open sessions expire
      )
    }
  }

  override protected def afterAll() = {
    Await.ready(system.terminate(), Duration.Inf)
    super.afterAll()
  }

  private def resultOf[A](awaitable: Awaitable[A], max: FiniteDuration = 3.seconds.dilated) =
    Await.result(awaitable, max)

  private def randomString() = math.abs(Random.nextInt).toString
} 
Example 52
Source File: LoginHandlerSpec.scala    From gatling-imap   with GNU Affero General Public License v3.0 5 votes vote down vote up
package com.linagora.gatling.imap.protocol.command

import akka.actor.ActorSystem
import akka.testkit.TestProbe
import com.linagora.gatling.imap.Fixture.bart
import com.linagora.gatling.imap.protocol.{Command, Response, UserId}
import com.linagora.gatling.imap.{CyrusServer, ImapTestUtils, RunningServer}
import com.sun.mail.imap.protocol.IMAPResponse
import org.scalatest.matchers.{MatchResult, Matcher}
import org.scalatest.{BeforeAndAfterEach, Matchers, WordSpec}
import org.slf4j
import org.slf4j.LoggerFactory

import scala.concurrent.duration._
import scala.concurrent.ExecutionContext.Implicits.global

class LoginHandlerSpec extends WordSpec with ImapTestUtils with BeforeAndAfterEach with Matchers {
  val logger: slf4j.Logger = LoggerFactory.getLogger(this.getClass.getCanonicalName)

  private val server: RunningServer = CyrusServer.start()

  override def beforeEach(): Unit = {
    server.addUser(bart)
  }

  override protected def afterEach(): Unit = {
    system.terminate()
    server.stop()
  }

  implicit lazy val system: ActorSystem = ActorSystem("LoginHandlerSpec")
  "Login handler" should {
    "send the response back when logged in" in {
      val probe = TestProbe()
      val sessionFuture = connect(server.mappedImapPort())
      sessionFuture.onComplete(session => {
        val handler = system.actorOf(LoginHandler.props(session.get))
        probe.send(handler, Command.Login(UserId(1), bart))
      })
      probe.expectMsgPF(1.minute) {
        case Response.LoggedIn(responses) => responses.isOk shouldBe true
      }
    }
  }

  object IMAPResponseMatchers {

    class HasTagMatcher(tag: String) extends Matcher[IMAPResponse] {
      def apply(left: IMAPResponse): MatchResult = {
        val name = left.getTag
        MatchResult(
          name == tag,
          s"""ImapResponse doesn't have tag "$tag"""",
          s"""ImapResponse has tag "$tag""""
        )
      }
    }

    class IsOkMatcher() extends Matcher[IMAPResponse] {
      def apply(left: IMAPResponse): MatchResult = {
        MatchResult(
          left.isOK,
          s"""ImapResponse isn't OK """,
          s"""ImapResponse is OK """
        )
      }
    }

    def isOk = new IsOkMatcher()

    def hasTag(tag: String) = new HasTagMatcher(tag)
  }

} 
Example 53
Source File: ImapSessionsSpec.scala    From gatling-imap   with GNU Affero General Public License v3.0 5 votes vote down vote up
package com.linagora.gatling.imap.protocol.command

import java.util.Properties

import akka.actor.ActorSystem
import akka.testkit.TestProbe
import com.linagora.gatling.imap.Fixture.bart
import com.linagora.gatling.imap.protocol.{Command, ImapProtocol, ImapResponses, ImapSessions, Response, UserId}
import com.linagora.gatling.imap.{CyrusServer, ImapTestUtils, RunningServer}
import org.scalatest.{BeforeAndAfterEach, Matchers, WordSpec}
import org.slf4j.{Logger, LoggerFactory}

import scala.concurrent.duration._

class ImapSessionsSpec extends WordSpec with Matchers with ImapTestUtils with BeforeAndAfterEach {
  val logger: Logger = LoggerFactory.getLogger(this.getClass.getCanonicalName)

  private val server: RunningServer = CyrusServer.start()

  override def beforeEach(): Unit = {
    server.addUser(bart)
  }

  override protected def afterEach(): Unit = {
    system.terminate()
    server.stop()
  }

  implicit lazy val system: ActorSystem = ActorSystem("ImapSessionsSpec")
  "the imap sessions actor" should {
    "log a user in" in {
      val config = new Properties()
      val protocol = ImapProtocol("localhost", server.mappedImapPort(), config)

      val sessions = system.actorOf(ImapSessions.props(protocol))
      val probe = TestProbe()
      val userId = UserId(1)
      probe.send(sessions, Command.Connect(userId))
      probe.expectMsg(10.second, Response.Connected(ImapResponses.empty))
      probe.send(sessions, Command.Login(userId, bart))
      probe.expectMsgPF(10.second) {
        case Response.LoggedIn(responses: ImapResponses) => responses.isOk shouldBe true
      }
    }
  }

} 
Example 54
Source File: BaseActionSpec.scala    From sbt-whitesource   with Apache License 2.0 5 votes vote down vote up
package sbtwhitesource

import java.net.URL

import sbt.Artifact

import org.scalatest.WordSpec
import org.scalatest.Matchers

class BaseActionSpec extends WordSpec with Matchers {
  "The base action" should {
    "merge standard and native jars of the same artifacts" in {
      val nativeUrl = new URL("https://repo1.maven.org/maven2/com/github/jnr/jffi/1.2.16/jffi-1.2.16-native.jar")
      val nativeArtifact: Artifact = Artifact("jffi", "jar", "jar", Some("native"), Vector(), Some(nativeUrl))
      val native = ModuleInfo("com.github", "jffi", "1.2.16", Some((nativeArtifact, null)))
      
      val javaUrl = new URL("https://repo1.maven.org/maven2/com/github/jnr/jffi/1.2.16/jffi-1.2.16.jar")
      val javaArtifact: Artifact = Artifact("jffi", "jar", "jar", None, Vector(), Some(javaUrl))
      val java = ModuleInfo("com.github", "jffi", "1.2.16", Some((javaArtifact, null)))
      
      BaseAction.mergeModuleInfo(native, java) should be(Some(java))
      BaseAction.mergeModuleInfo(java, native) should be(Some(java))
    }

    "merge platform-specific artifacts with matching platform-independent artifacts" in {
      val nativeUrl = new URL("https://repo1.maven.org/maven2/io/netty/netty-transport-native-epoll/4.1.42.Final/netty-transport-native-epoll-4.1.42.Final-linux-x86_64.jar")
      val nativeArtifact: Artifact = Artifact("netty-transport-native-epoll", "jar", "jar", Some("linux-x86_64"), Vector(), Some(nativeUrl))
      val native = ModuleInfo("io.netty", "netty-transport-native-epoll", "4.1.42.Final", Some((nativeArtifact, null)))

      val javaUrl = new URL("https://repo1.maven.org/maven2/io/netty/netty-transport-native-epoll/4.1.42.Final/netty-transport-native-epoll-4.1.42.Final.jar")
      val javaArtifact: Artifact = Artifact("netty-transport-native-epoll", "jar", "jar", None, Vector(), Some(javaUrl))
      val java = ModuleInfo("io.netty", "netty-transport-native-epoll", "4.1.42.Final", Some((javaArtifact, null)))

      BaseAction.mergeModuleInfo(native, java) should be(Some(java))
      BaseAction.mergeModuleInfo(java, native) should be(Some(java))
    }

    "upgrade 'jar' to 'bundle' when both types are present" in {
      val url = new URL("https://repo1.maven.org/maven2/com/example/osgi/fake-osgi-bundle/1.0.0/fake-osgi-bundle-1.0.0.jar")

      val bundleArtifact: Artifact = Artifact("fake-osgi-bundle", "bundle", "jar", None, Vector(), Some(url))
      val bundle = ModuleInfo("com.example.osgi", "fake-osgi-bundle", "1.0.0", Some((bundleArtifact, null)))

      val jarArtifact: Artifact = Artifact("fake-osgi-bundle", "jar", "jar", None, Vector(), Some(url))
      val jar = ModuleInfo("com.example.osgi", "fake-osgi-bundle", "1.0.0", Some((jarArtifact, null)))

      BaseAction.mergeModuleInfo(bundle, jar) should be(Some(bundle))
      BaseAction.mergeModuleInfo(jar, bundle) should be(Some(bundle))
    }
  }
} 
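The three tests above pin down the precedence mergeModuleInfo applies: a platform-independent jar wins over a classified one (native, linux-x86_64), and type "bundle" wins over "jar". A simplified sketch of that decision logic, using hypothetical names (Art, preferred) rather than sbt-whitesource's actual types:

// Illustrative only, not the library's implementation.
final case class Art(tpe: String, classifier: Option[String])

def preferred(a: Art, b: Art): Art =
  (a.classifier, b.classifier) match {
    case (None, Some(_)) => a                                 // plain beats classified
    case (Some(_), None) => b
    case _               => if (a.tpe == "bundle") a else b   // bundle beats jar
  }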
Example 55
Source File: GenericWordSpecSuite.scala    From XSQL   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.sql.test

import org.scalatest.WordSpec

import org.apache.spark.sql.Dataset


class GenericWordSpecSuite extends WordSpec with SharedSparkSession {
  import testImplicits._

  private def ds = Seq((1, 1), (2, 1), (3, 2), (4, 2), (5, 3), (6, 3), (7, 4), (8, 4)).toDS

  "A Simple Dataset" when {
    "looked at as complete rows" should {
      "have the specified number of elements" in {
        assert(8 === ds.count)
      }
      "have the specified number of unique elements" in {
        assert(8 === ds.distinct.count)
      }
    }
    "refined to specific columns" should {
      "have the specified number of elements in each column" in {
        assert(8 === ds.select("_1").count)
        assert(8 === ds.select("_2").count)
      }
      "have the correct number of distinct elements in each column" in {
        assert(8 === ds.select("_1").distinct.count)
        assert(4 === ds.select("_2").distinct.count)
      }
    }
  }
} 
Example 56
Source File: UTCFormatSpec.scala    From play-json-ops   with MIT License 5 votes vote down vote up
package play.api.libs.json.ops.v4

import org.joda.time.{DateTime, DateTimeZone}
import org.scalatest.WordSpec
import play.api.libs.json.{Format, Json}

case class NotUTC(when: DateTime)
object NotUTC {
  implicit val format: Format[NotUTC] = Json.format[NotUTC]
}

case class UseUTC(when: DateTime)
object UseUTC extends UTCFormats {
  implicit val format: Format[UseUTC] = Json.format[UseUTC]
}

class UTCFormatSpec extends WordSpec {

  private[this] val pacificTimeZone = DateTimeZone.forID("US/Pacific")

  "Json.format by default" should {
    "deserialize with the current time zone" in {
      val dt = new DateTime(pacificTimeZone)
      assertResult(DateTimeZone.getDefault) {
        val notUTC = Json.toJson(NotUTC(dt)).as[NotUTC]
        notUTC.when.getZone
      }
    }
  }

  "UTCFormats" should {

    "override the standard Format[DateTime]" in {
      val dt = new DateTime(pacificTimeZone)
      assertResult(DateTimeZone.UTC) {
        val useUTC = Json.toJson(UseUTC(dt)).as[UseUTC]
        useUTC.when.getZone
      }
    }
  }
} 
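The difference between NotUTC and UseUTC comes down to implicit precedence: extending UTCFormats puts an implicit Format[DateTime] in the lexical scope of the companion object, which the Json.format macro resolves in preference to the default. A generic sketch of the same trick with invented types (Temp, RoundedFormats, Reading are hypothetical):

import play.api.libs.json._

final case class Temp(celsius: Double)

// Mixing this trait in brings roundedTemp into lexical scope, which beats
// any lower-priority default when Json.format derives Format[Reading].
trait RoundedFormats {
  implicit val roundedTemp: Format[Temp] = Format(
    Reads.of[Double].map(d => Temp(math.round(d).toDouble)),
    Writes(t => JsNumber(t.celsius))
  )
}

final case class Reading(temp: Temp)
object Reading extends RoundedFormats {
  implicit val format: Format[Reading] = Json.format[Reading]
}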
Example 57
Source File: DurationGeneratorsSpec.scala    From play-json-ops   with MIT License 5 votes vote down vote up
package play.api.libs.json.scalacheck

import org.scalatest.WordSpec
import org.scalatest.prop.GeneratorDrivenPropertyChecks._
import play.api.libs.json.scalacheck.DurationGenerators._

import scala.concurrent.duration.{Duration, FiniteDuration}

class DurationGeneratorsSpec extends WordSpec {

  "Arbitrary[FiniteDuration]" should {
    "always produce a valid finite value" in {
      forAll() { (duration: FiniteDuration) =>
        assert(duration.isFinite())
      }
    }
  }

  "Arbitrary[Duration]" should {
    "always produce a valid value" in {
      forAll() { (duration: Duration) =>
        assert(duration ne null)
      }
    }
  }
} 
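The spec above leans on the implicit Arbitrary instances imported from DurationGenerators. forAll can equally be driven by an explicit ScalaCheck Gen; a small sketch under that assumption (finiteDurations is an invented generator, not part of play-json-ops):

import org.scalacheck.Gen
import org.scalatest.WordSpec
import org.scalatest.prop.GeneratorDrivenPropertyChecks._

import scala.concurrent.duration._

class ExplicitGenSpec extends WordSpec {

  // Invented generator: durations between 0 and roughly 16 minutes.
  private val finiteDurations: Gen[FiniteDuration] =
    Gen.chooseNum(0L, 1000000L).map(_.millis)

  "An explicit generator" should {
    "drive forAll just like an Arbitrary" in {
      forAll(finiteDurations) { duration =>
        assert(duration >= 0.millis)
      }
    }
  }
}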
Example 58
Source File: HTTPInterfaceSpec.scala    From reactive-kafka-microservice-template   with Apache License 2.0 5 votes vote down vote up
package akka

import akka.event.Logging
import akka.http.scaladsl.testkit.ScalatestRouteTest
import akka.stream.QueueOfferResult
import akka.stream.QueueOfferResult.Enqueued
import akka.stream.scaladsl.SourceQueueWithComplete
import akka.testkit.{TestActorRef, TestProbe}
import com.omearac.consumers.{DataConsumer, EventConsumer}
import com.omearac.http.routes.{ConsumerCommands, ProducerCommands}
import com.omearac.producers.DataProducer
import org.scalatest.{Matchers, WordSpec}

import scala.concurrent.Future


class HTTPInterfaceSpec extends WordSpec
    with Matchers with ScalatestRouteTest
    with ConsumerCommands with ProducerCommands {

    val log = Logging(system, this.getClass.getName)

    //Mocks for DataConsumer Tests
    val dataConsumer = TestActorRef(new DataConsumer)
    val manager = TestProbe()
    dataConsumer.underlyingActor.consumerStreamManager = manager.ref

    //Mocks for EventConsumer Tests
    val eventConsumer = TestActorRef(new EventConsumer)
    eventConsumer.underlyingActor.consumerStreamManager = manager.ref

    //Mocks for DataProducer Tests
    val dataProducer = TestActorRef(new DataProducer)
    val mockProducerStream: SourceQueueWithComplete[Any] = new SourceQueueWithComplete[Any] {
        override def complete(): Unit = println("complete")

        override def fail(ex: Throwable): Unit = println("fail")

        override def offer(elem: Any): Future[QueueOfferResult] = Future{Enqueued}

        override def watchCompletion(): Future[Done] = Future{Done}
    }


    "The HTTP interface to control the DataConsumerStream" should {
        "return a Already Stopped message for GET requests to /data_consumer/stop" in {
            Get("/data_consumer/stop") ~> dataConsumerHttpCommands ~> check {
                responseAs[String] shouldEqual "Data Consumer Stream Already Stopped"
            }
        }

        "return a Stream Started response for GET requests to /data_consumer/start" in {
            Get("/data_consumer/start") ~> dataConsumerHttpCommands ~> check {
                responseAs[String] shouldEqual "Data Consumer Stream Started"
            }
        }
    }

    "The HTTP interface to control the EventConsumerStream" should {
        "return a Already Stopped message for GET requests to /event_consumer/stop" in {
            Get("/event_consumer/stop") ~> eventConsumerHttpCommands ~> check {
                responseAs[String] shouldEqual "Event Consumer Stream Already Stopped"
            }
        }

        "return a Stream Started response for GET requests to /data_consumer/start" in {
            Get("/event_consumer/start") ~> eventConsumerHttpCommands ~> check {
                responseAs[String] shouldEqual "Event Consumer Stream Started"
            }
        }
    }

    "The HTTP interface to tell the DataProducer Actor to publish messages to Kafka" should {
        "return a Messages Produced message for GET requests to /data_producer/produce/10" in {
            dataProducer.underlyingActor.producerStream = mockProducerStream
            val producing = dataProducer.underlyingActor.publishData
            dataProducer.underlyingActor.context.become(producing)

            Get("/data_producer/produce/10") ~> producerHttpCommands ~> check {
                responseAs[String] shouldEqual "10 messages Produced as Ordered, Boss!"
            }
        }
    }
} 
Example 59
Source File: HttpMetricsSpec.scala    From kamon-http4s   with Apache License 2.0 5 votes vote down vote up
package kamon.http4s

import cats.effect._
import kamon.testkit.InstrumentInspection
import org.http4s.HttpRoutes
import org.http4s.dsl.io._
import org.http4s.server.Server
import org.http4s.server.blaze.BlazeServerBuilder
import org.scalatest.concurrent.Eventually
import org.scalatest.time.SpanSugar
import org.scalatest.{Matchers, OptionValues, WordSpec}
import cats.implicits._
import kamon.http4s.middleware.server.KamonSupport
import kamon.instrumentation.http.HttpServerMetrics
import org.http4s.client.blaze.BlazeClientBuilder
import org.http4s.client.Client

import scala.concurrent.ExecutionContext
import org.http4s.implicits._

class HttpMetricsSpec extends WordSpec
  with Matchers
  with Eventually
  with SpanSugar
  with InstrumentInspection.Syntax
  with OptionValues {

  implicit val contextShift: ContextShift[IO] = IO.contextShift(ExecutionContext.global)
  implicit val timer: Timer[IO] = IO.timer(ExecutionContext.global)

  val srv =
    BlazeServerBuilder[IO]
      .bindLocal(43567)
      .withHttpApp(KamonSupport(HttpRoutes.of[IO] {
        case GET -> Root / "tracing" / "ok" =>  Ok("ok")
        case GET -> Root / "tracing" / "not-found"  => NotFound("not-found")
        case GET -> Root / "tracing" / "error"  => InternalServerError("This page will generate an error!")
      }, "/127.0.0.1", 43567).orNotFound)
      .resource

  val client =
    BlazeClientBuilder[IO](ExecutionContext.global).withMaxTotalConnections(10).resource

  val metrics =
    Resource.liftF(IO(HttpServerMetrics.of("http4s.server", "/127.0.0.1", 43567)))


  def withServerAndClient[A](f: (Server[IO], Client[IO], HttpServerMetrics.HttpServerInstruments) => IO[A]): A =
   (srv, client, metrics).tupled.use(f.tupled).unsafeRunSync()

  private def get[F[_]: ConcurrentEffect](path: String)(server: Server[F], client: Client[F]): F[String] = {
    client.expect[String](s"http://127.0.0.1:${server.address.getPort}$path")
  }

  "The HttpMetrics" should {

    "track the total of active requests" in withServerAndClient { (server, client, serverMetrics) =>

      val requests = List
        .fill(100) {
          get("/tracing/ok")(server, client)
        }.parSequence_

      val test = IO {
        serverMetrics.activeRequests.distribution().max should be > 1L
        serverMetrics.activeRequests.distribution().min shouldBe 0L
      }
      requests *> test
    }

    "track the response time with status code 2xx" in withServerAndClient { (server, client, serverMetrics) =>
      val requests: IO[Unit] = List.fill(100)(get("/tracing/ok")(server, client)).sequence_

      val test = IO(serverMetrics.requestsSuccessful.value should be >= 0L)

      requests *> test
    }

    "track the response time with status code 4xx" in withServerAndClient { (server, client, serverMetrics) =>
      val requests: IO[Unit] = List.fill(100)(get("/tracing/not-found")(server, client).attempt).sequence_

      val test = IO(serverMetrics.requestsClientError.value should be >= 0L)

      requests *> test
    }

    "track the response time with status code 5xx" in withServerAndClient { (server, client, serverMetrics) =>
      val requests: IO[Unit] = List.fill(100)(get("/tracing/error")(server, client).attempt).sequence_

      val test = IO(serverMetrics.requestsServerError.value should be >= 0L)

      requests *> test
    }
  }
} 
Example 60
Source File: CheckJsStringSpec.scala    From swagger-check   with MIT License 5 votes vote down vote up
package de.leanovate.swaggercheck.shrinkable

import org.scalacheck.Shrink
import org.scalatest.{MustMatchers, WordSpec}

class CheckJsStringSpec extends WordSpec with MustMatchers {
  "JsString" should {
    "not shrink formatted" in {
      val original = CheckJsString.formatted("0123456789abcdefghijklmnopqrstuvxyz")

      val shrink = Shrink.shrink(original)

      shrink mustBe empty
    }

    "shrink without min length" in {
      val original = CheckJsString.unformatted("0123456789abcdefghijklmnopqrstuvwxyz")

      val shrink = Shrink.shrink(original)

      shrink must not be empty
      shrink.foreach {
        value =>
          value.minLength mustBe empty
          value.value.length must be <= 36
      }
    }

    "shrink with min length" in {
      val original = CheckJsString(formatted = false, Some(30), "0123456789abcdefghijklmnopqrstuvwxyz")

      val shrink = Shrink.shrink(original)

      shrink must not be empty
      shrink.foreach {
        value =>
          value.minLength mustBe Some(30)
          value.value.length must be <= 36
          value.value.length must be >= 30
      }
    }

    "not shrink beneath min length" in {
      val original = CheckJsString(formatted = false, Some(36), "0123456789abcdefghijklmnopqrstuvwxyz")

      val shrink = Shrink.shrink(original)

      shrink mustBe empty
    }
  }
} 
Example 61
Source File: CheckJsValueSpec.scala    From swagger-check   with MIT License 5 votes vote down vote up
package de.leanovate.swaggercheck.shrinkable

import org.scalacheck.util.Pretty
import org.scalatest.{MustMatchers, WordSpec}

class CheckJsValueSpec extends WordSpec with MustMatchers {
  "JsValueDeserializer" should {
    "deserialize null to JsNull" in {
      CheckJsValue.parse("null") mustEqual CheckJsNull
    }

    "deserialize integers to JsInteger" in {
      val result = CheckJsValue.parse("1234")

      result mustBe an[CheckJsInteger]
      result.asInstanceOf[CheckJsInteger].value mustEqual BigInt(1234)
      result.asInstanceOf[CheckJsInteger].min mustEqual Some(BigInt(1234))
    }

    "deserialize floats to JsNumber" in {
      val result = CheckJsValue.parse("1234.5")

      result mustBe an[CheckJsNumber]
      result.asInstanceOf[CheckJsNumber].value mustEqual BigDecimal(1234.5)
      result.asInstanceOf[CheckJsNumber].min mustEqual Some(BigDecimal(1234.5))
    }

    "deserialize strings to JsFormatterString" in {
      val result = CheckJsValue.parse(""""one piece of string"""")

      result mustBe an[CheckJsString]
      result.asInstanceOf[CheckJsString].formatted mustBe true
      result.asInstanceOf[CheckJsString].value mustEqual "one piece of string"
    }

    "deserialize booleans to JsBoolean" in {
      CheckJsValue.parse("true") mustEqual CheckJsBoolean(true)
      CheckJsValue.parse("false") mustEqual CheckJsBoolean(false)
    }

    "deserialize arrays to JsArray" in {
      val result = CheckJsValue.parse("""[1234, 1234.5, true, "one piece of string"]""")

      result mustBe an[CheckJsArray]
      result.asInstanceOf[CheckJsArray].elements mustEqual Seq(
        CheckJsInteger.fixed(1234),
        CheckJsNumber.fixed(1234.5),
        CheckJsBoolean(true),
        CheckJsString.formatted("one piece of string")
      )
      result.asInstanceOf[CheckJsArray].minSize mustEqual Some(4)
    }

    "deserialize objects to JsObject" in {
      val result = CheckJsValue.parse("""{"one": 1234, "two": true, "three": "one piece of string"}""")

      result mustBe an[CheckJsObject]
      result.asInstanceOf[CheckJsObject].required mustEqual Set(
        "one", "two", "three"
      )
      result.asInstanceOf[CheckJsObject].fields mustEqual Map(
        "one" -> CheckJsInteger.fixed(1234),
        "two" -> CheckJsBoolean(true),
        "three" -> CheckJsString.formatted("one piece of string")
      )
    }

    "convertable to Pretty" in {
      val pretty = CheckJsValue.prettyJsValue(
        CheckJsObject.empty.copy(fields = Map("the" -> CheckJsString.formatted("value"))))

      pretty(Pretty.defaultParams) mustBe """{"the":"value"}"""
      pretty(Pretty.Params(1)) mustBe
        """{
          |  "the" : "value"
          |}""".stripMargin
    }
  }
} 
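CheckJsValue.parse has a counterpart, minified, used by the shrink specs that follow; assuming it re-serializes compactly (as the prettyJsValue default above suggests), a parse/serialize round-trip looks like this:

import de.leanovate.swaggercheck.shrinkable.CheckJsValue

object RoundTripSketch extends App {
  // Assumption: minified emits the parsed tree without whitespace,
  // matching the compact form prettyJsValue produces by default.
  val value = CheckJsValue.parse("""{"the":"value"}""")
  println(value.minified) // expected: {"the":"value"}
}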
Example 62
Source File: CheckJsArraySpec.scala    From swagger-check   with MIT License 5 votes vote down vote up
package de.leanovate.swaggercheck.shrinkable

import org.scalacheck.Shrink
import org.scalatest.{MustMatchers, WordSpec}

class CheckJsArraySpec extends WordSpec with MustMatchers {
  "JsArray" should {
    "shrink without min size" in {
      val original = CheckJsArray(None, Seq(
        CheckJsInteger(None, None, 1000000),
        CheckJsString.unformatted("0123456789abcdefghijklmnopqrstuvwxyz"),
        CheckJsBoolean(true),
        CheckJsBoolean(false),
        CheckJsInteger(None, None, 10000),
        CheckJsString.unformatted("zyxwvutsrqponmlkjihgfedcba9876543210")
      ))
      val originalJson = original.minified

      val shrink = Shrink.shrink(original)

      shrink must not be empty
      shrink.foreach {
        value =>
          value.minSize mustBe empty
          value.elements.length must be <= 6
      }
    }

    "shrink with min size" in {
      val original = CheckJsArray(Some(4), Seq(
        CheckJsInteger(None, None, 1000000),
        CheckJsString.unformatted( "0123456789abcdefghijklmnopqrstuvwxyz"),
        CheckJsBoolean(true),
        CheckJsBoolean(false),
        CheckJsInteger(None, None, 10000),
        CheckJsString.unformatted("zyxwvutsrqponmlkjihgfedcba9876543210")
      ))
      val originalJson = original.minified

      val shrink = Shrink.shrink(original)

      shrink must not be empty
      shrink.foreach {
        value =>
          value.minSize mustBe Some(4)
          value.elements.length must be <= 6
          value.elements.length must be >= 4
      }
    }

    "not shrink beneath min size" in {
      val original = CheckJsArray(Some(6), Seq(
        CheckJsInteger(None, None, 12345678),
        CheckJsString.unformatted("0123456789abcdefghijklmnopqrstuvwxyz"),
        CheckJsBoolean(true),
        CheckJsBoolean(false),
        CheckJsInteger(None, None, 87654321),
        CheckJsString.unformatted("zyxwvutsrqponmlkjihgfedcba9876543210")
      ))
      val originalJson = original.minified

      val shrink = Shrink.shrink(original)

      shrink mustBe empty
    }
  }
} 
Example 63
Source File: CheckJsObjectSpec.scala    From swagger-check   with MIT License 5 votes vote down vote up
package de.leanovate.swaggercheck.shrinkable

import org.scalacheck.Shrink
import org.scalatest.{MustMatchers, WordSpec}

class CheckJsObjectSpec extends WordSpec with MustMatchers {
  "JsObject" should {
    "shrink without order or required" in {
      val original = CheckJsObject(Set.empty, None, Map(
        "one" -> CheckJsInteger(None, None, 1000000),
        "two" -> CheckJsString.unformatted("0123456789abcdefghijklmnopqrstuvwxyz"),
        "three" -> CheckJsBoolean(true),
        "four" -> CheckJsBoolean(false),
        "five" ->CheckJsInteger(None, None, 10000),
        "six" -> CheckJsString.unformatted("zyxwvutsrqponmlkjihgfedcba9876543210")
      ))
      val originalJson = original.minified

      val shrink = Shrink.shrink[CheckJsObject](original)

      shrink must not be empty
      shrink.foreach {
        value =>
          value.fields.keySet -- original.fields.keySet mustBe empty
      }
    }

    "only shrink values if all required" in {
      val original = CheckJsObject(Set("one", "two", "three", "four", "five", "six"), None, Map(
        "one" -> CheckJsInteger(None, None, 1000000),
        "two" -> CheckJsString.unformatted("0123456789abcdefghijklmnopqrstuvwxyz"),
        "three" -> CheckJsBoolean(true),
        "four" -> CheckJsBoolean(false),
        "five" ->CheckJsInteger(None, None, 10000),
        "six" -> CheckJsString.unformatted("zyxwvutsrqponmlkjihgfedcba9876543210")
      ))
      val originalJson = original.minified

      val shrink = Shrink.shrink[CheckJsObject](original)

      shrink must not be empty
      shrink.foreach {
        value =>
          value.fields must have size 6
      }
    }
  }
} 
Example 64
Source File: StringFormatsSpec.scala    From swagger-check   with MIT License 5 votes vote down vote up
package de.leanovate.swaggercheck.schema.gen.formats

import de.leanovate.swaggercheck.schema.model.JsonPath
import org.scalatest.{MustMatchers, WordSpec}

class StringFormatsSpec extends WordSpec with MustMatchers {
  "URL string format" should {
    val format = GeneratableStringFormats.defaultFormats("url")

    "be valid for urls" in {
      format.validate(JsonPath(), "http://localhost/something").isSuccess mustBe true
      format.validate(JsonPath(), "http://localhost:8080/something?query=param").isSuccess mustBe true
    }

    "fail for non-urls" in {
      format.validate(JsonPath(), "something").isSuccess mustBe false
    }
  }

  "URI string format" should {
    val format = GeneratableStringFormats.defaultFormats("uri")
    
    "be valid for uris" in {
      format.validate(JsonPath(), "/something").isSuccess mustBe true
      format.validate(JsonPath(), "http://localhost:8080/something?query=param").isSuccess mustBe true
    }

    "fail for non-uris" in {
      format.validate(JsonPath(), ":?something").isSuccess mustBe false
    }
  }

  "UUID string format" should {
    val format = GeneratableStringFormats.defaultFormats("uuid")
    
    "be valid for uuids" in {
      format.validate(JsonPath(), "2df6e079-4028-4aa5-9bdb-bb59a314cdad").isSuccess mustBe true
      format.validate(JsonPath(), "864C67DF-51BB-4688-8A5B-105EC5FDD1D2").isSuccess mustBe true
    }

    "fail for non-uuids" in {
      format.validate(JsonPath(), "864C67DF-51BB-4688").isSuccess mustBe false
    }
  }

  "Email string format" should {
    val format = GeneratableStringFormats.defaultFormats("email")
    
    "be valid for emails" in {
      format.validate(JsonPath(), "[email protected]").isSuccess mustBe true
      format.validate(JsonPath(), "[email protected]").isSuccess mustBe true
    }

    "fail for non-emails" in {
      format.validate(JsonPath(), "someone").isSuccess mustBe false
    }
  }

  "Date string format" should {
    val format = GeneratableStringFormats.defaultFormats("date")
    
    "be valid for dates" in {
      format.validate(JsonPath(), "1856-12-20").isSuccess mustBe true
      format.validate(JsonPath(), "2320-01-30").isSuccess mustBe true
    }

    "fail for non-dates" in {
      format.validate(JsonPath(), "23200130").isSuccess mustBe false
      format.validate(JsonPath(), "2320-01-50").isSuccess mustBe false
    }
  }

  "DateTime string format" should {
    val format = GeneratableStringFormats.defaultFormats("date-time")
    
    "be valid for datetimes" in {
      format.validate(JsonPath(), "1856-12-20T12:34:56").isSuccess mustBe true
      format.validate(JsonPath(), "2320-01-30T12:34:56.123").isSuccess mustBe true
      format.validate(JsonPath(), "1856-12-20T12:34:56Z").isSuccess mustBe true
      format.validate(JsonPath(), "2320-01-30T12:34:56.123Z").isSuccess mustBe true
      format.validate(JsonPath(), "1856-12-20T12:34:56+01:00").isSuccess mustBe true
      format.validate(JsonPath(), "2320-01-30T12:34:56.123+01:00").isSuccess mustBe true
    }

    "fail for non-datetimes" in {
      format.validate(JsonPath(), "2320013012:34:56").isSuccess mustBe false
      format.validate(JsonPath(), "2320-01-5012:34:56").isSuccess mustBe false
    }
  }
} 
Example 65
Source File: IntegerFormatsSpec.scala    From swagger-check   with MIT License 5 votes vote down vote up
package de.leanovate.swaggercheck.schema.gen.formats

import de.leanovate.swaggercheck.schema.model.JsonPath
import de.leanovate.swaggercheck.schema.model.formats.IntegerFormats
import org.scalatest.{MustMatchers, WordSpec}

class IntegerFormatsSpec extends WordSpec with MustMatchers {
  "Int32 format" should {
    val format = GeneratableIntegerFormats.defaultFormats("int32")

    "fail for numbers out of 32-bit range" in {
      format.validate(JsonPath(), BigInt(Int.MaxValue) + BigInt(1)).isSuccess mustBe false
      format.validate(JsonPath(), BigInt(Int.MaxValue)).isSuccess mustBe true
      format.validate(JsonPath(), BigInt(Int.MinValue) - BigInt(1)).isSuccess mustBe false
      format.validate(JsonPath(), BigInt(Int.MinValue)).isSuccess mustBe true
    }
  }

  "Int64 format" should {
    val format = GeneratableIntegerFormats.defaultFormats("int64")

    "fail for numbers out of 64-bit range" in {
      format.validate(JsonPath(), BigInt(Long.MaxValue) + BigInt(1)).isSuccess mustBe false
      format.validate(JsonPath(), BigInt(Long.MaxValue)).isSuccess mustBe true
      format.validate(JsonPath(), BigInt(Long.MinValue) - BigInt(1)).isSuccess mustBe false
      format.validate(JsonPath(), BigInt(Long.MinValue)).isSuccess mustBe true
    }
  }
} 
Example 66
Source File: NumberFormatsSpec.scala    From swagger-check   with MIT License 5 votes vote down vote up
package de.leanovate.swaggercheck.schema.gen.formats

import de.leanovate.swaggercheck.schema.model.JsonPath
import org.scalatest.{MustMatchers, WordSpec}

class NumberFormatsSpec extends WordSpec with MustMatchers {
  "Float format" should {
    val format = GeneratableNumberFormats.defaultFormats("float")

    "fail for numbers out of range" in {
      format.validate(JsonPath(), BigDecimal.decimal(Float.MaxValue) + BigDecimal(1)).isSuccess mustBe false
      format.validate(JsonPath(), BigDecimal.decimal(Float.MaxValue)).isSuccess mustBe true
      format.validate(JsonPath(), BigDecimal.decimal(Float.MinValue) - BigDecimal(1)).isSuccess mustBe false
      format.validate(JsonPath(), BigDecimal.decimal(Float.MinValue)).isSuccess mustBe true
    }
  }

  "Double format" should {
    val format = GeneratableNumberFormats.defaultFormats("double")

    "fail for numbers out of range" in {
      format.validate(JsonPath(), BigDecimal.decimal(Double.MaxValue) + BigDecimal(1)).isSuccess mustBe false
      format.validate(JsonPath(), BigDecimal.decimal(Double.MaxValue)).isSuccess mustBe true
      format.validate(JsonPath(), BigDecimal.decimal(Double.MinValue) - BigDecimal(1)).isSuccess mustBe false
      format.validate(JsonPath(), BigDecimal.decimal(Double.MinValue)).isSuccess mustBe true
    }
  }
} 
Example 67
Source File: ValidatingReadsSpec.scala    From swagger-check   with MIT License 5 votes vote down vote up
package de.leanovate.swaggercheck.schema.play

import de.leanovate.swaggercheck.schema.model.DefaultSchema
import de.leanovate.swaggercheck.schema.play.Implicits._
import de.leanovate.swaggercheck.schema.play.model.ProductModel
import org.scalatest.{MustMatchers, WordSpec}
import play.api.libs.json.{JsError, Json, Reads}

class ValidatingReadsSpec extends WordSpec with MustMatchers {
  val schema: DefaultSchema = Json
    .parse(getClass.getClassLoader.getResourceAsStream("schema/simple1.json"))
    .as[DefaultSchema]

  val atLeastOneTagRead: Reads[Seq[ProductModel]] =
    ValidatingReads.validating[Seq[ProductModel]](schema)

  "ValidatingReads" should {
    "reject invalid json input" in {
      val json = Json.parse("""[
        |    {
        |        "id": 12345678,
        |        "name": "thename",
        |        "price": 1234.67,
        |        "tags": []
        |    }
        |]""".stripMargin)

      val result = json.validate(atLeastOneTagRead)

      result mustBe a[JsError]
    }
  }
} 
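For contrast, input that satisfies the schema should validate successfully. A hedged companion test that could sit inside the spec above; it assumes, going by the read's name, that the only violation in the failing example was the empty tags array:

    "accept json input that satisfies the schema" in {
      val json = Json.parse("""[
        |    {
        |        "id": 12345678,
        |        "name": "thename",
        |        "price": 1234.67,
        |        "tags": ["one tag"]
        |    }
        |]""".stripMargin)

      val result = json.validate(atLeastOneTagRead)

      result.isSuccess mustBe true
    }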
Example 68
Source File: DefinitionFormatsSpec.scala    From swagger-check   with MIT License 5 votes vote down vote up
package de.leanovate.swaggercheck.schema.play

import de.leanovate.swaggercheck.schema.model.{Definition, ObjectDefinition, StringDefinition}
import org.scalatest.{MustMatchers, WordSpec}
import play.api.libs.json.{JsSuccess, Json}
import de.leanovate.swaggercheck.schema.play.Implicits._

class DefinitionFormatsSpec extends WordSpec with MustMatchers {
  "DefinitionFormats" should {
    "deserialize object_definition" in {
      val json = Json.parse(getClass.getClassLoader.getResourceAsStream("object_definition.json"))

      val JsSuccess(definition, _) = json.validate[Definition]
      val ObjectDefinition(required, properties, additionalProperties) = definition

      required mustBe Some(Set("field1"))
      properties mustBe Some(Map("field1" -> StringDefinition(None, None, None, None, None)))
      additionalProperties mustBe Left(true)
    }

  }
} 
Example 69
Source File: FutureResultsSpec.scala    From swagger-check   with MIT License 5 votes vote down vote up
package de.leanovate.swaggercheck.playhelper

import org.scalatest.{MustMatchers, WordSpec}
import play.api.mvc.Results

import scala.concurrent.Future

class FutureResultsSpec extends WordSpec with MustMatchers {
  "FutureResults" should {
    "extract data from a play Future[Result]" in {
      val result = Results.Status(202)("{}").withHeaders("some" -> "header", "something" -> "else")
      val futureResult = Future.successful(result)

      FutureResults.responseExtractor.status(futureResult) mustBe 202
      FutureResults.responseExtractor.body(futureResult) mustBe "{}"
      FutureResults.responseExtractor.headers(futureResult) mustBe Map(
        "some" -> "header",
        "something" -> "else")
    }
  }
} 
Example 70
Source File: FakeRequestsSpec.scala    From swagger-check   with MIT License 5 votes vote down vote up
package de.leanovate.swaggercheck.playhelper

import de.leanovate.swaggercheck.playhelper
import de.leanovate.swaggercheck.shrinkable.CheckJsObject
import org.scalatest.{MustMatchers, WordSpec}

class FakeRequestsSpec extends WordSpec with MustMatchers {
  "FakeRequests" should {
    "create an empty FakeRequest" in {
      val request = playhelper.requestCreator.createEmpty("GET", "/the/uri", Seq("header1" -> "value1", "header2" -> "value2"))

      request.method mustBe "GET"
      request.uri mustBe "/the/uri"
      request.headers.get("header1") mustBe Some("value1")
      request.headers.get("header2") mustBe Some("value2")
    }

    "create a FakeRequest with body" in {
      val request = playhelper.requestCreator.createJson("POST", "/the/uri", Seq("header1" -> "value1", "header2" -> "value2"), CheckJsObject.empty)

      request.method mustBe "POST"
      request.uri mustBe "/the/uri"
      request.headers.get("header1") mustBe Some("value1")
      request.headers.get("header2") mustBe Some("value2")
      request.body mustBe "{}"
    }
  }
} 
Example 71
Source File: OperationResponseSpec.scala    From swagger-check   with MIT License 5 votes vote down vote up
package de.leanovate.swaggercheck.schema

import de.leanovate.swaggercheck.SwaggerChecks
import de.leanovate.swaggercheck.schema.model.{Definition, JsonPath, ValidationResult}
import de.leanovate.swaggercheck.shrinkable.{CheckJsString, CheckJsValue}
import org.mockito.ArgumentMatchers._
import org.mockito.Mockito._
import org.scalatest.{MustMatchers, WordSpec}
import org.scalatestplus.mockito.MockitoSugar

class OperationResponseSpec extends WordSpec with MustMatchers with MockitoSugar {
  "OperationResponse" should {
    "verify response body" in {
      val swaggerChecks = mock[SwaggerChecks]
      val bodySchema = mock[Definition]
      val response = OperationResponse(Some(bodySchema), Seq.empty)

      when(bodySchema.validate(any(), any(), any())(any())).thenReturn(ValidationResult.success)

      response.verify(swaggerChecks, Map.empty, "{}").isSuccess mustBe true

      verify(bodySchema).validate(swaggerChecks, JsonPath(), CheckJsValue.parse("{}"))(CheckJsValue.Adapter)
    }

    "verify response headers" in {
      val swaggerChecks = mock[SwaggerChecks]
      val headerSchema = mock[Definition]
      val response = OperationResponse(None, Seq("some header" -> headerSchema))

      when(headerSchema.validate(any(), any(), any())(any())).thenReturn(ValidationResult.success)

      response.verify(swaggerChecks, Map.empty, "{}").isSuccess mustBe true

      verifyZeroInteractions(headerSchema)

      response.verify(swaggerChecks, Map("some header" -> "something"), "{}").isSuccess mustBe true

      verify(headerSchema).validate[CheckJsValue](swaggerChecks, JsonPath(), CheckJsString.formatted("something"))(CheckJsValue.Adapter)
    }
  }
} 
Example 72
Source File: SchemaModuleSpec.scala    From swagger-check   with MIT License 5 votes vote down vote up
package de.leanovate.swaggercheck.schema.jackson

import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import de.leanovate.swaggercheck.schema.model.{StringDefinition, ObjectDefinition, Definition}
import org.scalatest.{MustMatchers, WordSpec}

class SchemaModuleSpec extends WordSpec with MustMatchers {
  val mapper = new ObjectMapper().registerModule(DefaultScalaModule).registerModule(JsonSchemaModule)

  "SchemaModule" should {
    "deserialize object_definition" in {
      val ObjectDefinition(required, properties, additionalProperties) = mapper.readValue(getClass.getClassLoader.getResource("object_definition.json"), classOf[Definition])

      required mustBe Some(Set("field1"))
      properties mustBe Some(Map("field1" -> StringDefinition(None, None, None, None, None)))
      additionalProperties mustBe Left(true)
    }
  }
} 
Example 73
Source File: ArrayDefinitionSpec.scala    From swagger-check   with MIT License 5 votes vote down vote up
package de.leanovate.swaggercheck.schema.model

import org.mockito.Mockito._
import org.scalatest.{MustMatchers, WordSpec}
import org.scalatestplus.mockito.MockitoSugar

class ArrayDefinitionSpec extends WordSpec with MockitoSugar with MustMatchers {
  "ArrayDefinition" should {
    "accept any array if no item definition is set" in {
      val path = JsonPath("jsonpath")
      val node = TestNode(array = Some(Seq(TestNode(), TestNode())))
      val schema = mock[Schema]

      val definition = ArrayDefinition(None, None, None)

      val result = definition.validate(schema, path, node)

      result mustBe ValidationSuccess
    }

    "succeed if item definition succeeds on all elements" in {
      val path = JsonPath("jsonpath")
      val item1 = TestNode()
      val item2 = TestNode()
      val node = TestNode(array = Some(Seq(item1, item2)))
      val schema = mock[Schema]
      val itemDefinition = mock[Definition]

      when(itemDefinition.validate(schema, path.index(0), item1)).thenReturn(ValidationSuccess)
      when(itemDefinition.validate(schema, path.index(1), item2)).thenReturn(ValidationSuccess)

      val definition = ArrayDefinition(None, None, Some(itemDefinition))

      definition.validate(schema, path, node) mustBe ValidationSuccess

      verify(itemDefinition).validate(schema, path.index(0), item1)
      verify(itemDefinition).validate(schema, path.index(1), item2)
    }

    "fail if item definition fails on one element" in {
      val path = JsonPath("jsonpath")
      val item1 = TestNode()
      val item2 = TestNode()
      val node = TestNode(array = Some(Seq(item1, item2)))
      val schema = mock[Schema]
      val itemDefinition = mock[Definition]

      when(itemDefinition.validate(schema, path.index(0), item1)).thenReturn(ValidationResult.error("error"))
      when(itemDefinition.validate(schema, path.index(1), item2)).thenReturn(ValidationSuccess)

      val definition = ArrayDefinition(None, None, Some(itemDefinition))

      val ValidationFailure(result) = definition.validate(schema, path, node)

      result must have size 1
      result.head mustBe "error"
    }

    "fail if array has less then minItems" in {
      val path = JsonPath("jsonpath")
      val node = TestNode(array = Some(Seq(TestNode(), TestNode())))
      val schema = mock[Schema]

      val definition = ArrayDefinition(Some(3), None, None)

      val ValidationFailure(result) = definition.validate(schema, path, node)

      result must have size 1
      result.head must endWith("should have at least 3 items in path jsonpath")
    }

    "fail if array has more then maxItems" in {
      val path = JsonPath("jsonpath")
      val node = TestNode(array = Some(Seq(TestNode(), TestNode())))
      val schema = mock[Schema]

      val definition = ArrayDefinition(None, Some(1), None)

      val ValidationFailure(result) = definition.validate(schema, path, node)

      result must have size 1
      result.head must endWith("should have at least 1 items in path jsonpath")
    }

    "fail validation on everything that is not an array" in {
      val path = JsonPath("jsonpath")
      val node = TestNode()
      val schema = mock[Schema]

      val definition = ArrayDefinition(None, None, None)

      val ValidationFailure(result) = definition.validate(schema, path, node)

      result must have size 1
      result.head must endWith("should be an array in path jsonpath")
    }
  }
} 
Example 74
Source File: StringFormatsSpec.scala    From swagger-check   with MIT License 5 votes vote down vote up
package de.leanovate.swaggercheck.schema.model.formats

import de.leanovate.swaggercheck.schema.model.JsonPath
import org.scalatest.{MustMatchers, WordSpec}

class StringFormatsSpec extends WordSpec with MustMatchers {
  "URL string format" should {
    val format = StringFormats.defaultFormats("url")

    "be valid for urls" in {
      format.validate(JsonPath(), "http://localhost/something").isSuccess mustBe true
      format.validate(JsonPath(), "http://localhost:8080/something?query=param").isSuccess mustBe true
    }

    "fail for non-urls" in {
      format.validate(JsonPath(), "something").isSuccess mustBe false
    }
  }

  "URI string format" should {
    val format = StringFormats.defaultFormats("uri")

    "be valid for uris" in {
      format.validate(JsonPath(), "/something").isSuccess mustBe true
      format.validate(JsonPath(), "http://localhost:8080/something?query=param").isSuccess mustBe true
    }

    "fail for non-uris" in {
      format.validate(JsonPath(), ":?something").isSuccess mustBe false
    }
  }

  "UUID string format" should {
    val format = StringFormats.defaultFormats("uuid")

    "be valid for uuids" in {
      format.validate(JsonPath(), "2df6e079-4028-4aa5-9bdb-bb59a314cdad").isSuccess mustBe true
      format.validate(JsonPath(), "864C67DF-51BB-4688-8A5B-105EC5FDD1D2").isSuccess mustBe true
    }

    "fail for non-uuids" in {
      format.validate(JsonPath(), "864C67DF-51BB-4688").isSuccess mustBe false
    }
  }

  "Email string format" should {
    val format = StringFormats.defaultFormats("email")

    "be valid for emails" in {
      format.validate(JsonPath(), "[email protected]").isSuccess mustBe true
      format.validate(JsonPath(), "[email protected]").isSuccess mustBe true
    }

    "fail for non-emails" in {
      format.validate(JsonPath(), "someone").isSuccess mustBe false
    }
  }

  "Date string format" should {
    val format = StringFormats.defaultFormats("date")

    "be valid for dates" in {
      format.validate(JsonPath(), "1856-12-20").isSuccess mustBe true
      format.validate(JsonPath(), "2320-01-30").isSuccess mustBe true
    }

    "fail for non-dates" in {
      format.validate(JsonPath(), "23200130").isSuccess mustBe false
      format.validate(JsonPath(), "2320-01-50").isSuccess mustBe false
    }
  }

  "DateTime string format" should {
    val format = StringFormats.defaultFormats("date-time")

    "be valid for datetimes" in {
      format.validate(JsonPath(), "1856-12-20T12:34:56").isSuccess mustBe true
      format.validate(JsonPath(), "2320-01-30T12:34:56.123").isSuccess mustBe true
      format.validate(JsonPath(), "1856-12-20T12:34:56Z").isSuccess mustBe true
      format.validate(JsonPath(), "2320-01-30T12:34:56.123Z").isSuccess mustBe true
      format.validate(JsonPath(), "1856-12-20T12:34:56+01:00").isSuccess mustBe true
      format.validate(JsonPath(), "2320-01-30T12:34:56.123+01:00").isSuccess mustBe true
    }

    "fail for non-datetimes" in {
      format.validate(JsonPath(), "2320013012:34:56").isSuccess mustBe false
      format.validate(JsonPath(), "2320-01-5012:34:56").isSuccess mustBe false
    }
  }
} 
Example 75
Source File: IntegerFormatsSpec.scala    From swagger-check   with MIT License 5 votes vote down vote up
package de.leanovate.swaggercheck.schema.model.formats

import de.leanovate.swaggercheck.schema.model.JsonPath
import org.scalatest.{MustMatchers, WordSpec}

class IntegerFormatsSpec extends WordSpec with MustMatchers {
  "Int32 format" should {
    val format = IntegerFormats.defaultFormats("int32")

    "fail for numbers out of 32-bit range" in {
      format.validate(JsonPath(), BigInt(Int.MaxValue) + BigInt(1)).isSuccess mustBe false
      format.validate(JsonPath(), BigInt(Int.MaxValue)).isSuccess mustBe true
      format.validate(JsonPath(), BigInt(Int.MinValue) - BigInt(1)).isSuccess mustBe false
      format.validate(JsonPath(), BigInt(Int.MinValue)).isSuccess mustBe true
    }
  }

  "Int64 format" should {
    val format = IntegerFormats.defaultFormats("int64")

    "fail for numbers out of 64-bit range" in {
      format.validate(JsonPath(), BigInt(Long.MaxValue) + BigInt(1)).isSuccess mustBe false
      format.validate(JsonPath(), BigInt(Long.MaxValue)).isSuccess mustBe true
      format.validate(JsonPath(), BigInt(Long.MinValue) - BigInt(1)).isSuccess mustBe false
      format.validate(JsonPath(), BigInt(Long.MinValue)).isSuccess mustBe true
    }
  }
} 
Example 76
Source File: NumberFormatsSpec.scala    From swagger-check   with MIT License 5 votes vote down vote up
package de.leanovate.swaggercheck.schema.model.formats

import de.leanovate.swaggercheck.schema.model.JsonPath
import org.scalatest.{MustMatchers, WordSpec}

class NumberFormatsSpec extends WordSpec with MustMatchers {
  "Float format" should {
    val format = NumberFormats.defaultFormats("float")

    "fail for numbers out of range" in {
      format.validate(JsonPath(), BigDecimal.decimal(Float.MaxValue) + BigDecimal(1)).isSuccess mustBe false
      format.validate(JsonPath(), BigDecimal.decimal(Float.MaxValue)).isSuccess mustBe true
      format.validate(JsonPath(), BigDecimal.decimal(Float.MinValue) - BigDecimal(1)).isSuccess mustBe false
      format.validate(JsonPath(), BigDecimal.decimal(Float.MinValue)).isSuccess mustBe true
    }
  }

  "Double format" should {
    val format = NumberFormats.defaultFormats("double")

    "fail for numbers out of range" in {
      format.validate(JsonPath(), BigDecimal.decimal(Double.MaxValue) + BigDecimal(1)).isSuccess mustBe false
      format.validate(JsonPath(), BigDecimal.decimal(Double.MaxValue)).isSuccess mustBe true
      format.validate(JsonPath(), BigDecimal.decimal(Double.MinValue) - BigDecimal(1)).isSuccess mustBe false
      format.validate(JsonPath(), BigDecimal.decimal(Double.MinValue)).isSuccess mustBe true
    }
  }
} 
Example 77
Source File: IntegerDefinitionSpec.scala    From swagger-check   with MIT License 5 votes vote down vote up
package de.leanovate.swaggercheck.schema.model

import de.leanovate.swaggercheck.schema.model.formats.ValueFormat
import org.mockito.Mockito._
import org.scalatestplus.mockito.MockitoSugar
import org.scalatest.{MustMatchers, WordSpec}

class IntegerDefinitionSpec extends WordSpec with MockitoSugar with MustMatchers {
  "IntegerDefinition" should {
    "accept any integer if no format or range is defined" in {
      val path = JsonPath("jsonpath")
      val node = TestNode(integer = Some(BigInt(Long.MaxValue) + 12345))
      val schema = mock[Schema]

      val definition = IntegerDefinition(None, None, None)

      definition.validate(schema, path, node) mustBe ValidationSuccess
    }

    "accept values that match the defined format" in {
      val path = JsonPath("jsonpath")
      val node = TestNode(integer = Some(BigInt(12345)))
      val schema = mock[Schema]
      val format = mock[ValueFormat[BigInt]]

      when(schema.findIntegerFormat("theformat")).thenReturn(Some(format))
      when(format.validate(path, BigInt(12345))).thenReturn(ValidationResult.success)

      val definition = IntegerDefinition(Some("theformat"), None, None)

      definition.validate(schema, path, node) mustBe ValidationSuccess

      verify(schema).findIntegerFormat("theformat")
      verify(format).validate(path, BigInt(12345))
    }

    "fail validation if value is less than minimum" in {
      val path = JsonPath("jsonpath")
      val node = TestNode(integer = Some(BigInt(12345)))
      val schema = mock[Schema]

      val definition = IntegerDefinition(None, Some(BigInt(123456)), None)

      val ValidationFailure(result) = definition.validate(schema, path, node)

      result must have size 1
      result.head must endWith("has to be greater than 123456 in path jsonpath")
    }

    "fail validation if value is greater than maximum" in {
      val path = JsonPath("jsonpath")
      val node = TestNode(integer = Some(BigInt(123456)))
      val schema = mock[Schema]

      val definition = IntegerDefinition(None, None, Some(BigInt(12345)))

      val ValidationFailure(result) = definition.validate(schema, path, node)

      result must have size 1
      result.head must endWith("has to be less than 12345 in path jsonpath")
    }

    "fail validation on everything that is not an integer" in {
      val path = JsonPath("jsonpath")
      val node = TestNode()
      val schema = mock[Schema]

      val definition = IntegerDefinition(None, None, None)

      val ValidationFailure(result) = definition.validate(schema, path, node)

      result must have size 1
      result.head must endWith("should be an integer in path jsonpath")
    }
  }
} 
Example 78
Source File: ReferenceDefinitionSpec.scala    From swagger-check   with MIT License 5 votes vote down vote up
package de.leanovate.swaggercheck.schema.model

import org.scalatest.{MustMatchers, WordSpec}
import org.mockito.Mockito._
import org.scalatestplus.mockito.MockitoSugar

class ReferenceDefinitionSpec extends WordSpec with MockitoSugar with MustMatchers {
  "ReferenceDefinition" should {
    "delegate validation to referenced definition" in {
      val path = JsonPath("jsonpath")
      val node = TestNode()
      val schema = mock[Schema]
      val referencedDefinition = mock[Definition]

      when(schema.findByRef("reference")).thenReturn(Some(referencedDefinition))
      when(referencedDefinition.validate(schema, path, node)).thenReturn(ValidationResult.error("error1"))

      val definition = ReferenceDefinition("reference")

      val ValidationFailure(result) = definition.validate(schema, path, node)

      result must have size 1
      result.head mustBe "error1"
    }

    "fail validation if referenced definition does not exists" in {
      val path = JsonPath("jsonpath")
      val node = TestNode()
      val schema = mock[Schema]

      when(schema.findByRef("reference")).thenReturn(None)

      val definition = ReferenceDefinition("reference")

      val ValidationFailure(result) = definition.validate(schema, path, node)

      result must have size 1
      result.head mustBe "Referenced definition does not exists: reference"
    }
  }
} 
Example 79
Source File: NumberDefinitionSpec.scala    From swagger-check   with MIT License 5 votes vote down vote up
package de.leanovate.swaggercheck.schema.model

import de.leanovate.swaggercheck.schema.model.formats.ValueFormat
import org.mockito.Mockito._
import org.scalatest.{MustMatchers, WordSpec}
import org.scalatestplus.mockito.MockitoSugar

class NumberDefinitionSpec extends WordSpec with MockitoSugar with MustMatchers {
  "NumberDefinition" should {
    "accept any integer if no format or range is defined" in {
      val path = JsonPath("jsonpath")
      val node = TestNode(number = Some(BigDecimal(Long.MaxValue) + 12345))
      val schema = mock[Schema]

      val definition = NumberDefinition(None, None, None)

      definition.validate(schema, path, node) mustBe ValidationSuccess
    }

    "accept values that match the defined format" in {
      val path = JsonPath("jsonpath")
      val node = TestNode(number = Some(BigDecimal(12345.67)))
      val schema = mock[Schema]
      val format = mock[ValueFormat[BigDecimal]]

      when(schema.findNumberFormat("theformat")).thenReturn(Some(format))
      when(format.validate(path, BigDecimal(12345.67))).thenReturn(ValidationResult.success)

      val definition = NumberDefinition(Some("theformat"), None, None)

      definition.validate(schema, path, node) mustBe ValidationSuccess

      verify(schema).findNumberFormat("theformat")
      verify(format).validate(path, BigDecimal(12345.67))
    }

    "fail validation if value is less than minimum" in {
      val path = JsonPath("jsonpath")
      val node = TestNode(number = Some(BigDecimal(12345.6)))
      val schema = mock[Schema]

      val definition = NumberDefinition(None, Some(BigDecimal(123456.7)), None)

      val ValidationFailure(result) = definition.validate(schema, path, node)

      result must have size 1
      result.head must endWith("has to be greater than 123456.7 in path jsonpath")
    }

    "fail validation if value is greater than maximum" in {
      val path = JsonPath("jsonpath")
      val node = TestNode(number = Some(BigDecimal(123456.7)))
      val schema = mock[Schema]

      val definition = NumberDefinition(None, None, Some(BigDecimal(12345.6)))

      val ValidationFailure(result) = definition.validate(schema, path, node)

      result must have size 1
      result.head must endWith("has to be less than 12345.6 in path jsonpath")
    }

    "fail validation on everything that is not an integer" in {
      val path = JsonPath("jsonpath")
      val node = TestNode()
      val schema = mock[Schema]

      val definition = NumberDefinition(None, None, None)

      val ValidationFailure(result) = definition.validate(schema, path, node)

      result must have size 1
      result.head must endWith("should be a number in path jsonpath")
    }
  }
} 
Example 80
Source File: EmptyDefinitionSpec.scala    From swagger-check   with MIT License 5 votes vote down vote up
package de.leanovate.swaggercheck.schema.model

import org.scalatestplus.mockito.MockitoSugar
import org.scalatest.{MustMatchers, WordSpec}

class EmptyDefinitionSpec extends WordSpec with MockitoSugar with MustMatchers {
  "EmptyDefinition" should {
    "validate anything" in {
      val path = JsonPath("jsonpath")
      val node = TestNode()
      val schema = mock[Schema]

      val definition = EmptyDefinition

      definition.validate(schema, path, node) mustBe ValidationSuccess
    }
  }
} 
Example 81
Source File: OneOfDefinitionSpec.scala    From swagger-check   with MIT License 5 votes vote down vote up
package de.leanovate.swaggercheck.schema.model

import org.mockito.Mockito._
import org.scalatestplus.mockito.MockitoSugar
import org.scalatest.{MustMatchers, WordSpec}

class OneOfDefinitionSpec extends WordSpec with MockitoSugar with MustMatchers {
  "OneOfDefinition" should {
    "succeed validation if one child succeed" in {
      val definition1 = mock[Definition]
      val definition2 = mock[Definition]
      val definition3 = mock[Definition]
      val schema = mock[Schema]
      val path = JsonPath("path")
      val node = TestNode()

      when(definition1.validate(schema, path, node)).thenReturn(ValidationResult.error("error1"))
      when(definition2.validate(schema, path, node)).thenReturn(ValidationResult.success)
      when(definition3.validate(schema, path, node)).thenReturn(ValidationResult.error("error2"))

      val definition = OneOfDefinition(Seq(definition1, definition2, definition3))

      definition.validate(schema, path, node) mustBe ValidationSuccess

      verify(definition1).validate(schema, path, node)
      verify(definition2).validate(schema, path, node)
      verify(definition3).validate(schema, path, node)
    }

    "fail validation if one child fails" in {
      val definition1 = mock[Definition]
      val definition2 = mock[Definition]
      val definition3 = mock[Definition]
      val schema = mock[Schema]
      val path = JsonPath("path")
      val node = TestNode()

      when(definition1.validate(schema, path, node)).thenReturn(ValidationResult.error("error1"))
      when(definition2.validate(schema, path, node)).thenReturn(ValidationResult.error("error2"))
      when(definition3.validate(schema, path, node)).thenReturn(ValidationResult.error("error3"))

      val definition = OneOfDefinition(Seq(definition1, definition2, definition3))

      val ValidationFailure(result) = definition.validate(schema, path, node)

      result mustBe Seq("error1", "error2", "error3")
    }
  }
} 
Example 82
Source File: AllOfDefinitionSpec.scala    From swagger-check   with MIT License 5 votes vote down vote up
package de.leanovate.swaggercheck.schema.model

import org.scalatestplus.mockito.MockitoSugar
import org.scalatest.{MustMatchers, WordSpec}
import org.mockito.Mockito._

class AllOfDefinitionSpec extends WordSpec with MockitoSugar with MustMatchers {
  "AllOfDefinition" should {
    "succeed validation if all children succeed" in {
      val definition1 = mock[Definition]
      val definition2 = mock[Definition]
      val definition3 = mock[Definition]
      val schema = mock[Schema]
      val path = JsonPath("path")
      val node = TestNode()

      when(definition1.validate(schema, path, node)).thenReturn(ValidationResult.success)
      when(definition2.validate(schema, path, node)).thenReturn(ValidationResult.success)
      when(definition3.validate(schema, path, node)).thenReturn(ValidationResult.success)

      val definition = AllOfDefinition(Seq(definition1, definition2, definition3))

      definition.validate(schema, path, node) mustBe ValidationSuccess

      verify(definition1).validate(schema, path, node)
      verify(definition2).validate(schema, path, node)
      verify(definition3).validate(schema, path, node)
    }

    "fail validation if one child fails" in {
      val definition1 = mock[Definition]
      val definition2 = mock[Definition]
      val definition3 = mock[Definition]
      val schema = mock[Schema]
      val path = JsonPath("path")
      val node = TestNode()

      when(definition1.validate(schema, path, node)).thenReturn(ValidationResult.success)
      when(definition2.validate(schema, path, node)).thenReturn(ValidationResult.error("error"))
      when(definition3.validate(schema, path, node)).thenReturn(ValidationResult.success)

      val definition = AllOfDefinition(Seq(definition1, definition2, definition3))

      val result = definition.validate(schema, path, node)

      result mustBe ValidationResult.error("error")
    }
  }
} 
Example 83
Source File: BooleanDefinitionSpec.scala    From swagger-check   with MIT License 5 votes vote down vote up
package de.leanovate.swaggercheck.schema.model

import org.scalatestplus.mockito.MockitoSugar
import org.scalatest.{MustMatchers, WordSpec}

class BooleanDefinitionSpec extends WordSpec with MockitoSugar with MustMatchers {
  "BooleanDefinition" should {
    "succeed on any boolean value" in {
      val path = JsonPath("jsonpath")
      val node = TestNode(boolean = Some(true))
      val schema = mock[Schema]

      val definition = BooleanDefinition

      definition.validate(schema, path, node)  mustBe ValidationSuccess
    }

    "fail validation on everything that is not a boolean" in {
      val path = JsonPath("jsonpath")
      val node = TestNode()
      val schema = mock[Schema]

      val definition = BooleanDefinition

      val ValidationFailure(result) = definition.validate(schema, path, node)

      result must have size 1
      result.head must endWith("should be a boolean in path jsonpath")
    }
  }
} 
Example 84
Source File: JsonPathSpec.scala    From swagger-check   with MIT License 5 votes vote down vote up
package de.leanovate.swaggercheck.schema.model

import org.scalatest.{MustMatchers, WordSpec}

class JsonPathSpec extends WordSpec with MustMatchers {
  "JsonPath" should {
    "has toString" in {
      val jsonPath = JsonPath("the.path")

      jsonPath.toString mustBe "the.path"
    }

    "concat fields names and indexes" in {
      val root = JsonPath()
      val sub1 = root.field("field1")
      val sub2 = sub1.field("field2")
      val sub3 = sub2.index(10)
      val sub4 = sub3.field("field3")

      root.toString mustBe ""
      sub1.toString mustBe "field1"
      sub2.toString mustBe "field1.field2"
      sub3.toString mustBe "field1.field2[10]"
      sub4.toString mustBe "field1.field2[10].field3"
    }
  }
} 
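
The builder behaviour these assertions pin down is simple enough to restate. Below is a minimal sketch of the implied string accumulation, assuming JsonPath simply concatenates a representation as it goes; JsonPathSketch and its repr field are hypothetical names, not the swagger-check source.

case class JsonPathSketch(repr: String = "") {
  def field(name: String): JsonPathSketch =
    JsonPathSketch(if (repr.isEmpty) name else s"$repr.$name")

  def index(i: Int): JsonPathSketch =
    JsonPathSketch(s"$repr[$i]")

  override def toString: String = repr
}

// JsonPathSketch().field("field1").field("field2").index(10).toString == "field1.field2[10]"
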
Example 85
Source File: FlakyCommandTest.scala    From sbt-flaky   with Apache License 2.0 5 votes vote down vote up
package flaky

import java.io.File

import flaky.history.{Git, History}
import org.scalatest.{Matchers, WordSpec}
import sbt.FileFilter

class FlakyCommandTest extends WordSpec with Unzip with Matchers {

  private val zippedGitRepo = new File("./src/test/resources", "gitrepo.zip")
  private val unzippedGitDir = new File("target/")

  val log = new DummySbtLogger()

  "FlakyCommandTest" should {

    "createHtmlReports" in {
      //Goal of this test is also to generate report for visual check
      val reportDir = new File("./target/history8/20170523-231535")
      unzip(new File("./src/test/resources/history8/20170523-231535.zip"), reportDir)
      val dirs: Array[String] = reportDir.listFiles(new FileFilter {
        override def accept(pathname: File): Boolean = pathname.isDirectory
      }).map(_.getName)

      val history = new History("Project x", new File("./src/test/resources/history8/"), new File(""), new File("."))
      val historyReport1 = history.createHistoryReport()
      val timeDetails = TimeDetails(System.currentTimeMillis() - 9000000L, System.currentTimeMillis())
      val report = Flaky.createReport("Project X", timeDetails, dirs.toList, reportDir)

      unzip(zippedGitRepo, unzippedGitDir)
      val git = Git(new File(unzippedGitDir, "gitrepo/"))
      val htmlReportDir = new File("./target/example-report")
      FlakyCommand.createHtmlReports("Project x", report, Some(historyReport1), htmlReportDir, git, log)

      new File(htmlReportDir,"index.html").exists shouldBe true
      new File(htmlReportDir,"flaky-report.html").exists shouldBe true
      new File(htmlReportDir,"flaky-report-history.html").exists shouldBe true
    }

  }
} 
Example 86
Source File: FlakyTestReportSpec.scala    From sbt-flaky   with Apache License 2.0 5 votes vote down vote up
package flaky

import org.scalatest.{Matchers, WordSpec}

class FlakyTestReportSpec extends WordSpec with Matchers {

  private val test = Test("", "")
  private val timeDetails = TimeDetails(0, 0)
  private val someFailureDetails = Some(FailureDetails("", "", ""))
  private val testCase = TestCase("", test, 0, someFailureDetails)
  private val flakyTest = FlakyTest(
    test,
    10,
    List(
      testCase
    )
  )
  private val testRuns: List[TestRun] = (0 until 10).map(i => TestRun(s"a$i", List(testCase))).toList

  "FlakyTestReportSpec" should {

    "successProbabilityPercent with one test" in {

      val report: FlakyTestReport = FlakyTestReport(
        "",
        timeDetails,
        testRuns,
        List(flakyTest)
      )
      report.successProbabilityPercent() shouldBe 90.0
    }

    "successProbabilityPercent with two tests" in {

      val report: FlakyTestReport = FlakyTestReport(
        "",
        timeDetails,
        testRuns,
        List(flakyTest, flakyTest, FlakyTest(test, 10, List.empty))
      )

      val fl: Float = report.successProbabilityPercent()
      fl.toDouble shouldBe (81.0 +- 2)
    }

    "successProbabilityPercent with no flaky tests" in {

      val report: FlakyTestReport = FlakyTestReport(
        "",
        timeDetails,
        testRuns,
        List(FlakyTest(test, 10, List.empty))
      )
      report.successProbabilityPercent() shouldBe 100.0
    }

    "successProbabilityPercent with  \"(It is not a test)\"" in {
      val thisIsNotATest: Test = Test("A", "(It is not a test)")

      val report: FlakyTestReport = FlakyTestReport(
        "",
        timeDetails,
        testRuns,
        List(
          FlakyTest(thisIsNotATest, 1, List.empty),
          FlakyTest(test, 9, List.empty)
        )
      )
      report.successProbabilityPercent() shouldBe 90.0
    }
  }
} 
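
A hedged reading of the first three expectations: each FlakyTest contributes a success rate of (runs - failures) / runs, and the report multiplies the per-test rates together. The final "(It is not a test)" case suggests crashed runs get additional run-level accounting, which this note does not try to model.

// One test failing 1 of 10 runs:
val single = (10 - 1) / 10.0 * 100    // 90.0, matching the first expectation
// Two such tests plus one that never fails:
val combined = 0.9 * 0.9 * 1.0 * 100  // 81.0, within the 81.0 +- 2 assertion
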
Example 87
Source File: TimeReportSpec.scala    From sbt-flaky   with Apache License 2.0 5 votes vote down vote up
package flaky

import org.scalatest.{Matchers, WordSpec}

class TimeReportSpec extends WordSpec with Matchers {
  "TimeReport" should {

    "estimate how long 5 more runs will take" in {
      TimeReport(10, 60*1000L).estimate(5) shouldBe "0m 30s"
    }

    "estimate how many runs will do in 60 seconds" in {
      TimeReport(5, 30*1000L).estimateCountIn(60*1000L) shouldBe "10 times"
    }

    "format time less than minute to human readable format" in {
      TimeReport.formatSeconds(4) shouldBe "0m 4s"
    }

    "format time more than minute to human readable format" in {
      TimeReport.formatSeconds(64) shouldBe "1m 4s"
    }

    "format minute to human readable format" in {
      TimeReport.formatSeconds(60) shouldBe "1m 0s"
    }

  }
} 
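
The arithmetic behind these expectations is easy to reproduce. The helpers below are a hedged sketch (hypothetical names, assuming the average run time is total duration divided by run count), not the sbt-flaky source.

def formatSecondsSketch(seconds: Long): String = s"${seconds / 60}m ${seconds % 60}s"

def estimateSketch(runs: Int, durationMs: Long, moreRuns: Int): String =
  formatSecondsSketch(durationMs / runs * moreRuns / 1000)    // average run time x extra runs

def estimateCountInSketch(runs: Int, durationMs: Long, budgetMs: Long): String =
  s"${budgetMs / (durationMs / runs)} times"                  // time budget / average run time

// estimateSketch(10, 60 * 1000L, 5)                == "0m 30s"
// estimateCountInSketch(5, 30 * 1000L, 60 * 1000L) == "10 times"
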
Example 88
Source File: package$Spec.scala    From sbt-flaky   with Apache License 2.0 5 votes vote down vote up
package flaky

import org.scalatest.{Matchers, WordSpec}

class package$Spec extends WordSpec with Matchers {

  "findCommonString" should {
    "return empty for empty list" in {
      findCommonString(List.empty[String]) shouldBe None
    }

    "return element from list for list of 1 element" in {
      findCommonString(List("abc")) shouldBe Some("abc")
    }

    "return element from list for list of the same strings" in {
      findCommonString(List("abc", "abc", "abc")) shouldBe Some("abc")
    }

    "return element part from one character strings when it's at the beginning" in {
      findCommonString(List("abc", "a")) shouldBe Some("a__")
    }

    "return element part from one character strings when it's in the middle" in {
      findCommonString(List("abc", "b")) shouldBe Some("_b_")
    }

    "return element part from one character strings when it's at the end" in {
      findCommonString(List("abc", "c")) shouldBe Some("__c")
    }

    "return common element part of 2 strings - difference at end" in {
      findCommonString(List("abc", "abd")) shouldBe Some("ab_")
    }

    "return common element part of 2 strings - difference at start" in {
      findCommonString(List("abc", "Abc")) shouldBe Some("_bc")
    }

    "return common element part of 2 strings - difference int the middle" in {
      findCommonString(List("abc", "aBc")) shouldBe Some("a_c")
    }

    "return common element part of 3 strings with 1 difference" in {
      findCommonString(List("abcde", "abXde","abcde" )) shouldBe Some("ab_de")
    }

    "return common element part of 3 strings with 2 differences" in {
      findCommonString(List("abcde", "abXde","abcdX" )) shouldBe Some("ab_d_")
    }

    "fill missing chars inside the string" in {
      findCommonString(List("abcccde", "abXde")) shouldBe Some("ab___de")
    }

    "return common element part of 3 strings with different length 1" in {
      findCommonString(List("abcdefg", "abcdefg","abcdefgh")) shouldBe Some("abcdefg_")
    }

    "return common element part of 3 strings with different length 2" in {
      findCommonString(List("0abcdefg", "abcdefg","abcdefg")) shouldBe Some("_abcdefg")
    }

    "process real life case" in {
      val input = """java.lang.AssertionError: assertion failed: expected User(Name(Kowalski),List(),List(),List()), found User(Name(Kowalski),List(Property(P1(3908304518889162941),P2(xxx),P3(X),192838475652)),List(),List())
                    |java.lang.AssertionError: assertion failed: expected User(Name(Kowalski),List(),List(),List()), found User(Name(Kowalski),List(Property(P1(4066995287767169607),P2(xxx),P3(X),1223234)),List(),List())
                    |java.lang.AssertionError: assertion failed: expected User(Name(Kowalski),List(),List(),List()), found User(Name(Kowalski),List(Property(P1(3339977301001549636),P2(xxx),P3(X),654556765778)),List(),List())
                    |java.lang.AssertionError: assertion failed: expected User(Name(Kowalski),List(),List(),List()), found User(Name(Kowalski),List(Property(P1(220123700341947058),P2(xxx),P3(X),2333223)),List(),List())
                    |java.lang.AssertionError: assertion failed: expected User(Name(Kowalski),List(),List(),List()), found User(Name(Kowalski),List(Property(P1(2168806444424252285),P2(xxx),P3(X),988667678)),List(),List())
                    |java.lang.AssertionError: assertion failed: expected User(Name(Kowalski),List(),List(),List()), found User(Name(Kowalski),List(Property(P1(5918482956638044904),P2(xxx),P3(X),876866786787)),List(),List())
                    |java.lang.AssertionError: assertion failed: expected User(Name(Kowalski),List(),List(),List()), found User(Name(Kowalski),List(Property(P1(2848338480078734399),P2(xxx),P3(X),192838475652)),List(),List())""".stripMargin
      val commonS = "java.lang.AssertionError: assertion failed: expected User(Name(Kowalski),List(),List(),List()), found User(Name(Kowalski),List(Property(P1(___________________),P2(xxx),P3(X),____________)),List(),List())"

      findCommonString(input.lines.toList) shouldBe Some(commonS)
    }
  }

} 
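
For the equal-length cases, the masking these tests assert can be sketched positionally: keep a character where every string agrees, blank the column otherwise. This is an illustration only; the mixed-length cases ("abcccde" vs "abXde", or "abc" vs "b") show the real findCommonString also aligns the strings before masking, which this sketch does not attempt.

def maskCommonPositions(strings: List[String]): Option[String] = strings match {
  case Nil => None
  case head :: _ =>
    Some(head.indices.map { i =>
      // keep the column only if every string has the same character here
      if (strings.forall(s => i < s.length && s.charAt(i) == head(i))) head(i) else '_'
    }.mkString)
}

// maskCommonPositions(List("abcde", "abXde", "abcdX")) == Some("ab_d_")
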
Example 89
Source File: FailureDetailsSpec.scala    From sbt-flaky   with Apache License 2.0 5 votes vote down vote up
package flaky

import org.scalatest.{Matchers, WordSpec}

class FailureDetailsSpec extends WordSpec with Matchers {

  "FailureDetails" should {

    "find first non test framework stacktrace line" in {
      val stacktrace =
        """org.junit.ComparisonFailure: expected:&lt;00:00:00.00[2]&gt; but was:&lt;00:00:00.00[0]&gt;
          |	at org.junit.Assert.assertEquals(Assert.java:115)
          |	at org.junit.Assert.assertEquals(Assert.java:144)
          |	at java.lang.Thread.getStackTrace(Thread.java:1552)
          |	at scala.lang.Thread.getStackTrace(Thread.java:1552)
          |	at tests.DateFormattingTest.formatParallelTest(DateFormattingTest.java:27)
          |	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
          |	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
          |	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
          |	at java.lang.reflect.Method.invoke(Method.java:483)
          |	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:50)
          |	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
          |	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:47)
          |	at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)""".stripMargin

      val firstNonAssertStacktrace = FailureDetails("msg", "type", stacktrace).firstNonAssertStacktrace()

      firstNonAssertStacktrace shouldBe Some("	at tests.DateFormattingTest.formatParallelTest(DateFormattingTest.java:27)")
    }

    "remove message from stacktrace" in {
      val stacktrace =
        """org.junit.ComparisonFailure: expected:&lt;00:00:00.00[2]&gt; but was:&lt;00:00:00.00[0]&gt;
          |	at org.junit.Assert.assertEquals(Assert.java:115)
          |	at org.junit.Assert.assertEquals(Assert.java:144)
          |	at tests.DateFormattingTest.formatParallelTest(DateFormattingTest.java:27)
          |	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)""".stripMargin

      val expected =
        """
          |	at org.junit.Assert.assertEquals(Assert.java:115)
          |	at org.junit.Assert.assertEquals(Assert.java:144)
          |	at tests.DateFormattingTest.formatParallelTest(DateFormattingTest.java:27)
          |	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)""".stripMargin

      val w: FailureDetails = FailureDetails("msg", "type", stacktrace).withoutStacktraceMessage()

      w.stacktrace shouldBe expected
    }

  }
} 
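
A hedged sketch of the filtering the first test implies: skip stack frames that belong to assertion frameworks or thread introspection and return the first remaining frame. The prefix list and the helper name are assumptions chosen to satisfy this spec, not sbt-flaky's actual implementation.

val frameworkPrefixes = List("org.junit.", "java.lang.Thread", "scala.lang.Thread", "sun.reflect.")

def firstNonAssertStacktraceSketch(stacktrace: String): Option[String] =
  stacktrace.split("\n").toList
    .filter(_.trim.startsWith("at "))                 // keep only stack frames
    .find { line =>
      val frame = line.trim.stripPrefix("at ")
      !frameworkPrefixes.exists(p => frame.startsWith(p))
    }
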
Example 90
Source File: GitSpec.scala    From sbt-flaky   with Apache License 2.0 5 votes vote down vote up
package flaky.history

import java.io.File

import flaky.Unzip
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpec}

import scala.util.Success

class GitSpec extends WordSpec with Matchers with BeforeAndAfterAll with Unzip {
  private val zipped = new File("./src/test/resources", "gitrepo.zip")
  private val unzipDir = new File("target/")
  private val gitFolder = new File(unzipDir, "gitrepo")

  "Git " should {
    "list all changes" in {
      val git = Git(gitFolder)
      val expected: Seq[GitCommit] = List(
        ("549df19", 1495403128), //Commit 1
        ("a220598", 1495403138), //Commit 2
        ("d4fa72b", 1495403141), //Commit 3
        ("a9958e5", 1495403142), //Commit 4
        ("e5677bb", 1495403143), //Commit 5
        ("2939f3a", 1495403144), //Commit 6
        ("b55953a", 1495403146), //Commit 7
        ("9d74e32", 1495403147)  //Commit 8
      )
        .zipWithIndex
        .map(a => GitCommit(a._1._1, "[email protected]", s"Commit ${a._2 + 1}", a._1._2))
        .reverse
      git.history() shouldBe Success(expected)
    }
    "find commits list between 2 hashes" in {
      val git = Git(gitFolder)
      val commitsList = git.commitsList("d4fa72b", "2939f3a")
      //4,5,6
      commitsList shouldBe Success(List(
        GitCommit("2939f3a", "[email protected]", "Commit 6", 1495403144),
        GitCommit("e5677bb", "[email protected]", "Commit 5", 1495403143),
        GitCommit("a9958e5", "[email protected]", "Commit 4", 1495403142)
      ))
    }
    "find current commit" in {
      val git = Git(gitFolder)
      git.currentId() shouldBe Success("9d74e32")
    }
    "find git root folder" in {
      val gitInSubfolder = Git(new File(unzipDir, "gitrepo/.git/logs/"))
      gitInSubfolder.currentId() shouldBe Success("9d74e32")
    }

    "resolve remote repo" in {
      val git = Git(new File("."))
      git.remoteUrl().map(_.contains("github.com")) shouldBe Success(true)
    }
  }

  override protected def beforeAll(): Unit = {
    unzip(zipped, unzipDir)
  }
} 
Example 91
Source File: GitRepoSpec.scala    From sbt-flaky   with Apache License 2.0 5 votes vote down vote up
package flaky.history

import org.scalatest.{Matchers, WordSpec}

class GitRepoSpec extends WordSpec with Matchers {

  "GitRepoSpec" should {

    "fromUrl read http links with user" in {
      val maybeRepo = GitRepo.fromUrl("http://[email protected]/otrebski/sbt-flaky.git")
      maybeRepo shouldBe Some(GitRepo("http","github.com", "otrebski","sbt-flaky"))
    }
    "fromUrl read https links with user" in {
      val maybeRepo = GitRepo.fromUrl("https://[email protected]/otrebski/sbt-flaky.git")
      maybeRepo shouldBe Some(GitRepo("https","github.com", "otrebski","sbt-flaky"))
    }

    "fromUrl read https links without user" in {
      val maybeRepo = GitRepo.fromUrl("https://github.com/otrebski/sbt-flaky.git")
      maybeRepo shouldBe Some(GitRepo("https","github.com", "otrebski","sbt-flaky"))
    }

    "fromUrl read ssh link" in {
      val maybeRepo = GitRepo.fromUrl("[email protected]:owner/repo.git")
      maybeRepo shouldBe Some(GitRepo("https","gitlab.mydomain.com", "owner","repo"))
    }
  }
} 
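
The expectations above amount to: http and https URLs keep their scheme and drop any user info, while the ssh form (git@host:owner/repo.git) is mapped to https for browsing. A sketch with two regexes written only to satisfy the cases in this spec; the real parser may differ.

def fromUrlSketch(url: String): Option[GitRepo] = {
  val http = """(https?)://(?:[^@/]+@)?([^/]+)/([^/]+)/(.+?)(?:\.git)?""".r
  val ssh  = """[^@]+@([^:]+):([^/]+)/(.+?)(?:\.git)?""".r
  url match {
    case http(scheme, host, owner, repo) => Some(GitRepo(scheme, host, owner, repo))
    case ssh(host, owner, repo)          => Some(GitRepo("https", host, owner, repo)) // browse ssh remotes over https
    case _                               => None
  }
}
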
Example 92
Source File: HistorySpec.scala    From sbt-flaky   with Apache License 2.0 5 votes vote down vote up
package flaky.history

import java.io.File
import java.text.SimpleDateFormat

import org.scalatest.{Matchers, WordSpec}

class HistorySpec extends WordSpec with Matchers {

  val fileWithDescriptor = "20170516-072750.zip"
  val fileWithoutDescriptor = "20170516-072825.zip"
  val dirWithReports = new File("./src/test/resources/history")

  "HistoryTest" should {

    "loadHistory with descriptor" in {
      val historicalRun: HistoricalRun = History.loadHistory.apply(new File(dirWithReports, fileWithDescriptor))
      historicalRun.historyReportDescription shouldBe HistoryReportDescription(123456L, Some("abcdefg"))
    }
    "loadHistory without descriptor" in {
      //Timestamp can't be hardcoded, because loadHistory tries to parse date from file name
      // with local time zone
      val timestamp = new SimpleDateFormat("yyyyMMdd-HHmmss").parse("20170516-072825").getTime
      val historicalRun: HistoricalRun = History.loadHistory.apply(new File(dirWithReports, fileWithoutDescriptor))
      historicalRun.historyReportDescription shouldBe HistoryReportDescription(timestamp, None)
    }

  }

} 
Example 93
Source File: UsersSpec.scala    From play-quill-jdbc   with MIT License 5 votes vote down vote up
package models

import _root_.test._
import org.scalatest.Matchers._
import org.scalatest.{ TestData, WordSpec }
import org.scalatestplus.play.OneAppPerTest
import play.api._

class UsersSpec extends WordSpec with OneAppPerTest {

  override def newAppForTest(testData: TestData): Application = fakeApp

  "Users" should {
    "create and find" in {
      val users = app.injector.instanceOf(classOf[Users])
      val user = users.create(User(0L, "test1", true))
      user.id should not be 0L
      val userFound = users.find(user.id)
      userFound shouldBe defined
      userFound.foreach(_.name shouldBe "test1")
    }
  }
} 
Example 94
Source File: FieldOrderingSQLInterpreterSpec.scala    From playsonify   with MIT License 5 votes vote down vote up
package com.alexitc.playsonify.sql

import com.alexitc.playsonify.models.ordering.{FieldOrdering, OrderingCondition}
import org.scalatest.MustMatchers._
import org.scalatest.WordSpec

class FieldOrderingSQLInterpreterSpec extends WordSpec {

  import FieldOrderingSQLInterpreterSpec._

  val interpreter = new FieldOrderingSQLInterpreter

  "toOrderByClause" should {
    "set the ascending order" in {
      val ordering = FieldOrdering[PersonField](PersonField.Id, OrderingCondition.AscendingOrder)
      val result = interpreter.toOrderByClause(ordering)

      result must be("ORDER BY Id ASC")
    }

    "set the descending order" in {
      val ordering = FieldOrdering[PersonField](PersonField.Id, OrderingCondition.DescendingOrder)
      val result = interpreter.toOrderByClause(ordering)

      result must be("ORDER BY Id DESC")
    }

    "break ties" in {
      val ordering = FieldOrdering[PersonField](PersonField.Country, OrderingCondition.AscendingOrder)
      val result = interpreter.toOrderByClause(ordering)

      result must be("ORDER BY Country ASC, Id")
    }
  }
}

object FieldOrderingSQLInterpreterSpec {
  sealed trait PersonField
  object PersonField {
    final case object Id extends PersonField
    final case object Country extends PersonField
  }

  implicit val personColumnNameResolver: ColumnNameResolver[PersonField] = new ColumnNameResolver[PersonField] {
    override def getColumnName(field: PersonField): String = field.toString

    override def getUniqueColumnName: String = PersonField.Id.toString
  }
} 
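
The clause shapes asserted above suggest a small interpreter: render the ordered column with ASC or DESC and append the resolver's unique column as a tie-breaker when ordering by anything else. The accessor names field and orderingCondition below are assumptions; this sketches the implied behaviour rather than the playsonify source.

def toOrderByClauseSketch[A](ordering: FieldOrdering[A])(implicit resolver: ColumnNameResolver[A]): String = {
  val column = resolver.getColumnName(ordering.field)             // `field` is an assumed accessor
  val direction = ordering.orderingCondition match {              // `orderingCondition` is assumed too
    case OrderingCondition.AscendingOrder  => "ASC"
    case OrderingCondition.DescendingOrder => "DESC"
  }
  val tieBreaker = resolver.getUniqueColumnName
  if (column == tieBreaker) s"ORDER BY $column $direction"
  else s"ORDER BY $column $direction, $tieBreaker"                // break ties on the unique column
}
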
Example 95
Source File: PaginatedQueryValidatorSpec.scala    From playsonify   with MIT License 5 votes vote down vote up
package com.alexitc.playsonify.validators

import com.alexitc.playsonify.models.pagination.{Limit, Offset, PaginatedQuery, PaginatedQueryError}
import org.scalactic.{Bad, Every, Good}
import org.scalatest.{MustMatchers, WordSpec}

class PaginatedQueryValidatorSpec extends WordSpec with MustMatchers {

  val validator = new PaginatedQueryValidator

  "validate" should {
    "succeed on valid query" in {
      val query = PaginatedQuery(Offset(0), Limit(100))
      val maxLimit = 100
      val expected = Good(query)
      val result = validator.validate(query, maxLimit)

      result mustEqual expected
    }

    "fail on offset < 0" in {
      val query = PaginatedQuery(Offset(-1), Limit(1))
      val maxLimit = 100
      val expected = Bad(PaginatedQueryError.InvalidOffset).accumulating
      val result = validator.validate(query, maxLimit)

      result mustEqual expected
    }

    "fail on limit = 0" in {
      val query = PaginatedQuery(Offset(0), Limit(0))
      val maxLimit = 100
      val expected = Bad(PaginatedQueryError.InvalidLimit(maxLimit)).accumulating
      val result = validator.validate(query, maxLimit)

      result mustEqual expected
    }

    "fail on limit > maxLimit" in {
      val query = PaginatedQuery(Offset(0), Limit(101))
      val maxLimit = 100
      val expected = Bad(PaginatedQueryError.InvalidLimit(maxLimit)).accumulating
      val result = validator.validate(query, maxLimit)

      result mustEqual expected
    }

    "accumulate errors when offset and limit are invalid" in {
      val query = PaginatedQuery(Offset(-1), Limit(101))
      val maxLimit = 100
      val expected = Bad(Every(PaginatedQueryError.InvalidOffset, PaginatedQueryError.InvalidLimit(maxLimit)))
      val result = validator.validate(query, maxLimit)

      result mustEqual expected
    }
  }
} 
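
The accumulation the last test asserts (both errors surfacing together) is the standard Scalactic pattern: validate each piece into an Or, then combine with withGood. The sketch below takes plain Ints instead of the Offset and Limit wrappers, whose accessors this spec does not show.

import org.scalactic.{ Bad, Good, One, Every, Or }
import org.scalactic.Accumulation._

def validateSketch(offset: Int, limit: Int, maxLimit: Int): (Int, Int) Or Every[PaginatedQueryError] = {
  val validOffset: Int Or Every[PaginatedQueryError] =
    if (offset >= 0) Good(offset) else Bad(One(PaginatedQueryError.InvalidOffset))
  val validLimit: Int Or Every[PaginatedQueryError] =
    if (limit >= 1 && limit <= maxLimit) Good(limit)
    else Bad(One(PaginatedQueryError.InvalidLimit(maxLimit)))
  withGood(validOffset, validLimit)((o, l) => (o, l))  // two Bads accumulate into one Every
}
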
Example 96
Source File: FieldOrderingParserSpec.scala    From playsonify   with MIT License 5 votes vote down vote up
package com.alexitc.playsonify.parsers

import com.alexitc.playsonify.models.ordering.{FieldOrdering, OrderingCondition, OrderingError, OrderingQuery}
import org.scalactic.{Bad, Every, Good}
import org.scalatest.{MustMatchers, WordSpec}

class FieldOrderingParserSpec extends WordSpec with MustMatchers {

  import FieldOrderingParserSpec._

  val parser = new CustomFieldParser

  "from" should {
    "parse an empty query to default ordering" in {
      val query = OrderingQuery("")
      val expected = FieldOrdering(Id, OrderingCondition.AscendingOrder)
      val result = parser.from(query)

      result mustEqual Good(expected)
    }

    "parse a field without ordering condition" in {
      val query = OrderingQuery("id")
      val expected = FieldOrdering(Id, OrderingCondition.AscendingOrder)
      val result = parser.from(query)

      result mustEqual Good(expected)
    }

    "parse a field with ordering condition" in {
      val query = OrderingQuery("name:desc")
      val expected = FieldOrdering(Name, OrderingCondition.DescendingOrder)
      val result = parser.from(query)

      result mustEqual Good(expected)
    }

    "reject unknown field" in {
      val query = OrderingQuery("age:desc")
      val expected = Bad(OrderingError.UnknownField).accumulating
      val result = parser.from(query)

      result mustEqual expected
    }

    "reject unknown ordering condition" in {
      val query = OrderingQuery("id:descending")
      val expected = Bad(OrderingError.InvalidCondition).accumulating
      val result = parser.from(query)

      result mustEqual expected
    }

    "accumulate errors on unknown field and ordering condition" in {
      val query = OrderingQuery("age:descending")
      val expected = Bad(Every(OrderingError.UnknownField, OrderingError.InvalidCondition))
      val result = parser.from(query)

      result mustEqual expected
    }

    "reject bad ordering format" in {
      val query = OrderingQuery("id:desc:x")
      val expected = Bad(OrderingError.InvalidFormat).accumulating
      val result = parser.from(query)

      result mustEqual expected
    }
  }
}

object FieldOrderingParserSpec {

  sealed abstract class CustomField(val string: String)
  case object Id extends CustomField("id")
  case object Name extends CustomField("name")

  class CustomFieldParser extends FieldOrderingParser[CustomField] {
    override protected def defaultField: CustomField = Id

    override protected def parseField(unsafeField: String): Option[CustomField] = unsafeField match {
      case Id.string => Some(Id)
      case Name.string => Some(Name)
      case _ => None
    }
  }
} 
Example 97
Source File: ShopSpec.scala    From Learn-Scala-Programming   with MIT License 5 votes vote down vote up
package ch12

import akka.actor.testkit.typed.Effect.NoEffects
import akka.actor.testkit.typed.scaladsl.{BehaviorTestKit, TestInbox}
import akka.actor.typed.receptionist.Receptionist
import akka.actor.typed.receptionist.Receptionist.Register
import ch12.Bakery.Groceries
import ch12.Manager.ReceiveGroceries
import ch12.Shop.{SellByList, ShoppingList}
import org.scalatest.WordSpec

import scala.language.postfixOps

class ShopSpec extends WordSpec {

  "A seller in the shop" should {
    "return groceries if given a shopping list" in {
      val receptionist = TestInbox[Receptionist.Command]()
      val mockReceptionist: Shop.ReceptionistFactory = _ => receptionist.ref
      val seller = BehaviorTestKit(Shop.seller(mockReceptionist))
      val inbox = TestInbox[Manager.Command]()
      val message = ShoppingList(1,1,1,1)
      seller.run(SellByList(message, inbox.ref))
      inbox.expectMessage(ReceiveGroceries(Groceries(1, 1, 1, 1)))
      receptionist.expectMessage(Register(Shop.SellerKey, seller.ref))
      seller.expectEffect(NoEffects)
    }
  }
} 
Example 98
Source File: ServerSpec.scala    From Learn-Scala-Programming   with MIT License 5 votes vote down vote up
import ch14.{Config, Server}
import cats.effect.IO
import cats.implicits._
import io.circe.Json
import io.circe.literal._
import org.http4s.circe._
import org.http4s.client.blaze.Http1Client
import org.http4s.{Method, Request, Status, Uri}
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpec}
import org.http4s.server.{Server => Http4sServer}

class ServerSpec extends WordSpec with Matchers with BeforeAndAfterAll {
  private lazy val client = Http1Client[IO]().unsafeRunSync()

  private lazy val configIO = Config.load("test.conf")
  private lazy val config = configIO.unsafeRunSync()

  private lazy val rootUrl = s"http://${config.server.host}:${config.server.port}"

  private val server: Option[Http4sServer[IO]] = (for {
    builder <- Server.createServer(configIO)
  } yield builder.start.unsafeRunSync()).compile.last.unsafeRunSync()

  override def afterAll(): Unit = {
    client.shutdown.unsafeRunSync()
    server.foreach(_.shutdown.unsafeRunSync())
  }

  "The server" should {
    "get an empty inventory" in {
      val json = client.expect[Json](s"$rootUrl/inventory").unsafeRunSync()
      json shouldBe json"""{}"""
    }
    "create articles" in {
      val eggs = Request[IO](method = Method.POST, uri = Uri.unsafeFromString(s"$rootUrl/articles/eggs"))
      client.status(eggs).unsafeRunSync() shouldBe Status.NoContent
      val chocolate = Request[IO](method = Method.POST, uri = Uri.unsafeFromString(s"$rootUrl/articles/chocolate"))
      client.status(chocolate).unsafeRunSync() shouldBe Status.NoContent
      val json = client.expect[Json](s"$rootUrl/inventory").unsafeRunSync()
      json shouldBe json"""{"eggs" : 0,"chocolate" : 0}"""
    }
    "update inventory" in {
      val restock = Request[IO](method = Method.POST, uri = Uri.unsafeFromString(s"$rootUrl/restock")).withBody(json"""{ "inventory" : { "eggs": 10, "chocolate": 20 }}""")
      client.expect[Json](restock).unsafeRunSync() shouldBe json"""{ "eggs" : 10, "chocolate" : 20 }"""
      client.expect[Json](restock).unsafeRunSync() shouldBe json"""{ "eggs" : 20, "chocolate" : 40 }"""
    }
    "deliver purchase if there is enough inventory" in {
      val purchase = Request[IO](method = Method.POST, uri = Uri.unsafeFromString(s"$rootUrl/purchase")).withBody(json"""{ "order" : { "eggs": 5, "chocolate": 5 }}""")
      client.expect[Json](purchase).unsafeRunSync() shouldBe json"""{ "eggs" : 5, "chocolate" : 5 }"""
    }
    "not deliver purchase if there is not enough inventory" in {
      val purchase = Request[IO](method = Method.POST, uri = Uri.unsafeFromString(s"$rootUrl/purchase")).withBody(json"""{ "order" : { "eggs": 5, "chocolate": 45 }}""")
      client.expect[Json](purchase).unsafeRunSync() shouldBe json"""{ "eggs" : 0, "chocolate" : 0 }"""
    }
  }

} 
Example 99
Source File: RoutesSpec.scala    From Learn-Scala-Programming   with MIT License 5 votes vote down vote up
package ch14

import akka.actor.ActorRef
import akka.http.scaladsl.marshalling.Marshal
import akka.http.scaladsl.model._
import akka.http.scaladsl.server.Route
import akka.http.scaladsl.testkit.ScalatestRouteTest
import ch14.Commands.{PurchaseArticles, RestockArticles}
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.{Matchers, WordSpec}

import scala.concurrent.duration._

class RoutesSpec
    extends WordSpec
    with Matchers
    with ScalaFutures
    with ScalatestRouteTest
    with Routes {

  override lazy val config: Config = Config.load()

  DB.initialize(config.database)

  override lazy val inventory: ActorRef =
    system.actorOf(InventoryActor.props, "inventory")

  "Routes" should {
    "return no articles in the beginning" in {
      val request = HttpRequest(uri = "/inventory")
      implicit val timeout: Duration = 3.seconds
      request ~> routes ~> check {
        status shouldBe StatusCodes.OK
        contentType shouldBe ContentTypes.`application/json`
        entityAs[String] shouldBe """{"state":{}}"""
      }
    }
    "be able to add article (POST /articles/eggs)" in {
      val request = Post("/articles/eggs")
      request ~> routes ~> check {
        status shouldBe StatusCodes.Created
        contentType shouldBe ContentTypes.`application/json`
        entityAs[String] shouldBe """{"name":"eggs","count":0}"""
      }
    }
    "not be able to delete article (delete /articles/no)" in {
      val request = Delete("/articles/no-such-article")
      request ~> Route.seal(routes) ~> check {
        status shouldBe StatusCodes.NotFound
      }
    }
    "not be able to add article twice (POST /articles/eggs)" in {
      val request = Post("/articles/eggs")
      request ~> routes ~> check {
        status shouldBe StatusCodes.Conflict
      }
    }
    "be able to restock articles (POST /restock)" in {
      val restock = RestockArticles(Map("eggs" -> 10, "chocolate" -> 20))
      val entity  = Marshal(restock).to[MessageEntity].futureValue // futureValue is from ScalaFutures
      val request = Post("/restock").withEntity(entity)
      request ~> routes ~> check {
        status shouldBe StatusCodes.OK
        contentType shouldBe ContentTypes.`application/json`
        entityAs[String] shouldBe """{"stock":{"eggs":10,"chocolate":20}}"""
      }
    }
    "be able to purchase articles (POST /purchase)" in {
      val restock = PurchaseArticles(Map("eggs" -> 5, "chocolate" -> 10))
      val entity  = Marshal(restock).to[MessageEntity].futureValue // futureValue is from ScalaFutures
      val request = Post("/purchase").withEntity(entity)
      request ~> routes ~> check {
        status shouldBe StatusCodes.OK
        contentType shouldBe ContentTypes.`application/json`
        entityAs[String] shouldBe """{"order":{"eggs":5,"chocolate":10}}"""
      }
    }
    "not be able to purchase articles (POST /purchase)" in {
      val restock = PurchaseArticles(Map("eggs" -> 50, "chocolate" -> 10))
      val entity  = Marshal(restock).to[MessageEntity].futureValue // futureValue is from ScalaFutures
      val request = Post("/purchase").withEntity(entity)
      request ~> routes ~> check {
        status shouldBe StatusCodes.Conflict
      }
    }
  }
} 
Example 100
Source File: MoneySpec.scala    From money   with Apache License 2.0 5 votes vote down vote up
package com.comcast.money.core

import java.net.InetAddress

import com.comcast.money.core.handlers.AsyncSpanHandler
import com.typesafe.config.ConfigFactory
import org.scalatest.{ Matchers, WordSpec }

class MoneySpec extends WordSpec with Matchers {

  val defaultConfig = ConfigFactory.load().getConfig("money")
  "Money" should {
    "load the reference config by default" in {
      val result = Money.Environment

      result.applicationName shouldBe "unknown"
      result.enabled shouldBe true
      result.factory shouldBe a[CoreSpanFactory]
      result.handler shouldBe an[AsyncSpanHandler]
      result.hostName shouldBe InetAddress.getLocalHost.getCanonicalHostName
      result.tracer should not be DisabledTracer
    }

    "load a Disabled Environment if money is disabled" in {
      val config = ConfigFactory.parseString(
        """
          |money {
          | enabled = false
          | application-name = "unknown"
          |}
        """.stripMargin)

      val result = Money(config.getConfig("money"))
      result.tracer shouldBe DisabledTracer
      result.factory shouldBe DisabledSpanFactory
      result.handler shouldBe DisabledSpanHandler
      result.enabled shouldBe false
    }
  }
} 
Example 101
Source File: CoreSpanFactorySpec.scala    From money   with Apache License 2.0 5 votes vote down vote up
package com.comcast.money.core

import com.comcast.money.api.{ Note, SpanHandler, SpanId }
import com.comcast.money.core.handlers.TestData
import org.scalatest.mockito.MockitoSugar
import org.scalatest.{ Matchers, WordSpec }

class CoreSpanFactorySpec extends WordSpec with Matchers with MockitoSugar with TestData {

  val handler = mock[SpanHandler]
  val underTest = new CoreSpanFactory(handler)

  "CoreSpanFactory" should {
    "create a new span" in {
      val result = underTest.newSpan("foo").asInstanceOf[CoreSpan]

      result.info.name shouldBe "foo"
      result.handler shouldBe handler
    }

    "create a new span given an existing span id" in {
      val existingId = new SpanId()
      val result = underTest.newSpan(existingId, "foo").asInstanceOf[CoreSpan]

      result.id shouldBe existingId
    }

    "create a child span whos id descends from an existing span" in {
      val result = underTest.childSpan("child", testSpan)

      val parent = testSpan.info
      val child = result.info

      child.id.traceId shouldBe parent.id.traceId
      child.id.parentId shouldBe parent.id.selfId
      child.id.selfId == parent.id.selfId shouldBe false
    }

    "propagate sticky notes to a child span" in {

      val parentSpan = underTest.newSpan("parent")
      val stickyNote = Note.of("foo", "bar", true)
      val nonStickyNote = Note.of("other", "one", false)
      parentSpan.record(stickyNote)
      parentSpan.record(nonStickyNote)

      val childSpan = underTest.childSpan("child", parentSpan, true)
      val childInfo = childSpan.info

      childInfo.notes should contain value stickyNote
      childInfo.notes shouldNot contain value nonStickyNote
    }

    "create a child span from a well-formed x-moneytrace header" in {
      val parentSpan = underTest.newSpan("parent")

      Formatters.toHttpHeaders(parentSpan.info.id, (headerName, headerValue) => headerName match {
        case Formatters.MoneyTraceHeader => {
          val childSpan = underTest.newSpanFromHeader("child", _ => headerValue)

          childSpan.info.id.traceId shouldBe parentSpan.info.id.traceId
          childSpan.info.id.parentId shouldBe parentSpan.info.id.selfId
          childSpan.info.id.selfId == parentSpan.info.id.selfId shouldBe false
        }
        case _ =>
      })
    }

    "create a root span from a malformed x-moneytrace header" in {
      val parentSpan = underTest.newSpan("parent")
      val traceContextHeader = "mangled header value"
      val childSpan = underTest.newSpanFromHeader("child", headerName => traceContextHeader)

      childSpan.info.id.traceId == parentSpan.info.id.traceId shouldBe false
      childSpan.info.id.parentId == parentSpan.info.id.selfId shouldBe false
      childSpan.info.id.selfId == parentSpan.info.id.selfId shouldBe false
      childSpan.info.id.selfId shouldBe childSpan.info.id.parentId
    }
  }
} 
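
The id relationships asserted here line up with the SpanId constructors exercised in Example 112 below: a child keeps the parent's trace id, takes the parent's self id as its parent id, and gets a fresh self id from the two-argument constructor. A minimal sketch of that derivation, inferred from the assertions rather than taken from the money source:

def childIdSketch(parent: SpanId): SpanId =
  new SpanId(parent.traceId, parent.selfId)  // the 2-arg constructor generates a fresh selfId
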
Example 102
Source File: TraceLoggingSpec.scala    From money   with Apache License 2.0 5 votes vote down vote up
package com.comcast.money.core.logging

import org.mockito.Mockito._
import org.scalatest.mockito.MockitoSugar
import org.scalatest.{ Matchers, OneInstancePerTest, WordSpec }
import org.slf4j.Logger

class TraceLoggingSpec extends WordSpec with Matchers with MockitoSugar with OneInstancePerTest {

  val mockLogger = mock[Logger]

  "TraceLogging" should {
    "capture exceptions into a log" in {
      val testTraceLogging = new TraceLogging {
        override lazy val shouldLogExceptions: Boolean = true
        override val logger: Logger = mockLogger
      }

      val t = mock[Throwable]
      testTraceLogging.logException(t)
      verify(mockLogger).error("Tracing exception", t)
    }
    "not capture exceptions if log exceptions is not enabled" in {
      val testTraceLogging = new TraceLogging {
        override lazy val shouldLogExceptions: Boolean = false
        override val logger: Logger = mockLogger
      }
      val t = mock[Throwable]
      testTraceLogging.logException(t)
      verifyZeroInteractions(mockLogger)
    }
  }
} 
Example 103
Source File: CoreSpanSpec.scala    From money   with Apache License 2.0 5 votes vote down vote up
package com.comcast.money.core

import com.comcast.money.api.{ SpanInfo, SpanHandler, SpanId }
import com.comcast.money.core.handlers.TestData
import org.mockito.ArgumentCaptor
import org.mockito.Mockito._
import org.scalatest.mockito.MockitoSugar
import org.scalatest.{ Matchers, WordSpec }

class CoreSpanSpec extends WordSpec with Matchers with TestData with MockitoSugar {

  "CoreSpan" should {
    "set the startTimeMillis and startTimeMicros when started" in {
      val underTest = CoreSpan(new SpanId(), "test", null)
      underTest.start()

      val state = underTest.info

      state.startTimeMicros.toInt should not be 0
      state.startTimeMillis.toInt should not be 0
    }

    "record a timer" in {
      val underTest = CoreSpan(new SpanId(), "test", null)

      underTest.startTimer("foo")
      underTest.stopTimer("foo")

      underTest.info.notes should contain key "foo"
    }

    "record a note" in {
      val underTest = CoreSpan(new SpanId(), "test", null)

      underTest.record(testLongNote)

      underTest.info.notes should contain value testLongNote
    }

    "set the endTimeMillis and endTimeMicros when stopped" in {
      val handler = mock[SpanHandler]
      val underTest = CoreSpan(new SpanId(), "test", handler)

      underTest.stop(true)

      val state = underTest.info

      state.endTimeMicros.toInt should not be 0
      state.endTimeMillis.toInt should not be 0
    }

    "invoke the span handler when stopped" in {
      val handler = mock[SpanHandler]
      val handleCaptor = ArgumentCaptor.forClass(classOf[SpanInfo])
      val underTest = CoreSpan(new SpanId(), "test", handler)

      underTest.start()
      underTest.record(testLongNote)
      underTest.stop(true)

      verify(handler).handle(handleCaptor.capture())

      val handledInfo = handleCaptor.getValue

      handledInfo.id shouldBe underTest.id
      handledInfo.startTimeMicros.toInt should not be 0
      handledInfo.startTimeMillis.toInt should not be 0
      handledInfo.endTimeMicros.toInt should not be 0
      handledInfo.endTimeMillis.toInt should not be 0
      handledInfo.notes should contain value testLongNote
    }
  }
} 
Example 104
Source File: MDCSupportSpec.scala    From money   with Apache License 2.0 5 votes vote down vote up
package com.comcast.money.core.internal

import com.comcast.money.api.SpanId
import org.scalatest.{ BeforeAndAfterEach, Matchers, OneInstancePerTest, WordSpec }
import org.slf4j.MDC

import scala.collection.JavaConverters._
import scala.collection.mutable

class MDCSupportSpec extends WordSpec with Matchers with BeforeAndAfterEach with OneInstancePerTest {

  val testMDCSupport = new MDCSupport
  val spanId = new SpanId()

  override def beforeEach() = {
    SpanLocal.clear()
  }

  "MDCSupport" should {
    "set the span in MDC when provide" in {
      testMDCSupport.setSpanMDC(Some(spanId))
      MDC.get("moneyTrace") shouldEqual MDCSupport.format(spanId)
    }
    "clear the MDC value when set to None" in {
      testMDCSupport.setSpanMDC(Some(spanId))
      MDC.get("moneyTrace") shouldEqual MDCSupport.format(spanId)

      testMDCSupport.setSpanMDC(None)
      MDC.get("moneyTrace") shouldBe null
    }
    "not be run if tracing is disabled" in {
      val disabled = new MDCSupport(false)
      disabled.setSpanMDC(Some(spanId))
      MDC.get("moneyTrace") shouldBe null
    }
    "not propogate MDC if disabled" in {
      val mdcContext: mutable.Map[_, _] = mutable.HashMap("FINGERPRINT" -> "print")
      val disabled = new MDCSupport(false)
      disabled.propogateMDC(Some(mdcContext.asJava))
      MDC.get("FINGERPRINT") shouldBe null
    }
    "propogate MDC if not disabled" in {
      val mdcContext: mutable.Map[_, _] = mutable.HashMap("FINGERPRINT" -> "print")

      testMDCSupport.propogateMDC(Some(mdcContext.asJava))
      MDC.get("FINGERPRINT") shouldBe "print"
    }
    "clear MDC if given an empty context" in {
      MDC.put("FINGERPRINT", "print")
      testMDCSupport.propogateMDC(None)
      MDC.get("FINGERPRINT") shouldBe null
    }
    "set span name" in {
      testMDCSupport.setSpanNameMDC(Some("foo"))
      MDC.get("spanName") shouldBe "foo"
      testMDCSupport.getSpanNameMDC shouldBe Some("foo")
    }
    "clear span name from MDC when given an empty value" in {
      MDC.put("spanName", "shouldBeRemoved")
      testMDCSupport.setSpanNameMDC(None)
      MDC.get("spanName") shouldBe null
      testMDCSupport.getSpanNameMDC shouldBe None
    }
  }
} 
Example 105
Source File: SpanLocalSpec.scala    From money   with Apache License 2.0 5 votes vote down vote up
package com.comcast.money.core.internal

import com.comcast.money.api.SpanId
import com.comcast.money.core.handlers.TestData
import org.scalatest.mockito.MockitoSugar
import org.scalatest.{ OneInstancePerTest, BeforeAndAfterEach, Matchers, WordSpec }
import org.slf4j.MDC

class SpanLocalSpec extends WordSpec
  with Matchers with OneInstancePerTest with BeforeAndAfterEach with MockitoSugar with TestData {

  override def afterEach() = {
    SpanLocal.clear()
  }

  "SpanLocal" when {
    "an item exists in span local" should {
      "return the span local value" in {
        SpanLocal.push(testSpan)
        SpanLocal.current shouldEqual Some(testSpan)
      }
      "clear the stored value" in {
        SpanLocal.push(testSpan)

        SpanLocal.clear()
        SpanLocal.current shouldEqual None
      }
      "do nothing if trying to push a null value" in {
        SpanLocal.push(testSpan)
        SpanLocal.push(null)
        SpanLocal.current shouldEqual Some(testSpan)
      }
      "add to the existing call stack" in {
        val nested = testSpan.copy(new SpanId())

        SpanLocal.push(testSpan)
        SpanLocal.push(nested)
        SpanLocal.current shouldEqual Some(nested)
      }
      "pop the last added item from the call stack" in {
        val nested = testSpan.copy(new SpanId())
        SpanLocal.push(testSpan)
        SpanLocal.push(nested)

        val popped = SpanLocal.pop()
        popped shouldEqual Some(nested)
        SpanLocal.current shouldEqual Some(testSpan)
      }
      "set the MDC value on push" in {
        SpanLocal.push(testSpan)

        MDC.get("moneyTrace") shouldEqual MDCSupport.format(testSpan.id)
        MDC.get("spanName") shouldEqual testSpan.name
      }
      "remove the MDC value on pop" in {
        SpanLocal.push(testSpan)
        SpanLocal.pop()

        MDC.get("moneyTrace") shouldBe null
        MDC.get("spanName") shouldBe null
      }
      "reset the MDC value on pop" in {
        SpanLocal.push(testSpan)
        SpanLocal.push(childSpan)

        MDC.get("moneyTrace") shouldEqual MDCSupport.format(childSpan.id)
        MDC.get("spanName") shouldEqual childSpan.name

        SpanLocal.pop()

        MDC.get("moneyTrace") shouldEqual MDCSupport.format(testSpan.id)
        MDC.get("spanName") shouldEqual testSpan.name
      }
      "remove the MDC value on clear" in {
        SpanLocal.push(testSpan)

        MDC.get("moneyTrace") shouldEqual MDCSupport.format(testSpan.id)
        MDC.get("spanName") shouldEqual testSpan.name
        SpanLocal.clear()

        MDC.get("moneyTrace") shouldBe null
        MDC.get("spanName") shouldBe null
      }
    }
  }
} 
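
The behaviour pinned down here (a per-thread stack with the MDC kept in sync on push, pop and clear) can be sketched with a ThreadLocal holding a list. Using String as a stand-in span and only the spanName key keeps the sketch self-contained; the real SpanLocal holds spans and also maintains moneyTrace.

import org.slf4j.MDC

object SpanLocalSketch {
  private val stack = new ThreadLocal[List[String]] {
    override def initialValue(): List[String] = Nil
  }

  def push(span: String): Unit =
    if (span != null) { stack.set(span :: stack.get); MDC.put("spanName", span) }

  def pop(): Option[String] = stack.get match {
    case head :: tail =>
      stack.set(tail)
      tail.headOption match {                 // reset the MDC to the new top, or clear it
        case Some(s) => MDC.put("spanName", s)
        case None    => MDC.remove("spanName")
      }
      Some(head)
    case Nil => None
  }

  def current: Option[String] = stack.get.headOption

  def clear(): Unit = { stack.remove(); MDC.remove("spanName") }
}
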
Example 106
Source File: TraceFriendlyExecutionContextExecutorSpec.scala    From money   with Apache License 2.0 5 votes vote down vote up
package com.comcast.money.core.concurrent

import com.comcast.money.api.SpanId
import com.comcast.money.core.SpecHelpers
import com.comcast.money.core.internal.SpanLocal
import org.mockito.Mockito._
import org.scalatest.mockito.MockitoSugar
import org.scalatest.{ BeforeAndAfterEach, Matchers, OneInstancePerTest, WordSpec }
import org.slf4j.MDC

import scala.concurrent.duration._
import scala.concurrent.{ Await, ExecutionContext, Future }

class TraceFriendlyExecutionContextExecutorSpec extends WordSpec
  with Matchers
  with MockitoSugar
  with OneInstancePerTest
  with ConcurrentSupport
  with SpecHelpers
  with BeforeAndAfterEach {

  import com.comcast.money.core.concurrent.TraceFriendlyExecutionContextExecutor.Implicits.global // brings in the implicit executor

  override def beforeEach() = {
    SpanLocal.clear()
    MDC.clear()
  }

  "TraceFriendlyExecutionContext" should {
    "propagate the current trace local value" in {
      val originalSpanId = new SpanId("1", 2L, 3L)
      val originalSpan = testSpan(originalSpanId)
      SpanLocal.push(originalSpan)

      val future = Future {
        SpanLocal.current.get.info.id
      }

      val futureResult = Await.result(future, 100 millis)
      futureResult shouldEqual originalSpanId
    }
    "propagate no span value if none is present" in {
      SpanLocal.clear()

      val future = Future {
        SpanLocal.current
      }

      val futureResult = Await.result(future, 100 millis)
      futureResult shouldEqual None
    }
    "propagate only the latest span id value" in {
      val spanId1 = new SpanId()
      val spanId2 = new SpanId()
      SpanLocal.push(testSpan(spanId1))
      SpanLocal.push(testSpan(spanId2))

      val future = Future {
        SpanLocal.current.get.info.id
      }

      val futureResult = Await.result(future, 100 millis)
      futureResult shouldEqual spanId2
    }
    "delegate reportFailure to the wrapped executor" in {
      val mockExecutionContext = mock[ExecutionContext]
      val traceFriendly = TraceFriendlyExecutionContextExecutor(mockExecutionContext)
      val failure = new IllegalArgumentException()

      traceFriendly.reportFailure(failure)
      verify(mockExecutionContext).reportFailure(failure)
    }
    "propogate MDC data" in {
      MDC.put("FINGERPRINT", "print")
      val future = Future {
        MDC.get("FINGERPRINT")
      }
      MDC.get("FINGERPRINT") shouldEqual "print"
      Await.result(future, 100 millis) shouldEqual "print"
    }

    "Child MDC should not escape to parent " in {
      val future = Future {
        MDC.put("FINGERPRINT", "print")
        MDC.get("FINGERPRINT")
      }
      MDC.get("FINGERPRINT") shouldBe null
      Await.result(future, 100 millis) shouldEqual "print"
    }
  }
} 
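
The propagation semantics asserted here (the caller's MDC is visible inside the Future, but writes made inside it do not escape back) follow from capturing the context at submission time and clearing it when the task finishes. A minimal sketch of that wrapping, assuming only the slf4j MDC API; the real executor also carries the span stack.

import org.slf4j.MDC
import scala.concurrent.ExecutionContext

class MdcPropagatingContext(wrapped: ExecutionContext) extends ExecutionContext {
  override def execute(task: Runnable): Unit = {
    val callerMdc = MDC.getCopyOfContextMap  // may be null if the caller set nothing
    wrapped.execute(new Runnable {
      override def run(): Unit = {
        if (callerMdc != null) MDC.setContextMap(callerMdc) else MDC.clear()
        try task.run()
        finally MDC.clear()                  // child writes never escape the task
      }
    })
  }

  override def reportFailure(cause: Throwable): Unit = wrapped.reportFailure(cause)
}
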
Example 107
Source File: HandlerFactorySpec.scala    From money   with Apache License 2.0 5 votes vote down vote up
package com.comcast.money.core.handlers

import com.typesafe.config.ConfigFactory
import org.scalatest.{ Matchers, WordSpec }

class HandlerFactorySpec extends WordSpec with Matchers {

  "HandlerFactory" should {
    "create a span handler based on class" in {
      val config = ConfigFactory.parseString(s"class=${classOf[NonConfiguredHandler].getCanonicalName}")

      val createdHandler = HandlerFactory.create(config)
      createdHandler shouldBe a[NonConfiguredHandler]
    }

    "create a configurable span handle and call configure on it" in {
      val config = ConfigFactory.parseString(s"class=${classOf[ConfiguredHandler].getCanonicalName}")

      val createdHandler = HandlerFactory.create(config)
      createdHandler shouldBe a[ConfiguredHandler]

      createdHandler.asInstanceOf[ConfiguredHandler].calledConfigure shouldBe true
    }
  }
} 
Example 108
Source File: MetricsHandlerSpec.scala    From money   with Apache License 2.0 5 votes vote down vote up
package com.comcast.money.core.handlers

import com.codahale.metrics.{ Meter, Histogram, MetricRegistry }
import com.typesafe.config.Config
import org.mockito.Mockito._
import org.mockito.Matchers._
import org.scalatest.mockito.MockitoSugar
import org.scalatest.{ OneInstancePerTest, Matchers, WordSpec }

class MetricsHandlerSpec extends WordSpec with Matchers with MockitoSugar with TestData with OneInstancePerTest {

  val conf = mock[Config]
  doReturn(true).when(conf).hasPath("metrics-registry.class-name")
  doReturn("com.comcast.money.core.metrics.MockMetricRegistryFactory").when(conf).getString("metrics-registry.class-name")

  "MetricsSpanHandler" should {
    "configure the metrics registry" in {
      val underTest = new MetricsSpanHandler()
      underTest.configure(conf)

      underTest.metricRegistry shouldBe a[MetricRegistry]
    }

    "save latency metric" in {
      val underTest = new MetricsSpanHandler()
      underTest.configure(conf)

      val latencyMetric = mock[Histogram]
      val errorMetric = mock[Meter]
      doReturn(latencyMetric).when(underTest.metricRegistry).histogram(anyString())
      doReturn(errorMetric).when(underTest.metricRegistry).meter(anyString())

      underTest.handle(testSpanInfo)

      verify(latencyMetric).update(testSpanInfo.durationMicros)
      verifyZeroInteractions(errorMetric)
    }

    "update the error metric" in {
      val underTest = new MetricsSpanHandler()
      underTest.configure(conf)

      val latencyMetric = mock[Histogram]
      val errorMetric = mock[Meter]
      doReturn(latencyMetric).when(underTest.metricRegistry).histogram(anyString())
      doReturn(errorMetric).when(underTest.metricRegistry).meter(anyString())

      underTest.handle(testSpanInfo.copy(success = false))

      verify(latencyMetric).update(testSpanInfo.durationMicros)
      verify(errorMetric).mark()
    }
  }
} 
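
The interactions verified above amount to: record the span's duration in a histogram for every span, and mark a meter only when the span failed. A hedged sketch against the Dropwizard metrics API; the metric naming is an assumption, since the tests stub the registry and never reveal the real names.

import com.codahale.metrics.MetricRegistry
import com.comcast.money.api.SpanInfo

def handleSketch(registry: MetricRegistry, span: SpanInfo): Unit = {
  registry.histogram(span.name).update(span.durationMicros)  // latency, recorded for every span
  if (!span.success) registry.meter(span.name).mark()        // errors, marked on failure only
}
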
Example 109
Source File: HandlerChainSpec.scala    From money   with Apache License 2.0 5 votes vote down vote up
package com.comcast.money.core.handlers

import com.comcast.money.api.SpanHandler
import com.typesafe.config.ConfigFactory
import org.mockito.Mockito
import org.mockito.Mockito._
import org.scalatest.mockito.MockitoSugar
import org.scalatest.{ Matchers, WordSpec }

class HandlerChainSpec extends WordSpec with Matchers with MockitoSugar with TestData {

  "HandlerChain" should {

    "invoke all handlers in the chain in order" in {
      val handler1 = mock[SpanHandler]
      val handler2 = mock[SpanHandler]
      val handler3 = mock[SpanHandler]

      val ordered = Mockito.inOrder(handler1, handler2, handler3)

      val underTest = HandlerChain(Seq(handler1, handler2, handler3))

      underTest.handle(testSpanInfo)

      ordered.verify(handler1).handle(testSpanInfo)
      ordered.verify(handler2).handle(testSpanInfo)
      ordered.verify(handler3).handle(testSpanInfo)
    }

    "continues invocation of chain if one of the handlers throws an exception" in {

      val handler1 = mock[SpanHandler]
      val handler2 = mock[SpanHandler]
      val handler3 = mock[SpanHandler]

      doThrow(classOf[RuntimeException]).when(handler1).handle(testSpanInfo)
      val ordered = Mockito.inOrder(handler1, handler2, handler3)

      val underTest = HandlerChain(Seq(handler1, handler2, handler3))

      underTest.handle(testSpanInfo)

      ordered.verify(handler1).handle(testSpanInfo)
      ordered.verify(handler2).handle(testSpanInfo)
      ordered.verify(handler3).handle(testSpanInfo)
    }

    "create a sequence of handlers" in {
      val config = ConfigFactory.parseString(
        """
          |{
          | async = false
          | handlers = [
          |   {
          |     class = "com.comcast.money.core.handlers.ConfiguredHandler"
          |   },
          |   {
          |     class = "com.comcast.money.core.handlers.ConfiguredHandler"
          |   },
          |   {
          |     class = "com.comcast.money.core.handlers.NonConfiguredHandler"
          |   }
          | ]
          |}
        """.stripMargin)

      val result = HandlerChain(config)

      result shouldBe a[HandlerChain]
      result.asInstanceOf[HandlerChain].handlers should have size 3
    }

    "wrap the handler chain in an async handler if async is set to true" in {
      val config = ConfigFactory.parseString(
        """
          |{
          | async = true
          | handlers = [
          |   {
          |     class = "com.comcast.money.core.handlers.ConfiguredHandler"
          |   },
          |   {
          |     class = "com.comcast.money.core.handlers.ConfiguredHandler"
          |   },
          |   {
          |     class = "com.comcast.money.core.handlers.NonConfiguredHandler"
          |   }
          | ]
          |}
        """.stripMargin)

      val result = HandlerChain(config)

      result shouldBe an[AsyncSpanHandler]
      result.asInstanceOf[AsyncSpanHandler]
        .wrapped.asInstanceOf[HandlerChain]
        .handlers should have size 3
    }
  }
} 
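
Both ordering tests pass as long as each handler is invoked in sequence with individual failures swallowed. A minimal sketch of that resilience; the shape is assumed, and the real HandlerChain also carries the config-driven construction shown above.

import com.comcast.money.api.{ SpanHandler, SpanInfo }
import scala.util.Try

case class HandlerChainSketch(handlers: Seq[SpanHandler]) extends SpanHandler {
  override def handle(span: SpanInfo): Unit =
    handlers.foreach(h => Try(h.handle(span)))  // a throwing handler does not stop the chain
}
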
Example 110
Source File: AsyncSpanHandlerSpec.scala    From money   with Apache License 2.0 5 votes vote down vote up
package com.comcast.money.core.handlers

import com.comcast.money.api.{ SpanHandler, SpanInfo }
import com.comcast.money.core.SpecHelpers
import org.scalatest.mockito.MockitoSugar
import org.scalatest.{ Matchers, WordSpec }

class AsyncSpanHandlerSpec extends WordSpec with Matchers with MockitoSugar with TestData with SpecHelpers {

  class Wrapped extends SpanHandler {
    var called = false
    override def handle(span: SpanInfo): Unit = called = true
  }

  "AsyncSpanHandler" should {
    "asynchronously invoke the span handler" in {
      val spanHandler = new Wrapped()
      val underTest = new AsyncSpanHandler(scala.concurrent.ExecutionContext.global, spanHandler)

      underTest.handle(testSpanInfo)

      awaitCond(spanHandler.called)
    }
  }
} 
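
The name plus the awaitCond suggest the handler simply defers to the wrapped handler on the supplied execution context. A hedged sketch of that shape, not the money source:

import scala.concurrent.ExecutionContext

class AsyncSpanHandlerSketch(ec: ExecutionContext, wrapped: SpanHandler) extends SpanHandler {
  override def handle(span: SpanInfo): Unit =
    ec.execute(new Runnable { override def run(): Unit = wrapped.handle(span) })
}
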
Example 111
Source File: CoreSpanInfoSpec.scala    From money   with Apache License 2.0 5 votes vote down vote up
package com.comcast.money.core

import com.comcast.money.api.SpanId
import org.scalatest.{ Matchers, WordSpec }

class CoreSpanInfoSpec extends WordSpec with Matchers {

  "CoreSpanInfo" should {
    "have acceptable default values" in {
      val spanId = new SpanId()
      val underTest = CoreSpanInfo(spanId, "test")

      underTest.id shouldBe spanId
      underTest.name shouldBe "test"
      underTest.appName shouldBe Money.Environment.applicationName
      underTest.host shouldBe Money.Environment.hostName
      underTest.notes shouldBe empty
      underTest.success shouldBe true
      underTest.durationMicros shouldBe 0L
      underTest.startTimeMicros shouldBe 0L
      underTest.startTimeMillis shouldBe 0L
      underTest.endTimeMicros shouldBe 0L
      underTest.endTimeMillis shouldBe 0L
    }
  }
} 
Example 112
Source File: SpanIdSpec.scala    From money   with Apache License 2.0 5 votes vote down vote up
package com.comcast.money.api

import org.scalatest.{ Matchers, WordSpec }

class SpanIdSpec extends WordSpec with Matchers {

  "SpanId" should {
    "take 3 constructor arguments" in {
      val spanId = new SpanId("foo", 1L, 2L)

      spanId.traceId shouldBe "foo"
      spanId.parentId shouldBe 1L
      spanId.selfId shouldBe 2L
    }

    "set self id to a random long if not specified in the constructor" in {
      val spanId = new SpanId("foo", 1L)

      spanId.traceId shouldBe "foo"
      spanId.parentId shouldBe 1L
      Long.box(spanId.selfId) should not be null
    }

    "set the self and parent id to a random long if not specified" in {
      val spanId = new SpanId("foo")

      spanId.traceId shouldBe "foo"
      Long.box(spanId.parentId) should not be null
      Long.box(spanId.selfId) should not be null
    }

    "set the self id to the parent id when neither is specified" in {
      val spanId: SpanId = new SpanId()
      assert(spanId.parentId === spanId.selfId)
    }

    "generate a string matching SpanId~%s~%s~%s" in {
      val format = "SpanId~%s~%s~%s"
      val expected = format.format("foo", 1L, 2L)

      val spanId = new SpanId("foo", 1L, 2L)
      val result = spanId.toString

      result shouldEqual expected
    }

    "parse a string into a span id" in {
      val spanId = new SpanId("foo", 1L, 2L)
      val str = spanId.toString

      val parsed = SpanId.fromString(str)
      parsed.traceId shouldBe spanId.traceId
      parsed.parentId shouldBe spanId.parentId
      parsed.selfId shouldBe spanId.selfId
    }

    "default traceId to UUID if set to null" in {
      val spanId = new SpanId(null, 1L)

      spanId.traceId should not be null
    }
  }
} 
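
Given the SpanId~%s~%s~%s format asserted above, the round-trip test implies parsing of exactly that shape. A hedged sketch that restates the format; the real fromString lives in the money API.

def fromStringSketch(s: String): Option[SpanId] = s.split("~") match {
  case Array("SpanId", traceId, parentId, selfId) =>
    Some(new SpanId(traceId, parentId.toLong, selfId.toLong))
  case _ => None
}
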
Example 113
Source File: TracedMethodAdvisorSpec.scala    From money   with Apache License 2.0 5 votes vote down vote up
package com.comcast.money.spring

import org.scalatest.mockito.MockitoSugar
import org.scalatest.{ Matchers, WordSpec }
import org.springframework.aop.support.StaticMethodMatcherPointcut

class TracedMethodAdvisorSpec extends WordSpec with Matchers with MockitoSugar {

  val springTracer = mock[SpringTracer]
  val interceptor = new TracedMethodInterceptor(springTracer)
  val advisor = new TracedMethodAdvisor(interceptor)

  "Trace Advisor" should {
    "bump up code coverage" in {
      advisor.getPointcut shouldBe a[StaticMethodMatcherPointcut]
      advisor.getAdvice shouldBe interceptor
    }
  }
} 
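MockitoSugar's mock[T], used above for SpringTracer, replaces the mock(classOf[T]) boilerplate and combines naturally with Mockito's verify. A minimal self-contained sketch with a hypothetical Tracer trait:

import org.mockito.Mockito._
import org.scalatest.mockito.MockitoSugar
import org.scalatest.{Matchers, WordSpec}

// Hypothetical collaborator, defined only for this sketch.
trait Tracer { def record(name: String): Unit }

class MockitoSugarSketch extends WordSpec with Matchers with MockitoSugar {

  "a component under test" should {
    "delegate to its collaborator" in {
      val tracer = mock[Tracer]      // type-parameter syntax instead of mock(classOf[Tracer])

      tracer.record("span")          // stands in for exercising the real component

      verify(tracer).record("span")  // assert that the interaction happened
    }
  }
}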
Example 114
Source File: KafkaSpanHandlerSpec.scala    From money   with Apache License 2.0 5 votes vote down vote up
package com.comcast.money.kafka

import com.comcast.money.api.Note
import com.comcast.money.{ api, core }
import com.typesafe.config.{ Config, ConfigFactory }
import kafka.message.{ CompressionCodec, GZIPCompressionCodec }
import kafka.producer.{ KeyedMessage, Producer }
import org.mockito.ArgumentCaptor
import org.mockito.Mockito._
import org.scalatest.mockito.MockitoSugar
import org.scalatest.{ BeforeAndAfterAll, Matchers, WordSpec }

import scala.collection.JavaConverters._

trait MockProducerMaker extends ProducerMaker {

  val mockProducer = mock(classOf[Producer[Array[Byte], Array[Byte]]])

  def makeProducer(conf: Config): Producer[Array[Byte], Array[Byte]] = mockProducer
}

class TestKafkaSpanHandler extends KafkaSpanHandler {

  var producerWasMade = false
  val mockProducer = mock(classOf[Producer[Array[Byte], Array[Byte]]])

  override def makeProducer(conf: Config): Producer[Array[Byte], Array[Byte]] = {
    producerWasMade = true
    mockProducer
  }
}

class KafkaSpanHandlerSpec extends WordSpec
  with Matchers
  with MockitoSugar
  with BeforeAndAfterAll {

  trait KafkaFixture {
    val testConfig = mock[Config]
    when(testConfig.getString("topic")).thenReturn("test-topic")

    val underTest = new TestKafkaSpanHandler()
    underTest.configure(testConfig)

    val testProducer = underTest.mockProducer
    val sampleData = core.CoreSpanInfo(
      id = new api.SpanId("foo", 1L),
      name = "key",
      appName = "app",
      host = "host",
      startTimeMillis = 1L,
      success = true,
      durationMicros = 35L,
      notes = Map[String, Note[_]]("what" -> api.Note.of("what", 1L), "when" -> api.Note.of("when", 2L), "bob" -> api.Note.of("bob", "craig")).asJava)
  }

  "A KafkaEmitter" should {
    "make a producer in configure" in new KafkaFixture {
      underTest.producerWasMade shouldBe true
    }
    "send a message to the producer for a span" in new KafkaFixture {
      underTest.handle(sampleData)

      val captor = ArgumentCaptor.forClass(classOf[KeyedMessage[Array[Byte], Array[Byte]]])
      verify(testProducer).send(captor.capture())
    }
  }

  "A ConfigDrivenProducerMaker" should {
    "set the properties from the config" in {
      val config = ConfigFactory.parseString(
        """
          | topic = "money"
          | compression.codec = "1"
          | producer.type = "async"
          | batch.num.messages = "1"
          | message.send.max.retries = "3"
          | request.required.acks = "0"
          | metadata.broker.list = "localhost:9092"
        """.stripMargin)
      val testHandler = new KafkaSpanHandler()
      testHandler.configure(config)

      val producerConfig = testHandler.producer.config
      producerConfig.brokerList shouldBe "localhost:9092"
      producerConfig.compressionCodec shouldBe GZIPCompressionCodec
      producerConfig.producerType shouldBe "async"
      producerConfig.batchNumMessages shouldBe 1
      producerConfig.messageSendMaxRetries shouldBe 3
      producerConfig.requestRequiredAcks shouldBe 0
    }
  }
} 
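Each test above runs in new KafkaFixture, so every case constructs a fresh fixture and mock state cannot leak between tests. A minimal sketch of that fixture-trait idiom:

import org.scalatest.{Matchers, WordSpec}

class FixtureTraitSketch extends WordSpec with Matchers {

  trait Fixture {
    // Re-created for every test body that instantiates the trait.
    val buffer = scala.collection.mutable.ListBuffer.empty[Int]
  }

  "each test" should {
    "get its own fixture instance" in new Fixture {
      buffer += 1
      buffer should have size 1
    }

    "not see state from other tests" in new Fixture {
      buffer shouldBe empty
    }
  }
}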
Example 115
Source File: AvroConversionSpec.scala    From money   with Apache License 2.0 5 votes vote down vote up
package com.comcast.money.wire

import com.comcast.money.api.{ Note, SpanId, SpanInfo }
import com.comcast.money.core.CoreSpanInfo
import org.scalatest.{ Inspectors, Matchers, WordSpec }

class AvroConversionSpec extends WordSpec with Matchers with Inspectors {

  import AvroConversions._

  import scala.collection.JavaConverters._

  "Avro Conversion" should {
    "roundtrip" in {
      val orig = CoreSpanInfo(
        id = new SpanId("foo", 1L),
        name = "key",
        appName = "app",
        host = "host",
        startTimeMillis = 1L,
        success = true,
        durationMicros = 35L,
        notes = Map[String, Note[_]](
          "what" -> Note.of("what", 1L),
          "when" -> Note.of("when", 2L),
          "bob" -> Note.of("bob", "craig"),
          "none" -> Note.of("none", null),
          "bool" -> Note.of("bool", true),
          "dbl" -> Note.of("dbl", 1.0)).asJava).asInstanceOf[SpanInfo]

      val bytes = orig.convertTo[Array[Byte]]
      val roundtrip = bytes.convertTo[SpanInfo]

      roundtrip.appName shouldEqual orig.appName
      roundtrip.name shouldEqual orig.name
      roundtrip.durationMicros shouldEqual orig.durationMicros
      roundtrip.host shouldEqual orig.host
      roundtrip.id shouldEqual orig.id
      roundtrip.success shouldEqual orig.success
      roundtrip.startTimeMillis shouldEqual orig.startTimeMillis
      roundtrip.notes shouldEqual orig.notes
    }
  }
} 
Example 116
Source File: JsonConversionSpec.scala    From money   with Apache License 2.0 5 votes vote down vote up
package com.comcast.money.wire

import com.comcast.money.api.{ Note, SpanId, SpanInfo }
import com.comcast.money.core.CoreSpanInfo
import org.scalatest.{ Inspectors, Matchers, WordSpec }

class JsonConversionSpec extends WordSpec with Matchers with Inspectors {

  import JsonConversions._

  import scala.collection.JavaConverters._

  val orig = CoreSpanInfo(
    id = new SpanId("foo", 1L),
    name = "key",
    appName = "app",
    host = "host",
    startTimeMillis = 1L,
    success = true,
    durationMicros = 35L,
    notes = Map[String, Note[_]](
      "what" -> Note.of("what", 1L),
      "when" -> Note.of("when", 2L),
      "bob" -> Note.of("bob", "craig"),
      "none" -> Note.of("none", null),
      "bool" -> Note.of("bool", true),
      "dbl" -> Note.of("dbl", 1.0)).asJava).asInstanceOf[SpanInfo]

  "Json Conversion" should {
    "roundtrip" in {

      val json = orig.convertTo[String]
      val converted = json.convertTo[SpanInfo]

      converted.appName shouldEqual orig.appName
      converted.name shouldEqual orig.name
      converted.durationMicros shouldEqual orig.durationMicros
      converted.host shouldEqual orig.host
      converted.id shouldEqual orig.id
      converted.success shouldEqual orig.success
      converted.startTimeMillis shouldEqual orig.startTimeMillis
      converted.notes shouldEqual orig.notes
    }
  }
} 
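Both conversion specs mix in Inspectors, whose forAll applies an assertion to every element of a collection and reports exactly which element failed. A minimal sketch:

import org.scalatest.{Inspectors, Matchers, WordSpec}

class InspectorsSketch extends WordSpec with Matchers with Inspectors {
  "forAll" should {
    "assert on every element and pinpoint the failing one" in {
      val notes = List("what", "when", "bob")
      forAll(notes) { name => name should not be empty }
    }
  }
}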
Example 117
Source File: ExampleHiveActivitySpec.scala    From hyperion   with Apache License 2.0 5 votes vote down vote up
package com.krux.hyperion.examples

import org.scalatest.WordSpec
import org.json4s.JsonDSL._
import org.json4s._

class ExampleHiveActivitySpec extends WordSpec {
  "ExampleHiveActivitySpec" should {
    "produce correct pipeline JSON" in {
      val pipelineJson = ExampleHiveActivity.toJson
      val objectsField = pipelineJson.children.head.children.sortBy(o => (o \ "name").toString)
      assert(objectsField.size == 5)

      val defaultObj = objectsField(1)
      val defaultObjShouldBe = ("id" -> "Default") ~
        ("name" -> "Default") ~
        ("scheduleType" -> "cron") ~
        ("failureAndRerunMode" -> "CASCADE") ~
        ("pipelineLogUri" -> "s3://your-bucket/datapipeline-logs/") ~
        ("role" -> "DataPipelineDefaultRole") ~
        ("resourceRole" -> "DataPipelineDefaultResourceRole") ~
        ("schedule" -> ("ref" -> "PipelineSchedule"))
      assert(defaultObj === defaultObjShouldBe)

      val mapReduceCluster = objectsField.head
      val mapReduceClusterId = (mapReduceCluster \ "id").values.toString
      assert(mapReduceClusterId.startsWith("EmrCluster_"))
      val mapReduceClusterShouldBe =
        ("id" -> mapReduceClusterId) ~
          ("name" -> "Cluster with release label") ~
          ("bootstrapAction" -> Seq.empty[String]) ~
          ("masterInstanceType" -> "m3.xlarge") ~
          ("coreInstanceType" -> "m3.xlarge") ~
          ("coreInstanceCount" -> "2") ~
          ("taskInstanceType" -> "#{my_InstanceType}") ~
          ("taskInstanceCount" -> "#{my_InstanceCount}") ~
          ("terminateAfter" -> "8 hours") ~
          ("keyPair" -> "your-aws-key-pair") ~
          ("type" -> "EmrCluster") ~
          ("region" -> "us-east-1") ~
          ("role" -> "DataPipelineDefaultRole") ~
          ("resourceRole" -> "DataPipelineDefaultResourceRole") ~
          ("releaseLabel" -> "emr-4.4.0") ~
          ("initTimeout" -> "1 hours")
      assert(mapReduceCluster === mapReduceClusterShouldBe)

      val pipelineSchedule = objectsField(3)
      val pipelineScheduleShouldBe =
        ("id" -> "PipelineSchedule") ~
          ("name" -> "PipelineSchedule") ~
          ("period" -> "1 days") ~
          ("startAt" -> "FIRST_ACTIVATION_DATE_TIME") ~
          ("occurrences" -> "3") ~
          ("type" -> "Schedule")
      assert(pipelineSchedule === pipelineScheduleShouldBe)

      val dataNode = objectsField(4)
      val dataNodeId = (dataNode \ "id").values.toString
      assert(dataNodeId.startsWith("S3Folder_"))
      val dataNodeShouldBe =
        ("id" -> dataNodeId) ~
          ("name" -> dataNodeId) ~
          ("directoryPath" -> "#{my_S3Location}") ~
          ("type" -> "S3DataNode")
      assert(dataNode === dataNodeShouldBe)

      val hiveActivity = objectsField(2)
      val hiveActivityId = (hiveActivity \ "id").values.toString
      assert(hiveActivityId.startsWith("HiveActivity_"))
      val hiveActivityShouldBe =
        ("id" -> hiveActivityId) ~
          ("name" -> hiveActivityId) ~
          ("hiveScript" -> s"INSERT OVERWRITE TABLE $${output1} SELECT x.a FROM $${input1} x JOIN $${input2} y ON x.id = y.id;") ~
          ("stage" -> "true") ~
          ("input" -> Seq("ref" -> dataNodeId, "ref" -> dataNodeId)) ~
          ("output" -> Seq("ref" -> dataNodeId)) ~
          ("runsOn" -> ("ref" -> mapReduceClusterId)) ~
          ("type" -> "HiveActivity")
      assert(hiveActivity === hiveActivityShouldBe)
    }
  }
} 
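The expected objects above are assembled with json4s's JsonDSL, where -> builds a field and ~ merges fields into a JObject. A minimal sketch of building and querying such a value:

import org.json4s._
import org.json4s.JsonDSL._
import org.scalatest.WordSpec

class JsonDslSketch extends WordSpec {
  "JsonDSL" should {
    "build nested objects with -> and ~" in {
      val obj: JObject =
        ("id" -> "Default") ~
          ("schedule" -> ("ref" -> "PipelineSchedule")) // nested object

      assert(obj \ "id" === JString("Default"))
      assert(obj \ "schedule" \ "ref" === JString("PipelineSchedule"))
    }
  }
}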
Example 118
Source File: ExampleGoogleUploadActivitySpec.scala    From hyperion   with Apache License 2.0 5 votes vote down vote up
package com.krux.hyperion.examples

import org.scalatest.WordSpec
import org.json4s.JsonDSL._
import org.json4s._

class ExampleGoogleUploadActivitySpec extends WordSpec {

  "ExampleGoogleUploadActivitySpec" should {
    "produce correct pipeline JSON" in {
      val pipelineJson = ExampleGoogleUploadActivity.toJson
      val objectsField = pipelineJson.children.head.children.sortBy(o => (o \ "name").toString)
      assert(objectsField.size == 6)

      val defaultObj = objectsField.head
      val defaultObjShouldBe = ("id" -> "Default") ~
        ("name" -> "Default") ~
        ("scheduleType" -> "cron") ~
        ("failureAndRerunMode" -> "CASCADE") ~
        ("pipelineLogUri" -> "s3://your-bucket/datapipeline-logs/") ~
        ("role" -> "DataPipelineDefaultRole") ~
        ("resourceRole" -> "DataPipelineDefaultResourceRole") ~
        ("schedule" -> ("ref" -> "PipelineSchedule"))
      assert(defaultObj === defaultObjShouldBe)

      val ec2 = objectsField(1)
      val ec2Id: String = (ec2 \ "id").values.toString
      assert(ec2Id.startsWith("Ec2Resource"))
      val ec2ShouldBe =
        ("id" -> ec2Id) ~
          ("name" -> ec2Id) ~
          ("terminateAfter" -> "8 hours") ~
          ("imageId" -> "ami-f6795a8c") ~
          ("instanceType" -> "m1.small") ~
          ("region" -> "us-east-1") ~
          ("securityGroups" -> Seq("your-security-group")) ~
          ("associatePublicIpAddress" -> "false") ~
          ("keyPair" -> "your-aws-key-pair") ~
          ("type" -> "Ec2Resource") ~
          ("role" -> "DataPipelineDefaultRole") ~
          ("resourceRole" -> "DataPipelineDefaultResourceRole") ~
          ("initTimeout" -> "1 hours")
      assert(ec2 === ec2ShouldBe)

      val pipelineSchedule = objectsField(4)
      val pipelineScheduleShouldBe =
        ("id" -> "PipelineSchedule") ~
          ("name" -> "PipelineSchedule") ~
          ("period" -> "1 days") ~
          ("startAt" -> "FIRST_ACTIVATION_DATE_TIME") ~
          ("type" -> "Schedule")
      assert(pipelineSchedule === pipelineScheduleShouldBe)

      val dataNode = objectsField(5)
      val dataNodeId = (dataNode \ "id").values.toString
      assert(dataNodeId.startsWith("S3File_"))
      val dataNodeShouldBe =
        ("id" -> dataNodeId) ~
          ("name" -> dataNodeId) ~
          ("filePath" -> "s3://the_source") ~
          ("type" -> "S3DataNode") ~
          ("s3EncryptionType" -> "SERVER_SIDE_ENCRYPTION")
      assert(dataNode === dataNodeShouldBe)

      val uploadActivity = objectsField(3)
      val uploadActivityId = (uploadActivity \ "id").values.toString
      assert(uploadActivityId.startsWith("GoogleStorageUploadActivity_"))
      val uploadActivityShouldBe =
        ("id" -> uploadActivityId) ~
          ("name" -> "Google Upload Activity") ~
          ("scriptUri" -> "s3://your-bucket/datapipeline/scripts/activities/gsutil-upload.sh") ~
          ("scriptArgument" -> Seq("s3://the config location", "gs://upload_location", "false")) ~
          ("stage" -> "true") ~
          ("input" -> Seq("ref" -> dataNodeId)) ~
          ("runsOn" -> ("ref" -> ec2Id)) ~
          ("type" -> "ShellCommandActivity")
      assert(uploadActivity === uploadActivityShouldBe)

      val recursiveUploadActivity = objectsField(2)
      val recursiveUploadActivityId = (recursiveUploadActivity \ "id").values.toString
      assert(recursiveUploadActivityId.startsWith("GoogleStorageUploadActivity_"))
      val recursiveUploadActivityShouldBe =
        ("id" -> recursiveUploadActivityId) ~
          ("name" -> "Google Upload Activity - Recursive") ~
          ("scriptUri" -> "s3://your-bucket/datapipeline/scripts/activities/gsutil-upload.sh") ~
          ("scriptArgument" -> Seq("s3://the config location", "gs://upload_location", "true")) ~
          ("stage" -> "true") ~
          ("input" -> Seq("ref" -> dataNodeId)) ~
          ("runsOn" -> ("ref" -> ec2Id)) ~
          ("dependsOn" -> List("ref" -> uploadActivityId)) ~
          ("type" -> "ShellCommandActivity")
      assert(recursiveUploadActivity === recursiveUploadActivityShouldBe)
    }
  }

} 
Example 119
Source File: ExampleS3DistCpWorkflowSpec.scala    From hyperion   with Apache License 2.0 5 votes vote down vote up
package com.krux.hyperion.examples

import org.scalatest.WordSpec
import org.json4s.JsonDSL._
import org.json4s._

class ExampleS3DistCpWorkflowSpec extends WordSpec {

  "ExampleS3DistCpWorkflowSpec" should {

    "produce correct pipeline JSON" in {

      val pipelineJson = ExampleS3DistCpWorkflow.toJson
      val objectsField = pipelineJson.children.head.children.sortBy(o => (o \ "name").toString)

      // have the correct number of objects
      assert(objectsField.size === 4)

      // the Default object (objects are sorted by name above)
      val defaultObj = objectsField(1)
      val defaultObjShouldBe = ("id" -> "Default") ~
        ("name" -> "Default") ~
        ("scheduleType" -> "cron") ~
        ("failureAndRerunMode" -> "CASCADE") ~
        ("pipelineLogUri" -> "s3://your-bucket/datapipeline-logs/") ~
        ("role" -> "DataPipelineDefaultRole") ~
        ("resourceRole" -> "DataPipelineDefaultResourceRole") ~
        ("schedule" -> ("ref" -> "PipelineSchedule"))
      assert(defaultObj === defaultObjShouldBe)

      val pipelineSchedule = objectsField(2)
      val pipelineScheduleShouldBe =
        ("id" -> "PipelineSchedule") ~
        ("name" -> "PipelineSchedule") ~
        ("period" -> "1 days") ~
        ("startAt" -> "FIRST_ACTIVATION_DATE_TIME") ~
        ("occurrences" -> "3") ~
        ("type" -> "Schedule")
      assert(pipelineSchedule === pipelineScheduleShouldBe)

      val mapReduceCluster = objectsField(0)
      val mapReduceClusterId = (mapReduceCluster \ "id").values.toString
      assert(mapReduceClusterId.startsWith("EmrCluster_"))
      val mapReduceClusterShouldBe =
        ("id" -> mapReduceClusterId) ~
        ("name" -> "Cluster with release label") ~
        ("bootstrapAction" -> Seq.empty[String]) ~
        ("masterInstanceType" -> "m3.xlarge") ~
        ("coreInstanceType" -> "m3.xlarge") ~
        ("coreInstanceCount" -> "2") ~
        ("taskInstanceType" -> "#{my_InstanceType}") ~
        ("taskInstanceCount" -> "#{my_InstanceCount}") ~
        ("terminateAfter" -> "8 hours") ~
        ("keyPair" -> "your-aws-key-pair") ~
        ("type" -> "EmrCluster") ~
        ("region" -> "us-east-1") ~
        ("role" -> "DataPipelineDefaultRole") ~
        ("resourceRole" -> "DataPipelineDefaultResourceRole") ~
        ("releaseLabel" -> "emr-4.4.0") ~
        ("initTimeout" -> "1 hours")
      assert(mapReduceCluster === mapReduceClusterShouldBe)

      val s3DistCpActivity = objectsField(3)
      val s3DistCpActivityId = (s3DistCpActivity \ "id").values.toString
      assert(s3DistCpActivityId.startsWith("S3DistCpActivity_"))
      val s3DistCpActivityShouldBe =
        ("id" -> s3DistCpActivityId) ~
        ("name" -> "s3DistCpActivity") ~
        ("runsOn" -> ("ref" -> mapReduceClusterId)) ~
        ("step" -> List("command-runner.jar,s3-dist-cp,--src,s3://the-source,--dest,#{my_HdfsLocation},--outputCodec,gz")) ~
        ("type" -> "EmrActivity")
      assert(s3DistCpActivity === s3DistCpActivityShouldBe)

    }
  }
} 
Example 120
Source File: RedshiftUnloadActivitySpec.scala    From hyperion   with Apache License 2.0 5 votes vote down vote up
package com.krux.hyperion.activity

import com.typesafe.config.ConfigFactory
import org.scalatest.WordSpec

import com.krux.hyperion.common.S3Uri._
import com.krux.hyperion.database.RedshiftDatabase
import com.krux.hyperion.expression.{Parameter, ParameterValues}
import com.krux.hyperion.HyperionContext
import com.krux.hyperion.resource.Ec2Resource


class RedshiftUnloadActivitySpec extends WordSpec {

  "RedshiftUnloadActivity" should {

    implicit val hc: HyperionContext = new HyperionContext(ConfigFactory.load("example"))
    implicit val pv: ParameterValues = new ParameterValues()

    val ec2 = Ec2Resource()

    val awsAccessKeyId = Parameter("AwsAccessKeyId", "someId").encrypted
    val awsAccessKeySecret = Parameter.encrypted("AwsAccessKeySecret", "someSecret")

    val mockRedshift = RedshiftDatabase("mockuser", "mockpass", "mock-redshift")
      .named("_MockRedshift")
      .withDatabaseName("mock_db")

    "Produce the correct unload script" in {
      val testingQuery = """
        |select * from t where
        |id = 'myid'
        |and {tim'e} = #{format(@actualRunTime, 'yyyy-MM-dd')}
        |and some{OtherWeird'Forma}t = #{"{ } a'dfa {" + ' { ex"aef { }'}
        |and name = 'abcdefg'
        |limit 10""".stripMargin

      val escapedUnloadScript = """
        |UNLOAD ('
        |select * from t where
        |id = \\'myid\\'
        |and {tim\\'e} = #{format(@actualRunTime, 'yyyy-MM-dd')}
        |and some{OtherWeird\\'Forma}t = #{"{ } a'dfa {" + ' { ex"aef { }'}
        |and name = \\'abcdefg\\'
        |limit 10')
        |TO 's3://not-important/'
        |WITH CREDENTIALS AS
        |'aws_access_key_id=#{*my_AwsAccessKeyId};aws_secret_access_key=#{*my_AwsAccessKeySecret}'
      """.stripMargin

      val act = RedshiftUnloadActivity(
          mockRedshift, testingQuery, s3 / "not-important/", awsAccessKeyId, awsAccessKeySecret
        )(ec2)

      assert(act.unloadScript.trim === escapedUnloadScript.trim)

    }
  }
} 
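Comparing a generated script against a stripMargin literal, as above, keeps multi-line expectations readable; trimming both sides avoids spurious failures from leading or trailing newlines. A minimal sketch:

import org.scalatest.WordSpec

class MultilineComparisonSketch extends WordSpec {
  "multi-line expectations" should {
    "compare cleanly after stripMargin and trim" in {
      val rendered = "SELECT 1\nFROM t"
      val expected = """
        |SELECT 1
        |FROM t
      """.stripMargin

      assert(rendered.trim === expected.trim)
    }
  }
}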
Example 121
Source File: PipelineObjectIdSpec.scala    From hyperion   with Apache License 2.0 5 votes vote down vote up
package com.krux.hyperion

import com.krux.hyperion.common.{ NameGroupObjectId, RandomizedObjectId }
import org.scalatest.WordSpec

class PipelineObjectIdSpec extends WordSpec {

  "RandomizedObjectId" should {
    "generate a stable id" in {
      val r = RandomizedObjectId("seed")
      assert(r.toString == r.copy().toString)
    }
  }

  "NameGroupObjectId" should {
    "generate a stable id with name" in {
      val n = NameGroupObjectId("name", "")
      assert(n.toString == n.copy().toString)
    }

    "generate a stable id with group" in {
      val g = NameGroupObjectId("", "g")
      assert(g.toString == g.copy().toString)
    }
  }

} 
Example 122
Source File: AdpDataPipelineObjectSpec.scala    From hyperion   with Apache License 2.0 5 votes vote down vote up
package com.krux.hyperion.aws

import org.json4s.JsonDSL._
import org.scalatest.WordSpec

class AdpDataPipelineObjectSpec extends WordSpec {

  class TestObject extends AdpDataPipelineObject {
    val id: String = "TestId"
    val name: Option[String] = None
    val `type`: String = "TestObject"
  }

  class TestObjectWithName extends AdpDataPipelineObject {
    val id: String = "TestId"
    val name: Option[String] = Option("TestName")
    val `type`: String = "TestObject"
  }

  "DataPipelineObject" should {

    "produce JSON" in {
      val resultShouldBe = ("id" -> "TestId") ~ ("type" -> "TestObject")
      val testObj = new TestObject()
      assert(AdpJsonSerializer(testObj) === resultShouldBe)
    }

  }
} 
Example 123
Source File: AdpDataFormatsSpec.scala    From hyperion   with Apache License 2.0 5 votes vote down vote up
package com.krux.hyperion.aws

import org.json4s.JsonDSL._
import org.scalatest.WordSpec

class AdpDataFormatsSpec extends WordSpec {
  "TsvDataFormat" should {
    "converts to Json" in {
      val testObj = AdpTsvDataFormat(
        "tsv",
        None,
        None,
        Some("\\")
      )
      val objShouldBe = ("id" -> "tsv") ~
        ("escapeChar" -> "\\") ~
        ("type" -> "TSV")

      assert(AdpJsonSerializer(testObj) === objShouldBe)
    }
  }
} 
Example 124
Source File: RandomCoreTest.scala    From jvm-toxcore-c   with GNU General Public License v3.0 5 votes vote down vote up
package im.tox.core.random

import org.scalacheck.Gen
import org.scalatest.WordSpec
import org.scalatest.prop.PropertyChecks

@SuppressWarnings(Array("org.wartremover.warts.Equals"))
final class RandomCoreTest extends WordSpec with PropertyChecks {

  "entropy" should {
    "be 0 for the empty sequence" in {
      assert(RandomCore.entropy(Nil) == 0)
    }

    "be 0 for sequences of all the same element" in {
      forAll { (bytes: Seq[Byte], filler: Byte) =>
        assert(RandomCore.entropy(bytes.map(_ => filler)) == 0)
      }
    }

    "be near 1 for long sequences" in {
      assert(RandomCore.entropy((0 to 1000).map(_.toByte)) > 0.999)
    }

    "be 1 for sequences containing every byte equally often (maximum)" in {
      forAll(Gen.choose(1, 10)) { count =>
        val bytes = (1 to count).flatMap(_ => (0 to 255).map(_.toByte))
        assert(RandomCore.entropy(bytes) == 1)
      }
    }

    "be sorting-insensitive (symmetry)" in {
      forAll { (bytes: Seq[Byte]) =>
        assert(RandomCore.entropy(bytes) == RandomCore.entropy(bytes.sorted))
        assert(RandomCore.entropy(bytes) == RandomCore.entropy(bytes.reverse))
      }
    }
  }

} 
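Passing an explicit generator to forAll, as the maximum-entropy case above does with Gen.choose, constrains the inputs when the default Arbitrary instance is too broad. A minimal sketch:

import org.scalacheck.Gen
import org.scalatest.WordSpec
import org.scalatest.prop.PropertyChecks

class ExplicitGenSketch extends WordSpec with PropertyChecks {
  "a bounded generator" should {
    "only produce values in its range" in {
      forAll(Gen.choose(0, 255)) { n =>
        assert(n >= 0 && n <= 255)
        assert(n.toByte == (n & 0xff).toByte) // unsigned byte round trip within range
      }
    }
  }
}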
Example 125
Source File: KafkaProducerSpec.scala    From freestyle-kafka   with Apache License 2.0 5 votes vote down vote up
package freestyle
package kafka

import freestyle.free._
import net.manub.embeddedkafka.EmbeddedKafka
import org.scalatest.WordSpec

import scala.concurrent.duration._

class KafkaProducerSpec extends WordSpec with FSKafkaAlgebraSpec {

  "Producer can be reused after closed" in {
    withProducer[String].apply { producer =>
      for {
        _                 <- producer.close()
        isClosed          <- producer.isClosed
        _                 <- producer.metrics
        isClosedAfterUsed <- producer.isClosed
      } yield (isClosed, isClosedAfterUsed)
    } shouldBe Right((true, false))
  }

  "Producer can be reused after closed with a timeout" in {
    withProducer[String].apply { producer =>
      for {
        _                 <- producer.closeWaitingFor(5.seconds)
        isClosed          <- producer.isClosed
        _                 <- producer.metrics
        isClosedAfterUsed <- producer.isClosed
      } yield (isClosed, isClosedAfterUsed)
    } shouldBe Right((true, false))
  }

  "Producer can send a message to a topic" in {
    withProducer[String].apply { producer =>
      for {
        _       <- producer.sendToTopic("mytopic", ("key", "mymessage"))
        _       <- producer.flush()
        message <- FreeS.pure(EmbeddedKafka.consumeFirstStringMessageFrom("mytopic", true))
      } yield message
    } shouldBe Right("mymessage")
  }

  "Producer can send many messages to a topic" in {
    val records = List("key" -> "mymessage1", "key2" -> "mymessage2")
    withProducer[String].apply { producer =>
      for {
        _        <- producer.sendManyToTopic("mytopic", records)
        _        <- producer.flush()
        messages <- FreeS.pure(EmbeddedKafka.consumeNumberStringMessagesFrom("mytopic", 2, true))
      } yield messages
    } shouldBe Right(List("mymessage1", "mymessage2"))
  }

  "Producer can obtain metrics" in {
    withProducer[String].apply { _.metrics }.isRight shouldBe true
  }

} 
Example 126
Source File: CarTraitsTest.scala    From scala-tutorials   with MIT License 5 votes vote down vote up
package com.baeldung.scala.classcompositionwithtraits

import com.baeldung.scala.classcompositionwithtraits.CarTraits._
import org.scalatest.{Matchers, WordSpec}



class CarTraitsTest extends WordSpec with Matchers {

  "Class that extends Car" should {
    "inherit abstract class fields" in {
      val bmw = new BMW0("F15", 309) with Printable
      bmw.horsePower shouldBe 309
      bmw.model shouldBe "F15"
      bmw.print() shouldBe s"the model ${bmw.model} has ${bmw.horsePower} HP under the hood. "
    }
  }

  "Objects that extends Car" should {
    "inherit abstract class fields and methods" in {
      val bmwX7 = BMW.X7()
      bmwX7.horsePower shouldBe 335
      bmwX7.model shouldBe "X7"
      bmwX7.print() shouldBe s"the model ${bmwX7.model} has ${bmwX7.horsePower} HP under the hood. "
    }
  }

  "Classes that extends Car with SimpleMarshaller" should {
    "inherit abstract class fields and methods" +
      "and be marshallable" in {
      val bmw0 = new BMW0("F15", 309) with SimpleMarshaller
      bmw0.toJson shouldBe "{\"model\":F15,\n\"horsePower\":309}"
    }
  }

  "Classes that extends Car with Marshaller" should {
    "inherit abstract class fields and methods" +
      "and be marshallable" in {
      val bmw0 = new BMW0("F15", 309) with Marshaller
      bmw0.toJson shouldBe "{\"model\":F15,\n\"horsePower\":309}"
    }
  }

  // In this case the print method of the rightmost trait calls the print method of the previously
  // mixed-in trait (see the super.print call in the implementation).
  "Classes that extend Car with PrettyPrintable with ShortPrintable" should {
    "behave differently depending on the mixing order" in {
      val bmwPrintable1 = new BMWPrintable("F15", 309) with PrettyPrintable with ShortPrintable
      val bmwPrintable2 = new BMWPrintable("F15", 309) with ShortPrintable with PrettyPrintable
      bmwPrintable1.printInfo() shouldBe s"the model ${bmwPrintable1.model} has ${bmwPrintable1.horsePower}" +
        s" HP under the hood. You'll definitelly enjoy driving!"
      bmwPrintable2.printInfo() shouldBe s"the model ${bmwPrintable1.model} has ${bmwPrintable1.horsePower} " +
        s"HP under the hood. "
    }
  }

} 
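The order-sensitive behavior above is trait linearization at work: super in a mixed-in trait refers to the next trait to its left, so the rightmost trait runs first. A minimal sketch, independent of the Car hierarchy:

import org.scalatest.{Matchers, WordSpec}

class LinearizationSketch extends WordSpec with Matchers {

  trait Renderer { def render: String = "base" }
  trait Doubling extends Renderer { override def render: String = super.render * 2 }
  trait Excited extends Renderer { override def render: String = super.render + "!" }

  "mixin order" should {
    "determine which implementation runs first" in {
      // Excited runs first, its super is Doubling: ("base" * 2) + "!"
      (new Renderer with Doubling with Excited {}).render shouldBe "basebase!"
      // Doubling runs first, its super is Excited: ("base" + "!") * 2
      (new Renderer with Excited with Doubling {}).render shouldBe "base!base!"
    }
  }
}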
Example 127
Source File: MoneyTest.scala    From scala-tutorials   with MIT License 5 votes vote down vote up
package com.baeldung.scala.implicitclasses

import org.scalatest.{Matchers, WordSpec}

class MoneyTest extends WordSpec with Matchers {
  val amount: Double = 30.5

  "MoneySyntax" should {
    import MoneySyntax._
    "create dollars" in {
      amount.dollars shouldBe Money(amount, Currency.USD)
    }
    "create euros" in {
      amount.euros shouldBe Money(amount, Currency.EUR)
    }
    "create pounds" in {
      amount.pounds shouldBe Money(amount, Currency.GBP)
    }
  }
} 
Example 128
Source File: TuplesUnitTest.scala    From scala-tutorials   with MIT License 5 votes vote down vote up
package com.baeldung.scala.tuples

import org.scalatest.{Matchers, WordSpec}

class TuplesUnitTest extends WordSpec with Matchers {
  val tuple = ("Joe", 34)

  "Tuples" should {
    "accessing values use _.1, _.2 syntax" in {
      tuple._1 shouldBe "Joe"
      tuple._2 shouldBe 34
    }
    "accessing values using pattern matching" in {
      val (name, age) = tuple
      name shouldBe "Joe"
      age shouldBe 34
    }
    "not contain more then 22 elements" in {
      "(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23)" shouldNot compile
    }
  }
} 
Example 129
Source File: SortingUnitTest.scala    From scala-tutorials   with MIT License 5 votes vote down vote up
package com.baeldung.scala.sorting

import org.junit.Test
import org.scalatest.{Matchers, WordSpec}

class SortingUnitTest extends WordSpec with Matchers {

  case class User(name: String, age: Int) extends Ordered[User] {
    override def compare(that: User): Int =
      java.lang.Integer.compare(age, that.age)
  }

  val users = List(
    User("Mike", 43),
    User("Mike", 16),
    User("Kelly", 21)
  )

  @Test
  def givenUsers_whenSorted_thenSortUsingOrdered(): Unit = {
    users.sorted shouldBe List(
      User("Mike", 16),
      User("Kelly", 21),
      User("Mike", 43),
    )
  }

  @Test
  def givenUsers_whenSorted_thenSortUsingOrdering(): Unit = {
    implicit val userOrdering: Ordering[User] = Ordering.by(_.age)

    users.sorted shouldBe List(
      User("Mike", 16),
      User("Kelly", 21),
      User("Mike", 43),
    )
  }

  @Test
  def givenUsers_whenSorted_thenSortUsingReverseOrdering(): Unit = {
    implicit val userOrdering: Ordering[User] =
      Ordering.by[User, Int](_.age).reverse

    users.sorted shouldBe List(
      User("Mike", 43),
      User("Kelly", 21),
      User("Mike", 16),
    )
  }

  @Test
  def givenUsers_whenSortBy_thenSortByField(): Unit = {
    users.sortBy(_.name) shouldBe List(
      User("Kelly", 21),
      User("Mike", 43),
      User("Mike", 16),
    )
  }

  @Test
  def givenUsers_whenSortWith_thenSortWithCompareFunction(): Unit = {
    users.sortWith(_.age > _.age) shouldBe List(
      User("Mike", 43),
      User("Kelly", 21),
      User("Mike", 16),
    )
  }

  @Test
  def givenUsers_whenSortBy_thenSortByMultipleFields(): Unit = {
    users.sortBy(u => (u.name, u.age)) shouldBe List(
      User("Kelly", 21),
      User("Mike", 16),
      User("Mike", 43),
    )
  }
} 
Example 130
Source File: FeatureListEncoderTest.scala    From ecosystem   with Apache License 2.0 5 votes vote down vote up
package org.tensorflow.spark.datasources.tfrecords.serde

import org.scalatest.{Matchers, WordSpec}
import org.tensorflow.spark.datasources.tfrecords.TestingUtils._

import scala.collection.JavaConverters._

class FeatureListEncoderTest extends WordSpec with Matchers {

  "Int64 feature list encoder" should {

    "Encode inputs to feature list of Int64" in {
      val longListOfLists = Seq(Seq(3L,5L,Int.MaxValue+6L), Seq(-1L,-6L))
      val longFeatureList = Int64FeatureListEncoder.encode(longListOfLists)

      longFeatureList.getFeatureList.asScala.map(_.getInt64List.getValueList.asScala.toSeq) should equal (longListOfLists)
    }

    "Encode empty array to empty feature list" in {
      val longFeatureList = Int64FeatureListEncoder.encode(Seq.empty[Seq[Long]])
      assert(longFeatureList.getFeatureList.size() === 0)
    }
  }

  "Float feature list encoder" should {

    "Encode inputs to feature list of Float" in {
      val floatListOfLists = Seq(Seq(-2.67F, 1.5F, 0F), Seq(-1.4F,-6F))
      val floatFeatureList = FloatFeatureListEncoder.encode(floatListOfLists)

      assert(floatFeatureList.getFeatureList.asScala.map(_.getFloatList.getValueList.asScala.map(_.toFloat).toSeq) ~== floatListOfLists)
    }

    "Encode empty array to empty feature list" in {
      val floatFeatureList = FloatFeatureListEncoder.encode(Seq.empty[Seq[Float]])
      assert(floatFeatureList.getFeatureList.size() === 0)
    }
  }

  "Bytes feature list encoder" should {

    "Encode inputs to feature list of bytes" in {
      val bytesListOfLists = Seq(Seq("alice".getBytes, "bob".getBytes), Seq("charles".getBytes))
      val bytesFeatureList = BytesFeatureListEncoder.encode(bytesListOfLists)

      assert(bytesFeatureList.getFeatureList.asScala.map(_.getBytesList.getValueList.asScala.toSeq.map(_.toByteArray.deep)) === bytesListOfLists.map(_.map(_.deep)))
    }

    "Encode empty array to empty feature list" in {
      val bytesFeatureList = BytesFeatureListEncoder.encode(Seq.empty[Seq[Array[Byte]]])
      assert(bytesFeatureList.getFeatureList.size() === 0)
    }
  }
} 
Example 131
Source File: FeatureEncoderTest.scala    From ecosystem   with Apache License 2.0 5 votes vote down vote up
package org.tensorflow.spark.datasources.tfrecords.serde

import org.scalatest.{Matchers, WordSpec}
import org.tensorflow.spark.datasources.tfrecords.TestingUtils._
import scala.collection.JavaConverters._

class FeatureEncoderTest extends WordSpec with Matchers {

  "Int64List feature encoder" should {
    "Encode inputs to Int64List" in {
      val longFeature = Int64ListFeatureEncoder.encode(Seq(10L))
      val longListFeature = Int64ListFeatureEncoder.encode(Seq(3L,5L,6L))

      assert(longFeature.getInt64List.getValueList.asScala.toSeq === Seq(10L))
      assert(longListFeature.getInt64List.getValueList.asScala.toSeq === Seq(3L, 5L, 6L))
    }

    "Encode empty list to empty feature" in {
      val longListFeature = Int64ListFeatureEncoder.encode(Seq.empty[Long])
      assert(longListFeature.getInt64List.getValueList.size() === 0)
    }
  }

  "FloatList feature encoder" should {
    "Encode inputs to FloatList" in {
      val floatFeature = FloatListFeatureEncoder.encode(Seq(2.5F))
      val floatListFeature = FloatListFeatureEncoder.encode(Seq(1.5F,6.8F,-3.2F))

      assert(floatFeature.getFloatList.getValueList.asScala.toSeq.map(_.toFloat) ~== Seq(2.5F))
      assert(floatListFeature.getFloatList.getValueList.asScala.toSeq.map(_.toFloat) ~== Seq(1.5F,6.8F,-3.2F))
    }

    "Encode empty list to empty feature" in {
      val floatListFeature = FloatListFeatureEncoder.encode(Seq.empty[Float])
      assert(floatListFeature.getFloatList.getValueList.size() === 0)
    }
  }

  "ByteList feature encoder" should {
    "Encode inputs to ByteList" in {
      val binFeature = BytesListFeatureEncoder.encode(Seq(Array(0xff.toByte, 0xd8.toByte)))
      val binListFeature = BytesListFeatureEncoder.encode(Seq(Array(0xff.toByte, 0xd8.toByte), Array(0xff.toByte, 0xd9.toByte)))

      assert(binFeature.getBytesList.getValueList.asScala.toSeq.map(_.toByteArray.deep) === Seq(Array(0xff.toByte, 0xd8.toByte).deep))
      assert(binListFeature.getBytesList.getValueList.asScala.map(_.toByteArray.deep) === Seq(Array(0xff.toByte, 0xd8.toByte).deep, Array(0xff.toByte, 0xd9.toByte).deep))
    }

    "Encode empty list to empty feature" in {
      val binListFeature = BytesListFeatureEncoder.encode(Seq.empty[Array[Byte]])
      assert(binListFeature.getBytesList.getValueList.size() === 0)
    }
  }
} 
Example 132
Source File: KafkaJsonConsumerSpec.scala    From coral   with Apache License 2.0 5 votes vote down vote up
package io.coral.lib

import java.util.Properties

import kafka.consumer._
import kafka.message.MessageAndMetadata
import org.json4s.JsonAST.{JNothing, JValue}
import org.json4s.jackson.JsonMethods._
import org.mockito.Mockito._
import org.scalatest.mock.MockitoSugar
import org.scalatest.{Matchers, WordSpec}

class KafkaJsonConsumerSpec extends WordSpec with Matchers with MockitoSugar {
	"KafkaJsonConsumer" should {
		"provide a stream" in {
			val consumer = KafkaJsonConsumer()
			intercept[IllegalArgumentException] {
        		consumer.stream("abc", new Properties())
      		}
		}
	}

	"KafkaJsonStream" should {
		val fakeConnection = mock[ConsumerConnector]
		doNothing.when(fakeConnection).commitOffsets

		val fakeMessage = mock[MessageAndMetadata[Array[Byte], JValue]]
		when(fakeMessage.key()).thenReturn("TestKey".getBytes)
		when(fakeMessage.message()).thenReturn(parse( """{ "json": "test" }"""))

		val fakeIterator = mock[ConsumerIterator[Array[Byte], JValue]]
		when(fakeIterator.hasNext()).thenReturn(true).thenReturn(false)
		when(fakeIterator.next()).thenReturn(fakeMessage)

		val fakeStream = mock[KafkaStream[Array[Byte], JValue]]
		when(fakeStream.iterator()).thenReturn(fakeIterator)

		"provide a next value" in {
			val kjs = new KafkaJsonStream(fakeConnection, fakeStream)
			kjs.hasNextInTime shouldBe true
			kjs.next shouldBe parse( """{ "json": "test" }""")
		}
	}

	"JsonDecoder" should {
		"convert bytes to Json object" in {
			val jsonString = """{ "hello": "json" }"""
			val bytes = jsonString.getBytes
			val jsonValue = parse(jsonString)
			JsonDecoder.fromBytes(bytes) shouldBe jsonValue
		}

		"return JNothing for invalid JSon" in {
			val jsonString = """hello"""
			val bytes = jsonString.getBytes
			JsonDecoder.fromBytes(bytes) shouldBe JNothing
		}
	}
} 
Example 133
Source File: KafkaJsonProducerSpec.scala    From coral   with Apache License 2.0 5 votes vote down vote up
package io.coral.lib

import java.util.Properties

import io.coral.lib.KafkaJsonProducer.KafkaEncoder
import kafka.utils.VerifiableProperties
import org.json4s.JsonAST.{JObject, JValue}
import org.scalatest.{Matchers, WordSpec}
import org.json4s.jackson.JsonMethods._
import kafka.producer.{ProducerConfig, KeyedMessage, Producer}
import org.mockito.{Mockito, ArgumentCaptor}
import org.mockito.Mockito._
import scala.collection.mutable

class KafkaJsonProducerSpec extends WordSpec with Matchers {
	"A KafkaJsonProducer" should {
		"create a KafkaJsonProducer with the JsonEncoder" in {
			val producer = KafkaJsonProducer()
			assert(producer.getClass == classOf[KafkaJsonProducer[JsonEncoder]])
		}

		"create a KafkaJsonProducer with the specified Encoder" in {
			val producer = KafkaJsonProducer(classOf[MyEncoder])
			assert(producer.getClass == classOf[KafkaJsonProducer[MyEncoder]])
		}

		"create a sender" in {
			val producer = new MyKafkaJsonProducer
			producer.createSender("topic", new Properties)
			val serializer = producer.receivedProperties.get("serializer.class")
			assert(serializer == classOf[MyEncoder].getName)
		}
	}

	"A KafkaSender" should {
		"send the JSON provided without a key to Kafka" in {
			val messageJson = """{"key1": "value1", "key2": "value2"}"""

			val keyedMessage = sendMessage(None, messageJson)

			assert(keyedMessage.topic == "test")
			assert(keyedMessage.hasKey == false)
			assert(keyedMessage.message == parse(messageJson))
		}

		"send the JSON provided with a key to Kafka" in {
			val messageJson = """{"key3": "value3", "key4": "value4"}"""

			val keyedMessage = sendMessage(Some("key"), messageJson)

			assert(keyedMessage.key == "key")
			assert(keyedMessage.topic == "test")
			assert(keyedMessage.message == parse(messageJson))
		}
	}

	"A JsonEncoder" should {
		"encode the provided json" in {
			val json = """{"key1": "value1"}"""
			val encoder = new JsonEncoder(new VerifiableProperties)
			val result = encoder.toBytes(parse(json))
			assert(parse(new String(result, "UTF-8")) == parse(json))
		}
	}

	private def sendMessage(key: Option[String], messageJson: String): KeyedMessage[String, JValue] = {
		val producer = Mockito.mock(classOf[Producer[String, JValue]])
		val sender = new KafkaSender("test", producer)
		sender.send(key, parse(messageJson).asInstanceOf[JObject])

		val argumentCaptor = ArgumentCaptor.forClass(classOf[KeyedMessage[String, JValue]])
		verify(producer).send(argumentCaptor.capture())

		val keyedMessages = argumentCaptor.getAllValues
		assert(keyedMessages.size == 1)

		// The following construction is necessary because capturing of parameters
		// with Mockito, Scala type interference, and multiple arguments
		// don't work together without explicit casts.
		keyedMessages.get(0).asInstanceOf[mutable.WrappedArray.ofRef[KeyedMessage[String, JValue]]](0)
	}
}

class MyEncoder(verifiableProperties: VerifiableProperties) extends KafkaEncoder {
	override def toBytes(value: JValue): Array[Byte] = {
		Array()
	}
}

class MyKafkaJsonProducer extends KafkaJsonProducer(classOf[MyEncoder]) {
	var receivedProperties: Properties = _

	override def createProducer(props: Properties): Producer[String, JValue] = {
		receivedProperties = props
		Mockito.mock(classOf[Producer[String, JValue]])
	}
} 
Example 134
Source File: WebSocketRoutesSpec.scala    From sync3k-server   with Apache License 2.0 5 votes vote down vote up
package sync3k.routes

import akka.http.scaladsl.testkit.{ ScalatestRouteTest, WSProbe }
import net.manub.embeddedkafka.EmbeddedKafka
import org.scalatest.prop.TableDrivenPropertyChecks._
import org.scalatest.{ BeforeAndAfterAll, Matchers, WordSpec }

class WebSocketRoutesSpec extends WordSpec with Matchers with ScalatestRouteTest with WebSocketRoutes with EmbeddedKafka with BeforeAndAfterAll {
  override implicit var kafkaServer: String = "localhost:6001"

  val baseRoots = Table(
    "base url",
    "/ws",
    "/kafka/test-1",
    "/kafka/test-2"
  )

  override def beforeAll(): Unit = {
    super.beforeAll()
    EmbeddedKafka.start()
  }

  override def afterAll(): Unit = {
    EmbeddedKafka.stop()
    super.afterAll()
  }

  forAll(baseRoots) { baseRoot =>
    baseRoot should {
      "echo updates" in {
        val wsClient = WSProbe()

        WS(s"$baseRoot/0", wsClient.flow) ~> webSocketRoutes ~> check {
          isWebSocketUpgrade shouldBe true

          wsClient.sendMessage("test1")
          wsClient.expectMessage("""{"id":0,"message":"test1"}""")

          wsClient.sendMessage("test2")
          wsClient.expectMessage("""{"id":1,"message":"test2"}""")

          wsClient.sendCompletion()
          wsClient.expectCompletion()
        }
      }

      "replay updates" in {
        val wsClient2 = WSProbe()

        WS(s"$baseRoot/0", wsClient2.flow) ~> webSocketRoutes ~> check {
          isWebSocketUpgrade shouldBe true

          wsClient2.expectMessage("""{"id":0,"message":"test1"}""")
          wsClient2.expectMessage("""{"id":1,"message":"test2"}""")
          wsClient2.sendCompletion()
          wsClient2.expectCompletion()
        }
      }

      "skip to offset" in {
        val wsClient3 = WSProbe()

        WS(s"$baseRoot/1", wsClient3.flow) ~> webSocketRoutes ~> check {
          isWebSocketUpgrade shouldBe true

          wsClient3.expectMessage("""{"id":1,"message":"test2"}""")
          wsClient3.sendCompletion()
          wsClient3.expectCompletion()
        }
      }
    }
  }
} 
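The Table above drives a single column of base URLs through identical assertions; tables extend naturally to several columns when each row pairs an input with its expectation. A minimal sketch:

import org.scalatest.prop.TableDrivenPropertyChecks._
import org.scalatest.{Matchers, WordSpec}

class TableDrivenSketch extends WordSpec with Matchers {

  val cases = Table(
    ("input", "expectedLength"), // headings
    ("a", 1),
    ("bb", 2),
    ("ccc", 3)
  )

  "String.length" should {
    "match every row of the table" in {
      forAll(cases) { (input, expectedLength) =>
        input.length shouldBe expectedLength
      }
    }
  }
}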
Example 135
Source File: UtilTests.scala    From sparkplug   with MIT License 5 votes vote down vote up
package springnz.sparkplug
import springnz.sparkplug.util.SerializeUtils._

import org.scalatest.{ ShouldMatchers, WordSpec }

case class TestObject(a: String, b: Int, c: Vector[String], d: List[Int])

class UtilTests extends WordSpec with ShouldMatchers {

  "serialise utils" should {
    "serialise and deserialise a local object" in {
      val testObject = TestObject("hello", 42, Vector("test", "array"), List(42, 108))
      val byteArray = serialize(testObject)
      val inflatedObject = deserialize[TestObject](byteArray, this.getClass.getClassLoader)

      inflatedObject should equal(testObject)
    }

    "serialise and deserialise a local object with its class loader" in {
      val testObject = TestObject("hello", 42, Vector("test", "array"), List(42, 108))
      val byteArray = serialize(testObject)
      val inflatedObject = deserialize[TestObject](byteArray, TestObject.getClass.getClassLoader)

      inflatedObject should equal(testObject)
    }

    "serialise and deserialise a local object with default class loader" in {
      val testObject = TestObject("hello", 42, Vector("test", "array"), List(42, 108))
      val byteArray = serialize(testObject)
      val inflatedObject = deserialize[TestObject](byteArray)

      inflatedObject should equal(testObject)
    }
  }
} 
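A round trip like the one above typically rests on JDK object serialization. A minimal sketch of serialize/deserialize helpers built only on the standard library; the real SerializeUtils may differ, for instance by resolving classes against the supplied class loader:

import java.io._

object SerializeSketch {

  def serialize(obj: AnyRef): Array[Byte] = {
    val bytes = new ByteArrayOutputStream()
    val out = new ObjectOutputStream(bytes)
    try out.writeObject(obj) finally out.close()
    bytes.toByteArray
  }

  def deserialize[T](data: Array[Byte]): T = {
    val in = new ObjectInputStream(new ByteArrayInputStream(data))
    try in.readObject().asInstanceOf[T] finally in.close()
  }
}

// Usage: case classes are Serializable by default, so they round-trip as-is.
// val bytes = SerializeSketch.serialize(TestObject("hello", 42, Vector(), Nil))
// val back  = SerializeSketch.deserialize[TestObject](bytes)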
Example 136
Source File: EtcdCoordinationSpec.scala    From constructr   with Apache License 2.0 5 votes vote down vote up
package de.heikoseeberger.constructr.coordination.etcd

import akka.Done
import akka.actor.{ ActorSystem, AddressFromURIString }
import akka.testkit.{ TestDuration, TestProbe }
import com.typesafe.config.ConfigFactory
import org.scalatest.{ BeforeAndAfterAll, Matchers, WordSpec }
import scala.concurrent.duration.{ Duration, DurationInt, FiniteDuration }
import scala.concurrent.{ Await, Awaitable }
import scala.util.Random

object EtcdCoordinationSpec {

  private val coordinationHost = {
    val dockerHostPattern = """tcp://(\S+):\d{1,5}""".r
    sys.env
      .get("DOCKER_HOST")
      .collect { case dockerHostPattern(address) => address }
      .getOrElse("127.0.0.1")
  }
}

class EtcdCoordinationSpec extends WordSpec with Matchers with BeforeAndAfterAll {
  import EtcdCoordinationSpec._

  private implicit val system = {
    val config =
      ConfigFactory
        .parseString(s"constructr.coordination.host = $coordinationHost")
        .withFallback(ConfigFactory.load())
    ActorSystem("default", config)
  }

  private val address  = AddressFromURIString("akka.tcp://default@a:2552")
  private val address2 = AddressFromURIString("akka.tcp://default@b:2552")

  "EtcdCoordination" should {
    "correctly interact with etcd" in {
      val coordination = new EtcdCoordination(randomString(), system)

      resultOf(coordination.getNodes()) shouldBe 'empty

      resultOf(coordination.lock(address, 10.seconds.dilated)) shouldBe true
      resultOf(coordination.lock(address, 10.seconds.dilated)) shouldBe true
      resultOf(coordination.lock(address2, 10.seconds.dilated)) shouldBe false

      resultOf(coordination.addSelf(address, 10.seconds.dilated)) shouldBe Done
      resultOf(coordination.getNodes()) shouldBe Set(address)

      resultOf(coordination.refresh(address, 1.second.dilated)) shouldBe Done
      resultOf(coordination.getNodes()) shouldBe Set(address)

      val probe = TestProbe()
      probe.within(5.seconds.dilated) { // 2 seconds should be enough, but who knows ...
        probe.awaitAssert {
          resultOf(coordination.getNodes()) shouldBe 'empty
        }
      }
    }
  }

  override protected def afterAll() = {
    Await.ready(system.terminate(), Duration.Inf)
    super.afterAll()
  }

  private def resultOf[A](awaitable: Awaitable[A], max: FiniteDuration = 3.seconds.dilated) =
    Await.result(awaitable, max)

  private def randomString() = math.abs(Random.nextInt).toString
} 
Example 137
Source File: KafkaConsumerSpec.scala    From freestyle-kafka   with Apache License 2.0 5 votes vote down vote up
package freestyle
package kafka

import freestyle.free._
import net.manub.embeddedkafka.EmbeddedKafka
import org.scalatest.WordSpec

import scala.concurrent.duration._
import cats.implicits._
import org.apache.kafka.clients.consumer.ConsumerRecords
import scala.collection.JavaConverters._

class KafkaConsumerSpec extends WordSpec with FSKafkaAlgebraSpec {

  "Consumer can be reused after closed" in {
    withConsumer[String].apply { consumer =>
      for {
        _                 <- consumer.close()
        isClosed          <- consumer.isClosed
        _                 <- consumer.metrics
        isClosedAfterUsed <- consumer.isClosed
      } yield (isClosed, isClosedAfterUsed)
    } shouldBe Right((true, false))
  }

  "Consumer can be reused after closed with a timeout" in {
    withConsumer[String].apply { consumer =>
      for {
        _                 <- consumer.closeWaitingFor(5.seconds)
        isClosed          <- consumer.isClosed
        _                 <- consumer.metrics
        isClosedAfterUsed <- consumer.isClosed
      } yield (isClosed, isClosedAfterUsed)
    } shouldBe Right((true, false))
  }

  "Consumer can subscribe to topics" in {
    val topics = "topicsubscription" :: Nil
    createCustomTopic(topics.head)
    withConsumer[String].apply { consumer =>
      for {
        _      <- consumer.subscribe(topics)
        topics <- consumer.subscription
      } yield topics
    } shouldBe Right(topics)
  }

  "Consumer can read a message from a topic" in {
    val topic   = "mytopic"
    val key     = "key"
    val message = "mymessage"
    withProducerAndConsumer[String].apply { (producer, consumer) =>
      for {
        _       <- producer.sendToTopic(topic, (key, message))
        _       <- producer.flush()
        _       <- consumer.subscribe(topic :: Nil)
        _       <- consumer.commitSync()
        records <- consumer.poll(10.seconds)
        message = records.records(topic).asScala.toList.headOption.map(_.value)
      } yield message
    } shouldBe Right(Some("mymessage"))
  }

  "Consumer can obtain metrics" in {
    withProducer[String].apply { _.metrics }.isRight shouldBe true
  }

} 
Example 138
Source File: VersionNumberCalculatorSpec.scala    From releaser   with Apache License 2.0 5 votes vote down vote up
package uk.gov.hmrc.releaser

import org.scalatest.{Matchers, TryValues, WordSpec}

class VersionNumberCalculatorSpec extends WordSpec with Matchers with TryValues {

  import VersionNumberCalculator._

  "VersionNumberCalculator" should {
    "calculate 0.8.2 given a release candidate of 0.8.1-4-ge733d26 and release type of HOTFIX" in {
      calculateTarget(ReleaseCandidateVersion("0.8.1-4-ge733d26"), ReleaseType.HOTFIX).success.value.value shouldBe "0.8.2"
    }

    "calculate 0.9.0 given a release candidate of 0.8.1-4-ge733d26 and release type of MINOR" in {
      calculateTarget(ReleaseCandidateVersion("0.8.1-4-ge733d26"), ReleaseType.MINOR).success.value.value shouldBe "0.9.0"
    }

    "calculate 1.0.0 given a release candidate of 0.8.1-4-ge733d26 and release type of MAJOR" in {
      calculateTarget(ReleaseCandidateVersion("0.8.1-4-ge733d26"), ReleaseType.MAJOR).success.value.value shouldBe "1.0.0"
    }

    "calculate 11.12.20 given a release candidate of 11.12.19-4-ge733d26 and release type of MAJOR" in {
      calculateTarget(ReleaseCandidateVersion("11.12.19-4-ge733d26"), ReleaseType.HOTFIX).success.value.value shouldBe "11.12.20"
    }

    "return a failure given an invalid release number of 0.0.1-SNAPSHOT and release type of PATCH" in {
      calculateTarget(ReleaseCandidateVersion("0.0.1-SNAPSHOT"), ReleaseType.HOTFIX).failure.exception.getMessage should include("SNAPSHOT")
    }
  }
} 
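The success.value and failure.exception accessors come from the TryValues trait mixed in above; they unwrap Try results directly instead of pattern matching on Success and Failure. A minimal sketch:

import org.scalatest.{Matchers, TryValues, WordSpec}

import scala.util.Try

class TryValuesSketch extends WordSpec with Matchers with TryValues {
  "TryValues" should {
    "unwrap successes and failures directly" in {
      Try("0.8.2").success.value shouldBe "0.8.2"
      Try(sys.error("SNAPSHOT not releasable")).failure.exception.getMessage should include ("SNAPSHOT")
    }
  }
}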
Example 139
Source File: ArtefactsSpecs.scala    From releaser   with Apache License 2.0 5 votes vote down vote up
package uk.gov.hmrc.releaser

import java.nio.file.Files

import org.scalatest.{Matchers, OptionValues, WordSpec}
import uk.gov.hmrc.releaser.github.Repo

class ArtefactsSpecs extends WordSpec with Matchers with OptionValues {

  "MavenArtefacts.transformersForSupportedFiles" should {
    "generate the correct list of transformers" in {

      val ver = mavenVersionMapping("time", "time")
      val artefacts = new MavenArtefacts(ver, Files.createTempDirectory("test"))
      val files = List(
        "time/uk/gov/hmrc/time_2.11/1.3.0-1-g21312cc/time_2.11-1.3.0-1-g21312cc.jar",
        "time/time_2.11-1.3.0-1-g21312cc.zip",
        "time/time_2.11-1.3.0-1-g21312cc.tgz",
        "time/time_2.11-1.3.0-1-g21312cc-assembly.jar",
        "time/time_2.11-1.3.0-1-g21312cc-sources.jar",
        "time/uk/gov/hmrc/time_2.11/1.3.0-1-g21312cc/time_2.11-1.3.0-1-g21312cc.pom",
        "time/time_2.11-1.3.0-1-g21312cc.pom.md5",
        "time/time_2.11-other-1.3.0-1-g21312cc.tgz",
        "time/time_2.11-other-1.3.0-1-g21312cc.jar"
      )

      val result: Map[String, Option[Transformer]] = artefacts.transformersForSupportedFiles(filePaths = files).toMap

      result foreach println

      result.size shouldBe 8
      result("time/uk/gov/hmrc/time_2.11/1.3.0-1-g21312cc/time_2.11-1.3.0-1-g21312cc.jar").get.isInstanceOf[JarManifestTransformer] shouldBe true
      result("time/time_2.11-1.3.0-1-g21312cc.zip").get.isInstanceOf[CopyAndRenameTransformer] shouldBe true
      result("time/time_2.11-1.3.0-1-g21312cc.tgz").get.isInstanceOf[TgzTransformer] shouldBe true
      result("time/time_2.11-1.3.0-1-g21312cc-assembly.jar").get.isInstanceOf[JarManifestTransformer] shouldBe true
      result("time/time_2.11-1.3.0-1-g21312cc-sources.jar").get.isInstanceOf[JarManifestTransformer] shouldBe true
      result("time/uk/gov/hmrc/time_2.11/1.3.0-1-g21312cc/time_2.11-1.3.0-1-g21312cc.pom").get.isInstanceOf[PomTransformer] shouldBe true
      result("time/time_2.11-other-1.3.0-1-g21312cc.tgz").get.isInstanceOf[TgzTransformer] shouldBe true
      result("time/time_2.11-other-1.3.0-1-g21312cc.jar").get.isInstanceOf[JarManifestTransformer] shouldBe true
    }
  }

  "IvyArtefacts.transformersForSupportedFiles" should {
    "generate the correct list of transformers" in {

      val ver = mavenVersionMapping("sbt-bobby", "sbt-bobby")
      val artefacts = new IvyArtefacts(ver, Files.createTempDirectory("test"))
      val files = List(
        "sbt-bobby.jar",
        "sbt-bobby.zip",
        "sbt-bobby.tgz",
        "sbt-bobby-assembly.jar",
        "sbt-bobby-other.jar",
        "sbt-bobby-sources.jar",
        "ivy.xml",
        "ivy.xml.md5"
      )

      val result: Map[String, Option[Transformer]] = artefacts.transformersForSupportedFiles(filePaths = files).toMap

      result.size shouldBe 7
      result("sbt-bobby.jar").get.isInstanceOf[JarManifestTransformer] shouldBe true
      result("sbt-bobby.zip").get.isInstanceOf[CopyAndRenameTransformer] shouldBe true
      result("sbt-bobby.tgz").get.isInstanceOf[CopyAndRenameTransformer] shouldBe true
      result("sbt-bobby-assembly.jar").get.isInstanceOf[JarManifestTransformer] shouldBe true
      result("sbt-bobby-other.jar").get.isInstanceOf[JarManifestTransformer] shouldBe true
      result("sbt-bobby-sources.jar").get.isInstanceOf[JarManifestTransformer] shouldBe true
      result("ivy.xml").get.isInstanceOf[IvyTransformer] shouldBe true
      result.contains("ivy.xml.md5") shouldBe false
    }
  }

  private def mavenVersionMapping(artefactName:String = "a",
                                  repoName:String = "a",
                                  rcVersion:String = "1.3.0-1-g21312cc",
                                  releaseVersion:String = "1.4.0") =
    VersionMapping(
      RepoFlavours.mavenRepository,
      artefactName,
      Repo(repoName),
      ReleaseCandidateVersion(rcVersion),
      ReleaseVersion(releaseVersion))
} 
Example 140
Source File: BintrayMavenPathsSpecs.scala    From releaser   with Apache License 2.0 5 votes vote down vote up
package uk.gov.hmrc.releaser.bintray

import org.scalatest.{Matchers, WordSpec}

class BintrayMavenPathsSpecs extends WordSpec with Matchers {

  "BintrayMavenPathsSpecs" should {

    val mavenPaths = new BintrayMavenPaths() {
//      override def scalaVersion: String = "2.10"
    }

    "Generate URL for a jar and pom file on Bintray" in {
//      val expectedJarUrl = "https://bintray.com/artifact/download/hmrc/release-candidates/uk/gov/hmrc/time_2.10/1.3.0-1-g21312cc/time_2.10-1.3.0-1-g21312cc.jar"
      val expectedPomUrl = "https://bintray.com/artifact/download/hmrc/release-candidates/uk/gov/hmrc/time_2.10/1.3.0-1-g21312cc/time_2.10-1.3.0-1-g21312cc.pom"

      val repoName = "release-candidates"
      val artefactName = "time"
      val githubRepoName = "time"
      val releaseCandidateVersion = "1.3.0-1-g21312cc"

      val version = VersionDescriptor(repoName, artefactName, githubRepoName, releaseCandidateVersion)

//      mavenPaths.jarFilenameFor(version) shouldBe "time_2.10-1.3.0-1-g21312cc.jar"
      mavenPaths.fileDownloadFor(version, "uk/gov/hmrc/time_2.10/1.3.0-1-g21312cc/time_2.10-1.3.0-1-g21312cc.pom") shouldBe expectedPomUrl
    }
  }
} 
Example 141
Source File: BintrayIvyPathsSpecs.scala    From releaser   with Apache License 2.0 5 votes vote down vote up
package uk.gov.hmrc.releaser.bintray

import org.scalatest.{Matchers, WordSpec}

class BintrayIvyPathsSpecs extends WordSpec with Matchers {

  "BintrayIvyPathsSpecs" should {

    val ivyPaths = new BintrayIvyPaths() {}

    val repoName = "sbt-plugin-release-candidates"
    val artefactName = "sbt-bobby"
    val githubRepoName = "sbt-bobby"

    "Generate URL for files on Bintray" in {
      val expectedAssemblyJarUrl = "https://bintray.com/artifact/download/hmrc/sbt-plugin-release-candidates/uk.gov.hmrc/sbt-bobby/scala_2.10/sbt_0.13/0.8.1-4-ge733d26/jars/sbt-bobby-assembly.jar"
      val expectedJarUrl = "https://bintray.com/artifact/download/hmrc/sbt-plugin-release-candidates/uk.gov.hmrc/sbt-bobby/scala_2.10/sbt_0.13/0.8.1-4-ge733d26/jars/sbt-bobby.jar"
      val expectedPomUrl = "https://bintray.com/artifact/download/hmrc/sbt-plugin-release-candidates/uk.gov.hmrc/sbt-bobby/scala_2.10/sbt_0.13/0.8.1-4-ge733d26/ivys/ivy.xml"

      val releaseCandidateVersion = "0.8.1-4-ge733d26"

      val version = VersionDescriptor(repoName, artefactName, githubRepoName, releaseCandidateVersion)

//      ivyPaths.jarFilenameFor(version) shouldBe "sbt-bobby.jar"
      ivyPaths.fileDownloadFor(version, "uk.gov.hmrc/sbt-bobby/scala_2.10/sbt_0.13/0.8.1-4-ge733d26/jars/sbt-bobby-assembly.jar") shouldBe expectedAssemblyJarUrl
      ivyPaths.fileDownloadFor(version, "uk.gov.hmrc/sbt-bobby/scala_2.10/sbt_0.13/0.8.1-4-ge733d26/ivys/ivy.xml") shouldBe expectedIvyUrl
    }

    "Generate correct URL for uploading files to Bintray" in {
      val expectedJarUrl = "https://bintray.com/api/v1/content/hmrc/sbt-plugin-release-candidates/uk.gov.hmrc/sbt-bobby/scala_2.10/sbt_0.13/0.9.0/jars/sbt-bobby.jar"
      val expectedIvyUrl = "https://bintray.com/api/v1/content/hmrc/sbt-plugin-release-candidates/uk.gov.hmrc/sbt-bobby/scala_2.10/sbt_0.13/0.9.0/ivys/ivy.xml"
      val expectedSrcUrl = "https://bintray.com/api/v1/content/hmrc/sbt-plugin-release-candidates/uk.gov.hmrc/sbt-bobby/scala_2.10/sbt_0.13/0.9.0/srcs/sbt-bobby-sources.jar"
      val expectedDocUrl = "https://bintray.com/api/v1/content/hmrc/sbt-plugin-release-candidates/uk.gov.hmrc/sbt-bobby/scala_2.10/sbt_0.13/0.9.0/docs/sbt-bobby-javadoc.jar"

      val version = VersionDescriptor(repoName, artefactName, githubRepoName, "0.9.0")

      ivyPaths.fileUploadFor(version, "uk.gov.hmrc/sbt-bobby/scala_2.10/sbt_0.13/0.9.0/jars/sbt-bobby.jar") shouldBe expectedJarUrl
      ivyPaths.fileUploadFor(version, "uk.gov.hmrc/sbt-bobby/scala_2.10/sbt_0.13/0.9.0/ivys/ivy.xml") shouldBe expectedIvyUrl
      ivyPaths.fileUploadFor(version, "uk.gov.hmrc/sbt-bobby/scala_2.10/sbt_0.13/0.9.0/srcs/sbt-bobby-sources.jar") shouldBe expectedSrcUrl
      ivyPaths.fileUploadFor(version, "uk.gov.hmrc/sbt-bobby/scala_2.10/sbt_0.13/0.9.0/docs/sbt-bobby-javadoc.jar") shouldBe expectedDocUrl
    }
  }
} 
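
The upload expectations differ from the download ones only in the root: published files go through Bintray's content API host rather than the artifact download host. Continuing the hedged sketch from the previous example:

object BintrayUploadSketch {
  private val root = "https://bintray.com/api/v1/content/hmrc"
  def fileUploadFor(v: VersionDescriptorSketch, path: String): String =
    s"$root/${v.repo}/$path"
}
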
Example 142
Source File: ArtefactMetaDataProviderSpecs.scala    From releaser   with Apache License 2.0
package uk.gov.hmrc.releaser

import java.nio.file.Paths

import org.scalatest.{Matchers, TryValues, WordSpec}
import uk.gov.hmrc.releaser.github.GithubConnector

import scala.util.Failure

class ArtefactMetaDataProviderSpecs extends WordSpec with Matchers with TryValues {

  "ArtefactMetaData" should {
    "build instance from file" in {
      val md = new ArtefactMetaDataProvider().fromJarFile(Paths.get(this.getClass.getResource("/sbt-bobby/uk.gov.hmrc/sbt-bobby/scala_2.10/sbt_0.13/0.8.1-4-ge733d26/jars/sbt-bobby.jar").toURI))  match {
        case Failure(e) => fail(e)
        case s => s
      }

      md.success.value.commitAuthor shouldBe "Charles Kubicek"
      md.success.value.sha  shouldBe "e733d26fa504c040f2c95ecd25a3a55399a00883"
      md.success.value.commitDate shouldBe GithubConnector.githubDateTimeFormatter.parseDateTime("2015-04-09T10:18:12.000Z")
    }
  }
} 
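
The .success.value calls rely on org.scalatest.TryValues: on a Success it unwraps the value, and on a Failure it fails the test with the underlying exception instead of throwing an opaque error. A minimal standalone use:

import scala.util.Try

import org.scalatest.{Matchers, TryValues, WordSpec}

class TryValuesSketchSpec extends WordSpec with Matchers with TryValues {
  "success.value" should {
    "unwrap a successful Try" in {
      Try(21 * 2).success.value shouldBe 42
    }
  }
}
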
Example 143
Source File: GithubHttpSpecs.scala    From releaser   with Apache License 2.0
package uk.gov.hmrc.releaser.github

import org.scalatest.{Matchers, OptionValues, WordSpec}
import play.api.libs.json.{JsNumber, JsObject}
import play.api.libs.ws.{EmptyBody, WSAuthScheme}
import uk.gov.hmrc.ServiceCredentials

class GithubHttpSpecs extends WordSpec with Matchers with OptionValues {

  "GithubHttpSpecs" should {
    "build request holder" in {
      val githubHttp = new GithubHttp(ServiceCredentials("Charles", "123"))

      val body = JsObject(Seq("a" -> JsNumber(1)))
      val call = githubHttp.buildCall("POST", "http://example.com", Some(body))

      call.method shouldBe "POST"
      call.body should not be EmptyBody
      call.url shouldBe "http://example.com"
      call.auth.value shouldBe (("Charles", "123", WSAuthScheme.BASIC))
    }
  }

} 
Example 144
Source File: security_api_yaml.scala    From api-first-hand   with MIT License
package security.api.yaml

import de.zalando.play.controllers._
import org.scalacheck._
import org.scalacheck.Arbitrary._
import org.scalacheck.Prop._
import org.scalacheck.Test._
import org.specs2.mutable._
import org.specs2.execute._
import play.api.test.Helpers._
import play.api.test._
import play.api.mvc.MultipartFormData.FilePart
import play.api.mvc._

import org.junit.runner.RunWith
import java.net.URLEncoder
import com.fasterxml.jackson.databind.ObjectMapper

import play.api.http.Writeable
import play.api.libs.Files.TemporaryFile
import play.api.test.Helpers.{status => requestStatusCode_}
import play.api.test.Helpers.{contentAsString => requestContentAsString_}
import play.api.test.Helpers.{contentType => requestContentType_}

import org.scalatest.{OptionValues, WordSpec}
import org.scalatestplus.play.{OneAppPerTest, WsScalaTestClient}

import Generators._

import de.zalando.play.controllers.ArrayWrapper

//noinspection ScalaStyle
class Security_api_yamlSpec extends WordSpec with OptionValues with WsScalaTestClient with OneAppPerTest {
  def toPath[T](value: T)(implicit binder: PathBindable[T]): String = Option(binder.unbind("", value)).getOrElse("")
  def toQuery[T](key: String, value: T)(implicit binder: QueryStringBindable[T]): String = Option(binder.unbind(key, value)).getOrElse("")
  def toHeader[T](value: T)(implicit binder: QueryStringBindable[T]): String = Option(binder.unbind("", value)).getOrElse("")

  def checkResult(props: Prop): org.specs2.execute.Result =
    Test.check(Test.Parameters.default, props).status match {
      case Failed(args, labels) =>
        val failureMsg = labels.mkString("\n") + " given args: " + args.map(_.arg).mkString("'", "', '","'")
        org.specs2.execute.Failure(failureMsg)
      case Proved(_) | Exhausted | Passed => org.specs2.execute.Success()
      case PropException(_, e: IllegalStateException, _) => org.specs2.execute.Error(e.getMessage)
      case PropException(_, e, labels) =>
        val error = if (labels.isEmpty) e.getLocalizedMessage else labels.mkString("\n")
        org.specs2.execute.Failure(error)
    }

  private def parserConstructor(mimeType: String) = PlayBodyParsing.jacksonMapper(mimeType)

  def parseResponseContent[T](mapper: ObjectMapper, content: String, mimeType: Option[String], expectedType: Class[T]) =
    if (expectedType.getCanonicalName == "scala.runtime.Null$") null else mapper.readValue(content, expectedType)

} 
Example 145
Source File: instagram_api_yaml.scala    From api-first-hand   with MIT License
package instagram.api.yaml

import de.zalando.play.controllers._
import org.scalacheck._
import org.scalacheck.Arbitrary._
import org.scalacheck.Prop._
import org.scalacheck.Test._
import org.specs2.mutable._
import org.specs2.execute._
import play.api.test.Helpers._
import play.api.test._
import play.api.mvc.MultipartFormData.FilePart
import play.api.mvc._

import org.junit.runner.RunWith
import java.net.URLEncoder
import com.fasterxml.jackson.databind.ObjectMapper

import play.api.http.Writeable
import play.api.libs.Files.TemporaryFile
import play.api.test.Helpers.{status => requestStatusCode_}
import play.api.test.Helpers.{contentAsString => requestContentAsString_}
import play.api.test.Helpers.{contentType => requestContentType_}

import org.scalatest.{OptionValues, WordSpec}
import org.scalatestplus.play.{OneAppPerTest, WsScalaTestClient}

import Generators._

import scala.math.BigInt
import scala.math.BigDecimal

//noinspection ScalaStyle
class Instagram_api_yamlSpec extends WordSpec with OptionValues with WsScalaTestClient with OneAppPerTest {
  def toPath[T](value: T)(implicit binder: PathBindable[T]): String = Option(binder.unbind("", value)).getOrElse("")
  def toQuery[T](key: String, value: T)(implicit binder: QueryStringBindable[T]): String = Option(binder.unbind(key, value)).getOrElse("")
  def toHeader[T](value: T)(implicit binder: QueryStringBindable[T]): String = Option(binder.unbind("", value)).getOrElse("")

  def checkResult(props: Prop): org.specs2.execute.Result =
    Test.check(Test.Parameters.default, props).status match {
      case Failed(args, labels) =>
        val failureMsg = labels.mkString("\n") + " given args: " + args.map(_.arg).mkString("'", "', '","'")
        org.specs2.execute.Failure(failureMsg)
      case Proved(_) | Exhausted | Passed => org.specs2.execute.Success()
      case PropException(_, e: IllegalStateException, _) => org.specs2.execute.Error(e.getMessage)
      case PropException(_, e, labels) =>
        val error = if (labels.isEmpty) e.getLocalizedMessage else labels.mkString("\n")
        org.specs2.execute.Failure(error)
    }

  private def parserConstructor(mimeType: String) = PlayBodyParsing.jacksonMapper(mimeType)

  def parseResponseContent[T](mapper: ObjectMapper, content: String, mimeType: Option[String], expectedType: Class[T]) =
    if (expectedType.getCanonicalName == "scala.runtime.Null$") null else mapper.readValue(content, expectedType)

} 
Example 146
Source File: DonutQueryParametersTest.scala    From learn-akka   with Apache License 2.0
package com.allaboutscala.learn.akka.http

import akka.http.scaladsl.model.StatusCodes
import akka.http.scaladsl.server.Route
import akka.http.scaladsl.testkit.ScalatestRouteTest
import com.allaboutscala.learn.akka.http.routes.DonutRoutes
import org.scalatest.{Matchers, WordSpec}


class DonutQueryParametersTest
  extends WordSpec
    with Matchers
    with ScalatestRouteTest {

  val donutRoutes = new DonutRoutes().route()

  "Query Parameters Tests" should {
    "match the output for the URL /donut/prices?donutName" in {
      Get("/donut/prices?donutName=plain%20donut") ~> donutRoutes ~> check {
        responseAs[String] shouldEqual "Received parameter: donutName=plain donut"
        status shouldEqual StatusCodes.OK
      }
    }



    "Check for required donutName query parameter at /donut/prices" in {
      Get("/donut/prices?") ~> Route.seal(donutRoutes) ~> check {
        responseAs[String] shouldEqual "Request is missing required query parameter 'donutName'"
        status shouldEqual StatusCodes.NotFound
      }
    }



    "Validate the pass-through of required and optional parameters in /donut/bake" in {
      Get("/donut/bake?donutName=plain%20donut&topping=chocolate") ~> donutRoutes ~> check {
        responseAs[String] shouldEqual "Received parameters: donutName=plain donut and topping=chocolate"
        status shouldEqual StatusCodes.OK
      }
    }



    "Verify the optional parameter topping for /donut/bake" in {
      Get("/donut/bake?donutName=plain%20donut") ~> donutRoutes ~> check {
        responseAs[String] shouldEqual "Received parameters: donutName=plain donut and topping=sprinkles"
        status shouldEqual StatusCodes.OK
      }
    }



    "Verify typed parameters for /ingredients" in {
      Get("/ingredients?donutName=plain%20donut&priceLevel=1.50") ~> donutRoutes ~> check {
        responseAs[String] shouldEqual "Received parameters: donutName=plain donut, priceLevel=1.5"
        status shouldEqual StatusCodes.OK
      }
    }



    "Check for wrong types being passed through to the priceLevel query param at /ingredients" in {
      Get("/ingredients?donutName=plain%20donut&priceLevel=cheap") ~> Route.seal(donutRoutes) ~> check {
        responseAs[String] shouldEqual """The query parameter 'priceLevel' was malformed:
                                         |'cheap' is not a valid 64-bit floating point value""".stripMargin
        status shouldEqual StatusCodes.BadRequest
      }
    }



    "Verify CSV parameters for /bake-donuts" in {
      Get("/bake-donuts?ingredients=flour,sugar,vanilla") ~> donutRoutes ~> check {
        responseAs[String] shouldEqual "Received CSV parameter: ingredients=List(flour, sugar, vanilla)"
        status shouldEqual StatusCodes.OK
      }
    }
  }
} 
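
The DonutRoutes under test are not shown here; a plausible sketch of akka-http directives that would satisfy the first and the optional-parameter assertions (paths and message formats are inferred from the expected responses, not taken from the learn-akka source):

import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server.Route

val donutRoutesSketch: Route =
  path("donut" / "prices") {
    get {
      parameter("donutName") { donutName =>
        complete(s"Received parameter: donutName=$donutName")
      }
    }
  } ~
  path("donut" / "bake") {
    get {
      parameters("donutName", "topping" ? "sprinkles") { (donutName, topping) =>
        complete(s"Received parameters: donutName=$donutName and topping=$topping")
      }
    }
  }

The "topping" ? "sprinkles" receptacle supplies the default seen in the optional-parameter test, and Route.seal in the missing-parameter test turns the rejection into the 404 response asserted there.
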
Example 147
Source File: CreateDonutTest.scala    From learn-akka   with Apache License 2.0
package com.allaboutscala.learn.akka.http

import akka.http.scaladsl.model.{HttpEntity, HttpMethods, HttpRequest, MediaTypes}
import akka.http.scaladsl.testkit.ScalatestRouteTest
import akka.util.ByteString
import com.allaboutscala.learn.akka.http.routes.DonutRoutes
import org.scalatest.{Matchers, WordSpec}


class CreateDonutTest
  extends WordSpec
    with Matchers
    with ScalatestRouteTest {

  val donutRoutes = new DonutRoutes().route()

  "Donut API" should {
    "Create a valid Donut when posting JSON to /create-donut path" in {
      val jsonDonutInput = ByteString("""{"name":"plain donut", "price":1.50}""")
      val httpPostCreateDonut = HttpRequest(
        uri = "http://localhost:8080/create-donut",
        method = HttpMethods.POST,
        entity = HttpEntity(MediaTypes.`application/json`, jsonDonutInput))

      httpPostCreateDonut ~> donutRoutes ~> check {
        status.isSuccess() shouldEqual true
        status.intValue() shouldEqual 201
        status.reason shouldEqual "Created"
      }
    }
  }

} 
Example 148
Source File: TestReThinkSinkConstants.scala    From stream-reactor   with Apache License 2.0
package com.datamountaineer.streamreactor.connect.rethink.config

import org.scalatest.WordSpec


class TestReThinkSinkConstants extends WordSpec {

  // Constants
  val RETHINK_HOST = "connect.rethink.host"
  val RETHINK_DB = "connect.rethink.db"
  val RETHINK_PORT = "connect.rethink.port"
  val ROUTE_QUERY = "connect.rethink.kcql"
  val ERROR_POLICY = "connect.rethink.error.policy"
  val ERROR_RETRY_INTERVAL = "connect.rethink.retry.interval"
  val NBR_OF_RETRIES = "connect.rethink.max.retries"
  val BATCH_SIZE = "connect.rethink.batch.size"

  "RETHINK_HOST should have the same key in ReThinkSinkConfigConstants" in {
    assert(RETHINK_HOST.equals(ReThinkConfigConstants.RETHINK_HOST))
  }

  "RETHINK_DB should have the same key in ReThinkSinkConfigConstants" in {
    assert(RETHINK_DB.equals(ReThinkConfigConstants.RETHINK_DB))
  }

  "RETHINK_PORT should have the same key in ReThinkSinkConfigConstants" in {
    assert(RETHINK_PORT.equals(ReThinkConfigConstants.RETHINK_PORT))
  }

  "ROUTE_QUERY should have the same key in ReThinkSinkConfigConstants" in {
    assert(ROUTE_QUERY.equals(ReThinkConfigConstants.KCQL))
  }

  "ERROR_POLICY should have the same key in ReThinkSinkConfigConstants" in {
    assert(ERROR_POLICY.equals(ReThinkConfigConstants.ERROR_POLICY))
  }

  "ERROR_RETRY_INTERVAL should have the same key in ReThinkSinkConfigConstants" in {
    assert(ERROR_RETRY_INTERVAL.equals(ReThinkConfigConstants.ERROR_RETRY_INTERVAL))
  }

  "NBR_OF_RETRIES should have the same key in ReThinkSinkConfigConstants" in {
    assert(NBR_OF_RETRIES.equals(ReThinkConfigConstants.NBR_OF_RETRIES))
  }
} 
Example 149
Source File: NodeStateWithResultsSpec.scala    From seahorse-workflow-executor   with Apache License 2.0
package io.deepsense.models.workflows

import org.scalatest.mockito.MockitoSugar
import org.scalatest.{Matchers, WordSpec}

import io.deepsense.commons.models.Entity
import io.deepsense.deeplang.exceptions.DeepLangException
import io.deepsense.deeplang.inference.InferenceWarnings
import io.deepsense.deeplang.{DKnowledge, DOperable}
import io.deepsense.graph.NodeInferenceResult
import io.deepsense.reportlib.model.ReportContent

class NodeStateWithResultsSpec extends WordSpec with Matchers with MockitoSugar {

  "NodeStateWithResults" should {

    "copy knowledge, keep warnings and clear errors for nonempty DOperable list" in {
      val draftNode = draftNodeState
      val (entityIds, operables, reportsMap, operablesMap) = executionResultFixture(2)
      val finished = draftNode.enqueue.start.finish(entityIds, reportsMap, operablesMap)

      finished.nodeState.isCompleted shouldBe true
      finished.knowledge shouldBe Some(NodeInferenceResult(
        operables.map(DKnowledge(_)).toVector,
        draftNode.knowledge.get.warnings,
        Vector()))
    }
    "copy knowledge, keep warnings and clear errors for empty DOperable list" in {
      val draftNode = draftNodeState
      val (entityIds, operables, reportsMap, operablesMap) = executionResultFixture(0)
      val finished = draftNode.enqueue.start.finish(entityIds, reportsMap, operablesMap)

      finished.nodeState.isCompleted shouldBe true
      finished.knowledge shouldBe Some(NodeInferenceResult(
        Vector(),
        draftNode.knowledge.get.warnings,
        Vector()))
    }
  }

  private def draftNodeState = {
    NodeStateWithResults.draft.withKnowledge(
      NodeInferenceResult(
        Vector(DKnowledge(mock[DOperable])),
        mock[InferenceWarnings],
        Vector(mock[DeepLangException])))
  }

  private def executionResultFixture(dOperableCount: Int):
      (Seq[Entity.Id], Seq[DOperable], Map[Entity.Id, ReportContent], Map[Entity.Id, DOperable]) = {
    val entityIds = (1 to dOperableCount).map(_ => Entity.Id.randomId).toList
    val operables = entityIds.map(_ => mock[DOperable])
    val reportsMap = entityIds.map(id => id -> mock[ReportContent]).toMap
    val operablesMap = entityIds.zip(operables).toMap
    (entityIds, operables, reportsMap, operablesMap)
  }
} 
Example 150
Source File: ExecutorIdExtenderPluginTest.scala    From marathon-example-plugins   with Apache License 2.0
package mesosphere.marathon.example.plugin.executorid

import com.typesafe.scalalogging.StrictLogging
import org.apache.mesos.Protos.Environment.Variable
import org.apache.mesos.Protos._
import org.scalatest.{GivenWhenThen, Matchers, WordSpec}

class ExecutorIdExtenderPluginTest extends WordSpec with Matchers with GivenWhenThen with StrictLogging {

  "Given an MARATHON_EXECUTOR_ID label an executorID should be injected" in {
    val f = new Fixture

    Given("a TaskInfo with a MARATHON_EXECUTOR_ID label")
    val taskInfo = TaskInfo.newBuilder.
      setExecutor(ExecutorInfo.newBuilder.
          setCommand(CommandInfo.newBuilder.
            setEnvironment(Environment.newBuilder.addVariables(
                Variable.newBuilder.setName("foo").setValue("bar")
            )
          )).
        setExecutorId(ExecutorID.newBuilder.setValue("task.12345"))
      ).
      setLabels(Labels.newBuilder.addLabels(Label.newBuilder.
        setKey(f.plugin.ExecutorIdLabel)
          .setValue("customer-executor-id")
      ))

    When("handled by the plugin")
    f.plugin.taskInfo(null, taskInfo)

    Then("ExecutorInfo.ExecutorId should be changed")
    taskInfo.getExecutor.getExecutorId.getValue shouldBe "customer-executor-id"

    And("Environment variables should be removed")
    taskInfo.getExecutor.getCommand.getEnvironment.getVariablesCount shouldBe 0
  }

  "Given no MARATHON_EXECUTOR_ID label an executorID should be untouched" in {
    val f = new Fixture

    Given("a TaskInfo with a MARATHON_EXECUTOR_ID label")
    val taskInfo = TaskInfo.newBuilder.
      setExecutor(ExecutorInfo.newBuilder.
        setCommand(CommandInfo.newBuilder.
          setEnvironment(Environment.newBuilder.addVariables(
            Variable.newBuilder.setName("foo").setValue("bar")
          )
          )).
        setExecutorId(ExecutorID.newBuilder.setValue("task.12345"))
      ).
      setLabels(Labels.newBuilder.addLabels(Label.newBuilder.
        setKey("baz")
        .setValue("wof")
      ))

    When("handled by the plugin")
    f.plugin.taskInfo(null, taskInfo)

    Then("ExecutorInfo.ExecutorId should stay the same")
    taskInfo.getExecutor.getExecutorId.getValue shouldBe "task.12345"

    And("environment variables should be kept")
    taskInfo.getExecutor.getCommand.getEnvironment.getVariablesCount shouldBe 1
  }

  class Fixture {
    val plugin = new ExecutorIdExtenderPlugin()
  }
} 
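
Given/When/Then come from org.scalatest.GivenWhenThen; they only attach labelled informer output to the test report and have no effect on the assertions themselves. Minimal use:

import org.scalatest.{GivenWhenThen, Matchers, WordSpec}

class GwtSketchSpec extends WordSpec with Matchers with GivenWhenThen {
  "an accumulator" should {
    "sum its inputs" in {
      Given("an empty sum")
      var sum = 0
      When("two values are added")
      sum += 19
      sum += 23
      Then("the total is their sum")
      sum shouldBe 42
    }
  }
}
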
Example 151
Source File: GraphJsonTestSupport.scala    From seahorse-workflow-executor   with Apache License 2.0
package io.deepsense.models.json.graph

import org.mockito.Mockito._
import org.scalatest.mockito.MockitoSugar
import org.scalatest.{Matchers, WordSpec}
import spray.json.{DefaultJsonProtocol, JsObject}

import io.deepsense.deeplang.DOperation
import io.deepsense.graph.Endpoint

trait GraphJsonTestSupport
  extends WordSpec
  with MockitoSugar
  with DefaultJsonProtocol
  with Matchers {

  def assertEndpointMatchesJsObject(edgeEnd: Endpoint, edgeEndJs: JsObject): Unit = {
    assert(edgeEndJs.fields("nodeId").convertTo[String] == edgeEnd.nodeId.value.toString)
    assert(edgeEndJs.fields("portIndex").convertTo[Int] == edgeEnd.portIndex)
  }

  def endpointMatchesJsObject(edgeEnd: Endpoint, edgeEndJs: JsObject): Boolean = {
    edgeEndJs.fields("nodeId").convertTo[String] == edgeEnd.nodeId.value.toString &&
    edgeEndJs.fields("portIndex").convertTo[Int] == edgeEnd.portIndex
  }

  def mockOperation(
      inArity: Int,
      outArity: Int,
      id: DOperation.Id,
      name: String): DOperation = {

    val dOperation = mock[DOperation]
    when(dOperation.inArity).thenReturn(inArity)
    when(dOperation.outArity).thenReturn(outArity)
    when(dOperation.id).thenReturn(id)
    when(dOperation.name).thenReturn(name)
    dOperation
  }
} 
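
mockOperation is the standard MockitoSugar stubbing recipe: create a mock, then fix each accessor with when(...).thenReturn(...). The same recipe in isolation:

import org.mockito.Mockito._
import org.scalatest.mockito.MockitoSugar

object MockSketch extends MockitoSugar {
  trait Named { def name: String }

  val named = mock[Named]
  when(named.name).thenReturn("stubbed")
  // named.name now returns "stubbed"; unstubbed methods return null/0/false defaults.
}
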
Example 152
Source File: PythonCustomCodeEntryPointTest.scala    From seahorse-workflow-executor   with Apache License 2.0
package io.deepsense.workflowexecutor.pythongateway

import java.util.concurrent.TimeoutException

import scala.concurrent.duration._

import org.apache.spark.SparkContext
import org.scalatest.mockito.MockitoSugar
import org.scalatest.{Matchers, WordSpec}

import io.deepsense.deeplang.{CustomCodeExecutor, DataFrameStorage, OperationExecutionDispatcher}
import io.deepsense.sparkutils.SparkSQLSession
import io.deepsense.workflowexecutor.customcode.CustomCodeEntryPoint

class PythonCustomCodeEntryPointTest extends WordSpec with MockitoSugar with Matchers {

  "PythonEntryPoint" should {
    "throw on uninitialized code executor" in {
      val entryPoint = createEntryPoint
      a[TimeoutException] shouldBe thrownBy {
        entryPoint.getCodeExecutor(100.millis)
      }
    }

    "throw on uninitialized callback server port" in {
      val entryPoint = createEntryPoint
      a[TimeoutException] shouldBe thrownBy {
        entryPoint.getPythonPort(100.millis)
      }
    }

    "return initialized code executor" in {
      val entryPoint = createEntryPoint
      val mockExecutor = mock[CustomCodeExecutor]
      entryPoint.registerCodeExecutor(mockExecutor)
      entryPoint.getCodeExecutor(100.millis) shouldBe mockExecutor
    }

    "return initialized callback server port" in {
      val entryPoint = createEntryPoint
      entryPoint.registerCallbackServerPort(4412)
      entryPoint.getPythonPort(100.millis) shouldBe 4412
    }

    "return code executor initialized while waiting on it" in {
      val entryPoint = createEntryPoint
      val mockExecutor = mock[CustomCodeExecutor]

      new Thread(new Runnable {
        override def run(): Unit = {
          Thread.sleep(1000)
          entryPoint.registerCodeExecutor(mockExecutor)
        }
      }).start()

      entryPoint.getCodeExecutor(2.seconds) shouldBe mockExecutor
    }
  }

  private def createEntryPoint: CustomCodeEntryPoint =
    new CustomCodeEntryPoint(
      mock[SparkContext],
      mock[SparkSQLSession],
      mock[DataFrameStorage],
      mock[OperationExecutionDispatcher])
} 
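
Each register*/get* pair above is a Promise/Future handshake: the getter blocks with a timeout until some other thread fulfils the promise. A minimal sketch of the pattern (not Seahorse's CustomCodeEntryPoint itself):

import scala.concurrent.duration._
import scala.concurrent.{Await, Promise}

class SettableSlot[T] {
  private val promise = Promise[T]()

  def register(value: T): Unit = promise.trySuccess(value)

  // Throws java.util.concurrent.TimeoutException if nothing is registered in time.
  def get(atMost: Duration): T = Await.result(promise.future, atMost)
}
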
Example 153
Source File: SparkRBackendSpec.scala    From seahorse-workflow-executor   with Apache License 2.0
package io.deepsense.workflowexecutor

import org.apache.spark.api.r._
import org.scalatest.concurrent.TimeLimits
import org.scalatest.mockito.MockitoSugar
import org.scalatest.{Matchers, PrivateMethodTester, WordSpec}

import io.deepsense.workflowexecutor.customcode.CustomCodeEntryPoint

class SparkRBackendSpec
  extends WordSpec
  with MockitoSugar
  with Matchers
  with TimeLimits
  with PrivateMethodTester {

  "Spark R Backend" should {
    "return 0 for Entry Point Id" in {
      val sparkRBackend = new SparkRBackend()
      val customCodeEntryPoint = mock[CustomCodeEntryPoint]
      sparkRBackend.start(customCodeEntryPoint)
      sparkRBackend.entryPointId shouldBe "0"
      sparkRBackend.close()
    }
  }
} 
Example 154
Source File: TableSpec.scala    From seahorse-workflow-executor   with Apache License 2.0
package io.deepsense.reportlib.model

import org.scalatest.{Matchers, WordSpec}

import io.deepsense.commons.types.ColumnType

class TableSpec extends WordSpec with Matchers {

  "Table" should {
    "throw IllegalArgumentException" when {
      "created with columnNames and columnTypes of different size" in {
        an[IllegalArgumentException] should be thrownBy
          Table(
            "Name",
            "Description",
            Some(List("col1", "col2")),
            List(ColumnType.string, ColumnType.string, ColumnType.string),
            None,
            List(
              List(Some("v1"), None, None))
          )
      }
      "created one data row of size different than columnTypes size" in {
        an[IllegalArgumentException] should be thrownBy
          Table(
            "Name",
            "Description",
            Some(List("col1", "col2", "col3")),
            List(ColumnType.string, ColumnType.string, ColumnType.string),
            None,
            List(
              List(Some("v1"), None))
          )
      }
    }
    "get created" when {
      "no column names are passed" in {
        Table(
          "Name",
          "Description",
          None,
          List(ColumnType.string, ColumnType.string, ColumnType.string),
          None,
          List(
            List(Some("v1"), None, None))
        )
        info("Table created")
      }
      "no data rows are passed" in {
        Table(
          "Name",
          "Description",
          None,
          List(ColumnType.string, ColumnType.string, ColumnType.string),
          None,
          List()
        )
        info("Table created")
      }
    }
  }
} 
Example 155
Source File: ReportContentJsonSpec.scala    From seahorse-workflow-executor   with Apache License 2.0
package io.deepsense.reportlib.model

import io.deepsense.reportlib.model.factory.ReportContentTestFactory
import org.scalatest.{Matchers, WordSpec}
import spray.json._

class ReportContentJsonSpec
  extends WordSpec
  with Matchers
  with ReportContentTestFactory
  with ReportJsonProtocol {

  import ReportContentTestFactory._

  "ReportContent" should {

    val emptyReportJson: JsObject = JsObject(
      "name" -> JsString(reportName),
      "reportType" -> JsString(reportType.toString),
      "tables" -> JsArray(),
      "distributions" -> JsObject()
    )
    val report = testReport
    val reportJson: JsObject = JsObject(
      "name" -> JsString(reportName),
      "reportType" -> JsString(reportType.toString),
      "tables" -> JsArray(report.tables.map(_.toJson): _*),
      "distributions" -> JsObject(report.distributions.mapValues(_.toJson))
    )

    "serialize" when {
      "empty" in {
        val report = ReportContent(reportName, reportType)
        report.toJson shouldBe emptyReportJson
      }
      "filled report" in {
        val json = report.toJson
        json shouldBe reportJson
      }
    }
    "deserialize" when {
      "empty report" in {
        emptyReportJson.convertTo[ReportContent] shouldBe ReportContent(
          reportName, reportType)
      }
      "full report" in {
        reportJson.convertTo[ReportContent] shouldBe report
      }
    }
  }
} 
Example 156
Source File: TableJsonSpec.scala    From seahorse-workflow-executor   with Apache License 2.0
package io.deepsense.reportlib.model

import org.scalatest.{Matchers, WordSpec}
import spray.json._

import io.deepsense.commons.types.ColumnType
import io.deepsense.commons.types.ColumnType.ColumnType
import io.deepsense.reportlib.model.factory.TableTestFactory

class TableJsonSpec extends WordSpec with Matchers with TableTestFactory with ReportJsonProtocol {

  "Table" should {
    "serialize" when {
      val rowNames: List[String] = List("rowName1", "rowName2")
      val columnNames: List[String] = List("A", "B")
      val columnTypes: List[ColumnType] = List(ColumnType.string, ColumnType.numeric)
      val values: List[List[Option[String]]] = List(List(Some("11"), None), List(None, Some("34")))
      "columnsNames specified" in {
        val json = testTableWithLabels(Some(columnNames), columnTypes, None, values).toJson
        json shouldBe jsonTable(Some(columnNames), columnTypes, None, values)
      }
      "rowsNames specified" in {
        val json = testTableWithLabels(None, columnTypes, Some(rowNames), values).toJson
        json shouldBe jsonTable(None, columnTypes, Some(rowNames), values)
      }
      "rowsNames, columnNames and columTypes specified" in {
        val json = testTableWithLabels(
          Some(columnNames), columnTypes, Some(rowNames), values).toJson
        json shouldBe jsonTable(Some(columnNames), columnTypes, Some(rowNames), values)
      }
      "is empty" in {
        val json = testEmptyTable.toJson
        json shouldBe jsonTable(None, List(), None, List())
      }
    }
    "deserialize" when {
      "filled table" in {
        val columnNames: Some[List[String]] = Some(List("A", "B"))
        val rowNames: Some[List[String]] = Some(List("1", "2"))
        val columnTypes: List[ColumnType] = List(ColumnType.string, ColumnType.numeric)
        val values: List[List[Option[String]]] =
          List(List(Some("a"), Some("1")), List(Some("b"), Some("2")))
        val json = jsonTable(columnNames, columnTypes, rowNames, values)
        json.convertTo[Table] shouldBe testTableWithLabels(
          columnNames, columnTypes, rowNames, values)
      }
      "empty table" in {
        val json = jsonTable(None, List(), None, List())
        json.convertTo[Table] shouldBe testTableWithLabels(None, List(), None, List())
      }
    }
  }

  private def jsonTable(
    columnsNames: Option[List[String]],
    columnTypes: List[ColumnType],
    rowsNames: Option[List[String]],
    values: List[List[Option[String]]]): JsObject = JsObject(Map[String, JsValue](
    "name" -> JsString(TableTestFactory.tableName),
    "description" -> JsString(TableTestFactory.tableDescription),
    "columnNames" -> toJsValue(columnsNames),
    "columnTypes" -> toJsValue(Some(columnTypes.map(_.toString))),
    "rowNames" -> toJsValue(rowsNames),
    "values" ->
      JsArray(
        values.map(row => JsArray(row.map(op => op.map(JsString(_)).getOrElse(JsNull)).toVector))
          .toVector)
  ))

  def toJsValue(values: Option[List[String]]): JsValue = {
    values
      .map(values => JsArray(values.map(JsString(_)).toVector)).getOrElse(JsNull)
  }
} 
Example 157
Source File: GraphKnowledgeSpec.scala    From seahorse-workflow-executor   with Apache License 2.0
package io.deepsense.graph

import org.mockito.Mockito._
import org.scalatest.mockito.MockitoSugar
import org.scalatest.{Matchers, WordSpec}

import io.deepsense.deeplang.exceptions.DeepLangException

class GraphKnowledgeSpec
  extends WordSpec
  with MockitoSugar
  with Matchers {

  "GraphKnowledge" should {
    "return proper errors map" in {
      val node1Id = Node.Id.randomId
      val node2Id = Node.Id.randomId
      val inferenceResultsWithErrors = mock[NodeInferenceResult]
      val errors = Vector(mock[DeepLangException], mock[DeepLangException])
      when(inferenceResultsWithErrors.errors).thenReturn(errors)
      val inferenceResultsWithoutErrors = mock[NodeInferenceResult]
      when(inferenceResultsWithoutErrors.errors).thenReturn(Vector.empty)

      val knowledge = GraphKnowledge()
        .addInference(node1Id, inferenceResultsWithErrors)
        .addInference(node2Id, inferenceResultsWithoutErrors)

      knowledge.errors shouldBe Map(node1Id -> errors)
    }
  }
} 
Example 158
Source File: ParamsWithSparkWrappersSpec.scala    From seahorse-workflow-executor   with Apache License 2.0
package io.deepsense.deeplang.params.wrappers.spark

import org.apache.spark.ml
import org.apache.spark.ml.param._
import org.apache.spark.sql.types.StructType
import org.scalatest.mockito.MockitoSugar
import org.scalatest.{Matchers, WordSpec}

import io.deepsense.deeplang.params.BooleanParam
import io.deepsense.deeplang.params.choice.{ChoiceParam, Choice}

class ParamsWithSparkWrappersSpec extends WordSpec
  with Matchers
  with MockitoSugar {

  import ParamsWithSparkWrappersSpec._

  "ParamsWithSparkWrappers" should {
    "calculate sparkParamWrappers" in {
      val paramsWithSparkWrappers = ParamsWithSparkWrappersClass()
      paramsWithSparkWrappers.sparkParamWrappers shouldBe
        Array(paramsWithSparkWrappers.paramA, paramsWithSparkWrappers.paramB)
    }
    "return parameter values" in {
      val paramsWithSparkWrappers = ParamsWithSparkWrappersClass().setParamA("a").setParamB(0.0)
      paramsWithSparkWrappers.sparkParamMap(
        paramsWithSparkWrappers.exampleSparkParams, StructType(Seq())).toSeq.toSet shouldBe
        Set(
          paramsWithSparkWrappers.exampleSparkParams.sparkParamA -> "a",
          paramsWithSparkWrappers.exampleSparkParams.sparkParamB -> 0)
    }
    "return wrappers nested in choice parameter values" in {
      val paramsWithSparkWrappers = ParamsWithSparkWrappersClass()
        .setChoice(OneParamChoiceWithWrappers().setParamC("c"))
      paramsWithSparkWrappers.sparkParamMap(
        paramsWithSparkWrappers.exampleSparkParams, StructType(Seq())).toSeq.toSet shouldBe
        Set(
          paramsWithSparkWrappers.exampleSparkParams.sparkParamC -> "c")
    }
  }
}

object ParamsWithSparkWrappersSpec {

  class ExampleSparkParams extends ml.param.Params {
    override val uid: String = "id"
    val sparkParamA = new Param[String]("", "paramA", "descA")
    val sparkParamB = new IntParam("", "paramB", "descB")
    val sparkParamC = new Param[String]("", "paramC", "descC")

    override def copy(extra: ParamMap): Params = ???
  }

  case class ParamsWithSparkWrappersClass() extends ParamsWithSparkWrappers {

    val exampleSparkParams = new ExampleSparkParams

    val paramA = new StringParamWrapper[ExampleSparkParams]("paramA", Some("descA"), _.sparkParamA)
    val paramB = new IntParamWrapper[ExampleSparkParams]("paramB", Some("descB"), _.sparkParamB)
    val choiceWithParamsInValues = new ChoiceParam[ChoiceWithWrappers]("choice", Some("descChoice"))
    val notWrappedParam = BooleanParam("booleanParamName", Some("booleanParamDescription"))

    val params: Array[io.deepsense.deeplang.params.Param[_]] =
      Array(paramA, paramB, choiceWithParamsInValues, notWrappedParam)

    def setParamA(v: String): this.type = set(paramA, v)
    def setParamB(v: Double): this.type = set(paramB, v)
    def setChoice(v: ChoiceWithWrappers): this.type = set(choiceWithParamsInValues, v)
  }

  sealed trait ChoiceWithWrappers extends Choice with ParamsWithSparkWrappers {
    override val choiceOrder: List[Class[_ <: ChoiceWithWrappers]] = List(
      classOf[OneParamChoiceWithWrappers],
      classOf[EmptyChoiceWithWrappers])
  }

  case class OneParamChoiceWithWrappers() extends ChoiceWithWrappers {
    val paramC = new StringParamWrapper[ExampleSparkParams]("paramC", Some("descC"), _.sparkParamC)
    def setParamC(v: String): this.type = set(paramC, v)

    override val name = "one param"
    val params: Array[io.deepsense.deeplang.params.Param[_]] = Array(paramC)
  }

  case class EmptyChoiceWithWrappers() extends ChoiceWithWrappers {
    override val name = "no params"
    val params: Array[io.deepsense.deeplang.params.Param[_]] = Array()
  }
} 
Example 159
Source File: WrappersDefaultValidationSpec.scala    From seahorse-workflow-executor   with Apache License 2.0
package io.deepsense.deeplang.params.wrappers.spark

import org.apache.spark.ml
import org.apache.spark.ml.param._
import org.scalatest.mockito.MockitoSugar
import org.scalatest.{Matchers, WordSpec}

class WrappersDefaultValidationSpec
  extends WordSpec
  with Matchers
  with MockitoSugar {

  class ExampleSparkParams extends ml.param.Params {
    override val uid: String = "id"
    val intSparkParam = new IntParam("", "name", "description")
    val floatSparkParam = new FloatParam("", "name", "description")
    val doubleSparkParam = new DoubleParam("", "name", "description")

    override def copy(extra: ParamMap): Params = ???
  }

  "IntParamWrapper" should {

    val intParamWrapper = new IntParamWrapper[ExampleSparkParams](
      "name",
      Some("description"),
      _.intSparkParam)

    "validate whole Int range" in {
      intParamWrapper.validate(Int.MinValue + 1) shouldBe empty
      intParamWrapper.validate(Int.MaxValue - 1) shouldBe empty
    }
    "reject fractional values" in {
      intParamWrapper.validate(Int.MinValue + 0.005) should have size 1
      intParamWrapper.validate(Int.MaxValue - 0.005) should have size 1
    }
  }

  "FloatParamWrapper" should {

    val floatParamWrapper = new FloatParamWrapper[ExampleSparkParams](
      "name",
      Some("description"),
      _.floatSparkParam)

    "validate whole Float range" in {
      floatParamWrapper.validate(Float.MinValue + 1) shouldBe empty
      floatParamWrapper.validate(Float.MaxValue - 1) shouldBe empty
    }
    "reject values out of Float range" in {
      floatParamWrapper.validate(Double.MinValue + 1) should have size 1
      floatParamWrapper.validate(Double.MaxValue - 1) should have size 1
    }
  }

  "DoubleParamWrapper" should {
    "validate whole Double range" in {
      val doubleParamWrapper = new DoubleParamWrapper[ExampleSparkParams](
        "name",
        Some("description"),
        _.doubleSparkParam)
      doubleParamWrapper.validate(Double.MinValue + 1) shouldBe empty
      doubleParamWrapper.validate(Double.MaxValue - 1) shouldBe empty
    }
  }
} 
Example 160
Source File: AbstractParamSpec.scala    From seahorse-workflow-executor   with Apache License 2.0
package io.deepsense.deeplang.params

import org.scalatest.mockito.MockitoSugar
import org.scalatest.{Matchers, WordSpec}
import spray.json.{JsObject, JsValue}

abstract class AbstractParamSpec[T, U <: Param[T]]
  extends WordSpec
  with Matchers
  with MockitoSugar {

  def className: String

  def paramFixture: (U, JsValue)  // param + its json description

  def valueFixture: (T, JsValue)  // value + its json description

  val defaultValue: T = valueFixture._1

  def serializeDefaultValue(default: T): JsValue = paramFixture._1.valueToJson(default)

  className should {
    "serialize itself to JSON" when {
      "default value is not provided" in {
        val (param, expectedJson) = paramFixture
        param.toJson(maybeDefault = None) shouldBe expectedJson
      }
      "default value is provided" in {
        val (param, expectedJson) = paramFixture
        val expectedJsonWithDefault = JsObject(
          expectedJson.asJsObject.fields + ("default" -> serializeDefaultValue(defaultValue))
        )
        param.toJson(maybeDefault = Some(defaultValue)) shouldBe expectedJsonWithDefault
      }
    }
  }

  it should {
    "serialize value to JSON" in {
      val param = paramFixture._1
      val (value, expectedJson) = valueFixture
      param.valueToJson(value) shouldBe expectedJson
    }
  }

  it should {
    "deserialize value from JSON" in {
      val param = paramFixture._1
      val (expectedValue, valueJson) = valueFixture
      val extractedValue = param.valueFromJson(valueJson)
      extractedValue shouldBe expectedValue
    }
  }
} 
Example 161
Source File: TestReThinkSourceConstants.scala    From stream-reactor   with Apache License 2.0
package com.datamountaineer.streamreactor.connect.rethink.config

import org.scalatest.WordSpec


class TestReThinkSourceConstants extends WordSpec {

  // Constants
  val RETHINK_HOST = "connect.rethink.host"
  val RETHINK_DB = "connect.rethink.db"
  val RETHINK_PORT = "connect.rethink.port"
  val ROUTE_QUERY = "connect.rethink.kcql"

  "RETHINK_HOST should have the same key in ReThinkSinkConfigConstants" in {
    assert(RETHINK_HOST.equals(ReThinkConfigConstants.RETHINK_HOST))
  }

  "RETHINK_DB should have the same key in ReThinkSinkConfigConstants" in {
    assert(RETHINK_DB.equals(ReThinkConfigConstants.RETHINK_DB))
  }

  "RETHINK_PORT should have the same key in ReThinkSinkConfigConstants" in {
    assert(RETHINK_PORT.equals(ReThinkConfigConstants.RETHINK_PORT))
  }

  "ROUTE_QUERY should have the same key in ReThinkSinkConfigConstants" in {
    assert(ROUTE_QUERY.equals(ReThinkConfigConstants.KCQL))
  }
} 
Example 162
Source File: EntitiesMapSpec.scala    From seahorse-workflow-executor   with Apache License 2.0
package io.deepsense.models.workflows

import org.scalatest.mockito.MockitoSugar
import org.scalatest.{Matchers, WordSpec}

import io.deepsense.commons.models.Entity
import io.deepsense.deeplang.doperables.dataframe.DataFrame
import io.deepsense.reportlib.model.ReportContent

class EntitiesMapSpec
  extends WordSpec
  with Matchers
  with MockitoSugar {

  "EntitiesMap" should {
    "be correctly created from results and reports" in {

      val entity1Id = Entity.Id.randomId
      val doperable1 = new DataFrame()
      val report1 = mock[ReportContent]

      val entity2Id = Entity.Id.randomId
      val doperable2 = new DataFrame()

      val results = Map(entity1Id -> doperable1, entity2Id -> doperable2)
      val reports = Map(entity1Id -> report1)

      EntitiesMap(results, reports) shouldBe EntitiesMap(Map(
        entity1Id -> EntitiesMap.Entry(
          "io.deepsense.deeplang.doperables.dataframe.DataFrame", Some(report1)),
        entity2Id -> EntitiesMap.Entry(
          "io.deepsense.deeplang.doperables.dataframe.DataFrame", None)
      ))
    }
  }
} 
Example 163
Source File: DatasourceListJsonProtocolSpec.scala    From seahorse-workflow-executor   with Apache License 2.0
package io.deepsense.commons.json.envelope

import org.joda.time.DateTime
import org.scalatest.mockito.MockitoSugar
import org.scalatest.{Matchers, WordSpec}

import io.deepsense.api.datasourcemanager.model.{AccessLevel, Datasource, DatasourceParams, DatasourceType}
import io.deepsense.commons.datasource.DatasourceTestData
import io.deepsense.commons.json.datasources.DatasourceListJsonProtocol

class DatasourceListJsonProtocolSpec
  extends WordSpec
  with MockitoSugar
  with Matchers {

  val uuid = "123e4567-e89b-12d3-a456-426655440000"
  val externalFile = DatasourceType.EXTERNALFILE

  val dsList = List(DatasourceTestData.multicharSeparatorLibraryCsvDatasource)

  "DatasourceJsonProtocolSpec" should {
    "serialize and deserialize single datasource" in {
      val datasourcesJson = DatasourceListJsonProtocol.toString(dsList)
      val asString = datasourcesJson.toString
      val datasources = DatasourceListJsonProtocol.fromString(asString)
      info(s"Datasource: $datasources, json: $asString")
      datasources should contain theSameElementsAs dsList
    }

    "serialize no datasource" in {
      val datasourcesJson = DatasourceListJsonProtocol.toString(List.empty[Datasource])
      datasourcesJson shouldBe "[]"
    }
  }
} 
Example 164
Source File: RetrySpec.scala    From seahorse-workflow-executor   with Apache License 2.0
package io.deepsense.commons.utils

import scala.concurrent.{Await, Future}
import scala.concurrent.duration._
import scala.language.postfixOps

import akka.actor.ActorSystem
import akka.util.Timeout
import org.scalatest.{Matchers, WordSpec}

import io.deepsense.commons.utils.RetryActor.{RetriableException, RetryLimitReachedException}

class RetrySpec extends WordSpec with Matchers {

  val uutName = classOf[Retry[_]].getSimpleName.filterNot(_ == '$')

  trait Setup {
    def generateUUT[T](retryLimitCount: Int)(toDo: => Future[T]): Retry[T] = new {
      override val workDescription = Some("test work")

      override val actorSystem: ActorSystem = ActorSystem()

      override val retryInterval = 1 nano

      override val retryLimit = retryLimitCount

      override val timeout = Timeout(1 minute)

    } with Retry[T] {
      override def work: Future[T] = toDo
    }
  }

  s"A $uutName" should {
    "complete its work" when {
      "no exceptions are thrown" in {
        new Setup {
          val uut = generateUUT(0) {
            Future.successful(2 * 3 + 8)
          }

          Await.result(
            uut.tryWork, Duration.Inf) shouldBe 14
        }
      }

      "only retriable exceptions are thrown and retry limit is not reached" in {
        new Setup {
          var count = 3
          val uut = generateUUT(3) {
            if (count > 0) {
              count -= 1
              Future.failed(RetriableException(s"Thrown because count is ${count + 1}", None))
            } else {
              Future.successful("success")
            }
          }

          Await.result(
            uut.tryWork, Duration.Inf
          ) shouldBe "success"

          count shouldBe 0
        }
      }
    }

    "fail" when {
      "retry limit is reached" in {
        new Setup {
          val uut = generateUUT(10) {
            Future.failed(RetriableException(s"This will never succeed, yet we keep trying", None))
          }

          a [RetryLimitReachedException] shouldBe thrownBy (Await.result(uut.tryWork, Duration.Inf))

        }
      }

      "unexpected exception is thrown" in {
        var count = 1
        new Setup {
          val uut = generateUUT(10) {
            if (count == 0) {
              Future.failed(new RuntimeException("Thrown because counter reached zero"))
            } else {
              count -= 1
              Future.failed(RetriableException(s"Thrown because counter was ${count + 1}", None))
            }
          }

          a [RuntimeException] shouldBe thrownBy (Await.result(uut.tryWork, Duration.Inf))
          count shouldBe 0
        }
      }
    }
  }
} 
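
generateUUT uses Scala 2's early-definition syntax (new { ... } with Retry[T]) so that the abstract vals are assigned before the trait's own initialiser runs; with a plain anonymous subclass they would still hold default values while the trait initialises. A standalone illustration:

object EarlyInitDemo extends App {
  trait Greeter {
    val name: String
    val greeting: String = s"hello, $name" // evaluated during trait initialisation
  }

  val early = new { val name = "world" } with Greeter
  println(early.greeting) // "hello, world"

  val late = new Greeter { val name = "world" }
  println(late.greeting) // "hello, null" -- name not yet assigned
}
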
Example 165
Source File: AutoDerivationTest.scala    From phobos   with Apache License 2.0
package ru.tinkoff.phobos

import ru.tinkoff.phobos.testString._
import org.scalatest.{Matchers, WordSpec}

class AutoDerivationTest extends WordSpec with Matchers {
  "Automatic derivation" should {
    "derive codecs" in {
      """
        | import ru.tinkoff.phobos.encoding._
        | import ru.tinkoff.phobos.decoding._
        | import ru.tinkoff.phobos.syntax._
        | import ru.tinkoff.phobos.derivation.auto._
        |
        | case class Foo(@attr bar: Int, @attr baz: Double, @text txt: String)
        | case class Bar(@attr quxx: Float, foo: Foo, qux: Byte)
        |
        | implicitly[ElementEncoder[Bar]]
        | implicitly[ElementDecoder[Bar]]
        |""".stripMargin should compile
    }

    "not derive encoder if not imported" in {
      """
        | import ru.tinkoff.phobos.encoding._
        | import ru.tinkoff.phobos.syntax._
        | case class Foo(@attr bar: Int, @attr baz: Double, @text txt: String)
        | case class Bar(@attr quxx: Float, foo: Foo, qux: Byte)
        |
        | implicitly[ElementEncoder[Bar]]
        |""".stripMargin shouldNot compile
    }


    "not derive decoder if not imported" in {
      """
        | import ru.tinkoff.phobos.decoding._
        | import ru.tinkoff.phobos.syntax._
        |
        | case class Foo(@attr bar: Int, @attr baz: Double, @text txt: String)
        | case class Bar(@attr quxx: Float, foo: Foo, qux: Byte)
        |
        | implicitly[ElementDecoder[Bar]]
        |""".stripMargin shouldNot compile
    }

    "derive codecs correctly" in {
      import ru.tinkoff.phobos.syntax._
      import ru.tinkoff.phobos.decoding._
      import ru.tinkoff.phobos.encoding._
      import ru.tinkoff.phobos.derivation.auto._

      case class Foo(@attr bar: Int, @attr baz: Double, @text txt: String)
      case class Bar(foo: Float)
      case class Baz(foo: Foo, bars: List[Bar], maybebar: Option[Bar])

      val baz = Baz(
        foo = Foo(42, 144.12, "keke"),
        bars = List(Bar(1), Bar(2), Bar(3)),
        maybebar = Some(Bar(4))
      )
      val bazXml =
      """<?xml version='1.0' encoding='UTF-8'?>
       | <baz>
       |     <foo bar="42" baz="144.12">keke</foo>
       |     <bars><foo>1.0</foo></bars>
       |     <bars><foo>2.0</foo></bars>
       |     <bars><foo>3.0</foo></bars>
       |     <maybebar><foo>4.0</foo></maybebar>
       | </baz>
       |""".stripMargin

      val encoder = XmlEncoder.fromElementEncoder[Baz]("baz")
      assert(encoder.encode(baz) == bazXml.minimized)

      val decoder = XmlDecoder.fromElementDecoder[Baz]("baz")
      assert(decoder.decode(bazXml) == Right(baz))
    }
  }
} 
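
should compile and shouldNot compile are ScalaTest matchers that run the compiler over a code-in-string while the spec itself is being compiled; shouldNot typeCheck is the stricter variant that demands a type error rather than any compile failure. Minimal standalone use:

import org.scalatest.{Matchers, WordSpec}

class CompileCheckSketchSpec extends WordSpec with Matchers {
  "the typechecker" should {
    "accept well-typed code and reject ill-typed code" in {
      "val x: Int = 1" should compile
      """val x: Int = "one"""" shouldNot typeCheck
    }
  }
}
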
Example 166
Source File: XmlEncoderTest.scala    From phobos   with Apache License 2.0
package ru.tinkoff.phobos

import org.scalatest.{Matchers, WordSpec}
import ru.tinkoff.phobos.annotations.XmlCodec
import ru.tinkoff.phobos.encoding.XmlEncoder

class XmlEncoderTest extends WordSpec with Matchers {
  "XmlEncoder with config" should {
    "ignore prolog if configured" in {
      @XmlCodec("Foo")
      final case class Foo(a: Int, b: String, c: Double)

      XmlEncoder[Foo].encodeWithConfig(Foo(1, "abc", 1.0), XmlEncoder.defaultConfig.withoutProlog) shouldBe
        "<Foo><a>1</a><b>abc</b><c>1.0</c></Foo>"
    }

    "not ignore prolog by default" in {
      @XmlCodec("Foo")
      final case class Foo(a: Int, b: String, c: Double)

      XmlEncoder[Foo].encodeWithConfig(Foo(1, "abc", 1.0), XmlEncoder.defaultConfig) shouldBe
        "<?xml version='1.0' encoding='UTF-8'?><Foo><a>1</a><b>abc</b><c>1.0</c></Foo>"
    }

    "overwrite prolog information if configured" in {
      @XmlCodec("Foo")
      final case class Foo(a: Int, b: String, c: Double)

      XmlEncoder[Foo].encodeWithConfig(Foo(1, "abc", 1.0), XmlEncoder.XmlEncoderConfig("UTF-16", "1.1", true)) shouldBe
        "<?xml version='1.1' encoding='UTF-16'?><Foo><a>1</a><b>abc</b><c>1.0</c></Foo>"
    }
  }
} 
Example 167
Source File: DataGroupTest.scala    From pipelines-examples   with Apache License 2.0
package pipelines.example

import org.scalatest.{ Matchers, WordSpec }

class DataGroupTest extends WordSpec with Matchers {

  "DataGroup" should {
    val groupSize = 20
    // simulate the behavior of the data producer
    val data = (0 to groupSize * 10)
      .map(i ⇒ (i.toLong / groupSize, i.toLong))
      .groupBy { case (k, _) ⇒ k }
      .map { case (k, seqKV) ⇒ (k, seqKV.map { case (_, v) ⇒ v }) }

    "report completed when it has received all data" in {
      val dataGroup = DataGroup(3, groupSize, data(3))
      assert(dataGroup.isComplete, "dataGroup should be complete with the data sample")
    }

    "report missing elements when it doesn't have all data for its group" in {
      val dataSubset = data(5).drop(3)
      val dataGroup = DataGroup(5, groupSize, dataSubset)
      assert(!dataGroup.isComplete, "dataGroup should be incomplete")
      dataGroup.missing should be(data(5).take(3).toSet)
      dataGroup.missingReport should be("(100,102)")
    }

  }
} 
Example 168
Source File: SparkProductOperationsSpec.scala    From pipelines-examples   with Apache License 2.0
package pipelines.example.warez

import org.scalatest.{ Matchers, WordSpec }
import org.scalatest.OptionValues._

import scala.collection.immutable.Seq
import warez.{ PriceUpdate, Product, Sku, StockUpdate }

class SparkProductOperationsSpec extends WordSpec with Matchers {

  "A Product" should {
    "be updated correctly" in {
      val skus = Array(
        Sku("1", "Small Hole", Some(10), Some(5)),
        Sku("2", "Medium Hole", Some(10), Some(10)),
        Sku("3", "Large Hole", Some(15), Some(20))
      )
      val description = "A cartoon hole that can be applied to any surface."
      val keywords = Array("black", "hole", "gag", "plot device", "roger rabbit")

      val p = new Product(
        "123456789",
        "Acme Portable Hole",
        description,
        keywords,
        skus
      )

      val priceUpdate = PriceUpdate(
        "123456789",
        "1",
        10
      )
      val stockUpdate = StockUpdate(
        "123456789",
        "1",
        10
      )
      val zero = SparkProductJoiner.emptyProduct
      val p1 = SparkProductJoiner.updateProduct(zero, Seq(p).toIterator)
      p1 should equal(p)
      val prodPrice = SparkProductJoiner.priceUpdate2Products(priceUpdate)
      val p2 = SparkProductJoiner.updateProduct(p1, Seq(prodPrice).toIterator)
      p2.skus.find(_.id == "1").value.price should equal(Some(10))
      val prodStock = SparkProductJoiner.stockUpdate2Product(stockUpdate)
      val p3 = SparkProductJoiner.updateProduct(p2, Seq(prodStock).toIterator)
      p3.skus.find(_.id == "1").value.stock should equal(Some(20))
      // the same price update should cause no change here
      val p4 = SparkProductJoiner.updateProduct(p3, Seq(prodPrice).toIterator)
      p4.skus.find(_.id == "1").value.price should equal(Some(10))
      p4.skus.find(_.id == "1").value.stock should equal(Some(20))
      p4.description should be(description)
      p4.keywords should be(keywords)
    }
  }
} 
Example 169
Source File: EnforceAfterVersionRuleSpec.scala    From sbt-git-versioning   with MIT License
package com.rallyhealth.sbt.semver.level.rule

import com.rallyhealth.sbt.semver.TestSnapshotVersion
import com.rallyhealth.sbt.versioning.SemVerReleaseType.Major
import com.rallyhealth.sbt.versioning.{ReleaseVersion, SemVerReleaseType, SemanticVersion}
import org.scalactic.TypeCheckedTripleEquals
import org.scalatest.{Matchers, WordSpec}

class EnforceAfterVersionRuleSpec
  extends WordSpec
  with Matchers
  with TypeCheckedTripleEquals {

  case class Scenario(enforceAfter: Option[ReleaseVersion], majorAllowed: Seq[SemanticVersion] = Nil, noOpinion: Seq[SemanticVersion] = Nil)

  val scenarios = Seq(
    Scenario(
      enforceAfter = None,
      noOpinion = Seq(
        ReleaseVersion(0, 0, 0),
        ReleaseVersion(0, 0, 1),
        ReleaseVersion(0, 1, 1),
        ReleaseVersion(1, 0, 0),
        ReleaseVersion(1, 1, 1),
        TestSnapshotVersion(0, 0, 0),
        TestSnapshotVersion(0, 1, 1),
        TestSnapshotVersion(1, 0, 0),
        TestSnapshotVersion(1, 1, 1)
      )
    ),
    Scenario(
      enforceAfter = Some(ReleaseVersion(1, 0, 0)),
      majorAllowed = Seq(
        ReleaseVersion(0, 0, 0),
        ReleaseVersion(0, 0, 1),
        ReleaseVersion(0, 1, 1),
        ReleaseVersion(1, 0, 0),
        TestSnapshotVersion(0, 0, 0),
        TestSnapshotVersion(0, 1, 1),
        TestSnapshotVersion(1, 0, 0)
      ),
      noOpinion = Seq(
        ReleaseVersion(1, 1, 1),
        TestSnapshotVersion(1, 1, 1)
      )
    )
  )

  "maybeEnforceAfterVersion" when {

    for (Scenario(maybeEnforceAfter, majorAllowed, noOpinion) <- scenarios) {
      maybeEnforceAfter.toString should {

        def calcReleaseType(ver: SemanticVersion): Option[SemVerReleaseType] = {
          val maybeLevel = EnforceAfterVersionRule(ver, maybeEnforceAfter).calcLevel()
          maybeLevel.map(_.releaseType)
        }

        for (ver <- majorAllowed) {
          s"allow major changes for $ver" in {
            assert(calcReleaseType(ver) === Some(Major))
          }
        }

        for (ver <- noOpinion) {
          s"not have an opinion about $ver" in {
            assert(calcReleaseType(ver) === None)
          }
        }
      }
    }
  }
} 
Example 170
Source File: VersionDiffRuleSpec.scala    From sbt-git-versioning   with MIT License
package com.rallyhealth.sbt.semver.level.rule

import com.rallyhealth.sbt.semver.TestSnapshotVersion
import com.rallyhealth.sbt.versioning.SemVerReleaseType.{Major, Minor, Patch}
import com.rallyhealth.sbt.versioning.{ReleaseVersion, SemVerReleaseType, SemanticVersion}
import org.scalactic.TypeCheckedTripleEquals
import org.scalatest.{Matchers, WordSpec}

class VersionDiffRuleSpec
  extends WordSpec
  with Matchers
  with TypeCheckedTripleEquals {

  val releaseVersions = Seq(
    ReleaseVersion(0, 0, 0),
    ReleaseVersion(0, 0, 1),
    ReleaseVersion(0, 1, 1),
    ReleaseVersion(1, 0, 0),
    ReleaseVersion(1, 1, 1)
  )

  case class Scenario(
    prevRelease: Option[ReleaseVersion],
    shouldAllowForVersions: Map[SemVerReleaseType, Seq[SemanticVersion]]
  )

  val scenarios = Seq(
    Scenario(
      prevRelease = None,
      shouldAllowForVersions = Map(
        Major -> Seq(
          ReleaseVersion(0, 0, 0),
          ReleaseVersion(0, 0, 1),
          ReleaseVersion(0, 1, 1),
          ReleaseVersion(1, 0, 0),
          ReleaseVersion(1, 1, 1),
          TestSnapshotVersion(0, 0, 0),
          TestSnapshotVersion(0, 1, 1),
          TestSnapshotVersion(1, 0, 0),
          TestSnapshotVersion(1, 1, 1)
        )
      )
    ),
    Scenario(
      prevRelease = Some(ReleaseVersion(0, 0, 0)),
      shouldAllowForVersions = Map(
        Patch -> Seq(
          ReleaseVersion(0, 0, 1),
          ReleaseVersion(0, 0, 2)
        ),
        Minor -> Seq(
          ReleaseVersion(0, 1, 0),
          ReleaseVersion(0, 1, 1),
          TestSnapshotVersion(0, 0, 1), // because snapshots
          TestSnapshotVersion(0, 0, 2), // because snapshots
          TestSnapshotVersion(0, 1, 0),
          TestSnapshotVersion(0, 1, 1)
        ),
        Major -> Seq(
          ReleaseVersion(1, 0, 0),
          ReleaseVersion(1, 1, 1),
          TestSnapshotVersion(1, 0, 0),
          TestSnapshotVersion(1, 1, 1)
        )
      )
    )
  )

  for (scn <- scenarios) {
    s"prevRelease: ${scn.prevRelease}" should {
      for ((expected, versions) <- scn.shouldAllowForVersions) {
        s"allow $expected" when {
          for (ver <- versions) {
            s"current version: $ver" in {
              val maybeReleaseType = VersionDiffRule(ver, scn.prevRelease).calcLevel().map(_.releaseType)
              assert(maybeReleaseType === Some(expected))
            }
          }
        }
      }
    }
  }
} 
Example 171
Source File: InitialDevelopmentRuleSpec.scala    From sbt-git-versioning   with MIT License 5 votes
package com.rallyhealth.sbt.semver.level.rule

import com.rallyhealth.sbt.semver.TestSnapshotVersion
import com.rallyhealth.sbt.versioning.ReleaseVersion
import com.rallyhealth.sbt.versioning.SemVerReleaseType.Major
import org.scalactic.TypeCheckedTripleEquals
import org.scalatest.{LoneElement, Matchers, WordSpec}

class InitialDevelopmentRuleSpec
  extends WordSpec
  with Matchers
  with LoneElement
  with TypeCheckedTripleEquals {

  val scenarios = Seq(
    ReleaseVersion(0, 0, 0) -> Some(Major),
    ReleaseVersion(0, 0, 1) -> Some(Major),
    ReleaseVersion(0, 1, 1) -> Some(Major),
    ReleaseVersion(1, 0, 0) -> None,
    ReleaseVersion(1, 1, 1) -> None,
    TestSnapshotVersion(0, 0, 0) -> Some(Major),
    TestSnapshotVersion(0, 1, 1) -> Some(Major),
    TestSnapshotVersion(1, 0, 0) -> None,
    TestSnapshotVersion(1, 1, 1) -> None
  )

  for ((version, expected) <- scenarios) {
    s"conclude ${version} is $expected" in {
      val maybeLevel = InitialDevelopmentRule(version).calcLevel()
      val maybeReleaseType = maybeLevel.map(_.releaseType)

      assert(maybeReleaseType === expected)
    }
  }
} 
Example 172
Source File: SemVerReleaseTypeSpec.scala    From sbt-git-versioning   with MIT License 5 votes
package com.rallyhealth.sbt.versioning

import org.scalactic.TypeCheckedTripleEquals
import org.scalatest.{Matchers, WordSpec}

class SemVerReleaseTypeSpec
  extends WordSpec
  with Matchers
  with TypeCheckedTripleEquals {

  "Major > Minor > Patch" in {
    assert(SemVerReleaseType.Major === SemVerReleaseType.Major)
    assert(SemVerReleaseType.Major > SemVerReleaseType.Minor)
    assert(SemVerReleaseType.Major > SemVerReleaseType.Patch)

    assert(SemVerReleaseType.Minor < SemVerReleaseType.Major)
    assert(SemVerReleaseType.Minor === SemVerReleaseType.Minor)
    assert(SemVerReleaseType.Minor > SemVerReleaseType.Patch)

    assert(SemVerReleaseType.Patch < SemVerReleaseType.Major)
    assert(SemVerReleaseType.Patch < SemVerReleaseType.Minor)
    assert(SemVerReleaseType.Patch === SemVerReleaseType.Patch)
  }

} 
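The comparisons above require SemVerReleaseType to be ordered. A minimal sketch of one way to model such an ordered enumeration in Scala, with illustrative names rather than the plugin's real definition:

// Sketch: rank-based ordering gives Patch < Minor < Major under Ordered's operators.
sealed abstract class ReleaseType(val rank: Int) extends Ordered[ReleaseType] {
  def compare(that: ReleaseType): Int = this.rank.compare(that.rank)
}
case object Patch extends ReleaseType(0)
case object Minor extends ReleaseType(1)
case object Major extends ReleaseType(2)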
Example 173
Source File: StatesTest.scala    From deequ   with Apache License 2.0 5 votes
package com.amazon.deequ.analyzers

import com.amazon.deequ.SparkContextSpec
import com.amazon.deequ.utils.FixtureSupport
import org.scalatest.{Matchers, WordSpec}

class StatesTest extends WordSpec with Matchers with SparkContextSpec with FixtureSupport {

  "FrequenciesAndNumRows" should {
    "merge correctly" in withSparkSession { session =>

      import session.implicits._

      val dataA = Seq("A", "A", "B").toDF("att1")
      val dataB = Seq("A", "C", "C").toDF("att1")

      val stateA = FrequencyBasedAnalyzer.computeFrequencies(dataA, "att1" :: Nil)
      val stateB = FrequencyBasedAnalyzer.computeFrequencies(dataB, "att1" :: Nil)

      val stateAB = stateA.sum(stateB)

      println(stateA.frequencies.schema)
      stateA.frequencies.collect().foreach { println }
      println()

      println(stateB.frequencies.schema)
      stateB.frequencies.collect().foreach { println }
      println()

      println(stateAB.frequencies.schema)
      stateAB.frequencies.collect().foreach { println }

      val mergedFrequencies = stateAB.frequencies.collect()
        .map { row => row.getString(0) -> row.getLong(1) }
        .toMap

      assert(mergedFrequencies.size == 3)
      assert(mergedFrequencies.get("A").contains(3))
      assert(mergedFrequencies.get("B").contains(1))
      assert(mergedFrequencies.get("C").contains(2))
    }
  }
} 
Example 174
Source File: LocalityReadNodesSelectionSpec.scala    From NSDb   with Apache License 2.0 5 votes
package io.radicalbit.nsdb.cluster.logic

import io.radicalbit.nsdb.model.Location
import org.scalatest.{Matchers, WordSpec}

class LocalityReadNodesSelectionSpec extends WordSpec with Matchers {

  val localityReadNodesSelection = new LocalityReadNodesSelection("this")

  private def testLocations(node: String, start: Long, end: Long) = (start to end).map { i =>
    Location("metric", node, i, i + 1)
  }

  "LocalityReadNodesSelection" should {
    "privilege local nodes" in {

      val completelyLocalLocations = testLocations("this", 0, 9) ++
        testLocations("that", 0, 9) ++
        testLocations("this", 10, 19) ++
        testLocations("that2", 10, 19)

      val uniqueLocations = localityReadNodesSelection.getDistinctLocationsByNode(completelyLocalLocations)
      uniqueLocations.keySet shouldBe Set("this")
      uniqueLocations("this").sortBy(_.from) shouldBe testLocations("this", 0, 9) ++ testLocations("this", 10, 19)
    }

    "use the minimum amount of other locations when some shards is not locally available" in {

      val scatteredLocations = testLocations("this", 0, 9) ++
        testLocations("that", 0, 9) ++
        testLocations("that", 10, 19) ++
        testLocations("that2", 10, 19) ++
        testLocations("that", 20, 29) ++
        testLocations("that2", 20, 29) ++
        testLocations("that2", 30, 39)

      val uniqueLocations = localityReadNodesSelection.getDistinctLocationsByNode(scatteredLocations)
      uniqueLocations.keySet shouldBe Set("this", "that", "that2")
      uniqueLocations("this").sortBy(_.from) shouldBe testLocations("this", 0, 9)
      uniqueLocations("that").sortBy(_.from) shouldBe testLocations("that", 10, 19) ++ testLocations("that", 20, 29)
      uniqueLocations("that2").sortBy(_.from) shouldBe testLocations("that2", 30, 39)
    }
  }

} 
Example 175
Source File: DiskMetricsSelectorSpec.scala    From NSDb   with Apache License 2.0 5 votes
package io.radicalbit.nsdb.cluster.metrics

import java.nio.file.{Files, Paths}

import akka.actor.Address
import akka.cluster.metrics.StandardMetrics._
import akka.cluster.metrics.{Metric, NodeMetrics}
import io.radicalbit.nsdb.cluster.metrics.NSDbMetrics._
import org.scalatest.{Matchers, WordSpec}
import org.scalatest.OptionValues._

class DiskMetricsSelectorSpec extends WordSpec with Matchers {

  val emptyNode      = Address("nsdb", "NSDb", "emptyNode", 2552)
  val almostFullNode = Address("nsdb", "NSDb", "node1", 2552)
  val node2          = Address("nsdb", "NSDb", "node2", 2552)
  val node3          = Address("nsdb", "NSDb", "node3", 2552)
  val node4          = Address("nsdb", "NSDb", "node4", 2552)
  val realNode       = Address("nsdb", "NSDb", "real", 2552)

  val fs = Files.getFileStore(Paths.get("."))

  val nodeMetrics1 = NodeMetrics(
    almostFullNode,
    System.currentTimeMillis,
    Set(
      Metric.create(DiskTotalSpace, 1000000, None),
      Metric.create(DiskFreeSpace, 100, None),
      Metric.create(HeapMemoryMax, 512, None),
      Metric.create(CpuCombined, 0.2, None),
      Metric.create(CpuStolen, 0.1, None),
      Metric.create(SystemLoadAverage, 0.5, None),
      Metric.create(Processors, 8, None)
    ).flatten
  )

  val emptyNodeMetric = NodeMetrics(
    emptyNode,
    System.currentTimeMillis,
    Set(Metric.create(DiskTotalSpace, 1000000, None), Metric.create(DiskFreeSpace, 0, None)).flatten
  )

  val nodeMetrics2 = NodeMetrics(
    node2,
    System.currentTimeMillis,
    Set(Metric.create(DiskTotalSpace, 1000000, None), Metric.create(DiskFreeSpace, 750000, None)).flatten
  )

  val nodeMetrics3 = NodeMetrics(
    node3,
    System.currentTimeMillis,
    Set(Metric.create(DiskTotalSpace, 1000000, None), Metric.create(DiskFreeSpace, 1000000, None)).flatten
  )

  val nodeMetrics4 = NodeMetrics(
    node4,
    System.currentTimeMillis,
    Set()
  )

  val realNodeMetrics = NodeMetrics(
    realNode,
    System.currentTimeMillis,
    Set(Metric.create(DiskTotalSpace, fs.getTotalSpace, None), Metric.create(DiskFreeSpace, fs.getUsableSpace, None)).flatten
  )

  val nodeMetrics = Set(emptyNodeMetric, nodeMetrics1, nodeMetrics2, nodeMetrics3, nodeMetrics4, realNodeMetrics)

  "DiskMetricsSelector" must {
    "calculate capacity of heap metrics" in {
      val capacity = DiskMetricsSelector.capacity(nodeMetrics)
      capacity.get(emptyNode) shouldBe Some(0.0)
      capacity.get(almostFullNode) shouldBe Some(0.0001)
      capacity.get(node2) shouldBe Some(0.75)
      capacity.get(node3) shouldBe Some(1)
      capacity.get(node4) shouldBe None
      // For a real node the capacity must be between 0 and 1. There's no way to estimate a reasonable capacity value, and mocking is not the point here.
      capacity.get(realNode).value shouldBe >(0.0)
      capacity.get(realNode).value shouldBe <(1.0)
    }
  }

} 
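The asserted capacities follow directly from the ratio of free to total disk space (100 / 1000000 = 0.0001, 750000 / 1000000 = 0.75), with nodes that report no disk metrics yielding no capacity. A sketch of that formula as a hypothetical helper, not the selector's actual implementation:

// Sketch: capacity is the fraction of disk space still free; absent metrics
// (node4 above) produce no capacity entry at all.
def diskCapacity(freeSpace: Option[Long], totalSpace: Option[Long]): Option[Double] =
  for {
    free  <- freeSpace
    total <- totalSpace
    if total > 0
  } yield free.toDouble / total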
Example 176
Source File: ASCIITableBuilderSpec.scala    From NSDb   with Apache License 2.0 5 votes
package io.radicalbit.nsdb.cli

import io.radicalbit.nsdb.cli.table.ASCIITableBuilder
import io.radicalbit.nsdb.common.protocol.{Bit, SQLStatementExecuted}
import org.scalatest.{Matchers, WordSpec}

import scala.util.Success

class ASCIITableBuilderSpec extends WordSpec with Matchers {

  def statementFor(res: Seq[Bit]) = SQLStatementExecuted(db = "db", namespace = "registry", metric = "people", res)

  "A parser instance" when {

    "receive a select projecting a wildcard" should {
      "parse it successfully" in {

        val input = List(
          Bit(timestamp = 1L,
              value = 10,
              dimensions = Map("name" -> "Roger", "surname" -> "Sterling", "age" -> 65),
              Map.empty),
          Bit(timestamp = 2L, value = 20, dimensions = Map("name"    -> "Don", "surname" -> "Draper"), Map.empty),
          Bit(timestamp = 3L, value = 30, dimensions = Map("age"     -> 28, "surname" -> "Olson"), Map.empty),
          Bit(timestamp = 4L, value = 40, dimensions = Map("name"    -> "Pete"), Map.empty),
          Bit(timestamp = 5L, value = 50, dimensions = Map("age"     -> "32"), Map.empty),
          Bit(timestamp = 6L, value = 60, dimensions = Map("surname" -> "Holloway"), Map.empty)
        )

        val expected = """+-----------------------+-----+---+-----+--------+
          ?|timestamp              |value|age|name |surname |
          ?+-----------------------+-----+---+-----+--------+
          ?|1970-01-01T00:00:00.001|10   |65 |Roger|Sterling|
          ?+-----------------------+-----+---+-----+--------+
          ?|1970-01-01T00:00:00.002|20   |   |Don  |Draper  |
          ?+-----------------------+-----+---+-----+--------+
          ?|1970-01-01T00:00:00.003|30   |28 |     |Olson   |
          ?+-----------------------+-----+---+-----+--------+
          ?|1970-01-01T00:00:00.004|40   |   |Pete |        |
          ?+-----------------------+-----+---+-----+--------+
          ?|1970-01-01T00:00:00.005|50   |32 |     |        |
          ?+-----------------------+-----+---+-----+--------+
          ?|1970-01-01T00:00:00.006|60   |   |     |Holloway|
          ?+-----------------------+-----+---+-----+--------+""".stripMargin('?')

        val tableBuilder = new ASCIITableBuilder(100)
        tableBuilder.tableFor(statementFor(input)) shouldBe Success(expected)
      }
    }
  }
} 
Example 177
Source File: CommandStatementSpec.scala    From NSDb   with Apache License 2.0 5 votes
package io.radicalbit.nsdb.sql.parser

import io.radicalbit.nsdb.common.statement._
import io.radicalbit.nsdb.sql.parser.StatementParserResult._
import org.scalatest.{Matchers, WordSpec}

class CommandStatementSpec extends WordSpec with Matchers {

  private val parser = new CommandStatementParser("db")

  "A Command parser instance" when {

    "receive the request to show the namespaces" should {
      "parse it successfully" in {
        val command = "show namespaces"
        parser.parse(None, command) should be(CommandStatementParserSuccess(command, ShowNamespaces))
      }
    }

    "receive the request to use a namespace" should {
      "parse it successfully" in {
        val command = "use registry"
        parser.parse(None, command) should be(CommandStatementParserSuccess(command, UseNamespace("registry")))
      }
    }

    "receive the request to show the metrics" should {
      "not parsing it without specifying a namespace" in {
        parser.parse(None, "show metrics") shouldBe a[CommandStatementParserFailure]
      }

      "parse it successfully specifying a namespace" in {
        val command = "show metrics"
        parser.parse(Some("registry"), command) should be(
          CommandStatementParserSuccess(command, ShowMetrics("db", "registry")))
      }
    }

    "receive the request to describe a metric" should {
      "not parsing it without specifying a namespace" in {
        parser.parse(None, "describe people") shouldBe a[CommandStatementParserFailure]
      }

      "not parsing it without specifying a metric" in {
        parser.parse(Some("registry"), "describe") shouldBe a[CommandStatementParserFailure]
      }

      "not parsing it without specifying a namespace and a metric" in {
        parser.parse(None, "describe") shouldBe a[CommandStatementParserFailure]
      }

      "parse it successfully specifying a namespace and a metric" in {
        val command = "describe people"
        parser.parse(Some("registry"), command) should be(
          CommandStatementParserSuccess(command, DescribeMetric("db", "registry", "people")))
      }
    }
  }
} 
Example 178
Source File: IndexerTest.scala    From ingraph   with Eclipse Public License 1.0 5 votes
package ingraph.ire

import ingraph.bulkloader.csv.data.{CsvEdge, CsvVertex}
import org.scalatest.WordSpec

import scala.collection.JavaConverters._
class IndexerTest extends WordSpec {
  val indexer = new Indexer()
  indexer.addVertex(new CsvVertex(1L, Map("age" -> 5).asJava),  Set("dog"))
  indexer.addVertex(new CsvVertex(2L, Map("age" -> 25).asJava), Set("person"))
  indexer.addVertex(new CsvVertex(3L, Map("age" -> 7).asJava),  Set("cat"))
  indexer.addEdge(new CsvEdge(2L, 4L, 1L), "person", "owns",  "dog"   )
  indexer.addEdge(new CsvEdge(3L, 5L, 2L), "cat",    "owns",  "person")
  indexer.addEdge(new CsvEdge(1L, 6L, 3L), "dog",    "hates", "cat"   )
  indexer.addEdge(new CsvEdge(3L, 7L, 1L), "cat",    "hates", "dog"   )
  indexer.addEdge(new CsvEdge(1L, 8L, 1L), "dog",    "eats",  "dog"   )
  indexer.addEdge(new CsvEdge(3L, 9L, 2L), "cat",    "hates", "person")

  "IngraphEdge" should {
    "reverse itself" in {
      val edge: IngraphEdge = indexer.edgesByType("eats").toSeq.head
      val inverse = edge.inverse()
      assert(edge.sourceVertex == inverse.targetVertex)
      assert(inverse.sourceVertex == edge.targetVertex)
    }
  }

  "Indexer" should {
    "return edges by type" in {
      assert(indexer.edgesByType("hates").map(_.id).toSet == Set(6, 7, 9))
      assert(indexer.edgesByType("eats").map(_.id).toSet == Set(8))
    }

    "can query all vertices" in {
      assert(indexer.vertices().map(_.id).toSet == Set(1, 2, 3))
    }

    "deleting edge removes both references" in {
      val v1 = IngraphVertex(1L, Set())
      val v2 = IngraphVertex(2L, Set())
      val e = IngraphEdge(3L, v1, v2, "broken")
      val indexer = new Indexer()
      indexer.addVertex(v1)
      indexer.addVertex(v2)
      indexer.addEdge(e)
      assert(v1.edgesOut.nonEmpty)
      assert(v2.edgesIn.nonEmpty)
      indexer.removeEdgeById(3L)
      assert(v1.edgesOut.isEmpty)
      assert(v2.edgesIn.isEmpty)
    }
  }
} 
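The first pair of assertions characterises inverse() completely: it swaps an edge's source and target vertices and leaves everything else intact. Sketched on a simplified edge type (hypothetical, not ingraph's real IngraphEdge):

// Sketch: inverting an edge exchanges its endpoints.
case class Edge(id: Long, sourceVertex: Long, targetVertex: Long, label: String) {
  def inverse(): Edge = copy(sourceVertex = targetVertex, targetVertex = sourceVertex)
}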
Example 179
Source File: SizingTest.scala    From ingraph   with Eclipse Public License 1.0 5 votes
package ingraph.ire.nodes

import akka.actor.{ActorRef, Props, actorRef2Scala}
import ingraph.ire.datatypes.{JoinCache, Tuple}
import ingraph.ire.engine.RelationalEngine
import ingraph.ire.inputs.InputTransactionFactory
import ingraph.ire.messages.{ChangeSet, Primary, Secondary, Terminator}
import ingraph.ire.nodes.binary.JoinNode
import ingraph.ire.nodes.unary.{ProductionNode, SelectionNode}
import ingraph.ire.util.SizeCounter
import ingraph.ire.util.TestUtil.{mask, tuple}
import ingraph.ire.messages.Terminator
import ingraph.ire.nodes.unary.SelectionNode
import org.scalatest.WordSpec
import org.scalatest.concurrent.TimeLimits

import scala.collection.mutable

class SizingTest extends WordSpec with TimeLimits {

  import ingraph.ire.util.Utils.conversions._
  class TestQuery1 extends RelationalEngine {
    override val production: ActorRef = system.actorOf(Props(new ProductionNode("TestQuery")))
    override val inputLookup: Map[String, (ChangeSet) => Unit] = Map(
      "testval" -> ((cs: ChangeSet) => { joiner ! Primary(cs); joiner ! Secondary(cs) })
    )
    override val terminator: Terminator = Terminator(Vector(forwarder ! _), production)
    val forwarder = newLocal(Props(new SelectionNode(production, a => true)))
    val joiner = newLocal(Props(new JoinNode(forwarder, 2, 2, mask(0), mask(0))), "joiner")
  }

  "SizeCounter" should {
    "count" in {
      val data = mutable.HashMap[Tuple, Int]()
      data(tuple(5, 6, 7)) = 8
      data(tuple(5, 6, 9)) = 10
      assert(SizeCounter.count(data.keys) == 6)
    }

    "count deeper" in {
      val data = new JoinCache
      data.addBinding(tuple(2, 3), tuple(3, 4))
      data.addBinding(tuple(2, 3), tuple(3, 5))
      data.addBinding(tuple(2, 3), tuple(3, 6))
      data.addBinding(tuple(3, 2), tuple(2, 5))
      assert(SizeCounter.countDeeper(data.values) == 8)
    }

    "measure size" in {
      val input = new InputTransactionFactory
      val query = new TestQuery1
      input.subscribe(query.inputLookup)
      val inputTransaction = input.newInputTransaction
      inputTransaction.add("testval", tuple(5, 5))
      inputTransaction.add("testval", tuple(5, 6))
      inputTransaction.add("testval", tuple(5, 7))
      inputTransaction.sendAll()
      assert(query.getCounts == 12)
    }
  }
} 
Example 180
Source File: IterableMultiMapTest.scala    From ingraph   with Eclipse Public License 1.0 5 votes
package ingraph.ire.nodes.collections

import ingraph.ire.util.IterableMultiMap
import org.scalatest.WordSpec

import scala.collection.mutable

class IterableMultiMapTest extends WordSpec {
  "IterableMultiMap.unzip" should {
    "return the right number of values" in {
      val map = new mutable.HashMap[Int, mutable.Set[Int]] with IterableMultiMap[Int, Int]
      map.addBinding(1, 3)
      map.addBinding(1, 4)
      map.addBinding(5, 6)
      val (keys, values) = map.multiUnzip
      assert(keys.size == 3)
      assert(values.size == 3)
    }
    "return the right values" in {
      val map = new mutable.HashMap[Int, mutable.Set[Int]] with IterableMultiMap[Int, Int]
      map.addBinding(1, 3)
      map.addBinding(1, 4)
      map.addBinding(5, 6)
      val (keys, values) = map.multiUnzip
      val checkMap = mutable.Map(
        1 -> mutable.Set(3, 4),
        5 -> mutable.Set(6)
      )
      (keys zip values).foreach(
        kv => checkMap(kv._1).remove(kv._2)
      )
      assert(checkMap == mutable.Map(
        1 -> mutable.Set.empty,
        5 -> mutable.Set.empty)
      )
    }
  }
} 
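Per the assertions, multiUnzip flattens every (key, valueSet) binding into parallel key and value sequences with one entry per bound value, so three bindings yield three keys and three values. A minimal sketch under that reading (hypothetical helper, not ingraph's trait):

import scala.collection.mutable

// Sketch: expand each key once per bound value, then unzip the resulting pairs.
def multiUnzip[K, V](map: mutable.Map[K, mutable.Set[V]]): (Seq[K], Seq[V]) =
  map.toSeq
    .flatMap { case (key, values) => values.toSeq.map(value => (key, value)) }
    .unzip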
Example 181
Source File: DataRowSpec.scala    From flink-elasticsearch-source-connector   with Apache License 2.0 5 votes
package com.mnubo.flink.streaming.connectors

import org.apache.flink.api.java.typeutils.TypeExtractor
import org.scalatest.{Matchers, WordSpec}

class DataRowSpec extends WordSpec with Matchers {
  "A data row" should {
    "not accept duplicate field names" in {
      an[IllegalArgumentException] shouldBe thrownBy(DataRow(Value(1, "age"), Value("abc", "age")))
      an[IllegalArgumentException] shouldBe thrownBy(DataRowTypeInfo(Seq("age", "age"), Seq(TypeExtractor.getForClass(classOf[String]), TypeExtractor.getForClass(classOf[String]))))
    }
    "not accept null arguments" in {
      an[IllegalArgumentException] shouldBe thrownBy(DataRow(Value(null, "some_field")))
      an[IllegalArgumentException] shouldBe thrownBy(DataRow(null))
    }
    "be equal to an other data row with the same values" in {
      DataRow(Value(1, "age"), Value("abc", "name")) shouldEqual DataRow(Value(1, "age"), Value("abc", "name"))
    }
    "hashCode should be equal to an other data row hashcode" in {
      DataRow(Value(1, "age"), Value("abc", "name")).hashCode shouldEqual DataRow(Value(1, "age"), Value("abc", "name")).hashCode
    }
    "not be equal to an other data row with different values" in {
      DataRow(Value(2, "age"), Value("abc", "name")) should not equal DataRow(Value(1, "age"), Value("abc", "name"))
    }
    "hashCode should not be equal to an other data row hashcode" in {
      DataRow(Value(2, "age"), Value("abc", "name")).hashCode should not equal DataRow(Value(1, "age"), Value("abc", "name")).hashCode
    }
    "allow to get values by index" in {
      val sut = DataRow(Value(2, "age"), Value("abc", "name"))

      sut[Int](0) shouldEqual 2
      sut[String](1) shouldEqual "abc"
    }
    "allow to get values by name" in {
      val sut = DataRow(Value(2, "age"), Value("abc", "name"))

      sut[Int]("age") shouldEqual 2
      sut[String]("name") shouldEqual "abc"
    }
    "implement Product" in {
      val sut = DataRow(Value(2, "age"), Value("abc", "name"))

      sut.productElement(0) shouldEqual 2
      sut.productElement(1) shouldEqual "abc"
      sut.productArity shouldEqual 2
      sut.canEqual("a string") shouldBe false
      sut.canEqual(null.asInstanceOf[DataRow]) shouldBe false
      sut.canEqual(DataRow(Value(1, "age"), Value("abc", "name"))) shouldBe true
    }
    "return a useful string representation" in {
      DataRow(Value(2, "age"), Value("abc", "name")).toString shouldEqual "DataRow(age=2, name=abc)"
    }
  }
} 
Example 182
Source File: GenericWordSpecSuite.scala    From Spark-2.3.1   with Apache License 2.0 5 votes
package org.apache.spark.sql.test

import org.scalatest.WordSpec

import org.apache.spark.sql.Dataset


class GenericWordSpecSuite extends WordSpec with SharedSparkSession {
  import testImplicits._

  private def ds = Seq((1, 1), (2, 1), (3, 2), (4, 2), (5, 3), (6, 3), (7, 4), (8, 4)).toDS

  "A Simple Dataset" when {
    "looked at as complete rows" should {
      "have the specified number of elements" in {
        assert(8 === ds.count)
      }
      "have the specified number of unique elements" in {
        assert(8 === ds.distinct.count)
      }
    }
    "refined to specific columns" should {
      "have the specified number of elements in each column" in {
        assert(8 === ds.select("_1").count)
        assert(8 === ds.select("_2").count)
      }
      "have the correct number of distinct elements in each column" in {
        assert(8 === ds.select("_1").distinct.count)
        assert(4 === ds.select("_2").distinct.count)
      }
    }
  }
} 
Example 183
Source File: SparkBasicTest.scala    From deequ   with Apache License 2.0 5 votes
package com.amazon.deequ

import org.scalatest.{Matchers, WordSpec}

class SparkBasicTest extends WordSpec with Matchers with SparkContextSpec {
  "check that initializing a spark context and a basic example works" in
    withSparkSession { sparkSession =>
      val sc = sparkSession.sparkContext
      val xs = sc.parallelize(1 to 100)
      val res = xs.sum()
      res should be(5050)
    }

  "check that monitoring spark session works" in
    withMonitorableSparkSession { (sparkSession, sparkMonitor) =>
      val sc = sparkSession.sparkContext
      val xs = sc.parallelize(1 to 100)

      (1 to 2).foreach { index =>
        val res = sparkMonitor.withMonitoringSession { stat =>
          val sum = xs.map(_ * index).sum()
          // Spark jobs are running in different monitoring sessions
          assert(stat.jobCount == 1)
          sum
        }
        res should be(5050 * index)
      }

      sparkMonitor.withMonitoringSession { stat =>
        (1 to 2).foreach { index =>
          xs.map(_ * index).sum()
        }
        // Spark jobs are running in the same monitoring session
        assert(stat.jobCount == 2)
      }
    }
} 
Example 184
Source File: UniquenessTest.scala    From deequ   with Apache License 2.0 5 votes
package com.amazon.deequ.analyzers

import com.amazon.deequ.SparkContextSpec
import com.amazon.deequ.analyzers.runners.AnalysisRunner
import com.amazon.deequ.metrics.DoubleMetric
import com.amazon.deequ.utils.FixtureSupport
import org.apache.spark.sql.{DataFrame, SparkSession}
import org.scalatest.{Matchers, WordSpec}

class UniquenessTest extends WordSpec with Matchers with SparkContextSpec with FixtureSupport {

  def uniquenessSampleData(sparkSession: SparkSession): DataFrame = {
    import sparkSession.implicits._

    // Example from https://github.com/awslabs/deequ/issues/178
    Seq(
      ("India", "Xavier House, 2nd Floor", "St. Peter Colony, Perry Road", "Bandra (West)"),
      ("India", "503 Godavari", "Sir Pochkhanwala Road", "Worli"),
      ("India", "4/4 Seema Society", "N Dutta Road, Four Bungalows", "Andheri"),
      ("India", "1001D Abhishek Apartments", "Juhu Versova Road", "Andheri"),
      ("India", "95, Hill Road", null, null),
      ("India", "90 Cuffe Parade", "Taj President Hotel", "Cuffe Parade"),
      ("India", "4, Seven PM", "Sir Pochkhanwala Rd", "Worli"),
      ("India", "1453 Sahar Road", null, null)
    )
      .toDF("Country", "Address Line 1", "Address Line 2", "Address Line 3")
  }

  "Uniqueness" should {

    "be correct for multiple fields" in withSparkSession { session =>

      val data = uniquenessSampleData(session)

      val stateStore = InMemoryStateProvider()

      val uniquenessA1 = Uniqueness("Address Line 1")
      val uniquenessA13 = Uniqueness(Seq("Address Line 1", "Address Line 2", "Address Line 3"))

      val analysis = Analysis(Seq(uniquenessA1, uniquenessA13))

      val result = AnalysisRunner.run(data, analysis, saveStatesWith = Some(stateStore))

      assert(result.metric(uniquenessA1).get.asInstanceOf[DoubleMetric].value.get == 1.0)
      assert(result.metric(uniquenessA13).get.asInstanceOf[DoubleMetric].value.get == 1.0)
    }
  }

  "Filtered Uniqueness" in withSparkSession { sparkSession =>
    import sparkSession.implicits._
    val df = Seq(
      ("1", "unique"),
      ("2", "unique"),
      ("3", "duplicate"),
      ("3", "duplicate"),
      ("4", "unique")
    ).toDF("value", "type")

    val stateStore = InMemoryStateProvider()

    val uniqueness = Uniqueness("value")
    val uniquenessWithFilter = Uniqueness(Seq("value"), Some("type = 'unique'"))

    val analysis = Analysis(Seq(uniqueness, uniquenessWithFilter))

    val result = AnalysisRunner.run(df, analysis, saveStatesWith = Some(stateStore))

    assert(result.metric(uniqueness).get.asInstanceOf[DoubleMetric].value.get == 0.6)
    assert(result.metric(uniquenessWithFilter).get.asInstanceOf[DoubleMetric].value.get == 1.0)
  }
} 
Example 185
Source File: BidirectionalTest.scala    From ScalNet   with Apache License 2.0 5 votes
package org.deeplearning4j.scalnet.layers.recurrent

import org.scalatest.{ Matchers, WordSpec }

class BidirectionalTest extends WordSpec with Matchers {

  "A Bidirectional wrapper layer" should {

    "compile to a DL4J Bidirectional wrapper layer with a LSTM" in {
      val bidirectionalLSTM = Bidirectional(LSTM(10, 100))
      val compiledLayer = bidirectionalLSTM.compile
      compiledLayer.isInstanceOf[org.deeplearning4j.nn.conf.layers.recurrent.Bidirectional] shouldBe true
    }

    "compile to a DL4J Bidirectional wrapper layer with a GravesLSTM" in {
      val bidirectionalLSTM = Bidirectional(GravesLSTM(10, 100))
      val compiledLayer = bidirectionalLSTM.compile
      compiledLayer.isInstanceOf[org.deeplearning4j.nn.conf.layers.recurrent.Bidirectional] shouldBe true
    }

  }
} 
Example 186
Source File: KLLDistanceTest.scala    From deequ   with Apache License 2.0 5 votes
package com.amazon.deequ.KLL

import com.amazon.deequ.SparkContextSpec
import com.amazon.deequ.analyzers.{Distance, QuantileNonSample}
import com.amazon.deequ.utils.FixtureSupport
import org.scalatest.{Matchers, WordSpec}

class KLLDistanceTest extends WordSpec with Matchers with SparkContextSpec
  with FixtureSupport{

  "KLL distance calculator should compute correct linf_simple" in {
    val sample1 = new QuantileNonSample[Double](4, 0.64)
    val sample2 = new QuantileNonSample[Double](4, 0.64)
    sample1.reconstruct(4, 0.64, Array(Array(1, 2, 3, 4)))
    sample2.reconstruct(4, 0.64, Array(Array(2, 3, 4, 5)))
    assert(Distance.numericalDistance(sample1, sample2, true) == 0.25)
  }

  "KLL distance calculator should compute correct linf_robust" in {
    val sample1 = new QuantileNonSample[Double](4, 0.64)
    val sample2 = new QuantileNonSample[Double](4, 0.64)
    sample1.reconstruct(4, 0.64, Array(Array(1, 2, 3, 4)))
    sample2.reconstruct(4, 0.64, Array(Array(2, 3, 4, 5)))
    assert(Distance.numericalDistance(sample1, sample2) == 0.0)
  }

  "Categorial distance should compute correct linf_simple" in {
    val sample1 = scala.collection.mutable.Map(
      "a" -> 10L, "b" -> 20L, "c" -> 25L, "d" -> 10L, "e" -> 5L)
    val sample2 = scala.collection.mutable.Map(
      "a" -> 11L, "b" -> 20L, "c" -> 25L, "d" -> 10L, "e" -> 10L)
    assert(Distance.categoricalDistance(sample1,
      sample2, true) == 0.06015037593984962)
  }

  "Categorial distance should compute correct linf_robust" in {
    val sample1 = scala.collection.mutable.Map(
      "a" -> 10L, "b" -> 20L, "c" -> 25L, "d" -> 10L, "e" -> 5L)
    val sample2 = scala.collection.mutable.Map(
      "a" -> 11L, "b" -> 20L, "c" -> 25L, "d" -> 10L, "e" -> 10L)
    assert(Distance.categoricalDistance(sample1, sample2) == 0.0)
  }

  "Categorial distance should compute correct linf_simple with different bin value" in {
    val sample1 = scala.collection.mutable.Map(
      "a" -> 10L, "b" -> 20L, "c" -> 25L, "d" -> 10L, "e" -> 5L)
    val sample2 = scala.collection.mutable.Map(
      "f" -> 11L, "a" -> 20L, "c" -> 25L, "d" -> 10L, "e" -> 10L)
    assert(Distance.categoricalDistance(sample1,
      sample2, true) == 0.2857142857142857)
  }

  "Categorial distance should compute correct linf_robust with different bin value" in {
    val sample1 = scala.collection.mutable.Map(
      "a" -> 10L, "b" -> 20L, "c" -> 25L, "d" -> 10L, "e" -> 5L)
    val sample2 = scala.collection.mutable.Map(
      "f" -> 11L, "a" -> 20L, "c" -> 25L, "d" -> 10L, "e" -> 10L)
    assert(Distance.categoricalDistance(sample1, sample2) == 0.0)
  }
} 
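The categorical assertions are consistent with an L-infinity distance over normalised histograms: divide each bin count by its histogram's total, then take the maximum absolute frequency difference over the union of bins, counting missing bins as zero. For the first pair the largest gap is bin "e": 10/76 - 5/70 ≈ 0.0602. A sketch of that computation, assuming this reading of linf_simple:

// Sketch: normalise both histograms and take the largest per-bin frequency gap.
def linfSimple(a: Map[String, Long], b: Map[String, Long]): Double = {
  val totalA = a.values.sum.toDouble
  val totalB = b.values.sum.toDouble
  (a.keySet ++ b.keySet).map { bin =>
    math.abs(a.getOrElse(bin, 0L) / totalA - b.getOrElse(bin, 0L) / totalB)
  }.max
}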
Example 187
Source File: AnomalyDetectorTest.scala    From deequ   with Apache License 2.0 5 votes
package com.amazon.deequ.anomalydetection

import org.scalamock.scalatest.MockFactory
import org.scalatest.{Matchers, PrivateMethodTester, WordSpec}


class AnomalyDetectorTest extends WordSpec with Matchers with MockFactory with PrivateMethodTester {
  private val fakeAnomalyDetector = stub[AnomalyDetectionStrategy]

  val aD = AnomalyDetector(fakeAnomalyDetector)
  val data = Seq((0L, -1.0), (1L, 2.0), (2L, 3.0), (3L, 0.5)).map { case (t, v) =>
    DataPoint[Double](t, Option(v))
  }

  "Anomaly Detector" should {

    "ignore missing values" in {
      val data = Seq(DataPoint[Double](0L, Option(1.0)), DataPoint[Double](1L, Option(2.0)),
        DataPoint[Double](2L, None), DataPoint[Double](3L, Option(1.0)))

      (fakeAnomalyDetector.detect _ when(Vector(1.0, 2.0, 1.0), (0, 3)))
        .returns(Seq((1, Anomaly(Option(2.0), 1.0))))

      val anomalyResult = aD.detectAnomaliesInHistory(data, (0L, 4L))

      assert(anomalyResult == DetectionResult(Seq((1L, Anomaly(Option(2.0), 1.0)))))
    }

    "only detect values in range" in {
      (fakeAnomalyDetector.detect _ when(Vector(-1.0, 2.0, 3.0, 0.5), (2, 4)))
        .returns(Seq((2, Anomaly(Option(3.0), 1.0))))

      val anomalyResult = aD.detectAnomaliesInHistory(data, (2L, 4L))

      assert(anomalyResult == DetectionResult(Seq((2L, Anomaly(Option(3.0), 1.0)))))
    }

    "throw an error when intervals are not ordered" in {
      intercept[IllegalArgumentException] {
        aD.detectAnomaliesInHistory(data, (4, 2))
      }
    }

    "treat ordered values with time gaps correctly" in {
      val data = (for (i <- 1 to 10) yield {
        (i.toLong * 200L) -> 5.0
      }).map { case (t, v) =>
        DataPoint[Double](t, Option(v))
      }

      (fakeAnomalyDetector.detect _ when(data.map(_.metricValue.get).toVector, (0, 2)))
        .returns (Seq((0, Anomaly(Option(5.0), 1.0)), (1, Anomaly(Option(5.0), 1.0))))

      val anomalyResult = aD.detectAnomaliesInHistory(data, (200L, 401L))

      assert(anomalyResult == DetectionResult(Seq((200L, Anomaly(Option(5.0), 1.0)),
        (400L, Anomaly(Option(5.0), 1.0)))))
    }

    "treat unordered values with time gaps correctly" in {
      val data = Seq((10L, -1.0), (25L, 2.0), (11L, 3.0), (0L, 0.5)).map { case (t, v) =>
        DataPoint[Double](t, Option(v))
      }

      (fakeAnomalyDetector.detect _ when(Vector(0.5, -1.0, 3.0, 2.0), (0, 4)))
        .returns(Seq((1, Anomaly(Option(-1.0), 1.0)), (2, Anomaly(Option(3.0), 1.0)),
          (3, Anomaly(Option(2.0), 1.0))))

      val anomalyResult = aD.detectAnomaliesInHistory(data)

      assert(anomalyResult == DetectionResult(Seq((10L, Anomaly(Option(-1.0), 1.0)),
        (11L, Anomaly(Option(3.0), 1.0)), (25L, Anomaly(Option(2.0), 1.0)))))
    }

    "treat unordered values without time gaps correctly" in {
      val data = Seq((1L, -1.0), (3L, 2.0), (2L, 3.0), (0L, 0.5)).map { case (t, v) =>
        DataPoint[Double](t, Option(v))
      }

      (fakeAnomalyDetector.detect _ when(Vector(0.5, -1.0, 3.0, 2.0), (0, 4)))
        .returns(Seq((1, Anomaly(Option(-1.0), 1.0)), (2, Anomaly(Option(3.0), 1.0)),
          (3, Anomaly(Option(2.0), 1.0))))

      val anomalyResult = aD.detectAnomaliesInHistory(data)

      assert(anomalyResult == DetectionResult(Seq((1L, Anomaly(Option(-1.0), 1.0)),
        (2L, Anomaly(Option(3.0), 1.0)), (3L, Anomaly(Option(2.0), 1.0)))))
    }

  }
} 
Example 188
Source File: RateOfChangeStrategyTest.scala    From deequ   with Apache License 2.0 5 votes
package com.amazon.deequ.anomalydetection

import org.scalatest.{Matchers, WordSpec}


class RateOfChangeStrategyTest extends WordSpec with Matchers {

  "RateOfChange Strategy" should {

    val strategy = RateOfChangeStrategy(Some(-2.0), Some(2.0))
    val data = (for (i <- 0 to 50) yield {
      if (i < 20 || i > 30) {
        1.0
      } else {
        if (i % 2 == 0) i else -i
      }
    }).toVector

    "detect all anomalies if no interval specified" in {
      val anomalyResult = strategy.detect(data)
      val expected = for (i <- 20 to 31) yield {
        (i, Anomaly(Option(data(i)), 1.0))
      }
      assert(anomalyResult == expected)
    }
  }
} 
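The constructed series makes the intent concrete: the values jump at indices 20 through 31, and exactly those indices are flagged. A sketch of a rate-of-change check consistent with the test (hypothetical helper, not deequ's strategy class):

// Sketch: flag index i whenever the step data(i) - data(i - 1) leaves the
// allowed [maxDecrease, maxIncrease] band.
def rateOfChangeAnomalies(data: Vector[Double],
                          maxDecrease: Double,
                          maxIncrease: Double): Seq[(Int, Double)] =
  (1 until data.length)
    .map(i => (i, data(i) - data(i - 1)))
    .collect { case (i, diff) if diff < maxDecrease || diff > maxIncrease => (i, data(i)) }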
Example 189
Source File: SimpleThresholdStrategyTest.scala    From deequ   with Apache License 2.0 5 votes
package com.amazon.deequ.anomalydetection

import org.scalatest.{Matchers, WordSpec}

class SimpleThresholdStrategyTest extends WordSpec with Matchers {

  "Simple Threshold Strategy" should {

    val strategy = SimpleThresholdStrategy(upperBound = 1.0)
    val data = Vector(-1.0, 2.0, 3.0, 0.5)
    val expected = Seq((1, Anomaly(Option(2.0), 1.0)), (2, Anomaly(Option(3.0), 1.0)))

    "detect values above threshold" in {
      val anomalyResult = strategy.detect(data, (0, 4))

      assert(anomalyResult == expected)
    }

    "detect all values without range specified" in {
      val anomalyResult = strategy.detect(data)

      assert(anomalyResult == expected)
    }

    "work fine with empty input" in {
      val emptySeries = Vector[Double]()
      val anomalyResult = strategy.detect(emptySeries)

      assert(anomalyResult == Seq[(Int, Anomaly)]())
    }

    "work with upper and lower threshold" in {
      val tS = SimpleThresholdStrategy(lowerBound = -0.5, upperBound = 1.0)
      val anomalyResult = tS.detect(data)

      assert(anomalyResult == Seq((0, Anomaly(Option(-1.0), 1.0)),
        (1, Anomaly(Option(2.0), 1.0)), (2, Anomaly(Option(3.0), 1.0))))
    }

    "throw an error when thresholds are not ordered " in {
      intercept[IllegalArgumentException] {
        val ts = SimpleThresholdStrategy(lowerBound = 2.0, upperBound = 1.0)
      }
    }

    "produce error message with correct value and bounds" in {
      val result = strategy.detect(data)

      result.foreach { case (_, anom) =>
        val (value, lowerBound, upperBound) =
          AnomalyDetectionTestUtils.firstThreeDoublesFromString(anom.detail.get)

        assert(anom.value.isDefined && value === anom.value.get)
        assert(value < lowerBound || value > upperBound)
      }
    }
  }
} 
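Taken together, these cases pin the strategy down: every value below the lower bound or above the upper bound is reported with its index, optionally restricted to a half-open index range. A compact sketch of that rule (illustrative, not deequ's class):

// Sketch: scan the slice [from, until) and keep (index, value) pairs that fall
// outside the configured bounds.
def thresholdAnomalies(data: Vector[Double],
                       lowerBound: Double = Double.MinValue,
                       upperBound: Double,
                       range: Option[(Int, Int)] = None): Seq[(Int, Double)] = {
  val (from, until) = range.getOrElse((0, data.length))
  data.zipWithIndex
    .slice(from, until)
    .collect { case (value, index) if value < lowerBound || value > upperBound => (index, value) }
}

// thresholdAnomalies(Vector(-1.0, 2.0, 3.0, 0.5), upperBound = 1.0)
//   == Vector((1, 2.0), (2, 3.0))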
Example 190
Source File: AnomalyDetectionTestUtilsTest.scala    From deequ   with Apache License 2.0 5 votes
package com.amazon.deequ.anomalydetection

import org.scalatest.{Matchers, WordSpec}

class AnomalyDetectionTestUtilsTest extends WordSpec with Matchers {

  "AnomalyDetectionTestUtilsTest" should {

    "throw an exception if no value found" in {
      intercept[IllegalArgumentException] {
        AnomalyDetectionTestUtils.firstDoubleFromString("noNumber")
      }
      intercept[IllegalArgumentException] {
        AnomalyDetectionTestUtils.firstThreeDoublesFromString("noNumber")
      }
    }

    "find first value" in {
      val str = "xx3.141yyu4.2"
      val value = AnomalyDetectionTestUtils.firstDoubleFromString(str)
      assert(value == 3.141)
    }

    "find all 3 values" in {
      val str = "In this 1 string are 3.000 values, not 42.01"

      val (first, second, third) = AnomalyDetectionTestUtils.firstThreeDoublesFromString(str)
      assert(first === 1)
      assert(second === 3.0)
      assert(third === 42.01)
    }
  }
} 
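The expectations imply a simple regex scan: the first decimal literal in the string is returned (so "xx3.141yyu4.2" yields 3.141), and a string without digits raises IllegalArgumentException. A sketch along those lines (hypothetical, not the utility's actual source):

// Sketch: find the first decimal number in the text, or fail loudly.
def firstDoubleFromString(text: String): Double = {
  val decimal = """[-+]?\d+(\.\d+)?""".r
  decimal.findFirstIn(text)
    .map(_.toDouble)
    .getOrElse(throw new IllegalArgumentException(s"No number found in '$text'"))
}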
Example 191
Source File: HistoryUtilsTest.scala    From deequ   with Apache License 2.0 5 votes
package com.amazon.deequ.anomalydetection

import com.amazon.deequ.metrics.{DoubleMetric, Entity}
import org.scalatest.{Matchers, WordSpec}

import scala.util.{Failure, Success}

class HistoryUtilsTest extends WordSpec with Matchers {

  "History Utils" should {
    val sampleException = new IllegalArgumentException()

    val noneMetric = None
    val metricWithNoValue = Some(DoubleMetric(Entity.Column, "metric-name", "instance-name",
      Failure(sampleException)))
    val metricWithValue = Some(DoubleMetric(Entity.Column, "metric-name", "instance-name",
      Success(50)))

    "extract optinal metric value" in {
      assert(HistoryUtils.extractMetricValue[Double](noneMetric).isEmpty)
      assert(HistoryUtils.extractMetricValue[Double](metricWithNoValue).isEmpty)
      assert(HistoryUtils.extractMetricValue[Double](metricWithValue).contains(50))

    }
    "extract optinal metric values" in {
      val metrics = Seq(0L -> noneMetric, 1L -> metricWithNoValue, 2L -> metricWithValue)
      assert(HistoryUtils.extractMetricValues[Double](metrics) == Seq(DataPoint[Double](0L, None),
        DataPoint[Double](1L, None), DataPoint[Double](2, Some(50))))
    }
  }
} 
Example 192
Source File: MetricsTests.scala    From deequ   with Apache License 2.0 5 votes
package com.amazon.deequ.metrics

import com.amazon.deequ.analyzers.DataTypeInstances
import org.scalatest.{Matchers, WordSpec}

import scala.util.{Failure, Success}


class MetricsTests extends WordSpec with Matchers {
  val sampleException = new IllegalArgumentException()
  "Double metric" should {
    "flatten and return itself" in {
      val metric = DoubleMetric(Entity.Column, "metric-name", "instance-name", Success(50))
      assert(metric.flatten() == List(metric))
    }

    "flatten in case of an error" in {
      val metric = DoubleMetric(Entity.Column, "metric-name", "instance-name",
        Failure(sampleException))
      assert(metric.flatten() == List(metric))
    }
  }

  "Histogram metric" should {
    "flatten matched and unmatched" in {
      val distribution = Distribution(
        Map("a" -> DistributionValue(6, 0.6), "b" -> DistributionValue(4, 0.4)), 2)

      val metric = HistogramMetric("instance-name", Success(distribution))

      val expected = Seq(
        DoubleMetric(Entity.Column, "Histogram.bins", "instance-name", Success(2)),
        DoubleMetric(Entity.Column, "Histogram.abs.a", "instance-name", Success(6)),
        DoubleMetric(Entity.Column, "Histogram.abs.b", "instance-name", Success(4)),
        DoubleMetric(Entity.Column, "Histogram.ratio.a", "instance-name", Success(0.6)),
        DoubleMetric(Entity.Column, "Histogram.ratio.b", "instance-name", Success(0.4))
      ).toSet
      assert(metric.flatten().toSet == expected)
    }

    "flatten matched and unmatched in case of an error" in {
      val metric = HistogramMetric("instance-name", Failure(sampleException))

      val expected = Seq(DoubleMetric(Entity.Column, "Histogram.bins", "instance-name",
        Failure(sampleException))).toSet
      assert(metric.flatten().toSet == expected)
    }
  }

} 
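The expected set shows the flattening scheme: one Histogram.bins metric for the bin count, plus one Histogram.abs.<key> and one Histogram.ratio.<key> metric per bin; on failure only the bins metric survives, carrying the exception. A sketch of the success path over plain (name, value) pairs (illustrative shape, not deequ's Metric types):

// Sketch: one pair for the bin count, then an absolute and a ratio pair per bin.
def flattenHistogram(binCount: Long,
                     bins: Map[String, (Long, Double)]): Seq[(String, Double)] =
  ("Histogram.bins" -> binCount.toDouble) +:
    bins.toSeq.flatMap { case (key, (absolute, ratio)) =>
      Seq(s"Histogram.abs.$key" -> absolute.toDouble, s"Histogram.ratio.$key" -> ratio)
    }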
Example 193
Source File: ExamplesTest.scala    From deequ   with Apache License 2.0 5 votes
package com.amazon.deequ.examples

import org.scalatest.WordSpec

class ExamplesTest extends WordSpec {

  "all examples" should {
    "run without errors" in {
      BasicExample.main(Array.empty)
      IncrementalMetricsExample.main(Array.empty)
      MetricsRepositoryExample.main(Array.empty)
      UpdateMetricsOnPartitionedDataExample.main(Array.empty)
      DataProfilingExample.main(Array.empty)
      AnomalyDetectionExample.main(Array.empty)
      ConstraintSuggestionExample.main(Array.empty)
    }
  }

} 
Example 194
Source File: FilterableCheckTest.scala    From deequ   with Apache License 2.0 5 votes
package com.amazon.deequ
package checks

import com.amazon.deequ.analyzers.{Completeness, Compliance}
import com.amazon.deequ.utils.FixtureSupport
import org.scalatest.{Matchers, WordSpec}

class FilterableCheckTest extends WordSpec with Matchers with SparkContextSpec with FixtureSupport {

  "Filterable checks" should {
    "build correctly" in {

      val check = Check(CheckLevel.Error, "someCheck")
        .isComplete("col1")
        .isComplete("col2").where("marketplace = 'EU'")
        .hasCompleteness("col3", _ >= 0.9).where("marketplace = 'NA'")
        .satisfies("someCol > 5", "const1")
        .satisfies("someCol > 10", "const2").where("marketplace = 'EU'")

      val completenessAnalyzers =
        check.requiredAnalyzers()
          .filter { _.isInstanceOf[Completeness] }
          .map { _.asInstanceOf[Completeness] }
          .toArray
          .sortBy { _.column }

      assert(completenessAnalyzers.length == 3)
      assert(completenessAnalyzers.head.where.isEmpty)
      assert(completenessAnalyzers(1).where.contains("marketplace = 'EU'"))
      assert(completenessAnalyzers(2).where.contains("marketplace = 'NA'"))

      val complianceAnalyzers =
        check.requiredAnalyzers()
          .filter { _.isInstanceOf[Compliance] }
          .map { _.asInstanceOf[Compliance] }
          .toArray
          .sortBy { _.instance }

      assert(complianceAnalyzers.length == 2)
      assert(complianceAnalyzers.head.where.isEmpty)
      assert(complianceAnalyzers(1).where.contains("marketplace = 'EU'"))
    }
  }

} 
Example 195
Source File: ColumnConditionTest.scala    From deequ   with Apache License 2.0 5 votes
package com.amazon.deequ.checks

import org.scalatest.WordSpec

class ColumnConditionTest extends WordSpec {

  "ColumnCondition" should {

    "return the correct isEachNotNull condition" in {
      assert(
        ColumnCondition.isEachNotNull(Seq("att1", "att2", "att3")) ==
        "(((att1 IS NOT NULL) AND (att2 IS NOT NULL)) AND (att3 IS NOT NULL))"
      )
    }

    "return the correct isAnyNotNull condition" in {
      assert(
        ColumnCondition.isAnyNotNull(Seq("att1", "att2", "att3")) ==
          "(((att1 IS NOT NULL) OR (att2 IS NOT NULL)) OR (att3 IS NOT NULL))"
      )
    }
  }

} 
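The expected strings reveal the construction: wrap each column in its own IS NOT NULL predicate, then left-reduce with the connective, adding parentheses at every step. A sketch that reproduces the asserted strings (illustrative, not deequ's source):

// Sketch: a left-associative reduce yields the nested parentheses seen above.
def combineNotNull(columns: Seq[String], connective: String): String =
  columns
    .map(column => s"($column IS NOT NULL)")
    .reduce((left, right) => s"($left $connective $right)")

// combineNotNull(Seq("att1", "att2", "att3"), "AND")
//   == "(((att1 IS NOT NULL) AND (att2 IS NOT NULL)) AND (att3 IS NOT NULL))"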
Example 196
Source File: UtilsTest.scala    From sbt-dynamodb   with MIT License 5 votes
package com.teambytes.sbt.dynamodb

import org.scalatest.mock.MockitoSugar
import org.scalatest.{MustMatchers, WordSpec}

class UtilsTest extends WordSpec with MustMatchers with MockitoSugar {

  "Utils" should {

    "extract PID correctly" in {
      val jpsOutput =
        """
          |16706 QuorumPeerMain
          |60405 Boot
          |59022 DynamoDBLocal.jar
          |60479 Jps
          |51449
        """.stripMargin

      Utils.extractDyanmoDBPid(jpsOutput) must equal(Some("59022"))
    }

  }

} 
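The fixture suggests the parsing rule: among the lines jps prints, find the one naming DynamoDBLocal.jar and take its leading token as the PID. A sketch of that extraction (hypothetical helper, not the plugin's Utils):

// Sketch: locate the DynamoDBLocal.jar line and split off the leading PID token.
def extractPid(jpsOutput: String): Option[String] =
  jpsOutput.split("\n")
    .map(_.trim)
    .find(_.endsWith("DynamoDBLocal.jar"))
    .map(_.split("\\s+").head)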
Example 197
Source File: PrometheusModuleSpec.scala    From play-prometheus-filters   with MIT License 5 votes
package com.github.stijndehaes.playprometheusfilters

import io.prometheus.client.{Collector, CollectorRegistry}
import org.scalatest.{BeforeAndAfter, MustMatchers, PrivateMethodTester, WordSpec}
import org.scalatestplus.play.guice.GuiceOneAppPerTest
import play.api.inject.guice.GuiceApplicationBuilder

class PrometheusModuleSpec extends WordSpec with MustMatchers with BeforeAndAfter with PrivateMethodTester with GuiceOneAppPerTest {

  before {
    // clearing registry before each test
    CollectorRegistry.defaultRegistry.clear()
  }

  "PrometheusModule" should {
    "register default exporters when enabled" in {
      // default enabled
      val app = new GuiceApplicationBuilder()
        .configure(PrometheusModule.defaultExportsKey -> true)
        .build()

      val collector = app.injector.instanceOf[CollectorRegistry]
      val collectors = PrivateMethod[java.util.HashSet[Collector]]('collectors)
      (collector invokePrivate collectors()).size must be > 0
    }

    "not register default exporters when disabled" in {
      // disable default exporters
      val app = new GuiceApplicationBuilder()
        .configure(PrometheusModule.defaultExportsKey -> false)
        .build()

      val collector = app.injector.instanceOf[CollectorRegistry]
      val collectors = PrivateMethod[java.util.HashSet[Collector]]('collectors)
      (collector invokePrivate collectors()).size must be (0)
    }
  }

  // Enriches CollectorRegistry with a helper that lists the names of all
  // currently registered metric families.
  implicit class RichCollectorRegistry(registry: CollectorRegistry) {

    def getExporterNames: Seq[String] = {
      val exportNames = collection.mutable.Buffer.empty[String]
      val mfs = registry.metricFamilySamples()
      while (mfs.hasMoreElements) {
        exportNames += mfs.nextElement().name
      }
      exportNames
    }
  }
} 
Example 198
Source File: Sepc.scala    From fusion-data   with Apache License 2.0 5 votes
package sample

import org.scalatest.{MustMatchers, WordSpec}

class SpecMultiJvmNode1 extends WordSpec with MustMatchers {
  "A node" should {
    "be able to say hello" in {
      val message = "Hello from node 1"
      message must be("Hello from node 1")
    }
  }
}

class SpecMultiJvmNode2 extends WordSpec with MustMatchers {
  "A node" should {
    "be able to say hello" in {
      val message = "Hello from node 2"
      message must be("Hello from node 2")
    }
  }
} 
Example 199
Source File: TraitReferenceSpec.scala    From vamp   with Apache License 2.0 5 votes
package io.vamp.model.artifact

import org.scalatest.prop.GeneratorDrivenPropertyChecks
import org.scalatest.{ Matchers, OptionValues, WordSpec }

class TraitReferenceSpec extends WordSpec with Matchers with OptionValues with GeneratorDrivenPropertyChecks {

  "TraitReference" should {
    "convert string to TraitReference via: referenceFor" in {
      forAll("cluster", "group", "name") { (cluster: String, group: String, name: String) ⇒
        whenever(!cluster.contains('.') && !group.contains('.') && !name.contains('.')) {
          val traitReferenceString = s"$cluster.$group.$name"
          TraitReference.referenceFor(traitReferenceString).value should be(TraitReference(cluster, group, name))
        }
      }
    }

    "referenceFor & toReference: referenceFor(x.toReference) should yield same result" in {
      forAll("cluster", "group", "name") { (cluster: String, group: String, name: String) ⇒
        whenever(!cluster.contains('.') && !group.contains('.') && !name.contains('.')) {
          TraitReference.referenceFor(TraitReference(cluster, group, name).reference).value should be(TraitReference(cluster, group, name))
        }
      }
    }
  }
} 
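The two properties characterise the reference format: exactly three dot-free segments joined by dots, with parsing the inverse of rendering. A sketch of a parser with that behaviour, returning a simplified tuple instead of vamp's TraitReference:

// Sketch: accept exactly cluster.group.name where no segment contains a dot.
val Reference = """([^.]*)\.([^.]*)\.([^.]*)""".r

def referenceFor(reference: String): Option[(String, String, String)] =
  reference match {
    case Reference(cluster, group, name) => Some((cluster, group, name))
    case _                               => None
  }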
Example 200
Source File: UnitSpec.scala    From sbt-coursier   with Apache License 2.0 5 votes
package t

import com.typesafe.config.ConfigFactory
import org.scalatest.{ MustMatchers, WordSpec }

class UnitSpec extends WordSpec with MustMatchers {
  def conf = ConfigFactory.defaultReference()

  "Config" should {
    "return Akka HTTP server provider" in {
      val serverProvider = conf.getString("play.server.provider")
      serverProvider mustBe "play.core.server.AkkaHttpServerProvider"
    }

    "be able to load Netty settings" in {
      val nettyTransport = conf.getString("play.server.netty.transport")
      nettyTransport mustBe "jdk"
    }
  }
}