org.scalatest.FunSuite Scala Examples

The following examples show how to use org.scalatest.FunSuite. Each example is taken from an open-source project; the originating project, source file, and license are noted in the header above each example.
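Before the project examples, here is a minimal FunSuite sketch (the package, class, and test names are illustrative and not taken from any of the projects below) showing the pattern all of the examples share: extend FunSuite, register each test with test("name") { ... }, and make assertions with assert or intercept inside the body.

package example

import org.scalatest.FunSuite

class ArithmeticSuite extends FunSuite {

  // A plain assertion; the test is registered under its name and run by the suite runner.
  test("addition of small integers") {
    assert(1 + 1 == 2)
  }

  // intercept succeeds only if the enclosed block throws the expected exception type.
  test("integer division by zero throws ArithmeticException") {
    val divisor = 0
    intercept[ArithmeticException] {
      1 / divisor
    }
  }
}

The projects below layer additional traits onto this same structure, for example Matchers for should-style assertions, BeforeAndAfterAll for suite-level setup and teardown, and MockitoSugar for mocks.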
Example 1
Source File: SparkPFASuiteBase.scala    From aardpfark   with Apache License 2.0
package com.ibm.aardpfark.pfa

import com.holdenkarau.spark.testing.DataFrameSuiteBase
import org.apache.spark.SparkConf
import org.apache.spark.ml.Transformer
import org.apache.spark.ml.linalg.Vector
import org.apache.spark.sql.catalyst.encoders.ExpressionEncoder
import org.apache.spark.sql.functions.udf
import org.apache.spark.sql.{DataFrame, Row, SparkSession}
import org.scalactic.Equality
import org.scalatest.FunSuite

abstract class SparkPFASuiteBase extends FunSuite with DataFrameSuiteBase with PFATestUtils {

  val sparkTransformer: Transformer
  val input: Array[String]
  val expectedOutput: Array[String]

  val sparkConf = new SparkConf().
    setMaster("local[*]").
    setAppName("test").
    set("spark.ui.enabled", "false").
    set("spark.app.id", appID).
    set("spark.driver.host", "localhost")
  override lazy val spark = SparkSession.builder().config(sparkConf).getOrCreate()
  override val reuseContextIfPossible = true

  // Converts column containing a vector to an array
  def withColumnAsArray(df: DataFrame, colName: String) = {
    val vecToArray = udf { v: Vector => v.toArray }
    df.withColumn(colName, vecToArray(df(colName)))
  }

  def withColumnAsArray(df: DataFrame, first: String, others: String*) = {
    val vecToArray = udf { v: Vector => v.toArray }
    var result = df.withColumn(first, vecToArray(df(first)))
    others.foreach(c => result = result.withColumn(c, vecToArray(df(c))))
    result
  }

  // Converts column containing a vector to a sparse vector represented as a map
  def getColumnAsSparseVectorMap(df: DataFrame, colName: String) = {
    val vecToMap = udf { v: Vector => v.toSparse.indices.map(i => (i.toString, v(i))).toMap }
    df.withColumn(colName, vecToMap(df(colName)))
  }

}

abstract class Result

object ApproxEquality extends ApproxEquality

trait ApproxEquality {

  import org.scalactic.Tolerance._
  import org.scalactic.TripleEquals._

  implicit val seqApproxEq: Equality[Seq[Double]] = new Equality[Seq[Double]] {
    override def areEqual(a: Seq[Double], b: Any): Boolean = {
      b match {
        case d: Seq[Double] =>
          a.zip(d).forall { case (l, r) => l === r +- 0.001 }
        case _ =>
          false
      }
    }
  }

  implicit val vectorApproxEq: Equality[Vector] = new Equality[Vector] {
    override def areEqual(a: Vector, b: Any): Boolean = {
      b match {
        case v: Vector =>
          a.toArray.zip(v.toArray).forall { case (l, r) => l === r +- 0.001 }
        case _ =>
          false
      }
    }
  }
} 
Example 2
Source File: TransformerTest.scala    From incubator-s2graph   with Apache License 2.0
package org.apache.s2graph.s2jobs.wal

import org.apache.s2graph.s2jobs.task.TaskConf
import org.apache.s2graph.s2jobs.wal.transformer._
import org.scalatest.{FunSuite, Matchers}
import play.api.libs.json.Json

class TransformerTest extends FunSuite with Matchers {
  val walLog = WalLog(1L, "insert", "edge", "a", "b", "s2graph", "friends", """{"name": 1, "url": "www.google.com"}""")

  test("test default transformer") {
    val taskConf = TaskConf.Empty
    val transformer = new DefaultTransformer(taskConf)
    val dimVals = transformer.toDimValLs(walLog, "name", "1")

    dimVals shouldBe Seq(DimVal("friends:name", "1"))
  }

  test("test ExtractDomain from URL") {
    val taskConf = TaskConf.Empty.copy(options =
      Map("urlDimensions" -> Json.toJson(Seq("url")).toString())
    )
    val transformer = new ExtractDomain(taskConf)
    val dimVals = transformer.toDimValLs(walLog, "url", "http://www.google.com/abc")

    dimVals shouldBe Seq(
      DimVal("host", "www.google.com"),
      DimVal("domain", "www.google.com"),
      DimVal("domain", "www.google.com/abc")
    )
  }
} 
Example 3
Source File: SqlUnitTest.scala    From SparkUnitTestingExamples   with Apache License 2.0
package com.cloudera.sa.spark.unittest.sql

import org.apache.spark.sql.Row
import org.apache.spark.sql.hive.HiveContext
import org.apache.spark.{SparkConf, SparkContext}
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, FunSuite}

import scala.collection.mutable

class SqlUnitTest extends FunSuite with BeforeAndAfterEach with BeforeAndAfterAll {

  @transient var sc: SparkContext = null
  @transient var hiveContext: HiveContext = null

  override def beforeAll(): Unit = {

    val envMap = Map[String,String](("Xmx", "512m"))

    val sparkConfig = new SparkConf()
    sparkConfig.set("spark.broadcast.compress", "false")
    sparkConfig.set("spark.shuffle.compress", "false")
    sparkConfig.set("spark.shuffle.spill.compress", "false")
    sparkConfig.set("spark.io.compression.codec", "lzf")
    sc = new SparkContext("local[2]", "unit test", sparkConfig)
    hiveContext = new HiveContext(sc)
  }

  override def afterAll(): Unit = {
    sc.stop()
  }

  test("Test table creation and summing of counts") {
    val personRDD = sc.parallelize(Seq(Row("ted", 42, "blue"),
      Row("tj", 11, "green"),
      Row("andrew", 9, "green")))

    hiveContext.sql("create table person (name string, age int, color string)")

    val emptyDataFrame = hiveContext.sql("select * from person limit 0")

    val personDataFrame = hiveContext.createDataFrame(personRDD, emptyDataFrame.schema)
    personDataFrame.registerTempTable("tempPerson")

    val ageSumDataFrame = hiveContext.sql("select sum(age) from tempPerson")

    val localAgeSum = ageSumDataFrame.take(10)

    assert(localAgeSum(0).get(0) == 62, "The sum of age should equal 62 but it equaled " + localAgeSum(0).get(0))
  }
} 
Example 4
Source File: OffsetLoaderTest.scala    From toketi-iothubreact   with MIT License
package com.microsoft.azure.iot.iothubreact.checkpointing

import com.microsoft.azure.iot.iothubreact.config.{IConfiguration, IConnectConfiguration}
import org.scalatest.FunSuite
import org.scalatest.mockito.MockitoSugar
import org.mockito.Mockito.when
import org.scalatest.Matchers._

class OffsetLoaderTest extends FunSuite with MockitoSugar {

  test("test GetSavedOffsets handles None appropriately") {

    val config = mock[IConfiguration]
    val cnConfig = mock[IConnectConfiguration]
    when(config.connect) thenReturn(cnConfig)
    when(cnConfig.iotHubPartitions) thenReturn(10)
    val loader = StubbedLoader(config)
    loader.GetSavedOffsets should be(Map(0 → "Offset 0", 1 → "Offset 1", 3 → "Offset 3"))
  }

  case class StubbedLoader(config: IConfiguration) extends OffsetLoader(config) {

    override private[iothubreact] def GetSavedOffset(partition: Int) = {
      partition match {
        case 0 ⇒ Some("Offset 0")
        case 1 ⇒ Some("Offset 1")
        case 3 ⇒ Some("Offset 3")
        case _ ⇒ None
      }
    }
  }

} 
Example 5
Source File: SchemaLoaderSuite.scala    From attic-nlp4l   with Apache License 2.0
package org.nlp4l.core

import org.apache.lucene.analysis.core.StopFilterFactory
import org.apache.lucene.analysis.custom.CustomAnalyzer
import org.scalatest.FunSuite

class SchemaLoaderSuite extends FunSuite {

  test("load valid schema") {
    val schema = SchemaLoader.load("org/nlp4l/core/testschema1.conf")
    assertResult("org.apache.lucene.analysis.core.KeywordAnalyzer")(schema.defaultAnalyzer.delegate.getClass.getName)

    assertResult(3)(schema.fieldTypes.size)

    assertResult(null)(schema.fieldTypes.get("fieldA").get.analyzer)
    assertResult(false)(schema.fieldTypes.get("fieldA").get.indexed)
    assertResult(false)(schema.fieldTypes.get("fieldA").get.stored)
    assertResult(false)(schema.fieldTypes.get("fieldA").get.termVectors)
    assertResult(false)(schema.fieldTypes.get("fieldA").get.termPositions)
    assertResult(false)(schema.fieldTypes.get("fieldA").get.termOffsets)


    assertResult("org.apache.lucene.analysis.custom.CustomAnalyzer")(schema.fieldTypes("fieldB").analyzer.delegate.getClass.getName)
    val myAnalyzer1 = schema.fieldTypes("fieldB").analyzer.delegate.asInstanceOf[CustomAnalyzer]
    assertResult("org.apache.lucene.analysis.standard.StandardTokenizerFactory")(myAnalyzer1.getTokenizerFactory.getClass.getName)
    assertResult("org.apache.lucene.analysis.core.StopFilterFactory")(myAnalyzer1.getTokenFilterFactories.get(0).getClass.getName)
    val stop = myAnalyzer1.getTokenFilterFactories.get(0).asInstanceOf[StopFilterFactory]
    assertResult(true)(stop.isIgnoreCase)
    assertResult(33)(stop.getStopWords.size)
    assertResult("org.apache.lucene.analysis.core.LowerCaseFilterFactory")(myAnalyzer1.getTokenFilterFactories.get(1).getClass.getName)
    assertResult(true)(schema.fieldTypes.get("fieldB").get.indexed)
    assertResult(true)(schema.fieldTypes.get("fieldB").get.stored)
    assertResult(true)(schema.fieldTypes.get("fieldB").get.termVectors)
    assertResult(true)(schema.fieldTypes.get("fieldB").get.termPositions)
    assertResult(true)(schema.fieldTypes.get("fieldB").get.termOffsets)

    assertResult("org.apache.lucene.analysis.custom.CustomAnalyzer")(schema.fieldTypes("fieldC").analyzer.delegate.getClass.getName)
    val myAnalyzer2 = schema.fieldTypes("fieldC").analyzer.delegate.asInstanceOf[CustomAnalyzer]
    assertResult("org.apache.lucene.analysis.ngram.NGramTokenizerFactory")(myAnalyzer2.getTokenizerFactory.getClass.getName)
  }

  test("except InvalidSchemaException when root path not found") {
    intercept[InvalidSchemaException] {
      val schema = SchemaLoader.load("org/nlp4l/core/testschema_invalid1.conf")
    }
  }

  test("except InvalidSchemaException when the path schema.defAnalyzer not found") {
    intercept[InvalidSchemaException] {
      val schema = SchemaLoader.load("org/nlp4l/core/testschema_invalid2.conf")
    }
  }

  test("except InvalidSchemaException when the path schema.fields not found") {
    intercept[InvalidSchemaException] {
      val schema = SchemaLoader.load("org/nlp4l/core/testschema_invalid3.conf")
    }
  }

  test("except InvalidSchemaException when schema.fields contains no elements") {
    intercept[InvalidSchemaException] {
      val schema = SchemaLoader.load("org/nlp4l/core/testschema_invalid4.conf")
    }
  }

  test("except InvalidSchemaException when the path schema.fields.[N].name not found") {
    intercept[InvalidSchemaException] {
      val schema = SchemaLoader.load("org/nlp4l/core/testschema_invalid5.conf")
    }
  }
} 
Example 6
Source File: SimpleFSTSuite.scala    From attic-nlp4l   with Apache License 2.0
package org.nlp4l.core

import org.scalatest.{BeforeAndAfterAll, FunSuite}

import scala.util.Try
import scalax.file.Path

class SimpleFSTSuite extends FunSuite with BeforeAndAfterAll {

  val SOURCE_DATA = List(
    "king", "to", "been", "the", "that", "with", "seem", "have", "alive", "fact", "peculiarities", "caution"
  )
  val r = new scala.util.Random(java.lang.System.currentTimeMillis())
  val DATA: Seq[(String, Long)] = SOURCE_DATA.map(s => (s, r.nextLong.abs))
  val sorted = DATA.sortBy(_._1)

  val fst = SimpleFST()

  private def deleteDir(dir: String): Unit = {
    val path = Path.fromString(dir)
    Try(path.deleteRecursively(continueOnFailure = false))
  }

  override def beforeAll {
    sorted.foreach{ k =>
      fst.addEntry(k._1, k._2)
    }
    fst.finish()
  }

  test("FST in memory random number test for leftMostSubstring"){
    DATA.foreach{ k =>
      val result = fst.leftMostSubstring(k._1, 0)
      assert(result.size == 1)
      assert(result.head._1 == k._1.length)
      assert(result.head._2 == k._2)
    }
  }

  test("FST in memory random number test for exactMatch"){
    DATA.foreach{ k =>
      val result = fst.exactMatch(k._1)
      assert(result === k._2)
    }
    assert(fst.exactMatch("notexist") == -1)
  }

  test("FST save/load with random number test"){
    val dir = "/tmp/testfst"

    fst.save(dir)

    val fst2 = SimpleFST()
    fst2.load(dir)

    DATA.foreach{ k =>
      val result = fst2.leftMostSubstring(k._1, 0)
      assert(result.size == 1)
      assert(result.head._1 == k._1.length)
      assert(result.head._2 == k._2)
    }

    deleteDir(dir)
  }
} 
Example 7
Source File: JsonTest.scala    From bosatsu   with Apache License 2.0
package org.bykn.bosatsu

import org.scalatest.prop.PropertyChecks.{forAll, PropertyCheckConfiguration }
import org.scalatest.FunSuite
import org.typelevel.jawn.ast.{JValue, JParser}

import GenJson._

class JsonJawnTest extends FunSuite {

  implicit val generatorDrivenConfig =
    PropertyCheckConfiguration(minSuccessful = 500)

  def matches(j1: Json, j2: JValue): Unit = {
    import Json._
    j1 match {
      case JString(str) => assert(j2.asString == str); ()
      case JNumberStr(nstr) => assert(BigDecimal(nstr) == j2.asBigDecimal); ()
      case JNull => assert(j2.isNull); ()
      case JBool(t) => assert(j2.asBoolean == t); ()
      case JArray(js) =>
        js.zipWithIndex.foreach { case (j, idx) =>
          matches(j, j2.get(idx))
        }
      case JObject(map) =>
        map.toMap.foreach { case (k, v) =>
          matches(v, j2.get(k))
        }
    }
  }

  test("Jawn can parse any of the json strings we generate") {
    forAll { (j: Json) =>
      val str = j.render
      val jvalue = JParser.parseUnsafe(str)
      matches(j, jvalue)
    }
  }
} 
Example 8
Source File: ValueTest.scala    From bosatsu   with Apache License 2.0
package org.bykn.bosatsu

import org.scalacheck.Gen
import org.scalatest.prop.PropertyChecks.{ forAll, PropertyCheckConfiguration }
import org.scalatest.FunSuite
import Value._

class ValueTest extends FunSuite {
  import GenValue.genValue

  implicit val generatorDrivenConfig =
    PropertyCheckConfiguration(minSuccessful = 500)

  test("SumValue.toString is what we expect") {
    forAll(Gen.choose(0, 1024), GenValue.genProd) { (i, p) =>
      assert(SumValue(i, p).toString == s"SumValue($i, $p)")
    }
  }

  test("Value.equals is false if the class isn't right") {
    forAll(genValue, genValue) { (v1, v2) =>
      if (v1.getClass != v2.getClass) assert(v1 != v2)
      else if (v1 == v2) assert(v1.getClass == v2.getClass)
    }
  }

  test("VOption works") {
    forAll(genValue) { v =>
      VOption.some(v) match {
        case VOption(Some(v1)) => assert(v1 == v)
        case other => fail(s"expected Some($v) got $other")
      }
    }

    forAll(genValue) { v =>
      VOption.unapply(v) match {
        case None => ()
        case Some(None) =>
          assert(v == VOption.none)
        case Some(Some(v1)) =>
          assert(v == VOption.some(v1))
      }
    }

    assert(VOption.unapply(VOption.none) == Some(None))
  }

  test("VList works") {
    forAll(Gen.listOf(genValue)) { vs =>
      VList(vs) match {
        case VList(vs1) => assert(vs1 == vs)
        case other => fail(s"expected VList($vs) got $other")
      }
    }

    forAll(genValue) { v =>
      VList.unapply(v) match {
        case None => ()
        case Some(Nil) =>
          assert(v == VList.VNil)
        case Some(v1) =>
          assert(v == VList(v1))
      }
    }

    assert(VList.unapply(VList.VNil) == Some(Nil))
  }
} 
Example 9
Source File: ValueToDocTest.scala    From bosatsu   with Apache License 2.0
package org.bykn.bosatsu

import org.scalatest.prop.PropertyChecks.{forAll, PropertyCheckConfiguration }
import org.scalatest.FunSuite

import rankn.{NTypeGen, Type}
import TestUtils.typeEnvOf

class ValueToDocTest extends FunSuite {

  implicit val generatorDrivenConfig =
    PropertyCheckConfiguration(minSuccessful = 1000)

  test("never throw when converting to doc") {
    val vd = ValueToDoc({ _ => None})

    forAll(NTypeGen.genPredefType, GenValue.genValue) { (t, v) =>
      vd.toDoc(t)(v)
      succeed
    }
  }

  test("some hand written cases round trip") {
    val te = typeEnvOf(PackageName.parts("Test"), """

struct MyUnit
# wrappers are removed
struct MyWrapper(item)
struct MyPair(fst, snd)

enum MyEither: L(left), R(right)

enum MyNat: Z, S(prev: MyNat)
""")
    val conv = ValueToDoc(te.toDefinedType(_))

    def stringToType(t: String): Type = {
      val tr = TypeRef.parser.parse(t) match {
        case fastparse.all.Parsed.Success(tr, l) if l == t.length => tr
        case other => sys.error(s"could not parse: $t, $other")
      }

      TypeRefConverter[cats.Id](tr) { cons =>
        te.referencedPackages.toList.flatMap { pack =>
          val const = Type.Const.Defined(pack, TypeName(cons))
          te.toDefinedType(const).map(_ => const)
        }
        .headOption
        .getOrElse(Type.Const.predef(cons.asString))
      }
    }

    def law(tpe: String, v: Value, str: String) = {
      val t = stringToType(tpe)
      val toDoc = conv.toDoc(t)

      toDoc(v) match {
        case Right(doc) => assert(doc.render(80) == str)
        case Left(err) => fail(s"could not handle to Value: $tpe, $v, $err")
      }
    }

    law("Int", Value.VInt(42), "42")
    law("String", Value.Str("hello world"), "'hello world'")
    law("MyUnit", Value.UnitValue, "MyUnit")
    law("MyWrapper[MyUnit]", Value.UnitValue, "MyWrapper { item: MyUnit }")
    law("MyWrapper[MyWrapper[MyUnit]]", Value.UnitValue, "MyWrapper { item: MyWrapper { item: MyUnit } }")
    law("MyPair[MyUnit, MyUnit]", Value.ProductValue.fromList(List(Value.UnitValue, Value.UnitValue)),
        "MyPair { fst: MyUnit, snd: MyUnit }")
    law("MyEither[MyUnit, MyUnit]", Value.SumValue(0, Value.ProductValue.fromList(List(Value.UnitValue))), "L { left: MyUnit }")
    law("MyEither[MyUnit, MyUnit]", Value.SumValue(1, Value.ProductValue.fromList(List(Value.UnitValue))), "R { right: MyUnit }")
  }
} 
Example 10
Source File: TypeRefTest.scala    From bosatsu   with Apache License 2.0
package org.bykn.bosatsu

import org.scalatest.FunSuite
import org.scalatest.prop.PropertyChecks.{ forAll, PropertyCheckConfiguration }
import org.bykn.bosatsu.rankn.Type

class TypeRefTest extends FunSuite {
  implicit val generatorDrivenConfig =
    //PropertyCheckConfiguration(minSuccessful = 500000)
    PropertyCheckConfiguration(minSuccessful = 5000)

  import Generators.{typeRefGen, shrinkTypeRef}

  test("Ordering is lawful") {
    forAll(typeRefGen, typeRefGen, typeRefGen) { (a, b, c) =>
      OrderingLaws.law(a, b, c)
    }
  }

  test("TypeRef -> Type -> TypeRef") {
    val pn = PackageName.parts("Test")

    forAll(typeRefGen) { tr =>
      val tpe = TypeRefConverter[cats.Id](tr) { c => Type.Const.Defined(pn, TypeName(c)) }
      val tr1 = TypeRef.fromTypes(Some(pn), tpe :: Nil)(tpe)
      assert(tr1 == tr.normalizeForAll)
    }
  }
} 
Example 11
Source File: Regressions.scala    From bosatsu   with Apache License 2.0
package org.bykn.bosatsu

import org.scalatest.FunSuite

class Regressions extends FunSuite {
  import TestUtils._

  test("test complex recursion case from #196") {
    evalFail(List("""
package Foo

struct Field(name: String, extract: a -> b)

def applyFields(fields, row):
  recur fields:
    (f, Some(s)):
      Field(_, fn) = f
      rec = applyFields(s, row)
      (fn(row), Some(rec))
    (f, None):
      Field(_, fn) = f
      (fn(row), None)

hlist = (Field("a", \x -> "a"), Some((Field("b", \x -> "b"), None)))
main = applyFields(hlist, 1)
"""), "Foo") { case PackageError.TypeErrorIn(_, _) => () }
  }
} 
Example 12
Source File: TypeRecursionCheckTest.scala    From bosatsu   with Apache License 2.0
package org.bykn.bosatsu

import cats.data.Validated
import org.bykn.bosatsu.rankn.TypeEnv
import org.scalatest.FunSuite

class TypeRecursionCheckTest extends FunSuite {

  def allowed(teStr: String) = {
    val te = TestUtils.typeEnvOf(PackageName.PredefName, teStr)
    VarianceFormula.solve(TypeEnv.empty, te.allDefinedTypes) match {
      case Left(errs) => fail(s"couldn't solve: $errs")
      case Right(teVar) =>
        assert(
          TypeRecursionCheck.checkLegitRecursion(TypeEnv.empty, teVar) ==
            Validated.valid(()))
    }
  }

  def disallowed(teStr: String) = {
    val te = TestUtils.typeEnvOf(PackageName.PredefName, teStr)
    VarianceFormula.solve(TypeEnv.empty, te.allDefinedTypes) match {
      case Left(errs) => fail(s"couldn't solve: $errs")
      case Right(teVar) =>
        assert(
          TypeRecursionCheck.checkLegitRecursion(TypeEnv.empty, teVar).isInvalid)
    }
  }

  test("linked list is allowed") {
    allowed("""#
enum Lst: E, N(head: a, tail: Lst[a])
""")
  }

  test("tree is allowed") {
    allowed("""#
enum Lst: E, N(head: a, tail: Lst[a])

struct Tree(root: a, children: Lst[Tree[a]])
""")
  }

  test("directory example is allowed") {
    allowed("""#
enum Lst: E, N(head: a, tail: Lst[a])

enum Path:
  Dir(name: String, children: Lst[Path])
  File(name: String, content: String)
""")
  }

  test("cont is allowed with Tree") {
    allowed("""#
struct Cont(fn: (a -> b) -> b)

struct Tree(root: a, children: Cont[Tree[a], b])
""")

    disallowed("""#
struct ICont(fn: (a -> a) -> a)

struct Tree(root: a, children: ICont[Tree[a]])
""")
  }

  test("y-combinator type is disallowed") {
    disallowed("""#
struct W(fn: W[a, b] -> a -> b)
""")
  }

  test("mutual recursion is (currently) completely unallowed") {
    disallowed("""#
struct Foo(bar: Bar)
struct Bar(foo: Foo)
""")
  }

  test("recursion with type constructors is disallowed") {
    disallowed("""#
struct Tree(root: a, children: f[Tree[a]])
""")
  }
} 
Example 13
Source File: FreeVarTest.scala    From bosatsu   with Apache License 2.0
package org.bykn.bosatsu

import org.scalatest.FunSuite
import org.scalatest.prop.PropertyChecks.{ forAll, PropertyCheckConfiguration }

import fastparse.all._

class FreeVarTest extends FunSuite {
  implicit val generatorDrivenConfig =
    PropertyCheckConfiguration(minSuccessful = 1000)
    //PropertyCheckConfiguration(minSuccessful = 300)
    //PropertyCheckConfiguration(minSuccessful = 5)

  def assertFreeVars(stmt: String, vars: List[String]) =
    Statement.parser.parse(stmt) match {
      case Parsed.Success(t, idx) =>
        assert(idx == stmt.length)

        val found = Statement.valuesOf(t).flatMap(_.freeVars).toList.sorted
        assert(found == vars.sorted.map(Identifier.Name(_)))
      case Parsed.Failure(exp, idx, extra) =>
        fail(s"failed to parse: $stmt: $exp at $idx with trace: ${extra.traced.trace}")
    }

  test("freeVar examples") {
    assertFreeVars("""x = y""", List("y"))
    assertFreeVars("""y = 1""", Nil)
    assertFreeVars("""external def foo -> Int""", Nil)
    assertFreeVars("""def foo(x): y""", List("y"))
    assertFreeVars("""def foo(x):
  y = x
  y""", Nil)
  }

  test("freeVars is a subset of allNames") {
    forAll(Generators.genStatement(3)) { stmt =>
      Statement.valuesOf(stmt :: Nil)
        .foreach { v =>
          assert(v.freeVars.subsetOf(v.allNames))
        }
    }
  }
} 
Example 14
Source File: SparkRecoverPartitionsCustomTest.scala    From m3d-engine   with Apache License 2.0
package com.adidas.analytics.unit

import com.adidas.analytics.util.SparkRecoverPartitionsCustom
import com.adidas.utils.SparkSessionWrapper
import org.apache.spark.sql.catalyst.encoders.RowEncoder
import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}
import org.apache.spark.sql.{Dataset, Row}
import org.scalatest.{BeforeAndAfterAll, FunSuite, Matchers, PrivateMethodTester}

import scala.collection.JavaConverters._

class SparkRecoverPartitionsCustomTest extends FunSuite
  with SparkSessionWrapper
  with PrivateMethodTester
  with Matchers
  with BeforeAndAfterAll{

  test("test conversion of String Value to HiveQL Partition Parameter") {
    val customSparkRecoverPartitions = SparkRecoverPartitionsCustom(tableName="", targetPartitions = Seq())
    val createParameterValue = PrivateMethod[String]('createParameterValue)
    val result = customSparkRecoverPartitions invokePrivate createParameterValue("theValue")

    result should be("'theValue'")
  }

  test("test conversion of Short Value to HiveQL Partition Parameter") {
    val customSparkRecoverPartitions = SparkRecoverPartitionsCustom(tableName="", targetPartitions = Seq())
    val createParameterValue = PrivateMethod[String]('createParameterValue)
    val result = customSparkRecoverPartitions invokePrivate createParameterValue(java.lang.Short.valueOf("2"))

    result should be("2")
  }

  test("test conversion of Integer Value to HiveQL Partition Parameter") {
    val customSparkRecoverPartitions = SparkRecoverPartitionsCustom(tableName="", targetPartitions = Seq())
    val createParameterValue = PrivateMethod[String]('createParameterValue)
    val result = customSparkRecoverPartitions invokePrivate createParameterValue(java.lang.Integer.valueOf("4"))

    result should be("4")
  }

  test("test conversion of null Value to HiveQL Partition Parameter") {
    val customSparkRecoverPartitions = SparkRecoverPartitionsCustom(tableName="", targetPartitions = Seq())
    val createParameterValue = PrivateMethod[String]('createParameterValue)
    an [Exception] should be thrownBy {
      customSparkRecoverPartitions invokePrivate createParameterValue(null)
    }
  }

  test("test conversion of not supported Value to HiveQL Partition Parameter") {
    val customSparkRecoverPartitions = SparkRecoverPartitionsCustom(tableName="", targetPartitions = Seq())
    val createParameterValue = PrivateMethod[String]('createParameterValue)
    an [Exception] should be thrownBy {
      customSparkRecoverPartitions invokePrivate createParameterValue(false)
    }
  }

  test("test HiveQL statements Generation") {
    val customSparkRecoverPartitions = SparkRecoverPartitionsCustom(
      tableName="test",
      targetPartitions = Seq("country","district")
    )

    val rowsInput = Seq(
      Row(1, "portugal", "porto"),
      Row(2, "germany", "herzogenaurach"),
      Row(3, "portugal", "coimbra")
    )

    val inputSchema = StructType(
      List(
        StructField("number", IntegerType, nullable = true),
        StructField("country", StringType, nullable = true),
        StructField("district", StringType, nullable = true)
      )
    )

    val expectedStatements: Seq[String] = Seq(
      "ALTER TABLE test ADD IF NOT EXISTS PARTITION(country='portugal',district='porto')",
      "ALTER TABLE test ADD IF NOT EXISTS PARTITION(country='germany',district='herzogenaurach')",
      "ALTER TABLE test ADD IF NOT EXISTS PARTITION(country='portugal',district='coimbra')"
    )

    val testDataset: Dataset[Row] = spark.createDataset(rowsInput)(RowEncoder(inputSchema))

    val createParameterValue = PrivateMethod[Dataset[String]]('generateAddPartitionStatements)

    val producedStatements: Seq[String] = (customSparkRecoverPartitions invokePrivate createParameterValue(testDataset))
      .collectAsList()
      .asScala

    expectedStatements.sorted.toSet should equal(producedStatements.sorted.toSet)
  }

  override def afterAll(): Unit = {
    spark.stop()
  }

} 
Example 15
Source File: RecoverPartitionsCustomTest.scala    From m3d-engine   with Apache License 2.0
package com.adidas.analytics.unit

import com.adidas.analytics.util.RecoverPartitionsCustom
import com.adidas.utils.SparkSessionWrapper
import org.apache.spark.sql.catalyst.encoders.RowEncoder
import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}
import org.apache.spark.sql.{Dataset, Row}
import org.scalatest.{BeforeAndAfterAll, FunSuite, Matchers, PrivateMethodTester}

import scala.collection.JavaConverters._

class RecoverPartitionsCustomTest extends FunSuite
  with SparkSessionWrapper
  with PrivateMethodTester
  with Matchers
  with BeforeAndAfterAll{

  test("test conversion of String Value to HiveQL Partition Parameter") {
    val customSparkRecoverPartitions = RecoverPartitionsCustom(tableName="", targetPartitions = Seq())
    val createParameterValue = PrivateMethod[String]('createParameterValue)
    val result = customSparkRecoverPartitions invokePrivate createParameterValue("theValue")

    result should be("'theValue'")
  }

  test("test conversion of Short Value to HiveQL Partition Parameter") {
    val customSparkRecoverPartitions = RecoverPartitionsCustom(tableName="", targetPartitions = Seq())
    val createParameterValue = PrivateMethod[String]('createParameterValue)
    val result = customSparkRecoverPartitions invokePrivate createParameterValue(java.lang.Short.valueOf("2"))

    result should be("2")
  }

  test("test conversion of Integer Value to HiveQL Partition Parameter") {
    val customSparkRecoverPartitions = RecoverPartitionsCustom(tableName="", targetPartitions = Seq())
    val createParameterValue = PrivateMethod[String]('createParameterValue)
    val result = customSparkRecoverPartitions invokePrivate createParameterValue(java.lang.Integer.valueOf("4"))

    result should be("4")
  }

  test("test conversion of null Value to HiveQL Partition Parameter") {
    val customSparkRecoverPartitions = RecoverPartitionsCustom(tableName="", targetPartitions = Seq())
    val createParameterValue = PrivateMethod[String]('createParameterValue)
    an [Exception] should be thrownBy {
      customSparkRecoverPartitions invokePrivate createParameterValue(null)
    }
  }

  test("test conversion of not supported Value to HiveQL Partition Parameter") {
    val customSparkRecoverPartitions = RecoverPartitionsCustom(tableName="", targetPartitions = Seq())
    val createParameterValue = PrivateMethod[String]('createParameterValue)
    an [Exception] should be thrownBy {
      customSparkRecoverPartitions invokePrivate createParameterValue(false)
    }
  }

  test("test HiveQL statements Generation") {
    val customSparkRecoverPartitions = RecoverPartitionsCustom(
      tableName="test",
      targetPartitions = Seq("country","district")
    )

    val rowsInput = Seq(
      Row(1, "portugal", "porto"),
      Row(2, "germany", "herzogenaurach"),
      Row(3, "portugal", "coimbra")
    )

    val inputSchema = StructType(
      List(
        StructField("number", IntegerType, nullable = true),
        StructField("country", StringType, nullable = true),
        StructField("district", StringType, nullable = true)
      )
    )

    val expectedStatements: Seq[String] = Seq(
      "ALTER TABLE test ADD IF NOT EXISTS PARTITION(country='portugal',district='porto')",
      "ALTER TABLE test ADD IF NOT EXISTS PARTITION(country='germany',district='herzogenaurach')",
      "ALTER TABLE test ADD IF NOT EXISTS PARTITION(country='portugal',district='coimbra')"
    )

    val testDataset: Dataset[Row] = spark.createDataset(rowsInput)(RowEncoder(inputSchema))

    val createParameterValue = PrivateMethod[Dataset[String]]('generateAddPartitionStatements)

    val producedStatements: Seq[String] = (customSparkRecoverPartitions invokePrivate createParameterValue(testDataset))
      .collectAsList()
      .asScala

    expectedStatements.sorted.toSet should equal(producedStatements.sorted.toSet)
  }

  override def afterAll(): Unit = {
    spark.stop()
  }

} 
Example 16
Source File: InstanceStoppingSpec.scala    From sbt-docker-compose   with BSD 3-Clause "New" or "Revised" License
import com.tapad.docker.{ DockerComposePluginLocal, RunningInstanceInfo }
import org.mockito.Matchers._
import org.mockito.Mockito._
import org.scalatest.{ BeforeAndAfter, FunSuite, OneInstancePerTest }

class InstanceStoppingSpec extends FunSuite with BeforeAndAfter with OneInstancePerTest with MockHelpers {
  test("Validate the proper stopping of a single instance when only one instance is running and no specific instances are passed in as arguments") {
    val instanceId = "instanceId"
    val composePath = "path"
    val serviceName = "service"
    val composeMock = spy(new DockerComposePluginLocal)
    val instance = RunningInstanceInfo(instanceId, serviceName, composePath, List.empty)

    mockDockerCommandCalls(composeMock)
    mockSystemSettings(composeMock, serviceName, Some(List(instance)))

    composeMock.stopRunningInstances(null, Seq.empty)

    //Validate that the instance was stopped and cleaned up
    verify(composeMock, times(1)).dockerComposeStopInstance(instanceId, composePath)
    verify(composeMock, times(1)).dockerComposeRemoveContainers(instanceId, composePath)
  }

  test("Validate the proper stopping of a multiple instances when no specific instances are passed in as arguments") {
    val instanceId = "instanceId"
    val composePath = "path"
    val serviceName = "service"
    val composeMock = spy(new DockerComposePluginLocal)
    val instance = RunningInstanceInfo(instanceId, serviceName, composePath, List.empty)
    val instance2 = RunningInstanceInfo("instanceId2", serviceName, composePath, List.empty)

    mockDockerCommandCalls(composeMock)
    mockSystemSettings(composeMock, serviceName, Some(List(instance, instance2)))

    composeMock.stopRunningInstances(null, Seq.empty)

    //Validate that the instance was stopped and cleaned up
    verify(composeMock, times(2)).dockerComposeStopInstance(anyString, anyString)
    verify(composeMock, times(2)).dockerComposeRemoveContainers(anyString, anyString)
  }

  test("Validate the proper stopping of a single instance when multiple instances are running") {
    val instanceIdStop = "instanceIdStop"
    val instanceIdKeep = "instanceIdKeep"
    val serviceName = "service"
    val composePath = "path"
    val composeMock = spy(new DockerComposePluginLocal)
    val instanceStop = RunningInstanceInfo(instanceIdStop, serviceName, composePath, List.empty)
    val instanceKeep = RunningInstanceInfo(instanceIdKeep, serviceName, composePath, List.empty)

    mockDockerCommandCalls(composeMock)
    mockSystemSettings(composeMock, serviceName, Some(List(instanceStop, instanceKeep)))

    composeMock.stopRunningInstances(null, Seq(instanceIdStop))

    //Validate that only once instance was Stopped and Removed
    verify(composeMock, times(1)).setAttribute(any, any)(any[sbt.State])
    verify(composeMock, times(1)).dockerComposeStopInstance(anyString, anyString)
    verify(composeMock, times(1)).dockerComposeRemoveContainers(anyString, anyString)
  }

  test("Validate that only instances from the current SBT project are stopped when no arguments are supplied to DockerComposeStop") {
    val composeMock = spy(new DockerComposePluginLocal)
    val serviceName = "matchingservice"
    val instance1 = RunningInstanceInfo("instanceName1", serviceName, "path", List.empty)
    val instance2 = RunningInstanceInfo("instanceName2", serviceName, "path", List.empty)
    val instance3 = RunningInstanceInfo("instanceName3", "nonSbtProjectService", "path", List.empty)

    mockDockerCommandCalls(composeMock)
    mockSystemSettings(composeMock, serviceName, Some(List(instance1, instance2, instance3)))

    composeMock.stopRunningInstances(null, Seq.empty)

    //Validate that only once instance was Stopped and Removed
    verify(composeMock, times(1)).setAttribute(any, any)(any[sbt.State])
    verify(composeMock, times(2)).dockerComposeStopInstance(anyString, anyString)
    verify(composeMock, times(2)).dockerComposeRemoveContainers(anyString, anyString)
  }

  test("Validate that instances from any SBT project can be stopped when explicitly passed to DockerComposeStop") {
    val composeMock = spy(new DockerComposePluginLocal)
    val serviceName = "matchingservice"
    val instance1 = RunningInstanceInfo("instanceName1", serviceName, "path", List.empty)
    val instance2 = RunningInstanceInfo("instanceName2", serviceName, "path", List.empty)
    val instance3 = RunningInstanceInfo("instanceName3", "nonSbtProjectService", "path", List.empty)

    mockDockerCommandCalls(composeMock)
    mockSystemSettings(composeMock, serviceName, Some(List(instance1, instance2, instance3)))

    composeMock.stopRunningInstances(null, Seq("instanceName3"))

    //Validate that only once instance was Stopped and Removed
    verify(composeMock, times(1)).setAttribute(any, any)(any[sbt.State])
    verify(composeMock, times(1)).dockerComposeStopInstance(anyString, anyString)
    verify(composeMock, times(1)).dockerComposeRemoveContainers(anyString, anyString)
  }
} 
Example 17
Source File: VersionSpec.scala    From sbt-docker-compose   with BSD 3-Clause "New" or "Revised" License
import com.tapad.docker.Version
import org.scalatest.{ BeforeAndAfter, FunSuite, OneInstancePerTest }

class VersionSpec extends FunSuite with BeforeAndAfter with OneInstancePerTest with MockHelpers {
  test("Validate version information is parsed correctly") {
    assert(Version.parseVersion("1.0.0") == Version(1, 0, 0))
    assert(Version.parseVersion("11.1.1") == Version(11, 1, 1))
    assert(Version.parseVersion("1.0.0-SNAPSHOT") == Version(1, 0, 0))
    assert(Version.parseVersion("1.2.3") == Version(1, 2, 3))
    assert(Version.parseVersion("1.2.3-rc3") == Version(1, 2, 3))
    assert(Version.parseVersion("1.2.3rc3") == Version(1, 2, 3))
  }

  test("Validate invalid version information reports an exception") {
    intercept[RuntimeException] {
      Version.parseVersion("")
    }

    intercept[RuntimeException] {
      Version.parseVersion("1.0")
    }

    intercept[RuntimeException] {
      Version.parseVersion("-1.0")
    }

    intercept[RuntimeException] {
      Version.parseVersion("version")
    }
  }
} 
Example 18
Source File: ComposeInstancesSpec.scala    From sbt-docker-compose   with BSD 3-Clause "New" or "Revised" License
import sbt._
import com.tapad.docker.{ DockerComposePluginLocal, RunningInstanceInfo, Version }
import org.mockito.Matchers._
import org.mockito.Mockito._
import org.scalatest.{ BeforeAndAfter, FunSuite, OneInstancePerTest }

class ComposeInstancesSpec extends FunSuite with BeforeAndAfter with OneInstancePerTest with MockHelpers {
  test("Validate that no instances are printed when none are running") {
    val composeMock = spy(new DockerComposePluginLocal)
    val serviceName = "matchingservice"

    mockDockerCommandCalls(composeMock)
    mockSystemSettings(composeMock, serviceName, None)

    composeMock.printDockerComposeInstances(null, null)

    verify(composeMock, times(0)).printMappedPortInformation(any[State], any[RunningInstanceInfo], any[Version])
  }

  test("Validate that multiple instances across sbt projects are printed when they are running") {
    val composeMock = spy(new DockerComposePluginLocal)
    val serviceName = "matchingservice"
    val instance1 = RunningInstanceInfo("instanceName1", serviceName, "path", List.empty)
    val instance2 = RunningInstanceInfo("instanceName2", serviceName, "path", List.empty)
    val instance3 = RunningInstanceInfo("instanceName3", "nonSbtProjectService", "path", List.empty)

    mockDockerCommandCalls(composeMock)
    mockSystemSettings(composeMock, serviceName, Some(List(instance1, instance2, instance3)))

    composeMock.printDockerComposeInstances(null, null)

    verify(composeMock, times(3)).printMappedPortInformation(any[State], any[RunningInstanceInfo], any[Version])
  }
} 
Example 19
Source File: TagProcessingSpec.scala    From sbt-docker-compose   with BSD 3-Clause "New" or "Revised" License
import com.tapad.docker.DockerComposePlugin._
import org.scalatest.{ BeforeAndAfter, FunSuite, OneInstancePerTest }

class TagProcessingSpec extends FunSuite with BeforeAndAfter with OneInstancePerTest {

  val imageNoTag = "testImage"
  val imageLatestTag = "testImage:latest"
  val imageWithTag = "testImage:tag"
  val imagePrivateRegistryNoTag = "registry/testImage"
  val imagePrivateRegistryWithLatest = "registry/testImage:latest"
  val imagePrivateRegistryWithTag = "registry/testImage:tag"
  val imagePrivateRegistryWithOrgNoTag = "registry/org/testImage"
  val imagePrivateRegistryWithOrgWithTag = "registry/org/testImage:tag"
  val imageCustomTag = "testImage<localbuild>"
  val imageTagAndCustomTag = "testImage:latest<localbuild>"

  // Boundary
  val badImageWithColon = "testImage:"
  val badImageWithMultipleColon = "testImage:fooImage:latest"
  val badImageWithOnlyColon = ":::::::"

  test("Validate various image tag formats are properly replaced") {
    val replacementTag = "replaceTag"
    assert(replaceDefinedVersionTag(imageNoTag, replacementTag) == imageNoTag)

    assert(replaceDefinedVersionTag(imageLatestTag, replacementTag) == imageLatestTag)

    assert(replaceDefinedVersionTag(imageWithTag, replacementTag) == s"testImage:$replacementTag")

    assert(replaceDefinedVersionTag(imagePrivateRegistryNoTag, replacementTag) == imagePrivateRegistryNoTag)

    assert(replaceDefinedVersionTag(imagePrivateRegistryWithLatest, replacementTag) == imagePrivateRegistryWithLatest)

    assert(replaceDefinedVersionTag(imagePrivateRegistryWithTag, replacementTag) == s"registry/testImage:$replacementTag")
  }

  test("Validate image tag retrieval from various formats") {
    assert(getTagFromImage(imageNoTag) == "latest")

    assert(getTagFromImage(imageLatestTag) == "latest")

    assert(getTagFromImage(imageWithTag) == "tag")

    assert(getTagFromImage(imagePrivateRegistryNoTag) == "latest")

    assert(getTagFromImage(imagePrivateRegistryWithLatest) == "latest")

    assert(getTagFromImage(imagePrivateRegistryWithTag) == "tag")
  }

  test("Validate custom tags get removed") {
    assert(processImageTag(null, null, imageCustomTag) == "testImage")
    assert(processImageTag(null, null, imageTagAndCustomTag) == "testImage:latest")
  }

  test("Validate the removal of a tag from various image formats") {
    assert(getImageNameOnly(imageNoTag) == imageNoTag)
    assert(getImageNameOnly(imageLatestTag) == "testImage")
    assert(getImageNameOnly(imagePrivateRegistryNoTag) == "testImage")
    assert(getImageNameOnly(imagePrivateRegistryWithLatest) == "testImage")
    assert(getImageNameOnly(imagePrivateRegistryWithTag) == "testImage")
    assert(getImageNameOnly(imagePrivateRegistryWithOrgWithTag) == "testImage")
    assert(getImageNameOnly(imagePrivateRegistryWithOrgWithTag, removeOrganization = false) == "org/testImage")
  }

  test("Validate getting image name with no tag") {
    assert(getImageNoTag("") == "")
    assert(getImageNoTag(imageNoTag) == imageNoTag)
    assert(getImageNoTag(imageLatestTag) == imageNoTag)
    assert(getImageNoTag(imagePrivateRegistryNoTag) == imagePrivateRegistryNoTag)
    assert(getImageNoTag(imagePrivateRegistryWithLatest) == imagePrivateRegistryNoTag)
    assert(getImageNoTag(imagePrivateRegistryWithTag) == imagePrivateRegistryNoTag)
    assert(getImageNoTag(imagePrivateRegistryWithOrgWithTag) == imagePrivateRegistryWithOrgNoTag)
    assert(getImageNoTag(badImageWithColon) == imageNoTag)
    assert(getImageNoTag(badImageWithMultipleColon) == badImageWithMultipleColon.split(":").dropRight(1).mkString(":"))
    assert(getImageNoTag(badImageWithOnlyColon) == badImageWithOnlyColon.dropRight(1))
  }
} 
Example 20
Source File: ImagePullingSpec.scala    From sbt-docker-compose   with BSD 3-Clause "New" or "Revised" License
import com.tapad.docker.DockerComposePlugin._
import com.tapad.docker.{ ServiceInfo, DockerComposePluginLocal }
import org.mockito.Mockito._
import org.scalatest.{ OneInstancePerTest, BeforeAndAfter, FunSuite }

class ImagePullingSpec extends FunSuite with BeforeAndAfter with OneInstancePerTest {
  test("Validate that when the 'skipPull' argument is passed in no imaged are pull from the Docker registry") {
    val instanceMock = new DockerComposePluginLocal with MockOutput

    instanceMock.pullDockerImages(Seq(skipPullArg), null, suppressColor = false)
    assert(instanceMock.messages.exists(_.contains("Skipping Docker Repository Pull for all images.")))
  }

  test("Validate that images with a 'build' source not pulled from the Docker registry") {
    val instanceMock = new DockerComposePluginLocal with MockOutput
    val imageName = "buildImageName"
    val serviceInfo = ServiceInfo("serviceName", imageName, buildImageSource, null)

    instanceMock.pullDockerImages(null, List(serviceInfo), suppressColor = false)
    assert(instanceMock.messages.contains(s"Skipping Pull of image: $imageName"))
  }

  test("Validate that images with a 'defined' source are pulled from the Docker registry") {
    val instanceMock = spy(new DockerComposePluginLocal)
    val imageName = "buildImageName"
    val serviceInfo = ServiceInfo("serviceName", imageName, definedImageSource, null)

    doNothing().when(instanceMock).dockerPull(imageName)

    instanceMock.pullDockerImages(null, List(serviceInfo), suppressColor = false)

    verify(instanceMock, times(1)).dockerPull(imageName)
  }

  test("Validate that images with a 'cache' source are not pulled from the Docker registry") {
    val instanceMock = new DockerComposePluginLocal with MockOutput
    val imageName = "cacheImageName"
    val serviceInfo = ServiceInfo("serviceName", imageName, cachedImageSource, null)

    instanceMock.pullDockerImages(null, List(serviceInfo), suppressColor = false)
    assert(instanceMock.messages.contains(s"Skipping Pull of image: $imageName"))
  }
} 
Example 21
Source File: InstancePersistenceSpec.scala    From sbt-docker-compose   with BSD 3-Clause "New" or "Revised" License
import com.tapad.docker.{ RunningInstanceInfo, DockerComposePluginLocal }
import com.tapad.docker.DockerComposeKeys._
import org.mockito.Mockito._
import org.scalatest.mockito.MockitoSugar
import org.scalatest.{ BeforeAndAfter, FunSuite, OneInstancePerTest }

class InstancePersistenceSpec extends FunSuite with BeforeAndAfter with OneInstancePerTest with MockitoSugar {

  test("Validate that only running instances from this sbt session are returned") {
    val instanceMock = spy(new DockerComposePluginLocal)

    val runningInstanceMatch = RunningInstanceInfo("instanceNameMatch", "matchingservice", "composePath", List.empty)
    val runningInstanceNoMatch = RunningInstanceInfo("instanceNameNoMatch", "nomatchingservice", "composePath", List.empty)

    doReturn("matchingservice").when(instanceMock).getSetting(composeServiceName)(null)
    doReturn(Option(List(runningInstanceMatch, runningInstanceNoMatch))).when(instanceMock).getAttribute(runningInstances)(null)

    val instanceIds = instanceMock.getServiceRunningInstanceIds(null)

    assert(instanceIds.size == 1)
    assert(instanceIds.contains("instanceNameMatch"))
  }

  test("Validate that only matching instance ids are returned") {
    val instanceMock = spy(new DockerComposePluginLocal)

    val runningInstanceMatch = RunningInstanceInfo("instanceNameMatch", "matchingservice", "composePath", List.empty)
    val runningInstanceNoMatch = RunningInstanceInfo("instanceNameNoMatch", "nomatchingservice", "composePath", List.empty)

    doReturn("matchingservice").when(instanceMock).getSetting(composeServiceName)(null)
    doReturn(Option(List(runningInstanceMatch, runningInstanceNoMatch))).when(instanceMock).getAttribute(runningInstances)(null)

    val instance = instanceMock.getMatchingRunningInstance(null, Seq("instanceNameMatch"))

    assert(instance.isDefined)
    assert(instance.get.instanceName == "instanceNameMatch")
  }
} 
Example 22
Source File: ImageBuildingSpec.scala    From sbt-docker-compose   with BSD 3-Clause "New" or "Revised" License
import com.tapad.docker.DockerComposeKeys._
import com.tapad.docker.DockerComposePlugin._
import com.tapad.docker.DockerComposePluginLocal
import org.mockito.Mockito._
import org.scalatest.{ OneInstancePerTest, BeforeAndAfter, FunSuite }

class ImageBuildingSpec extends FunSuite with BeforeAndAfter with OneInstancePerTest {
  test("Validate that a Docker image is built when 'skipBuild' and 'noBuild' are not set") {
    val composeMock = spy(new DockerComposePluginLocal)

    doReturn(false).when(composeMock).getSetting(suppressColorFormatting)(null)
    doReturn(false).when(composeMock).getSetting(composeNoBuild)(null)
    doNothing().when(composeMock).buildDockerImageTask(null)

    composeMock.buildDockerImage(null, null)

    verify(composeMock, times(1)).buildDockerImageTask(null)
  }

  test("Validate that a Docker image is not built when 'skipBuild' is passed as an argument") {
    val composeMock = spy(new DockerComposePluginLocal)

    doReturn(false).when(composeMock).getSetting(suppressColorFormatting)(null)
    doReturn(false).when(composeMock).getSetting(composeNoBuild)(null)
    doNothing().when(composeMock).buildDockerImageTask(null)

    composeMock.buildDockerImage(null, Seq(skipBuildArg))

    verify(composeMock, times(0)).buildDockerImageTask(null)
  }

  test("Validate that a Docker image is not built when the 'noBuild' setting is true") {
    val composeMock = spy(new DockerComposePluginLocal)

    doReturn(false).when(composeMock).getSetting(suppressColorFormatting)(null)
    doReturn(true).when(composeMock).getSetting(composeNoBuild)(null)
    doNothing().when(composeMock).buildDockerImageTask(null)

    composeMock.buildDockerImage(null, null)

    verify(composeMock, times(0)).buildDockerImageTask(null)
  }
} 
Example 23
Source File: DriverTest.scala    From practical-data-science-with-hadoop-and-spark   with Apache License 2.0
package dsbook.sentimentanalysis

import org.scalatest.FunSuite
import org.scalatest.BeforeAndAfterAll

import org.apache.spark.SparkContext

class DriverTest extends FunSuite with LocalSparkContext {

  test("test completely accurate") {
    val positiveDocs = List("I had a great time!"
                           ,"I loved the movie.  I thought it was the best I've ever seen!"
                           , "My favorite part of the movie was the first part.  It was the best."
                           )
    val negativeDocs = List("I had a terrible time!"
                           ,"I hated the movie.  I thought it was the worst I've ever seen!"
                           , "My least favorite part of the movie was the first part.  It was the worst."
                           )
    val positiveRDD = sc.makeRDD(positiveDocs)
    val negativeRDD = sc.makeRDD(negativeDocs)
    val results = Driver.evaluateSentiment(positiveRDD, negativeRDD)
    val accuracy = results.get("ACCURACY").get
    //6 / 6 correct
    assert(Math.abs(accuracy - 100*(6.0/6)) < 1e-6)
  }
  test("test one miss in each category") {
    val positiveDocs = List("I had a great time!"
      ,"I hated the movie.  I thought it was the worst I've ever seen!"
                           , "My favorite part of the movie was the first part.  It was the best."
                           )
    val negativeDocs = List("I had a terrible time!"
      ,"I loved the movie.  I thought it was the best I've ever seen!"
                           , "My least favorite part of the movie was the first part.  It was the worst."
                           )
    val positiveRDD = sc.makeRDD(positiveDocs)
    val negativeRDD = sc.makeRDD(negativeDocs)
    val results = Driver.evaluateSentiment(positiveRDD, negativeRDD)
    val accuracy = results.get("ACCURACY").get
    //4 correct, 2 wrong
    assert(Math.abs(accuracy - 100*(4.0/6)) < 1e-6)
  }
} 
Example 24
Source File: RadixTreeLawsCheck.scala    From radixtree   with Apache License 2.0
package com.rklaehn.radixtree

import algebra.instances.all._
import org.scalacheck.Arbitrary
import org.scalatest.FunSuite
import org.typelevel.discipline.scalatest.Discipline
import Instances._
import algebra.laws.RingLaws
import cats.kernel.laws.discipline.MonoidTests

class RadixTreeLawsCheck extends FunSuite with Discipline {

  implicit def arbRadixTree[K: Arbitrary : RadixTree.Key, V: Arbitrary]: Arbitrary[RadixTree[K, V]] = Arbitrary {
    for {
      kvs ← Arbitrary.arbitrary[List[(K, V)]]
    } yield
    RadixTree(kvs: _*)
  }

  checkAll("MonoidTests[RadixTree[String, String]].monoid", MonoidTests[RadixTree[String, String]].monoid)
  checkAll("MonoidTests[RadixTree[Array[Byte], Array[Byte]]].monoid", MonoidTests[RadixTree[Array[Byte], Array[Byte]]].monoid)
  checkAll("RingLaws[RadixTree[String, Byte]].additiveMonoid", RingLaws[RadixTree[String, Short]].additiveMonoid)
  checkAll("RingLaws[RadixTree[Array[Byte], Int]].additiveMonoid", RingLaws[RadixTree[String, Int]].additiveMonoid)
} 
Example 25
Source File: MetricsStatsReceiverTest.scala    From finagle-metrics   with MIT License
package com.twitter.finagle.metrics

import com.twitter.finagle.metrics.MetricsStatsReceiver._
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.FunSuite

@RunWith(classOf[JUnitRunner])
class MetricsStatsReceiverTest extends FunSuite {

  private[this] val receiver = new MetricsStatsReceiver()

  private[this] def readGauge(name: String): Option[Number] =
    Option(metrics.getGauges.get(name)) match {
      case Some(gauge) => Some(gauge.getValue.asInstanceOf[Float])
      case _ => None
    }

  private[this] def readCounter(name: String): Option[Number] =
    Option(metrics.getMeters.get(name)) match {
      case Some(counter) => Some(counter.getCount)
      case _ => None
    }

  private[this] def readStat(name: String): Option[Number] =
    Option(metrics.getHistograms.get(name)) match {
      case Some(stat) => Some(stat.getSnapshot.getValues.toSeq.sum)
      case _ => None
    }

  test("MetricsStatsReceiver should store and read gauge into the Codahale Metrics library") {
    val x = 1.5f
    receiver.addGauge("my_gauge")(x)

    assert(readGauge("my_gauge") === Some(x))
  }

  test("MetricsStatsReceiver should always assume the latest value of an already created gauge") {
    val gaugeName = "my_gauge2"
    val expectedValue = 8.8f

    receiver.addGauge(gaugeName)(2.2f)
    receiver.addGauge(gaugeName)(9.9f)
    receiver.addGauge(gaugeName)(expectedValue)

    assert(readGauge(gaugeName) === Some(expectedValue))
  }

  test("MetricsStatsReceiver should store and remove gauge into the Codahale Metrics Library") {
    val gaugeName = "temp-gauge"
    val expectedValue = 2.8f

    val tempGauge = receiver.addGauge(gaugeName)(expectedValue)
    assert(readGauge(gaugeName) === Some(expectedValue))

    tempGauge.remove()

    assert(readGauge(gaugeName) === None)
  }

  test("MetricsStatsReceiver should store and read stat into the Codahale Metrics library") {
    val x = 1
    val y = 3
    val z = 5

    val s = receiver.stat("my_stat")
    s.add(x)
    s.add(y)
    s.add(z)

    assert(readStat("my_stat") === Some(x + y + z))
  }

  test("MetricsStatsReceiver should store and read counter into the Codahale Metrics library") {
    val x = 2
    val y = 5
    val z = 8

    val c = receiver.counter("my_counter")
    c.incr(x)
    c.incr(y)
    c.incr(z)

    assert(readCounter("my_counter") === Some(x + y + z))
  }

} 
Example 26
Source File: ReportPlugin.scala    From AppCrawler   with Apache License 2.0
package com.testerhome.appcrawler.plugin

import java.io

import com.testerhome.appcrawler.{Report, URIElement}
import com.testerhome.appcrawler._
import org.scalatest.FunSuite
import org.scalatest.tools.Runner
import sun.misc.{Signal, SignalHandler}

import scala.collection.mutable.ListBuffer
import scala.reflect.io.File


class ReportPlugin extends Plugin with Report {
  var lastSize=0
  override def start(): Unit ={
    reportPath=new java.io.File(getCrawler().conf.resultDir).getCanonicalPath
    log.info(s"reportPath=${reportPath}")
    val tmpDir=new io.File(s"${reportPath}/tmp/")
    if(tmpDir.exists()==false){
      log.info(s"create ${reportPath}/tmp/ directory")
      tmpDir.mkdir()
    }
  }

  override def stop(): Unit ={
    generateReport()
  }

  override def afterElementAction(element: URIElement): Unit ={
    val count=getCrawler().store.clickedElementsList.length
    log.info(s"clickedElementsList size = ${count}")
    val curSize=getCrawler().store.clickedElementsList.size
    if(curSize-lastSize > curSize/10+20 ){
      log.info(s"${curSize}-${lastSize} > ${curSize}/10+10  ")
      log.info("generate test report ")
      generateReport()
    }
  }

  def generateReport(): Unit ={
    Report.saveTestCase(getCrawler().store, getCrawler().conf.resultDir)
    Report.store=getCrawler().store
    Report.runTestCase()

    lastSize=getCrawler().store.clickedElementsList.size
  }


} 
Example 27
Source File: AutomationSuite.scala    From AppCrawler   with Apache License 2.0
package com.testerhome.appcrawler

import org.scalatest
import org.scalatest.{BeforeAndAfterAllConfigMap, ConfigMap, FunSuite, Matchers}


class AutomationSuite extends FunSuite with Matchers with BeforeAndAfterAllConfigMap with CommonLog {
  var crawler: Crawler = _

  override def beforeAll(configMap: ConfigMap): Unit = {
    log.info("beforeAll")
    crawler = configMap.get("crawler").get.asInstanceOf[Crawler]
  }

  test("run steps") {
    log.info("testcase start")
    val conf = crawler.conf
    val driver = crawler.driver

    val cp = new scalatest.Checkpoints.Checkpoint

    conf.testcase.steps.foreach(step => {


      if(step.xpath!=null && step.action!=null){
        step.when=When(step.xpath, step.action)
      }
      if(step.when!=null) {
        val when = step.when
        val xpath = when.xpath
        val action = when.action

        driver.getListFromXPath(xpath).headOption match {
          case Some(v) => {
            val ele = URIElement(v, "Steps")
            crawler.doElementAction(ele, action)
          }
          case None => {
            log.info("not found")
            // used to generate test cases for the configured steps
            val ele = URIElement("Steps", "", "", "NOT_FOUND", xpath)
            crawler.doElementAction(ele, "")
          }
        }
      }


      if(step.then!=null) {
        step.then.foreach(existAssert => {
          log.debug(existAssert)
          cp {
            withClue(s"${existAssert} 不存在\n") { // "不存在" means "does not exist"
              driver.getListFromXPath(existAssert).size should be > 0
            }
          }

        })
      }
    })


    cp.reportAll()
    log.info("finish run steps")
  }
} 
Example 28
Source File: TestDataRecord.scala    From AppCrawler   with Apache License 2.0 5 votes vote down vote up
package com.testerhome.appcrawler.ut

import com.testerhome.appcrawler.CommonLog
import com.testerhome.appcrawler.DataRecord
import org.scalatest.FunSuite


class TestDataRecord extends FunSuite with CommonLog{
  test("diff int"){
    val stringDiff=new DataRecord()
    stringDiff.append(22)
    Thread.sleep(1000)
    stringDiff.append(33333)
    log.info(stringDiff.isDiff())
    log.info(stringDiff.intervalMS())
  }

  test("test interval"){
    val diff=new DataRecord
    assert(0==diff.intervalMS(), diff)
    diff.append("0")
    Thread.sleep(500)
    diff.append("500")
    assert(diff.intervalMS()>=500, diff)
    Thread.sleep(2000)
    diff.append("2000")
    assert(diff.intervalMS()>=2000, diff)
    assert(diff.intervalMS()<=2200, diff)



  }

  test("diff first"){
    val stringDiff=new DataRecord
    assert(false==stringDiff.isDiff, stringDiff)
    stringDiff.append("xxxx")
    assert(false==stringDiff.isDiff, stringDiff)
    stringDiff.append("3333")
    assert(true==stringDiff.isDiff, stringDiff)
    stringDiff.append("3333")
    assert(false==stringDiff.isDiff, stringDiff)



  }

} 
Example 29
Source File: TestGetClassFile.scala    From AppCrawler   with Apache License 2.0 5 votes vote down vote up
package com.testerhome.appcrawler.ut

import com.testerhome.appcrawler.plugin.FlowDiff
import com.testerhome.appcrawler.{DiffSuite, Report}
import org.apache.commons.io.FileUtils
import org.scalatest.Checkpoints.Checkpoint
import org.scalatest.{FunSuite, Matchers}


class TestGetClassFile extends FunSuite with Matchers{



  test("test checkpoints"){
    markup {
      """
        |dddddddd
      """.stripMargin
    }
    markup("xxxx")
    val cp = new Checkpoint()
    val (x, y) = (1, 2)
    cp { x should be < 0 }
    cp { y should be > 9 }
    cp.reportAll()
  }

  test("test markup"){
    markup {
      """
        |dddddddd
      """.stripMargin
    }
    markup("xxxx")

  }

  test("get class file"){
    val location=classOf[DiffSuite].getProtectionDomain.getCodeSource.getLocation
    println(location)
    val f=getClass.getResource("/com/xueqiu/qa/appcrawler/ut/TestDiffReport.class").getFile
    println(f)
    FileUtils.copyFile(new java.io.File(f), new java.io.File("/tmp/1.class"))



    println(getClass.getClassLoader.getResources("com/xueqiu/qa/appcrawler/ut/TestDiffReport.class"))
  }
} 
Example 30
Source File: TestStringTemplate.scala    From AppCrawler   with Apache License 2.0 5 votes vote down vote up
package com.testerhome.appcrawler.ut

import com.testerhome.appcrawler.CommonLog
import org.scalatest.FunSuite


class TestStringTemplate extends FunSuite with CommonLog{

  def genNumber(): String ={
    1 to 5 map (_.toString) mkString ("\n"+" "*4)
  }
  test("string template"){
    val s=
      s"""
        |class A extends B {
        |  test("ddddd"){
        |    ${genNumber()}
        |  }
        |}
      """.stripMargin
    log.info(s)
  }

  test("string template from file"){
    //todo:
  }

} 
Example 31
Source File: DemoCrawlerSuite.scala    From AppCrawler   with Apache License 2.0 5 votes vote down vote up
package com.testerhome.appcrawler.ut

import org.scalatest.FunSuite


class DemoCrawlerSuite extends FunSuite{
  var name="自动遍历" // "automatic traversal"
  override def suiteName=name
  1 to 10 foreach(i=>{
    test(s"xxx ${i}"){
      markup("<img src='/Users/seveniruby/projects/LBSRefresh/iOS_20160811165931/141_雪球-港股聚宝盆_老熊老雄.jpg'/>")
      assert(1==i)
    }
  })

  1 to 10 foreach(i=>{
    test(s"xxx ignore ${i}"){
      markup("<img src='/Users/seveniruby/projects/LBSRefresh/iOS_20160811165931/141_雪球-港股聚宝盆_老熊老雄.jpg'/>")
      cancel("未遍历") // "not traversed"
    }
  })

  1 to 10 foreach(i=>{
    test(s"xxx ignore ${i}"){
      markup("<img src='/Users/seveniruby/projects/LBSRefresh/iOS_20160811165931/141_雪球-港股聚宝盆_老熊老雄.jpg'/>")
    }
  })


} 
Example 32
Source File: TestReportPlugin.scala    From AppCrawler   with Apache License 2.0 5 votes vote down vote up
package com.testerhome.appcrawler.ut

import com.testerhome.appcrawler.plugin.ReportPlugin
import com.testerhome.appcrawler.{CommonLog, URIElement}
import com.testerhome.appcrawler._
import com.testerhome.appcrawler.plugin.ReportPlugin
import org.scalatest.FunSuite
import org.scalatest.tools.Runner


class TestReportPlugin extends FunSuite with CommonLog{
  test("gen suite"){
    val report=new ReportPlugin()
    val crawler=new Crawler()
    report.setCrawer(crawler)

    val element_1=URIElement("a", "b", "c", "d", "e")
    val info_1=new ElementInfo()
    info_1.element=element_1
    info_1.action=ElementStatus.Skipped


    val element_2=URIElement("aa", "bb", "cc", "dd", "ee")
    val info_2=new ElementInfo()
    info_2.element=element_2
    info_2.action=ElementStatus.Clicked

    val elementsStore=scala.collection.mutable.Map(
      element_1.toString->info_1,
      element_2.toString->info_2
    )
    val store=new URIElementStore
    store.elementStore ++= elementsStore
    report.saveTestCase(store, "/tmp/")

  }

  test("run"){

    val report=new ReportPlugin()
    val crawler=new Crawler()
    report.setCrawer(crawler)

    //Runner.run(Array("-R", "target", "-w", "com.testerhome.appcrawler.report", "-o", "-u", "target/test-reports", "-h", "target/test-reports"))
    Runner.run(Array(
      "-R", "/Users/seveniruby/projects/LBSRefresh/target",
      "-w", "com.testerhome.appcrawler",
      "-o", "-u", "target/test-reports", "-h", "target/test-reports"))

  }

} 
Example 33
Source File: TestGA.scala    From AppCrawler   with Apache License 2.0 5 votes vote down vote up
package com.testerhome.appcrawler.ut

import com.brsanthu.googleanalytics.{GoogleAnalytics, PageViewHit}
import org.apache.log4j.{BasicConfigurator, Level, Logger}
import org.scalatest.FunSuite


class TestGA extends FunSuite{
  test("google analyse"){
    println("ga start")

    BasicConfigurator.configure()
    Logger.getRootLogger().setLevel(Level.WARN)
    val ga = new GoogleAnalytics("UA-74406102-1")
    1 to 10 foreach(x=>{
      ga.postAsync(new PageViewHit(s"http://appcrawler.io/demo${x}", "test"))
    })
    Thread.sleep(10000)

    1 to 10 foreach(x=>{
      ga.postAsync(new PageViewHit(s"http://appcrawler.io/dem1${x}", "test"))
    })

    Thread.sleep(10000)
    1 to 10 foreach(x=>{
      ga.postAsync(new PageViewHit(s"http://appcrawler.io/dem2${x}", "test"))
    })
    //ga.post(new PageViewHit("http://appcrawler.io/test2", "test"))
    println("ga end")

  }

} 
Example 34
Source File: TestTreeNode.scala    From AppCrawler   with Apache License 2.0 5 votes vote down vote up
package com.testerhome.appcrawler.ut

import com.testerhome.appcrawler.TreeNode
import org.scalatest.FunSuite

import scala.collection.mutable.ListBuffer


class TestTreeNode extends FunSuite{
  test("generate tree"){
    val root=TreeNode("root")
    root.appendNode(root, TreeNode("1")).appendNode(root, TreeNode("11")).appendNode(root, TreeNode("111"))
    root.appendNode(root, TreeNode("2")).appendNode(root, TreeNode("21"))
    root.appendNode(root, TreeNode("3"))
    root.toXml(root)

  }

  test("generate tree by list"){
    val list=ListBuffer(1, 2, 3, 4, 1, 5, 6, 5, 7)
    TreeNode(0).generateFreeMind(list, "1.mm")
  }


  test("generate tree by list string"){
    val list=ListBuffer("1", "2", "3", "4", "1", "5", "66\"66", "5", "7")
    TreeNode("demo").generateFreeMind(list, "2.mm")
  }

  test("append node single"){
    val root=TreeNode(0)
    var current1=root.appendNode(root, TreeNode(1))
    println(current1)
    var current2=current1.appendNode(root, TreeNode(2))
    println(root)
    println(current1)
    println(current2)
  }

} 
Example 35
Source File: TestWebDriverAgent.scala    From AppCrawler   with Apache License 2.0 5 votes vote down vote up
package com.testerhome.appcrawler.it

import java.net.URL

import com.testerhome.appcrawler.AppiumSuite
import org.openqa.selenium.Capabilities
import org.openqa.selenium.remote.{DesiredCapabilities, RemoteWebDriver}
import org.scalatest.FunSuite
import scala.collection.JavaConversions._


class TestWebDriverAgent extends AppiumSuite{
  test("use remote webdriver"){
    val capability=new DesiredCapabilities()
    capability.setCapability("app", "/Users/seveniruby/projects/snowball-ios/DerivedData/Snowball/Build/Products/Debug-iphonesimulator/Snowball.app")
    capability.setCapability("bundleId", "com.xueqiu")
    capability.setCapability("fullReset", "true")
    capability.setCapability("noReset", "true")
    capability.setCapability("udid", "4F05E384-FE32-43DE-8539-4DC3E2EBC117")
    capability.setCapability("automationName", "XCUITest")
    capability.setCapability("platformName", "ios")
    capability.setCapability("deviceName", "iPhone Simulator")
    capability.setCapability("bundleId", "com.xueqiu")

    //val url="http://192.168.100.65:7771"
    val url="http://127.0.0.1:4723/wd/hub"
    val driver=new RemoteWebDriver(new URL(url), capability)
    println(driver.getPageSource)
  }


  test("use remote webdriver meituan"){
    val capability=new DesiredCapabilities()
    capability.setCapability("app", "/Users/seveniruby/Downloads/app/waimai.app")
    capability.setCapability("bundleId", "com.meituan.iToGo.ep")
    //capability.setCapability("fullReset", false)
    //capability.setCapability("noReset", true)
    //capability.setCapability("udid", "4F05E384-FE32-43DE-8539-4DC3E2EBC117")
    capability.setCapability("automationName", "XCUITest")
    capability.setCapability("platformName", "ios")
    capability.setCapability("deviceName", "iPhone 6")
    capability.setCapability("platformVersion", "10.2")
    capability.setCapability("autoAcceptAlerts", true)
    //capability.setCapability("webDriverAgentUrl", "http://172.18.118.90:8100/")

    //val url="http://192.168.100.65:7771"
    //val url="http://127.0.0.1:8100"
    val url="http://127.0.0.1:4730/wd/hub"
    val driver=new RemoteWebDriver(new URL(url), capability)

    while(true){
      Thread.sleep(2000)
      println(driver.getPageSource)
    }

  }

  test("use remote webdriver xueqiu"){
    val capability=new DesiredCapabilities()
    capability.setCapability("app", "/Users/seveniruby/projects/snowball-ios/DerivedData/Snowball/Build/Products/Debug-iphonesimulator/Snowball.app")
    capability.setCapability("bundleId", "com.xueqiu")
    capability.setCapability("fullReset", "false")
    capability.setCapability("noReset", "true")
    //capability.setCapability("udid", "4F05E384-FE32-43DE-8539-4DC3E2EBC117")
    capability.setCapability("automationName", "XCUITest")
    capability.setCapability("platformName", "ios")
    capability.setCapability("deviceName", "iPhone Simulator")
    capability.setCapability("bundleId", "com.xueqiu")
    capability.setCapability("autoAcceptAlerts", true)


    //val url="http://192.168.100.65:7771"
    //val url="http://127.0.0.1:8100"
    val url="http://127.0.0.1:4730/wd/hub"
    val driver=new RemoteWebDriver(new URL(url), capability)

    while(true){
      Thread.sleep(2000)
      driver.findElementsByXPath("//*").foreach(e=>{
        println(s"tag=${e.getTagName} text=${e.getText}")
      })
      println(driver.getPageSource)
      println("==============")
    }

  }
} 
Example 36
Source File: TestMacaca.scala    From AppCrawler   with Apache License 2.0 5 votes vote down vote up
package com.testerhome.appcrawler.it

import org.scalatest.{BeforeAndAfterAll, FunSuite}
import org.apache.log4j.Logger
import com.alibaba.fastjson.JSONObject
import macaca.client.MacacaClient



class TestMacaca extends FunSuite with BeforeAndAfterAll{

  val driver=new MacacaClient()
  override def beforeAll(): Unit = {

    val porps = new JSONObject()
    porps.put("autoAcceptAlerts", true)
    porps.put("browserName", "")
    porps.put("platformName", "android")
    porps.put("package", "com.gotokeep.keep")
    porps.put("activity", ".activity.SplashActivity")
    porps.put("reuse", 3)

    val desiredCapabilities = new JSONObject()
    desiredCapabilities.put("desiredCapabilities", porps)
    driver.initDriver(desiredCapabilities)

  }
  test("macaca android"){
    println(driver.source())
  }
  test("macaca chrome"){
    val porps = new JSONObject()
    porps.put("autoAcceptAlerts", true)
    porps.put("browserName", "Chrome")
    porps.put("platformName", "desktop") // android or ios

    porps.put("javascriptEnabled", true)
    porps.put("platform", "ANY")

    val desiredCapabilities = new JSONObject()
    desiredCapabilities.put("desiredCapabilities", porps)
    driver.initDriver(desiredCapabilities)
    driver.get("http://www.baidu.com/")
  }

} 
Example 37
Source File: TestNW.scala    From AppCrawler   with Apache License 2.0 5 votes vote down vote up
package com.testerhome.appcrawler.it

import java.net.URL

import org.openqa.selenium.chrome.{ChromeOptions, ChromeDriver}
import org.openqa.selenium.remote.{RemoteWebDriver, DesiredCapabilities}
import org.scalatest.FunSuite
import collection.JavaConversions._


class TestNW extends FunSuite{
  test("test nw"){

    System.setProperty("webdriver.chrome.driver",
      "/Users/seveniruby/projects/nwjs/ics4_debug_nw0.14.7/chromedriver")
    val options=new ChromeOptions()
    options.addArguments("nwapp=/Users/seveniruby/projects/nwjs/ics4_debug_nw0.14.7/app")
    val driver=new ChromeDriver(options)
    println(driver.getPageSource)
    Thread.sleep(2000)
    driver.findElementsByXPath("//label").foreach(x=>{
      println(x.getTagName)
      println(x.getLocation)
      println(x.getText)
      println("text()="+x.getAttribute("text()"))
      println("text="+x.getAttribute("text"))
      println("value="+x.getAttribute("value"))
      println("name="+x.getAttribute("name"))
      println("id="+x.getAttribute("id"))
      println("class="+x.getAttribute("class"))
      println("type="+x.getAttribute("type"))
      println("placeholder="+x.getAttribute("placeholder"))
      println("============")
    })
    driver.findElementByXPath("//label[contains(., 'selectedRegion')]").click()

    //driver.quit()

  }

  test("test nw remote"){
    val options=new ChromeOptions()
    options.addArguments("nwapp=/Users/seveniruby/projects/nwjs/ics4_debug_nw0.14.7/app")
    val url="http://10.3.2.65:4444/wd/hub"

    val dc = DesiredCapabilities.chrome()
    dc.setCapability(ChromeOptions.CAPABILITY, options)

    val driver=new RemoteWebDriver(new URL(url), dc)
    println(driver.getPageSource)
    Thread.sleep(2000)
    driver.findElementsByXPath("//label").foreach(x=>{
      println(x.getTagName)
      println(x.getLocation)
      println(x.getText)
      println("text()="+x.getAttribute("text()"))
      println("text="+x.getAttribute("text"))
      println("value="+x.getAttribute("value"))
      println("name="+x.getAttribute("name"))
      println("id="+x.getAttribute("id"))
      println("class="+x.getAttribute("class"))
      println("type="+x.getAttribute("type"))
      println("placeholder="+x.getAttribute("placeholder"))
      println("============")
    })
    driver.findElementByXPath("//label[contains(., 'selectedRegion')]").click()

    //driver.quit()

  }

} 
Example 38
Source File: TestAppium.scala    From AppCrawler   with Apache License 2.0 5 votes vote down vote up
package com.testerhome.appcrawler.it

import java.net.URL

import com.sun.jdi.connect.spi.TransportService.Capabilities
import com.testerhome.appcrawler.driver.AppiumClient
import io.appium.java_client.android.AndroidDriver
import io.appium.java_client.remote.{AndroidMobileCapabilityType, MobileCapabilityType}
import org.openqa.selenium.WebElement
import org.openqa.selenium.remote.DesiredCapabilities
import org.scalatest.FunSuite

import scala.io.Source


class TestAppium extends FunSuite{
  val a=new AppiumClient()
  test("appium success"){
    a.start()
    println(Source.fromURL("http://127.0.0.1:4723/wd/hub/sessions").mkString)
    a.stop()
  }

  test("single session"){
    val capa=new DesiredCapabilities()
    capa.setCapability(AndroidMobileCapabilityType.APP_PACKAGE, "com.xueqiu.android")
    capa.setCapability(AndroidMobileCapabilityType.APP_ACTIVITY, ".view.WelcomeActivityAlias")
    capa.setCapability(MobileCapabilityType.DEVICE_NAME, "demo")
    val driver=new AndroidDriver[WebElement](new URL("http://127.0.0.1:4723/wd/hub/"), capa)


  }
} 
Example 39
Source File: TestXueQiu.scala    From AppCrawler   with Apache License 2.0 5 votes vote down vote up
package com.testerhome.appcrawler.it

import java.net.URL

import com.testerhome.appcrawler.AppCrawler
import io.appium.java_client.android.{AndroidDriver, AndroidElement}
import org.openqa.selenium.remote.DesiredCapabilities
import org.scalatest.FunSuite

class TestXueQiu extends FunSuite{
  val capability=new DesiredCapabilities()
  capability.setCapability("app", "")
  capability.setCapability("appPackage", "com.tencent.mm")
  capability.setCapability("appActivity", ".ui.LauncherUI")
  capability.setCapability("deviceName", "emulator-5554")
  capability.setCapability("fastReset", "false")
  capability.setCapability("fullReset", "false")
  capability.setCapability("noReset", "true")
  capability.setCapability("unicodeKeyboard", "true")
  capability.setCapability("resetKeyboard", "true")
  capability.setCapability("automationName", "appium")

  test("all app "){
    capability.setCapability("app", "")
    capability.setCapability("appPackage", "com.xueqiu.android")
    capability.setCapability("appActivity", ".view.WelcomeActivityAlias")
    val driver=new AndroidDriver[AndroidElement](new URL("http://127.0.0.1:4723/wd/hub"), capability)

  }

  test("appcrawler"){
    AppCrawler.main(Array("-c", "src/test/scala/com/testerhome/appcrawler/it/xueqiu_private.yml",
      "-o", s"/tmp/xueqiu/${System.currentTimeMillis()}", "--verbose"
    )
    )
  }

  test("appcrawler base example"){
    AppCrawler.main(Array("-c", "src/test/scala/com/testerhome/appcrawler/it/xueqiu_base.yml",
      "-o", s"/tmp/xueqiu/${System.currentTimeMillis()}", "--verbose"
    )
    )
  }

} 
Example 40
Source File: TestOCR.scala    From AppCrawler   with Apache License 2.0 5 votes vote down vote up
// Imports for the Tess4J and ImageIO APIs used below (reconstructed; a tess4j
// dependency is assumed).
import java.awt.BasicStroke
import javax.imageio.ImageIO

import net.sourceforge.tess4j.ITessAPI.TessPageIteratorLevel
import net.sourceforge.tess4j.Tesseract
import org.scalatest.FunSuite

import scala.collection.JavaConversions._

class TestOCR extends FunSuite{

  test("test ocr"){
    val api=new Tesseract()
    api.setDatapath("/Users/seveniruby/Downloads/")
    api.setLanguage("eng+chi_sim")
    val img=new java.io.File("/Users/seveniruby/temp/google-test7.png")
    val imgFile=ImageIO.read(img)
    val graph=imgFile.createGraphics()
    graph.setStroke(new BasicStroke(5))

    val result=api.doOCR(img)

    val words=api.getWords(imgFile, TessPageIteratorLevel.RIL_WORD).toList
    words.foreach(word=>{

      val box=word.getBoundingBox
      val x=box.getX.toInt
      val y=box.getY.toInt
      val w=box.getWidth.toInt
      val h=box.getHeight.toInt

      graph.drawRect(x, y, w, h)
      graph.drawString(word.getText, x, y)

      println(word.getBoundingBox)
      println(word.getText)
    })
    graph.dispose()
    ImageIO.write(imgFile, "png", new java.io.File(s"${img}.mark.png"))



    println(result)

  }

}
Example 41
Source File: FillSuite.scala    From spark-timeseries   with Apache License 2.0 5 votes vote down vote up
package com.cloudera.sparkts

import scala.Double.NaN

import com.cloudera.sparkts.UnivariateTimeSeries._

import org.scalatest.{FunSuite, ShouldMatchers}

class FillSuite extends FunSuite with ShouldMatchers {
  ignore("nearest") {
    fillNearest(Array(1.0)) should be (Array(1.0))
    fillNearest(Array(1.0, 1.0, 2.0)) should be (Array(1.0, 1.0, 2.0))
    fillNearest(Array(1.0, NaN, NaN, 2.0)) should be (Array(1.0, 1.0, 2.0, 2.0))
    // round down to previous
    fillNearest(Array(1.0, NaN, 2.0)) should be (Array(1.0, 1.0, 2.0))
    fillNearest(Array(1.0, NaN, NaN, NaN, 2.0)) should be (Array(1.0, 1.0, 1.0, 2.0, 2.0))
    fillNearest(Array(1.0, NaN, 3.0, NaN, 2.0)) should be (Array(1.0, 1.0, 3.0, 3.0, 2.0))
  }

  test("previous") {
    fillPrevious(Array(1.0)) should be (Array(1.0))
    fillPrevious(Array(1.0, 1.0, 2.0)) should be (Array(1.0, 1.0, 2.0))
    fillPrevious(Array(1.0, NaN, 2.0)) should be (Array(1.0, 1.0, 2.0))
    fillPrevious(Array(1.0, NaN, NaN, 2.0)) should be (Array(1.0, 1.0, 1.0, 2.0))
    fillPrevious(Array(1.0, NaN, NaN, NaN, 2.0)) should be (Array(1.0, 1.0, 1.0, 1.0, 2.0))
    fillPrevious(Array(1.0, NaN, 3.0, NaN, 2.0)) should be (Array(1.0, 1.0, 3.0, 3.0, 2.0))
  }

  test("next") {
    fillNext(Array(1.0)) should be (Array(1.0))
    fillNext(Array(1.0, 1.0, 2.0)) should be (Array(1.0, 1.0, 2.0))
    fillNext(Array(1.0, NaN, 2.0)) should be (Array(1.0, 2.0, 2.0))
    fillNext(Array(1.0, NaN, NaN, 2.0)) should be (Array(1.0, 2.0, 2.0, 2.0))
    fillNext(Array(1.0, NaN, NaN, NaN, 2.0)) should be (Array(1.0, 2.0, 2.0, 2.0, 2.0))
    fillNext(Array(1.0, NaN, 3.0, NaN, 2.0)) should be (Array(1.0, 3.0, 3.0, 2.0, 2.0))
  }

  test("linear") {
    fillLinear(Array(1.0)) should be (Array(1.0))
    fillLinear(Array(1.0, 1.0, 2.0)) should be (Array(1.0, 1.0, 2.0))
    fillLinear(Array(1.0, NaN, 2.0)) should be (Array(1.0, 1.5, 2.0))
    fillLinear(Array(2.0, NaN, 1.0)) should be (Array(2.0, 1.5, 1.0))
    fillLinear(Array(1.0, NaN, NaN, 4.0)) should be (Array(1.0, 2.0, 3.0, 4.0))
    fillLinear(Array(1.0, NaN, NaN, NaN, 5.0)) should be (Array(1.0, 2.0, 3.0, 4.0, 5.0))
    fillLinear(Array(1.0, NaN, 3.0, NaN, 2.0)) should be (Array(1.0, 2.0, 3.0, 2.5, 2.0))
  }
} 
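
The linear case above determines the interpolation for interior gaps: each run of NaNs between two known samples is replaced by evenly spaced values between them. A standalone sketch consistent with those assertions (an illustration, not the spark-timeseries implementation):

// Linear NaN-gap filling consistent with the fillLinear assertions above.
def fillLinearSketch(values: Array[Double]): Array[Double] = {
  val result = values.clone()
  var i = 0
  while (i < result.length) {
    if (result(i).isNaN) {
      val prev = i - 1                                  // last known sample before the gap
      var next = i
      while (next < result.length && result(next).isNaN) next += 1
      if (prev >= 0 && next < result.length) {          // only interior gaps are filled
        val step = (result(next) - result(prev)) / (next - prev)
        for (j <- (prev + 1) until next) result(j) = result(prev) + step * (j - prev)
      }
      i = next
    } else i += 1
  }
  result
}

// fillLinearSketch(Array(1.0, Double.NaN, Double.NaN, 4.0)) == Array(1.0, 2.0, 3.0, 4.0)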
Example 42
Source File: EWMASuite.scala    From spark-timeseries   with Apache License 2.0 5 votes vote down vote up
package com.cloudera.sparkts.models

import org.apache.spark.mllib.linalg._
import org.scalatest.{FunSuite, ShouldMatchers}

class EWMASuite extends FunSuite with ShouldMatchers {
  test("adding time dependent effects") {
    val orig = new DenseVector((1 to 10).toArray.map(_.toDouble))

    val m1 = new EWMAModel(0.2)
    val smoothed1 = new DenseVector(Array.fill(10)(0.0))
    m1.addTimeDependentEffects(orig, smoothed1)

    smoothed1(0) should be (orig(0))
    smoothed1(1) should be (m1.smoothing * orig(1) + (1 - m1.smoothing) * smoothed1(0))
    round2Dec(smoothed1.toArray.last) should be (6.54)

    val m2 = new EWMAModel(0.6)
    val smoothed2 = new DenseVector(Array.fill(10)(0.0))
    m2.addTimeDependentEffects(orig, smoothed2)

    smoothed2(0) should be (orig(0))
    smoothed2(1) should be (m2.smoothing * orig(1) + (1 - m2.smoothing) * smoothed2(0))
    round2Dec(smoothed2.toArray.last) should be (9.33)
  }

  test("removing time dependent effects") {
    val smoothed = new DenseVector(Array(1.0, 1.2, 1.56, 2.05, 2.64, 3.31, 4.05, 4.84, 5.67, 6.54))

    val m1 = new EWMAModel(0.2)
    val orig1 = new DenseVector(Array.fill(10)(0.0))
    m1.removeTimeDependentEffects(smoothed, orig1)

    round2Dec(orig1(0)) should be (1.0)
    orig1.toArray.last.toInt should be(10)
  }

  test("fitting EWMA model") {
    // We reproduce the example in ch 7.1 from
    // https://www.otexts.org/fpp/7/1
    val oil = Array(446.7, 454.5, 455.7, 423.6, 456.3, 440.6, 425.3, 485.1, 506.0, 526.8,
      514.3, 494.2)
    val model =  EWMA.fitModel(new DenseVector(oil))
    val truncatedSmoothing = (model.smoothing * 100.0).toInt
    truncatedSmoothing should be (89) // approximately 0.89
  }

  private def round2Dec(x: Double): Double = {
    (x * 100).round / 100.00
  }
} 
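
The first test above encodes the smoothing recurrence directly: s(0) = x(0) and s(t) = a * x(t) + (1 - a) * s(t - 1). A one-line sketch of that recurrence (illustration only, not the EWMAModel implementation):

// Exponential smoothing recurrence asserted by "adding time dependent effects".
def ewmaSketch(xs: Array[Double], a: Double): Array[Double] =
  xs.tail.scanLeft(xs.head)((prev, x) => a * x + (1 - a) * prev)

// ewmaSketch((1 to 10).map(_.toDouble).toArray, 0.2).last rounds to 6.54,
// matching the expectation in the first test above.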
Example 43
Source File: RegressionARIMASuite.scala    From spark-timeseries   with Apache License 2.0 5 votes vote down vote up
package com.cloudera.sparkts.models

import breeze.linalg
import breeze.linalg.DenseMatrix
import org.scalatest.{FunSuite, ShouldMatchers}

class RegressionARIMASuite extends FunSuite with ShouldMatchers {
  
  test("Cochrane-Orcutt-Stock-Data") {
    val expenditure = Array(214.6, 217.7, 219.6, 227.2, 230.9, 233.3, 234.1, 232.3, 233.7, 236.5,
      238.7, 243.2, 249.4, 254.3, 260.9, 263.3, 265.6, 268.2, 270.4, 275.6)

    val stock = Array(159.3, 161.2, 162.8, 164.6, 165.9, 167.9, 168.3, 169.7, 170.5, 171.6, 173.9,
      176.1, 178.0, 179.1, 180.2, 181.2, 181.6, 182.5, 183.3, 184.3)
    val Y = linalg.DenseVector(expenditure)
    val regressors = new DenseMatrix[Double](stock.length, 1)

    regressors(::, 0) := linalg.DenseVector(stock)
    val regARIMA = RegressionARIMA.fitCochraneOrcutt(Y, regressors, 11)
    val beta = regARIMA.regressionCoeff
    val rho = regARIMA.arimaCoeff(0)
    rho should equal(0.8241 +- 0.001)
    beta(0) should equal(-235.4889 +- 0.1)
    beta(1) should equal(2.75306 +- 0.001)
  }
} 
Example 44
Source File: GARCHSuite.scala    From spark-timeseries   with Apache License 2.0 5 votes vote down vote up
package com.cloudera.sparkts.models

import com.cloudera.sparkts.MatrixUtil.toBreeze

import org.apache.spark.mllib.linalg._
import org.apache.commons.math3.random.MersenneTwister
import org.scalatest.FunSuite

class GARCHSuite extends FunSuite {
  test("GARCH log likelihood") {
    val model = new GARCHModel(.2, .3, .4)
    val rand = new MersenneTwister(5L)
    val n  = 10000

    val ts = new DenseVector(model.sample(n, rand))
    val logLikelihoodWithRightModel = model.logLikelihood(ts)

    val logLikelihoodWithWrongModel1 = new GARCHModel(.3, .4, .5).logLikelihood(ts)
    val logLikelihoodWithWrongModel2 = new GARCHModel(.25, .35, .45).logLikelihood(ts)
    val logLikelihoodWithWrongModel3 = new GARCHModel(.1, .2, .3).logLikelihood(ts)

    assert(logLikelihoodWithRightModel > logLikelihoodWithWrongModel1)
    assert(logLikelihoodWithRightModel > logLikelihoodWithWrongModel2)
    assert(logLikelihoodWithRightModel > logLikelihoodWithWrongModel3)
    assert(logLikelihoodWithWrongModel2 > logLikelihoodWithWrongModel1)
  }

  test("gradient") {
    val alpha = 0.3
    val beta = 0.4
    val omega = 0.2
    val genModel = new GARCHModel(omega, alpha, beta)
    val rand = new MersenneTwister(5L)
    val n = 10000

    val ts = new DenseVector(genModel.sample(n, rand))

    val gradient1 = new GARCHModel(omega + .1, alpha + .05, beta + .1).gradient(ts)
    assert(gradient1.forall(_ < 0.0))
    val gradient2 = new GARCHModel(omega - .1, alpha - .05, beta - .1).gradient(ts)
    assert(gradient2.forall(_ > 0.0))
  }

  test("fit model") {
    val omega = 0.2
    val alpha = 0.3
    val beta = 0.5
    val genModel = new ARGARCHModel(0.0, 0.0, alpha, beta, omega)
    val rand = new MersenneTwister(5L)
    val n = 10000

    val ts = new DenseVector(genModel.sample(n, rand))

    val model = GARCH.fitModel(ts)
    assert(model.omega - omega < .1) // TODO: we should be able to be more accurate
    assert(model.alpha - alpha < .02)
    assert(model.beta - beta < .02)
  }

  test("fit model 2") {
    val arr = Array[Double](0.1,-0.2,-0.1,0.1,0.0,-0.01,0.00,-0.1,0.1,-0.2,-0.1,0.1,
      0.0,-0.01,0.00,-0.1,0.1,-0.2,-0.1,0.1,0.0,-0.01,0.00,-0.1,0.1,-0.2,-0.1,0.1,0.0,-0.01,0.00,
      -0.1,0.1,-0.2,-0.1,0.1,0.0,-0.01,0.00,-0.1,0.1,-0.2,-0.1,0.1,0.0,-0.01,0.00,-0.1,0.1,-0.2,
      -0.1,0.1,0.0,-0.01,0.00,-0.1,0.1,-0.2,-0.1,0.1,0.0,-0.01,0.00,-0.1,0.1,-0.2,-0.1,0.1,0.0,
      -0.01,0.00,-0.1,0.1,-0.2,-0.1,0.1,0.0,-0.01,0.00,-0.1,0.1,-0.2,-0.1,0.1,0.0,-0.01,0.00,-0.1,
      0.1,-0.2,-0.1,0.1,0.0,-0.01,0.00,-0.1,0.1,-0.2,-0.1,0.1,0.0,-0.01,0.00,-0.1,0.1,-0.2,-0.1,
      0.1,0.0,-0.01,0.00,-0.1,0.1,-0.2,-0.1,0.1,0.0,-0.01,0.00,-0.1,0.1,-0.2,-0.1,0.1,0.0,-0.01,
      0.00,-0.1,0.1,-0.2,-0.1,0.1,0.0,-0.01,0.00,-0.1,0.1,-0.2,-0.1,0.1,0.0,-0.01,0.00,-0.1,0.1,
      -0.2,-0.1,0.1,0.0,-0.01,0.00,-0.1,0.1,-0.2,-0.1,0.1,0.0,-0.01,0.00,-0.1,0.1,-0.2,-0.1,0.1,
      0.0,-0.01,0.00,-0.1,0.1,-0.2,-0.1,0.1,0.0,-0.01,0.00,-0.1,0.1,-0.2,-0.1,0.1,0.0,-0.01,0.00,
      -0.1,0.1,-0.2,-0.1,0.1,0.0,-0.01,0.00,-0.1,0.1,-0.2,-0.1,0.1,0.0,-0.01,0.00,-0.1,0.1,-0.2,
      -0.1,0.1,0.0,-0.01,0.00,-0.1,0.1,-0.2,-0.1,0.1,0.0,-0.01,0.00,-0.1,0.1,-0.2,-0.1,0.1,0.0,
      -0.01,0.00,-0.1,0.1,-0.2,-0.1,0.1,0.0,-0.01,0.00,-0.1,0.1,-0.2,-0.1,0.1,0.0,-0.01,0.00,-0.1,
      0.1,-0.2,-0.1,0.1,0.0,-0.01,0.00,-0.1,0.1,-0.2,-0.1,0.1,0.0,-0.01,0.00,-0.1,0.1,-0.2,-0.1,0.1,
      0.0,-0.01,0.00,-0.1,0.1,-0.2,-0.1,0.1,0.0,-0.01,0.00,-0.1,0.1,-0.2,-0.1,0.1,0.0,-0.01,0.00,
      -0.1,0.1,-0.2,-0.1,0.1,0.0,-0.01,0.00,-0.1,0.1,-0.2,-0.1,0.1,0.0,-0.01,0.00,-0.1,0.1,-0.2,
      -0.1,0.1,0.0,-0.01,0.00,-0.1,0.1,-0.2,-0.1,0.1,0.0,-0.01,0.00,-0.1)
    val ts = new DenseVector(arr)
    val model = ARGARCH.fitModel(ts)
    println(s"alpha: ${model.alpha}")
    println(s"beta: ${model.beta}")
    println(s"omega: ${model.omega}")
    println(s"c: ${model.c}")
    println(s"phi: ${model.phi}")
  }

  test("standardize and filter") {
    val model = new ARGARCHModel(40.0, .4, .2, .3, .4)
    val rand = new MersenneTwister(5L)
    val n  = 10000

    val ts = new DenseVector(model.sample(n, rand))

    // de-heteroskedasticize
    val standardized = model.removeTimeDependentEffects(ts, Vectors.zeros(n))
    // heteroskedasticize
    val filtered = model.addTimeDependentEffects(standardized, Vectors.zeros(n))

    assert((toBreeze(filtered) - toBreeze(ts)).toArray.forall(math.abs(_) < .001))
  }
} 
Example 45
Source File: AutoregressionSuite.scala    From spark-timeseries   with Apache License 2.0 5 votes vote down vote up
package com.cloudera.sparkts.models

import java.util.Random

import com.cloudera.sparkts.MatrixUtil.toBreeze

import org.apache.spark.mllib.linalg._
import org.apache.commons.math3.random.MersenneTwister
import org.scalatest.FunSuite

class AutoregressionSuite extends FunSuite {
  test("fit AR(1) model") {
    val model = new ARModel(1.5, Array(.2))
    val ts = model.sample(5000, new MersenneTwister(10L))
    val fittedModel = Autoregression.fitModel(ts, 1)
    assert(fittedModel.coefficients.length == 1)
    assert(math.abs(fittedModel.c - 1.5) < .07)
    assert(math.abs(fittedModel.coefficients(0) - .2) < .03)
  }

  test("fit AR(2) model") {
    val model = new ARModel(1.5, Array(.2, .3))
    val ts = model.sample(5000, new MersenneTwister(10L))
    val fittedModel = Autoregression.fitModel(ts, 2)
    assert(fittedModel.coefficients.length == 2)
    assert(math.abs(fittedModel.c - 1.5) < .15)
    assert(math.abs(fittedModel.coefficients(0) - .2) < .03)
    assert(math.abs(fittedModel.coefficients(1) - .3) < .03)
  }

  test("add and remove time dependent effects") {
    val rand = new Random()
    val ts = new DenseVector(Array.fill(1000)(rand.nextDouble()))
    val model = new ARModel(1.5, Array(.2, .3))
    val added = model.addTimeDependentEffects(ts, Vectors.zeros(ts.size))
    val removed = model.removeTimeDependentEffects(added, Vectors.zeros(ts.size))
    assert((toBreeze(ts) - toBreeze(removed)).toArray.forall(math.abs(_) < .001))
  }
} 
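
These tests generate data from an AR model and check that fitting recovers the intercept and coefficients. The recurrence being exercised is x(t) = c + sum_i coeff(i) * x(t - 1 - i) + noise(t); the sketch below generates such a series with Gaussian noise, as an illustration of the data the fit sees rather than the ARModel.sample implementation itself.

// AR(p) series generator following the recurrence described above (illustrative).
def arSample(c: Double, coeffs: Array[Double], n: Int, rand: java.util.Random): Array[Double] = {
  val xs = new Array[Double](n)
  for (t <- 0 until n) {
    var value = c + rand.nextGaussian()
    for (i <- coeffs.indices if t - 1 - i >= 0)
      value += coeffs(i) * xs(t - 1 - i)
    xs(t) = value
  }
  xs
}

// A fit on arSample(1.5, Array(0.2), 5000, new java.util.Random(10L)) should recover
// parameters close to c = 1.5 and coefficient 0.2, as the first test above expects.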
Example 46
Source File: AugmentedDickeyFullerSuite.scala    From spark-timeseries   with Apache License 2.0 5 votes vote down vote up
package com.cloudera.sparkts.stats

import com.cloudera.sparkts.models.ARModel
import org.apache.commons.math3.random.MersenneTwister
import org.apache.spark.mllib.linalg.DenseVector
import org.scalatest.FunSuite

class AugmentedDickeyFullerSuite extends FunSuite {
  test("non-stationary AR model") {
    val rand = new MersenneTwister(10L)
    val arModel = new ARModel(0.0, .95)
    val sample = arModel.sample(500, rand)

    val (adfStat, pValue) = TimeSeriesStatisticalTests.adftest(sample, 1)
    assert(!java.lang.Double.isNaN(adfStat))
    assert(!java.lang.Double.isNaN(pValue))
    println("adfStat: " + adfStat)
    println("pValue: " + pValue)
  }

  test("iid samples") {
    val rand = new MersenneTwister(11L)
    val iidSample = Array.fill(500)(rand.nextDouble())
    val (adfStat, pValue) = TimeSeriesStatisticalTests.adftest(new DenseVector(iidSample), 1)
    assert(!java.lang.Double.isNaN(adfStat))
    assert(!java.lang.Double.isNaN(pValue))
    println("adfStat: " + adfStat)
    println("pValue: " + pValue)
  }
} 
Example 47
Source File: DateTimeIndexUtilsSuite.scala    From spark-timeseries   with Apache License 2.0 5 votes vote down vote up
package com.cloudera.sparkts

import java.time.{ZonedDateTime, ZoneId}

import com.cloudera.sparkts.DateTimeIndex._
import org.scalatest.{FunSuite, ShouldMatchers}

class DateTimeIndexUtilsSuite extends FunSuite with ShouldMatchers {
  val UTC = ZoneId.of("Z")

  test("non-overlapping sorted") {
    val index1: DateTimeIndex = uniform(dt("2015-04-10"), 5, new DayFrequency(2), UTC)
    val index2: DateTimeIndex = uniform(dt("2015-05-10"), 5, new DayFrequency(2), UTC)
    val index3: DateTimeIndex = irregular(Array(
      dt("2015-06-10"),
      dt("2015-06-13"),
      dt("2015-06-15"),
      dt("2015-06-20"),
      dt("2015-06-25")
    ), UTC)

    DateTimeIndexUtils.union(Array(index1, index2, index3), UTC) should be (
      hybrid(Array(index1, index2, index3)))
  }

  test("non-overlapping non-sorted") {
    val index1: DateTimeIndex = uniform(dt("2015-04-10"), 5, new DayFrequency(2), UTC)
    val index2: DateTimeIndex = uniform(dt("2015-05-10"), 5, new DayFrequency(2), UTC)
    val index3: DateTimeIndex = irregular(Array(
      dt("2015-06-10"),
      dt("2015-06-13"),
      dt("2015-06-15"),
      dt("2015-06-20"),
      dt("2015-06-25")
    ), UTC)

    DateTimeIndexUtils.union(Array(index3, index1, index2), UTC) should be (
      hybrid(Array(index1, index2, index3)))
  }

  test("overlapping uniform and irregular") {
    val index1: DateTimeIndex = uniform(dt("2015-04-10"), 5, new DayFrequency(2), UTC)
    val index2: DateTimeIndex = uniform(dt("2015-05-10"), 5, new DayFrequency(2), UTC)
    val index3: DateTimeIndex = irregular(Array(
      dt("2015-04-09"),
      dt("2015-04-11"),
      dt("2015-05-01"),
      dt("2015-05-10"),
      dt("2015-06-25")
    ), UTC)

    DateTimeIndexUtils.union(Array(index3, index1, index2), UTC) should be (
      hybrid(Array(
        irregular(Array(
          dt("2015-04-09"),
          dt("2015-04-10"),
          dt("2015-04-11")), UTC),
        uniform(dt("2015-04-12"), 4, new DayFrequency(2), UTC),
        irregular(Array(dt("2015-05-01"),
          dt("2015-05-10")), UTC),
        uniform(dt("2015-05-12"), 4, new DayFrequency(2), UTC),
        irregular(Array(dt("2015-06-25")), UTC)
      )))
  }

  def dt(dt: String, zone: ZoneId = UTC): ZonedDateTime = {
    val splits = dt.split("-").map(_.toInt)
    ZonedDateTime.of(splits(0), splits(1), splits(2), 0, 0, 0, 0, zone)
  }
} 
Example 48
Source File: LocalDBSCANArcherySuite.scala    From dbscan-on-spark   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.mllib.clustering.dbscan

import java.net.URI

import scala.io.Source

import org.scalatest.FunSuite
import org.scalatest.Matchers
import org.apache.spark.mllib.linalg.Vectors

class LocalDBSCANArcherySuite extends FunSuite with Matchers {

  private val dataFile = "labeled_data.csv"

  test("should cluster") {

    val labeled: Map[DBSCANPoint, Double] =
      new LocalDBSCANArchery(eps = 0.3F, minPoints = 10)
        .fit(getRawData(dataFile))
        .map(l => (l, l.cluster.toDouble))
        .toMap

    val expected: Map[DBSCANPoint, Double] = getExpectedData(dataFile).toMap

    labeled.foreach {
      case (key, value) => {
        val t = expected(key)
        if (t != value) {
          println(s"expected: $t but got $value for $key")
        }

      }
    }

    labeled should equal(expected)

  }

  def getExpectedData(file: String): Iterator[(DBSCANPoint, Double)] = {
    Source
      .fromFile(getFile(file))
      .getLines()
      .map(s => {
        val vector = Vectors.dense(s.split(',').map(_.toDouble))
        val point = DBSCANPoint(vector)
        (point, vector(2))
      })
  }

  def getRawData(file: String): Iterable[DBSCANPoint] = {

    Source
      .fromFile(getFile(file))
      .getLines()
      .map(s => DBSCANPoint(Vectors.dense(s.split(',').map(_.toDouble))))
      .toIterable
  }

  def getFile(filename: String): URI = {
    getClass.getClassLoader.getResource(filename).toURI
  }

} 
Example 49
Source File: UDFTest.scala    From SparkGIS   with Apache License 2.0 5 votes vote down vote up
package org.betterers.spark.gis

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.types.{IntegerType, StructField, StructType}
import org.apache.spark.sql.{SQLContext, Row}
import org.scalatest.{BeforeAndAfter, FunSuite}
import org.betterers.spark.gis.udf.Functions


class UDFTest extends FunSuite with BeforeAndAfter {
  import Geometry.WGS84

  val point = Geometry.point((2.0, 2.0))
  val multiPoint = Geometry.multiPoint((1.0, 1.0), (2.0, 2.0), (3.0, 3.0))
  var line = Geometry.line((11.0, 11.0), (12.0, 12.0))
  var multiLine = Geometry.multiLine(
    Seq((11.0, 1.0), (23.0, 23.0)),
    Seq((31.0, 3.0), (42.0, 42.0)))
  var polygon = Geometry.polygon((1.0, 1.0), (2.0, 2.0), (3.0, 1.0))
  var multiPolygon = Geometry.multiPolygon(
    Seq((1.0, 1.0), (2.0, 2.0), (3.0, 1.0)),
    Seq((1.1, 1.1), (2.0, 1.9), (2.5, 1.1))
  )
  val collection = Geometry.collection(point, multiPoint, line)
  val all: Seq[Geometry] = Seq(point, multiPoint, line, multiLine, polygon, multiPolygon, collection)

  var sc: SparkContext = _
  var sql: SQLContext = _

  before {
    sc = new SparkContext(new SparkConf().setMaster("local[4]").setAppName("SparkGIS"))
    sql = new SQLContext(sc)
  }

  after {
    sc.stop()
  }

  test("ST_Boundary") {
    // all.foreach(g => println(Functions.ST_Boundary(g).toString))

    assertResult(true) {
      Functions.ST_Boundary(point).isEmpty
    }
    assertResult(true) {
      Functions.ST_Boundary(multiPoint).isEmpty
    }
    assertResult("Some(MULTIPOINT ((11 11), (12 12)))") {
      Functions.ST_Boundary(line).toString
    }
    assertResult(None) {
      Functions.ST_Boundary(multiLine)
    }
    assertResult("Some(LINEARRING (1 1, 2 2, 3 1, 1 1))") {
      Functions.ST_Boundary(polygon).toString
    }
    assertResult(None) {
      Functions.ST_Boundary(multiPolygon)
    }
    assertResult(None) {
      Functions.ST_Boundary(collection)
    }
  }

  test("ST_CoordDim") {
    all.foreach(g => {
      assertResult(3) {
        Functions.ST_CoordDim(g)
      }
    })
  }

  test("UDF in SQL") {
    val schema = StructType(Seq(
      StructField("id", IntegerType),
      StructField("geo", GeometryType.Instance)
    ))
    val jsons = Map(
      (1, "{\"type\":\"Point\",\"coordinates\":[1,1]}}"),
      (2, "{\"type\":\"LineString\",\"coordinates\":[[12,13],[15,20]]}}")
    )
    val rdd = sc.parallelize(Seq(
      "{\"id\":1,\"geo\":" + jsons(1) + "}",
      "{\"id\":2,\"geo\":" + jsons(2) + "}"
    ))
    rdd.name = "TEST"
    val df = sql.read.schema(schema).json(rdd)
    df.registerTempTable("TEST")
    Functions.register(sql)
    assertResult(Array(3,3)) {
      sql.sql("SELECT ST_CoordDim(geo) FROM TEST").collect().map(_.get(0))
    }
  }
} 
Example 50
Source File: AbstractInMemoryTest.scala    From spark-dynamodb   with Apache License 2.0 5 votes vote down vote up
package com.audienceproject.spark.dynamodb

import com.amazonaws.client.builder.AwsClientBuilder.EndpointConfiguration
import com.amazonaws.services.dynamodbv2.document.{DynamoDB, Item}
import com.amazonaws.services.dynamodbv2.local.main.ServerRunner
import com.amazonaws.services.dynamodbv2.local.server.DynamoDBProxyServer
import com.amazonaws.services.dynamodbv2.model.{AttributeDefinition, CreateTableRequest, KeySchemaElement, ProvisionedThroughput}
import com.amazonaws.services.dynamodbv2.{AmazonDynamoDB, AmazonDynamoDBClientBuilder}
import org.apache.spark.sql.SparkSession
import org.scalatest.{BeforeAndAfterAll, FunSuite}

class AbstractInMemoryTest extends FunSuite with BeforeAndAfterAll {

    val server: DynamoDBProxyServer = ServerRunner.createServerFromCommandLineArgs(Array("-inMemory"))

    val client: AmazonDynamoDB = AmazonDynamoDBClientBuilder.standard()
        .withEndpointConfiguration(new EndpointConfiguration(System.getProperty("aws.dynamodb.endpoint"), "us-east-1"))
        .build()
    val dynamoDB: DynamoDB = new DynamoDB(client)

    val spark: SparkSession = SparkSession.builder
        .master("local")
        .appName(this.getClass.getName)
        .getOrCreate()

    spark.sparkContext.setLogLevel("ERROR")

    override def beforeAll(): Unit = {
        server.start()

        // Create a test table.
        dynamoDB.createTable(new CreateTableRequest()
            .withTableName("TestFruit")
            .withAttributeDefinitions(new AttributeDefinition("name", "S"))
            .withKeySchema(new KeySchemaElement("name", "HASH"))
            .withProvisionedThroughput(new ProvisionedThroughput(5L, 5L)))

        // Populate with test data.
        val table = dynamoDB.getTable("TestFruit")
        for ((name, color, weight) <- Seq(
            ("apple", "red", 0.2), ("banana", "yellow", 0.15), ("watermelon", "red", 0.5),
            ("grape", "green", 0.01), ("pear", "green", 0.2), ("kiwi", "green", 0.05),
            ("blackberry", "purple", 0.01), ("blueberry", "purple", 0.01), ("plum", "purple", 0.1)
        )) {
            table.putItem(new Item()
                .withString("name", name)
                .withString("color", color)
                .withDouble("weightKg", weight))
        }
    }

    override def afterAll(): Unit = {
        client.deleteTable("TestFruit")
        server.stop()
    }

} 
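
A concrete suite can extend this base class and use the dynamoDB member it exposes to read the seeded rows back. The sketch below uses only the AWS document API already imported above; the suite name and assertions are illustrative.

class FruitSmokeTest extends AbstractInMemoryTest {
  test("seeded item is readable from the in-memory table") {
    // Items were written with "name" as the hash key in beforeAll().
    val apple = dynamoDB.getTable("TestFruit").getItem("name", "apple")
    assert(apple.getString("color") == "red")
    assert(math.abs(apple.getDouble("weightKg") - 0.2) < 1e-9)
  }
}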
Example 51
Source File: TargetTest.scala    From bazel-deps   with MIT License 5 votes vote down vote up
package com.github.johnynek.bazel_deps
import java.io.File

import cats.implicits._

import org.scalatest.FunSuite
import org.scalatest.prop.PropertyChecks._
import WriterGenerators._

import scala.util.{Failure, Success}

class TargetTestTest extends FunSuite {
  test ("Test we can serialize and round trip via the string format") {
    val separator = "|||"
    forAll (targetGen) { target =>

      val rt = target.listStringEncoding(separator).flatMap{ e => Target.fromListStringEncoding(separator, e)}

      val rtV = rt.foldMap(IO.fileSystemExec(new File("/tmp"))) match {
        case Failure(err) =>
          fail("Failure during IO:", err)
        case Success(result) =>
          result
      }
      assert(rtV === target)
    }
  }
} 
Example 52
Source File: ParseTestUtil.scala    From bazel-deps   with MIT License 5 votes vote down vote up
package com.github.johnynek.bazel_deps

import org.scalatest.FunSuite
import org.scalatest.prop.PropertyChecks._

object ParseTestUtil extends FunSuite {
  def decode(str: String): Model = {
    val Right(mod) = Decoders.decodeModel(Yaml, str)
    mod
  }

  def law(model: Model) = {
    val str = model.toDoc.render(70)
    val decoded = decode(str)
    // if (decoded != model) {
    //   println(str)
    //   println("------")
    //   println(decoded.toDoc.render(70))
    // }
    assert(decoded == model || decoded.flatten === model.flatten)
    assert(decoded.toDoc.render(70) === str)
  }

  def roundTripsTo(input: String, output: String) = {
    val mod = decode(input)
    val modStr = mod.toDoc.render(70)
    //assert(decode(modStr) === mod)
    // println(input)
    // println("------")
    // println(modStr)
    assert(modStr === output)
  }

} 
Example 53
Source File: ParseGeneratedDocTest.scala    From bazel-deps   with MIT License 5 votes vote down vote up
package com.github.johnynek.bazel_deps

import org.scalacheck.Gen
import org.scalatest.FunSuite
import org.scalatest.prop.PropertyChecks._
import ParseTestUtil._

class ParseGeneratedDocTest extends FunSuite {
  test("parse randomly generated Model.toDoc") {
    // this test is slow and takes a lot of memory sadly
    implicit val generatorDrivenConfig =
      PropertyCheckConfig(minSuccessful = 50)

    forAll(ModelGenerators.modelGen)(law _)
  }

  test("Dependencies.normalize laws") {

    val genList = Gen.listOf(Gen.zip(ModelGenerators.artifactOrProjGen, ModelGenerators.projectRecordGen(Language.Java, Nil)))

    forAll(genList) { lp =>
      val output = Dependencies.normalize(lp)
      assert(lp.size >= output.size)
      val flat1 = lp.flatMap { case (a, p) => p.flatten(a) }
      val flat2 = output.flatMap { case (a, p) => p.flatten(a) }
      assert(flat1.toSet == flat2.toSet)
    }
  }
} 
Example 54
Source File: IteratorProcessorTest.scala    From vm   with GNU Affero General Public License v3.0 5 votes vote down vote up
package org.mmadt.processor.obj.value

import org.mmadt.language.LanguageException
import org.mmadt.processor.Processor
import org.mmadt.storage.StorageFactory._
import org.scalatest.prop.TableDrivenPropertyChecks
import org.scalatest.{FunSuite, Matchers}


// Class declaration and test name reconstructed (illustrative) so the snippet compiles.
class IteratorProcessorTest extends FunSuite with Matchers with TableDrivenPropertyChecks {
  test("incompatible obj mappings raise LanguageException") {
    assertThrows[LanguageException] {
      int(10) ===> bool.and(bfalse)
    }

    assertThrows[LanguageException] {
      int(10) ===> str
    }

    assertThrows[LanguageException] {
      int(10) ===> str.q(2)
    }

    assertThrows[LanguageException] {
      str("hello") ===> bool
    }
  }
} 
Example 55
Source File: LoadInstTest.scala    From vm   with GNU Affero General Public License v3.0 5 votes vote down vote up
package org.mmadt.processor.inst.sideeffect
import org.mmadt.language.LanguageException
import org.mmadt.language.obj.`type`.__
import org.mmadt.storage.StorageFactory._
import org.scalatest.FunSuite

class LoadInstTest extends FunSuite {
  val file1: String = getClass.getResource("/load/source-1.mm").getPath
  test("[load] w/ [a] mapping") {
    assertResult(bfalse)(int(5).load(file1).a(__("person")))
    assertResult(btrue)(int(5).load(file1).a(__("vertex")))
    assertResult(btrue)(rec(str("name") -> str("marko"), str("age") -> int(29)).load(file1).a(__("person")))
    assertResult(bfalse)(rec(str("name") -> str("marko"), str("age") -> int(0)).load(file1).a(__("person")))
    assertResult(bfalse)(rec(str("age") -> int(29)).load(file1).a(__("person")))
    assertResult(btrue)(rec(str("name") -> str("marko"), str("age") -> int(29)).load(file1).get("age").a(__("nat")))
    assertResult(bfalse)(rec(str("name") -> str("marko"), str("age") -> int(0)).load(file1).get("age").a(__("nat")))
  }
  test("[load] w/ [as] mapping") {
    assertResult(rec(str("id") -> int(5)).named("vertex"))(int(5).load(file1).as(__("vertex")))
    assertThrows[LanguageException] {
      int(5).load(file1).as(__("person"))
    }
    //
    assertResult(rec(str("name") -> str("marko"), str("age") -> int(29).named("nat")).named("person"))(rec(str("name") -> str("marko"), str("age") -> int(29)).load(file1).as(__("person")))
    assertThrows[LanguageException] {
      rec(str("name") -> str("marko"), str("age") -> int(0)).load(file1).as(__("person"))
    }
    //
    assertResult(int(29).named("nat"))(rec(str("name") -> str("marko"), str("age") -> int(29)).load(file1).get("age").as(__("nat")))
    assertThrows[LanguageException] {
      println(rec(str("name") -> str("marko"), str("age") -> int(0)).load(file1).get("age").as(__("nat")))
    }
  }
} 
Example 56
Source File: PutInstTest.scala    From vm   with GNU Affero General Public License v3.0 5 votes vote down vote up
package org.mmadt.processor.inst.sideeffect

import org.mmadt.language.obj.value.StrValue
import org.mmadt.language.obj.{Obj, Rec}
import org.mmadt.storage.StorageFactory._
import org.scalatest.FunSuite

import scala.collection.immutable.ListMap


class PutInstTest extends FunSuite {
  test("[put] w/ rec value") {
    val marko: Rec[StrValue, Obj] = rec(str("name") -> str("marko"))
    val markoFull = marko.put(str("age"), int(29))
    assertResult(rec(str("name") -> str("marko"), str("age") -> int(29)))(markoFull)
    assertResult(rec(str("name") -> str("marko"), str("age") -> int(29)))(markoFull.put(str("name"), str("marko")))
    assertResult(rec(str("name") -> str("kuppitz"), str("age") -> int(29)))(markoFull.put(str("name"), str("kuppitz")))
    assertResult(rec(str("name") -> str("marko"), str("age") -> int(28)))(markoFull.put(str("age"), int(28)))
    // test rec key/value ordering
    assertResult(ListMap(str("name") -> str("kuppitz"), str("age") -> int(29)))(markoFull.put(str("name"), str("kuppitz")).gmap)
    assertResult(ListMap(str("name") -> str("marko"), str("age") -> int(28)))(markoFull.put(str("age"), int(28)).gmap)
    assertResult(int(29))(markoFull.get(str("age")))
  }
} 
Example 57
Source File: GetInstTest.scala    From vm   with GNU Affero General Public License v3.0 5 votes vote down vote up
package org.mmadt.processor.inst.map

import org.mmadt.language.LanguageException
import org.mmadt.language.obj.Obj._
import org.mmadt.language.obj.op.map.GetOp
import org.mmadt.language.obj.value.{IntValue, StrValue}
import org.mmadt.language.obj.{Lst, Obj}
import org.mmadt.storage.StorageFactory._
import org.scalatest.FunSuite
import org.scalatest.prop.{TableDrivenPropertyChecks, TableFor3}


class GetInstTest extends FunSuite with TableDrivenPropertyChecks {

  test("[get] w/ lst values") {
    val check: TableFor3[Lst[StrValue], IntValue, StrValue] =
      new TableFor3[Lst[StrValue], IntValue, StrValue](("list", "key", "value"),
        ("a" |, 0, str("a")),
        ("a" | "b", 0, "a"),
        ("a" | "b" | "c", 1, "b"),
        ("d" | "b" | "c", 2, "c"),
      )
    forEvery(check) { (alst, akey, avalue) => {
      assertResult(avalue)(alst.get(akey))
      assertResult(avalue)(GetOp(akey).exec(alst.asInstanceOf[Obj with GetOp[IntValue, StrValue]]))
    }
    }
  }

  test("[get] w/ lst value exception") {
    assertThrows[LanguageException] {
      (str("a") | "b" | "c").get(-1)
    }
    assertThrows[LanguageException] {
      (str("a") | "b" | "c").get(3)
    }
    assertThrows[LanguageException] {
      lst("|").get(0)
    }
  }

  test("[get] lineage") {
    val marko = rec(str("name") -> str("marko"), str("age") -> int(29))
    assertResult(2)(marko.get(str("name"), str).plus(" rodriguez").trace.length)
  }


  test("[get] w/ rec value") {
    val marko = rec(str("name") -> str("marko"), str("age") -> int(29))
    assertResult(str("marko"))(marko.get(str("name")))
    assertResult(int(29))(marko.get(str("age")))
    //assertThrows[LanguageException] {
    assertResult(zeroObj)(marko.get(str("bad-key")))
    //}
  }
} 
Example 58
Source File: EqsInstTest.scala    From vm   with GNU Affero General Public License v3.0 5 votes vote down vote up
package org.mmadt.processor.inst.map

import org.mmadt.language.obj.Obj
import org.mmadt.language.obj.`type`.{Type, __}
import org.mmadt.language.obj.op.map.EqsOp
import org.mmadt.language.obj.value.Value
import org.mmadt.language.obj.value.strm.Strm
import org.mmadt.storage.StorageFactory._
import org.scalatest.FunSuite
import org.scalatest.prop.{TableDrivenPropertyChecks, TableFor4}


class EqsInstTest extends FunSuite with TableDrivenPropertyChecks {

  test("[eq] value, type, strm, anon combinations") {
    val starts: TableFor4[Obj, Obj, Obj, String] =
      new TableFor4[Obj, Obj, Obj, String](("input", "type", "result", "kind"),
        //////// INT
        (int.q(qZero), __.eqs(str.q(qZero)), btrue.q(qZero), "value"),
        (int(2), int.eqs(1), bfalse, "value"), // value * value = value
        (int(2).q(10), __.eqs(1), bfalse.q(10), "value"), // value * value = value
        (int(2).q(10), __.eqs(1).q(20), bfalse.q(200), "value"), // value * value = value
        (int(2), __.eqs(int(1).q(10)), bfalse, "value"), // value * value = value
        (int(2), __.eqs(int), btrue, "value"), // value * type = value
        (int(2), __.eqs(__.mult(int)), bfalse, "value"), // value * anon = value
        (int, __.eqs(int(2)), int.eqs(int(2)), "type"), // type * value = type
        (int.q(10), __.eqs(int(2)), int.q(10).eqs(2), "type"), // type * value = type
        (int, __.eqs(int), int.eqs(int), "type"), // type * type = type
        (int(1, 2, 3), __.eqs(2), bool(false, true, false), "strm"), // strm * value = strm
        (int(1, 2, 3), __.eqs(int(2).q(10)), bool(false, true, false), "strm"), // strm * value = strm
        (int(1, 2, 3), __.eqs(int(2)).q(10), bool(bfalse.q(10), btrue.q(10), bfalse.q(10)), "strm"), // strm * value = strm
        (int(1, 2, 3), __.eqs(int), bool(true, true, true), "strm"), // strm * type = strm
        (int(1, 2, 3), __.eqs(__.mult(int)), bool(true, false, false), "strm"), // strm * anon = strm
        //////// REAL
        (real(2.0), __.eqs(1.0), bfalse, "value"), // value * value = value
        (real(2.0), __.eqs(real), btrue, "value"), // value * type = value
        (real(2.0), __.eqs(__.mult(real)), bfalse, "value"), // value * anon = value
        (real, __.eqs(real(2.0)), real.eqs(2.0), "type"), // type * value = type
        (real, __.eqs(real), real.eqs(real), "type"), // type * type = type
        (real(1.0, 2.0, 3.0), __.eqs(2.0), bool(false, true, false), "strm"), // strm * value = strm
        (real(1.0, 2.0, 3.0), __.eqs(real), bool(true, true, true), "strm"), // strm * type = strm
        (real(1.0, 2.0, 3.0), __.eqs(__.mult(real)), bool(true, false, false), "strm"), // strm * anon = strm
      )
    forEvery(starts) { (input, atype, result, kind) => {
      List(
        EqsOp(atype.trace.head._2.arg0).q(atype.trace.head._2.q).exec(input),
        input.compute(asType(atype)),
        input ===> (input.range ===> atype),
        input ===> atype,
        input ==> asType(atype)).foreach(x => {
        assertResult(result)(x)
        kind match {
          case "value" => assert(x.isInstanceOf[Value[_]])
          case "type" => assert(x.isInstanceOf[Type[_]])
          case "strm" => assert(x.isInstanceOf[Strm[_]])
        }
      })
    }
    }
  }
} 
Example 59
Source File: PathInstTest.scala    From vm   with GNU Affero General Public License v3.0 5 votes vote down vote up
package org.mmadt.processor.inst.map

import org.mmadt.language.obj.`type`.__
import org.mmadt.language.obj.op.map.PathOp
import org.mmadt.language.obj.value.IntValue
import org.mmadt.language.obj.{Lst, Obj}
import org.mmadt.storage.StorageFactory._
import org.scalatest.FunSuite
import org.scalatest.prop.{TableDrivenPropertyChecks, TableFor3}

class PathInstTest extends FunSuite with TableDrivenPropertyChecks {
  test("[path] value, type, strm") {
    val starts: TableFor3[Obj, Obj, Obj] =
      new TableFor3[Obj, Obj, Obj](("input", "type", "result"),
        (str("a"), __.plus("b").plus("c").path(), str("a") `;` "ab" `;` "abc"),
        (str("a"), __.plus("b").plus(__.plus("c").plus("d")).plus("e").path(), str("a") `;` "ab" `;` "ababcd" `;` "ababcde"),
        //(str("a"), __.plus("b").plus(__.plus("c").plus("d")).plus("e").path().get(1).path(), str("a")`;` "ab"`;` "ababcd"`;` "ababcde"),
        (int(1, 2, 3), __.plus(1).path(), strm(List[Lst[IntValue]](int(1) `;` 2, int(2) `;` 3, int(3) `;` 4))),
        (int(1, 2, 3), __.plus(1).plus(2).path(), strm(List[Lst[IntValue]](int(1) `;` 2 `;` 4, int(2) `;` 3 `;` 5, int(3) `;` 4 `;` 6))),
      )
    forEvery(starts) { (input, atype, result) => {
      List(
        //new mmlangScriptEngineFactory().getScriptEngine.eval(s"${input}${atype}"),
        PathOp().q(atype.trace.head._2.q).exec(input),
        input.compute(asType(atype)),
        input ===> atype.start(),
      ).foreach(x => {
        assertResult(result)(x)
      })
    }
    }
  }
  test("[path] w/ int value") {
    assertResult(int(0) `;` int(1) `;` int(3) `;` int(6) `;` int(10))(int(0).plus(1).plus(2).plus(3).plus(4).path())
  }

} 
Example 60
Source File: LtInstTest.scala    From vm   with GNU Affero General Public License v3.0 5 votes vote down vote up
package org.mmadt.processor.inst.map

import org.mmadt.language.mmlang.mmlangScriptEngineFactory
import org.mmadt.language.obj.Obj
import org.mmadt.language.obj.`type`.{Type, __}
import org.mmadt.language.obj.value.Value
import org.mmadt.language.obj.value.strm.Strm
import org.mmadt.storage.StorageFactory.{bfalse, bool, btrue, int, real}
import org.scalatest.FunSuite
import org.scalatest.prop.{TableDrivenPropertyChecks, TableFor3}


class LtInstTest extends FunSuite with TableDrivenPropertyChecks {

  test("[lt] value, type, strm, anon combinations") {
    val starts: TableFor3[Obj, Obj, String] =
      new TableFor3[Obj, Obj, String](("query", "result", "type"),
        //////// INT
        (int(2).lt(1), bfalse, "value"), // value * value = value
        (int(2).q(10).lt(1), bfalse.q(10), "value"), // value * value = value
        (int(2).q(10).lt(1).q(20), bfalse.q(200), "value"), // value * value = value
        (int(2).lt(int(1).q(10)), bfalse, "value"), // value * value = value
        (int(2).lt(int), bfalse, "value"), // value * type = value
        (int(2).lt(__.mult(int)), btrue, "value"), // value * anon = value
        (int.lt(int(2)), int.lt(int(2)), "type"), // type * value = type
        (int.q(10).lt(int(2)), int.q(10).lt(int(2)), "type"), // type * value = type
        (int.lt(int), int.lt(int), "type"), // type * type = type
        (int(1, 2, 3).lt(2), bool(true, false, false), "strm"), // strm * value = strm
        (int(1, 2, 3).lt(int(2).q(10)), bool(true, false, false), "strm"), // strm * value = strm
        (int(1, 2, 3) ==> __.lt(int(2)).q(10), bool(btrue.q(10), bfalse.q(10), bfalse.q(10)), "strm"), // strm * value = strm
        (int(1, 2, 3).lt(int), bool(false, false, false), "strm"), // strm * type = strm
        (int(1, 2, 3).lt(__.mult(int)), bool(false, true, true), "strm"), // strm * anon = strm
        //////// REAL
        (real(2.0).lt(1.0), bfalse, "value"), // value * value = value
        (real(2.0).lt(real), bfalse, "value"), // value * type = value
        (real(2.0).lt(__.mult(real)), btrue, "value"), // value * anon = value
        (real.lt(real(2.0)), real.lt(2.0), "type"), // type * value = type
        (real.lt(real), real.lt(real), "type"), // type * type = type
        (real(1.0, 2.0, 3.0).lt(2.0), bool(true, false, false), "strm"), // strm * value = strm
        (real(1.0, 2.0, 3.0).lt(real), bool(false, false, false), "strm"), // strm * type = strm
        (real(1.0, 2.0, 3.0).lt(__.mult(real)), bool(false, true, true), "strm"), // strm * anon = strm
      )
    forEvery(starts) { (query, result, atype) => {
      //assertResult(result)(new mmlangScriptEngineFactory().getScriptEngine.eval(s"${query}"))
      assertResult(result)(query)
      atype match {
        case "value" => assert(query.isInstanceOf[Value[_]])
        case "type" => assert(query.isInstanceOf[Type[_]])
        case "strm" => assert(query.isInstanceOf[Strm[_]])
      }
    }
    }
  }
} 
Example 61
Source File: JuxtaInstTest.scala    From vm   with GNU Affero General Public License v3.0 5 votes vote down vote up
package org.mmadt.processor.inst.map
import org.mmadt.language.Tokens
import org.mmadt.language.mmlang.mmlangScriptEngineFactory
import org.mmadt.language.obj._
import org.mmadt.language.obj.op.trace.JuxtaOp
import org.mmadt.storage.StorageFactory._
import org.scalatest.FunSuite
import org.scalatest.prop.{TableDrivenPropertyChecks, TableFor2}

class JuxtaInstTest extends FunSuite with TableDrivenPropertyChecks {

  test("[juxta] value, type, strm, anon combinations") {
    val starts: TableFor2[List[Obj], Obj] =
      new TableFor2[List[Obj], Obj](("query", "result"),
        // value/value
        (List(int(1).q(5)), int(1).q(5)),
        (List(int(1), int(2), int(3)), int(3)),
        (List(int(1), int(2).q(10), int(3)), int(3).q(10)),
        (List(int(1), int(2).q(10), int(3).q(2)), int(3).q(20)),
        // value/type
        (List[Int](int(1), int.plus(1)), int(2)),
        (List[Int](int(1), int.plus(10)), int(11)),
        (List[Int](int(1), int.plus(int)), int(2)),
        (List[Int](int(1), int.plus(int.plus(2))), int(4)),
        (List[Obj](int(1), int.plus(int.plus(2)).as(str), str.plus("a")), str("4a")),
        (List[Int](int(1), int.plus(1).q(0)), int(2).q(qZero)),
        // type/value
        (List[Int](int.plus(1), int(1)), int(1)),
        (List[Str](str, str("marko")), str("marko")),
        (List[Real](real.plus(1.0).q(10), real(13.0).q(2)), real(13.0).q(20)),
        // type/type
        (List(str), str),
        (List(str, str.id()), str.id()),
        (List(int, int.plus(1), int.plus(2)), int.plus(1).plus(2)),
      )
    forEvery(starts) { (left, right) => {
      println(left.map(_.toString).reduce((a, b) => a + Tokens.juxt_op + b))
     // assertResult(right)(new mmlangScriptEngineFactory().getScriptEngine.eval(s"${left.map(_.toString).reduce((a, b) => a + "=>" + b)}"))
      assertResult(right)(left.reduce((a, b) => a `=>` b))
      assertResult(right)(left.reduce((a, b) => JuxtaOp(b).exec(a)))
    }
    }
  }
} 
Example 62
Source File: GteInstTest.scala    From vm   with GNU Affero General Public License v3.0 5 votes vote down vote up
package org.mmadt.processor.inst.map

import org.mmadt.language.obj.Obj
import org.mmadt.language.obj.`type`.{Type, __}
import org.mmadt.language.obj.value.Value
import org.mmadt.language.obj.value.strm.Strm
import org.mmadt.storage.StorageFactory.{bfalse, bool, btrue, int, real}
import org.scalatest.FunSuite
import org.scalatest.prop.{TableDrivenPropertyChecks, TableFor3}

class GteInstTest extends FunSuite with TableDrivenPropertyChecks {

  test("[gte] value, type, strm, anon combinations") {
    val starts: TableFor3[Obj, Obj, String] =
      new TableFor3[Obj, Obj, String](("query", "result", "type"),
        //////// INT
        (int(2).gte(1), btrue, "value"), // value * value = value
        (int(2).q(10).gte(1), btrue.q(10), "value"), // value * value = value
        (int(2).q(10).gte(1).q(20), btrue.q(200), "value"), // value * value = value
        (int(2).gte(int(1).q(10)), btrue, "value"), // value * value = value
        (int(2).gte(int), btrue, "value"), // value * type = value
        (int(2).gte(__.mult(int)), bfalse, "value"), // value * anon = value
        (int.gte(int(2)), int.gte(int(2)), "type"), // type * value = type
        (int.q(10).gte(int(2)), int.q(10).gte(2), "type"), // type * value = type
        (int.gte(int), int.gte(int), "type"), // type * type = type
        (int(1, 2, 3).gte(2), bool(false, true, true), "strm"), // strm * value = strm
        (int(1, 2, 3).gte(int(2).q(10)), bool(false, true, true), "strm"), // strm * value = strm
        //(int(1, 2, 3).gte(int(2)).q(10), bool(bfalse.q(10), btrue.q(10), btrue.q(10)), "strm"), // strm * value = strm
        //(int(1, 2, 3).gte(int(2)).q(10).id(), bool(bfalse.q(10), btrue.q(10), btrue.q(10)), "strm"), // strm * value = strm
        //(int(1, 2, 3).gte(int(2)).q(10).id().q(5), bool(bfalse.q(50), btrue.q(50), btrue.q(50)), "strm"), // strm * value = strm
        (int(1, 2, 3).gte(int), bool(true, true, true), "strm"), // strm * type = strm
        (int(1, 2, 3).gte(__.mult(int)), bool(true, false, false), "strm"), // strm * anon = strm
        //////// REAL
        (real(2.0).gte(1.0), btrue, "value"), // value * value = value
        (real(2.0).gte(real), btrue, "value"), // value * type = value
        (real(2.0).gte(__.mult(real)), bfalse, "value"), // value * anon = value
        (real.gte(real(2.0)), real.gte(2.0), "type"), // type * value = type
        (real.gte(real), real.gte(real), "type"), // type * type = type
        (real(1.0, 2.0, 3.0).gte(2.0), bool(false, true, true), "strm"), // strm * value = strm
        (real(1.0, 2.0, 3.0).gte(real), bool(true, true, true), "strm"), // strm * type = strm
        (real(1.0, 2.0, 3.0).gte(__.mult(real)), bool(true, false, false), "strm"), // strm * anon = strm
      )
    forEvery(starts) { (query, result, atype) => {
      assertResult(result)(query)
      atype match {
        case "value" => assert(query.isInstanceOf[Value[_]])
        case "type" => assert(query.isInstanceOf[Type[_]])
        case "strm" => assert(query.isInstanceOf[Strm[_]])
      }
    }
    }
  }
} 
Example 63
Source File: MapInstTest.scala    From vm   with GNU Affero General Public License v3.0 5 votes vote down vote up
package org.mmadt.processor.inst.map

import org.mmadt.language.obj.Obj
import org.mmadt.language.obj.`type`.{Type, __}
import org.mmadt.language.obj.value.Value
import org.mmadt.language.obj.value.strm.Strm
import org.mmadt.storage.StorageFactory._
import org.scalatest.FunSuite
import org.scalatest.prop.{TableDrivenPropertyChecks, TableFor3}

 , "value"), // value * value = value
        (int(2).map(int), int(2), "value"), // value * type = value
        (int(2).map(__.mult(int)), int(4), "value"), // value * anon = value
        (int.map(int(2)), int.map(int(2)), "type"), // type * value = type
        (int.q(10).map(int(2)), int.q(10).map(int(2)), "type"), // type * value = type
        (int.map(int), int.map(int), "type"), // type * type = type
        (int(1, 2, 3).map(2), int(2, 2, 2), "strm"), // strm * value = strm
        //(int(1, 2, 3).map(int(2).q(10)), int(int(2).q(10), int(2).q(10), int(2).q(10)), "strm"), // strm * value = strm
        (int(1, 2, 3) ===> __.map(int(2)).q(10), int(int(2).q(10), int(2).q(10), int(2).q(10)), "strm"), // strm * value = strm
        (int(1, 2, 3).map(int), int(1, 2, 3), "strm"), // strm * type = strm
        (int(1, 2, 3).map(int.mult(int)), int(1, 4, 9), "strm"), // strm * type = strm
        (int(1, 2, 3).map(__.mult(int)), int(1, 4, 9), "strm"), // strm * anon = strm
        //////// REAL
        (real(2.0).map(real(1.0)), real(1.0), "value"), // value * value = value
        (real(2.0).map(real), real(2.0), "value"), // value * type = value
        (real(2.0).map(__.mult(real)), real(4.0), "value"), // value * anon = value
        (real.map(real(2.0)), real.map(2.0), "type"), // type * value = type
        (real.map(real), real.map(real), "type"), // type * type = type
        (real(1.0, 2.0, 3.0).map(2.0), real(2.0, 2.0, 2.0), "strm"), // strm * value = strm
        (real(1.0, 2.0, 3.0).map(real), real(1.0, 2.0, 3.0), "strm"), // strm * type = strm
        (real(1.0, 2.0, 3.0).map(__.mult(real)), real(1.0, 4.0, 9.0), "strm"), // strm * anon = strm
      )
    forEvery(starts) { (query, result, atype) => {
      //assertResult(result)(new mmlangScriptEngineFactory().getScriptEngine.eval(s"${query}"))
      assertResult(result)(query)
      atype match {
        case "value" => assert(query.isInstanceOf[Value[_]])
        case "type" => assert(query.isInstanceOf[Type[_]])
        case "strm" => assert(query.isInstanceOf[Strm[_]])
      }
    }
    }
  }

  test("[map] w/ values") {
    assertResult(int(5))(int(1).plus(1).map(int(5)))
    assertResult(int(2))(int(1).plus(1).map(int))
    assertResult(int(20))(int(1).plus(1).map(int.mult(10)))
  }
  test("[map] w/ types") {
    assertResult("int[plus,1][map,int]")(int.plus(1).map(int).toString)
    assertResult("int[plus,1][map,int[mult,10]]")(int.plus(1).map(int.mult(10)).toString)
    assertResult(int(200))(int(18) ==> int.plus(1).map(int.mult(10)).plus(10))
    assertResult("int[plus,1][map,int[mult,10]]")(int.plus(1).map(int.mult(10)).toString)
    //
    assertResult(int(60))(int(5) ==> int.plus(1).map(int.mult(10)))

  }
} 
Example 64
Source File: ZeroInstTest.scala    From vm   with GNU Affero General Public License v3.0 5 votes vote down vote up
package org.mmadt.processor.inst.map

import org.mmadt.language.mmlang.mmlangScriptEngineFactory
import org.mmadt.language.obj.Obj
import org.mmadt.language.obj.`type`.__
import org.mmadt.language.obj.op.map.ZeroOp
import org.mmadt.storage.StorageFactory._
import org.scalatest.FunSuite
import org.scalatest.prop.{TableDrivenPropertyChecks, TableFor3}


class ZeroInstTest extends FunSuite with TableDrivenPropertyChecks {
  test("[zero] value, type, strm") {
    val starts: TableFor3[Obj, Obj, Obj] =
      new TableFor3[Obj, Obj, Obj](("input", "type", "result"),
        //////// INT
        (int(2), __.zero(), int(0)),
        (int(-2), __.zero(), int(0)),
        (int, __.zero(), int(0)),
        (int(1, 2, 3), __.zero(), int(0).q(3)),
        (int(1, 2), __.plus(1).q(10).zero(), int(0).q(20)),
        //////// REAL
        (real(2.0), __.zero(), real(0.0)),
        (real(-2.0), __.zero(), real(0.0)),
        (real, __.zero(), real(0.0)),
        (real(-1.0, -2.0, -3.0), __.zero(), real(0.0).q(3)),
        (real(-1.0, -2.0, -3.0), __.plus(1.0).q(10).zero(), real(0.0).q(30)),
        (real(-1.0, -2.0, -3.0), __.plus(1.0).q(20).zero(), real(0.0).q(60)),
        //////// STR
        (str("a"), __.zero(), str("")),
        (str("b"), __.zero(), str("")),
        (str, __.zero(), str("")),
        (str("a", "b", "c"), __.zero(), str("").q(3)),
        //////// PROD
        //(`;`(str("a")), __.zero(), `;`()),
        //(prod(prod(str("a")), prod(str("b")), prod(str("c"))).zero(), prod().q(3)),
      )
    forEvery(starts) { (input, atype, result) => {
      List(
        //new mmlangScriptEngineFactory().getScriptEngine.eval(s"${input}${atype}"),
        ZeroOp().q(atype.trace.head._2.q).exec(input),
        input.compute(asType(atype)),
        input ===> atype.start(),
      ).foreach(x => {
        assertResult(result)(x)
      })
    }
    }
  }
} 
Example 65
Source File: IdInstTest.scala    From vm   with GNU Affero General Public License v3.0 5 votes vote down vote up
package org.mmadt.processor.inst.map

import org.mmadt.language.mmlang.mmlangScriptEngineFactory
import org.mmadt.language.obj.Obj
import org.mmadt.language.obj.`type`.Type
import org.mmadt.language.obj.value.Value
import org.mmadt.language.obj.value.strm.Strm
import org.mmadt.storage.StorageFactory._
import org.scalatest.FunSuite
import org.scalatest.prop.{TableDrivenPropertyChecks, TableFor3}


class IdInstTest extends FunSuite with TableDrivenPropertyChecks {
  test("[id] value, type, strm") {
    val starts: TableFor3[Obj, Obj, String] =
      new TableFor3[Obj, Obj, String](("query", "result", "type"),
        //////// INT
        (int(2).id(), int(2), "value"),
        (int(-2).id(), int(-2), "value"),
        (int.id(), int.id(), "type"),
        (int(1, 2, 3).id(), int(1, 2, 3), "strm"),
        //////// REAL
        (real(2.0).id(), real(2.0), "value"),
        (real(2.0).id().q(10), real(2.0).q(10), "value"),
        (real(2.0).q(5).id().q(10), real(2.0).q(50), "value"),
        (real(-2.0).one(), real(1.0), "value"),
        (real.id(), real.id(), "type"),
        (real(1.0, 2.0, 3.0).id(), real(1.0, 2.0, 3.0), "strm"),
        (real(1.0, 2.0, 3.0).id().q(10), real(real(1.0).q(10), real(2.0).q(10), real(3.0).q(10)), "strm"),
        (real(1.0, 2.0, 3.0).id().q(10).id(), real(real(1.0).q(10), real(2.0).q(10), real(3.0).q(10)), "strm"),
        (real(1.0, 2.0, 3.0).id().q(10).id().q(5), real(real(1.0).q(50), real(2.0).q(50), real(3.0).q(50)), "strm"),
        //////// STR
        (str("a").id(), str("a"), "value"),
        (str.id(), str.id(), "type"),
        (str("a", "b", "c").id(), str("a", "b", "c"), "strm"),
      )
    forEvery(starts) { (query, result, atype) => {
      // assertResult(result)(new mmlangScriptEngineFactory().getScriptEngine.eval(s"[${query}]"))
      assertResult(result)(query)
      atype match {
        case "value" => assert(query.isInstanceOf[Value[_]])
        case "type" => assert(query.isInstanceOf[Type[_]])
        case "strm" => assert(query.isInstanceOf[Strm[_]])
      }
    }
    }
  }
} 
Example 66
Source File: PlusInstTest.scala    From vm   with GNU Affero General Public License v3.0 5 votes vote down vote up
package org.mmadt.processor.inst.map

import org.mmadt.language.obj.Obj._
import org.mmadt.language.obj.`type`.{IntType, RealType, Type, __}
import org.mmadt.language.obj.op.map.PlusOp
import org.mmadt.language.obj.value.strm.Strm
import org.mmadt.language.obj.value.{IntValue, RealValue, Value}
import org.mmadt.language.obj.{Int, Obj, Real}
import org.mmadt.storage.StorageFactory._
import org.scalatest.FunSuite
import org.scalatest.prop.{TableDrivenPropertyChecks, TableFor4}

class PlusInstTest extends FunSuite with TableDrivenPropertyChecks {
  test("[plus] value, type, strm, anon combinations") {
    val starts: TableFor4[Obj, Obj, Obj, String] =
      new TableFor4[Obj, Obj, Obj, String](("input", "type", "result", "kind"),
        //////// INT
        (int(2), __.plus(2), int(4), "value"), // value * value = value
        (int(2).q(10), __.plus(2), int(4).q(10), "value"), // value * value = value
        (int(2).q(10), __.plus(2).q(20), int(4).q(200), "value"), // value * value = value
        (int(2), __.plus(int(2).q(10)), int(4), "value"), // value * value = value
        (int(2), __.plus(int), int(4), "value"), // value * type = value
        (int(2), __.plus(__.plus(int)), int(6), "value"), // value * anon = value
        (int, __.plus(int(2)), int.plus(int(2)), "type"), // type * value = type
        (int.q(10), __.plus(int(2)), int.q(10).plus(int(2)), "type"), // type * value = type
        (int, __.plus(int), int.plus(int), "type"), // type * type = type
        (int(1, 2, 3), __.plus(2), int(3, 4, 5), "strm"), // strm * value = strm
        (int(1, 2, 3), __.plus(int(2).q(10)), int(3, 4, 5), "strm"), // strm * value = strm
        (int(1, 2, 3), int.q(3).plus(int(2)).q(10), int(int(3).q(10), int(4).q(10), int(5).q(10)), "strm"), // strm * value = strm
        (int(1, 2, 3), __.plus(int(2)).q(10), int(int(3).q(10), int(4).q(10), int(5).q(10)), "strm"), // strm * value = strm
        (int(1, 2, 3), __.plus(int), int(2, 4, 6), "strm"), // strm * type = strm
        (int(1, 2, 3), __.plus(__.plus(int)), int(3, 6, 9), "strm"), // strm * anon = strm
        //////// REAL
        (real(2.0), __.plus(2.0), real(4), "value"), // value * value = value
        (real(2.0), __.plus(real), real(4.0), "value"), // value * type = value
        (real(2.0), __.plus(__.plus(real)), real(6.0), "value"), // value * anon = value
        (real, __.plus(real(2.0)), real.plus(real(2.0)), "type"), // type * value = type
        (real, __.plus(real), real.plus(real), "type"), // type * type = type
        (real(1.0, 2.0, 3.0), __.plus(2.0), real(3.0, 4.0, 5.0), "strm"), // strm * value = strm
        (real(1.0, 2.0, 3.0), __.plus(real), real(2.0, 4.0, 6.0), "strm"), // strm * type = strm
        (real(1.0, 2.0, 3.0), __.plus(__.plus(real)), real(3.0, 6.0, 9.0), "strm"), // strm * anon = strm
      )
    forEvery(starts) { (input, atype, result, kind) => {
      List(
        // new mmlangScriptEngineFactory().getScriptEngine.eval(s"${input}${atype}"),
        PlusOp(atype.trace.head._2.arg0).q(atype.trace.head._2.q).exec(input),
        input.compute(asType(atype)),
        input ===> (input.range ===> atype),
        input ===> atype,
        input ==> asType(atype)).foreach(x => {
        assertResult(result)(x)
        kind match {
          case "value" => assert(x.isInstanceOf[Value[_]])
          case "type" => assert(x.isInstanceOf[Type[_]])
          case "strm" => assert(x.isInstanceOf[Strm[_]])
        }
      })
    }
    }
  }
  ///////////////////////////////////////////////////////////////////////

  test("[plus] w/ int") {
    assertResult(int(4))(int(1).plus(int(3))) // value * value = value
    assert(int(1).plus(int(3)).isInstanceOf[IntValue])
    assert(int(1).plus(int(3)).isInstanceOf[Int])
    assertResult(int(2))(int(1).plus(int)) // value * type = value
    assert(int(1).plus(int).isInstanceOf[IntValue])
    assert(int(1).plus(int).isInstanceOf[Int])
    assertResult(int.plus(int(3)))(int.plus(int(3))) // type * value = type
    assert(int.plus(int(3)).isInstanceOf[IntType])
    assert(int.plus(int(3)).isInstanceOf[Int])
    assertResult(int.plus(int))(int.plus(int)) // type * type = type
    assert(int.plus(int).isInstanceOf[IntType])
    assert(int.plus(int).isInstanceOf[Int])
  }

  test("[plus] w/ real") {
    assertResult(real(4.0))(real(1).plus(real(3))) // value * value = value
    assert(real(1).plus(real(3)).isInstanceOf[RealValue])
    assert(real(1).plus(real(3)).isInstanceOf[Real])
    assertResult(real(2))(real(1).plus(real)) // value * type = value
    assert(real(1).plus(real).isInstanceOf[RealValue])
    assert(real(1).plus(real).isInstanceOf[Real])
    assertResult(real.plus(real(3)))(real.plus(real(3))) // type * value = type
    assert(real.plus(real(3)).isInstanceOf[RealType])
    assert(real.plus(real(3)).isInstanceOf[Real])
    assertResult(real.plus(real))(real.plus(real)) // type * type = type
    assert(real.plus(real).isInstanceOf[RealType])
    assert(real.plus(real).isInstanceOf[Real])
  }

  
} 
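Several rows above (e.g. int(2).q(10) pushed through __.plus(2).q(20) yielding int(4).q(200)) check that the input quantifier and the instruction quantifier compose multiplicatively. A sketch of that rule in plain Scala, assuming quantifiers are simple (low, high) ranges (composeQ is an illustrative name, not the project API):

// Sketch of the multiplicative quantifier rule exercised by the {10} x {20} => {200} rows.
def composeQ(input: (Long, Long), inst: (Long, Long)): (Long, Long) =
  (input._1 * inst._1, input._2 * inst._2)

assert(composeQ((10L, 10L), (20L, 20L)) == (200L, 200L))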
Example 67
Source File: OneInstTest.scala    From vm   with GNU Affero General Public License v3.0 5 votes vote down vote up
package org.mmadt.processor.inst.map

import org.mmadt.language.mmlang.mmlangScriptEngineFactory
import org.mmadt.language.obj.Obj
import org.mmadt.language.obj.`type`.__
import org.mmadt.storage.StorageFactory._
import org.scalatest.FunSuite
import org.scalatest.prop.{TableDrivenPropertyChecks, TableFor2}


class OneInstTest extends FunSuite with TableDrivenPropertyChecks {
  test("[one] value, type, strm") {
    val starts: TableFor2[Obj, Obj] =
      new TableFor2[Obj, Obj](("query", "result"),
        //////// INT
        (int(2).one(), int(1)),
        (int(2).one().q(10), int(1).q(10)),
        (int(2).q(10).one(), int(1).q(10)),
        (int(2).q(10).one().q(20), int(1).q(200)),
        (int(-2).one(), int(1)),
        (int.one(), int(1)),
        (int.one().q(10), int(1).q(10)),
        (int.q(10).one(), int(1).q(10)),
        (int.q(10).one().q(20), int(1).q(200)),
        (int(1, 2, 3).one(), int(1).q(3)),
        //////// REAL
        (real(2.0).one(), real(1.0)),
        (real(-2.0).one(), real(1.0)),
        (real.one(), real(1.0)),
        (real(-1.0, -2.0, -3.0).one(), real(1.0).q(3)),
        (real(-1.0, -2.0, -3.0).id().q(10).one(), real(1.0).q(30)),
        (real(-1.0, -2.0, -3.0) ===> __.q(3).id().q(10).one(), real(1.0).q(30)),
        (real(-1.0, -2.0, -3.0).id().q(10).one(), real(1.0).q(30)),
        (real(-1.0, -2.0, -3.0).q(3).id().q(10).one(), real(1.0).q(90)),
      )
    forEvery(starts) { (query, result) => {
      assertResult(result)(query)
      assertResult(result)(new mmlangScriptEngineFactory().getScriptEngine.eval(s"${query}"))
    }
    }
  }
} 
Example 68
Source File: TailInstTest.scala    From vm   with GNU Affero General Public License v3.0 5 votes vote down vote up
package org.mmadt.processor.inst.map

import org.mmadt.language.LanguageException
import org.mmadt.language.obj.Obj._
import org.mmadt.language.obj.`type`.__
import org.mmadt.language.obj.{Lst, Obj}
import org.mmadt.storage.StorageFactory._
import org.scalatest.FunSuite
import org.scalatest.prop.{TableDrivenPropertyChecks, TableFor2}

class TailInstTest extends FunSuite with TableDrivenPropertyChecks {

  test("[tail] anonymous type") {
    assertResult("b" |)(("a" | "b") ===> __.tail())
    assertResult("b" | "c")(("a" | "b" | "c") ===> __.tail())
    //
    assertResult("b" `;`)(("a" `;` "b") ===> __.tail())
    assertResult("b" `;` "c")(("a" `;` "b" `;` "c") ===> __.tail())
  }

  test("[tail] w/ parallel poly]") {
    val check: TableFor2[Lst[_], Obj] =
      new TableFor2(("parallel", "tail"),
        (str("a") |, lst("|") <= (str("a") |).tail()),
        (str("a") | "b", str("b") |),
        (str("a") | "b" | "c", str("b") | str("c")),
        (str("d") | "b" | "c", str("b") | str("c")),
      )
    forEvery(check) { (left, right) => {
      assertResult(right)(left.tail())
    }
    }
  }

  test("[tail] exception") {
    assertThrows[LanguageException] {
      lst.tail()
    }
  }

  test("[tail] w/ serial poly") {
    val check: TableFor2[Lst[_], Obj] =
      new TableFor2(("serial", "tail"),
        //(str("a") /, /),
        (str("a") `;` "b", str("b") `;`),
        (str("a") `;` "b" `;` "c", str("b") `;` "c"),
        (str("d") `;` "b" `;` "c", str("b") `;` "c"),
      )
    forEvery(check) { (left, right) => {
      assertResult(right)(left.tail())
    }
    }
  }


} 
Example 69
Source File: OrInstTest.scala    From vm   with GNU Affero General Public License v3.0 5 votes vote down vote up
package org.mmadt.processor.inst.map

import org.mmadt.language.obj.`type`.{Type, __}
import org.mmadt.language.obj.op.map.OrOp
import org.mmadt.language.obj.value.Value
import org.mmadt.language.obj.value.strm.Strm
import org.mmadt.language.obj.{Bool, Obj}
import org.mmadt.storage.StorageFactory.{asType, bfalse, bool, btrue, int}
import org.scalatest.FunSuite
import org.scalatest.prop.{TableDrivenPropertyChecks, TableFor1, TableFor4}

class OrInstTest extends FunSuite with TableDrivenPropertyChecks {

  test("[or] value, type, strm, anon combinations") {
    val starts: TableFor4[Obj, Obj, Obj, String] =
      new TableFor4[Obj, Obj, Obj, String](("input", "type", "result", "kind"),
        (bfalse, __.or(btrue), btrue, "value"), // value * value = value
        (bfalse, __.or(bool), bfalse, "value"), // value * type = value
        (bfalse, __.or(__.or(bool)), bfalse, "value"), // value * anon = value
        (bool, __.or(btrue), bool.or(btrue), "type"), // type * value = type
        (bool, __.or(bool), bool.or(bool), "type"), // type * type = type
        (bool(true, true, false), __.or(btrue), bool(true, true, true), "strm"), // strm * value = strm
        (bool(true, true, false), __.or(bool), bool(true, true, false), "strm"), // strm * type = strm
        (bool(true, true, false), __.or(__.or(bool)), bool(true, true, false), "strm"), // strm * anon = strm
      )
    forEvery(starts) { (input, atype, result, kind) => {
      List(
        //new mmlangScriptEngineFactory().getScriptEngine.eval(s"${input}${atype.toString}"),
        OrOp(atype.trace.head._2.arg0).q(atype.trace.head._2.q).exec(input.asInstanceOf[Bool]),
        input.compute(asType(atype)),
        input ===> (input.range ===> atype),
        input ===> atype,
        input ==> asType(atype)).foreach(x => {
        assertResult(result)(x)
        kind match {
          case "value" => assert(x.isInstanceOf[Value[_]])
          case "type" => assert(x.isInstanceOf[Type[_]])
          case "strm" => assert(x.isInstanceOf[Strm[_]])
        }
      })
    }
    }
  }

  test("[or] testing") {
    def maker(x: Obj with OrOp): Obj = x.q(2).or(bfalse).q(3).or(bfalse).q(10)

    val starts: TableFor1[OrOp with Obj] =
      new TableFor1("obj",
        bool,
        btrue,
        bfalse)
    forEvery(starts) { obj => {
      val expr = maker(obj)
      obj match {
        case value: Value[_] => assert(value.g == expr.asInstanceOf[Value[_]].g)
        case _ =>
      }
      assert(obj.q != expr.q)
      assertResult(2)(expr.trace.length)
      assertResult((int(60), int(60)))(expr.q)
      assertResult((obj.q(2), OrOp(bfalse).q(3)))(expr.trace.head)
      assertResult((obj.q(2).or(bfalse).q(3), OrOp(bfalse).q(10)))(expr.trace.last)
    }
    }
  }
} 
Example 70
Source File: MultInstTest.scala    From vm   with GNU Affero General Public License v3.0 5 votes vote down vote up
package org.mmadt.processor.inst.map

import org.mmadt.language.mmlang.mmlangScriptEngineFactory
import org.mmadt.language.obj.Obj._
import org.mmadt.language.obj.`type`.{Type, __}
import org.mmadt.language.obj.value.Value
import org.mmadt.language.obj.value.strm.Strm
import org.mmadt.language.obj.{Lst, Obj}
import org.mmadt.storage.StorageFactory._
import org.scalatest.FunSuite
import org.scalatest.prop.{TableDrivenPropertyChecks, TableFor3}


class MultInstTest extends FunSuite with TableDrivenPropertyChecks {
  test("[mult] value, type, strm, anon combinations") {
    val starts: TableFor3[Obj, Obj, String] =
      new TableFor3[Obj, Obj, String](("query", "result", "type"),
        //////// INT
        (int(2).mult(2), int(4), "value"), // value * value = value
        (int(2).q(10).mult(2), int(4).q(10), "value"), // value * value = value
        (int(2).q(10).mult(2).q(20), int(4).q(200), "value"), // value * value = value
        (int(2).mult(int(2).q(10)), int(4), "value"), // value * value = value
        (int(2).mult(int), int(4), "value"), // value * type = value
        (int(2).mult(__.mult(int)), int(8), "value"), // value * anon = value
        (int.mult(int(2)), int.mult(int(2)), "type"), // type * value = type
        (int.q(10).mult(int(2)), int.q(10).mult(int(2)), "type"), // type * value = type
        (int.mult(int), int.mult(int), "type"), // type * type = type
        (int(1, 2, 3).mult(2), int(2, 4, 6), "strm"), // strm * value = strm
        (int(1, 2, 3).mult(int(2).q(10)), int(2, 4, 6), "strm"), // strm * value = strm
        (int(1, 2, 3).mult(int(2)).q(10), int(int(2).q(10), int(4).q(10), int(6).q(10)), "strm"), // strm * value = strm
        (int(1, 2, 3).mult(int), int(1, 4, 9), "strm"), // strm * type = strm
        (int(1, 2, 3).mult(__.mult(int)), int(1, 8, 27), "strm"), // strm * anon = strm
        //////// REAL
        (real(2.0).mult(2.0), real(4), "value"), // value * value = value
        (real(2.0).mult(real), real(4.0), "value"), // value * type = value
        (real(2.0).mult(__.mult(real)), real(8.0), "value"), // value * anon = value
        (real.mult(real(2.0)), real.mult(real(2.0)), "type"), // type * value = type
        (real.mult(real), real.mult(real), "type"), // type * type = type
        (real(1.0, 2.0, 3.0).mult(2.0), real(2.0, 4.0, 6.0), "strm"), // strm * value = strm
        (real(1.0, 2.0, 3.0).mult(real), real(1.0, 4.0, 9.0), "strm"), // strm * type = strm
        (real(1.0, 2.0, 3.0).mult(__.mult(real)), real(1.0, 8.0, 27.0), "strm"), // strm * anon = strm
        //////// POLY
        //(("a" |).mult(("1" /).asInstanceOf[Poly[Obj]]), "a" / "1", "value"),
        (("a" `;`).mult(("1" `;`).asInstanceOf[Lst[Obj]]), "a" `;` "1", "value"),
        (("a" `;`).mult("1" |[Obj] "2"), ("a" `;` "1") | ("a" `;` "2"), "value"),
        (("a" `;` "b" `;` "c").mult("1" |[Obj] "2"), ("a" `;` "b" `;` "c" `;` "1") | ("a" `;` "b" `;` "c" `;` "2"), "value"),
        (("a" `;` "b" `;` "c").mult("1" `;`[Obj] "2"), "a" `;` "b" `;` "c" `;` "1" `;` "2", "value"),
        (("a" | "b" | "c").mult("1" `;`[Obj] "2"), lst[Obj]("|", values = List(("a" `;` "1" `;` "2"), ("b" `;` "1" `;` "2"), ("c" `;` "1" `;` "2")): _*), "value"),
        //(("a" | "b" | "c").mult("1" |[Obj] "2"), lst[Obj]("|", values = ("a" | "1") | ("a" | "2") | ("b" | "1") | ("b" | "2") | ("c" | "1") | ("c" | "2")), "value")
      )
    forEvery(starts) { (query, result, atype) => {
      //assertResult(result)(new mmlangScriptEngineFactory().getScriptEngine.eval(s"${query}"))
      assertResult(result)(query)
      atype match {
        case "value" => assert(query.isInstanceOf[Value[_]])
        case "type" => assert(query.isInstanceOf[Type[_]])
        case "strm" => assert(query.isInstanceOf[Strm[_]])
      }
    }
    }
  }
} 
Example 71
Source File: LteInstTest.scala    From vm   with GNU Affero General Public License v3.0 5 votes vote down vote up
package org.mmadt.processor.inst.map

import org.mmadt.language.mmlang.mmlangScriptEngineFactory
import org.mmadt.language.obj.Obj
import org.mmadt.language.obj.`type`.{Type, __}
import org.mmadt.language.obj.value.Value
import org.mmadt.language.obj.value.strm.Strm
import org.mmadt.storage.StorageFactory.{bfalse, bool, btrue, int, real}
import org.scalatest.FunSuite
import org.scalatest.prop.{TableDrivenPropertyChecks, TableFor3}

class LteInstTest extends FunSuite with TableDrivenPropertyChecks {

  test("[lt] value, type, strm, anon combinations") {
    val starts: TableFor3[Obj, Obj, String] =
      new TableFor3[Obj, Obj, String](("query", "result", "type"),
        //////// INT
        (int(2).lte(1), bfalse, "value"), // value * value = value
        (int(2).q(10).lte(1), bfalse.q(10), "value"), // value * value = value
        (int(2).q(10).lte(1).q(20), bfalse.q(200), "value"), // value * value = value
        (int(2).lte(int(1).q(10)), bfalse, "value"), // value * value = value
        (int(2).lte(int), btrue, "value"), // value * type = value
        (int(2).lte(__.mult(int)), btrue, "value"), // value * anon = value
        (int.lte(int(2)), int.lte(int(2)), "type"), // type * value = type
        (int.q(10).lte(int(2)), int.q(10).lte(int(2)), "type"), // type * value = type
        (int.lte(int), int.lte(int), "type"), // type * type = type
        (int(1, 2, 3).lte(2), bool(true, true, false), "strm"), // strm * value = strm
        (int(1, 2, 3).lte(int(2).q(10)), bool(true, true, false), "strm"), // strm * value = strm
        (int(1, 2, 3) ==> __.lte(int(2)).q(10), bool(btrue.q(10), btrue.q(10), bfalse.q(10)), "strm"), // strm * value = strm
        (int(1, 2, 3).lte(int), bool(true, true, true), "strm"), // strm * type = strm
        (int(1, 2, 3).lte(__.mult(int)), bool(true, true, true), "strm"), // strm * anon = strm
        //////// REAL
        (real(2.0).lte(1.0), bfalse, "value"), // value * value = value
        (real(2.0).lte(real), btrue, "value"), // value * type = value
        (real(2.0).lte(__.mult(real)), btrue, "value"), // value * anon = value
        (real.lte(real(2.0)), real.lte(2.0), "type"), // type * value = type
        (real.lte(real), real.lte(real), "type"), // type * type = type
        (real(1.0, 2.0, 3.0).lte(2.0), bool(true, true, false), "strm"), // strm * value = strm
        (real(1.0, 2.0, 3.0).lte(real), bool(true, true, true), "strm"), // strm * type = strm
        (real(1.0, 2.0, 3.0).lte(__.mult(real)), bool(true, true, true), "strm"), // strm * anon = strm
      )
    forEvery(starts) { (query, result, atype) => {
      //assertResult(result)(new mmlangScriptEngineFactory().getScriptEngine.eval(s"${query}"))
      assertResult(result)(query)
      atype match {
        case "value" => assert(query.isInstanceOf[Value[_]])
        case "type" => assert(query.isInstanceOf[Type[_]])
        case "strm" => assert(query.isInstanceOf[Strm[_]])
      }
    }
    }
  }
} 
Example 72
Source File: NegInstTest.scala    From vm   with GNU Affero General Public License v3.0 5 votes vote down vote up
package org.mmadt.processor.inst.map

import org.mmadt.language.mmlang.mmlangScriptEngineFactory
import org.mmadt.language.obj.Obj
import org.mmadt.language.obj.`type`.Type
import org.mmadt.language.obj.value.Value
import org.mmadt.language.obj.value.strm.Strm
import org.mmadt.storage.StorageFactory.{int, real}
import org.scalatest.FunSuite
import org.scalatest.prop.{TableDrivenPropertyChecks, TableFor3}

class NegInstTest extends FunSuite with TableDrivenPropertyChecks {
  test("[neg] value, type, strm") {
    val starts: TableFor3[Obj, Obj, String] =
      new TableFor3[Obj, Obj, String](("query", "result", "type"),
        //////// INT
        (int(2).neg(), int(-2), "value"),
        (int(2).q(2).neg(), int(-2).q(2), "value"),
        (int(-2).neg(), int(2), "value"),
        (int(-2).neg().q(4).neg().q(2), int(-2).q(8), "value"),
        (int.neg(), int.neg(), "type"),
        (int(-1, -2, -3).neg(), int(1, 2, 3), "strm"),
        //////// REAL
        (real(2.0).neg(), real(-2.0), "value"),
        (real(-2.0).neg(), real(2.0), "value"),
        (real.neg(), real.neg(), "type"),
        (real(-1.0, -2.0, -3.0).neg(), real(1.0, 2.0, 3.0), "strm"),
        (real(-1.0, -2.0, -3.0).neg().q(10), real(real(1.0).q(10), real(2.0).q(10), real(3.0).q(10)), "strm"),
      )
    forEvery(starts) { (query, result, atype) => {
      //assertResult(result)(new mmlangScriptEngineFactory().getScriptEngine.eval(s"${query}"))
      assertResult(result)(query)
      atype match {
        case "value" => assert(query.isInstanceOf[Value[_]])
        case "type" => assert(query.isInstanceOf[Type[_]])
        case "strm" => assert(query.isInstanceOf[Strm[_]])
      }
    }
    }
  }
} 
Example 73
Source File: HeadInstTest.scala    From vm   with GNU Affero General Public License v3.0 5 votes vote down vote up
package org.mmadt.processor.inst.map

import org.mmadt.language.LanguageException
import org.mmadt.language.obj.Obj._
import org.mmadt.language.obj.`type`.__
import org.mmadt.language.obj.{Lst, Obj}
import org.mmadt.storage.StorageFactory._
import org.scalatest.FunSuite
import org.scalatest.prop.{TableDrivenPropertyChecks, TableFor2}

class HeadInstTest extends FunSuite with TableDrivenPropertyChecks {

  test("[head] anonymous type") {
    assertResult(str("a"))(("a" |) ===> __.head())
    assertResult(str("a"))(("a" | "b") ===> __.head())
    assertResult(str("a"))(("a" | "b" | "c") ===> __.head())
    //
    assertResult(str("a"))(("a" `;`) ===> __.head())
    assertResult(str("a"))(("a" `;` "b") ===> __.head())
    assertResult(str("a"))(("a" `;` "b" `;` "c") ===> __.head())
  }

  test("[head] w/ parallel poly") {
    val check: TableFor2[Lst[_], Obj] =
      new TableFor2(("parallel", "head"),
        (str("a") |, "a"),
        (str("a") | "b", "a"),
        (str("a") | "b" | "c", "a"),
        (str("d") | "b" | "c", "d"),
      )
    forEvery(check) { (left, right) => {
      assertResult(right)(left.head())
    }
    }
  }

  test("[head] w/ serial poly") {
    val check: TableFor2[Lst[_], Obj] =
      new TableFor2(("serial", "head"),
        (str("a") `;`, "a"),
        (str("a") `;` "b", "a"),
        (str("a") `;` "b" `;` "c", "a"),
        (str("d") `;` "b" `;` "c", "d"),
      )
    forEvery(check) { (left, right) => {
      assertResult(right)(left.head())
    }
    }
  }

  test("[head] exception") {
    assertThrows[LanguageException] {
      lst.head()
    }
  }
} 
Example 74
Source File: FoldInstTest.scala    From vm   with GNU Affero General Public License v3.0 5 votes vote down vote up
package org.mmadt.processor.inst.reduce

import org.mmadt.language.obj.`type`.__
import org.mmadt.storage.StorageFactory._
import org.scalatest.FunSuite


class FoldInstTest extends FunSuite {
  test("[fold] w/ int") {
    assertResult(int(2))(int(2).fold(int(0))(__.from("x").get(0).plus(__.from("x").get(1))))
    assertResult(int(2))(int(2) ===> int.fold(int(0))(__.from("x").get(0, int).plus(__.from("x").get(1, int))))
    assertResult(int(7))(int(5).fold(int(2))(__.from("x").get(0).plus(__.from("x").get(1))))
    assertResult(int(10))(int(1, 2, 3, 4).fold(int(0))(__.from("x").get(0).plus(__.from("x").get(1))))
    assertResult(int(10))(int(1, 2, 3, 4) ===> int.q(4).fold(int(0))(__.from("x").get(0).plus(__.from("x").get(1))))
    assertResult(int(10))(int(1, 2, 3, 4) ===> (int.q(4) ===> int.q(4).fold(int(0))(__.from("x").get(0).plus(__.from("x").get(1)))))
    assertResult(int(10))((int(1) `,` 2 `,` 3 `,` 4) ===> ((int `,` int `,` int `,` int).>-.fold(int(0))(__.from("x").get(0).plus(__.from("x").get(1)))))
  }
  test("[fold] w/ str") {
    assertResult(str("marko"))(str("m", "a", "r", "k", "o").fold(str(""))(__.from("x").get(0).plus(__.from("x").get(1))))
  }
} 
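The [fold] cases reduce a stream against a seed using a two-argument anonymous type, much like foldLeft in plain Scala. A hedged plain-Scala analogue of the int(1, 2, 3, 4).fold(int(0))(...) case (not the mm-ADT API itself):

// Plain-Scala analogue of folding the stream {1, 2, 3, 4} with seed 0 and a plus reducer.
val sum = List(1, 2, 3, 4).foldLeft(0)((acc, x) => acc + x)
assert(sum == 10)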
Example 75
Source File: CountInstTest.scala    From vm   with GNU Affero General Public License v3.0 5 votes vote down vote up
package org.mmadt.processor.inst.reduce

import org.mmadt.storage.StorageFactory._
import org.scalatest.FunSuite


class CountInstTest extends FunSuite {
  test("[count] w/ int") {
    assertResult(int(1))(int(2).count())
    assertResult(int(10))(int(2).q(10).count())
    assertResult(int(1))(int(12) ===> int.count())
    assertResult(int(0))(int(1) ===> int.is(int.gt(10)).count())
    assertResult(int(0))(int(1, 2, 3) ===> int.q(*).is(int.gt(10)).count())
    assertResult(int(3))(int(1, 2, 3).count())
    assertResult(int(3))(int(1, 2, 3) ===> int.q(3).count())
    assertResult(int(3))(int(1, 2, 3) ===> int.q(+).plus(10).count())
    // assertResult(int(2))((int(0,1) ===> int.q(*)-<(rec((int.is(int.gt(int(0))) -> int), (int -> int)).>-.count()))
    //assertResult(int(17))((int(int(0).q(int(10)),int(1).q(int(7))) ===> int.q(*).choose(int.q(*).is(int.q(*).gt(int(0))) -> int,int -> int).count())) // TODO: need smarter handling of strm compilations with quantifiers
    assertResult(int(13))(int(int(0).q(10), int(1).q(3)).plus(10).count())
    //  assertResult(int(13))(int(int(0).q(10),int(1).q(3)) ===> int.q(*).plus(10).count())
    assertResult(int(14))(int(int(0).q(10), int(1).q(3), 6).plus(10).count())
    // assertResult(int(14))(int(int(0).q(10),int(1).q(3),6) ===> int.q(*).plus(10).count())
  }
} 
Example 76
Source File: DefineInstTest.scala    From vm   with GNU Affero General Public License v3.0 5 votes vote down vote up
package org.mmadt.processor.inst.trace
import org.mmadt.language.mmlang.mmlangScriptEngineFactory
import org.mmadt.language.obj.`type`.{Type, __}
import org.mmadt.language.obj.{Bool, Obj}
import org.mmadt.storage.StorageFactory._
import org.scalatest.FunSuite
import org.scalatest.prop.{TableDrivenPropertyChecks, TableFor2}

class DefineInstTest extends FunSuite with TableDrivenPropertyChecks {

  test("[define] value, type, strm, anon combinations") {
    val starts: TableFor2[Obj, Obj] =
      new TableFor2[Obj, Obj](("query", "result"),
        (int(2).define(__("nat") <= int.is(int.gt(0))).a(__("nat")), btrue),
        (int(-2).define(__("nat") <= int.is(int.gt(0))).a(__("nat")), bfalse),
        (int(-2).define(__("nat") <= int.is(int.gt(0))).a(__("nat").plus(100)), bfalse),

        (int(2).define(__("abc") <= int.is(int.gt(0))).a(__("abc")), btrue),
        (int(-2).define(__("abc") <= int.is(int.gt(0))).a(__("abc")), bfalse),
      
      )
    forEvery(starts) { (query, result) => {
      assertResult(result)(new mmlangScriptEngineFactory().getScriptEngine.eval(s"${query}"))
      assertResult(result)(query)
    }
    }
  }

  test("[define] play tests") {
    println(int.define(int.is(int.gt(0))).a(__("nat")))
    println(int(-10).define(__("nat") <= int.is(int.gt(0))).a(__("nat").plus(100)))
    println(__("nat").plus(100).domain)
    println(int(-10).compute(int.define(__("nat") <= int.is(int.gt(0))).a(__("nat")).asInstanceOf[Type[Bool]]))
    println(int.define(int.plus(10).mult(20)).plus(2) -< (__("x").plus(100) `,` __("x")) >-)
    println(new mmlangScriptEngineFactory().getScriptEngine.eval("1[a,[real|str]]"))
    println(str.a(__.-<(real `|` int) >-)) // TODO
  }
} 
Example 77
Source File: GivenInstTest.scala    From vm   with GNU Affero General Public License v3.0 5 votes vote down vote up
package org.mmadt.processor.inst.branch

import org.mmadt.language.obj.Obj
import org.mmadt.language.obj.`type`.{Type, __}
import org.mmadt.storage.StorageFactory._
import org.scalatest.FunSuite
import org.scalatest.prop.{TableDrivenPropertyChecks, TableFor3}

class GivenInstTest extends FunSuite with TableDrivenPropertyChecks {


  test("[given] value, type, strm") {
    val check: TableFor3[Obj, Obj, Obj] =
      new TableFor3[Obj, Obj, Obj](("input", "type", "result"),
        (int(1), int.-<((int.plus(50).is(__.gt(0)) --> int.plus(20)) `,` (str --> str.plus("a"))), int(21) `,` zeroObj),
        (int(1), int.-<((int.plus(50).is(__.gt(0)) --> int.plus(20)) | (str --> str.plus("a"))), int(21) | zeroObj),
        (int(1), int.-<((int.plus(50).is(__.gt(0)) --> int.plus(20)) `,` (int.plus(-10).is(__.lt(0)) --> int.plus(100))), int(21) `,` 101),
        (int(1), int.-<((int.plus(50).is(__.gt(0)) --> int.plus(20)) | (int.plus(-10).is(__.lt(0)) --> int.plus(100))), int(21) | zeroObj),
        (int(1), int.-<((int.plus(50).is(__.lt(0)) --> int.plus(20)) `,` (int.plus(-10).is(__.lt(0)) --> int.plus(100))), zeroObj `,` 101),
        (int(-1), int.plus(2).-<(int.is(int > 5) --> int(34) | int.is(int === 1) --> int.plus(2) | int --> int(20)), zeroObj | 3 | zeroObj),
        (int(10, int(50).q(2), 60), int.q(4).-<(bool --> btrue | int --> int + 1), strm(List(zeroObj | int(11), zeroObj | int(51).q(2), zeroObj | int(61)))),
        (int(10, int(50).q(2), 60), int.q(4).-<(bool --> btrue | int --> int + 1).>-, int(int(11), int(51).q(2), int(61))),
      )
    forEvery(check) { (input, atype, result) => {
      assertResult(result)(input.compute(atype.asInstanceOf[Type[Obj]]))
      assertResult(result)(input ==> atype.asInstanceOf[Type[Obj]])
      assertResult(result)(input ===> atype)
      assertResult(result)(input ===> (input.range ==> atype.asInstanceOf[Type[Obj]]))
      assertResult(result)(input ===> (input.range ===> atype))
    }
    }
  }
} 
Example 78
Source File: ValueTest.scala    From vm   with GNU Affero General Public License v3.0 5 votes vote down vote up
package org.mmadt.language.obj.value

import org.mmadt.language.LanguageException
import org.mmadt.language.obj.Obj
import org.mmadt.storage.StorageFactory._
import org.scalatest.FunSuite

class ValueTest extends FunSuite {

  test("value hashCode, equals, toString") {
    val values: List[Value[Obj]] = List(btrue, int(1), real(1.0), str("1"), (int(1) `,`).asInstanceOf[LstValue[Obj]], rec(int(1) -> str("1")).asInstanceOf[RecValue[Obj,Obj]])
    var sameCounter = 0
    var diffCounter = 0
    for (a <- values) {
      for (b <- values) {
        if (a.getClass == b.getClass) {
          sameCounter = sameCounter + 1
          assert(a == b)
          assert(a.name == b.name)
          assert(a.g == b.g)
          assert(a.hashCode == b.hashCode)
          assert(a.toString == b.toString)
        } else {
          diffCounter = diffCounter + 1
          assert(a != b)
          assert(a.name != b.name)
          assert(!a.g.equals(b.g)) // == in Scala converts numbers
          assert(a.hashCode != b.hashCode)
          assert(a.toString != b.toString)
        }
      }
    }
    assertResult(values.length)(sameCounter)
    assertResult(values.length * (values.length - 1))(diffCounter)
  }

  test("value structure w/ two canonical types") {
    val avalue = int(5).plus(10).id().mult(5).gt(10)
    assertResult(bool)(asType(avalue))
    assertResult(4)(avalue.trace.length)
    // rinvert
    assertResult(int(5).plus(10).id().mult(5))(avalue.rinvert[IntValue]())
    assertResult(int(5).plus(10).id())(avalue.rinvert[IntValue]().rinvert[IntValue]())
    assertResult(int(5).plus(10))(avalue.rinvert[IntValue]().rinvert[IntValue]().rinvert[IntValue]())
    assertResult(int(5))(avalue.rinvert[IntValue]().rinvert[IntValue]().rinvert[IntValue]().rinvert[IntValue]())
    assertThrows[LanguageException] {
      avalue.rinvert[IntValue]().rinvert[IntValue]().rinvert[IntValue]().rinvert[IntValue]().rinvert[IntValue]()
    }
    // linvert
    assertResult(int(15).id().mult(5).gt(10))(avalue.linvert())
    assertResult(int(15).mult(5).gt(10))(avalue.linvert().linvert())
    assertResult(int(75).gt(10))(avalue.linvert().linvert().linvert())
    assertResult(btrue)(avalue.linvert().linvert().linvert().linvert())
    assertThrows[LanguageException] {
      avalue.linvert().linvert().linvert().linvert().linvert()
    }
  }
} 
Example 79
Source File: IntValueTest.scala    From vm   with GNU Affero General Public License v3.0 5 votes vote down vote up
package org.mmadt.language.obj.value
import org.mmadt.storage.StorageFactory._
import org.scalatest.FunSuite

class IntValueTest extends FunSuite {

  test("int value test") {
    // value ~ value
    assert(int(3).test(int(3)))
    assert(int(3).test(int(3).plus(10).plus(-5).plus(-5)))
    assert(!int(3).test(int(-3)))
    assert(!int(3).test(int(3).plus(10).plus(-5)))
    // value ~ type
    assert(int(3).test(int))
    assert(!int(3).test(str))
    assert(int(3).test(int.plus(2)))
    assert(int(3).test(str.map(int(3))))
    assert(!int(3).test(str.map(int)))
  }

} 
Example 80
Source File: IntTypeTest.scala    From vm   with GNU Affero General Public License v3.0 5 votes vote down vote up
package org.mmadt.language.obj.`type`

import org.mmadt.language.LanguageException
import org.mmadt.storage.StorageFactory._
import org.scalatest.FunSuite


class IntTypeTest extends FunSuite {

  test("int type test") {
    // type ~ value
    assert(!int.test(int(3)))
    assert(!int.plus(10).plus(-5).plus(-5).test(int(3).plus(10).plus(-5).plus(-5)))
    // type ~ type
    assert(int.test(int))
    assert(int.plus(2).test(int.plus(2)))
    assert(int.is(int.gt(2)).test(int.is(int.gt(2))))
    assert(!int.test(str))
    assert(!int.test(int.plus(2)))
    assert(!int.plus(2).test(int))
    assert(!int.test(str.map(int(2))))
    assert(!int.test(str.map(int)))
  }

  test("int infix operators") {
    assertResult("bool<=int[plus,2][gt,4]")((int + 2 > 4).toString)
    assertResult("int{?}<=int[plus,2][is,bool<=int[gt,4]]")((int + 2 is int.gt(4)).toString)
  }
  test("int: refinement types") {
    assertResult("int[is,bool<=int[gt,5]]")((int <= int.is(int.gt(5))).toString())
    assertResult(int(5))(int(5) ==> (int <= int.is(int.gt(4))))
    assertResult(int(5))(int(5) ==> (int.is(int.gt(4))))
    //intercept[LanguageException]{
     int(4) ==> (int <= int.is(int.gt(4)))
    //}
    //intercept[LanguageException] {
      int(6) ==> int.q(0).is(int.gt(5))
    //}
    intercept[LanguageException] {
      int(6) ==> int.q(2).is(int.gt(5))
    }
    intercept[LanguageException] {
      int(6) ==> int.q(15, 46).is(int.gt(5))
    }
  }
  test("int: deep nest") {
    assertResult(int(2))(int(1) ==> int.plus(1))
    assertResult(int(3))(int(1) ==> int.plus(int.plus(1)))
    assertResult(int(4))(int(1) ==> int.plus(int.plus(int.plus(1))))
    assertResult(int(5))(int(1) ==> int.plus(int.plus(int.plus(int.plus(1)))))
    assertResult(int(6))(int(1) ==> int.plus(int.plus(int.plus(int.plus(int.plus(1))))))
  }
} 
Example 81
Source File: TypeOperatorTest.scala    From vm   with GNU Affero General Public License v3.0 5 votes vote down vote up
package org.mmadt.language.obj.`type`

import org.mmadt.language.LanguageException
import org.mmadt.language.obj.op.map.PlusOp
import org.mmadt.storage.StorageFactory._
import org.scalatest.FunSuite


class TypeOperatorTest extends FunSuite {

  test("insts analysis from type composition") {
    assertResult(List((int, PlusOp(int(1)))))(int.plus(1).trace)
    assertResult(List((int, PlusOp(int(1))), (int.plus(1), PlusOp(int(2)))))(int.plus(1).plus(2).trace)
    assertResult(List((int, PlusOp(int(1))), (int.plus(1), PlusOp(int(2))), (int.plus(1).plus(2), PlusOp(int(3)))))(int.plus(1).plus(2).plus(3).trace)
    assertResult(List((int, PlusOp(int(1))), (int.plus(1), PlusOp(int(2)))))(int.plus(1).plus(2).plus(3).rinvert[IntType]().trace)
    assertResult(List((int, PlusOp(int(1))), (int.plus(1), PlusOp(int(2))), (int.plus(1).plus(2), PlusOp(int(3)))))(int.plus(1).plus(2).plus(3).rinvert[IntType]().plus(3).trace)
  }

  test("insts analysis from type right inverse") {
    assertResult(List((int, PlusOp(int(1))), (int.plus(1), PlusOp(int(2)))))(int.plus(1).plus(2).plus(3).rinvert[IntType]().trace)
    assertResult(List((int, PlusOp(int(1)))))(int.plus(1).plus(2).plus(3).rinvert[IntType]().rinvert[IntType]().trace)
    assertResult(List())(int.plus(1).plus(2).plus(3).rinvert[IntType]().rinvert[IntType]().rinvert[IntType]().trace)
    assertResult(int)(int.plus(1).plus(2).plus(3).rinvert[IntType]().rinvert[IntType]().rinvert[IntType]())
    assertThrows[LanguageException] {
      int.plus(1).plus(2).plus(3).rinvert[IntType]().rinvert[IntType]().rinvert[IntType]().rinvert()
    }
    assertResult(List((int, PlusOp(int(1))), (int.plus(1), PlusOp(int(4)))))(int.plus(1).plus(2).plus(3).rinvert[IntType]().rinvert[IntType]().plus(4).trace)
    assertResult(List((int, PlusOp(int(1))), (int.plus(1), PlusOp(int(4))), (int.plus(1).plus(4), PlusOp(int(5)))))(int.plus(1).plus(2).plus(3).rinvert[IntType]().rinvert[IntType]().plus(4).plus(5).trace)
  }

  test("insts analysis from type left inverse") {
    assertResult(List((int, PlusOp(int(2))), (int.plus(2), PlusOp(int(3)))))(int.plus(1).plus(2).plus(3).linvert().trace)
    assertResult(List((int, PlusOp(int(3)))))(int.plus(1).plus(2).plus(3).linvert().linvert().trace)
    assertResult(List())(int.plus(1).plus(2).plus(3).linvert().linvert().linvert().trace)
    assertThrows[LanguageException] {
      assertResult(List())(int.plus(1).plus(2).plus(3).linvert().linvert().linvert().linvert().trace)
    }
  }
} 
Example 82
Source File: LstTypeTest.scala    From vm   with GNU Affero General Public License v3.0 5 votes vote down vote up
package org.mmadt.language.obj.`type`

import org.mmadt.language.mmlang.mmlangScriptEngineFactory
import org.mmadt.language.obj.`type`.__
import org.mmadt.language.obj.{Int, Lst, Obj, Poly}
import org.mmadt.storage.StorageFactory._
import org.scalatest.FunSuite
import org.scalatest.prop.{TableDrivenPropertyChecks, TableFor3}

class LstTypeTest extends FunSuite with TableDrivenPropertyChecks {
  test("parallel expressions") {
    val starts: TableFor3[Obj, Lst[Obj], Obj] =
      new TableFor3[Obj, Lst[Obj], Obj](("lhs", "rhs", "result"),
        (int(1), int `,` int, int(1).q(2)),
        (int(1), int `,` int.plus(2), int(1, 3)),
        (int(1), int `,` int.plus(2).q(10), int(1, int(3).q(10))),
        (int(1).q(5), int `,` int.plus(2).q(10), int(int(1).q(5), int(3).q(50))),
        (int(int(1), int(100)), int | int, int(int(1), int(100))),
        (int(int(1), int(100)), int `,` int, int(1, 1, 100, 100)),
        (int(int(1), int(100)), int `,` int, int(int(1).q(2), int(100).q(2))),
        (int(int(1).q(5), int(100)), int `,` int.plus(2).q(10), int(int(1).q(5), int(3).q(50), int(100), int(102).q(10))),
        (int(int(1).q(5), int(100)), int | int.plus(2).q(10), int(int(1).q(5), int(100))),
        (int(1, 2), int | (int | int), int(1, 2)), // TODO: this is not computing the lst as a type
        (int(1, 2), (int | int) | int, int(1, 2)), // TODO: this is not computing the lst as a type
        //(int(1, 2), (int | int) | (int | int), int(1, 2)),
        //(int(int(1), int(2)).-<(int `,` (int -< (int | int))), strm[Obj](List(int(1), int(1) |, int(2), int(2) |))),
        (int(1), str | int, int(1)),
        //(strm(List(int(1), str("a"))).-<(str | int), strm(List(zeroObj | int(1), str("a") | zeroObj))),
      )
    forEvery(starts) { (lhs, rhs, result) => {
      assertResult(result)(new mmlangScriptEngineFactory().getScriptEngine.eval(s"(${lhs})>--<${rhs}>-"))
      assertResult(result)(lhs ===> __.-<(rhs).>-)
    }
    }
  }

  test("parallel [get] types") {
    assertResult(str)((str.plus("a") | str).get(0, str).range)
  }

  test("parallel structure") {
    val poly: Poly[Obj] = int.mult(8).split(__.id() | __.plus(2) | 3)
    assertResult("(int[id]|int[plus,2]|3)<=int[mult,8]-<(int[id]|int[plus,2]|3)")(poly.toString)
    assertResult(int.id())(poly.glist.head)
    assertResult(int.plus(2))(poly.glist(1))
    assertResult(int(3))(poly.glist(2))
    assertResult(int)(poly.glist.head.via._1)
    assertResult(int)(poly.glist(1).via._1)
    assert(poly.glist(2).root)
    assertResult(int.id() | int.plus(2) | int(3))(poly.range)
  }

  test("parallel quantifier") {
    val poly: Poly[Obj] = int.q(2).mult(8).split(__.id() | __.plus(2) | 3)
    assertResult("(int{2}[id]|int{2}[plus,2]|3)<=int{2}[mult,8]-<(int{2}[id]|int{2}[plus,2]|3)")(poly.toString)
    assertResult(int.q(2).id())(poly.glist.head)
    assertResult(int.q(2).plus(2))(poly.glist(1))
    assertResult(int(3))(poly.glist(2))
    assertResult(int.q(2))(poly.glist.head.via._1)
    assertResult(int.q(2))(poly.glist(1).via._1)
    assert(poly.glist(2).root)
    assertResult(int.q(2).id() | int.q(2).plus(2) | int(3))(poly.range)
  }

  test("parallel [split] quantification") {
    assertResult(int)(int.mult(8).split(__.id() | __.plus(8).mult(2) | int(56)).merge[Int].id().isolate)
    assertResult(int.q(1, 20))(int.mult(8).split(__.id().q(10, 20) | __.plus(8).mult(2).q(2) | int(56)).merge[Int].id().isolate)
    assertResult(int.q(1, 40))(int.q(2).mult(8).q(1).split(__.id().q(10, 20) | __.plus(8).mult(2).q(2) | int(56)).merge[Int].id().isolate)
    assertResult(int(56))(int.q(2).mult(8).q(0).split(__.id().q(10, 20) | __.plus(8).mult(2).q(2) | int(56)).merge[Obj].id().isolate)
  }
} 
Example 83
Source File: QTest.scala    From vm   with GNU Affero General Public License v3.0 5 votes vote down vote up
package org.mmadt.language

import org.mmadt.storage.StorageFactory._
import org.scalatest.FunSuite


class QTest extends FunSuite {

  test("value quantifiers toString") {
    assertResult("3")(int(3).toString)
    assertResult("3{0}")(int(3).q(0).toString)
    assertResult("3{1,2}")(int(3).q(int(1), int(2)).toString)
    assertResult("3{1,2}")(int(3).q(1, 2).toString)
  }

  test("type quantifiers toString") {
    assertResult("int")(int.toString)
    assertResult("int")(int.q(1).toString)
    assertResult("int")(int.q(1).q(1).q(10).q(1).toString)
    assertResult("int{0}")(int.q(0).toString)
    assertResult("int{?}")(int.q(0, 1).toString)
    assertResult("int{+}")(int.q(1, Long.MaxValue).toString)
    assertResult("int{*}")(int.q(0, Long.MaxValue).toString)
    assertResult("int{1,2}")(int.q(1, 2).toString)
    assertResult("int{1,2}")(int.q(1, 2).toString)
    assertResult("bool{3}<=int{3}[gt,5]")(int.q(3).gt(5).toString)
    assertResult("int{?}<=int[is,bool<=int[gt,5]]")(int.is(int.gt(5)).toString)
    assertResult("int{?}<=int[is,bool<=int[gt,5]][plus,10]")(int.is(int.gt(5)).plus(10).toString)
    assertResult("int{0}[is,bool{0}<=int[gt,5]][plus,10]")(int.q(0).is(int.gt(5)).plus(10).toString)
  }
} 
Example 84
Source File: MultiSetTest.scala    From vm   with GNU Affero General Public License v3.0 5 votes vote down vote up
package org.mmadt.storage.obj.value.strm.util

import org.mmadt.language.obj.IntQ
import org.mmadt.storage.StorageFactory._
import org.scalatest.FunSuite

class MultiSetTest extends FunSuite {

  def qmaker(a: Int, b: Int): IntQ = (a, b)
  def qmaker(a: Int): IntQ = (a, a)

  test("multiset put") {
    assertResult(1L)(MultiSet.put(int(2)).objSize)
    assertResult(qOne)(MultiSet.put(int(2)).qSize)
    //
    assertResult(1L)(MultiSet.put(int(2)).put(int(2)).objSize)
    assertResult(qmaker(2))(MultiSet.put(int(2)).put(int(2)).qSize)
    //
    assertResult(1L)(MultiSet.put(int(2)).put(int(2)).put(int(2)).objSize)
    assertResult(qmaker(3))(MultiSet.put(int(2)).put(int(2)).put(int(2)).qSize)
    //
    assertResult(1L)(MultiSet.put(int(2)).put(int(2)).put(int(2).q(1, 2)).objSize)
    assertResult(qmaker(3, 4))(MultiSet.put(int(2)).put(int(2)).put(int(2).q(1, 2)).qSize)
    //
    assertResult(2L)(MultiSet.put(btrue).put(btrue.q(10)).put(bfalse.q(1, 2)).objSize)
    assertResult(qmaker(12, 13))(MultiSet.put(btrue).put(btrue.q(10)).put(bfalse.q(1, 2)).qSize)
    //
    assertResult(2L)(MultiSet.put(btrue).put(btrue.q(10)).put(bfalse.q(1, 2)).put(btrue.q(20)).objSize)
    assertResult(qmaker(32, 33))(MultiSet.put(btrue).put(btrue.q(10)).put(bfalse.q(1, 2)).put(btrue.q(20)).qSize)
  }

  test("multiset seq") {
    assertResult(2L)(MultiSet.put(int(2), int(3)).objSize)
    assertResult(qmaker(2))(MultiSet.put(int(2), int(3)).qSize)
    //
    assertResult(2L)(MultiSet.put(int(2), int(3), int(3).q(10)).objSize)
    assertResult(qmaker(12))(MultiSet.put(int(2), int(3), int(3).q(10)).qSize)
  }

  test("multiset w/ inst") {
    println(int(1, 1, 1).q(20))
    assertResult(int(int(12).q(40), int(13).q(40), int(14).q(40)))(int(1, 2, 3).q(20).plus(10).q(2).plus(1))
    assertResult(int(int(12).q(80), int(13).q(40)))(int(1, 1, 2).q(20).plus(10).q(2).plus(1))
  }
} 
Example 85
Source File: VBoolTest.scala    From vm   with GNU Affero General Public License v3.0 5 votes vote down vote up
package org.mmadt.storage.obj.value

import org.mmadt.storage.StorageFactory._
import org.scalatest.FunSuite


class VBoolTest extends FunSuite {
  test("bool value") {
    assert(btrue.g)
    assertResult(btrue)(btrue || bfalse)
    assertResult(bfalse)(btrue && bfalse)
    assertResult(btrue)(int(4) ===> (int.plus(3).mult(int) ===> int.plus(2).gt(5)))
  }
  test("bool value quantifiers") {
    assertResult(btrue.q(int(2)))(btrue.q(int(2)) ==> bool.q(int(2)))
    assertResult(btrue.q(int(2)))(btrue.q(int(2)) ===> bool.q(int(2)).and(btrue))
    assertResult(btrue.q(int(2)))(btrue.q(int(2)) ===> bool.q(int(2)).and(btrue).or(bfalse.q(int(34))))
    assertResult(btrue.q(int(4)))(btrue.q(int(2)) ===> bool.q(int(2)).and(btrue).or(bfalse.q(int(34))).q(int(2)))
    assertResult(btrue.q(12, 1200))(int(3).q(int(2)) ===> int.q(int(2)).plus(int(4)).q(2).mult(int(2).q(34)).q(3).gt(0).q(1, 100))
  }
} 
Example 86
Source File: VIntTest.scala    From vm   with GNU Affero General Public License v3.0 5 votes vote down vote up
package org.mmadt.storage.obj.value

import org.mmadt.language.obj.Obj
import org.mmadt.language.obj.`type`.IntType
import org.mmadt.language.obj.op.map.{IdOp, PlusOp}
import org.mmadt.language.obj.value.strm.IntStrm
import org.mmadt.storage.StorageFactory._
import org.scalatest.FunSuite


class VIntTest extends FunSuite {

  test("int value") {
    assertResult(int(8))(4 + int)
    assertResult(int(3))(int(1) + int(2))
    assertResult(int(3))(int(1) + 2)
    assertResult(int(-4))(-int(4))
    assertResult(int(-4))(int(3) ==> int.plus(1).neg())
  }
  test("int value quantifiers") {
    assertResult(int(3).q(int(2)))(int(3).q(int(2)) ==> int.q(int(2)))
    assertResult(int(7).q(int(2)))(int(3).q(int(2)) ==> int.q(int(2)).plus(int(4)))
    assertResult(int(14).q(int(2)))(int(3).q(int(2)) ==> int.q(int(2)).plus(int(4)).mult(int(2).q(int(34))))
    assertResult(int(14).q(4))(int(3).q(2) ===> int.q(2).plus(int(4)).mult(int(2).q(int(34))).q(2))
    assertResult(bfalse.q(int(3)))(int(5).q(int(3)) ===> int.q(int(3)).plus(int(4)).gt(int(10)))
    assertResult(btrue.q(int(3)))(int(5).q(int(3)) ===> int.q(int(3)).plus(int(4)).gt(int(2)))
    assertResult(int(14).q(12))(int(3).q(2) ==> int.q(2).plus(int(4)).q(2).mult(int(2).q(34)).q(3))
    assertResult(btrue.q(40))(int(3).q(2) ===> int.q(2).plus(int(4)).q(2).gt(int(2).q(34)).q(10))
    assertResult(btrue.q(40))(int(3).q(2) ===> int.q(2).plus(int(4)).q(2).a(int.q(0, 4)).q(10))
    //
    println(int(1, 2, 3).map(int(2)).asInstanceOf[IntStrm].values.map(x => x.trace))
    assertResult(int(int(3).q(50), int(4).q(100)))(int(int(3).q(10), int(4).q(20)).q(5))
    assertResult(int(int(3).q(500), int(4).q(1000)))(int(int(3).q(10), int(4).q(20)).q(5).q(10))
  }
  test("nested lineages of types") {
    val atype = int.id().plus(int.plus(2))
    assertResult(2)(atype.trace.length)
    assertResult((int.id(), PlusOp(int.plus(2))))(atype.trace.last)
    assertResult(PlusOp(int.plus(2)))(atype.trace.last._2)
    assertResult(int.plus(2))(atype.trace.last._2.arg0[IntType])
    assertResult(List((int, PlusOp(2))))(atype.trace.last._2.arg0[IntType].trace)
  }
  test("nested lineages of values") {
    val atype = int.id().plus(int.plus(2))
    assertResult(int(8))(int(3) ===> atype)
    assertResult(2)((int(3) ===> atype).trace.length)
    assertResult((int(3), PlusOp(5)))((int(3) ===> atype).trace.last)
    assertResult(PlusOp(5))((int(3) ===> int.id().plus(int.plus(2))).trace.last._2)
    assertResult(int(5))((int(3) ===> int.id().plus(int.plus(2))).trace.last._2.arg0[Obj])
    assertResult(List((int(3), IdOp()), (int(3), PlusOp(2))))((int(3) ===> int.id().plus(int.plus(2))).trace.last._2.arg0[Obj].trace)
  }
} 
Example 87
Source File: VStrTest.scala    From vm   with GNU Affero General Public License v3.0 5 votes vote down vote up
package org.mmadt.storage.obj.value

import org.mmadt.storage.StorageFactory._
import org.scalatest.FunSuite


class VStrTest extends FunSuite {
  test("str value") {
    assertResult(str("mar"))(str("m").plus("a").plus("r"))
    assertResult(str("mar"))(str("m") + "a" + "r")
    assertResult(btrue)(str("marko").eqs(str("marko")))
    assertResult(btrue)(str("marko").eqs("marko"))
    assertResult(btrue)(str("m").gt(str("a")))
    assertResult(btrue)(str("m").gt("a"))
    assertResult(bfalse)(str("m").gt("r"))
  }
  test("str value quantifiers") {
    assertResult(str("marko").q(2))(str("marko").q(2) ==> str.q(2))
    assertResult(str("marko").q(2))(str("mar").q(2) ==> str.q(2).plus(str("ko")))
    assertResult(str("marko").q(2))(str("mar").q(2) ==> str.q(2).plus(str("k")).plus(str("o").q(34)))
    assertResult(str("marko").q(4))(str("mar").q(2) ==> str.q(2).plus(str("k")).plus(str("o").q(34)).q(2))
    assertResult(str("marko").q(200))(str("mar").q(2) ==> str.q(2).plus(str("k")).q(10).plus(str("o").q(34)).q(10))
  }
  test("str compute") {
    assertResult(str("marko"))(str("m") ==> str.plus("a").plus("r").plus("k").plus("o"))
    assertResult(str("marko"))(str("m") ==> str.plus(str("a").plus(str("r").plus(str("k").plus("o")))))
    assertResult(str("mmamarmarkmarko"))(str("m") ==>
      str.to("a").plus("a")
        .to("b").plus("r")
        .to("c").plus("k")
        .to("d").plus("o")
        .to("e")
        .map(str.from("a")
          .plus(str.from("b"))
          .plus(str.from("c"))
          .plus(str.from("d"))
          .plus(str.from("e"))))
    assertResult(str("marko"))(str("m") ===> str.plus("a").plus("r").plus("k").plus("o").path().tail().tail().tail().tail().head())
    assertResult(str("marko"))(str("m") ===> str.plus("a").plus("r").plus("k").plus("o").path().tail().tail().tail().head().as(str).plus("k").plus("o").path().tail().tail().tail().tail().head())
  }

  test("play") {
   str("m") ==> str.plus("a").plus("r").plus("k").plus("o")
  }
} 
Example 88
Source File: TBoolTest.scala    From vm   with GNU Affero General Public License v3.0 5 votes vote down vote up
package org.mmadt.storage.obj.`type`

import org.mmadt.storage.StorageFactory._
import org.scalatest.FunSuite


class TBoolTest extends FunSuite {
  test("bool typeo") {
    assertResult("bool")(bool.name)
    assertResult(bool.and(bool))(bool && bool)
    assertResult(bool.and(true))(bool && true)
    assertResult(bool.or(false))(bool || false)
    assertResult(bool.and(bool.and(bool)))(bool && (bool && bool))
    assertResult(bool.and(bool.and(bool.or(bool))))(bool && (bool && (bool || bool)))
  }
} 
Example 89
Source File: TIntTest.scala    From vm   with GNU Affero General Public License v3.0 5 votes vote down vote up
package org.mmadt.storage.obj.`type`

import org.mmadt.language.obj._
import org.mmadt.language.obj.op.map.{MultOp, PlusOp}
import org.mmadt.storage.StorageFactory._
import org.scalatest.FunSuite


    assert(int.q(?).test((int.q(?) <= int.is(int.gt(5))).range))
    assert(!int.test(bool))
    assert(!bool.test(int))
  }

  test("type/type patterns on atomic objs") {
    assert(int.test(int))
    assert(bool.test(bool))
    assert(str.test(str))
    //
    assert(!int.plus(2).test(int))
    assert(!bool.test(bool.is(btrue)))
    assert(!str.test(str.plus("a")))
    //
    assert(int.plus(2).test(int.plus(2)))
    assert(str.plus("a").test(str.plus("a")))
    //
    assert(int.named("nat").test(int))
    assert(int.named("nat").plus(1).test(int.plus(1)))
    assert(!int.named("nat").test(int.is(int.gt(0))))
    assert(!int.named("nat").test(int.named("nat").is(int.gt(0))))
  }

  test("value/type patterns on atomic objs") {
    assert(str("m").test(str("m")))
    assert(!str("m").test(int(2)))
    assert(str("m").test(str))
    assert(!int.test(int(3)))
    assert(int(3).test(int))
  }

  test("value/type patterns on refinement types") {
    assert(int(6).test(int))
    assert(!int(6).test(int.q(0)))
    assert(int.plus(2).test(int.plus(2)))
    assert(!int.plus(2).test(int.plus(3)))
  }
} 
Example 90
Source File: mmkvStoreTest.scala    From vm   with GNU Affero General Public License v3.0 5 votes vote down vote up
package org.mmadt.storage.mmkv

import javax.script.ScriptContext
import org.mmadt.language.LanguageFactory
import org.mmadt.language.jsr223.mmADTScriptEngine
import org.mmadt.language.obj.`type`.IntType
import org.mmadt.language.obj.value.{BoolValue, IntValue, StrValue}
import org.mmadt.language.obj.{Obj, Rec}
import org.mmadt.storage.StorageFactory._
import org.scalatest.FunSuite


class mmkvStoreTest extends FunSuite {

  lazy val engine: mmADTScriptEngine = LanguageFactory.getLanguage("mmlang").getEngine.get()
  val file1: String = getClass.getResource("/mmkv/mmkv-1.mm").getPath
  val file2: String = getClass.getResource("/mmkv/mmkv-2.mm").getPath
  val file3: String = getClass.getResource("/mmkv/mmkv-3.mm").getPath
  val mmkv: String = "=mmkv"

  test("mmkv storage provider") {
    assert(engine.getBindings(ScriptContext.ENGINE_SCOPE).values().isEmpty)
  }

  test("mmkv store [get]") {
    val store = mmkvStore.open[IntValue, StrValue](file1)
    try {
      assertResult(str("marko"))(store.get(int(1)))
      assertResult(str("ryan"))(store.get(int(2)))
      assertResult(str("stephen"))(store.get(int(3)))
      assertResult(str("kuppitz"))(store.get(int(4)))
    } finally store.close()
  }

  test("mmkv store [count]") {
    val store: mmkvStore[IntType, Rec[StrValue, Obj]] = mmkvStore.open[IntType, Rec[StrValue, Obj]](file2)
    try {
      assertResult(rec[StrValue, Obj](g=(",", Map(str("k") -> int, str("v") -> rec[StrValue, Obj](g=(",", Map(str("name") -> str, str("age") -> int))).named("person")))).named("mmkv"))(store.schema)
      assertResult(4)(store.count())
    } finally store.close()
  }

  test("mmkv store [put]") {
    val store = mmkvStore.open[IntValue, BoolValue](file3)
    try {
      assertResult(rec(g=(",", Map(str("k") -> int, str("v") -> bool))).named("mmkv"))(store.schema)
      store.clear()
      assertResult(0)(store.stream().values.count(_ => true))
      assertResult(bfalse)(store.put(bfalse))
      assertResult(1)(store.stream().values.count(_ => true))
      assert(store.stream().values.map(x => x.gmap(str("k"))).exists(x => x.g == 0))
      assertResult(btrue)(store.put(45, btrue))
      assertResult(2)(store.stream().values.count(_ => true))
      assert(store.stream().values.map(x => x.gmap(str("k"))).exists(x => x.g == 0))
      assert(store.stream().values.map(x => x.gmap(str("k"))).exists(x => x.g == 45))
      assertResult(btrue)(store.get(45))
      assertResult(bfalse)(store.get(0))
    } finally store.close()
  }

  test("mmkv store [close]/[clear]/[count]") {
    var store = mmkvStore.open[IntValue, BoolValue](file3)
    try {
      assertResult(rec(str("k") -> int, str("v") -> bool).named("mmkv"))(store.schema)
      store.clear()
      assertResult(bfalse)(store.put(0, bfalse))
      assertResult(1L)(store.count())
      store.close()
      store = mmkvStore.open[IntValue, BoolValue](file3)
      assertResult(bfalse)(store.get(0))
      assertResult(1L)(store.count())
      store.close()
      store = mmkvStore.open[IntValue, BoolValue](file3)
      assertResult(bfalse)(store.get(0))
      assertResult(1L)(store.count())
      store.clear()
      assertResult(0L)(store.count())
      store.close()
      store = mmkvStore.open[IntValue, BoolValue](file3)
      assertResult(0L)(store.count())
    } finally store.close()
  }

} 
Example 91
Source File: FileMetadataTest.scala    From databus-maven-plugin   with GNU Affero General Public License v3.0 5 votes vote down vote up
import java.io.File

import org.apache.maven.plugin.logging.SystemStreamLog
import org.dbpedia.databus.lib.Datafile
import org.scalatest.FunSuite

class FileMetadataTest extends FunSuite  {

  test("compressed byte size smaller than uncompressed"){
    val list = List ("filestat/instance-types_lang_ga_marvin_2019.ttl.bz2","filestat/basic.nt.bz2")

    list.foreach(i=>{
      val df = datafile(i)
      assert(df.uncompressedByteSize >= df.bytes, "calculated compressed byte size larger than uncompressed size for file:  " + i)
    })
  }

  test("basic fixed value bz2 file parameters/metadata test: basic.nt.bz2") {
    val df =datafile("filestat/basic.nt.bz2")
    assert(df.sha256sum==="1ce31e72c9553e8aa3ed63acd22f3046321a0df2d8ecb85b59af28f5bfb3cbd7" , "sha256sum is calculated wrong")
    assert(df.nonEmptyLines === 6 , "non-empty lines count is calculated wrong")
    assert(df.duplicates === 2, "duplicate lines count is calculated wrong")
    assert(df.sorted === true, "sorted lines flag is calculated wrong")
    assert(df.bytes === 323, "bytes count is calculated wrong")
    assert(df.uncompressedByteSize === 734, "uncompressedByteSize is calculated wrong")
  }

  test("basic fixed value bz2 file with corrupt encoding parameters/metadata test: faulty-char-instance-types_lang_ga.ttl.bz2") {
    val df =datafile("filestat/faulty-char-instance-types_lang_ga.ttl.bz2")
    assert(df.sha256sum==="c785e6305136a9717d981041614487ac6a9f14a1fb3bc2d38d23233caa94aaf5" , "sha256sum is calculated wrong")
    assert(df.nonEmptyLines === -1 , "non-empty lines count is calculated wrong")
    assert(df.duplicates === -1, "duplicate lines count is calculated wrong")
    assert(df.sorted === false, "sorted lines flag is calculated wrong")
    assert(df.bytes === 59079, "bytes count is calculated wrong")
    assert(df.uncompressedByteSize === -1, "uncompressedByteSize is calculated wrong")
  }

  test("basic fixed value bz2 file with corrupt encoding parameters/metadata test: instance-types_lang_cs.ttl.bz2") {
    val df =datafile("filestat/instance-types_lang_cs.ttl.bz2")
    assert(df.sha256sum==="0c0a0d41ce79e0ee8f132d3ca8a7c33e0e4cd9e2e52796dc001fe38e4fdb9f18" , "sha256sum is calculated wrong")
    assert(df.nonEmptyLines === -1 , "non-empty lines count is calculated wrong")
    assert(df.duplicates === -1, "duplicate lines count is calculated wrong")
    assert(df.sorted === false, "sorted lines flag is calculated wrong")
    assert(df.bytes === 1051359, "bytes count is calculated wrong")
    assert(df.uncompressedByteSize === -1, "uncompressedByteSize is calculated wrong")
  }



  test("testing sort order US Sorted vs. ASCII") {
    var df = datafile("filestat/sorttest_us.ttl")
    assert(df.sorted === false, "sorted lines flag is calculated wrong")
    df  = datafile("filestat/sorttest_ascii.txt")
    assert(df.sorted === true, "sorted lines flag is calculated wrong")

  }


  // loads a classpath test resource, computes its file metrics, prints them for debugging, and returns the Datafile
  def datafile(resourcename: String): Datafile = {
    val testFile = new File(getClass.getClassLoader.getResource(resourcename).getFile)
    val df = Datafile(testFile)(new SystemStreamLog())
    df.updateFileMetrics()
    print(df.toString)
    df
  }

} 
Example 92
Source File: AbstractProjectTests.scala    From ScalaClean   with Apache License 2.0 5 votes vote down vote up
package scalaclean

import org.scalatest.FunSuite
import org.scalatest.{ BeforeAndAfterAllConfigMap, ConfigMap }
import scalaclean.cli.{DeadCodeProjectTestRunner, PrivatiserProjectTestRunner}

abstract class AbstractProjectTests extends FunSuite with BeforeAndAfterAllConfigMap {
  private var overwrite = false

  override protected def beforeAll(configMap: ConfigMap) = {
    overwrite = configMap.getWithDefault("overwrite", "false").equalsIgnoreCase("true")
  }

  def deadCodeProjectTest(projectName: String, overwriteTarget: Boolean = false): Unit = {
    deadCodeProjectTest(List(projectName), overwriteTarget)
  }

  def deadCodeProjectTest(projectNames: List[String], overwriteTarget: Boolean): Unit = {
    val res = new DeadCodeProjectTestRunner(projectNames, overwrite || overwriteTarget).run()
    if(!res)
      fail(s" Failed for projects $projectNames, overwriteTarget=$overwriteTarget")
  }

  def privatiserProjectTest(projectName: String, overwriteTarget: Boolean = false): Unit = {
    privatiserProjectTest(List(projectName), overwriteTarget)
  }

  def privatiserProjectTest(projectNames: List[String], overwriteTarget: Boolean): Unit = {
    val res = new PrivatiserProjectTestRunner(projectNames, overwrite || overwriteTarget).run()
    if(!res)
      fail(s" Failed for projects $projectNames, overwriteTarget=$overwriteTarget")
  }


} 
Example 93
Source File: QueryTest.scala    From spark-netezza   with Apache License 2.0 5 votes vote down vote up
package com.ibm.spark.netezza.integration

import org.apache.spark.sql.catalyst.plans.logical
import org.apache.spark.sql.{DataFrame, Row}
import org.scalatest.FunSuite

  def checkAnswer(df: DataFrame, expectedAnswer: Seq[Row]): Option[String] = {
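    // returns None when the collected DataFrame matches the expected rows; otherwise Some(error message)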
    val isSorted = df.queryExecution.logical.collect { case s: logical.Sort => s }.nonEmpty

    val sparkAnswer = try df.collect().toSeq catch {
      case e: Exception =>
        val errorMessage =
          s"""
             |Exception thrown while executing query:
             |${df.queryExecution}
             |== Exception ==
             |$e
             |${org.apache.spark.sql.catalyst.util.stackTraceToString(e)}
          """.stripMargin
        return Some(errorMessage)
    }

    sameRows(expectedAnswer, sparkAnswer, isSorted).map { results =>
      s"""
         |Results do not match for query:
         |${df.queryExecution}
         |== Results ==
         |$results
       """.stripMargin
    }
  }

  def prepareAnswer(answer: Seq[Row], isSorted: Boolean): Seq[Row] = {
    // Converts data to types that we can do equality comparison using Scala collections.
    // For BigDecimal type, the Scala type has a better definition of equality test (similar to
    // Java's java.math.BigDecimal.compareTo).
    // For binary arrays, we convert it to Seq to avoid of calling java.util.Arrays.equals for
    // equality test.
    val converted: Seq[Row] = answer.map(prepareRow)
    if (!isSorted) converted.sortBy(_.toString()) else converted
  }

  // We need to call prepareRow recursively to handle schemas with struct types.
  def prepareRow(row: Row): Row = {
    Row.fromSeq(row.toSeq.map {
      case null => null
      case d: java.math.BigDecimal => BigDecimal(d)
      // Convert array to Seq for easy equality check.
      case b: Array[_] => b.toSeq
      case r: Row => prepareRow(r)
      case o => o
    })
  }

  def sameRows(
                expectedAnswer: Seq[Row],
                sparkAnswer: Seq[Row],
                isSorted: Boolean = false): Option[String] = {
    if (prepareAnswer(expectedAnswer, isSorted) != prepareAnswer(sparkAnswer, isSorted)) {
      val errorMessage =
        s"""
           |== Results ==
           |${sideBySide(
          s"== Correct Answer - ${expectedAnswer.size} ==" +:
            prepareAnswer(expectedAnswer, isSorted).map(_.toString()),
          s"== Spark Answer - ${sparkAnswer.size} ==" +:
            prepareAnswer(sparkAnswer, isSorted).map(_.toString())).mkString("\n")}
        """.stripMargin
      return Some(errorMessage)
    }
    None
  }

  def sideBySide(left: Seq[String], right: Seq[String]): Seq[String] = {
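    // pad the shorter column, then join the rows, marking lines that differ with a leading '!'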
    val maxLeftSize = left.map(_.size).max
    val leftPadded = left ++ Seq.fill(math.max(right.size - left.size, 0))("")
    val rightPadded = right ++ Seq.fill(math.max(left.size - right.size, 0))("")

    leftPadded.zip(rightPadded).map {
      case (l, r) => (if (l == r) " " else "!") + l + (" " * ((maxLeftSize - l.size) + 3)) + r
    }
  }
} 
Example 94
Source File: IntegrationSuiteBase.scala    From spark-netezza   with Apache License 2.0 5 votes vote down vote up
package com.ibm.spark.netezza.integration

import java.sql.Connection

import com.ibm.spark.netezza.NetezzaJdbcUtils
import com.typesafe.config.ConfigFactory
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.{Row, DataFrame, SQLContext}
import org.scalatest.{BeforeAndAfterAll, FunSuite}
import org.slf4j.LoggerFactory

trait IntegrationSuiteBase extends FunSuite with BeforeAndAfterAll with QueryTest {
  private val log = LoggerFactory.getLogger(getClass)

  protected var sc: SparkContext = _
  protected var sqlContext: SQLContext = _
  protected var conn: Connection = _
  protected val prop = new java.util.Properties

  // Configurable vals
  protected var configFile = "application"
  protected var testURL: String = _
  protected var testTable: String = _
  protected var user: String = _
  protected var password: String = _
  protected var numPartitions: Int = _
  protected var sampleDbmaxNumTables: Int = _

  override def beforeAll(): Unit = {
    super.beforeAll()

    sc = new SparkContext("local[*]", "IntegrationTest", new SparkConf())
    sqlContext = new SQLContext(sc)

    val conf = ConfigFactory.load(configFile)
    testURL = conf.getString("test.integration.dbURL")
    testTable = conf.getString("test.integration.table")
    user = conf.getString("test.integration.user")
    password = conf.getString("test.integration.password")
    numPartitions = conf.getInt("test.integration.partition.number")
    sampleDbmaxNumTables = conf.getInt("test.integration.max.numtables")
    prop.setProperty("user", user)
    prop.setProperty("password", password)
    log.info("Attempting to get connection from" + testURL)
    conn = NetezzaJdbcUtils.getConnector(testURL, prop)()
    log.info("got connection.")
  }

  override def afterAll(): Unit = {
    try {
      sc.stop()
    }
    finally {
      conn.close()
      super.afterAll()
    }
  }

  
  def withTable(tableNames: String*)(f: => Unit): Unit = {
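    // run the block, then drop the given tables afterwards (even if the block throws)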
    try f finally {
      tableNames.foreach { name =>
        executeJdbcStmt(s"DROP TABLE $name")
      }
    }
  }
} 
Example 95
Source File: NetezzaBaseSuite.scala    From spark-netezza   with Apache License 2.0 5 votes vote down vote up
package com.ibm.spark.netezza

import org.apache.spark.sql.types.{MetadataBuilder, StructField, StructType}
import org.scalatest.FunSuite


  def buildSchema(cols: Array[Column]): StructType = {
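    // map each column's JDBC type to a Spark SQL type and keep the original column name in the field metadata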
    val fields = new Array[StructField](cols.length)
    var i = 0
    for (col <- cols) {
      val columnType = NetezzaSchema.getSparkSqlType(
        col.jdbcType, col.precision, col.scale, col.signed)
      val metadata = new MetadataBuilder().putString("name", col.name)
      fields(i) = StructField(col.name, columnType, true, metadata.build())
      i = i + 1
    }
    new StructType(fields)
  }
} 
Example 96
Source File: RangerSparkPlanOmitStrategyTest.scala    From spark-ranger   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.sql.execution

import org.apache.spark.sql.catalyst.plans.logical.{RangerSparkMasking, RangerSparkRowFilter}
import org.apache.spark.sql.hive.test.TestHive
import org.scalatest.FunSuite

class RangerSparkPlanOmitStrategyTest extends FunSuite {

  private val spark = TestHive.sparkSession

  test("ranger spark plan omit strategy") {
    val strategy = RangerSparkPlanOmitStrategy(spark)
    val df = spark.range(0, 5)
    val plan1 = df.queryExecution.optimizedPlan
    assert(strategy.apply(plan1) === Nil)
    val plan2 = RangerSparkRowFilter(plan1)
    assert(strategy.apply(plan2) === PlanLater(plan1) :: Nil)
    val plan3 = RangerSparkMasking(plan1)
    assert(strategy.apply(plan3) === PlanLater(plan1) :: Nil)
    val plan4 = RangerSparkMasking(plan2)
    assert(strategy.apply(plan4) === PlanLater(plan2) :: Nil)
    val plan5 = RangerSparkRowFilter(plan3)
    assert(strategy.apply(plan5) === PlanLater(plan3) :: Nil)
  }
} 
Example 97
Source File: RangerSparkMaskingExtensionTest.scala    From spark-ranger   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.sql.catalyst.optimizer

import org.apache.spark.sql.hive.test.TestHive
import org.apache.spark.sql.RangerSparkTestUtils._
import org.apache.spark.sql.catalyst.expressions.Alias
import org.apache.spark.sql.catalyst.plans.logical.{Project, RangerSparkMasking}
import org.scalatest.FunSuite

class RangerSparkMaskingExtensionTest extends FunSuite {

  private val spark = TestHive.sparkSession

  test("data masking for bob show last 4") {
    val extension = RangerSparkMaskingExtension(spark)
    val plan = spark.sql("select * from src").queryExecution.optimizedPlan
    println(plan)
    withUser("bob") {
      val newPlan = extension.apply(plan)
      assert(newPlan.isInstanceOf[Project])
      val project = newPlan.asInstanceOf[Project]
      val key = project.projectList.head
      assert(key.name === "key", "no effect on the unmasked attribute")
      val value = project.projectList.tail
      assert(value.head.name === "value", "attribute name should be unchanged")
      assert(value.head.asInstanceOf[Alias].child.sql ===
        "mask_show_last_n(`value`, 4, 'x', 'x', 'x', -1, '1')")
    }

    withUser("alice") {
      val newPlan = extension.apply(plan)
      assert(newPlan === RangerSparkMasking(plan))
    }
  }

} 
Example 98
Source File: RangerSparkRowFilterExtensionTest.scala    From spark-ranger   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.sql.catalyst.optimizer

import org.apache.spark.sql.hive.test.TestHive
import org.scalatest.FunSuite
import org.apache.spark.sql.RangerSparkTestUtils._
import org.apache.spark.sql.catalyst.plans.logical.{Filter, RangerSparkRowFilter}

class RangerSparkRowFilterExtensionTest extends FunSuite {

  private val spark = TestHive.sparkSession

  test("ranger spark row filter extension") {
    val extension = RangerSparkRowFilterExtension(spark)
    val plan = spark.sql("select * from src").queryExecution.optimizedPlan
    println(plan)
    withUser("bob") {
      val newPlan = extension.apply(plan)
      assert(newPlan.isInstanceOf[RangerSparkRowFilter])
      val filters = newPlan.collect { case f: Filter => f }
      assert(filters.nonEmpty, "ranger row level filters should be applied automatically")
      println(newPlan)
    }
    withUser("alice") {
      val newPlan = extension.apply(plan)
      assert(newPlan.isInstanceOf[RangerSparkRowFilter])
      val filters = newPlan.collect { case f: Filter => f }
      assert(filters.isEmpty, "alice does not have implicit filters")
      println(newPlan)
    }
  }

} 
Example 99
Source File: TestRenaming.scala    From apalache   with Apache License 2.0 5 votes vote down vote up
package at.forsyte.apalache.tla.lir

import at.forsyte.apalache.tla.lir.transformations.impl.TrackerWithListeners
import at.forsyte.apalache.tla.lir.transformations.standard.Renaming
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.{BeforeAndAfterEach, FunSuite}


@RunWith(classOf[JUnitRunner])
class TestRenaming extends FunSuite with BeforeAndAfterEach with TestingPredefs {
  import at.forsyte.apalache.tla.lir.Builder._

  private var renaming = new Renaming(TrackerWithListeners())

  override protected def beforeEach(): Unit = {
    renaming = new Renaming(TrackerWithListeners())
  }

  test("test renaming exists/forall") {
    val original =
        and(
          exists(n_x, n_S, gt(n_x, int(1))),
          forall(n_x, n_T, lt(n_x, int(42))))
    ///
    val expected =
      and(
        exists(name("x_1"), n_S, gt(name("x_1"), int(1))),
        forall(name("x_2"), n_T, lt(name("x_2"), int(42))))
    val renamed = renaming.renameBindingsUnique(original)
    assert(expected == renamed)
  }

  test("test renaming filter") {
    val original =
        cup(
          filter(name("x"), name("S"), eql(name("x"), int(1))),
          filter(name("x"), name("S"), eql(name("x"), int(2)))
        )
    val expected =
      cup(
        filter(name("x_1"), name("S"), eql(name("x_1"), int(1))),
        filter(name("x_2"), name("S"), eql(name("x_2"), int(2))))
    val renamed = renaming.renameBindingsUnique(original)
    assert(expected == renamed)
  }

  test( "Test renaming LET-IN" ) {
    // LET p(t) == \A x \in S . R(t,x) IN \E x \in S . p(x)
    val original =
      letIn(
        exists( n_x, n_S, appOp( name( "p" ), n_x ) ),
        declOp( "p", forall( n_x, n_S, appOp( name( "R" ), name( "t" ), n_x ) ), "t" )
      )

    val expected =
      letIn(
        exists( name( "x_2" ), n_S, appOp( name( "p_1" ), name( "x_2" ) ) ),
        declOp( "p_1", forall( name( "x_1" ), n_S, appOp( name( "R" ), name( "t_1" ), name( "x_1" ) ) ), "t_1" )
      )

    val actual = renaming( original )

    assert(expected == actual)
  }

  test( "Test renaming multiple LET-IN" ) {
    // LET X == TRUE IN X /\ LET X == FALSE IN X
    val original =
      and(
        letIn(
          appOp( name( "X" ) ),
          declOp( "X", trueEx )
        ),
        letIn(
          appOp( name( "X" ) ),
          declOp( "X", falseEx )
        )
      )

    val expected =
      and(
      letIn(
        appOp( name( "X_1" ) ),
        declOp( "X_1", trueEx )
      ),
      letIn(
        appOp( name( "X_2" ) ),
        declOp( "X_2", falseEx )
      )
    )

    val actual = renaming( original )

    assert(expected == actual)
  }

} 
Example 100
Source File: TestLirValues.scala    From apalache   with Apache License 2.0 5 votes vote down vote up
package at.forsyte.apalache.tla.lir

import at.forsyte.apalache.tla.lir.values._
import org.junit.runner.RunWith
import org.scalatest.FunSuite
import org.scalatest.junit.JUnitRunner


@RunWith(classOf[JUnitRunner])
class TestLirValues extends FunSuite {
  test("create booleans") {
    val b = TlaBool(false)
    assert(!b.value)
  }

  test("create int") {
    val i = TlaInt(1)
    assert(i.value == BigInt(1))
    assert(i == TlaInt(1))
    assert(i.isNatural)
    assert(TlaInt(0).isNatural)
    assert(!TlaInt(-1).isNatural)
  }

  test("create a string") {
    val s = TlaStr("hello")
    assert(s.value == "hello")
  }


  test("create a constant") {
    val c = new TlaConstDecl("x")
    assert("x" == c.name)
  }

  test("create a variable") {
    val c = new TlaVarDecl("x")
    assert("x" == c.name)
  }
} 
Example 101
Source File: TestAux.scala    From apalache   with Apache License 2.0 5 votes vote down vote up
package at.forsyte.apalache.tla.lir

import org.junit.runner.RunWith
import org.scalatest.FunSuite
import org.scalatest.junit.JUnitRunner

@RunWith( classOf[JUnitRunner] )
class TestAux extends FunSuite with TestingPredefs {

  test( "Test aux::collectSegments" ){

    val ar0Decl1 = TlaOperDecl( "X", List.empty, n_x )
    val ar0Decl2 = TlaOperDecl( "Y", List.empty, n_y )
    val ar0Decl3 = TlaOperDecl( "Z", List.empty, n_z )

    val arGe0Decl1 = TlaOperDecl( "A", List( SimpleFormalParam( "t" ) ), n_a )
    val arGe0Decl2 = TlaOperDecl( "B", List( SimpleFormalParam( "t" ) ), n_b )
    val arGe0Decl3 = TlaOperDecl( "C", List( SimpleFormalParam( "t" ) ), n_c )

    val pa1 =
      List( ar0Decl1 ) ->
        List( List( ar0Decl1 ) )
    val pa2 =
      List( ar0Decl1, ar0Decl2 ) ->
        List( List( ar0Decl1, ar0Decl2 ) )
    val pa3 =
      List( arGe0Decl1, ar0Decl1 ) ->
        List( List( arGe0Decl1 ), List( ar0Decl1 ) )
    val pa4 =
      List( arGe0Decl1, arGe0Decl2 ) ->
        List( List( arGe0Decl1, arGe0Decl2 ) )
    val pa5 =
      List( arGe0Decl1, arGe0Decl2, ar0Decl1, ar0Decl2, arGe0Decl3 ) ->
        List( List( arGe0Decl1, arGe0Decl2 ), List( ar0Decl1, ar0Decl2 ), List( arGe0Decl3 ) )

    val expected = Seq(
      pa1, pa2, pa3, pa4, pa5
    )
    val cmp = expected map { case (k, v) =>
      (v, aux.collectSegments( k ))
    }
    cmp foreach { case (ex, act) =>
      assert( ex == act )
    }
  }
} 
Example 102
Source File: TestFlatLanguagePred.scala    From apalache   with Apache License 2.0 5 votes vote down vote up
package at.forsyte.apalache.tla.lir.transformations.standard

import at.forsyte.apalache.tla.lir.SimpleFormalParam
import org.scalatest.FunSuite
import at.forsyte.apalache.tla.lir.convenience._
import at.forsyte.apalache.tla.lir.TlaModule

class TestFlatLanguagePred extends FunSuite {
  private val pred = new FlatLanguagePred

  test("a flat expression") {
    val expr = tla.enumSet(tla.int(1), tla.str("abc"), tla.bool(false))
    assert(pred.isExprOk(expr))
  }

  test("a call to a user operator") {
    val expr = tla.enumSet(tla.int(1), tla.str("abc"), tla.bool(false))
    val app = tla.appOp(tla.name("UserOp"), expr)
    assert(!pred.isExprOk(app))
  }

  test("a non-nullary let-in ") {
    val app = tla.appOp(tla.name("UserOp"), tla.int(3))
    val letIn = tla.letIn(app,
      tla.declOp("UserOp",
        tla.plus(tla.int(1), tla.name("x")),
        SimpleFormalParam("x")))
    assert(!pred.isExprOk(app))
  }

  test("a nullary let-in ") {
    val app = tla.appOp(tla.name("UserOp"))
    val letIn = tla.letIn(app,
      tla.declOp("UserOp",
        tla.plus(tla.int(1), tla.int(2))))
    assert(pred.isExprOk(letIn))
  }

  test("nested nullary let-in ") {
    val app = tla.plus(tla.appOp(tla.name("A")), tla.appOp(tla.name("B")))
    val letIn = tla.letIn(app,
      tla.declOp("A",
        tla.plus(tla.int(1), tla.int(2))))
    val outerLetIn =
      tla.letIn(letIn,
        tla.declOp("B",
          tla.int(3)))
    assert(pred.isExprOk(outerLetIn))
  }

  test("a call to a user operator in a module") {
    val appB = tla.appOp(tla.name("B"), tla.int(1))
    val defA = tla.declOp("A", appB)
    val mod = new TlaModule("mod", Seq(defA))
    assert(!pred.isModuleOk(mod))
  }

  test("a module without calls") {
    val appB = tla.int(1)
    val defA = tla.declOp("A", appB)
    val mod = new TlaModule("mod", Seq(defA))
    assert(pred.isModuleOk(mod))
  }
} 
Example 103
Source File: TestTypeReduction.scala    From apalache   with Apache License 2.0 5 votes vote down vote up
package at.forsyte.apalache.tla.types

import at.forsyte.apalache.tla.lir.TestingPredefs
import org.junit.runner.RunWith
import org.scalatest.{BeforeAndAfter, FunSuite}
import org.scalatest.junit.JUnitRunner

@RunWith( classOf[JUnitRunner] )
class TestTypeReduction extends FunSuite with TestingPredefs with BeforeAndAfter {

  var gen = new SmtVarGenerator
  var tr  = new TypeReduction( gen )

  before {
    gen = new SmtVarGenerator
    tr = new TypeReduction( gen )
  }

  test( "Test nesting" ) {
    val tau = FunT( IntT, SetT( IntT ) )
    val m = Map.empty[TypeVar, SmtTypeVariable]
    val rr = tr( tau, m )
    assert( rr.t == fun( int, set( int ) ) )
  }

  test("Test tuples"){
    val tau = SetT( FunT( TupT( IntT, StrT ), SetT( IntT ) ) )
    val m = Map.empty[TypeVar, SmtTypeVariable]
    val rr = tr(tau, m)
    val idx = SmtIntVariable( 0 )
    assert( rr.t == set( fun( tup( idx ), set( int ) ) ) )
    assert( rr.phi.contains( hasIndex( idx, 0, int ) ) )
    assert( rr.phi.contains( hasIndex( idx, 1, str ) ) )
  }
} 
Example 104
Source File: TestVCGenerator.scala    From apalache   with Apache License 2.0 5 votes vote down vote up
package at.forsyte.apalache.tla.bmcmt

import at.forsyte.apalache.tla.imp.SanyImporter
import at.forsyte.apalache.tla.imp.src.SourceStore
import at.forsyte.apalache.tla.lir.transformations.impl.IdleTracker
import at.forsyte.apalache.tla.lir.{TlaModule, TlaOperDecl}
import org.junit.runner.RunWith
import org.scalatest.FunSuite
import org.scalatest.junit.JUnitRunner

import scala.io.Source

@RunWith(classOf[JUnitRunner])
class TestVCGenerator extends FunSuite {
  private def mkVCGen(): VCGenerator = {
    new VCGenerator(new IdleTracker)
  }

  test("simple invariant") {
    val text =
      """---- MODULE inv ----
        |EXTENDS Integers
        |VARIABLE x
        |Inv == x > 0
        |====================
      """.stripMargin

    val mod = loadFromText("inv", text)
    val newMod = mkVCGen().gen(mod, "Inv")
    assertDecl(newMod, "VCInv$0", "x > 0")
    assertDecl(newMod, "VCNotInv$0", "¬(x > 0)")
  }

  test("conjunctive invariant") {
    val text =
      """---- MODULE inv ----
        |EXTENDS Integers
        |VARIABLE x
        |Inv == x > 0 /\ x < 10
        |====================
      """.stripMargin

    val mod = loadFromText("inv", text)
    val newMod = mkVCGen().gen(mod, "Inv")
    assertDecl(newMod, "VCInv$0", "x > 0")
    assertDecl(newMod, "VCInv$1", "x < 10")
    assertDecl(newMod, "VCNotInv$0", "¬(x > 0)")
    assertDecl(newMod, "VCNotInv$1", "¬(x < 10)")
  }

  test("conjunction under universals") {
    val text =
      """---- MODULE inv ----
        |EXTENDS Integers
        |VARIABLE x, S
        |Inv == \A z \in S: \A y \in S: y > 0 /\ y < 10
        |====================
      """.stripMargin

    val mod = loadFromText("inv", text)
    val newMod = mkVCGen().gen(mod, "Inv")
    assertDecl(newMod, "VCInv$0", """∀z ∈ S: (∀y ∈ S: (y > 0))""")
    assertDecl(newMod, "VCInv$1", """∀z ∈ S: (∀y ∈ S: (y < 10))""")
    assertDecl(newMod, "VCNotInv$0", """¬(∀z ∈ S: (∀y ∈ S: (y > 0)))""")
    assertDecl(newMod, "VCNotInv$1", """¬(∀z ∈ S: (∀y ∈ S: (y < 10)))""")
  }

  private def assertDecl(mod: TlaModule, name: String, expectedBodyText: String): Unit = {
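    // look up the generated verification condition by name and compare the textual form of its body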
    val vc = mod.declarations.find(_.name == name)
    assert(vc.nonEmpty, s"(VC $name not found)")
    assert(vc.get.isInstanceOf[TlaOperDecl])
    assert(vc.get.asInstanceOf[TlaOperDecl].body.toString == expectedBodyText)
  }

  private def loadFromText(moduleName: String, text: String): TlaModule = {
    val locationStore = new SourceStore
    val (rootName, modules) = new SanyImporter(locationStore)
      .loadFromSource(moduleName, Source.fromString(text))
    modules(moduleName)
  }
} 
Example 105
Source File: TestTypeInference.scala    From apalache   with Apache License 2.0 5 votes vote down vote up
package at.forsyte.apalache.tla.bmcmt

import at.forsyte.apalache.tla.bmcmt.types.{Signatures, TypeInference}
import at.forsyte.apalache.tla.lir.TestingPredefs
import at.forsyte.apalache.tla.lir.convenience._
import org.junit.runner.RunWith
import org.scalatest.FunSuite
import org.scalatest.junit.JUnitRunner

// TODO: remove?
@RunWith( classOf[JUnitRunner] )
class TestTypeInference extends FunSuite with TestingPredefs {

  ignore( "Signatures" ) {
    val exs = List(
      tla.and( n_x, n_y ),
      tla.choose( n_x, n_S, n_p ),
      tla.enumSet( seq( 10 ) : _* ),
      tla.in( n_x, n_S ),
      tla.map( n_e, n_x, n_S )
    )

    val sigs = exs map Signatures.get

    exs zip sigs foreach { case (x, y) => println( s"${x}  ...  ${y}" ) }

    val funDef = tla.funDef( tla.plus( n_x, n_y ), n_x, n_S, n_y, n_T )

    val sig = Signatures.get( funDef )

    printsep()
    println( sig )
    printsep()
  }

  ignore( "TypeInference" ) {
    val ex = tla.and( tla.primeEq( n_a, tla.choose( n_x, n_S, n_p ) ), tla.in( 2, n_S ) )

    val r = TypeInference.theta( ex )

    println( r )

  }

  ignore( "Application" ) {

    val ex = tla.eql( tla.plus(  tla.appFun( n_f, n_x ) , 2), 4 )
    val ex2 =
      tla.and(
        tla.in( n_x, n_S ),
        tla.le(
          tla.plus(
            tla.mult( 2, n_x ),
            5
          ),
          10
        ),
        tla.primeEq( n_x,
          tla.appFun(
            n_f,
            n_x
          )
        )
      )

    val r = TypeInference( ex )
  }
} 
Example 106
Source File: RewriterBase.scala    From apalache   with Apache License 2.0 5 votes vote down vote up
package at.forsyte.apalache.tla.bmcmt

import java.io.{PrintWriter, StringWriter}

import at.forsyte.apalache.tla.bmcmt.types.eager.TrivialTypeFinder
import at.forsyte.apalache.tla.lir.convenience.tla
import org.scalatest.{BeforeAndAfterEach, FunSuite}

class RewriterBase extends FunSuite with BeforeAndAfterEach {
  protected var solverContext: SolverContext = new PreproSolverContext(new Z3SolverContext())
  protected var arena: Arena = Arena.create(solverContext)

  override def beforeEach() {
    solverContext = new PreproSolverContext(new Z3SolverContext(debug = true))
    arena = Arena.create(solverContext)
  }

  override def afterEach() {
    solverContext.dispose()
  }

  protected def create(): SymbStateRewriterAuto = {
    new SymbStateRewriterAuto(solverContext)
  }

  protected def createWithoutCache(): SymbStateRewriter = {
    new SymbStateRewriterImpl(solverContext, new TrivialTypeFinder())
  }

  protected def assertUnsatOrExplain(rewriter: SymbStateRewriter, state: SymbState): Unit = {
    assertOrExplain("UNSAT", rewriter, state, !solverContext.sat())
  }

  protected def assumeTlaEx(rewriter: SymbStateRewriter, state: SymbState): SymbState = {
    val nextState = rewriter.rewriteUntilDone(state.setTheory(BoolTheory()))
    solverContext.assertGroundExpr(nextState.ex)
    assert(solverContext.sat())
    nextState
  }

  protected def assertTlaExAndRestore(rewriter: SymbStateRewriter, state: SymbState): Unit = {
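    // the rewritten expression must be satisfiable and its negation unsatisfiable; push/pop leaves the solver context unchanged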
    rewriter.push()
    val nextState = rewriter.rewriteUntilDone(state.setTheory(BoolTheory()))
    assert(solverContext.sat())
    rewriter.push()
    solverContext.assertGroundExpr(nextState.ex)
    assert(solverContext.sat())
    rewriter.pop()
    rewriter.push()
    solverContext.assertGroundExpr(tla.not(nextState.ex))
    assertUnsatOrExplain(rewriter, nextState)
    rewriter.pop()
    rewriter.pop()
  }

  private def assertOrExplain(msg: String, rewriter: SymbStateRewriter,
                              state: SymbState, outcome: Boolean): Unit = {
    if (!outcome) {
      val writer = new StringWriter()
      new SymbStateDecoder(solverContext, rewriter).dumpArena(state, new PrintWriter(writer))
      solverContext.log(writer.getBuffer.toString)
      solverContext.push() // push and pop flush the log output
      solverContext.pop()
      fail("Expected %s, check log.smt for explanation".format(msg))
    }

  }
} 
Example 107
Source File: TestArena.scala    From apalache   with Apache License 2.0 5 votes vote down vote up
package at.forsyte.apalache.tla.bmcmt

import at.forsyte.apalache.tla.bmcmt.types.{BoolT, FinSetT, UnknownT}
import org.junit.runner.RunWith
import org.scalatest.FunSuite
import org.scalatest.junit.JUnitRunner

@RunWith(classOf[JUnitRunner])
class TestArena extends FunSuite {
  test("create cells") {
    val solverContext = new Z3SolverContext()
    val emptyArena = Arena.create(solverContext)
    val arena = emptyArena.appendCell(UnknownT())
    assert(emptyArena.cellCount + 1 == arena.cellCount)
    assert(UnknownT() == arena.topCell.cellType)
    val arena2 = arena.appendCell(BoolT())
    assert(emptyArena.cellCount + 2 == arena2.cellCount)
    assert(BoolT() == arena2.topCell.cellType)
  }

  test("add 'has' edges") {
    val solverContext = new Z3SolverContext()
    val arena = Arena.create(solverContext).appendCell(FinSetT(UnknownT()))
    val set = arena.topCell
    val arena2 = arena.appendCell(BoolT())
    val elem = arena2.topCell
    val arena3 = arena2.appendHas(set, elem)
    assert(List(elem) == arena3.getHas(set))
  }

  test("BOOLEAN has FALSE and TRUE") {
    val solverContext = new Z3SolverContext()
    val arena = Arena.create(solverContext)
    val boolean = arena.cellBooleanSet()
    assert(List(arena.cellFalse(), arena.cellTrue()) == arena.getHas(arena.cellBooleanSet()))
  }
} 
Example 108
Source File: TestSourceStore.scala    From apalache   with Apache License 2.0 5 votes vote down vote up
package at.forsyte.apalache.tla.imp.src

import at.forsyte.apalache.tla.lir.convenience.tla
import at.forsyte.apalache.tla.lir.src.SourceRegion
import org.junit.runner.RunWith
import org.scalatest.FunSuite
import org.scalatest.junit.JUnitRunner


@RunWith(classOf[JUnitRunner])
class TestSourceStore extends FunSuite {
  test("basic add and find") {
    val store = new SourceStore()
    val ex = tla.int(1)
    val loc = SourceLocation("root", SourceRegion(1, 2, 3, 4))
    store.addRec(ex, loc)
    val foundLoc = store.find(ex.ID)
    assert(loc == foundLoc.get)
  }

  test("recursive add and find") {
    val store = new SourceStore()
    val int1 = tla.int(1)
    val set = tla.enumSet(int1)
    val loc = SourceLocation("root", SourceRegion(1, 2, 3, 4))
    store.addRec(set, loc)
    val foundLoc = store.find(set.ID)
    assert(loc == foundLoc.get)
    val foundLoc2 = store.find(int1.ID)
    assert(loc == foundLoc2.get)
  }

  test("locations are not overwritten") {
    val store = new SourceStore()
    val int1 = tla.int(1)
    val set = tla.enumSet(int1)
    val set2 = tla.enumSet(set)
    val loc1 = SourceLocation("tada", SourceRegion(100, 200, 300, 400))
    store.addRec(int1, loc1)
    val loc2 = SourceLocation("root", SourceRegion(1, 2, 3, 4))
    store.addRec(set2, loc2)
    assert(loc2 == store.find(set2.ID).get)
    assert(loc2 == store.find(set.ID).get)
    assert(loc1 == store.find(int1.ID).get)
  }
} 
Example 109
Source File: TestRegionTree.scala    From apalache   with Apache License 2.0 5 votes vote down vote up
package at.forsyte.apalache.tla.imp.src

import at.forsyte.apalache.tla.lir.src.{RegionTree, SourcePosition, SourceRegion}
import org.junit.runner.RunWith
import org.scalatest.FunSuite
import org.scalatest.junit.JUnitRunner

@RunWith(classOf[JUnitRunner])
class TestRegionTree extends FunSuite {
  test("add") {
    val tree = new RegionTree()
    val region = SourceRegion(SourcePosition(1, 20), SourcePosition(3, 10))
    tree.add(region)
  }

  test("add a subregion, then size") {
    val tree = new RegionTree()
    val reg1 = SourceRegion(SourcePosition(1, 20), SourcePosition(3, 10))
    tree.add(reg1)
    assert(tree.size == 1)
    val reg2 = SourceRegion(SourcePosition(1, 20), SourcePosition(2, 5))
    tree.add(reg2)
    assert(tree.size == 2)
    val reg3 = SourceRegion(SourcePosition(2, 10), SourcePosition(3, 10))
    tree.add(reg3)
    assert(tree.size == 3)
  }

  test("add an overlapping subregion") {
    val tree = new RegionTree()
    val reg1 = SourceRegion(SourcePosition(1, 10), SourcePosition(3, 10))
    tree.add(reg1)
    val reg2 = SourceRegion(SourcePosition(1, 20), SourcePosition(5, 20))
    assertThrows[IllegalArgumentException] {
      tree.add(reg2)
    }
  }

  test("add a small region, then a larger region") {
    val tree = new RegionTree()
    val reg1 = SourceRegion(SourcePosition(2, 10), SourcePosition(3, 10))
    tree.add(reg1)
    val reg2 = SourceRegion(SourcePosition(1, 1), SourcePosition(4, 1))
    tree.add(reg2)
  }

  test("add a region twice") {
    val tree = new RegionTree()
    val reg1 = SourceRegion(SourcePosition(2, 10), SourcePosition(3, 10))
    tree.add(reg1)
    val reg2 = SourceRegion(SourcePosition(2, 10), SourcePosition(3, 10))
    tree.add(reg2)
  }

  test("add and find") {
    val tree = new RegionTree()
    val region = SourceRegion(SourcePosition(1, 20), SourcePosition(3, 10))
    val idx = tree.add(region)
    val found = tree(idx)
    assert(found == region)
  }

  test("find non-existing index") {
    val tree = new RegionTree()
    val region = SourceRegion(SourcePosition(1, 20), SourcePosition(3, 10))
    val idx = tree.add(region)
    assertThrows[IndexOutOfBoundsException] {
      tree(999)
    }
  }
} 
Example 110
Source File: TestConstAndDefRewriter.scala    From apalache   with Apache License 2.0 5 votes vote down vote up
package at.forsyte.apalache.tla.pp

import at.forsyte.apalache.tla.imp.SanyImporter
import at.forsyte.apalache.tla.imp.src.SourceStore
import at.forsyte.apalache.tla.lir.{SimpleFormalParam, TlaOperDecl}
import at.forsyte.apalache.tla.lir.convenience._
import at.forsyte.apalache.tla.lir.transformations.impl.IdleTracker
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.{BeforeAndAfterEach, FunSuite}

import scala.io.Source

@RunWith(classOf[JUnitRunner])
class TestConstAndDefRewriter extends FunSuite with BeforeAndAfterEach {
  test("override a constant") {
    val text =
      """---- MODULE const ----
        |CONSTANT n
        |OVERRIDE_n == 10
        |A == {n}
        |================================
      """.stripMargin

    val (rootName, modules) = new SanyImporter(new SourceStore)
      .loadFromSource("const", Source.fromString(text))
    val root = modules(rootName)
    val rewritten = new ConstAndDefRewriter(new IdleTracker())(root)
    assert(rewritten.constDeclarations.isEmpty) // no constants anymore
    assert(rewritten.operDeclarations.size == 2)
    val expected_n = TlaOperDecl("n", List(), tla.int(10))
    assert(expected_n == rewritten.operDeclarations.head)
    val expected_A = TlaOperDecl("A", List(), tla.enumSet(tla.appOp(tla.name("n"))))
    assert(expected_A == rewritten.operDeclarations(1))
  }

  // In TLA+, constants may be operators with multiple arguments.
  // We do not support that yet.
  test("override a constant with a unary operator") {
    val text =
      """---- MODULE const ----
        |CONSTANT n
        |OVERRIDE_n(x) == x
        |A == {n}
        |================================
      """.stripMargin

    val (rootName, modules) = new SanyImporter(new SourceStore)
      .loadFromSource("const", Source.fromString(text))
    val root = modules(rootName)
    assertThrows[OverridingError](new ConstAndDefRewriter(new IdleTracker())(root))
  }

  test("overriding a variable with an operator => error") {
    val text =
      """---- MODULE const ----
        |VARIABLE n, m
        |OVERRIDE_n == m
        |A == {n}
        |================================
      """.stripMargin

    val (rootName, modules) = new SanyImporter(new SourceStore)
      .loadFromSource("const", Source.fromString(text))
    val root = modules(rootName)
    assertThrows[OverridingError](new ConstAndDefRewriter(new IdleTracker())(root))
  }

  test("override an operator") {
    val text =
      """---- MODULE op ----
        |BoolMin(S) == CHOOSE x \in S: \A y \in S: x => y
        |OVERRIDE_BoolMin(S) == CHOOSE x \in S: TRUE
        |================================
      """.stripMargin

    val (rootName, modules) = new SanyImporter(new SourceStore)
      .loadFromSource("op", Source.fromString(text))
    val root = modules(rootName)
    val rewritten = new ConstAndDefRewriter(new IdleTracker())(root)
    assert(rewritten.constDeclarations.isEmpty)
    assert(rewritten.operDeclarations.size == 1)
    val expected = TlaOperDecl("BoolMin", List(SimpleFormalParam("S")),
      tla.choose(tla.name("x"), tla.name("S"), tla.bool(true)))
    assert(expected == rewritten.operDeclarations.head)
  }

  test("override a unary operator with a binary operator") {
    val text =
      """---- MODULE op ----
        |BoolMin(S) == CHOOSE x \in S: \A y \in S: x => y
        |OVERRIDE_BoolMin(S, T) == CHOOSE x \in S: x \in T
        |================================
      """.stripMargin

    val (rootName, modules) = new SanyImporter(new SourceStore)
      .loadFromSource("op", Source.fromString(text))
    val root = modules(rootName)
    assertThrows[OverridingError](new ConstAndDefRewriter(new IdleTracker())(root))
  }
} 
Example 111
Source File: TestUniqueNameGenerator.scala    From apalache   with Apache License 2.0 5 votes vote down vote up
package at.forsyte.apalache.tla.pp

import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.{BeforeAndAfterEach, FunSuite}

@RunWith(classOf[JUnitRunner])
class TestUniqueNameGenerator extends FunSuite with BeforeAndAfterEach {
  test("first three") {
    val gen = new UniqueNameGenerator
    assert("t_1" == gen.newName())
    assert("t_2" == gen.newName())
    assert("t_3" == gen.newName())
  }

  test("after 10000") {
    val gen = new UniqueNameGenerator
    for (i <- 1.to(10000)) {
      gen.newName()
    }
    assert("t_7pt" == gen.newName())
  }
} 
Example 112
Source File: TestPassChainExecutor.scala    From apalache   with Apache License 2.0 5 votes vote down vote up
package at.forsyte.apalache.infra.passes

import org.junit.runner.RunWith
import org.scalatest.FunSuite
import org.scalatest.easymock.EasyMockSugar
import org.scalatest.junit.JUnitRunner

@RunWith(classOf[JUnitRunner])
class TestPassChainExecutor extends FunSuite with EasyMockSugar {
  test("""2 passes, OK""") {
    val pass1 = mock[Pass]
    val pass2 = mock[Pass]
    expecting {
      pass1.name.andReturn("pass1").anyTimes()
      pass1.execute().andReturn(true)
      pass1.next().andReturn(Some(pass2))
      pass2.name.andReturn("pass2").anyTimes()
      pass2.execute().andReturn(true)
      pass2.next().andReturn(None)
    }
    // run the chain
    whenExecuting(pass1, pass2) {
      val options = new WriteablePassOptions()
      val executor = new PassChainExecutor(options, pass1)
      val result = executor.run()
      assert(result.isDefined)
      assert(result.contains(pass2))
    }
  }

  test("""2 passes, first fails""") {
    val pass1 = mock[Pass]
    val pass2 = mock[Pass]
    expecting {
      pass1.name.andReturn("pass1").anyTimes()
      pass1.execute().andReturn(false)
    }
    // run the chain
    whenExecuting(pass1, pass2) {
      val options = new WriteablePassOptions()
      val executor = new PassChainExecutor(options, pass1)
      val result = executor.run()
      assert(result.isEmpty)
    }
  }
} 
Example 113
Source File: GraphMapTest.scala    From stellar-random-walk   with Apache License 2.0 5 votes vote down vote up
package au.csiro.data61.randomwalk.algorithm

import org.scalatest.FunSuite

class GraphMapTest extends FunSuite {

  test("test GraphMap data structure") {
    val e1 = Array((2, 1.0f))
    val e2 = Array((3, 1.0f))
    val e3 = Array((3, 1.0f))
    val e4 = Array((1, 1.0f))
    var v2N = Array((1, e1))
    GraphMap.addVertex(1, e1)
    GraphMap.addVertex(2)
    assert(GraphMap.getNumEdges == 1)
    assert(GraphMap.getNumVertices == 2)
    assertMap(v2N, GraphMap)

    GraphMap.reset
    v2N = Array((1, e1 ++ e2))
    GraphMap.addVertex(1, e1 ++ e2)
    GraphMap.addVertex(2)
    GraphMap.addVertex(3)
    assertMap(v2N, GraphMap)


    GraphMap.reset
    v2N = v2N ++ Array((2, e3 ++ e4))
    GraphMap.addVertex(2, e3 ++ e4)
    GraphMap.addVertex(1, e1 ++ e2)
    GraphMap.addVertex(3)
    assertMap(v2N, GraphMap)
  }

  // each vertex in the expected mapping must have exactly the expected neighbor list in GraphMap
  private def assertMap(verticesToNeighbors: Array[(Int, Array[(Int, Float)])], gMap: GraphMap.type) = {
    for (v <- verticesToNeighbors) {
      var neighbors: Array[(Int, Float)] = Array()
      for (e <- v._2) {
        neighbors = neighbors ++ Array((e._1, e._2))
      }
      assert(gMap.getNeighbors(v._1) sameElements neighbors)
    }
  }

} 
Example 114
Source File: ExceptionPrinterTest.scala    From bandar-log   with Apache License 2.0 5 votes vote down vote up
package com.aol.one.dwh.infra.util

import org.scalatest.FunSuite

class ExceptionPrinterTest extends FunSuite {

  test("getting stack trace as string") {
    val e = new Exception("sf") with ExceptionPrinter
    val st = List(
      new StackTraceElement("f.q.d.n.Class", "c", "Class.java", 30),
      new StackTraceElement("f.q.d.n.Class", "b", "Class.java", 20),
      new StackTraceElement("f.q.d.n.Class", "a", "Class.java", 10)
    )
    e.setStackTrace(st.toArray)

    val expected: String = st.map(_.toString()).mkString("\n") ++ "\n"
    val actual: String = e.getStackTraceString

    assert(expected == actual, "Stacktrace converted to string correctly")
  }

} 
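The expected string above is simply the stack frames joined by newlines with a trailing newline, so the mixin under test can be sketched in a few lines (illustrative name; the real trait lives in the source project):

// Hedged sketch of the mixin exercised above.
trait SimpleExceptionPrinter { self: Throwable =>
  def getStackTraceString: String =
    self.getStackTrace.map(_.toString).mkString("", "\n", "\n")
}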
Example 115
Source File: KafkaMetricFactoryTest.scala    From bandar-log   with Apache License 2.0 5 votes vote down vote up
package com.aol.one.dwh.bandarlog.metrics

import com.aol.one.dwh.bandarlog.connectors.KafkaConnector
import com.aol.one.dwh.bandarlog.metrics.BaseMetrics._
import com.aol.one.dwh.bandarlog.metrics.KafkaMetricFactoryTest._
import com.aol.one.dwh.bandarlog.providers.{KafkaInMessagesProvider, KafkaLagProvider, KafkaOutMessagesProvider}
import com.aol.one.dwh.infra.config.{Tag, Topic}
import org.scalatest.FunSuite
import org.scalatest.mock.MockitoSugar


object KafkaMetricFactoryTest {
  private val metricPrefix = "kafka_prefix"
  private val topic = Topic("some_topic_id", Set("topic-1", "topic-2"), "some_group_id")
  private val expectedTags = List(Tag("topic", topic.id), Tag("group-id", topic.groupId))
}

class KafkaMetricFactoryTest extends FunSuite with MockitoSugar {

  private val kafkaConnector = mock[KafkaConnector]
  private val kafkaMetricFactory = new KafkaMetricFactory(kafkaConnector)

  test("create kafka Metric & Provider for IN metric id") {
    val result = kafkaMetricFactory.create(IN, metricPrefix, topic)

    assertMetric(result.metric, "in_messages")
    assert(result.provider.isInstanceOf[KafkaInMessagesProvider])
  }

  test("create kafka Metric & Provider for OUT metric id") {
    val result = kafkaMetricFactory.create(OUT, metricPrefix, topic)

    assertMetric(result.metric, "out_messages")
    assert(result.provider.isInstanceOf[KafkaOutMessagesProvider])
  }

  test("create kafka Metric & Provider for LAG metric id") {
    val result = kafkaMetricFactory.create(LAG, metricPrefix, topic)

    assertMetric(result.metric, "lag")
    assert(result.provider.isInstanceOf[KafkaLagProvider])
  }

  test("throw exception in unknown metric case") {
    intercept[IllegalArgumentException] {
      kafkaMetricFactory.create("UNKNOWN_METRIC", metricPrefix, topic)
    }
  }

  private def assertMetric[V](metric: Metric[V], expectedName: String) = {
    assert(metric.prefix == metricPrefix)
    assert(metric.name == expectedName)
    assert(metric.tags == expectedTags)
    assert(metric.value == AtomicValue(None))
  }
} 
Example 116
Source File: SqlTimestampProviderTest.scala    From bandar-log   with Apache License 2.0 5 votes vote down vote up
package com.aol.one.dwh.bandarlog.providers

import com.aol.one.dwh.bandarlog.connectors.JdbcConnector
import com.aol.one.dwh.infra.config.Table
import com.aol.one.dwh.infra.sql.{Query, VerticaMaxValuesQuery}
import org.mockito.Matchers.any
import org.mockito.Mockito.when
import org.scalatest.FunSuite
import org.scalatest.mock.MockitoSugar

class SqlTimestampProviderTest extends FunSuite with MockitoSugar {

  private val query = VerticaMaxValuesQuery(Table("table", List("column"), None))
  private val jdbcConnector = mock[JdbcConnector]
  private val sqlTimestampProvider = new SqlTimestampProvider(jdbcConnector, query)

  test("check timestamp value by connector and query") {
    val resultTimestamp = Some(1234567890L)
    when(jdbcConnector.runQuery(any(classOf[Query]), any())).thenReturn(resultTimestamp)

    val result = sqlTimestampProvider.provide()

    assert(result.getValue == resultTimestamp)
  }

  test("return none if can't get timestamp value") {
    when(jdbcConnector.runQuery(any(classOf[Query]), any())).thenReturn(None)

    val result = sqlTimestampProvider.provide()

    assert(result.getValue.isEmpty)
  }
} 
Example 117
Source File: GlueTimestampProviderTest.scala    From bandar-log   with Apache License 2.0 5 votes vote down vote up
package com.aol.one.dwh.bandarlog.providers

import com.aol.one.dwh.bandarlog.connectors.GlueConnector
import com.aol.one.dwh.infra.config.Table
import org.mockito.Matchers.any
import org.mockito.Mockito.when
import org.scalatest.FunSuite
import org.scalatest.mock.MockitoSugar

class GlueTimestampProviderTest extends FunSuite with MockitoSugar {

  private val table = mock[Table]
  private val glueConnector = mock[GlueConnector]
  private val glueTimestampProvider = new GlueTimestampProvider(glueConnector, table)

  test("check timestamp value by glue connector and table") {
    val glueTimestamp = 1533709910004L
    when(glueConnector.getMaxPartitionValue(any())).thenReturn(glueTimestamp)

    val result = glueTimestampProvider.provide()

    assert(result.getValue == Some(glueTimestamp))
  }

  test("return zero if partition column does not have values") {
    when(glueConnector.getMaxPartitionValue(any())).thenReturn(0)

    val result = glueTimestampProvider.provide()

    assert(result.getValue == Some(0))
  }
} 
Example 118
Source File: SqlLagProviderTest.scala    From bandar-log   with Apache License 2.0 5 votes vote down vote up
package com.aol.one.dwh.bandarlog.providers

import com.aol.one.dwh.bandarlog.metrics.AtomicValue
import org.mockito.Mockito.when
import org.scalatest.FunSuite
import org.scalatest.mock.MockitoSugar


class SqlLagProviderTest extends FunSuite with MockitoSugar {

  private val fromProvider = mock[SqlTimestampProvider]
  private val toProvider = mock[SqlTimestampProvider]
  private val toGlueProvider = mock[GlueTimestampProvider]
  private val lagProvider1 = new SqlLagProvider(fromProvider, toProvider)
  private val lagProvider2 = new SqlLagProvider(fromProvider, toGlueProvider)

  test("check lag between from and to providers") {
    val fromValue = AtomicValue(Some(7L))
    val toValue = AtomicValue(Some(4L))
    val toGlueValue = AtomicValue(Some(6L))

    when(fromProvider.provide()).thenReturn(fromValue)
    when(toProvider.provide()).thenReturn(toValue)
    when(toGlueProvider.provide()).thenReturn(toGlueValue)

    val lag1 = lagProvider1.provide()
    val lag2 = lagProvider2.provide()

    assert(lag1.getValue.nonEmpty)
    assert(lag1.getValue.get == 3)
    assert(lag2.getValue.nonEmpty)
    assert(lag2.getValue.get == 1)
  }

  test("return none if 'from provider' value is none") {
    val toValue = AtomicValue(Some(4L))

    when(fromProvider.provide()).thenReturn(AtomicValue[Long](None))
    when(toProvider.provide()).thenReturn(toValue)

    val lag = lagProvider1.provide()

    assert(lag.getValue.isEmpty)
  }

  test("return none if 'to provider' value is none") {
    val fromValue = AtomicValue(Some(7L))

    when(fromProvider.provide()).thenReturn(fromValue)
    when(toProvider.provide()).thenReturn(AtomicValue[Long](None))
    when(toGlueProvider.provide()).thenReturn(AtomicValue[Long](None))

    val lag1 = lagProvider1.provide()
    val lag2 = lagProvider2.provide()

    assert(lag1.getValue.isEmpty)
    assert(lag2.getValue.isEmpty)
  }

  test("return none if both providers values is none") {
    when(fromProvider.provide()).thenReturn(AtomicValue[Long](None))
    when(toProvider.provide()).thenReturn(AtomicValue[Long](None))
    when(toGlueProvider.provide()).thenReturn(AtomicValue[Long](None))

    val lag1 = lagProvider1.provide()
    val lag2 = lagProvider2.provide()

    assert(lag1.getValue.isEmpty)
    assert(lag2.getValue.isEmpty)
  }
} 
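All four tests above follow from one rule: the lag is the difference of the two timestamps, defined only when both providers return a value. A minimal sketch of that combination with Option (illustrative signature, not the project's API):

// Hedged sketch: lag = from - to, present only when both sides are present.
def lagBetween(from: Option[Long], to: Option[Long]): Option[Long] =
  for {
    f <- from
    t <- to
  } yield f - t

// lagBetween(Some(7L), Some(4L)) == Some(3L); any None input yields None.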
Example 119
Source File: KafkaOutMessagesProviderTest.scala    From bandar-log   with Apache License 2.0 5 votes vote down vote up
package com.aol.one.dwh.bandarlog.providers

import com.aol.one.dwh.bandarlog.connectors.KafkaConnector
import com.aol.one.dwh.infra.config.Topic
import kafka.common.TopicAndPartition
import org.mockito.Mockito.when
import org.scalatest.FunSuite
import org.scalatest.mock.MockitoSugar


class KafkaOutMessagesProviderTest extends FunSuite with MockitoSugar {

  private val kafkaConnector = mock[KafkaConnector]
  private val topic = Topic("topic_id", Set("topic_1", "topic_2"), "group_id")

  test("check count of out messages/offsets over all topic partitions") {
    val offsets = Option(Map(
      TopicAndPartition("topic_1", 1) -> 1L,
      TopicAndPartition("topic_2", 2) -> 2L,
      TopicAndPartition("topic_3", 3) -> 3L
    ))
    when(kafkaConnector.getOffsets(topic)).thenReturn(offsets)

    val result = new KafkaOutMessagesProvider(kafkaConnector, topic).provide()

    assert(result.getValue.nonEmpty)
    assert(result.getValue.get == 6) // 1 + 2 + 3
  }

  test("check count of out messages/offsets for empty offsets result") {
    when(kafkaConnector.getOffsets(topic)).thenReturn(Some(Map[TopicAndPartition, Long]()))

    val result = new KafkaOutMessagesProvider(kafkaConnector, topic).provide()

    assert(result.getValue.nonEmpty)
    assert(result.getValue.get == 0)
  }

  test("return none if can't retrieve offsets") {
    when(kafkaConnector.getOffsets(topic)).thenReturn(None)

    val result = new KafkaOutMessagesProvider(kafkaConnector, topic).provide()

    assert(result.getValue.isEmpty)
  }
} 
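The three cases above (sum, empty map, missing offsets) reduce to mapping over the optional per-partition counts; the same shape covers the in-messages provider further down. A minimal sketch with a generic key type (illustrative helper):

// Hedged sketch: the total message count is the sum of per-partition values,
// propagating None when the counts could not be retrieved.
def totalCount[K](perPartition: Option[Map[K, Long]]): Option[Long] =
  perPartition.map(_.values.sum)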
Example 120
Source File: KafkaLagProviderTest.scala    From bandar-log   with Apache License 2.0 5 votes vote down vote up
package com.aol.one.dwh.bandarlog.providers

import com.aol.one.dwh.infra.config.Topic
import com.aol.one.dwh.bandarlog.connectors.KafkaConnector
import kafka.common.TopicAndPartition
import org.mockito.Mockito.when
import org.scalatest.FunSuite
import org.scalatest.mock.MockitoSugar


class KafkaLagProviderTest extends FunSuite with MockitoSugar {

  private val kafkaConnector = mock[KafkaConnector]
  private val topic = Topic("topic_id", Set("topic_1", "topic_2", "topic_3"), "group_id")

  test("check lag per topic") {
    val heads = Map(
      TopicAndPartition("topic_1", 1) -> 4L,
      TopicAndPartition("topic_2", 2) -> 5L,
      TopicAndPartition("topic_3", 3) -> 6L
    )

    val offsets = Map(
      TopicAndPartition("topic_1", 1) -> 1L,
      TopicAndPartition("topic_2", 2) -> 2L,
      TopicAndPartition("topic_3", 3) -> 3L
    )
    val kafkaState = Option((heads, offsets))
    when(kafkaConnector.getKafkaState(topic)).thenReturn(kafkaState)

    val result = new KafkaLagProvider(kafkaConnector, topic).provide()

    // topic       partition  heads  offsets  lag
    // topic_1     1          4      1        4-1=3
    // topic_2     2          5      2        5-2=3
    // topic_3     3          6      3        6-3=3
    assert(result.getValue.nonEmpty)
    assert(result.getValue.get == 9) // lag sum 3 + 3 + 3
  }

  test("check 0 lag case per topic") {
    val heads = Map(
      TopicAndPartition("topic_1", 1) -> 1L,
      TopicAndPartition("topic_2", 2) -> 2L,
      TopicAndPartition("topic_3", 3) -> 3L
    )

    val offsets = Map(
      TopicAndPartition("topic_1", 1) -> 4L,
      TopicAndPartition("topic_2", 2) -> 5L,
      TopicAndPartition("topic_3", 3) -> 6L
    )
    val kafkaState = Option((heads, offsets))
    when(kafkaConnector.getKafkaState(topic)).thenReturn(kafkaState)

    val result = new KafkaLagProvider(kafkaConnector, topic).provide()

    // topic       partition  heads  offsets  lag
    // topic_1     1          1      4        1-4= -3
    // topic_2     2          2      5        2-5= -3
    // topic_3     3          3      6        3-6= -3
    assert(result.getValue.nonEmpty)
    assert(result.getValue.get == 0) // lag.max(0) = 0
  }

  test("check lag for empty heads and offsets") {
    val kafkaState = Option((Map[TopicAndPartition, Long](), Map[TopicAndPartition, Long]()))
    when(kafkaConnector.getKafkaState(topic)).thenReturn(kafkaState)

    val result = new KafkaLagProvider(kafkaConnector, topic).provide()

    assert(result.getValue.nonEmpty)
    assert(result.getValue.get == 0)
  }

  test("return none if can't retrieve kafka state") {
    when(kafkaConnector.getKafkaState(topic)).thenReturn(None)

    val result = new KafkaLagProvider(kafkaConnector, topic).provide()

    assert(result.getValue.isEmpty)
  }
} 
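The commented tables above describe the aggregation: per partition the lag is head minus committed offset, the partition lags are summed, and the result is clamped at zero (which is why the second test yields 0). A minimal sketch of that computation, assuming the clamp applies to the total as the comment suggests:

// Hedged sketch of the lag aggregation implied by the tests above.
def totalLag[K](heads: Map[K, Long], offsets: Map[K, Long]): Long = {
  val sum = heads.map { case (partition, head) =>
    head - offsets.getOrElse(partition, 0L)
  }.sum
  sum.max(0L) // a negative total (consumer ahead of the recorded heads) clamps to 0
}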
Example 121
Source File: KafkaInMessagesProviderTest.scala    From bandar-log   with Apache License 2.0 5 votes vote down vote up
package com.aol.one.dwh.bandarlog.providers

import com.aol.one.dwh.bandarlog.connectors.KafkaConnector
import com.aol.one.dwh.infra.config.Topic
import kafka.common.TopicAndPartition
import org.mockito.Mockito.when
import org.scalatest.FunSuite
import org.scalatest.mock.MockitoSugar


class KafkaInMessagesProviderTest extends FunSuite with MockitoSugar {

  private val kafkaConnector = mock[KafkaConnector]
  private val topic = Topic("topic_id", Set("topic_1", "topic_2"), "group_id")

  test("check count of in messages/heads over all topic partitions") {
    val heads = Some(Map(
      TopicAndPartition("topic_1", 1) -> 1L,
      TopicAndPartition("topic_2", 2) -> 2L,
      TopicAndPartition("topic_3", 3) -> 3L
    ))
    when(kafkaConnector.getHeads(topic)).thenReturn(heads)

    val result = new KafkaInMessagesProvider(kafkaConnector, topic).provide()

    assert(result.getValue.nonEmpty)
    assert(result.getValue.get == 6) // 1 + 2 + 3
  }

  test("check count of in messages/heads for empty heads result") {
    when(kafkaConnector.getHeads(topic)).thenReturn(Some(Map[TopicAndPartition, Long]()))

    val result = new KafkaInMessagesProvider(kafkaConnector, topic).provide()

    assert(result.getValue.nonEmpty)
    assert(result.getValue.get == 0)
  }

  test("return none if can't retrieve heads") {
    when(kafkaConnector.getHeads(topic)).thenReturn(None)

    val result = new KafkaInMessagesProvider(kafkaConnector, topic).provide()

    assert(result.getValue.isEmpty)
  }
} 
Example 122
Source File: GlueConnectorTest.scala    From bandar-log   with Apache License 2.0 5 votes vote down vote up
package com.aol.one.dwh.bandarlog.connectors
import com.aol.one.dwh.infra.config._
import org.mockito.Mockito.when
import org.scalatest.FunSuite
import org.scalatest.mock.MockitoSugar

import scala.concurrent.duration._

class GlueConnectorTest extends FunSuite with MockitoSugar {

  private val config = GlueConfig("eu-central-1", "default", "accessKey", "secretKey", 5, 2, 10.seconds)
  private val glueConnector = mock[GlueConnector]

  test("Check max batchId from glue metadata tables") {
    val resultValue = 100L
    val numericTable = Table("table", List("column"), None)
    when(glueConnector.getMaxPartitionValue(numericTable)).thenReturn(resultValue)

    val result = glueConnector.getMaxPartitionValue(numericTable)

    assert(result == resultValue)
  }

  test("Check max date partitions' value from glue metadata table") {
    val resultValue = 15681377656L
    val datetimeTable = Table("table", List("year", "month", "day"), Some(List("yyyy", "MM", "dd")))
    when(glueConnector.getMaxPartitionValue(datetimeTable)).thenReturn(resultValue)

    val result = glueConnector.getMaxPartitionValue(datetimeTable)

    assert(result == resultValue)
  }
} 
Example 123
Source File: JdbcConnectorTest.scala    From bandar-log   with Apache License 2.0 5 votes vote down vote up
package com.aol.one.dwh.bandarlog.connectors

import java.sql.{Connection, DatabaseMetaData, ResultSet, Statement}

import com.aol.one.dwh.infra.config._
import com.aol.one.dwh.infra.sql.pool.HikariConnectionPool
import com.aol.one.dwh.infra.sql.{ListStringResultHandler, Setting, VerticaMaxValuesQuery}
import org.apache.commons.dbutils.ResultSetHandler
import org.mockito.Mockito.when
import org.scalatest.FunSuite
import org.scalatest.mock.MockitoSugar

class JdbcConnectorTest extends FunSuite with MockitoSugar {

  private val statement = mock[Statement]
  private val resultSet = mock[ResultSet]
  private val connectionPool = mock[HikariConnectionPool]
  private val connection = mock[Connection]
  private val databaseMetaData = mock[DatabaseMetaData]
  private val resultSetHandler = mock[ResultSetHandler[Long]]
  private val listStringResultHandler = mock[ListStringResultHandler]

  test("check run query result for numeric batch_id column") {
    val resultValue = 100L
    val table = Table("table", List("column"), None)
    val query = VerticaMaxValuesQuery(table)
    when(connectionPool.getConnection).thenReturn(connection)
    when(connectionPool.getName).thenReturn("connection_pool_name")
    when(connection.createStatement()).thenReturn(statement)
    when(statement.executeQuery("SELECT MAX(column) AS column FROM table")).thenReturn(resultSet)
    when(connection.getMetaData).thenReturn(databaseMetaData)
    when(databaseMetaData.getURL).thenReturn("connection_url")
    when(resultSetHandler.handle(resultSet)).thenReturn(resultValue)

    val result = new DefaultJdbcConnector(connectionPool).runQuery(query, resultSetHandler)

    assert(result == resultValue)
  }

  test("check run query result for date/time partitions") {
    val resultValue = Some(20190924L)
    val table = Table("table", List("year", "month", "day"), Some(List("yyyy", "MM", "dd")))
    val query = VerticaMaxValuesQuery(table)
    when(connectionPool.getConnection).thenReturn(connection)
    when(connectionPool.getName).thenReturn("connection_pool_name")
    when(connection.createStatement()).thenReturn(statement)
    when(statement.executeQuery("SELECT DISTINCT year, month, day FROM table")).thenReturn(resultSet)
    when(connection.getMetaData).thenReturn(databaseMetaData)
    when(databaseMetaData.getURL).thenReturn("connection_url")
    when(listStringResultHandler.handle(resultSet)).thenReturn(resultValue)

    val result = new DefaultJdbcConnector(connectionPool).runQuery(query, listStringResultHandler)

    assert(result == resultValue)
  }
}

class DefaultJdbcConnector(connectionPool: HikariConnectionPool) extends JdbcConnector(connectionPool) {
  override def applySetting(connection: Connection, statement: Statement, setting: Setting): Unit = {}
} 
Example 124
Source File: MainecoonTestSuite.scala    From mainecoon   with Apache License 2.0 5 votes vote down vote up
package mainecoon.tests

import cats.arrow.FunctionK
import cats.instances.AllInstances
import cats.kernel.Eq
import mainecoon.syntax.AllSyntax
import org.scalacheck.{Arbitrary, Gen}
import org.scalatest.{FunSuite, Matchers}
import org.typelevel.discipline.scalatest.Discipline

import scala.util.Try

class MainecoonTestSuite extends FunSuite with Matchers with Discipline with TestInstances with AllInstances with AllSyntax with cats.syntax.AllSyntax


trait TestInstances {
  implicit val catsDataArbitraryOptionList: Arbitrary[FunctionK[Option, List]] = Arbitrary(Gen.const(λ[FunctionK[Option, List]](_.toList)))
  implicit val catsDataArbitraryListOption: Arbitrary[FunctionK[List, Option]] = Arbitrary(Gen.const(λ[FunctionK[List, Option]](_.headOption)))
  implicit val catsDataArbitraryTryOption: Arbitrary[FunctionK[Try, Option]] = Arbitrary(Gen.const(λ[FunctionK[Try, Option]](_.toOption)))
  implicit val catsDataArbitraryOptionTry: Arbitrary[FunctionK[Option, Try]] = Arbitrary(Gen.const(λ[FunctionK[Option, Try]](o => Try(o.get))))
  implicit val catsDataArbitraryListVector: Arbitrary[FunctionK[List, Vector]] = Arbitrary(Gen.const(λ[FunctionK[List, Vector]](_.toVector)))
  implicit val catsDataArbitraryVectorList: Arbitrary[FunctionK[Vector, List]] = Arbitrary(Gen.const(λ[FunctionK[Vector, List]](_.toList)))

  implicit val eqThrow: Eq[Throwable] = Eq.allEqual
} 
Example 125
Source File: UrlUtilSpec.scala    From scrapy4s   with GNU Lesser General Public License v3.0 5 votes vote down vote up
package com.scrapy4s.util

import org.scalatest.FunSuite


class UrlUtilSpec extends FunSuite {
  test("url util test") {
    val url = UrlUtil.getRealUrl("/xiaoqu/xicheng/", "https://bj.lianjia.com/xiaoqu/dongcheng/")
    println(url)
    assert("https://bj.lianjia.com/xiaoqu/xicheng/".equals(url))

    val url2 = UrlUtil.getRealUrl("xiaoqu/xicheng/", "https://bj.lianjia.com/xiaoqu/dongcheng/")
    println(url2)
    assert("https://bj.lianjia.com/xiaoqu/dongcheng/xiaoqu/xicheng/".equals(url2))

    val url3 = UrlUtil.getRealUrl("//www.ziroom.com/z/nl/z3-d23008614.html", "http://www.ziroom.com/z/nl/z3.html")
    println(url3)
    assert("http://www.ziroom.com/z/nl/z3-d23008614.html".equals(url3))
  }

} 
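All three assertions above match standard RFC 3986 reference resolution: an absolute path, a relative path, and a protocol-relative reference resolved against a base URL. An equivalent helper can therefore be sketched with java.net.URI; this illustrates the expected behaviour and is not necessarily how UrlUtil is implemented:

// Hedged sketch: resolve a (possibly relative) link against a base URL.
import java.net.URI

def resolveUrl(link: String, base: String): String =
  new URI(base).resolve(link).toString

// resolveUrl("/xiaoqu/xicheng/", "https://bj.lianjia.com/xiaoqu/dongcheng/")
//   == "https://bj.lianjia.com/xiaoqu/xicheng/"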
Example 126
Source File: JSONParserTestSuite.scala    From NGSI-LD_Experimental   with MIT License 5 votes vote down vote up
package json

import org.scalatest.FunSuite


class JSONParserTestSuite extends FunSuite {

  def parse(data:String) ={
    val parser = new JSONParser
    parser.parse(parser.value, data) match {
      case parser.Success(matched,_) => matched
      case parser.Failure(msg,_) => fail(s"Unexpected failure: ${msg}")
      case parser.Error(msg,_) => fail(s"Unexpected error: ${msg}")
    }
  }

  test("Should parse single string") {
    val data = """"This is a string""""
    assert(parse(data) == "This is a string")
  }

  test("Should parse a single number") {
    val data = "45"
    assert(parse(data) == 45)
  }

  test("Should parse a single floating point number") {
    val data = "1.45"
    assert(parse(data) == 1.45)
  }

  test("Should parse list") {
    val data = "[34,67,89]"
    assert(parse(data) == List(34,67,89))
  }

  test("Should parse empty list") {
    val data = "[]"
    assert(parse(data) == List())
  }

  test("Should parse map") {
    val data = """{"c1":"v1","c2":45,"c3":[34,56],"c4":{"c41":"v"}}"""
    val expected = Map("c1" -> "v1","c2"->45,"c3" -> List(34,56), "c4" -> Map("c41" -> "v"))
    assert(parse(data) == expected)
  }

  test("Should parse empty map") {
    val data = "{}"
    assert(parse(data) == Map())
  }

  test("Should parse GeoJSON Structure") {
    val data = """{"location": {"type": "GeoProperty","value": { "type": "Point", "coordinates": [-8, 40]}}}"""
    val expected = Map("location"->Map("type" -> "GeoProperty", "value"->Map("type"->"Point", "coordinates" -> List(-8,40))))
    assert(parse(data) == expected)
  }
} 
Example 127
Source File: ThresholdFinderSuite.scala    From spark-MDLP-discretization   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.ml.feature

import org.apache.spark.mllib.feature.{BucketInfo, FeatureUtils, ThresholdFinder}
import org.apache.spark.sql.SQLContext
import org.junit.runner.RunWith
import org.scalatest.{BeforeAndAfterAll, FunSuite}
import org.scalatest.junit.JUnitRunner



@RunWith(classOf[JUnitRunner])
class ThresholdFinderSuite extends FunSuite {


  test("Test calcCriterion with even split hence low criterion value (and high entropy)") {

    val bucketInfo = new BucketInfo(Array(100L, 200L, 300L))
    val leftFreqs = Array(50L, 100L, 150L)
    val rightFreqs = Array(50L, 100L, 150L)

    assertResult((-0.030412853556075408, 1.4591479170272448, 300, 300)) {
      ThresholdFinder.calcCriterionValue(bucketInfo, leftFreqs, rightFreqs)
    }
  }

  test("Test calcCriterion with even split (and some at split) hence low criterion value (and high entropy)") {

    val bucketInfo = new BucketInfo(Array(100L, 200L, 300L))
    val leftFreqs = Array(40L, 100L, 140L)
    val rightFreqs = Array(50L, 90L, 150L)

    assertResult((0.05852316831964029,1.370380206618117,280,290)) {
      ThresholdFinder.calcCriterionValue(bucketInfo, leftFreqs, rightFreqs)
    }
  }

  test("Test calcCriterion with uneven split hence high criterion value (and low entropy)") {

    val bucketInfo = new BucketInfo(Array(100L, 200L, 300L))
    val leftFreqs = Array(100L, 10L, 250L)
    val rightFreqs = Array(0L, 190L, 50L)

    assertResult((0.5270800719912969, 0.9086741857687387, 360, 240)) {
      ThresholdFinder.calcCriterionValue(bucketInfo, leftFreqs, rightFreqs)
    }
  }

  test("Test calcCriterion with uneven split hence very high criterion value (and very low entropy)") {

    val bucketInfo = new BucketInfo(Array(100L, 200L, 300L))
    val leftFreqs = Array(100L, 200L, 0L)
    val rightFreqs = Array(0L, 0L, 300L)

    assertResult((0.9811176395006821, 0.45914791702724483, 300, 300)) {
      ThresholdFinder.calcCriterionValue(bucketInfo, leftFreqs, rightFreqs)
    }
  }

  test("Test calcCriterion with all data on one side (hence low criterion value)") {

    val bucketInfo = new BucketInfo(Array(100L, 200L, 300L))
    val leftFreqs = Array(0L, 0L, 0L)
    val rightFreqs = Array(100L, 200L, 300L)

    assertResult((-0.02311711397093918, 1.4591479170272448, 0, 600)) {
      ThresholdFinder.calcCriterionValue(bucketInfo, leftFreqs, rightFreqs)
    }
  }

  test("Test calcCriterion with most data on one side (hence low criterion value)") {

    val bucketInfo = new BucketInfo(Array(100L, 200L, 300L))
    val leftFreqs = Array(0L, 10L, 0L)
    val rightFreqs = Array(100L, 190L, 300L)

    assertResult((0.003721577231942788,1.4323219723298557,10,590)) {
      ThresholdFinder.calcCriterionValue(bucketInfo, leftFreqs, rightFreqs)
    }
  }
} 
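The 1.4591479170272448 figure that recurs in the expected tuples is the base-2 Shannon entropy of the overall class frequencies (100, 200, 300); the MDLP criterion compares it with the weighted entropies of the two sides of a candidate split. A minimal sketch of that entropy computation (illustrative helper, not the project's API):

// Hedged sketch: Shannon entropy (base 2) of a vector of class frequencies.
def entropy(freqs: Seq[Long]): Double = {
  val total = freqs.sum.toDouble
  freqs.filter(_ > 0).map { f =>
    val p = f / total
    -p * math.log(p) / math.log(2)
  }.sum
}

// entropy(Seq(100L, 200L, 300L)) ≈ 1.4591479170272448, matching the tuples above.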
Example 128
Source File: MDLPDiscretizerHugeSuite.scala    From spark-MDLP-discretization   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.ml.feature

import org.apache.spark.ml.feature.TestHelper._
import org.apache.spark.sql.{DataFrame, SQLContext}
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.{BeforeAndAfterAll, FunSuite}


// The class header was dropped from this excerpt; the declaration below is a
// reconstruction so that the braces balance. sqlContext is assumed to come from
// the suite's Spark test setup, which is not shown here.
@RunWith(classOf[JUnitRunner])
class MDLPDiscretizerHugeSuite extends FunSuite with BeforeAndAfterAll {

  test("Run MDLPD on all columns in serverX data (label = target2, maxBins = 50, maxByPart = 10000)") {
    val dataDf = readServerXData(sqlContext)
    val model = getDiscretizerModel(dataDf,
      Array("CPU1_TJ", "CPU2_TJ", "total_cfm", "rpm1"),
      "target4", maxBins = 50, maxByPart = 10000, stoppingCriterion = 0, minBinPercentage = 1)

    assertResult(
      """-Infinity, 337.55365, 363.06793, Infinity;
        |-Infinity, 329.35974, 330.47424, 331.16617, 331.54724, 332.8419, 333.82208, 334.7564, 335.65106, 336.6503, 337.26328, 337.8406, 339.16763, 339.81476, 341.1809, 341.81186, 343.64825, 355.91144, 357.8602, 361.57806, Infinity;
        |-Infinity, 0.0041902177, 0.0066683707, 0.00841628, 0.009734755, 0.011627266, 0.012141651, 0.012740928, 0.013055362, 0.013293093, 0.014488807, 0.014869433, 0.015116488, 0.015383363, 0.015662778, 0.015978532, 0.016246023, 0.016492717, 0.01686273, 0.017246526, 0.017485093, 0.017720722, 0.017845878, 0.018008012, 0.018357705, 0.018629191, 0.018964633, 0.019226547, 0.019445801, 0.01960973, 0.019857172, 0.020095222, 0.020373512, 0.020728927, 0.020977266, 0.02137091, 0.021543117, 0.02188059, 0.022238541, 0.02265025, 0.023091711, 0.023352059, 0.023588676, 0.023957964, 0.024230447, 0.024448851, 0.024822969, 0.025079254, 0.026178652, 0.027195029, Infinity;
        |-Infinity, 1500.0, 4500.0, 7500.0, Infinity""".stripMargin.replaceAll(System.lineSeparator(), "")) {
      model.splits.map(a => a.mkString(", ")).mkString(";")
    }
  }

} 
Example 129
Source File: OrcSchemaCompatibilityTest.scala    From eel-sdk   with Apache License 2.0 5 votes vote down vote up
package io.eels.component.orc

import io.eels.schema._
import org.apache.orc.TypeDescription
import org.scalatest.{FunSuite, Matchers}

// tests that the eel <-> orc schemas are compatible
class OrcSchemaCompatibilityTest extends FunSuite with Matchers {

  test("orc schemas should be cross compatible with eel structs") {

    val schema = TypeDescription.createStruct()
      .addField("binary", TypeDescription.createBinary())
      .addField("boolean", TypeDescription.createBoolean())
      .addField("byte", TypeDescription.createByte())
      .addField("char", TypeDescription.createChar().withMaxLength(8))
      .addField("date", TypeDescription.createDate())
      .addField("decimal", TypeDescription.createDecimal().withScale(2).withPrecision(4))
      .addField("double", TypeDescription.createDouble())
      .addField("float", TypeDescription.createFloat())
      .addField("int", TypeDescription.createInt())
      .addField("long", TypeDescription.createLong())
      .addField("timestamp", TypeDescription.createTimestamp())
      .addField("varchar", TypeDescription.createVarchar().withMaxLength(222))
      .addField("map", TypeDescription.createMap(TypeDescription.createString(), TypeDescription.createBoolean()))
      .addField("array", TypeDescription.createList(TypeDescription.createString()))
      .addField("struct", TypeDescription.createStruct()
        .addField("a", TypeDescription.createString)
        .addField("b", TypeDescription.createBoolean()))

    val structType = StructType(
      Field("binary", BinaryType, true),
      Field("boolean", BooleanType, true),
      Field("byte", ByteType.Signed, true),
      Field("char", CharType(8), true),
      Field("date", DateType, true),
      Field("decimal", DecimalType(4, 2), true),
      Field("double", DoubleType, true),
      Field("float", FloatType, true),
      Field("int", IntType.Signed, true),
      Field("long", LongType.Signed, true),
      Field("timestamp", TimestampMillisType, true),
      Field("varchar", VarcharType(222), true),
      Field("map", MapType(StringType, BooleanType), true),
      Field("array", ArrayType(StringType), true),
      Field("struct", StructType(Field("a", StringType), Field("b", BooleanType)), true)
    )

    OrcSchemaFns.fromOrcType(schema) shouldBe structType
    OrcSchemaFns.toOrcSchema(structType) shouldBe schema
  }
} 
Example 130
Source File: TestTabulizer.scala    From Mastering-Machine-Learning-with-Spark-2.x   with MIT License 5 votes vote down vote up
package com.packtpub.mmlwspark.utils

import com.packtpub.mmlwspark.utils.Tabulizer.table
import org.scalatest.FunSuite


class TestTabulizer extends FunSuite {
  test("table sort") {
     println(
       s"""GBM Model: Grid results:
          ~${table(Seq("iterations, depth, learningRate", "AUC", "error"), gbmResults.sortBy(-_._2).take(10), format = Map(1 -> "%.3f", 2 -> "%.3f"))}
        """.stripMargin('~'))

  }

  val gbmResults = Seq(
    ((5,2,0.1),0.635,0.363),
    ((5,2,0.01),0.631,0.370),
    ((5,2,0.001),0.631,0.370),
    ((5,3,0.1),0.662,0.338),
    ((5,3,0.01),0.660,0.343),
    ((5,3,0.001),0.640,0.367),
    ((5,5,0.1),0.686,0.312),
    ((5,5,0.01),0.673,0.326),
    ((5,5,0.001),0.662,0.335),
    ((5,7,0.1),0.694,0.304),
    ((5,7,0.01),0.683,0.314),
    ((5,7,0.001),0.681,0.316),
    ((10,2,0.1),0.641,0.356),
    ((10,2,0.01),0.631,0.370),
    ((10,2,0.001),0.631,0.370),
    ((10,3,0.1),0.672,0.326),
    ((10,3,0.01),0.661,0.341),
    ((10,3,0.001),0.640,0.367),
    ((10,5,0.1),0.695,0.303),
    ((10,5,0.01),0.676,0.323),
    ((10,5,0.001),0.662,0.335),
    ((10,7,0.1),0.702,0.297),
    ((10,7,0.01),0.684,0.313),
    ((10,7,0.001),0.681,0.316),
    ((50,2,0.1),0.684,0.313),
    ((50,2,0.01),0.635,0.363),
    ((50,2,0.001),0.631,0.370),
    ((50,3,0.1),0.700,0.298),
    ((50,3,0.01),0.663,0.336),
    ((50,3,0.001),0.661,0.342),
    ((50,5,0.1),0.714,0.285),
    ((50,5,0.01),0.688,0.310),
    ((50,5,0.001),0.674,0.324),
    ((50,7,0.1),0.716,0.283),
    ((50,7,0.01),0.694,0.304),
    ((50,7,0.001),0.684,0.314),
    ((100,2,0.1),0.701,0.297),
    ((100,2,0.01),0.641,0.356),
    ((100,2,0.001),0.631,0.370),
    ((100,3,0.1),0.709,0.289),
    ((100,3,0.01),0.671,0.327),
    ((100,3,0.001),0.660,0.343),
    ((100,5,0.1),0.721,0.277),
    ((100,5,0.01),0.698,0.300),
    ((100,5,0.001),0.677,0.322),
    ((100,7,0.1),0.720,0.278),
    ((100,7,0.01),0.704,0.294),
    ((100,7,0.001),0.685,0.312)
  )
} 
Example 131
Source File: QuickCheckSuite.scala    From Principles-of-Reactive-Programming   with GNU General Public License v3.0 5 votes vote down vote up
package quickcheck

import org.scalatest.FunSuite

import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner

import org.scalatest.prop.Checkers
import org.scalacheck.Arbitrary._
import org.scalacheck.Prop
import org.scalacheck.Prop._

import org.scalatest.exceptions.TestFailedException

object QuickCheckBinomialHeap extends QuickCheckHeap with BinomialHeap

@RunWith(classOf[JUnitRunner])
class QuickCheckSuite extends FunSuite with Checkers {
  def checkBogus(p: Prop) {
    var ok = false
    try {
      check(p)
    } catch {
      case e: TestFailedException =>
        ok = true
    }
    assert(ok, "A bogus heap should NOT satisfy all properties. Try to find the bug!")
  }

  test("Binomial heap satisfies properties.") {
    check(new QuickCheckHeap with BinomialHeap)
  }

  test("Bogus (1) binomial heap does not satisfy properties.") {
    checkBogus(new QuickCheckHeap with Bogus1BinomialHeap)
  }

  test("Bogus (2) binomial heap does not satisfy properties.") {
    checkBogus(new QuickCheckHeap with Bogus2BinomialHeap)
  }

  test("Bogus (3) binomial heap does not satisfy properties.") {
    checkBogus(new QuickCheckHeap with Bogus3BinomialHeap)
  }

  test("Bogus (4) binomial heap does not satisfy properties.") {
    checkBogus(new QuickCheckHeap with Bogus4BinomialHeap)
  }

  test("Bogus (5) binomial heap does not satisfy properties.") {
    checkBogus(new QuickCheckHeap with Bogus5BinomialHeap)
  }
} 
Example 132
Source File: RootNodesStorageTest.scala    From EncryCore   with GNU General Public License v3.0 5 votes vote down vote up
package encry.storage

import java.io.File

import encry.view.state.avlTree.utils.implicits.Instances._
import encry.modifiers.InstanceFactory
import encry.storage.VersionalStorage.{StorageKey, StorageValue, StorageVersion}
import encry.storage.levelDb.versionalLevelDB.{LevelDbFactory, VLDBWrapper, VersionalLevelDBCompanion}
import encry.utils.{EncryGenerator, FileHelper}
import encry.view.state.avlTree.AvlTree
import org.encryfoundation.common.utils.Algos
import org.encryfoundation.common.utils.TaggedTypes.Height
import org.iq80.leveldb.{DB, Options, ReadOptions}
import org.scalatest.{FunSuite, Matchers, PropSpec}
import scorex.utils.Random

import scala.util.{Random => SRandom}

class RootNodesStorageTest extends PropSpec with InstanceFactory with EncryGenerator with Matchers {

  def createAvl: AvlTree[StorageKey, StorageValue] = {
    val firstDir: File = FileHelper.getRandomTempDir
    val firstStorage: VLDBWrapper = {
      val levelDBInit = LevelDbFactory.factory.open(firstDir, new Options)
      VLDBWrapper(VersionalLevelDBCompanion(levelDBInit, settings.levelDB.copy(keySize = 33), keySize = 33))
    }
    val dir: File = FileHelper.getRandomTempDir
    val levelDb: DB = LevelDbFactory.factory.open(dir, new Options)
    AvlTree[StorageKey, StorageValue](firstStorage, RootNodesStorage.emptyRootStorage[StorageKey, StorageValue])
  }

  property("testRollback") {
    val avl: AvlTree[StorageKey, StorageValue] = createAvl
    val dir: File = FileHelper.getRandomTempDir
    val levelDb: DB = LevelDbFactory.factory.open(dir, new Options)
    val batch1 = levelDb.createWriteBatch()
    val readOptions1 = new ReadOptions()
    val rootNodesStorage = RootNodesStorage[StorageKey, StorageValue](levelDb, 10, dir)
    val (_, avlAfterInsertions, insertList) =
      (0 to SRandom.nextInt(1000) + 10).foldLeft(rootNodesStorage, avl, List.empty[(Height, (List[(StorageKey, StorageValue)], List[StorageKey]))]) {
      case ((rootStorage, previousAvl, insertionList), height) =>
        val version = StorageVersion @@ Random.randomBytes()
        val toInsert = (0 to SRandom.nextInt(100)).foldLeft(List.empty[(StorageKey, StorageValue)]) {
          case (list, _) => (StorageKey @@ Random.randomBytes() -> StorageValue @@ Random.randomBytes()) :: list
        }
        val previousInsertions = insertionList.lastOption.map(_._2._1).getOrElse(List.empty[(StorageKey, StorageValue)])
        val deletions = previousInsertions.take(1).map(_._1)
        val newAvl = previousAvl.insertAndDeleteMany(
          version,
          toInsert,
          deletions
        )
        val newRootStorage = rootStorage.insert(
          version,
          newAvl.rootNode,
          Height @@ height
        )
        (newRootStorage, newAvl, insertionList :+ (Height @@ height -> (toInsert -> deletions)))
    }
    val (_, rootNodeRestored) = rootNodesStorage.rollbackToSafePoint(insertList.dropWhile(_._1 != rootNodesStorage.safePointHeight).drop(1))
    (avlAfterInsertions.rootNode.hash sameElements rootNodeRestored.hash) shouldBe true
  }
} 
Example 133
Source File: BlockSerializerTest.scala    From EncryCore   with GNU General Public License v3.0 5 votes vote down vote up
package encry.modifiers.history

import encry.modifiers.mempool.TransactionFactory
import encry.settings.Settings
import encry.utils.{EncryGenerator, TestHelper}
import org.encryfoundation.common.crypto.equihash.EquihashSolution
import org.encryfoundation.common.modifiers.history._
import org.encryfoundation.common.utils.Algos
import org.encryfoundation.common.utils.TaggedTypes.ModifierId
import org.scalatest.FunSuite
import scorex.crypto.hash.Digest32
import scorex.utils.Random

class BlockSerializerTest extends FunSuite with EncryGenerator with Settings {

  test("testToBytes $ testFromBytes") {

    val blockHeader = Header(
      99: Byte,
      ModifierId @@ Random.randomBytes(),
      Digest32 @@ Random.randomBytes(),
      99999L,
      199,
      999L,
      settings.constants.InitialDifficulty,
      EquihashSolution(Seq(1, 2, 3)),
      Random.randomBytes()
    )

    val factory = TestHelper
    val keys = factory.genKeys(10)

    val fee = factory.Props.txFee
    val timestamp = 12345678L

    val txs = keys.map { k =>
      val useBoxes = IndexedSeq(factory.genAssetBox(k.publicImage.address.address))
      TransactionFactory.defaultPaymentTransactionScratch(k, fee,
        timestamp, useBoxes, randomAddress, factory.Props.boxValue)
    }

    val blockPayload = Payload(ModifierId @@ Array.fill(32)(19: Byte), txs)

    val block = Block(blockHeader,blockPayload)

    val blockSererialized = BlockSerializer.toBytes(block)

    val blockDeserealized = BlockSerializer.parseBytes(blockSererialized).get

    assert(Algos.hash(block.bytes) sameElements Algos.hash(blockDeserealized.bytes), "Block bytes mismatch.")
  }
} 
Example 134
Source File: TokenizerSuite.scala    From spark-nkp   with Apache License 2.0 5 votes vote down vote up
package com.github.uosdmlab.nkp

import org.apache.spark.ml.Pipeline
import org.apache.spark.ml.feature.{CountVectorizer, IDF}
import org.apache.spark.sql.SparkSession
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfter, FunSuite}


class TokenizerSuite extends FunSuite with BeforeAndAfterAll with BeforeAndAfter {

  private var tokenizer: Tokenizer = _

  private val spark: SparkSession =
    SparkSession.builder()
      .master("local[2]")
      .appName("Tokenizer Suite")
      .getOrCreate

  spark.sparkContext.setLogLevel("WARN")

  import spark.implicits._

  override protected def afterAll(): Unit = {
    try {
      spark.stop
    } finally {
      super.afterAll()
    }
  }

  before {
    tokenizer = new Tokenizer()
      .setInputCol("text")
      .setOutputCol("words")
  }

  private val df = spark.createDataset(
    Seq(
      "아버지가방에들어가신다.",
      "사랑해요 제플린!",
      "스파크는 재밌어",
      "나는야 데이터과학자",
      "데이터야~ 놀자~"
    )
  ).toDF("text")

  test("Default parameters") {
    assert(tokenizer.getFilter sameElements Array.empty[String])
  }

  test("Basic operation") {
    val words = tokenizer.transform(df)

    assert(df.count == words.count)
    assert(words.schema.fieldNames.contains(tokenizer.getOutputCol))
  }

  test("POS filter") {
    val nvTokenizer = new Tokenizer()
      .setInputCol("text")
      .setOutputCol("nvWords")
      .setFilter("N", "V")

    val words = tokenizer.transform(df).join(nvTokenizer.transform(df), "text")

    assert(df.count == words.count)
    assert(words.schema.fieldNames.contains(nvTokenizer.getOutputCol))
    assert(words.where(s"SIZE(${tokenizer.getOutputCol}) < SIZE(${nvTokenizer.getOutputCol})").count == 0)
  }

  test("TF-IDF pipeline") {
    tokenizer.setFilter("N")

    val cntVec = new CountVectorizer()
      .setInputCol("words")
      .setOutputCol("tf")

    val idf = new IDF()
      .setInputCol("tf")
      .setOutputCol("tfidf")

    val pipe = new Pipeline()
      .setStages(Array(tokenizer, cntVec, idf))

    val pipeModel = pipe.fit(df)

    val result = pipeModel.transform(df)

    assert(result.count == df.count)

    val fields = result.schema.fieldNames
    assert(fields.contains(tokenizer.getOutputCol))
    assert(fields.contains(cntVec.getOutputCol))
    assert(fields.contains(idf.getOutputCol))

    result.show
  }
} 
Example 135
Source File: MetadataTest.scala    From automl   with Apache License 2.0 5 votes vote down vote up
package com.tencent.angel.spark.automl

import org.apache.spark.ml.Pipeline
import org.apache.spark.ml.feature.VectorAssembler
import org.apache.spark.ml.feature.operator.{MetadataTransformUtils, VectorCartesian}
import org.apache.spark.sql.SparkSession
import org.scalatest.{BeforeAndAfter, FunSuite}

class MetadataTest extends FunSuite with BeforeAndAfter {

  var spark: SparkSession = _

  before {
    spark = SparkSession.builder().master("local").getOrCreate()
  }

  after {
    spark.close()
  }

  test("test_vector_cartesian") {
    val data = spark.read.format("libsvm")
      .option("numFeatures", "123")
      .load("data/a9a/a9a_123d_train_trans.libsvm")
      .persist()

    val cartesian = new VectorCartesian()
      .setInputCols(Array("features", "features"))
      .setOutputCol("cartesian_features")

    val assembler = new VectorAssembler()
      .setInputCols(Array("features", "cartesian_features"))
      .setOutputCol("assemble_features")

    val pipeline = new Pipeline()
      .setStages(Array(cartesian, assembler))

    val featureModel = pipeline.fit(data)
    val crossDF = featureModel.transform(data)

    crossDF.schema.fields.foreach { field =>
      println("name: " + field.name)
      println("metadata: " + field.metadata.toString())
    }
  }

  test("test_three_order_cartesian") {
    val data = spark.read.format("libsvm")
      .option("numFeatures", 8)
      .load("data/abalone/abalone_8d_train.libsvm")
      .persist()

    val cartesian = new VectorCartesian()
      .setInputCols(Array("features", "features"))
      .setOutputCol("f_f")

    val cartesian2 = new VectorCartesian()
      .setInputCols(Array("features", "f_f"))
      .setOutputCol("f_f_f")

    val pipeline = new Pipeline()
      .setStages(Array(cartesian, cartesian2))

    val crossDF = pipeline.fit(data).transform(data).persist()

    // first cartesian, the number of dimensions is 64
    println("first cartesian dimension = " + crossDF.select("f_f").schema.fields.last.metadata.getStringArray(MetadataTransformUtils.DERIVATION).length)
    println(crossDF.select("f_f").schema.fields.last.metadata.getStringArray(MetadataTransformUtils.DERIVATION).mkString(","))

    println()

    // second cartesian, the number of dimensions is 512
    println("second cartesian dimension = " + crossDF.select("f_f_f").schema.fields.last.metadata.getStringArray(MetadataTransformUtils.DERIVATION).length)
    println(crossDF.select("f_f_f").schema.fields.last.metadata.getStringArray(MetadataTransformUtils.DERIVATION).mkString(","))
  }
} 
Example 136
Source File: GPModelTest.scala    From automl   with Apache License 2.0 5 votes vote down vote up
package com.tencent.angel.spark.automl

import breeze.linalg.{DenseMatrix, DenseVector}
import breeze.numerics.{cos, pow}
import com.tencent.angel.spark.automl.tuner.kernel.Matern5Iso
import com.tencent.angel.spark.automl.tuner.model.GPModel
import org.scalatest.FunSuite

class GPModelTest extends FunSuite {

  test("test_linear") {
    // Test linear: y=2*x
    val X = DenseMatrix((1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0)).t
    val y = 2.0 * DenseVector(1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0)
    val z = DenseMatrix((2.5, 4.5, 6.5, 8.5, 10.0, 12.0)).t
    val truePredZ = 2.0 * DenseVector(2.5, 4.5, 6.5, 8.5, 10.0, 12.0)

    val covFunc = Matern5Iso()
    val initCovParams = DenseVector(1.0, 1.0)
    val initNoiseStdDev = 0.01

    val gpModel = GPModel(covFunc, initCovParams, initNoiseStdDev)
    gpModel.fit(X, y)

    println("Fitted covariance function params:")
    println(gpModel.covParams)
    println("Fitted noiseStdDev:")
    println(gpModel.noiseStdDev)
    println("\n")

    val prediction = gpModel.predict(z)
    println("Mean and Var:")
    println(prediction)
    println("True value:")
    println(truePredZ)
  }

  test("test_cosine") {
    // Test no_linear: y=cos(x)+1
    val X = DenseMatrix((1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0)).t
    val y = cos(DenseVector(1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0)) + 1.0
    val z = DenseMatrix((2.5, 4.5, 6.5, 8.5, 10.0, 12.0)).t
    val truePredZ = cos(DenseVector(2.5, 4.5, 6.5, 8.5, 10.0, 12.0)) + 1.0

    val covFunc = Matern5Iso()
    val initCovParams = DenseVector(1.0, 1.0)
    val initNoiseStdDev = 0.01

    val gpModel = GPModel(covFunc, initCovParams, initNoiseStdDev)
    gpModel.fit(X, y)

    println("Fitted covariance function params:")
    println(gpModel.covParams)
    println("Fitted noiseStdDev:")
    println(gpModel.noiseStdDev)
    println("\n")

    val prediction = gpModel.predict(z)
    println("Mean and Var:")
    println(prediction)
    println("True value:")
    println(truePredZ)
  }

  test("testSquare") {
    // Test no_linear: y=x^2
    val X = DenseMatrix((1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0)).t
    val y = DenseVector(1.0, 4.0, 9.0, 16.0, 25.0, 36.0, 49.0, 64.0, 81.0)
    val z = DenseMatrix((2.5, 4.5, 6.5, 8.5, 10.0, 12.0)).t
    val truePredZ = pow(z, 2)

    val covFunc = Matern5Iso()
    val initCovParams = DenseVector(1.0, 1.0)
    val initNoiseStdDev = 0.01

    val gpModel = GPModel(covFunc, initCovParams, initNoiseStdDev)
    gpModel.fit(X, y)

    println("Fitted covariance function params:")
    println(gpModel.covParams)
    println("Fitted noiseStdDev:")
    println(gpModel.noiseStdDev)
    println("\n")

    val prediction = gpModel.predict(z)
    println("Mean and Var:")
    println(prediction)
    println("True value:")
    println(truePredZ)
  }
} 
Example 137
Source File: SquareDistTest.scala    From automl   with Apache License 2.0 5 votes vote down vote up
package com.tencent.angel.spark.automl

import breeze.linalg.{DenseMatrix, DenseVector}
import com.tencent.angel.spark.automl.tuner.math.SquareDist
import org.junit.Assert._
import org.scalatest.FunSuite

class SquareDistTest extends FunSuite {

  test("test_XX_1D") {

    val x = DenseVector(1.0, 2.0, 3.0).toDenseMatrix.t
    val expected = DenseMatrix((0.0, 1.0, 4.0), (1.0, 0.0, 1.0), (4.0, 1.0, 0.0))
    assertEquals(expected, SquareDist(x, x))
  }

  test("test_XX_2D") {

    val x = DenseMatrix((1.0, 2.0, 3.0), (4.0, 5.0, 6.0)).t
    val expected = DenseMatrix((0.0, 2.0, 8.0), (2.0, 0.0, 2.0), (8.0, 2.0, 0.0))
    assertEquals(expected, SquareDist(x, x))
  }

  test("test_XY_1D") {

    val x1 = DenseVector(1.0, 2.0, 3.0).toDenseMatrix.t
    val x2 = DenseVector(4.0, 5.0).toDenseMatrix.t

    val expected = DenseMatrix((9.0, 16.0), (4.0, 9.0), (1.0, 4.0))
    assertEquals(expected, SquareDist(x1, x2))
  }

  test("test_XY_2D") {

    val x1 = DenseMatrix((1.0, 2.0, 3.0), (4.0, 5.0, 6.0)).t
    val x2 = DenseMatrix((7.0, 8.0), (9.0, 10.0)).t

    val expected = DenseMatrix((61.0, 85.0), (41.0, 61.0), (25.0, 41.0))
    assertEquals(expected, SquareDist(x1, x2))
  }
} 
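The expected matrices above are pairwise squared Euclidean distances between the rows of the inputs (rows are observations). A minimal Breeze sketch of that computation (illustrative, not the project's optimized implementation):

// Hedged sketch: out(i, j) = ||x1(i, ::) - x2(j, ::)||^2, rows are observations.
import breeze.linalg.DenseMatrix

def squareDist(x1: DenseMatrix[Double], x2: DenseMatrix[Double]): DenseMatrix[Double] = {
  val out = DenseMatrix.zeros[Double](x1.rows, x2.rows)
  for (i <- 0 until x1.rows; j <- 0 until x2.rows) {
    val diff = x1(i, ::).t - x2(j, ::).t
    out(i, j) = diff dot diff
  }
  out
}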
Example 138
Source File: PipelineTest.scala    From automl   with Apache License 2.0 5 votes vote down vote up
package com.tencent.angel.spark.automl

import com.tencent.angel.spark.automl.feature.preprocess.{HashingTFWrapper, IDFWrapper, TokenizerWrapper}
import com.tencent.angel.spark.automl.feature.{PipelineBuilder, PipelineWrapper, TransformerWrapper}
import org.apache.spark.sql.SparkSession
import org.scalatest.{BeforeAndAfter, FunSuite}

class PipelineTest extends FunSuite with BeforeAndAfter {

  var spark: SparkSession = _

  before {
    spark = SparkSession.builder().master("local").getOrCreate()
  }

  after {
    spark.close()
  }

  test("test_tfidf") {
    val sentenceData = spark.createDataFrame(Seq(
      (0.0, "Hi I heard about Spark"),
      (0.0, "I wish Java could use case classes"),
      (1.0, "Logistic regression models are neat")
    )).toDF("label", "sentence")

    val pipelineWrapper = new PipelineWrapper()

    val transformers = Array[TransformerWrapper](
      new TokenizerWrapper(),
      new HashingTFWrapper(20),
      new IDFWrapper()
    )

    val stages = PipelineBuilder.build(transformers)

    transformers.foreach { transformer =>
      val inputCols = transformer.getInputCols
      val outputCols = transformer.getOutputCols
      inputCols.foreach(print)
      print("    ")
      outputCols.foreach(print)
      println()
    }

    pipelineWrapper.setStages(stages)

    val model = pipelineWrapper.fit(sentenceData)

    val outputDF = model.transform(sentenceData)
    outputDF.select("outIDF").show()
    outputDF.select("outIDF").foreach { row =>
      println(row.get(0).getClass.getSimpleName)
      val arr = row.get(0)
      println(arr.toString)
    }
    outputDF.rdd.map(row => row.toString()).repartition(1)
      .saveAsTextFile("tmp/output/tfidf")
  }
} 
Example 139
Source File: BreezeOpTest.scala    From automl   with Apache License 2.0 5 votes vote down vote up
package com.tencent.angel.spark.automl

import com.tencent.angel.spark.automl.tuner.math.BreezeOp._
import org.junit.Assert._
import org.scalatest.FunSuite

class BreezeOpTest extends FunSuite {

  test("test cartesian") {

    val a: Array[Double] = Array(1.0, 2.0)
    val b: Array[Double] = Array(3.0, 4.0)
    val c: Array[Array[Double]] = cartesian(a, b)
    val expected: Array[Array[Double]] = Array(Array(1.0, 3.0), Array(1.0, 4.0), Array(2.0, 3.0), Array(2.0, 4.0))

    println(c.deep.mkString("\n"))
    assertEquals(expected.deep.mkString("\n"), c.deep.mkString("\n"))
  }

  test("test_higher_cartesian") {

    val a: Array[Double] = Array(1.0, 2.0)
    val b: Array[Double] = Array(3.0, 4.0)
    val c: Array[Double] = Array(5.0, 6.0)
    val d: Array[Array[Double]] = cartesian(a, b)
    val e: Array[Array[Double]] = cartesian(d, c)
    val expected = Array(Array(1.0, 3.0, 5.0),
      Array(1.0, 3.0, 6.0),
      Array(1.0, 4.0, 5.0),
      Array(1.0, 4.0, 6.0),
      Array(2.0, 3.0, 5.0),
      Array(2.0, 3.0, 6.0),
      Array(2.0, 4.0, 5.0),
      Array(2.0, 4.0, 6.0))

    println(e.deep.mkString("\n"))
    assertEquals(expected.deep.mkString("\n"), e.deep.mkString("\n"))
  }

  test("test_cartesian_array") {

    val a: Array[Double] = Array(1.0, 2.0)
    val b: Array[Double] = Array(3.0, 4.0)
    val c: Array[Double] = Array(5.0, 6.0)
    val d: Array[Double] = Array(7.0, 8.0)
    val allArray = Array(a, b, c, d)
    var tmp: Array[Array[Double]] = cartesian(allArray(0), allArray(1))
    allArray.foreach { case a =>
      if (a != allArray(0) && a != allArray(1)) {
        tmp = cartesian(tmp, a)
      }
    }
    val expected = Array(Array(1.0, 3.0, 5.0, 7.0),
      Array(1.0, 3.0, 5.0, 8.0),
      Array(1.0, 3.0, 6.0, 7.0),
      Array(1.0, 3.0, 6.0, 8.0),
      Array(1.0, 4.0, 5.0, 7.0),
      Array(1.0, 4.0, 5.0, 8.0),
      Array(1.0, 4.0, 6.0, 7.0),
      Array(1.0, 4.0, 6.0, 8.0),
      Array(2.0, 3.0, 5.0, 7.0),
      Array(2.0, 3.0, 5.0, 8.0),
      Array(2.0, 3.0, 6.0, 7.0),
      Array(2.0, 3.0, 6.0, 8.0),
      Array(2.0, 4.0, 5.0, 7.0),
      Array(2.0, 4.0, 5.0, 8.0),
      Array(2.0, 4.0, 6.0, 7.0),
      Array(2.0, 4.0, 6.0, 8.0))

    println(tmp.deep.mkString("\n"))
    assertEquals(expected.deep.mkString("\n"), tmp.deep.mkString("\n"))
  }
} 
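The expected outputs above pin down the two cartesian overloads: one crosses two flat arrays into pairs, the other extends every existing tuple with each element of the next array. A minimal sketch of both, wrapped in an object for overloading (illustrative; the project's BreezeOp may differ in detail):

// Hedged sketch of the two cartesian overloads exercised above.
object CartesianSketch {
  def cartesian(a: Array[Double], b: Array[Double]): Array[Array[Double]] =
    for (x <- a; y <- b) yield Array(x, y)

  def cartesian(prefixes: Array[Array[Double]], b: Array[Double]): Array[Array[Double]] =
    for (p <- prefixes; y <- b) yield p :+ y
}

// CartesianSketch.cartesian(Array(1.0, 2.0), Array(3.0, 4.0))
//   == Array(Array(1.0, 3.0), Array(1.0, 4.0), Array(2.0, 3.0), Array(2.0, 4.0))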
Example 140
Source File: TunerTest.scala    From automl   with Apache License 2.0 5 votes vote down vote up
package com.tencent.angel.spark.automl

import com.tencent.angel.spark.automl.tuner.config.Configuration
import com.tencent.angel.spark.automl.tuner.parameter.ParamSpace
import com.tencent.angel.spark.automl.tuner.solver.Solver
import com.tencent.angel.spark.automl.tuner.trail.{TestTrail, Trail}
import org.apache.spark.ml.linalg.Vector
import org.scalatest.FunSuite

class TunerTest extends FunSuite {

  test("test_random") {
    val param1 = ParamSpace.fromConfigString("param1", "{2.0,3.0,4.0,5.0,6.0}")
    val param2 = ParamSpace.fromConfigString("param2", "{3:10:1}")
    val solver: Solver = Solver(Array(param1, param2), true, surrogate = "Random")
    val trail: Trail = new TestTrail()
    (0 until 10).foreach { iter =>
      println(s"------iteration $iter starts------")
      val configs: Array[Configuration] = solver.suggest()
      val results: Array[Double] = trail.evaluate(configs)
      solver.feed(configs, results)
    }
    val result: (Vector, Double) = solver.optimal
    solver.stop
    println(s"Best configuration ${result._1.toArray.mkString(",")}, best performance: ${result._2}")
  }

  test("test_grid") {
    val param1 = ParamSpace.fromConfigString("param1", "[1,10]")
    val param2 = ParamSpace.fromConfigString("param2", "[-5:5:10]")
    val solver: Solver = Solver(Array(param1, param2), true, surrogate = "Grid")
    val trail: Trail = new TestTrail()
    (0 until 10).foreach { iter =>
      println(s"------iteration $iter starts------")
      val configs: Array[Configuration] = solver.suggest()
      val results: Array[Double] = trail.evaluate(configs)
      solver.feed(configs, results)
    }
    val result: (Vector, Double) = solver.optimal
    solver.stop
    println(s"Best configuration ${result._1.toArray.mkString(",")}, best performance: ${result._2}")
  }

  test("test_gp") {
    val param1 = ParamSpace.fromConfigString("param1", "[1,10]")
    val param2 = ParamSpace.fromConfigString("param2", "[-5:5:10]")
    val param3 = ParamSpace.fromConfigString("param3", "{0.0,1.0,3.0,5.0}")
    val param4 = ParamSpace.fromConfigString("param4", "{-5:5:1}")
    val solver: Solver = Solver(Array(param1, param2, param3, param4), true, surrogate = "GaussianProcess")
    val trail: Trail = new TestTrail()
    (0 until 10).foreach { iter =>
      println(s"------iteration $iter starts------")
      val configs: Array[Configuration] = solver.suggest
      val results: Array[Double] = trail.evaluate(configs)
      solver.feed(configs, results)
    }
    val result: (Vector, Double) = solver.optimal
    solver.stop
    println(s"Best configuration ${result._1.toArray.mkString(",")}, best performance: ${result._2}")
  }

  test("test_rf") {
    val param1 = ParamSpace.fromConfigString("param1", "[1,10]")
    val param2 = ParamSpace.fromConfigString("param2", "[-5:5:10]")
    val param3 = ParamSpace.fromConfigString("param3", "{0.0,1.0,3.0,5.0}")
    val param4 = ParamSpace.fromConfigString("param4", "{-5:5:1}")
    val solver: Solver = Solver(Array(param1, param2, param3, param4), true, "RandomForest")
    val trail: Trail = new TestTrail()
    (0 until 10).foreach { iter =>
      println(s"------iteration $iter starts------")
      val configs: Array[Configuration] = solver.suggest
      val results: Array[Double] = trail.evaluate(configs)
      solver.feed(configs, results)
    }
    val result: (Vector, Double) = solver.optimal
    solver.stop
    println(s"Best configuration ${result._1.toArray.mkString(",")}, best performance: ${result._2}")
  }
} 
Example 141
Source File: X2PSuite.scala    From spark-tsne   with Apache License 2.0 5 votes vote down vote up
package com.github.saurfang.spark.tsne

import org.apache.spark.SharedSparkContext
import org.apache.spark.mllib.linalg.Vectors
import org.apache.spark.mllib.linalg.distributed.RowMatrix
import org.scalatest.{FunSuite, Matchers}


class X2PSuite extends FunSuite with SharedSparkContext with Matchers {

  test("Test X2P against tsne.jl implementation") {
    val input = new RowMatrix(
      sc.parallelize(Seq(1 to 3, 4 to 6, 7 to 9, 10 to 12))
        .map(x => Vectors.dense(x.map(_.toDouble).toArray))
    )
    val output = X2P(input, 1e-5, 2).toRowMatrix().rows.collect().map(_.toArray.toList)
    println(output.toList)
    //output shouldBe List(List(0, .5, .5), List(.5, 0, .5), List(.5, .5, .0))
  }
} 
Example 142
Source File: BugDemonstrationTest.scala    From spark-tsne   with Apache License 2.0 5 votes vote down vote up
package com.github.saurfang.spark.tsne

import org.apache.spark.mllib.linalg.{Vectors, Vector}
import org.apache.spark.mllib.stat.{MultivariateStatisticalSummary, Statistics}
import org.apache.spark.sql.SparkSession
import org.scalatest.{BeforeAndAfterAll, FunSuite, Matchers}


class BugDemonstrationTest extends FunSuite with Matchers with BeforeAndAfterAll {
  private var sparkSession : SparkSession = _
  override def beforeAll(): Unit = {
    super.beforeAll()
    sparkSession = SparkSession.builder().appName("BugTests").master("local[2]").getOrCreate()
  }

  override def afterAll(): Unit = {
    super.afterAll()
    sparkSession.stop()
  }

  test("This demonstrates a bug was fixed in tsne-spark 2.1") {
    val sc = sparkSession.sparkContext

    val observations = sc.parallelize(
      Seq(
        Vectors.dense(1.0, 10.0, 100.0),
        Vectors.dense(2.0, 20.0, 200.0),
        Vectors.dense(3.0, 30.0, 300.0)
      )
    )

    // Compute column summary statistics.
    val summary: MultivariateStatisticalSummary = Statistics.colStats(observations)
    val expectedMean = Vectors.dense(2.0,20.0,200.0)
    val resultMean = summary.mean
    assertEqualEnough(resultMean, expectedMean)
    val expectedVariance = Vectors.dense(1.0,100.0,10000.0)
    assertEqualEnough(summary.variance, expectedVariance)
    val expectedNumNonZeros = Vectors.dense(3.0, 3.0, 3.0)
    assertEqualEnough(summary.numNonzeros, expectedNumNonZeros)
  }

  private def assertEqualEnough(sample: Vector, expected: Vector): Unit = {
    expected.toArray.zipWithIndex.foreach{ case(d: Double, i: Int) =>
      sample(i) should be (d +- 1E-12)
    }
  }
} 
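
The expected summary values follow from basic arithmetic on the three rows: each column's mean is its middle value, and with the unbiased (n - 1) sample variance the three columns give 1.0, 100.0 and 10000.0. A quick plain-Scala check of those numbers, independent of Spark, only to show where the expected vectors come from:

// Sanity check of the expected summary statistics, without Spark.
val rows = Seq(
  Seq(1.0, 10.0, 100.0),
  Seq(2.0, 20.0, 200.0),
  Seq(3.0, 30.0, 300.0)
)

val means = rows.transpose.map(col => col.sum / col.size)
// MLlib's colStats reports the unbiased sample variance (divides by n - 1).
val variances = rows.transpose.zip(means).map { case (col, m) =>
  col.map(v => (v - m) * (v - m)).sum / (col.size - 1)
}

println(means)      // List(2.0, 20.0, 200.0)
println(variances)  // List(1.0, 100.0, 10000.0)
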
Example 143
Source File: RedisSourceConfigSuite.scala    From spark-redis   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package org.apache.spark.sql.redis.stream

import org.apache.spark.sql.redis._
import org.scalatest.{FunSuite, Matchers}


class RedisSourceConfigSuite extends FunSuite with Matchers {

  val group: String = "group55"

  test("testFromMap") {
    val config = RedisSourceConfig.fromMap(Map(
      StreamOptionStreamKeys -> "mystream1,mystream2,mystream3",
      StreamOptionStreamOffsets ->
        s"""
          |{
          |  "offsets":{
          |    "mystream1": {
          |      "groupName": "$group",
          |      "offset": "0-10"
          |    },
          |    "mystream2": {
          |       "groupName": "$group",
          |       "offset": "0-7"
          |    }
          |  }
          |}
        """.stripMargin,
      StreamOptionParallelism -> "2",
      StreamOptionGroupName -> group,
      StreamOptionConsumerPrefix -> "consumer"
    ))
    config shouldBe RedisSourceConfig(
      Seq(
        RedisConsumerConfig("mystream1", group, "consumer-1", 100, 500),
        RedisConsumerConfig("mystream1", group, "consumer-2", 100, 500),
        RedisConsumerConfig("mystream2", group, "consumer-1", 100, 500),
        RedisConsumerConfig("mystream2", group, "consumer-2", 100, 500),
        RedisConsumerConfig("mystream3", group, "consumer-1", 100, 500),
        RedisConsumerConfig("mystream3", group, "consumer-2", 100, 500)
      ),
      Some(RedisSourceOffset(Map(
        "mystream1" -> RedisConsumerOffset(group, "0-10"),
        "mystream2" -> RedisConsumerOffset(group, "0-7")
      )))
    )
  }
} 
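
The expected result encodes the expansion rule: every stream key gets `parallelism` consumers named `<prefix>-<i>`. A plain-Scala sketch of that expansion (the case-class fields and the 100/500 defaults here are assumptions for illustration, not the spark-redis types):

// Illustrative expansion of stream keys into per-consumer configs; the field names and the
// 100/500 defaults are assumptions, not the spark-redis classes.
final case class ConsumerConfig(streamKey: String, groupName: String,
                                consumerName: String, batchSize: Int, block: Int)

def expandConsumers(streamKeys: Seq[String], group: String, consumerPrefix: String,
                    parallelism: Int, batchSize: Int = 100, block: Int = 500): Seq[ConsumerConfig] =
  for {
    key <- streamKeys
    i   <- 1 to parallelism
  } yield ConsumerConfig(key, group, s"$consumerPrefix-$i", batchSize, block)

// expandConsumers(Seq("mystream1", "mystream2", "mystream3"), "group55", "consumer", 2)
// produces the six consumer configs the test expects.
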
Example 144
Source File: RedisSourceTest.scala    From spark-redis   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package org.apache.spark.sql.redis.stream

import org.scalatest.{FunSuite, Matchers, OptionValues}


class RedisSourceTest extends FunSuite with Matchers with OptionValues {

  test("testGetOffsetRanges") {
    val startOffsets = RedisSourceOffset(Map("mystream" -> RedisConsumerOffset("group55", "0-0")))
    val endOffsets = RedisSourceOffset(Map("mystream" -> RedisConsumerOffset("group55", "0-1")))
    val consumerConfig = RedisConsumerConfig("mystream", "group55", "consumer", 1000, 100)
    val consumerConfigs = Seq(consumerConfig)
    val offsetRanges = RedisSource.getOffsetRanges(Some(startOffsets), endOffsets, consumerConfigs)
    offsetRanges.head shouldBe RedisSourceOffsetRange(Some("0-0"), "0-1", consumerConfig)
  }
} 
Example 145
Source File: RedisConsumerOffsetTest.scala    From spark-redis   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package org.apache.spark.sql.redis.stream

import org.scalatest.{FunSuite, Matchers}


class RedisConsumerOffsetTest extends FunSuite with Matchers {

  test("testFromJson") {
    val offset = RedisSourceOffset.fromJson(
      """
        |{
        |  "offsets":{
        |    "mystream": {
        |      "groupName": "group55",
        |      "offset": "1543674099961-0"
        |    }
        |  }
        |}
        |""".stripMargin)
    offset shouldBe RedisSourceOffset(Map("mystream" ->
      RedisConsumerOffset("group55", "1543674099961-0")))
  }
} 
Example 146
Source File: RedisConfigSuite.scala    From spark-redis   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.redislabs.provider.redis

import org.scalatest.{FunSuite, Matchers}
import redis.clients.jedis.util.JedisClusterCRC16

class RedisConfigSuite extends FunSuite with Matchers {

  val redisStandaloneConfig = new RedisConfig(RedisEndpoint("127.0.0.1", 6379, "passwd"))
  val redisClusterConfig = new RedisConfig(RedisEndpoint("127.0.0.1", 7379))

  test("getNodesBySlots") {
    redisStandaloneConfig.getNodesBySlots(0, 16383).length shouldBe 1
    redisClusterConfig.getNodesBySlots(0, 16383).length shouldBe 7
  }

  test("getHost") {
    val key = "getHost"
    val slot = JedisClusterCRC16.getSlot(key)
    val standaloneHost = redisStandaloneConfig.getHost(key)
    assert(standaloneHost.startSlot <= slot && standaloneHost.endSlot >= slot)
    val clusterHost = redisClusterConfig.getHost(key)
    assert(clusterHost.startSlot <= slot && clusterHost.endSlot >= slot)
  }

  test("getNodes") {
    redisStandaloneConfig.getNodes(RedisEndpoint("127.0.0.1", 6379, "passwd")).length shouldBe 1
    redisClusterConfig.getNodes(RedisEndpoint("127.0.0.1", 7379)).length shouldBe 7
  }
} 
Example 147
Source File: SparkStreamingRedisSuite.scala    From spark-redis   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.redislabs.provider.redis

import com.redislabs.provider.redis.env.Env
import com.redislabs.provider.redis.util.Logging
import org.apache.spark.sql.SparkSession
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.scalatest.{BeforeAndAfterEach, FunSuite}


trait SparkStreamingRedisSuite extends FunSuite with Env with BeforeAndAfterEach with Logging {

  override protected def beforeEach(): Unit = {
    super.beforeEach()
    spark = SparkSession.builder().config(conf).getOrCreate()
    sc = spark.sparkContext
    ssc = new StreamingContext(sc, Seconds(1))
  }

  override protected def afterEach(): Unit = {
    ssc.stop()
    spark.stop
    System.clearProperty("spark.driver.port")
    super.afterEach()
  }

} 
Example 148
Source File: ConnectionSSLUtilsTest.scala    From spark-redis   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.redislabs.provider.redis.util

import com.redislabs.provider.redis.env.RedisStandaloneSSLEnv
import com.redislabs.provider.redis.util.ConnectionUtils.{JedisExt, XINFO}
import org.scalatest.{FunSuite, Matchers}
import redis.clients.jedis.StreamEntryID

import scala.collection.JavaConverters._


class ConnectionSSLUtilsTest extends FunSuite with Matchers with RedisStandaloneSSLEnv {

  test("xinfo") {
    val streamKey = TestUtils.generateRandomKey()
    val conn = redisConfig.connectionForKey(streamKey)
    val data = Map("key" -> "value").asJava
    val entryId = conn.xadd(streamKey, new StreamEntryID(0, 1), data)
    val info = conn.xinfo(XINFO.SubCommandStream, streamKey)
    info.get(XINFO.LastGeneratedId) shouldBe Some(entryId.toString)
  }
} 
Example 149
Source File: ConnectionUtilsTest.scala    From spark-redis   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.redislabs.provider.redis.util

import com.redislabs.provider.redis.env.RedisStandaloneEnv
import com.redislabs.provider.redis.util.ConnectionUtils.{JedisExt, XINFO}
import org.scalatest.{FunSuite, Matchers}
import redis.clients.jedis.StreamEntryID

import scala.collection.JavaConverters._


class ConnectionUtilsTest extends FunSuite with Matchers with RedisStandaloneEnv {

  test("xinfo") {
    val streamKey = TestUtils.generateRandomKey()
    val conn = redisConfig.connectionForKey(streamKey)
    val data = Map("key" -> "value").asJava
    val entryId = conn.xadd(streamKey, new StreamEntryID(0, 1), data)
    val info = conn.xinfo(XINFO.SubCommandStream, streamKey)
    info.get(XINFO.LastGeneratedId) shouldBe Some(entryId.toString)
  }
} 
Example 150
Source File: Tests.scala    From spark-es   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.elasticsearch

import org.elasticsearch.common.settings.Settings
import org.scalatest.FunSuite

class Tests extends FunSuite with SparkSuite with ElasticSearchSuite {
  test("Reads documents from multiple shards") {
    val client = es.client

    val indexName = "index-with-multiple-shards"

    client.admin().indices().prepareCreate(indexName)
      .setSettings(Settings.settingsBuilder()
        .put("index.number_of_replicas", 0)
        .put("index.number_of_shards", 2)
        .build()
      )
      .get()

    for (i <- 1 to 1000) {
      client.prepareIndex(indexName, "foo", i.toString).setSource("{}").get()
    }

    client.admin().cluster().prepareHealth(indexName).setWaitForGreenStatus().get()
    client.admin().indices().prepareRefresh(indexName).get()

    val rdd = sparkContext.esRDD(Seq("localhost"), es.clusterName, Seq(indexName), Seq("foo"), "*")

    assert(rdd.partitions.length == 2)
    assert(rdd.collect().map(_.metadata.id).sorted.toList == (1 to 1000).map(_.toString).sorted.toList)
  }

  test("Writes documents to ElasticSearch") {
    val client = es.client

    val indexName = "index1"

    sparkContext.parallelize(Seq(1, 2, 3, 4))
      .map(id => ESDocument(ESMetadata(id.toString, "foo", indexName), "{}"))
      .saveToES(Seq("localhost"), es.clusterName)

    client.admin().cluster().prepareHealth(indexName).setWaitForGreenStatus().get()
    client.admin().indices().prepareRefresh(indexName).get()

    assert(client.prepareGet(indexName, "foo", "1").get().isExists)
    assert(client.prepareGet(indexName, "foo", "2").get().isExists)
    assert(client.prepareGet(indexName, "foo", "3").get().isExists)
    assert(client.prepareGet(indexName, "foo", "4").get().isExists)
  }
} 
Example 151
Source File: SparkFunSuite.scala    From gihyo-spark-book-example   with Apache License 2.0 5 votes vote down vote up
package jp.gihyo.spark

// scalastyle:off
import org.apache.log4j.{Level, Logger}
import org.scalatest.{FunSuite, Outcome}

import org.apache.spark.Logging


// The original class declaration was dropped when this example was extracted; a plausible reconstruction:
abstract class SparkFunSuite extends FunSuite with Logging {

  final protected override def withFixture(test: NoArgTest): Outcome = {
    val testName = test.text
    val suiteName = this.getClass.getName
    val shortSuiteName = suiteName.replaceAll("org.apache.spark", "o.a.s")
    try {
      Logger.getLogger("org").setLevel(Level.OFF)
      Logger.getLogger("akka").setLevel(Level.OFF)

      logInfo(s"\n\n===== TEST OUTPUT FOR $shortSuiteName: '$testName' =====\n")
      test()
    } finally {
      logInfo(s"\n\n===== FINISHED $shortSuiteName: '$testName' =====\n")
    }
  }

} 
Example 152
Source File: PurchaseLogGeneratorSuite.scala    From gihyo-spark-book-example   with Apache License 2.0 5 votes vote down vote up
package jp.gihyo.spark.ch08

import org.scalatest.FunSuite

class PurchaseLogGeneratorSuite extends FunSuite {

  test("generate products, users, purchaseLog") {
    val numProducts = 10
    val numUsers = 10
    val numProductsPerUser = 2
    implicit val recOpts: RecommendLogOptions = RecommendLogOptions(numProducts, numUsers, numProductsPerUser)
    val products = PurchaseLogGenerator.genProductList
    assert(products.size === numProducts)
  }

  test("generate user list") {
    val numProducts = 10
    val numUsers = 10
    val numProductsPerUser = 2
    implicit val recOpts: RecommendLogOptions = RecommendLogOptions(numProducts, numUsers, numProductsPerUser)
    val users = PurchaseLogGenerator.genUserList
    assert(users.size === numUsers)
  }

  test("generate purchaseLog with RandomSelection") {
    val numProducts = 10
    val numUsers = 10
    val numProductsPerUser = 2
    implicit val recOpts: RecommendLogOptions = RecommendLogOptions(numProducts, numUsers, numProductsPerUser)
    implicit val pidGenerator = ProductIdGenerator.fromString("RandomSelection")

    val users = PurchaseLogGenerator.genUserList
    val purchaseLog = PurchaseLogGenerator.genPurchaseLog(users)

    assert(purchaseLog.size === numUsers * numProductsPerUser)
    assert(purchaseLog.groupBy(_.uid).size === numUsers)
  }

  test("generate purchaseLog with PreferentialAttachment") {
    val numProducts = 10
    val numUsers = 10
    val numProductsPerUser = 2
    implicit val recOpts: RecommendLogOptions = RecommendLogOptions(numProducts, numUsers, numProductsPerUser)
    implicit val pidGenerator = ProductIdGenerator.fromString("PreferentialAttachment")

    val users = PurchaseLogGenerator.genUserList
    val purchaseLog = PurchaseLogGenerator.genPurchaseLog(users)

    assert(purchaseLog.size === numUsers * numProductsPerUser)
    assert(purchaseLog.groupBy(_.uid).size === numUsers)
  }
} 
Example 153
Source File: ProductIdGeneratorSuite.scala    From gihyo-spark-book-example   with Apache License 2.0 5 votes vote down vote up
package jp.gihyo.spark.ch08

import org.scalatest.FunSuite

class ProductIdGeneratorSuite extends FunSuite {

  test("get next productId by RandomSelection") {
    val numProducts = 5
    val numUsers = 5
    val numProductsPerUser = 2
    implicit val recOpts: RecommendLogOptions =
      RecommendLogOptions(numProducts, numUsers, numProductsPerUser)
    val purchaseLog = List(
      Purchase(6L, 1L),
      Purchase(6L, 2L),
      Purchase(7L, 3L),
      Purchase(7L, 4L),
      Purchase(8L, 5L),
      Purchase(8L, 1L),
      Purchase(9L, 2L),
      Purchase(9L, 3L),
      Purchase(10L, 4L),
      Purchase(10L, 5L)
    )
    (1 to 10).foreach( i => {
      val pid = ProductIdGenerator.RandomSelection.getNextPid(recOpts, purchaseLog)
      assert(0 <= pid && pid <= numProducts)
    })
  }

  test("get next productId by PreferentialAttachment") {
    val numProducts = 5
    val numUsers = 5
    val numProductsPerUser = 2
    implicit val recOpts: RecommendLogOptions = RecommendLogOptions(numProducts, numUsers, numProductsPerUser)
    val purchaseLog = List(
      Purchase(6L, 1L),
      Purchase(6L, 2L),
      Purchase(7L, 3L),
      Purchase(7L, 4L),
      Purchase(8L, 5L),
      Purchase(8L, 1L),
      Purchase(9L, 2L),
      Purchase(9L, 3L),
      Purchase(10L, 4L),
      Purchase(10L, 5L)
    )
    (1 to 10).foreach( i => {
      val pid = ProductIdGenerator.PreferentialAttachment.getNextPid(recOpts, purchaseLog)
      assert(0 <= pid && pid <= numProducts)
    })
  }

  test("get ProductIdGenerator from string") {
    assert(ProductIdGenerator.RandomSelection === ProductIdGenerator.fromString("RandomSelection"))
    assert(ProductIdGenerator.PreferentialAttachment === ProductIdGenerator.fromString("PreferentialAttachment"))
    assert(ProductIdGenerator.RandomSelection === ProductIdGenerator.fromString("hoge"))
  }

} 
Example 154
Source File: TripSuite.scala    From gihyo-spark-book-example   with Apache License 2.0 5 votes vote down vote up
package jp.gihyo.spark.ch05

import org.scalatest.FunSuite

class TripSuite extends FunSuite {

  test("should be parsed") {
    val line = "911926,566,8/31/2015 8:20,Harry Bridges Plaza (Ferry Building)," +
      "50,8/31/2015 8:30,Post at Kearny,47,566,Subscriber,95442"
    val trip = Trip.parse(line)
    assert(trip.id === 911926)
    assert(trip.duration === 566)
    assert(trip.zipcode === "95442")
  }
} 
Example 155
Source File: StationSuite.scala    From gihyo-spark-book-example   with Apache License 2.0 5 votes vote down vote up
package jp.gihyo.spark.ch05

import java.sql.Timestamp
import java.text.SimpleDateFormat

import org.scalatest.FunSuite

class StationSuite extends FunSuite {

  test("should be parse") {
    val line = "2,San Jose Diridon Caltrain Station,37.329732,-121.901782,27,San Jose,8/6/2013"
    val station = Station.parse(line)

    val dateFormat = new SimpleDateFormat("MM/dd/yyy")
    assert(station.id === 2)
    assert(station.name === "San Jose Diridon Caltrain Station")
    assert(station.lat === 37.329732)
    assert(station.lon === -121.901782)
    assert(station.dockcount === 27)
    assert(station.landmark === "San Jose")
    assert(station.installation === new Timestamp(dateFormat.parse("8/6/2013").getTime))
  }
} 
Example 156
Source File: MeanAveragePrecisionSuite.scala    From keystone   with Apache License 2.0 5 votes vote down vote up
package keystoneml.evaluation

import breeze.linalg.DenseVector
import org.scalatest.FunSuite
import org.apache.spark.SparkContext
import keystoneml.utils.Stats
import keystoneml.workflow.PipelineContext

class MeanAveragePrecisionSuite extends FunSuite with PipelineContext {

  test("random map test") {
    sc = new SparkContext("local", "test")

    // Build some random test data with 4 classes 0,1,2,3
    val actual = List(Array(0, 3), Array(2), Array(1, 2), Array(0))
    val actualRdd = sc.parallelize(actual)

    val predicted = List(
      DenseVector(0.1, -0.05, 0.12, 0.5),
      DenseVector(-0.23, -0.45, 0.23, 0.1),
      DenseVector(-0.34, -0.32, -0.66, 1.52),
      DenseVector(-0.1, -0.2, 0.5, 0.8))

    val predictedRdd = sc.parallelize(predicted)

    val map = new MeanAveragePrecisionEvaluator(4).evaluate(predictedRdd, actualRdd)

    // Expected values from running this in MATLAB
    val expected = DenseVector(1.0, 0.3333, 0.5, 0.3333)

    assert(Stats.aboutEq(map, expected, 1e-4))
  }
} 
Example 157
Source File: MulticlassClassifierEvaluatorSuite.scala    From keystone   with Apache License 2.0 5 votes vote down vote up
package keystoneml.evaluation

import breeze.linalg.DenseMatrix
import org.apache.spark.SparkContext
import org.scalatest.FunSuite
import keystoneml.workflow.PipelineContext

class MulticlassClassifierEvaluatorSuite extends FunSuite with PipelineContext {
  test("Multiclass keystoneml.evaluation metrics") {
    
    sc = new SparkContext("local", "test")
    val confusionMatrix = new DenseMatrix(3, 3, Array(2, 1, 0, 1, 3, 0, 1, 0, 1))
    val labels = Array(0.0, 1.0, 2.0)
    val predictionAndLabels = sc.parallelize(
      Seq((0.0, 0.0), (0.0, 1.0), (0.0, 0.0), (1.0, 0.0), (1.0, 1.0),
        (1.0, 1.0), (1.0, 1.0), (2.0, 2.0), (2.0, 0.0)), 2)
    val evaluator = new MulticlassClassifierEvaluator(3)
    val metrics = evaluator.evaluate(predictionAndLabels.map(_._1.toInt), predictionAndLabels.map(_._2.toInt)
    )
    val delta = 0.0000001
    val precision0 = 2.0 / (2 + 1)
    val precision1 = 3.0 / (3 + 1)
    val precision2 = 1.0 / (1 + 1)
    val recall0 = 2.0 / (2 + 2)
    val recall1 = 3.0 / (3 + 1)
    val recall2 = 1.0 / (1 + 0)
    val f1measure0 = 2 * precision0 * recall0 / (precision0 + recall0)
    val f1measure1 = 2 * precision1 * recall1 / (precision1 + recall1)
    val f1measure2 = 2 * precision2 * recall2 / (precision2 + recall2)
    val f2measure0 = (1 + 2 * 2) * precision0 * recall0 / (2 * 2 * precision0 + recall0)
    val f2measure1 = (1 + 2 * 2) * precision1 * recall1 / (2 * 2 * precision1 + recall1)
    val f2measure2 = (1 + 2 * 2) * precision2 * recall2 / (2 * 2 * precision2 + recall2)

    assert(metrics.confusionMatrix.toArray.sameElements(confusionMatrix.toArray))
    assert(math.abs(metrics.classMetrics(0).precision - precision0) < delta)
    assert(math.abs(metrics.classMetrics(1).precision - precision1) < delta)
    assert(math.abs(metrics.classMetrics(2).precision - precision2) < delta)
    assert(math.abs(metrics.classMetrics(0).recall - recall0) < delta)
    assert(math.abs(metrics.classMetrics(1).recall - recall1) < delta)
    assert(math.abs(metrics.classMetrics(2).recall - recall2) < delta)
    assert(math.abs(metrics.classMetrics(0).fScore() - f1measure0) < delta)
    assert(math.abs(metrics.classMetrics(1).fScore() - f1measure1) < delta)
    assert(math.abs(metrics.classMetrics(2).fScore() - f1measure2) < delta)
    assert(math.abs(metrics.classMetrics(0).fScore(2.0) - f2measure0) < delta)
    assert(math.abs(metrics.classMetrics(1).fScore(2.0) - f2measure1) < delta)
    assert(math.abs(metrics.classMetrics(2).fScore(2.0) - f2measure2) < delta)

    assert(math.abs(metrics.microRecall -
        (2.0 + 3.0 + 1.0) / ((2 + 3 + 1) + (1 + 1 + 1))) < delta)
    assert(math.abs(metrics.microRecall - metrics.microPrecision) < delta)
    assert(math.abs(metrics.microRecall - metrics.microFScore()) < delta)
    assert(math.abs(metrics.macroPrecision -
        (precision0 + precision1 + precision2) / 3.0) < delta)
    assert(math.abs(metrics.macroRecall -
        (recall0 + recall1 + recall2) / 3.0) < delta)
    assert(math.abs(metrics.macroFScore() -
        (f1measure0 + f1measure1 + f1measure2) / 3.0) < delta)
    assert(math.abs(metrics.macroFScore(2.0) -
        (f2measure0 + f2measure1 + f2measure2) / 3.0) < delta)
  }
} 
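
The per-class precision and recall used in the assertions can be reproduced by tallying the (prediction, label) pairs into a confusion matrix: precision reads off a prediction row, recall off a label column. A small plain-Scala check (no Spark required):

// Tally (prediction, label) pairs and derive per-class precision and recall.
val pairs = Seq((0, 0), (0, 1), (0, 0), (1, 0), (1, 1),
                (1, 1), (1, 1), (2, 2), (2, 0))
val numClasses = 3

// counts(p)(l) = number of examples predicted p whose true label is l
val counts = Array.ofDim[Int](numClasses, numClasses)
pairs.foreach { case (p, l) => counts(p)(l) += 1 }

val precision = (0 until numClasses).map(c => counts(c)(c).toDouble / counts(c).sum)
val recall    = (0 until numClasses).map(c =>
  counts(c)(c).toDouble / (0 until numClasses).map(p => counts(p)(c)).sum)

println(precision)  // Vector(0.666..., 0.75, 0.5)
println(recall)     // Vector(0.5, 0.75, 1.0)
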
Example 158
Source File: BinaryClassifierEvaluatorSuite.scala    From keystone   with Apache License 2.0 5 votes vote down vote up
package keystoneml.evaluation

import org.apache.spark.SparkContext
import org.scalatest.FunSuite
import keystoneml.utils.Stats
import keystoneml.workflow.PipelineContext

class BinaryClassifierEvaluatorSuite extends FunSuite with PipelineContext {
  test("Multiclass keystoneml.evaluation metrics") {
    
    sc = new SparkContext("local", "test")

    val predictionAndLabels = sc.parallelize( Seq.fill(6)((true, true)) ++ Seq.fill(2)((false, true))
        ++ Seq.fill(1)((true, false)) ++ Seq.fill(3)((false, false)), 2)
    val metrics = BinaryClassifierEvaluator.evaluate(predictionAndLabels.map(_._1), predictionAndLabels.map(_._2))

    assert(metrics.tp === 6)
    assert(metrics.fp === 1)
    assert(metrics.tn === 3)
    assert(metrics.fn === 2)

    assert(Stats.aboutEq(metrics.precision, 6.0/7.0))
    assert(Stats.aboutEq(metrics.recall, 6.0/8.0))
    assert(Stats.aboutEq(metrics.accuracy, 9.0/12.0))
    assert(Stats.aboutEq(metrics.specificity, 3.0/4.0))
    assert(Stats.aboutEq(metrics.fScore(), 2.0 * 6.0 / (2.0 * 6.0 + 2.0 + 1.0)))
  }
} 
Example 159
Source File: MLlibUtilsSuite.scala    From keystone   with Apache License 2.0 5 votes vote down vote up
package keystoneml.utils

import org.apache.spark.mllib.linalg._
import breeze.linalg.{DenseVector => BDV, SparseVector => BSV}
import org.scalatest.FunSuite

class MLlibUtilsSuite extends FunSuite {
  val arr = Array(0.1, 0.2, 0.3, 0.4)
  val n = 20
  val indices = Array(0, 3, 5, 10, 13)
  val values = Array(0.1, 0.5, 0.3, -0.8, -1.0)

  test("dense vector to breeze dense") {
    val vec = Vectors.dense(arr)
    assert(MLlibUtils.mllibVectorToDenseBreeze(vec) === new BDV[Double](arr))
  }

  test("sparse vector to breeze dense") {
    val vec = Vectors.sparse(n, indices, values)
    val breeze = new BDV[Double](n)
    indices.zip(values).foreach { case (x, y) =>
      breeze(x) = y
    }
    assert(MLlibUtils.mllibVectorToDenseBreeze(vec) === breeze)
  }

  test("dense breeze to vector") {
    val breeze = new BDV[Double](arr)
    val vec = MLlibUtils.breezeVectorToMLlib(breeze).asInstanceOf[DenseVector]
    assert(vec.size === arr.length)
    assert(vec.values.eq(arr), "should not copy data")
  }

  test("sparse breeze to vector") {
    val breeze = new BSV[Double](indices, values, n)
    val vec = MLlibUtils.breezeVectorToMLlib(breeze).asInstanceOf[SparseVector]
    assert(vec.size === n)
    assert(vec.indices.eq(indices), "should not copy data")
    assert(vec.values.eq(values), "should not copy data")
  }

  test("sparse breeze with partially-used arrays to vector") {
    val activeSize = 3
    val breeze = new BSV[Double](indices, values, activeSize, n)
    val vec = MLlibUtils.breezeVectorToMLlib(breeze).asInstanceOf[SparseVector]
    assert(vec.size === n)
    assert(vec.indices === indices.slice(0, activeSize))
    assert(vec.values === values.slice(0, activeSize))
  }

  test("dense matrix to breeze dense") {
    val mat = Matrices.dense(3, 2, Array(0.0, 1.0, 2.0, 3.0, 4.0, 5.0))
    val breeze = MLlibUtils.mllibMatrixToDenseBreeze(mat)
    assert(breeze.rows === mat.numRows)
    assert(breeze.cols === mat.numCols)
    assert(breeze.data.eq(mat.asInstanceOf[DenseMatrix].values), "should not copy data")
  }

  test("sparse matrix to breeze dense") {
    val values = Array(1.0, 2.0, 4.0, 5.0)
    val colPtrs = Array(0, 2, 4)
    val rowIndices = Array(1, 2, 1, 2)
    val mat = Matrices.sparse(3, 2, colPtrs, rowIndices, values)
    val breeze = MLlibUtils.mllibMatrixToDenseBreeze(mat)
    assert(breeze.rows === mat.numRows)
    assert(breeze.cols === mat.numCols)
    assert(breeze.toArray === mat.toArray)
  }
} 
Example 160
Source File: MatrixUtilsSuite.scala    From keystone   with Apache License 2.0 5 votes vote down vote up
package keystoneml.utils

import org.scalatest.FunSuite

import breeze.linalg._
import breeze.stats._

import org.apache.spark.SparkContext

import keystoneml.pipelines._
import keystoneml.workflow.PipelineContext

class MatrixUtilsSuite extends FunSuite with PipelineContext {

  test("computeMean works correctly") {
    val numRows = 1000
    val numCols = 32
    val numParts = 4
    sc = new SparkContext("local", "test")
    val in = DenseMatrix.rand(numRows, numCols)
    val inArr = MatrixUtils.matrixToRowArray(in)
    val rdd = sc.parallelize(inArr, numParts).mapPartitions { iter => 
      Iterator.single(MatrixUtils.rowsToMatrix(iter))
    }
    val expected = mean(in(::, *)).t
    val actual = MatrixUtils.computeMean(rdd)
    assert(Stats.aboutEq(expected, actual, 1e-6))
  }

} 
Example 161
Source File: ImageUtilsSuite.scala    From keystone   with Apache License 2.0 5 votes vote down vote up
package keystoneml.utils

import org.scalatest.FunSuite

class ImageUtilsSuite extends FunSuite {

  test("crop") {
    val imgArr =
      (0 until 4).flatMap { x =>
        (0 until 4).flatMap { y =>
          (0 until 1).map { c =>
            (c + x * 1 + y * 4 * 1).toDouble
          }
        }
      }.toArray

    val image = new ChannelMajorArrayVectorizedImage(imgArr, ImageMetadata(4, 4, 1))
    val cropped = ImageUtils.crop(image, 1, 1, 3, 3)

    assert(cropped.metadata.xDim == 2)
    assert(cropped.metadata.yDim == 2)
    assert(cropped.metadata.numChannels == 1)

    assert(cropped.get(0, 0, 0) == 5.0)
    assert(cropped.get(0, 1, 0) == 6.0)
    assert(cropped.get(1, 0, 0) == 9.0)
    assert(cropped.get(1, 1, 0) == 10.0)
  }

  test("flipHorizontal") {
    val imgArr =
      (0 until 4).flatMap { x =>
        (0 until 4).flatMap { y =>
          (0 until 1).map { c =>
            (c + x * 1 + y * 4 * 1).toDouble
          }
        }
      }.toArray

    val image = new ChannelMajorArrayVectorizedImage(imgArr, ImageMetadata(4, 4, 1))

    val flipped = ImageUtils.flipHorizontal(image)

    assert(flipped.metadata.xDim == 4)
    assert(flipped.metadata.yDim == 4)
    assert(flipped.metadata.numChannels == 1)

    (0 until 4).foreach { x =>
      assert(flipped.get(x, 0, 0) == image.get(x, 3, 0))
      assert(flipped.get(x, 1, 0) == image.get(x, 2, 0))
      assert(flipped.get(x, 2, 0) == image.get(x, 1, 0))
      assert(flipped.get(x, 3, 0) == image.get(x, 0, 0))
    }
  }

} 
Example 162
Source File: ImageSuite.scala    From keystone   with Apache License 2.0 5 votes vote down vote up
package keystoneml.utils.images

import org.scalatest.FunSuite
import keystoneml.pipelines.Logging
import keystoneml.utils.VectorizedImage
import keystoneml.utils.TestUtils._

class ImageSuite extends FunSuite with Logging {
  test("Vectorized Image Coordinates Should be Correct") {
    val (x,y,z) = (100,100,3)

    val images = Array[VectorizedImage](
      genChannelMajorArrayVectorizedImage(x,y,z),
      genColumnMajorArrayVectorizedImage(x,y,z),
      genRowMajorArrayVectorizedImage(x,y,z),
      genRowColumnMajorByteArrayVectorizedImage(x,y,z)
    )

    for (
      img <- images;
      idx <- 0 until x*y*z
    ) {
      val coord = img.vectorToImageCoords(idx)
      assert(img.imageToVectorCoords(coord.x,coord.y,coord.channelIdx) == idx,
        s"imageToVectorCoords(vectorToImageCoords(idx)) should be equivalent to identity(idx) for img $img")
    }

    for (
      img <- images;
      xi <- 0 until x;
      yi <- 0 until y;
      zi <- 0 until z
    ) {
      val coord = img.vectorToImageCoords(img.imageToVectorCoords(xi,yi,zi))
      assert((coord.x, coord.y, coord.channelIdx) == (xi,yi,zi),
        s"vectorToImageCoords(imageToVectorCoords(x,y,z)) should be equivalent to identity(x,y,z) for img $img")
    }
  }
} 
Example 163
Source File: VLFeatSuite.scala    From keystone   with Apache License 2.0 5 votes vote down vote up
package keystoneml.utils.external

import java.io.File

import breeze.linalg._
import breeze.numerics.abs
import org.scalatest.FunSuite
import keystoneml.pipelines.Logging
import keystoneml.utils.{ImageUtils, MatrixUtils, TestUtils}

class VLFeatSuite extends FunSuite with Logging {
  test("Load an Image and compute SIFT Features") {
    val testImage = TestUtils.loadTestImage("images/000012.jpg")
    val singleImage = ImageUtils.mapPixels(testImage, _/255.0)
    val grayImage = ImageUtils.toGrayScale(singleImage)

    val extLib = new VLFeat

    val stepSize = 3
    val binSize = 4
    val scales = 4
    val descriptorLength = 128
    val scaleStep = 0

    val rawDescDataShort = extLib.getSIFTs(grayImage.metadata.xDim, grayImage.metadata.yDim,
      stepSize, binSize, scales, scaleStep, grayImage.getSingleChannelAsFloatArray())

    assert(rawDescDataShort.length % descriptorLength == 0, "Resulting SIFTs must be 128-dimensional.")

    val numCols = rawDescDataShort.length/descriptorLength
    val result = new DenseMatrix(descriptorLength, numCols, rawDescDataShort.map(_.toDouble))

    // Compare with the output of running this image through vl_phow with matlab from the enceval package:
    // featpipem_addpaths;
    // im = im2single(imread('images/000012.jpg'));
    // featextr = featpipem.features.PhowExtractor();
    // featextr.step = 3;
    // [frames feats] = featextr.compute(im);
    // csvwrite('images/feats128.csv', feats)

    val testFeatures = csvread(new File(TestUtils.getTestResourceFileName("images/feats128.csv")))

    val diff = result - testFeatures

    // Because of subtle differences in the way image smoothing works in the VLFeat C library and the VLFeat matlab
    // library (vl_imsmooth_f vs. _vl_imsmooth_f), these two matrices will not be exactly the same.
    // Instead, we check that 99.5% of the matrix entries are off by at most 1.
    val absdiff = abs(diff).toDenseVector

    assert(absdiff.findAll(_ > 1.0).length.toDouble < 0.005*absdiff.length,
      "Fewer than 0.05% of entries may be different by more than 1.")
  }
} 
Example 164
Source File: EncEvalSuite.scala    From keystone   with Apache License 2.0 5 votes vote down vote up
package keystoneml.utils.external

import java.io.File

import breeze.linalg._
import breeze.stats.distributions.Gaussian
import keystoneml.nodes.learning.GaussianMixtureModel
import keystoneml.nodes.learning.external.GaussianMixtureModelEstimator
import org.scalatest.FunSuite
import keystoneml.pipelines.Logging
import keystoneml.utils.{Stats, TestUtils}

class EncEvalSuite extends FunSuite with Logging {

  test("Load SIFT Descriptors and compute Fisher Vector Features") {

    val siftDescriptor = csvread(new File(TestUtils.getTestResourceFileName("images/feats.csv")))

    val gmmMeans = TestUtils.getTestResourceFileName("images/voc_codebook/means.csv")
    val gmmVars = TestUtils.getTestResourceFileName("images/voc_codebook/variances.csv")
    val gmmWeights = TestUtils.getTestResourceFileName("images/voc_codebook/priors")

    val gmm = GaussianMixtureModel.load(gmmMeans, gmmVars, gmmWeights)

    val nCenters = gmm.means.cols
    val nDim = gmm.means.rows

    val extLib = new EncEval

    val fisherVector = extLib.calcAndGetFVs(
      gmm.means.toArray.map(_.toFloat),
      nCenters,
      nDim,
      gmm.variances.toArray.map(_.toFloat),
      gmm.weights.toArray.map(_.toFloat),
      siftDescriptor.toArray.map(_.toFloat))

    log.info(s"Fisher Vector is ${fisherVector.sum}")
    assert(Stats.aboutEq(fisherVector.sum, 40.109097, 1e-4), "SUM of Fisher Vectors must match expected sum.")

  }

  test("Compute a GMM from scala") {
    val nsamps = 10000

    // Generate two gaussians.
    val x = Gaussian(-1.0, 0.5).samples.take(nsamps).toArray
    val y = Gaussian(5.0, 1.0).samples.take(nsamps).toArray

    val z = shuffle(x ++ y).map(x => DenseVector(x))

    // Compute a 1-d GMM.
    val extLib = new EncEval
    val gmm = new GaussianMixtureModelEstimator(2).fit(z)

    logInfo(s"GMM means: ${gmm.means.toArray.mkString(",")}")
    logInfo(s"GMM vars: ${gmm.variances.toArray.mkString(",")}")
    logInfo(s"GMM weights: ${gmm.weights.toArray.mkString(",")}")

    // The results should be close to the distribution we set up.
    assert(Stats.aboutEq(min(gmm.means), -1.0, 1e-1), "Smallest mean should be close to -1.0")
    assert(Stats.aboutEq(max(gmm.means), 5.0, 1e-1), "Largest mean should be close to 5.0")
    assert(Stats.aboutEq(math.sqrt(min(gmm.variances)), 0.5, 1e-1), "Smallest SD should be close to 0.5")
    assert(Stats.aboutEq(math.sqrt(max(gmm.variances)), 1.0, 1e-1), "Largest SD should be close to 1.0")
  }
} 
Example 165
Source File: EstimatorSuite.scala    From keystone   with Apache License 2.0 5 votes vote down vote up
package keystoneml.workflow

import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
import org.scalatest.FunSuite
import keystoneml.pipelines.Logging

class EstimatorSuite extends FunSuite with PipelineContext with Logging {
  test("Estimator fit RDD") {
    sc = new SparkContext("local", "test")

    val intEstimator = new Estimator[Int, Int] {
      def fit(data: RDD[Int]): Transformer[Int, Int] = {
        val first = data.first()
        Transformer(x => x + first)
      }
    }

    val trainData = sc.parallelize(Seq(32, 94, 12))
    val testData = sc.parallelize(Seq(42, 58, 61))

    val pipeline = intEstimator.withData(trainData)
    assert(pipeline.apply(testData).get().collect().toSeq === Seq(42 + 32, 58 + 32, 61 + 32))
  }

  test("Estimator fit Pipeline Data") {
    sc = new SparkContext("local", "test")

    val transformer = Transformer[Int, Int](_ * 2)

    val intEstimator = new Estimator[Int, Int] {
      def fit(data: RDD[Int]): Transformer[Int, Int] = {
        val first = data.first()
        Transformer(x => x + first)
      }
    }

    val trainData = sc.parallelize(Seq(32, 94, 12))
    val testData = sc.parallelize(Seq(42, 58, 61))

    val pipeline = intEstimator.withData(transformer(trainData))
    assert(pipeline.apply(testData).get().collect().toSeq === Seq(42 + 64, 58 + 64, 61 + 64))
  }

} 
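
The test exercises the usual two-stage pattern: `fit` consumes training data and returns a transformer, which is then applied to new data. A self-contained sketch of that pattern over plain Scala collections (hypothetical SimpleEstimator/SimpleTransformer types, not the keystoneml API):

// Minimal estimator/transformer pattern over plain Scala collections; illustration only.
trait SimpleTransformer[A, B] { def apply(data: Seq[A]): Seq[B] }
trait SimpleEstimator[A, B]   { def fit(data: Seq[A]): SimpleTransformer[A, B] }

// An estimator that learns an additive offset from its training data.
val addFirstEstimator = new SimpleEstimator[Int, Int] {
  def fit(data: Seq[Int]): SimpleTransformer[Int, Int] = {
    val first = data.head
    new SimpleTransformer[Int, Int] {
      def apply(xs: Seq[Int]): Seq[Int] = xs.map(_ + first)
    }
  }
}

val model = addFirstEstimator.fit(Seq(32, 94, 12))
assert(model(Seq(42, 58, 61)) == Seq(74, 90, 93))  // 42 + 32, 58 + 32, 61 + 32
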
Example 166
Source File: LabelEstimatorSuite.scala    From keystone   with Apache License 2.0 5 votes vote down vote up
package keystoneml.workflow

import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
import org.scalatest.FunSuite
import keystoneml.pipelines.Logging

class LabelEstimatorSuite extends FunSuite with PipelineContext with Logging {
  test("LabelEstimator fit RDD") {
    sc = new SparkContext("local", "test")

    val intEstimator = new LabelEstimator[Int, Int, String] {
      def fit(data: RDD[Int], labels: RDD[String]): Transformer[Int, Int] = {
        val first = data.first()
        val label = labels.first().hashCode
        Transformer(x => x + first + label)

      }
    }

    val trainData = sc.parallelize(Seq(32, 94, 12))
    val trainLabels = sc.parallelize(Seq("sjkfdl", "iw", "432"))
    val testData = sc.parallelize(Seq(42, 58, 61))

    val pipeline = intEstimator.withData(trainData, trainLabels)
    val offset = 32 + "sjkfdl".hashCode
    assert(pipeline.apply(testData).get().collect().toSeq === Seq(42 + offset, 58 + offset, 61 + offset))
  }

  test("LabelEstimator fit pipeline data") {
    sc = new SparkContext("local", "test")

    val dataTransformer = Transformer[Int, Int](_ * 2)
    val labelTransformer = Transformer[String, String](_ + "hi")

    val intEstimator = new LabelEstimator[Int, Int, String] {
      def fit(data: RDD[Int], labels: RDD[String]): Transformer[Int, Int] = {
        val first = data.first()
        val label = labels.first().hashCode
        Transformer(x => x + first + label)

      }
    }

    val trainData = sc.parallelize(Seq(32, 94, 12))
    val trainLabels = sc.parallelize(Seq("sjkfdl", "iw", "432"))
    val testData = sc.parallelize(Seq(42, 58, 61))

    val pipeline = intEstimator.withData(dataTransformer(trainData), labelTransformer(trainLabels))
    val offset = 64 + "sjkfdlhi".hashCode
    assert(pipeline.apply(testData).get().collect().toSeq === Seq(42 + offset, 58 + offset, 61 + offset))
  }
} 
Example 167
Source File: KMeansPlusPlusSuite.scala    From keystone   with Apache License 2.0 5 votes vote down vote up
package keystoneml.nodes.learning

import breeze.linalg._
import org.apache.spark.SparkContext
import org.scalatest.FunSuite
import keystoneml.pipelines._
import keystoneml.utils.{MatrixUtils, Stats}
import keystoneml.workflow.PipelineContext

class KMeansPlusPlusSuite extends FunSuite with PipelineContext with Logging {

  test("K-Means++ Single Center") {
    sc = new SparkContext("local", "test")

    val k = 1

    val data = sc.parallelize(Array(
      DenseVector[Double](1.0, 2.0, 6.0),
      DenseVector[Double](1.0, 3.0, 0.0),
      DenseVector[Double](1.0, 4.0, 6.0)
    ))

    val center = DenseVector[Double](1.0, 3.0, 4.0).asDenseMatrix

    val kMeans = KMeansPlusPlusEstimator(k, maxIterations = 1).fit(data)
    assert(Stats.aboutEq(kMeans.means, center))

    val kMeans10 = KMeansPlusPlusEstimator(k, maxIterations = 10).fit(data)
    assert(Stats.aboutEq(kMeans10.means, center))

    val out = kMeans.apply(data).collect()
  }

  test("K-Means++ Two Centers") {
    sc = new SparkContext("local", "test")

    val k = 2

    val data = sc.parallelize(Array(
      DenseVector[Double](1.0, 2.0, 6.0),
      DenseVector[Double](1.0, 3.0, 0.0),
      DenseVector[Double](1.0, 4.0, 6.0),
      DenseVector[Double](1.0, 1.0, 0.0)
    ))

    val centers = Set(
      DenseVector[Double](1.0, 2.0, 0.0),
      DenseVector[Double](1.0, 3.0, 6.0)
    )

    val kMeans = KMeansPlusPlusEstimator(k, maxIterations = 10).fit(data)
    val fitCenters = MatrixUtils.matrixToRowArray(kMeans.means).toSet
    assert(fitCenters === centers )

    val kMeans5 = KMeansPlusPlusEstimator(k, maxIterations = 5).fit(data)
    val fitCenters5 = MatrixUtils.matrixToRowArray(kMeans5.means).toSet
    assert(fitCenters5 === centers )

    val out = kMeans.apply(data).collect()
  }

  test("K-Means Transformer") {
    sc = new SparkContext("local", "test")

    val data = Array(
      DenseVector[Double](1.0, 2.0, 6.0),
      DenseVector[Double](1.0, 3.0, 0.0),
      DenseVector[Double](1.0, 4.0, 6.0),
      DenseVector[Double](1.0, 1.0, 0.0)
    )

    val centers = MatrixUtils.rowsToMatrix(Array(
      DenseVector[Double](1.0, 2.0, 0.0),
      DenseVector[Double](1.0, 3.0, 6.0)
    ))

    val clusterOne = DenseVector[Double](1.0, 0.0)
    val clusterTwo = DenseVector[Double](0.0, 1.0)

    val assignments = Seq(clusterTwo, clusterOne, clusterTwo, clusterOne)
    val kMeans = KMeansModel(centers)

    // Test Single Apply
    assert(kMeans.apply(DenseVector[Double](1.0, 3.0, 0.0)) === clusterOne)
    assert(kMeans.apply(DenseVector[Double](1.0, 1.0, 0.0)) === clusterOne)
    assert(kMeans.apply(DenseVector[Double](1.0, 2.0, 6.0)) === clusterTwo)
    assert(kMeans.apply(DenseVector[Double](1.0, 4.0, 6.0)) === clusterTwo)

    // Test Matrix Apply
    assert(kMeans.apply(MatrixUtils.rowsToMatrix(data)) === MatrixUtils.rowsToMatrix(assignments))

    // Test RDD Apply
    assert(kMeans.apply(sc.parallelize(data)).collect().toSeq === assignments)
  }
} 
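
The single-apply assertions follow from nearest-center assignment under squared Euclidean distance: the point (1, 3, 0) is at squared distance 1 from center (1, 2, 0) but 36 from (1, 3, 6), so it belongs to the first cluster. A quick Breeze check of one assignment (an illustration, not the KMeansModel implementation):

import breeze.linalg._

// Nearest-center assignment under squared Euclidean distance (illustration only).
val centers = Seq(DenseVector(1.0, 2.0, 0.0), DenseVector(1.0, 3.0, 6.0))
val point = DenseVector(1.0, 3.0, 0.0)

val sqDists = centers.map { c => val d = point - c; d dot d }
val assignment = sqDists.indexOf(sqDists.min)

println(sqDists)     // List(1.0, 36.0)
println(assignment)  // 0, i.e. the first cluster (clusterOne in the test)
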
Example 168
Source File: KernelModelSuite.scala    From keystone   with Apache License 2.0 5 votes vote down vote up
package keystoneml.nodes.learning

import breeze.linalg._

import org.apache.spark.SparkContext
import org.scalatest.FunSuite

import keystoneml.workflow.PipelineContext
import keystoneml.utils.{MatrixUtils, Stats}

class KernelModelSuite extends FunSuite with PipelineContext {

  test("KernelModel XOR test") {
    sc = new SparkContext("local", "test")

    val x = Array(DenseVector(-1.0, -1.0), DenseVector(1.0, 1.0), DenseVector(-1.0, 1.0),DenseVector(1.0, -1.0))
    val xTest = Array(DenseVector(-1.0, -1.0), DenseVector(1.0, 1.0), DenseVector(-1.0, 1.0))
    val y = Array(DenseVector(0.0, 1.0), DenseVector(0.0, 1.0), DenseVector(1.0, 0.0), DenseVector(1.0, 0.0))
    val yTest = Array(DenseVector(0.0, 1.0), DenseVector(0.0, 1.0), DenseVector(1.0, 0.0))

    val xRDD = sc.parallelize(x, 2)
    val yRDD = sc.parallelize(y, 2)
    val xTestRDD = sc.parallelize(xTest, 2)

    val gaussian = new GaussianKernelGenerator(10)
    // Set block size to number of data points so no blocking happens
    val clf = new KernelRidgeRegression(gaussian, 0, 4, 2)

    val kernelModel = clf.fit(xRDD, yRDD)
    val yHat = kernelModel(xTestRDD).collect()
    // Fit should be good
    val delta = MatrixUtils.rowsToMatrix(yHat) - MatrixUtils.rowsToMatrix(yTest)

    delta :*= delta
    println("SUM OF DELTA1 " + sum(delta))
    assert(Stats.aboutEq(sum(delta), 0, 1e-4))
  }

  test("KernelModel XOR blocked test") {
    sc = new SparkContext("local", "test")

    val x = Array(DenseVector(-1.0, -1.0), DenseVector(1.0, 1.0), DenseVector(-1.0, 1.0),DenseVector(1.0, -1.0))
    val xTest = Array(DenseVector(-1.0, -1.0), DenseVector(1.0, 1.0), DenseVector(-1.0, 1.0))
    val y = Array(DenseVector(0.0, 1.0), DenseVector(0.0, 1.0), DenseVector(1.0, 0.0), DenseVector(1.0, 0.0))
    val yTest = Array(DenseVector(0.0, 1.0), DenseVector(0.0, 1.0), DenseVector(1.0, 0.0))

    val xRDD = sc.parallelize(x, 2)
    val yRDD = sc.parallelize(y, 2)
    val xTestRDD = sc.parallelize(xTest, 2)

    val gaussian = new GaussianKernelGenerator(10)

    // Set block size to half number of data points so blocking happens
    val clf = new KernelRidgeRegression(gaussian, 0, 2, 2)

    val kernelModel = clf.fit(xRDD, yRDD)
    val yHat = kernelModel(xTestRDD).collect()
    // Fit should be good
    val delta = MatrixUtils.rowsToMatrix(yHat) - MatrixUtils.rowsToMatrix(yTest)

    delta :*= delta
    assert(Stats.aboutEq(sum(delta), 0, 1e-4))
  }
} 
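
Kernel ridge regression, which both tests exercise on the XOR problem, has a closed form: build the kernel matrix K over the training points, solve (K + lambda * I) * alpha = Y, and predict a new point with k(x, X)^T * alpha. A compact Breeze sketch of that closed form (a generic illustration; the bandwidth convention of GaussianKernelGenerator and the blocking in KernelRidgeRegression are not reproduced here):

import breeze.linalg._

// Closed-form kernel ridge regression with an RBF kernel (illustrative sketch).
def rbf(a: DenseVector[Double], b: DenseVector[Double], sigma: Double): Double = {
  val d = a - b
  math.exp(-(d dot d) / (2.0 * sigma * sigma))
}

val x = Array(DenseVector(-1.0, -1.0), DenseVector(1.0, 1.0),
              DenseVector(-1.0, 1.0), DenseVector(1.0, -1.0))
val y = DenseMatrix((0.0, 1.0), (0.0, 1.0), (1.0, 0.0), (1.0, 0.0))  // one-hot XOR labels

val sigma = 1.0
val lambda = 1e-6
val n = x.length

val gram = DenseMatrix.tabulate(n, n)((i, j) => rbf(x(i), x(j), sigma))
// Dual coefficients: solve (K + lambda * I) * alpha = Y
val alpha = (gram + DenseMatrix.eye[Double](n) * lambda) \ y

// Predict a new point as k(xStar, X)^T * alpha
def predict(xStar: DenseVector[Double]): DenseVector[Double] =
  alpha.t * DenseVector.tabulate(n)(i => rbf(xStar, x(i), sigma))

println(predict(DenseVector(-1.0, 1.0)))  // close to DenseVector(1.0, 0.0)
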
Example 169
Source File: BlockLinearMapperSuite.scala    From keystone   with Apache License 2.0 5 votes vote down vote up
package keystoneml.nodes.learning

import breeze.linalg.{DenseVector, DenseMatrix}
import breeze.stats.distributions.Rand
import keystoneml.workflow.PipelineContext
import scala.collection.mutable.ArrayBuffer

import org.scalatest.FunSuite

import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD

import keystoneml.pipelines._
import keystoneml.utils.Stats

class BlockLinearMapperSuite extends FunSuite with PipelineContext with Logging {

  test("BlockLinearMapper transformation") {
    sc = new SparkContext("local", "test")

    val inDims = 1000
    val outDims = 100
    val numChunks = 5
    val numPerChunk = inDims/numChunks

    val mat = DenseMatrix.rand(inDims, outDims, Rand.gaussian)
    val vec = DenseVector.rand(inDims, Rand.gaussian)
    val intercept = DenseVector.rand(outDims, Rand.gaussian)

    val splitVec = (0 until numChunks).map(i => vec((numPerChunk*i) until (numPerChunk*i + numPerChunk)))
    val splitMat = (0 until numChunks).map(i => mat((numPerChunk*i) until (numPerChunk*i + numPerChunk), ::))

    val linearMapper = new LinearMapper[DenseVector[Double]](mat, Some(intercept))
    val blockLinearMapper = new BlockLinearMapper(splitMat, numPerChunk, Some(intercept))

    val linearOut = linearMapper(vec)

    // Test with intercept
    assert(Stats.aboutEq(blockLinearMapper(vec), linearOut, 1e-4))

    // Test the apply and evaluate call
    val blmOuts = new ArrayBuffer[RDD[DenseVector[Double]]]
    val splitVecRDDs = splitVec.map { vec =>
      sc.parallelize(Seq(vec), 1)
    }
    blockLinearMapper.applyAndEvaluate(splitVecRDDs,
      (predictedValues: RDD[DenseVector[Double]]) => {
        blmOuts += predictedValues
        ()
      }
    )

    // The last blmOut should match the linear mapper's output
    assert(Stats.aboutEq(blmOuts.last.collect()(0), linearOut, 1e-4))
  }
} 
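
The blocked mapper relies on a simple linear-algebra identity: if the weight matrix is split into row blocks A_i and the input vector into matching chunks v_i, then A^T v = sum_i A_i^T v_i, so each block can be applied independently and the contributions summed. A short Breeze check of that identity (illustration only):

import breeze.linalg._
import breeze.numerics.abs
import breeze.stats.distributions.Rand

// The identity behind blocking: A^T v equals the sum of per-block products A_i^T v_i.
val inDims = 6
val outDims = 4
val numChunks = 3
val chunk = inDims / numChunks

val a = DenseMatrix.rand(inDims, outDims, Rand.gaussian)
val v = DenseVector.rand(inDims, Rand.gaussian)

val full = a.t * v
val blocked = (0 until numChunks).map { i =>
  val rows = (chunk * i) until (chunk * (i + 1))
  a(rows, ::).t * v(rows)
}.reduce(_ + _)

assert(max(abs(full - blocked)) < 1e-10)
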
Example 170
Source File: LinearMapperSuite.scala    From keystone   with Apache License 2.0 5 votes vote down vote up
package keystoneml.nodes.learning

import breeze.linalg._
import edu.berkeley.cs.amplab.mlmatrix.RowPartitionedMatrix
import keystoneml.nodes.stats.StandardScaler
import org.apache.spark.SparkContext
import org.scalatest.FunSuite
import keystoneml.pipelines.Logging
import keystoneml.utils.{TestUtils, MatrixUtils, Stats}
import keystoneml.workflow.PipelineContext

class LinearMapperSuite extends FunSuite with PipelineContext with Logging {
  test("Solve and apply a linear system") {
    sc = new SparkContext("local", "test")

    // Create the data.
    val A = TestUtils.createRandomMatrix(sc, 128, 5, 4)
    val x = DenseVector(5.0, 4.0, 3.0, 2.0, -1.0).toDenseMatrix
    val b = A.mapPartitions(part => part * x.t)

    val Aary = A.rdd.flatMap(part => MatrixUtils.matrixToRowArray(part.mat).toIterator)
    val bary = b.rdd.flatMap(part => MatrixUtils.matrixToRowArray(part.mat).toIterator)

    val mapper = new LinearMapEstimator().fit(Aary, bary)

    assert(Stats.aboutEq(mapper.x, x.t), "Coefficients from the solve must match the hand-created model.")

    val point = DenseVector(2.0, -3.0, 2.0, 3.0, 5.0)

    assert(Stats.aboutEq(mapper(sc.parallelize(Seq(point))).first()(0), 5.0),
        "Linear model applied to a point should be 5.0")

    val bt = mapper(Aary)
    assert(Stats.aboutEq(bt.collect()(0), bary.collect()(0)),
        "Linear model applied to input should be the same as training points.")
  }

  test("LocalLeastSquaresEstimator doesn't crash") {
    sc = new SparkContext("local", "test")

    // Create the data.
    val A = TestUtils.createRandomMatrix(sc, 50, 400, 4)
    val x = DenseVector(5.0, 4.0, 3.0, 2.0, -1.0).toDenseMatrix
    val b = A.mapPartitions(part => DenseMatrix.rand(part.rows, 3))

    val Aary = A.rdd.flatMap(part => MatrixUtils.matrixToRowArray(part.mat).toIterator)
    val bary = b.rdd.flatMap(part => MatrixUtils.matrixToRowArray(part.mat).toIterator)

    val mapper = new LocalLeastSquaresEstimator(1e-2).fit(Aary, bary)
    assert(mapper.x.rows === 400)
    assert(mapper.x.cols === 3)
  }

  test("Solve a dense linear system (fit intercept) using local least squares") {
    sc = new SparkContext("local", "test")

    // Create the data.
    val A = TestUtils.createRandomMatrix(sc, 128, 5, 4)
    val x = DenseMatrix((5.0, 4.0, 3.0, 2.0, -1.0), (3.0, -1.0, 2.0, -2.0, 1.0))
    val dataMean = DenseVector(1.0, 0.0, 1.0, 2.0, 0.0)
    val extraBias = DenseVector(3.0, 4.0)

    val initialAary = A.rdd.flatMap(part => MatrixUtils.matrixToRowArray(part.mat).toIterator)
    val meanScaler = new StandardScaler(normalizeStdDev = false).fit(initialAary)
    val Aary = meanScaler.apply(initialAary).map(_ + dataMean)
    val bary = Aary.map(a => (x * (a - dataMean)) + extraBias)

    val mapper = new LocalLeastSquaresEstimator(0).fit(Aary, bary)

    val trueResult = MatrixUtils.rowsToMatrix(bary.collect())
    val solverResult = MatrixUtils.rowsToMatrix(mapper(Aary).collect())

    assert(Stats.aboutEq(trueResult, solverResult, 1e-5), "Results from the solve must match the hand-created model.")
    assert(Stats.aboutEq(mapper.x, x.t, 1e-6), "Model weights from the solve must match the hand-created model.")
    assert(Stats.aboutEq(mapper.bOpt.get, extraBias, 1e-6), "Learned intercept must match the hand-created model.")
    assert(Stats.aboutEq(mapper.featureScaler.get.mean, dataMean, 1e-6),
      "Learned feature mean must match the hand-created model.")

  }

} 
Example 171
Source File: ZCAWhiteningSuite.scala    From keystone   with Apache License 2.0 5 votes vote down vote up
package keystoneml.nodes.learning

import breeze.linalg._
import breeze.numerics._
import breeze.stats.distributions._
import org.scalatest.FunSuite
import keystoneml.pipelines._
import keystoneml.workflow.PipelineContext

class ZCAWhiteningSuite extends FunSuite with PipelineContext with Logging {

  val nrows = 10000
  val ndim = 10

  val x = DenseMatrix.rand[Double](nrows, ndim, Gaussian(0.0, 1.0))

  def fitAndCompare(x: DenseMatrix[Double], eps: Double, thresh: Double): Boolean = {
    val whitener = new ZCAWhitenerEstimator(eps).fitSingle(x)

    val wx = whitener(x)

    // Checks that max(abs(cov(whiten(x)) - eye(ndim))) < thresh
    max(abs(cov(convert(wx, Double)) - DenseMatrix.eye[Double](ndim))) < thresh
  }

  test("whitening with small epsilon") {
    assert(fitAndCompare(x, 1e-12, 1e-4),
      "Whitening the base matrix should produce unit variance and zero covariance.")
  }

  test("whitening with large epsilon") {
    assert(fitAndCompare(x, 0.1, 0.1),
      "Whitening the base matrix should produce unit variance and zero covariance.")

    assert(!fitAndCompare(x, 0.1, 1e-4),
      "Whitening the base matrix with a large epsilon should be somewhat noisy.")
  }
} 
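
ZCA whitening builds W = U diag(1 / sqrt(s + eps)) U^T from the eigendecomposition of the data covariance, so the whitened data (X - mean) W has covariance close to the identity, which is exactly what fitAndCompare verifies. A Breeze sketch of that standard construction (the ZCAWhitenerEstimator's exact centering and scaling conventions may differ):

import breeze.linalg._
import breeze.stats.mean

// Standard ZCA whitening: W = U * diag(1 / sqrt(s + eps)) * U^T (illustration only;
// the ZCAWhitenerEstimator's internals may differ).
def zcaWhiten(x: DenseMatrix[Double], eps: Double): DenseMatrix[Double] = {
  val mu = mean(x(::, *)).t                                   // per-column mean
  val centered = x - DenseVector.ones[Double](x.rows) * mu.t  // subtract the mean row from every row
  val covariance = (centered.t * centered) * (1.0 / (x.rows - 1))
  val es = eigSym(covariance)                                 // covariance = U diag(s) U^T
  val scaling = diag(es.eigenvalues.map(v => 1.0 / math.sqrt(v + eps)))
  centered * (es.eigenvectors * scaling * es.eigenvectors.t)
}

val data = DenseMatrix.rand[Double](500, 5)
println(cov(zcaWhiten(data, 1e-8)))  // should be close to the 5x5 identity matrix
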
Example 172
Source File: LinearDiscriminantAnalysisSuite.scala    From keystone   with Apache License 2.0 5 votes vote down vote up
package keystoneml.nodes.learning

import breeze.linalg._
import breeze.stats.distributions.{Multinomial, Uniform, Gaussian}
import keystoneml.nodes.stats.StandardScaler
import org.apache.spark.SparkContext
import org.scalatest.FunSuite
import keystoneml.pipelines.Logging
import keystoneml.utils.{TestUtils, MatrixUtils, Stats}
import keystoneml.workflow.PipelineContext

class LinearDiscriminantAnalysisSuite extends FunSuite with PipelineContext with Logging {
  test("Solve Linear Discriminant Analysis on the Iris Dataset") {
    sc = new SparkContext("local", "test")

    // Uses the Iris flower dataset
    val irisData = sc.parallelize(TestUtils.loadFile("iris.data"))
    val trainData = irisData.map(_.split(",").dropRight(1).map(_.toDouble)).map(new DenseVector(_))
    val features = new StandardScaler().fit(trainData).apply(trainData)
    val labels = irisData.map(_ match {
      case x if x.endsWith("Iris-setosa") => 1
      case x if x.endsWith("Iris-versicolor") => 2
      case x if x.endsWith("Iris-virginica") => 3
    })

    val lda = new LinearDiscriminantAnalysis(2)
    val out = lda.fit(features, labels)

    // Correct output taken from http://sebastianraschka.com/Articles/2014_python_lda.html#introduction
    logInfo(s"\n${out.x}")
    val majorVector = DenseVector(-0.1498, -0.1482, 0.8511, 0.4808)
    val minorVector = DenseVector(0.0095, 0.3272, -0.5748, 0.75)

    // Note that because eigenvectors can be reversed and still valid, we allow either direction
    assert(Stats.aboutEq(out.x(::, 0), majorVector, 1E-4) || Stats.aboutEq(out.x(::, 0), majorVector * -1.0, 1E-4))
    assert(Stats.aboutEq(out.x(::, 1), minorVector, 1E-4) || Stats.aboutEq(out.x(::, 1), minorVector * -1.0, 1E-4))
  }

  test("Check LDA output for a diagonal covariance") {
    sc = new SparkContext("local", "test")

    val matRows = 1000
    val matCols = 10
    val dimRed = 5

    // Generate a random Gaussian matrix.
    val gau = new Gaussian(0.0, 1.0)
    val randMatrix = new DenseMatrix(matRows, matCols, gau.sample(matRows*matCols).toArray)

    // Parallelize and estimate the LDA.
    val data = sc.parallelize(MatrixUtils.matrixToRowArray(randMatrix))
    val labels = data.map(x => Multinomial(DenseVector(0.2, 0.2, 0.2, 0.2, 0.2)).draw(): Int)
    val lda = new LinearDiscriminantAnalysis(dimRed).fit(data, labels)

    // Apply LDA to the input data.
    val redData = lda(data)
    val redMat = MatrixUtils.rowsToMatrix(redData.collect)

    // Compute its covariance.
    val redCov = cov(redMat)
    log.info(s"Covar\n$redCov")

    // The covariance of the dimensionality reduced matrix should be diagonal.
    for (
      x <- 0 until dimRed;
      y <- 0 until dimRed if x != y
    ) {
      assert(Stats.aboutEq(redCov(x,y), 0.0, 1e-6), s"LDA Matrix should be 0 off-diagonal. $x,$y = ${redCov(x,y)}")
    }
  }

} 
Example 173
Source File: HogExtractorSuite.scala    From keystone   with Apache License 2.0 5 votes vote down vote up
package keystoneml.nodes.images

import breeze.linalg._
import org.scalatest.FunSuite

import keystoneml.pipelines.Logging
import keystoneml.utils.{ImageUtils, Stats, TestUtils}

class HogExtractorSuite extends FunSuite with Logging {
  test("Load an Image and compute Hog Features") {
    val testImage = TestUtils.loadTestImage("images/gantrycrane.png")

    // NOTE: The MATLAB implementation from voc-release5 uses
    // images in double range -- So convert our image by rescaling
    val testImageScaled = ImageUtils.mapPixels(testImage, x => x/255.0)

    val binSize = 50
    val hog = new HogExtractor(binSize)
    val descriptors = hog.apply(testImageScaled)

    val ourSum = sum(descriptors)
    val matlabSum = 59.2162514

    assert(Stats.aboutEq((ourSum - matlabSum) / ourSum, 0, 1e-8),
      "Hog features sum should match")

    // With a smaller bin size
    val hog1 = new HogExtractor(binSize=8)
    val descriptors1 = hog1.apply(testImageScaled)

    val matlabSum1 = 4.5775269e+03
    val ourSum1 = sum(descriptors1)

    // TODO: Figure out why the error is a bit higher here.
    assert(Stats.aboutEq((ourSum1 - matlabSum1) / ourSum1, 0, 1e-4),
      "Hog features sum should match")
  }
} 
Example 174
Source File: DaisyExtractorSuite.scala    From keystone   with Apache License 2.0 5 votes vote down vote up
package keystoneml.nodes.images

import breeze.linalg._
import keystoneml.nodes.images.external.SIFTExtractor
import org.scalatest.FunSuite

import keystoneml.pipelines.Logging
import keystoneml.utils.{ImageUtils, Stats, TestUtils}

class DaisyExtractorSuite extends FunSuite with Logging {
  test("Load an Image and compute Daisy Features") {
    val testImage = TestUtils.loadTestImage("images/gantrycrane.png")
    val grayImage = ImageUtils.toGrayScale(testImage)

    val df = new DaisyExtractor()
    val daisyDescriptors = convert(df.apply(grayImage), Double)

    val firstKeyPointSum = sum(daisyDescriptors(::, 0))
    val fullFeatureSum = sum(daisyDescriptors)

    // Values found from running matlab code on same input file.
    val matlabFirstKeyPointSum = 55.127217737738533
    val matlabFullFeatureSum = 3.240635661296463E5

    // TODO: This should be at most 1e-8 since we are using Floats, but it's 1e-5 and 1e-7 right now.
    assert(Stats.aboutEq(
      (firstKeyPointSum - matlabFirstKeyPointSum)/matlabFirstKeyPointSum, 0, 1e-5),
      "First keypoint sum must match for Daisy")
    assert(Stats.aboutEq((fullFeatureSum - matlabFullFeatureSum)/matlabFullFeatureSum, 0, 1e-7),
      "Sum of Daisys must match expected sum")
  }

  test("Daisy and SIFT extractors should have same row/column ordering.") {
    val testImage = TestUtils.loadTestImage("images/gantrycrane.png")
    val grayImage = ImageUtils.toGrayScale(testImage)

    val df = new DaisyExtractor()
    val daisyDescriptors = convert(df.apply(grayImage), Double)

    val se = SIFTExtractor(scaleStep = 2)
    val siftDescriptors = se.apply(grayImage)

    assert(daisyDescriptors.rows == df.daisyFeatureSize && siftDescriptors.rows == se.descriptorSize)

  }
} 
Example 175
Source File: CenterCornerPatcherSuite.scala    From keystone   with Apache License 2.0 5 votes vote down vote up
package keystoneml.nodes.images

import org.scalatest.FunSuite
import keystoneml.pipelines.Logging
import keystoneml.utils.{ChannelMajorArrayVectorizedImage, ImageMetadata, TestUtils}

class CenterCornerPatcherSuite extends FunSuite with Logging {

  test("check number and dimension of patches") {
    val image = TestUtils.loadTestImage("images/000012.jpg")
    val xDim = image.metadata.xDim
    val yDim = image.metadata.yDim
    val patchSizeX = xDim / 2 
    val patchSizeY = yDim / 2

    val withFlipPatcher = CenterCornerPatcher(patchSizeX, patchSizeY, true)
    val withFlipPatches = withFlipPatcher.centerCornerPatchImage(image).toSeq

    assert(withFlipPatches.map(_.metadata.xDim).forall(_ == patchSizeX) &&
      withFlipPatches.map(_.metadata.yDim).forall(_ == patchSizeY) &&
      withFlipPatches.map(_.metadata.numChannels).forall(_ == image.metadata.numChannels),
      "All patches must have right dimensions")

    assert(withFlipPatches.size === 10, "Number of patches must match")

    val noFlipPatcher = CenterCornerPatcher(patchSizeX, patchSizeY, false) 
    val noFlipPatches = noFlipPatcher.centerCornerPatchImage(image).toSeq

    assert(noFlipPatches.map(_.metadata.xDim).forall(_ == patchSizeX) &&
      noFlipPatches.map(_.metadata.yDim).forall(_ == patchSizeY) &&
      noFlipPatches.map(_.metadata.numChannels).forall(_ == image.metadata.numChannels),
      "All patches must have right dimensions")

    assert(noFlipPatches.size === 5, "Number of patches must match")
  }

  test("1x1 image patches") {
    val imgArr =
      (0 until 5).flatMap { x =>
        (0 until 5).flatMap { y =>
          (0 until 1).map { c =>
            (c + x * 1 + y * 5 * 1).toDouble
          }
        }
      }.toArray

    val image = new ChannelMajorArrayVectorizedImage(imgArr, ImageMetadata(5, 5, 1))
    val patchSizeX = 1
    val patchSizeY = 1

    val noFlipPatcher = CenterCornerPatcher(patchSizeX, patchSizeY, false)
    val noFlipPatches = noFlipPatcher.centerCornerPatchImage(image).toSeq

    assert(noFlipPatches.length === 5)
    // NOTE(shivaram): This assumes order of patches returned stays the same. 
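    // With pixel value x + 5*y, the five patches are the four corners (values 0, 20, 4, 24)
    // followed by the center (12).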
    assert(noFlipPatches(0).get(0, 0, 0) === 0.0)
    assert(noFlipPatches(1).get(0, 0, 0) === 20.0)
    assert(noFlipPatches(2).get(0, 0, 0) === 4.0)
    assert(noFlipPatches(3).get(0, 0, 0) === 24.0)
    assert(noFlipPatches(4).get(0, 0, 0) === 12.0)
  }
} 
Example 176
Source File: RandomPatcherSuite.scala    From keystone   with Apache License 2.0 5 votes vote down vote up
package keystoneml.nodes.images

import org.scalatest.FunSuite
import keystoneml.pipelines.Logging
import keystoneml.utils.{ChannelMajorArrayVectorizedImage, ImageMetadata, TestUtils}

class RandomPatcherSuite extends FunSuite with Logging {

  test("patch dimensions, number") {
    val image = TestUtils.loadTestImage("images/000012.jpg")
    val xDim = image.metadata.xDim
    val yDim = image.metadata.yDim
    val patchSizeX = xDim / 2 
    val patchSizeY = yDim / 2
    val numPatches = 5

    val patcher = RandomPatcher(numPatches, patchSizeX, patchSizeY)

    val patches = patcher.randomPatchImage(image).toSeq

    assert(patches.map(_.metadata.xDim).forall(_ == patchSizeX) &&
      patches.map(_.metadata.yDim).forall(_ == patchSizeY) &&
      patches.map(_.metadata.numChannels).forall(_ == image.metadata.numChannels),
      "All patches must have right dimensions")

    assert(patches.size === numPatches,
      "Number of patches must match argument passed in")
  }
} 
Example 177
Source File: PoolingSuite.scala    From keystone   with Apache License 2.0 5 votes vote down vote up
package keystoneml.nodes.images

import breeze.linalg.{DenseVector, sum}
import keystoneml.nodes._
import org.scalatest.FunSuite
import keystoneml.pipelines.Logging
import keystoneml.utils.{ChannelMajorArrayVectorizedImage, ImageMetadata}

class PoolingSuite extends FunSuite with Logging {

  test("pooling") {
    val imgArr =
      (0 until 4).flatMap { x =>
        (0 until 4).flatMap { y =>
          (0 until 1).map { c =>
            (c + x * 1 + y * 4 * 1).toDouble
          }
        }
      }.toArray

    val image = new ChannelMajorArrayVectorizedImage(imgArr, ImageMetadata(4, 4, 1))
    val pooling = new Pooler(2, 2, x => x, x => x.max)

    val poolImage = pooling(image)

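    // With pixel value x + 4*y, each 2x2 block's maximum is its highest-index element,
    // giving 5, 7, 13 and 15.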
    assert(poolImage.get(0, 0, 0) === 5.0)
    assert(poolImage.get(0, 1, 0) === 7.0)
    assert(poolImage.get(1, 0, 0) === 13.0)
    assert(poolImage.get(1, 1, 0) === 15.0)
  }

  test("pooling odd") {
    val hogImgSize = 14
    val convSizes = List(1, 2, 3, 4, 6, 8)
    convSizes.foreach { convSize =>
      val convResSize = hogImgSize - convSize + 1

      val imgArr =
        (0 until convResSize).flatMap { x =>
          (0 until convResSize).flatMap { y =>
            (0 until 1000).map { c =>
              (c + x * 1 + y * 4 * 1).toDouble
            }
          }
        }.toArray

      val image = new ChannelMajorArrayVectorizedImage(
        imgArr, ImageMetadata(convResSize, convResSize, 1000))

      val poolSizeReqd = math.ceil(convResSize / 2.0).toInt

      // We want poolSize to be even !!
      val poolSize = (math.ceil(poolSizeReqd / 2.0) * 2).toInt
      // overlap as little as possible
      val poolStride = convResSize - poolSize


      println(s"VALUES: $convSize $convResSize $poolSizeReqd $poolSize $poolStride")

      def summ(x: DenseVector[Double]): Double = sum(x)

      val pooling = new Pooler(poolStride, poolSize, identity, summ)
      val poolImage = pooling(image)
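      // No values are asserted here; the loop only checks that pooling completes for each configuration.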
    }
  }
} 
Example 178
Source File: WindowingSuite.scala    From keystone   with Apache License 2.0 5 votes vote down vote up
package keystoneml.nodes.images

import org.scalatest.FunSuite
import keystoneml.pipelines.Logging
import keystoneml.utils.{ChannelMajorArrayVectorizedImage, ImageMetadata, TestUtils}

class WindowingSuite extends FunSuite with Logging {

  test("windowing") {
    val image = TestUtils.loadTestImage("images/000012.jpg")
    val stride = 100
    val size = 50

    val windowing = new Windower(stride, size)

    val windows = windowing.getImageWindow(image)

    assert(windows.map(_.metadata.xDim).forall(_ == size) &&
      windows.map(_.metadata.yDim).forall(_ == size),
      "All windows must be 100x100")

    assert(windows.size == (image.metadata.xDim/stride) * (image.metadata.yDim/stride),
      "Must have number of windows matching xDims and yDims given the stride.")
  }

  test("1x1 windowing") {
    val imgArr =
      (0 until 4).flatMap { x =>
        (0 until 4).flatMap { y =>
          (0 until 1).map { c =>
            (c + x * 1 + y * 4 * 1).toDouble
          }
        }
      }.toArray


    val image = new ChannelMajorArrayVectorizedImage(imgArr, ImageMetadata(4, 4, 1))

    val windower = new Windower(1, 1)
    val windowImages = windower.getImageWindow(image)

    assert(windowImages.length === 16)
    assert(windowImages(0).get(0, 0, 0) === 0)
    assert(windowImages(1).get(0, 0, 0) === 1.0)
    assert(windowImages(2).get(0, 0, 0) === 2.0)
    assert(windowImages(3).get(0, 0, 0) === 3.0)
  }

  test("2x2 windowing") {
    val imgArr =
      (0 until 4).flatMap { x =>
        (0 until 4).flatMap { y =>
          (0 until 1).map { c =>
            (c + x * 1 + y * 4 * 1).toDouble
          }
        }
      }.toArray


    val image = new ChannelMajorArrayVectorizedImage(imgArr, ImageMetadata(4, 4, 1))

    val windower = new Windower(2, 2)

    val windowImages = windower.getImageWindow(image)

    assert(windowImages.length === 4)

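    // With pixel value x + 4*y and stride 2, the window origins sit at (0,0), (2,0), (0,2)
    // and (2,2), giving 0, 2, 8 and 10.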
    assert(windowImages(0).get(0, 0, 0) === 0)
    assert(windowImages(1).get(0, 0, 0) === 2.0)
    assert(windowImages(2).get(0, 0, 0) === 8.0)
    assert(windowImages(3).get(0, 0, 0) === 10.0)
  }

  test("nxn windowing with step=1") {
    val dim = 30
    val imgArr =
      (0 until dim).flatMap { x =>
        (0 until dim).flatMap { y =>
          (0 until 1).map { c =>
            (c + x * 1 + y * 4 * 1 + 10).toDouble
          }
        }
      }.toArray


    val image = new ChannelMajorArrayVectorizedImage(imgArr, ImageMetadata(dim, dim, 1))
    val sizes = List(1, 2, 3, 4, 6, 8)

    sizes.foreach { w =>
      val windower = new Windower(1, w)
      val windowImages = windower.getImageWindow(image)
      assert(windowImages.length === (dim-w+1) * (dim-w+1))
      assert(windowImages.forall(x => !x.toArray.contains(0.0)))
    }
  }
} 
Example 179
Source File: LCSExtractorSuite.scala    From keystone   with Apache License 2.0 5 votes vote down vote up
package keystoneml.nodes.images

import breeze.linalg._
import org.scalatest.FunSuite

import keystoneml.pipelines.Logging
import keystoneml.utils.{ImageUtils, Stats, TestUtils}

class LCSExtractorSuite extends FunSuite with Logging {
  test("Load an Image and compute LCS Features") {
    val testImage = TestUtils.loadTestImage("images/gantrycrane.png")

    val lf = new LCSExtractor(stride=4, subPatchSize=6, strideStart=16)
    val lcsDescriptors = convert(lf.apply(testImage), Double)

    val firstKeyPointSum = sum(lcsDescriptors(::, 0))
    val fullFeatureSum = sum(lcsDescriptors)

    // Values found from running matlab code on same input file.
    val matlabFirstKeyPointSum = 3.786557667540610e+03
    val matlabFullFeatureSum = 3.171963632855949e+07

    assert(
      Stats.aboutEq((firstKeyPointSum - matlabFirstKeyPointSum)/matlabFirstKeyPointSum, 0, 1e-8),
      "First keypoint sum must match for LCS")
    assert(Stats.aboutEq((fullFeatureSum - matlabFullFeatureSum)/matlabFullFeatureSum, 0, 1e-8),
      "Sum of LCS must match expected sum")
  }
} 
Example 180
Source File: TermFrequencySuite.scala    From keystone   with Apache License 2.0 5 votes vote down vote up
package keystoneml.nodes.misc

import keystoneml.nodes.stats.TermFrequency
import org.apache.spark.SparkContext
import org.scalatest.FunSuite
import keystoneml.workflow.PipelineContext

class TermFrequencySuite extends FunSuite with PipelineContext {
  test("term frequency of simple strings") {
    sc = new SparkContext("local", "test")
    val in = Seq(Seq[Any]("b", "a", "c", "b", "b", "a", "b"))
    val out = TermFrequency().apply(sc.parallelize(in)).first().toMap
    assert(out === Map("a" -> 2, "b" -> 4, "c" -> 1))
  }

  test("term frequency of varying types") {
    sc = new SparkContext("local", "test")
    val in = Seq(Seq("b", "a", "c", ("b", "b"), ("b", "b"), 12, 12, "a", "b", 12))
    val out = TermFrequency().apply(sc.parallelize(in)).first().toMap
    assert(out === Map("a" -> 2, "b" -> 2, "c" -> 1, ("b", "b") -> 2, 12 -> 3))
  }

  test("log term frequency") {
    sc = new SparkContext("local", "test")
    val in = Seq(Seq[Any]("b", "a", "c", "b", "b", "a", "b"))
    val out = TermFrequency(x => math.log(x + 1)).apply(sc.parallelize(in)).first().toMap
    assert(out === Map("a" -> math.log(3), "b" -> math.log(5), "c" -> math.log(2)))
  }
} 
Example 181
Source File: SparseFeatureVectorizerSuite.scala    From keystone   with Apache License 2.0 5 votes vote down vote up
package keystoneml.nodes.misc

import keystoneml.nodes.util.{SparseFeatureVectorizer, AllSparseFeatures, CommonSparseFeatures}
import org.apache.spark.SparkContext
import org.scalatest.FunSuite
import keystoneml.pipelines.Logging
import keystoneml.workflow.PipelineContext

class SparseFeatureVectorizerSuite extends FunSuite with PipelineContext with Logging {
  test("sparse feature vectorization") {
    sc = new SparkContext("local", "test")

    val featureVectorizer = new SparseFeatureVectorizer(Map("First" -> 0, "Second" -> 1, "Third" -> 2))
    val test = Seq(("Third", 4.0), ("Fourth", 6.0), ("First", 1.0))
    val vector = featureVectorizer.apply(sc.parallelize(Seq(test))).first()

    assert(vector.size == 3)
    assert(vector(0) == 1)
    assert(vector(1) == 0)
    assert(vector(2) == 4)
  }

  test("all sparse feature selection") {
    sc = new SparkContext("local", "test")
    val train = sc.parallelize(List(Seq(("First", 0.0), ("Second", 6.0)), Seq(("Third", 3.0), ("Second", 4.0))))

    val featureVectorizer = AllSparseFeatures().fit(train.map(x => x))
    // The selected features should now be "First", "Second", and "Third"

    val test = Seq(("Third", 4.0), ("Fourth", 6.0), ("First", 1.0))
    val out = featureVectorizer.apply(sc.parallelize(Seq(test))).first().toArray

    assert(out === Array(1.0, 0.0, 4.0))
  }

  test("common sparse feature selection") {
    sc = new SparkContext("local", "test")
    val train = sc.parallelize(List(
      Seq(("First", 0.0), ("Second", 6.0)),
      Seq(("Third", 3.0), ("Second", 4.8)),
      Seq(("Third", 7.0), ("Fourth", 5.0)),
      Seq(("Fifth", 5.0), ("Second", 7.3))
    ))

    val featureVectorizer = CommonSparseFeatures(2).fit(train.map(x => x))
    // The selected features should now be "Second", and "Third"

    val test = Seq(("Third", 4.0), ("Seventh", 8.0), ("Second", 1.3), ("Fourth", 6.0), ("First", 1.0))
    val out = featureVectorizer.apply(sc.parallelize(Seq(test))).first().toArray

    assert(out === Array(1.3, 4.0))
  }
} 
Example 182
Source File: LinearRectifierSuite.scala    From keystone   with Apache License 2.0 5 votes vote down vote up
package keystoneml.nodes.stats

import breeze.linalg.DenseMatrix
import breeze.stats.distributions.Rand
import org.apache.spark.SparkContext
import org.scalatest.FunSuite
import keystoneml.pipelines._
import keystoneml.utils.{TestUtils, MatrixUtils}
import keystoneml.workflow.PipelineContext

class LinearRectifierSuite extends FunSuite with PipelineContext with Logging {

  test("Test MaxVal") {
    sc = new SparkContext("local", "test")
    val matrixParts = TestUtils.createRandomMatrix(sc, 128, 16, 4).rdd.map(_.mat)

    val x = matrixParts.flatMap(y => MatrixUtils.matrixToRowArray(y))
    val y = x.map(r => r.forall(_ >= 0.0))

    val valmaxNode = LinearRectifier()
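    // LinearRectifier clamps negative entries to zero (a ReLU), so every rectified row should be non-negative.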
    val maxy = valmaxNode.apply(x).map(r => r.forall(_ >= 0.0))

    //The random matrix should *not* all be >= 0
    assert(!y.reduce {(a,b) => a | b})

    //The valmax'ed random matrix *should* all be >= 0.
    assert(maxy.reduce {(a,b) => a | b})
  }
} 
Example 183
Source File: RandomSignNodeSuite.scala    From keystone   with Apache License 2.0 5 votes vote down vote up
package keystoneml.nodes.stats

import breeze.linalg._
import org.scalatest.FunSuite
import org.scalatest.matchers.ShouldMatchers
import keystoneml.pipelines.Logging

class RandomSignNodeSuite extends FunSuite with Logging with ShouldMatchers {

  test("RandomSignNode") {
    val signs = DenseVector(1.0, -1.0, 1.0)
    val node = RandomSignNode(signs)
    val data: DenseVector[Double] = DenseVector(1.0, 2.0, 3.0)
    val result = node(data)
    Seq(result) should equal (Seq(DenseVector(1.0, -2.0, 3.0)))
  }

  test("RandomSignNode.create") {
    val node = RandomSignNode(1000)
    
    node.signs.foreach(elt => assert(elt == -1.0 || elt == 1.0))
  }
} 
Example 184
Source File: CosineRandomFeaturesSuite.scala    From keystone   with Apache License 2.0 5 votes vote down vote up
package keystoneml.nodes.stats

import breeze.linalg._
import breeze.numerics.cos
import breeze.stats._
import breeze.stats.distributions.{CauchyDistribution, Rand}
import org.scalatest.FunSuite
import keystoneml.utils.Stats


class CosineRandomFeaturesSuite extends FunSuite {
  val gamma = 1.34
  val numInputFeatures = 400
  val numOutputFeatures = 1000

  test("Guassian cosine random features") {
    val rf = CosineRandomFeatures(numInputFeatures, numOutputFeatures, gamma)

    // Check that b is uniform
    assert(max(rf.b) <= 2*math.Pi)
    assert(min(rf.b) >= 0)
    assert(rf.b.size == numOutputFeatures)

    // Check that W is gaussian
    assert(rf.W.rows == numOutputFeatures)
    assert(rf.W.cols == numInputFeatures)
    assert(Stats.aboutEq(mean(rf.W),0, 10e-3 * gamma))
    assert(Stats.aboutEq(variance(rf.W), gamma * gamma, 10e-3 * gamma * gamma))

    //check the mapping
    val in = DenseVector.rand(numInputFeatures, Rand.uniform)
    val out = cos((in.t * rf.W.t).t + rf.b)
    assert(Stats.aboutEq(rf(in), out, 10e-3))
  }

  test("Cauchy cosine random features") {
    val rf = CosineRandomFeatures(
      numInputFeatures,
      numOutputFeatures,
      gamma,
      new CauchyDistribution(0, 1))

    // Check that b is uniform
    assert(max(rf.b) <= 2*math.Pi)
    assert(min(rf.b) >= 0)
    assert(rf.b.size == numOutputFeatures)

    // Check that W is cauchy
    assert(rf.W.rows == numOutputFeatures)
    assert(rf.W.cols == numInputFeatures)
    assert(Stats.aboutEq(median(rf.W),0,10e-3 * gamma))

    //check the mapping
    val in = DenseVector.rand(numInputFeatures, Rand.uniform)
    val out = cos((in.t * rf.W.t).t + rf.b)
    assert(Stats.aboutEq(rf(in), out, 10e-3))
  }
} 
Example 185
Source File: PaddedFFTSuite.scala    From keystone   with Apache License 2.0 5 votes vote down vote up
package keystoneml.nodes.stats

import breeze.linalg._
import org.apache.spark.SparkContext
import org.scalatest.FunSuite
import keystoneml.pipelines.Logging
import keystoneml.utils.Stats
import keystoneml.workflow.PipelineContext


class PaddedFFTSuite extends FunSuite with PipelineContext with Logging {
  test("Test PaddedFFT node") {
    sc = new SparkContext("local", "test")

    // Set up test vectors (impulses at indices 0 and 2).
    val ones = DenseVector.zeros[Double](100)
    val twos = DenseVector.zeros[Double](100)
    ones(0) = 1.0
    twos(2) = 1.0

    val x = sc.parallelize(Seq(twos, ones))
    val fftd = PaddedFFT().apply(x).collect()

    val twosout = fftd(0)
    val onesout = fftd(1)

    // Proof by agreement w/ R: Re(fft(c(0, 0, 1, rep(0, 125))))
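    // Equivalently, Re(FFT) of a unit impulse at index 2, zero-padded to length 128, is
    // cos(2*pi*2*k/128) sampled at k = 0, 16, 32, 48; only the first 64 points appear to be kept.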
    assert(twosout.length === 64)
    assert(Stats.aboutEq(twosout(0), 1.0))
    assert(Stats.aboutEq(twosout(16), 0.0))
    assert(Stats.aboutEq(twosout(32), -1.0))
    assert(Stats.aboutEq(twosout(48), 0.0))

    // Proof by agreement w/ R: Re(fft(c(1, rep(0, 127))))
    assert(Stats.aboutEq(onesout, DenseVector.ones[Double](64)))
  }
} 
Example 186
Source File: CoreNLPFeatureExtractorSuite.scala    From keystone   with Apache License 2.0 5 votes vote down vote up
package keystoneml.nodes.nlp

import org.apache.spark.SparkContext
import org.scalatest.FunSuite
import keystoneml.pipelines.Logging
import keystoneml.workflow.PipelineContext

class CoreNLPFeatureExtractorSuite extends FunSuite with PipelineContext with Logging {
  test("lemmatization") {
    sc = new SparkContext("local", "test")

    val text = "jumping snakes lakes oceans hunted"
    val tokens = CoreNLPFeatureExtractor(1 to 3).apply(sc.parallelize(Seq(text))).first().toSet

    // Make sure at least very simple cases were lemmatized
    assert(tokens.contains("jump"))
    assert(tokens.contains("snake"))
    assert(tokens.contains("lake"))
    assert(tokens.contains("ocean"))
    assert(tokens.contains("hunt"))

    // Assert the unlemmatized tokens are no longer there
    assert(!tokens.contains("jumping"))
    assert(!tokens.contains("snakes"))
    assert(!tokens.contains("oceans"))
    assert(!tokens.contains("lakes"))
    assert(!tokens.contains("hunted"))
  }

  test("entity extraction") {
    sc = new SparkContext("local", "test")

    val text = "John likes cake and he lives in Florida"
    val tokens = CoreNLPFeatureExtractor(1 to 3).apply(sc.parallelize(Seq(text))).first().toSet

    // Make sure at least very simple entities were identified and extracted
    assert(tokens.contains("PERSON"))
    assert(tokens.contains("LOCATION"))

    // Assert the original tokens are no longer there
    assert(!tokens.contains("John"))
    assert(!tokens.contains("Florida"))
  }

  test("1-2-3-grams") {
    sc = new SparkContext("local", "test")

    val text = "a b c d"
    val tokens = CoreNLPFeatureExtractor(1 to 3).apply(sc.parallelize(Seq(text))).first().toSet

    // Make sure expected unigrams appear
    assert(tokens.contains("a"))
    assert(tokens.contains("b"))
    assert(tokens.contains("c"))
    assert(tokens.contains("d"))

    // Make sure expected bigrams appear
    assert(tokens.contains("a b"))
    assert(tokens.contains("b c"))
    assert(tokens.contains("c d"))

    // Make sure expected 3-grams appear
    assert(tokens.contains("a b c"))
    assert(tokens.contains("b c d"))
  }
} 
Example 187
Source File: NGramIndexerSuite.scala    From keystone   with Apache License 2.0 5 votes vote down vote up
package keystoneml.nodes.nlp

import org.scalatest.FunSuite

class NGramIndexerSuite extends FunSuite {

  test("pack()") {
    require(NaiveBitPackIndexer.pack(Seq(1)) == math.pow(2, 40).toLong)

    require(NaiveBitPackIndexer.pack(Seq(1, 1)) ==
      math.pow(2, 40).toLong + math.pow(2, 20).toLong + math.pow(2, 60).toLong)

    require(NaiveBitPackIndexer.pack(Seq(1, 1, 1)) ==
      1 + math.pow(2, 40).toLong + math.pow(2, 20).toLong + math.pow(2, 61).toLong)

    val ngramIndexer = new NGramIndexerImpl[Int]
    val seq = ngramIndexer.minNgramOrder to ngramIndexer.maxNgramOrder
    require(ngramIndexer.pack(seq).equals(new NGram(seq)))
  }

  test("removeFarthestWord()") {
    def testWith[Word >: Int, Ngram](indexer: BackoffIndexer[Word, Ngram]) = {
      var ngramId = indexer.pack(Seq(1, 2, 3))
      var context = indexer.removeFarthestWord(ngramId)
      var expected = indexer.pack(Seq(2, 3))
      require(context == expected, s"actual $context, expected $expected")

      ngramId = indexer.pack(Seq(1, 2))
      context = indexer.removeFarthestWord(ngramId)
      expected = indexer.pack(Seq(2))
      require(context == expected, s"actual $context, expected $expected")
    }

    testWith(new NGramIndexerImpl[Int])
    testWith(NaiveBitPackIndexer)
  }

  test("removeCurrentWord()") {
    def testWith[Word >: Int, Ngram](indexer: BackoffIndexer[Word, Ngram]) = {
      var ngramId = indexer.pack(Seq(1, 2, 3))
      var context = indexer.removeCurrentWord(ngramId)
      var expected = indexer.pack(Seq(1, 2))
      require(context == expected, s"actual $context, expected $expected")

      ngramId = indexer.pack(Seq(1, 2))
      context = indexer.removeCurrentWord(ngramId)
      expected = indexer.pack(Seq(1))
      require(context == expected, s"actual $context, expected $expected")
    }

    testWith(new NGramIndexerImpl[Int])
    testWith(NaiveBitPackIndexer)
  }

} 
Example 188
Source File: StringUtilsSuite.scala    From keystone   with Apache License 2.0 5 votes vote down vote up
package keystoneml.nodes.nlp

import org.apache.spark.SparkContext
import org.scalatest.FunSuite
import keystoneml.workflow.PipelineContext

class StringUtilsSuite extends FunSuite with PipelineContext {
  val stringToManip = Array("  The quick BROWN fo.X ", " ! !.,)JumpeD. ovER the LAZy DOG.. ! ")
  test("trim") {
    sc = new SparkContext("local", "test")
    val out = Trim.apply(sc.parallelize(stringToManip, 1)).collect().toSeq
    assert(out === Seq("The quick BROWN fo.X", "! !.,)JumpeD. ovER the LAZy DOG.. !"))
  }

  test("lower case") {
    sc = new SparkContext("local", "test")
    val out = LowerCase().apply(sc.parallelize(stringToManip, 1)).collect().toSeq
    assert(out === Seq("  the quick brown fo.x ", " ! !.,)jumped. over the lazy dog.. ! "))
  }

  test("tokenizer") {
    sc = new SparkContext("local", "test")
    val out = Tokenizer().apply(sc.parallelize(stringToManip, 1)).collect().toSeq
    assert(out === Seq(Seq("", "The", "quick", "BROWN", "fo", "X"), Seq("", "JumpeD", "ovER", "the", "LAZy", "DOG")))
  }
} 
Example 189
Source File: ClassLabelIndicatorsSuite.scala    From keystone   with Apache License 2.0 5 votes vote down vote up
package keystoneml.nodes.util

import breeze.linalg.DenseVector
import org.scalatest.FunSuite

class ClassLabelIndicatorsSuite extends FunSuite {
  test("single label indicators") {
    intercept[AssertionError] {
      val zerolabels = ClassLabelIndicatorsFromIntLabels(0)
    }

    intercept[AssertionError] {
      val onelabel = ClassLabelIndicatorsFromIntLabels(1)
    }


    val fivelabel = ClassLabelIndicatorsFromIntLabels(5)
    assert(fivelabel(2) === DenseVector(-1.0,-1.0,1.0,-1.0,-1.0))

    intercept[RuntimeException] {
      fivelabel(5)
    }
  }

  test("multiple label indicators without validation") {
    intercept[AssertionError] {
      val zerolabels = ClassLabelIndicatorsFromIntArrayLabels(0)
    }

    intercept[AssertionError] {
      val onelabel = ClassLabelIndicatorsFromIntArrayLabels(1)
    }

    val fivelabel = ClassLabelIndicatorsFromIntArrayLabels(5)

    assert(fivelabel(Array(2,1)) === DenseVector(-1.0,1.0,1.0,-1.0,-1.0))

    intercept[IndexOutOfBoundsException] {
      fivelabel(Array(4,6))
    }

    assert(fivelabel(Array(-1,2)) === DenseVector(-1.0,-1.0,1.0,-1.0,1.0),
      "In the unchecked case, we should get weird behavior.")

  }

  test("multiple label indicators with validation") {
    intercept[AssertionError] {
      val zerolabels = ClassLabelIndicatorsFromIntArrayLabels(0, true)
    }

    intercept[AssertionError] {
      val onelabel = ClassLabelIndicatorsFromIntArrayLabels(1, true)
    }

    val fivelabel = ClassLabelIndicatorsFromIntArrayLabels(5, true)

    assert(fivelabel(Array(2,1)) === DenseVector(-1.0,1.0,1.0,-1.0,-1.0))

    intercept[RuntimeException] {
      fivelabel(Array(4,6))
    }

    intercept[RuntimeException] {
      fivelabel(Array(-1,2))
    }
  }
} 
Example 190
Source File: VectorSplitterSuite.scala    From keystone   with Apache License 2.0 5 votes vote down vote up
package keystoneml.nodes.util

import breeze.linalg._
import org.scalatest.FunSuite

class VectorSplitterSuite extends FunSuite {
  test("vector splitter") {
    for (
      bs <- Array(128, 256, 512, 1024, 2048);
      mul <- 0 to 2;
      off <- 0 to 20 by 5;
      feats <- Array(Some(bs*mul + off), None)
    ) {
      val sp = new VectorSplitter(bs, feats)
      val vec = DenseVector.zeros[Double](bs*mul + off)

      val expectedSplits = (bs*mul + off)/bs + (if ((bs*mul + off) % bs == 0) 0 else 1)
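      // i.e. ceil((bs*mul + off) / bs): the number of blocks needed to cover the full vector.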

      assert(sp.splitVector(vec).length === expectedSplits,
        s"True length is ${sp.splitVector(vec).length}, expected length is ${expectedSplits}")
    }
  }

  test("vector splitter maintains order") {
    for (
      bs <- Array(128, 256, 512, 1024, 2048);
      mul <- 0 to 2;
      off <- 0 to 20 by 5;
      feats <- Array(Some(bs*mul + off), None)
    ) {
      val sp = new VectorSplitter(bs, feats)
      val vec = rand(bs*mul + off)

      assert(DenseVector.vertcat(sp.splitVector(vec):_*) === vec,
        s"Recombinded split vector of length ${bs*mul + off} with block size $bs did not match its input")
    }
  }
} 
Example 191
Source File: TopKClassifierSuite.scala    From keystone   with Apache License 2.0 5 votes vote down vote up
package keystoneml.nodes.util

import breeze.linalg.DenseVector
import org.apache.spark.SparkContext
import org.scalatest.FunSuite
import keystoneml.workflow.PipelineContext

class TopKClassifierSuite extends FunSuite with PipelineContext {
  test("top k classifier, k <= vector size") {
    sc = new SparkContext("local", "test")

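    // TopKClassifier returns the indices of the k largest entries in decreasing order of value
    // (or all indices if k exceeds the vector length).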
    assert(TopKClassifier(2).apply(DenseVector(-10.0, 42.4, -43.0, 23.0)) === Array(1, 3))
    assert(TopKClassifier(4).apply(DenseVector(Double.MinValue, Double.MaxValue, 12.0, 11.0, 10.0)) === Array(1, 2, 3, 4))
    assert(TopKClassifier(3).apply(DenseVector(3.0, -23.2, 2.99)) === Array(0, 2, 1))
  }

  test("top k classifier, k > vector size") {
    sc = new SparkContext("local", "test")

    assert(TopKClassifier(5).apply(DenseVector(-10.0, 42.4, -43.0, 23.0)) === Array(1, 3, 0, 2))
    assert(TopKClassifier(2).apply(DenseVector(Double.MinValue)) === Array(0))
    assert(TopKClassifier(20).apply(DenseVector(3.0, -23.2, 2.99)) === Array(0, 2, 1))
  }

} 
Example 192
Source File: VOCLoaderSuite.scala    From keystone   with Apache License 2.0 5 votes vote down vote up
package keystoneml.loaders

import org.scalatest.FunSuite
import org.apache.spark.SparkContext
import keystoneml.utils.TestUtils
import keystoneml.workflow.PipelineContext

class VOCLoaderSuite extends FunSuite with PipelineContext {
  test("load a sample of VOC data") {
    sc = new SparkContext("local", "test")
    val dataPath = TestUtils.getTestResourceFileName("images/voc")
    val labelsPath = TestUtils.getTestResourceFileName("images/voclabels.csv")

    val imgs = VOCLoader(sc,
      VOCDataPath(dataPath, "VOCdevkit/VOC2007/JPEGImages/", Some(1)),
      VOCLabelPath(labelsPath)).collect()

    // We should have 10 images
    assert(imgs.length === 10)

    // There should be one file whose name ends with "000104.jpg"
    val personMonitor = imgs.filter(_.filename.get.endsWith("000104.jpg"))
    assert(personMonitor.length === 1)

    // It should have two labels, 14 and 19.
    assert(personMonitor(0).label.contains(14) && personMonitor(0).label.contains(19))

    // There should be 13 labels in total, 9 of them distinct.
    assert(imgs.map(_.label).flatten.length === 13)
    assert(imgs.map(_.label).flatten.distinct.length === 9)
  }
} 
Example 193
Source File: ImageNetLoaderSuite.scala    From keystone   with Apache License 2.0 5 votes vote down vote up
package keystoneml.loaders

import org.scalatest.FunSuite
import org.apache.spark.SparkContext
import keystoneml.utils.TestUtils
import keystoneml.workflow.PipelineContext

class ImageNetLoaderSuite extends FunSuite with PipelineContext {
  test("load a sample of imagenet data") {
    sc = new SparkContext("local", "test")
    val dataPath = TestUtils.getTestResourceFileName("images/imagenet")
    val labelsPath = TestUtils.getTestResourceFileName("images/imagenet-test-labels")

    val imgs = ImageNetLoader.apply(sc, dataPath, labelsPath).collect()
    // We should have 5 images
    assert(imgs.length === 5)

    // The images should all have label 12
    assert(imgs.map(_.label).distinct.length === 1)
    assert(imgs.map(_.label).distinct.head === 12)

    // The image filenames should begin with n15075141
    assert(imgs.forall(_.filename.get.startsWith("n15075141")), "Image filenames should be correct")
  }
} 
Example 194
Source File: StupidBackoffSuite.scala    From keystone   with Apache License 2.0 5 votes vote down vote up
package keystoneml.pipelines.nlp

import keystoneml.nodes.nlp._

import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD

import org.scalatest.FunSuite
import keystoneml.workflow.PipelineContext

import scala.collection.JavaConverters._

class StupidBackoffSuite extends FunSuite with PipelineContext {

  val data = Seq("Winter is coming",
    "Finals are coming",
    "Summer is coming really soon")

  def featurizer(orders: Seq[Int], mode: NGramsCountsMode.Value = NGramsCountsMode.Default) = {
    def feat(data: RDD[String]) = {
      NGramsCounts[String](mode).apply(
        (Tokenizer() andThen NGramsFeaturizer[String](orders)).apply(data).get)
    }
    feat _
  }

  def requireNGramColocation[T, V](
      ngrams: RDD[(NGram[T], V)],
      indexer: BackoffIndexer[T, NGram[T]]) = {

    ngrams.mapPartitions { part =>
      val map = new java.util.HashMap[NGram[T], V]().asScala
      part.foreach { case (ngramId, count) => map.put(ngramId, count) }

      map.keySet.foreach { ngramId =>
        var currNGram = ngramId
        while (indexer.ngramOrder(currNGram) > 2) {
          val context = indexer.removeCurrentWord(currNGram)
          require(map.contains(context),
            s"ngram $currNGram is not co-located with its context $context within same partition")
          currNGram = context
        }
      }
      Iterator.empty
    }.count()
  }

  test("end-to-end InitialBigramPartitioner") {
    sc = new SparkContext("local[4]", "StupidBackoffSuite")
    val corpus = sc.parallelize(data, 3)
    val ngrams = featurizer(2 to 5, NGramsCountsMode.NoAdd)(corpus)
    val unigrams = featurizer(1 to 1)(corpus)
      .collectAsMap()
      .map { case (key, value) => key.words(0) -> value }

    val stupidBackoff = StupidBackoffEstimator[String](unigrams).fit(ngrams)
    requireNGramColocation[String, Double](stupidBackoff.scoresRDD, new NGramIndexerImpl)
  }

  test("Stupid Backoff calculates correct scores") {
    sc = new SparkContext("local[4]", "StupidBackoffSuite")
    val corpus = sc.parallelize(data, 3)
    val ngrams = featurizer(2 to 5, NGramsCountsMode.NoAdd)(corpus)
    val unigrams = featurizer(1 to 1)(corpus)
      .collectAsMap()
      .map { case (key, value) => key.words(0) -> value }
    val lm = StupidBackoffEstimator[String](unigrams).fit(ngrams)

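    // In the corpus above, "is coming" occurs twice and "is" twice (2/2); "is coming really"
    // occurs once against "is coming" twice (1/2); "coming" occurs three times, which drives
    // the backed-off score alpha * 3 / numTokens.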
    assert(lm.score(new NGram(Seq("is", "coming"))) === 2.0 / 2.0)
    assert(lm.score(new NGram(Seq("is", "coming", "really"))) === 1.0 / 2.0)

    assert(lm.score(new NGram(Seq("is", "unseen-coming"))) === 0,
      "not equal to expected: bacoffed once & curr word unseen, so should be zero")
    assert(lm.score(new NGram(Seq("is-unseen", "coming"))) === lm.alpha * 3.0 / lm.numTokens,
      "not equal to expected: backoffed once, should be alpha * currWordCount / numTokens")
  }

} 
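
The score assertions above are consistent with the standard Stupid Backoff rule: use the relative frequency count(ngram) / count(context) when the n-gram was observed, otherwise back off to the n-gram with its farthest word dropped and scale by alpha, with unigrams normalized by the total token count. The sketch below illustrates that rule only; the names counts, alpha and numTokens are illustrative stand-ins, not the StupidBackoffEstimator API.

// Minimal sketch of Stupid Backoff scoring (assumes context counts are present
// whenever the full n-gram count is).
object StupidBackoffSketch {
  def score(ngram: Seq[String],
            counts: Map[Seq[String], Double],
            alpha: Double,
            numTokens: Long): Double =
    if (ngram.size == 1) {
      // Unigram base case: relative frequency over all tokens.
      counts.getOrElse(ngram, 0.0) / numTokens
    } else if (counts.getOrElse(ngram, 0.0) > 0.0) {
      // Observed n-gram: count divided by the count of its context (all but the last word).
      counts(ngram) / counts(ngram.dropRight(1))
    } else {
      // Unseen n-gram: drop the farthest word and scale by alpha.
      alpha * score(ngram.drop(1), counts, alpha, numTokens)
    }
}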
Example 195
Source File: MockedDefaultSourceSuite.scala    From HANAVora-Extensions   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark

import java.util.concurrent.{Callable, Executors}

import com.sap.spark.dsmock.DefaultSource
import org.apache.spark.sql.sources.HashPartitioningFunction
import org.apache.spark.sql.{GlobalSapSQLContext, Row, SQLContext}
import org.mockito.Matchers._
import org.mockito.Mockito._
import org.scalatest.FunSuite

import scala.concurrent.duration._


class MockedDefaultSourceSuite
  extends FunSuite
  with GlobalSapSQLContext {

  val testTimeout = 10 // seconds

  private def numberOfThreads: Int = {
    val noOfCores = Runtime.getRuntime.availableProcessors()
    assert(noOfCores > 0)

    if (noOfCores == 1) 2 // It should always be multithreaded although only
                          // one processor is available (pseudo-multithreading)
    else noOfCores
  }

  def runMultiThreaded[A](op: Int => A): Seq[A] = {
    info(s"Running with $numberOfThreads threads")
    val pool = Executors.newFixedThreadPool(numberOfThreads)

    val futures = 1 to numberOfThreads map { i =>
      val task = new Callable[A] {
        override def call(): A = op(i)
      }
      pool.submit(task)
    }

    futures.map(_.get(testTimeout, SECONDS))
  }

  test("Underlying mocks of multiple threads are distinct") {
    val dataSources = runMultiThreaded { _ =>
      DefaultSource.withMock(identity)
    }

    dataSources foreach { current =>
      val sourcesWithoutCurrent = dataSources.filter(_.ne(current))
      assert(sourcesWithoutCurrent.forall(_.underlying ne current))
    }
  }

  test("Mocking works as expected") {
    runMultiThreaded { i =>
      DefaultSource.withMock { defaultSource =>
        when(defaultSource.getAllPartitioningFunctions(
          anyObject[SQLContext],
          anyObject[Map[String, String]]))
          .thenReturn(Seq(HashPartitioningFunction(s"foo$i", Seq.empty, None)))

        val Array(Row(name)) = sqlc
          .sql("SHOW PARTITION FUNCTIONS USING com.sap.spark.dsmock")
          .select("name")
          .collect()

        assertResult(s"foo$i")(name)
      }
    }
  }
} 
Example 196
Source File: HierarchyBuilderSuite.scala    From HANAVora-Extensions   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.sql.hierarchy

import org.apache.spark.SparkConf
import org.apache.spark.serializer.JavaSerializer
import org.apache.spark.sql.Row
import org.apache.spark.sql.types._
import org.apache.spark.sql.types.Node
import org.scalatest.FunSuite

class HierarchyBuilderSuite extends FunSuite {

  val N = 5
  val rowFunctions = HierarchyRowFunctions(Seq.fill(N)(StringType))

  test("HierarchyRowFunctions.rowGet") {
    for (i <- 0 to 5) {
      val row = Row((0 to 5).map(_.toString): _*)
      assertResult(i.toString)(rowFunctions.rowGet(i)(row))
    }
  }

  test("HierarchyRowFunctions.rowInit") {
    for (i <- 0 to 5) {
      val row = Row((0 to 5).map(_.toString): _*)

      val result = rowFunctions.rowInit(rowFunctions.rowGet(i), StringType)(row, None)
      val expected = Row(row.toSeq :+ Node(List(i.toString), StringType): _*)
      assertResult(expected)(result)
    }
  }

  // scalastyle:off magic.number
  test("HierarchyRowFunctions.rowInitWithOrder") {
    for (i <- 0 to 5) {
      val row = Row((0 to 5).map(_.toString): _*)
      val result = rowFunctions.rowInit(rowFunctions.rowGet(i), StringType)(row, Some(42L))
      val expected = Row(row.toSeq :+ Node(List(i.toString),StringType, ordPath = List(42L)): _*)
      assertResult(expected)(result)
    }
  }
  // scalastyle:on magic.number

  test("HierarchyRowFunctions.rowModify") {
    for (i <- 0 to 5) {
      val rightRow = Row(0 to 5: _*)
      val leftRow = Row("foo", 0, "bar", Node(List(0),StringType))
      val result = rowFunctions.rowModify(
        rowFunctions.rowGet(i),StringType
      )(leftRow, rightRow)
      val expected = Row((0 to 5) :+ Node(List(0, i), StringType): _*)
      assertResult(expected)(result)
    }
  }

  // scalastyle:off magic.number
  test("HierarchyRowFunctions.rowModifyAndOrder") {
    for (i <- 0 to 5) {
      val rightRow = Row(0 to 5: _*)
      val leftRow = Row("foo", 0, "bar", Node(List(0),StringType))
      val result = rowFunctions.rowModifyAndOrder(
        rowFunctions.rowGet(i), StringType
      )(leftRow, rightRow, Some(42L))
      val expected = Row((0 to 5) :+ Node(List(0, i), StringType, ordPath = List(42L)): _*)
      assertResult(expected)(result)
    }
  }
  // scalastyle:on magic.number

  test("HierarchyBuilder closure is serializable") {
    val closureSerializer = new JavaSerializer(new SparkConf(loadDefaults = false)).newInstance()
    val serialized = closureSerializer.serialize(() =>
      HierarchyJoinBuilder(null, null, null, null, null, null))
  }

  test("HierarchyRowFunctions closure is serializable") {
    val closureSerializer = new JavaSerializer(new SparkConf(loadDefaults = false)).newInstance()
    val serialized = closureSerializer.serialize(() =>
      HierarchyRowJoinBuilder(null, null, null, null))
  }

} 
Example 197
Source File: ExtendableOptimizerSuite.scala    From HANAVora-Extensions   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.sql.extension

import org.apache.spark.sql.catalyst.optimizer.{FiltersReduction, Optimizer}
import org.apache.spark.sql.extension.OptimizerFactory.ExtendableOptimizerBatch
import org.scalatest.{FunSuite, PrivateMethodTester}

class ExtendableOptimizerSuite extends FunSuite with PrivateMethodTester {

  implicit class OptimizerOps(opt: Optimizer) {
    private val nameMethod = PrivateMethod[String]('name)
    private def batches: Seq[AnyRef] = {
      // Retrieve the optimizer's protected "batches" sequence via reflection.
      val clazz = opt.getClass
      val batchesMethod = clazz.getMethods.find(_.getName == "batches").get
      batchesMethod.setAccessible(true)
      batchesMethod.invoke(opt).asInstanceOf[Seq[AnyRef]]
    }
    def batchNames: Seq[String] =
      batches map { b => b invokePrivate nameMethod() }
  }

  test("No rules is equivalent to DefaultOptimizer") {
    val extOpt = OptimizerFactory.produce()
    val defOpt = OptimizerFactoryForTests.default()
    assert(extOpt.batchNames == defOpt.batchNames)
  }

  test("One early batch is added before the main optimizer batch") {
    val extOpt = OptimizerFactory.produce(
      earlyBatches = ExtendableOptimizerBatch("FOO", 1, FiltersReduction :: Nil) :: Nil
    )

    assert(extOpt.batchNames match {
      case subQueries :: early :: other => early.equals("FOO")
    })
  }

  test("Several early batches are added before the main optimizer batch") {
    val extOpt = OptimizerFactory.produce(
      earlyBatches = ExtendableOptimizerBatch("FOO", 1, FiltersReduction :: Nil) ::
        ExtendableOptimizerBatch("BAR", 1, FiltersReduction :: Nil) ::
        Nil
    )

    assert(extOpt.batchNames match {
      case subQueries :: firstEarly :: secondEarly :: other =>
        firstEarly.equals("FOO") && secondEarly.equals("BAR")
    })
  }

  test("Expression rules are added") {
    val extOpt = OptimizerFactory.produce(
      mainBatchRules = FiltersReduction :: Nil
    )
    val defOpt = OptimizerFactoryForTests.default()
    assert(extOpt.batchNames == defOpt.batchNames)
  }

  test("Both rules are added") {
    val extOpt = OptimizerFactory.produce(
      earlyBatches = ExtendableOptimizerBatch("FOO", 1, FiltersReduction :: Nil) :: Nil,
      mainBatchRules = FiltersReduction :: Nil
    )
    val defOpt = OptimizerFactoryForTests.default()
    assert(extOpt.batchNames.toSet ==
      defOpt.batchNames.toSet ++ Seq("FOO"))
  }
} 
Example 198
Source File: ShowPartitionFunctionsSuite.scala    From HANAVora-Extensions   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.sql

import com.sap.spark.dstest.DefaultSource
import org.apache.spark.sql.sources.Stride
import org.apache.spark.util.PartitioningFunctionUtils
import org.scalatest.FunSuite

class ShowPartitionFunctionsSuite
  extends FunSuite
  with GlobalSapSQLContext
  with PartitioningFunctionUtils {

  override def beforeEach(): Unit = {
    super.beforeEach()
    DefaultSource.reset()
  }

  test("Show partition functions shows no partitioning functions if none are there") {
    val funs = sqlc.sql("SHOW PARTITION FUNCTIONS USING com.sap.spark.dstest").collect()

    assert(funs.isEmpty)
  }

  // scalastyle:off magic.number
  test("Show partition functions shows the previously registered partitioning functions") {
    createHashPartitioningFunction("foo", Seq("string", "float"), Some(10), "com.sap.spark.dstest")
    createRangePartitioningFunction("bar", "int", 0, 10, Stride(10), "com.sap.spark.dstest")
    createRangeSplitPartitioningFunction("baz", "float", Seq(1, 2, 3),
      rightClosed = true, "com.sap.spark.dstest")

    val funs = sqlc.sql("SHOW PARTITION FUNCTIONS USING com.sap.spark.dstest").collect()

    assertResult(Set(
      Row("baz", "RangeSplitPartitioningFunction", "FloatType", "1,2,3",
        true, null, null, null, null, null),
      Row("foo", "HashPartitioningFunction", "StringType,FloatType", null,
        null, null, null, null, null, 10),
      Row("bar", "RangeIntervalPartitioningFunction", "IntegerType", null, null,
        0, 10, "Stride", 10, null)))(funs.toSet)
  }
  // scalastyle:on magic.number

  // scalastyle:off magic.number
  test("Show partition functions does not show deleted functions") {
    createHashPartitioningFunction("foo", Seq("string", "float"), Some(10), "com.sap.spark.dstest")
    createRangePartitioningFunction("bar", "int", 0, 10, Stride(10), "com.sap.spark.dstest")
    createRangeSplitPartitioningFunction("baz", "float", Seq(1, 2, 3),
      rightClosed = true, "com.sap.spark.dstest")

    val f1 = sqlc.sql("SHOW PARTITION FUNCTIONS USING com.sap.spark.dstest").collect()

    assertResult(Set(
      Row("baz", "RangeSplitPartitioningFunction", "FloatType", "1,2,3",
        true, null, null, null, null, null),
      Row("foo", "HashPartitioningFunction", "StringType,FloatType", null,
        null, null, null, null, null, 10),
      Row("bar", "RangeIntervalPartitioningFunction", "IntegerType", null, null,
        0, 10, "Stride", 10, null)))(f1.toSet)

    dropPartitioningFunction("bar", dataSource = "com.sap.spark.dstest")

    val f2 = sqlc.sql("SHOW PARTITION FUNCTIONS USING com.sap.spark.dstest").collect()

    assertResult(Set(
      Row("baz", "RangeSplitPartitioningFunction", "FloatType", "1,2,3",
        true, null, null, null, null, null),
      Row("foo", "HashPartitioningFunction", "StringType,FloatType", null,
        null, null, null, null, null, 10)))(f2.toSet)
  }
  // scalastyle:on magic.number
} 
Example 199
Source File: DualKeyPartialSortedMapSuite.scala    From HANAVora-Extensions   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.sql.currency.basic

import org.scalatest.FunSuite

class DualKeyPartialSortedMapSuite extends FunSuite {
  type UnorderedKey = (String, String)
  type OrderedKey = Int

  test("usage with un/existing unordered and ordered keys") {
    val dkMap = new DualKeyPartialSortedMap[UnorderedKey, OrderedKey, Int]()
    val N = 300
    // Insert values only at even ordered keys, so lookups at odd keys must floor to the previous even key.
    (0 to N).foreach { i =>
      dkMap.put(("A", "B"), i*2, i*2)
      dkMap.put(("A", "C"), i*2, i*2)
      dkMap.put(("B", "C"), i*2, i*2)
    }
    (0 to N*2).foreach { i =>
      val checker = i % 2 == 0 match {
        case true => i
        case false => i - 1
      }
      assert(dkMap.getSortedKeyFloorValue(("A", "B"), i).get == checker)
      assert(dkMap.getSortedKeyFloorValue(("A", "C"), i).get == checker)
      assert(dkMap.getSortedKeyFloorValue(("B", "C"), i).get == checker)
      assert(dkMap.getSortedKeyFloorValue(("A", "Y"), i).isEmpty)
      assert(dkMap.getSortedKeyFloorValue(("X", "Y"), i).isEmpty)
    }
    assert(dkMap.getSortedKeyFloorValue(("A", "B"), -1).isEmpty)
  }

  test("floor") {
    val dkMap = new DualKeyPartialSortedMap[UnorderedKey, OrderedKey, Int]()
    val unorderedKey = ("A", "B")
    val orderedKey = 50
    val lowerOrderedKey = 1
    val higherOrderedKey = 10000
    val value = 111
    dkMap.put(unorderedKey, orderedKey, value)
    assert(dkMap.getSortedKeyFloorValue(unorderedKey, lowerOrderedKey).isEmpty)
    assert(dkMap.getSortedKeyFloorValue(unorderedKey, orderedKey).get == value)
    assert(dkMap.getSortedKeyFloorValue(unorderedKey, higherOrderedKey).get == value)
  }

  test("empty map") {
    val dkMap = new DualKeyPartialSortedMap[UnorderedKey, OrderedKey, Int]()
    assert(dkMap.getSortedKeyFloorValue(("A", "A"), 0).isEmpty)
  }

  test("clear") {
    val dkMap = new DualKeyPartialSortedMap[UnorderedKey, OrderedKey, Int]()
    val unorderedKey = ("A", "B")
    val orderedKey = 50
    val value = 111
    dkMap.put(unorderedKey, orderedKey, value)
    assert(dkMap.getSortedKeyFloorValue(unorderedKey, orderedKey).get == value)
    dkMap.clear()
    assert(dkMap.getSortedKeyFloorValue(unorderedKey, orderedKey).isEmpty)
  }
} 
Example 200
Source File: KPISmokeSuite.scala    From HANAVora-Extensions   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.sql

import com.sap.commons.TimingTestUtils
import com.sap.spark.util.TestUtils._
import org.apache.spark.util.DummyRelationUtils._
import org.apache.spark.sql.types.StructType
import org.scalatest.FunSuite

// NOTE: The original class header and its helper members (acceptedCorrelation,
// createTestTable, dropTestTable) are elided in this excerpt; the declaration below
// is reconstructed from the file name and the other suites in this package.
class KPISmokeSuite extends FunSuite with GlobalSapSQLContext {
  test("Create/drop does not change with number of runs") {
    val sampleSize = 40
    val warmup = 10
    def action(): Unit = {
      createTestTable()
      dropTestTable()
    }
    val (res, corr, samples) =
      TimingTestUtils.executionTimeNotCorrelatedWithRuns(
        acceptedCorrelation, warmup, sampleSize)(action)
    assert(res.booleanValue(), s"Correlation check failed. Correlation is $corr. " +
      s"Accepted correlation is $acceptedCorrelation. Determined samples: $samples")
  }
}