org.junit.Assert Scala Examples
The following examples show how to use org.junit.Assert.
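Before the project-specific examples below, here is a minimal, self-contained sketch (not taken from any of the listed projects; the class name is made up for illustration) of the org.junit.Assert calls that recur throughout them: assertEquals with and without a failure message, the delta overload for floating-point values, assertTrue/assertFalse, and assertArrayEquals.

import org.junit.{Assert, Test}

class AssertBasicsTest {

  @Test
  def objectAndCollectionAssertions(): Unit = {
    // assertEquals compares with equals(); the expected value comes first.
    Assert.assertEquals("one", List("one", "two").head)
    // Floating-point comparisons need an explicit tolerance (delta).
    Assert.assertEquals(0.3, 0.1 + 0.2, 0.000001)
    // Boolean checks.
    Assert.assertTrue(Seq(1, 2, 3).contains(2))
    Assert.assertFalse(Seq(1, 2, 3).isEmpty)
    // Arrays are compared element by element.
    Assert.assertArrayEquals(Array[Object]("a", "b"), Array[Object]("a", "b"))
  }

  @Test
  def failureMessages(): Unit = {
    // The optional first argument is the message shown when the assertion fails.
    Assert.assertEquals("unexpected list size", 3L, Seq(1, 2, 3).size.toLong)
  }
}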
Example 1
Source File: TestFunctionality.scala From incubator-daffodil with Apache License 2.0
package org.apache.daffodil.functionality

import java.util.Locale

import org.junit.Assert
import org.junit.Test

import com.ibm.icu.text.SimpleDateFormat
import com.ibm.icu.util.Calendar
import com.ibm.icu.util.GregorianCalendar

class TestFunctionality {
  @Test def test_calendar_format_timezone(): Unit = {
    val value = "08:43:00.000000-08:00"
    val cal = new GregorianCalendar()
    val pos = new java.text.ParsePosition(0)
    val patternIn = "HH:mm:ss.SSSSSSxxxxx"
    new com.ibm.icu.text.SimpleDateFormat(patternIn).parse(value, cal, pos)
    cal.getTime

    val pattern = "hh:mm.V"
    val locale = Locale.ENGLISH
    val calendar = Calendar.getInstance(locale)
    calendar.clear()

    val formatter = new SimpleDateFormat(pattern, locale)
    // Make sure you've done 'sbt update-classifiers' to pull in all the source.
    // Step through the format here and into SimpleDateFormat -> case: 29 aka V -> TimeZoneFormat.format
    // It would appear as though the SimpleTimeZone GMT-0800 is not recognized in the ICU library
    // and so returns this 'unk' value rather than 'uslax'. Even though -0800 GMT IS uslax!!! :(
    val str = formatter.format(cal)
    //Console.out.println(str)
    Assert.assertEquals("08:43.unk", str)
  }
}
Example 2
Source File: UtilsTest.scala From spark-http-stream with BSD 2-Clause "Simplified" License
import java.sql.Date

import org.apache.spark.SparkConf
import org.apache.spark.serializer.KryoSerializer
import org.apache.spark.sql.SparkSession
import org.junit.Assert
import org.junit.Test
import java.io.ByteArrayOutputStream
import java.io.InputStream
import org.apache.commons.io.IOUtils
import com.esotericsoftware.kryo.io.Input
import org.apache.spark.sql.execution.streaming.http.KryoSerializerUtils

class UtilsTest {
  @Test
  def testKryoSerDe() {
    val d1 = new Date(30000);
    val bytes = KryoSerializerUtils.serialize(d1);
    val d2 = KryoSerializerUtils.deserialize(bytes);
    Assert.assertEquals(d1, d2);

    val d3 = Map('x' -> Array("aaa", "bbb"), 'y' -> Array("ccc", "ddd"));
    println(d3);
    val bytes2 = KryoSerializerUtils.serialize(d3);
    val d4 = KryoSerializerUtils.deserialize(bytes2).asInstanceOf[Map[String, Any]];
    println(d4);
  }

  @Test
  def testEncoderSchema() {
    val spark = SparkSession.builder.master("local[4]")
      .getOrCreate();
    val sqlContext = spark.sqlContext;
    import sqlContext.implicits._
    import org.apache.spark.sql.catalyst.encoders.encoderFor

    val schema1 = encoderFor[String].schema;
    val schema2 = encoderFor[(String)].schema;
    val schema3 = encoderFor[((String))].schema;
    Assert.assertEquals(schema1, schema2);
    Assert.assertEquals(schema1, schema3);
  }

  @Test
  def testDateInTuple() {
    val spark = SparkSession.builder.master("local[4]")
      .getOrCreate();
    val sqlContext = spark.sqlContext;
    import sqlContext.implicits._
    val d1 = new Date(30000);
    val ds = sqlContext.createDataset(Seq[(Int, Date)]((1, d1)));
    val d2 = ds.collect()(0)._2;
    //NOTE: d1!=d2, maybe a bug
    println(d1.equals(d2));
  }
}
Example 3
Source File: HttpStreamServerClientTest.scala From spark-http-stream with BSD 2-Clause "Simplified" License
import org.apache.spark.SparkConf
import org.apache.spark.serializer.KryoSerializer
import org.apache.spark.sql.Row
import org.apache.spark.sql.execution.streaming.http.HttpStreamClient
import org.junit.Assert
import org.junit.Test
import org.apache.spark.sql.types.LongType
import org.apache.spark.sql.types.IntegerType
import org.apache.spark.sql.types.DoubleType
import org.apache.spark.sql.types.BooleanType
import org.apache.spark.sql.types.FloatType
import org.apache.spark.sql.types.StringType
import org.apache.spark.sql.catalyst.expressions.GenericRowWithSchema
import org.apache.spark.sql.types.StructType
import org.apache.spark.sql.types.StructField
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.types.ByteType
import org.apache.spark.sql.execution.streaming.http.HttpStreamServer
import org.apache.spark.sql.execution.streaming.http.StreamPrinter
import org.apache.spark.sql.execution.streaming.http.HttpStreamServerSideException

class HttpStreamServerClientTest {
  val ROWS1 = Array(Row("hello1", 1, true, 0.1f, 0.1d, 1L, '1'.toByte),
    Row("hello2", 2, false, 0.2f, 0.2d, 2L, '2'.toByte),
    Row("hello3", 3, true, 0.3f, 0.3d, 3L, '3'.toByte));

  val ROWS2 = Array(Row("hello"),
    Row("world"),
    Row("bye"),
    Row("world"));

  @Test
  def testHttpStreamIO() {
    //starts a http server
    val kryoSerializer = new KryoSerializer(new SparkConf());
    val server = HttpStreamServer.start("/xxxx", 8080);

    val spark = SparkSession.builder.appName("testHttpTextSink").master("local[4]")
      .getOrCreate();
    spark.conf.set("spark.sql.streaming.checkpointLocation", "/tmp/");

    val sqlContext = spark.sqlContext;
    import spark.implicits._
    //add a local message buffer to server, with 2 topics registered
    server.withBuffer()
      .addListener(new StreamPrinter())
      .createTopic[(String, Int, Boolean, Float, Double, Long, Byte)]("topic-1")
      .createTopic[String]("topic-2");

    val client = HttpStreamClient.connect("http://localhost:8080/xxxx");
    //tests schema of topics
    val schema1 = client.fetchSchema("topic-1");
    Assert.assertArrayEquals(Array[Object](StringType, IntegerType, BooleanType, FloatType, DoubleType, LongType, ByteType),
      schema1.fields.map(_.dataType).asInstanceOf[Array[Object]]);

    val schema2 = client.fetchSchema("topic-2");
    Assert.assertArrayEquals(Array[Object](StringType),
      schema2.fields.map(_.dataType).asInstanceOf[Array[Object]]);

    //prepare to consume messages
    val sid1 = client.subscribe("topic-1")._1;
    val sid2 = client.subscribe("topic-2")._1;

    //produces some data
    client.sendRows("topic-1", 1, ROWS1);

    val sid4 = client.subscribe("topic-1")._1;
    val sid5 = client.subscribe("topic-2")._1;

    client.sendRows("topic-2", 1, ROWS2);

    //consumes data
    val fetched = client.fetchStream(sid1).map(_.originalRow);
    Assert.assertArrayEquals(ROWS1.asInstanceOf[Array[Object]], fetched.asInstanceOf[Array[Object]]);
    //it is empty now
    Assert.assertArrayEquals(Array[Object](), client.fetchStream(sid1).map(_.originalRow).asInstanceOf[Array[Object]]);
    Assert.assertArrayEquals(ROWS2.asInstanceOf[Array[Object]], client.fetchStream(sid2).map(_.originalRow).asInstanceOf[Array[Object]]);
    Assert.assertArrayEquals(Array[Object](), client.fetchStream(sid4).map(_.originalRow).asInstanceOf[Array[Object]]);
    Assert.assertArrayEquals(ROWS2.asInstanceOf[Array[Object]], client.fetchStream(sid5).map(_.originalRow).asInstanceOf[Array[Object]]);
    Assert.assertArrayEquals(Array[Object](), client.fetchStream(sid5).map(_.originalRow).asInstanceOf[Array[Object]]);

    client.unsubscribe(sid4);
    try {
      client.fetchStream(sid4);
      //exception should be thrown, because subscriber id is invalidated
      Assert.assertTrue(false);
    }
    catch {
      case e: Throwable ⇒
        e.printStackTrace();
        Assert.assertEquals(classOf[HttpStreamServerSideException], e.getClass);
    }

    server.stop();
  }
}
Example 4
Source File: FactoryTest.scala From scala-library-compat with Apache License 2.0
package test.scala.collection

import org.junit.{Assert, Test}

import scala.collection.compat._
import scala.collection.{BitSet, immutable, mutable}

class FactoryTest {

  implicitly[Factory[Char, String]]
  implicitly[Factory[Char, Array[Char]]]
  implicitly[Factory[Int, collection.BitSet]]
  implicitly[Factory[Int, mutable.BitSet]]
  implicitly[Factory[Int, immutable.BitSet]]
  implicitly[Factory[Nothing, Seq[Nothing]]]
  def f[A] = implicitly[Factory[A, Stream[A]]]

  BitSet: Factory[Int, BitSet]
  Iterable: Factory[Int, Iterable[Int]]
  immutable.TreeSet: Factory[Int, immutable.TreeSet[Int]]
  Map: Factory[(Int, String), Map[Int, String]]
  immutable.TreeMap: Factory[(Int, String), immutable.TreeMap[Int, String]]

  @Test
  def streamFactoryPreservesLaziness(): Unit = {
    val factory = implicitly[Factory[Int, Stream[Int]]]
    var counter = 0
    val source = Stream.continually { counter += 1; 1 }
    val result = factory.fromSpecific(source)
    Assert.assertEquals(1, counter) // One element has been evaluated because Stream is not lazy in its head
  }

  @Test
  def factoriesAreReusable(): Unit = {
    def generically[M[X] <: Iterable[X]](in: M[Int], factory: Factory[Int, M[Int]]): Unit = {
      val l = Iterator(-3, -2, -1).to(factory)
      val m = in.iterator.to(factory)
      Assert.assertEquals(in, m)
    }

    generically[List](List(1, 2, 3), List)
    generically[Seq](Seq(1, 2, 3), Seq)
    generically[IndexedSeq](IndexedSeq(1, 2, 3), IndexedSeq)
    generically[Vector](Vector(1, 2, 3), Vector)
    generically[Set](Set(1, 2, 3), Set)
  }
}
Example 5
Source File: ArraySeqTest.scala From scala-library-compat with Apache License 2.0
package test.scala.collection import org.junit.{Assert, Test} import scala.collection.compat.immutable.ArraySeq // The unmodified ArraySeqTest from collection-strawman class ArraySeqTest { @Test def slice(): Unit = { implicit def array2ArraySeq[T](array: Array[T]): ArraySeq[T] = ArraySeq.unsafeWrapArray(array) val booleanArray = Array(true, false, true, false) check(booleanArray, Array(true, false), Array(false, true)) val shortArray = Array(1.toShort, 2.toShort, 3.toShort, 4.toShort) check(shortArray, Array(1.toShort, 2.toShort), Array(2.toShort, 3.toShort)) val intArray = Array(1, 2, 3, 4) check(intArray, Array(1, 2), Array(2, 3)) val longArray = Array(1L, 2L, 3L, 4L) check(longArray, Array(1L, 2L), Array(2L, 3L)) val byteArray = Array(1.toByte, 2.toByte, 3.toByte, 4.toByte) check(byteArray, Array(1.toByte, 2.toByte), Array(2.toByte, 3.toByte)) val charArray = Array('1', '2', '3', '4') check(charArray, Array('1', '2'), Array('2', '3')) val doubleArray = Array(1.0, 2.0, 3.0, 4.0) check(doubleArray, Array(1.0, 2.0), Array(2.0, 3.0)) val floatArray = Array(1.0f, 2.0f, 3.0f, 4.0f) check(floatArray, Array(1.0f, 2.0f), Array(2.0f, 3.0f)) val refArray = Array("1", "2", "3", "4") check[String](refArray, Array("1", "2"), Array("2", "3")) def unit1(): Unit = {} def unit2(): Unit = {} Assert.assertEquals(unit1, unit2) // unitArray is actually an instance of Immutable[BoxedUnit], the check to which is actually checked slice // implementation of ofRef val unitArray: ArraySeq[Unit] = Array(unit1, unit2, unit1, unit2) check(unitArray, Array(unit1, unit1), Array(unit1, unit1)) } private def check[T](array: ArraySeq[T], expectedSliceResult1: ArraySeq[T], expectedSliceResult2: ArraySeq[T]) { Assert.assertEquals(array, array.slice(-1, 4)) Assert.assertEquals(array, array.slice(0, 5)) Assert.assertEquals(array, array.slice(-1, 5)) Assert.assertEquals(expectedSliceResult1, array.slice(0, 2)) Assert.assertEquals(expectedSliceResult2, array.slice(1, 3)) Assert.assertEquals(ArraySeq.empty[Nothing], array.slice(1, 1)) Assert.assertEquals(ArraySeq.empty[Nothing], array.slice(2, 1)) } }
Example 6
Source File: BinaryCompaTest.scala From scala-library-compat with Apache License 2.0
import org.junit.{Assert, Test}

import build.BuildInfo._

import com.typesafe.tools.mima.lib.MiMaLib
import com.typesafe.tools.mima.core.Config

class BinaryCompaTest {
  @Test
  def compat(): Unit = {
    Config.setup("foo", Array(oldClasspath, newClasspath))
    val mima = new MiMaLib(Config.baseClassPath)
    val allProblems = mima.collectProblems(oldClasspath, newClasspath)
    val unexpectedDescriptions = allProblems.iterator
      .map(_.description("new")) // code improvement: it would be more standard to use a ProblemFilter here
      .filterNot(
        _ == "static method id(scala.collection.Iterable,scala.collection.generic.CanBuildFrom)scala.collection.Iterable in class org.example.Lib has a different signature in new version, where it is <A:Ljava/lang/Object;C::Lscala/collection/Iterable<Ljava/lang/Object;>;>(TC;Lscala/collection/generic/CanBuildFrom<Lscala/runtime/Nothing$;TA;TC;>;)TC; rather than <A:Ljava/lang/Object;C::Lscala/collection/Iterable<Ljava/lang/Object;>;>(TC;Lscala/collection/generic/CanBuildFrom<TC;TA;TC;>;)TC;")
      .filterNot(
        _ == "method id(scala.collection.Iterable,scala.collection.generic.CanBuildFrom)scala.collection.Iterable in object org.example.Lib has a different signature in new version, where it is <A:Ljava/lang/Object;C::Lscala/collection/Iterable<Ljava/lang/Object;>;>(TC;Lscala/collection/generic/CanBuildFrom<Lscala/runtime/Nothing$;TA;TC;>;)TC; rather than <A:Ljava/lang/Object;C::Lscala/collection/Iterable<Ljava/lang/Object;>;>(TC;Lscala/collection/generic/CanBuildFrom<TC;TA;TC;>;)TC;")
      .toList
    val msg = unexpectedDescriptions.mkString(
      s"The following ${unexpectedDescriptions.size} problems were reported but not expected:\n - ",
      "\n - ",
      "\n")
    Assert.assertEquals(msg, Nil, unexpectedDescriptions)
  }
}
Example 7
Source File: CarbonTableSchemaCommonSuite.scala From carbondata with Apache License 2.0
package org.apache.spark.sql.execution.command import org.apache.spark.sql.AnalysisException import org.apache.spark.sql.test.util.QueryTest import org.junit.Assert import org.scalatest.BeforeAndAfterAll import org.apache.carbondata.spark.exception.ProcessMetaDataException class CarbonTableSchemaCommonSuite extends QueryTest with BeforeAndAfterAll { test("Creating table: Duplicate dimensions found with name, it should throw AnalysisException") { sql("DROP TABLE IF EXISTS carbon_table") try { sql( s""" | CREATE TABLE carbon_table( | BB INT, bb char(10) | ) | STORED AS carbondata """.stripMargin) Assert.assertTrue(false) } catch { case _: AnalysisException => Assert.assertTrue(true) case _: Exception => Assert.assertTrue(false) } finally { sql("DROP TABLE IF EXISTS carbon_table") } } test("Altering table: Duplicate column found with name, it should throw RuntimeException") { sql("DROP TABLE IF EXISTS carbon_table") sql( s""" | CREATE TABLE if not exists carbon_table( | BB INT, cc char(10) | ) | STORED AS carbondata """.stripMargin) val ex = intercept[ProcessMetaDataException] { sql( s""" | alter TABLE carbon_table add columns( | bb char(10) ) """.stripMargin) } sql("DROP TABLE IF EXISTS carbon_table") } }
Example 8
Source File: ExprTests.scala From weld-java with BSD 3-Clause "New" or "Revised" License
package weld.expressions import org.junit.{Assert, Test} import weld.WeldStruct.struct import weld._ import weld.expressions.DSL._ class ExprTests { @Test def testBuilding(): Unit = { val program = Lambda(Seq(Identifier("a", i64), Identifier("b", i64)), Add(Identifier("a"), Identifier("b"))) Assert.assertEquals("|a: i64, b: i64| (a + b)", program.flatDesc) } @Test def testRangeIter(): Unit = { def program = lambda() { ctx => import ctx._ val loop = let("loop", fore(zip(rangeIter(0L, 10L, 1L)), appender(i64)) { (_, b, _, n) => merge(b, n + 1L) }) result(loop) } Resources.withCleanup { track => val module = track(WeldModule.compile(program.flatDesc)) val input = track(WeldValue.empty()) val output = track(module.run(input)) .result(VecType.vecOf(i64)) .getVec(0) assert(output.numElements == 10) assert(output.getLong(0) == 1L) } } @Test def testSort(): Unit = { def program = { val input = "input".vec(i32) lambda(input) { ctx => ctx.sort(input)((_, n) => -n) } } Resources.withCleanup { track => val module = track(WeldModule.compile(program.flatDesc)) val input = track(struct(WeldVec.vec(Array(1, 4, 3))).toValue) val output = track(module.run(input)) .result(VecType.vecOf(i32)) .getVec(0) assert(output.numElements == 3) assert(output.getInt(0) == 4) assert(output.getInt(1) == 3) assert(output.getInt(2) == 1) } } }
Example 9
Source File: DeduplicatedCollectorRegistrySuite.scala From spark-metrics with Apache License 2.0
package com.banzaicloud.spark.metrics

import com.codahale.metrics.MetricRegistry
import io.prometheus.client.{Collector, CollectorRegistry}
import io.prometheus.client.dropwizard.DropwizardExports
import org.junit.{Assert, Test}

import scala.collection.JavaConverters._

class DeduplicatedCollectorRegistrySuite {
  @Test def testDeduplication(): Unit = {
    // given
    val baseRegistry = new MetricRegistry
    val registryA = new MetricRegistry
    val counterA = registryA.counter("counter")
    counterA.inc(20)
    counterA.inc(30)
    val registryB = new MetricRegistry
    val counterB = registryB.counter("counter")
    counterB.inc(40)
    counterB.inc(50)
    baseRegistry.register("hive_", registryA)
    baseRegistry.register("hive.", registryB)
    val metricsExports = new DropwizardExports(baseRegistry)
    val deduplicatedCollectorRegistry = new DeduplicatedCollectorRegistry(new CollectorRegistry(true))

    // when
    metricsExports.register(deduplicatedCollectorRegistry)
    val samples = deduplicatedCollectorRegistry.metricFamilySamples()

    // then
    val actual = samples
      .asScala
      .filter(mfs => mfs.`type` == Collector.Type.GAUGE && mfs.name == "hive__counter")
    Assert.assertEquals(1, actual.size)
  }
}
Example 10
Source File: KryoStringEventBatchTest.scala From maha with Apache License 2.0
// Copyright 2017, Yahoo Holdings Inc.
// Licensed under the terms of the Apache License 2.0. Please see LICENSE file in project root for terms.
package com.yahoo.maha.log

import com.esotericsoftware.kryo.Kryo
import com.esotericsoftware.kryo.io.{Input, Output}
import com.yahoo.maha.data.StringEventBatch
import org.junit.{Assert, Test}

class KryoStringEventBatchTest {
  @Test
  @throws[Exception]
  def test(): Unit = {
    val kryo = new Kryo
    kryo.register(classOf[StringEventBatch], new KryoStringEventBatch)
    val builder = new StringEventBatch.Builder(3)
    builder.add("one")
    builder.add("two")
    builder.add("three")
    val recordList = builder.build.asInstanceOf[StringEventBatch]
    val output = new Output(new Array[Byte](1024 * 1024 + 1))
    kryo.writeObject(output, recordList)
    System.out.println("output.position=" + output.position)
    val input = new Input(output.getBuffer, 0, output.total.toInt)
    val resultRecordList = kryo.readObject(input, classOf[StringEventBatch])
    Assert.assertEquals(resultRecordList.getEvents.get(0), "one")
    Assert.assertEquals(resultRecordList.getEvents.get(1), "two")
    Assert.assertEquals(resultRecordList.getEvents.get(2), "three")
    val output2 = new Output(new Array[Byte](1024 * 1024 + 1))
    kryo.writeObject(output2, resultRecordList)
  }
}
Example 11
Source File: RawDataTest.scala From reforest with Apache License 2.0
package reforest.data import org.junit.{Assert, Test} class RawDataTest { @Test def constructDense() = { val dataDense = RawData.dense[Double, Integer](Array(1, 2.3, 0.7, -5.23, 123.34), -100) Assert.assertEquals(5, dataDense.size) Assert.assertEquals(1, dataDense(0), 0.00001) Assert.assertEquals(-5.23, dataDense(3), 0.00001) } @Test def constructSparse() = { val dataSparse = RawData.sparse[Double, Integer](87, Array(0, 5, 10, 23, 24), Array(1, 2.3, 0.7, -5.23, 123.34), -100) Assert.assertEquals(87, dataSparse.size) Assert.assertEquals(1, dataSparse(0), 0.00001) Assert.assertEquals(-5.23, dataSparse(23), 0.00001) Assert.assertEquals(-100, dataSparse(55), 0.00001) } @Test def compressed() = { val dataDense = RawData.dense[Double, Integer](Array(1, 2.3, 0.7, -5.23, 123.34), -100) val dataSparse = RawData.sparse[Double, Integer](87, Array(0, 5, 10, 23, 24), Array(1, 2.3, 0.7, -5.23, 123.34), -100) val dataSparseCompressable = RawData.sparse[Double, Integer](5, Array(0, 1, 2, 3, 4), Array(1, 2.3, 0.7, -5.23, 123.34), -100) Assert.assertEquals(true, dataSparseCompressable.compressed match {case v : RawDataDense[Double,Integer] => true; case _ => false}) Assert.assertEquals(5, dataSparseCompressable.compressed.size) } @Test def numActives() = { val dataDense = RawData.dense[Double, Integer](Array(1, 2.3, 0.7, -5.23, 123.34), -100) val dataSparse = RawData.sparse[Double, Integer](87, Array(0, 5, 10, 23, 24), Array(1, 2.3, 0.7, -5.23, 123.34), -100) Assert.assertEquals(5, dataDense.numActives) Assert.assertEquals(5, dataSparse.numActives) } @Test def numNonzeros() = { val dataDense = RawData.dense[Double, Integer](Array(1, 2.3, 0.7, -5.23, 123.34), -100) val dataSparse = RawData.sparse[Double, Integer](87, Array(0, 5, 10, 23, 24), Array(1, 2.3, 0.7, -5.23, 123.34), -100) Assert.assertEquals(5, dataDense.numNonzeros) Assert.assertEquals(5, dataSparse.numNonzeros) } @Test def toArray() = { val dataDense = RawData.dense[Double, Integer](Array(1, 2.3, 0.7, -5.23, 123.34), -100) val dataSparse = RawData.sparse[Double, Integer](87, Array(0, 5, 10, 23, 24), Array(1, 2.3, 0.7, -5.23, 123.34), -100) Assert.assertEquals(5, dataDense.toArray.length) Assert.assertEquals(87, dataSparse.toArray.length) Assert.assertEquals(-5.23, dataDense.toArray(3),0.000001) Assert.assertEquals(0, dataSparse.toArray(3),0.000001) Assert.assertEquals(-5.23, dataSparse.toArray(23),0.000001) } }
Example 12
Source File: LibSVMUtilTest.scala From reforest with Apache License 2.0
package reforest.data.load import org.junit.{Assert, Test} import reforest.test.BroadcastSimple class LibSVMUtilTest { val util = new LibSVMUtil[Double, Integer](BroadcastSimple.typeInfoDouble, BroadcastSimple.gcInstrumentedEmpty, BroadcastSimple.categoryInfoEmpty); @Test def parseLibSVMRecordSparse(): Unit = { val returned : (Double, Array[Int], Array[Double]) = util.parseLibSVMRecord("1 1:2596.000000 20:51.000000 37:3.000000") Assert.assertEquals(1, returned._1, 0); Assert.assertEquals(3, returned._2.length); Assert.assertEquals(0, returned._2(0)); Assert.assertEquals(19, returned._2(1)); Assert.assertEquals(36, returned._2(2)); Assert.assertEquals(3, returned._3.length); Assert.assertEquals(2596, returned._3(0),0); Assert.assertEquals(51, returned._3(1),0); Assert.assertEquals(3, returned._3(2),0); } @Test def parseLibSVMRecordDense(): Unit = { val returned : (Double, Array[Int], Array[Double]) = util.parseLibSVMRecord("1 1:2596.000000 2:51.000000 3:3.000000") Assert.assertEquals(1, returned._1, 0); Assert.assertEquals(3, returned._2.length); Assert.assertEquals(0, returned._2(0)); Assert.assertEquals(1, returned._2(1)); Assert.assertEquals(2, returned._2(2)); Assert.assertEquals(3, returned._3.length); Assert.assertEquals(2596, returned._3(0),0); Assert.assertEquals(51, returned._3(1),0); Assert.assertEquals(3, returned._3(2),0); } }
Example 13
Source File: ARFFUtilTest.scala From reforest with Apache License 2.0
package reforest.data.load import org.junit.{Assert, Test} import reforest.test.BroadcastSimple class ARFFUtilTest { val util = new ARFFUtil[Double, Integer](BroadcastSimple.typeInfoDouble, BroadcastSimple.gcInstrumentedEmpty, BroadcastSimple.categoryInfoEmpty); @Test def parseARFFRecord(): Unit = { val returned : (Double, Array[Double]) = util.parseARFFRecord("5.1,3.5,1.4,0.2,0") Assert.assertEquals(0, returned._1, 0); Assert.assertEquals(4, returned._2.length); Assert.assertEquals(5.1, returned._2(0),0.000001); Assert.assertEquals(3.5, returned._2(1),0.000001); Assert.assertEquals(1.4, returned._2(2),0.000001); Assert.assertEquals(0.2, returned._2(3),0.000001); } }
Example 14
Source File: RFFeatureSizerTest.scala From reforest with Apache License 2.0
package reforest.rf.feature import org.junit.Assert.assertEquals import reforest.rf.{RFCategoryInfo, RFCategoryInfoEmpty} import reforest.rf.split.RFSplitter import test.RFResourceFactory import org.junit.{Assert, Test} class RFFeatureSizerTest { private val numberBin = 32 private val numClasses = 10 private val splitter = RFResourceFactory.getSplitterRandomDefault(-23.5, 12.7, numberBin) private val sizer = splitter.generateRFSizer(numClasses) @Test def getSize(): Unit = { assertEquals((numberBin + 1) * numClasses, sizer.getSize(1)) } @Test def shrinker(): Unit = { val sizer = new RFFeatureSizerSimpleModelSelection(32, 2, new RFCategoryInfoEmpty, 16) assertEquals(0, sizer.getShrinkedValue(1, 0)) assertEquals(1, sizer.getShrinkedValue(1, -1)) assertEquals(16, sizer.getShrinkedValue(1, 32)) assertEquals(16, sizer.getShrinkedValue(1, 33)) assertEquals(16, sizer.getShrinkedValue(1, 100)) } @Test def shrinkerSpecialized(): Unit = { val splitNumberMap = Map(0 -> 4, 1 -> 8, 2 -> 7) val sizer = new RFFeatureSizerSpecializedModelSelection(splitNumberMap, 2, new RFCategoryInfoEmpty, 8, 32) assertEquals(5, sizer.getShrinkedValue(0, 5)) assertEquals(4, sizer.getShrinkedValue(0, 4)) assertEquals(1, sizer.getShrinkedValue(0, 1)) assertEquals(0, sizer.getShrinkedValue(0, 0)) assertEquals(0, sizer.getShrinkedValue(1, 0)) assertEquals(1, sizer.getShrinkedValue(1, -1)) assertEquals(8, sizer.getShrinkedValue(1, 32)) assertEquals(8, sizer.getShrinkedValue(1, 33)) assertEquals(8, sizer.getShrinkedValue(1, 100)) assertEquals(8, sizer.getShrinkedValue(2, 8)) assertEquals(7, sizer.getShrinkedValue(2, 7)) assertEquals(6, sizer.getShrinkedValue(2, 6)) assertEquals(2, sizer.getShrinkedValue(2, 2)) assertEquals(1, sizer.getShrinkedValue(2, 1)) } @Test def deShrinker(): Unit = { val sizer = new RFFeatureSizerSimpleModelSelection(32, 2, new RFCategoryInfoEmpty, 16) assertEquals(0, sizer.getDeShrinkedValue(0, 0)) assertEquals(0, sizer.getDeShrinkedValue(1, 0)) assertEquals(32, sizer.getDeShrinkedValue(1, 32)) assertEquals(32, sizer.getDeShrinkedValue(1, 33)) assertEquals(32, sizer.getDeShrinkedValue(1, 100)) } @Test def deShrinkerSpecialized(): Unit = { val splitNumberMap = Map(0 -> 4, 1 -> 8, 2 -> 7) val sizer = new RFFeatureSizerSpecializedModelSelection(splitNumberMap, 2, new RFCategoryInfoEmpty, 8, 32) assertEquals(5, sizer.getDeShrinkedValue(0, 5)) assertEquals(4, sizer.getDeShrinkedValue(0, 4)) assertEquals(1, sizer.getDeShrinkedValue(0, 1)) assertEquals(0, sizer.getDeShrinkedValue(0, 0)) assertEquals(0, sizer.getDeShrinkedValue(1, 0)) assertEquals(9, sizer.getDeShrinkedValue(1, 32)) assertEquals(9, sizer.getDeShrinkedValue(1, 33)) assertEquals(9, sizer.getDeShrinkedValue(1, 100)) assertEquals(8, sizer.getDeShrinkedValue(2, 8)) assertEquals(7, sizer.getDeShrinkedValue(2, 7)) assertEquals(6, sizer.getDeShrinkedValue(2, 6)) assertEquals(2, sizer.getDeShrinkedValue(2, 2)) assertEquals(1, sizer.getDeShrinkedValue(2, 1)) } }
Example 15
Source File: RFParameterTest.scala From reforest with Apache License 2.0
package reforest.rf.parameter import org.junit.{Assert, Test} import reforest.test.RFCreator class RFParameterTest { @Test def builderInit = { val b1 = RFCreator.parameterBuilder .addParameter(RFParameterType.NumTrees, 100) val parameter1 = b1.build Assert.assertEquals(1, parameter1.numTrees.length) Assert.assertEquals(100, parameter1.getMaxNumTrees) val parameter2 = parameter1.applyNumTrees(101) Assert.assertEquals(1, parameter1.numTrees.length) Assert.assertEquals(1, parameter2.numTrees.length) Assert.assertEquals(100, parameter1.getMaxNumTrees) Assert.assertEquals(101, parameter2.getMaxNumTrees) } }
Example 16
Source File: RFParameterBuilderTest.scala From reforest with Apache License 2.0
package reforest.rf.parameter import org.junit.{Assert, Test} import reforest.test.RFCreator class RFParameterBuilderTest { @Test def builderInit = { val b1 = RFCreator.parameterBuilder val b2 = RFCreator.parameterBuilder val parameter1 = b1.build val parameter2 = b2.build Assert.assertNotEquals(parameter1.UUID, parameter2.UUID) } @Test def builderInitFromParameter = { val b1 = RFCreator.parameterBuilder val parameter1 = b1.build val b2 = RFParameterBuilder.apply(parameter1) val parameter2 = b2.build Assert.assertEquals(parameter1.UUID, parameter2.UUID) } @Test def builderAddParameter = { val b1 = RFParameterBuilder.apply .addParameter(RFParameterType.Dataset, "this is required") .addParameter(RFParameterType.Instrumented, true) .addParameter(RFParameterType.SparkCompressionCodec, "snappy") .addParameter(RFParameterType.MaxNodesConcurrent, 5) .addParameter(RFParameterType.PoissonMean, 5.3) val parameter1 = b1.build Assert.assertEquals(true, parameter1.Instrumented) Assert.assertEquals("snappy", parameter1.sparkCompressionCodec) Assert.assertEquals(5, parameter1.maxNodesConcurrent) Assert.assertEquals(5.3, parameter1.poissonMean, 0.000001) } }
Example 17
Source File: DataLoadingRulesTest.scala From aerosolve with Apache License 2.0
package com.airbnb.common.ml.strategy.data import scala.util.Random import org.junit.Assert import org.junit.Test class DataLoadingRulesTest { @Test def isEnoughSamplesToTrain(): Unit = { Assert.assertFalse( "There must be enough samples to allow training.", DataLoadingRules.isEnoughSamplesToTrain( Seq.fill(DataLoadingRules.MinTrainingSamples - 1)(Random.nextInt) ) ) Assert.assertTrue( "If there are enough samples, we should allow training.", DataLoadingRules.isEnoughSamplesToTrain( Seq.fill(DataLoadingRules.MinTrainingSamples)(Random.nextInt) ) ) } }
Example 18
Source File: Neo4jConfigTest.scala From neo4j-spark-connector with Apache License 2.0
package org.neo4j.spark

import org.apache.spark.SparkConf
import org.junit.{Assert, Test}

class Neo4jConfigTest {

  @Test
  def testParams(): Unit = {
    // given
    val encryption = "true"
    val user = "neo4j"
    val pass = "pass"
    val url = "neo4j://localhost"
    val sparkConf = new SparkConf()
      .set("spark.neo4j.encryption", encryption)
      .set("spark.neo4j.user", user)
      .set("spark.neo4j.password", pass)
      .set("spark.neo4j.url", url)

    // when
    val neo4jConf = Neo4jConfig(sparkConf)

    // then
    Assert.assertEquals(encryption.toBoolean, neo4jConf.encryption)
    Assert.assertEquals(user, neo4jConf.user)
    Assert.assertEquals(pass, neo4jConf.password.get)
    Assert.assertEquals(url, neo4jConf.url)
  }

  @Test
  def testOldParams(): Unit = {
    // given
    val encryption = "true"
    val user = "neo4j"
    val pass = "pass"
    val url = "neo4j://localhost"
    val sparkConf = new SparkConf()
      .set("spark.neo4j.bolt.encryption", encryption)
      .set("spark.neo4j.bolt.user", user)
      .set("spark.neo4j.bolt.password", pass)
      .set("spark.neo4j.bolt.url", url)

    // when
    val neo4jConf = Neo4jConfig(sparkConf)

    // then
    Assert.assertEquals(encryption.toBoolean, neo4jConf.encryption)
    Assert.assertEquals(user, neo4jConf.user)
    Assert.assertEquals(pass, neo4jConf.password.get)
    Assert.assertEquals(url, neo4jConf.url)
  }
}
Example 19
Source File: IterableDecoratorTest.scala From scala-collection-contrib with Apache License 2.0
package scala.collection package decorators import org.junit.{Assert, Test} import scala.collection.immutable.{LazyList, List, Map, Range} class IterableDecoratorTest { @Test def foldSomeLeft(): Unit = { val r = Range(0, 100) Assert.assertEquals(0, r.foldSomeLeft(0)((x, y) => None)) Assert.assertEquals(10, r.foldSomeLeft(0)((x, y) => if (y > 10) None else Some(y))) Assert.assertEquals(55, r.foldSomeLeft(0)((x, y) => if (y > 10) None else Some(x + y))) Assert.assertEquals(4950, r.foldSomeLeft(0)((x, y) => Some(x + y))) Assert.assertEquals(10, List[Int]().foldSomeLeft(10)((x, y) => Some(x + y))) } @Test def lazyFoldLeftIsStackSafe(): Unit = { val bigList = List.range(1, 50000) def sum(as: Iterable[Int]): Int = as.lazyFoldLeft(0)(_ + _) Assert.assertEquals(sum(bigList), 1249975000) } @Test def lazyFoldLeftIsLazy(): Unit = { val nats = LazyList.from(0) def exists[A](as: Iterable[A])(f: A => Boolean): Boolean = as.lazyFoldLeft(false)(_ || f(_)) Assert.assertTrue(exists(nats)(_ > 100000)) } @Test def lazyFoldRightIsLazy(): Unit = { val xs = LazyList.from(0) def chooseOne(x: Int): Either[Int, Int => Int]= if (x < (1 << 16)) Right(identity) else Left(x) Assert.assertEquals(1 << 16, xs.lazyFoldRight(0)(chooseOne)) } @Test def hasIterableOpsWorksWithStringAndMap(): Unit = { val result = "foo".foldSomeLeft(0) { case (_, 'o') => None case (n, _) => Some(n + 1) } Assert.assertEquals(1, result) val result2 = Map(1 -> "foo", 2 -> "bar").foldSomeLeft(0) { case (n, (k, _)) => if (k == -1) None else Some(n + 1) } Assert.assertEquals(2, result2) } @Test def splitByShouldHonorEmptyIterator(): Unit = { val split = Vector.empty[Int].splitBy(identity) Assert.assertEquals(Vector.empty, split) } @Test def splitByShouldReturnSingleSeqWhenSingleElement(): Unit = { val value = Vector("1") val split = value.splitBy(identity) Assert.assertEquals(Vector(value), split) } @Test def splitByShouldReturnSingleSeqWhenAllElHaveTheSameKey(): Unit = { val value = Vector("1", "1", "1") val split = value.splitBy(identity) Assert.assertEquals(Vector(value), split) } @Test def splitByShouldReturnVectorOfVectorOrConsecutiveElementsWithTheSameKey(): Unit = { val value = Vector("1", "2", "2", "3", "3", "3", "2", "2") val split: Vector[Vector[String]] = value.splitBy(identity) Assert.assertEquals(Vector(Vector("1"), Vector("2", "2"), Vector("3", "3", "3"), Vector("2", "2")), split) } @Test def splitByShouldReturnListOfListOfConsecutiveElementsWithTheSameKey(): Unit = { val value = List("1", "2", "2", "3", "3", "3", "2", "2") val split: List[List[String]] = value.splitBy(identity) Assert.assertEquals(List(List("1"), List("2", "2"), List("3", "3", "3"), List("2", "2")), split) } @Test def splitByShouldReturnSetOfSetOfConsecutiveElementsWithTheSameKey(): Unit = { val value = Set("1", "2", "2", "3", "3", "3", "2", "2") val split: Set[Set[String]] = value.splitBy(identity) Assert.assertEquals(Set(Set("1"), Set("2"), Set("3")), split) } }
Example 20
Source File: OntologyHubClientTest.scala From daf-semantics with Apache License 2.0
package clients import java.nio.file.Paths import org.junit.After import org.junit.Assert import org.junit.Assume import org.junit.Before import org.junit.BeforeClass import org.junit.Test import org.slf4j.LoggerFactory import play.Logger import utilities.Adapters.AwaitFuture import clients.HTTPClient object OntologyHubClientTest { val logger = LoggerFactory.getLogger(this.getClass) @BeforeClass def check_before() { Assume.assumeTrue(ontonethub_is_running) logger.info("Ontonethub is UP! [TESTING...]") } private def ontonethub_is_running = { val client = HTTPClient client.start() val ontonethub = new OntonetHubClient(client.ws) val check = ontonethub.status().await client.stop() check } }
Example 21
Source File: OntonetHubEndpointsTest.scala From daf-semantics with Apache License 2.0
package clients

import scala.concurrent.Await

import org.junit.After
import org.junit.Before
import org.junit.Test

import play.api.libs.json.JsLookupResult
import play.api.libs.json.JsValue

import scala.concurrent.duration.Duration
import scala.concurrent.ExecutionContext.Implicits._

import utilities.JSONHelper
import org.junit.Assert
import semantic_manager.yaml.OntonetHubProperty
import clients.HTTPClient

class OntonetHubEndpointsTest {

  var http = HTTPClient
  var hub: OntonetHubClient = null

  @Before
  def before() {
    http.start()
    hub = new OntonetHubClient(http.ws, OntonetHubClient.DEFAULT_CONFIG)
  }

  @After
  def after() {
    http.stop()
  }

  // @Test
  def testing_hub_find {
    val (host, port) = ("localhost", 8000)
    val (query, lang, limit) = ("nome", "it", 4)

    val http = HTTPClient
    http.start()
    val ws = http.ws
    val future = ws.url(s"http://${host}:${port}/stanbol/ontonethub/ontologies/find")
      .withHeaders(("accept", "application/json"))
      .withHeaders(("content-type", "application/x-www-form-urlencoded"))
      .withFollowRedirects(true)
      .post(s"name=${query}&lang=${lang}&limit=${limit}")
      .map { item =>
        val json = JSONHelper.pretty(item.body)
        println("\n\n")
        println(json)
        item
      }
    val results = Await.result(future, Duration.Inf)
    Assert.assertTrue(results.status == 200)
    http.stop()
  }

  @Test
  def testing_find_property {
    val (query, lang, limit) = ("nome", "it", 2)
    val future = hub.find_property(query, lang, limit)
    // CHECK for de-coupling from swagger
    // .map(_.map(item => OntonetHubProperty.tupled(OntonetHubClient.models.FindResult.unapply(item).get)))
    println("\n\n############################################ RESULTS")
    val results = Await.result(future, Duration.Inf)
    println(results.mkString("\n\n"))
  }
}
Example 22
Source File: MultiMapTest.scala From scala-collection-contrib with Apache License 2.0
package scala.collection.immutable import org.junit.{Assert, Test} import org.junit.runner.RunWith import org.junit.runners.JUnit4 @RunWith(classOf[JUnit4]) class MultiMapTest { @Test def multiMap(): Unit = { val mm = MultiDict("a" -> 1, "b" -> 1, "b" -> 2, "c" -> 1) val m = Map("a" -> Set(1), "b" -> Set(1, 2), "c" -> Set(1)) Assert.assertEquals(m, mm.sets) Assert.assertEquals(mm.sets, m) val mm2 = mm + ("a" -> 2) Assert.assertEquals(Set(1, 2), mm2.get("a")) val mm3 = mm2 - ("a" -> 1) Assert.assertEquals(Set(2), mm3.get("a")) Assert.assertTrue(mm3.containsKey("a")) val mm4 = mm3 - ("a" -> 2) Assert.assertFalse(mm4.containsKey("a")) Assert.assertEquals(Set(1, 2), mm4.get("b")) val mm5 = mm4 -* "b" Assert.assertEquals(Set.empty, mm5.get("b")) } }
Example 23
Source File: SortedMultiMapTest.scala From scala-collection-contrib with Apache License 2.0
package scala.collection.immutable import org.junit.{Assert, Test} import org.junit.runner.RunWith import org.junit.runners.JUnit4 @RunWith(classOf[JUnit4]) class SortedMultiMapTest { @Test def sortedMultiMap(): Unit = { val smm = SortedMultiDict("a" -> 1, "b" -> 1, "b" -> 2, "c" -> 1) val m = Map("a" -> Set(1), "b" -> Set(1, 2), "c" -> Set(1)) Assert.assertEquals(m, smm.sets) Assert.assertEquals(smm.sets, m) Assert.assertEquals("a", smm.firstKey) Assert.assertEquals("c", smm.lastKey) Assert.assertEquals(SortedMultiDict("c" -> 1), smm.rangeFrom("c")) val smm2 = smm + ("a" -> 2) Assert.assertEquals(Set(1, 2), smm2.get("a")) val smm3 = smm2 - ("a" -> 1) Assert.assertEquals(Set(2), smm3.get("a")) Assert.assertTrue(smm3.containsKey("a")) val smm4 = smm3 - ("a" -> 2) Assert.assertFalse(smm4.containsKey("a")) Assert.assertEquals(Set(1, 2), smm4.get("b")) val smm5 = smm4 -* "b" Assert.assertEquals(Set.empty, smm5.get("b")) } }
Example 24
Source File: SortedMultiSetTest.scala From scala-collection-contrib with Apache License 2.0
package scala.collection.immutable import org.junit.{Assert, Test} import org.junit.runner.RunWith import org.junit.runners.JUnit4 @RunWith(classOf[JUnit4]) class SortedMultiSetTest { @Test def sortedMultiSet(): Unit = { val sms = SortedMultiSet(2, 1, 3, 2) Assert.assertEquals(1, sms.get(1)) Assert.assertEquals(2, sms.get(2)) Assert.assertEquals(1, sms.firstKey) Assert.assertEquals(3, sms.lastKey) Assert.assertEquals(SortedMultiSet(3, 2, 2), sms.rangeFrom(2)) val sms2 = sms + 2 Assert.assertEquals(3, sms2.get(2)) val sms3 = sms2 - 3 Assert.assertFalse(sms3.contains(3)) } }
Example 25
Source File: MultiSetTest.scala From scala-collection-contrib with Apache License 2.0
package scala.collection.immutable import org.junit.{Assert, Test} import org.junit.runner.RunWith import org.junit.runners.JUnit4 @RunWith(classOf[JUnit4]) class MultiSetTest { @Test def multiSet(): Unit = { val ms = MultiSet("a", "b", "b", "c") val m = Map("a" -> 1, "b" -> 2, "c" -> 1) Assert.assertEquals(m, ms.occurrences) Assert.assertEquals(ms.occurrences, m) Assert.assertEquals(1, ms.get("a")) Assert.assertEquals(2, ms.get("b")) val ms2 = ms + "a" Assert.assertEquals(2, ms2.get("a")) val ms3 = ms2 - "a" Assert.assertEquals(1, ms3.get("a")) Assert.assertTrue(ms3.contains("a")) val ms4 = ms3 - "a" Assert.assertFalse(ms4.contains("a")) } }
Example 26
Source File: BitSetDecoratorTest.scala From scala-collection-contrib with Apache License 2.0
package scala.collection.decorators import org.junit.{Assert, Test} import scala.collection.BitSet class BitSetDecoratorTest { import Assert.{assertEquals, assertSame} import BitSet.empty @Test def shiftEmptyLeft(): Unit = { for (shiftBy <- 0 to 128) { assertSame(empty, empty << shiftBy) } } @Test def shiftLowestBitLeft(): Unit = { for (shiftBy <- 0 to 128) { assertEquals(BitSet(shiftBy), BitSet(0) << shiftBy) } } @Test def shiftNegativeLeft(): Unit = { assertEquals(BitSet(0), BitSet(1) << -1) } @Test def largeShiftLeft(): Unit = { val bs = BitSet(0 to 300 by 5: _*) for (shiftBy <- 0 to 128) { assertEquals(bs.map(_ + shiftBy), bs << shiftBy) } } @Test def skipZeroWordsOnShiftLeft(): Unit = { val result = BitSet(5 * 64 - 1) << 64 assertEquals(BitSet(6 * 64 - 1), result) assertEquals(6, result.nwords) } @Test def shiftEmptyRight(): Unit = { for (shiftBy <- 0 to 128) { assertSame(empty, empty >> shiftBy) } } @Test def shiftLowestBitRight(): Unit = { assertEquals(BitSet(0), BitSet(0) >> 0) for (shiftBy <- 1 to 128) { assertSame(empty, BitSet(0) >> shiftBy) } } @Test def shiftToLowestBitRight(): Unit = { for (shiftBy <- 0 to 128) { assertEquals(BitSet(0), BitSet(shiftBy) >> shiftBy) } } @Test def shiftNegativeRight(): Unit = { assertEquals(BitSet(1), BitSet(0) >> -1) } @Test def largeShiftRight(): Unit = { val bs = BitSet(0 to 300 by 5: _*) for (shiftBy <- 0 to 128) { assertEquals(bs.collect { case b if b >= shiftBy => b - shiftBy }, bs >> shiftBy) } } }
Example 27
Source File: MapDecoratorTest.scala From scala-collection-contrib with Apache License 2.0
package scala.collection package decorators import org.junit.{Assert, Test} class MapDecoratorTest { @Test def zipByKeyWith(): Unit = { val map1 = Map(1 -> "a", 2 -> "b") val map2 = Map(2 -> "c") val zipped = map1.zipByKeyWith(map2)(_ ++ _) val expected = Map(2 -> "bc") Assert.assertEquals(expected, zipped) val sortedMap1 = SortedMap(2 -> "a", 1 -> "b") val sortedMap2 = SortedMap(1 -> "d", 2 -> "c") val sortedZipped = sortedMap1.zipByKeyWith(sortedMap2)(_ ++ _) val sortedZippedT: SortedMap[Int, String] = sortedZipped val sortedExpected = SortedMap(1 -> "bd", 2 -> "ac") Assert.assertEquals(sortedExpected, sortedZipped) } @Test def joins(): Unit = { val map1 = Map(1 -> "a", 2 -> "b") val map2 = Map(2 -> "c", 3 -> "d") locally { val expected = Map( 1 -> (Some("a"), None), 2 -> (Some("b"), Some("c")), 3 -> (None, Some("d")) ) Assert.assertEquals(expected, map1.fullOuterJoin(map2)) } locally { val expected = Map( 1 -> ("a", None), 2 -> ("b", Some("c")) ) Assert.assertEquals(expected, map1.leftOuterJoin(map2)) } locally { val expected = Map( 2 -> (Some("b"), "c"), 3 -> (None, "d") ) Assert.assertEquals(expected, map1.rightOuterJoin(map2)) } val sortedMap1 = SortedMap(2 -> "a", 1 -> "b") val sortedMap2 = SortedMap(2 -> "c", 3 -> "d") locally { val expected = SortedMap( 1 -> (Some("b"), None), 2 -> (Some("a"), Some("c")), 3 -> (None, Some("d")) ) val expectedT: SortedMap[Int, (Option[String], Option[String])] = expected Assert.assertEquals(expected, sortedMap1.fullOuterJoin(sortedMap2)) } } @Test def mapDecoratorWorksWithViews(): Unit = { val map1 = Map(1 -> "a", 2 -> "b") val map2 = Map(2 -> "c") val zipped = map1.view.zipByKeyWith(map2)(_ ++ _).to(Map) val expected = Map(2 -> "bc") Assert.assertEquals(expected, zipped) // val mutableMap1 = mutable.Map(1 -> "a", 2 -> "b") // val zipped2 = mutableMap1.zipByKeyWith(map2)(_ ++ _).to(Map) // Assert.assertEquals(expected, zipped2) } }
Example 28
Source File: ScriptEngineTest.scala From piflow with BSD 2-Clause "Simplified" License
import java.util import javax.script.{Compilable, Invocable, ScriptEngineManager} import jdk.nashorn.api.scripting.{ScriptObjectMirror, ScriptUtils} import org.junit.{Assert, Test} class ScriptEngineTest { @Test def testJs(): Unit = { val engine = new ScriptEngineManager().getEngineByName("javascript"); engine.eval("function f(s){return s.toUpperCase();}"); val s = engine.asInstanceOf[Invocable].invokeFunction("f", "bluejoe"); Assert.assertEquals("BLUEJOE", s); Assert.assertEquals(classOf[String], s.getClass); val s2 = engine.eval("(function (){return java.util.Arrays.asList([1,2,3]);})();"); println(s2); println(engine.eval("1;").getClass); println(engine.eval("'abc';").getClass); println(engine.eval("true;").getClass); println(engine.eval("1.1;").getClass); println(engine.eval("var x = {'a':1}; x;").getClass); println(engine.eval("new java.lang.Object()").getClass); println(engine.eval("java").getClass); } @Test def testJs2(): Unit = { val engine = new ScriptEngineManager().getEngineByName("javascript"); val s = engine.eval("""["1","2","3"]"""); println(s); val m = ScriptUtils.convert(s, classOf[Array[String]]); println(m); } @Test def testTs(): Unit = { val engine = new ScriptEngineManager().getEngineByName("typescript"); val s = engine.eval("""["1","2","3"]"""); println(s); val m = ScriptUtils.convert(s, classOf[Array[String]]); println(m); } }
Example 29
Source File: MultiMapTest.scala From scala-collection-contrib with Apache License 2.0
package scala package collection package mutable import org.junit.{Assert, Test} import org.junit.runner.RunWith import org.junit.runners.JUnit4 @RunWith(classOf[JUnit4]) class MultiMapTest { @Test def multiMap(): Unit = { val mm = MultiDict.empty[String, Int] mm += "a" -> 1 mm += "a" -> 2 mm += "b" -> 1 val m = Map("a" -> Set(1, 2), "b" -> Set(1)) Assert.assertEquals(m, mm.sets) Assert.assertTrue(mm.containsKey("a")) Assert.assertTrue(mm.containsEntry("b" -> 1)) Assert.assertTrue(mm.containsValue(2)) Assert.assertFalse(mm.containsKey("c")) Assert.assertFalse(mm.containsEntry("a" -> 3)) Assert.assertFalse(mm.containsValue(3)) Assert.assertEquals(Set(1, 2), mm.get("a")) Assert.assertEquals(Set(1), mm.get("b")) mm -= "b" -> 1 Assert.assertFalse(mm.containsKey("b")) Assert.assertEquals(Set.empty, mm.get("b")) mm -*= "a" Assert.assertFalse(mm.containsKey("a")) Assert.assertEquals(Set.empty, mm.get("a")) } }
Example 30
Source File: SortedMultiMapTest.scala From scala-collection-contrib with Apache License 2.0
package scala.collection.mutable import org.junit.{Assert, Test} import org.junit.runner.RunWith import org.junit.runners.JUnit4 @RunWith(classOf[JUnit4]) class SortedMultiMapTest { @Test def sortedMultiMap(): Unit = { val smm = SortedMultiDict.empty[String, Int] smm += "a" -> 1 smm += "b" -> 1 smm += "b" -> 2 smm += "c" -> 1 val m = Map("a" -> Set(1), "b" -> Set(1, 2), "c" -> Set(1)) Assert.assertEquals(m, smm.sets) Assert.assertEquals(smm.sets, m) Assert.assertEquals("a", smm.firstKey) Assert.assertEquals("c", smm.lastKey) Assert.assertEquals(SortedMultiDict("c" -> 1), smm.rangeFrom("c")) smm += "a" -> 2 Assert.assertEquals(Set(1, 2), smm.get("a")) smm -= "a" -> 1 Assert.assertEquals(Set(2), smm.get("a")) Assert.assertTrue(smm.containsKey("a")) smm -= "a" -> 2 Assert.assertFalse(smm.containsKey("a")) Assert.assertEquals(Set(1, 2), smm.get("b")) smm -*= "b" Assert.assertEquals(Set.empty, smm.get("b")) } }
Example 31
Source File: SortedMultiSetTest.scala From scala-collection-contrib with Apache License 2.0
package scala.collection.mutable import org.junit.{Assert, Test} import org.junit.runner.RunWith import org.junit.runners.JUnit4 @RunWith(classOf[JUnit4]) class SortedMultiSetTest { @Test def sortedMultiSet(): Unit = { val sms = SortedMultiSet(2, 1, 3, 2) Assert.assertEquals(1, sms.get(1)) Assert.assertEquals(2, sms.get(2)) Assert.assertEquals(1, sms.firstKey) Assert.assertEquals(3, sms.lastKey) sms += 2 Assert.assertEquals(3, sms.get(2)) sms -= 3 Assert.assertFalse(sms.contains(3)) } }
Example 32
Source File: MultiSetTest.scala From scala-collection-contrib with Apache License 2.0
package scala package collection package mutable import org.junit.{Assert, Test} import org.junit.runner.RunWith import org.junit.runners.JUnit4 @RunWith(classOf[JUnit4]) class MultiSetTest { @Test def multiSet(): Unit = { val ms = MultiSet.empty[String] ms += "a" ms += "a" ms += "b" val m = Map("a" -> 2, "b" -> 1) Assert.assertEquals(m, ms.occurrences) Assert.assertTrue(ms.contains("a")) Assert.assertTrue(ms.contains("b")) Assert.assertEquals(2, ms.get("a")) Assert.assertEquals(1, ms.get("b")) ms -= "b" Assert.assertFalse(ms.contains("b")) Assert.assertEquals(0, ms.get("b")) } }
Example 33
Source File: SortedMultiMapTest.scala From scala-collection-contrib with Apache License 2.0
package scala.collection import org.junit.{Assert, Test} import org.junit.runner.RunWith import org.junit.runners.JUnit4 @RunWith(classOf[JUnit4]) class SortedMultiMapTest { def sortedMultiMap(smm: SortedMultiDict[Int, Int]): Unit = { Assert.assertEquals(Set(1), smm.get(1)) Assert.assertEquals(Set(0, 1), smm.get(2)) Assert.assertEquals(1, smm.firstKey) Assert.assertEquals(3, smm.lastKey) Assert.assertEquals(SortedMultiDict(3 -> 2, 2 -> 1, 2 -> 0), smm.rangeFrom(2)) } @Test def run(): Unit = { sortedMultiMap(immutable.SortedMultiDict(2 -> 0, 1 -> 1, 3 -> 2, 2 -> 1)) sortedMultiMap(mutable.SortedMultiDict(2 -> 0, 1 -> 1, 3 -> 2, 2 -> 1)) } }
Example 34
Source File: SortedMultiSetTest.scala From scala-collection-contrib with Apache License 2.0
package scala.collection import org.junit.{Assert, Test} import org.junit.runner.RunWith import org.junit.runners.JUnit4 @RunWith(classOf[JUnit4]) class SortedMultiSetTest { def sortedMultiSet(sms: SortedMultiSet[Int]): Unit = { Assert.assertEquals(1, sms.get(1)) Assert.assertEquals(2, sms.get(2)) Assert.assertEquals(1, sms.firstKey) Assert.assertEquals(3, sms.lastKey) Assert.assertEquals(SortedMultiSet(3, 2, 2), sms.rangeFrom(2)) } @Test def run(): Unit = { sortedMultiSet(immutable.SortedMultiSet(2, 1, 3, 2)) sortedMultiSet(mutable.SortedMultiSet(2, 1, 3, 2)) } }
Example 35
Source File: MultiSetTest.scala From scala-collection-contrib with Apache License 2.0
package scala.collection import org.junit.runner.RunWith import org.junit.runners.JUnit4 import scala.collection.immutable.List import org.junit.{Assert, Test} @RunWith(classOf[JUnit4]) class MultiSetTest { @Test def equality(): Unit = { val ms1 = MultiSet("a", "b", "b", "c") val ms2 = MultiSet("a", "b", "b", "c") Assert.assertEquals(ms2, ms1) Assert.assertEquals(ms1, ms2) Assert.assertEquals(ms1.##, ms2.##) } @Test def concat(): Unit = { Assert.assertEquals( MultiSet(1, 1), MultiSet(1).concat(MultiSet(1)) ) Assert.assertEquals( MultiSet("a", "a", "a"), MultiSet("a").concatOccurrences(List(("a", 2))) ) } @Test def map(): Unit = { Assert.assertEquals( MultiSet("A", "B", "B"), MultiSet("a", "b", "b").map(_.toUpperCase) ) Assert.assertEquals( MultiSet(1, 1), MultiSet("a", "b").map(_ => 1) ) Assert.assertEquals( MultiSet("c", "c", "c", "c"), MultiSet("a", "b", "b").mapOccurrences { _ => ("c", 2) } ) } @Test def testToString(): Unit = { def run(ms: MultiSet[Int]): Unit = { val actual = ms.toString assert(actual.startsWith("MultiSet("), s"`$actual` does not start with `MultiSet(`") assert(actual.endsWith(")"), s"`$actual` does not end with `)`") // The order of elements in the multiset are not defined, so this test should be robust to order changes Assert.assertEquals(ms, actual .stripPrefix("MultiSet(") .stripSuffix(")") .split(",") .iterator .flatMap (_.trim match { case "" => None case s => Some(s.toInt) }) .to(MultiSet)) } def runForFactory(factory: IterableFactory[MultiSet]): Unit = { Assert.assertEquals(factory().toString, "MultiSet()") Assert.assertEquals(factory(1).toString, "MultiSet(1)") run(factory()) run(factory(1)) run(factory(1234)) run(factory(1,2,3)) run(factory(1,1,1,2,3)) run(factory(1,1,1,2,2,2,2,3)) } runForFactory(MultiSet) runForFactory(mutable.MultiSet) runForFactory(immutable.MultiSet) } }
Example 36
Source File: KafkaStringEventBatchTest.scala From maha with Apache License 2.0
// Copyright 2017, Yahoo Holdings Inc.
// Licensed under the terms of the Apache License 2.0. Please see LICENSE file in project root for terms.
package com.yahoo.maha.log

import java.util.Collections

import com.yahoo.maha.data.{Compressor, StringEventBatch}
import org.junit.{Assert, Test}

class KafkaStringEventBatchTest {
  @Test
  def testWithNoConfiguration(): Unit = {
    val encoder = new KafkaStringEventBatchSerializer()
    encoder.configure(Collections.emptyMap(), false)
    val decoder = new KafkaStringEventBatchDeserializer()
    decoder.configure(Collections.emptyMap(), false)
    val builder = new StringEventBatch.Builder(3)
    builder.add("one")
    builder.add("two")
    builder.add("three")
    val recordList = builder.build.asInstanceOf[StringEventBatch]
    val encoded = encoder.serialize("blah", recordList)
    val decoded = decoder.deserialize("blah", encoded)
    Assert.assertEquals(recordList.getEvents.size(), decoded.getEvents.size())
    Assert.assertEquals(recordList.getEvents.get(0), decoded.getEvents.get(0))
    Assert.assertEquals(recordList.getEvents.get(1), decoded.getEvents.get(1))
    Assert.assertEquals(recordList.getEvents.get(2), decoded.getEvents.get(2))
  }

  @Test
  def testWithConfiguration(): Unit = {
    import scala.collection.JavaConverters._
    val config: java.util.Map[String, _] = Map(
      Compressor.COMPRESSOR_CODEC_PROPERTY -> "lz4hc"
      , KafkaStringEventBatchSerializer.BUFFER_MB_PROPERTY -> "2"
      , KafkaStringEventBatchDeserializer.BUFFER_MB_PROPERTY -> "2"
    ).asJava
    val encoder = new KafkaStringEventBatchSerializer()
    encoder.configure(config, false)
    val decoder = new KafkaStringEventBatchDeserializer()
    decoder.configure(config, false)
    val builder = new StringEventBatch.Builder(3)
    builder.add("one")
    builder.add("two")
    builder.add("three")
    val recordList = builder.build.asInstanceOf[StringEventBatch]
    val encoded = encoder.serialize("blah", recordList)
    val decoded = decoder.deserialize("blah", encoded)
    Assert.assertEquals(recordList.getEvents.size(), decoded.getEvents.size())
    Assert.assertEquals(recordList.getEvents.get(0), decoded.getEvents.get(0))
    Assert.assertEquals(recordList.getEvents.get(1), decoded.getEvents.get(1))
    Assert.assertEquals(recordList.getEvents.get(2), decoded.getEvents.get(2))
  }

  @Test
  def testWithBadConfiguration(): Unit = {
    import scala.collection.JavaConverters._
    val config: java.util.Map[String, _] = Map(
      Compressor.COMPRESSOR_CODEC_PROPERTY -> "blah"
      , KafkaStringEventBatchSerializer.BUFFER_MB_PROPERTY -> "abc"
      , KafkaStringEventBatchDeserializer.BUFFER_MB_PROPERTY -> "-1"
    ).asJava
    val encoder = new KafkaStringEventBatchSerializer()
    encoder.configure(config, false)
    val decoder = new KafkaStringEventBatchDeserializer()
    decoder.configure(config, false)
    val builder = new StringEventBatch.Builder(3)
    builder.add("one")
    builder.add("two")
    builder.add("three")
    val recordList = builder.build.asInstanceOf[StringEventBatch]
    val encoded = encoder.serialize("blah", recordList)
    val decoded = decoder.deserialize("blah", encoded)
    Assert.assertEquals(recordList.getEvents.size(), decoded.getEvents.size())
    Assert.assertEquals(recordList.getEvents.get(0), decoded.getEvents.get(0))
    Assert.assertEquals(recordList.getEvents.get(1), decoded.getEvents.get(1))
    Assert.assertEquals(recordList.getEvents.get(2), decoded.getEvents.get(2))
  }
}