org.junit.Before Scala Examples
The following examples show how to use org.junit.Before.
The source project, file name, and license are noted with each example.
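Before the project examples, here is a minimal, self-contained sketch of the basic pattern (the class and field names are illustrative only, not taken from any project below): a method annotated with org.junit.Before runs before every @Test method in the class, so it is the usual place to rebuild mutable fixtures, while @After runs after each test for cleanup.

import org.junit.{After, Before, Test}
import org.junit.Assert.assertEquals

import scala.collection.mutable.ListBuffer

class BeforeExampleTest {
  // Re-created before every test, so tests never share mutable state.
  private var buffer: ListBuffer[Int] = _

  @Before
  def setUp(): Unit = {
    buffer = ListBuffer.empty[Int]
  }

  @After
  def tearDown(): Unit = {
    buffer.clear()
  }

  @Test
  def startsEmpty(): Unit = {
    assertEquals(0, buffer.size)
  }

  @Test
  def addsElement(): Unit = {
    buffer += 1
    assertEquals(1, buffer.size)
  }
}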
Example 1
Source File: TsStreamingTest.scala From spark-riak-connector with Apache License 2.0
package com.basho.riak.spark.streaming

import java.nio.ByteBuffer
import java.util.concurrent.{Callable, Executors, TimeUnit}

import com.basho.riak.spark._
import com.basho.riak.spark.rdd.RiakTSTests
import com.basho.riak.spark.rdd.timeseries.{AbstractTimeSeriesTest, TimeSeriesData}
import com.fasterxml.jackson.core.JsonParser
import com.fasterxml.jackson.databind.{DeserializationFeature, ObjectMapper, SerializationFeature}
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import org.apache.spark.sql.Row
import org.junit.Assert._
import org.junit.experimental.categories.Category
import org.junit.{After, Before, Test}

@Category(Array(classOf[RiakTSTests]))
class TsStreamingTest extends AbstractTimeSeriesTest(false) with SparkStreamingFixture {

  protected final val executorService = Executors.newCachedThreadPool()
  private val dataSource = new SocketStreamingDataSource
  private var port = -1

  @Before
  def setUp(): Unit = {
    port = dataSource.start(client => {
      testData
        .map(tolerantMapper.writeValueAsString)
        .foreach(x => client.write(ByteBuffer.wrap(s"$x\n".getBytes)))
      logInfo(s"${testData.length} values were sent to client")
    })
  }

  @After
  def tearDown(): Unit = {
    dataSource.stop()
  }

  @Test(timeout = 10 * 1000) // 10 seconds timeout
  def saveToRiak(): Unit = {
    executorService.submit(new Runnable {
      override def run(): Unit = {
        ssc.socketTextStream("localhost", port)
          .map(string => {
            val tsdata = new ObjectMapper()
              .configure(DeserializationFeature.FAIL_ON_NULL_FOR_PRIMITIVES, true)
              .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, true)
              .configure(JsonParser.Feature.ALLOW_SINGLE_QUOTES, true)
              .configure(JsonParser.Feature.ALLOW_UNQUOTED_FIELD_NAMES, true)
              .configure(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS, false)
              .registerModule(DefaultScalaModule)
              .readValue(string, classOf[TimeSeriesData])

            Row(1, "f", tsdata.time, tsdata.user_id, tsdata.temperature_k)
          })
          .saveToRiakTS(bucketName)

        ssc.start()
        ssc.awaitTerminationOrTimeout(5 * 1000)
      }
    })

    val result = executorService.submit(new Callable[Array[Seq[Any]]] {
      override def call(): Array[Seq[Any]] = {
        var rdd = sc.riakTSTable[Row](bucketName)
          .sql(s"SELECT user_id, temperature_k FROM $bucketName $sqlWhereClause")
        var count = rdd.count()
        while (count < testData.length) {
          TimeUnit.SECONDS.sleep(2)
          rdd = sc.riakTSTable[Row](bucketName)
            .sql(s"SELECT user_id, temperature_k FROM $bucketName $sqlWhereClause")
          count = rdd.count()
        }
        rdd.collect().map(_.toSeq)
      }
    }).get()

    assertEquals(testData.length, result.length)
    assertEqualsUsingJSONIgnoreOrder(
      """
        |[
        | ['bryce',305.37],
        | ['bryce',300.12],
        | ['bryce',295.95],
        | ['ratman',362.121],
        | ['ratman',3502.212]
        |]
      """.stripMargin, result)
  }
}
Example 2
Source File: WalletBoxSerializerTest.scala From Sidechains-SDK with MIT License
package com.horizen

import scorex.core.{NodeViewModifier, bytesToId, idToBytes}
import com.horizen.box.{Box, BoxSerializer}
import com.horizen.companion.SidechainBoxesCompanion
import com.horizen.customtypes.{CustomBox, CustomBoxSerializer}
import com.horizen.fixtures.BoxFixture
import com.horizen.proposition.Proposition
import com.horizen.utils.BytesUtils
import org.junit.{Before, Test}
import org.junit.Assert._
import org.scalatest.junit.JUnitSuite

import java.util.{HashMap => JHashMap}
import java.lang.{Byte => JByte}
import scala.util.Random

class WalletBoxSerializerTest extends JUnitSuite with BoxFixture {

  @Test
  def WalletBoxSerializerTest_SerializationTest(): Unit = {
    val transactionIdBytes = new Array[Byte](32)
    var customBoxesSerializers: JHashMap[JByte, BoxSerializer[SidechainTypes#SCB]] = new JHashMap()
    customBoxesSerializers.put(CustomBox.BOX_TYPE_ID, CustomBoxSerializer.getSerializer.asInstanceOf[BoxSerializer[SidechainTypes#SCB]])
    val sidechainBoxesCompanion = SidechainBoxesCompanion(customBoxesSerializers)

    var serializer: WalletBoxSerializer = null
    var bytes: Array[Byte] = null

    // Test 1: serialization for core Box
    Random.nextBytes(transactionIdBytes)
    val walletBoxWithRegularBox = new WalletBox(
      getRegularBox(getPrivateKey25519("seed1".getBytes), 1, 100),
      bytesToId(transactionIdBytes),
      10000)
    serializer = walletBoxWithRegularBox.serializer(sidechainBoxesCompanion)
    bytes = serializer.toBytes(walletBoxWithRegularBox)
    val parsedWalletBoxWithRegularBox = serializer.parseBytesTry(bytes).get
    assertEquals("Core WalletBoxes expected to be equal.", walletBoxWithRegularBox, parsedWalletBoxWithRegularBox)

    // Test 2: serialization of custom Box
    Random.nextBytes(transactionIdBytes)
    val walletBoxWithCustomBox = new WalletBox(
      getCustomBox.asInstanceOf[SidechainTypes#SCB],
      bytesToId(transactionIdBytes),
      20000)
    serializer = walletBoxWithCustomBox.serializer(sidechainBoxesCompanion)
    bytes = serializer.toBytes(walletBoxWithCustomBox)
    val parsedWalletBoxWithCustomBox = serializer.parseBytesTry(bytes).get
    assertEquals("Custom WalletBoxes expected to be equal.", walletBoxWithCustomBox, parsedWalletBoxWithCustomBox)

    // Test 3: try parse of broken bytes
    val failureExpected: Boolean = new WalletBoxSerializer(sidechainBoxesCompanion).parseBytesTry("broken bytes".getBytes).isFailure
    assertEquals("Failure during parsing expected.", true, failureExpected)
  }
}
Example 3
Source File: HttpServerTest.scala From exhibitor-mesos-framework with Apache License 2.0
package ly.stealth.mesos.exhibitor

import ly.stealth.mesos.exhibitor.Cli.sendRequest
import ly.stealth.mesos.exhibitor.Util.parseMap
import org.junit.Assert._
import org.junit.{After, Before, Test}

import scala.collection.mutable

class HttpServerTest extends MesosTestCase {

  @Before
  override def before() {
    super.before()
    Config.api = "http://localhost:8000"
    HttpServer.start(resolveDeps = false)
  }

  @After
  override def after() {
    HttpServer.stop()
    super.after()
  }

  @Test
  def addServer() {
    val response = sendRequest("/add", parseMap("id=0,cpu=0.6,mem=128,port=3000..8000")).as[ApiResponse]

    assertEquals(1, Scheduler.cluster.servers.size)
    val server = Scheduler.cluster.servers.head
    assertEquals("0", server.id)
    assertEquals(0.6, server.config.cpus, 0.001)
    assertEquals(128, server.config.mem, 0.001)
    assertEquals(1, server.config.ports.size)
    assertEquals(3000, server.config.ports.head.start)
    assertEquals(8000, server.config.ports.head.end)

    assertTrue(response.message.contains("Added servers"))
    assert(response.success)
    assertNotEquals(None, response.value)
    ExhibitorServerTest.assertServerEquals(server, response.value.get.servers.head)
  }

  @Test
  def configServer() {
    sendRequest("/add", parseMap("id=0"))
    val response = sendRequest("/config", parseMap("id=0,zkconfigconnect=192.168.3.1:2181,zookeeper-install-directory=/tmp/zookeeper")).as[ApiResponse]

    val serverOpt = Scheduler.cluster.getServer("0")
    assertNotEquals(None, serverOpt)

    val server = serverOpt.get
    assertEquals("0", server.id)
    assertEquals(mutable.Map("zkconfigconnect" -> "192.168.3.1:2181"), server.config.exhibitorConfig)
    assertEquals(mutable.Map("zookeeper-install-directory" -> "/tmp/zookeeper"), server.config.sharedConfigOverride)

    assertTrue(response.success)
    assertTrue(response.message.contains("Updated configuration"))
    assertNotEquals(None, response.value)
    ExhibitorServerTest.assertServerEquals(server, response.value.get.servers.head)
  }

  @Test
  def clusterStatus() {
    sendRequest("/add", parseMap("id=0"))
    sendRequest("/add", parseMap("id=1"))
    sendRequest("/add", parseMap("id=2"))

    val response = sendRequest("/status", Map()).as[ApiResponse]
    assertTrue(response.success)
    assertNotEquals(None, response.value)
    assertEquals(3, response.value.get.servers.size)
    assertEquals(3, response.value.get.servers.map(_.id).distinct.size)
  }

  @Test
  def removeServer() {
    sendRequest("/add", parseMap("id=0"))
    sendRequest("/add", parseMap("id=1"))
    sendRequest("/add", parseMap("id=2"))

    sendRequest("/remove", parseMap("id=1"))
    assertEquals(2, Scheduler.cluster.servers.size)
  }

  @Test
  def startStopServer() {
    sendRequest("/add", parseMap("id=0"))

    val startResponse = sendRequest("/start", parseMap("id=0")).as[ApiResponse]
    assertTrue(startResponse.success)
    assertTrue(startResponse.message.contains("Started servers"))
    assertNotEquals(None, startResponse.value)
    assertEquals(ExhibitorServer.Stopped, startResponse.value.get.servers.head.state)

    val stopResponse = sendRequest("/stop", parseMap("id=0")).as[ApiResponse]
    assertTrue(stopResponse.success)
    assertTrue(stopResponse.message.contains("Stopped servers"))
    assertNotEquals(None, stopResponse.value)
    assertEquals(ExhibitorServer.Added, stopResponse.value.get.servers.head.state)
  }
}
Example 4
Source File: FunctionsAndMethodsUnitTest.scala From scala-tutorials with MIT License
package com.baeldung.scala.functionsandmethods

import org.junit.Assert.assertEquals
import org.junit.{Before, Test}

import scala.util.Random

class FunctionsAndMethodsUnitTest {

  @Test
  def givenAnonymousFunction_whenPassItToAMethodCall_thenAnonymousFunctionUsage(): Unit = {
    val result1 = FunctionsAndMethods.anonymousFunctionUsage((number: Int) => number + 1)
    val result2 = FunctionsAndMethods.anonymousFunctionUsageWithApply((number: Int) => number + 1)
    assertEquals(result1, result2)
  }

  @Test
  def givenFunction_whenCheckingItsType_thenItIsAFunctionN(): Unit = {
    val functionWithoutParameters = () => Random.nextInt()
    val functionWithOneParameter = (number: Int) => number + 1
    val functionWithTwoParameters = (x: Int, y: Int) => (x + 1, y + 1)

    assert(functionWithoutParameters.isInstanceOf[Function0[Int]])
    assert(functionWithOneParameter.isInstanceOf[Function1[Int, Int]])
    assert(functionWithTwoParameters.isInstanceOf[Function2[Int, Int, (Int, Int)]])
  }

  @Test
  def givenByValueFunction_whenCallIt_thenValuesAreEquals(): Unit = {
    val (firstAccess, secondAccess) = FunctionsAndMethods.byValue(Random.nextInt)
    assert(firstAccess == secondAccess)
  }

  @Test
  def givenByNameFunction_whenCallIt_thenValuesAreDifferent(): Unit = {
    val (firstAccess, secondAccess) = FunctionsAndMethods.byName(Random.nextInt)
    assert(firstAccess != secondAccess)
  }

  @Test
  def givenExtensionMethod_whenImportInContext_thenWeCanUseIt(): Unit = {
    import FunctionsAndMethods._
    assertEquals(true, 10.isOdd)
    assertEquals(false, 11.isOdd)
  }

  @Test
  def givenLine45_whenUseItInAPlot_thenCorrectResults(): Unit = {
    val a45DegreeLine = FunctionsAndMethods.line(1, 0)
    val results = FunctionsAndMethods.plot(a45DegreeLine)
    val expected = List(-10.0, -9.0, -8.0, -7.0, -6.0, -5.0, -4.0, -3.0, -2.0, -1.0, 0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0)
    assertEquals(expected, results)
  }

  @Test
  def givenNestedMethod_whenUseIt_thenCorrectResults(): Unit = {
    val factorialResult = FunctionsAndMethods.factorial(10)
    val expected = 3628800
    assertEquals(expected, factorialResult)
  }

  @Test
  def givenParameterizedMethod_whenUseIt_thenCorrectResults(): Unit = {
    val strings = Seq("a", "b", "c")
    val first = FunctionsAndMethods.pop(strings)
    assertEquals("a", first)

    val ints = Seq(10, 3, 11, 22, 10)
    val second = FunctionsAndMethods.pop(ints)
    assertEquals(10, second)
  }
}
Example 5
Source File: SharedSparkSessionSuite.scala From ecosystem with Apache License 2.0
package org.tensorflow.spark.datasources.tfrecords

import java.io.File

import org.apache.commons.io.FileUtils
import org.apache.spark.SharedSparkSession
import org.junit.{After, Before}
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}

trait BaseSuite extends WordSpecLike with Matchers with BeforeAndAfterAll

class SharedSparkSessionSuite extends SharedSparkSession with BaseSuite {
  val TF_SANDBOX_DIR = "tf-sandbox"
  val file = new File(TF_SANDBOX_DIR)

  @Before
  override def beforeAll() = {
    super.setUp()
    FileUtils.deleteQuietly(file)
    file.mkdirs()
  }

  @After
  override def afterAll() = {
    FileUtils.deleteQuietly(file)
    super.tearDown()
  }
}
Example 6
Source File: BrokerCommandsTests.scala From activemq-cli with Apache License 2.0
package activemq.cli.command

import activemq.cli.ActiveMQCLI
import activemq.cli.command.CommandsTests._
import activemq.cli.command.MessageCommandsTests._
import activemq.cli.util.Console._
import java.io.File
import org.junit.After
import org.junit.AfterClass
import org.junit.Assert.assertEquals
import org.junit.Assert.assertFalse
import org.junit.Assert.assertNotNull
import org.junit.Assert.assertNull
import org.junit.Assert.assertTrue
import org.junit.Before
import org.junit.BeforeClass
import org.junit.Rule
import org.junit.rules.TemporaryFolder
import org.junit.Test
import org.springframework.shell.Bootstrap
import org.springframework.shell.core.CommandResult
import org.springframework.shell.core.JLineShellComponent
import scala.xml.XML

class BrokerCommandsTests {

  @Before
  def before = {
    assertTrue(shell.executeCommand("remove-all-queues --force").isSuccess)
    assertTrue(shell.executeCommand("remove-all-topics --force").isSuccess)
  }

  @Test
  def testExportBrokerFileAlreadyExists = {
    val testExportBrokerFileAlreadyExists = File.createTempFile("MessageCommandsTests_testExportBrokerFileAlreadyExists", ".xml")
    try assertEquals(
      warn(s"File '${testExportBrokerFileAlreadyExists.getCanonicalPath}' already exists"),
      shell.executeCommand(s"export-broker --file ${testExportBrokerFileAlreadyExists.getAbsolutePath}").getResult
    ) finally testExportBrokerFileAlreadyExists.delete
  }

  @Test
  def testAvailabilityIndicators: Unit = {
    assertTrue(shell.executeCommand("disconnect").isSuccess)
    try {
      List("info", "disconnect", "export-broker").map(command ⇒ {
        assertCommandFailed(shell.executeCommand(command))
      })
    } finally {
      assertTrue(shell.executeCommand("connect --broker test").isSuccess)
    }
  }
}

object BrokerCommandsTests {
  val shell = createShell

  @BeforeClass
  def beforeClass() = startAndConnectToEmbeddedBroker(shell)

  @AfterClass
  def afterClass() = stopEmbeddedBroker(shell)
}
Example 7
Source File: SchrodingerExceptionTest.scala From aloha with MIT License
package com.eharmony.aloha.ex

import org.junit.{Before, Test}
import org.junit.Assert._
import java.io.{PrintWriter, OutputStreamWriter, ByteArrayOutputStream, PrintStream}

class SchrodingerExceptionTest {

  private[this] var ex: SchrodingerException = _

  @Before
  def before() {
    ex = new SchrodingerException
  }

  @Test
  def testFillInStackTrace() {
    assertTrue(new SchrodingerException().fillInStackTrace().isInstanceOf[SchrodingerException])
  }

  @Test(expected = classOf[SchrodingerException])
  def testGetMessage() {
    ex.getMessage()
  }

  @Test(expected = classOf[SchrodingerException])
  def testGetStackTrace() {
    ex.getStackTrace()
  }

  @Test(expected = classOf[SchrodingerException])
  def testGetCause() {
    ex.getCause()
  }

  @Test(expected = classOf[SchrodingerException])
  def testSetStackTrace() {
    ex.setStackTrace(Array.empty)
  }

  @Test(expected = classOf[SchrodingerException])
  def testGetLocalizedMessage() {
    ex.getLocalizedMessage()
  }

  @Test(expected = classOf[SchrodingerException])
  def testPrintStackTraceEmpty() {
    ex.printStackTrace()
  }

  @Test(expected = classOf[SchrodingerException])
  def testPrintStackTraceStream() {
    val baos = new ByteArrayOutputStream()
    val ps = new PrintStream(baos)
    ex.printStackTrace(ps)
  }

  @Test(expected = classOf[SchrodingerException])
  def testPrintStackTraceWriter() {
    val baos = new ByteArrayOutputStream()
    val osw = new OutputStreamWriter(baos)
    val ps = new PrintWriter(osw)
    ex.printStackTrace(ps)
  }

  @Test(expected = classOf[SchrodingerException])
  def testInitCause() {
    ex.initCause(new Throwable)
  }

  @Test(expected = classOf[SchrodingerException])
  def testToString() {
    ex.toString()
  }

  @Test
  def testNoThrowForSchrodingerExceptionWithSchrodingerExceptionCause() {
    new SchrodingerException(new SchrodingerException)
  }

  @Test
  def testNoThrowForSchrodingerExceptionWithExceptionCause() {
    new SchrodingerException(new Exception)
  }

  @Test(expected = classOf[SchrodingerException])
  def testThrowForThrowableWithSchrodingerExceptionCause() {
    new Throwable(ex)
  }

  @Test(expected = classOf[SchrodingerException])
  def testThrowForExceptionWithSchrodingerExceptionCause() {
    new Exception(ex)
  }

  @Test(expected = classOf[SchrodingerException])
  def testThrowForRuntimeExceptionWithSchrodingerExceptionCause() {
    new RuntimeException(ex)
  }
}
Example 8
Source File: VwFeatureNormalizerTest.scala From aloha with MIT License
package com.eharmony.aloha.dataset.vw

import org.junit.Assert._
import org.junit.{Test, Before}
import org.junit.runner.RunWith
import org.junit.runners.BlockJUnit4ClassRunner

@RunWith(classOf[BlockJUnit4ClassRunner])
class VwFeatureNormalizerTest {
  private[this] var normalizer: VwFeatureNormalizer = _

  @Before
  def setup() {
    normalizer = new VwFeatureNormalizer
  }

  @Test
  def testBlank() {
    assertEquals("", normalizer("").toString)
  }

  @Test
  def testSimple() {
    val vwLine: String = "1 1| |A a b c"
    assertEquals("1 1| |A:0.57735 a b c", normalizer.apply(vwLine).toString)
  }

  @Test
  def testMultipleNamespaces() {
    val vwLine: String = "1 1| |A a b c |b 1=2 3=4"
    assertEquals("1 1| |A:0.57735 a b c |b:0.70711 1=2 3=4", normalizer.apply(vwLine).toString)
  }

  @Test
  def testWithWeights() {
    val vwLine: String = "1 1| |A a:0.987 b c:0.435"
    assertEquals("1 1| |A:0.67988 a:0.987 b c:0.435", normalizer.apply(vwLine).toString)
  }
}
Example 9
Source File: TestWithSparkContext.scala From aerosolve with Apache License 2.0
package com.airbnb.common.ml.util.testutil

import org.apache.spark.SparkContext
import org.junit.After
import org.junit.Before

trait TestWithSparkContext {

  @transient private var _sc: Option[SparkContext] = None

  // Can't be called before `initSparkContext()`
  def sc: SparkContext = {
    _sc.get
  }

  @Before
  def initContexts(): Unit = {
    initSparkContext()
  }

  @After
  def cleanupContexts(): Unit = {
    cleanupSparkContext()
  }

  protected def initSparkContext(): Unit = {
    _sc = Some(TestSparkContextProvider.createContext())
  }

  protected def cleanupSparkContext(): Unit = {
    TestSparkContextProvider.stopContext(sc)
    _sc = None
  }
}
Example 10
Source File: TestWithHiveContext.scala From aerosolve with Apache License 2.0
package com.airbnb.common.ml.util.testutil

import org.apache.spark.sql.hive.test.TestHiveContext
import org.junit.After
import org.junit.Before

trait TestWithHiveContext extends TestWithSparkContext {

  @transient private var _hc: Option[TestHiveContext] = None

  // Can't be called before `initHiveContext()`
  def hc: TestHiveContext = _hc.get

  @Before
  override def initContexts(): Unit = {
    initSparkContext()
    initHiveContext()
  }

  @After
  override def cleanupContexts(): Unit = {
    cleanupSparkContext()
    cleanupHiveContext()
  }

  protected def initHiveContext(): Unit = {
    _hc = Some(TestHiveContextProvider.createContext(sc))
  }

  protected def cleanupHiveContext(): Unit = {
    TestHiveContextProvider.stopContext(hc)
    _hc = None
  }
}
Example 11
Source File: TestSchemaCache.scala From incubator-daffodil with Apache License 2.0
package org.apache.daffodil.tdml

import java.io.File
import org.junit.Assert.assertEquals
import org.junit.Test
import java.io.FileOutputStream
import org.apache.daffodil.api.URISchemaSource
import org.junit.Before
import scala.Right

class TestSchemaCache {

  object SCache extends SchemaCache[Null, Null]

  var compileCount = 0
  var originalUSS: URISchemaSource = null
  var newUSS: URISchemaSource = null
  var tempFile: File = null

  @Before
  def setup: Unit = {
    compileCount = 0
    SCache.resetCache
    tempFile = java.io.File.createTempFile("tdml", "tdml")
    tempFile.deleteOnExit()
    touchFile()
    val originalURI = tempFile.toURI
    originalUSS = URISchemaSource(originalURI)
    val newURI = tempFile.toURI
    newUSS = URISchemaSource(newURI)
  }

  def touchFile(): Unit = {
    val startingModTime = tempFile.lastModified()
    var iters = 0
    while (tempFile.lastModified() <= startingModTime) {
      iters += 1
      Thread.sleep(100)
      val os = new FileOutputStream(tempFile)
      os.write(0)
      os.flush()
      os.close()
    }
    // println("iters = " + iters)
  }

  def compileTheSchema(uss: URISchemaSource): Unit = {
    SCache.compileAndCache(uss, false, false, null, null) {
      compileCount += 1
      uss.newInputSource().getByteStream().close()
      Right(null)
    }
  }

  @Test
  def testReset: Unit = {
    compileTheSchema(originalUSS)
    SCache.resetCache
  }

  @Test
  def testSameFileCompiledOnce: Unit = {
    compileTheSchema(originalUSS)
    assertEquals(1, compileCount)
    compileTheSchema(newUSS) // file has not been touched, so this should hit the cache.
    assertEquals(1, compileCount)
  }

  @Test
  def testSameFileCompiledTwice: Unit = {
    compileTheSchema(originalUSS)
    assertEquals(1, compileCount)
    touchFile()
    compileTheSchema(newUSS) // file has changed
    assertEquals(2, compileCount)
  }
}
Example 12
Source File: PrometheusSinkSuite.scala From spark-metrics with Apache License 2.0
package org.apache.spark.banzaicloud.metrics.sink

import org.apache.spark.metrics.MetricsSystem
import org.apache.spark.{SecurityManager, SparkConf}
import org.junit.{After, Before, Test}

class PrometheusSinkSuite {
  private val sinkClassPropertyName = "spark.metrics.conf.*.sink.prometheus.class"
  private val sinkClassPropertyValue = "org.apache.spark.banzaicloud.metrics.sink.PrometheusSink"

  @Test
  def testThatPrometheusSinkCanBeLoaded() = {
    val instance = "driver"
    val conf = new SparkConf(true)
    val sm = new SecurityManager(conf)
    val ms = MetricsSystem.createMetricsSystem(instance, conf, sm)
    ms.start()
    ms.stop()
  }

  @Before
  def tearDown(): Unit = {
    System.setProperty(sinkClassPropertyName, sinkClassPropertyValue)
  }

  @After
  def setUp(): Unit = {
    System.clearProperty(sinkClassPropertyName)
  }
}
Example 13
Source File: SharedSparkSessionSuite.scala From spark-tensorflow-connector with Apache License 2.0
package org.trustedanalytics.spark.datasources.tensorflow

import java.io.File

import org.apache.commons.io.FileUtils
import org.apache.spark.SharedSparkSession
import org.junit.{After, Before}
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}

trait BaseSuite extends WordSpecLike with Matchers with BeforeAndAfterAll

class SharedSparkSessionSuite extends SharedSparkSession with BaseSuite {
  val TF_SANDBOX_DIR = "tf-sandbox"
  val file = new File(TF_SANDBOX_DIR)

  @Before
  override def beforeAll() = {
    super.setUp()
    FileUtils.deleteQuietly(file)
    file.mkdirs()
  }

  @After
  override def afterAll() = {
    FileUtils.deleteQuietly(file)
    super.tearDown()
  }
}
Example 14
Source File: TestKafkaClient.scala From ohara with Apache License 2.0
package oharastream.ohara.shabondi.common

import java.util.concurrent.TimeUnit

import akka.actor.ActorSystem
import oharastream.ohara.common.data.{Cell, Row}
import oharastream.ohara.common.setting.TopicKey
import oharastream.ohara.common.util.{CommonUtils, Releasable}
import oharastream.ohara.shabondi.{BasicShabondiTest, KafkaSupport}
import org.junit.{After, Before, Test}
import org.scalatest.matchers.should.Matchers._

import scala.concurrent.ExecutionContext.Implicits._
import scala.concurrent.duration.Duration
import scala.concurrent.{Await, Future}

final class TestKafkaClient extends BasicShabondiTest {
  import oharastream.ohara.shabondi.common.ConvertSupport._

  implicit lazy val system: ActorSystem = ActorSystem("shabondi-test")

  private[this] val topicKey = TopicKey.of("group", CommonUtils.randomString(5))

  @Before
  def before(): Unit = createTestTopic(topicKey)

  @After
  override def tearDown(): Unit = topicAdmin.deleteTopic(topicKey)

  @Test
  def testSingleProducer(): Unit = {
    val producer = KafkaSupport.newProducer(brokerProps)
    try {
      val row = Row.of(Cell.of("col1", 100))
      val sender = producer
        .sender()
        .key(row)
        .value(Array[Byte]())
        .topicKey(topicKey)

      val future = sender.send.toScala
      val metadata = Await.result(future, Duration(10, TimeUnit.SECONDS))

      metadata.topicKey should ===(topicKey)
      metadata.offset should ===(0)
      metadata.partition should ===(0)
    } finally {
      Releasable.close(producer)
    }
  }

  @Test
  def testConsumer(): Unit = {
    val producer = KafkaSupport.newProducer(brokerProps)
    try {
      Future.sequence {
        (1 to 9)
          .map(i => Row.of(Cell.of(s"col-$i", i * 10)))
          .map(row => producer.sender().key(row).value(Array[Byte]()).topicKey(topicKey))
          .map { sender => sender.send.toScala }
      }

      val records = KafkaSupport.pollTopicOnce(brokerProps, topicKey, 10, 10)
      records.size should ===(9)
      records(0).topicKey shouldBe topicKey
      records(0).key.isPresent shouldBe true
      records(0).key.get shouldBe Row.of(Cell.of("col-1", 10))

      records(8).topicKey shouldBe topicKey
      records(8).key.isPresent shouldBe true
      records(8).key.get shouldBe Row.of(Cell.of("col-9", 90))
    } finally {
      Releasable.close(producer)
    }
  }
}
Example 15
Source File: SimpleFormatterTest.scala From scala-js-java-logging with BSD 3-Clause "New" or "Revised" License
package org.scalajs.testsuite.javalib.util.logging

import java.util.logging.{Level, LogRecord, SimpleFormatter}

import org.junit.{Before, Test}
import org.junit.Assert._
import org.scalajs.testsuite.utils.Platform

class SimpleFormatterTest {

  @Before
  def clearProperties(): Unit = {
    System.clearProperty("java.util.logging.SimpleFormatter.format")
  }

  @Test
  def test_default_format(): Unit = {
    val f = new SimpleFormatter()
    val r = new LogRecord(Level.INFO, "message")
    r.setLoggerName("logger")
    assertTrue(f.format(r).contains("message"))
    assertTrue(f.format(r).contains("logger"))
  }

  @Test
  def test_format_with_params(): Unit = {
    val f = new SimpleFormatter()

    val msg = "message with params {0} {1}"
    val r = new LogRecord(Level.INFO, msg)
    assertTrue(f.format(r).contains(msg))

    val r1 = new LogRecord(Level.INFO, msg)
    r1.setParameters(Array("param1"))
    assertTrue(f.format(r1).contains("message with params param1 {1}"))

    val r2 = new LogRecord(Level.INFO, msg)
    r2.setParameters(Array("param1", new java.lang.Integer(20)))
    assertTrue(f.format(r2).contains("message with params param1 20"))

    // Bogus cases
    val r3 = new LogRecord(Level.INFO, "message with params {0} {abc}")
    r3.setParameters(Array("param1", "test"))
    assertTrue(f.format(r3).contains("message with params {0} {abc}"))

    val r4 = new LogRecord(Level.INFO, "message with params {0} {{1}}")
    r4.setParameters(Array("param1", "test"))
    assertTrue(f.format(r4).contains("message with params {0} {{1}}"))

    val r5 = new LogRecord(Level.INFO, "message with params {0} {{1}")
    r5.setParameters(Array("param1", "test"))
    assertTrue(f.format(r5).contains("message with params {0} {{1}"))

    val r6 = new LogRecord(Level.INFO, "message with params {0} {-1}")
    r6.setParameters(Array("param1", "test"))
    assertTrue(f.format(r6).contains("message with params {0} {-1}"))

    val r7 = new LogRecord(Level.INFO, "message with params {0} {1")
    r7.setParameters(Array("param1", "test"))
    assertTrue(f.format(r7).contains("message with params {0} {1"))
  }

  @Test
  def test_format_property(): Unit = {
    System.setProperty("java.util.logging.SimpleFormatter.format", "%3$s - %5$s")
    val f = new SimpleFormatter()
    val r = new LogRecord(Level.INFO, "message")
    r.setLoggerName("logger")
    // The JVM has a different logic for formatting though the javadocs
    // indicate that the property above should be used
    if (!Platform.executingInJVM)
      assertEquals("logger - message", f.format(r))
  }
}
Example 16
Source File: SparkDataframesTest.scala From spark-riak-connector with Apache License 2.0
package com.basho.riak.spark.rdd

import scala.reflect.runtime.universe
import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.SQLContext
import org.junit.Assert._
import org.junit.{Before, Test}
import com.basho.riak.spark.toSparkContextFunctions
import org.junit.experimental.categories.Category

case class TestData(id: String, name: String, age: Int, category: String)

@Category(Array(classOf[RiakTSTests]))
class SparkDataframesTest extends AbstractRiakSparkTest {
  private val indexName = "creationNo"

  protected override val jsonData = Some(
    """[
      | {key: 'key1', value: {id: 'u1', name: 'Ben', age: 20, category: 'CategoryA'}},
      | {key: 'key2', value: {id: 'u2', name: 'Clair', age: 30, category: 'CategoryB'}},
      | {key: 'key3', value: {id: 'u3', name: 'John', age: 70}},
      | {key: 'key4', value: {id: 'u4', name: 'Chris', age: 10, category: 'CategoryC'}},
      | {key: 'key5', value: {id: 'u5', name: 'Mary', age: 40, category: 'CategoryB'}},
      | {key: 'key6', value: {id: 'u6', name: 'George', age: 50, category: 'CategoryC'}}
      |]""".stripMargin)

  protected override def initSparkConf() = super.initSparkConf().setAppName("Dataframes Test")

  var sqlContextHolder: SQLContext = _
  var df: DataFrame = _

  @Before
  def initializeDF(): Unit = {
    val sqlContext = new org.apache.spark.sql.SQLContext(sc)
    import sqlContext.implicits._
    sqlContextHolder = sqlContext
    df = sc.riakBucket[TestData](DEFAULT_NAMESPACE.getBucketNameAsString)
      .queryAll().toDF
    df.registerTempTable("test")
  }

  @Test
  def schemaTest(): Unit = {
    df.printSchema()
    val schema = df.schema.map(_.name).toList
    val fields = universe.typeOf[TestData].members.withFilter(!_.isMethod).map(_.name.toString.trim).toList
    assertEquals(schema.sorted, fields.sorted)
  }

  @Test
  def sqlQueryTest(): Unit = {
    val sqlResult = sqlContextHolder.sql("select * from test where category >= 'CategoryC'").toJSON.collect
    val expected =
      """ [
        | {id:'u4',name:'Chris',age:10,category:'CategoryC'},
        | {id:'u6',name:'George',age:50,category:'CategoryC'}
        | ]""".stripMargin
    assertEqualsUsingJSONIgnoreOrder(expected, stringify(sqlResult))
  }

  @Test
  def udfTest(): Unit = {
    sqlContextHolder.udf.register("stringLength", (s: String) => s.length)
    val udf = sqlContextHolder.sql("select name, stringLength(name) strLgth from test order by strLgth, name").toJSON.collect
    val expected =
      """ [
        | {name:'Ben',strLgth:3},
        | {name:'John',strLgth:4},
        | {name:'Mary',strLgth:4},
        | {name:'Chris',strLgth:5},
        | {name:'Clair',strLgth:5},
        | {name:'George',strLgth:6}
        | ]""".stripMargin
    assertEqualsUsingJSON(expected, stringify(udf))
  }

  @Test
  def grouppingTest(): Unit = {
    val groupped = df.groupBy("category").count.toJSON.collect
    val expected =
      """ [
        | {category:'CategoryA',count:1},
        | {category:'CategoryB',count:2},
        | {category:'CategoryC',count:2},
        | {count:1}
        | ]""".stripMargin
    assertEqualsUsingJSONIgnoreOrder(expected, stringify(groupped))
  }

  @Test
  def sqlVsFilterTest(): Unit = {
    val sql = sqlContextHolder.sql("select id, name from test where age >= 50").toJSON.collect
    val filtered = df.where(df("age") >= 50).select("id", "name").toJSON.collect
    assertEqualsUsingJSONIgnoreOrder(stringify(sql), stringify(filtered))
  }
}
Example 17
Source File: AbstractFailoverOfflineTest.scala From spark-riak-connector with Apache License 2.0
package com.basho.riak.spark.rdd.failover

import com.basho.riak.client.core.query.Namespace
import com.basho.riak.client.core.util.HostAndPort
import com.basho.riak.stub.{RiakMessageHandler, RiakNodeStub}
import org.apache.commons.lang3.exception.ExceptionUtils
import org.apache.spark.{Logging, SparkConf, SparkContext}
import org.hamcrest.{Description, Matchers}
import org.junit.internal.matchers.ThrowableCauseMatcher
import org.junit.{After, Before}

import scala.collection.JavaConversions._

abstract class AbstractFailoverOfflineTest extends Logging {

  protected final val NAMESPACE = new Namespace("default", "test-bucket")
  protected final val COVERAGE_ENTRIES_COUNT = 64

  protected var sc: SparkContext = _
  protected var riakNodes: Seq[(HostAndPort, RiakNodeStub)] = _ // tuple HostAndPort -> stub

  val riakHosts: Int = 1
  val riakMessageHandler: Option[RiakMessageHandler] = None

  def sparkConf: SparkConf = new SparkConf(false)
    .setMaster("local")
    .setAppName(getClass.getSimpleName)
    .set("spark.riak.connection.host", riakNodes.map { case (hp, _) => s"${hp.getHost}:${hp.getPort}" }.mkString(","))
    .set("spark.riak.output.wquorum", "1")
    .set("spark.riak.input.fetch-size", "2")

  def initRiakNodes(): Seq[(HostAndPort, RiakNodeStub)] = {
    require(riakMessageHandler.isDefined)

    // start riak stubs on localhost and free random port
    (1 to riakHosts).map { _ =>
      val riakNode = RiakNodeStub(riakMessageHandler.get)
      riakNode.start() -> riakNode
    }
  }

  @Before
  def setUp(): Unit = {
    riakNodes = initRiakNodes()
    sc = new SparkContext(sparkConf)
  }

  @After
  def tearDown(): Unit = {
    Option(riakNodes).foreach(_.foreach(n => n._2.stop()))
    Option(sc).foreach(_.stop())
  }

  def distributeEvenly(size: Int, splitCount: Int): Seq[Int] = {
    val (base, rem) = (size / splitCount, size % splitCount)
    (0 until splitCount).map(i => if (i < rem) base + 1 else base)
  }
}

class RootCauseMatcher[T <: Throwable](val excClass: Class[T]) extends ThrowableCauseMatcher[T](Matchers.isA(excClass)) {

  private def getOneBeforeRootCause(item: T): Throwable = {
    val throwables = ExceptionUtils.getThrowableList(item)
    if (throwables.length > 1) {
      throwables.reverse.tail.head
    } else {
      throwables.head
    }
  }

  override def matchesSafely(item: T): Boolean =
    super.matchesSafely(getOneBeforeRootCause(item).asInstanceOf[T])

  override def describeMismatchSafely(item: T, description: Description): Unit =
    super.describeMismatchSafely(getOneBeforeRootCause(item).asInstanceOf[T], description)
}
Example 18
Source File: SparkRDDTest.scala From spark-riak-connector with Apache License 2.0
package com.basho.riak.spark.rdd

import com.basho.riak.client.core.query.Location
import com.basho.riak.spark._
import com.basho.riak.spark.rdd.connector.RiakConnector
import org.apache.spark.rdd.RDD
import org.junit.Assert._
import org.junit.experimental.categories.Category
import org.junit.{Before, Test}

import scala.collection.mutable.ListBuffer

case class UserData(timestamp: String, user_id: String)

@Category(Array(classOf[RiakCommonTests]))
class SparkRDDTest extends AbstractRiakSparkTest {
  private val CREATION_INDEX = "creationNo"

  protected override val jsonData = Option(
    """ [
      | {key: 'key-1', indexes: {creationNo: 1}, value: {timestamp: '2014-11-24T13:14:04.823Z', user_id: 'u1'}},
      | {key: 'key-2', indexes: {creationNo: 2}, value: {timestamp: '2014-11-24T13:15:04.823Z', user_id: 'u1'}},
      | {key: 'key-3', indexes: {creationNo: 3}, value: {timestamp: '2014-11-24T13:18:04', user_id: 'u1'}},
      | {key: 'key-4', indexes: {creationNo: 4}, value: {timestamp: '2014-11-24T13:14:04Z', user_id: 'u2'}},
      | {key: 'key-5', indexes: {creationNo: 5}, value: {timestamp: '2014-11-24T13:16:04.823Z', user_id: 'u3'}},
      | {key: 'key-6', indexes: {creationNo: 6}, value: {timestamp: '2014-11-24T13:21:04.823Z', user_id: 'u3'}}
      | ]
    """.stripMargin)

  var rdd: RDD[UserData] = _

  protected override def initSparkConf() = super.initSparkConf()
    .setAppName("RDD tests")

  @Before
  def initializeRDD(): Unit = {
    rdd = sc.riakBucket[UserData](DEFAULT_NAMESPACE.getBucketNameAsString)
      .query2iRange(CREATION_INDEX, 1, 6)
  }

  @Test
  def calculateCount(): Unit = {
    val count = rdd.count()
    assertEquals(6, count) // scalastyle:ignore
  }

  @Test
  def firstElement(): Unit = {
    // Initial implementation fails on this operation
    val first = rdd.first()
  }

  private def calculateUserOrderedTotals() = {
    rdd.map(x => (x.user_id, 1))
      .reduceByKey((a, b) => a + b)
      .sortBy(_._1)
  }

  @Test
  def checkActions(): Unit = {
    val perUserTotalRDD = calculateUserOrderedTotals()
    val data = perUserTotalRDD.collect()
    assertEqualsUsingJSON("[['u1',3],['u2',1],['u3',2]]", data)
  }

  @Test
  def storePairRDDWithDefaultMapper(): Unit = {
    val perUserTotalRDD = calculateUserOrderedTotals()
    perUserTotalRDD.saveToRiak(DEFAULT_NAMESPACE_4STORE.getBucketNameAsString)

    // Read data from riak and populate data buffer
    val data = ListBuffer[(String, Long)]()
    RiakConnector(sc.getConf).withSessionDo { session => {
      foreachKeyInBucket(session.unwrap(), DEFAULT_NAMESPACE_4STORE, (RiakConnector, l: Location) => {
        val v = readByLocation[Long](session.unwrap(), l)
        data += ((l.getKeyAsString, v))
      })
    }}

    assertEquals(3, data.size)
  }
}
Example 19
Source File: KvStreamingTest.scala From spark-riak-connector with Apache License 2.0
package com.basho.riak.spark.streaming

import java.nio.ByteBuffer
import java.util.concurrent._

import com.basho.riak.spark._
import com.basho.riak.spark.rdd.{AbstractRiakSparkTest, RiakTSTests}
import org.junit.Assert.assertEquals
import org.junit.experimental.categories.Category
import org.junit.{After, Before, Test}

@Category(Array(classOf[RiakTSTests]))
class KvStreamingTest extends AbstractRiakSparkTest with SparkStreamingFixture {

  protected final val VALUES_NUMBER = 5
  protected final val executorService = Executors.newCachedThreadPool()

  private val dataSource = new SocketStreamingDataSource
  private var port = -1

  @Before
  def setUp(): Unit = {
    port = dataSource.start(client => {
      (0 until VALUES_NUMBER).foreach(i => client.write(ByteBuffer.wrap(s"value-$i\n".getBytes)))
      logInfo(s"$VALUES_NUMBER values were sent to client")
    })
  }

  @After
  def tearDown(): Unit = {
    dataSource.stop()
  }

  @Test(timeout = 10 * 1000) // 10 seconds timeout
  def saveToRiak(): Unit = {
    executorService.submit(new Runnable {
      override def run(): Unit = {
        ssc.socketTextStream("localhost", port)
          .saveToRiak(DEFAULT_NAMESPACE_4STORE.getBucketNameAsString)

        ssc.start()
        ssc.awaitTerminationOrTimeout(5 * 1000)
      }
    })

    val data: List[(String, Any)] = executorService.submit(new Callable[List[(String, Any)]] {
      override def call(): List[(String, Any)] = {
        var rdd = sc.riakBucket(DEFAULT_NAMESPACE_4STORE).queryAll()
        var count = rdd.count()
        while (count < VALUES_NUMBER) {
          TimeUnit.SECONDS.sleep(2)
          rdd = sc.riakBucket(DEFAULT_NAMESPACE_4STORE).queryAll()
          count = rdd.count()
        }
        rdd.collect().toList
      }
    }).get()

    assertEquals(VALUES_NUMBER, data.length)
    assertEqualsUsingJSONIgnoreOrder(
      """[
        | ['${json-unit.ignore}', 'value-0'],
        | ['${json-unit.ignore}', 'value-1'],
        | ['${json-unit.ignore}', 'value-2'],
        | ['${json-unit.ignore}', 'value-3'],
        | ['${json-unit.ignore}', 'value-4']
        | ]""".stripMargin, data)
  }
}
Example 20
Source File: SparkStreamingFixture.scala From spark-riak-connector with Apache License 2.0
package com.basho.riak.spark.streaming

import org.apache.spark.{Logging, SparkContext}
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.junit.{After, Before}

trait SparkStreamingFixture extends Logging {

  protected var sc: SparkContext

  protected var ssc: StreamingContext = _

  protected val batchDuration = Seconds(1)

  @Before
  def startStreamingContext(): Unit = {
    ssc = new StreamingContext(sc, batchDuration)
    logInfo("Streaming context created")
  }

  @After
  def stopStreamingContext(): Unit = {
    Option(ssc).foreach(_.stop())
    logInfo("Streaming context stopped")
  }
}
Example 21
Source File: TestingHttpApi.scala From daf-semantics with Apache License 2.0
package it.almawave.linkeddata.kb.http

import play.api.inject.guice.GuiceApplicationBuilder
import org.junit.Test
import org.junit.After
import play.api.Application
import org.junit.Before
import it.almawave.linkeddata.kb.utils.JSONHelper
import play.api.libs.ws.WSClient
import org.asynchttpclient.DefaultAsyncHttpClient
import play.api.libs.ws.ssl.SystemConfiguration
import akka.stream.ActorMaterializer
import play.api.libs.ws.ahc.AhcWSClient
import scala.concurrent.Await
import scala.concurrent.duration.Duration
import java.net.URL
import com.typesafe.config.ConfigFactory

class TestingHttpApi {

  var app: Application = null
  var conf = ConfigFactory.empty()
  var ws: WSClient = null
  var app_url = new URL("http://localhost:8080")

  @Test
  def testing_contexts() {

    // curl -X GET http://localhost:8999/kb/v1/prefixes/lookup?prefix=no_pref
    //   -H "accept: application/json"
    //   -H "content-type: application/json"

    val fut = ws.url(s"http://localhost:8999/kb/v1/prefixes/lookup")
      .withHeaders(("accept", "application/json"))
      .withHeaders(("content-type", "application/json"))
      .withFollowRedirects(true)
      .withQueryString(("prefix", "muapit"))
      .get()

    val results = Await.result(fut, Duration.Inf)
    println(results.body)
  }

  @Before
  def before() {

    app = GuiceApplicationBuilder()
      .build()

    conf = app.configuration.underlying

    // play.app.local.url
    // play.server.http.address
    // play.server.http.port

    println(JSONHelper.writeToString(conf.root().unwrapped()))

    app_url = new URL(conf.getString("app.local.url"))
    println(s"\n\nrunning at ${app_url}")

    val materializer = ActorMaterializer()(app.actorSystem)
    ws = AhcWSClient()(materializer)
  }

  @After
  def after() {
    ws.close()
    app.stop()
  }
}
Example 22
Source File: OntonetHubEndpointsTest.scala From daf-semantics with Apache License 2.0
package clients

import scala.concurrent.Await
import org.junit.After
import org.junit.Before
import org.junit.Test
import play.api.libs.json.JsLookupResult
import play.api.libs.json.JsValue
import scala.concurrent.duration.Duration
import scala.concurrent.ExecutionContext.Implicits._
import utilities.JSONHelper
import org.junit.Assert
import semantic_manager.yaml.OntonetHubProperty
import clients.HTTPClient

class OntonetHubEndpointsTest {

  var http = HTTPClient
  var hub: OntonetHubClient = null

  @Before
  def before() {
    http.start()
    hub = new OntonetHubClient(http.ws, OntonetHubClient.DEFAULT_CONFIG)
  }

  @After
  def after() {
    http.stop()
  }

  // @Test
  def testing_hub_find {

    val (host, port) = ("localhost", 8000)
    val (query, lang, limit) = ("nome", "it", 4)

    val http = HTTPClient
    http.start()
    val ws = http.ws

    val future = ws.url(s"http://${host}:${port}/stanbol/ontonethub/ontologies/find")
      .withHeaders(("accept", "application/json"))
      .withHeaders(("content-type", "application/x-www-form-urlencoded"))
      .withFollowRedirects(true)
      .post(s"name=${query}&lang=${lang}&limit=${limit}")
      .map { item =>
        val json = JSONHelper.pretty(item.body)
        println("\n\n")
        println(json)
        item
      }

    val results = Await.result(future, Duration.Inf)
    Assert.assertTrue(results.status == 200)

    http.stop()
  }

  @Test
  def testing_find_property {

    val (query, lang, limit) = ("nome", "it", 2)

    val future = hub.find_property(query, lang, limit)
    // CHECK for de-coupling from swagger
    // .map(_.map(item => OntonetHubProperty.tupled(OntonetHubClient.models.FindResult.unapply(item).get)))

    println("\n\n############################################ RESULTS")
    val results = Await.result(future, Duration.Inf)
    println(results.mkString("\n\n"))
  }
}
Example 23
Source File: OntologyHubClientTest.scala From daf-semantics with Apache License 2.0
package clients

import java.nio.file.Paths

import org.junit.After
import org.junit.Assert
import org.junit.Assume
import org.junit.Before
import org.junit.BeforeClass
import org.junit.Test
import org.slf4j.LoggerFactory

import play.Logger
import utilities.Adapters.AwaitFuture
import clients.HTTPClient

object OntologyHubClientTest {

  val logger = LoggerFactory.getLogger(this.getClass)

  @BeforeClass
  def check_before() {
    Assume.assumeTrue(ontonethub_is_running)
    logger.info("Ontonethub is UP! [TESTING...]")
  }

  private def ontonethub_is_running = {
    val client = HTTPClient
    client.start()
    val ontonethub = new OntonetHubClient(client.ws)
    val check = ontonethub.status().await
    client.stop()
    check
  }
}
Example 24
Source File: TestContainerRoute.scala From ohara with Apache License 2.0
package oharastream.ohara.configurator.route

import oharastream.ohara.client.configurator._
import oharastream.ohara.common.rule.OharaTest
import oharastream.ohara.common.setting.ObjectKey
import oharastream.ohara.common.util.{CommonUtils, Releasable}
import oharastream.ohara.configurator.Configurator
import org.junit.{After, Before, Test}
import org.scalatest.matchers.should.Matchers._

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.{Await, Future}
import scala.concurrent.duration.Duration

class TestContainerRoute extends OharaTest {
  private[this] val configurator = Configurator.builder.fake(0, 0).build()
  private[this] val containerApi = ContainerApi.access.hostname(configurator.hostname).port(configurator.port)
  private[this] val brokerApi = BrokerApi.access.hostname(configurator.hostname).port(configurator.port)
  private[this] val workerApi = WorkerApi.access.hostname(configurator.hostname).port(configurator.port)

  private[this] val zkClusterKey = ObjectKey.of("default", CommonUtils.randomString(10))
  private[this] val bkClusterKey = ObjectKey.of("default", CommonUtils.randomString(10))
  private[this] val wkClusterKey = ObjectKey.of("default", CommonUtils.randomString(10))

  private[this] val nodeNames: Set[String] = Set("n0", "n1")

  private[this] def result[T](f: Future[T]): T = Await.result(f, Duration("20 seconds"))

  @Before
  def setup(): Unit = {
    val nodeApi = NodeApi.access.hostname(configurator.hostname).port(configurator.port)

    nodeNames.isEmpty shouldBe false
    nodeNames.foreach { n =>
      result(nodeApi.request.nodeName(n).port(22).user("user").password("pwd").create())
    }

    val zk = result(
      ZookeeperApi.access
        .hostname(configurator.hostname)
        .port(configurator.port)
        .request
        .key(zkClusterKey)
        .nodeNames(nodeNames)
        .create()
    )
    zk.key shouldBe zkClusterKey
    result(ZookeeperApi.access.hostname(configurator.hostname).port(configurator.port).start(zk.key))

    val bk = result(brokerApi.request.key(bkClusterKey).zookeeperClusterKey(zkClusterKey).nodeNames(nodeNames).create())
    result(brokerApi.start(bk.key))

    val wk = result(workerApi.request.key(wkClusterKey).brokerClusterKey(bkClusterKey).nodeNames(nodeNames).create())
    result(workerApi.start(wk.key))
  }

  @Test
  def testGetContainersOfBrokerCluster(): Unit = {
    val containerGroups = result(containerApi.get(bkClusterKey))
    containerGroups.size should not be 0
    containerGroups.foreach(group => {
      group.clusterKey shouldBe bkClusterKey
      group.clusterType shouldBe "broker"
      group.containers.size should not be 0
    })
  }

  @Test
  def testGetContainersOfWorkerCluster(): Unit = {
    val containerGroups = result(containerApi.get(wkClusterKey))
    containerGroups.size should not be 0
    containerGroups.foreach(group => {
      group.clusterKey shouldBe wkClusterKey
      group.clusterType shouldBe "worker"
      group.containers.size should not be 0
    })
  }

  @After
  def tearDown(): Unit = Releasable.close(configurator)
}