org.scalatest.GivenWhenThen Scala Examples
The following examples show how to use org.scalatest.GivenWhenThen.
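Before diving into the project examples, here is a minimal sketch of the trait in use, assuming ScalaTest 3.x; the CalculatorSpec class and its arithmetic are invented for illustration and are not drawn from any project below. Given, When, Then, and And each record an informational message that appears alongside the test in the report:

import org.scalatest.GivenWhenThen
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

class CalculatorSpec extends AnyFlatSpec with GivenWhenThen with Matchers {

  behavior of "a calculator"

  it should "add two numbers" in {
    Given("two operands") // each call records an informer in the test report
    val a = 2
    val b = 3

    When("they are added")
    val sum = a + b

    Then("the result is their arithmetic sum")
    sum should be(5)

    And("the order of the operands does not matter")
    (b + a) should be(sum)
  }
}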
Example 1
Source File: SourceFileSequenceBuilderSpec.scala From mvn_scalafmt with Apache License 2.0
package org.antipathy.mvn_scalafmt.builder

import java.io.File
import org.apache.maven.plugin.logging.SystemStreamLog
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.GivenWhenThen
import org.scalatest.matchers.should.Matchers

class SourceFileSequenceBuilderSpec extends AnyFlatSpec with GivenWhenThen with Matchers {

  behavior of "SourceFileSequenceBuilder"

  it should "Create a sequence of valid source paths" in {
    val input = Seq("src/test/scala", "src/main/scala").map(new File(_))
    val result = new SourceFileSequenceBuilder(new SystemStreamLog).build(input).map(_.getName)
    result.contains("SourceFileFormatterSpec.scala") should be(true)
    result.contains("SourceFileSequenceBuilderSpec.scala") should be(true)
    result.contains("ConfigFileValidatorSpec.scala") should be(true)
    result.contains("ScalaFormatter.scala") should be(true)
    result.contains("FormatResult.scala") should be(true)
    result.contains("Formatter.scala") should be(true)
    result.contains("SourceFileFormatter.scala") should be(true)
    result.contains("Builder.scala") should be(true)
    result.contains("SourceFileSequenceBuilder.scala") should be(true)
    result.contains("MavenLogReporter.scala") should be(true)
    result.contains("ConfigFileValidator.scala") should be(true)
    result.contains("Validator.scala") should be(true)
  }

  it should "Create an empty sequence when given invalid paths" in {
    new SourceFileSequenceBuilder(new SystemStreamLog)
      .build(Seq("src/main1/scala", "src/test1/scala").map(new File(_))) should be(Seq())
  }

  it should "Create an empty sequence when given null values" in {
    new SourceFileSequenceBuilder(new SystemStreamLog).build(null) should be(Seq())
  }
}
Example 2
Source File: VertexMeasureConfigurationTest.scala From sparkling-graph with BSD 2-Clause "Simplified" License
package ml.sparkling.graph.api.operators.measures

import ml.sparkling.graph.api.operators.IterativeComputation.BucketSizeProvider
import org.apache.spark.graphx.Graph
import org.scalatest.{FlatSpec, GivenWhenThen}

class VertexMeasureConfigurationTest extends FlatSpec with GivenWhenThen {

  "Creation without parameters" should "be possible" in {
    VertexMeasureConfiguration()
  }

  "Creation with undirected flag" should "be possible" in {
    Given("Directed flag")
    val flag = false
    When("Configuration creation")
    VertexMeasureConfiguration(treatAsUndirected = flag)
  }

  "Creation with bucket size provider" should "be possible" in {
    Given("Bucket size provider")
    val provider: BucketSizeProvider[Long, Long] = (g: Graph[Long, Long]) => 1L
    When("Configuration creation")
    VertexMeasureConfiguration(bucketSizeProvider = provider)
  }

  "Creation with bucket size provider and directed flag" should "be possible" in {
    Given("Bucket size provider")
    val provider: BucketSizeProvider[Long, Long] = (g: Graph[Long, Long]) => 1L
    When("Configuration creation")
    VertexMeasureConfiguration(false, provider)
  }
}
Example 3
Source File: ByIdPredicateTest.scala From sparkling-graph with BSD 2-Clause "Simplified" License
package ml.sparkling.graph.operators.predicates

import org.scalatest.{FlatSpec, GivenWhenThen}

class ByIdPredicateTest extends FlatSpec with GivenWhenThen {

  "True" should "be returned if correct id is given" in {
    Given("Vertex id and predicate")
    val vertexId = 1L
    val predicate = ByIdPredicate(vertexId)
    When("Checked")
    val checkResult: Boolean = predicate.apply(vertexId)
    Then("should return true")
    assert(checkResult)
  }

  "False" should "be returned if incorrect id is given" in {
    Given("Vertex id and predicate")
    val vertexId = 1L
    val predicate = ByIdPredicate(vertexId)
    When("Checked")
    val checkResult: Boolean = predicate.apply(2 * vertexId)
    Then("should return false")
    assert(!checkResult)
  }
}
Example 4
Source File: MetronomeConfigTest.scala From metronome with Apache License 2.0
package dcos.metronome

import com.typesafe.config.ConfigFactory
import org.scalatest.{FunSuite, GivenWhenThen, Matchers}
import play.api.Configuration

class MetronomeConfigTest extends FunSuite with Matchers with GivenWhenThen {

  private def fromConfig(cfg: String): MetronomeConfig =
    new MetronomeConfig(new Configuration(ConfigFactory.parseString(cfg)))

  test("Http and Https ports with valid parseable strings") {
    Given("http Port is a valid port string")
    val httpPort = "9000"
    val httpsPort = "9010"

    When("Config parser tries to extract it")
    val cfg = fromConfig(s"""
        | play.server.http.port="$httpPort"
        | play.server.https.port="$httpsPort"
      """.stripMargin)

    Then("Should return an integer of that given port")
    cfg.httpPort shouldEqual Some(9000)
    cfg.httpsPort shouldEqual 9010
  }

  test("Http overridden with `disabled`") {
    Given("http Port is `disabled`")
    val httpPort = "disabled"
    val httpsPort = "9010"

    When("Config parser tries to extract it")
    val cfg = fromConfig(s"""
        | play.server.http.port="$httpPort"
        | play.server.https.port="$httpsPort"
      """.stripMargin)

    Then("Http port should be None")
    cfg.httpPort shouldEqual None

    Then("Effective port should be https")
    cfg.effectivePort shouldEqual 9010
  }

  test("feature gpu_resources is enabled when gpu_scheduling_behavior is set") {
    Given("A config with gpu_scheduling_behavior")
    val cfg = fromConfig(s"""
        | metronome.gpu_scheduling_behavior="restricted"
      """.stripMargin)

    When("enabled features are requested")

    Then("features should contain gpu_resources")
    cfg.scallopConf.features.toOption.get.contains("gpu_resources") shouldEqual true

    And("gpu_scheduling_behavior must be set")
    cfg.scallopConf.gpuSchedulingBehavior.toOption.contains("restricted") shouldEqual true
  }

  test("feature gpu_resources is disabled when gpu_scheduling_behavior is not set") {
    Given("A config with gpu_scheduling_behavior")
    val cfg = fromConfig("")

    When("enabled features are requested")

    Then("features should contain gpu_resources")
    cfg.scallopConf.features.toOption.get shouldEqual Set.empty

    And("gpu_scheduling_behavior must be set")
    cfg.scallopConf.gpuSchedulingBehavior.toOption shouldEqual Some("undefined")
  }
}
Example 5
Source File: JobRunIdTest.scala From metronome with Apache License 2.0
package dcos.metronome
package model

import dcos.metronome.utils.test.Mockito
import org.scalatest.{FunSuite, GivenWhenThen, Matchers}

class JobRunIdTest extends FunSuite with Matchers with Mockito with GivenWhenThen {

  test("Convert simple appId into JobRunId") {
    Given("a simple appId")
    val jobId: JobId = JobId("test")

    When("The id is converted into a JobRunId")
    val jobRunId = JobRunId(jobId, "20160614133813ap8ZQ")

    Then("It is broken apart correctly")
    jobRunId.jobId shouldEqual JobId("test")
    jobRunId.value shouldEqual "20160614133813ap8ZQ"
  }

  test("Convert appId with dots into JobRunId") {
    Given("an appId with multiple dots")
    val appId: JobId = JobId("test.foo")

    When("The id is converted into a JobRunId")
    val jobRunId = JobRunId(appId, "20160614133813ap8ZQ")

    Then("It is broken apart correctly")
    jobRunId.jobId shouldEqual JobId("test.foo")
    jobRunId.value shouldEqual "20160614133813ap8ZQ"
  }
}
Example 6
Source File: TaskStateTest.scala From metronome with Apache License 2.0
package dcos.metronome
package scheduler

import dcos.metronome.utils.test.Mockito
import mesosphere.marathon.core.condition.Condition
import org.scalatest.{FunSuite, GivenWhenThen, Matchers}
import org.apache.mesos

class TaskStateTest extends FunSuite with Mockito with Matchers with GivenWhenThen {

  test("Mesos TaskState -> TaskState") {
    TaskState(Condition.Error) shouldBe Some(TaskState.Failed)
    TaskState(Condition.Failed) shouldBe Some(TaskState.Failed)
    TaskState(Condition.Finished) shouldBe Some(TaskState.Finished)
    TaskState(Condition.Killed) shouldBe Some(TaskState.Killed)
    TaskState(Condition.Killing) shouldBe Some(TaskState.Running)
    TaskState(Condition.Unreachable) shouldBe Some(TaskState.Failed)
    TaskState(Condition.Running) shouldBe Some(TaskState.Running)
    TaskState(Condition.Staging) shouldBe Some(TaskState.Staging)
    TaskState(Condition.Starting) shouldBe Some(TaskState.Starting)
  }

  def taskStatus(state: mesos.Protos.TaskState): mesos.Protos.TaskStatus = {
    mesos.Protos.TaskStatus
      .newBuilder()
      .setState(state)
      .buildPartial()
  }
}
Example 7
Source File: ScheduleSpecTest.scala From metronome with Apache License 2.0
package dcos.metronome.jobspec.impl

import java.time.{Instant, ZoneId}

import dcos.metronome.model.{ConcurrencyPolicy, CronSpec, ScheduleSpec}
import dcos.metronome.utils.test.Mockito
import org.scalatest.{FunSuite, GivenWhenThen, Matchers}

import scala.concurrent.duration._

class ScheduleSpecTest extends FunSuite with Matchers with Mockito with GivenWhenThen {

  test("nextExecution when close to daylight savings") {
    val schedule = ScheduleSpec(
      id = "default",
      cron = CronSpec("55 23 * * *"),
      timeZone = ZoneId.of("Europe/Rome"),
      startingDeadline = 900.seconds,
      concurrencyPolicy = ConcurrencyPolicy.Allow,
      enabled = true
    )

    Given("a schedule that was last run at 22:55")
    val lastScheduledAt = Instant.parse("2019-03-30T22:55:00.000Z")

    When("we are now close to midnight and compute the next scheduled time")
    val now = Instant.parse("2019-03-30T23:54:59.000Z")
    val nextTime = schedule.nextExecution(lastScheduledAt)

    // 60 secs is the smallest unit of reschedule time for cron
    val inSeconds = Math.max(java.time.Duration.between(now, nextTime).getSeconds, 60)
    println(s"now is $now, nextScheduleIn = $inSeconds seconds, next run is scheduled for: $nextTime")

    Then("The next run should be scheduled on the 31st")
    val expected = Instant.parse("2019-03-31T21:55:00Z")
    nextTime shouldEqual expected
  }
}
Example 8
Source File: ConfigFileValidatorSpec.scala From mvn_scalafmt with Apache License 2.0
package org.antipathy.mvn_scalafmt.validation

import java.nio.file.Paths

import org.apache.maven.plugin.logging.SystemStreamLog
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.GivenWhenThen
import org.scalatest.matchers.should.Matchers

class ConfigFileValidatorSpec extends AnyFlatSpec with GivenWhenThen with Matchers {

  behavior of "ConfigFileValidator"

  it should "Return an error when the config file path is empty" in {
    an[IllegalArgumentException] should be thrownBy {
      new ConfigFileValidator(new SystemStreamLog).validate("")
    }
  }

  it should "Return an error when the config file path is null" in {
    an[IllegalArgumentException] should be thrownBy {
      new ConfigFileValidator(new SystemStreamLog).validate(null)
    }
  }

  it should "Create a valid config sequence when passed a config location" in {
    new ConfigFileValidator(new SystemStreamLog).validate(".scalafmt.conf") should be(
      Paths.get(".scalafmt.conf")
    )
  }

  it should "Raise an exception when the config path is invalid and a config is required" in {
    an[IllegalArgumentException] should be thrownBy {
      new ConfigFileValidator(new SystemStreamLog).validate("--config")
    }
  }
}
Example 9
Source File: ScalaFormatterSpec.scala From mvn_scalafmt with Apache License 2.0
package org.antipathy.mvn_scalafmt

import java.io.File

import org.antipathy.mvn_scalafmt.builder.Builder
import org.antipathy.mvn_scalafmt.format.Formatter
import org.antipathy.mvn_scalafmt.io.Writer
import org.antipathy.mvn_scalafmt.model.{FormatResult, Summary}
import org.mockito.{ArgumentMatchers, Mockito}
import org.scalatest.GivenWhenThen
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

import scala.jdk.CollectionConverters._

class ScalaFormatterSpec extends AnyFlatSpec with GivenWhenThen with Matchers {

  behavior of "ScalaFormatter"

  it should "format scala files" in {
    val sourceBuilder = Mockito.mock(classOf[Builder[Seq[File], Seq[File]]])
    val changedFilesBuilder = Mockito.mock(classOf[Builder[Seq[File], Seq[File]]])
    val fileFormatter = Mockito.mock(classOf[Formatter[File, FormatResult]])
    val writer = Mockito.mock(classOf[Writer[Seq[FormatResult], Summary]])

    val formatter = new ScalaFormatter(sourceBuilder, changedFilesBuilder, fileFormatter, writer)

    val input = Seq(Mockito.mock(classOf[File])).asJava
    val sources = Mockito.mock(classOf[Seq[File]])
    val sourceToFormat = Mockito.mock(classOf[File])
    val sourcesToFormat = Seq(sourceToFormat)
    val formatResult = Mockito.mock(classOf[FormatResult])
    val summary = Mockito.mock(classOf[Summary])

    Mockito.when(sourceBuilder.build(ArgumentMatchers.any(classOf[Seq[File]]))).thenReturn(sources)
    Mockito.when(changedFilesBuilder.build(sources)).thenReturn(sourcesToFormat)
    Mockito.when(fileFormatter.format(sourceToFormat)).thenReturn(formatResult)
    Mockito.when(writer.write(Seq(formatResult))).thenReturn(summary)

    val result = formatter.format(input)

    result should be(summary)
  }
}
Example 10
Source File: SourceFileFormatterSpec.scala From mvn_scalafmt with Apache License 2.0
package org.antipathy.mvn_scalafmt.format

import java.io.File
import java.nio.file.Files

import org.antipathy.mvn_scalafmt.logging.MavenLogReporter
import org.antipathy.mvn_scalafmt.validation.ConfigFileValidator
import org.apache.maven.plugin.logging.SystemStreamLog
import org.scalafmt.interfaces.Scalafmt
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.GivenWhenThen
import org.scalatest.matchers.should.Matchers

class SourceFileFormatterSpec extends AnyFlatSpec with GivenWhenThen with Matchers {

  behavior of "SourceFileFormatter"

  it should "format a source file" in {
    val log = new SystemStreamLog
    val config = new ConfigFileValidator(log).validate(".scalafmt.conf")
    val sourceFile = new File("src/main/scala/org/antipathy/mvn_scalafmt/model/FormatResult.scala")
    val reporter = new MavenLogReporter(log)
    val scalafmt: Scalafmt =
      Scalafmt.create(this.getClass.getClassLoader).withRespectVersion(false).withReporter(reporter)

    val result = new SourceFileFormatter(config, scalafmt, log).format(sourceFile).formattedSource

    result.trim should be(new String(Files.readAllBytes(sourceFile.toPath)).trim)
  }
}
Example 11
Source File: ElasticSearchWriterUtilsSpec.scala From haystack-traces with Apache License 2.0
package com.expedia.www.haystack.trace.indexer.unit

import com.expedia.www.haystack.trace.indexer.writers.es.ElasticSearchWriterUtils
import org.scalatest.{BeforeAndAfterEach, FunSpec, GivenWhenThen, Matchers}

class ElasticSearchWriterUtilsSpec extends FunSpec with Matchers with GivenWhenThen with BeforeAndAfterEach {
  var timezone: String = _

  override def beforeEach() {
    timezone = System.getProperty("user.timezone")
    System.setProperty("user.timezone", "CST")
  }

  override def afterEach(): Unit = {
    System.setProperty("user.timezone", timezone)
  }

  describe("elastic search writer") {
    it("should use UTC when generating ES indexes") {
      Given("the system timezone is not UTC")
      System.setProperty("user.timezone", "CST")
      val eventTimeInMicros = System.currentTimeMillis() * 1000

      When("the writer generates the ES indexes")
      val cstName = ElasticSearchWriterUtils.indexName("haystack-traces", 6, eventTimeInMicros)
      System.setProperty("user.timezone", "UTC")
      val utcName = ElasticSearchWriterUtils.indexName("haystack-traces", 6, eventTimeInMicros)

      Then("it should use UTC to get those indexes")
      cstName shouldBe utcName
    }
  }
}
Example 12
Source File: ChangedFilesBuilderSpec.scala From mvn_scalafmt with Apache License 2.0
package org.antipathy.mvn_scalafmt.builder

import java.io.{File, FileNotFoundException}
import java.nio.file.Paths

import org.apache.maven.plugin.logging.SystemStreamLog
import org.scalatest.GivenWhenThen
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

class ChangedFilesBuilderSpec extends AnyFlatSpec with GivenWhenThen with Matchers {

  behavior of "ChangedFilesBuilder"

  it should "Identify files that have changed from master" in {
    val log = new SystemStreamLog
    val sourceDirs = Seq("src/test/scala", "src/main/scala").map(new File(_))
    val sources = new SourceFileSequenceBuilder(log).build(sourceDirs)
    val changedFiles = Seq(
      "/mvn_scalafmt/src/main/scala/org/antipathy/mvn_scalafmt/builder/ChangedFilesBuilder.scala",
      "/mvn_scalafmt/src/main/scala/org/antipathy/mvn_scalafmt/builder/SourceFileSequenceBuilder.scala",
      "/mvn_scalafmt/src/test/scala/org/antipathy/mvn_scalafmt/builder/ChangedFilesBuilderSpec.scala",
      "/mvn_scalafmt/src/test/scala/org/antipathy/mvn_scalafmt/builder/LocalConfigBuilderSpec.scala"
    ).map(x => getAbsolutePathFrom(x))
    val changeFunction = () => changedFiles.map(new File(_))

    val result = new ChangedFilesBuilder(log, true, "master", changeFunction).build(sources)

    result should be(changedFiles.map(new File(_)))
  }

  it should "return all files if diff is false" in {
    val log = new SystemStreamLog
    val sourceDirs = Seq("src/test/scala", "src/main/scala").map(new File(_))
    val sources = new SourceFileSequenceBuilder(log).build(sourceDirs)
    val changeFunction = () => sources

    val result = new ChangedFilesBuilder(log, false, "master", changeFunction).build(sources)

    result should be(sources)
  }

  it should "re-throw exceptions it encounters" in {
    val log = new SystemStreamLog
    val sourceDirs = Seq("src/test/scala", "src/main/scala").map(new File(_))
    val sources = new SourceFileSequenceBuilder(log).build(sourceDirs)
    val changedFiles = Seq(
      "/mvn_scalafmt/src/main/scala/org/antipathy/mvn_scalafmt/builder/ChangedFilesBuilder.scala",
      "/mvn_scalafmt/src/main/scala/org/antipathy/mvn_scalafmt/builder/SourceFileSequenceBuilder.scala",
      "/mvn_scalafmt/src/test/scala/org/antipathy/mvn_scalafmt/builder/ChangedFilesBuilderSpec.scala",
      "/mvn_scalafmt/src/test/scala/org/antipathy/mvn_scalafmt/builder/LocalConfigBuilderSpec.scala"
    ).map(x => getAbsolutePathFrom(x))
    val changeFunction = () => throw new FileNotFoundException("Ooops")

    an[FileNotFoundException] should be thrownBy {
      new ChangedFilesBuilder(log, true, "master", changeFunction).build(sources)
    }
  }

  def getAbsolutePathFrom(path: String): String =
    Paths.get(path).normalize.toAbsolutePath.toString
}
Example 13
Source File: RemoteConfigWriterSpec.scala From mvn_scalafmt with Apache License 2.0
package org.antipathy.mvn_scalafmt.io

import java.io.File
import java.nio.charset.StandardCharsets
import java.nio.file.{Files, Paths}

import org.antipathy.mvn_scalafmt.model.RemoteConfig
import org.apache.commons.io.FileUtils
import org.apache.maven.plugin.logging.SystemStreamLog
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.GivenWhenThen
import org.scalatest.matchers.should.Matchers

class RemoteConfigWriterSpec extends AnyFlatSpec with GivenWhenThen with Matchers {

  behavior of "RemoteConfigWriter"

  it should "Write a config to a local path" in {
    val localPath = s"${System.getProperty("java.io.tmpdir")}${File.separator}.scalafmt.conf"
    val contents =
      """version = "1.5.1"
        |maxColumn = 120
        |align = false
        |rewrite.rules = [SortImports]
        |danglingParentheses = true
        |importSelectors = singleLine
        |binPack.parentConstructors = true
        |includeCurlyBraceInSelectChains = false""".stripMargin
    val writer = new RemoteConfigWriter(new SystemStreamLog)
    val input = RemoteConfig(contents, Paths.get(localPath))

    writer.write(input)

    new String(Files.readAllBytes(new File(localPath).toPath))
    Files.delete(input.location)
  }

  it should "Overwrite a config in a local path" in {
    val localPath = s"${System.getProperty("java.io.tmpdir")}${File.separator}.scalafmt2.conf"
    val contents =
      """version = "1.5.1"
        |maxColumn = 120
        |align = false
        |rewrite.rules = [SortImports]
        |danglingParentheses = true
        |importSelectors = singleLine
        |binPack.parentConstructors = true
        |includeCurlyBraceInSelectChains = false""".stripMargin
    val oldContents = "SomeOldConfig"
    val writer = new RemoteConfigWriter(new SystemStreamLog)
    val input = RemoteConfig(contents, Paths.get(localPath))

    FileUtils.writeStringToFile(new File(localPath), oldContents, StandardCharsets.UTF_8)
    new String(Files.readAllBytes(new File(localPath).toPath)) should be(oldContents)

    writer.write(input)

    new String(Files.readAllBytes(new File(localPath).toPath)) should be(contents)
    Files.delete(input.location)
  }
}
Example 14
Source File: RemoteConfigReaderSpec.scala From mvn_scalafmt with Apache License 2.0
package org.antipathy.mvn_scalafmt.io

import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.GivenWhenThen
import org.scalatest.matchers.should.Matchers
import org.apache.maven.plugin.logging.SystemStreamLog
import java.net.MalformedURLException

class RemoteConfigReaderSpec extends AnyFlatSpec with GivenWhenThen with Matchers {

  behavior of "RemoteConfigReader"

  it should "read a config from a remote location" in {
    val url =
      "https://raw.githubusercontent.com/SimonJPegg/mvn_scalafmt/35f3863c501b43beb59d84cb49fe124ee99c70a5/.scalafmt.conf"
    val reader = new RemoteConfigReader(new SystemStreamLog)
    val expectedResult = "maxColumn = 120\n"

    reader.read(url).contents should be(expectedResult)
  }

  it should "raise an exception when unable to retrieve a config" in {
    val url = "Skyrim belongs to the Nords"
    val reader = new RemoteConfigReader(new SystemStreamLog)

    an[MalformedURLException] should be thrownBy {
      reader.read(url)
    }
  }
}
Example 15
Source File: TestResultLogWriterSpec.scala From mvn_scalafmt with Apache License 2.0
package org.antipathy.mvn_scalafmt.io

import org.scalatest.GivenWhenThen
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
import org.apache.maven.plugin.logging.Log
import org.mockito.Mockito
import org.antipathy.mvn_scalafmt.model.FormatResult
import java.io.File

class TestResultLogWriterSpec extends AnyFlatSpec with GivenWhenThen with Matchers {

  behavior of "TestResultLogWriter"

  it should "write details of unformatted sources to a log" in {
    val log = Mockito.mock(classOf[Log])
    val writer = new TestResultLogWriter(log)
    val unformattedFile = Mockito.mock(classOf[File])
    val formattedFile = Mockito.mock(classOf[File])
    val unformatted = FormatResult(
      sourceFile = unformattedFile,
      originalSource = "unformatted",
      formattedSource = "formatted"
    )
    val formatted = FormatResult(
      sourceFile = formattedFile,
      originalSource = "formatted",
      formattedSource = "formatted"
    )
    val input = Seq(unformatted, formatted)

    Mockito.when(unformattedFile.getName).thenReturn("unformatted.scala")
    Mockito.when(formattedFile.getName).thenReturn("formatted.scala")

    val result = writer.write(input)

    result.totalFiles should be(input.length)
    result.unformattedFiles should be(1)
    result.fileDetails.length should be(input.length)
    result.fileDetails.filter(_.name == unformattedFile.getName).foreach { fd =>
      fd.name should be(unformattedFile.getName)
      fd.details should be("Requires formatting")
    }
    result.fileDetails.filter(_.name == formattedFile.getName).foreach { fd =>
      fd.name should be(formattedFile.getName)
      fd.details should be("Formatted")
    }
  }
}
Example 16
Source File: ModelDefinitionCatalogSpec.scala From modelmatrix with Apache License 2.0
package com.collective.modelmatrix.catalog

import java.time.Instant

import com.collective.modelmatrix.ModelFeature
import com.collective.modelmatrix.transform.{Bins, Identity, Index, Top}
import org.scalatest.{BeforeAndAfterAll, FlatSpec, GivenWhenThen}

class ModelDefinitionCatalogSpecTest extends ModelDefinitionCatalogSpec with TestDatabase with InstallSchemaBefore

trait ModelDefinitionCatalogSpec extends FlatSpec with GivenWhenThen with BeforeAndAfterAll with CatalogDatabase {

  import scala.concurrent.ExecutionContext.Implicits.global

  lazy val testClassName: String = this.getClass.getSimpleName

  val now = Instant.now()
  val isActive = true
  val addAllOther = true

  lazy val modelDefinitions = new ModelDefinitions(catalog)
  lazy val modelDefinitionFeatures = new ModelDefinitionFeatures(catalog)

  "Model Definition Catalog" should "add model definition with features and read them later" in {

    Given("model features")
    val identity = ModelFeature(isActive, "Advertisement", "ad_size", "size", Identity)
    val top = ModelFeature(isActive, "Advertisement", "ad_type", "type", Top(95, addAllOther))
    val index = ModelFeature(isActive, "Advertisement", "ad_network", "network", Index(0.5, addAllOther))
    val bins = ModelFeature(isActive, "Advertisement", "ad_performance", "pct_clicks", Bins(5, 0, 0))

    And("model definition")
    val addModelDefinition = modelDefinitions.add(
      name = Some(s"definitionName=$testClassName${now.toEpochMilli}"),
      source = s"definitionSource=$testClassName",
      createdBy = "ModelDefinitionFeaturesSpec",
      createdAt = now,
      comment = Some("testing")
    )

    Then("should save model and features in catalog")
    val insert = for {
      id <- addModelDefinition
      featureId <- modelDefinitionFeatures.addFeatures(id, identity, top, index, bins)
    } yield (id, featureId)
    val (modelDefinitionId, featuresId) = await(db.run(insert))
    assert(featuresId.size == 4)

    Then("it should be resolvable by content")
    val byContent = await(db.run(modelDefinitions.findByContent(s"definitionSource=$testClassName"))).get.id
    assert(byContent == modelDefinitionId)

    And("read saved model")
    val modelO = await(db.run(modelDefinitions.all)).find(_.id == modelDefinitionId)
    assert(modelO.isDefined)
    val model = modelO.get
    assert(model.createdBy == "ModelDefinitionFeaturesSpec")
    assert(model.createdAt == now)
    assert(model.features == 4)

    And("find model definitions by id")
    val foundById = await(db.run(modelDefinitions.findById(modelDefinitionId)))
    assert(foundById == modelO)

    And("find model definition by name")
    val foundByName = await(db.run(modelDefinitions.list(name = Some(s"definitionName=$testClassName${now.toEpochMilli}")))).headOption
    assert(foundByName == modelO)

    And("read all model features by model definition id")
    val features = await(db.run(modelDefinitionFeatures.features(modelDefinitionId)))
    val featureMap = features.map(f => f.feature.feature -> f.feature).toMap
    assert(features.size == 4)
    assert(featureMap("ad_size") == identity)
    assert(featureMap("ad_type") == top)
    assert(featureMap("ad_network") == index)
    assert(featureMap("ad_performance") == bins)
  }
}
Example 17
Source File: ModelConfigurationParserSpec.scala From modelmatrix with Apache License 2.0
package com.collective.modelmatrix.cli

import com.collective.modelmatrix.{ModelConfigurationParser, ModelFeature}
import com.collective.modelmatrix.transform.{Identity, Index, Top}
import com.typesafe.config.ConfigFactory
import org.scalatest.{FlatSpec, GivenWhenThen}

import scalaz.syntax.validation._

class ModelConfigurationParserSpec extends FlatSpec with GivenWhenThen {

  val isActive = true
  val notActive = false
  val isAllOther = true

  "Model Configuration parser" should "parse model matrix config" in {
    Given("well-defined model-matrix config")
    val config = ConfigFactory.load("./matrix-model-cli.conf")
    val parser = new ModelConfigurationParser(config)

    Then("should parse all features")
    val features = parser.features().toMap
    assert(features.size == 6)

    And("ad_network should be correct 'identity' feature")
    val adNetwork = features("ad_network")
    assert(adNetwork == ModelFeature(isActive, "advertisement", "ad_network", "network", Identity).successNel)

    And("ad_type should be correct 'top' feature")
    val adType = features("ad_type")
    assert(adType == ModelFeature(isActive, "advertisement", "ad_type", "type", Top(95.0, isAllOther)).successNel)

    And("ad_size should be correct 'index' feature")
    val adSize = features("ad_size")
    assert(adSize == ModelFeature(isActive, "advertisement", "ad_size", "size", Index(0.5, isAllOther)).successNel)

    And("ad_visibility should be correct 'top' feature")
    val adVisibility = features("ad_visibility")
    assert(adVisibility == ModelFeature(notActive, "advertisement", "ad_visibility", "visibility", Top(95.0, isAllOther)).successNel)

    And("ad_tag should be wrong-defined feature")
    val adTag = features("ad_tag")
    assert(adTag == "Unknown transform type: magic-transform".failureNel)

    And("ad_position should be wrong-defined feature")
    val adPosition = features("ad_position")
    assert(adPosition.isFailure)
  }
}
Example 18
Source File: OffsetGraphiteReporterTest.scala From kafka-offset-monitor-graphite with Apache License 2.0
package pl.allegro.tech.kafka.offset.monitor.graphite

import com.quantifind.kafka.OffsetGetter.OffsetInfo
import org.scalatest.{BeforeAndAfter, FlatSpec, GivenWhenThen}

class OffsetGraphiteReporterTest extends FlatSpec with BeforeAndAfter with GivenWhenThen {

  val GRAPHITE_PORT = 48213
  val graphite = new GraphiteMockServer(GRAPHITE_PORT)
  val reporter = new OffsetGraphiteReporter(
    s"graphiteHost=localhost,graphitePort=$GRAPHITE_PORT,graphitePrefix=offset,metricsCacheExpireSeconds=1,graphiteReportPeriod=1")

  before {
    graphite.start()
  }

  after {
    graphite.stop()
  }

  it should "report metrics to graphite" in {
    Given("offset")
    val offset = OffsetInfo("group", "topic", 0, 10, 11, Option.empty, null, null)
    graphite.expectMetric("offset.topic.group.0.offset", 10)
    graphite.expectMetric("offset.topic.group.0.logSize", 11)
    graphite.expectMetric("offset.topic.group.0.lag", 1)

    When("reporting")
    reporter.report(Array(offset))

    Then("expect metrics to be delivered")
    graphite.waitUntilReceived()
  }

  it should "escape names of topic and groups" in {
    Given("offset for topic and group with dots")
    val offset = OffsetInfo("escape.group", "escape.topic", 0, 10, 11, Option.empty, null, null)

    When("reporting")
    graphite.expectMetric("offset.escape_topic.escape_group.0.offset", 10)
    graphite.expectMetric("offset.escape_topic.escape_group.0.logSize", 11)
    graphite.expectMetric("offset.escape_topic.escape_group.0.lag", 1)
    reporter.report(Array(offset))

    Then("expect metrics to be delivered")
    graphite.waitUntilReceived()
  }

  it should "not fail to recreate metrics after cache has expired" in {
    Given("offset")
    val offset = OffsetInfo("expired_group", "expired_topic", 0, 10, 11, Option.empty, null, null)
    reporter.report(Array(offset))

    When("waiting for cache to expire and reporting again")
    Thread.sleep(2000)
    graphite.expectMetric("offset.expired_topic.expired_group.0.offset", 10)
    reporter.report(Array(offset))

    Then("expect metrics to be delivered")
    graphite.waitUntilReceived()
  }
}
Example 19
Source File: BlacklistSpecification.scala From Waves with MIT License
package com.wavesplatform.network

import java.net.{InetAddress, InetSocketAddress}

import com.typesafe.config.ConfigFactory
import com.wavesplatform.settings.NetworkSettings
import net.ceedubs.ficus.Ficus._
import org.scalatest.{FeatureSpec, GivenWhenThen}

class BlacklistSpecification extends FeatureSpec with GivenWhenThen {
  private val config = ConfigFactory.parseString("""waves.network {
      |  known-peers = []
      |  file = null
      |  black-list-residence-time: 1s
      |}""".stripMargin).withFallback(ConfigFactory.load()).resolve()

  private val networkSettings = config.as[NetworkSettings]("waves.network")

  info("As a Peer")
  info("I want to blacklist other peers for certain time")
  info("So I can give them another chance after")

  feature("Blacklist") {
    scenario("Peer blacklist another peer") {
      Given("Peer database is empty")
      val peerDatabase = new PeerDatabaseImpl(networkSettings)

      def isBlacklisted(address: InetSocketAddress) =
        peerDatabase.blacklistedHosts.contains(address.getAddress)

      assert(peerDatabase.knownPeers.isEmpty)
      assert(peerDatabase.blacklistedHosts.isEmpty)

      When("Peer adds another peer to knownPeers")
      val address = new InetSocketAddress(InetAddress.getByName("localhost"), 1234)
      peerDatabase.touch(address)
      assert(peerDatabase.knownPeers.contains(address))
      assert(!isBlacklisted(address))

      And("Peer blacklists another peer")
      peerDatabase.blacklist(address.getAddress, "")
      assert(isBlacklisted(address))
      assert(!peerDatabase.knownPeers.contains(address))

      And("Peer waits for some time")
      Thread.sleep(networkSettings.blackListResidenceTime.toMillis + 500)

      Then("Another peer disappear from blacklist")
      assert(!isBlacklisted(address))

      And("Another peer became known")
      assert(peerDatabase.knownPeers.contains(address))
    }
  }
}
Example 20
Source File: ExecutorIdExtenderPluginTest.scala From marathon-example-plugins with Apache License 2.0
package mesosphere.marathon.example.plugin.executorid

import com.typesafe.scalalogging.StrictLogging
import org.apache.mesos.Protos.Environment.Variable
import org.apache.mesos.Protos._
import org.scalatest.{GivenWhenThen, Matchers, WordSpec}

class ExecutorIdExtenderPluginTest extends WordSpec with Matchers with GivenWhenThen with StrictLogging {

  "Given a MARATHON_EXECUTOR_ID label an executorID should be injected" in {
    val f = new Fixture

    Given("a TaskInfo with a MARATHON_EXECUTOR_ID label")
    val taskInfo = TaskInfo.newBuilder.
      setExecutor(ExecutorInfo.newBuilder.
        setCommand(CommandInfo.newBuilder.
          setEnvironment(Environment.newBuilder.addVariables(
            Variable.newBuilder.setName("foo").setValue("bar")
          ))).
        setExecutorId(ExecutorID.newBuilder.setValue("task.12345"))
      ).
      setLabels(Labels.newBuilder.addLabels(Label.newBuilder.
        setKey(f.plugin.ExecutorIdLabel)
        .setValue("customer-executor-id")
      ))

    When("handled by the plugin")
    f.plugin.taskInfo(null, taskInfo)

    Then("ExecutorInfo.ExecutorId should be changed")
    taskInfo.getExecutor.getExecutorId.getValue shouldBe "customer-executor-id"

    And("Environment variables should be removed")
    taskInfo.getExecutor.getCommand.getEnvironment.getVariablesCount shouldBe 0
  }

  "Given no MARATHON_EXECUTOR_ID label an executorID should be untouched" in {
    val f = new Fixture

    Given("a TaskInfo without a MARATHON_EXECUTOR_ID label")
    val taskInfo = TaskInfo.newBuilder.
      setExecutor(ExecutorInfo.newBuilder.
        setCommand(CommandInfo.newBuilder.
          setEnvironment(Environment.newBuilder.addVariables(
            Variable.newBuilder.setName("foo").setValue("bar")
          ))).
        setExecutorId(ExecutorID.newBuilder.setValue("task.12345"))
      ).
      setLabels(Labels.newBuilder.addLabels(Label.newBuilder.
        setKey("baz")
        .setValue("wof")
      ))

    When("handled by the plugin")
    f.plugin.taskInfo(null, taskInfo)

    Then("ExecutorInfo.ExecutorId should stay the same")
    taskInfo.getExecutor.getExecutorId.getValue shouldBe "task.12345"

    And("environment variables should be kept")
    taskInfo.getExecutor.getCommand.getEnvironment.getVariablesCount shouldBe 1
  }

  class Fixture {
    val plugin = new ExecutorIdExtenderPlugin()
  }
}
Example 21
Source File: S2CellTransformerSpec.scala From spark-ext with Apache License 2.0
package org.apache.spark.ml.feature

import com.collective.TestSparkContext
import org.apache.spark.sql.Row
import org.apache.spark.sql.types._
import org.scalatest.{FlatSpec, GivenWhenThen}

class S2CellTransformerSpec extends FlatSpec with GivenWhenThen with TestSparkContext {

  val schema = StructType(Seq(
    StructField("city", StringType),
    StructField("lat", DoubleType),
    StructField("lon", DoubleType)
  ))

  val cities = sqlContext.createDataFrame(sc.parallelize(Seq(
    Row("New York", 40.7142700, -74.0059700),
    Row("London", 51.50722, -0.12750),
    Row("Princeton", 40.3487200, -74.6590500)
  )), schema)

  def cellMap(rows: Array[Row]): Map[String, String] = {
    rows.map { case Row(city: String, _, _, cell: String) => city -> cell }.toMap
  }

  "S2 Cell Transformer" should "compute S2 Cell Id for level = 6" in {
    Given("S2 Cell Transformer with level = 6")
    val s2CellTransformer = new S2CellTransformer().setLevel(6)
    val transformed = s2CellTransformer.transform(cities)
    val cells = cellMap(transformed.collect())
    Then("New York should be in the same cell with Princeton")
    assert(cells("New York") == cells("Princeton"))
  }

  it should "compute S2 Cell Id for level = 12" in {
    Given("S2 Cell Transformer with level = 12")
    val s2CellTransformer = new S2CellTransformer().setLevel(12)
    val transformed = s2CellTransformer.transform(cities)
    val cells = cellMap(transformed.collect())
    Then("all cities should be in their own cell")
    assert(cells.values.toSet.size == 3)
  }
}
Example 22
Source File: TaskRepositoryOnRDBSpec.scala From ddd-on-scala with MIT License
package crossroad0201.dddonscala.infrastructure.task

import crossroad0201.dddonscala.adapter.infrastructure.rdb.ScalikeJdbcSessionHolder
import crossroad0201.dddonscala.adapter.infrastructure.rdb.task.TaskRepositoryOnRDB
import crossroad0201.dddonscala.domain.UnitOfWork
import crossroad0201.dddonscala.domain.task._
import crossroad0201.dddonscala.domain.user.UserId
import org.scalatest.{BeforeAndAfterAll, GivenWhenThen, Inside, Matchers}
import org.scalatest.fixture.WordSpec
import scalikejdbc.scalatest.AutoRollback
import scalikejdbc._
import scalikejdbc.config.DBs

import scala.util.{Failure, Success}

class TaskRepositoryOnRDBSpec
    extends WordSpec
    with GivenWhenThen
    with Matchers
    with Inside
    with BeforeAndAfterAll
    with AutoRollback {

  override protected def beforeAll() = DBs.setupAll
  override protected def afterAll() = DBs.closeAll

  override def fixture(implicit session: DBSession) {
    sql"""INSERT INTO tasks VALUES ('TESTTASK001', 'テストタスク1', 'OPENED', 'USER001', NULL, 1)""".update.apply
    sql"""INSERT INTO tasks VALUES ('TESTTASK002', 'テストタスク2', 'CLOSED', 'USER001', 'USER002', 1)""".update.apply
    sql"""INSERT INTO task_comments VALUES (1, 'TESTTASK002', 'ひとつめのコメント', 'USER001')""".update.apply
    sql"""INSERT INTO task_comments VALUES (2, 'TESTTASK002', 'ふたつめのコメント', 'USER002')""".update.apply
  }

  "get" when {
    "the task exists" should {
      "return the task" in { implicit dbs =>
        new WithFixture {
          Given("an existing task ID")
          val taskId = "TESTTASK002"

          Then("fetch the task")
          val actual = get(TaskId(taskId))
          println(s"Actual: $actual")

          When("the task is returned")
          inside(actual) {
            case (Success(Some(aTask))) =>
              aTask.id should be(TaskId("TESTTASK002"))
              aTask.name should be(TaskName("テストタスク2"))
              aTask.state should be(TaskState.Closed)
              aTask.authorId should be(UserId("USER001"))
              aTask.assignment should be(Assigned(UserId("USER002")))
          }
        }
      }
    }
  }

  trait WithFixture extends TaskRepositoryOnRDB {
    implicit def dbSessionAsUnitOfWork(implicit dbs: DBSession): UnitOfWork =
      new UnitOfWork with ScalikeJdbcSessionHolder {
        override val dbSession = dbs
      }
  }
}
Example 23
Source File: ReebDiagramTest.scala From spark-tda with Apache License 2.0
package org.apache.spark.ml.feature

import org.apache.spark.ml.linalg.{Vectors, EuclideanDistance, Vector}
import org.apache.spark.sql.functions.{col, explode, udf}
import org.scalatest.{PropSpec, Matchers, GivenWhenThen}
import org.scalatest.prop.GeneratorDrivenPropertyChecks

class ReebDiagramTest
    extends FeaturePropSpec
    with GivenWhenThen
    with GeneratorDrivenPropertyChecks
    with Matchers {
  val assembler = new VectorAssembler()
    .setInputCols(Array("double", "integer"))
    .setOutputCol("vector")
  val cover = new Cover()
    .setExploding(true)
    .setInputCols("double", "integer")
    .setOutputCol("cover_id")

  property("argument topTreeSize must be positive") {
    intercept[IllegalArgumentException] {
      val reeb = new ReebDiagram()
//        .setIdCol("id")
//        .setCoverCol("cover_id")
//        .setFeaturesCol("vector")
//        .setOutputCol("cluster_id")
        .setTopTreeSize(0)
    }
  }

  property("placeholder") {
    val reeb = new ReebDiagram()
      .setK(15)
      .setIdCol("id")
      .setCoverCol("cover_id")
      .setFeaturesCol("vector")
      .setOutputCol("cluster_id")
    forAll(dataframeGen.arbitrary) { df =>
      val assembled = assembler.transform(df)
      whenever(
        assembled.count() > 0 && hasDistinctValues(assembled, "double", "integer")) {
        val transformed = cover
          .fit(assembled)
          .transform(assembled)
        val result = reeb
          .setTopTreeSize(1)
          .fit(transformed)
          .transform(transformed)
        // result.show()
      }
    }
  }
}
Example 24
Source File: CoverTest.scala From spark-tda with Apache License 2.0
package org.apache.spark.ml.feature

import org.apache.spark.ml.linalg.Vectors
import org.apache.spark.sql.functions.{col, explode, udf}
import org.scalatest.{PropSpec, Matchers, GivenWhenThen}
import org.scalatest.prop.GeneratorDrivenPropertyChecks

class CoverTest
    extends FeaturePropSpec
    with GivenWhenThen
    with GeneratorDrivenPropertyChecks
    with Matchers {
  val assembler = new VectorAssembler()
    .setInputCols(Array("double", "integer"))
    .setOutputCol("vector")

  property("argument numSplits must be positive") {
    intercept[IllegalArgumentException] {
      val cover = new Cover()
        .setInputCols("double")
        .setOutputCol("cover_ids")
        .setNumSplits(0)
    }
  }

  property("argument overlapRatio must be positive") {
    intercept[IllegalArgumentException] {
      val cover = new Cover()
        .setInputCols("double")
        .setOutputCol("cover_ids")
        .setOverlapRatio(0.0)
    }
  }

  property("cover estimator changes nothing with the original dataframe") {
    val cover = new Cover()
      .setInputCols("double", "integer", "vector")
      .setOutputCol("cover_ids")
    forAll(dataframeGen.arbitrary) { df =>
      val transformed = assembler.transform(df)
      whenever(
        transformed.count() > 0 && hasDistinctValues(transformed, "double", "integer", "vector")) {
        val covered = cover
          .fit(transformed)
          .transform(transformed)
          .drop("cover_ids")
          .except(transformed)
          .count() should be(0)
      }
    }
  }

  property("generated cover covers all range of specified columns") {
    val cover = new Cover()
      .setInputCols("double", "integer", "vector")
      .setOutputCol("cover_ids")
    val uncovered = udf { xs: Seq[Long] =>
      xs.length == 0
    }
    forAll(dataframeGen.arbitrary) { df =>
      val transformed = assembler.transform(df)
      whenever(
        transformed.count() > 0 && hasDistinctValues(transformed, "double", "integer", "vector")) {
        cover
          .fit(transformed)
          .transform(transformed)
          .where(uncovered(col("cover_ids")))
          .count() should be(0)
      }
    }
  }

  property("Cover is readable/writable") {
    val cover = new Cover()
      .setInputCols("double", "integer")
      .setOutputCol("cover_ids")
    testDefaultReadWrite(cover)
  }

  property("CoverModel is readable/writable") {
    val model = new CoverModel("myCoverModel", Vectors.dense(-1.0, 0.0), Vectors.dense(1.0, 10.0))
      .setInputCols("double", "integer")
      .setOutputCol("cover_ids")
    val newModel = testDefaultReadWrite(model)
    assert(newModel.min === model.min)
    assert(newModel.max === model.max)
  }
}
Example 25
Source File: CubeTest.scala From spark-tda with Apache License 2.0
package org.apache.spark.ml.util.interval

import org.scalatest.Matchers
import org.scalatest.GivenWhenThen
import org.scalatest.prop.GeneratorDrivenPropertyChecks

class CubeTest
    extends IntervalPropSpec
    with GeneratorDrivenPropertyChecks
    with GivenWhenThen
    with Matchers {

  property("cube intersection is commutative") {
    forAll { (lhs: Cube, rhs: Cube) =>
      (lhs intersects rhs) should equal(rhs intersects lhs)
    }
  }

  property("cube inclusion is transitive") {
    forAll { (lhs: Cube, rhs: Cube, x: Double, y: Double) =>
      !(lhs contains rhs) || !(rhs contains (x, y)) || (lhs contains (x, y))
    }
  }
}
Example 26
Source File: CLIConfiguration.scala From renku with Apache License 2.0
package ch.renku.acceptancetests.workflows

import java.nio.file.Path

import ch.renku.acceptancetests.model.users.UserCredentials
import ch.renku.acceptancetests.tooling.console._
import ch.renku.acceptancetests.tooling.{RenkuCliConfig, console}
import org.scalatest.{FeatureSpec, GivenWhenThen}

trait CLIConfiguration extends GivenWhenThen {
  self: FeatureSpec =>

  def `setup git configuration`(implicit userCredentials: UserCredentials): Unit = {
    implicit val workFolder: Path = rootWorkDirectory

    When("the user has configured its git username and email")
    val configuredUsername = console %%> c"git config --global user.name"
    if (configuredUsername.trim.isEmpty)
      console %> c"git config --global user.name '${userCredentials.fullName}'"
    val configuredEmail: String = console %%> c"git config --global user.email"
    if (configuredEmail.trim.isEmpty)
      console %> c"git config --global user.email '${userCredentials.email}'"
  }

  def `verify renku version`(implicit cliConfig: RenkuCliConfig, userCredentials: UserCredentials): Unit = {
    implicit val workFolder: Path = rootWorkDirectory

    When(s"the user has renku v${cliConfig.version} installed")
    val installedRenkuVersion = console %%> c"renku --version"
    if (installedRenkuVersion.trim != cliConfig.version.toString)
      console %> c"${cliConfig.installCommand.toString.format(cliConfig.version.toString)}"
  }
}
Example 27
Source File: FeatureSpec.scala From haystack-trends with Apache License 2.0
package com.expedia.www.haystack.trends.feature

import java._
import java.util.Properties

import com.expedia.metrics.MetricData
import com.expedia.open.tracing.Span
import com.expedia.www.haystack.commons.entities.encoders.Base64Encoder
import com.expedia.www.haystack.trends.config.AppConfiguration
import com.expedia.www.haystack.trends.config.entities.{KafkaConfiguration, TransformerConfiguration}
import org.apache.kafka.streams.StreamsConfig
import org.easymock.EasyMock
import org.scalatest.easymock.EasyMockSugar
import org.scalatest.{FeatureSpecLike, GivenWhenThen, Matchers}

trait FeatureSpec extends FeatureSpecLike with GivenWhenThen with Matchers with EasyMockSugar {

  protected val METRIC_TYPE = "gauge"

  def generateTestSpan(duration: Long): Span = {
    val operationName = "testSpan"
    val serviceName = "testService"

    Span.newBuilder()
      .setDuration(duration)
      .setOperationName(operationName)
      .setServiceName(serviceName)
      .build()
  }

  protected def mockAppConfig: AppConfiguration = {
    val kafkaConsumeTopic = "test-consume"
    val kafkaProduceTopic = "test-produce"
    val streamsConfig = new Properties()
    streamsConfig.put(StreamsConfig.APPLICATION_ID_CONFIG, "test-app")
    streamsConfig.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "test-kafka-broker")
    val kafkaConfig = KafkaConfiguration(new StreamsConfig(streamsConfig), kafkaProduceTopic, kafkaConsumeTopic, null, null, 0L)
    val transformerConfig = TransformerConfiguration(new Base64Encoder, enableMetricPointServiceLevelGeneration = true, List())

    val appConfiguration = mock[AppConfiguration]
    expecting {
      appConfiguration.kafkaConfig.andReturn(kafkaConfig).anyTimes()
      appConfiguration.transformerConfiguration.andReturn(transformerConfig).anyTimes()
    }
    EasyMock.replay(appConfiguration)
    appConfiguration
  }

  protected def getMetricDataTags(metricData: MetricData): util.Map[String, String] = {
    metricData.getMetricDefinition.getTags.getKv
  }
}
Example 28
Source File: MathUtilsSpec.scala From subsearch with GNU General Public License v2.0
package com.gilazaria.subsearch.utils

import org.scalatest.{GivenWhenThen, FlatSpec}
import com.gilazaria.subsearch.utils.MathUtils._

class MathUtilsSpec extends FlatSpec with GivenWhenThen {
  behavior of "percentage"

  it should "calculate 50.0" in {
    Given("a = 1 and b = 2")
    val a = 1
    val b = 2

    When("the percentage is calculated")
    val expected: Float = 50.0.toFloat
    val actual: Float = percentage(a, b)

    Then(s"the result should be $expected")
    assert(expected == actual)
  }

  it should "calculate 150.0 when given 3 and 2" in {
    Given("a = 3 and b = 2")
    val a = 3
    val b = 2

    When("the percentage is calculated")
    val expected: Float = 150.0.toFloat
    val actual: Float = percentage(a, b)

    Then(s"the result should be $expected")
    assert(expected == actual)
  }

  it should "throw an IllegalArgumentException" in {
    Given("b = 0")
    val a = 1
    val b = 0

    When("the percentage is calculated")
    val expectedException = intercept[IllegalArgumentException] {
      percentage(a, b)
    }

    Then("an IllegalArgumentException is thrown")
    assert(expectedException.getMessage === "The second argument cannot be zero.")
  }
}
Example 29
Source File: CPConfigurationTest.scala From toketi-iothubreact with MIT License
// Copyright (c) Microsoft. All rights reserved.

package com.microsoft.azure.iot.iothubreact.checkpointing

import com.microsoft.azure.iot.iothubreact.checkpointing.backends.cassandra.lib.Auth
import com.typesafe.config.{Config, ConfigException}
import org.mockito.Mockito._
import org.scalatest.mockito.MockitoSugar
import org.scalatest.{FeatureSpec, GivenWhenThen}

class CPConfigurationTest extends FeatureSpec with GivenWhenThen with MockitoSugar {

  info("As a configured instance")
  info("I want logic around returned values to be consistent with application expectations")

  val confPath = "iothub-react.checkpointing."

  Feature("Configuration Cassandra authorization") {

    Scenario("Only one of username or password is supplied") {
      var cfg = mock[Config]
      when(cfg.getString(confPath + "storage.cassandra.username")).thenReturn("username")
      when(cfg.getString(confPath + "storage.cassandra.password")).thenThrow(new ConfigException.Missing("path"))
      assert(new CPConfiguration(cfg).cassandraAuth == None)

      cfg = mock[Config]
      when(cfg.getString(confPath + "storage.cassandra.username")).thenThrow(new ConfigException.Missing("path"))
      when(cfg.getString(confPath + "storage.cassandra.password")).thenReturn("password")
      assert(new CPConfiguration(cfg).cassandraAuth == None)
    }

    Scenario("Both username and password are supplied") {
      var cfg = mock[Config]
      when(cfg.getString(confPath + "storage.cassandra.username")).thenReturn("username")
      when(cfg.getString(confPath + "storage.cassandra.password")).thenReturn("password")
      assert(new CPConfiguration(cfg).cassandraAuth == Some(Auth("username", "password")))
    }
  }

  Feature("Storage namespace") {

    Scenario("Cassandra has a special namespace value") {
      var cfg = mock[Config]
      when(cfg.getString(confPath + "storage.namespace")).thenReturn("")

      when(cfg.getString(confPath + "storage.backendType")).thenReturn("anythingbutcassandra")
      assert(new CPConfiguration(cfg).storageNamespace == "iothub-react-checkpoints")

      when(cfg.getString(confPath + "storage.backendType")).thenReturn("AZUREBLOB")
      assert(new CPConfiguration(cfg).storageNamespace == "iothub-react-checkpoints")

      when(cfg.getString(confPath + "storage.backendType")).thenReturn("CASSANDRA")
      assert(new CPConfiguration(cfg).storageNamespace == "iothub_react_checkpoints")
    }
  }
}
Example 30
Source File: ConfigurationTest.scala From toketi-iothubreact with MIT License
package com.microsoft.azure.iot.iothubreact.checkpointing

import com.microsoft.azure.iot.iothubreact.checkpointing.backends.cassandra.lib.Auth
import com.typesafe.config.{Config, ConfigException}
import org.mockito.Mockito._
import org.scalatest.mockito.MockitoSugar
import org.scalatest.{FeatureSpec, GivenWhenThen}

class ConfigurationTest extends FeatureSpec with GivenWhenThen with MockitoSugar {

  info("As a configured instance")
  info("I want logic around returned values to be consistent with application expectations")

  val confPath = "iothub-react.checkpointing."

  Feature("Configuration Cassandra authorization") {

    Scenario("Only one of username or password is supplied") {
      var cfg = mock[Config]
      when(cfg.getString(confPath + "storage.cassandra.username")).thenReturn("username")
      when(cfg.getString(confPath + "storage.cassandra.password")).thenThrow(new ConfigException.Missing("path"))
      assert(new CPConfiguration(cfg).cassandraAuth == None)

      cfg = mock[Config]
      when(cfg.getString(confPath + "storage.cassandra.username")).thenThrow(new ConfigException.Missing("path"))
      when(cfg.getString(confPath + "storage.cassandra.password")).thenReturn("password")
      assert(new CPConfiguration(cfg).cassandraAuth == None)
    }

    Scenario("Both username and password are supplied") {
      var cfg = mock[Config]
      when(cfg.getString(confPath + "storage.cassandra.username")).thenReturn("username")
      when(cfg.getString(confPath + "storage.cassandra.password")).thenReturn("password")
      assert(new CPConfiguration(cfg).cassandraAuth == Some(Auth("username", "password")))
    }
  }

  Feature("Storage namespace") {

    Scenario("Cassandra has a special namespace value") {
      var cfg = mock[Config]
      when(cfg.getString(confPath + "storage.namespace")).thenReturn("")

      when(cfg.getString(confPath + "storage.backendType")).thenReturn("anythingbutcassandra")
      assert(new CPConfiguration(cfg).storageNamespace == "iothub-react-checkpoints")

      when(cfg.getString(confPath + "storage.backendType")).thenReturn("AZUREBLOB")
      assert(new CPConfiguration(cfg).storageNamespace == "iothub-react-checkpoints")

      when(cfg.getString(confPath + "storage.backendType")).thenReturn("CASSANDRA")
      assert(new CPConfiguration(cfg).storageNamespace == "iothub_react_checkpoints")
    }
  }
}
Example 31
Source File: TestSpec.scala From akka-serialization-test with Apache License 2.0
package com.github.dnvriend

import akka.actor.{ ActorRef, ActorSystem, PoisonPill }
import akka.event.{ Logging, LoggingAdapter }
import akka.serialization.SerializationExtension
import akka.stream.{ ActorMaterializer, Materializer }
import akka.testkit.TestProbe
import akka.util.Timeout
import org.scalatest.concurrent.{ Eventually, ScalaFutures }
import org.scalatest.prop.PropertyChecks
import org.scalatest.{ BeforeAndAfterAll, FlatSpec, GivenWhenThen, Matchers }

import scala.concurrent.duration._
import scala.concurrent.{ ExecutionContext, Future }
import scala.util.Try

trait TestSpec extends FlatSpec
    with Matchers
    with GivenWhenThen
    with ScalaFutures
    with BeforeAndAfterAll
    with Eventually
    with PropertyChecks
    with AkkaPersistenceQueries
    with AkkaStreamUtils
    with InMemoryCleanup {

  implicit val timeout: Timeout = Timeout(10.seconds)
  implicit val system: ActorSystem = ActorSystem()
  implicit val ec: ExecutionContext = system.dispatcher
  implicit val mat: Materializer = ActorMaterializer()
  implicit val log: LoggingAdapter = Logging(system, this.getClass)
  implicit val pc: PatienceConfig = PatienceConfig(timeout = 50.seconds)

  val serialization = SerializationExtension(system)

  implicit class FutureToTry[T](f: Future[T]) {
    def toTry: Try[T] = Try(f.futureValue)
  }

  def killActors(actors: ActorRef*): Unit = {
    val probe = TestProbe()
    actors.foreach { actor ⇒
      probe watch actor
      actor ! PoisonPill
      probe expectTerminated actor
    }
  }

  override protected def afterAll(): Unit = {
    system.terminate()
    system.whenTerminated.toTry should be a 'success
  }
}
Example 32
Source File: BinaryModelMetricComputerSpec.scala From spark-ext with Apache License 2.0
package org.apache.spark.mllib.evaluation

import com.collective.TestSparkContext
import org.apache.spark.mllib.evaluation.binary.{Recall, BinaryConfusionMatrixImpl, BinaryLabelCounter}
import org.scalatest.{GivenWhenThen, FlatSpec}

class BinaryModelMetricComputerSpec extends FlatSpec with GivenWhenThen with TestSparkContext {

  val confusions = Seq(
    BinaryConfusionMatrixImpl(new BinaryLabelCounter(1, 0), new BinaryLabelCounter(5, 5)),
    BinaryConfusionMatrixImpl(new BinaryLabelCounter(5, 2), new BinaryLabelCounter(5, 5))
  )

  behavior of "AudienceReach"

  confusions foreach { b =>
    it should s"compute proper reach for $b" in {
      Given(s"confusion matrix entry $b")
      val expectedAudienceReach =
        (b.count.numPositives + b.count.numNegatives).toDouble / (b.totalCount.numNegatives + b.totalCount.numPositives)
      Then(s"audience reach should be equal to $expectedAudienceReach")
      assert(Reach(b) === expectedAudienceReach)
    }
  }

  behavior of "Lift"

  confusions foreach { b =>
    it should s"compute proper lift for $b" in {
      Given(s"confusion matrix entry $b")
      val expectedAudienceReach =
        (b.count.numPositives + b.count.numNegatives).toDouble / (b.totalCount.numNegatives + b.totalCount.numPositives)
      val expectedLift = Recall(b) / expectedAudienceReach
      Then(s"lift should be equal to $expectedLift")
      assert(Lift(b) === expectedLift)
    }
  }
}
Example 33
Source File: BinaryModelMetricsSpec.scala From spark-ext with Apache License 2.0
package org.apache.spark.mllib.evaluation

import com.collective.TestSparkContext
import org.apache.spark.rdd.RDD
import org.scalatest.{GivenWhenThen, FlatSpec}

class BinaryModelMetricsSpec extends FlatSpec with GivenWhenThen with TestSparkContext {

  val scoreAndLabels: RDD[(Double, Double)] = sc.parallelize(Seq(
    (0.8, 0.0), (0.7, 1.0), (0.3, 0.0), (0.9, 1.0), (0.6, 0.0),
    (0.6, 1.0), (0.6, 0.0), (0.8, 1.0), (0.2, 0.0), (0.5, 1.0)
  ), 1)

  val modelMetricsNoBin = new BinaryModelMetrics(scoreAndLabels)

  behavior of "BinaryModelMetrics"

  it should "compute gains chart" in {
    Given(s"score and labels set with 7 unique scores")
    When("creating BinaryModelMetrics without bins specified")
    val modelMetricsNoBin = new BinaryModelMetrics(scoreAndLabels)
    val gainsChart = modelMetricsNoBin.gains()
    Then("resulting gains chart should have 9 pair of coordinates")
    assert(gainsChart.count() === 9)
  }

  it should "compute gains chart with numBins = 3" in {
    Given(s"score and labels set with 7 unique scores")
    When("creating BinaryModelMetrics with 3 bins specified")
    val modelMetricsNoBin = new BinaryModelMetrics(scoreAndLabels, 3)
    val gainsChart = modelMetricsNoBin.gains()
    val expectedGainsPoints = (1 + Math.ceil(7.toDouble / (7 / 3)) + 1).toInt
    Then(s"resulting gains chart should have $expectedGainsPoints pair of coordinates")
    assert(gainsChart.count() === expectedGainsPoints)
  }
}
Example 34
Source File: DeltaErrorsSuite.scala From delta with Apache License 2.0
package org.apache.spark.sql.delta

import scala.sys.process.Process

import org.apache.hadoop.fs.Path
import org.scalatest.GivenWhenThen

import org.apache.spark.sql.QueryTest
import org.apache.spark.sql.test.{SharedSparkSession, SQLTestUtils}

trait DeltaErrorsSuiteBase
    extends QueryTest
    with SharedSparkSession
    with GivenWhenThen
    with SQLTestUtils {

  val MAX_URL_ACCESS_RETRIES = 3
  val path = "/sample/path"

  // Map of error name to the actual error message it throws
  // When adding an error, add the name of the function throwing the error as the key and the value
  // as the error being thrown
  def errorsToTest: Map[String, Throwable] = Map(
    "useDeltaOnOtherFormatPathException" ->
      DeltaErrors.useDeltaOnOtherFormatPathException("operation", path, spark),
    "useOtherFormatOnDeltaPathException" ->
      DeltaErrors.useOtherFormatOnDeltaPathException("operation", path, path, "format", spark),
    "createExternalTableWithoutLogException" ->
      DeltaErrors.createExternalTableWithoutLogException(new Path(path), "tableName", spark),
    "createExternalTableWithoutSchemaException" ->
      DeltaErrors.createExternalTableWithoutSchemaException(new Path(path), "tableName", spark),
    "createManagedTableWithoutSchemaException" ->
      DeltaErrors.createManagedTableWithoutSchemaException("tableName", spark),
    "multipleSourceRowMatchingTargetRowInMergeException" ->
      DeltaErrors.multipleSourceRowMatchingTargetRowInMergeException(spark),
    "concurrentModificationException" -> new ConcurrentWriteException(None))

  def otherMessagesToTest: Map[String, String] = Map(
    "deltaFileNotFoundHint" ->
      DeltaErrors.deltaFileNotFoundHint(
        DeltaErrors.generateDocsLink(
          sparkConf,
          DeltaErrors.faqRelativePath,
          skipValidation = true),
        path))

  def errorMessagesToTest: Map[String, String] =
    errorsToTest.mapValues(_.getMessage) ++ otherMessagesToTest

  def checkIfValidResponse(url: String, response: String): Boolean = {
    response.contains("HTTP/1.1 200 OK") || response.contains("HTTP/2 200")
  }

  def getUrlsFromMessage(message: String): List[String] = {
    val regexToFindUrl = "https://[^\\s]+".r
    regexToFindUrl.findAllIn(message).toList
  }

  def testUrls(): Unit = {
    errorMessagesToTest.foreach { case (errName, message) =>
      getUrlsFromMessage(message).foreach { url =>
        Given(s"*** Checking response for url: $url")
        var response = ""
        (1 to MAX_URL_ACCESS_RETRIES).foreach { attempt =>
          if (attempt > 1) Thread.sleep(1000)
          response = Process("curl -I " + url).!!
          if (!checkIfValidResponse(url, response)) {
            fail(
              s"""
                 |A link to the URL: '$url' is broken in the error: $errName, accessing this URL
                 |does not result in a valid response, received the following response: $response
               """.stripMargin)
          }
        }
      }
    }
  }

  test("Validate that links to docs in DeltaErrors are correct") {
    testUrls()
  }
}

class DeltaErrorsSuite extends DeltaErrorsSuiteBase
Example 35
Source File: AkkaHttpLambdaHandlerSpec.scala From scala-server-lambda with MIT License | 5 votes |
package io.github.howardjohn.lambda.akka

import akka.actor.ActorSystem
import akka.http.scaladsl.marshalling.{Marshaller, ToEntityMarshaller}
import akka.http.scaladsl.model.MediaTypes.`application/json`
import akka.http.scaladsl.model.headers.RawHeader
import akka.http.scaladsl.model.{HttpEntity, StatusCodes}
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server.Route
import akka.http.scaladsl.unmarshalling.{FromEntityUnmarshaller, Unmarshaller}
import akka.stream.ActorMaterializer
import io.circe.parser.decode
import io.circe.syntax._
import io.circe.{Decoder, Encoder}
import io.github.howardjohn.lambda.LambdaHandlerBehavior
import io.github.howardjohn.lambda.LambdaHandlerBehavior._
import org.scalatest.{BeforeAndAfterAll, FeatureSpec, GivenWhenThen}

import scala.concurrent.Future

class AkkaHttpLambdaHandlerSpec
    extends FeatureSpec
    with LambdaHandlerBehavior
    with GivenWhenThen
    with BeforeAndAfterAll {

  implicit val system: ActorSystem = ActorSystem("test")
  implicit val materializer: ActorMaterializer = ActorMaterializer()
  implicit val ec = scala.concurrent.ExecutionContext.Implicits.global

  val route: Route = path("hello") {
    (get & parameter("times".as[Int] ? 1)) { times =>
      complete {
        Seq
          .fill(times)("Hello World!")
          .mkString(" ")
      }
    }
  } ~ path("long") {
    get {
      Thread.sleep(1000)
      complete("")
    }
  } ~ path("post") {
    post {
      entity(as[String]) { body =>
        complete(body)
      }
    }
  } ~ path("json") {
    post {
      import CirceJsonMarshalling._
      import io.circe.generic.auto._
      entity(as[JsonBody]) { entity =>
        complete(LambdaHandlerBehavior.jsonReturn.asJson)
      }
    }
  } ~ path("exception") {
    get {
      throw RouteException()
    }
  } ~ path("error") {
    get {
      complete(StatusCodes.InternalServerError)
    }
  } ~ path("header") {
    get {
      headerValueByName(inputHeader) { header =>
        respondWithHeaders(RawHeader(outputHeader, outputHeaderValue)) {
          complete(header)
        }
      }
    }
  }

  val handler = new AkkaHttpLambdaHandler(route)

  scenariosFor(behavior(handler))

  override def afterAll(): Unit = {
    materializer.shutdown()
    system.terminate()
  }
}

object CirceJsonMarshalling {
  implicit final def unmarshaller[A: Decoder]: FromEntityUnmarshaller[A] =
    Unmarshaller.stringUnmarshaller
      .forContentTypes(`application/json`)
      .flatMap { ctx => mat => json =>
        decode[A](json).fold(Future.failed, Future.successful)
      }

  implicit final def marshaller[A: Encoder]: ToEntityMarshaller[A] =
    Marshaller.withFixedContentType(`application/json`) { a =>
      HttpEntity(`application/json`, a.asJson.noSpaces)
    }
}
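The scenariosFor(behavior(handler)) call pulls shared GivenWhenThen scenarios from the external LambdaHandlerBehavior trait. A sketch of that shared-test style follows; the trait name, the greeting scenario, and the handle function are hypothetical illustrations, not the project's actual API:

import org.scalatest.{FeatureSpec, GivenWhenThen}

trait GreetingBehavior extends GivenWhenThen { this: FeatureSpec =>
  // Shared scenarios, parameterized by the system under test.
  def greeting(handle: String => String): Unit = {
    scenario("GET /hello returns a greeting") {
      Given("a request to the /hello route")
      val request = "/hello"
      When("the handler processes it")
      val response = handle(request)
      Then("the body contains the greeting")
      assert(response.contains("Hello World!"))
    }
  }
}

// Usage mirrors the spec above: scenariosFor(greeting(myHandler))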
Example 36
Source File: C2DMessageConverterTest.scala From toketi-kafka-connect-iothub with MIT License | 5 votes |
package com.microsoft.azure.iot.kafka.connect.sink

import java.time.Instant
import java.util.Date

import com.microsoft.azure.iot.kafka.connect.sink.testhelpers.{TestSchemas, TestSinkRecords}
import com.microsoft.azure.iot.kafka.connect.source.JsonSerialization
import org.apache.kafka.connect.errors.ConnectException
import org.scalatest.{FlatSpec, GivenWhenThen}

class C2DMessageConverterTest extends FlatSpec with GivenWhenThen with JsonSerialization {

  "C2DMessageConverter" should "validate the schema of a struct record against the expected schema" in {
    Given("A valid record schema")
    var schema = TestSchemas.validSchema
    When("ValidateSchema is called")
    Then("No exception is thrown")
    C2DMessageConverter.validateStructSchema(schema)

    Given("A valid record schema")
    schema = TestSchemas.validSchemaWithMissingOptionalField
    When("ValidateSchema is called")
    Then("No exception is thrown")
    C2DMessageConverter.validateStructSchema(schema)

    Given("A schema with an invalid type")
    schema = TestSchemas.invalidSchemaTypeSchema
    When("ValidateSchema is called")
    Then("A ConnectException is thrown")
    intercept[ConnectException] {
      C2DMessageConverter.validateStructSchema(schema)
    }

    Given("A schema with an invalid field type")
    schema = TestSchemas.invalidFieldTypeSchema
    When("ValidateSchema is called")
    Then("A ConnectException is thrown")
    intercept[ConnectException] {
      C2DMessageConverter.validateStructSchema(schema)
    }

    Given("A schema with a missing field")
    schema = TestSchemas.missingFieldSchema
    When("ValidateSchema is called")
    Then("A ConnectException is thrown")
    intercept[ConnectException] {
      C2DMessageConverter.validateStructSchema(schema)
    }
  }

  "C2DMessageConverter" should "deserialize sink records of String schema and return the C2D Message" in {
    Given("A valid record of string schema")
    var record = TestSinkRecords.getStringSchemaRecord()
    When("DeserializeMessage is called")
    var c2DMessage = C2DMessageConverter.deserializeMessage(record, record.valueSchema())
    Then("A valid C2D message is obtained")
    assert(c2DMessage != null)
    assert(c2DMessage.deviceId == "device1")
    assert(c2DMessage.messageId == "message1")
    assert(c2DMessage.message == "Turn on")
    assert(c2DMessage.expiryTime.isDefined)
    assert(c2DMessage.expiryTime.get.after(Date.from(Instant.parse("2016-01-01T00:00:00Z"))))

    Given("A valid record of string schema")
    record = TestSinkRecords.getStringSchemaRecord2()
    When("DeserializeMessage is called")
    c2DMessage = C2DMessageConverter.deserializeMessage(record, record.valueSchema())
    Then("A valid C2D message is obtained")
    assert(c2DMessage != null)
    assert(c2DMessage.deviceId == "device1")
    assert(c2DMessage.messageId == "message1")
    assert(c2DMessage.message == "Turn on")
    assert(c2DMessage.expiryTime.isEmpty)
  }

  "C2DMessageConverter" should "throw an exception if record with string schema has invalid data" in {
    Given("A record of string schema with invalid data")
    val record = TestSinkRecords.getInvalidScringSchemaRecord()
    When("DeserializeMessage is called")
    Then("Then a ConnectException is thrown")
    intercept[ConnectException] {
      C2DMessageConverter.deserializeMessage(record, record.valueSchema())
    }
  }
}
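A side note on the intercept pattern used throughout these tests: intercept returns the caught exception, so the assertions above could also inspect its message. A minimal illustration:

import org.scalatest.Assertions._

val e = intercept[IllegalArgumentException] {
  require(false, "bad schema") // throws IllegalArgumentException
}
assert(e.getMessage.contains("bad schema"))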
Example 37
Source File: IotHubSinkConnectorTest.scala From toketi-kafka-connect-iothub with MIT License | 5 votes |
package com.microsoft.azure.iot.kafka.connect.sink

import com.microsoft.azure.iot.kafka.connect.source.JsonSerialization
import com.microsoft.azure.iot.kafka.connect.sink.testhelpers.SinkTestConfig
import org.apache.kafka.connect.errors.ConnectException
import org.scalatest.{FlatSpec, GivenWhenThen}

class IotHubSinkConnectorTest extends FlatSpec with GivenWhenThen with JsonSerialization {

  "IotHubSinkConnector" should "validate all input properties and generate right set of task config properties" in {
    Given("Valid set of input properties")
    val inputProperties1 = SinkTestConfig.sinkConnectorTestProps
    var connector = new IotHubSinkConnector

    When("Start and TaskConfig are called in right order")
    connector.start(inputProperties1)

    Then("The TaskConfigs have all the expected properties")
    var taskConfigs = connector.taskConfigs(2)
    assert(taskConfigs.size() == 2)
    for (i <- 0 until 2) {
      val taskConfig: java.util.Map[String, String] = taskConfigs.get(i)
      assert(taskConfig.containsKey(IotHubSinkConfig.IotHubConnectionString))
    }

    Given("Valid set of input properties")
    val inputProperties2 = SinkTestConfig.sinkConnectorTestProps2
    connector = new IotHubSinkConnector

    When("Start and TaskConfig are called in right order")
    connector.start(inputProperties2)

    Then("The TaskConfigs have all the expected properties")
    taskConfigs = connector.taskConfigs(2)
    assert(taskConfigs.size() == 2)
    for (i <- 0 until 2) {
      val taskConfig: java.util.Map[String, String] = taskConfigs.get(i)
      assert(taskConfig.containsKey(IotHubSinkConfig.IotHubConnectionString))
    }
  }

  it should "throw an exception if invalid config properties are supplied" in {
    Given("Input properties without the required values")
    val inputPropertiesWithoutRequiredValues = SinkTestConfig.invalidSinkConnectorTestProps
    var connector = new IotHubSinkConnector

    When("Connector.Start throws a ConnectException")
    intercept[ConnectException] {
      connector.start(inputPropertiesWithoutRequiredValues)
    }

    Given("Input properties with invalid values")
    val inputPropertiesWithInvalidValue = SinkTestConfig.invalidSinkConnectorTestProps2
    connector = new IotHubSinkConnector

    When("Connector.Start throws a ConnectException")
    intercept[ConnectException] {
      connector.start(inputPropertiesWithInvalidValue)
    }
  }
}
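For context, taskConfigs(maxTasks) is part of the Kafka Connect Connector API this test exercises; connectors commonly fan the properties received in start() out to every task. A rough sketch of that convention, not the actual IotHubSinkConnector implementation:

import java.util
import scala.collection.JavaConverters._

// Return one config map per task, each a copy of the connector's props.
def taskConfigs(maxTasks: Int, props: util.Map[String, String]): util.List[util.Map[String, String]] =
  List.fill(maxTasks)(new util.HashMap[String, String](props): util.Map[String, String]).asJava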
Example 38
Source File: IotMessageConverterTest.scala From toketi-kafka-connect-iothub with MIT License | 5 votes |
// Copyright (c) Microsoft. All rights reserved.

package com.microsoft.azure.iot.kafka.connect.source

import java.text.SimpleDateFormat
import java.time.Instant

import com.microsoft.azure.eventhubs.impl.AmqpConstants
import com.microsoft.azure.iot.kafka.connect.source.testhelpers.DeviceTemperature
import org.apache.kafka.connect.data.Struct
import org.json4s.jackson.Serialization._
import org.scalatest.{FlatSpec, GivenWhenThen}

import scala.collection.mutable
import scala.util.Random

class IotMessageConverterTest extends FlatSpec with GivenWhenThen with JsonSerialization {

  private val random: Random = new Random

  "IotMessage Converter" should "populate right values for kafka message struct fields" in {
    Given("IotMessage object")
    val deviceTemp = DeviceTemperature(100.01, "F")
    val deviceTempStr = write(deviceTemp)

    val sequenceNumber = random.nextLong()
    val correlationId = random.nextString(10)
    val offset = random.nextString(10)
    val enqueuedDate = new SimpleDateFormat("MM/dd/yyyy").parse("12/01/2016")
    val systemProperties = mutable.Map[String, Object](
      "iothub-connection-device-id" → "device10",
      AmqpConstants.SEQUENCE_NUMBER_ANNOTATION_NAME → sequenceNumber.asInstanceOf[Object],
      AmqpConstants.AMQP_PROPERTY_CORRELATION_ID → correlationId,
      AmqpConstants.OFFSET_ANNOTATION_NAME → offset,
      AmqpConstants.ENQUEUED_TIME_UTC_ANNOTATION_NAME → enqueuedDate)

    val timestamp = Instant.now().toString
    val messageProperties = mutable.Map[String, Object](
      "timestamp" → timestamp,
      "contentType" → "temperature"
    )

    val iotMessage = IotMessage(deviceTempStr, systemProperties, messageProperties)

    When("getIotMessageStruct is called with IotMessage object")
    val kafkaMessageStruct: Struct = IotMessageConverter.getIotMessageStruct(iotMessage)

    Then("The struct has all the expected properties")
    assert(kafkaMessageStruct.getString("deviceId") == "device10")
    assert(kafkaMessageStruct.getString("offset") == offset)
    assert(kafkaMessageStruct.getString("contentType") == "temperature")
    assert(kafkaMessageStruct.getString("enqueuedTime") == enqueuedDate.toInstant.toString)
    assert(kafkaMessageStruct.getInt64("sequenceNumber") == sequenceNumber)
    assert(kafkaMessageStruct.getString("content") == deviceTempStr)

    val structSystemProperties = kafkaMessageStruct.getMap[String, String]("systemProperties")
    assert(structSystemProperties != null)
    assert(structSystemProperties.size == 1)
    assert(structSystemProperties.get(AmqpConstants.AMQP_PROPERTY_CORRELATION_ID) == correlationId)

    val structProperties = kafkaMessageStruct.getMap[String, String]("properties")
    assert(structProperties != null)
    assert(structProperties.size == 1)
    assert(structProperties.get("timestamp") == timestamp)
  }

  it should "use default values for missing properties" in {
    val deviceTemp = DeviceTemperature(100.01, "F")
    val deviceTempStr = write(deviceTemp)
    val systemProperties = mutable.Map.empty[String, Object]
    val messageProperties = mutable.Map.empty[String, Object]
    val iotMessage = IotMessage(deviceTempStr, systemProperties, messageProperties)

    When("getIotMessageStruct is called with IotMessage object")
    val kafkaMessageStruct: Struct = IotMessageConverter.getIotMessageStruct(iotMessage)

    Then("The struct has all the expected properties")
    assert(kafkaMessageStruct.getString("deviceId") == "")
    assert(kafkaMessageStruct.getString("offset") == "")
    assert(kafkaMessageStruct.getString("contentType") == "")
    assert(kafkaMessageStruct.getString("enqueuedTime") == "")
    assert(kafkaMessageStruct.getInt64("sequenceNumber") == 0)
    assert(kafkaMessageStruct.getString("content") == deviceTempStr)

    val structSystemProperties = kafkaMessageStruct.getMap[String, String]("systemProperties")
    assert(structSystemProperties != null)
    assert(structSystemProperties.size == 0)

    val structProperties = kafkaMessageStruct.getMap[String, String]("properties")
    assert(structProperties != null)
    assert(structProperties.size == 0)
  }
}
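The DeviceTemperature helper and the write/read calls come from the project's testhelpers and its JsonSerialization trait. A self-contained sketch of how json4s round-trips such a case class, assuming DefaultFormats is what JsonSerialization supplies:

import org.json4s.DefaultFormats
import org.json4s.jackson.Serialization.{read, write}

case class DeviceTemperature(value: Double, unit: String)

implicit val formats: DefaultFormats.type = DefaultFormats

val json = write(DeviceTemperature(100.01, "F")) // {"value":100.01,"unit":"F"}
assert(read[DeviceTemperature](json) == DeviceTemperature(100.01, "F"))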
Example 39
Source File: IotHubSourceTaskTest.scala From toketi-kafka-connect-iothub with MIT License | 5 votes |
// Copyright (c) Microsoft. All rights reserved.

package com.microsoft.azure.iot.kafka.connect.source

import java.time.{Duration, Instant}
import java.util

import com.microsoft.azure.iot.kafka.connect.source.testhelpers.{DeviceTemperature, MockDataReceiver, TestConfig, TestIotHubSourceTask}
import org.apache.kafka.connect.data.Struct
import org.json4s.jackson.Serialization.read
import org.scalatest.{FlatSpec, GivenWhenThen}

class IotHubSourceTaskTest extends FlatSpec with GivenWhenThen with JsonSerialization {

  "IotHubSourceTask poll" should "return a list of SourceRecords with the right format" in {
    Given("IotHubSourceTask instance")
    val iotHubSourceTask = new TestIotHubSourceTask
    iotHubSourceTask.start(TestConfig.sourceTaskTestProps)

    When("IotHubSourceTask.poll is called")
    val sourceRecords = iotHubSourceTask.poll()

    Then("It returns a list of SourceRecords")
    assert(sourceRecords != null)
    assert(sourceRecords.size() == 15)
    for (i <- 0 until 15) {
      val record = sourceRecords.get(i)
      assert(record.topic() == TestConfig.sourceTaskTestProps.get(IotHubSourceConfig.KafkaTopic))
      assert(record.valueSchema() == IotMessageConverter.schema)
      val messageStruct = record.value().asInstanceOf[Struct]
      assert(messageStruct.getString("deviceId").startsWith("device"))
      assert(messageStruct.getString("contentType") == "temperature")
      val enqueuedTime = Instant.parse(messageStruct.getString("enqueuedTime"))
      assert(enqueuedTime.isAfter(Instant.parse("2016-11-20T00:00:00Z")))

      val systemProperties = messageStruct.getMap[String, String]("systemProperties")
      assert(systemProperties != null)
      assert(systemProperties.get("sequenceNumber") != "")
      assert(systemProperties.get("correlationId") != "")

      val properties = messageStruct.getMap[String, String]("properties")
      assert(properties != null)
      assert(properties.get("timestamp") != "")

      val deviceTemperature = read[DeviceTemperature](messageStruct.get("content").asInstanceOf[String])
      assert(deviceTemperature != null)
      assert(deviceTemperature.unit == "F")
      assert(deviceTemperature.value != 0)
    }
  }

  "IotHubSourceTask start" should "initialize all properties" in {
    Given("A list of properties for IotHubSourceTask")
    val props: util.Map[String, String] = TestConfig.sourceTaskTestProps

    When("IotHubSourceTask is started")
    val task = new TestIotHubSourceTask
    task.start(props)

    Then("Data receiver should be properly initialized")
    assert(task.partitionSources.length == 3)
    assert(!task.partitionSources.exists(s => s.dataReceiver == null))
    for (ps ← task.partitionSources) {
      val dataReceiver = ps.dataReceiver.asInstanceOf[MockDataReceiver]
      assert(dataReceiver.offset.isDefined)
      assert(dataReceiver.startTime.isEmpty)
      assert(dataReceiver.connectionString != "")
      assert(dataReceiver.receiverConsumerGroup != "")
      assert(dataReceiver.receiveTimeout == Duration.ofSeconds(5))
    }
  }

  it should "initialize start time correctly on the data receiver when it is passed in the config" in {
    Given("A list of properties with StartTime for IotHubSourceTask")
    val props: util.Map[String, String] = TestConfig.sourceTaskTestPropsStartTime

    When("IotHubSourceTask is started")
    val task = new TestIotHubSourceTask
    task.start(props)

    Then("Data receiver should be properly initialized, with StartTime, while Offsets value should be ignored")
    assert(task.partitionSources.length == 3)
    assert(!task.partitionSources.exists(s => s.dataReceiver == null))
    for (ps ← task.partitionSources) {
      val dataReceiver = ps.dataReceiver.asInstanceOf[MockDataReceiver]
      assert(dataReceiver.offset.isEmpty)
      assert(dataReceiver.startTime.isDefined)
      assert(dataReceiver.startTime.get == Instant.parse("2016-12-10T00:00:00Z"))
      assert(dataReceiver.connectionString != "")
      assert(dataReceiver.receiverConsumerGroup != "")
    }
  }
}
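Finally, the MockDataReceiver inspected above is another testhelpers class. Its fields can be inferred from the assertions in the test, though everything beyond those names is an assumption; a sketch:

import java.time.{Duration, Instant}

// Field names taken from the assertions above; the constructor shape is assumed.
case class MockDataReceiver(
    connectionString: String,
    receiverConsumerGroup: String,
    offset: Option[String],
    startTime: Option[Instant],
    receiveTimeout: Duration)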