org.scalatest.MustMatchers Scala Examples
The following examples show how to use org.scalatest.MustMatchers.
The original project, source file, and license are noted above each example.
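As a quick orientation, the sketch below is a minimal, hypothetical spec (not taken from any of the projects listed here) showing the assertion syntax that recurs throughout the examples: mixing in MustMatchers brings mustBe, must contain, and must have size into scope.

package example

import org.scalatest.{ FunSpec, MustMatchers }

// Minimal illustration of the must-style assertion DSL provided by MustMatchers.
class GreetingSpec extends FunSpec with MustMatchers {
  describe("A greeting") {
    it("must equal the expected string") {
      val greeting = "hello " + "world"
      greeting mustBe "hello world"
    }
    it("must support collection matchers") {
      val numbers = Seq(1, 2, 3)
      numbers must contain (2)
      numbers must have size 3
    }
  }
}

The examples that follow mix the same trait into different suite styles (FunSpec, WordSpec, FlatSpec).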
Example 1
Source File: ScalaValidatorsGeneratorIntegrationTest.scala From api-first-hand with MIT License | 5 votes |
package de.zalando.apifirst.generators import de.zalando.ExpectedResults import de.zalando.model.{ WithModel, all_of_imports_yaml } import org.scalatest.{ FunSpec, MustMatchers } class ScalaValidatorsGeneratorIntegrationTest extends FunSpec with MustMatchers with ExpectedResults { override val expectationsFolder = super.expectationsFolder + "validation/" describe("ScalaGenerator should generate play validators") { (model ++ examples ++ validations).foreach { ast => testScalaModelGenerator(ast) } } def testScalaModelGenerator(ast: WithModel): Unit = { val name = nameFromModel(ast) it(s"from model $name") { val model = ast.model val scalaModel = new ScalaGenerator(model).playValidators(name, ast.model.packageName.getOrElse(name)) val expected = asInFile(name, "scala") if (expected.isEmpty) dump(scalaModel, name, "scala") clean(scalaModel) mustBe clean(expected) } } }
Example 2
Source File: RuleGeneratorTest.scala From api-first-hand with MIT License | 5 votes |
package de.zalando.play.generator.routes import de.zalando.apifirst.Application.StrictModel import de.zalando.apifirst.naming.Path import org.scalatest.{ FunSpec, MustMatchers } import play.routes.compiler.{ DynamicPart, StaticPart } class RuleGeneratorTest extends FunSpec with MustMatchers { implicit val model = StrictModel(Nil, Map.empty, Map.empty, Map.empty, "/base/", None, Map.empty, Map.empty) val routes = Map( "/" -> Nil, "/a/b/c/d" -> List(StaticPart("a/b/c/d")), "/a/b/c/d/" -> List(StaticPart("a/b/c/d/")), "/a/{b}/c/{d}" -> List(StaticPart("a/"), DynamicPart("b", """[^/]+""", true), StaticPart("/c/"), DynamicPart("d", """[^/]+""", true)), "/{a}/{b}/{c}/{d}/" -> List(DynamicPart("a", """[^/]+""", true), StaticPart("/"), DynamicPart("b", """[^/]+""", true), StaticPart("/"), DynamicPart("c", """[^/]+""", true), StaticPart("/"), DynamicPart("d", """[^/]+""", true), StaticPart("/")), "/{a}/b/{c}/d/" -> List(DynamicPart("a", """[^/]+""", true), StaticPart("/b/"), DynamicPart("c", """[^/]+""", true), StaticPart("/d/")) ) describe("RuleGeneratorTest") { routes.foreach { case (path, expected) => it(s"should parse $path as expected") { val result = RuleGenerator.convertPath(Path(path)).parts result must contain theSameElementsInOrderAs expected } } } }
Example 3
Source File: Rfc3339UtilTest.scala From api-first-hand with MIT License | 5 votes |
package de.zalando.play.controllers

import java.time.{ LocalDateTime, ZoneId, ZoneOffset, ZonedDateTime }

import org.scalatest.{ FunSpec, MustMatchers }

class Rfc3339UtilTest extends FunSpec with MustMatchers {

  val dtz = ZoneId.of("UTC")
  val offset = ZoneOffset.UTC
  //noinspection ScalaStyle
  val date = ZonedDateTime.of(LocalDateTime.ofEpochSecond(1451911387L, 0, offset), dtz)

  describe("Rfc3339UtilTest") {
    it("should parse RFC3339 DateTime") {
      Rfc3339Util.parseDateTime("2007-05-01T15:43:26-00:00").withZoneSameInstant(dtz).toString mustBe "2007-05-01T15:43:26Z[UTC]"
      Rfc3339Util.parseDateTime("2007-05-01T15:43:26+00:00").withZoneSameInstant(dtz).toString mustBe "2007-05-01T15:43:26Z[UTC]"
      Rfc3339Util.parseDateTime("2007-05-01T15:43:26.3452-01:00").withZoneSameInstant(dtz).toString mustBe "2007-05-01T16:43:26.345200Z[UTC]"
      Rfc3339Util.parseDateTime("2007-05-01T15:43:26.3452+01:00").withZoneSameInstant(dtz).toString mustBe "2007-05-01T14:43:26.345200Z[UTC]"
      Rfc3339Util.parseDateTime("2007-05-01T15:43:26.3452+00:00").withZoneSameInstant(dtz).toString mustBe "2007-05-01T15:43:26.345200Z[UTC]"
    }
    it("should parse RFC3339 Date") {
      Rfc3339Util.parseDate("2007-05-01").toString mustBe "2007-05-01"
      Rfc3339Util.parseDate("2008-05-01").toString mustBe "2008-05-01"
      Rfc3339Util.parseDate("2007-08-01").toString mustBe "2007-08-01"
      Rfc3339Util.parseDate("2007-05-08").toString mustBe "2007-05-08"
    }
    it("should write DateTime") {
      Rfc3339Util.writeDateTime(date) mustBe "2016-01-04T12:43:07.0000+0000"
    }
    it("should write Date") {
      Rfc3339Util.writeDate(date.toLocalDate) mustBe "2016-01-04"
    }
  }
}
Example 4
Source File: SecurityConstraintsIntegrationTest.scala From api-first-hand with MIT License | 5 votes |
package de.zalando.swagger import java.io.File import org.scalatest.{ FunSpec, MustMatchers } class SecurityConstraintsIntegrationTest extends FunSpec with MustMatchers with ExpectedResults { override val expectationsFolder = super.expectationsFolder + "security_constraints/" val fixtures = new File("swagger-parser/src/test/resources/examples").listFiles describe("Swagger ApiCall Converter with security constraints") { fixtures.filter(_.getName.endsWith(".yaml")).foreach { file => testSecurityConverter(file) } } def testSecurityConverter(file: File): Unit = { it(s"should convert security constraints in ${file.getName}") { val (base, model) = StrictYamlParser.parse(file) val ast = ModelConverter.fromModel(base, model, Option(file)) val fullResult = ast.calls.filter(_.security.nonEmpty).flatMap(_.security).distinct.mkString("\n") val expected = asInFile(file, "types") if (expected.isEmpty && fullResult.trim.nonEmpty) dump(fullResult, file, "types") clean(fullResult) mustBe clean(expected) } } }
Example 5
Source File: ParseVendorExtensionsTest.scala From api-first-hand with MIT License | 5 votes |
package de.zalando.swagger

import java.io.File

import com.fasterxml.jackson.core.JsonParseException
import com.fasterxml.jackson.dataformat.yaml.snakeyaml.parser.ParserException
import de.zalando.apifirst.Application.ApiCall
import de.zalando.apifirst.Http.{ GET, POST, PUT }
import org.scalatest.{ FunSpec, MustMatchers }

class ParseVendorExtensionsTest extends FunSpec with MustMatchers with ExpectedResults {

  val ok = new File(resourcesPath + "extensions/extensions.ok.yaml")
  val nok = new File(resourcesPath + "extensions/extensions.nok.yaml")
  val hypermediaOk = new File(resourcesPath + "extensions/hypermedia.ok.yaml")
  val hypermediaNOk1 = new File(resourcesPath + "extensions/hypermedia.nok1.yaml")
  val hypermediaNOk2 = new File(resourcesPath + "extensions/hypermedia.nok2.yaml")
  val errorMapping = new File(resourcesPath + "extensions/error_mapping.yaml")

  describe("The swagger parser") {
    it("should read valid vendor extensions") {
      implicit val (uri, swagger) = StrictYamlParser.parse(ok)
      swagger.info.vendorExtensions contains "x-info-extension" mustBe true
      swagger.paths("/").vendorExtensions contains "x-path-extension" mustBe true
      swagger.paths("/").get.vendorExtensions contains "x-operation-extension" mustBe true
      swagger.paths("/").get.responses("200").vendorExtensions contains "x-response-extension" mustBe true
      swagger.tags.head.vendorExtensions contains "x-tag-extension" mustBe true
      swagger.securityDefinitions("internalApiKey").vendorExtensions contains "x-security-extension" mustBe true
    }
    it("should read hypermedia definitions") {
      implicit val (uri, swagger) = StrictYamlParser.parse(hypermediaOk)
      val expected = Map(
        "resource created" -> Map(
          "resource updated" -> Map("condition" -> "some rule to show the transition"),
          "subresource added" -> null),
        "resource updated" -> Map(
          "subresource added" -> Map("condition" -> ""),
          "self" -> Map("condition" -> "non-empty rule")),
        "resource deleted" -> Map("self" -> null),
        "subresource added" -> Map("resource updated" -> null, "self" -> null, "resource deleted" -> null))
      swagger.transitions.nonEmpty mustBe true
      swagger.transitions mustEqual expected
      swagger.paths("/").get.responses("200").targetState mustEqual Some("resource created")
      swagger.paths("/").get.responses("default").targetState mustEqual None
    }
    it("should reject hypermedia definitions without well-formed definition") {
      val exception = intercept[JsonParseException] {
        StrictYamlParser.parse(hypermediaNOk1)
      }
      exception.getMessage mustEqual "Malformed transition definitions"
    }
    it("should reject hypermedia definitions with incorrect initial state") {
      intercept[ParserException] {
        StrictYamlParser.parse(hypermediaNOk2)
      }.getClass mustBe classOf[ParserException]
    }
    it("should read error mappings and assign right preference to them") {
      val (uri, model) = StrictYamlParser.parse(errorMapping)
      val ast = ModelConverter.fromModel(errorMapping.toURI, model, Option(errorMapping))
      val expectedForPUT = Map(
        "404" -> List(classOf[java.util.NoSuchElementException]),
        "403" -> List(classOf[java.lang.SecurityException]),
        "405" -> List(classOf[java.lang.IllegalStateException]),
        "400" -> List(classOf[java.util.NoSuchElementException]))
      val expectedForPOST = Map(
        "403" -> List(classOf[java.lang.SecurityException]),
        "404" -> List(classOf[java.util.NoSuchElementException]),
        "405" -> List(classOf[java.lang.IllegalStateException]))
      ast.calls.foreach {
        case ApiCall(POST, _, _, _, _, mapping, _, _, _) =>
          mapping must contain theSameElementsAs expectedForPOST
        case ApiCall(PUT, _, _, _, _, mapping, _, _, _) =>
          mapping must contain theSameElementsAs expectedForPUT
      }
    }
  }
}
Example 6
Source File: HypermediaConverterTest.scala From api-first-hand with MIT License | 5 votes |
package de.zalando.swagger import java.io.File import de.zalando.apifirst.Hypermedia.State import de.zalando.swagger.strictModel.SwaggerModel import org.scalatest.{ FunSpec, MustMatchers } class HypermediaConverterTest extends FunSpec with MustMatchers with ExpectedResults { override val expectationsFolder = super.expectationsFolder + "hypermedia/" val exampleFixtures = new File(resourcesPath + "extensions").listFiles describe("Strict Swagger Parser hypermedia converter") { exampleFixtures.filter(_.getName.startsWith("hypermedia.ok")).foreach { file => testTransitionsConverter(file) testStateDefinitions(file) } } def testTransitionsConverter(file: File): Unit = { it(s"should parse the yaml swagger file ${file.getName} with hypermedia information") { val (base, model) = StrictYamlParser.parse(file) model mustBe a[SwaggerModel] val ast = ModelConverter.fromModel(base, model, Some(file)) val hypermedia = ast.stateTransitions val expected = asInFile(file, "hypermedia") val media = State.toDot(hypermedia).mkString("\n") if (expected.isEmpty && media.nonEmpty) dump(media, file, "hypermedia") clean(media) mustBe clean(expected) } } def testStateDefinitions(file: File): Unit = { it(s"should parse the yaml swagger file ${file.getName} with state name information") { val (base, model) = StrictYamlParser.parse(file) model mustBe a[SwaggerModel] val ast = ModelConverter.fromModel(base, model, Some(file)) val targetStates = ast.calls.map(_.targetStates) val expected = asInFile(file, "states") val media = targetStates.mkString("\n") if (expected.isEmpty && media.nonEmpty) dump(media, file, "states") clean(media) mustBe clean(expected) } } }
Example 7
Source File: SecurityConverterIntegrationTest.scala From api-first-hand with MIT License | 5 votes |
package de.zalando.swagger import java.io.File import de.zalando.swagger.strictModel.SwaggerModel import org.scalatest.{ FunSpec, MustMatchers } class SecurityConverterIntegrationTest extends FunSpec with MustMatchers with ExpectedResults { override val expectationsFolder = super.expectationsFolder + "security_definitions/" val fixtures = new File(resourcesPath + "examples").listFiles describe("Swagger Security Converter") { fixtures.filter(_.getName.endsWith(".yaml")).foreach { file => testSecurityConverter(file) } } def testSecurityConverter(file: File): Unit = { it(s"should convert security definitions from ${file.getName}") { val (base, model) = StrictYamlParser.parse(file) model mustBe a[SwaggerModel] val securityDefs = SecurityConverter.convertDefinitions(model.securityDefinitions) val fullResult = securityDefs.mkString("\n") val expected = asInFile(file, "types") if (expected.isEmpty) dump(fullResult, file, "types") clean(fullResult) mustBe clean(expected) } } }
Example 8
Source File: StrictParseExamplesTest.scala From api-first-hand with MIT License | 5 votes |
package de.zalando.swagger import java.io.File import java.net.URI import de.zalando.swagger.strictModel.SwaggerModel import org.scalatest.{ FunSpec, MustMatchers } class StrictParseExamplesTest extends FunSpec with MustMatchers with ExpectedResults { val fixtures = new File(resourcesPath + "examples").listFiles ++ new File(resourcesPath + "schema_examples").listFiles describe("Strict Swagger Parser") { fixtures.filter(_.getName.endsWith(".yaml")).foreach { file => it(s"should parse the yaml swagger file ${file.getName} as specification") { val result = StrictYamlParser.parse(file) result._1 mustBe a[URI] result._2 mustBe a[SwaggerModel] } } } }
Example 9
Source File: SecurityDefinitionDeserializerTest.scala From api-first-hand with MIT License | 5 votes |
package de.zalando.swagger import java.io.File import de.zalando.swagger.strictModel._ import org.scalatest.{ MustMatchers, FunSpec } class SecurityDefinitionDeserializerTest extends FunSpec with MustMatchers with ExpectedResults { val file = new File(resourcesPath + "examples/security.api.yaml") describe("SecurityDefinitionDeserializer") { it(s"should parse security definitions in the ${file.getName}") { val (uri, model) = StrictYamlParser.parse(file) val result = model.securityDefinitions result.size mustBe 6 result("petstoreImplicit") mustBe a[Oauth2ImplicitSecurity] result("githubAccessCode") mustBe a[Oauth2AccessCodeSecurity] result("petstorePassword") mustBe a[Oauth2PasswordSecurity] result("justBasicStuff") mustBe a[BasicAuthenticationSecurity] result("petstoreApplication") mustBe a[Oauth2ApplicationSecurity] result("internalApiKey") mustBe a[ApiKeySecurity] } } }
Example 10
Source File: TypeConverterTest.scala From api-first-hand with MIT License | 5 votes |
package de.zalando.swagger import java.io.File import de.zalando.swagger.strictModel.SwaggerModel import org.scalatest.{ FunSpec, MustMatchers } class TypeConverterTest extends FunSpec with MustMatchers with ExpectedResults { override val expectationsFolder = super.expectationsFolder + "types/" val modelFixtures = new File(resourcesPath + "model").listFiles val exampleFixtures = new File(resourcesPath + "examples").listFiles describe("Strict Swagger Parser model") { modelFixtures.filter(_.getName.endsWith(".yaml")).foreach { file => testTypeConverter(file) } } describe("Strict Swagger Parser examples") { exampleFixtures.filter(_.getName.endsWith(".yaml")).foreach { file => testTypeConverter(file) } } def testTypeConverter(file: File): Unit = { it(s"should parse the yaml swagger file ${file.getName} as specification") { val (base, model) = StrictYamlParser.parse(file) model mustBe a[SwaggerModel] val typeDefs = ModelConverter.fromModel(base, model, Some(file)).typeDefs val typeMap = typeDefs map { case (k, v) => k -> ("\n\t" + de.zalando.apifirst.util.ShortString.toShortString("\t\t")(v)) } val typesStr = typeMap.toSeq.sortBy(_._1.parts.size).map(p => p._1 + " ->" + p._2).mkString("\n") val expected = asInFile(file, "types") if (expected.isEmpty) dump(typesStr, file, "types") clean(typesStr) mustBe clean(expected) } } }
Example 11
Source File: ScalaMarshallersGeneratorIntegrationTest.scala From api-first-hand with MIT License | 5 votes |
package de.zalando.apifirst.generators import de.zalando.ExpectedResults import de.zalando.model.WithModel import org.scalatest.{ FunSpec, MustMatchers } class ScalaMarshallersGeneratorIntegrationTest extends FunSpec with MustMatchers with ExpectedResults { override val expectationsFolder = super.expectationsFolder + "marshallers/" describe("ScalaGenerator should generate marshallers") { examples.foreach { file => testScalaMarshallersGenerator(file) } } def testScalaMarshallersGenerator(ast: WithModel): Unit = { val name = nameFromModel(ast) it(s"from model $name") { val model = ast.model val scalaModel = new ScalaGenerator(model).playScalaMarshallers(name, ast.model.packageName.getOrElse(name)) val expected = asInFile(name, "scala") if (expected.isEmpty) dump(scalaModel, name, "scala") clean(scalaModel) mustBe clean(expected) } } }
Example 12
Source File: ReferenceTest.scala From api-first-hand with MIT License | 5 votes |
package de.zalando.apifirst import de.zalando.apifirst.Domain._ import de.zalando.apifirst.naming._ import de.zalando.apifirst.naming.dsl._ import org.scalatest.{ FunSpec, MustMatchers } class ReferenceTest extends FunSpec with MustMatchers { describe("Reference") { it("can be created from absolute URI strings, optionally containing pointer fragments") { Reference("file:/swagger.yaml") mustBe Reference("file:/swagger.yaml") Reference("http://goo.gl/swagger.yaml") mustBe Reference("http://goo.gl/swagger.yaml") Reference("file:/swagger.yaml#/foo/bar") mustBe Reference("file:/swagger.yaml#/foo/bar") } it("can be created containing pointer fragments identifying a path segment") { ("{foo}" / "{bar}").parent mustBe Reference("{foo}") } it("must be able to append pointer tokens") { val base = Reference("file:/swagger.yaml") base / "foo" mustBe "file:/swagger.yaml" / "foo" base / "foo" / "bar" mustBe "file:/swagger.yaml" / "foo" / "bar" } it("must be able to append pointers") { val base = Reference("file:/swagger.yaml") val foo = Reference("foo") val bar = Reference("bar") base / foo mustBe "file:/swagger.yaml" / "foo" base / foo / bar mustBe "file:/swagger.yaml" / "foo" / "bar" base / foo / "" / bar mustBe "file:/swagger.yaml" / "foo" / "" / "bar" } it("must be able to prepend pointer tokens") { val reference = "file:/swagger.yaml" / "bar" reference.prepend("foo") mustBe "foo" / "file:/swagger.yaml" / "bar" } it("must return a pointers parent reference or itself if no parent pointer reference exists") { val base = Reference("file:/swagger.yaml") (base / "foo" / "bar").parent mustBe "file:/swagger.yaml" / "foo" (base / "foo").parent mustBe Reference("file:/swagger.yaml") base.parent mustBe Reference("") } it("must ignore starting # while comparing references") { val one = TypeDef(Reference("/definitions/ErrorModel"), Seq( new Field(Reference("/definitions/ErrorModel/message"), Str(None, TypeMeta(None))), new Field(Reference("/definitions/ErrorModel/code"), Intgr(TypeMeta(None))) ), TypeMeta(None)) val two = TypeDef(Reference("#/definitions/ErrorModel"), Seq( new Field(Reference("#/definitions/ErrorModel/message"), Str(None, TypeMeta(None))), new Field(Reference("#/definitions/ErrorModel/code"), Intgr(TypeMeta(None))) ), TypeMeta(None)) } } }
Example 13
Source File: ScalaPlayTestsGeneratorIntegrationTest.scala From api-first-hand with MIT License | 5 votes |
package de.zalando.apifirst.generators import de.zalando.ExpectedResults import de.zalando.model.WithModel import org.scalatest.{ FunSpec, MustMatchers } class ScalaPlayTestsGeneratorIntegrationTest extends FunSpec with MustMatchers with ExpectedResults { override val expectationsFolder = super.expectationsFolder + "tests/" describe("ScalaGenerator should generate tests") { (examples ++ model ++ validations).foreach { file => testScalaFormParserGenerator(file) } } def testScalaFormParserGenerator(ast: WithModel): Unit = { val name = nameFromModel(ast) it(s"from model $name") { val model = ast.model val scalaModel = new ScalaGenerator(model).playScalaTests(name, ast.model.packageName.getOrElse(name)) val expected = asInFile(name, "scala") if (expected.isEmpty) dump(scalaModel, name, "scala") clean(scalaModel) mustBe clean(expected) } } }
Example 14
Source File: ScalaBaseControllerGeneratorIntegrationTest.scala From api-first-hand with MIT License | 5 votes |
package de.zalando.apifirst.generators import de.zalando.ExpectedResults import de.zalando.model._ import org.scalatest.{ FunSpec, MustMatchers } class ScalaBaseControllerGeneratorIntegrationTest extends FunSpec with MustMatchers with ExpectedResults { override val expectationsFolder = super.expectationsFolder + "base_controllers/" describe("ScalaBaseControllerGenerator should generate controller bases") { (model ++ examples).foreach { ast => testScalaBaseControllerGenerator(ast) } } def testScalaBaseControllerGenerator(ast: WithModel): Unit = { val name = nameFromModel(ast) it(s"from model $name") { val model = ast.model val scalaModel = new ScalaGenerator(model).playScalaControllerBases(name, ast.model.packageName.getOrElse(name)) val expected = asInFile(name, "base.scala") if (expected.isEmpty) dump(scalaModel, name, "base.scala") clean(scalaModel) mustBe clean(expected) } } }
Example 15
Source File: ScalaControllerGeneratorIntegrationTest.scala From api-first-hand with MIT License | 5 votes |
package de.zalando.apifirst.generators import de.zalando.ExpectedResults import de.zalando.model.WithModel import org.scalatest.{ FunSpec, MustMatchers } class ScalaControllerGeneratorIntegrationTest extends FunSpec with MustMatchers with ExpectedResults { override val expectationsFolder = super.expectationsFolder + "controllers/" describe("ScalaSecurityGenerator should generate controlers") { (model ++ examples).foreach { ast => testScalaControllerGenerator(ast) } } def testScalaControllerGenerator(ast: WithModel): Unit = { val name = nameFromModel(ast) it(s"from model $name") { val model = ast.model val expected = asInFile(name, "scala") val scalaModel = new ScalaGenerator(model).playScalaControllers(name, ast.model.packageName.getOrElse(name), expected) if (expected.isEmpty) dump(scalaModel, name, "scala") clean(scalaModel) mustBe clean(expected) } } }
Example 16
Source File: ScalaFormParserGeneratorIntegrationTest.scala From api-first-hand with MIT License | 5 votes |
package de.zalando.apifirst.generators import de.zalando.ExpectedResults import de.zalando.model.WithModel import org.scalatest.{ FunSpec, MustMatchers } class ScalaFormParserGeneratorIntegrationTest extends FunSpec with MustMatchers with ExpectedResults { override val expectationsFolder = super.expectationsFolder + "form_parsers/" describe("ScalaGenerator should generate a form parser") { examples.foreach { file => testScalaFormParserGenerator(file) } } def testScalaFormParserGenerator(ast: WithModel): Unit = { val name = nameFromModel(ast) it(s"from model $name") { val model = ast.model val scalaModel = new ScalaGenerator(model).playScalaFormParsers(name, ast.model.packageName.getOrElse(name)) val expected = asInFile(name, "scala") if (expected.isEmpty) dump(scalaModel, name, "scala") clean(scalaModel) mustBe clean(expected) } } }
Example 17
Source File: ScalaTestDataGeneratorIntegrationTest.scala From api-first-hand with MIT License | 5 votes |
package de.zalando.apifirst.generators import de.zalando.ExpectedResults import de.zalando.model.{ WithModel, string_formats_yaml } import org.scalatest.{ FunSpec, MustMatchers } class ScalaTestDataGeneratorIntegrationTest extends FunSpec with MustMatchers with ExpectedResults { override val expectationsFolder = super.expectationsFolder + "test_data/" describe("ScalaGenerator should generate a test data generators") { (examples ++ model).foreach { file => testScalaFormParserGenerator(file) } } def testScalaFormParserGenerator(ast: WithModel): Unit = { val name = nameFromModel(ast) it(s"from model $name") { val model = ast.model val scalaModel = new ScalaGenerator(model).generateGenerators(name, ast.model.packageName.getOrElse(name)) val expected = asInFile(name, "scala") if (expected.isEmpty) dump(scalaModel, name, "scala") clean(scalaModel) mustBe clean(expected) } } }
Example 18
Source File: ScalaModelGeneratorIntegrationTest.scala From api-first-hand with MIT License | 5 votes |
package de.zalando.apifirst.generators import de.zalando.ExpectedResults import de.zalando.model.WithModel import org.scalatest.{ FunSpec, MustMatchers } class ScalaModelGeneratorIntegrationTest extends FunSpec with MustMatchers with ExpectedResults { override val expectationsFolder: String = super.expectationsFolder + "model/" describe("ScalaGenerator should generate scala model") { (model ++ examples ++ validations).foreach { ast => testScalaModelGenerator(ast) } } def testScalaModelGenerator(ast: WithModel): Unit = { val name = nameFromModel(ast) it(s"from model $name") { val model = ast.model val scalaModel = new ScalaGenerator(model).generateModel(name, ast.model.packageName.getOrElse(name)) val expected = asInFile(name, "scala") if (expected.isEmpty) dump(scalaModel, name, "scala") clean(scalaModel) mustBe clean(expected) } } }
Example 19
Source File: ScalaSecurityGeneratorIntegrationTest.scala From api-first-hand with MIT License | 5 votes |
package de.zalando.apifirst.generators import de.zalando.ExpectedResults import de.zalando.model.WithModel import org.scalatest.{ FunSpec, MustMatchers } class ScalaSecurityGeneratorIntegrationTest extends FunSpec with MustMatchers with ExpectedResults { override val expectationsFolder = super.expectationsFolder + "security/" describe("ScalaSecurityGenerator should generate security plumbing files") { examples.foreach { ast => testScalaSecurityGenerator(ast) } } describe("ScalaSecurityGenerator should generate security helper files") { examples.foreach { ast => testScalaSecurityExtractorsGenerator(ast) } } def testScalaSecurityGenerator(ast: WithModel): Unit = { val name = nameFromModel(ast) it(s"from model $name") { val model = ast.model val scalaModel = new ScalaGenerator(model).playScalaSecurity(name, ast.model.packageName.getOrElse(name)) val expected = asInFile(name, "scala") if (expected.isEmpty) dump(scalaModel, name, "scala") clean(scalaModel) mustBe clean(expected) } } def testScalaSecurityExtractorsGenerator(ast: WithModel): Unit = { val name = nameFromModel(ast) it(s"from model $name") { val model = ast.model val scalaModel = new ScalaGenerator(model).playScalaSecurityExtractors(name, ast.model.packageName.getOrElse(name)) val expected = asInFile(name, "extractor.scala") if (expected.isEmpty) dump(scalaModel, name, "extractor.scala") clean(scalaModel) mustBe clean(expected) } } }
Example 20
Source File: EitherOpsSpec.scala From scala-common with Apache License 2.0 | 5 votes |
import com.softwaremill.eitherops._ import org.scalatest.{FlatSpec, MustMatchers} class EitherOpsSpec extends FlatSpec with MustMatchers { "collectLefts" must "find no left" in { val e1 = Right(1) val e2 = Right(2) val e3 = Right(3) EitherOps.collectLefts(e1, e2, e3) must be(Nil) } "collectLefts" must "find some lefts" in { val e1 = Right(1) val e2 = Left("nan") val e3 = Left("nan2") EitherOps.collectLefts(e1, e2, e3) must be(Seq("nan", "nan2")) } "collectLefts" must "find some lefts with mixed right types" in { val e1 = Right(1) val e2 = Right("2") val e3 = Left("nan") EitherOps.collectLefts(e1, e2, e3) must be(Seq("nan")) } "collectLefts" must "find some lefts with error trait" in { sealed trait Error case class NumericError(message: String) extends Error case class OtherError(message: String) extends Error val e1 = Right(1) val e2 = Left(NumericError("nan")) val e3 = Left(OtherError("foo")) val lefts: Seq[Error] = EitherOps.collectLefts(e1, e2, e3) lefts must be(Seq(NumericError("nan"), OtherError("foo"))) } "collectRight" must "find no right" in { val e1 = Left(1) val e2 = Left(2) val e3 = Left(3) EitherOps.collectRights(e1, e2, e3) must be(Nil) } "collectRight" must "find some rights" in { val e0 = Left("nan") val e1 = Right(1) val e2 = Right(2) val e3 = Left("nan bis") val e4 = Right(3) EitherOps.collectRights(e0, e1, e2, e3, e4) must be(Seq(1, 2, 3)) } }
Example 21
Source File: SassCompilerTest.scala From sbt-sass with Apache License 2.0 | 5 votes |
package org.madoushi.sbt.sass import java.io.File import org.scalatest.{FunSpec, MustMatchers} class SassCompilerTest extends FunSpec with MustMatchers { describe("SassCompiler") { describe("using well formed scss input") { describe("without includes") { it("should compile") { val input = new File(getClass.getResource("/org/madoushi/sbt/sass/well-formed.scss").toURI) val output = File.createTempFile("sbt-sass-test", ".css") val outputMinified = File.createTempFile("sbt-sass-test", ".min.css") val processOutput = SassCompiler.compileWithDefaultSass(input, output, Some(outputMinified)) val css = scala.io.Source.fromFile(output).mkString val cssMin = scala.io.Source.fromFile(outputMinified).mkString css.length must be > cssMin.length val testCss = css.replaceAll("\\/\\*.*?\\*\\/", "").replaceAll("\\s+", "") testCss must include(".test{font-size:10px;}") testCss must include(".test.hidden{display:none;}") processOutput.size must be(1) processOutput.head must include("well-formed.scss") } } describe("with includes") { it("should compile") { val input = new File(getClass.getResource("/org/madoushi/sbt/sass/well-formed-using-import.scss").toURI) val output = File.createTempFile("sbt-sass-test", ".css") val outputMinified = File.createTempFile("sbt-sass-test", ".min.css") val processOutput = SassCompiler.compileWithDefaultSass(input, output, Some(outputMinified)) val css = scala.io.Source.fromFile(output).mkString val cssMin = scala.io.Source.fromFile(outputMinified).mkString css.length must be > cssMin.length val testCss = css.replaceAll("\\/\\*.*?\\*\\/", "").replaceAll("\\s+", "") testCss must include(".test-import{font-weight:bold;}") testCss must include(".test{font-size:10px;}") testCss must include(".test.hidden{display:none;}") processOutput.size must be(2) println(processOutput) processOutput.find(_.contains("_well-formed-import.scss")) must not be None } } } describe("using broken scss input") { it("should throw an exception") { val input = new File(getClass.getResource("/org/madoushi/sbt/sass/broken-input.scss").toURI) val output = File.createTempFile("sbt-sass-test", ".css") val outputMinified = File.createTempFile("sbt-sass-test", ".min.css") val exception = the [SassCompilerException] thrownBy SassCompiler.compileWithDefaultSass(input, output, Some(outputMinified)) exception.getMessage must include("Invalid CSS after") } } } }
Example 22
Source File: RouteLatencyFilterSpec.scala From play-prometheus-filters with MIT License | 5 votes |
package com.github.stijndehaes.playprometheusfilters.filters

import com.github.stijndehaes.playprometheusfilters.metrics.DefaultPlayUnmatchedDefaults
import com.github.stijndehaes.playprometheusfilters.mocks.MockController
import io.prometheus.client.CollectorRegistry
import org.mockito.ArgumentMatchers.any
import org.mockito.Mockito.verify
import org.scalatest.mockito.MockitoSugar
import org.scalatest.{MustMatchers, WordSpec}
import org.scalatestplus.play.guice.GuiceOneAppPerSuite
import play.api.Configuration
import play.api.libs.typedmap.TypedMap
import play.api.mvc._
import play.api.routing.{HandlerDef, Router}
import play.api.test.Helpers.stubControllerComponents
import play.api.test.{DefaultAwaitTimeout, FakeRequest, FutureAwaits}

import scala.concurrent.ExecutionContext.Implicits.global

class RouteLatencyFilterSpec extends WordSpec with MustMatchers with MockitoSugar with Results with DefaultAwaitTimeout with FutureAwaits with GuiceOneAppPerSuite {

  private implicit val mat = app.materializer
  private val configuration = mock[Configuration]

  "Filter constructor" should {
    "Add a histogram to the prometheus registry" in {
      val collectorRegistry = mock[CollectorRegistry]
      new RouteLatencyFilter(collectorRegistry, configuration)
      verify(collectorRegistry).register(any())
    }
  }

  "Apply method" should {
    "Measure the latency" in {
      val filter = new RouteLatencyFilter(mock[CollectorRegistry], configuration)
      val rh = FakeRequest().withAttrs(TypedMap(
        Router.Attrs.HandlerDef -> HandlerDef(null, null, null, "test", null, null, null, null, null)
      ))
      val action = new MockController(stubControllerComponents()).ok

      await(filter(action)(rh).run())

      val metrics = filter.metrics(0).metric.collect()
      metrics must have size 1
      val samples = metrics.get(0).samples
      // this is the count sample
      val countSample = samples.get(samples.size() - 2)
      countSample.value mustBe 1.0
      countSample.labelValues must have size 1
      countSample.labelValues.get(0) mustBe "test"
    }

    "Measure the latency for an unmatched route" in {
      val filter = new RouteLatencyFilter(mock[CollectorRegistry], configuration)
      val rh = FakeRequest()
      val action = new MockController(stubControllerComponents()).error

      await(filter(action)(rh).run())

      val metrics = filter.metrics(0).metric.collect()
      metrics must have size 1
      val samples = metrics.get(0).samples
      // this is the count sample
      val countSample = samples.get(samples.size() - 2)
      countSample.value mustBe 1.0
      countSample.labelValues must have size 1
      countSample.labelValues.get(0) mustBe DefaultPlayUnmatchedDefaults.UnmatchedRouteString
    }
  }
}
Example 23
Source File: SchemaColumnExpressionTest.scala From data-faker with MIT License | 5 votes |
package com.dunnhumby.datafaker.schema.table.columns import org.scalatest.{MustMatchers, WordSpec} class SchemaColumnExpressionTest extends WordSpec with MustMatchers { import com.dunnhumby.datafaker.schema.table.columns.SchemaColumnExpressionProtocol._ import net.jcazevedo.moultingyaml._ val name = "test" val column_type = "Expression" val baseString = s"""name: $name |column_type: $column_type """.stripMargin "SchemaColumnExpression" must { "read an Expression column" in { val string = s"""$baseString |expression: concat('testing_', round(rand() * 10, 2)) """.stripMargin string.parseYaml.convertTo[SchemaColumnExpression] mustBe SchemaColumnExpression(name, "concat('testing_', round(rand() * 10, 2))") } } }
Example 24
Source File: SchemaTableTest.scala From data-faker with MIT License | 5 votes |
package com.dunnhumby.datafaker.schema.table import com.dunnhumby.datafaker.schema.table.columns.SchemaColumnFixed import org.scalatest.{MustMatchers, WordSpec} class SchemaTableTest extends WordSpec with MustMatchers { import com.dunnhumby.datafaker.schema.table.SchemaTableProtocol._ import net.jcazevedo.moultingyaml._ val baseString = """name: test |rows: 10 |columns: |- name: test_column | column_type: Fixed | data_type: Int | value: 1 """.stripMargin "SchemaTable" must { "read a Table with columns" in { val string = baseString string.parseYaml.convertTo[SchemaTable] mustBe SchemaTable("test", 10, List(SchemaColumnFixed("test_column", 1)), None) } "read a Table with columns and partitions" in { val string = s"""$baseString |partitions: |- test_column """.stripMargin string.parseYaml.convertTo[SchemaTable] mustBe SchemaTable("test", 10, List(SchemaColumnFixed("test_column", 1)), Some(List("test_column"))) } "read a Table with columns and no partitions" in { val string = s"""$baseString |partitions: """.stripMargin string.parseYaml.convertTo[SchemaTable] mustBe SchemaTable("test", 10, List(SchemaColumnFixed("test_column", 1)), None) } } }
Example 25
Source File: ArgsParserTest.scala From data-faker with MIT License | 5 votes |
package com.dunnhumby.datafaker import java.sql.{Date, Timestamp} import org.scalatest.{MustMatchers, WordSpec} class ArgsParserTest extends WordSpec with MustMatchers { import com.dunnhumby.datafaker.YamlParser.YamlParserProtocol._ import net.jcazevedo.moultingyaml._ "ArgsParser" must { "accepts --file arg" in { ArgsParser.parseArgs(List("--file", "test")) mustBe Map("file" -> "test") } "accepts --database arg" in { ArgsParser.parseArgs(List("--database", "test")) mustBe Map("database" -> "test") } } }
Example 26
Source File: ApiKeyAuthSpec.scala From shield with MIT License | 5 votes |
package shield.actors.middleware

import akka.actor.ActorSystem
import akka.testkit.{ImplicitSender, TestActorRef, TestKit}
import org.scalatest.{MustMatchers, WordSpecLike}
import shield.actors.{DownstreamRequest, ForwardRequestCmd, ForwardResponseCmd, ResponseDetails}
import shield.config.Settings
import shield.proxying.FailBalancer
import shield.routing._
import spray.http.HttpHeaders.RawHeader
import spray.http._

class ApiKeyAuthSpec extends TestKit(ActorSystem("testSystem")) with WordSpecLike with MustMatchers with ImplicitSender {

  "ApiKeyAuthTest middleware actor" must {
    val stage = "myStage"
    val settings = Settings(system)
    val location = settings.DefaultServiceLocation
    val getEndpoint = EndpointTemplate(HttpMethods.GET, Path("/foobar"))
    val routingDestination = RoutingDestination(getEndpoint, List(), List(), FailBalancer)

    def httpRequest(headers: List[HttpHeader]): HttpRequest =
      HttpRequest(HttpMethods.GET, "/v4/mobile/stores", headers)

    def forwardResponseCmd(response: HttpResponse) = {
      ForwardResponseCmd(
        stage,
        ResponseDetails(location, settings.LocalServiceName, getEndpoint, None, response)
      )
    }

    "reply with Forbidden when created with bad parameters" in {
      val actor = TestActorRef(ApiKeyAuth.props("", Set(""), true, location))
      actor ! DownstreamRequest(stage, routingDestination, httpRequest(List()))
      expectMsg(forwardResponseCmd(HttpResponse(StatusCodes.Forbidden)))
    }

    "reply with Forbidden when given no headers" in {
      val actor = TestActorRef(ApiKeyAuth.props("pid", Set("BA914464-C559-4F81-A37E-521B830F1634"), true, location))
      actor ! DownstreamRequest(stage, routingDestination, httpRequest(List()))
      expectMsg(forwardResponseCmd(HttpResponse(StatusCodes.Forbidden)))
    }

    "reply with Unauthorized when given an incorrect header" in {
      val actor = TestActorRef(ApiKeyAuth.props("pid", Set("BA914464-C559-4F81-A37E-521B830F1634"), true, location))
      actor ! DownstreamRequest(stage, routingDestination, httpRequest(List(RawHeader("pid", "asdasdada"))))
      expectMsg(forwardResponseCmd(HttpResponse(StatusCodes.Unauthorized)))
    }

    "succeed with a downstream request when given the correct header and value" in {
      val request = httpRequest(List(RawHeader("pid", "BA914464-C559-4F81-A37E-521B830F1634")))
      val actor = TestActorRef(ApiKeyAuth.props("pid", Set("BA914464-C559-4F81-A37E-521B830F1634"), true, location))
      actor ! DownstreamRequest(stage, routingDestination, request)
      expectMsg(ForwardRequestCmd(stage, request, None))
    }

    "succeed with a downstream request when given a correct but capitalized header" in {
      val request = httpRequest(List(RawHeader("PID", "BA914464-C559-4F81-A37E-521B830F1634")))
      val actor = TestActorRef(ApiKeyAuth.props("pid", Set("BA914464-C559-4F81-A37E-521B830F1634"), true, location))
      actor ! DownstreamRequest(stage, routingDestination, request)
      expectMsg(ForwardRequestCmd(stage, request, None))
    }

    "succeed with a downstream request when given a case-insensitive value and case sensitivity is off" in {
      val request = httpRequest(List(RawHeader("pid", "ba914464-c559-4f81-a37e-521b830f1634")))
      val actor = TestActorRef(ApiKeyAuth.props("pid", Set("BA914464-C559-4F81-A37E-521B830F1634"), false, location))
      actor ! DownstreamRequest(stage, routingDestination, request)
      expectMsg(ForwardRequestCmd(stage, request, None))
    }

    "reply with Unauthorized when given a case-insensitive value and case sensitivity is on" in {
      val request = httpRequest(List(RawHeader("pid", "ba914464-c559-4f81-a37e-521b830f1634")))
      val actor = TestActorRef(ApiKeyAuth.props("pid", Set("BA914464-C559-4F81-A37E-521B830F1634"), true, location))
      actor ! DownstreamRequest(stage, routingDestination, request)
      expectMsg(forwardResponseCmd(HttpResponse(StatusCodes.Unauthorized)))
    }
  }
}
Example 27
Source File: StaticDomainWatcherSpec.scala From shield with MIT License | 5 votes |
package shield.actors.config.domain import akka.actor.{ActorSystem, Props} import akka.testkit.{TestActorRef, TestKit, TestProbe} import org.scalatest.{BeforeAndAfterAll, MustMatchers, WordSpecLike} import shield.actors.ShieldActorMsgs import shield.config.Settings class StaticDomainWatcherSpec extends TestKit(ActorSystem("testSystem")) with WordSpecLike with MustMatchers with BeforeAndAfterAll { val settings = Settings(system) "StaticDomainWatcher" should { "notify shield about domains found" in { val parent = TestProbe() TestActorRef(Props(new StaticDomainWatcher()), parent.ref, "static-domain-watcher") val msg: ShieldActorMsgs.DomainsUpdated = parent.expectMsgClass(classOf[ShieldActorMsgs.DomainsUpdated]) msg.domains.size must equal (settings.config.getConfigList("shield.domains").size) } } }
Example 28
Source File: UtilsTest.scala From sbt-dynamodb with MIT License | 5 votes |
package com.teambytes.sbt.dynamodb import org.scalatest.mock.MockitoSugar import org.scalatest.{MustMatchers, WordSpec} class UtilsTest extends WordSpec with MustMatchers with MockitoSugar { "Utils" should { "extract PID correctly" in { val jpsOutput = """ |16706 QuorumPeerMain |60405 Boot |59022 DynamoDBLocal.jar |60479 Jps |51449 """.stripMargin Utils.extractDyanmoDBPid(jpsOutput) must equal(Some("59022")) } } }
Example 29
Source File: WaveformSpec.scala From scala-audio-file with MIT License | 5 votes |
package me.mziccard.audio import org.scalatest.{FlatSpec, MustMatchers} import org.scalamock.scalatest.MockFactory class WaveformSpec extends FlatSpec with MockFactory { "Waveform" should "get track info" in { val mockFile = mock[AudioFile] (mockFile.lengthInSeconds _).expects().returning(60L) (mockFile.numFrames _).expects().returning(3600) (mockFile.numChannels _).expects().returning(2) Waveform(mockFile) } "Waveform" should "read track frames" in { val mockFile = mock[AudioFile] (mockFile.lengthInSeconds _).expects().returning(60L) (mockFile.numFrames _).expects().returning(1024) (mockFile.numChannels _).expects().returning(2) (mockFile.readNormalizedFrames(_ : Array[Double], _ : Int)).expects(*, 2) Waveform(mockFile).getWaveform(512) } "Waveform" should "read enough frames" in { val mockFile = mock[AudioFile] (mockFile.lengthInSeconds _).expects().returning(60L) (mockFile.numFrames _).expects().returning(1024) (mockFile.numChannels _).expects().returning(2) var readFrames = 0 (mockFile.readNormalizedFrames(_ : Array[Double], _ : Int)) expects(*, 2) onCall { (buffer : Array[Double], numFramesToRead : Int) => { if (readFrames < 1024) { for(i <- 0 until numFramesToRead) buffer(i) = i.toFloat/numFramesToRead readFrames = readFrames + numFramesToRead 2 } else { 0 } } } repeat(513) Waveform(mockFile).getWaveform(512) } "Waveform" should "be of requested size" in { val mockFile = mock[AudioFile] (mockFile.lengthInSeconds _).expects().returning(60L) (mockFile.numFrames _).expects().returning(1024) (mockFile.numChannels _).expects().returning(2) var readFrames = 0 (mockFile.readNormalizedFrames(_ : Array[Double], _ : Int)) expects(*, 2) onCall { (buffer : Array[Double], numFramesToRead : Int) => { if (readFrames < 1024) { for(i <- 0 until numFramesToRead) buffer(i) = i.toFloat/numFramesToRead readFrames = readFrames + numFramesToRead 2 } else { 0 } } } repeat(513) val waveform = Waveform(mockFile).getWaveform(512) assert(waveform.length >= 512) } "Waveform" should "be the maximum of each frame block" in { val mockFile = mock[AudioFile] (mockFile.lengthInSeconds _).expects().returning(60L) (mockFile.numFrames _).expects().returning(1024) (mockFile.numChannels _).expects().returning(2) var readFrames = 0 (mockFile.readNormalizedFrames(_ : Array[Double], _ : Int)) expects(*, 2) onCall { (buffer : Array[Double], numFramesToRead : Int) => { if (readFrames < 1024) { for(i <- 0 until numFramesToRead) buffer(i) = i.toFloat/numFramesToRead readFrames = readFrames + numFramesToRead 2 } else { 0 } } } repeat(513) val waveform = Waveform(mockFile).getWaveform(512) waveform.foreach(x => assert(x == 1.toFloat/2)) } }
Example 30
Source File: ClassOpsSpec.scala From cloudflow with Apache License 2.0 | 5 votes |
package cloudflow.streamlets import org.scalatest.{ MustMatchers, TryValues, WordSpec } // Definitions for test purposes class Foo class Bar extends Foo class ClassWithArgsConstructor(arg: Int) extends Bar { def args = arg } class ClassWithNoArgsConstructor() extends Bar object BarObject extends Bar class ClassWithCompanionObject object ClassWithCompanionObject class ClassWithArgsAndCompanionObject(arg: Int) { def args = arg } object ClassWithArgsAndCompanionObject class ClassOpsSpec extends WordSpec with MustMatchers with TryValues { import ClassOps._ "nameOf" should { "return the fully qualified class name of the specified type" in { nameOf[Foo] mustBe "cloudflow.streamlets.Foo" } "return the fully qualified class name of the specified instance" in { nameOf(BarObject) mustBe "cloudflow.streamlets.BarObject" } } "instanceOf" should { import ClassOps._ "create a new instance of a class" in { instanceOf("cloudflow.streamlets.Bar").success.value mustBe a[Bar] } "reuse the object instance of a singleton Object" in { instanceOf("cloudflow.streamlets.BarObject").success.value mustBe (BarObject) } "reuse the object instance of a singleton Object with name passed with a $" in { instanceOf("cloudflow.streamlets.BarObject$").success.value mustBe (BarObject) } "fail to create an instance for a class without a no-args constructor" in { instanceOf("cloudflow.streamlets.ClassWithArgsConstructor").failure.exception mustBe a[InstantiationException] } "create a new instance of a class with a no-arg constructor and a companion object" in { instanceOf("cloudflow.streamlets.ClassWithCompanionObject").success.value mustBe a[ClassWithCompanionObject] } "reuse the object instance when we have a class with no no-arg constructor along with a companion Object" in { instanceOf("cloudflow.streamlets.ClassWithArgsAndCompanionObject").success.value mustBe (ClassWithArgsAndCompanionObject) } } }
Example 31
Source File: StreamletDefinitionSpec.scala From cloudflow with Apache License 2.0 | 5 votes |
package cloudflow.streamlets import com.typesafe.config.ConfigFactory import org.scalatest.{ MustMatchers, OptionValues, TryValues, WordSpec } class StreamletDefinitionSpec extends WordSpec with MustMatchers with TryValues with OptionValues { "A valid StreamletConfig" should { val config = ConfigFactory.load("config-map-sample.json") val streamletConfig = StreamletDefinition.read(config).get "the loaded instances must contain class, instance and port information" in { val expectedStreamlet = ("sensor-data", "cloudflow.examples.sensordata.SensorDataIngress$") streamletConfig.streamletRef must be(expectedStreamlet._1) streamletConfig.streamletClass must be(expectedStreamlet._2) } "a loaded instance must have port configuration" in { val ports = streamletConfig.portMappings val expectedPorts = Map( "accepted" -> Topic("accepted"), "rejected" -> Topic("rejected") ) ports.foreach(portMapping ⇒ expectedPorts(portMapping.port) must be(portMapping.topic)) } "a loaded instance must have its own configuration" in { val config = streamletConfig.config config.getInt("cloudflow.internal.server.container-port") must be(2049) } "a loaded instance must have the common configuration" in { config.getString("cloudflow.common.attribute") must be("value") config.getString("cloudflow.kafka.bootstrap-servers") must be("cloudflow-kafka.lightbend:9092") } "a loaded instance must not have runner configuration" in { val config = streamletConfig.config config.hasPath("runner") must be(false) } } }
Example 32
Source File: PrometheusModuleSpec.scala From play-prometheus-filters with MIT License | 5 votes |
package com.github.stijndehaes.playprometheusfilters

import io.prometheus.client.{Collector, CollectorRegistry}
import org.scalatest.{BeforeAndAfter, MustMatchers, PrivateMethodTester, WordSpec}
import org.scalatestplus.play.guice.GuiceOneAppPerTest
import play.api.inject.guice.GuiceApplicationBuilder

class PrometheusModuleSpec extends WordSpec with MustMatchers with BeforeAndAfter with PrivateMethodTester with GuiceOneAppPerTest {

  before {
    // clearing registry before each test
    CollectorRegistry.defaultRegistry.clear()
  }

  "PrometheusModule" should {
    "register default exporters when enabled" in {
      // default enabled
      val app = new GuiceApplicationBuilder()
        .configure(PrometheusModule.defaultExportsKey -> true)
        .build()
      val collector = app.injector.instanceOf[CollectorRegistry]
      val collectors = PrivateMethod[java.util.HashSet[Collector]]('collectors)
      (collector invokePrivate collectors()).size must be > 0
    }

    "not register default exporters when disabled" in {
      // disable default exporters
      val app = new GuiceApplicationBuilder()
        .configure(PrometheusModule.defaultExportsKey -> false)
        .build()
      val collector = app.injector.instanceOf[CollectorRegistry]
      val collectors = PrivateMethod[java.util.HashSet[Collector]]('collectors)
      (collector invokePrivate collectors()).size must be (0)
    }
  }

  // Helper that lists the names of all metrics registered in the given registry.
  def getExporterNames(registry: CollectorRegistry): Seq[String] = {
    val exportNames = collection.mutable.Buffer.empty[String]
    val mfs = registry.metricFamilySamples()
    while (mfs.hasMoreElements) {
      exportNames += mfs.nextElement().name
    }
    exportNames
  }
}
Example 33
Source File: SchemaColumnSequentialTest.scala From data-faker with MIT License | 5 votes |
package com.dunnhumby.datafaker.schema.table.columns import java.sql.{Date, Timestamp} import org.scalatest.{MustMatchers, WordSpec} class SchemaColumnSequentialTest extends WordSpec with MustMatchers { import com.dunnhumby.datafaker.schema.table.columns.SchemaColumnSequentialProtocol._ import net.jcazevedo.moultingyaml._ val name = "test" val column_type = "Sequential" val baseString = s"""name: $name |column_type: $column_type """.stripMargin "SchemaColumnSequential" must { "read an Int column" in { val string = s"""$baseString |data_type: ${SchemaColumnDataType.Int} |start: 1 |step: 1 """.stripMargin string.parseYaml.convertTo[SchemaColumnSequential[_]] mustBe SchemaColumnSequential(name, 1, 1) } "read a Long column" in { val string = s"""$baseString |data_type: ${SchemaColumnDataType.Long} |start: 1 |step: 1 """.stripMargin string.parseYaml.convertTo[SchemaColumnSequential[_]] mustBe SchemaColumnSequential(name, 1l, 1l) } "read a Float column" in { val string = s"""$baseString |data_type: ${SchemaColumnDataType.Float} |start: 1.0 |step: 1.0 """.stripMargin string.parseYaml.convertTo[SchemaColumnSequential[_]] mustBe SchemaColumnSequential(name, 1f, 1f) } "read a Double column" in { val string = s"""$baseString |data_type: ${SchemaColumnDataType.Double} |start: 1.0 |step: 1.0 """.stripMargin string.parseYaml.convertTo[SchemaColumnSequential[_]] mustBe SchemaColumnSequential(name, 1d, 1d) } "read a Date column" in { val string = s"""$baseString |data_type: ${SchemaColumnDataType.Date} |start: 1998-06-03 |step: 1 """.stripMargin string.parseYaml.convertTo[SchemaColumnSequential[_]] mustBe SchemaColumnSequential(name, Date.valueOf("1998-06-03"), 1) } "read a Timestamp column" in { val string = s"""$baseString |data_type: ${SchemaColumnDataType.Timestamp} |start: 1998-06-03 01:23:45 |step: 1 """.stripMargin string.parseYaml.convertTo[SchemaColumnSequential[_]] mustBe SchemaColumnSequential(name, Timestamp.valueOf("1998-06-03 01:23:45"), 1) } } }
Example 34
Source File: StatusCounterFilterSpec.scala From play-prometheus-filters with MIT License | 5 votes |
package com.github.stijndehaes.playprometheusfilters.filters import com.github.stijndehaes.playprometheusfilters.mocks.MockController import io.prometheus.client.CollectorRegistry import org.mockito.ArgumentMatchers.any import org.mockito.Mockito.verify import org.scalatest.mockito.MockitoSugar import org.scalatest.{MustMatchers, WordSpec} import org.scalatestplus.play.guice.GuiceOneAppPerSuite import play.api.Configuration import play.api.mvc.Results import play.api.test.Helpers.stubControllerComponents import play.api.test.{DefaultAwaitTimeout, FakeRequest, FutureAwaits} import scala.concurrent.ExecutionContext.Implicits.global class StatusCounterFilterSpec extends WordSpec with MustMatchers with MockitoSugar with Results with DefaultAwaitTimeout with FutureAwaits with GuiceOneAppPerSuite { private implicit val mat = app.materializer private val configuration = mock[Configuration] "Filter constructor" should { "Add a counter to the prometheus registry" in { val collectorRegistry = mock[CollectorRegistry] new StatusCounterFilter(collectorRegistry, configuration) verify(collectorRegistry).register(any()) } } "Apply method" should { "Count the requests with status" in { val filter = new StatusCounterFilter(mock[CollectorRegistry], configuration) val rh = FakeRequest() val action = new MockController(stubControllerComponents()).ok await(filter(action)(rh).run()) val metrics = filter.metrics(0).metric.collect() metrics must have size 1 val samples = metrics.get(0).samples samples.get(0).value mustBe 1.0 samples.get(0).labelValues must have size 1 samples.get(0).labelValues.get(0) mustBe "200" } } }
Example 35
Source File: StatusAndRouteLatencyFilterSpec.scala From play-prometheus-filters with MIT License | 5 votes |
package com.github.stijndehaes.playprometheusfilters.filters

import com.github.stijndehaes.playprometheusfilters.metrics.DefaultPlayUnmatchedDefaults
import com.github.stijndehaes.playprometheusfilters.mocks.MockController
import io.prometheus.client.CollectorRegistry
import org.mockito.ArgumentMatchers.any
import org.mockito.Mockito.verify
import org.scalatest.mockito.MockitoSugar
import org.scalatest.{MustMatchers, WordSpec}
import org.scalatestplus.play.guice.GuiceOneAppPerSuite
import play.api.Configuration
import play.api.libs.typedmap.TypedMap
import play.api.mvc.Results
import play.api.routing.{HandlerDef, Router}
import play.api.test.Helpers.stubControllerComponents
import play.api.test.{DefaultAwaitTimeout, FakeRequest, FutureAwaits}

import scala.concurrent.ExecutionContext.Implicits.global

class StatusAndRouteLatencyFilterSpec extends WordSpec with MustMatchers with MockitoSugar with Results with DefaultAwaitTimeout with FutureAwaits with GuiceOneAppPerSuite {

  private implicit val mat = app.materializer
  private val configuration = mock[Configuration]

  "Filter constructor" should {
    "Add a histogram to the prometheus registry" in {
      val collectorRegistry = mock[CollectorRegistry]
      new StatusAndRouteLatencyFilter(collectorRegistry, configuration)
      verify(collectorRegistry).register(any())
    }
  }

  "Apply method" should {
    "Measure the latency" in {
      val filter = new StatusAndRouteLatencyFilter(mock[CollectorRegistry], configuration)
      val rh = FakeRequest().withAttrs(TypedMap(
        Router.Attrs.HandlerDef -> HandlerDef(null, null, "testController", "test", null, "GET", "/path", null, null)
      ))
      val action = new MockController(stubControllerComponents()).ok

      await(filter(action)(rh).run())

      val metrics = filter.metrics(0).metric.collect()
      metrics must have size 1
      val samples = metrics.get(0).samples
      // this is the count sample
      val countSample = samples.get(samples.size() - 2)
      countSample.value mustBe 1.0
      countSample.labelValues must have size 5
      countSample.labelValues.get(0) mustBe "test"
      countSample.labelValues.get(1) mustBe "200"
      countSample.labelValues.get(2) mustBe "testController"
      countSample.labelValues.get(3) mustBe "/path"
      countSample.labelValues.get(4) mustBe "GET"
    }

    "Measure the latency for an unmatched route" in {
      val filter = new StatusAndRouteLatencyFilter(mock[CollectorRegistry], configuration)
      val rh = FakeRequest()
      val action = new MockController(stubControllerComponents()).error

      await(filter(action)(rh).run())

      val metrics = filter.metrics(0).metric.collect()
      metrics must have size 1
      val samples = metrics.get(0).samples
      // this is the count sample
      val countSample = samples.get(samples.size() - 2)
      countSample.value mustBe 1.0
      countSample.labelValues must have size 5
      countSample.labelValues.get(0) mustBe DefaultPlayUnmatchedDefaults.UnmatchedRouteString
      countSample.labelValues.get(1) mustBe "404"
      countSample.labelValues.get(2) mustBe DefaultPlayUnmatchedDefaults.UnmatchedControllerString
      countSample.labelValues.get(3) mustBe DefaultPlayUnmatchedDefaults.UnmatchedPathString
      countSample.labelValues.get(4) mustBe DefaultPlayUnmatchedDefaults.UnmatchedVerbString
    }
  }
}
Example 36
Source File: StatusAndRouteCounterFilterSpec.scala From play-prometheus-filters with MIT License | 5 votes |
package com.github.stijndehaes.playprometheusfilters.filters

import com.github.stijndehaes.playprometheusfilters.metrics.DefaultPlayUnmatchedDefaults
import com.github.stijndehaes.playprometheusfilters.mocks.MockController
import io.prometheus.client.CollectorRegistry
import org.mockito.ArgumentMatchers.any
import org.mockito.Mockito.verify
import org.scalatest.mockito.MockitoSugar
import org.scalatest.{MustMatchers, WordSpec}
import org.scalatestplus.play.guice.GuiceOneAppPerSuite
import play.api.Configuration
import play.api.libs.typedmap.TypedMap
import play.api.mvc.Results
import play.api.routing.{HandlerDef, Router}
import play.api.test.Helpers.stubControllerComponents
import play.api.test.{DefaultAwaitTimeout, FakeRequest, FutureAwaits}

import scala.concurrent.ExecutionContext.Implicits.global

class StatusAndRouteCounterFilterSpec extends WordSpec with MustMatchers with MockitoSugar with Results with DefaultAwaitTimeout with FutureAwaits with GuiceOneAppPerSuite {

  private implicit val mat = app.materializer
  private val configuration = mock[Configuration]

  "Filter constructor" should {
    "Add a histogram to the prometheus registry" in {
      val collectorRegistry = mock[CollectorRegistry]
      new StatusAndRouteLatencyFilter(collectorRegistry, configuration)
      verify(collectorRegistry).register(any())
    }
  }

  "Apply method" should {
    "Measure the count" in {
      val filter = new StatusAndRouteCounterFilter(mock[CollectorRegistry], configuration)
      val rh = FakeRequest().withAttrs(TypedMap(
        Router.Attrs.HandlerDef -> HandlerDef(null, null, "testController", "test", null, "GET", "/path", null, null)
      ))
      val action = new MockController(stubControllerComponents()).ok

      await(filter(action)(rh).run())

      val metrics = filter.metrics(0).metric.collect()
      metrics must have size 1
      val samples = metrics.get(0).samples
      // this is the count sample
      val countSample = samples.get(0)
      countSample.value mustBe 1.0
      countSample.labelValues must have size 5
      countSample.labelValues.get(0) mustBe "test"
      countSample.labelValues.get(1) mustBe "200"
      countSample.labelValues.get(2) mustBe "testController"
      countSample.labelValues.get(3) mustBe "/path"
      countSample.labelValues.get(4) mustBe "GET"
    }

    "Measure the count for an unmatched route" in {
      val filter = new StatusAndRouteCounterFilter(mock[CollectorRegistry], configuration)
      val rh = FakeRequest()
      val action = new MockController(stubControllerComponents()).error

      await(filter(action)(rh).run())

      val metrics = filter.metrics(0).metric.collect()
      metrics must have size 1
      val samples = metrics.get(0).samples
      // this is the count sample
      val countSample = samples.get(0)
      countSample.value mustBe 1.0
      countSample.labelValues must have size 5
      countSample.labelValues.get(0) mustBe DefaultPlayUnmatchedDefaults.UnmatchedRouteString
      countSample.labelValues.get(1) mustBe "404"
      countSample.labelValues.get(2) mustBe DefaultPlayUnmatchedDefaults.UnmatchedControllerString
      countSample.labelValues.get(3) mustBe DefaultPlayUnmatchedDefaults.UnmatchedPathString
      countSample.labelValues.get(4) mustBe DefaultPlayUnmatchedDefaults.UnmatchedVerbString
    }
  }
}
Example 37
Source File: UnitSpec.scala From sbt-coursier with Apache License 2.0 | 5 votes |
package t

import com.typesafe.config.ConfigFactory
import org.scalatest.{ MustMatchers, WordSpec }

class UnitSpec extends WordSpec with MustMatchers {

  def conf = ConfigFactory.defaultReference()

  "Config" should {
    "return Akka HTTP server provider" in {
      val serverProvider = conf.getString("play.server.provider")
      serverProvider mustBe "play.core.server.AkkaHttpServerProvider"
    }
    "be able to load Netty settings" in {
      val nettyTransport = conf.getString("play.server.netty.transport")
      nettyTransport mustBe "jdk"
    }
  }
}
Example 38
Source File: SassCompilerTest.scala From sbt-sassify with Apache License 2.0 | 5 votes |
package org.irundaia.sass import java.nio.file.{Paths, Files} import org.scalatest.{FunSpec, MustMatchers} import scala.io.Source class SassCompilerTest extends FunSpec with MustMatchers { val testDir = Files.createTempDirectory("sbt-sassify") val compilerSettings = CompilerSettings(Minified, true, true, Auto, Seq(), "", 10, "css") describe("The SassCompiler") { describe("using well formed scss input") { describe("without includes") { val input = Paths.get(getClass.getResource("/org/irundaia/sass/well-formed.scss").toURI) val compilationResults = SassCompiler.compile(input, input.getParent, testDir, compilerSettings) it("should compile") { compilationResults.isRight mustBe true } it("should contain the proper contents") { val cssMin = Source.fromFile(compilationResults.right.get.filesWritten.filter(_.toString.endsWith("css")).head.toFile).mkString val testMinCss = cssMin.replaceAll("\\/\\*.*?\\*\\/", "").replaceAll("\\s+", "") testMinCss must include(".test{font-size:10px") testMinCss must include(".test.hidden{display:none") } it("should have read one file") { compilationResults.right.get.filesRead.size must be(1) } it("should have read the correct file") { compilationResults.right.get.filesRead.head.toString must endWith("well-formed.scss") } } describe("with includes") { val input = Paths.get(getClass.getResource("/org/irundaia/sass/well-formed-using-import.scss").toURI) val compilationResults = SassCompiler.compile(input, input.getParent, testDir, compilerSettings) it("should compile") { compilationResults.isRight mustBe true } it("should include the contents of both the included and the including file") { val cssMin = Source.fromFile(compilationResults.right.get.filesWritten.filter(_.toString.endsWith("css")).head.toFile).mkString val testMinCss = cssMin.replaceAll("\\/\\*.*?\\*\\/", "").replaceAll("\\s+", "") testMinCss must include(".test-import{font-weight:bold") testMinCss must include(".test{font-size:10px") testMinCss must include(".test.hidden{display:none") } it("should have read two files") { compilationResults.right.get.filesRead.size must be(2) } it("should have read the included file") { compilationResults.right.get.filesRead.filter(_.endsWith("_well-formed-import.scss")) must not be None } } } describe("using broken scss input") { val input = Paths.get(getClass.getResource("/org/irundaia/sass/broken-input.scss").toURI) val compilationResult = SassCompiler.compile(input, input.getParent, testDir, compilerSettings) describe("should fail compilation") { compilationResult.isLeft mustBe true } describe("should throw an exception") { it("reporting Invalid CSS") { compilationResult match { case Left(exception) => exception.getMessage must include("Invalid CSS after ") case _ => fail } } it("reporting an error on line 2 column 15") { compilationResult match { case Left(exception: LineBasedCompilationFailure) => exception.line mustBe 2 exception.column mustBe 15 case _ => fail } } } } } }
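SassCompilerTest checks the generated CSS with string matchers (must include, must endWith). The following hedged sketch isolates those matchers on a hard-coded snippet instead of real compiler output; the class name is invented for illustration and is not part of sbt-sassify.

package sketches

import org.scalatest.{FunSpec, MustMatchers}

// Illustrative only: the string matchers used by SassCompilerTest,
// applied to a fixed CSS snippet rather than compiled output.
class StringMatcherSketchSpec extends FunSpec with MustMatchers {
  describe("String matchers") {
    it("should match substrings, prefixes and suffixes") {
      val css = ".test{font-size:10px}"
      css must include(".test{")
      css must startWith(".test")
      css must endWith("}")
      css must not include ("font-weight")
    }
  }
}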
Example 39
Source File: Sepc.scala From fusion-data with Apache License 2.0 | 5 votes |
package sample

import org.scalatest.{MustMatchers, WordSpec}

class SpecMultiJvmNode1 extends WordSpec with MustMatchers {
  "A node" should {
    "be able to say hello" in {
      val message = "Hello from node 1"
      message must be("Hello from node 1")
    }
  }
}

class SpecMultiJvmNode2 extends WordSpec with MustMatchers {
  "A node" should {
    "be able to say hello" in {
      val message = "Hello from node 2"
      message must be("Hello from node 2")
    }
  }
}
Example 40
Source File: TypeFlattenerIntegrationTest.scala From api-first-hand with MIT License | 5 votes |
package de.zalando.apifirst import java.io.File import de.zalando.apifirst.util.ScalaPrinter import org.scalatest.{ FunSpec, MustMatchers } import scala.io.Source class TypeFlattenerIntegrationTest extends FunSpec with MustMatchers { val expectation_path = "play-scala-generator/src/test/scala/model/" val prefix = "resources." import de.zalando.model._ val plainModels = Seq[WithModel]( additional_properties_yaml, basic_auth_api_yaml, basic_extension_yaml, basic_polymorphism_yaml, cross_spec_references_yaml, echo_api_yaml, error_in_array_yaml, expanded_polymorphism_yaml, form_data_yaml, full_petstore_api_yaml, hackweek_yaml, heroku_petstore_api_yaml, instagram_api_yaml, minimal_api_yaml, nakadi_yaml, nested_arrays_yaml, nested_arrays_validation_yaml, nested_objects_yaml, nested_objects_validation_yaml, nested_options_yaml, nested_options_validation_yaml, numbers_validation_yaml, options_yaml, security_api_yaml, simple_petstore_api_yaml, split_petstore_api_yaml, string_formats_yaml, string_formats_validation_yaml, type_deduplication_yaml, uber_api_yaml ) describe("TypeFlattener") { plainModels.foreach { model => testTypeFlattener(model) } } def testTypeFlattener(ast: WithModel): Unit = { val name = ScalaPrinter.nameFromModel(ast) it(s"should flatten API model $name") { val scalaModel = TypeNormaliser.flatten(ast.model) val expected = asInFile(name, ".scala") clean(ScalaPrinter.asScala(name, scalaModel)) mustBe clean(expected) } } def asInFile(name: String, suffix: String): String = { val expectedFile = new File(expectation_path, prefix + name + suffix) if (expectedFile.canRead) { val src = Source.fromFile(expectedFile) val result = src.getLines().mkString("\n") src.close() result } else "" } def clean(str: String): String = str.split("\n").map(_.trim).filter(_.nonEmpty).mkString("\n") }
Example 41
Source File: ScalaNameTest.scala From api-first-hand with MIT License | 5 votes |
package de.zalando.apifirst import de.zalando.apifirst.ScalaName._ import de.zalando.apifirst.naming.dsl._ import org.scalatest.{ FunSpec, MustMatchers } class ScalaNameTest extends FunSpec with MustMatchers { it("must correctly capitalize names") { ("one" / "two" / "three").names mustBe (("one", "Two", "three")) ("ONE" / "TWO" / "THREE").names mustBe (("one", "TWO", "tHREE")) ("OnE" / "TwO" / "ThReE").names mustBe (("one", "TwO", "thReE")) } it("must correctly recognize short names") { ("one" / "two").names mustBe (("one", "Two", "two")) } it("must correctly escape scala names") { ("catch" / "if" / "match").names mustBe (("`catch`", "If", "`match`")) } it("must be able to produce import statemets") { ("java.util" / "date").qualifiedName("", "") mustBe (("java.util", "Date")) } it("must correctly concat names") { ("definitions" / "Example" / "nestedArrays" / "Opt" / "Arr:").names mustBe (("definitions", "Example", "arr_esc")) } }
Example 42
Source File: PathTest.scala From api-first-hand with MIT License | 5 votes |
package de.zalando.apifirst import de.zalando.apifirst.naming.Path import org.scalatest.{ FunSpec, MustMatchers } class PathTest extends FunSpec with MustMatchers { describe("Path") { it("should convert the root path") { val root = Path("/") root.asSwagger mustBe "/" root.prepend("/echo/").asSwagger mustBe "/echo/" root.prepend("/echo").asSwagger mustBe "/echo/" } it("should convert absolute segments") { val a = Path("/a") a.asSwagger mustBe "/a" a.prepend("/echo/").asSwagger mustBe "/echo/a" a.prepend("/echo").asSwagger mustBe "/echo/a" } it("should convert relative segments") { val a = Path("a") a.asSwagger mustBe "/a" a.prepend("/echo/").asSwagger mustBe "/echo/a" a.prepend("/echo").asSwagger mustBe "/echo/a" } it("should convert absolute segments with trailing slash") { val a = Path("/a/") a.asSwagger mustBe "/a/" a.prepend("/echo/").asSwagger mustBe "/echo/a/" a.prepend("/echo").asSwagger mustBe "/echo/a/" } it("should convert nested path segments") { val a = Path("a/b") a.asSwagger mustBe "/a/b" a.prepend("/echo/").asSwagger mustBe "/echo/a/b" a.prepend("/echo").asSwagger mustBe "/echo/a/b" } it("should convert in-path parameters") { val a = Path("/{a}") a.asSwagger mustBe "/{a}" a.prepend("/echo/").asSwagger mustBe "/echo/{a}" a.prepend("/echo").asSwagger mustBe "/echo/{a}" a.interpolated mustBe "/${toPath(a)}" a.prepend("/echo/").interpolated mustBe "/echo/${toPath(a)}" a.prepend("/echo").interpolated mustBe "/echo/${toPath(a)}" a.asPlay mustBe "/:a" a.prepend("/echo/").asPlay mustBe "/echo/:a" a.prepend("/echo").asPlay mustBe "/echo/:a" a.asMethod mustBe "byA" a.prepend("/echo/").asMethod mustBe "echoByA" a.prepend("/echo").asMethod mustBe "echoByA" } it("should convert in-path parameters with trailing slash") { val a = Path("/a/{a}/") a.asSwagger mustBe "/a/{a}/" a.prepend("/echo/").asSwagger mustBe "/echo/a/{a}/" a.prepend("/echo").asSwagger mustBe "/echo/a/{a}/" a.interpolated mustBe "/a/${toPath(a)}/" a.prepend("/echo/").interpolated mustBe "/echo/a/${toPath(a)}/" a.prepend("/echo").interpolated mustBe "/echo/a/${toPath(a)}/" a.asPlay mustBe "/a/:a/" a.prepend("/echo/").asPlay mustBe "/echo/a/:a/" a.prepend("/echo").asPlay mustBe "/echo/a/:a/" a.asMethod mustBe "aByA" a.prepend("/echo/").asMethod mustBe "echoAByA" a.prepend("/echo").asMethod mustBe "echoAByA" } it("should convert multiple in-path parameters") { val a = Path("/a/{b}/{c}/d/{e}") a.asSwagger mustBe "/a/{b}/{c}/d/{e}" a.prepend("/echo/").asSwagger mustBe "/echo/a/{b}/{c}/d/{e}" a.prepend("/echo").asSwagger mustBe "/echo/a/{b}/{c}/d/{e}" a.interpolated mustBe "/a/${toPath(b)}/${toPath(c)}/d/${toPath(e)}" a.prepend("/echo/").interpolated mustBe "/echo/a/${toPath(b)}/${toPath(c)}/d/${toPath(e)}" a.prepend("/echo").interpolated mustBe "/echo/a/${toPath(b)}/${toPath(c)}/d/${toPath(e)}" a.asPlay mustBe "/a/:b/:c/d/:e" a.prepend("/echo/").asPlay mustBe "/echo/a/:b/:c/d/:e" a.prepend("/echo").asPlay mustBe "/echo/a/:b/:c/d/:e" a.asMethod mustBe "aByBByCDByE" a.prepend("/echo/").asMethod mustBe "echoAByBByCDByE" a.prepend("/echo").asMethod mustBe "echoAByBByCDByE" } } }
Example 43
Source File: HomeControllerSpec.scala From phantom-activator-template with Apache License 2.0 | 5 votes |
package controllers

import org.scalatest.{BeforeAndAfterAll, MustMatchers, WordSpec}
import org.scalatestplus.play.guice.GuiceOneAppPerTest
import org.slf4j.LoggerFactory
import play.api.test.Helpers._
import play.api.test._

class HomeControllerSpec extends WordSpec with GuiceOneAppPerTest with MustMatchers with BeforeAndAfterAll {

  private val logger = LoggerFactory.getLogger("embedded-cassandra")

  override protected def beforeAll(): Unit = {
    EmbeddedCassandra.start(logger)
  }

  override protected def afterAll(): Unit = {
    EmbeddedCassandra.cleanup(logger)
  }

  "Application" should {
    "render the index page" in {
      val result = route(app, FakeRequest(GET, "/")).get
      status(result) must equal(OK)
      contentAsString(result) must include("Spring Bud")
    }
  }
}
Example 44
Source File: JdbcResultSpec.scala From daf with BSD 3-Clause "New" or "Revised" License | 5 votes |
package daf.dataset.query.jdbc import java.sql.Timestamp import java.time.{ LocalDateTime, OffsetDateTime } import org.scalatest.{ MustMatchers, WordSpec } import play.api.libs.json._ class JdbcResultSpec extends WordSpec with MustMatchers { "A JDBC Result container" must { "convert to CSV" in { JdbcResults.flat.toCsv.toList must be { List( """"int", "string", "bool", "timestamp"""", """1, "str1", true, "2018-06-25T09:00:00Z"""", """2, "str2", false, "2018-06-25T09:30:00Z"""", """<null>, <null>, false, <null>""" ) } } "convert to json" in { JdbcResults.flat.toJson.toList must be { Seq( JsObject { Seq( "int" -> JsNumber(1), "string" -> JsString("str1"), "bool" -> JsBoolean(true), "timestamp" -> JsString("2018-06-25T09:00:00Z") ) }, JsObject { Seq( "int" -> JsNumber(2), "string" -> JsString("str2"), "bool" -> JsBoolean(false), "timestamp" -> JsString("2018-06-25T09:30:00Z") ) }, JsObject { Seq( "int" -> JsNull, "string" -> JsNull, "bool" -> JsBoolean(false), "timestamp" -> JsNull ) } ) } } } } object JdbcResults { private val offset = OffsetDateTime.now().getOffset private def timestamp(dateTime: LocalDateTime) = Timestamp.from { dateTime.toInstant(offset) } val flat = JdbcResult( header = Seq("int", "string", "bool", "timestamp"), rows = Vector( List( Int.box(1), "str1", Boolean.box(true), timestamp { LocalDateTime.of(2018, 6, 25, 9, 0) } ), List( Int.box(2), "str2", Boolean.box(false), timestamp { LocalDateTime.of(2018, 6, 25, 9, 30) } ), List( null, null, Boolean.box(false), null ) ) ) }
Example 45
Source File: ColumnFragmentsSpec.scala From daf with BSD 3-Clause "New" or "Revised" License | 5 votes |
package daf.dataset.query.jdbc import daf.dataset.query._ import org.scalatest.{ MustMatchers, WordSpec } import doobie.implicits.toSqlInterpolator import scala.annotation.tailrec import scala.util.Success class SelectFragmentSpec extends WordSpec with MustMatchers { "A [select] fragment writer" must { "serialize a [select] clause in SQL" in { ColumnFragments.select { SelectClauses.simple }.run.map { _._1.toString } must be { Success { fr"SELECT col1, col2 AS alias1, 1, 'string' AS alias2, MAX(col3) AS alias3, SUM(true)".toString } } } "create a column/alias reference set" in { ColumnFragments.select { SelectClauses.simple }.run.get._2 must have ( ColumnReferenceMatchers hasColumn "col1", ColumnReferenceMatchers hasColumn "col2", ColumnReferenceMatchers hasColumn "1", ColumnReferenceMatchers hasColumn "'string'", ColumnReferenceMatchers hasColumn "col3", ColumnReferenceMatchers hasColumn "true", ColumnReferenceMatchers hasAlias "alias1", ColumnReferenceMatchers hasAlias "alias2", ColumnReferenceMatchers hasAlias "alias3" ) } "serialize a very long [select] without stack overflow" in { ColumnFragments.select { SelectClauses.nested }.run must be { 'Success } } "fail serialization when sql is injected in a column name" in { ColumnFragments.select { SelectClauses.injectNamed }.run must be { 'Failure } } "escape quotes in value strings" in { ColumnFragments.select { SelectClauses.injectValue }.run.map { _._1.toString } must be { Success { fr"""SELECT '\' SELECT col2 FROM table WHERE \'\' == \''""".toString } } } } } object SelectClauses { val simple = SelectClause { Seq( NamedColumn("col1"), NamedColumn("col2") as "alias1", ValueColumn(1), ValueColumn("string") as "alias2", Max(NamedColumn("col3")) as "alias3", Sum(ValueColumn(true)) ) } val injectNamed = SelectClause { Seq( NamedColumn("SELECT col2 FROM table") ) } val injectValue = SelectClause { Seq( ValueColumn("' SELECT col2 FROM table WHERE '' == '") ) } @tailrec private def nest(column: Column, n: Int = 10000): Column = if (n == 0) column else nest(Sum(column), n - 1) val nested = SelectClause { Seq { nest(ValueColumn(true)) } } }
Example 46
Source File: JdbcQueryAnalysisSpec.scala From daf with BSD 3-Clause "New" or "Revised" License | 5 votes |
package daf.dataset.query.jdbc import cats.syntax.foldable.toFoldableOps import cats.instances.list.catsStdInstancesForList import org.scalatest.{ MustMatchers, WordSpec } class JdbcQueryAnalysisSpec extends WordSpec with MustMatchers { "JDBC Query Analysis" must { "interpret a simple query explanation" in { JdbcQueryAnalyses.simple.foldMap { JdbcQueryAnalysis.fromString } must have ( JdbcQueryAnalysisMatchers.memoryReservation { 400d }, JdbcQueryAnalysisMatchers.memoryEstimate { 294.912d }, JdbcQueryAnalysisMatchers.numSteps { 9 } ) } } } private object JdbcQueryAnalyses { val simple = List( "Per-Host Resource Reservation: Memory=400.00MB", "Per-Host Resource Estimates: Memory=0.288GB", "WARNING: The following tables are missing relevant table and/or column statistics.", "database.table", "", "PLAN-ROOT SINK", "|", "08:EXCHANGE [UNPARTITIONED]", "|", "07:AGGREGATE [FINALIZE]", "| output: count:merge(*)", "| group by: A.col1", "|", "06:EXCHANGE [HASH(A.col1)]", "|", "03:AGGREGATE [STREAMING]", "| output: count(*)", "| group by: A.col1", "|", "02:HASH JOIN [INNER JOIN, PARTITIONED]", "| hash predicates: A.col2 = B.col3", "| runtime filters: RF000 <- B.col3", "|", "|--05:EXCHANGE [HASH(B.col3)]", "| |", "| 01:SCAN HDFS [database.table b]", "| partitions=1/1 files=3 size=3.41KB", "|", "04:EXCHANGE [HASH(A.code_level_2)]", "|", "00:SCAN HDFS [database.table a]", " partitions=1/1 files=3 size=3.41KB", " runtime filters: RF000 -> A.code_level_2" ) }
Example 47
Source File: JdbcQueryServiceSpec.scala From daf with BSD 3-Clause "New" or "Revised" License | 5 votes |
package daf.dataset.query.jdbc import cats.effect.IO import cats.instances.list.catsStdInstancesForList import daf.dataset.query.{ Count, GroupByClause, Gt, NamedColumn, Query, SelectClause, ValueColumn, WhereClause } import daf.instances.H2TransactorInstance import doobie.free.KleisliInterpreter import doobie.free.connection.AsyncConnectionIO import doobie.implicits.{ toConnectionIOOps, toSqlInterpolator } import doobie.util.query.Query0 import doobie.util.transactor.{ Strategy, Transactor } import doobie.util.update.Update import org.apache.commons.dbcp.BasicDataSource import org.scalatest.{ BeforeAndAfterAll, MustMatchers, WordSpec } class JdbcQueryServiceSpec extends WordSpec with MustMatchers with BeforeAndAfterAll { private lazy val service = new JdbcQueryService(null, None) with H2TransactorInstance override def beforeAll(): Unit = JdbcQueries.prepare.transact { service.transactor("") }.unsafeRunSync() match { case (_ , rows) if rows == 0 => throw new RuntimeException("Unable to start test: [rows] were not created") case (_, _) => // do nothing } "A jdbc query service" must { "run queries" in { service.exec(JdbcQueries.select, "user", "").map { _.toCsv.toList }.get must be { List( """"COUNTRY", "COUNTS"""", """"Italy", 2""", """"Netherlands", 1""" ) } } } } object JdbcQueries { type User = (String, String, Int, String) private def createTransactor(dataSource: BasicDataSource) = Transactor[IO, BasicDataSource]( dataSource, a => IO(a.getConnection), KleisliInterpreter[IO].ConnectionInterpreter, Strategy.void ) val ddl = sql""" CREATE TABLE user( id VARCHAR, username VARCHAR, age SMALLINT, country VARCHAR ) """.update.run Query0.apply("").stream val insert = Update[User]("INSERT INTO user(id, username, age, country) VALUES (?, ?, ?, ?)").updateMany[List] { List( ("id1", "user1", 42, "Italy"), ("id2", "user2", 32, "Italy"), ("id3", "user3", 27, "Italy"), ("id4", "user4", 33, "Netherlands") ) } val prepare = for { table <- JdbcQueries.ddl insert <- JdbcQueries.insert } yield (table, insert) val select = Query( select = SelectClause { Seq( NamedColumn("country"), Count(NamedColumn("id")) as "counts" ) }, where = Some { WhereClause { Gt(NamedColumn("age"), ValueColumn(30)) } }, groupBy = Some { GroupByClause { Seq(NamedColumn("country")) } }, having = None, limit = None ) }
Example 48
Source File: GroupingFragmentsSpec.scala From daf with BSD 3-Clause "New" or "Revised" License | 5 votes |
package daf.dataset.query.jdbc import daf.dataset.query.{ GroupByClause, Max, NamedColumn } import doobie.implicits.toSqlInterpolator import org.scalatest.{ MustMatchers, WordSpec } import scala.util.Success class GroupingFragmentsSpec extends WordSpec with MustMatchers { "A [groupBy] fragment writer" must { "serialize a [groupBy] clause in SQL" in { GroupingFragments.groupBy(GroupByClauses.valid, GroupByClauses.validRef).run.map { _._1.toString } must be { Success { fr"GROUP BY col1, col2".toString } } } "throw an error" when { "a [groupBy] clause contains an alias column" in { GroupingFragments.groupBy(GroupByClauses.invalidAlias, GroupByClauses.validRef).run must be { 'Failure } } "a [groupBy] clause contains an function column" in { GroupingFragments.groupBy(GroupByClauses.invalidFunction, GroupByClauses.validRef).run must be { 'Failure } } "an invalid column reference is encountered" in { GroupingFragments.groupBy(GroupByClauses.valid, GroupByClauses.invalidRef).run must be { 'Failure } } } } } object GroupByClauses { val validRef = ColumnReference( Set("col1", "col2"), Set("alias1") ) val invalidRef = ColumnReference( Set("col1", "col2", "col3"), Set.empty[String] ) val valid = GroupByClause { Seq(NamedColumn("col1"), NamedColumn("col2")) } val invalidAlias = GroupByClause { Seq(NamedColumn("col1") as "alias1") } val invalidFunction = GroupByClause { Seq(Max(NamedColumn("col1"))) } }
Example 49
Source File: DatasetFunctionsSpec.scala From daf with BSD 3-Clause "New" or "Revised" License | 5 votes |
package daf.dataset import java.io.ByteArrayInputStream import akka.stream.ActorMaterializer import akka.stream.scaladsl.StreamConverters import controllers.modules.TestAbstractModule import daf.filesystem.MergeStrategy import daf.instances.{ AkkaInstance, ConfigurationInstance } import org.scalatest.{ BeforeAndAfterAll, MustMatchers, WordSpecLike } import scala.concurrent.Await import scala.concurrent.duration._ import scala.util.Random class DatasetFunctionsSpec extends TestAbstractModule with WordSpecLike with MustMatchers with BeforeAndAfterAll with ConfigurationInstance with AkkaInstance { implicit lazy val executionContext = actorSystem.dispatchers.lookup("akka.actor.test-dispatcher") protected implicit lazy val materializer = ActorMaterializer.create { actorSystem } override def beforeAll() = { startAkka() } def data = (1 to 5) .map { i => Random.alphanumeric.grouped(20).take(5).map { s => s"$i - ${s.mkString}" }.toStream :+ defaultSeparator } def stream = MergeStrategy.coalesced { data.map { iter => new ByteArrayInputStream( iter.mkString(defaultSeparator).getBytes("UTF-8") ) } } def source = StreamConverters.fromInputStream(() => stream, 5) "Source manipulation" must { "convert to a string source" in { Await.result( wrapDefault { asStringSource(source) }.runFold("") { _ + _ }, 5.seconds ).split(defaultSeparator).length must be { 25 } } "convert to a json source" in { Await.result( wrapJson { asStringSource(source) }.runFold("") { _ + _ }, 5.seconds ).split(jsonSeparator).length must be { 25 } } } }
Example 50
Source File: Spec.scala From monadless with Apache License 2.0 | 5 votes |
package io.monadless import scala.util.Try import scala.util.control.NonFatal import org.scalatest.MustMatchers import io.monadless.impl.TestSupport trait Spec extends org.scalatest.FreeSpec with MustMatchers with Monadless[Try] with TestSupport[Try] { def apply[T](v: => T) = Try(v) def collect[T](list: List[Try[T]]): Try[List[T]] = list.foldLeft(Try(List.empty[T])) { (acc, item) => for { l <- acc i <- item } yield l :+ i } def get[T](m: Try[T]): T = m.get def rescue[T](m: Try[T])(pf: PartialFunction[Throwable, Try[T]]) = m.recoverWith(pf) def ensure[T](m: Try[T])(f: => Unit) = m.map { r => try f catch { case NonFatal(e) => () } r } }
Example 51
Source File: MonadlessTaskSpec.scala From monadless with Apache License 2.0 | 5 votes |
package io.monadless.monix import java.util.concurrent.TimeUnit import org.scalatest.MustMatchers import io.monadless.impl.TestSupport import monix.eval.Task import monix.execution.Cancelable import monix.execution.schedulers.ReferenceScheduler class MonadlessTaskSpec extends org.scalatest.FreeSpec with MustMatchers with MonadlessTask with TestSupport[Task] { implicit val s = new ReferenceScheduler { def scheduleOnce(initialDelay: Long, unit: TimeUnit, r: Runnable) = { r.run() Cancelable.empty } def execute(command: Runnable) = command.run() def executionModel = monix.execution.ExecutionModel.SynchronousExecution def reportFailure(t: Throwable): Unit = {} } def get[T](f: Task[T]) = f.runSyncMaybe.right.get def fail[T]: T = throw new Exception val one = Task(1) val two = Task(2) "apply" in runLiftTest(1) { 1 } "collect" in runLiftTest(3) { unlift(one) + unlift(two) } "map" in runLiftTest(2) { unlift(one) + 1 } "flatMap" in runLiftTest(3) { val a = unlift(one) a + unlift(two) } "rescue" - { "success" in runLiftTest(1) { try unlift(one) catch { case e: Throwable => unlift(two) } } "failure" in runLiftTest(1) { try fail[Int] catch { case e: Exception => unlift(one) } } } "ensure" - { "success" in runLiftTest(1) { var i = 0 def c() = i += 1 try unlift(one) finally { c() } i } "failure" in runLiftTest(1) { var i = 0 def c() = i += 1 try { try unlift(one) / fail[Int] finally { c() } } catch { case e: Exception => 1 } i } } }
Example 52
Source File: MonadlessFutureSpec.scala From monadless with Apache License 2.0 | 5 votes |
package io.monadless.stdlib import org.scalatest.MustMatchers import scala.concurrent.Future import io.monadless.impl.TestSupport import java.util.concurrent.atomic.AtomicReference import scala.util.Try import scala.concurrent.ExecutionContext class MonadlessFutureSpec extends org.scalatest.FreeSpec with MustMatchers with MonadlessFuture with TestSupport[Future] { implicit val ec = new ExecutionContext { def execute(runnable: Runnable): Unit = runnable.run() def reportFailure(cause: Throwable): Unit = {} } def get[T](f: Future[T]) = { // can't use Await because of scala.js val r = new AtomicReference[Try[T]] f.onComplete(r.set) r.get.get } def fail[T]: T = throw new Exception val one = Future.successful(1) val two = Future.successful(2) "apply" in runLiftTest(1) { 1 } "collect" in runLiftTest(3) { unlift(one) + unlift(two) } "map" in runLiftTest(2) { unlift(one) + 1 } "flatMap" in runLiftTest(3) { val a = unlift(one) a + unlift(two) } "rescue" - { "success" in runLiftTest(1) { try unlift(one) catch { case e: Throwable => unlift(two) } } "failure" in runLiftTest(1) { try fail[Int] catch { case e: Exception => unlift(one) } } } "ensure" - { "success" in runLiftTest(1) { var i = 0 def c() = i += 1 try unlift(one) finally { c() } i } "failure" in runLiftTest(1) { var i = 0 def c() = i += 1 try { try unlift(one) / fail[Int] finally { c() } } catch { case e: Exception => 1 } i } } }
Example 53
Source File: MonadlessOptionSpec.scala From monadless with Apache License 2.0 | 5 votes |
package io.monadless.stdlib import org.scalatest.MustMatchers import io.monadless.impl.TestSupport class MonadlessOptionSpec extends org.scalatest.FreeSpec with MustMatchers with MonadlessOption with TestSupport[Option] { def get[T](t: Option[T]) = t.get def fail[T]: T = throw new Exception val one = Option(1) val two = Option(2) "apply" in runLiftTest(1) { 1 } "collect" in runLiftTest(3) { unlift(one) + unlift(two) } "map" in runLiftTest(2) { unlift(one) + 1 } "flatMap" in runLiftTest(3) { val a = unlift(one) a + unlift(two) } "rescue" - { "success" in { """ lift { try unlift(one) catch { case e: Exception => unlift(two) } } """ mustNot compile } "failure" in { """ lift { try fail[Int] catch { case e: Exception => unlift(one) } } """ mustNot compile } } "ensure" - { "success" in { """ lift { var i = 0 def c() = i += 1 try unlift(one) finally { c() } i } """ mustNot compile } "failure" in { """ lift { var i = 0 def c() = i += 1 try { try unlift(one) / fail[Int] finally { c() } } catch { case e: Exception => 1 } i } """ mustNot compile } } }
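MonadlessOptionSpec expresses "this lift block must not compile" with ScalaTest's compile-time assertions. A minimal sketch of that idiom on its own, using deliberately ill-typed snippets that are not taken from monadless:

package sketches

import org.scalatest.{FreeSpec, MustMatchers}

// Illustrative only: ScalaTest's compile-time assertions, as used by
// MonadlessOptionSpec above. The quoted snippets are arbitrary.
class CompileCheckSketchSpec extends FreeSpec with MustMatchers {
  "compile-time assertions" - {
    "accept well-typed code" in {
      """ val x: Int = 1 + 1 """ must compile
    }
    "reject ill-typed code" in {
      """ val x: Int = "not an int" """ mustNot typeCheck
    }
    "reject unparseable code" in {
      """ val x: = """ mustNot compile
    }
  }
}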
Example 54
Source File: MonadlessTrySpec.scala From monadless with Apache License 2.0 | 5 votes |
package io.monadless.stdlib import scala.util.Try import org.scalatest.MustMatchers import io.monadless.impl.TestSupport class MonadlessTrySpec extends org.scalatest.FreeSpec with MustMatchers with MonadlessTry with TestSupport[Try] { def get[T](t: Try[T]) = t.get def fail[T]: T = throw new Exception val one = Try(1) val two = Try(2) "apply" in runLiftTest(1) { 1 } "collect" in runLiftTest(3) { unlift(one) + unlift(two) } "map" in runLiftTest(2) { unlift(one) + 1 } "flatMap" in runLiftTest(3) { val a = unlift(one) a + unlift(two) } "rescue" - { "success" in runLiftTest(1) { try unlift(one) catch { case e: Exception => unlift(two) } } "failure" in runLiftTest(1) { try fail[Int] catch { case e: Exception => unlift(one) } } } "ensure" - { "success" in runLiftTest(1) { var i = 0 def c(): Unit = i += 1 try unlift(one) finally { c() } i } "failure" in runLiftTest(1) { var i = 0 def c(): Unit = i += 1 try { try unlift(one) / fail[Int] finally { c() } } catch { case e: Exception => 1 } i } } }
Example 55
Source File: MonadlessMonad.scala From monadless with Apache License 2.0 | 5 votes |
package io.monadless.algebird import org.scalatest.MustMatchers import io.monadless.impl.TestSupport import com.twitter.algebird.Identity import com.twitter.algebird.Monad class MonadlessMonadSpec extends org.scalatest.FreeSpec with MustMatchers with MonadlessMonad[Identity] with TestSupport[Identity] { override protected val tc = implicitly[Monad[Identity]] def get[T](t: Identity[T]): T = t.get def fail[T]: T = throw new Exception val one = Identity(1) val two = Identity(2) "apply" in runLiftTest(1) { 1 } "collect" in runLiftTest(3) { unlift(one) + unlift(two) } "map" in runLiftTest(2) { unlift(one) + 1 } "flatMap" in { runLiftTest(3) { val a = unlift(one) a + unlift(two) } } "rescue" - { "success" in { """ lift { try unlift(one) catch { case e: Exception => unlift(two) } } """ mustNot compile } "failure" in { """ lift { try fail[Int] catch { case e: Exception => unlift(one) } } """ mustNot compile } } "ensure" - { "success" in { """ lift { var i = 0 def c() = i += 1 try unlift(one) finally { c() } i } """ mustNot compile } "failure" in { """ lift { var i = 0 def c() = i += 1 try { try unlift(one) / fail[Int] finally { c() } } catch { case e: Exception => 1 } i } """ mustNot compile } } }
Example 56
Source File: LiigaJournalistSpec.scala From avoin-voitto with MIT License | 5 votes |
package liigavoitto.journalist

import org.scalatest.{BeforeAndAfterAll, MustMatchers, WordSpecLike}

import scala.util.Try

class LiigaJournalistSpec extends WordSpecLike with TestUtils with BeforeAndAfterAll with MustMatchers with MockData {
  "LiigaJournalist" must {
    "create an article with language" in {
      val data = md
      val finnishRes = LiigaJournalist.createArticle(data, "fi")
      assert(finnishRes.isDefined)
      assert(finnishRes.get.language == "fi")

      val swedishRes = LiigaJournalist.createArticle(data, "sv")
      assert(swedishRes.isDefined)
      assert(swedishRes.get.language == "sv")
    }
  }
}
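LiigaJournalistSpec falls back to plain assert for its Option checks; with MustMatchers plus OptionValues the same checks can be written as matchers. A hedged sketch of that alternative, with an invented class name that is not part of avoin-voitto:

package sketches

import org.scalatest.{MustMatchers, OptionValues, WordSpec}

// Illustrative only: Option assertions written with matchers instead of
// assert(...), mirroring the checks in LiigaJournalistSpec above.
class OptionMatcherSketchSpec extends WordSpec with MustMatchers with OptionValues {
  "Option matchers" must {
    "express isDefined and value checks" in {
      val language: Option[String] = Some("fi")
      language mustBe defined
      language.value mustBe "fi"
      Option.empty[String] mustBe empty
    }
  }
}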
Example 57
Source File: SparkCassBulkWriterSpec.scala From Spark2Cassandra with Apache License 2.0 | 5 votes |
package com.github.jparkie.spark.cassandra import com.datastax.driver.core.querybuilder.QueryBuilder import com.datastax.spark.connector.AllColumns import com.datastax.spark.connector.writer.{ RowWriterFactory, SqlRowWriter } import com.github.jparkie.spark.cassandra.client.SparkCassSSTableLoaderClientManager import com.github.jparkie.spark.cassandra.conf.{ SparkCassServerConf, SparkCassWriteConf } import com.holdenkarau.spark.testing.SharedSparkContext import org.apache.spark.sql.{ Row, SQLContext } import org.scalatest.{ MustMatchers, WordSpec } import scala.collection.JavaConverters._ class SparkCassBulkWriterSpec extends WordSpec with MustMatchers with CassandraServerSpecLike with SharedSparkContext { val testKeyspace = "test_keyspace" val testTable = "test_table" override def beforeAll(): Unit = { super.beforeAll() getCassandraConnector.withSessionDo { currentSession => createKeyspace(currentSession, testKeyspace) currentSession.execute( s"""CREATE TABLE $testKeyspace.$testTable ( | test_key BIGINT PRIMARY KEY, | test_value VARCHAR |); """.stripMargin ) } } "SparkCassBulkWriter" must { "write() successfully" in { val sqlContext = new SQLContext(sc) import sqlContext.implicits._ implicit val testRowWriterFactory: RowWriterFactory[Row] = SqlRowWriter.Factory val testCassandraConnector = getCassandraConnector val testSparkCassWriteConf = SparkCassWriteConf() val testSparkCassServerConf = SparkCassServerConf( // See https://github.com/jsevellec/cassandra-unit/blob/master/cassandra-unit/src/main/resources/cu-cassandra.yaml storagePort = 7010 ) val testSparkCassBulkWriter = SparkCassBulkWriter( testCassandraConnector, testKeyspace, testTable, AllColumns, testSparkCassWriteConf, testSparkCassServerConf ) val testRDD = sc.parallelize(1 to 25) .map(currentNumber => (currentNumber.toLong, s"Hello World: $currentNumber!")) val testDataFrame = testRDD.toDF("test_key", "test_value") sc.runJob(testDataFrame.rdd, testSparkCassBulkWriter.write _) getCassandraConnector.withSessionDo { currentSession => val queryStatement = QueryBuilder.select("test_key", "test_value") .from(testKeyspace, testTable) .limit(25) val resultSet = currentSession.execute(queryStatement) val outputSet = resultSet.all.asScala .map(currentRow => (currentRow.getLong("test_key"), currentRow.getString("test_value"))) .toMap for (currentNumber <- 1 to 25) { val currentKey = currentNumber.toLong outputSet(currentKey) mustEqual s"Hello World: $currentNumber!" } } SparkCassSSTableLoaderClientManager.evictAll() } } }
Example 58
Source File: SparkCassSSTableLoaderClientSpec.scala From Spark2Cassandra with Apache License 2.0 | 5 votes |
package com.github.jparkie.spark.cassandra.client import com.github.jparkie.spark.cassandra.CassandraServerSpecLike import com.github.jparkie.spark.cassandra.conf.SparkCassServerConf import org.apache.cassandra.tools.BulkLoadConnectionFactory import org.scalatest.{ MustMatchers, WordSpec } class SparkCassSSTableLoaderClientSpec extends WordSpec with MustMatchers with CassandraServerSpecLike { val testKeyspace = "test_keyspace" val testTable = "test_table" override def beforeAll(): Unit = { super.beforeAll() getCassandraConnector.withSessionDo { currentSession => createKeyspace(currentSession, testKeyspace) currentSession.execute( s"""CREATE TABLE $testKeyspace.$testTable ( | test_key VARCHAR PRIMARY KEY, | test_value BIGINT |); """.stripMargin ) } } "SparkCassSSTableLoaderClient" must { "initialize successfully" in { getCassandraConnector.withSessionDo { currentSession => val testSession = currentSession val testSparkCassServerConf = SparkCassServerConf() val testSparkCassSSTableLoaderClient = new SparkCassSSTableLoaderClient(testSession, testSparkCassServerConf) testSparkCassSSTableLoaderClient.init(testKeyspace) } } "ensure tables contain TableIdentifier(testKeyspace, testTable)" in { getCassandraConnector.withSessionDo { currentSession => val testSession = currentSession val testSparkCassServerConf = SparkCassServerConf() val testSparkCassSSTableLoaderClient = new SparkCassSSTableLoaderClient(testSession, testSparkCassServerConf) testSparkCassSSTableLoaderClient.init(testKeyspace) assert(testSparkCassSSTableLoaderClient.tables .contains(SparkCassSSTableLoaderClient.TableIdentifier(testKeyspace, testTable))) } } "retrieve CFMetaData" in { getCassandraConnector.withSessionDo { currentSession => val testSession = currentSession val testSparkCassServerConf = SparkCassServerConf() val testSparkCassSSTableLoaderClient = new SparkCassSSTableLoaderClient(testSession, testSparkCassServerConf) testSparkCassSSTableLoaderClient.init(testKeyspace) val outputCFMetaData = testSparkCassSSTableLoaderClient.getCFMetaData(testKeyspace, testTable) outputCFMetaData.ksName mustEqual testKeyspace outputCFMetaData.cfName mustEqual testTable } } "getConnectionFactory successfully" in { getCassandraConnector.withSessionDo { currentSession => val testSession = currentSession val testSparkCassServerConf = SparkCassServerConf() val testSparkCassSSTableLoaderClient = new SparkCassSSTableLoaderClient(testSession, testSparkCassServerConf) testSparkCassSSTableLoaderClient.init(testKeyspace) val outputConnectionFactory = testSparkCassSSTableLoaderClient .getConnectionFactory assert(outputConnectionFactory.isInstanceOf[BulkLoadConnectionFactory]) } } "close session on stop()" in { val testSession = getCassandraConnector.openSession() val testSparkCassServerConf = SparkCassServerConf() val testSparkCassSSTableLoaderClient = new SparkCassSSTableLoaderClient(testSession, testSparkCassServerConf) testSparkCassSSTableLoaderClient.stop() assert(testSession.isClosed) } } }
Example 59
Source File: SparkCassSSTableLoaderClientManagerSpec.scala From Spark2Cassandra with Apache License 2.0 | 5 votes |
package com.github.jparkie.spark.cassandra.client import com.github.jparkie.spark.cassandra.CassandraServerSpecLike import com.github.jparkie.spark.cassandra.conf.SparkCassServerConf import org.scalatest.{ MustMatchers, WordSpec } class SparkCassSSTableLoaderClientManagerSpec extends WordSpec with MustMatchers with CassandraServerSpecLike { "SparkCassSSTableLoaderClientManager" must { "return one SparkCassSSTableLoaderClient in getClient()" in { val testSparkCassServerConf = SparkCassServerConf() SparkCassSSTableLoaderClientManager.getClient(getCassandraConnector, testSparkCassServerConf) SparkCassSSTableLoaderClientManager.getClient(getCassandraConnector, testSparkCassServerConf) SparkCassSSTableLoaderClientManager.getClient(getCassandraConnector, testSparkCassServerConf) assert(SparkCassSSTableLoaderClientManager.internalClients.size == 1) SparkCassSSTableLoaderClientManager.evictAll() } "evictAll() ensures all sessions are stopped and internalClients is empty" in { val testSparkCassServerConf = SparkCassServerConf() val outputClient = SparkCassSSTableLoaderClientManager.getClient(getCassandraConnector, testSparkCassServerConf) SparkCassSSTableLoaderClientManager.evictAll() assert(outputClient.session.isClosed) assert(SparkCassSSTableLoaderClientManager.internalClients.isEmpty) } } }
Example 60
Source File: SparkCassDataFrameFunctionsSpec.scala From Spark2Cassandra with Apache License 2.0 | 5 votes |
package com.github.jparkie.spark.cassandra.sql

import com.holdenkarau.spark.testing.SharedSparkContext
import org.apache.spark.sql.SQLContext
import org.scalatest.{ MustMatchers, WordSpec }

class SparkCassDataFrameFunctionsSpec extends WordSpec with MustMatchers with SharedSparkContext {
  "Package com.github.jparkie.spark.cassandra.sql" must {
    "lift DataFrame into SparkCassDataFrameFunctions" in {
      val sqlContext = new SQLContext(sc)

      import sqlContext.implicits._

      val testRDD = sc.parallelize(1 to 25)
        .map(currentNumber => (currentNumber.toLong, s"Hello World: $currentNumber!"))
      val testDataFrame = testRDD.toDF("test_key", "test_value")

      // If internalSparkContext is available, the DataFrame was lifted.
      testDataFrame.internalSparkContext
    }
  }
}
Example 61
Source File: SparkCassRDDFunctionsSpec.scala From Spark2Cassandra with Apache License 2.0 | 5 votes |
package com.github.jparkie.spark.cassandra.rdd

import com.holdenkarau.spark.testing.SharedSparkContext
import org.scalatest.{ MustMatchers, WordSpec }

class SparkCassRDDFunctionsSpec extends WordSpec with MustMatchers with SharedSparkContext {
  "Package com.github.jparkie.spark.cassandra.rdd" must {
    "lift RDD into SparkCassRDDFunctions" in {
      val testRDD = sc.parallelize(1 to 25)
        .map(currentNumber => (currentNumber.toLong, s"Hello World: $currentNumber!"))

      // If internalSparkContext is available, RDD was lifted.
      testRDD.internalSparkContext
    }
  }
}
Example 62
Source File: SparkCassWriteConfSpec.scala From Spark2Cassandra with Apache License 2.0 | 5 votes |
package com.github.jparkie.spark.cassandra.conf import org.apache.cassandra.dht.{ ByteOrderedPartitioner, Murmur3Partitioner, RandomPartitioner } import org.apache.spark.SparkConf import org.scalatest.{ MustMatchers, WordSpec } class SparkCassWriteConfSpec extends WordSpec with MustMatchers { "SparkCassWriteConf" must { "be extracted from SparkConf successfully" in { val inputSparkConf = new SparkConf() .set("spark.cassandra.bulk.write.partitioner", "org.apache.cassandra.dht.ByteOrderedPartitioner") .set("spark.cassandra.bulk.write.throughput_mb_per_sec", "1") .set("spark.cassandra.bulk.write.connection_per_host", "2") val outputSparkCassWriteConf = SparkCassWriteConf.fromSparkConf(inputSparkConf) outputSparkCassWriteConf.partitioner mustEqual "org.apache.cassandra.dht.ByteOrderedPartitioner" outputSparkCassWriteConf.throughputMiBPS mustEqual 1 outputSparkCassWriteConf.connectionsPerHost mustEqual 2 } "set defaults when no properties set in SparkConf" in { val inputSparkConf = new SparkConf() val outputSparkCassWriteConf = SparkCassWriteConf.fromSparkConf(inputSparkConf) outputSparkCassWriteConf.partitioner mustEqual SparkCassWriteConf.SPARK_CASSANDRA_BULK_WRITE_PARTITIONER.default outputSparkCassWriteConf.throughputMiBPS mustEqual SparkCassWriteConf.SPARK_CASSANDRA_BULK_WRITE_THROUGHPUT_MB_PER_SEC.default outputSparkCassWriteConf.connectionsPerHost mustEqual SparkCassWriteConf.SPARK_CASSANDRA_BULK_WRITE_CONNECTIONS_PER_HOST.default } "reject invalid partitioner in SparkConf" in { val inputSparkConf = new SparkConf() .set("spark.cassandra.bulk.write.partitioner", "N/A") intercept[IllegalArgumentException] { SparkCassWriteConf.fromSparkConf(inputSparkConf) } } "getIPartitioner() correctly per partitioner" in { val sparkCassWriteConf1 = SparkCassWriteConf("org.apache.cassandra.dht.Murmur3Partitioner") assert(sparkCassWriteConf1.getIPartitioner.isInstanceOf[Murmur3Partitioner]) val sparkCassWriteConf2 = SparkCassWriteConf("org.apache.cassandra.dht.RandomPartitioner") assert(sparkCassWriteConf2.getIPartitioner.isInstanceOf[RandomPartitioner]) val sparkCassWriteConf3 = SparkCassWriteConf("org.apache.cassandra.dht.ByteOrderedPartitioner") assert(sparkCassWriteConf3.getIPartitioner.isInstanceOf[ByteOrderedPartitioner]) } } }
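SparkCassWriteConfSpec verifies that an invalid partitioner is rejected using intercept; MustMatchers also offers the "must be thrownBy" syntax for the same purpose (the eventuate example further down uses it). A small illustrative sketch of both styles on a toy example, not part of Spark2Cassandra:

package sketches

import org.scalatest.{MustMatchers, WordSpec}

// Illustrative only: two equivalent ways to assert that an exception is
// thrown, intercept[...] and "must be thrownBy".
class ExceptionMatcherSketchSpec extends WordSpec with MustMatchers {
  "Exception assertions" must {
    "work with intercept" in {
      val e = intercept[IllegalArgumentException] {
        require(false, "invalid partitioner")
      }
      e.getMessage must include("invalid partitioner")
    }
    "work with thrownBy" in {
      an[ArithmeticException] must be thrownBy {
        1 / 0
      }
    }
  }
}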
Example 63
Source File: BaseSpecTest.scala From wookiee with Apache License 2.0 | 5 votes |
package com.webtrends.harness.service.test

import akka.actor.ActorSystem
import ch.qos.logback.classic.Level
import com.typesafe.config.{Config, ConfigFactory}
import com.webtrends.harness.component.Component
import com.webtrends.harness.service.Service
import org.specs2.mutable.SpecificationLike
import org.scalatest.{MustMatchers, WordSpecLike}

import scala.concurrent.duration._

trait BaseWookieeTest {
  def config: Config = ConfigFactory.empty()
  def componentMap: Option[Map[String, Class[_ <: Component]]] = None
  def servicesMap: Option[Map[String, Class[_ <: Service]]] = None
  def logLevel: Level = Level.INFO
  def startupWait: FiniteDuration = 15 seconds

  TestHarness(config, servicesMap, componentMap, logLevel, startupWait)
  Thread.sleep(1000)
  implicit val system: ActorSystem = TestHarness.system.get
}

trait BaseWookieeSpecTest extends BaseWookieeTest with SpecificationLike

trait BaseWookieeScalaTest extends BaseWookieeTest with WordSpecLike with MustMatchers
Example 64
Source File: EventMetadataSpec.scala From eventuate with Apache License 2.0 | 5 votes |
package com.rbmhtechnology.eventuate.adapter.vertx import com.rbmhtechnology.eventuate.adapter.vertx.api.EventMetadata import io.vertx.core.MultiMap import org.scalatest.{ MustMatchers, WordSpecLike } import scala.collection.JavaConverters._ class EventMetadataSpec extends WordSpecLike with MustMatchers { import EventMetadata._ import EventMetadata.Headers._ def headers(elems: (String, Any)*): MultiMap = { val headers = MultiMap.caseInsensitiveMultiMap() headers.setAll(Map(elems: _*).mapValues(_.toString).asJava) headers } "An EventMetadata" when { "supplied with valid headers" must { "be instantiated with all metadata" in { val metadata = EventMetadata.fromHeaders(headers( MessageProducer -> VertxProducer, LocalLogId -> "logA", LocalSequenceNr -> 1L, EmitterId -> "emitter1") ) metadata.map(_.localLogId) mustBe Some("logA") metadata.map(_.localSequenceNr) mustBe Some(1L) metadata.map(_.emitterId) mustBe Some("emitter1") } } "supplied with invalid headers" must { "be empty if the source is not specified" in { val metadata = EventMetadata.fromHeaders(headers( LocalLogId -> "logA", LocalSequenceNr -> 1L, EmitterId -> "emitter1") ) metadata mustBe None } "be empty if the headers are empty" in { val metadata = EventMetadata.fromHeaders(headers()) metadata mustBe None } "fail to instantiate if the values have the wrong type" in { a[NumberFormatException] must be thrownBy EventMetadata.fromHeaders(headers( MessageProducer -> VertxProducer, LocalLogId -> "logA", LocalSequenceNr -> "i_am_not_a_long_value", EmitterId -> "emitter1") ) } "fail to instantiate if the a value is missing" in { an[IllegalArgumentException] must be thrownBy EventMetadata.fromHeaders(headers( MessageProducer -> VertxProducer, LocalSequenceNr -> 1L, EmitterId -> "emitter1") ) } } } }
Example 65
Source File: VertxAdapterSpec.scala From eventuate with Apache License 2.0 | 5 votes |
package com.rbmhtechnology.eventuate.adapter.vertx import akka.actor.ActorSystem import akka.testkit.TestKit import com.rbmhtechnology.eventuate.adapter.vertx.api.{ EventProducer, VertxAdapterConfig } import com.rbmhtechnology.eventuate.log.EventLogWriter import com.rbmhtechnology.eventuate.log.leveldb.LeveldbEventLog import com.rbmhtechnology.eventuate.utilities._ import com.rbmhtechnology.eventuate.{ LocationCleanupLeveldb, ReplicationEndpoint } import com.typesafe.config.Config import org.scalatest.{ BeforeAndAfterAll, MustMatchers, WordSpecLike } import scala.collection.immutable.Seq object VertxAdapterSpec { case class Event(id: String) val Config = TestConfig.withReplayBatchSize(10) } class VertxAdapterSpec extends TestKit(ActorSystem("test", VertxAdapterSpec.Config)) with WordSpecLike with MustMatchers with BeforeAndAfterAll with StopSystemAfterAll with LocationCleanupLeveldb with VertxEnvironment with VertxEventBusProbes { import VertxAdapterSpec._ import utilities._ val logName = "logA" val adapterId = "adapter1" var storage: ActorStorageProvider = _ var endpoint: ReplicationEndpoint = _ override def config: Config = VertxAdapterSpec.Config override def beforeAll(): Unit = { super.beforeAll() storage = new ActorStorageProvider(adapterId) endpoint = new ReplicationEndpoint(id = "1", logNames = Set(logName), logFactory = logId => LeveldbEventLog.props(logId), connections = Set()) } "A VertxAdapter" must { "read events from an inbound log and deliver them to the Vert.x eventbus" in { val log = endpoint.logs(logName) val adapterConfig = VertxAdapterConfig() .addProducer(EventProducer.fromLog(log) .publishTo { case _ => endpoint1.address } .as("adapter1")) .registerDefaultCodecFor(classOf[Event]) val vertxAdapter = VertxAdapter(adapterConfig, vertx, storage) val logWriter = new EventLogWriter("w1", endpoint.logs(logName)) endpoint.activate() vertxAdapter.start() logWriter.write(Seq(Event("1"))).await.head storage.expectRead(replySequenceNr = 0) storage.expectWrite(sequenceNr = 1) endpoint1.probe.expectVertxMsg(body = Event("1")) logWriter.write(Seq(Event("2"))).await storage.expectWrite(sequenceNr = 2) endpoint1.probe.expectVertxMsg(body = Event("2")) logWriter.write(Seq(Event("3"), Event("4"))).await storage.expectWriteAnyOf(sequenceNrs = Seq(3, 4)) endpoint1.probe.expectVertxMsg(body = Event("3")) endpoint1.probe.expectVertxMsg(body = Event("4")) } } }
Example 66
Source File: CheckJsStringSpec.scala From swagger-check with MIT License | 5 votes |
package de.leanovate.swaggercheck.shrinkable import org.scalacheck.Shrink import org.scalatest.{MustMatchers, WordSpec} class CheckJsStringSpec extends WordSpec with MustMatchers { "JsString" should { "not shrink formatted" in { val original = CheckJsString.formatted("0123456789abcdefghijklmnopqrstuvxyz") val shrink = Shrink.shrink(original) shrink mustBe empty } "shrink without min length" in { val original = CheckJsString.unformatted( "0123456789abcdefghijklmnopqrstuvwxyz") val shrink = Shrink.shrink(original) shrink must not be empty shrink.foreach { value => value.minLength mustBe empty value.value.length must be <= 36 } } "shrink with min length" in { val original = CheckJsString(formatted = false, Some(30), "0123456789abcdefghijklmnopqrstuvwxyz") val shrink = Shrink.shrink(original) shrink must not be empty shrink.foreach { value => value.minLength mustBe Some(30) value.value.length must be <= 36 value.value.length must be >= 30 } } "not shrink beneath min length" in { val original = CheckJsString(formatted = false, Some(36), "0123456789abcdefghijklmnopqrstuvwxyz") val shrink = Shrink.shrink(original) shrink mustBe empty } } }
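CheckJsStringSpec mixes emptiness checks (mustBe empty, must not be empty) with ordering comparisons (must be <=, must be >=). A compact hedged sketch of these matchers outside the shrinking context, with made-up names and data:

package sketches

import org.scalatest.{MustMatchers, WordSpec}

// Illustrative only: emptiness and ordering matchers as used in the
// shrink specs above, applied to plain collections and numbers.
class ComparisonMatcherSketchSpec extends WordSpec with MustMatchers {
  "Emptiness and ordering matchers" should {
    "check for empty and non-empty collections" in {
      Seq.empty[Int] mustBe empty
      Seq(1, 2, 3) must not be empty
    }
    "check numeric bounds" in {
      val length = 33
      length must be <= 36
      length must be >= 30
    }
  }
}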
Example 67
Source File: CheckJsValueSpec.scala From swagger-check with MIT License | 5 votes |
package de.leanovate.swaggercheck.shrinkable import org.scalacheck.util.Pretty import org.scalatest.{MustMatchers, WordSpec} class CheckJsValueSpec extends WordSpec with MustMatchers { "JsValueDeserializer" should { "deserialize null to JsNull" in { CheckJsValue.parse("null") mustEqual CheckJsNull } "deserialize integers to JsInteger" in { val result = CheckJsValue.parse("1234") result mustBe an[CheckJsInteger] result.asInstanceOf[CheckJsInteger].value mustEqual BigInt(1234) result.asInstanceOf[CheckJsInteger].min mustEqual Some(BigInt(1234)) } "deserialize floats to JsNumber" in { val result = CheckJsValue.parse("1234.5") result mustBe an[CheckJsNumber] result.asInstanceOf[CheckJsNumber].value mustEqual BigDecimal(1234.5) result.asInstanceOf[CheckJsNumber].min mustEqual Some(BigDecimal(1234.5)) } "deserialize strings to JsFormatterString" in { val result = CheckJsValue.parse( """"one piece of string"""") result mustBe an[CheckJsString] result.asInstanceOf[CheckJsString].formatted mustBe true result.asInstanceOf[CheckJsString].value mustEqual "one piece of string" } "deserialize booleans to JsBoolean" in { CheckJsValue.parse("true") mustEqual CheckJsBoolean(true) CheckJsValue.parse("false") mustEqual CheckJsBoolean(false) } "deserialize arrays to JsArray" in { val result = CheckJsValue.parse( """[1234, 1234.5, true, "one piece of string"]""") result mustBe an[CheckJsArray] result.asInstanceOf[CheckJsArray].elements mustEqual Seq( CheckJsInteger.fixed(1234), CheckJsNumber.fixed(1234.5), CheckJsBoolean(true), CheckJsString.formatted("one piece of string") ) result.asInstanceOf[CheckJsArray].minSize mustEqual Some(4) } "deserialize objects to JsObject" in { val result = CheckJsValue.parse( """{"one": 1234, "two": true, "three": "one piece of string"}""") result mustBe an[CheckJsObject] result.asInstanceOf[CheckJsObject].required mustEqual Set( "one", "two", "three" ) result.asInstanceOf[CheckJsObject].fields mustEqual Map( "one" -> CheckJsInteger.fixed(1234), "two" -> CheckJsBoolean(true), "three" -> CheckJsString.formatted("one piece of string") ) } "convertable to Pretty" in { val pretty = CheckJsValue.prettyJsValue( CheckJsObject.empty.copy(fields = Map("the" -> CheckJsString.formatted("value")))) pretty(Pretty.defaultParams) mustBe """{"the":"value"}""" pretty(Pretty.Params(1)) mustBe """{ | "the" : "value" |}""".stripMargin } } }
Example 68
Source File: CheckJsArraySpec.scala From swagger-check with MIT License | 5 votes |
package de.leanovate.swaggercheck.shrinkable import org.scalacheck.Shrink import org.scalatest.{MustMatchers, WordSpec} class CheckJsArraySpec extends WordSpec with MustMatchers { "JsArray" should { "shrink without min size" in { val original = CheckJsArray(None, Seq( CheckJsInteger(None, None, 1000000), CheckJsString.unformatted("0123456789abcdefghijklmnopqrstuvwxyz"), CheckJsBoolean(true), CheckJsBoolean(false), CheckJsInteger(None, None, 10000), CheckJsString.unformatted("zyxwvutsrqponmlkjihgfedcba9876543210") )) val originalJson = original.minified val shrink = Shrink.shrink(original) shrink must not be empty shrink.foreach { value => value.minSize mustBe empty value.elements.length must be <= 6 } } "shrink with min size" in { val original = CheckJsArray(Some(4), Seq( CheckJsInteger(None, None, 1000000), CheckJsString.unformatted( "0123456789abcdefghijklmnopqrstuvwxyz"), CheckJsBoolean(true), CheckJsBoolean(false), CheckJsInteger(None, None, 10000), CheckJsString.unformatted("zyxwvutsrqponmlkjihgfedcba9876543210") )) val originalJson = original.minified val shrink = Shrink.shrink(original) shrink must not be empty shrink.foreach { value => value.minSize mustBe Some(4) value.elements.length must be <= 6 value.elements.length must be >= 4 } } "not shrink beneath min size" in { val original = CheckJsArray(Some(6), Seq( CheckJsInteger(None, None, 12345678), CheckJsString.unformatted("0123456789abcdefghijklmnopqrstuvwxyz"), CheckJsBoolean(true), CheckJsBoolean(false), CheckJsInteger(None, None, 87654321), CheckJsString.unformatted("zyxwvutsrqponmlkjihgfedcba9876543210") )) val originalJson = original.minified val shrink = Shrink.shrink(original) shrink mustBe empty } } }
Example 69
Source File: CheckJsObjectSpec.scala From swagger-check with MIT License | 5 votes |
package de.leanovate.swaggercheck.shrinkable import org.scalacheck.Shrink import org.scalatest.{MustMatchers, WordSpec} class CheckJsObjectSpec extends WordSpec with MustMatchers { "JsObject" should { "shrink without order or required" in { val original = CheckJsObject(Set.empty, None, Map( "one" -> CheckJsInteger(None, None, 1000000), "two" -> CheckJsString.unformatted("0123456789abcdefghijklmnopqrstuvwxyz"), "three" -> CheckJsBoolean(true), "four" -> CheckJsBoolean(false), "five" ->CheckJsInteger(None, None, 10000), "six" -> CheckJsString.unformatted("zyxwvutsrqponmlkjihgfedcba9876543210") )) val originalJson = original.minified val shrink = Shrink.shrink[CheckJsObject](original) shrink must not be empty shrink.foreach { value => value.fields.keySet -- original.fields.keySet mustBe empty } } "only shrink values if all required" in { val original = CheckJsObject(Set("one", "two", "three", "four", "five", "six"), None, Map( "one" -> CheckJsInteger(None, None, 1000000), "two" -> CheckJsString.unformatted("0123456789abcdefghijklmnopqrstuvwxyz"), "three" -> CheckJsBoolean(true), "four" -> CheckJsBoolean(false), "five" ->CheckJsInteger(None, None, 10000), "six" -> CheckJsString.unformatted("zyxwvutsrqponmlkjihgfedcba9876543210") )) val originalJson = original.minified val shrink = Shrink.shrink[CheckJsObject](original) shrink must not be empty shrink.foreach { value => value.fields must have size 6 } } } }
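CheckJsObjectSpec asserts over the field maps of shrunken objects with set differences and "must have size". MustMatchers also has dedicated map matchers; a short illustrative sketch follows (the class name and data are made up, not from swagger-check):

package sketches

import org.scalatest.{MustMatchers, WordSpec}

// Illustrative only: map-specific matchers that complement the
// "must have size" assertions used by CheckJsObjectSpec above.
class MapMatcherSketchSpec extends WordSpec with MustMatchers {
  "Map matchers" should {
    "check keys, values and size" in {
      val fields = Map("one" -> 1, "two" -> 2, "three" -> 3)
      fields must have size 3
      fields must contain key "two"
      fields must contain value 3
      fields.keySet must contain allOf ("one", "two", "three")
    }
  }
}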
Example 70
Source File: StringFormatsSpec.scala From swagger-check with MIT License | 5 votes |
package de.leanovate.swaggercheck.schema.gen.formats import de.leanovate.swaggercheck.schema.model.JsonPath import org.scalatest.{MustMatchers, WordSpec} class StringFormatsSpec extends WordSpec with MustMatchers { "URL string format" should { val format = GeneratableStringFormats.defaultFormats("url") "be valid for urls" in { format.validate(JsonPath(), "http://localhost/something").isSuccess mustBe true format.validate(JsonPath(), "http://localhost:8080/something?query=param").isSuccess mustBe true } "fail for non-urls" in { format.validate(JsonPath(), "something").isSuccess mustBe false } } "URI string format" should { val format = GeneratableStringFormats.defaultFormats("uri") "be valid for uris" in { format.validate(JsonPath(), "/something").isSuccess mustBe true format.validate(JsonPath(), "http://localhost:8080/something?query=param").isSuccess mustBe true } "fail for non-uris" in { format.validate(JsonPath(), ":?something").isSuccess mustBe false } } "UUID string format" should { val format = GeneratableStringFormats.defaultFormats("uuid") "be valid for uuids" in { format.validate(JsonPath(), "2df6e079-4028-4aa5-9bdb-bb59a314cdad").isSuccess mustBe true format.validate(JsonPath(), "864C67DF-51BB-4688-8A5B-105EC5FDD1D2").isSuccess mustBe true } "fail for non-uuids" in { format.validate(JsonPath(), "864C67DF-51BB-4688").isSuccess mustBe false } } "Email string format" should { val format = GeneratableStringFormats.defaultFormats("email") "be valid for emails" in { format.validate(JsonPath(), "[email protected]").isSuccess mustBe true format.validate(JsonPath(), "[email protected]").isSuccess mustBe true } "fail for non-emails" in { format.validate(JsonPath(), "someone").isSuccess mustBe false } } "Date string format" should { val format = GeneratableStringFormats.defaultFormats("date") "be valid for dates" in { format.validate(JsonPath(), "1856-12-20").isSuccess mustBe true format.validate(JsonPath(), "2320-01-30").isSuccess mustBe true } "fail for non-dates" in { format.validate(JsonPath(), "23200130").isSuccess mustBe false format.validate(JsonPath(), "2320-01-50").isSuccess mustBe false } } "DateTime string format" should { val format = GeneratableStringFormats.defaultFormats("date-time") "be valid for datetimes" in { format.validate(JsonPath(), "1856-12-20T12:34:56").isSuccess mustBe true format.validate(JsonPath(), "2320-01-30T12:34:56.123").isSuccess mustBe true format.validate(JsonPath(), "1856-12-20T12:34:56Z").isSuccess mustBe true format.validate(JsonPath(), "2320-01-30T12:34:56.123Z").isSuccess mustBe true format.validate(JsonPath(), "1856-12-20T12:34:56+01:00").isSuccess mustBe true format.validate(JsonPath(), "2320-01-30T12:34:56.123+01:00").isSuccess mustBe true } "fail for non-datetimes" in { format.validate(JsonPath(), "2320013012:34:56").isSuccess mustBe false format.validate(JsonPath(), "2320-01-5012:34:56").isSuccess mustBe false } } }
Example 71
Source File: IntegerFormatsSpec.scala From swagger-check with MIT License | 5 votes |
package de.leanovate.swaggercheck.schema.gen.formats import de.leanovate.swaggercheck.schema.model.JsonPath import de.leanovate.swaggercheck.schema.model.formats.IntegerFormats import org.scalatest.{MustMatchers, WordSpec} class IntegerFormatsSpec extends WordSpec with MustMatchers { "Int32 format" should { val format = GeneratableIntegerFormats.defaultFormats("int32") "fail for numbers out of 32-bit range" in { format.validate(JsonPath(), BigInt(Int.MaxValue) + BigInt(1)).isSuccess mustBe false format.validate(JsonPath(), BigInt(Int.MaxValue)).isSuccess mustBe true format.validate(JsonPath(), BigInt(Int.MinValue) - BigInt(1)).isSuccess mustBe false format.validate(JsonPath(), BigInt(Int.MinValue)).isSuccess mustBe true } } "Int64 format" should { val format = GeneratableIntegerFormats.defaultFormats("int64") "fail for numbers out of 64-bit range" in { format.validate(JsonPath(), BigInt(Long.MaxValue) + BigInt(1)).isSuccess mustBe false format.validate(JsonPath(), BigInt(Long.MaxValue)).isSuccess mustBe true format.validate(JsonPath(), BigInt(Long.MinValue) - BigInt(1)).isSuccess mustBe false format.validate(JsonPath(), BigInt(Long.MinValue)).isSuccess mustBe true } } }
Example 72
Source File: NumberFormatsSpec.scala From swagger-check with MIT License | 5 votes |
package de.leanovate.swaggercheck.schema.gen.formats import de.leanovate.swaggercheck.schema.model.JsonPath import org.scalatest.{MustMatchers, WordSpec} class NumberFormatsSpec extends WordSpec with MustMatchers { "Float format" should { val format = GeneratableNumberFormats.defaultFormats("float") "fail for numbers out of range" in { format.validate(JsonPath(), BigDecimal.decimal(Float.MaxValue) + BigDecimal(1)).isSuccess mustBe false format.validate(JsonPath(), BigDecimal.decimal(Float.MaxValue)).isSuccess mustBe true format.validate(JsonPath(), BigDecimal.decimal(Float.MinValue) - BigDecimal(1)).isSuccess mustBe false format.validate(JsonPath(), BigDecimal.decimal(Float.MinValue)).isSuccess mustBe true } } "Double format" should { val format = GeneratableNumberFormats.defaultFormats("double") "fail for numbers out of range" in { format.validate(JsonPath(), BigDecimal.decimal(Double.MaxValue) + BigDecimal(1)).isSuccess mustBe false format.validate(JsonPath(), BigDecimal.decimal(Double.MaxValue)).isSuccess mustBe true format.validate(JsonPath(), BigDecimal.decimal(Double.MinValue) - BigDecimal(1)).isSuccess mustBe false format.validate(JsonPath(), BigDecimal.decimal(Double.MinValue)).isSuccess mustBe true } } }
Example 73
Source File: ValidatingReadsSpec.scala From swagger-check with MIT License | 5 votes |
package de.leanovate.swaggercheck.schema.play import de.leanovate.swaggercheck.schema.model.DefaultSchema import de.leanovate.swaggercheck.schema.play.Implicits._ import de.leanovate.swaggercheck.schema.play.model.ProductModel import org.scalatest.{MustMatchers, WordSpec} import play.api.libs.json.{JsError, Json, Reads} class ValidatingReadsSpec extends WordSpec with MustMatchers { val schema: DefaultSchema = Json .parse(getClass.getClassLoader.getResourceAsStream("schema/simple1.json")) .as[DefaultSchema] val atLeastOneTagRead: Reads[Seq[ProductModel]] = ValidatingReads.validating[Seq[ProductModel]](schema) "ValidatingReads" should { "reject invalid json input" in { val json = Json.parse("""[ | { | "id": 12345678, | "name": "thename", | "price": 1234.67, | "tags": [] | } |]""".stripMargin) val result = json.validate(atLeastOneTagRead) result mustBe a[JsError] } } }
Example 74
Source File: DefinitionFormatsSpec.scala From swagger-check with MIT License | 5 votes |
package de.leanovate.swaggercheck.schema.play import de.leanovate.swaggercheck.schema.model.{Definition, ObjectDefinition, StringDefinition} import org.scalatest.{MustMatchers, WordSpec} import play.api.libs.json.{JsSuccess, Json} import de.leanovate.swaggercheck.schema.play.Implicits._ class DefinitionFormatsSpec extends WordSpec with MustMatchers { "DefinitionFormats" should { "deserialize object_definition" in { val json = Json.parse(getClass.getClassLoader.getResourceAsStream("object_definition.json")) val JsSuccess(definition, _) = json.validate[Definition] val ObjectDefinition(required, properties, additionalProperties) = definition required mustBe Some(Set("field1")) properties mustBe Some(Map("field1" -> StringDefinition(None, None, None, None, None))) additionalProperties mustBe Left(true) } } }
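The object_definition.json test resource referenced above is not reproduced on this page. A hypothetical version consistent with the assertions (one required string field, no explicit additionalProperties) might look like the sketch below; the actual resource in the swagger-check repository may differ.

// Hypothetical contents of object_definition.json, for orientation only.
val objectDefinitionJson =
  """{
    |  "type": "object",
    |  "required": ["field1"],
    |  "properties": {
    |    "field1": { "type": "string" }
    |  }
    |}""".stripMargin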
Example 75
Source File: FutureResultsSpec.scala From swagger-check with MIT License | 5 votes |
package de.leanovate.swaggercheck.playhelper import org.scalatest.{MustMatchers, WordSpec} import play.api.mvc.Results import scala.concurrent.Future class FutureResultsSpec extends WordSpec with MustMatchers { "FutureResults" should { "extract data from a play Future[Result]" in { val result = Results.Status(202)("{}").withHeaders("some" -> "header", "something" -> "else") val futureResult = Future.successful(result) FutureResults.responseExtractor.status(futureResult) mustBe 202 FutureResults.responseExtractor.body(futureResult) mustBe "{}" FutureResults.responseExtractor.headers(futureResult) mustBe Map( "some" -> "header", "something" -> "else") } } }
Example 76
Source File: FakeRequestsSpec.scala From swagger-check with MIT License | 5 votes |
package de.leanovate.swaggercheck.playhelper import de.leanovate.swaggercheck.playhelper import de.leanovate.swaggercheck.shrinkable.CheckJsObject import org.scalatest.{MustMatchers, WordSpec} class FakeRequestsSpec extends WordSpec with MustMatchers { "FakeRequests" should { "create an empty FakeRequest" in { val request = playhelper.requestCreator.createEmpty("GET", "/the/uri", Seq("header1" -> "value1", "header2" -> "value2")) request.method mustBe "GET" request.uri mustBe "/the/uri" request.headers.get("header1") mustBe Some("value1") request.headers.get("header2") mustBe Some("value2") } "create a FakeRequest with body" in { val request = playhelper.requestCreator.createJson("POST", "/the/uri", Seq("header1" -> "value1", "header2" -> "value2"), CheckJsObject.empty) request.method mustBe "POST" request.uri mustBe "/the/uri" request.headers.get("header1") mustBe Some("value1") request.headers.get("header2") mustBe Some("value2") request.body mustBe "{}" } } }
Example 77
Source File: OperationResponseSpec.scala From swagger-check with MIT License | 5 votes |
package de.leanovate.swaggercheck.schema import de.leanovate.swaggercheck.SwaggerChecks import de.leanovate.swaggercheck.schema.model.{Definition, JsonPath, ValidationResult} import de.leanovate.swaggercheck.shrinkable.{CheckJsString, CheckJsValue} import org.mockito.ArgumentMatchers._ import org.mockito.Mockito._ import org.scalatest.{MustMatchers, WordSpec} import org.scalatestplus.mockito.MockitoSugar class OperationResponseSpec extends WordSpec with MustMatchers with MockitoSugar { "OperationResponse" should { "verify response body" in { val swaggerChecks = mock[SwaggerChecks] val bodySchema = mock[Definition] val response = OperationResponse(Some(bodySchema), Seq.empty) when(bodySchema.validate(any(), any(), any())(any())).thenReturn(ValidationResult.success) response.verify(swaggerChecks, Map.empty, "{}").isSuccess mustBe true verify(bodySchema).validate(swaggerChecks, JsonPath(), CheckJsValue.parse("{}"))(CheckJsValue.Adapter) } "verify response headers" in { val swaggerChecks = mock[SwaggerChecks] val headerSchema = mock[Definition] val response = OperationResponse(None, Seq("some header" -> headerSchema)) when(headerSchema.validate(any(), any(), any())(any())).thenReturn(ValidationResult.success) response.verify(swaggerChecks, Map.empty, "{}").isSuccess mustBe true verifyZeroInteractions(headerSchema) response.verify(swaggerChecks, Map("some header" -> "something"), "{}").isSuccess mustBe true verify(headerSchema).validate[CheckJsValue](swaggerChecks, JsonPath(), CheckJsString.formatted("something"))(CheckJsValue.Adapter) } } }
Example 78
Source File: SchemaModuleSpec.scala From swagger-check with MIT License | 5 votes |
package de.leanovate.swaggercheck.schema.jackson import com.fasterxml.jackson.databind.ObjectMapper import com.fasterxml.jackson.module.scala.DefaultScalaModule import de.leanovate.swaggercheck.schema.model.{StringDefinition, ObjectDefinition, Definition} import org.scalatest.{MustMatchers, WordSpec} class SchemaModuleSpec extends WordSpec with MustMatchers { val mapper = new ObjectMapper().registerModule(DefaultScalaModule).registerModule(JsonSchemaModule) "SchemaModule" should { "deserialize object_definition" in { val ObjectDefinition(required, properties, additionalProperties) = mapper.readValue(getClass.getClassLoader.getResource("object_definition.json"), classOf[Definition]) required mustBe Some(Set("field1")) properties mustBe Some(Map("field1" -> StringDefinition(None, None, None, None, None))) additionalProperties mustBe Left(true) } } }
Example 79
Source File: ArrayDefinitionSpec.scala From swagger-check with MIT License | 5 votes |
package de.leanovate.swaggercheck.schema.model import org.mockito.Mockito._ import org.scalatest.{MustMatchers, WordSpec} import org.scalatestplus.mockito.MockitoSugar class ArrayDefinitionSpec extends WordSpec with MockitoSugar with MustMatchers { "ArrayDefinition" should { "accept any array if no item definition is set" in { val path = JsonPath("jsonpath") val node = TestNode(array = Some(Seq(TestNode(), TestNode()))) val schema = mock[Schema] val definition = ArrayDefinition(None, None, None) val result = definition.validate(schema, path, node) result mustBe ValidationSuccess } "succeed if item definition succeeds on all elements" in { val path = JsonPath("jsonpath") val item1 = TestNode() val item2 = TestNode() val node = TestNode(array = Some(Seq(item1, item2))) val schema = mock[Schema] val itemDefinition = mock[Definition] when(itemDefinition.validate(schema, path.index(0), item1)).thenReturn(ValidationSuccess) when(itemDefinition.validate(schema, path.index(1), item2)).thenReturn(ValidationSuccess) val definition = ArrayDefinition(None, None, Some(itemDefinition)) definition.validate(schema, path, node) mustBe ValidationSuccess verify(itemDefinition).validate(schema, path.index(0), item1) verify(itemDefinition).validate(schema, path.index(1), item2) } "fail if item definition fails on one element" in { val path = JsonPath("jsonpath") val item1 = TestNode() val item2 = TestNode() val node = TestNode(array = Some(Seq(item1, item2))) val schema = mock[Schema] val itemDefinition = mock[Definition] when(itemDefinition.validate(schema, path.index(0), item1)).thenReturn(ValidationResult.error("error")) when(itemDefinition.validate(schema, path.index(1), item2)).thenReturn(ValidationSuccess) val definition = ArrayDefinition(None, None, Some(itemDefinition)) val ValidationFailure(result) = definition.validate(schema, path, node) result must have size 1 result.head mustBe "error" } "fail if array has less then minItems" in { val path = JsonPath("jsonpath") val node = TestNode(array = Some(Seq(TestNode(), TestNode()))) val schema = mock[Schema] val definition = ArrayDefinition(Some(3), None, None) val ValidationFailure(result) = definition.validate(schema, path, node) result must have size 1 result.head must endWith("should have at least 3 items in path jsonpath") } "fail if array has more then maxItems" in { val path = JsonPath("jsonpath") val node = TestNode(array = Some(Seq(TestNode(), TestNode()))) val schema = mock[Schema] val definition = ArrayDefinition(None, Some(1), None) val ValidationFailure(result) = definition.validate(schema, path, node) result must have size 1 result.head must endWith("should have at least 1 items in path jsonpath") } "fail validation on everything that is not an array" in { val path = JsonPath("jsonpath") val node = TestNode() val schema = mock[Schema] val definition = ArrayDefinition(None, None, None) val ValidationFailure(result) = definition.validate(schema, path, node) result must have size 1 result.head must endWith("should be an array in path jsonpath") } } }
Example 80
Source File: StringFormatsSpec.scala From swagger-check with MIT License | 5 votes |
package de.leanovate.swaggercheck.schema.model.formats

import de.leanovate.swaggercheck.schema.model.JsonPath
import org.scalatest.{MustMatchers, WordSpec}

class StringFormatsSpec extends WordSpec with MustMatchers {
  "URL string format" should {
    val format = StringFormats.defaultFormats("url")

    "be valid for urls" in {
      format.validate(JsonPath(), "http://localhost/something").isSuccess mustBe true
      format.validate(JsonPath(), "http://localhost:8080/something?query=param").isSuccess mustBe true
    }

    "fail for non-urls" in {
      format.validate(JsonPath(), "something").isSuccess mustBe false
    }
  }

  "URI string format" should {
    val format = StringFormats.defaultFormats("uri")

    "be valid for uris" in {
      format.validate(JsonPath(), "/something").isSuccess mustBe true
      format.validate(JsonPath(), "http://localhost:8080/something?query=param").isSuccess mustBe true
    }

    "fail for non-uris" in {
      format.validate(JsonPath(), ":?something").isSuccess mustBe false
    }
  }

  "UUID string format" should {
    val format = StringFormats.defaultFormats("uuid")

    "be valid for uuids" in {
      format.validate(JsonPath(), "2df6e079-4028-4aa5-9bdb-bb59a314cdad").isSuccess mustBe true
      format.validate(JsonPath(), "864C67DF-51BB-4688-8A5B-105EC5FDD1D2").isSuccess mustBe true
    }

    "fail for non-uuids" in {
      format.validate(JsonPath(), "864C67DF-51BB-4688").isSuccess mustBe false
    }
  }

  "Email string format" should {
    val format = StringFormats.defaultFormats("email")

    "be valid for emails" in {
      // The original example addresses were obfuscated in the page source;
      // any syntactically valid address serves the same purpose here.
      format.validate(JsonPath(), "someone@example.com").isSuccess mustBe true
      format.validate(JsonPath(), "first.last@sub.example.org").isSuccess mustBe true
    }

    "fail for non-emails" in {
      format.validate(JsonPath(), "someone").isSuccess mustBe false
    }
  }

  "Date string format" should {
    val format = StringFormats.defaultFormats("date")

    "be valid for dates" in {
      format.validate(JsonPath(), "1856-12-20").isSuccess mustBe true
      format.validate(JsonPath(), "2320-01-30").isSuccess mustBe true
    }

    "fail for non-dates" in {
      format.validate(JsonPath(), "23200130").isSuccess mustBe false
      format.validate(JsonPath(), "2320-01-50").isSuccess mustBe false
    }
  }

  "DateTime string format" should {
    val format = StringFormats.defaultFormats("date-time")

    "be valid for datetimes" in {
      format.validate(JsonPath(), "1856-12-20T12:34:56").isSuccess mustBe true
      format.validate(JsonPath(), "2320-01-30T12:34:56.123").isSuccess mustBe true
      format.validate(JsonPath(), "1856-12-20T12:34:56Z").isSuccess mustBe true
      format.validate(JsonPath(), "2320-01-30T12:34:56.123Z").isSuccess mustBe true
      format.validate(JsonPath(), "1856-12-20T12:34:56+01:00").isSuccess mustBe true
      format.validate(JsonPath(), "2320-01-30T12:34:56.123+01:00").isSuccess mustBe true
    }

    "fail for non-datetimes" in {
      format.validate(JsonPath(), "2320013012:34:56").isSuccess mustBe false
      format.validate(JsonPath(), "2320-01-5012:34:56").isSuccess mustBe false
    }
  }
}
Example 81
Source File: IntegerFormatsSpec.scala From swagger-check with MIT License | 5 votes |
package de.leanovate.swaggercheck.schema.model.formats import de.leanovate.swaggercheck.schema.model.JsonPath import org.scalatest.{MustMatchers, WordSpec} class IntegerFormatsSpec extends WordSpec with MustMatchers { "Int32 format" should { val format = IntegerFormats.defaultFormats("int32") "fail for numbers out of 32-bit range" in { format.validate(JsonPath(), BigInt(Int.MaxValue) + BigInt(1)).isSuccess mustBe false format.validate(JsonPath(), BigInt(Int.MaxValue)).isSuccess mustBe true format.validate(JsonPath(), BigInt(Int.MinValue) - BigInt(1)).isSuccess mustBe false format.validate(JsonPath(), BigInt(Int.MinValue)).isSuccess mustBe true } } "Int64 format" should { val format = IntegerFormats.defaultFormats("int64") "fail for numbers out of 64-bit range" in { format.validate(JsonPath(), BigInt(Long.MaxValue) + BigInt(1)).isSuccess mustBe false format.validate(JsonPath(), BigInt(Long.MaxValue)).isSuccess mustBe true format.validate(JsonPath(), BigInt(Long.MinValue) - BigInt(1)).isSuccess mustBe false format.validate(JsonPath(), BigInt(Long.MinValue)).isSuccess mustBe true } } }
Example 82
Source File: NumberFormatsSpec.scala From swagger-check with MIT License | 5 votes |
package de.leanovate.swaggercheck.schema.model.formats import de.leanovate.swaggercheck.schema.model.JsonPath import org.scalatest.{MustMatchers, WordSpec} class NumberFormatsSpec extends WordSpec with MustMatchers { "Float format" should { val format = NumberFormats.defaultFormats("float") "fail for numbers out of range" in { format.validate(JsonPath(), BigDecimal.decimal(Float.MaxValue) + BigDecimal(1)).isSuccess mustBe false format.validate(JsonPath(), BigDecimal.decimal(Float.MaxValue)).isSuccess mustBe true format.validate(JsonPath(), BigDecimal.decimal(Float.MinValue) - BigDecimal(1)).isSuccess mustBe false format.validate(JsonPath(), BigDecimal.decimal(Float.MinValue)).isSuccess mustBe true } } "Double format" should { val format = NumberFormats.defaultFormats("double") "fail for numbers out of range" in { format.validate(JsonPath(), BigDecimal.decimal(Double.MaxValue) + BigDecimal(1)).isSuccess mustBe false format.validate(JsonPath(), BigDecimal.decimal(Double.MaxValue)).isSuccess mustBe true format.validate(JsonPath(), BigDecimal.decimal(Double.MinValue) - BigDecimal(1)).isSuccess mustBe false format.validate(JsonPath(), BigDecimal.decimal(Double.MinValue)).isSuccess mustBe true } } }
Example 83
Source File: IntegerDefinitionSpec.scala From swagger-check with MIT License | 5 votes |
package de.leanovate.swaggercheck.schema.model import de.leanovate.swaggercheck.schema.model.formats.ValueFormat import org.mockito.Mockito._ import org.scalatestplus.mockito.MockitoSugar import org.scalatest.{MustMatchers, WordSpec} class IntegerDefinitionSpec extends WordSpec with MockitoSugar with MustMatchers { "IntegerDefinition" should { "accept any integer if no format or range is defined" in { val path = JsonPath("jsonpath") val node = TestNode(integer = Some(BigInt(Long.MaxValue) + 12345)) val schema = mock[Schema] val definition = IntegerDefinition(None, None, None) definition.validate(schema, path, node) mustBe ValidationSuccess } "accept values that match the defined format" in { val path = JsonPath("jsonpath") val node = TestNode(integer = Some(BigInt(12345))) val schema = mock[Schema] val format = mock[ValueFormat[BigInt]] when(schema.findIntegerFormat("theformat")).thenReturn(Some(format)) when(format.validate(path, BigInt(12345))).thenReturn(ValidationResult.success) val definition = IntegerDefinition(Some("theformat"), None, None) definition.validate(schema, path, node) mustBe ValidationSuccess verify(schema).findIntegerFormat("theformat") verify(format).validate(path, BigInt(12345)) } "fail validation if value is less than minimum" in { val path = JsonPath("jsonpath") val node = TestNode(integer = Some(BigInt(12345))) val schema = mock[Schema] val definition = IntegerDefinition(None, Some(BigInt(123456)), None) val ValidationFailure(result) = definition.validate(schema, path, node) result must have size 1 result.head must endWith("has to be greater than 123456 in path jsonpath") } "fail validation if value is greater than maximum" in { val path = JsonPath("jsonpath") val node = TestNode(integer = Some(BigInt(123456))) val schema = mock[Schema] val definition = IntegerDefinition(None, None, Some(BigInt(12345))) val ValidationFailure(result) = definition.validate(schema, path, node) result must have size 1 result.head must endWith("has to be less than 12345 in path jsonpath") } "fail validation on everything that is not an integer" in { val path = JsonPath("jsonpath") val node = TestNode() val schema = mock[Schema] val definition = IntegerDefinition(None, None, None) val ValidationFailure(result) = definition.validate(schema, path, node) result must have size 1 result.head must endWith("should be an integer in path jsonpath") } } }
Example 84
Source File: ReferenceDefinitionSpec.scala From swagger-check with MIT License | 5 votes |
package de.leanovate.swaggercheck.schema.model import org.scalatest.{MustMatchers, WordSpec} import org.mockito.Mockito._ import org.scalatestplus.mockito.MockitoSugar class ReferenceDefinitionSpec extends WordSpec with MockitoSugar with MustMatchers { "ReferenceDefinition" should { "delegate validation to referenced definition" in { val path = JsonPath("jsonpath") val node = TestNode() val schema = mock[Schema] val referencedDefinition = mock[Definition] when(schema.findByRef("reference")).thenReturn(Some(referencedDefinition)) when(referencedDefinition.validate(schema, path, node)).thenReturn(ValidationResult.error("error1")) val definition = ReferenceDefinition("reference") val ValidationFailure(result) = definition.validate(schema, path, node) result must have size 1 result.head mustBe "error1" } "fail validation if referenced definition does not exists" in { val path = JsonPath("jsonpath") val node = TestNode() val schema = mock[Schema] when(schema.findByRef("reference")).thenReturn(None) val definition = ReferenceDefinition("reference") val ValidationFailure(result) = definition.validate(schema, path, node) result must have size 1 result.head mustBe "Referenced definition does not exists: reference" } } }
Example 85
Source File: NumberDefinitionSpec.scala From swagger-check with MIT License | 5 votes |
package de.leanovate.swaggercheck.schema.model import de.leanovate.swaggercheck.schema.model.formats.ValueFormat import org.mockito.Mockito._ import org.scalatest.{MustMatchers, WordSpec} import org.scalatestplus.mockito.MockitoSugar class NumberDefinitionSpec extends WordSpec with MockitoSugar with MustMatchers { "NumberDefinition" should { "accept any integer if no format or range is defined" in { val path = JsonPath("jsonpath") val node = TestNode(number = Some(BigDecimal(Long.MaxValue) + 12345)) val schema = mock[Schema] val definition = NumberDefinition(None, None, None) definition.validate(schema, path, node) mustBe ValidationSuccess } "accept values that match the defined format" in { val path = JsonPath("jsonpath") val node = TestNode(number = Some(BigDecimal(12345.67))) val schema = mock[Schema] val format = mock[ValueFormat[BigDecimal]] when(schema.findNumberFormat("theformat")).thenReturn(Some(format)) when(format.validate(path, BigDecimal(12345.67))).thenReturn(ValidationResult.success) val definition = NumberDefinition(Some("theformat"), None, None) definition.validate(schema, path, node) mustBe ValidationSuccess verify(schema).findNumberFormat("theformat") verify(format).validate(path, BigDecimal(12345.67)) } "fail validation if value is less than minimum" in { val path = JsonPath("jsonpath") val node = TestNode(number = Some(BigDecimal(12345.6))) val schema = mock[Schema] val definition = NumberDefinition(None, Some(BigDecimal(123456.7)), None) val ValidationFailure(result) = definition.validate(schema, path, node) result must have size 1 result.head must endWith("has to be greater than 123456.7 in path jsonpath") } "fail validation if value is greater than maximum" in { val path = JsonPath("jsonpath") val node = TestNode(number = Some(BigDecimal(123456.7))) val schema = mock[Schema] val definition = NumberDefinition(None, None, Some(BigDecimal(12345.6))) val ValidationFailure(result) = definition.validate(schema, path, node) result must have size 1 result.head must endWith("has to be less than 12345.6 in path jsonpath") } "fail validation on everything that is not an integer" in { val path = JsonPath("jsonpath") val node = TestNode() val schema = mock[Schema] val definition = NumberDefinition(None, None, None) val ValidationFailure(result) = definition.validate(schema, path, node) result must have size 1 result.head must endWith("should be a number in path jsonpath") } } }
Example 86
Source File: EmptyDefinitionSpec.scala From swagger-check with MIT License | 5 votes |
package de.leanovate.swaggercheck.schema.model import org.scalatestplus.mockito.MockitoSugar import org.scalatest.{MustMatchers, WordSpec} class EmptyDefinitionSpec extends WordSpec with MockitoSugar with MustMatchers { "EmptyDefinition" should { "validate anything" in { val path = JsonPath("jsonpath") val node = TestNode() val schema = mock[Schema] val definition = EmptyDefinition definition.validate(schema, path, node) mustBe ValidationSuccess } } }
Example 87
Source File: OneOfDefinitionSpec.scala From swagger-check with MIT License | 5 votes |
package de.leanovate.swaggercheck.schema.model import org.mockito.Mockito._ import org.scalatestplus.mockito.MockitoSugar import org.scalatest.{MustMatchers, WordSpec} class OneOfDefinitionSpec extends WordSpec with MockitoSugar with MustMatchers { "OneOfDefinition" should { "succeed validation if one child succeed" in { val definition1 = mock[Definition] val definition2 = mock[Definition] val definition3 = mock[Definition] val schema = mock[Schema] val path = JsonPath("path") val node = TestNode() when(definition1.validate(schema, path, node)).thenReturn(ValidationResult.error("error1")) when(definition2.validate(schema, path, node)).thenReturn(ValidationResult.success) when(definition3.validate(schema, path, node)).thenReturn(ValidationResult.error("error2")) val definition = OneOfDefinition(Seq(definition1, definition2, definition3)) definition.validate(schema, path, node) mustBe ValidationSuccess verify(definition1).validate(schema, path, node) verify(definition2).validate(schema, path, node) verify(definition3).validate(schema, path, node) } "fail validation if one child fails" in { val definition1 = mock[Definition] val definition2 = mock[Definition] val definition3 = mock[Definition] val schema = mock[Schema] val path = JsonPath("path") val node = TestNode() when(definition1.validate(schema, path, node)).thenReturn(ValidationResult.error("error1")) when(definition2.validate(schema, path, node)).thenReturn(ValidationResult.error("error2")) when(definition3.validate(schema, path, node)).thenReturn(ValidationResult.error("error3")) val definition = OneOfDefinition(Seq(definition1, definition2, definition3)) val ValidationFailure(result) = definition.validate(schema, path, node) result mustBe Seq("error1", "error2", "error3") } } }
Example 88
Source File: AllOfDefinitionSpec.scala From swagger-check with MIT License | 5 votes |
package de.leanovate.swaggercheck.schema.model import org.scalatestplus.mockito.MockitoSugar import org.scalatest.{MustMatchers, WordSpec} import org.mockito.Mockito._ class AllOfDefinitionSpec extends WordSpec with MockitoSugar with MustMatchers { "AllOfDefinition" should { "succeed validation if all children succeed" in { val definition1 = mock[Definition] val definition2 = mock[Definition] val definition3 = mock[Definition] val schema = mock[Schema] val path = JsonPath("path") val node = TestNode() when(definition1.validate(schema, path, node)).thenReturn(ValidationResult.success) when(definition2.validate(schema, path, node)).thenReturn(ValidationResult.success) when(definition3.validate(schema, path, node)).thenReturn(ValidationResult.success) val definition = AllOfDefinition(Seq(definition1, definition2, definition3)) definition.validate(schema, path, node) mustBe ValidationSuccess verify(definition1).validate(schema, path, node) verify(definition2).validate(schema, path, node) verify(definition3).validate(schema, path, node) } "fail validation if one child fails" in { val definition1 = mock[Definition] val definition2 = mock[Definition] val definition3 = mock[Definition] val schema = mock[Schema] val path = JsonPath("path") val node = TestNode() when(definition1.validate(schema, path, node)).thenReturn(ValidationResult.success) when(definition2.validate(schema, path, node)).thenReturn(ValidationResult.error("error")) when(definition3.validate(schema, path, node)).thenReturn(ValidationResult.success) val definition = AllOfDefinition(Seq(definition1, definition2, definition3)) val result = definition.validate(schema, path, node) result mustBe ValidationResult.error("error") } } }
Example 89
Source File: BooleanDefinitionSpec.scala From swagger-check with MIT License | 5 votes |
package de.leanovate.swaggercheck.schema.model import org.scalatestplus.mockito.MockitoSugar import org.scalatest.{MustMatchers, WordSpec} class BooleanDefinitionSpec extends WordSpec with MockitoSugar with MustMatchers { "BooleanDefinition" should { "succeed on any boolean value" in { val path = JsonPath("jsonpath") val node = TestNode(boolean = Some(true)) val schema = mock[Schema] val definition = BooleanDefinition definition.validate(schema, path, node) mustBe ValidationSuccess } "fail validation on everything that is not a boolean" in { val path = JsonPath("jsonpath") val node = TestNode() val schema = mock[Schema] val definition = BooleanDefinition val ValidationFailure(result) = definition.validate(schema, path, node) result must have size 1 result.head must endWith("should be a boolean in path jsonpath") } } }
Example 90
Source File: JsonPathSpec.scala From swagger-check with MIT License | 5 votes |
package de.leanovate.swaggercheck.schema.model import org.scalatest.{MustMatchers, WordSpec} class JsonPathSpec extends WordSpec with MustMatchers { "JsonPath" should { "has toString" in { val jsonPath = JsonPath("the.path") jsonPath.toString mustBe "the.path" } "concat fields names and indexes" in { val root = JsonPath() val sub1 = root.field("field1") val sub2 = sub1.field("field2") val sub3 = sub2.index(10) val sub4 = sub3.field("field3") root.toString mustBe "" sub1.toString mustBe "field1" sub2.toString mustBe "field1.field2" sub3.toString mustBe "field1.field2[10]" sub4.toString mustBe "field1.field2[10].field3" } } }
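The path-building behaviour exercised above is small enough to sketch. A minimal JsonPath that would satisfy this spec could look as follows; it is a sketch for orientation only, not the actual de.leanovate.swaggercheck implementation.

// Sketch only: a dotted path with [index] suffixes, as the spec above expects.
case class JsonPath(path: String = "") {
  def field(name: String): JsonPath =
    if (path.isEmpty) JsonPath(name) else JsonPath(s"$path.$name")

  def index(idx: Int): JsonPath = JsonPath(s"$path[$idx]")

  override def toString: String = path
}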
Example 91
Source File: PaginatedQueryValidatorSpec.scala From playsonify with MIT License | 5 votes |
package com.alexitc.playsonify.validators import com.alexitc.playsonify.models.pagination.{Limit, Offset, PaginatedQuery, PaginatedQueryError} import org.scalactic.{Bad, Every, Good} import org.scalatest.{MustMatchers, WordSpec} class PaginatedQueryValidatorSpec extends WordSpec with MustMatchers { val validator = new PaginatedQueryValidator "validate" should { "succeed on valid query" in { val query = PaginatedQuery(Offset(0), Limit(100)) val maxLimit = 100 val expected = Good(query) val result = validator.validate(query, maxLimit) result mustEqual expected } "fail on offset < 0" in { val query = PaginatedQuery(Offset(-1), Limit(1)) val maxLimit = 100 val expected = Bad(PaginatedQueryError.InvalidOffset).accumulating val result = validator.validate(query, maxLimit) result mustEqual expected } "fail on limit = 0" in { val query = PaginatedQuery(Offset(0), Limit(0)) val maxLimit = 100 val expected = Bad(PaginatedQueryError.InvalidLimit(maxLimit)).accumulating val result = validator.validate(query, maxLimit) result mustEqual expected } "fail on limit > maxLimit" in { val query = PaginatedQuery(Offset(0), Limit(101)) val maxLimit = 100 val expected = Bad(PaginatedQueryError.InvalidLimit(maxLimit)).accumulating val result = validator.validate(query, maxLimit) result mustEqual expected } "accumulate errors when offset and limit are invalid" in { val query = PaginatedQuery(Offset(-1), Limit(101)) val maxLimit = 100 val expected = Bad(Every(PaginatedQueryError.InvalidOffset, PaginatedQueryError.InvalidLimit(maxLimit))) val result = validator.validate(query, maxLimit) result mustEqual expected } } }
Example 92
Source File: FieldOrderingParserSpec.scala From playsonify with MIT License | 5 votes |
package com.alexitc.playsonify.parsers import com.alexitc.playsonify.models.ordering.{FieldOrdering, OrderingCondition, OrderingError, OrderingQuery} import org.scalactic.{Bad, Every, Good} import org.scalatest.{MustMatchers, WordSpec} class FieldOrderingParserSpec extends WordSpec with MustMatchers { import FieldOrderingParserSpec._ val parser = new CustomFieldParser "from" should { "parse an empty query to default ordering" in { val query = OrderingQuery("") val expected = FieldOrdering(Id, OrderingCondition.AscendingOrder) val result = parser.from(query) result mustEqual Good(expected) } "parse a field without ordering condition" in { val query = OrderingQuery("id") val expected = FieldOrdering(Id, OrderingCondition.AscendingOrder) val result = parser.from(query) result mustEqual Good(expected) } "parse a field with ordering condition" in { val query = OrderingQuery("name:desc") val expected = FieldOrdering(Name, OrderingCondition.DescendingOrder) val result = parser.from(query) result mustEqual Good(expected) } "reject unknown field" in { val query = OrderingQuery("age:desc") val expected = Bad(OrderingError.UnknownField).accumulating val result = parser.from(query) result mustEqual expected } "reject unknown ordering condition" in { val query = OrderingQuery("id:descending") val expected = Bad(OrderingError.InvalidCondition).accumulating val result = parser.from(query) result mustEqual expected } "accumulate errors on unknown field and ordering condition" in { val query = OrderingQuery("age:descending") val expected = Bad(Every(OrderingError.UnknownField, OrderingError.InvalidCondition)) val result = parser.from(query) result mustEqual expected } "reject bad ordering format" in { val query = OrderingQuery("id:desc:x") val expected = Bad(OrderingError.InvalidFormat).accumulating val result = parser.from(query) result mustEqual expected } } } object FieldOrderingParserSpec { sealed abstract class CustomField(val string: String) case object Id extends CustomField("id") case object Name extends CustomField("name") class CustomFieldParser extends FieldOrderingParser[CustomField] { override protected def defaultField: CustomField = Id override protected def parseField(unsafeField: String): Option[CustomField] = unsafeField match { case Id.string => Some(Id) case Name.string => Some(Name) case _ => None } } }
Example 93
Source File: ProxyMultiJvm.scala From 006877 with MIT License | 5 votes |
package aia.channels

// run with: multi-jvm:test-only aia.channels.ReliableProxySampleSpec
import org.scalatest.{WordSpecLike, BeforeAndAfterAll, MustMatchers}
import akka.testkit.ImplicitSender
import akka.actor.{Props, Actor}
import akka.remote.testkit.MultiNodeSpecCallbacks
import akka.remote.testkit.MultiNodeConfig
import akka.remote.testkit.MultiNodeSpec

trait STMultiNodeSpec extends MultiNodeSpecCallbacks
  with WordSpecLike
  with MustMatchers
  with BeforeAndAfterAll {

  override def beforeAll() = multiNodeSpecBeforeAll()

  override def afterAll() = multiNodeSpecAfterAll()
}

object ReliableProxySampleConfig extends MultiNodeConfig {
  val client = role("Client")
  val server = role("Server")
  testTransport(on = true)
}

class ReliableProxySampleSpecMultiJvmNode1 extends ReliableProxySample
class ReliableProxySampleSpecMultiJvmNode2 extends ReliableProxySample

import akka.remote.transport.ThrottlerTransportAdapter.Direction
import scala.concurrent.duration._
import concurrent.Await
import akka.contrib.pattern.ReliableProxy

class ReliableProxySample
  extends MultiNodeSpec(ReliableProxySampleConfig)
  with STMultiNodeSpec
  with ImplicitSender {

  import ReliableProxySampleConfig._

  def initialParticipants = roles.size

  "A MultiNodeSample" must {

    "wait for all nodes to enter a barrier" in {
      enterBarrier("startup")
    }

    "send to and receive from a remote node" in {
      runOn(client) {
        enterBarrier("deployed")
        val pathToEcho = node(server) / "user" / "echo"
        val echo = system.actorSelection(pathToEcho)
        val proxy = system.actorOf(
          ReliableProxy.props(pathToEcho, 500.millis), "proxy")

        proxy ! "message1"
        expectMsg("message1")

        Await.ready(
          testConductor.blackhole(client, server, Direction.Both),
          1 second)

        echo ! "DirectMessage"
        proxy ! "ProxyMessage"
        expectNoMsg(3 seconds)

        Await.ready(
          testConductor.passThrough(client, server, Direction.Both),
          1 second)

        expectMsg("ProxyMessage")

        echo ! "DirectMessage2"
        expectMsg("DirectMessage2")
      }

      runOn(server) {
        system.actorOf(Props(new Actor {
          def receive = {
            case msg: AnyRef => {
              sender() ! msg
            }
          }
        }), "echo")
        enterBarrier("deployed")
      }

      enterBarrier("finished")
    }
  }
}
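Specs like this only run through the sbt-multi-jvm plugin, which starts one JVM per MultiJvmNode class. A minimal build wiring is sketched below; the plugin version and project name are assumptions rather than the book's actual build definition.

// project/plugins.sbt (version is an assumption)
addSbtPlugin("com.typesafe.sbt" % "sbt-multi-jvm" % "0.4.0")

// build.sbt
import com.typesafe.sbt.SbtMultiJvm.MultiJvmKeys.MultiJvm

lazy val channels = (project in file("."))
  .enablePlugins(MultiJvmPlugin)
  .configs(MultiJvm)

With that in place the spec is started from sbt with multi-jvm:test-only aia.channels.ReliableProxySampleSpec, as the comment at the top of the file notes.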
Example 94
Source File: DeadLetterTest.scala From 006877 with MIT License | 5 votes |
package aia.channels import akka.testkit.{ ImplicitSender, TestProbe, TestKit } import akka.actor.{ PoisonPill, Props, DeadLetter, ActorSystem } import org.scalatest.{WordSpecLike, BeforeAndAfterAll, MustMatchers} import java.util.Date class DeadLetterTest extends TestKit(ActorSystem("DeadLetterTest")) with WordSpecLike with BeforeAndAfterAll with MustMatchers with ImplicitSender { override def afterAll() { system.terminate() } "DeadLetter" must { "catch messages send to deadLetters" in { val deadLetterMonitor = TestProbe() system.eventStream.subscribe( deadLetterMonitor.ref, classOf[DeadLetter]) val msg = new StateEvent(new Date(), "Connected") system.deadLetters ! msg val dead = deadLetterMonitor.expectMsgType[DeadLetter] dead.message must be(msg) dead.sender must be(testActor) dead.recipient must be(system.deadLetters) } "catch deadLetter messages send to deadLetters" in { val deadLetterMonitor = TestProbe() val actor = system.actorOf(Props[EchoActor], "echo") system.eventStream.subscribe( deadLetterMonitor.ref, classOf[DeadLetter]) val msg = new Order("me", "Akka in Action", 1) val dead = DeadLetter(msg, testActor, actor) system.deadLetters ! dead deadLetterMonitor.expectMsg(dead) system.stop(actor) } "catch messages send to terminated Actor" in { val deadLetterMonitor = TestProbe() system.eventStream.subscribe( deadLetterMonitor.ref, classOf[DeadLetter]) val actor = system.actorOf(Props[EchoActor], "echo") actor ! PoisonPill val msg = new Order("me", "Akka in Action", 1) actor ! msg val dead = deadLetterMonitor.expectMsgType[DeadLetter] dead.message must be(msg) dead.sender must be(testActor) dead.recipient must be(actor) } } }
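StateEvent, Order and EchoActor are defined elsewhere in the book's aia.channels sources and are not shown on this page. Hypothetical minimal versions, consistent with how the spec constructs and uses them, could look like this (the field names are guesses):

import java.util.Date
import akka.actor.Actor

// Hypothetical sketches only; the real definitions may differ.
case class StateEvent(time: Date, state: String)

case class Order(owner: String, product: String, number: Int)

class EchoActor extends Actor {
  def receive = {
    case msg => sender() ! msg
  }
}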
Example 95
Source File: ThroughputCPUTest.scala From 006877 with MIT License | 5 votes |
package aia.performance.throughput import akka.testkit.TestProbe import akka.actor.{Props, ActorSystem} import org.scalatest.{WordSpecLike, BeforeAndAfterAll, MustMatchers} import akka.routing.RoundRobinPool import com.typesafe.config.ConfigFactory import aia.performance.{ProcessCPURequest, SystemMessage, ProcessRequest} import concurrent.duration._ class ThroughputCPUTest extends WordSpecLike with BeforeAndAfterAll with MustMatchers { val configuration = ConfigFactory.load("performance/through") implicit val system = ActorSystem("ThroughputTest", configuration) "System" must { "fails to with cpu" in { val nrWorkers = 40 val nrMessages = nrWorkers * 40 val end = TestProbe() val workers = system.actorOf( RoundRobinPool(nrWorkers).props( Props(new ProcessCPURequest(250 millis, end.ref)).withDispatcher("my-dispatcher")), "Workers-cpu") val startTime = System.currentTimeMillis() for (i <- 0 until nrMessages) { workers ! new SystemMessage(startTime, 0, "") } val msg = end.receiveN(n = nrMessages, max = 9000 seconds).asInstanceOf[Seq[SystemMessage]] val endTime = System.currentTimeMillis() val total = endTime - startTime println("total process time %d Average=%d".format(total, total / nrMessages)) val grouped = msg.groupBy(_.id) grouped.map { case (key, listMsg) => (key, listMsg.foldLeft(0L) { (m, x) => math.max(m, x.duration) }) }.foreach(println(_)) Thread.sleep(1000) system.stop(workers) } } }
Example 96
Source File: ThroughputTest.scala From 006877 with MIT License | 5 votes |
package aia.performance.throughput import akka.testkit.TestProbe import akka.actor.{Props, ActorSystem} import org.scalatest.{WordSpecLike, BeforeAndAfterAll, MustMatchers} import akka.routing.RoundRobinPool import com.typesafe.config.ConfigFactory import aia.performance.{ProcessCPURequest, SystemMessage, ProcessRequest} import concurrent.duration._ class ThroughputTest extends WordSpecLike with BeforeAndAfterAll with MustMatchers { val configuration = ConfigFactory.load("performance/through") implicit val system = ActorSystem("ThroughputTest", configuration) "System" must { "fails to perform" in { val nrMessages = 99 val nrWorkers = 3 val statDuration = 2000 millis //((nrMessages * 10)+1000)/4 millis val end = TestProbe() val workers = system.actorOf( RoundRobinPool(nrWorkers).props(Props(new ProcessRequest(1 second, end.ref)).withDispatcher("my-dispatcher")), "Workers") val startTime = System.currentTimeMillis() for (i <- 0 until nrMessages) { workers ! new SystemMessage(startTime, 0, "") } val msg = end.receiveN(n = nrMessages, max = 9000 seconds).asInstanceOf[Seq[SystemMessage]] val endTime = System.currentTimeMillis() val total = endTime - startTime println("total process time %d Average=%d".format(total, total / nrMessages)) val grouped = msg.groupBy(_.id) grouped.map { case (key, listMsg) => (key, listMsg.foldLeft(0L) { (m, x) => math.max(m, x.duration) }) }.foreach(println(_)) Thread.sleep(1000) system.stop(workers) } } }
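Both throughput specs above load performance/through.conf and run their workers on a dispatcher named my-dispatcher. That file is not shown here; the block below sketches the kind of dispatcher entry it would need, with assumed example values, loaded via parseString purely for illustration.

import com.typesafe.config.ConfigFactory

// Assumed example values; the real performance/through.conf may tune these differently.
val throughputDispatcherConfig = ConfigFactory.parseString(
  """
    |my-dispatcher {
    |  type = Dispatcher
    |  executor = "fork-join-executor"
    |  fork-join-executor {
    |    parallelism-min = 4
    |    parallelism-max = 8
    |  }
    |  throughput = 1
    |}
  """.stripMargin)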
Example 97
Source File: MonitorMailboxTest.scala From 006877 with MIT License | 5 votes |
package aia.performance.monitor import akka.testkit.TestProbe import akka.actor.{ Props, Actor, ActorSystem } import org.scalatest.{WordSpecLike, BeforeAndAfterAll, MustMatchers} import concurrent.duration._ import com.typesafe.config.ConfigFactory class MonitorMailboxTest extends WordSpecLike with BeforeAndAfterAll with MustMatchers { val configuration = ConfigFactory.load("monitor/mailbox") implicit val system = ActorSystem("MonitorMailboxTest", configuration) override protected def afterAll(): Unit = { system.terminate() super.afterAll() } "mailbox" must { "send statistics with dispatcher" in { val statProbe = TestProbe() system.eventStream.subscribe( statProbe.ref, classOf[MailboxStatistics]) val testActor = system.actorOf(Props( new ProcessTestActor(1.second)) .withDispatcher("my-dispatcher"), "monitorActor") statProbe.send(testActor, "message") statProbe.send(testActor, "message2") statProbe.send(testActor, "message3") val stat = statProbe.expectMsgType[MailboxStatistics] println(stat) stat.queueSize must be(1) val stat2 = statProbe.expectMsgType[MailboxStatistics] println(stat2) stat2.queueSize must (be(2) or be(1)) val stat3 = statProbe.expectMsgType[MailboxStatistics] println(stat3) stat3.queueSize must (be(3) or be(2)) Thread.sleep(2000) system.stop(testActor) system.eventStream.unsubscribe(statProbe.ref) } "send statistics with default" in { val statProbe = TestProbe() system.eventStream.subscribe( statProbe.ref, classOf[MailboxStatistics]) val testActor = system.actorOf(Props( new ProcessTestActor(1.second)), "monitorActor2") statProbe.send(testActor, "message") statProbe.send(testActor, "message2") statProbe.send(testActor, "message3") val stat = statProbe.expectMsgType[MailboxStatistics] stat.queueSize must be(1) val stat2 = statProbe.expectMsgType[MailboxStatistics] stat2.queueSize must (be(2) or be(1)) val stat3 = statProbe.expectMsgType[MailboxStatistics] stat3.queueSize must (be(3) or be(2)) Thread.sleep(2000) system.stop(testActor) system.eventStream.unsubscribe(statProbe.ref) } } } class ProcessTestActor(serviceTime: Duration) extends Actor { def receive = { case _ => { Thread.sleep(serviceTime.toMillis) } } }
Example 98
Source File: MonitorActorTest.scala From 006877 with MIT License | 5 votes |
package aia.performance.monitor import org.scalatest.{WordSpecLike, BeforeAndAfterAll, MustMatchers} import akka.testkit.{ TestProbe, TestKit } import akka.actor.{ Props, ActorSystem } import concurrent.duration._ class MonitorActorTest extends TestKit(ActorSystem("MonitorActorTest")) with WordSpecLike with BeforeAndAfterAll with MustMatchers { "Actor" must { "send statistics" in { val statProbe = TestProbe() system.eventStream.subscribe( statProbe.ref, classOf[ActorStatistics]) val testActor = system.actorOf(Props( new ProcessTestActor(1.second) with MonitorActor), "monitorActor") statProbe.send(testActor, "message") statProbe.send(testActor, "message2") statProbe.send(testActor, "message3") val stat = statProbe.expectMsgType[ActorStatistics] println(stat) stat.exitTime - stat.entryTime must be(1000L +- 20) val stat2 = statProbe.expectMsgType[ActorStatistics] println(stat2) stat2.exitTime - stat2.entryTime must be(1000L +- 20) val stat3 = statProbe.expectMsgType[ActorStatistics] println(stat3) stat3.exitTime - stat3.entryTime must be(1000L +- 20) Thread.sleep(2000) system.stop(testActor) system.eventStream.unsubscribe(statProbe.ref) } } }
Example 99
Source File: TicketSellerSpec.scala From 006877 with MIT License | 5 votes |
package com.goticks import akka.actor.{Props, ActorSystem} import akka.testkit.{ImplicitSender, TestKit} import org.scalatest.{WordSpecLike, MustMatchers} class TickerSellerSpec extends TestKit(ActorSystem("testTickets")) with WordSpecLike with MustMatchers with ImplicitSender with StopSystemAfterAll { "The TicketSeller" must { "Sell tickets until they are sold out" in { import TicketSeller._ def mkTickets = (1 to 10).map(i=>Ticket(i)).toVector val event = "RHCP" val ticketingActor = system.actorOf(TicketSeller.props(event)) ticketingActor ! Add(mkTickets) ticketingActor ! Buy(1) expectMsg(Tickets(event, Vector(Ticket(1)))) val nrs = (2 to 10) nrs.foreach(_ => ticketingActor ! Buy(1)) val tickets = receiveN(9) tickets.zip(nrs).foreach { case (Tickets(event, Vector(Ticket(id))), ix) => id must be(ix) } ticketingActor ! Buy(1) expectMsg(Tickets(event)) } "Sell tickets in batches until they are sold out" in { import TicketSeller._ val firstBatchSize = 10 def mkTickets = (1 to (10 * firstBatchSize)).map(i=>Ticket(i)).toVector val event = "Madlib" val ticketingActor = system.actorOf(TicketSeller.props(event)) ticketingActor ! Add(mkTickets) ticketingActor ! Buy(firstBatchSize) val bought = (1 to firstBatchSize).map(Ticket).toVector expectMsg(Tickets(event, bought)) val secondBatchSize = 5 val nrBatches = 18 val batches = (1 to nrBatches * secondBatchSize) batches.foreach(_ => ticketingActor ! Buy(secondBatchSize)) val tickets = receiveN(nrBatches) tickets.zip(batches).foreach { case (Tickets(event, bought), ix) => bought.size must equal(secondBatchSize) val last = ix * secondBatchSize + firstBatchSize val first = ix * secondBatchSize + firstBatchSize - (secondBatchSize - 1) bought.map(_.id) must equal((first to last).toVector) case _ => } ticketingActor ! Buy(1) expectMsg(Tickets(event)) ticketingActor ! Buy(10) expectMsg(Tickets(event)) } } }
Example 100
Source File: BoxOfficeSpec.scala From 006877 with MIT License | 5 votes |
package com.goticks import akka.actor.{ ActorRef, ActorSystem, Props } import akka.testkit.{ DefaultTimeout, ImplicitSender, TestKit } import com.goticks.BoxOffice._ import com.goticks.TicketSeller._ import org.scalatest.{ MustMatchers, WordSpecLike } class BoxOfficeSpec extends TestKit(ActorSystem("testBoxOffice")) with WordSpecLike with MustMatchers with ImplicitSender with DefaultTimeout with StopSystemAfterAll { "The BoxOffice" must { "Create an event and get tickets from the correct Ticket Seller" in { val boxOffice = system.actorOf(BoxOffice.props) val eventName = "RHCP" boxOffice ! CreateEvent(eventName, 10) expectMsg(EventCreated(Event(eventName, 10))) boxOffice ! GetEvents expectMsg(Events(Vector(Event(eventName, 10)))) boxOffice ! BoxOffice.GetEvent(eventName) expectMsg(Some(Event(eventName, 10))) boxOffice ! GetTickets(eventName, 1) expectMsg(Tickets(eventName, Vector(Ticket(1)))) boxOffice ! GetTickets("DavidBowie", 1) expectMsg(Tickets("DavidBowie")) } "Create a child actor when an event is created and sends it a Tickets message" in { val boxOffice = system.actorOf(Props( new BoxOffice { override def createTicketSeller(name: String): ActorRef = testActor } ) ) val tickets = 3 val eventName = "RHCP" val expectedTickets = (1 to tickets).map(Ticket).toVector boxOffice ! CreateEvent(eventName, tickets) expectMsg(Add(expectedTickets)) expectMsg(EventCreated(Event(eventName, tickets))) } "Get and cancel an event that is not created yet" in { val boxOffice = system.actorOf(BoxOffice.props) val noneExitEventName = "noExitEvent" boxOffice ! BoxOffice.GetEvent(noneExitEventName) expectMsg(None) boxOffice ! CancelEvent(noneExitEventName) expectMsg(None) } "Cancel a ticket which event is not created " in { val boxOffice = system.actorOf(BoxOffice.props) val noneExitEventName = "noExitEvent" boxOffice ! CancelEvent(noneExitEventName) expectMsg(None) } "Cancel a ticket which event is created" in { val boxOffice = system.actorOf(BoxOffice.props) val eventName = "RHCP" val tickets = 10 boxOffice ! CreateEvent(eventName, tickets) expectMsg(EventCreated(Event(eventName, tickets))) boxOffice ! CancelEvent(eventName) expectMsg(Some(Event(eventName, tickets))) } } }
Example 101
Source File: HelloWorldTest.scala From 006877 with MIT License | 5 votes |
package aia.deploy import org.scalatest.{BeforeAndAfterAll, WordSpecLike} import org.scalatest.MustMatchers import akka.testkit.{TestActorRef, ImplicitSender, TestKit} import akka.actor.ActorSystem class HelloWorldTest extends TestKit(ActorSystem("HelloWorldTest")) with ImplicitSender with WordSpecLike with MustMatchers with BeforeAndAfterAll { val actor = TestActorRef[HelloWorld] override def afterAll(): Unit = { system.terminate() } "HelloWorld" must { "reply when sending a string" in { actor ! "everybody" expectMsg("Hello everybody") } } }
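The HelloWorld actor under test is not reproduced on this page. A minimal sketch that would satisfy the expectation above (the real aia.deploy.HelloWorld may differ) is:

import akka.actor.Actor

// Sketch only: reply with a greeting built from the received name.
class HelloWorld extends Actor {
  def receive = {
    case name: String => sender() ! s"Hello $name"
  }
}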
Example 102
Source File: ConfigTest.scala From 006877 with MIT License | 5 votes |
package aia.config import akka.actor.ActorSystem import org.scalatest.WordSpecLike import com.typesafe.config.ConfigFactory import org.scalatest.MustMatchers class ConfigTest extends WordSpecLike with MustMatchers { "Configuration" must { "has configuration" in { val mySystem = ActorSystem("myTest") val config = mySystem.settings.config config.getInt("myTest.intParam") must be(20) config.getString("myTest.applicationDesc") must be("My Config Test") } "has defaults" in { val mySystem = ActorSystem("myDefaultsTest") val config = mySystem.settings.config config.getInt("myTestDefaults.intParam") must be(20) config.getString("myTestDefaults.applicationDesc") must be("My Current Test") } "can include file" in { val mySystem = ActorSystem("myIncludeTest") val config = mySystem.settings.config config.getInt("myTestIncluded.intParam") must be(20) config.getString("myTestIncluded.applicationDesc") must be("My Include Test") } "can be loaded by ourself" in { val configuration = ConfigFactory.load("load") val mySystem = ActorSystem("myLoadTest", configuration) val config = mySystem.settings.config config.getInt("myTestLoad.intParam") must be(20) config.getString("myTestLoad.applicationDesc") must be("My Load Test") } } }
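The values asserted in the first test come from the application.conf on the test classpath. The snippet below reconstructs those entries directly from the assertions, loading them with parseString purely for illustration:

import com.typesafe.config.ConfigFactory

// Entries mirror the assertions in the "has configuration" test above.
val myTestConfig = ConfigFactory.parseString(
  """
    |myTest {
    |  intParam = 20
    |  applicationDesc = "My Config Test"
    |}
  """.stripMargin)

assert(myTestConfig.getInt("myTest.intParam") == 20)
assert(myTestConfig.getString("myTest.applicationDesc") == "My Config Test")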
Example 103
Source File: GetTicketInfoSpec.scala From 006877 with MIT License | 5 votes |
package com.goticks import org.scalatest.MustMatchers import org.scalatest.WordSpec import scala.concurrent.{Future, Await} class GetTicketInfoSpec extends WordSpec with MustMatchers { object TicketInfoService extends TicketInfoService with MockWebServiceCalls import TicketInfoService._ import scala.concurrent.duration._ "getTicketInfo" must { "return a complete ticket info when all futures are successful" in { val ticketInfo = Await.result(getTicketInfo("1234", Location(1d,2d)), 10.seconds) ticketInfo.event.isEmpty must be(false) ticketInfo.event.foreach( event=> event.name must be("Quasimoto")) ticketInfo.travelAdvice.isEmpty must be(false) ticketInfo.suggestions.map(_.name) must be (Seq("Madlib", "OhNo", "Flying Lotus")) } "return an incomplete ticket info when getEvent fails" in { val ticketInfo = Await.result(getTicketInfo("4321", Location(1d,2d)), 10.seconds) ticketInfo.event.isEmpty must be(true) ticketInfo.travelAdvice.isEmpty must be(true) ticketInfo.suggestions.isEmpty must be (true) } } } trait MockWebServiceCalls extends WebServiceCalls { import com.github.nscala_time.time.Imports._ import scala.concurrent.ExecutionContext.Implicits.global def getEvent(ticketNr: String, location: Location): Future[TicketInfo] = { Future { if(ticketNr == "1234") { TicketInfo(ticketNr, location, event = Some(Event("Quasimoto", Location(4.324218908d,53.12311144d), new DateTime(2013,10,1,22,30)))) } else throw new Exception("crap") } } def callWeatherXService(ticketInfo: TicketInfo): Future[Option[Weather]] = { Future { Some(Weather(30, false)) } } def callWeatherYService(ticketInfo: TicketInfo): Future[Option[Weather]] = { Future { Some(Weather(30, false)) } } def callTrafficService(origin: Location, destination: Location, time: DateTime): Future[Option[RouteByCar]] = { Future { Some(RouteByCar("route1", time - (35.minutes), origin, destination, 30.minutes, 5.minutes)) } } def callPublicTransportService(origin: Location, destination: Location, time: DateTime): Future[Option[PublicTransportAdvice]] = { Future { Some(PublicTransportAdvice("public transport route 1", time - (20.minutes), origin, destination, 20.minutes)) } } def callSimilarArtistsService(event: Event): Future[Seq[Artist]] = { Future { Seq(Artist("Madlib", "madlib.com/calendar"), Artist("OhNo", "ohno.com/calendar"), Artist("Flying Lotus", "fly.lo/calendar")) } } def callArtistCalendarService(artist: Artist, nearLocation: Location): Future[Event] = { Future { Event(artist.name,Location(1d,1d), DateTime.now) } } }
Example 104
Source File: LocalWordsSpec.scala From 006877 with MIT License | 5 votes |
package aia.cluster package words import akka.testkit.{ImplicitSender, TestKit} import akka.actor._ import org.scalatest._ import org.scalatest.MustMatchers import JobReceptionist._ import akka.routing.BroadcastPool trait CreateLocalWorkerRouter extends CreateWorkerRouter { this: Actor => override def createWorkerRouter: ActorRef = { context.actorOf(BroadcastPool(5).props(Props[JobWorker]), "worker-router") } } class TestJobMaster extends JobMaster with CreateLocalWorkerRouter class TestReceptionist extends JobReceptionist with CreateMaster { override def createMaster(name: String): ActorRef = context.actorOf(Props[TestJobMaster], name) } class LocalWordsSpec extends TestKit(ActorSystem("test")) with WordSpecLike with MustMatchers with StopSystemAfterAll with ImplicitSender { val receptionist = system.actorOf(Props[TestReceptionist], JobReceptionist.name) "The words system" must { "count the occurrence of words in a text" in { receptionist ! JobRequest("test2", List("this is a test ", "this is a test", "this is", "this")) expectMsg(JobSuccess("test2", Map("this" -> 4, "is"-> 3, "a" -> 2, "test" -> 2))) expectNoMsg } "count many occurences of words in a text" in { val words = List("this is a test ", "this is a test", "this is", "this") receptionist ! JobRequest("test3", (1 to 100).map(i=> words ++ words).flatten.toList) expectMsg(JobSuccess("test3", Map("this" -> 800, "is"-> 600, "a" -> 400, "test" -> 400))) expectNoMsg } "continue to process a job with intermittent failures" in { // the failure is simulated by a job worker throwing an exception on finding the word FAIL in the text. receptionist ! JobRequest("test4", List("this", "is", "a", "test", "FAIL!")) expectMsg(JobSuccess("test4", Map("this" -> 1, "is"-> 1, "a" -> 1, "test" -> 1))) expectNoMsg } } }
Example 105
Source File: TicketSellerSpec.scala From 006877 with MIT License | 5 votes |
package com.goticks import akka.actor.{ ActorSystem } import akka.testkit.{ImplicitSender, TestKit} import org.scalatest.{WordSpecLike, MustMatchers} class TickerSellerSpec extends TestKit(ActorSystem("testTickets")) with WordSpecLike with MustMatchers with ImplicitSender with StopSystemAfterAll { "The TicketSeller" must { "Sell tickets until they are sold out" in { import TicketSeller._ def mkTickets = (1 to 10).map(i=>Ticket(i)).toVector val event = "RHCP" val ticketingActor = system.actorOf(TicketSeller.props(event)) ticketingActor ! Add(mkTickets) ticketingActor ! Buy(1) expectMsg(Tickets(event, Vector(Ticket(1)))) val nrs = (2 to 10) nrs.foreach(_ => ticketingActor ! Buy(1)) val tickets = receiveN(9) tickets.zip(nrs).foreach { case (Tickets(event, Vector(Ticket(id))), ix) => id must be(ix) } ticketingActor ! Buy(1) expectMsg(Tickets(event)) } "Sell tickets in batches until they are sold out" in { import TicketSeller._ val firstBatchSize = 10 def mkTickets = (1 to (10 * firstBatchSize)).map(i=>Ticket(i)).toVector val event = "Madlib" val ticketingActor = system.actorOf(TicketSeller.props(event)) ticketingActor ! Add(mkTickets) ticketingActor ! Buy(firstBatchSize) val bought = (1 to firstBatchSize).map(Ticket).toVector expectMsg(Tickets(event, bought)) val secondBatchSize = 5 val nrBatches = 18 val batches = (1 to nrBatches * secondBatchSize) batches.foreach(_ => ticketingActor ! Buy(secondBatchSize)) val tickets = receiveN(nrBatches) tickets.zip(batches).foreach { case (Tickets(event, bought), ix) => bought.size must equal(secondBatchSize) val last = ix * secondBatchSize + firstBatchSize val first = ix * secondBatchSize + firstBatchSize - (secondBatchSize - 1) bought.map(_.id) must equal((first to last).toVector) case _ => } ticketingActor ! Buy(1) expectMsg(Tickets(event)) ticketingActor ! Buy(10) expectMsg(Tickets(event)) } } }
Example 106
Source File: BoxOfficeSpec.scala From 006877 with MIT License | 5 votes |
package com.goticks import akka.actor.{ ActorRef, Props, ActorSystem } import akka.testkit.{ TestKit, ImplicitSender, DefaultTimeout } import org.scalatest.{ WordSpecLike, MustMatchers } class BoxOfficeSpec extends TestKit(ActorSystem("testBoxOffice")) with WordSpecLike with MustMatchers with ImplicitSender with DefaultTimeout with StopSystemAfterAll { "The BoxOffice" must { "Create an event and get tickets from the correct Ticket Seller" in { import BoxOffice._ import TicketSeller._ val boxOffice = system.actorOf(BoxOffice.props) val eventName = "RHCP" boxOffice ! CreateEvent(eventName, 10) expectMsg(EventCreated(Event(eventName, 10))) boxOffice ! GetTickets(eventName, 1) expectMsg(Tickets(eventName, Vector(Ticket(1)))) boxOffice ! GetTickets("DavidBowie", 1) expectMsg(Tickets("DavidBowie")) } "Create a child actor when an event is created and sends it a Tickets message" in { import BoxOffice._ import TicketSeller._ val boxOffice = system.actorOf(Props( new BoxOffice { override def createTicketSeller(name: String): ActorRef = testActor } ) ) val tickets = 3 val eventName = "RHCP" val expectedTickets = (1 to tickets).map(Ticket).toVector boxOffice ! CreateEvent(eventName, tickets) expectMsg(Add(expectedTickets)) expectMsg(EventCreated(Event(eventName, tickets))) } } }
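The second test above passes because BoxOffice creates its TicketSeller children through an overridable factory method (createTicketSeller), which the test replaces with testActor so the parent's messages to its child can be observed directly. The BoxOffice implementation itself is not shown here; a minimal sketch of that child-factory pattern, with hypothetical names (ParentWithFactory, ChildWorker and createChild are illustrative only, not the project's code), looks like this:

import akka.actor.{Actor, ActorRef, Props}

// Hypothetical sketch of the overridable child-factory pattern exercised above.
class ParentWithFactory extends Actor {
  // tests override this to return the TestKit's testActor or a TestProbe ref
  def createChild(name: String): ActorRef = context.actorOf(Props[ChildWorker], name)

  val child: ActorRef = createChild("worker")

  def receive: Receive = {
    case msg => child ! msg
  }
}

class ChildWorker extends Actor {
  def receive: Receive = { case _ => () }
}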
Example 107
Source File: FilteringActorTest.scala From 006877 with MIT License | 5 votes |
package aia.testdriven

import akka.testkit.TestKit
import akka.actor.{ Actor, Props, ActorRef, ActorSystem }
import org.scalatest.{MustMatchers, WordSpecLike }

class FilteringActorTest extends TestKit(ActorSystem("testsystem"))
    with WordSpecLike
    with MustMatchers
    with StopSystemAfterAll {

  "A Filtering Actor" must {

    "filter out particular messages" in {
      import FilteringActor._
      val props = FilteringActor.props(testActor, 5)
      val filter = system.actorOf(props, "filter-1")
      filter ! Event(1)
      filter ! Event(2)
      filter ! Event(1)
      filter ! Event(3)
      filter ! Event(1)
      filter ! Event(4)
      filter ! Event(5)
      filter ! Event(5)
      filter ! Event(6)
      val eventIds = receiveWhile() {
        case Event(id) if id <= 5 => id
      }
      eventIds must be(List(1, 2, 3, 4, 5))
      expectMsg(Event(6))
    }

    "filter out particular messages using expectNoMsg" in {
      import FilteringActor._
      val props = FilteringActor.props(testActor, 5)
      val filter = system.actorOf(props, "filter-2")
      filter ! Event(1)
      filter ! Event(2)
      expectMsg(Event(1))
      expectMsg(Event(2))
      filter ! Event(1)
      expectNoMsg
      filter ! Event(3)
      expectMsg(Event(3))
      filter ! Event(1)
      expectNoMsg
      filter ! Event(4)
      filter ! Event(5)
      filter ! Event(5)
      expectMsg(Event(4))
      expectMsg(Event(5))
      expectNoMsg()
    }
  }
}

object FilteringActor {
  def props(nextActor: ActorRef, bufferSize: Int) =
    Props(new FilteringActor(nextActor, bufferSize))
  case class Event(id: Long)
}

class FilteringActor(nextActor: ActorRef, bufferSize: Int) extends Actor {
  import FilteringActor._
  var lastMessages = Vector[Event]()
  def receive = {
    case msg: Event =>
      if (!lastMessages.contains(msg)) {
        lastMessages = lastMessages :+ msg
        nextActor ! msg
        if (lastMessages.size > bufferSize) {
          // discard the oldest message
          lastMessages = lastMessages.tail
        }
      }
  }
}
Example 108
Source File: SendingActorTest.scala From 006877 with MIT License | 5 votes |
package aia.testdriven import scala.util.Random import akka.testkit.TestKit import akka.actor.{ Props, ActorRef, Actor, ActorSystem } import org.scalatest.{WordSpecLike, MustMatchers} class SendingActorTest extends TestKit(ActorSystem("testsystem")) with WordSpecLike with MustMatchers with StopSystemAfterAll { "A Sending Actor" must { "send a message to another actor when it has finished processing" in { import SendingActor._ val props = SendingActor.props(testActor) val sendingActor = system.actorOf(props, "sendingActor") val size = 1000 val maxInclusive = 100000 def randomEvents() = (0 until size).map{ _ => Event(Random.nextInt(maxInclusive)) }.toVector val unsorted = randomEvents() val sortEvents = SortEvents(unsorted) sendingActor ! sortEvents expectMsgPF() { case SortedEvents(events) => events.size must be(size) unsorted.sortBy(_.id) must be(events) } } } } object SendingActor { def props(receiver: ActorRef) = Props(new SendingActor(receiver)) case class Event(id: Long) case class SortEvents(unsorted: Vector[Event]) case class SortedEvents(sorted: Vector[Event]) } class SendingActor(receiver: ActorRef) extends Actor { import SendingActor._ def receive = { case SortEvents(unsorted) => receiver ! SortedEvents(unsorted.sortBy(_.id)) } }
Example 109
Source File: SilentActorNextTest.scala From 006877 with MIT License | 5 votes |
package aia.testdriven import org.scalatest.WordSpecLike import org.scalatest.MustMatchers import akka.testkit.{ TestActorRef, TestKit } import akka.actor._ package silentactor02 { class SilentActorTest extends TestKit(ActorSystem("testsystem")) with WordSpecLike with MustMatchers with StopSystemAfterAll { "A Silent Actor" must { "change internal state when it receives a message, single" in { import SilentActor._ val silentActor = TestActorRef[SilentActor] silentActor ! SilentMessage("whisper") silentActor.underlyingActor.state must (contain("whisper")) } } } object SilentActor { case class SilentMessage(data: String) case class GetState(receiver: ActorRef) } class SilentActor extends Actor { import SilentActor._ var internalState = Vector[String]() def receive = { case SilentMessage(data) => internalState = internalState :+ data } def state = internalState } } package silentactor03 { class SilentActorTest extends TestKit(ActorSystem("testsystem")) with WordSpecLike with MustMatchers with StopSystemAfterAll { "A Silent Actor" must { "change internal state when it receives a message, multi" in { import SilentActor._ val silentActor = system.actorOf(Props[SilentActor], "s3") silentActor ! SilentMessage("whisper1") silentActor ! SilentMessage("whisper2") silentActor ! GetState(testActor) expectMsg(Vector("whisper1", "whisper2")) } } } object SilentActor { case class SilentMessage(data: String) case class GetState(receiver: ActorRef) } class SilentActor extends Actor { import SilentActor._ var internalState = Vector[String]() def receive = { case SilentMessage(data) => internalState = internalState :+ data case GetState(receiver) => receiver ! internalState } } }
Example 110
Source File: ClientTest.scala From bitcoin-s-spv-node with MIT License | 5 votes |
package org.bitcoins.spvnode.networking import java.net.{InetSocketAddress, ServerSocket} import akka.actor.ActorSystem import akka.io.{Inet, Tcp} import akka.testkit.{ImplicitSender, TestActorRef, TestKit, TestProbe} import org.bitcoins.core.config.TestNet3 import org.bitcoins.core.util.{BitcoinSLogger, BitcoinSUtil} import org.bitcoins.spvnode.messages.control.VersionMessage import org.bitcoins.spvnode.messages.{NetworkPayload, VersionMessage} import org.bitcoins.spvnode.util.BitcoinSpvNodeUtil import org.scalatest.{BeforeAndAfter, BeforeAndAfterAll, FlatSpecLike, MustMatchers} import scala.concurrent.duration._ import scala.util.Try class ClientTest extends TestKit(ActorSystem("ClientTest")) with FlatSpecLike with MustMatchers with ImplicitSender with BeforeAndAfter with BeforeAndAfterAll with BitcoinSLogger { "Client" must "connect to a node on the bitcoin network, " + "send a version message to a peer on the network and receive a version message back, then close that connection" in { val probe = TestProbe() val client = TestActorRef(Client.props,probe.ref) val remote = new InetSocketAddress(TestNet3.dnsSeeds(0), TestNet3.port) val randomPort = 23521 //random port client ! Tcp.Connect(remote, Some(new InetSocketAddress(randomPort))) //val bound : Tcp.Bound = probe.expectMsgType[Tcp.Bound] val conn : Tcp.Connected = probe.expectMsgType[Tcp.Connected] //make sure the socket is currently bound Try(new ServerSocket(randomPort)).isSuccess must be (false) client ! Tcp.Abort val confirmedClosed = probe.expectMsg(Tcp.Aborted) //make sure the port is now available val boundSocket = Try(new ServerSocket(randomPort)) boundSocket.isSuccess must be (true) boundSocket.get.close() } it must "bind connect to two nodes on one port" in { //NOTE if this test case fails it is more than likely because one of the two dns seeds //below is offline val remote1 = new InetSocketAddress(TestNet3.dnsSeeds(0), TestNet3.port) val remote2 = new InetSocketAddress(TestNet3.dnsSeeds(2), TestNet3.port) val probe1 = TestProbe() val probe2 = TestProbe() val client1 = TestActorRef(Client.props, probe1.ref) val client2 = TestActorRef(Client.props, probe2.ref) val local1 = new InetSocketAddress(TestNet3.port) val options = List(Inet.SO.ReuseAddress(true)) client1 ! Tcp.Connect(remote1,Some(local1),options) probe1.expectMsgType[Tcp.Connected] client1 ! Tcp.Abort val local2 = new InetSocketAddress(TestNet3.port) client2 ! Tcp.Connect(remote2,Some(local2),options) probe2.expectMsgType[Tcp.Connected](5.seconds) client2 ! Tcp.Abort } override def afterAll: Unit = { TestKit.shutdownActorSystem(system) } }
Example 111
Source File: BlockActorTest.scala From bitcoin-s-spv-node with MIT License | 5 votes |
package org.bitcoins.spvnode.networking import akka.actor.ActorSystem import akka.testkit.{ImplicitSender, TestActorRef, TestKit, TestProbe} import org.bitcoins.core.crypto.DoubleSha256Digest import org.bitcoins.core.protocol.blockchain.BlockHeader import org.bitcoins.core.util.{BitcoinSLogger, BitcoinSUtil} import org.bitcoins.spvnode.messages.BlockMessage import org.scalatest.{BeforeAndAfter, BeforeAndAfterAll, FlatSpecLike, MustMatchers} import scala.concurrent.duration.DurationInt class BlockActorTest extends TestKit(ActorSystem("BlockActorTest")) with FlatSpecLike with MustMatchers with ImplicitSender with BeforeAndAfter with BeforeAndAfterAll with BitcoinSLogger { def blockActor = TestActorRef(BlockActor.props,self) val blockHash = DoubleSha256Digest(BitcoinSUtil.flipEndianness("00000000b873e79784647a6c82962c70d228557d24a747ea4d1b8bbe878e1206")) "BlockActor" must "be able to send a GetBlocksMessage then receive that block back" in { blockActor ! blockHash val blockMsg = expectMsgType[BlockMessage](10.seconds) blockMsg.block.blockHeader.hash must be (blockHash) } it must "be able to request a block from it's block header" in { val blockHeader = BlockHeader("0100000043497fd7f826957108f4a30fd9cec3aeba79972084e90ead01ea330900000000bac8b0fa927c0ac8234287e33c5f74d38d354820e24756ad709d7038fc5f31f020e7494dffff001d03e4b672") blockActor ! blockHeader val blockMsg = expectMsgType[BlockMessage](10.seconds) blockMsg.block.blockHeader.hash must be (blockHash) } override def afterAll = { TestKit.shutdownActorSystem(system) } }
Example 112
Source File: VersionMessageTest.scala From bitcoin-s-spv-node with MIT License | 5 votes |
package org.bitcoins.spvnode.messages.control import java.net.InetAddress import org.bitcoins.core.config.MainNet import org.bitcoins.core.number.{Int32, UInt64} import org.joda.time.DateTime import org.scalatest.{FlatSpec, MustMatchers} class VersionMessageTest extends FlatSpec with MustMatchers { "VersionMessage" must "create a new version message to be sent to another node on the network" in { val versionMessage = VersionMessage(MainNet, InetAddress.getLocalHost) versionMessage.addressReceiveServices must be (UnnamedService) versionMessage.addressReceiveIpAddress must be (InetAddress.getLocalHost) versionMessage.addressReceivePort must be (MainNet.port) versionMessage.addressTransServices must be (NodeNetwork) versionMessage.addressTransIpAddress must be (InetAddress.getLocalHost) versionMessage.addressTransPort must be (MainNet.port) versionMessage.nonce must be (UInt64.zero) versionMessage.startHeight must be (Int32.zero) versionMessage.timestamp.underlying must be (DateTime.now.getMillis +- 1000) } }
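The timestamp assertion above relies on ScalaTest's spread matcher (+-), which tolerates the clock drift between building the VersionMessage and asserting on it. As a standalone illustration of that matcher (plain ScalaTest only, no project types assumed):

import org.scalatest.{FlatSpec, MustMatchers}

// Minimal sketch of the +- (spread) matcher: the check passes when the actual
// value lies within expected +- tolerance.
class SpreadMatcherExample extends FlatSpec with MustMatchers {
  "a measured value" must "lie within the allowed tolerance" in {
    val measured = 1003L
    measured must be (1000L +- 5L)
  }
}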
Example 113
Source File: NetworkPayloadTest.scala From bitcoin-s-spv-node with MIT License | 5 votes |
package org.bitcoins.spvnode.messages

import org.bitcoins.core.util.BitcoinSLogger
import org.bitcoins.spvnode.headers.NetworkHeader
import org.bitcoins.spvnode.util.TestUtil
import org.scalatest.{FlatSpec, MustMatchers}

class NetworkPayloadTest extends FlatSpec with MustMatchers with BitcoinSLogger {

  "NetworkMessage" must "create a payload object from its network header and the payload bytes" in {
    val rawNetworkMessage = TestUtil.rawNetworkMessage
    val header = NetworkHeader(rawNetworkMessage.take(48))
    logger.debug("Header: " + header)
    val payloadHex = rawNetworkMessage.slice(48, rawNetworkMessage.length)
    val payload = NetworkPayload(header, payloadHex)
    payload.isInstanceOf[VersionMessage] must be (true)
    payload.commandName must be (NetworkPayload.versionCommandName)
  }
}
Example 114
Source File: NetworkHeaderTest.scala From bitcoin-s-spv-node with MIT License | 5 votes |
package org.bitcoins.spvnode.headers

import org.bitcoins.core.config.TestNet3
import org.bitcoins.core.number.UInt32
import org.bitcoins.core.util.{BitcoinSUtil, CryptoUtil}
import org.bitcoins.spvnode.messages.VerAckMessage
import org.bitcoins.spvnode.util.TestUtil
import org.scalatest.{FlatSpec, MustMatchers}

class NetworkHeaderTest extends FlatSpec with MustMatchers {

  "MessageHeader" must "create a message header for a message" in {
    val messageHeader = NetworkHeader(TestNet3, TestUtil.versionMessage)
    messageHeader.network must be (TestNet3.magicBytes)
    messageHeader.commandName must be (TestUtil.versionMessage.commandName)
    messageHeader.payloadSize must be (UInt32(TestUtil.versionMessage.bytes.size))
    messageHeader.checksum must be (CryptoUtil.doubleSHA256(TestUtil.versionMessage.bytes).bytes.take(4))
  }

  it must "build the correct message header for a verack message" in {
    val messageHeader = NetworkHeader(TestNet3, VerAckMessage)
    messageHeader.network must be (TestNet3.magicBytes)
    messageHeader.commandName must be (VerAckMessage.commandName)
    messageHeader.payloadSize must be (UInt32.zero)
    BitcoinSUtil.encodeHex(messageHeader.checksum) must be ("5df6e0e2")
  }
}
Example 115
Source File: BitcoinSpvNodeUtilTest.scala From bitcoin-s-spv-node with MIT License | 5 votes |
package org.bitcoins.spvnode.util

import org.bitcoins.core.crypto.DoubleSha256Digest
import org.bitcoins.core.number.{UInt32, UInt64}
import org.bitcoins.core.protocol.CompactSizeUInt
import org.bitcoins.core.protocol.blockchain.BlockHeader
import org.bitcoins.core.util.{BitcoinSLogger, BitcoinSUtil}
import org.bitcoins.spvnode.NetworkMessage
import org.bitcoins.spvnode.constant.Constants
import org.bitcoins.spvnode.gen.DataMessageGenerator
import org.bitcoins.spvnode.messages.HeadersMessage
import org.bitcoins.spvnode.messages.data.HeadersMessage
import org.scalatest.{FlatSpec, MustMatchers}

class BitcoinSpvNodeUtilTest extends FlatSpec with MustMatchers with BitcoinSLogger {

  "BitcoinSpvNodeUtil" must "return the entire byte array if a message is not aligned to a byte frame" in {
    val versionMessage = Constants.versionMessage
    val networkMsg = NetworkMessage(Constants.networkParameters, versionMessage)
    //remove last byte so the message is not aligned
    val bytes = networkMsg.bytes.slice(0, networkMsg.bytes.size - 1)
    val (_, unAlignedBytes) = BitcoinSpvNodeUtil.parseIndividualMessages(bytes)
    unAlignedBytes must be (bytes)
  }

  it must "parse a block header message that is not aligned with a tcp frame" in {
    val headersMsg = HeadersMessage(CompactSizeUInt(UInt64(2), 1),
      List(
        BlockHeader(UInt32(3150175941L), DoubleSha256Digest("177e777f078d2deeaa3ad4b82e78a00ad2f4738c5217f7a36d9cf3bd11e41817"),
          DoubleSha256Digest("1dcaebebd620823bb344bd18a18276de508910d66b4e3cbb3426a14eced66224"), UInt32(2845833462L), UInt32(2626024374L),
          UInt32(2637850613L)),
        BlockHeader(UInt32(1694049746), DoubleSha256Digest("07b6d61809476830bc7ef862a983a7222997df3f639e0d2aa5902a5a48018430"),
          DoubleSha256Digest("68c65f803b70b72563e86ac3e8e20ad11fbfa2eac3f9fddf4bc624d03a14f084"), UInt32(202993555),
          UInt32(4046619225L), UInt32(1231236881))))
    val networkMsg = NetworkMessage(Constants.networkParameters, headersMsg)
    //split the network msg at a random index to simulate a tcp frame not being aligned
    val randomIndex = scala.util.Random.nextInt().abs % networkMsg.bytes.size
    val (firstHalf, secondHalf) = networkMsg.bytes.splitAt(randomIndex)
    val (firstHalfParseHeaders, remainingBytes) = BitcoinSpvNodeUtil.parseIndividualMessages(firstHalf)
    firstHalfParseHeaders.isEmpty must be (true)
    val (secondHalfParsedHeaders, _) = BitcoinSpvNodeUtil.parseIndividualMessages(remainingBytes ++ secondHalf)
    val parsedNetworkMsg = secondHalfParsedHeaders.head
    val parsedHeadersMsg = parsedNetworkMsg.payload.asInstanceOf[HeadersMessage]
    parsedNetworkMsg.header must be (networkMsg.header)
    parsedHeadersMsg.headers.head must be (headersMsg.headers.head)
    parsedHeadersMsg.headers(1) must be (headersMsg.headers(1))
  }
}
Example 116
Source File: RawAddrMessageSerializerTest.scala From bitcoin-s-spv-node with MIT License | 5 votes |
package org.bitcoins.spvnode.serializers.messages.control

import org.bitcoins.core.number.UInt64
import org.bitcoins.core.protocol.CompactSizeUInt
import org.scalatest.{FlatSpec, MustMatchers}

class RawAddrMessageSerializerTest extends FlatSpec with MustMatchers {

  //from this bitcoin developer guide example
  //https://bitcoin.org/en/developer-reference#addr
  val addressCount = "01"
  val time = "d91f4854"
  val services = "0100000000000000"
  val address = "00000000000000000000ffffc0000233"
  val port = "208d"
  val hex = addressCount + time + services + address + port

  "RawAddrMessageSerializer" must "read an AddrMessage from a hex string" in {
    val addrMessage = RawAddrMessageSerializer.read(hex)
    addrMessage.ipCount must be (CompactSizeUInt(UInt64.one, 1))
    addrMessage.addresses.size must be (1)
  }

  it must "write an Addr message and get its original hex back" in {
    val addrMessage = RawAddrMessageSerializer.read(hex)
    RawAddrMessageSerializer.write(addrMessage) must be (hex)
  }
}
Example 117
Source File: RawRejectMessageSerializerTest.scala From bitcoin-s-spv-node with MIT License | 5 votes |
package org.bitcoins.spvnode.serializers.messages.control import org.bitcoins.core.number.UInt64 import org.bitcoins.core.protocol.CompactSizeUInt import org.bitcoins.core.util.BitcoinSUtil import org.scalatest.{FlatSpec, MustMatchers} class RawRejectMessageSerializerTest extends FlatSpec with MustMatchers { //https://bitcoin.org/en/developer-reference#reject val hex = "02" + "7478" + "12" + "15" + "6261642d74786e732d696e707574732d7370656e74" + "394715fcab51093be7bfca5a31005972947baf86a31017939575fb2354222821" "RawRejectMessageSerializer" must "read in a reject message example" in { val rejectMsg = RawRejectMessageSerializer.read(hex) rejectMsg.messageSize must be (CompactSizeUInt(UInt64(2))) rejectMsg.message must be ("tx") rejectMsg.code must be (0x12.toChar) rejectMsg.reasonSize must be (CompactSizeUInt(UInt64(21))) rejectMsg.reason must be ("bad-txns-inputs-spent") BitcoinSUtil.encodeHex(rejectMsg.extra) must be ("394715fcab51093be7bfca5a31005972947baf86a31017939575fb2354222821") } it must "read then write a reject message" in { val rejectMsg = RawRejectMessageSerializer.read(hex) rejectMsg.hex must be (hex) } }
Example 118
Source File: RawServiceIdentifierSerializerTest.scala From bitcoin-s-spv-node with MIT License | 5 votes |
package org.bitcoins.spvnode.serializers.messages.control

import org.bitcoins.spvnode.messages.control.{NodeNetwork, UnnamedService}
import org.scalatest.{FlatSpec, MustMatchers}

class RawServiceIdentifierSerializerTest extends FlatSpec with MustMatchers {

  "RawServiceIdentifierSerializer" must "read an unnamed service identifier from a hex string" in {
    val hex = "0000000000000000"
    RawServiceIdentifierSerializer.read(hex) must be (UnnamedService)
  }

  it must "read a full node service identifier" in {
    val hex = "0100000000000000"
    RawServiceIdentifierSerializer.read(hex) must be (NodeNetwork)
  }

  it must "write an unnamed service identifier" in {
    val hex = "0000000000000000"
    val service = RawServiceIdentifierSerializer.read(hex)
    RawServiceIdentifierSerializer.write(service) must be (hex)
  }

  it must "write a node network service provider" in {
    val hex = "0100000000000000"
    val service = RawServiceIdentifierSerializer.read(hex)
    RawServiceIdentifierSerializer.write(service) must be (hex)
  }
}
Example 119
Source File: RawNetworkIpAddressSerializerTest.scala From bitcoin-s-spv-node with MIT License | 5 votes |
package org.bitcoins.spvnode.serializers.messages.control

import org.bitcoins.core.number.UInt32
import org.bitcoins.spvnode.messages.control.NodeNetwork
import org.scalatest.{FlatSpec, MustMatchers}

class RawNetworkIpAddressSerializerTest extends FlatSpec with MustMatchers {

  //from this bitcoin developer guide example
  //https://bitcoin.org/en/developer-reference#addr
  val time = "d91f4854"
  val services = "0100000000000000"
  val address = "00000000000000000000ffffc0000233"
  val port = "208d"
  val hex = time + services + address + port

  "RawNetworkIpAddressSerializer" must "read a network ip address from a hex string" in {
    val ipAddress = RawNetworkIpAddressSerializer.read(hex)
    ipAddress.time must be (UInt32(1414012889))
    ipAddress.services must be (NodeNetwork)
    ipAddress.address.toString must be ("/192.0.2.51")
    ipAddress.port must be (8333)
  }

  it must "write a network ip address and get its original hex back" in {
    val ipAddress = RawNetworkIpAddressSerializer.read(hex)
    RawNetworkIpAddressSerializer.write(ipAddress) must be (hex)
  }
}
Example 120
Source File: RawFilterAddMessageSerializerTest.scala From bitcoin-s-spv-node with MIT License | 5 votes |
package org.bitcoins.spvnode.serializers.messages.control import org.bitcoins.core.util.BitcoinSUtil import org.scalatest.{FlatSpec, MustMatchers} class RawFilterAddMessageSerializerTest extends FlatSpec with MustMatchers { //https://bitcoin.org/en/developer-reference#filteradd val hex = "20" + "fdacf9b3eb077412e7a968d2e4f11b9a9dee312d666187ed77ee7d26af16cb0b" "RawFilterAddMessageSerializer" must "deserialize a message in the bitcoin developer reference" in { val filterAddMsg = RawFilterAddMessageSerializer.read(hex) filterAddMsg.elementSize.hex must be ("20") BitcoinSUtil.encodeHex(filterAddMsg.element) must be ("fdacf9b3eb077412e7a968d2e4f11b9a9dee312d666187ed77ee7d26af16cb0b") } it must "serialize a filter add message" in { val filterAddMsg = RawFilterAddMessageSerializer.read(hex) filterAddMsg.hex must be (hex) } }
Example 121
Source File: RawPingMessageSerializerTest.scala From bitcoin-s-spv-node with MIT License | 5 votes |
package org.bitcoins.spvnode.serializers.messages.control import org.bitcoins.core.number.UInt64 import org.bitcoins.spvnode.messages.control.PingMessage import org.scalatest.{FlatSpec, MustMatchers} class RawPingMessageSerializerTest extends FlatSpec with MustMatchers { "RawPingMessageSerializer" must "read and write a uint64 representing the ping" in { val hex = "0094102111e2af4d" RawPingMessageSerializer.read(hex) must be (PingMessage(UInt64(hex))) } it must "write a ping message" in { val pingMessage = PingMessage(UInt64.zero) pingMessage.hex must be ("0000000000000000") } }
Example 122
Source File: RawFilterLoadMessageSerializerTest.scala From bitcoin-s-spv-node with MIT License | 5 votes |
package org.bitcoins.spvnode.serializers.messages.control import org.bitcoins.core.number.{UInt32, UInt64} import org.bitcoins.core.protocol.CompactSizeUInt import org.bitcoins.core.util.BitcoinSUtil import org.bitcoins.spvnode.bloom.BloomUpdateNone import org.scalatest.{FlatSpec, MustMatchers} class RawFilterLoadMessageSerializerTest extends FlatSpec with MustMatchers { "RawFilterLoadMessageSerializer" must "deserialize and serialize a filter load message" in { //example from the bitcoin developer reference //https://bitcoin.org/en/developer-reference#filterload val hex = "02b50f0b0000000000000000" val filterLoadMsg = RawFilterLoadMessageSerializer.read(hex) filterLoadMsg.bloomFilter.filterSize must be (CompactSizeUInt(UInt64(2))) BitcoinSUtil.encodeHex(filterLoadMsg.bloomFilter.data) must be ("b50f") filterLoadMsg.bloomFilter.hashFuncs must be (UInt32(11)) filterLoadMsg.bloomFilter.tweak must be (UInt32.zero) filterLoadMsg.bloomFilter.flags must be (BloomUpdateNone) RawFilterLoadMessageSerializer.write(filterLoadMsg) must be (hex) } }
Example 123
Source File: RawTypeIdentifierSerializerTest.scala From bitcoin-s-spv-node with MIT License | 5 votes |
package org.bitcoins.spvnode.serializers.messages import org.bitcoins.spvnode.messages.{MsgBlock, MsgFilteredBlock, MsgTx} import org.scalatest.{FlatSpec, MustMatchers} class RawTypeIdentifierSerializerTest extends FlatSpec with MustMatchers { val msgTxHex = "01000000" val msgBlockHex = "02000000" val msgFilteredBlockHex = "03000000" "RawTypeIdentifier" must "read/write a MsgTx" in { val msg = RawTypeIdentifierSerializer.read(msgTxHex) msg must be (MsgTx) RawTypeIdentifierSerializer.write(msg) must be (msgTxHex) } it must "read/write a MsgBlock" in { val msg = RawTypeIdentifierSerializer.read(msgBlockHex) msg must be (MsgBlock) RawTypeIdentifierSerializer.write(msg) must be (msgBlockHex) } it must "read/write a MsgFilteredBlock" in { val msg = RawTypeIdentifierSerializer.read(msgFilteredBlockHex) msg must be (MsgFilteredBlock) RawTypeIdentifierSerializer.write(msg) must be (msgFilteredBlockHex) } }
Example 124
Source File: RawMerkleBlockMessageSerializerTest.scala From bitcoin-s-spv-node with MIT License | 5 votes |
package org.bitcoins.spvnode.serializers.messages.data import org.bitcoins.core.crypto.DoubleSha256Digest import org.bitcoins.core.number.{UInt32, UInt64} import org.bitcoins.core.protocol.CompactSizeUInt import org.bitcoins.core.util.BitcoinSUtil import org.scalatest.{FlatSpec, MustMatchers} class RawMerkleBlockMessageSerializerTest extends FlatSpec with MustMatchers { //from bitcoin developer reference //https://bitcoin.org/en/developer-reference#merkleblock val hex = "0100000082bb869cf3a793432a66e826e05a6fc37469f8efb7421dc88067010000000000" + "7f16c5962e8bd963659c793ce370d95f093bc7e367117b3c30c1f8fdd0d97287" + "76381b4d4c86041b554b85290700000004" + "3612262624047ee87660be1a707519a443b1c1ce3d248cbfc6c15870f6c5daa2" + "019f5b01d4195ecbc9398fbf3c3b1fa9bb3183301d7a1fb3bd174fcfa40a2b65" + "41ed70551dd7e841883ab8f0b16bf04176b7d1480e4f0af9f3d4c3595768d068" + "20d2a7bc994987302e5b1ac80fc425fe25f8b63169ea78e68fbaaefa59379bbf" + "011d" "RawMerkleBlockMessage" must "read a raw hex string into a merkle block message" in { val merkleBlockMessage = RawMerkleBlockMessageSerializer.read(hex) merkleBlockMessage.merkleBlock.transactionCount must be (UInt32(7)) merkleBlockMessage.merkleBlock.hashCount must be (CompactSizeUInt(UInt64(4))) merkleBlockMessage.merkleBlock.hashes must be (Seq( DoubleSha256Digest(BitcoinSUtil.decodeHex("3612262624047ee87660be1a707519a443b1c1ce3d248cbfc6c15870f6c5daa2")), DoubleSha256Digest(BitcoinSUtil.decodeHex("019f5b01d4195ecbc9398fbf3c3b1fa9bb3183301d7a1fb3bd174fcfa40a2b65")), DoubleSha256Digest(BitcoinSUtil.decodeHex("41ed70551dd7e841883ab8f0b16bf04176b7d1480e4f0af9f3d4c3595768d068")), DoubleSha256Digest(BitcoinSUtil.decodeHex("20d2a7bc994987302e5b1ac80fc425fe25f8b63169ea78e68fbaaefa59379bbf")) )) merkleBlockMessage.merkleBlock.partialMerkleTree.bits must be (Seq(true, false, true, true, true, false, false, false)) } it must "write a merkle block header message" in { val merkleBlockMessage = RawMerkleBlockMessageSerializer.read(hex) RawMerkleBlockMessageSerializer.write(merkleBlockMessage) must be (hex) } }
Example 125
Source File: RawInventoryMessageSerializerTest.scala From bitcoin-s-spv-node with MIT License | 5 votes |
package org.bitcoins.spvnode.serializers.messages.data import org.bitcoins.core.number.UInt64 import org.bitcoins.core.protocol.CompactSizeUInt import org.scalatest.{FlatSpec, MustMatchers} class RawInventoryMessageSerializerTest extends FlatSpec with MustMatchers { //from bitcoin developer reference //https://bitcoin.org/en/developer-reference#inv val hex = "0201000000de55ffd709ac1f5dc509a0925d0b1fc442ca034f224732e429081da1b621f55a" + "0100000091d36d997037e08018262978766f24b8a055aaf1d872e94ae85e9817b2c68dc7" "RawInventoryMessageSerializer" must "read a InventoryMessage object from its hex serialization" in { val inventoryMessage = RawInventoryMessageSerializer.read(hex) inventoryMessage.inventoryCount must be (CompactSizeUInt(UInt64(2),1)) inventoryMessage.inventories.size must be (2) } it must "read and then write an inventory message and get its original hex" in { val inventoryMessage = RawInventoryMessageSerializer.read(hex) RawInventoryMessageSerializer.write(inventoryMessage) must be (hex) } }
Example 126
Source File: RawGetDataMessageSerializerTest.scala From bitcoin-s-spv-node with MIT License | 5 votes |
package org.bitcoins.spvnode.serializers.messages.data

import org.bitcoins.core.crypto.DoubleSha256Digest
import org.bitcoins.core.number.UInt64
import org.bitcoins.core.protocol.CompactSizeUInt
import org.bitcoins.spvnode.messages.MsgTx
import org.bitcoins.spvnode.messages.data.Inventory
import org.scalatest.{FlatSpec, MustMatchers}

class RawGetDataMessageSerializerTest extends FlatSpec with MustMatchers {

  //from bitcoin developer reference
  //a getdata message is essentially an inv message
  //https://bitcoin.org/en/developer-reference#inv
  val hex = "02" + "01000000" + "de55ffd709ac1f5dc509a0925d0b1fc442ca034f224732e429081da1b621f55a" +
    "01000000" + "91d36d997037e08018262978766f24b8a055aaf1d872e94ae85e9817b2c68dc7"

  "RawGetDataMessageSerializer" must "read in a data message" in {
    val dataMsg = RawGetDataMessageSerializer.read(hex)
    dataMsg.inventoryCount must be (CompactSizeUInt(UInt64(2)))
    dataMsg.inventories.head must be (Inventory(MsgTx,
      DoubleSha256Digest("de55ffd709ac1f5dc509a0925d0b1fc442ca034f224732e429081da1b621f55a")))
    dataMsg.inventories(1) must be (Inventory(MsgTx,
      DoubleSha256Digest("91d36d997037e08018262978766f24b8a055aaf1d872e94ae85e9817b2c68dc7")))
  }

  it must "write a GetDataMessage back to the original hex" in {
    val dataMsg = RawGetDataMessageSerializer.read(hex)
    RawGetDataMessageSerializer.write(dataMsg) must be (hex)
  }
}
Example 127
Source File: RawInventorySerializerTest.scala From bitcoin-s-spv-node with MIT License | 5 votes |
package org.bitcoins.spvnode.serializers.messages.data

import org.bitcoins.core.crypto.DoubleSha256Digest
import org.bitcoins.core.util.BitcoinSUtil
import org.bitcoins.spvnode.messages.MsgTx
import org.scalatest.{FlatSpec, MustMatchers}

class RawInventorySerializerTest extends FlatSpec with MustMatchers {

  //from bitcoin developer reference example section
  //https://bitcoin.org/en/developer-reference#inv
  val hex = "01000000de55ffd709ac1f5dc509a0925d0b1fc442ca034f224732e429081da1b621f55a"

  "RawInventorySerializer" must "read an inventory object from its hex representation" in {
    val inventory = RawInventorySerializer.read(hex)
    inventory.typeIdentifier must be (MsgTx)
    inventory.hash must be (DoubleSha256Digest(BitcoinSUtil.decodeHex("de55ffd709ac1f5dc509a0925d0b1fc442ca034f224732e429081da1b621f55a")))
  }

  it must "write an inventory object to its serialized format" in {
    val inventory = RawInventorySerializer.read(hex)
    RawInventorySerializer.write(inventory) must be (hex)
  }
}
Example 128
Source File: RawGetHeadersMessageSerializerTest.scala From bitcoin-s-spv-node with MIT License | 5 votes |
package org.bitcoins.spvnode.serializers.messages.data import org.bitcoins.core.crypto.DoubleSha256Digest import org.bitcoins.core.number.UInt64 import org.bitcoins.core.protocol.CompactSizeUInt import org.bitcoins.spvnode.util.TestUtil import org.bitcoins.spvnode.versions.ProtocolVersion70002 import org.scalatest.{FlatSpec, MustMatchers} class RawGetHeadersMessageSerializerTest extends FlatSpec with MustMatchers { val hex = TestUtil.rawGetHeadersMsg "RawGetHeadersMessageSerializer" must "read a hex string representing a GetHeaderMessage" in { val getHeadersMessage = RawGetHeadersMessageSerializer.read(hex) getHeadersMessage.version must be (ProtocolVersion70002) getHeadersMessage.hashCount must be (CompactSizeUInt(UInt64(31),1)) getHeadersMessage.hashes.length must be (31) getHeadersMessage.hashStop must be (DoubleSha256Digest("0000000000000000000000000000000000000000000000000000000000000000")) } it must "write a GetHeaderMessage" in { val getHeadersMessage = RawGetHeadersMessageSerializer.read(hex) RawGetHeadersMessageSerializer.write(getHeadersMessage) must be (hex) } }
Example 129
Source File: RawHeadersMessageSerializerTest.scala From bitcoin-s-spv-node with MIT License | 5 votes |
package org.bitcoins.spvnode.serializers.messages.data import org.bitcoins.core.crypto.DoubleSha256Digest import org.bitcoins.core.number.{UInt32, UInt64} import org.bitcoins.core.protocol.CompactSizeUInt import org.bitcoins.core.util.{BitcoinSLogger, BitcoinSUtil} import org.scalatest.{FlatSpec, MustMatchers} class RawHeadersMessageSerializerTest extends FlatSpec with MustMatchers with BitcoinSLogger { //from this example //https://bitcoin.org/en/developer-reference#headers val hex = "01" + "02000000" + "b6ff0b1b1680a2862a30ca44d346d9e8910d334beb48ca0c0000000000000000" + "9d10aa52ee949386ca9385695f04ede270dda20810decd12bc9b048aaab31471" + "24d95a54" + "30c31b18" + "fe9f0864" + "00" "RawHeadersMessageSerializer" must "deserialize a list of block headers" in { val headersMsg = RawHeadersMessageSerializer.read(hex) val header = headersMsg.headers.head headersMsg.count must be (CompactSizeUInt(UInt64.one,1)) header.previousBlockHash must be (DoubleSha256Digest("b6ff0b1b1680a2862a30ca44d346d9e8910d334beb48ca0c0000000000000000")) header.merkleRootHash must be (DoubleSha256Digest("9d10aa52ee949386ca9385695f04ede270dda20810decd12bc9b048aaab31471")) header.time must be (UInt32(1415239972)) header.nBits must be (UInt32(BitcoinSUtil.flipEndianness("30c31b18"))) header.nonce must be (UInt32(BitcoinSUtil.flipEndianness("fe9f0864"))) } it must "read then write a HeaderMessage" in { val headersMsg = RawHeadersMessageSerializer.read(hex) RawHeadersMessageSerializer.write(headersMsg) must be (hex) } it must "read the first two block headers from testnet3" in { val hex = "020100000043497fd7f826957108f4a30fd9cec3aeba79972084e90ead01ea330900000000bac8b0fa927c0ac8234287e33c5f74d38d354820e24756ad709d7038fc5f31f020e7494dffff001d03e4b672000100000006128e87be8b1b4dea47a7247d5528d2702c96826c7a648497e773b800000000e241352e3bec0a95a6217e10c3abb54adfa05abb12c126695595580fb92e222032e7494dffff001d00d2353400" val headersMsg = RawHeadersMessageSerializer.read(hex) val first = headersMsg.headers.head logger.debug("Headers: " + headersMsg.headers) first.previousBlockHash.hex must be (BitcoinSUtil.flipEndianness("000000000933ea01ad0ee984209779baaec3ced90fa3f408719526f8d77f4943")) first.hash.hex must be (BitcoinSUtil.flipEndianness("00000000b873e79784647a6c82962c70d228557d24a747ea4d1b8bbe878e1206")) first.merkleRootHash.hex must be (BitcoinSUtil.flipEndianness("f0315ffc38709d70ad5647e22048358dd3745f3ce3874223c80a7c92fab0c8ba")) logger.debug("Second header: " + headersMsg.headers(1)) val second = headersMsg.headers(1) second.previousBlockHash.hex must be (BitcoinSUtil.flipEndianness("00000000b873e79784647a6c82962c70d228557d24a747ea4d1b8bbe878e1206")) second.hash.hex must be (BitcoinSUtil.flipEndianness("000000006c02c8ea6e4ff69651f7fcde348fb9d557a06e6957b65552002a7820")) second.merkleRootHash.hex must be (BitcoinSUtil.flipEndianness("20222eb90f5895556926c112bb5aa0df4ab5abc3107e21a6950aec3b2e3541e2")) } }
Example 130
Source File: RawNotFoundMessageSerializerTest.scala From bitcoin-s-spv-node with MIT License | 5 votes |
package org.bitcoins.spvnode.serializers.messages.data import org.bitcoins.core.number.UInt64 import org.bitcoins.core.protocol.CompactSizeUInt import org.scalatest.{FlatSpec, MustMatchers} class RawNotFoundMessageSerializerTest extends FlatSpec with MustMatchers { //according to the developer reference, the format for inventory messages and //not found messages are the same //from bitcoin developer reference //https://bitcoin.org/en/developer-reference#inv val hex = "0201000000de55ffd709ac1f5dc509a0925d0b1fc442ca034f224732e429081da1b621f55a" + "0100000091d36d997037e08018262978766f24b8a055aaf1d872e94ae85e9817b2c68dc7" "RawNotFoundMessageSerializer" must "read a not found message from its hex representation" in { val notFoundMessage = RawNotFoundMessageSerializer.read(hex) notFoundMessage.inventoryCount must be (CompactSizeUInt(UInt64(2),1)) notFoundMessage.inventories.size must be (2) } it must "write a not found message and get its original hex representation back" in { val notFoundMessage = RawNotFoundMessageSerializer.read(hex) RawNotFoundMessageSerializer.write(notFoundMessage) must be (hex) } }
Example 131
Source File: RawTransactionMessageSerializerTest.scala From bitcoin-s-spv-node with MIT License | 5 votes |
package org.bitcoins.spvnode.serializers.messages.data import org.bitcoins.core.util.BitcoinSUtil import org.bitcoins.spvnode.util.TestUtil import org.scalatest.{FlatSpec, MustMatchers} class RawTransactionMessageSerializerTest extends FlatSpec with MustMatchers { "RawTransactionMessageSerializer" must "read a TransactionMessage from a sequence of bytes" in { val txMessage = RawTransactionMessageSerializer.read(TestUtil.rawTransaction) txMessage.transaction.txId.hex must be (BitcoinSUtil.flipEndianness("44e504f5b7649d215be05ad9f09026dee95201244a3b218013c504a6a49a26ff")) } it must "write a TransactionMessage to its hex format" in { val txMessage = RawTransactionMessageSerializer.read(TestUtil.rawTransaction) RawTransactionMessageSerializer.write(txMessage) must be (TestUtil.rawTransaction) } }
Example 132
Source File: RawNetworkHeaderSerializerTest.scala From bitcoin-s-spv-node with MIT License | 5 votes |
package org.bitcoins.spvnode.serializers.headers import org.bitcoins.core.number.UInt32 import org.bitcoins.core.util.{BitcoinSLogger, BitcoinSUtil} import org.bitcoins.spvnode.messages.NetworkPayload import org.bitcoins.spvnode.util.TestUtil import org.scalatest.{FlatSpec, MustMatchers} class RawNetworkHeaderSerializerTest extends FlatSpec with MustMatchers with BitcoinSLogger { val hex = "f9beb4d976657261636b000000000000000000005df6e0e2" "RawMessageHeaderSerializer" must "read hex string into a message header" in { //this example is from this section in the bitcoin developer reference //https://bitcoin.org/en/developer-reference#message-headers val messageHeader = RawNetworkHeaderSerializer.read(hex) //this is the mainnet id BitcoinSUtil.encodeHex(messageHeader.network) must be ("f9beb4d9") messageHeader.commandName must be ("verack") messageHeader.payloadSize must be (UInt32.zero) BitcoinSUtil.encodeHex(messageHeader.checksum) must be ("5df6e0e2") } it must "write an object that was just read and get the original input" in { val messageHeader = RawNetworkHeaderSerializer.read(hex) messageHeader.hex must be (hex) } it must "read a network header from a node on the network" in { val hex = TestUtil.rawNetworkMessage.take(48) val header = RawNetworkHeaderSerializer.read(hex) BitcoinSUtil.encodeHex(header.network) must be ("0B110907".toLowerCase) header.commandName.size must be (NetworkPayload.versionCommandName.size) header.commandName must be (NetworkPayload.versionCommandName) header.payloadSize must be (UInt32(102)) BitcoinSUtil.encodeHex(header.checksum) must be ("2f6743da") } }
Example 133
Source File: BlockHeaderStoreTest.scala From bitcoin-s-spv-node with MIT License | 5 votes |
package org.bitcoins.spvnode.store import org.bitcoins.core.gen.BlockchainElementsGenerator import org.scalatest.{BeforeAndAfter, FlatSpec, MustMatchers} class BlockHeaderStoreTest extends FlatSpec with MustMatchers with BeforeAndAfter { val testFile = new java.io.File("src/test/resources/block_header.dat") "BlockHeaderStore" must "write and then read a block header from the database" in { val blockHeader = BlockchainElementsGenerator.blockHeader.sample.get BlockHeaderStore.append(Seq(blockHeader),testFile) val headersFromFile = BlockHeaderStore.read(testFile) headersFromFile must be (Seq(blockHeader)) } it must "write one blockheader to the file, then append another header to the file, then read them both" in { val blockHeader1 = BlockchainElementsGenerator.blockHeader.sample.get val blockHeader2 = BlockchainElementsGenerator.blockHeader.sample.get BlockHeaderStore.append(Seq(blockHeader1),testFile) val headersFromFile1 = BlockHeaderStore.read(testFile) headersFromFile1 must be (Seq(blockHeader1)) BlockHeaderStore.append(Seq(blockHeader2),testFile) val headersFromFile2 = BlockHeaderStore.read(testFile) headersFromFile2 must be (Seq(blockHeader1, blockHeader2)) } after { testFile.delete() } }
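BlockHeaderStoreTest above writes to a fixed path under src/test/resources and deletes the file after each test, so it assumes that directory exists and is writable from the build's working directory. If that assumption is undesirable, the testFile field can be swapped for a JVM temporary file; a small drop-in sketch relying only on java.io.File:

// Sketch only: create the header file in the system temp directory instead of
// src/test/resources, so the test does not depend on the working directory layout.
val testFile: java.io.File = java.io.File.createTempFile("block_header", ".dat")
testFile.deleteOnExit()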
Example 134
Source File: CryptoTest.scala From nodejs with Apache License 2.0 | 5 votes |
package io.scalajs.nodejs.crypto import org.scalatest.{FunSpec, MustMatchers} class CryptoTest extends FunSpec with MustMatchers { describe("Crypto") { val text = "Hello World" it("should be able to create an MD5 hash from a string") { val hasher = Crypto.createHash("md5") hasher.update(text) val buffer = hasher.digest() assert(buffer.toHexString == "b10a8db164e0754105b7a99be72e3fe5") } it("should be able to create a SHA256 hash from a string") { val hasher = Crypto.createHash("sha256") hasher.update(text) val buffer = hasher.digest() assert(buffer.toHexString == "a591a6d40bf420404a011733cfb7b190d62c65bf0bcda32b57b277d9ad9f146e") } it("should be able to hash passwords with pbkdf2Sync") { val computedHash = Crypto.pbkdf2Sync("this-is-a-password", "random-salt", 10000, 512, "sha512").toString("base64") val expectedHash = "nzaxkuDuglFxKDMtr566d1mlr7kZV5y1j5PWpU9E/R9cpZz5q8pyYgeuTWADj2DhBF6koAsikPOac81lOHgK+tgiIYy6YRBd0SylsewT3QR5CelagTY1nrKougkJi95TmyKplJIO9M2szDqMhX5eT0yY2eVLnv9R0SCjd3qunvbVJmOvfEOgetPCwu5oUfRGeX/2JUlE6g3l80b252tVhzKUATeFMAKKdl448FzYdt/vWJRq2dt1ActjDV/C9RpWMNHBw10cLS3/ivSVXnjNzANArDSOAdzIh2lnrauoEtIULqlKFImx4vk2B7Pt4Wg5+ouOGWbW8ZeU1zOqAQOlDSOggxEs+fOJvOrFZdVwyZ15UYX2gRnHc+aNT2gQd+HKwCJMrDbuxz9rYQe7SGfcgeT6vxz2ZLnZ2E5SIWP5QIhm+CboYypDCGh7O5FwBHoJCSEJW3mFu/pA01Hwz11ORS6UeD/z29k546YZRa0jrzD5dLJQE+Rc72cVJv05VLs+U30b4NHcmmjmdqoT/vfxto9XM+atN7D10+dNne59YmbL9TQyvDBnxwHIUoXXGDp5OLjjtlh2A/AFu4VtF/vhRolyvSnMfQznJnEBmDkIiW/V1qwI0TevOheG04ERHtFU5eGUYt7ofPAlkUrrJxEmuwSyVoApuI+lI5CaKD4dke0=" computedHash mustEqual expectedHash } it("should be able to encrypt and decrypt text from a private key (AES-256-ctr)") { val alg = "aes-256-ctr" val key = "this-is-a-private-key" val stringToEncrypt = "text-to-encrypt" val cipher = Crypto.createCipher(alg, key) val encrypted = cipher.update(stringToEncrypt, "utf8", "base64") val finalEncryptedValue = encrypted + cipher.`final`("base64") val decipher = Crypto.createDecipher(alg, key) val decrypted = decipher.update(finalEncryptedValue, "base64", "utf8") val finalDecryptedValue = decrypted + decipher.`final`("utf8") finalEncryptedValue mustNot equal(stringToEncrypt) finalDecryptedValue must equal(stringToEncrypt) } } }
Example 135
Source File: CouchbaseConfigValidatorSpec.scala From akka-persistence-couchbase with Apache License 2.0 | 5 votes |
package com.lightbend.lagom.internal.persistence.couchbase import akka.actor.ActorSystem import akka.event.Logging import akka.testkit.EventFilter import com.typesafe.config.ConfigFactory import org.scalatest.{BeforeAndAfterAll, MustMatchers, WordSpec} import scala.concurrent.duration._ import scala.concurrent.Await class MyException extends RuntimeException("MyException") class CouchbaseConfigValidatorSpec extends WordSpec with MustMatchers with BeforeAndAfterAll { val akkaTestLogging = ConfigFactory.parseString("akka.loggers = [akka.testkit.TestEventListener]") implicit val system = ActorSystem("test", akkaTestLogging) val log = Logging(system, classOf[CouchbaseConfigValidatorSpec]) override def afterAll = Await.result(system.terminate(), Duration.Inf) "CouchbaseConfigValidator" should { "detect when bucket is not set" in { val config = ConfigFactory.parseString("""some.config.setting = 1""".stripMargin) EventFilter .error("Configuration for [test.bucket] must be set in application.conf ", occurrences = 1) .intercept { CouchbaseConfigValidator.validateBucket("test", config, log) } } "detect when bucket is set to null" in { val config = ConfigFactory.parseString("""testpath1.bucket = null""".stripMargin) EventFilter .error("Configuration for [testpath1.bucket] must be set in application.conf ", occurrences = 1) .intercept { CouchbaseConfigValidator.validateBucket("testpath1", config, log) } } "pass when bucket is specified" in { val config = ConfigFactory.parseString("""sample.path.bucket = bucketname""".stripMargin) // expect only one "another error" in the log EventFilter.error(occurrences = 1).intercept { CouchbaseConfigValidator.validateBucket("sample.path", config, log) log.error("another error") } } } }
Example 136
Source File: TypeFlattenerIntegrationTest.scala From play-swagger with MIT License | 5 votes |
package de.zalando.apifirst import java.io.File import de.zalando.apifirst.util.ScalaPrinter import org.scalatest.{FunSpec, MustMatchers} import scala.io.Source class TypeFlattenerIntegrationTest extends FunSpec with MustMatchers { val expectation_path = "play-scala-generator/src/test/scala/model/" val prefix = "resources." import de.zalando.model._ val plainModels = Seq[WithModel]( additional_properties_yaml, basic_auth_api_yaml, basic_extension_yaml, basic_polymorphism_yaml, cross_spec_references_yaml, echo_api_yaml, error_in_array_yaml, expanded_polymorphism_yaml, form_data_yaml, full_petstore_api_yaml, hackweek_yaml, heroku_petstore_api_yaml, instagram_api_yaml, minimal_api_yaml, nakadi_yaml, nested_arrays_yaml, nested_arrays_validation_yaml, nested_objects_yaml, nested_objects_validation_yaml, nested_options_yaml, nested_options_validation_yaml, numbers_validation_yaml, options_yaml, security_api_yaml, simple_petstore_api_yaml, split_petstore_api_yaml, string_formats_yaml, string_formats_validation_yaml, type_deduplication_yaml, uber_api_yaml ) describe("TypeFlattener") { plainModels.foreach { model => testTypeFlattener(model) } } def testTypeFlattener(ast: WithModel): Unit = { val name = ScalaPrinter.nameFromModel(ast) it(s"should flatten API model $name") { val scalaModel = TypeNormaliser.flatten(ast.model) val expected = asInFile(name, ".scala") clean(ScalaPrinter.asScala(name, scalaModel)) mustBe clean(expected) } } def asInFile(name: String, suffix: String): String = { val expectedFile = new File(expectation_path, prefix + name + suffix) if (expectedFile.canRead) { val src = Source.fromFile(expectedFile) val result = src.getLines().mkString("\n") src.close() result } else "" } def clean(str: String): String = str.split("\n").map(_.trim).filter(_.nonEmpty).mkString("\n") }
Example 137
Source File: ScalaNameTest.scala From play-swagger with MIT License | 5 votes |
package de.zalando.apifirst

import de.zalando.apifirst.ScalaName._
import de.zalando.apifirst.naming.dsl._
import org.scalatest.{FunSpec, MustMatchers}

class ScalaNameTest extends FunSpec with MustMatchers {

  it("must correctly capitalize names") {
    ("one" / "two" / "three").names mustBe (("one", "Two", "three"))
    ("ONE" / "TWO" / "THREE").names mustBe (("one", "TWO", "tHREE"))
    ("OnE" / "TwO" / "ThReE").names mustBe (("one", "TwO", "thReE"))
  }

  it("must correctly recognize short names") {
    ("one" / "two").names mustBe (("one", "Two", "two"))
  }

  it("must correctly escape scala names") {
    ("catch" / "if" / "match").names mustBe (("`catch`", "If", "`match`"))
  }

  it("must be able to produce import statements") {
    ("java.util" / "date").qualifiedName("", "") mustBe (("java.util", "Date"))
  }

  it("must correctly concat names") {
    ("definitions" / "Example" / "nestedArrays" / "Opt" / "Arr:").names mustBe (("definitions", "Example", "arr_esc"))
  }
}
Example 138
Source File: PathTest.scala From play-swagger with MIT License | 5 votes |
package de.zalando.apifirst import de.zalando.apifirst.naming.Path import org.scalatest.{FunSpec, MustMatchers} class PathTest extends FunSpec with MustMatchers { describe("Path") { it("should convert the root path") { val root = Path("/") root.asSwagger mustBe "/" root.prepend("/echo/").asSwagger mustBe "/echo/" root.prepend("/echo").asSwagger mustBe "/echo/" } it("should convert absolute segments") { val a = Path("/a") a.asSwagger mustBe "/a" a.prepend("/echo/").asSwagger mustBe "/echo/a" a.prepend("/echo").asSwagger mustBe "/echo/a" } it("should convert relative segments") { val a = Path("a") a.asSwagger mustBe "/a" a.prepend("/echo/").asSwagger mustBe "/echo/a" a.prepend("/echo").asSwagger mustBe "/echo/a" } it("should convert absolute segments with trailing slash") { val a = Path("/a/") a.asSwagger mustBe "/a/" a.prepend("/echo/").asSwagger mustBe "/echo/a/" a.prepend("/echo").asSwagger mustBe "/echo/a/" } it("should convert nested path segments") { val a = Path("a/b") a.asSwagger mustBe "/a/b" a.prepend("/echo/").asSwagger mustBe "/echo/a/b" a.prepend("/echo").asSwagger mustBe "/echo/a/b" } it("should convert in-path parameters") { val a = Path("/{a}") a.asSwagger mustBe "/{a}" a.prepend("/echo/").asSwagger mustBe "/echo/{a}" a.prepend("/echo").asSwagger mustBe "/echo/{a}" a.interpolated mustBe "/${toPath(a)}" a.prepend("/echo/").interpolated mustBe "/echo/${toPath(a)}" a.prepend("/echo").interpolated mustBe "/echo/${toPath(a)}" a.asPlay mustBe "/:a" a.prepend("/echo/").asPlay mustBe "/echo/:a" a.prepend("/echo").asPlay mustBe "/echo/:a" a.asMethod mustBe "byA" a.prepend("/echo/").asMethod mustBe "echoByA" a.prepend("/echo").asMethod mustBe "echoByA" } it("should convert in-path parameters with trailing slash") { val a = Path("/a/{a}/") a.asSwagger mustBe "/a/{a}/" a.prepend("/echo/").asSwagger mustBe "/echo/a/{a}/" a.prepend("/echo").asSwagger mustBe "/echo/a/{a}/" a.interpolated mustBe "/a/${toPath(a)}/" a.prepend("/echo/").interpolated mustBe "/echo/a/${toPath(a)}/" a.prepend("/echo").interpolated mustBe "/echo/a/${toPath(a)}/" a.asPlay mustBe "/a/:a/" a.prepend("/echo/").asPlay mustBe "/echo/a/:a/" a.prepend("/echo").asPlay mustBe "/echo/a/:a/" a.asMethod mustBe "aByA" a.prepend("/echo/").asMethod mustBe "echoAByA" a.prepend("/echo").asMethod mustBe "echoAByA" } it("should convert multiple in-path parameters") { val a = Path("/a/{b}/{c}/d/{e}") a.asSwagger mustBe "/a/{b}/{c}/d/{e}" a.prepend("/echo/").asSwagger mustBe "/echo/a/{b}/{c}/d/{e}" a.prepend("/echo").asSwagger mustBe "/echo/a/{b}/{c}/d/{e}" a.interpolated mustBe "/a/${toPath(b)}/${toPath(c)}/d/${toPath(e)}" a.prepend("/echo/").interpolated mustBe "/echo/a/${toPath(b)}/${toPath(c)}/d/${toPath(e)}" a.prepend("/echo").interpolated mustBe "/echo/a/${toPath(b)}/${toPath(c)}/d/${toPath(e)}" a.asPlay mustBe "/a/:b/:c/d/:e" a.prepend("/echo/").asPlay mustBe "/echo/a/:b/:c/d/:e" a.prepend("/echo").asPlay mustBe "/echo/a/:b/:c/d/:e" a.asMethod mustBe "aByBByCDByE" a.prepend("/echo/").asMethod mustBe "echoAByBByCDByE" a.prepend("/echo").asMethod mustBe "echoAByBByCDByE" } } }
Example 139
Source File: ReferenceTest.scala From play-swagger with MIT License | 5 votes |
package de.zalando.apifirst

import de.zalando.apifirst.Domain._
import de.zalando.apifirst.naming._
import de.zalando.apifirst.naming.dsl._
import org.scalatest.{FunSpec, MustMatchers}

class ReferenceTest extends FunSpec with MustMatchers {

  describe("Reference") {

    it("can be created from absolute URI strings, optionally containing pointer fragments") {
      Reference("file:/swagger.yaml") mustBe Reference("file:/swagger.yaml")
      Reference("http://goo.gl/swagger.yaml") mustBe Reference("http://goo.gl/swagger.yaml")
      Reference("file:/swagger.yaml#/foo/bar") mustBe Reference("file:/swagger.yaml#/foo/bar")
    }

    it("can be created containing pointer fragments identifying a path segment") {
      ("{foo}" / "{bar}").parent mustBe Reference("{foo}")
    }

    it("must be able to append pointer tokens") {
      val base = Reference("file:/swagger.yaml")
      base / "foo" mustBe "file:/swagger.yaml" / "foo"
      base / "foo" / "bar" mustBe "file:/swagger.yaml" / "foo" / "bar"
    }

    it("must be able to append pointers") {
      val base = Reference("file:/swagger.yaml")
      val foo = Reference("foo")
      val bar = Reference("bar")
      base / foo mustBe "file:/swagger.yaml" / "foo"
      base / foo / bar mustBe "file:/swagger.yaml" / "foo" / "bar"
      base / foo / "" / bar mustBe "file:/swagger.yaml" / "foo" / "" / "bar"
    }

    it("must be able to prepend pointer tokens") {
      val reference = "file:/swagger.yaml" / "bar"
      reference.prepend("foo") mustBe "foo" / "file:/swagger.yaml" / "bar"
    }

    it("must return a pointers parent reference or itself if no parent pointer reference exists") {
      val base = Reference("file:/swagger.yaml")
      (base / "foo" / "bar").parent mustBe "file:/swagger.yaml" / "foo"
      (base / "foo").parent mustBe Reference("file:/swagger.yaml")
      base.parent mustBe Reference("")
    }

    it("must ignore starting # while comparing references") {
      val one = TypeDef(Reference("/definitions/ErrorModel"), Seq(
        new Field(Reference("/definitions/ErrorModel/message"), Str(None, TypeMeta(None))),
        new Field(Reference("/definitions/ErrorModel/code"), Intgr(TypeMeta(None)))), TypeMeta(None))
      val two = TypeDef(Reference("#/definitions/ErrorModel"), Seq(
        new Field(Reference("#/definitions/ErrorModel/message"), Str(None, TypeMeta(None))),
        new Field(Reference("#/definitions/ErrorModel/code"), Intgr(TypeMeta(None)))), TypeMeta(None))
      // the check implied by the test name: equality must hold despite the leading '#'
      one mustBe two
    }
  }
}
Example 140
Source File: Rfc3339UtilTest.scala From play-swagger with MIT License | 5 votes |
package de.zalando.play.controllers import org.joda.time.{DateTime, DateTimeZone} import org.scalatest.{FunSpec, MustMatchers} class Rfc3339UtilTest extends FunSpec with MustMatchers { val dtz = DateTimeZone.UTC val date = new DateTime(1451911387284L, dtz) describe("Rfc3339UtilTest") { it("should parse RFC3339 DateTime") { Rfc3339Util.parseDateTime("2007-05-01T15:43:26-00:00").withZone(dtz).toString mustBe "2007-05-01T15:43:26.000Z" Rfc3339Util.parseDateTime("2007-05-01T15:43:26.3-00:00").withZone(dtz).toString mustBe "2007-05-01T15:43:26.300Z" Rfc3339Util.parseDateTime("2007-05-01T15:43:26.3452-01:00").withZone(dtz).toString mustBe "2007-05-01T16:43:26.345Z" Rfc3339Util.parseDateTime("2007-05-01T15:43:26.3452+01:00").withZone(dtz).toString mustBe "2007-05-01T14:43:26.345Z" Rfc3339Util.parseDateTime("2007-05-01T15:43:26.3452+00:00").withZone(dtz).toString mustBe "2007-05-01T15:43:26.345Z" Rfc3339Util.parseDateTime("2007-05-01T15:43:26.3-00:00").withZone(dtz).toString mustBe "2007-05-01T15:43:26.300Z" Rfc3339Util.parseDateTime("2007-05-01T15:43:26+00:00").withZone(dtz).toString mustBe "2007-05-01T15:43:26.000Z" } it("should parse RFC3339 Date") { Rfc3339Util.parseDate("2007-05-01").toString mustBe "2007-05-01" Rfc3339Util.parseDate("2008-05-01").toString mustBe "2008-05-01" Rfc3339Util.parseDate("2007-08-01").toString mustBe "2007-08-01" Rfc3339Util.parseDate("2007-05-08").toString mustBe "2007-05-08" } it("should write DateTime") { Rfc3339Util.writeDateTime(date.withZone(dtz)) mustBe "2016-01-04T12:43:07.284000+0000" } it("should write Date") { Rfc3339Util.writeDate(date.toLocalDate) mustBe "2016-01-04" } } }
Example 141
Source File: ParseVendorExtensionsTest.scala From play-swagger with MIT License | 5 votes |
package de.zalando.swagger import java.io.File import com.fasterxml.jackson.core.JsonParseException import com.fasterxml.jackson.dataformat.yaml.snakeyaml.parser.ParserException import de.zalando.apifirst.Application.ApiCall import de.zalando.apifirst.Http.{GET, POST, PUT} import org.scalatest.{FunSpec, MustMatchers} class ParseVendorExtensionsTest extends FunSpec with MustMatchers with ExpectedResults { val ok = new File(resourcesPath + "extensions/extensions.ok.yaml") val nok = new File(resourcesPath + "extensions/extensions.nok.yaml") val hypermediaOk = new File(resourcesPath + "extensions/hypermedia.ok.yaml") val hypermediaNOk1 = new File(resourcesPath + "extensions/hypermedia.nok1.yaml") val hypermediaNOk2 = new File(resourcesPath + "extensions/hypermedia.nok2.yaml") val errorMapping = new File(resourcesPath + "extensions/error_mapping.yaml") describe("The swagger parser") { it("should read valid vendor extensions") { implicit val (uri, swagger) = StrictYamlParser.parse(ok) swagger.info.vendorExtensions contains "x-info-extension" mustBe true swagger.paths("/").vendorExtensions contains "x-path-extension" mustBe true swagger.paths("/").get.vendorExtensions contains "x-operation-extension" mustBe true swagger.paths("/").get.responses("200").vendorExtensions contains "x-response-extension" mustBe true swagger.tags.head.vendorExtensions contains "x-tag-extension" mustBe true swagger.securityDefinitions("internalApiKey").vendorExtensions contains "x-security-extension" mustBe true } it("should reject invalid vendor extensions") { intercept[JsonParseException] { StrictYamlParser.parse(nok) }.getClass mustBe classOf[JsonParseException] } it("should read hypermedia definitions") { implicit val (uri, swagger) = StrictYamlParser.parse(hypermediaOk) val expected = Map("resource created" -> Map("resource updated" -> Map("condition" -> "some rule to show the transition"), "subresource added" -> null), "resource updated" -> Map("subresource added" -> Map("condition" -> ""), "self" -> Map("condition" -> "non-empty rule")), "resource deleted" -> Map("self" -> null), "subresource added" -> Map("resource updated" -> null, "self" -> null, "resource deleted" -> null)) swagger.transitions.nonEmpty mustBe true swagger.transitions mustEqual expected swagger.paths("/").get.responses("200").targetState mustEqual Some("resource created") swagger.paths("/").get.responses("default").targetState mustEqual None } it("should reject hypermedia definitions without well-formed definition") { val exception = intercept[JsonParseException] { StrictYamlParser.parse(hypermediaNOk1) } exception.getMessage mustEqual "Malformed transition definitions" } it("should reject hypermedia definitions with incorrect initial state") { intercept[ParserException] { StrictYamlParser.parse(hypermediaNOk2) }.getClass mustBe classOf[ParserException] } it("should read error mappings and assign right preference to them") { val (uri, model) = StrictYamlParser.parse(errorMapping) val ast = ModelConverter.fromModel(errorMapping.toURI, model, Option(errorMapping)) val expectedForPUT = Map( "404" -> List(classOf[java.util.NoSuchElementException]), "403" -> List(classOf[java.lang.SecurityException]), "405" -> List(classOf[java.lang.IllegalStateException]), "400" -> List(classOf[java.util.NoSuchElementException]) ) val expectedForPOST = Map( "403" -> List(classOf[java.lang.SecurityException]), "404" -> List(classOf[java.util.NoSuchElementException]), "405" -> List(classOf[java.lang.IllegalStateException]) ) ast.calls.foreach { case ApiCall(POST, _, 
_, _, _, mapping, _, _, _) => mapping must contain theSameElementsAs expectedForPOST case ApiCall(PUT, _, _, _, _, mapping, _, _, _) => mapping must contain theSameElementsAs expectedForPUT } } } }
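The error-mapping assertions above use ScalaTest's aggregation matcher `must contain theSameElementsAs`, which ignores ordering. A minimal, self-contained sketch of that idiom (an assumed toy example, independent of the swagger parser and its ApiCall model):

import org.scalatest.{FunSpec, MustMatchers}

// Stand-alone illustration of the aggregation matcher; the maps below are
// placeholder data, not the parser's real error mappings.
class AggregationMatcherSketch extends FunSpec with MustMatchers {
  describe("contain theSameElementsAs") {
    it("matches the same key/value pairs regardless of ordering") {
      val produced = Map("404" -> "NoSuchElementException", "403" -> "SecurityException")
      val expected = Map("403" -> "SecurityException", "404" -> "NoSuchElementException")
      produced must contain theSameElementsAs expected
    }
  }
}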
Example 142
Source File: HypermediaConverterTest.scala From play-swagger with MIT License | 5 votes |
package de.zalando.swagger import java.io.File import de.zalando.apifirst.Hypermedia.State import de.zalando.swagger.strictModel.SwaggerModel import org.scalatest.{FunSpec, MustMatchers} class HypermediaConverterTest extends FunSpec with MustMatchers with ExpectedResults { override val expectationsFolder = super.expectationsFolder + "hypermedia/" val exampleFixtures = new File(resourcesPath + "extensions").listFiles describe("Strict Swagger Parser hypermedia converter") { exampleFixtures.filter(_.getName.startsWith("hypermedia.ok")).foreach { file => testTransitionsConverter(file) testStateDefinitions(file) } } def testTransitionsConverter(file: File): Unit = { it(s"should parse the yaml swagger file ${file.getName} with hypermedia information") { val (base, model) = StrictYamlParser.parse(file) model mustBe a[SwaggerModel] val ast = ModelConverter.fromModel(base, model, Some(file)) val hypermedia = ast.stateTransitions val expected = asInFile(file, "hypermedia") val media = State.toDot(hypermedia).mkString("\n") if (expected.isEmpty && media.nonEmpty) dump(media, file, "hypermedia") clean(media) mustBe clean(expected) } } def testStateDefinitions(file: File): Unit = { it(s"should parse the yaml swagger file ${file.getName} with state name information") { val (base, model) = StrictYamlParser.parse(file) model mustBe a[SwaggerModel] val ast = ModelConverter.fromModel(base, model, Some(file)) val targetStates = ast.calls.map(_.targetStates) val expected = asInFile(file, "states") val media = targetStates.mkString("\n") if (expected.isEmpty && media.nonEmpty) dump(media, file, "states") clean(media) mustBe clean(expected) } } }
Example 143
Source File: SecurityConverterIntegrationTest.scala From play-swagger with MIT License | 5 votes |
package de.zalando.swagger import java.io.File import de.zalando.swagger.strictModel.SwaggerModel import org.scalatest.{FunSpec, MustMatchers} class SecurityConverterIntegrationTest extends FunSpec with MustMatchers with ExpectedResults { override val expectationsFolder = super.expectationsFolder + "security_definitions/" val fixtures = new File(resourcesPath + "examples").listFiles describe("Swagger Security Converter") { fixtures.filter(_.getName.endsWith(".yaml")).foreach { file => testSecurityConverter(file) } } def testSecurityConverter(file: File): Unit = { it(s"should convert security definitions from ${file.getName}") { val (base, model) = StrictYamlParser.parse(file) model mustBe a[SwaggerModel] val securityDefs = SecurityConverter.convertDefinitions(model.securityDefinitions) val fullResult = securityDefs.mkString("\n") val expected = asInFile(file, "types") if (expected.isEmpty) dump(fullResult, file, "types") clean(fullResult) mustBe clean(expected) } } }
Example 144
Source File: StrictParseExamplesTest.scala From play-swagger with MIT License | 5 votes |
package de.zalando.swagger import java.io.File import java.net.URI import de.zalando.swagger.strictModel.SwaggerModel import org.scalatest.{FunSpec, MustMatchers} class StrictParseExamplesTest extends FunSpec with MustMatchers with ExpectedResults { val fixtures = new File(resourcesPath + "examples").listFiles ++ new File(resourcesPath + "schema_examples").listFiles describe("Strict Swagger Parser") { fixtures.filter(_.getName.endsWith(".yaml")).foreach { file => it(s"should parse the yaml swagger file ${file.getName} as specification") { val result = StrictYamlParser.parse(file) result._1 mustBe a [URI] result._2 mustBe a [SwaggerModel] } } } }
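StrictParseExamplesTest relies on the `mustBe a [...]` type matcher to check the parse result's runtime types. A hedged sketch of that matcher with placeholder types (not the swagger model):

import org.scalatest.{FunSpec, MustMatchers}

class TypeMatcherSketch extends FunSpec with MustMatchers {
  trait Animal
  case class Dog(name: String) extends Animal

  it("asserts the runtime type of a value") {
    val pet: Animal = Dog("Rex")
    pet mustBe a[Dog]      // passes: pet is a Dog at runtime
    pet mustBe an[Animal]  // also passes: Dog extends Animal
  }
}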
Example 145
Source File: SecurityDefinitionDeserializerTest.scala From play-swagger with MIT License | 5 votes |
package de.zalando.swagger import java.io.File import de.zalando.swagger.strictModel._ import org.scalatest.{MustMatchers, FunSpec} class SecurityDefinitionDeserializerTest extends FunSpec with MustMatchers with ExpectedResults { val file = new File(resourcesPath + "examples/security.api.yaml") describe("SecurityDefinitionDeserializer") { it(s"should parse security definitions in the ${file.getName}") { val (uri, model) = StrictYamlParser.parse(file) val result = model.securityDefinitions result.size mustBe 6 result("petstoreImplicit") mustBe a[Oauth2ImplicitSecurity] result("githubAccessCode") mustBe a[Oauth2AccessCodeSecurity] result("petstorePassword") mustBe a[Oauth2PasswordSecurity] result("justBasicStuff") mustBe a[BasicAuthenticationSecurity] result("petstoreApplication") mustBe a[Oauth2ApplicationSecurity] result("internalApiKey") mustBe a[ApiKeySecurity] } } }
Example 146
Source File: TypeConverterTest.scala From play-swagger with MIT License | 5 votes |
package de.zalando.swagger import java.io.File import de.zalando.swagger.strictModel.SwaggerModel import org.scalatest.{FunSpec, MustMatchers} class TypeConverterTest extends FunSpec with MustMatchers with ExpectedResults { override val expectationsFolder = super.expectationsFolder + "types/" val modelFixtures = new File(resourcesPath + "model").listFiles val exampleFixtures = new File(resourcesPath + "examples").listFiles describe("Strict Swagger Parser model") { modelFixtures.filter(_.getName.endsWith(".yaml")).foreach { file => testTypeConverter(file) } } describe("Strict Swagger Parser examples") { exampleFixtures.filter(_.getName.endsWith(".yaml")).foreach { file => testTypeConverter(file) } } def testTypeConverter(file: File): Unit = { it(s"should parse the yaml swagger file ${file.getName} as specification") { val (base, model) = StrictYamlParser.parse(file) model mustBe a[SwaggerModel] val typeDefs = ModelConverter.fromModel(base, model, Some(file)).typeDefs val typeMap = typeDefs map { case (k, v) => k -> ("\n\t" + de.zalando.apifirst.util.ShortString.toShortString("\t\t")(v)) } val typesStr = typeMap.toSeq.sortBy(_._1.parts.size).map(p => p._1 + " ->" + p._2).mkString("\n") val expected = asInFile(file, "types") if (expected.isEmpty) dump(typesStr, file, "types") clean(typesStr) mustBe clean(expected) } } }
Example 147
Source File: RuleGeneratorTest.scala From play-swagger with MIT License | 5 votes |
package de.zalando.play.compiler import de.zalando.apifirst.Application.StrictModel import de.zalando.apifirst.naming.Path import org.scalatest.{FunSpec, MustMatchers} import play.routes.compiler.{DynamicPart, StaticPart} class RuleGeneratorTest extends FunSpec with MustMatchers { implicit val model = StrictModel(Nil, Map.empty, Map.empty, Map.empty, "/base/", None, Map.empty, Map.empty) val routes = Map( "/" -> Nil ,"/a/b/c/d" -> List(StaticPart("a/b/c/d")) ,"/a/b/c/d/" -> List(StaticPart("a/b/c/d/")) ,"/a/{b}/c/{d}" -> List(StaticPart("a/"), DynamicPart("b", """[^/]+""",true), StaticPart("/c/"), DynamicPart("d", """[^/]+""",true)) ,"/{a}/{b}/{c}/{d}/" -> List(DynamicPart("a", """[^/]+""",true), StaticPart("/"), DynamicPart("b", """[^/]+""",true), StaticPart("/"), DynamicPart("c", """[^/]+""",true), StaticPart("/"), DynamicPart("d", """[^/]+""",true), StaticPart("/")) ,"/{a}/b/{c}/d/" -> List(DynamicPart("a", """[^/]+""",true), StaticPart("/b/"), DynamicPart("c", """[^/]+""",true), StaticPart("/d/")) ) describe("RuleGeneratorTest") { routes.foreach { case (path, expected) => it(s"should parse $path as expected") { val result = RuleGenerator.convertPath(Path(path)).parts result must contain theSameElementsInOrderAs expected } } } }
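Both RuleGeneratorTest variants assert order with `must contain theSameElementsInOrderAs`. A minimal sketch of that matcher on plain sequences (assumed example, unrelated to the Play routes compiler):

import org.scalatest.{FunSpec, MustMatchers}

class OrderedElementsSketch extends FunSpec with MustMatchers {
  it("checks membership and ordering together") {
    val parts = List("a/", "{b}", "/c/", "{d}")
    parts must contain theSameElementsInOrderAs Seq("a/", "{b}", "/c/", "{d}")
    // Reordering the expected sequence would fail, unlike theSameElementsAs.
  }
}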
Example 148
Source File: ScalaMarshallersGeneratorIntegrationTest.scala From play-swagger with MIT License | 5 votes |
package de.zalando.apifirst.generators import de.zalando.ExpectedResults import de.zalando.model.WithModel import org.scalatest.{FunSpec, MustMatchers} class ScalaMarshallersGeneratorIntegrationTest extends FunSpec with MustMatchers with ExpectedResults { override val expectationsFolder = super.expectationsFolder + "marshallers/" describe("ScalaGenerator should generate marshallers") { examples.foreach { file => testScalaMarshallersGenerator(file) } } def testScalaMarshallersGenerator(ast: WithModel): Unit = { val name = nameFromModel(ast) it(s"from model $name") { val model = ast.model val scalaModel = new ScalaGenerator(model).playScalaMarshallers(name, ast.model.packageName.getOrElse(name)) val expected = asInFile(name, "scala") if (expected.isEmpty) dump(scalaModel, name, "scala") clean(scalaModel) mustBe clean(expected) } } }
Example 149
Source File: ScalaValidatorsGeneratorIntegrationTest.scala From play-swagger with MIT License | 5 votes |
package de.zalando.apifirst.generators import de.zalando.ExpectedResults import de.zalando.model.WithModel import org.scalatest.{FunSpec, MustMatchers} class ScalaValidatorsGeneratorIntegrationTest extends FunSpec with MustMatchers with ExpectedResults { override val expectationsFolder = super.expectationsFolder + "validation/" describe("ScalaGenerator should generate play validators") { (model ++ examples ++ validations).foreach { ast => testScalaModelGenerator(ast) } } def testScalaModelGenerator(ast: WithModel): Unit = { val name = nameFromModel(ast) it(s"from model $name") { val model = ast.model val scalaModel = new ScalaGenerator(model).playValidators(name, ast.model.packageName.getOrElse(name)) val expected = asInFile(name, "scala") if (expected.isEmpty) dump(scalaModel, name, "scala") clean(scalaModel) mustBe clean(expected) } } }
Example 150
Source File: ScalaPlayTestsGeneratorIntegrationTest.scala From play-swagger with MIT License | 5 votes |
package de.zalando.apifirst.generators import de.zalando.ExpectedResults import de.zalando.model.WithModel import org.scalatest.{FunSpec, MustMatchers} class ScalaPlayTestsGeneratorIntegrationTest extends FunSpec with MustMatchers with ExpectedResults { override val expectationsFolder = super.expectationsFolder + "tests/" describe("ScalaGenerator should generate tests") { (examples ++ model ++ validations).foreach { file => testScalaFormParserGenerator(file) } } def testScalaFormParserGenerator(ast: WithModel): Unit = { val name = nameFromModel(ast) it(s"from model $name") { val model = ast.model val scalaModel = new ScalaGenerator(model).playScalaTests(name, ast.model.packageName.getOrElse(name)) val expected = asInFile(name, "scala") if (expected.isEmpty) dump(scalaModel, name, "scala") clean(scalaModel) mustBe clean(expected) } } }
Example 151
Source File: ScalaControllerGeneratorIntegrationTest.scala From play-swagger with MIT License | 5 votes |
package de.zalando.apifirst.generators import de.zalando.ExpectedResults import de.zalando.model.WithModel import org.scalatest.{FunSpec, MustMatchers} class ScalaControllerGeneratorIntegrationTest extends FunSpec with MustMatchers with ExpectedResults { override val expectationsFolder = super.expectationsFolder + "controllers/" describe("ScalaSecurityGenerator should generate controlers") { (model ++ examples).foreach { ast => testScalaControllerGenerator(ast) } } describe("ScalaSecurityGenerator should generate controller bases") { (model ++ examples).foreach { ast => testScalaBaseControllerGenerator(ast) } } def testScalaControllerGenerator(ast: WithModel): Unit = { val name = nameFromModel(ast) it(s"from model $name") { val model = ast.model val expected = asInFile(name, "scala") val scalaModel = new ScalaGenerator(model).playScalaControllers(name, ast.model.packageName.getOrElse(name), expected) if (expected.isEmpty) dump(scalaModel, name, "scala") clean(scalaModel) mustBe clean(expected) } } def testScalaBaseControllerGenerator(ast: WithModel): Unit = { val name = nameFromModel(ast) it(s"from model $name") { val model = ast.model val scalaModel = new ScalaGenerator(model).playScalaControllerBases(name, ast.model.packageName.getOrElse(name)) val expected = asInFile(name, "base.scala") if (expected.isEmpty) dump(scalaModel, name, "base.scala") clean(scalaModel) mustBe clean(expected) } } }
Example 152
Source File: ScalaFormParserGeneratorIntegrationTest.scala From play-swagger with MIT License | 5 votes |
package de.zalando.apifirst.generators import de.zalando.ExpectedResults import de.zalando.model.WithModel import org.scalatest.{FunSpec, MustMatchers} class ScalaFormParserGeneratorIntegrationTest extends FunSpec with MustMatchers with ExpectedResults { override val expectationsFolder = super.expectationsFolder + "form_parsers/" describe("ScalaGenerator should generate a form parser") { examples.foreach { file => testScalaFormParserGenerator(file) } } def testScalaFormParserGenerator(ast: WithModel): Unit = { val name = nameFromModel(ast) it(s"from model $name") { val model = ast.model val scalaModel = new ScalaGenerator(model).playScalaFormParsers(name, ast.model.packageName.getOrElse(name)) val expected = asInFile(name, "scala") if (expected.isEmpty) dump(scalaModel, name, "scala") clean(scalaModel) mustBe clean(expected) } } }
Example 153
Source File: ScalaTestDataGeneratorIntegrationTest.scala From play-swagger with MIT License | 5 votes |
package de.zalando.apifirst.generators import de.zalando.ExpectedResults import de.zalando.model.WithModel import org.scalatest.{FunSpec, MustMatchers} class ScalaTestDataGeneratorIntegrationTest extends FunSpec with MustMatchers with ExpectedResults { override val expectationsFolder = super.expectationsFolder + "test_data/" describe("ScalaGenerator should generate a test data generators") { (examples ++ model).foreach { file => testScalaFormParserGenerator(file) } } def testScalaFormParserGenerator(ast: WithModel): Unit = { val name = nameFromModel(ast) it(s"from model $name") { val model = ast.model val scalaModel = new ScalaGenerator(model).generateGenerators(name, ast.model.packageName.getOrElse(name)) val expected = asInFile(name, "scala") if (expected.isEmpty) dump(scalaModel, name, "scala") clean(scalaModel) mustBe clean(expected) } } }
Example 154
Source File: ScalaModelGeneratorIntegrationTest.scala From play-swagger with MIT License | 5 votes |
package de.zalando.apifirst.generators import de.zalando.ExpectedResults import de.zalando.model.WithModel import org.scalatest.{FunSpec, MustMatchers} class ScalaModelGeneratorIntegrationTest extends FunSpec with MustMatchers with ExpectedResults { override val expectationsFolder = super.expectationsFolder + "model/" describe("ScalaGenerator should generate scala model") { (model ++ examples ++ validations).foreach { ast => testScalaModelGenerator(ast) } } def testScalaModelGenerator(ast: WithModel): Unit = { val name = nameFromModel(ast) it(s"from model $name") { val model = ast.model val scalaModel = new ScalaGenerator(model).generateModel(name, ast.model.packageName.getOrElse(name)) val expected = asInFile(name, "scala") if (expected.isEmpty) dump(scalaModel, name, "scala") clean(scalaModel) mustBe clean(expected) } } }
Example 155
Source File: ScalaSecurityGeneratorIntegrationTest.scala From play-swagger with MIT License | 5 votes |
package de.zalando.apifirst.generators import de.zalando.ExpectedResults import de.zalando.model.WithModel import org.scalatest.{FunSpec, MustMatchers} class ScalaSecurityGeneratorIntegrationTest extends FunSpec with MustMatchers with ExpectedResults { override val expectationsFolder = super.expectationsFolder + "security/" describe("ScalaSecurityGenerator should generate security plumbing files") { examples.foreach { ast => testScalaSecurityGenerator(ast) } } describe("ScalaSecurityGenerator should generate security helper files") { examples.foreach { ast => testScalaSecurityExtractorsGenerator(ast) } } def testScalaSecurityGenerator(ast: WithModel): Unit = { val name = nameFromModel(ast) it(s"from model $name") { val model = ast.model val scalaModel = new ScalaGenerator(model).playScalaSecurity(name, ast.model.packageName.getOrElse(name)) val expected = asInFile(name, "scala") if (expected.isEmpty) dump(scalaModel, name, "scala") clean(scalaModel) mustBe clean(expected) } } def testScalaSecurityExtractorsGenerator(ast: WithModel): Unit = { val name = nameFromModel(ast) it(s"from model $name") { val model = ast.model val scalaModel = new ScalaGenerator(model).playScalaSecurityExtractors(name, ast.model.packageName.getOrElse(name)) val expected = asInFile(name, "extractor.scala") if (expected.isEmpty) dump(scalaModel, name, "extractor.scala") clean(scalaModel) mustBe clean(expected) } } }
Example 156
Source File: SparkEsBulkWriterSpec.scala From Spark2Elasticsearch with Apache License 2.0 | 5 votes |
package com.github.jparkie.spark.elasticsearch import com.github.jparkie.spark.elasticsearch.conf.{ SparkEsMapperConf, SparkEsWriteConf } import com.github.jparkie.spark.elasticsearch.sql.{ SparkEsDataFrameMapper, SparkEsDataFrameSerializer } import com.holdenkarau.spark.testing.SharedSparkContext import org.apache.spark.sql.types.{ LongType, StringType, StructField, StructType } import org.apache.spark.sql.{ Row, SQLContext } import org.scalatest.{ MustMatchers, WordSpec } class SparkEsBulkWriterSpec extends WordSpec with MustMatchers with SharedSparkContext { val esServer = new ElasticSearchServer() override def beforeAll(): Unit = { super.beforeAll() esServer.start() } override def afterAll(): Unit = { esServer.stop() super.afterAll() } "SparkEsBulkWriter" must { "execute write() successfully" in { esServer.createAndWaitForIndex("test_index") val sqlContext = new SQLContext(sc) val inputSparkEsWriteConf = SparkEsWriteConf( bulkActions = 10, bulkSizeInMB = 1, concurrentRequests = 0, flushTimeoutInSeconds = 1 ) val inputMapperConf = SparkEsMapperConf( esMappingId = Some("id"), esMappingParent = None, esMappingVersion = None, esMappingVersionType = None, esMappingRouting = None, esMappingTTLInMillis = None, esMappingTimestamp = None ) val inputSchema = StructType( Array( StructField("id", StringType, true), StructField("parent", StringType, true), StructField("version", LongType, true), StructField("routing", StringType, true), StructField("ttl", LongType, true), StructField("timestamp", StringType, true), StructField("value", LongType, true) ) ) val inputData = sc.parallelize { Array( Row("TEST_ID_1", "TEST_PARENT_1", 1L, "TEST_ROUTING_1", 86400000L, "TEST_TIMESTAMP_1", 1L), Row("TEST_ID_1", "TEST_PARENT_2", 2L, "TEST_ROUTING_1", 86400000L, "TEST_TIMESTAMP_1", 2L), Row("TEST_ID_1", "TEST_PARENT_3", 3L, "TEST_ROUTING_1", 86400000L, "TEST_TIMESTAMP_1", 3L), Row("TEST_ID_1", "TEST_PARENT_4", 4L, "TEST_ROUTING_1", 86400000L, "TEST_TIMESTAMP_1", 4L), Row("TEST_ID_1", "TEST_PARENT_5", 5L, "TEST_ROUTING_1", 86400000L, "TEST_TIMESTAMP_1", 5L), Row("TEST_ID_5", "TEST_PARENT_6", 6L, "TEST_ROUTING_1", 86400000L, "TEST_TIMESTAMP_1", 6L), Row("TEST_ID_6", "TEST_PARENT_7", 7L, "TEST_ROUTING_1", 86400000L, "TEST_TIMESTAMP_1", 7L), Row("TEST_ID_7", "TEST_PARENT_8", 8L, "TEST_ROUTING_1", 86400000L, "TEST_TIMESTAMP_1", 8L), Row("TEST_ID_8", "TEST_PARENT_9", 9L, "TEST_ROUTING_1", 86400000L, "TEST_TIMESTAMP_1", 9L), Row("TEST_ID_9", "TEST_PARENT_10", 10L, "TEST_ROUTING_1", 86400000L, "TEST_TIMESTAMP_1", 10L), Row("TEST_ID_10", "TEST_PARENT_11", 11L, "TEST_ROUTING_1", 86400000L, "TEST_TIMESTAMP_1", 11L) ) } val inputDataFrame = sqlContext.createDataFrame(inputData, inputSchema) val inputDataIterator = inputDataFrame.rdd.toLocalIterator val inputSparkEsBulkWriter = new SparkEsBulkWriter[Row]( esIndex = "test_index", esType = "test_type", esClient = () => esServer.client, sparkEsSerializer = new SparkEsDataFrameSerializer(inputSchema), sparkEsMapper = new SparkEsDataFrameMapper(inputMapperConf), sparkEsWriteConf = inputSparkEsWriteConf ) inputSparkEsBulkWriter.write(null, inputDataIterator) val outputGetResponse = esServer.client.prepareGet("test_index", "test_type", "TEST_ID_1").get() outputGetResponse.isExists mustEqual true outputGetResponse.getSource.get("parent").asInstanceOf[String] mustEqual "TEST_PARENT_5" outputGetResponse.getSource.get("version").asInstanceOf[Integer] mustEqual 5 outputGetResponse.getSource.get("routing").asInstanceOf[String] mustEqual "TEST_ROUTING_1" 
outputGetResponse.getSource.get("ttl").asInstanceOf[Integer] mustEqual 86400000 outputGetResponse.getSource.get("timestamp").asInstanceOf[String] mustEqual "TEST_TIMESTAMP_1" outputGetResponse.getSource.get("value").asInstanceOf[Integer] mustEqual 5 } } }
Example 157
Source File: SparkEsTransportClientProxySpec.scala From Spark2Elasticsearch with Apache License 2.0 | 5 votes |
package com.github.jparkie.spark.elasticsearch.transport import com.github.jparkie.spark.elasticsearch.conf.SparkEsTransportClientConf import com.github.jparkie.spark.elasticsearch.util.SparkEsException import org.scalatest.{ MustMatchers, WordSpec } class SparkEsTransportClientProxySpec extends WordSpec with MustMatchers { "SparkEsTransportClientProxy" must { "prohibit close() call" in { val inputClientConf = SparkEsTransportClientConf( transportAddresses = Seq("127.0.0.1"), transportPort = 9300, transportSettings = Map.empty[String, String] ) val inputSparkEsTransportClientManager = new SparkEsTransportClientManager {} val inputSparkEsTransportClient = inputSparkEsTransportClientManager.getTransportClient(inputClientConf) val inputSparkEsTransportClientProxy = new SparkEsTransportClientProxy(inputSparkEsTransportClient) val outputException = intercept[SparkEsException] { inputSparkEsTransportClientProxy.close() } outputException.getMessage must include("close() is not supported in SparkEsTransportClientProxy. Please close with SparkEsTransportClientManager.") } } }
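The proxy spec combines `intercept` with the `must include` string matcher to assert on an exception message. A stand-alone sketch of the same pattern, using a plain RuntimeException rather than SparkEsException:

import org.scalatest.{MustMatchers, WordSpec}

class InterceptSketch extends WordSpec with MustMatchers {
  "intercept" must {
    "capture the thrown exception so its message can be matched" in {
      val thrown = intercept[RuntimeException] {
        throw new RuntimeException("close() is not supported in this proxy")
      }
      thrown.getMessage must include("not supported")
    }
  }
}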
Example 158
Source File: SparkEsTransportClientManagerSpec.scala From Spark2Elasticsearch with Apache License 2.0 | 5 votes |
package com.github.jparkie.spark.elasticsearch.transport import com.github.jparkie.spark.elasticsearch.conf.SparkEsTransportClientConf import org.scalatest.{ MustMatchers, WordSpec } class SparkEsTransportClientManagerSpec extends WordSpec with MustMatchers { "SparkEsTransportClientManager" must { "maintain one unique TransportClient in internalTransportClients" in { val inputClientConf = SparkEsTransportClientConf( transportAddresses = Seq("127.0.0.1"), transportPort = 9300, transportSettings = Map.empty[String, String] ) val inputSparkEsTransportClientManager = new SparkEsTransportClientManager {} inputSparkEsTransportClientManager.getTransportClient(inputClientConf) inputSparkEsTransportClientManager.getTransportClient(inputClientConf) inputSparkEsTransportClientManager.internalTransportClients.size mustEqual 1 inputSparkEsTransportClientManager.closeTransportClient(inputClientConf) } "return a SparkEsTransportClientProxy when calling getTransportClient()" in { val inputClientConf = SparkEsTransportClientConf( transportAddresses = Seq("127.0.0.1"), transportPort = 9300, transportSettings = Map.empty[String, String] ) val inputSparkEsTransportClientManager = new SparkEsTransportClientManager {} val outputClient = inputSparkEsTransportClientManager.getTransportClient(inputClientConf) outputClient.getClass mustEqual classOf[SparkEsTransportClientProxy] inputSparkEsTransportClientManager.closeTransportClient(inputClientConf) } "evict TransportClient after calling closeTransportClient" in { val inputClientConf = SparkEsTransportClientConf( transportAddresses = Seq("127.0.0.1"), transportPort = 9300, transportSettings = Map.empty[String, String] ) val inputSparkEsTransportClientManager = new SparkEsTransportClientManager {} inputSparkEsTransportClientManager.getTransportClient(inputClientConf) inputSparkEsTransportClientManager.closeTransportClient(inputClientConf) inputSparkEsTransportClientManager.internalTransportClients.size mustEqual 0 } "returns buildTransportSettings() successfully" in { val inputClientConf = SparkEsTransportClientConf( transportAddresses = Seq("127.0.0.1"), transportPort = 9300, transportSettings = Map( "TEST_KEY_1" -> "TEST_VALUE_1", "TEST_KEY_2" -> "TEST_VALUE_2", "TEST_KEY_3" -> "TEST_VALUE_3" ) ) val inputSparkEsTransportClientManager = new SparkEsTransportClientManager {} val outputSettings = inputSparkEsTransportClientManager.buildTransportSettings(inputClientConf) outputSettings.get("TEST_KEY_1") mustEqual "TEST_VALUE_1" outputSettings.get("TEST_KEY_2") mustEqual "TEST_VALUE_2" outputSettings.get("TEST_KEY_3") mustEqual "TEST_VALUE_3" } "returns buildTransportClient() successfully" in { val inputClientConf = SparkEsTransportClientConf( transportAddresses = Seq("127.0.0.1"), transportPort = 9300, transportSettings = Map.empty[String, String] ) val inputSparkEsTransportClientManager = new SparkEsTransportClientManager {} val outputSettings = inputSparkEsTransportClientManager.buildTransportSettings(inputClientConf) val outputClient = inputSparkEsTransportClientManager.buildTransportClient(inputClientConf, outputSettings) val outputHost = outputClient.transportAddresses().get(0).getHost val outputPort = outputClient.transportAddresses().get(0).getPort outputHost mustEqual "127.0.0.1" outputPort mustEqual 9300 outputClient.close() } } }
Example 159
Source File: PackageSpec.scala From Spark2Elasticsearch with Apache License 2.0 | 5 votes |
package com.github.jparkie.spark.elasticsearch.sql import com.holdenkarau.spark.testing.SharedSparkContext import org.apache.spark.sql.SQLContext import org.scalatest.{ MustMatchers, WordSpec } class PackageSpec extends WordSpec with MustMatchers with SharedSparkContext { "Package com.github.jparkie.spark.elasticsearch.sql" must { "lift DataFrame into SparkEsDataFrameFunctions" in { val sqlContext = new SQLContext(sc) val inputData = Seq( ("TEST_VALUE_1", 1), ("TEST_VALUE_2", 2), ("TEST_VALUE_3", 3) ) val outputDataFrame = sqlContext.createDataFrame(inputData) .toDF("key", "value") // If sparkContext is available, DataFrame was lifted into SparkEsDataFrameFunctions. outputDataFrame.sparkContext } } }
Example 160
Source File: SparkEsMapperConfSpec.scala From Spark2Elasticsearch with Apache License 2.0 | 5 votes |
package com.github.jparkie.spark.elasticsearch.conf import org.apache.spark.SparkConf import org.scalatest.{ MustMatchers, WordSpec } class SparkEsMapperConfSpec extends WordSpec with MustMatchers { "SparkEsMapperConf" must { "be extracted from SparkConf successfully" in { val inputSparkConf = new SparkConf() .set("es.mapping.id", "TEST_VALUE_1") .set("es.mapping.parent", "TEST_VALUE_2") .set("es.mapping.version", "TEST_VALUE_3") .set("es.mapping.version.type", "TEST_VALUE_4") .set("es.mapping.routing", "TEST_VALUE_5") .set("es.mapping.ttl", "TEST_VALUE_6") .set("es.mapping.timestamp", "TEST_VALUE_7") val expectedSparkEsMapperConf = SparkEsMapperConf( esMappingId = Some("TEST_VALUE_1"), esMappingParent = Some("TEST_VALUE_2"), esMappingVersion = Some("TEST_VALUE_3"), esMappingVersionType = Some("TEST_VALUE_4"), esMappingRouting = Some("TEST_VALUE_5"), esMappingTTLInMillis = Some("TEST_VALUE_6"), esMappingTimestamp = Some("TEST_VALUE_7") ) val outputSparkEsMapperConf = SparkEsMapperConf.fromSparkConf(inputSparkConf) outputSparkEsMapperConf mustEqual expectedSparkEsMapperConf } "extract CONSTANT_FIELD_REGEX successfully" in { val inputString = "<TEST_VALUE_1>" val expectedString = "TEST_VALUE_1" val outputString = inputString match { case SparkEsMapperConf.CONSTANT_FIELD_REGEX(outputString) => outputString case _ => fail("CONSTANT_FIELD_REGEX failed.") } outputString mustEqual expectedString } } }
Example 161
Source File: SparkEsTransportClientConfSpec.scala From Spark2Elasticsearch with Apache License 2.0 | 5 votes |
package com.github.jparkie.spark.elasticsearch.conf import java.net.InetSocketAddress import org.apache.spark.SparkConf import org.scalatest.{ MustMatchers, WordSpec } class SparkEsTransportClientConfSpec extends WordSpec with MustMatchers { "SparkEsTransportClientConf" must { "be extracted from SparkConf successfully" in { val inputSparkConf = new SparkConf() .set("es.nodes", "127.0.0.1:9000,127.0.0.1:9001,127.0.0.1:9002") .set("es.port", "1337") val expectedSparkEsTransportClientConf = SparkEsTransportClientConf( transportAddresses = Seq("127.0.0.1:9000", "127.0.0.1:9001", "127.0.0.1:9002"), transportPort = 1337, transportSettings = Map.empty[String, String] ) val outputSparkEsTransportClientConf = SparkEsTransportClientConf.fromSparkConf(inputSparkConf) outputSparkEsTransportClientConf mustEqual expectedSparkEsTransportClientConf } "be extracted from SparkConf unsuccessfully" in { val inputSparkConf = new SparkConf() val outputException = intercept[IllegalArgumentException] { SparkEsTransportClientConf.fromSparkConf(inputSparkConf) } outputException.getMessage must include("No nodes defined in property es.nodes is in SparkConf.") } "extract transportSettings successfully" in { val inputSparkConf = new SparkConf() .set("es.nodes", "127.0.0.1:9000,127.0.0.1:9001,127.0.0.1:9002") .set("es.port", "1337") .set("es.cluster.name", "TEST_VALUE_1") .set("es.client.transport.sniff", "TEST_VALUE_2") .set("es.client.transport.ignore_cluster_name", "TEST_VALUE_3") .set("es.client.transport.ping_timeout", "TEST_VALUE_4") .set("es.client.transport.nodes_sampler_interval", "TEST_VALUE_5") val expectedSparkEsTransportClientConf = SparkEsTransportClientConf( transportAddresses = Seq("127.0.0.1:9000", "127.0.0.1:9001", "127.0.0.1:9002"), transportPort = 1337, transportSettings = Map( "cluster.name" -> "TEST_VALUE_1", "client.transport.sniff" -> "TEST_VALUE_2", "client.transport.ignore_cluster_name" -> "TEST_VALUE_3", "client.transport.ping_timeout" -> "TEST_VALUE_4", "client.transport.nodes_sampler_interval" -> "TEST_VALUE_5" ) ) val outputSparkEsTransportClientConf = SparkEsTransportClientConf.fromSparkConf(inputSparkConf) outputSparkEsTransportClientConf mustEqual expectedSparkEsTransportClientConf } "extract transportAddresses as Seq[InetSocketAddress] successfully with port secondly" in { val inputAddresses = Seq("127.0.0.1:9000", "127.0.0.1:9001", "127.0.0.1:9002") val inputPort = 1337 val expectedTransportAddresses = Seq( new InetSocketAddress("127.0.0.1", 9000), new InetSocketAddress("127.0.0.1", 9001), new InetSocketAddress("127.0.0.1", 9002) ) val outputTransportAddresses = SparkEsTransportClientConf.getTransportAddresses(inputAddresses, inputPort) outputTransportAddresses mustEqual expectedTransportAddresses } } }
Example 162
Source File: SparkEsWriteConfSpec.scala From Spark2Elasticsearch with Apache License 2.0 | 5 votes |
package com.github.jparkie.spark.elasticsearch.conf import org.apache.spark.SparkConf import org.scalatest.{ MustMatchers, WordSpec } class SparkEsWriteConfSpec extends WordSpec with MustMatchers { "SparkEsWriteConf" must { "be extracted from SparkConf successfully" in { val inputSparkConf = new SparkConf() .set("es.batch.size.entries", "1") .set("es.batch.size.bytes", "2") .set("es.batch.concurrent.request", "3") .set("es.batch.flush.timeout", "4") val expectedSparkEsWriteConf = SparkEsWriteConf( bulkActions = 1, bulkSizeInMB = 2, concurrentRequests = 3, flushTimeoutInSeconds = 4 ) val outputSparkEsWriteConf = SparkEsWriteConf.fromSparkConf(inputSparkConf) outputSparkEsWriteConf mustEqual expectedSparkEsWriteConf } } }
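The conf specs compare whole configuration case classes with `mustEqual`, which uses structural equality. A minimal sketch with a toy case class standing in for the Spark configuration types (hypothetical names):

import org.scalatest.{MustMatchers, WordSpec}

class CaseClassEqualitySketch extends WordSpec with MustMatchers {
  case class WriteConf(bulkActions: Int, bulkSizeInMB: Int)

  "case class results" must {
    "compare structurally with mustEqual" in {
      val parsed = WriteConf(bulkActions = 1, bulkSizeInMB = 2)
      parsed mustEqual WriteConf(1, 2)
    }
  }
}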
Example 163
Source File: RestKeyMiddlewareSpec.scala From pizza-auth-3 with MIT License | 5 votes |
package moe.pizza.auth.webapp.rest import org.http4s._ import org.http4s.dsl._ import org.http4s.headers.Authorization import org.http4s.util.Writer import org.scalatest.{FlatSpec, MustMatchers} class RestKeyMiddlewareSpec extends FlatSpec with MustMatchers { val emptyservice = HttpService { case req @ GET -> Root => Ok("inner service") } val svc = new RestKeyMiddleware(List("key1", "key2")).apply(emptyservice) "when wrapping a service it" should "pass through calls with valid keys" in { val r = svc .apply( new Request(uri = Uri.uri("/"), headers = Headers(new Authorization(OAuth2BearerToken("key1"))))) .run r.status must equal(Ok) } "when wrapping a service it" should "fail calls without valid keys" in { val r = svc .apply( new Request(uri = Uri.uri("/"), headers = Headers(new Authorization(OAuth2BearerToken("key3"))))) .run r.status must equal(Unauthorized) } }
Example 164
Source File: SessionManagerSpec.scala From pizza-auth-3 with MIT License | 5 votes |
package moe.pizza.auth.webapp import moe.pizza.auth.interfaces.UserDatabase import org.http4s._ import org.http4s.dsl._ import org.http4s.util.CaseInsensitiveString import org.scalatest.mock.MockitoSugar import org.scalatest.{FlatSpec, MustMatchers} import moe.pizza.auth.webapp.Utils._ class SessionManagerSpec extends FlatSpec with MustMatchers with MockitoSugar { val emptyservice = HttpService { case req @ GET -> Root => Ok(req.getSession.toString) case req @ GET -> Root / "flash" => val newsession = req.flash(Alerts.info, "this is an alert") Ok(req.getSession.toString).attachSessionifDefined(newsession) case req @ GET -> Root / "logout" => Ok(req.getSession.toString).clearSession() } val ud = mock[UserDatabase] val svc = new SessionManager("keygoeshere", ud).apply(emptyservice) "when wrapping a service it" should "add a session cookie" in { val r = svc.apply(new Request(uri = Uri.uri("/"))).run r.status must equal(Ok) val session = r.headers.get(CaseInsensitiveString("set-cookie")).get session.value.startsWith("authsession=") must equal(true) val bodytxt = EntityDecoder.decodeString(r)(Charset.`UTF-8`).run bodytxt must equal("Some(HydratedSession(List(),None,None,None))") } "when wrapping a service it" should "add a session cookie and use it to store state between calls" in { val r = svc.apply(new Request(uri = Uri.uri("/flash"))).run r.status must equal(Ok) val session = r.headers.get(CaseInsensitiveString("set-cookie")).get session.value.startsWith("authsession=") must equal(true) val cookie = session.value val r2 = svc .apply( new Request(uri = Uri.uri("/"), headers = Headers(Header("Cookie", cookie)))) .run r.status must equal(Ok) val bodytxt = EntityDecoder.decodeString(r2)(Charset.`UTF-8`).run bodytxt must equal( "Some(HydratedSession(List(Alert(info,this is an alert)),None,None,None))") } "when wrapping a service it" should "be able to remove sessions" in { val r = svc.apply(new Request(uri = Uri.uri("/logout"))).run r.status must equal(Ok) val removal = r.headers.get(CaseInsensitiveString("set-cookie")).get assert(removal.value.startsWith("authsession=;")) } }
Example 165
Source File: GraderChainSpec.scala From pizza-auth-3 with MIT License | 5 votes |
package moe.pizza.auth.adapters import moe.pizza.auth.interfaces.PilotGrader import moe.pizza.auth.models.Pilot import moe.pizza.auth.models.Pilot.Status import org.scalatest.{FlatSpec, MustMatchers} import org.scalatest.mock.MockitoSugar class GraderChainSpec extends FlatSpec with MustMatchers with MockitoSugar { "GraderChain" should "chain graders and return the first result one gives" in { val grader1 = new PilotGrader { override def grade(p: Pilot): Status.Value = Status.banned } val grader2 = new PilotGrader { override def grade(p: Pilot): Status.Value = Status.ineligible } val chain = List(grader1, grader2) val graderchain = new GraderChain(chain) val p = Pilot("bob", Pilot.Status.internal, "myalliance", "mycorp", "Bob", "[email protected]", Pilot.OM.readTree("{\"meta\": \"%s\"}".format("metafield")), List("group1", "group3"), List("123:bobkey"), List.empty) graderchain.grade(p) must equal(Status.banned) } "GraderChain" should "fall through all unclassified results" in { val grader1 = new PilotGrader { override def grade(p: Pilot): Status.Value = Status.unclassified } val grader2 = new PilotGrader { override def grade(p: Pilot): Status.Value = Status.internal } val chain = List(grader1, grader2) val graderchain = new GraderChain(chain) val p = Pilot("bob", Pilot.Status.internal, "myalliance", "mycorp", "Bob", "[email protected]", Pilot.OM.readTree("{\"meta\": \"%s\"}".format("metafield")), List("group1", "group3"), List("123:bobkey"), List.empty) graderchain.grade(p) must equal(Status.internal) } }
Example 166
Source File: EveMapDbSpec.scala From pizza-auth-3 with MIT License | 5 votes |
package moe.pizza.auth.graphdb import org.scalatest.{MustMatchers, WordSpec} import org.scalatest.mock.MockitoSugar class EveMapDbSpec extends WordSpec with MustMatchers with MockitoSugar { "EveMapDb" when { "being used" should { "do all of the normal expected things" in { val e = new EveMapDb("map-tests1") // initialise the database e.provisionIfRequired() e.withGraph { g => g.getEdgeType("gate") must not equal (null) } // fail gracefully on bad system names e.getDistanceBetweenSystemsByName("amor", "jota") must equal(None) // fail gracefully on bad system numbers e.getDistanceBetweenSystemsById(1, -42) must equal(None) // correctly find the distance between named systems e.getDistanceBetweenSystemsByName("Amarr", "Jita") must equal(Some(10)) // correctly find the distance between system ids e.getDistanceBetweenSystemsById(30000142, 30004711) must equal( Some(40)) // describe the distance between the same system and itself as 0 e.getDistanceBetweenSystemsById(30000142, 30000142) must equal(Some(0)) e.getDistanceBetweenSystemsByName("Amarr", "Amarr") must equal(Some(0)) e.cleanUp() } } } }
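EveMapDbSpec asserts on `Option` results in their wrapped form (`Some(10)`, `None`) rather than unwrapping them. A small, assumed example of the same idiom without the graph database:

import org.scalatest.{MustMatchers, WordSpec}

class OptionEqualitySketch extends WordSpec with MustMatchers {
  "Option results" must {
    "be asserted in wrapped form" in {
      def distance(a: String, b: String): Option[Int] =
        if (a == b) Some(0) else None

      distance("Amarr", "Amarr") must equal(Some(0))
      distance("Amarr", "Jita") must equal(None)
    }
  }
}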
Example 167
Source File: IqlFilterSpec.scala From pizza-auth-3 with MIT License | 5 votes |
package moe.pizza.auth.plugins.userfilters import moe.pizza.auth.models.Pilot import org.scalatest.{FlatSpec, MustMatchers} class IqlFilterSpec extends FlatSpec with MustMatchers { "IqlFilter" should "filter users using IQL" in { val iqlf = new IqlFilter val p = Pilot("bob", Pilot.Status.internal, "myalliance", "mycorp", "Bob", "[email protected]", Pilot.OM.readTree("{\"meta\": \"%s\"}".format("metafield")), List("group1", "group3"), List.empty, List.empty) val p2 = Pilot("terry", Pilot.Status.internal, "myalliance", "mycorp", "Terry", "[email protected]", Pilot.OM.readTree("{\"meta\": \"%s\"}".format("metafield")), List("group1", "group2"), List.empty, List.empty) val input = List(p, p2) iqlf.filter(input, ".uid == \"bob\"") must equal(List(p)) iqlf.filter(input, ".uid == \"terry\"") must equal(List(p2)) iqlf.filter(input, "1 == 1") must equal(List(p, p2)) iqlf.filter( input, "(.corporation == \"mycorp\") && (.alliance == \"myalliance\")") must equal( List(p, p2)) iqlf.filter( input, "(.corporation == \"mycorp\") && (.metadata.meta == \"metafield\") && (.alliance == \"myalliance\")") must equal( List(p, p2)) iqlf.filter( input, "(.corporation == \"mycorp\") && (.metadata.meta == \"metafiel\") && (.alliance == \"myalliance\")") must equal( List.empty[Pilot]) iqlf.filter( input, "(.corporation == \"mycorp\") && (\"group1\" in .authGroups)") must equal( List(p, p2)) iqlf.filter( input, "(.corporation == \"mycorp\") && (\"group2\" in .authGroups)") must equal( List(p2)) } }
Example 168
Source File: MembershipPilotGradersSpec.scala From pizza-auth-3 with MIT License | 5 votes |
package moe.pizza.auth.plugins.pilotgraders import moe.pizza.auth.models.Pilot import moe.pizza.auth.plugins.pilotgraders.MembershipPilotGraders.{ AlliancePilotGrader, CorporationPilotGrader, PublicAccessPilotGrader } import org.scalatest.{MustMatchers, WordSpec} class MembershipPilotGradersSpec extends WordSpec with MustMatchers { "CorporationPilotGrader" when { "grading" should { "grade pilots who aren't in my corp as unclassified" in { val c = new CorporationPilotGrader("mycoolcorp") val p = new Pilot("bob", Pilot.Status.unclassified, "boballiance", "bobcorp", "Bob", "none@none", Pilot.OM.createObjectNode(), List.empty[String], List("1:REF"), List.empty[String]) c.grade(p) must equal(Pilot.Status.unclassified) } "grade pilots who are in my corp as internal" in { val c = new CorporationPilotGrader("bobcorp") val p = new Pilot("bob", Pilot.Status.unclassified, "boballiance", "bobcorp", "Bob", "none@none", Pilot.OM.createObjectNode(), List.empty[String], List("1:REF"), List.empty[String]) c.grade(p) must equal(Pilot.Status.internal) } } } "AlliancePilotGrader" when { "grading" should { "grade pilots who aren't in my alliance as unclassified" in { val c = new AlliancePilotGrader("mycoolalliance") val p = new Pilot("bob", Pilot.Status.unclassified, "boballiance", "bobcorp", "Bob", "none@none", Pilot.OM.createObjectNode(), List.empty[String], List("1:REF"), List.empty[String]) c.grade(p) must equal(Pilot.Status.unclassified) } "grade pilots who are in my alliance as internal" in { val c = new AlliancePilotGrader("boballiance") val p = new Pilot("bob", Pilot.Status.unclassified, "boballiance", "bobcorp", "Bob", "none@none", Pilot.OM.createObjectNode(), List.empty[String], List("1:REF"), List.empty[String]) c.grade(p) must equal(Pilot.Status.internal) } } } "PublicAccessPilotGrader" when { "grading" should { "grade pilots as ineligible" in { val c = new PublicAccessPilotGrader val p = new Pilot("bob", Pilot.Status.unclassified, "boballiance", "bobcorp", "Bob", "none@none", Pilot.OM.createObjectNode(), List.empty[String], List("1:REF"), List.empty[String]) c.grade(p) must equal(Pilot.Status.ineligible) } } } }
Example 169
Source File: InternalWhitelistPilotGraderSpec.scala From pizza-auth-3 with MIT License | 5 votes |
package moe.pizza.auth.plugins.pilotgraders import moe.pizza.auth.models.Pilot import moe.pizza.auth.plugins.pilotgraders.MembershipPilotGraders.{AlliancePilotGrader, CorporationPilotGrader, PublicAccessPilotGrader} import moe.pizza.crestapi.CrestApi import moe.pizza.crestapi.CrestApi.{CallbackResponse, VerifyResponse} import org.scalatest.mock.MockitoSugar import org.scalatest.{MustMatchers, WordSpec} import org.mockito.Mockito._ import org.http4s.client.blaze.PooledHttp1Client import scalaz.concurrent.Task class InternalWhitelistPilotGraderSpec extends WordSpec with MustMatchers with MockitoSugar { implicit val client = PooledHttp1Client() //TODO: no mocking? "InternalWhitelistPilotGrader" when { "grading" should { "grade pilots who are in the list as internal" in { val crest = mock[CrestApi] when(crest.refresh("REF")).thenReturn(Task { new CallbackResponse("access", "type", 1000, Some("refresh")) }) when(crest.verify("access")).thenReturn(Task { new VerifyResponse(1, "Bob", "", "", "", "", "") }) val iwpg = new InternalWhitelistPilotGrader(crest, List(1L, 2L, 3L)) val p = new Pilot("bob", Pilot.Status.unclassified, "boballiance", "bobcorp", "Bob", "none@none", Pilot.OM.createObjectNode(), List.empty[String], List("1:REF"), List.empty[String]) iwpg.grade(p) must equal(Pilot.Status.internal) verify(crest).refresh("REF") verify(crest).verify("access") } "grade pilots who are not in the list as unclassified" in { val crest = mock[CrestApi] when(crest.refresh("REF")).thenReturn(Task { new CallbackResponse("access", "type", 1000, Some("refresh")) }) when(crest.verify("access")).thenReturn(Task { new VerifyResponse(1, "Bob", "", "", "", "", "") }) val iwpg = new InternalWhitelistPilotGrader(crest, List(2L, 3L)) val p = new Pilot("bob", Pilot.Status.unclassified, "boballiance", "bobcorp", "Bob", "none@none", Pilot.OM.createObjectNode(), List.empty[String], List("1:REF"), List.empty[String]) iwpg.grade(p) must equal(Pilot.Status.unclassified) verify(crest).refresh("REF") verify(crest).verify("access") } } } }
Example 170
Source File: CaldariCrestKeyGraderSpec.scala From pizza-auth-3 with MIT License | 5 votes |
package moe.pizza.auth.plugins.pilotgraders import moe.pizza.auth.models.Pilot import moe.pizza.crestapi.CrestApi import moe.pizza.crestapi.CrestApi.{CallbackResponse, VerifyResponse} import moe.pizza.eveapi.{EVEAPI, XMLApiResponse} import moe.pizza.eveapi.endpoints.Character import org.http4s.client.blaze.PooledHttp1Client import org.joda.time.DateTime import org.mockito.Mockito._ import org.scalatest.mock.MockitoSugar import org.scalatest.{MustMatchers, WordSpec} import scalaz.concurrent.Task class CaldariCrestKeyGraderSpec extends WordSpec with MustMatchers with MockitoSugar { implicit val client = PooledHttp1Client() //TODO: no mocking? "CaldariCrestKeyGrader" when { "grading" should { "grade pilots who are caldari as THE ENEMY" in { val crest = mock[CrestApi] val eveapi = mock[EVEAPI] val char = mock[Character] when(eveapi.char).thenReturn(char) when(crest.refresh("REF")).thenReturn(Task { new CallbackResponse("access", "type", 1000, Some("refresh")) }) when(crest.verify("access")).thenReturn(Task { new VerifyResponse(1, "Bob", "", "", "", "", "") }) val pilotInfo = mock[moe.pizza.eveapi.generated.char.CharacterInfo.Result] when(pilotInfo.race).thenReturn("Caldari") when(char.CharacterInfo(1)).thenReturn(Task { new XMLApiResponse(DateTime.now(), DateTime.now(), pilotInfo) }) val iwpg = new CaldariCrestKeyGrader(crest, eve = Some(eveapi)) val p = new Pilot("bob", Pilot.Status.unclassified, "boballiance", "bobcorp", "Bob", "none@none", Pilot.OM.createObjectNode(), List.empty[String], List("1:REF"), List.empty[String]) iwpg.grade(p) must equal(Pilot.Status.banned) verify(crest).refresh("REF") verify(crest).verify("access") verify(char).CharacterInfo(1) verify(pilotInfo).race } "grade pilots who are not caldari as not THE ENEMY" in { val crest = mock[CrestApi] val eveapi = mock[EVEAPI] val char = mock[Character] when(eveapi.char).thenReturn(char) when(crest.refresh("REF")).thenReturn(Task { new CallbackResponse("access", "type", 1000, Some("refresh")) }) when(crest.verify("access")).thenReturn(Task { new VerifyResponse(1, "Bob", "", "", "", "", "") }) val pilotInfo = mock[moe.pizza.eveapi.generated.char.CharacterInfo.Result] when(pilotInfo.race).thenReturn("Gallente") when(char.CharacterInfo(1)).thenReturn(Task { new XMLApiResponse(DateTime.now(), DateTime.now(), pilotInfo) }) val iwpg = new CaldariCrestKeyGrader(crest, eve = Some(eveapi)) val p = new Pilot("bob", Pilot.Status.unclassified, "boballiance", "bobcorp", "Bob", "none@none", Pilot.OM.createObjectNode(), List.empty[String], List("1:REF"), List.empty[String]) iwpg.grade(p) must equal(Pilot.Status.unclassified) verify(crest).refresh("REF") verify(crest).verify("access") verify(char).CharacterInfo(1) verify(pilotInfo).race } } } }
Example 171
Source File: WeatherForecastSpec2.scala From Scala-Reactive-Programming with MIT License | 5 votes |
package com.packt.publishing.caseclass import org.scalatest.{FreeSpec, MustMatchers} class WeatherForecastSpec2 extends FreeSpec with MustMatchers { private def createWeatherForecast(city: String = "London", date: String = "01/01/2018", hour: String = "10 AM", temperature: String = "6") = WeatherForecast(city, date, hour, temperature) "default values" in { val wf = createWeatherForecast() wf.city mustBe ("London") wf.date mustBe "01/01/2018" wf.hour mustBe "10 AM" wf.temperature mustBe "6" } "check city value" in { val wf = createWeatherForecast(city = "Hyderabad") wf.city must not be "London" wf.city mustBe "Hyderabad" } "check date value" in { val wf = createWeatherForecast(date = "06/04/2018") wf.date mustBe "06/04/2018" } "check hour value" in { val wf = createWeatherForecast(hour = "14") wf.hour mustBe "14" } "check city and temperature values" in { val wf = createWeatherForecast(city = "Hyderabad", temperature = "30") wf.city mustBe "Hyderabad" wf.temperature mustBe "30" } }
Example 173
Source File: YamlParserTest.scala From data-faker with MIT License | 5 votes |
package com.dunnhumby.datafaker import java.sql.{Date, Timestamp} import org.scalatest.{MustMatchers, WordSpec} class YamlParserTest extends WordSpec with MustMatchers { import com.dunnhumby.datafaker.YamlParser.YamlParserProtocol._ import net.jcazevedo.moultingyaml._ "YamlParser" must { "convert a YamlDate to java.sql.Date" in { val date = "1998-06-03" val string = s"""$date""".stripMargin string.parseYaml.convertTo[Date] mustBe Date.valueOf(date) } "convert a YamlDate to java.sql.Timestamp" in { val timestamp = "1998-06-03 01:23:45" val string = s"""$timestamp""".stripMargin string.parseYaml.convertTo[Timestamp] mustBe Timestamp.valueOf(timestamp) } } }
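YamlParserTest ultimately checks `java.sql.Date` and `Timestamp` values with `mustBe`. A self-contained sketch of those value comparisons without the YAML layer (assumed example):

import java.sql.{Date, Timestamp}
import org.scalatest.{MustMatchers, WordSpec}

class SqlValueEqualitySketch extends WordSpec with MustMatchers {
  "java.sql values" must {
    "compare by value with mustBe" in {
      Date.valueOf("1998-06-03") mustBe Date.valueOf("1998-06-03")
      Timestamp.valueOf("1998-06-03 01:23:45") mustBe Timestamp.valueOf("1998-06-03 01:23:45")
    }
  }
}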
Example 174
Source File: SchemaTest.scala From data-faker with MIT License | 5 votes |
package com.dunnhumby.datafaker.schema import com.dunnhumby.datafaker.schema.table.SchemaTable import com.dunnhumby.datafaker.schema.table.columns.SchemaColumnFixed import org.scalatest.{MustMatchers, WordSpec} class SchemaTest extends WordSpec with MustMatchers { import com.dunnhumby.datafaker.schema.SchemaProtocol._ import net.jcazevedo.moultingyaml._ val baseString = """tables: |- name: table1_test | rows: 100 | columns: | - name: table1_column1_test | column_type: Fixed | data_type: Int | value: 1 |- name: table2_test | rows: 200 | columns: | - name: table2_column1_test | column_type: Fixed | data_type: String | value: testing | partitions: | - table2_column1_test """.stripMargin "Schema" must { "read a Schema with tables" in { val string = baseString string.parseYaml.convertTo[Schema] mustBe { Schema(List( SchemaTable("table1_test", 100, List( SchemaColumnFixed("table1_column1_test", 1)), None), SchemaTable("table2_test", 200, List( SchemaColumnFixed("table2_column1_test", "testing")), Some(List("table2_column1_test"))))) } } } }
Example 175
Source File: SchemaColumnFixedTest.scala From data-faker with MIT License | 5 votes |
package com.dunnhumby.datafaker.schema.table.columns import java.sql.{Date, Timestamp} import org.scalatest.{MustMatchers, WordSpec} class SchemaColumnFixedTest extends WordSpec with MustMatchers { import com.dunnhumby.datafaker.schema.table.columns.SchemaColumnFixedProtocol._ import net.jcazevedo.moultingyaml._ val name = "test" val column_type = "Fixed" val baseString = s"""name: $name |column_type: $column_type """.stripMargin "SchemaColumnFixed" must { "read an Int column" in { val string = s"""$baseString |data_type: ${SchemaColumnDataType.Int} |value: 1 """.stripMargin string.parseYaml.convertTo[SchemaColumnFixed[_]] mustBe SchemaColumnFixed(name, 1) } "read a Long column" in { val string = s"""$baseString |data_type: ${SchemaColumnDataType.Long} |value: 1 """.stripMargin string.parseYaml.convertTo[SchemaColumnFixed[_]] mustBe SchemaColumnFixed(name, 1l) } "read a Float column" in { val string = s"""$baseString |data_type: ${SchemaColumnDataType.Float} |value: 1.0 """.stripMargin string.parseYaml.convertTo[SchemaColumnFixed[_]] mustBe SchemaColumnFixed(name, 1f) } "read a Double column" in { val string = s"""$baseString |data_type: ${SchemaColumnDataType.Double} |value: 1.0 """.stripMargin string.parseYaml.convertTo[SchemaColumnFixed[_]] mustBe SchemaColumnFixed(name, 1d) } "read a Date column" in { val string = s"""$baseString |data_type: ${SchemaColumnDataType.Date} |value: 1998-06-03 """.stripMargin string.parseYaml.convertTo[SchemaColumnFixed[_]] mustBe SchemaColumnFixed(name, Date.valueOf("1998-06-03")) } "read a Timestamp column" in { val string = s"""$baseString |data_type: ${SchemaColumnDataType.Timestamp} |value: 1998-06-03 01:23:45 """.stripMargin string.parseYaml.convertTo[SchemaColumnFixed[_]] mustBe SchemaColumnFixed(name, Timestamp.valueOf("1998-06-03 01:23:45")) } "read a String column" in { val string = s"""$baseString |data_type: ${SchemaColumnDataType.String} |value: test """.stripMargin string.parseYaml.convertTo[SchemaColumnFixed[_]] mustBe SchemaColumnFixed(name, "test") } "read a Boolean column" in { val string = s"""$baseString |data_type: ${SchemaColumnDataType.Boolean} |value: true """.stripMargin string.parseYaml.convertTo[SchemaColumnFixed[_]] mustBe SchemaColumnFixed(name, true) } } }