org.json4s.jackson.JsonMethods.parse Scala Examples
The following examples show how to use org.json4s.jackson.JsonMethods.parse.
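Before the project examples, a minimal standalone sketch of what parse does may help: it turns a JSON string into a JValue AST that can be navigated with \ or extracted into Scala types once an implicit Formats is in scope. The User case class below is made up for illustration.

import org.json4s._
import org.json4s.jackson.JsonMethods.parse

case class User(name: String, age: Int) // hypothetical type, for illustration only

object ParseQuickStart extends App {
  implicit val formats: Formats = DefaultFormats

  val json: JValue = parse("""{"name":"alice","age":30}""")
  println(json \ "name")               // JString(alice)
  println((json \ "age").extract[Int]) // 30
  println(json.extract[User])          // User(alice,30)
}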
Example 1
Source File: MetadataSuite.scala From multi-tenancy-spark with Apache License 2.0
package org.apache.spark.sql.catalyst.util

import org.json4s.jackson.JsonMethods.parse

import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.types.{Metadata, MetadataBuilder}

class MetadataSuite extends SparkFunSuite {

  val baseMetadata = new MetadataBuilder()
    .putString("purpose", "ml")
    .putBoolean("isBase", true)
    .build()

  val summary = new MetadataBuilder()
    .putLong("numFeatures", 10L)
    .build()

  val age = new MetadataBuilder()
    .putString("name", "age")
    .putLong("index", 1L)
    .putBoolean("categorical", false)
    .putDouble("average", 45.0)
    .build()

  val gender = new MetadataBuilder()
    .putString("name", "gender")
    .putLong("index", 5)
    .putBoolean("categorical", true)
    .putStringArray("categories", Array("male", "female"))
    .build()

  val metadata = new MetadataBuilder()
    .withMetadata(baseMetadata)
    .putBoolean("isBase", false) // overwrite an existing key
    .putMetadata("summary", summary)
    .putLongArray("long[]", Array(0L, 1L))
    .putDoubleArray("double[]", Array(3.0, 4.0))
    .putBooleanArray("boolean[]", Array(true, false))
    .putMetadataArray("features", Array(age, gender))
    .build()

  test("metadata builder and getters") {
    assert(age.contains("summary") === false)
    assert(age.contains("index") === true)
    assert(age.getLong("index") === 1L)
    assert(age.contains("average") === true)
    assert(age.getDouble("average") === 45.0)
    assert(age.contains("categorical") === true)
    assert(age.getBoolean("categorical") === false)
    assert(age.contains("name") === true)
    assert(age.getString("name") === "age")
    assert(metadata.contains("purpose") === true)
    assert(metadata.getString("purpose") === "ml")
    assert(metadata.contains("isBase") === true)
    assert(metadata.getBoolean("isBase") === false)
    assert(metadata.contains("summary") === true)
    assert(metadata.getMetadata("summary") === summary)
    assert(metadata.contains("long[]") === true)
    assert(metadata.getLongArray("long[]").toSeq === Seq(0L, 1L))
    assert(metadata.contains("double[]") === true)
    assert(metadata.getDoubleArray("double[]").toSeq === Seq(3.0, 4.0))
    assert(metadata.contains("boolean[]") === true)
    assert(metadata.getBooleanArray("boolean[]").toSeq === Seq(true, false))
    assert(gender.contains("categories") === true)
    assert(gender.getStringArray("categories").toSeq === Seq("male", "female"))
    assert(metadata.contains("features") === true)
    assert(metadata.getMetadataArray("features").toSeq === Seq(age, gender))
  }

  test("metadata json conversion") {
    val json = metadata.json
    withClue("toJson must produce a valid JSON string") {
      parse(json)
    }
    val parsed = Metadata.fromJson(json)
    assert(parsed === metadata)
    assert(parsed.## === metadata.##)
  }
}
Example 2
Source File: OneOfSpec.scala From scalapb-json4s with Apache License 2.0
package scalapb.json4s

import com.google.protobuf.util.JsonFormat.{printer => ProtobufJavaPrinter}
import jsontest.oneof.OneOf._
import jsontest.oneof.Pair.ValueByType._
import jsontest.oneof.{Dictionary, OneOf, OneOfMessage, Pair}
import org.json4s.jackson.JsonMethods.parse
import org.scalatest.prop._
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.must.Matchers

class OneOfSpec extends AnyFlatSpec with Matchers with TableDrivenPropertyChecks {

  val examples = Table(
    ("message", "json"),
    (OneOf.defaultInstance, "{}"),
    (OneOf(Field.Empty), "{}"),
    (OneOf(Field.Primitive("")), """{"primitive":""}"""),
    (OneOf(Field.Primitive("test")), """{"primitive":"test"}"""),
    (OneOf(Field.Wrapper("")), """{"wrapper":""}"""),
    (OneOf(Field.Wrapper("test")), """{"wrapper":"test"}"""),
    (OneOf(Field.Message(OneOfMessage())), """{"message":{}}"""),
    (
      OneOf(Field.Message(OneOfMessage(Some("test")))),
      """{"message":{"field":"test"}}"""
    )
  )

  forEvery(examples) { (message: OneOf, json: String) =>
    new Printer().toJson(message) must be(parse(json))
    new Printer().toJson(message) must be(
      parse(ProtobufJavaPrinter().print(toJavaProto(message)))
    )
    new Printer().includingDefaultValueFields.toJson(message) must be(parse(json))
    new Printer().includingDefaultValueFields.toJson(message) must be(
      parse(
        ProtobufJavaPrinter()
          .includingDefaultValueFields()
          .print(toJavaProto(message))
      )
    )
  }

  "dictionary test" should "preserve zero values in one of" in {
    val message = Dictionary(Seq(Pair("myKey", Uint32Value(0))))
    new Printer().toJson(message) must be(
      parse("""{"pairs":[{"key": "myKey", "uint32Value": 0}]}""")
    )
    new Printer().includingDefaultValueFields.toJson(message) must be(
      parse("""{"pairs":[{"key": "myKey", "uint32Value": 0}]}""")
    )
  }
}
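A detail worth noting in this spec: the assertions compare parsed ASTs rather than raw JSON strings, so formatting differences in the expected JSON do not matter. A standalone sketch of why that works:

import org.json4s.jackson.JsonMethods.parse

object AstEqualityDemo extends App {
  // both strings parse to the same JValue tree, so AST comparison succeeds
  val a = parse("""{"key":"myKey","uint32Value":0}""")
  val b = parse("""{ "key" : "myKey", "uint32Value" : 0 }""")
  println(a == b) // true
}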
Example 3
Source File: JObjectParam.scala From sona with Apache License 2.0
package com.tencent.angel.sona.ml.param

import com.tencent.angel.sona.ml.util.Identifiable
import org.json4s.DefaultFormats
import org.json4s.JsonAST.JObject
import org.json4s.jackson.JsonMethods.{compact, parse, render}

class JObjectParam(parent: String, name: String, doc: String, isValid: JObject => Boolean)
  extends Param[JObject](parent, name, doc, isValid) {

  def this(parent: String, name: String, doc: String) =
    this(parent, name, doc, (value: JObject) => value != null)

  def this(parent: Identifiable, name: String, doc: String, isValid: JObject => Boolean) =
    this(parent.uid, name, doc, isValid)

  def this(parent: Identifiable, name: String, doc: String) = this(parent.uid, name, doc)

  override def w(value: JObject): ParamPair[JObject] = super.w(value)

  override def jsonEncode(value: JObject): String = {
    compact(render(value))
  }

  override def jsonDecode(json: String): JObject = {
    implicit val formats: DefaultFormats = DefaultFormats
    parse(json).asInstanceOf[JObject]
  }
}
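A plausible round trip through this param, sketched under the assumption that the JObjectParam class above (and its Param superclass) is on the classpath; the stage and key names are made up:

import org.json4s.JsonAST.JObject
import org.json4s.JsonDSL._

object JObjectParamDemo extends App {
  val param = new JObjectParam("myStage", "config", "a JSON configuration object") // hypothetical names
  val value: JObject = ("alpha" -> 0.5) ~ ("fitIntercept" -> true)
  val encoded = param.jsonEncode(value) // {"alpha":0.5,"fitIntercept":true}
  val decoded = param.jsonDecode(encoded)
  assert(decoded == value)
}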
Example 4
Source File: JsonUtil.scala From marvin-engine-executor with Apache License 2.0
package org.marvin.util

import com.fasterxml.jackson.databind.{DeserializationFeature, JsonNode, ObjectMapper}
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import com.github.fge.jsonschema.core.exceptions.ProcessingException
import com.github.fge.jsonschema.core.report.ProcessingMessage
import com.github.fge.jsonschema.main.JsonSchemaFactory
import grizzled.slf4j.Logging
import org.json4s.jackson.JsonMethods.{asJsonNode, parse}
import spray.json._

import scala.io.Source
import scala.reflect.{ClassTag, _}

object JsonUtil extends Logging {

  val jacksonMapper = new ObjectMapper()
  jacksonMapper.registerModule(DefaultScalaModule)
  jacksonMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false)

  def toJson(value: Map[Symbol, Any]): String = {
    toJson(value map { case (k, v) => k.name -> v })
  }

  def toJson(value: Any): String = {
    jacksonMapper.writeValueAsString(value)
  }

  def toMap(jsonString: String): Map[String, Any] = {
    JsonUtil.fromJson[Map[String, List[Map[String, String]]]](jsonString)
  }

  def fromJson[T: ClassTag](jsonString: String, validate: Boolean = false): T = {
    if (validate) validateJson[T](jsonString)
    jacksonMapper.readValue[T](jsonString, classTag[T].runtimeClass.asInstanceOf[Class[T]])
  }

  def validateJson[T: ClassTag](jsonString: String): Unit = {
    val className = classTag[T].runtimeClass.getSimpleName
    val schemaName = className + "Schema.json"
    try {
      val jsonSchema = Source.fromResource(schemaName).mkString
      validateJson(jsonString, jsonSchema)
    } catch {
      case e: NullPointerException =>
        info(s"File ${schemaName} not found, check your schema file")
        throw e
    }
  }

  def validateJson(jsonString: String, jsonSchema: String): Unit = {
    val schema: JsonNode = asJsonNode(parse(jsonSchema))
    val jsonToValidate: JsonNode = asJsonNode(parse(jsonString))
    val validator = JsonSchemaFactory.byDefault().getValidator

    val processingReport = validator.validate(schema, jsonToValidate)
    if (!processingReport.isSuccess) {
      val sb = new StringBuilder()
      processingReport.forEach { message: ProcessingMessage =>
        warn(message.asJson())
        sb.append(message.getMessage)
      }
      throw new ProcessingException(sb.toString)
    }
  }

  def format(jsonString: String): String = {
    jsonString.parseJson.prettyPrint
  }

  def format(jsonMap: Map[String, Any]): String = {
    this.format(this.toJson(jsonMap))
  }
}
Example 5
Source File: SQLJsonProtocolSuite.scala From Spark-2.3.1 with Apache License 2.0
package org.apache.spark.sql.execution

import org.json4s.jackson.JsonMethods.parse

import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.execution.ui.SparkListenerSQLExecutionStart
import org.apache.spark.util.JsonProtocol

class SQLJsonProtocolSuite extends SparkFunSuite {

  test("SparkPlanGraph backward compatibility: metadata") {
    val SQLExecutionStartJsonString =
      """
        |{
        |  "Event":"org.apache.spark.sql.execution.ui.SparkListenerSQLExecutionStart",
        |  "executionId":0,
        |  "description":"test desc",
        |  "details":"test detail",
        |  "physicalPlanDescription":"test plan",
        |  "sparkPlanInfo": {
        |    "nodeName":"TestNode",
        |    "simpleString":"test string",
        |    "children":[],
        |    "metadata":{},
        |    "metrics":[]
        |  },
        |  "time":0
        |}
      """.stripMargin
    val reconstructedEvent = JsonProtocol.sparkEventFromJson(parse(SQLExecutionStartJsonString))
    val expectedEvent = SparkListenerSQLExecutionStart(0, "test desc", "test detail",
      "test plan", new SparkPlanInfo("TestNode", "test string", Nil, Nil), 0)
    assert(reconstructedEvent == expectedEvent)
  }
}
Example 6
Source File: JsonVectorConverterSuite.scala From Spark-2.3.1 with Apache License 2.0
package org.apache.spark.ml.linalg

import org.json4s.jackson.JsonMethods.parse

import org.apache.spark.SparkFunSuite

class JsonVectorConverterSuite extends SparkFunSuite {

  test("toJson/fromJson") {
    val sv0 = Vectors.sparse(0, Array.empty, Array.empty)
    val sv1 = Vectors.sparse(1, Array.empty, Array.empty)
    val sv2 = Vectors.sparse(2, Array(1), Array(2.0))
    val dv0 = Vectors.dense(Array.empty[Double])
    val dv1 = Vectors.dense(1.0)
    val dv2 = Vectors.dense(0.0, 2.0)
    for (v <- Seq(sv0, sv1, sv2, dv0, dv1, dv2)) {
      val json = JsonVectorConverter.toJson(v)
      parse(json) // `json` should be a valid JSON string
      val u = JsonVectorConverter.fromJson(json)
      assert(u.getClass === v.getClass, "toJson/fromJson should preserve vector types.")
      assert(u === v, "toJson/fromJson should preserve vector values.")
    }
  }
}
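For reference, the wire format this suite round-trips should look like the JSON below ("type" 0 marks a sparse vector, 1 a dense one); a small sketch parsing it directly with json4s:

import org.json4s._
import org.json4s.jackson.JsonMethods.parse

object VectorJsonDemo extends App {
  val sparseJson = """{"type":0,"size":2,"indices":[1],"values":[2.0]}"""
  val denseJson  = """{"type":1,"values":[0.0,2.0]}"""
  println(parse(sparseJson) \ "indices") // JArray(List(JInt(1)))
  println(parse(denseJson) \ "values")   // JArray(List(JDouble(0.0), JDouble(2.0)))
}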
Example 7
Source File: JsonMatrixConverterSuite.scala From Spark-2.3.1 with Apache License 2.0
package org.apache.spark.ml.linalg

import org.json4s.jackson.JsonMethods.parse

import org.apache.spark.SparkFunSuite

class JsonMatrixConverterSuite extends SparkFunSuite {

  test("toJson/fromJson") {
    val denseMatrices = Seq(
      Matrices.dense(0, 0, Array.empty[Double]),
      Matrices.dense(1, 1, Array(0.1)),
      new DenseMatrix(3, 2, Array(0.0, 1.21, 2.3, 9.8, 9.0, 0.0), true)
    )

    val sparseMatrices = denseMatrices.map(_.toSparse) ++ Seq(
      Matrices.sparse(3, 2, Array(0, 1, 2), Array(1, 2), Array(3.1, 3.5))
    )

    for (m <- sparseMatrices ++ denseMatrices) {
      val json = JsonMatrixConverter.toJson(m)
      parse(json) // `json` should be a valid JSON string
      val u = JsonMatrixConverter.fromJson(json)
      assert(u.getClass === m.getClass, "toJson/fromJson should preserve Matrix types.")
      assert(u === m, "toJson/fromJson should preserve Matrix values.")
    }
  }
}
Example 8
Source File: OpPipelineStageReaderWriterTest.scala From TransmogrifAI with BSD 3-Clause "New" or "Revised" License
package com.salesforce.op.stages

import com.salesforce.op.features._
import com.salesforce.op.features.types._
import com.salesforce.op.stages.OpPipelineStageReaderWriter._
import com.salesforce.op.test.PassengerSparkFixtureTest
import com.salesforce.op.utils.reflection.ReflectionUtils
import com.salesforce.op.utils.spark.RichDataset._
import org.apache.spark.ml.{Model, Transformer}
import org.apache.spark.sql.types.{DataType, Metadata, MetadataBuilder}
import org.json4s.JsonAST.JValue
import org.json4s.jackson.JsonMethods.{compact, parse, pretty, render}
import org.json4s.{JArray, JObject}
import org.scalatest.FlatSpec
import org.slf4j.LoggerFactory

// TODO: consider adding a read/write test for a spark wrapped stage as well
private[stages] abstract class OpPipelineStageReaderWriterTest
  extends FlatSpec with PassengerSparkFixtureTest {

  val meta = new MetadataBuilder().putString("foo", "bar").build()
  val expectedFeaturesLength = 1
  def stage: OpPipelineStageBase with Transformer
  val expected: Array[Real]
  val hasOutputName = true

  private val log = LoggerFactory.getLogger(this.getClass)
  private lazy val savePath = tempDir + "/" + this.getClass.getSimpleName + "-" + System.currentTimeMillis()
  private lazy val writer = new OpPipelineStageWriter(stage)
  private lazy val stageJsonString: String = writer.writeToJsonString(savePath)
  private lazy val stageJson: JValue = parse(stageJsonString)
  private lazy val isModel = stage.isInstanceOf[Model[_]]
  private val FN = FieldNames

  Spec(this.getClass) should "write stage uid" in {
    log.info(pretty(stageJson))
    (stageJson \ FN.Uid.entryName).extract[String] shouldBe stage.uid
  }

  it should "write class name" in {
    (stageJson \ FN.Class.entryName).extract[String] shouldBe stage.getClass.getName
  }

  it should "write params map" in {
    val params = extractParams(stageJson).extract[Map[String, Any]]
    if (hasOutputName) {
      params should have size 4
      params.keys shouldBe Set("inputFeatures", "outputMetadata", "inputSchema", "outputFeatureName")
    } else {
      params should have size 3
      params.keys shouldBe Set("inputFeatures", "outputMetadata", "inputSchema")
    }
  }

  it should "write outputMetadata" in {
    val metadataStr = compact(render(extractParams(stageJson) \ "outputMetadata"))
    val metadata = Metadata.fromJson(metadataStr)
    metadata shouldBe stage.getMetadata()
  }

  it should "write inputSchema" in {
    val schemaStr = compact(render(extractParams(stageJson) \ "inputSchema"))
    val schema = DataType.fromJson(schemaStr)
    schema shouldBe stage.getInputSchema()
  }

  it should "write input features" in {
    val jArray = (extractParams(stageJson) \ "inputFeatures").extract[JArray]
    jArray.values should have length expectedFeaturesLength
    val obj = jArray(0).extract[JObject]
    obj.values.keys shouldBe Set("name", "isResponse", "isRaw", "uid", "typeName", "stages", "originFeatures")
  }

  it should "write model ctor args" in {
    if (stage.isInstanceOf[Model[_]]) {
      val ctorArgs = (stageJson \ FN.CtorArgs.entryName).extract[JObject]
      val (_, args) = ReflectionUtils.bestCtorWithArgs(stage)
      ctorArgs.values.keys shouldBe args.map(_._1).toSet
    }
  }

  it should "load stage correctly" in {
    val reader = new OpPipelineStageReader(stage)
    val stageLoaded = reader.loadFromJsonString(stageJsonString, path = savePath)
    stageLoaded shouldBe a[OpPipelineStageBase]
    stageLoaded shouldBe a[Transformer]
    stageLoaded.getOutput() shouldBe a[FeatureLike[_]]
    val _ = stage.asInstanceOf[Transformer].transform(passengersDataSet)
    val transformed = stageLoaded.asInstanceOf[Transformer].transform(passengersDataSet)
    transformed.collect(stageLoaded.getOutput().asInstanceOf[FeatureLike[Real]]) shouldBe expected
    stageLoaded.uid shouldBe stage.uid
    stageLoaded.operationName shouldBe stage.operationName
    stageLoaded.getInputFeatures() shouldBe stage.getInputFeatures()
    stageLoaded.getInputSchema() shouldBe stage.getInputSchema()
  }

  private def extractParams(stageJson: JValue): JValue = {
    val defaultParamsMap = stageJson \ FN.DefaultParamMap.entryName
    val paramsMap = stageJson \ FN.ParamMap.entryName
    defaultParamsMap.merge(paramsMap)
  }
}
Example 9
Source File: SparkStageParamTest.scala From TransmogrifAI with BSD 3-Clause "New" or "Revised" License
package org.apache.spark.ml

import com.salesforce.op.stages.SparkStageParam
import com.salesforce.op.test.TestSparkContext
import org.apache.spark.ml.feature.StandardScaler
import org.joda.time.DateTime
import org.json4s.JsonDSL._
import org.json4s._
import org.json4s.jackson.JsonMethods.{parse, _}
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.{BeforeAndAfterEach, FlatSpec}

@RunWith(classOf[JUnitRunner])
class SparkStageParamTest extends FlatSpec with TestSparkContext with BeforeAndAfterEach {
  import SparkStageParam._

  var savePath: String = _
  var param: SparkStageParam[StandardScaler] = _
  var stage: StandardScaler = _

  override def beforeEach(): Unit = {
    super.beforeEach()
    savePath = tempDir + "/op-stage-param-test-" + DateTime.now().getMillis
    param = new SparkStageParam[StandardScaler](parent = "test", name = "test", doc = "none")
    // by setting both to be the same, we guarantee that at least one isn't the default value
    stage = new StandardScaler().setWithMean(true).setWithStd(false)
  }

  // easier if test both at the same time
  Spec[SparkStageParam[_]] should "encode and decode properly when is set" in {
    param.savePath = Option(savePath)
    val jsonOut = param.jsonEncode(Option(stage))
    val parsed = parse(jsonOut).asInstanceOf[JObject]
    val updated = parsed ~ ("path" -> savePath) // inject path for decoding

    updated shouldBe JObject(
      "className" -> JString(stage.getClass.getName),
      "uid" -> JString(stage.uid),
      "path" -> JString(savePath)
    )
    val updatedJson = compact(updated)

    param.jsonDecode(updatedJson) match {
      case None => fail("Failed to recover the stage")
      case Some(stageRecovered) =>
        stageRecovered shouldBe a[StandardScaler]
        stageRecovered.uid shouldBe stage.uid
        stageRecovered.getWithMean shouldBe stage.getWithMean
        stageRecovered.getWithStd shouldBe stage.getWithStd
    }
  }

  it should "except out when path is empty" in {
    intercept[RuntimeException](param.jsonEncode(Option(stage))).getMessage shouldBe
      s"Path must be set before Spark stage '${stage.uid}' can be saved"
  }

  it should "have empty path if stage is empty" in {
    param.savePath = Option(savePath)
    val jsonOut = param.jsonEncode(None)
    val parsed = parse(jsonOut)
    parsed shouldBe JObject("className" -> JString(NoClass), "uid" -> JString(NoUID))
    param.jsonDecode(jsonOut) shouldBe None
  }
}
Example 10
Source File: TransientFeatureArrayParam.scala From TransmogrifAI with BSD 3-Clause "New" or "Revised" License
package com.salesforce.op.stages

import com.salesforce.op.features._
import org.apache.spark.ml.param._
import org.apache.spark.ml.util.Identifiable
import org.json4s.{DefaultFormats, Formats}
import org.json4s.JsonAST.{JArray, JValue}
import org.json4s.jackson.JsonMethods.{compact, parse, render}

import scala.util.{Failure, Success}

// Note: the class declaration was elided in the scraped snippet; a plausible
// reconstruction is shown here so the example compiles.
class TransientFeatureArrayParam(
  parent: String, name: String, doc: String, isValid: Array[TransientFeature] => Boolean
) extends Param[Array[TransientFeature]](parent, name, doc, isValid) {

  override def w(value: Array[TransientFeature]): ParamPair[Array[TransientFeature]] = super.w(value)

  override def jsonEncode(value: Array[TransientFeature]): String = {
    compact(render(JArray(value.map(_.toJson).toList)))
  }

  override def jsonDecode(json: String): Array[TransientFeature] = {
    implicit val formats: Formats = DefaultFormats // required by extract; elided in the scrape
    parse(json).extract[Array[JValue]].map { obj =>
      TransientFeature(obj) match {
        case Failure(e) => throw new RuntimeException("Failed to parse TransientFeature", e)
        case Success(v) => v
      }
    }
  }
}
Example 11
Source File: SparkStageParam.scala From TransmogrifAI with BSD 3-Clause "New" or "Revised" License
package com.salesforce.op.stages

import com.salesforce.op.stages.sparkwrappers.generic.SparkWrapperParams
import org.apache.hadoop.fs.Path
import org.apache.spark.ml.PipelineStage
import org.apache.spark.ml.param.{Param, ParamPair, Params}
import org.apache.spark.ml.util.{Identifiable, MLReader, MLWritable}
import org.apache.spark.util.SparkUtils
import org.json4s.JsonAST.{JObject, JValue}
import org.json4s.JsonDSL._
import org.json4s.jackson.JsonMethods.{compact, parse, render}
import org.json4s.{DefaultFormats, Formats, JString}

class SparkStageParam[S <: PipelineStage with Params](
  parent: String, name: String, doc: String, isValid: Option[S] => Boolean
) extends Param[Option[S]](parent, name, doc, isValid) {

  import SparkStageParam._

  // Elided in the scraped snippet: jsonDecode below reads and writes this path
  var savePath: Option[String] = None

  override def jsonDecode(jsonStr: String): Option[S] = {
    val json = parse(jsonStr)
    val uid = (json \ "uid").extractOpt[String]
    val path = (json \ "path").extractOpt[String]

    path -> uid match {
      case (None, _) | (_, None) | (_, Some(NoUID)) =>
        savePath = None
        None
      case (Some(p), Some(stageUid)) =>
        savePath = Option(p)
        val stagePath = new Path(p, stageUid).toString
        val className = (json \ "className").extract[String]
        val cls = SparkUtils.classForName(className)
        val stage = cls.getMethod("read").invoke(null).asInstanceOf[MLReader[PipelineStage]].load(stagePath)
        Option(stage).map(_.asInstanceOf[S])
    }
  }
}

object SparkStageParam {
  implicit val formats: Formats = DefaultFormats
  val NoClass = ""
  val NoUID = ""

  def updateParamsMetadataWithPath(jValue: JValue, path: String): JValue = jValue match {
    case JObject(pairs) =>
      JObject(
        pairs.map {
          case (SparkWrapperParams.SparkStageParamName, j) =>
            SparkWrapperParams.SparkStageParamName -> j.merge(JObject("path" -> JString(path)))
          case param => param
        }
      )
    case j => throw new IllegalArgumentException(s"Cannot recognize JSON Spark params metadata: $j")
  }
}
Example 12
Source File: ArrayParam.scala From mmlspark with MIT License
// Copyright (C) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See LICENSE in project root for information.

package org.apache.spark.ml.param

import org.apache.spark.annotation.DeveloperApi
import org.json4s.{DefaultFormats, _}
import org.json4s.jackson.JsonMethods.{compact, parse, render}

import scala.collection.JavaConverters._

// Note: the class declaration was elided in the scraped snippet; a plausible
// reconstruction is shown here so the example compiles.
class ArrayParam(parent: Params, name: String, doc: String, isValid: Array[_] => Boolean)
  extends Param[Array[_]](parent, name, doc, isValid) {

  def w(value: java.util.List[_]): ParamPair[Array[_]] = w(value.asScala.toArray)

  override def jsonEncode(value: Array[_]): String = {
    import org.json4s.JsonDSL._
    value match {
      case intArr: Array[Int] => compact(render(intArr.toSeq))
      case dbArr: Array[Double] => compact(render(dbArr.toSeq))
      case strArr: Array[String] => compact(render(strArr.toSeq))
      case blArr: Array[Boolean] => compact(render(blArr.toSeq))
      case intArr: Array[Integer] => compact(render(intArr.map(_.toLong).toSeq))
      case _ => throw new IllegalArgumentException("Internal type not json serializable")
    }
  }

  override def jsonDecode(json: String): Array[_] = {
    implicit val formats: DefaultFormats.type = DefaultFormats
    parse(json).extract[Seq[_]].toArray
  }
}
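The encode/decode pair above reduces to a plain json4s round trip; a standalone sketch of just that logic (note that extracting into Seq[_] under DefaultFormats yields BigInt for untyped JSON numbers):

import org.json4s._
import org.json4s.JsonDSL._
import org.json4s.jackson.JsonMethods.{compact, parse, render}

object ArrayRoundTripDemo extends App {
  implicit val formats: DefaultFormats.type = DefaultFormats
  val encoded: String = compact(render(Seq(1, 2, 3))) // "[1,2,3]"
  val decoded: Array[_] = parse(encoded).extract[Seq[_]].toArray
  println(decoded.mkString(",")) // 1,2,3 (elements are BigInt)
}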
Example 13
Source File: BitgoFeeProviderSpec.scala From eclair with Apache License 2.0
package fr.acinq.eclair.blockchain.fee

import akka.actor.ActorSystem
import akka.util.Timeout
import com.softwaremill.sttp.okhttp.OkHttpFutureBackend
import fr.acinq.bitcoin.Block
import org.json4s.DefaultFormats
import org.scalatest.funsuite.AnyFunSuite

import scala.concurrent.Await

class BitgoFeeProviderSpec extends AnyFunSuite {

  import BitgoFeeProvider._
  import org.json4s.jackson.JsonMethods.parse

  implicit val formats = DefaultFormats

  val sample_response =
    """
      {"feePerKb":136797,"cpfpFeePerKb":136797,"numBlocks":2,"confidence":80,"multiplier":1,"feeByBlockTarget":{"1":149453,"2":136797,"5":122390,"6":105566,"8":100149,"9":96254,"10":122151,"13":116855,"15":110860,"17":87402,"27":82635,"33":71098,"42":105782,"49":68182,"73":59207,"97":17336,"121":16577,"193":13545,"313":12268,"529":11122,"553":9139,"577":5395,"793":5070}}
    """

  test("parse test") {
    val json = parse(sample_response)
    val feeRanges = parseFeeRanges(json)
    assert(feeRanges.size === 23)
  }

  test("extract fee for a particular block delay") {
    val json = parse(sample_response)
    val feeRanges = parseFeeRanges(json)
    val fee = extractFeerate(feeRanges, 6)
    assert(fee === 105566)
  }

  test("extract all fees") {
    val json = parse(sample_response)
    val feeRanges = parseFeeRanges(json)
    val feerates = extractFeerates(feeRanges)
    val ref = FeeratesPerKB(
      block_1 = 149453,
      blocks_2 = 136797,
      blocks_6 = 105566,
      blocks_12 = 96254,
      blocks_36 = 71098,
      blocks_72 = 68182,
      blocks_144 = 16577)
    assert(feerates === ref)
  }

  test("make sure API hasn't changed") {
    import scala.concurrent.duration._
    implicit val system = ActorSystem("test")
    implicit val ec = system.dispatcher
    implicit val sttp = OkHttpFutureBackend()
    implicit val timeout = Timeout(30 seconds)
    val bitgo = new BitgoFeeProvider(Block.LivenetGenesisBlock.hash, 5 seconds)
    assert(Await.result(bitgo.getFeerates, timeout.duration).block_1 > 0)
  }

  test("check that read timeout is enforced") {
    import scala.concurrent.duration._
    implicit val system = ActorSystem("test")
    implicit val ec = system.dispatcher
    implicit val sttp = OkHttpFutureBackend()
    implicit val timeout = Timeout(30 second)
    val bitgo = new BitgoFeeProvider(Block.LivenetGenesisBlock.hash, 1 millisecond)
    val e = intercept[Exception] {
      Await.result(bitgo.getFeerates, timeout.duration)
    }
    assert(e.getMessage.contains("Read timed out"))
  }
}
Example 14
Source File: DataTypeUtils.scala From incubator-livy with Apache License 2.0
package org.apache.livy.thriftserver.types

import org.json4s.{DefaultFormats, JValue}
import org.json4s.JsonAST.{JObject, JString}
import org.json4s.jackson.JsonMethods.parse

// Note: the enclosing object declaration, the implicit Formats, and the private
// toFieldType helper were elided in the scraped snippet; the wrapper and formats
// are restored here for context, while toFieldType remains elided.
object DataTypeUtils {

  implicit val formats: DefaultFormats.type = DefaultFormats

  def schemaFromSparkJson(sparkJson: String): Schema = {
    val schema = parse(sparkJson) \ "fields"
    val fields = schema.children.map { field =>
      val name = (field \ "name").extract[String]
      val hiveType = toFieldType(field \ "type")
      // TODO: retrieve comment from metadata
      Field(name, hiveType, "")
    }
    Schema(fields.toArray)
  }
}
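The sparkJson argument is the string produced by Spark's own StructType.json. A small sketch showing what that input looks like (assuming spark-sql on the classpath; the field names are made up):

import org.apache.spark.sql.types._

object SchemaJsonDemo extends App {
  val schema = StructType(Seq(
    StructField("id", LongType, nullable = false),
    StructField("name", StringType)
  ))
  // prints: {"type":"struct","fields":[{"name":"id","type":"long","nullable":false,"metadata":{}},
  //          {"name":"name","type":"string","nullable":true,"metadata":{}}]}
  println(schema.json) // this string is what schemaFromSparkJson consumes
}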
Example 15
Source File: AsGeoJSONSuite.scala From magellan with Apache License 2.0
package magellan.catalyst

import magellan.{Geometry, Point, TestSparkContext}
import org.apache.spark.sql.Row
import org.apache.spark.sql.magellan.dsl.expressions._
import org.json4s.jackson.JsonMethods.parse
import org.scalatest.FunSuite

class AsGeoJSONSuite extends FunSuite with TestSparkContext {

  test("as geojson") {
    val sqlCtx = this.sqlContext
    import sqlCtx.implicits._
    val point = Point(35.7, -122.3)
    val points = Seq((1, point))
    val df = sc.parallelize(points).toDF("id", "point")
    val withJson = df.withColumn("json", $"point" asGeoJSON)
    val json = withJson.select("json").map { case Row(s: String) => s }.take(1)(0)
    implicit val formats = org.json4s.DefaultFormats
    val result = parse(json).extract[Geometry]
    val shapes = result.shapes
    // expect a single point
    assert(shapes.size == 1)
    assert(shapes.head.asInstanceOf[Point] === point)
  }
}
Example 16
Source File: L8-10-11UDF.scala From prosparkstreaming with Apache License 2.0
package org.apress.prospark

import scala.io.Source
import scala.reflect.runtime.universe

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SQLContext
import org.apache.spark.streaming.Seconds
import org.apache.spark.streaming.StreamingContext
import org.json4s.jackson.JsonMethods.parse
import org.json4s.jvalue2extractable
import org.json4s.string2JsonInput

object CdrUDFApp {

  case class Cdr(squareId: Int, timeInterval: Long, countryCode: Int,
    smsInActivity: Float, smsOutActivity: Float, callInActivity: Float,
    callOutActivity: Float, internetTrafficActivity: Float)

  def main(args: Array[String]) {
    if (args.length != 4) {
      System.err.println(
        "Usage: CdrUDFApp <appname> <batchInterval> <hostname> <port>")
      System.exit(1)
    }
    val Seq(appName, batchInterval, hostname, port) = args.toSeq

    val conf = new SparkConf()
      .setAppName(appName)
      .setJars(SparkContext.jarOfClass(this.getClass).toSeq)

    val ssc = new StreamingContext(conf, Seconds(batchInterval.toInt))

    val sqlC = new SQLContext(ssc.sparkContext)
    import sqlC.implicits._

    def getCountryCodeMapping() = {
      implicit val formats = org.json4s.DefaultFormats
      parse(Source.fromURL("http://country.io/phone.json").mkString)
        .extract[Map[String, String]].map(_.swap)
    }

    def getCountryNameMapping() = {
      implicit val formats = org.json4s.DefaultFormats
      parse(Source.fromURL("http://country.io/names.json").mkString)
        .extract[Map[String, String]]
    }

    def getCountryName(mappingPhone: Map[String, String],
      mappingName: Map[String, String], code: Int) = {
      mappingName.getOrElse(mappingPhone.getOrElse(code.toString, "NotFound"), "NotFound")
    }

    val getCountryNamePartial = getCountryName(getCountryCodeMapping(), getCountryNameMapping(), _: Int)

    sqlC.udf.register("getCountryNamePartial", getCountryNamePartial)

    val cdrStream = ssc.socketTextStream(hostname, port.toInt)
      .map(_.split("\\t", -1))
      .foreachRDD(rdd => {
        val cdrs = seqToCdr(rdd).toDF()
        cdrs.registerTempTable("cdrs")

        sqlC.sql("SELECT getCountryNamePartial(countryCode) AS countryName, " +
          "COUNT(countryCode) AS cCount FROM cdrs GROUP BY countryCode ORDER BY cCount DESC LIMIT 5").show()
      })

    ssc.start()
    ssc.awaitTermination()
  }

  def seqToCdr(rdd: RDD[Array[String]]): RDD[Cdr] = {
    rdd.map(c => c.map(f => f match {
      case x if x.isEmpty() => "0"
      case x => x
    })).map(c => Cdr(c(0).toInt, c(1).toLong, c(2).toInt, c(3).toFloat,
      c(4).toFloat, c(5).toFloat, c(6).toFloat, c(7).toFloat))
  }
}
Example 17
Source File: AuthHeaderProdiverFactoryTest.scala From maha with Apache License 2.0
package com.yahoo.maha.service.factory

import com.yahoo.maha.executor.druid.NoopAuthHeaderProvider
import com.yahoo.maha.service.{DefaultMahaServiceConfigContext, MahaServiceConfigContext}
import org.json4s.jackson.JsonMethods.parse
import org.scalatest.{FunSuite, Matchers}

class AuthHeaderProdiverFactoryTest extends FunSuite with Matchers {

  test("shouldCreateValidNoopAuthHeaderProvider") {
    val jsonDef: String =
      s"""
         |{
         |  "domain" : "Maha",
         |  "service" :"MahaProviderService",
         |  "privateKeyName" : "sa",
         |  "privateKeyId" : "sa"
         |}
       """.stripMargin

    val factory = new NoopAuthHeaderProviderFactory
    implicit val context: MahaServiceConfigContext = DefaultMahaServiceConfigContext()

    val providerTry = factory.fromJson(parse(jsonDef))
    assert(providerTry.isSuccess)
    assert(providerTry.toOption.get.isInstanceOf[NoopAuthHeaderProvider])
    // No local gets or sets on provider, since all variables only run on test boxes.
  }
}
Example 18
Source File: package.scala From maha with Apache License 2.0
// Copyright 2017, Yahoo Holdings Inc.
// Licensed under the terms of the Apache License 2.0. Please see LICENSE file in project root for terms.
package com.yahoo.maha.service

import com.netflix.archaius.config.PollingDynamicConfig
import com.yahoo.maha.service.config.dynamic.DynamicConfigurations
import com.yahoo.maha.service.config.dynamic.DynamicConfigurationUtils._
import grizzled.slf4j.Logging
import org.json4s.{JValue, _}
import org.json4s.jackson.JsonMethods.parse
import org.json4s.scalaz.JsonScalaz
import org.json4s.scalaz.JsonScalaz.{JSONR, _}

package object request extends Logging {
  implicit val formats = org.json4s.DefaultFormats

  private var dynamicConfigurations: Option[DynamicConfigurations] = None

  def setDynamicConfigurations(value: DynamicConfigurations): Unit = dynamicConfigurations = Some(value)

  def fieldExtended[A: JSONR](name: String)(json: JValue): Result[A] = {
    val dynamicField = extractDynamicFields(json).filter(f => f._2._1.equals(name)).headOption
    val result = {
      if (dynamicField.isDefined && dynamicConfigurations.isDefined) {
        val defaultValue = JsonScalaz.fromJSON[A](parse(dynamicField.get._2._2))
        dynamicConfigurations.get.addProperty(dynamicField.get._1, defaultValue.toOption.get.asInstanceOf[Int])
        val dynamicValue = JsonScalaz.fromJSON[A](
          parse(dynamicConfigurations.get.getDynamicConfiguration(dynamicField.get._1).get.toString))
        if (dynamicValue.isSuccess) {
          dynamicValue
        } else {
          error(s"Failed to fetch dynamic config value failure: $dynamicValue. Returning default: $defaultValue")
          defaultValue
        }
      } else {
        field[A](name)(json)
      }
    }

    result.leftMap { nel =>
      nel.map {
        case UnexpectedJSONError(was, expected) =>
          UncategorizedError(name, s"unexpected value : $was expected : ${expected.getSimpleName}", List.empty)
        case a => a
      }
    }
  }
}
Example 19
Source File: SQLJsonProtocolSuite.scala From XSQL with Apache License 2.0
package org.apache.spark.sql.execution

import org.json4s.jackson.JsonMethods.parse

import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.execution.ui.SparkListenerSQLExecutionStart
import org.apache.spark.util.JsonProtocol

class SQLJsonProtocolSuite extends SparkFunSuite {

  test("SparkPlanGraph backward compatibility: metadata") {
    val SQLExecutionStartJsonString =
      """
        |{
        |  "Event":"org.apache.spark.sql.execution.ui.SparkListenerSQLExecutionStart",
        |  "executionId":0,
        |  "description":"test desc",
        |  "details":"test detail",
        |  "physicalPlanDescription":"test plan",
        |  "sparkPlanInfo": {
        |    "nodeName":"TestNode",
        |    "simpleString":"test string",
        |    "children":[],
        |    "metadata":{},
        |    "metrics":[]
        |  },
        |  "time":0
        |}
      """.stripMargin
    val reconstructedEvent = JsonProtocol.sparkEventFromJson(parse(SQLExecutionStartJsonString))
    val expectedEvent = SparkListenerSQLExecutionStart(0, "test desc", "test detail",
      "test plan", new SparkPlanInfo("TestNode", "test string", Nil, Map(), Nil), 0)
    assert(reconstructedEvent == expectedEvent)
  }
}
Example 20
Source File: JavaCollectionSerializer.scala From Linkis with Apache License 2.0
package com.webank.wedatasphere.linkis.rpc.transform

import com.webank.wedatasphere.linkis.server.BDPJettyServerHelper
import org.json4s.{CustomSerializer, JArray, JObject}
import org.json4s.jackson.Serialization.write
import org.json4s.jackson.JsonMethods.parse

// TODO: this is only the simplest implementation; optimize later if needed.
object JavaCollectionSerializer extends CustomSerializer[java.util.List[_]](implicit formats => (
  {
    case j: JArray => BDPJettyServerHelper.gson.fromJson(write(j), classOf[java.util.List[_]])
  },
  {
    case list: java.util.List[_] => parse(BDPJettyServerHelper.gson.toJson(list))
  }
))

object JavaMapSerializer extends CustomSerializer[java.util.Map[_, _]](implicit formats => (
  {
    case j: JObject => BDPJettyServerHelper.gson.fromJson(write(j), classOf[java.util.Map[_, _]])
  },
  {
    case map: java.util.Map[_, _] => parse(BDPJettyServerHelper.gson.toJson(map))
  }
))
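These serializers only take effect once registered in the implicit Formats. A hedged usage sketch, assuming the Linkis objects above (and their BDPJettyServerHelper dependency) are on the classpath:

import org.json4s.DefaultFormats
import org.json4s.jackson.Serialization

object JavaCollectionDemo extends App {
  // register both custom serializers alongside the defaults
  implicit val formats = DefaultFormats + JavaCollectionSerializer + JavaMapSerializer

  val list = Serialization.read[java.util.List[_]]("""[1,2,3]""")
  println(Serialization.write(list)) // [1,2,3]
}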