org.json4s.JsonAST.JNothing Scala Examples
The following examples show how to use org.json4s.JsonAST.JNothing, the json4s AST value that represents the absence of a JSON value. Each example notes the project, source file, and license it was taken from.
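Before the project examples, here is a minimal sketch (not taken from any of the projects below, and assuming the json4s-jackson backend) of the pattern most of them rely on: a \ lookup on a parsed JValue returns JNothing when the requested field is absent, so presence checks are ordinary comparisons against JNothing. The object name JNothingQuickCheck and the sample JSON are illustrative only.

import org.json4s._
import org.json4s.jackson.JsonMethods.parse

object JNothingQuickCheck extends App {
  // Parse a small document modeled on the records used in Example 1.
  val json: JValue = parse("""{ "attributes": { "Wi-Fi": "free" }, "stars": 4 }""")

  // A lookup on an existing key returns the wrapped value.
  println(json \ "stars") // JInt(4)

  // A lookup on a missing key returns JNothing rather than throwing,
  // so a presence check is a plain comparison against JNothing.
  println((json \ "address") == JNothing) // true
  println((json \ "attributes" \ "Wi-Fi") != JNothing) // true
}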
Example 1
Source File: L10-2DataProc.scala, from prosparkstreaming (Apache License 2.0)
package org.apress.prospark

import org.apache.spark.HashPartitioner
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.streaming.Seconds
import org.apache.spark.streaming.StreamingContext
import org.apache.spark.streaming.dstream.DStream.toPairDStreamFunctions
import org.json4s.DefaultFormats
import org.json4s.JsonAST.JNothing
import org.json4s.jvalue2extractable
import org.json4s.jvalue2monadic
import org.json4s.native.JsonMethods.parse
import org.json4s.string2JsonInput

object DataProcApp {

  def main(args: Array[String]) {
    if (args.length != 4) {
      System.err.println(
        "Usage: DataProcApp <appname> <batchInterval> <hostname> <port>")
      System.exit(1)
    }
    val Seq(appName, batchInterval, hostname, port) = args.toSeq

    val conf = new SparkConf()
      .setAppName(appName)
      .setJars(SparkContext.jarOfClass(this.getClass).toSeq)

    val ssc = new StreamingContext(conf, Seconds(batchInterval.toInt))

    ssc.socketTextStream(hostname, port.toInt)
      .map(r => {
        implicit val formats = DefaultFormats
        parse(r)
      })
      .filter(jvalue => {
        jvalue \ "attributes" \ "Wi-Fi" != JNothing
      })
      .map(jvalue => {
        implicit val formats = DefaultFormats
        ((jvalue \ "attributes" \ "Wi-Fi").extract[String],
          (jvalue \ "stars").extract[Int])
      })
      .combineByKey(
        (v) => (v, 1),
        (accValue: (Int, Int), v) => (accValue._1 + v, accValue._2 + 1),
        (accCombine1: (Int, Int), accCombine2: (Int, Int)) =>
          (accCombine1._1 + accCombine2._1, accCombine1._2 + accCombine2._2),
        new HashPartitioner(ssc.sparkContext.defaultParallelism))
      .map({ case (k, v) => (k, v._1 / v._2.toFloat) })
      .print()

    ssc.start()
    ssc.awaitTermination()
  }
}
Example 2
Source File: MasterWebUISuite.scala, from BigDatalog (Apache License 2.0)
package org.apache.spark.deploy.master.ui

import java.util.Date

import scala.io.Source
import scala.language.postfixOps

import org.json4s.jackson.JsonMethods._
import org.json4s.JsonAST.{JNothing, JString, JInt}
import org.mockito.Mockito.{mock, when}
import org.scalatest.BeforeAndAfter

import org.apache.spark.{SparkConf, SecurityManager, SparkFunSuite}
import org.apache.spark.deploy.DeployMessages.MasterStateResponse
import org.apache.spark.deploy.DeployTestUtils._
import org.apache.spark.deploy.master._
import org.apache.spark.rpc.RpcEnv

class MasterWebUISuite extends SparkFunSuite with BeforeAndAfter {

  val masterPage = mock(classOf[MasterPage])
  val master = {
    val conf = new SparkConf
    val securityMgr = new SecurityManager(conf)
    val rpcEnv = RpcEnv.create(Master.SYSTEM_NAME, "localhost", 0, conf, securityMgr)
    val master = new Master(rpcEnv, rpcEnv.address, 0, securityMgr, conf)
    master
  }
  val masterWebUI = new MasterWebUI(master, 0, customMasterPage = Some(masterPage))

  before {
    masterWebUI.bind()
  }

  after {
    masterWebUI.stop()
  }

  test("list applications") {
    val worker = createWorkerInfo()
    val appDesc = createAppDesc()
    // use new start date so it isn't filtered by UI
    val activeApp = new ApplicationInfo(
      new Date().getTime, "id", appDesc, new Date(), null, Int.MaxValue)
    activeApp.addExecutor(worker, 2)

    val workers = Array[WorkerInfo](worker)
    val activeApps = Array(activeApp)
    val completedApps = Array[ApplicationInfo]()
    val activeDrivers = Array[DriverInfo]()
    val completedDrivers = Array[DriverInfo]()
    val stateResponse = new MasterStateResponse(
      "host", 8080, None, workers, activeApps, completedApps,
      activeDrivers, completedDrivers, RecoveryState.ALIVE)

    when(masterPage.getMasterState).thenReturn(stateResponse)

    val resultJson = Source.fromURL(
      s"http://localhost:${masterWebUI.boundPort}/api/v1/applications")
      .mkString
    val parsedJson = parse(resultJson)
    val firstApp = parsedJson(0)

    assert(firstApp \ "id" === JString(activeApp.id))
    assert(firstApp \ "name" === JString(activeApp.desc.name))
    assert(firstApp \ "coresGranted" === JInt(2))
    assert(firstApp \ "maxCores" === JInt(4))
    assert(firstApp \ "memoryPerExecutorMB" === JInt(1234))
    assert(firstApp \ "coresPerExecutor" === JNothing)
  }
}
Example 3
Source File: SampleActor.scala, from coral (Apache License 2.0)
package io.coral.actors.transform

import akka.actor.Props
import io.coral.actors.{SimpleEmitTrigger, CoralActor}
import io.coral.lib.Random
import org.json4s.JsonAST.JNothing
import org.json4s.{JObject, JValue}

object SampleActor {
  implicit val formats = org.json4s.DefaultFormats

  def getParams(json: JValue) = {
    for {
      fraction <- (json \ "params" \ "fraction").extractOpt[Double]
    } yield {
      fraction
    }
  }

  def apply(json: JValue): Option[Props] = {
    getParams(json).map(_ => Props(classOf[SampleActor], json, Random))
  }
}

class SampleActor(json: JObject, random: Random)
  extends CoralActor(json)
  with SimpleEmitTrigger {

  val fraction: Double = SampleActor.getParams(json).get

  var randomStream: Stream[Boolean] = random.binomial(fraction)

  def next(): Boolean = {
    val value = randomStream.head
    randomStream = randomStream.tail
    value
  }

  override def simpleEmitTrigger(json: JObject): Option[JValue] = {
    next() match {
      case false => Some(JNothing)
      case true => Some(json)
    }
  }
}
Example 4
Source File: KafkaConsumerActor.scala, from coral (Apache License 2.0)
package io.coral.actors.connector

import java.util.Properties

import akka.actor.Props
import io.coral.actors.CoralActor
import io.coral.actors.connector.KafkaConsumerActor.{StopReadingMessageQueue, ReadMessageQueue}
import io.coral.lib.{ConfigurationBuilder, KafkaJsonConsumer}
import kafka.serializer.Decoder
import kafka.tools.MessageFormatter
import org.apache.kafka.clients.consumer.ConsumerConfig
import org.json4s.JsonAST.{JNothing, JObject, JValue}

object KafkaConsumerActor {
  case class ReadMessageQueue()
  case class StopReadingMessageQueue()

  implicit val formats = org.json4s.DefaultFormats
  val builder = new ConfigurationBuilder("kafka.consumer")

  def getParams(json: JValue) = {
    for {
      kafka <- (json \ "params" \ "kafka").extractOpt[JObject]
      topic <- (json \ "params" \ "topic").extractOpt[String]
    } yield {
      val properties = consumerProperties(kafka)
      (properties, topic)
    }
  }

  def consumerProperties(json: JObject): Properties = {
    val properties = builder.properties
    json.values.foreach {
      case (k: String, v: String) => properties.setProperty(k, v)
    }
    properties
  }

  def apply(json: JValue): Option[Props] = {
    getParams(json).map(_ => Props(classOf[KafkaConsumerActor], json, KafkaJsonConsumer()))
  }

  def apply(json: JValue, decoder: Decoder[JValue]): Option[Props] = {
    getParams(json).map(_ => Props(classOf[KafkaConsumerActor], json, KafkaJsonConsumer(decoder)))
  }
}

class KafkaConsumerActor(json: JObject, connection: KafkaJsonConsumer) extends CoralActor(json) {
  val (properties, topic) = KafkaConsumerActor.getParams(json).get

  lazy val stream = connection.stream(topic, properties)
  var shouldStop = false

  override def preStart(): Unit = {
    super.preStart()
  }

  override def receiveExtra: Receive = {
    case ReadMessageQueue() if stream.hasNextInTime =>
      val message: JValue = stream.next
      stream.commitOffsets

      if (message != JNothing) {
        emit(message)
      }

      if (!shouldStop) {
        self ! ReadMessageQueue()
      }
    case ReadMessageQueue() =>
      self ! ReadMessageQueue()
    case StopReadingMessageQueue() =>
      shouldStop = true
  }
}
Example 5
Source File: JsonExpressionParser.scala, from coral (Apache License 2.0)
package io.coral.lib

import org.json4s.{JArray, JValue, JObject}
import org.json4s.JsonAST.JNothing
import scala.util.parsing.combinator.{PackratParsers, JavaTokenParsers}
import scala.util.parsing.input.CharSequenceReader

object JsonExpressionParser extends JavaTokenParsers with PackratParsers {
  abstract class FieldElement

  // Represents the complete list of identifiers ("field.array[0].reference['elem']").
  // A FieldReference is a concatenation of FieldElements.
  // A FieldElement is either a simple identifier, an array
  // access element or a dictionary access element.
  case class FieldReference(items: List[FieldElement])

  // Represents a simple identifier between dots
  case class JsonIdentifier(id: String) extends FieldElement

  // Represents an array access identifier ("field[0]")
  case class ArrayAccess(id: JsonIdentifier, index: Int) extends FieldElement

  // Represents a dictionary access identifier ("field['inner']")
  case class DictionaryAccess(id: JsonIdentifier, field: String) extends FieldElement

  object ReferenceAll extends FieldElement

  def getFieldValue(json: JObject, id: FieldReference): JValue = {
    // tempJson holds the result we want to return
    var tempJson: JValue = json

    id.items.foreach({
      case ReferenceAll => tempJson
      case i: JsonIdentifier =>
        tempJson = tempJson \ i.id
      case a: ArrayAccess =>
        val obj = tempJson \ a.id.id
        obj match {
          case array: JArray =>
            if (a.index < array.arr.length) tempJson = array(a.index)
            else return JNothing
          case _ => return JNothing
        }
      case d: DictionaryAccess =>
        tempJson = tempJson \ d.id.id \ d.field
      case _ =>
    })

    tempJson
  }

  type P[+T] = PackratParser[T]

  lazy val local_field_reference: P[FieldReference] =
    repsep(field_element, ".") ^^ {
      case i => FieldReference(i)
    }

  lazy val field_element: P[FieldElement] =
    reference_all | array_access | dictionary_access | json_identifier

  lazy val json_identifier: P[JsonIdentifier] =
    ident ^^ {
      case i => JsonIdentifier(i)
    }

  lazy val array_access: P[ArrayAccess] =
    json_identifier ~ "[" ~ wholeNumber ~ "]" ^^ {
      case id ~ "[" ~ index ~ "]" => ArrayAccess(id, index.toInt)
    }

  lazy val dictionary_access: P[DictionaryAccess] =
    json_identifier ~ "[" ~ "'" ~ ident ~ "'" ~ "]" ^^ {
      case id ~ "[" ~ "'" ~ field ~ "'" ~ "]" => DictionaryAccess(id, field)
    }

  lazy val reference_all: P[FieldElement] = "*" ^^ {
    case _ => ReferenceAll
  }
}
Example 6
Source File: KafkaJsonConsumer.scala, from coral (Apache License 2.0)
package io.coral.lib

import java.util.Properties

import com.fasterxml.jackson.core.JsonParseException
import kafka.consumer._
import kafka.serializer.{Decoder, DefaultDecoder}
import org.json4s.JsonAST.{JNothing, JValue}
import org.json4s.jackson.JsonMethods._

object KafkaJsonConsumer {
  def apply() = new KafkaJsonConsumer(JsonDecoder)

  def apply(decoder: Decoder[JValue]) = new KafkaJsonConsumer(decoder)
}

class KafkaJsonConsumer(decoder: Decoder[JValue]) {
  def stream(topic: String, properties: Properties): KafkaJsonStream = {
    val connection = Consumer.create(new ConsumerConfig(properties))
    val stream = connection.createMessageStreamsByFilter(
      Whitelist(topic), 1, new DefaultDecoder, decoder)(0)
    new KafkaJsonStream(connection, stream)
  }
}

class KafkaJsonStream(connection: ConsumerConnector, stream: KafkaStream[Array[Byte], JValue]) {
  private lazy val it = stream.iterator

  // this method relies on a timeout value having been set
  @inline def hasNextInTime: Boolean =
    try {
      it.hasNext
    } catch {
      case cte: ConsumerTimeoutException => false
    }

  @inline def next: JValue = it.next.message

  @inline def commitOffsets = connection.commitOffsets
}

object JsonDecoder extends Decoder[JValue] {
  val encoding = "UTF8"

  override def fromBytes(bytes: Array[Byte]): JValue = {
    val s = new String(bytes, encoding)
    try {
      parse(s)
    } catch {
      case jpe: JsonParseException => JNothing
    }
  }
}
Example 7
Source File: Machine.scala, from coral (Apache License 2.0)
package io.coral.cluster

import org.json4s.JObject
import org.json4s.JsonAST.{JNull, JNothing}
import org.json4s.JsonDSL._

case class Machine(// The friendly name of the machine, if any
                   alias: Option[String],
                   // The IP address of the machine
                   ip: String,
                   // The port on which the machine can be reached
                   port: Int,
                   // The role of the machine
                   roles: List[String],
                   // The status of the machine
                   status: Option[String]) {
  def toJson(): JObject = {
    ("alias" -> alias.orNull) ~
      ("ip" -> ip) ~
      ("port" -> port) ~
      ("roles" -> roles) ~
      ("status" -> status)
  }
}
Example 8
Source File: KafkaJsonConsumerSpec.scala, from coral (Apache License 2.0)
package io.coral.lib

import java.util.Properties

import kafka.consumer._
import kafka.message.MessageAndMetadata
import org.json4s.JsonAST.{JNothing, JValue}
import org.json4s.jackson.JsonMethods._
import org.mockito.Mockito._
import org.scalatest.mock.MockitoSugar
import org.scalatest.{Matchers, WordSpec}

class KafkaJsonConsumerSpec extends WordSpec with Matchers with MockitoSugar {
  "KafkaJsonConsumer" should {
    "provide a stream" in {
      val consumer = KafkaJsonConsumer()
      intercept[IllegalArgumentException] {
        consumer.stream("abc", new Properties())
      }
    }
  }

  "KafkaJsonStream" should {
    val fakeConnection = mock[ConsumerConnector]
    doNothing.when(fakeConnection).commitOffsets

    val fakeMessage = mock[MessageAndMetadata[Array[Byte], JValue]]
    when(fakeMessage.key()).thenReturn("TestKey".getBytes)
    when(fakeMessage.message()).thenReturn(parse("""{ "json": "test" }"""))

    val fakeIterator = mock[ConsumerIterator[Array[Byte], JValue]]
    when(fakeIterator.hasNext()).thenReturn(true).thenReturn(false)
    when(fakeIterator.next()).thenReturn(fakeMessage)

    val fakeStream = mock[KafkaStream[Array[Byte], JValue]]
    when(fakeStream.iterator()).thenReturn(fakeIterator)

    "provide a next value" in {
      val kjs = new KafkaJsonStream(fakeConnection, fakeStream)
      kjs.hasNextInTime shouldBe true
      kjs.next shouldBe parse("""{ "json": "test" }""")
    }
  }

  "JsonDecoder" should {
    "convert bytes to Json object" in {
      val jsonString = """{ "hello": "json" }"""
      val bytes = jsonString.getBytes
      val jsonValue = parse(jsonString)
      JsonDecoder.fromBytes(bytes) shouldBe jsonValue
    }

    "return JNothing for invalid JSon" in {
      val jsonString = """hello"""
      val bytes = jsonString.getBytes
      JsonDecoder.fromBytes(bytes) shouldBe JNothing
    }
  }
}
Example 9
Source File: AnyFormat.scala, from scalapb-json4s (Apache License 2.0)
package scalapb.json4s

import com.google.protobuf.any.{Any => PBAny}
import org.json4s.JsonAST.{JNothing, JObject, JString, JValue}

import scala.language.existentials

object AnyFormat {
  val anyWriter: (Printer, PBAny) => JValue = {
    case (printer, any) =>
      // Find the companion so it can be used to JSON-serialize the message. Perhaps this can be circumvented by
      // including the original GeneratedMessage with the Any (at least in memory).
      val cmp = printer.typeRegistry
        .findType(any.typeUrl)
        .getOrElse(
          throw new IllegalStateException(
            s"Unknown type ${any.typeUrl} in Any. Add a TypeRegistry that supports this type to the Printer."
          )
        )

      // Unpack the message...
      val message = any.unpack(cmp)

      // ... and add the @type marker to the resulting JSON
      printer.toJson(message) match {
        case JObject(fields) =>
          JObject(("@type" -> JString(any.typeUrl)) +: fields)
        case value =>
          // Safety net, this shouldn't happen
          throw new IllegalStateException(
            s"Message of type ${any.typeUrl} emitted non-object JSON: $value"
          )
      }
  }

  val anyParser: (Parser, JValue) => PBAny = {
    case (parser, obj @ JObject(fields)) =>
      obj \ "@type" match {
        case JString(typeUrl) =>
          val cmp = parser.typeRegistry
            .findType(typeUrl)
            .getOrElse(
              throw new JsonFormatException(
                s"Unknown type ${typeUrl} in Any. Add a TypeRegistry that supports this type to the Parser."
              )
            )
          val message = parser.fromJson(obj, true)(cmp)
          PBAny(typeUrl = typeUrl, value = message.toByteString)

        case JNothing =>
          throw new JsonFormatException(s"Missing type url when parsing $obj")

        case unknown =>
          throw new JsonFormatException(
            s"Expected string @type field, got $unknown"
          )
      }

    case (_, unknown) =>
      throw new JsonFormatException(s"Expected an object, got $unknown")
  }
}