org.json4s.JsonAST.JValue Scala Examples

The following examples show how to use org.json4s.JsonAST.JValue. Each example is drawn from an open-source project; the source file, project name, and license are noted above each listing.
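Before the examples, here is a minimal, self-contained sketch (not from any of the projects below) of the core JValue operations they all rely on: parsing, navigating with \, typed extraction, and rendering.

import org.json4s._
import org.json4s.jackson.JsonMethods._

object JValueBasics extends App {
  implicit val formats: Formats = DefaultFormats

  // Parse a JSON string into the JValue AST
  val json: JValue = parse("""{ "params": { "field": "val", "min": 1.5 } }""")

  // Navigate with \ ; extractOpt returns None instead of throwing on a miss
  val field: Option[String] = (json \ "params" \ "field").extractOpt[String]
  val min: Option[Double] = (json \ "params" \ "min").extractOpt[Double]

  // Render the AST back to a compact JSON string
  println(compact(render(json))) // {"params":{"field":"val","min":1.5}}
  println(s"field=$field, min=$min") // field=Some(val), min=Some(1.5)
}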
Example 1
Source File: KafkaConsumerActor.scala    From coral   with Apache License 2.0
package io.coral.actors.connector

import java.util.Properties

import akka.actor.Props
import io.coral.actors.CoralActor
import io.coral.actors.connector.KafkaConsumerActor.{StopReadingMessageQueue, ReadMessageQueue}
import io.coral.lib.{ConfigurationBuilder, KafkaJsonConsumer}
import kafka.serializer.Decoder
import kafka.tools.MessageFormatter
import org.apache.kafka.clients.consumer.ConsumerConfig
import org.json4s.JsonAST.{JNothing, JObject, JValue}

object KafkaConsumerActor {
	case class ReadMessageQueue()
	case class StopReadingMessageQueue()

	implicit val formats = org.json4s.DefaultFormats
	val builder = new ConfigurationBuilder("kafka.consumer")

	def getParams(json: JValue) = {
		for {
			kafka <- (json \ "params" \ "kafka").extractOpt[JObject]
			topic <- (json \ "params" \ "topic").extractOpt[String]
		} yield {
			val properties = consumerProperties(kafka)
			(properties, topic)
		}
	}

	def consumerProperties(json: JObject): Properties = {
		val properties = builder.properties

		json.values.foreach {
			case (k: String, v: String) =>
				properties.setProperty(k, v)
		}

		properties
	}

	def apply(json: JValue): Option[Props] = {
		getParams(json).map(_ => Props(classOf[KafkaConsumerActor], json, KafkaJsonConsumer()))
	}

	def apply(json: JValue, decoder: Decoder[JValue]): Option[Props] = {
		getParams(json).map(_ => Props(classOf[KafkaConsumerActor], json, KafkaJsonConsumer(decoder)))
	}
}

class KafkaConsumerActor(json: JObject, connection: KafkaJsonConsumer) extends CoralActor(json) {
	val (properties, topic) = KafkaConsumerActor.getParams(json).get
	lazy val stream = connection.stream(topic, properties)
	var shouldStop = false

	override def preStart(): Unit = {
		super.preStart()
	}

	override def receiveExtra: Receive = {
		case ReadMessageQueue() if stream.hasNextInTime =>
			val message: JValue = stream.next
			stream.commitOffsets

			if (message != JNothing) {
				emit(message)
			}

			if (!shouldStop) {
				self ! ReadMessageQueue()
			}
		case ReadMessageQueue() =>
			self ! ReadMessageQueue()
		case StopReadingMessageQueue() =>
			shouldStop = true
	}

	
} 
Example 2
Source File: SparkStageParam.scala    From TransmogrifAI   with BSD 3-Clause "New" or "Revised" License
package com.salesforce.op.stages

import com.salesforce.op.stages.sparkwrappers.generic.SparkWrapperParams
import org.apache.hadoop.fs.Path
import org.apache.spark.ml.PipelineStage
import org.apache.spark.ml.param.{Param, ParamPair, Params}
import org.apache.spark.ml.util.{Identifiable, MLReader, MLWritable}
import org.apache.spark.util.SparkUtils
import org.json4s.JsonAST.{JObject, JValue}
import org.json4s.JsonDSL._
import org.json4s.jackson.JsonMethods.{compact, parse, render}
import org.json4s.{DefaultFormats, Formats, JString}

class SparkStageParam[S <: PipelineStage with Params]
(
  parent: String,
  name: String,
  doc: String,
  isValid: Option[S] => Boolean
) extends Param[Option[S]](parent, name, doc, isValid) {

  import SparkStageParam._

  // Reconstructed for this excerpt: the original class also defines jsonEncode
  // and manages this save path when serializing the wrapped Spark stage
  var savePath: Option[String] = None

  override def jsonDecode(jsonStr: String): Option[S] = {
    val json = parse(jsonStr)
    val uid = (json \ "uid").extractOpt[String]
    val path = (json \ "path").extractOpt[String]

    path -> uid match {
      case (None, _) | (_, None) | (_, Some(NoUID)) =>
        savePath = None
        None
      case (Some(p), Some(stageUid)) =>
        savePath = Option(p)
        val stagePath = new Path(p, stageUid).toString
        val className = (json \ "className").extract[String]
        val cls = SparkUtils.classForName(className)
        val stage = cls.getMethod("read").invoke(null).asInstanceOf[MLReader[PipelineStage]].load(stagePath)
        Option(stage).map(_.asInstanceOf[S])
    }
  }
}

object SparkStageParam {
  implicit val formats: Formats = DefaultFormats
  val NoClass = ""
  val NoUID = ""

  def updateParamsMetadataWithPath(jValue: JValue, path: String): JValue = jValue match {
    case JObject(pairs) => JObject(
      pairs.map {
        case (SparkWrapperParams.SparkStageParamName, j) =>
          SparkWrapperParams.SparkStageParamName -> j.merge(JObject("path" -> JString(path)))
        case param => param
      }
    )
    case j => throw new IllegalArgumentException(s"Cannot recognize JSON Spark params metadata: $j")
  }

} 
Example 3
Source File: TransientFeatureArrayParam.scala    From TransmogrifAI   with BSD 3-Clause "New" or "Revised" License
package com.salesforce.op.stages

import com.salesforce.op.features._
import org.apache.spark.ml.param._
import org.apache.spark.ml.util.Identifiable
import org.json4s.DefaultFormats
import org.json4s.JsonAST.{JArray, JValue}
import org.json4s.jackson.JsonMethods.{compact, parse, render}

import scala.util.{Failure, Success}



// Class header reconstructed for this excerpt; the original file wraps these
// overrides in a Param subclass along these lines:
class TransientFeatureArrayParam
(
  parent: String,
  name: String,
  doc: String,
  isValid: Array[TransientFeature] => Boolean
) extends Param[Array[TransientFeature]](parent, name, doc, isValid) {

  implicit val formats = DefaultFormats

  override def w(value: Array[TransientFeature]): ParamPair[Array[TransientFeature]] = super.w(value)

  override def jsonEncode(value: Array[TransientFeature]): String = {
    compact(render(JArray(value.map(_.toJson).toList)))
  }

  override def jsonDecode(json: String): Array[TransientFeature] = {
    parse(json).extract[Array[JValue]].map(obj => {
      TransientFeature(obj) match {
        case Failure(e) => throw new RuntimeException("Failed to parse TransientFeature", e)
        case Success(v) => v
      }
    })
  }
} 
Example 4
Source File: FeatureJsonHelper.scala    From TransmogrifAI   with BSD 3-Clause "New" or "Revised" License
package com.salesforce.op.features

import com.salesforce.op.features.types._
import com.salesforce.op.stages.{OPStage, OpPipelineStage}
import org.json4s.JsonAST.{JObject, JValue}
import org.json4s.JsonDSL._
import org.json4s.jackson.JsonMethods
import org.json4s.jackson.JsonMethods._
import org.json4s.{DefaultFormats, Formats}

import scala.reflect.runtime.universe.WeakTypeTag
import scala.util.Try



// Object header reconstructed for this excerpt
object FeatureJsonHelper {

  implicit val formats: Formats = DefaultFormats

  def fromJson(
    json: JValue,
    stages: Map[String, OPStage],
    features: Map[String, OPFeature]
  ): Try[OPFeature] = Try {
    val typeName = (json \ "typeName").extract[String]
    val uid = (json \ "uid").extract[String]
    val name = (json \ "name").extract[String]
    val isResponse = (json \ "isResponse").extract[Boolean]
    val originStageUid = (json \ "originStage").extract[String]
    val parentUids = (json \ "parents").extract[Array[String]]

    val originStage: Option[OPStage] = stages.get(originStageUid)
    if (originStage.isEmpty) {
      throw new RuntimeException(s"Origin stage $originStageUid not found for feature $name ($uid)")
    }

    // Order is important and so are duplicates, eg f = f1 + f1 has 2 parents but both the same feature
    val parents: Seq[OPFeature] = parentUids.flatMap(id => features.get(id))
    if (parents.length != parentUids.length) {
      throw new RuntimeException(s"Not all the parent features were found for feature $name ($uid)")
    }

    val wtt = FeatureType.featureTypeTag(typeName).asInstanceOf[WeakTypeTag[FeatureType]]
    Feature[FeatureType](
      uid = uid,
      name = name,
      isResponse = isResponse,
      parents = parents,
      originStage = originStage.get.asInstanceOf[OpPipelineStage[FeatureType]]
    )(wtt = wtt)

  }

} 
Example 5
Source File: OpPipelineStageReaderWriterTest.scala    From TransmogrifAI   with BSD 3-Clause "New" or "Revised" License
package com.salesforce.op.stages

import com.salesforce.op.features._
import com.salesforce.op.features.types._
import com.salesforce.op.stages.OpPipelineStageReaderWriter._
import com.salesforce.op.test.PassengerSparkFixtureTest
import com.salesforce.op.utils.reflection.ReflectionUtils
import com.salesforce.op.utils.spark.RichDataset._
import org.apache.spark.ml.{Model, Transformer}
import org.apache.spark.sql.types.{DataType, Metadata, MetadataBuilder}
import org.json4s.JsonAST.JValue
import org.json4s.jackson.JsonMethods.{compact, parse, pretty, render}
import org.json4s.{JArray, JObject}
import org.scalatest.FlatSpec
import org.slf4j.LoggerFactory


// TODO: consider adding a read/write test for a spark wrapped stage as well
private[stages] abstract class OpPipelineStageReaderWriterTest
  extends FlatSpec with PassengerSparkFixtureTest {

  val meta = new MetadataBuilder().putString("foo", "bar").build()
  val expectedFeaturesLength = 1
  def stage: OpPipelineStageBase with Transformer
  val expected: Array[Real]
  val hasOutputName = true

  private val log = LoggerFactory.getLogger(this.getClass)
  private lazy val savePath = tempDir + "/" + this.getClass.getSimpleName + "-" + System.currentTimeMillis()
  private lazy val writer = new OpPipelineStageWriter(stage)
  private lazy val stageJsonString: String = writer.writeToJsonString(savePath)
  private lazy val stageJson: JValue = parse(stageJsonString)
  private lazy val isModel = stage.isInstanceOf[Model[_]]
  private val FN = FieldNames

  Spec(this.getClass) should "write stage uid" in {
    log.info(pretty(stageJson))
    (stageJson \ FN.Uid.entryName).extract[String] shouldBe stage.uid
  }
  it should "write class name" in {
    (stageJson \ FN.Class.entryName).extract[String] shouldBe stage.getClass.getName
  }
  it should "write params map" in {
    val params = extractParams(stageJson).extract[Map[String, Any]]
    if (hasOutputName) {
      params should have size 4
      params.keys shouldBe Set("inputFeatures", "outputMetadata", "inputSchema", "outputFeatureName")
    } else {
      params should have size 3
      params.keys shouldBe Set("inputFeatures", "outputMetadata", "inputSchema")
    }
  }
  it should "write outputMetadata" in {
    val params = extractParams(stageJson)
    val metadataStr = compact(render(extractParams(stageJson) \ "outputMetadata"))
    val metadata = Metadata.fromJson(metadataStr)
    metadata shouldBe stage.getMetadata()
  }
  it should "write inputSchema" in {
    val schemaStr = compact(render(extractParams(stageJson) \ "inputSchema"))
    val schema = DataType.fromJson(schemaStr)
    schema shouldBe stage.getInputSchema()
  }
  it should "write input features" in {
    val jArray = (extractParams(stageJson) \ "inputFeatures").extract[JArray]
    jArray.values should have length expectedFeaturesLength
    val obj = jArray(0).extract[JObject]
    obj.values.keys shouldBe Set("name", "isResponse", "isRaw", "uid", "typeName", "stages", "originFeatures")
  }
  it should "write model ctor args" in {
    if (stage.isInstanceOf[Model[_]]) {
      val ctorArgs = (stageJson \ FN.CtorArgs.entryName).extract[JObject]
      val (_, args) = ReflectionUtils.bestCtorWithArgs(stage)
      ctorArgs.values.keys shouldBe args.map(_._1).toSet
    }
  }
  it should "load stage correctly" in {
    val reader = new OpPipelineStageReader(stage)
    val stageLoaded = reader.loadFromJsonString(stageJsonString, path = savePath)
    stageLoaded shouldBe a[OpPipelineStageBase]
    stageLoaded shouldBe a[Transformer]
    stageLoaded.getOutput() shouldBe a[FeatureLike[_]]
    val _ = stage.asInstanceOf[Transformer].transform(passengersDataSet)
    val transformed = stageLoaded.asInstanceOf[Transformer].transform(passengersDataSet)
    transformed.collect(stageLoaded.getOutput().asInstanceOf[FeatureLike[Real]]) shouldBe expected
    stageLoaded.uid shouldBe stage.uid
    stageLoaded.operationName shouldBe stage.operationName
    stageLoaded.getInputFeatures() shouldBe stage.getInputFeatures()
    stageLoaded.getInputSchema() shouldBe stage.getInputSchema()
  }

  private def extractParams(stageJson: JValue): JValue = {
    val defaultParamsMap = stageJson \ FN.DefaultParamMap.entryName
    val paramsMap = stageJson \ FN.ParamMap.entryName
    defaultParamsMap.merge(paramsMap)
  }

} 
Example 6
Source File: StructField.scala    From spark1.52   with Apache License 2.0
package org.apache.spark.sql.types

import org.json4s.JsonAST.JValue
import org.json4s.JsonDSL._


// Case class header reconstructed for this excerpt (matches Spark's StructField):
case class StructField(
    name: String,
    dataType: DataType,
    nullable: Boolean = true,
    metadata: Metadata = Metadata.empty) {

  // No-arg constructor used by serialization frameworks
  protected def this() = this(null, null)

  private[sql] def buildFormattedString(prefix: String, builder: StringBuilder): Unit = {
    builder.append(s"$prefix-- $name: ${dataType.typeName} (nullable = $nullable)\n")
    DataType.buildFormattedString(dataType, s"$prefix    |", builder)
  }

  // override the default toString to be compatible with legacy parquet files.
  override def toString: String = s"StructField($name,$dataType,$nullable)"

  private[sql] def jsonValue: JValue = {
    ("name" -> name) ~
      ("type" -> dataType.jsonValue) ~
      ("nullable" -> nullable) ~
      ("metadata" -> metadata.jsonValue)
  }
} 
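Several examples on this page build JSON with json4s's JsonDSL, as jsonValue does above. A standalone sketch of the ~ operator (field names here are hypothetical):

import org.json4s.JsonDSL._
import org.json4s.jackson.JsonMethods.{compact, render}

object JsonDslDemo extends App {
  // ~ chains (key -> value) pairs into a JObject; primitives convert implicitly
  val field = ("name" -> "id") ~ ("type" -> "string") ~ ("nullable" -> false)
  println(compact(render(field))) // {"name":"id","type":"string","nullable":false}
}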
Example 7
Source File: StreamingQueryStatus.scala    From Spark-2.3.1   with Apache License 2.0
package org.apache.spark.sql.streaming

import org.json4s._
import org.json4s.JsonAST.JValue
import org.json4s.JsonDSL._
import org.json4s.jackson.JsonMethods._

import org.apache.spark.annotation.InterfaceStability


// Class header reconstructed for this excerpt (matches Spark's StreamingQueryStatus):
class StreamingQueryStatus protected[sql](
    val message: String,
    val isDataAvailable: Boolean,
    val isTriggerActive: Boolean) {

  def prettyJson: String = pretty(render(jsonValue))

  override def toString: String = prettyJson

  private[sql] def copy(
      message: String = this.message,
      isDataAvailable: Boolean = this.isDataAvailable,
      isTriggerActive: Boolean = this.isTriggerActive): StreamingQueryStatus = {
    new StreamingQueryStatus(
      message = message,
      isDataAvailable = isDataAvailable,
      isTriggerActive = isTriggerActive)
  }

  private[sql] def jsonValue: JValue = {
    ("message" -> JString(message.toString)) ~
    ("isDataAvailable" -> JBool(isDataAvailable)) ~
    ("isTriggerActive" -> JBool(isTriggerActive))
  }
} 
Example 8
Source File: UserDefinedType.scala    From BigDatalog   with Apache License 2.0
package org.apache.spark.sql.types

import org.json4s.JsonAST.JValue
import org.json4s.JsonDSL._

import org.apache.spark.annotation.DeveloperApi


// Class header reconstructed for this excerpt (Spark's Python UDT wrapper);
// serialize/deserialize and other members are elided here
private[sql] class PythonUserDefinedType(
    val sqlType: DataType,
    override val pyUDT: String,
    override val serializedPyClass: String) extends UserDefinedType[Any] {

  override def userClass: java.lang.Class[Any] = null

  override private[sql] def jsonValue: JValue = {
    ("type" -> "udt") ~
      ("pyClass" -> pyUDT) ~
      ("serializedClass" -> serializedPyClass) ~
      ("sqlType" -> sqlType.jsonValue)
  }

  override def equals(other: Any): Boolean = other match {
    case that: PythonUserDefinedType => this.pyUDT.equals(that.pyUDT)
    case _ => false
  }
} 
Example 9
Source File: StructField.scala    From BigDatalog   with Apache License 2.0
package org.apache.spark.sql.types

import org.json4s.JsonAST.JValue
import org.json4s.JsonDSL._


// Case class header reconstructed for this excerpt (matches Spark's StructField):
case class StructField(
    name: String,
    dataType: DataType,
    nullable: Boolean = true,
    metadata: Metadata = Metadata.empty) {

  // No-arg constructor used by serialization frameworks
  protected def this() = this(null, null)

  private[sql] def buildFormattedString(prefix: String, builder: StringBuilder): Unit = {
    builder.append(s"$prefix-- $name: ${dataType.typeName} (nullable = $nullable)\n")
    DataType.buildFormattedString(dataType, s"$prefix    |", builder)
  }

  // override the default toString to be compatible with legacy parquet files.
  override def toString: String = s"StructField($name,$dataType,$nullable)"

  private[sql] def jsonValue: JValue = {
    ("name" -> name) ~
      ("type" -> dataType.jsonValue) ~
      ("nullable" -> nullable) ~
      ("metadata" -> metadata.jsonValue)
  }
} 
Example 10
Source File: Json4sSupport.scala    From service-container   with Apache License 2.0
package com.github.vonnagy.service.container.http.json

import java.lang.reflect.InvocationTargetException

import akka.http.scaladsl.marshalling.{Marshaller, ToEntityMarshaller}
import akka.http.scaladsl.model.MediaTypes.`application/json`
import akka.http.scaladsl.unmarshalling.{FromEntityUnmarshaller, Unmarshaller}
import akka.util.ByteString
import org.json4s.JsonAST.JValue
import org.json4s.{Formats, MappingException, Serialization}


object Json4sSupport {

  // Reconstructed for this excerpt: flag controlling pretty vs. compact output
  sealed trait ShouldWritePretty
  object ShouldWritePretty {
    case object True extends ShouldWritePretty
    case object False extends ShouldWritePretty
  }

  // Reconstructed for this excerpt: marshals raw JSON strings as application/json
  // entities (the original file also defines the corresponding unmarshallers)
  import akka.http.scaladsl.model.HttpEntity
  val jsonStringMarshaller: ToEntityMarshaller[String] =
    Marshaller.opaque(s => HttpEntity(`application/json`, s))

  implicit def json4sMarshaller[A <: AnyRef](
                                              implicit serialization: Serialization,
                                              formats: Formats,
                                              shouldWritePretty: ShouldWritePretty = ShouldWritePretty.False
                                            ): ToEntityMarshaller[A] = {
    shouldWritePretty match {
      case ShouldWritePretty.False =>
        jsonStringMarshaller.compose(serialization.write[A])
      case ShouldWritePretty.True =>
        jsonStringMarshaller.compose(serialization.writePretty[A])
    }
  }

  implicit def json4sJValueMarshaller[A <: JValue](
                                                    implicit serialization: Serialization,
                                                    formats: Formats,
                                                    shouldWritePretty: ShouldWritePretty = ShouldWritePretty.False
                                                  ): ToEntityMarshaller[A] = {

    shouldWritePretty match {
      case ShouldWritePretty.False =>
        jsonStringMarshaller.compose(serialization.write[A])
      case ShouldWritePretty.True =>
        jsonStringMarshaller.compose(serialization.writePretty[A])
    }
  }
} 
Example 11
Source File: JsonActor.scala    From coral   with Apache License 2.0
package io.coral.actors.transform

import akka.actor.Props
import io.coral.actors.{SimpleEmitTrigger, CoralActor}
import io.coral.lib.JsonTemplate
import org.json4s.JsonAST.{JObject, JValue}
import org.json4s._

object JsonActor {
	implicit val formats = org.json4s.DefaultFormats

	def getParams(json: JValue) = {
		for {
			template <- (json \ "params" \ "template").extractOpt[JObject]
			if (JsonTemplate.validate(template))
		} yield {
			template
		}
	}

	def apply(json: JValue): Option[Props] = {
		getParams(json).map(_ => Props(classOf[JsonActor], json))
	}

}

class JsonActor(json: JObject)
	extends CoralActor(json)
	with SimpleEmitTrigger {

	val template = JsonTemplate(JsonActor.getParams(json).get)

	override def simpleEmitTrigger(json: JObject): Option[JValue] = {
		Some(template.interpret(json))
	}

} 
Example 12
Source File: MinMaxActor.scala    From coral   with Apache License 2.0
package io.coral.actors.transform

import akka.actor.Props
import io.coral.actors.CoralActor
import org.json4s.JsonAST.JValue
import org.json4s._
import org.json4s.jackson.JsonMethods._

import scala.concurrent.Future

object MinMaxActor {
	implicit val formats = org.json4s.DefaultFormats

	def getParams(json: JValue) = {
		for {
			min <- (json \ "params" \ "min").extractOpt[Double]
			max <- (json \ "params" \ "max").extractOpt[Double]
			field <- (json \ "params" \ "field").extractOpt[String]
			if (min < max)
		} yield {
			(min, max, field)
		}
	}

	def apply(json: JValue): Option[Props] = {
		getParams(json).map(_ => Props(classOf[MinMaxActor], json))
	}
}

class MinMaxActor(json: JObject) extends CoralActor(json) {
	val (min, max, field) = MinMaxActor.getParams(json).get

	override def trigger = {
		json: JObject => {
			Future {
				val value = (json \ field).extractOpt[Double]

				value match {
					case None =>
						Some(json)
					case Some(v) =>
						val capped = if (v < min) min
						else if (v > max) max
						else v

						val result = json mapField {
							case (f, JInt(_)) if f == field =>
								(f, JDouble(capped))
							case (f, JDouble(_)) if f == field =>
								(f, JDouble(capped))
							case other =>
								other
						}

						Some(result)
				}
			}
		}
	}
} 
Example 13
Source File: StatsActor.scala    From coral   with Apache License 2.0
package io.coral.actors.transform

import akka.actor.{ActorLogging, Props}
import org.json4s.JsonAST.JValue
import org.json4s.JsonDSL._
import org.json4s._
import org.json4s.jackson.JsonMethods.render
import io.coral.actors.{SimpleTimer, NoEmitTrigger, CoralActor}
import io.coral.lib.SummaryStatistics

import scala.language.implicitConversions

object StatsActor {
	implicit val formats = org.json4s.DefaultFormats

	def getParams(json: JValue) = {
		for {
			field <- (json \ "params" \ "field").extractOpt[String]
		} yield {
			field
		}
	}

	def apply(json: JValue): Option[Props] = {
		getParams(json).map(_ => Props(classOf[StatsActor], json))
	}
}

class StatsActor(json: JObject)
	extends CoralActor(json)
	with ActorLogging
	with NoEmitTrigger
	with SimpleTimer {
	implicit def double2jvalue(x: Double): JValue = if (x.isNaN) JNull else JDouble(x)

	val field = StatsActor.getParams(json).get
	val statistics = SummaryStatistics.mutable

	override def simpleTimer = {
		statistics.reset()
		Some(JNothing)
	}

	override def state = Map(
		("count", render(statistics.count)),
		("avg", render(statistics.average)),
		("sd", render(statistics.populationSd)),
		("min", render(statistics.min)),
		("max", render(statistics.max))
	)

	override def noEmitTrigger(json: JObject) = {
		for {
			value <- (json \ field).extractOpt[Double]
		} yield {
			statistics.append(value)
		}
	}
} 
Example 14
Source File: KafkaProducerActor.scala    From coral   with Apache License 2.0
package io.coral.actors.connector

import java.util.Properties

import akka.actor.{Props, ActorLogging}
import io.coral.actors.{NoEmitTrigger, CoralActor}
import io.coral.lib.KafkaJsonProducer.KafkaEncoder
import io.coral.lib.{KafkaJsonProducer, ConfigurationBuilder}
import org.json4s.JsonAST.{JObject, JValue}
import kafka.serializer.Encoder

object KafkaProducerActor {
	implicit val formats = org.json4s.DefaultFormats
	val builder = new ConfigurationBuilder("kafka.producer")

	def getParams(json: JValue) = {
		for {
			kafka <- (json \ "params" \ "kafka").extractOpt[JObject]
			topic <- (json \ "params" \ "topic").extractOpt[String]
		} yield {
			val properties = producerProperties(kafka)
			(properties, topic)
		}
	}

	private def producerProperties(json: JObject): Properties = {
		val properties = builder.properties
		json.values.foreach { case (k: String, v: String) => properties.setProperty(k, v) }
		properties
	}

	def apply(json: JValue): Option[Props] = {
		getParams(json).map(_ => Props(classOf[KafkaProducerActor[KafkaEncoder]], json, KafkaJsonProducer()))
	}

	def apply[T <: KafkaEncoder](json: JValue, encoder: Class[T]): Option[Props] = {
		getParams(json).map(_ => Props(classOf[KafkaProducerActor[T]], json, KafkaJsonProducer(encoder)))
	}
}

class KafkaProducerActor[T <: Encoder[JValue]](json: JObject, connection: KafkaJsonProducer[T])
	extends CoralActor(json)
	with NoEmitTrigger
	with ActorLogging {
	val (properties, topic) = KafkaProducerActor.getParams(json).get
	lazy val kafkaSender = connection.createSender(topic, properties)

	override def noEmitTrigger(json: JObject) = {
		val key = (json \ "key").extractOpt[String]
		val message = (json \ "message").extract[JObject]
		send(key, message)
	}

	private def send(key: Option[String], message: JObject) = {
		try {
			kafkaSender.send(key, message)
		} catch {
			case e: Exception => log.error(e, "failed to send message to Kafka")
		}
	}
} 
Example 15
Source File: OpPipelineStageWriter.scala    From TransmogrifAI   with BSD 3-Clause "New" or "Revised" License
package com.salesforce.op.stages

import com.salesforce.op.stages.OpPipelineStageReaderWriter._
import com.salesforce.op.stages.sparkwrappers.generic.SparkWrapperParams
import org.apache.hadoop.fs.Path
import org.apache.spark.ml.util.MLWriter
import org.apache.spark.ml.{Estimator, SparkDefaultParamsReadWrite}
import org.json4s.JsonAST.{JObject, JValue}
import org.json4s.jackson.JsonMethods.{compact, render}

import scala.util.{Failure, Success}


// Class header reconstructed for this excerpt; saveImpl and writeToJsonString
// are elided here
class OpPipelineStageWriter(val stage: OpPipelineStageBase) extends MLWriter {

  def writeToJson(path: String): JObject = {
    stage match {
      case _: Estimator[_] => return JObject() // no need to serialize estimators
      case s: SparkWrapperParams[_] =>
        // Set save path for all Spark wrapped stages of type [[SparkWrapperParams]] so they can save
        s.setStageSavePath(path)
      case _ =>
    }
    // We produce stage metadata for all the Spark params
    val metadata = SparkDefaultParamsReadWrite.getMetadataToSave(stage)

    // Write out the stage using the specified writer instance
    val writer = readerWriterFor[OpPipelineStageBase](stage.getClass.asInstanceOf[Class[OpPipelineStageBase]])
    val stageJson: JValue = writer.write(stage) match {
      case Failure(err) => throw new RuntimeException(s"Failed to write out stage '${stage.uid}'", err)
      case Success(json) => json
    }

    // Join metadata & with stage ctor args
    val j = metadata.merge(JObject(FieldNames.CtorArgs.entryName -> stageJson))
    render(j).asInstanceOf[JObject]
  }

} 
Example 16
Source File: Trigger.scala    From coral   with Apache License 2.0
package io.coral.actors

import org.json4s._

import scala.concurrent.Future
import org.json4s.JsonAST.{JObject, JValue}

trait Trigger {
	type TriggerType = JObject => Future[Option[JValue]]

	def trigger: TriggerType
}

trait NoEmitTrigger extends Trigger {
	override def trigger: TriggerType =
		json => {
			noEmitTrigger(json)
			Future.successful(Some(JNothing))
		}

	def noEmitTrigger(json: JObject): Unit
}

trait NoTrigger extends Trigger {
	override def trigger: TriggerType = json => Future.successful(Some(JNothing))
}

trait SimpleEmitTrigger extends Trigger {
	override def trigger: TriggerType = {
		json =>
			Future.successful(simpleEmitTrigger(json))
	}

	def simpleEmitTrigger(json: JObject): Option[JValue]
} 
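For illustration, a minimal, hypothetical implementation of SimpleEmitTrigger (not part of Coral; assumes the traits above are in scope) that echoes its input with one extra field:

import org.json4s._

object EchoTrigger extends SimpleEmitTrigger {
	// Echo the incoming object, adding a marker field via JValue.merge
	override def simpleEmitTrigger(json: JObject): Option[JValue] =
		Some(json merge JObject("echoed" -> JBool(true)))
}

object EchoTriggerDemo extends App {
	import scala.concurrent.Await
	import scala.concurrent.duration._

	// The trait wraps the synchronous result in a Future
	val out = Await.result(EchoTrigger.trigger(JObject("a" -> JString("b"))), 1.second)
	println(out) // Some(JObject(List((a,JString(b)), (echoed,JBool(true)))))
}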
Example 17
Source File: Timer.scala    From coral   with Apache License 2.0
package io.coral.actors

import org.json4s._

import scala.concurrent.Future
import org.json4s.JsonAST.JValue

trait Timer {
	type TimerType = Future[Option[JValue]]
	def timer: TimerType
}

trait SimpleTimer extends Timer {
	override def timer: TimerType = {
		Future.successful(simpleTimer)
	}

	def simpleTimer: Option[JValue]
}

trait NoTimer extends Timer {
	override def timer: TimerType = Future.successful(Some(JNothing))
} 
Example 18
Source File: KafkaJsonProducer.scala    From coral   with Apache License 2.0
package io.coral.lib

import java.util.Properties

import io.coral.lib.KafkaJsonProducer.KafkaEncoder
import kafka.producer.{KeyedMessage, ProducerConfig, Producer}
import kafka.serializer.Encoder
import kafka.utils.VerifiableProperties
import org.json4s.JsonAST.{JObject, JValue}
import org.json4s.jackson.JsonMethods._

object KafkaJsonProducer {
	type KafkaEncoder = Encoder[JValue]
	def apply() = new KafkaJsonProducer(classOf[JsonEncoder])
	def apply[T <: KafkaEncoder](encoder: Class[T]) = new KafkaJsonProducer(encoder)
}

class KafkaJsonProducer[T <: KafkaEncoder](encoderClass: Class[T]) {
	def createSender(topic: String, properties: Properties): KafkaSender = {
		val props = properties.clone.asInstanceOf[Properties]
		props.put("serializer.class", encoderClass.getName)
		val producer = createProducer(props)
		new KafkaSender(topic, producer)
	}

	def createProducer(props: Properties): Producer[String, JValue] = {
		new Producer[String, JValue](new ProducerConfig(props))
	}
}

class KafkaSender(topic: String, producer: Producer[String, JValue]) {
	def send(key: Option[String], message: JObject) = {
		val keyedMessage: KeyedMessage[String, JValue] = key match {
			case Some(key) => new KeyedMessage(topic, key, message)
			case None => new KeyedMessage(topic, message)
		}

		producer.send(keyedMessage)
	}
}

class JsonEncoder(verifiableProperties: VerifiableProperties) extends KafkaEncoder {
	override def toBytes(value: JValue): Array[Byte] = {
		compact(value).getBytes("UTF-8")
	}
} 
Example 19
Source File: KafkaJsonConsumer.scala    From coral   with Apache License 2.0 5 votes vote down vote up
package io.coral.lib

import java.util.Properties

import com.fasterxml.jackson.core.JsonParseException
import kafka.consumer._
import kafka.serializer.{Decoder, DefaultDecoder}
import org.json4s.JsonAST.{JNothing, JValue}
import org.json4s.jackson.JsonMethods._

object KafkaJsonConsumer {
	def apply() = new KafkaJsonConsumer(JsonDecoder)
	def apply(decoder: Decoder[JValue]) = new KafkaJsonConsumer(decoder)
}

class KafkaJsonConsumer(decoder: Decoder[JValue]) {
	def stream(topic: String, properties: Properties): KafkaJsonStream = {
		val connection = Consumer.create(new ConsumerConfig(properties))
		val stream = connection.createMessageStreamsByFilter(
			Whitelist(topic), 1, new DefaultDecoder, decoder)(0)
		new KafkaJsonStream(connection, stream)
	}
}

class KafkaJsonStream(connection: ConsumerConnector, stream: KafkaStream[Array[Byte], JValue]) {
	private lazy val it = stream.iterator

	// this method relies on a timeout value having been set
	@inline def hasNextInTime: Boolean =
		try {
			it.hasNext
		} catch {
			case cte: ConsumerTimeoutException => false
		}

	@inline def next: JValue = it.next.message
	@inline def commitOffsets = connection.commitOffsets
}

object JsonDecoder extends Decoder[JValue] {
	val encoding = "UTF8"

	override def fromBytes(bytes: Array[Byte]): JValue = {
		val s = new String(bytes, encoding)
		try {
			parse(s)
		} catch {
			case jpe: JsonParseException => JNothing
		}
	}
} 
Example 20
Source File: JsonActorSpec.scala    From coral   with Apache License 2.0
package io.coral.actors.transform

import akka.actor.ActorSystem
import akka.testkit.{ImplicitSender, TestActorRef, TestKit}
import akka.util.Timeout
import org.json4s.JsonAST.JValue
import org.json4s._
import org.json4s.jackson.JsonMethods._
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}

import scala.concurrent.duration._

class JsonActorSpec(_system: ActorSystem)
	extends TestKit(_system)
	with ImplicitSender
	with WordSpecLike
	with Matchers
	with BeforeAndAfterAll {
	def this() = this(ActorSystem("JsonActorSpec"))

	override def afterAll() {
		TestKit.shutdownActorSystem(system)
	}

	implicit val timeout = Timeout(100.millis)
	def createJsonActor(json: JValue): JsonActor = {
		val props = JsonActor(json).get
		val actorRef = TestActorRef[JsonActor](props)
		actorRef.underlyingActor
	}

	"JsonActor" should {
		"have a standard coral props supplier" in {
			val json = parse("""{ "type": "json", "params": { "template": {} } }""")
			val props = JsonActor(json).get
			props.actorClass shouldBe classOf[JsonActor]
		}

		"read the template parameter" in {
			val template = """{ "a": "someReference" }"""
			val json = parse(s"""{ "type": "json", "params": { "template": $template } }""")
			val actor = createJsonActor(json)
			actor.template.template shouldBe parse(template)
		}

		"emit the json based on template" in {
			val templateJson =
				"""{ "a": "ALPHA",
				  |  "b": "${beta}",
				  |  "c": { "d": 123,
				  |         "e": "${epsilon}"
				  |       },
				  |  "f": 1,
				  |  "g": 1.0
				  |}""".stripMargin
			val json = parse(s"""{ "type": "json", "params": { "template": ${templateJson} } }""")
			val actor = createJsonActor(json)
			val triggerJson = parse(
				"""{ "beta": "xyz",
				  |  "epsilon": 987
				  |}""".stripMargin)
			val expectedJson = parse(
				"""{ "a": "ALPHA",
				  |  "c": { "d": 123,
				  |         "e": 987
				  |       },
				  |  "f": 1,
				  |  "b": "xyz",
				  |  "g": 1.0
				  |}""".stripMargin)
			actor.simpleEmitTrigger(triggerJson.asInstanceOf[JObject]) shouldBe Some(expectedJson)
		}
	}
} 
Example 21
Source File: StatsActorSpec.scala    From coral   with Apache License 2.0
package io.coral.actors.transform

import akka.actor.ActorSystem
import akka.testkit.{ImplicitSender, TestActorRef, TestKit}
import akka.util.Timeout
import io.coral.actors.CoralActorFactory
import io.coral.api.DefaultModule
import org.json4s.JsonAST.JValue
import org.json4s.JsonDSL._
import org.json4s._
import org.json4s.jackson.JsonMethods._
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}
import scala.concurrent.Await
import scala.concurrent.duration._

class StatsActorSpec(_system: ActorSystem)
	extends TestKit(_system)
	with ImplicitSender
	with WordSpecLike
	with Matchers
	with BeforeAndAfterAll {
	def this() = this(ActorSystem("StatsActorSpec"))

	override def afterAll() {
		TestKit.shutdownActorSystem(system)
	}

	implicit val timeout = Timeout(100.millis)
	implicit val injector = new DefaultModule(system.settings.config)

	def createStatsActor: StatsActor = {
		val createJson = parse( """{ "type": "stats", "params": { "field": "val" } }""")
			.asInstanceOf[JObject]
		val props = CoralActorFactory.getProps(createJson).get
		val actorRef = TestActorRef[StatsActor](props)
		actorRef.underlyingActor
	}

	val expectedInitialState = Map(
		("count", render(0L)),
		("avg", render(JNull)),
		("sd", render(JNull)),
		("min", render(JNull)),
		("max", render(JNull))
	)

	"StatsActor" should {
		"have a field corresponding to the json definition" in {
			val actor = createStatsActor
			actor.field should be("val")
		}

		"supply it's state" in {
			val actor = createStatsActor
			actor.state should be(expectedInitialState)
		}

		"accept a value as trigger" in {
			val actor = createStatsActor
			val triggerJson = parse( """{ "bla": 1.0, "val": 2.7 }""").asInstanceOf[JObject]
			actor.trigger(triggerJson)
			actor.state should be(
				Map(
					("count", render(1L)),
					("avg", render(2.7)),
					("sd", render(0.0)),
					("min", render(2.7)),
					("max", render(2.7))
				))
		}

		"have timer reset statistics" in {
			val actor = createStatsActor
			val triggerJson = parse( """{ "val": 2.7 }""").asInstanceOf[JObject]
			actor.trigger(triggerJson)
			actor.state should be(
				Map(
					("count", render(1L)),
					("avg", render(2.7)),
					("sd", render(0.0)),
					("min", render(2.7)),
					("max", render(2.7))
				))
			val future = actor.timer
			val json = Await.result(future, timeout.duration).get
			json should be(JNothing)
			actor.state should be(expectedInitialState)
		}
	}
} 
Example 22
Source File: DefaultModuleSpec.scala    From coral   with Apache License 2.0
package io.coral.actors

import akka.actor.{ActorSystem, Props}
import akka.testkit.TestKit
import com.typesafe.config.ConfigFactory
import io.coral.api.DefaultModule
import org.json4s.JsonAST.JValue
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}
import scaldi.Injectable._

class DefaultModuleSpec(_system: ActorSystem) extends TestKit(_system)
	with WordSpecLike
	with Matchers
	with BeforeAndAfterAll {
	def this() = this(ActorSystem("testSystem"))

	override def afterAll() {
		TestKit.shutdownActorSystem(system)
	}

	"The DefaultModule" should {
		"have the DefaultActorPropFactory when no configuration is made" in {
			implicit val module = new DefaultModule(ConfigFactory.empty)
			val actorPropFactories = inject[List[ActorPropFactory]]

			assert(actorPropFactories.size == 1)
			assert(actorPropFactories(0).getClass == classOf[DefaultActorPropFactory])
		}

		"have the DefaultActorPropFactory when a configuration is made" in {
			val config = """injections.actorPropFactories = ["io.coral.actors.AdditionalActorPropFactoryOne"]"""
			implicit val module = new DefaultModule(ConfigFactory.parseString(config))

			val actorPropFactories = inject[List[ActorPropFactory]]

			assert(actorPropFactories.size == 2)
			assert(actorPropFactories(0).getClass == classOf[DefaultActorPropFactory])
			assert(actorPropFactories(1).getClass == classOf[AdditionalActorPropFactoryOne])
		}

		"should have the ActorPropFactories in the defined order" in {
			val config =
				"""injections.actorPropFactories = ["io.coral.actors.AdditionalActorPropFactoryOne",
				  |"io.coral.actors.AdditionalActorPropFactoryTwo"]""".stripMargin
			implicit val module = new DefaultModule(ConfigFactory.parseString(config))

			val actorPropFactories = inject[List[ActorPropFactory]]

			assert(actorPropFactories.size == 3)
			assert(actorPropFactories(0).getClass == classOf[DefaultActorPropFactory])
			assert(actorPropFactories(1).getClass == classOf[AdditionalActorPropFactoryOne])
			assert(actorPropFactories(2).getClass == classOf[AdditionalActorPropFactoryTwo])
		}
	}
}

class AdditionalActorPropFactoryOne extends ActorPropFactory {
	override def getProps(actorType: String, params: JValue): Option[Props] = None
}

class AdditionalActorPropFactoryTwo extends ActorPropFactory {
	override def getProps(actorType: String, params: JValue): Option[Props] = None
} 
Example 23
Source File: KafkaJsonProducerSpec.scala    From coral   with Apache License 2.0
package io.coral.lib

import java.util.Properties

import io.coral.lib.KafkaJsonProducer.KafkaEncoder
import kafka.utils.VerifiableProperties
import org.json4s.JsonAST.{JObject, JValue}
import org.scalatest.{Matchers, WordSpec}
import org.json4s.jackson.JsonMethods._
import kafka.producer.{ProducerConfig, KeyedMessage, Producer}
import org.mockito.{Mockito, ArgumentCaptor}
import org.mockito.Mockito._
import scala.collection.mutable

class KafkaJsonProducerSpec extends WordSpec with Matchers {
	"A KafkaJsonProducer" should {
		"create a KafkaJsonProducer with the JsonEncoder" in {
			val producer = KafkaJsonProducer()
			assert(producer.getClass == classOf[KafkaJsonProducer[JsonEncoder]])
		}

		"create a KafkaJsonProducer with the specified Encoder" in {
			val producer = KafkaJsonProducer(classOf[MyEncoder])
			assert(producer.getClass == classOf[KafkaJsonProducer[MyEncoder]])
		}

		"create a sender" in {
			val producer = new MyKafkaJsonProducer
			producer.createSender("topic", new Properties)
			val serializer = producer.receivedProperties.get("serializer.class")
			assert(serializer == classOf[MyEncoder].getName)
		}
	}

	"A KafkaSender" should {
		"send the JSON provided without a key to Kafka" in {
			val messageJson = """{"key1": "value1", "key2": "value2"}"""

			val keyedMessage = sendMessage(None, messageJson)

			assert(keyedMessage.topic == "test")
			assert(keyedMessage.hasKey == false)
			assert(keyedMessage.message == parse(messageJson))
		}

		"send the JSON provided with a key to Kafka" in {
			val messageJson = """{"key3": "value3", "key4": "value4"}"""

			val keyedMessage = sendMessage(Some("key"), messageJson)

			assert(keyedMessage.key == "key")
			assert(keyedMessage.topic == "test")
			assert(keyedMessage.message == parse(messageJson))
		}
	}

	"A JsonEncoder" should {
		"encode the provided json" in {
			val json = """{"key1": "value1"}"""
			val encoder = new JsonEncoder(new VerifiableProperties)
			val result = encoder.toBytes(parse(json))
			assert(parse(new String(result, "UTF-8")) == parse(json))
		}
	}

	private def sendMessage(key: Option[String], messageJson: String): KeyedMessage[String, JValue] = {
		val producer = Mockito.mock(classOf[Producer[String, JValue]])
		val sender = new KafkaSender("test", producer)
		sender.send(key, parse(messageJson).asInstanceOf[JObject])

		val argumentCaptor = ArgumentCaptor.forClass(classOf[KeyedMessage[String, JValue]])
		verify(producer).send(argumentCaptor.capture())

		val keyedMessages = argumentCaptor.getAllValues
		assert(keyedMessages.size == 1)

		// The following construction is necessary because capturing of parameters
		// with Mockito, Scala type inference, and multiple arguments
		// don't work together without explicit casts.
		keyedMessages.get(0).asInstanceOf[mutable.WrappedArray.ofRef[KeyedMessage[String, JValue]]](0)
	}
}

class MyEncoder(verifiableProperties: VerifiableProperties) extends KafkaEncoder {
	override def toBytes(value: JValue): Array[Byte] = {
		Array()
	}
}

class MyKafkaJsonProducer extends KafkaJsonProducer(classOf[MyEncoder]) {
	var receivedProperties: Properties = _

	override def createProducer(props: Properties): Producer[String, JValue] = {
		receivedProperties = props
		Mockito.mock(classOf[Producer[String, JValue]])
	}
} 
Example 24
Source File: KafkaJsonConsumerSpec.scala    From coral   with Apache License 2.0
package io.coral.lib

import java.util.Properties

import kafka.consumer._
import kafka.message.MessageAndMetadata
import org.json4s.JsonAST.{JNothing, JValue}
import org.json4s.jackson.JsonMethods._
import org.mockito.Mockito._
import org.scalatest.mock.MockitoSugar
import org.scalatest.{Matchers, WordSpec}

class KafkaJsonConsumerSpec extends WordSpec with Matchers with MockitoSugar {
	"KafkaJsonConsumer" should {
		"provide a stream" in {
			val consumer = KafkaJsonConsumer()
			intercept[IllegalArgumentException] {
        		consumer.stream("abc", new Properties())
      		}
		}
	}

	"KafkaJsonStream" should {
		val fakeConnection = mock[ConsumerConnector]
		doNothing.when(fakeConnection).commitOffsets

		val fakeMessage = mock[MessageAndMetadata[Array[Byte], JValue]]
		when(fakeMessage.key()).thenReturn("TestKey".getBytes)
		when(fakeMessage.message()).thenReturn(parse( """{ "json": "test" }"""))

		val fakeIterator = mock[ConsumerIterator[Array[Byte], JValue]]
		when(fakeIterator.hasNext()).thenReturn(true).thenReturn(false)
		when(fakeIterator.next()).thenReturn(fakeMessage)

		val fakeStream = mock[KafkaStream[Array[Byte], JValue]]
		when(fakeStream.iterator()).thenReturn(fakeIterator)

		"provide a next value" in {
			val kjs = new KafkaJsonStream(fakeConnection, fakeStream)
			kjs.hasNextInTime shouldBe true
			kjs.next shouldBe parse( """{ "json": "test" }""")
		}
	}

	"JsonDecoder" should {
		"convert bytes to Json object" in {
			val jsonString = """{ "hello": "json" }"""
			val bytes = jsonString.getBytes
			val jsonValue = parse(jsonString)
			JsonDecoder.fromBytes(bytes) shouldBe jsonValue
		}

		"return JNothing for invalid JSon" in {
			val jsonString = """hello"""
			val bytes = jsonString.getBytes
			JsonDecoder.fromBytes(bytes) shouldBe JNothing
		}
	}
} 
Example 25
Source File: Library.scala    From scala-clippy   with Apache License 2.0
package com.softwaremill.clippy

import org.json4s.JsonAST.{JField, JObject, JString, JValue}

case class Library(groupId: String, artifactId: String, version: String) {
  def toJson: JValue = JObject(
    "groupId"    -> JString(groupId),
    "artifactId" -> JString(artifactId),
    "version"    -> JString(version)
  )

  override def toString = s"$groupId:$artifactId:$version"
}

object Library {
  def fromJson(jvalue: JValue): Option[Library] =
    (for {
      JObject(fields)                           <- jvalue
      JField("groupId", JString(groupId))       <- fields
      JField("artifactId", JString(artifactId)) <- fields
      JField("version", JString(version))       <- fields
    } yield Library(groupId, artifactId, version)).headOption
} 
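A quick round-trip sketch through the codec above (the coordinates are hypothetical):

object LibraryDemo extends App {
  val lib = Library("org.json4s", "json4s-jackson", "3.6.12")
  val restored = Library.fromJson(lib.toJson)
  // fromJson re-extracts all three fields from the JObject built by toJson
  assert(restored.contains(lib))
  println(lib) // org.json4s:json4s-jackson:3.6.12
}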
Example 26
Source File: AnyFormat.scala    From scalapb-json4s   with Apache License 2.0
package scalapb.json4s

import com.google.protobuf.any.{Any => PBAny}
import org.json4s.JsonAST.{JNothing, JObject, JString, JValue}

import scala.language.existentials

object AnyFormat {
  val anyWriter: (Printer, PBAny) => JValue = {
    case (printer, any) =>
      // Find the companion so it can be used to JSON-serialize the message. Perhaps this can be circumvented by
      // including the original GeneratedMessage with the Any (at least in memory).
      val cmp = printer.typeRegistry
        .findType(any.typeUrl)
        .getOrElse(
          throw new IllegalStateException(
            s"Unknown type ${any.typeUrl} in Any.  Add a TypeRegistry that supports this type to the Printer."
          )
        )

      // Unpack the message...
      val message = any.unpack(cmp)

      // ... and add the @type marker to the resulting JSON
      printer.toJson(message) match {
        case JObject(fields) =>
          JObject(("@type" -> JString(any.typeUrl)) +: fields)
        case value =>
          // Safety net, this shouldn't happen
          throw new IllegalStateException(
            s"Message of type ${any.typeUrl} emitted non-object JSON: $value"
          )
      }
  }

  val anyParser: (Parser, JValue) => PBAny = {
    case (parser, obj @ JObject(fields)) =>
      obj \ "@type" match {
        case JString(typeUrl) =>
          val cmp = parser.typeRegistry
            .findType(typeUrl)
            .getOrElse(
              throw new JsonFormatException(
                s"Unknown type ${typeUrl} in Any.  Add a TypeRegistry that supports this type to the Parser."
              )
            )
          val message = parser.fromJson(obj, true)(cmp)
          PBAny(typeUrl = typeUrl, value = message.toByteString)

        case JNothing =>
          throw new JsonFormatException(s"Missing type url when parsing $obj")

        case unknown =>
          throw new JsonFormatException(
            s"Expected string @type field, got $unknown"
          )
      }

    case (_, unknown) =>
      throw new JsonFormatException(s"Expected an object, got $unknown")
  }
} 
Example 27
Source File: ReceiptRenderer.scala    From apple-of-my-iap   with MIT License
package com.meetup.iap.receipt

import java.text.SimpleDateFormat

import com.meetup.iap.AppleApi
import AppleApi.{ReceiptResponse, ReceiptInfo}

import java.util.{Date, TimeZone}

import org.json4s.JsonDSL._
import org.json4s.native.JsonMethods._
import org.json4s.JsonAST.JValue
import org.slf4j.LoggerFactory

object ReceiptRenderer {
  val log = LoggerFactory.getLogger(ReceiptRenderer.getClass)

  private def appleDateFormat(date: Date): String = {
    val sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss 'Etc/GMT'")
    sdf.setTimeZone(TimeZone.getTimeZone("UTC"))
    sdf.format(date)
  }

  def apply(response: ReceiptResponse): String = {
    pretty(render(
      ("status" -> response.statusCode) ~
        ("latest_receipt_info" -> response.latestReceiptInfo.reverse.map(renderReceipt)) ~
        ("latest_receipt" -> response.latestReceipt)))
  }

  private def renderReceipt(receiptInfo: ReceiptInfo): JValue = {
    val origPurchaseDate = receiptInfo.originalPurchaseDate
    val origPurchaseDateStr = appleDateFormat(origPurchaseDate)
    val origPurchaseDateMs = origPurchaseDate.getTime

    val purchaseDate = receiptInfo.purchaseDate
    val purchaseDateStr = appleDateFormat(purchaseDate)
    val purchaseDateMs = purchaseDate.getTime

    val expiresDate = receiptInfo.expiresDate
    val expiresDateStr = appleDateFormat(expiresDate)
    val expiresDateMs = expiresDate.getTime

    val cancellationDate = receiptInfo.cancellationDate.map { date =>
      appleDateFormat(date)
    }
    ("quantity" -> "1") ~
      ("product_id" -> receiptInfo.productId) ~
      ("transaction_id" -> receiptInfo.transactionId) ~
      ("original_transaction_id" -> receiptInfo.originalTransactionId) ~
      ("purchase_date" -> purchaseDateStr) ~
      ("purchase_date_ms" -> purchaseDateMs.toString) ~
      ("original_purchase_date" -> origPurchaseDateStr) ~
      ("original_purchase_date_ms" -> origPurchaseDateMs.toString) ~
      ("expires_date" -> expiresDateStr) ~
      ("expires_date_ms" -> expiresDateMs.toString) ~
      ("is_trial_period" -> receiptInfo.isTrialPeriod.toString) ~ //We mimic Apple's weird json here by converting the boolean type to a string
      ("is_in_intro_offer_period" -> receiptInfo.isInIntroOfferPeriod.map(_.toString)) ~
      ("cancellation_date" -> cancellationDate)
  }
} 
Example 28
Source File: UnifiedSparkListener.scala    From spark-monitoring   with MIT License
package org.apache.spark.listeners

import java.time.Instant

import org.apache.spark.{SparkConf, SparkException, SparkInformation}
import org.apache.spark.internal.Logging
import org.apache.spark.listeners.sink.SparkListenerSink
import org.apache.spark.scheduler._
import org.apache.spark.sql.streaming.StreamingQueryListener
import org.apache.spark.util.JsonProtocol
import org.json4s.JsonAST.JValue
import org.json4s.JsonDSL._
import org.json4s.jackson.JsonMethods.{compact, render}

import scala.util.control.NonFatal


class UnifiedSparkListener(override val conf: SparkConf)
  extends UnifiedSparkListenerHandler
    with Logging
    with SparkListenerHandlers
    with StreamingListenerHandlers
    with StreamingQueryListenerHandlers {

  private val listenerSink = this.createSink(this.conf)

  override def onOtherEvent(event: SparkListenerEvent): Unit = {
    // All events in Spark that are not specific to SparkListener go through
    // this method.  The typed ListenerBus implementations intercept and forward to
    // their "local" listeners.
    // We will just handle everything here so we only have to have one listener.
    // The advantage is that this can be registered in extraListeners, so no
    // code change is required to add listener support.
    event match {
      // We will use the ClassTag for the private wrapper class to match
      case this.streamingListenerEventClassTag(e) =>
        this.onStreamingListenerEvent(e)
      case streamingQueryListenerEvent: StreamingQueryListener.Event =>
        this.onStreamingQueryListenerEvent(streamingQueryListenerEvent)
      case sparkListenerEvent: SparkListenerEvent => if (sparkListenerEvent.logEvent) {
        logSparkListenerEvent(sparkListenerEvent)
      }
    }
  }

  private def createSink(conf: SparkConf): SparkListenerSink = {
    val sink = conf.getOption("spark.unifiedListener.sink") match {
      case Some(listenerSinkClassName) => listenerSinkClassName
      case None => throw new SparkException("spark.unifiedListener.sink setting is required")
    }
    logInfo(s"Creating listener sink: ${sink}")
    org.apache.spark.util.Utils.loadExtensions(
      classOf[SparkListenerSink],
      Seq(sink),
      conf).head
  }

  protected def logSparkListenerEvent(
                                       event: SparkListenerEvent,
                                       getTimestamp: () => Instant =
                                       () => Instant.now()): Unit = {
    val json = try {
      // Add a well-known time field.
      Some(
        JsonProtocol.sparkEventToJson(event)
          .merge(render(
            SparkInformation.get() + ("SparkEventTime" -> getTimestamp().toString)
          ))
      )
    } catch {
      case NonFatal(e) =>
        logError(s"Error serializing SparkListenerEvent to JSON: $event", e)
        None
    }

    sendToSink(json)
  }

  private[spark] def sendToSink(json: Option[JValue]): Unit = {
    try {
      json match {
        case Some(j) => {
          logDebug(s"Sending event to listener sink: ${compact(j)}")
          this.listenerSink.logEvent(json)
        }
        case None => {
          logWarning("json value was None")
        }
      }
    } catch {
      case NonFatal(e) =>
        logError(s"Error sending to listener sink: $e")
    }
  }
} 
Example 29
Source File: JsonRpcError.scala    From mantis   with Apache License 2.0
package io.iohk.ethereum.jsonrpc

import org.json4s.JsonAST.JValue

case class JsonRpcError(code: Int, message: String, data: Option[JValue])

// scalastyle:off magic.number
// scalastyle:off public.methods.have.type
object JsonRpcErrors {
  val ParseError = JsonRpcError(-32700, "An error occurred on the server while parsing the JSON text", None)
  val InvalidRequest = JsonRpcError(-32600, "The JSON sent is not a valid Request object", None)
  val MethodNotFound = JsonRpcError(-32601, "The method does not exist / is not available", None)
  def InvalidParams(msg: String = "Invalid method parameters") = JsonRpcError(-32602, msg, None)
  val InternalError = JsonRpcError(-32603, "Internal JSON-RPC error", None)
  def LogicError(msg: String) = JsonRpcError(-32000, msg, None)
  val AccountLocked = LogicError("account is locked or unknown")
} 
Example 30
Source File: EncryptedKeyJsonCodec.scala    From mantis   with Apache License 2.0
package io.iohk.ethereum.keystore

import java.util.UUID

import akka.util.ByteString
import io.iohk.ethereum.domain.Address
import io.iohk.ethereum.keystore.EncryptedKey._
import org.json4s.JsonAST.{JObject, JString, JValue}
import org.json4s.JsonDSL._
import org.json4s.native.JsonMethods._
import org.json4s.{CustomSerializer, DefaultFormats, Extraction, JField}
import org.spongycastle.util.encoders.Hex

import scala.util.Try

object EncryptedKeyJsonCodec {

  private val byteStringSerializer = new CustomSerializer[ByteString](_ => (
    { case JString(s) => ByteString(Hex.decode(s)) },
    { case bs: ByteString => JString(Hex.toHexString(bs.toArray)) }
  ))

  private implicit val formats = DefaultFormats + byteStringSerializer

  private def asHex(bs: ByteString): String =
    Hex.toHexString(bs.toArray)

  def toJson(encKey: EncryptedKey): String = {
    import encKey._
    import cryptoSpec._

    val json =
      ("id" -> id.toString) ~
      ("address" -> asHex(address.bytes)) ~
      ("version" -> version) ~
      ("crypto" -> (
        ("cipher" -> cipher) ~
        ("ciphertext" -> asHex(ciphertext)) ~
        ("cipherparams" -> ("iv" -> asHex(iv))) ~
        encodeKdf(kdfParams) ~
        ("mac" -> asHex(mac))
      ))

    pretty(render(json))
  }

  def fromJson(jsonStr: String): Either[String, EncryptedKey] = Try {
    val json = parse(jsonStr).transformField { case JField(k, v) => JField(k.toLowerCase, v) }

    val uuid = UUID.fromString((json \ "id").extract[String])
    val address = Address((json \ "address").extract[String])
    val version = (json \ "version").extract[Int]

    val crypto = json \ "crypto"
    val cipher = (crypto \ "cipher").extract[String]
    val ciphertext = (crypto \ "ciphertext").extract[ByteString]
    val iv = (crypto \ "cipherparams" \ "iv").extract[ByteString]
    val mac = (crypto \ "mac").extract[ByteString]

    val kdfParams = extractKdf(crypto)
    val cryptoSpec = CryptoSpec(cipher, ciphertext, iv, kdfParams, mac)
    EncryptedKey(uuid, address, cryptoSpec, version)

  }.fold(ex => Left(ex.toString), encKey => Right(encKey))

  private def encodeKdf(kdfParams: KdfParams): JObject =
    kdfParams match {
      case ScryptParams(salt, n, r, p, dklen) =>
        ("kdf" -> Scrypt) ~
        ("kdfparams" -> Extraction.decompose(kdfParams))

      case Pbkdf2Params(salt, prf, c, dklen) =>
        ("kdf" -> Pbkdf2) ~
        ("kdfparams" -> Extraction.decompose(kdfParams))
    }

  private def extractKdf(crypto: JValue): KdfParams = {
    val kdf = (crypto \ "kdf").extract[String]
    kdf.toLowerCase match {
      case Scrypt =>
        (crypto \ "kdfparams").extract[ScryptParams]

      case Pbkdf2 =>
        (crypto \ "kdfparams").extract[Pbkdf2Params]
    }
  }

} 
Example 31
Source File: OrderSerializerTest.scala    From akka-serialization-test   with Apache License 2.0
package com.github.dnvriend.serializer.json

import akka.serialization.Serializer
import com.github.dnvriend.TestSpec
import com.github.dnvriend.generator.OrderGenerator
import org.json4s.JsonAST.JValue
import org.json4s.native.JsonMethods._

class OrderSerializerTest extends TestSpec {

  it should "serialize random orders to JSON" in {
    forAll(OrderGenerator.genOrder) { order ⇒
      val serializer: Serializer = serialization.findSerializerFor(order)
      val binary = serializer.toBinary(order)
      val jsonAst = parse(new String(binary))
      jsonAst shouldBe a[JValue]
    }
  }

  it should "serialize random orders from AnyRef to JSON and back to AnyRef" in {
    forAll(OrderGenerator.genOrder) { order ⇒
      val serializer: Serializer = serialization.findSerializerFor(order)
      val binary = serializer.toBinary(order)
      parse(new String(binary)) shouldBe a[JValue]
      serializer.fromBinary(binary) shouldBe order
    }
  }
} 
Example 32
Source File: StreamingQueryStatus.scala    From XSQL   with Apache License 2.0
package org.apache.spark.sql.streaming

import org.json4s._
import org.json4s.JsonAST.JValue
import org.json4s.JsonDSL._
import org.json4s.jackson.JsonMethods._

import org.apache.spark.annotation.InterfaceStability


// Class header reconstructed for this excerpt (matches Spark's StreamingQueryStatus):
class StreamingQueryStatus protected[sql](
    val message: String,
    val isDataAvailable: Boolean,
    val isTriggerActive: Boolean) {

  def prettyJson: String = pretty(render(jsonValue))

  override def toString: String = prettyJson

  private[sql] def copy(
      message: String = this.message,
      isDataAvailable: Boolean = this.isDataAvailable,
      isTriggerActive: Boolean = this.isTriggerActive): StreamingQueryStatus = {
    new StreamingQueryStatus(
      message = message,
      isDataAvailable = isDataAvailable,
      isTriggerActive = isTriggerActive)
  }

  private[sql] def jsonValue: JValue = {
    ("message" -> JString(message.toString)) ~
    ("isDataAvailable" -> JBool(isDataAvailable)) ~
    ("isTriggerActive" -> JBool(isTriggerActive))
  }
} 
Example 33
Source File: DescTableReponse.scala    From XSQL   with Apache License 2.0
package org.apache.spark.sql.execution.datasources.druid

import org.json4s.JsonAST.{JArray, JObject, JValue}
import org.json4s.JsonDSL._
import scala.collection.mutable

case class DescTableRequest(dataSource: String) {

  def toJson: JValue = {
    JObject("queryType" -> "segmentMetadata", "dataSource" -> dataSource)
  }
}

case class DescTableResponse(data: Seq[(String, Any)])

object DescTableResponse {
  def parse(js: JValue): DescTableResponse = {
    val columnTypes = new mutable.HashMap[String, Any]
    js match {
      case JArray(results) =>
        val columns = (results.last \ "columns").asInstanceOf[JObject].values
        columns.foreach { case (name, props) =>
          columnTypes += (name -> props.asInstanceOf[Map[String, Any]]("type"))
        }
        DescTableResponse(columnTypes.toSeq.sortBy(_._1))
      case err =>
        throw new IllegalArgumentException("Invalid segment metadata response: " + err)
    }
  }
}
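A hypothetical segmentMetadata payload (column names made up) walks through parse:

import org.json4s.jackson.JsonMethods.parse

val js = parse("""[{"columns": {"added": {"type": "LONG"}, "page": {"type": "STRING"}}}]""")
DescTableResponse.parse(js)
// DescTableResponse(Seq(("added", "LONG"), ("page", "STRING")))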
Example 34
Source File: DataSourceScannerResponse.scala    From XSQL   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.sql.execution.datasources.druid

import org.json4s.JsonAST.{JArray, JValue}

case class DataSourceScannerResponse(data: Seq[String])

object DataSourceScannerResponse {
  def parse(js: JValue): DataSourceScannerResponse = {
    js match {
      case JArray(datasources) =>
        val res = datasources.map(ds => ds.values.toString).sortWith(_ < _)
        DataSourceScannerResponse(res)
      case err =>
        throw new IllegalArgumentException("Invalid datasource response: " + err)
    }
  }

} 
Example 35
Source File: PostAggregation.scala    From XSQL   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.sql.execution.datasources.druid

import org.json4s.JsonAST.{JObject, JValue}
import org.json4s.JsonDSL._

trait PostAggregationFieldSpec extends Expression {
  private def arith(rhs: PostAggregationFieldSpec, fn: String): PostAggregation =
    ArithmeticPostAggregation("n/a", fn, Seq(this, rhs))

  def *(rhs: PostAggregationFieldSpec): PostAggregation = arith(rhs, "*")

  def /(rhs: PostAggregationFieldSpec): PostAggregation = arith(rhs, "/")

  def +(rhs: PostAggregationFieldSpec): PostAggregation = arith(rhs, "+")

  def -(rhs: PostAggregationFieldSpec): PostAggregation = arith(rhs, "-")
}

trait PostAggregation extends PostAggregationFieldSpec {
  def as(outputName: String): PostAggregation
}

object PostAggregation {
  def constant(value: Double): ConstantPostAggregation =
    ConstantPostAggregation("constant", value)

  case class FieldAccess(fieldName: String) extends PostAggregationFieldSpec {
    def toJson: JValue = JObject("type" -> "fieldAccess", "fieldName" -> fieldName)
  }

}

case class ConstantPostAggregation(outputName: String, value: Double) extends PostAggregation {
  def toJson: JValue = JObject("type" -> "constant", "name" -> outputName, "value" -> value)

  def as(outputName: String): PostAggregation = copy(outputName = outputName)
}

case class ArithmeticPostAggregation(
    outputName: String,
    fn: String,
    fields: Seq[PostAggregationFieldSpec])
  extends PostAggregation {
  def toJson: JValue =
    JObject(
      "type" -> "arithmetic",
      "name" -> outputName,
      "fn" -> fn,
      "fields" -> fields.map(_.toJson))

  def as(outputName: String): ArithmeticPostAggregation = copy(outputName = outputName)
} 
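Post-aggregations compose through the arithmetic operators defined on PostAggregationFieldSpec; for example (metric names made up):

import PostAggregation.FieldAccess

val ctr = (FieldAccess("clicks") / FieldAccess("impressions")).as("ctr")
ctr.toJson
// {"type": "arithmetic", "name": "ctr", "fn": "/",
//  "fields": [{"type": "fieldAccess", "fieldName": "clicks"},
//             {"type": "fieldAccess", "fieldName": "impressions"}]}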
Example 36
Source File: QueryFilter.scala    From XSQL   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.sql.execution.datasources.druid

import org.json4s.JsonAST.{JArray, JNull, JObject, JValue}
import org.json4s.JsonDSL._
import org.json4s.jackson.JsonMethods._

sealed trait QueryFilter extends Expression {
  def and(other: QueryFilter): QueryFilter = And(Seq(this, other))

  def or(other: QueryFilter): QueryFilter = Or(Seq(this, other))
}

case class And(filters: Seq[Expression]) extends QueryFilter {

  override def and(other: QueryFilter): QueryFilter = copy(other +: filters)

  def toJson: JValue = JObject("type" -> "and", "fields" -> JArray(filters.toList.map(_.toJson)))
}

case class Or(filters: Seq[Expression]) extends QueryFilter {

  override def or(other: QueryFilter): QueryFilter = copy(other +: filters)

  def toJson: JValue = JObject("type" -> "or", "fields" -> JArray(filters.toList.map(_.toJson)))
}

case class Not(filter: Expression) extends QueryFilter {

  def toJson: JValue = JObject("type" -> "not", "field" -> filter.toJson)
}

case class IsNotNull(attributeNotNull: String) extends QueryFilter {

//  {
//    "field": {
//      "type": "selector",
//      "dimension": "added",
//      "value": ""
//    },
//    "type": "not"
//  }
  def toJson: JValue =
    JObject(
      "field" -> JObject("type" -> "selector", "dimension" -> attributeNotNull, "value" -> ""),
      "type" -> "not")
}

case class ExprQueryFilter(typeName: String, dimension: String, value: String)
  extends QueryFilter {
  def toJson: JValue = JObject("type" -> typeName, "dimension" -> dimension, "value" -> value)
}

case class SelectorQueryFilter(dimension: String, value: String) extends QueryFilter {
  def toJson: JValue = JObject("type" -> "selector", "dimension" -> dimension, "value" -> value)
}

case class RegexQueryFilter(dimension: String, pattern: String) extends QueryFilter {
  def toJson: JValue = JObject("type" -> "regex", "dimension" -> dimension, "pattern" -> pattern)
}

case class AllQueryFilter(condition: java.util.HashMap[String, Any]) extends QueryFilter {
  //  val json = JSONObject.fromObject(condition.get("filter")).toString
  //  def toJson: JValue = parse(json)
  def toJson: JValue = fromJsonNode(mapper.valueToTree(condition.get("filter")))
}

object QueryFilter {

  def custom(typeName: String, dimension: String, value: String): ExprQueryFilter =
    ExprQueryFilter(typeName, dimension, value)

  def where(dimension: String, value: String): SelectorQueryFilter =
    SelectorQueryFilter(dimension, value)

  def regex(dimension: String, pattern: String): RegexQueryFilter =
    RegexQueryFilter(dimension, pattern)

  val All = new QueryFilter {
    def toJson: JValue = JNull
  }
} 
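Filters combine with the and/or combinators; a short sketch (dimension names made up):

val f = QueryFilter.where("country", "US") and QueryFilter.regex("page", "^Spark")
compact(render(f.toJson))
// {"type": "and", "fields": [
//   {"type": "selector", "dimension": "country", "value": "US"},
//   {"type": "regex", "dimension": "page", "pattern": "^Spark"}]}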
Example 37
Source File: ColumnAnnotation.scala    From maha   with Apache License 2.0 5 votes vote down vote up
// Copyright 2017, Yahoo Holdings Inc.
// Licensed under the terms of the Apache License 2.0. Please see LICENSE file in project root for terms.
package com.yahoo.maha.core

import org.json4s.JsonAST.{JNull, JObject, JValue}
import org.json4s.scalaz.JsonScalaz._


sealed trait ColumnAnnotation {
  def asJSON: JObject = makeObj(
    List(
      ("annotation" -> toJSON(this.getClass.getSimpleName))
    )
  )
}

sealed trait SingletonColumn

trait ClassNameHashCode {
  override final val hashCode: Int = this.getClass.toString.hashCode
  override def equals(other: Any) : Boolean =  this.hashCode == other.hashCode()
}

sealed trait ColumnAnnotationInstance extends ColumnAnnotation with ClassNameHashCode {
  def instance: ColumnAnnotation
}

case class HiveShardingExpression(expression: HiveDerivedExpression) extends ColumnAnnotationInstance with WithHiveEngine {
  def instance: ColumnAnnotation = HiveShardingExpression.instance

  val jUtils = JsonUtils

  override def asJSON(): JObject =
    makeObj(
      List(
        ("annotation" -> toJSON(this.getClass.getSimpleName))
        ,("expression" -> jUtils.asJSON(expression))
      )
    )
}
case object HiveShardingExpression {
  val instance: ColumnAnnotation = HiveShardingExpression(null)
}

case class PrestoShardingExpression(expression: PrestoDerivedExpression) extends ColumnAnnotationInstance with WithPrestoEngine {
  def instance: ColumnAnnotation = PrestoShardingExpression.instance

  val jUtils = JsonUtils

  override def asJSON(): JObject =
    makeObj(
      List(
        ("annotation" -> toJSON(this.getClass.getSimpleName))
        ,("expression" -> jUtils.asJSON(expression))
      )
    )
}
case object PrestoShardingExpression {
  val instance: ColumnAnnotation = PrestoShardingExpression(null)
}

case object PrimaryKey extends ColumnAnnotation
case object EscapingRequired extends ColumnAnnotation
case object HiveSnapshotTimestamp extends ColumnAnnotation with SingletonColumn with WithHiveEngine
case object OracleSnapshotTimestamp extends ColumnAnnotation with SingletonColumn with WithOracleEngine
case object PostgresSnapshotTimestamp extends ColumnAnnotation with SingletonColumn with WithPostgresEngine
case object IsAggregation extends ColumnAnnotation
case object CaseInsensitive extends ColumnAnnotation
case class ForeignKey(publicDimName: String) extends ColumnAnnotationInstance {
  def instance: ColumnAnnotation = ForeignKey.instance

  override def asJSON(): JObject =
    makeObj(
      List(
        ("annotation" -> toJSON(this.getClass.getSimpleName))
        ,("publicDimName" -> toJSON(publicDimName))
      )
    )
}
case object ForeignKey {
  val instance: ColumnAnnotation = ForeignKey("instance")
}
case class DayColumn(fmt: String) extends ColumnAnnotationInstance {
  def instance: ColumnAnnotation = DayColumn.instance

  override def asJSON(): JObject =
    makeObj(
      List(
        ("annotation" -> toJSON(this.getClass.getSimpleName))
        ,("fmt" -> toJSON(fmt))
      )
    )
}
case object DayColumn {
  val instance: ColumnAnnotation = DayColumn("instance")
} 
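As a quick illustration of the asJSON encoding (the dimension name and format string are made up):

ForeignKey("advertiser").asJSON
// {"annotation": "ForeignKey", "publicDimName": "advertiser"}

DayColumn("YYYY-MM-DD").asJSON
// {"annotation": "DayColumn", "fmt": "YYYY-MM-DD"}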
Example 38
Source File: JsonUtils.scala    From maha   with Apache License 2.0 5 votes vote down vote up
// Copyright 2017, Yahoo Holdings Inc.
// Licensed under the terms of the Apache License 2.0. Please see LICENSE file in project root for terms.
package com.yahoo.maha.core

import org.json4s._
import org.json4s.scalaz.JsonScalaz
import org.json4s.scalaz.JsonScalaz._
import _root_.scalaz.{Scalaz, syntax}
import org.json4s.JsonAST.{JArray, JValue}
import syntax.validation._



object JsonUtils {

  implicit def mapJSONW: JSONW[Map[String, Set[String]]] = new JSONW[Map[String, Set[String]]] {
    def write(values: Map[String, Set[String]]) = makeObj(values.map(kv => kv._1 -> toJSON(kv._2.toList)).toList)
  }

  implicit def bdJSONW: JSONW[BigDecimal] = new JSONW[BigDecimal] {
    def write(value: BigDecimal): JValue = makeObj(List(("value" -> toJSON(value.doubleValue()))))
  }

  implicit def toStringJSONW[A]: JSONW[A] = new JSONW[A] {
    def write(value: A): JValue = value match {
      case a: ColumnAnnotation =>
        a.asJSON
      case null =>
        JNull
      case _ =>
        toJSON(value.toString)
    }
  }

  implicit def setJSONW[A]: JSONW[Set[A]] = new JSONW[Set[A]] {
    def write(values: Set[A]) = JArray(values.map(x => toJSON(x)).toList)
  }

  implicit def listJSONW[A: JSONW]: JSONW[List[A]] = new JSONW[List[A]] {
    def write(values: List[A]) = JArray(values.map(x => implicitly[JSONW[A]].write(x)))
  }

  def asJSON[A](a: A): JValue = {
    toJSON(a)
  }

} 
Example 39
Source File: Point.scala    From magellan   with Apache License 2.0 5 votes vote down vote up
package magellan

import com.fasterxml.jackson.annotation.{JsonIgnore, JsonProperty}
import org.apache.spark.sql.types._
import org.json4s.JsonAST.JValue
import org.json4s.JsonDSL._


  override def transform(fn: (Point) => Point): Point = fn(this)

  def withinCircle(origin: Point, radius: Double): Boolean = {
    val sqrdL2Norm = Math.pow((origin.getX() - getX()), 2) + Math.pow((origin.getY() - getY()), 2)
    sqrdL2Norm <= Math.pow(radius, 2)
  }

  @JsonProperty
  override def getType(): Int = 1

  override def jsonValue: JValue =
    ("type" -> "udt") ~
      ("class" -> this.getClass.getName) ~
      ("pyClass" -> "magellan.types.PointUDT") ~
      ("x" -> x) ~
      ("y" -> y)

  @JsonProperty
  override def boundingBox = BoundingBox(x, y, x, y)

  @JsonIgnore
  override def isEmpty(): Boolean = true

}

object Point {

  def apply(x: Double, y: Double) = {
    val p = new Point()
    p.setX(x)
    p.setY(y)
    p
  }
} 
Example 40
Source File: LookupTable.scala    From jigg   with Apache License 2.0 5 votes vote down vote up
package jigg.util



import java.io.Reader

import breeze.linalg.DenseMatrix
import org.json4s.{DefaultFormats, _}
import org.json4s.jackson.JsonMethods
import org.json4s.JsonAST.JValue

class LookupTable(rawTable: JValue) {

  implicit private val formats = DefaultFormats
  private val tables = rawTable.extract[Map[String, Map[String, Map[String, String]]]]

  private val key2id = tables("_lookup")("_key2id")
  private val id2key = tables("_lookup")("_id2key")

  // For raw text
  def encodeCharacter(str: String): DenseMatrix[Float] = {
    val strArray = str.map { x =>
      // Note: unknown characters fall back to a dummy id ("3").
      key2id.getOrElse(x.toString, "3").toFloat
    }.toArray
    new DenseMatrix[Float](1, str.length, strArray)
  }

  // For list of words
  def encodeWords(words: Array[String]): DenseMatrix[Float] = {
    val wordsArray = words.map { x =>
      // Note: unknown words fall back to a dummy id ("3").
      key2id.getOrElse(x, "3").toFloat
    }
    new DenseMatrix[Float](1, words.length, wordsArray)
  }

  def decode(data: DenseMatrix[Float]): Array[String] =
    data.map{x => id2key.getOrElse(x.toInt.toString, "NONE")}.toArray

  def getId(key: String): Int = key2id.getOrElse(key, "0").toInt
  def getId(key: Char): Int = getId(key.toString)

  def getKey(id: Int): String = id2key.getOrElse(id.toString, "UNKNOWN")
}


object LookupTable {

  // Load from a path on the file system
  def fromFile(path: String) = mkTable(IOUtil.openIn(path))

  // Load from class loader
  def fromResource(path: String) = mkTable(IOUtil.openResourceAsReader(path))

  private def mkTable(input: Reader) = {
    val j = try { JsonMethods.parse(input) } finally { input.close }
    new LookupTable(j)
  }
} 
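The constructor expects the nested "_lookup" layout with "_key2id" and "_id2key" maps; a minimal sketch:

import org.json4s.jackson.JsonMethods

val table = new LookupTable(JsonMethods.parse(
  """{"_lookup": {"_key2id": {"a": "1"}, "_id2key": {"1": "a"}}}"""))

table.getId("a")  // 1
table.getKey(1)   // "a"
table.getId("?")  // 0 (fallback for unknown keys)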
Example 41
Source File: SourceStatus.scala    From drizzle-spark   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.sql.streaming

import java.{util => ju}

import scala.collection.JavaConverters._

import org.json4s._
import org.json4s.JsonAST.JValue
import org.json4s.JsonDSL._
import org.json4s.jackson.JsonMethods._

import org.apache.spark.annotation.Experimental
import org.apache.spark.sql.streaming.StreamingQueryStatus.indent
import org.apache.spark.util.JsonProtocol


private[sql] object SourceStatus {
  def apply(
      desc: String,
      offsetDesc: String,
      inputRate: Double,
      processingRate: Double,
      triggerDetails: Map[String, String]): SourceStatus = {
    new SourceStatus(desc, offsetDesc, inputRate, processingRate, triggerDetails.asJava)
  }
} 
Example 42
Source File: StreamingQueryStatus.scala    From sparkoscope   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.sql.streaming

import org.json4s._
import org.json4s.JsonAST.JValue
import org.json4s.JsonDSL._
import org.json4s.jackson.JsonMethods._


class StreamingQueryStatus protected[sql](
    val message: String,
    val isDataAvailable: Boolean,
    val isTriggerActive: Boolean) {

  def prettyJson: String = pretty(render(jsonValue))

  override def toString: String = prettyJson

  private[sql] def copy(
      message: String = this.message,
      isDataAvailable: Boolean = this.isDataAvailable,
      isTriggerActive: Boolean = this.isTriggerActive): StreamingQueryStatus = {
    new StreamingQueryStatus(
      message = message,
      isDataAvailable = isDataAvailable,
      isTriggerActive = isTriggerActive)
  }

  private[sql] def jsonValue: JValue = {
    ("message" -> JString(message.toString)) ~
    ("isDataAvailable" -> JBool(isDataAvailable)) ~
    ("isTriggerActive" -> JBool(isTriggerActive))
  }
} 
Example 43
Source File: EarnDotComFeeProvider.scala    From eclair   with Apache License 2.0 5 votes vote down vote up
package fr.acinq.eclair.blockchain.fee

import com.softwaremill.sttp._
import com.softwaremill.sttp.json4s._
import org.json4s.DefaultFormats
import org.json4s.JsonAST.{JArray, JInt, JValue}
import org.json4s.jackson.Serialization

import scala.concurrent.duration._
import scala.concurrent.{ExecutionContext, Future}


class EarnDotComFeeProvider(readTimeOut: Duration)(implicit http: SttpBackend[Future, Nothing], ec: ExecutionContext) extends FeeProvider {

  import EarnDotComFeeProvider._

  implicit val formats = DefaultFormats
  implicit val serialization = Serialization

  val uri = uri"https://bitcoinfees.earn.com/api/v1/fees/list"

  override def getFeerates: Future[FeeratesPerKB] =
    for {
      json <- sttp.readTimeout(readTimeOut).get(uri)
        .response(asJson[JValue])
        .send()
      feeRanges = parseFeeRanges(json.unsafeBody)
    } yield extractFeerates(feeRanges)

}

object EarnDotComFeeProvider {

  case class FeeRange(minFee: Long, maxFee: Long, memCount: Long, minDelay: Long, maxDelay: Long)

  def parseFeeRanges(json: JValue): Seq[FeeRange] = {
    val JArray(items) = json \ "fees"
    items.map(item => {
      val JInt(minFee) = item \ "minFee"
      val JInt(maxFee) = item \ "maxFee"
      val JInt(memCount) = item \ "memCount"
      val JInt(minDelay) = item \ "minDelay"
      val JInt(maxDelay) = item \ "maxDelay"
      // earn.com returns fees in Satoshi/byte and we want Satoshi/KiloByte
      FeeRange(minFee = 1000 * minFee.toLong, maxFee = 1000 * maxFee.toLong, memCount = memCount.toLong, minDelay = minDelay.toLong, maxDelay = maxDelay.toLong)
    })
  }

  def extractFeerate(feeRanges: Seq[FeeRange], maxBlockDelay: Int): Long = {
    // first we keep only fee ranges with a max block delay below the limit
    val belowLimit = feeRanges.filter(_.maxDelay <= maxBlockDelay)
    // out of all the remaining fee ranges, we select the one with the minimum higher bound and make sure it is > 0
    Math.max(belowLimit.minBy(_.maxFee).maxFee, 1)
  }

  def extractFeerates(feeRanges: Seq[FeeRange]): FeeratesPerKB =
    FeeratesPerKB(
      block_1 = extractFeerate(feeRanges, 1),
      blocks_2 = extractFeerate(feeRanges, 2),
      blocks_6 = extractFeerate(feeRanges, 6),
      blocks_12 = extractFeerate(feeRanges, 12),
      blocks_36 = extractFeerate(feeRanges, 36),
      blocks_72 = extractFeerate(feeRanges, 72),
      blocks_144 = extractFeerate(feeRanges, 144))

} 
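Feeding parseFeeRanges a minimal hand-written payload shows the Satoshi/byte to Satoshi/KB conversion:

import org.json4s.jackson.JsonMethods.parse

val json = parse("""{"fees": [{"minFee": 1, "maxFee": 2, "memCount": 3, "minDelay": 0, "maxDelay": 1}]}""")
EarnDotComFeeProvider.parseFeeRanges(json)
// List(FeeRange(minFee = 1000, maxFee = 2000, memCount = 3, minDelay = 0, maxDelay = 1))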
Example 44
Source File: BitgoFeeProvider.scala    From eclair   with Apache License 2.0 5 votes vote down vote up
package fr.acinq.eclair.blockchain.fee

import com.softwaremill.sttp._
import com.softwaremill.sttp.json4s._
import fr.acinq.bitcoin.{Block, ByteVector32}
import org.json4s.DefaultFormats
import org.json4s.JsonAST.{JInt, JValue}
import org.json4s.jackson.Serialization

import scala.concurrent.duration._
import scala.concurrent.{ExecutionContext, Future}

class BitgoFeeProvider(chainHash: ByteVector32, readTimeOut: Duration)(implicit http: SttpBackend[Future, Nothing], ec: ExecutionContext) extends FeeProvider {

  import BitgoFeeProvider._

  implicit val formats = DefaultFormats
  implicit val serialization = Serialization

  val uri = chainHash match {
    case Block.LivenetGenesisBlock.hash => uri"https://www.bitgo.com/api/v2/btc/tx/fee"
    case _ => uri"https://test.bitgo.com/api/v2/tbtc/tx/fee"
  }

  override def getFeerates: Future[FeeratesPerKB] =
    for {
      res <- sttp.readTimeout(readTimeOut).get(uri)
        .response(asJson[JValue])
        .send()
      feeRanges = parseFeeRanges(res.unsafeBody)
    } yield extractFeerates(feeRanges)

}

object BitgoFeeProvider {

  case class BlockTarget(block: Int, fee: Long)

  def parseFeeRanges(json: JValue): Seq[BlockTarget] = {
    val blockTargets = json \ "feeByBlockTarget"
    blockTargets.foldField(Seq.empty[BlockTarget]) {
      // BitGo returns estimates in Satoshi/KB, which is what we want
      case (list, (strBlockTarget, JInt(feePerKB))) => list :+ BlockTarget(strBlockTarget.toInt, feePerKB.longValue)
    }
  }

  def extractFeerate(feeRanges: Seq[BlockTarget], maxBlockDelay: Int): Long = {
    // first we keep only fee ranges with a max block delay below the limit
    val belowLimit = feeRanges.filter(_.block <= maxBlockDelay)
    // out of all the remaining fee ranges, we select the one with the minimum higher bound
    belowLimit.map(_.fee).min
  }

  def extractFeerates(feeRanges: Seq[BlockTarget]): FeeratesPerKB =
    FeeratesPerKB(
      block_1 = extractFeerate(feeRanges, 1),
      blocks_2 = extractFeerate(feeRanges, 2),
      blocks_6 = extractFeerate(feeRanges, 6),
      blocks_12 = extractFeerate(feeRanges, 12),
      blocks_36 = extractFeerate(feeRanges, 36),
      blocks_72 = extractFeerate(feeRanges, 72),
      blocks_144 = extractFeerate(feeRanges, 144))

} 
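And for BitGo, a hand-written feeByBlockTarget payload (fee values made up):

import org.json4s.jackson.JsonMethods.parse

val json = parse("""{"feeByBlockTarget": {"1": 10000, "3": 5000}}""")
BitgoFeeProvider.parseFeeRanges(json)
// List(BlockTarget(1, 10000), BlockTarget(3, 5000))
BitgoFeeProvider.extractFeerate(BitgoFeeProvider.parseFeeRanges(json), maxBlockDelay = 6)
// 5000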
Example 45
Source File: BasicBitcoinJsonRPCClient.scala    From eclair   with Apache License 2.0 5 votes vote down vote up
package fr.acinq.eclair.blockchain.bitcoind.rpc

import com.softwaremill.sttp._
import com.softwaremill.sttp.json4s._
import fr.acinq.bitcoin.ByteVector32
import fr.acinq.eclair.KamonExt
import fr.acinq.eclair.blockchain.Monitoring.{Metrics, Tags}
import org.json4s.{CustomSerializer, DefaultFormats}
import org.json4s.JsonAST.{JString, JValue}
import org.json4s.jackson.Serialization

import scala.concurrent.{ExecutionContext, Future}

class BasicBitcoinJsonRPCClient(user: String, password: String, host: String = "127.0.0.1", port: Int = 8332, ssl: Boolean = false)(implicit http: SttpBackend[Future, Nothing]) extends BitcoinJsonRPCClient {

  // necessary to properly serialize ByteVector32 into String readable by bitcoind
  object ByteVector32Serializer extends CustomSerializer[ByteVector32](_ => ( {
    null
  }, {
    case x: ByteVector32 => JString(x.toHex)
  }))
  implicit val formats = DefaultFormats.withBigDecimal + ByteVector32Serializer
  private val scheme = if (ssl) "https" else "http"
  private val serviceUri = uri"$scheme://$host:$port/wallet/" // wallet/ specifies to use the default bitcoind wallet, named ""
  implicit val serialization = Serialization

  override def invoke(method: String, params: Any*)(implicit ec: ExecutionContext): Future[JValue] =
    invoke(Seq(JsonRPCRequest(method = method, params = params))).map(l => jsonResponse2Exception(l.head).result)

  def jsonResponse2Exception(jsonRPCResponse: JsonRPCResponse): JsonRPCResponse = jsonRPCResponse match {
    case JsonRPCResponse(_, Some(error), _) => throw JsonRPCError(error)
    case o => o
  }

  def invoke(requests: Seq[JsonRPCRequest])(implicit ec: ExecutionContext): Future[Seq[JsonRPCResponse]] = {
    requests.groupBy(_.method).foreach {
      case (method, calls) => Metrics.RpcBasicInvokeCount.withTag(Tags.Method, method).increment(calls.size)
    }
    KamonExt.timeFuture(Metrics.RpcBasicInvokeDuration.withoutTags()) {
      for {
        res <- sttp
          .post(serviceUri)
          .body(requests)
          .auth.basic(user, password)
          .response(asJson[Seq[JsonRPCResponse]])
          .send()
      } yield res.unsafeBody
    }
  }

} 
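Construction and invocation sketch (assumes an implicit SttpBackend[Future, Nothing] and an ExecutionContext in scope; getblockcount is a standard bitcoind RPC):

val client = new BasicBitcoinJsonRPCClient(user = "rpcuser", password = "rpcpass")
val blockCount: Future[JValue] = client.invoke("getblockcount")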
Example 46
Source File: StreamingQueryStatus.scala    From multi-tenancy-spark   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.sql.streaming

import org.json4s._
import org.json4s.JsonAST.JValue
import org.json4s.JsonDSL._
import org.json4s.jackson.JsonMethods._


class StreamingQueryStatus protected[sql](
    val message: String,
    val isDataAvailable: Boolean,
    val isTriggerActive: Boolean) {

  def prettyJson: String = pretty(render(jsonValue))

  override def toString: String = prettyJson

  private[sql] def copy(
      message: String = this.message,
      isDataAvailable: Boolean = this.isDataAvailable,
      isTriggerActive: Boolean = this.isTriggerActive): StreamingQueryStatus = {
    new StreamingQueryStatus(
      message = message,
      isDataAvailable = isDataAvailable,
      isTriggerActive = isTriggerActive)
  }

  private[sql] def jsonValue: JValue = {
    ("message" -> JString(message.toString)) ~
    ("isDataAvailable" -> JBool(isDataAvailable)) ~
    ("isTriggerActive" -> JBool(isTriggerActive))
  }
} 
Example 47
Source File: HostTimeSpan.scala    From sparklens   with Apache License 2.0 5 votes vote down vote up
package com.qubole.sparklens.timespan

import com.qubole.sparklens.common.AggregateMetrics
import org.apache.spark.executor.TaskMetrics
import org.apache.spark.scheduler.TaskInfo
import org.json4s.DefaultFormats
import org.json4s.JsonAST.JValue

import scala.collection.mutable


class HostTimeSpan(val hostID: String) extends TimeSpan {
  var hostMetrics = new AggregateMetrics()


  override def duration(): Option[Long] = {
    Some(super.duration().getOrElse(System.currentTimeMillis() - startTime))
  }

  def updateAggregateTaskMetrics (taskMetrics: TaskMetrics, taskInfo: TaskInfo): Unit = {
    hostMetrics.update(taskMetrics, taskInfo)
  }
  override def getMap(): Map[String, _ <: Any] = {
    implicit val formats = DefaultFormats
    Map("hostID" -> hostID, "hostMetrics" -> hostMetrics.getMap) ++ super.getStartEndTime()
  }

}

object HostTimeSpan {
  def getTimeSpan(json: Map[String, JValue]): mutable.HashMap[String, HostTimeSpan] = {
    implicit val formats = DefaultFormats
    val map = new mutable.HashMap[String, HostTimeSpan]

    json.foreach { case (key, value) =>
      val timeSpan = new HostTimeSpan((value \ "hostID").extract[String])
      timeSpan.hostMetrics = AggregateMetrics.getAggregateMetrics((value \ "hostMetrics")
        .extract[JValue])
      timeSpan.addStartEnd(value)
      map.put(key, timeSpan)
    }

    map
  }
} 
Example 48
Source File: TimeSpan.scala    From sparklens   with Apache License 2.0 5 votes vote down vote up
package com.qubole.sparklens.timespan

import org.json4s.DefaultFormats
import org.json4s.JsonAST.JValue


trait TimeSpan  {
  var startTime: Long = 0
  var endTime: Long = 0

  def setEndTime(time: Long): Unit = {
    endTime = time
  }

  def setStartTime(time: Long): Unit = {
    startTime = time
  }
  def isFinished(): Boolean = (endTime != 0 && startTime != 0)

  def duration(): Option[Long] = {
    if (isFinished()) {
      Some(endTime - startTime)
    } else {
      None
    }
  }
  def getMap(): Map[String, _ <: Any]

  def getStartEndTime(): Map[String, Long] = {
    Map("startTime" -> startTime, "endTime" -> endTime)
  }

  def addStartEnd(json: JValue): Unit = {
    implicit val formats = DefaultFormats
    this.startTime = (json \ "startTime").extract[Long]
    this.endTime = (json \ "endTime").extract[Long]
  }
} 
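Subclasses persist their window through getStartEndTime and restore it with addStartEnd; a round-trip sketch using a hypothetical subclass:

import org.json4s.{DefaultFormats, Extraction}

class SimpleSpan extends TimeSpan {
  override def getMap(): Map[String, Any] = getStartEndTime()
}

implicit val formats = DefaultFormats
val span = new SimpleSpan
span.setStartTime(100L)
span.setEndTime(250L)

val restored = new SimpleSpan
restored.addStartEnd(Extraction.decompose(span.getMap()))
restored.duration() // Some(150)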
Example 49
Source File: JobTimeSpan.scala    From sparklens   with Apache License 2.0 5 votes vote down vote up
package com.qubole.sparklens.timespan

import com.qubole.sparklens.common.{AggregateMetrics, AppContext}
import org.apache.spark.executor.TaskMetrics
import org.apache.spark.scheduler.TaskInfo
import org.json4s.DefaultFormats
import org.json4s.JsonAST.JValue

import scala.collection.{immutable, mutable}



  private def criticalTime(stageID: Int, data: mutable.HashMap[Int, (Seq[Int], Long)]): Long = {
    // Default to no parent stages and zero time for stages missing from the map
    val stageData = data.getOrElse(stageID, (List.empty[Int], 0L))
    stageData._2 + {
      if (stageData._1.size == 0) {
        0L
      }else {
        stageData._1.map(x => criticalTime(x, data)).max
      }
    }
  }

  override def getMap(): Map[String, _ <: Any] = {
    implicit val formats = DefaultFormats

    Map(
      "jobID" -> jobID,
      "jobMetrics" -> jobMetrics.getMap,
      "stageMap" -> AppContext.getMap(stageMap)) ++ super.getStartEndTime()
  }
}

object JobTimeSpan {
  def getTimeSpan(json: Map[String, JValue]): mutable.HashMap[Long, JobTimeSpan] = {
    implicit val formats = DefaultFormats
    val map = new mutable.HashMap[Long, JobTimeSpan]

    json.foreach { case (key, value) =>
      val timeSpan = new JobTimeSpan((value \ "jobID").extract[Long])

      timeSpan.jobMetrics = AggregateMetrics.getAggregateMetrics((value \ "jobMetrics")
              .extract[JValue])
      timeSpan.stageMap = StageTimeSpan.getTimeSpan((value \ "stageMap").extract[
        immutable.Map[String, JValue]])
      timeSpan.addStartEnd(value)
      map.put(key.toLong, timeSpan)
    }
    map
  }
} 
Example 50
Source File: ExecutorTimeSpan.scala    From sparklens   with Apache License 2.0 5 votes vote down vote up
package com.qubole.sparklens.timespan

import com.qubole.sparklens.common.AggregateMetrics
import org.apache.spark.executor.TaskMetrics
import org.apache.spark.scheduler.TaskInfo
import org.json4s.DefaultFormats
import org.json4s.JsonAST.JValue

import scala.collection.mutable

class ExecutorTimeSpan(val executorID: String,
                       val hostID: String,
                       val cores: Int) extends TimeSpan {
  var executorMetrics = new AggregateMetrics()

  def updateAggregateTaskMetrics (taskMetrics: TaskMetrics, taskInfo: TaskInfo): Unit = {
    executorMetrics.update(taskMetrics, taskInfo)
  }

  override def getMap(): Map[String, _ <: Any] = {
    implicit val formats = DefaultFormats

    Map("executorID" -> executorID, "hostID" -> hostID, "cores" -> cores, "executorMetrics" ->
      executorMetrics.getMap()) ++ super.getStartEndTime()
  }
}

object ExecutorTimeSpan {
  def getTimeSpan(json: Map[String, JValue]): mutable.HashMap[String, ExecutorTimeSpan] = {

    implicit val formats = DefaultFormats
    val map = new mutable.HashMap[String, ExecutorTimeSpan]

    json.foreach { case (key, value) =>
      val timeSpan = new ExecutorTimeSpan(
        (value \ "executorID").extract[String],
        (value \ "hostID").extract[String],
        (value \ "cores").extract[Int]
      )
      timeSpan.executorMetrics = AggregateMetrics.getAggregateMetrics((value
              \ "executorMetrics").extract[JValue])
      timeSpan.addStartEnd(value)
      map.put(key, timeSpan)
    }
    map
  }
} 
Example 51
Source File: ApplicationInfo.scala    From sparklens   with Apache License 2.0 5 votes vote down vote up
package com.qubole.sparklens.common

import org.json4s.DefaultFormats
import org.json4s.JsonAST.JValue

case class ApplicationInfo (var applicationID:String = "NA",
                            var startTime:Long = 0L,
                            var endTime:Long = 0L) {

  def getMap(): Map[String, Any] = {
    implicit val formats = DefaultFormats
    Map("applicationID" -> applicationID, "startTime" -> startTime, "endTime" -> endTime)
  }
}

object ApplicationInfo {

  def getObject(jvalue: JValue): ApplicationInfo = {
    implicit val formats = DefaultFormats

    ApplicationInfo(
      (jvalue \ "applicationID").extract[String],
      (jvalue \ "startTime").extract[Long],
      (jvalue \ "endTime").extract[Long])
  }
} 
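Round-trip sketch (Extraction.decompose turns the map from getMap back into the JValue that getObject expects):

import org.json4s.{DefaultFormats, Extraction}

implicit val formats = DefaultFormats
val info = ApplicationInfo("application_1", 100L, 200L)
ApplicationInfo.getObject(Extraction.decompose(info.getMap()))
// ApplicationInfo("application_1", 100, 200)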
Example 52
Source File: StructField.scala    From iolap   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.sql.types

import org.json4s.JsonAST.JValue
import org.json4s.JsonDSL._


  protected def this() = this(null, null)

  private[sql] def buildFormattedString(prefix: String, builder: StringBuilder): Unit = {
    builder.append(s"$prefix-- $name: ${dataType.typeName} (nullable = $nullable)\n")
    DataType.buildFormattedString(dataType, s"$prefix    |", builder)
  }

  // override the default toString to be compatible with legacy parquet files.
  override def toString: String = s"StructField($name,$dataType,$nullable)"

  private[sql] def jsonValue: JValue = {
    ("name" -> name) ~
      ("type" -> dataType.jsonValue) ~
      ("nullable" -> nullable) ~
      ("metadata" -> metadata.jsonValue)
  }
} 
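From inside org.apache.spark.sql (jsonValue is private[sql]), a simple field renders as sketched below (field name made up):

StructField("age", IntegerType, nullable = true).jsonValue
// {"name": "age", "type": "integer", "nullable": true, "metadata": {}}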
Example 53
Source File: UpgradeConfiguration.scala    From RTran   with Apache License 2.0 5 votes vote down vote up
package com.ebay.rtran.core

import com.fasterxml.jackson.databind.JsonNode
import com.typesafe.scalalogging.LazyLogging
import org.json4s.JsonAST.JValue
import org.json4s.jackson.JsonMethods._
import com.ebay.rtran.api.{IModel, IRule, IRuleConfigFactory}
import org.json4s.DefaultFormats

import scala.util.{Failure, Success, Try}


trait RuleProducer {
  val ruleInstances: List[_ <: IRule[_ <: IModel]]
}

trait UpgradeConfiguration extends RuleProducer {
  val ruleConfigs: List[JsonRuleConfiguration]
}

case class JsonRuleConfiguration(name: String, metadata: Option[JValue] = None, config: Option[JValue] = None)

case class JsonUpgradeConfiguration(ruleConfigs: List[JsonRuleConfiguration])
  extends UpgradeConfiguration with JsonRuleProducer

trait JsonRuleProducer extends RuleProducer with LazyLogging { self: UpgradeConfiguration =>

  lazy val ruleInstances = ruleConfigs map {
    case JsonRuleConfiguration(name, metadata, configOpt) =>
      logger.info("Creating instance for {} with config {}", name, configOpt)
      implicit val formats = DefaultFormats

      //copy settings from metadata to Rule Registry
      RuleRegistry.findRuleDefinition(name) flatMap { case (ruleClass, rule) =>
        val properties = metadata.map(json => json.extract[Map[String, Any]])
        val configFactory = (rule.configFactory getOrElse DefaultJsonRuleConfigFactory)
          .asInstanceOf[IRuleConfigFactory[JsonNode]]
        configOpt map { config =>
          Try(JsonConfigurableRuleFactory.createRuleWithConfig(ruleClass, configFactory, asJsonNode(config)))
        } getOrElse Try(JsonConfigurableRuleFactory.createRule(ruleClass)) match {
          case Success(instance) =>
            properties.map(m => m.mapValues(_.toString)).map(m => RuleRegistry.saveRuleMetadata(instance, m))
            Some(instance)
          case Failure(e) =>
            logger.warn(e.getMessage)
            None
        }
      }
  } collect {
    case Some(instance) => instance
  }

}
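Finally, a hypothetical JSON document matching this configuration shape ("ExampleRule" is a made-up name that would have to be registered in RuleRegistry for an instance to be produced):

implicit val formats = DefaultFormats
val config = parse("""{"ruleConfigs": [{"name": "ExampleRule", "config": {"enabled": true}}]}""")
  .extract[JsonUpgradeConfiguration]

config.ruleInstances // resolved via RuleRegistry.findRuleDefinition("ExampleRule")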