org.json4s.JsonAST.JArray Scala Examples
The following examples show how to use org.json4s.JsonAST.JArray.
Example 1
Source File: IndexedSeqSerializer.scala from reliable-http-client with Apache License 2.0
package rhttpc.transport.json4s

import org.json4s.JsonAST.JArray
import org.json4s._
import org.json4s.reflect.TypeInfo

object IndexedSeqSerializer extends Serializer[IndexedSeq[_]] {

  def deserialize(implicit format: Formats) = {
    case (TypeInfo(clazz, optionalParamType), json) if classOf[IndexedSeq[_]].isAssignableFrom(clazz) =>
      json match {
        case JArray(elems) =>
          val paramType = optionalParamType.getOrElse(throw new MappingException("Parametrized type not known"))
          val elemTypeInfo = TypeInfo(paramType.getActualTypeArguments()(0).asInstanceOf[Class[_]], None)
          elems.map(Extraction.extract(_, elemTypeInfo)).toIndexedSeq
        case other =>
          throw new MappingException(s"Can't convert $other to IndexedSeq")
      }
  }

  def serialize(implicit format: Formats) = {
    case seq: IndexedSeq[_] =>
      JArray(seq.map(Extraction.decompose).toList)
  }
}
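Once registered with the formats, this serializer lets IndexedSeq round-trip through a JArray. A minimal usage sketch (the sample values are invented; parse, extract and Extraction.decompose are standard json4s API):

import org.json4s._
import org.json4s.jackson.JsonMethods.parse

implicit val formats: Formats = DefaultFormats + IndexedSeqSerializer

// each JArray element is extracted with the element type of IndexedSeq[Int]
val xs: IndexedSeq[Int] = parse("[1, 2, 3]").extract[IndexedSeq[Int]]

// and back: IndexedSeq(1, 2, 3) becomes JArray(List(JInt(1), JInt(2), JInt(3)))
val json: JValue = Extraction.decompose(IndexedSeq(1, 2, 3))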
Example 2
Source File: DescTableReponse.scala from XSQL with Apache License 2.0
package org.apache.spark.sql.execution.datasources.druid

import org.json4s.JsonAST.{JArray, JObject, JValue}
import org.json4s.JsonDSL._

import scala.collection.{immutable, mutable}

case class DescTableRequest(dataSource: String) {
  def toJson: JValue = {
    JObject("queryType" -> "segmentMetadata", "dataSource" -> dataSource)
  }
}

case class DescTableResponse(data: Seq[(String, Any)])

object DescTableResponse {
  def parse(js: JValue): DescTableResponse = {
    var arr = new mutable.HashMap[String, Any]
    js match {
      case JArray(results) =>
        val columns = (results.last \ "columns").asInstanceOf[JObject].values
        columns.foreach { col =>
          arr += (col._1 -> col._2
            .asInstanceOf[immutable.HashMap[String, String]]
            .get("type")
            .get)
        }
        DescTableResponse(arr.toSeq.sortBy(_._1))
      case err @ _ =>
        throw new IllegalArgumentException("Invalid time series response: " + err)
    }
  }
}
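The request half can be exercised on its own: the JsonDSL import silently lifts the plain strings into JString values. A quick sketch with a made-up dataSource name:

import org.json4s.jackson.JsonMethods.{compact, render}

val body = DescTableRequest("wikipedia").toJson
compact(render(body))
// {"queryType":"segmentMetadata","dataSource":"wikipedia"}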
Example 3
Source File: DataSourceScannerResponse.scala from XSQL with Apache License 2.0
package org.apache.spark.sql.execution.datasources.druid

import org.json4s.JsonAST.{JArray, JValue}

case class DataSourceScannerResponse(data: Seq[String])

object DataSourceScannerResponse {
  def parse(js: JValue): DataSourceScannerResponse = {
    js match {
      case JArray(datasources) =>
        val res = datasources.map(ds => ds.values.toString).sortWith(_ < _)
        DataSourceScannerResponse(res)
      case err @ _ =>
        throw new IllegalArgumentException("Invalid datasource response: " + err)
    }
  }
}
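Since ds.values on a JString yields the underlying string, a plain JSON array of names is enough to drive parse. A quick sketch with invented datasource names:

import org.json4s.jackson.JsonMethods.parse

DataSourceScannerResponse.parse(parse("""["wikipedia", "logs"]"""))
// DataSourceScannerResponse(List("logs", "wikipedia")) -- note the results come back sorted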
Example 4
Source File: QueryFilter.scala from XSQL with Apache License 2.0
package org.apache.spark.sql.execution.datasources.druid

import org.json4s.JsonAST.{JArray, JNull, JObject, JValue}
import org.json4s.JsonDSL._
import org.json4s.jackson.JsonMethods._

sealed trait QueryFilter extends Expression {
  def and(other: QueryFilter): QueryFilter = And(Seq(this, other))
  def or(other: QueryFilter): QueryFilter = Or(Seq(this, other))
}

case class And(filters: Seq[Expression]) extends QueryFilter {
  override def and(other: QueryFilter): QueryFilter = copy(other +: filters)
  def toJson: JValue = JObject("type" -> "and", "fields" -> JArray(filters.toList.map(_.toJson)))
}

case class Or(filters: Seq[Expression]) extends QueryFilter {
  override def or(other: QueryFilter): QueryFilter = copy(other +: filters)
  def toJson: JValue = JObject("type" -> "or", "fields" -> JArray(filters.toList.map(_.toJson)))
}

case class Not(filter: Expression) extends QueryFilter {
  def toJson: JValue = JObject("type" -> "not", "field" -> filter.toJson)
}

case class IsNotNull(attributeNotNull: String) extends QueryFilter {
  // {
  //   "field": {
  //     "type": "selector",
  //     "dimension": "added",
  //     "value": ""
  //   },
  //   "type": "not"
  // }
  def toJson: JValue = JObject(
    "field" -> JObject("type" -> "selector", "dimension" -> attributeNotNull, "value" -> ""),
    "type" -> "not")
}

case class ExprQueryFilter(typeName: String, dimension: String, value: String) extends QueryFilter {
  def toJson: JValue = JObject("type" -> typeName, "dimension" -> dimension, "value" -> value)
}

case class SelectorQueryFilter(dimension: String, value: String) extends QueryFilter {
  def toJson: JValue = JObject("type" -> "selector", "dimension" -> dimension, "value" -> value)
}

case class RegexQueryFilter(dimension: String, pattern: String) extends QueryFilter {
  def toJson: JValue = JObject("type" -> "regex", "dimension" -> dimension, "pattern" -> pattern)
}

case class AllQueryFilter(condition: java.util.HashMap[String, Any]) extends QueryFilter {
  // val json = JSONObject.fromObject(condition.get("filter")).toString
  // def toJson: JValue = parse(json)
  def toJson: JValue = fromJsonNode(mapper.valueToTree(condition.get("filter")))
}

object QueryFilter {
  def custom(typeName: String, dimension: String, value: String): ExprQueryFilter =
    ExprQueryFilter(typeName, dimension, value)

  def where(dimension: String, value: String): SelectorQueryFilter = SelectorQueryFilter(dimension, value)

  def regex(dimension: String, pattern: String): RegexQueryFilter = RegexQueryFilter(dimension, pattern)

  val All = new QueryFilter {
    def toJson: JValue = JNull
  }
}
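The and/or combinators nest filters into Druid's filter JSON, with the sub-filters carried as a JArray under "fields". A sketch with made-up dimension names:

import org.json4s.jackson.JsonMethods.{compact, render}

val filter = QueryFilter.where("country", "US").and(QueryFilter.regex("city", "^San"))
compact(render(filter.toJson))
// {"type":"and","fields":[{"type":"selector","dimension":"country","value":"US"},
//                         {"type":"regex","dimension":"city","pattern":"^San"}]}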
Example 5
Source File: RollupExpression.scala from maha with Apache License 2.0
// Copyright 2017, Yahoo Holdings Inc.
// Licensed under the terms of the Apache License 2.0. Please see LICENSE file in project root for terms.
package com.yahoo.maha.core.fact

import com.yahoo.maha.core._
import org.json4s.JsonAST.{JArray, JObject}
import org.json4s.scalaz.JsonScalaz._

case class HiveCustomRollup(expression: HiveDerivedExpression) extends CustomRollup with WithHiveEngine {
  override val hasDerivedExpression: Boolean = true
  override lazy val sourceColumns: Set[String] = expression.sourceColumns
  override lazy val sourcePrimitiveColumns: Set[String] = expression.sourcePrimitiveColumns
}

case class PrestoCustomRollup(expression: PrestoDerivedExpression) extends CustomRollup with WithPrestoEngine {
  override val hasDerivedExpression: Boolean = true
  override lazy val sourceColumns: Set[String] = expression.sourceColumns
  override lazy val sourcePrimitiveColumns: Set[String] = expression.sourcePrimitiveColumns
}

case class OracleCustomRollup(expression: OracleDerivedExpression) extends CustomRollup with WithOracleEngine {
  override val hasDerivedExpression: Boolean = true
  override lazy val sourceColumns: Set[String] = expression.sourceColumns
  override lazy val sourcePrimitiveColumns: Set[String] = expression.sourcePrimitiveColumns
}

case class PostgresCustomRollup(expression: PostgresDerivedExpression) extends CustomRollup with WithPostgresEngine {
  override val hasDerivedExpression: Boolean = true
  override lazy val sourceColumns: Set[String] = expression.sourceColumns
  override lazy val sourcePrimitiveColumns: Set[String] = expression.sourcePrimitiveColumns
}

case class DruidCustomRollup(expression: DruidDerivedExpression) extends CustomRollup with WithDruidEngine {
  override val hasDerivedExpression: Boolean = true
  override lazy val sourceColumns: Set[String] = expression.sourceColumns
  override lazy val sourcePrimitiveColumns: Set[String] = expression.sourcePrimitiveColumns
}

case class DruidFilteredRollup(filter: Filter,
                               factCol: DruidExpression.FieldAccess,
                               delegateAggregatorRollupExpression: RollupExpression) extends CustomRollup with WithDruidEngine {
  override val hasDerivedExpression: Boolean = true
  override lazy val sourceColumns: Set[String] =
    Set(filter.field, factCol.name) ++ delegateAggregatorRollupExpression.sourceColumns
}

case class DruidFilteredListRollup(filter: List[Filter],
                                   factCol: DruidExpression.FieldAccess,
                                   delegateAggregatorRollupExpression: RollupExpression) extends CustomRollup with WithDruidEngine {
  override val hasDerivedExpression: Boolean = true
  override lazy val sourceColumns: Set[String] =
    filter.map(fil => fil.field).toSet ++ delegateAggregatorRollupExpression.sourceColumns ++ Set(factCol.name)
}

case class DruidHyperUniqueRollup(column: String) extends CustomRollup with WithDruidEngine {
  override val hasDerivedExpression: Boolean = true
  override lazy val sourceColumns: Set[String] = Set(column)
}

case object DruidThetaSketchRollup extends CustomRollup with WithDruidEngine
Example 6
Source File: JsonUtils.scala from maha with Apache License 2.0
// Copyright 2017, Yahoo Holdings Inc.
// Licensed under the terms of the Apache License 2.0. Please see LICENSE file in project root for terms.
package com.yahoo.maha.core

import org.json4s._
import org.json4s.scalaz.JsonScalaz
import org.json4s.scalaz.JsonScalaz._
import _root_.scalaz.{Scalaz, syntax}
import org.json4s.JsonAST.{JArray, JValue}
import syntax.validation._

object JsonUtils { // enclosing object restored: the extracted snippet's top-level defs need a container to compile

  implicit def mapJSONW: JSONW[Map[String, Set[String]]] = new JSONW[Map[String, Set[String]]] {
    def write(values: Map[String, Set[String]]) =
      makeObj(values.map(kv => kv._1 -> toJSON(kv._2.toList)).toList)
  }

  implicit def bdJSONW: JSONW[BigDecimal] = new JSONW[BigDecimal] {
    def write(value: BigDecimal): JValue = makeObj(List(("value" -> toJSON(value.doubleValue()))))
  }

  implicit def toStringJSONW[A]: JSONW[A] = new JSONW[A] {
    def write(value: A): JValue = value match {
      case a: ColumnAnnotation => a.asJSON
      case null => JNull
      case _ => toJSON(value.toString)
    }
  }

  implicit def setJSONW[A]: JSONW[Set[A]] = new JSONW[Set[A]] {
    def write(values: Set[A]) = JArray(values.map(x => toJSON(x)).toList)
  }

  implicit def listJSONW[A: JSONW]: JSONW[List[A]] = new JSONW[List[A]] {
    def write(values: List[A]) = JArray(values.map(x => implicitly[JSONW[A]].write(x)))
  }

  def asJSON[A](a: A): JValue = {
    toJSON(a)
  }
}
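With these writers in scope, asJSON renders ordinary Scala collections as json4s ASTs; sets and lists both land in a JArray. A minimal sketch, assuming the snippet lives in the importable object restored above:

import com.yahoo.maha.core.JsonUtils._

asJSON(Set("impressions", "clicks"))
// JArray(List(JString("impressions"), JString("clicks")))

asJSON(Map("dims" -> Set("day", "country")))
// a JObject whose values are JArrays of JStrings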
Example 7
Source File: DataPipelineDefGroup.scala from hyperion with Apache License 2.0
package com.krux.hyperion

import com.amazonaws.services.datapipeline.model.{ParameterObject => AwsParameterObject, PipelineObject => AwsPipelineObject}
import org.json4s.JsonAST.JArray
import org.json4s.JsonDSL._
import org.json4s.JValue

import com.krux.hyperion.activity.MainClass
import com.krux.hyperion.aws.{AdpJsonSerializer, AdpParameterSerializer, AdpPipelineSerializer}
import com.krux.hyperion.common.{DefaultObject, HdfsUriHelper, PipelineObject, S3UriHelper}
import com.krux.hyperion.expression.{Parameter, ParameterValues, Duration}
import com.krux.hyperion.workflow.{WorkflowExpression, WorkflowExpressionImplicits}

trait DataPipelineDefGroup extends S3UriHelper with HdfsUriHelper with WorkflowExpressionImplicits {

  def nameKeySeparator = DataPipelineDefGroup.DefaultNameKeySeparator

  private lazy val context = new HyperionContext()

  implicit def hc: HyperionContext = context

  implicit val pv: ParameterValues = new ParameterValues()

  def pipelineName: String = MainClass(this).toString

  def schedule: Schedule

  def pipelineLifeCycle: PipelineLifeCycle = new PipelineLifeCycle { }

  def setParameterValue(id: String, value: String, ignoreMissing: Boolean = true): Unit = {
    val foundParam = parameters.find(_.id == id)
    if (ignoreMissing) foundParam.foreach(_.withValueFromString(value))
    else foundParam.get.withValueFromString(value)
  }

  private[hyperion] def nameForKey(key: WorkflowKey): String =
    pipelineName + key.map(nameKeySeparator + _).getOrElse("")
}

object DataPipelineDefGroup {

  final val DefaultNameKeySeparator = "#"

  private def delayedSchedule(dpdg: DataPipelineDefGroup, multiplier: Int): Schedule = dpdg.scheduleDelay match {
    case None => dpdg.schedule
    case Some(delay) => Schedule.delay(dpdg.schedule, delay, multiplier)
  }

  implicit class DataPipelineDefGroupOps(dpdg: DataPipelineDefGroup) {

    def ungroup(): Map[WorkflowKey, DataPipelineDef] = dpdg.workflows
      .toSeq
      .sortBy(_._1)  // order by key
      .zipWithIndex
      .map { case ((key, workflow), idx) =>
        (
          key,
          DataPipelineDefWrapper(
            dpdg.hc,
            dpdg.nameForKey(key),
            delayedSchedule(dpdg, idx),
            dpdg.pipelineLifeCycle,
            () => workflow,
            dpdg.tags,
            dpdg.parameters
          )
        )
      }
      .toMap

    def objects: Map[WorkflowKey, Iterable[PipelineObject]] = dpdg.workflows
      .toSeq
      .sortBy(_._1)
      .zipWithIndex
      .map { case ((key, workflow), idx) =>
        val dObj = dpdg.defaultObject.withSchedule(delayedSchedule(dpdg, idx))
        key -> (dObj +: dObj.objects ++: workflow.toPipelineObjects.toList)
      }
      .toMap

    def toAwsParameters: Seq[AwsParameterObject] =
      dpdg.parameters.flatMap(_.serialize).map(o => AdpParameterSerializer(o)).toList

    def toAwsPipelineObjects: Map[WorkflowKey, Seq[AwsPipelineObject]] =
      objects.mapValues(_.map(_.serialize).toList.sortBy(_.id).map(AdpPipelineSerializer(_)))

    def toJson: JValue =
      ("objects" -> JArray(objects.values.flatten.map(_.serialize).toList.sortBy(_.id).map(AdpJsonSerializer(_)))) ~
      ("parameters" -> JArray(dpdg.parameters.flatMap(_.serialize).map(o => AdpJsonSerializer(o)).toList))
  }
}
Example 8
Source File: AccountResponse.scala from scala-stellar-sdk with Apache License 2.0
package stellar.sdk.model.response

import java.nio.charset.StandardCharsets.UTF_8

import org.json4s.{DefaultFormats, Formats}
import org.json4s.JsonAST.{JArray, JObject}
import stellar.sdk._
import stellar.sdk.model.Amount.toBaseUnits
import stellar.sdk.model._
import stellar.sdk.util.ByteArrays

case class AccountResponse(id: PublicKey,
                           lastSequence: Long,
                           subEntryCount: Int,
                           thresholds: Thresholds,
                           authRequired: Boolean,
                           authRevocable: Boolean,
                           balances: List[Balance],
                           signers: List[Signer],
                           data: Map[String, Array[Byte]]) {

  def toAccount: Account = Account(AccountId(id.publicKey), lastSequence + 1)

  def decodedData: Map[String, String] = data.map { case (k, v) => k -> new String(v, UTF_8) }
}

object AccountRespDeserializer extends ResponseParser[AccountResponse]({ o: JObject =>
  implicit val formats: Formats = DefaultFormats
  val id = KeyPair.fromAccountId((o \ "id").extract[String])
  val seq = (o \ "sequence").extract[String].toLong
  val subEntryCount = (o \ "subentry_count").extract[Int]
  val lowThreshold = (o \ "thresholds" \ "low_threshold").extract[Int]
  val mediumThreshold = (o \ "thresholds" \ "med_threshold").extract[Int]
  val highThreshold = (o \ "thresholds" \ "high_threshold").extract[Int]
  val authRequired = (o \ "flags" \ "auth_required").extract[Boolean]
  val authRevocable = (o \ "flags" \ "auth_revocable").extract[Boolean]
  val JArray(jsBalances) = o \ "balances"
  val balances = jsBalances.map {
    case balObj: JObject =>
      val units = toBaseUnits((balObj \ "balance").extract[String].toDouble).get
      val amount = (balObj \ "asset_type").extract[String] match {
        case "credit_alphanum4" => Amount(units, IssuedAsset4(
          code = (balObj \ "asset_code").extract[String],
          issuer = KeyPair.fromAccountId((balObj \ "asset_issuer").extract[String])
        ))
        case "credit_alphanum12" => Amount(units, IssuedAsset12(
          code = (balObj \ "asset_code").extract[String],
          issuer = KeyPair.fromAccountId((balObj \ "asset_issuer").extract[String])
        ))
        case "native" => NativeAmount(units)
        case t => throw new RuntimeException(s"Unrecognised asset type: $t")
      }
      val limit = (balObj \ "limit").extractOpt[String].map(BigDecimal(_)).map(toBaseUnits).map(_.get)
      val buyingLiabilities = toBaseUnits(BigDecimal((balObj \ "buying_liabilities").extract[String])).get
      val sellingLiabilities = toBaseUnits(BigDecimal((balObj \ "selling_liabilities").extract[String])).get
      val authorised = (balObj \ "is_authorized").extractOpt[Boolean].getOrElse(false)
      val authorisedToMaintainLiabilities = (balObj \ "is_authorized_to_maintain_liabilities")
        .extractOpt[Boolean].getOrElse(false)
      Balance(amount, limit, buyingLiabilities, sellingLiabilities, authorised, authorisedToMaintainLiabilities)
    case _ => throw new RuntimeException(s"Expected js object at 'balances'")
  }
  val JArray(jsSigners) = o \ "signers"
  val signers = jsSigners.map {
    case signerObj: JObject =>
      val key = StrKey.decodeFromString((signerObj \ "key").extract[String]).asInstanceOf[SignerStrKey]
      val weight = (signerObj \ "weight").extract[Int]
      Signer(key, weight)
    case _ => throw new RuntimeException(s"Expected js object at 'signers'")
  }
  val JObject(dataFields) = o \ "data"
  val data = dataFields.map { case (k, v) => k -> ByteArrays.base64(v.extract[String]) }.toMap

  AccountResponse(id, seq, subEntryCount, Thresholds(lowThreshold, mediumThreshold, highThreshold),
    authRequired, authRevocable, balances, signers, data)
})
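The repeated val JArray(...) = o \ "field" destructuring used for balances and signers is a stock json4s idiom: it binds the array's elements when the field really is a JSON array and throws a MatchError otherwise. In miniature (a standalone sketch, not Horizon's real schema):

import org.json4s.JsonAST.JArray
import org.json4s.jackson.JsonMethods.parse

val JArray(items) = parse("""{"balances":[{"balance":"1.5"},{"balance":"0.1"}]}""") \ "balances"
// items: List[JValue] with one element per balance object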
Example 9
Source File: CheckPoint.scala from eclair with Apache License 2.0
package fr.acinq.eclair.blockchain.electrum

import java.io.InputStream

import fr.acinq.bitcoin.{Block, ByteVector32, encodeCompact}
import fr.acinq.eclair.blockchain.electrum.db.HeaderDb
import org.json4s.JsonAST.{JArray, JInt, JString}
import org.json4s.jackson.JsonMethods

object CheckPoint { // enclosing object restored: the snippet was extracted from inside the companion object
  // RETARGETING_PERIOD (2016 blocks) and the single-argument load overload are defined elsewhere in the original file

  def load(chainHash: ByteVector32, headerDb: HeaderDb): Vector[CheckPoint] = {
    val checkpoints = CheckPoint.load(chainHash)
    val checkpoints1 = headerDb.getTip match {
      case Some((height, header)) =>
        val newcheckpoints = for {
          h <- checkpoints.size * RETARGETING_PERIOD - 1 + RETARGETING_PERIOD to height - RETARGETING_PERIOD by RETARGETING_PERIOD
        } yield {
          // we *should* have these headers in our db
          val cpheader = headerDb.getHeader(h).get
          val nextDiff = headerDb.getHeader(h + 1).get.bits
          CheckPoint(cpheader.hash, nextDiff)
        }
        checkpoints ++ newcheckpoints
      case None => checkpoints
    }
    checkpoints1
  }
}
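The JArray, JInt and JString imports above serve the elided load(chainHash) overload, which reads checkpoints from a bundled JSON resource. The following is a hypothetical sketch of that parsing, not the verbatim eclair code; it assumes each checkpoint entry is a [hex hash, difficulty] pair:

def loadFromStream(stream: InputStream): Vector[CheckPoint] = {
  val JArray(entries) = JsonMethods.parse(stream)
  entries.map {
    case JArray(JString(hash) :: JInt(diff) :: Nil) =>
      // hashes are displayed big-endian, hence the reverse; encodeCompact packs the difficulty target
      CheckPoint(ByteVector32.fromValidHex(hash).reverse, encodeCompact(diff.bigInteger))
    case other => throw new IllegalArgumentException(s"invalid checkpoint: $other")
  }.toVector
}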
Example 10
Source File: EarnDotComFeeProvider.scala from eclair with Apache License 2.0
package fr.acinq.eclair.blockchain.fee

import com.softwaremill.sttp._
import com.softwaremill.sttp.json4s._
import org.json4s.DefaultFormats
import org.json4s.JsonAST.{JArray, JInt, JValue}
import org.json4s.jackson.Serialization

import scala.concurrent.duration._
import scala.concurrent.{ExecutionContext, Future}

class EarnDotComFeeProvider(readTimeOut: Duration)(implicit http: SttpBackend[Future, Nothing], ec: ExecutionContext) extends FeeProvider {

  import EarnDotComFeeProvider._

  implicit val formats = DefaultFormats
  implicit val serialization = Serialization

  val uri = uri"https://bitcoinfees.earn.com/api/v1/fees/list"

  override def getFeerates: Future[FeeratesPerKB] =
    for {
      json <- sttp.readTimeout(readTimeOut).get(uri)
        .response(asJson[JValue])
        .send()
      feeRanges = parseFeeRanges(json.unsafeBody)
    } yield extractFeerates(feeRanges)

}

object EarnDotComFeeProvider {

  case class FeeRange(minFee: Long, maxFee: Long, memCount: Long, minDelay: Long, maxDelay: Long)

  def parseFeeRanges(json: JValue): Seq[FeeRange] = {
    val JArray(items) = json \ "fees"
    items.map(item => {
      val JInt(minFee) = item \ "minFee"
      val JInt(maxFee) = item \ "maxFee"
      val JInt(memCount) = item \ "memCount"
      val JInt(minDelay) = item \ "minDelay"
      val JInt(maxDelay) = item \ "maxDelay"
      // earn.com returns fees in Satoshi/byte and we want Satoshi/KiloByte
      FeeRange(minFee = 1000 * minFee.toLong, maxFee = 1000 * maxFee.toLong, memCount = memCount.toLong, minDelay = minDelay.toLong, maxDelay = maxDelay.toLong)
    })
  }

  def extractFeerate(feeRanges: Seq[FeeRange], maxBlockDelay: Int): Long = {
    // first we keep only fee ranges with a max block delay below the limit
    val belowLimit = feeRanges.filter(_.maxDelay <= maxBlockDelay)
    // out of all the remaining fee ranges, we select the one with the minimum higher bound and make sure it is > 0
    Math.max(belowLimit.minBy(_.maxFee).maxFee, 1)
  }

  def extractFeerates(feeRanges: Seq[FeeRange]): FeeratesPerKB =
    FeeratesPerKB(
      block_1 = extractFeerate(feeRanges, 1),
      blocks_2 = extractFeerate(feeRanges, 2),
      blocks_6 = extractFeerate(feeRanges, 6),
      blocks_12 = extractFeerate(feeRanges, 12),
      blocks_36 = extractFeerate(feeRanges, 36),
      blocks_72 = extractFeerate(feeRanges, 72),
      blocks_144 = extractFeerate(feeRanges, 144))
}
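parseFeeRanges relies on irrefutable JArray and JInt pattern matches, so a malformed response fails fast with a MatchError. A quick sketch against a hand-written payload (the numbers are invented):

import org.json4s.jackson.JsonMethods.parse

val json = parse("""{"fees":[{"minFee":10,"maxFee":20,"memCount":5,"minDelay":1,"maxDelay":2}]}""")
EarnDotComFeeProvider.parseFeeRanges(json)
// Seq(FeeRange(minFee = 10000, maxFee = 20000, memCount = 5, minDelay = 1, maxDelay = 2))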
Example 11
Source File: BatchClientSuite.scala from hail with MIT License
package is.hail.services.batch_client

import is.hail.utils._
import org.json4s.JsonAST.{JArray, JBool, JInt, JObject, JString}
import org.json4s.{DefaultFormats, Formats}
import org.scalatest.testng.TestNGSuite
import org.testng.annotations.Test

class BatchClientSuite extends TestNGSuite {
  @Test def testBasic(): Unit = {
    val client = new BatchClient()
    val token = tokenUrlSafe(32)
    val batch = client.run(
      JObject(
        "billing_project" -> JString("test"),
        "n_jobs" -> JInt(1),
        "token" -> JString(token)),
      FastIndexedSeq(
        JObject(
          "always_run" -> JBool(false),
          "image" -> JString("ubuntu:18.04"),
          "mount_docker_socket" -> JBool(false),
          "command" -> JArray(List(
            JString("/bin/bash"),
            JString("-c"),
            JString("echo 'Hello, world!'"))),
          "job_id" -> JInt(0),
          "parent_ids" -> JArray(List())))))
    implicit val formats: Formats = DefaultFormats
    assert((batch \ "state").extract[String] == "success")
  }
}
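Unlike the JsonDSL-based examples above, this test builds its payload directly from AST constructors: JObject takes (String, JValue) pairs and JArray takes a List[JValue]. A standalone sketch of the same style:

import org.json4s.JsonAST.{JArray, JObject, JString}
import org.json4s.jackson.JsonMethods.{compact, render}

val payload = JObject("command" -> JArray(List(JString("echo"), JString("hi"))))
compact(render(payload))
// {"command":["echo","hi"]}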
Example 12
Source File: TransientFeatureArrayParam.scala from TransmogrifAI with BSD 3-Clause "New" or "Revised" License
package com.salesforce.op.stages

import com.salesforce.op.features._
import org.apache.spark.ml.param._
import org.apache.spark.ml.util.Identifiable
import org.json4s.DefaultFormats
import org.json4s.JsonAST.{JArray, JValue}
import org.json4s.jackson.JsonMethods.{compact, parse, render}

import scala.util.{Failure, Success}

// Class declaration reconstructed so the overrides below have a home: the original file defines a
// Spark ML Param whose value type is Array[TransientFeature].
class TransientFeatureArrayParam(parent: String, name: String, doc: String)
  extends Param[Array[TransientFeature]](parent, name, doc) {

  implicit val formats = DefaultFormats // needed by extract in jsonDecode

  override def w(value: Array[TransientFeature]): ParamPair[Array[TransientFeature]] = super.w(value)

  override def jsonEncode(value: Array[TransientFeature]): String = {
    compact(render(JArray(value.map(_.toJson).toList)))
  }

  override def jsonDecode(json: String): Array[TransientFeature] = {
    parse(json).extract[Array[JValue]].map(obj => {
      TransientFeature(obj) match {
        case Failure(e) => throw new RuntimeException("Failed to parse TransientFeature", e)
        case Success(v) => v
      }
    })
  }
}
Example 13
Source File: BoardConfig.scala from slab with Apache License 2.0
package com.criteo.slab.app

import com.criteo.slab.core.{Box, Layout}
import com.criteo.slab.utils.Jsonable
import org.json4s.JsonAST.{JArray, JString}
import org.json4s.{CustomSerializer, Serializer}

private[slab] case class BoardConfig(
    title: String,
    layout: Layout,
    links: Seq[(Box[_], Box[_])] = Seq.empty,
    slo: Double
)

object BoardConfig {
  implicit object ToJSON extends Jsonable[BoardConfig] {
    override val serializers: Seq[Serializer[_]] =
      implicitly[Jsonable[Box[_]]].serializers ++ implicitly[Jsonable[Layout]].serializers :+ LinkSer

    object LinkSer extends CustomSerializer[Box[_] Tuple2 Box[_]](_ => (
      {
        case _ => throw new NotImplementedError("Not deserializable")
      }, {
        case (Box(title1, _, _, _, _), Box(title2, _, _, _, _)) =>
          JArray(List(JString(title1), JString(title2)))
      }
    ))
  }
}
Example 14
Source File: GraphiteMetric.scala from slab with Apache License 2.0
package com.criteo.slab.lib.graphite

import com.criteo.slab.utils.Jsonable
import org.json4s.JsonAST.{JArray, JDouble, JInt, JNull}
import org.json4s.{CustomSerializer, Serializer}

private[slab] case class DataPoint(value: Option[Double], timestamp: Long)

object DataPoint {
  implicit object ToJSON extends Jsonable[DataPoint] {
    override val serializers: Seq[Serializer[_]] = List(Ser)

    object Ser extends CustomSerializer[DataPoint](_ => (
      {
        case JArray(JDouble(value) :: JInt(date) :: Nil) =>
          DataPoint(Some(value), date.toLong)
        case JArray(JNull :: JInt(date) :: Nil) =>
          DataPoint(None, date.toLong)
      }, {
        case DataPoint(value, date) =>
          val v = value match {
            case Some(v) => JDouble(v)
            case None => JNull
          }
          JArray(
            List(
              v,
              JInt(date)
            )
          )
      }
    ))
  }
}

private[slab] case class GraphiteMetric(
    target: String,
    datapoints: List[DataPoint]
)

object GraphiteMetric {
  implicit object ToJSON extends Jsonable[GraphiteMetric] {
    override val serializers: Seq[Serializer[_]] = implicitly[Jsonable[DataPoint]].serializers
  }
}
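Graphite encodes each datapoint as a two-element array, [value, timestamp], with null for gaps, which is exactly the JArray shapes the deserializer matches on. A sketch of extraction, assuming Jsonable exposes its serializers as shown above (the sample numbers are invented):

import org.json4s._
import org.json4s.jackson.JsonMethods.parse

implicit val formats: Formats = DefaultFormats ++ implicitly[Jsonable[GraphiteMetric]].serializers

val metric = parse("""{"target":"cpu.load","datapoints":[[0.5,1500000000],[null,1500000060]]}""")
  .extract[GraphiteMetric]
// GraphiteMetric("cpu.load", List(DataPoint(Some(0.5), 1500000000L), DataPoint(None, 1500000060L)))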