scala.util.parsing.json.JSONObject Scala Examples

The following examples show how to use scala.util.parsing.json.JSONObject. Each example is taken from an open-source project; the originating project, source file, and license are noted above each example. Note that the scala.util.parsing.json package has been deprecated since Scala 2.11 and was later removed from the standard distribution, so these APIs survive mainly in legacy code.
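
Before the project examples, here is a minimal, self-contained sketch of the class itself (the object name and values are illustrative): JSONObject wraps an immutable Map[String, Any], toString() renders it as JSON text, and JSON.parseFull turns JSON text back into nested Maps and Lists.

import scala.util.parsing.json.{JSON, JSONObject}

object JsonObjectBasics {
  def main(args: Array[String]): Unit = {
    // Wrap a Map and render it as JSON text.
    val obj = JSONObject(Map("name" -> "venn", "id" -> 1))
    println(obj.toString())  // {"name" : "venn", "id" : 1}

    // Parse JSON text back into Option[Any] holding nested Maps/Lists;
    // numbers come back as Double under the default number parser.
    val parsed = JSON.parseFull(obj.toString())
    println(parsed)          // Some(Map(name -> venn, id -> 1.0))
  }
}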
Example 1
Source File: JsonUtil.scala    From piflow   with BSD 2-Clause "Simplified" License
package cn.piflow.util

import scala.util.parsing.json.{JSONArray, JSONFormat, JSONObject}


object JsonUtil {

  // Recursively convert a List (with possibly nested Maps and Lists) into a JSONArray.
  def toJson(arr: List[Any]): JSONArray = {
    JSONArray(arr.map {
      case (innerMap : Map[String, Any]) => toJson(innerMap)
      case (innerArray : List[Any]) => toJson(innerArray)
      case (other) => other
    })
  }

  // Recursively convert a Map into a JSONObject; null values become empty strings.
  def toJson(map: Map[String, Any]): JSONObject = {
    JSONObject(map.map {
      case(key, innerMap:Map[String, Any]) => (key, toJson(innerMap))
      case(key, innerArray: List[Any]) => (key, toJson(innerArray))
      case(key, other) => (key, if (other == null) "" else other )
    })
  }

  // Pretty-print a parsed JSON value with four spaces of indentation per nesting level.
  def format(t: Any, i: Int = 0): String = t match {
    case o: JSONObject =>
      o.obj.map{
        case (k, v) =>
          "    "*(i+1) + JSONFormat.defaultFormatter(k) + ": " + format(v, i+1)
      }.mkString("{\n",",\n","\n" + "    "*i + "}")
    case a: JSONArray =>
      a.list.map{
        e => "    "*(i+1) + format(e, i+1)
      }.mkString("[\n",",\n","\n" + "    "*i + "]")
    case _ => JSONFormat.defaultFormatter(t)
  }

} 
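
A quick usage sketch (the input map is hypothetical): toJson converts nested Maps and Lists recursively, replacing nulls with empty strings, and format pretty-prints the result with four-space indentation.

import cn.piflow.util.JsonUtil

val flow = Map(
  "name" -> "demo-flow",
  "steps" -> List(Map("id" -> 1), Map("id" -> 2)),
  "comment" -> null)  // rendered as "" by toJson
println(JsonUtil.format(JsonUtil.toJson(flow)))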
Example 2
Source File: AppendLoadConfiguration.scala    From m3d-engine   with Apache License 2.0
package com.adidas.analytics.config

import com.adidas.analytics.algo.core.Algorithm.SafeWriteOperation
import com.adidas.analytics.config.shared.{ConfigurationContext, LoadConfiguration, MetadataUpdateStrategy}
import com.adidas.analytics.util.DataFormat.ParquetFormat
import com.adidas.analytics.util.{LoadMode, OutputWriter}
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.catalyst.util.DropMalformedMode
import org.apache.spark.sql.types.{DataType, StructType}

import scala.util.parsing.json.JSONObject


trait AppendLoadConfiguration extends ConfigurationContext
  with LoadConfiguration
  with SafeWriteOperation
  with MetadataUpdateStrategy {

  protected def spark: SparkSession

  private val regexFilename: Seq[String] = configReader.getAsSeq[String]("regex_filename")

  protected val headerDir: String = configReader.getAs[String]("header_dir")

  protected val targetTable: Option[String] = configReader.getAsOption[String]("target_table")


  // This option specifies whether the input data schema must match the target schema given in the configuration file.
  // Note: if set to true, the input data will be read more than once.
  private val verifySchemaOption: Option[Boolean] = configReader.getAsOption[Boolean]("verify_schema")

  protected val verifySchema: Boolean = dataType match {
    case SEMISTRUCTURED => verifySchemaOption.getOrElse(true)
    case _ => false
  }

  protected val columnToRegexPairs: Seq[(String, String)] = targetPartitions zip regexFilename

  private val jsonSchemaOption: Option[JSONObject] = configReader.getAsOption[JSONObject]("schema")

  protected val targetSchema: StructType = getTargetSchema

  private val targetDir: Option[String] = configReader.getAsOption[String]("target_dir")

  override protected val writer: OutputWriter.AtomicWriter = dataType match {
    case STRUCTURED if targetTable.isDefined => OutputWriter.newTableLocationWriter(
      table = targetTable.get,
      format = ParquetFormat(Some(targetSchema)),
      targetPartitions = targetPartitions,
      loadMode = LoadMode.OverwritePartitionsWithAddedColumns,
      metadataConfiguration = getMetaDataUpdateStrategy(targetTable.get, targetPartitions)
    )
    case SEMISTRUCTURED if targetDir.isDefined => OutputWriter.newFileSystemWriter(
      location = targetDir.get,
      format = ParquetFormat(Some(targetSchema)),
      targetPartitions = targetPartitions,
      loadMode = LoadMode.OverwritePartitions
    )
    case anotherDataType => throw new RuntimeException(s"Unsupported data type: $anotherDataType in AppendLoad or the configuration file is malformed.")
  }

  private def getTargetSchemaFromHiveTable: StructType = {
    targetTable match {
      case Some(tableName) => spark.table(tableName).schema
      case None => throw new RuntimeException("No schema definition found.")
    }
  }

  private def getTargetSchema: StructType = {
    dataType match {
      case STRUCTURED => getTargetSchemaFromHiveTable
      case SEMISTRUCTURED if jsonSchemaOption.isDefined => DataType.fromJson(jsonSchemaOption.get.toString()).asInstanceOf[StructType]
      case anotherDataType => throw new RuntimeException(s"Unsupported data type: $anotherDataType in AppendLoad or the configuration file is malformed.")
    }
  }

  override def loadMode: String = readerModeSetter(DropMalformedMode.name)
} 
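
For orientation, a hypothetical configuration fragment covering the keys this trait reads (paths, table names, and patterns are made up); it would be parsed by the ConfigReader shown in the next example. For semistructured data, a schema key would hold a Spark StructType in the JSON form accepted by DataType.fromJson.

{
  "regex_filename": ["year=([0-9]{4})", "month=([0-9]{2})"],
  "header_dir": "/lake/headers",
  "target_table": "lake.sales",
  "verify_schema": true
}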
Example 3
Source File: ConfigReader.scala    From m3d-engine   with Apache License 2.0
package com.adidas.analytics.util

import java.text.DecimalFormatSymbols

import org.slf4j.{Logger, LoggerFactory}

import scala.util.parsing.json.{JSON, JSONArray, JSONObject}


class ConfigReader(jsonContent: String) extends Serializable {

  private val logger: Logger = LoggerFactory.getLogger(getClass)

  private val decimalSeparator: Char = new DecimalFormatSymbols().getDecimalSeparator

  // Configure how numbers are parsed; note this mutates JSON's global (JVM-wide) parser state.
  JSON.globalNumberParser = (in: String) => if (in.contains(decimalSeparator)) in.toDouble else in.toInt

  private lazy val config = JSON.parseRaw(jsonContent) match {
    case Some(JSONObject(obj)) => obj
    case _ => throw new IllegalArgumentException(s"Wrong format of the configuration file: $jsonContent")
  }

  def getAsSeq[T](propertyName: String): Seq[T] = {
    config.get(propertyName) match {
      case Some(JSONArray(list)) => list.map(_.asInstanceOf[T])
      case _ => throw new IllegalArgumentException(s"Unable to find configuration property $propertyName")
    }
  }

  def getAsMap[K, V](propertyName: String): Map[K,V] = {
    config.get(propertyName) match {
      case Some(JSONObject(obj)) => obj.asInstanceOf[Map[K,V]]
      case _ => throw new IllegalArgumentException(s"Unable to find configuration property $propertyName")
    }
  }

  def getAs[T](propertyName: String): T = {
    config.get(propertyName) match {
      case Some(property) => property.asInstanceOf[T]
      case None => throw new IllegalArgumentException(s"Unable to find configuration property $propertyName")
    }
  }

  def getAsOption[T](propertyName: String): Option[T] = {
    config.get(propertyName).map(property => property.asInstanceOf[T])
  }

  def getAsOptionSeq[T](propertyName: String): Option[Seq[T]] = {
    config.get(propertyName).map(_ => getAsSeq(propertyName))
  }

  def contains(propertyName: String): Boolean = {
    config.contains(propertyName)
  }
}

object ConfigReader {
  def apply(jsonContent: String): ConfigReader = new ConfigReader(jsonContent)
} 
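
A usage sketch (values are illustrative). Note that the typed getters rely on unchecked casts, so a wrong type parameter surfaces only when the returned value is actually used:

val reader = ConfigReader("""{"target_table": "lake.sales", "target_partitions": ["year", "month"], "verify_schema": true}""")
reader.getAs[String]("target_table")          // "lake.sales"
reader.getAsSeq[String]("target_partitions")  // Seq("year", "month")
reader.getAsOption[Boolean]("verify_schema")  // Some(true)
reader.contains("no_such_key")                // false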
Example 4
Source File: CurrentDayMaker.scala    From flink-rookie   with Apache License 2.0
package com.venn.stream.api.dayWindow

import java.text.SimpleDateFormat
import java.util.{Calendar, Date}

import com.venn.common.Common
import com.venn.util.MathUtil
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord}

import scala.util.parsing.json.JSONObject


object CurrentDayMaker {

  val calendar: Calendar = Calendar.getInstance()
  val sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS")

  // Advance the shared calendar by 10 minutes so successive records carry increasing event times.
  def getCreateTime(): String = {
    calendar.add(Calendar.MINUTE, 10)
    sdf.format(calendar.getTime)
  }

  def main(args: Array[String]): Unit = {
    val producer = new KafkaProducer[String, String](Common.getProp)
    calendar.setTime(new Date())
    println(sdf.format(calendar.getTime))
    var i = 0
    while (true) {
      val map = Map("id" -> i, "createTime" -> getCreateTime(), "amt" -> (MathUtil.random.nextInt(10) + "." + MathUtil.random.nextInt(10)))
      val jsonObject: JSONObject = new JSONObject(map)
      println(jsonObject.toString())
      // send one record per second to topic current_day
      val msg = new ProducerRecord[String, String]("current_day", jsonObject.toString())
      producer.send(msg)
      producer.flush()
      Thread.sleep(1000)
      i = i + 1
    }
  }

} 
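
For reference, each iteration prints and sends a record shaped like the following (timestamps and amounts vary per run); several of the remaining examples follow this same producer pattern with different topics, id seeds, and cadences.

{"id" : 0, "createTime" : "2024-01-01 10:10:00.000", "amt" : "3.7"}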
Example 5
Source File: LateDataMaker.scala    From flink-rookie   with Apache License 2.0
package com.venn.stream.api.sideoutput.lateDataProcess

import java.text.SimpleDateFormat
import java.util.{Calendar, Date}

import com.venn.common.Common
import com.venn.util.MathUtil
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord}

import scala.util.parsing.json.JSONObject


object LateDataMaker {

  val calendar: Calendar = Calendar.getInstance()
  val sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS")

  // Advance the shared calendar by 10 seconds so successive records carry increasing event times.
  def getCreateTime(): String = {
    calendar.add(Calendar.SECOND, 10)
    sdf.format(calendar.getTime)
  }

  def main(args: Array[String]): Unit = {
    val producer = new KafkaProducer[String, String](Common.getProp)
    calendar.setTime(new Date())
    println(sdf.format(calendar.getTime))
    var i = 74540
    while (true) {
      val map = Map("id" -> i, "createTime" -> getCreateTime(), "amt" -> (MathUtil.random.nextInt(10) + "." + MathUtil.random.nextInt(10)))
      val jsonObject: JSONObject = new JSONObject(map)
      println(jsonObject.toString())
      // send five records per second to topic late_data
      val msg = new ProducerRecord[String, String]("late_data", jsonObject.toString())
      producer.send(msg)
      producer.flush()
      Thread.sleep(200)
      i = i + 1
    }
  }

} 
Example 6
Source File: WindowDemoMaker.scala    From flink-rookie   with Apache License 2.0
package com.venn.stream.api.trigger

import java.text.SimpleDateFormat
import java.util.{Calendar, Date}

import com.venn.common.Common
import com.venn.util.MathUtil
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord}

import scala.util.parsing.json.JSONObject


object WindowDemoMaker {

  val calendar: Calendar = Calendar.getInstance()
  val sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS")

  // Advance the shared calendar by 10 milliseconds so successive records carry increasing event times.
  def getCreateTime(): String = {
    calendar.add(Calendar.MILLISECOND, 10)
    sdf.format(calendar.getTime)
  }

  def main(args: Array[String]): Unit = {
    val producer = new KafkaProducer[String, String](Common.getProp)
    calendar.setTime(new Date())
    println(sdf.format(calendar.getTime))
    var i = 0
    while (true) {
      val map = Map("id" -> i, "createTime" -> getCreateTime(), "amt" -> (MathUtil.random.nextInt(10) + "." + MathUtil.random.nextInt(10)))
      val jsonObject: JSONObject = new JSONObject(map)
      println(jsonObject.toString())
      // topic current_day
      val msg = new ProducerRecord[String, String]("current_day", jsonObject.toString())
      producer.send(msg)
      producer.flush()
      if (MathUtil.random.nextBoolean()) {
        Thread.sleep(1500)
      } else {
        Thread.sleep(500)
      }
      i = i + 1
    }
  }

} 
Example 7
Source File: FileSinkMaker.scala    From flink-rookie   with Apache License 2.0
package com.venn.stream.api.filesink

import java.text.SimpleDateFormat
import java.util.Calendar

import com.venn.common.Common
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord}

import scala.util.parsing.json.JSONObject



object FileSinkMaker {
  val topic = "async"

  def main(args: Array[String]): Unit = {

    while (true) {

      left("roll_file_sink")
      Thread.sleep(100)
    }
  }

  val sdf = new SimpleDateFormat("yyyyMMddHHmmss")

  var idLeft = 0

  def left(topic: String) = {
    val producer = new KafkaProducer[String, String](Common.getProp)
    idLeft = idLeft + 1
    val map = Map("id" -> idLeft, "name" -> ("venn" + System.currentTimeMillis()), "date" -> getCreateTime)
    val jsonObject: JSONObject = new JSONObject(map)
    println("left : " + jsonObject.toString())
    val msg = new ProducerRecord[String, String](topic, jsonObject.toString())
    producer.send(msg)
    producer.flush()
  }

  val calendar: Calendar = Calendar.getInstance()

  // Advance the shared calendar by 10 minutes so successive records carry increasing event times.
  def getCreateTime(): String = {
    calendar.add(Calendar.MINUTE, 10)
    sdf.format(calendar.getTime)
  }

} 
Example 8
Source File: IntervalJoinKafkaKeyMaker.scala    From flink-rookie   with Apache License 2.0
package com.venn.stream.api.intervalJoin

import java.text.SimpleDateFormat

import com.venn.common.Common
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord}

import scala.util.parsing.json.JSONObject



object IntervalJoinKafkaKeyMaker {
  val topic = "async"

  def main(args: Array[String]): Unit = {

    while (true) {

      left("topic_left")
      right("topic_right")
      Thread.sleep(500)
    }
  }

  val sdf = new SimpleDateFormat("yyyyMMddHHmmss")

  var idLeft = 0

  def left(topic: String) = {
    val producer = new KafkaProducer[String, String](Common.getProp)
    idLeft = idLeft + 1
    val map = Map("id" -> idLeft, "name" -> ("venn" + System.currentTimeMillis()), "date" -> sdf.format(System.currentTimeMillis()))
    val jsonObject: JSONObject = new JSONObject(map)
    println("left : " + jsonObject.toString())
    val msg = new ProducerRecord[String, String](topic, jsonObject.toString())
    producer.send(msg)
    producer.flush()
  }

  var idRight = 0

  def right(topic: String) = {
    val producer = new KafkaProducer[String, String](Common.getProp)
    idRight = idRight + 1
    val map = Map("id" -> idRight,  "phone" -> ("17713333333" + idRight), "date" -> sdf.format(System.currentTimeMillis()))
    val jsonObject: JSONObject = new JSONObject(map)
    println("right : \t\t\t\t\t\t\t\t" + jsonObject.toString())
    val msg = new ProducerRecord[String, String](topic, jsonObject.toString())
    producer.send(msg)
    producer.flush()
  }

} 
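
On the consuming side (not part of this snippet; a sketch assuming values arrive as the JSON strings produced above), scala.util.parsing.json.JSON can decode each record back into a Map:

import scala.util.parsing.json.JSON

// Decode one Kafka record value into a Map, or None if it is not a JSON object.
def parseRecord(value: String): Option[Map[String, Any]] =
  JSON.parseFull(value).collect { case m: Map[String, Any] @unchecked => m }

parseRecord("""{"id" : 1, "name" : "venn1600000000000", "date" : "20200913120000"}""")
  .foreach(m => println(s"${m("id")} -> ${m("name")}"))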
Example 9
Source File: SlotPartitionMaker.scala    From flink-rookie   with Apache License 2.0
package com.venn.demo

import java.text.SimpleDateFormat
import java.util.{Calendar, Date}

import com.venn.common.Common
import com.venn.util.MathUtil
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord}

import scala.util.parsing.json.JSONObject


object SlotPartitionMaker {

  val calendar: Calendar = Calendar.getInstance()
  val sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS")

  // Advance the shared calendar by 10 milliseconds so successive records carry increasing event times.
  def getCreateTime(): String = {
    calendar.add(Calendar.MILLISECOND, 10)
    sdf.format(calendar.getTime)
  }

  def main(args: Array[String]): Unit = {

    val prop = Common.getProp
    prop.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer")
    prop.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer")

    val producer = new KafkaProducer[String, String](prop)
    calendar.setTime(new Date())
    println(sdf.format(calendar.getTime))
    var i = 0
    while (true) {
      val map = Map("id" -> i, "createTime" -> getCreateTime(), "amt" -> (MathUtil.random.nextInt(10) + "." + MathUtil.random.nextInt(10)))
      val jsonObject: JSONObject = new JSONObject(map)
      println(jsonObject.toString())
      // send to topic slot_partition
      val msg = new ProducerRecord[String, String]("slot_partition", jsonObject.toString())
      producer.send(msg)
      producer.flush()
      // sleep 1.5s or 0.5s at random to vary the cadence
      if (MathUtil.random.nextBoolean()) {
        Thread.sleep(1500)
      } else {
        Thread.sleep(500)
      }
      i = i + 1
    }
  }

} 
Example 10
Source File: KafkaOffsetRevertTest.scala    From flink-rookie   with Apache License 2.0
package com.venn.kafka

import java.text.SimpleDateFormat
import java.util.{Calendar, Date}

import com.venn.common.Common
import com.venn.util.MathUtil
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord}

import scala.util.parsing.json.JSONObject


object KafkaOffsetRevertTest {

  val sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS")

  def main(args: Array[String]): Unit = {
    val producer = new KafkaProducer[String, String](Common.getProp(true))
    var i = 0
    while (true) {
      val map = Map("id" -> i, "createTime" -> sdf.format(System.currentTimeMillis()), "amt" -> (MathUtil.random.nextInt(10) + "." + MathUtil.random.nextInt(10)))
      val jsonObject: JSONObject = new JSONObject(map)
      println(jsonObject.toString())
      // send one record per second to topic kafka_offset
      val msg = new ProducerRecord[String, String]("kafka_offset", jsonObject.toString())
      producer.send(msg)
      producer.flush()
      Thread.sleep(1000)
      i = i + 1
    }
  }

}