org.json4s.JValue Scala Examples
The following examples show how to use org.json4s.JValue.
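Before the project examples, here is a minimal, self-contained sketch of the basic JValue workflow they all rely on: parse a JSON string, navigate it with \ and \\, extract typed Scala values with an implicit Formats in scope, and build new JSON with the JsonDSL ~ operator. It assumes json4s-jackson on the classpath; the object name, JSON content, and field names are invented for illustration, and only the json4s API calls themselves are real.

import org.json4s._
import org.json4s.jackson.JsonMethods._
import org.json4s.JsonDSL._

// Hypothetical demo object; the JSON below is made-up sample data.
object JValueBasics extends App {
  // Needed by extract / extractOpt
  implicit val formats: Formats = DefaultFormats

  // Parse a JSON string into a JValue
  val json: JValue = parse("""{"project": {"name": "demo", "stars": 5, "tags": ["scala", "json"]}}""")

  // Navigate with \ (single step) and \\ (recursive search), then extract typed values
  val name: String = (json \ "project" \ "name").extract[String]
  val stars: Option[Int] = (json \ "project" \ "stars").extractOpt[Int]
  val tags: List[String] = (json \\ "tags").extract[List[String]]

  // Build a new JValue with the JsonDSL ~ operator and render it
  val summary: JObject = ("name" -> name) ~ ("stars" -> stars) ~ ("tagCount" -> tags.size)
  println(compact(render(summary)))
}

The project examples below use this same handful of calls against real data formats.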
Example 1
Source File: NativeReaderOptions.scala From hail with MIT License
package is.hail.expr.ir

import is.hail.types.virtual._
import is.hail.expr.JSONAnnotationImpex
import is.hail.utils._
import org.json4s.{CustomSerializer, DefaultFormats, Formats, JObject, JValue}
import org.json4s.JsonDSL._

class NativeReaderOptionsSerializer() extends CustomSerializer[NativeReaderOptions](
  format => ({
    case jObj: JObject =>
      implicit val fmt = format
      val filterIntervals = (jObj \ "filterIntervals").extract[Boolean]
      val intervalPointType = IRParser.parseType((jObj \ "intervalPointType").extract[String])
      val intervals = {
        val jv = jObj \ "intervals"
        val ty = TArray(TInterval(intervalPointType))
        JSONAnnotationImpex.importAnnotation(jv, ty).asInstanceOf[IndexedSeq[Interval]]
      }
      NativeReaderOptions(intervals, intervalPointType, filterIntervals)
  }, {
    case opts: NativeReaderOptions =>
      implicit val fmt = format
      val ty = TArray(TInterval(opts.intervalPointType))
      (("name" -> opts.getClass.getSimpleName) ~
        ("intervals" -> JSONAnnotationImpex.exportAnnotation(opts.intervals, ty)) ~
        ("intervalPointType" -> opts.intervalPointType.parsableString()) ~
        ("filterIntervals" -> opts.filterIntervals))
  })
)

object NativeReaderOptions {
  def fromJValue(jv: JValue): NativeReaderOptions = {
    implicit val formats: Formats = DefaultFormats
    val filterIntervals = (jv \ "filterIntervals").extract[Boolean]
    val intervalPointType = IRParser.parseType((jv \ "intervalPointType").extract[String])
    val intervals = {
      val jvIntervals = jv \ "intervals"
      val ty = TArray(TInterval(intervalPointType))
      JSONAnnotationImpex.importAnnotation(jvIntervals, ty).asInstanceOf[IndexedSeq[Interval]]
    }
    NativeReaderOptions(intervals, intervalPointType, filterIntervals)
  }
}

case class NativeReaderOptions(
  intervals: IndexedSeq[Interval],
  intervalPointType: Type,
  filterIntervals: Boolean = false) {
  def toJson: JValue = {
    val ty = TArray(TInterval(intervalPointType))
    JObject(
      "name" -> "NativeReaderOptions",
      "intervals" -> JSONAnnotationImpex.exportAnnotation(intervals, ty),
      "intervalPointType" -> intervalPointType.parsableString(),
      "filterIntervals" -> filterIntervals)
  }
}
Example 2
Source File: FilterJsonLigatures.scala From eidos with Apache License 2.0
package org.clulab.wm.eidos.apps

import java.io.File
import java.io.PrintWriter
import java.util.regex.Pattern

import org.clulab.wm.eidos.utils.Closer.AutoCloser
import org.clulab.wm.eidos.utils.FileUtils
import org.clulab.wm.eidos.utils.Sinker
import org.clulab.wm.eidos.utils.TsvWriter
import org.json4s.DefaultFormats
import org.json4s.JString
import org.json4s.JValue
import org.json4s.jackson.JsonMethods

object FilterJsonLigatures extends App {
  val pattern: Pattern = Pattern.compile("([A-Za-z]+(f([bhkl]|[ft]|[ij])|ij)) ([A-Za-z]+)")

  class Filter(tsvWriter: TsvWriter) {
    implicit val formats: DefaultFormats.type = org.json4s.DefaultFormats

    tsvWriter.println("file", "left", "right")

    def filter(jValue: JValue, inputFile: File): Unit = {
      val extractions: JValue = jValue \ "_source" \ "extracted_text"

      extractions match {
        case text: JString =>
          val matcher = pattern.matcher(text.extract[String])

          while (matcher.find)
            tsvWriter.println(inputFile.getName, matcher.group(1), matcher.group(4))
        case _ => throw new RuntimeException(s"Unexpected extractions value: $extractions")
      }
    }
  }

  val inputDir = args(0)
  val extension = args(1)
  val outputFile = args(2)

  new TsvWriter(Sinker.printWriterFromFile(outputFile)).autoClose { tsvWriter =>
    val filter = new Filter(tsvWriter)
    val inputFiles = FileUtils.findFiles(inputDir, extension)

    inputFiles.sortBy(_.getName).foreach { inputFile =>
      val text = FileUtils.getTextFromFile(inputFile)
      val json = JsonMethods.parse(text)

      filter.filter(json, inputFile)
    }
  }
}
Example 3
Source File: FilterJsonPretty.scala From eidos with Apache License 2.0
package org.clulab.wm.eidos.apps

import java.io.File

import org.clulab.serialization.json.stringify
import org.clulab.wm.eidos.utils.Closer.AutoCloser
import org.clulab.wm.eidos.utils.FileEditor
import org.clulab.wm.eidos.utils.FileUtils
import org.json4s.DefaultFormats
import org.json4s.JObject
import org.json4s.JValue
import org.json4s.jackson.JsonMethods

object FilterJsonPretty extends App {

  class Filter(outputDir: String) {
    implicit val formats: DefaultFormats.type = org.json4s.DefaultFormats

    def filter(jValue: JValue, inputFile: File): Unit = {
      val extractions: JValue = jValue

      extractions match {
        case jObject: JObject =>
          val json = stringify(jObject, pretty = true)
          val path = FileEditor(inputFile).setDir(outputDir).get

          FileUtils.printWriterFromFile(path).autoClose { pw =>
            pw.println(json)
          }
        case _ => throw new RuntimeException(s"Unexpected extractions value: $extractions")
      }
    }
  }

  val inputDir = args(0)
  val outputDir = args(1)
  val filter = new Filter(outputDir)
  val inputFiles = FileUtils.findFiles(inputDir, "json")

  inputFiles.sortBy(_.getName).foreach { inputFile =>
    val text = FileUtils.getTextFromFile(inputFile)
    val json = JsonMethods.parse(text)

    filter.filter(json, inputFile)
  }
}
Example 4
Source File: FilterJsonText.scala From eidos with Apache License 2.0
package org.clulab.wm.eidos.apps

import java.io.File

import org.clulab.wm.eidos.utils.Closer.AutoCloser
import org.clulab.wm.eidos.utils.FileEditor
import org.clulab.wm.eidos.utils.FileUtils
import org.clulab.wm.eidos.utils.StringUtils
import org.json4s.DefaultFormats
import org.json4s.JValue
import org.json4s.jackson.JsonMethods

object FilterJsonText extends App {

  class Filter(outputDir: String) {
    implicit val formats: DefaultFormats.type = org.json4s.DefaultFormats

    def filter(jValue: JValue, inputFile: File): Unit = {
      val jString: JValue = jValue \ "extracted_text"
      val text: String = jString.extract[String]
      val path = FileEditor(inputFile).setDir(outputDir).setExt("txt").get

      FileUtils.printWriterFromFile(path).autoClose { pw =>
        pw.println(text)
      }
    }
  }

  val inputDir = args(0)
  val outputDir = args(1)
  val filter = new Filter(outputDir)
  val inputFiles = FileUtils.findFiles(inputDir, "json")

  inputFiles.sortBy(_.getName).foreach { inputFile =>
    val text = FileUtils.getTextFromFile(inputFile)
    val json = JsonMethods.parse(text)

    filter.filter(json, inputFile)
  }
}
Example 5
Source File: SeparateCdrTextFromDirectory.scala From eidos with Apache License 2.0
package org.clulab.wm.eidos.apps.batch

import java.io.File

import org.clulab.wm.eidos.utils.Closer.AutoCloser
import org.clulab.wm.eidos.utils.FileEditor
import org.clulab.wm.eidos.utils.FileUtils
import org.clulab.wm.eidos.utils.StringUtils
import org.clulab.wm.eidos.utils.meta.CluText
import org.json4s.DefaultFormats
import org.json4s.JValue
import org.json4s.jackson.JsonMethods

object SeparateCdrTextFromDirectory extends App {

  class Filter(outputDir: String) {
    implicit val formats: DefaultFormats.type = org.json4s.DefaultFormats

    def filter(inputFile: File, jValue: JValue): Unit = {
      println(s"Extracting from ${inputFile.getName}")
      val jString: JValue = jValue \ "extracted_text"
      val text: String = jString.extract[String]
      val outputFile = FileEditor(inputFile).setDir(outputDir).setExt("txt").get

      FileUtils.printWriterFromFile(outputFile).autoClose { printWriter =>
        printWriter.print(text)
      }
    }
  }

  val inputDir = args(0)
  val outputDir = args(1)
  val inputFiles = FileUtils.findFiles(inputDir, "json")
  val filter = new Filter(outputDir)

  inputFiles.foreach { inputFile =>
    val json = CluText.getJValue(inputFile)

    filter.filter(inputFile, json)
  }
}
Example 6
Source File: FilterJsonSource.scala From eidos with Apache License 2.0
package org.clulab.wm.eidos.apps

import java.io.File

import org.clulab.serialization.json.stringify
import org.clulab.wm.eidos.utils.Closer.AutoCloser
import org.clulab.wm.eidos.utils.FileEditor
import org.clulab.wm.eidos.utils.FileUtils
import org.clulab.wm.eidos.utils.StringUtils
import org.json4s.DefaultFormats
import org.json4s.JObject
import org.json4s.JValue
import org.json4s.jackson.JsonMethods

object FilterJsonSource extends App {

  class Filter(outputDir: String) {
    implicit val formats: DefaultFormats.type = org.json4s.DefaultFormats

    def filter(jValue: JValue, inputFile: File): Unit = {
      val extractions: JValue = jValue \ "_source"

      extractions match {
        case jObject: JObject =>
          val json = stringify(jObject, pretty = true)
          val path = FileEditor(inputFile).setDir(outputDir).get

          FileUtils.printWriterFromFile(path).autoClose { pw =>
            pw.println(json)
          }
        case _ => throw new RuntimeException(s"Unexpected extractions value: $extractions")
      }
    }
  }

  val inputDir = args(0)
  val outputDir = args(1)
  val filter = new Filter(outputDir)
  val inputFiles = FileUtils.findFiles(inputDir, "json")

  inputFiles.sortBy(_.getName).foreach { inputFile =>
    val text = FileUtils.getTextFromFile(inputFile)
    val json = JsonMethods.parse(text)

    filter.filter(json, inputFile)
  }
}
Example 7
Source File: FilterJsonExtractions.scala From eidos with Apache License 2.0
package org.clulab.wm.eidos.apps

import java.io.File

import org.clulab.wm.eidos.utils.FileUtils
import org.json4s.DefaultFormats
import org.json4s.JArray
import org.json4s.JNothing
import org.json4s.JObject
import org.json4s.JString
import org.json4s.JValue
import org.json4s.jackson.JsonMethods

object FilterJsonExtractions extends App {

  class Filter() {
    implicit val formats: DefaultFormats.type = org.json4s.DefaultFormats

    def filter(inputFile: File, jValue: JValue): Unit = {
      println(s"Extracting from ${inputFile.getName}")
      val extractions: JValue = (jValue \\ "extractions")

      extractions match {
        case JArray(extractions: List[_]) => // Type erasure removes the [JObject]
          extractions.foreach { extraction =>
            val jString = (extraction \ "text")
            val text = jString.extract[String]
            val oneLiner = text
              .replace("\n", "\\n")
              .replace("\t", "\\t")

            println("\t" + oneLiner)
          }
        case JObject(_) =>
        case _ => throw new RuntimeException(s"Unexpected extractions value: $extractions")
      }
    }
  }

  val inputDir = args(0)
  val extension = args(1)
  val inputFiles = FileUtils.findFiles(inputDir, extension)
  val filter = new Filter()

  inputFiles.foreach { inputFile =>
    val text = FileUtils.getTextFromFile(inputFile)
    val json = JsonMethods.parse(text)

    filter.filter(inputFile, json)
  }
}
Example 8
Source File: SwaggerSupport.scala From chatoverflow with Eclipse Public License 2.0
package org.codeoverflow.chatoverflow.ui.web

import org.codeoverflow.chatoverflow.Launcher
import org.json4s.JsonDSL._
import org.json4s.{JValue, _}
import org.scalatra.ScalatraServlet
import org.scalatra.swagger.{Api, ApiInfo, JacksonSwaggerBase, Swagger}

  // Note: the declaration of the enclosing servlet class is not included in this excerpt.
  override def renderSwagger2(docs: List[Api]): JValue = {
    val swagger2 = super.renderSwagger2(docs)
    val schemes: JObject = "schemes" -> List("http")
    val host: JObject = "host" -> s"localhost:${Launcher.server.get.port}"
    val additionalElements = schemes ~ host
    additionalElements merge swagger2
  }
}

object CodeOverflowApiInfo extends ApiInfo(
  "Code Overflow API",
  "This API is the main entry point of the Chat Overflow GUI and third party projects.",
  "http://codeoverflow.org",
  "[email protected]",
  "Eclipse Public License 2.0",
  "https://github.com/codeoverflow-org/chatoverflow/blob/master/LICENSE")

class CodeOverflowSwagger(apiVersion: String) extends Swagger(Swagger.SpecVersion, apiVersion, CodeOverflowApiInfo)
Example 9
Source File: ChatSupervisor.scala From heimdallr with Apache License 2.0
package chat import akka.actor._ import akka.actor.SupervisorStrategy._ import scala.concurrent.ExecutionContext import scala.concurrent.duration._ import org.json4s._ import org.json4s.{DefaultFormats, JValue} import java.util.concurrent.TimeUnit import EventConstants._ import akka.stream.ActorMaterializer def createNewChatRoom(number: Int): ActorRef = { var chatroom: ActorRef = null try { //creates new ChatRoomActor and returns as an ActorRef chatroom = context.actorOf(Props(new ChatRoomActor(number, envType)), s"${number}") ChatRooms.chatRooms += number -> chatroom } catch { case e: Exception => log.info(s"FIXME: Create new chat room(${number}) => " + e) self ! CreateChatRoom(number) } chatroom } def removeChatRoom(chatRoomID: Int): Unit = { this.synchronized { ChatRooms.chatRooms.remove(chatRoomID) } } override def receive: Receive = { case CreateChatRoom(chatRoomID) => getChatRoomActorRef(chatRoomID) case RemoveChatRoom(chatRoomID) => removeChatRoom(chatRoomID) case RegChatUser(chatRoomID, userActor) => userActor ! JoinRoom(getChatRoomActorRef(chatRoomID)) case RegProps(props, name) => context.actorOf(props, name) case HeimdallrError => throw new ArithmeticException() case HeimdallrChatStatus => log.info( "Heimdallr ChatSupervisor Running ..." ) // *** supervisor ! "akka://heimdallr/user/{Valid ActorName}" case path: String => log.debug(s"checking path => $path") context.actorSelection(path) ! Identify(path) case ActorIdentity(path, Some(ref)) => log.debug(s"found actor $ref on $path") // *** supervisor ! "/user/{Invalid ActorName}" case ActorIdentity(path, None) => log.debug(s"could not find an actor on $path") case Terminated(user) => log.info("Receive Terminated Event of ChatRoomActor") case x => log.warning("ChatSupervisor Unknown message : " + x) } }
Example 10
Source File: ApkModelSerializer.scala From Argus-SAF with Apache License 2.0
package org.argus.amandroid.serialization

import org.argus.jawa.core.util._
import org.argus.amandroid.core.model.ApkModel
import org.argus.amandroid.core.appInfo.ApkCertificate
import org.argus.amandroid.core.decompile.DecompileLayout
import org.argus.amandroid.core.parser.{ComponentInfo, ComponentType, IntentFilterDataBase, LayoutControl}
import org.argus.jawa.core.elements.{JawaType, Signature}
import org.json4s.{CustomSerializer, Extraction, Formats, JValue}
import org.json4s.JsonDSL._

object ApkModelSerializer extends CustomSerializer[ApkModel](format => (
  {
    case jv: JValue =>
      implicit val formats: Formats = format + JawaTypeSerializer + JawaTypeKeySerializer + SignatureSerializer +
        SignatureKeySerializer + IntentFilterDataBaseSerializer + DecompileLayoutSerializer +
        new org.json4s.ext.EnumNameSerializer(ComponentType)
      val nameUri = (jv \ "nameUri").extract[FileResourceUri]
      val layout = (jv \ "layout").extract[DecompileLayout]
      val certificates = (jv \ "certificates").extract[ISet[ApkCertificate]]
      val activities = (jv \ "activities").extract[ISet[JawaType]]
      val services = (jv \ "services").extract[ISet[JawaType]]
      val receivers = (jv \ "receivers").extract[ISet[JawaType]]
      val providers = (jv \ "provider").extract[ISet[JawaType]]
      val drReceivers = (jv \ "drReceivers").extract[ISet[JawaType]]
      val rpcMethods = (jv \ "rpcMethods").extract[IMap[JawaType, IMap[Signature, Boolean]]]
      val uses_permissions = (jv \ "uses_permissions").extract[ISet[String]]
      val callbackMethods = (jv \ "callbackMethods").extract[IMap[JawaType, ISet[Signature]]]
      val componentInfos = (jv \ "componentInfos").extract[ISet[ComponentInfo]]
      val layoutControls = (jv \ "layoutControls").extract[IMap[Int, LayoutControl]]
      val appPackageName = (jv \ "appPackageName").extract[Option[String]]
      val intentFdb = (jv \ "intentFdb").extract[IntentFilterDataBase]
      val codeLineCounter = (jv \ "codeLineCounter").extract[Int]
      val envMap = (jv \ "envMap").extract[IMap[JawaType, (Signature, String)]]
      val apk = ApkModel(nameUri, layout)
      apk.addCertificates(certificates)
      apk.addActivities(activities)
      apk.addServices(services)
      apk.addReceivers(receivers)
      apk.addProviders(providers)
      apk.addDynamicRegisteredReceivers(drReceivers)
      apk.addRpcMethods(rpcMethods)
      apk.addUsesPermissions(uses_permissions)
      apk.addCallbackMethods(callbackMethods)
      apk.addComponentInfos(componentInfos)
      apk.addLayoutControls(layoutControls)
      apk.setPackageName(appPackageName.getOrElse(""))
      apk.setIntentFilterDB(intentFdb)
      apk.setCodeLineCounter(codeLineCounter)
      apk.addEnvMap(envMap)
      apk
  }, {
    case model: ApkModel =>
      implicit val formats: Formats = format + JawaTypeSerializer + JawaTypeKeySerializer + SignatureSerializer +
        SignatureKeySerializer + IntentFilterDataBaseSerializer + DecompileLayoutSerializer +
        new org.json4s.ext.EnumNameSerializer(ComponentType)
      val nameUri: FileResourceUri = model.nameUri
      val layout: DecompileLayout = model.layout
      val certificates: ISet[ApkCertificate] = model.getCertificates
      val activities: ISet[JawaType] = model.getActivities
      val services: ISet[JawaType] = model.getServices
      val receivers: ISet[JawaType] = model.getReceivers
      val providers: ISet[JawaType] = model.getProviders
      val drReceivers: ISet[JawaType] = model.getDynamicRegisteredReceivers
      val rpcMethods: IMap[JawaType, IMap[Signature, Boolean]] = model.getRpcMethodMapping
      val uses_permissions: ISet[String] = model.getUsesPermissions
      val callbackMethods: IMap[JawaType, ISet[Signature]] = model.getCallbackMethodMapping
      val componentInfos: ISet[ComponentInfo] = model.getComponentInfos
      val layoutControls: IMap[Int, LayoutControl] = model.getLayoutControls
      val appPackageName: String = model.getPackageName
      val intentFdb: IntentFilterDataBase = model.getIntentFilterDB
      val codeLineCounter: Int = model.getCodeLineCounter
      val envMap: IMap[JawaType, (Signature, String)] = model.getEnvMap
      ("nameUri" -> nameUri) ~
        ("layout" -> Extraction.decompose(layout)) ~
        ("certificates" -> Extraction.decompose(certificates)) ~
        ("activities" -> Extraction.decompose(activities)) ~
        ("services" -> Extraction.decompose(services)) ~
        ("receivers" -> Extraction.decompose(receivers)) ~
        ("providers" -> Extraction.decompose(providers)) ~
        ("drReceivers" -> Extraction.decompose(drReceivers)) ~
        ("rpcMethods" -> Extraction.decompose(rpcMethods)) ~
        ("uses_permissions" -> Extraction.decompose(uses_permissions)) ~
        ("callbackMethods" -> Extraction.decompose(callbackMethods)) ~
        ("componentInfos" -> Extraction.decompose(componentInfos)) ~
        ("layoutControls" -> Extraction.decompose(layoutControls)) ~
        ("appPackageName" -> Option(appPackageName)) ~
        ("intentFdb" -> Extraction.decompose(intentFdb)) ~
        ("codeLineCounter" -> codeLineCounter) ~
        ("envMap" -> Extraction.decompose(envMap))
  }
))
Example 11
Source File: DecompileLayoutSerializer.scala From Argus-SAF with Apache License 2.0
package org.argus.amandroid.serialization import org.argus.amandroid.core.decompile.DecompileLayout import org.argus.jawa.core.util.{FileResourceUri, ISet} import org.json4s.{CustomSerializer, Extraction, Formats, JValue} import org.json4s.JsonDSL._ object DecompileLayoutSerializer extends CustomSerializer[DecompileLayout](format => ( { case jv: JValue => implicit val formats: Formats = format val outputUri = (jv \ "outputUri").extract[FileResourceUri] val createFolder = (jv \ "createFolder").extract[Boolean] val srcFolder = (jv \ "srcFolder").extract[String] val libFolder = (jv \ "libFolder").extract[String] val createSeparateFolderForDexes = (jv \ "createSeparateFolderForDexes").extract[Boolean] val pkg = (jv \ "pkg").extract[String] val outputSrcUri = (jv \ "outputSrcUri").extract[FileResourceUri] val sourceFolders = (jv \ "sourceFolders").extract[ISet[String]] val libFolders = (jv \ "libFolders").extract[ISet[String]] val dependencies = (jv \ "dependencies").extract[ISet[String]] val thirdPartyLibraries = (jv \ "thirdPartyLibraries").extract[ISet[String]] val layout = DecompileLayout(outputUri, createFolder, srcFolder, libFolder, createSeparateFolderForDexes) layout.pkg = pkg layout.outputSrcUri = outputSrcUri layout.sourceFolders = sourceFolders layout.libFolders = libFolders layout.dependencies = dependencies layout.thirdPartyLibraries = thirdPartyLibraries layout }, { case layout: DecompileLayout => implicit val formats: Formats = format val outputUri: FileResourceUri = layout.outputSrcUri val createFolder: Boolean = layout.createFolder val srcFolder: String = layout.srcFolder val libFolder: String = layout.libFolder val createSeparateFolderForDexes: Boolean = layout.createSeparateFolderForDexes val pkg: String = layout.pkg val outputSrcUri: FileResourceUri = layout.outputSrcUri val sourceFolders: ISet[String] = layout.sourceFolders val libFolders: ISet[String] = layout.libFolders val dependencies: ISet[String] = layout.dependencies val thirdPartyLibraries: ISet[String] = layout.thirdPartyLibraries ("outputUri" -> outputUri) ~ ("createFolder" -> createFolder) ~ ("srcFolder" -> srcFolder) ~ ("libFolder" -> libFolder) ~ ("createSeparateFolderForDexes" -> createSeparateFolderForDexes) ~ ("pkg" -> pkg) ~ ("outputSrcUri" -> outputSrcUri) ~ ("sourceFolders" -> Extraction.decompose(sourceFolders)) ~ ("libFolders" -> Extraction.decompose(libFolders)) ~ ("dependencies" -> Extraction.decompose(dependencies)) ~ ("thirdPartyLibraries" -> Extraction.decompose(thirdPartyLibraries)) } ))
Example 12
Source File: DeployConfig.scala From hail with MIT License
package is.hail.services import java.io.{File, FileInputStream} import is.hail.utils._ import org.json4s.{DefaultFormats, Formats, JValue} import org.json4s.jackson.JsonMethods object DeployConfig { lazy val get: DeployConfig = fromConfigFile() def fromConfigFile(file0: String = null): DeployConfig = { var file = file0 if (file == null) file = System.getenv("HAIL_DEPLOY_CONFIG_FILE") if (file == null) { val fromHome = s"${ System.getenv("HOME") }/.hail/deploy-config.json" if (new File(fromHome).exists()) file = fromHome } if (file == null) { val f = "/deploy-config/deploy-config.json" if (new File(f).exists()) file = f } if (file != null) { using(new FileInputStream(file)) { in => fromConfig(JsonMethods.parse(in)) } } else new DeployConfig( "external", "default", Map()) } def fromConfig(config: JValue): DeployConfig = { implicit val formats: Formats = DefaultFormats new DeployConfig( (config \ "location").extract[String], (config \ "default_namespace").extract[String], (config \ "service_namespace").extract[Map[String, String]]) } } class DeployConfig( val location: String, val defaultNamespace: String, val serviceNamespace: Map[String, String]) { def scheme(baseScheme: String = "http"): String = { if (location == "external" || location == "k8s") baseScheme + "s" else baseScheme } def getServiceNamespace(service: String): String = { serviceNamespace.getOrElse(service, defaultNamespace) } def domain(service: String): String = { val ns = getServiceNamespace(service) location match { case "k8s" => s"$service.$ns" case "gce" => if (ns == "default") s"$service.hail" else "internal.hail" case "external" => if (ns == "default") s"$service.hail.is" else "internal.hail.is" } } def basePath(service: String): String = { val ns = getServiceNamespace(service) if (ns == "default") "" else s"/$ns/$service" } def baseUrl(service: String, baseScheme: String = "http"): String = { s"${ scheme(baseScheme) }://${ domain(service) }${ basePath(service) }" } }
Example 13
Source File: AbstractTableSpec.scala From hail with MIT License
package is.hail.expr.ir import java.io.OutputStreamWriter import is.hail.utils._ import is.hail.types._ import is.hail.io.fs.FS import is.hail.rvd._ import org.json4s.jackson.JsonMethods import org.json4s.{DefaultFormats, Extraction, Formats, JValue, ShortTypeHints} import scala.language.implicitConversions object SortOrder { def deserialize(b: Byte): SortOrder = if (b == 0.toByte) Ascending else if (b == 1.toByte) Descending else throw new RuntimeException(s"invalid sort order: $b") } sealed abstract class SortOrder { def serialize: Byte } case object Ascending extends SortOrder { def serialize: Byte = 0.toByte } case object Descending extends SortOrder { def serialize: Byte = 1.toByte } case class SortField(field: String, sortOrder: SortOrder) abstract class AbstractTableSpec extends RelationalSpec { def references_rel_path: String def table_type: TableType def rowsComponent: RVDComponentSpec = getComponent[RVDComponentSpec]("rows") def rowsSpec: AbstractRVDSpec def globalsSpec: AbstractRVDSpec def indexed: Boolean = rowsSpec.indexed } object TableSpec { def apply(fs: FS, path: String, params: TableSpecParameters): TableSpec = { val globalsComponent = params.components("globals").asInstanceOf[RVDComponentSpec] val globalsSpec = globalsComponent.rvdSpec(fs, path) val rowsComponent = params.components("rows").asInstanceOf[RVDComponentSpec] val rowsSpec = rowsComponent.rvdSpec(fs, path) new TableSpec(params, globalsSpec, rowsSpec) } def fromJValue(fs: FS, path: String, jv: JValue): TableSpec = { implicit val formats: Formats = RelationalSpec.formats val params = jv.extract[TableSpecParameters] TableSpec(fs, path, params) } } case class TableSpecParameters( file_version: Int, hail_version: String, references_rel_path: String, table_type: TableType, components: Map[String, ComponentSpec]) { def write(fs: FS, path: String) { using(new OutputStreamWriter(fs.create(path + "/metadata.json.gz"))) { out => out.write(JsonMethods.compact(decomposeWithName(this, "TableSpec")(RelationalSpec.formats))) } } } class TableSpec( val params: TableSpecParameters, val globalsSpec: AbstractRVDSpec, val rowsSpec: AbstractRVDSpec) extends AbstractTableSpec { def file_version: Int = params.file_version def hail_version: String = params.hail_version def components: Map[String, ComponentSpec] = params.components def references_rel_path: String = params.references_rel_path def table_type: TableType = params.table_type def toJValue: JValue = { decomposeWithName(params, "TableSpec")(RelationalSpec.formats) } }
Example 14
Source File: FilterJsonCanonicalNames.scala From eidos with Apache License 2.0
package org.clulab.wm.eidos.apps

import java.io.File
import java.io.PrintWriter

import org.clulab.wm.eidos.utils.FileUtils
import org.clulab.wm.eidos.utils.Sinker
import org.clulab.wm.eidos.utils.Closer.AutoCloser
import org.clulab.wm.eidos.utils.TsvWriter
import org.json4s.DefaultFormats
import org.json4s.JArray
import org.json4s.JObject
import org.json4s.JValue
import org.json4s.jackson.JsonMethods

object FilterJsonCanonicalNames extends App {

  class Filter(tsvWriter: TsvWriter) {
    implicit val formats: DefaultFormats.type = org.json4s.DefaultFormats

    tsvWriter.println("file", "id", "text", "canonicalName")

    def filter(jValue: JValue, inputFile: File): Unit = {
      val extractions: JValue = jValue \\ "extractions"

      extractions match {
        case JArray(extractions: List[_]) => // Type erasure removes the [JObject]
          extractions.foreach { extraction =>
            val id = (extraction \ "@id").extract[String]
            val text = (extraction \ "text").extract[String]
            val canonicalName = (extraction \ "canonicalName").extract[String]

            tsvWriter.println(inputFile.getName, id, text, canonicalName)
          }
        case JObject(_) =>
        case _ => throw new RuntimeException(s"Unexpected extractions value: $extractions")
      }
    }
  }

  val inputDir = args(0)
  val extension = args(1)
  val outputFile = args(2)

  new TsvWriter(Sinker.printWriterFromFile(outputFile)).autoClose { tsvWriter =>
    val filter = new Filter(tsvWriter)
    val inputFiles = FileUtils.findFiles(inputDir, extension)

    inputFiles.sortBy(_.getName).foreach { inputFile =>
      val text = FileUtils.getTextFromFile(inputFile)
      val json = JsonMethods.parse(text)

      filter.filter(json, inputFile)
    }
  }
}
Example 15
Source File: ACLJsonProtocol.scala From vinyldns with Apache License 2.0
package vinyldns.api.route import org.json4s.JValue import cats.data._, cats.implicits._ import vinyldns.core.domain.record.RecordType.RecordType import vinyldns.core.domain.zone.{ACLRule, ACLRuleInfo, AccessLevel} trait ACLJsonProtocol extends JsonValidation { val aclSerializers = Seq( ACLRuleInfoSerializer, JsonEnumV(AccessLevel), JsonV[ACLRule] ) case object ACLRuleInfoSerializer extends ValidationSerializer[ACLRuleInfo] { override def fromJson(js: JValue): ValidatedNel[String, ACLRuleInfo] = { val deserialized = ( (js \ "accessLevel").required(AccessLevel, "Missing ACLRule.accessLevel"), (js \ "description").optional[String], (js \ "userId").optional[String], (js \ "groupId").optional[String], (js \ "recordMask").optional[String], (js \ "recordTypes").default[Set[RecordType]](Set.empty[RecordType]), (js \ "displayName").optional[String] ).mapN(ACLRuleInfo.apply) deserialized.check( ("Cannot specify both a userId and a groupId", { rule => !(rule.userId.isDefined && rule.groupId.isDefined) }) ) } } }
Example 16
Source File: package.scala From aardpfark with Apache License 2.0
package com.ibm.aardpfark.pfa

import com.ibm.aardpfark.pfa.expression.PFAExpression
import org.json4s.JsonAST.{JBool, JLong}
import org.json4s.{JDouble, JField, JInt, JNull, JObject, JString, JValue}
import org.json4s.JsonDSL._

package object types {

  private[pfa] trait API {

    sealed trait Literal extends PFAExpression

    implicit class StringLiteral(s: String) extends Literal {
      override def json: JValue = "string" -> s
    }

    implicit class DoubleLiteral(d: Double) extends Literal {
      override def json: JValue = "double" -> d
    }

    implicit class FloatLiteral(f: Float) extends Literal {
      override def json: JValue = "float" -> f
    }

    implicit class IntLiteral(i: Int) extends Literal {
      override def json: JValue = "int" -> i
    }

    implicit class LongLiteral(l: Long) extends Literal {
      override def json: JValue = "long" -> l
    }

    implicit class BooleanLiteral(b: Boolean) extends Literal {
      override def json: JValue = b
    }

    case object NullLiteral extends Literal {
      override def json: JValue = JNull
    }
  }
}
Example 17
Source File: LetSet.scala From aardpfark with Apache License 2.0
package com.ibm.aardpfark.pfa.expression import com.ibm.aardpfark.pfa.dsl.StringExpr import org.json4s.JValue trait LetSet { type LetArgs = (String, Option[String], PFAExpression) // implicits implicit def LetToLetExpr(let: Let) = new LetExpr(Seq((let.x, let.`type`, let.expr))) implicit def LetsToLetExpr(lets: Seq[Let]) = { new LetExpr(lets.map(let => (let.x, let.`type`, let.expr))) } case class Let(x: String, `type`: Option[String], expr: PFAExpression) { def ref = StringExpr(x) } object Let { def apply(ref: String, expr: PFAExpression): Let = Let(ref, None, expr) } object Set { def apply(ref: StringExpr, expr: PFAExpression): SetExpr = new SetExpr(ref, expr) } class LetExpr(args: Seq[LetArgs]) extends PFAExpression { import org.json4s.JsonDSL._ override def json: JValue = { val lets = args.map { case (name, schema, init) => schema.map { s => (name -> ("type" -> s) ~ ("value" -> init.json)) }.getOrElse { (name -> init.json) } }.toMap ("let" -> lets) } } class SetExpr(ref: StringExpr, expr: PFAExpression) extends PFAExpression { import org.json4s.JsonDSL._ override def json: JValue = { ("set" -> (ref.s -> expr.json)) } } }
Example 18
Source File: Casts.scala From aardpfark with Apache License 2.0
package com.ibm.aardpfark.pfa.expression import com.ibm.aardpfark.pfa.document.SchemaSerializer import com.ibm.aardpfark.pfa.dsl.StringExpr import com.sksamuel.avro4s.{SchemaFor, ToSchema} import org.apache.avro.Schema import org.json4s.JValue trait Casts { case class As(schema: Schema, named: String, `do`: PFAExpression) object As { def apply(schema: Schema, named: String, `do`: (StringExpr) => PFAExpression): As = { As(schema, named, `do`(StringExpr(named))) } def apply[T](named: String, `do`: (StringExpr) => PFAExpression)(implicit s: ToSchema[T]): As = { As(s(), named, `do`(StringExpr(named))) } } object Cast { def apply(cast: PFAExpression, cases: Seq[As]) = new CastExpr(cast, cases) def apply(cast: PFAExpression, case1: As, cases: As*) = new CastExpr(cast, Seq(case1) ++ cases) } class CastExpr(cast: PFAExpression, cases: Seq[As]) extends PFAExpression { import org.json4s.JsonDSL._ implicit val converter: Schema => JValue = SchemaSerializer.convert override def json: JValue = { ("cast" -> cast.json) ~ ("cases" -> cases.map { as => ("as" -> as.schema) ~ ("named" -> as.named) ~ ("do" -> as.`do`.json) }) } } }
Example 19
Source File: FunctionCalls.scala From aardpfark with Apache License 2.0
package com.ibm.aardpfark.pfa.expression import com.ibm.aardpfark.pfa.document.{PFAExpressionSerializer, ParamSerializer, SchemaSerializer} import org.json4s.native.JsonMethods.parse import org.json4s.native.Serialization import org.json4s.native.Serialization.write import org.json4s.{JDouble, JField, JInt, JObject, JString, JValue, NoTypeHints} class FunctionCall(name: String, args: Any*) extends PFAExpression { import com.ibm.aardpfark.pfa.dsl._ import org.json4s.JsonDSL._ override def json: JValue = { val jArgs = args.map { case n: Double => JDouble(n) case i: Int => JInt(i) case s: String => JString(s) case expr: PFAExpression => expr.json case fnDef: FunctionDef => implicit val formats = Serialization.formats(NoTypeHints) + new SchemaSerializer + new PFAExpressionSerializer + new ParamSerializer parse(write(fnDef)) } JObject(JField(name, jArgs) :: Nil) } }
Example 20
package com.ibm.aardpfark.pfa.expression import com.ibm.aardpfark.pfa.document.SchemaSerializer import com.sksamuel.avro4s.{AvroSchema, SchemaFor, ToSchema} import org.apache.avro.Schema import org.json4s.JValue import org.json4s.JsonAST.JString import org.json4s.native.JsonMethods.parse trait New { object NewRecord { def apply(schema: Schema, init: Map[String, PFAExpression], fullSchema: Boolean = true) = NewRecordExpr(schema, init, fullSchema) } case class NewRecordExpr(schema: Schema, init: Map[String, PFAExpression], fullSchema: Boolean) extends PFAExpression { import org.json4s.JsonDSL._ private val s = if (fullSchema) SchemaSerializer.convert(schema) else JString(schema.getFullName) override def json: JValue = { ("type" -> s) ~ ("new" -> init.mapValues(_.json)) } } case class NewArrayExpr(schema: Schema, init: Seq[PFAExpression]) extends PFAExpression { import org.json4s.JsonDSL._ override def json: JValue = { ("type" -> parse(schema.toString)) ~ ("new" -> init.map(_.json)) } } object NewArray { def apply(schema: Schema, init: Seq[PFAExpression]) = NewArrayExpr(schema, init) def apply[T](init: Seq[PFAExpression])(implicit s: ToSchema[Seq[T]]) = { NewArrayExpr(s(), init) } } case class NewMap(schema: Schema, init: Map[String, PFAExpression]) extends PFAExpression { import org.json4s.JsonDSL._ override def json: JValue = { ("type" -> parse(schema.toString)) ~ ("new" -> init.mapValues(_.json)) } } }
Example 21
Source File: AttributeRetrieval.scala From aardpfark with Apache License 2.0
package com.ibm.aardpfark.pfa.expression

import com.ibm.aardpfark.pfa.types.WithSchema
import org.json4s.{JField, JObject, JValue}

private[pfa] class CellRetrieval[T <: WithSchema](name: String, path: Seq[PFAExpression] = Seq()) extends PFAExpression {
  import org.json4s.JsonDSL._

  override def json: JValue = {
    JObject(JField("cell", name), JField("path", path.map(_.json)))
  }
}

private[pfa] class AttrRetrieval(attr: PFAExpression, path: Seq[PFAExpression] = Seq()) extends PFAExpression {
  import org.json4s.JsonDSL._

  override def json: JValue = {
    JObject(JField("attr", attr.json), JField("path", path.map(_.json)))
  }
}

trait AttributeRetrieval {

  object Attr {
    def apply(ref: PFAExpression, path: PFAExpression*) = new AttrRetrieval(ref, path)
  }
}
Example 22
Source File: JSONSerializers.scala From aardpfark with Apache License 2.0
package com.ibm.aardpfark.pfa.document import scala.util.Try import com.ibm.aardpfark.pfa.dsl._ import com.ibm.aardpfark.pfa.expression.PFAExpression import com.ibm.aardpfark.spark.ml.tree.{TreeNode, Trees} import org.apache.avro.Schema import org.json4s.native.JsonMethods.parse import org.json4s.{CustomSerializer, JValue} object SchemaSerializer { def convert(s: Schema): JValue = { import Schema.Type._ import org.json4s.JsonDSL._ s.getType match { case DOUBLE | FLOAT | INT | LONG | STRING | BOOLEAN | BYTES | NULL => ("type" -> s.getType.getName) case _ => parse(s.toString) } } } class SchemaSerializer extends CustomSerializer[Schema](format => ( { case j: JValue => new Schema.Parser().parse(j.toString) }, { case s: Schema => SchemaSerializer.convert(s) } ) ) class PFAExpressionSerializer extends CustomSerializer[PFAExpression](format => ( { case j: JValue => throw new UnsupportedOperationException("cannot deserialize") }, { case expr: PFAExpression => expr.json } ) ) class TreeSerializer extends CustomSerializer[TreeNode](format => ( { case j: JValue => throw new UnsupportedOperationException("cannot deserialize") }, { case tree: TreeNode => Trees.json(tree) } ) ) class ParamSerializer extends CustomSerializer[Param](format => ( { case j: JValue => throw new UnsupportedOperationException("cannot deserialize") }, { case p: Param => import org.json4s.JsonDSL._ if (p.simpleSchema) { (p.name -> p.`type`.getFullName) } else { val schemaSerializer = new SchemaSerializer().serialize(format) (p.name -> schemaSerializer(p.`type`)) } } ) )
Example 23
Source File: SampleActor.scala From coral with Apache License 2.0
package io.coral.actors.transform

import akka.actor.Props
import io.coral.actors.{SimpleEmitTrigger, CoralActor}
import io.coral.lib.Random
import org.json4s.JsonAST.JNothing
import org.json4s.{JObject, JValue}

object SampleActor {
  implicit val formats = org.json4s.DefaultFormats

  def getParams(json: JValue) = {
    for {
      fraction <- (json \ "params" \ "fraction").extractOpt[Double]
    } yield {
      fraction
    }
  }

  def apply(json: JValue): Option[Props] = {
    getParams(json).map(_ => Props(classOf[SampleActor], json, Random))
  }
}

class SampleActor(json: JObject, random: Random)
  extends CoralActor(json)
  with SimpleEmitTrigger {

  val fraction: Double = SampleActor.getParams(json).get
  var randomStream: Stream[Boolean] = random.binomial(fraction)

  def next(): Boolean = {
    val value = randomStream.head
    randomStream = randomStream.tail
    value
  }

  override def simpleEmitTrigger(json: JObject): Option[JValue] = {
    next() match {
      case false => Some(JNothing)
      case true => Some(json)
    }
  }
}
Example 24
Source File: JsonExpressionParser.scala From coral with Apache License 2.0
package io.coral.lib import org.json4s.{JArray, JValue, JObject} import org.json4s.JsonAST.JNothing import scala.util.parsing.combinator.{PackratParsers, JavaTokenParsers} import scala.util.parsing.input.CharSequenceReader object JsonExpressionParser extends JavaTokenParsers with PackratParsers { abstract class FieldElement // Represents the complete list of identifiers ("field.array[0].reference['elem']") // A FieldReference is a concatenation of FieldElements. // A FieldElement is either a simple identifier, an array // access element or a dictionary access element. case class FieldReference(items: List[FieldElement]) // Represents a simple identifier between dots case class JsonIdentifier(id: String) extends FieldElement // Represents an array access identifier ("field[0]") case class ArrayAccess(id: JsonIdentifier, index: Int) extends FieldElement // Represents a dictionary access identifier ("field['inner']") case class DictionaryAccess(id: JsonIdentifier, field: String) extends FieldElement object ReferenceAll extends FieldElement def getFieldValue(json: JObject, id: FieldReference): JValue = { // tempJson holds the result we want to return var tempJson: JValue = json id.items.foreach({ case ReferenceAll => tempJson case i: JsonIdentifier => tempJson = tempJson \ i.id case a: ArrayAccess => val obj = tempJson \ a.id.id obj match { case array: JArray => if (a.index < array.arr.length) tempJson = array(a.index) else return JNothing case _ => return JNothing } case d: DictionaryAccess => tempJson = tempJson \ d.id.id \ d.field case _ => }) tempJson } type P[+T] = PackratParser[T] lazy val local_field_reference: P[FieldReference] = repsep(field_element, ".") ^^ { case i => FieldReference(i) } lazy val field_element: P[FieldElement] = reference_all | array_access | dictionary_access | json_identifier lazy val json_identifier: P[JsonIdentifier] = ident ^^ { case i => JsonIdentifier(i) } lazy val array_access: P[ArrayAccess] = json_identifier ~ "[" ~ wholeNumber ~ "]" ^^ { case id ~ "[" ~ index ~ "]" => ArrayAccess(id, index.toInt) } lazy val dictionary_access: P[DictionaryAccess] = json_identifier ~ "[" ~ "'" ~ ident ~ "'" ~ "]" ^^ { case id ~ "[" ~ "'" ~ field ~ "'" ~ "]" => DictionaryAccess(id, field) } lazy val reference_all: P[FieldElement] = "*" ^^ { case _ => ReferenceAll } }
Example 25
Source File: DeserializationRoundtripSpec.scala From twitter4s with Apache License 2.0
package com.danielasfregola.twitter4s.entities import com.danielasfregola.twitter4s.helpers.{FixturesSupport, JsonDiffSupport} import org.json4s.native.Serialization.writePretty import org.json4s.native.{JsonParser, Serialization} import org.json4s.{JNothing, JValue} import org.specs2.matcher.{Expectable, Matcher} import org.specs2.mutable.Specification import org.specs2.specification.core.Fragment import scala.reflect._ class DeserializationRoundtripSpec extends Specification with FixturesSupport with JsonDiffSupport { "JSON deserialization" should { def roundtripTest[T <: AnyRef: Manifest](jsonFile: String): Fragment = { val className = classTag[T].runtimeClass.getSimpleName s"round-trip successfully for $className in $jsonFile" in { val originalJson = load(jsonFile) val deserializedEntity = Serialization.read[T](originalJson) val serializedJson = Serialization.writePretty[T](deserializedEntity) originalJson must beASubsetOfJson(serializedJson) } } roundtripTest[User]("/twitter/rest/users/user.json") } def beASubsetOfJson(otherJson: String): Matcher[String] = new Matcher[String] { def apply[S <: String](t: Expectable[S]) = { val alpha: JValue = JsonParser.parse(t.value) val beta: JValue = JsonParser.parse(otherJson) jsonDiff(alpha, beta) match { case diff @ JsonDiff(JNothing, _, JNothing) => success(s"""${t.value} |is a subset of |$otherJson |${renderDiff(diff)} """.stripMargin, t) case diff => failure(s"""${t.value} |is not a subset of |$otherJson |${renderDiff(diff)} """.stripMargin, t) } } private def renderDiff(diff: JsonDiff) = { val changed = diff.changed.toOption.map { c => s"""Changed: |${writePretty(c)} """.stripMargin } val deleted = diff.deleted.toOption.map { d => s"""Deleted: |${writePretty(d)} """.stripMargin } val added = diff.added.toOption.map { a => s"""Added: |${writePretty(a)} """.stripMargin } (changed ++ deleted ++ added).mkString } } }
Example 26
Source File: FilterJson.scala From eidos with Apache License 2.0
package org.clulab.wm.eidos.apps

import java.io.File

import org.clulab.wm.eidos.utils.FileUtils
import org.json4s.DefaultFormats
import org.json4s.JArray
import org.json4s.JNothing
import org.json4s.JString
import org.json4s.JValue
import org.json4s.jackson.JsonMethods

object FilterJson extends App {

  class Filter(val outputDir: String) {
    implicit val formats: DefaultFormats.type = org.json4s.DefaultFormats

    def filter(inputFile: File, jValue: JValue): Unit = {
      // println(s"Extracting from ${inputFile.getName}")
      val geoLocations: JValue = (jValue \\ "geolocs" \ "text")

      geoLocations match {
        case JArray(geoLocations: List[_]) => // Type erasure removes the [JString]
          geoLocations.foreach { geoLocation =>
            println(geoLocation.extract[String])
          }
        case JNothing =>
        case _ => throw new RuntimeException(s"Unexpected geoLocations value: $geoLocations")
      }
    }
  }

  val inputDir = args(0)
  val extension = args(1)
  val outputDir = args(2)
  val inputFiles = FileUtils.findFiles(inputDir, extension)
  val filter = new Filter(outputDir)

  inputFiles.foreach { inputFile =>
    val text = FileUtils.getTextFromFile(inputFile)
    val json = JsonMethods.parse(text)

    filter.filter(inputFile, json)
  }
}
Example 27
Source File: package.scala From maha with Apache License 2.0
// Copyright 2017, Yahoo Holdings Inc. // Licensed under the terms of the Apache License 2.0. Please see LICENSE file in project root for terms. package com.yahoo.maha.service import com.netflix.archaius.config.PollingDynamicConfig import com.yahoo.maha.service.config.dynamic.DynamicConfigurations import com.yahoo.maha.service.config.dynamic.DynamicConfigurationUtils._ import grizzled.slf4j.Logging import org.json4s.{JValue, _} import org.json4s.jackson.JsonMethods.parse import org.json4s.scalaz.JsonScalaz import org.json4s.scalaz.JsonScalaz.{JSONR, _} package object request extends Logging { implicit val formats = org.json4s.DefaultFormats private var dynamicConfigurations:Option[DynamicConfigurations] = None def setDynamicConfigurations(value: DynamicConfigurations): Unit = dynamicConfigurations = Some(value) def fieldExtended[A: JSONR](name: String)(json: JValue): Result[A] = { val dynamicField = (extractDynamicFields(json)).filter(f => f._2._1.equals(name)).headOption val result = { if (dynamicField.isDefined && dynamicConfigurations.isDefined) { val defaultValue = JsonScalaz.fromJSON[A](parse(dynamicField.get._2._2)) dynamicConfigurations.get.addProperty(dynamicField.get._1, defaultValue.toOption.get.asInstanceOf[Int]) val dynamicValue = JsonScalaz.fromJSON[A](parse(dynamicConfigurations.get.getDynamicConfiguration(dynamicField.get._1).get.toString)) if (dynamicValue.isSuccess) { dynamicValue } else { error(s"Failed to fetch dynamic config value failure: $dynamicValue. Returning default: $defaultValue") defaultValue } } else { field[A](name)(json) } } result.leftMap { nel => nel.map { case UnexpectedJSONError(was, expected) => UncategorizedError(name, s"unexpected value : $was expected : ${expected.getSimpleName}", List.empty) case a => a } } } }
Example 28
Source File: MahaRequestLogWriterFactory.scala From maha with Apache License 2.0
// Copyright 2017, Yahoo Holdings Inc. // Licensed under the terms of the Apache License 2.0. Please see LICENSE file in project root for terms. package com.yahoo.maha.service.factory import com.yahoo.maha.core.request.fieldExtended import com.yahoo.maha.log.{KafkaMahaRequestLogWriter, MahaRequestLogWriter, MultiColoMahaRequestLogWriter, NoopMahaRequestLogWriter} import com.yahoo.maha.service.MahaServiceConfig import com.yahoo.maha.service.MahaServiceConfig._ import com.yahoo.maha.service.config.JsonKafkaRequestLoggingConfig import org.json4s.JValue import org.json4s.scalaz.JsonScalaz._ import scalaz.Scalaz class KafkaMahaRequestLogWriterFactory extends MahaRequestLogWriterFactory { override def fromJson(config: JValue, isLoggingEnabled: Boolean): MahaConfigResult[MahaRequestLogWriter] = { val kafkaRequestLoggingConfigResult: Result[JsonKafkaRequestLoggingConfig] = JsonKafkaRequestLoggingConfig.parse.read(config) for { kafkaRequestLoggingConfig <- kafkaRequestLoggingConfigResult } yield { new KafkaMahaRequestLogWriter(kafkaRequestLoggingConfig.config, isLoggingEnabled) } } override def supportedProperties: List[(String, Boolean)] = List.empty } class NoopMahaRequestLogWriterFactory extends MahaRequestLogWriterFactory { import Scalaz._ override def fromJson(config: JValue, isLoggingEnabled: Boolean): MahaConfigResult[MahaRequestLogWriter] = new NoopMahaRequestLogWriter().successNel override def supportedProperties: List[(String, Boolean)] = List.empty } class MultiColoMahaRequestLogWriterFactory extends MahaRequestLogWriterFactory { override def fromJson(config: JValue, isLoggingEnabled: Boolean): MahaConfigResult[MahaRequestLogWriter] = { val multiColoConfigListResult: MahaServiceConfig.MahaConfigResult[List[JsonKafkaRequestLoggingConfig]] = fieldExtended[List[JsonKafkaRequestLoggingConfig]]("multiColoConfigList")(config) for { multiColoConfigList <- multiColoConfigListResult } yield { MultiColoMahaRequestLogWriter(multiColoConfigList.map(cnf => new KafkaMahaRequestLogWriter(cnf.config, isLoggingEnabled))) } } override def supportedProperties: List[(String, Boolean)] = List.empty }
Example 29
Source File: LiteralMapperFactory.scala From maha with Apache License 2.0
// Copyright 2017, Yahoo Holdings Inc.
// Licensed under the terms of the Apache License 2.0. Please see LICENSE file in project root for terms.
package com.yahoo.maha.service.factory

import com.yahoo.maha.service.MahaServiceConfig.MahaConfigResult
import com.yahoo.maha.core.{DruidLiteralMapper, OracleLiteralMapper, PostgresLiteralMapper}
import org.json4s.JValue
import _root_.scalaz._
import com.yahoo.maha.executor.postgres.PostgresLiteralMapperUsingDriver
import com.yahoo.maha.service.MahaServiceConfigContext
import syntax.validation._

class DefaultOracleLiteralMapperFactory extends OracleLiteralMapperFactory {
  override def fromJson(config: JValue)(implicit context: MahaServiceConfigContext): MahaConfigResult[OracleLiteralMapper] = {
    new OracleLiteralMapper().successNel
  }

  override def supportedProperties: List[(String, Boolean)] = List.empty
}

class DefaultDruidLiteralMapperFactory extends DruidLiteralMapperFactory {
  override def fromJson(config: JValue)(implicit context: MahaServiceConfigContext): MahaConfigResult[DruidLiteralMapper] = {
    new DruidLiteralMapper().successNel
  }

  override def supportedProperties: List[(String, Boolean)] = List.empty
}

class DefaultPostgresLiteralMapperFactory extends PostgresLiteralMapperFactory {
  override def fromJson(config: JValue)(implicit context: MahaServiceConfigContext): MahaConfigResult[PostgresLiteralMapper] = {
    new PostgresLiteralMapper().successNel
  }

  override def supportedProperties: List[(String, Boolean)] = List.empty
}

class DefaultPostgresLiteralMapperUsingDriverFactory extends PostgresLiteralMapperFactory {
  override def fromJson(config: JValue)(implicit context: MahaServiceConfigContext): MahaConfigResult[PostgresLiteralMapper] = {
    new PostgresLiteralMapperUsingDriver().successNel
  }

  override def supportedProperties: List[(String, Boolean)] = List.empty
}
Example 30
Source File: ParallelServiceExecutoryFactory.scala From maha with Apache License 2.0
// Copyright 2017, Yahoo Holdings Inc. // Licensed under the terms of the Apache License 2.0. Please see LICENSE file in project root for terms. package com.yahoo.maha.service.factory import java.util.concurrent.RejectedExecutionHandler import com.yahoo.maha.core.request._ import com.yahoo.maha.parrequest2.future.ParallelServiceExecutor import com.yahoo.maha.service.{MahaServiceConfig, MahaServiceConfigContext} import com.yahoo.maha.service.MahaServiceConfig.MahaConfigResult import org.json4s.JValue import scalaz.Validation.FlatMap._ import scalaz.syntax.applicative._ class DefaultParallelServiceExecutoryFactory extends ParallelServiceExecutoryFactory{ """ |{ |"rejectedExecutionHandlerClass" : "", |"rejectedExecutionHandlerConfig" : "", |"poolName" : "", |"defaultTimeoutMillis" : 10000, |"threadPoolSize" : 3, |"queueSize" : 3 |} """.stripMargin override def fromJson(configJson: JValue)(implicit context: MahaServiceConfigContext): MahaConfigResult[ParallelServiceExecutor] = { import org.json4s.scalaz.JsonScalaz._ val rejectedExecutionHandlerClassResult: MahaServiceConfig.MahaConfigResult[String] = fieldExtended[String]("rejectedExecutionHandlerClass")(configJson) val rejectedExecutionHandlerConfigResult: MahaServiceConfig.MahaConfigResult[JValue] = fieldExtended[JValue]("rejectedExecutionHandlerConfig")(configJson) val poolNameResult: MahaServiceConfig.MahaConfigResult[String] = fieldExtended[String]("poolName")(configJson) val defaultTimeoutMillisResult: MahaServiceConfig.MahaConfigResult[Int] = fieldExtended[Int]("defaultTimeoutMillis")(configJson) val threadPoolSizeResult: MahaServiceConfig.MahaConfigResult[Int] = fieldExtended[Int]("threadPoolSize")(configJson) val queueSizeResult: MahaServiceConfig.MahaConfigResult[Int] = fieldExtended[Int]("queueSize")(configJson) val rejectedExecutionHandlerResult : MahaServiceConfig.MahaConfigResult[RejectedExecutionHandler] = for { rejectedExecutionHandlerClass <- rejectedExecutionHandlerClassResult rejectedExecutionHandlerConfig <- rejectedExecutionHandlerConfigResult factory <- getFactory[RejectedExecutionHandlerFactory](rejectedExecutionHandlerClass) rejectedExecutionHandler <- factory.fromJson(rejectedExecutionHandlerConfig)(context) } yield rejectedExecutionHandler (rejectedExecutionHandlerResult |@| poolNameResult |@| defaultTimeoutMillisResult |@| threadPoolSizeResult |@| queueSizeResult ) { (rejectedExecutionHandler, poolName, defaultTimeoutMillis, threadPoolSize, queueSize) => { val parallelServiceExecutor = new ParallelServiceExecutor() parallelServiceExecutor.setRejectedExecutionHandler(rejectedExecutionHandler) parallelServiceExecutor.setPoolName(poolName) parallelServiceExecutor.setDefaultTimeoutMillis(defaultTimeoutMillis) parallelServiceExecutor.setThreadPoolSize(threadPoolSize) parallelServiceExecutor.setQueueSize(queueSize) parallelServiceExecutor.init() parallelServiceExecutor } } } override def supportedProperties: List[(String, Boolean)] = List.empty }
Example 31
Source File: DruidQueryOptimizerFactory.scala From maha with Apache License 2.0
// Copyright 2017, Yahoo Holdings Inc. // Licensed under the terms of the Apache License 2.0. Please see LICENSE file in project root for terms. package com.yahoo.maha.service.factory import com.yahoo.maha.service.{MahaServiceConfig, MahaServiceConfigContext} import com.yahoo.maha.service.MahaServiceConfig.MahaConfigResult import com.yahoo.maha.core.query.druid.{AsyncDruidQueryOptimizer, DruidQueryOptimizer, SyncDruidQueryOptimizer} import com.yahoo.maha.core.request._ import org.json4s.JValue import _root_.scalaz._ import syntax.applicative._ import syntax.validation._ class SyncDruidQueryOptimizerFactory extends DruidQueryOptimizerFactory { """ | { | "maxSingleThreadedDimCardinality" : 40000, | "maxNoChunkCost": 280000, | "maxChunks": 3, | "timeout": 300000 | } """.stripMargin override def fromJson(configJson: JValue)(implicit context: MahaServiceConfigContext): MahaConfigResult[DruidQueryOptimizer] = { import org.json4s.scalaz.JsonScalaz._ val maxSingleThreadedDimCardinalityResult: MahaServiceConfig.MahaConfigResult[Long] = fieldExtended[Long]("maxSingleThreadedDimCardinality")(configJson) val maxNoChunkCostResult: MahaServiceConfig.MahaConfigResult[Long] = fieldExtended[Long]("maxNoChunkCost")(configJson) val maxChunksResult: MahaServiceConfig.MahaConfigResult[Int] = fieldExtended[Int]("maxChunks")(configJson) val timeoutResult: MahaServiceConfig.MahaConfigResult[Int] = fieldExtended[Int]("timeout")(configJson) (maxSingleThreadedDimCardinalityResult |@| maxNoChunkCostResult |@| maxChunksResult |@| timeoutResult) { (a, b, c ,d) => new SyncDruidQueryOptimizer(a, b, c, d) } } override def supportedProperties: List[(String, Boolean)] = List.empty } class AsyncDruidQueryOptimizerFactory extends DruidQueryOptimizerFactory { override def fromJson(configJson: JValue)(implicit context: MahaServiceConfigContext): MahaConfigResult[DruidQueryOptimizer] = { import org.json4s.scalaz.JsonScalaz._ val maxSingleThreadedDimCardinalityResult: MahaServiceConfig.MahaConfigResult[Long] = fieldExtended[Long]("maxSingleThreadedDimCardinality")(configJson) val maxNoChunkCostResult: MahaServiceConfig.MahaConfigResult[Long] = fieldExtended[Long]("maxNoChunkCost")(configJson) val maxChunksResult: MahaServiceConfig.MahaConfigResult[Int] = fieldExtended[Int]("maxChunks")(configJson) val timeoutResult: MahaServiceConfig.MahaConfigResult[Int] = fieldExtended[Int]("timeout")(configJson) (maxSingleThreadedDimCardinalityResult |@| maxNoChunkCostResult |@| maxChunksResult |@| timeoutResult) { (a, b, c ,d) => new AsyncDruidQueryOptimizer(a, b, c, d) } } override def supportedProperties: List[(String, Boolean)] = List.empty } class DefaultDruidQueryOptimizerFactory extends DruidQueryOptimizerFactory { override def fromJson(config: JValue)(implicit context: MahaServiceConfigContext): MahaConfigResult[DruidQueryOptimizer] = new SyncDruidQueryOptimizer().successNel override def supportedProperties: List[(String, Boolean)] = List.empty }
Example 32
Source File: NoopAuthHeaderProviderFactory.scala From maha with Apache License 2.0
package com.yahoo.maha.service.factory

import com.yahoo.maha.core.request.fieldExtended
import com.yahoo.maha.executor.druid.NoopAuthHeaderProvider
import com.yahoo.maha.service.{MahaServiceConfig, MahaServiceConfigContext}
import org.json4s.JValue
import scalaz.syntax.applicative._

class NoopAuthHeaderProviderFactory extends AuthHeaderProviderFactory {
  """
    |{
    |"domain" : "",
    |"service" :"",
    |"privateKeyName" : "",
    |"privateKeyId" : ""
    |}
  """.stripMargin

  override def fromJson(configJson: JValue)(implicit context: MahaServiceConfigContext): MahaServiceConfig.MahaConfigResult[NoopAuthHeaderProvider] = {
    import org.json4s.scalaz.JsonScalaz._

    val noopDomainResult: MahaServiceConfig.MahaConfigResult[String] = fieldExtended[String]("domain")(configJson)
    val noopServiceResult: MahaServiceConfig.MahaConfigResult[String] = fieldExtended[String]("service")(configJson)
    val noopPrivateKeyNameResult: MahaServiceConfig.MahaConfigResult[String] = fieldExtended[String]("privateKeyName")(configJson)
    val noopPrivateKeyIdResult: MahaServiceConfig.MahaConfigResult[String] = fieldExtended[String]("privateKeyId")(configJson)

    (noopDomainResult |@| noopServiceResult |@| noopPrivateKeyNameResult |@| noopPrivateKeyIdResult) {
      (_, _, _, _) => {
        new NoopAuthHeaderProvider
      }
    }
  }

  override def supportedProperties: List[(String, Boolean)] = List.empty
}
Example 33
Source File: DrilldownConfig.scala From maha with Apache License 2.0 | 5 votes |
package com.yahoo.maha.service.curators

import org.json4s.DefaultFormats
import org.json4s.scalaz.JsonScalaz._
import com.yahoo.maha.core.request._
import com.yahoo.maha.service.MahaServiceConfig
import com.yahoo.maha.service.factory._
import org.json4s.JValue
import org.json4s.scalaz.JsonScalaz

object DrilldownConfig {
  val MAXIMUM_ROWS: BigInt = 1000
  val DEFAULT_ENFORCE_FILTERS: Boolean = true

  implicit val formats: DefaultFormats.type = DefaultFormats

  def parse(curatorJsonConfig: CuratorJsonConfig): JsonScalaz.Result[DrilldownConfig] = {
    import _root_.scalaz.syntax.validation._

    val config: JValue = curatorJsonConfig.json

    val dimension: Field = assignDim(config)
    val maxRows: BigInt = assignMaxRows(config)
    val enforceFilters: Boolean = assignEnforceFilters(config)
    val ordering: IndexedSeq[SortBy] = assignOrdering(config)
    val cube: String = assignCube(config, "")

    DrilldownConfig(enforceFilters, dimension, cube, ordering, maxRows).successNel
  }

  private def assignCube(config: JValue, default: String): String = {
    val cubeResult: MahaServiceConfig.MahaConfigResult[String] = fieldExtended[String]("cube")(config)
    if (cubeResult.isSuccess) {
      cubeResult.toOption.get
    } else {
      default
    }
  }

  private def assignDim(config: JValue): Field = {
    val drillDim: MahaServiceConfig.MahaConfigResult[String] = fieldExtended[String]("dimension")(config)
    require(drillDim.isSuccess, "CuratorConfig for a DrillDown should have a dimension declared!")
    Field(drillDim.toOption.get, None, None)
  }

  private def assignMaxRows(config: JValue): BigInt = {
    val maxRowsLimitResult: MahaServiceConfig.MahaConfigResult[Int] = fieldExtended[Int]("mr")(config)
    if (maxRowsLimitResult.isSuccess) {
      maxRowsLimitResult.toOption.get
    } else {
      MAXIMUM_ROWS
    }
  }

  private def assignEnforceFilters(config: JValue): Boolean = {
    val enforceFiltersResult: MahaServiceConfig.MahaConfigResult[Boolean] = fieldExtended[Boolean]("enforceFilters")(config)
    if (enforceFiltersResult.isSuccess)
      enforceFiltersResult.toOption.get
    else {
      DEFAULT_ENFORCE_FILTERS
    }
  }

  private def assignOrdering(config: JValue): IndexedSeq[SortBy] = {
    val orderingResult: MahaServiceConfig.MahaConfigResult[List[SortBy]] = fieldExtended[List[SortBy]]("ordering")(config)
    if (orderingResult.isSuccess) {
      orderingResult.toOption.get.toIndexedSeq
    } else {
      if (orderingResult.toEither.left.get.toString().contains("order must be asc|desc not")) {
        throw new IllegalArgumentException(orderingResult.toEither.left.get.head.message)
      } else {
        IndexedSeq.empty
      }
    }
  }
}

case class DrilldownConfig(enforceFilters: Boolean,
                           dimension: Field,
                           cube: String,
                           ordering: IndexedSeq[SortBy],
                           maxRows: BigInt) extends CuratorConfig
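For reference, a hedged sketch of the JSON shape DrilldownConfig.parse reads. The field names and defaults come from the code above; the values are illustrative, and how the JValue gets wrapped into a CuratorJsonConfig is project-specific and not shown.

import org.json4s.JValue
import org.json4s.JsonDSL._

// Hypothetical drilldown curator config, built with the json4s DSL.
val drilldownJson: JValue =
  ("dimension" -> "Day") ~        // required: parse fails the require without it
  ("mr" -> 100) ~                 // optional: defaults to MAXIMUM_ROWS (1000)
  ("enforceFilters" -> false) ~   // optional: defaults to DEFAULT_ENFORCE_FILTERS (true)
  ("cube" -> "some_cube")         // optional: defaults to ""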
Example 34
Source File: JsonKafkaMahaRequestLogConfig.scala From maha with Apache License 2.0 | 5 votes |
// Copyright 2017, Yahoo Holdings Inc.
// Licensed under the terms of the Apache License 2.0. Please see LICENSE file in project root for terms.
package com.yahoo.maha.service.config

import com.yahoo.maha.core.request._
import com.yahoo.maha.log.KafkaRequestLoggingConfig
import org.json4s.JValue

case class JsonMahaRequestLogConfig(className: String, kafkaConfig: JValue, isLoggingEnabled: Boolean)

import scalaz.syntax.applicative._

object JsonMahaRequestLogConfig {
  import org.json4s.scalaz.JsonScalaz._

  implicit def parse: JSONR[JsonMahaRequestLogConfig] = new JSONR[JsonMahaRequestLogConfig] {
    override def read(json: JValue): Result[JsonMahaRequestLogConfig] = {
      val name: Result[String] = fieldExtended[String]("factoryClass")(json)
      val config: Result[JValue] = fieldExtended[JValue]("config")(json)
      val isLoggingEnabled: Result[Boolean] = fieldExtended[Boolean]("isLoggingEnabled")(json)

      (name |@| config |@| isLoggingEnabled) {
        (a, b, c) => JsonMahaRequestLogConfig(a, b, c)
      }
    }
  }
}

case class JsonKafkaRequestLoggingConfig(config: KafkaRequestLoggingConfig)

object JsonKafkaRequestLoggingConfig {
  import org.json4s.scalaz.JsonScalaz._

  implicit def parse: JSONR[JsonKafkaRequestLoggingConfig] = new JSONR[JsonKafkaRequestLoggingConfig] {
    override def read(json: JValue): Result[JsonKafkaRequestLoggingConfig] = {
      val kafkaBrokerListResult: Result[String] = fieldExtended[String]("kafkaBrokerList")(json)
      val bootstrapServersResult: Result[String] = fieldExtended[String]("bootstrapServers")(json)
      val producerTypeResult: Result[String] = fieldExtended[String]("producerType")(json)
      val serializerClassResult: Result[String] = fieldExtended[String]("serializerClass")(json)
      val requestRequiredAcksResult: Result[String] = fieldExtended[String]("requestRequiredAcks")(json)
      val kafkaBlockOnBufferFullResult: Result[String] = fieldExtended[String]("kafkaBlockOnBufferFull")(json)
      val batchNumMessagesResult: Result[String] = fieldExtended[String]("batchNumMessages")(json)
      val topicNameResult: Result[String] = fieldExtended[String]("topicName")(json)
      val bufferMemoryResult: Result[String] = fieldExtended[String]("bufferMemory")(json)
      val maxBlockMsResult: Result[String] = fieldExtended[String]("maxBlockMs")(json)

      (kafkaBrokerListResult |@| bootstrapServersResult |@| producerTypeResult |@| serializerClassResult |@|
        requestRequiredAcksResult |@| kafkaBlockOnBufferFullResult |@| batchNumMessagesResult |@|
        topicNameResult |@| bufferMemoryResult |@| maxBlockMsResult) {
        case (kafkaBrokerList, bootstrapServers, producerType, serializerClass, requestRequiredAcks,
              kafkaBlockOnBufferFull, batchNumMessages, topicName, bufferMemory, maxBlockMs) =>
          JsonKafkaRequestLoggingConfig(KafkaRequestLoggingConfig(kafkaBrokerList, bootstrapServers, producerType,
            serializerClass, requestRequiredAcks, kafkaBlockOnBufferFull, batchNumMessages, topicName, bufferMemory, maxBlockMs))
      }
    }
  }
}
Example 35
Source File: DynamicConfigurationUtils.scala From maha with Apache License 2.0 | 5 votes |
// Copyright 2017, Yahoo Holdings Inc.
// Licensed under the terms of the Apache License 2.0. Please see LICENSE file in project root for terms.
package com.yahoo.maha.service.config.dynamic

import java.util.regex.Pattern

import grizzled.slf4j.Logging
import org.json4s.JsonAST.JString
import org.json4s.{JField, JValue}

import scala.collection.mutable

object DynamicConfigurationUtils extends Logging {
  private val START = Pattern.quote("<%(")
  private val END = Pattern.quote(")%>")
  val DYNAMIC_CONFIG_PATTERN = Pattern.compile(s"$START(.*),(.*)$END")

  def extractDynamicFields(json: JValue): Map[String, (String, String)] = {
    val dynamicFieldMap = new mutable.HashMap[String, (String, String)]()
    val dynamicFields = getDynamicFields(json)
    dynamicFields.foreach(f => {
      require(f._2.isInstanceOf[JString], s"Cannot extract dynamic property from non-string field: $f")
      implicit val formats = org.json4s.DefaultFormats
      val matcher = DYNAMIC_CONFIG_PATTERN.matcher(f._2.extract[String])
      require(matcher.find(), s"Field does not contain dynamic property $f. Pattern - $DYNAMIC_CONFIG_PATTERN")
      require(matcher.groupCount() == 2, s"Expected name and default value in dynamic property field: $f")
      val propertyKey = matcher.group(1).trim
      val defaultValue = matcher.group(2).trim
      dynamicFieldMap.put(propertyKey, (f._1, defaultValue))
    })
    dynamicFieldMap.toMap
  }

  def getDynamicFields(json: JValue): List[JField] = {
    implicit val formats = org.json4s.DefaultFormats
    json.filterField(_._2 match {
      case JString(s) => {
        DYNAMIC_CONFIG_PATTERN.matcher(s).find()
      }
      case a => false
    })
  }
}
Example 36
Source File: TestFactories.scala From maha with Apache License 2.0 | 5 votes |
package com.yahoo.maha.service.factory

import com.yahoo.maha.core.{DimCostEstimator, DimensionCandidate, FactCostEstimator, Filter, RowsEstimate}
import com.yahoo.maha.core.request.ReportingRequest
import com.yahoo.maha.service.MahaServiceConfig.MahaConfigResult
import com.yahoo.maha.service.MahaServiceConfigContext
import org.json4s.JValue

import scala.collection.immutable.SortedSet
import scala.collection.mutable

class TestFactEstimator extends FactCostEstimator {
  override def isGrainKey(grainKey: String): Boolean = true

  def getRowsEstimate(schemaRequiredEntitySet: Set[(String, Filter)]
                      , dimensionsCandidates: SortedSet[DimensionCandidate]
                      , factDimList: List[String]
                      , request: ReportingRequest
                      , filters: scala.collection.mutable.Map[String, Filter]
                      , defaultRowCount: Long): RowsEstimate = {
    if (request.isDebugEnabled) {
      RowsEstimate(10000, true, Long.MaxValue, false)
    } else RowsEstimate(1000, true, Long.MaxValue, false)
  }

  override def getSchemaBasedGrainRows(grainKey: String, request: ReportingRequest,
                                       filters: mutable.Map[String, Filter], defaultRowCount: Long): Option[Long] = Option(1000)

  override def getAllBasedGrainRows(grainKey: String, request: ReportingRequest,
                                    filters: mutable.Map[String, Filter]): Option[Long] = Option(1000)
}

class TestDimEstimator extends DimCostEstimator {
  override def getCardinalityEstimate(grainKey: String, request: ReportingRequest,
                                      filters: mutable.Map[String, Filter]): Option[Long] = {
    if (request.isDebugEnabled) {
      Some(10000)
    } else Some(1000)
  }
}

import _root_.scalaz._
import Scalaz._

class TestFactCostEstimatoryFactory extends FactCostEstimatorFactory {
  override def fromJson(config: JValue)(implicit context: MahaServiceConfigContext): MahaConfigResult[FactCostEstimator] =
    new TestFactEstimator().successNel

  override def supportedProperties: List[(String, Boolean)] = List.empty
}

class TestDimCostEstimatoryFactory extends DimCostEstimatorFactory {
  override def fromJson(config: JValue)(implicit context: MahaServiceConfigContext): MahaConfigResult[DimCostEstimator] =
    new TestDimEstimator().successNel

  override def supportedProperties: List[(String, Boolean)] = List.empty
}
Example 37
Source File: DataSourceConnectionPoolFactoryTest.scala From maha with Apache License 2.0 | 5 votes |
// Copyright 2017, Yahoo Holdings Inc.
// Licensed under the terms of the Apache License 2.0. Please see LICENSE file in project root for terms.
package com.yahoo.maha.service.factory

import java.util.UUID
import javax.sql.DataSource

import com.yahoo.maha.jdbc.JdbcConnection
import com.yahoo.maha.service.{DefaultMahaServiceConfigContext, MahaServiceConfigContext}
import com.zaxxer.hikari.HikariDataSource
import org.json4s.JValue
import org.json4s.jackson.JsonMethods._
import org.scalatest.{FunSuite, Matchers}

class DataSourceConnectionPoolFactoryTest extends BaseFactoryTest {
  implicit val context: MahaServiceConfigContext = DefaultMahaServiceConfigContext()

  test("Test Creation of HikariDataSource") {
    val uuid = UUID.randomUUID().toString.replace("-", "")
    val jsonString =
      s"""
         |{
         |"driverClassName" : "org.h2.Driver",
         |"jdbcUrl" : "jdbc:h2:mem:$uuid;MODE=Oracle;DB_CLOSE_DELAY=-1",
         |"username" : "sa",
         |"passwordProviderFactoryClassName" : "com.yahoo.maha.service.factory.PassThroughPasswordProviderFactory",
         |"passwordProviderConfig" : [{"key" : "value"}],
         |"passwordKey" : "h2.test.database.password",
         |"poolName" : "test-pool",
         |"maximumPoolSize" : 10,
         |"minimumIdle" : 1,
         |"autoCommit": true,
         |"connectionTestQuery" : "SELECT 1 FROM DUAL",
         |"validationTimeout" : 1000000,
         |"idleTimeout" : 1000000,
         |"maxLifetime" : 10000000,
         |"dataSourceProperties": [{"key": "propertyKey" , "value": "propertyValue"}]
         |}
       """.stripMargin

    val factoryResult = getFactory[DataSourceFactory]("com.yahoo.maha.service.factory.HikariDataSourceFactory", closer)
    assert(factoryResult.isSuccess)
    val factory = factoryResult.toOption.get
    val json = parse(jsonString)
    val generatorResult = factory.fromJson(json)
    assert(generatorResult.isSuccess, generatorResult)
    assert(generatorResult.toList.head.isInstanceOf[DataSource])
    generatorResult.foreach { ds =>
      val connection = new JdbcConnection(ds)
      assert(ds.asInstanceOf[HikariDataSource].getIdleTimeout == 1000000)
      assert(ds.asInstanceOf[HikariDataSource].getPoolName == "test-pool")
      val ddlResult = connection.executeUpdate("create table test(key varchar(20), value varchar(20));")
      assert(ddlResult.isSuccess)
    }
    assert(KvPair.fieldJSONW.write(KvPair("1", "2")).isInstanceOf[JValue])
  }
}
Example 38
Source File: package.scala From maha with Apache License 2.0 | 5 votes |
// Copyright 2017, Yahoo Holdings Inc.
// Licensed under the terms of the Apache License 2.0. Please see LICENSE file in project root for terms.
package com.yahoo.maha.core

import org.json4s.JValue
import org.json4s.scalaz.JsonScalaz.{JSONR, _}

import scalaz.Validation

package object request {
  implicit val formats = org.json4s.DefaultFormats

  def fieldExtended[A: JSONR](name: String)(json: JValue): Result[A] = {
    val result = field[A](name)(json)
    result.leftMap {
      nel => nel.map {
        case UnexpectedJSONError(was, expected) =>
          UncategorizedError(name, s"unexpected value : $was expected : ${expected.getSimpleName}", List.empty)
        case a => a
      }
    }
  }

  def optionalFieldExtended[A: JSONR](name: String, default: A)(json: JValue): Result[A] = {
    val result = field[A](name)(json)
    if (result.isSuccess) result
    else {
      Validation.success[Error, A](default).asInstanceOf[Result[A]]
    }
  }
}
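A quick sketch of how these helpers behave, assuming the package object above and json4s-scalaz are on the classpath; the sample JSON is made up.

import com.yahoo.maha.core.request._
import org.json4s.jackson.JsonMethods.parse
import org.json4s.scalaz.JsonScalaz._   // implicit JSONR instances for Int, String, etc.

val json = parse("""{"maxChunks": 3}""")
val maxChunks = fieldExtended[Int]("maxChunks")(json)               // Success(3)
val missing   = fieldExtended[Int]("timeout")(json)                 // Failure: field not present
val timeout   = optionalFieldExtended[Int]("timeout", 300000)(json) // falls back to the supplied default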
Example 39
Source File: QueryResult.scala From maha with Apache License 2.0 | 5 votes |
package com.yahoo.maha.core.query

import com.yahoo.maha.core.query.QueryResultStatus.QueryResultStatus
import org.json4s.JValue

case class QueryResult[T <: RowList](rowList: T
                                     , queryAttributes: QueryAttributes
                                     , queryResultStatus: QueryResultStatus
                                     , exception: Option[Throwable] = None
                                     , pagination: Option[JValue] = None
                                    ) {
  def isFailure: Boolean = {
    queryResultStatus == QueryResultStatus.FAILURE
  }

  def requireSuccess(errMessage: String): Unit = {
    if (isFailure) {
      if (exception.isDefined) {
        throw new RuntimeException(s"$errMessage : ${exception.get.getMessage}", exception.get)
      } else {
        throw new RuntimeException(errMessage)
      }
    }
  }
}

object QueryResultStatus extends Enumeration {
  type QueryResultStatus = Value
  val SUCCESS, FAILURE = Value
}
Example 40
Source File: DataPipelineDefGroup.scala From hyperion with Apache License 2.0 | 5 votes |
package com.krux.hyperion

import com.amazonaws.services.datapipeline.model.{ParameterObject => AwsParameterObject, PipelineObject => AwsPipelineObject}
import org.json4s.JsonAST.JArray
import org.json4s.JsonDSL._
import org.json4s.JValue

import com.krux.hyperion.activity.MainClass
import com.krux.hyperion.aws.{AdpJsonSerializer, AdpParameterSerializer, AdpPipelineSerializer}
import com.krux.hyperion.common.{DefaultObject, HdfsUriHelper, PipelineObject, S3UriHelper}
import com.krux.hyperion.expression.{Parameter, ParameterValues, Duration}
import com.krux.hyperion.workflow.{WorkflowExpression, WorkflowExpressionImplicits}

trait DataPipelineDefGroup extends S3UriHelper with HdfsUriHelper with WorkflowExpressionImplicits {

  def nameKeySeparator = DataPipelineDefGroup.DefaultNameKeySeparator

  private lazy val context = new HyperionContext()

  implicit def hc: HyperionContext = context

  implicit val pv: ParameterValues = new ParameterValues()

  def pipelineName: String = MainClass(this).toString

  def schedule: Schedule

  def pipelineLifeCycle: PipelineLifeCycle = new PipelineLifeCycle { }

  def setParameterValue(id: String, value: String, ignoreMissing: Boolean = true): Unit = {
    val foundParam = parameters.find(_.id == id)
    if (ignoreMissing) foundParam.foreach(_.withValueFromString(value))
    else foundParam.get.withValueFromString(value)
  }

  private[hyperion] def nameForKey(key: WorkflowKey): String =
    pipelineName + key.map(nameKeySeparator + _).getOrElse("")
}

object DataPipelineDefGroup {

  final val DefaultNameKeySeparator = "#"

  private def delayedSchedule(dpdg: DataPipelineDefGroup, multiplier: Int): Schedule = dpdg.scheduleDelay match {
    case None => dpdg.schedule
    case Some(delay) => Schedule.delay(dpdg.schedule, delay, multiplier)
  }

  implicit class DataPipelineDefGroupOps(dpdg: DataPipelineDefGroup) {

    def ungroup(): Map[WorkflowKey, DataPipelineDef] = dpdg.workflows
      .toSeq
      .sortBy(_._1)  // order by key
      .zipWithIndex
      .map { case ((key, workflow), idx) =>
        (
          key,
          DataPipelineDefWrapper(
            dpdg.hc,
            dpdg.nameForKey(key),
            delayedSchedule(dpdg, idx),
            dpdg.pipelineLifeCycle,
            () => workflow,
            dpdg.tags,
            dpdg.parameters
          )
        )
      }
      .toMap

    def objects: Map[WorkflowKey, Iterable[PipelineObject]] = dpdg.workflows
      .toSeq
      .sortBy(_._1)
      .zipWithIndex
      .map { case ((key, workflow), idx) =>
        val dObj = dpdg.defaultObject.withSchedule(delayedSchedule(dpdg, idx))
        key -> (dObj +: dObj.objects ++: workflow.toPipelineObjects.toList)
      }
      .toMap

    def toAwsParameters: Seq[AwsParameterObject] =
      dpdg.parameters.flatMap(_.serialize).map(o => AdpParameterSerializer(o)).toList

    def toAwsPipelineObjects: Map[WorkflowKey, Seq[AwsPipelineObject]] =
      objects.mapValues(_.map(_.serialize).toList.sortBy(_.id).map(AdpPipelineSerializer(_)))

    def toJson: JValue =
      ("objects" -> JArray(objects.values.flatten.map(_.serialize).toList.sortBy(_.id).map(AdpJsonSerializer(_)))) ~
        ("parameters" -> JArray(dpdg.parameters.flatMap(_.serialize).map(o => AdpJsonSerializer(o)).toList))
  }
}
Example 41
Source File: OrderBook.scala From scala-stellar-sdk with Apache License 2.0 | 5 votes |
package stellar.sdk.model

import org.json4s.JsonAST.JObject
import org.json4s.native.JsonMethods._
import org.json4s.{DefaultFormats, JValue}
import stellar.sdk.KeyPair
import stellar.sdk.model.response.ResponseParser

case class OrderBook(selling: Asset, buying: Asset, bids: Seq[Order], asks: Seq[Order])

case class Order(price: Price, quantity: Long)

object OrderBookDeserializer extends ResponseParser[OrderBook]({ o: JObject =>
  implicit val formats = DefaultFormats

  def asset(obj: JValue) = {
    def assetCode = (obj \ s"asset_code").extract[String]

    def assetIssuer = KeyPair.fromAccountId((obj \ s"asset_issuer").extract[String])

    (obj \ s"asset_type").extract[String] match {
      case "native" => NativeAsset
      case "credit_alphanum4" => IssuedAsset4(assetCode, assetIssuer)
      case "credit_alphanum12" => IssuedAsset12(assetCode, assetIssuer)
      case t => throw new RuntimeException(s"Unrecognised asset type '$t'")
    }
  }

  def orders(obj: JValue) = {
    obj.children.map(c =>
      Order(
        price = Price(
          n = (c \ "price_r" \ "n").extract[Int],
          d = (c \ "price_r" \ "d").extract[Int]
        ),
        quantity = Amount.toBaseUnits((c \ "amount").extract[String]).get
      ))
  }

  try {
    OrderBook(
      selling = asset(o \ "base"),
      buying = asset(o \ "counter"),
      bids = orders(o \ "bids"),
      asks = orders(o \ "asks")
    )
  } catch {
    case t: Throwable => throw new RuntimeException(pretty(render(o)), t)
  }
})
Example 42
Source File: PaymentPath.scala From scala-stellar-sdk with Apache License 2.0 | 5 votes |
package stellar.sdk.model

import org.json4s.JsonAST.JObject
import org.json4s.{DefaultFormats, Formats, JArray, JValue}
import stellar.sdk.KeyPair
import stellar.sdk.model.AmountParser.{AssetDeserializer, parseAsset}
import stellar.sdk.model.response.ResponseParser

case class PaymentPath(source: Amount, destination: Amount, path: Seq[Asset])

object PaymentPathDeserializer extends ResponseParser[PaymentPath]({ o: JObject =>
  implicit val formats = DefaultFormats
  implicit val assetDeserializer = AssetDeserializer

  PaymentPath(
    source = AmountParser.amount("source_", o),
    destination = AmountParser.amount("destination_", o),
    path = {
      val JArray(values) = (o \ "path").extract[JArray]
      values.map { jv => parseAsset("", jv) }
    }
  )
})

object AmountParser {

  implicit val formats = DefaultFormats

  def parseAsset(prefix: String, o: JValue)(implicit formats: Formats): Asset = {
    val assetType = (o \ s"${prefix}asset_type").extract[String]

    def code = (o \ s"${prefix}asset_code").extract[String]

    def issuer = KeyPair.fromAccountId((o \ s"${prefix}asset_issuer").extract[String])

    assetType match {
      case "native" => NativeAsset
      case "credit_alphanum4" => IssuedAsset4(code, issuer)
      case "credit_alphanum12" => IssuedAsset12(code, issuer)
      case t => throw new RuntimeException(s"Unrecognised ${prefix}asset type: $t")
    }
  }

  def amount(prefix: String, o: JObject)(implicit formats: Formats): Amount = {
    val asset = parseAsset(prefix, o)
    val units = Amount.toBaseUnits((o \ s"${prefix}amount").extract[String]).get
    Amount(units, asset)
  }

  object AssetDeserializer extends ResponseParser[Asset](parseAsset("", _))
}
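A small, hedged sketch of calling AmountParser.parseAsset directly on a JSON fragment; the fragment is made up, and a real Horizon response carries more fields.

import org.json4s.{DefaultFormats, Formats}
import org.json4s.native.JsonMethods.parse
import stellar.sdk.model.AmountParser

implicit val formats: Formats = DefaultFormats
// "native" assets carry no code/issuer, so asset_type alone is enough here.
val asset = AmountParser.parseAsset("", parse("""{"asset_type":"native"}"""))  // NativeAsset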
Example 43
Source File: TradeAggregation.scala From scala-stellar-sdk with Apache License 2.0 | 5 votes |
package stellar.sdk.model

import java.time.Instant
import java.util.concurrent.TimeUnit

import org.json4s.JsonAST.JObject
import org.json4s.{DefaultFormats, JValue}
import stellar.sdk.model.response.ResponseParser

import scala.concurrent.duration.Duration

case class TradeAggregation(instant: Instant, tradeCount: Int, baseVolume: Double, counterVolume: Double,
                            average: Double, open: Price, high: Price, low: Price, close: Price)

object TradeAggregationDeserializer extends ResponseParser[TradeAggregation]({ o: JObject =>
  implicit val formats = DefaultFormats

  def price(p: JValue): Price = Price((p \ "N").extract[Int], (p \ "D").extract[Int])

  TradeAggregation(
    instant = Instant.ofEpochMilli((o \ "timestamp").extract[String].toLong),
    tradeCount = (o \ "trade_count").extract[String].toInt,
    baseVolume = (o \ "base_volume").extract[String].toDouble,
    counterVolume = (o \ "counter_volume").extract[String].toDouble,
    average = (o \ "avg").extract[String].toDouble,
    open = price(o \ "open_r"),
    high = price(o \ "high_r"),
    low = price(o \ "low_r"),
    close = price(o \ "close_r"))
})

object TradeAggregation {

  sealed class Resolution(val duration: Duration)

  val OneMinute = new Resolution(Duration.create(1, TimeUnit.MINUTES))
  val FiveMinutes = new Resolution(OneMinute.duration * 5.0)
  val FifteenMinutes = new Resolution(FiveMinutes.duration * 3.0)
  val OneHour = new Resolution(FifteenMinutes.duration * 4.0)
  val OneDay = new Resolution(OneHour.duration * 24.0)
  val OneWeek = new Resolution(OneDay.duration * 7.0)
}
Example 44
Source File: HorizonServerError.scala From scala-stellar-sdk with Apache License 2.0 | 5 votes |
package stellar.sdk.inet

import okhttp3.HttpUrl
import org.json4s.native.JsonMethods
import org.json4s.{DefaultFormats, Formats, JObject, JValue}

import scala.concurrent.duration.Duration
import scala.util.Try

case class HorizonServerError(uri: HttpUrl, body: JObject)(implicit val formats: Formats) extends Exception(
  s"Server error when communicating with Horizon. $uri -> ${
    implicit val formats: Formats = DefaultFormats
    Try((body \ "detail").extract[String]).getOrElse(JsonMethods.compact(JsonMethods.render(body)))
  }"
)

case class HorizonEntityNotFound(uri: HttpUrl, body: JValue)(implicit val formats: Formats) extends Exception(
  s"Requested entity was not found in Horizon. $uri -> ${
    implicit val formats: Formats = DefaultFormats
    Try((body \ "detail").extract[String]).getOrElse(JsonMethods.compact(JsonMethods.render(body)))
  }"
)

case class HorizonRateLimitExceeded(uri: HttpUrl, retryAfter: Duration)(implicit val formats: Formats) extends Exception(
  s"Horizon request rate limit was exceeded. Try again in $retryAfter"
)

case class HorizonBadRequest(uri: HttpUrl, body: String) extends Exception(
  s"Bad request. $uri -> ${
    implicit val formats: Formats = DefaultFormats
    Try(
      (JsonMethods.parse(body) \ "extras" \ "reason").extract[String]
    ).getOrElse(body)
  }")

case class FailedResponse(cause: String) extends Exception(cause)
Example 45
Source File: DataTypeUtils.scala From incubator-livy with Apache License 2.0 | 5 votes |
package org.apache.livy.thriftserver.types

import org.json4s.{DefaultFormats, JValue}
import org.json4s.JsonAST.{JObject, JString}
import org.json4s.jackson.JsonMethods.parse

// Note: only an excerpt of the original object is shown. The enclosing object declaration is restored
// here for readability; the toFieldType helper, the Field/Schema types, and the implicit Formats needed
// by extract are defined elsewhere in the original file.
object DataTypeUtils {

  def schemaFromSparkJson(sparkJson: String): Schema = {
    val schema = parse(sparkJson) \ "fields"
    val fields = schema.children.map { field =>
      val name = (field \ "name").extract[String]
      val hiveType = toFieldType(field \ "type")
      // TODO: retrieve comment from metadata
      Field(name, hiveType, "")
    }
    Schema(fields.toArray)
  }
}
Example 46
Source File: ResultLoader.scala From donut with MIT License | 5 votes |
package report.donut

import java.io.File

import org.apache.commons.lang3.StringUtils
import org.json4s.{DefaultFormats, JValue}
import report.donut.gherkin.processors.JSONProcessor
import report.donut.transformers.cucumber.Feature

import scala.util.Try

trait ResultLoader {
  def load(): Either[String, List[Feature]]
}

object ResultLoader {

  private[donut] class CucumberResultLoader(sourceDir: File) extends ResultLoader {
    override def load(): Either[String, List[Feature]] = {
      if (!sourceDir.exists) {
        return Left(s"Source directory does not exist: $sourceDir")
      }

      val jsonValues = JSONProcessor.loadFrom(sourceDir) match {
        case Left(errors) => return Left(errors)
        case Right(r) => if (r.isEmpty) return Left("No files found of correct format") else Right(r)
      }

      Try(loadCukeFeatures(jsonValues.right.get)).toEither(_.getMessage)
    }

    private[donut] def loadCukeFeatures(json: List[JValue]) = {
      implicit val formats = DefaultFormats
      json.flatMap(f => f.extract[List[Feature]])
    }
  }

  def apply(resultSource: String): ResultLoader = {
    val pattern = "([a-zA-z]{2,}):(.*)".r
    pattern.findFirstMatchIn(resultSource) match {
      case Some(m) => {
        val format = m.group(1)
        val sourcePath = m.group(2)
        if (StringUtils.isBlank(sourcePath)) {
          throw new DonutException("Please provide the source directory path.")
        }

        format match {
          case "cucumber" => new CucumberResultLoader(new File(sourcePath))
          case _ => throw DonutException(s"Unsupported result format: $format")
        }
      }
      case None => new CucumberResultLoader(new File(resultSource)) // Defaults to cucumber result format
    }
  }
}
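A hedged usage sketch of the loader; the "cucumber:&lt;dir&gt;" source string and the directory path below are illustrative only.

// Hypothetical path: ResultLoader.apply parses the "format:path" prefix as shown above.
val loader = ResultLoader("cucumber:/tmp/cucumber-reports")
loader.load() match {
  case Right(features) => println(s"Loaded ${features.size} cucumber features")
  case Left(error)     => println(s"Could not load results: $error")
}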
Example 47
Source File: FilterJsonGeoAndTime.scala From eidos with Apache License 2.0 | 5 votes |
package org.clulab.wm.eidos.apps

import java.io.File

import org.clulab.wm.eidos.utils.FileUtils
import org.json4s.DefaultFormats
import org.json4s.JArray
import org.json4s.JNothing
import org.json4s.JObject
import org.json4s.JString
import org.json4s.JValue
import org.json4s.jackson.JsonMethods

object FilterJsonGeoAndTime extends App {

  class Filter() {
    implicit val formats: DefaultFormats.type = org.json4s.DefaultFormats

    def filter(inputFile: File, jValue: JValue): Unit = {
      println(s"Extracting from ${inputFile.getName}")

      def filterGeo(): Unit = {
        val geoLocations: JValue = (jValue \\ "geolocs" \ "text")

        geoLocations match {
          case JArray(geoLocations: List[_]) => // Type erasure removes the [JString]
            geoLocations.foreach { geoLocation =>
              val text = geoLocation.extract[String]
              val oneLiner = text
                .replace("\n", "\\n")
                .replace("\t", "\\t")

              println("\tGeo\t" + oneLiner)
            }
          case JNothing =>
          case _ => throw new RuntimeException(s"Unexpected geoLocations value: $geoLocations")
        }
      }

      def filterTime(): Unit = {
        val timexes: JValue = (jValue \\ "timexes" \ "text")

        timexes match {
          case JArray(timexes: List[_]) => // Type erasure removes the [JString]
            timexes.foreach { timex =>
              val text = timex.extract[String]
              val oneLiner = text
                .replace("\n", "\\n")
                .replace("\t", "\\t")

              println("\tTime\t" + oneLiner)
            }
          case JNothing =>
          case _ => throw new RuntimeException(s"Unexpected timexes value: $timexes")
        }
      }

      filterGeo()
      filterTime()
    }
  }

  val inputDir = args(0)
  val extension = args(1)
  val inputFiles = FileUtils.findFiles(inputDir, extension)
  val filter = new Filter()

  inputFiles.foreach { inputFile =>
    val text = FileUtils.getTextFromFile(inputFile)
    val json = JsonMethods.parse(text)

    filter.filter(inputFile, json)
  }
}
Example 48
Source File: DispatchHttpClient.scala From web3scala with Apache License 2.0 | 4 votes |
package org.web3scala.http

import dispatch._
import Defaults._
import org.json4s.JValue

class DispatchHttpClient(host: String = "127.0.0.1", port: Int = 8545, secure: Boolean = false) extends JValueHttpClient {

  override def async(request: Array[Byte]): Future[JValue] = {
    val req = DispatchHttpClient.httpRequest(host, port, secure, request)
    DispatchHttpClient.http(req OK as.json4s.Json)
  }

  override def sync(request: Array[Byte]): JValue = {
    async(request).apply()
  }
}

object DispatchHttpClient {
  private val httpInstance: Http = Http.default

  private def http: Http = httpInstance

  private def httpRequest(host: String, port: Int, secure: Boolean, request: Array[Byte]): Req = {
    val req = dispatch.host(host, port)
      .setBody(request)
      .setHeader("Content-Type", "application/json")

    if (secure) req.secure else req
  }
}
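A brief usage sketch, assuming an Ethereum JSON-RPC node is listening on the default 127.0.0.1:8545; the request payload is illustrative.

import org.json4s.JValue
import org.web3scala.http.DispatchHttpClient

val client = new DispatchHttpClient()  // defaults: host 127.0.0.1, port 8545, plain HTTP
val payload = """{"jsonrpc":"2.0","method":"web3_clientVersion","params":[],"id":1}"""
val response: JValue = client.sync(payload.getBytes("UTF-8"))  // blocks on the async call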
Example 49
Source File: ExecuteResponse.scala From Linkis with Apache License 2.0 | 4 votes |
package com.webank.wedatasphere.linkis.engine

import com.webank.wedatasphere.linkis.engine.exception.QueryFailedException
import org.json4s.JValue
import org.json4s.JsonAST.JString

sealed abstract class ExecuteResponse()

case class ExecuteComplete(value: JValue) extends ExecuteResponse() {
  def this(output: String) = this(JString(output))
}

object ExecuteComplete {
  def apply(output: String) = new ExecuteComplete(output)
  // def apply(value: JValue) = new ExecuteComplete(value)
}

case class ExecuteIncomplete(output: String) extends ExecuteResponse()

case class ExecuteError(t: Throwable) extends ExecuteResponse() {
  def this(errorMsg: String) = this(new QueryFailedException(60001, errorMsg))
}
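A minimal sketch of constructing and pattern matching on the response ADT above; the string values are illustrative.

val response: ExecuteResponse = ExecuteComplete("done")  // wrapped as JString("done") by the auxiliary constructor

response match {
  case ExecuteComplete(value)    => println(s"complete: $value")
  case ExecuteIncomplete(output) => println(s"still running: $output")
  case ExecuteError(t)           => println(s"failed: ${t.getMessage}")
}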