org.json4s.native.Serialization.write Scala Examples
The following examples show how to use org.json4s.native.Serialization.write.
Each example lists its source file and the original project it was taken from.
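Before the project-specific examples, here is a minimal, self-contained sketch of the API they all rely on: write needs an implicit Formats in scope and turns an object into a JSON string, while read does the reverse. The Person case class and the printed values are illustrative only and do not come from any of the projects below.

import org.json4s.NoTypeHints
import org.json4s.native.Serialization
import org.json4s.native.Serialization.{read, write}

// Hypothetical case class used only for this sketch.
case class Person(name: String, age: Int)

object WriteExample extends App {
  // write/read resolve an implicit org.json4s.Formats; NoTypeHints is the simplest choice.
  implicit val formats = Serialization.formats(NoTypeHints)

  val json: String = write(Person("Alice", 42)) // {"name":"Alice","age":42}
  val person: Person = read[Person](json)      // back to Person("Alice", 42)

  println(json)
  println(person)
}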
Example 1
Source File: Json4sSerialization.scala, from the kafka-serialization project (Apache License 2.0)

package com.ovoenergy.kafka.serialization.json4s

import java.io.{ByteArrayInputStream, ByteArrayOutputStream, InputStreamReader, OutputStreamWriter}
import java.nio.charset.StandardCharsets

import com.ovoenergy.kafka.serialization.core._
import org.apache.kafka.common.serialization.{Deserializer => KafkaDeserializer, Serializer => KafkaSerializer}
import org.json4s.Formats
import org.json4s.native.Serialization.{read, write}

import scala.reflect.ClassTag
import scala.reflect.runtime.universe._

trait Json4sSerialization {

  def json4sSerializer[T <: AnyRef](implicit jsonFormats: Formats): KafkaSerializer[T] = serializer { (_, data) =>
    val bout = new ByteArrayOutputStream()
    val writer = new OutputStreamWriter(bout, StandardCharsets.UTF_8)
    // TODO Use scala-arm
    try {
      write(data, writer)
      writer.flush()
    } finally {
      writer.close()
    }
    bout.toByteArray
  }

  def json4sDeserializer[T: TypeTag](implicit jsonFormats: Formats): KafkaDeserializer[T] = deserializer { (_, data) =>
    val tt = implicitly[TypeTag[T]]
    implicit val cl = ClassTag[T](tt.mirror.runtimeClass(tt.tpe))
    read[T](new InputStreamReader(new ByteArrayInputStream(data), StandardCharsets.UTF_8))
  }

}
Example 2
Source File: LinearRegressionActorSpec.scala, from the coral project (Apache License 2.0)

package io.coral.actors.transform

import akka.actor.{ActorRef, ActorSystem}
import akka.testkit.{TestProbe, TestActorRef, ImplicitSender, TestKit}
import io.coral.actors.CoralActorFactory
import io.coral.api.DefaultModule
import org.json4s._
import org.json4s.jackson.JsonMethods._
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}
import akka.util.Timeout
import org.json4s.native.Serialization.write
import scala.concurrent.duration._

class LinearRegressionActorSpec(_system: ActorSystem)
  extends TestKit(_system)
  with ImplicitSender
  with WordSpecLike
  with Matchers
  with BeforeAndAfterAll {

  def this() = this(ActorSystem("LinearRegressionActorSpec"))

  implicit val timeout = Timeout(100.millis)
  implicit val injector = new DefaultModule(system.settings.config)

  override def afterAll() {
    TestKit.shutdownActorSystem(system)
  }

  def createLinearRegressionActor(intercept: Double, weights: Map[String, Double]) = {
    implicit val formats = DefaultFormats
    val str =
      s"""{ "type":"linearregression",
         |"params": { "intercept": $intercept,
         |"weights": ${write(weights)}
         |}}""".stripMargin

    val createJson = parse(str).asInstanceOf[JObject]
    val props = CoralActorFactory.getProps(createJson).get
    val actorTestRef = TestActorRef[LinearRegressionActor](props)

    val probe = TestProbe()
    actorTestRef.underlyingActor.emitTargets += probe.ref
    (actorTestRef, probe)
  }

  "LinearRegressionActor" should {
    "Instantiate from companion object" in {
      val (actor, _) = createLinearRegressionActor(0, Map("salary" -> 2000))
      actor.underlyingActor.intercept should be(0)
      actor.underlyingActor.weights should be(Map("salary" -> 2000))
    }

    "process trigger data when all the features are available even with different order" in {
      val (actor, probe) = createLinearRegressionActor(0, Map("age" -> 0.2, "salary" -> 0.1))
      val message = parse(s"""{"salary": 4000, "age": 40}""").asInstanceOf[JObject]
      actor ! message
      probe.expectMsg(parse(s"""{"score": 408.0, "salary": 4000, "age": 40}"""))
    }

    "emit when score is calculated" in {
      val (actor, probe) = createLinearRegressionActor(0, Map("salary" -> 10))
      val message = parse(s"""{"salary": 2000}""").asInstanceOf[JObject]
      actor ! message
      probe.expectMsg(parse(s"""{"score": 20000.0, "salary": 2000}"""))
    }

    "not emit when keys are missing" in {
      val (actor, probe) = createLinearRegressionActor(0, Map("age" -> 0.2, "salary" -> 10))
      val message = parse(s"""{"salary": 2000}""").asInstanceOf[JObject]
      actor ! message
      probe.expectNoMsg
    }
  }
}
Example 3
Source File: PFADocument.scala, from the aardpfark project (Apache License 2.0)

package com.ibm.aardpfark.pfa.document

import com.ibm.aardpfark.pfa.dsl._
import com.ibm.aardpfark.pfa.expression.PFAExpression
import com.ibm.aardpfark.pfa.utils.Utils
import org.apache.avro.Schema
import org.json4s.native.Serialization
import org.json4s.native.Serialization.{write, writePretty}
import org.json4s.{FieldSerializer, NoTypeHints}

trait ToPFA {
  def pfa: PFADocument
}

trait HasAction {
  protected def action: PFAExpression
}

trait HasModelCell {
  protected def modelCell: NamedCell[_]
}

case class PFADocument(
  name: Option[String] = None,
  version: Option[Long] = Some(1L),
  doc: Option[String] = Some(s"Auto-generated by Aardpfark at ${Utils.getCurrentDate}"),
  metadata: Map[String, String] = Map(),
  // options,
  input: Schema,
  output: Schema,
  // begin: Seq[String] = Seq(),
  // end: Seq[String] = Seq(),
  // method: String = "map",
  action: Seq[PFAExpression],
  cells: Map[String, Cell[_]] = Map(),
  // pools
  fcns: Map[String, FunctionDef] = Map()
  // randseed
  // zero
  // merge
) {
  implicit val formats = Serialization.formats(NoTypeHints) +
    new SchemaSerializer +
    new PFAExpressionSerializer +
    new ParamSerializer +
    new FieldSerializer[Cell[_]] +
    new TreeSerializer

  def toJSON(pretty: Boolean = false) = {
    if (pretty) writePretty(this) else write(this)
  }
}
Example 4
Source File: FunctionCalls.scala, from the aardpfark project (Apache License 2.0)

package com.ibm.aardpfark.pfa.expression

import com.ibm.aardpfark.pfa.document.{PFAExpressionSerializer, ParamSerializer, SchemaSerializer}
import org.json4s.native.JsonMethods.parse
import org.json4s.native.Serialization
import org.json4s.native.Serialization.write
import org.json4s.{JDouble, JField, JInt, JObject, JString, JValue, NoTypeHints}

class FunctionCall(name: String, args: Any*) extends PFAExpression {
  import com.ibm.aardpfark.pfa.dsl._
  import org.json4s.JsonDSL._

  override def json: JValue = {
    val jArgs = args.map {
      case n: Double =>
        JDouble(n)
      case i: Int =>
        JInt(i)
      case s: String =>
        JString(s)
      case expr: PFAExpression =>
        expr.json
      case fnDef: FunctionDef =>
        implicit val formats = Serialization.formats(NoTypeHints) +
          new SchemaSerializer +
          new PFAExpressionSerializer +
          new ParamSerializer
        parse(write(fnDef))
    }
    JObject(JField(name, jArgs) :: Nil)
  }
}
Example 5
Source File: WorkflowValueResolver.scala, from the vamp project (Apache License 2.0)

package io.vamp.model.resolver

import io.vamp.common.{ Config, NamespaceProvider }
import io.vamp.common.json.{ OffsetDateTimeSerializer, SerializationFormat }
import io.vamp.common.notification.NotificationProvider
import io.vamp.common.util.TextUtil
import io.vamp.model.artifact._
import org.json4s.native.Serialization.write

trait WorkflowValueResolver extends ValueResolver with ConfigurationValueResolver with ClassLoaderValueResolver {
  this: NamespaceProvider with NotificationProvider ⇒

  private val resolversPath = "vamp.model.resolvers.workflow"

  override def resolverClasses: List[String] =
    if (Config.has(resolversPath)(namespace)()) Config.stringList(resolversPath)() else Nil

  def valueFor(workflow: Workflow)(reference: ValueReference): String = {
    resolve(
      (super[ClassLoaderValueResolver].valueForReference((workflow, None))
        orElse super[ConfigurationValueResolver].valueForReference
        orElse PartialFunction[ValueReference, String] { referenceAsPart })(reference),
      valueForWorkflow(workflow, None) orElse PartialFunction[ValueReference, String] { _ ⇒ "" }
    )
  }

  def resolveEnvironmentVariable(workflow: Workflow, data: Any): EnvironmentVariable ⇒ EnvironmentVariable = { env ⇒
    env.copy(interpolated = env.value.map { value ⇒
      resolve(
        resolve(
          value,
          super[ClassLoaderValueResolver].valueForReference((workflow, data))
            orElse super[ConfigurationValueResolver].valueForReference
            orElse PartialFunction[ValueReference, String] { referenceAsPart }
        ),
        valueForWorkflow(workflow: Workflow, data)
          orElse super[ConfigurationValueResolver].valueForReference
          orElse PartialFunction[ValueReference, String] { _ ⇒ "" }
      )
    })
  }

  override def valueForReference: PartialFunction[ValueReference, String] = PartialFunction.empty

  private def valueForWorkflow(workflow: Workflow, data: Any): PartialFunction[ValueReference, String] = {
    case LocalReference("data")      ⇒ TextUtil.encodeBase64(write(data.asInstanceOf[AnyRef])(SerializationFormat(OffsetDateTimeSerializer)))
    case LocalReference("workflow")  ⇒ workflow.name
    case LocalReference("namespace") ⇒ namespace.name
    case LocalReference(ref) if workflow.breed.isInstanceOf[DefaultBreed] ⇒
      workflow.breed.asInstanceOf[DefaultBreed].traits.find(_.name == ref).flatMap(_.value).getOrElse("")
    case NoGroupReference("workflow", "name") ⇒ workflow.name
  }
}
Example 6
Source File: PersistenceRecord.scala, from the vamp project (Apache License 2.0)

package io.vamp.persistence

import java.time.OffsetDateTime

import io.vamp.common.json.{ OffsetDateTimeSerializer, SerializationFormat }
import io.vamp.common.notification.NotificationProvider
import io.vamp.common.{ Artifact, Config, Namespace, NamespaceProvider }
import io.vamp.model.Model
import io.vamp.persistence.notification.UnknownDataFormatException
import org.json4s.Formats
import org.json4s.native.Serialization
import org.json4s.native.Serialization.write

import scala.util.Try

object PersistenceRecord {

  def apply(name: String, kind: String): PersistenceRecord =
    PersistenceRecord(Model.version, Model.uuid, OffsetDateTime.now(), name, kind, None)

  def apply(name: String, kind: String, artifact: String): PersistenceRecord =
    PersistenceRecord(Model.version, Model.uuid, OffsetDateTime.now(), name, kind, Option(artifact))
}

case class PersistenceRecord(version: String, instance: String, timestamp: OffsetDateTime, name: String, kind: String, artifact: Option[String])

abstract class PersistenceRecordTransformer(namespace: Namespace) {

  def timeDependent: Boolean = false

  def read(input: String): String

  def write(input: String): String
}

trait PersistenceRecordMarshaller {
  this: NamespaceProvider ⇒

  protected val transformersPath = "vamp.persistence.transformers.classes"

  private lazy val transformers = {
    val transformerClasses = if (Config.has(transformersPath)(namespace)()) Config.stringList(transformersPath)() else Nil
    transformerClasses.map { clazz ⇒
      Class.forName(clazz).getConstructor(classOf[Namespace]).newInstance(namespace).asInstanceOf[PersistenceRecordTransformer]
    }
  }

  lazy val timeDependent: Boolean = transformers.exists(_.timeDependent)

  def marshallRecord(record: PersistenceRecord): String = {
    val content = write(record)(SerializationFormat(OffsetDateTimeSerializer))
    transformers.foldLeft[String](content)((input, transformer) ⇒ transformer.write(input))
  }

  def unmarshallRecord(source: String): PersistenceRecord = {
    val input = transformers.foldRight[String](source)((transformer, source) ⇒ transformer.read(source))
    implicit val format: Formats = SerializationFormat(OffsetDateTimeSerializer)
    Serialization.read[PersistenceRecord](input)
  }
}

trait PersistenceDataReader extends PersistenceRecordMarshaller with PersistenceMarshaller {
  this: PersistenceApi with NamespaceProvider with NotificationProvider ⇒

  protected def dataSet(artifact: Artifact, kind: String): Artifact

  protected def dataDelete(name: String, kind: String): Unit

  protected def dataRead(data: String): PersistenceRecord = {
    val record = Try(unmarshallRecord(data)).getOrElse(throwException(UnknownDataFormatException("")))
    record.artifact match {
      case Some(content) ⇒ unmarshall(record.kind, content).map(a ⇒ dataSet(a, record.kind)).getOrElse(throwException(UnknownDataFormatException(record.kind)))
      case None          ⇒ dataDelete(record.name, record.kind)
    }
    record
  }
}
Example 7
Source File: CustomDefaults.scala, from the avro4s project (Apache License 2.0)

package com.sksamuel.avro4s

import magnolia.{SealedTrait, Subtype}
import org.json4s.native.JsonMethods.parse
import org.json4s.native.Serialization.write
import org.apache.avro.Schema
import org.apache.avro.Schema.Type
import org.json4s.DefaultFormats

import scala.collection.JavaConverters._

sealed trait CustomDefault
case class CustomUnionDefault(className: String, values: java.util.Map[String, Any]) extends CustomDefault
case class CustomUnionWithEnumDefault(parentName: String, default: String, value: String) extends CustomDefault
case class CustomEnumDefault(value: String) extends CustomDefault

object CustomDefaults {

  implicit val formats = DefaultFormats

  def customScalaEnumDefault(value: Any) = CustomEnumDefault(value.toString)

  def customDefault(p: Product, schema: Schema): CustomDefault =
    if(isEnum(p, schema.getType))
      CustomEnumDefault(trimmedClassName(p))
    else {
      if(isUnionOfEnum(schema)) {
        val enumType = schema.getTypes.asScala.filter(_.getType == Schema.Type.ENUM).head
        CustomUnionWithEnumDefault(enumType.getName, trimmedClassName(p), p.toString)
      } else
        CustomUnionDefault(trimmedClassName(p), parse(write(p)).extract[Map[String, Any]].map {
          case (name, b: BigInt) if b.isValidInt => name -> b.intValue
          case (name, b: BigInt) if b.isValidLong => name -> b.longValue
          case (name, z) if schema.getType == Type.UNION =>
            name -> schema.getTypes.asScala.find(_.getName == trimmedClassName(p)).map(_.getField(name).schema())
              .map(DefaultResolver(z, _)).getOrElse(z)
          case (name, z) => name -> DefaultResolver(z, schema.getField(name).schema())
        }.asJava)
    }

  def isUnionOfEnum(schema: Schema) =
    schema.getType == Schema.Type.UNION && schema.getTypes.asScala.map(_.getType).contains(Schema.Type.ENUM)

  def sealedTraitEnumDefaultValue[T](ctx: SealedTrait[SchemaFor, T]) = {
    val defaultExtractor = new AnnotationExtractors(ctx.annotations)
    defaultExtractor.enumDefault.flatMap { default =>
      ctx.subtypes.flatMap { st: Subtype[SchemaFor, T] =>
        if(st.typeName.short == default.toString)
          Option(st.typeName.short)
        else
          None
      }.headOption
    }
  }

  def isScalaEnumeration(value: Any) =
    value.getClass.getCanonicalName == "scala.Enumeration.Val"

  private def isEnum(product: Product, schemaType: Schema.Type) =
    product.productArity == 0 && schemaType == Schema.Type.ENUM

  private def trimmedClassName(p: Product) = trimDollar(p.getClass.getSimpleName)

  private def trimDollar(s: String) = if(s.endsWith("$")) s.dropRight(1) else s
}
Example 8
Source File: WebClient.scala, from the twitter-stream-ml project (GNU General Public License v3.0)

package com.giorgioinf.twtml.web

import org.json4s._
import org.json4s.native.Serialization
import org.json4s.native.Serialization.{write, read}
import scala.reflect.Manifest
import scalaj.http.{Http, HttpRequest}

class WebClient(val server: String) {

  implicit val formats = Serialization.formats(ShortTypeHints(List(classOf[Config], classOf[Stats])))

  def this() = this("http://localhost:8888")

  private def request(kind: String = ""): HttpRequest = {
    Http(server + "/api" + kind)
      .header("content-type", "application/json")
      .header("accept", "application/json")
  }

  private def post(data: TypeData) {
    val json = write(data)
    request().postData(json).asString
  }

  private def get[A: Manifest](kind: String): A = {
    val json = request(kind).asString.body
    read[A](json)
  }

  def config(id: String, host: String, viz: List[String]) = {
    post(Config(id, host, viz))
  }

  def stats(count: Long, batch: Long, mse: Long, realStddev: Long, predStddev: Long) = {
    post(Stats(count, batch, mse, realStddev, predStddev))
  }

  def config(): Config = {
    get[Config]("/config")
  }

  def stats(): Stats = {
    get[Stats]("/stats")
  }
}

object WebClient {
  def apply(host: String = ""): WebClient = {
    host match {
      case "" => new WebClient()
      case _  => new WebClient(host)
    }
  }
}
Example 9
Source File: ApiHandler.scala, from the twitter-stream-ml project (GNU General Public License v3.0)

package com.giorgioinf.twtml.web

import org.mashupbots.socko.events.{HttpRequestEvent, WebSocketFrameEvent}
import org.mashupbots.socko.infrastructure.Logger
import akka.actor.Actor
import org.json4s.DefaultFormats
import org.json4s.native.Serialization.write

// command events
case class PostHandler(event: HttpRequestEvent)
case class GetConfigHandler(event: HttpRequestEvent)
case class GetStatsHandler(event: HttpRequestEvent)
case class WsFrameHandler(event: WebSocketFrameEvent)
case class WsStartHandler(webSocketId: String)
//case class HomePage(event: HttpRequestEvent)
//case class ShowQueryStringDataPage(event: HttpRequestEvent)
//case class ShowPostDataPage(event: HttpRequestEvent)

class ApiHandler extends Logger with Actor {

  implicit val formats = DefaultFormats

  val ok = write(("status" -> "OK"))

  def response(event: HttpRequestEvent, json: String) = {
    event.response.contentType = "application/json"
    event.response.write(json)
    context.stop(self)
  }

  def receive = {
    case GetConfigHandler(event) => {
      val json = ApiCache.config
      log.debug("http - get config {}", json)
      response(event, json)
    }
    case GetStatsHandler(event) => {
      val json = ApiCache.stats
      log.debug("http - get stats {}", json)
      response(event, json)
    }
    case PostHandler(event) => {
      val json = event.request.content.toString()
      log.debug("http - post data {}", json)
      ApiCache.cache(json)
      response(event, ok)
      log.debug("websocket - send all data {}", json)
      Server.web.webSocketConnections.writeText(json)
    }
    case WsFrameHandler(event) => {
      val json = event.readText
      log.debug("websocket - {} - read data {}", Array(event.webSocketId, json))
      ApiCache.cache(json)
      log.debug("websocket - send all data {}", json)
      Server.web.webSocketConnections.writeText(json)
      context.stop(self)
    }
    case WsStartHandler(webSocketId) => {
      val json = ApiCache.config
      log.debug("websocket - {} - connected, get config {}", Array(webSocketId, json))
      Server.web.webSocketConnections.writeText(json, webSocketId)
      context.stop(self)
    }
  }
}
Example 10
Source File: ApiCache.scala, from the twitter-stream-ml project (GNU General Public License v3.0)

package com.giorgioinf.twtml.web

import org.json4s._
import org.json4s.native.Serialization
import org.json4s.native.Serialization.{write, read}
import org.mashupbots.socko.infrastructure.Logger
import scala.io.Source
import scala.tools.nsc.io.File
import scala.util.{Properties, Try}

object ApiCache extends Logger {

  private val backupFile = Properties.tmpDir + "/twtml-web.json"

  private var typeStats = Stats()
  private var typeConfig = Config()

  implicit val formats = Serialization.formats(
    ShortTypeHints(List(classOf[Config], classOf[Stats])))

  private def cacheStats(data: Stats) = {
    log.debug("caching stats")
    typeStats = data
  }

  private def cacheConfig(data: Config) = {
    log.debug("caching config")
    typeConfig = data
    backup
  }

  def config(): String = {
    write(typeConfig)
  }

  def stats(): String = {
    write(typeStats)
  }

  def cache(json: String) = {
    val data = read[TypeData](json)
    data match {
      case stat: Stats  => cacheStats(stat)
      case conf: Config => cacheConfig(conf)
      case _            => log.error("json not recognized: {}", json)
    }
  }

  def restore() = {
    Try(cache(Source.fromFile(backupFile).mkString))
  }

  def backup() = {
    File(backupFile).writeAll(config)
  }
}
Example 11
Source File: DeleteDefinition.scala, from the algoliasearch-client-scala project (MIT License)

package algolia.definitions

import algolia._
import algolia.http.HttpPayload
import algolia.objects.{Query, RequestOptions}
import org.json4s.Formats
import org.json4s.native.Serialization.write

case class DeleteObjectDefinition(
    index: Option[String] = None,
    oid: Option[String] = None,
    requestOptions: Option[RequestOptions] = None
)(implicit val formats: Formats)
    extends Definition {

  type T = DeleteObjectDefinition

  def from(ind: String): DeleteObjectDefinition = copy(index = Some(ind))

  @deprecated("use objectFromIndex", "1.30.0")
  def index(ind: String): DeleteObjectDefinition = copy(index = Some(ind))

  @deprecated("use objectFromIndex", "1.30.0")
  def objectId(objectId: String): DeleteObjectDefinition =
    copy(oid = Some(objectId))

  def objectIds(objectIds: Iterable[String]): BatchDefinition =
    BatchDefinition(objectIds.map { oid =>
      DeleteObjectDefinition(index, Some(oid))
    })

  def by(query: Query): DeleteByDefinition =
    DeleteByDefinition(index, query, requestOptions)

  override def options(requestOptions: RequestOptions): DeleteObjectDefinition =
    copy(requestOptions = Some(requestOptions))

  override private[algolia] def build(): HttpPayload =
    HttpPayload(
      http.DELETE,
      Seq("1", "indexes") ++ index ++ oid,
      isSearch = false,
      requestOptions = requestOptions
    )
}

case class DeleteIndexDefinition(
    index: String,
    requestOptions: Option[RequestOptions] = None
) extends Definition {

  type T = DeleteIndexDefinition

  override def options(requestOptions: RequestOptions): DeleteIndexDefinition =
    copy(requestOptions = Some(requestOptions))

  override private[algolia] def build(): HttpPayload =
    HttpPayload(
      http.DELETE,
      Seq("1", "indexes", index),
      isSearch = false,
      requestOptions = requestOptions
    )
}

case class DeleteByDefinition(
    index: Option[String],
    query: Query,
    requestOptions: Option[RequestOptions] = None
)(implicit val formats: Formats)
    extends Definition {

  type T = DeleteByDefinition

  override private[algolia] def build(): HttpPayload = {
    val body = Map("params" -> query.toParam)

    HttpPayload(
      http.POST,
      Seq("1", "indexes") ++ index ++ Some("deleteByQuery"),
      isSearch = false,
      body = Some(write(body)),
      requestOptions = requestOptions
    )
  }

  override def options(requestOptions: RequestOptions): DeleteByDefinition =
    copy(requestOptions = Some(requestOptions))
}
Example 12
Source File: SaveLiveData.scala, from the scaladex project (BSD 3-Clause "New" or "Revised" License)

package ch.epfl.scala.index
package data
package elastic

import model._
import project._

import org.json4s._
import org.json4s.native.Serialization.{read, write, writePretty}
import org.json4s.native.parseJson

import java.nio.file._
import java.nio.charset.StandardCharsets

import org.slf4j.LoggerFactory

import scala.concurrent.{ExecutionContext, Future}

// this allows us to save project as json object sorted by keys
case class LiveProjects(projects: Map[Project.Reference, ProjectForm])

object LiveProjectsSerializer
    extends CustomSerializer[LiveProjects](
      format =>
        (
          {
            case JObject(obj) => {
              implicit val formats = DefaultFormats
              LiveProjects(
                obj.map {
                  case (k, v) =>
                    val List(organization, repository) = k.split('/').toList
                    (Project.Reference(organization, repository), v.extract[ProjectForm])
                }.toMap
              )
            }
          }, {
            case l: LiveProjects =>
              JObject(
                l.projects.toList
                  .sortBy {
                    case (Project.Reference(organization, repository), _) =>
                      (organization, repository)
                  }
                  .map {
                    case (Project.Reference(organization, repository), v) =>
                      import ch.epfl.scala.index.search.SearchProtocol._
                      JField(s"$organization/$repository", parseJson(write(v)))
                  }
              )
          }
        )
    )

trait LiveProjectsProtocol {
  implicit val formats: Formats = DefaultFormats ++ Seq(LiveProjectsSerializer)
  implicit val serialization: Serialization = native.Serialization
}

object SaveLiveData extends LiveProjectsProtocol {

  val logger = LoggerFactory.getLogger(getClass)

  def storedProjects(paths: DataPaths): Map[Project.Reference, ProjectForm] =
    read[LiveProjects](
      Files
        .readAllLines(paths.liveProjects)
        .toArray
        .mkString("")
    ).projects

  def saveProjects(paths: DataPaths, live: Map[Project.Reference, ProjectForm]): Unit = {
    val projects = LiveProjects(live)

    val liveDir = paths.liveProjects.getParent
    if (!Files.isDirectory(liveDir)) {
      Files.createDirectory(liveDir)
    }

    Files.write(
      paths.liveProjects,
      writePretty(projects).getBytes(StandardCharsets.UTF_8)
    )
  }

  // Note: we use a future here just to catch exceptions. Our code is blocking, though.
  def saveProject(project: Project, paths: DataPaths)(implicit ec: ExecutionContext): Future[_] =
    Future {
      concurrent.blocking {
        val stored = SaveLiveData.storedProjects(paths)
        val newProject = (project.reference -> ProjectForm(project))

        logger.info(s"Writing projects at ${paths.liveProjects}")

        saveProjects(paths, stored + newProject)
      }
    }
}
Example 13
Source File: SbtPluginsData.scala, from the scaladex project (BSD 3-Clause "New" or "Revised" License)

package ch.epfl.scala.index.data.bintray

import java.nio.file.{Files, Path}

import ch.epfl.scala.index.data.LocalRepository
import ch.epfl.scala.index.data.LocalRepository.BintraySbtPlugins
import ch.epfl.scala.index.data.maven._
import jawn.support.json4s.Parser
import org.apache.ivy.core.module.descriptor.ModuleDescriptor
import org.joda.time.DateTime
import org.json4s.native.Serialization.write

case class SbtPluginsData(ivysData: Path) extends BintrayProtocol {

  // NOTE: this snippet is truncated in the original listing; it resumes in the
  // middle of a method that builds an SbtPluginReleaseModel.
        None,
        exclusions = dependency.getAllExcludeRules
          .map(_.getId.getModuleId)
          .map(rule => Exclusion(rule.getOrganisation, rule.getName))
          .toSet
      )
    },
    sbtPluginTarget = Some(
      SbtPluginTarget(scalaVersion, sbtVersion)
    )
  )

    val publicationDate = new DateTime(descriptor.getPublicationDate).toString

    new SbtPluginReleaseModel(releaseModel, publicationDate, sha1)
  }
}
Example 14
Source File: TestDriver.scala, from the ddd-leaven-akka-v2 project (MIT License)

package ecommerce.tests.e2e

import io.restassured.RestAssured._
import io.restassured.builder.RequestSpecBuilder
import io.restassured.config.HttpClientConfig
import io.restassured.config.HttpClientConfig.HttpClientFactory
import io.restassured.filter.log.LogDetail
import io.restassured.module.scala.RestAssuredSupport.AddThenToResponse
import io.restassured.response.ValidatableResponse
import io.restassured.specification.RequestSpecification
import org.apache.http.client.HttpClient
import org.apache.http.impl.client.SystemDefaultHttpClient
import org.apache.http.params.HttpConnectionParams
import org.json4s.Formats
import org.json4s.native.Serialization.write
import org.scalatest.{Matchers, WordSpecLike}
import pl.newicom.dddd.aggregate.Command

trait TestDriver extends WordSpecLike with Matchers {

  val clientConfig: HttpClientConfig = config.getHttpClientConfig
    .httpClientFactory(new HttpClientFactory() {
      override def createHttpClient: HttpClient = {
        val rv = new SystemDefaultHttpClient
        val httpParams = rv.getParams
        HttpConnectionParams.setConnectionTimeout(httpParams, 2 * 1000) //Wait 5s for a connection
        HttpConnectionParams.setSoTimeout(httpParams, 60 * 1000) // Default session is 60s
        rv
      }
    })
    .reuseHttpClientInstance()

  def using[R](endpoint: EndpointConfig)(testBody: RequestSpecBuilder => R): R = {
    testBody(
      new RequestSpecBuilder()
        .setConfig(config.httpClient(clientConfig))
        .setBaseUri(endpoint.toUrl)
        .setContentType("application/json")
        .log(LogDetail.ALL)
    )
  }

  def POST(implicit builder: RequestSpecBuilder): POSTOps = new POSTOps(builder.build())

  def GET(implicit builder: RequestSpecBuilder): GETOps = new GETOps(builder.build())

  class POSTOps(reqSpec: RequestSpecification) {

    def command(c: Command)(implicit formats: Formats): ValidatableResponse =
      given(reqSpec)
        .body(write(c))
        .header("Command-Type", c.getClass.getName)
        .post()
        .Then()
        .log().all()
        .statusCode(200)
  }

  class GETOps(reqSpec: RequestSpecification) {

    def /(subPath: String): ValidatableResponse =
      given(reqSpec)
        .get(subPath)
        .Then()
        .log().all()
        .statusCode(200)
  }
}
Example 15
Source File: L8-3-6-7DataFrameCreation.scala, from the prosparkstreaming project (Apache License 2.0)

package org.apress.prospark

import scala.reflect.runtime.universe

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.functions.desc
import org.apache.spark.streaming.Seconds
import org.apache.spark.streaming.StreamingContext
import org.json4s.native.Serialization.write
import org.json4s.DefaultFormats

object DataframeCreationApp {

  case class Cdr(squareId: Int, timeInterval: Long, countryCode: Int,
    smsInActivity: Float, smsOutActivity: Float, callInActivity: Float,
    callOutActivity: Float, internetTrafficActivity: Float)

  def main(args: Array[String]) {
    if (args.length != 4) {
      System.err.println(
        "Usage: CdrDataframeApp <appname> <batchInterval> <hostname> <port>")
      System.exit(1)
    }
    val Seq(appName, batchInterval, hostname, port) = args.toSeq

    val conf = new SparkConf()
      .setAppName(appName)
      .setJars(SparkContext.jarOfClass(this.getClass).toSeq)

    val ssc = new StreamingContext(conf, Seconds(batchInterval.toInt))

    val sqlC = new SQLContext(ssc.sparkContext)
    import sqlC.implicits._

    val cdrStream = ssc.socketTextStream(hostname, port.toInt)
      .map(_.split("\\t", -1))
      .foreachRDD(rdd => {
        //val cdrs = sqlC.createDataFrame(seqToCdr(rdd))
        //val cdrs = sqlC.createDataFrame(seqToCdr(rdd).collect())
        //val cdrs = seqToCdr(rdd).toDF()
        val cdrsJson = seqToCdr(rdd).map(r => {
          implicit val formats = DefaultFormats
          write(r)
        })
        val cdrs = sqlC.read.json(cdrsJson)

        cdrs.groupBy("countryCode").count().orderBy(desc("count")).show(5)
      })

    ssc.start()
    ssc.awaitTermination()
  }

  def seqToCdr(rdd: RDD[Array[String]]): RDD[Cdr] = {
    rdd.map(c => c.map(f => f match {
      case x if x.isEmpty() => "0"
      case x => x
    })).map(c => Cdr(c(0).toInt, c(1).toLong, c(2).toInt, c(3).toFloat,
      c(4).toFloat, c(5).toFloat, c(6).toFloat, c(7).toFloat))
  }
}
Example 16
Source File: InsightsEventDefinition.scala, from the algoliasearch-client-scala project (MIT License)

package algolia.definitions

import algolia.http.{HttpPayload, POST}
import algolia.inputs.InsightsEvent
import algolia.objects.RequestOptions
import org.json4s.Formats
import org.json4s.native.Serialization.write

case class InsightsEventDefinition(
    events: Iterable[InsightsEvent],
    requestOptions: Option[RequestOptions] = None
)(implicit val formats: Formats)
    extends Definition {

  override type T = InsightsEventDefinition

  override def options(
      requestOptions: RequestOptions
  ): InsightsEventDefinition =
    copy(requestOptions = Some(requestOptions))

  override private[algolia] def build(): HttpPayload = {
    val body = Map("events" -> events)

    HttpPayload(
      POST,
      Seq("1", "events"),
      body = Some(write(body)),
      isSearch = false,
      isInsights = true,
      requestOptions = requestOptions
    )
  }
}
Example 17
Source File: IndexingDefinition.scala, from the algoliasearch-client-scala project (MIT License)

package algolia.definitions

import algolia._
import algolia.http.HttpPayload
import algolia.objects.RequestOptions
import algolia.responses.TaskIndexing
import org.json4s.Formats
import org.json4s.native.Serialization.write

import scala.concurrent.{ExecutionContext, Future}

case class IndexingDefinition(
    index: String,
    objectId: Option[String] = None,
    obj: Option[AnyRef] = None,
    requestOptions: Option[RequestOptions] = None
)(implicit val formats: Formats)
    extends Definition {

  type T = IndexingDefinition

  override def options(requestOptions: RequestOptions): IndexingDefinition =
    copy(requestOptions = Some(requestOptions))

  def objects(objectsWithIds: Map[String, AnyRef]): IndexingBatchDefinition =
    IndexingBatchDefinition(index, objectsWithIds.map {
      case (oid, o) => IndexingDefinition(index, Some(oid), Some(o))
    })

  def objects(objects: Iterable[AnyRef]): IndexingBatchDefinition =
    IndexingBatchDefinition(index, objects.map { obj =>
      copy(index = index, obj = Some(obj))
    })

  def objectId(objectId: String): IndexingDefinition =
    copy(objectId = Some(objectId))

  def `object`(objectId: String, obj: AnyRef): IndexingDefinition =
    copy(objectId = Some(objectId), obj = Some(obj))

  def `object`(obj: AnyRef): IndexingDefinition =
    copy(obj = Some(obj))

  override private[algolia] def build(): HttpPayload = {
    val body: Option[String] = obj.map(o => write(o))

    val verb = objectId match {
      case Some(_) => http.PUT
      case None    => http.POST
    }

    HttpPayload(
      verb,
      Seq("1", "indexes", index) ++ objectId,
      body = body,
      isSearch = false,
      requestOptions = requestOptions
    )
  }
}

trait IndexingDsl {

  implicit val formats: Formats

  case object index {
    def into(index: String): IndexingDefinition = IndexingDefinition(index)
  }

  implicit object IndexingDefinitionExecutable
      extends Executable[IndexingDefinition, TaskIndexing] {
    override def apply(client: AlgoliaClient, query: IndexingDefinition)(
        implicit executor: ExecutionContext
    ): Future[TaskIndexing] = {
      client.request[TaskIndexing](query.build())
    }
  }
}
Example 18
Source File: StrategyDefinition.scala, from the algoliasearch-client-scala project (MIT License)

package algolia.definitions

import algolia.http.{GET, HttpPayload, POST}
import algolia.objects.Strategy
import algolia.objects.RequestOptions
import org.json4s.Formats
import org.json4s.native.Serialization.write

case class GetPersonalizationStrategyDefinition(
    requestOptions: Option[RequestOptions] = None
)(implicit val formats: Formats)
    extends Definition {

  override type T = GetPersonalizationStrategyDefinition

  override def options(
      requestOptions: RequestOptions
  ): GetPersonalizationStrategyDefinition =
    copy(requestOptions = Some(requestOptions))

  override private[algolia] def build(): HttpPayload = {
    HttpPayload(
      GET,
      Seq("1", "recommendation", "personalization", "strategy"),
      isSearch = true,
      requestOptions = requestOptions
    )
  }
}

case class SetPersonalizationStrategyDefinition(
    s: Strategy,
    requestOptions: Option[RequestOptions] = None
)(implicit val formats: Formats)
    extends Definition {

  override type T = SetPersonalizationStrategyDefinition

  override def options(
      requestOptions: RequestOptions
  ): SetPersonalizationStrategyDefinition =
    copy(requestOptions = Some(requestOptions))

  override private[algolia] def build(): HttpPayload = {
    HttpPayload(
      POST,
      Seq("1", "recommendation", "personalization", "strategy"),
      body = Some(write(s)),
      isSearch = false,
      requestOptions = requestOptions
    )
  }
}
Example 19
Source File: SearchDefinition.scala, from the algoliasearch-client-scala project (MIT License)

package algolia.definitions

import algolia.http.{HttpPayload, POST}
import algolia.objects.{Query, RequestOptions}
import org.json4s.Formats
import org.json4s.native.Serialization.write

case class SearchDefinition(
    index: String,
    query: Option[Query] = None,
    requestOptions: Option[RequestOptions] = None
)(implicit val formats: Formats)
    extends Definition {

  type T = SearchDefinition

  def facet(facetName: String) = SearchFacetDefinition(index, facetName, "")

  def query(q: Query): SearchDefinition = copy(query = Some(q))

  override def options(requestOptions: RequestOptions): SearchDefinition =
    copy(requestOptions = Some(requestOptions))

  override private[algolia] def build(): HttpPayload = {
    val body = Map("params" -> query.map(_.toParam))

    HttpPayload(
      POST,
      Seq("1", "indexes", index, "query"),
      body = Some(write(body)),
      isSearch = true,
      requestOptions = requestOptions
    )
  }
}

case class SearchFacetDefinition(
    index: String,
    facetName: String,
    values: String,
    query: Query = Query(),
    requestOptions: Option[RequestOptions] = None
)(implicit val formats: Formats)
    extends Definition {

  type T = SearchFacetDefinition

  def values(facetQuery: String): SearchFacetDefinition =
    copy(values = facetQuery)

  def query(q: Query): SearchFacetDefinition = copy(query = q)

  override def options(requestOptions: RequestOptions): SearchFacetDefinition =
    copy(requestOptions = Some(requestOptions))

  override private[algolia] def build(): HttpPayload = {
    val body = Map("params" -> query.copy(facetQuery = Some(values)).toParam)

    HttpPayload(
      POST,
      Seq("1", "indexes", index, "facets", facetName, "query"),
      body = Some(write(body)),
      isSearch = true,
      requestOptions = requestOptions
    )
  }
}
Example 20
Source File: RecommendationDefinition.scala, from the algoliasearch-client-scala project (MIT License)

package algolia.definitions

import algolia.http.{GET, HttpPayload, POST}
import algolia.objects.{RequestOptions, SetStrategyRequest}
import org.json4s.Formats
import org.json4s.native.Serialization.write

case class GetRecommendationStrategyDefinition(
    requestOptions: Option[RequestOptions] = None
)(implicit val formats: Formats)
    extends Definition {

  override type T = GetRecommendationStrategyDefinition

  override def options(
      requestOptions: RequestOptions
  ): GetRecommendationStrategyDefinition =
    copy(requestOptions = Some(requestOptions))

  override private[algolia] def build(): HttpPayload = {
    HttpPayload(
      GET,
      Seq("1", "strategies", "personalization"),
      isSearch = false,
      isRecommendation = true,
      requestOptions = requestOptions
    )
  }
}

case class SetRecommendationStrategyDefinition(
    s: SetStrategyRequest,
    requestOptions: Option[RequestOptions] = None
)(implicit val formats: Formats)
    extends Definition {

  override type T = SetRecommendationStrategyDefinition

  override def options(
      requestOptions: RequestOptions
  ): SetRecommendationStrategyDefinition =
    copy(requestOptions = Some(requestOptions))

  override private[algolia] def build(): HttpPayload = {
    HttpPayload(
      POST,
      Seq("1", "strategies", "personalization"),
      body = Some(write(s)),
      isSearch = false,
      isRecommendation = true,
      requestOptions = requestOptions
    )
  }
}