org.json4s.Formats Scala Examples
The following examples show how to use org.json4s.Formats.
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example.
Example 1
Source File: JsonSerializer.scala From akka-serialization-test with Apache License 2.0 | 5 votes |
package com.github.dnvriend.serializer.json import akka.serialization.Serializer import com.github.dnvriend.domain.OrderDomain import org.json4s.native.JsonMethods._ import org.json4s.native.Serialization import org.json4s.native.Serialization._ import org.json4s.{ DefaultFormats, Formats, NoTypeHints } case class EventWrapper(manifest: String, payload: String) class JsonSerializer extends Serializer { implicit val formats: Formats = DefaultFormats + OrderDomain.DirectDebitTypeSerializer override def identifier: Int = Int.MaxValue override def includeManifest: Boolean = true override def toBinary(o: AnyRef): Array[Byte] = write(EventWrapper(o.getClass.getName, write(o))).getBytes() override def fromBinary(bytes: Array[Byte], manifest: Option[Class[_]]): AnyRef = { val wrapper: EventWrapper = parse(new String(bytes)).extract[EventWrapper] implicit val mf = Manifest.classType(Class.forName(wrapper.manifest)) read(wrapper.payload) } }
Example 2
Source File: RPCProduct.scala From Linkis with Apache License 2.0 | 5 votes |
package com.webank.wedatasphere.linkis.rpc.transform import java.lang.reflect.{ParameterizedType, Type} import java.util import com.webank.wedatasphere.linkis.DataWorkCloudApplication import com.webank.wedatasphere.linkis.common.utils.Logging import com.webank.wedatasphere.linkis.rpc.exception.DWCURIException import com.webank.wedatasphere.linkis.server.{BDPJettyServerHelper, EXCEPTION_MSG, Message} import org.apache.commons.lang.ClassUtils import org.json4s.jackson.Serialization import org.json4s.{DefaultFormats, Formats, Serializer} import scala.collection.JavaConversions private[linkis] trait RPCProduct { def toMessage(t: Any): Message def notFound(): Message def ok(): Message } private[linkis] object RPCProduct extends Logging { private[rpc] val IS_SCALA_CLASS = "rpc_is_scala_class" private[rpc] val CLASS_VALUE = "rpc_object_class" private[rpc] val OBJECT_VALUE = "rpc_object_value" private[rpc] implicit var formats: Formats = DefaultFormats + JavaCollectionSerializer + JavaMapSerializer private var serializerClasses: List[Class[_]] = List.empty private val rpcProduct: RPCProduct = new RPCProduct { private val rpcFormats = DataWorkCloudApplication.getApplicationContext.getBeansOfType(classOf[RPCFormats]) if(rpcFormats != null && !rpcFormats.isEmpty) { val serializers = JavaConversions.mapAsScalaMap(rpcFormats).map(_._2.getSerializers).toArray.flatMap(_.iterator) setFormats(serializers) } override def toMessage(t: Any): Message = { if(t == null) throw new DWCURIException(10001, "The transmitted bean is Null.(传输的bean为Null.)") val message = Message.ok("RPC Message.") if(isScalaClass(t)){ message.data(IS_SCALA_CLASS, "true") message.data(OBJECT_VALUE, Serialization.write(t.asInstanceOf[AnyRef])) } else { message.data(IS_SCALA_CLASS, "false") message.data(OBJECT_VALUE, BDPJettyServerHelper.gson.toJson(t)) } message.setMethod("/rpc/message") message.data(CLASS_VALUE, t.getClass.getName) } override def notFound(): Message = { val message = Message.error("RPC Message.") message.setMethod("/rpc/message") message.data(EXCEPTION_MSG, new DWCURIException(10000, "The service does not exist for the available Receiver.(服务不存在可用的Receiver.)").toMap) } override def ok(): Message = { val message = Message.ok("RPC Message.") message.setMethod("/rpc/message") message } } private[rpc] def setFormats(serializer: Array[Serializer[_]]): Unit ={ this.formats = (serializer :+ JavaCollectionSerializer :+ JavaMapSerializer).foldLeft(DefaultFormats.asInstanceOf[Formats])(_ + _) serializerClasses = formats.customSerializers.map(s => getActualTypeClass(s.getClass.getGenericSuperclass)) .filter(_ != null) ++: List(classOf[util.List[_]], classOf[util.Map[_, _]]) info("RPC Serializers: " + this.formats.customSerializers.map(_.getClass.getSimpleName) + ", serializerClasses: " + "" + serializerClasses) } private def getActualTypeClass(classType: Type): Class[_] = classType match { case p: ParameterizedType => val params = p.getActualTypeArguments if(params == null || params.isEmpty) null else getActualTypeClass(params(0)) case c: Class[_] => c case _ => null } private[rpc] def isScalaClass(obj: Any): Boolean = (obj.isInstanceOf[Product] && obj.isInstanceOf[Serializable]) || serializerClasses.exists(ClassUtils.isAssignable(obj.getClass, _)) || obj.getClass.getName.startsWith("scala.") private[rpc] def getSerializableScalaClass(clazz: Class[_]): Class[_] = serializerClasses.find(ClassUtils.isAssignable(clazz, _)).getOrElse(clazz) def getRPCProduct: RPCProduct = rpcProduct }
Example 3
Source File: SynonymsDsl.scala From algoliasearch-client-scala with MIT License | 5 votes |
package algolia.dsl import algolia.definitions._ import algolia.objects.{AbstractSynonym, Rule} import algolia.responses.{SearchSynonymResult, SynonymTask} import algolia.{AlgoliaClient, Executable} import org.json4s.Formats import scala.concurrent.{ExecutionContext, Future} trait SynonymsDsl { implicit val formats: Formats implicit object GetSynonymDefinitionExecutable extends Executable[GetSynonymDefinition, AbstractSynonym] { override def apply(client: AlgoliaClient, query: GetSynonymDefinition)( implicit executor: ExecutionContext ): Future[AbstractSynonym] = { client.request[AbstractSynonym](query.build()) } } implicit object DeleteSynonymDefinitionExecutable extends Executable[DeleteSynonymDefinition, SynonymTask] { override def apply(client: AlgoliaClient, query: DeleteSynonymDefinition)( implicit executor: ExecutionContext ): Future[SynonymTask] = { client.request[SynonymTask](query.build()) } } implicit object ClearSynonymsDefinitionExecutable extends Executable[ClearSynonymsDefinition, SynonymTask] { override def apply(client: AlgoliaClient, query: ClearSynonymsDefinition)( implicit executor: ExecutionContext ): Future[SynonymTask] = { client.request[SynonymTask](query.build()) } } implicit object SaveSynonymDefinitionExecutable extends Executable[SaveSynonymDefinition, SynonymTask] { override def apply(client: AlgoliaClient, query: SaveSynonymDefinition)( implicit executor: ExecutionContext ): Future[SynonymTask] = { client.request[SynonymTask](query.build()) } } implicit object BatchSynonymsDefinitionExecutable extends Executable[BatchSynonymsDefinition, SynonymTask] { override def apply(client: AlgoliaClient, query: BatchSynonymsDefinition)( implicit executor: ExecutionContext ): Future[SynonymTask] = { client.request[SynonymTask](query.build()) } } implicit object SearchSynonymsDefinitionExecutable extends Executable[SearchSynonymsDefinition, SearchSynonymResult] { override def apply(client: AlgoliaClient, query: SearchSynonymsDefinition)( implicit executor: ExecutionContext ): Future[SearchSynonymResult] = { client.request[SearchSynonymResult](query.build()) } } }
Example 4
Source File: ClearDsl.scala From algoliasearch-client-scala with MIT License | 5 votes |
package algolia.dsl import algolia.AlgoliaDsl.Of import algolia.definitions.{ ClearIndexDefinition, ClearRulesDefinition, ClearSynonymsDefinition } import algolia.responses.Task import algolia.{AlgoliaClient, Executable} import org.json4s.Formats import scala.concurrent.{ExecutionContext, Future} trait ClearDsl { implicit val formats: Formats case object clear { def index(index: String): ClearIndexDefinition = ClearIndexDefinition(index) def synonyms(of: Of): ClearSynonymsDefinition = ClearSynonymsDefinition() def rules(of: Of): ClearRulesDefinition = ClearRulesDefinition() } implicit object ClearIndexDefinitionExecutable extends Executable[ClearIndexDefinition, Task] { override def apply(client: AlgoliaClient, query: ClearIndexDefinition)( implicit executor: ExecutionContext ): Future[Task] = { client.request[Task](query.build()) } } }
Example 5
Source File: HasDsl.scala From algoliasearch-client-scala with MIT License | 5 votes |
package algolia.dsl import algolia.definitions.HadPendingMappingsDefinition import algolia.responses.HasPendingMappings import org.json4s.Formats import algolia.{AlgoliaClient, Executable} import scala.concurrent.{ExecutionContext, Future} trait HasDsl { implicit val formats: Formats case object has { def pendingMappings(pending: Boolean) = HadPendingMappingsDefinition(pending) def pendingMappings() = HadPendingMappingsDefinition() } implicit object HadPendingMappingsDefinitionExecutable extends Executable[HadPendingMappingsDefinition, HasPendingMappings] { override def apply( client: AlgoliaClient, query: HadPendingMappingsDefinition )(implicit executor: ExecutionContext): Future[HasPendingMappings] = { client.request[HasPendingMappings](query.build()) } } }
Example 6
Source File: KeyDefinitionDsl.scala From algoliasearch-client-scala with MIT License | 5 votes |
package algolia.dsl import algolia.definitions._ import algolia.objects.ApiKey import algolia.responses.{AllKeys, CreateUpdateKey, DeleteKey} import algolia.{AlgoliaClient, Executable} import org.json4s.Formats import scala.concurrent.{ExecutionContext, Future} trait KeyDefinitionDsl { implicit val formats: Formats implicit object GetApiKeyDefinitionExecutable extends Executable[GetKeyDefinition, ApiKey] { override def apply(client: AlgoliaClient, query: GetKeyDefinition)( implicit executor: ExecutionContext ): Future[ApiKey] = client.request[ApiKey](query.build()) } implicit object AddApiKeyDefinitionExecutable extends Executable[AddKeyDefinition, CreateUpdateKey] { override def apply(client: AlgoliaClient, query: AddKeyDefinition)( implicit executor: ExecutionContext ): Future[CreateUpdateKey] = client.request[CreateUpdateKey](query.build()) } implicit object DeleteApiKeyDefinitionExecutable extends Executable[DeleteKeyDefinition, DeleteKey] { override def apply(client: AlgoliaClient, query: DeleteKeyDefinition)( implicit executor: ExecutionContext ): Future[DeleteKey] = client.request[DeleteKey](query.build()) } implicit object UpdateApiKeyDefinitionExecutable extends Executable[UpdateKeyDefinition, CreateUpdateKey] { override def apply(client: AlgoliaClient, query: UpdateKeyDefinition)( implicit executor: ExecutionContext ): Future[CreateUpdateKey] = client.request[CreateUpdateKey](query.build()) } implicit object GetAllApiKeyDefinitionExecutable extends Executable[ListKeysDefinition, AllKeys] { override def apply(client: AlgoliaClient, query: ListKeysDefinition)( implicit executor: ExecutionContext ): Future[AllKeys] = client.request[AllKeys](query.build()) } }
Example 7
Source File: SetDsl.scala From algoliasearch-client-scala with MIT License | 5 votes |
package algolia.dsl import algolia.definitions.{ SetPersonalizationStrategyDefinition, SetRecommendationStrategyDefinition } import algolia.objects.{SetStrategyRequest, Strategy} import algolia.responses.{SetStrategyResponse, SetStrategyResult} import algolia.{AlgoliaClient, Executable} import org.json4s.Formats import scala.concurrent.{ExecutionContext, Future} trait SetDsl { implicit val formats: Formats case object set { @deprecated( "Method is deprecated, please use personalizationRecommendationStrategy methods instead", "1.34" ) def personalizationStrategy( s: Strategy ): SetPersonalizationStrategyDefinition = SetPersonalizationStrategyDefinition(s) def personalizationRecommendationStrategy( strategy: SetStrategyRequest ): SetRecommendationStrategyDefinition = SetRecommendationStrategyDefinition(strategy) } @deprecated( "Method is deprecated, please use personalizationRecommendationStrategy methods instead", "1.34" ) implicit object SetPersonalizationStrategyExecutable extends Executable[ SetPersonalizationStrategyDefinition, SetStrategyResult ] { override def apply( client: AlgoliaClient, query: SetPersonalizationStrategyDefinition )(implicit executor: ExecutionContext): Future[SetStrategyResult] = { client.request[SetStrategyResult](query.build()) } } implicit object SetPersonalizationRecommendationStrategy extends Executable[ SetRecommendationStrategyDefinition, SetStrategyResponse ] { override def apply( client: AlgoliaClient, query: SetRecommendationStrategyDefinition )(implicit executor: ExecutionContext): Future[SetStrategyResponse] = { client.request[SetStrategyResponse](query.build()) } } }
Example 8
Source File: RemoveDsl.scala From algoliasearch-client-scala with MIT License | 5 votes |
package algolia.dsl import algolia.{AlgoliaClient, Executable} import algolia.definitions.{ PartialUpdateObjectOperationDefinition, Remove, RemoveUserIDDefinition } import algolia.responses.Deleted import org.json4s.Formats import scala.concurrent.{ExecutionContext, Future} trait RemoveDsl { implicit val formats: Formats case object remove { def value(value: String): PartialUpdateObjectOperationDefinition = PartialUpdateObjectOperationDefinition(Remove, value = Some(value)) def userID(userID: String) = RemoveUserIDDefinition(userID) } implicit object RemoveUserIDExecutable extends Executable[RemoveUserIDDefinition, Deleted] { override def apply(client: AlgoliaClient, query: RemoveUserIDDefinition)( implicit executor: ExecutionContext ): Future[Deleted] = { client.request[Deleted](query.build()) } } }
Example 9
Source File: LogsDsl.scala From algoliasearch-client-scala with MIT License | 5 votes |
package algolia.dsl import algolia.definitions.LogsDefinition import algolia.responses.Logs import algolia.{AlgoliaClient, Executable} import org.json4s.Formats import scala.concurrent.{ExecutionContext, Future} trait LogsDsl { implicit val formats: Formats def getLogs = LogsDefinition() @deprecated("use getLogs", "1.27.1") def logs() = LogsDefinition() implicit object LogsDefinitionExecutable extends Executable[LogsDefinition, Logs] { override def apply(client: AlgoliaClient, query: LogsDefinition)( implicit executor: ExecutionContext ): Future[Logs] = { client.request[Logs](query.build()) } } }
Example 10
Source File: RulesDsl.scala From algoliasearch-client-scala with MIT License | 5 votes |
package algolia.dsl import algolia.definitions._ import algolia.objects.Rule import algolia.responses.{SearchRuleResult, Task} import algolia.{AlgoliaClient, AlgoliaClientException, Executable} import org.json4s.Formats import scala.concurrent.{ExecutionContext, Future} trait RulesDsl { implicit val formats: Formats implicit object GetRuleDefinitionExecutable extends Executable[GetRuleDefinition, Rule] { override def apply(client: AlgoliaClient, query: GetRuleDefinition)( implicit executor: ExecutionContext ): Future[Rule] = { client.request[Rule](query.build()) } } implicit object DeleteRuleDefinitionExecutable extends Executable[DeleteRuleDefinition, Task] { override def apply(client: AlgoliaClient, query: DeleteRuleDefinition)( implicit executor: ExecutionContext ): Future[Task] = { client.request[Task](query.build()) } } implicit object ClearRulesDefinitionExecutable extends Executable[ClearRulesDefinition, Task] { override def apply(client: AlgoliaClient, query: ClearRulesDefinition)( implicit executor: ExecutionContext ): Future[Task] = { client.request[Task](query.build()) } } implicit object SaveRuleDefinitionExecutable extends Executable[SaveRuleDefinition, Task] { override def apply(client: AlgoliaClient, query: SaveRuleDefinition)( implicit executor: ExecutionContext ): Future[Task] = { if (query.rule.objectID.isEmpty) { return Future.failed( new AlgoliaClientException(s"rule's 'objectID' cannot be empty") ) } client.request[Task](query.build()) } } implicit object BatchRulesDefinitionExecutable extends Executable[BatchRulesDefinition, Task] { override def apply(client: AlgoliaClient, query: BatchRulesDefinition)( implicit executor: ExecutionContext ): Future[Task] = { client.request[Task](query.build()) } } implicit object SearchRulesDefinitionExecutable extends Executable[SearchRulesDefinition, SearchRuleResult] { override def apply(client: AlgoliaClient, query: SearchRulesDefinition)( implicit executor: ExecutionContext ): Future[SearchRuleResult] = { client.request[SearchRuleResult](query.build()) } } }
Example 11
Source File: MultiQueriesDefinitionDsl.scala From algoliasearch-client-scala with MIT License | 5 votes |
package algolia.dsl import algolia.definitions.{MultiQueriesDefinition, SearchDefinition} import algolia.responses.MultiQueriesResult import algolia.{AlgoliaClient, Executable} import org.json4s.Formats import scala.concurrent.{ExecutionContext, Future} trait MultiQueriesDefinitionDsl { implicit val formats: Formats @deprecated("use multipleQueries", "1.27.1") def multiQueries( queries: Iterable[SearchDefinition] ): MultiQueriesDefinition = { MultiQueriesDefinition(queries) } @deprecated("use multipleQueries", "1.27.1") def multiQueries(queries: SearchDefinition*): MultiQueriesDefinition = { MultiQueriesDefinition(queries) } def multipleQueries( queries: Iterable[SearchDefinition] ): MultiQueriesDefinition = { MultiQueriesDefinition(queries) } def multipleQueries(queries: SearchDefinition*): MultiQueriesDefinition = { MultiQueriesDefinition(queries) } implicit object MultiQueriesExecutable extends Executable[MultiQueriesDefinition, MultiQueriesResult] { override def apply(client: AlgoliaClient, query: MultiQueriesDefinition)( implicit executor: ExecutionContext ): Future[MultiQueriesResult] = { client.request[MultiQueriesResult](query.build()) } } }
Example 12
Source File: IndexSettingsDsl.scala From algoliasearch-client-scala with MIT License | 5 votes |
package algolia.dsl import algolia.definitions.{ IndexChangeSettingsDefinition, IndexSettingsDefinition } import algolia.objects.IndexSettings import algolia.responses.Task import algolia.{AlgoliaClient, Executable} import org.json4s.Formats import scala.concurrent.{ExecutionContext, Future} trait IndexSettingsDsl { implicit val formats: Formats case object settings { def of(index: String) = IndexSettingsDefinition(index) } @deprecated("use setSettings", "1.27.1") case object changeSettings { def of(index: String) = IndexSettingsDefinition(index) } case object setSettings { def of(index: String) = IndexSettingsDefinition(index) } implicit object IndexSettingsDefinitionExecutable extends Executable[IndexSettingsDefinition, IndexSettings] { override def apply( client: AlgoliaClient, settings: IndexSettingsDefinition )(implicit executor: ExecutionContext): Future[IndexSettings] = { client.request[IndexSettings](settings.build()) } } implicit object IndexChangeSettingsDefinitionExecutable extends Executable[IndexChangeSettingsDefinition, Task] { override def apply( client: AlgoliaClient, settings: IndexChangeSettingsDefinition )(implicit executor: ExecutionContext): Future[Task] = { client.request[Task](settings.build()) } } }
Example 13
Source File: ABTestDsl.scala From algoliasearch-client-scala with MIT License | 5 votes |
package algolia.dsl import algolia.definitions._ import algolia.responses._ import algolia.{AlgoliaClient, Executable} import org.json4s.Formats import scala.concurrent.{ExecutionContext, Future} trait ABTestDsl { implicit val formats: Formats implicit object AddABTestDefinitionExecutable extends Executable[AddABTestDefinition, ABTestTask] { override def apply(client: AlgoliaClient, query: AddABTestDefinition)( implicit executor: ExecutionContext ): Future[ABTestTask] = { client.request[ABTestTask](query.build()) } } implicit object GetABTestDefinitionExecutable extends Executable[GetABTestDefinition, ABTestResponse] { override def apply(client: AlgoliaClient, query: GetABTestDefinition)( implicit executor: ExecutionContext ): Future[ABTestResponse] = { client.request[ABTestResponse](query.build()) } } implicit object StopABTestDefinitionExecutable extends Executable[StopABTestDefinition, ABTestTask] { override def apply(client: AlgoliaClient, query: StopABTestDefinition)( implicit executor: ExecutionContext ): Future[ABTestTask] = { client.request[ABTestTask](query.build()) } } implicit object DeleteABTestDefinitionExecutable extends Executable[DeleteABTestDefinition, ABTestTask] { override def apply(client: AlgoliaClient, query: DeleteABTestDefinition)( implicit executor: ExecutionContext ): Future[ABTestTask] = { client.request[ABTestTask](query.build()) } } implicit object GetABTestsDefinitionExecutable extends Executable[GetABTestsDefinition, ABTestsResponse] { override def apply(client: AlgoliaClient, query: GetABTestsDefinition)( implicit executor: ExecutionContext ): Future[ABTestsResponse] = { client.request[ABTestsResponse](query.build()) } } }
Example 14
Source File: DeleteDsl.scala From algoliasearch-client-scala with MIT License | 5 votes |
package algolia.dsl import algolia.definitions._ import algolia.inputs.SafeDeleteObjectOperation import algolia.responses.Task import algolia.{AlgoliaClient, Executable} import org.json4s.Formats import scala.concurrent.{ExecutionContext, Future} trait DeleteDsl { implicit val formats: Formats case object delete { //Index def index(index: String): DeleteIndexDefinition = DeleteIndexDefinition(index) //Object @deprecated("use objectFromIndex", "1.30.0") def objectId(objectId: String) = DeleteObjectDefinition(oid = Some(objectId)) def objectFromIndex(op: SafeDeleteObjectOperation) = SafeDeleteObjectDefinition(op) //Object(s) def from(index: String) = DeleteObjectDefinition(index = Some(index)) def key(keyName: String) = DeleteKeyDefinition(keyName) def synonym(synId: String) = DeleteSynonymDefinition(synId = synId) def rule(ruleId: String) = DeleteRuleDefinition(objectId = ruleId) // AB test def abTest(id: Int) = DeleteABTestDefinition(id) } implicit object DeleteObjectDefinitionExecutable extends Executable[DeleteObjectDefinition, Task] { override def apply(client: AlgoliaClient, query: DeleteObjectDefinition)( implicit executor: ExecutionContext ): Future[Task] = { client.request[Task](query.build()) } } implicit object DeleteIndexDefinitionExecutable extends Executable[DeleteIndexDefinition, Task] { override def apply(client: AlgoliaClient, query: DeleteIndexDefinition)( implicit executor: ExecutionContext ): Future[Task] = { client.request[Task](query.build()) } } implicit object DeleteByDefinitionExecutable extends Executable[DeleteByDefinition, Task] { override def apply(client: AlgoliaClient, query: DeleteByDefinition)( implicit executor: ExecutionContext ): Future[Task] = { client.request[Task](query.build()) } } implicit object SafeDeleteObjectDefinitionExecutable extends Executable[SafeDeleteObjectDefinition, Task] { override def apply( client: AlgoliaClient, query: SafeDeleteObjectDefinition )(implicit executor: ExecutionContext): Future[Task] = { client.request[Task](query.build()) } } }
Example 15
Source File: SearchDsl.scala From algoliasearch-client-scala with MIT License | 5 votes |
package algolia.dsl import algolia.AlgoliaDsl.In import algolia.definitions._ import algolia.responses.{SearchFacetResult, SearchResult, SearchUserID} import algolia.{AlgoliaClient, Executable} import org.json4s.Formats import scala.concurrent.{ExecutionContext, Future} trait SearchDsl { implicit val formats: Formats case object search { def into(index: String) = SearchDefinition(index) def synonyms(i: In): SearchSynonymsDefinition = SearchSynonymsDefinition() def rules(i: In): SearchRulesDefinition = SearchRulesDefinition() def userIDs(query: String) = SearchUserIDDefinition(query) } implicit object SearchDefinitionExecutable extends Executable[SearchDefinition, SearchResult] { override def apply(client: AlgoliaClient, query: SearchDefinition)( implicit executor: ExecutionContext ): Future[SearchResult] = { client.request[SearchResult](query.build()) } } implicit object SearchFacetDefinitionExecutable extends Executable[SearchFacetDefinition, SearchFacetResult] { override def apply(client: AlgoliaClient, query: SearchFacetDefinition)( implicit executor: ExecutionContext ): Future[SearchFacetResult] = { client.request[SearchFacetResult](query.build()) } } implicit object SearchUserIDsExecutable extends Executable[SearchUserIDDefinition, SearchUserID] { override def apply(client: AlgoliaClient, query: SearchUserIDDefinition)( implicit executor: ExecutionContext ): Future[SearchUserID] = { client.request[SearchUserID](query.build()) } } }
Example 16
Source File: PartialUpdateObjectDsl.scala From algoliasearch-client-scala with MIT License | 5 votes |
package algolia.dsl import algolia.definitions._ import algolia.responses.Task import algolia.{AlgoliaClient, Executable} import org.json4s.Formats import scala.concurrent.{ExecutionContext, Future} trait PartialUpdateObjectDsl { implicit val formats: Formats case object increment { def attribute(attribute: String): PartialUpdateObjectOperationDefinition = PartialUpdateObjectOperationDefinition( Increment, attribute = Some(attribute) ) } case object decrement { def attribute(attribute: String): PartialUpdateObjectOperationDefinition = PartialUpdateObjectOperationDefinition( Decrement, attribute = Some(attribute) ) } case object addUnique { def value(value: String): PartialUpdateObjectOperationDefinition = PartialUpdateObjectOperationDefinition(AddUnique, value = Some(value)) } case object update { def attribute(attribute: String): PartialUpdateObjectDefinition = PartialUpdateObjectDefinition(attribute = Some(attribute)) def key(keyName: String) = UpdateKeyDefinition(keyName) } case object partialUpdate { def from(index: String): PartialUpdateOneObjectDefinition = { PartialUpdateOneObjectDefinition(index = index) } } implicit object PartialUpdateObjectOperationExecutable extends Executable[PartialUpdateObjectOperationDefinition, Task] { override def apply( client: AlgoliaClient, query: PartialUpdateObjectOperationDefinition )(implicit executor: ExecutionContext): Future[Task] = { client.request[Task](query.build()) } } implicit object PartialUpdateObjectExecutable extends Executable[PartialUpdateObjectDefinition, Task] { override def apply( client: AlgoliaClient, query: PartialUpdateObjectDefinition )(implicit executor: ExecutionContext): Future[Task] = { client.request[Task](query.build()) } } implicit object PartialUpdateOneObjectDefinitionExecutable extends Executable[PartialUpdateOneObjectDefinition, Task] { override def apply( client: AlgoliaClient, query: PartialUpdateOneObjectDefinition )(implicit executor: ExecutionContext): Future[Task] = { client.request[Task](query.build()) } } }
Example 17
Source File: ListDsl.scala From algoliasearch-client-scala with MIT License | 5 votes |
package algolia.dsl import algolia.definitions.{ ListClustersDefinition, ListIndexesDefinition, ListKeysDefinition, ListUserIDsDefinition } import algolia.responses.{ClusterList, Indices, UserIDList} import algolia.{AlgoliaClient, Executable} import org.json4s.Formats import scala.concurrent.{ExecutionContext, Future} trait ListDsl { implicit val formats: Formats case object list { def indices = ListIndexesDefinition() def indexes = ListIndexesDefinition() def keys = ListKeysDefinition() def clusters = ListClustersDefinition() def userIDs = ListUserIDsDefinition() @deprecated("use without index", "1.27.0") def keysFrom(indexName: String) = ListKeysDefinition(indexName = Some(indexName)) } implicit object ListIndexesDefinitionExecutable extends Executable[ListIndexesDefinition, Indices] { override def apply(client: AlgoliaClient, query: ListIndexesDefinition)( implicit executor: ExecutionContext ): Future[Indices] = { client.request[Indices](query.build()) } } implicit object ListClustersExecutable extends Executable[ListClustersDefinition, ClusterList] { override def apply(client: AlgoliaClient, query: ListClustersDefinition)( implicit executor: ExecutionContext ): Future[ClusterList] = { client.request[ClusterList](query.build()) } } implicit object ListUserIDsExecutable extends Executable[ListUserIDsDefinition, UserIDList] { override def apply(client: AlgoliaClient, query: ListUserIDsDefinition)( implicit executor: ExecutionContext ): Future[UserIDList] = { client.request[UserIDList](query.build()) } } }
Example 18
Source File: BatchDsl.scala From algoliasearch-client-scala with MIT License | 5 votes |
package algolia.dsl import algolia.definitions.{BatchDefinition, Definition} import algolia.responses.TasksMultipleIndex import algolia.{AlgoliaClient, Executable} import org.json4s.Formats import scala.concurrent.{ExecutionContext, Future} trait BatchDsl { implicit val formats: Formats def batch(batches: Iterable[Definition]): BatchDefinition = { BatchDefinition(batches) } def batch(batches: Definition*): BatchDefinition = { BatchDefinition(batches) } implicit object BatchDefinitionExecutable extends Executable[BatchDefinition, TasksMultipleIndex] { override def apply(client: AlgoliaClient, query: BatchDefinition)( implicit executor: ExecutionContext ): Future[TasksMultipleIndex] = { client.request[TasksMultipleIndex](query.build()) } } }
Example 19
Source File: SendDsl.scala From algoliasearch-client-scala with MIT License | 5 votes |
package algolia.dsl import algolia.{AlgoliaClient, Executable} import algolia.definitions.InsightsEventDefinition import algolia.inputs._ import algolia.responses.InsightsEventResponse import org.json4s.Formats import scala.concurrent.{ExecutionContext, Future} trait SendDsl { implicit val formats: Formats case object send { def event(e: InsightsEvent): InsightsEventDefinition = InsightsEventDefinition(Seq(e)) def events(e: Iterable[InsightsEvent]): InsightsEventDefinition = InsightsEventDefinition(e) } implicit object SendInsightEventExecutable extends Executable[InsightsEventDefinition, InsightsEventResponse] { override def apply(client: AlgoliaClient, query: InsightsEventDefinition)( implicit executor: ExecutionContext ): Future[InsightsEventResponse] = { client.request[InsightsEventResponse](query.build()) } } }
Example 20
Source File: AssignDsl.scala From algoliasearch-client-scala with MIT License | 5 votes |
package algolia.dsl import algolia.definitions.{AssignUserIDDefinition, AssignUserIDsDefinition} import algolia.inputs.{UserIDAssignment, UserIDsAssignment} import algolia.{AlgoliaClient, Executable} import algolia.responses.Created import org.json4s.Formats import scala.concurrent.{ExecutionContext, Future} trait AssignDsl { implicit val formats: Formats case object assign { def userID(assignment: UserIDAssignment): AssignUserIDDefinition = AssignUserIDDefinition(assignment) def userIDs(assignment: UserIDsAssignment): AssignUserIDsDefinition = AssignUserIDsDefinition(assignment) } implicit object AssignUserIDExecutable extends Executable[AssignUserIDDefinition, Created] { override def apply(client: AlgoliaClient, query: AssignUserIDDefinition)( implicit executor: ExecutionContext ): Future[Created] = { client.request[Created](query.build()) } } implicit object AssignUserIDsExecutable extends Executable[AssignUserIDsDefinition, Created] { override def apply(client: AlgoliaClient, query: AssignUserIDsDefinition)( implicit executor: ExecutionContext ): Future[Created] = { client.request[Created](query.build()) } } }
Example 21
Source File: AddABTestDefinition.scala From algoliasearch-client-scala with MIT License | 5 votes |
package algolia.definitions import java.time.ZoneOffset import algolia.http.{HttpPayload, POST} import algolia.inputs.ABTest import algolia.objects.RequestOptions import org.json4s.Formats import org.json4s.native.Serialization._ case class AddABTestDefinition(abtest: ABTest)(implicit val formats: Formats) extends Definition { type T = AddABTestDefinition override def options(requestOptions: RequestOptions): AddABTestDefinition = this override private[algolia] def build(): HttpPayload = { val body = Map( "name" -> abtest.name, "variants" -> abtest.variants, "endAt" -> abtest.endAt.atOffset(ZoneOffset.UTC).toString ) HttpPayload( POST, Seq("2", "abtests"), body = Some(write(body)), isSearch = false, isAnalytics = true, requestOptions = None ) } }
Example 22
Source File: MultiQueriesDefinition.scala From algoliasearch-client-scala with MIT License | 5 votes |
package algolia.definitions import algolia.http.{HttpPayload, POST} import algolia.inputs._ import algolia.objects.{MultiQueries, RequestOptions} import org.json4s.Formats import org.json4s.native.Serialization._ case class MultiQueriesDefinition( definitions: Iterable[SearchDefinition], strategy: Option[MultiQueries.Strategy] = None, requestOptions: Option[RequestOptions] = None )(implicit val formats: Formats) extends Definition { type T = MultiQueriesDefinition def strategy(strategy: MultiQueries.Strategy): MultiQueriesDefinition = copy(strategy = Some(strategy)) override def options(requestOptions: RequestOptions): MultiQueriesDefinition = copy(requestOptions = Some(requestOptions)) override private[algolia] def build(): HttpPayload = { val parameters = strategy.flatMap(s => Some(Map("strategy" -> s.name))) HttpPayload( POST, Seq("1", "indexes", "*", "queries"), queryParameters = parameters, body = Some(write(MultiQueriesRequests(definitions.map(transform)))), isSearch = true, requestOptions = requestOptions ) } private def transform(definition: SearchDefinition): MultiQueriesRequest = { MultiQueriesRequest( indexName = definition.index, params = definition.query.map(_.toParam) ) } }
Example 23
Source File: BrowseIndexDefinition.scala From algoliasearch-client-scala with MIT License | 5 votes |
package algolia.definitions import algolia.http.{GET, HttpPayload} import algolia.objects.{Query, RequestOptions} import org.json4s.Formats case class BrowseIndexDefinition( source: String, query: Option[Query] = None, cursor: Option[String] = None, requestOptions: Option[RequestOptions] = None )(implicit val formats: Formats) extends Definition { type T = BrowseIndexDefinition def from(cursor: String): BrowseIndexDefinition = copy(cursor = Some(cursor)) def query(query: Query): BrowseIndexDefinition = copy(query = Some(query)) override def options(requestOptions: RequestOptions): BrowseIndexDefinition = copy(requestOptions = Some(requestOptions)) override private[algolia] def build(): HttpPayload = { val q = query.getOrElse(Query()).copy(cursor = cursor) HttpPayload( GET, Seq("1", "indexes", source, "browse"), queryParameters = Some(q.toQueryParam), isSearch = true, requestOptions = requestOptions ) } }
Example 24
Source File: StopABTestDefinition.scala From algoliasearch-client-scala with MIT License | 5 votes |
package algolia.definitions import algolia.http.{HttpPayload, POST} import algolia.objects.RequestOptions import org.json4s.Formats case class StopABTestDefinition(id: Int)(implicit val formats: Formats) extends Definition { type T = StopABTestDefinition override def options(requestOptions: RequestOptions): StopABTestDefinition = this override private[algolia] def build(): HttpPayload = HttpPayload( verb = POST, path = Seq("2", "abtests", id.toString, "stop"), isSearch = false, isAnalytics = true, requestOptions = None ) }
Example 25
Source File: DeleteDefinition.scala From algoliasearch-client-scala with MIT License | 5 votes |
package algolia.definitions import algolia._ import algolia.http.HttpPayload import algolia.objects.{Query, RequestOptions} import org.json4s.Formats import org.json4s.native.Serialization.write case class DeleteObjectDefinition( index: Option[String] = None, oid: Option[String] = None, requestOptions: Option[RequestOptions] = None )(implicit val formats: Formats) extends Definition { type T = DeleteObjectDefinition def from(ind: String): DeleteObjectDefinition = copy(index = Some(ind)) @deprecated("use objectFromIndex", "1.30.0") def index(ind: String): DeleteObjectDefinition = copy(index = Some(ind)) @deprecated("use objectFromIndex", "1.30.0") def objectId(objectId: String): DeleteObjectDefinition = copy(oid = Some(objectId)) def objectIds(objectIds: Iterable[String]): BatchDefinition = BatchDefinition(objectIds.map { oid => DeleteObjectDefinition(index, Some(oid)) }) def by(query: Query): DeleteByDefinition = DeleteByDefinition(index, query, requestOptions) override def options(requestOptions: RequestOptions): DeleteObjectDefinition = copy(requestOptions = Some(requestOptions)) override private[algolia] def build(): HttpPayload = HttpPayload( http.DELETE, Seq("1", "indexes") ++ index ++ oid, isSearch = false, requestOptions = requestOptions ) } case class DeleteIndexDefinition( index: String, requestOptions: Option[RequestOptions] = None ) extends Definition { type T = DeleteIndexDefinition override def options(requestOptions: RequestOptions): DeleteIndexDefinition = copy(requestOptions = Some(requestOptions)) override private[algolia] def build(): HttpPayload = HttpPayload( http.DELETE, Seq("1", "indexes", index), isSearch = false, requestOptions = requestOptions ) } case class DeleteByDefinition( index: Option[String], query: Query, requestOptions: Option[RequestOptions] = None )(implicit val formats: Formats) extends Definition { type T = DeleteByDefinition override private[algolia] def build(): HttpPayload = { val body = Map("params" -> query.toParam) HttpPayload( http.POST, Seq("1", "indexes") ++ index ++ Some("deleteByQuery"), isSearch = false, body = Some(write(body)), requestOptions = requestOptions ) } override def options(requestOptions: RequestOptions): DeleteByDefinition = copy(requestOptions = Some(requestOptions)) }
Example 26
Source File: DeleteABTestDefinition.scala From algoliasearch-client-scala with MIT License | 5 votes |
package algolia.definitions import algolia.http.{DELETE, HttpPayload} import algolia.objects.RequestOptions import org.json4s.Formats case class DeleteABTestDefinition(id: Int)(implicit val formats: Formats) extends Definition { type T = DeleteABTestDefinition override def options(requestOptions: RequestOptions): DeleteABTestDefinition = this override private[algolia] def build(): HttpPayload = HttpPayload( DELETE, Seq("2", "abtests", id.toString), isSearch = false, isAnalytics = true, requestOptions = None ) }
Example 27
Source File: RecommendationDefinition.scala From algoliasearch-client-scala with MIT License | 5 votes |
package algolia.definitions import algolia.http.{GET, HttpPayload, POST} import algolia.objects.{RequestOptions, SetStrategyRequest} import org.json4s.Formats import org.json4s.native.Serialization.write case class GetRecommendationStrategyDefinition( requestOptions: Option[RequestOptions] = None )(implicit val formats: Formats) extends Definition { override type T = GetRecommendationStrategyDefinition override def options( requestOptions: RequestOptions ): GetRecommendationStrategyDefinition = copy(requestOptions = Some(requestOptions)) override private[algolia] def build(): HttpPayload = { HttpPayload( GET, Seq("1", "strategies", "personalization"), isSearch = false, isRecommendation = true, requestOptions = requestOptions ) } } case class SetRecommendationStrategyDefinition( s: SetStrategyRequest, requestOptions: Option[RequestOptions] = None )(implicit val formats: Formats) extends Definition { override type T = SetRecommendationStrategyDefinition override def options( requestOptions: RequestOptions ): SetRecommendationStrategyDefinition = copy(requestOptions = Some(requestOptions)) override private[algolia] def build(): HttpPayload = { HttpPayload( POST, Seq("1", "strategies", "personalization"), body = Some(write(s)), isSearch = false, isRecommendation = true, requestOptions = requestOptions ) } }
Example 28
Source File: MoveIndexDefinition.scala From algoliasearch-client-scala with MIT License | 5 votes |
package algolia.definitions import algolia.http.{HttpPayload, POST} import algolia.inputs.IndexOperation import algolia.objects.RequestOptions import org.json4s.Formats import org.json4s.native.Serialization._ case class MoveIndexDefinition( source: String, destination: Option[String] = None, scope: Option[Seq[String]] = None, requestOptions: Option[RequestOptions] = None )(implicit val formats: Formats) extends Definition { type T = MoveIndexDefinition def to(destination: String): MoveIndexDefinition = copy(source, Some(destination)) override def options(requestOptions: RequestOptions): MoveIndexDefinition = copy(requestOptions = Some(requestOptions)) def scope(scope: Seq[String]): MoveIndexDefinition = copy(scope = Some(scope)) override private[algolia] def build(): HttpPayload = { val operation = IndexOperation("move", destination, scope) HttpPayload( POST, Seq("1", "indexes", source, "operation"), body = Some(write(operation)), isSearch = false, requestOptions = requestOptions ) } }
Example 29
Source File: IndexingBatchDefinition.scala From algoliasearch-client-scala with MIT License | 5 votes |
package algolia.definitions import algolia.http.{HttpPayload, POST} import algolia.inputs.{ AddObjectOperation, BatchOperations, UpdateObjectOperation } import algolia.objects.RequestOptions import org.json4s.Formats import org.json4s.native.Serialization._ case class IndexingBatchDefinition( index: String, definitions: Iterable[Definition] = Iterable(), requestOptions: Option[RequestOptions] = None )(implicit val formats: Formats) extends Definition with BatchOperationUtils { type T = IndexingBatchDefinition override def options( requestOptions: RequestOptions ): IndexingBatchDefinition = copy(requestOptions = Some(requestOptions)) override private[algolia] def build(): HttpPayload = { val operations = definitions.map { case IndexingDefinition(_, None, Some(obj), _) => hasObjectId(obj) match { case (true, o) => UpdateObjectOperation(o) case (false, o) => AddObjectOperation(o) } case IndexingDefinition(_, Some(objectId), Some(obj), _) => UpdateObjectOperation(addObjectId(obj, objectId)) } HttpPayload( POST, Seq("1", "indexes", index, "batch"), body = Some(write(BatchOperations(operations))), isSearch = false, requestOptions = requestOptions ) } }
Example 30
Source File: GetABTestsDefinition.scala From algoliasearch-client-scala with MIT License | 5 votes |
package algolia.definitions import algolia.http.{GET, HttpPayload} import algolia.objects.RequestOptions import org.json4s.Formats case class GetABTestsDefinition(offset: Int = 0, limit: Int = 10)( implicit val formats: Formats ) extends Definition { type T = GetABTestsDefinition override def options(requestOptions: RequestOptions): GetABTestsDefinition = this def offset(offset: Int): GetABTestsDefinition = copy(offset = offset) def limit(limit: Int): GetABTestsDefinition = copy(limit = limit) override private[algolia] def build(): HttpPayload = HttpPayload( verb = GET, path = Seq("2", "abtests"), queryParameters = Some(Map("offset" -> offset.toString, "limit" -> limit.toString)), isSearch = false, isAnalytics = true, requestOptions = None ) }
Example 31
Source File: GetABTestDefinition.scala From algoliasearch-client-scala with MIT License | 5 votes |
package algolia.definitions import algolia.http.{GET, HttpPayload} import algolia.objects.RequestOptions import org.json4s.Formats case class GetABTestDefinition(id: Int)(implicit val formats: Formats) extends Definition { type T = GetABTestDefinition override def options(requestOptions: RequestOptions): GetABTestDefinition = this override private[algolia] def build(): HttpPayload = HttpPayload( GET, Seq("2", "abtests", id.toString), isSearch = false, isAnalytics = true, requestOptions = None ) }
Example 32
Source File: GetDefinition.scala From algoliasearch-client-scala with MIT License | 5 votes |
package algolia.definitions import algolia.http.{GET, HttpPayload, POST} import algolia.inputs.{Request, Requests} import algolia.objects.RequestOptions import org.json4s.Formats import org.json4s.native.Serialization._ case class GetObjectDefinition( index: Option[String] = None, oid: Option[String] = None, attributesToRetrieve: Iterable[String] = Iterable.empty, requestOptions: Option[RequestOptions] = None )(implicit val formats: Formats) extends Definition { type T = GetObjectDefinition def objectIds(oids: Seq[String]): GetObjectsDefinition = GetObjectsDefinition(index, oids) def from(ind: String): GetObjectDefinition = copy(index = Some(ind)) def objectId(objectId: String): GetObjectDefinition = copy(oid = Some(objectId)) def attributesToRetrieve( attributesToRetrieve: Iterable[String] ): GetObjectDefinition = copy(attributesToRetrieve = attributesToRetrieve) override def options(requestOptions: RequestOptions): GetObjectDefinition = copy(requestOptions = Some(requestOptions)) override private[algolia] def build(): HttpPayload = { val parameters = if (attributesToRetrieve.isEmpty) { None } else { Some( Map( "attributesToRetrieve" -> attributesToRetrieve.mkString(",") ) ) } HttpPayload( GET, Seq("1", "indexes") ++ index ++ oid, queryParameters = parameters, isSearch = true, requestOptions = requestOptions ) } } case class GetObjectsDefinition( index: Option[String], oids: Seq[String] = Seq(), requestOptions: Option[RequestOptions] = None )(implicit val formats: Formats) extends Definition { type T = GetObjectsDefinition override def options(requestOptions: RequestOptions): GetObjectsDefinition = copy(requestOptions = Some(requestOptions)) override private[algolia] def build(): HttpPayload = { val requests = oids.map { oid => Request(index, oid) } HttpPayload( POST, Seq("1", "indexes", "*", "objects"), body = Some(write(Requests(requests))), isSearch = true, requestOptions = requestOptions ) } }
Example 33
Source File: SearchDefinition.scala From algoliasearch-client-scala with MIT License | 5 votes |
package algolia.definitions import algolia.http.{HttpPayload, POST} import algolia.objects.{Query, RequestOptions} import org.json4s.Formats import org.json4s.native.Serialization.write case class SearchDefinition( index: String, query: Option[Query] = None, requestOptions: Option[RequestOptions] = None )(implicit val formats: Formats) extends Definition { type T = SearchDefinition def facet(facetName: String) = SearchFacetDefinition(index, facetName, "") def query(q: Query): SearchDefinition = copy(query = Some(q)) override def options(requestOptions: RequestOptions): SearchDefinition = copy(requestOptions = Some(requestOptions)) override private[algolia] def build(): HttpPayload = { val body = Map("params" -> query.map(_.toParam)) HttpPayload( POST, Seq("1", "indexes", index, "query"), body = Some(write(body)), isSearch = true, requestOptions = requestOptions ) } } case class SearchFacetDefinition( index: String, facetName: String, values: String, query: Query = Query(), requestOptions: Option[RequestOptions] = None )(implicit val formats: Formats) extends Definition { type T = SearchFacetDefinition def values(facetQuery: String): SearchFacetDefinition = copy(values = facetQuery) def query(q: Query): SearchFacetDefinition = copy(query = q) override def options(requestOptions: RequestOptions): SearchFacetDefinition = copy(requestOptions = Some(requestOptions)) override private[algolia] def build(): HttpPayload = { val body = Map("params" -> query.copy(facetQuery = Some(values)).toParam) HttpPayload( POST, Seq("1", "indexes", index, "facets", facetName, "query"), body = Some(write(body)), isSearch = true, requestOptions = requestOptions ) } }
Example 34
Source File: StrategyDefinition.scala From algoliasearch-client-scala with MIT License | 5 votes |
package algolia.definitions import algolia.http.{GET, HttpPayload, POST} import algolia.objects.Strategy import algolia.objects.RequestOptions import org.json4s.Formats import org.json4s.native.Serialization.write case class GetPersonalizationStrategyDefinition( requestOptions: Option[RequestOptions] = None )(implicit val formats: Formats) extends Definition { override type T = GetPersonalizationStrategyDefinition override def options( requestOptions: RequestOptions ): GetPersonalizationStrategyDefinition = copy(requestOptions = Some(requestOptions)) override private[algolia] def build(): HttpPayload = { HttpPayload( GET, Seq("1", "recommendation", "personalization", "strategy"), isSearch = true, requestOptions = requestOptions ) } } case class SetPersonalizationStrategyDefinition( s: Strategy, requestOptions: Option[RequestOptions] = None )(implicit val formats: Formats) extends Definition { override type T = SetPersonalizationStrategyDefinition override def options( requestOptions: RequestOptions ): SetPersonalizationStrategyDefinition = copy(requestOptions = Some(requestOptions)) override private[algolia] def build(): HttpPayload = { HttpPayload( POST, Seq("1", "recommendation", "personalization", "strategy"), body = Some(write(s)), isSearch = false, requestOptions = requestOptions ) } }
Example 35
Source File: IndexingDefinition.scala From algoliasearch-client-scala with MIT License | 5 votes |
package algolia.definitions import algolia._ import algolia.http.HttpPayload import algolia.objects.RequestOptions import algolia.responses.TaskIndexing import org.json4s.Formats import org.json4s.native.Serialization.write import scala.concurrent.{ExecutionContext, Future} case class IndexingDefinition( index: String, objectId: Option[String] = None, obj: Option[AnyRef] = None, requestOptions: Option[RequestOptions] = None )(implicit val formats: Formats) extends Definition { type T = IndexingDefinition override def options(requestOptions: RequestOptions): IndexingDefinition = copy(requestOptions = Some(requestOptions)) def objects(objectsWithIds: Map[String, AnyRef]): IndexingBatchDefinition = IndexingBatchDefinition(index, objectsWithIds.map { case (oid, o) => IndexingDefinition(index, Some(oid), Some(o)) }) def objects(objects: Iterable[AnyRef]): IndexingBatchDefinition = IndexingBatchDefinition(index, objects.map { obj => copy(index = index, obj = Some(obj)) }) def objectId(objectId: String): IndexingDefinition = copy(objectId = Some(objectId)) def `object`(objectId: String, obj: AnyRef): IndexingDefinition = copy(objectId = Some(objectId), obj = Some(obj)) def `object`(obj: AnyRef): IndexingDefinition = copy(obj = Some(obj)) override private[algolia] def build(): HttpPayload = { val body: Option[String] = obj.map(o => write(o)) val verb = objectId match { case Some(_) => http.PUT case None => http.POST } HttpPayload( verb, Seq("1", "indexes", index) ++ objectId, body = body, isSearch = false, requestOptions = requestOptions ) } } trait IndexingDsl { implicit val formats: Formats case object index { def into(index: String): IndexingDefinition = IndexingDefinition(index) } implicit object IndexingDefinitionExecutable extends Executable[IndexingDefinition, TaskIndexing] { override def apply(client: AlgoliaClient, query: IndexingDefinition)( implicit executor: ExecutionContext ): Future[TaskIndexing] = { client.request[TaskIndexing](query.build()) } } }
Example 36
Source File: InsightsEventDefinition.scala From algoliasearch-client-scala with MIT License | 5 votes |
package algolia.definitions import algolia.http.{HttpPayload, POST} import algolia.inputs.InsightsEvent import algolia.objects.RequestOptions import org.json4s.Formats import org.json4s.native.Serialization.write case class InsightsEventDefinition( events: Iterable[InsightsEvent], requestOptions: Option[RequestOptions] = None )(implicit val formats: Formats) extends Definition { override type T = InsightsEventDefinition override def options( requestOptions: RequestOptions ): InsightsEventDefinition = copy(requestOptions = Some(requestOptions)) override private[algolia] def build(): HttpPayload = { val body = Map("events" -> events) HttpPayload( POST, Seq("1", "events"), body = Some(write(body)), isSearch = false, isInsights = true, requestOptions = requestOptions ) } }
Example 37
Source File: SafeDeleteObjectDefinition.scala From algoliasearch-client-scala with MIT License | 5 votes |
package algolia.definitions import algolia._ import algolia.http.HttpPayload import algolia.inputs.SafeDeleteObjectOperation import algolia.objects.RequestOptions import org.json4s.Formats case class SafeDeleteObjectDefinition( op: SafeDeleteObjectOperation, requestOptions: Option[RequestOptions] = None )(implicit val formats: Formats) extends Definition { override type T = SafeDeleteObjectDefinition override def options( requestOptions: RequestOptions ): SafeDeleteObjectDefinition = copy(requestOptions = Some(requestOptions)) override private[algolia] def build(): HttpPayload = HttpPayload( http.DELETE, Seq("1", "indexes", op.index, op.objectID), isSearch = false, requestOptions = requestOptions ) }
Example 38
Source File: IndexSettingsDefinition.scala From algoliasearch-client-scala with MIT License | 5 votes |
package algolia.definitions import algolia.AlgoliaDsl.ForwardToReplicas import algolia.http.{GET, HttpPayload, PUT} import algolia.objects.{IndexSettings, RequestOptions} import org.json4s.Formats import org.json4s.native.Serialization._ case class IndexSettingsDefinition( index: String, requestOptions: Option[RequestOptions] = None )(implicit val formats: Formats) extends Definition { type T = IndexSettingsDefinition def `with`(settings: IndexSettings) = IndexChangeSettingsDefinition(index, settings) override def options( requestOptions: RequestOptions ): IndexSettingsDefinition = copy(requestOptions = Some(requestOptions)) override private[algolia] def build(): HttpPayload = { HttpPayload( GET, Seq("1", "indexes", index, "settings"), queryParameters = Some(Map("getVersion" -> "2")), isSearch = true, requestOptions = requestOptions ) } } case class IndexChangeSettingsDefinition( index: String, settings: IndexSettings, forward: Option[ForwardToReplicas] = None, requestOptions: Option[RequestOptions] = None )(implicit val formats: Formats) extends Definition { type T = IndexChangeSettingsDefinition override def options( requestOptions: RequestOptions ): IndexChangeSettingsDefinition = copy(requestOptions = Some(requestOptions)) def and(forward: ForwardToReplicas): IndexChangeSettingsDefinition = copy(forward = Some(forward)) override private[algolia] def build(): HttpPayload = { val queryParameters = if (forward.isDefined) { Some(Map("forwardToReplicas" -> "true")) } else { None } HttpPayload( PUT, Seq("1", "indexes", index, "settings"), body = Some(write(settings)), queryParameters = queryParameters, isSearch = false, requestOptions = requestOptions ) } }
Example 39
Source File: CopyIndexDefinition.scala From algoliasearch-client-scala with MIT License | 5 votes |
package algolia.definitions import algolia.http.{HttpPayload, POST} import algolia.inputs.IndexOperation import algolia.objects.RequestOptions import org.json4s.Formats import org.json4s.native.Serialization._ case class CopyIndexDefinition( source: String, destination: Option[String] = None, scope: Option[Seq[String]] = None, requestOptions: Option[RequestOptions] = None )(implicit val formats: Formats) extends Definition { type T = CopyIndexDefinition def to(destination: String): CopyIndexDefinition = copy(source, Some(destination)) @deprecated("use scope", "1.27.1") def scopes(scope: Seq[String]): CopyIndexDefinition = copy(scope = Some(scope)) def scope(scope: Seq[String]): CopyIndexDefinition = copy(scope = Some(scope)) override def options(requestOptions: RequestOptions): CopyIndexDefinition = copy(requestOptions = Some(requestOptions)) override private[algolia] def build(): HttpPayload = { val operation = IndexOperation("copy", destination, scope) HttpPayload( POST, Seq("1", "indexes", source, "operation"), body = Some(write(operation)), isSearch = false, requestOptions = requestOptions ) } }
Example 40
Source File: package.scala From reliable-http-client with Apache License 2.0 | 5 votes |
package rhttpc import akka.http.scaladsl.model.{HttpRequest, HttpResponse} import org.json4s.Formats import rhttpc.akkahttp.json4s._ import rhttpc.client.proxy.ReliableProxy import rhttpc.client.{InOnlyReliableClient, InOutReliableClient} import rhttpc.transport.json4s.CommonFormats package object akkahttp { type InOutReliableHttpClient = InOutReliableClient[HttpRequest] type InOnlyReliableHttpClient = InOnlyReliableClient[HttpRequest] type ReliableHttpProxy = ReliableProxy[HttpRequest, HttpResponse] implicit val formats: Formats = CommonFormats.formats + ContentTypeSerializer + ByteStringSerializer + UriSerializer }
Example 41
Source File: Json4sSerializer.scala From reliable-http-client with Apache License 2.0 | 5 votes |
package rhttpc.akkapersistence.json4s import java.nio.ByteBuffer import java.nio.charset.Charset import akka.actor.ExtendedActorSystem import akka.serialization.Serializer import org.json4s.native.Serialization._ import org.json4s.{DefaultFormats, Formats, TypeHints} import rhttpc.transport.json4s.{AllTypeHints, ObjectSerializer} class Json4sSerializer(system: ExtendedActorSystem) extends Serializer { import Json4sSerializer._ import rhttpc.transport.json4s.CommonFormats._ override def identifier: Int = ID override def includeManifest: Boolean = true override def fromBinary(bytes: Array[Byte], manifestOpt: Option[Class[_]]): AnyRef = { implicit val manifest = manifestOpt match { case Some(x) => Manifest.classType(x) case None => Manifest.AnyRef } read(new String(bytes, UTF8)) } override def toBinary(o: AnyRef): Array[Byte] = { writePretty(o).getBytes(UTF8) } } object Json4sSerializer { private val UTF8: Charset = Charset.forName("UTF-8") private val ID: Int = ByteBuffer.wrap("json4s".getBytes(UTF8)).getInt }
Example 42
Source File: CustomSubTypesSerializer.scala From reliable-http-client with Apache License 2.0 | 5 votes |
package rhttpc.transport.json4s import org.json4s.reflect.TypeInfo import org.json4s.{Formats, Serializer, _} import scala.reflect.ClassTag class CustomSubTypesSerializer[T: Manifest, JV <: JValue: ClassTag]( ser: Formats => (PartialFunction[JV, T], PartialFunction[T, JV])) extends Serializer[T] { private val Class = implicitly[Manifest[T]].runtimeClass def deserialize(implicit format: Formats): PartialFunction[(TypeInfo, JValue), T] = { val d = ser(format)._1 val pf: PartialFunction[(TypeInfo, JValue), T] = { case (TypeInfo(clazz, _), jValue: JV) if Class.isAssignableFrom(clazz) && d.isDefinedAt(jValue) => d(jValue) } pf } def serialize(implicit format: Formats): PartialFunction[Any, JValue] = { val s = ser(format)._2 val pf: PartialFunction[Any, JValue] = { case obj: T if s.isDefinedAt(obj) => s(obj) } pf } }
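A minimal sketch of defining a serializer with this helper, assuming a hypothetical CorrelationId value type that should round-trip as a plain JSON string:

import org.json4s.JString

case class CorrelationId(value: String)

// The single constructor argument supplies the (deserialize, serialize) pair,
// both as partial functions over the chosen JValue subtype.
object CorrelationIdSerializer extends CustomSubTypesSerializer[CorrelationId, JString](_ => (
  { case JString(s) => CorrelationId(s) },
  { case CorrelationId(s) => JString(s) }
))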
Example 43
Source File: ExceptionSerializer.scala From reliable-http-client with Apache License 2.0 | 5 votes |
package rhttpc.transport.json4s import java.lang.reflect.Constructor import org.json4s.{CustomSerializer, Extraction, Formats, Serializer, TypeInfo} import org.json4s.JsonAST._ import scala.util.Try object ExceptionSerializer extends Serializer[Throwable] { override def deserialize(implicit format: Formats): PartialFunction[(TypeInfo, JValue), Throwable] = { case ( TypeInfo(clazz, _), JObject(("jsonClass", JString(ExceptionClassHavingConstructorWithMessageAndCause(constructor))) :: ("message", JString(message)) :: ("cause", cause) :: Nil)) if classOf[Throwable].isAssignableFrom(clazz) => constructor.newInstance(message, Extraction.extract[Throwable](cause)) case ( TypeInfo(clazz, _), JObject(("jsonClass", JString(ExceptionClassHavingConstructorWithMessageAndCause(constructor))) :: ("message", JNull) :: ("cause", cause) :: Nil)) if classOf[Throwable].isAssignableFrom(clazz) => constructor.newInstance(null, Extraction.extract[Throwable](cause)) case ( TypeInfo(clazz, _), JObject(("jsonClass", JString(ExceptionClassHavingConstructorWithMessageOnly(constructor))) :: ("message", JString(message)) :: Nil)) if classOf[Throwable].isAssignableFrom(clazz) => constructor.newInstance(message) case ( TypeInfo(clazz, _), JObject(("jsonClass", JString(ExceptionClassHavingConstructorWithMessageOnly(constructor))) :: ("message", JNull) :: Nil)) if classOf[Throwable].isAssignableFrom(clazz) => constructor.newInstance(null) } override def serialize(implicit formats: Formats): PartialFunction[Any, JValue] = { case ExceptionInstanceHavingConstructorWithMessageAndCause(ex) => JObject( formats.typeHintFieldName -> JString(ex.getClass.getName), "message" -> Option(ex.getMessage).map(JString).getOrElse(JNull), "cause" -> Extraction.decompose(ex.getCause) ) case ExceptionInstanceHavingConstructorWithMessageOnly(ex) => JObject( formats.typeHintFieldName -> JString(ex.getClass.getName), "message" -> Option(ex.getMessage).map(JString).getOrElse(JNull) ) } } object ExceptionClassHavingConstructorWithMessageAndCause { def unapply(className: String): Option[Constructor[Throwable]] = { (for { clazz <- Try(Class.forName(className)) if classOf[Throwable].isAssignableFrom(clazz) constructor <- constructorWithMessageAndCause(clazz) } yield constructor).toOption } def constructorWithMessageAndCause(clazz: Class[_]): Try[Constructor[Throwable]] = Try(clazz.getDeclaredConstructor(classOf[String], classOf[Throwable]).asInstanceOf[Constructor[Throwable]]) } object ExceptionInstanceHavingConstructorWithMessageAndCause { def unapply(instance: Throwable): Option[Throwable] = { ExceptionClassHavingConstructorWithMessageAndCause.constructorWithMessageAndCause(instance.getClass).map(_ => instance).toOption } } object ExceptionClassHavingConstructorWithMessageOnly { def unapply(className: String): Option[Constructor[Throwable]] = { (for { clazz <- Try(Class.forName(className)) if classOf[Throwable].isAssignableFrom(clazz) constructor <- constructorWithMessageOnly(clazz) } yield constructor).toOption } def constructorWithMessageOnly(clazz: Class[_]): Try[Constructor[Throwable]] = Try(clazz.getDeclaredConstructor(classOf[String]).asInstanceOf[Constructor[Throwable]]) } object ExceptionInstanceHavingConstructorWithMessageOnly { def unapply(instance: Throwable): Option[Throwable] = { ExceptionClassHavingConstructorWithMessageOnly.constructorWithMessageOnly(instance.getClass).map(_ => instance).toOption } }
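A rough sketch of the serialization direction, assuming the serializer is simply added to DefaultFormats (the exact JSON layout depends on the Formats and type hints in scope):

import org.json4s.{DefaultFormats, Formats}
import org.json4s.native.Serialization

implicit val formats: Formats = DefaultFormats + ExceptionSerializer

// IllegalStateException has a (String, Throwable) constructor, so the first serialize case applies.
val json = Serialization.write(new IllegalStateException("boom"))
// e.g. {"jsonClass":"java.lang.IllegalStateException","message":"boom","cause":null}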
Example 44
Source File: Json4sSerializer.scala From reliable-http-client with Apache License 2.0 | 5 votes |
package rhttpc.transport.json4s import org.json4s.Formats import org.json4s.native.Serialization import rhttpc.transport.{Deserializer, Serializer} import scala.util.Try class Json4sSerializer[Msg <: AnyRef](implicit formats: Formats) extends Serializer[Msg] { override def serialize(msg: Msg): String = { Serialization.write(msg)(formats) } } class Json4sDeserializer[Msg: Manifest](implicit formats: Formats) extends Deserializer[Msg] { override def deserialize(value: String): Try[Msg] = { Try(Serialization.read[Msg](value)) } }
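A self-contained usage sketch with a hypothetical Ping message type, showing the serializer and deserializer round-tripping a value:

import org.json4s.{DefaultFormats, Formats}

case class Ping(id: Int)

implicit val formats: Formats = DefaultFormats

val serializer   = new Json4sSerializer[Ping]
val deserializer = new Json4sDeserializer[Ping]

val json     = serializer.serialize(Ping(1))    // {"id":1}
val restored = deserializer.deserialize(json)   // Success(Ping(1))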
Example 45
Source File: CommonFormats.scala From reliable-http-client with Apache License 2.0 | 5 votes |
package rhttpc.transport.json4s import org.json4s.{DefaultFormats, Formats, TypeHints} object CommonFormats { implicit val formats: Formats = new DefaultFormats { override def dateFormatter = DefaultFormats.losslessDate() override val typeHints: TypeHints = AllTypeHints override val strictOptionParsing: Boolean = true } + ExceptionSerializer + ObjectSerializer + IndexedSeqSerializer }
Example 46
Source File: EventStatsServlet.scala From spark-streaming-demo with Apache License 2.0 | 5 votes |
package com.datastax.examples.meetup import org.joda.time.{DateTimeZone, DateTime, Duration} import org.scalatra.scalate.ScalateSupport import org.scalatra.{CorsSupport, ScalatraServlet} import scala.concurrent.Await import scala.concurrent.duration._ import org.json4s.{DefaultFormats, Formats} import org.scalatra.json._ class EventStatsServlet() extends ScalatraServlet with CorsSupport with JacksonJsonSupport with ScalateSupport { protected implicit val jsonFormats: Formats = DefaultFormats before() { contentType = formats("json") } options("/*"){ response.setHeader("Access-Control-Allow-Headers", request.getHeader("Access-Control-Request-Headers")); } get("/trending") { val time = new DateTime(DateTimeZone.UTC) // Scan 5 second intervals within the past 1 minute. // Stop as soon as first successful found. val result = (for (i <- Stream range (0,12); v = getTrendingTopics(i, time); if v.nonEmpty) yield v).headOption // Order topics by count in desc order and take top 20 result.map(r => r.toIndexedSeq.sortBy(_._2).reverse.take(20)) } get("/countries") { val attendeesByCountry = Event.dimensions("attending", "ALL") Await.result(attendeesByCountry, 5 seconds) .map{ case (a,b) => Map("code" -> a.toUpperCase, "value" -> b)} } get("/") { contentType="text/html" layoutTemplate("dashboard.ssp") } def roundDateTime(t: DateTime, d: Duration) = { t minus (t.getMillis - (t.getMillis.toDouble / d.getMillis).round * d.getMillis) } def getTrendingTopics(i:Int, time:DateTime) = { val t = roundDateTime(time minusSeconds 5*i, Duration.standardSeconds(5)) val trendingTopics = Event.dimensions("trending", "S" + t.toString("yyyyMMddHHmmss")) Await.result(trendingTopics, 5 seconds) } }
Example 47
Source File: RedisSourceOffset.scala From spark-redis with BSD 3-Clause "New" or "Revised" License | 5 votes |
package org.apache.spark.sql.redis.stream import com.redislabs.provider.redis.util.JsonUtils import org.apache.spark.sql.execution.streaming.{Offset, SerializedOffset} import org.json4s.jackson.Serialization import org.json4s.{Formats, NoTypeHints} case class RedisSourceOffset(offsets: Map[String, RedisConsumerOffset]) extends Offset { override def json(): String = JsonUtils.toJson(this) } object RedisSourceOffset { private implicit val formats: Formats = Serialization.formats(NoTypeHints) def fromOffset(offset: Offset): RedisSourceOffset = { offset match { case o: RedisSourceOffset => o case so: SerializedOffset => fromJson(so.json) case _ => throw new IllegalArgumentException( s"Invalid conversion from offset of ${offset.getClass} to RedisSourceOffset") } } def fromJson(json: String): RedisSourceOffset = { try { Serialization.read[RedisSourceOffset](json) } catch { case e: Throwable => val example = RedisSourceOffset(Map("my-stream" -> RedisConsumerOffset("redis-source", "1543674099961-0"))) val jsonExample = Serialization.write(example) throw new RuntimeException(s"Unable to parse offset json. Example of valid json: $jsonExample", e) } } } case class RedisConsumerOffset(groupName: String, offset: String) case class RedisSourceOffsetRange(start: Option[String], end: String, config: RedisConsumerConfig)
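A round-trip sketch using the same NoTypeHints formats as the companion object; the stream name and offset id are the illustrative values already used in the error message above:

import org.json4s.jackson.Serialization
import org.json4s.{Formats, NoTypeHints}

implicit val formats: Formats = Serialization.formats(NoTypeHints)

val offset = RedisSourceOffset(Map("my-stream" -> RedisConsumerOffset("redis-source", "1543674099961-0")))
val json   = Serialization.write(offset)
val parsed = Serialization.read[RedisSourceOffset](json)   // == offset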
Example 48
Source File: KinesisRDDWriter.scala From aws-kinesis-scala with Apache License 2.0 | 5 votes |
package jp.co.bizreach.kinesis.spark import com.amazonaws.client.builder.AwsClientBuilder.EndpointConfiguration import com.amazonaws.regions.Regions import jp.co.bizreach.kinesis._ import org.apache.commons.codec.digest.DigestUtils import org.apache.spark.TaskContext import org.json4s.jackson.JsonMethods import org.json4s.{DefaultFormats, Extraction, Formats} import org.slf4j.LoggerFactory class KinesisRDDWriter[A <: AnyRef](streamName: String, region: Regions, credentials: SparkAWSCredentials, chunk: Int, endpoint: Option[String]) extends Serializable { private val logger = LoggerFactory.getLogger(getClass) def write(task: TaskContext, data: Iterator[A]): Unit = { // send data, including retry def put(a: Seq[PutRecordsEntry]) = endpoint.map(e => KinesisRDDWriter.endpointClient(credentials)(e)(region)) .getOrElse(KinesisRDDWriter.client(credentials)(region)) .putRecordsWithRetry(PutRecordsRequest(streamName, a)) .zipWithIndex.collect { case (Left(e), i) => a(i) -> s"${e.errorCode}: ${e.errorMessage}" } val errors = data.foldLeft( (Nil: Seq[PutRecordsEntry], Nil: Seq[(PutRecordsEntry, String)]) ){ (z, x) => val (records, failed) = z val payload = serialize(x) val entry = PutRecordsEntry(DigestUtils.sha256Hex(payload), payload) // record exceeds max size if (entry.recordSize > recordMaxDataSize) records -> ((entry -> "per-record size limit") +: failed) // execute else if (records.size >= chunk || (records.map(_.recordSize).sum + entry.recordSize) >= recordsMaxDataSize) (entry +: Nil) -> (put(records) ++ failed) // buffering else (entry +: records) -> failed } match { case (Nil, e) => e case (rest, e) => put(rest) ++ e } // failed records if (errors.nonEmpty) dump(errors) } protected def dump(errors: Seq[(PutRecordsEntry, String)]): Unit = logger.error( s"""Could not put record, count: ${errors.size}, following details: |${errors map { case (entry, message) => message + "\n" + new String(entry.data, "UTF-8") } mkString "\n"} """.stripMargin) protected def serialize(a: A)(implicit formats: Formats = DefaultFormats): Array[Byte] = JsonMethods.mapper.writeValueAsBytes(Extraction.decompose(a)(formats)) } object KinesisRDDWriter { private val cache = collection.concurrent.TrieMap.empty[Regions, AmazonKinesis] private val client: SparkAWSCredentials => Regions => AmazonKinesis = { credentials => implicit region => cache.getOrElseUpdate(region, AmazonKinesis(credentials.provider)) } private val endpointClient: SparkAWSCredentials => String => Regions => AmazonKinesis = { credentials => endpoint => implicit region => cache.getOrElseUpdate(region, AmazonKinesis(credentials.provider, new EndpointConfiguration(endpoint, region.getName))) } }
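The serialize helper above decomposes a value to a JValue with the implicit Formats and then lets the shared Jackson mapper write the bytes. A stand-alone sketch of that path with a hypothetical Click event:

import org.json4s.jackson.JsonMethods
import org.json4s.{DefaultFormats, Extraction, Formats}

case class Click(userId: String, page: String)

implicit val formats: Formats = DefaultFormats

val jValue = Extraction.decompose(Click("u-1", "/home"))
val bytes: Array[Byte] = JsonMethods.mapper.writeValueAsBytes(jValue)
// new String(bytes, "UTF-8") == {"userId":"u-1","page":"/home"}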
Example 49
Source File: GeneratorThread.scala From sparta with Apache License 2.0 | 5 votes |
package com.stratio.benchmark.generator.threads import java.util.{Date, UUID} import akka.event.slf4j.SLF4JLogging import com.stratio.benchmark.generator.runners.StoppedThreads import com.stratio.kafka.benchmark.generator.kafka.KafkaProducer import com.stratio.models.benchmark.generator.models.{RawModel, RawModelCommonData} import kafka.producer.Producer import org.json4s.native.Serialization._ import org.json4s.{DefaultFormats, Formats} class GeneratorThread(producer: Producer[String,String], timeout: Long, stoppedThreads: StoppedThreads, topic: String) extends Runnable with SLF4JLogging with RawModelCommonData { implicit val formats: Formats = DefaultFormats var numberOfEvents = 0 override def run: Unit = { generateRaw(new Date().getTime) producer.close() stoppedThreads.incrementNumberOfEvents(numberOfEvents) stoppedThreads.incrementNumberOfThreads } private def generateRaw(startTimeInMillis: Long): Unit = { while(((startTimeInMillis + timeout) - new Date().getTime) > 0) { val id = UUID.randomUUID.toString val timestamp = RawModel.generateTimestamp val clientId = RawModel.generateRandomInt(RawModel.Range_client_id._1, RawModel.Range_client_id._2) val latitude = clientIdGeo.get(clientId).get._1 val longitude = clientIdGeo.get(clientId).get._2 val paymentMethod = RawModel.generatePaymentMethod() val creditCard = clientIdCreditCard.get(clientId).get val shoppingCenter = RawModel.generateShoppingCenter() val employee = RawModel.generateRandomInt(RawModel.Range_employee._1, RawModel.Range_employee._2) val rawModel = new RawModel( id, timestamp, clientId, latitude, longitude, paymentMethod, creditCard, shoppingCenter, employee) KafkaProducer.send(producer, topic, write(rawModel)) numberOfEvents = numberOfEvents + 1 } } }
Example 50
Source File: KafkaBase.scala From sparta with Apache License 2.0 | 5 votes |
package com.stratio.sparta.plugin.input.kafka import java.io.{Serializable => JSerializable} import com.stratio.sparta.plugin.input.kafka.models.TopicsModel import com.stratio.sparta.sdk.properties.JsoneyStringSerializer import com.stratio.sparta.sdk.properties.ValidatingPropertyMap._ import org.json4s.jackson.Serialization._ import org.json4s.{DefaultFormats, Formats} import scala.util.Try trait KafkaBase { val DefaultHost = "localhost" val DefaultBrokerPort = "9092" val DefaultZkPort = "2181" val DefaultZookeeperPath = "" val properties: Map[String, JSerializable] def extractTopics: Set[String] = if (properties.contains("topics")) getTopicsPartitions.topics.map(topicPartitionModel => topicPartitionModel.topic).toSet else throw new IllegalStateException(s"Invalid configuration, topics must be declared in direct approach") private def getTopicsPartitions: TopicsModel = { implicit val json4sJacksonFormats: Formats = DefaultFormats + new JsoneyStringSerializer() val topicsModel = read[TopicsModel]( s"""{"topics": ${properties.get("topics").fold("[]") { values => values.toString }}}""" ) if (topicsModel.topics.isEmpty) throw new IllegalStateException(s"topics is mandatory") else topicsModel } }
Example 51
Source File: AccountResponse.scala From scala-stellar-sdk with Apache License 2.0 | 5 votes |
package stellar.sdk.model.response import java.nio.charset.StandardCharsets.UTF_8 import org.json4s.{DefaultFormats, Formats} import org.json4s.JsonAST.{JArray, JObject} import stellar.sdk._ import stellar.sdk.model.Amount.toBaseUnits import stellar.sdk.model._ import stellar.sdk.util.ByteArrays case class AccountResponse(id: PublicKey, lastSequence: Long, subEntryCount: Int, thresholds: Thresholds, authRequired: Boolean, authRevocable: Boolean, balances: List[Balance], signers: List[Signer], data: Map[String, Array[Byte]]) { def toAccount: Account = Account(AccountId(id.publicKey), lastSequence + 1) def decodedData: Map[String, String] = data.map { case (k, v) => k -> new String(v, UTF_8) } } object AccountRespDeserializer extends ResponseParser[AccountResponse]({ o: JObject => implicit val formats: Formats = DefaultFormats val id = KeyPair.fromAccountId((o \ "id").extract[String]) val seq = (o \ "sequence").extract[String].toLong val subEntryCount = (o \ "subentry_count").extract[Int] val lowThreshold = (o \ "thresholds" \ "low_threshold").extract[Int] val mediumThreshold = (o \ "thresholds" \ "med_threshold").extract[Int] val highThreshold = (o \ "thresholds" \ "high_threshold").extract[Int] val authRequired = (o \ "flags" \ "auth_required").extract[Boolean] val authRevocable = (o \ "flags" \ "auth_revocable").extract[Boolean] val JArray(jsBalances) = o \ "balances" val balances = jsBalances.map { case balObj: JObject => val units = toBaseUnits((balObj \ "balance").extract[String].toDouble).get val amount = (balObj \ "asset_type").extract[String] match { case "credit_alphanum4" => Amount(units, IssuedAsset4( code = (balObj \ "asset_code").extract[String], issuer = KeyPair.fromAccountId((balObj \ "asset_issuer").extract[String]) )) case "credit_alphanum12" => Amount(units, IssuedAsset12( code = (balObj \ "asset_code").extract[String], issuer = KeyPair.fromAccountId((balObj \ "asset_issuer").extract[String]) )) case "native" => NativeAmount(units) case t => throw new RuntimeException(s"Unrecognised asset type: $t") } val limit = (balObj \ "limit").extractOpt[String].map(BigDecimal(_)).map(toBaseUnits).map(_.get) val buyingLiabilities = toBaseUnits(BigDecimal((balObj \ "buying_liabilities").extract[String])).get val sellingLiabilities = toBaseUnits(BigDecimal((balObj \ "selling_liabilities").extract[String])).get val authorised = (balObj \ "is_authorized").extractOpt[Boolean].getOrElse(false) val authorisedToMaintainLiabilities = (balObj \ "is_authorized_to_maintain_liabilities") .extractOpt[Boolean].getOrElse(false) Balance(amount, limit, buyingLiabilities, sellingLiabilities, authorised, authorisedToMaintainLiabilities) case _ => throw new RuntimeException(s"Expected js object at 'balances'") } val JArray(jsSigners) = o \ "signers" val signers = jsSigners.map { case signerObj: JObject => val key = StrKey.decodeFromString((signerObj \ "key").extract[String]).asInstanceOf[SignerStrKey] val weight = (signerObj \ "weight").extract[Int] Signer(key, weight) case _ => throw new RuntimeException(s"Expected js object at 'signers'") } val JObject(dataFields) = o \ "data" val data = dataFields.map{ case (k, v) => k -> ByteArrays.base64(v.extract[String]) }.toMap AccountResponse(id, seq, subEntryCount, Thresholds(lowThreshold, mediumThreshold, highThreshold), authRequired, authRevocable, balances, signers, data) })
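A tiny illustration (with a made-up JSON fragment) of the field-extraction style used by AccountRespDeserializer: navigate with \ and convert with extract or extractOpt:

import org.json4s.native.JsonMethods.parse
import org.json4s.{DefaultFormats, Formats}

implicit val formats: Formats = DefaultFormats

val o = parse("""{"sequence":"103420918407103888","subentry_count":2,"flags":{"auth_required":true}}""")

val sequence      = (o \ "sequence").extract[String].toLong          // 103420918407103888L
val subEntryCount = (o \ "subentry_count").extract[Int]              // 2
val authRequired  = (o \ "flags" \ "auth_required").extract[Boolean] // true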
Example 52
Source File: PaymentPath.scala From scala-stellar-sdk with Apache License 2.0 | 5 votes |
package stellar.sdk.model import org.json4s.JsonAST.JObject import org.json4s.{DefaultFormats, Formats, JArray, JValue} import stellar.sdk.KeyPair import stellar.sdk.model.AmountParser.{AssetDeserializer, parseAsset} import stellar.sdk.model.response.ResponseParser case class PaymentPath(source: Amount, destination: Amount, path: Seq[Asset]) object PaymentPathDeserializer extends ResponseParser[PaymentPath]({ o: JObject => implicit val formats = DefaultFormats implicit val assetDeserializer = AssetDeserializer PaymentPath( source = AmountParser.amount("source_", o), destination = AmountParser.amount("destination_", o), path = { val JArray(values) = (o \ "path").extract[JArray] values.map { jv => parseAsset("", jv) } } ) }) object AmountParser { implicit val formats = DefaultFormats def parseAsset(prefix: String, o: JValue)(implicit formats: Formats): Asset = { val assetType = (o \ s"${prefix}asset_type").extract[String] def code = (o \ s"${prefix}asset_code").extract[String] def issuer = KeyPair.fromAccountId((o \ s"${prefix}asset_issuer").extract[String]) assetType match { case "native" => NativeAsset case "credit_alphanum4" => IssuedAsset4(code, issuer) case "credit_alphanum12" => IssuedAsset12(code, issuer) case t => throw new RuntimeException(s"Unrecognised ${prefix}asset type: $t") } } def amount(prefix: String, o: JObject)(implicit formats: Formats): Amount = { val asset = parseAsset(prefix, o) val units = Amount.toBaseUnits((o \ s"${prefix}amount").extract[String]).get Amount(units, asset) } object AssetDeserializer extends ResponseParser[Asset](parseAsset("", _)) }
Example 53
Source File: TransactionHistory.scala From scala-stellar-sdk with Apache License 2.0 | 5 votes |
package stellar.sdk.model.result import java.time.ZonedDateTime import org.json4s.{DefaultFormats, Formats} import org.json4s.JsonAST.JObject import stellar.sdk.model._ import stellar.sdk.model.ledger.TransactionLedgerEntries.arr import stellar.sdk.model.ledger.{LedgerEntryChange, LedgerEntryChanges, TransactionLedgerEntries} import stellar.sdk.model.response.ResponseParser import stellar.sdk.util.ByteArrays.base64 import stellar.sdk.{KeyPair, PublicKey} import scala.util.Try case class TransactionHistory(hash: String, ledgerId: Long, createdAt: ZonedDateTime, account: PublicKey, sequence: Long, maxFee: NativeAmount, feeCharged: NativeAmount, operationCount: Int, memo: Memo, signatures: Seq[String], envelopeXDR: String, resultXDR: String, resultMetaXDR: String, feeMetaXDR: String, validAfter: Option[ZonedDateTime], validBefore: Option[ZonedDateTime], feeBump: Option[FeeBumpHistory]) { lazy val result: TransactionResult = TransactionResult.decodeXDR(resultXDR) def ledgerEntries: TransactionLedgerEntries = TransactionLedgerEntries.decodeXDR(resultMetaXDR) def feeLedgerEntries: Seq[LedgerEntryChange] = LedgerEntryChanges.decodeXDR(feeMetaXDR) @deprecated("Replaced by `feeCharged`", "v0.7.2") val feePaid: NativeAmount = feeCharged } object TransactionHistoryDeserializer extends { } with ResponseParser[TransactionHistory]({ o: JObject => implicit val formats: Formats = DefaultFormats val maxFee = NativeAmount((o \ "max_fee").extract[String].toInt) val signatures = (o \ "signatures").extract[List[String]] val hash = (o \ "hash").extract[String] val inner = for { hash <- (o \ "inner_transaction" \ "hash").extractOpt[String] maxFee <- (o \ "inner_transaction" \ "max_fee").extractOpt[Int].map(NativeAmount(_)) signatures <- (o \ "inner_transaction" \ "signatures").extractOpt[List[String]] } yield (hash, maxFee, signatures) TransactionHistory( hash = inner.map(_._1).getOrElse(hash), ledgerId = (o \ "ledger").extract[Long], createdAt = ZonedDateTime.parse((o \ "created_at").extract[String]), account = KeyPair.fromAccountId((o \ "source_account").extract[String]), sequence = (o \ "source_account_sequence").extract[String].toLong, maxFee = inner.map(_._2).getOrElse(maxFee), feeCharged = NativeAmount((o \ "fee_charged").extract[String].toInt), operationCount = (o \ "operation_count").extract[Int], memo = (o \ "memo_type").extract[String] match { case "none" => NoMemo case "id" => MemoId(BigInt((o \ "memo").extract[String]).toLong) case "text" => MemoText((o \ "memo").extractOpt[String].getOrElse("")) case "hash" => MemoHash(base64((o \ "memo").extract[String]).toIndexedSeq) case "return" => MemoReturnHash(base64((o \ "memo").extract[String]).toIndexedSeq) }, signatures = inner.map(_._3).getOrElse(signatures), envelopeXDR = (o \ "envelope_xdr").extract[String], resultXDR = (o \ "result_xdr").extract[String], resultMetaXDR = (o \ "result_meta_xdr").extract[String], feeMetaXDR = (o \ "fee_meta_xdr").extract[String], // TODO (jem) - Remove the Try wrappers when https://github.com/stellar/go/issues/1381 is fixed. validBefore = Try((o \ "valid_before").extractOpt[String].map(ZonedDateTime.parse)).getOrElse(None), validAfter = Try((o \ "valid_after").extractOpt[String].map(ZonedDateTime.parse)).getOrElse(None), feeBump = inner.map { _ => FeeBumpHistory(maxFee, hash, signatures) } ) })
Example 54
Source File: FederationServer.scala From scala-stellar-sdk with Apache License 2.0 | 5 votes |
package stellar.sdk import java.net.HttpURLConnection.HTTP_NOT_FOUND import com.typesafe.scalalogging.LazyLogging import okhttp3.{Headers, HttpUrl, OkHttpClient, Request} import org.json4s.native.{JsonMethods, Serialization} import org.json4s.{Formats, NoTypeHints} import stellar.sdk.inet.RestException import stellar.sdk.model.response.{FederationResponse, FederationResponseDeserialiser} import scala.concurrent.{ExecutionContext, Future} import scala.util.{Failure, Success, Try} case class FederationServer(base: HttpUrl) extends LazyLogging { implicit val formats: Formats = Serialization.formats(NoTypeHints) + FederationResponseDeserialiser private val client = new OkHttpClient() private val headers = Headers.of( "X-Client-Name", BuildInfo.name, "X-Client-Version", BuildInfo.version) def byName(name: String)(implicit ec: ExecutionContext): Future[Option[FederationResponse]] = fetchFederationResponse(base.newBuilder() .addQueryParameter("q", name) .addQueryParameter("type", "name") .build(), _.copy(address = name)) def byAccount(account: PublicKey)(implicit ec: ExecutionContext): Future[Option[FederationResponse]] = fetchFederationResponse(base.newBuilder() .addQueryParameter("q", account.accountId) .addQueryParameter("type", "id") .build(), _.copy(account = account)) private def fetchFederationResponse(url: HttpUrl, fillIn: FederationResponse => FederationResponse) (implicit ec: ExecutionContext): Future[Option[FederationResponse]] = Future(client.newCall(new Request.Builder().url(url).headers(headers).build()).execute()) .map { response => response.code() match { case HTTP_NOT_FOUND => None case e if e >= 500 => throw RestException(response.body().string()) case _ => Try(response.body().string()) .map(JsonMethods.parse(_)) .map(_.extract[FederationResponse]) .map(fillIn) .map(validate) match { case Success(fr) => Some(fr) case Failure(t) => throw RestException("Could not parse document as FederationResponse.", t) } } } private def validate(fr: FederationResponse): FederationResponse = { if (fr.account == null) throw RestException(s"Document did not contain account_id") if (fr.address == null) throw RestException(s"Document did not contain stellar_address") fr } } object FederationServer { def apply(uriString: String): FederationServer = new FederationServer(HttpUrl.parse(uriString)) }
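A usage sketch against a hypothetical federation endpoint and address; both lookups return a Future because the HTTP call runs off the calling thread:

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import stellar.sdk.model.response.FederationResponse

val server = FederationServer("https://federation.example.com/federation")
val resolved: Future[Option[FederationResponse]] = server.byName("alice*example.com")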
Example 55
Source File: HorizonServerError.scala From scala-stellar-sdk with Apache License 2.0 | 5 votes |
package stellar.sdk.inet import okhttp3.HttpUrl import org.json4s.native.JsonMethods import org.json4s.{DefaultFormats, Formats, JObject, JValue} import scala.concurrent.duration.Duration import scala.util.Try case class HorizonServerError(uri: HttpUrl, body: JObject)(implicit val formats: Formats) extends Exception( s"Server error when communicating with Horizon. $uri -> ${ implicit val formats: Formats = DefaultFormats Try((body \ "detail").extract[String]).getOrElse(JsonMethods.compact(JsonMethods.render(body))) }" ) case class HorizonEntityNotFound(uri: HttpUrl, body: JValue)(implicit val formats: Formats) extends Exception( s"Requested entity was not found in Horizon. $uri -> ${ implicit val formats: Formats = DefaultFormats Try((body \ "detail").extract[String]).getOrElse(JsonMethods.compact(JsonMethods.render(body))) }" ) case class HorizonRateLimitExceeded(uri: HttpUrl, retryAfter: Duration)(implicit val formats: Formats) extends Exception( s"Horizon request rate limit was exceeded. Try again in $retryAfter" ) case class HorizonBadRequest(uri: HttpUrl, body: String) extends Exception( s"Bad request. $uri -> ${ implicit val formats: Formats = DefaultFormats Try( (JsonMethods.parse(body) \ "extras" \ "reason").extract[String] ).getOrElse(body) }") case class FailedResponse(cause: String) extends Exception(cause)
Example 56
Source File: PageParser.scala From scala-stellar-sdk with Apache License 2.0 | 5 votes |
package stellar.sdk.inet import java.net.HttpURLConnection.{HTTP_BAD_REQUEST, HTTP_NOT_FOUND} import okhttp3.HttpUrl import org.json4s.{DefaultFormats, Formats} import org.json4s.native.JsonMethods import scala.reflect.ClassTag object PageParser { def parse[T: ClassTag](url: HttpUrl, responseCode: Int, body: => String) (implicit m: Manifest[T], customFormats: Formats): Page[T] = { responseCode match { case HTTP_NOT_FOUND => Page(List.empty[T], None) case HTTP_BAD_REQUEST => throw HorizonBadRequest(url, body) case _ => JsonMethods.parse(body) .extract[RawPage] .parse[T](url) } } }
Example 57
Source File: PathPaymentStrictReceiveOperationSpec.scala From scala-stellar-sdk with Apache License 2.0 | 5 votes |
package stellar.sdk.model.op import org.json4s.{Formats, NoTypeHints} import org.json4s.native.JsonMethods.parse import org.json4s.native.Serialization import org.scalacheck.Arbitrary import org.specs2.mutable.Specification import stellar.sdk.util.ByteArrays.base64 import stellar.sdk.{ArbitraryInput, DomainMatchers} class PathPaymentStrictReceiveOperationSpec extends Specification with ArbitraryInput with DomainMatchers with JsonSnippets { implicit val arb: Arbitrary[Transacted[PathPaymentStrictReceiveOperation]] = Arbitrary(genTransacted(genPathPaymentStrictReceiveOperation)) implicit val formats: Formats = Serialization.formats(NoTypeHints) + TransactedOperationDeserializer "path payment operation" should { "serde via xdr string" >> prop { actual: PathPaymentStrictReceiveOperation => Operation.decodeXDR(base64(actual.encode)) must beEquivalentTo(actual) } "serde via xdr bytes" >> prop { actual: PathPaymentStrictReceiveOperation => val (remaining, decoded) = Operation.decode.run(actual.encode).value decoded mustEqual actual remaining must beEmpty } "parse from json" >> prop { op: Transacted[PathPaymentStrictReceiveOperation] => val doc = s""" |{ | "_links":{ | "self":{"href":"https://horizon-testnet.stellar.org/operations/940258535411713"}, | "transaction":{"href":"https://horizon-testnet.stellar.org/transactions/a995af17837d1b53fb5782269250a36e9dbe74170260b46f2708e5f23f7c864a"}, | "effects":{"href":"https://horizon-testnet.stellar.org/operations/940258535411713/effects"}, | "succeeds":{"href":"https://horizon-testnet.stellar.org/effects?order=desc&cursor=940258535411713"}, | "precedes":{"href":"https://horizon-testnet.stellar.org/effects?order=asc&cursor=940258535411713"} | }, | "id": "${op.id}", | "paging_token": "10157597659137", | "source_account": "${op.operation.sourceAccount.get.accountId}", | "type":"path_payment", | "type_i":2, | "created_at": "${formatter.format(op.createdAt)}", | "transaction_hash": "${op.txnHash}", | ${amountDocPortion(op.operation.destinationAmount)} | ${amountDocPortion(op.operation.sendMax, "source_max", "source_")} | "from":"${op.operation.sourceAccount.get.accountId}", | "to":"${op.operation.destinationAccount.publicKey.accountId}", | "path":[${if (op.operation.path.isEmpty) "" else op.operation.path.map(asset(_)).mkString("{", "},{", "}")}] |} """.stripMargin parse(doc).extract[Transacted[Operation]] mustEqual removeDestinationSubAccountId(op) }.setGen(genTransacted(genPathPaymentStrictReceiveOperation.suchThat(_.sourceAccount.nonEmpty))) } // Because sub accounts are not yet supported in Horizon JSON. private def removeDestinationSubAccountId(op: Transacted[PathPaymentStrictReceiveOperation]): Transacted[PathPaymentStrictReceiveOperation] = { op.copy(operation = op.operation.copy(destinationAccount = op.operation.destinationAccount.copy(subAccountId = None))) } }
Example 58
Source File: CreateAccountOperationSpec.scala From scala-stellar-sdk with Apache License 2.0 | 5 votes |
package stellar.sdk.model.op import org.json4s.{Formats, NoTypeHints} import org.json4s.native.JsonMethods.parse import org.json4s.native.Serialization import org.scalacheck.Arbitrary import org.specs2.mutable.Specification import stellar.sdk.util.ByteArrays.base64 import stellar.sdk.{ArbitraryInput, DomainMatchers} class CreateAccountOperationSpec extends Specification with ArbitraryInput with DomainMatchers with JsonSnippets { implicit val arb: Arbitrary[Transacted[CreateAccountOperation]] = Arbitrary(genTransacted(genCreateAccountOperation)) implicit val formats: Formats = Serialization.formats(NoTypeHints) + TransactedOperationDeserializer + OperationDeserializer "create account operation" should { "serde via xdr string" >> prop { actual: CreateAccountOperation => Operation.decodeXDR(base64(actual.encode)) must beEquivalentTo(actual) } "serde via xdr bytes" >> prop { actual: CreateAccountOperation => val (remaining, decoded) = Operation.decode.run(actual.encode).value decoded mustEqual actual remaining must beEmpty } "be parsed from json " >> prop { op: Transacted[CreateAccountOperation] => val doc = s""" |{ | "_links": { | "self": {"href": "https://horizon-testnet.stellar.org/operations/10157597659137"}, | "transaction": {"href": "https://horizon-testnet.stellar.org/transactions/17a670bc424ff5ce3b386dbfaae9990b66a2a37b4fbe51547e8794962a3f9e6a"}, | "effects": {"href": "https://horizon-testnet.stellar.org/operations/10157597659137/effects"}, | "succeeds": {"href": "https://horizon-testnet.stellar.org/effects?order=desc\u0026cursor=10157597659137"}, | "precedes": {"href": "https://horizon-testnet.stellar.org/effects?order=asc\u0026cursor=10157597659137"} | }, | "id": "${op.id}", | "paging_token": "10157597659137", | "source_account": "${op.operation.sourceAccount.get.accountId}", | "type": "create_account", | "type_i": 0, | "created_at": "${formatter.format(op.createdAt)}", | "transaction_hash": "${op.txnHash}", | "starting_balance": "${amountString(op.operation.startingBalance)}", | "funder": "${op.operation.sourceAccount.get.accountId}", | "account": "${op.operation.destinationAccount.publicKey.accountId}" |} """.stripMargin parse(doc).extract[Transacted[CreateAccountOperation]] mustEqual removeDestinationSubAccountId(op) }.setGen(genTransacted(genCreateAccountOperation.suchThat(_.sourceAccount.nonEmpty))) } // Because sub accounts are not yet supported in Horizon JSON. private def removeDestinationSubAccountId(op: Transacted[CreateAccountOperation]): Transacted[CreateAccountOperation] = { op.copy(operation = op.operation.copy(destinationAccount = op.operation.destinationAccount.copy(subAccountId = None))) } }
Example 59
Source File: PathPaymentStrictSendOperationSpec.scala From scala-stellar-sdk with Apache License 2.0 | 5 votes |
package stellar.sdk.model.op import org.json4s.{Formats, NoTypeHints} import org.json4s.native.JsonMethods.parse import org.json4s.native.Serialization import org.scalacheck.Arbitrary import org.specs2.mutable.Specification import stellar.sdk.util.ByteArrays.base64 import stellar.sdk.{ArbitraryInput, DomainMatchers} class PathPaymentStrictSendOperationSpec extends Specification with ArbitraryInput with DomainMatchers with JsonSnippets { implicit val arb: Arbitrary[Transacted[PathPaymentStrictSendOperation]] = Arbitrary(genTransacted(genPathPaymentStrictSendOperation)) implicit val formats: Formats = Serialization.formats(NoTypeHints) + TransactedOperationDeserializer "path payment operation" should { "serde via xdr string" >> prop { actual: PathPaymentStrictSendOperation => Operation.decodeXDR(base64(actual.encode)) must beEquivalentTo(actual) } "serde via xdr bytes" >> prop { actual: PathPaymentStrictSendOperation => val (remaining, decoded) = Operation.decode.run(actual.encode).value decoded mustEqual actual remaining must beEmpty } "parse from json" >> prop { op: Transacted[PathPaymentStrictSendOperation] => val doc = s""" |{ | "_links":{ | "self":{"href":"https://horizon-testnet.stellar.org/operations/940258535411713"}, | "transaction":{"href":"https://horizon-testnet.stellar.org/transactions/a995af17837d1b53fb5782269250a36e9dbe74170260b46f2708e5f23f7c864a"}, | "effects":{"href":"https://horizon-testnet.stellar.org/operations/940258535411713/effects"}, | "succeeds":{"href":"https://horizon-testnet.stellar.org/effects?order=desc&cursor=940258535411713"}, | "precedes":{"href":"https://horizon-testnet.stellar.org/effects?order=asc&cursor=940258535411713"} | }, | "id": "${op.id}", | "paging_token": "10157597659137", | "source_account": "${op.operation.sourceAccount.get.accountId}", | "type":"path_payment_strict_send", | "type_i":13, | "created_at": "${formatter.format(op.createdAt)}", | "transaction_hash": "${op.txnHash}", | ${amountDocPortion(op.operation.sendAmount, assetPrefix = "source_")} | ${amountDocPortion(op.operation.destinationMin, "destination_min")} | "from":"${op.operation.sourceAccount.get.accountId}", | "to":"${op.operation.destinationAccount.publicKey.accountId}", | "path":[${if (op.operation.path.isEmpty) "" else op.operation.path.map(asset(_)).mkString("{", "},{", "}")}] |} """.stripMargin parse(doc).extract[Transacted[Operation]] mustEqual removeDestinationSubAccountId(op) }.setGen(genTransacted(genPathPaymentStrictSendOperation.suchThat(_.sourceAccount.nonEmpty))) } // Because sub accounts are not yet supported in Horizon JSON. private def removeDestinationSubAccountId(op: Transacted[PathPaymentStrictSendOperation]): Transacted[PathPaymentStrictSendOperation] = { op.copy(operation = op.operation.copy(destinationAccount = op.operation.destinationAccount.copy(subAccountId = None))) } }
Example 60
Source File: PaymentOperationSpec.scala From scala-stellar-sdk with Apache License 2.0 | 5 votes |
package stellar.sdk.model.op import org.json4s.{Formats, NoTypeHints} import org.json4s.native.JsonMethods.parse import org.json4s.native.Serialization import org.scalacheck.Arbitrary import org.specs2.mutable.Specification import stellar.sdk.util.ByteArrays.base64 import stellar.sdk.{ArbitraryInput, DomainMatchers} class PaymentOperationSpec extends Specification with ArbitraryInput with DomainMatchers with JsonSnippets { implicit val arb: Arbitrary[Transacted[PaymentOperation]] = Arbitrary(genTransacted(genPaymentOperation)) implicit val formats: Formats = Serialization.formats(NoTypeHints) + TransactedOperationDeserializer "payment operation" should { "serde via xdr string" >> prop { actual: PaymentOperation => Operation.decodeXDR(base64(actual.encode)) must beEquivalentTo(actual) } "serde via xdr bytes" >> prop { actual: PaymentOperation => val (remaining, decoded) = Operation.decode.run(actual.encode).value decoded mustEqual actual remaining must beEmpty } "parse from json" >> prop { op: Transacted[PaymentOperation] => val doc = s""" | { | "_links": { | "self": {"href": "https://horizon-testnet.stellar.org/operations/10157597659144"}, | "transaction": {"href": "https://horizon-testnet.stellar.org/transactions/17a670bc424ff5ce3b386dbfaae9990b66a2a37b4fbe51547e8794962a3f9e6a"}, | "effects": {"href": "https://horizon-testnet.stellar.org/operations/10157597659144/effects"}, | "succeeds": {"href": "https://horizon-testnet.stellar.org/effects?order=desc\u0026cursor=10157597659144"}, | "precedes": {"href": "https://horizon-testnet.stellar.org/effects?order=asc\u0026cursor=10157597659144"} | }, | "id": "${op.id}", | "paging_token": "10157597659137", | "source_account": "${op.operation.sourceAccount.get.accountId}", | "type": "payment", | "type_i": 1, | "created_at": "${formatter.format(op.createdAt)}", | "transaction_hash": "${op.txnHash}", | ${amountDocPortion(op.operation.amount)}, | "from": "${op.operation.sourceAccount.get.accountId}", | "to": "${op.operation.destinationAccount.publicKey.accountId}", |} """.stripMargin parse(doc).extract[Transacted[PaymentOperation]] mustEqual removeDestinationSubAccountId(op) }.setGen(genTransacted(genPaymentOperation.suchThat(_.sourceAccount.nonEmpty))) } // Because sub accounts are not yet supported in Horizon JSON. private def removeDestinationSubAccountId(op: Transacted[PaymentOperation]): Transacted[PaymentOperation] = { op.copy(operation = op.operation.copy(destinationAccount = op.operation.destinationAccount.copy(subAccountId = None))) } }
Example 61
Source File: TrustLineAuthEffectResponseSpec.scala From scala-stellar-sdk with Apache License 2.0 | 5 votes |
package stellar.sdk.model.response import org.json4s.{Formats, NoTypeHints} import org.json4s.native.JsonMethods._ import org.json4s.native.Serialization import org.scalacheck.Gen import org.specs2.mutable.Specification import stellar.sdk._ import stellar.sdk.model.NonNativeAsset class TrustLineAuthEffectResponseSpec extends Specification with ArbitraryInput { implicit val formats: Formats = Serialization.formats(NoTypeHints) + EffectResponseDeserializer "an authorize trustline effect document" should { "parse to an authorize trustline effect" >> prop { (id: String, accn: KeyPair, asset: NonNativeAsset) => val json = doc(id, "trustline_authorized", accn, asset, 0.0) parse(json).extract[EffectResponse] mustEqual EffectTrustLineAuthorized(id, accn.asPublicKey, asset) }.setGen1(Gen.identifier) } "an authorize to maintain liabilities effect document" should { "parse to an authorize to maintain liabilities effect" >> prop { (id: String, accn: KeyPair, asset: NonNativeAsset) => val json = doc(id, "trustline_authorized_to_maintain_liabilities", accn, asset, 0.0) parse(json).extract[EffectResponse] mustEqual EffectTrustLineAuthorizedToMaintainLiabilities(id, accn.asPublicKey, asset) }.setGen1(Gen.identifier) } "a deauthorize trustline effect document" should { "parse to a deauthorize trustline effect" >> prop { (id: String, accn: KeyPair, asset: NonNativeAsset) => val json = doc(id, "trustline_deauthorized", accn, asset, 0.0) parse(json).extract[EffectResponse] mustEqual EffectTrustLineDeauthorized(id, accn.asPublicKey, asset) }.setGen1(Gen.identifier) } def doc(id: String, tpe: String, accn: PublicKeyOps, asset: NonNativeAsset, limit: Double) = { s""" |{ | "_links": { | "operation": { | "href": "https://horizon-testnet.stellar.org/operations/10157597659144" | }, | "succeeds": { | "href": "https://horizon-testnet.stellar.org/effects?order=desc\u0026cursor=10157597659144-2" | }, | "precedes": { | "href": "https://horizon-testnet.stellar.org/effects?order=asc\u0026cursor=10157597659144-2" | } | }, | "id": "$id", | "paging_token": "10157597659144-2", | "account": "${asset.issuer.accountId}", | "type": "$tpe", | "type_i": 23, | "asset_type": "${asset.typeString}", | "asset_code": "${asset.code}", | "trustor": "${accn.accountId}" |} """.stripMargin } }
Example 62
Source File: TagProcessor.scala From donut with MIT License | 5 votes |
package report.donut.gherkin.processors import org.json4s.jackson.Serialization import org.json4s.{Formats, NoTypeHints, jackson} import report.donut.gherkin.model._ case class ReportTag(tag: String, scenarios: List[Scenario], scenariosMetrics: Metrics, tagStatus: String, htmlElements: String = "") case class TagMetricsForChart(tag: String, scenariosMetrics: Metrics) object TagProcessor { def apply(features: List[Feature]): (List[ReportTag], String) = { val allReportTags = createAllReportTags(features) (allReportTags, createChart(allReportTags)) } private[processors] def createChart(reportTags: List[ReportTag]): String = { implicit def json4sJacksonFormats: Formats = jackson.Serialization.formats(NoTypeHints) Serialization.writePretty(reportTags.map(t => new TagMetricsForChart(t.tag, t.scenariosMetrics))) } private[processors] def createAllReportTags(features: List[Feature]): List[ReportTag] = { val scenarios: List[Scenario] = features.flatMap(f => addFeatureTagsToScenarios(f.scenariosExcludeBackgroundAndUnitTests, f.tags)) groupElementsByTag(scenarios) .map { case (tag, scenarioList) => new ReportTag(tag, scenarioList, ScenarioMetrics(scenarioList), tagStatus(scenarioList)) }.toList .zipWithIndex.map { case (t, i) => t.copy(htmlElements = HTMLTagsProcessor(t.scenarios, i.toString.trim)) } .sortWith((left, right) => left.scenariosMetrics.total > right.scenariosMetrics.total) } // tagName -> List[Elements], excluding background elements private[processors] def groupElementsByTag(scenarios: List[Scenario]): Map[String, List[Scenario]] = scenarios.flatMap(s => s.tags.map(tag => (tag, s))).groupBy(_._1).mapValues(value => value.map(_._2)) // cucumber 1 backwards compatibility - adds the parent (feature) tag to all children (scenarios) private[processors] def addFeatureTagsToScenarios(scenarios: List[Scenario], featureTags: List[String]): List[Scenario] = scenarios.map(e => e.copy(tags = (e.tags ::: featureTags).distinct)) // Returns `passed` or `failed` private[processors] def tagStatus(scenarios: List[Scenario]): String = { val statuses = scenarios.map(s => s.status.statusStr) if (statuses.contains(Status.FAILED)) Status.FAILED else Status.PASSED } }
Example 63
Source File: ImageProcessor.scala From donut with MIT License | 5 votes |
package report.donut.gherkin.processors import report.donut.gherkin.model.Embedding import org.json4s.jackson.Serialization import org.json4s.{Formats, NoTypeHints, jackson} object ImageProcessor { var imageMap = scala.collection.mutable.Map[Int, Embedding]() def getScreenshotIds(embeddings: List[Embedding]): String = { val a: Map[Int, Embedding] = embeddings.map(e => (e.data.hashCode -> e)).toMap a.map(a => imageMap += a) a.map(a => a._1).toList.mkString(",") } def allImages: String = { implicit def json4sJacksonFormats: Formats = jackson.Serialization.formats(NoTypeHints) val b: List[Embedding] = imageMap.map { case (k, v) => new Embedding(v.mime_type, v.data, k) }.toList Serialization.writePretty(b) } }
Example 64
Source File: HttpService.scala From ddd-leaven-akka-v2 with MIT License | 5 votes |
package ecommerce.sales import akka.actor.{Actor, ActorLogging, Props} import akka.http.scaladsl.Http import akka.http.scaladsl.server.Directives import pl.newicom.dddd.streams.ImplicitMaterializer import akka.util.Timeout import com.typesafe.config.Config import ecommerce.sales.app.ReservationViewEndpoint import io.github.lhotari.akka.http.health.HealthEndpoint.createDefaultHealthRoute import org.json4s.Formats import pl.newicom.dddd.serialization.JsonSerHints._ import pl.newicom.dddd.view.sql.SqlViewStore import scala.concurrent.duration.FiniteDuration import slick.jdbc.{JdbcProfile, PostgresProfile} object HttpService { def props(interface: String, port: Int, askTimeout: FiniteDuration): Props = Props(new HttpService(interface, port)(askTimeout)) } class HttpService(interface: String, port: Int)(implicit askTimeout: Timeout) extends Actor with ActorLogging with SalesReadFrontConfiguration with ImplicitMaterializer with Directives { import context.dispatcher implicit val formats: Formats = fromConfig(config) implicit val profile: JdbcProfile = PostgresProfile Http(context.system).bindAndHandle(route, interface, port) log.info(s"Listening on $interface:$port") override def receive: Receive = Actor.emptyBehavior override def config: Config = context.system.settings.config lazy val endpoints: ReservationViewEndpoint = new ReservationViewEndpoint private def route = pathPrefix("ecommerce" / "sales") { createDefaultHealthRoute() ~ provide(new SqlViewStore(config))(endpoints) } }
Example 65
Source File: ReservationViewEndpoint.scala From ddd-leaven-akka-v2 with MIT License | 5 votes |
package ecommerce.sales.app import akka.http.scaladsl.model.StatusCodes import akka.http.scaladsl.server.Route import ecommerce.sales.ReadEndpoint import ecommerce.sales.view.ReservationDao import org.json4s.Formats import pl.newicom.dddd.view.sql.SqlViewStore import slick.jdbc.JdbcProfile import scala.concurrent.ExecutionContext class ReservationViewEndpoint(implicit ec: ExecutionContext, profile: JdbcProfile, formats: Formats) extends ReadEndpoint { lazy val dao = new ReservationDao def route(viewStore: SqlViewStore): Route = { path("reservation" / "all") { get { complete { viewStore.run { dao.all } } } } ~ path("reservation" / Segment) { id => get { onSuccess(viewStore.run(dao.byId(id))) { case Some(res) => complete(res) case None => complete(StatusCodes.NotFound -> "unknown reservation") } } } } }
Example 66
Source File: ReservationViewEndpointSpec.scala From ddd-leaven-akka-v2 with MIT License | 5 votes |
package ecommerce.sales.app import java.sql.Date import akka.http.scaladsl.model.StatusCodes.NotFound import akka.http.scaladsl.server._ import akka.http.scaladsl.testkit.ScalatestRouteTest import com.typesafe.config.ConfigFactory import ecommerce.sales.view.{ReservationDao, ReservationView, ViewTestSupport} import ecommerce.sales.{ReservationStatus, SalesSerializationHintsProvider} import org.joda.time.DateTime._ import org.json4s.Formats import org.scalatest.{BeforeAndAfter, Matchers, WordSpecLike} import pl.newicom.dddd.serialization.JsonSerHints._ import pl.newicom.dddd.utils.UUIDSupport.uuid7 class ReservationViewEndpointSpec extends WordSpecLike with Matchers with ScalatestRouteTest with ViewTestSupport with BeforeAndAfter { override lazy val config = ConfigFactory.load implicit val formats: Formats = new SalesSerializationHintsProvider().hints() lazy val dao = new ReservationDao val reservationId = uuid7 before { viewStore.run { dao.createOrUpdate(ReservationView(reservationId, "client-1", ReservationStatus.Opened, new Date(now.getMillis))) }.futureValue } after { viewStore.run { dao.remove(reservationId) }.futureValue } "Reservation view endpoint" should { def response = responseAs[String] val route: Route = new ReservationViewEndpoint().route(viewStore) "respond to /reservation/all with all reservations" in { Get("/reservation/all") ~> route ~> check { response should include (reservationId) } } "respond to /reservation/{reservationId} with requested reservation" in { Get(s"/reservation/$reservationId") ~> route ~> check { response should include (reservationId) } } "respond to /reservation/{reservationId} with NotFound if reservation unknown" in { Get(s"/reservation/invalid") ~> route ~> check { status shouldBe NotFound } } } def ensureSchemaDropped = dao.ensureSchemaDropped def ensureSchemaCreated = dao.ensureSchemaCreated }
Example 67
Source File: HttpService.scala From ddd-leaven-akka-v2 with MIT License | 5 votes |
package ecommerce.sales.app import akka.actor.{Actor, ActorLogging, Props} import akka.event.Logging import akka.http.scaladsl.Http import akka.util.Timeout import ecommerce.sales.ReservationOfficeId import io.github.lhotari.akka.http.health.HealthEndpoint.createDefaultHealthRoute import org.json4s.Formats import pl.newicom.dddd.serialization.JsonSerHints.fromConfig import pl.newicom.dddd.writefront.HttpCommandHandler import scala.concurrent.duration.FiniteDuration object HttpService { def props(interface: String, port: Int, askTimeout: FiniteDuration): Props = Props(new HttpService(interface, port)(askTimeout)) } class HttpService(interface: String, port: Int)(implicit val timeout: Timeout) extends Actor with SalesFrontConfiguration with HttpCommandHandler with ActorLogging { import context.dispatcher implicit val formats: Formats = fromConfig(config) Http(context.system).bindAndHandle(route, interface, port) log.info(s"Listening on $interface:$port") override def receive: Receive = Actor.emptyBehavior override def offices = Set(ReservationOfficeId) private def route = logRequestResult(("sales", Logging.InfoLevel)) { pathPrefix("ecommerce" / "sales") { createDefaultHealthRoute() ~ handle[ecommerce.sales.Command] } } }
Example 68
Source File: HttpService.scala From ddd-leaven-akka-v2 with MIT License | 5 votes |
package ecommerce.shipping import akka.actor.{Actor, ActorLogging, Props} import akka.http.scaladsl.Http import akka.http.scaladsl.server.Directives import pl.newicom.dddd.streams.ImplicitMaterializer import akka.util.Timeout import com.typesafe.config.Config import ecommerce.shipping.app.ShipmentViewEndpoint import org.json4s.Formats import pl.newicom.dddd.serialization.JsonSerHints._ import pl.newicom.dddd.view.sql.SqlViewStore import scala.concurrent.duration.FiniteDuration import slick.jdbc.PostgresProfile object HttpService { def props(interface: String, port: Int, askTimeout: FiniteDuration): Props = Props(new HttpService(interface, port)(askTimeout)) } class HttpService(interface: String, port: Int)(implicit askTimeout: Timeout) extends Actor with ActorLogging with ShippingReadFrontConfiguration with ImplicitMaterializer with Directives { import context.dispatcher implicit val formats: Formats = fromConfig(config) implicit val profile = PostgresProfile Http(context.system).bindAndHandle(route, interface, port) log.info(s"Listening on $interface:$port") override def receive: Receive = Actor.emptyBehavior override def config: Config = context.system.settings.config lazy val endpoints: ShipmentViewEndpoint = new ShipmentViewEndpoint private def route = (provide(new SqlViewStore(config)) & pathPrefix("ecommerce" / "shipping"))(endpoints) }
Example 69
Source File: ShipmentViewEndpoint.scala From ddd-leaven-akka-v2 with MIT License | 5 votes |
package ecommerce.shipping.app import akka.http.scaladsl.model.StatusCodes import akka.http.scaladsl.server.Route import ecommerce.shipping.view.ShipmentDao import ecommerce.shipping.ReadEndpoint import org.json4s.Formats import pl.newicom.dddd.view.sql.SqlViewStore import scala.concurrent.ExecutionContext import slick.jdbc.JdbcProfile class ShipmentViewEndpoint(implicit val ec: ExecutionContext, profile: JdbcProfile, formats: Formats) extends ReadEndpoint { lazy val dao = new ShipmentDao def route(viewStore: SqlViewStore): Route = { path("shipment" / "all") { get { complete { viewStore.run { dao.all } } } } ~ path("shipment" / Segment) { id => get { onSuccess(viewStore.run(dao.byId(id))) { case Some(res) => complete(res) case None => complete(StatusCodes.NotFound -> "unknown shipment") } } } ~ path("shipment" / "order" / Segment) { id => get { onSuccess(viewStore.run(dao.byOrderId(id))) { case seq if seq.isEmpty => complete(StatusCodes.NotFound -> "unknown order") case orders => complete(orders) } } } } }
Example 70
Source File: ShipmentViewEndpointSpec.scala From ddd-leaven-akka-v2 with MIT License | 5 votes |
package ecommerce.shipping.app import akka.http.scaladsl.model.StatusCodes.NotFound import akka.http.scaladsl.server._ import akka.http.scaladsl.testkit.{RouteTestTimeout, ScalatestRouteTest} import akka.testkit.TestDuration import com.typesafe.config.ConfigFactory import ecommerce.sales.view.ViewTestSupport import ecommerce.shipping.view.{ShipmentDao, ShipmentView} import ecommerce.shipping.{ShippingSerializationHintsProvider, ShippingStatus} import org.json4s.Formats import org.scalatest.{BeforeAndAfter, Matchers, WordSpecLike} import pl.newicom.dddd.serialization.JsonSerHints._ import pl.newicom.dddd.utils.UUIDSupport.uuid7 import scala.concurrent.duration.DurationInt class ShipmentViewEndpointSpec extends WordSpecLike with Matchers with ScalatestRouteTest with ViewTestSupport with BeforeAndAfter { override lazy val config = ConfigFactory.load implicit val formats: Formats = new ShippingSerializationHintsProvider().hints() implicit val routeTimeout = RouteTestTimeout(3.seconds dilated) lazy val dao = new ShipmentDao val shipmentId = uuid7 before { viewStore.run { dao.createOrUpdate(ShipmentView(shipmentId, "order-1", ShippingStatus.Delivered)) }.futureValue } after { viewStore.run { dao.remove(shipmentId) }.futureValue } "Shipment view endpoint" should { def response = responseAs[String] val route: Route = new ShipmentViewEndpoint().route(viewStore) "respond to /shipment/all with all shipments" in { Get("/shipment/all") ~> route ~> check { response should include (shipmentId) } } "respond to /shipment/{shipmentId} with requested shipment" in { Get(s"/shipment/$shipmentId") ~> route ~> check { response should include (shipmentId) } } "respond to /shipment/{shipmentId} with NotFound if shipment unknown" in { Get(s"/shipment/invalid") ~> route ~> check { status shouldBe NotFound } } } def ensureSchemaDropped = dao.ensureSchemaDropped def ensureSchemaCreated = dao.ensureSchemaCreated }
Example 71
Source File: TestDriver.scala From ddd-leaven-akka-v2 with MIT License | 5 votes |
package ecommerce.tests.e2e import io.restassured.RestAssured._ import io.restassured.builder.RequestSpecBuilder import io.restassured.config.HttpClientConfig import io.restassured.config.HttpClientConfig.HttpClientFactory import io.restassured.filter.log.LogDetail import io.restassured.module.scala.RestAssuredSupport.AddThenToResponse import io.restassured.response.ValidatableResponse import io.restassured.specification.RequestSpecification import org.apache.http.client.HttpClient import org.apache.http.impl.client.SystemDefaultHttpClient import org.apache.http.params.HttpConnectionParams import org.json4s.Formats import org.json4s.native.Serialization.write import org.scalatest.{Matchers, WordSpecLike} import pl.newicom.dddd.aggregate.Command trait TestDriver extends WordSpecLike with Matchers { val clientConfig: HttpClientConfig = config.getHttpClientConfig .httpClientFactory(new HttpClientFactory() { override def createHttpClient: HttpClient = { val rv = new SystemDefaultHttpClient val httpParams = rv.getParams HttpConnectionParams.setConnectionTimeout(httpParams, 2 * 1000) // Wait 2s for a connection HttpConnectionParams.setSoTimeout(httpParams, 60 * 1000) // Default session is 60s rv } }) .reuseHttpClientInstance() def using[R](endpoint: EndpointConfig)(testBody: RequestSpecBuilder => R): R = { testBody( new RequestSpecBuilder() .setConfig(config.httpClient(clientConfig)) .setBaseUri(endpoint.toUrl) .setContentType("application/json") .log(LogDetail.ALL) ) } def POST(implicit builder: RequestSpecBuilder): POSTOps = new POSTOps(builder.build()) def GET(implicit builder: RequestSpecBuilder): GETOps = new GETOps(builder.build()) class POSTOps(reqSpec: RequestSpecification) { def command(c: Command)(implicit formats: Formats): ValidatableResponse = given(reqSpec) .body(write(c)) .header("Command-Type", c.getClass.getName) .post() .Then() .log().all() .statusCode(200) } class GETOps(reqSpec: RequestSpecification) { def /(subPath: String): ValidatableResponse = given(reqSpec) .get(subPath) .Then() .log().all() .statusCode(200) } }
Example 72
Source File: SystemSpec.scala From ddd-leaven-akka-v2 with MIT License | 5 votes |
package ecommerce.tests.e2e import ecommerce.invoicing.ReceivePayment import ecommerce.sales._ import ecommerce.shipping.ShippingSerializationHintsProvider import ecommerce.tests.e2e.SystemSpec._ import org.json4s.Formats import org.scalatest.concurrent.Eventually import org.scalatest.time.{Seconds, Span} import pl.newicom.dddd.utils.UUIDSupport.uuid7 class SystemSpec extends TestDriver with Eventually { implicit override val patienceConfig: PatienceConfig = PatienceConfig( timeout = scaled(Span(10, Seconds)), interval = scaled(Span(2, Seconds)) ) "Ecommerce system" should { val reservationId = new ReservationId(uuid7) val invoiceId = reservationId val customerId = uuid7 using(sales_write) { implicit b => "create reservation" in eventually { POST command { CreateReservation(reservationId, customerId) } } } using(sales_read) { implicit b => "respond to reservation/{reservationId} query" in eventually { GET / s"reservation/$reservationId" } } using(sales_write) { implicit b => "reserve product" in eventually { val product = Product( productId = uuid7, name = "DDDD For Dummies - 7th Edition", productType = ProductType.Standard, price = Money(10.0) ) POST command { ReserveProduct(reservationId, product, quantity = 1) } } "confirm reservation" in eventually { POST command { ConfirmReservation(reservationId) } } } using(invoicing_write) { implicit b => "pay" in eventually { POST command { ReceivePayment(invoiceId, reservationId.value, Money(10.0), paymentId = "230982342") } } } using(shipping_read) { implicit b => "respond to /shipment/order/{orderId}" in eventually { GET / s"shipment/order/$reservationId" } } } } import pl.newicom.dddd.serialization.JsonSerHints._ object SystemSpec { val sales = EndpointConfig(path = "ecommerce/sales") val invoicing = EndpointConfig(path = "ecommerce/invoicing") val shipping = EndpointConfig(path = "ecommerce/shipping") val sales_write: EndpointConfig = sales.copy(port = 9100) val invoicing_write: EndpointConfig = invoicing.copy(port = 9200) val sales_read: EndpointConfig = sales.copy(port = 9110) val shipping_read: EndpointConfig = shipping.copy(port = 9310) implicit val formats: Formats = new SalesSerializationHintsProvider().hints() ++ new ShippingSerializationHintsProvider().hints() }
Example 73
Source File: JsonServlet.scala From chatoverflow with Eclipse Public License 2.0 | 5 votes |
package org.codeoverflow.chatoverflow.ui.web import javax.servlet.http.HttpServletRequest import org.codeoverflow.chatoverflow.ui.web.rest.DTOs.ResultMessage import org.codeoverflow.chatoverflow.{ChatOverflow, Launcher} import org.json4s.{DefaultFormats, Formats} import org.scalatra.json.JacksonJsonSupport import org.scalatra.{BadRequest, CorsSupport, ScalatraServlet, Unauthorized} protected def authKeyRequired(func: => Any)(implicit request: HttpServletRequest): Any = { val authKeyKey = "authKey" if (request.header(authKeyKey).isEmpty) { BadRequest() } else if (request.header(authKeyKey).get != chatOverflow.credentialsService.generateAuthKey()) { Unauthorized() } else { func } } }
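The snippet above is truncated and omits the part that actually touches org.json4s.Formats: Scalatra's JacksonJsonSupport requires an implicit jsonFormats member. A minimal, hypothetical servlet skeleton showing that wiring (the chatOverflow and auth logic from the example is left out, and the class name is made up):

import org.json4s.{DefaultFormats, Formats}
import org.scalatra.ScalatraServlet
import org.scalatra.json.JacksonJsonSupport

class MinimalJsonServlet extends ScalatraServlet with JacksonJsonSupport {
  // JacksonJsonSupport requires this implicit; it drives JSON (de)serialization of bodies.
  protected implicit lazy val jsonFormats: Formats = DefaultFormats

  before() {
    contentType = formats("json") // respond with application/json by default
  }
}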
Example 74
Source File: ErrorEventsWriter.scala From etl-light with MIT License | 5 votes |
package yamrcraft.etlite.writers import java.io.OutputStream import org.apache.hadoop.fs.Path import org.apache.hadoop.io.{BytesWritable, IntWritable, SequenceFile, Text} import org.json4s.jackson.Serialization import org.json4s.{DefaultFormats, Formats, ShortTypeHints} import yamrcraft.etlite.utils.FileUtils case class ErrorInfo( errorType: String, errorMsg: Option[String] ) class ErrorEventWriter(folder: String, jobId: Long, partitionId: Int) extends ErrorEventsWriter { // incremental record id var recordId = 1 val fs = FileUtils.getFS(folder) val seqPath = new Path(folder, s"errors_job${jobId}_part$partitionId.seq") if (fs.exists(seqPath)) { fs.delete(seqPath, false) } val metaPath = new Path(folder, s"errors_job${jobId}_part$partitionId.meta.seq") if (fs.exists(metaPath)) { fs.delete(metaPath, false) } private var seqWriter: Option[SequenceFile.Writer] = None private var metaWriter: Option[SequenceFile.Writer] = None implicit val formats = new Formats { val dateFormat = DefaultFormats.lossless.dateFormat override val typeHints = ShortTypeHints(List(classOf[ErrorInfo])) override val typeHintFieldName = "type" } override def write(errorEvent: (Array[Byte], ErrorInfo)) = { if (seqWriter.isEmpty) { seqWriter = createSequenceFile(seqPath, classOf[IntWritable], classOf[BytesWritable]) metaWriter = createSequenceFile(metaPath, classOf[IntWritable], classOf[Text]) } val id = new IntWritable(recordId) seqWriter.get.append(id, new BytesWritable(errorEvent._1)) metaWriter.get.append(id, new Text(Serialization.write(errorEvent._2))) recordId += 1 } override def commit() = { seqWriter.foreach(p => p.close()) metaWriter.foreach(p => p.close()) } private def createSequenceFile(path: Path, keyClass: Class[_], valueClass: Class[_]) = { val optPath = SequenceFile.Writer.file(path) val optKey = SequenceFile.Writer.keyClass(keyClass) val optVal = SequenceFile.Writer.valueClass(valueClass) Some(SequenceFile.createWriter(fs.getConf, optPath, optKey, optVal)) } }
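The writer above builds its Formats by hand so that serialized ErrorInfo records carry a type hint under a "type" field. A rough standalone sketch of the same idea (the class and values below are made up, and it uses the stock Serialization.formats helper, which keeps the default "jsonClass" hint field instead of the overridden name):

import org.json4s.{Formats, ShortTypeHints}
import org.json4s.jackson.Serialization

case class ErrorDetails(errorType: String, errorMsg: Option[String]) // mirrors ErrorInfo

object ErrorHintsSketch extends App {
  implicit val formats: Formats =
    Serialization.formats(ShortTypeHints(List(classOf[ErrorDetails])))

  // Prints roughly: {"jsonClass":"ErrorDetails","errorType":"parse","errorMsg":"bad record"}
  println(Serialization.write(ErrorDetails("parse", Some("bad record"))))
}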
Example 75
Source File: ApkModelSerializer.scala From Argus-SAF with Apache License 2.0 | 5 votes |
package org.argus.amandroid.serialization import org.argus.jawa.core.util._ import org.argus.amandroid.core.model.ApkModel import org.argus.amandroid.core.appInfo.ApkCertificate import org.argus.amandroid.core.decompile.DecompileLayout import org.argus.amandroid.core.parser.{ComponentInfo, ComponentType, IntentFilterDataBase, LayoutControl} import org.argus.jawa.core.elements.{JawaType, Signature} import org.json4s.{CustomSerializer, Extraction, Formats, JValue} import org.json4s.JsonDSL._ object ApkModelSerializer extends CustomSerializer[ApkModel](format => ( { case jv: JValue => implicit val formats: Formats = format + JawaTypeSerializer + JawaTypeKeySerializer + SignatureSerializer + SignatureKeySerializer + IntentFilterDataBaseSerializer + DecompileLayoutSerializer + new org.json4s.ext.EnumNameSerializer(ComponentType) val nameUri = (jv \ "nameUri").extract[FileResourceUri] val layout = (jv \ "layout").extract[DecompileLayout] val certificates = (jv \ "certificates").extract[ISet[ApkCertificate]] val activities = (jv \ "activities").extract[ISet[JawaType]] val services = (jv \ "services").extract[ISet[JawaType]] val receivers = (jv \ "receivers").extract[ISet[JawaType]] val providers = (jv \ "provider").extract[ISet[JawaType]] val drReceivers = (jv \ "drReceivers").extract[ISet[JawaType]] val rpcMethods = (jv \ "rpcMethods").extract[IMap[JawaType, IMap[Signature, Boolean]]] val uses_permissions = (jv \ "uses_permissions").extract[ISet[String]] val callbackMethods = (jv \ "callbackMethods").extract[IMap[JawaType, ISet[Signature]]] val componentInfos = (jv \ "componentInfos").extract[ISet[ComponentInfo]] val layoutControls = (jv \ "layoutControls").extract[IMap[Int, LayoutControl]] val appPackageName = (jv \ "appPackageName").extract[Option[String]] val intentFdb = (jv \ "intentFdb").extract[IntentFilterDataBase] val codeLineCounter = (jv \ "codeLineCounter").extract[Int] val envMap = (jv \ "envMap").extract[IMap[JawaType, (Signature, String)]] val apk = ApkModel(nameUri, layout) apk.addCertificates(certificates) apk.addActivities(activities) apk.addServices(services) apk.addReceivers(receivers) apk.addProviders(providers) apk.addDynamicRegisteredReceivers(drReceivers) apk.addRpcMethods(rpcMethods) apk.addUsesPermissions(uses_permissions) apk.addCallbackMethods(callbackMethods) apk.addComponentInfos(componentInfos) apk.addLayoutControls(layoutControls) apk.setPackageName(appPackageName.getOrElse("")) apk.setIntentFilterDB(intentFdb) apk.setCodeLineCounter(codeLineCounter) apk.addEnvMap(envMap) apk }, { case model: ApkModel => implicit val formats: Formats = format + JawaTypeSerializer + JawaTypeKeySerializer + SignatureSerializer + SignatureKeySerializer + IntentFilterDataBaseSerializer + DecompileLayoutSerializer + new org.json4s.ext.EnumNameSerializer(ComponentType) val nameUri: FileResourceUri = model.nameUri val layout: DecompileLayout = model.layout val certificates: ISet[ApkCertificate] = model.getCertificates val activities: ISet[JawaType] = model.getActivities val services: ISet[JawaType] = model.getServices val receivers: ISet[JawaType] = model.getReceivers val providers: ISet[JawaType] = model.getProviders val drReceivers: ISet[JawaType] = model.getDynamicRegisteredReceivers val rpcMethods: IMap[JawaType, IMap[Signature, Boolean]] = model.getRpcMethodMapping val uses_permissions: ISet[String] = model.getUsesPermissions val callbackMethods: IMap[JawaType, ISet[Signature]] = model.getCallbackMethodMapping val componentInfos: ISet[ComponentInfo] = model.getComponentInfos val 
layoutControls: IMap[Int, LayoutControl] = model.getLayoutControls val appPackageName: String = model.getPackageName val intentFdb: IntentFilterDataBase = model.getIntentFilterDB val codeLineCounter: Int = model.getCodeLineCounter val envMap: IMap[JawaType, (Signature, String)] = model.getEnvMap ("nameUri" -> nameUri) ~ ("layout" -> Extraction.decompose(layout)) ~ ("certificates" -> Extraction.decompose(certificates)) ~ ("activities" -> Extraction.decompose(activities)) ~ ("services" -> Extraction.decompose(services)) ~ ("receivers" -> Extraction.decompose(receivers)) ~ ("providers" -> Extraction.decompose(providers)) ~ ("drReceivers" -> Extraction.decompose(drReceivers)) ~ ("rpcMethods" -> Extraction.decompose(rpcMethods)) ~ ("uses_permissions" -> Extraction.decompose(uses_permissions)) ~ ("callbackMethods" -> Extraction.decompose(callbackMethods)) ~ ("componentInfos" -> Extraction.decompose(componentInfos)) ~ ("layoutControls" -> Extraction.decompose(layoutControls)) ~ ("appPackageName" -> Option(appPackageName)) ~ ("intentFdb" -> Extraction.decompose(intentFdb)) ~ ("codeLineCounter" -> codeLineCounter) ~ ("envMap" -> Extraction.decompose(envMap)) } ))
Example 76
Source File: DecompileLayoutSerializer.scala From Argus-SAF with Apache License 2.0 | 5 votes |
package org.argus.amandroid.serialization import org.argus.amandroid.core.decompile.DecompileLayout import org.argus.jawa.core.util.{FileResourceUri, ISet} import org.json4s.{CustomSerializer, Extraction, Formats, JValue} import org.json4s.JsonDSL._ object DecompileLayoutSerializer extends CustomSerializer[DecompileLayout](format => ( { case jv: JValue => implicit val formats: Formats = format val outputUri = (jv \ "outputUri").extract[FileResourceUri] val createFolder = (jv \ "createFolder").extract[Boolean] val srcFolder = (jv \ "srcFolder").extract[String] val libFolder = (jv \ "libFolder").extract[String] val createSeparateFolderForDexes = (jv \ "createSeparateFolderForDexes").extract[Boolean] val pkg = (jv \ "pkg").extract[String] val outputSrcUri = (jv \ "outputSrcUri").extract[FileResourceUri] val sourceFolders = (jv \ "sourceFolders").extract[ISet[String]] val libFolders = (jv \ "libFolders").extract[ISet[String]] val dependencies = (jv \ "dependencies").extract[ISet[String]] val thirdPartyLibraries = (jv \ "thirdPartyLibraries").extract[ISet[String]] val layout = DecompileLayout(outputUri, createFolder, srcFolder, libFolder, createSeparateFolderForDexes) layout.pkg = pkg layout.outputSrcUri = outputSrcUri layout.sourceFolders = sourceFolders layout.libFolders = libFolders layout.dependencies = dependencies layout.thirdPartyLibraries = thirdPartyLibraries layout }, { case layout: DecompileLayout => implicit val formats: Formats = format val outputUri: FileResourceUri = layout.outputSrcUri val createFolder: Boolean = layout.createFolder val srcFolder: String = layout.srcFolder val libFolder: String = layout.libFolder val createSeparateFolderForDexes: Boolean = layout.createSeparateFolderForDexes val pkg: String = layout.pkg val outputSrcUri: FileResourceUri = layout.outputSrcUri val sourceFolders: ISet[String] = layout.sourceFolders val libFolders: ISet[String] = layout.libFolders val dependencies: ISet[String] = layout.dependencies val thirdPartyLibraries: ISet[String] = layout.thirdPartyLibraries ("outputUri" -> outputUri) ~ ("createFolder" -> createFolder) ~ ("srcFolder" -> srcFolder) ~ ("libFolder" -> libFolder) ~ ("createSeparateFolderForDexes" -> createSeparateFolderForDexes) ~ ("pkg" -> pkg) ~ ("outputSrcUri" -> outputSrcUri) ~ ("sourceFolders" -> Extraction.decompose(sourceFolders)) ~ ("libFolders" -> Extraction.decompose(libFolders)) ~ ("dependencies" -> Extraction.decompose(dependencies)) ~ ("thirdPartyLibraries" -> Extraction.decompose(thirdPartyLibraries)) } ))
Example 77
Source File: DeployConfig.scala From hail with MIT License | 5 votes |
package is.hail.services import java.io.{File, FileInputStream} import is.hail.utils._ import org.json4s.{DefaultFormats, Formats, JValue} import org.json4s.jackson.JsonMethods object DeployConfig { lazy val get: DeployConfig = fromConfigFile() def fromConfigFile(file0: String = null): DeployConfig = { var file = file0 if (file == null) file = System.getenv("HAIL_DEPLOY_CONFIG_FILE") if (file == null) { val fromHome = s"${ System.getenv("HOME") }/.hail/deploy-config.json" if (new File(fromHome).exists()) file = fromHome } if (file == null) { val f = "/deploy-config/deploy-config.json" if (new File(f).exists()) file = f } if (file != null) { using(new FileInputStream(file)) { in => fromConfig(JsonMethods.parse(in)) } } else new DeployConfig( "external", "default", Map()) } def fromConfig(config: JValue): DeployConfig = { implicit val formats: Formats = DefaultFormats new DeployConfig( (config \ "location").extract[String], (config \ "default_namespace").extract[String], (config \ "service_namespace").extract[Map[String, String]]) } } class DeployConfig( val location: String, val defaultNamespace: String, val serviceNamespace: Map[String, String]) { def scheme(baseScheme: String = "http"): String = { if (location == "external" || location == "k8s") baseScheme + "s" else baseScheme } def getServiceNamespace(service: String): String = { serviceNamespace.getOrElse(service, defaultNamespace) } def domain(service: String): String = { val ns = getServiceNamespace(service) location match { case "k8s" => s"$service.$ns" case "gce" => if (ns == "default") s"$service.hail" else "internal.hail" case "external" => if (ns == "default") s"$service.hail.is" else "internal.hail.is" } } def basePath(service: String): String = { val ns = getServiceNamespace(service) if (ns == "default") "" else s"/$ns/$service" } def baseUrl(service: String, baseScheme: String = "http"): String = { s"${ scheme(baseScheme) }://${ domain(service) }${ basePath(service) }" } }
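fromConfig above is a plain use of the json4s extraction pattern: parse, navigate with \, extract under an implicit DefaultFormats. A small standalone sketch of that pattern with made-up values:

import org.json4s.{DefaultFormats, Formats}
import org.json4s.jackson.JsonMethods

object DeployConfigJsonSketch extends App {
  implicit val formats: Formats = DefaultFormats

  val config = JsonMethods.parse(
    """{"location":"k8s","default_namespace":"default","service_namespace":{"batch":"test"}}""")

  val location = (config \ "location").extract[String]                       // "k8s"
  val services = (config \ "service_namespace").extract[Map[String, String]] // Map("batch" -> "test")
  println(s"$location $services")
}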
Example 78
Source File: package.scala From hail with MIT License | 5 votes |
package is.hail.services import is.hail.utils._ import org.json4s.{DefaultFormats, Formats} import java.io.{File, FileInputStream} import java.security.KeyStore import javax.net.ssl.{KeyManagerFactory, SSLContext, TrustManagerFactory} import org.apache.log4j.{LogManager, Logger} import org.json4s.jackson.JsonMethods class NoSSLConfigFound( message: String, cause: Throwable ) extends Exception(message, cause) { def this() = this(null, null) def this(message: String) = this(message, null) } case class SSLConfig( outgoing_trust: String, outgoing_trust_store: String, incoming_trust: String, incoming_trust_store: String, key: String, cert: String, key_store: String) package object tls { lazy val log: Logger = LogManager.getLogger("is.hail.tls") private[this] lazy val _getSSLConfig: SSLConfig = { var configFile = System.getenv("HAIL_SSL_CONFIG_FILE") if (configFile == null) configFile = "/ssl-config/ssl-config.json" if (!new File(configFile).isFile) throw new NoSSLConfigFound(s"no ssl config file found at $configFile") log.info(s"ssl config file found at $configFile") using(new FileInputStream(configFile)) { is => implicit val formats: Formats = DefaultFormats JsonMethods.parse(is).extract[SSLConfig] } } lazy val getSSLContext: SSLContext = { val sslConfig = _getSSLConfig val pw = "dummypw".toCharArray val ks = KeyStore.getInstance("PKCS12") using(new FileInputStream(sslConfig.key_store)) { is => ks.load(is, pw) } val kmf = KeyManagerFactory.getInstance("SunX509") kmf.init(ks, pw) val ts = KeyStore.getInstance("JKS") using(new FileInputStream(sslConfig.outgoing_trust_store)) { is => ts.load(is, pw) } val tmf = TrustManagerFactory.getInstance("SunX509") tmf.init(ts) val ctx = SSLContext.getInstance("TLS") ctx.init(kmf.getKeyManagers, tmf.getTrustManagers, null) ctx } }
Example 79
Source File: Tokens.scala From hail with MIT License | 5 votes |
package is.hail.services import is.hail.utils._ import java.io.{File, FileInputStream} import org.apache.http.client.methods.HttpUriRequest import org.apache.log4j.{LogManager, Logger} import org.json4s.{DefaultFormats, Formats} import org.json4s.jackson.JsonMethods object Tokens { lazy val log: Logger = LogManager.getLogger("Tokens") def get: Tokens = { val file = getTokensFile() if (new File(file).isFile) { using(new FileInputStream(file)) { is => implicit val formats: Formats = DefaultFormats new Tokens(JsonMethods.parse(is).extract[Map[String, String]]) } } else { log.info(s"tokens file not found: $file") new Tokens(Map()) } } def getTokensFile(): String = { if (DeployConfig.get.location == "external") s"${ System.getenv("HOME") }/.hail/tokens.json" else "/user-tokens/tokens.json" } } class Tokens( tokens: Map[String, String] ) { def namespaceToken(ns: String): String = tokens(ns) def addNamespaceAuthHeaders(ns: String, req: HttpUriRequest): Unit = { val token = namespaceToken(ns) req.addHeader("Authorization", s"Bearer $token") val location = DeployConfig.get.location if (location == "external" && ns != "default") req.addHeader("X-Hail-Internal-Authorization", s"Bearer ${ namespaceToken("default") }") } def addServiceAuthHeaders(service: String, req: HttpUriRequest): Unit = { addNamespaceAuthHeaders(DeployConfig.get.getServiceNamespace(service), req) } }
Example 80
Source File: MatrixWriter.scala From hail with MIT License | 5 votes |
package is.hail.expr.ir import is.hail.types.virtual.Type import is.hail.io._ import is.hail.io.gen.{ExportBGEN, ExportGen} import is.hail.io.plink.ExportPlink import is.hail.io.vcf.ExportVCF import is.hail.utils.ExportType import org.json4s.{DefaultFormats, Formats, ShortTypeHints} object MatrixWriter { implicit val formats: Formats = new DefaultFormats() { override val typeHints = ShortTypeHints( List(classOf[MatrixNativeWriter], classOf[MatrixVCFWriter], classOf[MatrixGENWriter], classOf[MatrixBGENWriter], classOf[MatrixPLINKWriter], classOf[WrappedMatrixWriter])) override val typeHintFieldName = "name" } } case class WrappedMatrixWriter(writer: MatrixWriter, colsFieldName: String, entriesFieldName: String, colKey: IndexedSeq[String]) extends TableWriter { def path: String = writer.path def apply(ctx: ExecuteContext, tv: TableValue): Unit = writer(ctx, tv.toMatrixValue(colKey, colsFieldName, entriesFieldName)) } abstract class MatrixWriter { def path: String def apply(ctx: ExecuteContext, mv: MatrixValue): Unit } case class MatrixNativeWriter( path: String, overwrite: Boolean = false, stageLocally: Boolean = false, codecSpecJSONStr: String = null, partitions: String = null, partitionsTypeStr: String = null ) extends MatrixWriter { def apply(ctx: ExecuteContext, mv: MatrixValue): Unit = mv.write(ctx, path, overwrite, stageLocally, codecSpecJSONStr, partitions, partitionsTypeStr) } case class MatrixVCFWriter( path: String, append: Option[String] = None, exportType: String = ExportType.CONCATENATED, metadata: Option[VCFMetadata] = None ) extends MatrixWriter { def apply(ctx: ExecuteContext, mv: MatrixValue): Unit = ExportVCF(ctx, mv, path, append, exportType, metadata) } case class MatrixGENWriter( path: String, precision: Int = 4 ) extends MatrixWriter { def apply(ctx: ExecuteContext, mv: MatrixValue): Unit = ExportGen(ctx, mv, path, precision) } case class MatrixBGENWriter( path: String, exportType: String ) extends MatrixWriter { def apply(ctx: ExecuteContext, mv: MatrixValue): Unit = ExportBGEN(ctx, mv, path, exportType) } case class MatrixPLINKWriter( path: String ) extends MatrixWriter { def apply(ctx: ExecuteContext, mv: MatrixValue): Unit = ExportPlink(ctx, mv, path) } object MatrixNativeMultiWriter { implicit val formats: Formats = new DefaultFormats() { override val typeHints = ShortTypeHints(List(classOf[MatrixNativeMultiWriter])) override val typeHintFieldName = "name" } } case class MatrixNativeMultiWriter( prefix: String, overwrite: Boolean = false, stageLocally: Boolean = false ) { def apply(ctx: ExecuteContext, mvs: IndexedSeq[MatrixValue]): Unit = MatrixValue.writeMultiple(ctx, mvs, prefix, overwrite, stageLocally) }
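The Formats in MatrixWriter renames the type-hint field to "name", so the concrete writer subtype survives a JSON round trip. A simplified, hypothetical sketch of that effect (the classes below are stand-ins, not Hail's):

import org.json4s.{DefaultFormats, Formats, ShortTypeHints}
import org.json4s.jackson.Serialization

sealed trait Writer
case class VCFWriter(path: String) extends Writer
case class GENWriter(path: String, precision: Int) extends Writer

object WriterHintsSketch extends App {
  implicit val formats: Formats = new DefaultFormats {
    override val typeHints = ShortTypeHints(List(classOf[VCFWriter], classOf[GENWriter]))
    override val typeHintFieldName = "name"
  }

  val json = Serialization.write(VCFWriter("/tmp/out.vcf"): Writer)
  println(json)                              // {"name":"VCFWriter","path":"/tmp/out.vcf"}
  println(Serialization.read[Writer](json))  // VCFWriter(/tmp/out.vcf)
}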
Example 81
Source File: AbstractTableSpec.scala From hail with MIT License | 5 votes |
package is.hail.expr.ir import java.io.OutputStreamWriter import is.hail.utils._ import is.hail.types._ import is.hail.io.fs.FS import is.hail.rvd._ import org.json4s.jackson.JsonMethods import org.json4s.{DefaultFormats, Extraction, Formats, JValue, ShortTypeHints} import scala.language.implicitConversions object SortOrder { def deserialize(b: Byte): SortOrder = if (b == 0.toByte) Ascending else if (b == 1.toByte) Descending else throw new RuntimeException(s"invalid sort order: $b") } sealed abstract class SortOrder { def serialize: Byte } case object Ascending extends SortOrder { def serialize: Byte = 0.toByte } case object Descending extends SortOrder { def serialize: Byte = 1.toByte } case class SortField(field: String, sortOrder: SortOrder) abstract class AbstractTableSpec extends RelationalSpec { def references_rel_path: String def table_type: TableType def rowsComponent: RVDComponentSpec = getComponent[RVDComponentSpec]("rows") def rowsSpec: AbstractRVDSpec def globalsSpec: AbstractRVDSpec def indexed: Boolean = rowsSpec.indexed } object TableSpec { def apply(fs: FS, path: String, params: TableSpecParameters): TableSpec = { val globalsComponent = params.components("globals").asInstanceOf[RVDComponentSpec] val globalsSpec = globalsComponent.rvdSpec(fs, path) val rowsComponent = params.components("rows").asInstanceOf[RVDComponentSpec] val rowsSpec = rowsComponent.rvdSpec(fs, path) new TableSpec(params, globalsSpec, rowsSpec) } def fromJValue(fs: FS, path: String, jv: JValue): TableSpec = { implicit val formats: Formats = RelationalSpec.formats val params = jv.extract[TableSpecParameters] TableSpec(fs, path, params) } } case class TableSpecParameters( file_version: Int, hail_version: String, references_rel_path: String, table_type: TableType, components: Map[String, ComponentSpec]) { def write(fs: FS, path: String) { using(new OutputStreamWriter(fs.create(path + "/metadata.json.gz"))) { out => out.write(JsonMethods.compact(decomposeWithName(this, "TableSpec")(RelationalSpec.formats))) } } } class TableSpec( val params: TableSpecParameters, val globalsSpec: AbstractRVDSpec, val rowsSpec: AbstractRVDSpec) extends AbstractTableSpec { def file_version: Int = params.file_version def hail_version: String = params.hail_version def components: Map[String, ComponentSpec] = params.components def references_rel_path: String = params.references_rel_path def table_type: TableType = params.table_type def toJValue: JValue = { decomposeWithName(params, "TableSpec")(RelationalSpec.formats) } }
Example 82
Source File: NativeReaderOptions.scala From hail with MIT License | 5 votes |
package is.hail.expr.ir import is.hail.types.virtual._ import is.hail.expr.JSONAnnotationImpex import is.hail.utils._ import org.json4s.{CustomSerializer, DefaultFormats, Formats, JObject, JValue} import org.json4s.JsonDSL._ class NativeReaderOptionsSerializer() extends CustomSerializer[NativeReaderOptions]( format => ({ case jObj: JObject => implicit val fmt = format val filterIntervals = (jObj \ "filterIntervals").extract[Boolean] val intervalPointType = IRParser.parseType((jObj \ "intervalPointType").extract[String]) val intervals = { val jv = jObj \ "intervals" val ty = TArray(TInterval(intervalPointType)) JSONAnnotationImpex.importAnnotation(jv, ty).asInstanceOf[IndexedSeq[Interval]] } NativeReaderOptions(intervals, intervalPointType, filterIntervals) }, { case opts: NativeReaderOptions => implicit val fmt = format val ty = TArray(TInterval(opts.intervalPointType)) (("name" -> opts.getClass.getSimpleName) ~ ("intervals" -> JSONAnnotationImpex.exportAnnotation(opts.intervals, ty)) ~ ("intervalPointType" -> opts.intervalPointType.parsableString()) ~ ("filterIntervals" -> opts.filterIntervals)) }) ) object NativeReaderOptions { def fromJValue(jv: JValue): NativeReaderOptions = { implicit val formats: Formats = DefaultFormats val filterIntervals = (jv \ "filterIntervals").extract[Boolean] val intervalPointType = IRParser.parseType((jv \ "intervalPointType").extract[String]) val intervals = { val jvIntervals = jv \ "intervals" val ty = TArray(TInterval(intervalPointType)) JSONAnnotationImpex.importAnnotation(jvIntervals, ty).asInstanceOf[IndexedSeq[Interval]] } NativeReaderOptions(intervals, intervalPointType, filterIntervals) } } case class NativeReaderOptions( intervals: IndexedSeq[Interval], intervalPointType: Type, filterIntervals: Boolean = false) { def toJson: JValue = { val ty = TArray(TInterval(intervalPointType)) JObject( "name" -> "NativeReaderOptions", "intervals" -> JSONAnnotationImpex.exportAnnotation(intervals, ty), "intervalPointType" -> intervalPointType.parsableString(), "filterIntervals" -> filterIntervals) } }
Example 83
Source File: BlockMatrixWriter.scala From hail with MIT License | 5 votes |
package is.hail.expr.ir import is.hail.HailContext import is.hail.io.fs.FS import is.hail.linalg.BlockMatrix import is.hail.utils.richUtils.RichDenseMatrixDouble import org.json4s.{DefaultFormats, Formats, ShortTypeHints} object BlockMatrixWriter { implicit val formats: Formats = new DefaultFormats() { override val typeHints = ShortTypeHints( List(classOf[BlockMatrixNativeWriter], classOf[BlockMatrixBinaryWriter], classOf[BlockMatrixRectanglesWriter], classOf[BlockMatrixBinaryMultiWriter], classOf[BlockMatrixTextMultiWriter], classOf[BlockMatrixPersistWriter])) override val typeHintFieldName: String = "name" } } abstract class BlockMatrixWriter { def pathOpt: Option[String] def apply(ctx: ExecuteContext, bm: BlockMatrix): Unit } case class BlockMatrixNativeWriter( path: String, overwrite: Boolean, forceRowMajor: Boolean, stageLocally: Boolean) extends BlockMatrixWriter { def pathOpt: Option[String] = Some(path) def apply(ctx: ExecuteContext, bm: BlockMatrix): Unit = bm.write(ctx, path, overwrite, forceRowMajor, stageLocally) } case class BlockMatrixBinaryWriter(path: String) extends BlockMatrixWriter { def pathOpt: Option[String] = Some(path) def apply(ctx: ExecuteContext, bm: BlockMatrix): Unit = { RichDenseMatrixDouble.exportToDoubles(ctx.fs, path, bm.toBreezeMatrix(), forceRowMajor = true) } } case class BlockMatrixPersistWriter(id: String, storageLevel: String) extends BlockMatrixWriter { def pathOpt: Option[String] = None def apply(ctx: ExecuteContext, bm: BlockMatrix): Unit = HailContext.sparkBackend("BlockMatrixPersistWriter").bmCache.persistBlockMatrix(id, bm, storageLevel) } case class BlockMatrixRectanglesWriter( path: String, rectangles: Array[Array[Long]], delimiter: String, binary: Boolean) extends BlockMatrixWriter { def pathOpt: Option[String] = Some(path) def apply(ctx: ExecuteContext, bm: BlockMatrix): Unit = { bm.exportRectangles(ctx, path, rectangles, delimiter, binary) } } abstract class BlockMatrixMultiWriter { def apply(fs: FS, bms: IndexedSeq[BlockMatrix]): Unit } case class BlockMatrixBinaryMultiWriter( prefix: String, overwrite: Boolean) extends BlockMatrixMultiWriter { def apply(fs: FS, bms: IndexedSeq[BlockMatrix]): Unit = BlockMatrix.binaryWriteBlockMatrices(fs, bms, prefix, overwrite) } case class BlockMatrixTextMultiWriter( prefix: String, overwrite: Boolean, delimiter: String, header: Option[String], addIndex: Boolean, compression: Option[String], customFilenames: Option[Array[String]]) extends BlockMatrixMultiWriter { def apply(fs: FS, bms: IndexedSeq[BlockMatrix]): Unit = BlockMatrix.exportBlockMatrices(fs, bms, prefix, overwrite, delimiter, header, addIndex, compression, customFilenames) }
Example 84
Source File: BatchClientSuite.scala From hail with MIT License | 5 votes |
package is.hail.services.batch_client import is.hail.utils._ import org.json4s.JsonAST.{JArray, JBool, JInt, JObject, JString} import org.json4s.{DefaultFormats, Formats} import org.scalatest.testng.TestNGSuite import org.testng.annotations.Test class BatchClientSuite extends TestNGSuite { @Test def testBasic(): Unit = { val client = new BatchClient() val token = tokenUrlSafe(32) val batch = client.run( JObject( "billing_project" -> JString("test"), "n_jobs" -> JInt(1), "token" -> JString(token)), FastIndexedSeq( JObject( "always_run" -> JBool(false), "image" -> JString("ubuntu:18.04"), "mount_docker_socket" -> JBool(false), "command" -> JArray(List( JString("/bin/bash"), JString("-c"), JString("echo 'Hello, world!'"))), "job_id" -> JInt(0), "parent_ids" -> JArray(List())))) implicit val formats: Formats = DefaultFormats assert((batch \ "state").extract[String] == "success") } }
Example 85
Source File: Json4sSerialization.scala From kafka-serialization with Apache License 2.0 | 5 votes |
package com.ovoenergy.kafka.serialization.json4s import java.io.{ByteArrayInputStream, ByteArrayOutputStream, InputStreamReader, OutputStreamWriter} import java.nio.charset.StandardCharsets import com.ovoenergy.kafka.serialization.core._ import org.apache.kafka.common.serialization.{Deserializer => KafkaDeserializer, Serializer => KafkaSerializer} import org.json4s.Formats import org.json4s.native.Serialization.{read, write} import scala.reflect.ClassTag import scala.reflect.runtime.universe._ trait Json4sSerialization { def json4sSerializer[T <: AnyRef](implicit jsonFormats: Formats): KafkaSerializer[T] = serializer { (_, data) => val bout = new ByteArrayOutputStream() val writer = new OutputStreamWriter(bout, StandardCharsets.UTF_8) // TODO Use scala-arm try { write(data, writer) writer.flush() } finally { writer.close() } bout.toByteArray } def json4sDeserializer[T: TypeTag](implicit jsonFormats: Formats): KafkaDeserializer[T] = deserializer { (_, data) => val tt = implicitly[TypeTag[T]] implicit val cl = ClassTag[T](tt.mirror.runtimeClass(tt.tpe)) read[T](new InputStreamReader(new ByteArrayInputStream(data), StandardCharsets.UTF_8)) } }
Example 86
Source File: SparkStageParam.scala From TransmogrifAI with BSD 3-Clause "New" or "Revised" License | 5 votes |
package com.salesforce.op.stages import com.salesforce.op.stages.sparkwrappers.generic.SparkWrapperParams import org.apache.hadoop.fs.Path import org.apache.spark.ml.PipelineStage import org.apache.spark.ml.param.{Param, ParamPair, Params} import org.apache.spark.ml.util.{Identifiable, MLReader, MLWritable} import org.apache.spark.util.SparkUtils import org.json4s.JsonAST.{JObject, JValue} import org.json4s.JsonDSL._ import org.json4s.jackson.JsonMethods.{compact, parse, render} import org.json4s.{DefaultFormats, Formats, JString} class SparkStageParam[S <: PipelineStage with Params] ( parent: String, name: String, doc: String, isValid: Option[S] => Boolean ) extends Param[Option[S]](parent, name, doc, isValid) { import SparkStageParam._ override def jsonDecode(jsonStr: String): Option[S] = { val json = parse(jsonStr) val uid = (json \ "uid").extractOpt[String] val path = (json \ "path").extractOpt[String] path -> uid match { case (None, _) | (_, None) | (_, Some(NoUID)) => savePath = None None case (Some(p), Some(stageUid)) => savePath = Option(p) val stagePath = new Path(p, stageUid).toString val className = (json \ "className").extract[String] val cls = SparkUtils.classForName(className) val stage = cls.getMethod("read").invoke(null).asInstanceOf[MLReader[PipelineStage]].load(stagePath) Option(stage).map(_.asInstanceOf[S]) } } } object SparkStageParam { implicit val formats: Formats = DefaultFormats val NoClass = "" val NoUID = "" def updateParamsMetadataWithPath(jValue: JValue, path: String): JValue = jValue match { case JObject(pairs) => JObject( pairs.map { case (SparkWrapperParams.SparkStageParamName, j) => SparkWrapperParams.SparkStageParamName -> j.merge(JObject("path" -> JString(path))) case param => param } ) case j => throw new IllegalArgumentException(s"Cannot recognize JSON Spark params metadata: $j") } }
Example 87
Source File: FeatureJsonHelper.scala From TransmogrifAI with BSD 3-Clause "New" or "Revised" License | 5 votes |
package com.salesforce.op.features import com.salesforce.op.features.types._ import com.salesforce.op.stages.{OPStage, OpPipelineStage} import org.json4s.JsonAST.{JObject, JValue} import org.json4s.JsonDSL._ import org.json4s.jackson.JsonMethods import org.json4s.jackson.JsonMethods._ import org.json4s.{DefaultFormats, Formats} import scala.reflect.runtime.universe.WeakTypeTag import scala.util.Try def fromJson( json: JValue, stages: Map[String, OPStage], features: Map[String, OPFeature] ): Try[OPFeature] = Try { val typeName = (json \ "typeName").extract[String] val uid = (json \ "uid").extract[String] val name = (json \ "name").extract[String] val isResponse = (json \ "isResponse").extract[Boolean] val originStageUid = (json \ "originStage").extract[String] val parentUids = (json \ "parents").extract[Array[String]] val originStage: Option[OPStage] = stages.get(originStageUid) if (originStage.isEmpty) { throw new RuntimeException(s"Origin stage $originStageUid not found for feature $name ($uid)") } // Order is important and so are duplicates, eg f = f1 + f1 has 2 parents but both the same feature val parents: Seq[OPFeature] = parentUids.flatMap(id => features.get(id)) if (parents.length != parentUids.length) { throw new RuntimeException(s"Not all the parent features were found for feature $name ($uid)") } val wtt = FeatureType.featureTypeTag(typeName).asInstanceOf[WeakTypeTag[FeatureType]] Feature[FeatureType]( uid = uid, name = name, isResponse = isResponse, parents = parents, originStage = originStage.get.asInstanceOf[OpPipelineStage[FeatureType]] )(wtt = wtt) } }
Example 88
Source File: SpecialDoubleSerializerTest.scala From TransmogrifAI with BSD 3-Clause "New" or "Revised" License | 5 votes |
package com.salesforce.op.utils.json import com.salesforce.op.test.TestCommon import org.json4s.jackson.JsonMethods._ import org.json4s.{DefaultFormats, Extraction, Formats} import org.junit.runner.RunWith import org.scalatest.FlatSpec import org.scalatest.junit.JUnitRunner @RunWith(classOf[JUnitRunner]) class SpecialDoubleSerializerTest extends FlatSpec with TestCommon { val data = Map( "normal" -> Seq(-1.1, 0.0, 2.3), "infs" -> Seq(Double.NegativeInfinity, Double.PositiveInfinity), "minMax" -> Seq(Double.MinValue, Double.MaxValue), "nan" -> Seq(Double.NaN) ) Spec[SpecialDoubleSerializer] should behave like readWriteDoubleValues(data)( json = """{"normal":[-1.1,0.0,2.3],"infs":["-Infinity","Infinity"],"minMax":[-1.7976931348623157E308,1.7976931348623157E308],"nan":["NaN"]}""" // scalastyle:off )(DefaultFormats + new SpecialDoubleSerializer) Spec[SpecialDoubleSerializer] + " (with big decimal)" should behave like readWriteDoubleValues(data)( json = """{"normal":[-1.1,0.0,2.3],"infs":["-Infinity","Infinity"],"minMax":[-1.7976931348623157E+308,1.7976931348623157E+308],"nan":["NaN"]}""" // scalastyle:off )(DefaultFormats.withBigDecimal + new SpecialDoubleSerializer) def readWriteDoubleValues(input: Map[String, Seq[Double]])(json: String)(implicit formats: Formats): Unit = { it should "write double entries" in { compact(Extraction.decompose(input)) shouldBe json } it should "read double entries" in { val parsed = parse(json).extract[Map[String, Seq[Double]]] parsed.keys shouldBe input.keys parsed zip input foreach { case (("nan", a), ("nan", b)) => a.foreach(_.isNaN shouldBe true) case ((_, a), (_, b)) => a should contain theSameElementsAs b } } } }
Example 89
Source File: PackedMessage.scala From haystack-traces with Apache License 2.0 | 5 votes |
package com.expedia.www.haystack.trace.commons.packer import java.nio.ByteBuffer import com.google.protobuf.GeneratedMessageV3 import org.json4s.jackson.Serialization import org.json4s.{DefaultFormats, Formats} object PackedMessage { implicit val formats: Formats = DefaultFormats + new org.json4s.ext.EnumSerializer(PackerType) val MAGIC_BYTES: Array[Byte] = "hytc".getBytes("utf-8") } case class PackedMessage[T <: GeneratedMessageV3](protoObj: T, private val pack: (T => Array[Byte]), private val metadata: PackedMetadata) { import PackedMessage._ private lazy val metadataBytes: Array[Byte] = Serialization.write(metadata).getBytes("utf-8") val packedDataBytes: Array[Byte] = { val packedDataBytes = pack(protoObj) if (PackerType.NONE == metadata.t) { packedDataBytes } else { ByteBuffer .allocate(MAGIC_BYTES.length + 4 + metadataBytes.length + packedDataBytes.length) .put(MAGIC_BYTES) .putInt(metadataBytes.length) .put(metadataBytes) .put(packedDataBytes).array() } } }
Example 90
Source File: WhitelistIndexFieldConfigurationSpec.scala From haystack-traces with Apache License 2.0 | 5 votes |
package com.expedia.www.haystack.trace.commons.unit import com.expedia.www.haystack.trace.commons.config.entities.{IndexFieldType, WhiteListIndexFields, WhitelistIndexField, WhitelistIndexFieldConfiguration} import org.json4s.ext.EnumNameSerializer import org.json4s.jackson.Serialization import org.json4s.{DefaultFormats, Formats} import org.scalatest.{Entry, FunSpec, Matchers} import scala.collection.JavaConverters._ class WhitelistIndexFieldConfigurationSpec extends FunSpec with Matchers { protected implicit val formats: Formats = DefaultFormats + new EnumNameSerializer(IndexFieldType) describe("whitelist field configuration") { it("an empty configuration should return whitelist fields as empty") { val config = WhitelistIndexFieldConfiguration() config.indexFieldMap shouldBe 'empty config.whitelistIndexFields shouldBe 'empty } it("a loaded configuration should return the non empty whitelist fields") { val whitelistField_1 = WhitelistIndexField(name = "role", `type` = IndexFieldType.string, enableRangeQuery = true) val whitelistField_2 = WhitelistIndexField(name = "Errorcode", `type` = IndexFieldType.long) val config = WhitelistIndexFieldConfiguration() val cfgJsonData = Serialization.write(WhiteListIndexFields(List(whitelistField_1, whitelistField_2))) // reload config.onReload(cfgJsonData) config.whitelistIndexFields.map(_.name) should contain allOf("role", "errorcode") config.whitelistIndexFields.filter(r => r.name == "role").head.enableRangeQuery shouldBe true config.indexFieldMap.size() shouldBe 2 config.indexFieldMap.keys().asScala.toList should contain allOf("role", "errorcode") config.globalTraceContextIndexFieldNames.size shouldBe 0 val whitelistField_3 = WhitelistIndexField(name = "status", `type` = IndexFieldType.string, aliases = Set("_status", "HTTP-STATUS")) val whitelistField_4 = WhitelistIndexField(name = "something", `type` = IndexFieldType.long, searchContext = "trace") val newCfgJsonData = Serialization.write(WhiteListIndexFields(List(whitelistField_1, whitelistField_3, whitelistField_4))) config.onReload(newCfgJsonData) config.whitelistIndexFields.size shouldBe 5 config.whitelistIndexFields.map(_.name).toSet should contain allOf("status", "something", "role") config.indexFieldMap.size shouldBe 5 config.indexFieldMap.keys().asScala.toList should contain allOf("status", "something", "role", "http-status", "_status") config.onReload(newCfgJsonData) config.whitelistIndexFields.size shouldBe 5 config.whitelistIndexFields.map(_.name).toSet should contain allOf("status", "something", "role") config.indexFieldMap.size() shouldBe 5 config.indexFieldMap.keys().asScala.toList should contain allOf("status", "something", "role", "http-status", "_status") config.indexFieldMap.get("http-status").name shouldEqual "status" config.indexFieldMap.get("_status").name shouldEqual "status" config.globalTraceContextIndexFieldNames.size shouldBe 1 config.globalTraceContextIndexFieldNames.head shouldEqual "something" } } }
Example 91
Source File: ElasticSearchReadResultListenerSpec.scala From haystack-traces with Apache License 2.0 | 5 votes |
package com.expedia.www.haystack.trace.reader.unit.stores.readers.es.query import com.codahale.metrics.{Meter, Timer} import com.expedia.open.tracing.api.{Field, TracesSearchRequest} import com.expedia.www.haystack.trace.commons.config.entities.{IndexFieldType, WhitelistIndexFieldConfiguration} import com.expedia.www.haystack.trace.reader.config.entities.SpansIndexConfiguration import com.expedia.www.haystack.trace.reader.exceptions.ElasticSearchClientError import com.expedia.www.haystack.trace.reader.stores.readers.es.ElasticSearchReadResultListener import com.expedia.www.haystack.trace.reader.stores.readers.es.query.TraceSearchQueryGenerator import com.expedia.www.haystack.trace.reader.unit.BaseUnitTestSpec import io.searchbox.core.SearchResult import org.easymock.EasyMock import org.json4s.ext.EnumNameSerializer import org.json4s.{DefaultFormats, Formats} import scala.concurrent.Promise class ElasticSearchReadResultListenerSpec extends BaseUnitTestSpec { protected implicit val formats: Formats = DefaultFormats + new EnumNameSerializer(IndexFieldType) val ES_INDEX_HOUR_BUCKET = 6 val ES_INDEX_HOUR_TTL = 72 private val spansIndexConfiguration = SpansIndexConfiguration( indexNamePrefix = "haystack-traces", indexType = "spans", indexHourTtl = ES_INDEX_HOUR_TTL, indexHourBucket = ES_INDEX_HOUR_BUCKET, useRootDocumentStartTime = false) private val searchRequest = { val generator = new TraceSearchQueryGenerator(spansIndexConfiguration, "spans", WhitelistIndexFieldConfiguration()) val field = Field.newBuilder().setName("serviceName").setValue("expweb").build() generator.generate(TracesSearchRequest.newBuilder().setStartTime(1510469157572000l).setEndTime(1510469161172000l).setLimit(40).addFields(field).build(), true) } describe("ElasticSearch Read Result Listener") { it("should invoke successful promise with search result") { val promise = mock[Promise[SearchResult]] val timer = mock[Timer.Context] val failureMeter = mock[Meter] val searchResult = mock[SearchResult] expecting { timer.close().once() searchResult.getResponseCode.andReturn(200).atLeastOnce() promise.success(searchResult).andReturn(promise).once() } whenExecuting(promise, timer, failureMeter, searchResult) { val listener = new ElasticSearchReadResultListener(searchRequest, promise, timer, failureMeter) listener.completed(searchResult) } } it("should invoke failed promise with exception object if response code is not 2xx ") { val promise = mock[Promise[SearchResult]] val timer = mock[Timer.Context] val failureMeter = mock[Meter] val searchResult = mock[SearchResult] expecting { timer.close().once() searchResult.getResponseCode.andReturn(500).atLeastOnce() searchResult.getJsonString.andReturn("json-string").times(2) failureMeter.mark() promise.failure(EasyMock.anyObject(classOf[ElasticSearchClientError])).andReturn(promise).once() } whenExecuting(promise, timer, failureMeter, searchResult) { val listener = new ElasticSearchReadResultListener(searchRequest, promise, timer, failureMeter) listener.completed(searchResult) } } it("should invoke failed promise with exception object if failure is generated") { val promise = mock[Promise[SearchResult]] val timer = mock[Timer.Context] val failureMeter = mock[Meter] val expectedException = new Exception expecting { timer.close().once() failureMeter.mark() promise.failure(expectedException).andReturn(promise).once() } whenExecuting(promise, timer, failureMeter) { val listener = new ElasticSearchReadResultListener(searchRequest, promise, timer, failureMeter) listener.failed(expectedException) } } 
} }
Example 92
Source File: QueryValidationApi.scala From NSDb with Apache License 2.0 | 5 votes |
package io.radicalbit.nsdb.web.routes import akka.actor.ActorRef import akka.event.LoggingAdapter import akka.http.scaladsl.model.HttpResponse import akka.http.scaladsl.model.StatusCodes._ import akka.http.scaladsl.server.Directives._ import akka.http.scaladsl.server.Route import akka.pattern.ask import akka.util.Timeout import io.radicalbit.nsdb.common.statement.SelectSQLStatement import io.radicalbit.nsdb.protocol.MessageProtocol.Commands.ValidateStatement import io.radicalbit.nsdb.protocol.MessageProtocol.Events._ import io.radicalbit.nsdb.security.http.NSDBAuthProvider import io.radicalbit.nsdb.security.model.Metric import io.radicalbit.nsdb.sql.parser.SQLStatementParser import io.radicalbit.nsdb.sql.parser.StatementParserResult._ import io.swagger.annotations._ import javax.ws.rs.Path import org.json4s.Formats import scala.annotation.meta.field import scala.util.{Failure, Success} @ApiModel(description = "Query Validation body") case class QueryValidationBody(@(ApiModelProperty @field)(value = "database name ") db: String, @(ApiModelProperty @field)(value = "namespace name ") namespace: String, @(ApiModelProperty @field)(value = "metric name ") metric: String, @(ApiModelProperty @field)(value = "sql query string") queryString: String) extends Metric @Api(value = "/query/validate", produces = "application/json") @Path("/query/validate") trait QueryValidationApi { import io.radicalbit.nsdb.web.NSDbJson._ def readCoordinator: ActorRef def authenticationProvider: NSDBAuthProvider implicit val timeout: Timeout implicit val formats: Formats @ApiOperation(value = "Perform query", nickname = "query", httpMethod = "POST", response = classOf[String]) @ApiImplicitParams( Array( new ApiImplicitParam(name = "body", value = "query definition", required = true, dataTypeClass = classOf[QueryValidationBody], paramType = "body") )) @ApiResponses( Array( new ApiResponse(code = 200, message = "Query is valid"), new ApiResponse(code = 404, message = "Not found item reason"), new ApiResponse(code = 400, message = "statement is invalid") )) def queryValidationApi(implicit logger: LoggingAdapter): Route = { path("query" / "validate") { post { entity(as[QueryValidationBody]) { qb => optionalHeaderValueByName(authenticationProvider.headerName) { header => authenticationProvider.authorizeMetric(ent = qb, header = header, writePermission = false) { new SQLStatementParser().parse(qb.db, qb.namespace, qb.queryString) match { case SqlStatementParserSuccess(_, statement: SelectSQLStatement) => onComplete(readCoordinator ? ValidateStatement(statement)) { case Success(SelectStatementValidated(_)) => complete(HttpResponse(OK)) case Success(SelectStatementValidationFailed(_, reason, MetricNotFound(_))) => complete(HttpResponse(NotFound, entity = reason)) case Success(SelectStatementValidationFailed(_, reason, _)) => complete(HttpResponse(BadRequest, entity = reason)) case Success(r) => logger.error("unknown response received {}", r) complete(HttpResponse(InternalServerError, entity = "unknown response")) case Failure(ex) => logger.error("", ex) complete(HttpResponse(InternalServerError, entity = ex.getMessage)) } case SqlStatementParserSuccess(queryString, _) => complete(HttpResponse(BadRequest, entity = s"statement ${queryString} is not a select statement")) case SqlStatementParserFailure(queryString, _) => complete(HttpResponse(BadRequest, entity = s"statement ${queryString} is invalid")) } } } } } } } }
Example 93
Source File: DataApi.scala From NSDb with Apache License 2.0 | 5 votes |
package io.radicalbit.nsdb.web.routes import javax.ws.rs.Path import akka.actor.ActorRef import akka.pattern.ask import akka.http.scaladsl.model.HttpResponse import akka.http.scaladsl.model.StatusCodes.{BadRequest, InternalServerError} import akka.http.scaladsl.server.Directives._ import akka.http.scaladsl.server.Route import akka.util.Timeout import io.radicalbit.nsdb.common.protocol.Bit import io.radicalbit.nsdb.protocol.MessageProtocol.Commands.MapInput import io.radicalbit.nsdb.protocol.MessageProtocol.Events.{InputMapped, RecordRejected} import io.radicalbit.nsdb.security.http.NSDBAuthProvider import io.radicalbit.nsdb.security.model.Metric import io.swagger.annotations._ import org.json4s.Formats import scala.annotation.meta.field import scala.util.{Failure, Success} @ApiModel(description = "Data insertion body") case class InsertBody(@(ApiModelProperty @field)(value = "database name") db: String, @(ApiModelProperty @field)(value = "namespace name") namespace: String, @(ApiModelProperty @field)(value = "metric name") metric: String, @(ApiModelProperty @field)( value = "bit representing a single row" ) bit: Bit) extends Metric @Api(value = "/data", produces = "application/json") @Path("/data") trait DataApi { import io.radicalbit.nsdb.web.NSDbJson._ import io.radicalbit.nsdb.web.validation.ValidationDirective._ import io.radicalbit.nsdb.web.validation.Validators._ def writeCoordinator: ActorRef def authenticationProvider: NSDBAuthProvider implicit val timeout: Timeout implicit val formats: Formats @ApiOperation(value = "Insert Bit", nickname = "insert", httpMethod = "POST", response = classOf[String]) @ApiImplicitParams( Array( new ApiImplicitParam(name = "body", value = "bit definition", required = true, dataTypeClass = classOf[InsertBody], paramType = "body") )) @ApiResponses( Array( new ApiResponse(code = 500, message = "Internal server error"), new ApiResponse(code = 400, message = "insert statement is invalid") )) def dataApi: Route = pathPrefix("data") { post { entity(as[InsertBody]) { insertBody => optionalHeaderValueByName(authenticationProvider.headerName) { header => validateModel(insertBody).apply { validatedInsertBody => authenticationProvider.authorizeMetric(ent = validatedInsertBody, header = header, writePermission = true) { onComplete( writeCoordinator ? MapInput(validatedInsertBody.bit.timestamp, validatedInsertBody.db, validatedInsertBody.namespace, validatedInsertBody.metric, validatedInsertBody.bit)) { case Success(_: InputMapped) => complete("OK") case Success(RecordRejected(_, _, _, _, _, reasons, _)) => complete(HttpResponse(BadRequest, entity = reasons.mkString(","))) case Success(_) => complete(HttpResponse(InternalServerError, entity = "unknown response")) case Failure(ex) => complete(HttpResponse(InternalServerError, entity = ex.getMessage)) } } } } } } } }
Example 94
Source File: WebResources.scala From NSDb with Apache License 2.0 | 5 votes |
package io.radicalbit.nsdb.web import java.util.concurrent.TimeUnit import akka.actor.ActorRef import akka.event.LoggingAdapter import akka.http.scaladsl.Http import akka.http.scaladsl.server.Directives._ import akka.http.scaladsl.server.Route import akka.stream.ActorMaterializer import akka.util.Timeout import com.typesafe.config.Config import io.radicalbit.nsdb.common.configuration.NSDbConfig.HighLevel._ import io.radicalbit.nsdb.security.NsdbSecurity import org.json4s.Formats import scala.concurrent.duration._ import scala.concurrent.{Await, Future} import scala.util.{Failure, Success} trait WebResources extends WsResources with SSLSupport { this: NsdbSecurity => import CORSSupport._ import VersionHeader._ implicit def formats: Formats def config: Config implicit lazy val materializer = ActorMaterializer() implicit lazy val dispatcher = system.dispatcher implicit lazy val httpTimeout: Timeout = Timeout(config.getDuration("nsdb.http-endpoint.timeout", TimeUnit.SECONDS), TimeUnit.SECONDS) def initWebEndpoint(writeCoordinator: ActorRef, readCoordinator: ActorRef, metadataCoordinator: ActorRef, publisher: ActorRef)(implicit logger: LoggingAdapter) = authProvider match { case Success(provider) => val api: Route = wsResources(publisher, provider) ~ new ApiResources(publisher, readCoordinator, writeCoordinator, metadataCoordinator, provider).apiResources(config) val httpExt = akka.http.scaladsl.Http() val http: Future[Http.ServerBinding] = if (isSSLEnabled) { val interface = config.getString(HttpInterface) val port = config.getInt(HttpsPort) logger.info(s"Cluster Apis started with https protocol at interface $interface on port $port") httpExt.bindAndHandle(withCors(withNSDbVersion(api)), interface, port, connectionContext = serverContext) } else { val interface = config.getString(HttpInterface) val port = config.getInt(HttpPort) logger.info(s"Cluster Apis started with http protocol at interface $interface and port $port") httpExt.bindAndHandle(withCors(withNSDbVersion(api)), interface, port) } scala.sys.addShutdownHook { http .flatMap(_.unbind()) .onComplete { _ => system.terminate() } Await.result(system.whenTerminated, 60 seconds) } case Failure(ex) => logger.error("error on loading authorization provider", ex) System.exit(1) } }
Example 95
Source File: ApiResources.scala From NSDb with Apache License 2.0 | 5 votes |
package io.radicalbit.nsdb.web import akka.actor.ActorRef import akka.event.LoggingAdapter import akka.http.scaladsl.server.Directives._ import akka.http.scaladsl.server._ import akka.util.Timeout import com.typesafe.config.Config import io.radicalbit.nsdb.common.configuration.NSDbConfig.HighLevel._ import io.radicalbit.nsdb.security.http.NSDBAuthProvider import io.radicalbit.nsdb.web.routes._ import io.radicalbit.nsdb.web.swagger.SwaggerDocService import org.json4s.Formats import scala.concurrent.ExecutionContext class ApiResources(val publisherActor: ActorRef, val readCoordinator: ActorRef, val writeCoordinator: ActorRef, val metadataCoordinator: ActorRef, val authenticationProvider: NSDBAuthProvider)(override implicit val timeout: Timeout, implicit val logger: LoggingAdapter, override implicit val ec: ExecutionContext, override implicit val formats: Formats) extends CommandApi with QueryApi with QueryValidationApi with DataApi { def healthCheckApi: Route = { pathPrefix("status") { (pathEnd & get) { complete("RUNNING") } } } def swagger = path("swagger") { getFromResource("swagger-ui/index.html") } ~ getFromResourceDirectory("swagger-ui") def apiResources(config: Config)(implicit ec: ExecutionContext): Route = queryApi ~ queryValidationApi ~ dataApi ~ healthCheckApi ~ commandsApi ~ swagger ~ new SwaggerDocService(config.getString(HttpInterface), config.getInt(HttpPort)).routes }
Example 96
Source File: NsdbNodeEndpoint.scala From NSDb with Apache License 2.0 | 5 votes |
package io.radicalbit.nsdb.cluster import akka.actor.{ActorRef, ActorSystem} import akka.event.{Logging, LoggingAdapter} import com.typesafe.config.Config import io.radicalbit.nsdb.cluster.endpoint.GrpcEndpoint import io.radicalbit.nsdb.security.NsdbSecurity import io.radicalbit.nsdb.web.{BitSerializer, CustomSerializers, WebResources} import org.json4s.{DefaultFormats, Formats} class NsdbNodeEndpoint(readCoordinator: ActorRef, writeCoordinator: ActorRef, metadataCoordinator: ActorRef, publisher: ActorRef)(override implicit val system: ActorSystem) extends WebResources with NsdbSecurity { override val config: Config = system.settings.config override implicit val logger: LoggingAdapter = Logging.getLogger(system, this) new GrpcEndpoint(readCoordinator = readCoordinator, writeCoordinator = writeCoordinator, metadataCoordinator = metadataCoordinator) implicit val formats: Formats = DefaultFormats ++ CustomSerializers.customSerializers + BitSerializer initWebEndpoint(writeCoordinator, readCoordinator, metadataCoordinator, publisher) }
Example 97
Source File: SttpJson4sApi.scala From sttp with Apache License 2.0 | 5 votes |
package sttp.client.json4s import sttp.client._ import sttp.client.internal.Utf8 import org.json4s.{Formats, Serialization} import sttp.model._ import sttp.client.{ResponseAs, ResponseError} trait SttpJson4sApi { implicit def json4sBodySerializer[B <: AnyRef](implicit formats: Formats, serialization: Serialization ): BodySerializer[B] = b => StringBody(serialization.write(b), Utf8, Some(MediaType.ApplicationJson)) def asJsonAlwaysUnsafe[B: Manifest](implicit formats: Formats, serialization: Serialization ): ResponseAs[B, Nothing] = asStringAlways.map(deserializeJson) def deserializeJson[B: Manifest](implicit formats: Formats, serialization: Serialization ): String => B = JsonInput.sanitize[B].andThen(serialization.read[B]) }
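A hypothetical request built against the API above, in sttp client 2.x style. The URL and case class are made up, and it assumes the module's package object exposes these implicits the same way the other sttp JSON modules do:

import sttp.client._
import sttp.client.json4s._
import org.json4s.{DefaultFormats, Formats}

case class User(name: String)

object SttpJson4sSketch extends App {
  implicit val formats: Formats = DefaultFormats
  implicit val serialization: org.json4s.Serialization = org.json4s.native.Serialization
  implicit val backend = HttpURLConnectionBackend()

  val response = basicRequest
    .post(uri"https://example.com/users")
    .body(User("alice"))                 // serialized by json4sBodySerializer
    .response(asJsonAlwaysUnsafe[User])  // reply parsed back into User
    .send()

  println(response.body)
}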
Example 98
Source File: Json4sSupport.scala From kafka-serde-scala with Apache License 2.0 | 5 votes |
package io.github.azhur.kafkaserdejson4s import java.nio.charset.StandardCharsets.UTF_8 import java.util import org.apache.kafka.common.errors.SerializationException import org.apache.kafka.common.serialization.{ Deserializer, Serde, Serializer } import org.json4s.{ Formats, Serialization } import scala.language.implicitConversions import scala.util.control.NonFatal trait Json4sSupport { implicit def toSerializer[T <: AnyRef](implicit serialization: Serialization, formats: Formats): Serializer[T] = new Serializer[T] { override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {} override def close(): Unit = {} override def serialize(topic: String, data: T): Array[Byte] = if (data == null) null else try serialization.write[T](data).getBytes(UTF_8) catch { case NonFatal(e) => throw new SerializationException(e) } } implicit def toDeserializer[T >: Null <: AnyRef: Manifest]( implicit serialization: Serialization, formats: Formats ): Deserializer[T] = new Deserializer[T] { override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {} override def close(): Unit = {} override def deserialize(topic: String, data: Array[Byte]): T = if (data == null) null else try serialization.read[T](new String(data, UTF_8)) catch { case NonFatal(e) => throw new SerializationException(e) } } implicit def toSerde[T >: Null <: AnyRef: Manifest](implicit serialization: Serialization, formats: Formats): Serde[T] = new Serde[T] { override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {} override def close(): Unit = {} override def serializer(): Serializer[T] = toSerializer[T] override def deserializer(): Deserializer[T] = toDeserializer[T] } } object Json4sSupport extends Json4sSupport
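A hypothetical round trip using the conversions above (the topic name and case class are made up): toSerde derives a Kafka Serde from the implicit Serialization and Formats pair.

import io.github.azhur.kafkaserdejson4s.Json4sSupport._
import org.apache.kafka.common.serialization.Serde
import org.json4s.{DefaultFormats, Formats, Serialization}

case class PageView(url: String, userId: String)

object PageViewSerdeSketch extends App {
  implicit val serialization: Serialization = org.json4s.jackson.Serialization
  implicit val formats: Formats = DefaultFormats

  val serde: Serde[PageView] = toSerde[PageView]
  val bytes = serde.serializer().serialize("page-views", PageView("/home", "u1"))
  println(serde.deserializer().deserialize("page-views", bytes)) // PageView(/home,u1)
}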
Example 99
Source File: Json4sSupport.scala From service-container with Apache License 2.0 | 5 votes |
package com.github.vonnagy.service.container.http.json

import java.lang.reflect.InvocationTargetException

import akka.http.scaladsl.marshalling.{Marshaller, ToEntityMarshaller}
import akka.http.scaladsl.model.MediaTypes.`application/json`
import akka.http.scaladsl.unmarshalling.{FromEntityUnmarshaller, Unmarshaller}
import akka.util.ByteString
import org.json4s.JsonAST.JValue
import org.json4s.{Formats, MappingException, Serialization}

  implicit def json4sMarshaller[A <: AnyRef](
      implicit serialization: Serialization,
      formats: Formats,
      shouldWritePretty: ShouldWritePretty = ShouldWritePretty.False
  ): ToEntityMarshaller[A] = {
    shouldWritePretty match {
      case ShouldWritePretty.False => jsonStringMarshaller.compose(serialization.write[A])
      case ShouldWritePretty.True  => jsonStringMarshaller.compose(serialization.writePretty[A])
    }
  }

  implicit def json4sJValueMarshaller[A <: JValue](
      implicit serialization: Serialization,
      formats: Formats,
      shouldWritePretty: ShouldWritePretty = ShouldWritePretty.False
  ): ToEntityMarshaller[A] = {
    shouldWritePretty match {
      case ShouldWritePretty.False => jsonStringMarshaller.compose(serialization.write[A])
      case ShouldWritePretty.True  => jsonStringMarshaller.compose(serialization.writePretty[A])
    }
  }
}
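The marshallers above delegate to a jsonStringMarshaller helper that the snippet does not show. Under akka-http such a helper is typically defined along the following lines; this is a hedged sketch of the usual pattern, not code taken from the project.

import akka.http.scaladsl.marshalling.{Marshaller, ToEntityMarshaller}
import akka.http.scaladsl.model.MediaTypes.`application/json`

trait JsonStringMarshalling {
  // Marshal an already-rendered JSON string as an application/json entity.
  def jsonStringMarshaller: ToEntityMarshaller[String] =
    Marshaller.stringMarshaller(`application/json`)
}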
Example 100
Source File: PersistenceRecord.scala From vamp with Apache License 2.0 | 5 votes |
package io.vamp.persistence

import java.time.OffsetDateTime

import io.vamp.common.json.{ OffsetDateTimeSerializer, SerializationFormat }
import io.vamp.common.notification.NotificationProvider
import io.vamp.common.{ Artifact, Config, Namespace, NamespaceProvider }
import io.vamp.model.Model
import io.vamp.persistence.notification.UnknownDataFormatException
import org.json4s.Formats
import org.json4s.native.Serialization
import org.json4s.native.Serialization.write

import scala.util.Try

object PersistenceRecord {

  def apply(name: String, kind: String): PersistenceRecord =
    PersistenceRecord(Model.version, Model.uuid, OffsetDateTime.now(), name, kind, None)

  def apply(name: String, kind: String, artifact: String): PersistenceRecord =
    PersistenceRecord(Model.version, Model.uuid, OffsetDateTime.now(), name, kind, Option(artifact))
}

case class PersistenceRecord(version: String,
                             instance: String,
                             timestamp: OffsetDateTime,
                             name: String,
                             kind: String,
                             artifact: Option[String])

abstract class PersistenceRecordTransformer(namespace: Namespace) {

  def timeDependent: Boolean = false

  def read(input: String): String

  def write(input: String): String
}

trait PersistenceRecordMarshaller {
  this: NamespaceProvider ⇒

  protected val transformersPath = "vamp.persistence.transformers.classes"

  private lazy val transformers = {
    val transformerClasses = if (Config.has(transformersPath)(namespace)()) Config.stringList(transformersPath)() else Nil
    transformerClasses.map { clazz ⇒
      Class.forName(clazz).getConstructor(classOf[Namespace]).newInstance(namespace).asInstanceOf[PersistenceRecordTransformer]
    }
  }

  lazy val timeDependent: Boolean = transformers.exists(_.timeDependent)

  def marshallRecord(record: PersistenceRecord): String = {
    val content = write(record)(SerializationFormat(OffsetDateTimeSerializer))
    transformers.foldLeft[String](content)((input, transformer) ⇒ transformer.write(input))
  }

  def unmarshallRecord(source: String): PersistenceRecord = {
    val input = transformers.foldRight[String](source)((transformer, source) ⇒ transformer.read(source))
    implicit val format: Formats = SerializationFormat(OffsetDateTimeSerializer)
    Serialization.read[PersistenceRecord](input)
  }
}

trait PersistenceDataReader extends PersistenceRecordMarshaller with PersistenceMarshaller {
  this: PersistenceApi with NamespaceProvider with NotificationProvider ⇒

  protected def dataSet(artifact: Artifact, kind: String): Artifact

  protected def dataDelete(name: String, kind: String): Unit

  protected def dataRead(data: String): PersistenceRecord = {
    val record = Try(unmarshallRecord(data)).getOrElse(throwException(UnknownDataFormatException("")))
    record.artifact match {
      case Some(content) ⇒ unmarshall(record.kind, content).map(a ⇒ dataSet(a, record.kind)).getOrElse(throwException(UnknownDataFormatException(record.kind)))
      case None          ⇒ dataDelete(record.name, record.kind)
    }
    record
  }
}
Example 101
Source File: ObjectUtil.scala From vamp with Apache License 2.0 | 5 votes |
package io.vamp.common.util

import org.json4s.{ Extraction, Formats }

import scala.collection.JavaConverters._
import scala.reflect.runtime.currentMirror
import scala.reflect.runtime.universe._

object ObjectUtil {

  def isPrimitive(any: Any) = any match {
    case _: Boolean ⇒ true
    case _: Byte    ⇒ true
    case _: Char    ⇒ true
    case _: Short   ⇒ true
    case _: Int     ⇒ true
    case _: Long    ⇒ true
    case _: Float   ⇒ true
    case _: Double  ⇒ true
    case _: String  ⇒ true
    case _          ⇒ false
  }

  def unwrap: Any ⇒ Any = {
    case p if isPrimitive(p)  ⇒ p
    case e: Enumeration#Value ⇒ e
    case l: List[_]           ⇒ l.map(unwrap)
    case m: Map[_, _]         ⇒ m.map { case (k, v) ⇒ k → unwrap(v) }
    case null                 ⇒ None
    case Some(s)              ⇒ Option(unwrap(s))
    case None                 ⇒ None
    case any ⇒
      val reflection = currentMirror.reflect(any)
      currentMirror.reflect(any).symbol.typeSignature.members.toList
        .collect { case s: TermSymbol if !s.isMethod ⇒ reflection.reflectField(s) }
        .map(r ⇒ r.symbol.name.toString.trim → unwrap(r.get))
        .toMap
  }

  def asJava: Any ⇒ Any = {
    case l: List[_]   ⇒ l.map(asJava).asJava
    case m: Map[_, _] ⇒ m.map({ case (k, v) ⇒ k → asJava(v) }).asJava
    case Some(s)      ⇒ Option(asJava(s))
    case any          ⇒ any
  }

  def asScala: Any ⇒ AnyRef = {
    case value: java.util.Map[_, _]   ⇒ value.asScala.map({ case (k, v) ⇒ k → asScala(v) }).toMap
    case value: java.util.List[_]     ⇒ value.asScala.map(asScala).toList
    case value: java.lang.Iterable[_] ⇒ value.asScala.map(asScala).toList
    case value: java.util.Optional[_] ⇒ if (value.isPresent) Option(asScala(value.get)) else None
    case value                        ⇒ value.asInstanceOf[AnyRef]
  }

  def merge(maps: Map[String, Any]*)(implicit formats: Formats): Map[String, AnyRef] = {
    maps.tail.foldLeft(Extraction.decompose(maps.head)) { (op1, op2) ⇒
      op1 merge Extraction.decompose(op2)
    }.extract[Map[String, AnyRef]]
  }
}
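A minimal sketch of calling ObjectUtil.merge; DefaultFormats and the sample maps are assumptions, chosen only to show how later maps override keys from earlier ones.

import org.json4s.DefaultFormats
import io.vamp.common.util.ObjectUtil

object MergeExample extends App {
  implicit val formats: org.json4s.Formats = DefaultFormats

  val merged = ObjectUtil.merge(
    Map("name" -> "service", "replicas" -> 1),
    Map("replicas" -> 3, "labels" -> Map("env" -> "test"))
  )

  println(merged) // later maps win on conflicting keys: replicas ends up as 3
}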
Example 102
Source File: JsonConversions.scala From coral with Apache License 2.0 | 5 votes |
package io.coral.api

import java.lang.reflect.InvocationTargetException

import org.json4s.{MappingException, DefaultFormats, Formats}
import spray.http.{HttpCharsets, HttpEntity, MediaTypes}
import spray.httpx.Json4sJacksonSupport
import spray.httpx.unmarshalling.Unmarshaller

object JsonConversions extends Json4sJacksonSupport {

  implicit def json4sJacksonFormats: Formats = DefaultFormats

  implicit def jsonApiUnmarshaller[T: Manifest] =
    Unmarshaller[T](MediaTypes.`application/json`) {
      case x: HttpEntity.NonEmpty ⇒
        try serialization.read[T](x.asString(defaultCharset = HttpCharsets.`UTF-8`))
        catch {
          case MappingException("unknown error", ite: InvocationTargetException) ⇒ throw ite.getCause
        }
    }
}
Example 103
Source File: CustomFormats.scala From twitter4s with Apache License 2.0 | 5 votes |
package com.danielasfregola.twitter4s.http.serializers

import java.time._

import com.danielasfregola.twitter4s.entities.enums.DisconnectionCode
import com.danielasfregola.twitter4s.entities.enums.DisconnectionCode.DisconnectionCode
import com.danielasfregola.twitter4s.entities.ProfileImage
import org.json4s.JsonAST.{JInt, JLong, JNull, JString}
import org.json4s.{CustomSerializer, Formats}

private[twitter4s] object CustomFormats extends FormatsComposer {

  override def compose(f: Formats): Formats =
    f + InstantSerializer + LocalDateSerializer + DisconnectionCodeSerializer + ProfileImageSerializer
}

private[twitter4s] case object InstantSerializer
    extends CustomSerializer[Instant](format =>
      ({
        case JInt(i)                                            => DateTimeFormatter.parseInstant(i.toLong)
        case JLong(l)                                           => DateTimeFormatter.parseInstant(l)
        case JString(s) if DateTimeFormatter.canParseInstant(s) => DateTimeFormatter.parseInstant(s)
        case JString(stringAsUnixTime) if stringAsUnixTime.forall(_.isDigit) =>
          Instant.ofEpochMilli(stringAsUnixTime.toLong)
      }, {
        case instant: Instant => JString(DateTimeFormatter.formatInstant(instant))
      }))

private[twitter4s] case object LocalDateSerializer
    extends CustomSerializer[LocalDate](format =>
      ({
        case JString(dateString) =>
          dateString.split("-") match {
            case Array(year, month, date) => LocalDate.of(year.toInt, month.toInt, date.toInt)
            case _                        => null
          }
        case JNull => null
      }, {
        case date: LocalDate => JString(date.toString)
      }))

private[twitter4s] case object DisconnectionCodeSerializer
    extends CustomSerializer[DisconnectionCode](format =>
      ({
        case JInt(n) => DisconnectionCode(n.toInt)
        case JNull   => null
      }, {
        case code: DisconnectionCode => JInt(code.id)
      }))

private[twitter4s] case object ProfileImageSerializer
    extends CustomSerializer[ProfileImage](format =>
      ({
        case JString(n) => ProfileImage(n)
        case JNull      => null
      }, {
        case img: ProfileImage => JString(img.normal)
      }))
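A minimal sketch of how a FormatsComposer like CustomFormats is applied to a base Formats instance. Since the object is private[twitter4s], code outside that package cannot call it directly; this only illustrates the composition pattern.

import org.json4s.{DefaultFormats, Formats}

object CustomFormatsUsage {
  // A composer appends its serializers onto whatever Formats it receives,
  // so composers can be chained one after another.
  val formats: Formats = CustomFormats.compose(DefaultFormats)
}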
Example 104
Source File: EnumFormats.scala From twitter4s with Apache License 2.0 | 5 votes |
package com.danielasfregola.twitter4s.http.serializers

import com.danielasfregola.twitter4s.entities.enums._
import org.json4s.Formats
import org.json4s.ext.EnumNameSerializer

private[twitter4s] object EnumFormats extends FormatsComposer {

  override def compose(f: Formats): Formats =
    f +
      new EnumNameSerializer(Alignment) +
      new EnumNameSerializer(ContributorType) +
      new EnumNameSerializer(DisconnectionCode) +
      new EnumNameSerializer(SimpleEventCode) +
      new EnumNameSerializer(TweetEventCode) +
      new EnumNameSerializer(TwitterListEventCode) +
      new EnumNameSerializer(Granularity) +
      new EnumNameSerializer(Hour) +
      new EnumNameSerializer(Language) +
      new EnumNameSerializer(Measure) +
      new EnumNameSerializer(Mode) +
      new EnumNameSerializer(Resource) +
      new EnumNameSerializer(ResultType) +
      new EnumNameSerializer(TimeZone) +
      new EnumNameSerializer(WidgetType) +
      new EnumNameSerializer(WithFilter)
}
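A minimal sketch of what a single EnumNameSerializer does; the Color enumeration, Pixel case class, and expected output are assumptions used only to illustrate the json4s-ext serializer, they are not part of twitter4s.

import org.json4s.{DefaultFormats, Formats}
import org.json4s.ext.EnumNameSerializer
import org.json4s.native.Serialization

object Color extends Enumeration {
  val Red, Green, Blue = Value
}

case class Pixel(color: Color.Value)

object EnumNameExample extends App {
  implicit val formats: Formats = DefaultFormats + new EnumNameSerializer(Color)

  // Enumeration values are written as their names rather than their numeric ids.
  println(Serialization.write(Pixel(Color.Green))) // {"color":"Green"}
}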