org.json4s.native.JsonMethods.parse Scala Examples
The following examples show how to use org.json4s.native.JsonMethods.parse.
Each example is drawn from an open-source project; the source file, project, and license are noted above it.
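Before the project examples, here is a minimal self-contained sketch of the pattern they all share: parse a JSON string into a JValue, then extract it into a case class under an implicit Formats. The Person class and the JSON literal are illustrative only, not taken from any project below.

import org.json4s._
import org.json4s.native.JsonMethods.parse

// A small target type for extraction; purely illustrative.
case class Person(name: String, age: Int)

object ParseSketch extends App {
  // DefaultFormats provides the standard extraction rules for case classes.
  implicit val formats: Formats = DefaultFormats

  val person = parse("""{"name": "Jon", "age": 42}""").extract[Person]
  println(person) // Person(Jon,42)
}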
Example 1
Source File: FeeStatsResponseSpec.scala From scala-stellar-sdk with Apache License 2.0
package stellar.sdk.model.response

import org.json4s.NoTypeHints
import org.json4s.native.JsonMethods.parse
import org.json4s.native.Serialization
import org.specs2.mutable.Specification
import stellar.sdk.ArbitraryInput

class FeeStatsResponseSpec extends Specification with ArbitraryInput {

  implicit val formats = Serialization.formats(NoTypeHints) + FeeStatsRespDeserializer

  "a fee stats response document" should {
    "parse to a fee stats response" >> prop { r: FeeStatsResponse =>
      val json =
        s"""
           |{
           |  "last_ledger": "${r.lastLedger}",
           |  "last_ledger_base_fee": "${r.lastLedgerBaseFee.units}",
           |  "ledger_capacity_usage": "${r.ledgerCapacityUsage}",
           |  "fee_charged": {
           |    "max": "${r.chargedFees.max.units}",
           |    "min": "${r.chargedFees.min.units}",
           |    "mode": "${r.chargedFees.mode.units}",
           |    "p10": "${r.chargedFees.percentiles(10).units}",
           |    "p20": "${r.chargedFees.percentiles(20).units}",
           |    "p30": "${r.chargedFees.percentiles(30).units}",
           |    "p40": "${r.chargedFees.percentiles(40).units}",
           |    "p50": "${r.chargedFees.percentiles(50).units}",
           |    "p60": "${r.chargedFees.percentiles(60).units}",
           |    "p70": "${r.chargedFees.percentiles(70).units}",
           |    "p80": "${r.chargedFees.percentiles(80).units}",
           |    "p90": "${r.chargedFees.percentiles(90).units}",
           |    "p95": "${r.chargedFees.percentiles(95).units}",
           |    "p99": "${r.chargedFees.percentiles(99).units}"
           |  },
           |  "max_fee": {
           |    "max": "${r.maxFees.max.units}",
           |    "min": "${r.maxFees.min.units}",
           |    "mode": "${r.maxFees.mode.units}",
           |    "p10": "${r.maxFees.percentiles(10).units}",
           |    "p20": "${r.maxFees.percentiles(20).units}",
           |    "p30": "${r.maxFees.percentiles(30).units}",
           |    "p40": "${r.maxFees.percentiles(40).units}",
           |    "p50": "${r.maxFees.percentiles(50).units}",
           |    "p60": "${r.maxFees.percentiles(60).units}",
           |    "p70": "${r.maxFees.percentiles(70).units}",
           |    "p80": "${r.maxFees.percentiles(80).units}",
           |    "p90": "${r.maxFees.percentiles(90).units}",
           |    "p95": "${r.maxFees.percentiles(95).units}",
           |    "p99": "${r.maxFees.percentiles(99).units}"
           |  }
           |}
         """.stripMargin

      val actual = parse(json).extract[FeeStatsResponse]
      actual mustEqual r
      actual.acceptedFeePercentiles mustEqual actual.chargedFees.percentiles
      actual.minAcceptedFee mustEqual actual.chargedFees.min
      actual.modeAcceptedFee mustEqual actual.chargedFees.mode
    }
  }
}
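Most of the stellar-sdk specs in this listing register a custom deserializer (such as FeeStatsRespDeserializer above) into the implicit Formats before calling extract. A minimal sketch of that registration pattern, using a made-up Price type and PriceDeserializer that are not part of the SDK:

import org.json4s._
import org.json4s.native.JsonMethods.parse
import org.json4s.native.Serialization

case class Price(units: Long)
case class Offer(price: Price)

// Maps a JSON string such as "100" to Price and back.
object PriceDeserializer extends CustomSerializer[Price](_ => (
  { case JString(s) => Price(s.toLong) },
  { case Price(u) => JString(u.toString) }
))

object CustomFormatsSketch extends App {
  // The + operator stacks custom serializers onto the base formats.
  implicit val formats: Formats = Serialization.formats(NoTypeHints) + PriceDeserializer
  println(parse("""{"price": "100"}""").extract[Offer]) // Offer(Price(100))
}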
Example 2
Source File: PathPaymentStrictReceiveOperationSpec.scala From scala-stellar-sdk with Apache License 2.0
package stellar.sdk.model.op

import org.json4s.{Formats, NoTypeHints}
import org.json4s.native.JsonMethods.parse
import org.json4s.native.Serialization
import org.scalacheck.Arbitrary
import org.specs2.mutable.Specification
import stellar.sdk.util.ByteArrays.base64
import stellar.sdk.{ArbitraryInput, DomainMatchers}

class PathPaymentStrictReceiveOperationSpec extends Specification with ArbitraryInput with DomainMatchers with JsonSnippets {

  implicit val arb: Arbitrary[Transacted[PathPaymentStrictReceiveOperation]] =
    Arbitrary(genTransacted(genPathPaymentStrictReceiveOperation))
  implicit val formats: Formats = Serialization.formats(NoTypeHints) + TransactedOperationDeserializer

  "path payment operation" should {
    "serde via xdr string" >> prop { actual: PathPaymentStrictReceiveOperation =>
      Operation.decodeXDR(base64(actual.encode)) must beEquivalentTo(actual)
    }

    "serde via xdr bytes" >> prop { actual: PathPaymentStrictReceiveOperation =>
      val (remaining, decoded) = Operation.decode.run(actual.encode).value
      decoded mustEqual actual
      remaining must beEmpty
    }

    "parse from json" >> prop { op: Transacted[PathPaymentStrictReceiveOperation] =>
      val doc =
        s"""
           |{
           |  "_links":{
           |    "self":{"href":"https://horizon-testnet.stellar.org/operations/940258535411713"},
           |    "transaction":{"href":"https://horizon-testnet.stellar.org/transactions/a995af17837d1b53fb5782269250a36e9dbe74170260b46f2708e5f23f7c864a"},
           |    "effects":{"href":"https://horizon-testnet.stellar.org/operations/940258535411713/effects"},
           |    "succeeds":{"href":"https://horizon-testnet.stellar.org/effects?order=desc&cursor=940258535411713"},
           |    "precedes":{"href":"https://horizon-testnet.stellar.org/effects?order=asc&cursor=940258535411713"}
           |  },
           |  "id": "${op.id}",
           |  "paging_token": "10157597659137",
           |  "source_account": "${op.operation.sourceAccount.get.accountId}",
           |  "type":"path_payment",
           |  "type_i":2,
           |  "created_at": "${formatter.format(op.createdAt)}",
           |  "transaction_hash": "${op.txnHash}",
           |  ${amountDocPortion(op.operation.destinationAmount)}
           |  ${amountDocPortion(op.operation.sendMax, "source_max", "source_")}
           |  "from":"${op.operation.sourceAccount.get.accountId}",
           |  "to":"${op.operation.destinationAccount.publicKey.accountId}",
           |  "path":[${if (op.operation.path.isEmpty) "" else op.operation.path.map(asset(_)).mkString("{", "},{", "}")}]
           |}
         """.stripMargin

      parse(doc).extract[Transacted[Operation]] mustEqual removeDestinationSubAccountId(op)
    }.setGen(genTransacted(genPathPaymentStrictReceiveOperation.suchThat(_.sourceAccount.nonEmpty)))
  }

  // Because sub accounts are not yet supported in Horizon JSON.
  private def removeDestinationSubAccountId(op: Transacted[PathPaymentStrictReceiveOperation]): Transacted[PathPaymentStrictReceiveOperation] = {
    op.copy(operation = op.operation.copy(destinationAccount = op.operation.destinationAccount.copy(subAccountId = None)))
  }
}
Example 3
Source File: CreateAccountOperationSpec.scala From scala-stellar-sdk with Apache License 2.0
package stellar.sdk.model.op

import org.json4s.{Formats, NoTypeHints}
import org.json4s.native.JsonMethods.parse
import org.json4s.native.Serialization
import org.scalacheck.Arbitrary
import org.specs2.mutable.Specification
import stellar.sdk.util.ByteArrays.base64
import stellar.sdk.{ArbitraryInput, DomainMatchers}

class CreateAccountOperationSpec extends Specification with ArbitraryInput with DomainMatchers with JsonSnippets {

  implicit val arb: Arbitrary[Transacted[CreateAccountOperation]] =
    Arbitrary(genTransacted(genCreateAccountOperation))
  implicit val formats: Formats = Serialization.formats(NoTypeHints) + TransactedOperationDeserializer + OperationDeserializer

  "create account operation" should {
    "serde via xdr string" >> prop { actual: CreateAccountOperation =>
      Operation.decodeXDR(base64(actual.encode)) must beEquivalentTo(actual)
    }

    "serde via xdr bytes" >> prop { actual: CreateAccountOperation =>
      val (remaining, decoded) = Operation.decode.run(actual.encode).value
      decoded mustEqual actual
      remaining must beEmpty
    }

    "be parsed from json " >> prop { op: Transacted[CreateAccountOperation] =>
      val doc =
        s"""
           |{
           |  "_links": {
           |    "self": {"href": "https://horizon-testnet.stellar.org/operations/10157597659137"},
           |    "transaction": {"href": "https://horizon-testnet.stellar.org/transactions/17a670bc424ff5ce3b386dbfaae9990b66a2a37b4fbe51547e8794962a3f9e6a"},
           |    "effects": {"href": "https://horizon-testnet.stellar.org/operations/10157597659137/effects"},
           |    "succeeds": {"href": "https://horizon-testnet.stellar.org/effects?order=desc\u0026cursor=10157597659137"},
           |    "precedes": {"href": "https://horizon-testnet.stellar.org/effects?order=asc\u0026cursor=10157597659137"}
           |  },
           |  "id": "${op.id}",
           |  "paging_token": "10157597659137",
           |  "source_account": "${op.operation.sourceAccount.get.accountId}",
           |  "type": "create_account",
           |  "type_i": 0,
           |  "created_at": "${formatter.format(op.createdAt)}",
           |  "transaction_hash": "${op.txnHash}",
           |  "starting_balance": "${amountString(op.operation.startingBalance)}",
           |  "funder": "${op.operation.sourceAccount.get.accountId}",
           |  "account": "${op.operation.destinationAccount.publicKey.accountId}"
           |}
         """.stripMargin

      parse(doc).extract[Transacted[CreateAccountOperation]] mustEqual removeDestinationSubAccountId(op)
    }.setGen(genTransacted(genCreateAccountOperation.suchThat(_.sourceAccount.nonEmpty)))
  }

  // Because sub accounts are not yet supported in Horizon JSON.
  private def removeDestinationSubAccountId(op: Transacted[CreateAccountOperation]): Transacted[CreateAccountOperation] = {
    op.copy(operation = op.operation.copy(destinationAccount = op.operation.destinationAccount.copy(subAccountId = None)))
  }
}
Example 4
Source File: InflationOperationSpec.scala From scala-stellar-sdk with Apache License 2.0
package stellar.sdk.model.op

import org.json4s.NoTypeHints
import org.json4s.native.JsonMethods.parse
import org.json4s.native.Serialization
import org.scalacheck.Arbitrary
import org.specs2.mutable.Specification
import stellar.sdk.util.ByteArrays.base64
import stellar.sdk.{ArbitraryInput, DomainMatchers}

class InflationOperationSpec extends Specification with ArbitraryInput with DomainMatchers with JsonSnippets {

  implicit val arb: Arbitrary[Transacted[InflationOperation]] = Arbitrary(genTransacted(genInflationOperation))
  implicit val formats = Serialization.formats(NoTypeHints) + TransactedOperationDeserializer

  "the inflation operation" should {
    "serde via xdr string" >> prop { actual: InflationOperation =>
      Operation.decodeXDR(base64(actual.encode)) mustEqual actual
    }

    "serde via xdr bytes" >> prop { actual: InflationOperation =>
      val (remaining, decoded) = Operation.decode.run(actual.encode).value
      decoded mustEqual actual
      remaining must beEmpty
    }

    "parse from json" >> prop { op: Transacted[InflationOperation] =>
      val doc =
        s"""
           | {
           |  "_links": {
           |    "self": {"href": "https://horizon-testnet.stellar.org/operations/10157597659144"},
           |    "transaction": {"href": "https://horizon-testnet.stellar.org/transactions/17a670bc424ff5ce3b386dbfaae9990b66a2a37b4fbe51547e8794962a3f9e6a"},
           |    "effects": {"href": "https://horizon-testnet.stellar.org/operations/10157597659144/effects"},
           |    "succeeds": {"href": "https://horizon-testnet.stellar.org/effects?order=desc\u0026cursor=10157597659144"},
           |    "precedes": {"href": "https://horizon-testnet.stellar.org/effects?order=asc\u0026cursor=10157597659144"}
           |  },
           |  "id": "${op.id}",
           |  "paging_token": "10157597659137",
           |  "source_account": "${op.operation.sourceAccount.get.accountId}",
           |  "type": "inflation",
           |  "type_i": 9,
           |  "created_at": "${formatter.format(op.createdAt)}",
           |  "transaction_hash": "${op.txnHash}",
           |}
         """.stripMargin

      parse(doc).extract[Transacted[InflationOperation]] mustEqual op
    }.setGen(genTransacted(genInflationOperation.suchThat(_.sourceAccount.nonEmpty)))
  }
}
Example 5
Source File: BumpSequenceOperationSpec.scala From scala-stellar-sdk with Apache License 2.0
package stellar.sdk.model.op

import org.json4s.NoTypeHints
import org.json4s.native.JsonMethods.parse
import org.json4s.native.Serialization
import org.scalacheck.Arbitrary
import org.specs2.mutable.Specification
import stellar.sdk.util.ByteArrays
import stellar.sdk.{ArbitraryInput, DomainMatchers}

class BumpSequenceOperationSpec extends Specification with ArbitraryInput with DomainMatchers with JsonSnippets {

  implicit val arb: Arbitrary[Transacted[BumpSequenceOperation]] = Arbitrary(genTransacted(genBumpSequenceOperation))
  implicit val formats = Serialization.formats(NoTypeHints) + TransactedOperationDeserializer

  "bump sequence operation" should {
    "serde via xdr bytes" >> prop { actual: BumpSequenceOperation =>
      val (remaining, decoded) = Operation.decode.run(actual.encode).value
      decoded mustEqual actual
      remaining must beEmpty
    }

    "serde via xdr string" >> prop { actual: BumpSequenceOperation =>
      Operation.decodeXDR(ByteArrays.base64(actual.encode)) mustEqual actual
    }

    "parse from json" >> prop { op: Transacted[BumpSequenceOperation] =>
      val doc =
        s"""
           | {
           |  "_links": {
           |    "self": {"href": "https://horizon-testnet.stellar.org/operations/10157597659144"},
           |    "transaction": {"href": "https://horizon-testnet.stellar.org/transactions/17a670bc424ff5ce3b386dbfaae9990b66a2a37b4fbe51547e8794962a3f9e6a"},
           |    "effects": {"href": "https://horizon-testnet.stellar.org/operations/10157597659144/effects"},
           |    "succeeds": {"href": "https://horizon-testnet.stellar.org/effects?order=desc\u0026cursor=10157597659144"},
           |    "precedes": {"href": "https://horizon-testnet.stellar.org/effects?order=asc\u0026cursor=10157597659144"}
           |  },
           |  "id": "${op.id}",
           |  "paging_token": "10157597659137",
           |  "source_account": "${op.operation.sourceAccount.get.accountId}",
           |  "type": "bump_sequence",
           |  "type_i": 11,
           |  "created_at": "${formatter.format(op.createdAt)}",
           |  "transaction_hash": "${op.txnHash}",
           |  "bump_to": ${op.operation.bumpTo}
           |}
         """.stripMargin

      parse(doc).extract[Transacted[BumpSequenceOperation]] mustEqual op
    }.setGen(genTransacted(genBumpSequenceOperation.suchThat(_.sourceAccount.nonEmpty)))
  }
}
Example 6
Source File: CreatePassiveSellOfferOperationSpec.scala From scala-stellar-sdk with Apache License 2.0
package stellar.sdk.model.op

import org.json4s.NoTypeHints
import org.json4s.native.JsonMethods.parse
import org.json4s.native.Serialization
import org.scalacheck.Arbitrary
import org.specs2.mutable.Specification
import stellar.sdk.util.ByteArrays.base64
import stellar.sdk.{ArbitraryInput, DomainMatchers}

class CreatePassiveSellOfferOperationSpec extends Specification with ArbitraryInput with DomainMatchers with JsonSnippets {

  implicit val arb: Arbitrary[Transacted[CreatePassiveSellOfferOperation]] =
    Arbitrary(genTransacted(genCreatePassiveSellOfferOperation))
  implicit val formats = Serialization.formats(NoTypeHints) + TransactedOperationDeserializer + OperationDeserializer

  "create passive offer operation" should {
    "serde via xdr string" >> prop { actual: CreatePassiveSellOfferOperation =>
      Operation.decodeXDR(base64(actual.encode)) must beEquivalentTo(actual)
    }

    "serde via xdr bytes" >> prop { actual: CreatePassiveSellOfferOperation =>
      val (remaining, decoded) = Operation.decode.run(actual.encode).value
      decoded mustEqual actual
      remaining must beEmpty
    }

    "parse from json" >> prop { op: Transacted[CreatePassiveSellOfferOperation] =>
      val doc =
        s"""
           |{
           |  "_links": {
           |    "self": {"href": "https://horizon-testnet.stellar.org/operations/10157597659137"},
           |    "transaction": {"href": "https://horizon-testnet.stellar.org/transactions/17a670bc424ff5ce3b386dbfaae9990b66a2a37b4fbe51547e8794962a3f9e6a"},
           |    "effects": {"href": "https://horizon-testnet.stellar.org/operations/10157597659137/effects"},
           |    "succeeds": {"href": "https://horizon-testnet.stellar.org/effects?order=desc\u0026cursor=10157597659137"},
           |    "precedes": {"href": "https://horizon-testnet.stellar.org/effects?order=asc\u0026cursor=10157597659137"}
           |  },
           |  "id": "${op.id}",
           |  "paging_token": "10157597659137",
           |  "source_account": "${op.operation.sourceAccount.get.accountId}",
           |  "type": "create_passive_sell_offer",
           |  "type_i": 4,
           |  "created_at": "${formatter.format(op.createdAt)}",
           |  "transaction_hash": "${op.txnHash}",
           |  ${amountDocPortion(op.operation.selling, assetPrefix = "selling_")},
           |  ${asset(op.operation.buying, "buying_")},
           |  "offer_id": 0,
           |  "price": "1.0",
           |  "price_r": {
           |    "d": ${op.operation.price.d},
           |    "n": ${op.operation.price.n}
           |  }
           |}
         """.stripMargin

      parse(doc).extract[Transacted[CreatePassiveSellOfferOperation]] mustEqual op
    }.setGen(genTransacted(genCreatePassiveSellOfferOperation.suchThat(_.sourceAccount.nonEmpty)))
  }
}
Example 7
Source File: PathPaymentStrictSendOperationSpec.scala From scala-stellar-sdk with Apache License 2.0
package stellar.sdk.model.op

import org.json4s.{Formats, NoTypeHints}
import org.json4s.native.JsonMethods.parse
import org.json4s.native.Serialization
import org.scalacheck.Arbitrary
import org.specs2.mutable.Specification
import stellar.sdk.util.ByteArrays.base64
import stellar.sdk.{ArbitraryInput, DomainMatchers}

class PathPaymentStrictSendOperationSpec extends Specification with ArbitraryInput with DomainMatchers with JsonSnippets {

  implicit val arb: Arbitrary[Transacted[PathPaymentStrictSendOperation]] =
    Arbitrary(genTransacted(genPathPaymentStrictSendOperation))
  implicit val formats: Formats = Serialization.formats(NoTypeHints) + TransactedOperationDeserializer

  "path payment operation" should {
    "serde via xdr string" >> prop { actual: PathPaymentStrictSendOperation =>
      Operation.decodeXDR(base64(actual.encode)) must beEquivalentTo(actual)
    }

    "serde via xdr bytes" >> prop { actual: PathPaymentStrictSendOperation =>
      val (remaining, decoded) = Operation.decode.run(actual.encode).value
      decoded mustEqual actual
      remaining must beEmpty
    }

    "parse from json" >> prop { op: Transacted[PathPaymentStrictSendOperation] =>
      val doc =
        s"""
           |{
           |  "_links":{
           |    "self":{"href":"https://horizon-testnet.stellar.org/operations/940258535411713"},
           |    "transaction":{"href":"https://horizon-testnet.stellar.org/transactions/a995af17837d1b53fb5782269250a36e9dbe74170260b46f2708e5f23f7c864a"},
           |    "effects":{"href":"https://horizon-testnet.stellar.org/operations/940258535411713/effects"},
           |    "succeeds":{"href":"https://horizon-testnet.stellar.org/effects?order=desc&cursor=940258535411713"},
           |    "precedes":{"href":"https://horizon-testnet.stellar.org/effects?order=asc&cursor=940258535411713"}
           |  },
           |  "id": "${op.id}",
           |  "paging_token": "10157597659137",
           |  "source_account": "${op.operation.sourceAccount.get.accountId}",
           |  "type":"path_payment_strict_send",
           |  "type_i":13,
           |  "created_at": "${formatter.format(op.createdAt)}",
           |  "transaction_hash": "${op.txnHash}",
           |  ${amountDocPortion(op.operation.sendAmount, assetPrefix = "source_")}
           |  ${amountDocPortion(op.operation.destinationMin, "destination_min")}
           |  "from":"${op.operation.sourceAccount.get.accountId}",
           |  "to":"${op.operation.destinationAccount.publicKey.accountId}",
           |  "path":[${if (op.operation.path.isEmpty) "" else op.operation.path.map(asset(_)).mkString("{", "},{", "}")}]
           |}
         """.stripMargin

      parse(doc).extract[Transacted[Operation]] mustEqual removeDestinationSubAccountId(op)
    }.setGen(genTransacted(genPathPaymentStrictSendOperation.suchThat(_.sourceAccount.nonEmpty)))
  }

  // Because sub accounts are not yet supported in Horizon JSON.
  private def removeDestinationSubAccountId(op: Transacted[PathPaymentStrictSendOperation]): Transacted[PathPaymentStrictSendOperation] = {
    op.copy(operation = op.operation.copy(destinationAccount = op.operation.destinationAccount.copy(subAccountId = None)))
  }
}
Example 8
Source File: AllowTrustOperationSpec.scala From scala-stellar-sdk with Apache License 2.0
package stellar.sdk.model.op

import org.json4s.NoTypeHints
import org.json4s.native.JsonMethods.parse
import org.json4s.native.Serialization
import org.scalacheck.Arbitrary
import org.specs2.mutable.Specification
import stellar.sdk.util.ByteArrays.base64
import stellar.sdk.{ArbitraryInput, DomainMatchers, KeyPair}

class AllowTrustOperationSpec extends Specification with ArbitraryInput with DomainMatchers with JsonSnippets {

  implicit val arb: Arbitrary[Transacted[AllowTrustOperation]] = Arbitrary(genTransacted(genAllowTrustOperation))
  implicit val formats = Serialization.formats(NoTypeHints) + TransactedOperationDeserializer

  "allow trust operation" should {
    "serde via xdr string" >> prop { actual: AllowTrustOperation =>
      Operation.decodeXDR(base64(actual.encode)) must beEquivalentTo(actual)
    }

    "serde via xdr bytes" >> prop { actual: AllowTrustOperation =>
      val (remaining, decoded) = Operation.decode.run(actual.encode).value
      decoded mustEqual actual
      remaining must beEmpty
    }

    "parse from json" >> prop { op: Transacted[AllowTrustOperation] =>
      val doc =
        s"""
           | {
           |  "_links": {
           |    "self": {"href": "https://horizon-testnet.stellar.org/operations/10157597659144"},
           |    "transaction": {"href": "https://horizon-testnet.stellar.org/transactions/17a670bc424ff5ce3b386dbfaae9990b66a2a37b4fbe51547e8794962a3f9e6a"},
           |    "effects": {"href": "https://horizon-testnet.stellar.org/operations/10157597659144/effects"},
           |    "succeeds": {"href": "https://horizon-testnet.stellar.org/effects?order=desc\u0026cursor=10157597659144"},
           |    "precedes": {"href": "https://horizon-testnet.stellar.org/effects?order=asc\u0026cursor=10157597659144"}
           |  },
           |  "id": "${op.id}",
           |  "paging_token": "10157597659137",
           |  "source_account": "${op.operation.sourceAccount.get.accountId}",
           |  "type": "allow_trust",
           |  "type_i": 7,
           |  "created_at": "${formatter.format(op.createdAt)}",
           |  "transaction_hash": "${op.txnHash}",
           |  "asset_type": "${if (op.operation.assetCode.length <= 4) "credit_alphanum4" else "credit_alphanum12"}",
           |  "asset_code": "${op.operation.assetCode}",
           |  "asset_issuer": "${op.operation.sourceAccount.get.accountId}"
           |  "trustor": "${op.operation.trustor.accountId}",
           |  "trustee": "${op.operation.sourceAccount.get.accountId}",
           |  "authorize": ${op.operation.trustLineFlags.contains(TrustLineAuthorized)}
           |  "authorize_to_maintain_liabilities": ${op.operation.trustLineFlags.contains(TrustLineCanMaintainLiabilities)}
           |}
         """.stripMargin

      val parsed = parse(doc).extract[Transacted[AllowTrustOperation]]
      parsed mustEqual op
      parsed.operation.authorize mustEqual op.operation.authorize
      parsed.operation.authorizeToMaintainLiabilities mustEqual op.operation.authorizeToMaintainLiabilities
    }.setGen(genTransacted(genAllowTrustOperation.suchThat(_.sourceAccount.nonEmpty)))
  }
}
Example 9
Source File: ChangeTrustOperationSpec.scala From scala-stellar-sdk with Apache License 2.0
package stellar.sdk.model.op

import org.json4s.NoTypeHints
import org.json4s.native.JsonMethods.parse
import org.json4s.native.Serialization
import org.scalacheck.Arbitrary
import org.specs2.mutable.Specification
import stellar.sdk.util.ByteArrays.base64
import stellar.sdk.{ArbitraryInput, DomainMatchers}

class ChangeTrustOperationSpec extends Specification with ArbitraryInput with DomainMatchers with JsonSnippets {

  implicit val arb: Arbitrary[Transacted[ChangeTrustOperation]] = Arbitrary(genTransacted(genChangeTrustOperation))
  implicit val formats = Serialization.formats(NoTypeHints) + TransactedOperationDeserializer

  "change trust operation" should {
    "serde via xdr string" >> prop { actual: ChangeTrustOperation =>
      Operation.decodeXDR(base64(actual.encode)) must beEquivalentTo(actual)
    }

    "serde via xdr bytes" >> prop { actual: ChangeTrustOperation =>
      val (remaining, decoded) = Operation.decode.run(actual.encode).value
      decoded mustEqual actual
      remaining must beEmpty
    }

    "parse from json" >> prop { op: Transacted[ChangeTrustOperation] =>
      val doc =
        s"""
           | {
           |  "_links": {
           |    "self": {"href": "https://horizon-testnet.stellar.org/operations/10157597659144"},
           |    "transaction": {"href": "https://horizon-testnet.stellar.org/transactions/17a670bc424ff5ce3b386dbfaae9990b66a2a37b4fbe51547e8794962a3f9e6a"},
           |    "effects": {"href": "https://horizon-testnet.stellar.org/operations/10157597659144/effects"},
           |    "succeeds": {"href": "https://horizon-testnet.stellar.org/effects?order=desc\u0026cursor=10157597659144"},
           |    "precedes": {"href": "https://horizon-testnet.stellar.org/effects?order=asc\u0026cursor=10157597659144"}
           |  },
           |  "id": "${op.id}",
           |  "paging_token": "10157597659137",
           |  "source_account": "${op.operation.sourceAccount.get.accountId}",
           |  "type": "change_trust",
           |  "type_i": 6,
           |  "created_at": "${formatter.format(op.createdAt)}",
           |  "transaction_hash": "${op.txnHash}",
           |  ${amountDocPortion(op.operation.limit, "limit")},
           |  "trustee": "${op.operation.limit.asset.issuer.accountId}",
           |  "trustor": "${op.operation.sourceAccount.get.accountId}",
           |}
         """.stripMargin

      parse(doc).extract[Transacted[ChangeTrustOperation]] mustEqual op
    }.setGen(genTransacted(genChangeTrustOperation.suchThat(_.sourceAccount.nonEmpty)))
  }
}
Example 10
Source File: ManageDataOperationSpec.scala From scala-stellar-sdk with Apache License 2.0
package stellar.sdk.model.op

import org.apache.commons.codec.binary.Base64
import org.json4s.NoTypeHints
import org.json4s.native.JsonMethods.parse
import org.json4s.native.Serialization
import org.scalacheck.{Arbitrary, Gen}
import org.specs2.mutable.Specification
import stellar.sdk.util.ByteArrays.base64
import stellar.sdk.{ArbitraryInput, DomainMatchers, PublicKey}

class ManageDataOperationSpec extends Specification with ArbitraryInput with DomainMatchers with JsonSnippets {

  implicit val arbDelete: Arbitrary[Transacted[DeleteDataOperation]] = Arbitrary(genTransacted(genDeleteDataOperation))
  implicit val arbWrite: Arbitrary[Transacted[WriteDataOperation]] = Arbitrary(genTransacted(genWriteDataOperation))
  implicit val formats = Serialization.formats(NoTypeHints) + TransactedOperationDeserializer

  def doc[O <: ManageDataOperation](op: Transacted[O]) = {
    val dataValue = op.operation match {
      case WriteDataOperation(_, value, _) => Base64.encodeBase64String(value.toArray)
      case _ => ""
    }

    s"""
       |{
       |  "_links": {
       |    "self": {"href": "https://horizon-testnet.stellar.org/operations/10157597659144"},
       |    "transaction": {"href": "https://horizon-testnet.stellar.org/transactions/17a670bc424ff5ce3b386dbfaae9990b66a2a37b4fbe51547e8794962a3f9e6a"},
       |    "effects": {"href": "https://horizon-testnet.stellar.org/operations/10157597659144/effects"},
       |    "succeeds": {"href": "https://horizon-testnet.stellar.org/effects?order=desc\u0026cursor=10157597659144"},
       |    "precedes": {"href": "https://horizon-testnet.stellar.org/effects?order=asc\u0026cursor=10157597659144"}
       |  },
       |  "id": "${op.id}",
       |  "paging_token": "10157597659137",
       |  "source_account": "${op.operation.sourceAccount.get.accountId}",
       |  "type": "manage_data",
       |  "type_i": 1,
       |  "created_at": "${formatter.format(op.createdAt)}",
       |  "transaction_hash": "${op.txnHash}",
       |  "name": "${op.operation.name}",
       |  "value": "$dataValue"
       |}""".stripMargin
  }

  "a write data operation" should {
    "serde via xdr string" >> prop { actual: WriteDataOperation =>
      Operation.decodeXDR(base64(actual.encode)) must beEquivalentTo(actual)
    }

    "serde via xdr bytes" >> prop { actual: WriteDataOperation =>
      val (remaining, decoded) = Operation.decode.run(actual.encode).value
      decoded must beEquivalentTo(actual)
      remaining must beEmpty
    }

    "parse from json" >> prop { op: Transacted[WriteDataOperation] =>
      parse(doc(op)).extract[Transacted[ManageDataOperation]] must beEquivalentTo(op)
    }.setGen(genTransacted(genWriteDataOperation.suchThat(_.sourceAccount.nonEmpty)))

    "encode a string payload as UTF-8 in base64" >> prop { (s: String, source: PublicKey) =>
      val value = new String(s.take(64).getBytes("UTF-8").take(60), "UTF-8")
      WriteDataOperation("name", value).value.toSeq mustEqual value.getBytes("UTF-8").toSeq
      WriteDataOperation("name", value, None).value.toSeq mustEqual value.getBytes("UTF-8").toSeq
      WriteDataOperation("name", value, Some(source)).value.toSeq mustEqual value.getBytes("UTF-8").toSeq
    }.setGen1(Arbitrary.arbString.arbitrary.suchThat(_.nonEmpty))

    "fail if the key is greater than 64 bytes" >> prop { s: String =>
      WriteDataOperation(s, "value") must throwAn[IllegalArgumentException]
    }.setGen(Gen.identifier.suchThat(_.getBytes("UTF-8").length > 64))

    "fail if the value is greater than 64 bytes" >> prop { s: String =>
      WriteDataOperation("name", s) must throwAn[IllegalArgumentException]
    }.setGen(Gen.identifier.suchThat(_.getBytes("UTF-8").length > 64))
  }

  "a delete data operation" should {
    "serde via xdr string" >> prop { actual: DeleteDataOperation =>
      Operation.decodeXDR(base64(actual.encode)) must beEquivalentTo(actual)
    }

    "serde via xdr bytes" >> prop { actual: DeleteDataOperation =>
      val (remaining, decoded) = Operation.decode.run(actual.encode).value
      decoded mustEqual actual
      remaining must beEmpty
    }

    "parse from json" >> prop { op: Transacted[DeleteDataOperation] =>
      parse(doc(op)).extract[Transacted[ManageDataOperation]] mustEqual op
    }.setGen(genTransacted(genDeleteDataOperation.suchThat(_.sourceAccount.nonEmpty)))
  }
}
Example 11
Source File: PaymentOperationSpec.scala From scala-stellar-sdk with Apache License 2.0
package stellar.sdk.model.op

import org.json4s.{Formats, NoTypeHints}
import org.json4s.native.JsonMethods.parse
import org.json4s.native.Serialization
import org.scalacheck.Arbitrary
import org.specs2.mutable.Specification
import stellar.sdk.util.ByteArrays.base64
import stellar.sdk.{ArbitraryInput, DomainMatchers}

class PaymentOperationSpec extends Specification with ArbitraryInput with DomainMatchers with JsonSnippets {

  implicit val arb: Arbitrary[Transacted[PaymentOperation]] = Arbitrary(genTransacted(genPaymentOperation))
  implicit val formats: Formats = Serialization.formats(NoTypeHints) + TransactedOperationDeserializer

  "payment operation" should {
    "serde via xdr string" >> prop { actual: PaymentOperation =>
      Operation.decodeXDR(base64(actual.encode)) must beEquivalentTo(actual)
    }

    "serde via xdr bytes" >> prop { actual: PaymentOperation =>
      val (remaining, decoded) = Operation.decode.run(actual.encode).value
      decoded mustEqual actual
      remaining must beEmpty
    }

    "parse from json" >> prop { op: Transacted[PaymentOperation] =>
      val doc =
        s"""
           | {
           |  "_links": {
           |    "self": {"href": "https://horizon-testnet.stellar.org/operations/10157597659144"},
           |    "transaction": {"href": "https://horizon-testnet.stellar.org/transactions/17a670bc424ff5ce3b386dbfaae9990b66a2a37b4fbe51547e8794962a3f9e6a"},
           |    "effects": {"href": "https://horizon-testnet.stellar.org/operations/10157597659144/effects"},
           |    "succeeds": {"href": "https://horizon-testnet.stellar.org/effects?order=desc\u0026cursor=10157597659144"},
           |    "precedes": {"href": "https://horizon-testnet.stellar.org/effects?order=asc\u0026cursor=10157597659144"}
           |  },
           |  "id": "${op.id}",
           |  "paging_token": "10157597659137",
           |  "source_account": "${op.operation.sourceAccount.get.accountId}",
           |  "type": "payment",
           |  "type_i": 1,
           |  "created_at": "${formatter.format(op.createdAt)}",
           |  "transaction_hash": "${op.txnHash}",
           |  ${amountDocPortion(op.operation.amount)},
           |  "from": "${op.operation.sourceAccount.get.accountId}",
           |  "to": "${op.operation.destinationAccount.publicKey.accountId}",
           |}
         """.stripMargin

      parse(doc).extract[Transacted[PaymentOperation]] mustEqual removeDestinationSubAccountId(op)
    }.setGen(genTransacted(genPaymentOperation.suchThat(_.sourceAccount.nonEmpty)))
  }

  // Because sub accounts are not yet supported in Horizon JSON.
  private def removeDestinationSubAccountId(op: Transacted[PaymentOperation]): Transacted[PaymentOperation] = {
    op.copy(operation = op.operation.copy(destinationAccount = op.operation.destinationAccount.copy(subAccountId = None)))
  }
}
Example 12
Source File: SetOptionsOperationSpec.scala From scala-stellar-sdk with Apache License 2.0
package stellar.sdk.model.op

import org.json4s.NoTypeHints
import org.json4s.native.JsonMethods.parse
import org.json4s.native.Serialization
import org.scalacheck.Arbitrary
import org.specs2.mutable.Specification
import stellar.sdk.util.ByteArrays.base64
import stellar.sdk.{ArbitraryInput, DomainMatchers}

class SetOptionsOperationSpec extends Specification with ArbitraryInput with DomainMatchers with JsonSnippets {

  implicit val arb: Arbitrary[Transacted[SetOptionsOperation]] = Arbitrary(genTransacted(genSetOptionsOperation))
  implicit val formats = Serialization.formats(NoTypeHints) + TransactedOperationDeserializer

  "set options operation" should {
    "serde via xdr string" >> prop { actual: SetOptionsOperation =>
      Operation.decodeXDR(base64(actual.encode)) must beEquivalentTo(actual)
    }

    "serde via xdr bytes" >> prop { actual: SetOptionsOperation =>
      val (remaining, decoded) = Operation.decode.run(actual.encode).value
      decoded must beEquivalentTo(actual)
      remaining must beEmpty
    }

    "parse from json" >> prop { op: Transacted[SetOptionsOperation] =>
      val doc =
        s"""
           |{
           |  "_links": {
           |    "self": {"href": "https://horizon-testnet.stellar.org/operations/10157597659137"},
           |    "transaction": {"href": "https://horizon-testnet.stellar.org/transactions/17a670bc424ff5ce3b386dbfaae9990b66a2a37b4fbe51547e8794962a3f9e6a"},
           |    "effects": {"href": "https://horizon-testnet.stellar.org/operations/10157597659137/effects"},
           |    "succeeds": {"href": "https://horizon-testnet.stellar.org/effects?order=desc\u0026cursor=10157597659137"},
           |    "precedes": {"href": "https://horizon-testnet.stellar.org/effects?order=asc\u0026cursor=10157597659137"}
           |  },
           |  "id": "${op.id}",
           |  "paging_token": "10157597659137",
           |  "source_account": "${op.operation.sourceAccount.get.accountId}",
           |  "created_at": "${formatter.format(op.createdAt)}",
           |  "transaction_hash": "${op.txnHash}",
           |  ${opt("inflation_dest", op.operation.inflationDestination.map(_.accountId))}
           |  ${opt("home_domain", op.operation.homeDomain)}
           |  ${opt("master_key_weight", op.operation.masterKeyWeight)}
           |  ${opt("signer_key", op.operation.signer.map(_.key.encodeToChars.mkString))}
           |  ${opt("signer_weight", op.operation.signer.map(_.weight))}
           |  ${opt("set_flags", op.operation.setFlags.map(_.map(_.i)))}
           |  ${opt("set_flags_s", op.operation.setFlags.map(_.map(_.s)))}
           |  ${opt("clear_flags", op.operation.clearFlags.map(_.map(_.i)))}
           |  ${opt("clear_flags_s", op.operation.clearFlags.map(_.map(_.s)))}
           |  ${opt("low_threshold", op.operation.lowThreshold)}
           |  ${opt("med_threshold", op.operation.mediumThreshold)}
           |  ${opt("high_threshold", op.operation.highThreshold)}
           |  "type": "set_options",
           |  "type_i": 5,
           |}
         """.stripMargin

      parse(doc).extract[Transacted[SetOptionsOperation]] must beEquivalentTo(op)
    }.setGen(genTransacted(genSetOptionsOperation.suchThat(_.sourceAccount.nonEmpty)))
  }
}
Example 13
Source File: LedgerResponseSpec.scala From scala-stellar-sdk with Apache License 2.0
package stellar.sdk.model.response

import java.time.ZoneId
import java.time.format.DateTimeFormatter

import org.json4s.NoTypeHints
import org.json4s.native.JsonMethods.parse
import org.json4s.native.Serialization
import org.specs2.mutable.Specification
import stellar.sdk.ArbitraryInput

class LedgerResponseSpec extends Specification with ArbitraryInput {

  val formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss'Z'").withZone(ZoneId.of("UTC"))

  implicit val formats = Serialization.formats(NoTypeHints) + LedgerRespDeserializer

  "a ledger response document" should {
    "parse to a ledger response" >> prop { lr: LedgerResponse =>
      val json =
        s"""
           |{
           |  "_links": {
           |    "self": {
           |      "href": "http://horizon-testnet.stellar.org/ledgers/11"
           |    },
           |    "transactions": {
           |      "href": "http://horizon-testnet.stellar.org/ledgers/11/transactions{?cursor,limit,order}",
           |      "templated": true
           |    },
           |    "operations": {
           |      "href": "http://horizon-testnet.stellar.org/ledgers/11/operations{?cursor,limit,order}",
           |      "templated": true
           |    },
           |    "payments": {
           |      "href": "http://horizon-testnet.stellar.org/ledgers/11/payments{?cursor,limit,order}",
           |      "templated": true
           |    },
           |    "effects": {
           |      "href": "http://horizon-testnet.stellar.org/ledgers/11/effects{?cursor,limit,order}",
           |      "templated": true
           |    }
           |  },
           |  "id": "${lr.id}",
           |  "paging_token": "47244640256",
           |  "hash": "${lr.hash}",
           |  ${lr.previousHash.map(h => s""""prev_hash": "$h",""").getOrElse("")}
           |  "sequence": ${lr.sequence},
           |  "successful_transaction_count": ${lr.successTransactionCount},
           |  "failed_transaction_count": ${lr.failureTransactionCount},
           |  "operation_count": ${lr.operationCount},
           |  "closed_at": "${formatter.format(lr.closedAt)}",
           |  "total_coins": "${lr.totalCoins.toDisplayUnits}",
           |  "fee_pool": "${lr.feePool.toDisplayUnits}",
           |  "base_fee_in_stroops": ${lr.baseFee.units},
           |  "base_reserve_in_stroops": ${lr.baseReserve.units},
           |  "max_tx_set_size": ${lr.maxTxSetSize},
           |  "protocol_version": 4
           |}
         """.stripMargin

      parse(json).extract[LedgerResponse] must beLike { case actual: LedgerResponse =>
        actual.copy(closedAt = lr.closedAt) mustEqual lr
        actual.closedAt.toInstant.toEpochMilli mustEqual lr.closedAt.toInstant.toEpochMilli
      }
    }

    "calculate transaction count as sum of failed and successful transactions" >> prop { lr: LedgerResponse =>
      lr.transactionCount mustEqual lr.failureTransactionCount + lr.successTransactionCount
    }
  }
}
Example 14
Source File: TradeEffectResponseSpec.scala From scala-stellar-sdk with Apache License 2.0
package stellar.sdk.model.response

import java.util.Locale

import org.json4s.NoTypeHints
import org.json4s.native.JsonMethods.parse
import org.json4s.native.Serialization
import org.scalacheck.Gen
import org.specs2.mutable.Specification
import stellar.sdk._
import stellar.sdk.model.{Amount, NonNativeAsset}

class TradeEffectResponseSpec extends Specification with ArbitraryInput {

  implicit val formats = Serialization.formats(NoTypeHints) + EffectResponseDeserializer

  "a trade effect document" should {
    "parse to a trade effect" >> prop { (id: String, offerId: Long, buyer: KeyPair, bought: Amount, seller: KeyPair, sold: Amount) =>
      val json = doc(id, offerId, buyer, bought, seller, sold)
      parse(json).extract[EffectResponse] mustEqual EffectTrade(id, offerId, buyer, bought, seller, sold)
    }.setGen1(Gen.identifier).setGen2(Gen.posNum[Long])
  }

  def doc(id: String, offerId: Long, buyer: PublicKeyOps, bought: Amount, seller: PublicKeyOps, sold: Amount) = {
    s"""
      {
        "_links": {
          "operation": {
            "href": "https://horizon-testnet.stellar.org/operations/31161168848490497"
          },
          "succeeds": {
            "href": "https://horizon-testnet.stellar.org/effects?order=desc&cursor=31161168848490497-2"
          },
          "precedes": {
            "href": "https://horizon-testnet.stellar.org/effects?order=asc&cursor=31161168848490497-2"
          }
        },
        "id": "$id",
        "paging_token": "31161168848490497-2",
        "account": "${buyer.accountId}",
        "type": "trade",
        "type_i": 33,
        "seller": "${seller.accountId}",
        "offer_id": $offerId,
        ${amountDocPortion(sold, sold = true)},
        ${amountDocPortion(bought, sold = false)}
      }"""
  }

  def amountDocPortion(amount: Amount, sold: Boolean): String = {
    val bs = if (sold) "sold" else "bought"
    amount.asset match {
      case nn: NonNativeAsset =>
        s""""${bs}_amount": "${amountString(amount)}",
           |"${bs}_asset_type": "${nn.typeString}",
           |"${bs}_asset_code": "${nn.code}",
           |"${bs}_asset_issuer": "${nn.issuer.accountId}"
        """.stripMargin.trim

      case _ =>
        s""""${bs}_amount": "${amountString(amount)}",
           |"${bs}_asset_type": "native"
        """.stripMargin.trim
    }
  }

  def amountString(a: Amount): String = "%.7f".formatLocal(Locale.ROOT, a.units / math.pow(10, 7))
}
Example 15
Source File: OrderBookSpec.scala From scala-stellar-sdk with Apache License 2.0
package stellar.sdk.model.response

import org.json4s.NoTypeHints
import org.json4s.native.JsonMethods.parse
import org.json4s.native.Serialization
import org.specs2.mutable.Specification
import stellar.sdk._
import stellar.sdk.model.op.JsonSnippets
import stellar.sdk.model.{Order, OrderBook, OrderBookDeserializer}

class OrderBookSpec extends Specification with ArbitraryInput with JsonSnippets {

  implicit val formats = Serialization.formats(NoTypeHints) + OrderBookDeserializer

  "order book" should {
    "parse from json" >> prop { ob: OrderBook =>
      val doc =
        s"""
           |{
           |  "bids": [${ob.bids.map(order).mkString(",")}],
           |  "asks": [${ob.asks.map(order).mkString(",")}],
           |  "base": {${asset(ob.selling)}}
           |  "counter": {${asset(ob.buying)}}
           |}
         """.stripMargin

      parse(doc).extract[OrderBook] mustEqual ob
    }
  }

  private def order(o: Order) =
    s"""{
       |  "price_r": {
       |    "n": ${o.price.n},
       |    "d": ${o.price.d}
       |  },
       |  "price": "${o.price.asDecimalString}",
       |  "amount": "${o.quantity / math.pow(10, 7)}"
       |}
     """.stripMargin
}
Example 16
Source File: OfferResponseSpec.scala From scala-stellar-sdk with Apache License 2.0
package stellar.sdk.model.response

import java.time.ZoneId
import java.time.format.DateTimeFormatter
import java.util.Locale

import org.json4s.NoTypeHints
import org.json4s.native.JsonMethods.parse
import org.json4s.native.Serialization
import org.specs2.mutable.Specification
import stellar.sdk.model.{Amount, Asset, NonNativeAsset}
import stellar.sdk.ArbitraryInput

class OfferResponseSpec extends Specification with ArbitraryInput {

  implicit val formats = Serialization.formats(NoTypeHints) + OfferRespDeserializer
  private val formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss'Z'").withZone(ZoneId.of("UTC"))

  "an offer response document" should {
    "parse to an offer response" >> prop { or: OfferResponse =>
      val json =
        s"""
           |{
           |  "_links": {
           |    "self": {
           |      "href": "https://horizon-testnet.stellar.org/offers/101542"
           |    },
           |    "offer_maker": {
           |      "href": "https://horizon-testnet.stellar.org/accounts/GCXYKQF35XWATRB6AWDDV2Y322IFU2ACYYN5M2YB44IBWAIITQ4RYPXK"
           |    }
           |  },
           |  "id": ${or.id},
           |  "paging_token": "101542",
           |  "seller": "${or.seller.accountId}",
           |  "selling": {
           |    ${assetJson(or.selling.asset)}
           |  },
           |  "buying": {
           |    ${assetJson(or.buying)}
           |  },
           |  "amount": "${amountString(or.selling)}",
           |  "price_r": {
           |    "n": ${or.price.n},
           |    "d": ${or.price.d}
           |  },
           |  "price": "3.0300000",
           |  "last_modified_ledger": ${or.lastModifiedLedger},
           |  "last_modified_time": "${formatter.format(or.lastModifiedTime)}"
           |}
           |""".stripMargin

      parse(json).extract[OfferResponse] mustEqual or
    }
  }

  def assetJson(asset: Asset) = asset match {
    case nn: NonNativeAsset =>
      s"""
         |"asset_type": "${nn.typeString}",
         |"asset_code": "${nn.code}",
         |"asset_issuer": "${nn.issuer.accountId}"
        """.stripMargin.trim

    case _ => """"asset_type": "native""""
  }

  def amountString(a: Amount): String = "%.7f".formatLocal(Locale.ROOT, a.units / math.pow(10, 7))
}
Example 17
Source File: IngestExampleSpec.scala From Hands-On-Data-Analysis-with-Scala with MIT License
package handson.example.ingest

import org.json4s.DefaultFormats
import org.json4s.native.JsonMethods.parse
import org.scalatest.{FlatSpec, Matchers}

class IngestExampleSpec extends FlatSpec with Matchers {

  "Ingest of various formats" should "produce same results" in {
    implicit val formats = DefaultFormats

    // XML
    val xml = <person>
      <fname>Jon</fname>
      <lname>Doe</lname>
      <phone>123-456-7890</phone>
      <zip>12345</zip>
      <state>NY</state>
    </person>
    val normXml = Person(xml \ "fname" text, xml \ "lname" text, xml \ "phone" text, xml \ "zip" text, xml \ "state" text)

    // JSON
    val jsonStr =
      """ {
        "fname": "Jon",
        "lname": "Doe",
        "phone": "123-456-7890",
        "zip": "12345",
        "state": "NY"
      }"""
    val json = parse(jsonStr)
    val normJson = json.extract[Person]

    // CSV (for simplicity, we use split method of String to parse CSV)
    val csvStr = "Jon,Doe,123-456-7890,12345,NY"
    val csvCols = csvStr.split(",")
    val normCsv = Person(csvCols(0), csvCols(1), csvCols(2), csvCols(3), csvCols(4))

    // Let us make sure that all normal objects are same
    assert(normXml === normJson)
    assert(normXml === normCsv)
  }

  "getState" should "return MA for 02701" in {
    assert(IngestExample.getState("02701") === "MA")
  }

  "filter and filterNot API" should "produce same outcome with appropriate conditions" in {
    val originalPersons = List(
      Person("Jon", "Doe", "123-456-7890", "12345", "NY"),
      Person("James", "Smith", "555-456-7890", "00600", "PR"),
      Person("Don", "Duck", "777-456-7890", "00800", "VI"),
      Person("Doug", "Miller", "444-456-7890", "02800", "RI"),
      Person("Van", "Peter", "333-456-7890", "02700", "MA")
    )
    val exclusionStates = Set("PR", "VI") // we want to exclude these states
    val filteredPersons1 = originalPersons.filterNot(p => exclusionStates.contains(p.state))
    val filteredPersons2 = originalPersons.filter(p => !exclusionStates.contains(p.state))
    assert(filteredPersons1 === filteredPersons2)
  }
}
Example 18
Source File: CustomDefaults.scala From avro4s with Apache License 2.0
package com.sksamuel.avro4s

import magnolia.{SealedTrait, Subtype}
import org.json4s.native.JsonMethods.parse
import org.json4s.native.Serialization.write
import org.apache.avro.Schema
import org.apache.avro.Schema.Type
import org.json4s.DefaultFormats

import scala.collection.JavaConverters._

sealed trait CustomDefault
case class CustomUnionDefault(className: String, values: java.util.Map[String, Any]) extends CustomDefault
case class CustomUnionWithEnumDefault(parentName: String, default: String, value: String) extends CustomDefault
case class CustomEnumDefault(value: String) extends CustomDefault

object CustomDefaults {

  implicit val formats = DefaultFormats

  def customScalaEnumDefault(value: Any) = CustomEnumDefault(value.toString)

  def customDefault(p: Product, schema: Schema): CustomDefault =
    if (isEnum(p, schema.getType))
      CustomEnumDefault(trimmedClassName(p))
    else {
      if (isUnionOfEnum(schema)) {
        val enumType = schema.getTypes.asScala.filter(_.getType == Schema.Type.ENUM).head
        CustomUnionWithEnumDefault(enumType.getName, trimmedClassName(p), p.toString)
      } else
        CustomUnionDefault(trimmedClassName(p), parse(write(p)).extract[Map[String, Any]].map {
          case (name, b: BigInt) if b.isValidInt => name -> b.intValue
          case (name, b: BigInt) if b.isValidLong => name -> b.longValue
          case (name, z) if schema.getType == Type.UNION =>
            name -> schema.getTypes.asScala.find(_.getName == trimmedClassName(p)).map(_.getField(name).schema())
              .map(DefaultResolver(z, _)).getOrElse(z)
          case (name, z) => name -> DefaultResolver(z, schema.getField(name).schema())
        }.asJava)
    }

  def isUnionOfEnum(schema: Schema) =
    schema.getType == Schema.Type.UNION && schema.getTypes.asScala.map(_.getType).contains(Schema.Type.ENUM)

  def sealedTraitEnumDefaultValue[T](ctx: SealedTrait[SchemaFor, T]) = {
    val defaultExtractor = new AnnotationExtractors(ctx.annotations)
    defaultExtractor.enumDefault.flatMap { default =>
      ctx.subtypes.flatMap { st: Subtype[SchemaFor, T] =>
        if (st.typeName.short == default.toString) Option(st.typeName.short) else None
      }.headOption
    }
  }

  def isScalaEnumeration(value: Any) = value.getClass.getCanonicalName == "scala.Enumeration.Val"

  private def isEnum(product: Product, schemaType: Schema.Type) =
    product.productArity == 0 && schemaType == Schema.Type.ENUM

  private def trimmedClassName(p: Product) = trimDollar(p.getClass.getSimpleName)

  private def trimDollar(s: String) = if (s.endsWith("$")) s.dropRight(1) else s
}
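CustomDefaults above turns a case class into a Map by writing it to JSON and re-parsing the result, and it pattern-matches on BigInt because json4s extracts untyped JSON integers as BigInt. A stripped-down sketch of that parse(write(p)) round-trip, using a hypothetical Point class rather than anything from avro4s:

import org.json4s._
import org.json4s.native.JsonMethods.parse
import org.json4s.native.Serialization.write

// Illustrative type, not part of avro4s.
case class Point(x: Int, y: Int)

object RoundTripSketch extends App {
  implicit val formats: Formats = DefaultFormats

  // Case class -> JSON string -> JValue -> Map[String, Any]
  val asMap = parse(write(Point(1, 2))).extract[Map[String, Any]]
  println(asMap) // Map(x -> 1, y -> 2), numeric values arrive as BigInt
}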
Example 19
Source File: FunctionCalls.scala From aardpfark with Apache License 2.0
package com.ibm.aardpfark.pfa.expression

import com.ibm.aardpfark.pfa.document.{PFAExpressionSerializer, ParamSerializer, SchemaSerializer}
import org.json4s.native.JsonMethods.parse
import org.json4s.native.Serialization
import org.json4s.native.Serialization.write
import org.json4s.{JDouble, JField, JInt, JObject, JString, JValue, NoTypeHints}

class FunctionCall(name: String, args: Any*) extends PFAExpression {
  import com.ibm.aardpfark.pfa.dsl._
  import org.json4s.JsonDSL._

  override def json: JValue = {
    val jArgs = args.map {
      case n: Double => JDouble(n)
      case i: Int => JInt(i)
      case s: String => JString(s)
      case expr: PFAExpression => expr.json
      case fnDef: FunctionDef =>
        implicit val formats = Serialization.formats(NoTypeHints) +
          new SchemaSerializer +
          new PFAExpressionSerializer +
          new ParamSerializer
        parse(write(fnDef))
    }
    JObject(JField(name, jArgs) :: Nil)
  }
}
Example 20
package com.ibm.aardpfark.pfa.expression

import com.ibm.aardpfark.pfa.document.SchemaSerializer
import com.sksamuel.avro4s.{AvroSchema, SchemaFor, ToSchema}
import org.apache.avro.Schema
import org.json4s.JValue
import org.json4s.JsonAST.JString
import org.json4s.native.JsonMethods.parse

trait New {

  object NewRecord {
    def apply(schema: Schema, init: Map[String, PFAExpression], fullSchema: Boolean = true) =
      NewRecordExpr(schema, init, fullSchema)
  }

  case class NewRecordExpr(schema: Schema, init: Map[String, PFAExpression], fullSchema: Boolean) extends PFAExpression {
    import org.json4s.JsonDSL._

    private val s = if (fullSchema) SchemaSerializer.convert(schema) else JString(schema.getFullName)

    override def json: JValue = {
      ("type" -> s) ~ ("new" -> init.mapValues(_.json))
    }
  }

  case class NewArrayExpr(schema: Schema, init: Seq[PFAExpression]) extends PFAExpression {
    import org.json4s.JsonDSL._

    override def json: JValue = {
      ("type" -> parse(schema.toString)) ~ ("new" -> init.map(_.json))
    }
  }

  object NewArray {
    def apply(schema: Schema, init: Seq[PFAExpression]) = NewArrayExpr(schema, init)
    def apply[T](init: Seq[PFAExpression])(implicit s: ToSchema[Seq[T]]) = {
      NewArrayExpr(s(), init)
    }
  }

  case class NewMap(schema: Schema, init: Map[String, PFAExpression]) extends PFAExpression {
    import org.json4s.JsonDSL._

    override def json: JValue = {
      ("type" -> parse(schema.toString)) ~ ("new" -> init.mapValues(_.json))
    }
  }
}
Example 21
Source File: JSONSerializers.scala From aardpfark with Apache License 2.0
package com.ibm.aardpfark.pfa.document

import scala.util.Try

import com.ibm.aardpfark.pfa.dsl._
import com.ibm.aardpfark.pfa.expression.PFAExpression
import com.ibm.aardpfark.spark.ml.tree.{TreeNode, Trees}
import org.apache.avro.Schema
import org.json4s.native.JsonMethods.parse
import org.json4s.{CustomSerializer, JValue}

object SchemaSerializer {
  def convert(s: Schema): JValue = {
    import Schema.Type._
    import org.json4s.JsonDSL._

    s.getType match {
      case DOUBLE | FLOAT | INT | LONG | STRING | BOOLEAN | BYTES | NULL =>
        ("type" -> s.getType.getName)
      case _ =>
        parse(s.toString)
    }
  }
}

class SchemaSerializer extends CustomSerializer[Schema](format => (
  { case j: JValue => new Schema.Parser().parse(j.toString) },
  { case s: Schema => SchemaSerializer.convert(s) }
))

class PFAExpressionSerializer extends CustomSerializer[PFAExpression](format => (
  { case j: JValue => throw new UnsupportedOperationException("cannot deserialize") },
  { case expr: PFAExpression => expr.json }
))

class TreeSerializer extends CustomSerializer[TreeNode](format => (
  { case j: JValue => throw new UnsupportedOperationException("cannot deserialize") },
  { case tree: TreeNode => Trees.json(tree) }
))

class ParamSerializer extends CustomSerializer[Param](format => (
  { case j: JValue => throw new UnsupportedOperationException("cannot deserialize") },
  { case p: Param =>
    import org.json4s.JsonDSL._
    if (p.simpleSchema) {
      (p.name -> p.`type`.getFullName)
    } else {
      val schemaSerializer = new SchemaSerializer().serialize(format)
      (p.name -> schemaSerializer(p.`type`))
    }
  }
))
Example 22
Source File: SparkPredictorPFASuiteBase.scala From aardpfark with Apache License 2.0
package com.ibm.aardpfark.pfa

import com.ibm.aardpfark.spark.ml.KMeansPipelineResult
import com.opendatagroup.hadrian.jvmcompiler.PFAEngine
import org.json4s.native.JsonMethods.parse

abstract class SparkPredictorPFASuiteBase[A <: Result](implicit m: Manifest[A]) extends SparkFeaturePFASuiteBase[A] {

  override protected def testInputVsExpected(
      engine: PFAEngine[AnyRef, AnyRef],
      input: Array[String],
      expectedOutput: Array[String]) = {
    import ApproxEquality._
    import org.scalactic.Tolerance._
    val tol = 0.0001
    input.zip(expectedOutput).foreach { case (in, out) =>
      val pfaResult = engine.action(engine.jsonInput(in))
      val actual = parse(engine.jsonOutput(pfaResult)).extract[A]
      val expected = parse(out).extract[A]
      (actual, expected) match {
        case (a: PredictorResult, e: PredictorResult) =>
          assert(a.prediction === e.prediction +- tol)
        case (a: GLMResult, e: GLMResult) =>
          assert(a.prediction === e.prediction +- tol)
          assert(a.link === e.link +- tol)
        case (a: ClassifierResult, e: ClassifierResult) =>
          assert(a.prediction === e.prediction)
          assert(a.rawPrediction === e.rawPrediction)
        case (a: ProbClassifierResult, e: ProbClassifierResult) =>
          assert(a.prediction === e.prediction)
          assert(a.rawPrediction === e.rawPrediction)
          assert(a.probability === e.probability)
        case (a: KMeansPipelineResult, e: KMeansPipelineResult) =>
          assert(a.prediction === e.prediction)
          assert(a.ewp === e.ewp)
          assert(a.pca === e.pca)
          assert(a.s1 === e.s1)
          assert(a.s2 === e.s2)
          assert(a.kmeans === e.kmeans)
        case (_, _) =>
      }
    }
  }
}

case class PredictorResult(prediction: Double) extends Result
case class GLMResult(prediction: Double, link: Double = 0.0) extends Result
case class ClassifierResult(prediction: Double, rawPrediction: Seq[Double]) extends Result
case class ProbClassifierResult(
    prediction: Double,
    rawPrediction: Seq[Double],
    probability: Seq[Double]) extends Result
Example 23
Source File: L6-10LazyStatic.scala From prosparkstreaming with Apache License 2.0
package org.apress.prospark

import java.nio.charset.StandardCharsets

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.streaming.Seconds
import org.apache.spark.streaming.StreamingContext
import org.eclipse.paho.client.mqttv3.MqttClient
import org.eclipse.paho.client.mqttv3.MqttMessage
import org.eclipse.paho.client.mqttv3.persist.MemoryPersistence
import org.json4s.DefaultFormats
import org.json4s.JField
import org.json4s.JsonAST.JObject
import org.json4s.jvalue2extractable
import org.json4s.jvalue2monadic
import org.json4s.native.JsonMethods.parse
import org.json4s.string2JsonInput
import org.apache.commons.pool2.PooledObject
import org.apache.commons.pool2.BasePooledObjectFactory
import org.apache.commons.pool2.impl.DefaultPooledObject
import org.apache.commons.pool2.impl.GenericObjectPool
import org.apache.commons.pool2.ObjectPool

object MqttSinkAppE {

  def main(args: Array[String]) {
    if (args.length != 3) {
      System.err.println(
        "Usage: MqttSinkApp <appname> <outputBrokerUrl> <topic>")
      System.exit(1)
    }

    val Seq(appName, outputBrokerUrl, topic) = args.toSeq

    val conf = new SparkConf()
      .setAppName(appName)
      .setJars(SparkContext.jarOfClass(this.getClass).toSeq)

    val batchInterval = 10

    val ssc = new StreamingContext(conf, Seconds(batchInterval))

    HttpUtils.createStream(ssc, url = "https://query.yahooapis.com/v1/public/yql?q=select%20*%20from%20yahoo.finance.quotes%20where%20symbol%20in%20(%22IBM,GOOG,MSFT,AAPL,FB,ORCL,YHOO,TWTR,LNKD,INTC%22)%0A%09%09&format=json&diagnostics=true&env=http%3A%2F%2Fdatatables.org%2Falltables.env",
      interval = batchInterval)
      .flatMap(rec => {
        val query = parse(rec) \ "query"
        ((query \ "results" \ "quote").children).map(rec => JObject(JField("Timestamp", query \ "created")).merge(rec))
      })
      .map(rec => {
        implicit val formats = DefaultFormats
        rec.children.map(f => f.extract[String]) mkString ","
      })
      .foreachRDD { rdd =>
        rdd.foreachPartition { par =>
          val mqttSink = MqttSinkPool().borrowObject()
          par.foreach(message => mqttSink.publish(topic, new MqttMessage(message.getBytes(StandardCharsets.UTF_8))))
          MqttSinkPool().returnObject(mqttSink)
        }
      }

    ssc.start()
    ssc.awaitTermination()
  }
}

object MqttSinkPool {
  val poolSize = 8
  val brokerUrl = "tcp://localhost:1883"
  val mqttPool = new GenericObjectPool[MqttClient](new MqttClientFactory(brokerUrl))
  mqttPool.setMaxTotal(poolSize)
  sys.addShutdownHook {
    mqttPool.close()
  }

  def apply(): GenericObjectPool[MqttClient] = {
    mqttPool
  }
}

class MqttClientFactory(brokerUrl: String) extends BasePooledObjectFactory[MqttClient] {
  override def create() = {
    val client = new MqttClient(brokerUrl, MqttClient.generateClientId(), new MemoryPersistence())
    client.connect()
    client
  }
  override def wrap(client: MqttClient) = new DefaultPooledObject[MqttClient](client)
  override def validateObject(pObj: PooledObject[MqttClient]) = pObj.getObject.isConnected()
  override def destroyObject(pObj: PooledObject[MqttClient]) = {
    pObj.getObject.disconnect()
    pObj.getObject.close()
  }
  override def passivateObject(pObj: PooledObject[MqttClient]) = {}
}
Example 24
Source File: L10-2DataProc.scala From prosparkstreaming with Apache License 2.0
package org.apress.prospark

import org.apache.spark.HashPartitioner
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.streaming.Seconds
import org.apache.spark.streaming.StreamingContext
import org.apache.spark.streaming.dstream.DStream.toPairDStreamFunctions
import org.json4s.DefaultFormats
import org.json4s.JsonAST.JNothing
import org.json4s.jvalue2extractable
import org.json4s.jvalue2monadic
import org.json4s.native.JsonMethods.parse
import org.json4s.string2JsonInput

object DataProcApp {

  def main(args: Array[String]) {
    if (args.length != 4) {
      System.err.println(
        "Usage: DataProcApp <appname> <batchInterval> <hostname> <port>")
      System.exit(1)
    }
    val Seq(appName, batchInterval, hostname, port) = args.toSeq

    val conf = new SparkConf()
      .setAppName(appName)
      .setJars(SparkContext.jarOfClass(this.getClass).toSeq)

    val ssc = new StreamingContext(conf, Seconds(batchInterval.toInt))

    ssc.socketTextStream(hostname, port.toInt)
      .map(r => {
        implicit val formats = DefaultFormats
        parse(r)
      })
      .filter(jvalue => {
        jvalue \ "attributes" \ "Wi-Fi" != JNothing
      })
      .map(jvalue => {
        implicit val formats = DefaultFormats
        ((jvalue \ "attributes" \ "Wi-Fi").extract[String], (jvalue \ "stars").extract[Int])
      })
      .combineByKey(
        (v) => (v, 1),
        (accValue: (Int, Int), v) => (accValue._1 + v, accValue._2 + 1),
        (accCombine1: (Int, Int), accCombine2: (Int, Int)) => (accCombine1._1 + accCombine2._1, accCombine1._2 + accCombine2._2),
        new HashPartitioner(ssc.sparkContext.defaultParallelism))
      .map({ case (k, v) => (k, v._1 / v._2.toFloat) })
      .print()

    ssc.start()
    ssc.awaitTermination()
  }
}
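DataProcApp above navigates the parsed document with the \ operator and compares against JNothing to detect a missing field. A minimal sketch of that navigation pattern on a hand-written document (the JSON literal is illustrative only):

import org.json4s._
import org.json4s.native.JsonMethods.parse

object NavigationSketch extends App {
  implicit val formats: Formats = DefaultFormats

  val json = parse("""{"attributes": {"Wi-Fi": "free"}, "stars": 4}""")

  // \ descends one field per step; a missing path yields JNothing, not an error.
  if (json \ "attributes" \ "Wi-Fi" != JNothing) {
    println((json \ "stars").extract[Int]) // 4
  }
}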
Example 25
Source File: L5-18Http.scala From prosparkstreaming with Apache License 2.0
package org.apress.prospark

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.streaming.Seconds
import org.apache.spark.streaming.StreamingContext
import org.json4s.DefaultFormats
import org.json4s.JField
import org.json4s.jvalue2extractable
import org.json4s.jvalue2monadic
import org.json4s.native.JsonMethods.parse
import org.json4s.string2JsonInput

object HttpApp {

  def main(args: Array[String]) {
    if (args.length != 2) {
      System.err.println(
        "Usage: HttpApp <appname> <outputPath>")
      System.exit(1)
    }
    val Seq(appName, outputPath) = args.toSeq

    val conf = new SparkConf()
      .setAppName(appName)
      .setJars(SparkContext.jarOfClass(this.getClass).toSeq)

    val batchInterval = 10

    val ssc = new StreamingContext(conf, Seconds(batchInterval))

    HttpUtils.createStream(ssc, url = "https://www.citibikenyc.com/stations/json", interval = batchInterval)
      .flatMap(rec => (parse(rec) \ "stationBeanList").children)
      .filter(rec => {
        implicit val formats = DefaultFormats
        (rec \ "statusKey").extract[Integer] != 1
      })
      .map(rec => rec.filterField {
        case JField("id", _) => true
        case JField("stationName", _) => true
        case JField("statusValue", _) => true
        case _ => false
      })
      .map(rec => {
        implicit val formats = DefaultFormats
        (rec(0)._2.extract[Integer], rec(1)._2.extract[String], rec(2)._2.extract[String])
      })
      .saveAsTextFiles(outputPath)

    ssc.start()
    ssc.awaitTermination()
  }
}
Example 26
Source File: L8-29DataFrameExamplesJoin.scala From prosparkstreaming with Apache License 2.0 | 5 votes |
package org.apress.prospark

import scala.reflect.runtime.universe

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SQLContext
import org.apache.spark.streaming.Seconds
import org.apache.spark.streaming.StreamingContext
import org.json4s.DefaultFormats
import org.json4s.JDouble
import org.json4s.JObject
import org.json4s.jvalue2extractable
import org.json4s.jvalue2monadic
import org.json4s.native.JsonMethods.compact
import org.json4s.native.JsonMethods.parse
import org.json4s.native.JsonMethods.render
import org.json4s.string2JsonInput

object CdrDataframeExamples3App {

  case class Cdr(squareId: Int, timeInterval: Long, countryCode: Int,
    smsInActivity: Float, smsOutActivity: Float, callInActivity: Float,
    callOutActivity: Float, internetTrafficActivity: Float)

  def main(args: Array[String]) {
    if (args.length != 5) {
      System.err.println(
        "Usage: CdrDataframeExamples3App <appname> <batchInterval> <hostname> <port> <gridJsonPath>")
      System.exit(1)
    }
    val Seq(appName, batchInterval, hostname, port, gridJsonPath) = args.toSeq

    val conf = new SparkConf()
      .setAppName(appName)
      .setJars(SparkContext.jarOfClass(this.getClass).toSeq)

    val ssc = new StreamingContext(conf, Seconds(batchInterval.toInt))
    val sqlC = new SQLContext(ssc.sparkContext)
    import sqlC.implicits._
    implicit val formats = DefaultFormats

    val gridFile = scala.io.Source.fromFile(gridJsonPath).mkString
    val gridGeo = (parse(gridFile) \ "features")
    val gridStr = gridGeo.children.map(r => {
      val c = (r \ "geometry" \ "coordinates").extract[List[List[List[Float]]]]
        .flatten.flatten.map(r => JDouble(r))
      val l = List(("id", r \ "id"), ("x1", c(0)), ("y1", c(1)), ("x2", c(2)), ("y2", c(3)),
        ("x3", c(4)), ("y3", c(5)), ("x4", c(6)), ("y4", c(7)))
      compact(render(JObject(l)))
    })

    val gridDF = sqlC.read.json(ssc.sparkContext.makeRDD(gridStr))

    val cdrStream = ssc.socketTextStream(hostname, port.toInt)
      .map(_.split("\\t", -1))
      .foreachRDD(rdd => {
        val cdrs = seqToCdr(rdd).toDF()
        cdrs.join(gridDF, $"squareId" === $"id").show()
      })

    ssc.start()
    ssc.awaitTermination()
  }

  def seqToCdr(rdd: RDD[Array[String]]): RDD[Cdr] = {
    rdd.map(c => c.map(f => f match {
      case x if x.isEmpty() => "0"
      case x => x
    })).map(c => Cdr(c(0).toInt, c(1).toLong, c(2).toInt, c(3).toFloat,
      c(4).toFloat, c(5).toFloat, c(6).toFloat, c(7).toFloat))
  }
}
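The join above uses unqualified $"squareId" and $"id" columns, which works because the two names are distinct across the frames. A sketch of the same join with dataset-qualified columns, which avoids ambiguity if the frames ever share column names (cdrs and gridDF as defined in the example):

// Qualify each side of the join condition explicitly.
val joined = cdrs.join(gridDF, cdrs("squareId") === gridDF("id"))
joined.show()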
Example 27
Source File: T8-3DataFrameExamplesNA.scala From prosparkstreaming with Apache License 2.0 | 5 votes |
package org.apress.prospark

import scala.reflect.runtime.universe

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SQLContext
import org.apache.spark.streaming.Seconds
import org.apache.spark.streaming.StreamingContext
import org.json4s.DefaultFormats
import org.json4s.JDouble
import org.json4s.JObject
import org.json4s.jvalue2extractable
import org.json4s.jvalue2monadic
import org.json4s.native.JsonMethods.compact
import org.json4s.native.JsonMethods.parse
import org.json4s.native.JsonMethods.render
import org.json4s.string2JsonInput

object CdrDataframeExamplesNAApp {

  case class Cdr(squareId: Int, timeInterval: Long, countryCode: Int,
    smsInActivity: Float, smsOutActivity: Float, callInActivity: Float,
    callOutActivity: Float, internetTrafficActivity: Float)

  def main(args: Array[String]) {
    if (args.length != 4) {
      System.err.println(
        "Usage: CdrDataframeExamplesNAApp <appname> <batchInterval> <hostname> <port>")
      System.exit(1)
    }
    val Seq(appName, batchInterval, hostname, port) = args.toSeq

    val conf = new SparkConf()
      .setAppName(appName)
      .setJars(SparkContext.jarOfClass(this.getClass).toSeq)

    val ssc = new StreamingContext(conf, Seconds(batchInterval.toInt))
    val sqlC = new SQLContext(ssc.sparkContext)
    import sqlC.implicits._
    implicit val formats = DefaultFormats

    val cdrStream = ssc.socketTextStream(hostname, port.toInt)
      .map(_.split("\\t", -1))
      .foreachRDD(rdd => {
        val cdrs = seqToCdr(rdd).toDF()

        cdrs.na.drop("any").show()
        cdrs.na.fill(0, Array("squareId")).show()
        cdrs.na.replace("squareId", Map(0 -> 1)).show()

        println("Correlation: " + cdrs.stat.corr("smsOutActivity", "callOutActivity"))
        println("Covariance: " + cdrs.stat.cov("smsInActivity", "callInActivity"))

        cdrs.stat.crosstab("squareId", "countryCode").show()
        cdrs.stat.freqItems(Array("squareId", "countryCode"), 0.1).show()
        cdrs.stat.crosstab("callOutActivity", "callInActivity").show()
      })

    ssc.start()
    ssc.awaitTermination()
  }

  def seqToCdr(rdd: RDD[Array[String]]): RDD[Cdr] = {
    rdd.map(c => c.map(f => f match {
      case x if x.isEmpty() => "0"
      case x => x
    })).map(c => Cdr(c(0).toInt, c(1).toLong, c(2).toInt, c(3).toFloat,
      c(4).toFloat, c(5).toFloat, c(6).toFloat, c(7).toFloat))
  }
}
Example 28
Source File: L6-6PerRecord.scala From prosparkstreaming with Apache License 2.0 | 5 votes |
package org.apress.prospark

import java.nio.charset.StandardCharsets

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.streaming.Seconds
import org.apache.spark.streaming.StreamingContext
import org.eclipse.paho.client.mqttv3.MqttClient
import org.eclipse.paho.client.mqttv3.MqttMessage
import org.eclipse.paho.client.mqttv3.persist.MemoryPersistence
import org.json4s.DefaultFormats
import org.json4s.JField
import org.json4s.JsonAST.JObject
import org.json4s.jvalue2extractable
import org.json4s.jvalue2monadic
import org.json4s.native.JsonMethods.parse
import org.json4s.string2JsonInput

object MqttSinkAppB {

  def main(args: Array[String]) {
    if (args.length != 3) {
      System.err.println(
        "Usage: MqttSinkApp <appname> <outputBrokerUrl> <topic>")
      System.exit(1)
    }

    val Seq(appName, outputBrokerUrl, topic) = args.toSeq

    val conf = new SparkConf()
      .setAppName(appName)
      .setJars(SparkContext.jarOfClass(this.getClass).toSeq)

    val batchInterval = 10

    val ssc = new StreamingContext(conf, Seconds(batchInterval))

    HttpUtils.createStream(ssc,
      url = "https://query.yahooapis.com/v1/public/yql?q=select%20*%20from%20yahoo.finance.quotes%20where%20symbol%20in%20(%22IBM,GOOG,MSFT,AAPL,FB,ORCL,YHOO,TWTR,LNKD,INTC%22)%0A%09%09&format=json&diagnostics=true&env=http%3A%2F%2Fdatatables.org%2Falltables.env",
      interval = batchInterval)
      .flatMap(rec => {
        val query = parse(rec) \ "query"
        ((query \ "results" \ "quote").children)
          .map(rec => JObject(JField("Timestamp", query \ "created")).merge(rec))
      })
      .map(rec => {
        implicit val formats = DefaultFormats
        rec.children.map(f => f.extract[String]) mkString ","
      })
      .foreachRDD { rdd =>
        rdd.foreach { rec =>
          // One connect/publish/disconnect cycle per record: correct, but the
          // connection overhead is paid for every single message.
          val client = new MqttClient(outputBrokerUrl, MqttClient.generateClientId(), new MemoryPersistence())
          client.connect()
          client.publish(topic, new MqttMessage(rec.getBytes(StandardCharsets.UTF_8)))
          client.disconnect()
          client.close()
        }
      }

    ssc.start()
    ssc.awaitTermination()
  }

}
Example 29
Source File: L6-12StaticPool.scala From prosparkstreaming with Apache License 2.0 | 5 votes |
package org.apress.prospark

import java.nio.charset.StandardCharsets

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.streaming.Seconds
import org.apache.spark.streaming.StreamingContext
import org.eclipse.paho.client.mqttv3.MqttClient
import org.eclipse.paho.client.mqttv3.MqttMessage
import org.eclipse.paho.client.mqttv3.persist.MemoryPersistence
import org.json4s.DefaultFormats
import org.json4s.JField
import org.json4s.JsonAST.JObject
import org.json4s.jvalue2extractable
import org.json4s.jvalue2monadic
import org.json4s.native.JsonMethods.parse
import org.json4s.string2JsonInput

object MqttSinkAppF {

  def main(args: Array[String]) {
    if (args.length != 3) {
      System.err.println(
        "Usage: MqttSinkApp <appname> <outputBrokerUrl> <topic>")
      System.exit(1)
    }

    val Seq(appName, outputBrokerUrl, topic) = args.toSeq

    val conf = new SparkConf()
      .setAppName(appName)
      .setJars(SparkContext.jarOfClass(this.getClass).toSeq)

    val batchInterval = 10

    val ssc = new StreamingContext(conf, Seconds(batchInterval))

    val mqttSink = ssc.sparkContext.broadcast(MqttSinkLazy(outputBrokerUrl))

    HttpUtils.createStream(ssc,
      url = "https://query.yahooapis.com/v1/public/yql?q=select%20*%20from%20yahoo.finance.quotes%20where%20symbol%20in%20(%22IBM,GOOG,MSFT,AAPL,FB,ORCL,YHOO,TWTR,LNKD,INTC%22)%0A%09%09&format=json&diagnostics=true&env=http%3A%2F%2Fdatatables.org%2Falltables.env",
      interval = batchInterval)
      .flatMap(rec => {
        val query = parse(rec) \ "query"
        ((query \ "results" \ "quote").children)
          .map(rec => JObject(JField("Timestamp", query \ "created")).merge(rec))
      })
      .map(rec => {
        implicit val formats = DefaultFormats
        rec.children.map(f => f.extract[String]) mkString ","
      })
      .foreachRDD { rdd =>
        rdd.foreachPartition { par =>
          par.foreach(message =>
            mqttSink.value.client.publish(topic, new MqttMessage(message.getBytes(StandardCharsets.UTF_8))))
        }
      }

    ssc.start()
    ssc.awaitTermination()
  }

}

class MqttSinkLazy(brokerUrl: String) extends Serializable {
  // The connection is established lazily, i.e. on first use on each executor,
  // which is what makes broadcasting this wrapper safe.
  lazy val client = {
    val client = new MqttClient(brokerUrl, MqttClient.generateClientId(), new MemoryPersistence())
    client.connect()
    sys.addShutdownHook {
      client.disconnect()
      client.close()
    }
    client
  }
}

object MqttSinkLazy {
  val brokerUrl = "tcp://localhost:1883"
  val client = new MqttSinkLazy(brokerUrl)

  // Note: the argument is ignored; the singleton is always built with the default
  // brokerUrl above, so callers passing a different URL still get the default.
  def apply(brokerUrl: String): MqttSinkLazy = {
    client
  }
}
Example 30
Source File: L6-8Static.scala From prosparkstreaming with Apache License 2.0 | 5 votes |
package org.apress.prospark

import java.nio.charset.StandardCharsets

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.streaming.Seconds
import org.apache.spark.streaming.StreamingContext
import org.eclipse.paho.client.mqttv3.MqttClient
import org.eclipse.paho.client.mqttv3.MqttMessage
import org.eclipse.paho.client.mqttv3.persist.MemoryPersistence
import org.json4s.DefaultFormats
import org.json4s.JField
import org.json4s.JsonAST.JObject
import org.json4s.jvalue2extractable
import org.json4s.jvalue2monadic
import org.json4s.native.JsonMethods.parse
import org.json4s.string2JsonInput

object MqttSinkAppD {

  def main(args: Array[String]) {
    if (args.length != 3) {
      System.err.println(
        "Usage: MqttSinkApp <appname> <outputBrokerUrl> <topic>")
      System.exit(1)
    }

    val Seq(appName, outputBrokerUrl, topic) = args.toSeq

    val conf = new SparkConf()
      .setAppName(appName)
      .setJars(SparkContext.jarOfClass(this.getClass).toSeq)

    val batchInterval = 10

    val ssc = new StreamingContext(conf, Seconds(batchInterval))

    HttpUtils.createStream(ssc,
      url = "https://query.yahooapis.com/v1/public/yql?q=select%20*%20from%20yahoo.finance.quotes%20where%20symbol%20in%20(%22IBM,GOOG,MSFT,AAPL,FB,ORCL,YHOO,TWTR,LNKD,INTC%22)%0A%09%09&format=json&diagnostics=true&env=http%3A%2F%2Fdatatables.org%2Falltables.env",
      interval = batchInterval)
      .flatMap(rec => {
        val query = parse(rec) \ "query"
        ((query \ "results" \ "quote").children)
          .map(rec => JObject(JField("Timestamp", query \ "created")).merge(rec))
      })
      .map(rec => {
        implicit val formats = DefaultFormats
        rec.children.map(f => f.extract[String]) mkString ","
      })
      .foreachRDD { rdd =>
        rdd.foreachPartition { par =>
          par.foreach(message =>
            MqttSink().publish(topic, new MqttMessage(message.getBytes(StandardCharsets.UTF_8))))
        }
      }

    ssc.start()
    ssc.awaitTermination()
  }

}

// A static singleton: the object (and hence the connection) is initialized once
// per executor JVM, the first time MqttSink() is referenced on that executor.
object MqttSink {
  val brokerUrl = "tcp://localhost:1883"
  val client = new MqttClient(brokerUrl, MqttClient.generateClientId(), new MemoryPersistence())
  client.connect()
  sys.addShutdownHook {
    client.disconnect()
    client.close()
  }

  def apply(): MqttClient = {
    client
  }
}
Example 31
Source File: L6-18Cassandra.scala From prosparkstreaming with Apache License 2.0 | 5 votes |
package org.apress.prospark

import java.nio.charset.StandardCharsets

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.streaming.Seconds
import org.apache.spark.streaming.StreamingContext
import org.json4s.DefaultFormats
import org.json4s.JField
import org.json4s.JsonAST.JObject
import org.json4s.jvalue2extractable
import org.json4s.jvalue2monadic
import org.json4s.native.JsonMethods.parse
import org.json4s.string2JsonInput
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.io.Text
import java.nio.ByteBuffer
import org.apache.cassandra.hadoop.ColumnFamilyOutputFormat
import org.apache.cassandra.hadoop.ConfigHelper
import org.apache.cassandra.thrift.ColumnOrSuperColumn
import org.apache.cassandra.thrift.Column
import org.apache.cassandra.utils.ByteBufferUtil
import org.apache.cassandra.thrift.Mutation
import java.util.Arrays

object CassandraSinkApp {

  def main(args: Array[String]) {
    if (args.length != 6) {
      System.err.println(
        "Usage: CassandraSinkApp <appname> <cassandraHost> <cassandraPort> <keyspace> <columnFamilyName> <columnName>")
      System.exit(1)
    }

    val Seq(appName, cassandraHost, cassandraPort, keyspace, columnFamilyName, columnName) = args.toSeq

    val conf = new SparkConf()
      .setAppName(appName)
      .setJars(SparkContext.jarOfClass(this.getClass).toSeq)

    val batchInterval = 10
    val windowSize = 20
    val slideInterval = 10

    val ssc = new StreamingContext(conf, Seconds(batchInterval))

    HttpUtils.createStream(ssc,
      url = "https://query.yahooapis.com/v1/public/yql?q=select%20*%20from%20yahoo.finance.quotes%20where%20symbol%20in%20(%22IBM,GOOG,MSFT,AAPL,FB,ORCL,YHOO,TWTR,LNKD,INTC%22)%0A%09%09&format=json&diagnostics=true&env=http%3A%2F%2Fdatatables.org%2Falltables.env",
      interval = batchInterval)
      .flatMap(rec => {
        implicit val formats = DefaultFormats
        val query = parse(rec) \ "query"
        ((query \ "results" \ "quote").children)
          .map(rec => ((rec \ "symbol").extract[String], (rec \ "LastTradePriceOnly").extract[String].toFloat))
      })
      .reduceByKeyAndWindow((x: Float, y: Float) => (x + y), Seconds(windowSize), Seconds(slideInterval))
      .foreachRDD(rdd => {
        val jobConf = new Configuration()
        ConfigHelper.setOutputRpcPort(jobConf, cassandraPort)
        ConfigHelper.setOutputInitialAddress(jobConf, cassandraHost)
        ConfigHelper.setOutputColumnFamily(jobConf, keyspace, columnFamilyName)
        ConfigHelper.setOutputPartitioner(jobConf, "Murmur3Partitioner")
        rdd.map(rec => {
          val c = new Column()
          c.setName(ByteBufferUtil.bytes(columnName))
          // windowSize / batchInterval is the number of batches per window,
          // so the stored value is a per-batch average of the windowed sum.
          c.setValue(ByteBufferUtil.bytes(rec._2 / (windowSize / batchInterval)))
          c.setTimestamp(System.currentTimeMillis)
          val m = new Mutation()
          m.setColumn_or_supercolumn(new ColumnOrSuperColumn())
          m.column_or_supercolumn.setColumn(c)
          (ByteBufferUtil.bytes(rec._1), Arrays.asList(m))
        }).saveAsNewAPIHadoopFile(keyspace, classOf[ByteBuffer], classOf[List[Mutation]], classOf[ColumnFamilyOutputFormat], jobConf)
      })

    ssc.start()
    ssc.awaitTermination()
  }
}
Example 32
Source File: L6-20CassandraConnector.scala From prosparkstreaming with Apache License 2.0 | 5 votes |
package org.apress.prospark

import scala.reflect.runtime.universe

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.streaming.Seconds
import org.apache.spark.streaming.StreamingContext
import org.apache.spark.streaming.dstream.DStream.toPairDStreamFunctions
import org.json4s.DefaultFormats
import org.json4s.jvalue2extractable
import org.json4s.jvalue2monadic
import org.json4s.native.JsonMethods.parse
import org.json4s.string2JsonInput

import com.datastax.spark.connector.SomeColumns
import com.datastax.spark.connector.cql.CassandraConnector
import com.datastax.spark.connector.streaming.toDStreamFunctions
import com.datastax.spark.connector.toNamedColumnRef

object CassandraConnectorSinkApp {

  def main(args: Array[String]) {
    if (args.length != 6) {
      System.err.println(
        "Usage: CassandraConnectorSinkApp <appname> <cassandraHost> <cassandraPort> <keyspace> <tableName> <columnName>")
      System.exit(1)
    }

    val Seq(appName, cassandraHost, cassandraPort, keyspace, tableName, columnName) = args.toSeq

    val conf = new SparkConf()
      .setAppName(appName)
      .setJars(SparkContext.jarOfClass(this.getClass).toSeq)
      .set("spark.cassandra.connection.host", cassandraHost)
      .set("spark.cassandra.connection.port", cassandraPort)

    val batchInterval = 10
    val windowSize = 20
    val slideInterval = 10

    val ssc = new StreamingContext(conf, Seconds(batchInterval))

    CassandraConnector(conf).withSessionDo { session =>
      session.execute(s"CREATE KEYSPACE IF NOT EXISTS $keyspace WITH REPLICATION = {'class': 'SimpleStrategy', 'replication_factor': 1 }")
      session.execute(s"CREATE TABLE IF NOT EXISTS $keyspace.$tableName (key TEXT PRIMARY KEY, $columnName FLOAT)")
    }

    HttpUtils.createStream(ssc,
      url = "https://query.yahooapis.com/v1/public/yql?q=select%20*%20from%20yahoo.finance.quotes%20where%20symbol%20in%20(%22IBM,GOOG,MSFT,AAPL,FB,ORCL,YHOO,TWTR,LNKD,INTC%22)%0A%09%09&format=json&diagnostics=true&env=http%3A%2F%2Fdatatables.org%2Falltables.env",
      interval = batchInterval)
      .flatMap(rec => {
        implicit val formats = DefaultFormats
        val query = parse(rec) \ "query"
        ((query \ "results" \ "quote").children)
          .map(rec => ((rec \ "symbol").extract[String], (rec \ "LastTradePriceOnly").extract[String].toFloat))
      })
      .reduceByKeyAndWindow((x: Float, y: Float) => (x + y), Seconds(windowSize), Seconds(slideInterval))
      .map(stock => (stock._1, stock._2 / (windowSize / batchInterval)))
      .saveToCassandra(keyspace, tableName)

    ssc.start()
    ssc.awaitTermination()
  }
}
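saveToCassandra above infers the column mapping from the tuple arity and table metadata; the otherwise-unused SomeColumns import hints at the explicit alternative. A sketch, where windowedAverages is a hypothetical name for the mapped stream from the example:

// Explicit column mapping; "key" and columnName must match the CQL schema created above.
windowedAverages.saveToCassandra(keyspace, tableName, SomeColumns("key", columnName))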
Example 33
Source File: L6-5Exception.scala From prosparkstreaming with Apache License 2.0 | 5 votes |
package org.apress.prospark

import java.nio.charset.StandardCharsets

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.streaming.Seconds
import org.apache.spark.streaming.StreamingContext
import org.eclipse.paho.client.mqttv3.MqttClient
import org.eclipse.paho.client.mqttv3.MqttMessage
import org.eclipse.paho.client.mqttv3.persist.MemoryPersistence
import org.json4s.DefaultFormats
import org.json4s.JField
import org.json4s.JsonAST.JObject
import org.json4s.jvalue2extractable
import org.json4s.jvalue2monadic
import org.json4s.native.JsonMethods.parse
import org.json4s.string2JsonInput

object MqttSinkAppA {

  def main(args: Array[String]) {
    if (args.length != 3) {
      System.err.println(
        "Usage: MqttSinkApp <appname> <outputBrokerUrl> <topic>")
      System.exit(1)
    }

    val Seq(appName, outputBrokerUrl, topic) = args.toSeq

    val conf = new SparkConf()
      .setAppName(appName)
      .setJars(SparkContext.jarOfClass(this.getClass).toSeq)

    val batchInterval = 10

    val ssc = new StreamingContext(conf, Seconds(batchInterval))

    HttpUtils.createStream(ssc,
      url = "https://query.yahooapis.com/v1/public/yql?q=select%20*%20from%20yahoo.finance.quotes%20where%20symbol%20in%20(%22IBM,GOOG,MSFT,AAPL,FB,ORCL,YHOO,TWTR,LNKD,INTC%22)%0A%09%09&format=json&diagnostics=true&env=http%3A%2F%2Fdatatables.org%2Falltables.env",
      interval = batchInterval)
      .flatMap(rec => {
        val query = parse(rec) \ "query"
        ((query \ "results" \ "quote").children)
          .map(rec => JObject(JField("Timestamp", query \ "created")).merge(rec))
      })
      .map(rec => {
        implicit val formats = DefaultFormats
        rec.children.map(f => f.extract[String]) mkString ","
      })
      .foreachRDD { rdd =>
        // The client is created on the driver but used inside rdd.foreach, which
        // runs on the executors; serializing the closure fails with a
        // NotSerializableException. This example exists to illustrate that pitfall.
        val client = new MqttClient(outputBrokerUrl, MqttClient.generateClientId(), new MemoryPersistence())
        client.connect()
        rdd.foreach(rec => client.publish(topic, new MqttMessage(rec.getBytes(StandardCharsets.UTF_8))))
        client.disconnect()
        client.close()
      }

    ssc.start()
    ssc.awaitTermination()
  }

}
Example 34
Source File: L10-9Graph.scala From prosparkstreaming with Apache License 2.0 | 5 votes |
package org.apress.prospark

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.graphx.Edge
import org.apache.spark.graphx.Graph
import org.apache.spark.graphx.Graph.graphToGraphOps
import org.apache.spark.streaming.Seconds
import org.apache.spark.streaming.StreamingContext
import org.json4s.DefaultFormats
import org.json4s.jvalue2extractable
import org.json4s.jvalue2monadic
import org.json4s.native.JsonMethods.parse
import org.json4s.string2JsonInput

object UserRankApp {

  def main(args: Array[String]) {
    if (args.length != 4) {
      System.err.println(
        "Usage: UserRankApp <appname> <batchInterval> <hostname> <port>")
      System.exit(1)
    }
    val Seq(appName, batchInterval, hostname, port) = args.toSeq

    val conf = new SparkConf()
      .setAppName(appName)
      .setJars(SparkContext.jarOfClass(this.getClass).toSeq)

    val ssc = new StreamingContext(conf, Seconds(batchInterval.toInt))

    ssc.socketTextStream(hostname, port.toInt)
      .map(r => {
        implicit val formats = DefaultFormats
        parse(r)
      })
      .foreachRDD(rdd => {
        val edges = rdd.map(jvalue => {
          implicit val formats = DefaultFormats
          ((jvalue \ "user_id").extract[String], (jvalue \ "friends").extract[Array[String]])
        })
          .flatMap(r => r._2.map(f => Edge(r._1.hashCode.toLong, f.hashCode.toLong, 1.0)))

        val vertices = rdd.map(jvalue => {
          implicit val formats = DefaultFormats
          ((jvalue \ "user_id").extract[String])
        })
          .map(r => (r.hashCode.toLong, r))

        val tolerance = 0.0001
        val graph = Graph(vertices, edges, "defaultUser")
          .subgraph(vpred = (id, idStr) => idStr != "defaultUser")
        val pr = graph.pageRank(tolerance).cache

        graph.outerJoinVertices(pr.vertices) {
          (userId, attrs, rank) => (rank.getOrElse(0.0).asInstanceOf[Number].doubleValue, attrs)
        }.vertices.top(10) {
          Ordering.by(_._2._1)
        }.foreach(rec => println("User id: %s, Rank: %f".format(rec._2._2, rec._2._1)))
      })

    ssc.start()
    ssc.awaitTermination()
  }
}
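The example derives 64-bit vertex ids from String.hashCode, which is only 32 bits, so distinct user ids can collide. A sketch of a wider hash, assuming Guava is on the classpath (it typically ships with Spark):

import java.nio.charset.StandardCharsets

import com.google.common.hash.Hashing

// Hypothetical helper: murmur3_128 truncated to 64 bits leaves far less room
// for collisions than String.hashCode widened to a Long.
def vertexId(s: String): Long =
  Hashing.murmur3_128().hashString(s, StandardCharsets.UTF_8).asLong()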
Example 35
Source File: L6-16SparkHBase.scala From prosparkstreaming with Apache License 2.0 | 5 votes |
package org.apress.prospark

import org.apache.hadoop.hbase.HBaseConfiguration
import org.apache.hadoop.hbase.TableName
import org.apache.hadoop.hbase.client.Put
import org.apache.hadoop.hbase.spark.HBaseContext
import org.apache.hadoop.hbase.util.Bytes
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.streaming.Seconds
import org.apache.spark.streaming.StreamingContext
import org.apache.spark.streaming.dstream.DStream.toPairDStreamFunctions
import org.json4s.DefaultFormats
import org.json4s.jvalue2extractable
import org.json4s.jvalue2monadic
import org.json4s.native.JsonMethods.parse
import org.json4s.string2JsonInput

object SparkHBaseBulkPutApp {

  def main(args: Array[String]) {
    if (args.length != 4) {
      System.err.println(
        "Usage: SparkHBaseBulkPutApp <appname> <tableName> <columnFamilyName> <columnName>")
      System.exit(1)
    }

    val Seq(appName, tableName, columnFamilyName, columnName) = args.toSeq

    val conf = new SparkConf()
      .setAppName(appName)
      .setJars(SparkContext.jarOfClass(this.getClass).toSeq)

    val batchInterval = 10
    val windowSize = 20
    val slideInterval = 10

    val ssc = new StreamingContext(conf, Seconds(batchInterval))

    val hbaseConf = HBaseConfiguration.create()
    val hContext = new HBaseContext(ssc.sparkContext, hbaseConf)

    val windowed = HttpUtils.createStream(ssc,
      url = "https://query.yahooapis.com/v1/public/yql?q=select%20*%20from%20yahoo.finance.quotes%20where%20symbol%20in%20(%22IBM,GOOG,MSFT,AAPL,FB,ORCL,YHOO,TWTR,LNKD,INTC%22)%0A%09%09&format=json&diagnostics=true&env=http%3A%2F%2Fdatatables.org%2Falltables.env",
      interval = batchInterval)
      .flatMap(rec => {
        implicit val formats = DefaultFormats
        val query = parse(rec) \ "query"
        ((query \ "results" \ "quote").children)
          .map(rec => ((rec \ "symbol").extract[String], (rec \ "LastTradePriceOnly").extract[String].toFloat))
      })
      .reduceByKeyAndWindow((x: Float, y: Float) => (x + y), Seconds(windowSize), Seconds(slideInterval))

    hContext.streamBulkPut[(String, Float)](windowed, TableName.valueOf(tableName), rec => {
      val put = new Put(rec._1.getBytes)
      put.addColumn(columnFamilyName.getBytes, columnName.getBytes, Bytes.toBytes(rec._2 / (windowSize / batchInterval)))
      put
    })

    ssc.start()
    ssc.awaitTermination()
  }
}
Example 36
Source File: L6-22Counters.scala From prosparkstreaming with Apache License 2.0 | 5 votes |
package org.apress.prospark

import java.util.concurrent.atomic.AtomicLong

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.streaming.Seconds
import org.apache.spark.streaming.StreamingContext
import org.json4s.DefaultFormats
import org.json4s.jvalue2extractable
import org.json4s.jvalue2monadic
import org.json4s.native.JsonMethods.parse
import org.json4s.string2JsonInput

object StatefulCountersApp {

  def main(args: Array[String]) {
    if (args.length != 1) {
      System.err.println(
        "Usage: StatefulCountersApp <appname>")
      System.exit(1)
    }

    val Seq(appName) = args.toSeq

    val conf = new SparkConf()
      .setAppName(appName)
      .setJars(SparkContext.jarOfClass(this.getClass).toSeq)

    val batchInterval = 10

    val ssc = new StreamingContext(conf, Seconds(batchInterval))

    // Driver-local state: this only works because take(10) below pulls the
    // records back to the driver before the counters are touched.
    val globalMax: AtomicLong = new AtomicLong(Long.MinValue)
    val globalMin: AtomicLong = new AtomicLong(Long.MaxValue)
    val globalCounter500: AtomicLong = new AtomicLong(0)

    HttpUtils.createStream(ssc,
      url = "https://query.yahooapis.com/v1/public/yql?q=select%20*%20from%20yahoo.finance.quotes%20where%20symbol%20in%20(%22IBM,GOOG,MSFT,AAPL,FB,ORCL,YHOO,TWTR,LNKD,INTC%22)%0A%09%09&format=json&diagnostics=true&env=http%3A%2F%2Fdatatables.org%2Falltables.env",
      interval = batchInterval)
      .flatMap(rec => {
        implicit val formats = DefaultFormats
        val query = parse(rec) \ "query"
        ((query \ "results" \ "quote").children)
          .map(rec => ((rec \ "symbol").extract[String], (rec \ "LastTradePriceOnly").extract[String].toFloat, (rec \ "Volume").extract[String].toLong))
      })
      .foreachRDD(rdd => {
        val stocks = rdd.take(10)
        stocks.foreach(stock => {
          val price = stock._2
          val volume = stock._3
          if (volume > globalMax.get()) {
            globalMax.set(volume)
          }
          if (volume < globalMin.get()) {
            globalMin.set(volume)
          }
          if (price > 500) {
            globalCounter500.incrementAndGet()
          }
        })
        if (globalCounter500.get() > 1000L) {
          println("Global counter has reached 1000")
          println("Max ----> " + globalMax.get)
          println("Min ----> " + globalMin.get)
          globalCounter500.set(0)
        }
      })

    ssc.start()
    ssc.awaitTermination()
  }
}
Example 37
Source File: L6-24Accumulators.scala From prosparkstreaming with Apache License 2.0 | 5 votes |
package org.apress.prospark

import scala.collection.mutable

import org.apache.spark.AccumulableParam
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.streaming.Seconds
import org.apache.spark.streaming.StreamingContext
import org.json4s.DefaultFormats
import org.json4s.jvalue2extractable
import org.json4s.jvalue2monadic
import org.json4s.native.JsonMethods.parse
import org.json4s.string2JsonInput

object StatefulAccumulatorsApp {

  // Per-symbol state: (min volume, max volume, count of prices above 500).
  object StockAccum extends AccumulableParam[mutable.HashMap[String, (Long, Long, Long)], (String, (Float, Long))] {
    def zero(t: mutable.HashMap[String, (Long, Long, Long)]): mutable.HashMap[String, (Long, Long, Long)] = {
      new mutable.HashMap[String, (Long, Long, Long)]()
    }
    def addInPlace(t1: mutable.HashMap[String, (Long, Long, Long)], t2: mutable.HashMap[String, (Long, Long, Long)]): mutable.HashMap[String, (Long, Long, Long)] = {
      t1 ++ t2.map {
        case (k, v2) => (k -> {
          val v1 = t1.getOrElse(k, (Long.MaxValue, Long.MinValue, 0L))
          val newMin = if (v2._1 < v1._1) v2._1 else v1._1
          val newMax = if (v2._2 > v1._2) v2._2 else v1._2
          (newMin, newMax, v1._3 + v2._3)
        })
      }
    }
    def addAccumulator(t1: mutable.HashMap[String, (Long, Long, Long)], t2: (String, (Float, Long))): mutable.HashMap[String, (Long, Long, Long)] = {
      val prevStats = t1.getOrElse(t2._1, (Long.MaxValue, Long.MinValue, 0L))
      val newVals = t2._2
      var newCount = prevStats._3
      if (newVals._1 > 500.0) {
        newCount += 1
      }
      val newMin = if (newVals._2 < prevStats._1) newVals._2 else prevStats._1
      val newMax = if (newVals._2 > prevStats._2) newVals._2 else prevStats._2
      t1 += t2._1 -> (newMin, newMax, newCount)
    }
  }

  def main(args: Array[String]) {
    if (args.length != 2) {
      System.err.println(
        "Usage: StatefulAccumulatorsApp <appname> <checkpointDir>")
      System.exit(1)
    }

    val Seq(appName, checkpointDir) = args.toSeq

    val conf = new SparkConf()
      .setAppName(appName)
      .setJars(SparkContext.jarOfClass(this.getClass).toSeq)

    val batchInterval = 10

    val ssc = new StreamingContext(conf, Seconds(batchInterval))

    val stateAccum = ssc.sparkContext.accumulable(new mutable.HashMap[String, (Long, Long, Long)]())(StockAccum)

    HttpUtils.createStream(ssc,
      url = "https://query.yahooapis.com/v1/public/yql?q=select%20*%20from%20yahoo.finance.quotes%20where%20symbol%20in%20(%22IBM,GOOG,MSFT,AAPL,FB,ORCL,YHOO,TWTR,LNKD,INTC%22)%0A%09%09&format=json&diagnostics=true&env=http%3A%2F%2Fdatatables.org%2Falltables.env",
      interval = batchInterval)
      .flatMap(rec => {
        implicit val formats = DefaultFormats
        val query = parse(rec) \ "query"
        ((query \ "results" \ "quote").children)
          .map(rec => ((rec \ "symbol").extract[String], ((rec \ "LastTradePriceOnly").extract[String].toFloat, (rec \ "Volume").extract[String].toLong)))
      })
      .foreachRDD(rdd => {
        rdd.foreach({ stock =>
          stateAccum += (stock._1, (stock._2._1, stock._2._2))
        })
        for ((sym, stats) <- stateAccum.value.toSeq) printf("Symbol: %s, Stats: %s\n", sym, stats)
      })

    ssc.start()
    ssc.awaitTermination()
  }
}
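AccumulableParam is deprecated in Spark 2.x in favor of AccumulatorV2. A rough equivalent of StockAccum under that API, assuming Spark 2.x and the same (min, max, count of prices above 500) semantics:

import org.apache.spark.util.AccumulatorV2
import scala.collection.mutable

// Sketch only, not the book's code: register with sc.register(new StockAccumV2, "stocks").
class StockAccumV2
  extends AccumulatorV2[(String, (Float, Long)), mutable.HashMap[String, (Long, Long, Long)]] {

  private val map = new mutable.HashMap[String, (Long, Long, Long)]()

  def isZero: Boolean = map.isEmpty
  def copy(): StockAccumV2 = { val c = new StockAccumV2; c.map ++= map; c }
  def reset(): Unit = map.clear()

  def add(v: (String, (Float, Long))): Unit = {
    val (sym, (price, volume)) = v
    val (min, max, count) = map.getOrElse(sym, (Long.MaxValue, Long.MinValue, 0L))
    map(sym) = (math.min(min, volume), math.max(max, volume),
      if (price > 500.0f) count + 1 else count)
  }

  def merge(other: AccumulatorV2[(String, (Float, Long)), mutable.HashMap[String, (Long, Long, Long)]]): Unit =
    other.value.foreach { case (sym, (min, max, count)) =>
      val (m1, m2, c) = map.getOrElse(sym, (Long.MaxValue, Long.MinValue, 0L))
      map(sym) = (math.min(m1, min), math.max(m2, max), c + count)
    }

  def value: mutable.HashMap[String, (Long, Long, Long)] = map
}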
Example 38
Source File: L6-7PerPartition.scala From prosparkstreaming with Apache License 2.0 | 5 votes |
package org.apress.prospark

import java.nio.charset.StandardCharsets

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.streaming.Seconds
import org.apache.spark.streaming.StreamingContext
import org.eclipse.paho.client.mqttv3.MqttClient
import org.eclipse.paho.client.mqttv3.MqttMessage
import org.eclipse.paho.client.mqttv3.persist.MemoryPersistence
import org.json4s.DefaultFormats
import org.json4s.JField
import org.json4s.JsonAST.JObject
import org.json4s.jvalue2extractable
import org.json4s.jvalue2monadic
import org.json4s.native.JsonMethods.parse
import org.json4s.string2JsonInput

object MqttSinkAppC {

  def main(args: Array[String]) {
    if (args.length != 3) {
      System.err.println(
        "Usage: MqttSinkApp <appname> <outputBrokerUrl> <topic>")
      System.exit(1)
    }

    val Seq(appName, outputBrokerUrl, topic) = args.toSeq

    val conf = new SparkConf()
      .setAppName(appName)
      .setJars(SparkContext.jarOfClass(this.getClass).toSeq)

    val batchInterval = 10

    val ssc = new StreamingContext(conf, Seconds(batchInterval))

    HttpUtils.createStream(ssc,
      url = "https://query.yahooapis.com/v1/public/yql?q=select%20*%20from%20yahoo.finance.quotes%20where%20symbol%20in%20(%22IBM,GOOG,MSFT,AAPL,FB,ORCL,YHOO,TWTR,LNKD,INTC%22)%0A%09%09&format=json&diagnostics=true&env=http%3A%2F%2Fdatatables.org%2Falltables.env",
      interval = batchInterval)
      .flatMap(rec => {
        val query = parse(rec) \ "query"
        ((query \ "results" \ "quote").children)
          .map(rec => JObject(JField("Timestamp", query \ "created")).merge(rec))
      })
      .map(rec => {
        implicit val formats = DefaultFormats
        rec.children.map(f => f.extract[String]) mkString ","
      })
      .foreachRDD { rdd =>
        rdd.foreachPartition { par =>
          // One connection per partition amortizes the setup cost across all
          // records in the partition.
          val client = new MqttClient(outputBrokerUrl, MqttClient.generateClientId(), new MemoryPersistence())
          client.connect()
          par.foreach(rec => client.publish(topic, new MqttMessage(rec.getBytes(StandardCharsets.UTF_8))))
          client.disconnect()
          client.close()
        }
      }

    ssc.start()
    ssc.awaitTermination()
  }

}
Example 39
Source File: L6-14HBase.scala From prosparkstreaming with Apache License 2.0 | 5 votes |
package org.apress.prospark

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.hbase.HBaseConfiguration
import org.apache.hadoop.hbase.client.Put
import org.apache.hadoop.hbase.mapreduce.TableOutputFormat
import org.apache.hadoop.hbase.util.Bytes
import org.apache.hadoop.io.Text
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD.rddToPairRDDFunctions
import org.apache.spark.streaming.Seconds
import org.apache.spark.streaming.StreamingContext
import org.apache.spark.streaming.dstream.DStream.toPairDStreamFunctions
import org.json4s.DefaultFormats
import org.json4s.jvalue2extractable
import org.json4s.jvalue2monadic
import org.json4s.native.JsonMethods.parse
import org.json4s.string2JsonInput

object HBaseSinkApp {

  def main(args: Array[String]) {
    if (args.length != 5) {
      System.err.println(
        "Usage: HBaseSinkApp <appname> <hbaseMaster> <tableName> <columnFamilyName> <columnName>")
      System.exit(1)
    }

    val Seq(appName, hbaseMaster, tableName, columnFamilyName, columnName) = args.toSeq

    val conf = new SparkConf()
      .setAppName(appName)
      .setJars(SparkContext.jarOfClass(this.getClass).toSeq)

    val batchInterval = 10
    val windowSize = 20
    val slideInterval = 10

    val ssc = new StreamingContext(conf, Seconds(batchInterval))

    HttpUtils.createStream(ssc,
      url = "https://query.yahooapis.com/v1/public/yql?q=select%20*%20from%20yahoo.finance.quotes%20where%20symbol%20in%20(%22IBM,GOOG,MSFT,AAPL,FB,ORCL,YHOO,TWTR,LNKD,INTC%22)%0A%09%09&format=json&diagnostics=true&env=http%3A%2F%2Fdatatables.org%2Falltables.env",
      interval = batchInterval)
      .flatMap(rec => {
        implicit val formats = DefaultFormats
        val query = parse(rec) \ "query"
        ((query \ "results" \ "quote").children)
          .map(rec => ((rec \ "symbol").extract[String], (rec \ "LastTradePriceOnly").extract[String].toFloat))
      })
      .reduceByKeyAndWindow((x: Float, y: Float) => (x + y), Seconds(windowSize), Seconds(slideInterval))
      .foreachRDD(rdd => {
        val hbaseConf = HBaseConfiguration.create()
        hbaseConf.set(TableOutputFormat.OUTPUT_TABLE, tableName)
        hbaseConf.set("hbase.master", hbaseMaster)
        val jobConf = new Configuration(hbaseConf)
        jobConf.set("mapreduce.job.outputformat.class", classOf[TableOutputFormat[Text]].getName)
        rdd.map(rec => {
          val put = new Put(rec._1.getBytes)
          put.addColumn(columnFamilyName.getBytes, columnName.getBytes, Bytes.toBytes(rec._2 / (windowSize / batchInterval)))
          (rec._1, put)
        }).saveAsNewAPIHadoopDataset(jobConf)
      })

    ssc.start()
    ssc.awaitTermination()
  }
}
Example 40
Source File: L6-23UpdateState.scala From prosparkstreaming with Apache License 2.0 | 5 votes |
package org.apress.prospark

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.streaming.Seconds
import org.apache.spark.streaming.StreamingContext
import org.apache.spark.streaming.dstream.DStream.toPairDStreamFunctions
import org.json4s.DefaultFormats
import org.json4s.jvalue2extractable
import org.json4s.jvalue2monadic
import org.json4s.native.JsonMethods.parse
import org.json4s.string2JsonInput

object StatefulUpdateStateApp {

  def main(args: Array[String]) {
    if (args.length != 2) {
      System.err.println(
        "Usage: StatefulUpdateStateApp <appname> <checkpointDir>")
      System.exit(1)
    }

    val Seq(appName, checkpointDir) = args.toSeq

    val conf = new SparkConf()
      .setAppName(appName)
      .setJars(SparkContext.jarOfClass(this.getClass).toSeq)

    val batchInterval = 10

    val ssc = new StreamingContext(conf, Seconds(batchInterval))
    ssc.checkpoint(checkpointDir)

    HttpUtils.createStream(ssc,
      url = "https://query.yahooapis.com/v1/public/yql?q=select%20*%20from%20yahoo.finance.quotes%20where%20symbol%20in%20(%22IBM,GOOG,MSFT,AAPL,FB,ORCL,YHOO,TWTR,LNKD,INTC%22)%0A%09%09&format=json&diagnostics=true&env=http%3A%2F%2Fdatatables.org%2Falltables.env",
      interval = batchInterval)
      .flatMap(rec => {
        implicit val formats = DefaultFormats
        val query = parse(rec) \ "query"
        ((query \ "results" \ "quote").children)
          .map(rec => ((rec \ "symbol").extract[String], ((rec \ "LastTradePriceOnly").extract[String].toFloat, (rec \ "Volume").extract[String].toLong)))
      })
      .updateStateByKey(updateState)
      .print()

    def updateState(values: Seq[(Float, Long)], state: Option[(Long, Long, Long)]): Option[(Long, Long, Long)] = {
      // updateStateByKey also invokes this for keys that received no new values
      // in the batch; guard against calling min/max on an empty Seq.
      if (values.isEmpty) {
        state
      } else {
        val volumes = values.map(s => s._2)
        val localMin = volumes.min
        val localMax = volumes.max
        val localCount500 = values.map(s => s._1).count(price => price > 500)
        val globalValues = state.getOrElse((Long.MaxValue, Long.MinValue, 0L))
        val newMin = if (localMin < globalValues._1) localMin else globalValues._1
        val newMax = if (localMax > globalValues._2) localMax else globalValues._2
        val newCount500 = globalValues._3 + localCount500
        Some(newMin, newMax, newCount500)
      }
    }

    ssc.start()
    ssc.awaitTermination()
  }
}
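Since Spark 1.6 the same per-key state can be kept with mapWithState, which only invokes the function for keys with new data and emits one record per input rather than the full state each batch. A rough sketch under those assumptions:

import org.apache.spark.streaming.{State, StateSpec}

// Sketch only: the same (min volume, max volume, count of prices > 500) state as above.
val spec = StateSpec.function(
  (symbol: String, value: Option[(Float, Long)], state: State[(Long, Long, Long)]) => {
    val (curMin, curMax, count) = state.getOption.getOrElse((Long.MaxValue, Long.MinValue, 0L))
    val (price, volume) = value.getOrElse((0f, 0L))
    val updated = (math.min(curMin, volume), math.max(curMax, volume),
      if (price > 500) count + 1 else count)
    state.update(updated)
    (symbol, updated)
  })
// Applied in place of updateStateByKey: stream.mapWithState(spec).print()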
Example 41
Source File: L6-26Redis.scala From prosparkstreaming with Apache License 2.0 | 5 votes |
package org.apress.prospark

import scala.collection.JavaConversions.asScalaBuffer
import scala.collection.JavaConversions.mutableMapAsJavaMap
import scala.collection.mutable

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.streaming.Seconds
import org.apache.spark.streaming.StreamingContext
import org.json4s.DefaultFormats
import org.json4s.jvalue2extractable
import org.json4s.jvalue2monadic
import org.json4s.native.JsonMethods.parse
import org.json4s.string2JsonInput

import redis.clients.jedis.Jedis

object StatefulRedisApp {

  def main(args: Array[String]) {
    if (args.length != 3) {
      System.err.println(
        "Usage: StatefulRedisApp <appname> <checkpointDir> <hostname>")
      System.exit(1)
    }

    val Seq(appName, checkpointDir, hostname) = args.toSeq

    val conf = new SparkConf()
      .setAppName(appName)
      .setJars(SparkContext.jarOfClass(this.getClass).toSeq)

    val batchInterval = 10

    val ssc = new StreamingContext(conf, Seconds(batchInterval))

    HttpUtils.createStream(ssc,
      url = "https://query.yahooapis.com/v1/public/yql?q=select%20*%20from%20yahoo.finance.quotes%20where%20symbol%20in%20(%22IBM,GOOG,MSFT,AAPL,FB,ORCL,YHOO,TWTR,LNKD,INTC%22)%0A%09%09&format=json&diagnostics=true&env=http%3A%2F%2Fdatatables.org%2Falltables.env",
      interval = batchInterval)
      .flatMap(rec => {
        implicit val formats = DefaultFormats
        val query = parse(rec) \ "query"
        ((query \ "results" \ "quote").children)
          .map(rec => ((rec \ "symbol").extract[String], ((rec \ "LastTradePriceOnly").extract[String].toFloat, (rec \ "Volume").extract[String].toLong)))
      })
      .foreachRDD(rdd => {
        rdd.foreachPartition({ part =>
          val jedis = new Jedis(hostname)
          part.foreach(f => {
            val prev = jedis.hmget(f._1, "min", "max", "count")
            if (prev(0) == null) {
              jedis.hmset(f._1, mutable.HashMap("min" -> Long.MaxValue.toString,
                "max" -> Long.MinValue.toString, "count" -> 0.toString))
            } else {
              val prevLong = prev.toList.map(v => v.toLong)
              var newCount = prevLong(2)
              val newPrice = f._2._1
              val newVolume = f._2._2
              if (newPrice > 500.0) {
                newCount += 1
              }
              val newMin = if (newVolume < prevLong(0)) newVolume else prevLong(0)
              val newMax = if (newVolume > prevLong(1)) newVolume else prevLong(1)
              jedis.hmset(f._1, mutable.HashMap("min" -> newMin.toString,
                "max" -> newMax.toString, "count" -> newCount.toString))
            }
          })
          jedis.close()
        })

        val jedis = new Jedis(hostname)
        // Note: scan(0) returns only the first page of keys; a complete pass
        // would loop until the returned cursor comes back around to zero.
        jedis.scan(0).getResult.foreach(sym =>
          println("Symbol: %s, Stats: %s".format(sym, jedis.hmget(sym, "min", "max", "count").toString)))
        jedis.close()
      })

    ssc.start()
    ssc.awaitTermination()
  }
}
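The hmget/hmset sequence above is not atomic: two partitions updating the same symbol can interleave and lose an update. A sketch of optimistic locking with Jedis WATCH/MULTI (retry loop omitted; the helper and its compute parameter are hypothetical, standing in for the update logic above):

import redis.clients.jedis.Jedis
import scala.collection.JavaConversions.mutableMapAsJavaMap
import scala.collection.mutable

// Atomically apply a read-modify-write to one symbol's hash; returns false
// if another client changed the key between WATCH and EXEC.
def updateAtomically(jedis: Jedis, symbol: String)(
  compute: java.util.List[String] => mutable.HashMap[String, String]): Boolean = {
  jedis.watch(symbol)
  val prev = jedis.hmget(symbol, "min", "max", "count")
  val tx = jedis.multi()
  tx.hmset(symbol, compute(prev))
  tx.exec() != null // null signals the transaction was aborted
}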
Example 42
Source File: TradeSpec.scala From scala-stellar-sdk with Apache License 2.0 | 5 votes |
package stellar.sdk.model

import org.json4s.NoTypeHints
import org.json4s.native.JsonMethods.parse
import org.json4s.native.Serialization
import org.specs2.mutable.Specification
import stellar.sdk.ArbitraryInput
import stellar.sdk.model.op.JsonSnippets

class TradeSpec extends Specification with ArbitraryInput with JsonSnippets {

  implicit val formats = Serialization.formats(NoTypeHints) + TradeDeserializer

  "trade" should {
    "parse from json" >> prop { trade: Trade =>
      val doc =
        s"""
           |{
           |  "_links": {
           |    "self": {"href": ""},
           |    "base": {"href": "https://horizon.stellar.org/accounts/GCI7ILB37OFVHLLSA74UCXZFCTPEBJOZK7YCNBI7DKH7D76U4CRJBL2A"},
           |    "counter": {"href": "https://horizon.stellar.org/accounts/GDRFRGR2FDUFF2RI6PQE5KFSCJHGSEIOGET22R66XSATP3BYHZ46BPLO"},
           |    "operation": {"href": "https://horizon.stellar.org/operations/38583306127675393"}
           |  },
           |  "id": "${trade.id}",
           |  "paging_token": "38583306127675393-2",
           |  "ledger_close_time": "${formatter.format(trade.ledgerCloseTime)}",
           |  "offer_id": "${trade.offerId}",
           |  "base_offer_id": "${trade.baseOfferId}",
           |  "base_account": "${trade.baseAccount.accountId}",
           |  ${amountDocPortion(trade.baseAmount, "base_amount", "base_")}
           |  ${amountDocPortion(trade.counterAmount, "counter_amount", "counter_")}
           |  "counter_account": "${trade.counterAccount.accountId}",
           |  "counter_offer_id": "${trade.counterOfferId}",
           |  "base_is_seller": ${trade.baseIsSeller}
           |}
         """.stripMargin

      parse(doc).extract[Trade] mustEqual trade
    }
  }
}
Example 43
Source File: AccountMergeOperationSpec.scala From scala-stellar-sdk with Apache License 2.0 | 5 votes |
package stellar.sdk.model.op

import org.json4s.NoTypeHints
import org.json4s.native.JsonMethods.parse
import org.json4s.native.Serialization
import org.scalacheck.Arbitrary
import org.specs2.mutable.Specification
import stellar.sdk.util.ByteArrays.base64
import stellar.sdk.{ArbitraryInput, DomainMatchers, KeyPair}

class AccountMergeOperationSpec extends Specification with ArbitraryInput with DomainMatchers with JsonSnippets {

  implicit val arb: Arbitrary[Transacted[AccountMergeOperation]] = Arbitrary(genTransacted(genAccountMergeOperation))
  implicit val formats = Serialization.formats(NoTypeHints) + TransactedOperationDeserializer

  "account merge operation" should {
    "serde via xdr string" >> prop { actual: AccountMergeOperation =>
      Operation.decodeXDR(base64(actual.encode)) must beEquivalentTo(actual)
    }

    "serde via xdr bytes" >> prop { actual: AccountMergeOperation =>
      val (remaining, decoded) = Operation.decode.run(actual.encode).value
      decoded mustEqual actual
      remaining must beEmpty
    }

    "parse from json" >> prop { op: Transacted[AccountMergeOperation] =>
      val doc =
        s"""
           | {
           |  "_links": {
           |    "self": {"href": "https://horizon-testnet.stellar.org/operations/10157597659144"},
           |    "transaction": {"href": "https://horizon-testnet.stellar.org/transactions/17a670bc424ff5ce3b386dbfaae9990b66a2a37b4fbe51547e8794962a3f9e6a"},
           |    "effects": {"href": "https://horizon-testnet.stellar.org/operations/10157597659144/effects"},
           |    "succeeds": {"href": "https://horizon-testnet.stellar.org/effects?order=desc\u0026cursor=10157597659144"},
           |    "precedes": {"href": "https://horizon-testnet.stellar.org/effects?order=asc\u0026cursor=10157597659144"}
           |  },
           |  "id": "${op.id}",
           |  "paging_token": "10157597659137",
           |  "source_account": "${op.operation.sourceAccount.get.accountId}",
           |  "type_i": 8,
           |  "type": "account_merge",
           |  "created_at": "${formatter.format(op.createdAt)}",
           |  "transaction_hash": "${op.txnHash}",
           |  "account": "${op.operation.sourceAccount.get.accountId}",
           |  "into": "${KeyPair.fromPublicKey(op.operation.destination.hash).accountId}"
           |}
         """.stripMargin

      parse(doc).extract[Transacted[AccountMergeOperation]] mustEqual op
    }.setGen(genTransacted(genAccountMergeOperation.suchThat(_.sourceAccount.nonEmpty)))
  }
}