akka.serialization.SerializationExtension Scala Examples
The following examples show how to use akka.serialization.SerializationExtension.
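All of the examples below follow the same basic pattern: obtain the Serialization extension from an ActorSystem, look up a serializer for a message, and round-trip the message through bytes. Before the project-specific examples, here is a minimal, self-contained sketch of that pattern (the system name and message value are placeholders, not taken from any example below):

import akka.actor.ActorSystem
import akka.serialization.{Serialization, SerializationExtension}

object SerializationExtensionExample extends App {
  val system = ActorSystem("example")

  // The extension is looked up (and created on first use) per actor system.
  val serialization: Serialization = SerializationExtension(system)

  val message = "hello"

  // Pick the serializer bound to the message's class...
  val serializer = serialization.findSerializerFor(message)
  val bytes = serializer.toBinary(message)

  // ...and let the extension resolve the right serializer on the way back.
  val restored = serialization.deserialize(bytes, classOf[String]).get

  assert(restored == message)
  system.terminate()
}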
Example 1
Source File: PayloadSerializer.scala From akka-stream-eventsourcing with Apache License 2.0
package com.github.krasserm.ases.serializer

import akka.actor.ExtendedActorSystem
import akka.serialization.{SerializationExtension, SerializerWithStringManifest}
import com.github.krasserm.ases.serializer.PayloadFormatOuterClass.PayloadFormat
import com.google.protobuf.ByteString

import scala.util.Try

class PayloadSerializer(system: ExtendedActorSystem) {

  def payloadFormatBuilder(payload: AnyRef): PayloadFormat.Builder = {
    val serializer = SerializationExtension(system).findSerializerFor(payload)
    val builder = PayloadFormat.newBuilder()
    if (serializer.includeManifest) {
      val (isStringManifest, manifest) = serializer match {
        case s: SerializerWithStringManifest => (true, s.manifest(payload))
        case _                               => (false, payload.getClass.getName)
      }
      builder.setIsStringManifest(isStringManifest)
      builder.setPayloadManifest(manifest)
    }
    builder.setSerializerId(serializer.identifier)
    builder.setPayload(ByteString.copyFrom(serializer.toBinary(payload)))
  }

  def payload(payloadFormat: PayloadFormat): AnyRef = {
    val payload =
      if (payloadFormat.getIsStringManifest) payloadFromStringManifest(payloadFormat)
      else if (payloadFormat.getPayloadManifest.nonEmpty) payloadFromClassManifest(payloadFormat)
      else payloadFromEmptyManifest(payloadFormat)
    payload.get
  }

  private def payloadFromStringManifest(payloadFormat: PayloadFormat): Try[AnyRef] = {
    SerializationExtension(system).deserialize(
      payloadFormat.getPayload.toByteArray,
      payloadFormat.getSerializerId,
      payloadFormat.getPayloadManifest
    )
  }

  private def payloadFromClassManifest(payloadFormat: PayloadFormat): Try[AnyRef] = {
    val manifestClass = system.dynamicAccess.getClassFor[AnyRef](payloadFormat.getPayloadManifest).get
    SerializationExtension(system).deserialize(
      payloadFormat.getPayload.toByteArray,
      payloadFormat.getSerializerId,
      Some(manifestClass)
    )
  }

  private def payloadFromEmptyManifest(payloadFormat: PayloadFormat): Try[AnyRef] = {
    SerializationExtension(system).deserialize(
      payloadFormat.getPayload.toByteArray,
      payloadFormat.getSerializerId,
      None
    )
  }
}
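A brief usage sketch for the serializer above, hedged: the system value and sample payload are assumptions, and the payload must have a serializer configured in the actor system.

// Hypothetical round trip through the protobuf envelope, assuming a running
// ActorSystem cast to ExtendedActorSystem is available as `system`.
val payloadSerializer = new PayloadSerializer(system)
val format = payloadSerializer.payloadFormatBuilder("some-event").build()
val restored = payloadSerializer.payload(format)
assert(restored == "some-event")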
Example 2
Source File: KryoSerializerInitSpec.scala From nexus-kg with Apache License 2.0
package ch.epfl.bluebrain.nexus.kg.serializers

import java.nio.file.Paths

import akka.actor.ActorSystem
import akka.serialization.SerializationExtension
import akka.testkit.TestKit
import ch.epfl.bluebrain.nexus.kg.TestHelper
import io.altoo.akka.serialization.kryo.KryoSerializer
import org.scalatest.TryValues
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike

class KryoSerializerInitSpec
    extends TestKit(ActorSystem("KryoSerializerInitSpec"))
    with AnyWordSpecLike
    with Matchers
    with TryValues
    with TestHelper {
  private val serialization = SerializationExtension(system)

  "A Path Kryo serialization" should {
    "succeed" in {
      val path = Paths.get("resources/application.conf")

      // Find the Serializer for it
      val serializer = serialization.findSerializerFor(path)
      serializer.getClass.equals(classOf[KryoSerializer]) shouldEqual true

      // Check serialization/deserialization
      val serialized = serialization.serialize(path)
      serialized.isSuccess shouldEqual true
      val deserialized = serialization.deserialize(serialized.get, path.getClass)
      deserialized.isSuccess shouldEqual true
      deserialized.success.value shouldEqual path
    }
  }
}
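For findSerializerFor to return the Kryo serializer, the project's configuration must bind it to the relevant types. A minimal sketch of such a binding, following the akka-kryo-serialization documentation; the exact bindings in this project's own configuration may differ:

import akka.actor.ActorSystem
import com.typesafe.config.ConfigFactory

// Assumed, minimal configuration: register the Kryo serializer and bind it to
// java.nio.file.Path so values returned by Paths.get resolve to it.
val kryoConfig = ConfigFactory.parseString(
  """
    |akka.actor.serializers.kryo = "io.altoo.akka.serialization.kryo.KryoSerializer"
    |akka.actor.serialization-bindings {
    |  "java.nio.file.Path" = kryo
    |}
  """.stripMargin)
val system = ActorSystem("KryoConfigured", kryoConfig)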
Example 3
Source File: BankAccountEventJSONSerializerSpec.scala From akka-ddd-cqrs-es-example with MIT License
package com.github.j5ik2o.bank.adaptor.serialization

import akka.actor.ActorSystem
import akka.serialization.SerializationExtension
import com.github.j5ik2o.bank.adaptor.util.ActorSpec
import com.github.j5ik2o.bank.domain.model._
import com.typesafe.config.ConfigFactory
import org.sisioh.baseunits.scala.money.Money
import org.sisioh.baseunits.scala.timeutil.Clock

class BankAccountEventJSONSerializerSpec
    extends ActorSpec(
      ActorSystem("BankAccountEventJSONSerializerSpec", ConfigFactory.load("bank-account-aggregate-spec.conf"))
    ) {
  val extension = SerializationExtension(system)

  "BankAccountEventJSONSerializer" - {
    "should encode CreateEvent" in {
      val serializer = extension.serializerFor(classOf[BankAccountOpened])
      val now = Clock.now
      val expectedEvent = BankAccountOpened(BankAccountId(1L), BankAccountName("test-1"), now)
      val byteArray = serializer.toBinary(expectedEvent)
      val event = serializer.fromBinary(byteArray, Some(classOf[BankAccountOpened]))
      event shouldBe expectedEvent
    }
    "should encode UpdateEvent" in {
      val serializer = extension.serializerFor(classOf[BankAccountEventUpdated])
      val now = Clock.now
      val expectedEvent = BankAccountEventUpdated(BankAccountId(1L), BankAccountName("test-1"), now)
      val byteArray = serializer.toBinary(expectedEvent)
      val event = serializer.fromBinary(byteArray, Some(classOf[BankAccountEventUpdated]))
      event shouldBe expectedEvent
    }
    "should encode DepositEvent" in {
      val serializer = extension.serializerFor(classOf[BankAccountDeposited])
      val now = Clock.now
      val expectedEvent = BankAccountDeposited(BankAccountId(1L), Money.yens(100), now)
      val byteArray = serializer.toBinary(expectedEvent)
      val event = serializer.fromBinary(byteArray, Some(classOf[BankAccountDeposited]))
      event shouldBe expectedEvent
    }
    "should encode WithdrawEvent" in {
      val serializer = extension.serializerFor(classOf[BankAccountWithdrawn])
      val now = Clock.now
      val expectedEvent = BankAccountWithdrawn(BankAccountId(1L), Money.yens(100), now)
      val byteArray = serializer.toBinary(expectedEvent)
      val event = serializer.fromBinary(byteArray, Some(classOf[BankAccountWithdrawn]))
      event shouldBe expectedEvent
    }
    "should encode DestroyEvent" in {
      val serializer = extension.serializerFor(classOf[BankAccountClosed])
      val now = Clock.now
      val expectedEvent = BankAccountClosed(BankAccountId(1L), now)
      val byteArray = serializer.toBinary(expectedEvent)
      val event = serializer.fromBinary(byteArray, Some(classOf[BankAccountClosed]))
      event shouldBe expectedEvent
    }
  }
}
Example 4
Source File: MessageSerializerTest.scala From aecor with MIT License
package aecor.runtime.akkageneric

import aecor.runtime.akkageneric.GenericAkkaRuntime.KeyedCommand
import aecor.runtime.akkageneric.GenericAkkaRuntimeActor.{Command, CommandResult}
import aecor.runtime.akkageneric.serialization.MessageSerializer
import akka.actor.ActorSystem
import akka.serialization.SerializationExtension
import org.scalacheck.Arbitrary
import org.scalacheck.Arbitrary._
import org.scalacheck.Prop.forAll
import org.scalatest.BeforeAndAfterAll
import org.scalatest.funsuite.AnyFunSuite
import scodec.bits.BitVector

import scala.concurrent.Await
import scala.concurrent.duration._

class MessageSerializerTest extends AnyFunSuite with BeforeAndAfterAll {
  implicit val system: ActorSystem = ActorSystem("test")
  val serialization = SerializationExtension(system)

  implicit val bitVector: Arbitrary[BitVector] =
    Arbitrary(arbitrary[Array[Byte]].map(BitVector(_)))

  def canSerialize[A <: AnyRef](a: A): Boolean = {
    val ser = serialization.serializerFor(a.getClass)
    assert(ser.isInstanceOf[MessageSerializer])
    val mser = ser.asInstanceOf[MessageSerializer]
    val (man, bytes) = (mser.manifest(a), mser.toBinary(a))
    val out = mser.fromBinary(bytes, man)
    out === a
  }

  test("serialization") {
    forAll { bb: BitVector =>
      canSerialize(Command(bb))
    }
    forAll { bb: BitVector =>
      canSerialize(CommandResult(bb))
    }
    forAll { (key: String, bb: BitVector) =>
      canSerialize(KeyedCommand(key, bb))
    }
  }

  override protected def afterAll(): Unit = {
    Await.result(system.terminate(), 5.seconds)
    ()
  }
}
Example 5
Source File: KafkaPartitionerSpec.scala From affinity with Apache License 2.0
package io.amient.affinity.kafka

import java.util

import akka.serialization.SerializationExtension
import com.typesafe.config.ConfigFactory
import io.amient.affinity.AffinityActorSystem
import io.amient.affinity.avro.MemorySchemaRegistry
import io.amient.affinity.core.Murmur2Partitioner
import org.apache.kafka.common.{Cluster, Node, PartitionInfo}
import org.apache.kafka.streams.processor.internals.DefaultStreamPartitioner
import org.scalatest.{FlatSpec, Matchers}

import scala.collection.JavaConverters._

class KafkaPartitionerSpec extends FlatSpec with Matchers {

  def mockCluster(numParts: Int) = new Cluster(
    "mock-cluster",
    util.Arrays.asList[Node](),
    (0 to numParts - 1).map(p => new PartitionInfo("test", p, null, Array(), Array())).asJava,
    new util.HashSet[String],
    new util.HashSet[String])

  "kafka.DefaultPartitioner" should "have identical method to Murmur2Partitioner" in {
    val kafkaPartitioner = new org.apache.kafka.clients.producer.internals.DefaultPartitioner()
    val affinityPartitioner = new Murmur2Partitioner
    val key = "test-value-for-partitioner"
    val serializedKey: Array[Byte] = key.getBytes
    val kafkaP = kafkaPartitioner.partition("test", key, serializedKey, key, serializedKey, mockCluster(4))
    val affinityP = affinityPartitioner.partition(serializedKey, 4)
    kafkaP should equal(affinityP)
  }

  "KafkaAvroSerde" should "have identical serialization footprint as Akka AvroSerdeProxy" in {
    val cfg = Map(
      "schema.registry.class" -> classOf[MemorySchemaRegistry].getName,
      "schema.registry.id" -> "1")
    val key = "6290853012217500191217"

    val system = AffinityActorSystem.create(ConfigFactory.parseMap(
      ((cfg.map { case (k, v) => ("affinity.avro." + k, v) }) + ("affinity.system.name" -> "KafkaPartitionerSpec")).asJava))

    val akkaSerializedKey = try {
      val serialization = SerializationExtension(system)
      serialization.serialize(key).get
    } finally {
      system.terminate()
    }

    val kafkaSerde = new KafkaAvroSerde()
    kafkaSerde.configure(cfg.asJava, true)
    val kafkaSerialized = kafkaSerde.serializer().serialize("test", key)

    akkaSerializedKey.mkString(".") should equal(kafkaSerialized.mkString("."))

    new Murmur2Partitioner().partition(akkaSerializedKey, 9) should be(4)
    new Murmur2Partitioner().partition(kafkaSerialized, 9) should be(4)

    val streamsPartitioner = new DefaultStreamPartitioner[Any, Any](kafkaSerde.serializer(), mockCluster(9))
    streamsPartitioner.partition("test", key, null, 9) should be(4)
    streamsPartitioner.partition("test", key, "irrelevant", 9) should be(4)
  }
}
Example 6
Source File: AkkaSerializationSystemTest.scala From affinity with Apache License 2.0
package io.amient.affinity.avro

import akka.serialization.SerializationExtension
import com.typesafe.config.{ConfigFactory, ConfigValueFactory}
import io.amient.affinity.avro.HttpSchemaRegistry.HttpAvroConf
import io.amient.affinity.avro.record.AvroRecord
import io.amient.affinity.kafka.EmbeddedConfluentRegistry
import io.amient.affinity.{AffinityActorSystem, Conf}
import org.scalatest.FlatSpec

case class ExampleType(val id: Int) extends AvroRecord {
  override def hashCode(): Int = id.hashCode()
}

class AkkaSerializationSystemTest extends FlatSpec with EmbeddedConfluentRegistry {

  val config = ConfigFactory.empty
    .withValue(Conf.Affi.SystemName.path, ConfigValueFactory.fromAnyRef("CfTest"))
    .withValue(Conf.Affi.Avro.Class.path, ConfigValueFactory.fromAnyRef(classOf[HttpSchemaRegistry].getName))
    .withValue(HttpAvroConf(Conf.Affi.Avro).HttpSchemaRegistryUrl.path, ConfigValueFactory.fromAnyRef(registryUrl))

  assert(config.getString(HttpAvroConf(Conf.Affi.Avro).HttpSchemaRegistryUrl.path) == registryUrl)

  override def numPartitions = 2

  "Confluent Schema Registry " should "be available via akka SerializationExtension" in {
    val system = AffinityActorSystem.create(config)
    try {
      val serialization = SerializationExtension(system)
      val serde = serialization.serializerFor(classOf[ExampleType])
      assert(serde.fromBinary(serde.toBinary(ExampleType(101))) == ExampleType(101))
    } finally {
      system.terminate()
    }
  }
}
Example 7
Source File: LocalCommitPublisher.scala From akka-cqrs with Apache License 2.0
package com.productfoundry.akka.cqrs

import akka.actor.{Actor, ActorLogging}
import akka.serialization.{SerializationExtension, SerializerWithStringManifest}

import scala.util.Try

// NOTE: the enclosing declaration is truncated in the original listing. The
// trait header and the `serialization` field below are a minimal, assumed
// reconstruction so the snippet is self-contained; `handleCommit` overrides a
// hook of the project's (elided) base trait.
trait LocalCommitPublisher extends CommitHandler { this: Actor with ActorLogging =>

  private lazy val serialization = SerializationExtension(context.system)

  override def handleCommit(commit: Commit, response: AggregateResponse): AggregateResponse = {
    val handleAttempt = for {
      serializer <- Try(serialization.findSerializerFor(commit).asInstanceOf[SerializerWithStringManifest])
      bytes <- Try(serializer.toBinary(commit))
      deserialized <- Try(serializer.fromBinary(bytes, serializer.manifest(commit)).asInstanceOf[Commit])
      validated <- Try(
        if (deserialized == commit) deserialized
        else throw new IllegalStateException(s"expected: $commit; actual: $deserialized"))
    } yield {
      context.system.eventStream.publish(validated)
    }

    handleAttempt.recover {
      case e => log.error(e, "Handling commit")
    }

    response
  }
}
Example 8
Source File: CassandraEventUpdateSpec.scala From akka-persistence-cassandra with Apache License 2.0
package akka.persistence.cassandra.journal

import java.util.UUID

import akka.Done
import akka.actor.ExtendedActorSystem
import akka.event.Logging
import akka.persistence.PersistentRepr
import akka.persistence.cassandra.journal.CassandraJournal.Serialized
import akka.persistence.cassandra.{CassandraLifecycle, CassandraSpec, TestTaggingActor, _}
import akka.serialization.SerializationExtension
import akka.stream.alpakka.cassandra.CqlSessionProvider
import akka.stream.alpakka.cassandra.scaladsl.CassandraSession
import com.typesafe.config.ConfigFactory

import scala.concurrent.{Await, ExecutionContext, Future}

object CassandraEventUpdateSpec {
  val config = ConfigFactory.parseString("""
    """).withFallback(CassandraLifecycle.config)
}

class CassandraEventUpdateSpec extends CassandraSpec(CassandraEventUpdateSpec.config) { s =>

  private[akka] val log = Logging(system, getClass)
  private val serialization = SerializationExtension(system)

  val updater = new CassandraEventUpdate {
    override private[akka] val log = s.log
    override private[akka] def settings: PluginSettings = PluginSettings(system)
    override private[akka] implicit val ec: ExecutionContext = system.dispatcher
    // use separate session, not shared via CassandraSessionRegistry because init is different
    private val sessionProvider = CqlSessionProvider(
      system.asInstanceOf[ExtendedActorSystem],
      system.settings.config.getConfig(PluginSettings.DefaultConfigPath))
    override private[akka] val session: CassandraSession = new CassandraSession(
      system,
      sessionProvider,
      ec,
      log,
      systemName,
      init = _ => Future.successful(Done),
      onClose = () => ())
  }

  "CassandraEventUpdate" must {
    "update the event in messages" in {
      val pid = nextPid
      val a = system.actorOf(TestTaggingActor.props(pid))
      a ! "e-1"
      expectMsgType[TestTaggingActor.Ack.type]
      val eventsBefore = events(pid)
      eventsBefore.map(_.pr.payload) shouldEqual Seq("e-1")
      val originalEvent = eventsBefore.head
      val modifiedEvent = serialize(originalEvent.pr.withPayload("secrets"), originalEvent.offset, Set("ignored"))

      updater.updateEvent(modifiedEvent).futureValue shouldEqual Done

      eventPayloadsWithTags(pid) shouldEqual Seq(("secrets", Set()))
    }

    "update the event in tag_views" in {
      val pid = nextPid
      val b = system.actorOf(TestTaggingActor.props(pid, Set("red", "blue")))
      b ! "e-1"
      expectMsgType[TestTaggingActor.Ack.type]
      val eventsBefore = events(pid).head
      val modifiedEvent = serialize(eventsBefore.pr.withPayload("hidden"), eventsBefore.offset, Set("ignored"))

      expectEventsForTag(tag = "red", "e-1")
      expectEventsForTag(tag = "blue", "e-1")

      updater.updateEvent(modifiedEvent).futureValue shouldEqual Done

      expectEventsForTag(tag = "red", "hidden")
      expectEventsForTag(tag = "blue", "hidden")
    }

    def serialize(pr: PersistentRepr, offset: UUID, tags: Set[String]): Serialized = {
      import system.dispatcher
      Await.result(serializeEvent(pr, tags, offset, Hour, serialization, system), remainingOrDefault)
    }
  }
}
Example 9
Source File: DirectWriting.scala From akka-persistence-cassandra with Apache License 2.0
package akka.persistence.cassandra.query

import java.nio.ByteBuffer

import akka.actor.ActorSystem
import akka.persistence.PersistentRepr
import akka.persistence.cassandra.Hour
import akka.persistence.cassandra.PluginSettings
import akka.persistence.cassandra.journal.CassandraJournalStatements
import akka.persistence.cassandra.journal.TimeBucket
import akka.serialization.SerializationExtension
import akka.serialization.Serializers
import com.datastax.oss.driver.api.core.CqlSession
import com.datastax.oss.driver.api.core.uuid.Uuids
import org.scalatest.BeforeAndAfterAll
import org.scalatest.Suite

trait DirectWriting extends BeforeAndAfterAll {
  self: Suite =>

  def system: ActorSystem
  private lazy val serialization = SerializationExtension(system)
  private lazy val settings = PluginSettings(system)

  def cluster: CqlSession

  private lazy val writeStatements: CassandraJournalStatements = new CassandraJournalStatements(settings)

  private lazy val preparedWriteMessage = cluster.prepare(writeStatements.writeMessage(withMeta = false))

  private lazy val preparedDeleteMessage = cluster.prepare(writeStatements.deleteMessage)

  protected def writeTestEvent(persistent: PersistentRepr, partitionNr: Long = 1L): Unit = {
    val event = persistent.payload.asInstanceOf[AnyRef]
    val serializer = serialization.findSerializerFor(event)
    val serialized = ByteBuffer.wrap(serialization.serialize(event).get)

    val nowUuid = Uuids.timeBased()
    val now = Uuids.unixTimestamp(nowUuid)
    val serManifest = Serializers.manifestFor(serializer, persistent)
    val bs = preparedWriteMessage
      .bind()
      .setString("persistence_id", persistent.persistenceId)
      .setLong("partition_nr", partitionNr)
      .setLong("sequence_nr", persistent.sequenceNr)
      .setUuid("timestamp", nowUuid)
      .setString("timebucket", TimeBucket(now, Hour).key.toString)
      .setInt("ser_id", serializer.identifier)
      .setString("ser_manifest", serManifest)
      .setString("event_manifest", persistent.manifest)
      .setByteBuffer("event", serialized)
    cluster.execute(bs)
    system.log.debug("Directly wrote payload [{}] for entity [{}]", persistent.payload, persistent.persistenceId)
  }

  protected def deleteTestEvent(persistent: PersistentRepr, partitionNr: Long = 1L): Unit = {
    val bs = preparedDeleteMessage
      .bind()
      .setString("persistence_id", persistent.persistenceId)
      .setLong("partition_nr", partitionNr)
      .setLong("sequence_nr", persistent.sequenceNr)
    cluster.execute(bs)
    system.log.debug("Directly deleted payload [{}] for entity [{}]", persistent.payload, persistent.persistenceId)
  }
}
Example 10
Source File: BuildTagViewForPersistenceId.scala From akka-persistence-cassandra with Apache License 2.0
package akka.persistence.cassandra.reconciler

import akka.Done
import akka.actor.{ActorSystem, ExtendedActorSystem}
import akka.annotation.InternalApi
import akka.event.Logging
import akka.persistence.cassandra.Extractors
import akka.persistence.cassandra.PluginSettings
import akka.persistence.cassandra.journal.CassandraTagRecovery
import akka.persistence.cassandra.journal.TagWriter._
import akka.persistence.cassandra.query.scaladsl.CassandraReadJournal
import akka.persistence.query.PersistenceQuery
import akka.serialization.SerializationExtension
import akka.stream.OverflowStrategy
import akka.stream.scaladsl.{Sink, Source}
import akka.util.Timeout

import scala.concurrent.Future
import scala.concurrent.duration._

@InternalApi
private[akka] final class BuildTagViewForPersisetceId(
    persistenceId: String,
    system: ActorSystem,
    recovery: CassandraTagRecovery,
    settings: PluginSettings) {

  import system.dispatcher

  private implicit val sys = system
  private val log = Logging(system, classOf[BuildTagViewForPersisetceId])
  private val serialization = SerializationExtension(system)

  private val queries: CassandraReadJournal =
    PersistenceQuery(system.asInstanceOf[ExtendedActorSystem])
      .readJournalFor[CassandraReadJournal]("akka.persistence.cassandra.query")

  private implicit val flushTimeout = Timeout(30.seconds)

  def reconcile(flushEvery: Int = 1000): Future[Done] = {

    val recoveryPrep = for {
      tp <- recovery.lookupTagProgress(persistenceId)
      _ <- recovery.setTagProgress(persistenceId, tp)
    } yield tp

    Source
      .futureSource(recoveryPrep.map((tp: Map[String, TagProgress]) => {
        log.debug("[{}] Rebuilding tag view table from: [{}]", persistenceId, tp)
        queries
          .eventsByPersistenceId(
            persistenceId,
            0,
            Long.MaxValue,
            Long.MaxValue,
            None,
            settings.journalSettings.readProfile,
            "BuildTagViewForPersistenceId",
            extractor = Extractors.rawEvent(settings.eventsByTagSettings.bucketSize, serialization, system))
          .map(recovery.sendMissingTagWriteRaw(tp, actorRunning = false))
          .buffer(flushEvery, OverflowStrategy.backpressure)
          .mapAsync(1)(_ => recovery.flush(flushTimeout))
      }))
      .runWith(Sink.ignore)
  }
}
Example 11
Source File: TestSpec.scala From akka-serialization-test with Apache License 2.0
package com.github.dnvriend

import akka.actor.{ActorRef, ActorSystem, PoisonPill}
import akka.event.{Logging, LoggingAdapter}
import akka.serialization.SerializationExtension
import akka.stream.{ActorMaterializer, Materializer}
import akka.testkit.TestProbe
import akka.util.Timeout
import org.scalatest.concurrent.{Eventually, ScalaFutures}
import org.scalatest.prop.PropertyChecks
import org.scalatest.{BeforeAndAfterAll, FlatSpec, GivenWhenThen, Matchers}

import scala.concurrent.duration._
import scala.concurrent.{ExecutionContext, Future}
import scala.util.Try

trait TestSpec extends FlatSpec
    with Matchers
    with GivenWhenThen
    with ScalaFutures
    with BeforeAndAfterAll
    with Eventually
    with PropertyChecks
    with AkkaPersistenceQueries
    with AkkaStreamUtils
    with InMemoryCleanup {

  implicit val timeout: Timeout = Timeout(10.seconds)
  implicit val system: ActorSystem = ActorSystem()
  implicit val ec: ExecutionContext = system.dispatcher
  implicit val mat: Materializer = ActorMaterializer()
  implicit val log: LoggingAdapter = Logging(system, this.getClass)
  implicit val pc: PatienceConfig = PatienceConfig(timeout = 50.seconds)
  val serialization = SerializationExtension(system)

  implicit class FutureToTry[T](f: Future[T]) {
    def toTry: Try[T] = Try(f.futureValue)
  }

  def killActors(actors: ActorRef*): Unit = {
    val probe = TestProbe()
    actors.foreach { actor ⇒
      probe watch actor
      actor ! PoisonPill
      probe expectTerminated actor
    }
  }

  override protected def afterAll(): Unit = {
    system.terminate()
    system.whenTerminated.toTry should be a 'success
  }
}
Example 12
Source File: ClusterStartupTaskSerializerSpec.scala From lagom with Apache License 2.0
package com.lightbend.lagom.internal.persistence.cluster

import akka.actor.ExtendedActorSystem
import akka.serialization.SerializationExtension
import com.lightbend.lagom.persistence.ActorSystemSpec

class ClusterStartupTaskSerializerSpec extends ActorSystemSpec {
  val serializer = new ClusterStartupTaskSerializer(system.asInstanceOf[ExtendedActorSystem])

  def checkSerialization(obj: AnyRef): Unit = {
    // check that it is configured
    SerializationExtension(system).serializerFor(obj.getClass).getClass should be(classOf[ClusterStartupTaskSerializer])

    // verify serialization-deserialization round trip
    val blob = serializer.toBinary(obj)
    val obj2 = serializer.fromBinary(blob, serializer.manifest(obj))
    obj2 should be(obj)
  }

  "ClusterStartupTaskSerializerSpec" must {
    "serialize Execute" in {
      checkSerialization(ClusterStartupTaskActor.Execute)
    }
  }
}
Example 13
Source File: PersistenceMessageSerializerSpec.scala From lagom with Apache License 2.0
package com.lightbend.lagom.internal.scaladsl.persistence.protobuf

import java.io.NotSerializableException

import scala.concurrent.duration._

import akka.actor.ExtendedActorSystem
import akka.serialization.SerializationExtension
import com.lightbend.lagom.internal.cluster.ClusterDistribution.EnsureActive
import com.lightbend.lagom.persistence.ActorSystemSpec
import com.lightbend.lagom.scaladsl.persistence.CommandEnvelope
import com.lightbend.lagom.scaladsl.persistence.PersistentEntity.InvalidCommandException
import com.lightbend.lagom.scaladsl.persistence.PersistentEntity.PersistException
import com.lightbend.lagom.scaladsl.persistence.PersistentEntity.UnhandledCommandException
import com.lightbend.lagom.scaladsl.persistence.PersistentEntityRef
import com.lightbend.lagom.scaladsl.persistence.TestEntity
import com.lightbend.lagom.scaladsl.playjson.JsonSerializerRegistry
import com.lightbend.lagom.scaladsl.persistence.TestEntitySerializerRegistry

class PersistenceMessageSerializerSpec
    extends ActorSystemSpec(JsonSerializerRegistry.actorSystemSetupFor(TestEntitySerializerRegistry)) {
  val serializer = new PersistenceMessageSerializer(system.asInstanceOf[ExtendedActorSystem])

  def checkSerialization(obj: AnyRef): Unit = {
    // check that it is configured
    SerializationExtension(system).serializerFor(obj.getClass).getClass should be(classOf[PersistenceMessageSerializer])

    // verify serialization-deserialization round trip
    val blob = serializer.toBinary(obj)
    val obj2 = serializer.fromBinary(blob, serializer.manifest(obj))
    obj2 should be(obj)
  }

  "PersistenceMessageSerializer" must {
    "serialize CommandEnvelope" in {
      checkSerialization(CommandEnvelope("entityId", TestEntity.Add("a")))
    }

    "serialize EnsureActive" in {
      checkSerialization(EnsureActive("foo"))
    }

    "serialize InvalidCommandException" in {
      checkSerialization(InvalidCommandException("wrong"))
    }

    "serialize UnhandledCommandException" in {
      checkSerialization(UnhandledCommandException("unhandled"))
    }

    "serialize PersistException" in {
      checkSerialization(PersistException("not stored"))
    }

    "not serialize PersistentEntityRef" in {
      intercept[NotSerializableException] {
        SerializationExtension(system)
          .serialize(new PersistentEntityRef[String]("abc", system.deadLetters, system, 5.seconds))
          .get
      }
    }
  }
}
Example 14
Source File: PersistenceMessageSerializerSpec.scala From lagom with Apache License 2.0
package com.lightbend.lagom.internal.javadsl.persistence.protobuf

import java.io.NotSerializableException
import java.time.{Duration => JDuration}

import akka.actor.ExtendedActorSystem
import akka.serialization.SerializationExtension
import com.lightbend.lagom.internal.cluster.ClusterDistribution.EnsureActive
import com.lightbend.lagom.persistence.ActorSystemSpec
import com.lightbend.lagom.javadsl.persistence.CommandEnvelope
import com.lightbend.lagom.javadsl.persistence.PersistentEntity.InvalidCommandException
import com.lightbend.lagom.javadsl.persistence.PersistentEntity.PersistException
import com.lightbend.lagom.javadsl.persistence.PersistentEntity.UnhandledCommandException
import com.lightbend.lagom.javadsl.persistence.PersistentEntityRef
import com.lightbend.lagom.javadsl.persistence.TestEntity

class PersistenceMessageSerializerSpec extends ActorSystemSpec {
  val serializer = new PersistenceMessageSerializer(system.asInstanceOf[ExtendedActorSystem])

  def checkSerialization(obj: AnyRef): Unit = {
    // check that it is configured
    SerializationExtension(system).serializerFor(obj.getClass).getClass should be(classOf[PersistenceMessageSerializer])

    // verify serialization-deserialization round trip
    val blob = serializer.toBinary(obj)
    val obj2 = serializer.fromBinary(blob, serializer.manifest(obj))
    obj2 should be(obj)
  }

  "PersistenceMessageSerializer" must {
    "serialize CommandEnvelope" in {
      checkSerialization(CommandEnvelope("entityId", TestEntity.Add.of("a")))
    }

    "serialize EnsureActive" in {
      checkSerialization(EnsureActive("foo"))
    }

    "serialize InvalidCommandException" in {
      checkSerialization(InvalidCommandException("wrong"))
    }

    "serialize UnhandledCommandException" in {
      checkSerialization(UnhandledCommandException("unhandled"))
    }

    "serialize PersistException" in {
      checkSerialization(PersistException("not stored"))
    }

    "not serialize PersistentEntityRef" in {
      intercept[NotSerializableException] {
        SerializationExtension(system)
          .serialize(new PersistentEntityRef[String]("abc", system.deadLetters, JDuration.ofSeconds(5)))
          .get
      }
    }
  }
}
Example 15
Source File: S3SnapshotStore.scala From akka-persistence-s3 with MIT License
package akka.persistence.s3
package snapshot

import java.io.ByteArrayInputStream

import akka.actor.ActorLogging
import akka.persistence.serialization.Snapshot
import akka.persistence.snapshot.SnapshotStore
import akka.persistence.{SelectedSnapshot, SnapshotMetadata, SnapshotSelectionCriteria}
import akka.serialization.SerializationExtension
import com.amazonaws.services.s3.model.{ListObjectsRequest, ObjectMetadata, S3ObjectInputStream}
import com.typesafe.config.Config

import scala.collection.JavaConversions._
import scala.collection.immutable
import scala.concurrent.Future
import scala.util.control.NonFatal

case class SerializationResult(stream: ByteArrayInputStream, size: Int)

class S3SnapshotStore(config: Config) extends SnapshotStore with ActorLogging with SnapshotKeySupport {
  import context.dispatcher

  val settings = new S3SnapshotConfig(config)

  val s3Client: S3Client = new S3Client {
    val s3ClientConfig = new S3ClientConfig(context.system.settings.config.getConfig("s3-client"))
  }

  private val serializationExtension = SerializationExtension(context.system)

  private val s3Dispatcher = context.system.dispatchers.lookup("s3-snapshot-store.s3-client-dispatcher")

  val extensionName = settings.extension

  override def loadAsync(persistenceId: String, criteria: SnapshotSelectionCriteria): Future[Option[SelectedSnapshot]] = {
    snapshotMetadatas(persistenceId, criteria)
      .map(_.sorted.takeRight(settings.maxLoadAttempts))
      .flatMap(load)
  }

  private def load(metadata: immutable.Seq[SnapshotMetadata]): Future[Option[SelectedSnapshot]] =
    metadata.lastOption match {
      case None => Future.successful(None)
      case Some(md) =>
        s3Client.getObject(settings.bucketName, snapshotKey(md))(s3Dispatcher)
          .map { obj =>
            val snapshot = deserialize(obj.getObjectContent)
            Some(SelectedSnapshot(md, snapshot.data))
          } recoverWith {
            case NonFatal(e) =>
              log.error(e, s"Error loading snapshot [${md}]")
              load(metadata.init) // try older snapshot
          }
    }

  override def saveAsync(metadata: SnapshotMetadata, snapshot: Any): Future[Unit] = {
    val serialized = serialize(Snapshot(snapshot))
    val objectMetadata = new ObjectMetadata()
    objectMetadata.setContentLength(serialized.size)
    s3Client.putObject(
      settings.bucketName,
      snapshotKey(metadata),
      serialized.stream,
      objectMetadata
    )(s3Dispatcher).map(_ => ())
  }

  override def deleteAsync(metadata: SnapshotMetadata): Future[Unit] = {
    if (metadata.timestamp == 0L)
      deleteAsync(
        metadata.persistenceId,
        SnapshotSelectionCriteria(metadata.sequenceNr, Long.MaxValue, metadata.sequenceNr, Long.MinValue))
    else
      s3Client.deleteObject(settings.bucketName, snapshotKey(metadata))(s3Dispatcher)
  }

  override def deleteAsync(persistenceId: String, criteria: SnapshotSelectionCriteria): Future[Unit] = {
    val metadatas = snapshotMetadatas(persistenceId, criteria)
    metadatas.map(list => Future.sequence(list.map(deleteAsync)))
  }

  private def snapshotMetadatas(persistenceId: String, criteria: SnapshotSelectionCriteria): Future[List[SnapshotMetadata]] = {
    s3Client.listObjects(
      new ListObjectsRequest()
        .withBucketName(settings.bucketName)
        .withPrefix(prefixFromPersistenceId(persistenceId))
        .withDelimiter("/")
    )(s3Dispatcher)
      .map(_.getObjectSummaries.toList.map(s => parseKeyToMetadata(s.getKey))
        .filter(m =>
          m.sequenceNr >= criteria.minSequenceNr && m.sequenceNr <= criteria.maxSequenceNr &&
            m.timestamp >= criteria.minTimestamp && m.timestamp <= criteria.maxTimestamp))
  }

  protected def deserialize(inputStream: S3ObjectInputStream): Snapshot =
    serializationExtension
      .deserialize(akka.persistence.serialization.streamToBytes(inputStream), classOf[Snapshot])
      .get

  protected def serialize(snapshot: Snapshot): SerializationResult = {
    // Serialize once and reuse the bytes (the original serialized the snapshot twice).
    val serialized = serializationExtension.findSerializerFor(snapshot).toBinary(snapshot)
    SerializationResult(new ByteArrayInputStream(serialized), serialized.size)
  }
}
Example 16
Source File: SerializationContext.scala From eventuate with Apache License 2.0
package com.rbmhtechnology.eventuate.serializer

import akka.actor._
import akka.serialization.Serialization
import akka.serialization.SerializationExtension
import com.typesafe.config.Config

import scala.collection.immutable.Seq
import scala.concurrent.Await
import scala.concurrent.duration._

object SerializationContext {
  class SenderActor(receiver: ActorSelection) extends Actor {
    def receive = {
      case msg => receiver ! msg
    }
  }

  class ReceiverActor(probe: ActorRef) extends Actor {
    def receive = {
      case msg => probe ! msg
    }
  }
}

class SerializationContext(configs: Config*) {
  val systems: Seq[ActorSystem] = configs.toList.zipWithIndex.map {
    case (config, idx) => ActorSystem(s"test-system-${idx + 1}", config)
  }

  val serializations: Seq[Serialization] = systems.map(SerializationExtension(_))

  val ports: Seq[Int] = systems.map(port)

  def port(system: ActorSystem): Int =
    system.asInstanceOf[ExtendedActorSystem].provider.getDefaultAddress.port.get

  def shutdown(): Unit =
    systems.foreach(system => Await.result(system.terminate(), 10.seconds))
}
Example 17
Source File: PayloadSerializer.scala From eventuate with Apache License 2.0
package com.rbmhtechnology.eventuate.serializer

import akka.actor.ExtendedActorSystem
import akka.serialization.SerializationExtension
import akka.serialization.SerializerWithStringManifest
import com.google.protobuf.ByteString
import com.rbmhtechnology.eventuate.BinaryPayload
import com.rbmhtechnology.eventuate.serializer.CommonFormats.PayloadFormat

class BinaryPayloadSerializer(system: ExtendedActorSystem) extends PayloadSerializer {

  override def payloadFormatBuilder(payload: AnyRef): PayloadFormat.Builder = {
    val binaryPayload = payload.asInstanceOf[BinaryPayload]
    val builder = PayloadFormat.newBuilder()
      .setPayload(binaryPayload.bytes)
      .setSerializerId(binaryPayload.serializerId)
      .setIsStringManifest(binaryPayload.isStringManifest)
    binaryPayload.manifest.foreach(builder.setPayloadManifest)
    builder
  }

  override def payload(payloadFormat: PayloadFormat): AnyRef = {
    BinaryPayload(
      payloadFormat.getPayload,
      payloadFormat.getSerializerId,
      if (payloadFormat.hasPayloadManifest) Some(payloadFormat.getPayloadManifest) else None,
      payloadFormat.getIsStringManifest)
  }
}
Example 18
Source File: SparkBatchAdapter.scala From eventuate with Apache License 2.0
package com.rbmhtechnology.eventuate.adapter.spark

import akka.actor.ActorSystem
import akka.serialization.SerializationExtension
import com.datastax.spark.connector._
import com.datastax.spark.connector.types._
import com.rbmhtechnology.eventuate.DurableEvent
import com.rbmhtechnology.eventuate.log.cassandra.CassandraEventLogSettings
import com.typesafe.config._
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD

// NOTE: the enclosing class declaration is truncated in the original listing;
// the wrapper below is a minimal, assumed reconstruction so the snippet is
// self-contained (the constructor and settings field are not verbatim source).
class SparkBatchAdapter(val context: SparkContext, config: Config) {

  private val cassandraSettings = new CassandraEventLogSettings(config) // assumed

  def eventBatch(logId: String, fromSequenceNr: Long = 1L): RDD[DurableEvent] = {
    context.cassandraTable(cassandraSettings.keyspace, s"${cassandraSettings.tablePrefix}_$logId")
      .select("event").where(s"sequence_nr >= $fromSequenceNr").as((event: DurableEvent) => event)
  }
}

private class DurableEventConverter(config: Config) extends TypeConverter[DurableEvent] {
  import scala.reflect.runtime.universe._

  val converter = implicitly[TypeConverter[Array[Byte]]]

  // --------------------------------------
  // FIXME: how to shutdown actor system?
  // --------------------------------------
  @transient lazy val system = ActorSystem("TypeConverter", config)
  @transient lazy val serial = SerializationExtension(system)

  def targetTypeTag = implicitly[TypeTag[DurableEvent]]

  def convertPF = {
    case obj => deserialize(converter.convert(obj))
  }

  def deserialize(bytes: Array[Byte]): DurableEvent =
    serial.deserialize(bytes, classOf[DurableEvent]).get
}
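A short usage sketch for the adapter above, hedged: the sparkContext and config setup are assumptions not shown in the original.

// Hypothetical: batch-read all events of event log "log-1" from Cassandra.
val adapter = new SparkBatchAdapter(sparkContext, config)
val events: RDD[DurableEvent] = adapter.eventBatch("log-1", fromSequenceNr = 1L)
events.collect().foreach(println)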
Example 19
Source File: CouchbaseSnapshotStore.scala From akka-persistence-couchbase with Apache License 2.0
package akka.persistence.couchbase.snapshot

import java.util.concurrent.TimeUnit

import akka.actor.ActorLogging
import akka.persistence.couchbase.{CouchbaseExtension, Message}
import akka.persistence.serialization.Snapshot
import akka.persistence.snapshot.SnapshotStore
import akka.persistence.{SelectedSnapshot, SnapshotMetadata, SnapshotSelectionCriteria}
import akka.serialization.SerializationExtension
import com.couchbase.client.java.view.ViewRow

import scala.collection.JavaConverters._
import scala.concurrent.Future
import scala.util.Try

class CouchbaseSnapshotStore extends SnapshotStore with CouchbaseStatements with ActorLogging {

  implicit val executionContext = context.dispatcher

  val couchbase = CouchbaseExtension(context.system)
  val serialization = SerializationExtension(context.system)

  def config = couchbase.snapshotStoreConfig

  def bucket = couchbase.snapshotStoreBucket

  override def saveAsync(metadata: SnapshotMetadata, data: Any): Future[Unit] = {
    Future.fromTry[Unit](
      Try {
        val snapshot = Snapshot(data)
        val message = Message(serialization.findSerializerFor(snapshot).toBinary(snapshot))
        SnapshotMessage.create(metadata, message)
      } flatMap executeSave
    )
  }

  override def deleteAsync(metadata: SnapshotMetadata): Future[Unit] = {
    Future.fromTry[Unit](
      Try {
        bucket.remove(SnapshotMessageKey.fromMetadata(metadata).value)
      }
    )
  }

  override def deleteAsync(persistenceId: String, criteria: SnapshotSelectionCriteria): Future[Unit] = {
    Future.fromTry[Unit](
      Try {
        query(persistenceId, criteria, Integer.MAX_VALUE).foreach { snapshotMessage =>
          bucket.remove(SnapshotMessageKey.fromMetadata(snapshotMessage.metadata).value)
        }
      }
    )
  }
}
Example 20
Source File: DynamoDBSnapshotStore.scala From akka-persistence-dynamodb with Apache License 2.0
package com.github.j5ik2o.akka.persistence.dynamodb.snapshot

import akka.actor.ExtendedActorSystem
import akka.persistence.snapshot.SnapshotStore
import akka.persistence.{SelectedSnapshot, SnapshotMetadata, SnapshotSelectionCriteria}
import akka.serialization.SerializationExtension
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{Sink, Source}
import com.github.j5ik2o.akka.persistence.dynamodb.config.SnapshotPluginConfig
import com.github.j5ik2o.akka.persistence.dynamodb.model.{PersistenceId, SequenceNumber}
import com.github.j5ik2o.akka.persistence.dynamodb.snapshot.dao.{SnapshotDao, SnapshotDaoImpl}
import com.github.j5ik2o.akka.persistence.dynamodb.utils.V2DynamoDbClientBuilderUtils
import com.github.j5ik2o.reactive.aws.dynamodb.DynamoDbAsyncClient
import com.typesafe.config.Config
import software.amazon.awssdk.services.dynamodb.{DynamoDbAsyncClient => JavaDynamoDbAsyncClient}

import scala.concurrent.{ExecutionContext, Future}

object DynamoDBSnapshotStore {

  def toSelectedSnapshot(tupled: (SnapshotMetadata, Any)): SelectedSnapshot = tupled match {
    case (meta: SnapshotMetadata, snapshot: Any) => SelectedSnapshot(meta, snapshot)
  }
}

class DynamoDBSnapshotStore(config: Config) extends SnapshotStore {
  import DynamoDBSnapshotStore._

  implicit val ec: ExecutionContext = context.dispatcher
  implicit val system: ExtendedActorSystem = context.system.asInstanceOf[ExtendedActorSystem]
  implicit val mat = ActorMaterializer()

  private val serialization = SerializationExtension(system)

  protected val pluginConfig: SnapshotPluginConfig = SnapshotPluginConfig.fromConfig(config)

  protected val javaClient: JavaDynamoDbAsyncClient =
    V2DynamoDbClientBuilderUtils.setupAsync(system.dynamicAccess, pluginConfig).build()
  protected val asyncClient: DynamoDbAsyncClient = DynamoDbAsyncClient(javaClient)

  protected val snapshotDao: SnapshotDao =
    new SnapshotDaoImpl(asyncClient, serialization, pluginConfig)

  override def loadAsync(
      persistenceId: String,
      criteria: SnapshotSelectionCriteria
  ): Future[Option[SelectedSnapshot]] = {
    val result = criteria match {
      case SnapshotSelectionCriteria(Long.MaxValue, Long.MaxValue, _, _) =>
        snapshotDao.latestSnapshot(PersistenceId(persistenceId))
      case SnapshotSelectionCriteria(Long.MaxValue, maxTimestamp, _, _) =>
        snapshotDao.snapshotForMaxTimestamp(PersistenceId(persistenceId), maxTimestamp)
      case SnapshotSelectionCriteria(maxSequenceNr, Long.MaxValue, _, _) =>
        snapshotDao.snapshotForMaxSequenceNr(PersistenceId(persistenceId), SequenceNumber(maxSequenceNr))
      case SnapshotSelectionCriteria(maxSequenceNr, maxTimestamp, _, _) =>
        snapshotDao.snapshotForMaxSequenceNrAndMaxTimestamp(
          PersistenceId(persistenceId),
          SequenceNumber(maxSequenceNr),
          maxTimestamp
        )
      case _ => Source.empty
    }
    result.map(_.map(toSelectedSnapshot)).runWith(Sink.head)
  }

  override def saveAsync(metadata: SnapshotMetadata, snapshot: Any): Future[Unit] =
    snapshotDao.save(metadata, snapshot).runWith(Sink.ignore).map(_ => ())

  override def deleteAsync(metadata: SnapshotMetadata): Future[Unit] =
    snapshotDao
      .delete(PersistenceId(metadata.persistenceId), SequenceNumber(metadata.sequenceNr)).map(_ => ()).runWith(
        Sink.ignore
      ).map(_ => ())

  override def deleteAsync(persistenceId: String, criteria: SnapshotSelectionCriteria): Future[Unit] = {
    val pid = PersistenceId(persistenceId)
    criteria match {
      case SnapshotSelectionCriteria(Long.MaxValue, Long.MaxValue, _, _) =>
        snapshotDao.deleteAllSnapshots(pid).runWith(Sink.ignore).map(_ => ())
      case SnapshotSelectionCriteria(Long.MaxValue, maxTimestamp, _, _) =>
        snapshotDao.deleteUpToMaxTimestamp(pid, maxTimestamp).runWith(Sink.ignore).map(_ => ())
      case SnapshotSelectionCriteria(maxSequenceNr, Long.MaxValue, _, _) =>
        snapshotDao
          .deleteUpToMaxSequenceNr(pid, SequenceNumber(maxSequenceNr)).runWith(Sink.ignore).map(_ => ())
      case SnapshotSelectionCriteria(maxSequenceNr, maxTimestamp, _, _) =>
        snapshotDao
          .deleteUpToMaxSequenceNrAndMaxTimestamp(pid, SequenceNumber(maxSequenceNr), maxTimestamp).runWith(
            Sink.ignore
          ).map(_ => ())
      case _ => Future.successful(())
    }
  }
}
Example 21
Source File: JacksonJsonSerializerTest.scala From akka-tools with MIT License
package no.nextgentel.oss.akkatools.serializing

import akka.actor.ActorSystem
import akka.serialization.SerializationExtension
import com.fasterxml.jackson.annotation.JsonTypeInfo
import com.fasterxml.jackson.databind.{ObjectMapper, SerializationFeature}
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import com.typesafe.config.{Config, ConfigFactory}
import org.scalatest.{FunSuite, Matchers}

class JacksonJsonSerializerTest extends FunSuite with Matchers {

  val objectMapper = new ObjectMapper()
  objectMapper.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false)
  objectMapper.registerModule(new DefaultScalaModule)

  test("serializer") {
    JacksonJsonSerializer.setObjectMapper(objectMapper)
    val serializer = new JacksonJsonSerializer()
    val a = Animal("our cat", 12, Cat("black", true))
    val bytes = serializer.toBinary(a)
    val ar = serializer.fromBinary(bytes, classOf[Animal]).asInstanceOf[Animal]
    assert(a == ar)
  }

  test("Registering the serializer works") {
    JacksonJsonSerializer.setObjectMapper(objectMapper)
    val system = ActorSystem("JacksonJsonSerializerTest", ConfigFactory.load("akka-tools-json-serializing.conf"))

    val serialization = SerializationExtension.get(system)
    assert(classOf[JacksonJsonSerializer] == serialization.serializerFor(classOf[Animal]).getClass)

    system.terminate()
  }

  test("DepricatedTypeWithMigrationInfo") {
    JacksonJsonSerializer.setObjectMapper(objectMapper)
    val serializer = new JacksonJsonSerializer()
    val bytes = serializer.toBinary(OldType("12"))
    assert(NewType(12) == serializer.fromBinary(bytes, classOf[OldType]))
  }

  test("verifySerialization - no error") {
    JacksonJsonSerializer.setObjectMapper(objectMapper)
    JacksonJsonSerializer.setVerifySerialization(true)
    val serializer = new JacksonJsonSerializer()
    val a = Animal("our cat", 12, Cat("black", true))
    val ow = ObjectWrapperWithTypeInfo(a)
    serializer.toBinary(ow)
  }

  test("verifySerialization - with error") {
    JacksonJsonSerializer.setObjectMapper(objectMapper)
    JacksonJsonSerializer.setVerifySerialization(true)
    val serializer = new JacksonJsonSerializer()
    val a = Animal("our cat", 12, Cat("black", true))
    val ow = ObjectWrapperWithoutTypeInfo(a)
    intercept[JacksonJsonSerializerVerificationFailed] {
      serializer.toBinary(ow)
    }
  }

  test("verifySerialization - disabled") {
    JacksonJsonSerializer.setObjectMapper(objectMapper)
    JacksonJsonSerializer.setVerifySerialization(true)
    val serializer = new JacksonJsonSerializer()
    val a = Animal("our cat", 12, Cat("black", true))
    val ow = ObjectWrapperWithoutTypeInfoOverrided(a)
    serializer.toBinary(ow)
  }
}

case class Animal(name: String, age: Int, t: Cat) extends JacksonJsonSerializable

case class Cat(color: String, tail: Boolean)

case class OldType(s: String) extends DepricatedTypeWithMigrationInfo {
  override def convertToMigratedType(): AnyRef = NewType(s.toInt)
}

case class NewType(i: Int)

case class ObjectWrapperWithTypeInfo(
    @JsonTypeInfo(use = JsonTypeInfo.Id.CLASS, include = JsonTypeInfo.As.PROPERTY, property = "@any_class") any: AnyRef)

case class ObjectWrapperWithoutTypeInfo(any: AnyRef)

case class ObjectWrapperWithoutTypeInfoOverrided(any: AnyRef) extends JacksonJsonSerializableButNotDeserializable