akka.serialization.Serialization Scala Examples

The following examples show how to use akka.serialization.Serialization. Each example is taken from an open-source project; the source file and originating project are noted in the heading above it.
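
Before the examples, here is a minimal sketch of the usual entry point: each example below either receives a Serialization instance directly or obtains one through the SerializationExtension. (The system name and payload are illustrative.)

import akka.actor.ActorSystem
import akka.serialization.{ Serialization, SerializationExtension }

object SerializationRoundTrip extends App {
  val system = ActorSystem("example")
  // The extension caches one Serialization instance per actor system
  val serialization: Serialization = SerializationExtension(system)

  val original = "hello"
  // serialize picks a serializer by the object's type and returns Try[Array[Byte]]
  val bytes: Array[Byte] = serialization.serialize(original).get
  // deserialize returns a Try of the requested class
  val restored: String = serialization.deserialize(bytes, classOf[String]).get
  assert(restored == original)

  system.terminate()
}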
Example 1
Source File: ByteArrayJournalSerializer.scala    From akka-persistence-dynamodb   with Apache License 2.0
package com.github.j5ik2o.akka.persistence.dynamodb.serialization

import akka.persistence.PersistentRepr
import akka.serialization.Serialization
import com.github.j5ik2o.akka.persistence.dynamodb.journal.JournalRow
import com.github.j5ik2o.akka.persistence.dynamodb.model.{ PersistenceId, SequenceNumber }

import scala.util.{ Failure, Success }

class ByteArrayJournalSerializer(serialization: Serialization, separator: String)
    extends FlowPersistentReprSerializer[JournalRow] {

  override def serialize(
      persistentRepr: PersistentRepr,
      tags: Set[String],
      index: Option[Int]
  ): Either[Throwable, JournalRow] = {
    serialization
      .serialize(persistentRepr)
      .map(
        JournalRow(
          PersistenceId(persistentRepr.persistenceId),
          SequenceNumber(persistentRepr.sequenceNr),
          persistentRepr.deleted,
          _,
          System.currentTimeMillis(),
          encodeTags(tags, separator)
        )
      ) match {
      case Success(value) => Right(value)
      case Failure(ex)    => Left(ex)
    }
  }

  override def deserialize(journalRow: JournalRow): Either[Throwable, (PersistentRepr, Set[String], Long)] = {
    serialization
      .deserialize(journalRow.message, classOf[PersistentRepr])
      .map((_, decodeTags(journalRow.tags, separator), journalRow.ordering)) match {
      case Success(value) => Right(value)
      case Failure(ex)    => Left(ex)
    }
  }
} 
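A side note on the pattern above: the closing Success/Failure match simply converts a Try into an Either. On Scala 2.12 and later the same conversion is available directly as Try#toEither, so deserialize, for example, could be written as:

override def deserialize(journalRow: JournalRow): Either[Throwable, (PersistentRepr, Set[String], Long)] =
  serialization
    .deserialize(journalRow.message, classOf[PersistentRepr])
    .map((_, decodeTags(journalRow.tags, separator), journalRow.ordering))
    .toEither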
Example 2
Source File: ByteArraySnapshotSerializer.scala    From akka-persistence-dynamodb   with Apache License 2.0
package com.github.j5ik2o.akka.persistence.dynamodb.serialization

import akka.persistence.SnapshotMetadata
import akka.persistence.serialization.Snapshot
import akka.serialization.Serialization
import com.github.j5ik2o.akka.persistence.dynamodb.model.{ PersistenceId, SequenceNumber }
import com.github.j5ik2o.akka.persistence.dynamodb.snapshot.dao.SnapshotRow

import scala.util.{ Failure, Success }

class ByteArraySnapshotSerializer(serialization: Serialization) extends SnapshotSerializer[SnapshotRow] {

  override def serialize(
      metadata: SnapshotMetadata,
      snapshot: Any
  ): Either[Throwable, SnapshotRow] = {
    serialization
      .serialize(Snapshot(snapshot))
      .map(
        SnapshotRow(PersistenceId(metadata.persistenceId), SequenceNumber(metadata.sequenceNr), metadata.timestamp, _)
      ) match {
      case Success(value) => Right(value)
      case Failure(ex)    => Left(ex)
    }
  }

  override def deserialize(snapshotRow: SnapshotRow): Either[Throwable, (SnapshotMetadata, Any)] = {
    serialization
      .deserialize(snapshotRow.snapshot, classOf[Snapshot])
      .map(snapshot => {
        val snapshotMetadata =
          SnapshotMetadata(snapshotRow.persistenceId.asString, snapshotRow.sequenceNumber.value, snapshotRow.created)
        (snapshotMetadata, snapshot.data)
      }) match {
      case Success(value) => Right(value)
      case Failure(ex)    => Left(ex)
    }
  }

} 
Example 3
Source File: ByteArraySnapshotSerializer.scala    From akka-persistence-dynamodb   with Apache License 2.0
package com.github.j5ik2o.akka.persistence.dynamodb.snapshot.dao

import akka.persistence.SnapshotMetadata
import akka.persistence.serialization.Snapshot
import akka.serialization.Serialization
import com.github.j5ik2o.akka.persistence.dynamodb.model.{ PersistenceId, SequenceNumber }

import scala.util.{ Failure, Success }

trait SnapshotSerializer[T] {
  def serialize(metadata: SnapshotMetadata, snapshot: Any): Either[Throwable, T]

  def deserialize(t: T): Either[Throwable, (SnapshotMetadata, Any)]
}

class ByteArraySnapshotSerializer(serialization: Serialization) extends SnapshotSerializer[SnapshotRow] {

  override def serialize(
      metadata: SnapshotMetadata,
      snapshot: Any
  ): Either[Throwable, SnapshotRow] = {
    serialization
      .serialize(Snapshot(snapshot))
      .map(
        SnapshotRow(PersistenceId(metadata.persistenceId), SequenceNumber(metadata.sequenceNr), metadata.timestamp, _)
      ) match {
      case Success(value) => Right(value)
      case Failure(ex)    => Left(ex)
    }
  }

  override def deserialize(snapshotRow: SnapshotRow): Either[Throwable, (SnapshotMetadata, Any)] = {
    serialization
      .deserialize(snapshotRow.snapshot, classOf[Snapshot])
      .map(snapshot => {
        val snapshotMetadata =
          SnapshotMetadata(snapshotRow.persistenceId.asString, snapshotRow.sequenceNumber.value, snapshotRow.created)
        (snapshotMetadata, snapshot.data)
      }) match {
      case Success(value) => Right(value)
      case Failure(ex)    => Left(ex)
    }
  }
} 
Example 4
Source File: SerializationContext.scala    From eventuate   with Apache License 2.0
package com.rbmhtechnology.eventuate.serializer

import akka.actor._
import akka.serialization.Serialization
import akka.serialization.SerializationExtension

import com.typesafe.config.Config

import scala.collection.immutable.Seq
import scala.concurrent.Await
import scala.concurrent.duration._

object SerializationContext {
  class SenderActor(receiver: ActorSelection) extends Actor {
    def receive = {
      case msg => receiver ! msg
    }
  }

  class ReceiverActor(probe: ActorRef) extends Actor {
    def receive = {
      case msg => probe ! msg
    }
  }
}

class SerializationContext(configs: Config*) {
  val systems: Seq[ActorSystem] = configs.toList.zipWithIndex.map {
    case (config, idx) => ActorSystem(s"test-system-${idx + 1}", config)
  }

  val serializations: Seq[Serialization] =
    systems.map(SerializationExtension(_))

  val ports: Seq[Int] =
    systems.map(port)

  def port(system: ActorSystem): Int =
    system.asInstanceOf[ExtendedActorSystem].provider.getDefaultAddress.port.get

  def shutdown(): Unit =
    systems.foreach(system => Await.result(system.terminate(), 10.seconds))
} 
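A quick illustration of how the context might be driven from a test (the config values here are placeholders; a real serializer test would pass differently configured serializer bindings):

import com.typesafe.config.ConfigFactory

val config  = ConfigFactory.load()
val context = new SerializationContext(config, config) // spins up two test systems
val bytes   = context.serializations.head.serialize("some event").get
context.shutdown()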
Example 5
Source File: EventProducerSpec.scala    From reactive-kafka-microservice-template   with Apache License 2.0
package akka.kafka

import java.util.Date

import akka.Done
import akka.actor.ActorSystem
import akka.serialization.Serialization
import akka.stream.QueueOfferResult
import akka.stream.QueueOfferResult.Enqueued
import akka.stream.scaladsl.SourceQueueWithComplete
import akka.testkit.{DefaultTimeout, EventFilter, ImplicitSender, TestActorRef, TestKit, TestProbe}
import com.omearac.producers.EventProducer
import com.omearac.shared.AkkaStreams
import com.omearac.shared.EventMessages.{ActivatedProducerStream, MessagesPublished}
import com.omearac.shared.KafkaMessages.ExampleAppEvent
import com.typesafe.config.ConfigFactory
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}

import scala.concurrent.Future


class EventProducerSpec extends TestKit(ActorSystem("EventProducerSpec", ConfigFactory.parseString("""
    akka.loggers = ["akka.testkit.TestEventListener"] """)))
    with DefaultTimeout with ImplicitSender
    with WordSpecLike with Matchers with BeforeAndAfterAll
    with AkkaStreams {

    val testProducer = TestActorRef(new EventProducer)
    val producerActor = testProducer.underlyingActor
    val mockProducerStream: SourceQueueWithComplete[Any] = new SourceQueueWithComplete[Any] {
        override def complete(): Unit = println("complete")

        override def fail(ex: Throwable): Unit = println("fail")

        override def offer(elem: Any): Future[QueueOfferResult] = Future{Enqueued}

        override def watchCompletion(): Future[Done] = Future{Done}
    }

    override def afterAll: Unit = {
        shutdown()
    }

    // Create a test event listener for the local message bus
    val testEventListener = TestProbe()
    system.eventStream.subscribe(testEventListener.ref, classOf[ExampleAppEvent])


    "Sending ActivatedProducerStream to EventProducer in receive state" should {
        "save the stream ref and change state to producing " in {
            testProducer ! ActivatedProducerStream(mockProducerStream, "TestTopic")
            Thread.sleep(500)
            producerActor.producerStream should be(mockProducerStream)
            EventFilter.error(message = "EventProducer got the unknown message while producing: testMessage", occurrences = 1) intercept {
                testProducer ! "testMessage"
            }
        }
    }

    "Sending ExampleAppEvent to system bus while EventProducer is in publishEvent state" should {
        "offer the ExampleAppEvent to the stream " in {
            val producingState = producerActor.publishEvent
            producerActor.context.become(producingState)
            producerActor.producerStream = mockProducerStream
            val dateFormat = new java.text.SimpleDateFormat("dd:MM:yy:HH:mm:ss.SSS")
            lazy val timetag = dateFormat.format(new Date(System.currentTimeMillis()))
            val eventMsg = MessagesPublished(5)
            val testMessage = ExampleAppEvent(timetag, Serialization.serializedActorPath(self), eventMsg.toString)
            system.eventStream.publish(testMessage)
            testEventListener.expectMsgPF() {
                case ExampleAppEvent(_, _, m) => if (m == eventMsg.toString) () else fail()
            }
        }
    }
 } 
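The only Serialization usage in this spec is the static helper Serialization.serializedActorPath(self), which renders the actor's full path, including its address, as a String, so the reference can be embedded in a message and resolved again later.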
Example 6
Source File: FileSystemPersistenceEngine.scala    From SparkCore   with Apache License 2.0
package org.apache.spark.deploy.master

import java.io._

import scala.reflect.ClassTag

import akka.serialization.Serialization

import org.apache.spark.Logging



private[spark] class FileSystemPersistenceEngine(
    val dir: String,
    val serialization: Serialization)
  extends PersistenceEngine with Logging {

  new File(dir).mkdir()

  override def persist(name: String, obj: Object): Unit = {
    serializeIntoFile(new File(dir + File.separator + name), obj)
  }

  override def unpersist(name: String): Unit = {
    new File(dir + File.separator + name).delete()
  }

  override def read[T: ClassTag](prefix: String) = {
    val files = new File(dir).listFiles().filter(_.getName.startsWith(prefix))
    files.map(deserializeFromFile[T])
  }

  private def serializeIntoFile(file: File, value: AnyRef) {
    val created = file.createNewFile()
    if (!created) { throw new IllegalStateException("Could not create file: " + file) }
    val serializer = serialization.findSerializerFor(value)
    val serialized = serializer.toBinary(value)
    val out = new FileOutputStream(file)
    try {
      out.write(serialized)
    } finally {
      out.close()
    }
  }

  private def deserializeFromFile[T](file: File)(implicit m: ClassTag[T]): T = {
    val fileData = new Array[Byte](file.length().asInstanceOf[Int])
    val dis = new DataInputStream(new FileInputStream(file))
    try {
      dis.readFully(fileData)
    } finally {
      dis.close()
    }
    val clazz = m.runtimeClass.asInstanceOf[Class[T]]
    val serializer = serialization.serializerFor(clazz)
    serializer.fromBinary(fileData).asInstanceOf[T]
  }

} 
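Note the asymmetry in the engine above: writes resolve a serializer from the value itself (findSerializerFor), while reads resolve one from the target class (serializerFor), so the caller must know the stored type up front. A hypothetical round trip, assuming an actor system whose serialization handles the type (the directory, key name, and WorkerInfo payload are illustrative):

val serialization = SerializationExtension(system)
val engine = new FileSystemPersistenceEngine("/tmp/master-state", serialization)
engine.persist("worker_1", workerInfo)              // workerInfo: some serializable state
val recovered = engine.read[WorkerInfo]("worker_")  // every entry whose name starts with "worker_"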
Example 7
Source File: ZooKeeperPersistenceEngine.scala    From SparkCore   with Apache License 2.0
package org.apache.spark.deploy.master

import akka.serialization.Serialization

import scala.collection.JavaConversions._
import scala.reflect.ClassTag

import org.apache.curator.framework.CuratorFramework
import org.apache.zookeeper.CreateMode

import org.apache.spark.{Logging, SparkConf}


private[spark] class ZooKeeperPersistenceEngine(conf: SparkConf, val serialization: Serialization)
  extends PersistenceEngine
  with Logging
{
  val WORKING_DIR = conf.get("spark.deploy.zookeeper.dir", "/spark") + "/master_status"
  val zk: CuratorFramework = SparkCuratorUtil.newClient(conf)

  SparkCuratorUtil.mkdir(zk, WORKING_DIR)


  override def persist(name: String, obj: Object): Unit = {
    serializeIntoFile(WORKING_DIR + "/" + name, obj)
  }

  override def unpersist(name: String): Unit = {
    zk.delete().forPath(WORKING_DIR + "/" + name)
  }

  override def read[T: ClassTag](prefix: String) = {
    val file = zk.getChildren.forPath(WORKING_DIR).filter(_.startsWith(prefix))
    file.map(deserializeFromFile[T]).flatten
  }

  override def close() {
    zk.close()
  }

  private def serializeIntoFile(path: String, value: AnyRef) {
    val serializer = serialization.findSerializerFor(value)
    val serialized = serializer.toBinary(value)
    zk.create().withMode(CreateMode.PERSISTENT).forPath(path, serialized)
  }

  def deserializeFromFile[T](filename: String)(implicit m: ClassTag[T]): Option[T] = {
    val fileData = zk.getData().forPath(WORKING_DIR + "/" + filename)
    val clazz = m.runtimeClass.asInstanceOf[Class[T]]
    val serializer = serialization.serializerFor(clazz)
    try {
      Some(serializer.fromBinary(fileData).asInstanceOf[T])
    } catch {
      case e: Exception => {
        logWarning("Exception while reading persisted file, deleting", e)
        zk.delete().forPath(WORKING_DIR + "/" + filename)
        None
      }
    }
  }
} 
Example 8
Source File: ResolverCacheSpec.scala    From nexus   with Apache License 2.0
package ch.epfl.bluebrain.nexus.kg.cache

import akka.actor.ExtendedActorSystem
import akka.serialization.Serialization
import akka.testkit._
import ch.epfl.bluebrain.nexus.commons.test.ActorSystemFixture
import ch.epfl.bluebrain.nexus.iam.types.Identity.Anonymous
import ch.epfl.bluebrain.nexus.kg.TestHelper
import ch.epfl.bluebrain.nexus.kg.config.KgConfig._
import ch.epfl.bluebrain.nexus.kg.resolve.Resolver._
import ch.epfl.bluebrain.nexus.kg.resources.ProjectIdentifier.{ProjectLabel, ProjectRef}
import ch.epfl.bluebrain.nexus.service.config.{ServiceConfig, Settings}
import monix.eval.Task
import monix.execution.Scheduler.Implicits.global
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.matchers.should.Matchers
import org.scalatest.{Inspectors, TryValues}

import scala.concurrent.duration._

//noinspection NameBooleanParameters
class ResolverCacheSpec
    extends ActorSystemFixture("ResolverCacheSpec", true)
    with Matchers
    with Inspectors
    with ScalaFutures
    with TryValues
    with TestHelper {

  implicit override def patienceConfig: PatienceConfig = PatienceConfig(3.seconds.dilated, 5.milliseconds)

  implicit private val appConfig: ServiceConfig = Settings(system).serviceConfig
  implicit private val keyValueStoreCfg         = appConfig.kg.keyValueStore.keyValueStoreConfig

  val ref1 = ProjectRef(genUUID)
  val ref2 = ProjectRef(genUUID)

  val label1 = ProjectLabel(genString(), genString())
  val label2 = ProjectLabel(genString(), genString())

  val resolver: InProjectResolver       = InProjectResolver(ref1, genIri, 1L, false, 10)
  val crossRefs: CrossProjectResolver   =
    CrossProjectResolver(Set(genIri), List(ref1, ref2), Set(Anonymous), ref1, genIri, 0L, false, 1)
  val crossLabels: CrossProjectResolver =
    CrossProjectResolver(Set(genIri), List(label1, label2), Set(Anonymous), ref1, genIri, 0L, false, 1)

  val resolverProj1: Set[InProjectResolver] = List.fill(5)(resolver.copy(id = genIri)).toSet
  val resolverProj2: Set[InProjectResolver] = List.fill(5)(resolver.copy(id = genIri, ref = ref2)).toSet

  private val cache = ResolverCache[Task]

  "ResolverCache" should {

    "index resolvers" in {
      val list = (resolverProj1 ++ resolverProj2).toList
      forAll(list) { resolver =>
        cache.put(resolver).runToFuture.futureValue
        cache.get(resolver.ref, resolver.id).runToFuture.futureValue shouldEqual Some(resolver)
      }
    }

    "list resolvers" in {
      cache.get(ref1).runToFuture.futureValue should contain theSameElementsAs resolverProj1
      cache.get(ref2).runToFuture.futureValue should contain theSameElementsAs resolverProj2
    }

    "deprecate resolver" in {
      val resolver = resolverProj1.head
      cache.put(resolver.copy(deprecated = true, rev = 2L)).runToFuture.futureValue
      cache.get(resolver.ref, resolver.id).runToFuture.futureValue shouldEqual None
      cache.get(ref1).runToFuture.futureValue should contain theSameElementsAs resolverProj1.filterNot(_ == resolver)
    }

    "serialize cross project resolver" when {
      val serialization = new Serialization(system.asInstanceOf[ExtendedActorSystem])
      "parameterized with ProjectRef" in {
        val bytes = serialization.serialize(crossRefs).success.value
        val out   = serialization.deserialize(bytes, classOf[CrossProjectResolver]).success.value
        out shouldEqual crossRefs
      }
      "parameterized with ProjectLabel" in {
        val bytes = serialization.serialize(crossLabels).success.value
        val out   = serialization.deserialize(bytes, classOf[CrossProjectResolver]).success.value
        out shouldEqual crossLabels
      }
    }
  }
} 
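Note that the spec builds its own Serialization instance with new Serialization(system.asInstanceOf[ExtendedActorSystem]) rather than calling SerializationExtension(system); both read the same serializer bindings from the system's configuration, the extension merely caches a single instance per actor system.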
Example 9
Source File: JWKSetSerializerSpec.scala    From nexus   with Apache License 2.0
package ch.epfl.bluebrain.nexus.iam.io

import akka.actor.{ActorSystem, ExtendedActorSystem}
import akka.serialization.Serialization
import akka.testkit.TestKit
import com.nimbusds.jose.jwk.JWKSet
import com.typesafe.config.ConfigFactory
import org.scalatest.TryValues
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike

class JWKSetSerializerSpec
    extends TestKit(ActorSystem("JWKSetSerializerSpec", ConfigFactory.load("test.conf")))
    with AnyWordSpecLike
    with Matchers
    with TryValues {

  private val serialization = new Serialization(system.asInstanceOf[ExtendedActorSystem])

  private val json =
    """
      |{
      |  "keys": [
      |    {
      |      "kid": "-JoF9COvvt7UhyhJMC-YlTF6piRlZgQKRQks5sPMKxw",
      |      "kty": "RSA",
      |      "alg": "RS256",
      |      "use": "sig",
      |      "n": "iEk11wBlv0I4pawBSY6ZYCLvwVslfCvjwvg5tIAg9n",
      |      "e": "AQAB"
      |    }
      |  ]
      |}
    """.stripMargin

  private val jwks = JWKSet.parse(json)

  "A JWKSetSerializer" should {

    "serialize and deserialize" in {
      val bytes = serialization.serialize(jwks).success.value
      val obj   = serialization.deserialize(bytes, classOf[JWKSet]).success.value
      jwks.toJSONObject shouldEqual obj.toJSONObject // JWKSet doesn't have a proper equals method
    }
  }
} 
Example 10
Source File: TestTagWriter.scala    From akka-persistence-cassandra   with Apache License 2.0
package akka.persistence.cassandra.query

import java.nio.ByteBuffer
import java.time.{ LocalDateTime, ZoneOffset }
import java.util.UUID

import akka.actor.ActorSystem
import akka.persistence.PersistentRepr
import akka.persistence.cassandra.BucketSize
import akka.persistence.cassandra.EventsByTagSettings
import akka.persistence.cassandra.PluginSettings
import akka.persistence.cassandra.formatOffset
import akka.persistence.cassandra.journal._
import akka.serialization.Serialization
import akka.serialization.Serializers
import com.datastax.oss.driver.api.core.CqlSession
import com.datastax.oss.driver.api.core.uuid.Uuids

private[akka] trait TestTagWriter {
  def system: ActorSystem
  def cluster: CqlSession
  val serialization: Serialization
  val settings: PluginSettings
  final def journalSettings: JournalSettings = settings.journalSettings
  final def eventsByTagSettings: EventsByTagSettings = settings.eventsByTagSettings

  lazy val (preparedWriteTagMessage, preparedWriteTagMessageWithMeta) = {
    val writeStatements: CassandraJournalStatements = new CassandraJournalStatements(settings)
    (cluster.prepare(writeStatements.writeTags(false)), cluster.prepare(writeStatements.writeTags(true)))
  }

  def clearAllEvents(): Unit = {
    cluster.execute(s"truncate ${journalSettings.keyspace}.${eventsByTagSettings.tagTable.name}")
  }

  def writeTaggedEvent(
      time: LocalDateTime,
      pr: PersistentRepr,
      tags: Set[String],
      tagPidSequenceNr: Long,
      bucketSize: BucketSize): Unit = {
    val timestamp = time.toInstant(ZoneOffset.UTC).toEpochMilli
    write(pr, tags, tagPidSequenceNr, uuid(timestamp), bucketSize)
  }

  def writeTaggedEvent(
      persistent: PersistentRepr,
      tags: Set[String],
      tagPidSequenceNr: Long,
      bucketSize: BucketSize): Unit = {
    val nowUuid = Uuids.timeBased()
    write(persistent, tags, tagPidSequenceNr, nowUuid, bucketSize)
  }

  def writeTaggedEvent(
      persistent: PersistentRepr,
      tags: Set[String],
      tagPidSequenceNr: Long,
      uuid: UUID,
      bucketSize: BucketSize): Unit =
    write(persistent, tags, tagPidSequenceNr, uuid, bucketSize)

  private def write(
      pr: PersistentRepr,
      tags: Set[String],
      tagPidSequenceNr: Long,
      uuid: UUID,
      bucketSize: BucketSize): Unit = {
    val event = pr.payload.asInstanceOf[AnyRef]
    val serializer = serialization.findSerializerFor(event)
    val serialized = ByteBuffer.wrap(serialization.serialize(event).get)
    val serManifest = Serializers.manifestFor(serializer, pr)
    val timeBucket = TimeBucket(Uuids.unixTimestamp(uuid), bucketSize)

    tags.foreach(tag => {
      val bs = preparedWriteTagMessage
        .bind()
        .setString("tag_name", tag)
        .setLong("timebucket", timeBucket.key)
        .setUuid("timestamp", uuid)
        .setLong("tag_pid_sequence_nr", tagPidSequenceNr)
        .setByteBuffer("event", serialized)
        .setString("event_manifest", pr.manifest)
        .setString("persistence_id", pr.persistenceId)
        .setInt("ser_id", serializer.identifier)
        .setString("ser_manifest", serManifest)
        .setString("writer_uuid", "ManualWrite")
        .setLong("sequence_nr", pr.sequenceNr)
      cluster.execute(bs)
    })

    system.log.debug(
      "Written event: {} Uuid: {} Timebucket: {} TagPidSeqNr: {}",
      pr.payload,
      formatOffset(uuid),
      timeBucket,
      tagPidSequenceNr)
  }
} 
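The write above stores the serializer id and manifest alongside the payload bytes; together these are exactly what Serialization needs to decode the bytes later without knowing the class up front. A minimal sketch of that reverse path (the payload value is illustrative):

val event: AnyRef = "some payload"
val serializer = serialization.findSerializerFor(event)
val bytes = serializer.toBinary(event)
val manifest = Serializers.manifestFor(serializer, event)
val restored = serialization.deserialize(bytes, serializer.identifier, manifest).get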
Example 11
Source File: FileSystemPersistenceEngine.scala    From iolap   with Apache License 2.0
package org.apache.spark.deploy.master

import java.io._

import scala.reflect.ClassTag

import akka.serialization.Serialization

import org.apache.spark.Logging
import org.apache.spark.util.Utils



private[master] class FileSystemPersistenceEngine(
    val dir: String,
    val serialization: Serialization)
  extends PersistenceEngine with Logging {

  new File(dir).mkdir()

  override def persist(name: String, obj: Object): Unit = {
    serializeIntoFile(new File(dir + File.separator + name), obj)
  }

  override def unpersist(name: String): Unit = {
    new File(dir + File.separator + name).delete()
  }

  override def read[T: ClassTag](prefix: String): Seq[T] = {
    val files = new File(dir).listFiles().filter(_.getName.startsWith(prefix))
    files.map(deserializeFromFile[T])
  }

  private def serializeIntoFile(file: File, value: AnyRef) {
    val created = file.createNewFile()
    if (!created) { throw new IllegalStateException("Could not create file: " + file) }
    val serializer = serialization.findSerializerFor(value)
    val serialized = serializer.toBinary(value)
    val out = new FileOutputStream(file)
    Utils.tryWithSafeFinally {
      out.write(serialized)
    } {
      out.close()
    }
  }

  private def deserializeFromFile[T](file: File)(implicit m: ClassTag[T]): T = {
    val fileData = new Array[Byte](file.length().asInstanceOf[Int])
    val dis = new DataInputStream(new FileInputStream(file))
    try {
      dis.readFully(fileData)
    } finally {
      dis.close()
    }
    val clazz = m.runtimeClass.asInstanceOf[Class[T]]
    val serializer = serialization.serializerFor(clazz)
    serializer.fromBinary(fileData).asInstanceOf[T]
  }

} 
Example 12
Source File: ZooKeeperPersistenceEngine.scala    From iolap   with Apache License 2.0
package org.apache.spark.deploy.master

import akka.serialization.Serialization

import scala.collection.JavaConversions._
import scala.reflect.ClassTag

import org.apache.curator.framework.CuratorFramework
import org.apache.zookeeper.CreateMode

import org.apache.spark.{Logging, SparkConf}
import org.apache.spark.deploy.SparkCuratorUtil


private[master] class ZooKeeperPersistenceEngine(conf: SparkConf, val serialization: Serialization)
  extends PersistenceEngine
  with Logging {

  private val WORKING_DIR = conf.get("spark.deploy.zookeeper.dir", "/spark") + "/master_status"
  private val zk: CuratorFramework = SparkCuratorUtil.newClient(conf)

  SparkCuratorUtil.mkdir(zk, WORKING_DIR)


  override def persist(name: String, obj: Object): Unit = {
    serializeIntoFile(WORKING_DIR + "/" + name, obj)
  }

  override def unpersist(name: String): Unit = {
    zk.delete().forPath(WORKING_DIR + "/" + name)
  }

  override def read[T: ClassTag](prefix: String): Seq[T] = {
    val file = zk.getChildren.forPath(WORKING_DIR).filter(_.startsWith(prefix))
    file.map(deserializeFromFile[T]).flatten
  }

  override def close() {
    zk.close()
  }

  private def serializeIntoFile(path: String, value: AnyRef) {
    val serializer = serialization.findSerializerFor(value)
    val serialized = serializer.toBinary(value)
    zk.create().withMode(CreateMode.PERSISTENT).forPath(path, serialized)
  }

  private def deserializeFromFile[T](filename: String)(implicit m: ClassTag[T]): Option[T] = {
    val fileData = zk.getData().forPath(WORKING_DIR + "/" + filename)
    val clazz = m.runtimeClass.asInstanceOf[Class[T]]
    val serializer = serialization.serializerFor(clazz)
    try {
      Some(serializer.fromBinary(fileData).asInstanceOf[T])
    } catch {
      case e: Exception => {
        logWarning("Exception while reading persisted file, deleting", e)
        zk.delete().forPath(WORKING_DIR + "/" + filename)
        None
      }
    }
  }
} 
Example 13
Source File: CustomRecoveryModeFactory.scala    From iolap   with Apache License 2.0
// This file is placed in different package to make sure all of these components work well
// when they are outside of org.apache.spark.
package other.supplier

import scala.collection.mutable
import scala.reflect.ClassTag

import akka.serialization.Serialization

import org.apache.spark.SparkConf
import org.apache.spark.deploy.master._

class CustomRecoveryModeFactory(
  conf: SparkConf,
  serialization: Serialization
) extends StandaloneRecoveryModeFactory(conf, serialization) {

  CustomRecoveryModeFactory.instantiationAttempts += 1

  // PersistenceEngine defines how persistent master data (worker, driver info) is handled for recovery
  override def createPersistenceEngine(): PersistenceEngine =
    new CustomPersistenceEngine(serialization)

  // LeaderElectionAgent decides which master instance gets elected as leader
  override def createLeaderElectionAgent(master: LeaderElectable): LeaderElectionAgent =
    new CustomLeaderElectionAgent(master)
}

object CustomRecoveryModeFactory {
  @volatile var instantiationAttempts = 0
}

class CustomPersistenceEngine(serialization: Serialization) extends PersistenceEngine {
  val data = mutable.HashMap[String, Array[Byte]]()

  CustomPersistenceEngine.lastInstance = Some(this)

  override def persist(name: String, obj: Object): Unit = {
    CustomPersistenceEngine.persistAttempts += 1
    data += name -> serialization.serialize(obj).get
  }

  override def unpersist(name: String): Unit = {
    CustomPersistenceEngine.unpersistAttempts += 1
    data -= name
  }

  override def read[T: ClassTag](prefix: String): Seq[T] = {
    CustomPersistenceEngine.readAttempts += 1
    val clazz = implicitly[ClassTag[T]].runtimeClass.asInstanceOf[Class[T]]
    val results = for ((name, bytes) <- data; if name.startsWith(prefix))
      yield serialization.deserialize(bytes, clazz)

    results.find(_.isFailure).foreach {
      case util.Failure(cause) => throw new RuntimeException(cause)
    }

    results.flatMap(_.toOption).toSeq
  }
}

object CustomPersistenceEngine {
  @volatile var persistAttempts = 0
  @volatile var unpersistAttempts = 0
  @volatile var readAttempts = 0

  @volatile var lastInstance: Option[CustomPersistenceEngine] = None
}

class CustomLeaderElectionAgent(val masterActor: LeaderElectable) extends LeaderElectionAgent {
  masterActor.electedLeader()
} 
Example 14
Source File: JWKSetSerializerSpec.scala    From nexus-iam   with Apache License 2.0
package ch.epfl.bluebrain.nexus.iam.io

import akka.actor.{ActorSystem, ExtendedActorSystem}
import akka.serialization.Serialization
import akka.testkit.TestKit
import com.nimbusds.jose.jwk.JWKSet
import com.typesafe.config.ConfigFactory
import org.scalatest.TryValues
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike

class JWKSetSerializerSpec
    extends TestKit(ActorSystem("JWKSetSerializerSpec", ConfigFactory.load("akka-test.conf")))
    with AnyWordSpecLike
    with Matchers
    with TryValues {

  private val serialization = new Serialization(system.asInstanceOf[ExtendedActorSystem])

  private val json =
    """
      |{
      |  "keys": [
      |    {
      |      "kid": "-JoF9COvvt7UhyhJMC-YlTF6piRlZgQKRQks5sPMKxw",
      |      "kty": "RSA",
      |      "alg": "RS256",
      |      "use": "sig",
      |      "n": "iEk11wBlv0I4pawBSY6ZYCLvwVslfCvjwvg5tIAg9n",
      |      "e": "AQAB"
      |    }
      |  ]
      |}
    """.stripMargin

  private val jwks = JWKSet.parse(json)

  "A JWKSetSerializer" should {

    "serialize and deserialize" in {
      val bytes = serialization.serialize(jwks).success.value
      val obj   = serialization.deserialize(bytes, classOf[JWKSet]).success.value
      jwks.toJSONObject shouldEqual obj.toJSONObject // JWKSet doesn't have a proper equals method
    }
  }
} 
Example 15
Source File: ResolverCacheSpec.scala    From nexus-kg   with Apache License 2.0
package ch.epfl.bluebrain.nexus.kg.cache

import akka.actor.ExtendedActorSystem
import akka.serialization.Serialization
import akka.testkit._
import ch.epfl.bluebrain.nexus.commons.test.ActorSystemFixture
import ch.epfl.bluebrain.nexus.iam.client.types.Identity.Anonymous
import ch.epfl.bluebrain.nexus.kg.TestHelper
import ch.epfl.bluebrain.nexus.kg.config.AppConfig._
import ch.epfl.bluebrain.nexus.kg.config.{AppConfig, Settings}
import ch.epfl.bluebrain.nexus.kg.resolve.Resolver._
import ch.epfl.bluebrain.nexus.kg.resources.ProjectIdentifier.{ProjectLabel, ProjectRef}
import monix.eval.Task
import monix.execution.Scheduler.Implicits.global
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.matchers.should.Matchers
import org.scalatest.{Inspectors, TryValues}

import scala.concurrent.duration._

//noinspection NameBooleanParameters
class ResolverCacheSpec
    extends ActorSystemFixture("ResolverCacheSpec", true)
    with Matchers
    with Inspectors
    with ScalaFutures
    with TryValues
    with TestHelper {

  override implicit def patienceConfig: PatienceConfig = PatienceConfig(3.seconds.dilated, 5.milliseconds)

  private implicit val appConfig: AppConfig = Settings(system).appConfig

  val ref1 = ProjectRef(genUUID)
  val ref2 = ProjectRef(genUUID)

  val label1 = ProjectLabel(genString(), genString())
  val label2 = ProjectLabel(genString(), genString())

  val resolver: InProjectResolver = InProjectResolver(ref1, genIri, 1L, false, 10)
  val crossRefs: CrossProjectResolver =
    CrossProjectResolver(Set(genIri), List(ref1, ref2), Set(Anonymous), ref1, genIri, 0L, false, 1)
  val crossLabels: CrossProjectResolver =
    CrossProjectResolver(Set(genIri), List(label1, label2), Set(Anonymous), ref1, genIri, 0L, false, 1)

  val resolverProj1: Set[InProjectResolver] = List.fill(5)(resolver.copy(id = genIri)).toSet
  val resolverProj2: Set[InProjectResolver] = List.fill(5)(resolver.copy(id = genIri, ref = ref2)).toSet

  private val cache = ResolverCache[Task]

  "ResolverCache" should {

    "index resolvers" in {
      val list = (resolverProj1 ++ resolverProj2).toList
      forAll(list) { resolver =>
        cache.put(resolver).runToFuture.futureValue
        cache.get(resolver.ref, resolver.id).runToFuture.futureValue shouldEqual Some(resolver)
      }
    }

    "list resolvers" in {
      cache.get(ref1).runToFuture.futureValue should contain theSameElementsAs resolverProj1
      cache.get(ref2).runToFuture.futureValue should contain theSameElementsAs resolverProj2
    }

    "deprecate resolver" in {
      val resolver = resolverProj1.head
      cache.put(resolver.copy(deprecated = true, rev = 2L)).runToFuture.futureValue
      cache.get(resolver.ref, resolver.id).runToFuture.futureValue shouldEqual None
      cache.get(ref1).runToFuture.futureValue should contain theSameElementsAs resolverProj1.filterNot(_ == resolver)
    }

    "serialize cross project resolver" when {
      val serialization = new Serialization(system.asInstanceOf[ExtendedActorSystem])
      "parameterized with ProjectRef" in {
        val bytes = serialization.serialize(crossRefs).success.value
        val out   = serialization.deserialize(bytes, classOf[CrossProjectResolver]).success.value
        out shouldEqual crossRefs
      }
      "parameterized with ProjectLabel" in {
        val bytes = serialization.serialize(crossLabels).success.value
        val out   = serialization.deserialize(bytes, classOf[CrossProjectResolver]).success.value
        out shouldEqual crossLabels
      }
    }
  }
}