com.esotericsoftware.kryo.Serializer Scala Examples

The following examples show how to use com.esotericsoftware.kryo.Serializer in Scala. Each snippet is taken from an open-source project; the source file, project, and license are noted above each example.
Example 1
Source File: WritableSerializer.scala    From spark-util   with Apache License 2.0
package org.hammerlab.hadoop.kryo

import java.io.{ DataInputStream, DataOutputStream }

import com.esotericsoftware.kryo
import com.esotericsoftware.kryo.io.{ Input, Output }
import com.esotericsoftware.kryo.{ Kryo, Serializer }
import org.apache.hadoop.io.Writable


class WritableSerializer[T <: Writable](ctorArgs: Any*)
  extends kryo.Serializer[T] {
  override def read(kryo: Kryo, input: Input, clz: Class[T]): T = {
    val t = clz.newInstance()
    t.readFields(new DataInputStream(input))
    t
  }

  override def write(kryo: Kryo, output: Output, t: T): Unit = {
    t.write(new DataOutputStream(output))
  }
} 
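A minimal registration sketch (not part of spark-util), assuming a Spark job and a concrete Writable such as org.apache.hadoop.io.LongWritable; the registrator name is illustrative:

import com.esotericsoftware.kryo.Kryo
import org.apache.hadoop.io.LongWritable
import org.apache.spark.serializer.KryoRegistrator

// Illustrative registrator that binds the WritableSerializer above to a concrete Writable type.
class HadoopWritableRegistrator extends KryoRegistrator {
  override def registerClasses(kryo: Kryo): Unit = {
    kryo.register(classOf[LongWritable], new WritableSerializer[LongWritable]())
  }
}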
Example 2
Source File: FlagsSerializer.scala    From spark-bam   with Apache License 2.0
package org.hammerlab.bam.check.full.error

import com.esotericsoftware.kryo.io.{ Input, Output }
import com.esotericsoftware.kryo.{ Kryo, Serializer }

import scala.collection.immutable.BitSet


class FlagsSerializer
  extends Serializer[Flags] {
  override def read(kryo: Kryo, input: Input, clz: Class[Flags]): Flags = {
    kryo
      .readClassAndObject(input)
      .asInstanceOf[(BitSet, Int)]
  }

  override def write(kryo: Kryo, output: Output, flags: Flags): Unit =
    kryo.writeClassAndObject(output, flags: (BitSet, Int))
} 
Example 3
Source File: JodaSerializer.scala    From scio   with Apache License 2.0
package com.spotify.scio.coders.instances.kryo

import com.esotericsoftware.kryo.io.{Input, Output}
import com.esotericsoftware.kryo.{Kryo, Serializer}
import org.joda.time.{DateTime, DateTimeZone, LocalDate, LocalDateTime, LocalTime}
import org.joda.time.chrono.ISOChronology

private[coders] class JodaLocalDateTimeSerializer extends Serializer[LocalDateTime] {
  setImmutable(true)

  def write(kryo: Kryo, output: Output, ldt: LocalDateTime): Unit = {
    output.writeInt(ldt.getYear, false)
    output.writeByte(ldt.getMonthOfYear)
    output.writeByte(ldt.getDayOfMonth)
    // time of day encoded as millis since midnight
    output.writeInt(ldt.getMillisOfDay, false)
  }

  def read(kryo: Kryo, input: Input, tpe: Class[LocalDateTime]): LocalDateTime = {
    val year = input.readInt(false)
    val month = input.readByte().toInt
    val day = input.readByte().toInt
    val millisOfDay = input.readInt(false)
    new LocalDate(year, month, day).toLocalDateTime(LocalTime.fromMillisOfDay(millisOfDay.toLong))
  }
}

private[coders] class JodaDateTimeSerializer extends Serializer[DateTime] {
  setImmutable(true)

  def write(kryo: Kryo, output: Output, dt: DateTime): Unit = {
    output.writeLong(dt.getMillis)
    output.writeString(dt.getZone.getID)
  }

  def read(kryo: Kryo, input: Input, tpe: Class[DateTime]): DateTime = {
    val millis = input.readLong()
    val zone = DateTimeZone.forID(input.readString())
    new DateTime(millis, zone)
  }
} 
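These serializers are package-private to com.spotify.scio.coders; a sketch of how they might be registered on a Kryo instance from inside that package (the registration itself is an assumption, not part of the file above):

import com.esotericsoftware.kryo.Kryo
import org.joda.time.{DateTime, LocalDateTime}

val kryo = new Kryo()
kryo.register(classOf[LocalDateTime], new JodaLocalDateTimeSerializer)
kryo.register(classOf[DateTime], new JodaDateTimeSerializer)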
Example 4
Source File: ModelStateSerializerKryo.scala    From model-serving-tutorial   with Apache License 2.0
package com.lightbend.modelserving.spark

import com.esotericsoftware.kryo.io.{Input, Output}
import com.esotericsoftware.kryo.{Kryo, Serializer}
import com.lightbend.model.winerecord.WineRecord
import com.lightbend.modelserving.model.ModelFactoryResolver
import org.apache.spark.serializer.KryoRegistrator


class ModelStateSerializerKryo extends Serializer[ModelState] {
  // Note: the corresponding read(...) method, which relies on the resolver held by the
  // companion object below, is not included in this excerpt.

  override def write(kryo: Kryo, output: Output, value: ModelState): Unit = {
    val start = System.currentTimeMillis()
    output.writeLong(value.name.length)
    output.write(value.name.getBytes)
    output.writeLong(value.model.getType.value.toLong)
    val bytes = value.model.toBytes
    output.writeLong(bytes.length)
    output.write(bytes)
    println(s"KRYO serialization in ${System.currentTimeMillis() - start} ms")
  }
}

object ModelStateSerializerKryo {

  // Model Factory resolver
  private var resolver : ModelFactoryResolver[WineRecord, Double] = _

  // This method has to be invoked before execution starts
  def setResolver(res : ModelFactoryResolver[WineRecord, Double]) : Unit = resolver = res
  // Ensure that resolver is set
  private def validateResolver() : Unit = if(resolver == null) throw new Exception("Model factory resolver is not set")
}

class ModelStateRegistrator extends KryoRegistrator {
  override def registerClasses(kryo: Kryo) {
    kryo.register(classOf[ModelState], new ModelStateSerializerKryo())
  }
} 
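A wiring sketch (not from the tutorial itself), assuming a Spark job; per the comment above, ModelStateSerializerKryo.setResolver must also be called with a concrete ModelFactoryResolver before execution starts:

import org.apache.spark.SparkConf

// Point Spark at Kryo and at the registrator defined above.
val conf = new SparkConf()
  .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
  .set("spark.kryo.registrator", classOf[ModelStateRegistrator].getName)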
Example 5
Source File: KryoSerializerInit.scala    From nexus-kg   with Apache License 2.0
package ch.epfl.bluebrain.nexus.kg.serializers

import java.nio.file.Path

import com.esotericsoftware.kryo.{Kryo, Serializer}
import com.esotericsoftware.kryo.io.{Input, Output}
import io.altoo.akka.serialization.kryo.DefaultKryoInitializer
import io.altoo.akka.serialization.kryo.serializer.scala.ScalaKryo

class PathSerializer extends Serializer[Path] {

  override def write(kryo: Kryo, output: Output, path: Path): Unit =
    output.writeString(path.toString)

  override def read(kryo: Kryo, input: Input, `type`: Class[Path]): Path =
    Path.of(input.readString())
}

class KryoSerializerInit extends DefaultKryoInitializer {

  override def postInit(kryo: ScalaKryo): Unit = {
    super.postInit(kryo)
    kryo.addDefaultSerializer(classOf[Path], classOf[PathSerializer])
    kryo.register(classOf[Path], new PathSerializer)
    ()
  }
} 
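A round-trip sketch (an assumption, not part of nexus-kg) that exercises PathSerializer directly, without going through Akka:

import java.nio.file.Path

import com.esotericsoftware.kryo.Kryo
import com.esotericsoftware.kryo.io.{Input, Output}

val kryo = new Kryo()
val serializer = new PathSerializer
val output = new Output(64, -1) // small, growable buffer
serializer.write(kryo, output, Path.of("/tmp/example"))
val restored = serializer.read(kryo, new Input(output.toBytes), classOf[Path])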
Example 6
Source File: SparkSqlSerializer.scala    From BigDatalog   with Apache License 2.0
package org.apache.spark.sql.execution

import java.nio.ByteBuffer
import java.util.{HashMap => JavaHashMap}

import scala.reflect.ClassTag
import com.esotericsoftware.kryo.io.{Input, Output}
import com.esotericsoftware.kryo.{Kryo, Serializer}
import com.twitter.chill.ResourcePool
import org.apache.spark.serializer.{KryoSerializer, SerializerInstance}
import org.apache.spark.sql.types.{Decimal, StructField, StructType}
import org.apache.spark.util.MutablePair
import org.apache.spark.{SparkConf, SparkEnv}


//private[sql]
class SparkSqlSerializer(conf: SparkConf) extends KryoSerializer(conf) {
  override def newKryo(): Kryo = {
    val kryo = super.newKryo()
    kryo.setRegistrationRequired(false)
    kryo.register(classOf[MutablePair[_, _]])
    kryo.register(classOf[org.apache.spark.sql.catalyst.expressions.GenericRow])
    kryo.register(classOf[org.apache.spark.sql.catalyst.expressions.GenericInternalRow])
    kryo.register(classOf[org.apache.spark.sql.catalyst.expressions.GenericMutableRow])
    kryo.register(classOf[java.math.BigDecimal], new JavaBigDecimalSerializer)
    kryo.register(classOf[BigDecimal], new ScalaBigDecimalSerializer)

    kryo.register(classOf[Decimal])
    kryo.register(classOf[JavaHashMap[_, _]])

    // APS
    kryo.register(classOf[StructType])
    kryo.register(classOf[StructField])

    kryo.setReferences(false)
    kryo
  }
}

private[execution] class KryoResourcePool(size: Int)
  extends ResourcePool[SerializerInstance](size) {

  val ser: SparkSqlSerializer = {
    val sparkConf = Option(SparkEnv.get).map(_.conf).getOrElse(new SparkConf())
    new SparkSqlSerializer(sparkConf)
  }

  def newInstance(): SerializerInstance = ser.newInstance()
}

//private[sql]
object SparkSqlSerializer {
  @transient lazy val resourcePool = new KryoResourcePool(30)

  private[this] def acquireRelease[O](fn: SerializerInstance => O): O = {
    val kryo = resourcePool.borrow
    try {
      fn(kryo)
    } finally {
      resourcePool.release(kryo)
    }
  }

  def serialize[T: ClassTag](o: T): Array[Byte] =
    acquireRelease { k =>
      k.serialize(o).array()
    }

  def deserialize[T: ClassTag](bytes: Array[Byte]): T =
    acquireRelease { k =>
      k.deserialize[T](ByteBuffer.wrap(bytes))
    }
}

private[sql] class JavaBigDecimalSerializer extends Serializer[java.math.BigDecimal] {
  def write(kryo: Kryo, output: Output, bd: java.math.BigDecimal) {
    // TODO: There are probably more efficient representations than strings...
    output.writeString(bd.toString)
  }

  def read(kryo: Kryo, input: Input, tpe: Class[java.math.BigDecimal]): java.math.BigDecimal = {
    new java.math.BigDecimal(input.readString())
  }
}

private[sql] class ScalaBigDecimalSerializer extends Serializer[BigDecimal] {
  def write(kryo: Kryo, output: Output, bd: BigDecimal) {
    // TODO: There are probably more efficient representations than strings...
    output.writeString(bd.toString)
  }

  def read(kryo: Kryo, input: Input, tpe: Class[BigDecimal]): BigDecimal = {
    BigDecimal(input.readString())
  }
} 
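A round-trip sketch using the pooled helpers above (the Point case class is illustrative); when no SparkEnv is active, a default SparkConf is used:

case class Point(x: Int, y: Double)

val bytes: Array[Byte] = SparkSqlSerializer.serialize(Point(1, 2.0))
val restored: Point = SparkSqlSerializer.deserialize[Point](bytes)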
Example 7
Source File: TimeSeriesKryoRegistrator.scala    From spark-timeseries   with Apache License 2.0
package com.cloudera.sparkts

import com.esotericsoftware.kryo.{Serializer, Kryo}
import com.esotericsoftware.kryo.io.{Output, Input}

import org.apache.spark.SparkConf
import org.apache.spark.serializer.{KryoRegistrator, KryoSerializer}
import com.cloudera.sparkts.TimeSeriesUtils._

import java.time._

class TimeSeriesKryoRegistrator extends KryoRegistrator {
  def registerClasses(kryo: Kryo): Unit = {
    kryo.register(classOf[TimeSeries[_]])
    kryo.register(classOf[UniformDateTimeIndex])
    kryo.register(classOf[IrregularDateTimeIndex])
    kryo.register(classOf[BusinessDayFrequency])
    kryo.register(classOf[DayFrequency])
    kryo.register(classOf[ZonedDateTime], new DateTimeSerializer)
  }
}

class DateTimeSerializer extends Serializer[ZonedDateTime] {
  def write(kryo: Kryo, out: Output, dt: ZonedDateTime): Unit = {
    out.writeLong(zonedDateTimeToLong(dt), true)
  }

  def read(kryo: Kryo, in: Input, clazz: Class[ZonedDateTime]): ZonedDateTime = {
    longToZonedDateTime(in.readLong(true), ZoneId.systemDefault())
  }
}

object TimeSeriesKryoRegistrator {
  def registerKryoClasses(conf: SparkConf): Unit = {
    conf.set("spark.serializer", classOf[KryoSerializer].getName)
    conf.set("spark.kryo.registrator", classOf[TimeSeriesKryoRegistrator].getName)
  }
} 
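A usage sketch (not from spark-timeseries itself): call registerKryoClasses on the SparkConf before the SparkContext is created, so the registrator takes effect:

import org.apache.spark.{SparkConf, SparkContext}

val conf = new SparkConf().setAppName("timeseries-kryo-example")
TimeSeriesKryoRegistrator.registerKryoClasses(conf)
val sc = new SparkContext(conf)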
Example 8
Source File: IkeKryoRegistrator.scala    From ike   with Apache License 2.0
package org.allenai.ike

import org.allenai.ike.patterns.NamedPattern

import com.esotericsoftware.kryo.io.{ Input, Output }
import com.esotericsoftware.kryo.{ Kryo, Serializer }
import org.apache.spark.serializer.KryoRegistrator


class IkeKryoRegistrator extends KryoRegistrator {
  override def registerClasses(kryo: Kryo): Unit = {
    OptionSerializers.register(kryo)
    kryo.register(Class.forName("scala.collection.immutable.Nil$"))

    val classes: Array[Class[_]] = Array(
      classOf[BlackLabResult],
      classOf[Interval],
      classOf[WordData],
      classOf[java.time.Instant],
      classOf[java.time.LocalDate],
      classOf[java.time.Year]
    )

    classes.foreach(kryo.register)
  }
} 
Example 9
Source File: SerializableSerializer.scala    From spark-util   with Apache License 2.0
package org.hammerlab.hadoop.kryo

import java.io.{ ObjectInputStream, ObjectOutputStream }

import com.esotericsoftware.kryo.io.{ Input, Output }
import com.esotericsoftware.kryo.{ Kryo, Serializer }


case class SerializableSerializer[T <: Serializable]()
  extends Serializer[T] {
  override def read(kryo: Kryo, input: Input, `type`: Class[T]): T =
    new ObjectInputStream(input)
      .readObject()
      .asInstanceOf[T]

  override def write(kryo: Kryo, output: Output, t: T): Unit =
    new ObjectOutputStream(output)
      .writeObject(t)
} 
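A registration sketch (the target class is an arbitrary example, not from spark-util): any class that implements java.io.Serializable can be routed through this serializer so Kryo falls back to Java serialization for it:

import com.esotericsoftware.kryo.Kryo

val kryo = new Kryo()
kryo.register(classOf[java.util.Date], SerializableSerializer[java.util.Date]())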
Example 10
Source File: MatfastSerializer.scala    From MatRel   with Apache License 2.0
package org.apache.spark.sql.matfast.util

import java.math.BigDecimal
import java.nio.ByteBuffer
import java.util.{HashMap => JavaHashMap}

import scala.reflect.ClassTag

import com.esotericsoftware.kryo.{Kryo, Serializer}
import com.esotericsoftware.kryo.io.{Input, Output}
import com.twitter.chill.ResourcePool

import org.apache.spark.{SparkConf, SparkEnv}
import org.apache.spark.serializer.{KryoSerializer, SerializerInstance}
import org.apache.spark.sql.matfast.matrix._
import org.apache.spark.sql.types.Decimal
import org.apache.spark.util.MutablePair


private[matfast] class MatfastSerializer(conf: SparkConf) extends KryoSerializer(conf) {
  override def newKryo(): Kryo = {
    val kryo = super.newKryo()
    kryo.setRegistrationRequired(false)
    kryo.register(classOf[MutablePair[_, _]])
    kryo.register(classOf[org.apache.spark.sql.catalyst.expressions.GenericRow])
    kryo.register(classOf[org.apache.spark.sql.catalyst.expressions.GenericInternalRow])
    kryo.register(classOf[java.math.BigDecimal], new JavaBigDecimalSerializer)
    kryo.register(classOf[BigDecimal], new ScalaBigDecimalSerializer)

    kryo.register(classOf[Decimal])
    kryo.register(classOf[JavaHashMap[_, _]])
    kryo.register(classOf[DenseMatrix])
    kryo.register(classOf[SparseMatrix])

    kryo.setReferences(false)
    kryo
  }
}

private[matfast] class KryoResourcePool(size: Int) extends ResourcePool[SerializerInstance](size) {
  val ser: MatfastSerializer = {
    val sparkConf = Option(SparkEnv.get).map(_.conf).getOrElse(new SparkConf())
    new MatfastSerializer(sparkConf)
  }

  def newInstance(): SerializerInstance = ser.newInstance()
}

private[matfast] object MatfastSerializer {
  @transient lazy val resourcePool = new KryoResourcePool(50)

  private[this] def acquireRelease[O](fn: SerializerInstance => O): O = {
    val kryo = resourcePool.borrow()
    try {
      fn(kryo)
    } finally {
      resourcePool.release(kryo)
    }
  }

  def serialize[T: ClassTag](o: T): Array[Byte] = {
    acquireRelease { k =>
      k.serialize(o).array()
    }
  }

  def deserialize[T: ClassTag](bytes: Array[Byte]): T =
    acquireRelease { k =>
      k.deserialize[T](ByteBuffer.wrap(bytes))
    }
}

private[matfast] class JavaBigDecimalSerializer extends Serializer[java.math.BigDecimal] {
  def write(kryo: Kryo, output: Output, bd: java.math.BigDecimal) {
    output.writeString(bd.toString)
  }

  def read(kryo: Kryo, input: Input, tpe: Class[java.math.BigDecimal]): java.math.BigDecimal = {
    new java.math.BigDecimal(input.readString())
  }
}

private[matfast] class ScalaBigDecimalSerializer extends Serializer[BigDecimal] {
  def write(kryo: Kryo, output: Output, bd: BigDecimal): Unit = {
    output.writeString(bd.toString)
  }

  def read(kryo: Kryo, input: Input, tpe: Class[BigDecimal]): BigDecimal = {
    new java.math.BigDecimal(input.readString())
  }
} 
Example 11
Source File: RocksDBStorage.scala    From JustinDB   with Apache License 2.0
package justin.db.storage

import java.io.{ByteArrayInputStream, ByteArrayOutputStream, File}
import java.util.UUID

import com.esotericsoftware.kryo.io.{Input, Output}
import com.esotericsoftware.kryo.{Kryo, Serializer}
import justin.db.storage.PluggableStorageProtocol.{Ack, StorageGetData}
import org.rocksdb.{FlushOptions, Options, RocksDB}

import scala.concurrent.Future

// TODO:
// Current version store every single data under one file (totally doesn't care about data originality).
// Data should be eventually splitted by ring partitionId.
// This might be an issue during possible data movements between nodes.
final class RocksDBStorage(dir: File) extends PluggableStorageProtocol {
  import RocksDBStorage._

  {
    RocksDB.loadLibrary()
  }

  private[this] val kryo = new Kryo()

  private[this] val db: RocksDB = {
    val options: Options = new Options().setCreateIfMissing(true)
    RocksDB.open(options, dir.getPath)
  }

  override def get(id: UUID)(resolveOriginality: (UUID) => PluggableStorageProtocol.DataOriginality): Future[PluggableStorageProtocol.StorageGetData] = {
    val key: Array[Byte] = uuid2bytes(kryo, id)
    val dataBytes: Array[Byte] = db.get(key)

    val justinDataOpt = Option(dataBytes).map { dataBytes =>
      val input = new Input(new ByteArrayInputStream(dataBytes))
      JustinDataSerializer.read(kryo, input, classOf[JustinData])
    }

    Future.successful(justinDataOpt.map(StorageGetData.Single).getOrElse(StorageGetData.None))
  }

  override def put(data: JustinData)(resolveOriginality: (UUID) => PluggableStorageProtocol.DataOriginality): Future[PluggableStorageProtocol.Ack] = {
    val key: Array[Byte] = uuid2bytes(kryo, data.id)
    val dataBytes: Array[Byte] = {
      val output = new Output(new ByteArrayOutputStream())
      JustinDataSerializer.write(kryo, output, data)
      output.getBuffer
    }

    db.put(key, dataBytes)
    db.flush(new FlushOptions().setWaitForFlush(true))

    Ack.future
  }
}

object RocksDBStorage {

  def uuid2bytes(kryo: Kryo, id: UUID): Array[Byte] = {
    val output = new Output(new ByteArrayOutputStream(), 16)
    UUIDSerializer.write(kryo, output, id)
    output.getBuffer
  }

  object UUIDSerializer extends Serializer[UUID] {
    override def read(kryo: Kryo, input: Input, `type`: Class[UUID]): UUID = {
      new UUID(input.readLong, input.readLong)
    }

    override def write(kryo: Kryo, output: Output, uuid: UUID): Unit = {
      output.writeLong(uuid.getMostSignificantBits)
      output.writeLong(uuid.getLeastSignificantBits)
    }
  }

  object JustinDataSerializer extends Serializer[JustinData] {
    override def read(kryo: Kryo, input: Input, `type`: Class[JustinData]): JustinData = {
      JustinData(
        id        = UUIDSerializer.read(kryo, input, classOf[UUID]),
        value     = input.readString(),
        vclock    = input.readString(),
        timestamp = input.readLong()
      )
    }

    override def write(kryo: Kryo, output: Output, data: JustinData): Unit = {
      UUIDSerializer.write(kryo, output, data.id)
      output.writeString(data.value)
      output.writeString(data.vclock)
      output.writeLong(data.timestamp)
    }
  }
} 
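A round-trip sketch for JustinDataSerializer on its own (field values are illustrative, no RocksDB involved), assuming it runs in the justin.db.storage package so JustinData and RocksDBStorage are in scope:

import java.io.ByteArrayOutputStream
import java.util.UUID

import com.esotericsoftware.kryo.Kryo
import com.esotericsoftware.kryo.io.{Input, Output}

val kryo = new Kryo()
val output = new Output(new ByteArrayOutputStream())
RocksDBStorage.JustinDataSerializer.write(kryo, output,
  JustinData(id = UUID.randomUUID(), value = "some-value", vclock = "encoded-vclock", timestamp = 1L))
val restored = RocksDBStorage.JustinDataSerializer.read(kryo, new Input(output.getBuffer), classOf[JustinData])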
Example 12
Source File: StorageNodeReadResponseSerializer.scala    From JustinDB   with Apache License 2.0
package justin.db.kryo

import java.util.UUID

import com.esotericsoftware.kryo.io.{Input, Output}
import com.esotericsoftware.kryo.{Kryo, Serializer}
import justin.db.Data
import justin.db.actors.protocol._

object StorageNodeReadResponseSerializer extends Serializer[StorageNodeReadResponse] {

  private object Discriminator {
    val Found      = 1
    val Conflicted = 2
    val NotFound   = 3
    val Failed     = 4
  }

  override def write(kryo: Kryo, output: Output, readResponse: StorageNodeReadResponse): Unit = readResponse match {
    case StorageNodeFoundRead(data)           =>
      output.writeInt(Discriminator.Found)
      DataSerializer.write(kryo, output, data)
    case StorageNodeConflictedRead(conflicts) =>
      output.writeInt(Discriminator.Conflicted)
      ListOfDataSerializer.write(kryo, output, conflicts)
    case StorageNodeNotFoundRead(id)          =>
      output.writeInt(Discriminator.NotFound)
      output.writeString(id.toString)
    case StorageNodeFailedRead(id)            =>
      output.writeInt(Discriminator.Failed)
      output.writeString(id.toString)
  }

  override def read(kryo: Kryo, input: Input, `type`: Class[StorageNodeReadResponse]): StorageNodeReadResponse = {
    input.readInt() match {
      case Discriminator.Found      => StorageNodeFoundRead(DataSerializer.read(kryo, input, classOf[Data]))
      case Discriminator.Conflicted => StorageNodeConflictedRead(ListOfDataSerializer.read(kryo, input, classOf[List[Data]]))
      case Discriminator.NotFound   => StorageNodeNotFoundRead(UUID.fromString(input.readString()))
      case Discriminator.Failed     => StorageNodeFailedRead(UUID.fromString(input.readString()))
    }
  }
} 
Example 13
Source File: ListOfDataSerializer.scala    From JustinDB   with Apache License 2.0
package justin.db.kryo

import com.esotericsoftware.kryo.io.{Input, Output}
import com.esotericsoftware.kryo.{Kryo, Serializer}
import justin.db.Data

object ListOfDataSerializer extends Serializer[List[Data]] {
  override def write(kryo: Kryo, output: Output, listOfData: List[Data]): Unit = {
    val length = listOfData.size
    output.writeInt(length, true)
    if(length != 0) {
      val it = listOfData.iterator
      while(it.hasNext)
        DataSerializer.write(kryo, output, it.next())
    }
  }

  override def read(kryo: Kryo, input: Input, `type`: Class[List[Data]]): List[Data] = {
    var length = input.readInt(true)
    var result = List.empty[Data]

    while(length > 0) {
      result = result :+ DataSerializer.read(kryo, input, classOf[Data])
      length -= 1
    }
    result
  }
} 
Example 14
Source File: DataSerializer.scala    From JustinDB   with Apache License 2.0
package justin.db.kryo

import java.util.UUID

import com.esotericsoftware.kryo.io.{Input, Output}
import com.esotericsoftware.kryo.{Kryo, Serializer}
import justin.db.Data
import justin.db.versioning.NodeIdVectorClockBase64

object DataSerializer extends Serializer[Data] {
  override def write(kryo: Kryo, output: Output, data: Data): Unit = {
    output.writeString(data.id.toString) // UUID
    output.writeString(data.value)       // Value
    output.writeString(new NodeIdVectorClockBase64().encode(data.vclock).get) // Vector Clock
    output.writeLong(data.timestamp)    // Timestamp
  }

  override def read(kryo: Kryo, input: Input, `type`: Class[Data]): Data = {
    Data(
      id        = UUID.fromString(input.readString()), // UUID
      value     = input.readString(),                  // Value
      vclock    = new NodeIdVectorClockBase64().decode(input.readString()).get, // Vector Clock
      timestamp = input.readLong()                     // Timestamp
    )
  }
} 
Example 15
Source File: StorageNodeWriteResponseSerializer.scala    From JustinDB   with Apache License 2.0
package justin.db.kryo

import java.util.UUID

import com.esotericsoftware.kryo.io.{Input, Output}
import com.esotericsoftware.kryo.{Kryo, Serializer}
import justin.db.Data
import justin.db.actors.protocol.{StorageNodeConflictedWrite, StorageNodeFailedWrite, StorageNodeSuccessfulWrite, StorageNodeWriteResponse}

object StorageNodeWriteResponseSerializer extends Serializer[StorageNodeWriteResponse] {

  private object Discriminator {
    val SuccessfulWrite = 1
    val FailedWrite     = 2
    val ConflictedWrite = 3
  }

  override def write(kryo: Kryo, output: Output, response: StorageNodeWriteResponse): Unit = response match {
    case StorageNodeSuccessfulWrite(id)               =>
      output.writeInt(Discriminator.SuccessfulWrite)
      output.writeString(id.toString) // UUID
    case StorageNodeFailedWrite(id)                   =>
      output.writeInt(Discriminator.FailedWrite)
      output.writeString(id.toString) // UUID
    case StorageNodeConflictedWrite(oldData, newData) =>
      output.writeInt(Discriminator.ConflictedWrite)
      DataSerializer.write(kryo, output, oldData)
      DataSerializer.write(kryo, output, newData)
  }

  override def read(kryo: Kryo, input: Input, `type`: Class[StorageNodeWriteResponse]): StorageNodeWriteResponse = input.readInt() match {
    case Discriminator.SuccessfulWrite =>
      StorageNodeSuccessfulWrite(UUID.fromString(input.readString()))
    case Discriminator.FailedWrite     =>
      StorageNodeFailedWrite(UUID.fromString(input.readString()))
    case Discriminator.ConflictedWrite =>
      StorageNodeConflictedWrite(
        oldData = DataSerializer.read(kryo, input, classOf[Data]),
        newData = DataSerializer.read(kryo, input, classOf[Data])
      )
  }
} 
Example 16
Source File: StorageNodeWriteDataLocalSerializer.scala    From JustinDB   with Apache License 2.0
package justin.db.kryo

import java.util.UUID

import com.esotericsoftware.kryo.io.{Input, Output}
import com.esotericsoftware.kryo.{Kryo, Serializer}
import justin.db.Data
import justin.db.actors.protocol.StorageNodeWriteDataLocal
import justin.db.versioning.NodeIdVectorClockBase64

object StorageNodeWriteDataLocalSerializer extends Serializer[StorageNodeWriteDataLocal] {
  override def write(kryo: Kryo, output: Output, local: StorageNodeWriteDataLocal): Unit = {
    output.writeString(local.data.id.toString) // UUID
    output.writeString(local.data.value)       // Value
    output.writeString(new NodeIdVectorClockBase64().encode(local.data.vclock).get)  // Vector Clock
    output.writeLong(local.data.timestamp)     // Timestamp
  }

  override def read(kryo: Kryo, input: Input, `type`: Class[StorageNodeWriteDataLocal]): StorageNodeWriteDataLocal = {
    val id = UUID.fromString(input.readString()) // UUID
    val value = input.readString()               // Value
    val vectorClock = new NodeIdVectorClockBase64().decode(input.readString()).get // Vector Clock
    val timestamp = input.readLong()             // Timestamp

    StorageNodeWriteDataLocal(Data(id, value, vectorClock, timestamp))
  }
} 
Example 17
Source File: KryoStringEventBatch.scala    From maha   with Apache License 2.0
// Copyright 2017, Yahoo Holdings Inc.
// Licensed under the terms of the Apache License 2.0. Please see LICENSE file in project root for terms.
package com.yahoo.maha.log

import com.esotericsoftware.kryo.{Kryo, Serializer}
import com.esotericsoftware.kryo.io.{Input, Output}
import com.yahoo.maha.data.StringEventBatch
import org.slf4j.{Logger, LoggerFactory}


object KryoStringEventBatch {
  private val logger: Logger = LoggerFactory.getLogger(classOf[KryoStringEventBatch])
}

class KryoStringEventBatch extends Serializer[StringEventBatch] {
  KryoStringEventBatch.logger.info("Created instance of " + this.getClass.getSimpleName)

  override def write(kryo: Kryo, output: Output, stringEventBatch: StringEventBatch): Unit = {
    val size: Int = stringEventBatch.getEvents.size
    output.writeInt(size)
    stringEventBatch.getEvents.stream().forEach(output.writeString(_))
  }

  override def read(kryo: Kryo, input: Input, `type`: Class[StringEventBatch]): StringEventBatch = {
    val size: Int = input.readInt
    val builder: StringEventBatch.Builder = new StringEventBatch.Builder(size)
    var i: Int = 0
    while ( i < size) {
      builder.add(input.readString)
      i += 1
    }
    builder.build.asInstanceOf[StringEventBatch]
  }
} 
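A round-trip sketch (the StringEventBatch.Builder calls mirror the read() method above; nothing here is taken from the maha test suite):

import com.esotericsoftware.kryo.Kryo
import com.esotericsoftware.kryo.io.{Input, Output}
import com.yahoo.maha.data.StringEventBatch

val kryo = new Kryo()
kryo.register(classOf[StringEventBatch], new KryoStringEventBatch)

val builder = new StringEventBatch.Builder(2)
builder.add("event-1")
builder.add("event-2")
val batch = builder.build.asInstanceOf[StringEventBatch]

val output = new Output(1024, -1)
kryo.writeObject(output, batch)
val restored = kryo.readObject(new Input(output.toBytes), classOf[StringEventBatch])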
Example 18
Source File: KryoMLMatrixSerializer.scala    From MatRel   with Apache License 2.0
package org.apache.spark.sql.matfast.util

import com.esotericsoftware.kryo.{Kryo, Serializer}
import com.esotericsoftware.kryo.io.{Input, Output}

import org.apache.spark.sql.matfast.matrix._

class KryoMLMatrixSerializer extends Serializer[MLMatrix] {

  private def getTypeInt(m: MLMatrix): Short = m match {
    case _: SparseMatrix => 0
    case _: DenseMatrix => 1
    case _ => -1
  }

  override def write(kryo: Kryo, output: Output, matrix: MLMatrix) {
    output.writeShort(getTypeInt(matrix))
    matrix match {
      case dense: DenseMatrix =>
        output.writeInt(dense.numRows, true)
        output.writeInt(dense.numCols, true)
        output.writeInt(dense.values.length, true)
        dense.values.foreach(output.writeDouble)
        output.writeBoolean(dense.isTransposed)
      case sp: SparseMatrix =>
        output.writeInt(sp.numRows, true)
        output.writeInt(sp.numCols, true)
        output.writeInt(sp.colPtrs.length, true)
        sp.colPtrs.foreach(x => output.writeInt(x, true))
        output.writeInt(sp.rowIndices.length, true)
        sp.rowIndices.foreach(x => output.writeInt(x, true))
        output.writeInt(sp.values.length, true)
        sp.values.foreach(output.writeDouble)
        output.writeBoolean(sp.isTransposed)
    }
  }

  override def read(kryo: Kryo, input: Input, typ: Class[MLMatrix]): MLMatrix = {
    val typInt = input.readShort()
    if (typInt == 1) { // DenseMatrix
      val numRows = input.readInt(true)
      val numCols = input.readInt(true)
      val dim = input.readInt(true)
      val values = Array.ofDim[Double](dim)
      for (i <- 0 until dim) values(i) = input.readDouble()
      val isTransposed = input.readBoolean()
      new DenseMatrix(numRows, numCols, values, isTransposed)
    } else if (typInt == 0) { // SparseMatrix
      val numRows = input.readInt(true)
      val numCols = input.readInt(true)
      val colPtrsDim = input.readInt(true)
      val colPtrs = Array.ofDim[Int](colPtrsDim)
      for (i <- 0 until colPtrsDim) colPtrs(i) = input.readInt(true)
      val rowIndicesDim = input.readInt(true)
      val rowIndices = Array.ofDim[Int](rowIndicesDim)
      for (i <- 0 until rowIndicesDim) rowIndices(i) = input.readInt(true)
      val valueDim = input.readInt(true)
      val values = Array.ofDim[Double](valueDim)
      for (i <- 0 until valueDim) values(i) = input.readDouble()
      val isTransposed = input.readBoolean()
      new SparseMatrix(numRows, numCols, colPtrs, rowIndices, values, isTransposed)
    } else null
  }
}
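A round-trip sketch that drives KryoMLMatrixSerializer directly; the DenseMatrix constructor arguments are assumed from the read() method above (numRows, numCols, values, isTransposed):

import com.esotericsoftware.kryo.Kryo
import com.esotericsoftware.kryo.io.{Input, Output}
import org.apache.spark.sql.matfast.matrix._

val kryo = new Kryo()
val serializer = new KryoMLMatrixSerializer
val output = new Output(256, -1)
serializer.write(kryo, output, new DenseMatrix(2, 2, Array(1.0, 2.0, 3.0, 4.0), false))
val restored: MLMatrix = serializer.read(kryo, new Input(output.toBytes), classOf[MLMatrix])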