org.apache.spark.serializer.KryoRegistrator Scala Examples
The following examples show how to use org.apache.spark.serializer.KryoRegistrator.
Each example notes its source project and license above the snippet.
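A registrator only takes effect once it is wired into the Spark configuration through the spark.serializer and spark.kryo.registrator keys. As a minimal sketch before diving into the examples (the Point class and MyKryoRegistrator name are hypothetical, for illustration only):

import com.esotericsoftware.kryo.Kryo
import org.apache.spark.SparkConf
import org.apache.spark.serializer.{KryoRegistrator, KryoSerializer}

// Hypothetical domain class and registrator, for illustration only.
case class Point(x: Double, y: Double)

class MyKryoRegistrator extends KryoRegistrator {
  override def registerClasses(kryo: Kryo): Unit = {
    kryo.register(classOf[Point])
    kryo.register(classOf[Array[Point]])
  }
}

val conf = new SparkConf()
  .set("spark.serializer", classOf[KryoSerializer].getName)
  .set("spark.kryo.registrator", classOf[MyKryoRegistrator].getName)
  // Optional: fail fast on unregistered classes instead of silently falling back.
  .set("spark.kryo.registrationRequired", "true")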
Example 1
Source File: ShapeLuceneRDDKryoRegistrator.scala From spark-lucenerdd with Apache License 2.0
package org.zouzias.spark.lucenerdd.spatial.shape

import com.twitter.algebird.TopK
import com.twitter.chill.Kryo
// SparkConf and KryoSerializer likely serve a companion object elided from this excerpt.
import org.apache.spark.SparkConf
import org.apache.spark.serializer.{KryoRegistrator, KryoSerializer}
import org.apache.spark.sql.catalyst.expressions.GenericRowWithSchema
import org.apache.spark.sql.types._
import org.zouzias.spark.lucenerdd.models.{SparkFacetResult, SparkScoreDoc}
import org.zouzias.spark.lucenerdd.spatial.shape.partition.ShapeLuceneRDDPartition

class ShapeLuceneRDDKryoRegistrator extends KryoRegistrator {

  def registerClasses(kryo: Kryo): Unit = {
    kryo.register(classOf[ShapeLuceneRDD[_, _]])
    kryo.register(classOf[ShapeLuceneRDDPartition[_, _]])
    kryo.register(classOf[Number])
    kryo.register(classOf[java.lang.Double])
    kryo.register(classOf[java.lang.Float])
    kryo.register(classOf[java.lang.Integer])
    kryo.register(classOf[java.lang.Long])
    kryo.register(classOf[java.lang.Short])
    kryo.register(classOf[StructType])
    kryo.register(classOf[StructField])
    kryo.register(classOf[IntegerType])
    kryo.register(classOf[IntegerType$])
    kryo.register(classOf[DoubleType])
    kryo.register(classOf[DoubleType$])
    kryo.register(classOf[FloatType])
    kryo.register(classOf[StringType])
    kryo.register(classOf[StringType$])
    kryo.register(classOf[GenericRowWithSchema])
    kryo.register(classOf[Metadata])
    kryo.register(classOf[Object])
    kryo.register(classOf[Array[Object]])
    kryo.register(classOf[Array[Array[Byte]]])
    kryo.register(classOf[scala.collection.mutable.WrappedArray$ofRef])
    kryo.register(classOf[scala.collection.mutable.WrappedArray$ofFloat])
    kryo.register(classOf[scala.collection.mutable.WrappedArray$ofDouble])
    kryo.register(classOf[scala.collection.mutable.WrappedArray$ofInt])
    kryo.register(classOf[scala.collection.mutable.WrappedArray$ofLong])
    kryo.register(classOf[Array[String]])
    kryo.register(classOf[Array[Number]])
    kryo.register(classOf[Array[Float]])
    kryo.register(classOf[Array[Int]])
    kryo.register(classOf[Array[Long]])
    kryo.register(classOf[Array[Double]])
    kryo.register(classOf[Array[Boolean]])
    kryo.register(classOf[Array[SparkScoreDoc]])
    kryo.register(classOf[Array[StructType]])
    kryo.register(classOf[Array[StructField]])
    kryo.register(classOf[Range])
    kryo.register(classOf[scala.collection.immutable.Map[String, String]])
    kryo.register(classOf[scala.collection.immutable.Map[String, Number]])
    kryo.register(classOf[scala.collection.immutable.Map$EmptyMap$])
    kryo.register(classOf[scala.collection.immutable.Set$EmptySet$])
    kryo.register(classOf[scala.collection.immutable.Map[_, _]])
    kryo.register(classOf[Array[scala.collection.immutable.Map[_, _]]])
    kryo.register(classOf[SparkFacetResult])
    kryo.register(classOf[SparkScoreDoc])
    kryo.register(classOf[TopK[_]])
    ()
  }
}
Example 2
Source File: LuceneRDDKryoRegistrator.scala From spark-lucenerdd with Apache License 2.0
package org.zouzias.spark.lucenerdd

import com.twitter.algebird.TopK
import com.twitter.chill.Kryo
import org.apache.spark.SparkConf
import org.apache.spark.serializer.{KryoRegistrator, KryoSerializer}
import org.zouzias.spark.lucenerdd.facets.FacetedLuceneRDD
import org.zouzias.spark.lucenerdd.models.{SparkFacetResult, SparkScoreDoc}
import org.zouzias.spark.lucenerdd.partition.LuceneRDDPartition
import org.zouzias.spark.lucenerdd.response.{LuceneRDDResponse, LuceneRDDResponsePartition}
import org.zouzias.spark.lucenerdd.testing.{FavoriteCaseClass, Person}

class LuceneRDDKryoRegistrator extends KryoRegistrator {

  def registerClasses(kryo: Kryo): Unit = {
    kryo.register(classOf[LuceneRDD[_]])
    kryo.register(classOf[LuceneRDDPartition[_]])
    kryo.register(classOf[FacetedLuceneRDD[_]])
    kryo.register(classOf[Number])
    kryo.register(classOf[java.lang.Double])
    kryo.register(classOf[java.lang.Float])
    kryo.register(classOf[java.lang.Integer])
    kryo.register(classOf[java.lang.Long])
    kryo.register(classOf[java.lang.Short])
    kryo.register(classOf[scala.collection.mutable.WrappedArray$ofRef])
    kryo.register(classOf[scala.collection.mutable.WrappedArray$ofFloat])
    kryo.register(classOf[scala.collection.mutable.WrappedArray$ofDouble])
    kryo.register(classOf[scala.collection.mutable.WrappedArray$ofInt])
    kryo.register(classOf[scala.collection.mutable.WrappedArray$ofLong])
    kryo.register(classOf[Array[String]])
    kryo.register(classOf[Array[Number]])
    kryo.register(classOf[Array[Float]])
    kryo.register(classOf[Array[Int]])
    kryo.register(classOf[Array[Long]])
    kryo.register(classOf[Array[Double]])
    kryo.register(classOf[Array[Boolean]])
    kryo.register(classOf[Range])
    kryo.register(classOf[scala.collection.immutable.Map[String, String]])
    kryo.register(classOf[scala.collection.immutable.Map[String, Number]])
    kryo.register(classOf[scala.collection.immutable.Map$EmptyMap$])
    kryo.register(classOf[scala.collection.immutable.Set$EmptySet$])
    kryo.register(classOf[scala.collection.immutable.Map[_, _]])
    kryo.register(classOf[Array[scala.collection.immutable.Map[_, _]]])
    kryo.register(classOf[SparkFacetResult])
    kryo.register(classOf[SparkScoreDoc])
    kryo.register(classOf[LuceneRDDResponse])
    kryo.register(classOf[LuceneRDDResponsePartition])
    kryo.register(classOf[TopK[_]])
    kryo.register(classOf[FavoriteCaseClass])
  }
}
Example 3
Source File: GraphKryoRegistrator.scala From graphx-algorithm with GNU General Public License v2.0
package org.apache.spark.graphx

import com.esotericsoftware.kryo.Kryo
import org.apache.spark.serializer.KryoRegistrator
import org.apache.spark.util.BoundedPriorityQueue
import org.apache.spark.util.collection.BitSet
import org.apache.spark.graphx.impl._
import org.apache.spark.graphx.util.collection.GraphXPrimitiveKeyOpenHashMap
import org.apache.spark.util.collection.OpenHashSet

@deprecated("Register GraphX classes with Kryo using GraphXUtils.registerKryoClasses", "1.2.0")
class GraphKryoRegistrator extends KryoRegistrator {

  def registerClasses(kryo: Kryo) {
    kryo.register(classOf[Edge[Object]])
    kryo.register(classOf[(VertexId, Object)])
    kryo.register(classOf[EdgePartition[Object, Object]])
    kryo.register(classOf[BitSet])
    kryo.register(classOf[VertexIdToIndexMap])
    kryo.register(classOf[VertexAttributeBlock[Object]])
    kryo.register(classOf[PartitionStrategy])
    kryo.register(classOf[BoundedPriorityQueue[Object]])
    kryo.register(classOf[EdgeDirection])
    kryo.register(classOf[GraphXPrimitiveKeyOpenHashMap[VertexId, Int]])
    kryo.register(classOf[OpenHashSet[Int]])
    kryo.register(classOf[OpenHashSet[Long]])
  }
}
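As the @deprecated message above says, this registrator has been superseded: since Spark 1.2, GraphX classes are registered directly against a SparkConf. A minimal sketch of the recommended replacement:

import org.apache.spark.SparkConf
import org.apache.spark.graphx.GraphXUtils

// Registers GraphX's internal classes with Kryo on the given conf.
val conf = new SparkConf().setAppName("graphx-app")
GraphXUtils.registerKryoClasses(conf)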
Example 4
Source File: Registration.scala From spark-util with Apache License 2.0
package org.hammerlab.kryo

import com.esotericsoftware.kryo
import com.esotericsoftware.kryo.Kryo
import org.apache.spark.serializer.KryoRegistrator
import org.hammerlab.kryo.spark.Registrator

import scala.reflect.ClassTag

// NOTE: the surrounding definitions of Registration.scala were elided in this
// excerpt; the enclosing object below is a reconstruction so that the one
// surviving implicit conversion compiles.
object Registration {

  implicit def classNameWithImplicits(
      className: String
  ): ClassWithSerializerToRegister[_] =
    ClassWithSerializerToRegister(
      Class.forName(className),
      None,
      None
    )
}
Example 5
Source File: Kryo.scala From spark-util with Apache License 2.0
package org.hammerlab.spark.confs

import org.apache.spark.serializer.{KryoRegistrator, KryoSerializer}
import org.hammerlab.kryo.Registrar
import org.hammerlab.spark.SparkConfBase

import scala.reflect.ClassTag

case class UserRegistrar(name: String)

object UserRegistrar {

  implicit def fromInstance[T <: KryoRegistrator](t: T): UserRegistrar =
    UserRegistrar(t.getClass.getName)

  implicit def fromClass[T <: KryoRegistrator](cls: Class[T]): UserRegistrar =
    UserRegistrar(cls.getName)

  implicit def fromClassTag[T <: KryoRegistrator](implicit ct: ClassTag[T]): UserRegistrar =
    UserRegistrar(ct.runtimeClass.getName)
}

trait Kryo extends SparkConfBase with Registrar {

  def registrationRequired: Boolean = true
  def referenceTracking: Boolean = false

  def registrar(userRegistrar: UserRegistrar): Unit =
    sparkConf(
      "spark.kryo.registrator" → userRegistrar.name
    )

  def registrar[T <: KryoRegistrator](implicit ct: ClassTag[T]): Unit =
    registrar(UserRegistrar.fromClassTag(ct))

  sparkConf(
    "spark.serializer" → classOf[KryoSerializer].getName,
    "spark.kryo.referenceTracking" → referenceTracking.toString,
    "spark.kryo.registrationRequired" → registrationRequired.toString
  )
}
Example 6
Source File: KryoTest.scala From spark-util with Apache License 2.0
package org.hammerlab.spark.confs

import com.esotericsoftware.kryo
import org.apache.spark.serializer.KryoRegistrator
import org.hammerlab.spark.SparkConfBase
import org.hammerlab.test.Suite

class KryoTest extends Suite {
  test("override registration requirement") {
    val conf = HasSparkConf.conf
    conf.get("spark.kryo.referenceTracking") should be("true")
    conf.get("spark.kryo.registrationRequired") should be("false")
    conf.get("spark.kryo.registrator") should be("org.hammerlab.spark.confs.TestRegistrator")
  }
}

class TestRegistrator extends KryoRegistrator {
  override def registerClasses(k: kryo.Kryo): Unit = ???
}

object HasSparkConf extends SparkConfBase with Kryo {
  lazy val conf = makeSparkConf
  override def registrationRequired = false
  override def referenceTracking = true
  registrar[TestRegistrator]
}
Example 7
Source File: IkeKryoRegistrator.scala From ike with Apache License 2.0
package org.allenai.ike

import org.allenai.ike.patterns.NamedPattern

import com.esotericsoftware.kryo.io.{ Input, Output }
import com.esotericsoftware.kryo.{ Kryo, Serializer }
import org.apache.spark.serializer.KryoRegistrator

class IkeKryoRegistrator extends KryoRegistrator {
  override def registerClasses(kryo: Kryo): Unit = {
    OptionSerializers.register(kryo)
    kryo.register(Class.forName("scala.collection.immutable.Nil$"))

    val classes: Array[Class[_]] = Array(
      classOf[BlackLabResult],
      classOf[Interval],
      classOf[WordData],
      classOf[java.time.Instant],
      classOf[java.time.LocalDate],
      classOf[java.time.Year]
    )
    classes.foreach(kryo.register)
  }
}
Example 8
Source File: TimeSeriesKryoRegistrator.scala From spark-timeseries with Apache License 2.0
package com.cloudera.sparkts

import com.esotericsoftware.kryo.{Serializer, Kryo}
import com.esotericsoftware.kryo.io.{Output, Input}
import org.apache.spark.SparkConf
import org.apache.spark.serializer.{KryoRegistrator, KryoSerializer}
import com.cloudera.sparkts.TimeSeriesUtils._

import java.time._

class TimeSeriesKryoRegistrator extends KryoRegistrator {
  def registerClasses(kryo: Kryo): Unit = {
    kryo.register(classOf[TimeSeries[_]])
    kryo.register(classOf[UniformDateTimeIndex])
    kryo.register(classOf[IrregularDateTimeIndex])
    kryo.register(classOf[BusinessDayFrequency])
    kryo.register(classOf[DayFrequency])
    kryo.register(classOf[ZonedDateTime], new DateTimeSerializer)
  }
}

class DateTimeSerializer extends Serializer[ZonedDateTime] {
  def write(kryo: Kryo, out: Output, dt: ZonedDateTime): Unit = {
    out.writeLong(zonedDateTimeToLong(dt), true)
  }

  def read(kryo: Kryo, in: Input, clazz: Class[ZonedDateTime]): ZonedDateTime = {
    longToZonedDateTime(in.readLong(true), ZoneId.systemDefault())
  }
}

object TimeSeriesKryoRegistrator {
  def registerKryoClasses(conf: SparkConf): Unit = {
    conf.set("spark.serializer", classOf[KryoSerializer].getName)
    conf.set("spark.kryo.registrator", classOf[TimeSeriesKryoRegistrator].getName)
  }
}
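The companion object above already performs the SparkConf wiring, so a driver program only needs to call it before creating the context. A short usage sketch (the app name is arbitrary):

import org.apache.spark.{SparkConf, SparkContext}

val conf = new SparkConf().setAppName("timeseries-app")
TimeSeriesKryoRegistrator.registerKryoClasses(conf)
val sc = new SparkContext(conf)  // serializer and registrator are now set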
Example 9
Source File: HailKryoRegistrator.scala From hail with MIT License
package is.hail.kryo

import com.esotericsoftware.kryo.Kryo
import com.esotericsoftware.kryo.serializers.JavaSerializer
import is.hail.annotations.{Region, UnsafeIndexedSeq, UnsafeRow}
import is.hail.utils.{Interval, SerializableHadoopConfiguration}
import is.hail.variant.Locus
import org.apache.spark.serializer.KryoRegistrator
import org.apache.spark.sql.catalyst.expressions.GenericRow

class HailKryoRegistrator extends KryoRegistrator {
  override def registerClasses(kryo: Kryo) {
    kryo.register(classOf[SerializableHadoopConfiguration], new JavaSerializer())
    kryo.register(classOf[UnsafeRow])
    kryo.register(classOf[GenericRow])
    kryo.register(classOf[Locus])
    kryo.register(classOf[Interval])
    kryo.register(classOf[UnsafeIndexedSeq])
    kryo.register(classOf[Region])
  }
}
Example 10
Source File: GraphKryoRegistrator.scala From iolap with Apache License 2.0
package org.apache.spark.graphx

import com.esotericsoftware.kryo.Kryo
import org.apache.spark.serializer.KryoRegistrator
import org.apache.spark.util.BoundedPriorityQueue
import org.apache.spark.util.collection.BitSet
import org.apache.spark.graphx.impl._
import org.apache.spark.graphx.util.collection.GraphXPrimitiveKeyOpenHashMap
import org.apache.spark.util.collection.OpenHashSet

@deprecated("Register GraphX classes with Kryo using GraphXUtils.registerKryoClasses", "1.2.0")
class GraphKryoRegistrator extends KryoRegistrator {

  def registerClasses(kryo: Kryo) {
    kryo.register(classOf[Edge[Object]])
    kryo.register(classOf[(VertexId, Object)])
    kryo.register(classOf[EdgePartition[Object, Object]])
    kryo.register(classOf[BitSet])
    kryo.register(classOf[VertexIdToIndexMap])
    kryo.register(classOf[VertexAttributeBlock[Object]])
    kryo.register(classOf[PartitionStrategy])
    kryo.register(classOf[BoundedPriorityQueue[Object]])
    kryo.register(classOf[EdgeDirection])
    kryo.register(classOf[GraphXPrimitiveKeyOpenHashMap[VertexId, Int]])
    kryo.register(classOf[OpenHashSet[Int]])
    kryo.register(classOf[OpenHashSet[Long]])
  }
}
Example 11
Source File: OpKryoRegistrator.scala From TransmogrifAI with BSD 3-Clause "New" or "Revised" License
package com.salesforce.op.utils.kryo

import java.util.TreeMap

import com.esotericsoftware.kryo.{Kryo, Registration}
import com.esotericsoftware.kryo.serializers.DefaultSerializers.TreeMapSerializer
import com.salesforce.op.utils.stats.StreamingHistogram
import com.salesforce.op.utils.stats.StreamingHistogram.{StreamingHistogramBuilder, StreamingHistogramComparator}
import com.twitter.chill.algebird.AlgebirdRegistrar
import com.twitter.chill.avro.AvroSerializer
import org.apache.avro.generic.GenericData
import org.apache.avro.specific.SpecificRecordBase
import org.apache.spark.serializer.KryoRegistrator

import scala.collection.mutable.{WrappedArray => MWrappedArray}
import scala.reflect._

class OpKryoRegistrator extends KryoRegistrator {

  protected def doAvroRegistration[T <: SpecificRecordBase : ClassTag](kryo: Kryo): Registration =
    kryo.register(classTag[T].runtimeClass, AvroSerializer.SpecificRecordBinarySerializer[T])

  protected def doClassRegistration(kryo: Kryo)(seqPC: Class[_]*): Unit =
    seqPC.foreach { pC =>
      kryo.register(pC)
      // also register arrays of that class
      val arrayType = java.lang.reflect.Array.newInstance(pC, 0).getClass
      kryo.register(arrayType)
    }

  // Hook for subclasses to register additional classes; assumed to be a no-op
  // in the base class (its definition was elided in this excerpt).
  protected def registerCustomClasses(kryo: Kryo): Unit = ()

  final override def registerClasses(kryo: Kryo): Unit = {
    doClassRegistration(kryo)(
      classOf[org.apache.avro.generic.GenericData],
      scala.collection.immutable.Map.empty[Any, Any].getClass
    )
    doClassRegistration(kryo)(
      OpKryoClasses.ArraysOfPrimitives: _*
    )
    // Avro generic-data array deserialization fails - hence providing workaround
    kryo.register(
      classOf[GenericData.Array[_]],
      new GenericJavaCollectionSerializer(classOf[java.util.ArrayList[_]])
    )
    new AlgebirdRegistrar().apply(kryo)
    registerCustomClasses(kryo)
    // Streaming histogram registration
    kryo.register(classOf[StreamingHistogram])
    kryo.register(classOf[StreamingHistogramBuilder])
    kryo.register(classOf[StreamingHistogramComparator])
    kryo.register(classOf[TreeMap[_, _]], new TreeMapSerializer())
    // Mutable wrapped arrays
    OpKryoClasses.WrappedArrays.foreach(kryo.register)
  }
}

// The header of this object and its ArraysOfPrimitives sequence were garbled
// in the excerpt; the reconstruction below keeps what survived.
object OpKryoClasses {

  lazy val ArraysOfPrimitives: Seq[Class[_]] = Seq(
    // contents elided in the excerpt
  )

  lazy val WrappedArrays: Seq[Class[_]] = Seq(
    MWrappedArray.make(Array[Boolean]()).getClass,
    MWrappedArray.make(Array[Byte]()).getClass,
    MWrappedArray.make(Array[Char]()).getClass,
    MWrappedArray.make(Array[Double]()).getClass,
    MWrappedArray.make(Array[Float]()).getClass,
    MWrappedArray.make(Array[Int]()).getClass,
    MWrappedArray.make(Array[Long]()).getClass,
    MWrappedArray.make(Array[Short]()).getClass,
    MWrappedArray.make(Array[String]()).getClass
  )
}
Example 12
Source File: GraphKryoRegistrator.scala From spark1.52 with Apache License 2.0
package org.apache.spark.graphx

import com.esotericsoftware.kryo.Kryo
import org.apache.spark.serializer.KryoRegistrator
import org.apache.spark.util.BoundedPriorityQueue
import org.apache.spark.util.collection.BitSet
import org.apache.spark.graphx.impl._
import org.apache.spark.graphx.util.collection.GraphXPrimitiveKeyOpenHashMap
import org.apache.spark.util.collection.OpenHashSet

@deprecated("Register GraphX classes with Kryo using GraphXUtils.registerKryoClasses", "1.2.0")
class GraphKryoRegistrator extends KryoRegistrator {

  def registerClasses(kryo: Kryo) {
    kryo.register(classOf[Edge[Object]])
    kryo.register(classOf[(VertexId, Object)])
    kryo.register(classOf[EdgePartition[Object, Object]])
    kryo.register(classOf[BitSet])
    kryo.register(classOf[VertexIdToIndexMap])
    kryo.register(classOf[VertexAttributeBlock[Object]])
    kryo.register(classOf[PartitionStrategy])
    kryo.register(classOf[BoundedPriorityQueue[Object]])
    kryo.register(classOf[EdgeDirection])
    kryo.register(classOf[GraphXPrimitiveKeyOpenHashMap[VertexId, Int]])
    kryo.register(classOf[OpenHashSet[Int]])
    kryo.register(classOf[OpenHashSet[Long]])
  }
}
Example 13
Source File: GraphKryoRegistrator.scala From BigDatalog with Apache License 2.0
package org.apache.spark.graphx

import com.esotericsoftware.kryo.Kryo
import org.apache.spark.serializer.KryoRegistrator
import org.apache.spark.util.BoundedPriorityQueue
import org.apache.spark.util.collection.BitSet
import org.apache.spark.graphx.impl._
import org.apache.spark.graphx.util.collection.GraphXPrimitiveKeyOpenHashMap
import org.apache.spark.util.collection.OpenHashSet

@deprecated("Register GraphX classes with Kryo using GraphXUtils.registerKryoClasses", "1.2.0")
class GraphKryoRegistrator extends KryoRegistrator {

  def registerClasses(kryo: Kryo) {
    kryo.register(classOf[Edge[Object]])
    kryo.register(classOf[(VertexId, Object)])
    kryo.register(classOf[EdgePartition[Object, Object]])
    kryo.register(classOf[BitSet])
    kryo.register(classOf[VertexIdToIndexMap])
    kryo.register(classOf[VertexAttributeBlock[Object]])
    kryo.register(classOf[PartitionStrategy])
    kryo.register(classOf[BoundedPriorityQueue[Object]])
    kryo.register(classOf[EdgeDirection])
    kryo.register(classOf[GraphXPrimitiveKeyOpenHashMap[VertexId, Int]])
    kryo.register(classOf[OpenHashSet[Int]])
    kryo.register(classOf[OpenHashSet[Long]])
  }
}
Example 14
Source File: ReadingWritingData.scala From Spark-RSVD with Apache License 2.0
package com.criteo.rsvd

import java.nio.ByteBuffer

import com.esotericsoftware.kryo.Kryo
import com.typesafe.scalalogging.slf4j.StrictLogging
import de.javakaffee.kryoserializers.UnmodifiableCollectionsSerializer
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.hadoop.io.{BytesWritable, NullWritable}
import org.apache.spark.mllib.linalg.distributed.MatrixEntry
import org.apache.spark.rdd.RDD
import org.apache.spark.serializer.{KryoRegistrator, KryoSerializer}
import org.apache.spark.{SparkConf, SparkContext}

import scala.reflect.ClassTag

object ReadingWritingData extends StrictLogging {

  def getInputDataSizeMB(inputPathPattern: String, sc: SparkContext): Int = {
    val fs = FileSystem.get(sc.hadoopConfiguration)
    val path = new Path(inputPathPattern)
    (fs.globStatus(path).map(f => f.getLen).sum / 1024 / 1024).toInt
  }

  def loadMatrixEntries(inputPath: String,
                        singlePartitionSizeMB: Int,
                        sc: SparkContext): RDD[MatrixEntry] = {
    logger.info(s"Input matrix path: $inputPath")
    // NOTE: the remainder of this method was truncated in the excerpt; it
    // computed the input size via getInputDataSizeMB and loaded the entries.
    ???
  }

  def makeRddFromKryoFile[T: ClassTag](
      sc: SparkContext,
      path: String,
      minPartitionsOpt: Option[Int] = None): RDD[T] = {
    val minPartitions = minPartitionsOpt.getOrElse(sc.defaultMinPartitions)
    val serializer = new KryoSerializer(sc.getConf)
    sc.sequenceFile(path,
                    classOf[NullWritable],
                    classOf[BytesWritable],
                    minPartitions)
      .mapPartitions { it =>
        val instance = serializer.newInstance()
        it.flatMap {
          case (_, v) =>
            instance.deserialize[Array[T]](ByteBuffer.wrap(v.getBytes))
        }
      }
  }

  object RandomizedSVDKryoRegistrator extends KryoRegistrator {
    def registerClasses(kryo: Kryo): Unit = {
      UnmodifiableCollectionsSerializer.registerSerializers(kryo)
      kryo.register(classOf[MatrixEntry])
      kryo.register(classOf[Array[MatrixEntry]])
    }
  }

  def appendBasicRegistratorToSparkConf(sparkConf: SparkConf): SparkConf =
    appendRegistratorToSparkConf(sparkConf,
                                 RandomizedSVDKryoRegistrator.getClass.getName)

  def appendRegistratorToSparkConf(sparkConf: SparkConf,
                                   registratorName: String): SparkConf = {
    val oldValue = sparkConf.get("spark.kryo.registrator", "")
    if (oldValue == "") {
      sparkConf.set("spark.kryo.registrator", registratorName)
    } else {
      sparkConf.set("spark.kryo.registrator", oldValue + "," + registratorName)
    }
  }
}
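appendRegistratorToSparkConf above builds a comma-separated value for spark.kryo.registrator, which makes registrators composable in Spark versions that accept multiple registrator class names. A usage sketch (com.example.MyExtraRegistrator is a hypothetical registrator):

import org.apache.spark.SparkConf

val conf = new SparkConf()
ReadingWritingData.appendBasicRegistratorToSparkConf(conf)
ReadingWritingData.appendRegistratorToSparkConf(conf, "com.example.MyExtraRegistrator")
// spark.kryo.registrator now contains both class names, comma-separated.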
Example 15
Source File: ModelStateSerializerKryo.scala From model-serving-tutorial with Apache License 2.0
package com.lightbend.modelserving.spark

import com.esotericsoftware.kryo.io.{Input, Output}
import com.esotericsoftware.kryo.{Kryo, Serializer}
import com.lightbend.model.winerecord.WineRecord
import com.lightbend.modelserving.model.ModelFactoryResolver
import org.apache.spark.serializer.KryoRegistrator

// NOTE: the class declaration and read() method of ModelStateSerializerKryo
// were elided in this excerpt; only write() survives. The class is assumed to
// extend Kryo's Serializer[ModelState], as required by the registration below.
class ModelStateSerializerKryo extends Serializer[ModelState] {

  override def write(kryo: Kryo, output: Output, value: ModelState): Unit = {
    val start = System.currentTimeMillis()
    output.writeLong(value.name.length)
    output.write(value.name.getBytes)
    output.writeLong(value.model.getType.value.toLong)
    val bytes = value.model.toBytes
    output.writeLong(bytes.length)
    output.write(bytes)
    println(s"KRYO serialization in ${System.currentTimeMillis() - start} ms")
  }

  // read() was not included in the excerpt; stubbed so the class compiles.
  override def read(kryo: Kryo, input: Input, t: Class[ModelState]): ModelState = ???
}

object ModelStateSerializerKryo {

  // Model Factory resolver
  private var resolver: ModelFactoryResolver[WineRecord, Double] = _

  // This method has to be invoked before execution starts
  def setResolver(res: ModelFactoryResolver[WineRecord, Double]): Unit = resolver = res

  // Ensure that resolver is set
  private def validateResolver(): Unit =
    if (resolver == null) throw new Exception("Model factory resolver is not set")
}

class ModelStateRegistrator extends KryoRegistrator {
  override def registerClasses(kryo: Kryo) {
    kryo.register(classOf[ModelState], new ModelStateSerializerKryo())
  }
}