com.esotericsoftware.kryo.KryoSerializable Scala Examples
The following examples show how to use com.esotericsoftware.kryo.KryoSerializable.
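As a quick orientation before the examples, here is a minimal, self-contained sketch (not taken from any of the projects below) of the KryoSerializable contract: the class provides its own write and read methods, and Kryo invokes them whenever an instance is serialized or deserialized. The Point and KryoRoundTrip names are illustrative only.

import java.io.{ByteArrayInputStream, ByteArrayOutputStream}

import com.esotericsoftware.kryo.{Kryo, KryoSerializable}
import com.esotericsoftware.kryo.io.{Input, Output}

class Point(var x: Double, var y: Double) extends KryoSerializable {
  // Kryo needs a no-arg constructor to instantiate the object before read() runs.
  def this() = this(0.0, 0.0)

  override def write(kryo: Kryo, output: Output): Unit = {
    output.writeDouble(x)
    output.writeDouble(y)
  }

  override def read(kryo: Kryo, input: Input): Unit = {
    x = input.readDouble()
    y = input.readDouble()
  }
}

object KryoRoundTrip {
  def main(args: Array[String]): Unit = {
    val kryo = new Kryo()
    // Registration lets Kryo write a small class id instead of the full class name.
    kryo.register(classOf[Point])

    val bytes = new ByteArrayOutputStream()
    val output = new Output(bytes)
    kryo.writeObject(output, new Point(1.5, 2.5))
    output.close()

    val input = new Input(new ByteArrayInputStream(bytes.toByteArray))
    val restored = kryo.readObject(input, classOf[Point])
    input.close()

    println(s"x=${restored.x}, y=${restored.y}") // prints x=1.5, y=2.5
  }
}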
Example 1
Source File: MemoryContextStore.scala from dbpedia-spotlight-model (Apache License 2.0)
package org.dbpedia.spotlight.db.memory

import java.util.{HashMap, Map}

import com.esotericsoftware.kryo.io.{Input, Output}
import com.esotericsoftware.kryo.{Kryo, KryoException, KryoSerializable}
import org.apache.commons.lang.NotImplementedException
import org.dbpedia.spotlight.db.model.{ContextStore, TokenTypeStore}
import org.dbpedia.spotlight.model.{DBpediaResource, TokenType}

// Excerpt: the class declaration and its fields (tokens, counts, totalTokenCounts, qc)
// are omitted in this snippet; shown are the token-count helper and the custom
// KryoSerializable read method.

  def calculateTotalTokenCounts() {
    var i = 0
    while (i < counts.size) {
      // Slots that were never filled are null and fail the isInstanceOf check, so they are skipped.
      if (counts(i).isInstanceOf[Array[Short]]) {
        var j = 0
        while (j < counts(i).size) {
          totalTokenCounts(i) += qc(counts(i)(j))
          j += 1
        }
      }
      i += 1
    }
  }

  def read(kryo: Kryo, input: Input) {
    val size = input.readInt()

    tokens = new Array[Array[Int]](size)
    counts = new Array[Array[Short]](size)
    totalTokenCounts = new Array[Int](size)

    var i = 0
    var j = 0

    while (i < size) {
      val subsize = input.readInt()

      if (subsize > 0) {
        tokens(i) = new Array[Int](subsize)
        counts(i) = new Array[Short](subsize)

        j = 0
        while (j < subsize) {
          tokens(i)(j) = input.readInt()
          j += 1
        }

        j = 0
        while (j < subsize) {
          counts(i)(j) = input.readShort()
          j += 1
        }
      }
      i += 1
    }

    // The writer is expected to terminate the record with a '#' sentinel;
    // anything else indicates a truncated or misaligned stream.
    if (input.readChar() != '#')
      throw new KryoException("Error in deserializing context store...")
  }
}
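The excerpt shows only the read side; the check for a trailing '#' implies that the corresponding write method (omitted here) ends each record with that character. Below is a hedged, standalone sketch of this end-marker pattern; CountsStore is an illustrative name and is not part of dbpedia-spotlight.

import com.esotericsoftware.kryo.{Kryo, KryoException, KryoSerializable}
import com.esotericsoftware.kryo.io.{Input, Output}

class CountsStore(var counts: Array[Short]) extends KryoSerializable {
  def this() = this(Array.empty[Short]) // no-arg constructor for Kryo

  override def write(kryo: Kryo, output: Output): Unit = {
    output.writeInt(counts.length)
    counts.foreach(c => output.writeShort(c.toInt))
    output.writeChar('#') // sentinel marking the expected end of the record
  }

  override def read(kryo: Kryo, input: Input): Unit = {
    val size = input.readInt()
    counts = new Array[Short](size)
    var i = 0
    while (i < size) {
      counts(i) = input.readShort()
      i += 1
    }
    // Fail fast if the sentinel is missing, e.g. because the stream was truncated.
    if (input.readChar() != '#')
      throw new KryoException("Error in deserializing counts store...")
  }
}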
Example 2
Source File: L4-3ProtonFlux.scala from prosparkstreaming (Apache License 2.0)
package org.apress.prospark

import com.esotericsoftware.kryo.{Kryo, KryoSerializable}
import com.esotericsoftware.kryo.io.{Input, Output}

class ProtonFlux(
    var year: Int,
    var bin0_57to1_78: Double,
    var bin3_40to17_6: Double,
    var bin22_0to31_0: Double,
    var bin1_894to2_605: Double,
    var bin4_200to6_240: Double,
    var bin3_256to8_132: Double,
    var bin3_276to8_097: Double,
    var bin6_343to42_03: Double,
    var bin17_88to26_81: Double,
    var bin30_29to69_47: Double,
    var bin132_8to242_0: Double
) extends KryoSerializable {

  def this(year: String, bin0_57to1_78: String, bin3_40to17_6: String,
      bin22_0to31_0: String, bin1_894to2_605: String, bin4_200to6_240: String,
      bin3_256to8_132: String, bin3_276to8_097: String, bin6_343to42_03: String,
      bin17_88to26_81: String, bin30_29to69_47: String, bin132_8to242_0: String) {
    this(year.toInt, bin0_57to1_78.toDouble, bin3_40to17_6.toDouble,
      bin22_0to31_0.toDouble, bin1_894to2_605.toDouble, bin4_200to6_240.toDouble,
      bin3_256to8_132.toDouble, bin3_276to8_097.toDouble, bin6_343to42_03.toDouble,
      bin17_88to26_81.toDouble, bin30_29to69_47.toDouble, bin132_8to242_0.toDouble)
  }

  def isSolarStorm = (bin0_57to1_78 > 1.0 || bin3_40to17_6 > 1.0 ||
    bin22_0to31_0 > 1.0 || bin1_894to2_605 > 1.0 || bin4_200to6_240 > 1.0 ||
    bin3_256to8_132 > 1.0 || bin3_276to8_097 > 1.0 || bin6_343to42_03 > 1.0 ||
    bin17_88to26_81 > 1.0 || bin30_29to69_47 > 1.0 || bin132_8to242_0 > 1.0)

  override def write(kryo: Kryo, output: Output) {
    output.writeInt(year)
    output.writeDouble(bin0_57to1_78)
    output.writeDouble(bin3_40to17_6)
    output.writeDouble(bin22_0to31_0)
    output.writeDouble(bin1_894to2_605)
    output.writeDouble(bin4_200to6_240)
    output.writeDouble(bin3_256to8_132)
    output.writeDouble(bin3_276to8_097)
    output.writeDouble(bin6_343to42_03)
    output.writeDouble(bin17_88to26_81)
    output.writeDouble(bin30_29to69_47)
    output.writeDouble(bin132_8to242_0)
  }

  override def read(kryo: Kryo, input: Input) {
    year = input.readInt()
    bin0_57to1_78 = input.readDouble()
    bin3_40to17_6 = input.readDouble()
    bin22_0to31_0 = input.readDouble()
    bin1_894to2_605 = input.readDouble()
    bin4_200to6_240 = input.readDouble()
    bin3_256to8_132 = input.readDouble()
    bin3_276to8_097 = input.readDouble()
    bin6_343to42_03 = input.readDouble()
    bin17_88to26_81 = input.readDouble()
    bin30_29to69_47 = input.readDouble()
    bin132_8to242_0 = input.readDouble()
  }
}
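ProtonFlux comes from a Spark Streaming example, where implementing KryoSerializable keeps record serialization compact. Below is a hedged sketch of how such a class might be registered with Spark's Kryo serializer; the ProtonFluxKryoSetup object and this exact configuration are assumptions, not the book's actual setup.

import org.apache.spark.SparkConf
import org.apache.spark.serializer.KryoSerializer
import org.apress.prospark.ProtonFlux

object ProtonFluxKryoSetup {
  def sparkConf(): SparkConf = {
    new SparkConf()
      .setAppName("ProtonFluxApp")
      // Route serialization through Kryo instead of Java serialization.
      .set("spark.serializer", classOf[KryoSerializer].getName)
      // Registered classes are written as small ids rather than full class names.
      .registerKryoClasses(Array(classOf[ProtonFlux]))
  }
}

With this in place, instances of ProtonFlux shuffled or checkpointed by Spark go through the write/read methods defined above.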
Example 3
Source File: FeaturePoint.scala from spark-pip (Apache License 2.0)
package com.esri

import com.esotericsoftware.kryo.io.{Input, Output}
import com.esotericsoftware.kryo.{Kryo, KryoSerializable}
import com.vividsolutions.jts.geom.Geometry

case class FeaturePoint(var geom: Geometry, var attr: Array[String]) extends Feature with KryoSerializable {

  // No-arg constructor so Kryo can instantiate the object before calling read().
  def this() = this(null, null)

  // Map the point into a grid cell of the given size.
  override def toRowCols(cellSize: Double): Seq[(RowCol, FeaturePoint)] = {
    val coordinate = geom.getCoordinate
    val c = (coordinate.x / cellSize).floor.toInt
    val r = (coordinate.y / cellSize).floor.toInt
    Seq((RowCol(r, c), this))
  }

  override def write(kryo: Kryo, output: Output): Unit = {
    val coordinate = geom.getCoordinate
    output.writeDouble(coordinate.x)
    output.writeDouble(coordinate.y)
    output.writeInt(attr.length)
    attr.foreach(output.writeString)
  }

  override def read(kryo: Kryo, input: Input): Unit = {
    val x = input.readDouble()
    val y = input.readDouble()
    geom = GeomFact.createPoint(x, y)
    val len = input.readInt()
    attr = Array.ofDim[String](len)
    for (i <- 0 until len)
      attr(i) = input.readString()
  }
}
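FeaturePoint avoids handing the whole JTS Geometry object graph to Kryo: write persists only the coordinates and attributes, and read rebuilds the geometry through the project's GeomFact helper. Below is a hedged, standalone sketch of the same idea using a plain JTS GeometryFactory directly; PointFeature is an illustrative name, and GeomFact is assumed to be a project-level wrapper around such a factory.

import com.esotericsoftware.kryo.{Kryo, KryoSerializable}
import com.esotericsoftware.kryo.io.{Input, Output}
import com.vividsolutions.jts.geom.{Coordinate, GeometryFactory, Point}

class PointFeature(var point: Point, var attr: Array[String]) extends KryoSerializable {
  def this() = this(null, Array.empty[String]) // no-arg constructor for Kryo

  override def write(kryo: Kryo, output: Output): Unit = {
    // Persist only the coordinates, not the JTS object graph.
    output.writeDouble(point.getX)
    output.writeDouble(point.getY)
    output.writeInt(attr.length)
    attr.foreach(a => output.writeString(a))
  }

  override def read(kryo: Kryo, input: Input): Unit = {
    val x = input.readDouble()
    val y = input.readDouble()
    // Rebuild the geometry from the stored coordinates.
    point = new GeometryFactory().createPoint(new Coordinate(x, y))
    val len = input.readInt()
    attr = Array.ofDim[String](len)
    for (i <- 0 until len) attr(i) = input.readString()
  }
}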