scala.collection.mutable.Map Scala Examples
The following examples show how to use scala.collection.mutable.Map.
Each example notes its source file, the project it comes from, and that project's license.
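Before the project examples, here is a minimal, self-contained sketch of the core mutable.Map operations the examples below rely on. This snippet is written for this page, not taken from any of the projects listed (the object name MutableMapBasics is illustrative):

import scala.collection.mutable.Map

object MutableMapBasics {
  def main(args: Array[String]): Unit = {
    // Construction and in-place update
    val capitals = Map[String, String]()
    capitals += ("France" -> "Paris")
    capitals("Japan") = "Tokyo" // apply-assignment syntax calls update

    // getOrElseUpdate computes and stores the value only when the key is absent
    val lengths = Map[String, Int]()
    val n = lengths.getOrElseUpdate("France", "France".length)

    // Removal, and a default value for missing keys
    capitals -= "Japan"
    val counts = Map[String, Int]().withDefaultValue(0)
    counts("scala") += 1 // reads the default 0, then writes 1

    println((capitals, n, counts("scala")))
  }
}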
Example 1
Source File: Structs.scala From Graphviz4S with MIT License
package com.liangdp.graphviz4s.examples

import com.liangdp.graphviz4s.Digraph
import scala.collection.mutable.Map

object Structs {
  def main(args: Array[String]): Unit = {
    val s = new Digraph("structs")
    s.node("struct1", """<
      <TABLE BORDER="0" CELLBORDER="1" CELLSPACING="0">
        <TR>
          <TD>left</TD>
          <TD PORT="f1">middle</TD>
          <TD PORT="f2">right</TD>
        </TR>
      </TABLE>>""")
    s.node("struct2", """<
      <TABLE BORDER="0" CELLBORDER="1" CELLSPACING="0">
        <TR>
          <TD PORT="f0">one</TD>
          <TD>two</TD>
        </TR>
      </TABLE>>""")
    s.node("struct3", """<
      <TABLE BORDER="0" CELLBORDER="1" CELLSPACING="0" CELLPADDING="4">
        <TR>
          <TD ROWSPAN="3">hello<BR/>world</TD>
          <TD COLSPAN="3">b</TD>
          <TD ROWSPAN="3">g</TD>
          <TD ROWSPAN="3">h</TD>
        </TR>
        <TR>
          <TD>c</TD>
          <TD PORT="here">d</TD>
          <TD>e</TD>
        </TR>
        <TR>
          <TD COLSPAN="3">f</TD>
        </TR>
      </TABLE>>""")
    s.edges(Array(("struct1:f1", "struct2:f0"), ("struct1:f2", "struct3:here")))
    s.view(fileName = "structs.gv", directory = ".")
  }
}
Example 2
Source File: driver.scala From SparkSMOTE with MIT License
import java.io._
import utils._
import SMOTE._
import org.apache.log4j.Logger
import org.apache.log4j.Level
import breeze.linalg._
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import scala.collection.mutable.{ArrayBuffer, Map}

object driver {
  def main(args: Array[String]) {
    val conf = new SparkConf()

    val options = args.map { arg =>
      arg.dropWhile(_ == '-').split('=') match {
        case Array(opt, v) => (opt -> v)
        case Array(opt) => (opt -> "")
        case _ => throw new IllegalArgumentException("Invalid argument: " + arg)
      }
    }.toMap

    val rootLogger = Logger.getRootLogger()
    rootLogger.setLevel(Level.ERROR)

    val sc = new SparkContext(conf)

    // read in general inputs
    val inputDirectory = options.getOrElse("inputDirectory", "")
    val outputDirectory = options.getOrElse("outputDirectory", "")
    val numFeatures = options.getOrElse("numFeatures", "0").toInt
    val oversamplingPctg = options.getOrElse("oversamplingPctg", "1.0").toDouble
    val kNN = options.getOrElse("K", "5").toInt
    val delimiter = options.getOrElse("delimiter", ",")
    val numPartitions = options.getOrElse("numPartitions", "20").toInt

    SMOTE.runSMOTE(sc, inputDirectory, outputDirectory, numFeatures,
      oversamplingPctg, kNN, delimiter, numPartitions)

    println("The algorithm has finished running")
    sc.stop()
  }
}
Example 3
Source File: KyuubiDistributedCacheManager.scala From kyuubi with Apache License 2.0
package org.apache.spark.deploy.yarn

import java.net.URI

import scala.collection.mutable.{HashMap, Map}

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileStatus, FileSystem, Path}
import org.apache.hadoop.yarn.api.records.{LocalResource, LocalResourceType}

// Note: the object declaration and its `cacheManager` field are elided in this excerpt;
// only the method that threads a mutable statCache Map through YARN's cache manager is shown.
  def addResource(
      fs: FileSystem,
      conf: Configuration,
      destPath: Path,
      localResources: HashMap[String, LocalResource],
      resourceType: LocalResourceType,
      link: String,
      statCache: Map[URI, FileStatus]): Unit = {
    cacheManager.addResource(fs, conf, destPath,
      localResources, resourceType, link, statCache, appMasterOnly = true)
  }
}
Example 4
Source File: KyuubiDistributedCacheManagerSuite.scala From kyuubi with Apache License 2.0
package org.apache.spark.deploy.yarn

import java.net.URI

import scala.collection.mutable.{HashMap, Map}

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileStatus, FileSystem, Path}
import org.apache.hadoop.yarn.api.records.{LocalResource, LocalResourceType, LocalResourceVisibility}
import org.apache.hadoop.yarn.util.ConverterUtils
import org.apache.spark.{KyuubiSparkUtil, SparkFunSuite}
import org.mockito.Mockito.when
import org.scalatest.mock.MockitoSugar

import yaooqinn.kyuubi.utils.ReflectUtils

class KyuubiDistributedCacheManagerSuite extends SparkFunSuite with MockitoSugar {

  class MockClientDistributedCacheManager extends ClientDistributedCacheManager {
    override def getVisibility(
        conf: Configuration,
        uri: URI,
        statCache: Map[URI, FileStatus]): LocalResourceVisibility = {
      LocalResourceVisibility.PRIVATE
    }
  }

  test("add resource") {
    val fs = mock[FileSystem]
    val conf = new Configuration()
    val destPath = new Path("file:///foo.bar.com:8080/tmp/testing")
    val localResources = HashMap[String, LocalResource]()
    val statCache = HashMap[URI, FileStatus]()
    val status = new FileStatus()
    when(fs.getFileStatus(destPath)).thenReturn(status)
    val fileLink = "link"
    ReflectUtils.setFieldValue(
      KyuubiDistributedCacheManager, "cacheManager", new MockClientDistributedCacheManager)
    KyuubiDistributedCacheManager.addResource(
      fs, conf, destPath, localResources, LocalResourceType.FILE, fileLink, statCache)
    val res = localResources(fileLink)
    assert(res.getVisibility === LocalResourceVisibility.PRIVATE)
    assert(ConverterUtils.getPathFromYarnURL(res.getResource) === destPath)
    assert(res.getSize === 0)
    assert(res.getTimestamp === 0)
    assert(res.getType === LocalResourceType.FILE)
    val status2 = new FileStatus(
      10, false, 1, 1024, 10, 10, null, KyuubiSparkUtil.getCurrentUserName, null,
      new Path("/tmp/testing2"))
    val destPath2 = new Path("file:///foo.bar.com:8080/tmp/testing2")
    when(fs.getFileStatus(destPath2)).thenReturn(status2)
    val fileLink2 = "link2"
    KyuubiDistributedCacheManager.addResource(
      fs, conf, destPath2, localResources, LocalResourceType.FILE, fileLink2, statCache)
    val res2 = localResources(fileLink2)
    assert(res2.getVisibility === LocalResourceVisibility.PRIVATE)
    assert(ConverterUtils.getPathFromYarnURL(res2.getResource) === destPath2)
    assert(res2.getSize === 10)
    assert(res2.getTimestamp === 10)
    assert(res2.getType === LocalResourceType.FILE)
  }

  test("add resource when link null") {
    val distMgr = new MockClientDistributedCacheManager()
    val fs = mock[FileSystem]
    val conf = new Configuration()
    val destPath = new Path("file:///foo.bar.com:8080/tmp/testing")
    ReflectUtils.setFieldValue(KyuubiDistributedCacheManager, "cacheManager", distMgr)
    val localResources = HashMap[String, LocalResource]()
    val statCache = HashMap[URI, FileStatus]()
    when(fs.getFileStatus(destPath)).thenReturn(new FileStatus())
    intercept[Exception] {
      KyuubiDistributedCacheManager.addResource(
        fs, conf, destPath, localResources, LocalResourceType.FILE, null, statCache)
    }
    assert(localResources.get("link") === None)
    assert(localResources.size === 0)
  }

  test("test addResource archive") {
    val distMgr = new MockClientDistributedCacheManager()
    ReflectUtils.setFieldValue(KyuubiDistributedCacheManager, "cacheManager", distMgr)
    val fs = mock[FileSystem]
    val conf = new Configuration()
    val destPath = new Path("file:///foo.bar.com:8080/tmp/testing")
    val localResources = HashMap[String, LocalResource]()
    val statCache = HashMap[URI, FileStatus]()
    val realFileStatus = new FileStatus(10, false, 1, 1024, 10, 10, null, "testOwner", null,
      new Path("/tmp/testing"))
    when(fs.getFileStatus(destPath)).thenReturn(realFileStatus)
    KyuubiDistributedCacheManager.addResource(
      fs, conf, destPath, localResources, LocalResourceType.ARCHIVE, "link", statCache)
    val resource = localResources("link")
    assert(resource.getVisibility === LocalResourceVisibility.PRIVATE)
    assert(ConverterUtils.getPathFromYarnURL(resource.getResource) === destPath)
    assert(resource.getTimestamp === 10)
    assert(resource.getSize === 10)
    assert(resource.getType === LocalResourceType.ARCHIVE)
  }
}
Example 5
Source File: SparkSQLOperationManager.scala From spark1.52 with Apache License 2.0
package org.apache.spark.sql.hive.thriftserver.server

import java.util.{Map => JMap}

import scala.collection.mutable.Map

import org.apache.hive.service.cli._
import org.apache.hive.service.cli.operation.{ExecuteStatementOperation, Operation, OperationManager}
import org.apache.hive.service.cli.session.HiveSession
import org.apache.spark.Logging
import org.apache.spark.sql.hive.HiveContext
import org.apache.spark.sql.hive.thriftserver.{SparkExecuteStatementOperation, ReflectionUtils}

private[thriftserver] class SparkSQLOperationManager(hiveContext: HiveContext)
  extends OperationManager with Logging {

  val handleToOperation = ReflectionUtils
    .getSuperField[JMap[OperationHandle, Operation]](this, "handleToOperation")

  val sessionToActivePool = Map[SessionHandle, String]()

  override def newExecuteStatementOperation(
      parentSession: HiveSession,
      statement: String,
      confOverlay: JMap[String, String],
      async: Boolean): ExecuteStatementOperation = synchronized {
    val runInBackground = async && hiveContext.hiveThriftServerAsync
    val operation = new SparkExecuteStatementOperation(parentSession, statement, confOverlay,
      runInBackground)(hiveContext, sessionToActivePool)
    handleToOperation.put(operation.getHandle, operation)
    logDebug(s"Created Operation for $statement with session=$parentSession, " +
      s"runInBackground=$runInBackground")
    operation
  }
}
Example 6
Source File: TensorFlowWeightCollection.scala From SparkNet with MIT License
package libs

import scala.collection.mutable.Map
import scala.collection.mutable.MutableList

object TensorFlowWeightCollection {
  def scalarDivide(weights: Map[String, NDArray], v: Float) = {
    for (name <- weights.keys) {
      weights(name).scalarDivide(v)
    }
  }

  def add(wc1: Map[String, NDArray], wc2: Map[String, NDArray]): Map[String, NDArray] = {
    assert(wc1.keys == wc2.keys)
    // add the WeightCollection objects together
    var newWeights = Map[String, NDArray]()
    for (name <- wc1.keys) {
      newWeights += (name -> NDArray.plus(wc1(name), wc2(name)))
    }
    newWeights
  }

  def checkEqual(wc1: Map[String, NDArray], wc2: Map[String, NDArray], tol: Float): Boolean = {
    if (wc1.keys != wc2.keys) {
      return false
    }
    for (name <- wc1.keys) {
      if (!NDArray.checkEqual(wc1(name), wc2(name), tol)) {
        return false
      }
    }
    return true
  }
}
Example 7
Source File: CaffeWeightCollection.scala From SparkNet with MIT License
package libs

import scala.collection.mutable.Map
import scala.collection.mutable.MutableList

object CaffeWeightCollection {
  def scalarDivide(weights: Map[String, MutableList[NDArray]], v: Float) = {
    for (name <- weights.keys) {
      for (j <- 0 to weights(name).length - 1) {
        weights(name)(j).scalarDivide(v)
      }
    }
  }

  def add(weights1: Map[String, MutableList[NDArray]],
          weights2: Map[String, MutableList[NDArray]]): Map[String, MutableList[NDArray]] = {
    if (weights1.keys != weights2.keys) {
      throw new Exception("weights1.keys != weights2.keys, weights1.keys = "
        + weights1.keys.toString + ", and weights2.keys = " + weights2.keys.toString + "\n")
    }
    val newWeights = Map[String, MutableList[NDArray]]()
    for (name <- weights1.keys) {
      newWeights += (name -> MutableList())
      if (weights1(name).length != weights2(name).length) {
        throw new Exception("weights1(name).length != weights2(name).length, name = " + name
          + ", weights1(name).length = " + weights1(name).length.toString
          + ", weights2(name).length = " + weights2(name).length.toString)
      }
      for (j <- 0 to weights1(name).length - 1) {
        if (weights1(name)(j).shape.deep != weights2(name)(j).shape.deep) {
          throw new Exception("weights1(name)(j).shape != weights2(name)(j).shape, name = " + name
            + ", j = " + j.toString
            + ", weights1(name)(j).shape = " + weights1(name)(j).shape.deep.toString
            + ", weights2(name)(j).shape = " + weights2(name)(j).shape.deep.toString)
        }
        newWeights(name) += NDArray.plus(weights1(name)(j), weights2(name)(j))
      }
    }
    newWeights
  }

  def checkEqual(weights1: Map[String, MutableList[NDArray]],
                 weights2: Map[String, MutableList[NDArray]], tol: Float): Boolean = {
    if (weights1.keys != weights2.keys) {
      return false
    }
    for (name <- weights1.keys) {
      if (weights1(name).length != weights2(name).length) {
        return false
      }
      for (j <- 0 to weights1(name).length - 1) {
        if (!NDArray.checkEqual(weights1(name)(j), weights2(name)(j), tol)) {
          return false
        }
      }
    }
    return true
  }
}
Example 8
Source File: Process.scala From Graphviz4S with MIT License
package com.liangdp.graphviz4s.examples

import com.liangdp.graphviz4s.Graph
import scala.collection.mutable.Map

object Process {
  def main(args: Array[String]): Unit = {
    val g = new Graph("G")
    g.edge("run", "intr")
    g.edge("intr", "runbl")
    g.edge("runbl", "run")
    g.edge("run", "kernel")
    g.edge("kernel", "zombie")
    g.edge("kernel", "sleep")
    g.edge("kernel", "runmem")
    g.edge("sleep", "swap")
    g.edge("swap", "runswap")
    g.edge("runswap", "new")
    g.edge("runswap", "runmem")
    g.edge("new", "runmem")
    g.edge("sleep", "runmem")
    g.view(engine = "sfdp", fileName = "process.gv", directory = ".")
  }
}
Example 9
Source File: Fsm.scala From Graphviz4S with MIT License
package com.liangdp.graphviz4s.examples

import com.liangdp.graphviz4s.Digraph
import scala.collection.mutable.Map

object Fsm {
  def main(args: Array[String]): Unit = {
    val f = new Digraph("finite_state_machine")
    f.body += "rankdir=LR"
    f.body += """size="8,5""""

    f.attr("node", Map("shape" -> "doublecircle"))
    f.node("LR_0")
    f.node("LR_3")
    f.node("LR_4")
    f.node("LR_8")

    f.attr("node", Map("shape" -> "circle"))
    f.edge("LR_0", "LR_2", label = "SS(B)")
    f.edge("LR_0", "LR_1", label = "SS(S)")
    f.edge("LR_1", "LR_3", label = "S($end)")
    f.edge("LR_2", "LR_6", label = "SS(b)")
    f.edge("LR_2", "LR_5", label = "SS(a)")
    f.edge("LR_2", "LR_4", label = "S(A)")
    f.edge("LR_5", "LR_7", label = "S(b)")
    f.edge("LR_5", "LR_5", label = "S(a)")
    f.edge("LR_6", "LR_6", label = "S(b)")
    f.edge("LR_6", "LR_5", label = "S(a)")
    f.edge("LR_7", "LR_8", label = "S(b)")
    f.edge("LR_7", "LR_5", label = "S(a)")
    f.edge("LR_8", "LR_6", label = "S(b)")
    f.edge("LR_8", "LR_5", label = "S(a)")

    f.view(fileName = "fsm.gv", directory = ".")
  }
}
Example 10
Source File: Cluster.scala From Graphviz4S with MIT License
package com.liangdp.graphviz4s.examples

import com.liangdp.graphviz4s.Digraph
import scala.collection.mutable.Map

object Cluster {
  def main(args: Array[String]): Unit = {
    val g = new Digraph("G")

    val c0 = new Digraph("cluster_0")
    c0.body += "style=filled"
    c0.body += "color=lightgrey"
    c0.nodeAttr("style") = "filled"
    c0.nodeAttr("color") = "white"
    c0.edges(Array(("a0", "a1"), ("a1", "a2"), ("a2", "a3")))
    c0.body += """label = "process #1""""

    val c1 = new Digraph("cluster_1")
    c1.nodeAttr("style") = "filled"
    c1.edges(Array(("b0", "b1"), ("b1", "b2"), ("b2", "b3")))
    c1.body += """label = "process #2""""
    c1.body += "color=blue"

    g.subGraph(c0)
    g.subGraph(c1)

    g.edge("start", "a0")
    g.edge("start", "b0")
    g.edge("a1", "b3")
    g.edge("b2", "a3")
    g.edge("a3", "a0")
    g.edge("a3", "end")
    g.edge("b3", "end")

    g.node("start", attrs = Map("shape" -> "Mdiamond"))
    g.node("end", attrs = Map("shape" -> "Msquare"))

    g.view(fileName = "cluster.gv", directory = ".")
  }
}
Example 11
Source File: Er.scala From Graphviz4S with MIT License
package com.liangdp.graphviz4s.examples

import com.liangdp.graphviz4s.Graph
import scala.collection.mutable.Map

object Er {
  def main(args: Array[String]): Unit = {
    val e = new Graph("ER")

    e.attr("node", Map("shape" -> "box"))
    e.node("course")
    e.node("institute")
    e.node("student")

    e.attr("node", Map("shape" -> "ellipse"))
    e.node("name0", attrs = Map("label" -> "name"))
    e.node("name1", attrs = Map("label" -> "name"))
    e.node("name2", attrs = Map("label" -> "name"))
    e.node("code")
    e.node("grade")
    e.node("number")

    e.attr("node", attrs = Map("shape" -> "diamond", "style" -> "filled", "color" -> "lightgrey"))
    e.node("C-I")
    e.node("S-C")
    e.node("S-I")

    e.edge("name0", "course")
    e.edge("code", "course")
    e.edge("course", """"C-I"""", label = "n", attrs = Map("len" -> "1.00"))
    e.edge(""""C-I"""", "institute", label = "1", attrs = Map("len" -> "1.00"))
    e.edge("institute", "name1")
    e.edge("institute", """"S-I"""", label = "1", attrs = Map("len" -> "1.00"))
    e.edge(""""S-I"""", "student", label = "n", attrs = Map("len" -> "1.00"))
    e.edge("student", "grade")
    e.edge("student", "name2")
    e.edge("student", "number")
    e.edge("student", """"S-C"""", label = "m", attrs = Map("len" -> "1.00"))
    e.edge(""""S-C"""", "course", label = "n", attrs = Map("len" -> "1.00"))

    e.body += """label = "\n\nEntity Relation Diagram\ndrawn by NEATO""""
    e.body += "fontsize=20"

    e.view(engine = "neato", fileName = "er.gv", directory = ".")
  }
}
Example 12
Source File: Unix.scala From Graphviz4S with MIT License
package com.liangdp.graphviz4s.examples

import com.liangdp.graphviz4s.Digraph
import scala.collection.mutable.Map

object Unix {
  def main(args: Array[String]): Unit = {
    val u = new Digraph("unix")
    u.body += """size="6,6""""
    u.nodeAttr("color") = "lightblue2"
    u.nodeAttr("style") = "filled"

    u.edge(""""5th Edition"""", """"6th Edition"""")
    u.edge(""""5th Edition"""", """"PWB 1.0"""")
    u.edge(""""6th Edition"""", """"LSX"""")
    u.edge(""""6th Edition"""", """"1 BSD"""")
    u.edge(""""6th Edition"""", """"Mini Unix"""")
    u.edge(""""6th Edition"""", """"Wollongong"""")
    u.edge(""""6th Edition"""", """"Interdata"""")
    u.edge(""""Interdata"""", """"Unix/TS 3.0"""")
    u.edge(""""Interdata"""", """"PWB 2.0"""")
    u.edge(""""Interdata"""", """"7th Edition"""")
    u.edge(""""7th Edition"""", """"8th Edition"""")
    u.edge(""""7th Edition"""", """"32V"""")
    u.edge(""""7th Edition"""", """"V7M"""")
    u.edge(""""7th Edition"""", """"Ultrix-11"""")
    u.edge(""""7th Edition"""", """"Xenix"""")
    u.edge(""""7th Edition"""", """"UniPlus+"""")
    u.edge(""""V7M"""", """"Ultrix-11"""")
    u.edge(""""8th Edition"""", """"9th Edition"""")
    u.edge(""""1 BSD"""", """"2 BSD"""")
    u.edge(""""2 BSD"""", """"2.8 BSD"""")
    u.edge(""""2.8 BSD"""", """"Ultrix-11"""")
    u.edge(""""2.8 BSD"""", """"2.9 BSD"""")
    u.edge(""""32V"""", """"3 BSD"""")
    u.edge(""""3 BSD"""", """"4 BSD"""")
    u.edge(""""4 BSD"""", """"4.1 BSD"""")
    u.edge(""""4.1 BSD"""", """"4.2 BSD"""")
    u.edge(""""4.1 BSD"""", """"2.8 BSD"""")
    u.edge(""""4.1 BSD"""", """"8th Edition"""")
    u.edge(""""4.2 BSD"""", """"4.3 BSD"""")
    u.edge(""""4.2 BSD"""", """"Ultrix-32"""")
    u.edge(""""PWB 1.0"""", """"PWB 1.2"""")
    u.edge(""""PWB 1.0"""", """"USG 1.0"""")
    u.edge(""""PWB 1.2"""", """"PWB 2.0"""")
    u.edge(""""USG 1.0"""", """"CB Unix 1"""")
    u.edge(""""USG 1.0"""", """"USG 2.0"""")
    u.edge(""""CB Unix 1"""", """"CB Unix 2"""")
    u.edge(""""CB Unix 2"""", """"CB Unix 3"""")
    u.edge(""""CB Unix 3"""", """"Unix/TS++"""")
    u.edge(""""CB Unix 3"""", """"PDP-11 Sys V"""")
    u.edge(""""USG 2.0"""", """"USG 3.0"""")
    u.edge(""""USG 3.0"""", """"Unix/TS 3.0"""")
    u.edge(""""PWB 2.0"""", """"Unix/TS 3.0"""")
    u.edge(""""Unix/TS 1.0"""", """"Unix/TS 3.0"""")
    u.edge(""""Unix/TS 3.0"""", """"TS 4.0"""")
    u.edge(""""Unix/TS++"""", """"TS 4.0"""")
    u.edge(""""CB Unix 3"""", """"TS 4.0"""")
    u.edge(""""TS 4.0"""", """"System V.0"""")
    u.edge(""""System V.0"""", """"System V.2"""")
    u.edge(""""System V.2"""", """"System V.3"""")

    u.view(fileName = "unix.gv", directory = ".")
  }
}
Example 13
Source File: TaskResult.scala From iolap with Apache License 2.0
package org.apache.spark.scheduler

import java.io._
import java.nio.ByteBuffer

import scala.collection.mutable.Map

import org.apache.spark.SparkEnv
import org.apache.spark.executor.TaskMetrics
import org.apache.spark.storage.BlockId
import org.apache.spark.util.Utils

// Task result. Also contains updates to accumulator variables.
private[spark] sealed trait TaskResult[T]

// Note: the enclosing class declaration (and the valueBytes, valueObject and
// valueObjectDeserialized members this method uses) is elided in this excerpt.
  def value(): T = {
    if (valueObjectDeserialized) {
      valueObject
    } else {
      // This should not run when holding a lock because it may cost dozens of seconds for a large
      // value.
      val resultSer = SparkEnv.get.serializer.newInstance()
      valueObject = resultSer.deserialize(valueBytes)
      valueObjectDeserialized = true
      valueObject
    }
  }
}
Example 14
Source File: SparkSQLOperationManager.scala From BigDatalog with Apache License 2.0
package org.apache.spark.sql.hive.thriftserver.server

import java.util.{Map => JMap}

import scala.collection.mutable.Map

import org.apache.hive.service.cli._
import org.apache.hive.service.cli.operation.{ExecuteStatementOperation, Operation, OperationManager}
import org.apache.hive.service.cli.session.HiveSession
import org.apache.spark.Logging
import org.apache.spark.sql.hive.HiveContext
import org.apache.spark.sql.hive.thriftserver.{SparkExecuteStatementOperation, ReflectionUtils}

private[thriftserver] class SparkSQLOperationManager()
  extends OperationManager with Logging {

  val handleToOperation = ReflectionUtils
    .getSuperField[JMap[OperationHandle, Operation]](this, "handleToOperation")

  val sessionToActivePool = Map[SessionHandle, String]()
  val sessionToContexts = Map[SessionHandle, HiveContext]()

  override def newExecuteStatementOperation(
      parentSession: HiveSession,
      statement: String,
      confOverlay: JMap[String, String],
      async: Boolean): ExecuteStatementOperation = synchronized {
    val hiveContext = sessionToContexts(parentSession.getSessionHandle)
    val runInBackground = async && hiveContext.hiveThriftServerAsync
    val operation = new SparkExecuteStatementOperation(parentSession, statement, confOverlay,
      runInBackground)(hiveContext, sessionToActivePool)
    handleToOperation.put(operation.getHandle, operation)
    logDebug(s"Created Operation for $statement with session=$parentSession, " +
      s"runInBackground=$runInBackground")
    operation
  }
}
Example 15
Source File: Metadata.scala From marvin-engine-executor with Apache License 2.0
package org.marvin.model

import scala.collection.mutable.Map

case class EngineMetadata(name: String,
                          version: String,
                          engineType: String,
                          actions: List[EngineActionMetadata],
                          artifactsRemotePath: String,
                          artifactManagerType: String,
                          s3BucketName: String,
                          pipelineActions: List[String],
                          onlineActionTimeout: Double,
                          metricsTimeout: Double = 10000,
                          healthCheckTimeout: Double,
                          reloadTimeout: Double,
                          reloadStateTimeout: Option[Double],
                          batchActionTimeout: Double,
                          hdfsHost: String) {
  override def toString: String = name

  val artifactsLocalPath: String =
    sys.env.getOrElse("MARVIN_DATA_PATH", "/tmp").mkString.concat("/.artifacts")

  val pipelineTimeout: Double = (reloadTimeout + batchActionTimeout) * pipelineActions.length * 1.20

  val actionsMap: Map[String, EngineActionMetadata] = {
    val map = Map[String, EngineActionMetadata]()
    if (actions != null) {
      for (action <- actions) {
        map += ((action.name) -> action)
      }
    }
    map
  }
}

sealed abstract class ActionTypes(val name: String) {
  override def toString: String = name
}

case object BatchType extends ActionTypes(name = "batch")
case object OnlineType extends ActionTypes(name = "online")

case class EngineActionMetadata(name: String, actionType: String, port: Int, host: String,
                                artifactsToPersist: List[String], artifactsToLoad: List[String]) {
  override def toString: String = name
}
Example 16
Source File: CEPEngine.scala From piglet with Apache License 2.0
package dbis.piglet.cep.engines

import dbis.piglet.backends.{SchemaClass => Event}
import scala.reflect.ClassTag
import scala.collection.mutable.ListBuffer
import dbis.piglet.cep.nfa.NFAStructure
import dbis.piglet.cep.nfa.NFAController
import dbis.piglet.cep.nfa.NormalState
import scala.collection.mutable.Map
import dbis.piglet.cep.ops.MatchCollector

abstract class CEPEngine[T <: Event: ClassTag](nfaController: NFAController[T],
                                               collector: MatchCollector[T]) extends Serializable {

  val structureID = {
    var sid: Long = 0
    () => { sid += 1; sid }
  }

  var runningStructursPool: Map[Long, NFAStructure[T]] = Map()
  var wantToDeletedStructurs: ListBuffer[Long] = new ListBuffer()

  def createNewStructue(event: T): Unit = {
    val start = nfaController.getStartState
    start.edges.foreach { e =>
      if (e.evaluate(event)) {
        val newStr = new NFAStructure[T](nfaController)
        newStr.addEvent(event, e)
        runningStructursPool += (structureID() -> newStr)
      }
    }
  }

  def runGCStructures(): Unit = {
    if (runningStructursPool.size > 0) {
      runningStructursPool --= wantToDeletedStructurs
      //wantToDeletedStructurs.foreach { x => runningStructursPool -= x }
      wantToDeletedStructurs.clear()
    }
  }

  def checkPredicate(event: T, currenStr: NFAStructure[T]): Int = {
    var result: Int = -1
    if (currenStr.getCurrentState.isInstanceOf[NormalState[T]]) {
      val currentState = currenStr.getCurrentState.asInstanceOf[NormalState[T]]
      currentState.edges.zipWithIndex.foreach { case (e, i) =>
        if (e.evaluate(event, currenStr)) {
          result = i
        }
      }
    }
    result
  }

  def runEngine(event: T): Unit
  //def printNumMatches(): Unit
}
Example 17
Source File: SpatialFilter.scala From piglet with Apache License 2.0
package dbis.piglet.op

import scala.collection.mutable.Map

import dbis.piglet.expr.Predicate
import dbis.piglet.expr.Ref
import dbis.piglet.expr.Expr
import dbis.piglet.expr.SpatialFilterPredicate
import dbis.piglet.op.IndexMethod.IndexMethod

import scala.collection.mutable

// Note: the SpatialFilter operator's class declaration is elided in this excerpt;
// the members below refer to its fields (pred, idx, schema, outPipeName, inPipeName).
  override def lineageString: String = {
    s"""SPATIALFILTER%$pred%$idx""" + super.lineageString
  }

  override def resolveReferences(mapping: mutable.Map[String, Ref]): Unit =
    pred.resolveReferences(mapping)

  override def checkSchemaConformance: Boolean = {
    schema match {
      case Some(s) =>
        // if we know the schema we check all named fields
        pred.traverseAnd(s, Expr.checkExpressionConformance)
      case None =>
        // if we don't have a schema all expressions should contain only positional fields
        pred.traverseAnd(null, Expr.containsNoNamedFields)
    }
  }

  override def toString =
    s"""SPATIALFILTER
       |  out = $outPipeName
       |  in = $inPipeName
       |  schema = $schema
       |  expr = $pred
       |  idx = $idx""".stripMargin
}
Example 18
Source File: DoubleMill.scala From Scalaprof with GNU General Public License v2.0
package models

import scala.collection.mutable.{Stack, Map}
import scala.util._

object DoubleMill {
  val conv: String => Try[Double] = DoubleMill.valueOf _
  val lookup: String => Option[Double] = DoubleMill.constants.get _

  implicit val store = Map[String, Double]()
  implicit val parser = new ExpressionParser[Double](conv, lookup)

  def apply(): Mill[Double] = new MillNumeric(Stack[Double]()) {
    def apply(s: String): Try[Double] = DoubleMill.valueOf(s)
  }

  def valueOf(s: String): Try[Double] = Try(s.toDouble)

  val constants = Map("e" -> math.E, "pi" -> math.Pi)
}
Example 19
Source File: SpireMill.scala From Scalaprof with GNU General Public License v2.0
package models

import scala.collection.mutable.{Stack, Map}
import scala.util._
import spire.math._
import spire.implicits._

object SpireMill {
  val conv: String => Try[Real] = SpireMill.valueOf _
  val lookup: String => Option[Real] = SpireMill.constants.get _

  implicit val store = Map[String, Real]()
  implicit val parser = new ExpressionParser[Real](conv, lookup)

  def apply(): Mill[Real] = new MillSpire(Stack[Real]()) {
    def apply(s: String): Try[Real] = SpireMill.valueOf(s)
  }

  def valueOf(s: String): Try[Real] = Try(Real(s))

  val constants = Map("e" -> Real.e, "pi" -> Real.pi)
}
Example 20
Source File: MillNumeric.scala From Scalaprof with GNU General Public License v2.0
package models

import scala.collection.mutable.{Stack, Map}
import scala.util._

abstract class MillNumeric[A: Numeric](stack: Stack[A])(implicit store: Map[String, A])
  extends Mill[A](stack)(store) { self =>

  def operate(s: String): Unit = s match {
    case "+" => operate("plus")
    case "plus" => dyadic(implicitly[Numeric[A]].plus)
    case "-" => operate("chs"); operate("plus")
    case "chs" => monoadic(implicitly[Numeric[A]].negate)
    case "*" => operate("times")
    case "times" => dyadic(implicitly[Numeric[A]].times)
    case "div" => operate("/")
    case "/" => operate("inv"); operate("times")
    case "inv" =>
      val i = implicitly[Numeric[A]]
      if (i.isInstanceOf[Fractional[A]]) monoadic2(i.asInstanceOf[Fractional[A]].div _)(i.one)
    case "swap" => has(2); val (top, next) = (pop, pop); push(top); push(next)
    case "del" => has(1); pop
    case "clr" => stack.clear
    case x => throw new IllegalArgumentException(s"operator $x is not supported")
  }
}
Example 21
Source File: RationalMill.scala From Scalaprof with GNU General Public License v2.0
package models

import scala.collection.mutable.{Stack, Map}
import scala.util._
import edu.neu.coe.scala.numerics.Rational

object RationalMill {
  val conv: String => Try[Rational] = RationalMill.valueOf _
  val lookup: String => Option[Rational] = RationalMill.constants.get _

  implicit val store = Map[String, Rational]()
  implicit val parser = new ExpressionParser[Rational](conv, lookup)

  def apply(): Mill[Rational] = new MillNumeric(Stack[Rational]()) {
    def apply(s: String): Try[Rational] = RationalMill.valueOf(s)
  }

  def valueOf(s: String): Try[Rational] = Try(Rational(s))

  val constants = Map("e" -> Rational(BigDecimal(math.E)),
    "pi" -> Rational(BigDecimal(math.Pi)))
}
Example 22
Source File: Mill.scala From Scalaprof with GNU General Public License v2.0
package models

import scala.collection.mutable.{Stack, Map}
import scala.util._

abstract class Mill[A](stack: Stack[A])(implicit store: Map[String, A])
  extends Function1[Valuable[A], Try[A]] { self =>

  var debugMill = false

  def value = if (stack.size > 0) Success(stack.top)
    else Failure(new IllegalArgumentException("stack is empty"))

  def toSeq = stack.toSeq

  def show = println(stack)

  def push(x: A) = { if (debugMill) println(s"push $x"); stack.push(x) }

  def pop = { val x = stack.pop; if (debugMill) println(s"popped $x"); x }

  def setDebug(b: Boolean) { debugMill = b }

  def has(n: Int) = assert(stack.size >= n, s"operation requires $n element(s) on stack")

  def apply(v: Valuable[A]) = v match {
    case n @ Number(x) =>
      n.apply match { case Success(x) => push(x); case Failure(e) => throw e }; value
    case k @ Constant(x) =>
      k.apply match { case Success(x) => push(x); case Failure(e) => throw e }; value
    case Operator(s) => operate(s); value
    case MemInst(s, n) => memInst(s, n); value
  }

  def dyadic(f: (A, A) => A) = { has(2); push(f(pop, pop)) }

  def monoadic(f: (A) => A) = { has(1); push(f(pop)) }

  def monoadic2(f: (A, A) => A)(a: A) = { has(1); push(f(a, pop)) }

  def operate(s: String): Unit

  def memInst(s: String, k: String) = s.toLowerCase match {
    case "sto" => value match {
      case Success(x) => store.put(k, x)
      case Failure(e) => throw e
    }
    case "rcl" => store.get(k) match {
      case Some(x) => push(x)
      case None => throw new IllegalArgumentException(s"no value at memory location $k")
    }
  }

  def parse(s: String)(implicit parser: ExpressionParser[A]): Try[A] =
    parser.parseAll(parser.expr, s) match {
      case parser.Success(ws, _) =>
        try { (for (w <- ws) yield apply(w)).reverse.head } catch { case t: Throwable => Failure(t) }
      case parser.Failure(e, _) => Failure(new IllegalArgumentException(s"parseResult error: $e"))
      case r @ _ => Failure(new IllegalArgumentException(s"logic error: parseResult is $r"))
    }
}
Example 23
Source File: MillSpire.scala From Scalaprof with GNU General Public License v2.0
package models

import scala.collection.mutable.{Stack, Map}
import scala.util._
import spire.math._
import spire.implicits._

abstract class MillSpire[A: Numeric](stack: Stack[A])(implicit store: Map[String, A])
  extends Mill[A](stack)(store) { self =>

  def operate(s: String): Unit = s match {
    case "+" => operate("plus")
    case "plus" => dyadic(implicitly[Numeric[A]].plus _)
    case "-" => operate("chs"); operate("plus")
    case "chs" => monoadic(implicitly[Numeric[A]].negate)
    case "*" => operate("times")
    case "times" => dyadic(implicitly[Numeric[A]].times)
    case "div" => operate("/")
    case "/" => operate("inv"); operate("times")
    case "inv" =>
      val i = implicitly[Numeric[A]]
      if (i.isInstanceOf[Fractional[A]]) monoadic2(i.asInstanceOf[Fractional[A]].div _)(i.one)
    case "swap" => has(2); val (top, next) = (pop, pop); push(top); push(next)
    case "del" => has(1); pop
    case "clr" => stack.clear
    case x => throw new IllegalArgumentException(s"operator $x is not supported")
  }
}
Example 24
Source File: plot.scala From Scientific-Computing-with-Scala with MIT License
import scala.collection.mutable.{MutableList, Map}
import scala.math._
import org.jfree.chart._
import org.jfree.data.xy._
import org.jfree.data.statistics._
import java.io.{FileReader, BufferedReader}
import java.awt.GridLayout
import javax.swing.JFrame
import javax.swing.JPanel

object ScatterPlotMatrix {
  def readCSVFile(filename: String): Map[String, MutableList[String]] = {
    val file = new FileReader(filename)
    val reader = new BufferedReader(file)
    val csvdata: Map[String, MutableList[String]] = Map()
    try {
      val alldata = new MutableList[Array[String]]
      var line: String = null
      while ({line = reader.readLine(); line} != null) {
        if (line.length != 0) {
          val delimiter: String = ","
          var splitline: Array[String] = line.split(delimiter).map(_.trim)
          alldata += splitline
        }
      }
      val labels = MutableList("sepal length", "sepal width",
        "petal length", "petal width", "class")
      val labelled = labels.zipWithIndex.map {
        case (label, index) => label -> alldata.map(x => x(index))
      }
      for (pair <- labelled) {
        csvdata += pair
      }
    } finally {
      reader.close()
    }
    csvdata
  }

  def main(args: Array[String]) {
    val data = readCSVFile("iris.csv")
    val frame = new JFrame("Scatter Plot Matrix")
    frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE)
    frame.setLayout(new GridLayout(4, 4))
    val attributes = List("sepal length", "sepal width", "petal length", "petal width")
    val classes = List("Iris-setosa", "Iris-versicolor", "Iris-virginica")
    for ((a1, i) <- attributes.zipWithIndex) {
      for ((a2, j) <- attributes.zipWithIndex) {
        if (a1 == a2) {
          val dataset = new HistogramDataset()
          dataset.setType(HistogramType.RELATIVE_FREQUENCY)
          val xs = (for (x <- data(a1)) yield { x.toDouble }).toArray
          dataset.addSeries(a1, xs, 11)
          val chart = ChartFactory.createHistogram(null, a1, "frequency", dataset,
            org.jfree.chart.plot.PlotOrientation.VERTICAL, false, false, false)
          frame.add(new ChartPanel(chart, 200, 200, 200, 200, 200, 200,
            false, true, true, true, true, true))
        } else {
          val dataset = new DefaultXYDataset
          for (cls <- classes) {
            val xs = (for ((x, index) <- data(a1).zipWithIndex
              if data("class")(index) == cls) yield { x.toDouble }).toArray
            val ys = (for ((y, index) <- data(a2).zipWithIndex
              if data("class")(index) == cls) yield { y.toDouble }).toArray
            dataset.addSeries(cls, Array(xs, ys))
          }
          val chart = ChartFactory.createScatterPlot(null, a1, a2, dataset,
            org.jfree.chart.plot.PlotOrientation.VERTICAL, false, false, false)
          frame.add(new ChartPanel(chart, 200, 200, 200, 200, 200, 200,
            false, true, true, true, true, true))
        }
      }
    }
    frame.pack()
    frame.setVisible(true)
  }
}
Example 25
Source File: Main.scala From ros_hadoop with Apache License 2.0
package de.valtech.foss

import scala.io.Source
import scala.collection.mutable.Map
import scala.collection.mutable.ListBuffer
import scala.collection.JavaConverters._
import Console.{GREEN, RED, RESET}
import scala.language.reflectiveCalls

import java.io.File
import java.io.FileInputStream
import java.io.FileOutputStream
import java.nio.channels.FileChannel.MapMode._
import java.nio.ByteOrder._
import java.nio.ByteBuffer

import de.valtech.foss.proto.RosbagIdxOuterClass.RosbagIdx

object Main extends App {
  def help() = {
    Console.err.printf(s"""
${RESET}${GREEN}Usage:
  --file <ros.bag> file to process
  --version print Rosbag version and exit
  --offset <offset> --number <records> Seek at offset < 1073741824 and read the specified number of records
${RESET}By default will just create the protobuf idx file needed for configuration.\n\n""")
    sys.exit(0)
  }

  val pargs = Map[String, AnyRef]()
  def process_cli(args: List[String]): Boolean = args match {
    case Nil => true // parse success
    case "-v" :: rest => pargs += ("version" -> Some(true)); process_cli(rest)
    case "--version" :: rest => pargs += ("version" -> Some(true)); process_cli(rest)
    case "-f" :: x :: rest => pargs += ("file" -> x); process_cli(rest)
    case "--file" :: x :: rest => pargs += ("file" -> x); process_cli(rest)
    case "-n" :: x :: rest => pargs += ("number" -> Some(x.toInt)); process_cli(rest)
    case "--number" :: x :: rest => pargs += ("number" -> Some(x.toInt)); process_cli(rest)
    case "-o" :: x :: rest => pargs += ("offset" -> Some(x.toInt)); process_cli(rest)
    case "--offset" :: x :: rest => pargs += ("offset" -> Some(x.toInt)); process_cli(rest)
    case "-h" :: rest => help(); false
    case "--help" :: rest => help(); false
    case _ => Console.err.printf(s"${RESET}${RED}Unknown argument " + args.head); false
  }
  process_cli(args.toList)

  def use[T <: { def close() }](resource: T)(code: T ⇒ Unit) =
    try code(resource) finally resource.close()

  pargs("file") match {
    case f: String => process()
    case _ => help()
  }

  def process(): Unit = {
    val fin = new File(pargs("file").asInstanceOf[String])
    use(new FileInputStream(fin)) { stream => {
      //printf("min: %s\n", Math.min(1073741824, fin.length) )
      val buffer = stream.getChannel.map(READ_ONLY, 0,
        Math.min(1073741824, fin.length)).order(LITTLE_ENDIAN)
      val p: RosbagParser = new RosbagParser(buffer)
      val version = p.read_version()
      val h = p.read_record().get
      if (pargs contains "version") {
        printf("%s\n%s\n\n", version, h)
        return
      }
      if (pargs contains "number") {
        buffer position pargs.getOrElse("offset", None).asInstanceOf[Option[Int]].getOrElse(0)
        for (i <- List.range(0, pargs("number").asInstanceOf[Option[Int]].getOrElse(0)))
          println(p.read_record)
        return
      }
      val idxpos = h.header.fields("index_pos").asInstanceOf[Long]
      //printf("idxpos: %s %s\n", idxpos, Math.min(1073741824, fin.length) )
      val b = stream.getChannel.map(READ_ONLY, idxpos,
        Math.min(1073741824, fin.length - idxpos)).order(LITTLE_ENDIAN)
      val pp: RosbagParser = new RosbagParser(b)
      val c = pp.read_connections(h.header, Nil)
      val chunk_idx = pp.read_chunk_infos(c)
      Console.err.printf(s"""${RESET}${GREEN}Found: """ + chunk_idx.size +
        s""" chunks\n${RESET}It should be the same number reported by rosbag tool.\nIf you encounter any issues try reindexing your file and submit an issue.
${RESET}\n""")
      val fout = new FileOutputStream(pargs("file").asInstanceOf[String] + ".idx.bin")
      val builder = RosbagIdx.newBuilder
      for (i <- chunk_idx) builder.addArray(i)
      builder.build().writeTo(fout)
      fout.close()
      //printf("[%s]\n",chunk_idx.toArray.mkString(","))
    }}
  }
}
Example 26
Source File: PythonBigDLValidator.scala From BigDL with Apache License 2.0
package com.intel.analytics.bigdl.python.api

import java.lang.{Boolean => JBoolean}
import java.util.{ArrayList => JArrayList, HashMap => JHashMap, List => JList, Map => JMap}

import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric
import com.intel.analytics.bigdl.utils.Table

import scala.collection.JavaConverters._
import scala.collection.mutable.Map
import scala.language.existentials
import scala.reflect.ClassTag

object PythonBigDLValidator {
  def ofFloat(): PythonBigDLValidator[Float] = new PythonBigDLValidator[Float]()

  def ofDouble(): PythonBigDLValidator[Double] = new PythonBigDLValidator[Double]()
}

class PythonBigDLValidator[T: ClassTag](implicit ev: TensorNumeric[T]) extends PythonBigDL[T] {

  def testDict(): JMap[String, String] = {
    return Map("jack" -> "40", "lucy" -> "50").asJava
  }

  def testDictJTensor(): JMap[String, JTensor] = {
    return Map("jack" -> JTensor(Array(1.0f, 2.0f, 3.0f, 4.0f), Array(4, 1), "float")).asJava
  }

  def testDictJMapJTensor(): JMap[String, JMap[String, JTensor]] = {
    val table = new Table()
    val tensor = JTensor(Array(1.0f, 2.0f, 3.0f, 4.0f), Array(4, 1), "float")
    val result = Map("jack" -> tensor).asJava
    table.insert(tensor)
    return Map("nested" -> result).asJava
  }

  def testActivityWithTensor(): JActivity = {
    val tensor = Tensor(Array(1.0f, 2.0f, 3.0f, 4.0f), Array(4, 1))
    return JActivity(tensor)
  }

  def testActivityWithTableOfTensor(): JActivity = {
    val tensor1 = Tensor(Array(1.0f, 1.0f), Array(2))
    val tensor2 = Tensor(Array(2.0f, 2.0f), Array(2))
    val tensor3 = Tensor(Array(3.0f, 3.0f), Array(2))
    val table = new Table()
    table.insert(tensor1)
    table.insert(tensor2)
    table.insert(tensor3)
    return JActivity(table)
  }

  def testActivityWithTableOfTable(): JActivity = {
    val tensor = Tensor(Array(1.0f, 2.0f, 3.0f, 4.0f), Array(4, 1))
    val table = new Table()
    table.insert(tensor)
    val nestedTable = new Table()
    nestedTable.insert(table)
    nestedTable.insert(table)
    return JActivity(nestedTable)
  }
}
Example 27
Source File: Metrics.scala From BigDL with Apache License 2.0
package com.intel.analytics.bigdl.optim

import com.google.common.util.concurrent.AtomicDouble
import org.apache.spark.{Accumulable, Accumulator, SparkContext}

import scala.collection.mutable.{ArrayBuffer, Map}

class Metrics extends Serializable {
  private val localMetricsMap: Map[String, LocalMetricsEntry] = Map()
  private val aggregateDistributeMetricsMap: Map[String, AggregateDistributeMetricsEntry] = Map()
  private val distributeMetricsMap: Map[String, DistributeMetricsEntry] = Map()

  def add(name: String, value: Double): this.type = {
    require(localMetricsMap.contains(name) || aggregateDistributeMetricsMap.contains(name) ||
      distributeMetricsMap.contains(name))
    if (localMetricsMap.contains(name)) {
      localMetricsMap(name).value.addAndGet(value)
    }

    if (aggregateDistributeMetricsMap.contains(name)) {
      aggregateDistributeMetricsMap(name).value += value
    }

    if (distributeMetricsMap.contains(name)) {
      distributeMetricsMap(name).value += value
    }
    this
  }

  def set(name: String, value: Double, parallel: Int = 1): this.type = {
    require(!aggregateDistributeMetricsMap.contains(name), "duplicated distribute metric")
    require(!distributeMetricsMap.contains(name), "duplicated distribute metric2")
    if (localMetricsMap.contains(name)) {
      localMetricsMap(name).value.set(value)
      localMetricsMap(name).parallel = parallel
    } else {
      localMetricsMap(name) = LocalMetricsEntry(new AtomicDouble(value), parallel)
    }
    this
  }

  def set(name: String, value: Double, sc: SparkContext, parallel: Int): this.type = {
    require(!localMetricsMap.contains(name), "duplicated local metric")
    if (aggregateDistributeMetricsMap.contains(name)) {
      aggregateDistributeMetricsMap(name).value.setValue(value)
      aggregateDistributeMetricsMap(name).parallel = parallel
    } else {
      aggregateDistributeMetricsMap(name) =
        AggregateDistributeMetricsEntry(sc.accumulator(value, name), parallel)
    }
    this
  }

  def set(name: String, value: ArrayBuffer[Double], sc: SparkContext): this.type = {
    require(!localMetricsMap.contains(name), "duplicated local metric")
    require(!aggregateDistributeMetricsMap.contains(name), "duplicated distribute metric")
    if (distributeMetricsMap.contains(name)) {
      distributeMetricsMap(name).value.setValue(value)
    } else {
      distributeMetricsMap(name) = DistributeMetricsEntry(sc.accumulableCollection(value))
    }
    this
  }

  def get(name: String): (Double, Int) = {
    require(localMetricsMap.contains(name) || aggregateDistributeMetricsMap.contains(name))
    if (localMetricsMap.contains(name)) {
      (localMetricsMap(name).value.get(), localMetricsMap(name).parallel)
    } else {
      (aggregateDistributeMetricsMap(name).value.value,
        aggregateDistributeMetricsMap(name).parallel)
    }
  }

  def get(name: String, number: Int): Array[Double] = {
    require(distributeMetricsMap.contains(name))
    distributeMetricsMap(name).value.value.toArray.dropRight(number)
  }

  def summary(unit: String = "s", scale: Double = 1e9): String = {
    "========== Metrics Summary ==========\n" +
      localMetricsMap.map(
        entry => s"${entry._1} : ${entry._2.value.get() / entry._2.parallel / scale} $unit\n")
        .mkString("") +
      aggregateDistributeMetricsMap.map(
        entry => s"${entry._1} : ${entry._2.value.value / entry._2.parallel / scale} $unit\n")
        .mkString("") +
      distributeMetricsMap.map { entry =>
        s"${entry._1} : ${entry._2.value.value.map(_ / scale).mkString(" ")} \n"
      }.mkString("") +
      "====================================="
  }
}

private case class LocalMetricsEntry(value: AtomicDouble, var parallel: Int)

private case class AggregateDistributeMetricsEntry(value: Accumulator[Double], var parallel: Int)

private case class DistributeMetricsEntry(value: Accumulable[ArrayBuffer[Double], Double])
Example 28
Source File: Series.scala From salt-core with Apache License 2.0
package software.uncharted.salt.core.generation

import software.uncharted.salt.core.analytic.Aggregator
import software.uncharted.salt.core.projection.Projection
import software.uncharted.salt.core.spreading.SpreadingFunction
import software.uncharted.salt.core.generation.request.TileRequest
import software.uncharted.salt.core.generation.output.{SeriesData, Tile}
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD

import scala.reflect.ClassTag
import scala.collection.mutable.Map

class Series[
  RT,
  DC,
  TC,
  BC,
  @specialized(Int, Long, Double) T,
  @specialized(Int, Long, Double) U,
  @specialized(Int, Long, Double) V,
  W,
  X
](
  val maxBin: BC,
  val cExtractor: (RT) => Option[DC],
  val projection: Projection[DC, TC, BC],
  val vExtractor: (RT) => Option[T],
  val binAggregator: Aggregator[T, U, V],
  val tileAggregator: Option[Aggregator[V, W, X]] = None,
  val spreadingFunction: Option[SpreadingFunction[TC, BC, T]] = None) extends Serializable {

  private[salt] val id: String = java.util.UUID.randomUUID.toString

  def apply(tile: Tile[TC]): Option[SeriesData[TC, BC, V, X]] = {
    tile.seriesData.get(id).asInstanceOf[Option[SeriesData[TC, BC, V, X]]]
  }
}
Example 29
Source File: JsonMerkleRootsSerializer.scala From Sidechains-SDK with MIT License
package com.horizen.serialization

import com.fasterxml.jackson.core.JsonGenerator
import com.fasterxml.jackson.databind
import com.fasterxml.jackson.databind.SerializerProvider
import com.horizen.utils.{ByteArrayWrapper, BytesUtils}

import scala.collection.mutable.Map
import scala.collection.mutable.Iterable

class JsonMerkleRootsSerializer
  extends databind.JsonSerializer[Option[Map[ByteArrayWrapper, Array[Byte]]]] {

  override def serialize(t: Option[Map[ByteArrayWrapper, Array[Byte]]],
                         jsonGenerator: JsonGenerator,
                         serializerProvider: SerializerProvider): Unit = {
    if (t.isDefined) {
      var listOfPair: Iterable[Pair] =
        t.get.map(k => Pair(BytesUtils.toHexString(k._1.data), BytesUtils.toHexString(k._2)))
      jsonGenerator.writeObject(listOfPair)
    } else {
      jsonGenerator.writeStartArray()
      jsonGenerator.writeEndArray()
    }
  }
}

private case class Pair(key: String, value: String)
Example 30
Source File: TestBroadcastVariables.scala From spark-dev with GNU General Public License v3.0
package examples

import org.apache.spark.{ SparkContext, SparkConf }
import org.apache.spark.rdd.RDD
import org.apache.spark.broadcast.Broadcast

import scala.io.Source
import scala.util.{ Try, Success, Failure }
import scala.collection.mutable.Map

// Note: the enclosing object declaration is elided in this excerpt; only the
// CSV-loading helper that builds a mutable Map is shown.
  def loadCSVFile(filename: String): Option[Map[String, String]] = {
    val countries = Map[String, String]()

    Try {
      val bufferedSource = Source.fromFile(filename)

      for (line <- bufferedSource.getLines) {
        val Array(country, capital) = line.split(",").map(_.trim)
        countries += country -> capital
      }

      bufferedSource.close()
      return Some(countries)
    }.toOption
  }
}
Example 31
Source File: LeanMessagingProvider.scala From openwhisk with Apache License 2.0
package org.apache.openwhisk.connector.lean

import java.util.concurrent.BlockingQueue
import java.util.concurrent.LinkedBlockingQueue

import scala.collection.mutable.Map
import scala.collection.concurrent.TrieMap
import scala.concurrent.duration.FiniteDuration
import scala.util.Success
import scala.util.Try

import akka.actor.ActorSystem
import org.apache.openwhisk.common.Logging
import org.apache.openwhisk.core.WhiskConfig
import org.apache.openwhisk.core.connector.MessageConsumer
import org.apache.openwhisk.core.connector.MessageProducer
import org.apache.openwhisk.core.connector.MessagingProvider
import org.apache.openwhisk.core.entity.ByteSize

// Note: the enclosing object declaration is elided in this excerpt. A TrieMap
// (a concurrent implementation of mutable.Map) backs the in-memory topic queues.
  val queues: Map[String, BlockingQueue[Array[Byte]]] =
    new TrieMap[String, BlockingQueue[Array[Byte]]]

  def getConsumer(config: WhiskConfig, groupId: String, topic: String, maxPeek: Int, maxPollInterval: FiniteDuration)(
    implicit logging: Logging,
    actorSystem: ActorSystem): MessageConsumer = {
    val queue = queues.getOrElseUpdate(topic, new LinkedBlockingQueue[Array[Byte]]())
    new LeanConsumer(queue, maxPeek)
  }

  def getProducer(config: WhiskConfig, maxRequestSize: Option[ByteSize] = None)(
    implicit logging: Logging,
    actorSystem: ActorSystem): MessageProducer =
    new LeanProducer(queues)

  def ensureTopic(config: WhiskConfig, topic: String, topicConfigKey: String, maxMessageBytes: Option[ByteSize] = None)(
    implicit logging: Logging): Try[Unit] = {
    if (queues.contains(topic)) {
      Success(logging.info(this, s"topic $topic already existed"))
    } else {
      queues.put(topic, new LinkedBlockingQueue[Array[Byte]]())
      Success(logging.info(this, s"topic $topic created"))
    }
  }
}
Example 32
Source File: ConcurrentHashMap.scala From zio with Apache License 2.0
package zio.test

import scala.collection.mutable.Map

private[test] final case class ConcurrentHashMap[K, V] private (private val map: Map[K, V]) {
  def foldLeft[B](z: B)(f: (B, (K, V)) => B): B =
    map.foldLeft(z)(f)
  def getOrElseUpdate(key: K, op: => V): V =
    map.getOrElseUpdate(key, op)
}

private[test] object ConcurrentHashMap {
  def empty[K, V]: ConcurrentHashMap[K, V] =
    new ConcurrentHashMap[K, V](Map.empty[K, V])
}
Example 33
Source File: KustoAzureFsSetupCache.scala From azure-kusto-spark with Apache License 2.0
package com.microsoft.kusto.spark.utils

import org.joda.time.{DateTime, DateTimeZone, Period}

import scala.collection.mutable.Map

private[kusto] object KustoAzureFsSetupCache {
  private var storageAccountKeyMap: Map[String, String] = Map.empty[String, String]
  private var storageSasMap: Map[String, String] = Map.empty[String, String]
  private var nativeAzureFsSet = false
  private var lastRefresh: DateTime = new DateTime(DateTimeZone.UTC)

  // Return 'true' iff the entry exists in the cache. If it doesn't, or differs - update the cache
  // now is typically 'new DateTime(DateTimeZone.UTC)'
  def updateAndGetPrevStorageAccountAccess(account: String, secret: String, now: DateTime): Boolean = {
    var secretCached = storageAccountKeyMap.getOrElse(account, "")
    if (!secretCached.isEmpty && (secretCached != secret)) {
      // Entry exists but with a different secret - remove it and update
      storageAccountKeyMap.remove(account)
      secretCached = ""
    }

    if (secretCached.isEmpty || checkIfRefreshNeeded(now)) {
      storageAccountKeyMap.put(account, secret)
      lastRefresh = now
      false
    } else true
  }

  def updateAndGetPrevSas(container: String, account: String, secret: String, now: DateTime): Boolean = {
    val key = container + "." + account
    var secretCached = storageSasMap.getOrElse(key, "")
    if (!secretCached.isEmpty && (secretCached != secret)) {
      // Entry exists but with a different secret - remove it and update
      storageSasMap.remove(key)
      secretCached = ""
    }

    if (secretCached.isEmpty || checkIfRefreshNeeded(now)) {
      storageSasMap.put(key, secret)
      lastRefresh = now
      false
    } else true
  }

  def updateAndGetPrevNativeAzureFs(now: DateTime): Boolean = {
    if (nativeAzureFsSet || checkIfRefreshNeeded(now)) true
    else {
      nativeAzureFsSet = true
      false
    }
  }

  private[kusto] def checkIfRefreshNeeded(utcNow: DateTime) = {
    new Period(utcNow, lastRefresh).getMinutes > KustoConstants.sparkSettingsRefreshMinutes
  }
}
Example 34
Source File: CSVReader.scala From Scientific-Computing-with-Scala with MIT License
import scala.collection.mutable.{MutableList, Map}
import java.io.{FileReader, BufferedReader}

object CSVReader {
  def main(args: Array[String]) {
    val file = new FileReader("iris.csv")
    val reader = new BufferedReader(file)
    try {
      val alldata = new MutableList[Array[String]]
      var line: String = null
      while ({line = reader.readLine(); line} != null) {
        if (line.length != 0) {
          val delimiter: String = ","
          var splitline: Array[String] = line.split(delimiter).map(_.trim)
          alldata += splitline
        }
      }
      val labels = MutableList("sepal length", "sepal width",
        "petal length", "petal width", "class")
      val labelled = labels.zipWithIndex.map {
        case (label, index) => label -> alldata.map(x => x(index))
      }
      val csvdata: Map[String, MutableList[String]] = Map()
      for (pair <- labelled) {
        csvdata += pair
      }
    } finally {
      reader.close()
    }
  }
}
Example 35
Source File: plot.scala From Scientific-Computing-with-Scala with MIT License
import org.jfree.chart._
import org.jfree.data.xy._
import scala.math._
import scala.collection.mutable.{MutableList, Map}
import java.io.{FileReader, BufferedReader}

object ParallelCoordinates {
  def readCSVFile(filename: String): Map[String, MutableList[String]] = {
    val file = new FileReader(filename)
    val reader = new BufferedReader(file)
    val csvdata: Map[String, MutableList[String]] = Map()
    try {
      val alldata = new MutableList[Array[String]]
      var line: String = null
      while ({line = reader.readLine(); line} != null) {
        if (line.length != 0) {
          val delimiter: String = ","
          var splitline: Array[String] = line.split(delimiter).map(_.trim)
          alldata += splitline
        }
      }
      val labels = MutableList("sepal length", "sepal width",
        "petal length", "petal width", "class")
      val labelled = labels.zipWithIndex.map {
        case (label, index) => label -> alldata.map(x => x(index))
      }
      for (pair <- labelled) {
        csvdata += pair
      }
    } finally {
      reader.close()
    }
    csvdata
  }

  def main(args: Array[String]) {
    val data = readCSVFile("iris.csv")
    val dataset = new DefaultXYDataset
    for (i <- 0 until data("sepal length").size) {
      val x = Array(0.0, 1.0, 2.0, 3.0)
      val y1 = data("sepal length")(i).toDouble
      val y2 = data("sepal width")(i).toDouble
      val y3 = data("petal length")(i).toDouble
      val y4 = data("petal width")(i).toDouble
      val y = Array(y1, y2, y3, y4)
      val cls = data("class")(i)
      dataset.addSeries(cls + i, Array(x, y))
    }
    val frame = new ChartFrame("Parallel Coordinates",
      ChartFactory.createXYLineChart("Parallel Coordinates", "x", "y", dataset,
        org.jfree.chart.plot.PlotOrientation.VERTICAL, false, false, false))
    frame.pack()
    frame.setVisible(true)
  }
}
Example 36
Source File: ListViewExample.scala From scalajs-react-native with Apache License 2.0
package chandu0101.scalajs.rn.examples.uiexplorer.components

import chandu0101.scalajs.rn._
import chandu0101.scalajs.rn.components._
import chandu0101.scalajs.rn.examples.uiexplorer.{UIExplorerPage, UIExample}
import japgolly.scalajs.react.BackendScope
import chandu0101.scalajs.rn.styles.NativeStyleSheet

import scala.collection.mutable.Map
import scala.scalajs.js

object ListViewExample extends UIExample {

  val THUMB_URLS = js.Array(
    "https://fbcdn-dragon-a.akamaihd.net/hphotos-ak-ash3/t39.1997/p128x128/851549_767334479959628_274486868_n.png",
    "https://fbcdn-dragon-a.akamaihd.net/hphotos-ak-prn1/t39.1997/p128x128/851561_767334496626293_1958532586_n.png",
    "https://fbcdn-dragon-a.akamaihd.net/hphotos-ak-ash3/t39.1997/p128x128/851579_767334503292959_179092627_n.png",
    "https://fbcdn-dragon-a.akamaihd.net/hphotos-ak-prn1/t39.1997/p128x128/851589_767334513292958_1747022277_n.png",
    "https://fbcdn-dragon-a.akamaihd.net/hphotos-ak-prn1/t39.1997/p128x128/851563_767334559959620_1193692107_n.png",
    "https://fbcdn-dragon-a.akamaihd.net/hphotos-ak-prn1/t39.1997/p128x128/851593_767334566626286_1953955109_n.png",
    "https://fbcdn-dragon-a.akamaihd.net/hphotos-ak-prn1/t39.1997/p128x128/851591_767334523292957_797560749_n.png",
    "https://fbcdn-dragon-a.akamaihd.net/hphotos-ak-prn1/t39.1997/p128x128/851567_767334529959623_843148472_n.png",
    "https://fbcdn-dragon-a.akamaihd.net/hphotos-ak-prn1/t39.1997/p128x128/851548_767334489959627_794462220_n.png",
    "https://fbcdn-dragon-a.akamaihd.net/hphotos-ak-prn1/t39.1997/p128x128/851575_767334539959622_441598241_n.png",
    "https://fbcdn-dragon-a.akamaihd.net/hphotos-ak-ash3/t39.1997/p128x128/851573_767334549959621_534583464_n.png",
    "https://fbcdn-dragon-a.akamaihd.net/hphotos-ak-prn1/t39.1997/p128x128/851583_767334573292952_1519550680_n.png")

  val LOREM_IPSUM = "Lorem ipsum dolor sit amet, ius ad pertinax oportere accommodare, an vix civibus corrumpit referrentur. Te nam case ludus inciderint, te mea facilisi adipiscing. Sea id integre luptatum. In tota sale consequuntur nec. Erat ocurreret mei ei. Eu paulo sapientem vulputate est, vel an accusam intellegam interesset. Nam eu stet pericula reprimique, ea vim illud modus, putant invidunt reprehendunt ne qui."

  case class State(datasource: ListViewDataSource[String] =
    createListViewDataSource[String, js.Object](rowHasChanged = (r1, r2) => r1 != r2))

  class Backend(t: BackendScope[_, State]) {

    val pressedData = scala.collection.mutable.Map[String, Boolean]().withDefaultValue(false)

    def genRows(pressedData: Map[String, Boolean]) = {
      val dataBlob = js.Array[String]()
      (1 to 100).toList.zipWithIndex.foreach { case (i, index) =>
        val pressedText = if (pressedData.getOrElse(index.toString, false)) "pressed" else ""
        dataBlob += s"Row $i $pressedText"
      }
      dataBlob
    }

    def pressRow(rowID: String) = {
      pressedData.updated(rowID, pressedData(rowID))
      t.modState(s => s.copy(s.datasource.cloneWithRows(genRows(pressedData))))
    }

    def hashCode2(str: String) = {
      var hash = 15
      str.reverse.foreach(c => {
        hash = ((hash << 5) - hash) + c.toInt
      })
      hash
    }

    def renderRow(rowData: String, sectionID: String, rowID: String) = {
      val rowHash = Math.abs(hashCode2(rowData))
      val imageSource = ImageSource(uri = THUMB_URLS(rowHash % THUMB_URLS.length))
      TouchableHighlight(onPress = () => pressRow(rowID))(
        View()(
          View(style = styles.row)(
            Image(style = styles.thumb, source = imageSource),
            Text(style = styles.text)(
              s"$rowData - ${LOREM_IPSUM.substring(0, rowHash % 301 + 10)}"
            ),
            View(style = styles.separator)()
          )
        )
      )
    }

    val propsDynamic = t.propsDynamic
  }

  val component = ReactNativeComponentB[Unit]("ListViewExample")
    .initialState(State())
    .backend(new Backend(_))
    .render((P, S, B) => {
      View()(
        ListView(dataSource = S.datasource, renderRow = B.renderRow _)
      )
    })
    .componentWillMount(scope =>
      scope.modState(s => s.copy(s.datasource.cloneWithRows(scope.backend.genRows(Map())))))
    .buildNative

  object styles extends NativeStyleSheet {
    val row = style(
      flexDirection.row,
      justifyContent.center,
      padding := 10,
      backgroundColor := "#F6F6F6"
    )
    val separator = style(
      height := 1,
      backgroundColor := "#F6F6F6"
    )
    val thumb = style(width := 64, height := 64)
    val text = style(flex := 1)
  }

  override def title: String = "ListView - simple"

  override def description: String = "Performant, scrollable list of data."
}
Example 37
Source File: plot.scala From Scientific-Computing-with-Scala with MIT License | 5 votes |
import org.jfree.chart._
import org.jfree.data.xy._
import scala.math._
import scala.collection.mutable.{MutableList, Map}
import java.io.{FileReader, BufferedReader}

object AndrewsCurve {
  def readCSVFile(filename: String): Map[String, MutableList[String]] = {
    val file = new FileReader(filename)
    val reader = new BufferedReader(file)
    val csvdata: Map[String, MutableList[String]] = Map()
    try {
      val alldata = new MutableList[Array[String]]
      var line: String = null
      while ({line = reader.readLine(); line} != null) {
        if (line.length != 0) {
          val delimiter: String = ","
          val splitline: Array[String] = line.split(delimiter).map(_.trim)
          alldata += splitline
        }
      }
      val labels = MutableList("sepal length", "sepal width",
        "petal length", "petal width", "class")
      val labelled = labels.zipWithIndex.map {
        case (label, index) => label -> alldata.map(x => x(index))
      }
      for (pair <- labelled) {
        csvdata += pair
      }
    } finally {
      reader.close()
    }
    csvdata
  }

  def andrewsCurve(row: Array[Double]) = (t: Double) => {
    var result: Double = 0.0
    for ((attr, i) <- row.zipWithIndex) {
      if (i == 0) {
        result = result + row(i) / sqrt(2.0)
      } else if (i % 2 != 0) {
        result = result + row(i) * sin(((i + 1) / 2) * t)
      } else {
        result = result + row(i) * cos(((i + 1) / 2) * t)
      }
    }
    result
  }

  def main(args: Array[String]) {
    val data = readCSVFile("iris.csv")
    val x: Array[Double] = Array.tabulate(100) {
      (i: Int) => -Pi + 2.0 * Pi * (i / 100.0)
    }
    val dataset = new DefaultXYDataset
    for (i <- 0 until data("sepal length").size) {
      val x1 = data("sepal length")(i).toDouble
      val x2 = data("sepal width")(i).toDouble
      val x3 = data("petal length")(i).toDouble
      val x4 = data("petal width")(i).toDouble
      val cls = data("class")(i)
      val curve = x.map(andrewsCurve(Array(x1, x2, x3, x4)))
      dataset.addSeries(cls + i, Array(x, curve))
    }
    val frame = new ChartFrame("Andrews Curve",
      ChartFactory.createXYLineChart("Andrews Curve", "x", "y", dataset,
        org.jfree.chart.plot.PlotOrientation.VERTICAL, false, false, false))
    frame.pack()
    frame.setVisible(true)
  }
}
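The closure returned by andrewsCurve implements the standard Andrews projection f(t) = x1/sqrt(2) + x2*sin(t) + x3*cos(t) + x4*sin(2t) + ..., turning each four-attribute observation into one curve over t in [-Pi, Pi]. A self-contained sketch that evaluates the same formula for a single hard-coded row (the sample values are made up for illustration):

import scala.math._

object AndrewsPointCheck {
  // f(t) = x1/sqrt(2) + x2*sin(t) + x3*cos(t) + x4*sin(2t) + ...
  def andrews(row: Array[Double])(t: Double): Double =
    row.zipWithIndex.map {
      case (x, 0)               => x / sqrt(2.0)
      case (x, i) if i % 2 == 1 => x * sin(((i + 1) / 2) * t)
      case (x, i)               => x * cos(((i + 1) / 2) * t)
    }.sum

  def main(args: Array[String]): Unit = {
    val row = Array(5.1, 3.5, 1.4, 0.2) // illustrative values, not real data
    // sample the curve at a few points across [-Pi, Pi]
    for (t <- Seq(-Pi, 0.0, Pi / 2, Pi))
      println(f"f($t%+.2f) = ${andrews(row)(t)}%.4f")
  }
}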
Example 38
Source File: TaskResult.scala From SparkCore with Apache License 2.0 | 5 votes |
package org.apache.spark.scheduler

import java.io._
import java.nio.ByteBuffer

import scala.collection.mutable.Map

import org.apache.spark.SparkEnv
import org.apache.spark.executor.TaskMetrics
import org.apache.spark.storage.BlockId
import org.apache.spark.util.Utils

// Task result. Also contains updates to accumulator variables.
private[spark] sealed trait TaskResult[T]

private[spark] class DirectTaskResult[T](
    var valueBytes: ByteBuffer,
    var accumUpdates: Map[Long, Any],
    var metrics: TaskMetrics)
  extends TaskResult[T] with Externalizable {

  def this() = this(null.asInstanceOf[ByteBuffer], null, null)

  override def writeExternal(out: ObjectOutput): Unit = Utils.tryOrIOException {
    out.writeInt(valueBytes.remaining)
    Utils.writeByteBuffer(valueBytes, out)
    out.writeInt(accumUpdates.size)
    for ((key, value) <- accumUpdates) {
      out.writeLong(key)
      out.writeObject(value)
    }
    out.writeObject(metrics)
  }

  override def readExternal(in: ObjectInput): Unit = Utils.tryOrIOException {
    val blen = in.readInt()
    val byteVal = new Array[Byte](blen)
    in.readFully(byteVal)
    valueBytes = ByteBuffer.wrap(byteVal)

    val numUpdates = in.readInt
    if (numUpdates == 0) {
      accumUpdates = null
    } else {
      accumUpdates = Map()
      for (i <- 0 until numUpdates) {
        accumUpdates(in.readLong()) = in.readObject()
      }
    }
    metrics = in.readObject().asInstanceOf[TaskMetrics]
  }

  def value(): T = {
    val resultSer = SparkEnv.get.serializer.newInstance()
    resultSer.deserialize(valueBytes)
  }
}
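writeExternal and readExternal frame the accumulator map as a length prefix followed by key/value pairs, rebuilding a fresh mutable.Map on the way in. A self-contained sketch of the same length-prefixed round trip, using plain java.io streams and String values so it runs without Spark (names are illustrative):

import java.io._
import scala.collection.mutable

object MapRoundTrip {
  // Write the map as: size, then (key, value) pairs.
  def write(out: ObjectOutput, m: mutable.Map[Long, String]): Unit = {
    out.writeInt(m.size)
    for ((k, v) <- m) { out.writeLong(k); out.writeUTF(v) }
  }

  // Rebuild a fresh mutable map from the same framing.
  def read(in: ObjectInput): mutable.Map[Long, String] = {
    val n = in.readInt()
    val m = mutable.Map.empty[Long, String]
    for (_ <- 0 until n) m(in.readLong()) = in.readUTF()
    m
  }

  def main(args: Array[String]): Unit = {
    val bytes = new ByteArrayOutputStream()
    val out = new ObjectOutputStream(bytes)
    write(out, mutable.Map(1L -> "sum", 2L -> "count"))
    out.flush()

    val in = new ObjectInputStream(new ByteArrayInputStream(bytes.toByteArray))
    println(read(in)) // contents: 1 -> sum, 2 -> count
  }
}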
Example 39
Source File: Student.scala From Scala-Design-Patterns-Second-Edition with MIT License | 5 votes |
package com.ivan.nikolov.behavioral.mediator

import scala.collection.mutable.Map
import scala.collection.mutable.Set

trait Notifiable {
  def notify(message: String)
}

case class Student(name: String, age: Int) extends Notifiable {
  override def notify(message: String): Unit = {
    System.out.println(s"Student $name was notified with message: '$message'.")
  }
}

case class Group(name: String)

trait Mediator {
  def addStudentToGroup(student: Student, group: Group)
  def isStudentInGroup(student: Student, group: Group): Boolean
  def removeStudentFromGroup(student: Student, group: Group)
  def getStudentsInGroup(group: Group): List[Student]
  def getGroupsForStudent(student: Student): List[Group]
  def notifyStudentsInGroup(group: Group, message: String)
}

class School extends Mediator {
  val studentsToGroups: Map[Student, Set[Group]] = Map()
  val groupsToStudents: Map[Group, Set[Student]] = Map()

  override def addStudentToGroup(student: Student, group: Group): Unit = {
    studentsToGroups.getOrElseUpdate(student, Set()) += group
    groupsToStudents.getOrElseUpdate(group, Set()) += student
  }

  override def isStudentInGroup(student: Student, group: Group): Boolean =
    groupsToStudents.getOrElse(group, Set()).contains(student) &&
      studentsToGroups.getOrElse(student, Set()).contains(group)

  override def getStudentsInGroup(group: Group): List[Student] =
    groupsToStudents.getOrElse(group, Set()).toList

  override def getGroupsForStudent(student: Student): List[Group] =
    studentsToGroups.getOrElse(student, Set()).toList

  override def notifyStudentsInGroup(group: Group, message: String): Unit = {
    groupsToStudents.getOrElse(group, Set()).foreach(_.notify(message))
  }

  override def removeStudentFromGroup(student: Student, group: Group): Unit = {
    studentsToGroups.getOrElse(student, Set()) -= group
    groupsToStudents.getOrElse(group, Set()) -= student
  }
}

object SchoolExample {
  def main(args: Array[String]): Unit = {
    val school = new School
    // create students
    val student1 = Student("Ivan", 26)
    val student2 = Student("Maria", 26)
    val student3 = Student("John", 25)
    // create groups
    val group1 = Group("Scala design patterns")
    val group2 = Group("Databases")
    val group3 = Group("Cloud computing")

    school.addStudentToGroup(student1, group1)
    school.addStudentToGroup(student1, group2)
    school.addStudentToGroup(student1, group3)
    school.addStudentToGroup(student2, group1)
    school.addStudentToGroup(student2, group3)
    school.addStudentToGroup(student3, group1)
    school.addStudentToGroup(student3, group2)

    // notify
    school.notifyStudentsInGroup(group1, "Design patterns in Scala are amazing!")

    // see groups
    System.out.println(s"$student3 is in groups: ${school.getGroupsForStudent(student3)}")
    // remove from group
    school.removeStudentFromGroup(student3, group2)
    System.out.println(s"$student3 is in groups: ${school.getGroupsForStudent(student3)}")

    // see students in group
    System.out.println(s"Students in $group1 are ${school.getStudentsInGroup(group1)}")
  }
}
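School keeps its two mutable multimaps in sync: getOrElseUpdate inserts an empty Set on first touch, while the getOrElse(..., Set()) calls in the read and remove paths hand back a throwaway empty set for absent keys, making removal a safe no-op. A minimal sketch of that multimap idiom on its own (the keys and tags are illustrative):

import scala.collection.mutable

object MultiMapDemo {
  val tags = mutable.Map.empty[String, mutable.Set[String]]

  // First access creates the bucket; later accesses reuse it.
  def add(key: String, tag: String): Unit =
    tags.getOrElseUpdate(key, mutable.Set.empty) += tag

  // Absent keys yield a fresh empty set, so -= is a harmless no-op.
  def remove(key: String, tag: String): Unit =
    tags.getOrElse(key, mutable.Set.empty) -= tag

  def main(args: Array[String]): Unit = {
    add("doc1", "scala"); add("doc1", "patterns"); add("doc2", "spark")
    remove("doc1", "patterns")
    remove("missing", "anything") // no exception, nothing stored
    println(tags) // contents: doc1 -> Set(scala), doc2 -> Set(spark)
  }
}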
Example 40
Source File: Circle.scala From Scala-Design-Patterns-Second-Edition with MIT License | 5 votes |
package com.ivan.nikolov.structural.flyweight

import scala.collection.mutable.{ListBuffer, Map}

class Circle(color: Color) {
  System.out.println(s"Creating a circle with $color color.")

  override def toString(): String = s"Circle($color)"
}

object Circle {
  val cache = Map.empty[Color, Circle]

  def apply(color: Color): Circle = cache.getOrElseUpdate(color, new Circle(color))

  def circlesCreated(): Int = cache.size
}

class Graphic {
  val items = ListBuffer.empty[(Int, Int, Double, Circle)]

  def addCircle(x: Int, y: Int, radius: Double, circle: Circle): Unit = {
    items += ((x, y, radius, circle))
  }

  def draw(): Unit = {
    items.foreach {
      case (x, y, radius, circle) =>
        System.out.println(s"Drawing a circle at ($x, $y) with radius $radius: $circle")
    }
  }
}

object FlyweightExample {
  def main(args: Array[String]): Unit = {
    val graphic = new Graphic
    graphic.addCircle(1, 1, 1.0, Circle(Green))
    graphic.addCircle(1, 2, 1.0, Circle(Red))
    graphic.addCircle(2, 1, 1.0, Circle(Blue))
    graphic.addCircle(2, 2, 1.0, Circle(Green))
    graphic.addCircle(2, 3, 1.0, Circle(Yellow))
    graphic.addCircle(3, 2, 1.0, Circle(Magenta))
    graphic.addCircle(3, 3, 1.0, Circle(Blue))
    graphic.addCircle(4, 3, 1.0, Circle(Blue))
    graphic.addCircle(3, 4, 1.0, Circle(Yellow))
    graphic.addCircle(4, 4, 1.0, Circle(Red))
    graphic.draw()
    System.out.println(s"Total number of circle objects created: ${Circle.circlesCreated()}")
  }
}
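Circle.apply memoizes on the color key through getOrElseUpdate, so the ten addCircle calls above construct only five Circle instances; Color and its values (Green, Red, and so on) are defined elsewhere in the same package. A self-contained sketch of the same memoization idiom, with a hypothetical Glyph type standing in for Circle:

import scala.collection.mutable

object FlyweightSketch {
  final case class Glyph(char: Char) {
    println(s"Constructing glyph for '$char'")
  }

  private val cache = mutable.Map.empty[Char, Glyph]

  // Construct at most one Glyph per character; the by-name
  // default runs only when the key is missing.
  def glyph(c: Char): Glyph = cache.getOrElseUpdate(c, Glyph(c))

  def main(args: Array[String]): Unit = {
    "scala".foreach(glyph) // prints a construction line for s, c, a, l only
    println(s"Distinct glyphs created: ${cache.size}") // 4
  }
}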
Example 41
Source File: LexicalTrie.scala From jgo with GNU General Public License v3.0 | 5 votes |
package jgo.tools.compiler
package lexer

import scala.collection.mutable.Map
import scala.util.parsing.input.Reader

object LexicalTrie {
  // This class is mutable, but LexicalTrie's public interface prevents modification.
  private class TrieNode {
    private[LexicalTrie] val branches: Map[Char, TrieNode] = Map()
    private[LexicalTrie] var accepting: Option[String] = None
  }

  def apply(elems: String*) = {
    val ret = new LexicalTrie
    elems foreach { ret += _ }
    ret
  }
}

final class LexicalTrie {
  import LexicalTrie.TrieNode

  private val root = new TrieNode

  private def += (str: String): LexicalTrie = {
    var cur = root
    var i = 0
    while (i < str.length) {
      cur = cur.branches.getOrElseUpdate(str(i), new TrieNode)
      i += 1
    }
    cur.accepting = Some(str)
    this
  }

  def contains(str: String): Boolean = {
    var cur = root
    var i = 0
    while (i < str.length) {
      if (cur.branches contains str(i)) {
        cur = cur.branches(str(i))
        i += 1
      } else
        return false
    }
    assert(if (cur.accepting.isDefined) cur.accepting.get == str else true)
    cur.accepting.isDefined
  }

  def matchingPrefixOf(r: Reader[Char]): (Option[String], Reader[Char]) =
    prefixOfFromNode(root, r)

  private def prefixOfFromNode(cur: TrieNode, r: Reader[Char]): (Option[String], Reader[Char]) =
    if (cur.branches contains r.first) {
      val fromNextNode = prefixOfFromNode(cur.branches(r.first), r.rest)
      if (fromNextNode._1.isDefined) // _1 refers to the first (there is no zeroth) term of the pair
        fromNextNode
      else
        (cur.accepting, r)
    } else
      (cur.accepting, r)
}
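Each keyword is stored one character per node, and a node that ends a keyword carries the full string in accepting, which is what lets matchingPrefixOf return the longest operator at the head of a Reader. A small usage sketch, assuming it sits in the same package as LexicalTrie and that scala-parser-combinators provides CharSequenceReader:

import scala.util.parsing.input.CharSequenceReader

object TrieDemo {
  def main(args: Array[String]): Unit = {
    // Longest match wins: "<<=" is preferred over "<<" and "<".
    val operators = LexicalTrie("<", "<<", "<<=")

    println(operators.contains("<<")) // true
    println(operators.contains("<=")) // false: no such path in the trie

    val (matched, rest) = operators.matchingPrefixOf(new CharSequenceReader("<<= x"))
    println(matched)    // Some(<<=)
    println(rest.first) // ' ' -- the reader is left at the unconsumed input
  }
}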
Example 42
Source File: SparkSQLOperationManager.scala From iolap with Apache License 2.0 | 5 votes |
package org.apache.spark.sql.hive.thriftserver.server

import java.util.{Map => JMap}

import scala.collection.mutable.Map

import org.apache.hive.service.cli._
import org.apache.hive.service.cli.operation.{ExecuteStatementOperation, Operation, OperationManager}
import org.apache.hive.service.cli.session.HiveSession
import org.apache.spark.Logging
import org.apache.spark.sql.hive.HiveContext
import org.apache.spark.sql.hive.thriftserver.{SparkExecuteStatementOperation, ReflectionUtils}

private[thriftserver] class SparkSQLOperationManager(hiveContext: HiveContext)
  extends OperationManager with Logging {

  val handleToOperation = ReflectionUtils
    .getSuperField[JMap[OperationHandle, Operation]](this, "handleToOperation")

  val sessionToActivePool = Map[SessionHandle, String]()

  override def newExecuteStatementOperation(
      parentSession: HiveSession,
      statement: String,
      confOverlay: JMap[String, String],
      async: Boolean): ExecuteStatementOperation = synchronized {
    val operation = new SparkExecuteStatementOperation(parentSession, statement, confOverlay)(
      hiveContext, sessionToActivePool)
    handleToOperation.put(operation.getHandle, operation)
    operation
  }
}
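getSuperField reaches into a private field declared on the Hive OperationManager superclass. A minimal sketch of how such a helper can be written with plain Java reflection; this is an illustration under that assumption, not Spark's actual ReflectionUtils:

object SuperFieldDemo {
  // Read a (possibly private) field declared on the immediate superclass.
  def getSuperField[T](obj: AnyRef, fieldName: String): T = {
    val field = obj.getClass.getSuperclass.getDeclaredField(fieldName)
    field.setAccessible(true)
    field.get(obj).asInstanceOf[T]
  }

  class Base { private val secret: String = "hidden" }
  class Derived extends Base

  def main(args: Array[String]): Unit = {
    println(getSuperField[String](new Derived, "secret")) // hidden
  }
}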