scala.collection.mutable.MutableList Scala Examples

The following examples show how to use scala.collection.mutable.MutableList. Each example is taken from an open-source project; follow the link above each example to view the original source file.
Example 1
Source File: LIFO.scala — from the spatial project (MIT License)
package spatial.lang

import argon._
import forge.tags._
import spatial.node._

import scala.collection.mutable.MutableList

/** Staged LIFO (stack) memory for the spatial DSL.
  *
  * Element type `A` must have a bit representation (`Bits`). All operations
  * build IR nodes via `stage(...)` rather than executing directly.
  * NOTE(review): the `Ref[MutableList[Any], ...]` parent suggests the host-side
  * representation is a mutable list — confirm against the `Ref` trait.
  */
@ref class LIFO[A:Bits] extends Top[LIFO[A]]
         with LocalMem1[A,LIFO]
         with Ref[MutableList[Any],LIFO[A]] {
  // Evidence of the element type's bit representation.
  val A: Bits[A] = Bits[A]
  // Evidence that a LIFO is a local memory (required by LocalMem1).
  val evMem: LIFO[A] <:< LocalMem[A,LIFO] = implicitly[LIFO[A] <:< LocalMem[A,LIFO]]

  
  // Stages a read of the top element without popping it (no enable conditions).
  @api def peek(): A = stage(LIFOPeek(this,Set.empty))

  // --- Typeclass Methods
  // Reads pop and writes push; `addr` is ignored because access is stack-ordered.
  @rig def __read(addr: Seq[Idx], ens: Set[Bit]): A = stage(LIFOPop(this,ens))
  @rig def __write(data: A, addr: Seq[Idx], ens: Set[Bit]): Void = stage(LIFOPush(this,data,ens))
  // Reset is a no-op for LIFOs.
  @rig def __reset(ens: Set[Bit]): Void = void
}
object LIFO {
  /** Stages the allocation of a LIFO with capacity `depth`. */
  @api def apply[A:Bits](depth: I32): LIFO[A] = stage(LIFONew(depth))
}
Example 2
Source File: CSVReader.scala — from Scientific-Computing-with-Scala (MIT License)
import scala.collection.mutable.{MutableList, Map}
import java.io.{FileReader, BufferedReader}

/** Reads iris.csv and pivots its rows into a map from column label to the
  * list of values in that column. The map is built but not otherwise used;
  * this is a reading example.
  */
object CSVReader {
  def main(args: Array[String]) {
    val file = new FileReader("iris.csv")
    val reader = new BufferedReader(file)
    try {
      // One entry per non-blank line, split into trimmed fields.
      val alldata = new MutableList[Array[String]]
      val delimiter = ","  // hoisted: was re-created on every loop iteration
      var line: String = null
      while ({line = reader.readLine(); line} != null) {
        if (line.nonEmpty) {
          // val, not var: the split result is never reassigned
          val splitline: Array[String] = line.split(delimiter).map(_.trim)
          alldata += splitline
        }
      }
      // Column labels for the iris data set, in file order.
      val labels = MutableList("sepal length", "sepal width",
        "petal length", "petal width", "class")
      // Pair each label with the corresponding column extracted from the rows.
      val labelled = labels.zipWithIndex.map {
        case (label, index) => label -> alldata.map(x => x(index))
      }
      val csvdata: Map[String, MutableList[String]] = Map()
      for (pair <- labelled) {
        csvdata += pair
      }
    }
    finally {
      // Always release the file handle, even if parsing throws.
      reader.close()
    }
  }
}
Example 3
Source File: plot.scala — from Scientific-Computing-with-Scala (MIT License)
import org.jfree.chart._
import org.jfree.data.xy._
import scala.math._
import scala.collection.mutable.{MutableList, Map}
import java.io.{FileReader, BufferedReader}

object ParallelCoordinates {
  /** Reads `filename` as comma-separated values and pivots the rows into a
    * map from column label to the list of values in that column.
    * Assumes the iris.csv layout: four numeric columns plus a class label.
    */
  def readCSVFile(filename: String): Map[String, MutableList[String]] = {
    val file = new FileReader(filename)
    val reader = new BufferedReader(file)
    val csvdata: Map[String, MutableList[String]] = Map()
    try {
      val alldata = new MutableList[Array[String]]
      val delimiter = ","  // hoisted: it never changes between iterations
      var line: String = null
      while ({line = reader.readLine(); line} != null) {
        if (line.nonEmpty) {
          // val, not var: the split row is never reassigned
          val splitline: Array[String] = line.split(delimiter).map(_.trim)
          alldata += splitline
        }
      }
      val labels = MutableList("sepal length", "sepal width",
        "petal length", "petal width", "class")
      val labelled = labels.zipWithIndex.map {
        case (label, index) => label -> alldata.map(x => x(index))
      }
      for (pair <- labelled) {
        csvdata += pair
      }
    } finally {
      reader.close()
    }
    csvdata
  }

  /** Draws one polyline per iris sample across the four attribute axes. */
  def main(args: Array[String]) {
    val data = readCSVFile("iris.csv")
    val dataset = new DefaultXYDataset
    for (i <- 0 until data("sepal length").size) {
      // x positions are the four parallel axes; y values are the attributes.
      val x = Array(0.0, 1.0, 2.0, 3.0)
      val y = Array(
        data("sepal length")(i).toDouble,
        data("sepal width")(i).toDouble,
        data("petal length")(i).toDouble,
        data("petal width")(i).toDouble)
      val cls = data("class")(i)
      // Series key must be unique per sample, hence the index suffix.
      dataset.addSeries(cls + i, Array(x, y))
    }
    val frame = new ChartFrame("Parallel Coordinates",
      ChartFactory.createXYLineChart("Parallel Coordinates", "x", "y",
      dataset, org.jfree.chart.plot.PlotOrientation.VERTICAL,
      false, false, false))
    frame.pack()
    frame.setVisible(true)
  }
}
Example 4
Source File: plot.scala — from Scientific-Computing-with-Scala (MIT License)
import scala.collection.mutable.{MutableList, Map}
import scala.math._
import org.jfree.chart._
import org.jfree.data.xy._
import org.jfree.data.statistics._
import java.io.{FileReader, BufferedReader}
import java.awt.GridLayout
import javax.swing.JFrame
import javax.swing.JPanel

// Renders a 4x4 scatter-plot matrix of the iris attributes: histograms on the
// diagonal, pairwise scatter plots (one series per iris class) elsewhere.
object ScatterPlotMatrix {
  // Reads `filename` as CSV and pivots the rows into a map from column label
  // to the list of values in that column. Assumes the iris.csv layout.
  def readCSVFile(filename: String): Map[String, MutableList[String]] = {
    val file = new FileReader(filename)
    val reader = new BufferedReader(file)
    val csvdata: Map[String, MutableList[String]] = Map()
    try {
      // One entry per non-blank line, split into trimmed fields.
      val alldata = new MutableList[Array[String]]
      var line:String = null
      while ({line = reader.readLine(); line} != null) {
        if (line.length != 0) {
          val delimiter: String = ","
          var splitline: Array[String] = line.split(delimiter).map(_.trim)
          alldata += splitline
        }
      }
      // Column labels for the iris data set, in file order.
      val labels = MutableList("sepal length", "sepal width",
        "petal length", "petal width", "class")
      // Pair each label with the corresponding column extracted from the rows.
      val labelled = labels.zipWithIndex.map {
        case (label, index) => label -> alldata.map(x => x(index))
      }
      for (pair <- labelled) {
        csvdata += pair
      }
    } finally {
      // Always release the file handle, even if parsing throws.
      reader.close()
    }
    csvdata
  }

  def main(args: Array[String]) {
    val data = readCSVFile("iris.csv")
    val frame = new JFrame("Scatter Plot Matrix")
    frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE)
    // 4x4 grid: one cell per (attribute, attribute) pair, filled in row order.
    frame.setLayout(new GridLayout(4, 4))
    val attributes = List("sepal length", "sepal width", 
      "petal length", "petal width")
    val classes = List("Iris-setosa", "Iris-versicolor", "Iris-virginica")
    for ((a1, i) <- attributes.zipWithIndex) {
      for ((a2, j) <- attributes.zipWithIndex) {
        if (a1 == a2) {
          // Diagonal cell: relative-frequency histogram of the attribute.
          val dataset = new HistogramDataset();
          dataset.setType(HistogramType.RELATIVE_FREQUENCY);
          val xs = (for (x <- data(a1)) yield { x.toDouble }).toArray
          // 11 bins for the histogram.
          dataset.addSeries(a1, xs, 11);
          val chart = ChartFactory.createHistogram(null, a1, "frequency",
            dataset, org.jfree.chart.plot.PlotOrientation.VERTICAL,
            false, false, false)
          frame.add(new ChartPanel(chart, 200, 200, 200, 200, 200, 200,
            false, true, true, true, true, true))
        } else {
          // Off-diagonal cell: a1 vs a2 scatter plot, one series per class.
          val dataset = new DefaultXYDataset
          for (cls <- classes) {
            // Select only the samples belonging to the current class.
            val xs = (for ((x, index) <- data(a1).zipWithIndex
              if data("class")(index) == cls)
            yield { x.toDouble }).toArray
            val ys = (for ((y, index) <- data(a2).zipWithIndex
              if data("class")(index) == cls)
            yield { y.toDouble }).toArray
            dataset.addSeries(cls, Array(xs, ys))
          }
          val chart = ChartFactory.createScatterPlot(null, 
            a1, a2, dataset, org.jfree.chart.plot.PlotOrientation.VERTICAL, 
            false, false, false)
          frame.add(new ChartPanel(chart, 200, 200, 200, 200, 200, 200,
            false, true, true, true, true, true))
        }
      }
    }
    frame.pack()
    frame.setVisible(true)
  }
}
Example 5
Source File: plot.scala — from Scientific-Computing-with-Scala (MIT License)
import org.jfree.chart._
import org.jfree.data.xy._
import scala.math._
import scala.collection.mutable.{MutableList, Map}
import java.io.{FileReader, BufferedReader}

object AndrewsCurve {
  /** Reads `filename` as comma-separated values and pivots the rows into a
    * map from column label to the list of values in that column.
    * Assumes the iris.csv layout: four numeric columns plus a class label.
    */
  def readCSVFile(filename: String): Map[String, MutableList[String]] = {
    val file = new FileReader(filename)
    val reader = new BufferedReader(file)
    val csvdata: Map[String, MutableList[String]] = Map()
    try {
      val alldata = new MutableList[Array[String]]
      val delimiter = ","  // hoisted: it never changes between iterations
      var line: String = null
      while ({line = reader.readLine(); line} != null) {
        if (line.nonEmpty) {
          // val, not var: the split row is never reassigned
          val splitline: Array[String] = line.split(delimiter).map(_.trim)
          alldata += splitline
        }
      }
      val labels = MutableList("sepal length", "sepal width",
        "petal length", "petal width", "class")
      val labelled = labels.zipWithIndex.map {
        case (label, index) => label -> alldata.map(x => x(index))
      }
      for (pair <- labelled) {
        csvdata += pair
      }
    } finally {
      reader.close()
    }
    csvdata
  }

  /** Returns the Andrews curve for one sample `row`:
    * f(t) = x1/sqrt(2) + x2*sin(t) + x3*cos(t) + x4*sin(2t) + ...
    * `(i + 1) / 2` is deliberate integer division: it yields the harmonic
    * multiplier sequence 1, 1, 2, 2, ... for successive attributes.
    */
  def andrewsCurve(row: Array[Double]) = (t: Double) => {
    var result: Double = 0.0
    // row.indices instead of zipWithIndex: the bound element was unused.
    for (i <- row.indices) {
      if (i == 0) {
        result += row(i) / sqrt(2.0)
      } else if (i % 2 != 0) {
        result += row(i) * sin(((i + 1) / 2) * t)
      } else {
        result += row(i) * cos(((i + 1) / 2) * t)
      }
    }
    result
  }

  /** Plots one Andrews curve per iris sample over t in [-Pi, Pi). */
  def main(args: Array[String]) {
    val data = readCSVFile("iris.csv")
    // 100 evenly spaced sample points of t in [-Pi, Pi).
    val x: Array[Double] = Array.tabulate(100) {
      (i: Int) => -Pi + 2.0 * Pi * (i / 100.0)
    }
    val dataset = new DefaultXYDataset
    for (i <- 0 until data("sepal length").size) {
      val x1 = data("sepal length")(i).toDouble
      val x2 = data("sepal width")(i).toDouble
      val x3 = data("petal length")(i).toDouble
      val x4 = data("petal width")(i).toDouble
      val cls = data("class")(i)
      val curve = x.map(andrewsCurve(Array(x1, x2, x3, x4)))
      // Series key must be unique per sample, hence the index suffix.
      dataset.addSeries(cls + i, Array(x, curve))
    }
    val frame = new ChartFrame("Andrews Curve",
      ChartFactory.createXYLineChart("Andrews Curve", "x", "y",
      dataset, org.jfree.chart.plot.PlotOrientation.VERTICAL,
      false, false, false))
    frame.pack()
    frame.setVisible(true)
  }
}
Example 6
Source File: WrapperTrait.scala — from the sparker project (GNU GPL v3.0)
package SparkER.Wrappers

import SparkER.DataStructures.{KeyValue, MatchingEntities, Profile}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.Row

import scala.collection.mutable.MutableList


  /** Converts a Spark SQL Row into a list of KeyValue attributes, one per
    * non-null cell, keyed by the corresponding entry of `columnNames`.
    *
    * Collection-valued cells yield one KeyValue per element; string cells are
    * optionally split on `innerSeparator` when `explodeInnerFields` is true;
    * any other value is stored via its toString form.
    */
  def rowToAttributes(columnNames: Array[String], row: Row, explodeInnerFields: Boolean = false, innerSeparator: String = ","): MutableList[KeyValue] = {
    val attributes: MutableList[KeyValue] = new MutableList()
    for (i <- 0 until row.size) { // `until` instead of `0 to size - 1`
      try {
        val value = row(i)
        val attributeKey = columnNames(i)

        if (value != null) {
          value match {
            // Wildcard instead of Iterable[Any]: the element type is erased
            // at runtime, so the Any annotation gave a false sense of checking.
            case listOfAttributes: Iterable[_] =>
              // foreach, not map: only the side effect is wanted.
              listOfAttributes.foreach { attributeValue =>
                attributes += KeyValue(attributeKey, attributeValue.toString)
              }
            case stringAttribute: String =>
              if (explodeInnerFields) {
                stringAttribute.split(innerSeparator).foreach { attributeValue =>
                  attributes += KeyValue(attributeKey, attributeValue)
                }
              }
              else {
                attributes += KeyValue(attributeKey, stringAttribute)
              }
            case singleAttribute =>
              attributes += KeyValue(attributeKey, singleAttribute.toString)
          }
        }
      }
      catch {
        // NOTE(review): catching Throwable swallows fatal errors too; consider
        // scala.util.control.NonFatal. Kept as-is to preserve behavior.
        case e: Throwable => println(e)
      }
    }
    attributes
  }
} 
Example 7
Source File: WrapperTrait.scala — from the sparker project (GNU GPL v3.0)
package Wrappers

import DataStructures.{KeyValue, MatchingEntities, Profile}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.Row

import scala.collection.mutable.MutableList


  /** Turns one Spark SQL Row into KeyValue attributes.
    *
    * Each non-null cell contributes at least one KeyValue keyed by the
    * matching entry of `columnNames`: collections contribute one per element,
    * strings may be exploded on `innerSeparator`, and everything else is
    * stored via toString. Per-cell failures are printed and skipped.
    */
  def rowToAttributes(columnNames: Array[String], row: Row, explodeInnerFields: Boolean = false, innerSeparator: String = ","): MutableList[KeyValue] = {
    val collected: MutableList[KeyValue] = new MutableList()
    var idx = 0
    while (idx < row.size) {
      try {
        val cell = row(idx)
        val key = columnNames(idx)
        if (cell != null) {
          cell match {
            case many: Iterable[Any] =>
              for (element <- many) {
                collected += KeyValue(key, element.toString)
              }
            case text: String =>
              if (explodeInnerFields) {
                for (piece <- text.split(innerSeparator)) {
                  collected += KeyValue(key, piece)
                }
              } else {
                collected += KeyValue(key, text)
              }
            case other =>
              collected += KeyValue(key, other.toString)
          }
        }
      } catch {
        case e: Throwable => println(e)
      }
      idx += 1
    }
    collected
  }
} 
Example 8
Source File: WrapperTrait.scala — from the sparker project (GNU GPL v3.0)
package Wrappers

import DataStructures.{KeyValue, MatchingEntities, Profile}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.Row

import scala.collection.mutable.MutableList


  // Converts a Spark SQL Row into a list of KeyValue attributes, one per
  // non-null cell, keyed by the corresponding entry of `columnNames`.
  // Collection-valued cells yield one KeyValue per element; string cells are
  // optionally split on `innerSeparator` when `explodeInnerFields` is true.
  def rowToAttributes(columnNames : Array[String], row : Row, explodeInnerFields:Boolean = false, innerSeparator : String = ",") : MutableList[KeyValue] = {
    val attributes: MutableList[KeyValue] = new MutableList()
    for(i <- 0 to row.size-1){
      try{
        val value = row(i)
        val attributeKey = columnNames(i)

        if(value != null){
          value match {
            // NOTE(review): the Iterable element type is erased at runtime, so
            // this case matches any Iterable regardless of element type.
            case listOfAttributes : Iterable[Any] =>
              listOfAttributes map {
                attributeValue =>
                  attributes += KeyValue(attributeKey, attributeValue.toString)
              }
            case stringAttribute : String =>
              if(explodeInnerFields){
                stringAttribute.split(innerSeparator) map {
                  attributeValue =>
                    attributes += KeyValue(attributeKey, attributeValue)
                }
              }
              else {
                attributes += KeyValue(attributeKey, stringAttribute)
              }
            // Anything else is stored via its toString form.
            case singleAttribute =>
              attributes += KeyValue(attributeKey, singleAttribute.toString)
          }
        }
      }
      catch{
        // NOTE(review): catching Throwable swallows fatal errors as well;
        // failures are only printed and the cell is skipped.
        case e : Throwable => println(e)
      }
    }
    attributes
  }
} 
Example 9
Source File: TensorFlowWeightCollection.scala — from the SparkNet project (MIT License)
package libs

import scala.collection.mutable.Map
import scala.collection.mutable.MutableList

/** Helpers for combining named collections of NDArray weights.
  * (Indentation normalized: the original mixed tabs and spaces.)
  */
object TensorFlowWeightCollection {
  /** Divides every weight array by `v`, in place. */
  def scalarDivide(weights: Map[String, NDArray], v: Float) = {
    for (name <- weights.keys) {
      weights(name).scalarDivide(v)
    }
  }

  /** Returns a new map with the elementwise sum of the two collections.
    * Requires both collections to have identical key sets.
    */
  def add(wc1: Map[String, NDArray], wc2: Map[String, NDArray]): Map[String, NDArray] = {
    assert(wc1.keys == wc2.keys)
    // val, not var: the mutable map is updated in place, never reassigned.
    val newWeights = Map[String, NDArray]()
    for (name <- wc1.keys) {
      newWeights += (name -> NDArray.plus(wc1(name), wc2(name)))
    }
    newWeights
  }

  /** True iff both collections have the same keys and every pair of arrays is
    * equal within `tol`. `forall` short-circuits like the original's early
    * returns did.
    */
  def checkEqual(wc1: Map[String, NDArray], wc2: Map[String, NDArray], tol: Float): Boolean = {
    wc1.keys == wc2.keys &&
      wc1.keys.forall(name => NDArray.checkEqual(wc1(name), wc2(name), tol))
  }
}
Example 10
Source File: CaffeWeightCollection.scala — from the SparkNet project (MIT License)
package libs

import scala.collection.mutable.Map
import scala.collection.mutable.MutableList

/** Helpers for combining per-layer lists of NDArray weights keyed by name. */
object CaffeWeightCollection {
  /** Divides every NDArray in every layer's list by `v`, in place. */
  def scalarDivide(weights: Map[String, MutableList[NDArray]], v: Float) = {
    // indices instead of `0 to length - 1`
    for (name <- weights.keys; j <- weights(name).indices) {
      weights(name)(j).scalarDivide(v)
    }
  }

  /** Returns a new collection holding the elementwise sums of the two inputs.
    *
    * @throws Exception if the key sets, per-key list lengths, or array shapes
    *                   disagree (messages preserved from the original).
    */
  def add(weights1: Map[String, MutableList[NDArray]], weights2: Map[String, MutableList[NDArray]]): Map[String, MutableList[NDArray]] = {
    if (weights1.keys != weights2.keys) {
      throw new Exception("weights1.keys != weights2.keys, weights1.keys = " + weights1.keys.toString + ", and weights2.keys = " + weights2.keys.toString + "\n")
    }
    val newWeights = Map[String, MutableList[NDArray]]()
    for (name <- weights1.keys) {
      newWeights += (name -> MutableList())
      if (weights1(name).length != weights2(name).length) {
        throw new Exception("weights1(name).length != weights2(name).length, name = " + name + ", weights1(name).length = " + weights1(name).length.toString + ", weights2(name).length = " + weights2(name).length.toString)
      }
      for (j <- weights1(name).indices) {
        // shape.deep compares the shape arrays structurally, not by reference.
        if (weights1(name)(j).shape.deep != weights2(name)(j).shape.deep) {
          throw new Exception("weights1(name)(j).shape != weights2(name)(j).shape, name = " + name + ", j = " + j.toString + ", weights1(name)(j).shape = " + weights1(name)(j).shape.deep.toString + ", weights2(name)(j).shape = " + weights2(name)(j).shape.deep.toString)
        }
        newWeights(name) += NDArray.plus(weights1(name)(j), weights2(name)(j))
      }
    }
    newWeights
  }

  /** True iff both collections have the same structure and all corresponding
    * arrays are equal within tolerance `tol`. `forall` short-circuits like
    * the original's early returns did.
    */
  def checkEqual(weights1: Map[String, MutableList[NDArray]], weights2: Map[String, MutableList[NDArray]], tol: Float): Boolean = {
    weights1.keys == weights2.keys &&
      weights1.keys.forall { name =>
        weights1(name).length == weights2(name).length &&
          weights1(name).indices.forall(j => NDArray.checkEqual(weights1(name)(j), weights2(name)(j), tol))
      }
  }

}