org.netlib.util.intW Scala Examples

The following examples show how to use org.netlib.util.intW. Each example is drawn from an open-source project; the header above it names the original source file and project.
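org.netlib.util.intW is netlib-java's mutable wrapper around a single int: Fortran passes INTEGER arguments by reference, so the LAPACK wrappers write their status code into the wrapper's public `val` field instead of returning it. A minimal, self-contained sketch of the pattern (IntWDemo and the sample matrix are illustrative, not taken from the projects below):

import com.github.fommil.netlib.LAPACK.{getInstance => lapack}
import org.netlib.util.intW

object IntWDemo {
  def main(args: Array[String]): Unit = {
    // The SPD matrix [[4, 2], [2, 3]] in column-major order; dpotrf reads only its upper triangle.
    val a = Array(4.0, 2.0, 2.0, 3.0)
    val info = new intW(0)             // mutable holder for the Fortran INTEGER output
    lapack.dpotrf("U", 2, a, 2, info)  // in-place Cholesky factorization
    println(s"dpotrf info = ${info.`val`}")  // 0 means success
  }
}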
Example 1
Source File: CholeskyDecomposition.scala    From drizzle-spark   with Apache License 2.0 (the same file appears verbatim in the sparkoscope, multi-tenancy-spark, and Spark-2.3.1 projects)
package org.apache.spark.mllib.linalg

import com.github.fommil.netlib.LAPACK.{getInstance => lapack}
import org.netlib.util.intW

import org.apache.spark.ml.optim.SingularMatrixException


object CholeskyDecomposition {

  /** Computes, in place, the inverse of a matrix from its packed upper-triangular Cholesky factor. */
  def inverse(UAi: Array[Double], k: Int): Array[Double] = {
    val info = new intW(0)
    lapack.dpptri("U", k, UAi, info)
    checkReturnValue(info, "dpptri")
    UAi
  }

  private def checkReturnValue(info: intW, method: String): Unit = {
    info.`val` match {
      case code if code < 0 =>
        throw new IllegalStateException(s"LAPACK.$method returned $code; arg ${-code} is illegal")
      case code if code > 0 =>
        throw new SingularMatrixException (
          s"LAPACK.$method returned $code because A is not positive definite. Is A derived from " +
          "a singular matrix (e.g. collinear column values)?")
      case _ => // do nothing
    }
  }

} 
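A hedged usage sketch: inverse expects the packed upper-triangular Cholesky factor of A, so a caller would typically factorize with LAPACK's dpptrf first (the sample values here are illustrative):

// Illustrative only: factorize a packed SPD matrix, then invert it in place.
val packed = Array(4.0, 2.0, 3.0)    // upper triangle of [[4, 2], [2, 3]], column by column
val info = new intW(0)
lapack.dpptrf("U", 2, packed, info)  // packed now holds the Cholesky factor U
val inv = CholeskyDecomposition.inverse(packed, 2)  // packed upper triangle of A^(-1)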
Example 2
Source File: Util.scala    From spark-lp   with Apache License 2.0
import breeze.linalg.{DenseMatrix => BDM}
import org.apache.spark.mllib.linalg.{Matrices, Matrix}

object Util {

  /** Expands a packed upper-triangular array U into a full symmetric n x n matrix. */
  def triuToFull(U: Array[Double], n: Int): Matrix = {
    val G = new BDM[Double](n, n)

    var row = 0
    var col = 0
    var idx = 0
    var value = 0.0
    while (col < n) {
      row = 0
      while (row < col) {
        value = U(idx)
        G(row, col) = value
        G(col, row) = value
        idx += 1
        row += 1
      }
      G(col, col) = U(idx)
      idx += 1
      col += 1
    }

    Matrices.dense(n, n, G.data)
  }
} 
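For example, assuming the Util wrapper above, the packed upper triangle [1, 2, 3] expands to the full symmetric 2 x 2 matrix:

// Packed upper triangle of a symmetric matrix, stored column by column.
val packed = Array(1.0, 2.0, 3.0)
val full = Util.triuToFull(packed, 2)
// full is the dense column-major matrix [[1, 2], [2, 3]].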
Example 3
Source File: ApproximatePCA.scala    From keystone   with Apache License 2.0
package keystoneml.nodes.learning

import breeze.linalg._
import breeze.numerics._
import breeze.stats._
import breeze.stats.distributions.{Gaussian, ThreadLocalRandomGenerator, RandBasis}
import com.github.fommil.netlib.LAPACK._
import edu.berkeley.cs.amplab.mlmatrix.util.QRUtils
import org.apache.commons.math3.random.MersenneTwister
import org.apache.spark.rdd.RDD
import org.netlib.util.intW
import keystoneml.pipelines.Logging
import keystoneml.workflow.Estimator


// Note: excerpted from the enclosing estimator; wrapped in an object here so the snippet stands alone.
object ApproximatePCA {

  /**
   * Computes an approximate orthonormal basis Q for the range of A, using a
   * randomized range finder with q power iterations.
   */
  def approximateQ(A: DenseMatrix[Double], l: Int, q: Int, seed: Int = 0): DenseMatrix[Double] = {
    val d = A.cols

    val randBasis: RandBasis = new RandBasis(new ThreadLocalRandomGenerator(new MersenneTwister(seed)))
    val omega = DenseMatrix.rand(d, l, Gaussian(0,1)(randBasis)) //cpu: d*l, mem: d*l
    val y0 = A*omega //cpu: n*d*l, mem: n*l

    var Q = QRUtils.qrQR(y0)._1 //cpu: n*l^2

    for (i <- 1 to q) {
      val YHat = Q.t * A //cpu: l*n*d, mem: l*d
      val Qh = QRUtils.qrQR(YHat.t)._1 //cpu: d*l^2, mem: d*l

      val Yj = A * Qh //cpu: n*d*l, mem: n*l
      Q = QRUtils.qrQR(Yj)._1 //cpu:  n*l^2, mem: n*l
    }

    Q
  }
} 
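A hedged usage sketch, assuming the ApproximatePCA wrapper above: Q approximates an orthonormal basis for the range of A, so Q.t * Q should be close to the identity:

import breeze.linalg._

val A = DenseMatrix.rand(1000, 100)
val Q = ApproximatePCA.approximateQ(A, l = 20, q = 2)
// Columns of Q are orthonormal, so Q.t * Q is approximately eye(20).
val err = norm((Q.t * Q - DenseMatrix.eye[Double](20)).toDenseVector)
println(s"orthonormality error = $err")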
Example 4
Source File: DistributedPCA.scala    From keystone   with Apache License 2.0
package keystoneml.nodes.learning

import breeze.linalg._
import breeze.numerics._
import breeze.stats._
import com.github.fommil.netlib.LAPACK.{getInstance => lapack}
import org.apache.spark.rdd.RDD
import org.netlib.util.intW
import keystoneml.pipelines._
import keystoneml.utils.MatrixUtils
import keystoneml.workflow.{Transformer, Estimator}

import edu.berkeley.cs.amplab.mlmatrix.{RowPartition, NormalEquations, RowPartitionedMatrix, TSQR}


// Note: excerpted from the enclosing estimator class; this wrapper is a sketch
// (the exact base types live in keystoneml.workflow) supplying the `dims` field
// and the cost model that `cost` overrides.
class DistributedPCAEstimator(dims: Int) extends Estimator[DenseVector[Float], DenseVector[Float]] {

  def fit(samples: RDD[DenseVector[Float]]): PCATransformer = {
    new PCATransformer(computePCA(samples, dims))
  }

  def computePCA(dataMat: RDD[DenseVector[Float]], dims: Int): DenseMatrix[Float] = {

    val mat = new RowPartitionedMatrix(dataMat.mapPartitions { part =>
      val dblIter = part.map(x => convert(x, Double))
      MatrixUtils.rowsToMatrixIter(dblIter).map(RowPartition(_))
    })
    val means = DenseVector(mat.colSums():_*) :/ mat.numRows().toDouble

    val meansBC = dataMat.context.broadcast(means)
    val zeroMeanMat = new RowPartitionedMatrix(mat.rdd.map { part =>
      RowPartition(part.mat(*, ::) - meansBC.value)
    })

    val rPart = new TSQR().qrR(zeroMeanMat)

    val svd.SVD(u, s, pcaT) = svd(rPart)

    val pca = convert(pcaT.t, Float)

    val matlabConventionPCA = PCAEstimator.enforceMatlabPCASignConvention(pca)

    // Return a subset of the columns.
    matlabConventionPCA(::, 0 until dims)
  }

  override def cost(
    n: Long,
    d: Int,
    k: Int,
    sparsity: Double,
    numMachines: Int,
    cpuWeight: Double,
    memWeight: Double,
    networkWeight: Double): Double = {
    val log2NumMachines = math.log(numMachines.toDouble) / math.log(2.0)
    val flops = n.toDouble * d * d / numMachines + d.toDouble * d * d * log2NumMachines
    val bytesScanned = n.toDouble * d
    val network = d.toDouble * d * log2NumMachines
    math.max(cpuWeight * flops, memWeight * bytesScanned) + networkWeight * network
  }
} 
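A hedged usage sketch (requires a live SparkContext; sc and featureRows are assumed, and the class name comes from the sketched wrapper above):

// Fit a 10-dimensional PCA over an RDD of Float feature vectors.
val data: RDD[DenseVector[Float]] = sc.parallelize(featureRows)
val pcaTransformer = new DistributedPCAEstimator(dims = 10).fit(data)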
Example 5
Source File: ZCAWhitener.scala    From keystone   with Apache License 2.0
package keystoneml.nodes.learning

import breeze.linalg._
import breeze.numerics._
import breeze.stats._
import com.github.fommil.netlib.LAPACK.{getInstance => lapack}
import org.apache.spark.rdd.RDD
import org.netlib.util.intW
import keystoneml.pipelines._
import keystoneml.workflow.{Transformer, Estimator}

class ZCAWhitener(val whitener: DenseMatrix[Double], val means: DenseVector[Double])
  extends Transformer[DenseMatrix[Double],DenseMatrix[Double]] {

  def apply(in: DenseMatrix[Double]): DenseMatrix[Double] = {
    (in(*, ::) - means) * whitener
  }
}


class ZCAWhitenerEstimator(val eps: Double = 0.1)
  extends Estimator[DenseMatrix[Double],DenseMatrix[Double]] {

  def fit(in: RDD[DenseMatrix[Double]]): ZCAWhitener = {
    fitSingle(in.first)
  }

  def fitSingle(in: DenseMatrix[Double]): ZCAWhitener = {
    val means = (mean(in(::, *))).t

    val whitener: DenseMatrix[Double] = {
      val inc = convert(in(*, ::) - means, Float)
      val rows = inc.rows
      val cols = inc.cols

      val s1 = DenseVector.zeros[Float](math.min(rows, cols))
      val v1 = DenseMatrix.zeros[Float](inc.cols, inc.cols)

      // Query the optimal workspace size by passing lwork = -1 to the LAPACK call;
      // the routine returns the optimal size in work(0).
      val scratch, work = new Array[Float](1)
      val info = new intW(0)

      lapack.sgesvd("N", "A", rows, cols, scratch, rows, scratch, null, 1, scratch, cols, work, -1, info)

      val lwork1 = work(0).toInt
      val workspace = new Array[Float](lwork1)

      // Perform the SVD with sgesvd
      lapack.sgesvd("N", "A", rows, cols, inc.copy.data, rows, s1.data, null, 1, v1.data, cols, workspace, workspace.length, info)

      val s2  = pow(s1, 2.0f) / (rows - 1.0f)

      val sn1 = diag((s2 + eps.toFloat) :^ -0.5f)

      // NOTE: v1 holds V^T (sgesvd's vt output, interpreted column-major by Breeze),
      // so the whitening transform V * S^(-1/2) * V^T is computed as v1.t * sn1 * v1.
      val svdMat = v1.t * sn1 * v1

      convert(svdMat, Double)
    }

    new ZCAWhitener(whitener, means)

  }
} 
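A hedged usage sketch: fitSingle learns the whitening transform from a single matrix of row-examples, and applying it yields approximately decorrelated, unit-variance columns:

// Illustrative only: whiten 500 examples with 16 features each.
val data = DenseMatrix.rand(500, 16)
val whitener = new ZCAWhitenerEstimator(eps = 0.1).fitSingle(data)
val whitened = whitener(data)
// cov(whitened) is approximately the 16 x 16 identity.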
Example 6
Source File: CholeskyDecomposition.scala    From sona   with Apache License 2.0
package org.apache.spark.linalg

import com.github.fommil.netlib.LAPACK.{getInstance => lapack}
import org.netlib.util.intW

/**
  * Compute Cholesky decomposition.
  */
object CholeskyDecomposition {

  /**
    * Solves a symmetric positive definite linear system via Cholesky factorization.
    * The input arguments are modified in-place to store the factorization and the solution.
    *
    * @param A  the upper triangular part of A
    * @param bx right-hand side
    * @return the solution array
    */
  def solve(A: Array[Double], bx: Array[Double]): Array[Double] = {
    val k = bx.length
    val info = new intW(0)
    lapack.dppsv("U", k, 1, A, bx, k, info)
    checkReturnValue(info, "dppsv")
    bx
  }

  /**
    * Computes the inverse of a real symmetric positive definite matrix A
    * using the Cholesky factorization A = U**T*U.
    * The input arguments are modified in-place to store the inverse matrix.
    *
    * @param UAi the upper triangular factor U from the Cholesky factorization A = U**T*U
    * @param k   the dimension of A
    * @return the upper triangle of the (symmetric) inverse of A
    */
  def inverse(UAi: Array[Double], k: Int): Array[Double] = {
    val info = new intW(0)
    lapack.dpptri("U", k, UAi, info)
    checkReturnValue(info, "dpptri")
    UAi
  }

  private def checkReturnValue(info: intW, method: String): Unit = {
    info.`val` match {
      case code if code < 0 =>
        throw new IllegalStateException(s"LAPACK.$method returned $code; arg ${-code} is illegal")
      case code if code > 0 =>
        throw new Exception(
          s"LAPACK.$method returned $code because A is not positive definite. Is A derived from " +
            "a singular matrix (e.g. collinear column values)?")
      case _ => // do nothing
    }
  }

}
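A hedged usage sketch of solve, pairing the packed upper triangle of A with a right-hand side (sample values are illustrative):

// Solve [[4, 2], [2, 3]] * x = [1, 2] via the packed Cholesky path (dppsv).
val ap = Array(4.0, 2.0, 3.0)  // packed upper triangle, column by column
val b = Array(1.0, 2.0)
val x = CholeskyDecomposition.solve(ap, b)  // b is overwritten with the solution
// x is approximately Array(-0.125, 0.75)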