org.apache.spark.ml.feature.DCT Scala Examples
The following examples show how to use org.apache.spark.ml.feature.DCT.
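All of the examples operate on a DataFrame column of vectors: Spark's DCT transformer applies a Discrete Cosine Transform (DCT-II, scaled so that the transform matrix is unitary) to each vector, and setInverse(true) applies the inverse transform. Because the transform is unitary, a forward pass followed by an inverse pass recovers the original vector. A minimal round-trip sketch, assuming a local Spark 2.x session (the object and column names here are illustrative, not taken from any of the projects below):

import org.apache.spark.ml.feature.DCT
import org.apache.spark.ml.linalg.Vectors
import org.apache.spark.sql.SparkSession

object DCTRoundTrip {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder.appName("DCTRoundTrip").master("local[*]").getOrCreate()
    val df = spark.createDataFrame(Seq(Tuple1(Vectors.dense(0.0, 1.0, -2.0, 3.0)))).toDF("features")

    // Forward DCT, then inverse DCT: "recovered" should equal "features"
    // up to floating-point error, because Spark's DCT is unitary.
    val forward = new DCT().setInputCol("features").setOutputCol("dct").setInverse(false)
    val inverse = new DCT().setInputCol("dct").setOutputCol("recovered").setInverse(true)

    inverse.transform(forward.transform(df)).select("features", "recovered").show(false)
    spark.stop()
  }
}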
Example 1
Source File: DCTExample.scala From drizzle-spark with Apache License 2.0
// scalastyle:off println
package org.apache.spark.examples.ml

// $example on$
import org.apache.spark.ml.feature.DCT
import org.apache.spark.ml.linalg.Vectors
// $example off$
import org.apache.spark.sql.SparkSession

object DCTExample {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder
      .appName("DCTExample")
      .getOrCreate()

    // $example on$
    val data = Seq(
      Vectors.dense(0.0, 1.0, -2.0, 3.0),
      Vectors.dense(-1.0, 2.0, 4.0, -7.0),
      Vectors.dense(14.0, -2.0, -5.0, 1.0))

    val df = spark.createDataFrame(data.map(Tuple1.apply)).toDF("features")

    val dct = new DCT()
      .setInputCol("features")
      .setOutputCol("featuresDCT")
      .setInverse(false)

    val dctDf = dct.transform(df)

    dctDf.select("featuresDCT").show(false)
    // $example off$

    spark.stop()
  }
}
// scalastyle:on println
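A quick way to sanity-check the scaling: since Spark's DCT is the unitary DCT-II, the first output coefficient is the sum of the inputs divided by the square root of the vector length. For the first row above:

// First coefficient of a unitary DCT-II: y(0) = sum(x) / sqrt(N).
// For (0.0, 1.0, -2.0, 3.0): (0.0 + 1.0 - 2.0 + 3.0) / sqrt(4) = 1.0,
// which should match the first element of the corresponding featuresDCT row.
val x = Seq(0.0, 1.0, -2.0, 3.0)
val y0 = x.sum / math.sqrt(x.length) // 1.0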
Example 2
Source File: LocalDCT.scala From spark-ml-serving with Apache License 2.0
package io.hydrosphere.spark_ml_serving.preprocessors

import io.hydrosphere.spark_ml_serving.TypedTransformerConverter
import io.hydrosphere.spark_ml_serving.common.utils.DataUtils._
import io.hydrosphere.spark_ml_serving.common._
import org.apache.spark.ml.feature.DCT
import org.apache.spark.ml.linalg.Vector

class LocalDCT(override val sparkTransformer: DCT) extends LocalTransformer[DCT] {

  override def transform(localData: LocalData): LocalData = {
    localData.column(sparkTransformer.getInputCol) match {
      case Some(column) =>
        val method = classOf[DCT].getMethod("createTransformFunc")
        val newData = column.data.mapToMlVectors.map { r =>
          method.invoke(sparkTransformer).asInstanceOf[Vector => Vector](r).toList
        }
        localData.withColumn(LocalDataColumn(sparkTransformer.getOutputCol, newData))
      case None => localData
    }
  }
}

object LocalDCT extends SimpleModelLoader[DCT] with TypedTransformerConverter[DCT] {

  override def build(metadata: Metadata, data: LocalData): DCT = {
    new DCT(metadata.uid)
      .setInputCol(metadata.paramMap("inputCol").asInstanceOf[String])
      .setOutputCol(metadata.paramMap("outputCol").asInstanceOf[String])
      .setInverse(metadata.paramMap("inverse").asInstanceOf[Boolean])
  }

  override implicit def toLocal(transformer: DCT) = new LocalDCT(transformer)
}
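The notable trick here is the reflective lookup: DCT.createTransformFunc is not part of Spark's public API, so the serving layer fetches it via getMethod and applies it to vectors directly, with no SparkSession or DataFrame involved. A standalone sketch of the same pattern (relying, as the code above does, on the method being visible to reflection at runtime):

import org.apache.spark.ml.feature.DCT
import org.apache.spark.ml.linalg.{Vector, Vectors}

// Obtain DCT's transform function reflectively and apply it to a single
// vector, without building a DataFrame.
val dct = new DCT().setInputCol("in").setOutputCol("out").setInverse(false)
val f = classOf[DCT].getMethod("createTransformFunc")
  .invoke(dct).asInstanceOf[Vector => Vector]
println(f(Vectors.dense(0.0, 1.0, -2.0, 3.0)))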
Example 3
Source File: DCTOp.scala From mleap with Apache License 2.0
package org.apache.spark.ml.bundle.ops.feature

import ml.combust.bundle.BundleContext
import ml.combust.bundle.dsl._
import ml.combust.bundle.op.{OpModel, OpNode}
import ml.combust.mleap.core.types.TensorShape
import org.apache.spark.ml.bundle.{ParamSpec, SimpleParamSpec, SimpleSparkOp, SparkBundleContext}
import org.apache.spark.ml.feature.DCT
import org.apache.spark.ml.param.Param
import org.apache.spark.sql.mleap.TypeConverters.sparkToMleapDataShape

class DCTOp extends SimpleSparkOp[DCT] {
  override val Model: OpModel[SparkBundleContext, DCT] = new OpModel[SparkBundleContext, DCT] {
    override val klazz: Class[DCT] = classOf[DCT]

    override def opName: String = Bundle.BuiltinOps.feature.dct

    override def store(model: Model, obj: DCT)
                      (implicit context: BundleContext[SparkBundleContext]): Model = {
      val dataset = context.context.dataset.get
      val inputShape = sparkToMleapDataShape(dataset.schema(obj.getInputCol), dataset).asInstanceOf[TensorShape]

      model.withValue("inverse", Value.boolean(obj.getInverse))
        .withValue("input_size", Value.int(inputShape.dimensions.get.head))
    }

    override def load(model: Model)
                     (implicit context: BundleContext[SparkBundleContext]): DCT = {
      new DCT(uid = "").setInverse(model.value("inverse").getBoolean)
    }
  }

  override def sparkLoad(uid: String, shape: NodeShape, model: DCT): DCT = {
    new DCT(uid = uid).setInverse(model.getInverse)
  }

  override def sparkInputs(obj: DCT): Seq[ParamSpec] = {
    Seq("input" -> obj.inputCol)
  }

  override def sparkOutputs(obj: DCT): Seq[SimpleParamSpec] = {
    Seq("output" -> obj.outputCol)
  }
}
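Note that store reads the input vector size from the schema of a transformed DataFrame carried in the bundle context (context.context.dataset.get), so serializing a DCT without first attaching a dataset to the SparkBundleContext will fail on the Option.get. The recorded input_size is presumably what lets MLeap give the op's input and output tensors a fixed shape at load time.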
Example 4
Source File: DCTParitySpec.scala From mleap with Apache License 2.0
package org.apache.spark.ml.parity.feature

import org.apache.spark.ml.feature.{DCT, VectorAssembler}
import org.apache.spark.ml.{Pipeline, Transformer}
import org.apache.spark.ml.parity.SparkParityBase
import org.apache.spark.sql._

class DCTParitySpec extends SparkParityBase {
  override val dataset: DataFrame = baseDataset.select("dti", "loan_amount")

  override val sparkTransformer: Transformer = new Pipeline().setStages(Array(
    new VectorAssembler().
      setInputCols(Array("dti", "loan_amount")).
      setOutputCol("features"),
    new DCT(uid = "dct").
      setInverse(true).
      setInputCol("features").
      setOutputCol("filter_features"))).fit(dataset)
}
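SparkParityBase is mleap's shared test harness: following the parity-test pattern, it takes the fitted Spark pipeline, round-trips it through MLeap bundle serialization, and checks that the Spark and MLeap versions transform the dataset identically. The DCT here runs in inverse mode (setInverse(true)) on the assembled features column.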
Example 5
Source File: DCTExample.scala From sparkoscope with Apache License 2.0
The code is identical to Example 1 (DCTExample.scala from drizzle-spark) above.
Example 6
Source File: DCTExample.scala From multi-tenancy-spark with Apache License 2.0
The code is identical to Example 1 (DCTExample.scala from drizzle-spark) above.
Example 7
Source File: DiscreteCosineTransformer.scala From seahorse with Apache License 2.0
package ai.deepsense.deeplang.doperables.spark.wrappers.transformers

import org.apache.spark.ml.feature.DCT

import ai.deepsense.deeplang.doperables.SparkTransformerAsMultiColumnTransformer
import ai.deepsense.deeplang.params.Param
import ai.deepsense.deeplang.params.wrappers.spark.BooleanParamWrapper

class DiscreteCosineTransformer extends SparkTransformerAsMultiColumnTransformer[DCT] {

  override def convertInputNumericToVector: Boolean = true
  override def convertOutputVectorToDouble: Boolean = true

  val inverse = new BooleanParamWrapper[DCT](
    name = "inverse",
    description = Some("Indicates whether to perform the inverse DCT (true) or forward DCT (false)."),
    sparkParamGetter = _.inverse)
  setDefault(inverse, false)

  override protected def getSpecificParams: Array[Param[_]] = Array(inverse)
}
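The two conversion flags let the wrapped DCT run on plain numeric columns: the wrapper packs a numeric input into a vector before transforming and, when the result has size one, unpacks it back to a Double. A rough plain-Spark sketch of what that emulates (illustrative column names, not the Seahorse API):

import org.apache.spark.ml.feature.{DCT, VectorAssembler}
import org.apache.spark.ml.linalg.Vector
import org.apache.spark.sql.functions.{col, udf}

// Pack the numeric column into a length-1 vector, apply the DCT, then
// unwrap the length-1 result back into a Double column.
val toVec = new VectorAssembler().setInputCols(Array("value")).setOutputCol("valueVec")
val dct   = new DCT().setInputCol("valueVec").setOutputCol("valueDct").setInverse(false)
val head  = udf((v: Vector) => v(0))
// val result = dct.transform(toVec.transform(df)).withColumn("valueOut", head(col("valueDct")))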
Example 8
Source File: DCTExample.scala From spark1.52 with Apache License 2.0
// scalastyle:off println
package org.apache.spark.examples.ml

// $example on$
import org.apache.spark.ml.feature.DCT
import org.apache.spark.mllib.linalg.Vectors
// $example off$
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.sql.SQLContext

object DCTExample {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("DCTExample").setMaster("local[4]")
    val sc = new SparkContext(conf)
    val sqlContext = new SQLContext(sc)
    import sqlContext.implicits._

    // $example on$
    val data = Seq(
      Vectors.dense(0.0, 1.0, -2.0, 3.0),
      Vectors.dense(-1.0, 2.0, 4.0, -7.0),
      Vectors.dense(14.0, -2.0, -5.0, 1.0))

    val df = sqlContext.createDataFrame(data.map(Tuple1.apply)).toDF("features")

    // Discrete cosine transform (DCT)
    val dct = new DCT()
      .setInputCol("features")
      .setOutputCol("featuresDCT")
      .setInverse(false)

    // transform() applies the algorithm, producing a new DataFrame
    val dctDf = dct.transform(df)

    dctDf.select("featuresDCT").show(3)
    // $example off$
    sc.stop()
  }
}
// scalastyle:on println
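This variant targets Spark 1.x: it builds a SQLContext over a SparkContext and imports org.apache.spark.mllib.linalg.Vectors, whereas the Spark 2.x examples above use SparkSession and org.apache.spark.ml.linalg.Vectors. The two Vector types are distinct classes, so the import must match the Spark version in use.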
Example 9
Source File: DCTExample.scala From Spark-2.3.1 with Apache License 2.0
The code is identical to Example 1 (DCTExample.scala from drizzle-spark) above.
Example 10
Source File: DCTExample.scala From BigDatalog with Apache License 2.0
// scalastyle:off println
package org.apache.spark.examples.ml

// $example on$
import org.apache.spark.ml.feature.DCT
import org.apache.spark.mllib.linalg.Vectors
// $example off$
import org.apache.spark.sql.SQLContext
import org.apache.spark.{SparkConf, SparkContext}

object DCTExample {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("DCTExample")
    val sc = new SparkContext(conf)
    val sqlContext = new SQLContext(sc)

    // $example on$
    val data = Seq(
      Vectors.dense(0.0, 1.0, -2.0, 3.0),
      Vectors.dense(-1.0, 2.0, 4.0, -7.0),
      Vectors.dense(14.0, -2.0, -5.0, 1.0))

    val df = sqlContext.createDataFrame(data.map(Tuple1.apply)).toDF("features")

    val dct = new DCT()
      .setInputCol("features")
      .setOutputCol("featuresDCT")
      .setInverse(false)

    val dctDf = dct.transform(df)

    dctDf.select("featuresDCT").show(3)
    // $example off$
    sc.stop()
  }
}
// scalastyle:on println
Example 11
Source File: DiscreteCosineTransformer.scala From seahorse-workflow-executor with Apache License 2.0
package io.deepsense.deeplang.doperables.spark.wrappers.transformers

import org.apache.spark.ml.feature.DCT

import io.deepsense.deeplang.doperables.SparkTransformerAsMultiColumnTransformer
import io.deepsense.deeplang.params.Param
import io.deepsense.deeplang.params.wrappers.spark.BooleanParamWrapper

class DiscreteCosineTransformer extends SparkTransformerAsMultiColumnTransformer[DCT] {

  override def convertInputNumericToVector: Boolean = true
  override def convertOutputVectorToDouble: Boolean = true

  val inverse = new BooleanParamWrapper[DCT](
    name = "inverse",
    description = Some("Indicates whether to perform the inverse DCT (true) or forward DCT (false)."),
    sparkParamGetter = _.inverse)
  setDefault(inverse, false)

  override protected def getSpecificParams: Array[Param[_]] = Array(inverse)
}