org.apache.spark.ml.feature.MinMaxScalerModel Java Examples
The following examples show how to use
org.apache.spark.ml.feature.MinMaxScalerModel.
You can vote up the examples you like or vote down the ones you don't,
and you can go to the original project or source file by following the links above each example. You may also check out the related API usage on the sidebar.
Example #1
Source File: MinMaxScalerModelInfoAdapter.java From spark-transformers with Apache License 2.0 | 6 votes |
/**
 * Captures the serializable state of a fitted {@link MinMaxScalerModel} into a
 * framework-independent {@link MinMaxScalerModelInfo}.
 *
 * @param from the fitted Spark model to export; must not be {@code null}
 * @return a populated model-info holder with the fitted statistics, target range,
 *         and input/output column names
 */
@Override
public MinMaxScalerModelInfo getModelInfo(final MinMaxScalerModel from) {
    final MinMaxScalerModelInfo info = new MinMaxScalerModelInfo();

    // Fitted per-feature statistics learned from the training data.
    info.setOriginalMax(from.originalMax().toArray());
    info.setOriginalMin(from.originalMin().toArray());
    // Configured target range the features are rescaled into.
    info.setMax(from.getMax());
    info.setMin(from.getMin());

    // LinkedHashSet keeps the column names in insertion order.
    final Set<String> inputs = new LinkedHashSet<String>();
    inputs.add(from.getInputCol());
    info.setInputKeys(inputs);

    final Set<String> outputs = new LinkedHashSet<String>();
    outputs.add(from.getOutputCol());
    info.setOutputKeys(outputs);

    return info;
}
Example #2
Source File: MinMaxScalerModelInfoAdapter.java From spark-transformers with Apache License 2.0 | 6 votes |
/**
 * Captures the serializable state of a fitted {@link MinMaxScalerModel} into a
 * framework-independent {@link MinMaxScalerModelInfo}.
 *
 * @param from the fitted Spark model to export; must not be {@code null}
 * @param df   unused; present only to satisfy the adapter interface
 * @return a populated model-info holder with the fitted statistics, target range,
 *         and input/output column names
 */
@Override
public MinMaxScalerModelInfo getModelInfo(final MinMaxScalerModel from, final DataFrame df) {
    final MinMaxScalerModelInfo info = new MinMaxScalerModelInfo();

    // Fitted per-feature statistics learned from the training data.
    info.setOriginalMax(from.originalMax().toArray());
    info.setOriginalMin(from.originalMin().toArray());
    // Configured target range the features are rescaled into.
    info.setMax(from.getMax());
    info.setMin(from.getMin());

    // LinkedHashSet keeps the column names in insertion order.
    final Set<String> inputs = new LinkedHashSet<String>();
    inputs.add(from.getInputCol());
    info.setInputKeys(inputs);

    final Set<String> outputs = new LinkedHashSet<String>();
    outputs.add(from.getOutputCol());
    info.setOutputKeys(outputs);

    return info;
}
Example #3
Source File: JavaMinMaxScalerExample.java From SparkDemo with MIT License | 5 votes |
public static void main(String[] args) { SparkSession spark = SparkSession .builder() .appName("JavaMinMaxScalerExample") .getOrCreate(); // $example on$ List<Row> data = Arrays.asList( RowFactory.create(0, Vectors.dense(1.0, 0.1, -1.0)), RowFactory.create(1, Vectors.dense(2.0, 1.1, 1.0)), RowFactory.create(2, Vectors.dense(3.0, 10.1, 3.0)) ); StructType schema = new StructType(new StructField[]{ new StructField("id", DataTypes.IntegerType, false, Metadata.empty()), new StructField("features", new VectorUDT(), false, Metadata.empty()) }); Dataset<Row> dataFrame = spark.createDataFrame(data, schema); MinMaxScaler scaler = new MinMaxScaler() .setInputCol("features") .setOutputCol("scaledFeatures"); // Compute summary statistics and generate MinMaxScalerModel MinMaxScalerModel scalerModel = scaler.fit(dataFrame); // rescale each feature to range [min, max]. Dataset<Row> scaledData = scalerModel.transform(dataFrame); System.out.println("Features scaled to range: [" + scaler.getMin() + ", " + scaler.getMax() + "]"); scaledData.select("features", "scaledFeatures").show(); // $example off$ spark.stop(); }
Example #4
Source File: MinMaxScalerBridgeTest.java From spark-transformers with Apache License 2.0 | 5 votes |
@Test public void testMinMaxScaler() { //prepare data JavaRDD<Row> jrdd = jsc.parallelize(Arrays.asList( RowFactory.create(1.0, Vectors.dense(data[0])), RowFactory.create(2.0, Vectors.dense(data[1])), RowFactory.create(3.0, Vectors.dense(data[2])), RowFactory.create(4.0, Vectors.dense(data[3])) )); StructType schema = new StructType(new StructField[]{ new StructField("label", DataTypes.DoubleType, false, Metadata.empty()), new StructField("features", new VectorUDT(), false, Metadata.empty()) }); Dataset<Row> df = spark.createDataFrame(jrdd, schema); //train model in spark MinMaxScalerModel sparkModel = new MinMaxScaler() .setInputCol("features") .setOutputCol("scaled") .setMin(-5) .setMax(5) .fit(df); //Export model, import it back and get transformer byte[] exportedModel = ModelExporter.export(sparkModel); final Transformer transformer = ModelImporter.importAndGetTransformer(exportedModel); //compare predictions List<Row> sparkOutput = sparkModel.transform(df).orderBy("label").select("features", "scaled").collectAsList(); assertCorrectness(sparkOutput, expected, transformer); }
Example #5
Source File: MinMaxScalerBridgeTest.java From spark-transformers with Apache License 2.0 | 5 votes |
@Test public void testStandardScaler() { //prepare data List<LabeledPoint> localTraining = Arrays.asList( new LabeledPoint(1.0, Vectors.dense(data[0])), new LabeledPoint(2.0, Vectors.dense(data[1])), new LabeledPoint(3.0, Vectors.dense(data[2])), new LabeledPoint(3.0, Vectors.dense(data[3]))); DataFrame df = sqlContext.createDataFrame(sc.parallelize(localTraining), LabeledPoint.class); //train model in spark MinMaxScalerModel sparkModel = new MinMaxScaler() .setInputCol("features") .setOutputCol("scaled") .setMin(-5) .setMax(5) .fit(df); //Export model, import it back and get transformer byte[] exportedModel = ModelExporter.export(sparkModel, df); final Transformer transformer = ModelImporter.importAndGetTransformer(exportedModel); //compare predictions Row[] sparkOutput = sparkModel.transform(df).orderBy("label").select("features", "scaled").collect(); assertCorrectness(sparkOutput, expected, transformer); }
Example #6
Source File: MinMaxScalerModelConverter.java From jpmml-sparkml with GNU Affero General Public License v3.0 | 4 votes |
// Wraps a fitted MinMaxScalerModel for PMML conversion; simply delegates to the
// superclass constructor, which holds the transformer for later conversion.
public MinMaxScalerModelConverter(MinMaxScalerModel transformer){ super(transformer); }
Example #7
Source File: MinMaxScalerModelInfoAdapter.java From spark-transformers with Apache License 2.0 | 4 votes |
// Identifies the Spark model type this adapter handles, so the adapter registry
// can route MinMaxScalerModel instances to this implementation.
@Override public Class<MinMaxScalerModel> getSource() { return MinMaxScalerModel.class; }
Example #8
Source File: MinMaxScalerModelInfoAdapter.java From spark-transformers with Apache License 2.0 | 4 votes |
// Identifies the Spark model type this adapter handles, so the adapter registry
// can route MinMaxScalerModel instances to this implementation.
@Override public Class<MinMaxScalerModel> getSource() { return MinMaxScalerModel.class; }