Java Code Examples for org.apache.spark.mllib.evaluation.MulticlassMetrics#labels()
The following examples show how to use
org.apache.spark.mllib.evaluation.MulticlassMetrics#labels().
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source File: MulticlassClassificationEvaluatorByClass.java From ambiverse-nlu with Apache License 2.0 | 4 votes |
@Override public double evaluate(DataFrame dataset) { StructType schema = dataset.schema(); SchemaUtils.checkColumnType(schema, this.getPredictionCol(), DataTypes.DoubleType, ""); SchemaUtils.checkColumnType(schema, this.getLabelCol(), DataTypes.DoubleType, ""); MulticlassMetrics metrics = new MulticlassMetrics(dataset .select(this.getPredictionCol(), this.getLabelCol())); int labelColumn = 0; for(int i=0; i < metrics.labels().length; i++) { if(metrics.labels()[i] == evaluationClass) { labelColumn = i; } } double metric=0d; switch(getMetricName()) { case "f1": metric = metrics.fMeasure(metrics.labels()[labelColumn]); break; case "precision": metric = metrics.precision(metrics.labels()[labelColumn]); break; case "recall": metric = metrics.recall(metrics.labels()[labelColumn]); break; case "weightedPrecision": metric = metrics.weightedPrecision(); break; case "weightedRecall": metric = metrics.weightedRecall(); break; } return metric; }
Example 2
Source File: TrainingSparkRunner.java From ambiverse-nlu with Apache License 2.0 | 4 votes |
private void multiClassEvaluation(DataFrame predictions, String output, TrainingSettings trainingSettings) throws IOException { FileSystem fs = FileSystem.get(new Configuration()); Path evalPath = new Path(output+"multiclass_evaluation_"+trainingSettings.getClassificationMethod()+".txt"); fs.delete(evalPath, true); FSDataOutputStream fsdos = fs.create(evalPath); MulticlassMetrics metrics = new MulticlassMetrics(predictions .select("prediction", "label")); // Confusion matrix Matrix confusion = metrics.confusionMatrix(); IOUtils.write("\nConfusion matrix: \n" + confusion, fsdos); // Overall statistics IOUtils.write("\nPrecision = " + metrics.precision(), fsdos); IOUtils.write("\nRecall = " + metrics.recall(), fsdos); IOUtils.write("\nF1 Score = " + metrics.fMeasure(), fsdos); IOUtils.write("\n\n", fsdos); // Stats by labels for (int i = 0; i < metrics.labels().length; i++) { IOUtils.write(String.format("Class %f precision = %f\n", metrics.labels()[i],metrics.precision(metrics.labels()[i])), fsdos); IOUtils.write(String.format("Class %f recall = %f\n", metrics.labels()[i], metrics.recall(metrics.labels()[i])), fsdos); IOUtils.write(String.format("Class %f F1 score = %f\n", metrics.labels()[i], metrics.fMeasure(metrics.labels()[i])), fsdos); System.out.format("Class %f precision = %f\n", metrics.labels()[i],metrics.precision(metrics.labels()[i])); System.out.format("Class %f recall = %f\n", metrics.labels()[i], metrics.recall(metrics.labels()[i])); System.out.format("Class %f F1 score = %f\n", metrics.labels()[i], metrics.fMeasure(metrics.labels()[i])); } //Weighted stats IOUtils.write("\nWeighted precision = "+metrics.weightedPrecision(), fsdos); IOUtils.write("\nWeighted recall = "+metrics.weightedRecall(), fsdos); IOUtils.write("\nWeighted F1 score ="+metrics.weightedFMeasure(), fsdos); IOUtils.write("\nWeighted false positive rate = " +metrics.weightedFalsePositiveRate(), fsdos); fsdos.flush(); IOUtils.closeQuietly(fsdos); }
Example 3
Source File: JavaMulticlassClassificationMetricsExample.java From SparkDemo with MIT License | 4 votes |
public static void main(String[] args) { SparkConf conf = new SparkConf().setAppName("Multi class Classification Metrics Example"); SparkContext sc = new SparkContext(conf); // $example on$ String path = "data/mllib/sample_multiclass_classification_data.txt"; JavaRDD<LabeledPoint> data = MLUtils.loadLibSVMFile(sc, path).toJavaRDD(); // Split initial RDD into two... [60% training data, 40% testing data]. JavaRDD<LabeledPoint>[] splits = data.randomSplit(new double[]{0.6, 0.4}, 11L); JavaRDD<LabeledPoint> training = splits[0].cache(); JavaRDD<LabeledPoint> test = splits[1]; // Run training algorithm to build the model. final LogisticRegressionModel model = new LogisticRegressionWithLBFGS() .setNumClasses(3) .run(training.rdd()); // Compute raw scores on the test set. JavaRDD<Tuple2<Object, Object>> predictionAndLabels = test.map( new Function<LabeledPoint, Tuple2<Object, Object>>() { public Tuple2<Object, Object> call(LabeledPoint p) { Double prediction = model.predict(p.features()); return new Tuple2<Object, Object>(prediction, p.label()); } } ); // Get evaluation metrics. 
MulticlassMetrics metrics = new MulticlassMetrics(predictionAndLabels.rdd()); // Confusion matrix Matrix confusion = metrics.confusionMatrix(); System.out.println("Confusion matrix: \n" + confusion); // Overall statistics System.out.println("Accuracy = " + metrics.accuracy()); // Stats by labels for (int i = 0; i < metrics.labels().length; i++) { System.out.format("Class %f precision = %f\n", metrics.labels()[i],metrics.precision( metrics.labels()[i])); System.out.format("Class %f recall = %f\n", metrics.labels()[i], metrics.recall( metrics.labels()[i])); System.out.format("Class %f F1 score = %f\n", metrics.labels()[i], metrics.fMeasure( metrics.labels()[i])); } //Weighted stats System.out.format("Weighted precision = %f\n", metrics.weightedPrecision()); System.out.format("Weighted recall = %f\n", metrics.weightedRecall()); System.out.format("Weighted F1 score = %f\n", metrics.weightedFMeasure()); System.out.format("Weighted false positive rate = %f\n", metrics.weightedFalsePositiveRate()); // Save and load model model.save(sc, "target/tmp/LogisticRegressionModel"); LogisticRegressionModel sameModel = LogisticRegressionModel.load(sc, "target/tmp/LogisticRegressionModel"); // $example off$ }