weka.classifiers.evaluation.NominalPrediction Java Examples
The following examples show how to use
weka.classifiers.evaluation.NominalPrediction.
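Before the examples, here is a minimal standalone sketch of the class itself, assuming Weka's NominalPrediction constructor that takes the actual class index and a class probability distribution (the values below are illustrative, not from any of the example projects):

import weka.classifiers.evaluation.NominalPrediction;

public class NominalPredictionDemo {
    public static void main(String[] args) {
        // Actual class index 1, distribution over two classes favouring class 1.
        NominalPrediction pred = new NominalPrediction(1.0, new double[]{0.2, 0.8});
        System.out.println("actual       = " + pred.actual());     // 1.0
        System.out.println("predicted    = " + pred.predicted());  // index of the largest probability
        System.out.println("distribution = " + java.util.Arrays.toString(pred.distribution()));
    }
}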
Example #1
Source File: ClassifierTools.java From tsml with GNU General Public License v3.0
/**
 * This method does a cross validation using the EvaluationUtils and stores the predicted and actual values.
 * I implemented this because I saw no way of using the built-in cross validation to get the actual predictions,
 * useful for e.g. McNemar's test (and CV variance). Note that the use of FastVector has been deprecated.
 * @param c
 * @param allData
 * @param m
 * @return
 */
@SuppressWarnings({ "deprecation", "rawtypes" })
public static double[][] crossValidation(Classifier c, Instances allData, int m){
    EvaluationUtils evalU;
    double[][] preds = new double[2][allData.numInstances()];
    Object[] p;
    FastVector f;
    NominalPrediction nom;
    try{
        evalU = new EvaluationUtils();
        evalU.setSeed(10);
        f = evalU.getCVPredictions(c, allData, m);
        p = f.toArray();
        for(int i = 0; i < p.length; i++) {
            nom = ((NominalPrediction)p[i]);
            preds[1][i] = nom.predicted();
            preds[0][i] = nom.actual();
        }
    }catch(Exception e){
        System.out.println(" Error ="+e+" in method Cross Validate Experiment");
        e.printStackTrace();
        System.out.println(allData.relationName());
        System.exit(0);
    }
    return preds;
}
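A hypothetical call site for this helper, assuming a J48 classifier and a local ARFF file (the file name and fold count are illustrative, not taken from the original project):

// inside a method declared to throw Exception
Instances data = new Instances(new java.io.FileReader("some-dataset.arff")); // hypothetical path
data.setClassIndex(data.numAttributes() - 1);
double[][] preds = ClassifierTools.crossValidation(new weka.classifiers.trees.J48(), data, 10);
// preds[0][i] is the actual class of the i-th cross-validation prediction,
// preds[1][i] is the corresponding predicted class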
Example #2
Source File: MLUtils.java From meka with GNU General Public License v3.0
/**
 * Convert to Weka (multi-target) Predictions.
 * Note: currently only multi-label.
 */
public static ArrayList<Prediction> toWekaPredictions(int y[], double p[]) {
    ArrayList<Prediction> predictions = new ArrayList<Prediction>();
    for(int i = 0; i < y.length; i++) {
        predictions.add(new NominalPrediction((double)y[i], new double[]{1.-p[i], p[i]}));
    }
    return predictions;
}
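For illustration, a sketch of calling this converter on hard 0/1 labels and positive-class confidences (the values are made up):

int[] y = {0, 1, 1, 0};
double[] p = {0.1, 0.9, 0.6, 0.4};
ArrayList<Prediction> preds = MLUtils.toWekaPredictions(y, p);
// each entry pairs the label y[i] (as the actual value) with the two-class distribution {1 - p[i], p[i]},
// so predicted() follows whichever of the two probabilities is larger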
Example #3
Source File: ClassifierTools.java From tsml with GNU General Public License v3.0
/**
 * This method does a cross validation using the EvaluationUtils and stores the predicted and actual values.
 * Accuracy is stored in preds[0][0], StdDev of accuracy between folds SHOULD BE stored in preds[1][0].
 * TO IMPLEMENT!
 * Could do with some testing, there is some uncertainty over the last fold.
 * @param allData
 * @param m
 * @return
 */
@SuppressWarnings({ "deprecation", "rawtypes" })
public static double[][] crossValidationWithStats(Classifier c, Instances allData, int m) {
    EvaluationUtils evalU;
    double[][] preds = new double[2][allData.numInstances()+1];
    int foldSize = allData.numInstances()/m; //Last fold may have fewer cases than this
    FastVector f;
    Object[] p;
    NominalPrediction nom;
    double acc = 0, sum = 0, sumsq = 0;
    try{
        evalU = new EvaluationUtils();
//        evalU.setSeed(10);
        f = evalU.getCVPredictions(c, allData, m);
        p = f.toArray();
        for(int i = 0; i < p.length; i++) {
            nom = ((NominalPrediction)p[i]);
            preds[1][i+1] = nom.predicted();
            preds[0][i+1] = nom.actual();
//            System.out.println(" pred = "+preds[i+1]);
            if(preds[0][i+1] == preds[1][i+1]){
                preds[0][0]++;
                acc++;
            }
            if((i > 0 && i % foldSize == 0)){
                //Sum Squares
                sumsq += (acc/foldSize)*(acc/foldSize);
                //Sum
                sum += (acc/foldSize);
                acc = 0;
            }
        }
        //Accuracy stored in preds[0][0]
        preds[0][0] = preds[0][0]/p.length;
        preds[1][0] = (sumsq - sum*sum/m)/m;
        preds[1][0] = Math.sqrt(preds[1][0]);
    }catch(Exception e) {
        System.out.println(" Error ="+e+" in method Cross Validate Experiment");
        e.printStackTrace();
        System.out.println(allData.relationName());
        System.exit(0);
    }
    return preds;
}
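Unlike Example #1, the first slot of each row is reserved for summary statistics, so the per-instance values start at index 1. A hypothetical call site, reusing a loaded Instances object named data (classifier and fold count are illustrative):

double[][] res = ClassifierTools.crossValidationWithStats(new weka.classifiers.trees.J48(), data, 10);
double accuracy = res[0][0];  // overall cross-validated accuracy
double stdDev   = res[1][0];  // standard deviation of per-fold accuracy
// actual values are in res[0][1..n], predicted values in res[1][1..n]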