Java Code Examples for weka.classifiers.trees.J48#setOptions()
The following examples show how to use
weka.classifiers.trees.J48#setOptions().
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source File: SaveModel.java From Hands-On-Artificial-Intelligence-with-Java-for-Beginners with MIT License | 7 votes |
/** * @param args the command line arguments */ public static void main(String[] args) { // TODO code application logic here try { DataSource src = new DataSource("/Users/admin/Documents/NetBeansProjects/SaveModel/segment-challenge.arff"); Instances dt = src.getDataSet(); dt.setClassIndex(dt.numAttributes() - 1); String[] options = new String[4]; options[0] = "-C"; options[1] = "0.1"; options[2] = "-M"; options[3] = "2"; J48 mytree = new J48(); mytree.setOptions(options); mytree.buildClassifier(dt); weka.core.SerializationHelper.write("/Users/admin/Documents/NetBeansProjects/SaveModel/myDT.model", mytree); } catch (Exception e) { System.out.println("Error!!!!\n" + e.getMessage()); } }
Example 2
Source File: MakingPredictions.java From Hands-On-Artificial-Intelligence-with-Java-for-Beginners with MIT License | 5 votes |
/** * @param args the command line arguments */ public static void main(String[] args) { // TODO code application logic here try { DataSource src = new DataSource("/Users/admin/Documents/NetBeansProjects/MakingPredictions/segment-challenge.arff"); Instances dt = src.getDataSet(); dt.setClassIndex(dt.numAttributes() - 1); String[] options = new String[4]; options[0] = "-C"; options[1] = "0.1"; options[2] = "-M"; options[3] = "2"; J48 mytree = new J48(); mytree.setOptions(options); mytree.buildClassifier(dt); DataSource src1 = new DataSource("/Users/admin/Documents/NetBeansProjects/MakingPredictions/segment-test.arff"); Instances tdt = src1.getDataSet(); tdt.setClassIndex(tdt.numAttributes()-1); System.out.println("ActualClass \t ActualValue \t PredictedValue \t PredictedClass"); for (int i = 0; i < tdt.numInstances(); i++) { String act = tdt.instance(i).stringValue(tdt.instance(i).numAttributes()-1); double actual = tdt.instance(i).classValue(); Instance inst = tdt.instance(i); double predict = mytree.classifyInstance(inst); String pred = inst.toString(inst .numAttributes()-1); System.out.println(act + " \t\t " + actual + " \t\t " + predict + " \t\t " + pred); } } catch (Exception e) { System.out.println(e.getCause()); } }
Example 3
Source File: DevelopClassifier.java From Hands-On-Artificial-Intelligence-with-Java-for-Beginners with MIT License | 5 votes |
/** * @param args the command line arguments */ public static void main(String[] args) { // TODO code application logic here try{ DataSource src = new DataSource("/Users/admin/Documents/NetBeansProjects/DevelopClassifier/vote.arff"); Instances dt = src.getDataSet(); dt.setClassIndex(dt.numAttributes()-1); String[] options = new String[4]; options[0] = "-C"; options[1] = "0.1"; options[2] = "-M"; options[3] = "2"; J48 tree = new J48(); tree.setOptions(options); tree.buildClassifier(dt); System.out.println(tree.getCapabilities().toString()); System.out.println(tree.graph()); //uncomment the following three lines of code for Naive Bayes NaiveBayes nb = new NaiveBayes(); nb.buildClassifier(dt); System.out.println(nb.getCapabilities().toString()); } catch(Exception e){ System.out.println("Error!!!!\n" + e.getMessage()); } }
Example 4
Source File: BookDecisionTree.java From Java-for-Data-Science with MIT License | 5 votes |
private J48 performTraining() { J48 j48 = new J48(); String[] options = {"-U"}; // Use unpruned tree. -U try { j48.setOptions(options); j48.buildClassifier(trainingData); } catch (Exception ex) { ex.printStackTrace(); } return j48; }
Example 5
Source File: TestWekaJ48.java From Java-Data-Analysis with MIT License | 5 votes |
public static void main(String[] args) throws Exception { DataSource source = new DataSource("data/AnonFruit.arff"); Instances instances = source.getDataSet(); instances.setClassIndex(3); // target attribute: (Sweet) J48 j48 = new J48(); // an extension of ID3 j48.setOptions(new String[]{"-U"}); // use unpruned tree j48.buildClassifier(instances); for (Instance instance : instances) { double prediction = j48.classifyInstance(instance); System.out.printf("%4.0f%4.0f%n", instance.classValue(), prediction); } }
Example 6
Source File: ActivityRecognition.java From Machine-Learning-in-Java with MIT License | 5 votes |
public static void main(String[] args) throws Exception{ String databasePath = "data/features.arff"; // Load the data in arff format Instances data = new Instances(new BufferedReader(new FileReader(databasePath))); // Set class the last attribute as class data.setClassIndex(data.numAttributes() - 1); // Build a basic decision tree model String[] options = new String[]{}; J48 model = new J48(); model.setOptions(options); model.buildClassifier(data); // Output decision tree System.out.println("Decision tree model:\n"+model); // Output source code implementing the decision tree System.out.println("Source code:\n"+model.toSource("ActivityRecognitionEngine")); // Check accuracy of model using 10-fold cross-validation Evaluation eval = new Evaluation(data); eval.crossValidateModel(model, data, 10, new Random(1), new String[] {}); System.out.println("Model performance:\n"+eval.toSummaryString()); String[] activities = new String[]{"Walk", "Walk", "Walk", "Run", "Walk", "Run", "Run", "Sit", "Sit", "Sit"}; DiscreteLowPass dlpFilter = new DiscreteLowPass(3); for(String str : activities){ System.out.println(str +" -> "+ dlpFilter.filter(str)); } }
Example 7
Source File: ModelEvaluation.java From Hands-On-Artificial-Intelligence-with-Java-for-Beginners with MIT License | 4 votes |
/** * @param args the command line arguments */ public static void main(String[] args) { // TODO code application logic here try { DataSource src = new DataSource("/Users/admin/Documents/NetBeansProjects/ModelEvaluation/segment-challenge.arff"); Instances dt = src.getDataSet(); dt.setClassIndex(dt.numAttributes()- 1); String[] options = new String[4]; options[0] = "-C"; options[1] = "0.1"; options[2] = "-M"; options[3] = "2"; J48 mytree = new J48(); mytree.setOptions(options); mytree.buildClassifier(dt); Evaluation eval = new Evaluation(dt); Random rand = new Random(1); DataSource src1 = new DataSource("/Users/admin/Documents/NetBeansProjects/ModelEvaluation/segment-test.arff"); Instances tdt = src1.getDataSet(); tdt.setClassIndex(tdt.numAttributes() - 1); eval.evaluateModel(mytree, tdt); System.out.println(eval.toSummaryString("Evaluation results:\n", false)); System.out.println("Correct % = " + eval.pctCorrect()); System.out.println("Incorrect % = " + eval.pctIncorrect()); System.out.println("kappa = " + eval.kappa()); System.out.println("MAE = " + eval.meanAbsoluteError()); System.out.println("RMSE = " + eval.rootMeanSquaredError()); System.out.println("RAE = " + eval.relativeAbsoluteError()); System.out.println("Precision = " + eval.precision(1)); System.out.println("Recall = " + eval.recall(1)); System.out.println("fMeasure = " + eval.fMeasure(1)); System.out.println(eval.toMatrixString("=== Overall Confusion Matrix ===")); } catch (Exception e) { System.out.println("Error!!!!\n" + e.getMessage()); } }