Java Code Examples for weka.classifiers.lazy.IBk#buildClassifier()

The following examples show how to use weka.classifiers.lazy.IBk#buildClassifier() . You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source File: kNN.java    From tsml with GNU General Public License v3.0 5 votes vote down vote up
/**
 * Sanity check: a 1-NN {@code kNN} wrapper should behave identically to Weka's
 * {@code IBk} with k=1. Builds both on every UCI train split, compares test
 * accuracy exactly, and reports each dataset where the two disagree.
 *
 * @param norm whether to turn normalisation on in the kNN wrapper; per the
 *             printed note, differences may appear when it is off
 */
public static void test1NNvsIB1(boolean norm){
        System.out.println("FIRST BASIC SANITY TEST FOR THIS WRAPPER");
        System.out.print("Compare 1-NN with IB1, normalisation turned");
        String str=norm?" on":" off";
        System.out.println(str);
        System.out.println("Compare on the UCI data sets");
        System.out.print("If normalisation is off, then there may be differences");
        kNN knn = new kNN(1);
        IBk ib1=new IBk(1);
        knn.normalise(norm);
        int diff=0;   // number of datasets where the two classifiers disagree
        DecimalFormat df = new DecimalFormat("####.###");
        for(String s:DatasetLists.uciFileNames){
            Instances train=DatasetLoading.loadDataNullable("Z:/ArchiveData/Uci_arff/"+s+"/"+s+"-train");
            Instances test=DatasetLoading.loadDataNullable("Z:/ArchiveData/Uci_arff/"+s+"/"+s+"-test");
            try{
                knn.buildClassifier(train);
                ib1.buildClassifier(train);
                double a1=ClassifierTools.accuracy(test, knn);
                double a2=ClassifierTools.accuracy(test, ib1);
                // Exact float comparison is intentional: identical models on
                // identical data should produce bit-identical accuracies.
                if(a1!=a2){
                    diff++;
                    System.out.println(s+": 1-NN ="+df.format(a1)+" ib1="+df.format(a2));
                }
            }catch(Exception e){
                // Report the cause and exit non-zero so the failure is visible
                // to scripts (previously exit(0) signalled success on error).
                System.out.println(" Exception building a classifier: "+e);
                System.exit(1);
            }
        }
         System.out.println("Total problems ="+DatasetLists.uciFileNames.length+" different on "+diff);
    }
 
Example 2
Source File: kNN.java    From tsml with GNU General Public License v3.0 5 votes vote down vote up
/**
 * Sanity check: a 100-NN {@code kNN} wrapper should behave identically to Weka's
 * {@code IBk} with k=100, with cross-validated k selection optionally enabled on
 * both. Builds both on every UCI train split, compares test accuracy exactly,
 * and reports each dataset where the two disagree.
 *
 * @param norm          whether to turn normalisation on in the kNN wrapper; per
 *                      the printed note, differences may appear when it is off
 * @param crossValidate whether both classifiers cross-validate to choose k
 */
public static void testkNNvsIBk(boolean norm, boolean crossValidate){
        System.out.println("FIRST BASIC SANITY TEST FOR THIS WRAPPER");
        // Fixed copy-pasted banner from the 1-NN/IB1 test: this compares kNN with IBk.
        System.out.print("Compare kNN with IBk, normalisation turned");
        String str=norm?" on":" off";
        System.out.println(str);
        System.out.print("Cross validation turned");
        str=crossValidate?" on":" off";
        System.out.println(str);
        System.out.println("Compare on the UCI data sets");
        System.out.print("If normalisation is off, then there may be differences");
        kNN knn = new kNN(100);
        IBk ibk=new IBk(100);
        knn.normalise(norm);
        knn.setCrossValidate(crossValidate);
        ibk.setCrossValidate(crossValidate);
        int diff=0;   // number of datasets where the two classifiers disagree
        DecimalFormat df = new DecimalFormat("####.###");
        for(String s:DatasetLists.uciFileNames){
            // Use forward slashes throughout (the mixed "/" and "\\" separators
            // were inconsistent with the 1-NN test and non-portable).
            Instances train=DatasetLoading.loadDataNullable("Z:/ArchiveData/Uci_arff/"+s+"/"+s+"-train");
            Instances test=DatasetLoading.loadDataNullable("Z:/ArchiveData/Uci_arff/"+s+"/"+s+"-test");
            try{
                knn.buildClassifier(train);
                ibk.buildClassifier(train);
                double a1=ClassifierTools.accuracy(test, knn);
                double a2=ClassifierTools.accuracy(test, ibk);
                // Exact float comparison is intentional: identical models on
                // identical data should produce bit-identical accuracies.
                if(a1!=a2){
                    diff++;
                    // Label fixed from "1-NN" (copy-paste): this is the kNN wrapper.
                    System.out.println(s+": kNN ="+df.format(a1)+" ibk="+df.format(a2));
                }
            }catch(Exception e){
                // Report the cause and exit non-zero so the failure is visible
                // to scripts (previously exit(0) signalled success on error).
                System.out.println(" Exception building a classifier: "+e);
                System.exit(1);
            }
        }
         System.out.println("Total problems ="+DatasetLists.uciFileNames.length+" different on "+diff);
    }
 
Example 3
Source File: EvaluationUtils.java    From AILibs with GNU Affero General Public License v3.0 5 votes vote down vote up
/**
 * Computes an ensemble "difficulty" meta-score for a dataset by combining three
 * signals on a 5% stratified subsample: mean ReliefF attribute relevance, mean
 * variance of the numeric attributes, and 10-NN accuracy on a 70/30 split.
 * Higher returned values mean the combined signals were weaker.
 *
 * @param instances dataset to score; the class attribute is assumed to be the
 *                  last one (loops skip index numAttributes()-1) — TODO confirm
 * @return 1 minus the 0.33-weighted sum of the three signals
 * @throws Exception if any Weka evaluator or classifier fails
 */
public static double performEnsemble(Instances instances) throws Exception {
	// Work on a 5% stratified subsample to keep the evaluation cheap.
	List<Instances> subsample = WekaUtil.getStratifiedSplit(instances, 42, .05f);
	instances = subsample.get(0);

	/* Relief: mean attribute relevance over the non-class attributes. */
	ReliefFAttributeEval relief = new ReliefFAttributeEval();
	relief.buildEvaluator(instances);
	double attEvalSum = 0;
	for (int i = 0; i < instances.numAttributes() - 1; i++) {
		attEvalSum += relief.evaluateAttribute(i);
	}
	// NOTE(review): the sum covers numAttributes()-1 attributes but is divided
	// by numAttributes() — possibly deliberate damping; confirm with the author.
	attEvalSum /= instances.numAttributes();

	/* Variance: mean variance over numeric attributes (0 if there are none). */
	double varianceMean = 0;
	int totalNumericCount = 0;
	for (int i = 0; i < instances.numAttributes() - 1; i++) {
		if (instances.attribute(i).isNumeric()) {
			// Compute the attribute stats once; the original recomputed the
			// full-pass statistics three times per numeric attribute.
			weka.core.AttributeStats stats = instances.attributeStats(i);
			stats.numericStats.calculateDerived();
			varianceMean += Math.pow(stats.numericStats.stdDev, 2);
			totalNumericCount++;
		}
	}
	varianceMean /= (totalNumericCount != 0 ? totalNumericCount : 1);

	/* KNN: 10-NN accuracy on a 70/30 stratified split of the subsample. */
	List<Instances> split = WekaUtil.getStratifiedSplit(instances, 42, .7f);
	IBk knn = new IBk(10);
	knn.buildClassifier(split.get(0));
	Evaluation eval = new Evaluation(split.get(0));
	eval.evaluateModel(knn, split.get(1));
	double knnResult = eval.pctCorrect() / 100d;

	// NOTE(review): weights sum to 0.99, not 1.0 — confirm this is intended.
	return 1 - (0.33 * attEvalSum + 0.33 * knnResult + 0.33 * varianceMean);
}
 
Example 4
Source File: EnsembleEvaluatorTest.java    From AILibs with GNU Affero General Public License v3.0 5 votes vote down vote up
@Test
public void knnEvaluatorTest() throws Exception {
    logger.info("Starting knn evaluation test...");

    /* Fetch the segment dataset from OpenML and load it as Weka instances,
     * with the last attribute as the class. */
    OpenmlConnector connector = new OpenmlConnector();
    DataSetDescription ds = connector.dataGet(DataSetUtils.SEGMENT_ID);
    File file = ds.getDataset(DataSetUtils.API_KEY);
    Instances data = new Instances(new BufferedReader(new FileReader(file)));
    data.setClassIndex(data.numAttributes() - 1);

    /* Keep a 5% stratified subsample, then split it 70/30 for train/test. */
    Instances insts = WekaUtil.getStratifiedSplit(data, 42, .05f).get(0);
    List<Instances> split2 = WekaUtil.getStratifiedSplit(insts, 42, .7f);

    long trainStartMillis = System.currentTimeMillis();

    /* Train a 10-NN classifier on the training fold. */
    IBk knn = new IBk(10);
    knn.buildClassifier(split2.get(0));

    long evalStartMillis = System.currentTimeMillis();

    /* Evaluate on the held-out fold; any accuracy above zero passes. */
    Evaluation eval = new Evaluation(split2.get(0));
    eval.evaluateModel(knn, split2.get(1));
    logger.debug("Pct correct: " + eval.pctCorrect());
    Assert.assertTrue(eval.pctCorrect() > 0);

    long timeTaken = System.currentTimeMillis() - trainStartMillis;
    long timeTakenEval = System.currentTimeMillis() - evalStartMillis;

    logger.debug("KNN took " + (timeTaken / 1000) + " s.");
    logger.debug("KNN eval took " + (timeTakenEval / 1000) + " s.");
}
 
Example 5
Source File: TestIBk.java    From Java-Data-Analysis with MIT License 4 votes vote down vote up
/**
 * Trains a default {@code IBk} (k-nearest-neighbour) classifier on the
 * AnonFruit dataset and prints, for every instance, the actual class value
 * next to the model's prediction (a resubstitution check).
 */
public static void main(String[] args) throws Exception {
    DataSource source = new DataSource("data/AnonFruit.arff");
    Instances instances = source.getDataSet();
    instances.setClassIndex(3);  // target attribute: (Sweet)

    IBk ibk = new IBk();
    ibk.buildClassifier(instances);

    // Predict each training instance and print actual vs predicted.
    for (int i = 0; i < instances.numInstances(); i++) {
        Instance row = instances.instance(i);
        double predicted = ibk.classifyInstance(row);
        System.out.printf("%4.0f%4.0f%n",
                row.classValue(), predicted);
    }
}