weka.classifiers.rules.ZeroR Java Examples
The following examples show how to use
weka.classifiers.rules.ZeroR.
You can vote up the examples you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example. You may also check out the related API usage in the sidebar.
Example #1
Source File: ClassifierSubsetEval.java From tsml with GNU General Public License v3.0 | 5 votes |
/**
 * Restores every evaluator setting to its default value: no cached
 * training data or evaluation, a ZeroR base classifier, no hold-out
 * file/instances, and accuracy estimation on held-out data rather
 * than the training set.
 */
protected void resetOptions () {
  m_useTraining = false;
  m_holdOutInstances = null;
  m_holdOutFile = new File("Click to set hold out or test instances");
  m_Classifier = new ZeroR();
  m_Evaluation = null;
  m_trainingInstances = null;
}
Example #2
Source File: WrapperSubsetEval.java From tsml with GNU General Public License v3.0 | 5 votes |
/**
 * Resets all wrapper-evaluation settings to their defaults: clears the
 * cached training data and evaluation, installs ZeroR as the base
 * classifier, and restores 5-fold cross-validation with seed 1 and a
 * repeat threshold of 0.01.
 */
protected void resetOptions () {
  m_threshold = 0.01;
  m_seed = 1;
  m_folds = 5;
  m_BaseClassifier = new ZeroR();
  m_Evaluation = null;
  m_trainInstances = null;
}
Example #3
Source File: WekaInstancesUtilTester.java From AILibs with GNU Affero General Public License v3.0 | 5 votes |
/**
 * Trains a ZeroR-backed {@code WekaClassifier} on a 70/30 split of the
 * problem-set dataset and checks that the prediction batch returned by
 * {@code fitAndPredict} contains exactly one prediction per test instance.
 */
@Test
public void testTrainingAndPrediction() throws DatasetDeserializationFailedException, InterruptedException, TrainingException, SplitFailedException, PredictionException {
  ILabeledDataset<?> data = this.problemSet.getDataset();
  // seed 0, 70% of the instances go to the training fold
  List<ILabeledDataset<?>> folds = SplitterUtil.getSimpleTrainTestSplit(data, 0, .7);
  ILabeledDataset<?> trainFold = folds.get(0);
  ILabeledDataset<?> testFold = folds.get(1);
  WekaClassifier classifier = new WekaClassifier(new ZeroR());
  ISingleLabelClassificationPredictionBatch predictions = classifier.fitAndPredict(trainFold, testFold);
  assertEquals(testFold.size(), predictions.size());
}
Example #4
Source File: ClassifierSubsetEval.java From tsml with GNU General Public License v3.0 | 4 votes |
/**
 * Parses a given list of options. <p/>
 *
 * Valid options are: <p/>
 *
 * <pre> -B &lt;classifier&gt;
 *  class name of the classifier to use for accuracy estimation.
 *  Place any classifier options LAST on the command line
 *  following a "--". eg.:
 *   -B weka.classifiers.bayes.NaiveBayes ... -- -K
 *  (default: weka.classifiers.rules.ZeroR)</pre>
 *
 * <pre> -T
 *  Use the training data to estimate accuracy.</pre>
 *
 * <pre> -H &lt;filename&gt;
 *  Name of the hold out/test set to estimate accuracy on.</pre>
 *
 * Any options after "--" are handed to the chosen classifier itself.
 *
 * @param options the list of options as an array of strings
 * @throws Exception if an option is not supported
 */
public void setOptions (String[] options) throws Exception {
  // start from a clean slate so unspecified options keep their defaults
  resetOptions();

  // -B: base classifier spec; fall back to ZeroR when absent
  String classifierSpec = Utils.getOption('B', options);
  if (classifierSpec.length() == 0) {
    classifierSpec = ZeroR.class.getName();
  }
  // everything after "--" belongs to the classifier, not this evaluator
  setClassifier(AbstractClassifier.forName(classifierSpec, Utils.partitionOptions(options)));

  // -H: optional hold-out/test file for accuracy estimation
  String holdOutName = Utils.getOption('H', options);
  if (holdOutName.length() != 0) {
    setHoldOutFile(new File(holdOutName));
  }

  // -T: estimate accuracy on the training data instead
  setUseTraining(Utils.getFlag('T', options));
}
Example #5
Source File: AdditiveRegression.java From tsml with GNU General Public License v3.0 | 4 votes |
/**
 * Build the classifier on the supplied data.
 *
 * Fits a ZeroR (mean) model first, then repeatedly fits the base
 * classifiers to the residuals of the previous round, stopping when the
 * sum of squared residuals no longer improves by more than Utils.SMALL
 * or the configured number of iterations is exhausted.
 *
 * @param data the training data
 * @throws Exception if the classifier could not be built successfully
 */
public void buildClassifier(Instances data) throws Exception {
  super.buildClassifier(data);

  // can classifier handle the data?
  getCapabilities().testWithFail(data);

  // remove instances with missing class; work on a copy so the caller's
  // dataset is left untouched
  Instances newData = new Instances(data);
  newData.deleteWithMissingClass();

  // sum = current sum of squared residuals; temp_sum = previous round's
  double sum = 0;
  double temp_sum = 0;

  // Add the model for the mean first
  m_zeroR = new ZeroR();
  m_zeroR.buildClassifier(newData);

  // only class? -> use only ZeroR model
  if (newData.numAttributes() == 1) {
    System.err.println(
        "Cannot build model (only class attribute present in data!), "
        + "using ZeroR model instead!");
    m_SuitableData = false;
    return;
  }
  else {
    m_SuitableData = true;
  }

  // replace each class value with its residual w.r.t. the mean model
  // (false: ZeroR's prediction is used as-is, not shrunk — TODO confirm
  // the flag's exact meaning against residualReplace's definition)
  newData = residualReplace(newData, m_zeroR, false);
  // weighted sum of squared residuals after the mean model
  for (int i = 0; i < newData.numInstances(); i++) {
    sum += newData.instance(i).weight() *
      newData.instance(i).classValue() *
      newData.instance(i).classValue();
  }
  if (m_Debug) {
    System.err.println("Sum of squared residuals "
                       +"(predicting the mean) : " + sum);
  }

  m_NumIterationsPerformed = 0;
  do {
    temp_sum = sum;

    // Build the classifier on the current residuals
    m_Classifiers[m_NumIterationsPerformed].buildClassifier(newData);

    newData = residualReplace(newData, m_Classifiers[m_NumIterationsPerformed], true);
    // recompute the weighted sum of squared residuals for this round
    sum = 0;
    for (int i = 0; i < newData.numInstances(); i++) {
      sum += newData.instance(i).weight() *
        newData.instance(i).classValue() *
        newData.instance(i).classValue();
    }
    if (m_Debug) {
      System.err.println("Sum of squared residuals : "+sum);
    }
    m_NumIterationsPerformed++;
  // continue while the improvement exceeds Utils.SMALL and iterations remain
  } while (((temp_sum - sum) > Utils.SMALL) &&
           (m_NumIterationsPerformed < m_Classifiers.length));
}