Java Code Examples for org.neuroph.nnet.learning.BackPropagation#setMaxIterations()

The following examples show how to use org.neuroph.nnet.learning.BackPropagation#setMaxIterations(). You can vote up the examples you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also check out the related API usage in the sidebar.
Example 1
Source File: NeurophXOR.java    From tutorials with MIT License 6 votes vote down vote up
/**
 * Trains the given neural network on the XOR truth table using back-propagation.
 *
 * @param ann the network to train; training runs for at most 1000 iterations
 * @return the same network instance, after training
 */
public static NeuralNetwork trainNeuralNetwork(NeuralNetwork ann) {
    final int inputSize = 2;
    final int outputSize = 1;
    DataSet trainingSet = new DataSet(inputSize, outputSize);

    // XOR truth table: output is 1 exactly when the two inputs differ.
    trainingSet.addRow(new DataSetRow(new double[] { 0, 1 }, new double[] { 1 }));
    trainingSet.addRow(new DataSetRow(new double[] { 1, 1 }, new double[] { 0 }));
    trainingSet.addRow(new DataSetRow(new double[] { 0, 0 }, new double[] { 0 }));
    trainingSet.addRow(new DataSetRow(new double[] { 1, 0 }, new double[] { 1 }));

    BackPropagation learningRule = new BackPropagation();
    learningRule.setMaxIterations(1000);

    ann.learn(trainingSet, learningRule);
    return ann;
}
 
Example 2
Source File: BackpropagationTraining.java    From NeurophFramework with Apache License 2.0 5 votes vote down vote up
/**
 * Creates a back-propagation learning rule and configures it from this
 * trainer's settings (learning rate, max error, batch mode, iteration limit).
 *
 * @return returns learning rule with predefined parameters
 */
@Override
public LearningRule setParameters() {
    BackPropagation rule = new BackPropagation();
    rule.setMaxIterations(getSettings().getMaxIterations());
    rule.setBatchMode(getSettings().isBatchMode());
    rule.setMaxError(getSettings().getMaxError());
    rule.setLearningRate(getSettings().getLearningRate());
    return rule;
}
 
Example 3
Source File: DigitsRecognition.java    From NeurophFramework with Apache License 2.0 5 votes vote down vote up
/**
 * Trains a multi-layer perceptron on the digit bitmaps from DigitData,
 * printing progress for each iteration, then tests it on the same data set.
 */
public static void main(String args[]) {

        //create training set from Data.DIGITS
        DataSet dataSet = generateTrainingSet();

        int inputCount = DigitData.CHAR_HEIGHT * DigitData.CHAR_WIDTH;
        int outputCount = DigitData.DIGITS.length;
        int hiddenNeurons = 19;

        //create neural network
        MultiLayerPerceptron neuralNet = new MultiLayerPerceptron(inputCount, hiddenNeurons, outputCount);

        //configure the network's own backpropagation learning rule
        BackPropagation learningRule = neuralNet.getLearningRule();
        learningRule.setLearningRate(0.5);
        learningRule.setMaxError(0.001);
        learningRule.setMaxIterations(5000);

        //print each iteration's error, plus a summary once learning stops
        learningRule.addListener(event -> {
            BackPropagation bp = (BackPropagation) event.getSource();
            if (event.getEventType().equals(LearningEvent.Type.LEARNING_STOPPED)) {
                System.out.println();
                System.out.println("Training completed in " + bp.getCurrentIteration() + " iterations");
                System.out.println("With total error " + bp.getTotalNetworkError() + '\n');
            } else {
                System.out.println("Iteration: " + bp.getCurrentIteration() + " | Network error: " + bp.getTotalNetworkError());
            }
        });

        //train neural network
        neuralNet.learn(dataSet);

        //test the trained network with the training set
        testNeuralNetwork(neuralNet, dataSet);

    }
 
Example 4
Source File: AutoTrainer.java    From NeurophFramework with Apache License 2.0 4 votes vote down vote up
/**
     * Trains one MultiLayerPerceptron per generated {@code TrainingSettings}
     * combination, repeating each training {@code repeat} times; optionally
     * aggregates the repeats into statistics.
     *
     * You can get results calling getResults() method.
     *
     * @param dataSet data set used for training (and, when {@code splitTrainTest}
     *                is enabled, for a train/test split)
     */
    public void train(DataSet dataSet) {// TODO(review): maybe return the TrainingSettings that is the best solution for the given data set?
        generateTrainingSettings();
        // Accumulates per-repeat results; only allocated when statistics are requested.
        List<TrainingResult> statResults = null;
        DataSet trainingSet, testSet; // validationSet;

        if (splitTrainTest) {
            DataSet[] dataSplit = dataSet.split(splitPercentage, 100-splitPercentage); // NOTE(review): original comment said the Maven build for Neuroph 2.92 still fails here
            trainingSet = dataSplit[0];
            testSet = dataSplit[1];
        } else {
            // No split requested: train and "test" on the full data set.
            trainingSet = dataSet;
            testSet = dataSet;
        }

        if (generateStatistics) {
            statResults = new ArrayList<>();
        }

        int trainingNo = 0;
        for (TrainingSettings trainingSetting : trainingSettingsList) {
            System.out.println("-----------------------------------------------------------------------------------");
            trainingNo++;
            System.out.println("##TRAINING: " + trainingNo);
            trainingSetting.setTrainingSet(splitPercentage);
            trainingSetting.setTestSet(100 - splitPercentage);
            //int subtrainNo = 0;

            // Repeat each configuration to average out random weight initialization.
            for (int subtrainNo = 1; subtrainNo <= repeat; subtrainNo++) {
                System.out.println("#SubTraining: " + subtrainNo);

                MultiLayerPerceptron neuralNet
                        = new MultiLayerPerceptron(dataSet.getInputSize(), trainingSetting.getHiddenNeurons(), dataSet.getOutputSize());

                BackPropagation bp = neuralNet.getLearningRule();

                bp.setLearningRate(trainingSetting.getLearningRate());
                bp.setMaxError(trainingSetting.getMaxError());
                bp.setMaxIterations(trainingSetting.getMaxIterations());

                neuralNet.learn(trainingSet);
//                  testNeuralNetwork(neuralNet, testSet); // not implemented
                // NOTE(review): placeholder confusion matrix — the test step above is not implemented yet.
                ConfusionMatrix cm = new ConfusionMatrix(new String[]{""});
                TrainingResult result = new TrainingResult(trainingSetting, bp.getTotalNetworkError(), bp.getCurrentIteration(),cm);
                System.out.println(subtrainNo + ") iterations: " + bp.getCurrentIteration());

                if (generateStatistics) {
                    statResults.add(result);
                } else {
                    results.add(result);
                }

            }

            if (generateStatistics) {
                // Collapse this setting's repeats into one aggregated result, then reset.
                TrainingResult trainingStats = calculateTrainingStatistics(trainingSetting, statResults);
                results.add(trainingStats);
                statResults.clear();
            }

        }

    }
 
Example 5
Source File: MLPMNISTOptimization.java    From NeurophFramework with Apache License 2.0 4 votes vote down vote up
/**
 * Builds the back-propagation rule used for MNIST training:
 * at most 100 iterations, target network error 0.0001.
 *
 * @return a configured {@link BackPropagation} learning rule
 */
private static BackPropagation createLearningRule() {
    final BackPropagation rule = new BackPropagation();
    rule.setMaxError(0.0001);
    rule.setMaxIterations(100);
    return rule;
}
 
Example 6
Source File: IrisOptimization.java    From NeurophFramework with Apache License 2.0 4 votes vote down vote up
/**
 * Builds the back-propagation rule used for Iris training:
 * at most 50 iterations, target network error 0.0001.
 *
 * @return a configured {@link BackPropagation} learning rule
 */
private static BackPropagation createLearningRule() {
    final BackPropagation rule = new BackPropagation();
    rule.setMaxError(0.0001);
    rule.setMaxIterations(50);
    return rule;
}
 
Example 7
Source File: MultiLayerMNIST.java    From NeurophFramework with Apache License 2.0 4 votes vote down vote up
/**
     * Entry point: trains an MLP on MNIST via a (currently disabled) optimizer.
     *
     * @param args Command line parameters used to initialize parameters of multi layer neural network optimizer
     *             [0] - maximal number of epochs during learning (currently hard-coded below)
     *             [1] - learning error stop condition (currently hard-coded below)
     *             [2] - learning rate used during learning process (currently hard-coded below)
     *             [3] - number of validation folds
     *             [4] - max number of layers in neural network
     *             [5] - min neuron count per layer
     *             [6] - max neuron count per layer
     *             [7] - neuron increment count
     */
    public static void main(String[] args) throws IOException {

        // First three parameters are hard-coded for now; original arg parsing kept in comments.
        int maxIter = 10000; //Integer.parseInt(args[0]);
        double maxError = 0.01; // Double.parseDouble(args[1]);
        double learningRate = 0.2 ; // Double.parseDouble(args[2]);

        // NOTE(review): the parses below throw ArrayIndexOutOfBoundsException when fewer
        // than 8 arguments are supplied, and their values are only consumed by the
        // commented-out optimizer code further down — confirm before relying on them.
        int validationFolds = Integer.parseInt(args[3]);

        int maxLayers = Integer.parseInt(args[4]);
        int minNeuronCount = Integer.parseInt(args[5]);
        int maxNeuronCount = Integer.parseInt(args[6]);
        int neuronIncrement = Integer.parseInt(args[7]);

        LOG.info("MLP learning for MNIST started.....");

        // Load the full MNIST train (60k) and test (10k) sets from local files.
        DataSet trainSet = MNISTDataSet.createFromFile(MNISTDataSet.TRAIN_LABEL_NAME, MNISTDataSet.TRAIN_IMAGE_NAME, 60000);
        DataSet testSet = MNISTDataSet.createFromFile(MNISTDataSet.TEST_LABEL_NAME, MNISTDataSet.TEST_IMAGE_NAME, 10000);

        // Learning rule is configured but unused while the optimizer below stays disabled.
        BackPropagation bp = new BackPropagation();
        bp.setMaxIterations(maxIter);
        bp.setMaxError(maxError);
        bp.setLearningRate(learningRate);
// commented out due to errors
//        KFoldCrossValidation errorEstimationMethod = new KFoldCrossValidation(neuralNet, trainSet, validationFolds);
//
//        NeuralNetwork neuralNet = new MultilayerPerceptronOptimazer<>()
//                .withLearningRule(bp)
//                .withErrorEstimationMethod(errorEstimationMethod)
//                .withMaxLayers(maxLayers)
//                .withMaxNeurons(maxNeuronCount)
//                .withMinNeurons(minNeuronCount)
//                .withNeuronIncrement(neuronIncrement)
//                .createOptimalModel(trainSet);

        LOG.info("Evaluating model on Test Set.....");
// commented out due to errors
      //  Evaluation.runFullEvaluation(neuralNet, testSet);

        LOG.info("MLP learning for MNIST successfully finished.....");
    }