Java Code Examples for org.neuroph.nnet.learning.BackPropagation#getTotalNetworkError()

The following examples show how to use org.neuroph.nnet.learning.BackPropagation#getTotalNetworkError(). You can vote up the examples you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also check out the related API usage on the sidebar.
Example 1
Source File: TrainNetwork.java    From NeurophFramework with Apache License 2.0 5 votes vote down vote up
@Override
public void handleLearningEvent(LearningEvent event) {
    // Listener callback: prints per-iteration progress, or a final summary
    // once the learning rule reports that training has stopped.
    BackPropagation rule = (BackPropagation) event.getSource();
    if (!event.getEventType().equals(LearningEvent.Type.LEARNING_STOPPED)) {
        System.out.println("Iteration: " + rule.getCurrentIteration() + " | Network error: " + rule.getTotalNetworkError());
        return;
    }
    double error = rule.getTotalNetworkError();
    System.out.println("Training completed in " + rule.getCurrentIteration() + " iterations, ");
    System.out.println("With total error: " + formatDecimalNumber(error));
}
 
Example 2
Source File: BreastCancerSample.java    From NeurophFramework with Apache License 2.0 5 votes vote down vote up
@Override
public void handleLearningEvent(LearningEvent event) {
    // Reports training progress each iteration and a summary on completion.
    final BackPropagation backprop = (BackPropagation) event.getSource();
    final boolean stopped = event.getEventType().equals(LearningEvent.Type.LEARNING_STOPPED);
    if (stopped) {
        System.out.println("Training completed in " + backprop.getCurrentIteration() + " iterations, ");
        System.out.println("With total error: " + formatDecimalNumber(backprop.getTotalNetworkError()));
    } else {
        System.out.println("Iteration: " + backprop.getCurrentIteration() + " | Network error: " + backprop.getTotalNetworkError());
    }
}
 
Example 3
Source File: BrestCancerSample.java    From NeurophFramework with Apache License 2.0 5 votes vote down vote up
@Override
public void handleLearningEvent(LearningEvent event) {
    // Logs the current iteration/error, or a final report when training stops.
    BackPropagation learner = (BackPropagation) event.getSource();
    int iteration = learner.getCurrentIteration();
    if (event.getEventType().equals(LearningEvent.Type.LEARNING_STOPPED)) {
        String completedLine = "Training completed in " + iteration + " iterations, ";
        String errorLine = "With total error: " + formatDecimalNumber(learner.getTotalNetworkError());
        System.out.println(completedLine);
        System.out.println(errorLine);
    } else {
        System.out.println("Iteration: " + iteration + " | Network error: " + learner.getTotalNetworkError());
    }
}
 
Example 4
Source File: GermanCreditDataSample.java    From NeurophFramework with Apache License 2.0 5 votes vote down vote up
@Override
public void handleLearningEvent(LearningEvent event) {
    // Progress listener for backpropagation training.
    BackPropagation trainer = (BackPropagation) event.getSource();
    if (!event.getEventType().equals(LearningEvent.Type.LEARNING_STOPPED)) {
        // Still running: one progress line per iteration.
        System.out.println("Iteration: " + trainer.getCurrentIteration() + " | Network error: " + trainer.getTotalNetworkError());
    } else {
        // Finished: print the summary.
        double finalError = trainer.getTotalNetworkError();
        System.out.println("Training completed in " + trainer.getCurrentIteration() + " iterations, ");
        System.out.println("With total error: " + formatDecimalNumber(finalError));
    }
}
 
Example 5
Source File: DiabetesSample.java    From NeurophFramework with Apache License 2.0 5 votes vote down vote up
@Override
public void handleLearningEvent(LearningEvent event) {
    // Prints training progress; a summary is emitted when learning stops.
    BackPropagation bp = (BackPropagation) event.getSource();
    LearningEvent.Type type = event.getEventType();
    if (LearningEvent.Type.LEARNING_STOPPED.equals(type)) {
        System.out.println("Training completed in " + bp.getCurrentIteration() + " iterations, ");
        System.out.println("With total error: " + formatDecimalNumber(bp.getTotalNetworkError()));
    } else {
        System.out.println("Iteration: " + bp.getCurrentIteration() + " | Network error: " + bp.getTotalNetworkError());
    }
}
 
Example 6
Source File: IonosphereSample2.java    From NeurophFramework with Apache License 2.0 5 votes vote down vote up
@Override
public void handleLearningEvent(LearningEvent event) {
    // Per-iteration progress output; final summary once training stops.
    BackPropagation source = (BackPropagation) event.getSource();
    if (!event.getEventType().equals(LearningEvent.Type.LEARNING_STOPPED)) {
        System.out.println("Iteration: " + source.getCurrentIteration() + " | Network error: " + source.getTotalNetworkError());
        return;
    }
    double totalError = source.getTotalNetworkError();
    System.out.println("Training completed in " + source.getCurrentIteration() + " iterations, ");
    System.out.println("With total error: " + formatDecimalNumber(totalError));
}
 
Example 7
Source File: IonosphereSample.java    From NeurophFramework with Apache License 2.0 5 votes vote down vote up
@Override
public void handleLearningEvent(LearningEvent event) {
    // Training progress callback: summary on stop, progress line otherwise.
    final BackPropagation backProp = (BackPropagation) event.getSource();
    final boolean trainingStopped =
            event.getEventType().equals(LearningEvent.Type.LEARNING_STOPPED);
    if (trainingStopped) {
        double error = backProp.getTotalNetworkError();
        System.out.println("Training completed in " + backProp.getCurrentIteration() + " iterations, ");
        System.out.println("With total error: " + formatDecimalNumber(error));
    } else {
        System.out.println("Iteration: " + backProp.getCurrentIteration() + " | Network error: " + backProp.getTotalNetworkError());
    }
}
 
Example 8
Source File: MultilayerPerceptronOptimazer.java    From NeurophFramework with Apache License 2.0 4 votes vote down vote up
/**
 * Accumulates the current iteration's total network error into the
 * per-iteration fold-error accumulator, averaged over {@code foldSize}.
 *
 * @param event learning event fired by the backpropagation rule; its source
 *              is the {@code BackPropagation} learning rule
 */
@Override // consistency: every other handleLearningEvent implementation in this file is annotated
public void handleLearningEvent(LearningEvent event) {
    BackPropagation bp = (BackPropagation) event.getSource();
    // getCurrentIteration() is 1-based while foldErrors is 0-indexed, hence the -1.
    foldErrors[bp.getCurrentIteration() - 1] += bp.getTotalNetworkError() / foldSize;
}
 
Example 9
Source File: AutoTrainer.java    From NeurophFramework with Apache License 2.0 4 votes vote down vote up
/**
     * Trains a network for every generated combination of training settings,
     * repeating each combination {@code repeat} times. One
     * {@code TrainingResult} is collected per run — or, when
     * {@code generateStatistics} is enabled, one aggregated statistics entry
     * per setting.
     *
     * You can get results calling getResults() method.
     *
     * @param dataSet the data set used for training (and, when
     *                {@code splitTrainTest} is enabled, split for testing)
     */
    public void train(DataSet dataSet) {// TODO(review, translated): maybe return the TrainingSettings that is the best solution for the given data set?
        generateTrainingSettings();
        List<TrainingResult> statResults = null; // only allocated when statistics are requested
        DataSet trainingSet, testSet; // validationSet;

        // Either split the data into train/test partitions, or reuse the whole
        // set for both (i.e. test on the training data).
        if (splitTrainTest) {
            DataSet[] dataSplit = dataSet.split(splitPercentage, 100-splitPercentage); // NOTE (translated): Maven still not working for neuroph 2.92
            trainingSet = dataSplit[0];
            testSet = dataSplit[1];
        } else {
            trainingSet = dataSet;
            testSet = dataSet;
        }

        if (generateStatistics) {
            statResults = new ArrayList<>();
        }

        int trainingNo = 0;
        for (TrainingSettings trainingSetting : trainingSettingsList) {
            System.out.println("-----------------------------------------------------------------------------------");
            trainingNo++;
            System.out.println("##TRAINING: " + trainingNo);
            trainingSetting.setTrainingSet(splitPercentage);
            trainingSetting.setTestSet(100 - splitPercentage);
            //int subtrainNo = 0;

            // Repeat the same configuration several times so random weight
            // initialization can be averaged out in the statistics.
            for (int subtrainNo = 1; subtrainNo <= repeat; subtrainNo++) {
                System.out.println("#SubTraining: " + subtrainNo);

                MultiLayerPerceptron neuralNet
                        = new MultiLayerPerceptron(dataSet.getInputSize(), trainingSetting.getHiddenNeurons(), dataSet.getOutputSize());

                BackPropagation bp = neuralNet.getLearningRule();

                // Apply the hyperparameters for this configuration.
                bp.setLearningRate(trainingSetting.getLearningRate());
                bp.setMaxError(trainingSetting.getMaxError());
                bp.setMaxIterations(trainingSetting.getMaxIterations());

                neuralNet.learn(trainingSet);
//                  testNeuralNetwork(neuralNet, testSet); // not implemented
                // NOTE(review): this confusion matrix is a placeholder — it is
                // created empty and never populated before being stored.
                ConfusionMatrix cm = new ConfusionMatrix(new String[]{""});
                TrainingResult result = new TrainingResult(trainingSetting, bp.getTotalNetworkError(), bp.getCurrentIteration(),cm);
                System.out.println(subtrainNo + ") iterations: " + bp.getCurrentIteration());

                if (generateStatistics) {
                    statResults.add(result);
                } else {
                    results.add(result);
                }

            }

            // Collapse the per-repeat results into one aggregated entry per
            // training setting, then reset the scratch list for the next one.
            if (generateStatistics) {
                TrainingResult trainingStats = calculateTrainingStatistics(trainingSetting, statResults);
                results.add(trainingStats);
                statResults.clear();
            }

        }

    }