Java Code Examples for org.neuroph.nnet.learning.BackPropagation#setMaxError()

The following examples show how to use org.neuroph.nnet.learning.BackPropagation#setMaxError(). In Neuroph, setMaxError() sets the error stop condition for supervised learning: training ends once the total network error drops below the given threshold. Each example is taken from an open-source project; the source file and license are noted above each snippet.
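For orientation, the snippet below is a minimal, self-contained sketch of the usual pattern; the class name, network dimensions, and XOR data set are illustrative, not taken from any of the examples. setMaxError() is typically paired with setMaxIterations() so that training stops as soon as either condition is met.

import org.neuroph.core.data.DataSet;
import org.neuroph.nnet.MultiLayerPerceptron;
import org.neuroph.nnet.learning.BackPropagation;

public class SetMaxErrorSketch {
    public static void main(String[] args) {
        // toy XOR problem: 2 inputs, 3 hidden neurons, 1 output (illustrative dimensions)
        MultiLayerPerceptron net = new MultiLayerPerceptron(2, 3, 1);
        BackPropagation bp = net.getLearningRule();

        bp.setMaxError(0.01);       // stop once total network error drops below 0.01
        bp.setMaxIterations(10000); // safety cap in case the error threshold is never reached

        DataSet xor = new DataSet(2, 1);
        xor.addRow(new double[]{0, 0}, new double[]{0});
        xor.addRow(new double[]{0, 1}, new double[]{1});
        xor.addRow(new double[]{1, 0}, new double[]{1});
        xor.addRow(new double[]{1, 1}, new double[]{0});

        net.learn(xor); // blocks until one of the stop conditions is met
    }
}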
Example 1
Source File: BackpropagationTraining.java    From NeurophFramework with Apache License 2.0
/**
 * Creates an instance of the learning rule and sets the given parameters.
 * @return learning rule configured with the given parameters
 */
@Override
public LearningRule setParameters() {
    BackPropagation bp = new BackPropagation();
    bp.setLearningRate(getSettings().getLearningRate());
    bp.setMaxError(getSettings().getMaxError());
    bp.setBatchMode(getSettings().isBatchMode());
    bp.setMaxIterations(getSettings().getMaxIterations());
    return bp;
}
 
Example 2
Source File: DigitsRecognition.java    From NeurophFramework with Apache License 2.0
public static void main(String args[]) {

    //create training set from Data.DIGITS
    DataSet dataSet = generateTrainingSet();

    int inputCount = DigitData.CHAR_HEIGHT * DigitData.CHAR_WIDTH;
    int outputCount = DigitData.DIGITS.length;
    int hiddenNeurons = 19;

    //create neural network
    MultiLayerPerceptron neuralNet = new MultiLayerPerceptron(inputCount, hiddenNeurons, outputCount);
    //get backpropagation learning rule from network
    BackPropagation learningRule = neuralNet.getLearningRule();

    learningRule.setLearningRate(0.5);
    learningRule.setMaxError(0.001);
    learningRule.setMaxIterations(5000);

    //add learning listener in order to print out training info
    learningRule.addListener(new LearningEventListener() {
        @Override
        public void handleLearningEvent(LearningEvent event) {
            BackPropagation bp = (BackPropagation) event.getSource();
            if (event.getEventType().equals(LearningEvent.Type.LEARNING_STOPPED)) {
                System.out.println();
                System.out.println("Training completed in " + bp.getCurrentIteration() + " iterations");
                System.out.println("With total error " + bp.getTotalNetworkError() + '\n');
            } else {
                System.out.println("Iteration: " + bp.getCurrentIteration() + " | Network error: " + bp.getTotalNetworkError());
            }
        }
    });

    //train neural network
    neuralNet.learn(dataSet);

    //test the network with the training set
    testNeuralNetwork(neuralNet, dataSet);

}
 
Example 3
Source File: AutoTrainer.java    From NeurophFramework with Apache License 2.0
/**
 * Trains a network for each generated combination of training settings.
 * You can get the results by calling the getResults() method.
 *
 * @param dataSet data set to train and test on
 */
public void train(DataSet dataSet) { // maybe return the TrainingSettings that is the best solution for the given data set?
    generateTrainingSettings();
    List<TrainingResult> statResults = null;
    DataSet trainingSet, testSet; // validationSet;

    if (splitTrainTest) {
        DataSet[] dataSplit = dataSet.split(splitPercentage, 100 - splitPercentage); // again, Maven does not work for neuroph 2.92
        trainingSet = dataSplit[0];
        testSet = dataSplit[1];
    } else {
        trainingSet = dataSet;
        testSet = dataSet;
    }

    if (generateStatistics) {
        statResults = new ArrayList<>();
    }

    int trainingNo = 0;
    for (TrainingSettings trainingSetting : trainingSettingsList) {
        System.out.println("-----------------------------------------------------------------------------------");
        trainingNo++;
        System.out.println("##TRAINING: " + trainingNo);
        trainingSetting.setTrainingSet(splitPercentage);
        trainingSetting.setTestSet(100 - splitPercentage);

        for (int subtrainNo = 1; subtrainNo <= repeat; subtrainNo++) {
            System.out.println("#SubTraining: " + subtrainNo);

            MultiLayerPerceptron neuralNet
                    = new MultiLayerPerceptron(dataSet.getInputSize(), trainingSetting.getHiddenNeurons(), dataSet.getOutputSize());

            BackPropagation bp = neuralNet.getLearningRule();

            bp.setLearningRate(trainingSetting.getLearningRate());
            bp.setMaxError(trainingSetting.getMaxError());
            bp.setMaxIterations(trainingSetting.getMaxIterations());

            neuralNet.learn(trainingSet);
//          testNeuralNetwork(neuralNet, testSet); // not implemented
            ConfusionMatrix cm = new ConfusionMatrix(new String[]{""});
            TrainingResult result = new TrainingResult(trainingSetting, bp.getTotalNetworkError(), bp.getCurrentIteration(), cm);
            System.out.println(subtrainNo + ") iterations: " + bp.getCurrentIteration());

            if (generateStatistics) {
                statResults.add(result);
            } else {
                results.add(result);
            }

        }

        if (generateStatistics) {
            TrainingResult trainingStats = calculateTrainingStatistics(trainingSetting, statResults);
            results.add(trainingStats);
            statResults.clear();
        }

    }

}
 
Example 4
Source File: MLPMNISTOptimization.java    From NeurophFramework with Apache License 2.0
private static BackPropagation createLearningRule() {
    BackPropagation learningRule = new BackPropagation();
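    // training stops once the total network error falls below 0.0001,
    // or after 100 epochs, whichever happens first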
    learningRule.setMaxIterations(100);
    learningRule.setMaxError(0.0001);
    return learningRule;
}
 
Example 5
Source File: IrisOptimization.java    From NeurophFramework with Apache License 2.0
private static BackPropagation createLearningRule() {
    BackPropagation learningRule = new BackPropagation();
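    // training stops once the total network error falls below 0.0001,
    // or after 50 epochs, whichever happens first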
    learningRule.setMaxIterations(50);
    learningRule.setMaxError(0.0001);
    return learningRule;
}
 
Example 6
Source File: MultiLayerMNIST.java    From NeurophFramework with Apache License 2.0
/**
 * @param args Command line parameters used to initialize parameters of the multi layer neural network optimizer
 *             [0] - maximum number of epochs during learning
 *             [1] - learning error stop condition
 *             [2] - learning rate used during learning process
 *             [3] - number of validation folds
 *             [4] - max number of layers in neural network
 *             [5] - min neuron count per layer
 *             [6] - max neuron count per layer
 *             [7] - neuron increment count
 */
public static void main(String[] args) throws IOException {

    int maxIter = 10000; // Integer.parseInt(args[0]);
    double maxError = 0.01; // Double.parseDouble(args[1]);
    double learningRate = 0.2; // Double.parseDouble(args[2]);

    int validationFolds = Integer.parseInt(args[3]);

    int maxLayers = Integer.parseInt(args[4]);
    int minNeuronCount = Integer.parseInt(args[5]);
    int maxNeuronCount = Integer.parseInt(args[6]);
    int neuronIncrement = Integer.parseInt(args[7]);

    LOG.info("MLP learning for MNIST started.....");

    DataSet trainSet = MNISTDataSet.createFromFile(MNISTDataSet.TRAIN_LABEL_NAME, MNISTDataSet.TRAIN_IMAGE_NAME, 60000);
    DataSet testSet = MNISTDataSet.createFromFile(MNISTDataSet.TEST_LABEL_NAME, MNISTDataSet.TEST_IMAGE_NAME, 10000);

    BackPropagation bp = new BackPropagation();
    bp.setMaxIterations(maxIter);
    bp.setMaxError(maxError);
    bp.setLearningRate(learningRate);
// commented out due to errors
//    KFoldCrossValidation errorEstimationMethod = new KFoldCrossValidation(neuralNet, trainSet, validationFolds);
//
//    NeuralNetwork neuralNet = new MultilayerPerceptronOptimazer<>()
//            .withLearningRule(bp)
//            .withErrorEstimationMethod(errorEstimationMethod)
//            .withMaxLayers(maxLayers)
//            .withMaxNeurons(maxNeuronCount)
//            .withMinNeurons(minNeuronCount)
//            .withNeuronIncrement(neuronIncrement)
//            .createOptimalModel(trainSet);

    LOG.info("Evaluating model on Test Set.....");
// commented out due to errors
//    Evaluation.runFullEvaluation(neuralNet, testSet);

    LOG.info("MLP learning for MNIST successfully finished.....");
}
 
Example 7
Source File: TrainingSample.java    From NeurophFramework with Apache License 2.0
public static void main(String[] args) throws IOException {

    // User input parameters
    //*******************************************************************************************************************************
    String imagePath = "C:/Users/Mihailo/Desktop/OCR/slova.png"; // path to the image with letters
    String folderPath = "C:/Users/Mihailo/Desktop/OCR/ImagesDir/"; // location of the folder for storing segmented letters
    String textPath = "C:/Users/Mihailo/Desktop/OCR/slova.txt"; // path to the .txt file with the text on the image
    String networkPath = "C:/Users/Mihailo/Desktop/OCR/network.nnet"; // location where the network will be stored
    int fontSize = 12; // font size, estimated from the height of the letters; the minimum font size is 12 pt
    int scanQuality = 300; // scan quality; the minimum quality is 300 dpi
    //*******************************************************************************************************************************

    BufferedImage image = ImageIO.read(new File(imagePath));
    ImageFilterChain chain = new ImageFilterChain();
    chain.addFilter(new GrayscaleFilter());
    chain.addFilter(new OtsuBinarizeFilter());
    BufferedImage binarizedImage = chain.apply(image);

    Letter letterInfo = new Letter(scanQuality, binarizedImage);
//    letterInfo.recognizeDots(); // call this method only if you want to recognize dots and other little characters, TODO

    Text texTInfo = new Text(binarizedImage, letterInfo);

    OCRTraining ocrTraining = new OCRTraining(letterInfo, texTInfo);
    ocrTraining.setFolderPath(folderPath);
    ocrTraining.setTrainingTextPath(textPath);
    ocrTraining.prepareTrainingSet();

    List<String> characterLabels = ocrTraining.getCharacterLabels();

    Map<String, FractionRgbData> map = ImageRecognitionHelper.getFractionRgbDataForDirectory(new File(folderPath), new Dimension(20, 20));
    DataSet dataSet = ImageRecognitionHelper.createBlackAndWhiteTrainingSet(characterLabels, map);

    dataSet.setFilePath("C:/Users/Mihailo/Desktop/OCR/DataSet1.tset");
    dataSet.save();

    List<Integer> hiddenLayers = new ArrayList<Integer>();
    hiddenLayers.add(12);

    NeuralNetwork nnet = ImageRecognitionHelper.createNewNeuralNetwork("someNetworkName", new Dimension(20, 20), ColorMode.BLACK_AND_WHITE, characterLabels, hiddenLayers, TransferFunctionType.SIGMOID);
    BackPropagation bp = (BackPropagation) nnet.getLearningRule();
    bp.setLearningRate(0.3);
    bp.setMaxError(0.1);

//    MultiLayerPerceptron mlp = new MultiLayerPerceptron(12,13);
//    mlp.setOutputNeurons(null);

    System.out.println("Start learning...");
    nnet.learn(dataSet);
    System.out.println("NNet learned");

    nnet.save(networkPath);

}