Java Code Examples for org.neuroph.core.NeuralNetwork#learn()

The following examples show how to use org.neuroph.core.NeuralNetwork#learn() . You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source File: CrossValidationBak.java    From NeurophFramework with Apache License 2.0 6 votes vote down vote up
@Override
public EvaluationResult call() {
    // Work on a deep clone so parallel folds never share mutable network state.
    NeuralNetwork neuralNet = SerializationUtils.clone(this.neuralNetwork);

    DataSet trainingSet = new DataSet(dataSet.size() - foldSize);
    DataSet testSet = new DataSet(foldSize);

    // Rows in [startIndex, endIndex) form the held-out fold; all others train.
    int startIndex = foldSize * foldIndex;
    int endIndex = foldSize * (foldIndex + 1);

    for (int i = 0; i < dataSet.size(); i++) {
        if (i >= startIndex && i < endIndex) {
            testSet.add(dataSet.getRowAt(i));
        } else {
            trainingSet.add(dataSet.getRowAt(i));
        }
    }

    neuralNet.learn(trainingSet);

    // evaluate(...) returns its own result object, so attach the trained
    // network to THAT instance. (The original built a separate result,
    // called setNeuralNetwork on it, then discarded it by reassigning the
    // variable — a dead store that left the returned result without the
    // network reference.)
    EvaluationResult evaluationResult = evaluation.evaluate(neuralNet, testSet);
    evaluationResult.setNeuralNetwork(neuralNet);
    return evaluationResult;
}
 
Example 2
Source File: SunSpots.java    From NeurophFramework with Apache License 2.0 6 votes vote down vote up
public void run() {
    // To use regular Neuroph (non-flat) processing, uncomment:
    // Neuroph.getInstance().setFlattenNetworks(false);

    // Sigmoid MLP: WINDOW_SIZE inputs, one hidden layer of 10 neurons, 1 output.
    NeuralNetwork net = new MultiLayerPerceptron(TransferFunctionType.SIGMOID, WINDOW_SIZE, 10, 1);

    // Scale the sunspot series into the [0.1, 0.9] range before training.
    normalizeSunspots(0.1, 0.9);

    // Receive learning-progress events on this object.
    net.getLearningRule().addListener(this);

    // Train on the sliding-window data set, then run the prediction pass.
    DataSet sunspotData = generateTrainingData();
    net.learn(sunspotData);
    predict(net);

    Neuroph.getInstance().shutdown();
}
 
Example 3
Source File: PerceptronSample.java    From NeurophFramework with Apache License 2.0 6 votes vote down vote up
/**
 * Runs this sample: trains a perceptron on the logical AND function,
 * tests it, saves it to disk, reloads it, and tests the reloaded copy.
 */
public static void main(String args[]) {

        // Training set for logical AND (two inputs, one output).
        DataSet andData = new DataSet(2, 1);
        andData.add(new DataSetRow(new double[]{0, 0}, new double[]{0}));
        andData.add(new DataSetRow(new double[]{0, 1}, new double[]{0}));
        andData.add(new DataSetRow(new double[]{1, 0}, new double[]{0}));
        andData.add(new DataSetRow(new double[]{1, 1}, new double[]{1}));

        // Build and train a 2-input, 1-output perceptron.
        NeuralNetwork perceptron = new Perceptron(2, 1);
        perceptron.learn(andData);

        // Verify the freshly trained network.
        System.out.println("Testing trained perceptron");
        testNeuralNetwork(perceptron, andData);

        // Persist, reload, and verify the round-tripped network behaves the same.
        perceptron.save("mySamplePerceptron.nnet");
        NeuralNetwork restored = NeuralNetwork.load("mySamplePerceptron.nnet");
        System.out.println("Testing loaded perceptron");
        testNeuralNetwork(restored, andData);
}
 
Example 4
Source File: NeurophXOR.java    From tutorials with MIT License 6 votes vote down vote up
/**
 * Trains the given network on the XOR truth table with backpropagation
 * capped at 1000 iterations, and returns the same (now trained) network.
 *
 * @param ann the network to train; mutated in place
 * @return the trained network (same instance as {@code ann})
 */
public static NeuralNetwork trainNeuralNetwork(NeuralNetwork ann) {
    DataSet xorData = new DataSet(2, 1);

    // XOR truth table: output is 1 exactly when the two inputs differ.
    xorData.addRow(new DataSetRow(new double[] { 0, 1 }, new double[] { 1 }));
    xorData.addRow(new DataSetRow(new double[] { 1, 1 }, new double[] { 0 }));
    xorData.addRow(new DataSetRow(new double[] { 0, 0 }, new double[] { 0 }));
    xorData.addRow(new DataSetRow(new double[] { 1, 0 }, new double[] { 1 }));

    // Limit training to 1000 epochs.
    BackPropagation trainer = new BackPropagation();
    trainer.setMaxIterations(1000);

    ann.learn(xorData, trainer);
    return ann;
}
 
Example 5
Source File: IrisOptimization.java    From NeurophFramework with Apache License 2.0 5 votes vote down vote up
/**
 * Loads the iris data set, searches for an optimal MLP architecture,
 * trains the winning model, and prints a full evaluation report.
 */
public static void main(String[] args) {
    final String inputFileName = "/iris_data.txt";

    // 4 input columns, 3 output columns, comma-separated, no header row.
    DataSet irisData = DataSet.createFromFile(inputFileName, 4, 3, ",", false);

    // Architecture search using the configured backpropagation rule.
    NeuralNetwork bestModel = new MultilayerPerceptronOptimazer<>()
            .withLearningRule(createLearningRule())
            .createOptimalModel(irisData);

    bestModel.learn(irisData);
    Evaluation.runFullEvaluation(bestModel, irisData);
}
 
Example 6
Source File: RGBImageRecognitionTrainingSample.java    From NeurophFramework with Apache License 2.0 5 votes vote down vote up
/**
 * Trains an RGB image-recognition network on 20x20 downscaled images
 * read from a fixed directory, with one output neuron per label.
 *
 * @throws IOException if the image directory cannot be read
 */
public static void main(String[] args) throws IOException {

    // Path to the directory containing the training images.
    String imageDir = "/home/zoran/Downloads/MihailoHSLTest/trening";

    // Image names, used as output-neuron labels.
    // (Was a raw ArrayList — diamond added to avoid unchecked warnings.)
    List<String> imageLabels = new ArrayList<>();
    imageLabels.add("bird");
    imageLabels.add("cat");
    imageLabels.add("dog");

    // Build the RGB training set from 20x20 downscaled images.
    Map<String, FractionRgbData> map = ImageRecognitionHelper.getFractionRgbDataForDirectory(new File(imageDir), new Dimension(20, 20));
    DataSet dataSet = ImageRecognitionHelper.createRGBTrainingSet(imageLabels, map);

    // Network with a single hidden layer of 12 neurons.
    List<Integer> hiddenLayers = new ArrayList<>();
    hiddenLayers.add(12);
    NeuralNetwork nnet = ImageRecognitionHelper.createNewNeuralNetwork("someNetworkName", new Dimension(20, 20), ColorMode.COLOR_RGB, imageLabels, hiddenLayers, TransferFunctionType.SIGMOID);

    // Configure the learning rule.
    // NOTE(review): maxError=0.9 and momentum=1 are unusually permissive/high
    // for backpropagation — confirm these are intended and not placeholders.
    MomentumBackpropagation mb = (MomentumBackpropagation) nnet.getLearningRule();
    mb.setLearningRate(0.2);
    mb.setMaxError(0.9);
    mb.setMomentum(1);

    // Train the network.
    System.out.println("NNet start learning...");
    nnet.learn(dataSet);
    System.out.println("NNet learned");
}
 
Example 7
Source File: HSLImageRecognitionTrainingSample.java    From NeurophFramework with Apache License 2.0 5 votes vote down vote up
/**
 * Trains an HSL image-recognition network on 20x20 downscaled images
 * read from a fixed directory, with one output neuron per label.
 *
 * @throws IOException if the image directory cannot be read
 */
public static void main (String [] args) throws IOException {

    // Path to the directory containing the training images.
    String imageDir = "/home/zoran/Downloads/MihailoHSLTest/trening";

    // Image names, used as output-neuron labels.
    // (Was a raw ArrayList — diamond added to avoid unchecked warnings.)
    List<String> imageLabels = new ArrayList<>();
    imageLabels.add("bird");
    imageLabels.add("cat");
    imageLabels.add("dog");

    // Build the HSL training set from 20x20 downscaled images.
    Map<String, FractionHSLData> map = ImageRecognitionHelper.getFractionHSLDataForDirectory(new File(imageDir), new Dimension(20, 20));
    DataSet dataSet = ImageRecognitionHelper.createHSLTrainingSet(imageLabels, map);

    // Network with a single hidden layer of 12 neurons.
    List<Integer> hiddenLayers = new ArrayList<>();
    hiddenLayers.add(12);
    NeuralNetwork nnet = ImageRecognitionHelper.createNewNeuralNetwork("someNetworkName", new Dimension(20, 20), ColorMode.COLOR_HSL, imageLabels, hiddenLayers, TransferFunctionType.SIGMOID);

    // Configure the learning rule.
    // NOTE(review): maxError=0.9 and momentum=1 are unusually permissive/high
    // for backpropagation — confirm these are intended and not placeholders.
    MomentumBackpropagation mb = (MomentumBackpropagation) nnet.getLearningRule();
    mb.setLearningRate(0.2);
    mb.setMaxError(0.9);
    mb.setMomentum(1);

    // Train the network.
    System.out.println("NNet start learning...");
    nnet.learn(dataSet);
    System.out.println("NNet learned");
}
 
Example 8
Source File: Main.java    From NeurophFramework with Apache License 2.0 4 votes vote down vote up
/**
 * Trains an MLP on normalized DAX closing prices using a 4-value sliding
 * window to predict the next value, then prints one out-of-sample prediction.
 */
public static void main(String[] args) {
    // One shared formatter instead of three identical throwaway instances.
    // NOTE(review): the pattern ends in ":MM" (month-of-year); ":SSS"
    // (milliseconds) was probably intended — confirm before changing,
    // since fixing it alters the printed timestamps.
    SimpleDateFormat timestampFormat = new SimpleDateFormat("dd-MMM-yyyy HH:mm:ss:MM");
    System.out.println("Time stamp N1:" + timestampFormat.format(new Date()));

    int maxIterations = 10000;
    NeuralNetwork neuralNet = new MultiLayerPerceptron(4, 9, 1);
    LMS learningRule = (LMS) neuralNet.getLearningRule();
    learningRule.setMaxError(0.001);      // range 0-1
    learningRule.setLearningRate(0.7);    // range 0-1
    learningRule.setMaxIterations(maxIterations);

    // DAX closing prices; normalized by daxmax into (0, 1). Each training
    // row is a 4-value sliding window predicting the following value.
    // (Replaces 17 copy-pasted add(...) lines — produces identical rows.)
    double daxmax = 10000.0D;
    double[] dax = {3710, 3690, 3890, 3695, 3666, 3692, 3886,
                    3914, 3956, 3953, 4044, 3987, 3996, 4043,
                    4068, 4176, 4187, 4223, 4259, 4203, 3989};
    DataSet trainingSet = new DataSet(4, 1);
    for (int i = 0; i + 4 < dax.length; i++) {
        double[] window = new double[4];
        for (int j = 0; j < 4; j++) {
            window[j] = dax[i + j] / daxmax;
        }
        trainingSet.add(new DataSetRow(window, new double[]{dax[i + 4] / daxmax}));
    }

    neuralNet.learn(trainingSet);
    System.out.println("Time stamp N2:" + timestampFormat.format(new Date()));

    // One unlabeled test window: the last four known closes.
    DataSet testSet = new DataSet(4, 1);
    testSet.add(new DataSetRow(new double[]{4223.0D / daxmax, 4259.0D / daxmax, 4203.0D / daxmax, 3989.0D / daxmax}));

    for (DataSetRow testDataRow : testSet.getRows()) {
        neuralNet.setInput(testDataRow.getInput());
        neuralNet.calculate();
        double[] networkOutput = neuralNet.getOutput();
        System.out.print("Input: " + Arrays.toString(testDataRow.getInput()) );
        System.out.println(" Output: " + Arrays.toString(networkOutput) );
    }

    // Experiments (calculated prediction for 31-03-2009, actual 4084.76):
    //31;3;2009;4084,76 -> 4121 Error=0.01 Rate=0.7 Iterat=100
    //31;3;2009;4084,76 -> 4096 Error=0.01 Rate=0.7 Iterat=1000
    //31;3;2009;4084,76 -> 4093 Error=0.01 Rate=0.7 Iterat=10000
    //31;3;2009;4084,76 -> 4108 Error=0.01 Rate=0.7 Iterat=100000
    //31;3;2009;4084,76 -> 4084 Error=0.001 Rate=0.7 Iterat=10000

    System.out.println("Time stamp N3:" + timestampFormat.format(new Date()));
    System.exit(0);
}
 
Example 9
Source File: TrainingSample.java    From NeurophFramework with Apache License 2.0 2 votes vote down vote up
/**
 * Builds an OCR training set from a scanned image of letters, trains a
 * black-and-white image-recognition network on it, and saves both the
 * data set and the trained network to disk.
 *
 * @throws IOException if any of the image or text files cannot be read
 */
public static void main(String[] args) throws IOException {

        // ---- User input parameters ------------------------------------------
        String imagePath = "C:/Users/Mihailo/Desktop/OCR/slova.png";      // image with letters
        String folderPath = "C:/Users/Mihailo/Desktop/OCR/ImagesDir/";    // folder for storing segmented letters
        String textPath = "C:/Users/Mihailo/Desktop/OCR/slova.txt";       // .txt file with the text on the image
        String networkPath = "C:/Users/Mihailo/Desktop/OCR/network.nnet"; // where the trained network is stored
        int fontSize = 12;     // predicted from letter height; minimum font size is 12 pt
        int scanQuality = 300; // scan quality; minimum is 300 dpi
        // ---------------------------------------------------------------------

        // Binarize the scanned page: grayscale first, then Otsu thresholding.
        BufferedImage image = ImageIO.read(new File(imagePath));
        ImageFilterChain chain = new ImageFilterChain();
        chain.addFilter(new GrayscaleFilter());
        chain.addFilter(new OtsuBinarizeFilter());
        BufferedImage binarizedImage = chain.apply(image);

        Letter letterInfo = new Letter(scanQuality, binarizedImage);
        // letterInfo.recognizeDots(); // only needed to recognize dots and other little characters, TODO

        Text textInfo = new Text(binarizedImage, letterInfo);

        // Segment the letters into folderPath and derive the label list.
        OCRTraining ocrTraining = new OCRTraining(letterInfo, textInfo);
        ocrTraining.setFolderPath(folderPath);
        ocrTraining.setTrainingTextPath(textPath);
        ocrTraining.prepareTrainingSet();

        List<String> characterLabels = ocrTraining.getCharacterLabels();

        // Build the black-and-white training set from the segmented 20x20 letter images.
        Map<String, FractionRgbData> map = ImageRecognitionHelper.getFractionRgbDataForDirectory(new File(folderPath), new Dimension(20, 20));
        DataSet dataSet = ImageRecognitionHelper.createBlackAndWhiteTrainingSet(characterLabels, map);

        // Persist the training set.
        dataSet.setFilePath("C:/Users/Mihailo/Desktop/OCR/DataSet1.tset");
        dataSet.save();

        // Network with a single hidden layer of 12 neurons.
        List<Integer> hiddenLayers = new ArrayList<Integer>();
        hiddenLayers.add(12);

        NeuralNetwork nnet = ImageRecognitionHelper.createNewNeuralNetwork("someNetworkName", new Dimension(20, 20), ColorMode.BLACK_AND_WHITE, characterLabels, hiddenLayers, TransferFunctionType.SIGMOID);
        BackPropagation bp = (BackPropagation) nnet.getLearningRule();
        bp.setLearningRate(0.3);
        bp.setMaxError(0.1);

        System.out.println("Start learning...");
        nnet.learn(dataSet);
        System.out.println("NNet learned");

        nnet.save(networkPath);

    }