Java Code Examples for org.neuroph.nnet.learning.MomentumBackpropagation#setMaxError()
The following examples show how to use
org.neuroph.nnet.learning.MomentumBackpropagation#setMaxError() .
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source File: PredictingPokerHandsSample.java From NeurophFramework with Apache License 2.0 | 5 votes |
public void run() { System.out.println("Creating training set..."); String dataSetFile = "data_sets/predicting_poker_hands_data.txt"; int inputsCount = 85; int outputsCount = 9; // create training set from file DataSet dataSet = DataSet.createFromFile(dataSetFile, inputsCount, outputsCount, "\t", false); System.out.println("Creating neural network..."); // create MultiLayerPerceptron neural network MultiLayerPerceptron neuralNet = new MultiLayerPerceptron(inputsCount, 65, outputsCount); // attach listener to learning rule MomentumBackpropagation learningRule = (MomentumBackpropagation) neuralNet.getLearningRule(); learningRule.addListener(this); // set learning rate and max error learningRule.setLearningRate(0.2); learningRule.setMaxError(0.01); System.out.println("Training network..."); // train the network with training set neuralNet.learn(dataSet); System.out.println("Training completed."); System.out.println("Testing network..."); testNeuralNetwork(neuralNet, dataSet); System.out.println("Saving network"); // save neural network to file neuralNet.save("MyNeuralNetPokerHands.nnet"); System.out.println("Done."); }
Example 2
Source File: AnimalsClassificationSample.java From NeurophFramework with Apache License 2.0 | 5 votes |
public void run() { System.out.println("Creating training set..."); String dataSetFile = "data_sets/animals_data.txt"; int inputsCount = 20; int outputsCount = 7; // create training set from file DataSet dataSet = DataSet.createFromFile(dataSetFile, inputsCount, outputsCount, "\t", true); System.out.println("Creating neural network..."); // create MultiLayerPerceptron neural network MultiLayerPerceptron neuralNet = new MultiLayerPerceptron(inputsCount, 22, outputsCount); // attach listener to learning rule MomentumBackpropagation learningRule = (MomentumBackpropagation) neuralNet.getLearningRule(); learningRule.addListener(this); // set learning rate and max error learningRule.setLearningRate(0.2); learningRule.setMaxError(0.01); System.out.println("Training network..."); // train the network with training set neuralNet.learn(dataSet); System.out.println("Training completed."); System.out.println("Testing network..."); testNeuralNetwork(neuralNet, dataSet); System.out.println("Saving network"); // save neural network to file neuralNet.save("MyNeuralNetAnimals.nnet"); System.out.println("Done."); }
Example 3
Source File: BreastCancerSample.java From NeurophFramework with Apache License 2.0 | 5 votes |
public void run() { System.out.println("Creating training and test set from file..."); String dataSetFile = "data_sets/breast_cancer.txt"; int numInputs = 30; int numOutputs = 1; //Create data set from file DataSet dataSet = DataSet.createFromFile(dataSetFile, numInputs, numOutputs, ","); //Creatinig training set (70%) and test set (30%) DataSet[] trainTestSplit = dataSet.split(0.7, 0.3); DataSet trainingSet = trainTestSplit[0]; DataSet testSet = trainTestSplit[1]; //Normalizing data set Normalizer normalizer = new MaxNormalizer(trainingSet); normalizer.normalize(trainingSet); normalizer.normalize(testSet); //Create MultiLayerPerceptron neural network MultiLayerPerceptron neuralNet = new MultiLayerPerceptron(numInputs, 16, numOutputs); //attach listener to learning rule MomentumBackpropagation learningRule = (MomentumBackpropagation) neuralNet.getLearningRule(); learningRule.addListener(this); learningRule.setLearningRate(0.3); learningRule.setMaxError(0.01); learningRule.setMaxIterations(500); System.out.println("Training network..."); //train the network with training set neuralNet.learn(trainingSet); System.out.println("Testing network..."); testNeuralNetwork(neuralNet, testSet); }
Example 4
Source File: WineClassificationSample.java From NeurophFramework with Apache License 2.0 | 5 votes |
public void run() { System.out.println("Creating training set..."); // get path to training set String dataSetFile = "data_sets/wine_classification_data.txt"; int inputsCount = 13; int outputsCount = 3; // create training set from file DataSet dataSet = DataSet.createFromFile(dataSetFile, inputsCount, outputsCount, "\t", false); System.out.println("Creating neural network..."); // create MultiLayerPerceptron neural network MultiLayerPerceptron neuralNet = new MultiLayerPerceptron(inputsCount, 22, outputsCount); // attach listener to learning rule MomentumBackpropagation learningRule = (MomentumBackpropagation) neuralNet.getLearningRule(); learningRule.addListener(this); // set learning rate and max error learningRule.setLearningRate(0.2); learningRule.setMaxError(0.01); System.out.println("Training network..."); // train the network with training set neuralNet.learn(dataSet); System.out.println("Training completed."); System.out.println("Testing network..."); testNeuralNetwork(neuralNet, dataSet); System.out.println("Saving network"); // save neural network to file neuralNet.save("MyNeuralNetWineClassification.nnet"); System.out.println("Done."); }
Example 5
Source File: GlassIdentificationSample.java From NeurophFramework with Apache License 2.0 | 5 votes |
public void run() { System.out.println("Creating training set..."); String dataSetFile = "data_sets/glass_identification_data.txt"; int inputsCount = 9; int outputsCount = 7; // create training set from file DataSet dataSet = DataSet.createFromFile(dataSetFile, inputsCount, outputsCount, "\t", false); //dataSet.normalize(); System.out.println("Creating neural network..."); // create MultiLayerPerceptron neural network MultiLayerPerceptron neuralNet = new MultiLayerPerceptron(inputsCount, 22, outputsCount); // attach listener to learning rule MomentumBackpropagation learningRule = (MomentumBackpropagation) neuralNet.getLearningRule(); learningRule.addListener(this); // set learning rate and max error learningRule.setLearningRate(0.1); learningRule.setMaxError(0.01); System.out.println("Training network..."); // train the network with training set neuralNet.learn(dataSet); System.out.println("Training completed."); System.out.println("Testing network..."); testNeuralNetwork(neuralNet, dataSet); System.out.println("Saving network"); // save neural network to file neuralNet.save("MyNeuralGlassIdentification.nnet"); System.out.println("Done."); }
Example 6
Source File: CarEvaluationSample.java From NeurophFramework with Apache License 2.0 | 5 votes |
public void run() { System.out.println("Creating training set..."); String dataSetFile = "data_sets/car_evaluation_data.txt"; int inputsCount = 21; int outputsCount = 4; // create training set from file DataSet dataSet = DataSet.createFromFile(dataSetFile, inputsCount, outputsCount, "\t", false); System.out.println("Creating neural network..."); // create MultiLayerPerceptron neural network MultiLayerPerceptron neuralNet = new MultiLayerPerceptron(inputsCount, 22, outputsCount); // attach listener to learning rule MomentumBackpropagation learningRule = (MomentumBackpropagation) neuralNet.getLearningRule(); learningRule.addListener(this); // set learning rate and max error learningRule.setLearningRate(0.2); learningRule.setMaxError(0.01); System.out.println("Training network..."); // train the network with training set neuralNet.learn(dataSet); System.out.println("Training completed."); System.out.println("Testing network..."); testNeuralNetwork(neuralNet, dataSet); System.out.println("Saving network"); // save neural network to file neuralNet.save("MyNeuralNetCarEvaluation.nnet"); System.out.println("Done."); }
Example 7
Source File: ConcreteStrenghtTestSample.java From NeurophFramework with Apache License 2.0 | 5 votes |
public void run() { System.out.println("Creating training set..."); String dataSetFile = "data_sets/concrete_strenght_test_data.txt"; int inputsCount = 8; int outputsCount = 1; // create training set from file DataSet dataSet = DataSet.createFromFile(dataSetFile, inputsCount, outputsCount, ",", false); System.out.println("Creating neural network..."); // create MultiLayerPerceptron neural network MultiLayerPerceptron neuralNet = new MultiLayerPerceptron(inputsCount, 22, outputsCount); // attach listener to learning rule MomentumBackpropagation learningRule = (MomentumBackpropagation) neuralNet.getLearningRule(); learningRule.addListener(this); // set learning rate and max error learningRule.setLearningRate(0.2); learningRule.setMaxError(0.01); System.out.println("Training network..."); // train the network with training set neuralNet.learn(dataSet); System.out.println("Training completed."); System.out.println("Testing network..."); testNeuralNetwork(neuralNet, dataSet); System.out.println("Saving network"); // save neural network to file neuralNet.save("MyNeuralConcreteStrenght.nnet"); System.out.println("Done."); }
Example 8
Source File: RGBImageRecognitionTrainingSample.java From NeurophFramework with Apache License 2.0 | 5 votes |
public static void main(String[] args) throws IOException { // path to image directory String imageDir ="/home/zoran/Downloads/MihailoHSLTest/trening"; // image names - used for output neuron labels List<String> imageLabels = new ArrayList(); imageLabels.add("bird"); imageLabels.add("cat"); imageLabels.add("dog"); // create dataset Map<String,FractionRgbData> map = ImageRecognitionHelper.getFractionRgbDataForDirectory (new File(imageDir), new Dimension(20, 20)); DataSet dataSet = ImageRecognitionHelper.createRGBTrainingSet(imageLabels, map); // create neural network List <Integer> hiddenLayers = new ArrayList<>(); hiddenLayers.add(12); NeuralNetwork nnet = ImageRecognitionHelper.createNewNeuralNetwork("someNetworkName", new Dimension(20,20), ColorMode.COLOR_RGB, imageLabels, hiddenLayers, TransferFunctionType.SIGMOID); // set learning rule parameters MomentumBackpropagation mb = (MomentumBackpropagation)nnet.getLearningRule(); mb.setLearningRate(0.2); mb.setMaxError(0.9); mb.setMomentum(1); // traiin network System.out.println("NNet start learning..."); nnet.learn(dataSet); System.out.println("NNet learned"); }
Example 9
Source File: MomentumTraining.java From NeurophFramework with Apache License 2.0 | 5 votes |
/**
 * Builds a momentum backpropagation learning rule configured from this
 * trainer's settings (batch mode, learning rate, max error, max iterations,
 * and momentum).
 *
 * @return a fully configured {@link MomentumBackpropagation} instance
 */
@Override
public LearningRule setParameters() {
    MomentumBackpropagation rule = new MomentumBackpropagation();
    rule.setBatchMode(getSettings().isBatchMode());
    rule.setLearningRate(getSettings().getLearningRate());
    rule.setMaxError(getSettings().getMaxError());
    rule.setMaxIterations(getSettings().getMaxIterations());
    rule.setMomentum(getSettings().getMomentum());
    return rule;
}
Example 10
Source File: BatchImageTrainer.java From FakeImageDetection with GNU General Public License v3.0 | 5 votes |
@Override public void doRun() { try { System.out.println("Starting training thread....." + sampleDimension.toString() + " and " + imageLabels.toString()); HashMap<String, BufferedImage> imagesMap = new HashMap<String, BufferedImage>(); for (File file : srcDirectory.listFiles()) { imageLabels.add(FilenameUtils.removeExtension(file.getName())); if (sampleDimension.getWidth() > 0 && sampleDimension.getHeight() > 0) { Double w = sampleDimension.getWidth(); Double h = sampleDimension.getHeight(); imagesMap.put(file.getName(), ImageUtilities.resizeImage(ImageUtilities.loadImage(file), w.intValue(), h.intValue())); } } Map<String, FractionRgbData> imageRgbData = ImageUtilities.getFractionRgbDataForImages(imagesMap); DataSet learningData = ImageRecognitionHelper.createRGBTrainingSet(imageLabels, imageRgbData); nnet = NeuralNetwork.load(new FileInputStream(nnFile)); //Load NNetwork MomentumBackpropagation mBackpropagation = (MomentumBackpropagation) nnet.getLearningRule(); mBackpropagation.setLearningRate(learningRate); mBackpropagation.setMaxError(maxError); mBackpropagation.setMomentum(momentum); System.out.println("Network Information\nLabel = " + nnet.getLabel() + "\n Input Neurons = " + nnet.getInputsCount() + "\n Number of layers = " + nnet.getLayersCount() ); mBackpropagation.addListener(this); System.out.println("Starting training......"); nnet.learn(learningData, mBackpropagation); //Training Completed listener.batchImageTrainingCompleted(); } catch (FileNotFoundException ex) { System.out.println(ex.getMessage() + "\n" + ex.getLocalizedMessage()); } }
Example 11
Source File: Banknote.java From NeurophFramework with Apache License 2.0 | 4 votes |
public void run() { System.out.println("Creating training set..."); // get path to training set String trainingSetFileName = "data_sets/databanknote.txt"; int inputsCount = 4; int outputsCount = 1; // create training set from file DataSet dataSet = DataSet.createFromFile(trainingSetFileName, inputsCount, outputsCount, ",", false); DataSet[] trainTestSplit = dataSet.split(0.6, 0.4); DataSet trainingSet = trainTestSplit[0]; DataSet testSet = trainTestSplit[1]; Normalizer norm = new MaxNormalizer(trainingSet); norm.normalize(trainingSet); norm.normalize(testSet); System.out.println("Creating neural network..."); MultiLayerPerceptron neuralNet = new MultiLayerPerceptron(TransferFunctionType.TANH, inputsCount, 1, outputsCount); neuralNet.setLearningRule(new MomentumBackpropagation()); MomentumBackpropagation learningRule = (MomentumBackpropagation) neuralNet.getLearningRule(); learningRule.addListener(this); // set learning rate and max error learningRule.setLearningRate(0.1); learningRule.setMaxError(0.01); System.out.println("Training network..."); // train the network with training set neuralNet.learn(trainingSet); System.out.println("Training completed."); System.out.println("Testing network..."); System.out.println("Network performance on the test set"); evaluate(neuralNet, testSet); System.out.println("Saving network"); // save neural network to file neuralNet.save("nn1.nnet"); System.out.println("Done."); System.out.println(); System.out.println("Network outputs for test set"); testNeuralNetwork(neuralNet, testSet); }
Example 12
Source File: WheatSeeds.java From NeurophFramework with Apache License 2.0 | 4 votes |
public void run() { System.out.println("Creating training set..."); // get path to training set String trainingSetFileName = "data_sets/seeds.txt"; int inputsCount = 7; int outputsCount = 3; // create training set from file DataSet dataSet = DataSet.createFromFile(trainingSetFileName, inputsCount, outputsCount, "\t"); // split data into train and test set DataSet[] trainTestSplit = dataSet.split(0.6, 0.4); DataSet trainingSet = trainTestSplit[0]; DataSet testSet = trainTestSplit[1]; System.out.println("Creating neural network..."); MultiLayerPerceptron neuralNet = new MultiLayerPerceptron(inputsCount, 15, 2, outputsCount); neuralNet.setLearningRule(new MomentumBackpropagation()); MomentumBackpropagation learningRule = (MomentumBackpropagation) neuralNet.getLearningRule(); learningRule.addListener(this); // set learning rate and max error learningRule.setLearningRate(0.1); learningRule.setMaxError(0.01); learningRule.setMaxIterations(5000); System.out.println("Training network..."); // train the network with training set neuralNet.learn(trainingSet); System.out.println("Training completed."); System.out.println("Testing network..."); System.out.println("Network performance on the test set"); evaluate(neuralNet, testSet); System.out.println("Saving network"); // save neural network to file neuralNet.save("nn1.nnet"); System.out.println("Done."); System.out.println(); System.out.println("Network outputs for test set"); testNeuralNetwork(neuralNet, testSet); }
Example 13
Source File: IonosphereSample.java From NeurophFramework with Apache License 2.0 | 4 votes |
public void run() { System.out.println("Creating training and test set from file..."); String dataSetFile = "data_sets/ionosphere.csv"; int inputsCount = 34; int outputsCount = 1; //Create data set from file DataSet dataSet = DataSet.createFromFile(dataSetFile, inputsCount, outputsCount, ","); dataSet.shuffle(); //Normalizing data set Normalizer normalizer = new MaxNormalizer(dataSet); normalizer.normalize(dataSet); //Creatinig training set (70%) and test set (30%) DataSet[] trainingAndTestSet = dataSet.createTrainingAndTestSubsets(70, 30); DataSet trainingSet = trainingAndTestSet[0]; DataSet testSet = trainingAndTestSet[1]; // for (int i = 0; i < 21; i++) { System.out.println("Creating neural network..."); //Create MultiLayerPerceptron neural network MultiLayerPerceptron neuralNet = new MultiLayerPerceptron(inputsCount, 16, 8, outputsCount); // System.out.println("HIDDEN COUNT: " + i); //attach listener to learning rule MomentumBackpropagation learningRule = (MomentumBackpropagation) neuralNet.getLearningRule(); learningRule.addListener(this); learningRule.setLearningRate(0.2); learningRule.setMaxError(0.01); learningRule.setMaxIterations(10000); System.out.println("Training network..."); //train the network with training set neuralNet.learn(trainingSet); System.out.println("Testing network...\n\n"); testNeuralNetwork(neuralNet, testSet); System.out.println("Done."); System.out.println("**************************************************"); // } }
Example 14
Source File: PimaIndiansDiabetes.java From NeurophFramework with Apache License 2.0 | 4 votes |
public void run() { System.out.println("Creating data set..."); String dataSetFile = "data_sets/ml10standard/pimadata.txt"; int inputsCount = 8; int outputsCount = 1; // create data set from file DataSet dataSet = DataSet.createFromFile(dataSetFile, inputsCount, outputsCount, "\t", false); // split data into training and test set DataSet[] trainTestSplit = dataSet.split(0.6, 0.4); DataSet trainingSet = trainTestSplit[0]; DataSet testSet = trainTestSplit[1]; // normalize training and test set Normalizer norm = new MaxNormalizer(trainingSet); norm.normalize(trainingSet); norm.normalize(testSet); System.out.println("Creating neural network..."); MultiLayerPerceptron neuralNet = new MultiLayerPerceptron(TransferFunctionType.TANH, inputsCount, 15, 5, outputsCount); neuralNet.setLearningRule(new MomentumBackpropagation()); MomentumBackpropagation learningRule = (MomentumBackpropagation) neuralNet.getLearningRule(); learningRule.addListener((event) -> { MomentumBackpropagation bp = (MomentumBackpropagation) event.getSource(); System.out.println(bp.getCurrentIteration() + ". iteration | Total network error: " + bp.getTotalNetworkError()); }); // set learning rate and max error learningRule.setLearningRate(0.1); learningRule.setMaxError(0.03); System.out.println("Training network..."); // train the network with training set neuralNet.learn(trainingSet); System.out.println("Training completed."); System.out.println("Testing network..."); System.out.println("Network performance on the test set"); evaluate(neuralNet, testSet); System.out.println("Saving network"); // save neural network to file neuralNet.save("nn1.nnet"); System.out.println("Done."); }
Example 15
Source File: Banknote.java From NeurophFramework with Apache License 2.0 | 4 votes |
public void run() { System.out.println("Creating data set..."); String dataSetFile = "data_sets/ml10standard/databanknote.txt"; int inputsCount = 4; int outputsCount = 1; // create training set from file DataSet dataSet = DataSet.createFromFile(dataSetFile, inputsCount, outputsCount, ",", false); DataSet[] trainTestSplit = dataSet.split(0.6, 0.4); DataSet trainingSet = trainTestSplit[0]; DataSet testSet = trainTestSplit[1]; Normalizer norm = new MaxNormalizer(trainingSet); norm.normalize(trainingSet); norm.normalize(testSet); System.out.println("Creating neural network..."); MultiLayerPerceptron neuralNet = new MultiLayerPerceptron(TransferFunctionType.TANH, inputsCount, 1, outputsCount); neuralNet.setLearningRule(new MomentumBackpropagation()); MomentumBackpropagation learningRule = (MomentumBackpropagation) neuralNet.getLearningRule(); learningRule.addListener((event) -> { MomentumBackpropagation bp = (MomentumBackpropagation) event.getSource(); System.out.println(bp.getCurrentIteration() + ". iteration | Total network error: " + bp.getTotalNetworkError()); }); // set learning rate and max error learningRule.setLearningRate(0.1); learningRule.setMaxError(0.01); System.out.println("Training network..."); // train the network with training set neuralNet.learn(trainingSet); System.out.println("Training completed."); System.out.println("Testing network..."); System.out.println("Network performance on the test set"); evaluate(neuralNet, testSet); System.out.println("Saving network"); // save neural network to file neuralNet.save("nn1.nnet"); System.out.println("Done."); }
Example 16
Source File: Ionosphere.java From NeurophFramework with Apache License 2.0 | 4 votes |
public void run() { System.out.println("Creating training set..."); // get path to training set String trainingSetFileName = "data_sets/ionospheredata.txt"; int inputsCount = 34; int outputsCount = 1; // create training set from file DataSet dataSet = DataSet.createFromFile(trainingSetFileName, inputsCount, outputsCount, ",", false); // split data into training and test set DataSet[] trainTestSplit = dataSet.split(0.6, 0.4); DataSet trainingSet = trainTestSplit[0]; DataSet testSet = trainTestSplit[1]; // normalize data Normalizer norm = new MaxNormalizer(trainingSet); norm.normalize(trainingSet); norm.normalize(testSet); System.out.println("Creating neural network..."); MultiLayerPerceptron neuralNet = new MultiLayerPerceptron(inputsCount, 30, 25, outputsCount); neuralNet.setLearningRule(new MomentumBackpropagation()); MomentumBackpropagation learningRule = (MomentumBackpropagation) neuralNet.getLearningRule(); learningRule.addListener(this); // set learning rate and max error learningRule.setLearningRate(0.1); learningRule.setMaxError(0.01); System.out.println("Training network..."); // train the network with training set neuralNet.learn(trainingSet); System.out.println("Training completed."); System.out.println("Testing network..."); System.out.println("Network performance on the test set"); evaluate(neuralNet, testSet); System.out.println("Saving network"); // save neural network to file neuralNet.save("nn1.nnet"); System.out.println("Done."); System.out.println(); System.out.println("Network outputs for test set"); testNeuralNetwork(neuralNet, testSet); }
Example 17
Source File: BrestCancerSample.java From NeurophFramework with Apache License 2.0 | 4 votes |
public void run() { System.out.println("Creating training and test set from file..."); String dataSetFile = "data_sets/breast cancer.txt"; int inputsCount = 30; int outputsCount = 2; // use onlz one output - binarz classification, transform dat aset //Create data set from file DataSet dataSet = DataSet.createFromFile(dataSetFile, inputsCount, outputsCount, ","); //Creatinig training set (70%) and test set (30%) DataSet[] trainTestSplit = dataSet.split(0.7, 0.3); DataSet trainingSet = trainTestSplit[0]; DataSet testSet = trainTestSplit[1]; //Normalizing data set Normalizer normalizer = new MaxNormalizer(trainingSet); normalizer.normalize(trainingSet); normalizer.normalize(testSet); System.out.println("Creating neural network..."); //Create MultiLayerPerceptron neural network MultiLayerPerceptron neuralNet = new MultiLayerPerceptron(inputsCount, 16, outputsCount); //attach listener to learning rule MomentumBackpropagation learningRule = (MomentumBackpropagation) neuralNet.getLearningRule(); learningRule.addListener(this); learningRule.setLearningRate(0.3); learningRule.setMaxError(0.001); learningRule.setMaxIterations(5000); System.out.println("Training network..."); //train the network with training set neuralNet.learn(trainingSet); System.out.println("Testing network...\n\n"); testNeuralNetwork(neuralNet, testSet); System.out.println("Done."); System.out.println("**************************************************"); }
Example 18
Source File: Sonar.java From NeurophFramework with Apache License 2.0 | 4 votes |
public void run() { System.out.println("Creating training set..."); // get path to training set String trainingSetFileName = "data_sets/sonardata.txt"; int inputsCount = 60; int outputsCount = 1; // create training set from file DataSet dataSet = DataSet.createFromFile(trainingSetFileName, inputsCount, outputsCount, ",", false); // split data into train and test set DataSet[] trainTestSplit = dataSet.split(0.6, 0.4); DataSet trainingSet = trainTestSplit[0]; DataSet testSet = trainTestSplit[1]; // normalize data using max normalization Normalizer norm = new MaxNormalizer(trainingSet); norm.normalize(trainingSet); norm.normalize(testSet); System.out.println("Creating neural network..."); MultiLayerPerceptron neuralNet = new MultiLayerPerceptron(inputsCount, 15, 10, outputsCount); neuralNet.setLearningRule(new MomentumBackpropagation()); MomentumBackpropagation learningRule = (MomentumBackpropagation) neuralNet.getLearningRule(); learningRule.addListener(this); // set learning rate and max error learningRule.setLearningRate(0.1); learningRule.setMaxError(0.01); System.out.println("Training network..."); // train the network with training set neuralNet.learn(trainingSet); System.out.println("Training completed."); System.out.println("Testing network..."); System.out.println("Network performance on the test set"); evaluate(neuralNet, testSet); System.out.println("Saving network"); // save neural network to file neuralNet.save("nn1.nnet"); System.out.println("Done."); System.out.println(); System.out.println("Network outputs for test set"); testNeuralNetwork(neuralNet, testSet); }
Example 19
Source File: LensesClassificationSample.java From NeurophFramework with Apache License 2.0 | 3 votes |
public void run() { System.out.println("Creating training set..."); String dataSetFile = "data_sets/lenses_data.txt"; int inputsCount = 9; int outputsCount = 3; System.out.println("Creating training set..."); DataSet dataSet = DataSet.createFromFile(dataSetFile, inputsCount, outputsCount, " ", false); System.out.println("Creating neural network..."); // create MultiLayerPerceptron neural network MultiLayerPerceptron neuralNet = new MultiLayerPerceptron(inputsCount, 16, outputsCount); // attach listener to learning rule MomentumBackpropagation learningRule = (MomentumBackpropagation) neuralNet.getLearningRule(); learningRule.addListener(this); // set learning rate and max error learningRule.setLearningRate(0.2); learningRule.setMaxError(0.01); System.out.println("Training network..."); // train the network with training set neuralNet.learn(dataSet); System.out.println("Training completed."); System.out.println("Testing network..."); testNeuralNetwork(neuralNet, dataSet); System.out.println("Saving network"); // save neural network to file neuralNet.save("MyNeuralNetLenses.nnet"); System.out.println("Done."); }
Example 20
Source File: ShuttleLandingControlSample.java From NeurophFramework with Apache License 2.0 | 3 votes |
public void run() { System.out.println("Creating training set..."); String dataSetFile = "data_sets/shuttle_landing_control_data.txt"; int inputsCount = 15; int outputsCount = 2; // create training set from file DataSet dataSet = DataSet.createFromFile(dataSetFile, inputsCount, outputsCount, ",", false); System.out.println("Creating neural network..."); // create MultiLayerPerceptron neural network MultiLayerPerceptron neuralNet = new MultiLayerPerceptron(inputsCount, 16, outputsCount); // attach listener to learning rule MomentumBackpropagation learningRule = (MomentumBackpropagation) neuralNet.getLearningRule(); learningRule.addListener(this); // set learning rate and max error learningRule.setLearningRate(0.2); learningRule.setMaxError(0.01); System.out.println("Training network..."); // train the network with training set neuralNet.learn(dataSet); System.out.println("Training completed."); System.out.println("Testing network..."); testNeuralNetwork(neuralNet, dataSet); System.out.println("Saving network"); // save neural network to file neuralNet.save("MyNeuralNetShuttle.nnet"); System.out.println("Done."); }