Java Code Examples for org.neuroph.nnet.MultiLayerPerceptron#setLearningRule()
The following examples show how to use
org.neuroph.nnet.MultiLayerPerceptron#setLearningRule().
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source File: MomentumBackPropagationTest.java From NeurophFramework with Apache License 2.0 | 7 votes |
/**
 * Trains an XOR network with the learning rule under test and asserts that
 * the mean squared error over the whole XOR data set ends up below the
 * configured {@code maxError} threshold.
 */
@Test
public void testXorMSE() {
    // 2-3-1 sigmoid network with deterministic weight initialization so the run is reproducible
    MultiLayerPerceptron network = new MultiLayerPerceptron(TransferFunctionType.SIGMOID, 2, 3, 1);
    network.randomizeWeights(new WeightsRandomizer(new Random(123)));
    network.setLearningRule(instance);
    network.learn(xorDataSet);

    // accumulate the per-pattern error over every row of the training set
    MeanSquaredError error = new MeanSquaredError();
    for (DataSetRow row : xorDataSet.getRows()) {
        network.setInput(row.getInput());
        network.calculate();
        error.addPatternError(network.getOutput(), row.getDesiredOutput());
    }
    assertTrue(error.getTotalError() < maxError);
}
Example 2
Source File: BackPropagationTest.java From NeurophFramework with Apache License 2.0 | 7 votes |
/**
 * Trains an XOR network with the learning rule under test and asserts that
 * the mean squared error over the whole XOR data set ends up below the
 * configured {@code maxError} threshold.
 */
@Test
public void testXorMSE() {
    // 2-3-1 sigmoid network; fixed random seed keeps the test deterministic
    MultiLayerPerceptron network = new MultiLayerPerceptron(TransferFunctionType.SIGMOID, 2, 3, 1);
    network.randomizeWeights(new WeightsRandomizer(new Random(123)));
    network.setLearningRule(instance);
    network.learn(xorDataSet);

    // replay every training row through the trained network and collect the MSE
    MeanSquaredError error = new MeanSquaredError();
    for (DataSetRow row : xorDataSet.getRows()) {
        network.setInput(row.getInput());
        network.calculate();
        error.addPatternError(network.getOutput(), row.getDesiredOutput());
    }
    assertTrue(error.getTotalError() < maxError);
}
Example 3
Source File: NeuralNetworkFactory.java From NeurophFramework with Apache License 2.0 | 6 votes |
/** * Creates and returns a new instance of Multi Layer Perceptron * @param layersStr space separated number of neurons in layers * @param transferFunctionType transfer function type for neurons * @return instance of Multi Layer Perceptron */ public static MultiLayerPerceptron createMLPerceptron(String layersStr, TransferFunctionType transferFunctionType, Class learningRule, boolean useBias, boolean connectIO) { ArrayList<Integer> layerSizes = VectorParser.parseInteger(layersStr); NeuronProperties neuronProperties = new NeuronProperties(transferFunctionType, useBias); MultiLayerPerceptron nnet = new MultiLayerPerceptron(layerSizes, neuronProperties); // set learning rule - TODO: use reflection here if (learningRule.getName().equals(BackPropagation.class.getName())) { nnet.setLearningRule(new BackPropagation()); } else if (learningRule.getName().equals(MomentumBackpropagation.class.getName())) { nnet.setLearningRule(new MomentumBackpropagation()); } else if (learningRule.getName().equals(DynamicBackPropagation.class.getName())) { nnet.setLearningRule(new DynamicBackPropagation()); } else if (learningRule.getName().equals(ResilientPropagation.class.getName())) { nnet.setLearningRule(new ResilientPropagation()); } // connect io if (connectIO) { nnet.connectInputsToOutputs(); } return nnet; }
Example 4
Source File: BackPropagationTest.java From NeurophFramework with Apache License 2.0 | 5 votes |
/**
 * Learns XOR and checks that the learning rule's final total network error
 * dropped below the configured {@code maxError} threshold.
 */
@Test
public void testXorMaxError() {
    // deterministic 2-3-1 sigmoid network (fixed seed)
    MultiLayerPerceptron network = new MultiLayerPerceptron(TransferFunctionType.SIGMOID, 2, 3, 1);
    network.randomizeWeights(new WeightsRandomizer(new Random(123)));
    network.setLearningRule(instance);
    network.learn(xorDataSet);
    assertTrue(instance.getTotalNetworkError() < maxError);
}
Example 5
Source File: BackPropagationTest.java From NeurophFramework with Apache License 2.0 | 5 votes |
/**
 * Learns the iris data set and checks that the learning rule's final total
 * network error dropped below the configured {@code maxError} threshold.
 */
@Test
public void testIrisMaxError() {
    // 4-16-3 network with a fixed weight-initialization seed for reproducibility
    MultiLayerPerceptron network = new MultiLayerPerceptron(4, 16, 3);
    network.randomizeWeights(new WeightsRandomizer(new Random(123)));
    network.setLearningRule(instance);
    network.learn(irisDataSet);
    assertTrue(instance.getTotalNetworkError() < maxError);
}
Example 6
Source File: WineQuality.java From NeurophFramework with Apache License 2.0 | 5 votes |
/**
 * Loads the wine-quality data set, trains a momentum-backpropagation MLP and
 * evaluates it with 10-fold cross-validation.
 *
 * @throws InterruptedException if the cross-validation run is interrupted
 * @throws ExecutionException if a cross-validation task fails
 */
public void run() throws InterruptedException, ExecutionException {
    System.out.println("Creating training set...");
    // get path to training set
    String dataSetFile = "data_sets/wine.txt";
    int inputsCount = 11;
    int outputsCount = 10;

    // create training set from file (tab-separated)
    DataSet dataSet = DataSet.createFromFile(dataSetFile, inputsCount, outputsCount, "\t", true);
    // max-normalize in place, then shuffle before cross-validation
    Normalizer norm = new MaxNormalizer(dataSet);
    norm.normalize(dataSet);
    dataSet.shuffle();

    System.out.println("Creating neural network...");
    MultiLayerPerceptron neuralNet = new MultiLayerPerceptron(inputsCount, 20, 15, outputsCount);
    neuralNet.setLearningRule(new MomentumBackpropagation());
    MomentumBackpropagation learningRule = (MomentumBackpropagation) neuralNet.getLearningRule();
    // set learning rate and max error
    learningRule.setLearningRate(0.1);
    learningRule.setMaxIterations(10); // NOTE(review): only 10 iterations — presumably a quick demo setting
    String classLabels[] = new String[]{"1", "2", "3", "4", "5", "6", "7", "8", "9", "10"};
    neuralNet.setOutputLabels(classLabels);

    // run 10-fold cross-validation; results are collected but not printed here
    KFoldCrossValidation crossVal = new KFoldCrossValidation(neuralNet, dataSet, 10);
    EvaluationResult totalResult = crossVal.run();
    List<FoldResult> cflist = crossVal.getResultsByFolds();
}
Example 7
Source File: WheatSeeds.java From NeurophFramework with Apache License 2.0 | 5 votes |
/**
 * Loads the wheat-seeds data set, trains a momentum-backpropagation MLP and
 * evaluates it with 10-fold cross-validation.
 *
 * @throws InterruptedException if the cross-validation run is interrupted
 * @throws ExecutionException if a cross-validation task fails
 */
public void run() throws InterruptedException, ExecutionException {
    System.out.println("Creating training set...");
    // get path to training set
    String dataSetFile = "data_sets/seeds.txt";
    int inputsCount = 7;
    int outputsCount = 3;

    // create training set from file (tab-separated), then shuffle before cross-validation
    DataSet dataSet = DataSet.createFromFile(dataSetFile, inputsCount, outputsCount, "\t");
    dataSet.shuffle();

    System.out.println("Creating neural network...");
    MultiLayerPerceptron neuralNet = new MultiLayerPerceptron(inputsCount, 15, 2, outputsCount);
    neuralNet.setLearningRule(new MomentumBackpropagation());
    MomentumBackpropagation learningRule = (MomentumBackpropagation) neuralNet.getLearningRule();
    // set learning rate and max error
    learningRule.setLearningRate(0.1);
    learningRule.setMaxError(0.01);
    learningRule.setMaxIterations(1000);
    // NOTE(review): the seeds-variety label "Cama" is likely a typo for "Kama" — confirm against the data set
    String[] classLabels = new String[]{"Cama", "Rosa", "Canadian"};
    neuralNet.setOutputLabels(classLabels);

    // run 10-fold cross-validation; results are collected but not printed here
    KFoldCrossValidation crossVal = new KFoldCrossValidation(neuralNet, dataSet, 10);
    EvaluationResult totalResult = crossVal.run();
    List<FoldResult> cflist = crossVal.getResultsByFolds();
}
Example 8
Source File: Abalone.java From NeurophFramework with Apache License 2.0 | 4 votes |
/**
 * Loads the abalone-rings data set, trains a momentum-backpropagation MLP on a
 * 60/40 train/test split, evaluates it on the test set, and saves the trained
 * network to "nn1.nnet".
 */
public void run() {
    System.out.println("Creating training set...");
    // get path to training set
    String trainingSetFileName = "data_sets/abalonerings.txt";
    int inputsCount = 8;
    int outputsCount = 29;

    // create training set from file (tab-separated)
    DataSet dataSet = DataSet.createFromFile(trainingSetFileName, inputsCount, outputsCount, "\t", true);
    // split data into train and test set
    DataSet[] trainTestSplit = dataSet.split(0.6, 0.4);
    DataSet trainingSet = trainTestSplit[0];
    DataSet testSet = trainTestSplit[1];
    // normalize data — normalizer is fitted on the training set only, then applied to both
    Normalizer norm = new MaxNormalizer(trainingSet);
    norm.normalize(trainingSet);
    norm.normalize(testSet);

    System.out.println("Creating neural network...");
    MultiLayerPerceptron neuralNet = new MultiLayerPerceptron(inputsCount, 15, 10, outputsCount);
    neuralNet.setLearningRule(new MomentumBackpropagation());
    MomentumBackpropagation learningRule = (MomentumBackpropagation) neuralNet.getLearningRule();
    learningRule.addListener(this); // this class receives per-iteration learning events
    // set learning rate and max error
    learningRule.setLearningRate(0.1);
    learningRule.setMaxIterations(5000);

    System.out.println("Training network...");
    // train the network with training set
    neuralNet.learn(trainingSet);
    System.out.println("Training completed.");

    System.out.println("Testing network...");
    System.out.println("Network performance on the test set");
    evaluate(neuralNet, testSet);

    System.out.println("Saving network");
    // save neural network to file
    neuralNet.save("nn1.nnet");
    System.out.println("Done.");
    System.out.println();
    System.out.println("Network outputs for test set");
    testNeuralNetwork(neuralNet, testSet);
}
Example 9
Source File: Ionosphere.java From NeurophFramework with Apache License 2.0 | 4 votes |
/**
 * Loads the ionosphere data set, trains a momentum-backpropagation MLP on a
 * 60/40 train/test split, evaluates it on the test set, and saves the trained
 * network to "nn1.nnet".
 */
public void run() {
    System.out.println("Creating training set...");
    // get path to training set
    String trainingSetFileName = "data_sets/ionospheredata.txt";
    int inputsCount = 34;
    int outputsCount = 1;

    // create training set from file (comma-separated)
    DataSet dataSet = DataSet.createFromFile(trainingSetFileName, inputsCount, outputsCount, ",", false);
    // split data into training and test set
    DataSet[] trainTestSplit = dataSet.split(0.6, 0.4);
    DataSet trainingSet = trainTestSplit[0];
    DataSet testSet = trainTestSplit[1];
    // normalize data — fitted on the training set, applied to both splits
    Normalizer norm = new MaxNormalizer(trainingSet);
    norm.normalize(trainingSet);
    norm.normalize(testSet);

    System.out.println("Creating neural network...");
    MultiLayerPerceptron neuralNet = new MultiLayerPerceptron(inputsCount, 30, 25, outputsCount);
    neuralNet.setLearningRule(new MomentumBackpropagation());
    MomentumBackpropagation learningRule = (MomentumBackpropagation) neuralNet.getLearningRule();
    learningRule.addListener(this); // this class receives per-iteration learning events
    // set learning rate and max error
    learningRule.setLearningRate(0.1);
    learningRule.setMaxError(0.01);

    System.out.println("Training network...");
    // train the network with training set
    neuralNet.learn(trainingSet);
    System.out.println("Training completed.");

    System.out.println("Testing network...");
    System.out.println("Network performance on the test set");
    evaluate(neuralNet, testSet);

    System.out.println("Saving network");
    // save neural network to file
    neuralNet.save("nn1.nnet");
    System.out.println("Done.");
    System.out.println();
    System.out.println("Network outputs for test set");
    testNeuralNetwork(neuralNet, testSet);
}
Example 10
Source File: Ionosphere.java From NeurophFramework with Apache License 2.0 | 4 votes |
/**
 * Loads the ionosphere data set, trains a momentum-backpropagation MLP on a
 * 60/40 train/test split while logging per-iteration error, evaluates it on
 * the test set, and saves the trained network to "nn1.nnet".
 */
public void run() {
    System.out.println("Creating data set...");
    String dataSetFile = "data_sets/ml10standard/ionospheredata.txt";
    int inputsCount = 34;
    int outputsCount = 1;

    // create data set from file (comma-separated)
    DataSet dataSet = DataSet.createFromFile(dataSetFile, inputsCount, outputsCount, ",", false);
    // split data into training and test set
    DataSet[] trainTestSplit = dataSet.split(0.6, 0.4);
    DataSet trainingSet = trainTestSplit[0];
    DataSet testSet = trainTestSplit[1];
    // normalize data — fitted on the training set, applied to both splits
    Normalizer norm = new MaxNormalizer(trainingSet);
    norm.normalize(trainingSet);
    norm.normalize(testSet);

    System.out.println("Creating neural network...");
    MultiLayerPerceptron neuralNet = new MultiLayerPerceptron(inputsCount, 30, 25, outputsCount);
    neuralNet.setLearningRule(new MomentumBackpropagation());
    MomentumBackpropagation learningRule = (MomentumBackpropagation) neuralNet.getLearningRule();
    // log total network error after each training iteration
    learningRule.addListener((event) -> {
        MomentumBackpropagation bp = (MomentumBackpropagation) event.getSource();
        System.out.println(bp.getCurrentIteration() + ". iteration | Total network error: " + bp.getTotalNetworkError());
    });
    // set learning rate and max error
    learningRule.setLearningRate(0.1);
    learningRule.setMaxError(0.01);

    System.out.println("Training network...");
    // train the network with training set
    neuralNet.learn(trainingSet);
    System.out.println("Training completed.");

    System.out.println("Testing network...");
    System.out.println("Network performance on the test set");
    evaluate(neuralNet, testSet);

    System.out.println("Saving network");
    // save neural network to file
    neuralNet.save("nn1.nnet");
    System.out.println("Done.");
}
Example 11
Source File: Sonar.java From NeurophFramework with Apache License 2.0 | 4 votes |
/**
 * Loads the sonar data set, trains a momentum-backpropagation MLP on a 60/40
 * train/test split, evaluates it on the test set, and saves the trained
 * network to "nn1.nnet".
 */
public void run() {
    System.out.println("Creating training set...");
    // get path to training set
    String trainingSetFileName = "data_sets/sonardata.txt";
    int inputsCount = 60;
    int outputsCount = 1;

    // create training set from file (comma-separated)
    DataSet dataSet = DataSet.createFromFile(trainingSetFileName, inputsCount, outputsCount, ",", false);
    // split data into train and test set
    DataSet[] trainTestSplit = dataSet.split(0.6, 0.4);
    DataSet trainingSet = trainTestSplit[0];
    DataSet testSet = trainTestSplit[1];
    // normalize data using max normalization — fitted on the training set, applied to both
    Normalizer norm = new MaxNormalizer(trainingSet);
    norm.normalize(trainingSet);
    norm.normalize(testSet);

    System.out.println("Creating neural network...");
    MultiLayerPerceptron neuralNet = new MultiLayerPerceptron(inputsCount, 15, 10, outputsCount);
    neuralNet.setLearningRule(new MomentumBackpropagation());
    MomentumBackpropagation learningRule = (MomentumBackpropagation) neuralNet.getLearningRule();
    learningRule.addListener(this); // this class receives per-iteration learning events
    // set learning rate and max error
    learningRule.setLearningRate(0.1);
    learningRule.setMaxError(0.01);

    System.out.println("Training network...");
    // train the network with training set
    neuralNet.learn(trainingSet);
    System.out.println("Training completed.");

    System.out.println("Testing network...");
    System.out.println("Network performance on the test set");
    evaluate(neuralNet, testSet);

    System.out.println("Saving network");
    // save neural network to file
    neuralNet.save("nn1.nnet");
    System.out.println("Done.");
    System.out.println();
    System.out.println("Network outputs for test set");
    testNeuralNetwork(neuralNet, testSet);
}
Example 12
Source File: WineQualityClassification.java From NeurophFramework with Apache License 2.0 | 4 votes |
/**
 * Loads the wine-quality data set, trains a momentum-backpropagation MLP on a
 * 60/40 train/test split while logging per-iteration error, evaluates it on
 * the test set, and saves the trained network to "nn1.nnet".
 */
public void run() {
    System.out.println("Creating data set...");
    String dataSetFile = "data_sets/ml10standard/wine.txt";
    int inputsCount = 11;
    int outputsCount = 10;

    // create data set from file (tab-separated)
    DataSet dataSet = DataSet.createFromFile(dataSetFile, inputsCount, outputsCount, "\t", true);
    // split data into train and test set
    DataSet[] trainTestSplit = dataSet.split(0.6, 0.4);
    DataSet trainingSet = trainTestSplit[0];
    DataSet testSet = trainTestSplit[1];
    // max-normalize — fitted on the training set, applied to both splits
    Normalizer norm = new MaxNormalizer(trainingSet);
    norm.normalize(trainingSet);
    norm.normalize(testSet);

    System.out.println("Creating neural network...");
    MultiLayerPerceptron neuralNet = new MultiLayerPerceptron(inputsCount, 20, 15, outputsCount);
    neuralNet.setLearningRule(new MomentumBackpropagation());
    MomentumBackpropagation learningRule = (MomentumBackpropagation) neuralNet.getLearningRule();
    // log total network error after each training iteration
    learningRule.addListener((event)->{
        MomentumBackpropagation bp = (MomentumBackpropagation) event.getSource();
        System.out.println(bp.getCurrentIteration() + ". iteration | Total network error: " + bp.getTotalNetworkError());
    });
    // set learning rate and max error
    learningRule.setLearningRate(0.1);
    learningRule.setMaxIterations(5000);

    System.out.println("Training network...");
    // train the network with training set
    neuralNet.learn(trainingSet);
    System.out.println("Training completed.");

    System.out.println("Testing network...");
    System.out.println("Network performance on the test set");
    evaluate(neuralNet, testSet);

    System.out.println("Saving network");
    // save neural network to file
    neuralNet.save("nn1.nnet");
    System.out.println("Done.");
    System.out.println();
    System.out.println("Network outputs for test set");
    testNeuralNetwork(neuralNet, testSet);
}
Example 13
Source File: Sonar.java From NeurophFramework with Apache License 2.0 | 4 votes |
/**
 * Loads the sonar data set, trains a tanh MLP with momentum backpropagation
 * on a 60/40 train/test split while logging per-iteration error, evaluates it
 * on the test set, and saves the trained network to "nn1.nnet".
 */
public void run() {
    String dataSetFile = "data_sets/ml10standard/sonardata.txt";
    int numInputs = 60;
    int numOutputs = 1;

    // create data set from csv file
    DataSet dataSet = DataSet.createFromFile(dataSetFile, numInputs, numOutputs, ",");
    // split data into train and test set
    DataSet[] trainTestSplit = dataSet.split(0.6, 0.4);
    DataSet trainingSet = trainTestSplit[0];
    DataSet testSet = trainTestSplit[1];

    // create neural network — 60-35-15-1 with tanh transfer functions
    MultiLayerPerceptron neuralNet = new MultiLayerPerceptron(TransferFunctionType.TANH, numInputs, 35, 15, numOutputs);
    // set learning rule and add listener that logs error after each iteration
    neuralNet.setLearningRule(new MomentumBackpropagation());
    MomentumBackpropagation learningRule = (MomentumBackpropagation) neuralNet.getLearningRule();
    learningRule.addListener((event) -> {
        MomentumBackpropagation bp = (MomentumBackpropagation) event.getSource();
        System.out.println(bp.getCurrentIteration() + ". iteration | Total network error: " + bp.getTotalNetworkError());
    });
    // set learning rate and max error
    learningRule.setLearningRate(0.01);
    learningRule.setMaxError(0.01);

    // train the network with training set
    neuralNet.learn(trainingSet);
    // evaluate network performance on test set
    evaluate(neuralNet, testSet);
    // save neural network to file
    neuralNet.save("nn1.nnet");
    System.out.println("Done.");
    //testNeuralNetwork(neuralNet, testSet);
}
Example 14
Source File: Abalone.java From NeurophFramework with Apache License 2.0 | 4 votes |
/**
 * Loads the abalone-rings data set, trains a momentum-backpropagation MLP on
 * a 70/30 train/test split while logging per-iteration error, evaluates it on
 * the test set, and saves the trained network to "nn1.nnet".
 */
public void run() {
    System.out.println("Creating data set...");
    String dataSetFile = "data_sets/ml10standard/abalonerings.txt";
    int inputsCount = 8;
    int outputsCount = 29;

    // create training set from file (tab-separated)
    DataSet dataSet = DataSet.createFromFile(dataSetFile, inputsCount, outputsCount, "\t", true);
    // 70/30 train/test split
    DataSet[] trainTestSplit = dataSet.split(0.7, 0.3);
    DataSet trainingSet = trainTestSplit[0];
    DataSet testSet = trainTestSplit[1];
    // max-normalize — fitted on the training set, applied to both splits
    Normalizer norm = new MaxNormalizer(trainingSet);
    norm.normalize(trainingSet);
    norm.normalize(testSet);

    System.out.println("Creating neural network...");
    MultiLayerPerceptron neuralNet = new MultiLayerPerceptron(inputsCount, 15, 10, outputsCount);
    neuralNet.setLearningRule(new MomentumBackpropagation());
    MomentumBackpropagation learningRule = (MomentumBackpropagation) neuralNet.getLearningRule();
    // log total network error after each training iteration
    learningRule.addListener((event) -> {
        MomentumBackpropagation bp = (MomentumBackpropagation) event.getSource();
        System.out.println(bp.getCurrentIteration() + ". iteration | Total network error: " + bp.getTotalNetworkError());
    });
    // set learning rate and max error
    learningRule.setLearningRate(0.1);
    learningRule.setMaxIterations(5000);

    System.out.println("Training network...");
    // train the network with training set
    neuralNet.learn(trainingSet);
    System.out.println("Training completed.");

    System.out.println("Testing network...");
    System.out.println("Network performance on the test set");
    evaluate(neuralNet, testSet);

    System.out.println("Saving network");
    // save neural network to file
    neuralNet.save("nn1.nnet");
    System.out.println("Done.");
}
Example 15
Source File: WheatSeeds.java From NeurophFramework with Apache License 2.0 | 4 votes |
/**
 * Loads the wheat-seeds data set, trains a momentum-backpropagation MLP on a
 * 60/40 train/test split, evaluates it on the test set, and saves the trained
 * network to "nn1.nnet". No normalization is applied in this variant.
 */
public void run() {
    System.out.println("Creating training set...");
    // get path to training set
    String trainingSetFileName = "data_sets/seeds.txt";
    int inputsCount = 7;
    int outputsCount = 3;

    // create training set from file (tab-separated)
    DataSet dataSet = DataSet.createFromFile(trainingSetFileName, inputsCount, outputsCount, "\t");
    // split data into train and test set
    DataSet[] trainTestSplit = dataSet.split(0.6, 0.4);
    DataSet trainingSet = trainTestSplit[0];
    DataSet testSet = trainTestSplit[1];

    System.out.println("Creating neural network...");
    MultiLayerPerceptron neuralNet = new MultiLayerPerceptron(inputsCount, 15, 2, outputsCount);
    neuralNet.setLearningRule(new MomentumBackpropagation());
    MomentumBackpropagation learningRule = (MomentumBackpropagation) neuralNet.getLearningRule();
    learningRule.addListener(this); // this class receives per-iteration learning events
    // set learning rate and max error
    learningRule.setLearningRate(0.1);
    learningRule.setMaxError(0.01);
    learningRule.setMaxIterations(5000);

    System.out.println("Training network...");
    // train the network with training set
    neuralNet.learn(trainingSet);
    System.out.println("Training completed.");

    System.out.println("Testing network...");
    System.out.println("Network performance on the test set");
    evaluate(neuralNet, testSet);

    System.out.println("Saving network");
    // save neural network to file
    neuralNet.save("nn1.nnet");
    System.out.println("Done.");
    System.out.println();
    System.out.println("Network outputs for test set");
    testNeuralNetwork(neuralNet, testSet);
}
Example 16
Source File: Banknote.java From NeurophFramework with Apache License 2.0 | 4 votes |
/**
 * Loads the banknote-authentication data set, trains a small tanh MLP with
 * momentum backpropagation on a 60/40 train/test split while logging
 * per-iteration error, evaluates it on the test set, and saves the trained
 * network to "nn1.nnet".
 */
public void run() {
    System.out.println("Creating data set...");
    String dataSetFile = "data_sets/ml10standard/databanknote.txt";
    int inputsCount = 4;
    int outputsCount = 1;

    // create training set from file (comma-separated)
    DataSet dataSet = DataSet.createFromFile(dataSetFile, inputsCount, outputsCount, ",", false);
    // 60/40 train/test split
    DataSet[] trainTestSplit = dataSet.split(0.6, 0.4);
    DataSet trainingSet = trainTestSplit[0];
    DataSet testSet = trainTestSplit[1];
    // max-normalize — fitted on the training set, applied to both splits
    Normalizer norm = new MaxNormalizer(trainingSet);
    norm.normalize(trainingSet);
    norm.normalize(testSet);

    System.out.println("Creating neural network...");
    // single hidden neuron with tanh transfer function
    MultiLayerPerceptron neuralNet = new MultiLayerPerceptron(TransferFunctionType.TANH, inputsCount, 1, outputsCount);
    neuralNet.setLearningRule(new MomentumBackpropagation());
    MomentumBackpropagation learningRule = (MomentumBackpropagation) neuralNet.getLearningRule();
    // log total network error after each training iteration
    learningRule.addListener((event) -> {
        MomentumBackpropagation bp = (MomentumBackpropagation) event.getSource();
        System.out.println(bp.getCurrentIteration() + ". iteration | Total network error: " + bp.getTotalNetworkError());
    });
    // set learning rate and max error
    learningRule.setLearningRate(0.1);
    learningRule.setMaxError(0.01);

    System.out.println("Training network...");
    // train the network with training set
    neuralNet.learn(trainingSet);
    System.out.println("Training completed.");

    System.out.println("Testing network...");
    System.out.println("Network performance on the test set");
    evaluate(neuralNet, testSet);

    System.out.println("Saving network");
    // save neural network to file
    neuralNet.save("nn1.nnet");
    System.out.println("Done.");
}
Example 17
Source File: IrisFlowers.java From NeurophFramework with Apache License 2.0 | 4 votes |
/**
 * Loads the (pre-normalized) iris data set, trains a tanh MLP with momentum
 * backpropagation on a 60/40 train/test split while logging per-iteration
 * error, evaluates it on the test set, and saves the trained network to
 * "nn1.nnet".
 */
public void run() {
    System.out.println("Creating data set...");
    String dataSetFile = "data_sets/ml10standard/irisdatanormalised.txt";
    int inputsCount = 4;
    int outputsCount = 3;

    // create data set from file (comma-separated); data is already normalized
    DataSet dataSet = DataSet.createFromFile(dataSetFile, inputsCount, outputsCount, ",");
    // split data into training and test set
    DataSet[] trainTestSplit = dataSet.split(0.6, 0.4);
    DataSet trainingSet = trainTestSplit[0];
    DataSet testSet = trainTestSplit[1];

    System.out.println("Creating neural network...");
    // 4-2-3 network with tanh transfer functions
    MultiLayerPerceptron neuralNet = new MultiLayerPerceptron(TransferFunctionType.TANH, inputsCount, 2, outputsCount);
    neuralNet.setLearningRule(new MomentumBackpropagation());
    MomentumBackpropagation learningRule = (MomentumBackpropagation) neuralNet.getLearningRule();
    // log total network error after each training iteration
    learningRule.addListener((event)->{
        MomentumBackpropagation bp = (MomentumBackpropagation) event.getSource();
        System.out.println(bp.getCurrentIteration() + ". iteration | Total network error: " + bp.getTotalNetworkError());
    });
    // set learning rate and max error
    learningRule.setLearningRate(0.2);
    learningRule.setMaxError(0.03);

    System.out.println("Training network...");
    // train the network with training set
    neuralNet.learn(trainingSet);
    System.out.println("Training completed.");

    System.out.println("Testing network...");
    System.out.println("Network performance on the test set");
    evaluate(neuralNet, testSet);

    System.out.println("Saving network");
    // save neural network to file
    neuralNet.save("nn1.nnet");
    System.out.println("Done.");
}
Example 18
Source File: XorMultiLayerPerceptronSample.java From NeurophFramework with Apache License 2.0 | 4 votes |
/**
 * Runs this sample: builds the XOR truth table in memory, trains a 2-3-1
 * sigmoid MLP with plain backpropagation, tests it, saves it to
 * "myMlPerceptron.nnet", reloads it from that file and tests the loaded copy.
 */
public void run() {
    // create training set (logical XOR function)
    DataSet trainingSet = new DataSet(2, 1);
    trainingSet.add(new DataSetRow(new double[]{0, 0}, new double[]{0}));
    trainingSet.add(new DataSetRow(new double[]{0, 1}, new double[]{1}));
    trainingSet.add(new DataSetRow(new double[]{1, 0}, new double[]{1}));
    trainingSet.add(new DataSetRow(new double[]{1, 1}, new double[]{0}));

    // create multi layer perceptron with a fixed weight-initialization seed
    MultiLayerPerceptron myMlPerceptron = new MultiLayerPerceptron(TransferFunctionType.SIGMOID, 2, 3, 1);
    myMlPerceptron.randomizeWeights(new WeightsRandomizer(new Random(123)));
    System.out.println(Arrays.toString(myMlPerceptron.getWeights()));
    myMlPerceptron.setLearningRule(new BackPropagation());
    myMlPerceptron.getLearningRule().setLearningRate(0.5);

    // enable batch if using MomentumBackpropagation
    // if( myMlPerceptron.getLearningRule() instanceof MomentumBackpropagation )
    //     ((MomentumBackpropagation)myMlPerceptron.getLearningRule()).setBatchMode(false);

    // this class receives per-iteration learning events
    LearningRule learningRule = myMlPerceptron.getLearningRule();
    learningRule.addListener(this);

    // learn the training set
    System.out.println("Training neural network...");
    myMlPerceptron.learn(trainingSet);

    // test perceptron
    System.out.println("Testing trained neural network");
    testNeuralNetwork(myMlPerceptron, trainingSet);

    // save trained neural network
    myMlPerceptron.save("myMlPerceptron.nnet");
    // load saved neural network
    NeuralNetwork loadedMlPerceptron = NeuralNetwork.createFromFile("myMlPerceptron.nnet");
    // test loaded neural network
    System.out.println("Testing loaded neural network");
    testNeuralNetwork(loadedMlPerceptron, trainingSet);
}
Example 19
Source File: IrisFlowers.java From NeurophFramework with Apache License 2.0 | 4 votes |
/**
 * Loads the (pre-normalized) iris data set, trains a tanh MLP with momentum
 * backpropagation on a 60/40 train/test split, evaluates it on the test set,
 * and saves the trained network to "nn1.nnet".
 */
public void run() {
    System.out.println("Creating training set...");
    // get path to training set
    String trainingSetFileName = "data_sets/irisdatanormalised.txt";
    int inputsCount = 4;
    int outputsCount = 3;

    // create training set from file (comma-separated); data is already normalized
    DataSet dataSet = DataSet.createFromFile(trainingSetFileName, inputsCount, outputsCount, ",");
    // split data into training and test set
    DataSet[] trainTestSplit = dataSet.split(0.6, 0.4);
    DataSet trainingSet = trainTestSplit[0];
    DataSet testSet = trainTestSplit[1];

    System.out.println("Creating neural network...");
    MultiLayerPerceptron neuralNet = new MultiLayerPerceptron(TransferFunctionType.TANH, inputsCount, 2, outputsCount);
    neuralNet.setLearningRule(new MomentumBackpropagation());
    MomentumBackpropagation learningRule = (MomentumBackpropagation) neuralNet.getLearningRule();
    learningRule.addListener(this); // this class receives per-iteration learning events
    // set learning rate and max error
    learningRule.setLearningRate(0.2);
    learningRule.setMaxError(0.03);

    System.out.println("Training network...");
    // train the network with training set
    neuralNet.learn(trainingSet);
    System.out.println("Training completed.");

    System.out.println("Testing network...");
    System.out.println("Network performance on the test set");
    evaluate(neuralNet, testSet);

    System.out.println("Saving network");
    // save neural network to file
    neuralNet.save("nn1.nnet");
    System.out.println("Done.");
    System.out.println();
    System.out.println("Network outputs for test set");
    testNeuralNetwork(neuralNet, testSet);
}
Example 20
Source File: PimaIndiansDiabetes.java From NeurophFramework with Apache License 2.0 | 4 votes |
/**
 * Loads the Pima Indians diabetes data set, trains a tanh MLP with momentum
 * backpropagation on a 60/40 train/test split, evaluates it on the test set,
 * and saves the trained network to "nn1.nnet".
 */
public void run() {
    System.out.println("Creating training set...");
    // get path to training set
    String trainingSetFileName = "data_sets/pimadata.txt";
    int inputsCount = 8;
    int outputsCount = 1;

    // create training set from file (tab-separated)
    DataSet dataSet = DataSet.createFromFile(trainingSetFileName, inputsCount, outputsCount, "\t", false);
    // split data into training and test set
    DataSet[] trainTestSplit = dataSet.split(0.6, 0.4);
    DataSet trainingSet = trainTestSplit[0];
    DataSet testSet = trainTestSplit[1];
    // normalize training and test set — normalizer fitted on the training set only
    Normalizer norm = new MaxNormalizer(trainingSet);
    norm.normalize(trainingSet);
    norm.normalize(testSet);

    System.out.println("Creating neural network...");
    // 8-15-5-1 network with tanh transfer functions
    MultiLayerPerceptron neuralNet = new MultiLayerPerceptron(TransferFunctionType.TANH, inputsCount, 15, 5, outputsCount);
    neuralNet.setLearningRule(new MomentumBackpropagation());
    MomentumBackpropagation learningRule = (MomentumBackpropagation) neuralNet.getLearningRule();
    learningRule.addListener(this); // this class receives per-iteration learning events
    // set learning rate and max error
    learningRule.setLearningRate(0.1);
    learningRule.setMaxError(0.03);

    System.out.println("Training network...");
    // train the network with training set
    neuralNet.learn(trainingSet);
    System.out.println("Training completed.");

    System.out.println("Testing network...");
    System.out.println("Network performance on the test set");
    evaluate(neuralNet, testSet);

    System.out.println("Saving network");
    // save neural network to file
    neuralNet.save("nn1.nnet");
    System.out.println("Done.");
    System.out.println();
    System.out.println("Network outputs for test set");
    testNeuralNetwork(neuralNet, testSet);
}