Java Code Examples for org.neuroph.core.data.DataSet#add()
The following examples show how to use org.neuroph.core.data.DataSet#add().
You can go to the original project or source file by following the links above each example.
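All of the examples below follow the same basic pattern: construct a DataSet with fixed input (and optionally output) column counts, then add() one DataSetRow at a time. A minimal, self-contained sketch of that pattern (the class name here is illustrative):

import org.neuroph.core.data.DataSet;
import org.neuroph.core.data.DataSetRow;

public class DataSetAddSketch {

    public static void main(String[] args) {
        // a supervised data set with two input columns and one output column
        DataSet dataSet = new DataSet(2, 1);
        // add() appends a row holding the input values and the desired output
        dataSet.add(new DataSetRow(new double[]{0, 1}, new double[]{1}));
        dataSet.add(new DataSetRow(new double[]{1, 1}, new double[]{0}));
        System.out.println("Rows: " + dataSet.size());
    }
}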
Example 1
Source File: ImageRecognitionHelper.java From NeurophFramework with Apache License 2.0
/**
 * Creates training set for the specified image labels and rgb data
 * @param imageLabels image labels
 * @param rgbDataMap map collection of rgb data
 * @return training set for the specified image data
 */
public static DataSet createRGBTrainingSet(List<String> imageLabels, Map<String, FractionRgbData> rgbDataMap) {
    int inputCount = rgbDataMap.values().iterator().next().getFlattenedRgbValues().length;
    int outputCount = imageLabels.size();
    DataSet trainingSet = new DataSet(inputCount, outputCount);

    for (Entry<String, FractionRgbData> entry : rgbDataMap.entrySet()) {
        double[] input = entry.getValue().getFlattenedRgbValues();
        double[] response = createResponse(entry.getKey(), imageLabels);
        trainingSet.add(new DataSetRow(input, response));
    }

    // set labels for output columns
    int inputSize = trainingSet.getInputSize();
    for (int c = 0; c < trainingSet.getOutputSize(); c++) {
        trainingSet.setColumnName(inputSize + c, imageLabels.get(c));
    }

    return trainingSet;
}
Example 2
Source File: MomentumBackPropagationTest.java From NeurophFramework with Apache License 2.0
@Before
public void setUp() {
    instance = new MomentumBackpropagation();
    instance.setMomentum(0.5);

    xorDataSet = new DataSet(2, 1);
    xorDataSet.add(new DataSetRow(new double[]{0, 0}, new double[]{0}));
    xorDataSet.add(new DataSetRow(new double[]{0, 1}, new double[]{1}));
    xorDataSet.add(new DataSetRow(new double[]{1, 0}, new double[]{1}));
    xorDataSet.add(new DataSetRow(new double[]{1, 1}, new double[]{0}));

    maxError = 0.01;
    instance.setLearningRate(0.5);
    instance.setMaxError(maxError);

    String inputFileName = "src/test/resources/iris_normalized.txt";
    irisDataSet = DataSet.createFromFile(inputFileName, 4, 3, ",", false);
}
Example 3
Source File: LMSTest.java From NeurophFramework with Apache License 2.0
@Before
public void setUp() {
    instance = new LMS();
    double[] x = new double[] { 12.39999962, 14.30000019, 14.5, 14.89999962, 16.10000038,
            16.89999962, 16.5, 15.39999962, 17, 17.89999962, 18.79999924, 20.29999924,
            22.39999962, 19.39999962, 15.5, 16.70000076, 17.29999924, 18.39999962,
            19.20000076, 17.39999962, 19.5, 19.70000076, 21.20000076 };
    double[] y = new double[] { 11.19999981, 12.5, 12.69999981, 13.10000038, 14.10000038,
            14.80000019, 14.39999962, 13.39999962, 14.89999962, 15.60000038, 16.39999962,
            17.70000076, 19.60000038, 16.89999962, 14, 14.60000038, 15.10000038,
            16.10000038, 16.79999924, 15.19999981, 17, 17.20000076, 18.60000038 };

    dataSet = new DataSet(1, 1);
    for (int i = 0; i < x.length; i++) {
        dataSet.add(new DataSetRow(new double[] { x[i] }, new double[] { y[i] }));
    }

    Normalizer n = new MaxMinNormalizer(dataSet);
    n.normalize(dataSet);

    maxError = 0.01;
    instance.setMaxError(maxError);
}
Example 4
Source File: TestMatrixMLP.java From NeurophFramework with Apache License 2.0
/**
 * Create and run MLP with XOR training set
 */
public static void main(String[] args) {
    // create training set (logical XOR function)
    DataSet trainingSet = new DataSet(2, 1);
    trainingSet.add(new DataSetRow(new double[]{0, 0}, new double[]{0}));
    trainingSet.add(new DataSetRow(new double[]{0, 1}, new double[]{1}));
    trainingSet.add(new DataSetRow(new double[]{1, 0}, new double[]{1}));
    trainingSet.add(new DataSetRow(new double[]{1, 1}, new double[]{0}));

    MultiLayerPerceptron nnet = new MultiLayerPerceptron(TransferFunctionType.TANH, 2, 3, 1);
    MatrixMultiLayerPerceptron mnet = new MatrixMultiLayerPerceptron(nnet);

    System.out.println("Training network...");
    mnet.learn(trainingSet);
    System.out.println("Done training network.");
}
Example 5
Source File: SigmoidDeltaRuleTest.java From NeurophFramework with Apache License 2.0
@Before
public void setUp() {
    instance = new SigmoidDeltaRule();
    double[] x = new double[] { 12.39999962, 14.30000019, 14.5, 14.89999962, 16.10000038,
            16.89999962, 16.5, 15.39999962, 17, 17.89999962, 18.79999924, 20.29999924,
            22.39999962, 19.39999962, 15.5, 16.70000076, 17.29999924, 18.39999962,
            19.20000076, 17.39999962, 19.5, 19.70000076, 21.20000076 };
    double[] y = new double[] { 11.19999981, 12.5, 12.69999981, 13.10000038, 14.10000038,
            14.80000019, 14.39999962, 13.39999962, 14.89999962, 15.60000038, 16.39999962,
            17.70000076, 19.60000038, 16.89999962, 14, 14.60000038, 15.10000038,
            16.10000038, 16.79999924, 15.19999981, 17, 17.20000076, 18.60000038 };

    dataSet = new DataSet(1, 1);
    for (int i = 0; i < x.length; i++) {
        dataSet.add(new DataSetRow(new double[] { x[i] }, new double[] { y[i] }));
    }

    Normalizer n = new MaxMinNormalizer(dataSet);
    n.normalize(dataSet);

    maxError = 0.01;
    instance.setMaxError(maxError);
}
Example 6
Source File: NormalizationSample.java From NeurophFramework with Apache License 2.0
/**
 * Runs this sample
 */
public static void main(String[] args) {
    // create data set to normalize
    DataSet dataSet = new DataSet(2, 1);
    dataSet.add(new DataSetRow(new double[]{10, 12}, new double[]{0}));
    dataSet.add(new DataSetRow(new double[]{23, 19}, new double[]{0}));
    dataSet.add(new DataSetRow(new double[]{47, 76}, new double[]{0}));
    dataSet.add(new DataSetRow(new double[]{98, 123}, new double[]{1}));

    Normalizer norm = new MaxMinNormalizer(dataSet);
    norm.normalize(dataSet);

    // print out normalized training set, one row per line
    for (DataSetRow dataSetRow : dataSet.getRows()) {
        System.out.print("Input: " + Arrays.toString(dataSetRow.getInput()));
        System.out.println(" Output: " + Arrays.toString(dataSetRow.getDesiredOutput()));
    }
}
Example 7
Source File: StockTrainingSetImport.java From NeurophFramework with Apache License 2.0
/**
 * Creates and returns training set for stock market prediction using the provided data from array
 * @param values an array containing stock data
 * @param inputsCount training element (neural net) inputs count
 * @param outputsCount training element (neural net) outputs count
 * @return training set with stock data
 */
public static DataSet importFromArray(double[] values, int inputsCount, int outputsCount) {
    DataSet trainingSet = new DataSet(inputsCount, outputsCount);
    for (int i = 0; i < values.length - inputsCount; i++) {
        ArrayList<Double> inputs = new ArrayList<Double>();
        for (int j = i; j < i + inputsCount; j++) {
            inputs.add(values[j]);
        }
        ArrayList<Double> outputs = new ArrayList<Double>();
        if (outputsCount > 0 && i + inputsCount + outputsCount <= values.length) {
            for (int j = i + inputsCount; j < i + inputsCount + outputsCount; j++) {
                outputs.add(values[j]);
            }
            // note: this inner check is redundant (and its else branch unreachable),
            // since the enclosing condition already guarantees outputsCount > 0
            if (outputsCount > 0) {
                trainingSet.add(new DataSetRow(inputs, outputs));
            } else {
                trainingSet.add(new DataSetRow(inputs));
            }
        }
    }
    return trainingSet;
}
Example 8
Source File: SunSpots.java From NeurophFramework with Apache License 2.0
/**
 * Generate the training data for the training sunspot years.
 * @return The training data.
 */
public DataSet generateTrainingData() {
    DataSet result = new DataSet(WINDOW_SIZE, 1);

    for (int year = TRAIN_START; year < TRAIN_END; year++) {
        double[] input = new double[WINDOW_SIZE];
        double[] ideal = new double[1];

        // sliding window: the previous WINDOW_SIZE values form the input...
        int index = 0;
        for (int i = year - WINDOW_SIZE; i < year; i++) {
            input[index++] = this.normalizedSunspots[i];
        }
        // ...and the current year's value is the desired output
        ideal[0] = this.normalizedSunspots[year];

        result.add(new DataSetRow(input, ideal));
    }
    return result;
}
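With WINDOW_SIZE set to 3, for instance, the row for a given year would take the three preceding normalized values as inputs and that year's value as the single desired output; the actual WINDOW_SIZE, TRAIN_START, and TRAIN_END constants are defined elsewhere in SunSpots.java.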
Example 9
Source File: ImageRecognitionHelper.java From NeurophFramework with Apache License 2.0
/**
 * Creates binary black and white training set for the specified image labels and rgb data
 * (white = 0, black = 1)
 * @param imageLabels image labels
 * @param rgbDataMap map collection of rgb data
 * @return binary black and white training set for the specified image data
 */
public static DataSet createBlackAndWhiteTrainingSet(List<String> imageLabels, Map<String, FractionRgbData> rgbDataMap) throws VectorSizeMismatchException {
    // TODO: Use some binarization image filter to do this; currently it works with averaging RGB values
    int inputCount = rgbDataMap.values().iterator().next().getFlattenedRgbValues().length / 3;
    int outputCount = imageLabels.size();
    DataSet trainingSet = new DataSet(inputCount, outputCount);

    for (Entry<String, FractionRgbData> entry : rgbDataMap.entrySet()) {
        double[] inputRGB = entry.getValue().getFlattenedRgbValues();
        double[] inputBW = FractionRgbData.convertRgbInputToBinaryBlackAndWhite(inputRGB);
        double[] response = createResponse(entry.getKey(), imageLabels);
        trainingSet.add(new DataSetRow(inputBW, response));
    }

    // set labels for output columns
    int inputSize = trainingSet.getInputSize();
    for (int c = 0; c < trainingSet.getOutputSize(); c++) {
        trainingSet.setColumnName(inputSize + c, imageLabels.get(c));
    }

    return trainingSet;
}
Example 10
Source File: XorResilientPropagationSample.java From NeurophFramework with Apache License 2.0
/**
 * Runs this sample
 */
public void run() {
    // create training set (logical XOR function)
    DataSet trainingSet = new DataSet(2, 1);
    trainingSet.add(new DataSetRow(new double[]{0, 0}, new double[]{0}));
    trainingSet.add(new DataSetRow(new double[]{0, 1}, new double[]{1}));
    trainingSet.add(new DataSetRow(new double[]{1, 0}, new double[]{1}));
    trainingSet.add(new DataSetRow(new double[]{1, 1}, new double[]{0}));

    // create multi layer perceptron
    MultiLayerPerceptron myMlPerceptron = new MultiLayerPerceptron(TransferFunctionType.SIGMOID, 2, 3, 1);
    // set ResilientPropagation learning rule
    myMlPerceptron.setLearningRule(new ResilientPropagation());

    LearningRule learningRule = myMlPerceptron.getLearningRule();
    learningRule.addListener(this);

    // learn the training set
    System.out.println("Training neural network...");
    myMlPerceptron.learn(trainingSet);
    int iterations = ((SupervisedLearning) myMlPerceptron.getLearningRule()).getCurrentIteration();
    System.out.println("Learned in " + iterations + " iterations");

    // test perceptron
    System.out.println("Testing trained neural network");
    testNeuralNetwork(myMlPerceptron, trainingSet);
}
Example 11
Source File: TestBinaryClass.java From NeurophFramework with Apache License 2.0
public static void main(String[] args) {
    DataSet trainingSet = new DataSet(2, 1);
    trainingSet.add(new DataSetRow(new double[]{0, 0}, new double[]{0}));
    trainingSet.add(new DataSetRow(new double[]{0, 1}, new double[]{1}));
    trainingSet.add(new DataSetRow(new double[]{1, 0}, new double[]{1}));
    trainingSet.add(new DataSetRow(new double[]{1, 1}, new double[]{0}));

    MultiLayerPerceptron neuralNet = new MultiLayerPerceptron(TransferFunctionType.TANH, 2, 3, 1);
    neuralNet.learn(trainingSet);

    Evaluation.runFullEvaluation(neuralNet, trainingSet);
}
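Evaluation.runFullEvaluation prints aggregate metrics. For per-row inspection, the setInput/calculate/getOutput pattern used by Examples 12 and 18 below applies directly; a minimal sketch that could follow the evaluation call (assuming java.util.Arrays is imported):

// sketch: print the trained network's output for each training row
for (DataSetRow row : trainingSet.getRows()) {
    neuralNet.setInput(row.getInput());
    neuralNet.calculate();
    System.out.println(Arrays.toString(row.getInput())
            + " -> " + Arrays.toString(neuralNet.getOutput()));
}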
Example 12
Source File: HopfieldSample.java From NeurophFramework with Apache License 2.0
/**
 * Runs this sample
 */
public static void main(String args[]) {
    // create training set (H and T letter in 3x3 grid)
    DataSet trainingSet = new DataSet(9);
    trainingSet.add(new DataSetRow(new double[]{1, 0, 1, 1, 1, 1, 1, 0, 1})); // H letter
    trainingSet.add(new DataSetRow(new double[]{1, 1, 1, 0, 1, 0, 0, 1, 0})); // T letter

    // create hopfield network
    Hopfield myHopfield = new Hopfield(9);
    // learn the training set
    myHopfield.learn(trainingSet);

    // test hopfield network
    System.out.println("Testing network");

    // add one more 'incomplete' H pattern for testing - it will be recognized as H
    trainingSet.add(new DataSetRow(new double[]{1, 0, 0, 1, 0, 1, 1, 0, 1}));

    // print network output for each element from the specified training set
    for (DataSetRow trainingSetRow : trainingSet.getRows()) {
        myHopfield.setInput(trainingSetRow.getInput());
        // run two update passes so the recurrent network can settle
        myHopfield.calculate();
        myHopfield.calculate();
        double[] networkOutput = myHopfield.getOutput();

        System.out.print("Input: " + Arrays.toString(trainingSetRow.getInput()));
        System.out.println(" Output: " + Arrays.toString(networkOutput));
    }
}
Example 13
Source File: StatisticsTest.java From NeurophFramework with Apache License 2.0
private DataSet createDataSetFromRows(DataSetRow... rows) {
    DataSet dataSet = new DataSet(rows[0].getInput().length);
    for (DataSetRow row : rows) {
        dataSet.add(row);
    }
    return dataSet;
}
Example 14
Source File: ZeroMeanNormalizerTest.java From NeurophFramework with Apache License 2.0
private DataSet createDataSetFromRows(DataSetRow... rows) {
    DataSet dataSet = new DataSet(rows[0].getInput().length);
    for (DataSetRow row : rows) {
        dataSet.add(row);
    }
    return dataSet;
}
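Examples 13 and 14 are the same varargs helper duplicated across two test classes; the single-argument DataSet constructor creates an unsupervised data set sized to the first row's input length. A hypothetical call:

// hypothetical usage of the helper above
DataSet ds = createDataSetFromRows(
        new DataSetRow(new double[]{1.0, 2.0}),
        new DataSetRow(new double[]{3.0, 4.0}));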
Example 15
Source File: BackPropagationTest.java From NeurophFramework with Apache License 2.0
@Before
public void setUp() {
    instance = new BackPropagation();

    xorDataSet = new DataSet(2, 1);
    xorDataSet.add(new DataSetRow(new double[]{0, 0}, new double[]{0}));
    xorDataSet.add(new DataSetRow(new double[]{0, 1}, new double[]{1}));
    xorDataSet.add(new DataSetRow(new double[]{1, 0}, new double[]{1}));
    xorDataSet.add(new DataSetRow(new double[]{1, 1}, new double[]{0}));

    maxError = 0.01;
    instance.setLearningRate(0.5);
    instance.setMaxError(maxError);

    String inputFileName = "src/test/resources/iris_normalized.txt";
    irisDataSet = DataSet.createFromFile(inputFileName, 4, 3, ",", false);
}
Example 16
Source File: BinaryDeltaRuleTest.java From NeurophFramework with Apache License 2.0
@Before
public void setUp() {
    instance = new BinaryDeltaRule();

    dataSet = new DataSet(2, 1);
    dataSet.add(new DataSetRow(new double[]{0, 0}, new double[]{0}));
    dataSet.add(new DataSetRow(new double[]{0, 1}, new double[]{1}));
    dataSet.add(new DataSetRow(new double[]{1, 0}, new double[]{1}));
    dataSet.add(new DataSetRow(new double[]{1, 1}, new double[]{0}));

    maxError = 0.4;
    instance.setMaxError(maxError);
}
Example 17
Source File: SubSampling.java From NeurophFramework with Apache License 2.0
@Override
public DataSet[] sample(DataSet dataSet) {
    // if object was initialized by specifying numParts, calculate subSetSizes so all subsets are equally sized
    if (subSetSizes == null) {
        final double singleSubSetSize = 1.0d / numSubSets;
        subSetSizes = new double[numSubSets];
        for (int i = 0; i < numSubSets; i++) {
            subSetSizes[i] = singleSubSetSize;
        }
    }

    // create list of data sets to return
    List<DataSet> subSets = new ArrayList<>();

    // shuffle dataset in order to randomize rows that will be used to fill subsets
    dataSet.shuffle();

    int idxCounter = 0; // index of main data set
    for (int subSetIdx = 0; subSetIdx < numSubSets; subSetIdx++) {
        // create new subset
        DataSet newSubSet = new DataSet(dataSet.getInputSize(), dataSet.getOutputSize());
        // copy column names if there are any
        newSubSet.setColumnNames(dataSet.getColumnNames());

        // fill subset with rows
        long subSetSize = Math.round(subSetSizes[subSetIdx] * dataSet.size()); // calculate size of the current subset
        for (int i = 0; i < subSetSize; i++) {
            if (idxCounter >= dataSet.size()) {
                break;
            }
            newSubSet.add(dataSet.getRowAt(idxCounter));
            idxCounter++;
        }

        // add current subset to list that will be returned
        subSets.add(newSubSet);
    }

    return subSets.toArray(new DataSet[numSubSets]);
}
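A hedged usage sketch for this sampler, assuming SubSampling offers a constructor taking the number of subsets, as the numSubSets field suggests:

// split a data set into three shuffled, roughly equal folds (constructor assumed)
SubSampling sampler = new SubSampling(3);
DataSet[] folds = sampler.sample(dataSet);
for (DataSet fold : folds) {
    System.out.println("Fold size: " + fold.size());
}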
Example 18
Source File: Main.java From NeurophFramework with Apache License 2.0
public static void main(String[] args) {
    // note: "MM" in this date pattern is the month field; "SSS" would print milliseconds
    System.out.println("Time stamp N1:" + new SimpleDateFormat("dd-MMM-yyyy HH:mm:ss:MM").format(new Date()));

    int maxIterations = 10000;
    NeuralNetwork neuralNet = new MultiLayerPerceptron(4, 9, 1);
    ((LMS) neuralNet.getLearningRule()).setMaxError(0.001);   // 0-1
    ((LMS) neuralNet.getLearningRule()).setLearningRate(0.7); // 0-1
    ((LMS) neuralNet.getLearningRule()).setMaxIterations(maxIterations);

    // sliding window over stock values, scaled into [0, 1] by daxmax
    DataSet trainingSet = new DataSet(4, 1);
    double daxmax = 10000.0D;
    trainingSet.add(new DataSetRow(new double[]{3710.0D / daxmax, 3690.0D / daxmax, 3890.0D / daxmax, 3695.0D / daxmax}, new double[]{3666.0D / daxmax}));
    trainingSet.add(new DataSetRow(new double[]{3690.0D / daxmax, 3890.0D / daxmax, 3695.0D / daxmax, 3666.0D / daxmax}, new double[]{3692.0D / daxmax}));
    trainingSet.add(new DataSetRow(new double[]{3890.0D / daxmax, 3695.0D / daxmax, 3666.0D / daxmax, 3692.0D / daxmax}, new double[]{3886.0D / daxmax}));
    trainingSet.add(new DataSetRow(new double[]{3695.0D / daxmax, 3666.0D / daxmax, 3692.0D / daxmax, 3886.0D / daxmax}, new double[]{3914.0D / daxmax}));
    trainingSet.add(new DataSetRow(new double[]{3666.0D / daxmax, 3692.0D / daxmax, 3886.0D / daxmax, 3914.0D / daxmax}, new double[]{3956.0D / daxmax}));
    trainingSet.add(new DataSetRow(new double[]{3692.0D / daxmax, 3886.0D / daxmax, 3914.0D / daxmax, 3956.0D / daxmax}, new double[]{3953.0D / daxmax}));
    trainingSet.add(new DataSetRow(new double[]{3886.0D / daxmax, 3914.0D / daxmax, 3956.0D / daxmax, 3953.0D / daxmax}, new double[]{4044.0D / daxmax}));
    trainingSet.add(new DataSetRow(new double[]{3914.0D / daxmax, 3956.0D / daxmax, 3953.0D / daxmax, 4044.0D / daxmax}, new double[]{3987.0D / daxmax}));
    trainingSet.add(new DataSetRow(new double[]{3956.0D / daxmax, 3953.0D / daxmax, 4044.0D / daxmax, 3987.0D / daxmax}, new double[]{3996.0D / daxmax}));
    trainingSet.add(new DataSetRow(new double[]{3953.0D / daxmax, 4044.0D / daxmax, 3987.0D / daxmax, 3996.0D / daxmax}, new double[]{4043.0D / daxmax}));
    trainingSet.add(new DataSetRow(new double[]{4044.0D / daxmax, 3987.0D / daxmax, 3996.0D / daxmax, 4043.0D / daxmax}, new double[]{4068.0D / daxmax}));
    trainingSet.add(new DataSetRow(new double[]{3987.0D / daxmax, 3996.0D / daxmax, 4043.0D / daxmax, 4068.0D / daxmax}, new double[]{4176.0D / daxmax}));
    trainingSet.add(new DataSetRow(new double[]{3996.0D / daxmax, 4043.0D / daxmax, 4068.0D / daxmax, 4176.0D / daxmax}, new double[]{4187.0D / daxmax}));
    trainingSet.add(new DataSetRow(new double[]{4043.0D / daxmax, 4068.0D / daxmax, 4176.0D / daxmax, 4187.0D / daxmax}, new double[]{4223.0D / daxmax}));
    trainingSet.add(new DataSetRow(new double[]{4068.0D / daxmax, 4176.0D / daxmax, 4187.0D / daxmax, 4223.0D / daxmax}, new double[]{4259.0D / daxmax}));
    trainingSet.add(new DataSetRow(new double[]{4176.0D / daxmax, 4187.0D / daxmax, 4223.0D / daxmax, 4259.0D / daxmax}, new double[]{4203.0D / daxmax}));
    trainingSet.add(new DataSetRow(new double[]{4187.0D / daxmax, 4223.0D / daxmax, 4259.0D / daxmax, 4203.0D / daxmax}, new double[]{3989.0D / daxmax}));

    neuralNet.learn(trainingSet);
    System.out.println("Time stamp N2:" + new SimpleDateFormat("dd-MMM-yyyy HH:mm:ss:MM").format(new Date()));

    DataSet testSet = new DataSet(4, 1);
    testSet.add(new DataSetRow(new double[]{4223.0D / daxmax, 4259.0D / daxmax, 4203.0D / daxmax, 3989.0D / daxmax}));

    for (DataSetRow testDataRow : testSet.getRows()) {
        neuralNet.setInput(testDataRow.getInput());
        neuralNet.calculate();
        double[] networkOutput = neuralNet.getOutput();
        System.out.print("Input: " + Arrays.toString(testDataRow.getInput()));
        System.out.println(" Output: " + Arrays.toString(networkOutput));
    }

    // Experiments (calculated):
    // 31;3;2009;4084,76 -> 4121  Error=0.01   Rate=0.7  Iterat=100
    // 31;3;2009;4084,76 -> 4096  Error=0.01   Rate=0.7  Iterat=1000
    // 31;3;2009;4084,76 -> 4093  Error=0.01   Rate=0.7  Iterat=10000
    // 31;3;2009;4084,76 -> 4108  Error=0.01   Rate=0.7  Iterat=100000
    // 31;3;2009;4084,76 -> 4084  Error=0.001  Rate=0.7  Iterat=10000
    System.out.println("Time stamp N3:" + new SimpleDateFormat("dd-MMM-yyyy HH:mm:ss:MM").format(new Date()));
    System.exit(0);
}
Example 19
Source File: GenerateData.java From NeurophFramework with Apache License 2.0
public void createBalancedTrainingSet(int count) {
    // create empty data set
    DataSet balanced = new DataSet(54, 7);

    // declare a counter for each of the seven tree types
    int firstType = 0;
    int secondType = 0;
    int thirdType = 0;
    int fourthType = 0;
    int fifthType = 0;
    int sixthType = 0;
    int seventhType = 0;

    DataSet trainingSet = DataSet.load(config.getTrainingFileName());
    List<DataSetRow> rows = trainingSet.getRows();
    System.out.println("Test set size: " + rows.size() + " rows. ");

    for (DataSetRow row : rows) {
        // take the desired output vector from the loaded file
        double[] desiredOutput = row.getDesiredOutput();
        int index = -1;
        // find the index of the 1 in the output vector
        for (int i = 0; i < desiredOutput.length; i++) {
            if (desiredOutput[i] == 1.0) {
                index = i;
                break;
            }
        }
        // add the row to the balanced data set if this tree type is still under the maximum
        switch (index + 1) {
            case 1:
                if (firstType < count) { balanced.add(row); firstType++; }
                break;
            case 2:
                if (secondType < count) { balanced.add(row); secondType++; }
                break;
            case 3:
                if (thirdType < count) { balanced.add(row); thirdType++; }
                break;
            case 4:
                if (fourthType < count) { balanced.add(row); fourthType++; }
                break;
            case 5:
                if (fifthType < count) { balanced.add(row); fifthType++; }
                break;
            case 6:
                if (sixthType < count) { balanced.add(row); sixthType++; }
                break;
            case 7:
                if (seventhType < count) { balanced.add(row); seventhType++; }
                break;
            default:
                System.out.println("Error with output vector size! ");
        }
    }

    System.out.println("Balanced test set size: " + balanced.getRows().size() + " rows. ");
    System.out.println("Samples per tree: ");
    System.out.println("First type: " + firstType + " samples. ");
    System.out.println("Second type: " + secondType + " samples. ");
    System.out.println("Third type: " + thirdType + " samples. ");
    System.out.println("Fourth type: " + fourthType + " samples. ");
    System.out.println("Fifth type: " + fifthType + " samples. ");
    System.out.println("Sixth type: " + sixthType + " samples. ");
    System.out.println("Seventh type: " + seventhType + " samples. ");

    balanced.save(config.getBalancedFileName());
}
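The seven counters and the switch could be collapsed into an array indexed by class position; a hedged equivalent sketch, where indexOfOne is a hypothetical helper returning the position of the 1.0 in the one-hot output (or -1 if none):

int[] perClass = new int[7];
for (DataSetRow row : rows) {
    int index = indexOfOne(row.getDesiredOutput()); // hypothetical helper
    if (index >= 0 && perClass[index] < count) {
        balanced.add(row);
        perClass[index]++;
    }
}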
Example 20
Source File: XorMultiLayerPerceptronSample.java From NeurophFramework with Apache License 2.0
/**
 * Runs this sample
 */
public void run() {
    // create training set (logical XOR function)
    DataSet trainingSet = new DataSet(2, 1);
    trainingSet.add(new DataSetRow(new double[]{0, 0}, new double[]{0}));
    trainingSet.add(new DataSetRow(new double[]{0, 1}, new double[]{1}));
    trainingSet.add(new DataSetRow(new double[]{1, 0}, new double[]{1}));
    trainingSet.add(new DataSetRow(new double[]{1, 1}, new double[]{0}));

    // create multi layer perceptron
    MultiLayerPerceptron myMlPerceptron = new MultiLayerPerceptron(TransferFunctionType.SIGMOID, 2, 3, 1);
    myMlPerceptron.randomizeWeights(new WeightsRandomizer(new Random(123)));
    System.out.println(Arrays.toString(myMlPerceptron.getWeights()));

    myMlPerceptron.setLearningRule(new BackPropagation());
    myMlPerceptron.getLearningRule().setLearningRate(0.5);

    // enable batch if using MomentumBackpropagation
    // if (myMlPerceptron.getLearningRule() instanceof MomentumBackpropagation)
    //     ((MomentumBackpropagation) myMlPerceptron.getLearningRule()).setBatchMode(false);

    LearningRule learningRule = myMlPerceptron.getLearningRule();
    learningRule.addListener(this);

    // learn the training set
    System.out.println("Training neural network...");
    myMlPerceptron.learn(trainingSet);

    // test perceptron
    System.out.println("Testing trained neural network");
    testNeuralNetwork(myMlPerceptron, trainingSet);

    // save trained neural network
    myMlPerceptron.save("myMlPerceptron.nnet");

    // load saved neural network
    NeuralNetwork loadedMlPerceptron = NeuralNetwork.createFromFile("myMlPerceptron.nnet");

    // test loaded neural network
    System.out.println("Testing loaded neural network");
    testNeuralNetwork(loadedMlPerceptron, trainingSet);
}