org.neuroph.core.data.DataSetRow Java Examples
The following examples show how to use org.neuroph.core.data.DataSetRow. All snippets are taken from the NeurophFramework project; the original source file for each example is listed in the header above it.
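Before the examples, here is a minimal sketch of the pattern used throughout this page: a DataSetRow pairs an input vector with a desired-output vector, rows are added to a DataSet, and the rows are later iterated to feed a network or compute errors. The values and variable names below are illustrative only.

// Minimal sketch (illustrative values): build a small data set and read its rows back.
DataSet dataSet = new DataSet(2, 1); // 2 inputs, 1 output per row
dataSet.add(new DataSetRow(new double[]{0, 1}, new double[]{1}));
dataSet.add(new DataSetRow(new double[]{1, 1}, new double[]{0}));

for (DataSetRow row : dataSet.getRows()) {
    double[] input = row.getInput();           // input vector of the row
    double[] desired = row.getDesiredOutput(); // target vector of the row
    // feed 'input' to a network and compare its output against 'desired'
}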
Example #1
Source File: MomentumBackPropagationTest.java From NeurophFramework with Apache License 2.0 | 7 votes |
@Test
public void testXorMSE() {
    MultiLayerPerceptron myMlPerceptron = new MultiLayerPerceptron(TransferFunctionType.SIGMOID, 2, 3, 1);
    myMlPerceptron.randomizeWeights(new WeightsRandomizer(new Random(123)));
    myMlPerceptron.setLearningRule(instance);
    myMlPerceptron.learn(xorDataSet);

    MeanSquaredError mse = new MeanSquaredError();
    for (DataSetRow testSetRow : xorDataSet.getRows()) {
        myMlPerceptron.setInput(testSetRow.getInput());
        myMlPerceptron.calculate();
        double[] networkOutput = myMlPerceptron.getOutput();
        mse.addPatternError(networkOutput, testSetRow.getDesiredOutput());
    }
    assertTrue(mse.getTotalError() < maxError);
}
Example #2
Source File: BackPropagationTest.java From NeurophFramework with Apache License 2.0 | 7 votes |
@Test
public void testXorMSE() {
    MultiLayerPerceptron myMlPerceptron = new MultiLayerPerceptron(TransferFunctionType.SIGMOID, 2, 3, 1);
    myMlPerceptron.randomizeWeights(new WeightsRandomizer(new Random(123)));
    myMlPerceptron.setLearningRule(instance);
    myMlPerceptron.learn(xorDataSet);

    MeanSquaredError mse = new MeanSquaredError();
    for (DataSetRow testSetRow : xorDataSet.getRows()) {
        myMlPerceptron.setInput(testSetRow.getInput());
        myMlPerceptron.calculate();
        double[] networkOutput = myMlPerceptron.getOutput();
        mse.addPatternError(networkOutput, testSetRow.getDesiredOutput());
    }
    assertTrue(mse.getTotalError() < maxError);
}
Example #3
Source File: BostonHousePrice.java From NeurophFramework with Apache License 2.0 | 6 votes |
public void evaluate(NeuralNetwork neuralNet, DataSet dataSet) {
    System.out.println("Calculating performance indicators for neural network.");

    MeanSquaredError mse = new MeanSquaredError();
    MeanAbsoluteError mae = new MeanAbsoluteError();

    for (DataSetRow testSetRow : dataSet.getRows()) {
        neuralNet.setInput(testSetRow.getInput());
        neuralNet.calculate();
        double[] networkOutput = neuralNet.getOutput();
        double[] desiredOutput = testSetRow.getDesiredOutput();

        mse.addPatternError(networkOutput, desiredOutput);
        mae.addPatternError(networkOutput, desiredOutput);
    }

    System.out.println("Mean squared error is: " + mse.getTotalError());
    System.out.println("Mean absolute error is: " + mae.getTotalError());
}
Example #4
Source File: ImageRecognitionHelper.java From NeurophFramework with Apache License 2.0 | 6 votes |
/**
 * Creates binary black and white training set for the specified image labels and rgb data;
 * white = 0, black = 1.
 * @param imageLabels image labels
 * @param rgbDataMap map collection of rgb data
 * @return binary black and white training set for the specified image data
 */
public static DataSet createBlackAndWhiteTrainingSet(List<String> imageLabels, Map<String, FractionRgbData> rgbDataMap) throws VectorSizeMismatchException {
    // TODO: Use some binarization image filter to do this; currently it works with averaging RGB values
    int inputCount = rgbDataMap.values().iterator().next().getFlattenedRgbValues().length / 3;
    int outputCount = imageLabels.size();
    DataSet trainingSet = new DataSet(inputCount, outputCount);

    for (Entry<String, FractionRgbData> entry : rgbDataMap.entrySet()) {
        double[] inputRGB = entry.getValue().getFlattenedRgbValues();
        double[] inputBW = FractionRgbData.convertRgbInputToBinaryBlackAndWhite(inputRGB);
        double[] response = createResponse(entry.getKey(), imageLabels);
        trainingSet.add(new DataSetRow(inputBW, response));
    }

    // set labels for output columns
    int inputSize = trainingSet.getInputSize();
    for (int c = 0; c < trainingSet.getOutputSize(); c++) {
        trainingSet.setColumnName(inputSize + c, imageLabels.get(c));
    }

    return trainingSet;
}
Example #5
Source File: BreastCancerSample.java From NeurophFramework with Apache License 2.0 | 6 votes |
public void testNeuralNetwork(NeuralNetwork neuralNet, DataSet testSet) {
    System.out.println("********************** TEST RESULT **********************");
    for (DataSetRow testSetRow : testSet.getRows()) {
        neuralNet.setInput(testSetRow.getInput());
        neuralNet.calculate();

        // get network output
        double[] networkOutput = neuralNet.getOutput();
        int predicted = interpretOutput(networkOutput);

        // get target/desired output
        double[] desiredOutput = testSetRow.getDesiredOutput();
        int target = (int) desiredOutput[0];

        // count predictions
        countPredictions(predicted, target);
    }

    System.out.println("Total cases: " + total + ". ");
    System.out.println("Correctly predicted cases: " + correct);
    System.out.println("Incorrectly predicted cases: " + incorrect);

    double percentTotal = (correct / (double) total) * 100;
    System.out.println("Predicted correctly: " + formatDecimalNumber(percentTotal) + "%. ");
}
Example #6
Source File: ImageRecognitionHelper.java From NeurophFramework with Apache License 2.0 | 6 votes |
/**
 * Creates training set for the specified image labels and rgb data
 * @param imageLabels image labels
 * @param rgbDataMap map collection of rgb data
 * @return training set for the specified image data
 */
public static DataSet createRGBTrainingSet(List<String> imageLabels, Map<String, FractionRgbData> rgbDataMap) {
    int inputCount = rgbDataMap.values().iterator().next().getFlattenedRgbValues().length;
    int outputCount = imageLabels.size();
    DataSet trainingSet = new DataSet(inputCount, outputCount);

    for (Entry<String, FractionRgbData> entry : rgbDataMap.entrySet()) {
        double[] input = entry.getValue().getFlattenedRgbValues();
        double[] response = createResponse(entry.getKey(), imageLabels);
        trainingSet.add(new DataSetRow(input, response));
    }

    // set labels for output columns
    int inputSize = trainingSet.getInputSize();
    for (int c = 0; c < trainingSet.getOutputSize(); c++) {
        trainingSet.setColumnName(inputSize + c, imageLabels.get(c));
    }

    return trainingSet;
}
Example #7
Source File: SwedishAutoInsurance.java From NeurophFramework with Apache License 2.0 | 6 votes |
public void evaluate(NeuralNetwork neuralNet, DataSet dataSet) {
    System.out.println("Calculating performance indicators for neural network.");

    MeanSquaredError mse = new MeanSquaredError();
    MeanAbsoluteError mae = new MeanAbsoluteError();

    for (DataSetRow testSetRow : dataSet.getRows()) {
        neuralNet.setInput(testSetRow.getInput());
        neuralNet.calculate();
        double[] networkOutput = neuralNet.getOutput();
        double[] desiredOutput = testSetRow.getDesiredOutput();

        mse.addPatternError(networkOutput, desiredOutput);
        mae.addPatternError(networkOutput, desiredOutput);
    }

    System.out.println("Mean squared error is: " + mse.getTotalError());
    System.out.println("Mean absolute error is: " + mae.getTotalError());
}
Example #8
Source File: DataSetStatistics.java From NeurophFramework with Apache License 2.0 | 6 votes |
/**
 * @return arithmetic mean for each input in data set
 */
@Deprecated
public double[] inputsMean() {
    double[] mean = new double[dataSet.getInputSize()];

    for (DataSetRow row : dataSet.getRows()) {
        double[] currentInput = row.getInput();
        for (int i = 0; i < dataSet.getInputSize(); i++) {
            mean[i] += currentInput[i];
        }
    }
    for (int i = 0; i < dataSet.getInputSize(); i++) {
        mean[i] /= (double) dataSet.getRows().size();
    }
    return mean;
}
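A short hedged sketch of how these statistics are consumed, mirroring the StatisticsTest examples further down this page (the dataSet variable is assumed to be an already populated DataSet):

// Sketch only: compute per-column input statistics for an existing data set.
DataSetStatistics stats = new DataSetStatistics(dataSet);
double[] means = stats.inputsMean(); // arithmetic mean of each input column
double[] mins = stats.inputsMin();   // minimum of each input column
double[] maxs = stats.inputsMax();   // maximum of each input column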
Example #9
Source File: KohonenLearning.java From NeurophFramework with Apache License 2.0 | 6 votes |
@Override
public void learn(DataSet trainingSet) {
    for (int phase = 0; phase < 2; phase++) {
        for (int k = 0; k < iterations[phase]; k++) {
            Iterator<DataSetRow> iterator = trainingSet.iterator();
            while (iterator.hasNext() && !isStopped()) {
                DataSetRow trainingSetRow = iterator.next();
                learnPattern(trainingSetRow, nR[phase]);
            } // while
            currentIteration = k;
            fireLearningEvent(new LearningEvent(this, LearningEvent.Type.EPOCH_ENDED));
            if (isStopped()) return;
        } // for k
        learningRate = learningRate * 0.5;
    } // for phase
}
Example #10
Source File: DataSetStatistics.java From NeurophFramework with Apache License 2.0 | 6 votes |
/**
 * @param dataSet Neuroph dataset
 * @return minimum value for each variable in data set
 */
@Deprecated
public double[] inputsMin() {
    int inputSize = dataSet.getInputSize();
    double[] minColumnElements = new double[inputSize];

    for (int i = 0; i < inputSize; i++) {
        minColumnElements[i] = Double.MAX_VALUE;
    }

    for (DataSetRow dataSetRow : dataSet.getRows()) {
        double[] input = dataSetRow.getInput();
        for (int i = 0; i < inputSize; i++) {
            minColumnElements[i] = Math.min(minColumnElements[i], input[i]);
        }
    }
    return minColumnElements;
}
Example #11
Source File: PerceptronLearningTest.java From NeurophFramework with Apache License 2.0 | 6 votes |
@SuppressWarnings("unchecked") @Test public void testDataSetMSE() { Perceptron perceptron = new Perceptron(2, 1); perceptron.randomizeWeights(new WeightsRandomizer(new Random(123))); perceptron.setLearningRule(instance); perceptron.learn(dataSet); MeanSquaredError mse = new MeanSquaredError(); for (DataSetRow testSetRow : dataSet.getRows()) { perceptron.setInput(testSetRow.getInput()); perceptron.calculate(); double[] networkOutput = perceptron.getOutput(); mse.addPatternError(new double[]{networkOutput[0]}, new double[]{testSetRow.getDesiredOutput()[0]}); } assertTrue(mse.getTotalError() < maxError); }
Example #12
Source File: TestMatrixMLP.java From NeurophFramework with Apache License 2.0 | 6 votes |
/**
 * Create and run MLP with XOR training set
 */
public static void main(String[] args) {
    // create training set (logical XOR function)
    DataSet trainingSet = new DataSet(2, 1);
    trainingSet.add(new DataSetRow(new double[]{0, 0}, new double[]{0}));
    trainingSet.add(new DataSetRow(new double[]{0, 1}, new double[]{1}));
    trainingSet.add(new DataSetRow(new double[]{1, 0}, new double[]{1}));
    trainingSet.add(new DataSetRow(new double[]{1, 1}, new double[]{0}));

    MultiLayerPerceptron nnet = new MultiLayerPerceptron(TransferFunctionType.TANH, 2, 3, 1);
    MatrixMultiLayerPerceptron mnet = new MatrixMultiLayerPerceptron(nnet);

    System.out.println("Training network...");
    mnet.learn(trainingSet);
    System.out.println("Done training network.");
}
Example #13
Source File: StockTrainingSetImport.java From NeurophFramework with Apache License 2.0 | 6 votes |
/**
 * Creates and returns training set for stock market prediction using the provided data from array
 * @param values an array containing stock data
 * @param inputsCount training element (neural net) inputs count
 * @param outputsCount training element (neural net) outputs count
 * @return training set with stock data
 */
public static DataSet importFromArray(double[] values, int inputsCount, int outputsCount) {
    DataSet trainingSet = new DataSet(inputsCount, outputsCount);
    for (int i = 0; i < values.length - inputsCount; i++) {
        ArrayList<Double> inputs = new ArrayList<Double>();
        for (int j = i; j < i + inputsCount; j++) {
            inputs.add(values[j]);
        }
        ArrayList<Double> outputs = new ArrayList<Double>();
        if (outputsCount > 0 && i + inputsCount + outputsCount <= values.length) {
            for (int j = i + inputsCount; j < i + inputsCount + outputsCount; j++) {
                outputs.add(values[j]);
            }
            if (outputsCount > 0) {
                trainingSet.add(new DataSetRow(inputs, outputs));
            } else {
                trainingSet.add(new DataSetRow(inputs));
            }
        }
    }
    return trainingSet;
}
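A hedged usage sketch of the sliding window defined above: each row takes a run of past values as inputs and the following values as the desired output. The price array and the 5/1 window size below are invented purely for illustration.

// Illustrative only: 5 past values as inputs, the next value as the desired output.
double[] prices = {101.2, 102.5, 101.9, 103.4, 104.0, 103.8, 105.1, 106.0};
DataSet stockTrainingSet = StockTrainingSetImport.importFromArray(prices, 5, 1);
System.out.println("Rows created: " + stockTrainingSet.getRows().size());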
Example #14
Source File: PerceptronSample.java From NeurophFramework with Apache License 2.0 | 6 votes |
/**
 * Runs this sample
 */
public static void main(String args[]) {
    // create training set (logical AND function)
    DataSet trainingSet = new DataSet(2, 1);
    trainingSet.add(new DataSetRow(new double[]{0, 0}, new double[]{0}));
    trainingSet.add(new DataSetRow(new double[]{0, 1}, new double[]{0}));
    trainingSet.add(new DataSetRow(new double[]{1, 0}, new double[]{0}));
    trainingSet.add(new DataSetRow(new double[]{1, 1}, new double[]{1}));

    // create perceptron neural network
    NeuralNetwork myPerceptron = new Perceptron(2, 1);
    // learn the training set
    myPerceptron.learn(trainingSet);

    // test perceptron
    System.out.println("Testing trained perceptron");
    testNeuralNetwork(myPerceptron, trainingSet);

    // save trained perceptron
    myPerceptron.save("mySamplePerceptron.nnet");

    // load saved neural network
    NeuralNetwork loadedPerceptron = NeuralNetwork.load("mySamplePerceptron.nnet");

    // test loaded neural network
    System.out.println("Testing loaded perceptron");
    testNeuralNetwork(loadedPerceptron, trainingSet);
}
Example #15
Source File: SigmoidDeltaRuleTest.java From NeurophFramework with Apache License 2.0 | 6 votes |
@Before
public void setUp() {
    instance = new SigmoidDeltaRule();

    double[] x = new double[] { 12.39999962, 14.30000019, 14.5, 14.89999962, 16.10000038, 16.89999962, 16.5,
            15.39999962, 17, 17.89999962, 18.79999924, 20.29999924, 22.39999962, 19.39999962, 15.5, 16.70000076,
            17.29999924, 18.39999962, 19.20000076, 17.39999962, 19.5, 19.70000076, 21.20000076 };
    double[] y = new double[] { 11.19999981, 12.5, 12.69999981, 13.10000038, 14.10000038, 14.80000019, 14.39999962,
            13.39999962, 14.89999962, 15.60000038, 16.39999962, 17.70000076, 19.60000038, 16.89999962, 14,
            14.60000038, 15.10000038, 16.10000038, 16.79999924, 15.19999981, 17, 17.20000076, 18.60000038 };

    dataSet = new DataSet(1, 1);
    for (int i = 0; i < x.length; i++) {
        dataSet.add(new DataSetRow(new double[] { x[i] }, new double[] { y[i] }));
    }

    Normalizer n = new MaxMinNormalizer(dataSet);
    n.normalize(dataSet);

    maxError = 0.01;
    instance.setMaxError(maxError);
}
Example #16
Source File: BostonHousePrice.java From NeurophFramework with Apache License 2.0 | 6 votes |
public void evaluate(NeuralNetwork neuralNet, DataSet dataSet) {
    System.out.println("Calculating performance indicators for neural network.");

    MeanSquaredError mse = new MeanSquaredError();
    MeanAbsoluteError mae = new MeanAbsoluteError();

    for (DataSetRow testSetRow : dataSet.getRows()) {
        neuralNet.setInput(testSetRow.getInput());
        neuralNet.calculate();
        double[] networkOutput = neuralNet.getOutput();
        double[] desiredOutput = testSetRow.getDesiredOutput();

        mse.addPatternError(networkOutput, desiredOutput);
        mae.addPatternError(networkOutput, desiredOutput);
    }

    System.out.println("Mean squared error is: " + mse.getTotalError());
    System.out.println("Mean absolute error is: " + mae.getTotalError());
}
Example #17
Source File: DigitsRecognition.java From NeurophFramework with Apache License 2.0 | 6 votes |
/**
 * Prints network output for each element from the specified test set.
 *
 * @param neuralNet neural network
 * @param testSet test data set
 */
public static void testNeuralNetwork(NeuralNetwork neuralNet, DataSet testSet) {
    System.out.println("--------------------------------------------------------------------");
    System.out.println("***********************TESTING NEURAL NETWORK***********************");
    for (DataSetRow testSetRow : testSet.getRows()) {
        neuralNet.setInput(testSetRow.getInput());
        neuralNet.calculate();
        int outputIdx = maxOutput(neuralNet.getOutput());

        String[] inputDigit = DigitData.convertDataIntoImage(testSetRow.getInput());
        for (int i = 0; i < inputDigit.length; i++) {
            if (i != inputDigit.length - 1) {
                System.out.println(inputDigit[i]);
            } else {
                System.out.println(inputDigit[i] + "----> " + outputIdx);
            }
        }
        System.out.println("");
    }
}
Example #18
Source File: BinaryDeltaRuleTest.java From NeurophFramework with Apache License 2.0 | 6 votes |
@SuppressWarnings("unchecked") @Test public void testDataSetMSE() { Perceptron perceptron = new Perceptron(2, 1); perceptron.randomizeWeights(new WeightsRandomizer(new Random(123))); perceptron.setLearningRule(instance); perceptron.learn(dataSet); MeanSquaredError mse = new MeanSquaredError(); for (DataSetRow testSetRow : dataSet.getRows()) { perceptron.setInput(testSetRow.getInput()); perceptron.calculate(); double[] networkOutput = perceptron.getOutput(); mse.addPatternError(new double[]{networkOutput[0]}, new double[]{testSetRow.getDesiredOutput()[0]}); } assertTrue(mse.getTotalError() < maxError); }
Example #19
Source File: MomentumBackPropagationTest.java From NeurophFramework with Apache License 2.0 | 6 votes |
@Before
public void setUp() {
    instance = new MomentumBackpropagation();
    instance.setMomentum(0.5);

    xorDataSet = new DataSet(2, 1);
    xorDataSet.add(new DataSetRow(new double[]{0, 0}, new double[]{0}));
    xorDataSet.add(new DataSetRow(new double[]{0, 1}, new double[]{1}));
    xorDataSet.add(new DataSetRow(new double[]{1, 0}, new double[]{1}));
    xorDataSet.add(new DataSetRow(new double[]{1, 1}, new double[]{0}));

    maxError = 0.01;
    instance.setLearningRate(0.5);
    instance.setMaxError(maxError);

    String inputFileName = "src/test/resources/iris_normalized.txt";
    irisDataSet = DataSet.createFromFile(inputFileName, 4, 3, ",", false);
}
Example #20
Source File: LMSTest.java From NeurophFramework with Apache License 2.0 | 6 votes |
@Before
public void setUp() {
    instance = new LMS();

    double[] x = new double[] { 12.39999962, 14.30000019, 14.5, 14.89999962, 16.10000038, 16.89999962, 16.5,
            15.39999962, 17, 17.89999962, 18.79999924, 20.29999924, 22.39999962, 19.39999962, 15.5, 16.70000076,
            17.29999924, 18.39999962, 19.20000076, 17.39999962, 19.5, 19.70000076, 21.20000076 };
    double[] y = new double[] { 11.19999981, 12.5, 12.69999981, 13.10000038, 14.10000038, 14.80000019, 14.39999962,
            13.39999962, 14.89999962, 15.60000038, 16.39999962, 17.70000076, 19.60000038, 16.89999962, 14,
            14.60000038, 15.10000038, 16.10000038, 16.79999924, 15.19999981, 17, 17.20000076, 18.60000038 };

    dataSet = new DataSet(1, 1);
    for (int i = 0; i < x.length; i++) {
        dataSet.add(new DataSetRow(new double[] { x[i] }, new double[] { y[i] }));
    }

    Normalizer n = new MaxMinNormalizer(dataSet);
    n.normalize(dataSet);

    maxError = 0.01;
    instance.setMaxError(maxError);
}
Example #21
Source File: DiabetesSample.java From NeurophFramework with Apache License 2.0 | 6 votes |
public void testNeuralNetwork(NeuralNetwork neuralNet, DataSet testSet) {
    System.out.println("**********************RESULT**********************");
    for (DataSetRow testSetRow : testSet.getRows()) {
        neuralNet.setInput(testSetRow.getInput());
        neuralNet.calculate();

        // get network output
        double[] networkOutput = neuralNet.getOutput();
        int predicted = interpretOutput(networkOutput);

        // get target/desired output
        double[] desiredOutput = testSetRow.getDesiredOutput();
        int target = (int) desiredOutput[0];

        // count predictions
        countPredictions(predicted, target);
    }

    System.out.println("Total cases: " + total + ". ");
    System.out.println("Correctly predicted cases: " + correct);
    System.out.println("Incorrectly predicted cases: " + incorrect);

    double percentTotal = (correct / (double) total) * 100;
    System.out.println("Predicted correctly: " + formatDecimalNumber(percentTotal) + "%. ");
}
Example #22
Source File: StatisticsTest.java From NeurophFramework with Apache License 2.0 | 5 votes |
@Test
public void testMinManyRowManyDifferentValueColumns() {
    double value11 = 2; double value12 = 2; double value13 = 2;
    double value21 = 4; double value22 = 4; double value23 = 4;
    double value31 = 6; double value32 = 6; double value33 = 6;

    double[] firstRowData = new double[]{value11, value12, value13};
    double[] secondRowData = new double[]{value21, value22, value23};
    double[] thirdRowData = new double[]{value31, value32, value33};

    DataSetRow firstRow = createDataRow(firstRowData);
    DataSetRow secondRow = createDataRow(secondRowData);
    DataSetRow thirdRow = createDataRow(thirdRowData);
    DataSet dataSet = createDataSetFromRows(firstRow, secondRow, thirdRow);

    DataSetStatistics stats = new DataSetStatistics(dataSet);
    double[] meanByColumns = stats.inputsMin();

    for (int i = 0; i < meanByColumns.length; i++) {
        assertEquals(2, meanByColumns[i], LARGEST_MEAN_ERROR);
    }
}
Example #23
Source File: StatisticsTest.java From NeurophFramework with Apache License 2.0 | 5 votes |
@Test
public void testMaxManyRowManyDifferentValueColumns() {
    double value11 = 2; double value12 = 2; double value13 = 2;
    double value21 = 4; double value22 = 4; double value23 = 4;
    double value31 = 6; double value32 = 6; double value33 = 6;

    double[] firstRowData = new double[]{value11, value12, value13};
    double[] secondRowData = new double[]{value21, value22, value23};
    double[] thirdRowData = new double[]{value31, value32, value33};

    DataSetRow firstRow = createDataRow(firstRowData);
    DataSetRow secondRow = createDataRow(secondRowData);
    DataSetRow thirdRow = createDataRow(thirdRowData);
    DataSet dataSet = createDataSetFromRows(firstRow, secondRow, thirdRow);

    DataSetStatistics stats = new DataSetStatistics(dataSet);
    double[] meanByColumns = stats.inputsMax();

    for (int i = 0; i < meanByColumns.length; i++) {
        assertEquals(6, meanByColumns[i], LARGEST_MEAN_ERROR);
    }
}
Example #24
Source File: WheatSeeds.java From NeurophFramework with Apache License 2.0 | 5 votes |
public void testNeuralNetwork(NeuralNetwork neuralNet, DataSet testSet) {
    System.out.println("Showing inputs, desired output and neural network output for every row in test set.");

    for (DataSetRow testSetRow : testSet.getRows()) {
        neuralNet.setInput(testSetRow.getInput());
        neuralNet.calculate();
        double[] networkOutput = neuralNet.getOutput();

        System.out.println("Input: " + Arrays.toString(testSetRow.getInput()));
        System.out.println("Output: " + Arrays.toString(networkOutput));
        System.out.println("Desired output" + Arrays.toString(testSetRow.getDesiredOutput()));
    }
}
Example #25
Source File: StatisticsTest.java From NeurophFramework with Apache License 2.0 | 5 votes |
@Test
public void testMinOneRowManyDifferentValueColumns() {
    double value1 = 1; double value2 = 2; double value3 = 3;
    double[] firstRowData = new double[]{value1, value2, value3};

    DataSetRow firstRow = createDataRow(firstRowData);
    DataSet dataSet = createDataSetFromRows(firstRow);

    DataSetStatistics stats = new DataSetStatistics(dataSet);
    double[] maxByColumns = stats.inputsMin();

    for (int i = 0; i < maxByColumns.length; i++) {
        assertEquals(firstRowData[i], maxByColumns[i], LARGEST_MIN_MAX_ERROR);
    }
}
Example #26
Source File: Sonar.java From NeurophFramework with Apache License 2.0 | 5 votes |
public void testNeuralNetwork(NeuralNetwork neuralNet, DataSet testSet) {
    System.out.println("Showing inputs, desired output and neural network output for every row in test set.");

    for (DataSetRow testSetRow : testSet.getRows()) {
        neuralNet.setInput(testSetRow.getInput());
        neuralNet.calculate();
        double[] networkOutput = neuralNet.getOutput();

        System.out.println("Input: " + Arrays.toString(testSetRow.getInput()));
        System.out.println("Output: " + Arrays.toString(networkOutput));
        System.out.println("Desired output" + Arrays.toString(testSetRow.getDesiredOutput()));
    }
}
Example #27
Source File: Abalone.java From NeurophFramework with Apache License 2.0 | 5 votes |
public void testNeuralNetwork(NeuralNetwork neuralNet, DataSet testSet) {
    System.out.println("Showing inputs, desired output and neural network output for every row in test set.");

    for (DataSetRow testSetRow : testSet.getRows()) {
        neuralNet.setInput(testSetRow.getInput());
        neuralNet.calculate();
        double[] networkOutput = neuralNet.getOutput();

        System.out.println("Input: " + Arrays.toString(testSetRow.getInput()));
        System.out.println("Output: " + Arrays.toString(networkOutput));
        System.out.println("Desired output" + Arrays.toString(testSetRow.getDesiredOutput()));
    }
}
Example #28
Source File: StatisticsTest.java From NeurophFramework with Apache License 2.0 | 5 votes |
@Test
public void testMaxOneRowManyDifferentValueColumns() {
    double value1 = 1; double value2 = 2; double value3 = 3;
    double[] firstRowData = new double[]{value1, value2, value3};

    DataSetRow firstRow = createDataRow(firstRowData);
    DataSet dataSet = createDataSetFromRows(firstRow);

    DataSetStatistics stats = new DataSetStatistics(dataSet);
    double[] maxByColumns = stats.inputsMax();

    for (int i = 0; i < maxByColumns.length; i++) {
        assertEquals(firstRowData[i], maxByColumns[i], LARGEST_MIN_MAX_ERROR);
    }
}
Example #29
Source File: StatisticsTest.java From NeurophFramework with Apache License 2.0 | 5 votes |
@Test
public void testMeanManyRowManyDifferentValueColumns() {
    double value11 = 2; double value12 = 2; double value13 = 2;
    double value21 = 4; double value22 = 4; double value23 = 4;
    double value31 = 6; double value32 = 6; double value33 = 6;

    double[] firstRowData = new double[]{value11, value12, value13};
    double[] secondRowData = new double[]{value21, value22, value23};
    double[] thirdRowData = new double[]{value31, value32, value33};

    DataSetRow firstRow = createDataRow(firstRowData);
    DataSetRow secondRow = createDataRow(secondRowData);
    DataSetRow thirdRow = createDataRow(thirdRowData);
    DataSet dataSet = createDataSetFromRows(firstRow, secondRow, thirdRow);

    DataSetStatistics stats = new DataSetStatistics(dataSet);
    double[] meanByColumns = stats.inputsMean();

    for (int i = 0; i < meanByColumns.length; i++) {
        assertEquals(secondRowData[i], meanByColumns[i], LARGEST_MEAN_ERROR);
    }
}
Example #30
Source File: LensesClassificationSample.java From NeurophFramework with Apache License 2.0 | 5 votes |
public void testNeuralNetwork(NeuralNetwork neuralNet, DataSet testSet) {
    for (DataSetRow testSetRow : testSet.getRows()) {
        neuralNet.setInput(testSetRow.getInput());
        neuralNet.calculate();
        double[] networkOutput = neuralNet.getOutput();

        System.out.print("Input: " + Arrays.toString(testSetRow.getInput()));
        System.out.println(" Output: " + Arrays.toString(networkOutput));
    }
}