Java Code Examples for org.neuroph.util.TransferFunctionType#SIGMOID
The following examples show how to use org.neuroph.util.TransferFunctionType#SIGMOID.
Each example links back to the original project and source file.
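Every example below follows the same basic pattern: the enum constant is passed to a network factory, most often the MultiLayerPerceptron constructor, which then builds each layer with the logistic (sigmoid) activation function. As a minimal, self-contained sketch of that pattern (assuming Neuroph 2.x on the classpath; the class name SigmoidDemo and the layer sizes are illustrative, not taken from any of the examples below):

import java.util.Arrays;
import org.neuroph.nnet.MultiLayerPerceptron;
import org.neuroph.util.TransferFunctionType;

public class SigmoidDemo {
    public static void main(String[] args) {
        // 2 inputs, one hidden layer of 3 neurons, 1 output, all using the sigmoid transfer function
        MultiLayerPerceptron net = new MultiLayerPerceptron(TransferFunctionType.SIGMOID, 2, 3, 1);
        net.setInput(1, 0);   // feed one input pattern
        net.calculate();      // forward pass
        System.out.println(Arrays.toString(net.getOutput())); // untrained output, in (0, 1)
    }
}

Because the constructor's vararg lists neurons per layer, new MultiLayerPerceptron(TransferFunctionType.SIGMOID, 2, 3, 1) creates a 2-input, 3-hidden, 1-output network, the same topology the XOR examples below use.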
Example 1
Source File: MomentumBackPropagationTest.java From NeurophFramework with Apache License 2.0
@Test
public void testXorMSE() {
    MultiLayerPerceptron myMlPerceptron = new MultiLayerPerceptron(TransferFunctionType.SIGMOID, 2, 3, 1);
    myMlPerceptron.randomizeWeights(new WeightsRandomizer(new Random(123)));
    myMlPerceptron.setLearningRule(instance);
    myMlPerceptron.learn(xorDataSet);

    // accumulate the mean squared error over all XOR patterns
    MeanSquaredError mse = new MeanSquaredError();
    for (DataSetRow testSetRow : xorDataSet.getRows()) {
        myMlPerceptron.setInput(testSetRow.getInput());
        myMlPerceptron.calculate();
        double[] networkOutput = myMlPerceptron.getOutput();
        mse.addPatternError(networkOutput, testSetRow.getDesiredOutput());
    }
    assertTrue(mse.getTotalError() < maxError);
}
Example 2
Source File: BackPropagationTest.java From NeurophFramework with Apache License 2.0
@Test
public void testXorMSE() {
    MultiLayerPerceptron myMlPerceptron = new MultiLayerPerceptron(TransferFunctionType.SIGMOID, 2, 3, 1);
    myMlPerceptron.randomizeWeights(new WeightsRandomizer(new Random(123)));
    myMlPerceptron.setLearningRule(instance);
    myMlPerceptron.learn(xorDataSet);

    // accumulate the mean squared error over all XOR patterns
    MeanSquaredError mse = new MeanSquaredError();
    for (DataSetRow testSetRow : xorDataSet.getRows()) {
        myMlPerceptron.setInput(testSetRow.getInput());
        myMlPerceptron.calculate();
        double[] networkOutput = myMlPerceptron.getOutput();
        mse.addPatternError(networkOutput, testSetRow.getDesiredOutput());
    }
    assertTrue(mse.getTotalError() < maxError);
}
Example 3
Source File: SunSpots.java From NeurophFramework with Apache License 2.0
public void run() {
    // uncomment the following line to use regular Neuroph (non-flat) processing
    //Neuroph.getInstance().setFlattenNetworks(false);

    // create neural network
    NeuralNetwork network = new MultiLayerPerceptron(TransferFunctionType.SIGMOID, WINDOW_SIZE, 10, 1);
    // normalize training data
    normalizeSunspots(0.1, 0.9);
    network.getLearningRule().addListener(this);

    // create training set
    DataSet trainingSet = generateTrainingData();
    network.learn(trainingSet);
    predict(network);
    Neuroph.getInstance().shutdown();
}
Example 4
Source File: MomentumBackPropagationTest.java From NeurophFramework with Apache License 2.0
@Test
public void testXorMaxError() {
    MultiLayerPerceptron myMlPerceptron = new MultiLayerPerceptron(TransferFunctionType.SIGMOID, 2, 3, 1);
    myMlPerceptron.randomizeWeights(new WeightsRandomizer(new Random(123)));
    myMlPerceptron.setLearningRule(instance);
    myMlPerceptron.learn(xorDataSet);
    assertTrue(instance.getTotalNetworkError() < maxError);
}
Example 5
Source File: BackPropagationTest.java From NeurophFramework with Apache License 2.0
@Test
public void testXorMaxError() {
    MultiLayerPerceptron myMlPerceptron = new MultiLayerPerceptron(TransferFunctionType.SIGMOID, 2, 3, 1);
    myMlPerceptron.randomizeWeights(new WeightsRandomizer(new Random(123)));
    myMlPerceptron.setLearningRule(instance);
    myMlPerceptron.learn(xorDataSet);
    assertTrue(instance.getTotalNetworkError() < maxError);
}
Example 6
Source File: BackpropBenchmarksExample.java From NeurophFramework with Apache License 2.0
public static void main(String[] args) throws IOException {
    BackPropBenchmarks bpb = new BackPropBenchmarks();
    bpb.setNoOfRepetitions(3);

    MultiLayerPerceptron mlp = new MultiLayerPerceptron(TransferFunctionType.SIGMOID, 4, 7, 3);
    DataSet trainingSet = DataSet.createFromFile("iris_data_normalised.txt", 4, 3, ",");

    // generate a grid of training settings from the property ranges below
    TrainingSettingsGenerator generator = new TrainingSettingsGenerator();
    Properties prop = new Properties();
    prop.setProperty(BackpropagationSettings.MIN_LEARNING_RATE, "0.1");
    prop.setProperty(BackpropagationSettings.MAX_LEARNING_RATE, "0.4");
    prop.setProperty(BackpropagationSettings.LEARNING_RATE_STEP, "0.5");
    prop.setProperty(BackpropagationSettings.MIN_HIDDEN_NEURONS, "9");
    prop.setProperty(BackpropagationSettings.MAX_HIDDEN_NEURONS, "10");
    prop.setProperty(BackpropagationSettings.HIDDEN_NEURONS_STEP, "1");
    prop.setProperty(BackpropagationSettings.MOMENTUM, "0.5");
    prop.setProperty(BackpropagationSettings.MAX_ERROR, "0.1");
    prop.setProperty(BackpropagationSettings.MAX_ITERATIONS, "10000");
    prop.setProperty(BackpropagationSettings.BATCH_MODE, "true");
    generator.setSettings(prop);
    List<TrainingSettings> settingsCollection = generator.generateSettings();

    // benchmark plain backpropagation against backpropagation with momentum
    List<Class<? extends AbstractTraining>> trainingTypeCollection = new ArrayList<>();
    trainingTypeCollection.add(BackpropagationTraining.class);
    trainingTypeCollection.add(MomentumTraining.class);

    bpb.startBenchmark(trainingTypeCollection, settingsCollection, trainingSet, mlp);
    bpb.saveResults("C:\\Users\\Mladen\\Desktop\\test123");
}
Example 7
Source File: AbstractTraining.java From NeurophFramework with Apache License 2.0
/**
 * Create instance of training with new neural network
 *
 * @param dataset
 * @param settings
 */
public AbstractTraining(DataSet dataset, TrainingSettings settings) {
    this.dataset = dataset;
    this.settings = settings;
    this.stats = new TrainingStatistics();
    this.neuralNet = new MultiLayerPerceptron(TransferFunctionType.SIGMOID,
            dataset.getInputSize(), settings.getHiddenNeurons(), dataset.getOutputSize());
}
Example 8
Source File: XorResilientPropagationSample.java From NeurophFramework with Apache License 2.0
/**
 * Runs this sample
 */
public void run() {
    // create training set (logical XOR function)
    DataSet trainingSet = new DataSet(2, 1);
    trainingSet.add(new DataSetRow(new double[]{0, 0}, new double[]{0}));
    trainingSet.add(new DataSetRow(new double[]{0, 1}, new double[]{1}));
    trainingSet.add(new DataSetRow(new double[]{1, 0}, new double[]{1}));
    trainingSet.add(new DataSetRow(new double[]{1, 1}, new double[]{0}));

    // create multi layer perceptron
    MultiLayerPerceptron myMlPerceptron = new MultiLayerPerceptron(TransferFunctionType.SIGMOID, 2, 3, 1);
    // set ResilientPropagation learning rule
    myMlPerceptron.setLearningRule(new ResilientPropagation());

    LearningRule learningRule = myMlPerceptron.getLearningRule();
    learningRule.addListener(this);

    // learn the training set
    System.out.println("Training neural network...");
    myMlPerceptron.learn(trainingSet);
    int iterations = ((SupervisedLearning) myMlPerceptron.getLearningRule()).getCurrentIteration();
    System.out.println("Learned in " + iterations + " iterations");

    // test perceptron
    System.out.println("Testing trained neural network");
    testNeuralNetwork(myMlPerceptron, trainingSet);
}
Example 9
Source File: AutoTrainer.java From NeurophFramework with Apache License 2.0
/**
 * Creates an AutoTrainer; SIGMOID is the default transfer function.
 */
public AutoTrainer() {
    trainingSettingsList = new ArrayList<>();
    results = new ArrayList<>();
    transferFunction = TransferFunctionType.SIGMOID;
}
Example 10
Source File: XorMultiLayerPerceptronSample.java From NeurophFramework with Apache License 2.0
/**
 * Runs this sample
 */
public void run() {
    // create training set (logical XOR function)
    DataSet trainingSet = new DataSet(2, 1);
    trainingSet.add(new DataSetRow(new double[]{0, 0}, new double[]{0}));
    trainingSet.add(new DataSetRow(new double[]{0, 1}, new double[]{1}));
    trainingSet.add(new DataSetRow(new double[]{1, 0}, new double[]{1}));
    trainingSet.add(new DataSetRow(new double[]{1, 1}, new double[]{0}));

    // create multi layer perceptron
    MultiLayerPerceptron myMlPerceptron = new MultiLayerPerceptron(TransferFunctionType.SIGMOID, 2, 3, 1);
    myMlPerceptron.randomizeWeights(new WeightsRandomizer(new Random(123)));
    System.out.println(Arrays.toString(myMlPerceptron.getWeights()));

    myMlPerceptron.setLearningRule(new BackPropagation());
    myMlPerceptron.getLearningRule().setLearningRate(0.5);

    // enable batch if using MomentumBackpropagation
    // if (myMlPerceptron.getLearningRule() instanceof MomentumBackpropagation)
    //     ((MomentumBackpropagation) myMlPerceptron.getLearningRule()).setBatchMode(false);

    LearningRule learningRule = myMlPerceptron.getLearningRule();
    learningRule.addListener(this);

    // learn the training set
    System.out.println("Training neural network...");
    myMlPerceptron.learn(trainingSet);

    // test perceptron
    System.out.println("Testing trained neural network");
    testNeuralNetwork(myMlPerceptron, trainingSet);

    // save trained neural network
    myMlPerceptron.save("myMlPerceptron.nnet");

    // load saved neural network
    NeuralNetwork loadedMlPerceptron = NeuralNetwork.createFromFile("myMlPerceptron.nnet");

    // test loaded neural network
    System.out.println("Testing loaded neural network");
    testNeuralNetwork(loadedMlPerceptron, trainingSet);
}
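Examples 8 and 10 call a testNeuralNetwork helper that is not part of this listing; the samples define it elsewhere in the same class. Based on the evaluation loop in Examples 1 and 2, a minimal sketch of such a helper might look like the following (the method body is an assumption for illustration, not the samples' actual code):

// hypothetical helper: print the network's output for every row in the data set
public static void testNeuralNetwork(NeuralNetwork<?> neuralNet, DataSet testSet) {
    for (DataSetRow testSetRow : testSet.getRows()) {
        neuralNet.setInput(testSetRow.getInput());   // load one input pattern
        neuralNet.calculate();                       // forward pass
        System.out.println("Input: " + Arrays.toString(testSetRow.getInput())
                + " Output: " + Arrays.toString(neuralNet.getOutput()));
    }
}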
Example 11
Source File: NetworkcreatorController.java From FakeImageDetection with GNU General Public License v3.0
@FXML
private void saveNeuralNet(ActionEvent event) {
    String neuralNetLbl = neuralNetLabel.getText();
    Dimension samplingDimension = new Dimension(Integer.parseInt(width.getText()), Integer.parseInt(height.getText()));

    // map the selected combo-box entry to a ColorMode
    ColorMode mode;
    switch (colorMode.getSelectionModel().getSelectedItem()) {
        case "COLOR_RGB":
            mode = ColorMode.COLOR_RGB;
            break;
        case "COLOR_HSL":
            mode = ColorMode.COLOR_HSL;
            break;
        case "BLACK_AND_WHITE":
            mode = ColorMode.COLOR_RGB; // note: the original source falls back to RGB here
            break;
        default:
            mode = ColorMode.COLOR_RGB;
            break;
    }

    // map the selected combo-box entry to a TransferFunctionType
    TransferFunctionType tFunction;
    switch (transferFunction.getSelectionModel().getSelectedItem()) {
        case "LINEAR":
            tFunction = TransferFunctionType.LINEAR;
            break;
        case "RAMP":
            tFunction = TransferFunctionType.RAMP;
            break;
        case "STEP":
            tFunction = TransferFunctionType.STEP;
            break;
        case "SIGMOID":
            tFunction = TransferFunctionType.SIGMOID;
            break;
        case "TANH":
            tFunction = TransferFunctionType.TANH;
            break;
        case "GAUSSIAN":
            tFunction = TransferFunctionType.GAUSSIAN;
            break;
        case "TRAPEZOID":
            tFunction = TransferFunctionType.TRAPEZOID;
            break;
        case "SGN":
            tFunction = TransferFunctionType.SGN;
            break;
        case "SIN":
            tFunction = TransferFunctionType.SIN;
            break;
        case "LOG":
            tFunction = TransferFunctionType.LOG;
            break;
        default:
            tFunction = TransferFunctionType.GAUSSIAN;
            break;
    }

    // parse comma-separated neuron labels and counts
    ArrayList<String> neuronLabels = new ArrayList<>(Arrays.asList(neuronLabelList.getText().split(",")));
    ArrayList<Integer> neuronCounts = new ArrayList<>();
    for (String neuronCount : neuronCountList.getText().split(",")) {
        neuronCounts.add(Integer.parseInt(neuronCount.replaceAll(" ", "")));
        System.out.println("neuronCounts = " + neuronCount);
    }

    // show file save dialog
    FileChooser fileChooser = new FileChooser();
    fileChooser.setTitle("Save Neural Network");
    File file = fileChooser.showSaveDialog(rootPane.getScene().getWindow());
    if (file == null) {
        Calert.showAlert("Not a valid File", "Select target again", Alert.AlertType.ERROR);
        return;
    }

    // build the network on a background thread
    MLPNetworkMaker maker = new MLPNetworkMaker(neuralNetLbl, samplingDimension, mode, neuronLabels, neuronCounts, tFunction, file.getAbsolutePath());
    maker.setListener(this);
    Thread nnetCreator = new Thread(maker);
    nnetCreator.start();
    loadingSpinner.setVisible(true);
}