org.nd4j.linalg.learning.config.Sgd Java Examples
The following examples show how to use org.nd4j.linalg.learning.config.Sgd, ND4J's plain stochastic gradient descent updater, as it appears in real project code. Each example is reproduced from the source file named above it.
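Before the examples, here is a minimal, self-contained sketch of the pattern they all share: Sgd implements ND4J's IUpdater interface, is constructed with a learning rate (or with no arguments for the default), and is passed to a DL4J configuration via updater(...). The network shape and hyperparameter values below are arbitrary illustration values, not taken from any of the examples that follow.

import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.deeplearning4j.nn.weights.WeightInit;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.learning.config.Sgd;
import org.nd4j.linalg.lossfunctions.LossFunctions;

public class SgdUsageSketch {
    public static void main(String[] args) {
        // Sgd(learningRate): plain stochastic gradient descent, no momentum or adaptive terms.
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .seed(12345)
                .updater(new Sgd(0.1))          // network-wide updater; layers may override it
                .weightInit(WeightInit.XAVIER)
                .list()
                .layer(0, new DenseLayer.Builder().nIn(4).nOut(10).activation(Activation.TANH).build())
                .layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                        .nIn(10).nOut(3).activation(Activation.SOFTMAX).build())
                .build();

        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init();
    }
}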
Example #1
Source File: BackPropMLPTest.java From deeplearning4j with Apache License 2.0
/** Very simple back-prop config set up for Iris.
 * Learning Rate = 0.1
 * No regularization, no Adagrad, no momentum etc. One iteration.
 */
private static MultiLayerConfiguration getIrisMLPSimpleConfig(int[] hiddenLayerSizes, Activation activationFunction) {
    NeuralNetConfiguration.ListBuilder lb = new NeuralNetConfiguration.Builder().updater(new Sgd(0.1))
            .seed(12345L).list();

    for (int i = 0; i < hiddenLayerSizes.length; i++) {
        int nIn = (i == 0 ? 4 : hiddenLayerSizes[i - 1]);
        lb.layer(i, new DenseLayer.Builder().nIn(nIn).nOut(hiddenLayerSizes[i]).weightInit(WeightInit.XAVIER)
                .activation(activationFunction).build());
    }

    lb.layer(hiddenLayerSizes.length, new OutputLayer.Builder(LossFunction.MCXENT)
            .nIn(hiddenLayerSizes[hiddenLayerSizes.length - 1])
            .nOut(3).weightInit(WeightInit.XAVIER)
            .activation(activationFunction.equals(Activation.IDENTITY) ? Activation.IDENTITY : Activation.SOFTMAX)
            .build());

    return lb.build();
}
Example #2
Source File: ModelGuesserTest.java From deeplearning4j with Apache License 2.0
private MultiLayerNetwork getNetwork() {
    int nIn = 5;
    int nOut = 6;

    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345).l1(0.01).l2(0.01)
            .updater(new Sgd(0.1)).activation(Activation.TANH).weightInit(WeightInit.XAVIER).list()
            .layer(0, new DenseLayer.Builder().nIn(nIn).nOut(20).build())
            .layer(1, new DenseLayer.Builder().nIn(20).nOut(30).build())
            .layer(2, new OutputLayer.Builder()
                    .lossFunction(LossFunctions.LossFunction.MSE).nIn(30).nOut(nOut).build())
            .build();

    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.init();
    return net;
}
Example #3
Source File: TestFrozenLayers.java From deeplearning4j with Apache License 2.0
public static ComputationGraph getOriginalGraph(int seed){
    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder()
            .seed(seed)
            .weightInit(WeightInit.XAVIER)
            .activation(Activation.TANH)
            .convolutionMode(ConvolutionMode.Same)
            .updater(new Sgd(0.3))
            .graphBuilder()
            .addInputs("in")
            .layer("0", new ConvolutionLayer.Builder().nOut(3).kernelSize(2,2).stride(1,1).build(), "in")
            .layer("1", new SubsamplingLayer.Builder().kernelSize(2,2).stride(1,1).build(), "0")
            .layer("2", new ConvolutionLayer.Builder().nIn(3).nOut(3).kernelSize(2,2).stride(1,1).build(), "1")
            .layer("3", new DenseLayer.Builder().nOut(64).build(), "2")
            .layer("4", new DenseLayer.Builder().nIn(64).nOut(64).build(), "3")
            .layer("5", new OutputLayer.Builder().nIn(64).nOut(10).lossFunction(LossFunctions.LossFunction.MSE).build(), "4")
            .setOutputs("5")
            .setInputTypes(InputType.convolutionalFlat(28,28,1))
            .build();

    ComputationGraph net = new ComputationGraph(conf);
    net.init();
    return net;
}
Example #4
Source File: LayerConfigValidationTest.java From deeplearning4j with Apache License 2.0
@Test
public void testCompGraphNullLayer() {
    ComputationGraphConfiguration.GraphBuilder gb = new NeuralNetConfiguration.Builder()
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).updater(new Sgd(0.01))
            .seed(42).miniBatch(false).l1(0.2).l2(0.2)
            /* Graph Builder */
            .updater(Updater.RMSPROP).graphBuilder().addInputs("in")
            .addLayer("L" + 1, new GravesLSTM.Builder().nIn(20).updater(Updater.RMSPROP).nOut(10)
                    .weightInit(WeightInit.XAVIER)
                    .dropOut(0.4).l1(0.3).activation(Activation.SIGMOID).build(), "in")
            .addLayer("output", new RnnOutputLayer.Builder().nIn(20).nOut(10).activation(Activation.SOFTMAX)
                    .weightInit(WeightInit.RELU_UNIFORM).build(), "L" + 1)
            .setOutputs("output");
    ComputationGraphConfiguration conf = gb.build();
    ComputationGraph cg = new ComputationGraph(conf);
    cg.init();
}
Example #5
Source File: TestMultiLayerSpace.java From deeplearning4j with Apache License 2.0
@Test
public void testILossFunctionGetsSet() {
    ILossFunction lossFunction = new LossMCXENT(Nd4j.create(new float[] {1f, 2f}, new long[]{1,2}));

    MultiLayerConfiguration expected =
            new NeuralNetConfiguration.Builder().updater(new Sgd(0.005)).seed(12345).list()
                    .layer(0, new DenseLayer.Builder().nIn(10).nOut(10).build())
                    .layer(1, new DenseLayer.Builder().nIn(10).nOut(10).build())
                    .layer(2, new OutputLayer.Builder().lossFunction(lossFunction)
                            .activation(Activation.SOFTMAX).nIn(10).nOut(5).build())
                    .build();

    MultiLayerSpace mls = new MultiLayerSpace.Builder().updater(new Sgd(0.005)).seed(12345)
            .addLayer(new DenseLayerSpace.Builder().nIn(10).nOut(10).build(), new FixedValue<>(2)) //2 identical layers
            .addLayer(new OutputLayerSpace.Builder().iLossFunction(lossFunction).activation(Activation.SOFTMAX).nIn(10).nOut(5).build())
            .build();

    int nParams = mls.numParameters();
    assertEquals(0, nParams);

    MultiLayerConfiguration conf = mls.getValue(new double[0]).getMultiLayerConfiguration();

    assertEquals(expected, conf);
}
Example #6
Source File: TestMultiLayerSpace.java From deeplearning4j with Apache License 2.0
@Test
public void testGlobalPoolingBasic() {
    MultiLayerConfiguration expected =
            new NeuralNetConfiguration.Builder().updater(new Sgd(0.005)).seed(12345).list()
                    .layer(0, new GravesLSTM.Builder().nIn(10).nOut(10).build())
                    .layer(1, new GlobalPoolingLayer.Builder().poolingType(PoolingType.SUM).pnorm(7).build())
                    .layer(2, new OutputLayer.Builder().lossFunction(LossFunction.MCXENT).activation(Activation.SOFTMAX).nIn(10).nOut(5).build())
                    .build();

    MultiLayerSpace mls = new MultiLayerSpace.Builder().updater(new Sgd(0.005)).seed(12345)
            .addLayer(new GravesLSTMLayerSpace.Builder().nIn(10).nOut(10).build())
            .addLayer(new GlobalPoolingLayerSpace.Builder().poolingType(PoolingType.SUM)
                    .pNorm(7).build())
            .addLayer(new OutputLayerSpace.Builder().lossFunction(LossFunction.MCXENT)
                    .activation(Activation.SOFTMAX)
                    .nIn(10).nOut(5).build())
            .build();

    int nParams = mls.numParameters();
    assertEquals(0, nParams);

    MultiLayerConfiguration conf = mls.getValue(new double[0]).getMultiLayerConfiguration();

    assertEquals(expected, conf);
}
Example #7
Source File: SameDiffOutputTest.java From deeplearning4j with Apache License 2.0
@Test
public void outputTest(){
    DataSet data = new DataSet(Nd4j.zeros(10, 10), Nd4j.zeros(10, 10));

    SameDiff sd = SameDiff.create();
    SDVariable in = sd.placeHolder("input", DataType.FLOAT, 10, 10);
    SDVariable out = in.add("out", 2);

    TrainingConfig conf = new TrainingConfig.Builder()
            .l2(1e-4)
            .updater(new Sgd(3e-1))
            .dataSetFeatureMapping("input")
            .dataSetLabelMapping()
            .build();

    sd.setTrainingConfig(conf);

    INDArray output = sd.output(data, "out").get("out");

    assertTrue("output != input + 2", output.equalsWithEps(
            Nd4j.zeros(10, 10).add(2).castTo(DataType.FLOAT), 0.0001));
}
Example #8
Source File: DenseTest.java From deeplearning4j with Apache License 2.0
private static MultiLayerNetwork getDenseMLNConfig(boolean backprop, boolean pretrain) {
    int numInputs = 4;
    int outputNum = 3;
    long seed = 6;

    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().seed(seed)
            .updater(new Sgd(1e-3)).l1(0.3).l2(1e-3).list()
            .layer(0, new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nIn(numInputs).nOut(3)
                    .activation(Activation.TANH).weightInit(WeightInit.XAVIER).build())
            .layer(1, new org.deeplearning4j.nn.conf.layers.DenseLayer.Builder().nIn(3).nOut(2)
                    .activation(Activation.TANH).weightInit(WeightInit.XAVIER).build())
            .layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                    .weightInit(WeightInit.XAVIER).nIn(2).nOut(outputNum).activation(Activation.SOFTMAX).build())
            .build();

    MultiLayerNetwork model = new MultiLayerNetwork(conf);
    model.init();
    return model;
}
Example #9
Source File: ModelSerializerTest.java From deeplearning4j with Apache License 2.0
@Test
public void testWriteMLNModel() throws Exception {
    int nIn = 5;
    int nOut = 6;

    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345).l1(0.01)
            .l2(0.01).updater(new Sgd(0.1)).activation(Activation.TANH).weightInit(WeightInit.XAVIER).list()
            .layer(0, new DenseLayer.Builder().nIn(nIn).nOut(20).build())
            .layer(1, new DenseLayer.Builder().nIn(20).nOut(30).build())
            .layer(2, new OutputLayer.Builder()
                    .lossFunction(LossFunctions.LossFunction.MSE).nIn(30).nOut(nOut).build())
            .build();

    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.init();

    File tempFile = tempDir.newFile();

    ModelSerializer.writeModel(net, tempFile, true);

    MultiLayerNetwork network = ModelSerializer.restoreMultiLayerNetwork(tempFile);

    assertEquals(network.getLayerWiseConfigurations().toJson(), net.getLayerWiseConfigurations().toJson());
    assertEquals(net.params(), network.params());
    assertEquals(net.getUpdater().getStateViewArray(), network.getUpdater().getStateViewArray());
}
Example #10
Source File: TestCustomActivation.java From deeplearning4j with Apache License 2.0
@Test
public void testCustomActivationFn() {
    //Second: let's create a MultiLayerConfiguration with one, and check JSON and YAML config actually works...

    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().updater(new Sgd(0.1)).list()
            .layer(0, new DenseLayer.Builder().nIn(10).nOut(10).activation(new CustomActivation()).build())
            .layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX).nIn(10).nOut(10).build())
            .build();

    String json = conf.toJson();
    String yaml = conf.toYaml();

    // System.out.println(json);

    MultiLayerConfiguration confFromJson = MultiLayerConfiguration.fromJson(json);
    assertEquals(conf, confFromJson);

    MultiLayerConfiguration confFromYaml = MultiLayerConfiguration.fromYaml(yaml);
    assertEquals(conf, confFromYaml);
}
Example #11
Source File: OutputLayerTest.java From deeplearning4j with Apache License 2.0
@Test
public void testSetParams() {
    NeuralNetConfiguration conf = new NeuralNetConfiguration.Builder()
            .optimizationAlgo(OptimizationAlgorithm.LINE_GRADIENT_DESCENT)
            .updater(new Sgd(1e-1))
            .layer(new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder().nIn(4).nOut(3)
                    .weightInit(WeightInit.ZERO).activation(Activation.SOFTMAX)
                    .lossFunction(LossFunctions.LossFunction.MCXENT).build())
            .build();

    long numParams = conf.getLayer().initializer().numParams(conf);
    INDArray params = Nd4j.create(1, numParams);
    OutputLayer l = (OutputLayer) conf.getLayer().instantiate(conf,
            Collections.<TrainingListener>singletonList(new ScoreIterationListener(1)), 0, params, true, params.dataType());
    params = l.params();
    l.setParams(params);
    assertEquals(params, l.params());
}
Example #12
Source File: ModelSerializerTest.java From deeplearning4j with Apache License 2.0
@Test
public void testWriteCGModel() throws Exception {
    ComputationGraphConfiguration config = new NeuralNetConfiguration.Builder()
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).updater(new Sgd(0.1))
            .graphBuilder().addInputs("in")
            .addLayer("dense", new DenseLayer.Builder().nIn(4).nOut(2).build(), "in")
            .addLayer("out", new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).nIn(2).nOut(3)
                    .activation(Activation.SOFTMAX).build(), "dense")
            .setOutputs("out").build();

    ComputationGraph cg = new ComputationGraph(config);
    cg.init();

    File tempFile = tempDir.newFile();

    ModelSerializer.writeModel(cg, tempFile, true);

    ComputationGraph network = ModelSerializer.restoreComputationGraph(tempFile);

    assertEquals(network.getConfiguration().toJson(), cg.getConfiguration().toJson());
    assertEquals(cg.params(), network.params());
    assertEquals(cg.getUpdater().getStateViewArray(), network.getUpdater().getStateViewArray());
}
Example #13
Source File: GraphTestCase.java From jstarcraft-ai with Apache License 2.0
private ComputationGraph getOldFunction() {
    NeuralNetConfiguration.Builder netBuilder = new NeuralNetConfiguration.Builder();
    // Set the random seed
    netBuilder.seed(6);
    netBuilder.setL1(l1Regularization);
    netBuilder.setL1Bias(l1Regularization);
    netBuilder.setL2(l2Regularization);
    netBuilder.setL2Bias(l2Regularization);
    netBuilder.weightInit(WeightInit.XAVIER_UNIFORM);
    netBuilder.updater(new Sgd(learnRatio)).optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT);

    GraphBuilder graphBuilder = netBuilder.graphBuilder();
    graphBuilder.addInputs("leftInput", "rightInput");
    graphBuilder.addLayer("leftEmbed", new EmbeddingLayer.Builder().nIn(5).nOut(5).hasBias(true).activation(Activation.IDENTITY).build(), "leftInput");
    graphBuilder.addLayer("rightEmbed", new EmbeddingLayer.Builder().nIn(5).nOut(5).hasBias(true).activation(Activation.IDENTITY).build(), "rightInput");
    graphBuilder.addVertex("embed", new MergeVertex(), "leftEmbed", "rightEmbed");
    graphBuilder.addLayer("output", new OutputLayer.Builder(LossFunctions.LossFunction.MSE).activation(Activation.IDENTITY).nIn(10).nOut(1).build(), "embed");
    graphBuilder.setOutputs("output");

    ComputationGraphConfiguration configuration = graphBuilder.build();
    ComputationGraph graph = new ComputationGraph(configuration);
    graph.init();
    return graph;
}
Example #14
Source File: TestEarlyStoppingSparkCompGraph.java From deeplearning4j with Apache License 2.0
@Test
public void testBadTuning() {
    //Test poor tuning (high LR): should terminate on MaxScoreIterationTerminationCondition
    Nd4j.getRandom().setSeed(12345);
    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345)
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .updater(new Sgd(2.0)) //Intentionally huge LR
            .weightInit(WeightInit.XAVIER).graphBuilder().addInputs("in")
            .addLayer("0", new OutputLayer.Builder().nIn(4).nOut(3).activation(Activation.IDENTITY)
                    .lossFunction(LossFunctions.LossFunction.MSE).build(), "in")
            .setOutputs("0").build();
    ComputationGraph net = new ComputationGraph(conf);
    net.setListeners(new ScoreIterationListener(5));

    JavaRDD<DataSet> irisData = getIris();
    EarlyStoppingModelSaver<ComputationGraph> saver = new InMemoryModelSaver<>();
    EarlyStoppingConfiguration<ComputationGraph> esConf =
            new EarlyStoppingConfiguration.Builder<ComputationGraph>()
                    .epochTerminationConditions(new MaxEpochsTerminationCondition(5000))
                    .iterationTerminationConditions(new MaxTimeIterationTerminationCondition(2, TimeUnit.MINUTES),
                            new MaxScoreIterationTerminationCondition(7.5)) //Initial score is ~2.5
                    .scoreCalculator(new SparkLossCalculatorComputationGraph(
                            irisData.map(new DataSetToMultiDataSetFn()), true, sc.sc()))
                    .modelSaver(saver).build();

    TrainingMaster tm = new ParameterAveragingTrainingMaster(true, numExecutors(), 1, 10, 1, 0);

    IEarlyStoppingTrainer<ComputationGraph> trainer = new SparkEarlyStoppingGraphTrainer(getContext().sc(), tm,
            esConf, net, irisData.map(new DataSetToMultiDataSetFn()));
    EarlyStoppingResult result = trainer.fit();

    assertTrue(result.getTotalEpochs() < 5);
    assertEquals(EarlyStoppingResult.TerminationReason.IterationTerminationCondition,
            result.getTerminationReason());
    String expDetails = new MaxScoreIterationTerminationCondition(7.5).toString();
    assertEquals(expDetails, result.getTerminationDetails());
}
Example #15
Source File: TestEarlyStopping.java From deeplearning4j with Apache License 2.0
@Test
public void testEarlyStoppingListeners() {
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
            .updater(new Sgd(0.001)).weightInit(WeightInit.XAVIER).list()
            .layer(0, new OutputLayer.Builder().nIn(4).nOut(3)
                    .activation(Activation.SOFTMAX)
                    .lossFunction(LossFunctions.LossFunction.MCXENT).build())
            .build();
    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    TestListener tl = new TestListener();
    net.setListeners(tl);

    DataSetIterator irisIter = new IrisDataSetIterator(50, 150);
    EarlyStoppingModelSaver<MultiLayerNetwork> saver = new InMemoryModelSaver<>();
    EarlyStoppingConfiguration<MultiLayerNetwork> esConf =
            new EarlyStoppingConfiguration.Builder<MultiLayerNetwork>()
                    .epochTerminationConditions(new MaxEpochsTerminationCondition(5))
                    .iterationTerminationConditions(
                            new MaxTimeIterationTerminationCondition(1, TimeUnit.MINUTES))
                    .scoreCalculator(new DataSetLossCalculator(irisIter, true)).modelSaver(saver)
                    .build();

    IEarlyStoppingTrainer<MultiLayerNetwork> trainer = new EarlyStoppingTrainer(esConf, net, irisIter);
    EarlyStoppingResult<MultiLayerNetwork> result = trainer.fit();

    assertEquals(5, tl.countEpochStart);
    assertEquals(5, tl.countEpochEnd);
    assertEquals(5 * 150/50, tl.iterCount);

    assertEquals(4, tl.maxEpochStart);
    assertEquals(4, tl.maxEpochEnd);
}
Example #16
Source File: TestEarlyStopping.java From deeplearning4j with Apache License 2.0
@Test
public void testListeners() {
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .updater(new Sgd(0.001)).weightInit(WeightInit.XAVIER).list()
            .layer(0, new OutputLayer.Builder().nIn(4).nOut(3)
                    .activation(Activation.SOFTMAX)
                    .lossFunction(LossFunctions.LossFunction.MCXENT).build())
            .build();
    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.setListeners(new ScoreIterationListener(1));

    DataSetIterator irisIter = new IrisDataSetIterator(150, 150);
    EarlyStoppingModelSaver<MultiLayerNetwork> saver = new InMemoryModelSaver<>();
    EarlyStoppingConfiguration<MultiLayerNetwork> esConf =
            new EarlyStoppingConfiguration.Builder<MultiLayerNetwork>()
                    .epochTerminationConditions(new MaxEpochsTerminationCondition(5))
                    .iterationTerminationConditions(
                            new MaxTimeIterationTerminationCondition(1, TimeUnit.MINUTES))
                    .scoreCalculator(new DataSetLossCalculator(irisIter, true)).modelSaver(saver)
                    .build();

    LoggingEarlyStoppingListener listener = new LoggingEarlyStoppingListener();

    IEarlyStoppingTrainer trainer = new EarlyStoppingTrainer(esConf, net, irisIter, listener);

    trainer.fit();

    assertEquals(1, listener.onStartCallCount);
    assertEquals(5, listener.onEpochCallCount);
    assertEquals(1, listener.onCompletionCallCount);
}
Example #17
Source File: TestEarlyStoppingCompGraph.java From deeplearning4j with Apache License 2.0
@Test
public void testBadTuning() {
    //Test poor tuning (high LR): should terminate on MaxScoreIterationTerminationCondition
    Nd4j.getRandom().setSeed(12345);
    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345)
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .updater(new Sgd(5.0)) //Intentionally huge LR
            .weightInit(WeightInit.XAVIER).graphBuilder().addInputs("in")
            .addLayer("0", new OutputLayer.Builder().nIn(4).nOut(3).activation(Activation.SOFTMAX)
                    .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "in")
            .setOutputs("0").build();
    ComputationGraph net = new ComputationGraph(conf);
    net.setListeners(new ScoreIterationListener(1));

    DataSetIterator irisIter = new IrisDataSetIterator(150, 150);
    EarlyStoppingModelSaver<ComputationGraph> saver = new InMemoryModelSaver<>();
    EarlyStoppingConfiguration<ComputationGraph> esConf =
            new EarlyStoppingConfiguration.Builder<ComputationGraph>()
                    .epochTerminationConditions(new MaxEpochsTerminationCondition(5000))
                    .iterationTerminationConditions(new MaxTimeIterationTerminationCondition(1, TimeUnit.MINUTES),
                            new MaxScoreIterationTerminationCondition(10)) //Initial score is ~2.5
                    .scoreCalculator(new DataSetLossCalculatorCG(irisIter, true)).modelSaver(saver).build();

    IEarlyStoppingTrainer trainer = new EarlyStoppingGraphTrainer(esConf, net, irisIter);
    EarlyStoppingResult result = trainer.fit();

    assertTrue(result.getTotalEpochs() < 5);
    assertEquals(EarlyStoppingResult.TerminationReason.IterationTerminationCondition,
            result.getTerminationReason());
    String expDetails = new MaxScoreIterationTerminationCondition(10).toString();
    assertEquals(expDetails, result.getTerminationDetails());

    assertEquals(0, result.getBestModelEpoch());
    assertNotNull(result.getBestModel());
}
Example #18
Source File: TestComputationGraphNetwork.java From deeplearning4j with Apache License 2.0
@Test(timeout = 300000)
public void testIrisFitMultiDataSetIterator() throws Exception {

    RecordReader rr = new CSVRecordReader(0, ',');
    rr.initialize(new FileSplit(Resources.asFile("iris.txt")));

    MultiDataSetIterator iter = new RecordReaderMultiDataSetIterator.Builder(10).addReader("iris", rr)
            .addInput("iris", 0, 3).addOutputOneHot("iris", 4, 3).build();

    ComputationGraphConfiguration config = new NeuralNetConfiguration.Builder()
            .updater(new Sgd(0.1))
            .graphBuilder().addInputs("in")
            .addLayer("dense", new DenseLayer.Builder().nIn(4).nOut(2).build(), "in")
            .addLayer("out", new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX).nIn(2).nOut(3)
                    .build(), "dense")
            .setOutputs("out").build();

    ComputationGraph cg = new ComputationGraph(config);
    cg.init();

    cg.fit(iter);

    rr.reset();
    iter = new RecordReaderMultiDataSetIterator.Builder(10).addReader("iris", rr).addInput("iris", 0, 3)
            .addOutputOneHot("iris", 4, 3).build();
    while (iter.hasNext()) {
        cg.fit(iter.next());
    }
}
Example #19
Source File: NeuralNetConfigurationTest.java From deeplearning4j with Apache License 2.0
@Test
public void testLayerPretrainConfig() {
    boolean pretrain = true;

    VariationalAutoencoder layer = new VariationalAutoencoder.Builder()
            .nIn(10).nOut(5).updater(new Sgd(1e-1))
            .lossFunction(LossFunctions.LossFunction.KL_DIVERGENCE).build();

    NeuralNetConfiguration conf = new NeuralNetConfiguration.Builder().seed(42).layer(layer).build();
}
Example #20
Source File: TestMultiLayerSpace.java From deeplearning4j with Apache License 2.0
@Test
public void testDropout2(){

    MultiLayerSpace mls = new MultiLayerSpace.Builder().updater(new Sgd(0.005)).seed(12345)
            .addLayer(new ConvolutionLayerSpace.Builder().nOut(2)
                    .dropOut(new ContinuousParameterSpace(0.4,0.6))
                    .build())
            .addLayer(new DropoutLayerSpace.Builder().dropOut(new ContinuousParameterSpace(0.4,0.6)).build())
            .addLayer(new OutputLayerSpace.Builder().activation(Activation.SOFTMAX).nIn(10).nOut(5).build())
            .setInputType(InputType.convolutional(28, 28, 1))
            .build();

    int nParams = mls.numParameters();
    List<ParameterSpace> l = LeafUtils.getUniqueObjects(mls.collectLeaves());
    int x=0;
    for( ParameterSpace p : l){
        int n = p.numParameters();
        int[] arr = new int[n];
        for(int i=0; i<arr.length; i++ ){
            arr[i] = x++;
        }
        p.setIndices(arr);
    }

    MultiLayerConfiguration conf = mls.getValue(new double[nParams]).getMultiLayerConfiguration();
}
Example #21
Source File: NeuralNetConfigurationTest.java From deeplearning4j with Apache License 2.0
@Test
public void testLearningRateByParam() {
    double lr = 0.01;
    double biasLr = 0.02;
    int[] nIns = {4, 3, 3};
    int[] nOuts = {3, 3, 3};
    int oldScore = 1;
    int newScore = 1;
    int iteration = 3;
    INDArray gradientW = Nd4j.ones(nIns[0], nOuts[0]);

    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().updater(new Sgd(0.3)).list()
            .layer(0, new DenseLayer.Builder().nIn(nIns[0]).nOut(nOuts[0])
                    .updater(new Sgd(lr)).biasUpdater(new Sgd(biasLr)).build())
            .layer(1, new BatchNormalization.Builder().nIn(nIns[1]).nOut(nOuts[1]).updater(new Sgd(0.7)).build())
            .layer(2, new OutputLayer.Builder().nIn(nIns[2]).nOut(nOuts[2]).lossFunction(LossFunctions.LossFunction.MSE).build())
            .build();

    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.init();
    ConvexOptimizer opt = new StochasticGradientDescent(net.getDefaultConfiguration(),
            new NegativeDefaultStepFunction(), null, net);

    assertEquals(lr, ((Sgd)net.getLayer(0).conf().getLayer().getUpdaterByParam("W")).getLearningRate(), 1e-4);
    assertEquals(biasLr, ((Sgd)net.getLayer(0).conf().getLayer().getUpdaterByParam("b")).getLearningRate(), 1e-4);
    assertEquals(0.7, ((Sgd)net.getLayer(1).conf().getLayer().getUpdaterByParam("gamma")).getLearningRate(), 1e-4);
    assertEquals(0.3, ((Sgd)net.getLayer(2).conf().getLayer().getUpdaterByParam("W")).getLearningRate(), 1e-4); //From global LR
    assertEquals(0.3, ((Sgd)net.getLayer(2).conf().getLayer().getUpdaterByParam("W")).getLearningRate(), 1e-4); //From global LR
}
Example #22
Source File: TestParallelEarlyStoppingUI.java From deeplearning4j with Apache License 2.0
@Test
@Ignore //To be run manually
public void testParallelStatsListenerCompatibility() throws Exception {
    UIServer uiServer = UIServer.getInstance();

    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .updater(new Sgd()).weightInit(WeightInit.XAVIER).list()
            .layer(0, new DenseLayer.Builder().nIn(4).nOut(3).build())
            .layer(1, new OutputLayer.Builder().nIn(3).nOut(3)
                    .lossFunction(LossFunctions.LossFunction.MCXENT).build())
            .build();
    MultiLayerNetwork net = new MultiLayerNetwork(conf);

    // it's important that the UI can report results from parallel training
    // there's potential for StatsListener to fail if certain properties aren't set in the model
    StatsStorage statsStorage = new InMemoryStatsStorage();
    net.setListeners(new StatsListener(statsStorage));
    uiServer.attach(statsStorage);

    DataSetIterator irisIter = new IrisDataSetIterator(50, 500);
    EarlyStoppingModelSaver<MultiLayerNetwork> saver = new InMemoryModelSaver<>();
    EarlyStoppingConfiguration<MultiLayerNetwork> esConf =
            new EarlyStoppingConfiguration.Builder<MultiLayerNetwork>()
                    .epochTerminationConditions(new MaxEpochsTerminationCondition(500))
                    .scoreCalculator(new DataSetLossCalculator(irisIter, true))
                    .evaluateEveryNEpochs(2).modelSaver(saver).build();

    IEarlyStoppingTrainer<MultiLayerNetwork> trainer =
            new EarlyStoppingParallelTrainer<>(esConf, net, irisIter, null, 3, 6, 2);

    EarlyStoppingResult<MultiLayerNetwork> result = trainer.fit();
    System.out.println(result);

    assertEquals(EarlyStoppingResult.TerminationReason.EpochTerminationCondition, result.getTerminationReason());
}
Example #23
Source File: ModelSerializerTest.java From deeplearning4j with Apache License 2.0
private ComputationGraph simpleComputationGraph() {
    ComputationGraphConfiguration config = new NeuralNetConfiguration.Builder()
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).updater(new Sgd(0.1))
            .graphBuilder().addInputs("in")
            .addLayer("dense", new DenseLayer.Builder().nIn(4).nOut(2).build(), "in")
            .addLayer("out", new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).nIn(2).nOut(3)
                    .activation(Activation.SOFTMAX).build(), "dense")
            .setOutputs("out").build();

    return new ComputationGraph(config);
}
Example #24
Source File: TestParallelEarlyStopping.java From deeplearning4j with Apache License 2.0
@Test
public void testBadTuning() {
    //Test poor tuning (high LR): should terminate on MaxScoreIterationTerminationCondition
    Nd4j.getRandom().setSeed(12345);
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345)
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .updater(new Sgd(1.0)) //Intentionally huge LR
            .weightInit(WeightInit.XAVIER).list()
            .layer(0, new OutputLayer.Builder().nIn(4).nOut(3).activation(Activation.SOFTMAX)
                    .lossFunction(LossFunctions.LossFunction.MCXENT).build())
            .build();
    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.setListeners(new ScoreIterationListener(1));

    DataSetIterator irisIter = new IrisDataSetIterator(10, 150);
    EarlyStoppingModelSaver<MultiLayerNetwork> saver = new InMemoryModelSaver<>();
    EarlyStoppingConfiguration<MultiLayerNetwork> esConf =
            new EarlyStoppingConfiguration.Builder<MultiLayerNetwork>()
                    .epochTerminationConditions(new MaxEpochsTerminationCondition(5000))
                    .iterationTerminationConditions(
                            new MaxTimeIterationTerminationCondition(1, TimeUnit.MINUTES),
                            new MaxScoreIterationTerminationCondition(10)) //Initial score is ~2.5
                    .scoreCalculator(new DataSetLossCalculator(irisIter, true)).modelSaver(saver)
                    .build();

    IEarlyStoppingTrainer<MultiLayerNetwork> trainer =
            new EarlyStoppingParallelTrainer<>(esConf, net, irisIter, null, 2, 2, 1);
    EarlyStoppingResult result = trainer.fit();

    assertTrue(result.getTotalEpochs() < 5);
    assertEquals(EarlyStoppingResult.TerminationReason.IterationTerminationCondition,
            result.getTerminationReason());
    String expDetails = new MaxScoreIterationTerminationCondition(10).toString();
    assertEquals(expDetails, result.getTerminationDetails());

    assertTrue(result.getBestModelEpoch() <= 0);
    assertNotNull(result.getBestModel());
}
Example #25
Source File: TestMultiLayerSpace.java From deeplearning4j with Apache License 2.0
@Test
public void testBidirectional() throws Exception {

    MultiLayerSpace mls = new MultiLayerSpace.Builder().updater(new Sgd(0.005))
            .seed(12345)
            .layer(new Bidirectional(new LSTMLayerSpace.Builder()
                    .nIn(10).nOut(10).build()))
            .build();

    DL4JConfiguration conf = mls.getValue(new double[0]);
    MultiLayerConfiguration c2 = conf.getMultiLayerConfiguration();

    MultiLayerNetwork net = new MultiLayerNetwork(c2);
    net.init();

    assertEquals(1, net.getnLayers());
    assertTrue(net.getLayer(0) instanceof BidirectionalLayer);
    BidirectionalLayer bl = (BidirectionalLayer)net.getLayer(0);

    Field f = BidirectionalLayer.class.getDeclaredField("fwd");
    Field b = BidirectionalLayer.class.getDeclaredField("bwd");
    f.setAccessible(true);
    b.setAccessible(true);
    org.deeplearning4j.nn.layers.recurrent.LSTM lstmFwd = (org.deeplearning4j.nn.layers.recurrent.LSTM) f.get(bl);
    org.deeplearning4j.nn.layers.recurrent.LSTM lstmBwd = (org.deeplearning4j.nn.layers.recurrent.LSTM) b.get(bl);

    assertEquals(10, ((LSTM)lstmFwd.conf().getLayer()).getNIn());
    assertEquals(10, ((LSTM)lstmFwd.conf().getLayer()).getNOut());
    assertEquals(10, ((LSTM)lstmBwd.conf().getLayer()).getNIn());
    assertEquals(10, ((LSTM)lstmBwd.conf().getLayer()).getNOut());
}
Example #26
Source File: TestNetConversion.java From deeplearning4j with Apache License 2.0
private MultiLayerNetwork getNet1(boolean train) {

    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
            .convolutionMode(ConvolutionMode.Same)
            .activation(Activation.TANH)
            .weightInit(WeightInit.XAVIER)
            .updater(new Sgd(0.1))
            .list()
            .layer(new ConvolutionLayer.Builder().nIn(3).nOut(5).kernelSize(2, 2).stride(1, 1).build())
            .layer(new SubsamplingLayer.Builder().kernelSize(2, 2).stride(1, 1).build())
            .layer(new DenseLayer.Builder().nOut(32).build())
            .layer(new OutputLayer.Builder().nOut(10).lossFunction(LossFunctions.LossFunction.MSE).build())
            .setInputType(InputType.convolutional(10, 10, 3))
            .build();

    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.init();

    if(train) {
        for (int i = 0; i < 3; i++) {
            INDArray f = Nd4j.rand(new int[]{8, 3, 10, 10});
            INDArray l = Nd4j.rand(8, 10);

            net.fit(f, l);
        }
    }

    return net;
}
Example #27
Source File: TestParallelEarlyStopping.java From deeplearning4j with Apache License 2.0
@Test
public void testEarlyStoppingEveryNEpoch() {
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .updater(new Sgd()).weightInit(WeightInit.XAVIER).list()
            .layer(0, new OutputLayer.Builder().nIn(4).nOut(3)
                    .lossFunction(LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX).build())
            .build();
    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.setListeners(new ScoreIterationListener(1));

    DataSetIterator irisIter = new IrisDataSetIterator(50, 600);
    EarlyStoppingModelSaver<MultiLayerNetwork> saver = new InMemoryModelSaver<>();
    EarlyStoppingConfiguration<MultiLayerNetwork> esConf =
            new EarlyStoppingConfiguration.Builder<MultiLayerNetwork>()
                    .epochTerminationConditions(new MaxEpochsTerminationCondition(5))
                    .scoreCalculator(new DataSetLossCalculator(irisIter, true))
                    .evaluateEveryNEpochs(2).modelSaver(saver).build();

    IEarlyStoppingTrainer<MultiLayerNetwork> trainer =
            new EarlyStoppingParallelTrainer<>(esConf, net, irisIter, null, 2, 6, 1);
    EarlyStoppingResult<MultiLayerNetwork> result = trainer.fit();
    System.out.println(result);

    assertEquals(5, result.getTotalEpochs());
    assertEquals(EarlyStoppingResult.TerminationReason.EpochTerminationCondition, result.getTerminationReason());
}
Example #28
Source File: TestLrChanges.java From deeplearning4j with Apache License 2.0
@Test
public void testChangeLSGD() {
    //Simple test for no updater nets
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
            .activation(Activation.TANH)
            .seed(12345)
            .updater(new Sgd(0.1))
            .list()
            .layer(new DenseLayer.Builder().nIn(10).nOut(10).build())
            .layer(new DenseLayer.Builder().nIn(10).nOut(10).build())
            .layer(new OutputLayer.Builder().nIn(10).nOut(10).lossFunction(LossFunctions.LossFunction.MSE).build())
            .build();

    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.init();

    net.setLearningRate(1.0);
    net.setLearningRate(1, 0.5);
    assertEquals(1.0, net.getLearningRate(0), 0.0);
    assertEquals(0.5, net.getLearningRate(1), 0.0);

    ComputationGraph cg = net.toComputationGraph();
    cg.setLearningRate(2.0);
    cg.setLearningRate("1", 2.5);
    assertEquals(2.0, cg.getLearningRate("0"), 0.0);
    assertEquals(2.5, cg.getLearningRate("1"), 0.0);
}
Example #29
Source File: TestComputationGraphNetwork.java From deeplearning4j with Apache License 2.0
@Test
public void printSummary() {
    NeuralNetConfiguration.Builder overallConf = new NeuralNetConfiguration.Builder().updater(new Sgd(0.1))
            .activation(Activation.IDENTITY);

    ComputationGraphConfiguration conf = overallConf.graphBuilder().addInputs("inCentre", "inRight")
            .addLayer("denseCentre0", new DenseLayer.Builder().nIn(10).nOut(9).build(), "inCentre")
            .addLayer("denseCentre1", new DenseLayer.Builder().nIn(9).nOut(8).build(), "denseCentre0")
            .addLayer("denseCentre2", new DenseLayer.Builder().nIn(8).nOut(7).build(), "denseCentre1")
            .addLayer("denseCentre3", new DenseLayer.Builder().nIn(7).nOut(7).build(), "denseCentre2")
            .addLayer("outCentre", new OutputLayer.Builder(LossFunctions.LossFunction.MSE).nIn(7).nOut(4).build(), "denseCentre3")
            .addVertex("subsetLeft", new SubsetVertex(0, 3), "denseCentre1")
            .addLayer("denseLeft0", new DenseLayer.Builder().nIn(4).nOut(5).build(), "subsetLeft")
            .addLayer("outLeft", new OutputLayer.Builder(LossFunctions.LossFunction.MSE).nIn(5).nOut(6).build(), "denseLeft0")
            .addLayer("denseRight", new DenseLayer.Builder().nIn(7).nOut(7).build(), "denseCentre2")
            .addLayer("denseRight0", new DenseLayer.Builder().nIn(2).nOut(3).build(), "inRight")
            .addVertex("mergeRight", new MergeVertex(), "denseRight", "denseRight0")
            .addLayer("denseRight1", new DenseLayer.Builder().nIn(10).nOut(5).build(), "mergeRight")
            .addLayer("outRight", new OutputLayer.Builder(LossFunctions.LossFunction.MSE).nIn(5).nOut(5).build(), "denseRight1")
            .setOutputs("outLeft", "outCentre", "outRight").build();

    ComputationGraph modelToTune = new ComputationGraph(conf);
    modelToTune.init();
    // System.out.println(modelToTune.summary());
    modelToTune.summary();

    ComputationGraph modelNow =
            new TransferLearning.GraphBuilder(modelToTune).setFeatureExtractor("denseCentre2").build();
    // System.out.println(modelNow.summary());
    // System.out.println(modelNow.summary(InputType.feedForward(10),InputType.feedForward(2)));
    modelNow.summary();
    modelNow.summary(InputType.feedForward(10),InputType.feedForward(2));
}
Example #30
Source File: FrozenLayerWithBackpropTest.java From deeplearning4j with Apache License 2.0
@Test
public void testMultiLayerNetworkFrozenLayerParamsAfterBackprop() {
    Nd4j.getRandom().setSeed(12345);
    DataSet randomData = new DataSet(Nd4j.rand(100, 4), Nd4j.rand(100, 1));

    MultiLayerConfiguration conf1 = new NeuralNetConfiguration.Builder()
            .seed(12345)
            .weightInit(WeightInit.XAVIER)
            .updater(new Sgd(2))
            .list()
            .layer(new DenseLayer.Builder().nIn(4).nOut(3).build())
            .layer(new org.deeplearning4j.nn.conf.layers.misc.FrozenLayerWithBackprop(
                    new DenseLayer.Builder().nIn(3).nOut(4).build()))
            .layer(new org.deeplearning4j.nn.conf.layers.misc.FrozenLayerWithBackprop(
                    new DenseLayer.Builder().nIn(4).nOut(2).build()))
            .layer(new org.deeplearning4j.nn.conf.layers.misc.FrozenLayerWithBackprop(
                    new OutputLayer.Builder(LossFunctions.LossFunction.MSE).activation(Activation.TANH).nIn(2).nOut(1).build()))
            .build();

    MultiLayerNetwork network = new MultiLayerNetwork(conf1);
    network.init();

    INDArray unfrozenLayerParams = network.getLayer(0).params().dup();
    INDArray frozenLayerParams1 = network.getLayer(1).params().dup();
    INDArray frozenLayerParams2 = network.getLayer(2).params().dup();
    INDArray frozenOutputLayerParams = network.getLayer(3).params().dup();

    for (int i = 0; i < 100; i++) {
        network.fit(randomData);
    }

    assertNotEquals(unfrozenLayerParams, network.getLayer(0).params());
    assertEquals(frozenLayerParams1, network.getLayer(1).params());
    assertEquals(frozenLayerParams2, network.getLayer(2).params());
    assertEquals(frozenOutputLayerParams, network.getLayer(3).params());
}