org.nd4j.linalg.learning.config.AdaDelta Java Examples
The following examples show how to use
org.nd4j.linalg.learning.config.AdaDelta.
Each example is drawn from an open-source project; the source file, project, and license are noted above each snippet.
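AdaDelta maintains per-parameter running averages and needs no manually tuned learning rate; in ND4J it is configured through this class and passed to a network builder via updater(...). A minimal sketch, assuming the no-argument and (rho, epsilon) constructors with ND4J's documented defaults of rho = 0.95 and epsilon = 1e-6:

import org.nd4j.linalg.learning.config.AdaDelta;

// Default configuration; rho = 0.95 and epsilon = 1e-6 are assumed to be the ND4J defaults
AdaDelta defaults = new AdaDelta();

// Explicit hyperparameters: decay rate rho and numerical-stability constant epsilon
AdaDelta custom = new AdaDelta(0.95, 1e-6);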
Example #1
Source File: AdaDeltaLearnerTestCase.java From jstarcraft-ai with Apache License 2.0
@Override
protected GradientUpdater<?> getOldFunction(long[] shape) {
    AdaDelta configuration = new AdaDelta();
    GradientUpdater<?> oldFunction = new AdaDeltaUpdater(configuration);
    // AdaDelta keeps two state arrays per parameter (running averages of squared
    // gradients and of squared updates), so the flat view is sized via stateSize(...)
    int length = (int) (shape[0] * configuration.stateSize(shape[1]));
    INDArray view = Nd4j.zeros(length);
    oldFunction.setStateViewArray(view, shape, 'c', true);
    return oldFunction;
}
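For comparison, a hedged sketch of obtaining the same updater through the IUpdater interface, which AdaDelta implements; this assumes IUpdater.instantiate(INDArray, boolean) wires the state view in a single call:

// stateSize(n) is 2*n for AdaDelta: one running average of squared gradients
// and one of squared updates per parameter
AdaDelta configuration = new AdaDelta();
INDArray view = Nd4j.zeros((int) configuration.stateSize(10));
GradientUpdater<?> updater = configuration.instantiate(view, true);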
Example #2
Source File: BatchNormalizationTest.java From deeplearning4j with Apache License 2.0
@Test
public void testBatchNorm() throws Exception {
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
            .seed(12345)
            .updater(new Adam(1e-3))
            .activation(Activation.TANH)
            .list()
            .layer(new ConvolutionLayer.Builder().nOut(5).kernelSize(2, 2).build())
            .layer(new BatchNormalization())
            .layer(new ConvolutionLayer.Builder().nOut(5).kernelSize(2, 2).build())
            .layer(new OutputLayer.Builder().activation(Activation.SOFTMAX)
                    .lossFunction(LossFunctions.LossFunction.MCXENT).nOut(10).build())
            .setInputType(InputType.convolutionalFlat(28, 28, 1))
            .build();

    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.init();

    DataSetIterator iter = new EarlyTerminationDataSetIterator(
            new MnistDataSetIterator(32, true, 12345), 10);
    net.fit(iter);

    // Transfer learning: keep the trained feature extractor, switch the updater
    // from Adam to AdaDelta, and rebuild the output layers
    MultiLayerNetwork net2 = new TransferLearning.Builder(net)
            .fineTuneConfiguration(FineTuneConfiguration.builder()
                    .updater(new AdaDelta())
                    .build())
            .removeOutputLayer()
            .addLayer(new BatchNormalization.Builder().nOut(3380).build())
            .addLayer(new OutputLayer.Builder().activation(Activation.SOFTMAX)
                    .lossFunction(LossFunctions.LossFunction.MCXENT).nIn(3380).nOut(10).build())
            .build();
    net2.fit(iter);
}
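The transfer-learning step is where AdaDelta enters this test: FineTuneConfiguration overrides the original Adam updater, so fine-tuning runs with AdaDelta while the pretrained weights are kept. The value 3380 is the flattened size of the second convolution's activations: two 2x2 kernels (stride 1, no padding) shrink the 28x28 MNIST input to 26x26, and 26 * 26 * 5 channels = 3380.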
Example #3
Source File: AdaDeltaUpdater.java From nd4j with Apache License 2.0
public AdaDeltaUpdater(AdaDelta config) {
    this.config = config;
}
Example #4
Source File: AdaDeltaUpdater.java From deeplearning4j with Apache License 2.0
public AdaDeltaUpdater(AdaDelta config) {
    this.config = config;
}
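Examples #3 and #4 show only the constructor. A hypothetical usage sketch, assuming GradientUpdater.applyUpdater(gradient, iteration, epoch) applies the AdaDelta rule to the gradient in place:

AdaDelta config = new AdaDelta();
GradientUpdater<?> updater = new AdaDeltaUpdater(config);

// Flat state view sized for a 1 x 10 gradient (same pattern as Example #1)
long[] shape = {1, 10};
INDArray state = Nd4j.zeros((int) config.stateSize(shape[1]));
updater.setStateViewArray(state, shape, 'c', true);

INDArray gradient = Nd4j.rand(1, 10);
updater.applyUpdater(gradient, 0, 0);  // gradient now holds the AdaDelta-scaled update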
Example #5
Source File: TestGraphNodes.java From deeplearning4j with Apache License 2.0
@Test
public void testLastTimeStepWithTransfer() {
    int lstmLayerSize = 16;
    int numLabelClasses = 10;
    int numInputs = 5;

    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder()
            .trainingWorkspaceMode(WorkspaceMode.NONE)
            .inferenceWorkspaceMode(WorkspaceMode.NONE)
            .seed(123)    //Random number generator seed for improved repeatability. Optional.
            .updater(new AdaDelta())
            .weightInit(WeightInit.XAVIER)
            .graphBuilder()
            .addInputs("rr")
            .setInputTypes(InputType.recurrent(30))
            .addLayer("1", new GravesLSTM.Builder().activation(Activation.TANH)
                    .nIn(numInputs).nOut(lstmLayerSize).dropOut(0.9).build(), "rr")
            .addLayer("2", new RnnOutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                    .activation(Activation.SOFTMAX).nOut(numLabelClasses).build(), "1")
            .setOutputs("2")
            .build();

    ComputationGraph net = new ComputationGraph(conf);
    net.init();

    ComputationGraph updatedModel = new TransferLearning.GraphBuilder(net)
            .addVertex("laststepoutput", new LastTimeStepVertex("rr"), "2")
            .setOutputs("laststepoutput")
            .build();

    INDArray input = Nd4j.rand(new int[]{10, numInputs, 16});

    INDArray[] out = updatedModel.output(input);
    assertNotNull(out);
    assertEquals(1, out.length);
    assertNotNull(out[0]);
    assertArrayEquals(new long[]{10, numLabelClasses}, out[0].shape());

    Map<String, INDArray> acts = updatedModel.feedForward(input, false);
    assertEquals(4, acts.size());   //2 layers + input + vertex output
    assertNotNull(acts.get("laststepoutput"));
    assertArrayEquals(new long[]{10, numLabelClasses}, acts.get("laststepoutput").shape());

    String toString = out[0].toString();
}
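A note on the vertex added above: LastTimeStepVertex reduces the RNN output from 3d activations of shape [miniBatchSize, nOut, timeSeriesLength] to 2d [miniBatchSize, nOut] by taking the last time step (honoring the input mask for variable-length sequences), which is why out[0].shape() is {10, numLabelClasses} rather than {10, numLabelClasses, 16}.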