Java Code Examples for org.deeplearning4j.nn.multilayer.MultiLayerNetwork#getLayerWiseConfigurations()
The following examples show how to use org.deeplearning4j.nn.multilayer.MultiLayerNetwork#getLayerWiseConfigurations().
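As background for the examples, here is a minimal, self-contained sketch of the method itself. It is not taken from any of the projects below; the network shape and hyperparameters are arbitrary, and it assumes a 1.0.0-beta-era DL4J API. getLayerWiseConfigurations() returns the MultiLayerConfiguration the network was built from, holding one NeuralNetConfiguration per layer.

import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.lossfunctions.LossFunctions;

public class GetLayerWiseConfigurationsDemo {
    public static void main(String[] args) {
        // Build a small two-layer network from an explicit configuration
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .seed(12345)
                .list()
                .layer(0, new DenseLayer.Builder().nIn(4).nOut(3)
                        .activation(Activation.RELU).build())
                .layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                        .activation(Activation.SOFTMAX).nIn(3).nOut(3).build())
                .build();

        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init();

        // getLayerWiseConfigurations() returns the network's MultiLayerConfiguration,
        // which holds one NeuralNetConfiguration per layer and can be serialized to JSON
        MultiLayerConfiguration fromNet = net.getLayerWiseConfigurations();
        System.out.println(fromNet.getConfs().size()); // 2
        System.out.println(fromNet.toJson());
    }
}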
Example 1
Source File: TestUtils.java From deeplearning4j with Apache License 2.0
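A shared test helper that serializes a MultiLayerNetwork to a byte array with ModelSerializer, restores it, and asserts that the restored network's layer-wise configuration and parameters match the original; it also checks that the MultiLayerConfiguration is Java-serializable, which Spark requires.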
public static MultiLayerNetwork testModelSerialization(MultiLayerNetwork net) {
    MultiLayerNetwork restored;
    try {
        // Write the model (including updater state) to a byte array
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        ModelSerializer.writeModel(net, baos, true);
        byte[] bytes = baos.toByteArray();

        // Restore it and check that configuration and parameters survived the round trip
        ByteArrayInputStream bais = new ByteArrayInputStream(bytes);
        restored = ModelSerializer.restoreMultiLayerNetwork(bais, true);

        assertEquals(net.getLayerWiseConfigurations(), restored.getLayerWiseConfigurations());
        assertEquals(net.params(), restored.params());
    } catch (IOException e) {
        //Should never happen
        throw new RuntimeException(e);
    }

    //Also check the MultiLayerConfiguration is serializable (required by Spark etc)
    MultiLayerConfiguration conf = net.getLayerWiseConfigurations();
    serializeDeserializeJava(conf);

    return restored;
}
Example 2
Source File: DTypeTests.java From deeplearning4j with Apache License 2.0
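Walks the network's MultiLayerConfiguration and records the class of every layer (unwrapping wrapper and Bidirectional layers) and of every input pre-processor in the seenLayers and seenPreprocs sets.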
public static void logUsedClasses(MultiLayerNetwork net) {
    MultiLayerConfiguration conf = net.getLayerWiseConfigurations();
    for (NeuralNetConfiguration nnc : conf.getConfs()) {
        Layer l = nnc.getLayer();
        seenLayers.add(l.getClass());
        if (l instanceof BaseWrapperLayer) {
            BaseWrapperLayer bwl = (BaseWrapperLayer) l;
            seenLayers.add(bwl.getUnderlying().getClass());
        } else if (l instanceof Bidirectional) {
            seenLayers.add(((Bidirectional) l).getFwd().getClass());
        }
    }

    Map<Integer, InputPreProcessor> preprocs = conf.getInputPreProcessors();
    if (preprocs != null) {
        for (InputPreProcessor ipp : preprocs.values()) {
            seenPreprocs.add(ipp.getClass());
        }
    }
}
Example 3
Source File: RegressionTest060.java From deeplearning4j with Apache License 2.0
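Restores a network saved by DL4J 0.6.0 and asserts, via getLayerWiseConfigurations(), that every hyperparameter deserialized correctly: layer sizes, activation functions, Xavier weight initialization, Nesterovs updater settings, and the exact parameter and updater-state arrays.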
@Test
public void regressionTestMLP1() throws Exception {
    File f = Resources.asFile("regression_testing/060/060_ModelSerializer_Regression_MLP_1.zip");

    MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(f, true);

    MultiLayerConfiguration conf = net.getLayerWiseConfigurations();
    assertEquals(2, conf.getConfs().size());

    DenseLayer l0 = (DenseLayer) conf.getConf(0).getLayer();
    assertEquals("relu", l0.getActivationFn().toString());
    assertEquals(3, l0.getNIn());
    assertEquals(4, l0.getNOut());
    assertEquals(new WeightInitXavier(), l0.getWeightInitFn());
    assertEquals(new Nesterovs(0.15, 0.9), l0.getIUpdater());
    assertEquals(0.15, ((Nesterovs) l0.getIUpdater()).getLearningRate(), 1e-6);

    OutputLayer l1 = (OutputLayer) conf.getConf(1).getLayer();
    assertEquals("softmax", l1.getActivationFn().toString());
    assertTrue(l1.getLossFn() instanceof LossMCXENT);
    assertEquals(4, l1.getNIn());
    assertEquals(5, l1.getNOut());
    assertEquals(new WeightInitXavier(), l1.getWeightInitFn());
    assertEquals(new Nesterovs(0.15, 0.9), l1.getIUpdater());
    assertEquals(0.9, ((Nesterovs) l1.getIUpdater()).getMomentum(), 1e-6);
    assertEquals(0.15, ((Nesterovs) l1.getIUpdater()).getLearningRate(), 1e-6);

    int numParams = (int) net.numParams();
    assertEquals(Nd4j.linspace(1, numParams, numParams, Nd4j.dataType()).reshape(1, numParams), net.params());
    int updaterSize = (int) new Nesterovs().stateSize(numParams);
    assertEquals(Nd4j.linspace(1, updaterSize, updaterSize, Nd4j.dataType()).reshape(1, numParams), net.getUpdater().getStateViewArray());
}
Example 4
Source File: RegressionTest060.java From deeplearning4j with Apache License 2.0
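The 0.6.0 recurrent-network test: GravesLSTM and GravesBidirectionalLSTM layers with element-wise gradient clipping, followed by an RnnOutputLayer with MCXENT loss.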
@Test
public void regressionTestLSTM1() throws Exception {
    File f = Resources.asFile("regression_testing/060/060_ModelSerializer_Regression_LSTM_1.zip");

    MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(f, true);

    MultiLayerConfiguration conf = net.getLayerWiseConfigurations();
    assertEquals(3, conf.getConfs().size());

    GravesLSTM l0 = (GravesLSTM) conf.getConf(0).getLayer();
    assertEquals("tanh", l0.getActivationFn().toString());
    assertEquals(3, l0.getNIn());
    assertEquals(4, l0.getNOut());
    assertEquals(GradientNormalization.ClipElementWiseAbsoluteValue, l0.getGradientNormalization());
    assertEquals(1.5, l0.getGradientNormalizationThreshold(), 1e-5);

    GravesBidirectionalLSTM l1 = (GravesBidirectionalLSTM) conf.getConf(1).getLayer();
    assertEquals("softsign", l1.getActivationFn().toString());
    assertEquals(4, l1.getNIn());
    assertEquals(4, l1.getNOut());
    assertEquals(GradientNormalization.ClipElementWiseAbsoluteValue, l1.getGradientNormalization());
    assertEquals(1.5, l1.getGradientNormalizationThreshold(), 1e-5);

    RnnOutputLayer l2 = (RnnOutputLayer) conf.getConf(2).getLayer();
    assertEquals(4, l2.getNIn());
    assertEquals(5, l2.getNOut());
    assertEquals("softmax", l2.getActivationFn().toString());
    assertTrue(l2.getLossFn() instanceof LossMCXENT);
}
Example 5
Source File: RegressionTest050.java From deeplearning4j with Apache License 2.0
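The 0.5.0 counterpart of the MLP test in Example 3, with the same assertions.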
@Test
public void regressionTestMLP1() throws Exception {
    File f = Resources.asFile("regression_testing/050/050_ModelSerializer_Regression_MLP_1.zip");

    MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(f, true);

    MultiLayerConfiguration conf = net.getLayerWiseConfigurations();
    assertEquals(2, conf.getConfs().size());

    DenseLayer l0 = (DenseLayer) conf.getConf(0).getLayer();
    assertEquals("relu", l0.getActivationFn().toString());
    assertEquals(3, l0.getNIn());
    assertEquals(4, l0.getNOut());
    assertEquals(new WeightInitXavier(), l0.getWeightInitFn());
    assertEquals(new Nesterovs(0.15, 0.9), l0.getIUpdater());
    assertEquals(0.15, ((Nesterovs) l0.getIUpdater()).getLearningRate(), 1e-6);

    OutputLayer l1 = (OutputLayer) conf.getConf(1).getLayer();
    assertEquals("softmax", l1.getActivationFn().toString());
    assertTrue(l1.getLossFn() instanceof LossMCXENT);
    assertEquals(4, l1.getNIn());
    assertEquals(5, l1.getNOut());
    assertEquals(new WeightInitXavier(), l1.getWeightInitFn());
    assertEquals(new Nesterovs(0.15, 0.9), l1.getIUpdater());
    assertEquals(0.9, ((Nesterovs) l1.getIUpdater()).getMomentum(), 1e-6);
    assertEquals(0.15, ((Nesterovs) l1.getIUpdater()).getLearningRate(), 1e-6);

    int numParams = (int) net.numParams();
    assertEquals(Nd4j.linspace(1, numParams, numParams, Nd4j.dataType()).reshape(1, numParams), net.params());
    int updaterSize = (int) new Nesterovs().stateSize(net.numParams());
    assertEquals(Nd4j.linspace(1, updaterSize, updaterSize, Nd4j.dataType()).reshape(1, numParams), net.getUpdater().getStateViewArray());
}
Example 6
Source File: RegressionTest071.java From deeplearning4j with Apache License 2.0
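The 0.7.1 counterpart of the LSTM test in Example 4.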
@Test
public void regressionTestLSTM1() throws Exception {
    File f = Resources.asFile("regression_testing/071/071_ModelSerializer_Regression_LSTM_1.zip");

    MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(f, true);

    MultiLayerConfiguration conf = net.getLayerWiseConfigurations();
    assertEquals(3, conf.getConfs().size());

    GravesLSTM l0 = (GravesLSTM) conf.getConf(0).getLayer();
    assertEquals("tanh", l0.getActivationFn().toString());
    assertEquals(3, l0.getNIn());
    assertEquals(4, l0.getNOut());
    assertEquals(GradientNormalization.ClipElementWiseAbsoluteValue, l0.getGradientNormalization());
    assertEquals(1.5, l0.getGradientNormalizationThreshold(), 1e-5);

    GravesBidirectionalLSTM l1 = (GravesBidirectionalLSTM) conf.getConf(1).getLayer();
    assertEquals("softsign", l1.getActivationFn().toString());
    assertEquals(4, l1.getNIn());
    assertEquals(4, l1.getNOut());
    assertEquals(GradientNormalization.ClipElementWiseAbsoluteValue, l1.getGradientNormalization());
    assertEquals(1.5, l1.getGradientNormalizationThreshold(), 1e-5);

    RnnOutputLayer l2 = (RnnOutputLayer) conf.getConf(2).getLayer();
    assertEquals(4, l2.getNIn());
    assertEquals(5, l2.getNOut());
    assertEquals("softmax", l2.getActivationFn().toString());
    assertTrue(l2.getLossFn() instanceof LossMCXENT);
}
Example 7
Source File: RegressionTest071.java From deeplearning4j with Apache License 2.0
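The 0.7.1 counterpart of the MLP test in Example 3.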
@Test
public void regressionTestMLP1() throws Exception {
    File f = Resources.asFile("regression_testing/071/071_ModelSerializer_Regression_MLP_1.zip");

    MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(f, true);

    MultiLayerConfiguration conf = net.getLayerWiseConfigurations();
    assertEquals(2, conf.getConfs().size());

    DenseLayer l0 = (DenseLayer) conf.getConf(0).getLayer();
    assertEquals("relu", l0.getActivationFn().toString());
    assertEquals(3, l0.getNIn());
    assertEquals(4, l0.getNOut());
    assertEquals(new WeightInitXavier(), l0.getWeightInitFn());
    assertEquals(new Nesterovs(0.15, 0.9), l0.getIUpdater());
    assertEquals(0.15, ((Nesterovs) l0.getIUpdater()).getLearningRate(), 1e-6);

    OutputLayer l1 = (OutputLayer) conf.getConf(1).getLayer();
    assertEquals("softmax", l1.getActivationFn().toString());
    assertTrue(l1.getLossFn() instanceof LossMCXENT);
    assertEquals(4, l1.getNIn());
    assertEquals(5, l1.getNOut());
    assertEquals(new WeightInitXavier(), l1.getWeightInitFn());
    assertEquals(0.9, ((Nesterovs) l1.getIUpdater()).getMomentum(), 1e-6);
    assertEquals(0.15, ((Nesterovs) l1.getIUpdater()).getLearningRate(), 1e-6);

    long numParams = net.numParams();
    assertEquals(Nd4j.linspace(1, numParams, numParams).reshape(1, numParams), net.params());
    int updaterSize = (int) new Nesterovs().stateSize(numParams);
    assertEquals(Nd4j.linspace(1, updaterSize, updaterSize).reshape(1, numParams), net.getUpdater().getStateViewArray());
}
Example 8
Source File: RegressionTest080.java From deeplearning4j with Apache License 2.0
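The 0.8.0 LSTM test; unlike the earlier versions, activation functions are asserted by type (ActivationTanH, ActivationSoftSign, ActivationSoftmax) rather than by string name.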
@Test
public void regressionTestLSTM1() throws Exception {
    File f = Resources.asFile("regression_testing/080/080_ModelSerializer_Regression_LSTM_1.zip");

    MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(f, true);

    MultiLayerConfiguration conf = net.getLayerWiseConfigurations();
    assertEquals(3, conf.getConfs().size());

    GravesLSTM l0 = (GravesLSTM) conf.getConf(0).getLayer();
    assertTrue(l0.getActivationFn() instanceof ActivationTanH);
    assertEquals(3, l0.getNIn());
    assertEquals(4, l0.getNOut());
    assertEquals(GradientNormalization.ClipElementWiseAbsoluteValue, l0.getGradientNormalization());
    assertEquals(1.5, l0.getGradientNormalizationThreshold(), 1e-5);

    GravesBidirectionalLSTM l1 = (GravesBidirectionalLSTM) conf.getConf(1).getLayer();
    assertTrue(l1.getActivationFn() instanceof ActivationSoftSign);
    assertEquals(4, l1.getNIn());
    assertEquals(4, l1.getNOut());
    assertEquals(GradientNormalization.ClipElementWiseAbsoluteValue, l1.getGradientNormalization());
    assertEquals(1.5, l1.getGradientNormalizationThreshold(), 1e-5);

    RnnOutputLayer l2 = (RnnOutputLayer) conf.getConf(2).getLayer();
    assertEquals(4, l2.getNIn());
    assertEquals(5, l2.getNOut());
    assertTrue(l2.getActivationFn() instanceof ActivationSoftmax);
    assertTrue(l2.getLossFn() instanceof LossMCXENT);
}
Example 9
Source File: RegressionTest080.java From deeplearning4j with Apache License 2.0
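The 0.8.0 MLP test, likewise asserting activation functions and updaters by type.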
@Test
public void regressionTestMLP1() throws Exception {
    File f = Resources.asFile("regression_testing/080/080_ModelSerializer_Regression_MLP_1.zip");

    MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(f, true);

    MultiLayerConfiguration conf = net.getLayerWiseConfigurations();
    assertEquals(2, conf.getConfs().size());

    DenseLayer l0 = (DenseLayer) conf.getConf(0).getLayer();
    assertTrue(l0.getActivationFn() instanceof ActivationReLU);
    assertEquals(3, l0.getNIn());
    assertEquals(4, l0.getNOut());
    assertEquals(new WeightInitXavier(), l0.getWeightInitFn());
    assertTrue(l0.getIUpdater() instanceof Nesterovs);
    Nesterovs n = (Nesterovs) l0.getIUpdater();
    assertEquals(0.9, n.getMomentum(), 1e-6);
    assertEquals(0.15, n.getLearningRate(), 1e-6);

    OutputLayer l1 = (OutputLayer) conf.getConf(1).getLayer();
    assertTrue(l1.getActivationFn() instanceof ActivationSoftmax);
    assertTrue(l1.getLossFn() instanceof LossMCXENT);
    assertEquals(4, l1.getNIn());
    assertEquals(5, l1.getNOut());
    assertEquals(new WeightInitXavier(), l1.getWeightInitFn());
    assertTrue(l1.getIUpdater() instanceof Nesterovs);
    assertEquals(0.9, ((Nesterovs) l1.getIUpdater()).getMomentum(), 1e-6);
    assertEquals(0.15, ((Nesterovs) l1.getIUpdater()).getLearningRate(), 1e-6);

    int numParams = (int) net.numParams();
    assertEquals(Nd4j.linspace(1, numParams, numParams, Nd4j.dataType()).reshape(1, numParams), net.params());
    int updaterSize = (int) new Nesterovs().stateSize(numParams);
    assertEquals(Nd4j.linspace(1, updaterSize, updaterSize, Nd4j.dataType()).reshape(1, numParams), net.getUpdater().getStateViewArray());
}
Example 10
Source File: RegressionTest050.java From deeplearning4j with Apache License 2.0
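A second 0.5.0 MLP test covering leaky-ReLU activation, a normal-distribution weight initialization, an RmsProp updater, dropout, L1 regularization, and weight decay.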
@Test
public void regressionTestMLP2() throws Exception {
    File f = Resources.asFile("regression_testing/050/050_ModelSerializer_Regression_MLP_2.zip");

    MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(f, true);

    MultiLayerConfiguration conf = net.getLayerWiseConfigurations();
    assertEquals(2, conf.getConfs().size());

    DenseLayer l0 = (DenseLayer) conf.getConf(0).getLayer();
    assertTrue(l0.getActivationFn() instanceof ActivationLReLU);
    assertEquals(3, l0.getNIn());
    assertEquals(4, l0.getNOut());
    assertEquals(new WeightInitDistribution(new NormalDistribution(0.1, 1.2)), l0.getWeightInitFn());
    assertEquals(new RmsProp(0.15, 0.96, RmsProp.DEFAULT_RMSPROP_EPSILON), l0.getIUpdater());
    assertEquals(0.15, ((RmsProp) l0.getIUpdater()).getLearningRate(), 1e-6);
    assertEquals(new Dropout(0.6), l0.getIDropout());
    assertEquals(0.1, TestUtils.getL1(l0), 1e-6);
    assertEquals(new WeightDecay(0.2, false), TestUtils.getWeightDecayReg(l0));

    OutputLayer l1 = (OutputLayer) conf.getConf(1).getLayer();
    assertEquals("identity", l1.getActivationFn().toString());
    assertTrue(l1.getLossFn() instanceof LossMSE);
    assertEquals(4, l1.getNIn());
    assertEquals(5, l1.getNOut());
    assertEquals(new WeightInitDistribution(new NormalDistribution(0.1, 1.2)), l1.getWeightInitFn());
    assertEquals(new RmsProp(0.15, 0.96, RmsProp.DEFAULT_RMSPROP_EPSILON), l1.getIUpdater());
    assertEquals(0.15, ((RmsProp) l1.getIUpdater()).getLearningRate(), 1e-6);
    assertEquals(new Dropout(0.6), l1.getIDropout());
    assertEquals(0.1, TestUtils.getL1(l1), 1e-6);
    assertEquals(new WeightDecay(0.2, false), TestUtils.getWeightDecayReg(l1));

    int numParams = (int) net.numParams();
    assertEquals(Nd4j.linspace(1, numParams, numParams, Nd4j.dataType()).reshape(1, numParams), net.params());
    int updaterSize = (int) new RmsProp().stateSize(numParams);
    assertEquals(Nd4j.linspace(1, updaterSize, updaterSize, Nd4j.dataType()).reshape(1, numParams), net.getUpdater().getStateViewArray());
}
Example 11
Source File: RegressionTest071.java From deeplearning4j with Apache License 2.0
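The 0.7.1 CNN test: a convolution layer with ConvolutionMode.Same, a max-pooling subsampling layer, and a CnnToFeedForwardPreProcessor in front of the output layer.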
@Test
public void regressionTestCNN1() throws Exception {
    File f = Resources.asFile("regression_testing/071/071_ModelSerializer_Regression_CNN_1.zip");

    MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(f, true);

    MultiLayerConfiguration conf = net.getLayerWiseConfigurations();
    assertEquals(3, conf.getConfs().size());

    ConvolutionLayer l0 = (ConvolutionLayer) conf.getConf(0).getLayer();
    assertEquals("tanh", l0.getActivationFn().toString());
    assertEquals(3, l0.getNIn());
    assertEquals(3, l0.getNOut());
    assertEquals(new WeightInitRelu(), l0.getWeightInitFn());
    assertEquals(new RmsProp(0.15, 0.96, RmsProp.DEFAULT_RMSPROP_EPSILON), l0.getIUpdater());
    assertEquals(0.15, ((RmsProp) l0.getIUpdater()).getLearningRate(), 1e-6);
    assertArrayEquals(new int[] {2, 2}, l0.getKernelSize());
    assertArrayEquals(new int[] {1, 1}, l0.getStride());
    assertArrayEquals(new int[] {0, 0}, l0.getPadding());
    assertEquals(ConvolutionMode.Same, l0.getConvolutionMode());

    SubsamplingLayer l1 = (SubsamplingLayer) conf.getConf(1).getLayer();
    assertArrayEquals(new int[] {2, 2}, l1.getKernelSize());
    assertArrayEquals(new int[] {1, 1}, l1.getStride());
    assertArrayEquals(new int[] {0, 0}, l1.getPadding());
    assertEquals(PoolingType.MAX, l1.getPoolingType());
    assertEquals(ConvolutionMode.Same, l1.getConvolutionMode());

    OutputLayer l2 = (OutputLayer) conf.getConf(2).getLayer();
    assertEquals("sigmoid", l2.getActivationFn().toString());
    assertTrue(l2.getLossFn() instanceof LossNegativeLogLikelihood);
    //TODO assertEquals(26 * 26 * 3, l2.getNIn());
    assertEquals(5, l2.getNOut());
    assertEquals(new WeightInitRelu(), l2.getWeightInitFn());
    assertEquals(new RmsProp(0.15, 0.96, RmsProp.DEFAULT_RMSPROP_EPSILON), l2.getIUpdater());
    assertEquals(0.15, ((RmsProp) l2.getIUpdater()).getLearningRate(), 1e-6);

    assertTrue(conf.getInputPreProcess(2) instanceof CnnToFeedForwardPreProcessor);

    long numParams = net.numParams();
    assertEquals(Nd4j.linspace(1, numParams, numParams).reshape(1, numParams), net.params());
    int updaterSize = (int) new RmsProp().stateSize(numParams);
    assertEquals(Nd4j.linspace(1, updaterSize, updaterSize).reshape(1, numParams), net.getUpdater().getStateViewArray());
}
Example 12
Source File: RegressionTest060.java From deeplearning4j with Apache License 2.0
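The 0.6.0 counterpart of Example 10, additionally asserting gradient-normalization settings.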
@Test
public void regressionTestMLP2() throws Exception {
    File f = Resources.asFile("regression_testing/060/060_ModelSerializer_Regression_MLP_2.zip");

    MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(f, true);

    MultiLayerConfiguration conf = net.getLayerWiseConfigurations();
    assertEquals(2, conf.getConfs().size());

    DenseLayer l0 = (DenseLayer) conf.getConf(0).getLayer();
    assertTrue(l0.getActivationFn() instanceof ActivationLReLU);
    assertEquals(3, l0.getNIn());
    assertEquals(4, l0.getNOut());
    assertEquals(new WeightInitDistribution(new NormalDistribution(0.1, 1.2)), l0.getWeightInitFn());
    assertEquals(new RmsProp(0.15, 0.96, RmsProp.DEFAULT_RMSPROP_EPSILON), l0.getIUpdater());
    assertEquals(0.15, ((RmsProp) l0.getIUpdater()).getLearningRate(), 1e-6);
    assertEquals(new Dropout(0.6), l0.getIDropout());
    assertEquals(0.1, TestUtils.getL1(l0), 1e-6);
    assertEquals(new WeightDecay(0.2, false), TestUtils.getWeightDecayReg(l0));
    assertEquals(GradientNormalization.ClipElementWiseAbsoluteValue, l0.getGradientNormalization());
    assertEquals(1.5, l0.getGradientNormalizationThreshold(), 1e-5);

    OutputLayer l1 = (OutputLayer) conf.getConf(1).getLayer();
    assertEquals("identity", l1.getActivationFn().toString());
    assertTrue(l1.getLossFn() instanceof LossMSE);
    assertEquals(4, l1.getNIn());
    assertEquals(5, l1.getNOut());
    assertEquals(new WeightInitDistribution(new NormalDistribution(0.1, 1.2)), l1.getWeightInitFn());
    assertEquals(new RmsProp(0.15, 0.96, RmsProp.DEFAULT_RMSPROP_EPSILON), l1.getIUpdater());
    assertEquals(0.15, ((RmsProp) l1.getIUpdater()).getLearningRate(), 1e-6);
    assertEquals(new Dropout(0.6), l1.getIDropout());
    assertEquals(0.1, TestUtils.getL1(l1), 1e-6);
    assertEquals(new WeightDecay(0.2, false), TestUtils.getWeightDecayReg(l1));
    assertEquals(GradientNormalization.ClipElementWiseAbsoluteValue, l1.getGradientNormalization());
    assertEquals(1.5, l1.getGradientNormalizationThreshold(), 1e-5);

    int numParams = (int) net.numParams();
    assertEquals(Nd4j.linspace(1, numParams, numParams, Nd4j.dataType()).reshape(1, numParams), net.params());
    int updaterSize = (int) new RmsProp().stateSize(numParams);
    assertEquals(Nd4j.linspace(1, updaterSize, updaterSize, Nd4j.dataType()).reshape(1, numParams), net.getUpdater().getStateViewArray());
}
Example 13
Source File: RegressionTest060.java From deeplearning4j with Apache License 2.0
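The 0.6.0 CNN test; models saved before 0.7.0 had no ConvolutionMode, so deserialization is expected to default to Truncate.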
@Test
public void regressionTestCNN1() throws Exception {
    File f = Resources.asFile("regression_testing/060/060_ModelSerializer_Regression_CNN_1.zip");

    MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(f, true);

    MultiLayerConfiguration conf = net.getLayerWiseConfigurations();
    assertEquals(3, conf.getConfs().size());

    ConvolutionLayer l0 = (ConvolutionLayer) conf.getConf(0).getLayer();
    assertEquals("tanh", l0.getActivationFn().toString());
    assertEquals(3, l0.getNIn());
    assertEquals(3, l0.getNOut());
    assertEquals(new WeightInitRelu(), l0.getWeightInitFn());
    assertEquals(new RmsProp(0.15, 0.96, RmsProp.DEFAULT_RMSPROP_EPSILON), l0.getIUpdater());
    assertEquals(0.15, ((RmsProp) l0.getIUpdater()).getLearningRate(), 1e-6);
    assertArrayEquals(new int[] {2, 2}, l0.getKernelSize());
    assertArrayEquals(new int[] {1, 1}, l0.getStride());
    assertArrayEquals(new int[] {0, 0}, l0.getPadding());
    assertEquals(ConvolutionMode.Truncate, l0.getConvolutionMode()); //Pre-0.7.0: no ConvolutionMode. Want to default to truncate here if not set

    SubsamplingLayer l1 = (SubsamplingLayer) conf.getConf(1).getLayer();
    assertArrayEquals(new int[] {2, 2}, l1.getKernelSize());
    assertArrayEquals(new int[] {1, 1}, l1.getStride());
    assertArrayEquals(new int[] {0, 0}, l1.getPadding());
    assertEquals(PoolingType.MAX, l1.getPoolingType());
    assertEquals(ConvolutionMode.Truncate, l1.getConvolutionMode()); //Pre-0.7.0: no ConvolutionMode. Want to default to truncate here if not set

    OutputLayer l2 = (OutputLayer) conf.getConf(2).getLayer();
    assertEquals("sigmoid", l2.getActivationFn().toString());
    assertTrue(l2.getLossFn() instanceof LossNegativeLogLikelihood);
    //TODO assertEquals(26 * 26 * 3, l2.getNIn());
    assertEquals(5, l2.getNOut());
    assertEquals(new WeightInitRelu(), l2.getWeightInitFn());
    assertEquals(new RmsProp(0.15, 0.96, RmsProp.DEFAULT_RMSPROP_EPSILON), l2.getIUpdater());
    assertEquals(0.15, ((RmsProp) l2.getIUpdater()).getLearningRate(), 1e-6);

    assertTrue(conf.getInputPreProcess(2) instanceof CnnToFeedForwardPreProcessor);

    int numParams = (int) net.numParams();
    assertEquals(Nd4j.linspace(1, numParams, numParams, Nd4j.dataType()).reshape(1, numParams), net.params());
    int updaterSize = (int) new RmsProp().stateSize(numParams);
    assertEquals(Nd4j.linspace(1, updaterSize, updaterSize, Nd4j.dataType()).reshape(1, numParams), net.getUpdater().getStateViewArray());
}
Example 14
Source File: RegressionTest071.java From deeplearning4j with Apache License 2.0
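The 0.7.1 counterpart of Example 12, asserting the identity activation by type.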
@Test
public void regressionTestMLP2() throws Exception {
    File f = Resources.asFile("regression_testing/071/071_ModelSerializer_Regression_MLP_2.zip");

    MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(f, true);

    MultiLayerConfiguration conf = net.getLayerWiseConfigurations();
    assertEquals(2, conf.getConfs().size());

    DenseLayer l0 = (DenseLayer) conf.getConf(0).getLayer();
    assertTrue(l0.getActivationFn() instanceof ActivationLReLU);
    assertEquals(3, l0.getNIn());
    assertEquals(4, l0.getNOut());
    assertEquals(new WeightInitDistribution(new NormalDistribution(0.1, 1.2)), l0.getWeightInitFn());
    assertEquals(new RmsProp(0.15, 0.96, RmsProp.DEFAULT_RMSPROP_EPSILON), l0.getIUpdater());
    assertEquals(0.15, ((RmsProp) l0.getIUpdater()).getLearningRate(), 1e-6);
    assertEquals(new Dropout(0.6), l0.getIDropout());
    assertEquals(0.1, TestUtils.getL1(l0), 1e-6);
    assertEquals(new WeightDecay(0.2, false), TestUtils.getWeightDecayReg(l0));
    assertEquals(GradientNormalization.ClipElementWiseAbsoluteValue, l0.getGradientNormalization());
    assertEquals(1.5, l0.getGradientNormalizationThreshold(), 1e-5);

    OutputLayer l1 = (OutputLayer) conf.getConf(1).getLayer();
    assertTrue(l1.getActivationFn() instanceof ActivationIdentity);
    assertTrue(l1.getLossFn() instanceof LossMSE);
    assertEquals(4, l1.getNIn());
    assertEquals(5, l1.getNOut());
    assertEquals(new WeightInitDistribution(new NormalDistribution(0.1, 1.2)), l1.getWeightInitFn());
    assertEquals(new RmsProp(0.15, 0.96, RmsProp.DEFAULT_RMSPROP_EPSILON), l1.getIUpdater());
    assertEquals(0.15, ((RmsProp) l1.getIUpdater()).getLearningRate(), 1e-6);
    assertEquals(new Dropout(0.6), l1.getIDropout());
    assertEquals(0.1, TestUtils.getL1(l1), 1e-6);
    assertEquals(new WeightDecay(0.2, false), TestUtils.getWeightDecayReg(l1));
    assertEquals(GradientNormalization.ClipElementWiseAbsoluteValue, l1.getGradientNormalization());
    assertEquals(1.5, l1.getGradientNormalizationThreshold(), 1e-5);

    long numParams = net.numParams();
    assertEquals(Nd4j.linspace(1, numParams, numParams).reshape(1, numParams), net.params());
    int updaterSize = (int) new RmsProp().stateSize(numParams);
    assertEquals(Nd4j.linspace(1, updaterSize, updaterSize).reshape(1, numParams), net.getUpdater().getStateViewArray());
}
Example 15
Source File: TransferLearningMLNTest.java From deeplearning4j with Apache License 2.0
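Verifies that TransferLearning with a FineTuneConfiguration yields the same parameters and training behaviour as a network built directly with the target hyperparameters; the two configurations are compared via getLayerWiseConfigurations().toJson().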
@Test
public void simpleFineTune() {
    long rng = 12345L;
    Nd4j.getRandom().setSeed(rng);
    DataSet randomData = new DataSet(Nd4j.rand(DataType.FLOAT, 10, 4),
            TestUtils.randomOneHot(DataType.FLOAT, 10, 3));

    //original conf
    NeuralNetConfiguration.Builder confToChange = new NeuralNetConfiguration.Builder().seed(rng)
            .optimizationAlgo(OptimizationAlgorithm.LBFGS)
            .updater(new Nesterovs(0.01, 0.99));

    MultiLayerNetwork modelToFineTune = new MultiLayerNetwork(confToChange.list()
            .layer(0, new DenseLayer.Builder().nIn(4).nOut(3).build())
            .layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
                    LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX).nIn(3).nOut(3)
                    .build())
            .build());
    modelToFineTune.init();

    //model after applying changes with transfer learning
    MultiLayerNetwork modelNow = new TransferLearning.Builder(modelToFineTune)
            .fineTuneConfiguration(new FineTuneConfiguration.Builder().seed(rng)
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                    .updater(new RmsProp(0.5)) //Intent: override both weight and bias LR, unless bias LR is manually set also
                    .l2(0.4).build())
            .build();

    for (org.deeplearning4j.nn.api.Layer l : modelNow.getLayers()) {
        BaseLayer bl = ((BaseLayer) l.conf().getLayer());
        assertEquals(new RmsProp(0.5), bl.getIUpdater());
    }

    NeuralNetConfiguration.Builder confSet = new NeuralNetConfiguration.Builder().seed(rng)
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .updater(new RmsProp(0.5)).l2(0.4);

    MultiLayerNetwork expectedModel = new MultiLayerNetwork(confSet.list()
            .layer(0, new DenseLayer.Builder().nIn(4).nOut(3).build())
            .layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
                    LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX).nIn(3).nOut(3)
                    .build())
            .build());
    expectedModel.init();
    expectedModel.setParams(modelToFineTune.params().dup());

    assertEquals(expectedModel.params(), modelNow.params());

    //Check json
    MultiLayerConfiguration expectedConf = expectedModel.getLayerWiseConfigurations();
    assertEquals(expectedConf.toJson(), modelNow.getLayerWiseConfigurations().toJson());

    //Check params after fit
    modelNow.fit(randomData);
    expectedModel.fit(randomData);

    assertEquals(modelNow.score(), expectedModel.score(), 1e-6);
    INDArray pExp = expectedModel.params();
    INDArray pNow = modelNow.params();
    assertEquals(pExp, pNow);
}
Example 16
Source File: RegressionTest050.java From deeplearning4j with Apache License 2.0
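The 0.5.0 CNN test, again expecting the Truncate convolution-mode default for models saved before 0.7.0.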
@Test
public void regressionTestCNN1() throws Exception {
    File f = Resources.asFile("regression_testing/050/050_ModelSerializer_Regression_CNN_1.zip");

    MultiLayerNetwork net = ModelSerializer.restoreMultiLayerNetwork(f, true);

    MultiLayerConfiguration conf = net.getLayerWiseConfigurations();
    assertEquals(3, conf.getConfs().size());

    ConvolutionLayer l0 = (ConvolutionLayer) conf.getConf(0).getLayer();
    assertEquals("tanh", l0.getActivationFn().toString());
    assertEquals(3, l0.getNIn());
    assertEquals(3, l0.getNOut());
    assertEquals(new WeightInitRelu(), l0.getWeightInitFn());
    assertEquals(new RmsProp(0.15, 0.96, RmsProp.DEFAULT_RMSPROP_EPSILON), l0.getIUpdater());
    assertEquals(0.15, ((RmsProp) l0.getIUpdater()).getLearningRate(), 1e-6);
    assertArrayEquals(new int[] {2, 2}, l0.getKernelSize());
    assertArrayEquals(new int[] {1, 1}, l0.getStride());
    assertArrayEquals(new int[] {0, 0}, l0.getPadding());
    assertEquals(ConvolutionMode.Truncate, l0.getConvolutionMode()); //Pre-0.7.0: no ConvolutionMode. Want to default to truncate here if not set

    SubsamplingLayer l1 = (SubsamplingLayer) conf.getConf(1).getLayer();
    assertArrayEquals(new int[] {2, 2}, l1.getKernelSize());
    assertArrayEquals(new int[] {1, 1}, l1.getStride());
    assertArrayEquals(new int[] {0, 0}, l1.getPadding());
    assertEquals(PoolingType.MAX, l1.getPoolingType());
    assertEquals(ConvolutionMode.Truncate, l1.getConvolutionMode()); //Pre-0.7.0: no ConvolutionMode. Want to default to truncate here if not set

    OutputLayer l2 = (OutputLayer) conf.getConf(2).getLayer();
    assertEquals("sigmoid", l2.getActivationFn().toString());
    assertTrue(l2.getLossFn() instanceof LossNegativeLogLikelihood);
    assertEquals(26 * 26 * 3, l2.getNIn());
    assertEquals(5, l2.getNOut());
    assertEquals(new WeightInitRelu(), l2.getWeightInitFn());
    assertEquals(new RmsProp(0.15, 0.96, RmsProp.DEFAULT_RMSPROP_EPSILON), l2.getIUpdater());
    assertEquals(0.15, ((RmsProp) l2.getIUpdater()).getLearningRate(), 1e-6);

    int numParams = (int) net.numParams();
    assertEquals(Nd4j.linspace(1, numParams, numParams, Nd4j.dataType()).reshape(1, numParams), net.params());
    int updaterSize = (int) new RmsProp().stateSize(numParams);
    assertEquals(Nd4j.linspace(1, updaterSize, updaterSize, Nd4j.dataType()).reshape(1, numParams), net.getUpdater().getStateViewArray());
}