Java Code Examples for org.deeplearning4j.nn.multilayer.MultiLayerNetwork#params()
The following examples show how to use org.deeplearning4j.nn.multilayer.MultiLayerNetwork#params().
Each example notes the project source file and license it was taken from.
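Before the project examples, here is a minimal standalone sketch (not taken from the deeplearning4j sources; the layer sizes, seed, and Sgd learning rate are illustrative) showing the pattern the examples below rely on: params() returns all of the network's weights and biases as a single flattened row vector, so it can be snapshotted with dup(), compared before and after training, and written back with setParams().

import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.deeplearning4j.nn.weights.WeightInit;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.dataset.DataSet;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.learning.config.Sgd;
import org.nd4j.linalg.lossfunctions.LossFunctions;

public class ParamsUsageSketch {
    public static void main(String[] args) {
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .seed(12345)
                .weightInit(WeightInit.XAVIER)
                .updater(new Sgd(0.1))
                .list()
                .layer(new DenseLayer.Builder().nIn(4).nOut(3).activation(Activation.TANH).build())
                .layer(new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                        .activation(Activation.SOFTMAX).nIn(3).nOut(3).build())
                .build();

        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init();

        // params() returns a view of all weights and biases as one flattened row vector,
        // so dup() is used to take an independent snapshot
        INDArray before = net.params().dup();

        // One fit call on random data, just to change the parameters
        net.fit(new DataSet(Nd4j.rand(10, 4), Nd4j.rand(10, 3)));

        INDArray after = net.params();
        System.out.println("Params changed by training: " + !before.equals(after));

        // setParams() writes a compatible flattened vector back into the network
        net.setParams(before);
        System.out.println("Params restored: " + before.equals(net.params()));
    }
}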
Example 1
Source File: ParameterAveragingTrainingWorker.java, from deeplearning4j (Apache License 2.0)
@Override
public ParameterAveragingTrainingResult getFinalResult(MultiLayerNetwork network) {
    INDArray updaterState = null;
    if (saveUpdater) {
        Updater u = network.getUpdater();
        if (u != null)
            updaterState = u.getStateViewArray();
    }

    Nd4j.getExecutioner().commit();

    Collection<StorageMetaData> storageMetaData = null;
    Collection<Persistable> listenerStaticInfo = null;
    Collection<Persistable> listenerUpdates = null;
    if (listenerRouterProvider != null) {
        StatsStorageRouter r = listenerRouterProvider.getRouter();
        if (r instanceof VanillaStatsStorageRouter) { //TODO this is ugly... need to find a better solution
            VanillaStatsStorageRouter ssr = (VanillaStatsStorageRouter) r;
            storageMetaData = ssr.getStorageMetaData();
            listenerStaticInfo = ssr.getStaticInfo();
            listenerUpdates = ssr.getUpdates();
        }
    }

    return new ParameterAveragingTrainingResult(network.params(), updaterState, network.score(),
                    storageMetaData, listenerStaticInfo, listenerUpdates);
}
Example 2
Source File: TestSparkMultiLayerParameterAveraging.java, from deeplearning4j (Apache License 2.0)
@Test
public void testRunIteration() {
    DataSet dataSet = new IrisDataSetIterator(5, 5).next();
    List<DataSet> list = dataSet.asList();
    JavaRDD<DataSet> data = sc.parallelize(list);

    SparkDl4jMultiLayer sparkNetCopy = new SparkDl4jMultiLayer(sc, getBasicConf(),
                    new ParameterAveragingTrainingMaster(true, numExecutors(), 1, 5, 1, 0));
    MultiLayerNetwork networkCopy = sparkNetCopy.fit(data);
    INDArray expectedParams = networkCopy.params();

    SparkDl4jMultiLayer sparkNet = getBasicNetwork();
    MultiLayerNetwork network = sparkNet.fit(data);
    INDArray actualParams = network.params();

    assertEquals(expectedParams.size(1), actualParams.size(1));
}
Example 3
Source File: TestComputationGraphNetwork.java, from deeplearning4j (Apache License 2.0)
@Test
public void testIrisFit() {
    ComputationGraphConfiguration configuration = getIrisGraphConfiguration();
    ComputationGraph graph = new ComputationGraph(configuration);
    graph.init();

    MultiLayerConfiguration mlnConfig = getIrisMLNConfiguration();
    MultiLayerNetwork net = new MultiLayerNetwork(mlnConfig);
    net.init();

    Nd4j.getRandom().setSeed(12345);
    int nParams = getNumParams();
    INDArray params = Nd4j.rand(1, nParams);

    graph.setParams(params.dup());
    net.setParams(params.dup());

    DataSetIterator iris = new IrisDataSetIterator(75, 150);

    net.fit(iris);
    iris.reset();
    graph.fit(iris);

    //Check that parameters are equal for both models after fitting:
    INDArray paramsMLN = net.params();
    INDArray paramsGraph = graph.params();

    assertNotEquals(params, paramsGraph);
    assertEquals(paramsMLN, paramsGraph);
}
Example 4
Source File: TransferLearningHelperTest.java, from deeplearning4j (Apache License 2.0)
@Test
public void testMLN() {
    DataSet randomData = new DataSet(Nd4j.rand(10, 4), Nd4j.rand(10, 3));

    NeuralNetConfiguration.Builder overallConf = new NeuralNetConfiguration.Builder().updater(new Sgd(0.1))
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                    .activation(Activation.IDENTITY);

    MultiLayerNetwork modelToFineTune = new MultiLayerNetwork(overallConf.clone().list()
                    .layer(0, new DenseLayer.Builder().nIn(4).nOut(3).build())
                    .layer(1, new DenseLayer.Builder().nIn(3).nOut(2).build())
                    .layer(2, new DenseLayer.Builder().nIn(2).nOut(3).build())
                    .layer(3, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
                                    LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX).nIn(3).nOut(3)
                                    .build())
                    .build());

    modelToFineTune.init();
    MultiLayerNetwork modelNow =
                    new TransferLearning.Builder(modelToFineTune).setFeatureExtractor(1).build();
    List<INDArray> ff = modelToFineTune.feedForwardToLayer(2, randomData.getFeatures(), false);
    INDArray asFrozenFeatures = ff.get(2);

    TransferLearningHelper helper = new TransferLearningHelper(modelToFineTune, 1);

    INDArray paramsLastTwoLayers =
                    Nd4j.hstack(modelToFineTune.getLayer(2).params(), modelToFineTune.getLayer(3).params());
    MultiLayerNetwork notFrozen = new MultiLayerNetwork(overallConf.clone().list()
                    .layer(0, new DenseLayer.Builder().nIn(2).nOut(3).build())
                    .layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
                                    LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX).nIn(3).nOut(3)
                                    .build())
                    .build(), paramsLastTwoLayers);

    assertEquals(asFrozenFeatures, helper.featurize(randomData).getFeatures());
    assertEquals(randomData.getLabels(), helper.featurize(randomData).getLabels());

    for (int i = 0; i < 5; i++) {
        notFrozen.fit(new DataSet(asFrozenFeatures, randomData.getLabels()));
        helper.fitFeaturized(helper.featurize(randomData));
        modelNow.fit(randomData);
    }

    INDArray expected = Nd4j.hstack(modelToFineTune.getLayer(0).params(),
                    modelToFineTune.getLayer(1).params(), notFrozen.params());
    INDArray act = modelNow.params();
    assertEquals(expected, act);
}
Example 5
Source File: TransferLearningMLNTest.java, from deeplearning4j (Apache License 2.0)
@Test
public void simpleFineTune() {
    long rng = 12345L;
    Nd4j.getRandom().setSeed(rng);
    DataSet randomData = new DataSet(Nd4j.rand(DataType.FLOAT, 10, 4), TestUtils.randomOneHot(DataType.FLOAT, 10, 3));

    //original conf
    NeuralNetConfiguration.Builder confToChange =
                    new NeuralNetConfiguration.Builder().seed(rng).optimizationAlgo(OptimizationAlgorithm.LBFGS)
                                    .updater(new Nesterovs(0.01, 0.99));

    MultiLayerNetwork modelToFineTune = new MultiLayerNetwork(confToChange.list()
                    .layer(0, new DenseLayer.Builder().nIn(4).nOut(3).build())
                    .layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
                                    LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX).nIn(3).nOut(3)
                                    .build())
                    .build());
    modelToFineTune.init();

    //model after applying changes with transfer learning
    MultiLayerNetwork modelNow = new TransferLearning.Builder(modelToFineTune)
                    .fineTuneConfiguration(new FineTuneConfiguration.Builder().seed(rng)
                                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                                    .updater(new RmsProp(0.5)) //Intent: override both weight and bias LR, unless bias LR is manually set also
                                    .l2(0.4).build())
                    .build();

    for (org.deeplearning4j.nn.api.Layer l : modelNow.getLayers()) {
        BaseLayer bl = ((BaseLayer) l.conf().getLayer());
        assertEquals(new RmsProp(0.5), bl.getIUpdater());
    }

    NeuralNetConfiguration.Builder confSet = new NeuralNetConfiguration.Builder().seed(rng)
                    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                    .updater(new RmsProp(0.5)).l2(0.4);

    MultiLayerNetwork expectedModel = new MultiLayerNetwork(confSet.list()
                    .layer(0, new DenseLayer.Builder().nIn(4).nOut(3).build())
                    .layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
                                    LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX).nIn(3).nOut(3)
                                    .build())
                    .build());
    expectedModel.init();
    expectedModel.setParams(modelToFineTune.params().dup());

    assertEquals(expectedModel.params(), modelNow.params());

    //Check json
    MultiLayerConfiguration expectedConf = expectedModel.getLayerWiseConfigurations();
    assertEquals(expectedConf.toJson(), modelNow.getLayerWiseConfigurations().toJson());

    //Check params after fit
    modelNow.fit(randomData);
    expectedModel.fit(randomData);

    assertEquals(modelNow.score(), expectedModel.score(), 1e-6);
    INDArray pExp = expectedModel.params();
    INDArray pNow = modelNow.params();
    assertEquals(pExp, pNow);
}
Example 6
Source File: FrozenLayerTest.java, from deeplearning4j (Apache License 2.0)
@Test
public void testFrozen() {
    DataSet randomData = new DataSet(Nd4j.rand(10, 4), Nd4j.rand(10, 3));

    NeuralNetConfiguration.Builder overallConf = new NeuralNetConfiguration.Builder().updater(new Sgd(0.1))
                    .activation(Activation.IDENTITY);

    FineTuneConfiguration finetune = new FineTuneConfiguration.Builder().updater(new Sgd(0.1)).build();

    MultiLayerNetwork modelToFineTune = new MultiLayerNetwork(overallConf.clone().list()
                    .layer(0, new DenseLayer.Builder().nIn(4).nOut(3).build())
                    .layer(1, new DenseLayer.Builder().nIn(3).nOut(2).build())
                    .layer(2, new DenseLayer.Builder().nIn(2).nOut(3).build())
                    .layer(3, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
                                    LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX).nIn(3).nOut(3)
                                    .build())
                    .build());

    modelToFineTune.init();
    List<INDArray> ff = modelToFineTune.feedForwardToLayer(2, randomData.getFeatures(), false);
    INDArray asFrozenFeatures = ff.get(2);

    MultiLayerNetwork modelNow =
                    new TransferLearning.Builder(modelToFineTune).fineTuneConfiguration(finetune)
                                    .setFeatureExtractor(1).build();

    INDArray paramsLastTwoLayers =
                    Nd4j.hstack(modelToFineTune.getLayer(2).params(), modelToFineTune.getLayer(3).params());
    MultiLayerNetwork notFrozen = new MultiLayerNetwork(overallConf.clone().list()
                    .layer(0, new DenseLayer.Builder().nIn(2).nOut(3).build())
                    .layer(1, new org.deeplearning4j.nn.conf.layers.OutputLayer.Builder(
                                    LossFunctions.LossFunction.MCXENT).activation(Activation.SOFTMAX).nIn(3).nOut(3)
                                    .build())
                    .build(), paramsLastTwoLayers);

    //    assertEquals(modelNow.getLayer(2).conf(), notFrozen.getLayer(0).conf());  //Equal, other than names
    //    assertEquals(modelNow.getLayer(3).conf(), notFrozen.getLayer(1).conf());  //Equal, other than names

    //Check: forward pass
    INDArray outNow = modelNow.output(randomData.getFeatures());
    INDArray outNotFrozen = notFrozen.output(asFrozenFeatures);
    assertEquals(outNow, outNotFrozen);

    for (int i = 0; i < 5; i++) {
        notFrozen.fit(new DataSet(asFrozenFeatures, randomData.getLabels()));
        modelNow.fit(randomData);
    }

    INDArray expected = Nd4j.hstack(modelToFineTune.getLayer(0).params(),
                    modelToFineTune.getLayer(1).params(), notFrozen.params());
    INDArray act = modelNow.params();
    assertEquals(expected, act);
}
Example 7
Source File: EmbeddingLayerTest.java, from deeplearning4j (Apache License 2.0)
@Test
public void testEmbeddingWeightInit(){
    // https://github.com/eclipse/deeplearning4j/issues/8663
    //The embedding layer weight initialization should be independent of the vocabulary size (nIn setting)

    for(WeightInit wi : new WeightInit[]{WeightInit.XAVIER, WeightInit.RELU, WeightInit.XAVIER_UNIFORM, WeightInit.LECUN_NORMAL}) {
        for (boolean seq : new boolean[]{false, true}) {

            Nd4j.getRandom().setSeed(12345);
            MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                    .seed(12345)
                    .list()
                    .layer(seq ?
                            new EmbeddingSequenceLayer.Builder().weightInit(wi).nIn(100).nOut(100).build() :
                            new EmbeddingLayer.Builder().weightInit(wi).nIn(100).nOut(100).build())
                    .build();
            MultiLayerNetwork net = new MultiLayerNetwork(conf);
            net.init();

            Nd4j.getRandom().setSeed(12345);
            MultiLayerConfiguration conf2 = new NeuralNetConfiguration.Builder()
                    .seed(12345)
                    .list()
                    .layer(seq ?
                            new EmbeddingSequenceLayer.Builder().weightInit(wi).nIn(100).nOut(100).build() :
                            new EmbeddingLayer.Builder().weightInit(wi).nIn(100).nOut(100).build())
                    .build();
            MultiLayerNetwork net2 = new MultiLayerNetwork(conf2);
            net2.init();

            Nd4j.getRandom().setSeed(12345);
            MultiLayerConfiguration conf3 = new NeuralNetConfiguration.Builder()
                    .seed(12345)
                    .list()
                    .layer(seq ?
                            new EmbeddingSequenceLayer.Builder().weightInit(wi).nIn(100000).nOut(100).build() :
                            new EmbeddingLayer.Builder().weightInit(wi).nIn(100000).nOut(100).build())
                    .build();
            MultiLayerNetwork net3 = new MultiLayerNetwork(conf3);
            net3.init();

            INDArray p1 = net.params();
            INDArray p2 = net2.params();
            INDArray p3 = net3.params();

            boolean eq = p1.equalsWithEps(p2, 1e-4);
            String str = (seq ? "EmbeddingSequenceLayer" : "EmbeddingLayer") + " - " + wi;
            assertTrue(str + " p1/p2 params not equal", eq);

            double m1 = p1.meanNumber().doubleValue();
            double s1 = p1.stdNumber().doubleValue();

            double m3 = p3.meanNumber().doubleValue();
            double s3 = p3.stdNumber().doubleValue();

            assertEquals(str, m1, m3, 0.1);
            assertEquals(str, s1, s3, 0.1);

            double re = relErr(s1, s3);
            assertTrue(str + " - " + re, re < 0.05);
        }
    }
}
Example 8
Source File: GradientSharingTrainingTest.java, from deeplearning4j (Apache License 2.0)
@Test
@Ignore //AB https://github.com/eclipse/deeplearning4j/issues/8985
public void differentNetsTrainingTest() throws Exception {
    int batch = 3;

    File temp = testDir.newFolder();
    DataSet ds = new IrisDataSetIterator(150, 150).next();
    List<DataSet> list = ds.asList();
    Collections.shuffle(list, new Random(12345));
    int pos = 0;
    int dsCount = 0;
    while (pos < list.size()) {
        List<DataSet> l2 = new ArrayList<>();
        for (int i = 0; i < 3 && pos < list.size(); i++) {
            l2.add(list.get(pos++));
        }
        DataSet d = DataSet.merge(l2);
        File f = new File(temp, dsCount++ + ".bin");
        d.save(f);
    }

    INDArray last = null;
    INDArray lastDup = null;
    for (int i = 0; i < 2; i++) {
        System.out.println("||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||");
        log.info("Starting: {}", i);

        MultiLayerConfiguration conf;
        if (i == 0) {
            conf = new NeuralNetConfiguration.Builder()
                    .weightInit(WeightInit.XAVIER)
                    .seed(12345)
                    .list()
                    .layer(new OutputLayer.Builder().nIn(4).nOut(3).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build())
                    .build();
        } else {
            conf = new NeuralNetConfiguration.Builder()
                    .weightInit(WeightInit.XAVIER)
                    .seed(12345)
                    .list()
                    .layer(new DenseLayer.Builder().nIn(4).nOut(4).activation(Activation.TANH).build())
                    .layer(new OutputLayer.Builder().nIn(4).nOut(3).activation(Activation.SOFTMAX).lossFunction(LossFunctions.LossFunction.MCXENT).build())
                    .build();
        }
        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init();

        //TODO this probably won't work everywhere...
        String controller = Inet4Address.getLocalHost().getHostAddress();
        String networkMask = controller.substring(0, controller.lastIndexOf('.')) + ".0" + "/16";

        VoidConfiguration voidConfiguration = VoidConfiguration.builder()
                .unicastPort(40123) // Should be open for IN/OUT communications on all Spark nodes
                .networkMask(networkMask) // Local network mask
                .controllerAddress(controller)
                .build();
        TrainingMaster tm = new SharedTrainingMaster.Builder(voidConfiguration, 2, new FixedThresholdAlgorithm(1e-4), batch)
                .rngSeed(12345)
                .collectTrainingStats(false)
                .batchSizePerWorker(batch) // Minibatch size for each worker
                .workersPerNode(2) // Workers per node
                .build();

        SparkDl4jMultiLayer sparkNet = new SparkDl4jMultiLayer(sc, net, tm);

        //System.out.println(Arrays.toString(sparkNet.getNetwork().params().get(NDArrayIndex.point(0), NDArrayIndex.interval(0, 256)).dup().data().asFloat()));
        String fitPath = "file:///" + temp.getAbsolutePath().replaceAll("\\\\", "/");
        INDArray paramsBefore = net.params().dup();
        for( int j=0; j<3; j++ ) {
            sparkNet.fit(fitPath);
        }

        INDArray paramsAfter = net.params();
        assertNotEquals(paramsBefore, paramsAfter);

        //Also check we don't have any issues
        if(i == 0) {
            last = sparkNet.getNetwork().params();
            lastDup = last.dup();
        } else {
            assertEquals(lastDup, last);
        }
    }
}
Example 9
Source File: ConvolutionLayerTest.java, from deeplearning4j (Apache License 2.0)
@Test
public void testGetSetParams() {
    MultiLayerNetwork net = getCNNMLNConfig(true, false);
    INDArray paramsOrig = net.params().dup();

    net.setParams(paramsOrig);
    INDArray params2 = net.params();

    assertEquals(paramsOrig, params2);
}