Java Code Examples for org.deeplearning4j.nn.graph.ComputationGraph#numParams()
The following examples show how to use org.deeplearning4j.nn.graph.ComputationGraph#numParams().
All examples are drawn from the deeplearning4j project; the source file and license are noted above each example.
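Before the project examples, here is a minimal, self-contained sketch of the call itself. This is an illustrative configuration, not taken from any of the files below: the class name, layer names, and layer sizes are invented, and the comment on the numParams(boolean) overload is an inference from how the Spark examples below use it, not documented behavior.

import org.deeplearning4j.nn.conf.ComputationGraphConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.deeplearning4j.nn.graph.ComputationGraph;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.lossfunctions.LossFunctions;

public class NumParamsExample {
    public static void main(String[] args) {
        // A tiny 4 -> 5 -> 3 graph, loosely modeled on the Iris tests below
        ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder()
                .graphBuilder()
                .addInputs("input")
                .addLayer("dense", new DenseLayer.Builder().nIn(4).nOut(5)
                        .activation(Activation.TANH).build(), "input")
                .addLayer("out", new OutputLayer.Builder()
                        .lossFunction(LossFunctions.LossFunction.MCXENT)
                        .activation(Activation.SOFTMAX).nIn(5).nOut(3).build(), "dense")
                .setOutputs("out")
                .build();

        ComputationGraph graph = new ComputationGraph(conf);
        graph.init(); // parameters exist only after init()

        // Weights plus biases: (4*5 + 5) + (5*3 + 3) = 43 for this configuration
        long nParams = graph.numParams();
        System.out.println("Total parameters: " + nParams);

        // The overload the Spark examples below compare against a broadcast parameter
        // vector; assumed here to also return the full parameter count
        long nParamsAll = graph.numParams(false);
        System.out.println("numParams(false): " + nParamsAll);
    }
}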
Example 1
Source File: CGVaeReconstructionProbWithKeyFunction.java From deeplearning4j with Apache License 2.0
@Override
public VariationalAutoencoder getVaeLayer() {
    ComputationGraph network = new ComputationGraph(
            ComputationGraphConfiguration.fromJson((String) jsonConfig.getValue()));
    network.init();
    INDArray val = ((INDArray) params.value()).unsafeDuplication();
    if (val.length() != network.numParams(false))
        throw new IllegalStateException(
                "Network did not have same number of parameters as the broadcasted set parameters");
    network.setParams(val);

    Layer l = network.getLayer(0);
    if (!(l instanceof VariationalAutoencoder)) {
        throw new RuntimeException(
                "Cannot use CGVaeReconstructionProbWithKeyFunction on network that doesn't have a VAE "
                        + "layer as layer 0. Layer type: " + l.getClass());
    }
    return (VariationalAutoencoder) l;
}
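A pattern worth noting, repeated in the Spark examples below: the broadcast parameter vector is duplicated with unsafeDuplication(), its length is validated against numParams(false), and only then installed with setParams(). Reading numParams(false) as "count every parameter, including any pretrain-only ones" is an inference from this usage, not documented behavior.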
Example 2
Source File: CGVaeReconstructionErrorWithKeyFunction.java From deeplearning4j with Apache License 2.0
@Override
public VariationalAutoencoder getVaeLayer() {
    ComputationGraph network = new ComputationGraph(
            ComputationGraphConfiguration.fromJson((String) jsonConfig.getValue()));
    network.init();
    INDArray val = ((INDArray) params.value()).unsafeDuplication();
    if (val.length() != network.numParams(false))
        throw new IllegalStateException(
                "Network did not have same number of parameters as the broadcasted set parameters");
    network.setParams(val);

    Layer l = network.getLayer(0);
    if (!(l instanceof VariationalAutoencoder)) {
        throw new RuntimeException(
                "Cannot use CGVaeReconstructionErrorWithKeyFunction on network that doesn't have a VAE "
                        + "layer as layer 0. Layer type: " + l.getClass());
    }
    return (VariationalAutoencoder) l;
}
Example 3
Source File: ScoreFlatMapFunctionCGDataSet.java From deeplearning4j with Apache License 2.0
@Override
public Iterator<Tuple2<Long, Double>> call(Iterator<DataSet> dataSetIterator) throws Exception {
    if (!dataSetIterator.hasNext()) {
        return Collections.singletonList(new Tuple2<>(0L, 0.0)).iterator();
    }

    DataSetIterator iter = new IteratorDataSetIterator(dataSetIterator, minibatchSize); //Does batching where appropriate

    ComputationGraph network = new ComputationGraph(ComputationGraphConfiguration.fromJson(json));
    network.init();
    INDArray val = params.value().unsafeDuplication(); //.value() is shared by all executors on single machine -> OK, as params are not changed in score function
    if (val.length() != network.numParams(false))
        throw new IllegalStateException(
                "Network did not have same number of parameters as the broadcast set parameters");
    network.setParams(val);

    List<Tuple2<Long, Double>> out = new ArrayList<>();
    while (iter.hasNext()) {
        DataSet ds = iter.next();
        double score = network.score(ds, false);

        long numExamples = ds.getFeatures().size(0);
        out.add(new Tuple2<>(numExamples, score * numExamples));
    }

    Nd4j.getExecutioner().commit();

    return out.iterator();
}
Example 4
Source File: ScoreFlatMapFunctionCGMultiDataSet.java From deeplearning4j with Apache License 2.0
@Override
public Iterator<Tuple2<Long, Double>> call(Iterator<MultiDataSet> dataSetIterator) throws Exception {
    if (!dataSetIterator.hasNext()) {
        return Collections.singletonList(new Tuple2<>(0L, 0.0)).iterator();
    }

    MultiDataSetIterator iter = new IteratorMultiDataSetIterator(dataSetIterator, minibatchSize); //Does batching where appropriate

    ComputationGraph network = new ComputationGraph(ComputationGraphConfiguration.fromJson(json));
    network.init();
    INDArray val = params.value().unsafeDuplication(); //.value() is shared by all executors on single machine -> OK, as params are not changed in score function
    if (val.length() != network.numParams(false))
        throw new IllegalStateException(
                "Network did not have same number of parameters as the broadcast set parameters");
    network.setParams(val);

    List<Tuple2<Long, Double>> out = new ArrayList<>();
    while (iter.hasNext()) {
        MultiDataSet ds = iter.next();
        double score = network.score(ds, false);

        long numExamples = ds.getFeatures(0).size(0);
        out.add(new Tuple2<>(numExamples, score * numExamples));
    }

    Nd4j.getExecutioner().commit();

    return out.iterator();
}
Example 5
Source File: GradientCheckTestsComputationGraph.java From deeplearning4j with Apache License 2.0
@Test
public void testBasicStackUnstackVariableLengthTS() {

    int layerSizes = 2;

    Nd4j.getRandom().setSeed(12345);
    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345)
            .dataType(DataType.DOUBLE)
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .dist(new NormalDistribution(0, 1))
            .activation(Activation.TANH).updater(new NoOp()).graphBuilder()
            .addInputs("in1", "in2")
            .addLayer("d0", new SimpleRnn.Builder().nIn(layerSizes).nOut(layerSizes).build(), "in1")
            .addLayer("d1", new SimpleRnn.Builder().nIn(layerSizes).nOut(layerSizes).build(), "in2")
            .addVertex("stack", new StackVertex(), "d0", "d1")
            .addLayer("d2", new SimpleRnn.Builder().nIn(layerSizes).nOut(layerSizes).build(), "stack")
            .addVertex("u1", new UnstackVertex(0, 2), "d2").addVertex("u2", new UnstackVertex(1, 2), "d2")
            .addLayer("p1", new GlobalPoolingLayer.Builder(PoolingType.AVG).build(), "u1")
            .addLayer("p2", new GlobalPoolingLayer.Builder(PoolingType.AVG).build(), "u2")
            .addLayer("out1", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.L2)
                    .nIn(layerSizes).nOut(layerSizes).activation(Activation.IDENTITY).build(), "p1")
            .addLayer("out2", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.L2)
                    .nIn(layerSizes).nOut(2).activation(Activation.IDENTITY).build(), "p2")
            .setOutputs("out1", "out2").build();

    ComputationGraph graph = new ComputationGraph(conf);
    graph.init();

    Nd4j.getRandom().setSeed(12345);
    long nParams = graph.numParams();
    INDArray newParams = Nd4j.rand(new long[]{1, nParams});
    graph.setParams(newParams);

    int[] mbSizes = new int[] {1, 2, 3};
    for (int minibatch : mbSizes) {

        INDArray in1 = Nd4j.rand(new int[] {minibatch, layerSizes, 4});
        INDArray in2 = Nd4j.rand(new int[] {minibatch, layerSizes, 5});

        INDArray inMask1 = Nd4j.zeros(minibatch, 4);
        inMask1.get(NDArrayIndex.all(), NDArrayIndex.interval(0, 3)).assign(1);

        INDArray inMask2 = Nd4j.zeros(minibatch, 5);
        inMask2.get(NDArrayIndex.all(), NDArrayIndex.interval(0, 4)).assign(1);

        INDArray labels1 = Nd4j.rand(new int[] {minibatch, 2});
        INDArray labels2 = Nd4j.rand(new int[] {minibatch, 2});

        String testName = "testBasicStackUnstackVariableLengthTS() - minibatch = " + minibatch;

        if (PRINT_RESULTS) {
            System.out.println(testName);
//            for (int j = 0; j < graph.getNumLayers(); j++)
//                System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams());
        }

        graph.setLayerMaskArrays(new INDArray[] {inMask1, inMask2}, null);

        boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.GraphConfig().net(graph)
                .inputs(new INDArray[]{in1, in2})
                .labels(new INDArray[]{labels1, labels2})
                .inputMask(new INDArray[]{inMask1, inMask2}));

        assertTrue(testName, gradOK);
        TestUtils.testModelSerialization(graph);
    }
}
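The gradient-check tests in this and the following examples use numParams() for a re-initialization idiom: a fresh random row vector of shape [1, nParams] is generated and installed with setParams(), so the check runs on known-random parameters rather than on whatever the initializer produced.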
Example 6
Source File: ArbiterStatusListener.java From deeplearning4j with Apache License 2.0
@Override
public void onCandidateIteration(CandidateInfo candidateInfo, Object candidate, int iteration) {
    double score;
    long numParams;
    int numLayers;
    String modelConfigJson;
    int totalNumUpdates;
    if(candidate instanceof MultiLayerNetwork){
        MultiLayerNetwork m = (MultiLayerNetwork)candidate;
        score = m.score();
        numParams = m.numParams();
        numLayers = m.getnLayers();
        modelConfigJson = m.getLayerWiseConfigurations().toJson();
        totalNumUpdates = m.getLayerWiseConfigurations().getIterationCount();
    } else if(candidate instanceof ComputationGraph) {
        ComputationGraph cg = (ComputationGraph)candidate;
        score = cg.score();
        numParams = cg.numParams();
        numLayers = cg.getNumLayers();
        modelConfigJson = cg.getConfiguration().toJson();
        totalNumUpdates = cg.getConfiguration().getIterationCount();
    } else {
        score = 0;
        numParams = 0;
        numLayers = 0;
        totalNumUpdates = 0;
        modelConfigJson = "";
    }

    int idx = candidateInfo.getIndex();

    Pair<IntArrayList, FloatArrayList> pair = candidateScoreVsIter.computeIfAbsent(idx,
            k -> new Pair<>(new IntArrayList(), new FloatArrayList()));

    IntArrayList iter = pair.getFirst();
    FloatArrayList scores = pair.getSecond();

    //Do we need subsampling to avoid having too many data points?
    int subsamplingFreq = candidateScoreVsIterSubsampleFreq.computeIfAbsent(idx, k -> 1);
    if(iteration / subsamplingFreq > MAX_SCORE_VS_ITER_PTS){
        //Double subsampling frequency and re-parse data
        subsamplingFreq *= 2;
        candidateScoreVsIterSubsampleFreq.put(idx, subsamplingFreq);
        IntArrayList newIter = new IntArrayList();
        FloatArrayList newScores = new FloatArrayList();
        for( int i=0; i<iter.size(); i++ ){
            int it = iter.get(i);
            if(it % subsamplingFreq == 0){
                newIter.add(it);
                newScores.add(scores.get(i));
            }
        }
        iter = newIter;
        scores = newScores;
        candidateScoreVsIter.put(idx, new Pair<>(iter, scores));
    }

    if(iteration % subsamplingFreq == 0) {
        iter.add(iteration);
        scores.add((float) score);
    }

    int[] iters = iter.toIntArray();
    float[] fScores = new float[iters.length];
    for( int i=0; i<iters.length; i++ ){
        fScores[i] = scores.get(i);
    }

    ModelInfoPersistable p = new ModelInfoPersistable.Builder()
            .timestamp(candidateInfo.getCreatedTime())
            .sessionId(sessionId)
            .workerId(String.valueOf(candidateInfo.getIndex()))
            .modelIdx(candidateInfo.getIndex())
            .score(candidateInfo.getScore())
            .status(candidateInfo.getCandidateStatus())
            .scoreVsIter(iters, fScores)
            .lastUpdateTime(System.currentTimeMillis())
            .numParameters(numParams)
            .numLayers(numLayers)
            .totalNumUpdates(totalNumUpdates)
            .paramSpaceValues(candidateInfo.getFlatParams())
            .modelConfigJson(modelConfigJson)
            .exceptionStackTrace(candidateInfo.getExceptionStackTrace())
            .build();

    lastModelInfoPersistable.put(candidateInfo.getIndex(), p);
    statsStorage.putUpdate(p);
}
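Here numParams() is simply a model statistic: it is collected the same way for MultiLayerNetwork and ComputationGraph candidates and persisted alongside the layer count, score, and configuration JSON.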
Example 7
Source File: ScoreExamplesFunction.java From deeplearning4j with Apache License 2.0
@Override
public Iterator<Double> call(Iterator<MultiDataSet> iterator) throws Exception {
    if (!iterator.hasNext()) {
        return Collections.emptyIterator();
    }

    ComputationGraph network = new ComputationGraph(ComputationGraphConfiguration.fromJson(jsonConfig.getValue()));
    network.init();
    INDArray val = params.value().unsafeDuplication();
    if (val.length() != network.numParams(false))
        throw new IllegalStateException(
                "Network did not have same number of parameters as the broadcast set parameters");
    network.setParams(val);

    List<Double> ret = new ArrayList<>();

    List<MultiDataSet> collect = new ArrayList<>(batchSize);
    int totalCount = 0;
    while (iterator.hasNext()) {
        collect.clear();
        int nExamples = 0;
        while (iterator.hasNext() && nExamples < batchSize) {
            MultiDataSet ds = iterator.next();
            val n = ds.getFeatures(0).size(0);
            collect.add(ds);
            nExamples += n;
        }
        totalCount += nExamples;

        MultiDataSet data = org.nd4j.linalg.dataset.MultiDataSet.merge(collect);

        INDArray scores = network.scoreExamples(data, addRegularization);
        double[] doubleScores = scores.data().asDouble();

        for (double doubleScore : doubleScores) {
            ret.add(doubleScore);
        }
    }

    Nd4j.getExecutioner().commit();

    if (log.isDebugEnabled()) {
        log.debug("Scored {} examples ", totalCount);
    }

    return ret.iterator();
}
Example 8
Source File: ScoreExamplesWithKeyFunction.java From deeplearning4j with Apache License 2.0
@Override
public Iterator<Tuple2<K, Double>> call(Iterator<Tuple2<K, MultiDataSet>> iterator) throws Exception {
    if (!iterator.hasNext()) {
        return Collections.emptyIterator();
    }

    ComputationGraph network = new ComputationGraph(ComputationGraphConfiguration.fromJson(jsonConfig.getValue()));
    network.init();
    INDArray val = params.value().unsafeDuplication();
    if (val.length() != network.numParams(false))
        throw new IllegalStateException(
                "Network did not have same number of parameters as the broadcast set parameters");
    network.setParams(val);

    List<Tuple2<K, Double>> ret = new ArrayList<>();

    List<MultiDataSet> collect = new ArrayList<>(batchSize);
    List<K> collectKey = new ArrayList<>(batchSize);
    int totalCount = 0;
    while (iterator.hasNext()) {
        collect.clear();
        collectKey.clear();
        int nExamples = 0;
        while (iterator.hasNext() && nExamples < batchSize) {
            Tuple2<K, MultiDataSet> t2 = iterator.next();
            MultiDataSet ds = t2._2();
            val n = ds.getFeatures(0).size(0);
            if (n != 1)
                throw new IllegalStateException("Cannot score examples with one key per data set if "
                        + "data set contains more than 1 example (numExamples: " + n + ")");
            collect.add(ds);
            collectKey.add(t2._1());
            nExamples += n;
        }
        totalCount += nExamples;

        MultiDataSet data = org.nd4j.linalg.dataset.MultiDataSet.merge(collect);

        INDArray scores = network.scoreExamples(data, addRegularization);
        double[] doubleScores = scores.data().asDouble();

        for (int i = 0; i < doubleScores.length; i++) {
            ret.add(new Tuple2<>(collectKey.get(i), doubleScores[i]));
        }
    }

    Nd4j.getExecutioner().commit();

    if (log.isDebugEnabled()) {
        log.debug("Scored {} examples ", totalCount);
    }

    return ret.iterator();
}
Example 9
Source File: GradientCheckTestsComputationGraph.java From deeplearning4j with Apache License 2.0
@Test
public void testBasicTwoOutputs() {
    Nd4j.getRandom().setSeed(12345);
    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345)
            .dataType(DataType.DOUBLE)
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .dist(new NormalDistribution(0, 1))
            .activation(Activation.TANH).updater(new NoOp()).graphBuilder()
            .addInputs("in1", "in2").addLayer("d0", new DenseLayer.Builder().nIn(2).nOut(2).build(), "in1")
            .addLayer("d1", new DenseLayer.Builder().nIn(2).nOut(2).build(), "in2")
            .addLayer("out1", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.L2).nIn(2)
                    .nOut(2).activation(Activation.IDENTITY).build(), "d0")
            .addLayer("out2", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.L2).nIn(2)
                    .nOut(2).activation(Activation.IDENTITY).build(), "d1")
            .setOutputs("out1", "out2").build();

    ComputationGraph graph = new ComputationGraph(conf);
    graph.init();

    System.out.println("Num layers: " + graph.getNumLayers());
    System.out.println("Num params: " + graph.numParams());

    Nd4j.getRandom().setSeed(12345);
    long nParams = graph.numParams();
    INDArray newParams = Nd4j.rand(new long[]{1, nParams});
    graph.setParams(newParams);

    int[] mbSizes = new int[] {1, 3, 10};
    for (int minibatch : mbSizes) {

        INDArray in1 = Nd4j.rand(minibatch, 2);
        INDArray in2 = Nd4j.rand(minibatch, 2);

        INDArray labels1 = Nd4j.rand(minibatch, 2);
        INDArray labels2 = Nd4j.rand(minibatch, 2);

        String testName = "testBasicStackUnstack() - minibatch = " + minibatch;

        if (PRINT_RESULTS) {
            System.out.println(testName);
//            for (int j = 0; j < graph.getNumLayers(); j++)
//                System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams());
        }

        boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.GraphConfig().net(graph)
                .inputs(new INDArray[]{in1, in2})
                .labels(new INDArray[]{labels1, labels2}));

        assertTrue(testName, gradOK);
        TestUtils.testModelSerialization(graph);
    }
}
Example 10
Source File: GradientCheckTestsComputationGraph.java From deeplearning4j with Apache License 2.0
@Test
public void testBasicIris() {
    Nd4j.getRandom().setSeed(12345);
    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345)
            .dataType(DataType.DOUBLE)
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .dist(new NormalDistribution(0, 1)).updater(new NoOp())
            .graphBuilder().addInputs("input")
            .addLayer("firstLayer",
                    new DenseLayer.Builder().nIn(4).nOut(5).activation(Activation.TANH).build(),
                    "input")
            .addLayer("outputLayer",
                    new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MCXENT)
                            .activation(Activation.SOFTMAX).nIn(5).nOut(3).build(),
                    "firstLayer")
            .setOutputs("outputLayer").build();

    ComputationGraph graph = new ComputationGraph(conf);
    graph.init();

    Nd4j.getRandom().setSeed(12345);
    long nParams = graph.numParams();
    INDArray newParams = Nd4j.rand(new long[]{1, nParams});
    graph.setParams(newParams);

    DataSet ds = new IrisDataSetIterator(150, 150).next();
    INDArray min = ds.getFeatures().min(0);
    INDArray max = ds.getFeatures().max(0);
    ds.getFeatures().subiRowVector(min).diviRowVector(max.sub(min));
    INDArray input = ds.getFeatures();
    INDArray labels = ds.getLabels();

    if (PRINT_RESULTS) {
        System.out.println("testBasicIris()");
//        for (int j = 0; j < graph.getNumLayers(); j++)
//            System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams());
    }

    boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.GraphConfig().net(graph)
            .inputs(new INDArray[]{input})
            .labels(new INDArray[]{labels}));

    String msg = "testBasicIris()";
    assertTrue(msg, gradOK);
    TestUtils.testModelSerialization(graph);
}
Example 11
Source File: GradientCheckTestsComputationGraph.java From deeplearning4j with Apache License 2.0
@Test
public void testBasicStackUnstackDebug() {
    Nd4j.getRandom().setSeed(12345);
    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345)
            .dataType(DataType.DOUBLE)
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .dist(new NormalDistribution(0, 1))
            .activation(Activation.TANH).updater(new NoOp()).graphBuilder()
            .addInputs("in1", "in2").addLayer("d0", new DenseLayer.Builder().nIn(2).nOut(2).build(), "in1")
            .addLayer("d1", new DenseLayer.Builder().nIn(2).nOut(2).build(), "in2")
            .addVertex("stack", new StackVertex(), "d0", "d1")
            .addVertex("u0", new UnstackVertex(0, 2), "stack")
            .addVertex("u1", new UnstackVertex(1, 2), "stack")
            .addLayer("out1", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.L2).nIn(2)
                    .nOut(2).activation(Activation.IDENTITY).build(), "u0")
            .addLayer("out2", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.L2).nIn(2)
                    .nOut(2).activation(Activation.IDENTITY).build(), "u1")
            .setOutputs("out1", "out2").build();

    ComputationGraph graph = new ComputationGraph(conf);
    graph.init();

    Nd4j.getRandom().setSeed(12345);
    long nParams = graph.numParams();
    INDArray newParams = Nd4j.rand(new long[]{1, nParams});
    graph.setParams(newParams);

    int[] mbSizes = new int[] {1, 3, 10};
    for (int minibatch : mbSizes) {

        INDArray in1 = Nd4j.rand(minibatch, 2);
        INDArray in2 = Nd4j.rand(minibatch, 2);

        INDArray labels1 = Nd4j.rand(minibatch, 2);
        INDArray labels2 = Nd4j.rand(minibatch, 2);

        String testName = "testBasicStackUnstack() - minibatch = " + minibatch;

        if (PRINT_RESULTS) {
            System.out.println(testName);
//            for (int j = 0; j < graph.getNumLayers(); j++)
//                System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams());
        }

        boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.GraphConfig().net(graph)
                .inputs(new INDArray[]{in1, in2})
                .labels(new INDArray[]{labels1, labels2}));

        assertTrue(testName, gradOK);
        TestUtils.testModelSerialization(graph);
    }
}
Example 12
Source File: GradientCheckTestsComputationGraph.java From deeplearning4j with Apache License 2.0
@Test
public void testBasicStackUnstack() {

    int layerSizes = 2;

    Nd4j.getRandom().setSeed(12345);
    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345)
            .dataType(DataType.DOUBLE)
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .dist(new NormalDistribution(0, 1))
            .activation(Activation.TANH).updater(new NoOp()).graphBuilder()
            .addInputs("in1", "in2")
            .addLayer("d0", new DenseLayer.Builder().nIn(layerSizes).nOut(layerSizes).build(), "in1")
            .addLayer("d1", new DenseLayer.Builder().nIn(layerSizes).nOut(layerSizes).build(), "in2")
            .addVertex("stack", new StackVertex(), "d0", "d1")
            .addLayer("d2", new DenseLayer.Builder().nIn(layerSizes).nOut(layerSizes).build(), "stack")
            .addVertex("u1", new UnstackVertex(0, 2), "d2").addVertex("u2", new UnstackVertex(1, 2), "d2")
            .addLayer("out1", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.L2)
                    .nIn(layerSizes).nOut(layerSizes).activation(Activation.IDENTITY).build(), "u1")
            .addLayer("out2", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.L2)
                    .nIn(layerSizes).nOut(2).activation(Activation.IDENTITY).build(), "u2")
            .setOutputs("out1", "out2").build();

    ComputationGraph graph = new ComputationGraph(conf);
    graph.init();

    Nd4j.getRandom().setSeed(12345);
    long nParams = graph.numParams();
    INDArray newParams = Nd4j.rand(new long[]{1, nParams});
    graph.setParams(newParams);

    int[] mbSizes = new int[] {1, 3, 10};
    for (int minibatch : mbSizes) {

        INDArray in1 = Nd4j.rand(minibatch, layerSizes);
        INDArray in2 = Nd4j.rand(minibatch, layerSizes);

        INDArray labels1 = Nd4j.rand(minibatch, 2);
        INDArray labels2 = Nd4j.rand(minibatch, 2);

        String testName = "testBasicStackUnstack() - minibatch = " + minibatch;

        if (PRINT_RESULTS) {
            System.out.println(testName);
//            for (int j = 0; j < graph.getNumLayers(); j++)
//                System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams());
        }

        boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.GraphConfig().net(graph)
                .inputs(new INDArray[]{in1, in2})
                .labels(new INDArray[]{labels1, labels2}));

        assertTrue(testName, gradOK);
        TestUtils.testModelSerialization(graph);
    }
}
Example 13
Source File: GradientCheckTestsComputationGraph.java From deeplearning4j with Apache License 2.0
@Test
public void testBasicL2() {
    Nd4j.getRandom().setSeed(12345);
    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345)
            .dataType(DataType.DOUBLE)
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .dist(new NormalDistribution(0, 1))
            .activation(Activation.TANH).updater(new NoOp()).graphBuilder()
            .addInputs("in1", "in2").addLayer("d0", new DenseLayer.Builder().nIn(2).nOut(2).build(), "in1")
            .addLayer("d1", new DenseLayer.Builder().nIn(2).nOut(2).build(), "in2")
            .addVertex("l2", new L2Vertex(), "d0", "d1")
            .addLayer("out", new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.L2).nIn(1)
                    .nOut(1).activation(Activation.IDENTITY).build(), "l2")
            .setOutputs("out").build();

    ComputationGraph graph = new ComputationGraph(conf);
    graph.init();

    Nd4j.getRandom().setSeed(12345);
    long nParams = graph.numParams();
    INDArray newParams = Nd4j.rand(new long[]{1, nParams});
    graph.setParams(newParams);

    int[] mbSizes = new int[] {1, 3, 10};
    for (int minibatch : mbSizes) {

        INDArray in1 = Nd4j.rand(DataType.DOUBLE, minibatch, 2);
        INDArray in2 = Nd4j.rand(DataType.DOUBLE, minibatch, 2);

        INDArray labels = Nd4j.rand(DataType.DOUBLE, minibatch, 1);

        String testName = "testBasicL2() - minibatch = " + minibatch;

        if (PRINT_RESULTS) {
            System.out.println(testName);
//            for (int j = 0; j < graph.getNumLayers(); j++)
//                System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams());
        }

        boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.GraphConfig().net(graph)
                .inputs(new INDArray[]{in1, in2})
                .labels(new INDArray[]{labels}));

        assertTrue(testName, gradOK);
        TestUtils.testModelSerialization(graph);
    }
}
Example 14
Source File: GradientCheckTestsComputationGraph.java From deeplearning4j with Apache License 2.0
@Test
public void testBasicIrisTripletStackingL2Loss() {
    Nd4j.getRandom().setSeed(12345);
    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345)
            .dataType(DataType.DOUBLE)
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .dist(new NormalDistribution(0, 1))
            .updater(new NoOp()).graphBuilder()
            .addInputs("input1", "input2", "input3")
            .addVertex("stack1", new StackVertex(), "input1", "input2", "input3")
            .addLayer("l1", new DenseLayer.Builder().nIn(4).nOut(5)
                    .activation(Activation.TANH).build(), "stack1")
            .addVertex("unstack0", new UnstackVertex(0, 3), "l1")
            .addVertex("unstack1", new UnstackVertex(1, 3), "l1")
            .addVertex("unstack2", new UnstackVertex(2, 3), "l1")
            .addVertex("l2-1", new L2Vertex(), "unstack1", "unstack0") // x - x-
            .addVertex("l2-2", new L2Vertex(), "unstack1", "unstack2") // x - x+
            .addLayer("lossLayer", new LossLayer.Builder()
                    .lossFunction(LossFunctions.LossFunction.MCXENT)
                    .activation(Activation.SOFTMAX).build(), "l2-1", "l2-2")
            .setOutputs("lossLayer").build();

    ComputationGraph graph = new ComputationGraph(conf);
    graph.init();

    int numParams = (4 * 5 + 5);
    assertEquals(numParams, graph.numParams());

    Nd4j.getRandom().setSeed(12345);
    long nParams = graph.numParams();
    INDArray newParams = Nd4j.rand(new long[]{1, nParams});
    graph.setParams(newParams);

    INDArray pos = Nd4j.rand(150, 4);
    INDArray anc = Nd4j.rand(150, 4);
    INDArray neg = Nd4j.rand(150, 4);

    INDArray labels = Nd4j.zeros(150, 2);
    Random r = new Random(12345);
    for (int i = 0; i < 150; i++) {
        labels.putScalar(i, r.nextInt(2), 1.0);
    }

    Map<String, INDArray> out = graph.feedForward(new INDArray[] {pos, anc, neg}, true);

//    for (String s : out.keySet()) {
//        System.out.println(s + "\t" + Arrays.toString(out.get(s).shape()));
//    }

    if (PRINT_RESULTS) {
        System.out.println("testBasicIrisTripletStackingL2Loss()");
//        for (int j = 0; j < graph.getNumLayers(); j++)
//            System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams());
    }

    boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.GraphConfig().net(graph)
            .inputs(new INDArray[]{pos, anc, neg})
            .labels(new INDArray[]{labels}));

    String msg = "testBasicIrisTripletStackingL2Loss()";
    assertTrue(msg, gradOK);
    TestUtils.testModelSerialization(graph);
}
Example 15
Source File: GradientCheckTestsComputationGraph.java From deeplearning4j with Apache License 2.0
@Test
public void testBasicIrisWithElementWiseNodeInputSizeGreaterThanTwo() {

    ElementWiseVertex.Op[] ops = new ElementWiseVertex.Op[] {ElementWiseVertex.Op.Add,
            ElementWiseVertex.Op.Product, ElementWiseVertex.Op.Average, ElementWiseVertex.Op.Max};

    for (ElementWiseVertex.Op op : ops) {

        Nd4j.getRandom().setSeed(12345);
        ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345)
                .dataType(DataType.DOUBLE)
                .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                .dist(new NormalDistribution(0, 1))
                .updater(new NoOp()).graphBuilder().addInputs("input")
                .addLayer("l1", new DenseLayer.Builder().nIn(4).nOut(5).activation(Activation.TANH).build(),
                        "input")
                .addLayer("l2", new DenseLayer.Builder().nIn(4).nOut(5).activation(Activation.SIGMOID)
                        .build(), "input")
                .addLayer("l3", new DenseLayer.Builder().nIn(4).nOut(5).activation(Activation.RELU).build(),
                        "input")
                .addVertex("elementwise", new ElementWiseVertex(op), "l1", "l2", "l3")
                .addLayer("outputLayer",
                        new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MCXENT)
                                .activation(Activation.SOFTMAX).nIn(5).nOut(3).build(),
                        "elementwise")
                .setOutputs("outputLayer").build();

        ComputationGraph graph = new ComputationGraph(conf);
        graph.init();

        int numParams = (4 * 5 + 5) + (4 * 5 + 5) + (4 * 5 + 5) + (5 * 3 + 3);
        assertEquals(numParams, graph.numParams());

        Nd4j.getRandom().setSeed(12345);
        long nParams = graph.numParams();
        INDArray newParams = Nd4j.rand(new long[]{1, nParams});
        graph.setParams(newParams);

        DataSet ds = new IrisDataSetIterator(150, 150).next();
        INDArray min = ds.getFeatures().min(0);
        INDArray max = ds.getFeatures().max(0);
        ds.getFeatures().subiRowVector(min).diviRowVector(max.sub(min));
        INDArray input = ds.getFeatures();
        INDArray labels = ds.getLabels();

        if (PRINT_RESULTS) {
            System.out.println("testBasicIrisWithElementWiseVertex(op=" + op + ")");
//            for (int j = 0; j < graph.getNumLayers(); j++)
//                System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams());
        }

        boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.GraphConfig().net(graph)
                .inputs(new INDArray[]{input})
                .labels(new INDArray[]{labels}));

        String msg = "testBasicIrisWithElementWiseVertex(op=" + op + ")";
        assertTrue(msg, gradOK);
        TestUtils.testModelSerialization(graph);
    }
}
Example 16
Source File: GradientCheckTestsComputationGraph.java From deeplearning4j with Apache License 2.0
@Test
public void testBasicIrisWithElementWiseNode() {

    ElementWiseVertex.Op[] ops = new ElementWiseVertex.Op[] {ElementWiseVertex.Op.Add,
            ElementWiseVertex.Op.Subtract, ElementWiseVertex.Op.Product, ElementWiseVertex.Op.Average,
            ElementWiseVertex.Op.Max};

    for (ElementWiseVertex.Op op : ops) {

        Nd4j.getRandom().setSeed(12345);
        ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345)
                .dataType(DataType.DOUBLE)
                .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                .dist(new NormalDistribution(0, 1))
                .updater(new NoOp()).graphBuilder().addInputs("input")
                .addLayer("l1", new DenseLayer.Builder().nIn(4).nOut(5).activation(Activation.TANH).build(),
                        "input")
                .addLayer("l2", new DenseLayer.Builder().nIn(4).nOut(5).activation(Activation.SIGMOID)
                        .build(), "input")
                .addVertex("elementwise", new ElementWiseVertex(op), "l1", "l2")
                .addLayer("outputLayer",
                        new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MCXENT)
                                .activation(Activation.SOFTMAX).nIn(5).nOut(3).build(),
                        "elementwise")
                .setOutputs("outputLayer").build();

        ComputationGraph graph = new ComputationGraph(conf);
        graph.init();

        int numParams = (4 * 5 + 5) + (4 * 5 + 5) + (5 * 3 + 3);
        assertEquals(numParams, graph.numParams());

        Nd4j.getRandom().setSeed(12345);
        long nParams = graph.numParams();
        INDArray newParams = Nd4j.rand(new long[]{1, nParams});
        graph.setParams(newParams);

        DataSet ds = new IrisDataSetIterator(150, 150).next();
        INDArray min = ds.getFeatures().min(0);
        INDArray max = ds.getFeatures().max(0);
        ds.getFeatures().subiRowVector(min).diviRowVector(max.sub(min));
        INDArray input = ds.getFeatures();
        INDArray labels = ds.getLabels();

        if (PRINT_RESULTS) {
            System.out.println("testBasicIrisWithElementWiseVertex(op=" + op + ")");
//            for (int j = 0; j < graph.getNumLayers(); j++)
//                System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams());
        }

        boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.GraphConfig().net(graph)
                .inputs(new INDArray[]{input})
                .labels(new INDArray[]{labels}));

        String msg = "testBasicIrisWithElementWiseVertex(op=" + op + ")";
        assertTrue(msg, gradOK);
        TestUtils.testModelSerialization(graph);
    }
}
Example 17
Source File: GradientCheckTestsComputationGraph.java From deeplearning4j with Apache License 2.0
@Test
public void testBasicIrisWithMerging() {
    Nd4j.getRandom().setSeed(12345);
    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345)
            .dataType(DataType.DOUBLE)
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .dist(new NormalDistribution(0, 1)).updater(new NoOp())
            .graphBuilder().addInputs("input")
            .addLayer("l1", new DenseLayer.Builder().nIn(4).nOut(5).activation(Activation.TANH).build(),
                    "input")
            .addLayer("l2", new DenseLayer.Builder().nIn(4).nOut(5).activation(Activation.TANH).build(),
                    "input")
            .addVertex("merge", new MergeVertex(), "l1", "l2")
            .addLayer("outputLayer",
                    new OutputLayer.Builder().lossFunction(LossFunctions.LossFunction.MCXENT)
                            .activation(Activation.SOFTMAX).nIn(5 + 5).nOut(3).build(),
                    "merge")
            .setOutputs("outputLayer").build();

    ComputationGraph graph = new ComputationGraph(conf);
    graph.init();

    int numParams = (4 * 5 + 5) + (4 * 5 + 5) + (10 * 3 + 3);
    assertEquals(numParams, graph.numParams());

    Nd4j.getRandom().setSeed(12345);
    long nParams = graph.numParams();
    INDArray newParams = Nd4j.rand(new long[]{1, nParams});
    graph.setParams(newParams);

    DataSet ds = new IrisDataSetIterator(150, 150).next();
    INDArray min = ds.getFeatures().min(0);
    INDArray max = ds.getFeatures().max(0);
    ds.getFeatures().subiRowVector(min).diviRowVector(max.sub(min));
    INDArray input = ds.getFeatures();
    INDArray labels = ds.getLabels();

    if (PRINT_RESULTS) {
        System.out.println("testBasicIrisWithMerging()");
//        for (int j = 0; j < graph.getNumLayers(); j++)
//            System.out.println("Layer " + j + " # params: " + graph.getLayer(j).numParams());
    }

    boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.GraphConfig().net(graph)
            .inputs(new INDArray[]{input})
            .labels(new INDArray[]{labels}));

    String msg = "testBasicIrisWithMerging()";
    assertTrue(msg, gradOK);
    TestUtils.testModelSerialization(graph);
}