org.deeplearning4j.datasets.iterator.impl.MnistDataSetIterator Java Examples
The following examples show how to use
org.deeplearning4j.datasets.iterator.impl.MnistDataSetIterator.
Each example links to its original source file in the deeplearning4j project.
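Most of the examples below construct the iterator through one of three overloads: (batchSize, numExamples), (batchSize, train, rngSeed), or the six-argument (batchSize, numExamples, binarize, train, shuffle, rngSeed). The standalone sketch below summarizes them; the wrapping class, variable names, and shape comments are illustrative assumptions rather than code taken from any example on this page.

import java.io.IOException;
import java.util.Arrays;

import org.deeplearning4j.datasets.iterator.impl.MnistDataSetIterator;
import org.nd4j.linalg.dataset.DataSet;
import org.nd4j.linalg.dataset.api.iterator.DataSetIterator;

public class MnistIteratorSketch {
    public static void main(String[] args) throws IOException {
        // Two-argument form: batch size and total number of examples to iterate over.
        // Small values like (10, 10) are common in tests for speed.
        DataSetIterator small = new MnistDataSetIterator(10, 10);

        // Three-argument form: batch size, train (true) vs. test (false) split, RNG seed.
        DataSetIterator train = new MnistDataSetIterator(64, true, 12345);

        // Six-argument form: batch size, numExamples, binarize, train, shuffle, rngSeed.
        DataSetIterator custom = new MnistDataSetIterator(32, 1000, false, true, true, 12345);

        while (small.hasNext()) {
            DataSet batch = small.next();
            // Features have shape [batchSize, 784] (flattened 28x28 images);
            // labels have shape [batchSize, 10] (one-hot digit classes).
            System.out.println(Arrays.toString(batch.getFeatures().shape()));
        }
    }
}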
Example #1
Source File: RPTreeTest.java From deeplearning4j with Apache License 2.0
@Test
public void testRPTree() throws Exception {
    DataSetIterator mnist = new MnistDataSetIterator(150, 150);
    RPTree rpTree = new RPTree(784, 50);
    DataSet d = mnist.next();

    NormalizerStandardize normalizerStandardize = new NormalizerStandardize();
    normalizerStandardize.fit(d);
    normalizerStandardize.transform(d.getFeatures());

    INDArray data = d.getFeatures();
    rpTree.buildTree(data);
    assertEquals(4, rpTree.getLeaves().size());
    assertEquals(0, rpTree.getRoot().getDepth());

    List<Integer> candidates = rpTree.getCandidates(data.getRow(0));
    assertFalse(candidates.isEmpty());
    assertEquals(10, rpTree.query(data.slice(0), 10).length());
    System.out.println(candidates.size());

    rpTree.addNodeAtIndex(150, data.getRow(0));
}
Example #2
Source File: BatchNormalizationTest.java From deeplearning4j with Apache License 2.0
@Test
public void testCNNBNActivationCombo() throws Exception {
    DataSetIterator iter = new MnistDataSetIterator(2, 2);
    DataSet next = iter.next();

    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).seed(123)
            .list()
            .layer(0, new ConvolutionLayer.Builder().nIn(1).nOut(6).weightInit(WeightInit.XAVIER)
                    .activation(Activation.IDENTITY).build())
            .layer(1, new BatchNormalization.Builder().build())
            .layer(2, new ActivationLayer.Builder().activation(Activation.RELU).build())
            .layer(3, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                    .weightInit(WeightInit.XAVIER).activation(Activation.SOFTMAX).nOut(10).build())
            .setInputType(InputType.convolutionalFlat(28, 28, 1)).build();

    MultiLayerNetwork network = new MultiLayerNetwork(conf);
    network.init();
    network.fit(next);

    assertNotEquals(null, network.getLayer(0).getParam("W"));
    assertNotEquals(null, network.getLayer(0).getParam("b"));
}
Example #3
Source File: LocalResponseTest.java From deeplearning4j with Apache License 2.0
@Test
public void testMultiCNNLayer() throws Exception {
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
            .optimizationAlgo(OptimizationAlgorithm.LINE_GRADIENT_DESCENT).seed(123).list()
            .layer(0, new ConvolutionLayer.Builder().nIn(1).nOut(6).weightInit(WeightInit.XAVIER)
                    .activation(Activation.RELU).build())
            .layer(1, new LocalResponseNormalization.Builder().build())
            .layer(2, new DenseLayer.Builder().nOut(2).build())
            .layer(3, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                    .weightInit(WeightInit.XAVIER).activation(Activation.SOFTMAX).nIn(2).nOut(10).build())
            .setInputType(InputType.convolutionalFlat(28, 28, 1)).build();

    MultiLayerNetwork network = new MultiLayerNetwork(conf);
    network.init();

    DataSetIterator iter = new MnistDataSetIterator(2, 2);
    DataSet next = iter.next();
    network.fit(next);
}
Example #4
Source File: CenterLossOutputLayerTest.java From deeplearning4j with Apache License 2.0
@Test
@Ignore //Should be run manually
public void testMNISTConfig() throws Exception {
    int batchSize = 64; // Test batch size
    DataSetIterator mnistTrain = new MnistDataSetIterator(batchSize, true, 12345);

    ComputationGraph net = getCNNMnistConfig();
    net.init();
    net.setListeners(new ScoreIterationListener(1));

    for (int i = 0; i < 50; i++) {
        net.fit(mnistTrain.next());
        Thread.sleep(1000);
    }

    Thread.sleep(100000);
}
Example #5
Source File: MultiLayerTest.java From deeplearning4j with Apache License 2.0
@Test
public void testPredict() throws Exception {
    Nd4j.getRandom().setSeed(12345);
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
            .weightInit(WeightInit.XAVIER).seed(12345L).list()
            .layer(0, new DenseLayer.Builder().nIn(784).nOut(50).activation(Activation.RELU).build())
            .layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                    .activation(Activation.SOFTMAX).nIn(50).nOut(10).build())
            .setInputType(InputType.convolutional(28, 28, 1)).build();

    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.init();

    DataSetIterator ds = new MnistDataSetIterator(10, 10);
    net.fit(ds);

    DataSetIterator testDs = new MnistDataSetIterator(1, 1);
    DataSet testData = testDs.next();
    testData.setLabelNames(Arrays.asList("0", "1", "2", "3", "4", "5", "6", "7", "8", "9"));
    String actualLabels = testData.getLabelName(0);
    List<String> prediction = net.predict(testData);
    assertNotNull(actualLabels);
    assertNotNull(prediction.get(0));
}
Example #6
Source File: TestGraphLocalExecution.java From deeplearning4j with Apache License 2.0
@Override
public Object testData(Map<String, Object> dataParameters) {
    try {
        DataSetIterator underlying = new MnistDataSetIterator(batchSize,
                Math.min(10000, 2 * batchSize), false, false, false, 12345);
        return new MultiDataSetIteratorAdapter(underlying);
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
Example #7
Source File: TestScoreFunctions.java From deeplearning4j with Apache License 2.0
@Override
public Object trainData(Map<String, Object> dataParameters) {
    try {
        DataSetIterator iter = new MnistDataSetIterator(4, 16, false, false, false, 12345);
        iter.setPreProcessor(new PreProc(rocType));
        return iter;
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
Example #8
Source File: ConvolutionLayerTest.java From deeplearning4j with Apache License 2.0
@Test
public void testTwdFirstLayer() throws Exception {
    MultiLayerConfiguration.Builder builder = new NeuralNetConfiguration.Builder().seed(123)
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).l2(2e-4)
            .updater(new Nesterovs(0.9)).dropOut(0.5)
            .list()
            .layer(0, new ConvolutionLayer.Builder(8, 8) //16 filters, kernel size 8, stride 4
                    .stride(4, 4).nOut(16).dropOut(0.5)
                    .activation(Activation.RELU).weightInit(WeightInit.XAVIER)
                    .build())
            .layer(1, new ConvolutionLayer.Builder(4, 4) //32 filters, kernel size 4, stride 2
                    .stride(2, 2).nOut(32).dropOut(0.5).activation(Activation.RELU)
                    .weightInit(WeightInit.XAVIER).build())
            .layer(2, new DenseLayer.Builder() //fully connected with 256 rectified units
                    .nOut(256).activation(Activation.RELU).weightInit(WeightInit.XAVIER)
                    .dropOut(0.5).build())
            .layer(3, new OutputLayer.Builder(LossFunctions.LossFunction.SQUARED_LOSS) //output layer
                    .nOut(10).weightInit(WeightInit.XAVIER).activation(Activation.SOFTMAX).build())
            .setInputType(InputType.convolutionalFlat(28, 28, 1));

    DataSetIterator iter = new MnistDataSetIterator(10, 10);
    MultiLayerConfiguration conf = builder.build();
    MultiLayerNetwork network = new MultiLayerNetwork(conf);
    network.init();

    DataSet ds = iter.next();
    for (int i = 0; i < 5; i++) {
        network.fit(ds);
    }
}
Example #9
Source File: MnistDataProvider.java From deeplearning4j with Apache License 2.0
@Override
public Object trainData(Map<String, Object> dataParameters) {
    try {
        return new MultipleEpochsIterator(numEpochs, new MnistDataSetIterator(batchSize, true, rngSeed));
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
Example #10
Source File: ConvolutionLayerTest.java From deeplearning4j with Apache License 2.0
@Test
public void testCNNMLNPretrain() throws Exception {
    // Note CNN does not do pretrain
    int numSamples = 10;
    int batchSize = 10;
    DataSetIterator mnistIter = new MnistDataSetIterator(batchSize, numSamples, true);

    MultiLayerNetwork model = getCNNMLNConfig(false, true);
    model.fit(mnistIter);

    mnistIter.reset();
    MultiLayerNetwork model2 = getCNNMLNConfig(false, true);
    model2.fit(mnistIter);
    mnistIter.reset();

    DataSet test = mnistIter.next();

    Evaluation eval = new Evaluation();
    INDArray output = model.output(test.getFeatures());
    eval.eval(test.getLabels(), output);
    double f1Score = eval.f1();

    Evaluation eval2 = new Evaluation();
    INDArray output2 = model2.output(test.getFeatures());
    eval2.eval(test.getLabels(), output2);
    double f1Score2 = eval2.f1();

    assertEquals(f1Score, f1Score2, 1e-4);
}
Example #11
Source File: ConvolutionLayerTest.java From deeplearning4j with Apache License 2.0
@Test
public void testCNNMLNBackprop() throws Exception {
    int numSamples = 10;
    int batchSize = 10;
    DataSetIterator mnistIter = new MnistDataSetIterator(batchSize, numSamples, true);

    MultiLayerNetwork model = getCNNMLNConfig(true, false);
    model.fit(mnistIter);

    MultiLayerNetwork model2 = getCNNMLNConfig(true, false);
    model2.fit(mnistIter);
    mnistIter.reset();

    DataSet test = mnistIter.next();

    Evaluation eval = new Evaluation();
    INDArray output = model.output(test.getFeatures());
    eval.eval(test.getLabels(), output);
    double f1Score = eval.f1();

    Evaluation eval2 = new Evaluation();
    INDArray output2 = model2.output(test.getFeatures());
    eval2.eval(test.getLabels(), output2);
    double f1Score2 = eval2.f1();

    assertEquals(f1Score, f1Score2, 1e-4);
}
Example #12
Source File: Upsampling1DTest.java From deeplearning4j with Apache License 2.0
public INDArray getData() throws Exception {
    DataSetIterator data = new MnistDataSetIterator(5, 5);
    DataSet mnist = data.next();
    nExamples = mnist.numExamples();
    INDArray features = mnist.getFeatures().reshape(nExamples, nChannelsIn, inputLength, inputLength);
    return features.slice(0, 3);
}
Example #13
Source File: MnistDataSetIteratorFactory.java From deeplearning4j with Apache License 2.0
/**
 * @return a new MNIST DataSetIterator
 */
@Override
public DataSetIterator create() {
    try {
        return new MnistDataSetIterator(1000, 1000);
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
Example #14
Source File: ConvolutionLayerSetupTest.java From deeplearning4j with Apache License 2.0
@Test
public void testCNNDBNMultiLayer() throws Exception {
    DataSetIterator iter = new MnistDataSetIterator(2, 2);
    DataSet next = iter.next();

    // Run with separate activation layer
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).seed(123)
            .weightInit(WeightInit.XAVIER).list()
            .layer(0, new ConvolutionLayer.Builder(new int[] {1, 1}, new int[] {1, 1}).nIn(1).nOut(6)
                    .activation(Activation.IDENTITY).build())
            .layer(1, new BatchNormalization.Builder().build())
            .layer(2, new ActivationLayer.Builder().activation(Activation.RELU).build())
            .layer(3, new DenseLayer.Builder().nIn(28 * 28 * 6).nOut(10)
                    .activation(Activation.IDENTITY).build())
            .layer(4, new BatchNormalization.Builder().nOut(10).build())
            .layer(5, new ActivationLayer.Builder().activation(Activation.RELU).build())
            .layer(6, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                    .activation(Activation.SOFTMAX).nOut(10).build())
            .setInputType(InputType.convolutionalFlat(28, 28, 1)).build();

    MultiLayerNetwork network = new MultiLayerNetwork(conf);
    network.init();

    network.setInput(next.getFeatures());
    INDArray activationsActual = network.output(next.getFeatures());
    assertEquals(10, activationsActual.shape()[1], 1e-2);

    network.fit(next);
    INDArray actualGammaParam = network.getLayer(1).getParam(BatchNormalizationParamInitializer.GAMMA);
    INDArray actualBetaParam = network.getLayer(1).getParam(BatchNormalizationParamInitializer.BETA);
    assertTrue(actualGammaParam != null);
    assertTrue(actualBetaParam != null);
}
Example #15
Source File: TestDL4JLocalExecution.java From deeplearning4j with Apache License 2.0
@Override
public Object testData() {
    try {
        return new EarlyTerminationDataSetIterator(new MnistDataSetIterator(minibatch, true, 12345), 3);
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
Example #16
Source File: TestJson.java From deeplearning4j with Apache License 2.0
@Override
public Object testData() {
    try {
        return new MnistDataSetIterator(minibatch, true, 12345);
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
Example #17
Source File: ConvolutionLayerTest.java From deeplearning4j with Apache License 2.0
public INDArray getMnistData() throws Exception {
    int inputWidth = 28;
    int inputHeight = 28;
    int nChannelsIn = 1;
    int nExamples = 5;

    DataSetIterator data = new MnistDataSetIterator(nExamples, nExamples);
    DataSet mnist = data.next();
    nExamples = mnist.numExamples();
    return mnist.getFeatures().reshape(nExamples, nChannelsIn, inputHeight, inputWidth);
}
Example #18
Source File: TestGraphLocalExecutionGenetic.java From deeplearning4j with Apache License 2.0
@Override
public Object trainData(Map<String, Object> dataParameters) {
    try {
        DataSetIterator underlying = new MnistDataSetIterator(batchSize,
                Math.min(60000, 10 * batchSize), false, true, true, 12345);
        return new MultiDataSetIteratorAdapter(new MultipleEpochsIterator(numEpochs, underlying));
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
Example #19
Source File: TestComputationGraphNetwork.java From deeplearning4j with Apache License 2.0
@Test
public void testZeroParamNet() throws Exception {
    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder()
            .graphBuilder()
            .addInputs("in")
            .layer("0", new SubsamplingLayer.Builder().kernelSize(2, 2).stride(2, 2).build(), "in")
            .layer("1", new LossLayer.Builder().activation(Activation.SIGMOID)
                    .lossFunction(LossFunctions.LossFunction.MSE).build(), "0")
            .setOutputs("1")
            .setInputTypes(InputType.convolutionalFlat(28, 28, 1))
            .build();

    ComputationGraph net = new ComputationGraph(conf);
    net.init();

    DataSet ds = new MnistDataSetIterator(16, true, 12345).next();
    INDArray out = net.outputSingle(ds.getFeatures());
    INDArray labelTemp = Nd4j.create(out.shape());
    ds.setLabels(labelTemp);

    net.fit(ds);

    ComputationGraph net2 = TestUtils.testModelSerialization(net);
    INDArray out2 = net2.outputSingle(ds.getFeatures());
    assertEquals(out, out2);
}
Example #20
Source File: BarnesHutTsneTest.java From deeplearning4j with Apache License 2.0
@Test
public void testPerplexity() throws Exception {
    DataTypeUtil.setDTypeForContext(DataType.DOUBLE);
    Nd4j.getRandom().setSeed(123);

    BarnesHutTsne b = new BarnesHutTsne.Builder().stopLyingIteration(10).setMaxIter(10).theta(0.5)
            .learningRate(500).useAdaGrad(false).build();

    DataSetIterator iter = new MnistDataSetIterator(100, true, 12345);
    INDArray data = iter.next().getFeatures();

    INDArray perplexityOutput = b.computeGaussianPerplexity(data, 30.0);
    // System.out.println(perplexityOutput);
}
Example #21
Source File: ParallelInferenceTest.java From deeplearning4j with Apache License 2.0
@Before
public void setUp() throws Exception {
    if (model == null) {
        File file = Resources.asFile("models/LenetMnistMLN.zip");
        model = ModelSerializer.restoreMultiLayerNetwork(file, true);
        iterator = new MnistDataSetIterator(1, false, 12345);
    }
}
Example #22
Source File: EarlyTerminationDataSetIteratorTest.java From deeplearning4j with Apache License 2.0
@Test
public void testNextNum() throws IOException {
    int terminateAfter = 1;

    DataSetIterator iter = new MnistDataSetIterator(minibatchSize, numExamples);
    EarlyTerminationDataSetIterator earlyEndIter = new EarlyTerminationDataSetIterator(iter, terminateAfter);

    earlyEndIter.next(10);
    assertEquals(false, earlyEndIter.hasNext());

    earlyEndIter.reset();
    assertEquals(true, earlyEndIter.hasNext());
}
Example #23
Source File: EarlyTerminationDataSetIteratorTest.java From deeplearning4j with Apache License 2.0
@Test
public void testCallstoNextNotAllowed() throws IOException {
    int terminateAfter = 1;

    DataSetIterator iter = new MnistDataSetIterator(minibatchSize, numExamples);
    EarlyTerminationDataSetIterator earlyEndIter = new EarlyTerminationDataSetIterator(iter, terminateAfter);

    earlyEndIter.next(10);
    iter.reset();
    exception.expect(RuntimeException.class);
    earlyEndIter.next(10);
}
Example #24
Source File: SamplingTest.java From deeplearning4j with Apache License 2.0
@Test
public void testSample() throws Exception {
    DataSetIterator iter = new MnistDataSetIterator(10, 10); //batch size and total
    DataSetIterator sampling = new SamplingDataSetIterator(iter.next(), 10, 10);
    assertEquals(10, sampling.next().numExamples());
}
Example #25
Source File: EarlyTerminationMultiDataSetIteratorTest.java From deeplearning4j with Apache License 2.0
@Test
public void testNextNum() throws IOException {
    int terminateAfter = 1;

    MultiDataSetIterator iter =
            new MultiDataSetIteratorAdapter(new MnistDataSetIterator(minibatchSize, numExamples));
    EarlyTerminationMultiDataSetIterator earlyEndIter =
            new EarlyTerminationMultiDataSetIterator(iter, terminateAfter);

    earlyEndIter.next(10);
    assertEquals(false, earlyEndIter.hasNext());

    earlyEndIter.reset();
    assertEquals(true, earlyEndIter.hasNext());
}
Example #26
Source File: TestGraphLocalExecutionGenetic.java From deeplearning4j with Apache License 2.0
@Override
public ScoreCalculator get() {
    try {
        return new DataSetLossCalculatorCG(new MnistDataSetIterator(128, 1280), true);
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
Example #27
Source File: MnistDataSetIteratorProviderFactory.java From deeplearning4j with Apache License 2.0
/**
 * Create a {@link DataSetIterator}
 *
 * @return a new MNIST DataSetIterator
 */
@Override
public DataSetIterator create() {
    try {
        return new MnistDataSetIterator(100, 1000);
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
Example #28
Source File: ManualTests.java From deeplearning4j with Apache License 2.0
@Test
public void testCNNActivationsFrozen() throws Exception {
    int nChannels = 1;
    int outputNum = 10;
    int batchSize = 64;
    int nEpochs = 10;
    int seed = 123;

    log.info("Load data....");
    DataSetIterator mnistTrain = new MnistDataSetIterator(batchSize, true, 12345);

    log.info("Build model....");
    MultiLayerConfiguration.Builder builder = new NeuralNetConfiguration.Builder().seed(seed)
            .l2(0.0005)
            .weightInit(WeightInit.XAVIER)
            .updater(new Nesterovs(0.01, 0.9)).list()
            .layer(0, new FrozenLayer(new ConvolutionLayer.Builder(5, 5)
                    //nIn and nOut specify depth. nIn here is the nChannels and nOut is the number of filters to be applied
                    .nIn(nChannels).stride(1, 1).nOut(20).activation(Activation.IDENTITY).build()))
            .layer(1, new FrozenLayer(new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX)
                    .kernelSize(2, 2).stride(2, 2).build()))
            .layer(2, new FrozenLayer(new DenseLayer.Builder().activation(Activation.RELU).nOut(500).build()))
            .layer(3, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
                    .nOut(outputNum).activation(Activation.SOFTMAX).build())
            .setInputType(InputType.convolutionalFlat(28, 28, nChannels));

    MultiLayerConfiguration conf = builder.build();
    MultiLayerNetwork model = new MultiLayerNetwork(conf);
    model.init();

    log.info("Train model....");
    model.setListeners(new ConvolutionalIterationListener(1));
    for (int i = 0; i < nEpochs; i++) {
        model.fit(mnistTrain);
    }
}
Example #29
Source File: RPTreeTest.java From deeplearning4j with Apache License 2.0
@Test
public void testRpTreeMaxNodes() throws Exception {
    DataSetIterator mnist = new MnistDataSetIterator(150, 150);
    RPForest rpTree = new RPForest(4, 4, "euclidean");
    DataSet d = mnist.next();

    NormalizerStandardize normalizerStandardize = new NormalizerStandardize();
    normalizerStandardize.fit(d);
    rpTree.fit(d.getFeatures());

    for (RPTree tree : rpTree.getTrees()) {
        for (RPNode node : tree.getLeaves()) {
            assertTrue(node.getIndices().size() <= rpTree.getMaxSize());
        }
    }
}
Example #30
Source File: RPTreeTest.java From deeplearning4j with Apache License 2.0
@Test
public void testFindSelf() throws Exception {
    DataSetIterator mnist = new MnistDataSetIterator(100, 6000);
    NormalizerMinMaxScaler minMaxNormalizer = new NormalizerMinMaxScaler(0, 1);
    minMaxNormalizer.fit(mnist);

    DataSet d = mnist.next();
    minMaxNormalizer.transform(d.getFeatures());

    RPForest rpForest = new RPForest(100, 100, "euclidean");
    rpForest.fit(d.getFeatures());

    for (int i = 0; i < 10; i++) {
        INDArray indexes = rpForest.queryAll(d.getFeatures().slice(i), 10);
        assertEquals(i, indexes.getInt(0));
    }
}