Java Code Examples for org.deeplearning4j.datasets.datavec.RecordReaderDataSetIterator#next()
The following examples show how to use org.deeplearning4j.datasets.datavec.RecordReaderDataSetIterator#next().
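Before the project examples, here is a minimal usage sketch of the typical pattern: wrap a RecordReader in a RecordReaderDataSetIterator and call next() to obtain a DataSet minibatch. The file name iris.csv, the label column index, and the class count below are illustrative assumptions, not values taken from the examples that follow.

// Minimal, hypothetical usage sketch; iris.csv, labelIndex and numClasses are assumptions.
import java.io.File;
import org.datavec.api.records.reader.impl.csv.CSVRecordReader;
import org.datavec.api.split.FileSplit;
import org.deeplearning4j.datasets.datavec.RecordReaderDataSetIterator;
import org.nd4j.linalg.dataset.DataSet;

public class NextUsageSketch {
    public static void main(String[] args) throws Exception {
        CSVRecordReader recordReader = new CSVRecordReader(0, ',');       // skip 0 header lines, comma-delimited
        recordReader.initialize(new FileSplit(new File("iris.csv")));     // hypothetical input file

        int batchSize = 10;      // records per DataSet returned by next()
        int labelIndex = 4;      // column index of the class label (assumption)
        int numClasses = 3;      // number of label classes (assumption)

        RecordReaderDataSetIterator iterator =
                new RecordReaderDataSetIterator(recordReader, batchSize, labelIndex, numClasses);

        while (iterator.hasNext()) {
            DataSet batch = iterator.next();   // features plus one-hot labels for up to batchSize records
            System.out.println("Features shape: " + batch.getFeatures().shapeInfoToString());
        }
    }
}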
Example 1
Source File: TestRecordReaders.java, from deeplearning4j (Apache License 2.0)
@Test
public void testClassIndexOutsideOfRangeRRDSI() {
    Collection<Collection<Writable>> c = new ArrayList<>();
    c.add(Arrays.<Writable>asList(new DoubleWritable(0.5), new IntWritable(0)));
    c.add(Arrays.<Writable>asList(new DoubleWritable(1.0), new IntWritable(2)));

    CollectionRecordReader crr = new CollectionRecordReader(c);

    // batchSize = 2, labelIndex = 1, numPossibleLabels = 2; the second record's class index (2)
    // is outside the valid range [0, 1], so the one-hot conversion in next() must fail
    RecordReaderDataSetIterator iter = new RecordReaderDataSetIterator(crr, 2, 1, 2);

    try {
        DataSet ds = iter.next();
        fail("Expected exception");
    } catch (Exception e) {
        assertTrue(e.getMessage(), e.getMessage().contains("to one-hot"));
    }
}
Example 2
Source File: DataSetIteratorTest.java, from deeplearning4j (Apache License 2.0)
@Test
public void testMnist() throws Exception {
    ClassPathResource cpr = new ClassPathResource("mnist_first_200.txt");
    CSVRecordReader rr = new CSVRecordReader(0, ',');
    rr.initialize(new FileSplit(cpr.getTempFileFromArchive()));

    // batchSize = 10, labelIndex = 0, numPossibleLabels = 10 (digits 0-9)
    RecordReaderDataSetIterator dsi = new RecordReaderDataSetIterator(rr, 10, 0, 10);

    MnistDataSetIterator iter = new MnistDataSetIterator(10, 200, false, true, false, 0);

    while (dsi.hasNext()) {
        DataSet dsExp = dsi.next();
        DataSet dsAct = iter.next();

        INDArray fExp = dsExp.getFeatures();
        fExp.divi(255);   // scale raw pixel values to [0, 1] to match MnistDataSetIterator output
        INDArray lExp = dsExp.getLabels();

        INDArray fAct = dsAct.getFeatures();
        INDArray lAct = dsAct.getLabels();

        assertEquals(fExp, fAct.castTo(fExp.dataType()));
        assertEquals(lExp, lAct.castTo(lExp.dataType()));
    }
    assertFalse(iter.hasNext());
}
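Example 2 scales the raw pixel features by calling divi(255) on each batch. As an alternative sketch (not part of the original test), the same scaling can be attached as a preprocessor, as Example 4 below does with its generic preProcessor field; here ImagePreProcessingScaler is assumed to be a suitable choice for [0, 255] pixel data, and rr refers to the CSVRecordReader from the example above.

// Alternative sketch: scale features via a preprocessor instead of divi(255) per batch.
// Reuses rr from Example 2; requires org.nd4j.linalg.dataset.api.preprocessor.ImagePreProcessingScaler,
// which maps [0, 255] pixel values into [0, 1].
RecordReaderDataSetIterator dsi = new RecordReaderDataSetIterator(rr, 10, 0, 10);
dsi.setPreProcessor(new ImagePreProcessingScaler(0, 1));   // applied automatically inside next()

DataSet scaled = dsi.next();   // features already in [0, 1]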
Example 3
Source File: StringToDataSetExportFunction.java, from deeplearning4j (Apache License 2.0)
private void processBatchIfRequired(List<List<Writable>> list, boolean finalRecord) throws Exception {
    if (list.isEmpty())
        return;
    if (list.size() < batchSize && !finalRecord)
        return;

    RecordReader rr = new CollectionRecordReader(list);
    RecordReaderDataSetIterator iter = new RecordReaderDataSetIterator(rr, null, batchSize, labelIndex,
            labelIndex, numPossibleLabels, -1, regression);

    DataSet ds = iter.next();   // convert the buffered records into a single DataSet

    String filename = "dataset_" + uid + "_" + (outputCount++) + ".bin";
    URI uri = new URI(outputDir.getPath() + "/" + filename);
    Configuration c = conf == null ? DefaultHadoopConfig.get() : conf.getValue().getConfiguration();

    FileSystem file = FileSystem.get(uri, c);
    try (FSDataOutputStream out = file.create(new Path(uri))) {
        ds.save(out);
    }

    list.clear();
}
Example 4
Source File: RecordReaderFileBatchLoader.java, from deeplearning4j (Apache License 2.0)
@Override
public DataSet load(Source source) throws IOException {
    FileBatch fb = FileBatch.readFromZip(source.getInputStream());

    //Wrap file batch in RecordReader
    RecordReader rr = new FileBatchRecordReader(recordReader, fb);

    //Create RecordReaderDataSetIterator
    RecordReaderDataSetIterator iter = new RecordReaderDataSetIterator(rr, null, batchSize, labelIndexFrom,
            labelIndexTo, numPossibleLabels, -1, regression);
    if (preProcessor != null) {
        iter.setPreProcessor(preProcessor);
    }

    //Return dataset
    DataSet ds = iter.next();
    return ds;
}
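Examples 3 and 4 use the longer constructor (recordReader, converter, batchSize, labelIndexFrom, labelIndexTo, numPossibleLabels, maxNumBatches, regression), which also covers regression targets. The fragment below is a hypothetical sketch of that constructor for a regression CSV: the file name house_prices.csv, the batch size, and the label column index are assumptions, and the code is meant to sit in a method that declares throws Exception.

// Hypothetical regression sketch using the long constructor from Examples 3 and 4.
// house_prices.csv, batchSize and labelIndex are assumptions for illustration only.
CSVRecordReader rr = new CSVRecordReader(0, ',');
rr.initialize(new FileSplit(new File("house_prices.csv")));

int batchSize = 32;
int labelIndex = 3;   // column holding the continuous target value (assumption)

// converter = null, labelIndexFrom == labelIndexTo (single label column),
// numPossibleLabels = -1 and regression = true, so labels stay as raw values instead of one-hot;
// maxNumBatches = -1 means no limit on the number of batches.
RecordReaderDataSetIterator iter =
        new RecordReaderDataSetIterator(rr, null, batchSize, labelIndex, labelIndex, -1, -1, true);

DataSet first = iter.next();   // features plus a single-column regression label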