Java Code Examples for org.nd4j.linalg.dataset.api.MultiDataSet#getFeatures()

The following examples show how to use org.nd4j.linalg.dataset.api.MultiDataSet#getFeatures(). You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also check out the related API usage on the sidebar.
Example 1
Source File: DefaultCallback.java    From deeplearning4j with Apache License 2.0 6 votes vote down vote up
/**
 * Ensures every array held by the given MultiDataSet (features, labels, and
 * their respective mask arrays) is located on the device, so downstream
 * device-side operations do not trigger lazy host-to-device migration.
 *
 * @param multiDataSet data set whose arrays are relocated; null is a no-op
 */
@Override
public void call(MultiDataSet multiDataSet) {
    if (multiDataSet == null)
        return;

    ensureDeviceLocation(multiDataSet.getFeatures());
    ensureDeviceLocation(multiDataSet.getLabels());
    ensureDeviceLocation(multiDataSet.getFeaturesMaskArrays());
    ensureDeviceLocation(multiDataSet.getLabelsMaskArrays());
}

/** Moves every array in the (possibly null) batch to the current device. */
private static void ensureDeviceLocation(INDArray[] arrays) {
    if (arrays == null)
        return;
    // Hoisted out of the per-array loop: the original re-fetched the getter on
    // every iteration of every loop; one pass per array group is equivalent.
    for (INDArray array : arrays)
        Nd4j.getAffinityManager().ensureLocation(array, AffinityManager.Location.DEVICE);
}
 
Example 2
Source File: MultiLayerNetwork.java    From deeplearning4j with Apache License 2.0 6 votes vote down vote up
/**
 * Fits this network on a MultiDataSet by unpacking it into a standard
 * DataSet. Only MultiDataSets with exactly one feature array and one label
 * array are supported.
 *
 * @param dataSet data with exactly 1 feature array and 1 label array
 * @throws DL4JInvalidInputException if more than one input or output array is present
 */
@Override
public void fit(MultiDataSet dataSet) {
    if (dataSet.getFeatures().length == 1 && dataSet.getLabels().length == 1) {
        INDArray features = dataSet.getFeatures(0);
        INDArray labels = dataSet.getLabels(0);
        INDArray fMask = null;
        INDArray lMask = null;

        if (dataSet.getFeaturesMaskArrays() != null)
            fMask = dataSet.getFeaturesMaskArrays()[0];

        // FIX: guard on the LABELS mask array, not the features mask. The
        // original checked getFeaturesMaskArrays() before dereferencing
        // getLabelsMaskArrays()[0], which NPEs when only a features mask is
        // present and silently drops the labels mask when only a labels mask is.
        if (dataSet.getLabelsMaskArrays() != null)
            lMask = dataSet.getLabelsMaskArrays()[0];

        DataSet ds = new DataSet(features, labels, fMask, lMask);
        fit(ds);
    } else {
        throw new DL4JInvalidInputException(
                "MultiLayerNetwork can't handle MultiDataSet with more than 1 features or labels array." +
                        "Please consider use of ComputationGraph");
    }
}
 
Example 3
Source File: BertIterator.java    From deeplearning4j with Apache License 2.0 6 votes vote down vote up
/**
 * For use during inference. Will convert a given list of sentences to features and feature masks as appropriate.
 *
 * @param listOnlySentences sentences to featurize (no labels)
 * @return Pair of INDArrays[], first element is feature arrays and the second is the masks array
 */
public Pair<INDArray[], INDArray[]> featurizeSentences(List<String> listOnlySentences) {

    SentenceListProcessed processed = tokenizeMiniBatch(addDummyLabel(listOnlySentences));
    List<Pair<List<String>, String>> tokensWithLabels = processed.getTokensAndLabelList();
    int maxLength = processed.getMaxL();

    Pair<INDArray[], INDArray[]> featuresAndMasks = convertMiniBatchFeatures(tokensWithLabels, maxLength, null);
    if (preProcessor == null) {
        return featuresAndMasks;
    }

    // Route the arrays through a throw-away MultiDataSet (features + feature
    // masks only, no labels) so the configured preprocessor can be applied.
    MultiDataSet mds = new org.nd4j.linalg.dataset.MultiDataSet(
            featuresAndMasks.getFirst(), null, featuresAndMasks.getSecond(), null);
    preProcessor.preProcess(mds);
    return new Pair<>(mds.getFeatures(), mds.getFeaturesMaskArrays());
}
 
Example 4
Source File: BertIterator.java    From deeplearning4j with Apache License 2.0 6 votes vote down vote up
/**
 * For use during inference. Will convert a given pair of a list of sentences to features and feature masks as appropriate.
 *
 * @param listOnlySentencePairs sentence pairs to featurize (no labels)
 * @return Pair of INDArrays[], first element is feature arrays and the second is the masks array
 */
public Pair<INDArray[], INDArray[]> featurizeSentencePairs(List<Pair<String, String>> listOnlySentencePairs) {
    Preconditions.checkState(sentencePairProvider != null, "The featurizeSentencePairs method is meant for inference with sentence pairs. Use only when the sentence pair provider is set (i.e not null).");

    SentencePairListProcessed processed = tokenizePairsMiniBatch(addDummyLabelForPairs(listOnlySentencePairs));
    List<Pair<List<String>, String>> tokensWithLabels = processed.getTokensAndLabelList();
    int maxLength = processed.getMaxL();
    long[] segmentIdOnesFrom = processed.getSegIdOnesFrom();

    Pair<INDArray[], INDArray[]> featuresAndMasks =
            convertMiniBatchFeatures(tokensWithLabels, maxLength, segmentIdOnesFrom);
    if (preProcessor == null) {
        return featuresAndMasks;
    }

    // Route the arrays through a throw-away MultiDataSet (features + feature
    // masks only, no labels) so the configured preprocessor can be applied.
    MultiDataSet mds = new org.nd4j.linalg.dataset.MultiDataSet(
            featuresAndMasks.getFirst(), null, featuresAndMasks.getSecond(), null);
    preProcessor.preProcess(mds);
    return new Pair<>(mds.getFeatures(), mds.getFeaturesMaskArrays());
}
 
Example 5
Source File: TestBertIterator.java    From deeplearning4j with Apache License 2.0 6 votes vote down vote up
// Verifies that featurizeSentencePairs produces the same feature and mask
// arrays as iterating the BertIterator normally over the same sentence pairs.
@Test
public void testSentencePairFeaturizer() throws IOException {
    int minibatchSize = 2;
    TestSentencePairsHelper testPairHelper = new TestSentencePairsHelper(minibatchSize);
    BertIterator b = BertIterator.builder()
            .tokenizer(testPairHelper.getTokenizer())
            .minibatchSize(minibatchSize)
            .padMinibatches(true)
            .featureArrays(BertIterator.FeatureArrays.INDICES_MASK_SEGMENTID)
            .vocabMap(testPairHelper.getTokenizer().getVocab())
            .task(BertIterator.Task.SEQ_CLASSIFICATION)
            .lengthHandling(BertIterator.LengthHandling.FIXED_LENGTH, 128)
            .sentencePairProvider(testPairHelper.getPairSentenceProvider())
            .prependToken("[CLS]")
            .appendToken("[SEP]")
            .build();
    // Reference arrays produced by normal iteration over the same data
    MultiDataSet mds = b.next();
    INDArray[] featuresArr = mds.getFeatures();
    INDArray[] featuresMaskArr = mds.getFeaturesMaskArrays();

    // Featurize the same pairs directly; both feature arrays and the first
    // feature mask must match the iterator's output exactly.
    Pair<INDArray[], INDArray[]> p = b.featurizeSentencePairs(testPairHelper.getSentencePairs());
    assertEquals(p.getFirst().length, 2);
    assertEquals(featuresArr[0], p.getFirst()[0]);
    assertEquals(featuresArr[1], p.getFirst()[1]);
    assertEquals(featuresMaskArr[0], p.getSecond()[0]);
}
 
Example 6
Source File: MultiDataSetWrapperIterator.java    From deeplearning4j with Apache License 2.0 6 votes vote down vote up
/**
 * Converts the next MultiDataSet from the wrapped iterator into a plain
 * DataSet. Only single-input / single-output MultiDataSets are supported.
 *
 * @return the converted (and optionally preprocessed) DataSet
 * @throws UnsupportedOperationException if the MultiDataSet has more than one
 *         feature or label array
 */
@Override
public DataSet next() {
    MultiDataSet multi = iterator.next();
    if (multi.getFeatures().length > 1 || multi.getLabels().length > 1)
        throw new UnsupportedOperationException(
                        "This iterator is able to convert MultiDataSet with number of inputs/outputs of 1");

    INDArray[] allLabels = multi.getLabels();
    INDArray[] featureMasks = multi.getFeaturesMaskArrays();
    INDArray[] labelMasks = multi.getLabelsMaskArrays();

    INDArray f = multi.getFeatures()[0];
    // NOTE(review): when labels are absent the features are reused as labels
    // (autoencoder-style) — confirm this is the intended fallback
    INDArray l = allLabels != null ? allLabels[0] : f;
    INDArray fm = featureMasks != null ? featureMasks[0] : null;
    INDArray lm = labelMasks != null ? labelMasks[0] : null;

    DataSet converted = new DataSet(f, l, fm, lm);
    if (preProcessor != null)
        preProcessor.preProcess(converted);
    return converted;
}
 
Example 7
Source File: ImageMultiPreProcessingScaler.java    From nd4j with Apache License 2.0 5 votes vote down vote up
/**
 * Normalizes the configured feature arrays of the MultiDataSet in place:
 * values are divided by maxPixelVal and then linearly mapped onto
 * [minRange, maxRange].
 *
 * @param multiDataSet data set whose selected feature arrays are scaled
 */
@Override
public void preProcess(MultiDataSet multiDataSet) {
    double span = this.maxRange - this.minRange;
    for (int idx : featureIndices) {
        INDArray features = multiDataSet.getFeatures(idx);
        features.divi(this.maxPixelVal); // now in [0, 1]
        if (span != 1)
            features.muli(span); // stretch to the target span
        if (this.minRange != 0)
            features.addi(this.minRange); // shift so the range starts at minRange
    }
}
 
Example 8
Source File: ImageMultiPreProcessingScaler.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
/**
 * Scales the selected feature arrays of the MultiDataSet in place. Each array
 * is first normalized to [0, 1] by maxPixelVal, then mapped linearly onto
 * [minRange, maxRange].
 *
 * @param multiDataSet data set whose feature arrays (at featureIndices) are scaled
 */
@Override
public void preProcess(MultiDataSet multiDataSet) {
    double rangeWidth = this.maxRange - this.minRange;
    for (int k = 0; k < featureIndices.length; k++) {
        INDArray arr = multiDataSet.getFeatures(featureIndices[k]);
        arr.divi(this.maxPixelVal); // normalize pixels to [0, 1]
        if (rangeWidth != 1)
            arr.muli(rangeWidth); // expand/contract to the requested width
        if (this.minRange != 0)
            arr.addi(this.minRange); // offset to the requested minimum
    }
}
 
Example 9
Source File: CountingMultiDataSetIterator.java    From deeplearning4j with Apache License 2.0 5 votes vote down vote up
/**
 * Returns the next MultiDataSet from the underlying iterator and advances the
 * iteration counter. With truncated BPTT enabled, a rank-3 (time series)
 * feature array counts as one iteration per TBPTT segment; otherwise each
 * batch counts as exactly one iteration.
 */
@Override
public MultiDataSet next() {
    MultiDataSet next = underlying.next();
    if (!tbptt) {
        currIter++;
        return next;
    }

    INDArray firstFeatures = next.getFeatures(0);
    if (firstFeatures.rank() == 3) {
        // ceil(timeSteps / tbpttLength) segments will be fit for this batch
        currIter += (int) Math.ceil(firstFeatures.size(2) / (double) tbpttLength);
    }
    return next;
}
 
Example 10
Source File: ScoreListener.java    From deeplearning4j with Apache License 2.0 4 votes vote down vote up
/**
 * Called once per completed training iteration: accumulates timing/example
 * counters and, every {@code frequency} iterations, logs the loss (plus ETL
 * and throughput figures when available/enabled) and resets the accumulators.
 */
@Override
public void iterationDone(SameDiff sd, At at, MultiDataSet dataSet, Loss loss) {
    iterTimeSumSinceLastReport += System.currentTimeMillis() - lastIterTime;
    epochBatchCount++;
    // Count examples via the minibatch dimension (size(0)) of the first feature array
    if (dataSet.numFeatureArrays() > 0 && dataSet.getFeatures(0) != null) {
        int n = (int) dataSet.getFeatures(0).size(0);
        examplesSinceLastReportIter += n;
        epochExampleCount += n;
    }

    // Report only every `frequency` iterations (and never at iteration 0)
    if (at.iteration() > 0 && at.iteration() % frequency == 0) {
        double l = loss.totalLoss();
        String etl = "";
        if (etlTimeSumSinceLastReport > 0) {
            etl = "(" + formatDurationMs(etlTimeSumSinceLastReport) + " ETL";
            if (frequency == 1) {
                etl += ")";
            } else {
                etl += " in " + frequency + " iter)";
            }
        }

        if(!reportIterPerformance) {
            log.info("Loss at epoch {}, iteration {}: {}{}", at.epoch(), at.iteration(), format5dp(l), etl);
        } else {
            long time = System.currentTimeMillis();
            // Throughput needs a previous report time; first report falls back to the plain line
            if(lastReportTime > 0){
                double batchPerSec = 1000 * frequency / (double)(time - lastReportTime);
                double exPerSec = 1000 * examplesSinceLastReportIter / (double)(time - lastReportTime);
                log.info("Loss at epoch {}, iteration {}: {}{}, batches/sec: {}, examples/sec: {}", at.epoch(), at.iteration(), format5dp(l),
                        etl, format5dp(batchPerSec), format5dp(exPerSec));
            } else {
                log.info("Loss at epoch {}, iteration {}: {}{}", at.epoch(), at.iteration(), format5dp(l), etl);
            }

            lastReportTime = time;
        }

        // Reset per-report accumulators (epoch-level counters keep accumulating)
        iterTimeSumSinceLastReport = 0;
        etlTimeSumSinceLastReport = 0;
        examplesSinceLastReportIter = 0;
    }
}
 
Example 11
Source File: TestBertIterator.java    From deeplearning4j with Apache License 2.0 4 votes vote down vote up
// Builds three iterators over the same sentences — left-only, right-only and
// paired — and checks, for a range of max lengths, that the paired features
// and segment IDs equal the expected stitching of the left and right halves.
@Test
public void testSentencePairsUnequalLengths() throws IOException {

    int minibatchSize = 4;
    int numOfSentencesinIter = 3;

    TestSentencePairsHelper testPairHelper = new TestSentencePairsHelper(numOfSentencesinIter);
    int shortL = testPairHelper.getShortL();
    int longL = testPairHelper.getLongL();
    int sent1L = testPairHelper.getSentenceALen();
    int sent2L = testPairHelper.getSentenceBLen();

    System.out.println("Sentence Pairs, Left");
    System.out.println(testPairHelper.getSentencesLeft());
    System.out.println("Sentence Pairs, Right");
    System.out.println(testPairHelper.getSentencesRight());

    // Only max lengths in (2*shortL+1, longL+shortL] are exercised here; anything
    // outside this range would additionally require checking padding/truncation
    for (int maxL = longL + shortL; maxL > 2 * shortL + 1; maxL--) {

        System.out.println("Running for max length = " + maxL);

        MultiDataSet leftMDS = BertIterator.builder()
                .tokenizer(testPairHelper.getTokenizer())
                .minibatchSize(minibatchSize)
                .featureArrays(BertIterator.FeatureArrays.INDICES_MASK_SEGMENTID)
                .vocabMap(testPairHelper.getTokenizer().getVocab())
                .task(BertIterator.Task.SEQ_CLASSIFICATION)
                .lengthHandling(BertIterator.LengthHandling.FIXED_LENGTH, longL * 10) //random big num guaranteed to be longer than either
                .sentenceProvider(new TestSentenceHelper(numOfSentencesinIter).getSentenceProvider())
                .padMinibatches(true)
                .build().next();

        MultiDataSet rightMDS = BertIterator.builder()
                .tokenizer(testPairHelper.getTokenizer())
                .minibatchSize(minibatchSize)
                .featureArrays(BertIterator.FeatureArrays.INDICES_MASK_SEGMENTID)
                .vocabMap(testPairHelper.getTokenizer().getVocab())
                .task(BertIterator.Task.SEQ_CLASSIFICATION)
                .lengthHandling(BertIterator.LengthHandling.FIXED_LENGTH, longL * 10) //random big num guaranteed to be longer than either
                .sentenceProvider(new TestSentenceHelper(true, numOfSentencesinIter).getSentenceProvider())
                .padMinibatches(true)
                .build().next();

        // Iterator under test: pairs truncated/fit to exactly maxL tokens
        MultiDataSet pairMDS = BertIterator.builder()
                .tokenizer(testPairHelper.getTokenizer())
                .minibatchSize(minibatchSize)
                .featureArrays(BertIterator.FeatureArrays.INDICES_MASK_SEGMENTID)
                .vocabMap(testPairHelper.getTokenizer().getVocab())
                .task(BertIterator.Task.SEQ_CLASSIFICATION)
                .lengthHandling(BertIterator.LengthHandling.FIXED_LENGTH, maxL)
                .sentencePairProvider(testPairHelper.getPairSentenceProvider())
                .padMinibatches(true)
                .build().next();

        //CHECK FEATURES
        // Expected pair features = hstack of the per-side prefixes, row by row
        INDArray combinedFeat = Nd4j.create(DataType.INT, minibatchSize, maxL);
        //left side
        INDArray leftFeatures = leftMDS.getFeatures(0);
        INDArray topLSentFeat = leftFeatures.getRow(0).get(NDArrayIndex.interval(0, shortL));
        INDArray midLSentFeat = leftFeatures.getRow(1).get(NDArrayIndex.interval(0, maxL - shortL));
        INDArray bottomLSentFeat = leftFeatures.getRow(2).get(NDArrayIndex.interval(0, sent1L));
        //right side
        INDArray rightFeatures = rightMDS.getFeatures(0);
        INDArray topRSentFeat = rightFeatures.getRow(0).get(NDArrayIndex.interval(0, maxL - shortL));
        INDArray midRSentFeat = rightFeatures.getRow(1).get(NDArrayIndex.interval(0, shortL));
        INDArray bottomRSentFeat = rightFeatures.getRow(2).get(NDArrayIndex.interval(0, sent2L));
        //expected pair
        combinedFeat.getRow(0).addi(Nd4j.hstack(topLSentFeat, topRSentFeat));
        combinedFeat.getRow(1).addi(Nd4j.hstack(midLSentFeat, midRSentFeat));
        combinedFeat.getRow(2).get(NDArrayIndex.interval(0, sent1L + sent2L)).addi(Nd4j.hstack(bottomLSentFeat, bottomRSentFeat));

        assertEquals(maxL, pairMDS.getFeatures(0).shape()[1]);
        assertArrayEquals(combinedFeat.shape(), pairMDS.getFeatures(0).shape());
        assertEquals(combinedFeat, pairMDS.getFeatures(0));

        //CHECK SEGMENT ID
        // Expected segment IDs: 0 for the first sentence's tokens, 1 for the second's
        INDArray combinedFetSeg = Nd4j.create(DataType.INT, minibatchSize, maxL);
        combinedFetSeg.get(NDArrayIndex.point(0), NDArrayIndex.interval(shortL, maxL)).addi(1);
        combinedFetSeg.get(NDArrayIndex.point(1), NDArrayIndex.interval(maxL - shortL, maxL)).addi(1);
        combinedFetSeg.get(NDArrayIndex.point(2), NDArrayIndex.interval(sent1L, sent1L + sent2L)).addi(1);
        assertArrayEquals(combinedFetSeg.shape(), pairMDS.getFeatures(1).shape());
        assertEquals(maxL, combinedFetSeg.shape()[1]);
        assertEquals(combinedFetSeg, pairMDS.getFeatures(1));

        // Provider is consumed by build().next(); reset before the next maxL pass
        testPairHelper.getPairSentenceProvider().reset();
    }
}
 
Example 12
Source File: RecordReaderMultiDataSetIteratorTest.java    From deeplearning4j with Apache License 2.0 4 votes vote down vote up
// Splits the Iris CSV into 2 inputs and 2 outputs via RecordReaderMultiDataSetIterator
// and checks each piece against column slices of the plain single-array iterator.
@Test
public void testSplittingCSV() throws Exception {
    //Here's the idea: take Iris, and split it up into 2 inputs and 2 output arrays
    //Inputs: columns 0 and 1-2
    //Outputs: columns 3, and 4->OneHot
    //need to manually extract
    RecordReader rr = new CSVRecordReader(0, ',');
    rr.initialize(new FileSplit(Resources.asFile("iris.txt")));
    RecordReaderDataSetIterator rrdsi = new RecordReaderDataSetIterator(rr, 10, 4, 3);

    // Second, independent reader over the same file for the multi-array iterator
    RecordReader rr2 = new CSVRecordReader(0, ',');
    rr2.initialize(new FileSplit(Resources.asFile("iris.txt")));

    MultiDataSetIterator rrmdsi = new RecordReaderMultiDataSetIterator.Builder(10).addReader("reader", rr2)
                    .addInput("reader", 0, 0).addInput("reader", 1, 2).addOutput("reader", 3, 3)
                    .addOutputOneHot("reader", 4, 3).build();

    while (rrdsi.hasNext()) {
        // Reference: full feature/label arrays from the ordinary iterator
        DataSet ds = rrdsi.next();
        INDArray fds = ds.getFeatures();
        INDArray lds = ds.getLabels();

        MultiDataSet mds = rrmdsi.next();
        assertEquals(2, mds.getFeatures().length);
        assertEquals(2, mds.getLabels().length);
        assertNull(mds.getFeaturesMaskArrays());
        assertNull(mds.getLabelsMaskArrays());
        INDArray[] fmds = mds.getFeatures();
        INDArray[] lmds = mds.getLabels();

        assertNotNull(fmds);
        assertNotNull(lmds);
        for (int i = 0; i < fmds.length; i++)
            assertNotNull(fmds[i]);
        for (int i = 0; i < lmds.length; i++)
            assertNotNull(lmds[i]);

        //Get the subsets of the original iris data
        INDArray expIn1 = fds.get(all(), interval(0,0,true));
        INDArray expIn2 = fds.get(all(), interval(1, 2, true));
        INDArray expOut1 = fds.get(all(), interval(3,3,true));
        INDArray expOut2 = lds;

        // Each split array must equal the corresponding column slice
        assertEquals(expIn1, fmds[0]);
        assertEquals(expIn2, fmds[1]);
        assertEquals(expOut1, lmds[0]);
        assertEquals(expOut2, lmds[1]);
    }
    // Both iterators must exhaust at the same point
    assertFalse(rrmdsi.hasNext());
}
 
Example 13
Source File: RecordReaderMultiDataSetIteratorTest.java    From deeplearning4j with Apache License 2.0 4 votes vote down vote up
// Splits CSV sequence features into two inputs (columns 0-1 and column 2) and
// verifies each against slices of the plain SequenceRecordReaderDataSetIterator.
@Test
public void testSplittingCSVSequence() throws Exception {
    //Idea: take CSV sequences, and split "csvsequence_i.txt" into two separate inputs; keep "csvSequencelables_i.txt"
    // as standard one-hot output
    //need to manually extract
    File rootDir = temporaryFolder.newFolder();
    for (int i = 0; i < 3; i++) {
        new ClassPathResource(String.format("csvsequence_%d.txt", i)).getTempFileFromArchive(rootDir);
        new ClassPathResource(String.format("csvsequencelabels_%d.txt", i)).getTempFileFromArchive(rootDir);
        new ClassPathResource(String.format("csvsequencelabelsShort_%d.txt", i)).getTempFileFromArchive(rootDir);
    }

    String featuresPath = FilenameUtils.concat(rootDir.getAbsolutePath(), "csvsequence_%d.txt");
    String labelsPath = FilenameUtils.concat(rootDir.getAbsolutePath(), "csvsequencelabels_%d.txt");

    // Reference iterator: single feature array + one-hot labels
    SequenceRecordReader featureReader = new CSVSequenceRecordReader(1, ",");
    SequenceRecordReader labelReader = new CSVSequenceRecordReader(1, ",");
    featureReader.initialize(new NumberedFileInputSplit(featuresPath, 0, 2));
    labelReader.initialize(new NumberedFileInputSplit(labelsPath, 0, 2));

    SequenceRecordReaderDataSetIterator iter =
                    new SequenceRecordReaderDataSetIterator(featureReader, labelReader, 1, 4, false);

    // Second set of readers for the iterator under test (readers are single-use)
    SequenceRecordReader featureReader2 = new CSVSequenceRecordReader(1, ",");
    SequenceRecordReader labelReader2 = new CSVSequenceRecordReader(1, ",");
    featureReader2.initialize(new NumberedFileInputSplit(featuresPath, 0, 2));
    labelReader2.initialize(new NumberedFileInputSplit(labelsPath, 0, 2));

    MultiDataSetIterator srrmdsi = new RecordReaderMultiDataSetIterator.Builder(1)
                    .addSequenceReader("seq1", featureReader2).addSequenceReader("seq2", labelReader2)
                    .addInput("seq1", 0, 1).addInput("seq1", 2, 2).addOutputOneHot("seq2", 0, 4).build();

    while (iter.hasNext()) {
        DataSet ds = iter.next();
        INDArray fds = ds.getFeatures();
        INDArray lds = ds.getLabels();

        MultiDataSet mds = srrmdsi.next();
        assertEquals(2, mds.getFeatures().length);
        assertEquals(1, mds.getLabels().length);
        assertNull(mds.getFeaturesMaskArrays());
        assertNull(mds.getLabelsMaskArrays());
        INDArray[] fmds = mds.getFeatures();
        INDArray[] lmds = mds.getLabels();

        assertNotNull(fmds);
        assertNotNull(lmds);
        for (int i = 0; i < fmds.length; i++)
            assertNotNull(fmds[i]);
        for (int i = 0; i < lmds.length; i++)
            assertNotNull(lmds[i]);

        // Expected inputs: channel slices of the reference features (3d: [batch, channel, time])
        INDArray expIn1 = fds.get(all(), NDArrayIndex.interval(0, 1, true), all());
        INDArray expIn2 = fds.get(all(), NDArrayIndex.interval(2, 2, true), all());

        assertEquals(expIn1, fmds[0]);
        assertEquals(expIn2, fmds[1]);
        assertEquals(lds, lmds[0]);
    }
    // Both iterators must exhaust together
    assertFalse(srrmdsi.hasNext());
}
 
Example 14
Source File: RecordReaderMultiDataSetIteratorTest.java    From deeplearning4j with Apache License 2.0 4 votes vote down vote up
// Checks timeSeriesRandomOffset: shorter sequences are placed at a random
// offset inside the padded minibatch, reproducible from the fixed RNG seed.
@Test
public void testTimeSeriesRandomOffset() {
    //2 in, 2 out, 3 total sequences of length [1,3,5]

    List<List<Writable>> seq1 =
                    Arrays.asList(Arrays.<Writable>asList(new DoubleWritable(1.0), new DoubleWritable(2.0)));
    List<List<Writable>> seq2 =
                    Arrays.asList(Arrays.<Writable>asList(new DoubleWritable(10.0), new DoubleWritable(11.0)),
                                    Arrays.<Writable>asList(new DoubleWritable(20.0), new DoubleWritable(21.0)),
                                    Arrays.<Writable>asList(new DoubleWritable(30.0), new DoubleWritable(31.0)));
    List<List<Writable>> seq3 =
                    Arrays.asList(Arrays.<Writable>asList(new DoubleWritable(100.0), new DoubleWritable(101.0)),
                                    Arrays.<Writable>asList(new DoubleWritable(200.0), new DoubleWritable(201.0)),
                                    Arrays.<Writable>asList(new DoubleWritable(300.0), new DoubleWritable(301.0)),
                                    Arrays.<Writable>asList(new DoubleWritable(400.0), new DoubleWritable(401.0)),
                                    Arrays.<Writable>asList(new DoubleWritable(500.0), new DoubleWritable(501.0)));

    Collection<List<List<Writable>>> seqs = Arrays.asList(seq1, seq2, seq3);

    SequenceRecordReader rr = new CollectionSequenceRecordReader(seqs);

    // Column 0 -> input, column 1 -> output; fixed seed makes offsets reproducible
    RecordReaderMultiDataSetIterator rrmdsi =
                    new RecordReaderMultiDataSetIterator.Builder(3).addSequenceReader("rr", rr).addInput("rr", 0, 0)
                                    .addOutput("rr", 1, 1).timeSeriesRandomOffset(true, 1234L).build();


    // Recompute the offsets the iterator is expected to draw, using the same RNG scheme
    Random r = new Random(1234); //Provides seed for each minibatch
    long seed = r.nextLong();
    Random r2 = new Random(seed); //Use same RNG seed in new RNG for each minibatch
    int expOffsetSeq1 = r2.nextInt(5 - 1 + 1); //0 to 4 inclusive
    int expOffsetSeq2 = r2.nextInt(5 - 3 + 1);
    int expOffsetSeq3 = 0; //Longest TS, always 0
    //With current seed: 3, 1, 0
    //        System.out.println(expOffsetSeq1 + "\t" + expOffsetSeq2 + "\t" + expOffsetSeq3);

    MultiDataSet mds = rrmdsi.next();

    // Mask rows mark where each (offset) sequence sits within the padded length-5 batch
    INDArray expMask = Nd4j.create(new double[][] {{0, 0, 0, 1, 0}, {0, 1, 1, 1, 0}, {1, 1, 1, 1, 1}});

    assertEquals(expMask, mds.getFeaturesMaskArray(0));
    assertEquals(expMask, mds.getLabelsMaskArray(0));

    INDArray f = mds.getFeatures(0);
    INDArray l = mds.getLabels(0);

    // Expected per-sequence values: column 0 as features, column 1 as labels
    INDArray expF1 = Nd4j.create(new double[] {1.0}, new int[]{1,1});
    INDArray expL1 = Nd4j.create(new double[] {2.0}, new int[]{1,1});

    INDArray expF2 = Nd4j.create(new double[] {10, 20, 30}, new int[]{1,3});
    INDArray expL2 = Nd4j.create(new double[] {11, 21, 31}, new int[]{1,3});

    INDArray expF3 = Nd4j.create(new double[] {100, 200, 300, 400, 500}, new int[]{1,5});
    INDArray expL3 = Nd4j.create(new double[] {101, 201, 301, 401, 501}, new int[]{1,5});

    // Each sequence must appear intact at its expected offset along the time axis
    assertEquals(expF1, f.get(point(0), all(),
                    NDArrayIndex.interval(expOffsetSeq1, expOffsetSeq1 + 1)));
    assertEquals(expL1, l.get(point(0), all(),
                    NDArrayIndex.interval(expOffsetSeq1, expOffsetSeq1 + 1)));

    assertEquals(expF2, f.get(point(1), all(),
                    NDArrayIndex.interval(expOffsetSeq2, expOffsetSeq2 + 3)));
    assertEquals(expL2, l.get(point(1), all(),
                    NDArrayIndex.interval(expOffsetSeq2, expOffsetSeq2 + 3)));

    assertEquals(expF3, f.get(point(2), all(),
                    NDArrayIndex.interval(expOffsetSeq3, expOffsetSeq3 + 5)));
    assertEquals(expL3, l.get(point(2), all(),
                    NDArrayIndex.interval(expOffsetSeq3, expOffsetSeq3 + 5)));
}