Java Code Examples for org.nd4j.serde.base64.Nd4jBase64#fromBase64()

The following examples show how to use org.nd4j.serde.base64.Nd4jBase64#fromBase64(). Each example is taken from the project and source file noted above it, and illustrates a related API usage pattern.
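Before the project examples, here is a minimal round-trip sketch: it encodes an INDArray to a Base64 string and decodes it back with fromBase64(). It assumes Nd4jBase64.toBase64(INDArray) is the companion encoder and that both methods throw IOException; the class name and array values are illustrative only and are not taken from any of the projects below.

import java.io.IOException;

import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.serde.base64.Nd4jBase64;

public class Nd4jBase64RoundTrip {
    public static void main(String[] args) throws IOException {
        // Build a small 2x3 array to serialize.
        INDArray original = Nd4j.linspace(1, 6, 6).reshape(2, 3);

        // Encode the array to a Base64 string (assumed companion method).
        String encoded = Nd4jBase64.toBase64(original);

        // Decode the Base64 string back into an INDArray.
        INDArray decoded = Nd4jBase64.fromBase64(encoded);

        System.out.println(original.equals(decoded)); // expected: true
    }
}
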
Example 1
Source File: CSVSparkTransformTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testTransformer() throws Exception {
    List<Writable> input = new ArrayList<>();
    input.add(new DoubleWritable(1.0));
    input.add(new DoubleWritable(2.0));

    Schema schema = new Schema.Builder().addColumnDouble("1.0").addColumnDouble("2.0").build();
    List<Writable> output = new ArrayList<>();
    output.add(new Text("1.0"));
    output.add(new Text("2.0"));

    TransformProcess transformProcess =
            new TransformProcess.Builder(schema).convertToString("1.0").convertToString("2.0").build();
    CSVSparkTransform csvSparkTransform = new CSVSparkTransform(transformProcess);
    String[] values = new String[] {"1.0", "2.0"};
    SingleCSVRecord record = csvSparkTransform.transform(new SingleCSVRecord(values));
    Base64NDArrayBody body = csvSparkTransform.toArray(new SingleCSVRecord(values));
    INDArray fromBase64 = Nd4jBase64.fromBase64(body.getNdarray());
    assertTrue(fromBase64.isVector());
    // System.out.println("Base 64ed array " + fromBase64);
}
 
Example 2
Source File: ImageSparkTransformTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testBatchImageSparkTransform() throws Exception {
    int seed = 12345;

    File f0 = new ClassPathResource("datavec-spark-inference/testimages/class1/A.jpg").getFile();
    File f1 = new ClassPathResource("datavec-spark-inference/testimages/class1/B.png").getFile();
    File f2 = new ClassPathResource("datavec-spark-inference/testimages/class1/C.jpg").getFile();

    BatchImageRecord batch = new BatchImageRecord();
    batch.add(f0.toURI());
    batch.add(f1.toURI());
    batch.add(f2.toURI());

    ImageTransformProcess imgTransformProcess = new ImageTransformProcess.Builder().seed(seed)
                    .scaleImageTransform(10).cropImageTransform(5).build();

    ImageSparkTransform imgSparkTransform = new ImageSparkTransform(imgTransformProcess);
    Base64NDArrayBody body = imgSparkTransform.toArray(batch);

    INDArray fromBase64 = Nd4jBase64.fromBase64(body.getNdarray());
    // System.out.println("Base 64ed array " + fromBase64);
    assertEquals(3, fromBase64.size(0));
}
 
Example 3
Source File: ImageSparkTransformTest.java    From DataVec with Apache License 2.0
@Test
public void testSingleImageSparkTransform() throws Exception {
    int seed = 12345;

    File f1 = new ClassPathResource("/testimages/class1/A.jpg").getFile();

    SingleImageRecord imgRecord = new SingleImageRecord(f1.toURI());

    ImageTransformProcess imgTransformProcess = new ImageTransformProcess.Builder().seed(seed)
                    .scaleImageTransform(10).cropImageTransform(5).build();

    ImageSparkTransform imgSparkTransform = new ImageSparkTransform(imgTransformProcess);
    Base64NDArrayBody body = imgSparkTransform.toArray(imgRecord);

    INDArray fromBase64 = Nd4jBase64.fromBase64(body.getNdarray());
    System.out.println("Base 64ed array " + fromBase64);
    assertEquals(1, fromBase64.size(0));
}
 
Example 4
Source File: ImageSparkTransformTest.java    From DataVec with Apache License 2.0
@Test
public void testBatchImageSparkTransform() throws Exception {
    int seed = 12345;

    File f0 = new ClassPathResource("/testimages/class1/A.jpg").getFile();
    File f1 = new ClassPathResource("/testimages/class1/B.png").getFile();
    File f2 = new ClassPathResource("/testimages/class1/C.jpg").getFile();

    BatchImageRecord batch = new BatchImageRecord();
    batch.add(f0.toURI());
    batch.add(f1.toURI());
    batch.add(f2.toURI());

    ImageTransformProcess imgTransformProcess = new ImageTransformProcess.Builder().seed(seed)
                    .scaleImageTransform(10).cropImageTransform(5).build();

    ImageSparkTransform imgSparkTransform = new ImageSparkTransform(imgTransformProcess);
    Base64NDArrayBody body = imgSparkTransform.toArray(batch);

    INDArray fromBase64 = Nd4jBase64.fromBase64(body.getNdarray());
    System.out.println("Base 64ed array " + fromBase64);
    assertEquals(3, fromBase64.size(0));
}
 
Example 5
Source File: CSVSparkTransformTest.java    From DataVec with Apache License 2.0
@Test
public void testTransformer() throws Exception {
    List<Writable> input = new ArrayList<>();
    input.add(new DoubleWritable(1.0));
    input.add(new DoubleWritable(2.0));

    Schema schema = new Schema.Builder().addColumnDouble("1.0").addColumnDouble("2.0").build();
    List<Writable> output = new ArrayList<>();
    output.add(new Text("1.0"));
    output.add(new Text("2.0"));

    TransformProcess transformProcess =
            new TransformProcess.Builder(schema).convertToString("1.0").convertToString("2.0").build();
    CSVSparkTransform csvSparkTransform = new CSVSparkTransform(transformProcess);
    String[] values = new String[] {"1.0", "2.0"};
    SingleCSVRecord record = csvSparkTransform.transform(new SingleCSVRecord(values));
    Base64NDArrayBody body = csvSparkTransform.toArray(new SingleCSVRecord(values));
    INDArray fromBase64 = Nd4jBase64.fromBase64(body.getNdarray());
    assertTrue(fromBase64.isVector());
    System.out.println("Base 64ed array " + fromBase64);
}
 
Example 6
Source File: ImageSparkTransformTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testSingleImageSparkTransform() throws Exception {
    int seed = 12345;

    File f1 = new ClassPathResource("datavec-spark-inference/testimages/class1/A.jpg").getFile();

    SingleImageRecord imgRecord = new SingleImageRecord(f1.toURI());

    ImageTransformProcess imgTransformProcess = new ImageTransformProcess.Builder().seed(seed)
                    .scaleImageTransform(10).cropImageTransform(5).build();

    ImageSparkTransform imgSparkTransform = new ImageSparkTransform(imgTransformProcess);
    Base64NDArrayBody body = imgSparkTransform.toArray(imgRecord);

    INDArray fromBase64 = Nd4jBase64.fromBase64(body.getNdarray());
    // System.out.println("Base 64ed array " + fromBase64);
    assertEquals(1, fromBase64.size(0));
}
 
Example 7
Source File: DataVecTransformClientTest.java    From DataVec with Apache License 2.0
@Test
public void testBatchRecord() throws Exception {
    SingleCSVRecord singleCsvRecord = new SingleCSVRecord(new String[] {"0", "0"});

    BatchCSVRecord batchCSVRecord = new BatchCSVRecord(Arrays.asList(singleCsvRecord, singleCsvRecord));
    BatchCSVRecord batchCSVRecord1 = client.transform(batchCSVRecord);
    assertEquals(batchCSVRecord.getRecords().size(), batchCSVRecord1.getRecords().size());

    Base64NDArrayBody body = client.transformArray(batchCSVRecord);
    INDArray arr = Nd4jBase64.fromBase64(body.getNdarray());
    assumeNotNull(arr);
}
 
Example 8
Source File: CSVSparkTransformTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testSingleBatchSequence() throws Exception {
    List<Writable> input = new ArrayList<>();
    input.add(new DoubleWritable(1.0));
    input.add(new DoubleWritable(2.0));

    Schema schema = new Schema.Builder().addColumnDouble("1.0").addColumnDouble("2.0").build();
    List<Writable> output = new ArrayList<>();
    output.add(new Text("1.0"));
    output.add(new Text("2.0"));

    TransformProcess transformProcess =
            new TransformProcess.Builder(schema).convertToString("1.0").convertToString("2.0").build();
    CSVSparkTransform csvSparkTransform = new CSVSparkTransform(transformProcess);
    String[] values = new String[] {"1.0", "2.0"};
    SingleCSVRecord record = csvSparkTransform.transform(new SingleCSVRecord(values));
    BatchCSVRecord batchCSVRecord = new BatchCSVRecord();
    for (int i = 0; i < 3; i++)
        batchCSVRecord.add(record);
    BatchCSVRecord batchCSVRecord1 = csvSparkTransform.transform(batchCSVRecord);
    SequenceBatchCSVRecord sequenceBatchCSVRecord = new SequenceBatchCSVRecord();
    sequenceBatchCSVRecord.add(Arrays.asList(batchCSVRecord));
    Base64NDArrayBody sequenceArray = csvSparkTransform.transformSequenceArray(sequenceBatchCSVRecord);
    INDArray outputBody = Nd4jBase64.fromBase64(sequenceArray.getNdarray());

    // ensure accumulation
    sequenceBatchCSVRecord.add(Arrays.asList(batchCSVRecord));
    sequenceArray = csvSparkTransform.transformSequenceArray(sequenceBatchCSVRecord);
    assertArrayEquals(new long[] {2, 2, 3}, Nd4jBase64.fromBase64(sequenceArray.getNdarray()).shape());

    SequenceBatchCSVRecord transformed = csvSparkTransform.transformSequence(sequenceBatchCSVRecord);
    assertNotNull(transformed.getRecords());
    // System.out.println(transformed);
}
 
Example 9
Source File: DataVecTransformClientTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testBatchRecord() throws Exception {
    SingleCSVRecord singleCsvRecord = new SingleCSVRecord(new String[] {"0", "0"});

    BatchCSVRecord batchCSVRecord = new BatchCSVRecord(Arrays.asList(singleCsvRecord, singleCsvRecord));
    BatchCSVRecord batchCSVRecord1 = client.transform(batchCSVRecord);
    assertEquals(batchCSVRecord.getRecords().size(), batchCSVRecord1.getRecords().size());

    Base64NDArrayBody body = client.transformArray(batchCSVRecord);
    INDArray arr = Nd4jBase64.fromBase64(body.getNdarray());
    assumeNotNull(arr);
}
 
Example 10
Source File: DataVecTransformClientTest.java    From deeplearning4j with Apache License 2.0
@Test
public void testRecord() throws Exception {
    SingleCSVRecord singleCsvRecord = new SingleCSVRecord(new String[] {"0", "0"});
    SingleCSVRecord transformed = client.transformIncremental(singleCsvRecord);
    assertEquals(singleCsvRecord.getValues().size(), transformed.getValues().size());
    Base64NDArrayBody body = client.transformArrayIncremental(singleCsvRecord);
    INDArray arr = Nd4jBase64.fromBase64(body.getNdarray());
    assumeNotNull(arr);
}
 
Example 11
Source File: NDArrayDeSerializer.java    From nd4j with Apache License 2.0
@Override
public INDArray deserialize(JsonParser jp, DeserializationContext deserializationContext) throws IOException {
    JsonNode node = jp.getCodec().readTree(jp);
    String field = node.get("array").asText();
    INDArray ret = Nd4jBase64.fromBase64(field);
    return ret;
}
 
Example 12
Source File: NDArrayDeSerializer.java    From nd4j with Apache License 2.0
@Override
public INDArray deserialize(JsonParser jp, DeserializationContext deserializationContext) throws IOException {
    JsonNode node = jp.getCodec().readTree(jp);
    String field = node.get("array").asText();
    INDArray ret = Nd4jBase64.fromBase64(field);
    return ret;
}
 
Example 13
Source File: CSVSparkTransformTest.java    From DataVec with Apache License 2.0
@Test
public void testSingleBatchSequence() throws Exception {
    List<Writable> input = new ArrayList<>();
    input.add(new DoubleWritable(1.0));
    input.add(new DoubleWritable(2.0));

    Schema schema = new Schema.Builder().addColumnDouble("1.0").addColumnDouble("2.0").build();
    List<Writable> output = new ArrayList<>();
    output.add(new Text("1.0"));
    output.add(new Text("2.0"));

    TransformProcess transformProcess =
            new TransformProcess.Builder(schema).convertToString("1.0").convertToString("2.0").build();
    CSVSparkTransform csvSparkTransform = new CSVSparkTransform(transformProcess);
    String[] values = new String[] {"1.0", "2.0"};
    SingleCSVRecord record = csvSparkTransform.transform(new SingleCSVRecord(values));
    BatchCSVRecord batchCSVRecord = new BatchCSVRecord();
    for (int i = 0; i < 3; i++)
        batchCSVRecord.add(record);
    BatchCSVRecord batchCSVRecord1 = csvSparkTransform.transform(batchCSVRecord);
    SequenceBatchCSVRecord sequenceBatchCSVRecord = new SequenceBatchCSVRecord();
    sequenceBatchCSVRecord.add(Arrays.asList(batchCSVRecord));
    Base64NDArrayBody sequenceArray = csvSparkTransform.transformSequenceArray(sequenceBatchCSVRecord);
    INDArray outputBody = Nd4jBase64.fromBase64(sequenceArray.getNdarray());

    // ensure accumulation
    sequenceBatchCSVRecord.add(Arrays.asList(batchCSVRecord));
    sequenceArray = csvSparkTransform.transformSequenceArray(sequenceBatchCSVRecord);
    assertArrayEquals(new long[] {2, 2, 3}, Nd4jBase64.fromBase64(sequenceArray.getNdarray()).shape());

    SequenceBatchCSVRecord transformed = csvSparkTransform.transformSequence(sequenceBatchCSVRecord);
    assertNotNull(transformed.getRecords());
    System.out.println(transformed);
}
 
Example 14
Source File: DataVecTransformClientTest.java    From DataVec with Apache License 2.0
@Test
public void testRecord() throws Exception {
    SingleCSVRecord singleCsvRecord = new SingleCSVRecord(new String[] {"0", "0"});
    SingleCSVRecord transformed = client.transformIncremental(singleCsvRecord);
    assertEquals(singleCsvRecord.getValues().size(), transformed.getValues().size());
    Base64NDArrayBody body = client.transformArrayIncremental(singleCsvRecord);
    INDArray arr = Nd4jBase64.fromBase64(body.getNdarray());
    assumeNotNull(arr);
}
 
Example 15
Source File: ImageSparkTransformServerTest.java    From deeplearning4j with Apache License 2.0
public INDArray getNDArray(JsonNode node) throws IOException {
    return Nd4jBase64.fromBase64(node.getObject().getString("ndarray"));
}
 
Example 16
Source File: SparkTransformServerTest.java    From deeplearning4j with Apache License 2.0
public INDArray getNDArray(JsonNode node) throws IOException {
    return Nd4jBase64.fromBase64(node.getObject().getString("ndarray"));
}
 
Example 17
Source File: SparkTransformServerTest.java    From DataVec with Apache License 2.0
public INDArray getNDArray(JsonNode node) throws IOException {
    return Nd4jBase64.fromBase64(node.getObject().getString("ndarray"));
}
 
Example 18
Source File: ImageSparkTransformServerTest.java    From DataVec with Apache License 2.0
public INDArray getNDArray(JsonNode node) throws IOException {
    return Nd4jBase64.fromBase64(node.getObject().getString("ndarray"));
}
 
Example 19
Source File: NDArrayDeSerializer.java    From deeplearning4j with Apache License 2.0
@Override
public INDArray deserialize(JsonParser jp, DeserializationContext deserializationContext) throws IOException {
    JsonNode node = jp.getCodec().readTree(jp);
    String field = node.get("array").asText();
    return Nd4jBase64.fromBase64(field);
}