Java Code Examples for org.nd4j.serde.base64.Nd4jBase64#base64String()
The following examples show how to use org.nd4j.serde.base64.Nd4jBase64#base64String().
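Before the project examples, here is a minimal, self-contained round-trip sketch: base64String(INDArray) encodes an array (shape, data type and values) as a Base64 string, and the companion Nd4jBase64.fromBase64(String), assumed here to be available alongside it as in current ND4J releases, decodes the string back into an equal INDArray.

import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.serde.base64.Nd4jBase64;

import java.io.IOException;

public class Nd4jBase64RoundTrip {
    public static void main(String[] args) throws IOException {
        // A small 2x3 array to serialize.
        INDArray original = Nd4j.create(new float[] {1, 2, 3, 4, 5, 6}, new int[] {2, 3});

        // Encode to a Base64 string (suitable for JSON bodies, REST payloads, etc.).
        String encoded = Nd4jBase64.base64String(original);

        // Decode back into an INDArray and verify the round trip.
        // fromBase64 is assumed to be the companion decoder in the same class.
        INDArray decoded = Nd4jBase64.fromBase64(encoded);
        System.out.println("Round trip matches: " + original.equals(decoded));
    }
}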
Example 1
Source File: CSVSparkTransform.java From DataVec with Apache License 2.0

/**
 * Transform a batch of CSV records into a sequence (time series) ndarray
 * and return it as a Base64-encoded body.
 *
 * @param singleCsvRecord the batch of CSV records to transform
 * @return the Base64-encoded ndarray body, or null if encoding fails
 */
public Base64NDArrayBody transformSequenceArrayIncremental(BatchCSVRecord singleCsvRecord) {
    List<List<List<Writable>>> converted = executeToSequence(toArrowWritables(toArrowColumnsString(
            bufferAllocator, transformProcess.getInitialSchema(),
            singleCsvRecord.getRecordsAsString()),
            transformProcess.getInitialSchema()), transformProcess);

    ArrowWritableRecordTimeSeriesBatch arrowWritableRecordBatch = (ArrowWritableRecordTimeSeriesBatch) converted;
    INDArray arr = RecordConverter.toTensor(arrowWritableRecordBatch);

    try {
        return new Base64NDArrayBody(Nd4jBase64.base64String(arr));
    } catch (IOException e) {
        e.printStackTrace();
    }
    return null;
}
Example 2
Source File: CSVSparkTransform.java From deeplearning4j with Apache License 2.0

/**
 * Transform a batch of CSV records into a sequence (time series) ndarray
 * and return it as a Base64-encoded body.
 *
 * @param singleCsvRecord the batch of CSV records to transform
 * @return the Base64-encoded ndarray body, or null if encoding fails
 */
public Base64NDArrayBody transformSequenceArrayIncremental(BatchCSVRecord singleCsvRecord) {
    List<List<List<Writable>>> converted = executeToSequence(toArrowWritables(toArrowColumnsString(
            bufferAllocator, transformProcess.getInitialSchema(),
            singleCsvRecord.getRecordsAsString()),
            transformProcess.getInitialSchema()), transformProcess);

    ArrowWritableRecordTimeSeriesBatch arrowWritableRecordBatch = (ArrowWritableRecordTimeSeriesBatch) converted;
    INDArray arr = RecordConverter.toTensor(arrowWritableRecordBatch);

    try {
        return new Base64NDArrayBody(Nd4jBase64.base64String(arr));
    } catch (IOException e) {
        log.error("", e);
    }
    return null;
}
Example 3
Source File: ImageSparkTransform.java From DataVec with Apache License 2.0

/**
 * Transform a batch of image records into a single Base64-encoded ndarray body.
 */
public Base64NDArrayBody toArray(BatchImageRecord batch) throws IOException {
    List<INDArray> records = new ArrayList<>();

    for (SingleImageRecord imgRecord : batch.getRecords()) {
        ImageWritable record2 = imageTransformProcess.transformFileUriToInput(imgRecord.getUri());
        INDArray finalRecord = imageTransformProcess.executeArray(record2);
        records.add(finalRecord);
    }

    long[] shape = records.get(0).shape();
    INDArray array = Nd4j.create(records, new long[] {records.size(), shape[1], shape[2], shape[3]});
    return new Base64NDArrayBody(Nd4jBase64.base64String(array));
}
Example 4
Source File: CSVSparkTransform.java From DataVec with Apache License 2.0

/**
 * Convert a raw record via the {@link TransformProcess}
 * to a Base64-encoded ndarray.
 *
 * @param batch the record to convert
 * @return the Base64-encoded ndarray
 * @throws IOException
 */
public Base64NDArrayBody toArray(BatchCSVRecord batch) throws IOException {
    List<List<Writable>> converted = execute(toArrowWritables(toArrowColumnsString(
            bufferAllocator, transformProcess.getInitialSchema(),
            batch.getRecordsAsString()),
            transformProcess.getInitialSchema()), transformProcess);

    ArrowWritableRecordBatch arrowRecordBatch = (ArrowWritableRecordBatch) converted;
    INDArray convert = ArrowConverter.toArray(arrowRecordBatch);
    return new Base64NDArrayBody(Nd4jBase64.base64String(convert));
}
Example 5
Source File: CSVSparkTransform.java From DataVec with Apache License 2.0

/**
 * Convert a raw record via the {@link TransformProcess}
 * to a Base64-encoded ndarray.
 *
 * @param record the record to convert
 * @return the Base64-encoded ndarray
 * @throws IOException
 */
public Base64NDArrayBody toArray(SingleCSVRecord record) throws IOException {
    List<Writable> record2 = toArrowWritablesSingle(
            toArrowColumnsStringSingle(bufferAllocator,
                    transformProcess.getInitialSchema(), record.getValues()),
            transformProcess.getInitialSchema());

    List<Writable> finalRecord = execute(Arrays.asList(record2), transformProcess).get(0);
    INDArray convert = RecordConverter.toArray(finalRecord);
    return new Base64NDArrayBody(Nd4jBase64.base64String(convert));
}
Example 6
Source File: NDArraySerializer.java From nd4j with Apache License 2.0

@Override
public void serialize(INDArray indArray, JsonGenerator jsonGenerator, SerializerProvider serializerProvider)
        throws IOException {
    // Encode the array as a Base64 string and write it as a single JSON field.
    String toBase64 = Nd4jBase64.base64String(indArray);
    jsonGenerator.writeStartObject();
    jsonGenerator.writeStringField("array", toBase64);
    jsonGenerator.writeEndObject();
}
Example 7
Source File: NDArraySerializer.java From nd4j with Apache License 2.0

@Override
public void serialize(INDArray indArray, JsonGenerator jsonGenerator, SerializerProvider serializerProvider)
        throws IOException {
    // Encode the array as a Base64 string and write it as a single JSON field.
    String toBase64 = Nd4jBase64.base64String(indArray);
    jsonGenerator.writeStartObject();
    jsonGenerator.writeStringField("array", toBase64);
    jsonGenerator.writeEndObject();
}
Example 8
Source File: ImageSparkTransform.java From deeplearning4j with Apache License 2.0

/**
 * Transform a batch of image records into a single Base64-encoded ndarray body.
 */
public Base64NDArrayBody toArray(BatchImageRecord batch) throws IOException {
    List<INDArray> records = new ArrayList<>();

    for (SingleImageRecord imgRecord : batch.getRecords()) {
        ImageWritable record2 = imageTransformProcess.transformFileUriToInput(imgRecord.getUri());
        INDArray finalRecord = imageTransformProcess.executeArray(record2);
        records.add(finalRecord);
    }

    // Stack the per-image arrays along dimension 0 before encoding.
    INDArray array = Nd4j.concat(0, records.toArray(new INDArray[records.size()]));
    return new Base64NDArrayBody(Nd4jBase64.base64String(array));
}
Example 9
Source File: CSVSparkTransform.java From deeplearning4j with Apache License 2.0

/**
 * Convert a raw record via the {@link TransformProcess}
 * to a Base64-encoded ndarray.
 *
 * @param batch the record to convert
 * @return the Base64-encoded ndarray
 * @throws IOException
 */
public Base64NDArrayBody toArray(BatchCSVRecord batch) throws IOException {
    List<List<Writable>> converted = execute(toArrowWritables(toArrowColumnsString(
            bufferAllocator, transformProcess.getInitialSchema(),
            batch.getRecordsAsString()),
            transformProcess.getInitialSchema()), transformProcess);

    ArrowWritableRecordBatch arrowRecordBatch = (ArrowWritableRecordBatch) converted;
    INDArray convert = ArrowConverter.toArray(arrowRecordBatch);
    return new Base64NDArrayBody(Nd4jBase64.base64String(convert));
}
Example 10
Source File: CSVSparkTransform.java From deeplearning4j with Apache License 2.0

/**
 * Convert a raw record via the {@link TransformProcess}
 * to a Base64-encoded ndarray.
 *
 * @param record the record to convert
 * @return the Base64-encoded ndarray
 * @throws IOException
 */
public Base64NDArrayBody toArray(SingleCSVRecord record) throws IOException {
    List<Writable> record2 = toArrowWritablesSingle(
            toArrowColumnsStringSingle(bufferAllocator,
                    transformProcess.getInitialSchema(), record.getValues()),
            transformProcess.getInitialSchema());

    List<Writable> finalRecord = execute(Arrays.asList(record2), transformProcess).get(0);
    INDArray convert = RecordConverter.toArray(DataType.DOUBLE, finalRecord);
    return new Base64NDArrayBody(Nd4jBase64.base64String(convert));
}
Example 11
Source File: NDArraySerializer.java From deeplearning4j with Apache License 2.0

@Override
public void serialize(INDArray indArray, JsonGenerator jsonGenerator, SerializerProvider serializerProvider)
        throws IOException {
    // Encode the array as a Base64 string and write it as a single JSON field.
    String toBase64 = Nd4jBase64.base64String(indArray);
    jsonGenerator.writeStartObject();
    jsonGenerator.writeStringField("array", toBase64);
    jsonGenerator.writeEndObject();
}
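The serializer snippets above only cover writing; reading the JSON back requires a matching Jackson deserializer. The sketch below is a minimal, illustrative counterpart, not the exact class shipped with ND4J: it assumes standard (non-shaded) Jackson imports, a hypothetical class name, and that the companion decoder Nd4jBase64.fromBase64(String) is available. It simply reads the "array" field written by the serializer above.

import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.JsonDeserializer;
import com.fasterxml.jackson.databind.JsonNode;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.serde.base64.Nd4jBase64;

import java.io.IOException;

// Illustrative counterpart to the NDArraySerializer shown above: reads the
// "array" field and decodes it with Nd4jBase64.fromBase64 (assumed available).
public class NDArrayBase64Deserializer extends JsonDeserializer<INDArray> {
    @Override
    public INDArray deserialize(JsonParser jsonParser, DeserializationContext context) throws IOException {
        JsonNode node = jsonParser.getCodec().readTree(jsonParser);
        String base64 = node.get("array").asText();
        return Nd4jBase64.fromBase64(base64);
    }
}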
Example 12
Source File: ModelServerDirectInferenceExample.java From SKIL_Examples with Apache License 2.0

public void run() throws Exception {
    final File file = new File(inputFile);

    if (!file.exists() || !file.isFile()) {
        System.err.format("unable to access file %s\n", inputFile);
        System.exit(2);
    }

    // Open file
    BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(file)));

    SkilClient skilClient = new SkilClient(textAsJson);

    // Read each line
    String line = null;
    while ((line = br.readLine()) != null) {
        // Check if label indicator is up front
        String label = null;
        if (line.matches("^\\d:\\s.*")) {
            label = line.substring(0, 1);
        }

        // Just in case
        line = StringUtils.removePattern(line, "^\\d:\\s");

        String[] fields = line.split(",");

        // Maybe strip quotes
        for (int i = 0; i < fields.length; i++) {
            final String field = fields[i];
            if (field.matches("^\".*\"$")) {
                fields[i] = field.substring(1, field.length() - 1);
            }
        }

        int[] shape = (isSequential) ?
                new int[] {1, 1, fields.length} :
                new int[] {1, fields.length};

        INDArray array = Nd4j.create(shape);

        for (int i = 0; i < fields.length; i++) {
            // TODO: catch NumberFormatException
            Double d = Double.parseDouble(fields[i]);
            int[] idx = (isSequential) ? new int[] {0, 0, i} : new int[] {0, i};
            array.putScalar(idx, d);
        }

        Inference.Request request = new Inference.Request(Nd4jBase64.base64String(array));
        Inference.Response.Classify response = skilClient.classify(inferenceEndpoint, request);

        System.out.format("Inference response: %s\n", response.toString());
        if (label != null) {
            System.out.format(" Label expected: %s\n", label);
        }
    }

    br.close();
}
Example 13
Source File: ImageSparkTransform.java From DataVec with Apache License 2.0

/**
 * Transform a single image record into a Base64-encoded ndarray body.
 */
public Base64NDArrayBody toArray(SingleImageRecord record) throws IOException {
    ImageWritable record2 = imageTransformProcess.transformFileUriToInput(record.getUri());
    INDArray finalRecord = imageTransformProcess.executeArray(record2);
    return new Base64NDArrayBody(Nd4jBase64.base64String(finalRecord));
}
Example 14
Source File: ImageSparkTransform.java From deeplearning4j with Apache License 2.0

/**
 * Transform a single image record into a Base64-encoded ndarray body.
 */
public Base64NDArrayBody toArray(SingleImageRecord record) throws IOException {
    ImageWritable record2 = imageTransformProcess.transformFileUriToInput(record.getUri());
    INDArray finalRecord = imageTransformProcess.executeArray(record2);
    return new Base64NDArrayBody(Nd4jBase64.base64String(finalRecord));
}