org.datavec.arrow.recordreader.ArrowWritableRecordTimeSeriesBatch Java Examples
The following examples show how to use
org.datavec.arrow.recordreader.ArrowWritableRecordTimeSeriesBatch.
The source file, project, and license are noted above each example.
Example #1
Source File: CSVSparkTransform.java From DataVec with Apache License 2.0
/**
 * Transform a batch of CSV records into a sequence, returned as a
 * base64-encoded ndarray.
 *
 * @param singleCsvRecord the batch of CSV records to transform
 * @return the transformed sequence as a {@link Base64NDArrayBody},
 *         or null if base64 serialization fails
 */
public Base64NDArrayBody transformSequenceArrayIncremental(BatchCSVRecord singleCsvRecord) {
    // Convert the CSV strings to Arrow columns, wrap them as writables,
    // then run the transform process to produce a sequence
    List<List<List<Writable>>> converted = executeToSequence(
            toArrowWritables(
                    toArrowColumnsString(bufferAllocator,
                            transformProcess.getInitialSchema(),
                            singleCsvRecord.getRecordsAsString()),
                    transformProcess.getInitialSchema()),
            transformProcess);
    // The executor actually returns an ArrowWritableRecordTimeSeriesBatch,
    // which implements List<List<List<Writable>>>, so this cast is safe
    ArrowWritableRecordTimeSeriesBatch arrowWritableRecordBatch =
            (ArrowWritableRecordTimeSeriesBatch) converted;
    INDArray arr = RecordConverter.toTensor(arrowWritableRecordBatch);
    try {
        return new Base64NDArrayBody(Nd4jBase64.base64String(arr));
    } catch (IOException e) {
        e.printStackTrace();
    }
    return null;
}
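For context, here is a minimal sketch of how this method might be driven. It is not taken from the project above: the CSVSparkTransform constructor, the SingleCSVRecord/BatchCSVRecord construction, and the column names are all assumptions about the DataVec Spark transform API of that era, so verify them against the actual sources.

import org.datavec.api.transform.TransformProcess;
import org.datavec.api.transform.schema.Schema;
import org.datavec.spark.transform.CSVSparkTransform;
import org.datavec.spark.transform.model.Base64NDArrayBody;
import org.datavec.spark.transform.model.BatchCSVRecord;
import org.datavec.spark.transform.model.SingleCSVRecord;

public class TransformSequenceSketch {
    public static void main(String[] args) throws Exception {
        // Hypothetical schema matching the incoming CSV columns
        Schema schema = new Schema.Builder()
                .addColumnDouble("sensorA")
                .addColumnDouble("sensorB")
                .build();

        // Transform process that turns the flat rows into a sequence
        TransformProcess tp = new TransformProcess.Builder(schema)
                .convertToSequence()
                .build();

        // Assumed constructor: CSVSparkTransform wrapping a TransformProcess
        CSVSparkTransform transform = new CSVSparkTransform(tp);

        // Assumed builder usage: a batch holding one CSV row per record
        BatchCSVRecord batch = new BatchCSVRecord();
        batch.add(new SingleCSVRecord("0.5", "1.5"));
        batch.add(new SingleCSVRecord("0.6", "1.6"));

        // Returns the transformed sequence as a base64-encoded NDArray
        Base64NDArrayBody body = transform.transformSequenceArrayIncremental(batch);
        System.out.println(body.getNdarray());
    }
}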
Example #2
Source File: CSVSparkTransform.java From deeplearning4j with Apache License 2.0
/**
 * Transform a batch of CSV records into a sequence, returned as a
 * base64-encoded ndarray.
 *
 * @param singleCsvRecord the batch of CSV records to transform
 * @return the transformed sequence as a {@link Base64NDArrayBody},
 *         or null if base64 serialization fails
 */
public Base64NDArrayBody transformSequenceArrayIncremental(BatchCSVRecord singleCsvRecord) {
    // Convert the CSV strings to Arrow columns, wrap them as writables,
    // then run the transform process to produce a sequence
    List<List<List<Writable>>> converted = executeToSequence(
            toArrowWritables(
                    toArrowColumnsString(bufferAllocator,
                            transformProcess.getInitialSchema(),
                            singleCsvRecord.getRecordsAsString()),
                    transformProcess.getInitialSchema()),
            transformProcess);
    // The executor actually returns an ArrowWritableRecordTimeSeriesBatch,
    // which implements List<List<List<Writable>>>, so this cast is safe
    ArrowWritableRecordTimeSeriesBatch arrowWritableRecordBatch =
            (ArrowWritableRecordTimeSeriesBatch) converted;
    INDArray arr = RecordConverter.toTensor(arrowWritableRecordBatch);
    try {
        return new Base64NDArrayBody(Nd4jBase64.base64String(arr));
    } catch (IOException e) {
        log.error("", e);
    }
    return null;
}
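This is the same method as Example #1, taken from the later deeplearning4j repository (DataVec was merged into the deeplearning4j monorepo). The only difference is that the IOException is logged via the class logger rather than printed with printStackTrace().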
Example #3
Source File: TestConvertToSequence.java From DataVec with Apache License 2.0
@Test
public void testConvertToSequenceLength1() {
    Schema s = new Schema.Builder()
            .addColumnsString("string")
            .addColumnLong("long")
            .build();

    List<List<Writable>> allExamples = Arrays.asList(
            Arrays.<Writable>asList(new Text("a"), new LongWritable(0)),
            Arrays.<Writable>asList(new Text("b"), new LongWritable(1)),
            Arrays.<Writable>asList(new Text("c"), new LongWritable(2)));

    TransformProcess tp = new TransformProcess.Builder(s)
            .convertToSequence()
            .build();

    List<List<Writable>> rdd = allExamples;

    ArrowWritableRecordTimeSeriesBatch out = (ArrowWritableRecordTimeSeriesBatch)
            LocalTransformExecutor.executeToSequence(rdd, tp);

    List<List<List<Writable>>> out2 = out.toArrayList();

    assertEquals(3, out2.size());
    // Each input example becomes its own sequence of length 1
    for (int i = 0; i < 3; i++) {
        assertTrue(out2.contains(Collections.singletonList(allExamples.get(i))));
    }
}
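Note the cast in this test: LocalTransformExecutor.executeToSequence is declared to return List<List<List<Writable>>>, but the Arrow-backed local executor actually returns an ArrowWritableRecordTimeSeriesBatch, which implements that list interface, as the return statements in Examples #6, #8, and #10 below confirm. The same pattern appears in the casts of Examples #1 and #2.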
Example #4
Source File: TestConvertToSequence.java From deeplearning4j with Apache License 2.0
@Test
public void testConvertToSequenceLength1() {
    Schema s = new Schema.Builder()
            .addColumnsString("string")
            .addColumnLong("long")
            .build();

    List<List<Writable>> allExamples = Arrays.asList(
            Arrays.<Writable>asList(new Text("a"), new LongWritable(0)),
            Arrays.<Writable>asList(new Text("b"), new LongWritable(1)),
            Arrays.<Writable>asList(new Text("c"), new LongWritable(2)));

    TransformProcess tp = new TransformProcess.Builder(s)
            .convertToSequence()
            .build();

    List<List<Writable>> rdd = allExamples;

    ArrowWritableRecordTimeSeriesBatch out = (ArrowWritableRecordTimeSeriesBatch)
            LocalTransformExecutor.executeToSequence(rdd, tp);

    List<List<List<Writable>>> out2 = out.toArrayList();

    assertEquals(3, out2.size());
    // Each input example becomes its own sequence of length 1
    for (int i = 0; i < 3; i++) {
        assertTrue(out2.contains(Collections.singletonList(allExamples.get(i))));
    }
}
Example #5
Source File: ArrowUtils.java From konduit-serving with Apache License 2.0
public static INDArray toArray(ArrowWritableRecordTimeSeriesBatch arrowWritableRecordBatch) {
    return RecordConverter.toTensor(arrowWritableRecordBatch);
}
Example #6
Source File: ArrowUtils.java From konduit-serving with Apache License 2.0
public static List<List<List<Writable>>> toArrowWritablesTimeSeries(List<FieldVector> fieldVectors,
                                                                     Schema schema,
                                                                     int timeSeriesLength) {
    // ArrowWritableRecordTimeSeriesBatch implements List<List<List<Writable>>>,
    // so the batch can be returned directly as the writables list
    return new ArrowWritableRecordTimeSeriesBatch(fieldVectors, schema, timeSeriesLength);
}
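A sketch of feeding hand-built Arrow vectors through this helper. The Float8Vector setup uses the standard Arrow API, but the ArrowUtils package name and the interpretation of timeSeriesLength (steps per sequence) are assumptions; exact vector classes also depend on the Arrow version konduit-serving pulls in, so treat this as illustrative only.

import java.util.Arrays;
import java.util.List;

import ai.konduit.serving.util.ArrowUtils; // assumed package
import org.apache.arrow.memory.BufferAllocator;
import org.apache.arrow.memory.RootAllocator;
import org.apache.arrow.vector.FieldVector;
import org.apache.arrow.vector.Float8Vector;
import org.datavec.api.transform.schema.Schema;
import org.datavec.api.writable.Writable;

public class TimeSeriesBatchSketch {
    public static void main(String[] args) {
        BufferAllocator allocator = new RootAllocator(Long.MAX_VALUE);

        // One double column with four values
        Float8Vector x = new Float8Vector("x", allocator);
        x.allocateNew(4);
        for (int i = 0; i < 4; i++) {
            x.set(i, i * 0.5);
        }
        x.setValueCount(4);

        Schema schema = new Schema.Builder()
                .addColumnDouble("x")
                .build();

        List<FieldVector> vectors = Arrays.<FieldVector>asList(x);

        // Assuming timeSeriesLength is the number of steps per sequence,
        // 4 rows at length 2 should yield 2 sequences of 2 steps each
        List<List<List<Writable>>> ts =
                ArrowUtils.toArrowWritablesTimeSeries(vectors, schema, 2);
        System.out.println(ts.size());
    }
}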
Example #7
Source File: ArrowConverter.java From DataVec with Apache License 2.0
/**
 * Create an ndarray from a matrix.
 * Every record in the incoming batch must have the same number of rows,
 * because an {@link INDArray} requires consistent dimensions.
 * The input columns must also be numerical. If they aren't numerical already,
 * consider using an {@link org.datavec.api.transform.TransformProcess} to transform the data
 * output from {@link org.datavec.arrow.recordreader.ArrowRecordReader} into the proper format
 * for direct conversion with this method.
 *
 * @param arrowWritableRecordBatch the incoming batch, typically output from
 *                                 an {@link org.datavec.arrow.recordreader.ArrowRecordReader}
 * @return an {@link INDArray} representative of the input data
 */
public static INDArray toArray(ArrowWritableRecordTimeSeriesBatch arrowWritableRecordBatch) {
    return RecordConverter.toTensor(arrowWritableRecordBatch);
}
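Combining this with the LocalTransformExecutor pattern from Examples #3 and #4, here is a minimal sketch that converts a handful of records to a sequence and then to an INDArray. The numeric-only schema and values are made up for illustration; per the javadoc above, the columns must be numerical for toArray to succeed.

import java.util.Arrays;
import java.util.List;

import org.datavec.api.transform.TransformProcess;
import org.datavec.api.transform.schema.Schema;
import org.datavec.api.writable.DoubleWritable;
import org.datavec.api.writable.Writable;
import org.datavec.arrow.ArrowConverter;
import org.datavec.arrow.recordreader.ArrowWritableRecordTimeSeriesBatch;
import org.datavec.local.transforms.LocalTransformExecutor;
import org.nd4j.linalg.api.ndarray.INDArray;

public class ToArraySketch {
    public static void main(String[] args) {
        // Numeric-only schema, as required by ArrowConverter.toArray
        Schema schema = new Schema.Builder()
                .addColumnDouble("x")
                .addColumnDouble("y")
                .build();

        List<List<Writable>> rows = Arrays.asList(
                Arrays.<Writable>asList(new DoubleWritable(1.0), new DoubleWritable(2.0)),
                Arrays.<Writable>asList(new DoubleWritable(3.0), new DoubleWritable(4.0)));

        TransformProcess tp = new TransformProcess.Builder(schema)
                .convertToSequence()
                .build();

        // As in Examples #3/#4, the local executor returns an
        // ArrowWritableRecordTimeSeriesBatch behind the List interface
        ArrowWritableRecordTimeSeriesBatch batch = (ArrowWritableRecordTimeSeriesBatch)
                LocalTransformExecutor.executeToSequence(rows, tp);

        INDArray arr = ArrowConverter.toArray(batch);
        System.out.println(Arrays.toString(arr.shape()));
    }
}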
Example #8
Source File: ArrowConverter.java From DataVec with Apache License 2.0
/**
 * Convert the input field vectors (the input data) and
 * the given schema to a proper list of writables.
 *
 * @param fieldVectors     the field vectors to use
 * @param schema           the schema to use
 * @param timeSeriesLength the length of the time series
 * @return the equivalent DataVec batch for the input data
 */
public static List<List<List<Writable>>> toArrowWritablesTimeSeries(List<FieldVector> fieldVectors,
                                                                     Schema schema,
                                                                     int timeSeriesLength) {
    return new ArrowWritableRecordTimeSeriesBatch(fieldVectors, schema, timeSeriesLength);
}
Example #9
Source File: ArrowConverter.java From deeplearning4j with Apache License 2.0
/**
 * Create an ndarray from a matrix.
 * Every record in the incoming batch must have the same number of rows,
 * because an {@link INDArray} requires consistent dimensions.
 * The input columns must also be numerical. If they aren't numerical already,
 * consider using an {@link org.datavec.api.transform.TransformProcess} to transform the data
 * output from {@link org.datavec.arrow.recordreader.ArrowRecordReader} into the proper format
 * for direct conversion with this method.
 *
 * @param arrowWritableRecordBatch the incoming batch, typically output from
 *                                 an {@link org.datavec.arrow.recordreader.ArrowRecordReader}
 * @return an {@link INDArray} representative of the input data
 */
public static INDArray toArray(ArrowWritableRecordTimeSeriesBatch arrowWritableRecordBatch) {
    return RecordConverter.toTensor(arrowWritableRecordBatch);
}
Example #10
Source File: ArrowConverter.java From deeplearning4j with Apache License 2.0
/**
 * Convert the input field vectors (the input data) and
 * the given schema to a proper list of writables.
 *
 * @param fieldVectors     the field vectors to use
 * @param schema           the schema to use
 * @param timeSeriesLength the length of the time series
 * @return the equivalent DataVec batch for the input data
 */
public static List<List<List<Writable>>> toArrowWritablesTimeSeries(List<FieldVector> fieldVectors,
                                                                     Schema schema,
                                                                     int timeSeriesLength) {
    return new ArrowWritableRecordTimeSeriesBatch(fieldVectors, schema, timeSeriesLength);
}