org.apache.arrow.vector.VectorSchemaRoot Java Examples
The following examples show how to use org.apache.arrow.vector.VectorSchemaRoot. Each example is taken from an open-source project; the source file, project, and license are noted above each snippet.
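Before the project-specific examples, a minimal, self-contained sketch of the typical VectorSchemaRoot lifecycle may be useful: create a root from a schema and an allocator, populate its vectors, set the row count, and round-trip one record batch through the Arrow streaming format. The class name, field name, and values below are illustrative only, not taken from any of the projects that follow.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.util.Collections;

import org.apache.arrow.memory.BufferAllocator;
import org.apache.arrow.memory.RootAllocator;
import org.apache.arrow.vector.IntVector;
import org.apache.arrow.vector.VectorSchemaRoot;
import org.apache.arrow.vector.ipc.ArrowStreamReader;
import org.apache.arrow.vector.ipc.ArrowStreamWriter;
import org.apache.arrow.vector.types.pojo.ArrowType;
import org.apache.arrow.vector.types.pojo.Field;
import org.apache.arrow.vector.types.pojo.Schema;

public class VectorSchemaRootRoundTrip {
  public static void main(String[] args) throws Exception {
    // A one-column schema; the field name "a" is arbitrary.
    Schema schema = new Schema(Collections.singletonList(
        Field.nullable("a", new ArrowType.Int(32, true))));
    try (BufferAllocator allocator = new RootAllocator(Long.MAX_VALUE);
         VectorSchemaRoot root = VectorSchemaRoot.create(schema, allocator)) {
      // Populate the single IntVector and declare the row count on the root.
      IntVector vector = (IntVector) root.getVector("a");
      vector.allocateNew(3);
      for (int i = 0; i < 3; i++) {
        vector.setSafe(i, i * 10);
      }
      root.setRowCount(3);

      // Write the root's current contents as one record batch in the streaming format.
      ByteArrayOutputStream out = new ByteArrayOutputStream();
      try (ArrowStreamWriter writer = new ArrowStreamWriter(root, null, out)) {
        writer.start();
        writer.writeBatch();
        writer.end();
      }

      // Read the batch back; the reader exposes its own VectorSchemaRoot.
      try (ArrowStreamReader reader =
               new ArrowStreamReader(new ByteArrayInputStream(out.toByteArray()), allocator)) {
        while (reader.loadNextBatch()) {
          System.out.println(reader.getVectorSchemaRoot().contentToTSVString());
        }
      }
    }
  }
}

Closing the root (and the allocator) releases the underlying Arrow buffers; the project examples below follow the same create/populate/write or read/load pattern with their own wrappers.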
Example #1
Source File: YosegiArrowReader.java From yosegi with Apache License 2.0 | 8 votes |
/**
 * Read next.
 */
public byte[] nextToBytes() throws IOException {
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  VectorSchemaRoot schemaRoot = nextToSchemaRoot();
  ArrowFileWriter writer = new ArrowFileWriter( schemaRoot, null, Channels.newChannel( out ) );
  writer.start();
  writer.writeBatch();
  writer.end();
  writer.close();
  return out.toByteArray();
}
Example #2
Source File: ArrowBinaryIterator.java From spark-bigquery-connector with Apache License 2.0 | 6 votes |
private Iterator<InternalRow> toArrowRows(VectorSchemaRoot root, List<String> namesInOrder) {
  ColumnVector[] columns = namesInOrder.stream()
      .map(name -> root.getVector(name))
      .map(vector -> new ArrowSchemaConverter(vector))
      .collect(Collectors.toList())
      .toArray(new ColumnVector[0]);

  ColumnarBatch batch = new ColumnarBatch(columns);
  batch.setNumRows(root.getRowCount());
  return batch.rowIterator();
}
Example #3
Source File: GandivaUtils.java From dremio-oss with Apache License 2.0 | 6 votes |
/**
 * Creates the vector schema from incoming container and referenced fields.
 * @param input
 * @param referencedFields
 * @return the vector schema root.
 */
public static VectorSchemaRoot getSchemaRoot(VectorAccessible input, Set referencedFields) {
  List<FieldVector> fv = ImmutableList.copyOf(input)
      .stream()
      .map(vw -> ((FieldVector) vw.getValueVector()))
      .filter(fVec -> referencedFields.contains(fVec.getField()))
      .collect(Collectors.toList());
  List<Field> fields = fv.stream()
      .map(fieldVec -> fieldVec.getField())
      .collect(Collectors.toList());
  Schema schemaWithOnlyReferencedFields = new Schema(fields);
  VectorSchemaRoot root = new VectorSchemaRoot(schemaWithOnlyReferencedFields, fv, 0);
  return root;
}
Example #4
Source File: YosegiArrowWriter.java From yosegi with Apache License 2.0 | 6 votes |
/**
 * Append from arrow byte array.
 */
public void append( final byte[] buffer ) throws IOException {
  ArrowFileReader arrowReader = new ArrowFileReader(
      new SeekableInMemoryByteChannel( buffer ) , new RootAllocator( Integer.MAX_VALUE ) );
  List<ArrowBlock> blockList = arrowReader.getRecordBlocks();
  for ( ArrowBlock block : blockList ) {
    VectorSchemaRoot root = arrowReader.getVectorSchemaRoot();
    arrowReader.loadRecordBatch(block);
    append( root );
  }
}
Example #5
Source File: RowDataArrowPythonScalarFunctionOperator.java From flink with Apache License 2.0 | 6 votes |
@Override @SuppressWarnings("ConstantConditions") public void emitResults() throws IOException { byte[] udfResult; while ((udfResult = userDefinedFunctionResultQueue.poll()) != null) { bais.setBuffer(udfResult, 0, udfResult.length); reader.loadNextBatch(); VectorSchemaRoot root = reader.getVectorSchemaRoot(); if (arrowReader == null) { arrowReader = ArrowUtils.createRowDataArrowReader(root, outputType); } for (int i = 0; i < root.getRowCount(); i++) { RowData input = forwardedInputQueue.poll(); reuseJoinedRow.setRowKind(input.getRowKind()); rowDataWrapper.collect(reuseJoinedRow.replace(input, arrowReader.read(i))); } } }
Example #6
Source File: ArrowPythonScalarFunctionOperator.java From flink with Apache License 2.0 | 6 votes |
@Override @SuppressWarnings("ConstantConditions") public void emitResults() throws IOException { byte[] udfResult; while ((udfResult = userDefinedFunctionResultQueue.poll()) != null) { bais.setBuffer(udfResult, 0, udfResult.length); reader.loadNextBatch(); VectorSchemaRoot root = reader.getVectorSchemaRoot(); if (arrowReader == null) { arrowReader = ArrowUtils.createRowArrowReader(root, outputType); } for (int i = 0; i < root.getRowCount(); i++) { CRow input = forwardedInputQueue.poll(); cRowWrapper.setChange(input.change()); cRowWrapper.collect(Row.join(input.row(), arrowReader.read(i))); } } }
Example #7
Source File: ArrowUtilsTest.java From flink with Apache License 2.0 | 5 votes |
@Test
public void testCreateRowArrowReader() {
  VectorSchemaRoot root = VectorSchemaRoot.create(ArrowUtils.toArrowSchema(rowType), allocator);
  RowArrowReader reader = ArrowUtils.createRowArrowReader(root, rowType);
  ArrowFieldReader[] fieldReaders = reader.getFieldReaders();
  for (int i = 0; i < fieldReaders.length; i++) {
    assertEquals(testFields.get(i).f5, fieldReaders[i].getClass());
  }
}
Example #8
Source File: SFArrowResultSetIT.java From snowflake-jdbc with Apache License 2.0 | 5 votes |
private File createArrowFile(String fileName, Schema schema, Object[][] data,
                             int rowsPerRecordBatch) throws IOException {
  File file = resultFolder.newFile(fileName);
  VectorSchemaRoot root = VectorSchemaRoot.create(schema, allocator);
  try (ArrowWriter writer = new ArrowStreamWriter(
      root, new DictionaryProvider.MapDictionaryProvider(), new FileOutputStream(file))) {
    writer.start();
    for (int i = 0; i < data[0].length; ) {
      int rowsToAppend = Math.min(rowsPerRecordBatch, data[0].length - i);
      root.setRowCount(rowsToAppend);
      for (int j = 0; j < data.length; j++) {
        FieldVector vector = root.getFieldVectors().get(j);
        switch (vector.getMinorType()) {
          case INT:
            writeIntToField(vector, data[j], i, rowsToAppend);
            break;
        }
      }
      writer.writeBatch();
      i += rowsToAppend;
    }
  }
  return file;
}
Example #9
Source File: ArrowUtils.java From flink with Apache License 2.0 | 5 votes |
/**
 * Creates an {@link ArrowWriter} for the specified {@link VectorSchemaRoot}.
 */
public static ArrowWriter<Row> createRowArrowWriter(VectorSchemaRoot root, RowType rowType) {
  ArrowFieldWriter<Row>[] fieldWriters = new ArrowFieldWriter[root.getFieldVectors().size()];
  List<FieldVector> vectors = root.getFieldVectors();
  for (int i = 0; i < vectors.size(); i++) {
    FieldVector vector = vectors.get(i);
    vector.allocateNew();
    fieldWriters[i] = createRowArrowFieldWriter(vector, rowType.getTypeAt(i));
  }
  return new ArrowWriter<>(root, fieldWriters);
}
Example #10
Source File: ArrowUtils.java From flink with Apache License 2.0 | 5 votes |
/**
 * Creates an {@link ArrowWriter} for blink planner for the specified {@link VectorSchemaRoot}.
 */
public static ArrowWriter<RowData> createRowDataArrowWriter(VectorSchemaRoot root, RowType rowType) {
  ArrowFieldWriter<RowData>[] fieldWriters = new ArrowFieldWriter[root.getFieldVectors().size()];
  List<FieldVector> vectors = root.getFieldVectors();
  for (int i = 0; i < vectors.size(); i++) {
    FieldVector vector = vectors.get(i);
    vector.allocateNew();
    fieldWriters[i] = createArrowFieldWriterForRow(vector, rowType.getTypeAt(i));
  }
  return new ArrowWriter<>(root, fieldWriters);
}
Example #11
Source File: ArrowUtils.java From flink with Apache License 2.0 | 5 votes |
/**
 * Creates an {@link ArrowReader} for the specified {@link VectorSchemaRoot}.
 */
public static RowArrowReader createRowArrowReader(VectorSchemaRoot root, RowType rowType) {
  List<ArrowFieldReader> fieldReaders = new ArrayList<>();
  List<FieldVector> fieldVectors = root.getFieldVectors();
  for (int i = 0; i < fieldVectors.size(); i++) {
    fieldReaders.add(createRowArrowFieldReader(fieldVectors.get(i), rowType.getTypeAt(i)));
  }
  return new RowArrowReader(fieldReaders.toArray(new ArrowFieldReader[0]));
}
Example #12
Source File: ArrowUtils.java From flink with Apache License 2.0 | 5 votes |
/**
 * Creates an {@link ArrowReader} for blink planner for the specified {@link VectorSchemaRoot}.
 */
public static RowDataArrowReader createRowDataArrowReader(VectorSchemaRoot root, RowType rowType) {
  List<ColumnVector> columnVectors = new ArrayList<>();
  List<FieldVector> fieldVectors = root.getFieldVectors();
  for (int i = 0; i < fieldVectors.size(); i++) {
    columnVectors.add(createColumnVector(fieldVectors.get(i), rowType.getTypeAt(i)));
  }
  return new RowDataArrowReader(columnVectors.toArray(new ColumnVector[0]));
}
Example #13
Source File: ArrowPythonScalarFunctionFlatMap.java From flink with Apache License 2.0 | 5 votes |
@Override
public void emitResults() throws IOException {
  byte[] udfResult;
  while ((udfResult = userDefinedFunctionResultQueue.poll()) != null) {
    bais.setBuffer(udfResult, 0, udfResult.length);
    reader.loadNextBatch();
    VectorSchemaRoot root = reader.getVectorSchemaRoot();
    if (arrowReader == null) {
      arrowReader = ArrowUtils.createRowArrowReader(root, userDefinedFunctionOutputType);
    }
    for (int i = 0; i < root.getRowCount(); i++) {
      resultCollector.collect(Row.join(forwardedInputQueue.poll(), arrowReader.read(i)));
    }
  }
}
Example #14
Source File: AbstractArrowPythonScalarFunctionRunner.java From flink with Apache License 2.0 | 5 votes |
@Override
public void open() throws Exception {
  super.open();
  allocator = ArrowUtils.getRootAllocator().newChildAllocator("writer", 0, Long.MAX_VALUE);
  root = VectorSchemaRoot.create(ArrowUtils.toArrowSchema(getInputType()), allocator);
  arrowWriter = createArrowWriter();
  arrowStreamWriter = new ArrowStreamWriter(root, null, baos);
  arrowStreamWriter.start();
  currentBatchCount = 0;
}
Example #15
Source File: MockHiveWarehouseConnector.java From spark-llap with Apache License 2.0 | 5 votes |
public MockLlapArrowBatchRecordReader(long arrowAllocatorMax) {
  BufferAllocator allocator = RootAllocatorFactory.INSTANCE.getOrCreateRootAllocator(arrowAllocatorMax);
  IntVector vector = new IntVector("a", allocator);
  vector.allocateNewSafe();
  for (int i = 0; i < testVector.length; i++) {
    vector.set(i, testVector[i]);
  }
  vector.setValueCount(testVector.length);
  List<Field> fields = Lists.newArrayList(vector.getField());
  List<FieldVector> vectors = new ArrayList<>();
  vectors.add(vector);
  vectorSchemaRoot = new VectorSchemaRoot(fields, vectors, testVector.length);
}
Example #16
Source File: ArrowUtilsTest.java From flink with Apache License 2.0 | 5 votes |
@Test
public void testCreateRowDataArrowReader() {
  VectorSchemaRoot root = VectorSchemaRoot.create(ArrowUtils.toArrowSchema(rowType), allocator);
  RowDataArrowReader reader = ArrowUtils.createRowDataArrowReader(root, rowType);
  ColumnVector[] columnVectors = reader.getColumnVectors();
  for (int i = 0; i < columnVectors.length; i++) {
    assertEquals(testFields.get(i).f6, columnVectors[i].getClass());
  }
}
Example #17
Source File: ArrowUtilsTest.java From flink with Apache License 2.0 | 5 votes |
@Test
public void testCreateRowArrowWriter() {
  VectorSchemaRoot root = VectorSchemaRoot.create(ArrowUtils.toArrowSchema(rowType), allocator);
  ArrowWriter<Row> writer = ArrowUtils.createRowArrowWriter(root, rowType);
  ArrowFieldWriter<Row>[] fieldWriters = writer.getFieldWriters();
  for (int i = 0; i < fieldWriters.length; i++) {
    assertEquals(testFields.get(i).f3, fieldWriters[i].getClass());
  }
}
Example #18
Source File: ArrowUtilsTest.java From flink with Apache License 2.0 | 5 votes |
@Test
public void testCreateRowDataArrowWriter() {
  VectorSchemaRoot root = VectorSchemaRoot.create(ArrowUtils.toArrowSchema(rowType), allocator);
  ArrowWriter<RowData> writer = ArrowUtils.createRowDataArrowWriter(root, rowType);
  ArrowFieldWriter<RowData>[] fieldWriters = writer.getFieldWriters();
  for (int i = 0; i < fieldWriters.length; i++) {
    assertEquals(testFields.get(i).f4, fieldWriters[i].getClass());
  }
}
Example #19
Source File: ArrowUtilsTest.java From flink with Apache License 2.0 | 5 votes |
@Test
public void testReadArrowBatches() throws IOException {
  VectorSchemaRoot root = VectorSchemaRoot.create(ArrowUtils.toArrowSchema(rowType), allocator);
  ArrowWriter<RowData> arrowWriter = ArrowUtils.createRowDataArrowWriter(root, rowType);
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  ArrowStreamWriter arrowStreamWriter = new ArrowStreamWriter(root, null, baos);
  arrowStreamWriter.start();

  List<RowData> testData = Arrays.asList(
      new GenericRowData(rowType.getFieldCount()),
      new GenericRowData(rowType.getFieldCount()),
      new GenericRowData(rowType.getFieldCount()),
      new GenericRowData(rowType.getFieldCount()),
      new GenericRowData(rowType.getFieldCount()));
  int batches = 3;
  List<List<RowData>> subLists = Lists.partition(testData, testData.size() / batches + 1);
  for (List<RowData> subList : subLists) {
    for (RowData value : subList) {
      arrowWriter.write(value);
    }
    arrowWriter.finish();
    arrowStreamWriter.writeBatch();
    arrowWriter.reset();
  }

  assertEquals(batches,
      ArrowUtils.readArrowBatches(Channels.newChannel(new ByteArrayInputStream(baos.toByteArray()))).length);
}
Example #20
Source File: RowArrowReaderWriterTest.java From flink with Apache License 2.0 | 5 votes |
@Override
public Tuple2<ArrowWriter<Row>, ArrowStreamWriter> createArrowWriter(OutputStream outputStream) throws IOException {
  VectorSchemaRoot root = VectorSchemaRoot.create(ArrowUtils.toArrowSchema(rowType), allocator);
  ArrowWriter<Row> arrowWriter = ArrowUtils.createRowArrowWriter(root, rowType);
  ArrowStreamWriter arrowStreamWriter = new ArrowStreamWriter(root, null, outputStream);
  arrowStreamWriter.start();
  return Tuple2.of(arrowWriter, arrowStreamWriter);
}
Example #21
Source File: RowDataArrowReaderWriterTest.java From flink with Apache License 2.0 | 5 votes |
@Override
public Tuple2<ArrowWriter<RowData>, ArrowStreamWriter> createArrowWriter(OutputStream outputStream) throws IOException {
  VectorSchemaRoot root = VectorSchemaRoot.create(ArrowUtils.toArrowSchema(rowType), allocator);
  ArrowWriter<RowData> arrowWriter = ArrowUtils.createRowDataArrowWriter(root, rowType);
  ArrowStreamWriter arrowStreamWriter = new ArrowStreamWriter(root, null, outputStream);
  arrowStreamWriter.start();
  return Tuple2.of(arrowWriter, arrowStreamWriter);
}
Example #22
Source File: ArrowSourceFunctionTest.java From flink with Apache License 2.0 | 5 votes |
public ArrowSourceFunctionTest() {
  super(VectorSchemaRoot.create(ArrowUtils.toArrowSchema(rowType), allocator),
      serializer,
      Comparator.comparing(o -> o.getString(0)),
      new DeeplyEqualsChecker()
          .withCustomCheck(
              (o1, o2) -> o1 instanceof RowData && o2 instanceof RowData,
              (o1, o2, checker) -> deepEqualsBaseRow(
                  (RowData) o1,
                  (RowData) o2,
                  (RowDataSerializer) serializer.duplicate(),
                  (RowDataSerializer) serializer.duplicate())));
}
Example #23
Source File: ArrowConverterTest.java From deeplearning4j with Apache License 2.0 | 5 votes |
@Test
public void testReadSchemaAndRecordsFromByteArray() throws Exception {
  BufferAllocator allocator = new RootAllocator(Long.MAX_VALUE);
  int valueCount = 3;
  List<Field> fields = new ArrayList<>();
  fields.add(ArrowConverter.field("field1", new ArrowType.FloatingPoint(FloatingPointPrecision.SINGLE)));
  fields.add(ArrowConverter.intField("field2"));
  List<FieldVector> fieldVectors = new ArrayList<>();
  fieldVectors.add(ArrowConverter.vectorFor(allocator, "field1", new float[] {1, 2, 3}));
  fieldVectors.add(ArrowConverter.vectorFor(allocator, "field2", new int[] {1, 2, 3}));

  org.apache.arrow.vector.types.pojo.Schema schema = new org.apache.arrow.vector.types.pojo.Schema(fields);

  VectorSchemaRoot schemaRoot1 = new VectorSchemaRoot(schema, fieldVectors, valueCount);
  VectorUnloader vectorUnloader = new VectorUnloader(schemaRoot1);
  vectorUnloader.getRecordBatch();
  ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
  try (ArrowFileWriter arrowFileWriter = new ArrowFileWriter(schemaRoot1, null, newChannel(byteArrayOutputStream))) {
    arrowFileWriter.writeBatch();
  } catch (IOException e) {
    log.error("", e);
  }

  byte[] arr = byteArrayOutputStream.toByteArray();
  val arr2 = ArrowConverter.readFromBytes(arr);

  assertEquals(2, arr2.getFirst().numColumns());
  assertEquals(3, arr2.getRight().size());

  val arrowCols = ArrowConverter.toArrowColumns(allocator, arr2.getFirst(), arr2.getRight());
  assertEquals(2, arrowCols.size());
  assertEquals(valueCount, arrowCols.get(0).getValueCount());
}
Example #24
Source File: ArrowConverterTest.java From DataVec with Apache License 2.0 | 5 votes |
@Test
public void testReadSchemaAndRecordsFromByteArray() throws Exception {
  BufferAllocator allocator = new RootAllocator(Long.MAX_VALUE);
  int valueCount = 3;
  List<Field> fields = new ArrayList<>();
  fields.add(ArrowConverter.field("field1", new ArrowType.FloatingPoint(FloatingPointPrecision.SINGLE)));
  fields.add(ArrowConverter.intField("field2"));
  List<FieldVector> fieldVectors = new ArrayList<>();
  fieldVectors.add(ArrowConverter.vectorFor(allocator, "field1", new float[] {1, 2, 3}));
  fieldVectors.add(ArrowConverter.vectorFor(allocator, "field2", new int[] {1, 2, 3}));

  org.apache.arrow.vector.types.pojo.Schema schema = new org.apache.arrow.vector.types.pojo.Schema(fields);

  VectorSchemaRoot schemaRoot1 = new VectorSchemaRoot(schema, fieldVectors, valueCount);
  VectorUnloader vectorUnloader = new VectorUnloader(schemaRoot1);
  vectorUnloader.getRecordBatch();
  ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
  try (ArrowFileWriter arrowFileWriter = new ArrowFileWriter(schemaRoot1, null, newChannel(byteArrayOutputStream))) {
    arrowFileWriter.writeBatch();
  } catch (IOException e) {
    e.printStackTrace();
  }

  byte[] arr = byteArrayOutputStream.toByteArray();
  val arr2 = ArrowConverter.readFromBytes(arr);

  assertEquals(2, arr2.getFirst().numColumns());
  assertEquals(3, arr2.getRight().size());

  val arrowCols = ArrowConverter.toArrowColumns(allocator, arr2.getFirst(), arr2.getRight());
  assertEquals(2, arrowCols.size());
  assertEquals(valueCount, arrowCols.get(0).getValueCount());
}
Example #25
Source File: ConvertArrowFormatToMDS.java From multiple-dimension-spread with Apache License 2.0 | 5 votes |
public static int run( final String[] args ) throws IOException {
  CommandLine cl;
  try {
    CommandLineParser clParser = new GnuParser();
    cl = clParser.parse( createOptions( args ) , args );
  } catch( ParseException e ) {
    printHelp( args );
    throw new IOException( e );
  }

  if( cl.hasOption( "help" ) ){
    printHelp( args );
    return 0;
  }

  String input = cl.getOptionValue( "input" , null );
  String output = cl.getOptionValue( "output" , null );

  Configuration config = new Configuration();

  ArrowFileReader arrowReader = new ArrowFileReader(
      new FileInputStream( input ).getChannel() , new RootAllocator( Integer.MAX_VALUE ) );
  OutputStream out = FileUtil.create( output );
  MDSWriter writer = new MDSWriter( out , config );
  List<ArrowBlock> blockList = arrowReader.getRecordBlocks();
  for( ArrowBlock block : blockList ){
    VectorSchemaRoot root = arrowReader.getVectorSchemaRoot();
    arrowReader.loadRecordBatch(block);
    List<FieldVector> fieldVectorList = root.getFieldVectors();
    Spread spread = ArrowSpreadUtil.toSpread( root.getRowCount() , fieldVectorList );
    writer.append( spread );
  }
  arrowReader.close();
  writer.close();

  return 0;
}
Example #26
Source File: Twister2ArrowFileWriter.java From twister2 with Apache License 2.0 | 5 votes |
public boolean setUpTwister2ArrowWrite(int workerId) throws Exception {
  LOG.fine("%%%%%%%%% worker id details:" + workerId + "\t" + arrowFile);
  this.root = VectorSchemaRoot.create(Schema.fromJSON(arrowSchema), this.rootAllocator);
  Path path = new Path(arrowFile);
  this.fileSystem = FileSystemUtils.get(path);
  this.fsDataOutputStream = fileSystem.create(path);
  this.twister2ArrowOutputStream = new Twister2ArrowOutputStream(this.fsDataOutputStream);
  DictionaryProvider.MapDictionaryProvider provider = new DictionaryProvider.MapDictionaryProvider();
  if (!flag) {
    this.arrowFileWriter = new ArrowFileWriter(root, provider, this.fsDataOutputStream.getChannel());
  } else {
    this.arrowFileWriter = new ArrowFileWriter(root, provider, this.twister2ArrowOutputStream);
  }
  LOG.info("root schema fields:" + root.getSchema().getFields());
  for (Field field : root.getSchema().getFields()) {
    FieldVector vector = root.getVector(field.getName());
    if (vector.getMinorType().equals(Types.MinorType.INT)) {
      this.generatorMap.put(vector, new IntVectorGenerator());
    } else if (vector.getMinorType().equals(Types.MinorType.BIGINT)) {
      this.generatorMap.put(vector, new BigIntVectorGenerator());
    } else if (vector.getMinorType().equals(Types.MinorType.FLOAT4)) {
      this.generatorMap.put(vector, new FloatVectorGenerator());
    } else {
      throw new RuntimeException("unsupported arrow write type");
    }
  }
  return true;
}
Example #27
Source File: VectorContainer.java From dremio-oss with Apache License 2.0 | 5 votes |
public static void transferFromRoot(VectorSchemaRoot root, VectorContainer container, BufferAllocator allocator) {
  container.clear();

  // iterate over and transfer columns
  for (FieldVector fv : root.getFieldVectors()) {
    final TransferPair tp = fv.getTransferPair(allocator);
    tp.transfer();
    container.add(tp.getTo());
  }

  container.setRecordCount(root.getRowCount());
  container.addSchema(root.getSchema());
  container.buildSchema();
}
Example #28
Source File: FragmentWritableBatch.java From dremio-oss with Apache License 2.0 | 5 votes |
public static VectorSchemaRoot getVectorSchemaRoot(final VectorAccessible batch) {
  List<FieldVector> fieldVectors = FluentIterable.from(batch)
      .transform(new Function<VectorWrapper<?>, FieldVector>() {
        @Override
        public FieldVector apply(VectorWrapper<?> vectorWrapper) {
          return (FieldVector) vectorWrapper.getValueVector();
        }
      }).toList();
  int rowCount = batch.getRecordCount();
  List<Field> fields = batch.getSchema().getFields();
  VectorSchemaRoot root = new VectorSchemaRoot(fields, fieldVectors, rowCount);
  return root;
}
Example #29
Source File: FormationRecordWriter.java From dremio-flight-connector with Apache License 2.0 | 5 votes |
@Override
public void setup(VectorAccessible incoming, OutputEntryListener listener, WriteStatsListener statsListener)
    throws IOException {
  root = new VectorSchemaRoot(ImmutableList.copyOf(incoming)
      .stream()
      .map(vw -> ((FieldVector) vw.getValueVector()))
      .collect(Collectors.toList()));
  unloader = new VectorUnloader(root);
  creator = store.putStream(descriptor, root.getSchema());
}
Example #30
Source File: Stream.java From dremio-flight-connector with Apache License 2.0 | 5 votes |
public void start(ServerStreamListener listener) throws InterruptedException {
  logger.debug("trying to start, waiting for schema for {}", descriptor);
  countDownLatch.await();
  if (root == null) {
    logger.warn("root was not set for {}, not starting listener properly", descriptor);
    root = VectorSchemaRoot.create(new Schema(ImmutableList.of()), allocator);
  }
  listener.start(root);
  this.listener = listener;
}