org.apache.orc.storage.ql.exec.vector.DoubleColumnVector Java Examples
The following examples show how to use
org.apache.orc.storage.ql.exec.vector.DoubleColumnVector.
Each example notes the project and source file it comes from, along with the license.
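DoubleColumnVector here is the copy of Hive's storage-api class that ORC shades into its "nohive" artifact, which is why Iceberg and Flink reference it under the org.apache.orc.storage package rather than org.apache.hadoop.hive. Before the examples, a minimal sketch of the vector's contract (class name and values are my own):

import org.apache.orc.storage.ql.exec.vector.DoubleColumnVector;

public class DoubleColumnVectorContract {
  public static void main(String[] args) {
    DoubleColumnVector col = new DoubleColumnVector(1024);

    // Non-null value: clear the row's isNull flag, because batches
    // (and their flag arrays) are reused across writes.
    col.isNull[0] = false;
    col.vector[0] = 3.14;

    // Null value: setting isNull alone is not enough; noNulls must also
    // be cleared, otherwise readers may ignore the isNull array entirely.
    col.noNulls = false;
    col.isNull[1] = true;

    // fill() stores a single value for the whole batch and sets
    // isRepeating, so row 0 answers for every row.
    col.fill(42.0);
    System.out.println(col.isRepeating + " -> " + col.vector[0]);
  }
}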
Example #1
Source File: GenericOrcWriter.java (from iceberg, Apache License 2.0)
@Override
public void addValue(int rowId, Float data, ColumnVector output) {
  if (data == null) {
    output.noNulls = false;
    output.isNull[rowId] = true;
  } else {
    output.isNull[rowId] = false;
    ((DoubleColumnVector) output).vector[rowId] = data;
  }
}
Example #2
Source File: GenericOrcWriter.java (from iceberg, Apache License 2.0)
@Override
public void addValue(int rowId, Double data, ColumnVector output) {
  if (data == null) {
    output.noNulls = false;
    output.isNull[rowId] = true;
  } else {
    output.isNull[rowId] = false;
    ((DoubleColumnVector) output).vector[rowId] = data;
  }
}
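Both variants above share the same null-handling pattern. A minimal, hypothetical driver (the class and variable names are my own) showing how that pattern fills one column of a batch:

import org.apache.orc.storage.ql.exec.vector.ColumnVector;
import org.apache.orc.storage.ql.exec.vector.DoubleColumnVector;

public class AddValueDemo {
  // Same null-handling pattern as the Iceberg writers above.
  static void addValue(int rowId, Double data, ColumnVector output) {
    if (data == null) {
      output.noNulls = false;
      output.isNull[rowId] = true;
    } else {
      output.isNull[rowId] = false;
      ((DoubleColumnVector) output).vector[rowId] = data;
    }
  }

  public static void main(String[] args) {
    Double[] values = {1.5, null, 2.5};
    DoubleColumnVector col = new DoubleColumnVector(values.length);
    for (int row = 0; row < values.length; row++) {
      addValue(row, values[row], col);
    }
    // col.noNulls is now false and col.isNull[1] is true.
  }
}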
Example #3
Source File: SparkOrcWriter.java (from iceberg, Apache License 2.0)
@Override
public void addValue(int rowId, int column, SpecializedGetters data, ColumnVector output) {
  if (data.isNullAt(column)) {
    output.noNulls = false;
    output.isNull[rowId] = true;
  } else {
    output.isNull[rowId] = false;
    ((DoubleColumnVector) output).vector[rowId] = data.getFloat(column);
  }
}
Example #4
Source File: SparkOrcWriter.java (from iceberg, Apache License 2.0)
@Override
public void addValue(int rowId, int column, SpecializedGetters data, ColumnVector output) {
  if (data.isNullAt(column)) {
    output.noNulls = false;
    output.isNull[rowId] = true;
  } else {
    output.isNull[rowId] = false;
    ((DoubleColumnVector) output).vector[rowId] = data.getDouble(column);
  }
}
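Note that the float variant writes into the same DoubleColumnVector: the storage-api has no float vector, so ORC FLOAT columns are widened to double in memory and narrowed back on read (see Examples #5 and #6 below). A small sketch (class name is my own) showing the round trip is exact:

import org.apache.orc.storage.ql.exec.vector.DoubleColumnVector;

public class FloatWideningSketch {
  public static void main(String[] args) {
    DoubleColumnVector col = new DoubleColumnVector(1);
    float written = 1.1f;
    col.vector[0] = written;                 // implicit float -> double widening on write
    float readBack = (float) col.vector[0];  // narrowing cast on read
    System.out.println(readBack == written); // true: widen-then-narrow is lossless
  }
}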
Example #5
Source File: SparkOrcReader.java (from iceberg, Apache License 2.0)
@Override
public void convert(UnsafeRowWriter writer, int column, ColumnVector vector, int row) {
  if (vector.isRepeating) {
    row = 0;
  }
  if (!vector.noNulls && vector.isNull[row]) {
    writer.setNullAt(column);
  } else {
    writer.write(column, (float) ((DoubleColumnVector) vector).vector[row]);
  }
}
Example #6
Source File: SparkOrcReader.java (from iceberg, Apache License 2.0)
@Override
public void convert(UnsafeArrayWriter writer, int element, ColumnVector vector, int row) {
  if (vector.isRepeating) {
    row = 0;
  }
  if (!vector.noNulls && vector.isNull[row]) {
    writer.setNull(element);
  } else {
    writer.write(element, (float) ((DoubleColumnVector) vector).vector[row]);
  }
}
Example #7
Source File: SparkOrcReader.java (from iceberg, Apache License 2.0)
@Override
public void convert(UnsafeRowWriter writer, int column, ColumnVector vector, int row) {
  if (vector.isRepeating) {
    row = 0;
  }
  if (!vector.noNulls && vector.isNull[row]) {
    writer.setNullAt(column);
  } else {
    writer.write(column, ((DoubleColumnVector) vector).vector[row]);
  }
}
Example #8
Source File: SparkOrcReader.java (from iceberg, Apache License 2.0)
@Override
public void convert(UnsafeArrayWriter writer, int element, ColumnVector vector, int row) {
  if (vector.isRepeating) {
    row = 0;
  }
  if (!vector.noNulls && vector.isNull[row]) {
    writer.setNull(element);
  } else {
    writer.write(element, ((DoubleColumnVector) vector).vector[row]);
  }
}
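All four readers repeat the same decode steps. A minimal sketch (names are my own) that reads a possibly-null double out of a vector while honoring the isRepeating and noNulls flags:

import org.apache.orc.storage.ql.exec.vector.DoubleColumnVector;

public class ReadDoubleSketch {
  static Double readDouble(DoubleColumnVector vector, int row) {
    if (vector.isRepeating) {
      row = 0; // a repeating vector stores one physical value for the whole batch
    }
    if (!vector.noNulls && vector.isNull[row]) {
      return null; // isNull is only meaningful once noNulls has been cleared
    }
    return vector.vector[row];
  }

  public static void main(String[] args) {
    DoubleColumnVector col = new DoubleColumnVector(4);
    col.fill(7.0); // constant batch
    System.out.println(readDouble(col, 3)); // prints 7.0
  }
}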
Example #9
Source File: AbstractOrcNoHiveVector.java (from flink, Apache License 2.0)
private static DoubleColumnVector createDoubleVector(int batchSize, Object value) {
  DoubleColumnVector dcv = new DoubleColumnVector(batchSize);
  if (value == null) {
    dcv.noNulls = false;
    dcv.isNull[0] = true;
    dcv.isRepeating = true;
  } else {
    dcv.fill(((Number) value).doubleValue());
    dcv.isNull[0] = false;
  }
  return dcv;
}
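Both branches leave the vector repeating: fill() sets isRepeating internally, and the null branch sets it explicitly, so row 0 stands in for every row. Flink builds constant columns like this for values that do not vary within a split, such as partition fields. A short usage sketch, assuming the createDoubleVector helper above is in scope:

// Assumes createDoubleVector(...) from the example above is in scope.
DoubleColumnVector constant = createDoubleVector(1024, 2.5);
// constant.isRepeating == true, so constant.vector[0] answers every row.

DoubleColumnVector allNull = createDoubleVector(1024, null);
// allNull.isRepeating == true and allNull.isNull[0] == true: an all-null column.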
Example #10
Source File: OrcValueReaders.java (from iceberg, Apache License 2.0)
@Override
public Float nonNullRead(ColumnVector vector, int row) {
  return (float) ((DoubleColumnVector) vector).vector[row];
}
Example #11
Source File: OrcValueReaders.java (from iceberg, Apache License 2.0)
@Override
public Double nonNullRead(ColumnVector vector, int row) {
  return ((DoubleColumnVector) vector).vector[row];
}
Example #12
Source File: OrcNoHiveDoubleVector.java (from flink, Apache License 2.0)
public OrcNoHiveDoubleVector(DoubleColumnVector vector) {
  super(vector);
  this.vector = vector;
}
Example #13
Source File: OrcColumnarRowSplitReaderNoHiveTest.java (from flink, Apache License 2.0)
@Override
protected void prepareReadFileWithTypes(String file, int rowSize) throws IOException {
  // NOTE: ORC stores field names, so the names here must match the ORC schema.
  TypeDescription schema = TypeDescription.fromString(
      "struct<" +
          "f0:float," +
          "f1:double," +
          "f2:timestamp," +
          "f3:tinyint," +
          "f4:smallint" +
          ">");
  org.apache.hadoop.fs.Path filePath = new org.apache.hadoop.fs.Path(file);
  Configuration conf = new Configuration();
  Writer writer = OrcFile.createWriter(filePath, OrcFile.writerOptions(conf).setSchema(schema));
  VectorizedRowBatch batch = schema.createRowBatch(rowSize);
  DoubleColumnVector col0 = (DoubleColumnVector) batch.cols[0];
  DoubleColumnVector col1 = (DoubleColumnVector) batch.cols[1];
  TimestampColumnVector col2 = (TimestampColumnVector) batch.cols[2];
  LongColumnVector col3 = (LongColumnVector) batch.cols[3];
  LongColumnVector col4 = (LongColumnVector) batch.cols[4];
  col0.noNulls = false;
  col1.noNulls = false;
  col2.noNulls = false;
  col3.noNulls = false;
  col4.noNulls = false;
  for (int i = 0; i < rowSize - 1; i++) {
    col0.vector[i] = i;
    col1.vector[i] = i;
    Timestamp timestamp = toTimestamp(i);
    col2.time[i] = timestamp.getTime();
    col2.nanos[i] = timestamp.getNanos();
    col3.vector[i] = i;
    col4.vector[i] = i;
  }
  col0.isNull[rowSize - 1] = true;
  col1.isNull[rowSize - 1] = true;
  col2.isNull[rowSize - 1] = true;
  col3.isNull[rowSize - 1] = true;
  col4.isNull[rowSize - 1] = true;
  batch.size = rowSize;
  writer.addRowBatch(batch);
  batch.reset();
  writer.close();
}
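To see DoubleColumnVector on the read path as well, here is a hedged sketch that reads the file written above back with the core ORC reader API from the same "nohive" artifact (class name and output format are my own):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.orc.OrcFile;
import org.apache.orc.Reader;
import org.apache.orc.RecordReader;
import org.apache.orc.storage.ql.exec.vector.DoubleColumnVector;
import org.apache.orc.storage.ql.exec.vector.VectorizedRowBatch;

public class ReadBackSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    Reader reader = OrcFile.createReader(new Path(args[0]), OrcFile.readerOptions(conf));
    try (RecordReader rows = reader.rows()) {
      VectorizedRowBatch batch = reader.getSchema().createRowBatch();
      while (rows.nextBatch(batch)) {
        DoubleColumnVector f1 = (DoubleColumnVector) batch.cols[1]; // f1:double
        for (int r = 0; r < batch.size; r++) {
          int row = f1.isRepeating ? 0 : r;
          if (!f1.noNulls && f1.isNull[row]) {
            System.out.println("null");
          } else {
            System.out.println(f1.vector[row]);
          }
        }
      }
    }
  }
}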