org.apache.orc.storage.ql.exec.vector.TimestampColumnVector Java Examples
The following examples show how to use
org.apache.orc.storage.ql.exec.vector.TimestampColumnVector.
Each example names its original project and source file.
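Before the project examples, here is a minimal, self-contained sketch of how the vector stores values: each entry is kept as epoch milliseconds in the public time[] array plus a nanosecond-of-second component in nanos[], with nulls tracked through the inherited noNulls/isNull flags. The batch size and values below are illustrative only and are not taken from any of the listed projects.

import java.sql.Timestamp;
import org.apache.orc.storage.ql.exec.vector.TimestampColumnVector;

public class TimestampColumnVectorSketch {
  public static void main(String[] args) {
    // Create a vector with room for 1024 rows.
    TimestampColumnVector vector = new TimestampColumnVector(1024);

    // Write row 0: set() splits the Timestamp into time[] (epoch millis) and nanos[].
    Timestamp ts = Timestamp.valueOf("2024-01-01 12:00:00.123456789");
    vector.set(0, ts);

    // Mark row 1 as null; noNulls must be cleared as well.
    vector.noNulls = false;
    vector.isNull[1] = true;

    // Read row 0 back: epoch milliseconds and nanosecond-of-second.
    long millis = vector.getTime(0);
    int nanos = vector.getNanos(0);
    System.out.println(millis + " ms, " + nanos + " ns-of-second");

    // asScratchTimestamp() reuses an internal Timestamp object for the row.
    System.out.println(vector.asScratchTimestamp(0));
  }
}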
Example #1
Source File: SparkOrcReader.java From iceberg with Apache License 2.0 | 5 votes |
@Override
public void convert(UnsafeRowWriter writer, int column, ColumnVector vector, int row) {
  if (vector.isRepeating) {
    row = 0;
  }
  if (!vector.noNulls && vector.isNull[row]) {
    writer.setNullAt(column);
  } else {
    writer.write(column, convert((TimestampColumnVector) vector, row));
  }
}
Example #2
Source File: SparkOrcReader.java From iceberg with Apache License 2.0 | 5 votes |
@Override
public void convert(UnsafeArrayWriter writer, int element, ColumnVector vector, int row) {
  if (vector.isRepeating) {
    row = 0;
  }
  if (!vector.noNulls && vector.isNull[row]) {
    writer.setNull(element);
  } else {
    writer.write(element, convert((TimestampColumnVector) vector, row));
  }
}
Example #3
Source File: AbstractOrcNoHiveVector.java From flink with Apache License 2.0 | 5 votes |
private static TimestampColumnVector createTimestampVector(int batchSize, Object value) {
  TimestampColumnVector lcv = new TimestampColumnVector(batchSize);
  if (value == null) {
    // Mark the whole vector as a single repeating null.
    lcv.noNulls = false;
    lcv.isNull[0] = true;
    lcv.isRepeating = true;
  } else {
    Timestamp timestamp = value instanceof LocalDateTime
        ? Timestamp.valueOf((LocalDateTime) value)
        : (Timestamp) value;
    // fill() sets every entry to the same timestamp and marks the vector as repeating.
    lcv.fill(timestamp);
    lcv.isNull[0] = false;
  }
  return lcv;
}
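Both branches avoid populating all batchSize slots: the null branch marks the vector as a single repeating null by hand (isRepeating plus isNull[0]), while the non-null branch relies on fill(), which writes the timestamp once and sets isRepeating itself.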
Example #4
Source File: SparkOrcReader.java From iceberg with Apache License 2.0 | 4 votes |
private long convert(TimestampColumnVector vector, int row) {
  // time[row] holds milliseconds since epoch; nanos[row] holds the nanosecond-of-second.
  // Compute microseconds past 1970 from the whole seconds plus the sub-second nanos.
  long micros = (vector.time[row] / 1000) * 1_000_000 + vector.nanos[row] / 1000;
  return micros;
}
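As a quick illustration of that arithmetic (not taken from the Iceberg source), the following snippet builds a timestamp of one second plus 123 microseconds past the epoch and applies the same computation:

// Illustrative round trip for the micros computation above.
Timestamp ts = new Timestamp(1_000L);   // 1 second past epoch (1000 ms)
ts.setNanos(123_000);                   // 123 microseconds within that second
TimestampColumnVector vector = new TimestampColumnVector(1);
vector.set(0, ts);

long micros = (vector.time[0] / 1000) * 1_000_000 + vector.nanos[0] / 1000;
// micros == 1_000_123, i.e. one second plus 123 microseconds past 1970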
Example #5
Source File: OrcNoHiveTimestampVector.java From flink with Apache License 2.0 | 4 votes |
public OrcNoHiveTimestampVector(TimestampColumnVector vector) {
  super(vector);
  this.vector = vector;
}
Example #6
Source File: OrcColumnarRowSplitReaderNoHiveTest.java From flink with Apache License 2.0 | 4 votes |
@Override
protected void prepareReadFileWithTypes(String file, int rowSize) throws IOException {
  // NOTE: ORC keeps field name information, so the names here must match the ORC schema.
  TypeDescription schema = TypeDescription.fromString(
      "struct<" +
          "f0:float," +
          "f1:double," +
          "f2:timestamp," +
          "f3:tinyint," +
          "f4:smallint" +
          ">");

  org.apache.hadoop.fs.Path filePath = new org.apache.hadoop.fs.Path(file);
  Configuration conf = new Configuration();

  Writer writer = OrcFile.createWriter(
      filePath,
      OrcFile.writerOptions(conf).setSchema(schema));

  VectorizedRowBatch batch = schema.createRowBatch(rowSize);
  DoubleColumnVector col0 = (DoubleColumnVector) batch.cols[0];
  DoubleColumnVector col1 = (DoubleColumnVector) batch.cols[1];
  TimestampColumnVector col2 = (TimestampColumnVector) batch.cols[2];
  LongColumnVector col3 = (LongColumnVector) batch.cols[3];
  LongColumnVector col4 = (LongColumnVector) batch.cols[4];
  col0.noNulls = false;
  col1.noNulls = false;
  col2.noNulls = false;
  col3.noNulls = false;
  col4.noNulls = false;
  for (int i = 0; i < rowSize - 1; i++) {
    col0.vector[i] = i;
    col1.vector[i] = i;
    Timestamp timestamp = toTimestamp(i);
    // A timestamp is stored as epoch milliseconds plus a nanosecond-of-second component.
    col2.time[i] = timestamp.getTime();
    col2.nanos[i] = timestamp.getNanos();
    col3.vector[i] = i;
    col4.vector[i] = i;
  }
  // The last row of every column is written as null.
  col0.isNull[rowSize - 1] = true;
  col1.isNull[rowSize - 1] = true;
  col2.isNull[rowSize - 1] = true;
  col3.isNull[rowSize - 1] = true;
  col4.isNull[rowSize - 1] = true;
  batch.size = rowSize;
  writer.addRowBatch(batch);
  batch.reset();
  writer.close();
}
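Reading the timestamps back is not part of this test. As a rough sketch, assuming the ORC core reader (OrcFile.createReader, Reader.rows()) is available alongside the same shaded storage-api classes, the written file could be scanned like this:

// Minimal read-back sketch (not from the Flink test): rebuild each non-null
// timestamp of column f2 from the time[] and nanos[] arrays.
org.apache.orc.Reader reader = OrcFile.createReader(filePath, OrcFile.readerOptions(conf));
org.apache.orc.RecordReader rows = reader.rows();
VectorizedRowBatch readBatch = reader.getSchema().createRowBatch();
while (rows.nextBatch(readBatch)) {
  TimestampColumnVector tsCol = (TimestampColumnVector) readBatch.cols[2];
  for (int r = 0; r < readBatch.size; r++) {
    if (!tsCol.noNulls && tsCol.isNull[r]) {
      continue;  // the last row was written as null
    }
    Timestamp ts = new Timestamp(tsCol.time[r]);
    ts.setNanos(tsCol.nanos[r]);
    // compare ts against the expected toTimestamp(r) here
  }
}
rows.close();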