org.apache.spark.sql.types.DateType Java Examples
The following examples show how to use
org.apache.spark.sql.types.DateType.
You can vote up the examples you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example. You may also check out the related API usage in the sidebar.
Example #1
Source File: IndexRUtil.java From indexr with Apache License 2.0 | 6 votes |
/**
 * Converts a Spark schema (list of {@link StructField}) into an IndexR {@code SegmentSchema}.
 *
 * @param sparkSchema the Spark struct fields to convert
 * @param isIndexed   predicate deciding, per column name, whether the column is indexed
 * @return the equivalent IndexR segment schema
 * @throws IllegalStateException if a field has a data type with no IndexR mapping
 */
public static SegmentSchema sparkSchemaToIndexRSchema(List<StructField> sparkSchema, IsIndexed isIndexed) {
    List<ColumnSchema> columns = new ArrayList<>();
    for (StructField field : sparkSchema) {
        String columnName = field.name();
        columns.add(new ColumnSchema(columnName, sqlTypeOf(field), isIndexed.apply(columnName)));
    }
    return new SegmentSchema(columns);
}

/** Maps a Spark field's data type to the corresponding IndexR SQL type. */
private static SQLType sqlTypeOf(StructField field) {
    if (field.dataType() instanceof IntegerType) {
        return SQLType.INT;
    }
    if (field.dataType() instanceof LongType) {
        return SQLType.BIGINT;
    }
    if (field.dataType() instanceof FloatType) {
        return SQLType.FLOAT;
    }
    if (field.dataType() instanceof DoubleType) {
        return SQLType.DOUBLE;
    }
    if (field.dataType() instanceof StringType) {
        return SQLType.VARCHAR;
    }
    if (field.dataType() instanceof DateType) {
        return SQLType.DATE;
    }
    if (field.dataType() instanceof TimestampType) {
        return SQLType.DATETIME;
    }
    throw new IllegalStateException("Unsupported type: " + field.dataType());
}
Example #2
Source File: IndexRUtil.java From indexr with Apache License 2.0 | 5 votes |
/**
 * Converts an IndexR {@code SegmentSchema} into the equivalent list of Spark struct fields.
 *
 * @param schema the IndexR segment schema to convert
 * @return Spark struct fields mirroring the segment's columns, in column order
 * @throws IllegalStateException if a column has a SQL type with no Spark mapping
 */
public static List<StructField> indexrSchemaToSparkSchema(SegmentSchema schema) {
    List<StructField> fields = new ArrayList<>();
    for (ColumnSchema column : schema.getColumns()) {
        // Every column is exposed to Spark as a non-nullable field.
        fields.add(new StructField(column.getName(), sparkTypeOf(column), scala.Boolean.box(false), Metadata.empty()));
    }
    return fields;
}

/** Maps an IndexR column's SQL type to the corresponding Spark data type. */
private static DataType sparkTypeOf(ColumnSchema column) {
    switch (column.getSqlType()) {
        case INT:
            return DataTypes.IntegerType;
        case BIGINT:
            return DataTypes.LongType;
        case FLOAT:
            return DataTypes.FloatType;
        case DOUBLE:
            return DataTypes.DoubleType;
        case VARCHAR:
            return DataTypes.StringType;
        case DATE:
            return DataTypes.DateType;
        case DATETIME:
            return DataTypes.TimestampType;
        default:
            throw new IllegalStateException("Unsupported type: " + column.getSqlType());
    }
}
Example #3
Source File: ShortDirectStreamReader.java From spliceengine with GNU Affero General Public License v3.0 | 4 votes |
/**
 * Reads the next batch of short values from the ORC data stream into {@code vector}.
 * Relies on instance state set up elsewhere: {@code presentStream} (null-bit stream),
 * {@code dataStream} (value stream), {@code readOffset} (rows to skip before this batch),
 * and {@code nextBatchSize} (rows to read). Resets the offset/size counters on return.
 */
@Override
public ColumnVector readBlock(DataType type, ColumnVector vector) throws IOException {
    if (!rowGroupOpen) {
        openRowGroup();
    }
    if (readOffset > 0) {
        if (presentStream != null) {
            // skip ahead the present bit reader, but count the set bits
            // and use this as the skip size for the data reader
            readOffset = presentStream.countBitsSet(readOffset);
        }
        if (readOffset > 0) {
            // Non-null values to skip imply a data stream must exist.
            if (dataStream == null)
                throw new OrcCorruptionException("Value is not null but data stream is not present");
            dataStream.skip(readOffset);
        }
    }
    if (presentStream == null) {
        // No present stream: every row is non-null and comes straight from the data stream.
        if (dataStream == null) {
            throw new OrcCorruptionException("Value is not null but data stream is not present");
        }
        // LongType/DateType targets are read as longs; everything else as shorts.
        // NOTE(review): DateType maps to the long path here but to the int path in
        // LongDirectStreamReader — confirm this asymmetry is intentional.
        if (type instanceof LongType || type instanceof DateType)
            dataStream.nextLongVector(type, nextBatchSize, vector);
        else
            dataStream.nextShortVector(type, nextBatchSize, vector);
    } else {
        // Grow the reusable null-flag buffer if this batch is larger than any before it.
        if (nullVector.length < nextBatchSize) {
            nullVector = new boolean[nextBatchSize];
        }
        int nullValues = presentStream.getUnsetBits(nextBatchSize, nullVector);
        if (nullValues != nextBatchSize) {
            // At least one non-null value exists, so the data stream must be present.
            if (dataStream == null) {
                throw new OrcCorruptionException("Value is not null but data stream is not present");
            }
            if (type instanceof LongType || type instanceof DateType)
                dataStream.nextLongVector(type, nextBatchSize, vector, nullVector);
            else
                dataStream.nextShortVector(type, nextBatchSize, vector, nullVector);
        } else {
            // Entire batch is null: append a null per row.
            // NOTE(review): the i+j indexing appears to step past nulls already present
            // in the vector before appending — confirm against ColumnVector append semantics.
            for (int i = 0, j = 0; i < nextBatchSize; i++) {
                while (vector.isNullAt(i+j)) {
                    vector.appendNull();
                    j++;
                }
                vector.appendNull();
            }
        }
    }
    // Batch consumed: clear skip/size state for the next call.
    readOffset = 0;
    nextBatchSize = 0;
    return vector;
}
Example #4
Source File: LongDirectStreamReader.java From spliceengine with GNU Affero General Public License v3.0 | 4 votes |
/**
 * Reads the next batch of long values from the ORC data stream into {@code vector}.
 * Relies on instance state set up elsewhere: {@code presentStream} (null-bit stream),
 * {@code dataStream} (value stream), {@code readOffset} (rows to skip before this batch),
 * and {@code nextBatchSize} (rows to read). Resets the offset/size counters on return.
 */
@Override
public ColumnVector readBlock(DataType type, ColumnVector vector) throws IOException {
    if (!rowGroupOpen) {
        openRowGroup();
    }
    if (readOffset > 0) {
        if (presentStream != null) {
            // skip ahead the present bit reader, but count the set bits
            // and use this as the skip size for the data reader
            readOffset = presentStream.countBitsSet(readOffset);
        }
        if (readOffset > 0) {
            // Non-null values to skip imply a data stream must exist.
            if (dataStream == null) {
                throw new OrcCorruptionException("Value is not null but data stream is not present");
            }
            dataStream.skip(readOffset);
        }
    }
    if (presentStream == null) {
        // No present stream: every row is non-null and comes straight from the data stream.
        if (dataStream == null) {
            throw new OrcCorruptionException("Value is not null but data stream is not present");
        }
        // DateType/IntegerType targets are read as ints; everything else as longs.
        // NOTE(review): DateType maps to the int path here but to the long path in the
        // Short/Int direct readers — confirm this asymmetry is intentional.
        if (type instanceof DateType || type instanceof IntegerType) {
            dataStream.nextIntVector(type, nextBatchSize, vector);
        } else {
            dataStream.nextLongVector(type, nextBatchSize, vector);
        }
    } else {
        // Grow the reusable null-flag buffer if this batch is larger than any before it.
        if (nullVector.length < nextBatchSize) {
            nullVector = new boolean[nextBatchSize];
        }
        int nullValues = presentStream.getUnsetBits(nextBatchSize, nullVector);
        if (nullValues != nextBatchSize) {
            // At least one non-null value exists, so the data stream must be present.
            if (dataStream == null) {
                throw new OrcCorruptionException("Value is not null but data stream is not present");
            }
            if (type instanceof DateType || type instanceof IntegerType) {
                dataStream.nextIntVector(type, nextBatchSize, vector, nullVector);
            } else {
                dataStream.nextLongVector(type, nextBatchSize, vector, nullVector);
            }
        } else {
            // Entire batch is null: append a null per row.
            // NOTE(review): the i+j indexing appears to step past nulls already present
            // in the vector before appending — confirm against ColumnVector append semantics.
            for (int i = 0, j = 0; i < nextBatchSize; i++) {
                while (vector.isNullAt(i+j)) {
                    vector.appendNull();
                    j++;
                }
                vector.appendNull();
            }
        }
    }
    // Batch consumed: clear skip/size state for the next call.
    readOffset = 0;
    nextBatchSize = 0;
    return vector;
}
Example #5
Source File: IntDirectStreamReader.java From spliceengine with GNU Affero General Public License v3.0 | 4 votes |
/**
 * Reads the next batch of int values from the ORC data stream into {@code vector}.
 * Relies on instance state set up elsewhere: {@code presentStream} (null-bit stream),
 * {@code dataStream} (value stream), {@code readOffset} (rows to skip before this batch),
 * and {@code nextBatchSize} (rows to read). Resets the offset/size counters on return.
 */
@Override
public ColumnVector readBlock(DataType type, ColumnVector vector) throws IOException {
    if (!rowGroupOpen) {
        openRowGroup();
    }
    if (readOffset > 0) {
        if (presentStream != null) {
            // skip ahead the present bit reader, but count the set bits
            // and use this as the skip size for the data reader
            readOffset = presentStream.countBitsSet(readOffset);
        }
        if (readOffset > 0) {
            // Non-null values to skip imply a data stream must exist.
            if (dataStream == null) {
                throw new OrcCorruptionException("Value is not null but data stream is not present");
            }
            dataStream.skip(readOffset);
        }
    }
    if (presentStream == null) {
        // No present stream: every row is non-null and comes straight from the data stream.
        if (dataStream == null) {
            throw new OrcCorruptionException("Value is not null but data stream is not present");
        }
        // LongType/DateType targets are read as longs; everything else as ints.
        // NOTE(review): DateType maps to the long path here but to the int path in
        // LongDirectStreamReader — confirm this asymmetry is intentional.
        if (type instanceof LongType || type instanceof DateType)
            dataStream.nextLongVector(type, nextBatchSize, vector);
        else
            dataStream.nextIntVector(type, nextBatchSize, vector);
    } else {
        // Grow the reusable null-flag buffer if this batch is larger than any before it.
        if (nullVector.length < nextBatchSize) {
            nullVector = new boolean[nextBatchSize];
        }
        int nullValues = presentStream.getUnsetBits(nextBatchSize, nullVector);
        if (nullValues != nextBatchSize) {
            // At least one non-null value exists, so the data stream must be present.
            if (dataStream == null) {
                throw new OrcCorruptionException("Value is not null but data stream is not present");
            }
            if (type instanceof LongType || type instanceof DateType)
                dataStream.nextLongVector(type, nextBatchSize, vector, nullVector);
            else
                dataStream.nextIntVector(type, nextBatchSize, vector, nullVector);
        } else {
            // Entire batch is null: append a null per row.
            // NOTE(review): the i+j indexing appears to step past nulls already present
            // in the vector before appending — confirm against ColumnVector append semantics.
            for (int i = 0, j = 0; i < nextBatchSize; i++) {
                while (vector.isNullAt(i+j)) {
                    vector.appendNull();
                    j++;
                }
                vector.appendNull();
            }
        }
    }
    // Batch consumed: clear skip/size state for the next call.
    readOffset = 0;
    nextBatchSize = 0;
    return vector;
}