org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector Java Examples
The following examples show how to use
org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector.
They are extracted from open source projects; the source file, project, and license are noted above each example.
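All of the examples below revolve around the same small surface of BytesColumnVector: each row i is the byte range (vector[i], start[i], length[i]), setVal copies bytes into the vector's shared buffer, and setRef stores a reference to the caller's array. As a quick orientation, here is a minimal, self-contained sketch of that surface; the class name and sample values are illustrative and not taken from any of the projects below.

import java.nio.charset.StandardCharsets;
import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;

public class BytesColumnVectorSketch {
    public static void main(String[] args) {
        BytesColumnVector bcv = new BytesColumnVector(VectorizedRowBatch.DEFAULT_SIZE);
        bcv.initBuffer(); // allocate the shared buffer that setVal copies into

        byte[] copied = "copied".getBytes(StandardCharsets.UTF_8);
        bcv.setVal(0, copied, 0, copied.length); // row 0: bytes are copied into the shared buffer

        byte[] referenced = "referenced".getBytes(StandardCharsets.UTF_8);
        bcv.setRef(1, referenced, 0, referenced.length); // row 1: stores a reference, no copy

        // reading a row back: each entry is the byte range (vector[i], start[i], length[i])
        for (int i = 0; i < 2; i++) {
            System.out.println(new String(bcv.vector[i], bcv.start[i], bcv.length[i], StandardCharsets.UTF_8));
        }
    }
}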
Example #1
Source File: OrcBatchReader.java From Flink-CEPplus with Apache License 2.0

private static void readNonNullBytesColumnAsString(Object[] vals, int fieldIdx, BytesColumnVector bytes, int childCount) {
    if (bytes.isRepeating) { // fill complete column with first value
        String repeatingValue = readString(bytes.vector[0], bytes.start[0], bytes.length[0]);
        fillColumnWithRepeatingValue(vals, fieldIdx, repeatingValue, childCount);
    } else {
        if (fieldIdx == -1) { // set as an object
            for (int i = 0; i < childCount; i++) {
                vals[i] = readString(bytes.vector[i], bytes.start[i], bytes.length[i]);
            }
        } else { // set as a field of Row
            Row[] rows = (Row[]) vals;
            for (int i = 0; i < childCount; i++) {
                rows[i].setField(fieldIdx, readString(bytes.vector[i], bytes.start[i], bytes.length[i]));
            }
        }
    }
}
Example #2
Source File: OrcBatchReader.java From flink with Apache License 2.0

private static void readNonNullBytesColumnAsString(Object[] vals, int fieldIdx, BytesColumnVector bytes, int childCount) {
    if (bytes.isRepeating) { // fill complete column with first value
        String repeatingValue = readString(bytes.vector[0], bytes.start[0], bytes.length[0]);
        fillColumnWithRepeatingValue(vals, fieldIdx, repeatingValue, childCount);
    } else {
        if (fieldIdx == -1) { // set as an object
            for (int i = 0; i < childCount; i++) {
                vals[i] = readString(bytes.vector[i], bytes.start[i], bytes.length[i]);
            }
        } else { // set as a field of Row
            Row[] rows = (Row[]) vals;
            for (int i = 0; i < childCount; i++) {
                rows[i].setField(fieldIdx, readString(bytes.vector[i], bytes.start[i], bytes.length[i]));
            }
        }
    }
}
Example #3
Source File: OrcBulkWriterTestUtil.java From flink with Apache License 2.0

private static List<Record> getResults(Reader reader) throws IOException {
    List<Record> results = new ArrayList<>();
    RecordReader recordReader = reader.rows();
    VectorizedRowBatch batch = reader.getSchema().createRowBatch();

    while (recordReader.nextBatch(batch)) {
        BytesColumnVector stringVector = (BytesColumnVector) batch.cols[0];
        LongColumnVector intVector = (LongColumnVector) batch.cols[1];
        for (int r = 0; r < batch.size; r++) {
            String name = new String(stringVector.vector[r], stringVector.start[r], stringVector.length[r]);
            int age = (int) intVector.vector[r];
            results.add(new Record(name, age));
        }
    }
    // close after all batches have been consumed; closing inside the loop
    // would break on files that contain more than one batch
    recordReader.close();

    return results;
}
Example #4
Source File: AbstractOrcColumnVector.java From flink with Apache License 2.0

private static BytesColumnVector createBytesVector(int batchSize, Object value) {
    BytesColumnVector bcv = new BytesColumnVector(batchSize);
    if (value == null) {
        bcv.noNulls = false;
        bcv.isNull[0] = true;
        bcv.isRepeating = true;
    } else {
        byte[] bytes = value instanceof byte[]
                ? (byte[]) value
                : value.toString().getBytes(StandardCharsets.UTF_8);
        bcv.initBuffer(bytes.length);
        bcv.fill(bytes);
        bcv.isNull[0] = false;
    }
    return bcv;
}
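Both branches rely on the repeating-value optimization: a single entry at index 0 stands in for every row of the batch. fill(byte[]) copies the value and sets isRepeating itself, which is why only the null branch sets the flag explicitly.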
Example #5
Source File: OrcBatchReader.java From Flink-CEPplus with Apache License 2.0

private static void readBytesColumnAsString(Object[] vals, int fieldIdx, BytesColumnVector bytes, int childCount) {
    if (bytes.isRepeating) { // fill complete column with first value
        if (bytes.isNull[0]) {
            // fill vals with null values
            fillColumnWithRepeatingValue(vals, fieldIdx, null, childCount);
        } else {
            // read repeating non-null value by forwarding call
            readNonNullBytesColumnAsString(vals, fieldIdx, bytes, childCount);
        }
    } else {
        boolean[] isNullVector = bytes.isNull;
        if (fieldIdx == -1) { // set as an object
            for (int i = 0; i < childCount; i++) {
                if (isNullVector[i]) {
                    vals[i] = null;
                } else {
                    vals[i] = readString(bytes.vector[i], bytes.start[i], bytes.length[i]);
                }
            }
        } else { // set as a field of Row
            Row[] rows = (Row[]) vals;
            for (int i = 0; i < childCount; i++) {
                if (isNullVector[i]) {
                    rows[i].setField(fieldIdx, null);
                } else {
                    rows[i].setField(fieldIdx, readString(bytes.vector[i], bytes.start[i], bytes.length[i]));
                }
            }
        }
    }
}
Example #6
Source File: PentahoOrcRecordWriter.java From pentaho-hadoop-shims with Apache License 2.0

private void setBytesColumnVector(BytesColumnVector bytesColumnVector, String value) {
    if (value == null) {
        setBytesColumnVector(bytesColumnVector, new byte[0]);
    } else {
        setBytesColumnVector(bytesColumnVector, value.getBytes());
    }
}
Example #7
Source File: VectorColumnFiller.java From secor with Apache License 2.0

public void convert(JsonElement value, ColumnVector vect, int row) {
    if (value == null || value.isJsonNull()) {
        vect.noNulls = false;
        vect.isNull[row] = true;
    } else {
        BytesColumnVector vector = (BytesColumnVector) vect;
        // decode a hex-encoded string into raw bytes, two characters per byte
        String binStr = value.getAsString();
        byte[] bytes = new byte[binStr.length() / 2];
        for (int i = 0; i < bytes.length; ++i) {
            bytes[i] = (byte) Integer.parseInt(binStr.substring(i * 2, i * 2 + 2), 16);
        }
        vector.setRef(row, bytes, 0, bytes.length);
    }
}
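Note the use of setRef rather than setVal: it stores a reference to the caller's array instead of copying it into the vector's shared buffer, which is safe here because a fresh array is allocated for every row.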
Example #8
Source File: VectorColumnFiller.java From secor with Apache License 2.0

public void convert(JsonElement value, ColumnVector vect, int row) {
    if (value == null || value.isJsonNull()) {
        vect.noNulls = false;
        vect.isNull[row] = true;
    } else {
        BytesColumnVector vector = (BytesColumnVector) vect;
        byte[] bytes = value.getAsString().getBytes(StandardCharsets.UTF_8);
        vector.setRef(row, bytes, 0, bytes.length);
    }
}
Example #9
Source File: JsonFieldFiller.java From secor with Apache License 2.0

private static void setMap(JSONWriter writer, MapColumnVector vector, TypeDescription schema, int row) throws JSONException {
    writer.object();
    List<TypeDescription> schemaChildren = schema.getChildren();
    BytesColumnVector keyVector = (BytesColumnVector) vector.keys;
    long length = vector.lengths[row];
    long offset = vector.offsets[row];
    for (int i = 0; i < length; i++) {
        writer.key(keyVector.toString((int) offset + i));
        setValue(writer, vector.values, schemaChildren.get(1), (int) offset + i);
    }
    writer.endObject();
}
Example #10
Source File: RecordVectorizer.java From flink with Apache License 2.0

@Override
public void vectorize(Record element, VectorizedRowBatch batch) throws IOException {
    BytesColumnVector stringVector = (BytesColumnVector) batch.cols[0];
    LongColumnVector intColVector = (LongColumnVector) batch.cols[1];

    int row = batch.size++;

    stringVector.setVal(row, element.getName().getBytes(StandardCharsets.UTF_8));
    intColVector.vector[row] = element.getAge();

    this.addUserMetadata(OrcBulkWriterTestUtil.USER_METADATA_KEY, OrcBulkWriterTestUtil.USER_METADATA_VALUE);
}
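Each call claims the next free slot with batch.size++ and fills one value per column; the writer that owns the batch flushes and resets it once it is full. Because setVal copies the UTF-8 bytes into the vector's buffer, the temporary array does not need to outlive the call.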
Example #11
Source File: OrcBatchReader.java From flink with Apache License 2.0

private static void readBytesColumnAsBinary(Object[] vals, int fieldIdx, BytesColumnVector bytes, int childCount) {
    if (bytes.isRepeating) { // fill complete column with first value
        if (bytes.isNull[0]) {
            // fill vals with null values
            fillColumnWithRepeatingValue(vals, fieldIdx, null, childCount);
        } else {
            // read repeating non-null value by forwarding call
            readNonNullBytesColumnAsBinary(vals, fieldIdx, bytes, childCount);
        }
    } else {
        boolean[] isNullVector = bytes.isNull;
        if (fieldIdx == -1) { // set as an object
            for (int i = 0; i < childCount; i++) {
                if (isNullVector[i]) {
                    vals[i] = null;
                } else {
                    vals[i] = readBinary(bytes.vector[i], bytes.start[i], bytes.length[i]);
                }
            }
        } else { // set as a field of Row
            Row[] rows = (Row[]) vals;
            for (int i = 0; i < childCount; i++) {
                if (isNullVector[i]) {
                    rows[i].setField(fieldIdx, null);
                } else {
                    rows[i].setField(fieldIdx, readBinary(bytes.vector[i], bytes.start[i], bytes.length[i]));
                }
            }
        }
    }
}
Example #12
Source File: OrcBatchReader.java From flink with Apache License 2.0

private static void readBytesColumnAsString(Object[] vals, int fieldIdx, BytesColumnVector bytes, int childCount) {
    if (bytes.isRepeating) { // fill complete column with first value
        if (bytes.isNull[0]) {
            // fill vals with null values
            fillColumnWithRepeatingValue(vals, fieldIdx, null, childCount);
        } else {
            // read repeating non-null value by forwarding call
            readNonNullBytesColumnAsString(vals, fieldIdx, bytes, childCount);
        }
    } else {
        boolean[] isNullVector = bytes.isNull;
        if (fieldIdx == -1) { // set as an object
            for (int i = 0; i < childCount; i++) {
                if (isNullVector[i]) {
                    vals[i] = null;
                } else {
                    vals[i] = readString(bytes.vector[i], bytes.start[i], bytes.length[i]);
                }
            }
        } else { // set as a field of Row
            Row[] rows = (Row[]) vals;
            for (int i = 0; i < childCount; i++) {
                if (isNullVector[i]) {
                    rows[i].setField(fieldIdx, null);
                } else {
                    rows[i].setField(fieldIdx, readString(bytes.vector[i], bytes.start[i], bytes.length[i]));
                }
            }
        }
    }
}
Example #13
Source File: OrcWriter.java From osm2orc with ISC License

@Override
public void process(RelationContainer container) {
    DecimalColumnVector lat = (DecimalColumnVector) batch.cols[3];
    DecimalColumnVector lon = (DecimalColumnVector) batch.cols[4];
    ListColumnVector members = (ListColumnVector) batch.cols[6];

    checkLimit();
    addCommonProperties(container);

    // relations carry no coordinates, so lat/lon are explicitly nulled
    lat.isNull[row] = true;
    lon.isNull[row] = true;
    lat.set(row, (HiveDecimal) null);
    lon.set(row, (HiveDecimal) null);

    Relation relation = container.getEntity();
    members.lengths[row] = relation.getMembers().size();
    members.childCount += members.lengths[row];
    // grow the child vector, preserving existing entries when appending past offset 0
    members.child.ensureSize(members.childCount, members.offsets[row] != 0);

    for (int j = 0; j < relation.getMembers().size(); j++) {
        StructColumnVector membersStruct = (StructColumnVector) members.child;
        ((BytesColumnVector) membersStruct.fields[0]).setVal((int) members.offsets[row] + j,
                relation.getMembers().get(j).getMemberType().toString().toLowerCase().getBytes());
        ((LongColumnVector) membersStruct.fields[1]).vector[(int) members.offsets[row] + j] =
                relation.getMembers().get(j).getMemberId();
        ((BytesColumnVector) membersStruct.fields[2]).setVal((int) members.offsets[row] + j,
                relation.getMembers().get(j).getMemberRole().getBytes());
    }
}
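A list column stores all of its values in one shared child vector: lengths[row] and offsets[row] describe this row's slice, and childCount tracks the running total across rows. The struct's BytesColumnVector fields are then written at child positions offset + j, not at the row index.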
Example #14
Source File: OrcBatchReader.java From Flink-CEPplus with Apache License 2.0

private static void readBytesColumnAsBinary(Object[] vals, int fieldIdx, BytesColumnVector bytes, int childCount) {
    if (bytes.isRepeating) { // fill complete column with first value
        if (bytes.isNull[0]) {
            // fill vals with null values
            fillColumnWithRepeatingValue(vals, fieldIdx, null, childCount);
        } else {
            // read repeating non-null value by forwarding call
            readNonNullBytesColumnAsBinary(vals, fieldIdx, bytes, childCount);
        }
    } else {
        boolean[] isNullVector = bytes.isNull;
        if (fieldIdx == -1) { // set as an object
            for (int i = 0; i < childCount; i++) {
                if (isNullVector[i]) {
                    vals[i] = null;
                } else {
                    vals[i] = readBinary(bytes.vector[i], bytes.start[i], bytes.length[i]);
                }
            }
        } else { // set as a field of Row
            Row[] rows = (Row[]) vals;
            for (int i = 0; i < childCount; i++) {
                if (isNullVector[i]) {
                    rows[i].setField(fieldIdx, null);
                } else {
                    rows[i].setField(fieldIdx, readBinary(bytes.vector[i], bytes.start[i], bytes.length[i]));
                }
            }
        }
    }
}
Example #15
Source File: HiveORCVectorizedReader.java From dremio-oss with Apache License 2.0

private ColumnVector getPrimitiveColumnVector(PrimitiveObjectInspector poi) {
    switch (poi.getPrimitiveCategory()) {
        case BOOLEAN:
        case BYTE:
        case SHORT:
        case INT:
        case LONG:
        case DATE:
            return new LongColumnVector(VectorizedRowBatch.DEFAULT_SIZE);
        case TIMESTAMP:
            return new TimestampColumnVector(VectorizedRowBatch.DEFAULT_SIZE);
        case FLOAT:
        case DOUBLE:
            return new DoubleColumnVector(VectorizedRowBatch.DEFAULT_SIZE);
        case BINARY:
        case STRING:
        case CHAR:
        case VARCHAR:
            return new BytesColumnVector(VectorizedRowBatch.DEFAULT_SIZE);
        case DECIMAL:
            DecimalTypeInfo tInfo = (DecimalTypeInfo) poi.getTypeInfo();
            return new DecimalColumnVector(VectorizedRowBatch.DEFAULT_SIZE, tInfo.precision(), tInfo.scale());
        default:
            throw UserException.unsupportedError()
                .message("Vectorized ORC reader is not supported for datatype: %s", poi.getPrimitiveCategory())
                .build(logger);
    }
}
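BINARY, STRING, CHAR, and VARCHAR all map to the same BytesColumnVector: the vectorized reader treats every variable-length type as an opaque byte range and leaves character-level interpretation to the consumer.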
Example #16
Source File: BytesColumnVectorAssignor.java From multiple-dimension-spread with Apache License 2.0

@Override
public void setColumnVector(final ColumnVector vector, final IExpressionIndex indexList, final int start, final int length) throws IOException {
    BytesColumnVector columnVector = (BytesColumnVector) vector;
    PrimitiveObject[] primitiveObjectArray = column.getPrimitiveObjectArray(indexList, start, length);
    for (int i = 0; i < length; i++) {
        if (primitiveObjectArray[i] == null) {
            VectorizedBatchUtil.setNullColIsNullValue(columnVector, i);
        } else {
            if (primitiveObjectArray[i] instanceof IBytesLink) {
                IBytesLink linkObj = (IBytesLink) primitiveObjectArray[i];
                columnVector.vector[i] = linkObj.getLinkBytes();
                columnVector.start[i] = linkObj.getStart();
                columnVector.length[i] = linkObj.getLength();
            } else {
                byte[] strBytes = primitiveObjectArray[i].getBytes();
                if (strBytes == null) {
                    VectorizedBatchUtil.setNullColIsNullValue(columnVector, i);
                } else {
                    columnVector.vector[i] = strBytes;
                    columnVector.start[i] = 0;
                    columnVector.length[i] = strBytes.length;
                }
            }
        }
    }
}
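Writing vector[i], start[i], and length[i] directly is the field-level equivalent of setRef: the IBytesLink path points the row at a slice of an existing buffer without copying any bytes.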
Example #17
Source File: PentahoOrcRecordWriter.java From pentaho-hadoop-shims with Apache License 2.0

private void setBytesColumnVector(BytesColumnVector bytesColumnVector, byte[] value) {
    bytesColumnVector.vector[batchRowNumber] = value;
    bytesColumnVector.start[batchRowNumber] = 0;
    bytesColumnVector.length[batchRowNumber] = value.length;
}
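This overload backs the String variant shown earlier (Example #6): assigning the three parallel arrays directly stores a reference to the caller's array, matching the semantics of setRef(batchRowNumber, value, 0, value.length).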
Example #18
Source File: HiveORCCopiers.java From dremio-oss with Apache License 2.0

BytesToVarWidthCopier(BytesColumnVector inputVector, BaseVariableWidthVector outputVector, HiveOperatorContextOptions operatorContextOptions) {
    this.operatorContextOptions = operatorContextOptions;
    this.inputVector = inputVector;
    this.outputVector = outputVector;
}
Example #19
Source File: OrcConverter.java From pentaho-hadoop-shims with Apache License 2.0

protected static Object convertFromSourceToTargetDataType(ColumnVector columnVector, int currentBatchRow, int orcValueMetaInterface) {

    if (columnVector.isNull[currentBatchRow]) {
        return null;
    }
    switch (orcValueMetaInterface) {
        case ValueMetaInterface.TYPE_INET:
            try {
                return InetAddress.getByName(new String(((BytesColumnVector) columnVector).vector[currentBatchRow],
                    ((BytesColumnVector) columnVector).start[currentBatchRow],
                    ((BytesColumnVector) columnVector).length[currentBatchRow]));
            } catch (UnknownHostException e) {
                e.printStackTrace();
            }
            // falls through to TYPE_STRING when the address cannot be resolved

        case ValueMetaInterface.TYPE_STRING:
            return new String(((BytesColumnVector) columnVector).vector[currentBatchRow],
                ((BytesColumnVector) columnVector).start[currentBatchRow],
                ((BytesColumnVector) columnVector).length[currentBatchRow]);

        case ValueMetaInterface.TYPE_INTEGER:
            return ((LongColumnVector) columnVector).vector[currentBatchRow];

        case ValueMetaInterface.TYPE_NUMBER:
            return ((DoubleColumnVector) columnVector).vector[currentBatchRow];

        case ValueMetaInterface.TYPE_BIGNUMBER:
            HiveDecimalWritable obj = ((DecimalColumnVector) columnVector).vector[currentBatchRow];
            return obj.getHiveDecimal().bigDecimalValue();

        case ValueMetaInterface.TYPE_TIMESTAMP:
            Timestamp timestamp = new Timestamp(((TimestampColumnVector) columnVector).time[currentBatchRow]);
            timestamp.setNanos(((TimestampColumnVector) columnVector).nanos[currentBatchRow]);
            return timestamp;

        case ValueMetaInterface.TYPE_DATE:
            LocalDate localDate = LocalDate.ofEpochDay(0).plusDays(((LongColumnVector) columnVector).vector[currentBatchRow]);
            Date dateValue = Date.from(localDate.atStartOfDay(ZoneId.systemDefault()).toInstant());
            return dateValue;

        case ValueMetaInterface.TYPE_BOOLEAN:
            return ((LongColumnVector) columnVector).vector[currentBatchRow] != 0;

        case ValueMetaInterface.TYPE_BINARY:
            byte[] origBytes = ((BytesColumnVector) columnVector).vector[currentBatchRow];
            int startPos = ((BytesColumnVector) columnVector).start[currentBatchRow];
            byte[] newBytes = Arrays.copyOfRange(origBytes, startPos,
                startPos + ((BytesColumnVector) columnVector).length[currentBatchRow]);
            return newBytes;
    }
    // if none of the cases match return a null
    return null;
}
Example #20
Source File: ORCRecordExtractorTest.java From incubator-pinot with Apache License 2.0

/**
 * Create an ORC input file using the input records
 */
@Override
protected void createInputFile() throws IOException {
    TypeDescription schema = TypeDescription.fromString(
        "struct<user_id:int,firstName:string,lastName:string,bids:array<int>,campaignInfo:string,cost:double,timestamp:bigint>");
    Writer writer = OrcFile.createWriter(new Path(_dataFile.getAbsolutePath()),
        OrcFile.writerOptions(new Configuration()).setSchema(schema));

    int numRecords = _inputRecords.size();
    VectorizedRowBatch rowBatch = schema.createRowBatch(numRecords);
    LongColumnVector userIdVector = (LongColumnVector) rowBatch.cols[0];
    userIdVector.noNulls = false;
    BytesColumnVector firstNameVector = (BytesColumnVector) rowBatch.cols[1];
    firstNameVector.noNulls = false;
    BytesColumnVector lastNameVector = (BytesColumnVector) rowBatch.cols[2];
    ListColumnVector bidsVector = (ListColumnVector) rowBatch.cols[3];
    bidsVector.noNulls = false;
    LongColumnVector bidsElementVector = (LongColumnVector) bidsVector.child;
    bidsElementVector.ensureSize(6, false);
    BytesColumnVector campaignInfoVector = (BytesColumnVector) rowBatch.cols[4];
    DoubleColumnVector costVector = (DoubleColumnVector) rowBatch.cols[5];
    LongColumnVector timestampVector = (LongColumnVector) rowBatch.cols[6];

    for (int i = 0; i < numRecords; i++) {
        Map<String, Object> record = _inputRecords.get(i);
        Integer userId = (Integer) record.get("user_id");
        if (userId != null) {
            userIdVector.vector[i] = userId;
        } else {
            userIdVector.isNull[i] = true;
        }
        String firstName = (String) record.get("firstName");
        if (firstName != null) {
            firstNameVector.setVal(i, StringUtils.encodeUtf8(firstName));
        } else {
            firstNameVector.isNull[i] = true;
        }
        lastNameVector.setVal(i, StringUtils.encodeUtf8((String) record.get("lastName")));
        List<Integer> bids = (List<Integer>) record.get("bids");
        if (bids != null) {
            bidsVector.offsets[i] = bidsVector.childCount;
            bidsVector.lengths[i] = bids.size();
            for (int bid : bids) {
                bidsElementVector.vector[bidsVector.childCount++] = bid;
            }
        } else {
            bidsVector.isNull[i] = true;
        }
        campaignInfoVector.setVal(i, StringUtils.encodeUtf8((String) record.get("campaignInfo")));
        costVector.vector[i] = (double) record.get("cost");
        timestampVector.vector[i] = (long) record.get("timestamp");

        rowBatch.size++;
    }

    writer.addRowBatch(rowBatch);
    rowBatch.reset();
    writer.close();
}
Example #21
Source File: JsonFieldFiller.java From secor with Apache License 2.0

static void setValue(JSONWriter writer, ColumnVector vector, TypeDescription schema, int row) throws JSONException {
    if (vector.isRepeating) {
        row = 0;
    }
    if (vector.noNulls || !vector.isNull[row]) {
        switch (schema.getCategory()) {
            case BOOLEAN:
                writer.value(((LongColumnVector) vector).vector[row] != 0);
                break;
            case BYTE:
            case SHORT:
            case INT:
            case LONG:
                writer.value(((LongColumnVector) vector).vector[row]);
                break;
            case FLOAT:
            case DOUBLE:
                writer.value(((DoubleColumnVector) vector).vector[row]);
                break;
            case STRING:
            case CHAR:
            case VARCHAR:
                writer.value(((BytesColumnVector) vector).toString(row));
                break;
            case DECIMAL:
                writer.value(((DecimalColumnVector) vector).vector[row].toString());
                break;
            case DATE:
                writer.value(new DateWritable((int) ((LongColumnVector) vector).vector[row]).toString());
                break;
            case TIMESTAMP:
                writer.value(((TimestampColumnVector) vector).asScratchTimestamp(row).toString());
                break;
            case LIST:
                setList(writer, (ListColumnVector) vector, schema, row);
                break;
            case STRUCT:
                setStruct(writer, (StructColumnVector) vector, schema, row);
                break;
            case UNION:
                setUnion(writer, (UnionColumnVector) vector, schema, row);
                break;
            case BINARY:
                // To prevent similar mistakes like the one described in https://github.com/pinterest/secor/pull/1018,
                // it would be better to explicitly throw an exception here rather than ignore the incoming values,
                // which causes silent failures in a later stage.
                throw new UnsupportedOperationException();
            case MAP:
                setMap(writer, (MapColumnVector) vector, schema, row);
                break;
            default:
                throw new IllegalArgumentException("Unknown type " + schema.toString());
        }
    } else {
        writer.value(null);
    }
}
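The guard vector.noNulls || !vector.isNull[row] is the standard ColumnVector null check: isNull is only meaningful when noNulls is false, and for repeating vectors row 0 carries the value for every row, which is why row is reset to 0 first.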
Example #22
Source File: HiveORCCopiers.java From dremio-oss with Apache License 2.0

BytesToDecimalCopier(BytesColumnVector inputVector, DecimalVector outputVector) {
    this.inputVector = inputVector;
    this.outputVector = outputVector;
}
Example #23
Source File: HiveORCCopiers.java From dremio-oss with Apache License 2.0

BytesToFloat8Copier(BytesColumnVector inputVector, Float8Vector outputVector) {
    this.inputVector = inputVector;
    this.outputVector = outputVector;
}
Example #24
Source File: OrcBytesColumnVector.java From flink with Apache License 2.0

public OrcBytesColumnVector(BytesColumnVector vector) {
    super(vector);
    this.vector = vector;
}