Java Code Examples for com.google.cloud.bigquery.Field#getSubFields()
The following examples show how to use
com.google.cloud.bigquery.Field#getSubFields().
You can vote up the examples you like or vote down the ones you don't like.
You can go to the original project or source file by following the links above each example, and you may check out the related API usage on the sidebar.
Example 1
Source File: Conversions.java From presto with Apache License 2.0 | 6 votes |
/**
 * Converts a BigQuery {@link Field} into a {@link BigQueryColumnHandle}.
 *
 * <p>Nested sub-fields (STRUCT members) are converted recursively; a scalar
 * field reports {@code null} sub-fields and yields an empty child list.
 * The resulting handle is never marked hidden (last argument {@code false}).
 */
static BigQueryColumnHandle toColumnHandle(Field field)
{
    FieldList children = field.getSubFields();
    List<BigQueryColumnHandle> childHandles;
    if (children == null) {
        childHandles = Collections.emptyList();
    }
    else {
        childHandles = children.stream()
                .map(Conversions::toColumnHandle)
                .collect(Collectors.toList());
    }
    return new BigQueryColumnHandle(
            field.getName(),
            BigQueryType.valueOf(field.getType().name()),
            getMode(field),
            childHandles,
            field.getDescription(),
            false);
}
Example 2
Source File: Conversions.java From presto with Apache License 2.0 | 5 votes |
/**
 * Wraps a BigQuery {@link Field} in a {@link BigQueryType.Adaptor} view.
 *
 * <p>The adaptor resolves the field's type by matching the BigQuery type name
 * against the {@link BigQueryType} enum, exposes STRUCT members as a map of
 * recursively adapted sub-types (empty when the field has no sub-fields), and
 * delegates mode resolution to {@link Conversions#getMode(Field)}.
 */
static BigQueryType.Adaptor adapt(Field field)
{
    return new BigQueryType.Adaptor()
    {
        @Override
        public BigQueryType getBigQueryType()
        {
            // Enum constants share their names with BigQuery's type names.
            return BigQueryType.valueOf(field.getType().name());
        }

        @Override
        public ImmutableMap<String, BigQueryType.Adaptor> getBigQuerySubTypes()
        {
            FieldList children = field.getSubFields();
            return children == null
                    ? ImmutableMap.of()
                    : children.stream().collect(toImmutableMap(Field::getName, Conversions::adapt));
        }

        @Override
        public Field.Mode getMode()
        {
            return Conversions.getMode(field);
        }
    };
}
Example 3
Source File: BigQueryAvroRegistry.java From components with Apache License 2.0 | 4 votes |
/** * All BigQuery types except Record/Struct and Arrays, no matter legacy or not, as {@link LegacySQLTypeName} is a * wrapper for {@link StandardSQLTypeName} */ private org.apache.avro.Schema inferSchemaFieldWithoutMode(Field field) { LegacySQLTypeName sqlType = field.getType(); switch (sqlType.getStandardType()) { case STRUCT: String name = field.getName(); // Struct type // https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#struct-type SchemaBuilder.FieldAssembler<org.apache.avro.Schema> itemFieldAssembler = SchemaBuilder.record(name).fields(); for (Field itemField : field.getSubFields()) { itemFieldAssembler.name(itemField.getName()).type(inferSchemaField(itemField)).noDefault(); } org.apache.avro.Schema recordSchema = itemFieldAssembler.endRecord(); return recordSchema; case BYTES: return AvroUtils._bytes(); case INT64: return AvroUtils._long(); case FLOAT64: return AvroUtils._double(); case BOOL: return AvroUtils._boolean(); case DATETIME: org.apache.avro.Schema schemaDT = AvroUtils._string(); schemaDT.addProp(TALEND_COLUMN_DB_TYPE, BigQueryType.DATETIME.toString()); return schemaDT; case DATE: org.apache.avro.Schema schemaD = AvroUtils._string(); schemaD.addProp(TALEND_COLUMN_DB_TYPE, BigQueryType.DATE.toString()); return schemaD; // return LogicalTypes.date().addToSchema(AvroUtils._int()); case TIME: org.apache.avro.Schema schemaT = AvroUtils._string(); schemaT.addProp(TALEND_COLUMN_DB_TYPE, BigQueryType.TIME.toString()); return schemaT; // return LogicalTypes.timeMicros().addToSchema(AvroUtils._long()); case TIMESTAMP: org.apache.avro.Schema schemaTS = AvroUtils._string(); schemaTS.addProp(TALEND_COLUMN_DB_TYPE, BigQueryType.TIMESTAMP.toString()); return schemaTS; // return LogicalTypes.timestampMicros().addToSchema(AvroUtils._long()); case STRING: return AvroUtils._string(); default: // This should never occur. throw new RuntimeException("The BigQuery data type " + sqlType + " is not handled."); } }