com.google.cloud.bigquery.TableInfo Java Examples
The following examples show how to use com.google.cloud.bigquery.TableInfo.
Each example notes its source file, originating project, and license.
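Most of the examples below follow the same basic pattern: build a TableId and a Schema, wrap the schema in a TableDefinition, combine the two into a TableInfo, and pass it to BigQuery.create. A minimal sketch of that pattern (the dataset and table names are placeholders, and bigquery is assumed to be an initialized client):

// Minimal table-creation pattern; "my_dataset" and "my_table" are placeholders.
TableId tableId = TableId.of("my_dataset", "my_table");
Schema schema = Schema.of(Field.of("id", LegacySQLTypeName.INTEGER));
TableDefinition definition = StandardTableDefinition.of(schema);
TableInfo tableInfo = TableInfo.newBuilder(tableId, definition).build();
// Assumes bigquery was obtained from BigQueryOptions.getDefaultInstance().getService().
Table created = bigquery.create(tableInfo);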
Example #1
Source File: PutBigQueryStreamingIT.java From nifi with Apache License 2.0

private void createTable(String tableName) {
  TableId tableId = TableId.of(dataset.getDatasetId().getDataset(), tableName);

  // Table field definition
  Field id = Field.newBuilder("id", LegacySQLTypeName.INTEGER).setMode(Mode.REQUIRED).build();
  Field name = Field.newBuilder("name", LegacySQLTypeName.STRING).setMode(Mode.NULLABLE).build();
  Field alias = Field.newBuilder("alias", LegacySQLTypeName.STRING).setMode(Mode.REPEATED).build();
  Field zip = Field.newBuilder("zip", LegacySQLTypeName.STRING).setMode(Mode.NULLABLE).build();
  Field city = Field.newBuilder("city", LegacySQLTypeName.STRING).setMode(Mode.NULLABLE).build();
  Field addresses = Field.newBuilder("addresses", LegacySQLTypeName.RECORD, zip, city).setMode(Mode.REPEATED).build();
  Field position = Field.newBuilder("position", LegacySQLTypeName.STRING).setMode(Mode.NULLABLE).build();
  Field company = Field.newBuilder("company", LegacySQLTypeName.STRING).setMode(Mode.NULLABLE).build();
  Field job = Field.newBuilder("job", LegacySQLTypeName.RECORD, position, company).setMode(Mode.NULLABLE).build();

  // Table schema definition
  schema = Schema.of(id, name, alias, addresses, job);

  TableDefinition tableDefinition = StandardTableDefinition.of(schema);
  TableInfo tableInfo = TableInfo.newBuilder(tableId, tableDefinition).build();

  // create table
  bigquery.create(tableInfo);
}
Example #2
Source File: BigQueryStatementIssuingFn.java From DataflowTemplates with Apache License 2.0

private Table createBigQueryTable(BigQueryAction action) {
  TableDefinition definition = StandardTableDefinition.of(
      BigQuerySchemaUtils.beamSchemaToBigQueryClientSchema(action.tableSchema));

  TableId tableId = TableId.of(action.projectId, action.dataset, action.tableName);
  TableInfo tableInfo = TableInfo.newBuilder(tableId, definition).build();

  LOG.info("Creating a new BigQuery table: {}", tableInfo);

  try {
    return bigQueryClient.create(tableInfo);
  } catch (BigQueryException e) {
    if (e.getMessage().startsWith("Already Exists")) {
      return null;
    } else {
      throw e;
    }
  }
}
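Note that matching on the exception message, as this example does, is brittle. A sketch of an alternative (not from the original source) that keys on the HTTP status code instead:

try {
  return bigQueryClient.create(tableInfo);
} catch (BigQueryException e) {
  // 409 Conflict is BigQuery's "Already Exists" status; checking the code
  // avoids depending on the exact message text.
  if (e.getCode() == 409) {
    return null;
  }
  throw e;
}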
Example #3
Source File: BigQueryMapper.java From DataflowTemplates with Apache License 2.0

/**
 * Returns {@code Table} after creating the table with no columns in BigQuery.
 *
 * @param tableId a TableId referencing the BigQuery table being requested.
 */
private Table createBigQueryTable(TableId tableId) {
  // Create Blank BigQuery Table
  List<Field> fieldList = new ArrayList<Field>();
  Schema schema = Schema.of(fieldList);

  StandardTableDefinition.Builder tableDefinitionBuilder =
      StandardTableDefinition.newBuilder().setSchema(schema);
  if (dayPartitioning) {
    tableDefinitionBuilder.setTimePartitioning(
        TimePartitioning.newBuilder(TimePartitioning.Type.DAY).build());
  }
  TableInfo tableInfo = TableInfo.newBuilder(tableId, tableDefinitionBuilder.build()).build();
  Table table = bigquery.create(tableInfo);

  return table;
}
Example #4
Source File: BigQueryMapper.java From DataflowTemplates with Apache License 2.0

private Table createBigQueryTable(TableId tableId) {
  // Create Blank BigQuery Table
  LOG.info(String.format("Creating Table: %s", tableId.toString()));
  List<Field> fieldList = new ArrayList<Field>();
  Schema schema = Schema.of(fieldList);

  StandardTableDefinition.Builder tableDefinitionBuilder =
      StandardTableDefinition.newBuilder().setSchema(schema);
  if (dayPartitioning) {
    tableDefinitionBuilder.setTimePartitioning(
        TimePartitioning.newBuilder(TimePartitioning.Type.DAY).build());
  }
  TableInfo tableInfo = TableInfo.newBuilder(tableId, tableDefinitionBuilder.build()).build();
  Table table = bigquery.create(tableInfo);

  return table;
}
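When day partitioning is enabled, partitions can also be given a retention period so they age out automatically. A sketch of that variant of the setTimePartitioning call above (the 90-day window is purely illustrative, and java.util.concurrent.TimeUnit is assumed to be imported):

tableDefinitionBuilder.setTimePartitioning(
    TimePartitioning.newBuilder(TimePartitioning.Type.DAY)
        // Illustrative retention window; partitions older than 90 days are dropped.
        .setExpirationMs(TimeUnit.DAYS.toMillis(90))
        .build());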
Example #5
Source File: ReadSessionCreator.java From presto with Apache License 2.0

TableInfo createTableFromQuery() {
  TableId destinationTable = bigQueryClient.createDestinationTable(table);
  log.debug("destinationTable is %s", destinationTable);
  JobInfo jobInfo = JobInfo.of(
      QueryJobConfiguration
          .newBuilder(query)
          .setDestinationTable(destinationTable)
          .build());
  log.debug("running query %s", jobInfo);
  Job job = waitForJob(bigQueryClient.create(jobInfo));
  log.debug("job has finished. %s", job);
  if (job.getStatus().getError() != null) {
    throw convertToBigQueryException(job.getStatus().getError());
  }
  // add expiration time to the table
  TableInfo createdTable = bigQueryClient.getTable(destinationTable);
  long expirationTime = createdTable.getCreationTime()
      + TimeUnit.HOURS.toMillis(config.viewExpirationTimeInHours);
  Table updatedTable = bigQueryClient.update(createdTable.toBuilder()
      .setExpirationTime(expirationTime)
      .build());
  return updatedTable;
}
Example #6
Source File: CreateTableAndLoadData.java From google-cloud-java with Apache License 2.0

public static void main(String... args) throws InterruptedException, TimeoutException {
  BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
  TableId tableId = TableId.of("dataset", "table");
  Table table = bigquery.getTable(tableId);
  if (table == null) {
    System.out.println("Creating table " + tableId);
    Field integerField = Field.of("fieldName", LegacySQLTypeName.INTEGER);
    Schema schema = Schema.of(integerField);
    table = bigquery.create(TableInfo.of(tableId, StandardTableDefinition.of(schema)));
  }
  System.out.println("Loading data into table " + tableId);
  Job loadJob = table.load(FormatOptions.csv(), "gs://bucket/path");
  loadJob = loadJob.waitFor();
  if (loadJob.getStatus().getError() != null) {
    System.out.println("Job completed with errors");
  } else {
    System.out.println("Job succeeded");
  }
}
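Besides checking getStatus().getError(), a completed load job exposes statistics about what was written. A small follow-on sketch, assuming the same loadJob as above (JobStatistics lives in the same com.google.cloud.bigquery package):

// LoadStatistics reports rows and bytes written by the completed load job.
JobStatistics.LoadStatistics stats = loadJob.getStatistics();
System.out.println("Rows loaded: " + stats.getOutputRows());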
Example #7
Source File: BigQuerySnippets.java From google-cloud-java with Apache License 2.0

/** Example of creating a table. */
// [TARGET create(TableInfo, TableOption...)]
// [VARIABLE "my_dataset_name"]
// [VARIABLE "my_table_name"]
// [VARIABLE "string_field"]
public Table createTable(String datasetName, String tableName, String fieldName) {
  // [START bigquery_create_table]
  TableId tableId = TableId.of(datasetName, tableName);
  // Table field definition
  Field field = Field.of(fieldName, LegacySQLTypeName.STRING);
  // Table schema definition
  Schema schema = Schema.of(field);
  TableDefinition tableDefinition = StandardTableDefinition.of(schema);
  TableInfo tableInfo = TableInfo.newBuilder(tableId, tableDefinition).build();
  Table table = bigquery.create(tableInfo);
  // [END bigquery_create_table]
  return table;
}
Example #8
Source File: BigQueryExample.java From google-cloud-java with Apache License 2.0

@Override
TableInfo parse(String... args) throws Exception {
  String message;
  if (args.length == 3) {
    String dataset = args[0];
    String table = args[1];
    String query = args[2];
    TableId tableId = TableId.of(dataset, table);
    return TableInfo.of(tableId, ViewDefinition.of(query));
  } else if (args.length < 3) {
    message = "Missing required dataset id, table id or query.";
  } else {
    message = "Too many arguments.";
  }
  throw new IllegalArgumentException(message);
}
Example #9
Source File: BigQueryClient.java From beam with Apache License 2.0

private void createTable(TableId tableId, Schema schema) {
  TableInfo tableInfo =
      TableInfo.newBuilder(tableId, StandardTableDefinition.of(schema))
          .setFriendlyName(tableId.getTable())
          .build();

  client.create(tableInfo, FIELD_OPTIONS);
}
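FIELD_OPTIONS here is a constant defined elsewhere in the class. A hypothetical definition, assuming the intent is to restrict which table metadata the create call returns:

// Hypothetical; BigQuery.TableOption.fields limits the table metadata returned.
private static final BigQuery.TableOption FIELD_OPTIONS =
    BigQuery.TableOption.fields(BigQuery.TableField.SCHEMA, BigQuery.TableField.FRIENDLY_NAME);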
Example #10
Source File: BigQueryIntegrationTest.java From gcp-ingestion with Mozilla Public License 2.0

@Test
public void canWriteToDynamicTables() throws Exception {
  String table = "my_test_table";
  TableId tableId = TableId.of(dataset, table);

  bigquery.create(DatasetInfo.newBuilder(dataset).build());
  bigquery.create(TableInfo.newBuilder(tableId,
      StandardTableDefinition.of(Schema.of(Field.of("client_id", LegacySQLTypeName.STRING),
          Field.of("type", LegacySQLTypeName.STRING))))
      .build());

  String input = Resources
      .getResource("testdata/bigquery-integration/input-varied-doctypes.ndjson").getPath();
  String output = String.format("%s:%s.%s", projectId, dataset, "${document_type}_table");
  String errorOutput = outputPath + "/error/out";

  PipelineResult result = Sink.run(new String[] { "--inputFileFormat=json", "--inputType=file",
      "--input=" + input, "--outputType=bigquery", "--output=" + output,
      "--bqWriteMethod=streaming", "--errorOutputType=file", "--schemasLocation=schemas.tar.gz",
      "--errorOutputFileCompression=UNCOMPRESSED", "--errorOutput=" + errorOutput });

  result.waitUntilFinish();

  String tableSpec = String.format("%s.%s", dataset, table);
  assertThat(stringValuesQueryWithRetries("SELECT client_id FROM " + tableSpec),
      matchesInAnyOrder(ImmutableList.of("abc123")));

  List<String> errorOutputLines = Lines.files(outputPath + "/error/out*.ndjson");
  assertThat(errorOutputLines, Matchers.hasSize(2));
}
Example #11
Source File: BigQueryIntegrationTest.java From gcp-ingestion with Mozilla Public License 2.0

@Test
public void canWriteViaFileLoads() throws Exception {
  String table = "my_test_table";
  TableId tableId = TableId.of(dataset, table);

  bigquery.create(DatasetInfo.newBuilder(dataset).build());
  bigquery.create(TableInfo
      .newBuilder(tableId, StandardTableDefinition
          .of(Schema.of(Field.of("client_id", LegacySQLTypeName.STRING),
              Field.of("type", LegacySQLTypeName.STRING),
              Field.of("submission_timestamp", LegacySQLTypeName.TIMESTAMP)))
          .toBuilder().setTimePartitioning(TIME_PARTITIONING).setClustering(CLUSTERING)
          .build())
      .build());

  String input = Resources
      .getResource("testdata/bigquery-integration/input-varied-doctypes.ndjson").getPath();
  String output = String.format("%s:%s.%s", projectId, dataset, "${document_type}_table");
  String errorOutput = outputPath + "/error/out";

  PipelineResult result = Sink.run(new String[] { "--inputFileFormat=json", "--inputType=file",
      "--input=" + input, "--outputType=bigquery", "--output=" + output,
      "--bqWriteMethod=file_loads", "--errorOutputType=file",
      "--tempLocation=gs://gcp-ingestion-static-test-bucket/temp/bq-loads",
      "--schemasLocation=schemas.tar.gz", "--errorOutputFileCompression=UNCOMPRESSED",
      "--errorOutput=" + errorOutput });

  result.waitUntilFinish();

  String tableSpec = String.format("%s.%s", dataset, table);
  assertThat(stringValuesQueryWithRetries("SELECT client_id FROM " + tableSpec),
      matchesInAnyOrder(ImmutableList.of("abc123")));

  List<String> errorOutputLines = Lines.files(outputPath + "/error/out*.ndjson");
  assertThat(errorOutputLines, Matchers.hasSize(2));
}
Example #12
Source File: BigQueryIntegrationTest.java From gcp-ingestion with Mozilla Public License 2.0

private void canWriteWithMixedMethod(String streamingDocTypes) throws Exception {
  String table = "my_test_table";
  TableId tableId = TableId.of(dataset, table);

  bigquery.create(DatasetInfo.newBuilder(dataset).build());
  bigquery.create(TableInfo
      .newBuilder(tableId, StandardTableDefinition
          .of(Schema.of(Field.of("client_id", LegacySQLTypeName.STRING),
              Field.of("type", LegacySQLTypeName.STRING),
              Field.of("submission_timestamp", LegacySQLTypeName.TIMESTAMP)))
          .toBuilder().setTimePartitioning(TIME_PARTITIONING).setClustering(CLUSTERING)
          .build())
      .build());

  String input = Resources
      .getResource("testdata/bigquery-integration/input-varied-doctypes.ndjson").getPath();
  String output = String.format("%s:%s.%s", projectId, dataset, "${document_type}_table");
  String errorOutput = outputPath + "/error/out";

  PipelineResult result = Sink.run(new String[] { "--inputFileFormat=json", "--inputType=file",
      "--input=" + input, "--outputType=bigquery", "--output=" + output,
      "--bqWriteMethod=mixed", "--bqStreamingDocTypes=" + streamingDocTypes,
      "--errorOutputType=file",
      "--tempLocation=gs://gcp-ingestion-static-test-bucket/temp/bq-loads",
      "--schemasLocation=schemas.tar.gz", "--errorOutputFileCompression=UNCOMPRESSED",
      "--errorOutput=" + errorOutput });

  result.waitUntilFinish();

  String tableSpec = String.format("%s.%s", dataset, table);
  assertThat(stringValuesQueryWithRetries("SELECT client_id FROM " + tableSpec),
      matchesInAnyOrder(ImmutableList.of("abc123")));

  List<String> errorOutputLines = Lines.files(outputPath + "/error/out*.ndjson");
  assertThat(errorOutputLines, Matchers.hasSize(2));
}
Example #13
Source File: BigQueryIntegrationTest.java From gcp-ingestion with Mozilla Public License 2.0

@Test
public void canRecoverFailedInsertsInStreamingMode() throws Exception {
  String table = "my_test_table";
  String tableSpec = String.format("%s.%s", dataset, table);
  TableId tableId = TableId.of(dataset, table);

  bigquery.create(DatasetInfo.newBuilder(dataset).build());
  bigquery.create(TableInfo.newBuilder(tableId,
      StandardTableDefinition.of(Schema.of(Field.of("client_id", LegacySQLTypeName.STRING),
          Field.newBuilder("extra_required_field", LegacySQLTypeName.STRING)
              .setMode(Mode.REQUIRED).build())))
      .build());

  String input = Resources.getResource("testdata/json-payload.ndjson").getPath();
  String output = String.format("%s:%s", projectId, tableSpec);
  String errorOutput = outputPath + "/error/out";

  PipelineResult result = Sink.run(new String[] { "--inputFileFormat=text", "--inputType=file",
      "--input=" + input, "--outputType=bigquery", "--output=" + output,
      "--errorOutputType=file", "--bqWriteMethod=streaming",
      "--errorOutputFileCompression=UNCOMPRESSED", "--errorOutput=" + errorOutput });

  result.waitUntilFinish();

  assertTrue(stringValuesQuery("SELECT client_id FROM " + tableSpec).isEmpty());

  List<String> expectedErrorLines = Lines.resources("testdata/json-payload-wrapped.ndjson");
  List<String> errorOutputLines = Lines.files(outputPath + "/error/out*.ndjson");
  assertThat(errorOutputLines, Matchers.hasSize(expectedErrorLines.size()));
}
Example #14
Source File: BigQueryIntegrationTest.java From gcp-ingestion with Mozilla Public License 2.0

@Test
public void canSetStrictSchemaMode() throws Exception {
  String table = "my_test_table";
  String tableSpec = String.format("%s.%s", dataset, table);
  TableId tableId = TableId.of(dataset, table);

  bigquery.create(DatasetInfo.newBuilder(dataset).build());
  bigquery.create(TableInfo
      .newBuilder(tableId, StandardTableDefinition
          .of(Schema.of(Field.of("client_id", LegacySQLTypeName.STRING),
              Field.of("additional_properties", LegacySQLTypeName.STRING),
              Field.of("submission_timestamp", LegacySQLTypeName.TIMESTAMP)))
          .toBuilder().setTimePartitioning(TIME_PARTITIONING).setClustering(CLUSTERING)
          .build())
      .build());

  String input = Resources
      .getResource("testdata/bigquery-integration/input-varied-doctypes.ndjson").getPath();
  String output = String.format("%s:%s", projectId, tableSpec);

  PipelineResult result = Sink.run(new String[] { "--inputFileFormat=json", "--inputType=file",
      "--input=" + input, "--outputType=bigquery", "--bqWriteMethod=streaming",
      "--bqStrictSchemaDocTypes=my-namespace/my-test", "--output=" + output,
      "--errorOutputType=stderr" });

  result.waitUntilFinish();

  assertThat(stringValuesQueryWithRetries("SELECT additional_properties FROM " + tableSpec),
      matchesInAnyOrder(Lists.newArrayList("{\"type\":\"main\"}", null, "{\"type\":\"main\"}")));
}
Example #15
Source File: BigQueryIntegrationTest.java From gcp-ingestion with Mozilla Public License 2.0

@Test
public void canWriteToBigQuery() throws Exception {
  String table = "my_test_table";
  String tableSpec = String.format("%s.%s", dataset, table);
  TableId tableId = TableId.of(dataset, table);

  bigquery.create(DatasetInfo.newBuilder(dataset).build());
  bigquery.create(TableInfo
      .newBuilder(tableId, StandardTableDefinition
          .of(Schema.of(Field.of("client_id", LegacySQLTypeName.STRING),
              Field.of("type", LegacySQLTypeName.STRING),
              Field.of("submission_timestamp", LegacySQLTypeName.TIMESTAMP)))
          .toBuilder().setTimePartitioning(TIME_PARTITIONING).setClustering(CLUSTERING)
          .build())
      .build());

  String input = Resources
      .getResource("testdata/bigquery-integration/input-with-attributes.ndjson").getPath();
  String output = String.format("%s:%s", projectId, tableSpec);

  PipelineResult result = Sink.run(new String[] { "--inputFileFormat=json", "--inputType=file",
      "--input=" + input, "--outputType=bigquery", "--bqWriteMethod=streaming",
      "--schemasLocation=schemas.tar.gz", "--output=" + output, "--errorOutputType=stderr" });

  result.waitUntilFinish();

  assertThat(stringValuesQueryWithRetries("SELECT submission_timestamp FROM " + tableSpec),
      matchesInAnyOrder(Lists.newArrayList(null, null, "1561983194.123456")));
  assertThat(stringValuesQueryWithRetries("SELECT client_id FROM " + tableSpec),
      matchesInAnyOrder(ImmutableList.of("abc123", "abc123", "def456")));
}
Example #16
Source File: CreateStore.java From quetzal with Eclipse Public License 2.0

public Table createTable(String tableName, Field[] fields) {
  TableId tableId = TableId.of(datasetName, tableName);
  Schema schema = Schema.of(fields);
  TableDefinition tableDefinition = StandardTableDefinition.of(schema);
  TableInfo tableInfo = TableInfo.newBuilder(tableId, tableDefinition).build();
  Table t = bigquery.create(tableInfo);
  System.err.println("created " + t.getTableId());
  return t;
}
Example #17
Source File: ITTableSnippets.java From google-cloud-java with Apache License 2.0

@Test
public void testDelete() {
  Table doomedTable =
      bigquery.create(TableInfo.of(DOOMED_TABLE_ID, StandardTableDefinition.of(SCHEMA)));
  TableSnippets doomedTableSnippets = new TableSnippets(doomedTable);
  assertTrue(doomedTableSnippets.delete());
}
Example #18
Source File: BigQueryExample.java From google-cloud-java with Apache License 2.0

@Override
TableInfo parse(String... args) throws Exception {
  if (args.length >= 5) {
    String dataset = args[0];
    String table = args[1];
    TableId tableId = TableId.of(dataset, table);
    ExternalTableDefinition externalTableDefinition = ExternalTableDefinition.of(
        args[args.length - 1], parseSchema(args, 3, args.length - 1), FormatOptions.of(args[2]));
    return TableInfo.of(tableId, externalTableDefinition);
  }
  throw new IllegalArgumentException("Missing required arguments.");
}
Example #19
Source File: ITTableSnippets.java From google-cloud-java with Apache License 2.0

@Before
public void before() {
  ++nextTableNumber;
  StandardTableDefinition.Builder builder = StandardTableDefinition.newBuilder();
  builder.setSchema(SCHEMA);
  table = bigquery.create(TableInfo.of(getTableId(), builder.build()));
  bigquery.create(TableInfo.of(getCopyTableId(), builder.build()));
  tableSnippets = new TableSnippets(table);
}
Example #20
Source File: BigQueryDatasetRuntimeTestIT.java From components with Apache License 2.0

@BeforeClass
public static void initDatasetAndTable() throws IOException {
  BigQuery bigquery = BigQueryConnection.createClient(createDatastore());
  for (String dataset : datasets) {
    DatasetId datasetId = DatasetId.of(BigQueryTestConstants.PROJECT, dataset);
    bigquery.create(DatasetInfo.of(datasetId));
  }
  for (String table : tables) {
    TableDefinition tableDefinition =
        StandardTableDefinition.of(Schema.of(Field.of("test", LegacySQLTypeName.STRING)));
    TableId tableId = TableId.of(BigQueryTestConstants.PROJECT, datasets.get(0), table);
    bigquery.create(TableInfo.of(tableId, tableDefinition));
  }
}
Example #21
Source File: ITBigQuerySnippets.java From google-cloud-java with Apache License 2.0

@Test
public void testInsertAllAndListTableData() throws IOException, InterruptedException {
  String tableName = "test_insert_all_and_list_table_data";
  String fieldName1 = "booleanField";
  String fieldName2 = "bytesField";
  String fieldName3 = "recordField";
  String fieldName4 = "stringField";
  TableId tableId = TableId.of(DATASET, tableName);
  Schema schema = Schema.of(
      Field.of(fieldName1, LegacySQLTypeName.BOOLEAN),
      Field.of(fieldName2, LegacySQLTypeName.BYTES),
      Field.of(fieldName3, LegacySQLTypeName.RECORD,
          Field.of(fieldName4, LegacySQLTypeName.STRING)));
  TableInfo table = TableInfo.of(tableId, StandardTableDefinition.of(schema));
  assertNotNull(bigquery.create(table));
  InsertAllResponse response = bigquerySnippets.insertAll(DATASET, tableName);
  assertFalse(response.hasErrors());
  assertTrue(response.getInsertErrors().isEmpty());
  Page<FieldValueList> listPage = bigquerySnippets.listTableDataFromId(DATASET, tableName);
  while (Iterators.size(listPage.iterateAll().iterator()) < 1) {
    Thread.sleep(500);
    listPage = bigquerySnippets.listTableDataFromId(DATASET, tableName);
  }
  FieldValueList row = listPage.getValues().iterator().next();
  assertEquals(true, row.get(0).getBooleanValue());
  assertArrayEquals(new byte[] {0xA, 0xD, 0xD, 0xE, 0xD}, row.get(1).getBytesValue());
  assertEquals("Hello, World!", row.get(2).getRecordValue().get(0).getStringValue());
  listPage = bigquerySnippets.listTableDataSchema(DATASET, tableName, schema, fieldName1);
  row = listPage.getValues().iterator().next();
  assertNotNull(row.get(fieldName1));
  assertArrayEquals(new byte[] {0xA, 0xD, 0xD, 0xE, 0xD}, row.get(fieldName2).getBytesValue());
  bigquerySnippets.listTableDataSchemaId();
  assertTrue(bigquerySnippets.deleteTable(DATASET, tableName));
}
Example #22
Source File: BigQuerySnippets.java From google-cloud-java with Apache License 2.0

/** Example of updating a table by changing its description. */
public Table updateTableDescription(String datasetName, String tableName, String newDescription) {
  // [START bigquery_update_table_description]
  // String datasetName = "my_dataset_name";
  // String tableName = "my_table_name";
  // String newDescription = "new_description";
  Table beforeTable = bigquery.getTable(datasetName, tableName);
  TableInfo tableInfo = beforeTable.toBuilder().setDescription(newDescription).build();
  Table afterTable = bigquery.update(tableInfo);
  // [END bigquery_update_table_description]
  return afterTable;
}
Example #23
Source File: BigQuerySnippets.java From google-cloud-java with Apache License 2.0

/** Example of updating a table by changing its expiration. */
// [TARGET update(TableInfo, TableOption...)]
// [VARIABLE "my_dataset_name"]
// [VARIABLE "my_table_name"]
public Table updateTableExpiration(String datasetName, String tableName) {
  // [START bigquery_update_table_expiration]
  Table beforeTable = bigquery.getTable(datasetName, tableName);

  // Set table to expire 5 days from now.
  long expirationMillis = Instant.now().plus(5, ChronoUnit.DAYS).toEpochMilli();
  TableInfo tableInfo = beforeTable.toBuilder().setExpirationTime(expirationMillis).build();
  Table afterTable = bigquery.update(tableInfo);
  // [END bigquery_update_table_expiration]
  return afterTable;
}
Example #24
Source File: BigQueryExample.java From google-cloud-java with Apache License 2.0

@Override
TableInfo parse(String... args) throws Exception {
  if (args.length >= 3) {
    String dataset = args[0];
    String table = args[1];
    TableId tableId = TableId.of(dataset, table);
    return TableInfo.of(tableId, StandardTableDefinition.of(parseSchema(args, 2, args.length)));
  }
  throw new IllegalArgumentException("Missing required arguments.");
}
Example #25
Source File: BigQueryMetadata.java From presto with Apache License 2.0

@Override
public ConnectorTableHandle getTableHandle(ConnectorSession session, SchemaTableName tableName) {
  log.debug("getTableHandle(session=%s, tableName=%s)", session, tableName);
  TableInfo tableInfo = getBigQueryTable(tableName);
  if (tableInfo == null) {
    log.debug("Table [%s.%s] was not found", tableName.getSchemaName(), tableName.getTableName());
    return null;
  }
  return BigQueryTableHandle.from(tableInfo);
}
Example #26
Source File: BigQueryClient.java From presto with Apache License 2.0

TableInfo getTable(TableId tableId) {
  TableId bigQueryTableId = tableIds.get(tableId);
  Table table = bigQuery.getTable(bigQueryTableId != null ? bigQueryTableId : tableId);
  if (table != null) {
    tableIds.putIfAbsent(tableId, table.getTableId());
    datasetIds.putIfAbsent(toDatasetId(tableId), toDatasetId(table.getTableId()));
  }
  return table;
}
Example #27
Source File: BigQueryMetadata.java From presto with Apache License 2.0

private List<SchemaTableName> listTables(ConnectorSession session, SchemaTablePrefix prefix) {
  if (prefix.getTable().isEmpty()) {
    return listTables(session, prefix.getSchema());
  }
  SchemaTableName tableName = prefix.toSchemaTableName();
  TableInfo tableInfo = getBigQueryTable(tableName);
  return tableInfo == null
      ? ImmutableList.of() // table does not exist
      : ImmutableList.of(tableName);
}
Example #28
Source File: BigQueryMetadata.java From presto with Apache License 2.0

private List<BigQueryColumnHandle> getTableColumns(TableInfo table) {
  ImmutableList.Builder<BigQueryColumnHandle> columns = ImmutableList.builder();
  TableDefinition tableDefinition = table.getDefinition();
  Schema schema = tableDefinition.getSchema();
  if (schema != null) {
    for (Field field : schema.getFields()) {
      columns.add(Conversions.toColumnHandle(field));
    }
  }
  return columns.build();
}
Example #29
Source File: BigQueryMetadata.java From presto with Apache License 2.0

@Override
public ConnectorTableMetadata getTableMetadata(ConnectorSession session, ConnectorTableHandle tableHandle) {
  log.debug("getTableMetadata(session=%s, tableHandle=%s)", session, tableHandle);
  TableInfo table = bigQueryClient.getTable(((BigQueryTableHandle) tableHandle).getTableId());
  SchemaTableName schemaTableName =
      new SchemaTableName(table.getTableId().getDataset(), table.getTableId().getTable());
  Schema schema = table.getDefinition().getSchema();
  List<ColumnMetadata> columns = schema == null
      ? ImmutableList.of()
      : schema.getFields().stream()
          .map(Conversions::toColumnMetadata)
          .collect(toImmutableList());
  return new ConnectorTableMetadata(schemaTableName, columns);
}
Example #30
Source File: ReadSessionCreator.java From presto with Apache License 2.0

public Storage.ReadSession create(TableId table, List<String> selectedFields,
    Optional<String> filter, int parallelism) {
  TableInfo tableDetails = bigQueryClient.getTable(table);

  TableInfo actualTable = getActualTable(tableDetails, selectedFields);

  List<String> filteredSelectedFields = selectedFields.stream()
      .filter(BigQueryUtil::validColumnName)
      .collect(toList());

  try (BigQueryStorageClient bigQueryStorageClient =
      bigQueryStorageClientFactory.createBigQueryStorageClient()) {
    ReadOptions.TableReadOptions.Builder readOptions = ReadOptions.TableReadOptions.newBuilder()
        .addAllSelectedFields(filteredSelectedFields);
    filter.ifPresent(readOptions::setRowRestriction);

    TableReferenceProto.TableReference tableReference = toTableReference(actualTable.getTableId());

    Storage.ReadSession readSession = bigQueryStorageClient.createReadSession(
        Storage.CreateReadSessionRequest.newBuilder()
            .setParent("projects/" + bigQueryClient.getProjectId())
            .setFormat(Storage.DataFormat.AVRO)
            .setRequestedStreams(parallelism)
            .setReadOptions(readOptions)
            .setTableReference(tableReference)
            // The BALANCED sharding strategy causes the server to
            // assign roughly the same number of rows to each stream.
            .setShardingStrategy(Storage.ShardingStrategy.BALANCED)
            .build());

    return readSession;
  }
}