com.google.cloud.bigquery.WriteChannelConfiguration Java Examples
The following examples show how to use
com.google.cloud.bigquery.WriteChannelConfiguration.
You can vote up the examples you find helpful or vote down the ones you don't,
and you can go to the original project or source file by following the links above each example. You may also check out related API usage examples in the sidebar.
Example #1
Source File: PutBigQueryBatchTest.java From nifi with Apache License 2.0 | 6 votes |
/**
 * Verifies that a flow file is routed to {@code REL_SUCCESS} when the
 * BigQuery batch load job completes without error.
 */
@Test
public void testSuccessfulLoad() throws Exception {
    // Stub the BigQuery client so the load job appears to run to completion.
    when(table.exists()).thenReturn(Boolean.TRUE);
    when(bq.create(ArgumentMatchers.isA(JobInfo.class))).thenReturn(job);
    when(bq.writer(ArgumentMatchers.isA(WriteChannelConfiguration.class))).thenReturn(tableDataWriteChannel);
    when(tableDataWriteChannel.getJob()).thenReturn(job);
    when(job.waitFor(ArgumentMatchers.isA(RetryOption.class))).thenReturn(job);
    when(job.getStatus()).thenReturn(jobStatus);
    when(job.getStatistics()).thenReturn(stats);
    when(stats.getCreationTime()).thenReturn(0L);
    when(stats.getStartTime()).thenReturn(1L);
    when(stats.getEndTime()).thenReturn(2L);

    // Drive the processor with a single JSON flow file.
    final TestRunner testRunner = buildNewRunner(getProcessor());
    addRequiredPropertiesToRunner(testRunner);
    testRunner.assertValid();
    testRunner.enqueue("{ \"data\": \"datavalue\" }");
    testRunner.run();

    testRunner.assertAllFlowFilesTransferred(PutBigQueryBatch.REL_SUCCESS);
}
Example #2
Source File: PutBigQueryBatchTest.java From nifi with Apache License 2.0 | 6 votes |
/**
 * Verifies that a flow file is routed to {@code REL_FAILURE} when waiting on
 * the BigQuery load job throws a {@link BigQueryException}.
 */
@Test
public void testFailedLoad() throws Exception {
    // Stub the BigQuery client; waitFor() blows up to simulate a failed load.
    when(table.exists()).thenReturn(Boolean.TRUE);
    when(bq.create(ArgumentMatchers.isA(JobInfo.class))).thenReturn(job);
    when(bq.writer(ArgumentMatchers.isA(WriteChannelConfiguration.class))).thenReturn(tableDataWriteChannel);
    when(tableDataWriteChannel.getJob()).thenReturn(job);
    when(job.waitFor(ArgumentMatchers.isA(RetryOption.class))).thenThrow(BigQueryException.class);
    when(job.getStatus()).thenReturn(jobStatus);
    when(job.getStatistics()).thenReturn(stats);
    when(stats.getCreationTime()).thenReturn(0L);
    when(stats.getStartTime()).thenReturn(1L);
    when(stats.getEndTime()).thenReturn(2L);

    // Drive the processor with a single JSON flow file.
    final TestRunner testRunner = buildNewRunner(getProcessor());
    addRequiredPropertiesToRunner(testRunner);
    testRunner.assertValid();
    testRunner.enqueue("{ \"data\": \"datavalue\" }");
    testRunner.run();

    testRunner.assertAllFlowFilesTransferred(PutBigQueryBatch.REL_FAILURE);
}
Example #3
Source File: BigQueryTemplate.java From spring-cloud-gcp with Apache License 2.0 | 5 votes |
@Override public ListenableFuture<Job> writeDataToTable( String tableName, InputStream inputStream, FormatOptions dataFormatOptions) { TableId tableId = TableId.of(datasetName, tableName); WriteChannelConfiguration writeChannelConfiguration = WriteChannelConfiguration .newBuilder(tableId) .setFormatOptions(dataFormatOptions) .setAutodetect(this.autoDetectSchema) .setWriteDisposition(this.writeDisposition) .build(); TableDataWriteChannel writer = bigQuery.writer(writeChannelConfiguration); try (OutputStream sink = Channels.newOutputStream(writer)) { // Write data from data input file to BigQuery StreamUtils.copy(inputStream, sink); } catch (IOException e) { throw new BigQueryException("Failed to write data to BigQuery tables.", e); } if (writer.getJob() == null) { throw new BigQueryException( "Failed to initialize the BigQuery write job."); } return createJobFuture(writer.getJob()); }
Example #4
Source File: BigQuerySnippets.java From google-cloud-java with Apache License 2.0 | 5 votes |
/** Example of writing a local file to a table. */ // [TARGET writer(WriteChannelConfiguration)] // [VARIABLE "my_dataset_name"] // [VARIABLE "my_table_name"] // [VARIABLE FileSystems.getDefault().getPath(".", "my-data.csv")] // [VARIABLE "us"] public long writeFileToTable(String datasetName, String tableName, Path csvPath, String location) throws IOException, InterruptedException, TimeoutException { // [START bigquery_load_from_file] TableId tableId = TableId.of(datasetName, tableName); WriteChannelConfiguration writeChannelConfiguration = WriteChannelConfiguration.newBuilder(tableId).setFormatOptions(FormatOptions.csv()).build(); // Generally, location can be inferred based on the location of the referenced dataset. // However, // it can also be set explicitly to force job execution to be routed to a specific processing // location. See https://cloud.google.com/bigquery/docs/locations for more info. JobId jobId = JobId.newBuilder().setLocation(location).build(); TableDataWriteChannel writer = bigquery.writer(jobId, writeChannelConfiguration); // Write data to writer try (OutputStream stream = Channels.newOutputStream(writer)) { Files.copy(csvPath, stream); } finally { writer.close(); } // Get load job Job job = writer.getJob(); job = job.waitFor(); LoadStatistics stats = job.getStatistics(); return stats.getOutputRows(); // [END bigquery_load_from_file] }
Example #5
Source File: BigQueryExample.java From google-cloud-java with Apache License 2.0 | 5 votes |
/**
 * Uploads the file named in {@code configuration.y()} to BigQuery in
 * CHUNK_SIZE slices via a write channel built from {@code configuration.x()}.
 */
@Override
void run(BigQuery bigquery, Tuple<WriteChannelConfiguration, String> configuration)
    throws Exception {
  System.out.println("Running insert");
  try (FileChannel source = FileChannel.open(Paths.get(configuration.y()))) {
    WriteChannel target = bigquery.writer(configuration.x());
    long offset = 0;
    // transferTo returns 0 once the source is exhausted, ending the loop.
    for (long moved = source.transferTo(offset, CHUNK_SIZE, target);
        moved > 0;
        moved = source.transferTo(offset, CHUNK_SIZE, target)) {
      offset += moved;
    }
    target.close();
  }
}
Example #6
Source File: BigQueryExample.java From google-cloud-java with Apache License 2.0 | 5 votes |
/**
 * Parses CLI arguments of the form {@code <dataset> <table> <format> <file>}
 * into a write-channel configuration paired with the source file path.
 *
 * @throws IllegalArgumentException if exactly four arguments are not supplied
 */
@Override
Tuple<WriteChannelConfiguration, String> parse(String... args) throws Exception {
  // Guard clause: anything other than the four expected arguments is an error.
  if (args.length != 4) {
    throw new IllegalArgumentException("Missing required arguments.");
  }
  TableId target = TableId.of(args[0], args[1]);
  WriteChannelConfiguration configuration =
      WriteChannelConfiguration.of(target, FormatOptions.of(args[2]));
  return Tuple.of(configuration, args[3]);
}