com.google.api.services.bigquery.model.JobConfigurationQuery Java Examples
The following examples show how to use com.google.api.services.bigquery.model.JobConfigurationQuery.
Each example is taken from an open-source project; the source file, originating project, and license are noted above each snippet.
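All of the examples below follow the same basic pattern: build a JobConfigurationQuery, wrap it in a JobConfiguration and a Job, and submit the job through the BigQuery client. A minimal sketch of that pattern, assuming an already-authorized Bigquery client supplied by the caller; the class name, project ID, and SQL string are placeholders:

import java.io.IOException;
import com.google.api.services.bigquery.Bigquery;
import com.google.api.services.bigquery.model.Job;
import com.google.api.services.bigquery.model.JobConfiguration;
import com.google.api.services.bigquery.model.JobConfigurationQuery;

public class QueryJobSketch {
  /** Builds and inserts a query job; the caller supplies an already-authorized client. */
  static Job startQuery(Bigquery bigquery, String projectId, String sql) throws IOException {
    JobConfigurationQuery queryConfig = new JobConfigurationQuery()
        .setQuery(sql)
        .setUseLegacySql(false);  // run as standard SQL
    Job job = new Job()
        .setConfiguration(new JobConfiguration().setQuery(queryConfig));
    // jobs().insert() starts the job asynchronously; callers typically poll its status afterwards.
    return bigquery.jobs().insert(projectId, job).execute();
  }
}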
Example #1
Source File: BigQueryServicesImpl.java From beam with Apache License 2.0

@Override
public JobStatistics dryRunQuery(
    String projectId, JobConfigurationQuery queryConfig, String location)
    throws InterruptedException, IOException {
  JobReference jobRef = new JobReference().setLocation(location).setProjectId(projectId);
  Job job =
      new Job()
          .setJobReference(jobRef)
          .setConfiguration(new JobConfiguration().setQuery(queryConfig).setDryRun(true));
  return executeWithRetries(
          client.jobs().insert(projectId, job),
          String.format(
              "Unable to dry run query: %s, aborting after %d retries.",
              queryConfig, MAX_RPC_RETRIES),
          Sleeper.DEFAULT,
          createDefaultBackoff(),
          ALWAYS_RETRY)
      .getStatistics();
}
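The same dry-run idea can also be used directly against the BigQuery client to estimate a query's cost before running it. A hedged sketch, not part of the Beam code above, assuming an authorized Bigquery client and standard SQL; the class and method names are illustrative:

import java.io.IOException;
import com.google.api.services.bigquery.Bigquery;
import com.google.api.services.bigquery.model.Job;
import com.google.api.services.bigquery.model.JobConfiguration;
import com.google.api.services.bigquery.model.JobConfigurationQuery;

public class DryRunSketch {
  /** Returns the estimated bytes a query would process, without actually running it. */
  static Long estimateBytesProcessed(Bigquery bigquery, String projectId, String sql)
      throws IOException {
    Job job = new Job()
        .setConfiguration(new JobConfiguration()
            .setDryRun(true)  // validate and estimate only; no job is actually created
            .setQuery(new JobConfigurationQuery().setQuery(sql).setUseLegacySql(false)));
    Job response = bigquery.jobs().insert(projectId, job).execute();
    // For dry runs the returned Job carries statistics but there is nothing to poll.
    return response.getStatistics().getQuery().getTotalBytesProcessed();
  }
}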
Example #2
Source File: BigqueryConnection.java From nomulus with Apache License 2.0

/**
 * Starts an asynchronous query job to populate the specified destination table with the results
 * of the specified query, or if the table is a view, to update the view to reflect that query.
 * Returns a ListenableFuture that holds the same destination table object on success.
 */
public ListenableFuture<DestinationTable> query(
    String querySql,
    DestinationTable dest) {
  if (dest.type == TableType.VIEW) {
    // Use Futures.transform() rather than calling apply() directly so that any exceptions thrown
    // by calling updateTable will be propagated on the get() call, not from here.
    return transform(
        Futures.immediateFuture(dest.withQuery(querySql)), this::updateTable, directExecutor());
  } else {
    Job job = new Job()
        .setConfiguration(new JobConfiguration()
            .setQuery(new JobConfigurationQuery()
                .setQuery(querySql)
                .setDefaultDataset(getDataset())
                .setWriteDisposition(dest.getWriteDisposition().toString())
                .setDestinationTable(dest.getTableReference())));
    return transform(runJobToCompletion(job, dest), this::updateTable, directExecutor());
  }
}
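A note on the setters used above: setWriteDisposition controls what happens when the destination table already contains data; BigQuery accepts the values WRITE_TRUNCATE, WRITE_APPEND, and WRITE_EMPTY (the default). setCreateDisposition, used in a later example, similarly takes CREATE_IF_NEEDED or CREATE_NEVER.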
Example #3
Source File: BigqueryConnection.java From nomulus with Apache License 2.0

/** Create a table from a SQL query if it doesn't already exist. */
public TableReference ensureTable(TableReference table, String sqlQuery) {
  try {
    runJob(new Job()
        .setConfiguration(new JobConfiguration()
            .setQuery(new JobConfigurationQuery()
                .setQuery(sqlQuery)
                .setDefaultDataset(getDataset())
                .setDestinationTable(table))));
  } catch (BigqueryJobFailureException e) {
    if (e.getReason().equals("duplicate")) {
      // Table already exists.
    } else {
      throw e;
    }
  }
  return table;
}
Example #4
Source File: BigQueryServicesImpl.java From beam with Apache License 2.0

/**
 * {@inheritDoc}
 *
 * <p>Tries executing the RPC for at most {@code MAX_RPC_RETRIES} times until it succeeds.
 *
 * @throws IOException if it exceeds {@code MAX_RPC_RETRIES} attempts.
 */
@Override
public void startQueryJob(JobReference jobRef, JobConfigurationQuery queryConfig)
    throws IOException, InterruptedException {
  Job job =
      new Job()
          .setJobReference(jobRef)
          .setConfiguration(new JobConfiguration().setQuery(queryConfig));
  startJob(job, errorExtractor, client);
}
Example #5
Source File: BigQueryQueryHelper.java From beam with Apache License 2.0

private static JobConfigurationQuery createBasicQueryConfig(
    String query, Boolean flattenResults, Boolean useLegacySql) {
  return new JobConfigurationQuery()
      .setQuery(query)
      .setFlattenResults(flattenResults)
      .setUseLegacySql(useLegacySql);
}
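Note that setFlattenResults only has an effect for legacy SQL queries; when useLegacySql is false (standard SQL), results are never flattened and the flag is ignored, which is why the two settings are passed together here.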
Example #6
Source File: FakeJobService.java From beam with Apache License 2.0

@Override
public void startQueryJob(JobReference jobRef, JobConfigurationQuery query) {
  synchronized (allJobs) {
    Job job = new Job();
    job.setJobReference(jobRef);
    job.setConfiguration(new JobConfiguration().setQuery(query));
    job.setKind(" bigquery#job");
    job.setStatus(new JobStatus().setState("PENDING"));
    allJobs.put(jobRef.getProjectId(), jobRef.getJobId(), new JobInfo(job));
  }
}
Example #7
Source File: FakeJobService.java From beam with Apache License 2.0

@Override
public JobStatistics dryRunQuery(String projectId, JobConfigurationQuery query, String location) {
  synchronized (dryRunQueryResults) {
    JobStatistics result = dryRunQueryResults.get(projectId, query.getQuery());
    if (result != null) {
      return result;
    }
  }
  throw new UnsupportedOperationException();
}
Example #8
Source File: FakeJobService.java From beam with Apache License 2.0

private JobStatus runQueryJob(JobConfigurationQuery query)
    throws IOException, InterruptedException {
  KV<Table, List<TableRow>> result = FakeBigQueryServices.decodeQueryResult(query.getQuery());
  datasetService.createTable(result.getKey().setTableReference(query.getDestinationTable()));
  datasetService.insertAll(query.getDestinationTable(), result.getValue(), null);
  return new JobStatus().setState("DONE");
}
Example #9
Source File: BigqueryConnection.java From nomulus with Apache License 2.0

/**
 * Starts an asynchronous query job to dump the results of the specified query into a local
 * ImmutableTable object, row-keyed by the row number (indexed from 1), column-keyed by the
 * TableFieldSchema for that column, and with the value object as the cell value. Note that null
 * values will not actually be null, but they can be checked for using Data.isNull().
 *
 * <p>Returns a ListenableFuture that holds the ImmutableTable on success.
 */
public ListenableFuture<ImmutableTable<Integer, TableFieldSchema, Object>> queryToLocalTable(
    String querySql) {
  Job job = new Job()
      .setConfiguration(new JobConfiguration()
          .setQuery(new JobConfigurationQuery()
              .setQuery(querySql)
              .setDefaultDataset(getDataset())));
  return transform(runJobToCompletion(job), this::getQueryResults, directExecutor());
}
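As a hedged usage sketch (not from the nomulus code), a caller holding the future returned by queryToLocalTable above might block on it and walk the resulting table, using Data.isNull() to detect SQL NULLs as the Javadoc suggests; the class and method names are illustrative:

import com.google.api.client.util.Data;
import com.google.api.services.bigquery.model.TableFieldSchema;
import com.google.common.collect.ImmutableTable;
import com.google.common.collect.Table.Cell;
import com.google.common.util.concurrent.ListenableFuture;

public class LocalTableSketch {
  /** Blocks on the future returned by queryToLocalTable and prints every cell. */
  static void printResults(
      ListenableFuture<ImmutableTable<Integer, TableFieldSchema, Object>> future)
      throws Exception {
    ImmutableTable<Integer, TableFieldSchema, Object> result = future.get();
    for (Cell<Integer, TableFieldSchema, Object> cell : result.cellSet()) {
      Object value = cell.getValue();
      // SQL NULLs are returned as Data null sentinels, so check with Data.isNull().
      System.out.printf(
          "row %d, column %s = %s%n",
          cell.getRowKey(),
          cell.getColumnKey().getName(),
          Data.isNull(value) ? "NULL" : value);
    }
  }
}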
Example #10
Source File: BigqueryConnection.java From nomulus with Apache License 2.0

/**
 * Returns the result of calling queryToLocalTable, but synchronously to avoid spawning new
 * background threads, which App Engine doesn't support.
 *
 * @see <a href="https://cloud.google.com/appengine/docs/standard/java/runtime#Threads">App Engine
 *     Runtime</a>
 */
public ImmutableTable<Integer, TableFieldSchema, Object> queryToLocalTableSync(String querySql) {
  Job job = new Job()
      .setConfiguration(new JobConfiguration()
          .setQuery(new JobConfigurationQuery()
              .setQuery(querySql)
              .setDefaultDataset(getDataset())));
  return getQueryResults(runJob(job));
}
Example #11
Source File: BqOperatorFactory.java From digdag with Apache License 2.0

@Override
protected JobConfiguration jobConfiguration(String projectId) {
  JobConfigurationQuery cfg = new JobConfigurationQuery()
      .setQuery(query);

  cfg.setUseLegacySql(params.get("use_legacy_sql", boolean.class, false));

  params.getOptional("allow_large_results", boolean.class).transform(cfg::setAllowLargeResults);
  params.getOptional("use_query_cache", Boolean.class).transform(cfg::setUseQueryCache);
  params.getOptional("create_disposition", String.class).transform(cfg::setCreateDisposition);
  params.getOptional("write_disposition", String.class).transform(cfg::setWriteDisposition);
  params.getOptional("flatten_results", Boolean.class).transform(cfg::setFlattenResults);
  params.getOptional("maximum_billing_tier", Integer.class).transform(cfg::setMaximumBillingTier);
  params.getOptional("priority", String.class).transform(cfg::setPriority);

  params.getOptional("table_definitions",
      new TypeReference<Map<String, ExternalDataConfiguration>>() {})
      .transform(cfg::setTableDefinitions);

  params.getOptional("user_defined_function_resources",
      new TypeReference<List<UserDefinedFunctionResource>>() {})
      .transform(cfg::setUserDefinedFunctionResources);

  Optional<DatasetReference> defaultDataset = params.getOptional("dataset", String.class)
      .transform(Bq::datasetReference);
  defaultDataset.transform(cfg::setDefaultDataset);

  params.getOptional("destination_table", String.class)
      .transform(s -> cfg.setDestinationTable(tableReference(projectId, defaultDataset, s)));

  return new JobConfiguration()
      .setQuery(cfg);
}
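The repeated params.getOptional(...).transform(cfg::setXxx) calls rely on Guava Optional semantics: transform() invokes the setter only when the parameter is present and is a no-op on an absent Optional, so each JobConfigurationQuery field is populated only if the corresponding digdag operator parameter was actually supplied.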
Example #12
Source File: BigQueryServices.java From beam with Apache License 2.0

/** Start a BigQuery query job. */
void startQueryJob(JobReference jobRef, JobConfigurationQuery query)
    throws IOException, InterruptedException;
Example #13
Source File: BigQueryServices.java From beam with Apache License 2.0

/** Dry runs the query in the given project. */
JobStatistics dryRunQuery(String projectId, JobConfigurationQuery queryConfig, String location)
    throws InterruptedException, IOException;
Example #14
Source File: BigqueryClient.java From beam with Apache License 2.0

/** Performs a query without flattening results. */
@Nonnull
public List<TableRow> queryUnflattened(String query, String projectId, boolean typed)
    throws IOException, InterruptedException {
  Random rnd = new Random(System.currentTimeMillis());
  String temporaryDatasetId = "_dataflow_temporary_dataset_" + rnd.nextInt(1000000);
  String temporaryTableId = "dataflow_temporary_table_" + rnd.nextInt(1000000);
  TableReference tempTableReference =
      new TableReference()
          .setProjectId(projectId)
          .setDatasetId(temporaryDatasetId)
          .setTableId(temporaryTableId);

  createNewDataset(projectId, temporaryDatasetId);
  createNewTable(
      projectId, temporaryDatasetId, new Table().setTableReference(tempTableReference));

  JobConfigurationQuery jcQuery =
      new JobConfigurationQuery()
          .setFlattenResults(false)
          .setAllowLargeResults(true)
          .setDestinationTable(tempTableReference)
          .setQuery(query);
  JobConfiguration jc = new JobConfiguration().setQuery(jcQuery);

  Job job = new Job().setConfiguration(jc);
  Job insertedJob = bqClient.jobs().insert(projectId, job).execute();

  GetQueryResultsResponse qResponse;
  do {
    qResponse =
        bqClient
            .jobs()
            .getQueryResults(projectId, insertedJob.getJobReference().getJobId())
            .execute();
  } while (!qResponse.getJobComplete());

  final TableSchema schema = qResponse.getSchema();
  final List<TableRow> rows = qResponse.getRows();
  deleteDataset(projectId, temporaryDatasetId);
  return !typed
      ? rows
      : rows.stream()
          .map(r -> getTypedTableRow(schema.getFields(), r))
          .collect(Collectors.toList());
}
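Two details worth noting in this example: setAllowLargeResults(true) applies to legacy SQL queries and requires an explicit destination table, which is why the method first creates a throwaway dataset and table to receive the results and deletes the dataset afterwards; and the do/while loop polls jobs().getQueryResults() until getJobComplete() returns true, with no sleep or backoff between requests.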