Java Code Examples for com.google.cloud.bigquery.InsertAllResponse#hasErrors()
The following examples show how to use
com.google.cloud.bigquery.InsertAllResponse#hasErrors() .
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example. You can also check out the related API usage examples on the sidebar.
Example 1
Source File: BQResponseParser.java From beast with Apache License 2.0 | 6 votes |
/** * Parses the {@link InsertAllResponse} object and constructs a mapping of each record in @{@link Records} that were * tried to sink in BQ and the error type {@link BQInsertionRecordsErrorType}. * * @param records - records that were tried with BQ insertion * @param bqResponse - the status of insertion for all records as returned by BQ * @return - map of each record and the associated list of error types. */ public Map<Record, List<BQInsertionRecordsErrorType>> parseBQResponse(final Records records, final InsertAllResponse bqResponse) { if (!bqResponse.hasErrors()) { return Collections.emptyMap(); } Map<Record, List<BQInsertionRecordsErrorType>> parsedRecords = new HashMap<>(); Map<Long, List<BigQueryError>> insertErrorsMap = bqResponse.getInsertErrors(); for (long recordIndex : insertErrorsMap.keySet()) { final Record message = records.getRecords().get((int) recordIndex); final List<BQInsertionRecordsErrorType> errorTypeList = new ArrayList<>(); parsedRecords.put(message, errorTypeList); for (final BigQueryError err : insertErrorsMap.get(recordIndex)) { errorTypeList.add(ErrorTypeFactory.getErrorType(err.getReason(), err.getMessage())); } //end of for each row } return parsedRecords; }
Example 2
Source File: BqSink.java From beast with Apache License 2.0 | 6 votes |
@Override public Status push(Records records) { InsertAllResponse response = insertIntoBQ(records); InsertStatus status = new InsertStatus(!response.hasErrors(), response.getInsertErrors()); //if bq has errors if (response.hasErrors()) { //parse the error records Map<Record, List<BQInsertionRecordsErrorType>> parsedRecords = responseParser.parseBQResponse(records, response); //sink error records boolean isTheSinkSuccessful = errorHandler.handleErrorRecords(parsedRecords).isSuccess(); if (!isTheSinkSuccessful) { return new InsertStatus(isTheSinkSuccessful, response.getInsertErrors()); } Records bqValidRecords = errorHandler.getBQValidRecords(parsedRecords); if (!bqValidRecords.getRecords().isEmpty()) { // there are valid records //insert the valid records into bq isTheSinkSuccessful &= !insertIntoBQ(bqValidRecords).hasErrors(); } return new InsertStatus(isTheSinkSuccessful, response.getInsertErrors()); } // return the original response return new InsertStatus(!response.hasErrors(), response.getInsertErrors()); }
Example 3
Source File: BigQueryClient.java From beam with Apache License 2.0 | 5 votes |
/**
 * Throws a {@link RuntimeException} describing the insert errors when the given
 * BigQuery response reports any; otherwise does nothing.
 *
 * @param response the response returned by a BigQuery insertAll call
 */
private void handleBigQueryResponseExceptions(InsertAllResponse response) {
    if (!response.hasErrors()) {
        return;
    }
    final String message =
        format("The following errors occurred while inserting to BigQuery: %s", response.getInsertErrors());
    throw new RuntimeException(message);
}
Example 4
Source File: BigQuerySnippets.java From google-cloud-java with Apache License 2.0 | 5 votes |
/** Example of inserting rows into a table without running a load job. */ // [TARGET insertAll(InsertAllRequest)] // [VARIABLE "my_dataset_name"] // [VARIABLE "my_table_name"] public InsertAllResponse insertAll(String datasetName, String tableName) { // [START bigquery_table_insert_rows] TableId tableId = TableId.of(datasetName, tableName); // Values of the row to insert Map<String, Object> rowContent = new HashMap<>(); rowContent.put("booleanField", true); // Bytes are passed in base64 rowContent.put("bytesField", "Cg0NDg0="); // 0xA, 0xD, 0xD, 0xE, 0xD in base64 // Records are passed as a map Map<String, Object> recordsContent = new HashMap<>(); recordsContent.put("stringField", "Hello, World!"); rowContent.put("recordField", recordsContent); InsertAllResponse response = bigquery.insertAll( InsertAllRequest.newBuilder(tableId) .addRow(rowContent) // More rows can be added in the same RPC by invoking .addRow() on the builder. // You can also supply optional unique row keys to support de-duplication scenarios. .build()); if (response.hasErrors()) { // If any of the insertions failed, this lets you inspect the errors for (Entry<Long, List<BigQueryError>> entry : response.getInsertErrors().entrySet()) { // inspect row error } } // [END bigquery_table_insert_rows] return response; }
Example 5
Source File: InsertDataAndQueryTable.java From google-cloud-java with Apache License 2.0 | 4 votes |
public static void main(String... args) throws InterruptedException { // Create a service instance BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService(); // Create a dataset String datasetId = "my_dataset_id"; bigquery.create(DatasetInfo.newBuilder(datasetId).build()); TableId tableId = TableId.of(datasetId, "my_table_id"); // Table field definition Field stringField = Field.of("StringField", LegacySQLTypeName.STRING); // Table schema definition Schema schema = Schema.of(stringField); // Create a table StandardTableDefinition tableDefinition = StandardTableDefinition.of(schema); bigquery.create(TableInfo.of(tableId, tableDefinition)); // Define rows to insert Map<String, Object> firstRow = new HashMap<>(); Map<String, Object> secondRow = new HashMap<>(); firstRow.put("StringField", "value1"); secondRow.put("StringField", "value2"); // Create an insert request InsertAllRequest insertRequest = InsertAllRequest.newBuilder(tableId).addRow(firstRow).addRow(secondRow).build(); // Insert rows InsertAllResponse insertResponse = bigquery.insertAll(insertRequest); // Check if errors occurred if (insertResponse.hasErrors()) { System.out.println("Errors occurred while inserting rows"); } // Create a query request QueryJobConfiguration queryConfig = QueryJobConfiguration.newBuilder("SELECT * FROM my_dataset_id.my_table_id").build(); // Read rows System.out.println("Table rows:"); for (FieldValueList row : bigquery.query(queryConfig).iterateAll()) { System.out.println(row); } }
Example 6
Source File: PutBigQueryStreaming.java From nifi with Apache License 2.0 | 4 votes |
/**
 * Reads records from the incoming FlowFile and streams them into a BigQuery table via
 * insertAll. On full success the FlowFile is routed to REL_SUCCESS; on any insert error
 * (or exception) it is penalized and routed to REL_FAILURE. The number of successfully
 * inserted records is written to the JOB_NB_RECORDS_ATTR attribute.
 */
@Override
public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {
    FlowFile flowFile = session.get();
    // No incoming FlowFile on this trigger — nothing to do.
    if (flowFile == null) {
        return;
    }
    // Resolve target table coordinates from processor properties (expression language
    // evaluated against the FlowFile where supported).
    final String projectId = context.getProperty(PROJECT_ID).evaluateAttributeExpressions().getValue();
    final String dataset = context.getProperty(DATASET).evaluateAttributeExpressions(flowFile).getValue();
    final String tableName = context.getProperty(TABLE_NAME).evaluateAttributeExpressions(flowFile).getValue();
    final TableId tableId;
    if (StringUtils.isEmpty(projectId)) {
        // No explicit project — presumably the client's default project applies; confirm.
        tableId = TableId.of(dataset, tableName);
    } else {
        tableId = TableId.of(projectId, dataset, tableName);
    }
    try {
        InsertAllRequest.Builder request = InsertAllRequest.newBuilder(tableId);
        // Count of records read from the FlowFile (rows added to the request).
        int nbrecord = 0;
        try (final InputStream in = session.read(flowFile)) {
            final RecordReaderFactory readerFactory = context.getProperty(RECORD_READER).asControllerService(RecordReaderFactory.class);
            try (final RecordReader reader = readerFactory.createRecordReader(flowFile, in, getLogger());) {
                Record currentRecord;
                // Stream every record from the FlowFile into the insert request.
                while ((currentRecord = reader.nextRecord()) != null) {
                    request.addRow(convertMapRecord(currentRecord.toMap()));
                    nbrecord++;
                }
            }
        }
        request.setIgnoreUnknownValues(context.getProperty(IGNORE_UNKNOWN).evaluateAttributeExpressions(flowFile).asBoolean());
        request.setSkipInvalidRows(context.getProperty(SKIP_INVALID_ROWS).evaluateAttributeExpressions(flowFile).asBoolean());
        InsertAllResponse response = getCloudService().insertAll(request.build());
        final Map<String, String> attributes = new HashMap<>();
        if (response.hasErrors()) {
            // getInsertErrors() is keyed by failed row index, so its size is the
            // number of rows that failed to insert.
            getLogger().log(LogLevel.WARN, "Failed to insert {} of {} records into BigQuery {} table.", new Object[] { response.getInsertErrors().size(), nbrecord, tableName });
            if (getLogger().isDebugEnabled()) {
                // Log every per-row error individually at DEBUG.
                for (long index : response.getInsertErrors().keySet()) {
                    for (BigQueryError e : response.getInsertErrors().get(index)) {
                        getLogger().log(LogLevel.DEBUG, "Failed to insert record #{}: {}", new Object[] { index,
                            e.getMessage() });
                    }
                }
            }
            // Attribute carries the count of rows that DID insert (total minus failed);
            // penalize and route the FlowFile to failure.
            attributes.put(BigQueryAttributes.JOB_NB_RECORDS_ATTR, Long.toString(nbrecord - response.getInsertErrors().size()));
            flowFile = session.penalize(flowFile);
            flowFile = session.putAllAttributes(flowFile, attributes);
            session.transfer(flowFile, REL_FAILURE);
        } else {
            // All rows inserted — record the full count and route to success.
            attributes.put(BigQueryAttributes.JOB_NB_RECORDS_ATTR, Long.toString(nbrecord));
            flowFile = session.putAllAttributes(flowFile, attributes);
            session.transfer(flowFile, REL_SUCCESS);
        }
    } catch (Exception ex) {
        // Any read/insert failure: log, penalize, and route the FlowFile to failure.
        getLogger().log(LogLevel.ERROR, ex.getMessage(), ex);
        flowFile = session.penalize(flowFile);
        session.transfer(flowFile, REL_FAILURE);
    }
}