Python google.api_core.exceptions.Conflict() Examples
The following are 11 code examples of google.api_core.exceptions.Conflict().
Each example notes its source file, project, and license above the code. You may also want to check out the other functions and classes available in the google.api_core.exceptions module.
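Conflict is google-api-core's mapping of an HTTP 409 response, and the dominant pattern in the examples below is create-if-absent: attempt the create call, then fall back to fetching (or updating) the existing resource when Conflict is raised. Here is a minimal sketch of that pattern with the BigQuery client; the dataset ID is a placeholder, and a reasonably recent google-cloud-bigquery is assumed:

    from google.api_core.exceptions import Conflict
    from google.cloud import bigquery

    client = bigquery.Client()
    dataset_id = "my-project.my_dataset"  # placeholder project/dataset ID

    try:
        # create_dataset raises Conflict (HTTP 409) if the dataset already exists
        dataset = client.create_dataset(bigquery.Dataset(dataset_id))
    except Conflict:
        # fall back to the existing dataset instead of failing
        dataset = client.get_dataset(dataset_id)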
Example #1
Source File: bigquery.py From airflow with Apache License 2.0
def execute(self, context):
    bq_hook = BigQueryHook(
        gcp_conn_id=self.gcp_conn_id,
        delegate_to=self.delegate_to,
        location=self.location,
    )
    try:
        bq_hook.create_empty_dataset(
            project_id=self.project_id,
            dataset_id=self.dataset_id,
            dataset_reference=self.dataset_reference,
            location=self.location,
            exists_ok=False,
        )
    except Conflict:
        dataset_id = self.dataset_reference.get("datasetReference", {}).get("datasetId", self.dataset_id)
        self.log.info('Dataset %s already exists.', dataset_id)
Example #2
Source File: test_blob.py From python-storage with Apache License 2.0
def test_upload_from_file_failure(self):
    import requests

    from google.resumable_media import InvalidResponse
    from google.cloud import exceptions

    message = "Someone is already in this spot."
    response = requests.Response()
    response.status_code = http_client.CONFLICT
    response.request = requests.Request("POST", "http://example.com").prepare()
    side_effect = InvalidResponse(response, message)

    with self.assertRaises(exceptions.Conflict) as exc_info:
        self._upload_from_file_helper(side_effect=side_effect)

    self.assertIn(message, exc_info.exception.message)
    self.assertEqual(exc_info.exception.errors, [])
Example #3
Source File: system.py From python-bigquery with Apache License 2.0
def tearDown(self):
    def _still_in_use(bad_request):
        return any(
            error["reason"] == "resourceInUse" for error in bad_request._errors
        )

    retry_in_use = RetryErrors(BadRequest, error_predicate=_still_in_use)
    retry_storage_errors_conflict = RetryErrors(
        (Conflict, TooManyRequests, InternalServerError, ServiceUnavailable)
    )
    for doomed in self.to_delete:
        if isinstance(doomed, storage.Bucket):
            retry_storage_errors_conflict(doomed.delete)(force=True)
        elif isinstance(doomed, (Dataset, bigquery.DatasetReference)):
            retry_in_use(Config.CLIENT.delete_dataset)(doomed, delete_contents=True)
        elif isinstance(doomed, (Table, bigquery.TableReference)):
            retry_in_use(Config.CLIENT.delete_table)(doomed)
        else:
            doomed.delete()
Example #4
Source File: bq.py From geomancer with MIT License
def _fetch_dataset(self, dataset_id):
    """Fetch a BigQuery dataset if it exists; otherwise create a new one.

    Parameters
    ----------
    dataset_id : str
        ID to name the created Dataset

    Returns
    -------
    :class:`google.cloud.bigquery.dataset.Dataset`
        The Dataset class to build tables from
    """
    dataset_ref = self.client.dataset(dataset_id)
    dataset = bigquery.Dataset(dataset_ref)
    try:
        dataset = self.client.create_dataset(dataset)
    except Conflict:
        dataset = self.client.get_dataset(dataset_ref)
    return dataset
Example #5
Source File: gcs.py From airflow with Apache License 2.0
def execute(self, context):
    hook = GCSHook(
        google_cloud_storage_conn_id=self.gcp_conn_id,
        delegate_to=self.delegate_to
    )
    try:
        hook.create_bucket(
            bucket_name=self.bucket_name,
            resource=self.resource,
            storage_class=self.storage_class,
            location=self.location,
            project_id=self.project_id,
            labels=self.labels,
        )
    except Conflict:  # HTTP 409
        self.log.warning("Bucket %s already exists", self.bucket_name)
Example #6
Source File: bigquery.py From airflow with Apache License 2.0
def execute(self, context):
    bq_hook = BigQueryHook(
        gcp_conn_id=self.bigquery_conn_id,
        delegate_to=self.delegate_to,
        location=self.location,
    )

    if not self.schema_fields and self.gcs_schema_object:
        gcs_bucket, gcs_object = _parse_gcs_url(self.gcs_schema_object)
        gcs_hook = GCSHook(
            google_cloud_storage_conn_id=self.google_cloud_storage_conn_id,
            delegate_to=self.delegate_to,
        )
        schema_fields = json.loads(
            gcs_hook.download(gcs_bucket, gcs_object).decode("utf-8")
        )
    else:
        schema_fields = self.schema_fields

    try:
        self.log.info('Creating table')
        table = bq_hook.create_empty_table(
            project_id=self.project_id,
            dataset_id=self.dataset_id,
            table_id=self.table_id,
            schema_fields=schema_fields,
            time_partitioning=self.time_partitioning,
            cluster_fields=self.cluster_fields,
            labels=self.labels,
            view=self.view,
            encryption_configuration=self.encryption_configuration,
            table_resource=self.table_resource,
            exists_ok=False,
        )
        self.log.info(
            'Table %s.%s.%s created successfully',
            table.project, table.dataset_id, table.table_id,
        )
    except Conflict:
        self.log.info('Table %s.%s already exists.', self.dataset_id, self.table_id)
Example #7
Source File: bigquery.py From airflow with Apache License 2.0
def execute(self, context: Any):
    hook = BigQueryHook(
        gcp_conn_id=self.gcp_conn_id,
        delegate_to=self.delegate_to,
    )

    job_id = self.job_id or f"airflow_{self.task_id}_{int(time())}"
    try:
        job = hook.insert_job(
            configuration=self.configuration,
            project_id=self.project_id,
            location=self.location,
            job_id=job_id,
        )
        # Start the job and wait for it to complete and get the result.
        job.result()
    except Conflict:
        job = hook.get_job(
            project_id=self.project_id,
            location=self.location,
            job_id=job_id,
        )
        # Get existing job and wait for it to be ready
        for time_to_wait in exponential_sleep_generator(initial=10, maximum=120):
            sleep(time_to_wait)
            job.reload()
            if job.done():
                break
    return job.job_id
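Note how this handler exploits the deterministic job_id: if a retried task submits the same ID, insert_job raises Conflict because a job with that ID already exists, and the operator reattaches to the running job, polling it with exponential backoff via exponential_sleep_generator rather than launching a duplicate.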
Example #8
Source File: test_system.py From python-storage with Apache License 2.0
def tearDownModule():
    errors = (exceptions.Conflict, exceptions.TooManyRequests)
    retry = RetryErrors(errors, max_tries=15)
    retry(_empty_bucket)(Config.TEST_BUCKET)
    retry(Config.TEST_BUCKET.delete)(force=True)
Example #9
Source File: test_system.py From python-storage with Apache License 2.0
def tearDownClass(cls):
    _empty_bucket(cls.bucket)
    errors = (exceptions.Conflict, exceptions.TooManyRequests)
    retry = RetryErrors(errors, max_tries=6)
    retry(cls.bucket.delete)(force=True)
Example #10
Source File: bigquery_utils.py From ethereum-etl-airflow with MIT License
def create_view(bigquery_client, sql, table_ref):
    table = bigquery.Table(table_ref)
    table.view_query = sql

    logging.info('Creating view: ' + json.dumps(table.to_api_repr()))

    try:
        table = bigquery_client.create_table(table)
    except Conflict:
        # https://cloud.google.com/bigquery/docs/managing-views
        table = bigquery_client.update_table(table, ['view_query'])
    assert table.table_id == table_ref.table_id
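Unlike the earlier examples, the Conflict branch here does not merely fetch the existing resource: update_table(table, ['view_query']) patches only the view_query field of the existing view, so repeated calls to create_view are idempotent and also propagate SQL changes to a view that already exists.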
Example #11
Source File: parse_logic.py From ethereum-etl-airflow with MIT License
def create_dataset(client, dataset_name, project=None):
    dataset = client.dataset(dataset_name, project=project)
    try:
        logging.info('Creating new dataset ...')
        dataset = client.create_dataset(dataset)
        logging.info('New dataset created: ' + dataset_name)
    except Conflict as error:
        logging.info('Dataset already exists')
    return dataset
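As an alternative to catching Conflict by hand, recent versions of the google-cloud-bigquery client expose an exists_ok flag (which Examples #1 and #6 hint at with exists_ok=False) that absorbs the 409 internally. A minimal sketch, with a placeholder dataset ID:

    from google.cloud import bigquery

    client = bigquery.Client()
    # exists_ok=True suppresses the Conflict raised for an existing dataset
    # and returns the existing dataset instead
    dataset = client.create_dataset("my-project.my_dataset", exists_ok=True)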