Python google.cloud.exceptions.Conflict() Examples

The following are 23 code examples of google.cloud.exceptions.Conflict(). You can go to the original project or source file by following the links above each example. You may also want to check out all available functions and classes of the google.cloud.exceptions module.
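google.cloud.exceptions.Conflict corresponds to an HTTP 409 response from a Google Cloud API, and it is most often raised when creating a resource (a bucket, dataset, table, topic, or subscription) that already exists. Before the examples, here is a minimal sketch of the idempotent-creation pattern most of them follow; the client setup and bucket name are illustrative assumptions, not taken from any of the projects below.

from google.cloud import storage
from google.cloud.exceptions import Conflict

def ensure_bucket(client: storage.Client, bucket_name: str) -> storage.Bucket:
    """Create the bucket if it does not exist, otherwise return the existing one."""
    try:
        # create_bucket raises Conflict (HTTP 409) if the name is already taken.
        return client.create_bucket(bucket_name)
    except Conflict:
        return client.get_bucket(bucket_name)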
Example #1
Source File: pubsub.py    From turbinia with Apache License 2.0
def setup_subscriber(self):
    """Set up the pubsub subscriber."""
    config.LoadConfig()
    self.subscriber = pubsub.SubscriberClient()
    subscription_path = self.subscriber.subscription_path(
        config.TURBINIA_PROJECT, self.topic_name)
    if not self.topic_path:
      self.topic_path = self.subscriber.topic_path(
          config.TURBINIA_PROJECT, self.topic_name)
    try:
      log.debug(
          'Trying to create subscription {0:s} on topic {1:s}'.format(
              subscription_path, self.topic_path))
      self.subscriber.create_subscription(subscription_path, self.topic_path)
    except exceptions.Conflict:
      log.debug('Subscription {0:s} already exists.'.format(subscription_path))

    log.debug('Setup PubSub Subscription {0:s}'.format(subscription_path))
    self.subscription = self.subscriber.subscribe(
        subscription_path, self._callback) 
Example #2
Source File: test_import_measures.py    From openprescribing with MIT License
def set_up_bq():
    """Set up BQ datasets and tables."""

    try:
        Client("measures").create_dataset()
    except Conflict:
        pass

    client = Client("hscic")
    client.get_or_create_table("ccgs", schemas.CCG_SCHEMA)
    client.get_or_create_table("practices", schemas.PRACTICE_SCHEMA)
    client.get_or_create_table("normalised_prescribing", schemas.PRESCRIBING_SCHEMA)
    client.get_or_create_table(
        "practice_statistics", schemas.PRACTICE_STATISTICS_SCHEMA
    )
    client.get_or_create_table("presentation", schemas.PRESENTATION_SCHEMA) 
Example #3
Source File: google.py    From cloudstorage with MIT License
def create_container(
        self, container_name: str, acl: str = None, meta_data: MetaData = None
    ) -> Container:
        if meta_data:
            logger.warning(messages.OPTION_NOT_SUPPORTED, "meta_data")

        try:
            bucket = self.client.create_bucket(container_name)
        except Conflict:
            logger.debug(messages.CONTAINER_EXISTS, container_name)
            bucket = self._get_bucket(container_name)
        except ValueError as err:
            raise CloudStorageError(str(err))

        if acl:
            bucket.acl.save_predefined(acl)

        return self._make_container(bucket) 
Example #4
Source File: test_blob.py    From python-storage with Apache License 2.0
def test_upload_from_file_failure(self):
        import requests

        from google.resumable_media import InvalidResponse
        from google.cloud import exceptions

        message = "Someone is already in this spot."
        response = requests.Response()
        response.status_code = http_client.CONFLICT
        response.request = requests.Request("POST", "http://example.com").prepare()
        side_effect = InvalidResponse(response, message)

        with self.assertRaises(exceptions.Conflict) as exc_info:
            self._upload_from_file_helper(side_effect=side_effect)

        self.assertIn(message, exc_info.exception.message)
        self.assertEqual(exc_info.exception.errors, []) 
Example #5
Source File: tables_test.py    From upvote with Apache License 2.0
def testMissingTable_Conflict(self):

    mock_client = mock.Mock(spec=tables.bigquery.Client)
    mock_client.insert_rows.side_effect = [exceptions.NotFound('OMG'), []]
    mock_client.get_table.side_effect = exceptions.NotFound('OMG')
    mock_client.create_table.side_effect = exceptions.Conflict('WTF')
    self.Patch(tables.bigquery, 'Client', return_value=mock_client)

    tables._SendToBigQuery(TEST_TABLE, self.row_dict)

    insert_rows_call = mock.call(
        mock.ANY, [self.row_dict], selected_fields=TEST_TABLE.schema,
        row_ids=[self.row_id])

    mock_client.insert_rows.assert_has_calls([insert_rows_call] * 2)
    mock_client.get_dataset.assert_called_once()
    mock_client.create_dataset.assert_not_called()
    mock_client.get_table.assert_called_once()
    mock_client.create_table.assert_called_once() 
Example #6
Source File: tables_test.py    From upvote with Apache License 2.0
def testMissingDataset_Conflict(self):

    mock_client = mock.Mock(spec=tables.bigquery.Client)
    mock_client.insert_rows.side_effect = [exceptions.NotFound('OMG'), []]
    mock_client.get_dataset.side_effect = exceptions.NotFound('OMG')
    mock_client.create_dataset.side_effect = exceptions.Conflict('WTF')
    mock_client.get_table.side_effect = exceptions.NotFound('OMG')
    self.Patch(tables.bigquery, 'Client', return_value=mock_client)

    tables._SendToBigQuery(TEST_TABLE, self.row_dict)

    insert_rows_call = mock.call(
        mock.ANY, [self.row_dict], selected_fields=TEST_TABLE.schema,
        row_ids=[self.row_id])

    mock_client.insert_rows.assert_has_calls([insert_rows_call] * 2)
    mock_client.get_dataset.assert_called_once()
    mock_client.create_dataset.assert_called_once()
    mock_client.get_table.assert_called_once()
    mock_client.create_table.assert_called_once() 
Example #7
Source File: storage.py    From loaner with Apache License 2.0
def insert_bucket(self, bucket_name=None):
    """Inserts a Google Cloud Storage Bucket object.

    Args:
      bucket_name: str, the name of the Google Cloud Storage Bucket to insert.

    Returns:
      A dictionary object representing a Google Cloud Storage Bucket.
          type: google.cloud.storage.bucket.Bucket

    Raises:
      AlreadyExistsError: when trying to insert a bucket that already exists.
    """
    bucket_name = bucket_name or self._config.bucket
    try:
      new_bucket = self._client.create_bucket(bucket_name)
    except exceptions.Conflict as err:
      raise AlreadyExistsError(
          'the Google Cloud Storage Bucket with name {!r} already exists: '
          '{}'.format(bucket_name, err))

    logging.debug(
        'The Google Cloud Storage Bucket %r has been created for project '
        '%r.', bucket_name, self._config.project)
    return new_bucket 
Example #8
Source File: googleJobStore.py    From toil with Apache License 2.0
def destroy(self):
        try:
            self.bucket = self.storageClient.get_bucket(self.bucketName)

        except exceptions.NotFound:
            # just return if not connected to physical storage; needed for idempotency
            return
        try:
            self.bucket.delete(force=True)
            # delete(force=True) raises ValueError if the bucket holds more than 256 objects, so delete blobs manually
        except ValueError:
            self.bucket.delete_blobs(self.bucket.list_blobs())
            self.bucket.delete()
            # if ^ raises a google.cloud.exceptions.Conflict, a deletion retry mechanism would be needed
            # (see the sketch after this example).

        # google freaks out if we call delete multiple times on the bucket obj, so after success
        # just set to None.
        self.bucket = None 
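The comment in destroy() above notes that a retry mechanism would be needed if the final bucket.delete() raised Conflict. A hedged sketch of what that might look like follows; the helper name and retry parameters are assumptions and are not part of toil.

import time
from google.cloud import exceptions

def delete_bucket_with_retry(bucket, attempts=5, delay=2.0):
    """Delete a bucket, retrying a few times if the API answers with Conflict (409)."""
    for attempt in range(attempts):
        try:
            bucket.delete()
            return
        except exceptions.Conflict:
            if attempt == attempts - 1:
                raise
            # The bucket may still report contained objects; wait and retry.
            time.sleep(delay)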
Example #9
Source File: googleJobStore.py    From toil with Apache License 2.0
def initialize(self, config=None):
        try:
            self.bucket = self.storageClient.create_bucket(self.bucketName)
        except exceptions.Conflict:
            raise JobStoreExistsException(self.locator)
        super(GoogleJobStore, self).initialize(config)

        # set up server-side encryption after we set up config in super
        if self.config.sseKey is not None:
            with open(self.config.sseKey) as f:
                self.sseKey = compat_bytes(f.read())
                assert len(self.sseKey) == 32 
Example #10
Source File: create_bq_measure_views.py    From openprescribing with MIT License
def recreate_table(self, client, table_name):
        logger.info("recreate_table: %s", table_name)
        base_path = os.path.join(
            settings.APPS_ROOT, "frontend", "management", "commands", "measure_sql"
        )

        path = os.path.join(base_path, table_name + ".sql")
        with open(path, "r") as sql_file:
            sql = sql_file.read()

        try:
            client.create_table_with_view(table_name, sql, False)
        except Conflict:
            client.delete_table(table_name)
            client.create_table_with_view(table_name, sql, False) 
Example #11
Source File: bigquery.py    From openprescribing with MIT License
def get_or_create_table(self, table_id, schema):
        try:
            table = self.create_table(table_id, schema)
        except Conflict:
            table = self.get_table(table_id)
        return table 
Example #12
Source File: google.py    From cloudstorage with MIT License
def delete_container(self, container: Container) -> None:
        bucket = self._get_bucket(container.name)

        try:
            bucket.delete()
        except Conflict as err:
            if err.code == HTTPStatus.CONFLICT:
                raise IsNotEmptyError(messages.CONTAINER_NOT_EMPTY % bucket.name)
            raise 
Example #13
Source File: persistor.py    From Rasa_NLU_Chi with Apache License 2.0
def _ensure_bucket_exists(self, bucket_name):
        from google.cloud import exceptions

        try:
            self.storage_client.create_bucket(bucket_name)
        except exceptions.Conflict:
            # bucket exists
            pass 
Example #14
Source File: persistor.py    From rasa-for-botfront with Apache License 2.0
def _ensure_bucket_exists(self, bucket_name: Text) -> None:
        from google.cloud import exceptions

        try:
            self.storage_client.create_bucket(bucket_name)
        except exceptions.Conflict:
            # bucket exists
            pass 
Example #15
Source File: test_system.py    From python-storage with Apache License 2.0
def tearDownClass(cls):
        _empty_bucket(cls.bucket)
        errors = (exceptions.Conflict, exceptions.TooManyRequests)
        retry = RetryErrors(errors, max_tries=6)
        retry(cls.bucket.delete)(force=True) 
Example #16
Source File: test_system.py    From python-storage with Apache License 2.0
def tearDownModule():
    errors = (exceptions.Conflict, exceptions.TooManyRequests)
    retry = RetryErrors(errors, max_tries=15)
    retry(_empty_bucket)(Config.TEST_BUCKET)
    retry(Config.TEST_BUCKET.delete)(force=True) 
Example #17
Source File: test_client.py    From python-storage with Apache License 2.0
def test_create_bucket_w_conflict(self):
        from google.cloud.exceptions import Conflict

        project = "PROJECT"
        user_project = "USER_PROJECT"
        other_project = "OTHER_PROJECT"
        credentials = _make_credentials()
        client = self._make_one(project=project, credentials=credentials)
        connection = _make_connection()
        client._base_connection = connection
        connection.api_request.side_effect = Conflict("testing")

        bucket_name = "bucket-name"
        data = {"name": bucket_name}

        with self.assertRaises(Conflict):
            client.create_bucket(
                bucket_name, project=other_project, user_project=user_project
            )

        connection.api_request.assert_called_once_with(
            method="POST",
            path="/b",
            query_params={"project": other_project, "userProject": user_project},
            data=data,
            _target_object=mock.ANY,
            timeout=self._get_default_timeout(),
        ) 
Example #18
Source File: pubsub.py    From turbinia with Apache License 2.0
def setup_publisher(self):
    """Set up the pubsub publisher."""
    config.LoadConfig()
    self.publisher = pubsub.PublisherClient()
    self.topic_path = self.publisher.topic_path(
        config.TURBINIA_PROJECT, self.topic_name)
    try:
      log.debug('Trying to create pubsub topic {0:s}'.format(self.topic_path))
      self.publisher.create_topic(self.topic_path)
    except exceptions.Conflict:
      log.debug('PubSub topic {0:s} already exists.'.format(self.topic_path))
    log.debug('Setup PubSub publisher at {0:s}'.format(self.topic_path)) 
Example #19
Source File: persistor.py    From rasa_nlu with Apache License 2.0
def _ensure_bucket_exists(self, bucket_name: Text) -> None:
        from google.cloud import exceptions

        try:
            self.storage_client.create_bucket(bucket_name)
        except exceptions.Conflict:
            # bucket exists
            pass 
Example #20
Source File: http.py    From open-raadsinformatie with MIT License
def get_bucket(self):
        """Get the bucket defined by 'bucket_name' from the storage_client.
        Throws a ValueError when bucket_name is not set. If the bucket does not
        exist in GCS, a new bucket will be created.
        """
        if self._bucket:
            return self._bucket

        if not self.bucket_name:
            raise ValueError("The 'bucket_name' needs to be set.")

        try:
            self._bucket = self.storage_client.get_bucket(self.bucket_name)
        except (exceptions.NotFound, exceptions.Forbidden):
            bucket = storage.Bucket(self.storage_client, name=self.bucket_name)
            bucket.versioning_enabled = True
            bucket.lifecycle_rules = [{
                'action': {'type': 'SetStorageClass', 'storageClass': 'NEARLINE'},
                'condition': {
                    'numNewerVersions': 1,
                    'matchesStorageClass': ['REGIONAL', 'STANDARD'],
                    'age': 30
                }
            }]
            try:
                bucket.create(location='europe-west4')
            except exceptions.Conflict:
                raise
            self._bucket = self.storage_client.get_bucket(self.bucket_name)

        return self._bucket 
Example #21
Source File: storage_test.py    From loaner with Apache License 2.0
def test_cloud_storage_api_insert_bucket__already_exists_error(self):
    """Test the Cloud Storage Bucket creation for an existing bucket."""
    test_cloud_storage_api = storage.CloudStorageAPI(self.config, mock.Mock())
    test_cloud_storage_api._client.create_bucket.side_effect = (
        exceptions.Conflict('This bucket already exists.'))
    with self.assertRaises(storage.AlreadyExistsError):
      test_cloud_storage_api.insert_bucket() 
Example #22
Source File: test_bigquery.py    From airflow with Apache License 2.0
def test_execute_idempotency(self, mock_hook, mock_sleep_generator):
        job_id = "123456"

        configuration = {
            "query": {
                "query": "SELECT * FROM any",
                "useLegacySql": False,
            }
        }

        class MockJob:
            _call_no = 0
            _done = False

            def __init__(self):
                pass

            def reload(self):
                if MockJob._call_no == 3:
                    MockJob._done = True
                else:
                    MockJob._call_no += 1

            def done(self):
                return MockJob._done

            @property
            def job_id(self):
                return job_id

        mock_hook.return_value.insert_job.return_value.result.side_effect = Conflict("any")
        mock_sleep_generator.return_value = [0, 0, 0, 0, 0]
        mock_hook.return_value.get_job.return_value = MockJob()

        op = BigQueryInsertJobOperator(
            task_id="insert_query_job",
            configuration=configuration,
            location=TEST_DATASET_LOCATION,
            job_id=job_id,
            project_id=TEST_GCP_PROJECT_ID
        )
        result = op.execute({})

        assert MockJob._call_no == 3

        mock_hook.return_value.get_job.assert_called_once_with(
            location=TEST_DATASET_LOCATION,
            job_id=job_id,
            project_id=TEST_GCP_PROJECT_ID,
        )

        assert result == job_id 
Example #23
Source File: create_bq_measure_views.py    From openprescribing with MIT License
def handle(self, *args, **kwargs):
        client = Client("hscic")

        try:
            client.create_storage_backed_table(
                "raw_prescribing_v1",
                RAW_PRESCRIBING_SCHEMA_V1,
                "hscic/prescribing_v1/20*Detailed_Prescribing_Information.csv",
            )
        except Conflict:
            pass

        try:
            client.create_storage_backed_table(
                "raw_prescribing_v2",
                RAW_PRESCRIBING_SCHEMA_V2,
                # This pattern may change once the data is published via the
                # new Open Data Portal.
                "hscic/prescribing_v2/20*.csv",
            )
        except Conflict:
            pass

        for table_name in [
            "all_prescribing",
            "normalised_prescribing",
            "normalised_prescribing_standard",
            "raw_prescribing_normalised",
        ]:
            self.recreate_table(client, table_name)

        client = Client("measures")

        for table_name in [
            "dmd_objs_with_form_route",
            "dmd_objs_hospital_only",
            "opioid_total_ome",
            "practice_data_all_low_priority",
            "pregabalin_total_mg",
            "vw__median_price_per_unit",
            "vw__ghost_generic_measure",
            "vw__herbal_list",
            # This references pregabalin_total_mg, so must come afterwards
            "gaba_total_ddd",
        ]:
            self.recreate_table(client, table_name)

        # cmpa_products is a table that has been created and managed by Rich.
        schema = build_schema(
            ("bnf_code", "STRING"), ("bnf_name", "STRING"), ("type", "STRING")
        )
        client.get_or_create_table("cmpa_products", schema)