Python google.cloud.exceptions.Forbidden() Examples
The following are 11 code examples of google.cloud.exceptions.Forbidden(), collected from open-source projects.
Forbidden corresponds to an HTTP 403 response: the request was understood, but the authenticated credentials are not permitted to perform it. You may also want to check out all available functions and classes of the module google.cloud.exceptions.
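Before the examples, here is a minimal sketch of the common pattern they all share: a Google Cloud client call wrapped in a try/except that catches Forbidden when the caller lacks permission. The bucket name below is a hypothetical placeholder, not a real resource.

from google.cloud import storage
from google.cloud.exceptions import Forbidden

client = storage.Client()
try:
    # Any API call may raise Forbidden if the credentials lack permission.
    bucket = client.get_bucket("some-bucket-you-cannot-access")  # hypothetical name
except Forbidden:
    print("The current credentials are not allowed to read this bucket.")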
Example #1
Source File: __init__.py From jgscm with MIT License
def is_hidden(self, path):
    if path == "":
        return False
    if path.startswith("/"):
        path = path[1:]
    bucket_name, bucket_path = self._parse_path(path)
    try:
        bucket = self._get_bucket(bucket_name)
    except Forbidden:
        # Buckets we are not allowed to read are treated as hidden.
        return True
    if bucket is None:
        return True
    if self.hide_dotted_blobs and \
            self._get_blob_name(bucket_path).startswith("."):
        return True
    return False
Example #2
Source File: test_dbapi_cursor.py From python-bigquery with Apache License 2.0
def test_fetchall_w_bqstorage_client_fetch_error_no_fallback(self):
    from google.cloud.bigquery import dbapi
    from google.cloud.bigquery import table

    row_data = [table.Row([1.1, 1.2], {"foo": 0, "bar": 1})]
    mock_client = self._mock_client(rows=row_data)
    mock_bqstorage_client = self._mock_bqstorage_client(
        stream_count=1,
        rows=row_data,
    )
    no_access_error = exceptions.Forbidden("invalid credentials")
    mock_bqstorage_client.create_read_session.side_effect = no_access_error

    connection = dbapi.connect(
        client=mock_client,
        bqstorage_client=mock_bqstorage_client,
    )
    cursor = connection.cursor()
    cursor.execute("SELECT foo, bar FROM some_table")

    with six.assertRaisesRegex(self, exceptions.Forbidden, "invalid credentials"):
        cursor.fetchall()

    # the default client was not used
    mock_client.list_rows.assert_not_called()
Example #3
Source File: kaggle_gcp.py From docker-python with Apache License 2.0
def api_request(self, *args, **kwargs):
    """Wrap Connection.api_request in order to handle errors gracefully."""
    try:
        return super().api_request(*args, **kwargs)
    except Forbidden as e:
        msg = ("Permission denied using Kaggle's public BigQuery integration. "
               "Did you mean to select a BigQuery account in the Notebook Add-ons menu?")
        print(msg)
        Log.info(msg)
        raise e
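The same idea can be factored out of the Connection subclass. Below is a minimal, hypothetical sketch of a standalone decorator that prints a friendlier hint before re-raising Forbidden; the decorator name and message are illustrative, not part of any library.

import functools
from google.cloud.exceptions import Forbidden

def explain_forbidden(hint):
    """Decorator that prints `hint` when the wrapped call raises Forbidden."""
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except Forbidden:
                print(hint)  # surface a human-readable explanation
                raise        # then propagate the original exception
        return wrapper
    return decorator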
Example #4
Source File: test_base_google.py From airflow with Apache License 2.0
def __call__(self):
    """
    Raise a Forbidden until the count threshold has been crossed.
    Then return True.
    """
    if self.counter < self.count:
        self.counter += 1
        raise Forbidden(**self.kwargs)
    return True
Example #5
Source File: test_base_google.py From airflow with Apache License 2.0
def test_raise_exception_on_non_quota_exception(self):
    with self.assertRaisesRegex(Forbidden, "Daily Limit Exceeded"):
        message = "POST https://translation.googleapis.com/language/translate/v2: Daily Limit Exceeded"
        errors = [
            mock.MagicMock(details=mock.PropertyMock(return_value='dailyLimitExceeded'))
        ]
        _retryable_test_with_temporary_quota_retry(
            NoForbiddenAfterCount(5, message=message, errors=errors)
        )
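Examples #4 and #5 exercise a retry helper from Airflow's test suite. As a rough, hypothetical sketch of what such a helper can look like, the loop below retries a callable only when the Forbidden error carries a quota-related reason and re-raises otherwise (as happens with 'dailyLimitExceeded' above). The reason strings, attempt count, and function name are assumptions for illustration, not Airflow's actual implementation.

from google.api_core.exceptions import Forbidden

_QUOTA_REASONS = {"rateLimitExceeded", "userRateLimitExceeded"}  # assumed set

def retry_on_temporary_quota(func, attempts=5):
    """Retry `func` on quota-related Forbidden errors, up to `attempts` times."""
    for attempt in range(attempts):
        try:
            return func()
        except Forbidden as exc:
            reasons = {
                err.get("reason")
                for err in (exc.errors or [])
                if isinstance(err, dict)
            }
            if not (reasons & _QUOTA_REASONS) or attempt == attempts - 1:
                raise  # a non-quota Forbidden propagates immediately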
Example #6
Source File: http.py From open-raadsinformatie with MIT License
def get_bucket(self):
    """Get the bucket defined by 'bucket_name' from the storage_client.

    Throws a ValueError when bucket_name is not set. If the bucket does
    not exist in GCS, a new bucket will be created.
    """
    if self._bucket:
        return self._bucket

    if not self.bucket_name:
        raise ValueError("The 'bucket_name' needs to be set.")

    try:
        self._bucket = self.storage_client.get_bucket(self.bucket_name)
    except (exceptions.NotFound, exceptions.Forbidden):
        bucket = storage.Bucket(self.storage_client, name=self.bucket_name)
        bucket.versioning_enabled = True
        bucket.lifecycle_rules = [{
            'action': {'type': 'SetStorageClass', 'storageClass': 'NEARLINE'},
            'condition': {
                'numNewerVersions': 1,
                'matchesStorageClass': ['REGIONAL', 'STANDARD'],
                'age': 30
            }
        }]
        try:
            bucket.create(location='europe-west4')
        except exceptions.Conflict:
            raise
        self._bucket = self.storage_client.get_bucket(self.bucket_name)

    return self._bucket
Example #7
Source File: test_system.py From python-storage with Apache License 2.0
def test_blob_w_temporary_hold(self):
    from google.api_core import exceptions

    new_bucket_name = "w-tmp-hold" + unique_resource_id("-")
    self.assertRaises(
        exceptions.NotFound, Config.CLIENT.get_bucket, new_bucket_name
    )
    bucket = retry_429_503(Config.CLIENT.create_bucket)(new_bucket_name)
    self.case_buckets_to_delete.append(new_bucket_name)

    blob_name = "test-blob"
    payload = b"DEADBEEF"
    blob = bucket.blob(blob_name)
    blob.upload_from_string(payload)
    self.case_blobs_to_delete.append(blob)

    other = bucket.get_blob(blob_name)
    other.temporary_hold = True
    other.patch()

    self.assertTrue(other.temporary_hold)
    self.assertFalse(other.event_based_hold)
    self.assertIsNone(other.retention_expiration_time)

    # A blob under a temporary hold cannot be deleted.
    with self.assertRaises(exceptions.Forbidden):
        other.delete()

    other.temporary_hold = False
    other.patch()

    other.delete()
    self.case_blobs_to_delete.pop()
Example #8
Source File: test_system.py From python-storage with Apache License 2.0
def test_bucket_lock_retention_policy(self):
    import datetime
    from google.api_core import exceptions

    period_secs = 10
    new_bucket_name = "loc-ret-policy" + unique_resource_id("-")
    self.assertRaises(
        exceptions.NotFound, Config.CLIENT.get_bucket, new_bucket_name
    )
    bucket = retry_429_503(Config.CLIENT.create_bucket)(new_bucket_name)
    self.case_buckets_to_delete.append(new_bucket_name)

    bucket.retention_period = period_secs
    bucket.patch()
    self.assertEqual(bucket.retention_period, period_secs)
    self.assertIsInstance(bucket.retention_policy_effective_time, datetime.datetime)
    self.assertFalse(bucket.default_event_based_hold)
    self.assertFalse(bucket.retention_policy_locked)

    bucket.lock_retention_policy()
    bucket.reload()
    self.assertTrue(bucket.retention_policy_locked)

    # Once locked, the retention policy can no longer be removed.
    bucket.retention_period = None
    with self.assertRaises(exceptions.Forbidden):
        bucket.patch()
Example #9
Source File: test_system.py From python-storage with Apache License 2.0
def test_bucket_w_retention_period(self):
    import datetime
    from google.api_core import exceptions

    period_secs = 10
    new_bucket_name = "w-retention-period" + unique_resource_id("-")
    bucket = retry_429_503(Config.CLIENT.create_bucket)(new_bucket_name)
    self.case_buckets_to_delete.append(new_bucket_name)

    bucket.retention_period = period_secs
    bucket.default_event_based_hold = False
    bucket.patch()

    self.assertEqual(bucket.retention_period, period_secs)
    self.assertIsInstance(bucket.retention_policy_effective_time, datetime.datetime)
    self.assertFalse(bucket.default_event_based_hold)
    self.assertFalse(bucket.retention_policy_locked)

    blob_name = "test-blob"
    payload = b"DEADBEEF"
    blob = bucket.blob(blob_name)
    blob.upload_from_string(payload)
    self.case_blobs_to_delete.append(blob)

    other = bucket.get_blob(blob_name)

    self.assertFalse(other.event_based_hold)
    self.assertFalse(other.temporary_hold)
    self.assertIsInstance(other.retention_expiration_time, datetime.datetime)

    with self.assertRaises(exceptions.Forbidden):
        other.delete()

    bucket.retention_period = None
    bucket.patch()

    self.assertIsNone(bucket.retention_period)
    self.assertIsNone(bucket.retention_policy_effective_time)
    self.assertFalse(bucket.default_event_based_hold)
    self.assertFalse(bucket.retention_policy_locked)

    other.reload()

    self.assertFalse(other.event_based_hold)
    self.assertFalse(other.temporary_hold)
    self.assertIsNone(other.retention_expiration_time)

    other.delete()
    self.case_blobs_to_delete.pop()
Example #10
Source File: test_system.py From python-storage with Apache License 2.0
def test_bucket_w_default_event_based_hold(self):
    from google.api_core import exceptions

    new_bucket_name = "w-def-ebh" + unique_resource_id("-")
    self.assertRaises(
        exceptions.NotFound, Config.CLIENT.get_bucket, new_bucket_name
    )
    bucket = retry_429_503(Config.CLIENT.create_bucket)(new_bucket_name)
    self.case_buckets_to_delete.append(new_bucket_name)

    bucket.default_event_based_hold = True
    bucket.patch()

    self.assertTrue(bucket.default_event_based_hold)
    self.assertIsNone(bucket.retention_period)
    self.assertIsNone(bucket.retention_policy_effective_time)
    self.assertFalse(bucket.retention_policy_locked)

    blob_name = "test-blob"
    payload = b"DEADBEEF"
    blob = bucket.blob(blob_name)
    blob.upload_from_string(payload)
    self.case_blobs_to_delete.append(blob)

    other = bucket.get_blob(blob_name)

    self.assertTrue(other.event_based_hold)
    self.assertFalse(other.temporary_hold)
    self.assertIsNone(other.retention_expiration_time)

    with self.assertRaises(exceptions.Forbidden):
        other.delete()

    other.event_based_hold = False
    other.patch()
    other.delete()

    bucket.default_event_based_hold = False
    bucket.patch()

    self.assertFalse(bucket.default_event_based_hold)
    self.assertIsNone(bucket.retention_period)
    self.assertIsNone(bucket.retention_policy_effective_time)
    self.assertFalse(bucket.retention_policy_locked)

    blob.upload_from_string(payload)
    self.assertFalse(blob.event_based_hold)
    self.assertFalse(blob.temporary_hold)
    self.assertIsNone(blob.retention_expiration_time)

    blob.delete()
    self.case_blobs_to_delete.pop()
Example #11
Source File: bucket_mover_tester.py From professional-services with Apache License 2.0
def set_up_test_bucket(config, parsed_args):
    """Sets up the test bucket, adds objects and assigns various settings.

    It makes sure none of the buckets already exist, and then runs the main
    bucket mover service.

    Args:
        config: A Configuration object with all of the config values needed
            for the script to run
        parsed_args: the configargparser parsing of command line options

    Returns:
        The name of the randomly generated bucket
    """
    random_bucket_name = _get_random_bucket_name()
    config.temp_bucket_name = random_bucket_name + '-temp'

    with yaspin(text='TESTING: Cleanup source bucket') as spinner:
        try:
            _check_bucket_exists_and_delete(
                spinner, config.source_storage_client, random_bucket_name,
                config.source_project)
        except exceptions.Forbidden:
            try:
                # Maybe the bucket already exists in the target project.
                _check_bucket_exists_and_delete(
                    spinner, config.target_storage_client, random_bucket_name,
                    config.target_project)
            except exceptions.Forbidden:
                spinner.write('TESTING: Not allowed to access bucket {}'.format(
                    random_bucket_name))
                spinner.fail('X')
                raise SystemExit()

        source_bucket = create_bucket(config.source_storage_client,
                                      random_bucket_name, parsed_args)
        spinner.write(
            '{} TESTING: Bucket {} created in source project {}'.format(
                _CHECKMARK, random_bucket_name, config.source_project))

    _upload_blobs(source_bucket)

    with yaspin(text='TESTING: Cleanup target bucket') as spinner:
        _check_bucket_exists_and_delete(spinner, config.target_storage_client,
                                        config.temp_bucket_name,
                                        config.target_project)
    print()
    return random_bucket_name