Python google.api_core.exceptions.NotFound() Examples
The following are 30 code examples of google.api_core.exceptions.NotFound(). You can go to the original project or source file by following the links above each example. You may also want to check out all available functions/classes of the module google.api_core.exceptions, or try the search function.
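All of the examples below follow the same shape: make a request through a Google Cloud client and treat a NotFound response as an expected outcome (skip, clean up, or create the missing resource) rather than letting it propagate. Here is a minimal sketch of that pattern, assuming the google-cloud-storage package and configured credentials; the bucket name is hypothetical:

from google.api_core.exceptions import NotFound
from google.cloud import storage


def bucket_exists(bucket_name):
    """Return True if the bucket exists, False when the API answers 404."""
    client = storage.Client()
    try:
        client.get_bucket(bucket_name)
        return True
    except NotFound:
        # A 404 from the service surfaces as NotFound; treat it as absence.
        return False


print(bucket_exists("my-example-bucket"))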
Example #1
Source File: main.py From professional-services with Apache License 2.0
def get(self):
    """Fetches list of files from GCS to send to front-end.

    Returns:
        Array of objects holding object name and type for audio files.
    """
    try:
        gcs_client = gcs_transcript_utils.authenticate_gcs()
        bucket_list = list(gcs_client.list_buckets())
        processed_audio_bucket = gcs_transcript_utils.find_bucket_with_prefix(
            bucket_list, 'processed-audio-files')
        files = gcs_transcript_utils.get_files(gcs_client,
                                               processed_audio_bucket)
        return jsonify(files=files)
    except NotFound as e:
        logging.error(e)
        return jsonify(e.to_dict())
Example #2
Source File: test_secret_manager.py From airflow with Apache License 2.0
def test_get_conn_uri_non_existent_key(self, mock_client_callable, mock_get_creds):
    mock_get_creds.return_value = CREDENTIALS, PROJECT_ID
    mock_client = mock.MagicMock()
    mock_client_callable.return_value = mock_client
    # The requested secret id or secret version does not exist
    mock_client.access_secret_version.side_effect = NotFound('test-msg')
    secrets_manager_backend = CloudSecretManagerBackend(
        connections_prefix=CONNECTIONS_PREFIX)
    secret_id = secrets_manager_backend.build_path(CONNECTIONS_PREFIX, CONN_ID, SEP)
    with self.assertLogs(secrets_manager_backend.client.log, level="ERROR") as log_output:
        self.assertIsNone(secrets_manager_backend.get_conn_uri(conn_id=CONN_ID))
        self.assertEqual([], secrets_manager_backend.get_connections(conn_id=CONN_ID))
        self.assertRegex(
            log_output.output[0],
            f"GCP API Call Error \\(NotFound\\): Secret ID {secret_id} not found"
        )
Example #3
Source File: sqlalchemy_bigquery.py From pybigquery with MIT License
def _get_table(self, connection, table_name, schema=None):
    if isinstance(connection, Engine):
        connection = connection.connect()

    project, dataset, table_name_prepared = self._split_table_name(table_name)
    if dataset is None:
        if schema is not None:
            dataset = schema
        elif self.dataset_id:
            dataset = self.dataset_id

    table = connection.connection._client.dataset(
        dataset, project=project).table(table_name_prepared)
    try:
        t = connection.connection._client.get_table(table)
    except NotFound as e:
        raise NoSuchTableError(table_name)
    return t
Example #4
Source File: conftest.py From python-docs-samples with Apache License 2.0
def job(tenant, company):
    post_unique_id = "TEST_POST_{}".format(uuid.uuid4().hex)[:20]
    # create a temporary job
    job_name = job_search_create_job.create_job(
        PROJECT_ID, tenant, company, post_unique_id, "www.jobUrl.com"
    )
    # extract job id
    job_id = job_name.split("/")[-1]
    yield job_id

    try:
        job_search_delete_job.delete_job(PROJECT_ID, tenant, job_id)
    except NotFound as e:
        print("Ignoring NotFound upon cleanup, details: {}".format(e))
Example #5
Source File: client.py From rele with Apache License 2.0
def create_subscription(self, subscription, topic):
    """Handles creating the subscription when it does not exist.

    This makes it easier to deploy a worker and forget about the
    subscription side of things. The subscription must have a topic to
    subscribe to, which means that the topic must be created manually
    before the worker is started.

    :param subscription: str Subscription name
    :param topic: str Topic name to subscribe
    """
    subscription_path = self._client.subscription_path(
        self._gc_project_id, subscription
    )
    topic_path = self._client.topic_path(self._gc_project_id, topic)

    with suppress(exceptions.AlreadyExists):
        try:
            self._client.create_subscription(
                name=subscription_path,
                topic=topic_path,
                ack_deadline_seconds=self._ack_deadline,
            )
        except exceptions.NotFound:
            logger.error("Cannot subscribe to a topic that does not exist.")
Example #6
Source File: gcs_store.py From polystores with MIT License
def download_file(self, blob, local_path, bucket_name=None, use_basename=True):
    """
    Downloads a file from Google Cloud Storage.

    Args:
        blob: `str`. blob to download.
        local_path: `str`. the path to download to.
        bucket_name: `str`. the name of the bucket.
        use_basename: `bool`. whether or not to use the basename of the blob.
    """
    if not bucket_name:
        bucket_name, blob = self.parse_gcs_url(blob)

    local_path = os.path.abspath(local_path)

    if use_basename:
        local_path = append_basename(local_path, blob)

    check_dirname_exists(local_path)

    try:
        blob = self.get_blob(blob=blob, bucket_name=bucket_name)
        blob.download_to_filename(local_path)
    except (NotFound, GoogleAPIError) as e:
        raise PolyaxonStoresException(e)
Example #7
Source File: gbq.py From pandas-gbq with BSD 3-Clause "New" or "Revised" License
def exists(self, table_id):
    """Check if a table exists in Google BigQuery

    Parameters
    ----------
    table_id : str
        Name of table to be verified

    Returns
    -------
    boolean
        true if table exists, otherwise false
    """
    from google.api_core.exceptions import NotFound

    table_ref = self.client.dataset(self.dataset_id).table(table_id)
    try:
        self.client.get_table(table_ref)
        return True
    except NotFound:
        return False
    except self.http_error as ex:
        self.process_http_error(ex)
Example #8
Source File: gbq.py From pandas-gbq with BSD 3-Clause "New" or "Revised" License
def delete(self, table_id):
    """Delete a table in Google BigQuery

    Parameters
    ----------
    table_id : str
        Name of table to be deleted
    """
    from google.api_core.exceptions import NotFound

    if not self.exists(table_id):
        raise NotFoundException("Table does not exist")

    table_ref = self.client.dataset(self.dataset_id).table(table_id)
    try:
        self.client.delete_table(table_ref)
    except NotFound:
        # Ignore 404 error which may occur if table already deleted
        pass
    except self.http_error as ex:
        self.process_http_error(ex)
Example #9
Source File: delete_queue_test.py From python-docs-samples with Apache License 2.0
def test_queue():
    client = tasks_v2.CloudTasksClient()
    parent = client.location_path(TEST_PROJECT_ID, TEST_LOCATION)
    queue = {
        # The fully qualified path to the queue
        'name': client.queue_path(
            TEST_PROJECT_ID, TEST_LOCATION, TEST_QUEUE_NAME),
    }
    q = client.create_queue(parent, queue)

    yield q

    try:
        # Attempt to delete the queue in case the sample failed.
        client.delete_queue(q.name)
    except exceptions.NotFound:
        # The queue was already successfully deleted.
        print('Queue already deleted successfully')
Example #10
Source File: snippets_test.py From python-docs-samples with Apache License 2.0
def secret(client, project_id):
    parent = client.project_path(project_id)
    secret_id = 'python-secret-{}'.format(uuid.uuid4())

    print('creating secret {}'.format(secret_id))
    secret = client.create_secret(parent, secret_id, {
        'replication': {
            'automatic': {},
        },
    })

    yield project_id, secret_id

    print('deleting secret {}'.format(secret_id))
    try:
        client.delete_secret(secret.name)
    except exceptions.NotFound:
        # Secret was already deleted, probably in the test
        pass
Example #11
Source File: conftest.py From python-docs-samples with Apache License 2.0
def tenant():
    tenant_ext_unique_id = "TEST_TENANT_{}".format(uuid.uuid4())
    # create a temporary tenant
    tenant_name = job_search_create_tenant.create_tenant(
        PROJECT_ID, tenant_ext_unique_id
    )
    # extract tenant id
    tenant_id = tenant_name.split("/")[-1]
    yield tenant_id

    try:
        job_search_delete_tenant.delete_tenant(PROJECT_ID, tenant_id)
    except NotFound as e:
        print("Ignoring NotFound upon cleanup, details: {}".format(e))
Example #12
Source File: fixtures.py From python-docs-samples with Apache License 2.0
def test_topic():
    pubsub_client = pubsub.PublisherClient()
    try:
        topic = manager.create_iot_topic(project_id, topic_id)
    except AlreadyExists as e:
        print("The topic already exists, detail: {}".format(str(e)))
        # Ignore the error, fetch the topic
        topic = pubsub_client.get_topic(
            pubsub_client.topic_path(project_id, topic_id))

    yield topic

    topic_path = pubsub_client.topic_path(project_id, topic_id)
    try:
        pubsub_client.delete_topic(topic_path)
    except NotFound as e:
        # We ignore this case.
        print("The topic doesn't exist: detail: {}".format(str(e)))
Example #13
Source File: fixtures.py From python-docs-samples with Apache License 2.0
def test_subscription(test_topic):
    subscriber = pubsub.SubscriberClient()
    subscription_path = subscriber.subscription_path(
        project_id, subscription_name)
    try:
        subscription = subscriber.create_subscription(
            subscription_path, test_topic.name)
    except AlreadyExists as e:
        print("The subscription already exists, detail: {}".format(str(e)))
        # Ignore the error, fetch the subscription
        subscription = subscriber.get_subscription(subscription_path)

    yield subscription

    try:
        subscriber.delete_subscription(subscription_path)
    except NotFound as e:
        # We ignore this case.
        print("The subscription doesn't exist: detail: {}".format(str(e)))
Example #14
Source File: fixtures.py From python-docs-samples with Apache License 2.0
def test_registry_id(test_topic):
    @backoff.on_exception(backoff.expo, HttpError, max_time=60)
    def create_registry():
        manager.open_registry(
            service_account_json, project_id, cloud_region,
            test_topic.name, registry_id)
    create_registry()

    yield registry_id

    @backoff.on_exception(backoff.expo, HttpError, max_time=60)
    def delete_registry():
        try:
            manager.delete_registry(
                service_account_json, project_id, cloud_region, registry_id)
        except NotFound as e:
            # We ignore this case.
            print("The registry doesn't exist: detail: {}".format(str(e)))
    delete_registry()
Example #15
Source File: dataset_test.py From python-docs-samples with Apache License 2.0
def ensure_dataset_ready():
    dataset = None
    name = STATIC_DATASET
    try:
        dataset = automl_tables_dataset.get_dataset(PROJECT, REGION, name)
    except exceptions.NotFound:
        dataset = automl_tables_dataset.create_dataset(PROJECT, REGION, name)

    if dataset.example_count is None or dataset.example_count == 0:
        automl_tables_dataset.import_data(PROJECT, REGION, name, GCS_DATASET)
        dataset = automl_tables_dataset.get_dataset(PROJECT, REGION, name)

    automl_tables_dataset.update_dataset(
        PROJECT,
        REGION,
        dataset.display_name,
        target_column_spec_name="Deposit",
    )

    return dataset
Example #16
Source File: engine_client.py From Cirq with Apache License 2.0
def get_current_calibration(self, project_id: str, processor_id: str
                            ) -> Optional[qtypes.QuantumCalibration]:
    """Returns the current quantum calibration for a processor if it has one.

    Params:
        project_id: A project_id of the parent Google Cloud Project.
        processor_id: The processor unique identifier.

    Returns:
        The quantum calibration or None if there is no current calibration.
    """
    try:
        return self._make_request(
            lambda: self.grpc_client.get_quantum_calibration(
                self._processor_name_from_ids(project_id, processor_id) +
                '/calibrations/current'))
    except EngineException as err:
        if isinstance(err.__cause__, NotFound):
            return None
        raise
Example #17
Source File: engine_client.py From Cirq with Apache License 2.0
def get_reservation(self, project_id: str, processor_id: str,
                    reservation_id: str):
    """Gets a quantum reservation from the engine.

    Params:
        project_id: A project_id of the parent Google Cloud Project.
        processor_id: The processor unique identifier.
        reservation_id: Unique ID of the reservation in the parent project.
    """
    try:
        name = self._reservation_name_from_ids(project_id, processor_id,
                                               reservation_id)
        return self._make_request(
            lambda: self.grpc_client.get_quantum_reservation(name=name))
    except EngineException as err:
        if isinstance(err.__cause__, NotFound):
            return None
        raise
Example #18
Source File: engine_client_test.py From Cirq with Apache License 2.0
def test_get_current_calibration_does_not_exist(client_constructor):
    grpc_client = setup_mock_(client_constructor)

    grpc_client.get_quantum_calibration.side_effect = exceptions.NotFound(
        'not found')

    client = EngineClient()
    assert client.get_current_calibration('proj', 'processor0') is None
    assert grpc_client.get_quantum_calibration.call_args[0] == (
        'projects/proj/processors/processor0/calibrations/current',)
Example #19
Source File: test_blob.py From python-storage with Apache License 2.0
def test_download_to_file_with_failure(self):
    import requests
    from google.resumable_media import InvalidResponse
    from google.cloud import exceptions

    raw_response = requests.Response()
    raw_response.status_code = http_client.NOT_FOUND
    raw_request = requests.Request("GET", "http://example.com")
    raw_response.request = raw_request.prepare()
    grmp_response = InvalidResponse(raw_response)

    blob_name = "blob-name"
    media_link = "http://test.invalid"
    client = mock.Mock(spec=[u"_http"])
    bucket = _Bucket(client)
    blob = self._make_one(blob_name, bucket=bucket)
    blob._properties["mediaLink"] = media_link
    blob._do_download = mock.Mock()
    blob._do_download.side_effect = grmp_response

    file_obj = io.BytesIO()
    with self.assertRaises(exceptions.NotFound):
        blob.download_to_file(file_obj)

    self.assertEqual(file_obj.tell(), 0)

    headers = {"accept-encoding": "gzip"}
    blob._do_download.assert_called_once_with(
        client._http, file_obj, media_link, headers, None, None, False
    )
Example #20
Source File: model_test.py From python-docs-samples with Apache License 2.0
def ensure_model_ready():
    name = STATIC_MODEL
    try:
        return automl_tables_model.get_model(PROJECT, REGION, name)
    except exceptions.NotFound:
        pass

    dataset = dataset_test.ensure_dataset_ready()
    return automl_tables_model.create_model(
        PROJECT, REGION, dataset.display_name, name, 1000
    )
Example #21
Source File: test_system.py From python-storage with Apache License 2.0
def _empty_bucket(bucket):
    """Empty a bucket of all existing blobs (including multiple versions)."""
    for blob in list(bucket.list_blobs(versions=True)):
        try:
            blob.delete()
        except exceptions.NotFound:
            pass
Example #22
Source File: dataset_test.py From python-docs-samples with Apache License 2.0
def test_dataset_create_import_delete(capsys):
    name = _id("d_cr_dl")
    dataset = automl_tables_dataset.create_dataset(PROJECT, REGION, name)
    assert dataset is not None
    assert dataset.display_name == name

    automl_tables_dataset.import_data(PROJECT, REGION, name, GCS_DATASET)
    out, _ = capsys.readouterr()
    assert "Data imported." in out

    automl_tables_dataset.delete_dataset(PROJECT, REGION, name)
    with pytest.raises(exceptions.NotFound):
        automl_tables_dataset.get_dataset(PROJECT, REGION, name)
Example #23
Source File: test_blob.py From python-storage with Apache License 2.0
def api_request(self, **kw):
    from google.cloud.exceptions import NotFound

    info, content = self._respond(**kw)
    if info.get("status") == http_client.NOT_FOUND:
        raise NotFound(info)
    return content
Example #24
Source File: test_system.py From python-storage with Apache License 2.0
def test_blob_w_temporary_hold(self):
    from google.api_core import exceptions

    new_bucket_name = "w-tmp-hold" + unique_resource_id("-")
    self.assertRaises(
        exceptions.NotFound, Config.CLIENT.get_bucket, new_bucket_name
    )
    bucket = retry_429_503(Config.CLIENT.create_bucket)(new_bucket_name)
    self.case_buckets_to_delete.append(new_bucket_name)

    blob_name = "test-blob"
    payload = b"DEADBEEF"
    blob = bucket.blob(blob_name)
    blob.upload_from_string(payload)
    self.case_blobs_to_delete.append(blob)

    other = bucket.get_blob(blob_name)
    other.temporary_hold = True
    other.patch()
    self.assertTrue(other.temporary_hold)
    self.assertFalse(other.event_based_hold)
    self.assertIsNone(other.retention_expiration_time)

    with self.assertRaises(exceptions.Forbidden):
        other.delete()

    other.temporary_hold = False
    other.patch()

    other.delete()
    self.case_blobs_to_delete.pop()
Example #25
Source File: test_system.py From python-storage with Apache License 2.0
def test_bucket_lock_retention_policy(self):
    import datetime
    from google.api_core import exceptions

    period_secs = 10
    new_bucket_name = "loc-ret-policy" + unique_resource_id("-")
    self.assertRaises(
        exceptions.NotFound, Config.CLIENT.get_bucket, new_bucket_name
    )
    bucket = retry_429_503(Config.CLIENT.create_bucket)(new_bucket_name)
    self.case_buckets_to_delete.append(new_bucket_name)

    bucket.retention_period = period_secs
    bucket.patch()
    self.assertEqual(bucket.retention_period, period_secs)
    self.assertIsInstance(
        bucket.retention_policy_effective_time, datetime.datetime
    )
    self.assertFalse(bucket.default_event_based_hold)
    self.assertFalse(bucket.retention_policy_locked)

    bucket.lock_retention_policy()

    bucket.reload()
    self.assertTrue(bucket.retention_policy_locked)

    bucket.retention_period = None
    with self.assertRaises(exceptions.Forbidden):
        bucket.patch()
Example #26
Source File: test_system.py From python-storage with Apache License 2.0
def test_new_bucket_w_ubla(self):
    new_bucket_name = "new-w-ubla" + unique_resource_id("-")
    self.assertRaises(
        exceptions.NotFound, Config.CLIENT.get_bucket, new_bucket_name
    )
    bucket = Config.CLIENT.bucket(new_bucket_name)
    bucket.iam_configuration.uniform_bucket_level_access_enabled = True
    retry_429_503(bucket.create)()
    self.case_buckets_to_delete.append(new_bucket_name)

    bucket_acl = bucket.acl
    with self.assertRaises(exceptions.BadRequest):
        bucket_acl.reload()

    bucket_acl.loaded = True  # Fake that we somehow loaded the ACL
    bucket_acl.all().grant_read()
    with self.assertRaises(exceptions.BadRequest):
        bucket_acl.save()

    blob_name = "my-blob.txt"
    blob = bucket.blob(blob_name)
    payload = b"DEADBEEF"
    blob.upload_from_string(payload)

    found = bucket.get_blob(blob_name)
    self.assertEqual(found.download_as_string(), payload)

    blob_acl = blob.acl
    with self.assertRaises(exceptions.BadRequest):
        blob_acl.reload()

    blob_acl.loaded = True  # Fake that we somehow loaded the ACL
    blob_acl.all().grant_read()
    with self.assertRaises(exceptions.BadRequest):
        blob_acl.save()
Example #27
Source File: test_system.py From python-storage with Apache License 2.0
def test_get_signed_policy_v4(self):
    bucket_name = "post_policy" + unique_resource_id("-")
    self.assertRaises(exceptions.NotFound, Config.CLIENT.get_bucket, bucket_name)
    retry_429_503(Config.CLIENT.create_bucket)(bucket_name)
    self.case_buckets_to_delete.append(bucket_name)

    blob_name = "post_policy_obj.txt"
    with open(blob_name, "w") as f:
        f.write("DEADBEEF")

    policy = Config.CLIENT.generate_signed_post_policy_v4(
        bucket_name,
        blob_name,
        conditions=[
            {"bucket": bucket_name},
            ["starts-with", "$Content-Type", "text/pla"],
        ],
        expiration=datetime.datetime.now() + datetime.timedelta(hours=1),
        fields={"content-type": "text/plain"},
    )
    with open(blob_name, "r") as f:
        files = {"file": (blob_name, f)}
        response = requests.post(policy["url"], data=policy["fields"], files=files)

    os.remove(blob_name)
    self.assertEqual(response.status_code, 204)
Example #28
Source File: test_system.py From python-storage with Apache License 2.0
def test_get_signed_policy_v4_invalid_field(self):
    bucket_name = "post_policy" + unique_resource_id("-")
    self.assertRaises(exceptions.NotFound, Config.CLIENT.get_bucket, bucket_name)
    retry_429_503(Config.CLIENT.create_bucket)(bucket_name)
    self.case_buckets_to_delete.append(bucket_name)

    blob_name = "post_policy_obj.txt"
    with open(blob_name, "w") as f:
        f.write("DEADBEEF")

    policy = Config.CLIENT.generate_signed_post_policy_v4(
        bucket_name,
        blob_name,
        conditions=[
            {"bucket": bucket_name},
            ["starts-with", "$Content-Type", "text/pla"],
        ],
        expiration=datetime.datetime.now() + datetime.timedelta(hours=1),
        fields={"x-goog-random": "invalid_field", "content-type": "text/plain"},
    )
    with open(blob_name, "r") as f:
        files = {"file": (blob_name, f)}
        response = requests.post(policy["url"], data=policy["fields"], files=files)

    os.remove(blob_name)
    self.assertEqual(response.status_code, 400)
Example #29
Source File: notification.py From python-storage with Apache License 2.0
def exists(self, client=None, timeout=_DEFAULT_TIMEOUT):
    """Test whether this notification exists.

    See:
    https://cloud.google.com/storage/docs/json_api/v1/notifications/get

    If :attr:`user_project` is set on the bucket, bills the API request
    to that project.

    :type client: :class:`~google.cloud.storage.client.Client` or
                  ``NoneType``
    :param client: (Optional) The client to use. If not passed, falls back
                   to the ``client`` stored on the current bucket.

    :type timeout: float or tuple
    :param timeout: (Optional) The amount of time, in seconds, to wait
        for the server response. Can also be passed as a tuple
        (connect_timeout, read_timeout). See
        :meth:`requests.Session.request` documentation for details.

    :rtype: bool
    :returns: True, if the notification exists, else False.
    :raises ValueError: if the notification has no ID.
    """
    if self.notification_id is None:
        raise ValueError("Notification not initialized by server")

    client = self._require_client(client)

    query_params = {}
    if self.bucket.user_project is not None:
        query_params["userProject"] = self.bucket.user_project

    try:
        client._connection.api_request(
            method="GET", path=self.path, query_params=query_params,
            timeout=timeout
        )
    except NotFound:
        return False
    else:
        return True
Example #30
Source File: test_system.py From python-storage with Apache License 2.0
def test_bucket_create_w_alt_storage_class(self):
    from google.cloud.storage import constants

    new_bucket_name = "bucket-w-archive" + unique_resource_id("-")
    self.assertRaises(
        exceptions.NotFound, Config.CLIENT.get_bucket, new_bucket_name
    )
    bucket = Config.CLIENT.bucket(new_bucket_name)
    bucket.storage_class = constants.ARCHIVE_STORAGE_CLASS
    retry_429_503(bucket.create)()
    self.case_buckets_to_delete.append(new_bucket_name)
    created = Config.CLIENT.get_bucket(new_bucket_name)
    self.assertEqual(created.storage_class, constants.ARCHIVE_STORAGE_CLASS)