Python google.cloud.exceptions.GoogleCloudError() Examples

The following are 27 code examples of google.cloud.exceptions.GoogleCloudError(), taken from open-source projects. The source file, project, and license for each example are listed above it. You may also want to check out the other functions and classes available in the google.cloud.exceptions module.
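Before working through the project examples, here is a minimal, self-contained sketch of the common pattern (the bucket name is a hypothetical placeholder): GoogleCloudError is effectively the base class for API errors raised by the Google Cloud client libraries, so catching it typically covers NotFound, Forbidden, and the other HTTP-mapped errors.

from google.cloud import exceptions, storage

def bucket_exists(bucket_name):
    """Return True if the bucket is reachable, False if the API call fails."""
    client = storage.Client()
    try:
        # lookup_bucket() returns None when the bucket does not exist.
        return client.lookup_bucket(bucket_name) is not None
    except exceptions.GoogleCloudError as exc:
        # code and message come from the underlying HTTP error response.
        print('GCS call failed ({0!s}): {1!s}'.format(exc.code, exc.message))
        return False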
Example #1
Source File: google_cloud.py    From turbinia with Apache License 2.0
def setup_stackdriver_handler(project_id):
  """Set up Google Cloud Stackdriver Logging

  The Google Cloud Logging library will attach itself as a
  handler to the default Python logging module.

  Args:
    project_id: The name of the Google Cloud project.
  Raises:
    TurbiniaException: When an error occurs enabling GCP Stackdriver Logging.
  """
  try:
    client = cloud_logging.Client(project=project_id)
    cloud_handler = cloud_logging.handlers.CloudLoggingHandler(client)
    logger = logging.getLogger('turbinia')
    logger.addHandler(cloud_handler)
  except exceptions.GoogleCloudError as exception:
    msg = 'Error enabling Stackdriver Logging: {0:s}'.format(str(exception))
    raise TurbiniaException(msg) 
Example #2
Source File: job.py    From python-bigquery with Apache License 2.0
def result(self, retry=DEFAULT_RETRY, timeout=None):
        """Start the job and wait for it to complete and get the result.

        Args:
            retry (Optional[google.api_core.retry.Retry]): How to retry the RPC.
            timeout (Optional[float]):
                The number of seconds to wait for the underlying HTTP transport
                before using ``retry``.
                If multiple requests are made under the hood, ``timeout``
                applies to each individual request.

        Returns:
            _AsyncJob: This instance.

        Raises:
            google.cloud.exceptions.GoogleCloudError:
                if the job failed.
            concurrent.futures.TimeoutError:
                if the job did not complete in the given timeout.
        """
        if self.state is None:
            self._begin(retry=retry, timeout=timeout)
        # TODO: modify PollingFuture so it can pass a retry argument to done().
        return super(_AsyncJob, self).result(timeout=timeout) 
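A minimal usage sketch for the method above (the SQL string, timeout, and default-project client are placeholder assumptions): result() raises GoogleCloudError when the job itself failed and concurrent.futures.TimeoutError when it did not finish in time, so callers usually wrap it.

from google.cloud import bigquery, exceptions

def run_query(sql):
    """Run a query and return its rows, surfacing job failures with context."""
    client = bigquery.Client()  # assumes application default credentials
    job = client.query(sql)
    try:
        return list(job.result(timeout=300))
    except exceptions.GoogleCloudError as exc:
        # The job failed server-side; job.errors holds the detailed error list.
        raise RuntimeError(
            'BigQuery job {} failed: {}'.format(job.job_id, exc))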
Example #3
Source File: job.py    From python-bigquery with Apache License 2.0
def _error_result_to_exception(error_result):
    """Maps BigQuery error reasons to an exception.

    The reasons and their matching HTTP status codes are documented on
    the `troubleshooting errors`_ page.

    .. _troubleshooting errors: https://cloud.google.com/bigquery\
        /troubleshooting-errors

    Args:
        error_result (Mapping[str, str]): The error result from BigQuery.

    Returns:
        google.cloud.exceptions.GoogleCloudError: The mapped exception.
    """
    reason = error_result.get("reason")
    status_code = _ERROR_REASON_TO_EXCEPTION.get(
        reason, http_client.INTERNAL_SERVER_ERROR
    )
    return exceptions.from_http_status(
        status_code, error_result.get("message", ""), errors=[error_result]
    ) 
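For context, a small sketch of the mapping performed above (the error payload is a hypothetical example): google.api_core.exceptions.from_http_status() turns an HTTP status code into an instance of the matching exception class, e.g. 404 becomes NotFound.

from google.api_core import exceptions as api_exceptions

error_result = {"reason": "notFound", "message": "Table not found"}  # hypothetical payload
exc = api_exceptions.from_http_status(
    404, error_result["message"], errors=[error_result])
print(type(exc).__name__)                                   # NotFound
print(isinstance(exc, api_exceptions.GoogleAPICallError))   # True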
Example #4
Source File: kubernetes_engine.py    From airflow with Apache License 2.0
def wait_for_operation(self, operation: Operation, project_id: Optional[str] = None) -> Operation:
        """
        Given an operation, continuously fetches its status from Google Cloud
        until it either completes or an error occurs.

        :param operation: The Operation to wait for
        :type operation: google.cloud.container_v1.gapic.enums.Operation
        :param project_id: Google Cloud Platform project ID
        :type project_id: str
        :return: A new, updated operation fetched from Google Cloud
        """
        self.log.info("Waiting for OPERATION_NAME %s", operation.name)
        time.sleep(OPERATIONAL_POLL_INTERVAL)
        while operation.status != Operation.Status.DONE:
            if operation.status == Operation.Status.RUNNING or operation.status == \
                    Operation.Status.PENDING:
                time.sleep(OPERATIONAL_POLL_INTERVAL)
            else:
                raise exceptions.GoogleCloudError(
                    "Operation has failed with status: %s" % operation.status)
            # To update status of operation
            operation = self.get_operation(operation.name, project_id=project_id or self.project_id)
        return operation 
Example #5
Source File: provider.py    From koku with GNU Affero General Public License v3.0
def cost_usage_source_is_reachable(self, credential_name, data_source):
        """
        Verify that the GCP bucket exists and is reachable.

        Args:
            credential_name (object): not used; only present for interface compatibility
            data_source (dict): dict containing name of GCP storage bucket

        """
        storage_client = storage.Client()
        bucket = data_source["bucket"]
        try:
            bucket_info = storage_client.lookup_bucket(bucket)
            if not bucket_info:
                # if the lookup does not return anything, then this is a nonexistent bucket
                key = "billing_source.bucket"
                message = f"The provided GCP bucket {bucket} does not exist"
                raise serializers.ValidationError(error_obj(key, message))

        except GoogleCloudError as e:
            key = "billing_source.bucket"
            raise serializers.ValidationError(error_obj(key, e.message))

        return True 
Example #6
Source File: dataprep.py    From professional-services with Apache License 2.0
def _create_temp_table(self, timestamp, total_lines):
        """Save in a temporary table the data in the source table shuffled."""
        _, temp_table_uri = self._build_destination_table(timestamp, TEMP_TABLE_SUFFIX)
        try:
            logging.info('Creating temporary table %s', temp_table_uri)
            query = queries.QUERY_TEMP_DATA_TEMPLATE.format(
                temp_table=temp_table_uri,
                feature_columns=self._columns,
                target_columns_shuffle=self._target_columns_shuffle,
                source_table=self._source_table_uri,
                total_lines=total_lines)
            self._bq_client.run_query(query)
            return temp_table_uri
        except GoogleCloudError as gcp_exception:
            raise exceptions.MLDataPrepException(
                'Could not create table {}'.format(temp_table_uri),
                ERR_CALCULATE_DATASET_SIZE,
                gcp_exception) 
Example #7
Source File: output_manager.py    From turbinia with Apache License 2.0
def copy_to(self, source_path):
    if os.path.getsize(source_path) == 0:
      message = (
          'Local source file {0:s} is empty.  Not uploading to GCS'.format(
              source_path))
      log.error(message)
      raise TurbiniaException(message)

    bucket = self.client.get_bucket(self.bucket)
    destination_path = os.path.join(
        self.base_output_dir, self.unique_dir, os.path.basename(source_path))
    log.info(
        'Writing {0:s} to GCS path {1:s}'.format(source_path, destination_path))
    try:
      blob = storage.Blob(destination_path, bucket, chunk_size=self.CHUNK_SIZE)
      blob.upload_from_filename(source_path, client=self.client)
    except exceptions.GoogleCloudError as exception:
      message = 'File upload to GCS failed: {0!s}'.format(exception)
      log.error(message)
      raise TurbiniaException(message)
    return os.path.join('gs://', self.bucket, destination_path) 
Example #8
Source File: google_cloud.py    From turbinia with Apache License 2.0
def setup_stackdriver_traceback(project_id):
  """Set up Google Cloud Error Reporting

  This method will enable Google Cloud Error Reporting.
  All exceptions that occur within a Turbinia Task will be logged.

  Args:
    project_id: The name of the Google Cloud project.
  Raises:
    TurbiniaException: When an error occurs enabling GCP Error Reporting.
  """
  try:
    client = error_reporting.Client(project=project_id)
  except exceptions.GoogleCloudError as exception:
    msg = 'Error enabling GCP Error Reporting: {0:s}'.format(str(exception))
    raise TurbiniaException(msg)
  return client 
Example #9
Source File: benchmark_uploader.py    From Live-feed-object-device-identification-using-Tensorflow-and-OpenCV with Apache License 2.0
def insert_run_status(self, dataset_name, table_name, run_id, run_status):
    """Insert the run status in to Bigquery run status table."""
    query = ("INSERT {ds}.{tb} "
             "(run_id, status) "
             "VALUES('{rid}', '{status}')").format(
                 ds=dataset_name, tb=table_name, rid=run_id, status=run_status)
    try:
      self._bq_client.query(query=query).result()
    except exceptions.GoogleCloudError as e:
      tf.logging.error("Failed to insert run status: %s", e) 
Example #10
Source File: dataprep.py    From professional-services with Apache License 2.0
def _calculate_dataset_sizes(self):
        """Calculate the size of the training and validation datasets."""
        try:
            total_lines = self._bq_client.count_lines_in_table(self._source_project,
                                                               self._source_dataset,
                                                               self._source_table)
            split_index = int(self._split_ratio * total_lines)
            return total_lines, split_index
        except GoogleCloudError as gcp_exception:
            raise exceptions.MLDataPrepException(
                'Could not count lines in table {}'.format(self._source_table),
                ERR_CREATE_TEMP_TABLE,
                gcp_exception) 
Example #11
Source File: dataprep.py    From professional-services with Apache License 2.0
def _extract_ml_dataset(self, ml_dataset, temp_table_uri, timestamp, split_index):
        """Extract to Cloud Storage the training or the validation dataset."""
        table_id, table_uri = self._build_destination_table(
            timestamp, ml_dataset)
        logging.info('Exporting %s dataset to the table %s', ml_dataset, table_uri)
        try:
            query_template = queries.QUERY_TRAINING_DATA_TEMPLATE\
                if ml_dataset == TRAINING_DATASET else queries.QUERY_VALIDATION_DATA_TEMPLATE
            query = query_template.format(destination_table=table_uri,
                                          feature_columns=self._columns,
                                          target_columns_export=self._target_columns_export,
                                          temp_table=temp_table_uri,
                                          split_index=split_index)
            self._bq_client.run_query(query)
            destination_uri = self._build_gcs_destination_uri(
                timestamp, ml_dataset)
            logging.info('Exporting %s dataset to the GCS location %s', ml_dataset, destination_uri)
            self._bq_client.export_table_as_csv(self._destination_project,
                                                self._destination_dataset,
                                                table_id,
                                                destination_uri)
        except GoogleCloudError as gcp_exception:
            raise exceptions.MLDataPrepException(
                'Could not generate {} dataset'.format(ml_dataset),
                ERR_GENERATE_ML_DATASET,
                gcp_exception) 
Example #12
Source File: state_manager.py    From turbinia with Apache License 2.0
def write_new_task(self, task):
    key = self.client.key('TurbiniaTask', task.id)
    try:
      entity = datastore.Entity(key)
      entity.update(self.get_task_dict(task))
      log.info('Writing new task {0:s} into Datastore'.format(task.name))
      self.client.put(entity)
      task.state_key = key
    except exceptions.GoogleCloudError as e:
      log.error(
          'Failed to update task {0:s} in datastore: {1!s}'.format(
              task.name, e))
    return key 
Example #13
Source File: benchmark_uploader.py    From Live-feed-object-device-identification-using-Tensorflow-and-OpenCV with Apache License 2.0
def update_run_status(self, dataset_name, table_name, run_id, run_status):
    """Update the run status in in Bigquery run status table."""
    query = ("UPDATE {ds}.{tb} "
             "SET status = '{status}' "
             "WHERE run_id = '{rid}'").format(
                 ds=dataset_name, tb=table_name, status=run_status, rid=run_id)
    try:
      self._bq_client.query(query=query).result()
    except exceptions.GoogleCloudError as e:
      tf.logging.error("Failed to update run status: %s", e) 
Example #14
Source File: benchmark_uploader.py    From g-tensorflow-models with Apache License 2.0
def insert_run_status(self, dataset_name, table_name, run_id, run_status):
    """Insert the run status in to Bigquery run status table."""
    query = ("INSERT {ds}.{tb} "
             "(run_id, status) "
             "VALUES('{rid}', '{status}')").format(
                 ds=dataset_name, tb=table_name, rid=run_id, status=run_status)
    try:
      self._bq_client.query(query=query).result()
    except exceptions.GoogleCloudError as e:
      tf.logging.error("Failed to insert run status: %s", e) 
Example #15
Source File: benchmark_uploader.py    From g-tensorflow-models with Apache License 2.0
def update_run_status(self, dataset_name, table_name, run_id, run_status):
    """Update the run status in in Bigquery run status table."""
    query = ("UPDATE {ds}.{tb} "
             "SET status = '{status}' "
             "WHERE run_id = '{rid}'").format(
                 ds=dataset_name, tb=table_name, status=run_status, rid=run_id)
    try:
      self._bq_client.query(query=query).result()
    except exceptions.GoogleCloudError as e:
      tf.logging.error("Failed to update run status: %s", e) 
Example #16
Source File: benchmark_uploader.py    From multilabel-image-classification-tensorflow with MIT License
def insert_run_status(self, dataset_name, table_name, run_id, run_status):
    """Insert the run status in to Bigquery run status table."""
    query = ("INSERT {ds}.{tb} "
             "(run_id, status) "
             "VALUES('{rid}', '{status}')").format(
                 ds=dataset_name, tb=table_name, rid=run_id, status=run_status)
    try:
      self._bq_client.query(query=query).result()
    except exceptions.GoogleCloudError as e:
      tf.logging.error("Failed to insert run status: %s", e) 
Example #17
Source File: benchmark_uploader.py    From multilabel-image-classification-tensorflow with MIT License
def update_run_status(self, dataset_name, table_name, run_id, run_status):
    """Update the run status in in Bigquery run status table."""
    query = ("UPDATE {ds}.{tb} "
             "SET status = '{status}' "
             "WHERE run_id = '{rid}'").format(
                 ds=dataset_name, tb=table_name, status=run_status, rid=run_id)
    try:
      self._bq_client.query(query=query).result()
    except exceptions.GoogleCloudError as e:
      tf.logging.error("Failed to update run status: %s", e) 
Example #18
Source File: job.py    From python-bigquery with Apache License 2.0
def _begin(self, client=None, retry=DEFAULT_RETRY, timeout=None):
        """API call:  begin the job via a POST request

        See
        https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/insert

        Args:
            client (Optional[google.cloud.bigquery.client.Client]):
                The client to use. If not passed, falls back to the ``client``
                associated with the job object or ``NoneType``.
            retry (Optional[google.api_core.retry.Retry]):
                How to retry the RPC.
            timeout (Optional[float]):
                The number of seconds to wait for the underlying HTTP transport
                before using ``retry``.

        Raises:
            ValueError: If the job has already begun.
        """

        try:
            super(QueryJob, self)._begin(client=client, retry=retry, timeout=timeout)
        except exceptions.GoogleCloudError as exc:
            exc.message += self._format_for_exception(self.query, self.job_id)
            exc.query_job = self
            raise 
Example #19
Source File: state_manager.py    From turbinia with Apache License 2.0
def update_task(self, task):
    task.touch()
    try:
      with self.client.transaction():
        entity = self.client.get(task.state_key)
        if not entity:
          self.write_new_task(task)
          return
        entity.update(self.get_task_dict(task))
        log.debug('Updating Task {0:s} in Datastore'.format(task.name))
        self.client.put(entity)
    except exceptions.GoogleCloudError as e:
      log.error(
          'Failed to update task {0:s} in datastore: {1!s}'.format(
              task.name, e)) 
Example #20
Source File: flaky.py    From python-repo-tools with Apache License 2.0
def client_library_errors(e, *args):
    """Used by mark_flaky to retry on remote service errors."""
    exception_class, exception_instance, traceback = e

    return isinstance(
        exception_instance,
        (GoogleCloudError,)) 
Example #21
Source File: eventually_consistent.py    From python-repo-tools with Apache License 2.0
def mark(*args, **kwargs):
    """Marks an entire test as eventually consistent and retries.

    Args:
        tries: The number of retries.
        exceptions: The exceptions on which it will retry. It can be
            single value or a tuple.
        wait_exponential_multiplier: The exponential multiplier in
            milliseconds.
        wait_exponential_max: The maximum wait before the next try in
            milliseconds.
    """
    __tracebackhide__ = True
    tries = kwargs.get('tries', STOP_MAX_ATTEMPT_NUMBER_DEFAULT)
    retry_exceptions = kwargs.get(
        'exceptions', (AssertionError, exceptions.GoogleCloudError))
    wait_exponential_multiplier = kwargs.get(
        'wait_exponential_multiplier', WAIT_EXPONENTIAL_MULTIPLIER_DEFAULT)
    wait_exponential_max = kwargs.get(
        'wait_exponential_max', WAIT_EXPONENTIAL_MAX_DEFAULT)
    # support both `@mark` and `@mark()` syntax
    if len(args) == 1 and callable(args[0]):
        return retry(
            wait_exponential_multiplier=wait_exponential_multiplier,
            wait_exponential_max=wait_exponential_max,
            stop_max_attempt_number=tries,
            retry_on_exception=_retry_on_exception(retry_exceptions))(args[0])

    # `mark()` syntax
    def inner(f):
        __tracebackhide__ = True
        return retry(
            wait_exponential_multiplier=wait_exponential_multiplier,
            wait_exponential_max=wait_exponential_max,
            stop_max_attempt_number=tries,
            retry_on_exception=_retry_on_exception(retry_exceptions))(f)
    return inner 
Example #22
Source File: test_kubernetes_engine.py    From airflow with Apache License 2.0
def test_wait_for_response_exception(self, time_mock):
        from google.cloud.container_v1.gapic.enums import Operation
        from google.cloud.exceptions import GoogleCloudError

        mock_op = mock.Mock()
        mock_op.status = Operation.Status.ABORTING

        with self.assertRaises(GoogleCloudError):
            self.gke_hook.wait_for_operation(mock_op)
        self.assertEqual(time_mock.call_count, 1)
Example #23
Source File: tests_provider.py    From koku with GNU Affero General Public License v3.0
def test_bucket_access_exception(self, mock_storage):
        """Test that ValidationError is raised when GoogleCloudError is raised."""
        gcp_client = mock_storage.return_value
        gcp_client.lookup_bucket.side_effect = GoogleCloudError("GCP Error")
        credentials = {"project_id": FAKE.word()}
        storage_resource_name = {"bucket": FAKE.word()}
        with self.assertRaises(ValidationError):
            GCPProvider().cost_usage_source_is_reachable(credentials, storage_resource_name) 
Example #24
Source File: tests_provider.py    From koku with GNU Affero General Public License v3.0
def test_no_bucket_exists_exception(self, mock_storage):
        """Test that ValidationError is raised when GoogleCloudError is raised."""
        gcp_client = mock_storage.return_value
        gcp_client.lookup_bucket.return_value = None

        credentials = {"project_id": FAKE.word()}
        storage_resource_name = {"bucket": FAKE.word()}

        with self.assertRaises(ValidationError):
            GCPProvider().cost_usage_source_is_reachable(credentials, storage_resource_name) 
Example #25
Source File: client.py    From turbinia with Apache License 2.0
def __init__(self, jobs_denylist=None, jobs_allowlist=None):
    """Initialization for PSQ Worker.

    Args:
      jobs_denylist (Optional[list[str]]): Jobs we will exclude from running
      jobs_allowlist (Optional[list[str]]): The only jobs we will allow to run
    """
    config.LoadConfig()
    psq_publisher = pubsub.PublisherClient()
    psq_subscriber = pubsub.SubscriberClient()
    datastore_client = datastore.Client(project=config.TURBINIA_PROJECT)
    try:
      self.psq = psq.Queue(
          psq_publisher, psq_subscriber, config.TURBINIA_PROJECT,
          name=config.PSQ_TOPIC, storage=psq.DatastoreStorage(datastore_client))
    except exceptions.GoogleCloudError as e:
      msg = 'Error creating PSQ Queue: {0:s}'.format(str(e))
      log.error(msg)
      raise TurbiniaException(msg)

    # Deregister jobs from denylist/allowlist.
    job_manager.JobsManager.DeregisterJobs(jobs_denylist, jobs_allowlist)
    disabled_jobs = list(config.DISABLED_JOBS) if config.DISABLED_JOBS else []
    disabled_jobs = [j.lower() for j in disabled_jobs]
    # Only actually disable jobs that have not been allowlisted.
    if jobs_allowlist:
      disabled_jobs = list(set(disabled_jobs) - set(jobs_allowlist))
    if disabled_jobs:
      log.info(
          'Disabling non-allowlisted jobs configured to be disabled in the '
          'config file: {0:s}'.format(', '.join(disabled_jobs)))
      job_manager.JobsManager.DeregisterJobs(jobs_denylist=disabled_jobs)

    # Check for valid dependencies/directories.
    dependencies = config.ParseDependencies()
    if config.DOCKER_ENABLED:
      check_docker_dependencies(dependencies)
    check_system_dependencies(dependencies)
    check_directory(config.MOUNT_DIR_PREFIX)
    check_directory(config.OUTPUT_DIR)
    check_directory(config.TMP_DIR)

    jobs = job_manager.JobsManager.GetJobNames()
    log.info(
        'Dependency check complete. The following jobs are enabled '
        'for this worker: {0:s}'.format(','.join(jobs)))
    log.info('Starting PSQ listener on queue {0:s}'.format(self.psq.name))
    self.worker = psq.Worker(queue=self.psq) 
Example #26
Source File: export.py    From professional-services with Apache License 2.0
def export_to_gcs_content_types(parent, gcs_destination, content_types,
                                asset_types):
    """Export each asset type into a GCS object with the GCS prefix.

    Calls `export_to_gcs` concurrently to perform an export, once for each
    content_type.

    Args:
        parent: Project id or organization number.
        gcs_destination: GCS object prefix to export to (gs://bucket/prefix)
        content_types: List of content types to export, e.g. RESOURCE or
        IAM_POLICY. Defaults to [RESOURCE, IAM_POLICY].
        asset_types: List of asset_types to export. Supply `None` to get
        everything.
    Returns:
        A dict of content_types and export result objects.

    """

    logging.info('performing export from %s to %s of content_types %s',
                 parent, gcs_destination, str(content_types))
    if asset_types == ['*']:
        asset_types = None
    if content_types is None:
        content_types = ['RESOURCE', 'IAM_POLICY']
    with futures.ThreadPoolExecutor(max_workers=3) as executor:
        export_futures = {
            executor.submit(export_to_gcs, parent, '{}/{}.json'.format(
                gcs_destination, content_type), content_type, asset_types):
            content_type
            for content_type in content_types
        }
    operation_results = {}
    for future in futures.as_completed(export_futures):
        try:
            content_type = export_futures[future]
            operation_results[content_type] = future.result()
        except GoogleCloudError:
            content_type = export_futures[future]
            logging.exception('Error exporting %s', content_type)
            raise
    logging.info('export results: %s', pprint.pformat(operation_results))
    return operation_results 
Example #27
Source File: output_manager.py    From turbinia with Apache License 2.0
def copy_from(self, source_path):
    """Copies output file from the managed location to the local output dir.

    Args:
      source_path (string): A path to a source file in the managed storage
          location.  This path should be in a format matching the storage type
          (e.g. GCS paths are formatted like 'gs://bucketfoo/' and local paths
          are like '/foo/bar').

    Returns:
      The path the file was saved to, or None if file was not written.

    Raises:
      TurbiniaException: If file retrieval fails.
    """
    bucket = self.client.get_bucket(self.bucket)
    gcs_path = self._parse_gcs_path(source_path)[1]
    destination_path = os.path.join(
        self.local_output_dir, os.path.basename(source_path))
    log.info(
        'Writing GCS file {0:s} to local path {1:s}'.format(
            source_path, destination_path))
    try:
      blob = storage.Blob(gcs_path, bucket, chunk_size=self.CHUNK_SIZE)
      blob.download_to_filename(destination_path, client=self.client)
    except exceptions.RequestRangeNotSatisfiable as exception:
      message = (
          'File retrieval from GCS failed, file may be empty: {0!s}'.format(
              exception))
      log.error(message)
      raise TurbiniaException(message)
    except exceptions.GoogleCloudError as exception:
      message = 'File retrieval from GCS failed: {0!s}'.format(exception)
      log.error(message)
      raise TurbiniaException(message)

    if not os.path.exists(destination_path):
      message = (
          'File retrieval from GCS failed: Local file {0:s} does not '
          'exist'.format(destination_path))
      log.error(message)
      raise TurbiniaException(message)
    return destination_path