Python google.api_core.exceptions.BadRequest() Examples

The following are 22 code examples of google.api_core.exceptions.BadRequest, the exception the Google API client libraries raise when a request fails with HTTP status 400. Each example is excerpted from an open-source project; the source file, originating project, and license are noted above the code.
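Before the project-specific examples, here is a minimal sketch of the most common pattern: submit a BigQuery query and handle the 400 the service returns for invalid SQL. The project ID and query are placeholders, not taken from any project below.

from google.api_core.exceptions import BadRequest
from google.cloud import bigquery

client = bigquery.Client(project="my-project")  # hypothetical project ID

try:
    # Deliberately malformed SQL; the service rejects it with HTTP 400.
    client.query("SELECT FROM WHERE").result()
except BadRequest as exc:
    # exc.message carries the server's error text; exc.errors holds the
    # structured error payloads from the API response.
    print("Query rejected:", exc.message)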
Example #1
Source File: import_measures.py    From openprescribing with MIT License
def check_definitions(self, measure_defs, start_date, end_date, verbose):
        """Checks SQL definitions for measures.
        """

        # We don't validate JSON here, as this is already done as a
        # side-effect of parsing the command options.
        errors = []
        for measure_def in measure_defs:
            measure_id = measure_def["id"]
            try:
                measure = create_or_update_measure(measure_def, end_date)
                calculation = MeasureCalculation(
                    measure, start_date=start_date, end_date=end_date, verbose=verbose
                )
                calculation.check_definition()
            except BadRequest as e:
                errors.append("* SQL error in `{}`: {}".format(measure_id, e.args[0]))
            except TypeError as e:
                errors.append("* JSON error in `{}`: {}".format(measure_id, e.args[0]))
        if errors:
            raise BadRequest("\n".join(errors)) 
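The command validates every measure before failing: SQL and JSON errors are collected per measure and re-raised as a single aggregated BadRequest, so one run reports all broken definitions at once.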
Example #2
Source File: system.py    From python-bigquery with Apache License 2.0
def tearDown(self):
        def _still_in_use(bad_request):
            return any(
                error["reason"] == "resourceInUse" for error in bad_request._errors
            )

        retry_in_use = RetryErrors(BadRequest, error_predicate=_still_in_use)
        retry_storage_errors_conflict = RetryErrors(
            (Conflict, TooManyRequests, InternalServerError, ServiceUnavailable)
        )
        for doomed in self.to_delete:
            if isinstance(doomed, storage.Bucket):
                retry_storage_errors_conflict(doomed.delete)(force=True)
            elif isinstance(doomed, (Dataset, bigquery.DatasetReference)):
                retry_in_use(Config.CLIENT.delete_dataset)(doomed, delete_contents=True)
            elif isinstance(doomed, (Table, bigquery.TableReference)):
                retry_in_use(Config.CLIENT.delete_table)(doomed)
            else:
                doomed.delete() 
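Cleanup here retries dataset and table deletion only while the 400 payload reports a resourceInUse error (note that the predicate reads the exception's private _errors attribute), whereas bucket deletion retries the usual transient storage errors.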
Example #3
Source File: system.py    From python-bigquery with Apache License 2.0
def test_get_failed_job(self):
        # issue 4246
        from google.api_core.exceptions import BadRequest

        JOB_ID = "invalid_{}".format(str(uuid.uuid4()))
        QUERY = "SELECT TIMESTAMP_ADD(@ts_value, INTERVAL 1 HOUR);"
        PARAM = bigquery.ScalarQueryParameter("ts_value", "TIMESTAMP", 1.4810976e9)

        job_config = bigquery.QueryJobConfig()
        job_config.query_parameters = [PARAM]

        with self.assertRaises(BadRequest):
            Config.CLIENT.query(QUERY, job_id=JOB_ID, job_config=job_config).result()

        job = Config.CLIENT.get_job(JOB_ID)

        with self.assertRaises(ValueError):
            job.query_parameters 
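Although the query fails with a 400, the job is still retrievable by its ID through get_job; the test then asserts that reading query_parameters on the fetched failed job raises ValueError.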
Example #4
Source File: test_magics.py    From python-bigquery with Apache License 2.0
def test_bigquery_magic_omits_tracebacks_from_error_message():
    ip = IPython.get_ipython()
    ip.extension_manager.load_extension("google.cloud.bigquery")

    credentials_mock = mock.create_autospec(
        google.auth.credentials.Credentials, instance=True
    )
    default_patch = mock.patch(
        "google.auth.default", return_value=(credentials_mock, "general-project")
    )

    run_query_patch = mock.patch(
        "google.cloud.bigquery.magics._run_query",
        autospec=True,
        side_effect=exceptions.BadRequest("Syntax error in SQL query"),
    )

    with run_query_patch, default_patch, io.capture_output() as captured_io:
        ip.run_cell_magic("bigquery", "", "SELECT foo FROM WHERE LIMIT bar")

    output = captured_io.stderr
    assert "400 Syntax error in SQL query" in output
    assert "Traceback (most recent call last)" not in output
    assert "Syntax error" not in captured_io.stdout 
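The cell magic is expected to surface the 400 message on stderr without a Python traceback, and to keep the error text out of stdout.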
Example #5
Source File: test_import_measures.py    From openprescribing with MIT License
def test_check_definition_bad_sql(self):
        with self.assertRaises(BadRequest) as command_error:
            call_command("import_measures", check=True)
        self.assertIn("SQL error", str(command_error.exception)) 
Example #6
Source File: test_sqlalchemy_bigquery.py    From pybigquery with MIT License
def test_dry_run(engine, api_client):
    sql = 'SELECT * FROM test_pybigquery.sample_one_row'
    assert api_client.dry_run_query(sql).total_bytes_processed == 148

    sql = 'SELECT * FROM sample_one_row'
    with pytest.raises(BadRequest) as excinfo:
        api_client.dry_run_query(sql)

    assert 'Table name "sample_one_row" missing dataset while no default dataset is set in the request.' in str(excinfo.value.message) 
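A dry-run query validates the SQL and reports total_bytes_processed without executing the job; a table name lacking a dataset qualifier, with no default dataset configured, is rejected with BadRequest.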
Example #7
Source File: test_helper.py    From BigQuery_Helper with Apache License 2.0
def test_bad_query_raises_right_error(self):
        with self.assertRaises(BadRequest):
            self.my_bq.query_to_pandas("Not a valid query") 
Example #8
Source File: init_script.py    From professional-services with Apache License 2.0
def validate_bq_table_name(table_name):
    """Validates provided BigQuery table name for maximum character limit and
    permitted characters."""

    max_characters = 1024
    patterns = '^[a-zA-Z0-9_]*$'
    error_msg = "Invalid table name {}. Table name must be alphanumeric " \
                "(plus underscores) and must be at most 1024 characters" \
                " long.".format(table_name)

    if len(table_name) > max_characters:
        raise exceptions.BadRequest(error_msg)

    if not re.search(patterns, table_name):
        raise exceptions.BadRequest(error_msg) 
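For illustration, assuming the validator above is in scope (the table names are arbitrary):

validate_bq_table_name("events_2020")  # passes: alphanumeric plus underscores
validate_bq_table_name("bad-name!")    # raises exceptions.BadRequest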
Example #9
Source File: backfill.py    From professional-services with Apache License 2.0
def get_buckets(project_ids: List[str],
                gcs_client: storage.Client) -> List[Dict[str, str]]:
    """Retrieves list of metadata for all buckets in a GCP org.

    Args:
        project_ids: List of strings holding project IDs
        gcs_client: storage.Client object

    Returns:
        List of dictionaries mapping bucket-level metadata.
    """
    output_list = []
    try:
        for project_id in project_ids:
            try:
                bucket_list = list(gcs_client.list_buckets(project=project_id))
                for bucket in bucket_list:
                    output_list.append({
                        "bucket_name": bucket.name,
                        "project_id": project_id,
                        "last_read_timestamp": "",
                        "days_since_last_read": -1,
                        "read_count_30_days": -1,
                        "read_count_90_days": -1,
                        "export_day": datetime.datetime.utcnow().strftime("%Y-%m-%d"),
                        "recommended_OLM": ""
                    })
            except Forbidden as err:
                # The exception comes from list_buckets, so report the
                # project rather than a possibly-unbound bucket variable.
                logging.error(f"Access denied listing buckets in {project_id}. {err}")

            except BadRequest as err:
                logging.error(f"Could not list buckets in {project_id}.")
                logging.error(err)
        return output_list

    except Exception as err:
        logging.error(f"Could not access buckets in {project_id}. {err}")
Example #10
Source File: import_pipeline.py    From professional-services with Apache License 2.0
def finish_bundle(self):
        self.bigquery_client = None
        # wait for the load jobs to complete
        for _, load_job in self.load_jobs.items():
            try:
                load_job.result()
            except BadRequest as e:
                logging.error('error in load_job %s', load_job.self_link)
                raise e 
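In this pipeline, finish_bundle blocks on every queued load job so that a malformed load surfaces as a BadRequest, with the job's self_link logged, instead of failing silently after the bundle completes.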
Example #11
Source File: import_pipeline.py    From professional-services with Apache License 2.0
def process(self, element, schemas):
        """Element is a tuple of key_ name and iterable of filesystem paths."""

        dataset_ref = self.get_dataset_ref()
        sharded_key_name = element[0]
        key_name = AssignGroupByKey.remove_shard(element[0])
        object_paths = [object_path for object_path in element[1]]
        job_config = bigquery.LoadJobConfig()
        job_config.write_disposition = 'WRITE_APPEND'
        job_config.schema_update_options = [
            bigquery.job.SchemaUpdateOption.ALLOW_FIELD_ADDITION]

        table_ref = dataset_ref.table(self.asset_type_to_table_name(key_name))

        # use load_time as a timestamp.
        job_config.time_partitioning = bigquery.table.TimePartitioning(
            field='timestamp')
        job_config.schema = self.to_bigquery_schema(schemas[sharded_key_name])
        job_config.source_format = bigquery.SourceFormat.NEWLINE_DELIMITED_JSON
        try:
            load_job = self.bigquery_client.load_table_from_uri(
                object_paths,
                table_ref,
                location=self.dataset_location,
                job_config=job_config)
            self.load_jobs[key_name] = load_job
        except BadRequest as e:
            logging.error('error in load_job %s, %s, %s, %s',
                          str(object_paths), str(table_ref),
                          str(self.dataset_location),
                          str(job_config.to_api_repr()))
            raise e 
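Creating the load job can itself return a 400 (for example, an invalid schema or partitioning configuration), so the handler logs the source URIs, table, location, and the full job config before re-raising.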
Example #12
Source File: test_magics.py    From python-bigquery with Apache License 2.0
def test_bigquery_magic_w_table_id_invalid():
    ip = IPython.get_ipython()
    ip.extension_manager.load_extension("google.cloud.bigquery")
    magics.context._project = None

    credentials_mock = mock.create_autospec(
        google.auth.credentials.Credentials, instance=True
    )
    default_patch = mock.patch(
        "google.auth.default", return_value=(credentials_mock, "general-project")
    )

    list_rows_patch = mock.patch(
        "google.cloud.bigquery.magics.bigquery.Client.list_rows",
        autospec=True,
        side_effect=exceptions.BadRequest("Not a valid table ID"),
    )

    table_id = "not-a-real-table"

    with list_rows_patch, default_patch, io.capture_output() as captured_io:
        ip.run_cell_magic("bigquery", "df", table_id)

    output = captured_io.stderr
    assert "Could not save output to variable" in output
    assert "400 Not a valid table ID" in output
    assert "Traceback (most recent call last)" not in output 
Example #13
Source File: system.py    From python-bigquery with Apache License 2.0
def test_query_w_failed_query(self):
        from google.api_core.exceptions import BadRequest

        with self.assertRaises(BadRequest):
            Config.CLIENT.query("invalid syntax;").result() 
Example #14
Source File: system.py    From python-bigquery with Apache License 2.0
def test_delete_dataset_delete_contents_false(self):
        from google.api_core import exceptions

        dataset = self.temp_dataset(_make_dataset_id("delete_table_false"))
        table_id = "test_table"
        table_arg = Table(dataset.table(table_id), schema=SCHEMA)

        retry_403(Config.CLIENT.create_table)(table_arg)
        with self.assertRaises(exceptions.BadRequest):
            Config.CLIENT.delete_dataset(dataset) 
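Deleting a dataset that still contains tables is rejected with a 400 unless delete_contents=True is passed.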
Example #15
Source File: engine_client_test.py    From Cirq with Apache License 2.0
def test_get_reservation_exception(client_constructor):
    grpc_client = setup_mock_(client_constructor)
    grpc_client.get_quantum_reservation.side_effect = exceptions.BadRequest(
        'boom')

    client = EngineClient()
    with pytest.raises(EngineException, match='boom'):
        client.get_reservation('proj', 'processor0', 'goog') 
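The engine client translates API errors for callers: the BadRequest raised by the mocked gRPC client surfaces as an EngineException that preserves the original message.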
Example #16
Source File: engine_client_test.py    From Cirq with Apache License 2.0
def test_get_current_calibration_error(client_constructor):
    grpc_client = setup_mock_(client_constructor)

    grpc_client.get_quantum_calibration.side_effect = exceptions.BadRequest(
        'boom')

    client = EngineClient()
    with pytest.raises(EngineException, match='boom'):
        client.get_current_calibration('proj', 'processor0') 
Example #17
Source File: test_system.py    From python-storage with Apache License 2.0
def test_ubla_set_unset_preserves_acls(self):
        new_bucket_name = "ubla-acls" + unique_resource_id("-")
        self.assertRaises(
            exceptions.NotFound, Config.CLIENT.get_bucket, new_bucket_name
        )
        bucket = retry_429_503(Config.CLIENT.create_bucket)(new_bucket_name)
        self.case_buckets_to_delete.append(new_bucket_name)

        blob_name = "my-blob.txt"
        blob = bucket.blob(blob_name)
        payload = b"DEADBEEF"
        blob.upload_from_string(payload)

        # Preserve ACLs before setting UBLA
        bucket_acl_before = list(bucket.acl)
        blob_acl_before = list(blob.acl)

        # Set UBLA
        bucket.iam_configuration.uniform_bucket_level_access_enabled = True
        bucket.patch()

        self.assertTrue(bucket.iam_configuration.uniform_bucket_level_access_enabled)

        # While UBLA is set, cannot get / set ACLs
        with self.assertRaises(exceptions.BadRequest):
            bucket.acl.reload()

        # Clear UBLA
        bucket.iam_configuration.uniform_bucket_level_access_enabled = False
        bucket.patch()

        # Query ACLs after clearing UBLA
        bucket.acl.reload()
        bucket_acl_after = list(bucket.acl)
        blob.acl.reload()
        blob_acl_after = list(blob.acl)

        self.assertEqual(bucket_acl_before, bucket_acl_after)
        self.assertEqual(blob_acl_before, blob_acl_after) 
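While uniform bucket-level access is enabled, any ACL read or write returns a 400; the test verifies that disabling it restores exactly the bucket and blob ACLs recorded beforehand.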
Example #18
Source File: test_system.py    From python-storage with Apache License 2.0
def test_new_bucket_w_ubla(self):
        new_bucket_name = "new-w-ubla" + unique_resource_id("-")
        self.assertRaises(
            exceptions.NotFound, Config.CLIENT.get_bucket, new_bucket_name
        )
        bucket = Config.CLIENT.bucket(new_bucket_name)
        bucket.iam_configuration.uniform_bucket_level_access_enabled = True
        retry_429_503(bucket.create)()
        self.case_buckets_to_delete.append(new_bucket_name)

        bucket_acl = bucket.acl
        with self.assertRaises(exceptions.BadRequest):
            bucket_acl.reload()

        bucket_acl.loaded = True  # Fake that we somehow loaded the ACL
        bucket_acl.all().grant_read()
        with self.assertRaises(exceptions.BadRequest):
            bucket_acl.save()

        blob_name = "my-blob.txt"
        blob = bucket.blob(blob_name)
        payload = b"DEADBEEF"
        blob.upload_from_string(payload)

        found = bucket.get_blob(blob_name)
        self.assertEqual(found.download_as_string(), payload)

        blob_acl = blob.acl
        with self.assertRaises(exceptions.BadRequest):
            blob_acl.reload()

        blob_acl.loaded = True  # Fake that we somehow loaded the ACL
        blob_acl.all().grant_read()
        with self.assertRaises(exceptions.BadRequest):
            blob_acl.save() 
Example #19
Source File: load_table_benchmark.py    From professional-services with Apache License 2.0
def load_from_gcs(self):
        """Loads GCS files into the benchmark table and stores results.

        Creates and runs a load job to load files the GCS URI into the
        benchmark table. Then uses benchmark_result_util.BenchmarkResultUtil
        to gather results and generate a results row, which it then inserts
        into the BigQuery results table.

        Raises:
            google.api_core.exceptions.BadRequest: 400 Error while reading data,
                error message: Total data size exceeds max allowed size

        """
        job_type = benchmark_parameters.BENCHMARK_PARAMETERS['benchmark_names'][
            self.benchmark_name]['type']
        source_formats = file_constants.FILE_CONSTANTS['sourceFormats']
        job_config = bigquery.LoadJobConfig()
        job_config.source_format = source_formats[self.file_type]
        if self.file_type == 'csv':
            job_config.skip_leading_rows = 1

        self.load_job = self.bq_client.load_table_from_uri(
            source_uris='{0:s}/*'.format(self.uri),
            destination=self.dataset_ref.table(self.job_destination_table),
            job_config=job_config,
        )
        logging.info('Started load job {0:s} for table {1:s}.'.format(
            self.load_job.job_id, self.job_destination_table))
        try:
            self.load_job.result()
            load_result = benchmark_result_util.LoadBenchmarkResultUtil(
                job=self.load_job,
                job_type=job_type,
                benchmark_name=self.benchmark_name,
                project_id=self.bq_project,
                results_table_name=self.results_table_name,
                results_dataset_id=self.results_table_dataset_id,
                bq_logs_dataset=self.bq_logs_dataset,
                job_source_uri='{0:s}/*'.format(self.uri),
                load_table_id=self.job_destination_table,
                load_dataset_id=self.dataset_id)
            load_result.insert_results_row()

        except exceptions.BadRequest as e:
            logging.error(e.message) 
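As the docstring notes, the load can fail with a 400 such as "Total data size exceeds max allowed size"; in that case the benchmark logs e.message and skips recording a results row.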
Example #20
Source File: bigquery_api.py    From sroka with MIT License
def query_bigquery(input_query, filename=None):

    if filename:
        if not isinstance(filename, str):
            print('filename needs to be a string')
            return None

        if not isinstance(input_query, str):
            print('input_query needs to be a string')
            return None

        try:
            f = open(filename, 'w')
            f.close()
        except FileNotFoundError:
            print('file cannot be saved in selected directory')
            return None

    else:
        if not isinstance(input_query, str):
            print('input_query needs to be a string')
            return pd.DataFrame([])

    client = bigquery.Client.from_service_account_json(
        KEY_FILE)

    query_job = client.query(input_query)

    try:
        df = query_job.result().to_dataframe()

    except (NotFound, BadRequest) as error:
        print(error)
        if filename:
            return None
        return pd.DataFrame([])

    if filename:
        df.to_csv(filename)
        print('saved to ' + filename)
        return None
    else:
        return df 
Example #21
Source File: bigquery_api.py    From sroka with MIT License
def done_bigquery(job_id, filename=None):

    if filename:
        if not isinstance(filename, str):
            print('filename needs to be a string')
            return None

        if not isinstance(job_id, str):
            print('job_id needs to be a string')
            return None

        try:
            f = open(filename, 'w')
            f.close()
        except FileNotFoundError:
            print('file cannot be saved in selected directory')
            return None

    else:
        if not isinstance(job_id, str):
            print('job_id needs to be a string')
            return pd.DataFrame([])

    client = bigquery.Client.from_service_account_json(
        KEY_FILE)
    try:
        query_job = client.get_job(job_id=job_id)
    except (BadRequest, NotFound) as error:
        print(error)
        if filename:
            return None
        return pd.DataFrame([])
    try:
        df = query_job.result().to_dataframe()
    except (Forbidden, NotFound) as error:
        print(error)
        if filename:
            return None
        return pd.DataFrame([])
    if filename:
        df.to_csv(filename)
        print('saved to ' + filename)
        return None
    else:
        return df 
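Both helpers share one error contract: NotFound or BadRequest when submitting or fetching the job, and Forbidden or NotFound when fetching results, are printed rather than raised, returning None in file mode and an empty DataFrame otherwise.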
Example #22
Source File: test_system.py    From python-storage with Apache License 2.0
def test_get_set_iam_policy(self):
        import pytest
        from google.cloud.storage.iam import STORAGE_OBJECT_VIEWER_ROLE
        from google.api_core.exceptions import BadRequest, PreconditionFailed

        bucket_name = "iam-policy" + unique_resource_id("-")
        bucket = retry_429_503(Config.CLIENT.create_bucket)(bucket_name)
        self.case_buckets_to_delete.append(bucket_name)
        self.assertTrue(bucket.exists())

        policy_no_version = bucket.get_iam_policy()
        self.assertEqual(policy_no_version.version, 1)

        policy = bucket.get_iam_policy(requested_policy_version=3)
        self.assertEqual(policy, policy_no_version)

        member = "serviceAccount:{}".format(Config.CLIENT.get_service_account_email())

        BINDING_W_CONDITION = {
            "role": STORAGE_OBJECT_VIEWER_ROLE,
            "members": {member},
            "condition": {
                "title": "always-true",
                "description": "test condition always-true",
                "expression": "true",
            },
        }
        policy.bindings.append(BINDING_W_CONDITION)

        with pytest.raises(
            PreconditionFailed, match="enable uniform bucket-level access"
        ):
            bucket.set_iam_policy(policy)

        bucket.iam_configuration.uniform_bucket_level_access_enabled = True
        bucket.patch()

        policy = bucket.get_iam_policy(requested_policy_version=3)
        policy.bindings.append(BINDING_W_CONDITION)

        with pytest.raises(BadRequest, match="at least 3"):
            bucket.set_iam_policy(policy)

        policy.version = 3
        returned_policy = bucket.set_iam_policy(policy)
        self.assertEqual(returned_policy.version, 3)
        self.assertEqual(returned_policy.bindings, policy.bindings)

        fetched_policy = bucket.get_iam_policy(requested_policy_version=3)
        self.assertEqual(fetched_policy.bindings, returned_policy.bindings)
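Conditional IAM bindings require uniform bucket-level access (hence the PreconditionFailed earlier) and policy version 3; trying to set one while the requested policy version is lower fails with a 400 asking for a version of at least 3.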