Python boto3.exceptions.Boto3Error() Examples
The following are 12 code examples of boto3.exceptions.Boto3Error(). The original project and source file are noted above each example. You may also want to check out all available functions and classes of the boto3.exceptions module, or try the search function.
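Boto3Error is the base class for the exceptions boto3 itself raises, while service-side errors surface as botocore.exceptions.ClientError, so catching both covers most failure modes. Below is a minimal, hedged sketch of that pattern (the bucket name is a placeholder; this snippet is not taken from any of the projects listed here):

import boto3
from boto3.exceptions import Boto3Error
from botocore.exceptions import BotoCoreError, ClientError

def bucket_is_reachable(bucket_name):
    """Return True if the bucket answers a HEAD request, False on any AWS error."""
    s3 = boto3.client("s3")
    try:
        s3.head_bucket(Bucket=bucket_name)
        return True
    except (Boto3Error, BotoCoreError, ClientError):
        # Boto3Error: boto3's own failures (e.g. resource/transfer errors)
        # BotoCoreError/ClientError: connection and service-side errors
        return False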
Example #1
Source File: backend.py From django-warrant with BSD 3-Clause "New" or "Revised" License
def authenticate(self, username=None, password=None):
    """
    Authenticate a Cognito User
    :param username: Cognito username
    :param password: Cognito password
    :return: returns User instance of AUTH_USER_MODEL or None
    """
    cognito_user = CognitoUser(
        settings.COGNITO_USER_POOL_ID,
        settings.COGNITO_APP_ID,
        access_key=getattr(settings, 'AWS_ACCESS_KEY_ID', None),
        secret_key=getattr(settings, 'AWS_SECRET_ACCESS_KEY', None),
        username=username)
    try:
        cognito_user.authenticate(password)
    except (Boto3Error, ClientError) as e:
        return self.handle_error_response(e)
    user = cognito_user.get_user()
    if user:
        user.access_token = cognito_user.access_token
        user.id_token = cognito_user.id_token
        user.refresh_token = cognito_user.refresh_token
    return user
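For context, a backend like this is normally registered in Django's AUTHENTICATION_BACKENDS setting, after which the standard django.contrib.auth.authenticate() call drives the method above. A rough sketch under that assumption (the backend dotted path and credentials are placeholders, not verified against django-warrant):

# settings.py -- placeholder dotted path; check django-warrant's docs for the real one
AUTHENTICATION_BACKENDS = [
    'django_warrant.backend.CognitoBackend',
]

# somewhere in a view
from django.contrib.auth import authenticate

user = authenticate(username='jane.doe', password='correct-horse')
if user is None:
    # Bad credentials, or a Boto3Error/ClientError that
    # handle_error_response() translated into a None result.
    ...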
Example #2
Source File: boto_manager.py From fence with Apache License 2.0
def assume_role(self, role_arn, duration_seconds, config=None):
    assert (
        duration_seconds
    ), 'assume_role() cannot be called without "duration_seconds" parameter; please check your "expires_in" parameters'
    try:
        if config and "aws_access_key_id" in config:
            self.sts_client = client("sts", **config)
        session_name_postfix = uuid.uuid4()
        return self.sts_client.assume_role(
            RoleArn=role_arn,
            DurationSeconds=duration_seconds,
            RoleSessionName="{}-{}".format("gen3", session_name_postfix),
        )
    except Boto3Error as ex:
        self.logger.exception(ex)
        raise InternalError("Fail to assume role: {}".format(ex))
    except Exception as ex:
        self.logger.exception(ex)
        raise UnavailableError("Fail to reach AWS: {}".format(ex))
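The dict returned by sts_client.assume_role() carries the temporary credentials under the "Credentials" key. A hedged usage sketch of the method above (the manager instance, role ARN and account id are placeholders):

import boto3

# `manager` is a hypothetical instance of the class that defines assume_role()
resp = manager.assume_role(
    role_arn="arn:aws:iam::123456789012:role/example-role",
    duration_seconds=3600,
)
creds = resp["Credentials"]  # AccessKeyId, SecretAccessKey, SessionToken, Expiration

# Use the temporary credentials for a scoped client
s3 = boto3.client(
    "s3",
    aws_access_key_id=creds["AccessKeyId"],
    aws_secret_access_key=creds["SecretAccessKey"],
    aws_session_token=creds["SessionToken"],
)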
Example #3
Source File: ssm.py From cfn-sphere with Apache License 2.0
def get_parameter(self, name, with_decryption=True):
    try:
        return self.client.get_parameter(Name=name, WithDecryption=with_decryption)['Parameter']['Value']
    except (Boto3Error, ClientError) as e:
        raise CfnSphereBotoError(e)
Example #4
Source File: s3.py From cfn-sphere with Apache License 2.0
def get_contents_from_url(self, url):
    try:
        (_, bucket_name, key_name) = self._parse_url(url)
        s3_object = self.s3.Object(bucket_name, key_name)
        return s3_object.get(ResponseContentEncoding='utf-8')["Body"].read().decode('utf-8')
    except (Boto3Error, BotoCoreError, ClientError) as e:
        raise CfnSphereBotoError(e)
Example #5
Source File: kms.py From cfn-sphere with Apache License 2.0
def decrypt(self, encrypted_value, encryption_context=None):
    if encryption_context is None:
        encryption_context = {}
    try:
        ciphertext_blob = base64.b64decode(encrypted_value.encode())
        response = self.client.decrypt(CiphertextBlob=ciphertext_blob, EncryptionContext=encryption_context)
        return response['Plaintext'].decode('utf-8')
    except Boto3Error as e:
        raise CfnSphereBotoError(e)
    except ClientError as e:
        raise CfnSphereException(e)
Example #6
Source File: kms.py From cfn-sphere with Apache License 2.0
def encrypt(self, key_id, cleartext_string, encryption_context=None):
    if encryption_context is None:
        encryption_context = {}
    try:
        response = self.client.encrypt(KeyId=key_id, Plaintext=cleartext_string, EncryptionContext=encryption_context)
        return base64.b64encode(response['CiphertextBlob']).decode('utf-8')
    except (Boto3Error, ClientError) as e:
        raise CfnSphereBotoError(e)
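Examples #5 and #6 together implement a base64 round trip over KMS. The same round trip against a raw boto3 KMS client looks roughly like this (the key alias is a placeholder and must exist in your account):

import base64
import boto3

kms = boto3.client("kms")

# Encrypt: KMS returns raw bytes, so base64-encode them for storage as text.
encrypted = base64.b64encode(
    kms.encrypt(KeyId="alias/my-key", Plaintext="secret value")["CiphertextBlob"]
).decode("utf-8")

# Decrypt: undo the base64 step before handing the blob back to KMS.
decrypted = kms.decrypt(
    CiphertextBlob=base64.b64decode(encrypted)
)["Plaintext"].decode("utf-8")

assert decrypted == "secret value"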
Example #7
Source File: db.py From autopush with Mozilla Public License 2.0
def get_uaid(self, uaid):
    # type: (str) -> Dict[str, Any]
    """Get the database record for the UAID

    :raises:
        :exc:`ItemNotFound` if there is no record for this UAID.
        :exc:`ProvisionedThroughputExceededException` if dynamodb table
        exceeds throughput.

    """
    try:
        item = self.table.get_item(
            Key={
                'uaid': hasher(uaid)
            },
            ConsistentRead=True,
        )
        if item.get('ResponseMetadata').get('HTTPStatusCode') != 200:
            raise ItemNotFound('uaid not found')
        item = item.get('Item')
        if item is None:
            raise ItemNotFound("uaid not found")
        if item.keys() == ['uaid']:
            # Incomplete record, drop it.
            self.drop_user(uaid)
            raise ItemNotFound("uaid not found")
        # Mobile users do not check in after initial registration.
        # DO NOT EXPIRE THEM.
        return item
    except Boto3Error:  # pragma: nocover
        # We trap JSONResponseError because Moto returns text instead of
        # JSON when looking up values in empty tables. We re-throw the
        # correct ItemNotFound exception
        raise ItemNotFound("uaid not found")
Example #8
Source File: test_process_notification.py From notifications-api with MIT License
def test_send_notification_to_queue_throws_exception_deletes_notification(sample_notification, mocker):
    mocked = mocker.patch('app.celery.provider_tasks.deliver_sms.apply_async', side_effect=Boto3Error("EXPECTED"))
    with pytest.raises(Boto3Error):
        send_notification_to_queue(sample_notification, False)
    mocked.assert_called_once_with([(str(sample_notification.id))], queue='send-sms-tasks')

    assert Notification.query.count() == 0
    assert NotificationHistory.query.count() == 0
Example #9
Source File: test_cmds_spark_run.py From paasta with Apache License 2.0
def test_suppress_clusterman_metrics_errors(
    self,
    mock_time,
    mock_run_docker_container,
    mock_get_spark_config,
    mock_get_username,
    mock_pick_random_port,
    mock_os_path_exists,
    mock_get_aws_credentials,
    mock_create_spark_config_str,
):
    mock_get_aws_credentials.return_value = ("id", "secret")
    with mock.patch(
        "paasta_tools.cli.cmds.spark_run.emit_resource_requirements", autospec=True
    ) as mock_emit_resource_requirements, mock.patch(
        "paasta_tools.cli.cmds.spark_run.clusterman_metrics", autospec=True
    ):
        mock_emit_resource_requirements.side_effect = Boto3Error
        mock_create_spark_config_str.return_value = "--conf spark.cores.max=5"

        args = mock.MagicMock(
            suppress_clusterman_metrics_errors=False, cmd="pyspark"
        )
        with pytest.raises(Boto3Error):
            configure_and_run_docker_container(
                args=args,
                docker_img="fake-registry/fake-service",
                instance_config=self.instance_config,
                system_paasta_config=self.system_paasta_config,
            )

        # make sure we don't blow up when this setting is True
        args.suppress_clusterman_metrics_errors = True
        configure_and_run_docker_container(
            args=args,
            docker_img="fake-registry/fake-service",
            instance_config=self.instance_config,
            system_paasta_config=self.system_paasta_config,
        )
Example #10
Source File: boto_manager.py From fence with Apache License 2.0
def delete_data_file(self, bucket, guid):
    """
    We use buckets with versioning disabled.
    See AWS docs here:
    https://docs.aws.amazon.com/AmazonS3/latest/dev/DeletingObjectsfromVersioningSuspendedBuckets.html
    """
    try:
        s3_objects = self.s3_client.list_objects_v2(
            Bucket=bucket, Prefix=guid, Delimiter="/"
        )
        if not s3_objects.get("Contents"):
            # file not found in the bucket
            self.logger.info(
                "tried to delete GUID {} but didn't find in bucket {}".format(
                    guid, bucket
                )
            )
            return
        if len(s3_objects["Contents"]) > 1:
            raise InternalError("multiple files found with GUID {}".format(guid))
        key = s3_objects["Contents"][0]["Key"]
        self.s3_client.delete_object(Bucket=bucket, Key=key)
        self.logger.info(
            "deleted file for GUID {} in bucket {}".format(guid, bucket)
        )
    except (KeyError, Boto3Error) as e:
        self.logger.exception(e)
        raise InternalError("Failed to delete file: {}".format(str(e)))
Example #11
Source File: boto_manager.py From fence with Apache License 2.0
def get_bucket_region(self, bucket, config):
    try:
        if "aws_access_key_id" in config:
            self.s3_client = client("s3", **config)
        response = self.s3_client.get_bucket_location(Bucket=bucket)
        region = response.get("LocationConstraint")
    except Boto3Error as ex:
        self.logger.exception(ex)
        raise InternalError("Fail to get bucket region: {}".format(ex))
    except Exception as ex:
        self.logger.exception(ex)
        raise UnavailableError("Fail to reach AWS: {}".format(ex))
    if region is None:
        # get_bucket_location returns a null LocationConstraint for buckets
        # in us-east-1, so fall back to that region explicitly.
        return "us-east-1"
    return region
Example #12
Source File: test_cloud.py From barman with GNU General Public License v3.0
def test_worker_process_main(self, worker_process_execute_job_mock):
    job_collection = [
        {"job_id": 1, "job_type": "upload_part"},
        {"job_id": 2, "job_type": "upload_part"},
        {"job_id": 3, "job_type": "upload_part"},
        None,
    ]

    interface = CloudInterface(
        url='s3://bucket/path/to/dir',
        encryption=None)
    interface.queue = mock.MagicMock()
    interface.errors_queue = Queue()
    interface.queue.get.side_effect = job_collection
    interface.worker_process_main(0)

    # Jobs have been grabbed from the queue, and the queue itself has been
    # notified of tasks being done
    assert interface.queue.get.call_count == 4
    # worker_process_execute_job is executed only 3 times because it is
    # not called for the process stop marker
    assert worker_process_execute_job_mock.call_count == 3
    assert interface.queue.task_done.call_count == 4
    assert interface.errors_queue.empty()

    # If an exception is raised during the execution of a job, the worker
    # process must put the error in the appropriate queue.
    def execute_mock(job, process_number):
        if job["job_id"] == 2:
            raise Boto3Error("Something is gone wrong")

    interface.queue.reset_mock()
    worker_process_execute_job_mock.reset_mock()
    worker_process_execute_job_mock.side_effect = execute_mock
    interface.queue.get.side_effect = job_collection
    interface.worker_process_main(0)
    assert interface.queue.get.call_count == 4
    # worker_process_execute_job is executed only 3 times because it is
    # not called for the process stop marker
    assert worker_process_execute_job_mock.call_count == 3
    assert interface.queue.task_done.call_count == 4
    assert interface.errors_queue.get() == "Something is gone wrong"
    assert interface.errors_queue.empty()