Python boto3.exceptions.S3UploadFailedError() Examples
The following are 12 code examples of boto3.exceptions.S3UploadFailedError(). You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also want to check out all available functions/classes of the module boto3.exceptions, or try the search function.
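Before the project examples, here is a minimal sketch of where this exception usually comes from: boto3's managed transfer methods wrap client errors raised during an upload in S3UploadFailedError, so calling code typically catches it around the upload call. The bucket and key names below are placeholders, not taken from any of the projects listed here.

import logging

import boto3
from boto3.exceptions import S3UploadFailedError

logger = logging.getLogger(__name__)

def upload_report(path):
    s3 = boto3.client("s3")
    try:
        # Managed transfer: client errors raised during the upload are
        # re-raised as boto3.exceptions.S3UploadFailedError.
        # Bucket and key names below are hypothetical.
        s3.upload_file(path, "my-example-bucket", "reports/latest.csv")
    except S3UploadFailedError as exc:
        logger.error("Upload failed: %s", exc)
        raise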
Example #1
Source File: transfer.py From faces with GNU General Public License v2.0 | 6 votes |
def upload_file(self, filename, bucket, key, callback=None, extra_args=None):
    """Upload a file to an S3 object.

    Variants have also been injected into S3 client, Bucket and Object.
    You don't have to use S3Transfer.upload_file() directly.
    """
    if not isinstance(filename, six.string_types):
        raise ValueError('Filename must be a string')

    subscribers = self._get_subscribers(callback)
    future = self._manager.upload(
        filename, bucket, key, extra_args, subscribers)
    try:
        future.result()
    # If a client error was raised, add the backwards compatibility layer
    # that raises a S3UploadFailedError. These specific errors were only
    # ever thrown for upload_parts but now can be thrown for any related
    # client error.
    except ClientError as e:
        raise S3UploadFailedError(
            "Failed to upload %s to %s: %s" % (
                filename, '/'.join([bucket, key]), e))
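The snippet above is the S3Transfer method itself; callers are usually expected to go through the injected client, Bucket, and Object variants. As a rough usage sketch (the bucket and file names are assumptions, not from the faces project), a direct caller would handle the wrapped error like this:

import boto3
from boto3.exceptions import S3UploadFailedError
from boto3.s3.transfer import S3Transfer

transfer = S3Transfer(boto3.client("s3"))
try:
    # Hypothetical names; upload_file() re-raises client errors
    # as S3UploadFailedError, as shown in the example above.
    transfer.upload_file("backup.tar.gz", "my-example-bucket", "backups/backup.tar.gz")
except S3UploadFailedError as exc:
    print(f"upload failed: {exc}")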
Example #2
Source File: transfer.py From faces with GNU General Public License v2.0 | 6 votes |
def upload_file(self, filename, bucket, key, callback=None, extra_args=None):
    """Upload a file to an S3 object.

    Variants have also been injected into S3 client, Bucket and Object.
    You don't have to use S3Transfer.upload_file() directly.
    """
    if not isinstance(filename, six.string_types):
        raise ValueError('Filename must be a string')

    subscribers = self._get_subscribers(callback)
    future = self._manager.upload(
        filename, bucket, key, extra_args, subscribers)
    try:
        future.result()
    # If a client error was raised, add the backwards compatibility layer
    # that raises a S3UploadFailedError. These specific errors were only
    # ever thrown for upload_parts but now can be thrown for any related
    # client error.
    except ClientError as e:
        raise S3UploadFailedError(
            "Failed to upload %s to %s: %s" % (
                filename, '/'.join([bucket, key]), e))
Example #3
Source File: _s3_sync.py From taskcat with Apache License 2.0 | 6 votes |
def _s3_upload_file(paths, prefix, s3_client, acl):
    local_filename, bucket, s3_path = paths
    retry = 0
    # backoff and retry
    while retry < 5:
        LOG.info(
            f"s3://{bucket}/{prefix + s3_path}", extra={"nametag": PrintMsg.S3}
        )
        try:
            s3_client.upload_file(
                local_filename, bucket, prefix + s3_path, ExtraArgs={"ACL": acl}
            )
            break
        except Exception as e:  # pylint: disable=broad-except
            retry += 1
            LOG.error("S3 upload error: %s" % e)
            # give up if we've exhausted retries, or if the error is not-retryable
            # ie. AccessDenied
            if retry == 5 or (
                isinstance(e, S3UploadFailedError) and "(AccessDenied)" in str(e)
            ):
                raise TaskCatException("Failed to upload to S3")
            time.sleep(retry * 2)
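The taskcat helper above retries with a linear backoff and gives up early when the S3UploadFailedError message contains (AccessDenied), since that failure will not resolve on retry. A stripped-down sketch of the same pattern, with hypothetical names and without taskcat's logging and exception types, might look like:

import time

from boto3.exceptions import S3UploadFailedError

def upload_with_retry(s3_client, filename, bucket, key, attempts=5):
    # Hypothetical helper illustrating the retry/backoff pattern above.
    for attempt in range(1, attempts + 1):
        try:
            s3_client.upload_file(filename, bucket, key)
            return
        except Exception as exc:  # pylint: disable=broad-except
            non_retryable = (
                isinstance(exc, S3UploadFailedError) and "(AccessDenied)" in str(exc)
            )
            if attempt == attempts or non_retryable:
                raise
            time.sleep(attempt * 2)  # linear backoff, as in the example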
Example #4
Source File: transfer.py From aws-extender with MIT License | 6 votes |
def upload_file(self, filename, bucket, key, callback=None, extra_args=None):
    """Upload a file to an S3 object.

    Variants have also been injected into S3 client, Bucket and Object.
    You don't have to use S3Transfer.upload_file() directly.
    """
    if not isinstance(filename, six.string_types):
        raise ValueError('Filename must be a string')

    subscribers = self._get_subscribers(callback)
    future = self._manager.upload(
        filename, bucket, key, extra_args, subscribers)
    try:
        future.result()
    # If a client error was raised, add the backwards compatibility layer
    # that raises a S3UploadFailedError. These specific errors were only
    # ever thrown for upload_parts but now can be thrown for any related
    # client error.
    except ClientError as e:
        raise S3UploadFailedError(
            "Failed to upload %s to %s: %s" % (
                filename, '/'.join([bucket, key]), e))
Example #5
Source File: test_unit_s3_util.py From snowflake-connector-python with Apache License 2.0 | 5 votes |
def test_upload_file_with_s3_upload_failed_error():
    """Tests Upload file with S3UploadFailedError, which could indicate AWS token expires."""
    upload_file = MagicMock(
        side_effect=S3UploadFailedError(
            "An error occurred (ExpiredToken) when calling the "
            "CreateMultipartUpload operation: The provided token has expired."))
    client = Mock()
    client.Object.return_value = MagicMock(
        metadata=defaultdict(str),
        upload_file=upload_file)
    initial_parallel = 100
    upload_meta = {
        'no_sleeping_time': True,
        'parallel': initial_parallel,
        'put_callback': None,
        'put_callback_output_stream': None,
        'existing_files': [],
        SHA256_DIGEST: '123456789abcdef',
        'stage_info': {
            'location': 'sfc-teststage/rwyitestacco/users/1234/',
            'locationType': 'S3',
        },
        'client': client,
        'dst_file_name': 'data1.txt.gz',
        'src_file_name': path.join(THIS_DIR, 'data', 'put_get_1.txt'),
        'overwrite': True,
    }
    upload_meta['real_src_file_name'] = upload_meta['src_file_name']
    upload_meta['upload_size'] = os.stat(upload_meta['src_file_name']).st_size

    akey = SnowflakeRemoteStorageUtil.upload_one_file(upload_meta)
    assert akey is None
    assert upload_meta['result_status'] == ResultStatus.RENEW_TOKEN
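The test above asserts that an ExpiredToken failure is reported as a renew-token result rather than a hard error. Outside the Snowflake connector, the underlying idea can be approximated as below; renew_credentials() is a hypothetical placeholder for whatever credential-refresh mechanism the caller has:

from boto3.exceptions import S3UploadFailedError

def upload_with_token_refresh(s3_client, filename, bucket, key, renew_credentials):
    # Hypothetical sketch: retry once with fresh credentials if the token expired.
    try:
        s3_client.upload_file(filename, bucket, key)
    except S3UploadFailedError as exc:
        if "ExpiredToken" not in str(exc):
            raise
        s3_client = renew_credentials()  # e.g. build a new client from refreshed STS creds
        s3_client.upload_file(filename, bucket, key)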
Example #6
Source File: test_unit_s3_util.py From snowflake-connector-python with Apache License 2.0 | 5 votes |
def test_upload_file_with_s3_upload_failed_error():
    """Tests Upload file with S3UploadFailedError, which could indicate AWS token expires."""
    upload_file = MagicMock(
        side_effect=S3UploadFailedError(
            "An error occurred (ExpiredToken) when calling the "
            "CreateMultipartUpload operation: The provided token has expired."))
    client = Mock()
    client.Object.return_value = MagicMock(
        metadata=defaultdict(str),
        upload_file=upload_file)
    initial_parallel = 100
    upload_meta = {
        'no_sleeping_time': True,
        'parallel': initial_parallel,
        'put_callback': None,
        'put_callback_output_stream': None,
        'existing_files': [],
        SHA256_DIGEST: '123456789abcdef',
        'stage_info': {
            'location': 'sfc-teststage/rwyitestacco/users/1234/',
            'locationType': 'S3',
        },
        'client': client,
        'dst_file_name': 'data1.txt.gz',
        'src_file_name': path.join(THIS_DIR, 'data', 'put_get_1.txt'),
        'overwrite': True,
    }
    upload_meta['real_src_file_name'] = upload_meta['src_file_name']
    upload_meta['upload_size'] = os.stat(upload_meta['src_file_name']).st_size

    akey = SnowflakeRemoteStorageUtil.upload_one_file(upload_meta)
    assert akey is None
    assert upload_meta['result_status'] == ResultStatus.RENEW_TOKEN
Example #7
Source File: backup.py From jenkins-backup-s3 with MIT License | 5 votes |
def backup(self, file_path, backup_name):
    key = "%s/%s%s" % (self.__bucket_prefix, backup_name, self.KEY_SUFFIX)
    logger.debug(colored('Attempting to upload object to S3', 'white'))
    try:
        s3_object = self.s3.Object(self.__bucket, key).upload_file(
            file_path,
            Callback=logger.info(colored('File uploaded to S3 successfully', 'blue')))
    except S3UploadFailedError as e:
        logger.critical(colored("Error uploading file to S3: %s" % e, 'red'))
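The backup helper above uses the resource API (s3.Object(...).upload_file) and only logs the failure. A bare-bones version of the same call, with placeholder bucket and key names and without the project's colored logging, could be:

import boto3
from boto3.exceptions import S3UploadFailedError

s3 = boto3.resource("s3")
try:
    # Hypothetical names; Object.upload_file() also surfaces failures
    # as S3UploadFailedError.
    s3.Object("my-backup-bucket", "backups/jenkins/backup.tar.gz").upload_file(
        "/tmp/backup.tar.gz")
except S3UploadFailedError as exc:
    print(f"Error uploading file to S3: {exc}")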
Example #8
Source File: test_utils.py From sagemaker-python-sdk with Apache License 2.0 | 5 votes |
def mock_s3_upload(self):
    dst = os.path.join(self.tmp, "dst")

    class MockS3Object(object):
        def __init__(self, bucket, key):
            self.bucket = bucket
            self.key = key

        def upload_file(self, target, **kwargs):
            if self.bucket in BUCKET_WITHOUT_WRITING_PERMISSION:
                raise exceptions.S3UploadFailedError()
            shutil.copy2(target, dst)

    self.sagemaker_session.boto_session.resource().Object = MockS3Object
    return dst
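The SageMaker test helper above swaps boto3's Object class for a mock that raises S3UploadFailedError for buckets without write permission. A self-contained way to simulate the same failure with unittest.mock alone (the function and names here are illustrative, not from the SDK's test suite) is:

from unittest import mock

import pytest
from boto3.exceptions import S3UploadFailedError

def upload_artifact(s3_client, path, bucket, key):
    # Hypothetical function under test.
    s3_client.upload_file(path, bucket, key)

def test_upload_artifact_propagates_upload_failure():
    client = mock.Mock()
    client.upload_file.side_effect = S3UploadFailedError("Failed to upload")
    with pytest.raises(S3UploadFailedError):
        upload_artifact(client, "model.tar.gz", "no-write-bucket", "model.tar.gz")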
Example #9
Source File: transfer.py From AWS-Transit-Gateway-Demo-MultiAccount with MIT License | 5 votes |
def upload_file(self, filename, bucket, key, callback=None, extra_args=None):
    """Upload a file to an S3 object.

    Variants have also been injected into S3 client, Bucket and Object.
    You don't have to use S3Transfer.upload_file() directly.

    .. seealso::
        :py:meth:`S3.Client.upload_file`
        :py:meth:`S3.Client.upload_fileobj`
    """
    if not isinstance(filename, six.string_types):
        raise ValueError('Filename must be a string')

    subscribers = self._get_subscribers(callback)
    future = self._manager.upload(
        filename, bucket, key, extra_args, subscribers)
    try:
        future.result()
    # If a client error was raised, add the backwards compatibility layer
    # that raises a S3UploadFailedError. These specific errors were only
    # ever thrown for upload_parts but now can be thrown for any related
    # client error.
    except ClientError as e:
        raise S3UploadFailedError(
            "Failed to upload %s to %s: %s" % (
                filename, '/'.join([bucket, key]), e))
Example #10
Source File: transfer.py From AWS-Transit-Gateway-Demo-MultiAccount with MIT License | 5 votes |
def upload_file(self, filename, bucket, key, callback=None, extra_args=None):
    """Upload a file to an S3 object.

    Variants have also been injected into S3 client, Bucket and Object.
    You don't have to use S3Transfer.upload_file() directly.

    .. seealso::
        :py:meth:`S3.Client.upload_file`
        :py:meth:`S3.Client.upload_fileobj`
    """
    if not isinstance(filename, six.string_types):
        raise ValueError('Filename must be a string')

    subscribers = self._get_subscribers(callback)
    future = self._manager.upload(
        filename, bucket, key, extra_args, subscribers)
    try:
        future.result()
    # If a client error was raised, add the backwards compatibility layer
    # that raises a S3UploadFailedError. These specific errors were only
    # ever thrown for upload_parts but now can be thrown for any related
    # client error.
    except ClientError as e:
        raise S3UploadFailedError(
            "Failed to upload %s to %s: %s" % (
                filename, '/'.join([bucket, key]), e))
Example #11
Source File: transfer.py From aws-builders-fair-projects with Apache License 2.0 | 5 votes |
def upload_file(self, filename, bucket, key, callback=None, extra_args=None):
    """Upload a file to an S3 object.

    Variants have also been injected into S3 client, Bucket and Object.
    You don't have to use S3Transfer.upload_file() directly.

    .. seealso::
        :py:meth:`S3.Client.upload_file`
        :py:meth:`S3.Client.upload_fileobj`
    """
    if not isinstance(filename, six.string_types):
        raise ValueError('Filename must be a string')

    subscribers = self._get_subscribers(callback)
    future = self._manager.upload(
        filename, bucket, key, extra_args, subscribers)
    try:
        future.result()
    # If a client error was raised, add the backwards compatibility layer
    # that raises a S3UploadFailedError. These specific errors were only
    # ever thrown for upload_parts but now can be thrown for any related
    # client error.
    except ClientError as e:
        raise S3UploadFailedError(
            "Failed to upload %s to %s: %s" % (
                filename, '/'.join([bucket, key]), e))
Example #12
Source File: transfer.py From aws-builders-fair-projects with Apache License 2.0 | 5 votes |
def upload_file(self, filename, bucket, key, callback=None, extra_args=None):
    """Upload a file to an S3 object.

    Variants have also been injected into S3 client, Bucket and Object.
    You don't have to use S3Transfer.upload_file() directly.

    .. seealso::
        :py:meth:`S3.Client.upload_file`
        :py:meth:`S3.Client.upload_fileobj`
    """
    if not isinstance(filename, six.string_types):
        raise ValueError('Filename must be a string')

    subscribers = self._get_subscribers(callback)
    future = self._manager.upload(
        filename, bucket, key, extra_args, subscribers)
    try:
        future.result()
    # If a client error was raised, add the backwards compatibility layer
    # that raises a S3UploadFailedError. These specific errors were only
    # ever thrown for upload_parts but now can be thrown for any related
    # client error.
    except ClientError as e:
        raise S3UploadFailedError(
            "Failed to upload %s to %s: %s" % (
                filename, '/'.join([bucket, key]), e))