Python boto.s3() Examples
The following are 30 code examples of boto.s3().
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example.
You may also want to check out all available functions and classes of the module boto, or try the search function.
![](https://www.programcreek.com/common/static/images/search.png)
Example #1
Source File: bucket.py From aws-extender with MIT License | 6 votes |
def get_lifecycle_config(self, headers=None):
    """
    Return the bucket's current lifecycle configuration.

    :rtype: :class:`boto.s3.lifecycle.Lifecycle`
    :returns: A Lifecycle object describing every lifecycle rule
        currently in effect for this bucket.
    """
    resp = self.connection.make_request('GET', self.name,
                                        query_args='lifecycle',
                                        headers=headers)
    payload = resp.read()
    boto.log.debug(payload)
    if resp.status != 200:
        raise self.connection.provider.storage_response_error(
            resp.status, resp.reason, payload)
    config = Lifecycle()
    sax_handler = handler.XmlHandler(config, self)
    # xml.sax.parseString needs bytes; encode str payloads first.
    if not isinstance(payload, bytes):
        payload = payload.encode('utf-8')
    xml.sax.parseString(payload, sax_handler)
    return config
Example #2
Source File: bucket.py From aws-extender with MIT License | 6 votes |
def configure_lifecycle(self, lifecycle_config, headers=None):
    """
    Configure lifecycle for this bucket.

    :type lifecycle_config: :class:`boto.s3.lifecycle.Lifecycle`
    :param lifecycle_config: The lifecycle configuration you want
        to configure for this bucket.

    :rtype: bool
    :returns: True if the service accepted the configuration.
    """
    # Renamed local (was ``xml``) so it no longer shadows the ``xml``
    # package that sibling methods use via xml.sax; also dropped the
    # stale commented-out encode() line.
    xml_body = lifecycle_config.to_xml()
    fp = StringIO(xml_body)
    md5 = boto.utils.compute_md5(fp)
    if headers is None:
        headers = {}
    # S3 requires a Content-MD5 header on lifecycle PUT requests.
    headers['Content-MD5'] = md5[1]
    headers['Content-Type'] = 'text/xml'
    response = self.connection.make_request('PUT', self.name,
                                            data=fp.getvalue(),
                                            query_args='lifecycle',
                                            headers=headers)
    body = response.read()
    if response.status == 200:
        return True
    else:
        raise self.connection.provider.storage_response_error(
            response.status, response.reason, body)
Example #3
Source File: bucket.py From aws-extender with MIT License | 6 votes |
def get_logging_status(self, headers=None):
    """
    Fetch this bucket's logging status.

    :rtype: :class:`boto.s3.bucketlogging.BucketLogging`
    :return: A BucketLogging object for this bucket.
    """
    resp = self.connection.make_request('GET', self.name,
                                        query_args='logging',
                                        headers=headers)
    payload = resp.read()
    if resp.status != 200:
        raise self.connection.provider.storage_response_error(
            resp.status, resp.reason, payload)
    status = BucketLogging()
    sax_handler = handler.XmlHandler(status, self)
    # The SAX parser wants bytes; encode str payloads first.
    if not isinstance(payload, bytes):
        payload = payload.encode('utf-8')
    xml.sax.parseString(payload, sax_handler)
    return status
Example #4
Source File: util.py From mycroft with MIT License | 6 votes |
def load_from_s3_file(s3_uri):
    """Load data from S3.

    Useful for loading small config or schema files.

    :param s3_uri: path to S3 uri (s3://bucket/key)
    :returns: file contents
    """
    _, _, remainder = s3_uri.partition('://')
    bucket_name, _, key_name = remainder.partition('/')

    # Sort regions so any region whose name appears in the bucket
    # name is tried first (False sorts before True).
    def region_rank(region):
        return region.name not in bucket_name

    creds = get_boto_creds()
    for candidate in sorted(boto.s3.regions(), key=region_rank):
        try:
            connection = boto.s3.connect_to_region(candidate.name, **creds)
            return _load_from_s3_region(connection, bucket_name, key_name)
        except S3ResponseError as err:
            # 403/301 mean access is not allowed from this region --
            # skip to the next one; anything else is a real error.
            if err.status not in (403, 301):
                raise
    raise ValueError("{0}: No valid region found".format(s3_uri))
Example #5
Source File: get_s3json.py From hsds with Apache License 2.0 | 5 votes |
def main():
    """Entry point: fetch an S3-stored JSON object and pretty-print it."""
    args = sys.argv
    if len(args) == 1 or args[1] in ("-h", "--help"):
        printUsage()
        sys.exit(1)
    obj_id = args[-1]

    s3_gateway = config.get("aws_s3_gateway")
    print("aws_s3_gateway: {}".format(s3_gateway))
    region = config.get("aws_region")
    print("region: {}".format(region))
    print("now: {}".format(time.time()))

    conn = boto.s3.connect_to_region(
        region,
        calling_format=boto.s3.connection.OrdinaryCallingFormat()
    )
    bucket_name = config.get("bucket_name")
    print("bucket_name: {}".format(bucket_name))
    bucket = conn.get_bucket(bucket_name)

    # Dataset/group/type object ids are stored under a checksum prefix.
    if obj_id.startswith(("d-", "g-", "t-")):
        obj_id = getIdHash(obj_id) + '-' + obj_id

    k = Key(bucket)
    k.key = obj_id
    data = k.get_contents_as_string()
    if not isinstance(data, str):
        # Python 3 - convert from bytes to str
        data = data.decode("utf-8")
    json_data = json.loads(data)
    print(json.dumps(json_data, sort_keys=True, indent=4))
Example #6
Source File: aws.py From build-relengapi with Mozilla Public License 2.0 | 5 votes |
def connect_to_s3(self, service_name, region_name):
    """Open an S3 connection in *region_name*.

    S3 is connected to differently in boto than the other services,
    hence this dedicated helper (``service_name`` is unused here).
    """
    import boto.s3
    key_id = self.config.get('access_key_id')
    secret = self.config.get('secret_access_key')
    return boto.s3.connect_to_region(region_name=region_name,
                                     aws_access_key_id=key_id,
                                     aws_secret_access_key=secret)
Example #7
Source File: bucket.py From aws-extender with MIT License | 5 votes |
def get_cors(self, headers=None):
    """
    Return the bucket's current CORS configuration.

    :rtype: :class:`boto.s3.cors.CORSConfiguration`
    :returns: A CORSConfiguration object describing every CORS rule
        currently in effect for the bucket.
    """
    cors_xml = self.get_cors_xml(headers)
    config = CORSConfiguration()
    sax_handler = handler.XmlHandler(config, self)
    xml.sax.parseString(cors_xml, sax_handler)
    return config
Example #8
Source File: bucket.py From aws-extender with MIT License | 5 votes |
def set_cors(self, cors_config, headers=None):
    """
    Apply a boto CORSConfiguration object to this bucket.

    :type cors_config: :class:`boto.s3.cors.CORSConfiguration`
    :param cors_config: The CORS configuration you want to configure
        for this bucket.
    """
    serialized = cors_config.to_xml()
    return self.set_cors_xml(serialized)
Example #9
Source File: bucket.py From aws-extender with MIT License | 5 votes |
def get_website_configuration_obj(self, headers=None):
    """Fetch the website configuration and return it parsed as a
    :class:`boto.s3.website.WebsiteConfiguration` object.
    """
    raw_xml = self.get_website_configuration_xml(headers=headers)
    cfg = website.WebsiteConfiguration()
    sax_handler = handler.XmlHandler(cfg, self)
    xml.sax.parseString(raw_xml, sax_handler)
    return cfg
Example #10
Source File: bucket.py From aws-extender with MIT License | 5 votes |
def set_website_configuration(self, config, headers=None):
    """
    Apply a website configuration to this bucket.

    :type config: boto.s3.website.WebsiteConfiguration
    :param config: Configuration data
    """
    serialized = config.to_xml()
    return self.set_website_configuration_xml(serialized,
                                              headers=headers)
Example #11
Source File: bucket.py From aws-extender with MIT License | 5 votes |
def delete_key(self, key_name, headers=None, version_id=None,
               mfa_token=None):
    """
    Delete a key from the bucket.

    When a version_id is supplied, only that version of the key is
    removed.

    :type key_name: string
    :param key_name: The key name to delete

    :type version_id: string
    :param version_id: The version ID (optional)

    :type mfa_token: tuple or list of strings
    :param mfa_token: The serial number of the MFA device plus the
        current six-digit token from it.  Required whenever you delete
        versioned objects from a bucket that has MFADelete enabled.

    :rtype: :class:`boto.s3.key.Key` or subclass
    :returns: A key object describing the deletion; callers can check
        whether a delete_marker was created or removed and which
        version_id the delete affected.
    """
    if not key_name:
        raise ValueError('Empty key names are not allowed')
    return self._delete_key_internal(key_name, headers=headers,
                                     version_id=version_id,
                                     mfa_token=mfa_token,
                                     query_args_l=None)
Example #12
Source File: bucket.py From aws-extender with MIT License | 5 votes |
def new_key(self, key_name=None):
    """
    Create a new key object for this bucket.

    :type key_name: string
    :param key_name: The name of the key to create

    :rtype: :class:`boto.s3.key.Key` or subclass
    :returns: An instance of the newly created key object

    :raises ValueError: if key_name is empty or None
    """
    if key_name:
        return self.key_class(self, key_name)
    raise ValueError('Empty key names are not allowed')
Example #13
Source File: bucket.py From aws-extender with MIT License | 5 votes |
def lookup(self, key_name, headers=None):
    """
    Deprecated: please use the get_key method instead.

    :type key_name: string
    :param key_name: The name of the key to retrieve

    :rtype: :class:`boto.s3.key.Key`
    :returns: A Key object from this bucket.
    """
    result = self.get_key(key_name, headers=headers)
    return result
Example #14
Source File: bucket.py From aws-extender with MIT License | 5 votes |
def set_key_class(self, key_class):
    """
    Override the Key class this bucket hands out.

    By default the bucket uses boto.s3.key.Key, but if you subclass
    that, registering your class here makes bucket.new_key() and
    bucket listings return instances of your class instead of the
    default.

    :type key_class: class
    :param key_class: A subclass of Key that can be more specific
    """
    self.key_class = key_class
Example #15
Source File: aws_connections.py From mycroft with MIT License | 5 votes |
def get_s3_connection():
    '''Open an S3 connection in the configured AWS region.

    :returns: s3 connection
    '''
    region = read_string('aws_config.region')
    creds = get_boto_creds()
    return boto.s3.connect_to_region(region, **creds)
Example #16
Source File: s3.py From splits with MIT License | 5 votes |
def __init__(self, uri, *args, **kwargs):
    """Open a gzip stream layered over an S3File.

    Recognized keyword arguments: ``mode`` (default 'rb') and ``s3``
    (default None); both are forwarded to the underlying S3File.
    """
    mode = kwargs.get('mode', 'rb')
    s3 = kwargs.get('s3')
    self.s3File = S3File(uri, mode=mode, s3=s3)
    super(GzipS3File, self).__init__(fileobj=self.s3File, mode=mode)
Example #17
Source File: get_s3json.py From hsds with Apache License 2.0 | 5 votes |
def printUsage():
    """Print command-line usage for get_s3json, then exit."""
    print("usage: python get_s3json [--bucket_name=<bucket>] [--aws_s3_gateway=<s3_endpoint>] objid ")
    print(" objid: s3 JSON obj to fetch")
    print(" Example: python get_s3json --aws_s3_gateway=http://192.168.99.100:9000 --bucket_name=minio.hsdsdev t-cf2fc310-996f-11e6-8ef6-0242ac110005")
    sys.exit()  # fixed: removed stray trailing semicolon

#
# Get hash prefix
#
Example #18
Source File: s3.py From splits with MIT License | 5 votes |
def is_s3_uri(uri):
    """Return True when *uri* uses an S3 scheme (s3:// or s3n://)."""
    return str(uri).startswith(('s3://', 's3n://'))
Example #19
Source File: s3.py From splits with MIT License | 5 votes |
def __init__(self, region='us-east-1'):
    """Connect to S3 in *region*, sharing one cached credentials provider.

    A single class-wide boto Provider is used to avoid intermittent
    NoAuthHandlerFound errors; see
    http://blog.johnryding.com/post/122337566993/solving-intermittent-noauthhandlerfound-errors-in
    """
    if S3.aws_settings_provider is None:
        S3.aws_settings_provider = boto.provider.Provider('aws')
    self._conn = boto.s3.connect_to_region(
        region,
        calling_format=boto.s3.connection.OrdinaryCallingFormat(),
        provider=S3.aws_settings_provider,
    )
Example #20
Source File: s3.py From splits with MIT License | 5 votes |
def _list_prefix(self, s3uri):
    """Yield an S3Uri for every key listed under *s3uri*'s path,
    one level deep (delimiter '/')."""
    bucket = self._conn.get_bucket(s3uri.bucket)
    keys = bucket.list(s3uri.path, delimiter='/')
    return (S3Uri('s3://{0}/{1}'.format(s3uri.bucket, key.name))
            for key in keys)
Example #21
Source File: s3.py From splits with MIT License | 5 votes |
def _list_buckets(self):
    """Return a generator of S3Uri objects, one per visible bucket."""
    # get_all_buckets() is called eagerly here (same as the original
    # generator expression); only the URI wrapping is lazy.
    all_buckets = self._conn.get_all_buckets()
    return (S3Uri('s3://{0}'.format(b.name)) for b in all_buckets)
Example #22
Source File: s3.py From splits with MIT License | 5 votes |
def rm(self, uris):
    """
    Delete the given S3 URIs, batched per bucket.

    :param uris: iterable of S3 uri strings (s3://bucket/key)
    :raises IOError: if any key in a batch failed to delete
    """
    parsed = [S3Uri(uri) for uri in uris]
    # groupby requires its input sorted by the same key function.
    for bucket, group in groupby(
            sorted(parsed, key=lambda u: u.bucket),
            lambda u: u.bucket):
        result = self._conn.get_bucket(bucket)\
            .delete_keys(
                boto.s3.key.Key(bucket, u.path) for u in group)
        # idiomatic truthiness check (was: if(len(...) > 0))
        if result.errors:
            raise IOError('Could not delete keys: {keys}'.format(
                keys=list(result.errors)))
Example #23
Source File: s3.py From splits with MIT License | 5 votes |
def __init_s3(cls):
    """Create the shared S3 client the first time it is needed."""
    if cls.s3:
        return
    cls.s3 = S3()
Example #24
Source File: s3.py From splits with MIT License | 5 votes |
def close(self):
    # On write-mode files, flush buffered data and upload the full
    # contents to S3.  NOTE(review): reconstructed from a collapsed
    # source line -- the putfile call is assumed to sit inside the
    # write-mode guard (uploading on read-only close would be wrong);
    # confirm against the original file.
    if 'w' in self.mode:
        self.flush()
        self.s3.putfile(self, self.s3uri)
Example #25
Source File: util.py From mycroft with MIT License | 5 votes |
def _load_from_s3_region(conn, bucket_name, key_name):
    """
    Read a key's contents through an already-open S3 connection.

    :param conn: an open boto S3 connection
    :param bucket_name: the bucket holding the key
    :param key_name: the key to read
    :returns: the key's contents
    :raises ValueError: if the key does not exist in the bucket
    """
    key = conn.get_bucket(bucket_name).get_key(key_name)
    if key is None:
        raise ValueError('s3://{0}/{1}: no such file'.format(
            bucket_name, key_name
        ))
    return key.get_contents_as_string()
Example #26
Source File: S3Session.py From mongodb_consistent_backup with Apache License 2.0 | 5 votes |
def connect(self):
    """
    Lazily open the S3 connection for this session.

    Uses the access/secret key pair when both are configured,
    otherwise falls back to the instance's IAM role.  Failures are
    wrapped in OperationError.
    """
    if not self._conn:
        try:
            if (self.access_key is not None and self.secret_key is not None):
                logging.debug("Connecting to AWS S3 with Access Key: %s" % self.access_key)
                self._conn = boto.s3.connect_to_region(
                    self.region,
                    aws_access_key_id=self.access_key,
                    aws_secret_access_key=self.secret_key,
                    is_secure=self.secure,
                    calling_format=self.calling_format
                )
                logging.debug("Successfully connected to AWS S3 with Access Key: %s" % self.access_key)
            else:
                logging.debug("Connecting to AWS S3 with IAM Role")
                self._conn = boto.s3.connect_to_region(
                    self.region,
                    is_secure=self.secure,
                    calling_format=self.calling_format
                )
                logging.debug("Successfully connected to AWS S3 with IAM Role")
        # Fixed: "except X, e" is Python-2-only syntax; "as" is valid
        # on Python 2.6+ and Python 3.
        except boto.exception.S3ResponseError as e:
            if self.is_forbidden_error(e):
                logging.error("Not authorized to connect to AWS S3 with Access Key: %s!" % self.access_key)
            else:
                logging.error("Cannot connect to AWS S3 with Access Key: %s!" % self.access_key)
            # NOTE(review): this branch *returns* the error object while
            # the generic handler below raises -- likely a bug, but kept
            # as-is to preserve caller-visible behavior; confirm intent.
            return OperationError(e)
        except Exception as e:
            logging.error("Cannot connect to AWS S3 with Access Key: %s!" % self.access_key)
            raise OperationError(e)
Example #27
Source File: util.py From mycroft with MIT License | 5 votes |
def is_s3_path(file_path):
    """Return True when *file_path*'s scheme is s3, else False."""
    scheme = file_path.partition('://')[0]
    return scheme == 's3'
Example #28
Source File: util.py From mycroft with MIT License | 5 votes |
def parse_s3_path(file_path):
    """
    Split an s3://bucket/prefix path into its (bucket, prefix) parts.

    :param file_path: an S3 path of the form s3://bucket/prefix
    :returns: (bucket, prefix) tuple
    :raises ValueError: if file_path is not a well-formed s3 path
    """
    if not is_s3_path(file_path):
        raise ValueError('{0} is not a valid s3 path'.format(file_path))
    parse_array = file_path.split("/", 3)
    # Robustness fix: a path like "s3://bucket" (no key part) used to
    # crash with IndexError; report it as an invalid path instead.
    if len(parse_array) < 4:
        raise ValueError('{0} is not a valid s3 path'.format(file_path))
    bucket = parse_array[2]
    prefix = parse_array[3]
    return bucket, prefix
Example #29
Source File: bucket.py From aws-extender with MIT License | 4 votes |
def list_versions(self, prefix='', delimiter='', key_marker='',
                  version_id_marker='', headers=None,
                  encoding_type=None):
    """
    List version objects within a bucket.

    This returns an instance of a VersionedBucketListResultSet that
    automatically handles all of the result paging, etc. from S3.
    You just need to keep iterating until there are no more results.
    Called with no arguments, this will return an iterator object
    across all keys within the bucket.

    :type prefix: string
    :param prefix: allows you to limit the listing to a particular
        prefix.  For example, if you call the method with
        prefix='/foo/' then the iterator will only cycle through
        the keys that begin with the string '/foo/'.

    :type delimiter: string
    :param delimiter: can be used in conjunction with the prefix
        to allow you to organize and browse your keys
        hierarchically. See:
        http://aws.amazon.com/releasenotes/Amazon-S3/213
        for more details.

    :type key_marker: string
    :param key_marker: The "marker" of where you are in the result
        set

    :type version_id_marker: string
    :param version_id_marker: presumably the version-id counterpart
        of key_marker, i.e. where to resume within a key's versions --
        confirm against the S3 ListObjectVersions API.

    :param headers: optional extra HTTP headers passed through with
        the listing requests.

    :param encoding_type: Requests Amazon S3 to encode the response
        and specifies the encoding method to use.

        An object key can contain any Unicode character; however,
        XML 1.0 parser cannot parse some characters, such as
        characters with an ASCII value from 0 to 10. For characters
        that are not supported in XML 1.0, you can add this
        parameter to request that Amazon S3 encode the keys in the
        response.

        Valid options: ``url``
    :type encoding_type: string

    :rtype: :class:`boto.s3.bucketlistresultset.BucketListResultSet`
    :return: an instance of a BucketListResultSet that handles
        paging, etc
    """
    return VersionedBucketListResultSet(self, prefix, delimiter,
                                        key_marker, version_id_marker,
                                        headers,
                                        encoding_type=encoding_type)
Example #30
Source File: bucket.py From aws-extender with MIT License | 4 votes |
def list(self, prefix='', delimiter='', marker='', headers=None,
         encoding_type=None):
    """
    List key objects within a bucket.

    This returns an instance of a BucketListResultSet that
    automatically handles all of the result paging, etc. from S3.
    You just need to keep iterating until there are no more results.
    Called with no arguments, this will return an iterator object
    across all keys within the bucket.

    The Key objects returned by the iterator are obtained by parsing
    the results of a GET on the bucket, also known as the List
    Objects request.  The XML returned by this request contains only
    a subset of the information about each key.  Certain metadata
    fields such as Content-Type and user metadata are not available
    in the XML.  Therefore, if you want these additional metadata
    fields you will have to do a HEAD request on the Key in the
    bucket.

    :type prefix: string
    :param prefix: allows you to limit the listing to a particular
        prefix.  For example, if you call the method with
        prefix='/foo/' then the iterator will only cycle through
        the keys that begin with the string '/foo/'.

    :type delimiter: string
    :param delimiter: can be used in conjunction with the prefix
        to allow you to organize and browse your keys
        hierarchically. See http://goo.gl/Xx63h for more details.

    :type marker: string
    :param marker: The "marker" of where you are in the result set

    :param headers: optional extra HTTP headers passed through with
        the listing requests.

    :param encoding_type: Requests Amazon S3 to encode the response
        and specifies the encoding method to use.

        An object key can contain any Unicode character; however,
        XML 1.0 parser cannot parse some characters, such as
        characters with an ASCII value from 0 to 10. For characters
        that are not supported in XML 1.0, you can add this
        parameter to request that Amazon S3 encode the keys in the
        response.

        Valid options: ``url``
    :type encoding_type: string

    :rtype: :class:`boto.s3.bucketlistresultset.BucketListResultSet`
    :return: an instance of a BucketListResultSet that handles
        paging, etc
    """
    return BucketListResultSet(self, prefix, delimiter, marker,
                               headers,
                               encoding_type=encoding_type)