Python boto.storage_uri() Examples
The following are 10
code examples of boto.storage_uri().
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example.
You may also want to check out all available functions and classes of the
boto
module, or try the search function.
Example #1
Source File: uploader.py From GiftStick with Apache License 2.0 | 6 votes |
def _UploadStream(self, stream, remote_path, update_callback=None):
    """Uploads a file object to Google Cloud Storage.

    Args:
      stream (file): the file-like object pointing to data to upload.
      remote_path (str): the remote path to store the data to.
      update_callback (func): an optional function called as upload progresses.

    Raises:
      errors.RetryableError: when the upload encounters an error that's
        worth retrying.
    """
    # Lazily initialize boto credentials on first use.
    if not self._boto_configured:
      self._InitBoto()
    try:
      destination = boto.storage_uri(remote_path, u'gs')
      destination.new_key().set_contents_from_stream(stream, cb=update_callback)
    except boto.exception.GSDataError as gs_error:
      # GSDataError is usually raised when the connection is broken, and
      # deserves to be retried by the caller.
      raise errors.RetryableError(str(gs_error))
Example #2
Source File: GpredKfold.py From papiseval with MIT License | 6 votes |
def create_bucket(self):
    """Create a uniquely named Google Cloud Storage training bucket.

    The bucket name is derived from the current time so repeated runs do
    not collide.

    Returns:
        str: the name of the created bucket, or None when creation failed
        (the error is printed, matching the original best-effort behavior).
    """
    now = time.time()
    bucket_name = 'training-%d' % now
    # Instantiate a BucketStorageUri object.
    uri = boto.storage_uri(bucket_name, self.GOOGLE_STORAGE)
    # Try to create the bucket.
    try:
        # If the default project is defined, you do not need the headers;
        # just call: uri.create_bucket()
        header_values = {"x-goog-api-version": "2",
                         "x-goog-project-id": self.project_id}
        uri.create_bucket(headers=header_values)
        return bucket_name
    # BUGFIX: `except StorageCreateError, e` and the bare `print` statement
    # are Python 2-only syntax; `as e` and parenthesized print() work on
    # both Python 2 and 3.
    except boto.exception.StorageCreateError as e:
        print('Failed to create bucket: %s' % e)

# delete_bucket: deleting a bucket
#
# params:
#   bucket_name: the name of the bucket to delete
#   project_id: the project id where the bucket to delete is
Example #3
Source File: gcs_boto.py From PerfKitBenchmarker with Apache License 2.0 | 6 votes |
def _StorageURI(self, bucket, object_name=None):
    """Return a storage_uri for the given resource.

    Args:
      bucket: the name of a bucket.
      object_name: the name of an object, if given.

    Returns:
      A storage_uri. If object is given, the uri will be for the
      bucket-object combination. If object is not given, the uri will be
      for the bucket.
    """
    if object_name is None:
      path = bucket
    else:
      path = '%s/%s' % (bucket, object_name)
    return boto.storage_uri(path, 'gs')
Example #4
Source File: GsUploadThread.py From mongodb_consistent_backup with Apache License 2.0 | 5 votes |
def get_uri(self):
    """Return a Google Cloud Storage URI for this thread's upload path."""
    gs_uri = boto.storage_uri(self.path, 'gs')
    return gs_uri
Example #5
Source File: views.py From mirandum with Apache License 2.0 | 5 votes |
def handle_uploaded_file(f, user):
    """Store an uploaded file in GCS under a per-user prefix.

    The object key is <md5(username)>/<basename>; returns the public
    http URL of the stored object.
    """
    bucket = settings.GCS_BUCKET
    owner_hash = md5.md5(str(user.username)).hexdigest()
    # Sometimes things have paths in them? Windows used to do this, so
    # keep only the final path component.
    basename = f.name.split("/")[-1]
    file_key = "%s/%s" % (owner_hash, basename)
    target = boto.storage_uri("%s/%s" % (bucket, file_key), "gs")
    target.set_contents_from_string(f.read())
    return "http://%s/%s" % (bucket, file_key)
Example #6
Source File: GpredKfold.py From papiseval with MIT License | 5 votes |
def upload_file(self, dir, filename, bucket_name):
    """Upload a local file into the given Google Cloud Storage bucket.

    Args:
        dir: directory containing the file (parameter name kept for
            backward compatibility even though it shadows the builtin).
        filename: name of the file; also used as the object key.
        bucket_name: destination bucket name.
    """
    # BUGFIX: open in binary mode ('rb') rather than text mode ('r') —
    # text mode corrupts binary payloads via newline translation on
    # Windows and raises decode errors on Python 3.
    with open(os.path.join(dir, filename), 'rb') as localfile:
        dst_uri = boto.storage_uri(bucket_name + '/' + filename,
                                   self.GOOGLE_STORAGE)
        dst_uri.new_key().set_contents_from_file(localfile)

# define_bucket: setting a bucket which will be used to store training data
Example #7
Source File: GpredKfold.py From papiseval with MIT License | 5 votes |
def delete_bucket(self, bucket_name, project_id):
    """Delete every object in the named bucket, then the bucket itself."""
    bucket_uri = boto.storage_uri(bucket_name, self.GOOGLE_STORAGE)
    # A bucket must be empty before it can be deleted.
    for entry in bucket_uri.get_bucket():
        #print 'Deleting object: %s...' % entry.name
        entry.delete()
    #print 'Deleting bucket: %s...' % bucket_uri.bucket_name
    bucket_uri.delete_bucket()

## Method which is use to train a google prediction regression or classification model
# @param self the object pointer
# @param inputs the inputs
# @param outputs the outputs
# @param train the integer array of positions for the data used for training
# @return a list containing the bucket name (to clean it at the end) and the model id
Example #8
Source File: __init__.py From canvas with BSD 3-Clause "New" or "Revised" License | 5 votes |
def storage_uri_for_key(key):
    """Returns a StorageUri for the given key.

    :type key: :class:`boto.s3.key.Key` or subclass
    :param key: URI naming bucket + optional object.
    """
    # Reject anything that is not a boto Key up front.
    if not isinstance(key, boto.s3.key.Key):
        raise InvalidUriError('Requested key (%s) is not a subclass of '
                              'boto.s3.key.Key' % str(type(key)))
    provider = key.bucket.connection.provider.get_provider_name()
    return storage_uri('%s://%s/%s' % (provider, key.bucket.name, key.name))
Example #9
Source File: storage_uri.py From canvas with BSD 3-Clause "New" or "Revised" License | 5 votes |
def connect(self, access_key_id=None, secret_access_key=None, **kwargs):
    """
    Opens a connection to appropriate provider, depending on provider
    portion of URI. Requires Credentials defined in boto config file
    (see boto/pyami/config.py).

    @type storage_uri: StorageUri
    @param storage_uri: StorageUri specifying a bucket or a bucket+object

    @rtype: L{AWSAuthConnection<boto.gs.connection.AWSAuthConnection>}
    @return: A connection to storage service provider of the given URI.
    """
    # Start from any connection args stored on the instance, then let
    # explicit kwargs override them below.
    connection_args = dict(self.connection_args or ())
    # Use OrdinaryCallingFormat instead of boto-default
    # SubdomainCallingFormat because the latter changes the hostname
    # that's checked during cert validation for HTTPS connections,
    # which will fail cert validation (when cert validation is enabled).
    # Note: the following import can't be moved up to the start of
    # this file else it causes a config import failure when run from
    # the resumable upload/download tests.
    from boto.s3.connection import OrdinaryCallingFormat
    connection_args['calling_format'] = OrdinaryCallingFormat()
    connection_args.update(kwargs)
    # Connection is created lazily and cached on the instance; subsequent
    # calls reuse it and only refresh the debug flag.
    if not self.connection:
        if self.scheme == 's3':
            from boto.s3.connection import S3Connection
            self.connection = S3Connection(access_key_id,
                                           secret_access_key,
                                           **connection_args)
        elif self.scheme == 'gs':
            from boto.gs.connection import GSConnection
            self.connection = GSConnection(access_key_id,
                                           secret_access_key,
                                           **connection_args)
        elif self.scheme == 'file':
            from boto.file.connection import FileConnection
            self.connection = FileConnection(self)
        else:
            raise InvalidUriError('Unrecognized scheme "%s"' %
                                  self.scheme)
    self.connection.debug = self.debug
    return self.connection
Example #10
Source File: storage_uri.py From aws-extender with MIT License | 4 votes |
def connect(self, access_key_id=None, secret_access_key=None, **kwargs):
    """
    Opens a connection to appropriate provider, depending on provider
    portion of URI. Requires Credentials defined in boto config file
    (see boto/pyami/config.py).

    @type storage_uri: StorageUri
    @param storage_uri: StorageUri specifying a bucket or a bucket+object

    @rtype: L{AWSAuthConnection<boto.gs.connection.AWSAuthConnection>}
    @return: A connection to storage service provider of the given URI.
    """
    # Start from any connection args stored on the instance, then let
    # explicit kwargs override them below.
    connection_args = dict(self.connection_args or ())
    # Forward suppress_consec_slashes only when the caller has not
    # already set it explicitly in connection_args.
    if (hasattr(self, 'suppress_consec_slashes') and
            'suppress_consec_slashes' not in connection_args):
        connection_args['suppress_consec_slashes'] = (
            self.suppress_consec_slashes)
    connection_args.update(kwargs)
    # Connections are cached per scheme in provider_pool so repeated
    # URIs for the same provider share one connection.
    if not self.connection:
        if self.scheme in self.provider_pool:
            self.connection = self.provider_pool[self.scheme]
        elif self.scheme == 's3':
            from boto.s3.connection import S3Connection
            self.connection = S3Connection(access_key_id,
                                           secret_access_key,
                                           **connection_args)
            self.provider_pool[self.scheme] = self.connection
        elif self.scheme == 'gs':
            from boto.gs.connection import GSConnection
            # Use OrdinaryCallingFormat instead of boto-default
            # SubdomainCallingFormat because the latter changes the hostname
            # that's checked during cert validation for HTTPS connections,
            # which will fail cert validation (when cert validation is
            # enabled).
            #
            # The same is not true for S3's HTTPS certificates. In fact,
            # we don't want to do this for S3 because S3 requires the
            # subdomain to match the location of the bucket. If the proper
            # subdomain is not used, the server will return a 301 redirect
            # with no Location header.
            #
            # Note: the following import can't be moved up to the
            # start of this file else it causes a config import failure when
            # run from the resumable upload/download tests.
            from boto.s3.connection import OrdinaryCallingFormat
            connection_args['calling_format'] = OrdinaryCallingFormat()
            self.connection = GSConnection(access_key_id,
                                           secret_access_key,
                                           **connection_args)
            self.provider_pool[self.scheme] = self.connection
        elif self.scheme == 'file':
            from boto.file.connection import FileConnection
            self.connection = FileConnection(self)
        else:
            raise InvalidUriError('Unrecognized scheme "%s"' %
                                  self.scheme)
    self.connection.debug = self.debug
    return self.connection