Python azure.storage.blob.ContentSettings() Examples
The following are 12 code examples of azure.storage.blob.ContentSettings().
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example.
You may also want to check out all available functions/classes of the module azure.storage.blob, or try the search function.
Example #1
Source File: azure_storage.py From django-storages with BSD 3-Clause "New" or "Revised" License | 6 votes |
def _save(self, name, content):
    """Upload *content* to Azure blob storage under *name*.

    Returns the cleaned (caller-visible) name of the stored object.
    """
    original_name = clean_name(name)
    blob_path = self._get_valid_path(name)

    # Prefer an explicit content type from the file object, then the
    # mimetypes guess, then the storage-wide default.
    mime_guess, encoding_guess = mimetypes.guess_type(blob_path)
    chosen_type = (
        _content_type(content)
        or mime_guess
        or self.default_content_type
    )

    # Unwrap django file (wrapped by parent's save call)
    if isinstance(content, File):
        content = content.file
    content.seek(0)

    settings = ContentSettings(
        content_type=chosen_type,
        content_encoding=encoding_guess,
        cache_control=self.cache_control,
    )
    self.service.create_blob_from_stream(
        container_name=self.azure_container,
        blob_name=blob_path,
        stream=content,
        content_settings=settings,
        max_connections=self.upload_max_conn,
        timeout=self.timeout,
    )
    return original_name
Example #2
Source File: azure.py From pghoard with Apache License 2.0 | 6 votes |
def store_file_object(self, key, fd, *, cache_control=None, metadata=None, mimetype=None, upload_progress_fn=None):
    """Stream the open file object *fd* into Azure blob storage under *key*."""
    if cache_control is not None:
        raise NotImplementedError("AzureTransfer: cache_control support not implemented")
    key = self.format_key_for_backend(key, remove_slash_prefix=True)

    settings = ContentSettings(content_type=mimetype) if mimetype else None

    def progress_callback(bytes_sent, _):
        if upload_progress_fn:
            upload_progress_fn(bytes_sent)

    # Azure _BlobChunkUploader calls `tell()` on the stream even though it
    # doesn't use the result.  We expect the input stream not to support
    # `tell()`, so install a dummy implementation for the duration of the
    # upload and restore (or remove) it afterwards.
    saved_tell = getattr(fd, "tell", None)
    fd.tell = lambda: None
    try:
        self.conn.create_blob_from_stream(
            self.container_name,
            key,
            fd,
            content_settings=settings,
            metadata=self.sanitize_metadata(metadata, replace_hyphen_with="_"),
            progress_callback=progress_callback,
        )
    finally:
        if saved_tell:
            fd.tell = saved_tell
        else:
            delattr(fd, "tell")
Example #3
Source File: release.py From mssql-cli with BSD 3-Clause "New" or "Revised" License | 6 votes |
def _upload_index_file(service, blob_name, title, links):
    """Render a minimal HTML index page for *links* and upload it as *blob_name*."""
    print('Uploading index file {}'.format(blob_name))

    anchors = '\n'.join('<a href="{0}">{0}</a><br/>'.format(link) for link in links)
    page = "<html><head><title>{0}</title></head><body><h1>{0}</h1>{1}</body></html>".format(
        title, anchors)

    # All optional settings other than the content type are explicitly None.
    settings = ContentSettings(
        content_type='text/html',
        content_disposition=None,
        content_encoding=None,
        content_language=None,
        content_md5=None,
        cache_control=None,
    )
    service.create_blob_from_text(
        container_name=BLOB_CONTAINER_NAME,
        blob_name=blob_name,
        text=page,
        content_settings=settings,
    )
Example #4
Source File: azure.py From pypiprivate with MIT License | 5 votes |
def put_contents(self, contents, dest, sync=False):
    """Write the string *contents* to the blob at *dest*, overwriting any existing blob."""
    target = self.prefixed_path(dest)
    logger.debug('Writing content to azure: {0}'.format(target))
    settings = ContentSettings(content_type=guess_content_type(dest))
    self.container_client.upload_blob(
        name=target,
        data=contents.encode('utf-8'),
        overwrite=True,
        content_settings=settings,
    )
Example #5
Source File: azure.py From pypiprivate with MIT License | 5 votes |
def put_file(self, src, dest, sync=False):
    """Upload the local file *src* to the blob at *dest*, overwriting any existing blob."""
    target = self.prefixed_path(dest)
    logger.debug('Writing content to azure: {0}'.format(target))
    settings = ContentSettings(content_type=guess_content_type(dest))
    with open(src, "rb") as data:
        self.container_client.upload_blob(
            name=target,
            data=data,
            overwrite=True,
            content_settings=settings,
        )
Example #6
Source File: azurestorage.py From quay with Apache License 2.0 | 5 votes |
def stream_write(self, path, fp, content_type=None, content_encoding=None):
    """Stream *fp* into the blob for *path*, tagging the given type/encoding.

    Raises IOError (wrapping the AzureException) if the upload fails.
    """
    target = self._blob_name_from_path(path)
    settings = ContentSettings(
        content_type=content_type,
        content_encoding=content_encoding,
    )
    try:
        self._blob_service.create_blob_from_stream(
            self._azure_container, target, fp, content_settings=settings
        )
    except AzureException as ae:
        logger.exception("Exception when trying to stream_write path %s", path)
        raise IOError("Exception when trying to stream_write path", ae)
Example #7
Source File: helper.py From trains with Apache License 2.0 | 5 votes |
def upload_object(self, file_path, container, object_name, extra=None, **kwargs):
    """Upload the file at *file_path* to *container* as *object_name*.

    Returns True on success; on failure logs the error and returns None.
    """
    from azure.common import AzureHttpError
    blob_name = self._blob_name_from_object_path(object_name, container.name)
    try:
        from azure.storage.blob import ContentSettings
        from mimetypes import guess_type
        container.blob_service.MAX_SINGLE_PUT_SIZE = 16 * 1024 * 1024
        container.blob_service.socket_timeout = (300, 2000)
        # BUG FIX: guess_type() returns a (type, encoding) tuple; only the
        # type string belongs in ContentSettings.content_type.  The original
        # code passed the whole tuple, producing an invalid content type.
        content_type, _encoding = guess_type(file_path)
        container.blob_service.create_blob_from_path(
            container.name,
            blob_name,
            file_path,
            # timeout=300,
            max_connections=2,
            content_settings=ContentSettings(content_type=content_type)
        )
        return True
    except AzureHttpError as ex:
        log.error('Failed uploading (Azure error): %s' % ex)
    except Exception as ex:
        log.error('Failed uploading: %s' % ex)
    # NOTE(review): the original declared `stream = None` and closed it in a
    # `finally:` block, but never assigned it — dead code, removed.
Example #8
Source File: azure.py From pghoard with Apache License 2.0 | 5 votes |
def store_file_from_memory(self, key, memstring, metadata=None, cache_control=None, mimetype=None):
    """Store an in-memory byte string as the Azure blob identified by *key*."""
    if cache_control is not None:
        raise NotImplementedError("AzureTransfer: cache_control support not implemented")
    backend_key = self.format_key_for_backend(key, remove_slash_prefix=True)

    settings = ContentSettings(content_type=mimetype) if mimetype else None

    # azure would work with memoryview, but validates it's bytes
    payload = bytes(memstring)
    self.conn.create_blob_from_bytes(
        self.container_name,
        backend_key,
        payload,
        content_settings=settings,
        metadata=self.sanitize_metadata(metadata, replace_hyphen_with="_"),
    )
Example #9
Source File: azure.py From pghoard with Apache License 2.0 | 5 votes |
def store_file_from_disk(self, key, filepath, metadata=None, multipart=None, cache_control=None, mimetype=None):
    """Upload the local file at *filepath* as the Azure blob identified by *key*.

    *multipart* is accepted for interface compatibility but unused here.
    """
    if cache_control is not None:
        raise NotImplementedError("AzureTransfer: cache_control support not implemented")
    backend_key = self.format_key_for_backend(key, remove_slash_prefix=True)

    settings = ContentSettings(content_type=mimetype) if mimetype else None

    self.conn.create_blob_from_path(
        self.container_name,
        backend_key,
        filepath,
        content_settings=settings,
        metadata=self.sanitize_metadata(metadata, replace_hyphen_with="_"),
    )
Example #10
Source File: microsoft.py From docassemble with MIT License | 5 votes |
def set_contents_from_filename(self, filename):
    """Upload *filename* to this key's blob, then sync the local file's
    mtime to the blob's last-modified time."""
    # Prefer an explicitly set content_type; otherwise guess from the name.
    mimetype = getattr(self, 'content_type', None)
    if mimetype is None:
        mimetype, _encoding = mimetypes.guess_type(filename)

    # Only attach ContentSettings when we actually have a type to set.
    upload_kwargs = {}
    if mimetype is not None:
        upload_kwargs['content_settings'] = ContentSettings(content_type=mimetype)
    self.azure_object.conn.create_blob_from_path(
        self.azure_object.container, self.name, filename, **upload_kwargs)

    # Refresh blob metadata and mirror the server-side mtime locally.
    self.get_properties()
    secs = (self.last_modified - epoch).total_seconds()
    os.utime(filename, (secs, secs))
Example #11
Source File: azurestorage.py From quay with Apache License 2.0 | 4 votes |
def complete_chunked_upload(self, uuid, final_path, storage_metadata):
    """
    Complete the chunked upload and store the final results in the path indicated.

    Returns nothing.
    """
    upload_blob_path = self._upload_blob_path_from_uuid(uuid)

    # Commit the blob's blocks.
    blocks = [BlobBlock(block_id) for block_id in storage_metadata[_BLOCKS_KEY]]
    try:
        self._blob_service.put_block_list(self._azure_container, upload_blob_path, blocks)
    except AzureException:
        logger.exception(
            "Exception when trying to put block list for path %s from upload %s",
            final_path,
            uuid,
        )
        raise IOError("Exception when trying to put block list")

    # Set the content type on the blob if applicable.
    declared_type = storage_metadata[_CONTENT_TYPE_KEY]
    if declared_type is not None:
        settings = ContentSettings(content_type=declared_type)
        try:
            self._blob_service.set_blob_properties(
                self._azure_container, upload_blob_path, content_settings=settings
            )
        except AzureException:
            logger.exception(
                "Exception when trying to set blob properties for path %s", final_path
            )
            raise IOError("Exception when trying to set blob properties")

    # Copy the blob to its final location via a short-lived SAS URL.
    upload_blob_name = self._upload_blob_name_from_uuid(uuid)
    copy_source_url = self.get_direct_download_url(upload_blob_name, expires_in=300)
    try:
        blob_name = self._blob_name_from_path(final_path)
        copy_prop = self._blob_service.copy_blob(
            self._azure_container, blob_name, copy_source_url
        )
    except AzureException:
        logger.exception(
            "Exception when trying to set copy uploaded blob %s to path %s", uuid, final_path
        )
        raise IOError("Exception when trying to copy uploaded blob")

    self._await_copy(self._azure_container, blob_name, copy_prop)

    # Delete the original (chunked) upload blob now that the copy has landed.
    logger.debug("Deleting chunked upload %s at path %s", uuid, upload_blob_path)
    try:
        self._blob_service.delete_blob(self._azure_container, upload_blob_path)
    except AzureException:
        logger.exception("Exception when trying to set delete uploaded blob %s", uuid)
        raise IOError("Exception when trying to delete uploaded blob")
Example #12
Source File: iconuploader.py From PowerPlatformConnectors with MIT License | 4 votes |
def upload_icon(sas_url, file_path):
    """Upload an icon file to the container addressed by *sas_url*.

    Returns a SAS download URL for the uploaded icon.
    """
    # Break the SAS URL into its components.
    (scheme, netloc, path, params, query, fragment) = urlparse(sas_url)

    # Account is the first part of the netlocation upto the dot.
    account_name = netloc[:netloc.index('.')]

    # The assumption here is that the blob URL will be in the
    # form accountname.blob.core.windows.net or
    # accountname.blob.core.usgovcloudapi.net.
    # Chopping off accountname.blob. to obtain the endpoint suffix.
    endpoint_suffix = netloc.replace(account_name + '.blob.', '')

    # Container name is the URL path.
    container_name = path.strip('/')

    blob_service = BlockBlobService(
        account_name=account_name,
        sas_token=query,
        endpoint_suffix=endpoint_suffix)

    file_name = os.path.basename(file_path)

    # Determine the content type and encoding for the file.
    (content_type, content_encoding) = mimetypes.guess_type(file_name)
    settings = ContentSettings(
        content_type=content_type,
        content_encoding=content_encoding)

    blob_service.create_blob_from_path(
        container_name=container_name,
        blob_name=file_name,
        file_path=file_path,
        content_settings=settings)

    # Append the icon name to the path to build the download link.
    path = path + '/' + file_name
    return urlunparse((scheme, netloc, path, params, query, fragment))