Python azure.storage.blob.BlockBlobService() Examples
The following are 30 code examples of azure.storage.blob.BlockBlobService().
These examples are extracted from open-source projects; the source file, project, and license are noted above each one. You may also want to check out all available functions and classes of the azure.storage.blob module.
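BlockBlobService comes from the legacy azure-storage-blob package (versions 2.1 and earlier); the v12 SDK replaced it with BlobServiceClient. As a quick orientation before the examples, here is a minimal sketch of constructing the client and round-tripping one blob. The account name, key, and container name are placeholders, not values taken from any example below.

from azure.storage.blob import BlockBlobService

# Placeholder credentials -- substitute your own storage account values.
blob_service = BlockBlobService(account_name='mystorageaccount',
                                account_key='<account-key>')
blob_service.create_container('examples')
blob_service.create_blob_from_text('examples', 'hello.txt', 'hello, blob storage')
for blob in blob_service.list_blobs('examples'):
    print(blob.name)  # -> hello.txt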
Example #1
Source File: helpers.py From aztk with MIT License
def create_sas_token(container_name, blob_name, permission, blob_client,
                     expiry=None, timeout=None):
    """Create a blob SAS token
    :param blob_client: The storage block blob client to use.
    :type blob_client: `azure.storage.blob.BlockBlobService`
    :param str container_name: The name of the container to upload the blob to.
    :param str blob_name: The name of the blob to upload the local file to.
    :param expiry: The SAS expiry time.
    :type expiry: `datetime.datetime`
    :param int timeout: timeout in minutes from now for expiry,
        will only be used if expiry is not specified
    :return: A SAS token
    :rtype: str
    """
    if expiry is None:
        if timeout is None:
            timeout = 30
        expiry = datetime.datetime.utcnow() + datetime.timedelta(minutes=timeout)
    return blob_client.generate_blob_shared_access_signature(
        container_name, blob_name, permission=permission, expiry=expiry)
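A hypothetical call to the helper above, assuming blob_client is an existing BlockBlobService and that the container and blob already exist (the names are placeholders):

from azure.storage.blob import BlobPermissions

# Read-only token that expires 60 minutes from now.
token = create_sas_token('mycontainer', 'data.bin', BlobPermissions.READ,
                         blob_client, timeout=60)
url = blob_client.make_blob_url('mycontainer', 'data.bin', sas_token=token)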
Example #2
Source File: storage.py From cortana-intelligence-inventory-optimization with MIT License
def clear_storage_containers(
        blob_client, queue_client, table_client, config, tables_only=False):
    # type: (azureblob.BlockBlobService, azurequeue.QueueService,
    #        azuretable.TableService, dict, bool) -> None
    """Clear storage containers
    :param azure.storage.blob.BlockBlobService blob_client: blob client
    :param azure.storage.queue.QueueService queue_client: queue client
    :param azure.storage.table.TableService table_client: table client
    :param dict config: configuration dict
    :param bool tables_only: clear only tables
    """
    bs = settings.batch_shipyard_settings(config)
    for key in _STORAGE_CONTAINERS:
        if not tables_only and key.startswith('blob_'):
            if key != 'blob_remotefs':
                _clear_blobs(blob_client, _STORAGE_CONTAINERS[key])
        elif key.startswith('table_'):
            try:
                _clear_table(table_client, _STORAGE_CONTAINERS[key], config)
            except azure.common.AzureMissingResourceHttpError:
                if key != 'table_perf' or bs.store_timing_metrics:
                    raise
        elif not tables_only and key.startswith('queue_'):
            logger.info('clearing queue: {}'.format(_STORAGE_CONTAINERS[key]))
            queue_client.clear_messages(_STORAGE_CONTAINERS[key])
Example #3
Source File: publish_artifact.py From takeoff with GNU General Public License v3.0
def _upload_file_to_azure_storage_account(
    self, client: BlockBlobService, source: str, destination: str, container: str = None
):
    """Upload the file to the specified Azure Storage Account.

    Assumption is that any cloud environment has access to a shared repository
    of artifacts.

    Args:
        client: Azure Storage Account client
        source: Local path of the file to upload
        destination: Name the blob should get in the container
        container: Name of the container the file should be uploaded to
    """
    if not container:
        container = self.config["azure"]["common"]["artifacts_shared_storage_account_container_name"]
    logger.info(
        f"uploading artifact | from {source} | to {destination} | in container {container}"
    )
    client.create_blob_from_path(container_name=container, blob_name=destination, file_path=source)
Example #4
Source File: storage.py From cortana-intelligence-inventory-optimization with MIT License
def create_storage_containers(blob_client, queue_client, table_client, config):
    # type: (azureblob.BlockBlobService, azurequeue.QueueService,
    #        azuretable.TableService, dict) -> None
    """Create storage containers
    :param azure.storage.blob.BlockBlobService blob_client: blob client
    :param azure.storage.queue.QueueService queue_client: queue client
    :param azure.storage.table.TableService table_client: table client
    :param dict config: configuration dict
    """
    bs = settings.batch_shipyard_settings(config)
    for key in _STORAGE_CONTAINERS:
        if key.startswith('blob_'):
            logger.info('creating container: {}'.format(
                _STORAGE_CONTAINERS[key]))
            blob_client.create_container(_STORAGE_CONTAINERS[key])
        elif key.startswith('table_'):
            if key == 'table_perf' and not bs.store_timing_metrics:
                continue
            logger.info('creating table: {}'.format(_STORAGE_CONTAINERS[key]))
            table_client.create_table(_STORAGE_CONTAINERS[key])
        elif key.startswith('queue_'):
            logger.info('creating queue: {}'.format(_STORAGE_CONTAINERS[key]))
            queue_client.create_queue(_STORAGE_CONTAINERS[key])
Example #5
Source File: meta_lib.py From incubator-dlab with Apache License 2.0
def list_container_content(self, resource_group_name, account_name, container_name):
    try:
        result = []
        secret_key = self.list_storage_keys(resource_group_name, account_name)[0]
        block_blob_service = BlockBlobService(account_name=account_name, account_key=secret_key)
        content = block_blob_service.list_blobs(container_name)
        for blob in content:
            result.append(blob.name)
        return result
    except Exception as err:
        logging.info("Unable to list container content: " + str(err) +
                     "\n Traceback: " + traceback.format_exc())
        append_result(str({"error": "Unable to list container content",
                           "error_message": str(err) + "\n Traceback: " +
                           traceback.format_exc()}))
        traceback.print_exc(file=sys.stdout)
Example #6
Source File: actions_lib.py From incubator-dlab with Apache License 2.0
def download_from_container(self, resource_group_name, account_name, container_name, files):
    try:
        secret_key = meta_lib.AzureMeta().list_storage_keys(resource_group_name, account_name)[0]
        block_blob_service = BlockBlobService(account_name=account_name, account_key=secret_key)
        for filename in files:
            block_blob_service.get_blob_to_path(container_name, filename, filename)
        return ''
    except azure.common.AzureMissingResourceHttpError:
        return ''
    except Exception as err:
        logging.info("Unable to download files from container: " + str(err) +
                     "\n Traceback: " + traceback.format_exc())
        append_result(str({"error": "Unable to download files from container",
                           "error_message": str(err) + "\n Traceback: " +
                           traceback.format_exc()}))
        traceback.print_exc(file=sys.stdout)
Example #7
Source File: clients.py From cortana-intelligence-inventory-optimization with MIT License
def create_storage_clients():
    # type: (None) -> tuple
    """Create storage clients
    :rtype: tuple
    :return: blob_client, queue_client, table_client
    """
    account_name = storage.get_storageaccount()
    account_key = storage.get_storageaccount_key()
    endpoint_suffix = storage.get_storageaccount_endpoint()
    blob_client = azureblob.BlockBlobService(
        account_name=account_name,
        account_key=account_key,
        endpoint_suffix=endpoint_suffix,
    )
    queue_client = azurequeue.QueueService(
        account_name=account_name,
        account_key=account_key,
        endpoint_suffix=endpoint_suffix,
    )
    table_client = azuretable.TableService(
        account_name=account_name,
        account_key=account_key,
        endpoint_suffix=endpoint_suffix,
    )
    return blob_client, queue_client, table_client
Example #8
Source File: cascade.py From batch-shipyard with MIT License
def __init__(
        self, blob_client: azureblob.BlockBlobService, resource: str,
        blob_name: str, nglobalresources: int):
    """ContainerImageSaveThread ctor
    :param azureblob.BlockBlobService blob_client: blob client
    :param str resource: resource
    :param str blob_name: resource blob name
    :param int nglobalresources: number of global resources
    """
    threading.Thread.__init__(self)
    self.blob_client = blob_client
    self.resource = resource
    self.blob_name = blob_name
    self.nglobalresources = nglobalresources
    # add to downloading set
    with _DIRECTDL_LOCK:
        _DIRECTDL_DOWNLOADING.add(self.resource)
Example #9
Source File: storage.py From cortana-intelligence-inventory-optimization with MIT License
def delete_storage_containers(
        blob_client, queue_client, table_client, config, skip_tables=False):
    # type: (azureblob.BlockBlobService, azurequeue.QueueService,
    #        azuretable.TableService, dict, bool) -> None
    """Delete storage containers
    :param azure.storage.blob.BlockBlobService blob_client: blob client
    :param azure.storage.queue.QueueService queue_client: queue client
    :param azure.storage.table.TableService table_client: table client
    :param dict config: configuration dict
    :param bool skip_tables: skip deleting tables
    """
    for key in _STORAGE_CONTAINERS:
        if key.startswith('blob_'):
            if key != 'blob_remotefs':
                logger.debug('deleting container: {}'.format(
                    _STORAGE_CONTAINERS[key]))
                blob_client.delete_container(_STORAGE_CONTAINERS[key])
        elif not skip_tables and key.startswith('table_'):
            logger.debug('deleting table: {}'.format(_STORAGE_CONTAINERS[key]))
            table_client.delete_table(_STORAGE_CONTAINERS[key])
        elif key.startswith('queue_'):
            logger.debug('deleting queue: {}'.format(_STORAGE_CONTAINERS[key]))
            queue_client.delete_queue(_STORAGE_CONTAINERS[key])
Example #10
Source File: storage.py From cortana-intelligence-inventory-optimization with MIT License
def upload_for_remotefs(blob_client, files):
    # type: (azure.storage.blob.BlockBlobService, List[tuple]) -> List[str]
    """Upload files to blob storage for remote fs
    :param azure.storage.blob.BlockBlobService blob_client: blob client
    :param list files: files to upload
    :rtype: list
    :return: list of file urls
    """
    ret = []
    for file in files:
        _check_file_and_upload(blob_client, file, 'blob_remotefs')
        ret.append('https://{}.blob.{}/{}/{}'.format(
            _STORAGEACCOUNT, _STORAGEACCOUNTEP,
            _STORAGE_CONTAINERS['blob_remotefs'], file[0]))
    return ret
Example #11
Source File: storage.py From cortana-intelligence-inventory-optimization with MIT License
def upload_resource_files(blob_client, config, files):
    # type: (azure.storage.blob.BlockBlobService, dict, List[tuple]) -> dict
    """Upload resource files to blob storage
    :param azure.storage.blob.BlockBlobService blob_client: blob client
    :param dict config: configuration dict
    :param list files: files to upload
    :rtype: dict
    :return: sas url dict
    """
    sas_urls = {}
    for file in files:
        _check_file_and_upload(blob_client, file, 'blob_resourcefiles')
        sas_urls[file[0]] = 'https://{}.blob.{}/{}/{}?{}'.format(
            _STORAGEACCOUNT, _STORAGEACCOUNTEP,
            _STORAGE_CONTAINERS['blob_resourcefiles'], file[0],
            blob_client.generate_blob_shared_access_signature(
                _STORAGE_CONTAINERS['blob_resourcefiles'], file[0],
                permission=azureblob.BlobPermissions.READ,
                expiry=datetime.datetime.utcnow() +
                datetime.timedelta(days=_DEFAULT_SAS_EXPIRY_DAYS)
            )
        )
    return sas_urls
Example #12
Source File: storage.py From batch-shipyard with MIT License
def delete_resource_file(blob_client, blob_name, federation_id=None):
    # type: (azure.storage.blob.BlockBlobService, str, str) -> bool
    """Delete a resource file from blob storage
    :param azure.storage.blob.BlockBlobService blob_client: blob client
    :param str blob_name: blob name
    :param str federation_id: federation id
    :rtype: bool
    :return: True if the blob was deleted, False if it did not exist
    """
    if util.is_not_empty(federation_id):
        fedhash = hash_federation_id(federation_id)
        container = '{}-{}'.format(
            _STORAGE_CONTAINERS['blob_federation'], fedhash)
    else:
        container = _STORAGE_CONTAINERS['blob_resourcefiles']
    try:
        blob_client.delete_blob(container, blob_name)
        logger.debug('blob {} deleted from container {}'.format(
            blob_name, container))
    except azure.common.AzureMissingResourceHttpError:
        logger.warning('blob {} does not exist in container {}'.format(
            blob_name, container))
        return False
    return True
Example #13
Source File: storage.py From batch-shipyard with MIT License
def upload_for_nonbatch(blob_client, files, kind):
    # type: (azure.storage.blob.BlockBlobService, List[tuple],
    #        str) -> List[str]
    """Upload files to blob storage for non-batch
    :param azure.storage.blob.BlockBlobService blob_client: blob client
    :param list files: files to upload
    :param str kind: "remotefs", "monitoring" or "federation"
    :rtype: list
    :return: list of file urls
    """
    if kind == 'federation':
        kind = '{}_global'.format(kind.lower())
    key = 'blob_{}'.format(kind.lower())
    ret = []
    for file in files:
        _check_file_and_upload(blob_client, file, key)
        ret.append('https://{}.blob.{}/{}/{}'.format(
            _STORAGEACCOUNT, _STORAGEACCOUNTEP,
            _STORAGE_CONTAINERS[key], file[0]))
    return ret
Example #14
Source File: storage.py From batch-shipyard with MIT License
def _check_file_and_upload(blob_client, file, key, container=None):
    # type: (azure.storage.blob.BlockBlobService, tuple, str, str) -> None
    """Upload file to blob storage if necessary
    :param azure.storage.blob.BlockBlobService blob_client: blob client
    :param tuple file: file to upload
    :param str key: blob container key
    :param str container: absolute container override
    """
    if file[0] is None:
        return
    contname = container or _STORAGE_CONTAINERS[key]
    upload = True
    # check if blob exists
    try:
        prop = blob_client.get_blob_properties(contname, file[0])
        if (prop.properties.content_settings.content_md5 ==
                util.compute_md5_for_file(file[1], True)):
            logger.debug(
                'remote file is the same for {}, skipping'.format(file[0]))
            upload = False
    except azure.common.AzureMissingResourceHttpError:
        pass
    if upload:
        logger.info('uploading file {} as {!r}'.format(file[1], file[0]))
        blob_client.create_blob_from_path(contname, file[0], str(file[1]))
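The MD5 comparison above depends on an external util.compute_md5_for_file helper that is not shown. A minimal sketch of what such a helper might look like follows; the key assumption is that it returns a base64-encoded digest when its second argument is True, since Azure stores Content-MD5 in base64.

import base64
import hashlib

def compute_md5_for_file(path, as_base64):
    # Hypothetical stand-in for util.compute_md5_for_file. Azure stores
    # Content-MD5 as a base64-encoded digest, so the local hash must be
    # encoded the same way for the equality check above to succeed.
    md5 = hashlib.md5()
    with open(str(path), 'rb') as f:
        for chunk in iter(lambda: f.read(4 * 1024 * 1024), b''):
            md5.update(chunk)
    if as_base64:
        return base64.b64encode(md5.digest()).decode('ascii')
    return md5.hexdigest()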
Example #15
Source File: python_quickstart_client.py From batch-python-quickstart with MIT License
def get_container_sas_token(block_blob_client, container_name, blob_permissions):
    """
    Obtains a shared access signature granting the specified permissions to
    the container.
    :param block_blob_client: A blob service client.
    :type block_blob_client: `azure.storage.blob.BlockBlobService`
    :param str container_name: The name of the Azure Blob storage container.
    :param BlobPermissions blob_permissions: The permissions to grant.
    :rtype: str
    :return: A SAS token granting the specified permissions to the container.
    """
    # Obtain the SAS token for the container, setting the expiry time and
    # permissions. In this case, no start time is specified, so the shared
    # access signature becomes valid immediately.
    container_sas_token = \
        block_blob_client.generate_container_shared_access_signature(
            container_name,
            permission=blob_permissions,
            expiry=datetime.datetime.utcnow() + datetime.timedelta(hours=2))
    return container_sas_token
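A hypothetical usage of the helper, with a placeholder container name and the same BlobPermissions class the later examples use:

from azure.storage.blob import BlobPermissions

# Grants read access to every blob in the container for the ~2 hour
# window set inside the helper.
sas = get_container_sas_token(block_blob_client, 'input', BlobPermissions.READ)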
Example #16
Source File: azurepool.py From Ensemble-Bayesian-Optimization with MIT License
def get_container_sas_token(block_blob_client, container_name, blob_permissions):
    """
    Obtains a shared access signature granting the specified permissions to
    the container.
    :param block_blob_client: A blob service client.
    :type block_blob_client: `azure.storage.blob.BlockBlobService`
    :param str container_name: The name of the Azure Blob storage container.
    :param BlobPermissions blob_permissions: The permissions to grant.
    :rtype: str
    :return: A SAS token granting the specified permissions to the container.
    """
    # Obtain the SAS token for the container, setting the expiry time and
    # permissions. In this case, no start time is specified, so the shared
    # access signature becomes valid immediately.
    container_sas_token = \
        block_blob_client.generate_container_shared_access_signature(
            container_name,
            permission=blob_permissions,
            expiry=datetime.datetime.utcnow() + datetime.timedelta(hours=2))
    return container_sas_token
Example #17
Source File: storage.py From batch-shipyard with MIT License
def cleanup_with_del_pool(blob_client, table_client, config, pool_id=None):
    # type: (azureblob.BlockBlobService, azuretable.TableService,
    #        dict, str) -> None
    """Special cleanup routine in combination with delete pool
    :param azure.storage.blob.BlockBlobService blob_client: blob client
    :param azure.cosmosdb.table.TableService table_client: table client
    :param dict config: configuration dict
    :param str pool_id: pool id
    """
    if util.is_none_or_empty(pool_id):
        pool_id = settings.pool_id(config)
    if not util.confirm_action(
            config, 'delete/cleanup of Batch Shipyard metadata in storage '
            'containers associated with {} pool'.format(pool_id)):
        return
    clear_storage_containers(
        blob_client, table_client, config, tables_only=True, pool_id=pool_id)
    delete_storage_containers(
        blob_client, table_client, config, skip_tables=True)
Example #18
Source File: storage.py From batch-shipyard with MIT License
def delete_storage_containers_boot_diagnostics(
        blob_client, vm_name, vm_id):
    # type: (azureblob.BlockBlobService, str, str) -> None
    """Delete storage containers used for remotefs bootdiagnostics
    :param azure.storage.blob.BlockBlobService blob_client: blob client
    :param str vm_name: vm name
    :param str vm_id: vm id
    """
    name = re.sub(r'[\W_]+', '', vm_name)
    contname = 'bootdiagnostics-{}-{}'.format(
        name[0:min((9, len(name)))], vm_id)
    logger.info('deleting container: {}'.format(contname))
    try:
        blob_client.delete_container(contname)
    except azure.common.AzureMissingResourceHttpError:
        logger.warning('container not found: {}'.format(contname))
Example #19
Source File: azure_storage_checkpoint_manager.py From azure-event-hubs-python with MIT License
def initialize(self, host):
    """
    The EventProcessorHost can't pass itself to the
    AzureStorageCheckpointLeaseManager constructor because it is still being
    constructed. Other initialization is also done here because it might
    throw, and hence we don't want it in the constructor.
    """
    self.host = host
    self.storage_client = BlockBlobService(account_name=self.storage_account_name,
                                           account_key=self.storage_account_key,
                                           sas_token=self.storage_sas_token,
                                           endpoint_suffix=self.endpoint_suffix,
                                           connection_string=self.connection_string,
                                           request_session=self.request_session)
    self.consumer_group_directory = self.storage_blob_prefix + self.host.eh_config.consumer_group

# Checkpoint Management Methods
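Example #19 forwards every supported credential source (account key, SAS token, connection string) to the constructor and lets the unused ones default to None. When only a connection string is available, a minimal sketch looks like this; the environment variable name is a placeholder:

import os
from azure.storage.blob import BlockBlobService

# Any source of a valid connection string works here.
storage_client = BlockBlobService(
    connection_string=os.environ['AZURE_STORAGE_CONNECTION_STRING'])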
Example #20
Source File: storage.py From batch-shipyard with MIT License
def _clear_blob_task_resourcefiles(blob_client, container, config):
    # type: (azureblob.BlockBlobService, str, dict) -> None
    """Clear task resource file blobs in container
    :param azure.storage.blob.BlockBlobService blob_client: blob client
    :param str container: container to clear blobs from
    :param dict config: configuration dict
    """
    bs = settings.batch_shipyard_settings(config)
    envfileloc = '{}taskrf-'.format(bs.storage_entity_prefix)
    logger.info('deleting blobs with prefix: {}'.format(envfileloc))
    try:
        blobs = blob_client.list_blobs(container, prefix=envfileloc)
    except azure.common.AzureMissingResourceHttpError:
        logger.warning('container not found: {}'.format(container))
    else:
        for blob in blobs:
            blob_client.delete_blob(container, blob.name)
Example #21
Source File: cascade.py From cortana-intelligence-inventory-optimization with MIT License
def _create_credentials() -> tuple:
    """Create storage credentials
    :rtype: tuple
    :return: (blob_client, queue_client, table_client)
    """
    sa, ep, sakey = os.environ['SHIPYARD_STORAGE_ENV'].split(':')
    blob_client = azureblob.BlockBlobService(
        account_name=sa,
        account_key=sakey,
        endpoint_suffix=ep)
    queue_client = azurequeue.QueueService(
        account_name=sa,
        account_key=sakey,
        endpoint_suffix=ep)
    table_client = azuretable.TableService(
        account_name=sa,
        account_key=sakey,
        endpoint_suffix=ep)
    return blob_client, queue_client, table_client
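The function above expects a single colon-delimited environment variable. A hypothetical setup with placeholder values, matching the account:endpoint-suffix:key layout the split implies:

import os

os.environ['SHIPYARD_STORAGE_ENV'] = 'mystorageaccount:core.windows.net:<account-key>'
blob_client, queue_client, table_client = _create_credentials()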
Example #22
Source File: cascade.py From cortana-intelligence-inventory-optimization with MIT License
def __init__(
        self, blob_client: azure.storage.blob.BlockBlobService,
        queue_client: azure.storage.queue.QueueService,
        table_client: azure.storage.table.TableService,
        resource: str, msg_id: str, nglobalresources: int):
    """DockerSaveThread ctor
    :param azure.storage.blob.BlockBlobService blob_client: blob client
    :param azure.storage.queue.QueueService queue_client: queue client
    :param azure.storage.table.TableService table_client: table client
    :param str resource: resource
    :param str msg_id: queue message id
    :param int nglobalresources: number of global resources
    """
    threading.Thread.__init__(self)
    self.blob_client = blob_client
    self.queue_client = queue_client
    self.table_client = table_client
    self.resource = resource
    self.msg_id = msg_id
    self.nglobalresources = nglobalresources
    with _DIRECTDL_LOCK:
        _DIRECTDL_DOWNLOADING.append(self.resource)
Example #23
Source File: storage.py From batch-shipyard with MIT License
def upload_resource_files(blob_client, files):
    # type: (azure.storage.blob.BlockBlobService, List[tuple]) -> dict
    """Upload resource files to blob storage
    :param azure.storage.blob.BlockBlobService blob_client: blob client
    :param list files: files to upload
    :rtype: dict
    :return: sas url dict
    """
    sas_urls = {}
    for file in files:
        _check_file_and_upload(blob_client, file, 'blob_resourcefiles')
        sas_urls[file[0]] = 'https://{}.blob.{}/{}/{}?{}'.format(
            _STORAGEACCOUNT, _STORAGEACCOUNTEP,
            _STORAGE_CONTAINERS['blob_resourcefiles'], file[0],
            blob_client.generate_blob_shared_access_signature(
                _STORAGE_CONTAINERS['blob_resourcefiles'], file[0],
                permission=azureblob.BlobPermissions.READ,
                expiry=datetime.datetime.utcnow() +
                datetime.timedelta(days=_DEFAULT_SAS_EXPIRY_DAYS)
            )
        )
    return sas_urls
Example #24
Source File: test_wasb.py From airflow with Apache License 2.0
def test_sas_token(self):
    from azure.storage.blob import BlockBlobService
    hook = WasbHook(wasb_conn_id='wasb_test_sas_token')
    self.assertEqual(hook.conn_id, 'wasb_test_sas_token')
    self.assertIsInstance(hook.connection, BlockBlobService)
Example #25
Source File: azure_utils.py From gail-tf with MIT License
def __init__(self, account_name, account_key, container_name, maybe_create=False):
    self._account_name = account_name
    self._container_name = container_name
    if account_name not in Container.services:
        Container.services[account_name] = BlobService(account_name, account_key)
    self._service = Container.services[account_name]
    if maybe_create:
        self._service.create_container(self._container_name, fail_on_exist=False)
Example #26
Source File: azure_client.py From polystores with MIT License
def get_blob_service_connection(account_name=None, account_key=None, connection_string=None):
    account_name = account_name or get_account_name()
    account_key = account_key or get_account_key()
    connection_string = connection_string or get_connection_string()
    return BlockBlobService(account_name=account_name,
                            account_key=account_key,
                            connection_string=connection_string)
Example #27
Source File: batch_python_tutorial_ffmpeg.py From batch-python-ffmpeg-tutorial with MIT License
def upload_file_to_container(block_blob_client, container_name, file_path):
    """
    Uploads a local file to an Azure Blob storage container.
    :param block_blob_client: A blob service client.
    :type block_blob_client: `azure.storage.blob.BlockBlobService`
    :param str container_name: The name of the Azure Blob storage container.
    :param str file_path: The local path to the file.
    :rtype: `azure.batch.models.ResourceFile`
    :return: A ResourceFile initialized with a SAS URL appropriate for Batch
    tasks.
    """
    blob_name = os.path.basename(file_path)
    print('Uploading file {} to container [{}]...'.format(file_path, container_name))
    block_blob_client.create_blob_from_path(container_name, blob_name, file_path)
    # Obtain the SAS token for the container.
    sas_token = get_container_sas_token(block_blob_client, container_name,
                                        azureblob.BlobPermissions.READ)
    sas_url = block_blob_client.make_blob_url(container_name, blob_name,
                                              sas_token=sas_token)
    return batchmodels.ResourceFile(file_path=blob_name, http_url=sas_url)
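A hypothetical call, assuming the 'input' container exists and the tutorial's module-level imports (azureblob, batchmodels) are in scope; the file path is a placeholder:

resource_file = upload_file_to_container(blob_client, 'input', '/tmp/sample.mp4')
print(resource_file.http_url)  # SAS URL a Batch task can download from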
Example #28
Source File: test_azure_client.py From polystores with MIT License
def test_get_blob_service_connection(self):
    with self.assertRaises(ValueError):
        get_blob_service_connection()
    service = get_blob_service_connection(account_name='foo', account_key='bar')
    assert isinstance(service, BlockBlobService)
    os.environ['AZURE_ACCOUNT_NAME'] = 'foo'
    os.environ['AZURE_ACCOUNT_KEY'] = 'bar'
    service = get_blob_service_connection()
    assert isinstance(service, BlockBlobService)
Example #29
Source File: storage.py From InfraBox with Apache License 2.0
def _get_client(self):
    client = BlockBlobService(account_name=get_env('INFRABOX_STORAGE_AZURE_ACCOUNT_NAME'),
                              account_key=get_env('INFRABOX_STORAGE_AZURE_ACCOUNT_KEY'))
    return client
Example #30
Source File: storage_account.py From takeoff with GNU General Public License v3.0
def service_client(self, config: dict) -> BlockBlobService:
    credential_kwargs = super()._transform_key_to_credential_kwargs(
        config["azure"]["keyvault_keys"][current_filename(__file__)]
    )
    return BlockBlobService(**credential_kwargs)