Python azure.common.AzureException() Examples

The following are 30 code examples of azure.common.AzureException(), extracted from open source projects. The original project and source file are noted above each example. You may also want to check out all other available functions and classes of the azure.common module.
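Before the project examples, here is a minimal, self-contained sketch of the basic pattern most of them follow: wrapping a legacy azure-storage call and catching AzureException (or one of its HTTP subclasses). The account, container, and blob names below are placeholders and are not taken from any of the projects listed here.

from azure.common import AzureException, AzureMissingResourceHttpError
from azure.storage.blob import BlockBlobService  # legacy azure-storage-blob <= 2.x

# Placeholder credentials for illustration only.
service = BlockBlobService(account_name="myaccount", account_key="<account-key>")

try:
    blob = service.get_blob_to_bytes("mycontainer", "myblob")
except AzureMissingResourceHttpError:
    # Subclass of AzureException raised when the container or blob does not exist.
    blob = None
except AzureException as error:
    # Base class for errors raised by the legacy azure-common/azure-storage SDKs.
    raise IOError("Failed to download blob: {}".format(error))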
Example #1
Source File: azure_service.py    From koku with GNU Affero General Public License v3.0
def download_cost_export(self, key, container_name, destination=None):
        """Download the latest cost export file from a given storage container."""
        cost_export = self.get_cost_export_for_key(key, container_name)

        file_path = destination
        if not destination:
            temp_file = NamedTemporaryFile(delete=False, suffix=".csv")
            file_path = temp_file.name
        try:
            blob_client = self._cloud_storage_account.get_blob_client(container_name, cost_export.name)

            with open(file_path, "wb") as blob_download:
                blob_download.write(blob_client.download_blob().readall())
        except (AdalError, AzureException, ClientException, IOError) as error:
            raise AzureServiceError("Failed to download cost export. Error: ", str(error))
        return file_path 
Example #2
Source File: azurestorage.py    From quay with Apache License 2.0
def get_direct_download_url(
        self, object_path, request_ip=None, expires_in=60, requires_cors=False, head=False
    ):
        blob_name = self._blob_name_from_path(object_path)

        try:
            sas_token = self._blob_service.generate_blob_shared_access_signature(
                self._azure_container,
                blob_name,
                ContainerPermissions.READ,
                datetime.utcnow() + timedelta(seconds=expires_in),
            )

            blob_url = self._blob_service.make_blob_url(
                self._azure_container, blob_name, sas_token=sas_token
            )
        except AzureException:
            logger.exception(
                "Exception when trying to get direct download for path %s", object_path
            )
            raise IOError("Exception when trying to get direct download")

        return blob_url 
Example #3
Source File: azure_service.py    From koku with GNU Affero General Public License v3.0
def get_cost_export_for_key(self, key, container_name):
        """Get the latest cost export file from given storage account container."""
        report = None
        try:
            container_client = self._cloud_storage_account.get_container_client(container_name)
            blob_list = container_client.list_blobs(name_starts_with=key)
        except (AdalError, AzureException, ClientException) as error:
            raise AzureServiceError("Failed to download cost export. Error: ", str(error))

        for blob in blob_list:
            if key == blob.name:
                report = blob
                break
        if not report:
            message = f"No cost report for report name {key} found in container {container_name}."
            raise AzureCostReportNotFound(message)
        return report 
Example #4
Source File: test_blob_encryption.py    From azure-storage-python with MIT License
def test_missing_attribute_kek_unwrap(self):
        # Shared between all services in _decrypt_blob
        # Arrange
        self.bbs.require_encryption = True
        valid_key = KeyWrapper('key1')
        self.bbs.key_encryption_key = valid_key
        blob_name = self._create_small_blob('block_blob')

        # Act
        # Note that KeyWrapper has a default value for key_id, so these Exceptions
        # are not due to non_matching kids.
        invalid_key_1 = lambda: None #functions are objects, so this effectively creates an empty object
        invalid_key_1.get_kid = valid_key.get_kid
        #No attribute unwrap_key
        self.bbs.key_encryption_key = invalid_key_1
        with self.assertRaises(AzureException):
            self.bbs.get_blob_to_bytes(self.container_name, blob_name)

        invalid_key_2 = lambda: None #functions are objects, so this effectively creates an empty object
        invalid_key_2.unwrap_key = valid_key.unwrap_key
        #No attribute get_kid
        with self.assertRaises(AzureException):
            self.bbs.get_blob_to_bytes(self.container_name, blob_name) 
Example #5
Source File: archive.py    From bob with GNU General Public License v3.0
def scriptDownload(args):
        service, container, remoteBlob, localFile = AzureArchive.scriptGetService(args)
        from azure.common import AzureException

        # Download into temporary file and rename if downloaded successfully
        tmpName = None
        try:
            (tmpFd, tmpName) = mkstemp(dir=".")
            os.close(tmpFd)
            service.get_blob_to_path(container, remoteBlob, tmpName)
            os.rename(tmpName, localFile)
            tmpName = None
        except (OSError, AzureException) as e:
            raise BuildError("Download failed: " + str(e))
        finally:
            if tmpName is not None: os.unlink(tmpName) 
Example #6
Source File: archive.py    From bob with GNU General Public License v3.0
def _openDownloadFile(self, buildId, suffix):
        from azure.common import AzureException, AzureMissingResourceHttpError
        (tmpFd, tmpName) = mkstemp()
        try:
            os.close(tmpFd)
            self.__service.get_blob_to_path(self.__container,
                self.__makeBlobName(buildId, suffix), tmpName)
            ret = tmpName
            tmpName = None
            return AzureDownloader(ret)
        except AzureMissingResourceHttpError:
            raise ArtifactNotFoundError()
        except AzureException as e:
            raise ArtifactDownloadError(str(e))
        finally:
            if tmpName is not None: os.unlink(tmpName) 
Example #7
Source File: test_blob_encryption.py    From azure-storage-python with MIT License
def test_get_blob_nonmatching_kid(self):
        # Arrange
        self.bbs.require_encryption = True
        self.bbs.key_encryption_key = KeyWrapper('key1')
        blob_name = self._create_small_blob('block_blob')

        # Act
        self.bbs.key_encryption_key.kid = 'Invalid'

        # Assert
        try:
            self.bbs.get_blob_to_bytes(self.container_name, blob_name)
            self.fail()
        except AzureException as e:
            self.assertEqual(str(e), _ERROR_DECRYPTION_FAILURE) 
Example #8
Source File: test_blob_encryption.py    From azure-storage-python with MIT License
def test_invalid_value_kek_unwrap(self):
        # Arrange
        self.bbs.require_encryption = True
        self.bbs.key_encryption_key = KeyWrapper('key1')
        blob_name = self._create_small_blob('block_blob')

        # Act
        self.bbs.key_encryption_key = KeyWrapper('key1')
        self.bbs.key_encryption_key.unwrap_key = None
        try:
            self.bbs.get_blob_to_bytes(self.container_name, blob_name)
            self.fail()
        except AzureException as e:
            self.assertEqual(str(e), _ERROR_DECRYPTION_FAILURE) 
Example #9
Source File: test_client.py    From azure-storage-python with MIT License
def test_client_request_id_echo(self):
        # Arrange
        service = BlockBlobService(self.account_name, self.account_key, is_emulated=self.settings.IS_EMULATED)
        service.retry = ExponentialRetry(max_attempts=1, initial_backoff=1,).retry
        name = self.get_resource_name('cont')

        # Act make the client request ID slightly different
        def callback(response):
            response.status = 200
            response.headers[_CLIENT_REQUEST_ID_HEADER_NAME] += '1'

        service.response_callback = callback

        # Assert the client request ID validation is working
        with self.assertRaises(AzureException):
            service.exists(name)

        # Act remove the echoed client request ID
        def callback(response):
            response.status = 200
            del response.headers[_CLIENT_REQUEST_ID_HEADER_NAME]

        service.response_callback = callback

        # Assert the client request ID validation is not throwing when the ID is not echoed
        service.exists(name)


# ------------------------------------------------------------------------------ 
Example #10
Source File: archive.py    From bob with GNU General Public License v3.0
def _openUploadFile(self, buildId, suffix):
        from azure.common import AzureException

        blobName = self.__makeBlobName(buildId, suffix)
        try:
            if self.__service.exists(self.__container, blobName):
                raise ArtifactExistsError()
        except AzureException as e:
            raise ArtifactUploadError(str(e))
        (tmpFd, tmpName) = mkstemp()
        os.close(tmpFd)
        return AzureUploader(self.__service, self.__container, tmpName, blobName) 
Example #11
Source File: tool_reset_account.py    From azure-storage-python with MIT License
def purge_blob_containers(account, account_key):
    """
        Delete all blob containers in the given storage account.
        USE AT OWN RISK. NOT SUPPORTED BY STORAGE TEAM.
    """
    bs = BlockBlobService(account, account_key)

    with concurrent.futures.ThreadPoolExecutor(max_workers=20) as executor:
        # use a map to keep track of futures
        future_to_container_map = {executor.submit(delete_container, bs, container): container for container in bs.list_containers()}

        # as the futures are completed, print results
        for future in concurrent.futures.as_completed(future_to_container_map):
            container_name = future_to_container_map[future].name

            try:
                is_deleted = future.result()
                if is_deleted:
                    print("Deleted container {} on first try".format(container_name))
                else:
                    print("Skipped container {} as it no longer exists".format(container_name))
            except AzureException as e:
                # if the deletion failed because there's an active lease on the container, we will break it
                # since it is most likely left-over from previous tests
                if 'lease' in str(e):
                    bs.break_container_lease(container_name)
                    is_deleted = bs.delete_container(container_name)

                    if is_deleted:
                        print("Deleted container {} after having broken lease".format(container_name))
                    else:
                        print("Skipped container {} as it stopped existing after having broken lease".format(container_name))
                else:
                    raise e
            except Exception as e:
                print("Skipped container " + container_name + " due to error " + str(e)) 
Example #12
Source File: _deserialization.py    From azure-storage-python with MIT License
def _parse_blob(response, name, snapshot, validate_content=False, require_encryption=False,
                key_encryption_key=None, key_resolver_function=None, start_offset=None, end_offset=None):
    if response is None:
        return None

    metadata = _parse_metadata(response)
    props = _parse_properties(response, BlobProperties)

    # For range gets, only look at 'x-ms-blob-content-md5' for overall MD5
    content_settings = getattr(props, 'content_settings')
    if 'content-range' in response.headers:
        if 'x-ms-blob-content-md5' in response.headers:
            setattr(content_settings, 'content_md5', _to_str(response.headers['x-ms-blob-content-md5']))
        else:
            delattr(content_settings, 'content_md5')

    if validate_content:
        computed_md5 = _get_content_md5(response.body)
        _validate_content_match(response.headers['content-md5'], computed_md5)

    if key_encryption_key is not None or key_resolver_function is not None:
        try:
            response.body = _decrypt_blob(require_encryption, key_encryption_key, key_resolver_function,
                                          response, start_offset, end_offset)
        except:
            raise AzureException(_ERROR_DECRYPTION_FAILURE)

    return Blob(name, snapshot, response.body, props, metadata) 
Example #13
Source File: azurestorage.py    From quay with Apache License 2.0
def get_checksum(self, path):
        blob_name = self._blob_name_from_path(path)
        try:
            blob = self._blob_service.get_blob_properties(self._azure_container, blob_name)
        except AzureException:
            logger.exception("Exception when trying to get_checksum for path %s", path)
            raise IOError("Exception when trying to get_checksum path")
        return blob.properties.etag 
Example #14
Source File: azurestorage.py    From quay with Apache License 2.0
def remove(self, path):
        blob_name = self._blob_name_from_path(path)
        try:
            self._blob_service.delete_blob(self._azure_container, blob_name)
        except AzureException:
            logger.exception("Exception when trying to remove path %s", path)
            raise IOError("Exception when trying to remove path") 
Example #15
Source File: azurestorage.py    From quay with Apache License 2.0
def stream_write(self, path, fp, content_type=None, content_encoding=None):
        blob_name = self._blob_name_from_path(path)
        content_settings = ContentSettings(
            content_type=content_type, content_encoding=content_encoding,
        )

        try:
            self._blob_service.create_blob_from_stream(
                self._azure_container, blob_name, fp, content_settings=content_settings
            )
        except AzureException as ae:
            logger.exception("Exception when trying to stream_write path %s", path)
            raise IOError("Exception when trying to stream_write path", ae) 
Example #16
Source File: azurestorage.py    From quay with Apache License 2.0
def stream_read_file(self, path):
        blob_name = self._blob_name_from_path(path)

        try:
            output_stream = io.BytesIO()
            self._blob_service.get_blob_to_stream(self._azure_container, blob_name, output_stream)
            output_stream.seek(0)
        except AzureException:
            logger.exception("Exception when trying to stream_file_read path %s", path)
            raise IOError("Exception when trying to stream_file_read path")

        return output_stream 
Example #17
Source File: azurestorage.py    From quay with Apache License 2.0
def put_content(self, path, content):
        blob_name = self._blob_name_from_path(path)
        try:
            self._blob_service.create_blob_from_bytes(self._azure_container, blob_name, content)
        except AzureException:
            logger.exception("Exception when trying to put path %s", path)
            raise IOError("Exception when trying to put path") 
Example #18
Source File: resources.py    From cloudbridge with MIT License
def upload_from_file(self, path):
        """
        Store the contents of the file pointed by the "path" variable.
        """
        try:
            self._provider.azure_client.create_blob_from_file(
                self._container.id, self.id, path)
            return True
        except AzureException as azureEx:
            log.exception(azureEx)
            return False 
Example #19
Source File: resources.py    From cloudbridge with MIT License 5 votes vote down vote up
def upload(self, data):
        """
        Set the contents of this object to the data read from the source
        string.
        """
        try:
            self._provider.azure_client.create_blob_from_text(
                self._container.id, self.id, data)
            return True
        except AzureException as azureEx:
            log.exception(azureEx)
            return False 
Example #20
Source File: services.py    From cloudbridge with MIT License
def get(self, bucket, object_id):
        """
        Retrieve a given object from this bucket.
        """
        try:
            obj = self.provider.azure_client.get_blob(bucket.name,
                                                      object_id)
            return AzureBucketObject(self.provider, bucket, obj)
        except AzureException as azureEx:
            log.exception(azureEx)
            return None 
Example #21
Source File: services.py    From cloudbridge with MIT License
def get(self, bucket_id):
        """
        Returns a bucket given its ID. Returns ``None`` if the bucket
        does not exist.
        """
        try:
            bucket = self.provider.azure_client.get_container(bucket_id)
            return AzureBucket(self.provider, bucket)
        except AzureException as error:
            log.exception(error)
            return None 
Example #22
Source File: services.py    From cloudbridge with MIT License
def get(self, key_pair_id):
        try:
            key_pair = self.provider.azure_client.\
                get_public_key(key_pair_id)

            if key_pair:
                return AzureKeyPair(self.provider, key_pair)
            return None
        except AzureException as error:
            log.debug("KeyPair %s was not found.", key_pair_id)
            log.debug(error)
            return None 
Example #23
Source File: azurestorage.py    From quay with Apache License 2.0
def get_content(self, path):
        blob_name = self._blob_name_from_path(path)
        try:
            blob = self._blob_service.get_blob_to_bytes(self._azure_container, blob_name)
        except AzureException:
            logger.exception("Exception when trying to get path %s", path)
            raise IOError("Exception when trying to get path")

        return blob.content 
Example #24
Source File: archive.py    From bob with GNU General Public License v3.0
def scriptUpload(args):
        service, container, remoteBlob, localFile = AzureArchive.scriptGetService(args)
        from azure.common import AzureException, AzureConflictHttpError
        try:
            service.create_blob_from_path(container, remoteBlob, localFile, if_none_match="*")
            print("OK")
        except AzureConflictHttpError:
            print("skipped")
        except (OSError, AzureException) as e:
            raise BuildError("Upload failed: " + str(e)) 
Example #25
Source File: archive.py    From bob with GNU General Public License v3.0
def __upload(self):
        from azure.common import AzureException, AzureConflictHttpError
        try:
            self.__service.create_blob_from_path(self.__container,
                self.__remoteName, self.__name, if_none_match="*")
        except AzureConflictHttpError:
            raise ArtifactExistsError()
        except AzureException as e:
            raise ArtifactUploadError(str(e)) 
Example #26
Source File: test_azure_services.py    From koku with GNU Affero General Public License v3.0
def throw_azure_exception(scope):
    """Raises azure exception."""
    raise AzureException() 
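A helper like this is typically handed to a mock as its side_effect so the code under test sees a genuine AzureException; the koku tests presumably wire it up that way via mock.patch. Below is a minimal, self-contained sketch of that pattern, not taken from the koku test suite (the mocked client and method names are placeholders).

import unittest
from unittest.mock import MagicMock

from azure.common import AzureException


def throw_azure_exception(*args, **kwargs):
    """Stand-in side_effect that mimics a failing Azure SDK call."""
    raise AzureException()


class ExampleSideEffectTest(unittest.TestCase):
    def test_side_effect_raises_azure_exception(self):
        # Attach the helper to a mocked storage client; calling the mocked
        # method now raises AzureException just like a real SDK failure would.
        client = MagicMock()
        client.get_container_client.side_effect = throw_azure_exception
        with self.assertRaises(AzureException):
            client.get_container_client("container-name")


if __name__ == "__main__":
    unittest.main()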
Example #27
Source File: azure_service.py    From koku with GNU Affero General Public License v3.0
def get_latest_cost_export_for_path(self, report_path, container_name):
        """Get the latest cost export file from given storage account container."""
        latest_report = None
        if not container_name:
            message = "Unable to gather latest export as container name is not provided."
            LOG.warning(message)
            raise AzureCostReportNotFound(message)

        try:
            container_client = self._cloud_storage_account.get_container_client(container_name)
            blob_list = container_client.list_blobs(name_starts_with=report_path)
            for blob in blob_list:
                if report_path in blob.name and not latest_report:
                    latest_report = blob
                elif report_path in blob.name and blob.last_modified > latest_report.last_modified:
                    latest_report = blob
            if not latest_report:
                message = f"No cost report found in container {container_name} for " f"path {report_path}."
                raise AzureCostReportNotFound(message)
            return latest_report
        except (AdalError, AzureException, ClientException) as error:
            raise AzureServiceError("Failed to download cost export. Error: ", str(error))
        except HttpResponseError as httpError:
            if httpError.status_code == 403:
                message = (
                    "An authorization error occurred attempting to gather latest export"
                    f" in container {container_name} for "
                    f"path {report_path}."
                )
            else:
                message = (
                    "Unknown error occurred attempting to gather latest export"
                    f" in container {container_name} for "
                    f"path {report_path}."
                )
            error_msg = message + f" Azure Error: {httpError}."
            LOG.warning(error_msg)
            raise AzureCostReportNotFound(message) 
Example #28
Source File: tests_provider.py    From koku with GNU Affero General Public License v3.0
def test_cost_usage_source_is_reachable_exception(self, _):
        """Test that ValidationError is raised when AzureException is raised."""
        credentials = {
            "subscription_id": FAKE.uuid4(),
            "tenant_id": FAKE.uuid4(),
            "client_id": FAKE.uuid4(),
            "client_secret": FAKE.word(),
        }
        source_name = {"resource_group": FAKE.word(), "storage_account": FAKE.word()}
        with self.assertRaises(ValidationError):
            AzureProvider().cost_usage_source_is_reachable(credentials, source_name) 
Example #29
Source File: encryption_usage.py    From azure-storage-python with MIT License
def require_encryption(self):
        self.block_blob_service.key_encryption_key = None
        self.block_blob_service.key_resolver_function = None
        self.block_blob_service.require_encryption = False
        container_name = self._create_container()
        encrypted_blob_name = self._get_blob_reference(prefix='block_blob')
        unencrypted_blob_name = self._get_blob_reference(prefix='unencrypted_blob')
        data = b'Foo'
        self.block_blob_service.create_blob_from_bytes(container_name, unencrypted_blob_name, data)

        # If the require_encryption flag is set, the service object will throw if 
        # there is no encryption policy set on upload.
        self.block_blob_service.require_encryption = True
        try:
            self.block_blob_service.create_blob_from_bytes(container_name, encrypted_blob_name, data)
            raise Exception
        except ValueError:
            pass

        # If the require_encryption flag is set, the service object will throw if
        # there is no encryption policy set on download.
        kek = KeyWrapper('key1')
        key_resolver = KeyResolver()
        key_resolver.put_key(kek)

        self.block_blob_service.key_encryption_key = kek
        self.block_blob_service.create_blob_from_bytes(container_name, encrypted_blob_name, data)

        self.block_blob_service.key_encryption_key = None
        try:
            self.block_blob_service.get_blob_to_bytes(container_name, encrypted_blob_name)
            raise Exception
        except ValueError:
            pass

        # If the require_encryption flag is set, but the retrieved blob is not
        # encrypted, the service object will throw.
        self.block_blob_service.key_resolver_function = key_resolver.resolve_key
        try:
            self.block_blob_service.get_blob_to_bytes(container_name, unencrypted_blob_name)
            raise Exception
        except AzureException:
            pass

        self.block_blob_service.delete_container(container_name) 
Example #30
Source File: provider.py    From koku with GNU Affero General Public License v3.0
def cost_usage_source_is_reachable(self, credential_name, storage_resource_name):
        """
        Verify that the cost usage report source is reachable by Koku.

        Implemented by provider specific class.  An account validation and
        connectivity check is to be done.

        Args:
            credential (dict): Azure credentials dict

            example: {'subscription_id': 'f695f74f-36a4-4112-9fe6-74415fac75a2',
                      'tenant_id': '319d4d72-7ddc-45d0-9d63-a2db0a36e048',
                      'client_id': 'ce26bd50-2e5a-4eb7-9504-a05a79568e25',
                      'client_secret': 'abc123' }

            source_name (dict): Identifier of the cost usage report source

            example: { 'resource_group': 'My Resource Group 1',
                       'storage_account': 'My Storage Account 2' }

        Returns:
            None

        Raises:
            ValidationError: Error string

        """
        key = "azure.error"

        azure_service = None

        if not (isinstance(credential_name, dict) and isinstance(storage_resource_name, dict)):
            message = "Resource group and/or Storage account must be a dict"
            raise ValidationError(error_obj(key, message))

        resource_group = storage_resource_name.get("resource_group")
        storage_account = storage_resource_name.get("storage_account")
        subscription_id = credential_name.get("subscription_id")

        self._verify_patch_entries(subscription_id, resource_group, storage_account)

        try:
            azure_service = AzureService(
                **credential_name, resource_group_name=resource_group, storage_account_name=storage_account
            )
            azure_client = AzureClientFactory(**credential_name)
            storage_accounts = azure_client.storage_client.storage_accounts
            storage_account = storage_accounts.get_properties(resource_group, storage_account)
            if azure_service and not azure_service.describe_cost_management_exports():
                key = ProviderErrors.AZURE_NO_REPORT_FOUND
                message = ProviderErrors.AZURE_MISSING_EXPORT_MESSAGE
                raise ValidationError(error_obj(key, message))
        except AzureCostReportNotFound as costreport_err:
            key = ProviderErrors.AZURE_BILLING_SOURCE_NOT_FOUND
            raise ValidationError(error_obj(key, str(costreport_err)))
        except (AdalError, AzureException, AzureServiceError, ClientException, TypeError) as exc:
            key = ProviderErrors.AZURE_CLIENT_ERROR
            raise ValidationError(error_obj(key, str(exc)))

        return True