com.microsoft.azure.storage.blob.CloudPageBlob Java Examples
The following examples show how to use
com.microsoft.azure.storage.blob.CloudPageBlob.
You can go to the original project or source file by following the reference above each example.
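Before the project-specific examples below, here is a minimal sketch of the basic CloudPageBlob workflow. The connection string, container name, and blob name are placeholders and not taken from the examples that follow; note that page blob sizes and page writes must be aligned to 512 bytes.

import com.microsoft.azure.storage.CloudStorageAccount;
import com.microsoft.azure.storage.blob.CloudBlobClient;
import com.microsoft.azure.storage.blob.CloudBlobContainer;
import com.microsoft.azure.storage.blob.CloudPageBlob;

import java.io.ByteArrayInputStream;

public class PageBlobQuickstart {
    public static void main(String[] args) throws Exception {
        // Placeholder connection string; supply your own storage account credentials.
        CloudStorageAccount account = CloudStorageAccount.parse("<storage-connection-string>");
        CloudBlobClient client = account.createCloudBlobClient();
        CloudBlobContainer container = client.getContainerReference("examples");
        container.createIfNotExists();

        // Allocate a 512 KB page blob; the size must be a multiple of 512 bytes.
        CloudPageBlob pageBlob = container.getPageBlobReference("example.dat");
        pageBlob.create(512 * 1024);

        // Write a single 512-byte page at offset 0.
        byte[] page = new byte[512];
        pageBlob.uploadPages(new ByteArrayInputStream(page), 0, page.length);

        // Refresh properties from the service and print the blob length.
        pageBlob.downloadAttributes();
        System.out.println("Length: " + pageBlob.getProperties().getLength());
    }
}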
Example #1
Source File: AzureCloudBlobClientActions.java From cloudbreak with Apache License 2.0 | 7 votes |
private void listBlobsInDirectoryWithValidation(CloudBlobContainer cloudBlobContainer, String directoryName, Boolean zeroContent)
        throws URISyntaxException, StorageException {
    CloudBlobDirectory blobDirectory = cloudBlobContainer.getDirectoryReference(directoryName);
    Set<String> blobsWithZeroLength = new HashSet<>();

    for (ListBlobItem blobItem : blobDirectory.listBlobs()) {
        if (blobItem instanceof CloudBlobDirectory) {
            listBlobsInDirectoryWithValidation(cloudBlobContainer, ((CloudBlobDirectory) blobItem).getPrefix(), zeroContent);
        } else if (blobItem instanceof CloudPageBlob) {
            validateBlobItemLength(blobItem, zeroContent, blobsWithZeroLength);
        } else if (blobItem instanceof CloudBlockBlob) {
            validateBlobItemLength(blobItem, zeroContent, blobsWithZeroLength);
        } else {
            LOGGER.error("Azure Adls Gen 2 Cloud Storage Item that is present at URI: {} cannot be classified as CloudBlob, CloudPageBlob and "
                    + "CloudBlockBlob. ", blobItem.getUri().getPath());
            throw new TestFailException(String.format("Azure Adls Gen 2 Cloud Storage Item that is present at URI: %s cannot be classified as"
                    + " CloudBlob, CloudPageBlob and CloudBlockBlob. ", blobItem.getUri().getPath()));
        }
    }
}
Example #2
Source File: AzureCloudBlobClientActions.java From cloudbreak with Apache License 2.0 | 7 votes |
private void listBlobsInDirectory(CloudBlobContainer cloudBlobContainer, String directoryName)
        throws URISyntaxException, StorageException {
    CloudBlobDirectory blobDirectory = cloudBlobContainer.getDirectoryReference(directoryName);

    for (ListBlobItem blobItem : blobDirectory.listBlobs()) {
        if (blobItem instanceof CloudBlobDirectory) {
            listBlobsInDirectory(cloudBlobContainer, ((CloudBlobDirectory) blobItem).getPrefix());
        } else if (blobItem instanceof CloudPageBlob) {
            Log.log(LOGGER, format(" Azure Adls Gen 2 Cloud Page Blob is present with Name: [%s] and with bytes of content: [%d] at URI: [%s] ",
                    ((CloudPageBlob) blobItem).getName(),
                    ((CloudPageBlob) blobItem).getProperties().getLength(),
                    blobItem.getUri().getPath()));
        } else if (blobItem instanceof CloudBlockBlob) {
            Log.log(LOGGER, format(" Azure Adls Gen 2 Cloud Block Blob is present with Name: [%s] and with bytes of content: [%d] at URI: [%s] ",
                    ((CloudBlockBlob) blobItem).getName(),
                    ((CloudBlockBlob) blobItem).getProperties().getLength(),
                    blobItem.getUri().getPath()));
        } else {
            LOGGER.error("Azure Adls Gen 2 Cloud Storage Item that is present at URI: [{}] cannot be classified as CloudBlob, CloudPageBlob and "
                    + "CloudBlockBlob. ", blobItem.getUri().getPath());
            throw new TestFailException(String.format("Azure Adls Gen 2 Cloud Storage Item that is present at URI: [%s] cannot be classified as"
                    + " CloudBlob, CloudPageBlob and CloudBlockBlob. ", blobItem.getUri().getPath()));
        }
    }
}
Example #3
Source File: AzureCloudBlobClientActions.java From cloudbreak with Apache License 2.0 | 7 votes |
private void deleteBlobsInDirectory(CloudBlobContainer cloudBlobContainer, String directoryName)
        throws URISyntaxException, StorageException {
    CloudBlobDirectory blobDirectory = cloudBlobContainer.getDirectoryReference(directoryName);

    for (ListBlobItem blobItem : blobDirectory.listBlobs()) {
        if (blobItem instanceof CloudBlobDirectory) {
            deleteBlobsInDirectory(cloudBlobContainer, ((CloudBlobDirectory) blobItem).getPrefix());
        } else if (blobItem instanceof CloudPageBlob) {
            CloudPageBlob cloudPageBlob = cloudBlobContainer.getPageBlobReference(((CloudPageBlob) blobItem).getName());
            cloudPageBlob.deleteIfExists();
        } else if (blobItem instanceof CloudBlockBlob) {
            CloudBlockBlob cloudBlockBlob = cloudBlobContainer.getBlockBlobReference(((CloudBlockBlob) blobItem).getName());
            cloudBlockBlob.deleteIfExists();
        }
    }
}
Example #4
Source File: CloudFileTests.java From azure-storage-android with Apache License 2.0 | 6 votes |
@Test
@Category(SlowTests.class)
public void testCopyPageBlobSas() throws Exception {
    // Create source on server.
    final CloudBlobContainer container = BlobTestHelper.getRandomContainerReference();
    try {
        container.create();
        final CloudPageBlob source = container.getPageBlobReference("source");
        source.getMetadata().put("Test", "value");

        final int length = 512;
        final ByteArrayInputStream data = BlobTestHelper.getRandomDataStream(length);
        source.upload(data, length);

        final CloudFile destination = doCloudBlobCopy(source, length);

        final ByteArrayOutputStream copyData = new ByteArrayOutputStream();
        destination.download(copyData);
        BlobTestHelper.assertStreamsAreEqual(data, new ByteArrayInputStream(copyData.toByteArray()));
    } finally {
        container.deleteIfExists();
    }
}
Example #5
Source File: AzureStorageDriver.java From dcos-cassandra-service with Apache License 2.0 | 6 votes |
private Map<String, Long> getSnapshotFileKeys(CloudBlobContainer container, String keyPrefix) {
    Map<String, Long> snapshotFiles = new HashMap<>();

    try {
        for (ListBlobItem item : container.listBlobs(keyPrefix, true)) {
            if (item instanceof CloudPageBlob) {
                CloudPageBlob cloudBlob = (CloudPageBlob) item;
                snapshotFiles.put(cloudBlob.getName(), getOriginalFileSize(cloudBlob));
            }
        }
    } catch (StorageException e) {
        logger.error("Unable to retrieve metadata.", e);
        // all or none
        snapshotFiles = new HashMap<>();
    }

    return snapshotFiles;
}
Example #6
Source File: StorageInterfaceImpl.java From big-c with Apache License 2.0 | 5 votes |
@Override
public ListBlobItem next() {
    ListBlobItem unwrapped = present.next();
    if (unwrapped instanceof CloudBlobDirectory) {
        return new CloudBlobDirectoryWrapperImpl((CloudBlobDirectory) unwrapped);
    } else if (unwrapped instanceof CloudBlockBlob) {
        return new CloudBlockBlobWrapperImpl((CloudBlockBlob) unwrapped);
    } else if (unwrapped instanceof CloudPageBlob) {
        return new CloudPageBlobWrapperImpl((CloudPageBlob) unwrapped);
    } else {
        return unwrapped;
    }
}
Example #7
Source File: AzureStorageDriver.java From dcos-cassandra-service with Apache License 2.0 | 5 votes |
private long getOriginalFileSize(CloudPageBlob pageBlobReference) throws StorageException {
    long size = 0;
    pageBlobReference.downloadAttributes();
    HashMap<String, String> map = pageBlobReference.getMetadata();
    if (map != null && map.size() > 0) {
        try {
            size = Long.parseLong(map.get(ORIGINAL_SIZE_KEY));
        } catch (Exception e) {
            logger.error("File size metadata missing or is not a number.");
        }
    }
    return size;
}
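Example #7 reads a logical file size from blob metadata under ORIGINAL_SIZE_KEY, but the writer side is not shown. The following is a hedged sketch of how that metadata could be recorded at upload time; the key value and the helper name are assumptions, not code from dcos-cassandra-service.

// Hypothetical writer-side counterpart: record the logical file size as blob metadata
// so that getOriginalFileSize(...) above can recover it later. The key value is assumed.
private static final String ORIGINAL_SIZE_KEY = "original_size";

private void recordOriginalFileSize(CloudPageBlob pageBlob, long originalSize) throws StorageException {
    pageBlob.getMetadata().put(ORIGINAL_SIZE_KEY, Long.toString(originalSize));
    pageBlob.uploadMetadata();   // persists the metadata map to the service
}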
Example #8
Source File: StorageInterfaceImpl.java From hadoop with Apache License 2.0 | 5 votes |
@Override
public ListBlobItem next() {
    ListBlobItem unwrapped = present.next();
    if (unwrapped instanceof CloudBlobDirectory) {
        return new CloudBlobDirectoryWrapperImpl((CloudBlobDirectory) unwrapped);
    } else if (unwrapped instanceof CloudBlockBlob) {
        return new CloudBlockBlobWrapperImpl((CloudBlockBlob) unwrapped);
    } else if (unwrapped instanceof CloudPageBlob) {
        return new CloudPageBlobWrapperImpl((CloudPageBlob) unwrapped);
    } else {
        return unwrapped;
    }
}
Example #9
Source File: StorageInterfaceImpl.java From hadoop with Apache License 2.0 | 4 votes |
public ArrayList<PageRange> downloadPageRanges(BlobRequestOptions options, OperationContext opContext)
        throws StorageException {
    return ((CloudPageBlob) getBlob()).downloadPageRanges(null, options, opContext);
}
Example #10
Source File: PageBlobOutputStream.java From dcos-cassandra-service with Apache License 2.0 | 4 votes |
public PageBlobOutputStream(CloudPageBlob pageBlob, long initialPageSize) throws StorageException {
    // The blob size cannot be set before the superclass constructor call, which must be
    // the first statement, so open the write stream first and resize afterwards.
    super(pageBlob.openWriteNew(initialPageSize));
    this.pageBlob = pageBlob;
    resize(initialPageSize);
}
Example #11
Source File: PageBlobOutputStream.java From big-c with Apache License 2.0 | 4 votes |
/**
 * Extend the page blob file if we are close to the end.
 */
private void conditionalExtendFile() {

    // maximum allowed size of an Azure page blob (1 terabyte)
    final long MAX_PAGE_BLOB_SIZE = 1024L * 1024L * 1024L * 1024L;

    // If blob is already at the maximum size, then don't try to extend it.
    if (currentBlobSize == MAX_PAGE_BLOB_SIZE) {
        return;
    }

    // If we are within the maximum write size of the end of the file,
    if (currentBlobSize - currentBlobOffset <= MAX_RAW_BYTES_PER_REQUEST) {

        // Extend the file. Retry up to 3 times with back-off.
        CloudPageBlob cloudPageBlob = (CloudPageBlob) blob.getBlob();
        long newSize = currentBlobSize + configuredPageBlobExtensionSize;

        // Make sure we don't exceed maximum blob size.
        if (newSize > MAX_PAGE_BLOB_SIZE) {
            newSize = MAX_PAGE_BLOB_SIZE;
        }
        final int MAX_RETRIES = 3;
        int retries = 1;
        boolean resizeDone = false;
        while (!resizeDone && retries <= MAX_RETRIES) {
            try {
                cloudPageBlob.resize(newSize);
                resizeDone = true;
                currentBlobSize = newSize;
            } catch (StorageException e) {
                LOG.warn("Failed to extend size of " + cloudPageBlob.getUri());
                try {
                    // sleep 2, 8, 18 seconds for up to 3 retries
                    Thread.sleep(2000 * retries * retries);
                } catch (InterruptedException e1) {
                    // Restore the interrupted status
                    Thread.currentThread().interrupt();
                }
            } finally {
                retries++;
            }
        }
    }
}
Example #12
Source File: PageBlobOutputStream.java From hadoop with Apache License 2.0 | 4 votes |
/**
 * Extend the page blob file if we are close to the end.
 */
private void conditionalExtendFile() {

    // maximum allowed size of an Azure page blob (1 terabyte)
    final long MAX_PAGE_BLOB_SIZE = 1024L * 1024L * 1024L * 1024L;

    // If blob is already at the maximum size, then don't try to extend it.
    if (currentBlobSize == MAX_PAGE_BLOB_SIZE) {
        return;
    }

    // If we are within the maximum write size of the end of the file,
    if (currentBlobSize - currentBlobOffset <= MAX_RAW_BYTES_PER_REQUEST) {

        // Extend the file. Retry up to 3 times with back-off.
        CloudPageBlob cloudPageBlob = (CloudPageBlob) blob.getBlob();
        long newSize = currentBlobSize + configuredPageBlobExtensionSize;

        // Make sure we don't exceed maximum blob size.
        if (newSize > MAX_PAGE_BLOB_SIZE) {
            newSize = MAX_PAGE_BLOB_SIZE;
        }
        final int MAX_RETRIES = 3;
        int retries = 1;
        boolean resizeDone = false;
        while (!resizeDone && retries <= MAX_RETRIES) {
            try {
                cloudPageBlob.resize(newSize);
                resizeDone = true;
                currentBlobSize = newSize;
            } catch (StorageException e) {
                LOG.warn("Failed to extend size of " + cloudPageBlob.getUri());
                try {
                    // sleep 2, 8, 18 seconds for up to 3 retries
                    Thread.sleep(2000 * retries * retries);
                } catch (InterruptedException e1) {
                    // Restore the interrupted status
                    Thread.currentThread().interrupt();
                }
            } finally {
                retries++;
            }
        }
    }
}
Example #13
Source File: StorageInterfaceImpl.java From big-c with Apache License 2.0 | 4 votes |
public CloudPageBlobWrapperImpl(CloudPageBlob blob) {
    super(blob);
}
Example #14
Source File: StorageInterfaceImpl.java From big-c with Apache License 2.0 | 4 votes |
public void create(final long length, BlobRequestOptions options, OperationContext opContext)
        throws StorageException {
    ((CloudPageBlob) getBlob()).create(length, null, options, opContext);
}
Example #15
Source File: StorageInterfaceImpl.java From big-c with Apache License 2.0 | 4 votes |
public void uploadPages(final InputStream sourceStream, final long offset, final long length,
        BlobRequestOptions options, OperationContext opContext) throws StorageException, IOException {
    ((CloudPageBlob) getBlob()).uploadPages(sourceStream, offset, length, null, options, opContext);
}
Example #16
Source File: StorageInterfaceImpl.java From big-c with Apache License 2.0 | 4 votes |
public ArrayList<PageRange> downloadPageRanges(BlobRequestOptions options, OperationContext opContext)
        throws StorageException {
    return ((CloudPageBlob) getBlob()).downloadPageRanges(null, options, opContext);
}
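Examples #9 and #16 simply delegate to CloudPageBlob.downloadPageRanges(), which returns only the regions of the page blob that have actually been written. The sketch below is a hedged illustration of how a caller might use that result to read just the populated ranges; the method and variable names are illustrative, not from the Hadoop or big-c sources.

import com.microsoft.azure.storage.StorageException;
import com.microsoft.azure.storage.blob.CloudPageBlob;
import com.microsoft.azure.storage.blob.PageRange;

import java.util.ArrayList;

// Reads only the regions of a page blob that have been written,
// skipping the unallocated (all-zero) pages.
static void readWrittenRanges(CloudPageBlob pageBlob) throws StorageException {
    ArrayList<PageRange> ranges = pageBlob.downloadPageRanges();
    for (PageRange range : ranges) {
        long offset = range.getStartOffset();
        int length = (int) (range.getEndOffset() - range.getStartOffset() + 1);
        byte[] buffer = new byte[length];
        pageBlob.downloadRangeToByteArray(offset, (long) length, buffer, 0);
        // process buffer ...
    }
}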
Example #17
Source File: LeaseBlobManager.java From samza with Apache License 2.0 | 4 votes |
public LeaseBlobManager(CloudPageBlob leaseBlob) {
    this.leaseBlob = leaseBlob;
}
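Example #17 shows only the LeaseBlobManager constructor. The sketch below is a hedged illustration of how lease operations on a CloudPageBlob can be built on the SDK's acquireLease, renewLease, and releaseLease calls; the class and method names are assumptions, not the actual samza implementation.

import com.microsoft.azure.storage.AccessCondition;
import com.microsoft.azure.storage.StorageException;
import com.microsoft.azure.storage.blob.CloudPageBlob;

// Hypothetical lease helper; the real LeaseBlobManager method bodies are not shown above.
public class LeaseSketch {
    private final CloudPageBlob leaseBlob;

    public LeaseSketch(CloudPageBlob leaseBlob) {
        this.leaseBlob = leaseBlob;
    }

    // Acquire a lease for the given duration (15-60 seconds, or null for an infinite lease).
    public String acquireLease(Integer leaseTimeInSeconds) throws StorageException {
        return leaseBlob.acquireLease(leaseTimeInSeconds, null);
    }

    // Renew an existing lease identified by leaseId.
    public void renewLease(String leaseId) throws StorageException {
        leaseBlob.renewLease(AccessCondition.generateLeaseCondition(leaseId));
    }

    // Release the lease so other clients can acquire it.
    public void releaseLease(String leaseId) throws StorageException {
        leaseBlob.releaseLease(AccessCondition.generateLeaseCondition(leaseId));
    }
}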
Example #18
Source File: BlobUtils.java From samza with Apache License 2.0 | 4 votes |
public CloudPageBlob getBlob() {
    return this.blob;
}
Example #19
Source File: StorageInterfaceImpl.java From hadoop with Apache License 2.0 | 4 votes |
public void uploadPages(final InputStream sourceStream, final long offset, final long length,
        BlobRequestOptions options, OperationContext opContext) throws StorageException, IOException {
    ((CloudPageBlob) getBlob()).uploadPages(sourceStream, offset, length, null, options, opContext);
}
Example #20
Source File: StorageInterfaceImpl.java From hadoop with Apache License 2.0 | 4 votes |
public void create(final long length, BlobRequestOptions options, OperationContext opContext)
        throws StorageException {
    ((CloudPageBlob) getBlob()).create(length, null, options, opContext);
}
Example #21
Source File: StorageInterfaceImpl.java From hadoop with Apache License 2.0 | 4 votes |
public CloudPageBlobWrapperImpl(CloudPageBlob blob) {
    super(blob);
}
Example #22
Source File: PageBlobOutputStream.java From dcos-cassandra-service with Apache License 2.0 | 2 votes |
/**
 * Creates an output stream filter built on top of the Azure
 * PageBlob output stream.
 *
 * @param pageBlob The Azure blob reference which has the underlying output
 *                 stream to be assigned to the field <tt>this.out</tt> for later use,
 *                 or <code>null</code> if this instance is to be created without an
 *                 underlying stream.
 */
public PageBlobOutputStream(CloudPageBlob pageBlob) throws StorageException {
    // we don't know the total size, we default to a page
    this(pageBlob, PAGE_BLOB_PAGE_SIZE);
}