com.microsoft.azure.storage.blob.BlobRequestOptions Java Examples
The following examples show how to use
com.microsoft.azure.storage.blob.BlobRequestOptions.
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example #1
Source File: SecondaryTests.java From azure-storage-android with Apache License 2.0 | 6 votes |
/**
 * Exercises downloadAttributes() against a container that does not exist,
 * with a location mode set both on the client and on the per-request options,
 * and verifies the retry sequence via MultiLocationTestHelper.
 * A 404 is the expected outcome; the helper is always closed.
 */
private static void testContainerDownloadAttributes(LocationMode optionsLocationMode,
        LocationMode clientLocationMode, StorageLocation initialLocation,
        List<RetryContext> retryContextList, List<RetryInfo> retryInfoList)
        throws URISyntaxException, StorageException {
    CloudBlobContainer testContainer = BlobTestHelper.getRandomContainerReference();

    // Helper records which storage location each (re)try targets.
    MultiLocationTestHelper locationHelper = new MultiLocationTestHelper(
            testContainer.getServiceClient().getStorageUri(), initialLocation,
            retryContextList, retryInfoList);

    // Client-level default; the per-request options below may override it.
    testContainer.getServiceClient().getDefaultRequestOptions()
            .setLocationMode(clientLocationMode);

    BlobRequestOptions requestOptions = new BlobRequestOptions();
    requestOptions.setLocationMode(optionsLocationMode);
    requestOptions.setRetryPolicyFactory(locationHelper.retryPolicy);

    try {
        testContainer.downloadAttributes(null, requestOptions, locationHelper.operationContext);
    }
    catch (StorageException ex) {
        // Container was never created, so a 404 is the expected failure.
        assertEquals(HttpURLConnection.HTTP_NOT_FOUND, ex.getHttpStatusCode());
    }
    finally {
        locationHelper.close();
    }
}
Example #2
Source File: GenericTests.java From azure-storage-android with Apache License 2.0 | 6 votes |
@Test public void testDefaultProxy() throws URISyntaxException, StorageException { CloudBlobClient blobClient = TestHelper.createCloudBlobClient(); CloudBlobContainer container = blobClient.getContainerReference("container1"); // Use a default proxy OperationContext.setDefaultProxy(new Proxy(Proxy.Type.HTTP, new InetSocketAddress("10.1.1.1", 8888))); // Turn off retries to make the failure happen faster BlobRequestOptions opt = new BlobRequestOptions(); opt.setRetryPolicyFactory(new RetryNoRetry()); // Unfortunately HttpURLConnection doesn't expose a getter and the usingProxy method it does have doesn't // work as one would expect and will always for us return false. So, we validate by making sure the request // fails when we set a bad proxy rather than check the proxy setting itself succeeding. try { container.exists(null, opt, null); fail("Bad proxy should throw an exception."); } catch (StorageException e) { if (e.getCause().getClass() != ConnectException.class && e.getCause().getClass() != SocketTimeoutException.class && e.getCause().getClass() != SocketException.class) { Assert.fail("Unepected exception for bad proxy"); } } }
Example #3
Source File: LogBlobIterable.java From azure-storage-android with Apache License 2.0 | 5 votes |
/**
 * Captures the parameters of a lazy log-blob listing; the actual service
 * calls happen later when the iterable is consumed.
 *
 * @param logDirectory directory under which log blobs live
 * @param startTime    inclusive start of the time range, or null
 * @param endTime      end of the time range, or null
 * @param operations   which logging operation types to include
 * @param details      blob listing details (e.g. metadata)
 * @param options      request options applied to each listing call
 * @param opContext    operation context for the listing calls
 */
protected LogBlobIterable(final CloudBlobDirectory logDirectory, final Date startTime,
        final Date endTime, final EnumSet<LoggingOperations> operations,
        final EnumSet<BlobListingDetails> details, final BlobRequestOptions options,
        final OperationContext opContext) {
    // Plain field capture, no validation: iteration resolves everything lazily.
    this.opContext = opContext;
    this.options = options;
    this.details = details;
    this.operations = operations;
    this.endTime = endTime;
    this.startTime = startTime;
    this.logDirectory = logDirectory;
}
Example #4
Source File: AzureNativeFileSystemStore.java From hadoop with Apache License 2.0 | 5 votes |
/**
 * Builds the per-request options used for uploads: exponential retry,
 * configurable blob-level MD5, transactional MD5, and the configured
 * number of concurrent upload requests.
 */
private BlobRequestOptions getUploadOptions() {
    BlobRequestOptions uploadOptions = new BlobRequestOptions();

    // Retry with exponential backoff using the configured bounds.
    uploadOptions.setRetryPolicyFactory(
            new RetryExponentialRetry(minBackoff, deltaBackoff, maxBackoff, maxRetries));

    // Parallelism for block uploads.
    uploadOptions.setConcurrentRequestCount(concurrentWrites);

    // Per-request (transactional) MD5 as configured.
    uploadOptions.setUseTransactionalContentMD5(getUseTransactionalContentMD5());

    // Whole-blob MD5 is off unless explicitly enabled in the configuration.
    boolean storeBlobMd5 = sessionConfiguration.getBoolean(KEY_STORE_BLOB_MD5, false);
    uploadOptions.setStoreBlobContentMD5(storeBlobMd5);

    return uploadOptions;
}
Example #5
Source File: StorageInterfaceImpl.java From hadoop with Apache License 2.0 | 5 votes |
/**
 * Lists blobs under the wrapped directory by delegating to the Azure SDK,
 * wrapping the SDK iterable so callers receive this layer's interface types.
 *
 * @param prefix             blob name prefix filter
 * @param useFlatBlobListing true for a flat listing, false for hierarchical
 * @param listingDetails     which extra details to include per item
 * @param options            request options, or null for client defaults
 * @param opContext          operation context for the listing calls
 */
@Override
public Iterable<ListBlobItem> listBlobs(String prefix, boolean useFlatBlobListing,
        EnumSet<BlobListingDetails> listingDetails, BlobRequestOptions options,
        OperationContext opContext) throws URISyntaxException, StorageException {
    // Pure delegation; WrappingIterator adapts the SDK's item type lazily.
    return WrappingIterator.wrap(directory.listBlobs(prefix, useFlatBlobListing,
            listingDetails, options, opContext));
}
Example #6
Source File: StorageInterfaceImpl.java From hadoop with Apache License 2.0 | 5 votes |
/**
 * Downloads [offset, offset+length) of the wrapped blob into the stream.
 * The null argument is the AccessCondition: no lease/ETag precondition
 * is applied to the read.
 */
@Override
public void downloadRange(long offset, long length, OutputStream outStream,
        BlobRequestOptions options, OperationContext opContext)
        throws StorageException, IOException {
    getBlob().downloadRange(offset, length, outStream, null, options, opContext);
}
Example #7
Source File: StorageInterfaceImpl.java From hadoop with Apache License 2.0 | 5 votes |
/**
 * Starts a server-side copy from the given source URI to the wrapped blob.
 * The two null arguments are the source and destination AccessConditions:
 * no preconditions are applied on either side.
 */
@Override
public void startCopyFromBlob(URI source, BlobRequestOptions options,
        OperationContext opContext) throws StorageException, URISyntaxException {
    getBlob().startCopyFromBlob(source, null, null, options, opContext);
}
Example #8
Source File: AzureNativeFileSystemStore.java From hadoop with Apache License 2.0 | 5 votes |
/**
 * Builds the per-request options used for downloads: exponential retry
 * plus transactional-MD5 validation as configured.
 */
private BlobRequestOptions getDownloadOptions() {
    BlobRequestOptions downloadOptions = new BlobRequestOptions();

    // Validate per-request MD5 if the configuration asks for it.
    downloadOptions.setUseTransactionalContentMD5(getUseTransactionalContentMD5());

    // Exponential backoff with the configured retry bounds.
    RetryExponentialRetry retryPolicy =
            new RetryExponentialRetry(minBackoff, deltaBackoff, maxBackoff, maxRetries);
    downloadOptions.setRetryPolicyFactory(retryPolicy);

    return downloadOptions;
}
Example #9
Source File: PageBlobInputStream.java From hadoop with Apache License 2.0 | 5 votes |
/**
 * Helper method to extract the actual data size of a page blob.
 * This typically involves 2 service requests (one for page ranges, another
 * for the last page's data).
 *
 * @param blob The blob to get the size from.
 * @param opContext The operation context to use for the requests.
 * @return The total data size of the blob in bytes.
 * @throws IOException If the format is corrupt.
 * @throws StorageException If anything goes wrong in the requests.
 */
public static long getPageBlobSize(CloudPageBlobWrapper blob, OperationContext opContext)
        throws IOException, StorageException {
    // Get the page ranges for the blob. There should be one range starting
    // at byte 0, but we tolerate (and ignore) ranges after the first one.
    ArrayList<PageRange> pageRanges =
            blob.downloadPageRanges(new BlobRequestOptions(), opContext);
    if (pageRanges.size() == 0) {
        // No ranges at all: the blob holds no data.
        return 0;
    }
    if (pageRanges.get(0).getStartOffset() != 0) {
        // Not expected: we always upload our page blobs as a contiguous range
        // starting at byte 0.
        throw badStartRangeException(blob, pageRanges.get(0));
    }
    // End offset is inclusive, so +1 gives the raw (page-aligned) size.
    long totalRawBlobSize = pageRanges.get(0).getEndOffset() + 1;

    // Get the last page.
    long lastPageStart = totalRawBlobSize - PAGE_SIZE;
    ByteArrayOutputStream baos =
            new ByteArrayOutputStream(PageBlobFormatHelpers.PAGE_SIZE);
    blob.downloadRange(lastPageStart, PAGE_SIZE, baos,
            new BlobRequestOptions(), opContext);

    byte[] lastPage = baos.toByteArray();
    // Each page records how much of it holds real data; only the last page
    // may be partially full.
    short lastPageSize = getPageSize(blob, lastPage, 0);
    long totalNumberOfPages = totalRawBlobSize / PAGE_SIZE;
    // Full pages contribute PAGE_DATA_SIZE each; the last contributes its
    // recorded size.
    return (totalNumberOfPages - 1) * PAGE_DATA_SIZE + lastPageSize;
}
Example #10
Source File: PageBlobInputStream.java From hadoop with Apache License 2.0 | 5 votes |
/**
 * Constructs a stream over the given page blob.
 *
 * Downloads the blob's page ranges up front to learn how many pages of data
 * exist; only the first contiguous range (which must start at byte 0) is
 * read, and additional ranges are logged and ignored.
 *
 * @param blob the page blob to read
 * @param opContext operation context for the initial page-range request
 * @throws IOException wrapping any StorageException from the service call
 */
public PageBlobInputStream(CloudPageBlobWrapper blob, OperationContext opContext)
        throws IOException {
    this.blob = blob;
    this.opContext = opContext;
    ArrayList<PageRange> allRanges;
    try {
        allRanges = blob.downloadPageRanges(new BlobRequestOptions(), opContext);
    } catch (StorageException e) {
        // Surface as IOException to match the stream contract.
        throw new IOException(e);
    }
    if (allRanges.size() > 0) {
        if (allRanges.get(0).getStartOffset() != 0) {
            // Our writer always uploads a contiguous range starting at byte 0.
            throw badStartRangeException(blob, allRanges.get(0));
        }
        if (allRanges.size() > 1) {
            // Tolerated but unexpected; only the first range is consumed.
            LOG.warn(String.format(
                    "Blob %s has %d page ranges beyond the first range. "
                    + "Only reading the first range.",
                    blob.getUri(), allRanges.size() - 1));
        }
        // End offset is inclusive, so +1 before dividing into pages.
        numberOfPagesRemaining = (allRanges.get(0).getEndOffset() + 1) / PAGE_SIZE;
    } else {
        // Empty blob: nothing to read.
        numberOfPagesRemaining = 0;
    }
}
Example #11
Source File: MockStorageInterface.java From hadoop with Apache License 2.0 | 5 votes |
/**
 * Mock implementation of a server-side blob copy: copies bytes within the
 * in-memory backing store keyed by decoded URI strings. Request options and
 * context are ignored.
 */
@Override
public void startCopyFromBlob(URI source, BlobRequestOptions options,
        OperationContext opContext) throws StorageException, URISyntaxException {
    backingStore.copy(convertUriToDecodedString(source), convertUriToDecodedString(uri));
    //TODO: set the backingStore.properties.CopyState and
    // update azureNativeFileSystemStore.waitForCopyToComplete
}
Example #12
Source File: StorageInterfaceImpl.java From big-c with Apache License 2.0 | 5 votes |
/**
 * Lists blobs under the wrapped directory by delegating to the Azure SDK,
 * wrapping the SDK iterable so callers receive this layer's interface types.
 *
 * @param prefix             blob name prefix filter
 * @param useFlatBlobListing true for a flat listing, false for hierarchical
 * @param listingDetails     which extra details to include per item
 * @param options            request options, or null for client defaults
 * @param opContext          operation context for the listing calls
 */
@Override
public Iterable<ListBlobItem> listBlobs(String prefix, boolean useFlatBlobListing,
        EnumSet<BlobListingDetails> listingDetails, BlobRequestOptions options,
        OperationContext opContext) throws URISyntaxException, StorageException {
    // Pure delegation; WrappingIterator adapts the SDK's item type lazily.
    return WrappingIterator.wrap(directory.listBlobs(prefix, useFlatBlobListing,
            listingDetails, options, opContext));
}
Example #13
Source File: StorageInterfaceImpl.java From big-c with Apache License 2.0 | 5 votes |
/**
 * Starts a server-side copy from the given source URI to the wrapped blob.
 * The two null arguments are the source and destination AccessConditions:
 * no preconditions are applied on either side.
 */
@Override
public void startCopyFromBlob(URI source, BlobRequestOptions options,
        OperationContext opContext) throws StorageException, URISyntaxException {
    getBlob().startCopyFromBlob(source, null, null, options, opContext);
}
Example #14
Source File: AzureContainerListService.java From cyberduck with GNU General Public License v3.0 | 5 votes |
/**
 * Lists all Azure blob containers as volume directories, paging through
 * results with continuation tokens and notifying the listener after each
 * page. Page size comes from the "azure.listing.chunksize" preference.
 *
 * @throws BackgroundException mapped from any StorageException
 */
@Override
public AttributedList<Path> list(final Path directory, final ListProgressListener listener)
        throws BackgroundException {
    ResultSegment<CloudBlobContainer> result;
    ResultContinuation token = null;
    try {
        final AttributedList<Path> containers = new AttributedList<Path>();
        do {
            final BlobRequestOptions options = new BlobRequestOptions();
            // null prefix = all containers; NONE = no metadata per container.
            result = session.getClient().listContainersSegmented(null,
                    ContainerListingDetails.NONE,
                    preferences.getInteger("azure.listing.chunksize"), token,
                    options, context);
            for(CloudBlobContainer container : result.getResults()) {
                final PathAttributes attributes = new PathAttributes();
                attributes.setETag(container.getProperties().getEtag());
                attributes.setModificationDate(
                        container.getProperties().getLastModified().getTime());
                // Containers are modeled as top-level volume directories.
                containers.add(new Path(PathNormalizer.normalize(container.getName()),
                        EnumSet.of(Path.Type.volume, Path.Type.directory), attributes));
            }
            // Progress callback sees the cumulative list so far.
            listener.chunk(directory, containers);
            token = result.getContinuationToken();
        } while(result.getHasMoreResults());
        return containers;
    } catch(StorageException e) {
        throw new AzureExceptionMappingService().map("Listing directory {0} failed", e, directory);
    }
}
Example #15
Source File: AzureNativeFileSystemStore.java From big-c with Apache License 2.0 | 5 votes |
/**
 * Assembles the request options applied to every blob upload:
 * optional whole-blob MD5, transactional MD5, write parallelism,
 * and exponential retry with the configured bounds.
 */
private BlobRequestOptions getUploadOptions() {
    final BlobRequestOptions result = new BlobRequestOptions();
    result.setRetryPolicyFactory(new RetryExponentialRetry(
            minBackoff, deltaBackoff, maxBackoff, maxRetries));
    result.setConcurrentRequestCount(concurrentWrites);
    result.setUseTransactionalContentMD5(getUseTransactionalContentMD5());
    // Whole-blob MD5 defaults to off unless configured.
    result.setStoreBlobContentMD5(
            sessionConfiguration.getBoolean(KEY_STORE_BLOB_MD5, false));
    return result;
}
Example #16
Source File: AzureNativeFileSystemStore.java From big-c with Apache License 2.0 | 5 votes |
/**
 * Assembles the request options applied to every blob download:
 * exponential retry plus transactional-MD5 validation as configured.
 */
private BlobRequestOptions getDownloadOptions() {
    final BlobRequestOptions result = new BlobRequestOptions();
    result.setUseTransactionalContentMD5(getUseTransactionalContentMD5());
    result.setRetryPolicyFactory(new RetryExponentialRetry(
            minBackoff, deltaBackoff, maxBackoff, maxRetries));
    return result;
}
Example #17
Source File: PageBlobInputStream.java From big-c with Apache License 2.0 | 5 votes |
/**
 * Helper method to extract the actual data size of a page blob.
 * This typically involves 2 service requests (one for page ranges, another
 * for the last page's data).
 *
 * @param blob The blob to get the size from.
 * @param opContext The operation context to use for the requests.
 * @return The total data size of the blob in bytes.
 * @throws IOException If the format is corrupt.
 * @throws StorageException If anything goes wrong in the requests.
 */
public static long getPageBlobSize(CloudPageBlobWrapper blob, OperationContext opContext)
        throws IOException, StorageException {
    // Get the page ranges for the blob. There should be one range starting
    // at byte 0, but we tolerate (and ignore) ranges after the first one.
    ArrayList<PageRange> pageRanges =
            blob.downloadPageRanges(new BlobRequestOptions(), opContext);
    if (pageRanges.size() == 0) {
        // No ranges at all: the blob holds no data.
        return 0;
    }
    if (pageRanges.get(0).getStartOffset() != 0) {
        // Not expected: we always upload our page blobs as a contiguous range
        // starting at byte 0.
        throw badStartRangeException(blob, pageRanges.get(0));
    }
    // End offset is inclusive, so +1 gives the raw (page-aligned) size.
    long totalRawBlobSize = pageRanges.get(0).getEndOffset() + 1;

    // Get the last page.
    long lastPageStart = totalRawBlobSize - PAGE_SIZE;
    ByteArrayOutputStream baos =
            new ByteArrayOutputStream(PageBlobFormatHelpers.PAGE_SIZE);
    blob.downloadRange(lastPageStart, PAGE_SIZE, baos,
            new BlobRequestOptions(), opContext);

    byte[] lastPage = baos.toByteArray();
    // Each page records how much of it holds real data; only the last page
    // may be partially full.
    short lastPageSize = getPageSize(blob, lastPage, 0);
    long totalNumberOfPages = totalRawBlobSize / PAGE_SIZE;
    // Full pages contribute PAGE_DATA_SIZE each; the last contributes its
    // recorded size.
    return (totalNumberOfPages - 1) * PAGE_DATA_SIZE + lastPageSize;
}
Example #18
Source File: PageBlobInputStream.java From big-c with Apache License 2.0 | 5 votes |
/**
 * Constructs a stream over the given page blob.
 *
 * Downloads the blob's page ranges up front to learn how many pages of data
 * exist; only the first contiguous range (which must start at byte 0) is
 * read, and additional ranges are logged and ignored.
 *
 * @param blob the page blob to read
 * @param opContext operation context for the initial page-range request
 * @throws IOException wrapping any StorageException from the service call
 */
public PageBlobInputStream(CloudPageBlobWrapper blob, OperationContext opContext)
        throws IOException {
    this.blob = blob;
    this.opContext = opContext;
    ArrayList<PageRange> allRanges;
    try {
        allRanges = blob.downloadPageRanges(new BlobRequestOptions(), opContext);
    } catch (StorageException e) {
        // Surface as IOException to match the stream contract.
        throw new IOException(e);
    }
    if (allRanges.size() > 0) {
        if (allRanges.get(0).getStartOffset() != 0) {
            // Our writer always uploads a contiguous range starting at byte 0.
            throw badStartRangeException(blob, allRanges.get(0));
        }
        if (allRanges.size() > 1) {
            // Tolerated but unexpected; only the first range is consumed.
            LOG.warn(String.format(
                    "Blob %s has %d page ranges beyond the first range. "
                    + "Only reading the first range.",
                    blob.getUri(), allRanges.size() - 1));
        }
        // End offset is inclusive, so +1 before dividing into pages.
        numberOfPagesRemaining = (allRanges.get(0).getEndOffset() + 1) / PAGE_SIZE;
    } else {
        // Empty blob: nothing to read.
        numberOfPagesRemaining = 0;
    }
}
Example #19
Source File: MockStorageInterface.java From big-c with Apache License 2.0 | 5 votes |
/**
 * Mock implementation of a server-side blob copy: copies bytes within the
 * in-memory backing store keyed by decoded URI strings. Request options and
 * context are ignored.
 */
@Override
public void startCopyFromBlob(URI source, BlobRequestOptions options,
        OperationContext opContext) throws StorageException, URISyntaxException {
    backingStore.copy(convertUriToDecodedString(source), convertUriToDecodedString(uri));
    //TODO: set the backingStore.properties.CopyState and
    // update azureNativeFileSystemStore.waitForCopyToComplete
}
Example #20
Source File: AzureStorageRepository.java From hawkbit-extensions with Eclipse Public License 1.0 | 5 votes |
/**
 * Resolves the configured artifact container, creating it on first use.
 * The container is created with public CONTAINER-level read access and
 * default request options.
 */
private CloudBlobContainer getContainer() throws URISyntaxException, StorageException {
    final String containerName = properties.getContainerName();
    final CloudBlobContainer artifactContainer =
            blobClient.getContainerReference(containerName);
    // Idempotent: a no-op if the container already exists.
    artifactContainer.createIfNotExists(BlobContainerPublicAccessType.CONTAINER,
            new BlobRequestOptions(), new OperationContext());
    return artifactContainer;
}
Example #21
Source File: GenericTests.java From azure-storage-android with Apache License 2.0 | 5 votes |
@Test public void testProxy() throws URISyntaxException, StorageException { CloudBlobClient blobClient = TestHelper.createCloudBlobClient(); CloudBlobContainer container = blobClient.getContainerReference("container1"); // Use a request-level proxy OperationContext opContext = new OperationContext(); opContext.setProxy(new Proxy(Proxy.Type.HTTP, new InetSocketAddress("10.1.1.1", 8888))); // Turn of retries to make the failure happen faster BlobRequestOptions opt = new BlobRequestOptions(); opt.setRetryPolicyFactory(new RetryNoRetry()); // Unfortunately HttpURLConnection doesn't expose a getter and the usingProxy method it does have doesn't // work as one would expect and will always for us return false. So, we validate by making sure the request // fails when we set a bad proxy rather than check the proxy setting itself. try { container.exists(null, opt, opContext); fail("Bad proxy should throw an exception."); } catch (StorageException e) { if (e.getCause().getClass() != ConnectException.class && e.getCause().getClass() != SocketTimeoutException.class && e.getCause().getClass() != SocketException.class) { Assert.fail("Unepected exception for bad proxy"); } } }
Example #22
Source File: GenericTests.java From azure-storage-android with Apache License 2.0 | 5 votes |
@Test public void testProxyOverridesDefault() throws URISyntaxException, StorageException { CloudBlobClient blobClient = TestHelper.createCloudBlobClient(); CloudBlobContainer container = blobClient.getContainerReference("container1"); // Set a default proxy OperationContext.setDefaultProxy(new Proxy(Proxy.Type.HTTP, new InetSocketAddress("10.1.1.1", 8888))); // Turn off retries to make the failure happen faster BlobRequestOptions opt = new BlobRequestOptions(); opt.setRetryPolicyFactory(new RetryNoRetry()); // Unfortunately HttpURLConnection doesn't expose a getter and the usingProxy method it does have doesn't // work as one would expect and will always for us return false. So, we validate by making sure the request // fails when we set a bad proxy rather than check the proxy setting itself succeeding. try { container.exists(null, opt, null); fail("Bad proxy should throw an exception."); } catch (StorageException e) { if (e.getCause().getClass() != ConnectException.class && e.getCause().getClass() != SocketTimeoutException.class) { Assert.fail("Unepected exception for bad proxy"); } } // Override it with no proxy OperationContext opContext = new OperationContext(); opContext.setProxy(Proxy.NO_PROXY); // Should succeed as request-level proxy should override the bad default proxy container.exists(null, null, opContext); }
Example #23
Source File: MaximumExecutionTimeTests.java From azure-storage-android with Apache License 2.0 | 5 votes |
/**
 * Verifies that the maximum execution time bounds the overall operation,
 * including retries: with a 2.5s injected delay, a 2s maximum execution
 * time, and a secondary-then-primary location mode (which would normally
 * trigger a retry), the request must fail with the maximum-execution-timeout
 * message rather than retrying.
 */
@Test
@Category({ DevFabricTests.class, DevStoreTests.class, SecondaryTests.class })
public void testBlobMaximumExecutionTime() throws URISyntaxException, StorageException {
    OperationContext opContext = new OperationContext();
    // Inject a 2.5s delay so the request outlives the 2s budget below.
    setDelay(opContext, 2500);

    // set the maximum execution time
    BlobRequestOptions options = new BlobRequestOptions();
    options.setMaximumExecutionTimeInMs(2000);

    // set the location mode to secondary, secondary request should fail
    // so set the timeout low to save time failing (or fail with a timeout)
    options.setLocationMode(LocationMode.SECONDARY_THEN_PRIMARY);
    options.setTimeoutIntervalInMs(1000);

    CloudBlobClient blobClient = TestHelper.createCloudBlobClient();
    CloudBlobContainer container =
            blobClient.getContainerReference(generateRandomName("container"));

    try {
        // 1. download attributes will fail as the container does not exist
        // 2. the executor will attempt to retry as it is accessing secondary
        // 3. maximum execution time should prevent the retry from being made
        container.downloadAttributes(null, options, opContext);
        fail("Maximum execution time was reached but request did not fail.");
    }
    catch (StorageException e) {
        assertEquals(SR.MAXIMUM_EXECUTION_TIMEOUT_EXCEPTION, e.getMessage());
    }
}
Example #24
Source File: CloudAnalyticsClient.java From azure-storage-android with Apache License 2.0 | 5 votes |
/**
 * Returns an enumerable collection of log blobs, retrieved lazily.
 *
 * @param service
 *            A {@link StorageService} enumeration value that indicates which storage service to use.
 * @param startTime
 *            A <code>java.util.Date</code> object representing the start of the time range for which logs should
 *            be retrieved.
 * @param endTime
 *            A <code>java.util.Date</code> object representing the end of the time range for which logs should
 *            be retrieved.
 * @param operations
 *            A {@link LoggingOperations} enumeration set that indicates which log types to return.
 * @param details
 *            A {@link BlobListingDetails} enumeration set that indicates whether or not blob metadata should
 *            be returned. None or METADATA are the only valid values.
 * @param options
 *            A {@link BlobRequestOptions} object that specifies additional options for the request.
 * @param operationContext
 *            An {@link OperationContext} object that represents the context for the current operation.
 * @return An enumerable collection of objects that implement {@link ListBlobItem} and are retrieved lazily.
 * @throws StorageException
 * @throws URISyntaxException
 */
public Iterable<ListBlobItem> listLogBlobs(StorageService service, Date startTime, Date endTime,
        EnumSet<LoggingOperations> operations, BlobListingDetails details,
        BlobRequestOptions options, OperationContext operationContext)
        throws StorageException, URISyntaxException {
    Utility.assertNotNull("service", service);
    // Null operations means "all operation types".
    if (operations == null) {
        operations = EnumSet.allOf(LoggingOperations.class);
    }
    // Only null or METADATA are legal listing details for log blobs.
    if (!(details == null || details.equals(BlobListingDetails.METADATA))) {
        throw new IllegalArgumentException(SR.INVALID_LISTING_DETAILS);
    }
    // An empty operation set would match nothing; reject it.
    if (operations.equals(EnumSet.noneOf(LoggingOperations.class))) {
        throw new IllegalArgumentException(SR.INVALID_LOGGING_LEVEL);
    }
    // Metadata is fetched either when explicitly requested or when filtering
    // by a subset of operations (the filter needs the metadata to decide).
    EnumSet<BlobListingDetails> metadataDetails;
    if (details != null
            && (details.equals(BlobListingDetails.METADATA) || !operations.equals(EnumSet
                    .allOf(LoggingOperations.class)))) {
        metadataDetails = EnumSet.of(BlobListingDetails.METADATA);
    }
    else {
        metadataDetails = EnumSet.noneOf(BlobListingDetails.class);
    }
    // The iterable performs the actual listing lazily on iteration.
    return new LogBlobIterable(this.getLogDirectory(service), startTime, endTime, operations,
            metadataDetails, options, operationContext);
}
Example #25
Source File: LogBlobIterator.java From azure-storage-android with Apache License 2.0 | 5 votes |
/**
 * Prepares iteration over log blobs within [startDate, endDate].
 *
 * Pins all shared date formatters to GMT, defaults the request options when
 * none are given, truncates the start date to the top of its hour window
 * (minutes zeroed), and precomputes the end-of-range blob-name prefix.
 *
 * @param logDirectory directory holding the log blobs
 * @param startDate    inclusive range start, or null for unbounded
 * @param endDate      inclusive range end, or null for unbounded
 * @param operations   logging operation types to include
 * @param details      blob listing details to request
 * @param options      request options; a default instance is used if null
 * @param opContext    operation context for listing calls
 */
public LogBlobIterator(final CloudBlobDirectory logDirectory, final Date startDate,
        final Date endDate, final EnumSet<LoggingOperations> operations,
        final EnumSet<BlobListingDetails> details, final BlobRequestOptions options,
        final OperationContext opContext) {
    // Log blob names encode GMT timestamps, so all formatters must use GMT.
    TimeZone gmtTime = TimeZone.getTimeZone("GMT");
    HOUR_FORMAT.setTimeZone(gmtTime);
    DAY_FORMAT.setTimeZone(gmtTime);
    MONTH_FORMAT.setTimeZone(gmtTime);
    YEAR_FORMAT.setTimeZone(gmtTime);

    this.logDirectory = logDirectory;
    this.operations = operations;
    this.details = details;
    this.opContext = opContext;

    // Fall back to default request options when the caller passed none.
    if (options == null) {
        this.options = new BlobRequestOptions();
    }
    else {
        this.options = options;
    }

    if (startDate != null) {
        this.startDate = new GregorianCalendar();
        this.startDate.setTime(startDate);
        // Zero the minutes: logs are bucketed by hour.
        this.startDate.add(GregorianCalendar.MINUTE, (-this.startDate.get(GregorianCalendar.MINUTE)));
        this.startDate.setTimeZone(gmtTime);
    }
    if (endDate != null) {
        this.endDate = new GregorianCalendar();
        this.endDate.setTime(endDate);
        this.endDate.setTimeZone(gmtTime);
        // Prefix used to stop iteration once names pass the end hour.
        this.endPrefix = this.logDirectory.getPrefix() + HOUR_FORMAT.format(this.endDate.getTime());
    }
}
Example #26
Source File: StorageInterfaceImpl.java From big-c with Apache License 2.0 | 5 votes |
/**
 * Downloads [offset, offset+length) of the wrapped blob into the stream.
 * The null argument is the AccessCondition: no lease/ETag precondition
 * is applied to the read.
 */
@Override
public void downloadRange(long offset, long length, OutputStream outStream,
        BlobRequestOptions options, OperationContext opContext)
        throws StorageException, IOException {
    getBlob().downloadRange(offset, length, outStream, null, options, opContext);
}
Example #27
Source File: StorageInterfaceImpl.java From hadoop with Apache License 2.0 | 4 votes |
/**
 * Creates the underlying page blob with the given total length in bytes.
 * The null argument is the AccessCondition (no preconditions). The wrapped
 * blob is cast to CloudPageBlob, so this must only be called for page blobs.
 */
public void create(final long length, BlobRequestOptions options,
        OperationContext opContext) throws StorageException {
    ((CloudPageBlob) getBlob()).create(length, null, options, opContext);
}
Example #28
Source File: MockStorageInterface.java From big-c with Apache License 2.0 | 4 votes |
/**
 * Page-blob upload is not supported by this mock; any caller reaching this
 * path in tests will fail fast with NotImplementedException.
 */
@Override
public void uploadPages(InputStream sourceStream, long offset, long length,
        BlobRequestOptions options, OperationContext opContext)
        throws StorageException, IOException {
    throw new NotImplementedException();
}
Example #29
Source File: AzureStorageRepository.java From hawkbit-extensions with Eclipse Public License 1.0 | 4 votes |
/**
 * Stores the artifact file in Azure Blob storage, keyed by its SHA-1 hash.
 * If a blob with that hash already exists for the tenant, the upload is
 * skipped (content-addressed dedup). Uploads run with SDK logging enabled
 * and the configured concurrent request count.
 *
 * @param tenant       tenant the artifact belongs to
 * @param base16Hashes artifact hashes; SHA-1 is used as the blob key
 * @param contentType  MIME type recorded on the blob
 * @param tempFile     path of the local file to upload
 * @return the stored artifact descriptor
 * @throws ArtifactStoreException wrapping URI/storage failures
 */
@Override
protected AbstractDbArtifact store(final String tenant, final DbArtifactHash base16Hashes,
        final String contentType, final String tempFile) throws IOException {
    final File file = new File(tempFile);
    try {
        final CloudBlockBlob blob = getBlob(tenant, base16Hashes.getSha1());

        final AzureStorageArtifact artifact = new AzureStorageArtifact(blob,
                base16Hashes.getSha1(), base16Hashes, file.length(), contentType);

        LOG.info("Storing file {} with length {} to Azure Storage container {} in directory {}",
                file.getName(), file.length(), properties.getContainerName(), blob.getParent());

        // Content-addressed: an existing blob with the same SHA-1 means the
        // bytes are already stored, so skip the upload.
        if (blob.exists()) {
            LOG.debug(
                    "Artifact {} for tenant {} already exists on Azure Storage container {}, don't need to upload twice",
                    base16Hashes.getSha1(), tenant, properties.getContainerName());
            return artifact;
        }

        // Creating blob and uploading file to it
        blob.getProperties().setContentType(contentType);
        final OperationContext context = new OperationContext();
        context.setLoggingEnabled(true);
        context.setLogger(AZURE_SDK_LOG);
        final BlobRequestOptions options = new BlobRequestOptions();
        options.setConcurrentRequestCount(properties.getConcurrentRequestCount());
        blob.uploadFromFile(tempFile, null, options, context);

        // Server returns the MD5 it computed; convert for logging.
        final String md5Base16 = convertToBase16(blob.getProperties().getContentMD5());

        LOG.debug("Artifact {} stored on Azure Storage container {} with server side Etag {} and MD5 hash {}",
                base16Hashes.getSha1(), blob.getContainer().getName(),
                blob.getProperties().getEtag(), md5Base16);

        return artifact;
    } catch (final URISyntaxException | StorageException e) {
        throw new ArtifactStoreException("Failed to store artifact into Azure storage", e);
    }
}
Example #30
Source File: GenericTests.java From azure-storage-android with Apache License 2.0 | 4 votes |
/**
 * Regression test for a read-timeout issue: part 1 streams a 1 MB buffer
 * through BlobOutputStream under a 5s maximum execution time; part 2
 * uploads a 10 MB buffer directly via upload(). Both containers are
 * cleaned up regardless of outcome.
 */
public void testReadTimeoutIssue() throws URISyntaxException, StorageException, IOException {
    // part 1
    byte[] buffer = BlobTestHelper.getRandomBuffer(1 * 1024 * 1024);

    // set the maximum execution time
    BlobRequestOptions options = new BlobRequestOptions();
    options.setMaximumExecutionTimeInMs(5000);

    CloudBlobClient blobClient = TestHelper.createCloudBlobClient();
    CloudBlobContainer container = blobClient.getContainerReference(generateRandomContainerName());

    String blobName = "testBlob";
    final CloudBlockBlob blockBlobRef = container.getBlockBlobReference(blobName);
    // One write == one 1 MB block, matching the buffer size.
    blockBlobRef.setStreamWriteSizeInBytes(1 * 1024 * 1024);

    ByteArrayInputStream inputStream = new ByteArrayInputStream(buffer);
    BlobOutputStream blobOutputStream = null;

    try {
        container.createIfNotExists();
        blobOutputStream = blockBlobRef.openOutputStream(null, options, null);
        try {
            blobOutputStream.write(inputStream, buffer.length);
        }
        finally {
            // close() flushes and commits the block list.
            blobOutputStream.close();
        }
        assertTrue(blockBlobRef.exists());
    }
    finally {
        inputStream.close();
        container.deleteIfExists();
    }

    // part 2
    int length2 = 10 * 1024 * 1024;
    byte[] uploadBuffer2 = BlobTestHelper.getRandomBuffer(length2);

    CloudBlobClient blobClient2 = TestHelper.createCloudBlobClient();
    CloudBlobContainer container2 = blobClient2.getContainerReference(generateRandomContainerName());

    String blobName2 = "testBlob";
    final CloudBlockBlob blockBlobRef2 = container2.getBlockBlobReference(blobName2);

    ByteArrayInputStream inputStream2 = new ByteArrayInputStream(uploadBuffer2);

    try {
        container2.createIfNotExists();
        // Direct upload path (no explicit options) for the larger payload.
        blockBlobRef2.upload(inputStream2, length2);
    }
    finally {
        inputStream2.close();
        container2.deleteIfExists();
    }
}