com.microsoft.azure.storage.blob.BlobProperties Java Examples
The following examples show how to use com.microsoft.azure.storage.blob.BlobProperties. Each example is taken from an open-source project; its source file, project, and license are listed above the code.
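Before the project examples, here is a minimal, self-contained sketch of how a BlobProperties instance is typically obtained with the legacy com.microsoft.azure.storage SDK. It is not taken from any of the projects below; the connection string, container name, and blob name are placeholders.
import com.microsoft.azure.storage.CloudStorageAccount;
import com.microsoft.azure.storage.blob.BlobProperties;
import com.microsoft.azure.storage.blob.CloudBlobClient;
import com.microsoft.azure.storage.blob.CloudBlobContainer;
import com.microsoft.azure.storage.blob.CloudBlockBlob;

public class BlobPropertiesSketch {
    public static void main(String[] args) throws Exception {
        // Placeholder connection string, container, and blob name.
        CloudStorageAccount account = CloudStorageAccount.parse(
                "DefaultEndpointsProtocol=https;AccountName=<account>;AccountKey=<key>");
        CloudBlobClient client = account.createCloudBlobClient();
        CloudBlobContainer container = client.getContainerReference("my-container");
        CloudBlockBlob blob = container.getBlockBlobReference("my-blob.csv");

        // The properties object is not populated until the blob's attributes are fetched.
        blob.downloadAttributes();
        BlobProperties properties = blob.getProperties();

        System.out.println("Length: " + properties.getLength());
        System.out.println("Last modified: " + properties.getLastModified());
        System.out.println("Content type: " + properties.getContentType());
        System.out.println("ETag: " + properties.getEtag());
    }
}
Most of the examples below follow this same pattern: fetch or mock the blob's attributes, then read length, last-modified time, content type, or MD5 from the resulting BlobProperties.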
Example #1
Source File: StreamingIngestClientTest.java From azure-kusto-java with MIT License | 7 votes |
@Test
void IngestFromBlob() throws Exception {
    CloudBlockBlob cloudBlockBlob = mock(CloudBlockBlob.class);
    String blobPath = "https://storageaccount.blob.core.windows.net/container/blob.csv";
    BlobSourceInfo blobSourceInfo = new BlobSourceInfo(blobPath);

    BlobProperties blobProperties = mock(BlobProperties.class);
    when(blobProperties.getLength()).thenReturn((long) 1000);

    BlobInputStream blobInputStream = mock(BlobInputStream.class);
    when(blobInputStream.read(any(byte[].class))).thenReturn(10).thenReturn(-1);

    doNothing().when(cloudBlockBlob).downloadAttributes();
    when(cloudBlockBlob.getProperties()).thenReturn(blobProperties);
    when(cloudBlockBlob.openInputStream()).thenReturn(blobInputStream);

    OperationStatus status = streamingIngestClient.ingestFromBlob(blobSourceInfo, ingestionProperties, cloudBlockBlob)
            .getIngestionStatusCollection().get(0).status;
    assertEquals(status, OperationStatus.Succeeded);
    verify(streamingClientMock, atLeastOnce()).executeStreamingIngest(any(String.class), any(String.class),
            any(InputStream.class), isNull(), any(String.class), isNull(), any(boolean.class));
}
Example #2
Source File: StreamingIngestClientTest.java From azure-kusto-java with MIT License | 7 votes |
@Test
void IngestFromBlob_EmptyBlob_IngestClientException() throws Exception {
    CloudBlockBlob cloudBlockBlob = mock(CloudBlockBlob.class);
    String blobPath = "https://storageaccount.blob.core.windows.net/container/blob.csv";
    BlobSourceInfo blobSourceInfo = new BlobSourceInfo(blobPath);

    BlobProperties blobProperties = mock(BlobProperties.class);
    when(blobProperties.getLength()).thenReturn((long) 0);

    doNothing().when(cloudBlockBlob).downloadAttributes();
    when(cloudBlockBlob.getProperties()).thenReturn(blobProperties);

    IngestionClientException ingestionClientException = assertThrows(IngestionClientException.class,
            () -> streamingIngestClient.ingestFromBlob(blobSourceInfo, ingestionProperties, cloudBlockBlob),
            "Expected IngestionClientException to be thrown, but it didn't");
    assertTrue(ingestionClientException.getMessage().contains("Empty blob."));
}
Example #3
Source File: AzureBackuper.java From cassandra-backup with Apache License 2.0 | 6 votes |
private void deleteStaleBlobs() throws Exception {
    final Date expiryDate = Date.from(ZonedDateTime.now().minusWeeks(1).toInstant());

    final CloudBlobDirectory directoryReference = blobContainer.getDirectoryReference(
            request.storageLocation.clusterId + "/" + request.storageLocation.datacenterId);

    for (final ListBlobItem blob : directoryReference.listBlobs(null, true, EnumSet.noneOf(BlobListingDetails.class), null, null)) {
        if (!(blob instanceof CloudBlob)) {
            continue;
        }

        final BlobProperties properties = ((CloudBlob) blob).getProperties();

        if (properties == null || properties.getLastModified() == null) {
            continue;
        }

        if (properties.getLastModified().before(expiryDate)) {
            ((CloudBlob) blob).delete();
        }
    }
}
Example #4
Source File: AzureStorageService.java From crate with Apache License 2.0 | 5 votes |
public Map<String, BlobMetaData> listBlobsByPrefix(String container, String keyPath, String prefix)
        throws URISyntaxException, StorageException {
    // NOTE: this should be here: if (prefix == null) prefix = "";
    // however, this is really inefficient since deleteBlobsByPrefix enumerates everything and
    // then does a prefix match on the result; it should just call listBlobsByPrefix with the prefix!
    final var blobsBuilder = new HashMap<String, BlobMetaData>();
    final EnumSet<BlobListingDetails> enumBlobListingDetails = EnumSet.of(BlobListingDetails.METADATA);
    final Tuple<CloudBlobClient, Supplier<OperationContext>> client = client();
    final CloudBlobContainer blobContainer = client.v1().getContainerReference(container);
    LOGGER.trace(() -> new ParameterizedMessage("listing container [{}], keyPath [{}], prefix [{}]", container, keyPath, prefix));
    for (final ListBlobItem blobItem : blobContainer.listBlobs(keyPath + (prefix == null ? "" : prefix),
            false, enumBlobListingDetails, null, client.v2().get())) {
        final URI uri = blobItem.getUri();
        LOGGER.trace(() -> new ParameterizedMessage("blob url [{}]", uri));
        // uri.getPath is of the form /container/keyPath.* and we want to strip off the /container/
        // this requires 1 + container.length() + 1, with each 1 corresponding to one of the /
        final String blobPath = uri.getPath().substring(1 + container.length() + 1);
        if (blobItem instanceof CloudBlob) {
            final BlobProperties properties = ((CloudBlob) blobItem).getProperties();
            final String name = blobPath.substring(keyPath.length());
            LOGGER.trace(() -> new ParameterizedMessage("blob url [{}], name [{}], size [{}]", uri, name, properties.getLength()));
            blobsBuilder.put(name, new PlainBlobMetaData(name, properties.getLength()));
        }
    }
    return Map.copyOf(blobsBuilder);
}
Example #5
Source File: AzureConfigProvider.java From exhibitor with Apache License 2.0 | 5 votes |
private BlobProperties getConfigBlobProperties() throws Exception {
    try {
        BlobProperties properties = azureClient.getBlobProperties(arguments.getContainer(), arguments.getBlobName());
        if (properties.getLength() > 0) {
            return properties;
        }
    } catch (StorageException e) {
        if (!isNotFoundError(e) && !isForbiddenError(e)) {
            throw e;
        }
    }
    return null;
}
Example #6
Source File: StorageObjectSummary.java From snowflake-jdbc with Apache License 2.0 | 5 votes |
/**
 * Constructs a StorageObjectSummary object from Azure BLOB properties.
 * Factory methods are used to create these objects, since Azure can throw
 * while retrieving the BLOB properties.
 *
 * @param listBlobItem an Azure ListBlobItem object
 * @return the ObjectSummary object created
 */
public static StorageObjectSummary createFromAzureListBlobItem(ListBlobItem listBlobItem)
        throws StorageProviderException {
    String location, key, md5;
    long size;

    // Retrieve the BLOB properties that we need for the Summary.
    // Azure Storage stores metadata inside each BLOB, therefore the listBlobItem
    // will point us to the underlying BLOB and we will get the properties from it.
    // During the process the Storage Client could fail, hence we need to wrap the
    // get calls in try/catch and handle possible exceptions.
    try {
        location = listBlobItem.getContainer().getName();
        CloudBlob cloudBlob = (CloudBlob) listBlobItem;
        key = cloudBlob.getName();
        BlobProperties blobProperties = cloudBlob.getProperties();
        // The content MD5 property is not always the actual MD5 of the file, but here it is only
        // used for skipping files on the PUT command, hence it is OK.
        md5 = convertBase64ToHex(blobProperties.getContentMD5());
        size = blobProperties.getLength();
    } catch (URISyntaxException | StorageException ex) {
        // This should only happen if somehow we got here with an invalid URI (it should never happen)
        // ...or there is a Storage service error. Unlike S3, Azure fetches metadata from the BLOB itself,
        // and it is a lazy operation.
        throw new StorageProviderException(ex);
    }
    return new StorageObjectSummary(location, key, md5, size);
}
Example #7
Source File: MockStorageInterface.java From big-c with Apache License 2.0 | 5 votes |
protected void refreshProperties(boolean getMetadata) {
    if (backingStore.exists(convertUriToDecodedString(uri))) {
        byte[] content = backingStore.getContent(convertUriToDecodedString(uri));
        properties = new BlobProperties();
        this.properties = updateLastModifed(this.properties);
        this.properties = updateLength(this.properties, content.length);
        if (getMetadata) {
            metadata = backingStore.getMetadata(convertUriToDecodedString(uri));
        }
    }
}
Example #8
Source File: MockStorageInterface.java From big-c with Apache License 2.0 | 5 votes |
protected BlobProperties updateLength(BlobProperties properties, int length) {
    try {
        Method setLength = properties.getClass().getDeclaredMethod("setLength", long.class);
        setLength.setAccessible(true);
        setLength.invoke(this.properties, length);
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
    return properties;
}
Example #9
Source File: MockStorageInterface.java From big-c with Apache License 2.0 | 5 votes |
protected BlobProperties updateLastModifed(BlobProperties properties) {
    try {
        Method setLastModified = properties.getClass().getDeclaredMethod("setLastModified", Date.class);
        setLastModified.setAccessible(true);
        setLastModified.invoke(this.properties, Calendar.getInstance(TimeZone.getTimeZone("UTC")).getTime());
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
    return properties;
}
Example #10
Source File: MockStorageInterface.java From big-c with Apache License 2.0 | 5 votes |
protected MockCloudBlobWrapper(URI uri, HashMap<String, String> metadata, int length) {
    this.uri = uri;
    this.metadata = metadata;
    this.properties = new BlobProperties();
    this.properties = updateLastModifed(this.properties);
    this.properties = updateLength(this.properties, length);
}
Example #11
Source File: AzureNativeFileSystemStore.java From big-c with Apache License 2.0 | 5 votes |
/**
 * Return the actual data length of the blob with the specified properties.
 * If it is a page blob, you can't rely on the length from the properties
 * argument and you must get it from the file. Otherwise, you can.
 */
private long getDataLength(CloudBlobWrapper blob, BlobProperties properties)
        throws AzureException {
    if (blob instanceof CloudPageBlobWrapper) {
        try {
            return PageBlobInputStream.getPageBlobSize((CloudPageBlobWrapper) blob,
                    getInstrumentedContext(isConcurrentOOBAppendAllowed()));
        } catch (Exception e) {
            throw new AzureException(
                    "Unexpected exception getting page blob actual data size.", e);
        }
    }
    return properties.getLength();
}
Example #12
Source File: MockStorageInterface.java From hadoop with Apache License 2.0 | 5 votes |
protected void refreshProperties(boolean getMetadata) {
    if (backingStore.exists(convertUriToDecodedString(uri))) {
        byte[] content = backingStore.getContent(convertUriToDecodedString(uri));
        properties = new BlobProperties();
        this.properties = updateLastModifed(this.properties);
        this.properties = updateLength(this.properties, content.length);
        if (getMetadata) {
            metadata = backingStore.getMetadata(convertUriToDecodedString(uri));
        }
    }
}
Example #13
Source File: MockStorageInterface.java From hadoop with Apache License 2.0 | 5 votes |
protected BlobProperties updateLength(BlobProperties properties, int length) {
    try {
        Method setLength = properties.getClass().getDeclaredMethod("setLength", long.class);
        setLength.setAccessible(true);
        setLength.invoke(this.properties, length);
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
    return properties;
}
Example #14
Source File: MockStorageInterface.java From hadoop with Apache License 2.0 | 5 votes |
protected BlobProperties updateLastModifed(BlobProperties properties) {
    try {
        Method setLastModified = properties.getClass().getDeclaredMethod("setLastModified", Date.class);
        setLastModified.setAccessible(true);
        setLastModified.invoke(this.properties, Calendar.getInstance(TimeZone.getTimeZone("UTC")).getTime());
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
    return properties;
}
Example #15
Source File: MockStorageInterface.java From hadoop with Apache License 2.0 | 5 votes |
protected MockCloudBlobWrapper(URI uri, HashMap<String, String> metadata, int length) {
    this.uri = uri;
    this.metadata = metadata;
    this.properties = new BlobProperties();
    this.properties = updateLastModifed(this.properties);
    this.properties = updateLength(this.properties, length);
}
Example #16
Source File: AzureNativeFileSystemStore.java From hadoop with Apache License 2.0 | 5 votes |
/**
 * Return the actual data length of the blob with the specified properties.
 * If it is a page blob, you can't rely on the length from the properties
 * argument and you must get it from the file. Otherwise, you can.
 */
private long getDataLength(CloudBlobWrapper blob, BlobProperties properties)
        throws AzureException {
    if (blob instanceof CloudPageBlobWrapper) {
        try {
            return PageBlobInputStream.getPageBlobSize((CloudPageBlobWrapper) blob,
                    getInstrumentedContext(isConcurrentOOBAppendAllowed()));
        } catch (Exception e) {
            throw new AzureException(
                    "Unexpected exception getting page blob actual data size.", e);
        }
    }
    return properties.getLength();
}
Example #17
Source File: TestableBlobsStorageService.java From kayenta with Apache License 2.0 | 4 votes |
@Override
public Date getLastModified(BlobProperties properties) {
    return new Date();
}
Example #18
Source File: ListAzureBlobStorage.java From nifi with Apache License 2.0 | 4 votes |
@Override
protected List<BlobInfo> performListing(final ProcessContext context, final Long minTimestamp) throws IOException {
    String containerName = context.getProperty(AzureStorageUtils.CONTAINER).evaluateAttributeExpressions().getValue();
    String prefix = context.getProperty(PROP_PREFIX).evaluateAttributeExpressions().getValue();
    if (prefix == null) {
        prefix = "";
    }
    final List<BlobInfo> listing = new ArrayList<>();
    try {
        CloudBlobClient blobClient = AzureStorageUtils.createCloudBlobClient(context, getLogger(), null);
        CloudBlobContainer container = blobClient.getContainerReference(containerName);

        final OperationContext operationContext = new OperationContext();
        AzureStorageUtils.setProxy(operationContext, context);

        for (ListBlobItem blob : container.listBlobs(prefix, true, EnumSet.of(BlobListingDetails.METADATA), null, operationContext)) {
            if (blob instanceof CloudBlob) {
                CloudBlob cloudBlob = (CloudBlob) blob;
                BlobProperties properties = cloudBlob.getProperties();
                StorageUri uri = cloudBlob.getSnapshotQualifiedStorageUri();

                Builder builder = new BlobInfo.Builder()
                        .primaryUri(uri.getPrimaryUri().toString())
                        .blobName(cloudBlob.getName())
                        .containerName(containerName)
                        .contentType(properties.getContentType())
                        .contentLanguage(properties.getContentLanguage())
                        .etag(properties.getEtag())
                        .lastModifiedTime(properties.getLastModified().getTime())
                        .length(properties.getLength());

                if (uri.getSecondaryUri() != null) {
                    builder.secondaryUri(uri.getSecondaryUri().toString());
                }

                if (blob instanceof CloudBlockBlob) {
                    builder.blobType(AzureStorageUtils.BLOCK);
                } else {
                    builder.blobType(AzureStorageUtils.PAGE);
                }
                listing.add(builder.build());
            }
        }
    } catch (Throwable t) {
        throw new IOException(ExceptionUtils.getRootCause(t));
    }
    return listing;
}
Example #19
Source File: FileSasTests.java From azure-storage-android with Apache License 2.0 | 4 votes |
@Test
public void testFileCopyFromBlobWithSasAndSnapshot()
        throws URISyntaxException, StorageException, InterruptedException, IOException, InvalidKeyException {
    String blobName = BlobTestHelper.generateRandomBlobNameWithPrefix("testblob");
    CloudBlobContainer container =
            TestHelper.createCloudBlobClient().getContainerReference(BlobTestHelper.generateRandomContainerName());
    container.createIfNotExists();
    CloudBlockBlob source = container.getBlockBlobReference(blobName);
    String data = "String data";
    source.uploadText(data, Constants.UTF8_CHARSET, null, null, null);

    byte[] buffer = BlobTestHelper.getRandomBuffer(512);
    ByteArrayInputStream stream = new ByteArrayInputStream(buffer);
    source.upload(stream, buffer.length);
    source.getMetadata().put("Test", "value");
    source.uploadMetadata();

    SharedAccessFilePolicy policy = createSharedAccessPolicy(
            EnumSet.of(SharedAccessFilePermissions.READ, SharedAccessFilePermissions.WRITE,
                    SharedAccessFilePermissions.LIST, SharedAccessFilePermissions.DELETE), 5000);

    CloudFile copy = this.share.getRootDirectoryReference().getFileReference("copy");
    String sasToken = copy.generateSharedAccessSignature(policy, null);
    CloudFile copySas = new CloudFile(new URI(copy.getUri().toString() + "?" + sasToken));

    // Generate account SAS for the source
    // Cannot generate a SAS directly on a snapshot and the SAS for the destination is only for the destination
    SharedAccessAccountPolicy accountPolicy = new SharedAccessAccountPolicy();
    accountPolicy.setPermissions(EnumSet.of(SharedAccessAccountPermissions.READ, SharedAccessAccountPermissions.WRITE));
    accountPolicy.setServices(EnumSet.of(SharedAccessAccountService.BLOB));
    accountPolicy.setResourceTypes(EnumSet.of(SharedAccessAccountResourceType.OBJECT, SharedAccessAccountResourceType.CONTAINER));
    accountPolicy.setSharedAccessExpiryTime(policy.getSharedAccessExpiryTime());
    final CloudBlobClient sasClient = TestHelper.createCloudBlobClient(accountPolicy, false);

    CloudBlockBlob snapshot = (CloudBlockBlob) source.createSnapshot();
    CloudBlockBlob sasBlob = (CloudBlockBlob) sasClient.getContainerReference(container.getName())
            .getBlobReferenceFromServer(snapshot.getName(), snapshot.getSnapshotID(), null, null, null);
    sasBlob.exists();

    String copyId = copySas.startCopy(BlobTestHelper.defiddler(sasBlob));
    FileTestHelper.waitForCopy(copySas);

    copySas.downloadAttributes();
    FileProperties prop1 = copySas.getProperties();
    BlobProperties prop2 = sasBlob.getProperties();

    assertEquals(prop1.getCacheControl(), prop2.getCacheControl());
    assertEquals(prop1.getContentEncoding(), prop2.getContentEncoding());
    assertEquals(prop1.getContentDisposition(), prop2.getContentDisposition());
    assertEquals(prop1.getContentLanguage(), prop2.getContentLanguage());
    assertEquals(prop1.getContentMD5(), prop2.getContentMD5());
    assertEquals(prop1.getContentType(), prop2.getContentType());

    assertEquals("value", copySas.getMetadata().get("Test"));
    assertEquals(copyId, copySas.getCopyState().getCopyId());

    snapshot.delete();
    source.delete();
    copySas.delete();
    container.delete();
}
Example #20
Source File: CloudFileTests.java From azure-storage-android with Apache License 2.0 | 4 votes |
private CloudFile doCloudBlobCopy(CloudBlob source, int length) throws Exception {
    Calendar cal = new GregorianCalendar(TimeZone.getTimeZone("UTC"));
    cal.setTime(new Date());
    cal.add(Calendar.MINUTE, 5);

    // Source SAS must have read permissions
    SharedAccessBlobPolicy policy = new SharedAccessBlobPolicy();
    policy.setPermissions(EnumSet.of(SharedAccessBlobPermissions.READ));
    policy.setSharedAccessExpiryTime(cal.getTime());
    String sasToken = source.generateSharedAccessSignature(policy, null, null);

    // Get destination reference
    final CloudFile destination = this.share.getRootDirectoryReference().getFileReference("destination");

    // Start copy and wait for completion
    StorageCredentialsSharedAccessSignature credentials = new StorageCredentialsSharedAccessSignature(sasToken);
    Constructor<? extends CloudBlob> blobType = source.getClass().getConstructor(URI.class);
    String copyId = destination.startCopy(blobType.newInstance(credentials.transformUri(source.getUri())));
    FileTestHelper.waitForCopy(destination);
    destination.downloadAttributes();

    // Check original file references for equality
    assertEquals(CopyStatus.SUCCESS, destination.getCopyState().getStatus());
    assertEquals(source.getServiceClient().getCredentials().transformUri(source.getUri()).getPath(),
            destination.getCopyState().getSource().getPath());
    assertEquals(length, destination.getCopyState().getTotalBytes().intValue());
    assertEquals(length, destination.getCopyState().getBytesCopied().intValue());
    assertEquals(copyId, destination.getProperties().getCopyState().getCopyId());

    // Attempt to abort the completed copy operation.
    try {
        destination.abortCopy(destination.getCopyState().getCopyId());
        FileTestHelper.waitForCopy(destination);
        fail();
    } catch (StorageException ex) {
        assertEquals(HttpURLConnection.HTTP_CONFLICT, ex.getHttpStatusCode());
    }

    assertNotNull(destination.getProperties().getEtag());
    assertFalse(source.getProperties().getEtag().equals(destination.getProperties().getEtag()));

    source.downloadAttributes();
    FileProperties prop1 = destination.getProperties();
    BlobProperties prop2 = source.getProperties();

    assertEquals(prop1.getCacheControl(), prop2.getCacheControl());
    assertEquals(prop1.getContentEncoding(), prop2.getContentEncoding());
    assertEquals(prop1.getContentLanguage(), prop2.getContentLanguage());
    assertEquals(prop1.getContentMD5(), prop2.getContentMD5());
    assertEquals(prop1.getContentType(), prop2.getContentType());

    assertEquals("value", destination.getMetadata().get("Test"));

    return destination;
}
Example #21
Source File: StorageInterfaceImpl.java From hadoop with Apache License 2.0 | 4 votes |
@Override
public BlobProperties getProperties() {
    return getBlob().getProperties();
}
Example #22
Source File: AzureNativeFileSystemStore.java From hadoop with Apache License 2.0 | 4 votes |
private PartialListing list(String prefix, String delimiter,
        final int maxListingCount, final int maxListingDepth, String priorLastKey)
        throws IOException {
    try {
        checkContainer(ContainerAccessType.PureRead);

        if (0 < prefix.length() && !prefix.endsWith(PATH_DELIMITER)) {
            prefix += PATH_DELIMITER;
        }

        Iterable<ListBlobItem> objects;
        if (prefix.equals("/")) {
            objects = listRootBlobs(true);
        } else {
            objects = listRootBlobs(prefix, true);
        }

        ArrayList<FileMetadata> fileMetadata = new ArrayList<FileMetadata>();
        for (ListBlobItem blobItem : objects) {
            // Check that the maximum listing count is not exhausted.
            //
            if (0 < maxListingCount && fileMetadata.size() >= maxListingCount) {
                break;
            }

            if (blobItem instanceof CloudBlockBlobWrapper || blobItem instanceof CloudPageBlobWrapper) {
                String blobKey = null;
                CloudBlobWrapper blob = (CloudBlobWrapper) blobItem;
                BlobProperties properties = blob.getProperties();

                // Determine format of the blob name depending on whether an absolute
                // path is being used or not.
                blobKey = normalizeKey(blob);

                FileMetadata metadata;
                if (retrieveFolderAttribute(blob)) {
                    metadata = new FileMetadata(blobKey,
                            properties.getLastModified().getTime(),
                            getPermissionStatus(blob),
                            BlobMaterialization.Explicit);
                } else {
                    metadata = new FileMetadata(
                            blobKey,
                            getDataLength(blob, properties),
                            properties.getLastModified().getTime(),
                            getPermissionStatus(blob));
                }

                // Add the metadata to the list, but remove any existing duplicate
                // entries first that we may have added by finding nested files.
                FileMetadata existing = getDirectoryInList(fileMetadata, blobKey);
                if (existing != null) {
                    fileMetadata.remove(existing);
                }
                fileMetadata.add(metadata);
            } else if (blobItem instanceof CloudBlobDirectoryWrapper) {
                CloudBlobDirectoryWrapper directory = (CloudBlobDirectoryWrapper) blobItem;

                // Determine format of directory name depending on whether an absolute
                // path is being used or not.
                //
                String dirKey = normalizeKey(directory);
                // Strip the last /
                if (dirKey.endsWith(PATH_DELIMITER)) {
                    dirKey = dirKey.substring(0, dirKey.length() - 1);
                }

                // Reached the targeted listing depth. Return metadata for the
                // directory using default permissions.
                //
                // Note: Something smarter should be done about permissions. Maybe
                // inherit the permissions of the first non-directory blob.
                // Also, getting a proper value for last-modified is tricky.
                FileMetadata directoryMetadata = new FileMetadata(dirKey, 0,
                        defaultPermissionNoBlobMetadata(), BlobMaterialization.Implicit);

                // Add the directory metadata to the list only if it's not already
                // there.
                if (getDirectoryInList(fileMetadata, dirKey) == null) {
                    fileMetadata.add(directoryMetadata);
                }

                // Currently at a depth of one, decrement the listing depth for
                // sub-directories.
                buildUpList(directory, fileMetadata, maxListingCount, maxListingDepth - 1);
            }
        }

        // Note: Original code indicated that this may be a hack.
        priorLastKey = null;
        PartialListing listing = new PartialListing(priorLastKey,
                fileMetadata.toArray(new FileMetadata[] {}),
                0 == fileMetadata.size() ? new String[] {} : new String[] { prefix });
        return listing;
    } catch (Exception e) {
        // Re-throw as an Azure storage exception.
        //
        throw new AzureException(e);
    }
}
Example #23
Source File: MockStorageInterface.java From big-c with Apache License 2.0 | 4 votes |
@Override
public BlobProperties getProperties() {
    return properties;
}
Example #24
Source File: AzureNativeFileSystemStore.java From big-c with Apache License 2.0 | 4 votes |
private PartialListing list(String prefix, String delimiter,
        final int maxListingCount, final int maxListingDepth, String priorLastKey)
        throws IOException {
    try {
        checkContainer(ContainerAccessType.PureRead);

        if (0 < prefix.length() && !prefix.endsWith(PATH_DELIMITER)) {
            prefix += PATH_DELIMITER;
        }

        Iterable<ListBlobItem> objects;
        if (prefix.equals("/")) {
            objects = listRootBlobs(true);
        } else {
            objects = listRootBlobs(prefix, true);
        }

        ArrayList<FileMetadata> fileMetadata = new ArrayList<FileMetadata>();
        for (ListBlobItem blobItem : objects) {
            // Check that the maximum listing count is not exhausted.
            //
            if (0 < maxListingCount && fileMetadata.size() >= maxListingCount) {
                break;
            }

            if (blobItem instanceof CloudBlockBlobWrapper || blobItem instanceof CloudPageBlobWrapper) {
                String blobKey = null;
                CloudBlobWrapper blob = (CloudBlobWrapper) blobItem;
                BlobProperties properties = blob.getProperties();

                // Determine format of the blob name depending on whether an absolute
                // path is being used or not.
                blobKey = normalizeKey(blob);

                FileMetadata metadata;
                if (retrieveFolderAttribute(blob)) {
                    metadata = new FileMetadata(blobKey,
                            properties.getLastModified().getTime(),
                            getPermissionStatus(blob),
                            BlobMaterialization.Explicit);
                } else {
                    metadata = new FileMetadata(
                            blobKey,
                            getDataLength(blob, properties),
                            properties.getLastModified().getTime(),
                            getPermissionStatus(blob));
                }

                // Add the metadata to the list, but remove any existing duplicate
                // entries first that we may have added by finding nested files.
                FileMetadata existing = getDirectoryInList(fileMetadata, blobKey);
                if (existing != null) {
                    fileMetadata.remove(existing);
                }
                fileMetadata.add(metadata);
            } else if (blobItem instanceof CloudBlobDirectoryWrapper) {
                CloudBlobDirectoryWrapper directory = (CloudBlobDirectoryWrapper) blobItem;

                // Determine format of directory name depending on whether an absolute
                // path is being used or not.
                //
                String dirKey = normalizeKey(directory);
                // Strip the last /
                if (dirKey.endsWith(PATH_DELIMITER)) {
                    dirKey = dirKey.substring(0, dirKey.length() - 1);
                }

                // Reached the targeted listing depth. Return metadata for the
                // directory using default permissions.
                //
                // Note: Something smarter should be done about permissions. Maybe
                // inherit the permissions of the first non-directory blob.
                // Also, getting a proper value for last-modified is tricky.
                FileMetadata directoryMetadata = new FileMetadata(dirKey, 0,
                        defaultPermissionNoBlobMetadata(), BlobMaterialization.Implicit);

                // Add the directory metadata to the list only if it's not already
                // there.
                if (getDirectoryInList(fileMetadata, dirKey) == null) {
                    fileMetadata.add(directoryMetadata);
                }

                // Currently at a depth of one, decrement the listing depth for
                // sub-directories.
                buildUpList(directory, fileMetadata, maxListingCount, maxListingDepth - 1);
            }
        }

        // Note: Original code indicated that this may be a hack.
        priorLastKey = null;
        PartialListing listing = new PartialListing(priorLastKey,
                fileMetadata.toArray(new FileMetadata[] {}),
                0 == fileMetadata.size() ? new String[] {} : new String[] { prefix });
        return listing;
    } catch (Exception e) {
        // Re-throw as an Azure storage exception.
        //
        throw new AzureException(e);
    }
}
Example #25
Source File: StorageInterfaceImpl.java From big-c with Apache License 2.0 | 4 votes |
@Override
public BlobProperties getProperties() {
    return getBlob().getProperties();
}
Example #26
Source File: MockStorageInterface.java From hadoop with Apache License 2.0 | 4 votes |
@Override
public BlobProperties getProperties() {
    return properties;
}
Example #27
Source File: StorageInterface.java From hadoop with Apache License 2.0 | 2 votes |
/**
 * Returns the blob's properties.
 *
 * @return A {@link BlobProperties} object that represents the properties of
 *         the blob.
 */
BlobProperties getProperties();
Example #28
Source File: StorageInterface.java From big-c with Apache License 2.0 | 2 votes |
/**
 * Returns the blob's properties.
 *
 * @return A {@link BlobProperties} object that represents the properties of
 *         the blob.
 */
BlobProperties getProperties();
Example #29
Source File: AzureClient.java From exhibitor with Apache License 2.0
public BlobProperties getBlobProperties(String containerName, String uri) throws Exception;