org.jclouds.blobstore.BlobStore Java Examples
The following examples show how to use org.jclouds.blobstore.BlobStore. Each example notes the source file it is drawn from, the project it comes from, and that project's license.
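Most of the examples below follow the same basic pattern: build a BlobStoreContext with ContextBuilder, obtain the BlobStore view from it, and then create containers and put or get blobs. The following is a minimal, self-contained sketch of that pattern; it assumes the in-memory "transient" provider and placeholder container and blob names, whereas a real backend such as "aws-s3" would need real credentials.

    import java.io.IOException;
    import java.io.InputStream;
    import java.io.InputStreamReader;

    import org.jclouds.ContextBuilder;
    import org.jclouds.blobstore.BlobStore;
    import org.jclouds.blobstore.BlobStoreContext;
    import org.jclouds.blobstore.domain.Blob;

    import com.google.common.base.Charsets;
    import com.google.common.io.ByteSource;
    import com.google.common.io.CharStreams;

    public class BlobStoreQuickstart {

        public static void main(String[] args) throws IOException {
            // Build a context against the in-memory "transient" provider; a real
            // deployment would name a provider such as "aws-s3" and pass real credentials.
            BlobStoreContext context = ContextBuilder.newBuilder("transient")
                    .credentials("identity", "credential")
                    .buildView(BlobStoreContext.class);
            try {
                BlobStore blobStore = context.getBlobStore();

                // Create a container and upload a small payload.
                blobStore.createContainerInLocation(null, "example-container");
                ByteSource payload = ByteSource.wrap("hello".getBytes(Charsets.UTF_8));
                Blob blob = blobStore.blobBuilder("example-blob")
                        .payload(payload)
                        .contentLength(payload.size())
                        .build();
                blobStore.putBlob("example-container", blob);

                // Read the blob back and print its contents.
                Blob result = blobStore.getBlob("example-container", "example-blob");
                try (InputStream in = result.getPayload().openStream()) {
                    System.out.println(CharStreams.toString(new InputStreamReader(in, Charsets.UTF_8)));
                }
            } finally {
                context.close();
            }
        }
    }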
Example #1
Source File: ObjectStorageBlobStoreBuilder.java From james-project with Apache License 2.0
public ObjectStorageBlobStore build() {
    Preconditions.checkState(blobIdFactory != null);

    BlobStore blobStore = supplier.get();
    ObjectStorageBucketNameResolver bucketNameResolver = ObjectStorageBucketNameResolver.builder()
        .prefix(bucketPrefix)
        .namespace(namespace)
        .build();

    return new ObjectStorageBlobStore(namespace.orElse(BucketName.DEFAULT),
        blobIdFactory,
        blobStore,
        blobPutter.orElseGet(() -> defaultPutBlob(blobStore)),
        payloadCodec.orElse(PayloadCodec.DEFAULT_CODEC),
        bucketNameResolver);
}
Example #2
Source File: BlobStoreManagedLedgerOffloaderTest.java From pulsar with Apache License 2.0
@Test
public void testOffloadFailDataBlockPartUpload() throws Exception {
    ReadHandle readHandle = buildReadHandle();
    UUID uuid = UUID.randomUUID();
    String failureString = "fail DataBlockPartUpload";

    // mock throw exception when uploadPart
    try {
        BlobStore spiedBlobStore = mock(BlobStore.class, delegatesTo(blobStore));
        Mockito
            .doThrow(new RuntimeException(failureString))
            .when(spiedBlobStore).uploadMultipartPart(any(), anyInt(), any());

        LedgerOffloader offloader = new BlobStoreManagedLedgerOffloader(spiedBlobStore,
            BUCKET, scheduler,
            DEFAULT_BLOCK_SIZE, DEFAULT_READ_BUFFER_SIZE);
        offloader.offload(readHandle, uuid, new HashMap<>()).get();
        Assert.fail("Should throw exception when uploadPart fails");
    } catch (Exception e) {
        // expected
        Assert.assertTrue(e.getCause() instanceof RuntimeException);
        Assert.assertTrue(e.getCause().getMessage().contains(failureString));
        Assert.assertFalse(blobStore.blobExists(BUCKET,
            BlobStoreManagedLedgerOffloader.dataBlockOffloadKey(readHandle.getId(), uuid)));
        Assert.assertFalse(blobStore.blobExists(BUCKET,
            BlobStoreManagedLedgerOffloader.indexBlockOffloadKey(readHandle.getId(), uuid)));
    }
}
Example #3
Source File: BlobStoreManagedLedgerOffloaderTest.java From pulsar with Apache License 2.0
@Test
public void testOffloadFailInitDataBlockUpload() throws Exception {
    ReadHandle readHandle = buildReadHandle();
    UUID uuid = UUID.randomUUID();
    String failureString = "fail InitDataBlockUpload";

    // mock throw exception when initiateMultipartUpload
    try {
        BlobStore spiedBlobStore = mock(BlobStore.class, delegatesTo(blobStore));
        Mockito
            .doThrow(new RuntimeException(failureString))
            .when(spiedBlobStore).initiateMultipartUpload(any(), any(), any());

        LedgerOffloader offloader = new BlobStoreManagedLedgerOffloader(spiedBlobStore,
            BUCKET, scheduler,
            DEFAULT_BLOCK_SIZE, DEFAULT_READ_BUFFER_SIZE);
        offloader.offload(readHandle, uuid, new HashMap<>()).get();
        Assert.fail("Should throw exception when initiateMultipartUpload fails");
    } catch (Exception e) {
        // expected
        Assert.assertTrue(e.getCause() instanceof RuntimeException);
        Assert.assertTrue(e.getCause().getMessage().contains(failureString));
        Assert.assertFalse(blobStore.blobExists(BUCKET,
            BlobStoreManagedLedgerOffloader.dataBlockOffloadKey(readHandle.getId(), uuid)));
        Assert.assertFalse(blobStore.blobExists(BUCKET,
            BlobStoreManagedLedgerOffloader.indexBlockOffloadKey(readHandle.getId(), uuid)));
    }
}
Example #4
Source File: ImportCollectionIT.java From usergrid with Apache License 2.0
/**
 * Delete the configured s3 bucket.
 */
public void deleteBucket() {

    logger.debug("\n\nDelete bucket\n");

    String accessId = System.getProperty(SDKGlobalConfiguration.ACCESS_KEY_ENV_VAR);
    String secretKey = System.getProperty(SDKGlobalConfiguration.SECRET_KEY_ENV_VAR);

    Properties overrides = new Properties();
    overrides.setProperty("s3" + ".identity", accessId);
    overrides.setProperty("s3" + ".credential", secretKey);

    final Iterable<? extends Module> MODULES = ImmutableSet
        .of(new JavaUrlHttpCommandExecutorServiceModule(),
            new Log4JLoggingModule(),
            new NettyPayloadModule());

    BlobStoreContext context =
        ContextBuilder.newBuilder("s3").credentials(accessId, secretKey).modules(MODULES)
            .overrides(overrides).buildView(BlobStoreContext.class);

    BlobStore blobStore = context.getBlobStore();
    blobStore.deleteContainer(bucketName);
}
Example #5
Source File: CloudFilesManager.java From blueflood with Apache License 2.0
public synchronized boolean hasNewFiles() {
    // see if there are any files since lastMarker.
    BlobStoreContext ctx = ContextBuilder.newBuilder(provider)
            .credentials(user, key)
            .overrides(new Properties() {{
                setProperty(LocationConstants.PROPERTY_ZONE, zone);
            }})
            .buildView(BlobStoreContext.class);
    BlobStore store = ctx.getBlobStore();
    ListContainerOptions options = new ListContainerOptions().maxResults(batchSize).afterMarker(lastMarker);
    PageSet<? extends StorageMetadata> pages = store.list(container, options);
    log.debug("Saw {} new files since {}",
            pages.size() == batchSize ? "many" : Integer.toString(pages.size()),
            lastMarker);

    boolean emptiness = getBlobsWithinRange(pages).isEmpty();

    if (emptiness) {
        log.warn("No file found within range {}", new Range(START_TIME, STOP_TIME));
    } else {
        log.debug("New files found within range {}", new Range(START_TIME, STOP_TIME));
    }

    return !emptiness;
}
Example #6
Source File: BlobStoreManagedLedgerOffloader.java From pulsar with Apache License 2.0
@Override
public CompletableFuture<Void> deleteOffloaded(long ledgerId, UUID uid,
                                               Map<String, String> offloadDriverMetadata) {
    String readBucket = getReadBucket(offloadDriverMetadata);
    BlobStore readBlobstore = getReadBlobStore(offloadDriverMetadata);

    CompletableFuture<Void> promise = new CompletableFuture<>();
    scheduler.chooseThread(ledgerId).submit(() -> {
        try {
            readBlobstore.removeBlobs(readBucket,
                ImmutableList.of(dataBlockOffloadKey(ledgerId, uid), indexBlockOffloadKey(ledgerId, uid)));
            promise.complete(null);
        } catch (Throwable t) {
            log.error("Failed delete Blob", t);
            promise.completeExceptionally(t);
        }
    });

    return promise;
}
Example #7
Source File: BlobStoreManagedLedgerOffloader.java From pulsar with Apache License 2.0
@Override
public CompletableFuture<ReadHandle> readOffloaded(long ledgerId, UUID uid,
                                                   Map<String, String> offloadDriverMetadata) {
    String readBucket = getReadBucket(offloadDriverMetadata);
    BlobStore readBlobstore = getReadBlobStore(offloadDriverMetadata);

    CompletableFuture<ReadHandle> promise = new CompletableFuture<>();
    String key = dataBlockOffloadKey(ledgerId, uid);
    String indexKey = indexBlockOffloadKey(ledgerId, uid);
    scheduler.chooseThread(ledgerId).submit(() -> {
        try {
            promise.complete(BlobStoreBackedReadHandleImpl.open(scheduler.chooseThread(ledgerId),
                                                                readBlobstore,
                                                                readBucket, key, indexKey,
                                                                VERSION_CHECK,
                                                                ledgerId, readBufferSize));
        } catch (Throwable t) {
            log.error("Failed readOffloaded: ", t);
            promise.completeExceptionally(t);
        }
    });
    return promise;
}
Example #8
Source File: ImportResourceIT.java From usergrid with Apache License 2.0
/**
 * Delete the configured s3 bucket.
 */
public void deleteBucket() {

    logger.debug("\n\nDelete bucket\n");

    String accessId = System.getProperty(SDKGlobalConfiguration.ACCESS_KEY_ENV_VAR);
    String secretKey = System.getProperty(SDKGlobalConfiguration.SECRET_KEY_ENV_VAR);

    Properties overrides = new Properties();
    overrides.setProperty("s3" + ".identity", accessId);
    overrides.setProperty("s3" + ".credential", secretKey);

    final Iterable<? extends Module> MODULES = ImmutableSet.of(new JavaUrlHttpCommandExecutorServiceModule(),
        new Log4JLoggingModule(),
        new NettyPayloadModule());

    BlobStoreContext context =
        ContextBuilder.newBuilder("s3").credentials(accessId, secretKey).modules(MODULES)
            .overrides(overrides).buildView(BlobStoreContext.class);

    BlobStore blobStore = context.getBlobStore();
    blobStore.deleteContainer(bucketName);
}
Example #9
Source File: BlobStoreManagedLedgerOffloader.java From pulsar with Apache License 2.0
BlobStoreManagedLedgerOffloader(BlobStore blobStore, String container, OrderedScheduler scheduler,
                                int maxBlockSize, int readBufferSize,
                                Map<String, String> userMetadata) {
    this.offloadDriverName = "aws-s3";
    this.scheduler = scheduler;
    this.readBufferSize = readBufferSize;
    this.writeBucket = container;
    this.writeRegion = null;
    this.writeEndpoint = null;
    this.maxBlockSize = maxBlockSize;
    this.writeBlobStore = blobStore;
    this.writeLocation = null;
    this.userMetadata = userMetadata;
    this.credentials = null;

    readBlobStores.put(
        BlobStoreLocation.of(writeRegion, writeEndpoint),
        blobStore
    );
}
Example #10
Source File: S3ProxyHandler.java From s3proxy with Apache License 2.0
private static void handleContainerDelete(HttpServletResponse response,
        BlobStore blobStore, String containerName)
        throws IOException, S3Exception {
    if (!blobStore.containerExists(containerName)) {
        throw new S3Exception(S3ErrorCode.NO_SUCH_BUCKET);
    }

    String blobStoreType = getBlobStoreType(blobStore);
    if (blobStoreType.equals("b2")) {
        // S3 allows deleting a container with in-progress MPU while B2 does
        // not. Explicitly cancel uploads for B2.
        for (MultipartUpload mpu : blobStore.listMultipartUploads(
                containerName)) {
            blobStore.abortMultipartUpload(mpu);
        }
    }

    if (!blobStore.deleteContainerIfEmpty(containerName)) {
        throw new S3Exception(S3ErrorCode.BUCKET_NOT_EMPTY);
    }

    response.setStatus(HttpServletResponse.SC_NO_CONTENT);
}
Example #11
Source File: BlobStoreManagedLedgerOffloader.java From pulsar with Apache License 2.0
private static Pair<BlobStoreLocation, BlobStore> createBlobStore(String driver,
                                                                  String region,
                                                                  String endpoint,
                                                                  Supplier<Credentials> credentials,
                                                                  int maxBlockSize) {
    Properties overrides = new Properties();
    // This property controls the number of parts being uploaded in parallel.
    overrides.setProperty("jclouds.mpu.parallel.degree", "1");
    overrides.setProperty("jclouds.mpu.parts.size", Integer.toString(maxBlockSize));
    overrides.setProperty(Constants.PROPERTY_SO_TIMEOUT, "25000");
    overrides.setProperty(Constants.PROPERTY_MAX_RETRIES, Integer.toString(100));

    ApiRegistry.registerApi(new S3ApiMetadata());
    ProviderRegistry.registerProvider(new AWSS3ProviderMetadata());
    ProviderRegistry.registerProvider(new GoogleCloudStorageProviderMetadata());

    ContextBuilder contextBuilder = ContextBuilder.newBuilder(driver);
    contextBuilder.credentialsSupplier(credentials);

    if (isS3Driver(driver) && !Strings.isNullOrEmpty(endpoint)) {
        contextBuilder.endpoint(endpoint);
        overrides.setProperty(S3Constants.PROPERTY_S3_VIRTUAL_HOST_BUCKETS, "false");
    }
    contextBuilder.overrides(overrides);
    BlobStoreContext context = contextBuilder.buildView(BlobStoreContext.class);
    BlobStore blobStore = context.getBlobStore();

    log.info("Connect to blobstore : driver: {}, region: {}, endpoint: {}",
        driver, region, endpoint);
    return Pair.of(
        BlobStoreLocation.of(region, endpoint),
        blobStore);
}
Example #12
Source File: BlobStoreBackedReadHandleImpl.java From pulsar with Apache License 2.0
public static ReadHandle open(ScheduledExecutorService executor,
                              BlobStore blobStore, String bucket, String key, String indexKey,
                              VersionCheck versionCheck,
                              long ledgerId, int readBufferSize)
        throws IOException {
    Blob blob = blobStore.getBlob(bucket, indexKey);
    versionCheck.check(indexKey, blob);
    OffloadIndexBlockBuilder indexBuilder = OffloadIndexBlockBuilder.create();
    OffloadIndexBlock index = indexBuilder.fromStream(blob.getPayload().openStream());

    BackedInputStream inputStream = new BlobStoreBackedInputStreamImpl(blobStore, bucket, key,
            versionCheck,
            index.getDataObjectLength(),
            readBufferSize);

    return new BlobStoreBackedReadHandleImpl(ledgerId, index, inputStream, executor);
}
Example #13
Source File: S3ProxyHandler.java From s3proxy with Apache License 2.0
private static void handleAbortMultipartUpload(HttpServletResponse response,
        BlobStore blobStore, String containerName, String blobName,
        String uploadId) throws IOException, S3Exception {
    if (Quirks.MULTIPART_REQUIRES_STUB.contains(getBlobStoreType(
            blobStore))) {
        if (!blobStore.blobExists(containerName, uploadId)) {
            throw new S3Exception(S3ErrorCode.NO_SUCH_UPLOAD);
        }
        blobStore.removeBlob(containerName, uploadId);
    }

    // TODO: how to reconstruct original mpu?
    MultipartUpload mpu = MultipartUpload.create(containerName,
            blobName, uploadId, createFakeBlobMetadata(blobStore),
            new PutOptions());
    blobStore.abortMultipartUpload(mpu);
    response.sendError(HttpServletResponse.SC_NO_CONTENT);
}
Example #14
Source File: ObjectStoreFileStorageTest.java From multiapps-controller with Apache License 2.0
private String addBlobWithNoMetadata() throws Exception {
    BlobStore blobStore = blobStoreContext.getBlobStore();
    Path path = Paths.get(TEST_FILE_LOCATION);
    long fileSize = FileUtils.sizeOf(path.toFile());
    String id = UUID.randomUUID()
                    .toString();
    Blob blob = blobStore.blobBuilder(id)
                         .payload(new FileInputStream(path.toFile()))
                         .contentDisposition(path.getFileName()
                                                 .toString())
                         .contentType(MediaType.OCTET_STREAM.toString())
                         .contentLength(fileSize)
                         .build();
    blobStore.putBlob(CONTAINER, blob);
    return id;
}
Example #15
Source File: ImportServiceIT.java From usergrid with Apache License 2.0
public void deleteBucket() {

    String accessId = System.getProperty(SDKGlobalConfiguration.ACCESS_KEY_ENV_VAR);
    String secretKey = System.getProperty(SDKGlobalConfiguration.SECRET_KEY_ENV_VAR);

    Properties overrides = new Properties();
    overrides.setProperty("s3" + ".identity", accessId);
    overrides.setProperty("s3" + ".credential", secretKey);

    Blob bo = null;
    BlobStore blobStore = null;

    final Iterable<? extends Module> MODULES = ImmutableSet
        .of(new JavaUrlHttpCommandExecutorServiceModule(),
            new Log4JLoggingModule(),
            new NettyPayloadModule());

    BlobStoreContext context =
        ContextBuilder.newBuilder("s3").credentials(accessId, secretKey).modules(MODULES)
            .overrides(overrides).buildView(BlobStoreContext.class);

    blobStore = context.getBlobStore();
    blobStore.deleteContainer(bucketName);
}
Example #16
Source File: S3ProxyHandler.java From s3proxy with Apache License 2.0
private static void handleBucketPolicy(BlobStore blobStore, String containerName)
        throws S3Exception {
    if (!blobStore.containerExists(containerName)) {
        throw new S3Exception(S3ErrorCode.NO_SUCH_BUCKET);
    }

    throw new S3Exception(S3ErrorCode.NO_SUCH_POLICY);
}
Example #17
Source File: EventualBlobStoreTest.java From s3proxy with Apache License 2.0
private static Blob makeBlob(BlobStore blobStore, String blobName)
        throws IOException {
    return blobStore.blobBuilder(blobName)
            .payload(BYTE_SOURCE)
            .contentDisposition("attachment; filename=foo.mp4")
            .contentEncoding("compress")
            .contentLength(BYTE_SOURCE.size())
            .contentType(MediaType.MP4_AUDIO)
            .contentMD5(BYTE_SOURCE.hash(TestUtils.MD5))
            .userMetadata(ImmutableMap.of("key", "value"))
            .build();
}
Example #18
Source File: BlobStoreFileSystemHandler.java From sakai with Educational Community License v2.0
/**
 * {@inheritDoc}
 */
@Override
public long saveInputStream(String id, String root, String filePath, InputStream stream) throws IOException {
    if (stream == null) {
        return 0L;
    }

    ContainerAndName can = getContainerAndName(id, root, filePath);
    createContainerIfNotExist(can.container);

    InputStream in = markableInputStream(stream);
    long size = markableStreamLength(in);
    Payload payload = Payloads.newInputStreamPayload(in);
    try {
        BlobStore store = getBlobStore();
        String asciiID = Base64.encodeBase64String(id.getBytes("UTF8"));
        Blob blob = store.blobBuilder(can.name)
                .payload(payload)
                .contentLength(size)
                .userMetadata(ImmutableMap.of("id", asciiID, "path", filePath))
                .build();
        store.putBlob(can.container, blob);
    } finally {
        payload.release();
        Closeables.close(stream, true);
        Closeables.close(in, true);
    }
    return size;
}
Example #19
Source File: CloudFilesManager.java From blueflood with Apache License 2.0
public synchronized void downloadNewFiles(File downloadDir) {
    log.info("Downloading new files since {}", lastMarker);
    BlobStoreContext ctx = ContextBuilder.newBuilder(provider)
            .credentials(user, key)
            .overrides(new Properties() {{
                setProperty(LocationConstants.PROPERTY_ZONE, zone);
            }})
            .buildView(BlobStoreContext.class);
    // threadsafe according to https://jclouds.apache.org/documentation/userguide/blobstore-guide/
    BlobStore store = ctx.getBlobStore();
    ListContainerOptions options = new ListContainerOptions().maxResults(batchSize).afterMarker(lastMarker);
    PageSet<? extends StorageMetadata> pages = store.list(container, options);

    // Gets key within the time range specified
    NavigableMap<Long, String> mapWithinRange = getBlobsWithinRange(pages);

    // Download only for keys within that range
    for (Map.Entry<Long, String> blobMeta : mapWithinRange.entrySet()) {
        log.info("Downloading file: " + blobMeta.getValue());
        downloadWorkers.submit(new BlobDownload(downloadDir, store, container, blobMeta.getValue()));
        lastMarker = blobMeta.getValue();
        synchronized (CloudFilesManager.this) {
            // this is where we resume from.
            MarkerUtils.writeLastMarker(blobMeta.getValue());
        }
    }
    log.info("Updated the last marker value as " + lastMarker);
}
Example #20
Source File: ImportCollectionIT.java From usergrid with Apache License 2.0
private static void deleteBucketsWithPrefix() {

    logger.debug("\n\nDelete buckets with prefix {}\n", bucketPrefix);

    String accessId = System.getProperty(SDKGlobalConfiguration.ACCESS_KEY_ENV_VAR);
    String secretKey = System.getProperty(SDKGlobalConfiguration.SECRET_KEY_ENV_VAR);

    Properties overrides = new Properties();
    overrides.setProperty("s3" + ".identity", accessId);
    overrides.setProperty("s3" + ".credential", secretKey);

    final Iterable<? extends Module> MODULES = ImmutableSet
        .of(new JavaUrlHttpCommandExecutorServiceModule(),
            new Log4JLoggingModule(),
            new NettyPayloadModule());

    BlobStoreContext context =
        ContextBuilder.newBuilder("s3").credentials(accessId, secretKey).modules(MODULES)
            .overrides(overrides).buildView(BlobStoreContext.class);

    BlobStore blobStore = context.getBlobStore();
    final PageSet<? extends StorageMetadata> blobStoreList = blobStore.list();

    for (Object o : blobStoreList.toArray()) {
        StorageMetadata s = (StorageMetadata) o;

        if (s.getName().startsWith(bucketPrefix)) {
            try {
                blobStore.deleteContainer(s.getName());
            } catch (ContainerNotFoundException cnfe) {
                logger.warn("Attempted to delete bucket {} but it is already deleted", cnfe);
            }
            logger.debug("Deleted bucket {}", s.getName());
        }
    }
}
Example #21
Source File: JCloudsBlobStoreIntegrationTest.java From wildfly-camel with Apache License 2.0
private BlobStore getBlobStore() {
    BlobStore blobStore = ContextBuilder.newBuilder("transient")
        .credentials("id", "credential")
        .buildView(BlobStoreContext.class)
        .getBlobStore();
    blobStore.createContainerInLocation(null, CONTAINER_NAME);
    blobStore.createContainerInLocation(null, CONTAINER_NAME_WITH_DIR);
    return blobStore;
}
Example #22
Source File: MainApp.java From jclouds-examples with Apache License 2.0
private static void putAndRetrieveBlobExample(BlobStore blobstore) throws IOException {
    // Create a container
    String containerName = "jclouds_putAndRetrieveBlobExample_" + UUID.randomUUID().toString();
    blobstore.createContainerInLocation(null, containerName); // Create a vault

    // Create a blob
    ByteSource payload = ByteSource.wrap("data".getBytes(Charsets.UTF_8));
    Blob blob = blobstore.blobBuilder("ignored") // The blob name is ignored in Glacier
        .payload(payload)
        .contentLength(payload.size())
        .build();

    // Put the blob in the container
    String blobId = blobstore.putBlob(containerName, blob);

    // Retrieve the blob
    Blob result = blobstore.getBlob(containerName, blobId);

    // Print the result
    InputStream is = result.getPayload().openStream();
    try {
        String data = CharStreams.toString(new InputStreamReader(is, Charsets.UTF_8));
        System.out.println("The retrieved payload is: " + data);
    } finally {
        is.close();
    }
}
Example #23
Source File: SwiftTempAuthObjectStorage.java From james-project with Apache License 2.0
public BlobStore get() {
    RegionScopedBlobStoreContext blobStoreContext = contextBuilder()
        .endpoint(testConfig.getEndpoint().toString())
        .credentials(testConfig.getIdentity().asString(), testConfig.getCredentials().value())
        .overrides(testConfig.getOverrides())
        .modules(JCLOUDS_MODULES)
        .buildView(RegionScopedBlobStoreContext.class);

    return testConfig.getRegion()
        .map(Region::value)
        .map(blobStoreContext::getBlobStore)
        .orElseGet(blobStoreContext::getBlobStore);
}
Example #24
Source File: CloudExplorerSupport.java From brooklyn-server with Apache License 2.0
@Override
protected void doCall(BlobStore blobStore, String indent) throws Exception {
    Blob content = blobStore.getBlob(container, blob);
    stdout.println(indent + "Blob " + container + " : " + blob + " {");
    stdout.println(indent + "\tHeaders {");
    for (Map.Entry<String, String> entry : content.getAllHeaders().entries()) {
        stdout.println(indent + "\t\t" + entry.getKey() + " = " + entry.getValue());
    }
    stdout.println(indent + "\t}");
    stdout.println(indent + "\tmetadata : " + content.getMetadata());
    stdout.println(indent + "\tpayload : " + Streams.readFullyStringAndClose(content.getPayload().openStream()));
    stdout.println(indent + "}");
}
Example #25
Source File: CloudExplorerSupport.java From brooklyn-server with Apache License 2.0
@Override
protected void doCall(BlobStore blobStore, String indent) throws Exception {
    for (String containerName : names) {
        Set<? extends StorageMetadata> contents = blobStore.list(containerName);
        stdout.println(indent + "Container " + containerName + " {");
        for (StorageMetadata content : contents) {
            stdout.println(indent + "\t" + content);
        }
        stdout.println(indent + "}");
    }
}
Example #26
Source File: CloudExplorerSupport.java From brooklyn-server with Apache License 2.0
@Override
protected void doCall(BlobStore blobstore, String indent) throws Exception {
    Set<? extends StorageMetadata> containers = blobstore.list();
    stdout.println(indent + "Containers {");
    for (StorageMetadata container : containers) {
        stdout.println(indent + "\t" + container);
    }
    stdout.println(indent + "}");
}
Example #27
Source File: JcloudsExpect100ContinueTest.java From brooklyn-server with Apache License 2.0
private Blob get(String name) {
    try {
        BlobStore blobStore = context.getBlobStore();
        return blobStore.getBlob(containerName, name);
    } catch (Exception e) {
        LOG.error("GET " + name + " failed", e);
        return null;
    }
}
Example #28
Source File: JcloudsExpect100ContinueTest.java From brooklyn-server with Apache License 2.0
private void put(String name, String content) {
    BlobStore blobStore = context.getBlobStore();
    byte[] bytes = content.getBytes(Charsets.UTF_8);
    Blob blob = blobStore.blobBuilder(name)
            .payload(ByteSource.wrap(bytes))
            .contentLength(bytes.length)
            .build();
    try {
        blobStore.putBlob(containerName, blob);
    } catch (Exception e) {
        LOG.error("PUT " + name + " failed", e);
    }
}
Example #29
Source File: SwiftKeystone2ObjectStorage.java From james-project with Apache License 2.0
@Override
public BlobStore get() {
    RegionScopedBlobStoreContext blobStoreContext = contextBuilder()
        .endpoint(testConfig.getEndpoint().toString())
        .credentials(testConfig.getIdentity().asString(), testConfig.getCredentials().value())
        .overrides(testConfig.getOverrides())
        .modules(JCLOUDS_MODULES)
        .buildView(RegionScopedBlobStoreContext.class);

    return testConfig.getRegion()
        .map(Region::value)
        .map(blobStoreContext::getBlobStore)
        .orElseGet(blobStoreContext::getBlobStore);
}
Example #30
Source File: CloudExplorerSupport.java From brooklyn-server with Apache License 2.0
@Override
protected void doCall(JcloudsLocation loc, String indent) throws Exception {
    BlobStoreContext context = BlobStoreContextFactoryImpl.INSTANCE.newBlobStoreContext(loc);
    try {
        org.jclouds.blobstore.BlobStore blobStore = context.getBlobStore();
        doCall(blobStore, indent);
    } finally {
        context.close();
    }
}