com.google.api.services.storage.Storage Java Examples
The following examples show how to use
com.google.api.services.storage.Storage.
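Before the examples, here is a minimal sketch of the setup most of them assume: building a Storage client from an HTTP transport, a JSON factory, and application-default credentials, then issuing a simple objects().list() request. The bucket name and application name below are placeholders, and the exact credential classes available depend on the client-library version in your project.

import com.google.api.client.googleapis.auth.oauth2.GoogleCredential;
import com.google.api.client.googleapis.javanet.GoogleNetHttpTransport;
import com.google.api.client.http.HttpTransport;
import com.google.api.client.json.JsonFactory;
import com.google.api.client.json.jackson2.JacksonFactory;
import com.google.api.services.storage.Storage;
import com.google.api.services.storage.StorageScopes;
import com.google.api.services.storage.model.Objects;
import com.google.api.services.storage.model.StorageObject;

public class StorageQuickstart {
  public static void main(String[] args) throws Exception {
    HttpTransport transport = GoogleNetHttpTransport.newTrustedTransport();
    JsonFactory jsonFactory = JacksonFactory.getDefaultInstance();

    // Application-default credentials; scope them explicitly if the environment requires it.
    GoogleCredential credential = GoogleCredential.getApplicationDefault();
    if (credential.createScopedRequired()) {
      credential = credential.createScoped(StorageScopes.all());
    }

    Storage storage =
        new Storage.Builder(transport, jsonFactory, credential)
            .setApplicationName("storage-examples") // placeholder application name
            .build();

    // List the first page of objects in a bucket ("my-bucket" is a placeholder).
    Objects listing = storage.objects().list("my-bucket").execute();
    if (listing.getItems() != null) {
      for (StorageObject object : listing.getItems()) {
        System.out.println(object.getName() + " (" + object.getSize() + " bytes)");
      }
    }
  }
}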
Example #1
Source File: GoogleCloudStorageReadChannelTest.java From hadoop-connectors with Apache License 2.0 | 6 votes |
@Test
public void metadataInitialization_eager() throws IOException {
  StorageObject object = newStorageObject(BUCKET_NAME, OBJECT_NAME);
  MockHttpTransport transport = mockTransport(jsonDataResponse(object));
  List<HttpRequest> requests = new ArrayList<>();
  Storage storage = new Storage(transport, JSON_FACTORY, requests::add);
  GoogleCloudStorageReadOptions options =
      GoogleCloudStorageReadOptions.builder().setFastFailOnNotFound(true).build();

  GoogleCloudStorageReadChannel readChannel = createReadChannel(storage, options);

  assertThat(requests).hasSize(1);

  assertThat(readChannel.size()).isEqualTo(object.getSize().longValue());
  assertThat(requests).hasSize(1);
}
Example #2
Source File: GoogleApiDebugOptionsTest.java From beam with Apache License 2.0 | 6 votes |
@Test
public void testMatchingAllCalls() throws Exception {
  String[] args = new String[] {STORAGE_TRACE};
  GcsOptions options = PipelineOptionsFactory.fromArgs(args).as(GcsOptions.class);
  options.setGcpCredential(new TestCredential());

  assertNotNull(options.getGoogleApiTrace());

  Storage.Objects.Get getRequest =
      Transport.newStorageClient(options).build().objects().get("testBucketId", "testObjectId");
  assertEquals("TraceDestination", getRequest.get("$trace"));

  Storage.Objects.List listRequest =
      Transport.newStorageClient(options).build().objects().list("testProjectId");
  assertEquals("TraceDestination", listRequest.get("$trace"));
}
Example #3
Source File: GcsUtil.java From beam with Apache License 2.0 | 6 votes |
/** Lists {@link Objects} given the {@code bucket}, {@code prefix}, {@code pageToken}. */
public Objects listObjects(String bucket, String prefix, @Nullable String pageToken)
    throws IOException {
  // List all objects that start with the prefix (including objects in sub-directories).
  Storage.Objects.List listObject = storageClient.objects().list(bucket);
  listObject.setMaxResults(MAX_LIST_ITEMS_PER_CALL);
  listObject.setPrefix(prefix);
  if (pageToken != null) {
    listObject.setPageToken(pageToken);
  }

  try {
    return ResilientOperation.retry(
        ResilientOperation.getGoogleRequestCallable(listObject),
        createBackOff(),
        RetryDeterminer.SOCKET_ERRORS,
        IOException.class);
  } catch (Exception e) {
    throw new IOException(
        String.format("Unable to match files in bucket %s, prefix %s.", bucket, prefix), e);
  }
}
Example #4
Source File: GoogleApiDebugOptionsTest.java From beam with Apache License 2.0 | 6 votes |
@Test
public void testMatchingAgainstClient() throws Exception {
  GcsOptions options = PipelineOptionsFactory.as(GcsOptions.class);
  options.setGcpCredential(new TestCredential());
  options.setGoogleApiTrace(
      new GoogleApiTracer()
          .addTraceFor(Transport.newStorageClient(options).build(), "TraceDestination"));

  Storage.Objects.Get getRequest =
      Transport.newStorageClient(options).build().objects().get("testBucketId", "testObjectId");
  assertEquals("TraceDestination", getRequest.get("$trace"));

  Delete deleteRequest =
      GcpOptions.GcpTempLocationFactory.newCloudResourceManagerClient(
              options.as(CloudResourceManagerOptions.class))
          .build()
          .projects()
          .delete("testProjectId");
  assertNull(deleteRequest.get("$trace"));
}
Example #5
Source File: GcsUtil.java From beam with Apache License 2.0 | 6 votes |
private GcsUtil(
    Storage storageClient,
    HttpRequestInitializer httpRequestInitializer,
    ExecutorService executorService,
    Boolean shouldUseGrpc,
    @Nullable Integer uploadBufferSizeBytes) {
  this.storageClient = storageClient;
  this.httpRequestInitializer = httpRequestInitializer;
  this.uploadBufferSizeBytes = uploadBufferSizeBytes;
  this.executorService = executorService;
  this.maxBytesRewrittenPerCall = null;
  this.numRewriteTokensUsed = null;
  this.shouldUseGrpc = shouldUseGrpc;
  googleCloudStorageOptions =
      GoogleCloudStorageOptions.newBuilder()
          .setAppName("Beam")
          .setGrpcEnabled(shouldUseGrpc)
          .build();
  googleCloudStorage = new GoogleCloudStorageImpl(googleCloudStorageOptions, storageClient);
}
Example #6
Source File: GoogleCloudStorageReadChannelTest.java From hadoop-connectors with Apache License 2.0 | 6 votes |
@Test
public void eagerReadFileAtSpecificGeneration_succeeds_whenReadConsistencyStrict()
    throws IOException {
  long generation = 5L;
  MockHttpTransport transport =
      mockTransport(
          jsonDataResponse(
              newStorageObject(BUCKET_NAME, OBJECT_NAME).setGeneration(generation)));
  List<HttpRequest> requests = new ArrayList<>();
  Storage storage = new Storage(transport, JSON_FACTORY, requests::add);
  GoogleCloudStorageReadOptions options =
      GoogleCloudStorageReadOptions.builder().setFastFailOnNotFound(true).build();

  GoogleCloudStorageReadChannel readChannel = createReadChannel(storage, options, generation);

  assertThat(readChannel.generation()).isEqualTo(generation);
}
Example #7
Source File: GoogleStorageCacheManager.java From simpleci with MIT License | 6 votes |
@Override
public void uploadCache(JobOutputProcessor outputProcessor, String cachePath) {
  try {
    outputProcessor.output("Uploading cache file " + cacheFileName + " to google storage\n");
    Storage client = createClient();
    File uploadFile = new File(cachePath);
    InputStreamContent contentStream =
        new InputStreamContent(null, new FileInputStream(uploadFile));
    contentStream.setLength(uploadFile.length());

    StorageObject objectMetadata = new StorageObject().setName(cacheFileName);

    Storage.Objects.Insert insertRequest =
        client.objects().insert(settings.bucketName, objectMetadata, contentStream);
    insertRequest.execute();

    outputProcessor.output("Cache uploaded\n");
  } catch (GeneralSecurityException | IOException e) {
    outputProcessor.output("Error upload cache: " + e.getMessage() + "\n");
  }
}
Example #8
Source File: GcsUtilTest.java From beam with Apache License 2.0 | 6 votes |
@Test
public void testBucketAccessible() throws IOException {
  GcsOptions pipelineOptions = gcsOptionsWithTestCredential();
  GcsUtil gcsUtil = pipelineOptions.getGcsUtil();

  Storage mockStorage = Mockito.mock(Storage.class);
  gcsUtil.setStorageClient(mockStorage);

  Storage.Buckets mockStorageObjects = Mockito.mock(Storage.Buckets.class);
  Storage.Buckets.Get mockStorageGet = Mockito.mock(Storage.Buckets.Get.class);
  BackOff mockBackOff = BackOffAdapter.toGcpBackOff(FluentBackoff.DEFAULT.backoff());

  when(mockStorage.buckets()).thenReturn(mockStorageObjects);
  when(mockStorageObjects.get("testbucket")).thenReturn(mockStorageGet);
  when(mockStorageGet.execute())
      .thenThrow(new SocketTimeoutException("SocketException"))
      .thenReturn(new Bucket());

  assertTrue(
      gcsUtil.bucketAccessible(
          GcsPath.fromComponents("testbucket", "testobject"),
          mockBackOff,
          new FastNanoClockAndSleeper()));
}
Example #9
Source File: GoogleCloudStorageReadChannel.java From hadoop-connectors with Apache License 2.0 | 6 votes |
/**
 * Constructs an instance of GoogleCloudStorageReadChannel.
 *
 * @param gcs storage object instance
 * @param bucketName name of the bucket containing the object to read
 * @param objectName name of the object to read
 * @param requestHelper a ClientRequestHelper used to set any extra headers
 * @param readOptions fine-grained options specifying things like retry settings, buffering, etc.
 *     Must not be null.
 * @throws IOException on IO error
 */
public GoogleCloudStorageReadChannel(
    Storage gcs,
    String bucketName,
    String objectName,
    ApiErrorExtractor errorExtractor,
    ClientRequestHelper<StorageObject> requestHelper,
    @Nonnull GoogleCloudStorageReadOptions readOptions)
    throws IOException {
  this(
      gcs,
      new StorageResourceId(bucketName, objectName),
      errorExtractor,
      requestHelper,
      readOptions);
}
Example #10
Source File: RetryHttpRequestInitializerTest.java From beam with Apache License 2.0 | 6 votes |
@Test
public void testBasicOperation() throws IOException {
  when(mockLowLevelRequest.execute()).thenReturn(mockLowLevelResponse);
  when(mockLowLevelResponse.getStatusCode()).thenReturn(200);

  Storage.Buckets.Get result = storage.buckets().get("test");
  HttpResponse response = result.executeUnparsed();
  assertNotNull(response);

  verify(mockHttpResponseInterceptor).interceptResponse(any(HttpResponse.class));
  verify(mockLowLevelRequest, atLeastOnce()).addHeader(anyString(), anyString());
  verify(mockLowLevelRequest).setTimeout(anyInt(), anyInt());
  verify(mockLowLevelRequest).setWriteTimeout(anyInt());
  verify(mockLowLevelRequest).execute();
  verify(mockLowLevelResponse).getStatusCode();
  expectedLogs.verifyNotLogged("Request failed");
}
Example #11
Source File: StorageHandler.java From tech-gallery with Apache License 2.0 | 6 votes |
/**
 * Method to create the service, or get the service if it is already created.
 *
 * @author <a href="mailto:[email protected]"> João Felipe de Medeiros Moreira </a>
 * @since 13/10/2015
 *
 * @return the Storage service already created.
 *
 * @throws IOException in case of an IO problem.
 * @throws GeneralSecurityException in case of a security problem.
 */
private static Storage getService() throws IOException, GeneralSecurityException {
  logger.finest("###### Getting the storage service");
  if (null == storageService) {
    HttpTransport httpTransport = GoogleNetHttpTransport.newTrustedTransport();
    GoogleCredential credential = GoogleCredential.getApplicationDefault();
    // Depending on the environment that provides the default credentials (e.g. Compute Engine,
    // App Engine), the credentials may require us to specify the scopes we need explicitly.
    // Check for this case, and inject the Cloud Storage scope if required.
    if (credential.createScopedRequired()) {
      credential = credential.createScoped(StorageScopes.all());
    }
    storageService =
        new Storage.Builder(httpTransport, JSON_FACTORY, credential)
            .setApplicationName(APPLICATION_NAME)
            .build();
  }
  return storageService;
}
Example #12
Source File: RetryHttpRequestInitializerTest.java From beam with Apache License 2.0 | 6 votes |
/** Tests that a non-retriable error is not retried. */
@Test
public void testErrorCodeForbidden() throws IOException {
  when(mockLowLevelRequest.execute()).thenReturn(mockLowLevelResponse);
  when(mockLowLevelResponse.getStatusCode())
      .thenReturn(403) // Non-retryable error.
      .thenReturn(200); // Shouldn't happen.

  try {
    Storage.Buckets.Get result = storage.buckets().get("test");
    HttpResponse response = result.executeUnparsed();
    assertNotNull(response);
  } catch (HttpResponseException e) {
    assertThat(e.getMessage(), Matchers.containsString("403"));
  }

  verify(mockHttpResponseInterceptor).interceptResponse(any(HttpResponse.class));
  verify(mockLowLevelRequest, atLeastOnce()).addHeader(anyString(), anyString());
  verify(mockLowLevelRequest).setTimeout(anyInt(), anyInt());
  verify(mockLowLevelRequest).setWriteTimeout(anyInt());
  verify(mockLowLevelRequest).execute();
  verify(mockLowLevelResponse).getStatusCode();
  expectedLogs.verifyWarn("Request failed with code 403");
}
Example #13
Source File: GoogleCloudStorageReadChannel.java From hadoop-connectors with Apache License 2.0 | 6 votes |
/**
 * Constructs an instance of GoogleCloudStorageReadChannel.
 *
 * @param gcs storage object instance
 * @param resourceId contains information about a specific resource
 * @param requestHelper a ClientRequestHelper used to set any extra headers
 * @param readOptions fine-grained options specifying things like retry settings, buffering, etc.
 *     Must not be null.
 * @throws IOException on IO error
 */
public GoogleCloudStorageReadChannel(
    Storage gcs,
    StorageResourceId resourceId,
    ApiErrorExtractor errorExtractor,
    ClientRequestHelper<StorageObject> requestHelper,
    @Nonnull GoogleCloudStorageReadOptions readOptions)
    throws IOException {
  this.gcs = gcs;
  this.clientRequestHelper = requestHelper;
  this.errorExtractor = errorExtractor;
  this.readOptions = readOptions;
  this.resourceId = resourceId;

  // Initialize metadata if available.
  GoogleCloudStorageItemInfo info = getInitialMetadata();
  if (info != null) {
    initMetadata(info);
  }
}
Example #14
Source File: BatchHelperTest.java From hadoop-connectors with Apache License 2.0 | 6 votes |
@Test
public void newBatchHelper_throwsException_whenTotalRequestsZero() {
  Storage storage = new Storage(HTTP_TRANSPORT, JSON_FACTORY, httpRequestInitializer);

  IllegalArgumentException e =
      assertThrows(
          IllegalArgumentException.class,
          () ->
              batchFactory.newBatchHelper(
                  httpRequestInitializer,
                  storage,
                  /* maxRequestsPerBatch= */ 1,
                  /* totalRequests= */ 0,
                  /* maxThreads= */ 1));

  assertThat(e).hasMessageThat().startsWith("totalRequests should be greater than 0");
}
Example #15
Source File: GcsUtilTest.java From beam with Apache License 2.0 | 6 votes |
@Test
public void testMakeRewriteOps() throws IOException {
  GcsOptions gcsOptions = gcsOptionsWithTestCredential();
  GcsUtil gcsUtil = gcsOptions.getGcsUtil();

  LinkedList<RewriteOp> rewrites =
      gcsUtil.makeRewriteOps(makeStrings("s", 1), makeStrings("d", 1));
  assertEquals(1, rewrites.size());

  RewriteOp rewrite = rewrites.pop();
  assertTrue(rewrite.getReadyToEnqueue());
  Storage.Objects.Rewrite request = rewrite.rewriteRequest;
  assertNull(request.getMaxBytesRewrittenPerCall());
  assertEquals("bucket", request.getSourceBucket());
  assertEquals("s0", request.getSourceObject());
  assertEquals("bucket", request.getDestinationBucket());
  assertEquals("d0", request.getDestinationObject());
}
Example #16
Source File: GoogleApiDebugOptionsTest.java From beam with Apache License 2.0 | 6 votes |
@Test
public void testWithMultipleTraces() throws Exception {
  String[] args = new String[] {STORAGE_GET_AND_LIST_TRACE};
  GcsOptions options = PipelineOptionsFactory.fromArgs(args).as(GcsOptions.class);
  options.setGcpCredential(new TestCredential());

  assertNotNull(options.getGoogleApiTrace());

  Storage.Objects.Get getRequest =
      Transport.newStorageClient(options).build().objects().get("testBucketId", "testObjectId");
  assertEquals("GetTraceDestination", getRequest.get("$trace"));

  Storage.Objects.List listRequest =
      Transport.newStorageClient(options).build().objects().list("testProjectId");
  assertEquals("ListTraceDestination", listRequest.get("$trace"));
}
Example #17
Source File: GoogleCloudStorageReadChannelTest.java From hadoop-connectors with Apache License 2.0 | 6 votes |
@Test
public void initMetadata_throwsException_whenReadConsistencyEnabledAndGenerationIsNull()
    throws IOException {
  Storage storage = new Storage(HTTP_TRANSPORT, JSON_FACTORY, r -> {});
  GoogleCloudStorageReadOptions options = newLazyReadOptionsBuilder().build();
  GoogleCloudStorageReadChannel readChannel = createReadChannel(storage, options);

  IllegalStateException e =
      assertThrows(
          IllegalStateException.class,
          () ->
              readChannel.initMetadata(
                  "gzip", /* sizeFromMetadata= */ 1, UNKNOWN_GENERATION_ID));

  assertThat(e).hasMessageThat().contains("Generation parameter of -1 is invalid");
}
Example #18
Source File: GoogleCloudStorageReadChannelTest.java From hadoop-connectors with Apache License 2.0 | 6 votes |
@Test
public void lazyInitGeneration_succeeds_whenReadConsistencyStrict() throws IOException {
  MockHttpTransport transport =
      mockTransport(
          jsonDataResponse(newStorageObject(BUCKET_NAME, OBJECT_NAME).setGeneration(5L)));
  List<HttpRequest> requests = new ArrayList<>();
  Storage storage = new Storage(transport, JSON_FACTORY, requests::add);
  GoogleCloudStorageReadOptions options =
      GoogleCloudStorageReadOptions.builder().setFastFailOnNotFound(false).build();

  GoogleCloudStorageReadChannel readChannel = createReadChannel(storage, options);
  // initialize metadata
  readChannel.size();

  assertThat(readChannel.generation()).isEqualTo(5L);
}
Example #19
Source File: GoogleCloudStorageReadChannelTest.java From hadoop-connectors with Apache License 2.0 | 6 votes |
@Test
public void initGeneration_hasGenerationId() throws IOException {
  StorageObject storageObject = newStorageObject(BUCKET_NAME, OBJECT_NAME);
  MockHttpTransport transport = mockTransport(jsonDataResponse(storageObject));
  List<HttpRequest> requests = new ArrayList<>();
  Storage storage = new Storage(transport, JSON_FACTORY, requests::add);
  GoogleCloudStorageReadOptions options =
      GoogleCloudStorageReadOptions.builder().setFastFailOnNotFound(false).build();

  GoogleCloudStorageReadChannel readChannel = createReadChannel(storage, options);
  // initialize metadata
  readChannel.size();

  assertThat(readChannel.generation()).isEqualTo(storageObject.getGeneration());
}
Example #20
Source File: GoogleStorage.java From halyard with Apache License 2.0 | 6 votes |
private static Bucket createBucket(
    Storage storage, String projectId, String locationId, String bucketId) {
  try {
    Bucket bucket =
        new Bucket()
            .setLocation(locationId)
            .setName(bucketId)
            .setVersioning(new Bucket.Versioning().setEnabled(true));

    if (!StringUtils.isEmpty(locationId)) {
      bucket.setLocation(locationId);
    }

    return storage.buckets().insert(projectId, bucket).execute();
  } catch (IOException e) {
    throw new RuntimeException("Unable to create bucket", e);
  }
}
Example #21
Source File: GcsSecretEngine.java From kork with Apache License 2.0 | 6 votes |
@Override
protected InputStream downloadRemoteFile(EncryptedSecret encryptedSecret) {
  String bucket = encryptedSecret.getParams().get(STORAGE_BUCKET);
  String objName = encryptedSecret.getParams().get(STORAGE_FILE_URI);

  log.info("Getting contents of object {} from bucket {}", objName, bucket);

  try {
    Storage storage = getStorage();
    return storage.objects().get(bucket, objName).executeMediaAsInputStream();
  } catch (IOException e) {
    throw new SecretException(
        String.format(
            "Error reading contents of GCS. Bucket: %s, Object: %s.\nError: %s",
            bucket, objName, e.toString()));
  }
}
Example #22
Source File: StorageFactory.java From dlp-dataflow-deidentification with Apache License 2.0 | 6 votes |
public static InputStream downloadObject(
    Storage storage,
    String bucketName,
    String objectName,
    String base64CseKey,
    String base64CseKeyHash)
    throws Exception {

  // Set the CSEK headers
  final HttpHeaders httpHeaders = new HttpHeaders();
  httpHeaders.set("x-goog-encryption-algorithm", "AES256");
  httpHeaders.set("x-goog-encryption-key", base64CseKey);
  httpHeaders.set("x-goog-encryption-key-sha256", base64CseKeyHash);

  Storage.Objects.Get getObject = storage.objects().get(bucketName, objectName);
  getObject.setRequestHeaders(httpHeaders);

  try {
    return getObject.executeMediaAsInputStream();
  } catch (GoogleJsonResponseException e) {
    LOG.info("Error downloading: " + e.getContent());
    System.exit(1);
    return null;
  }
}
Example #23
Source File: GcsUtilTest.java From beam with Apache License 2.0 | 5 votes |
@Test
public void testMakeRewriteOpsWithOptions() throws IOException {
  GcsOptions gcsOptions = gcsOptionsWithTestCredential();
  GcsUtil gcsUtil = gcsOptions.getGcsUtil();
  gcsUtil.maxBytesRewrittenPerCall = 1337L;

  LinkedList<RewriteOp> rewrites =
      gcsUtil.makeRewriteOps(makeStrings("s", 1), makeStrings("d", 1));
  assertEquals(1, rewrites.size());

  RewriteOp rewrite = rewrites.pop();
  assertTrue(rewrite.getReadyToEnqueue());
  Storage.Objects.Rewrite request = rewrite.rewriteRequest;
  assertEquals(Long.valueOf(1337L), request.getMaxBytesRewrittenPerCall());
}
Example #24
Source File: GoogleStorage.java From halyard with Apache License 2.0 | 5 votes |
private Storage buildCredentials(GoogleSecureStorageProperties properties) {
  HttpTransport transport = new NetHttpTransport();
  JsonFactory jsonFactory = new JacksonFactory();

  GoogleCredentials credentials;
  try {
    credentials = loadStorageCredential(properties.getJsonPath());
  } catch (IOException e) {
    throw new RuntimeException("Unable to load KMS credentials: " + e.getMessage(), e);
  }

  return new Storage.Builder(transport, jsonFactory, new HttpCredentialsAdapter(credentials))
      .setApplicationName("halyard")
      .build();
}
Example #25
Source File: GoogleCloudStorageImpl.java From hadoop-connectors with Apache License 2.0 | 5 votes |
@Override
public ListPage<GoogleCloudStorageItemInfo> listObjectInfoPage(
    String bucketName, String objectNamePrefix, String delimiter, String pageToken)
    throws IOException {
  logger.atFine().log(
      "listObjectInfoPage(%s, %s, %s, %s)", bucketName, objectNamePrefix, delimiter, pageToken);

  Storage.Objects.List listObject =
      createListRequest(
          bucketName,
          objectNamePrefix,
          delimiter,
          /* includeTrailingDelimiter= */ true,
          MAX_RESULTS_UNLIMITED);
  if (pageToken != null) {
    logger.atFine().log("listObjectInfoPage: next page %s", pageToken);
    listObject.setPageToken(pageToken);
  }

  // Helper will handle going through pages of list results and accumulating them.
  List<StorageObject> listedObjects = new ArrayList<>();
  List<String> listedPrefixes = new ArrayList<>();
  String nextPageToken =
      listStorageObjectsAndPrefixesPage(
          listObject, MAX_RESULTS_UNLIMITED, listedObjects, listedPrefixes);

  // For the listedObjects, we simply parse each item into a GoogleCloudStorageItemInfo without
  // further work.
  List<GoogleCloudStorageItemInfo> objectInfos = new ArrayList<>(listedObjects.size());
  for (StorageObject obj : listedObjects) {
    objectInfos.add(
        createItemInfoForStorageObject(new StorageResourceId(bucketName, obj.getName()), obj));
  }

  if (!listedPrefixes.isEmpty()) {
    handlePrefixes(bucketName, listedPrefixes, objectInfos);
  }

  return new ListPage<>(objectInfos, nextPageToken);
}
Example #26
Source File: GcsUtilTest.java From beam with Apache License 2.0 | 5 votes |
@Test
public void testCreateBucketAccessErrors() throws IOException {
  GcsOptions pipelineOptions = gcsOptionsWithTestCredential();
  GcsUtil gcsUtil = pipelineOptions.getGcsUtil();

  Storage mockStorage = Mockito.mock(Storage.class);
  gcsUtil.setStorageClient(mockStorage);

  Storage.Buckets mockStorageObjects = Mockito.mock(Storage.Buckets.class);
  Storage.Buckets.Insert mockStorageInsert = Mockito.mock(Storage.Buckets.Insert.class);
  BackOff mockBackOff = BackOffAdapter.toGcpBackOff(FluentBackoff.DEFAULT.backoff());
  GoogleJsonResponseException expectedException =
      googleJsonResponseException(
          HttpStatusCodes.STATUS_CODE_FORBIDDEN,
          "Waves hand mysteriously",
          "These aren't the buckets you're looking for");

  when(mockStorage.buckets()).thenReturn(mockStorageObjects);
  when(mockStorageObjects.insert(any(String.class), any(Bucket.class)))
      .thenReturn(mockStorageInsert);
  when(mockStorageInsert.execute()).thenThrow(expectedException);

  thrown.expect(AccessDeniedException.class);

  gcsUtil.createBucket("a", new Bucket(), mockBackOff, new FastNanoClockAndSleeper());
}
Example #27
Source File: GoogleCloudStorageImpl.java From hadoop-connectors with Apache License 2.0 | 5 votes |
/**
 * Helper for both listObjectNames and listObjectInfo that executes the actual API calls to get
 * paginated lists, accumulating the StorageObjects and String prefixes into the params {@code
 * listedObjects} and {@code listedPrefixes}.
 *
 * @param bucketName bucket name
 * @param objectNamePrefix object name prefix or null if all objects in the bucket are desired
 * @param delimiter delimiter to use (typically "/"), otherwise null
 * @param includeTrailingDelimiter whether to include prefix objects into the {@code
 *     listedObjects}
 * @param maxResults maximum number of results to return (total of both {@code listedObjects} and
 *     {@code listedPrefixes}), unlimited if negative or zero
 * @param listedObjects output parameter into which retrieved StorageObjects will be added
 * @param listedPrefixes output parameter into which retrieved prefixes will be added
 */
private void listStorageObjectsAndPrefixes(
    String bucketName,
    String objectNamePrefix,
    String delimiter,
    boolean includeTrailingDelimiter,
    long maxResults,
    List<StorageObject> listedObjects,
    List<String> listedPrefixes)
    throws IOException {
  logger.atFine().log(
      "listStorageObjectsAndPrefixes(%s, %s, %s, %s, %d)",
      bucketName, objectNamePrefix, delimiter, includeTrailingDelimiter, maxResults);
  checkArgument(
      listedObjects != null && listedObjects.isEmpty(),
      "Must provide a non-null empty container for listedObjects.");
  checkArgument(
      listedPrefixes != null && listedPrefixes.isEmpty(),
      "Must provide a non-null empty container for listedPrefixes.");

  Storage.Objects.List listObject =
      createListRequest(
          bucketName, objectNamePrefix, delimiter, includeTrailingDelimiter, maxResults);

  String pageToken = null;
  do {
    if (pageToken != null) {
      logger.atFine().log("listStorageObjectsAndPrefixes: next page %s", pageToken);
      listObject.setPageToken(pageToken);
    }
    pageToken =
        listStorageObjectsAndPrefixesPage(listObject, maxResults, listedObjects, listedPrefixes);
  } while (pageToken != null
      && getMaxRemainingResults(maxResults, listedPrefixes, listedObjects) > 0);
}
Example #28
Source File: GCSHelper.java From dataflow-java with Apache License 2.0 | 5 votes |
/**
 * @param bucket name of the bucket containing the file
 * @param name name of the file we're interested in
 * @return size of the file, in bytes
 * @throws IOException
 */
public long getFileSize(String bucket, String name) throws IOException {
  Storage.Objects.Get getObject = storage.objects().get(bucket, name);
  StorageObject object = getObject.execute();
  BigInteger size = object.getSize();
  if (size.compareTo(BigInteger.valueOf(Long.MAX_VALUE)) > 0) {
    throw new RuntimeException("File size is too big for a long!");
  }
  return size.longValue();
}
Example #29
Source File: GcsUtilTest.java From beam with Apache License 2.0 | 5 votes |
@Test
public void testAccessDeniedObjectThrowsIOException() throws IOException {
  GcsOptions pipelineOptions = gcsOptionsWithTestCredential();
  GcsUtil gcsUtil = pipelineOptions.getGcsUtil();

  Storage mockStorage = Mockito.mock(Storage.class);
  gcsUtil.setStorageClient(mockStorage);

  Storage.Objects mockStorageObjects = Mockito.mock(Storage.Objects.class);
  Storage.Objects.Get mockStorageGet = Mockito.mock(Storage.Objects.Get.class);

  GcsPath pattern = GcsPath.fromUri("gs://testbucket/testdirectory/accessdeniedfile");
  GoogleJsonResponseException expectedException =
      googleJsonResponseException(
          HttpStatusCodes.STATUS_CODE_FORBIDDEN,
          "Waves hand mysteriously",
          "These aren't the buckets you're looking for");

  when(mockStorage.objects()).thenReturn(mockStorageObjects);
  when(mockStorageObjects.get(pattern.getBucket(), pattern.getObject()))
      .thenReturn(mockStorageGet);
  when(mockStorageGet.execute()).thenThrow(expectedException);

  thrown.expect(IOException.class);
  thrown.expectMessage("Unable to get the file object for path");

  gcsUtil.expand(pattern);
}
Example #30
Source File: GcsStorageService.java From front50 with Apache License 2.0 | 5 votes |
private void purgeOldVersions(String path) throws Exception {
  Storage.Objects.List listObjects =
      obj_api.list(bucketName).setPrefix(path).setVersions(true);

  com.google.api.services.storage.model.Objects objects;

  // Keep the 0th object on the first page (which is current).
  List<Long> generations = new ArrayList(32);
  do {
    objects = timeExecute(listTimer, listObjects);
    List<StorageObject> items = objects.getItems();
    if (items != null) {
      int n = items.size();
      while (--n >= 0) {
        generations.add(items.get(n).getGeneration());
      }
    }
    listObjects.setPageToken(objects.getNextPageToken());
  } while (objects.getNextPageToken() != null);

  for (long generation : generations) {
    if (generation == generations.get(0)) {
      continue;
    }
    log.debug("Remove {} generation {}", value("path", path), value("generation", generation));
    timeExecute(purgeTimer, obj_api.delete(bucketName, path).setGeneration(generation));
  }
}