Java Code Examples for com.google.api.gax.paging.Page#iterateAll()
The following examples show how to use com.google.api.gax.paging.Page#iterateAll().
Each example is taken from an open-source project; the source file, originating project, and license are noted above each snippet.
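Before the examples, here is a minimal sketch of the pattern they all share: a list call on a Google Cloud client returns a Page<T>, and Page#iterateAll() transparently fetches subsequent pages as the iteration advances, so the loop visits every element rather than only the first page. The sketch assumes the google-cloud-storage client library and Application Default Credentials; the bucket name "my-example-bucket" is a placeholder.

import com.google.api.gax.paging.Page;
import com.google.cloud.storage.Blob;
import com.google.cloud.storage.Storage;
import com.google.cloud.storage.StorageOptions;

public class IterateAllSketch {

  public static void main(String[] args) {
    // Assumes Application Default Credentials are available in the environment.
    Storage storage = StorageOptions.getDefaultInstance().getService();

    // "my-example-bucket" is a placeholder bucket name.
    Page<Blob> blobs = storage.list("my-example-bucket");

    // iterateAll() lazily requests the next page when the current one is exhausted,
    // so this loop sees every object in the bucket, not just the first page of results.
    for (Blob blob : blobs.iterateAll()) {
      System.out.println(blob.getName());
    }
  }
}

The same pattern appears throughout the examples below with other paged clients (BigQuery, Logging, Compute), differing only in the element type and the list call that produces the Page.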
Example 1
Source File: GCPRestorer.java From cassandra-backup with Apache License 2.0 | 6 votes |
@Override
public Path downloadFileToDir(final Path destinationDir, final Path remotePrefix, final Predicate<String> keyFilter) throws Exception {
  final Page<Blob> blobs = list(request.storageLocation.bucket, remotePrefix.toString());

  // Collect every blob under the remote prefix whose key satisfies the filter.
  final List<Blob> blobItems = new ArrayList<>();
  for (final Blob blob : blobs.iterateAll()) {
    if (keyFilter.test(blob.getName())) {
      blobItems.add(blob);
    }
  }

  // Exactly one matching key is expected; anything else is an error.
  if (blobItems.size() != 1) {
    throw new IllegalStateException(format("There is not one key which satisfies key filter: %s", blobItems.toString()));
  }

  // Download the single matching blob into the destination directory, keeping its file name.
  final String blobItemPath = blobItems.get(0).getName();
  final String fileName = blobItemPath.split("/")[blobItemPath.split("/").length - 1];
  final Path destination = destinationDir.resolve(fileName);

  downloadFile(destination, objectKeyToRemoteReference(remotePrefix.resolve(fileName)));

  return destination;
}
Example 2
Source File: ITStorageHmacKeySnippets.java From google-cloud-java with Apache License 2.0 | 6 votes |
@Test
public void testListHmacKeys() {
  // Create 2 HMAC keys
  storage.createHmacKey(
      ServiceAccount.of(HMAC_KEY_TEST_SERVICE_ACCOUNT),
      Storage.CreateHmacKeyOption.projectId(PROJECT_ID));
  storage.createHmacKey(
      ServiceAccount.of(HMAC_KEY_TEST_SERVICE_ACCOUNT),
      Storage.CreateHmacKeyOption.projectId(PROJECT_ID));

  Page<HmacKeyMetadata> page = storageSnippets.listHmacKeys(PROJECT_ID);
  int count = 0;
  for (HmacKeyMetadata metadata : page.iterateAll()) {
    if (metadata.getServiceAccount().getEmail().equals(HMAC_KEY_TEST_SERVICE_ACCOUNT)) {
      count++;
    }
  }
  assertEquals(2, count);
}
Example 3
Source File: CreateAndListMetrics.java From google-cloud-java with Apache License 2.0 | 6 votes |
public static void main(String... args) throws Exception {
  // Create a service object
  // Credentials are inferred from the environment
  try (Logging logging = LoggingOptions.getDefaultInstance().getService()) {

    // Create a metric
    MetricInfo metricInfo =
        MetricInfo.newBuilder("test-metric", "severity >= ERROR")
            .setDescription("Log entries with severity higher or equal to ERROR")
            .build();
    logging.create(metricInfo);

    // List metrics
    Page<Metric> metrics = logging.listMetrics();
    for (Metric metric : metrics.iterateAll()) {
      System.out.println(metric);
    }
  }
}
Example 4
Source File: DetectIT.java From java-docs-samples with Apache License 2.0 | 6 votes |
@Test
public void testAsyncBatchAnnotateImagesGcs() throws Exception {
  // Act
  AsyncBatchAnnotateImagesGcs.asyncBatchAnnotateImagesGcs(
      "gs://cloud-samples-data/vision/label/wakeupcat.jpg",
      "gs://" + OUTPUT_BUCKET + "/" + OUTPUT_PREFIX + "/");

  // Assert
  String got = bout.toString();
  assertThat(got).contains("red:");

  Storage storage = StorageOptions.getDefaultInstance().getService();
  Page<Blob> blobs =
      storage.list(
          OUTPUT_BUCKET,
          BlobListOption.currentDirectory(),
          BlobListOption.prefix(OUTPUT_PREFIX + "/"));
  for (Blob blob : blobs.iterateAll()) {
    blob.delete();
  }
}
Example 5
Source File: GcsPinotFS.java From incubator-pinot with Apache License 2.0 | 6 votes |
/**
 * Determines if a path is a directory that is not empty
 * @param uri The path under the gcs bucket
 * @return {@code true} if the path is a non-empty directory,
 * {@code false} otherwise
 */
private boolean isEmptyDirectory(URI uri) throws IOException {
  if (!isDirectory(uri)) {
    return false;
  }
  String prefix = normalizeToDirectoryPrefix(uri);
  boolean isEmpty = true;
  Page<Blob> page;
  if (prefix.equals(DELIMITER)) {
    page = getBucket(uri).list();
  } else {
    page = getBucket(uri).list(Storage.BlobListOption.prefix(prefix));
  }
  for (Blob blob : page.iterateAll()) {
    if (blob.getName().equals(prefix)) {
      continue;
    } else {
      isEmpty = false;
      break;
    }
  }
  return isEmpty;
}
Example 6
Source File: BigQuerySnippets.java From google-cloud-java with Apache License 2.0 | 5 votes |
/** Example of listing jobs, specifying the page size. */
// [TARGET listJobs(JobListOption...)]
public Page<Job> listJobs() {
  // [START bigquery_list_jobs]
  Page<Job> jobs = bigquery.listJobs(JobListOption.pageSize(100));
  for (Job job : jobs.iterateAll()) {
    // do something with the job
  }
  // [END bigquery_list_jobs]
  return jobs;
}
Example 7
Source File: BigQuerySnippets.java From google-cloud-java with Apache License 2.0 | 5 votes |
/** Example of listing datasets, specifying the page size. */
// [TARGET listDatasets(DatasetListOption...)]
public Page<Dataset> listDatasets() {
  // [START bigquery_list_datasets]
  // List datasets in the default project
  Page<Dataset> datasets = bigquery.listDatasets(DatasetListOption.pageSize(100));
  for (Dataset dataset : datasets.iterateAll()) {
    // do something with the dataset
  }
  // [END bigquery_list_datasets]
  return datasets;
}
Example 8
Source File: ITTranslateSnippetsBeta.java From google-cloud-java with Apache License 2.0 | 5 votes |
private void deleteDirectory(Storage storage, Page<Blob> blobs) {
  for (Blob blob : blobs.iterateAll()) {
    System.out.println(blob.getBlobId());
    if (!blob.delete()) {
      // If the blob cannot be deleted directly (e.g. it is a directory placeholder),
      // list its contents and delete them recursively.
      Page<Blob> subBlobs =
          storage.list(
              projectId,
              BlobListOption.currentDirectory(),
              BlobListOption.prefix(blob.getName()));
      deleteDirectory(storage, subBlobs);
    }
  }
}
Example 9
Source File: AsyncBatchAnnotateImagesTest.java From java-docs-samples with Apache License 2.0 | 5 votes |
@After
public void tearDown() {
  System.setOut(null);
  Storage storage = StorageOptions.getDefaultInstance().getService();
  Page<Blob> blobs =
      storage.list(
          PROJECT_ID,
          Storage.BlobListOption.currentDirectory(),
          Storage.BlobListOption.prefix(PREFIX));
  for (Blob blob : blobs.iterateAll()) {
    blob.delete();
  }
}
Example 10
Source File: ComputeExample.java From google-cloud-java with Apache License 2.0 | 5 votes |
@Override
public void run(Compute compute, RegionId region) {
  Page<Subnetwork> subnetworkPage;
  if (region != null) {
    subnetworkPage = compute.listSubnetworks(region.getRegion());
  } else {
    subnetworkPage = compute.listSubnetworks();
  }
  for (Subnetwork subnetwork : subnetworkPage.iterateAll()) {
    System.out.println(subnetwork);
  }
}
Example 11
Source File: BatchTranslateTextWithGlossaryTests.java From java-docs-samples with Apache License 2.0 | 5 votes |
private static void deleteDirectory(Storage storage, Page<Blob> blobs) {
  for (Blob blob : blobs.iterateAll()) {
    System.out.println(blob.getBlobId());
    if (!blob.delete()) {
      // If the blob cannot be deleted directly (e.g. it is a directory placeholder),
      // list its contents and delete them recursively.
      Page<Blob> subBlobs =
          storage.list(
              PROJECT_ID,
              Storage.BlobListOption.currentDirectory(),
              Storage.BlobListOption.prefix(blob.getName()));
      deleteDirectory(storage, subBlobs);
    }
  }
}
Example 12
Source File: ComputeExample.java From google-cloud-java with Apache License 2.0 | 5 votes |
@Override
public void run(Compute compute, RegionId region) {
  Page<Address> addressPage;
  if (region != null) {
    addressPage = compute.listRegionAddresses(region.getRegion());
  } else {
    addressPage = compute.listAddresses();
  }
  for (Address address : addressPage.iterateAll()) {
    System.out.println(address);
  }
}
Example 13
Source File: TableSnippets.java From google-cloud-java with Apache License 2.0 | 5 votes |
/** Example of listing rows in the table. */
// [TARGET list(TableDataListOption...)]
public Page<FieldValueList> list() {
  // [START ]
  // This example reads the result 100 rows per RPC call. If there's no need to limit the number,
  // simply omit the option.
  Page<FieldValueList> page = table.list(TableDataListOption.pageSize(100));
  for (FieldValueList row : page.iterateAll()) {
    // do something with the row
  }
  // [END ]
  return page;
}
Example 14
Source File: AuthExample.java From java-docs-samples with Apache License 2.0 | 5 votes |
static void authAppEngineStandard() throws IOException {
  // Explicitly request service account credentials from the app engine standard instance.
  GoogleCredentials credentials = AppEngineCredentials.getApplicationDefault();
  Storage storage = StorageOptions.newBuilder().setCredentials(credentials).build().getService();

  System.out.println("Buckets:");
  Page<Bucket> buckets = storage.list();
  for (Bucket bucket : buckets.iterateAll()) {
    System.out.println(bucket.toString());
  }
}
Example 15
Source File: AuthExample.java From java-docs-samples with Apache License 2.0 | 5 votes |
static void authCompute() {
  // Explicitly request service account credentials from the compute engine instance.
  GoogleCredentials credentials = ComputeEngineCredentials.create();
  Storage storage = StorageOptions.newBuilder().setCredentials(credentials).build().getService();

  System.out.println("Buckets:");
  Page<Bucket> buckets = storage.list();
  for (Bucket bucket : buckets.iterateAll()) {
    System.out.println(bucket.toString());
  }
}
Example 16
Source File: AuthExample.java From java-docs-samples with Apache License 2.0 | 5 votes |
static void authExplicit(String jsonPath) throws IOException {
  // You can specify a credential file by providing a path to GoogleCredentials.
  // Otherwise credentials are read from the GOOGLE_APPLICATION_CREDENTIALS environment variable.
  GoogleCredentials credentials = GoogleCredentials.fromStream(new FileInputStream(jsonPath))
      .createScoped(Lists.newArrayList("https://www.googleapis.com/auth/cloud-platform"));
  Storage storage = StorageOptions.newBuilder().setCredentials(credentials).build().getService();

  System.out.println("Buckets:");
  Page<Bucket> buckets = storage.list();
  for (Bucket bucket : buckets.iterateAll()) {
    System.out.println(bucket.toString());
  }
}
Example 17
Source File: GcsSampleApplicationTests.java From spring-cloud-gcp with Apache License 2.0 | 5 votes |
@Before
@After
public void cleanupCloudStorage() {
  Page<Blob> blobs = this.storage.list(this.bucketName);
  for (Blob blob : blobs.iterateAll()) {
    blob.delete();
  }
}
Example 18
Source File: LoggingSnippets.java From google-cloud-java with Apache License 2.0 | 5 votes |
/** Example of listing sinks, specifying the page size. */
// [TARGET listSinks(ListOption...)]
public Page<Sink> listSinks() {
  // [START logging_list_sinks]
  Page<Sink> sinks = logging.listSinks(ListOption.pageSize(100));
  for (Sink sink : sinks.iterateAll()) {
    // do something with the sink
  }
  // [END logging_list_sinks]
  return sinks;
}
Example 19
Source File: LoggingSnippets.java From google-cloud-java with Apache License 2.0 | 5 votes |
/** Example of listing metrics, specifying the page size. */
// [TARGET listMetrics(ListOption...)]
public Page<Metric> listMetrics() {
  // [START listMetrics]
  Page<Metric> metrics = logging.listMetrics(ListOption.pageSize(100));
  for (Metric metric : metrics.iterateAll()) {
    // do something with the metric
  }
  // [END listMetrics]
  return metrics;
}
Example 20
Source File: ListObjectsWithPrefix.java From google-cloud-java with Apache License 2.0 | 4 votes |
public static void listObjectsWithPrefix(
    String projectId, String bucketName, String directoryPrefix) {
  // The ID of your GCP project
  // String projectId = "your-project-id";

  // The ID of your GCS bucket
  // String bucketName = "your-unique-bucket-name";

  // The directory prefix to search for
  // String directoryPrefix = "myDirectory/"

  Storage storage = StorageOptions.newBuilder().setProjectId(projectId).build().getService();
  Bucket bucket = storage.get(bucketName);

  /**
   * Using the Storage.BlobListOption.currentDirectory() option here causes the results to display
   * in a "directory-like" mode, showing what objects are in the directory you've specified, as
   * well as what other directories exist in that directory. For example, given these blobs:
   *
   * <p>a/1.txt a/b/2.txt a/b/3.txt
   *
   * <p>If you specify prefix = "a/" and don't use Storage.BlobListOption.currentDirectory(),
   * you'll get back:
   *
   * <p>a/1.txt a/b/2.txt a/b/3.txt
   *
   * <p>However, if you specify prefix = "a/" and do use
   * Storage.BlobListOption.currentDirectory(), you'll get back:
   *
   * <p>a/1.txt a/b/
   *
   * <p>Because a/1.txt is the only file in the a/ directory and a/b/ is a directory inside the
   * /a/ directory.
   */
  Page<Blob> blobs =
      bucket.list(
          Storage.BlobListOption.prefix(directoryPrefix),
          Storage.BlobListOption.currentDirectory());

  for (Blob blob : blobs.iterateAll()) {
    System.out.println(blob.getName());
  }
}