com.amazonaws.services.s3.model.ObjectListing Java Examples
The following examples show how to use
com.amazonaws.services.s3.model.ObjectListing.
You can vote up the examples you like or vote down the ones you don't,
and go to the original project or source file by following the links above each example. You can also check out the related API usage on the sidebar.
Example #1
Source File: S3Source.java From sequenceiq-samples with Apache License 2.0 | 7 votes |
/**
 * Starts the source: lists every object in the configured bucket and streams
 * each object's content into the Flume channel, line by line.
 */
@Override
protected void doStart() {
    AWSCredentials myCredentials = new BasicAWSCredentials(accessKey, secretKey);
    AmazonS3 s3Client = new AmazonS3Client(myCredentials);
    ListObjectsRequest listObjectsRequest = new ListObjectsRequest().withBucketName(bucket);
    ObjectListing objectListing = s3Client.listObjects(listObjectsRequest);
    ChannelProcessor channelProcessor = getChannelProcessor();
    while (true) {
        for (S3ObjectSummary s3ObjectSummary : objectListing.getObjectSummaries()) {
            String file = s3ObjectSummary.getKey();
            LOGGER.info("Read the content of {}", file);
            GetObjectRequest objectRequest = new GetObjectRequest(bucket, file);
            // BUG FIX: try-with-resources closes the S3Object; the original never
            // closed it, leaking one pooled HTTP connection per object read.
            try (S3Object objectPortion = s3Client.getObject(objectRequest)) {
                long startTime = System.currentTimeMillis();
                processLines(channelProcessor, objectPortion.getObjectContent());
                LOGGER.info("Processing of {} took {} ms", file, System.currentTimeMillis() - startTime);
            } catch (IOException e) {
                // Best-effort: a bad object must not stop the whole source.
                LOGGER.warn("Cannot process the {}, skipping", file, e);
            }
        }
        // BUG FIX: the original read only the first listing page (max 1000 keys);
        // keep paging until S3 reports the listing is complete.
        if (!objectListing.isTruncated()) {
            break;
        }
        objectListing = s3Client.listNextBatchOfObjects(objectListing);
    }
}
Example #2
Source File: InfectedFileCache.java From aws-s3-virusscan with Apache License 2.0 | 6 votes |
/**
 * Downloads every object from the configured infected-files bucket.
 *
 * @return the infected files (key, content, content type); empty when the
 *         bucket is not configured.
 */
public List<InfectedFile> getFiles() {
    final List<InfectedFile> files = new ArrayList<>();
    if (Config.has(Config.Key.INFECTED_FILES_BUCKET_NAME)) {
        final AmazonS3 s3local = AmazonS3ClientBuilder.standard()
                .withCredentials(this.credentialsProvider)
                .withRegion(Config.get(Config.Key.INFECTED_FILES_BUCKET_REGION))
                .build();
        ObjectListing objectListing = s3local.listObjects(Config.get(Config.Key.INFECTED_FILES_BUCKET_NAME));
        while (true) {
            objectListing.getObjectSummaries().forEach((summary) -> {
                // BUG FIX: try-with-resources closes the S3Object; the original
                // leaked the underlying HTTP stream for every file fetched.
                try (final S3Object object = s3local.getObject(summary.getBucketName(), summary.getKey())) {
                    final byte[] content = IOUtils.toByteArray(object.getObjectContent());
                    files.add(new InfectedFile(summary.getKey(), content, object.getObjectMetadata().getContentType()));
                } catch (final IOException e) {
                    throw new RuntimeException(e);
                }
            });
            if (objectListing.isTruncated()) {
                objectListing = s3local.listNextBatchOfObjects(objectListing);
            } else {
                break;
            }
        }
    }
    return files;
}
Example #3
Source File: ObjectsOnS3.java From cantor with BSD 3-Clause "New" or "Revised" License | 6 votes |
/**
 * Counts the objects in the bucket backing the given namespace.
 *
 * @return total object count, or -1 when the bucket does not exist.
 */
private int doSize(final String namespace) {
    final String bucket = toBucketName(namespace);
    if (!this.s3Client.doesBucketExistV2(bucket)) {
        return -1;
    }
    int totalSize = 0;
    ObjectListing listing = this.s3Client.listObjects(bucket);
    while (true) {
        totalSize += listing.getObjectSummaries().size();
        logger.debug("got {} keys from {}", listing.getObjectSummaries().size(), listing);
        // BUG FIX: the original fetched the next batch *before* testing
        // isTruncated(), so the final page of a multi-page listing was never
        // counted (and a single-page listing cost a useless extra API call).
        if (!listing.isTruncated()) {
            break;
        }
        listing = this.s3Client.listNextBatchOfObjects(listing);
    }
    return totalSize;
}
Example #4
Source File: S3NFileObjectTest.java From hop with Apache License 2.0 | 6 votes |
// Verifies that attaching to a missing S3 key maps "not found" errors to an
// IMAGINARY VFS file type instead of letting the exception escape to the caller.
@Test public void testHandleAttachExceptionFileNotFound() throws FileSystemException {
  AmazonS3Exception notFoundException = new AmazonS3Exception( "404 Not Found" );
  notFoundException.setErrorCode( "404 Not Found" );
  AmazonS3Exception noSuchKeyException = new AmazonS3Exception( "NoSuchKey" );
  noSuchKeyException.setErrorCode( "NoSuchKey" );

  //test the case where the file is not found; no exception should be thrown
  when( s3ServiceMock.getObject( BUCKET_NAME, origKey ) ).thenThrow( notFoundException );
  when( s3ServiceMock.getObject( BUCKET_NAME, origKey + "/" ) ).thenThrow( noSuchKeyException );

  // The fallback listing must come back empty so the key is treated as absent
  // rather than as a folder.
  childObjectListing = mock( ObjectListing.class );
  when( childObjectListing.getObjectSummaries() ).thenReturn( new ArrayList<>() );
  when( childObjectListing.getCommonPrefixes() ).thenReturn( new ArrayList<>() );
  when( s3ServiceMock.listObjects( any( ListObjectsRequest.class ) ) ).thenReturn( childObjectListing );

  try {
    s3FileObjectFileSpy.doAttach();
  } catch ( Exception e ) {
    fail( "Caught exception " + e.getMessage() );
  }
  // VFS convention: a file that does not exist is reported as IMAGINARY.
  assertEquals( FileType.IMAGINARY, s3FileObjectFileSpy.getType() );
}
Example #5
Source File: S3FileInput.java From crate with Apache License 2.0 | 6 votes |
/**
 * Lists all object URIs under the given S3 URI that satisfy the predicate,
 * following every page of the bucket listing.
 */
@Override
public List<URI> listUris(URI uri, Predicate<URI> uriPredicate) throws IOException {
    String bucketName = uri.getHost();
    if (client == null) {
        client = clientBuilder.client(uri);
    }
    // Strip the leading '/' from the path to obtain the S3 key prefix.
    String path = uri.getPath();
    String prefix;
    if (path.length() > 1) {
        prefix = path.substring(1);
    } else {
        prefix = "";
    }
    List<URI> collected = new ArrayList<>();
    ObjectListing listing = client.listObjects(bucketName, prefix);
    addKeyUris(collected, listing, uri, uriPredicate);
    // Keep paging while S3 reports more results.
    while (listing.isTruncated()) {
        listing = client.listNextBatchOfObjects(listing);
        addKeyUris(collected, listing, uri, uriPredicate);
    }
    return collected;
}
Example #6
Source File: S3SinkStreamWriterUnitTest.java From Scribengin with GNU Affero General Public License v3.0 | 6 votes |
// End-to-end sink test: append 8 records, commit, and confirm they land in S3
// as 4 merged objects (two consecutive records per object).
@Test public void testUploadManyFiles() throws Exception {
  init("s3.default.properties");
  for (int i = 0; i < 8; i++) {
    sink.append(new Record(Integer.toString(i), Integer.toString(i).getBytes()));
  }
  // Capture the MD5 of every buffered file before the commit. NOTE(review):
  // md5s is never asserted below — presumably kept for debugging; confirm.
  List<String> md5s = new ArrayList<>();
  for (String file : sink.getBuffer().getFiles()) {
    md5s.add(new String(Md5Utils.md5AsBase64(new File(file))));
  }
  sink.prepareCommit();
  sink.commit();
  // The committed batch is written under key prefix "10".
  ObjectListing list = s3.listObjects(s3SinkConfig.getBucketName(), "10");
  assertTrue(list.getObjectSummaries().size() == 4);
  // Each uploaded object is named "<i>_<i+1>" for record pairs (0_1, 2_3, ...).
  for (int i = 0; i < 8; i += 2) {
    S3Object s3object = s3.getObject(s3SinkConfig.getBucketName(), "10/" + i + "_" + (i + 1));
    assertNotNull(s3object);
  }
}
Example #7
Source File: S3CommonFileObject.java From hop with Apache License 2.0 | 6 votes |
protected void doDelete( String key, String bucketName ) throws FileSystemException { // can only delete folder if empty if ( getType() == FileType.FOLDER ) { // list all children inside the folder ObjectListing ol = fileSystem.getS3Client().listObjects( bucketName, key ); ArrayList<S3ObjectSummary> allSummaries = new ArrayList<>( ol.getObjectSummaries() ); // get full list while ( ol.isTruncated() ) { ol = fileSystem.getS3Client().listNextBatchOfObjects( ol ); allSummaries.addAll( ol.getObjectSummaries() ); } for ( S3ObjectSummary s3os : allSummaries ) { fileSystem.getS3Client().deleteObject( bucketName, s3os.getKey() ); } } fileSystem.getS3Client().deleteObject( bucketName, key ); }
Example #8
Source File: S3CommonFileObject.java From hop with Apache License 2.0 | 6 votes |
private void handleAttachExceptionFallback( String bucket, String keyWithDelimiter, AmazonS3Exception exception ) throws FileSystemException { ListObjectsRequest listObjectsRequest = new ListObjectsRequest() .withBucketName( bucket ) .withPrefix( keyWithDelimiter ) .withDelimiter( DELIMITER ); ObjectListing ol = fileSystem.getS3Client().listObjects( listObjectsRequest ); if ( !( ol.getCommonPrefixes().isEmpty() && ol.getObjectSummaries().isEmpty() ) ) { injectType( FileType.FOLDER ); } else { //Folders don't really exist - they will generate a "NoSuchKey" exception // confirms key doesn't exist but connection okay String errorCode = exception.getErrorCode(); if ( !errorCode.equals( "NoSuchKey" ) ) { // bubbling up other connection errors logger.error( "Could not get information on " + getQualifiedName(), exception ); // make sure this gets printed for the user throw new FileSystemException( "vfs.provider/get-type.error", getQualifiedName(), exception ); } } }
Example #9
Source File: S3RemoteFileSystem.java From imhotep with Apache License 2.0 | 6 votes |
/**
 * Extracts the bare file names from a listing page by stripping the given
 * prefix from each key. Keys that are the prefix itself or that live in a
 * deeper "directory" (remainder still contains the delimiter) are logged
 * and skipped.
 */
private List<String> getFilenamesFromListing(ObjectListing listing, String prefix) {
    final List<String> filenames = new ArrayList<String>(100);
    for (S3ObjectSummary summary : listing.getObjectSummaries()) {
        final String key = summary.getKey();
        final String name = key.substring(prefix.length());
        if (name.isEmpty() || name.contains(DELIMITER)) {
            log.error("Error parsing S3 object Key. Key: " + key);
            continue;
        }
        filenames.add(name);
    }
    return filenames;
}
Example #10
Source File: EmrIT.java From digdag with Apache License 2.0 | 6 votes |
// Reads every object under <tmpS3Folder>/result/ and flattens their lines into
// a single list for validating the td-spark EMR query output.
protected void validateTdSparkQueryOutput() {
  AmazonS3URI resultUri = new AmazonS3URI(tmpS3FolderUri.toString() + "/result/");
  // NOTE(review): only the first listing page is consumed; fine as long as the
  // result folder holds fewer than ~1000 objects — TODO confirm.
  ObjectListing resultListing = s3.listObjects(new ListObjectsRequest().withBucketName(resultUri.getBucket()).withPrefix(resultUri.getKey()));
  List<String> resultLines = resultListing.getObjectSummaries().stream().flatMap(o -> {
    // try-with-resources closes each S3Object (and its HTTP stream) after reading.
    try (S3Object object = s3.getObject(o.getBucketName(), o.getKey())) {
      return CharStreams.readLines(new InputStreamReader(object.getObjectContent())).stream();
    } catch (IOException e) {
      throw Throwables.propagate(e);
    }
  }).collect(toList());
  // FIXME
  // we need to specify the version of td-spark that we can use for acceptance tests.
  // In the meantime the assertion is commented out.
  //assertThat(resultLines, Matchers.hasItem(",164.54.104.106,/item/games/4663,/category/electronics,404,Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0),121,GET,1412383598"));
}
Example #11
Source File: ObjectsOnS3.java From cantor with BSD 3-Clause "New" or "Revised" License | 6 votes |
/**
 * Returns up to {@code count} keys from the bucket, skipping the first
 * {@code start} entries in listing order.
 */
private Collection<String> getKeys(final String bucket, final int start, final int count) {
    final Set<String> keys = new HashSet<>();
    int index = 0;
    ObjectListing listing = this.s3Client.listObjects(bucket);
    while (true) {
        for (final S3ObjectSummary summary : listing.getObjectSummaries()) {
            if (index < start) {
                logger.debug("skipping {} at index={} start={}", summary.getKey(), index++, start);
                continue;
            }
            keys.add(summary.getKey());
            if (keys.size() == count) {
                logger.debug("retrieved {}/{} keys, returning early", keys.size(), count);
                return keys;
            }
        }
        logger.debug("got {} keys from {}", listing.getObjectSummaries().size(), listing);
        // BUG FIX: test isTruncated() on the *current* page before advancing;
        // the original fetched the next batch first and therefore never visited
        // the final page of a multi-page listing.
        if (!listing.isTruncated()) {
            break;
        }
        listing = this.s3Client.listNextBatchOfObjects(listing);
    }
    return keys;
}
Example #12
Source File: BucketManager.java From nexus-public with Eclipse Public License 1.0 | 6 votes |
/**
 * Deletes the bucket backing the blob store when it is empty; otherwise only
 * removes this blob store's lifecycle rules, preserving rules owned by others.
 */
@Override
public void deleteStorageLocation(final BlobStoreConfiguration blobStoreConfiguration) {
    String bucket = getConfiguredBucket(blobStoreConfiguration);
    // Request a single key: we only need to know whether the bucket is empty.
    ObjectListing listing = s3.listObjects(new ListObjectsRequest().withBucketName(bucket).withMaxKeys(1));
    if (!listing.getObjectSummaries().isEmpty()) {
        log.info("Not removing S3 bucket {} because it is not empty", bucket);
        BucketLifecycleConfiguration lifecycleConfiguration = s3.getBucketLifecycleConfiguration(bucket);
        List<Rule> nonBlobstoreRules = nonBlobstoreRules(lifecycleConfiguration, blobStoreConfiguration.getName());
        if (isEmpty(nonBlobstoreRules)) {
            // No foreign rules remain, so drop the whole lifecycle configuration.
            s3.deleteBucketLifecycleConfiguration(bucket);
        } else {
            // Keep rules that belong to other blob stores, shedding only ours.
            lifecycleConfiguration.setRules(nonBlobstoreRules);
            s3.setBucketLifecycleConfiguration(bucket, lifecycleConfiguration);
        }
        return;
    }
    s3.deleteBucket(bucket);
}
Example #13
Source File: S3PseudoLock.java From exhibitor with Apache License 2.0 | 6 votes |
// Lists the keys of all lock files sharing the given prefix.
// NOTE(review): Lists.transform returns a *lazy view* backed by the summaries
// list, and only the first listing page (up to 1000 keys) is examined — fine
// for small lock directories; confirm against expected lock counts.
@Override protected List<String> getFileNames(String lockPrefix) throws Exception {
  ListObjectsRequest request = new ListObjectsRequest();
  request.setBucketName(bucket);
  request.setPrefix(lockPrefix);
  ObjectListing objectListing = client.listObjects(request);
  // Map each S3ObjectSummary to its key.
  return Lists.transform
  (
    objectListing.getObjectSummaries(),
    new Function<S3ObjectSummary, String>()
    {
      @Override
      public String apply(S3ObjectSummary summary)
      {
        return summary.getKey();
      }
    }
  );
}
Example #14
Source File: AWSTestUtils.java From aws-ant-tasks with Apache License 2.0 | 6 votes |
/**
 * Removes every object from the bucket (page by page) and then deletes the
 * bucket itself — S3 refuses to delete a non-empty bucket.
 */
public static void emptyAndDeleteBucket(AmazonS3Client client, String bucketName) {
    ObjectListing objectListing = client.listObjects(bucketName);
    while (true) {
        // IDIOM FIX: getObjectSummaries() is already a typed
        // List<S3ObjectSummary>; the original's raw Iterator plus explicit
        // cast was needless and unchecked.
        for (S3ObjectSummary objectSummary : objectListing.getObjectSummaries()) {
            client.deleteObject(bucketName, objectSummary.getKey());
        }
        if (objectListing.isTruncated()) {
            objectListing = client.listNextBatchOfObjects(objectListing);
        } else {
            break;
        }
    }
    client.deleteBucket(bucketName);
}
Example #15
Source File: S3BucketService.java From cassandra-backup with Apache License 2.0 | 6 votes |
/**
 * Deletes the bucket after removing its contents page by page. A missing
 * bucket is a no-op (logged and skipped).
 */
@Override
public void delete(final String bucketName) {
    if (!doesExist(bucketName)) {
        logger.info("Bucket was not deleted as it does not exist.");
        return;
    }
    ObjectListing page = transferManager.getAmazonS3Client().listObjects(bucketName);
    while (true) {
        // Delegate per-page deletion to the overloaded helper.
        delete(transferManager.getAmazonS3Client(), page, bucketName);
        if (!page.isTruncated()) {
            break;
        }
        page = transferManager.getAmazonS3Client().listNextBatchOfObjects(page);
    }
    transferManager.getAmazonS3Client().deleteBucket(bucketName);
}
Example #16
Source File: OldS3NotebookRepo.java From zeppelin with Apache License 2.0 | 6 votes |
/**
 * Removes a note by deleting every S3 object under its key prefix.
 *
 * @throws IOException when the underlying S3 calls fail.
 */
@Override
public void remove(String noteId, AuthenticationInfo subject) throws IOException {
  String key = user + "/" + "notebook" + "/" + noteId;
  final ListObjectsRequest listObjectsRequest = new ListObjectsRequest()
      .withBucketName(bucketName).withPrefix(key);
  try {
    ObjectListing objects = s3client.listObjects(listObjectsRequest);
    while (true) {
      for (S3ObjectSummary objectSummary : objects.getObjectSummaries()) {
        s3client.deleteObject(bucketName, objectSummary.getKey());
      }
      // BUG FIX: check isTruncated() before fetching the next batch; the
      // original advanced first, so the final page of a multi-page listing
      // was never deleted.
      if (!objects.isTruncated()) {
        break;
      }
      objects = s3client.listNextBatchOfObjects(objects);
    }
  } catch (AmazonClientException ace) {
    throw new IOException("Unable to remove note in S3: " + ace, ace);
  }
}
Example #17
Source File: S3Profile.java From jobcacher-plugin with MIT License | 6 votes |
/**
 * Bulk-deletes every object under the given prefix, one DeleteObjects call
 * per listing page.
 */
public void delete(String bucketName, String pathPrefix) {
    ObjectListing listing = null;
    do {
        // First iteration lists; subsequent iterations fetch the next page.
        listing = listing == null
            ? helper.client().listObjects(bucketName, pathPrefix)
            : helper.client().listNextBatchOfObjects(listing);
        List<DeleteObjectsRequest.KeyVersion> keys = new ArrayList<>(listing.getObjectSummaries().size());
        for (S3ObjectSummary summary : listing.getObjectSummaries()) {
            keys.add(new DeleteObjectsRequest.KeyVersion(summary.getKey()));
        }
        // ROBUSTNESS FIX: S3 rejects a DeleteObjects request with zero keys
        // (MalformedXML); skip the call when the prefix matched nothing.
        if (!keys.isEmpty()) {
            DeleteObjectsRequest req = new DeleteObjectsRequest(bucketName);
            req.withKeys(keys);
            helper.client().deleteObjects(req);
        }
    } while (listing.isTruncated());
}
Example #18
Source File: MockAmazonS3.java From crate with Apache License 2.0 | 6 votes |
/**
 * In-memory mock of listObjects: returns a listing of every stored blob whose
 * key starts with the request's prefix (an empty prefix matches everything).
 * Asserts the request targets the mocked bucket.
 */
@Override
public ObjectListing listObjects(final ListObjectsRequest request) throws AmazonClientException {
    assertThat(request.getBucketName(), equalTo(bucket));

    final String prefix = request.getPrefix();
    final ObjectListing listing = new ObjectListing();
    listing.setBucketName(request.getBucketName());
    listing.setPrefix(prefix);

    for (Map.Entry<String, byte[]> entry : blobs.entrySet()) {
        if (Strings.isEmpty(prefix) || entry.getKey().startsWith(prefix)) {
            final S3ObjectSummary summary = new S3ObjectSummary();
            summary.setBucketName(request.getBucketName());
            summary.setKey(entry.getKey());
            summary.setSize(entry.getValue().length);
            listing.getObjectSummaries().add(summary);
        }
    }
    return listing;
}
Example #19
Source File: ImportS3.java From h2o-2 with Apache License 2.0 | 6 votes |
/**
 * Imports every key from the requested bucket into the DKV store and returns
 * a JSON response listing the succeeded and failed keys.
 */
@Override
protected Response serve() {
    String bucket = _bucket.value();
    Log.info("ImportS3 processing (" + bucket + ")");
    JsonObject json = new JsonObject();
    JsonArray succeeded = new JsonArray();
    JsonArray failed = new JsonArray();
    AmazonS3 s3 = PersistS3.getClient();
    // Walk every page of the bucket listing, importing each key as we go.
    ObjectListing listing = s3.listObjects(bucket);
    while (true) {
        processListing(listing, succeeded, failed);
        if (!listing.isTruncated()) {
            break;
        }
        listing = s3.listNextBatchOfObjects(listing);
    }
    json.add(NUM_SUCCEEDED, new JsonPrimitive(succeeded.size()));
    json.add(SUCCEEDED, succeeded);
    json.add(NUM_FAILED, new JsonPrimitive(failed.size()));
    json.add(FAILED, failed);
    // Ensure the imported keys are globally visible before responding.
    DKV.write_barrier();
    Response r = Response.done(json);
    r.setBuilder(SUCCEEDED + "." + KEY, new KeyCellBuilder());
    return r;
}
Example #20
Source File: StashReaderTest.java From emodb with Apache License 2.0 | 6 votes |
// Builds a Mockito Answer that fabricates an ObjectListing for whichever
// ListObjectsRequest the code under test passes in. Each file name becomes a
// 100-byte object under the request's prefix; a non-null marker flags the
// listing as truncated with that marker as the next page token.
private Answer<ObjectListing> objectListingAnswer(@Nullable final String marker, final String... fileNames) {
  return new Answer<ObjectListing>() {
    @Override
    public ObjectListing answer(InvocationOnMock invocation) throws Throwable {
      ListObjectsRequest request = (ListObjectsRequest) invocation.getArguments()[0];

      ObjectListing objectListing = new ObjectListing();
      objectListing.setBucketName(request.getBucketName());
      objectListing.setPrefix(request.getPrefix());
      // truncated == "another page follows"; a null marker means last page.
      objectListing.setTruncated(marker != null);
      objectListing.setNextMarker(marker);

      for (String fileName : fileNames) {
        S3ObjectSummary objectSummary = new S3ObjectSummary();
        objectSummary.setKey(request.getPrefix() + fileName);
        objectSummary.setSize(100);
        objectListing.getObjectSummaries().add(objectSummary);
      }
      return objectListing;
    }
  };
}
Example #21
Source File: S3Repository.java From hawkbit-extensions with Eclipse Public License 1.0 | 6 votes |
@Override public void deleteByTenant(final String tenant) { final String folder = sanitizeTenant(tenant); LOG.info("Deleting S3 object folder (tenant) from bucket {} and key {}", s3Properties.getBucketName(), folder); // Delete artifacts ObjectListing objects = amazonS3.listObjects(s3Properties.getBucketName(), folder + "/"); do { for (final S3ObjectSummary objectSummary : objects.getObjectSummaries()) { amazonS3.deleteObject(s3Properties.getBucketName(), objectSummary.getKey()); } objects = amazonS3.listNextBatchOfObjects(objects); } while (objects.isTruncated()); }
Example #22
Source File: S3NotebookRepo.java From zeppelin with Apache License 2.0 | 6 votes |
/**
 * Removes a notebook folder by deleting every S3 object under its prefix.
 *
 * @throws IOException when the underlying S3 calls fail.
 */
@Override
public void remove(String folderPath, AuthenticationInfo subject) throws IOException {
  final ListObjectsRequest listObjectsRequest = new ListObjectsRequest()
      .withBucketName(bucketName).withPrefix(rootFolder + folderPath + "/");
  try {
    ObjectListing objects = s3client.listObjects(listObjectsRequest);
    while (true) {
      for (S3ObjectSummary objectSummary : objects.getObjectSummaries()) {
        s3client.deleteObject(bucketName, objectSummary.getKey());
      }
      // BUG FIX: check isTruncated() before fetching the next batch; the
      // original advanced first, leaving the final page of a multi-page
      // listing undeleted.
      if (!objects.isTruncated()) {
        break;
      }
      objects = s3client.listNextBatchOfObjects(objects);
    }
  } catch (AmazonClientException ace) {
    throw new IOException("Unable to remove folder " + folderPath + " in S3", ace);
  }
}
Example #23
Source File: TestAmazonS3Target.java From datacollector with Apache License 2.0 | 6 votes |
// Verifies that a successful write to S3 emits exactly one event record
// carrying the destination bucket and object key.
@Test public void testEventRecords() throws Exception {
  String prefix = "testEventRecords";
  AmazonS3Target amazonS3Target = createS3targetWithTextData(prefix, false, "");
  TargetRunner targetRunner = new TargetRunner.Builder(AmazonS3DTarget.class, amazonS3Target)
      .addService(DataFormatGeneratorService.class, new SdkJsonDataFormatGeneratorService())
      .build();
  targetRunner.runInit();
  List<Record> logRecords = TestUtil.createStringRecords(BUCKET_NAME);
  //Make sure the prefix is empty
  ObjectListing objectListing = s3client.listObjects(BUCKET_NAME, prefix);
  Assert.assertTrue(objectListing.getObjectSummaries().isEmpty());
  targetRunner.runWrite(logRecords);
  // One batch written -> exactly one "object written" event expected.
  Assert.assertEquals(1, targetRunner.getEventRecords().size());
  Record eventRecord = targetRunner.getEventRecords().get(0);
  Assert.assertTrue(eventRecord.has("/bucket"));
  Assert.assertTrue(eventRecord.has("/objectKey"));
  Assert.assertEquals(BUCKET_NAME, eventRecord.get("/bucket").getValueAsString());
  targetRunner.runDestroy();
}
Example #24
Source File: S3Restorer.java From cassandra-backup with Apache License 2.0 | 6 votes |
/**
 * Feeds every regular object under the given prefix to the consumer, with its
 * key rewritten relative to this node's cluster/datacenter/node folder.
 * Directory markers (keys ending in '/') are skipped.
 */
@Override
public void consumeFiles(final RemoteObjectReference prefix, final Consumer<RemoteObjectReference> consumer) {
    final Path bucketPath = Paths.get(request.storageLocation.clusterId)
        .resolve(request.storageLocation.datacenterId)
        .resolve(request.storageLocation.nodeId);

    ObjectListing objectListing = amazonS3.listObjects(request.storageLocation.bucket, prefix.canonicalPath);
    while (true) {
        objectListing.getObjectSummaries().stream()
            .filter(objectSummary -> !objectSummary.getKey().endsWith("/"))
            .forEach(objectSummary -> consumer.accept(
                objectKeyToRemoteReference(bucketPath.relativize(Paths.get(objectSummary.getKey())))));

        // Keep paging until S3 reports the listing is complete.
        if (!objectListing.isTruncated()) {
            break;
        }
        objectListing = amazonS3.listNextBatchOfObjects(objectListing);
    }
}
Example #25
Source File: TerrapinUtil.java From terrapin with Apache License 2.0 | 5 votes |
// Lists objects under s3KeyPrefix in s3Bucket and returns (s3n path, size)
// pairs, paging manually via the listing marker. Throws a RuntimeException
// once more than Constants.MAX_ALLOWED_SHARDS files have been collected.
static public List<Pair<Path, Long>> getS3FileList(AWSCredentials credentials, String s3Bucket, String s3KeyPrefix) {
  List<Pair<Path, Long>> fileSizePairList = Lists.newArrayListWithCapacity(Constants.MAX_ALLOWED_SHARDS);

  AmazonS3Client s3Client = new AmazonS3Client(credentials);

  // List files and build the path using the s3n: prefix.
  // Note that keys > marker are retrieved where the > is by lexicographic order.
  // NOTE(review): the marker is exclusive and is initialized to the prefix
  // itself, so an object whose key is *exactly* the prefix would be skipped
  // — confirm this is intended.
  String prefix = s3KeyPrefix;
  String marker = prefix;
  while (true) {
    boolean reachedEnd = false;
    ObjectListing listing = s3Client.listObjects(new ListObjectsRequest().
        withBucketName(s3Bucket).
        withMarker(marker));
    List<S3ObjectSummary> summaries = listing.getObjectSummaries();
    if (summaries.isEmpty()) {
      break;
    }
    for (S3ObjectSummary summary: summaries) {
      if (summary.getKey().startsWith(prefix)) {
        fileSizePairList.add(new ImmutablePair(new Path("s3n", s3Bucket, "/" + summary.getKey()),
            summary.getSize()));
        // Guard against unexpectedly large directories.
        if (fileSizePairList.size() > Constants.MAX_ALLOWED_SHARDS) {
          throw new RuntimeException("Too many files " + fileSizePairList.size());
        }
      } else {
        // We found a key which does not match the prefix, stop.
        reachedEnd = true;
        break;
      }
    }
    if (reachedEnd) {
      break;
    }
    // Resume the next page after the last key seen on this page.
    marker = summaries.get(summaries.size() - 1).getKey();
  }
  return fileSizePairList;
}
Example #26
Source File: S3ArtifactStoreTest.java From gocd-s3-artifacts with Apache License 2.0 | 5 votes |
/**
 * Stage folders come back from S3 in no particular order; the store must pick
 * the highest stage counter ("1.7"), not simply the first or last entry.
 */
@Test
public void shouldReturnTheLatestStageCounter() {
    List<String> stagePrefixes = new ArrayList<>();
    stagePrefixes.add("pipeline/stage/job/1.2");
    stagePrefixes.add("pipeline/stage/job/1.1");
    stagePrefixes.add("pipeline/stage/job/1.7");

    ObjectListing listing = new ObjectListing();
    listing.setCommonPrefixes(stagePrefixes);
    doReturn(listing).when(mockClient).listObjects(any(ListObjectsRequest.class));

    S3ArtifactStore store = new S3ArtifactStore(mockClient, "foo-bar");
    String prefix = store.getLatestPrefix("pipeline", "stage", "job", "1");
    assertEquals("pipeline/stage/job/1.7", prefix);
}
Example #27
Source File: S3Utils.java From micro-server with Apache License 2.0 | 5 votes |
/**
 * Method returns list of all <b>S3ObjectSummary</b> objects, subject to
 * <i>req</i> parameters. Multiple S3 calls will be performed if there are
 * more than 1000 elements there
 *
 * @param req - ListObjectRequest to be used.
 * @return List of S3ObjectSummary from bucket,
 */
public List<S3ObjectSummary> getAllSummaries(ListObjectsRequest req) {
    List<S3ObjectSummary> result = new ArrayList<>();
    String marker = null;
    // Clone so the caller's request object is never mutated.
    ListObjectsRequest req2 = (ListObjectsRequest) req.clone();
    ObjectListing listing;
    do {
        listing = client.listObjects(req2.withMarker(marker));
        result.addAll(listing.getObjectSummaries());
        marker = listing.getNextMarker();
        // BUG FIX: S3 only populates NextMarker when a delimiter is set. For a
        // truncated, delimiter-less listing the original looped forever on the
        // first page (null marker); fall back to the last key of this page.
        if (marker == null && listing.isTruncated() && !listing.getObjectSummaries().isEmpty()) {
            marker = listing.getObjectSummaries().get(listing.getObjectSummaries().size() - 1).getKey();
        }
    } while (listing.isTruncated());
    return result;
}
Example #28
Source File: CachingThreddsS3Client.java From netcdf-java with BSD 3-Clause "New" or "Revised" License | 5 votes |
/**
 * Cache-through listObjects: returns the cached listing for the URI when one
 * exists, otherwise delegates to the wrapped client and caches the result.
 * Guava Optional is the cache value so that a null listing ("listed, nothing
 * there") is itself cacheable and distinguishable from "never listed".
 */
@Override
public ObjectListing listObjects(S3URI s3uri) {
    Optional<ObjectListing> cached = objectListingCache.getIfPresent(s3uri);
    if (cached != null) {
        logger.debug(String.format("ObjectListing cache hit: '%s'", s3uri));
        return cached.orNull();
    }
    logger.debug(String.format("ObjectListing cache MISS: '%s'", s3uri));
    Optional<ObjectListing> fetched = Optional.fromNullable(threddsS3Client.listObjects(s3uri));
    objectListingCache.put(s3uri, fetched);
    return fetched.orNull();
}
Example #29
Source File: OldS3NotebookRepo.java From zeppelin with Apache License 2.0 | 5 votes |
// Enumerates all notes stored under <user>/notebook/ by scanning for
// "note.json" objects and resolving each into an OldNoteInfo.
@Override public List<OldNoteInfo> list(AuthenticationInfo subject) throws IOException {
  List<OldNoteInfo> infos = new LinkedList<>();
  OldNoteInfo info;
  try {
    ListObjectsRequest listObjectsRequest = new ListObjectsRequest()
        .withBucketName(bucketName)
        .withPrefix(user + "/" + "notebook");
    ObjectListing objectListing;
    do {
      objectListing = s3client.listObjects(listObjectsRequest);
      for (S3ObjectSummary objectSummary : objectListing.getObjectSummaries()) {
        // Only note.json objects identify a notebook directory.
        if (objectSummary.getKey().endsWith("note.json")) {
          info = getNoteInfo(objectSummary.getKey());
          if (info != null) {
            infos.add(info);
          } else {
            LOG.debug("Unable to get notebook info for key: " + objectSummary.getKey());
          }
        }
      }
      // NOTE(review): getNextMarker() is only populated by S3 when a delimiter
      // is set; on a truncated, delimiter-less listing it may be null and this
      // loop would refetch the same page — TODO confirm against SDK behavior.
      listObjectsRequest.setMarker(objectListing.getNextMarker());
    } while (objectListing.isTruncated());
  } catch (AmazonClientException ace) {
    throw new IOException("Unable to list objects in S3: " + ace, ace);
  }
  return infos;
}
Example #30
Source File: S3Storage.java From digdag with Apache License 2.0 | 5 votes |
/**
 * Streams every object under keyPrefix to the callback, one listing page at a
 * time, retrying transient listObjects failures via getWithRetry.
 */
@Override
public void list(String keyPrefix, FileListing callback) {
    checkArgument(keyPrefix != null, "keyPrefix is null");
    String errorMessage = "listing files on bucket " + bucket + " prefix " + keyPrefix;
    ListObjectsRequest req = new ListObjectsRequest();
    req.setBucketName(bucket);
    req.setPrefix(keyPrefix);
    ObjectListing listing;
    do {
        try {
            listing = getWithRetry(errorMessage, () -> client.listObjects(req));
        } catch (StorageFileNotFoundException ex) {
            throw Throwables.propagate(ex.getCause());
        }
        callback.accept(Lists.transform(
                listing.getObjectSummaries(),
                (summary) -> StorageObjectSummary.builder()
                    .key(summary.getKey())
                    .contentLength(summary.getSize())
                    .lastModified(summary.getLastModified().toInstant())
                    .build()
                ));
        // BUG FIX: S3 only sets NextMarker when a delimiter is used; for this
        // delimiter-less request a truncated page would yield a null marker and
        // the loop would refetch the first page forever. Fall back to the last
        // key of the current page.
        String marker = listing.getNextMarker();
        if (marker == null && listing.isTruncated() && !listing.getObjectSummaries().isEmpty()) {
            marker = listing.getObjectSummaries().get(listing.getObjectSummaries().size() - 1).getKey();
        }
        req.setMarker(marker);
    } while (listing.isTruncated());
}