com.amazonaws.services.s3.model.S3Object Java Examples
The following examples show how to use
com.amazonaws.services.s3.model.S3Object.
You can vote up the examples you like or vote down the ones you don't like,
and you can go to the original project or source file by following the links above each example. You may also check out the related API usage on the sidebar.
Example #1
Source File: AWSSdkClient.java From incubator-gobblin with Apache License 2.0 | 6 votes |
/***
 * Download a S3 object to local directory
 *
 * @param s3ObjectSummary S3 object summary for the object to download
 * @param targetDirectory Local target directory to download the object to
 * @throws IOException If any errors were encountered in downloading the object
 */
public void downloadS3Object(S3ObjectSummary s3ObjectSummary, String targetDirectory)
    throws IOException {
  final AmazonS3 amazonS3 = getS3Client();

  final GetObjectRequest getObjectRequest = new GetObjectRequest(
      s3ObjectSummary.getBucketName(),
      s3ObjectSummary.getKey());

  // S3Object is Closeable: closing it releases the underlying HTTP connection
  // even if the copy fails part-way through (the original leaked it).
  try (S3Object s3Object = amazonS3.getObject(getObjectRequest)) {
    final String targetFile =
        StringUtils.removeEnd(targetDirectory, File.separator) + File.separator + s3Object.getKey();
    FileUtils.copyInputStreamToFile(s3Object.getObjectContent(), new File(targetFile));

    LOGGER.info("S3 object downloaded to file: " + targetFile);
  }
}
Example #2
Source File: DownloadCallable.java From aws-codepipeline-plugin-for-jenkins with Apache License 2.0 | 6 votes |
/**
 * Downloads the artifact held by the given S3 object into the workspace and
 * decompresses it, always deleting the temporary compressed file afterwards.
 *
 * @param sessionObject      S3 object containing the compressed artifact
 * @param workspace          directory to extract into
 * @param downloadedFileName name of the temporary compressed file inside the workspace
 * @param listener           build listener used for logging
 * @throws IOException if download or decompression fails
 */
private void downloadAndExtract(
        final S3Object sessionObject,
        final File workspace,
        final String downloadedFileName,
        final TaskListener listener) throws IOException {
    downloadArtifacts(sessionObject, workspace, downloadedFileName, listener);

    final File fullFilePath = new File(workspace, downloadedFileName);
    try {
        ExtractionTools.decompressFile(fullFilePath, workspace, model.getCompressionType(), listener);
        LoggingHelper.log(listener, "Artifact uncompressed successfully");
    } finally {
        // fullFilePath can never be null here (new File(...) always yields an instance),
        // so the original "if (fullFilePath != null)" guard was dead code and is removed.
        try {
            ExtractionTools.deleteTemporaryCompressedFile(fullFilePath);
        } catch (final IOException ex) {
            // Cleanup is best-effort: log and continue rather than mask the primary result.
            LoggingHelper.log(listener, "Could not delete temporary file: %s", ex.getMessage());
            LoggingHelper.log(listener, ex);
        }
    }
}
Example #3
Source File: S3DaoImpl.java From herd with Apache License 2.0 | 6 votes |
/**
 * Loads a Java properties file stored in S3.
 * The per-call client is always shut down, regardless of outcome.
 */
@Override
public Properties getProperties(String bucketName, String key, S3FileTransferRequestParamsDto s3FileTransferRequestParamsDto)
{
    AmazonS3Client amazonS3Client = getAmazonS3(s3FileTransferRequestParamsDto);
    try
    {
        S3Object propertiesObject = getS3Object(amazonS3Client, bucketName, key, true);
        return javaPropertiesHelper.getProperties(propertiesObject.getObjectContent());
    }
    catch (IllegalArgumentException e)
    {
        // Re-wrap with bucket/key context so the caller can locate the bad file.
        String message = "The properties file in S3 bucket '" + bucketName + "' and key '" + key + "' is invalid.";
        throw new IllegalArgumentException(message, e);
    }
    finally
    {
        amazonS3Client.shutdown();
    }
}
Example #4
Source File: S3Utils.java From singleton with Eclipse Public License 2.0 | 6 votes |
/**
 * Reads the full content of an S3 object and returns it as a UTF-8 string.
 *
 * @param s3Obj the S3 object whose content stream will be consumed
 * @return the object content decoded as UTF-8
 * @throws IOException if reading from the object stream fails
 */
public static String convertS3Obj2Str(S3Object s3Obj) throws IOException {
    // try-with-resources guarantees both streams are closed even when read() throws;
    // the original's manual close skipped fos.close() if s3is.close() threw.
    try (S3ObjectInputStream input = s3Obj.getObjectContent();
            ByteArrayOutputStream buffer = new ByteArrayOutputStream()) {
        byte[] chunk = new byte[1024];
        int length;
        // read() returns -1 only at end of stream. The original tested "> 0",
        // which would truncate the result on a legal zero-length read.
        while ((length = input.read(chunk)) != -1) {
            buffer.write(chunk, 0, length);
        }
        return buffer.toString(ConstantsUnicode.UTF8);
    }
}
Example #5
Source File: S3OneComponentDaoImpl.java From singleton with Eclipse Public License 2.0 | 6 votes |
/**
 * get one component bundle files from s3 server as json String
 */
@Override
public String get2JsonStr(String productName, String version, String component, String locale) throws DataException {
    String filePath = S3Utils.genProductVersionS3Path(productName, version) + component
            + ConstantsChar.BACKSLASH + ResourceFilePathGetter.getLocalizedJSONFileName(locale);

    String result = null;
    if (s3Client.getS3Client().doesObjectExist(config.getBucketName(), filePath)) {
        S3Object bundleObject = s3Client.getS3Client().getObject(config.getBucketName(), filePath);
        // Guard clause: a missing object is reported the same way as a read failure.
        if (bundleObject == null) {
            throw new DataException(S3_NOT_EXIST_STR + filePath);
        }
        try {
            result = S3Utils.convertS3Obj2Str(bundleObject);
        } catch (IOException e) {
            logger.warn(e.getMessage(), e);
            throw new DataException(S3_NOT_EXIST_STR + filePath);
        }
    }
    if (result == null) {
        throw new DataException(S3_NOT_EXIST_STR + filePath);
    }
    return result;
}
Example #6
Source File: AwsS3BuildCacheServiceTest.java From gradle-s3-build-cache with Apache License 2.0 | 6 votes |
@Test
public void loadGetsObjectsAndReturnsTrueIfItExistsInS3() throws Exception {
    /** Setup **/
    buildCacheService = new AwsS3BuildCacheService(s3, "bucketName", null, true);

    // Stub an existing cache entry with mock content.
    S3Object cachedObject = mock(S3Object.class);
    S3ObjectInputStream cachedContent = mock(S3ObjectInputStream.class);
    doReturn(true).when(s3).doesObjectExist("bucketName", "abcdefghijkl123456789");
    doReturn(cachedObject).when(s3).getObject("bucketName", "abcdefghijkl123456789");
    doReturn(cachedContent).when(cachedObject).getObjectContent();

    /** Run **/
    boolean result = buildCacheService.load(key, reader);

    /** Check **/
    assertTrue(result);
    verify(reader).readFrom(cachedContent);
}
Example #7
Source File: pinpoint_export_endpoints.java From aws-doc-sdk-examples with Apache License 2.0 | 6 votes |
/**
 * Downloads each of the given objects from an S3 bucket into a local directory.
 * Errors terminate the whole JVM via System.exit(1) — suitable for a CLI example only.
 */
public static void downloadFromS3(String s3BucketName, List<String> objectKeys, String downloadDirectory) {
    // Initializes the Amazon S3 client.
    AmazonS3 s3Client = AmazonS3ClientBuilder.defaultClient();
    try {
        // Downloads each object to the specified file path.
        for (String key : objectKeys) {
            S3Object object = s3Client.getObject(s3BucketName, key);
            // NOTE(review): substring(lastIndexOf("/")) keeps the leading '/' — the
            // concatenation below relies on that as the path separator; confirm intended.
            String endpointsFileName = key.substring(key.lastIndexOf("/"));
            Path filePath = Paths.get(downloadDirectory + endpointsFileName);
            System.out.format("Downloading %s to %s . . .\n", filePath.getFileName(), filePath.getParent());
            writeObjectToFile(filePath, object);
        }
        System.out.println("Download finished.");
    } catch (AmazonServiceException | NullPointerException e) {
        // NOTE(review): catching NullPointerException masks programming errors; verify this
        // is deliberate (e.g. keys without '/' or a null download directory).
        System.err.println(e.getMessage());
        System.exit(1);
    }
}
Example #8
Source File: AmazonS3SourceMockTests.java From spring-cloud-stream-app-starters with Apache License 2.0 | 6 votes |
@BeforeClass
public static void setup() throws IOException {
    // Seed a fake "remote" folder with three files of known content.
    File remoteFolder = TEMPORARY_FOLDER.newFolder("remote");
    FileCopyUtils.copy("Hello".getBytes(), new File(remoteFolder, "1.test"));
    FileCopyUtils.copy("Bye".getBytes(), new File(remoteFolder, "2.test"));
    FileCopyUtils.copy("Other\nOther2".getBytes(), new File(remoteFolder, "otherFile"));

    // Wrap each file in an S3Object so the mock bucket can serve it.
    S3_OBJECTS = new ArrayList<>();
    for (File file : remoteFolder.listFiles()) {
        S3Object s3Object = new S3Object();
        s3Object.setBucketName(S3_BUCKET);
        s3Object.setKey(file.getName());
        s3Object.setObjectContent(new FileInputStream(file));
        S3_OBJECTS.add(s3Object);
    }

    String localFolder = TEMPORARY_FOLDER.newFolder("local").getAbsolutePath();
    System.setProperty("s3.localDir", localFolder);
}
Example #9
Source File: AmazonS3OAuthStateService.java From java-slack-sdk with MIT License | 6 votes |
/**
 * Checks whether the OAuth state value is present in S3 and not yet expired.
 * The stored object content is an epoch-millis expiry timestamp.
 *
 * @param state the OAuth state parameter to look up
 * @return true when the state exists and its expiry is in the future
 */
@Override
public boolean isAvailableInDatabase(String state) {
    AmazonS3 s3 = this.createS3Client();
    S3Object s3Object = getObject(s3, getKey(state));
    if (s3Object == null) {
        return false;
    }
    String millisToExpire = null;
    try {
        millisToExpire = IOUtils.toString(s3Object.getObjectContent());
        return Long.valueOf(millisToExpire) > System.currentTimeMillis();
    } catch (IOException e) {
        log.error("Failed to load a state data for state: {}", state, e);
        return false;
    } catch (NumberFormatException ne) {
        log.error("Invalid state value detected - state: {}, millisToExpire: {}", state, millisToExpire);
        return false;
    } finally {
        // IOUtils.toString does not close the stream; close the S3Object to
        // release the pooled HTTP connection (the original leaked it).
        try {
            s3Object.close();
        } catch (IOException ignored) {
            // best-effort cleanup
        }
    }
}
Example #10
Source File: S3S3CopierTest.java From circus-train with Apache License 2.0 | 6 votes |
@Test
public void copyOneFile() throws Exception {
    client.putObject("source", "data", inputData);

    final Path sourceBaseLocation = new Path("s3://source/data");
    final Path replicaLocation = new Path("s3://target/data2");
    final List<Path> sourceSubLocations = new ArrayList<>();
    final S3S3Copier copier = newS3S3Copier(sourceBaseLocation, sourceSubLocations, replicaLocation);

    final Metrics metrics = copier.copy();

    // 7 bytes == length of "bar foo".
    assertThat(metrics.getBytesReplicated(), is(7L));
    assertThat(metrics.getMetrics().get(S3S3CopierMetrics.Metrics.TOTAL_BYTES_TO_REPLICATE.name()), is(7L));
    final S3Object copied = client.getObject("target", "data2");
    assertThat(IOUtils.toString(copied.getObjectContent()), is("bar foo"));
    assertThat(registry.getGauges().containsKey(RunningMetrics.S3S3_CP_BYTES_REPLICATED.name()), is(true));
}
Example #11
Source File: SimpleStorageResourceTest.java From spring-cloud-aws with Apache License 2.0 | 6 votes |
@Test void getInputStream_existingObject_returnsInputStreamWithContent() throws Exception { // Arrange AmazonS3 amazonS3 = mock(AmazonS3.class); ObjectMetadata objectMetadata = mock(ObjectMetadata.class); when(amazonS3.getObjectMetadata(any(GetObjectMetadataRequest.class))) .thenReturn(objectMetadata); S3Object s3Object = new S3Object(); s3Object.setObjectMetadata(objectMetadata); s3Object.setObjectContent(new ByteArrayInputStream(new byte[] { 42 })); when(amazonS3.getObject(any(GetObjectRequest.class))).thenReturn(s3Object); // Act SimpleStorageResource simpleStorageResource = new SimpleStorageResource(amazonS3, "bucket", "object", new SyncTaskExecutor()); // Assert assertThat(simpleStorageResource.exists()).isTrue(); assertThat(simpleStorageResource.getInputStream().read()).isEqualTo(42); }
Example #12
Source File: DownloadCallable.java From aws-codepipeline-plugin-for-jenkins with Apache License 2.0 | 6 votes |
/**
 * Streams the S3 object's content into a file inside the workspace.
 * Both streams are closed by try-with-resources.
 */
private static void streamReadAndDownloadObject(
        final File workspace,
        final S3Object sessionObject,
        final String downloadedFileName) throws IOException {

    final File outputFile = new File(workspace, downloadedFileName);

    try (final S3ObjectInputStream objectContents = sessionObject.getObjectContent();
            final OutputStream outputStream = new FileOutputStream(outputFile)) {
        final byte[] copyBuffer = new byte[8192];
        int bytesRead;
        while ((bytesRead = objectContents.read(copyBuffer)) != -1) {
            outputStream.write(copyBuffer, 0, bytesRead);
        }
    }
}
Example #13
Source File: DynamoDbS3Operations.java From ReCiter with Apache License 2.0 | 6 votes |
/**
 * This function retrieves large object from S3
 * @param bucketName bucket to read from (lower-cased before lookup)
 * @param keyName key of the stored object
 * @param objectClass expected type of the stored object; only ReCiterFeature is supported
 * @return the deserialized object, or null when the type is unsupported or an error occurred
 */
public <T> Object retrieveLargeItem(String bucketName, String keyName, Class<T> objectClass) {
    // try-with-resources closes the S3Object (and its HTTP connection) even on
    // failure — the original never closed it.
    try (S3Object s3Object = s3.getObject(new GetObjectRequest(bucketName.toLowerCase(), keyName))) {
        String objectContent = IOUtils.toString(s3Object.getObjectContent(), StandardCharsets.UTF_8);
        if (objectClass == ReCiterFeature.class) {
            return OBJECT_MAPPER.readValue(objectContent, ReCiterFeature.class);
        }
    } catch (IOException | AmazonServiceException e) {
        log.error(e.getMessage());
    }
    return null;
}
Example #14
Source File: S3BucketStepsTests.java From vividus with Apache License 2.0 | 6 votes |
@Test
void fetchCsvFileTest() throws IOException {
    String objectKey = S3_OBJECT_KEY + ".csv";
    byte[] csvContent = ResourceUtils.loadResourceAsByteArray(CSV_FILE_PATH);

    // Serve the CSV fixture through a mocked S3 object.
    S3Object csvObject = mock(S3Object.class);
    when(csvObject.getObjectContent())
            .thenReturn(new S3ObjectInputStream(new ByteArrayInputStream(csvContent), null));
    when(amazonS3Client.getObject(S3_BUCKET_NAME, objectKey)).thenReturn(csvObject);

    Set<VariableScope> scopes = Set.of(VariableScope.SCENARIO);
    String variableName = "varName";
    steps.fetchCsvObject(objectKey, S3_BUCKET_NAME, scopes, variableName);

    verify(amazonS3Client).getObject(S3_BUCKET_NAME, objectKey);
    verify(bddVariableContext).putVariable(scopes, variableName, List.of(Map.of("id", "1")));
}
Example #15
Source File: ErrorManager.java From pacbot with Apache License 2.0 | 6 votes |
/**
 * Fetch error info.
 *
 * Lazily populates {@code errorInfo} (error type -> list of error records) by reading
 * the "&lt;dataSource&gt;-loaderror.data" JSON file from S3. A read failure is recorded
 * in the supplied error list instead of being propagated.
 *
 * @param datasource the datasource
 * @param errorList the error list
 */
private void fetchErrorInfo(List<Map<String,String>> errorList){
    // errorInfo doubles as a cache: only the first call performs the S3 fetch.
    if(errorInfo==null){
        ObjectMapper objectMapper = new ObjectMapper();
        List<Map<String, String>> inventoryErrors = new ArrayList<>();
        // NOTE(review): a new S3 client is built on every cache miss and never shut down — confirm acceptable.
        AmazonS3 s3Client = AmazonS3ClientBuilder.standard()
                .withCredentials(new AWSStaticCredentialsProvider(new CredentialProvider().getCredentials(s3Account,s3Role))).withRegion(s3Region).build();
        try {
            S3Object inventoryErrorData = s3Client.getObject(new GetObjectRequest(bucketName,dataPath+"/"+dataSource+"-loaderror.data"));
            // Reader is closed by try-with-resources; the whole file is joined and parsed as a JSON list of maps.
            try (BufferedReader reader = new BufferedReader(new InputStreamReader(inventoryErrorData.getObjectContent()))) {
                inventoryErrors = objectMapper.readValue(reader.lines().collect(Collectors.joining("\n")),new TypeReference<List<Map<String, String>>>() {});
            }
        } catch (IOException e) {
            // Failure is downgraded to a warning record appended to the caller's list.
            LOGGER.error("Exception in collecting inventory error data",e);
            Map<String,String> errorMap = new HashMap<>();
            errorMap.put(ERROR, "Exception in collecting inventory error data");
            errorMap.put(ERROR_TYPE, WARN);
            errorMap.put(EXCEPTION, e.getMessage());
            errorList.add(errorMap);
        }
        // Group records by their "type" field for later lookup (empty map on failure).
        errorInfo = inventoryErrors.parallelStream().collect(Collectors.groupingBy(obj -> obj.get("type")));
    }
}
Example #16
Source File: S3BlockReader.java From attic-apex-malhar with Apache License 2.0 | 6 votes |
/**
 * S3 block read would be achieved through the AmazonS3 client. Following are the steps to achieve:
 * (1) Create the objectRequest from bucketName and filePath.
 * (2) Set the range to the above created objectRequest.
 * (3) Get the object portion through AmazonS3 client API.
 * (4) Get the object content from the above object portion.
 * @return the block entity
 * @throws IOException
 */
@Override
protected Entity readEntity() throws IOException {
    entity.clear();
    GetObjectRequest rangeObjectRequest = new GetObjectRequest(bucketName, filePath);
    rangeObjectRequest.setRange(offset, blockMetadata.getLength() - 1);
    S3Object objectPortion = s3Client.getObject(rangeObjectRequest);
    // try-with-resources: the original only closed the stream on success, leaking
    // the HTTP connection whenever ByteStreams.toByteArray threw.
    try (S3ObjectInputStream wrappedStream = objectPortion.getObjectContent()) {
        byte[] record = ByteStreams.toByteArray(wrappedStream);
        entity.setUsedBytes(record.length);
        entity.setRecord(record);
    }
    return entity;
}
Example #17
Source File: S3Service.java From spring-s3-properties-loader with MIT License | 6 votes |
/**
 * @param bucketName + key location
 * @return {@link S3Object} for the given aws s3 location.
 * @throws InvalidS3LocationException for invalid location params
 * @throws S3ResourceException for connection and availability errors
 */
public S3Object retriveFrom(String location) {
    if (isEmpty(location)) {
        throw new InvalidS3LocationException("Location cannot be empty or null");
    }

    // Accept both "s3://bucket/key" and plain "bucket/key" forms.
    String path = location;
    if (path.startsWith(S3_PROTOCOL_PREFIX)) {
        path = path.substring(S3_PROTOCOL_PREFIX.length());
    }

    int separatorIndex = path.indexOf('/');
    if (separatorIndex < 0) {
        throw new InvalidS3LocationException("The location must contains the full path of the properties file");
    }

    String bucketName = path.substring(0, separatorIndex);
    String keyName = path.substring(separatorIndex + 1);

    try {
        return amazonS3.getObject(bucketName, keyName);
    } catch (Exception e) {
        throw new S3ResourceException("Could not load resource from " + location, e);
    }
}
Example #18
Source File: S3ChangeLogStore.java From athenz with Apache License 2.0 | 6 votes |
/**
 * Fetches and deserializes a signed domain document from the S3 change-log bucket.
 * Returns null when the object cannot be fetched or parsed (error is logged).
 */
SignedDomain getSignedDomain(AmazonS3 s3, String domainName) {
    if (LOGGER.isDebugEnabled()) {
        LOGGER.debug("getSignedDomain with S3: {}", domainName);
    }
    try (S3ObjectInputStream contentStream = s3.getObject(s3BucketName, domainName).getObjectContent()) {
        return jsonMapper.readValue(contentStream, SignedDomain.class);
    } catch (Exception ex) {
        LOGGER.error("AWSS3ChangeLog: getSignedDomain - unable to get domain {} error: {}",
                domainName, ex.getMessage());
        return null;
    }
}
Example #19
Source File: S3Operations.java From ats-framework with Apache License 2.0 | 6 votes |
/**
 * Get MD5, size, owner, storage class and last modification time for a desired file in the pointed bucket
 *
 * @param fileName the file name
 * @return metadata for the S3 object, or null when an error was delegated to handleExeption
 */
@PublicAtsApi
public S3ObjectInfo getFileMetadata( String fileName ) {

    try {
        S3Object element = s3Client.getObject(bucketName, fileName);
        if (element != null) {
            ObjectMetadata metaData = element.getObjectMetadata();
            S3ObjectInfo s3Info = new S3ObjectInfo();
            // BUG FIX: the original called setBucketName(fileName), mislabelling the
            // object's bucket with its file name.
            s3Info.setBucketName(bucketName);
            s3Info.setLastModified(metaData.getLastModified());
            s3Info.setMd5(metaData.getETag());
            s3Info.setName(element.getKey());
            s3Info.setSize(metaData.getContentLength());

            return s3Info;
        } else {
            throw new NoSuchElementException("File with name '" + fileName + "' does not exist!");
        }
    } catch (Exception e) {
        handleExeption(e, "Could not retrieve metadata for S3 object with key '" + fileName + "'");
    }
    return null;
}
Example #20
Source File: AwsSdkTest.java From s3proxy with Apache License 2.0 | 6 votes |
@Test
public void testAtomicMpuAbort() throws Exception {
    final String key = "testAtomicMpuAbort";
    final ObjectMetadata metadata = new ObjectMetadata();
    metadata.setContentLength(BYTE_SOURCE.size());
    client.putObject(containerName, key, BYTE_SOURCE.openStream(), metadata);

    // Start and immediately abort a multipart upload for the same key.
    final InitiateMultipartUploadResult initResponse = client.initiateMultipartUpload(
            new InitiateMultipartUploadRequest(containerName, key));
    client.abortMultipartUpload(new AbortMultipartUploadRequest(
            containerName, key, initResponse.getUploadId()));

    // The original object must survive the aborted MPU untouched.
    final S3Object object = client.getObject(containerName, key);
    assertThat(object.getObjectMetadata().getContentLength()).isEqualTo(BYTE_SOURCE.size());
    try (InputStream actual = object.getObjectContent();
            InputStream expected = BYTE_SOURCE.openStream()) {
        assertThat(actual).hasContentEqualTo(expected);
    }
}
Example #21
Source File: AwsSdkAnonymousTest.java From s3proxy with Apache License 2.0 | 6 votes |
@Test
public void testAwsV4SignatureChunkedAnonymous() throws Exception {
    // Client with chunked encoding enabled against the test endpoint.
    client = AmazonS3ClientBuilder.standard()
            .withChunkedEncodingDisabled(false)
            .withEndpointConfiguration(s3EndpointConfig)
            .build();

    final ObjectMetadata metadata = new ObjectMetadata();
    metadata.setContentLength(BYTE_SOURCE.size());
    client.putObject(containerName, "foo", BYTE_SOURCE.openStream(), metadata);

    final S3Object object = client.getObject(containerName, "foo");
    assertThat(object.getObjectMetadata().getContentLength()).isEqualTo(BYTE_SOURCE.size());
    try (InputStream actualContent = object.getObjectContent();
            InputStream expectedContent = BYTE_SOURCE.openStream()) {
        assertThat(actualContent).hasContentEqualTo(expectedContent);
    }
}
Example #22
Source File: S3Operations.java From ats-framework with Apache License 2.0 | 5 votes |
/**
 * Download an object data as a file
 *
 * @param remoteObjectName the name of object/key which contents should be downloaded
 * @param localFileName the location and file name on the local machine, where the file will be downloaded
 * @throws S3OperationException if there is an error during data transfer
 */
@PublicAtsApi
public void download( String remoteObjectName, String localFileName ) throws S3OperationException, IllegalArgumentException {

    localFileName = IoUtils.normalizeFilePath(localFileName);
    String localDirName = IoUtils.getFilePath(localFileName);
    String localFileOnlyName = IoUtils.getFileName(localFileName);

    // Make sure the target directory exists (and is a directory) before opening the output file.
    File localDir = new File(localDirName);
    if (localDir.exists()) {
        if (localDir.isFile()) {
            // A regular file already occupies the directory path — cannot place the download there.
            throw new IllegalArgumentException("Could not create file " + localFileOnlyName + " into existing file " + localDirName);
        }
        // else dir exists
    } else {
        LOG.debug("Creating target directory path " + localDirName);
        if (!localDir.mkdirs()) {
            throw new S3OperationException("Could not create local directory path '" + localDirName + "' for local file specified '" + localFileName + "'");
        }
    }

    S3Object obj = s3Client.getObject(bucketName, remoteObjectName);
    // try-with-resources closes both streams; closing the S3 input stream releases the HTTP connection.
    try (BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(new File(localFileName)));
            S3ObjectInputStream s3is = obj.getObjectContent();) {
        byte[] readBuffArr = new byte[4096];
        int readBytes = 0;
        // read() returns -1 at end of stream; >= 0 also copies zero-length reads harmlessly.
        while ( (readBytes = s3is.read(readBuffArr)) >= 0) {
            bos.write(readBuffArr, 0, readBytes);
        }
    } catch (Exception e) {
        // handleExeption wraps and rethrows, so on failure control never reaches the success log below.
        handleExeption(e, "Error while downloading object " + remoteObjectName + " to local file " + localFileName
                + ". If error persists check your endpoint, credentials and permissions.");
    }
    LOG.info("S3 object '" + remoteObjectName + "; is downloaded successfully from bucket '" + bucketName + "' to file " + localFileName);
}
Example #23
Source File: TestFetchS3Object.java From nifi with Apache License 2.0 | 5 votes |
@Test
public void testGetObjectVersion() throws IOException {
    runner.setProperty(FetchS3Object.REGION, "us-east-1");
    runner.setProperty(FetchS3Object.BUCKET, "request-bucket");
    runner.setProperty(FetchS3Object.VERSION_ID, "${s3.version}");

    final Map<String, String> attributes = new HashMap<>();
    attributes.put("filename", "request-key");
    attributes.put("s3.version", "request-version");
    runner.enqueue(new byte[0], attributes);

    // Metadata spy: real setters, stubbed version id.
    final ObjectMetadata metadata = Mockito.spy(ObjectMetadata.class);
    metadata.setContentDisposition("key/path/to/file.txt");
    Mockito.when(metadata.getVersionId()).thenReturn("response-version");

    final S3Object s3ObjectResponse = new S3Object();
    s3ObjectResponse.setBucketName("response-bucket-name");
    s3ObjectResponse.setObjectContent(new StringInputStream("Some Content"));
    s3ObjectResponse.setObjectMetadata(metadata);
    Mockito.when(mockS3Client.getObject(Mockito.any())).thenReturn(s3ObjectResponse);

    runner.run(1);

    // The request must carry the bucket, key and resolved version.
    final ArgumentCaptor<GetObjectRequest> requestCaptor = ArgumentCaptor.forClass(GetObjectRequest.class);
    Mockito.verify(mockS3Client, Mockito.times(1)).getObject(requestCaptor.capture());
    final GetObjectRequest request = requestCaptor.getValue();
    assertEquals("request-bucket", request.getBucketName());
    assertEquals("request-key", request.getKey());
    assertEquals("request-version", request.getVersionId());

    // The flow file must reflect the response attributes and content.
    runner.assertAllFlowFilesTransferred(FetchS3Object.REL_SUCCESS, 1);
    final MockFlowFile flowFile = runner.getFlowFilesForRelationship(FetchS3Object.REL_SUCCESS).get(0);
    flowFile.assertAttributeEquals("s3.bucket", "response-bucket-name");
    flowFile.assertAttributeEquals(CoreAttributes.FILENAME.key(), "file.txt");
    flowFile.assertAttributeEquals(CoreAttributes.PATH.key(), "key/path/to");
    flowFile.assertAttributeEquals(CoreAttributes.ABSOLUTE_PATH.key(), "key/path/to/file.txt");
    flowFile.assertAttributeEquals("s3.version", "response-version");
    flowFile.assertContentEquals("Some Content");
}
Example #24
Source File: S3Operations.java From ats-framework with Apache License 2.0 | 5 votes |
/** * Get object/file contents from the remote storage. * <p> * <em>Note</em> that user should close stream after reading in order to release resources and do not * block connection from the pool. * </p> * @param objectName the object/key name that should be downloaded * @return InputStream that could be used to download object contents */ @PublicAtsApi public InputStream download( String objectName ) { try { S3Object o = s3Client.getObject(bucketName, objectName); return o.getObjectContent(); } catch (Exception e) { handleExeption(e, "Could get contents for S3 object with key '" + objectName + "'"); return null; // needed because of compiler limitation. Above handleException() always throws exception } }
Example #25
Source File: Configuration.java From XRTB with Apache License 2.0 | 5 votes |
/**
 * Loads a shared lookup structure of the given type from an S3 object and
 * returns (and logs) a human-readable summary of what was loaded.
 */
public static String readData(String type, String name, S3Object object, long size) throws Exception {
    String message = "";
    switch (type) {
        // "range" and "cidr" intentionally share the NavMap loader (fall-through).
        case "range":
        case "cidr":
            NavMap navMap = new NavMap(name, object);
            message = "Added NavMap " + name + ": has " + navMap.size() + " members";
            break;
        case "set":
            SimpleSet simpleSet = new SimpleSet(name, object);
            message = "Initialize Set: " + name + " from S3, entries = " + simpleSet.size();
            break;
        case "bloom":
            Bloom bloomFilter = new Bloom(name, object, size);
            message = "Initialize Bloom Filter: " + name + " from S3, members = " + bloomFilter.getMembers();
            break;
        case "cuckoo":
            Cuckoo cuckooFilter = new Cuckoo(name, object, size);
            message = "Initialize Cuckoo Filter: " + name + " from S3, entries = " + cuckooFilter.getMembers();
            break;
        case "multiset":
            SimpleMultiset multiset = new SimpleMultiset(name, object);
            message = "Initialize Multiset " + name + " from S3, entries = " + multiset.getMembers();
            break;
        default:
            message = "Unknown type: " + type;
    }
    logger.info("*** {}", message);
    return message;
}
Example #26
Source File: AmazonBucketClientImplTest.java From molgenis with GNU Lesser General Public License v3.0 | 5 votes |
@BeforeEach
void setUp() {
    // Fresh collaborator mocks per test; only the class under test is real.
    s3Object = mock(S3Object.class);
    httpRequestBase = mock(HttpRequestBase.class);
    client = mock(AmazonS3Client.class);
    fileStore = mock(FileStore.class);
    amazonBucketClient = new AmazonBucketClientImpl();
}
Example #27
Source File: AmazonS3FileSystem.java From iaf with Apache License 2.0 | 5 votes |
/**
 * Renames a file by copying it to the destination key and deleting the source key.
 * Fails when the destination already exists.
 */
@Override
public S3Object renameFile(S3Object f, String destinationFile, boolean force) throws FileSystemException {
    boolean destinationExists = s3Client.doesObjectExist(bucketName, destinationFile);
    if (destinationExists) {
        throw new FileSystemException("Cannot rename file. Destination file already exists.");
    }
    String sourceKey = f.getKey();
    s3Client.copyObject(bucketName, sourceKey, bucketName, destinationFile);
    s3Client.deleteObject(bucketName, sourceKey);
    return toFile(destinationFile);
}
Example #28
Source File: S3S3CopierTest.java From circus-train with Apache License 2.0 | 5 votes |
@Test
public void copyOneObjectPartitionedHandlingS3ASchemes() throws Exception {
    client.putObject("source", "year=2016/data", inputData);

    // s3a:// scheme on both sides must be handled transparently.
    final Path sourceBaseLocation = new Path("s3a://source/");
    final Path replicaLocation = new Path("s3a://target/foo/");
    final List<Path> sourceSubLocations = Lists.newArrayList(new Path(sourceBaseLocation, "year=2016"));

    newS3S3Copier(sourceBaseLocation, sourceSubLocations, replicaLocation).copy();

    final S3Object replica = client.getObject("target", "foo/year=2016/data");
    assertThat(IOUtils.toString(replica.getObjectContent()), is("bar foo"));
}
Example #29
Source File: SecureShellAuthentication.java From github-bucket with ISC License | 5 votes |
/**
 * Builds a JSch-based SSH session factory whose host-key database and identity key
 * are loaded from the given S3 bucket (".ssh/known_hosts" and ".ssh/id_rsa") instead
 * of the local file system.
 */
public SecureShellAuthentication(Bucket bucket, AmazonS3 client) {
    factory = new JschConfigSessionFactory() {

        @Override
        public synchronized RemoteSession getSession(URIish uri, CredentialsProvider credentialsProvider, FS fs, int tms) throws TransportException {
            // Do not check for default ssh user config
            fs.setUserHome(null);
            return super.getSession(uri, credentialsProvider, fs, tms);
        }

        @Override
        protected void configure(OpenSshConfig.Host host, Session session) {
            session.setConfig("HashKnownHosts", "no");
            if ("localhost".equalsIgnoreCase(host.getHostName())) {
                // Local endpoints will not match known_hosts entries, so skip strict checking.
                session.setConfig("StrictHostKeyChecking", "no");
            }
        }

        @Override
        protected void configureJSch(JSch jsch) {
            S3Object file;
            // Host-key database comes from the bucket; failure to load is treated as misconfiguration.
            file = client.getObject(bucket.getName(), ".ssh/known_hosts");
            try (InputStream is = file.getObjectContent()) {
                jsch.setKnownHosts(is);
            } catch (IOException | JSchException e) {
                throw new IllegalArgumentException("Missing known hosts file on s3: .ssh/known_hosts", e);
            }
            // Private key for the "git" identity; empty passphrase.
            file = client.getObject(bucket.getName(), ".ssh/id_rsa");
            try (InputStream is = file.getObjectContent()) {
                jsch.addIdentity("git", IOUtils.toByteArray(is), null, new byte[0]);
            } catch (IOException | JSchException e) {
                throw new IllegalArgumentException("Missing key file on s3: .ssh/id_rsa", e);
            }
        }
    };
}
Example #30
Source File: DownloadCallable.java From aws-codepipeline-plugin-for-jenkins with Apache License 2.0 | 5 votes |
/**
 * Downloads and extracts every input artifact of the CodePipeline job into the workspace.
 * Any failure is surfaced as an InterruptedException so the Jenkins build aborts.
 */
@Override
public Void invoke(final File workspace, final VirtualChannel channel) throws InterruptedException {
    clearWorkspaceIfSelected(workspace, listener);

    // Build AWS clients from the job's configured credentials and proxy settings.
    final AWSClients awsClients = awsClientFactory.getAwsClient(
            model.getAwsAccessKey(),
            model.getAwsSecretKey(),
            model.getProxyHost(),
            model.getProxyPort(),
            model.getRegion(),
            pluginUserAgentPrefix);

    // Job-scoped credentials: S3 access is authorized per CodePipeline job.
    final AWSCodePipelineJobCredentialsProvider credentialsProvider = new AWSCodePipelineJobCredentialsProvider(
            job.getId(), awsClients.getCodePipelineClient());
    final AmazonS3 s3Client = awsClients.getS3Client(credentialsProvider);

    for (final Artifact artifact : job.getData().getInputArtifacts()) {
        final S3Object sessionObject = getS3Object(s3Client, artifact);

        // Compression type is detected from the object so extraction picks the right decompressor.
        model.setCompressionType(ExtractionTools.getCompressionType(sessionObject, listener));

        // Local temp name = last path segment of the S3 key.
        final String downloadedFileName = Paths.get(sessionObject.getKey()).getFileName().toString();

        try {
            downloadAndExtract(sessionObject, workspace, downloadedFileName, listener);
        } catch (final Exception ex) {
            // Log, then rethrow as InterruptedException — the contract this callable uses to fail the build.
            final String error = "Failed to acquire artifacts: " + ex.getMessage();
            LoggingHelper.log(listener, error);
            LoggingHelper.log(listener, ex);
            throw new InterruptedException(error);
        }
    }

    return null;
}