Java Code Examples for com.amazonaws.retry.PredefinedRetryPolicies#DEFAULT_RETRY_CONDITION
The following examples show how to use com.amazonaws.retry.PredefinedRetryPolicies#DEFAULT_RETRY_CONDITION.
The originating project, source file, and license are noted above each example.
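Before the examples, here is a minimal sketch of the usual wiring: DEFAULT_RETRY_CONDITION is passed as the retry condition when constructing a RetryPolicy, which is then attached to a ClientConfiguration for an AWS client. The client type, region, and maximum attempt count below are illustrative assumptions and are not taken from any of the examples.

import com.amazonaws.ClientConfiguration;
import com.amazonaws.retry.PredefinedRetryPolicies;
import com.amazonaws.retry.RetryPolicy;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;

public class DefaultRetryConditionSketch {
    public static AmazonS3 buildClient() {
        // Combine the SDK's default retry condition and backoff strategy,
        // capping attempts at 5 (an arbitrary illustrative value).
        RetryPolicy retryPolicy = new RetryPolicy(
            PredefinedRetryPolicies.DEFAULT_RETRY_CONDITION,
            PredefinedRetryPolicies.DEFAULT_BACKOFF_STRATEGY,
            5,
            true);

        ClientConfiguration clientConfiguration = new ClientConfiguration().withRetryPolicy(retryPolicy);

        // Any AWS client builder accepts the configuration; S3 is used here only as an example.
        return AmazonS3ClientBuilder.standard()
            .withClientConfiguration(clientConfiguration)
            .withRegion("us-east-1")
            .build();
    }
}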
Example 1
Source File: StsDaoTest.java, from herd (Apache License 2.0)
@Test
public void testGetTemporarySecurityCredentials()
{
    // Create an AWS parameters DTO with proxy settings.
    AwsParamsDto awsParamsDto = new AwsParamsDto();
    awsParamsDto.setHttpProxyHost(HTTP_PROXY_HOST);
    awsParamsDto.setHttpProxyPort(HTTP_PROXY_PORT);

    // Specify the duration, in seconds, of the role session.
    int awsRoleDurationSeconds = INTEGER_VALUE;

    // Create an IAM policy.
    Policy policy = new Policy(STRING_VALUE);

    // Create a retry policy.
    RetryPolicy retryPolicy =
        new RetryPolicy(PredefinedRetryPolicies.DEFAULT_RETRY_CONDITION, PredefinedRetryPolicies.DEFAULT_BACKOFF_STRATEGY, INTEGER_VALUE, true);

    // Create the expected assume role request.
    AssumeRoleRequest assumeRoleRequest = new AssumeRoleRequest().withRoleArn(AWS_ROLE_ARN).withRoleSessionName(SESSION_NAME).withPolicy(policy.toJson())
        .withDurationSeconds(awsRoleDurationSeconds);

    // Create AWS credentials for API authentication.
    Credentials credentials = new Credentials();
    credentials.setAccessKeyId(AWS_ASSUMED_ROLE_ACCESS_KEY);
    credentials.setSecretAccessKey(AWS_ASSUMED_ROLE_SECRET_KEY);
    credentials.setSessionToken(AWS_ASSUMED_ROLE_SESSION_TOKEN);

    // Create an assume role result.
    AssumeRoleResult assumeRoleResult = new AssumeRoleResult();
    assumeRoleResult.setCredentials(credentials);

    // Mock the external calls.
    when(retryPolicyFactory.getRetryPolicy()).thenReturn(retryPolicy);
    when(stsOperations.assumeRole(any(AWSSecurityTokenServiceClient.class), eq(assumeRoleRequest))).thenReturn(assumeRoleResult);

    // Call the method under test.
    Credentials result = stsDaoImpl.getTemporarySecurityCredentials(awsParamsDto, SESSION_NAME, AWS_ROLE_ARN, awsRoleDurationSeconds, policy);

    // Verify the external calls.
    verify(retryPolicyFactory).getRetryPolicy();
    verify(stsOperations).assumeRole(any(AWSSecurityTokenServiceClient.class), eq(assumeRoleRequest));
    verifyNoMoreInteractionsHelper();

    // Validate the returned object.
    assertEquals(credentials, result);
}
Example 2
Source File: StsDaoTest.java, from herd (Apache License 2.0)
@Test
public void testGetTemporarySecurityCredentialsMissingOptionalParameters()
{
    // Create an AWS parameters DTO without proxy settings.
    AwsParamsDto awsParamsDto = new AwsParamsDto();

    // Specify the duration, in seconds, of the role session.
    int awsRoleDurationSeconds = INTEGER_VALUE;

    // Create a retry policy.
    RetryPolicy retryPolicy =
        new RetryPolicy(PredefinedRetryPolicies.DEFAULT_RETRY_CONDITION, PredefinedRetryPolicies.DEFAULT_BACKOFF_STRATEGY, INTEGER_VALUE, true);

    // Create the expected assume role request.
    AssumeRoleRequest assumeRoleRequest =
        new AssumeRoleRequest().withRoleArn(AWS_ROLE_ARN).withRoleSessionName(SESSION_NAME).withDurationSeconds(awsRoleDurationSeconds);

    // Create AWS credentials for API authentication.
    Credentials credentials = new Credentials();
    credentials.setAccessKeyId(AWS_ASSUMED_ROLE_ACCESS_KEY);
    credentials.setSecretAccessKey(AWS_ASSUMED_ROLE_SECRET_KEY);
    credentials.setSessionToken(AWS_ASSUMED_ROLE_SESSION_TOKEN);

    // Create an assume role result.
    AssumeRoleResult assumeRoleResult = new AssumeRoleResult();
    assumeRoleResult.setCredentials(credentials);

    // Mock the external calls.
    when(retryPolicyFactory.getRetryPolicy()).thenReturn(retryPolicy);
    when(stsOperations.assumeRole(any(AWSSecurityTokenServiceClient.class), eq(assumeRoleRequest))).thenReturn(assumeRoleResult);

    // Call the method under test. Please note that we do not specify an IAM policy.
    Credentials result = stsDaoImpl.getTemporarySecurityCredentials(awsParamsDto, SESSION_NAME, AWS_ROLE_ARN, awsRoleDurationSeconds, null);

    // Verify the external calls.
    verify(retryPolicyFactory).getRetryPolicy();
    verify(stsOperations).assumeRole(any(AWSSecurityTokenServiceClient.class), eq(assumeRoleRequest));
    verifyNoMoreInteractionsHelper();

    // Validate the returned object.
    assertEquals(credentials, result);
}
Example 3
Source File: S3DaoImplTest.java, from herd (Apache License 2.0)
@Test
public void testDeleteDirectoryNoS3VersionsExist()
{
    // Create an S3 file transfer request parameters DTO to access S3 objects.
    S3FileTransferRequestParamsDto s3FileTransferRequestParamsDto = new S3FileTransferRequestParamsDto();
    s3FileTransferRequestParamsDto.setS3BucketName(S3_BUCKET_NAME);
    s3FileTransferRequestParamsDto.setS3KeyPrefix(S3_KEY_PREFIX);

    // Create a retry policy.
    RetryPolicy retryPolicy =
        new RetryPolicy(PredefinedRetryPolicies.DEFAULT_RETRY_CONDITION, PredefinedRetryPolicies.DEFAULT_BACKOFF_STRATEGY, INTEGER_VALUE, true);

    // Create an empty version listing.
    VersionListing versionListing = new VersionListing();

    // Mock the external calls.
    when(retryPolicyFactory.getRetryPolicy()).thenReturn(retryPolicy);
    when(s3Operations.listVersions(any(ListVersionsRequest.class), any(AmazonS3Client.class))).thenReturn(versionListing);

    // Call the method under test.
    s3DaoImpl.deleteDirectory(s3FileTransferRequestParamsDto);

    // Verify the external calls.
    verify(retryPolicyFactory).getRetryPolicy();
    verify(s3Operations).listVersions(any(ListVersionsRequest.class), any(AmazonS3Client.class));
    verifyNoMoreInteractionsHelper();
}
Example 4
Source File: ApiImporterDefaultModule.java, from aws-apigateway-importer (Apache License 2.0)
@Provides
protected ApiGateway provideAmazonApiGateway(AWSCredentialsProvider credsProvider, RetryPolicy.BackoffStrategy backoffStrategy,
        @Named("region") String region) {
    final RetryPolicy retrypolicy = new RetryPolicy(PredefinedRetryPolicies.DEFAULT_RETRY_CONDITION, backoffStrategy, 5, true);

    final ClientConfiguration clientConfig = new ClientConfiguration().withUserAgent(USER_AGENT).withRetryPolicy(retrypolicy);

    return new AmazonApiGateway(getEndpoint(region)).with(credsProvider).with(clientConfig).getApiGateway();
}
Example 5
Source File: S3DaoImplTest.java, from herd (Apache License 2.0)
@Test
public void testDeleteDirectoryMultiObjectDeleteException()
{
    // Create an S3 file transfer request parameters DTO to access S3 objects.
    S3FileTransferRequestParamsDto s3FileTransferRequestParamsDto = new S3FileTransferRequestParamsDto();
    s3FileTransferRequestParamsDto.setS3BucketName(S3_BUCKET_NAME);
    s3FileTransferRequestParamsDto.setS3KeyPrefix(S3_KEY_PREFIX);

    // Create a retry policy.
    RetryPolicy retryPolicy =
        new RetryPolicy(PredefinedRetryPolicies.DEFAULT_RETRY_CONDITION, PredefinedRetryPolicies.DEFAULT_BACKOFF_STRATEGY, INTEGER_VALUE, true);

    // Create an S3 version summary.
    S3VersionSummary s3VersionSummary = new S3VersionSummary();
    s3VersionSummary.setKey(S3_KEY);
    s3VersionSummary.setVersionId(S3_VERSION_ID);

    // Create a version listing.
    VersionListing versionListing = new VersionListing();
    versionListing.setVersionSummaries(Collections.singletonList(s3VersionSummary));

    // Create a delete error.
    MultiObjectDeleteException.DeleteError deleteError = new MultiObjectDeleteException.DeleteError();
    deleteError.setKey(S3_KEY);
    deleteError.setVersionId(S3_VERSION_ID);
    deleteError.setCode(ERROR_CODE);
    deleteError.setMessage(ERROR_MESSAGE);

    // Create a multi object delete exception.
    MultiObjectDeleteException multiObjectDeleteException = new MultiObjectDeleteException(Collections.singletonList(deleteError), new ArrayList<>());

    // Mock the external calls.
    when(retryPolicyFactory.getRetryPolicy()).thenReturn(retryPolicy);
    when(s3Operations.listVersions(any(ListVersionsRequest.class), any(AmazonS3Client.class))).thenReturn(versionListing);
    when(s3Operations.deleteObjects(any(DeleteObjectsRequest.class), any(AmazonS3Client.class))).thenThrow(multiObjectDeleteException);

    // Try to call the method under test.
    try
    {
        s3DaoImpl.deleteDirectory(s3FileTransferRequestParamsDto);
    }
    catch (IllegalStateException e)
    {
        assertEquals(String.format(
            "Failed to delete keys/key versions with prefix \"%s\" from bucket \"%s\". Reason: One or more objects could not be deleted " +
                "(Service: null; Status Code: 0; Error Code: null; Request ID: null; S3 Extended Request ID: null)", S3_KEY_PREFIX, S3_BUCKET_NAME),
            e.getMessage());
    }

    // Verify the external calls.
    verify(retryPolicyFactory, times(2)).getRetryPolicy();
    verify(s3Operations).listVersions(any(ListVersionsRequest.class), any(AmazonS3Client.class));
    verify(s3Operations).deleteObjects(any(DeleteObjectsRequest.class), any(AmazonS3Client.class));
    verifyNoMoreInteractionsHelper();
}
Example 6
Source File: S3DaoImplTest.java, from herd (Apache License 2.0)
@Test
public void testRestoreObjectsInDeepArchiveWithExpeditedArchiveRetrievalOption()
{
    List<File> files = Collections.singletonList(new File(TEST_FILE));

    // Create an S3 file transfer request parameters DTO to access S3 objects.
    S3FileTransferRequestParamsDto s3FileTransferRequestParamsDto = new S3FileTransferRequestParamsDto();
    s3FileTransferRequestParamsDto.setS3BucketName(S3_BUCKET_NAME);
    s3FileTransferRequestParamsDto.setS3KeyPrefix(S3_KEY_PREFIX);
    s3FileTransferRequestParamsDto.setFiles(files);

    // Create a retry policy.
    RetryPolicy retryPolicy =
        new RetryPolicy(PredefinedRetryPolicies.DEFAULT_RETRY_CONDITION, PredefinedRetryPolicies.DEFAULT_BACKOFF_STRATEGY, INTEGER_VALUE, true);

    // Create an Object Metadata with DeepArchive storage class.
    ObjectMetadata objectMetadata = new ObjectMetadata();
    objectMetadata.setOngoingRestore(false);
    objectMetadata.setHeader(Headers.STORAGE_CLASS, StorageClass.DeepArchive);

    ArgumentCaptor<AmazonS3Client> s3ClientCaptor = ArgumentCaptor.forClass(AmazonS3Client.class);
    ArgumentCaptor<String> s3BucketNameCaptor = ArgumentCaptor.forClass(String.class);
    ArgumentCaptor<String> keyCaptor = ArgumentCaptor.forClass(String.class);

    // Mock the external calls.
    when(retryPolicyFactory.getRetryPolicy()).thenReturn(retryPolicy);
    when(s3Operations.getObjectMetadata(s3BucketNameCaptor.capture(), keyCaptor.capture(), s3ClientCaptor.capture())).thenReturn(objectMetadata);
    doThrow(new AmazonServiceException("Retrieval option is not supported by this storage class")).when(s3Operations)
        .restoreObject(any(RestoreObjectRequest.class), any(AmazonS3.class));

    try
    {
        s3DaoImpl.restoreObjects(s3FileTransferRequestParamsDto, EXPIRATION_IN_DAYS, Tier.Expedited.toString());
        fail();
    }
    catch (IllegalArgumentException e)
    {
        assertEquals(String.format("Failed to initiate a restore request for \"%s\" key in \"%s\" bucket. " +
                "Reason: Retrieval option is not supported by this storage class (Service: null; Status Code: 0; Error Code: null; Request ID: null)",
            TEST_FILE, S3_BUCKET_NAME), e.getMessage());
    }
}
Example 7
Source File: S3DaoImplTest.java, from herd (Apache License 2.0)
private void testRestoreObjectsWithS3Exception(String exceptionMessage, int statusCode)
{
    List<File> files = Collections.singletonList(new File(TEST_FILE));

    // Create an S3 file transfer request parameters DTO to access S3 objects.
    S3FileTransferRequestParamsDto s3FileTransferRequestParamsDto = new S3FileTransferRequestParamsDto();
    s3FileTransferRequestParamsDto.setS3BucketName(S3_BUCKET_NAME);
    s3FileTransferRequestParamsDto.setS3KeyPrefix(S3_KEY_PREFIX);
    s3FileTransferRequestParamsDto.setFiles(files);

    // Create a retry policy.
    RetryPolicy retryPolicy =
        new RetryPolicy(PredefinedRetryPolicies.DEFAULT_RETRY_CONDITION, PredefinedRetryPolicies.DEFAULT_BACKOFF_STRATEGY, INTEGER_VALUE, true);

    // Create an Object Metadata.
    ObjectMetadata objectMetadata = new ObjectMetadata();
    objectMetadata.setOngoingRestore(false);
    objectMetadata.setHeader(Headers.STORAGE_CLASS, StorageClass.DeepArchive);

    ArgumentCaptor<AmazonS3Client> s3ClientCaptor = ArgumentCaptor.forClass(AmazonS3Client.class);
    ArgumentCaptor<String> s3BucketNameCaptor = ArgumentCaptor.forClass(String.class);
    ArgumentCaptor<String> keyCaptor = ArgumentCaptor.forClass(String.class);
    ArgumentCaptor<RestoreObjectRequest> requestStoreCaptor = ArgumentCaptor.forClass(RestoreObjectRequest.class);

    // Create an Amazon S3 Exception.
    AmazonS3Exception amazonS3Exception = new AmazonS3Exception(exceptionMessage);
    amazonS3Exception.setStatusCode(statusCode);

    // Mock the external calls.
    when(retryPolicyFactory.getRetryPolicy()).thenReturn(retryPolicy);
    when(s3Operations.getObjectMetadata(s3BucketNameCaptor.capture(), keyCaptor.capture(), s3ClientCaptor.capture())).thenReturn(objectMetadata);
    doThrow(amazonS3Exception).when(s3Operations).restoreObject(requestStoreCaptor.capture(), s3ClientCaptor.capture());

    try
    {
        // Call the method under test.
        s3DaoImpl.restoreObjects(s3FileTransferRequestParamsDto, EXPIRATION_IN_DAYS, Tier.Standard.toString());

        // If this is not a restore already in progress exception message (409) then we should have caught an exception.
        // Else if this is a restore already in progress message (409) then continue as usual.
        if (!exceptionMessage.equals(RESTORE_ALREADY_IN_PROGRESS_EXCEPTION_MESSAGE))
        {
            // Should not be here. Fail!
            fail();
        }
        else
        {
            RestoreObjectRequest requestStore = requestStoreCaptor.getValue();
            assertEquals(S3_BUCKET_NAME, s3BucketNameCaptor.getValue());
            assertEquals(TEST_FILE, keyCaptor.getValue());

            // Verify Bulk option is used when the option is not provided.
            assertEquals(StringUtils.isNotEmpty(Tier.Standard.toString()) ? Tier.Standard.toString() : Tier.Bulk.toString(),
                requestStore.getGlacierJobParameters().getTier());
        }
    }
    catch (IllegalStateException illegalStateException)
    {
        assertEquals(String.format("Failed to initiate a restore request for \"%s\" key in \"%s\" bucket. " +
                "Reason: com.amazonaws.services.s3.model.AmazonS3Exception: %s " +
                "(Service: null; Status Code: %s; Error Code: null; Request ID: null; S3 Extended Request ID: null), S3 Extended Request ID: null",
            TEST_FILE, S3_BUCKET_NAME, exceptionMessage, statusCode), illegalStateException.getMessage());
    }

    // Verify the external calls.
    verify(retryPolicyFactory).getRetryPolicy();
    verify(s3Operations).getObjectMetadata(anyString(), anyString(), any(AmazonS3Client.class));
    verify(s3Operations).restoreObject(any(RestoreObjectRequest.class), any(AmazonS3Client.class));
    verifyNoMoreInteractionsHelper();
}
Example 8
Source File: S3DaoImplTest.java, from herd (Apache License 2.0)
/**
 * Runs the restore objects method.
 *
 * @param archiveRetrievalOption the archive retrieval option
 * @param storageClass the storage class
 */
private void runRestoreObjects(String archiveRetrievalOption, StorageClass storageClass)
{
    List<File> files = Collections.singletonList(new File(TEST_FILE));

    // Create an S3 file transfer request parameters DTO to access S3 objects.
    S3FileTransferRequestParamsDto s3FileTransferRequestParamsDto = new S3FileTransferRequestParamsDto();
    s3FileTransferRequestParamsDto.setS3BucketName(S3_BUCKET_NAME);
    s3FileTransferRequestParamsDto.setS3KeyPrefix(S3_KEY_PREFIX);
    s3FileTransferRequestParamsDto.setFiles(files);

    // Create a retry policy.
    RetryPolicy retryPolicy =
        new RetryPolicy(PredefinedRetryPolicies.DEFAULT_RETRY_CONDITION, PredefinedRetryPolicies.DEFAULT_BACKOFF_STRATEGY, INTEGER_VALUE, true);

    // Create an Object Metadata.
    ObjectMetadata objectMetadata = new ObjectMetadata();
    objectMetadata.setOngoingRestore(false);
    objectMetadata.setHeader(Headers.STORAGE_CLASS, storageClass);

    ArgumentCaptor<AmazonS3Client> s3ClientCaptor = ArgumentCaptor.forClass(AmazonS3Client.class);
    ArgumentCaptor<String> s3BucketNameCaptor = ArgumentCaptor.forClass(String.class);
    ArgumentCaptor<String> keyCaptor = ArgumentCaptor.forClass(String.class);
    ArgumentCaptor<RestoreObjectRequest> requestStoreCaptor = ArgumentCaptor.forClass(RestoreObjectRequest.class);

    // Mock the external calls.
    when(retryPolicyFactory.getRetryPolicy()).thenReturn(retryPolicy);
    when(s3Operations.getObjectMetadata(s3BucketNameCaptor.capture(), keyCaptor.capture(), s3ClientCaptor.capture())).thenReturn(objectMetadata);
    doNothing().when(s3Operations).restoreObject(requestStoreCaptor.capture(), s3ClientCaptor.capture());

    // Call the method under test.
    s3DaoImpl.restoreObjects(s3FileTransferRequestParamsDto, EXPIRATION_IN_DAYS, archiveRetrievalOption);

    RestoreObjectRequest requestStore = requestStoreCaptor.getValue();
    assertEquals(S3_BUCKET_NAME, s3BucketNameCaptor.getValue());
    assertEquals(TEST_FILE, keyCaptor.getValue());

    // Verify Bulk option is used when the option is not provided.
    assertEquals(StringUtils.isNotEmpty(archiveRetrievalOption) ? archiveRetrievalOption : Tier.Bulk.toString(),
        requestStore.getGlacierJobParameters().getTier());

    // Verify the external calls.
    verify(retryPolicyFactory).getRetryPolicy();
    verify(s3Operations).getObjectMetadata(anyString(), anyString(), any(AmazonS3Client.class));
    verify(s3Operations).restoreObject(any(RestoreObjectRequest.class), any(AmazonS3Client.class));
    verifyNoMoreInteractionsHelper();
}
Example 9
Source File: S3DaoImplTest.java, from herd (Apache License 2.0)
private void runTagObjectsTest()
{
    // Create an S3 file transfer request parameters DTO to access S3 objects.
    S3FileTransferRequestParamsDto s3FileTransferRequestParamsDto = new S3FileTransferRequestParamsDto();
    s3FileTransferRequestParamsDto.setS3BucketName(S3_BUCKET_NAME);

    // Create an S3 file transfer request parameters DTO to tag S3 objects.
    S3FileTransferRequestParamsDto s3ObjectTaggerParamsDto = new S3FileTransferRequestParamsDto();
    s3ObjectTaggerParamsDto.setAwsAccessKeyId(AWS_ASSUMED_ROLE_ACCESS_KEY);
    s3ObjectTaggerParamsDto.setAwsSecretKey(AWS_ASSUMED_ROLE_SECRET_KEY);
    s3ObjectTaggerParamsDto.setSessionToken(AWS_ASSUMED_ROLE_SESSION_TOKEN);

    // Create an S3 object summary.
    S3ObjectSummary s3ObjectSummary = new S3ObjectSummary();
    s3ObjectSummary.setKey(S3_KEY);

    // Create an S3 object tag.
    Tag tag = new Tag(S3_OBJECT_TAG_KEY, S3_OBJECT_TAG_VALUE);

    // Create a retry policy.
    RetryPolicy retryPolicy =
        new RetryPolicy(PredefinedRetryPolicies.DEFAULT_RETRY_CONDITION, PredefinedRetryPolicies.DEFAULT_BACKOFF_STRATEGY, INTEGER_VALUE, true);

    // Create a get object tagging result.
    GetObjectTaggingResult getObjectTaggingResult = new GetObjectTaggingResult(null);

    // Create a set object tagging result.
    SetObjectTaggingResult setObjectTaggingResult = new SetObjectTaggingResult();

    // Mock the external calls.
    when(retryPolicyFactory.getRetryPolicy()).thenReturn(retryPolicy);
    when(s3Operations.getObjectTagging(any(GetObjectTaggingRequest.class), any(AmazonS3Client.class))).thenReturn(getObjectTaggingResult);
    when(s3Operations.setObjectTagging(any(SetObjectTaggingRequest.class), any(AmazonS3Client.class))).thenReturn(setObjectTaggingResult);

    // Call the method under test.
    s3DaoImpl.tagObjects(s3FileTransferRequestParamsDto, s3ObjectTaggerParamsDto, Collections.singletonList(s3ObjectSummary), tag);

    // Verify the external calls.
    verify(retryPolicyFactory, times(2)).getRetryPolicy();
    verify(s3Operations).getObjectTagging(any(GetObjectTaggingRequest.class), any(AmazonS3Client.class));
    verify(s3Operations).setObjectTagging(any(SetObjectTaggingRequest.class), any(AmazonS3Client.class));
    verifyNoMoreInteractionsHelper();
}
Example 10
Source File: S3DaoImplTest.java, from herd (Apache License 2.0)
private void runTagVersionsTest()
{
    // Create an S3 file transfer request parameters DTO to access S3 objects.
    S3FileTransferRequestParamsDto s3FileTransferRequestParamsDto = new S3FileTransferRequestParamsDto();
    s3FileTransferRequestParamsDto.setS3BucketName(S3_BUCKET_NAME);

    // Create an S3 file transfer request parameters DTO to tag S3 objects.
    S3FileTransferRequestParamsDto s3ObjectTaggerParamsDto = new S3FileTransferRequestParamsDto();
    s3ObjectTaggerParamsDto.setAwsAccessKeyId(AWS_ASSUMED_ROLE_ACCESS_KEY);
    s3ObjectTaggerParamsDto.setAwsSecretKey(AWS_ASSUMED_ROLE_SECRET_KEY);
    s3ObjectTaggerParamsDto.setSessionToken(AWS_ASSUMED_ROLE_SESSION_TOKEN);

    // Create an S3 version summary.
    S3VersionSummary s3VersionSummary = new S3VersionSummary();
    s3VersionSummary.setKey(S3_KEY);
    s3VersionSummary.setVersionId(S3_VERSION_ID);

    // Create an S3 object tag.
    Tag tag = new Tag(S3_OBJECT_TAG_KEY, S3_OBJECT_TAG_VALUE);

    // Create a retry policy.
    RetryPolicy retryPolicy =
        new RetryPolicy(PredefinedRetryPolicies.DEFAULT_RETRY_CONDITION, PredefinedRetryPolicies.DEFAULT_BACKOFF_STRATEGY, INTEGER_VALUE, true);

    // Create a get object tagging result.
    GetObjectTaggingResult getObjectTaggingResult = new GetObjectTaggingResult(null);

    // Create a set object tagging result.
    SetObjectTaggingResult setObjectTaggingResult = new SetObjectTaggingResult();

    // Mock the external calls.
    when(retryPolicyFactory.getRetryPolicy()).thenReturn(retryPolicy);
    when(s3Operations.getObjectTagging(any(GetObjectTaggingRequest.class), any(AmazonS3Client.class))).thenReturn(getObjectTaggingResult);
    when(s3Operations.setObjectTagging(any(SetObjectTaggingRequest.class), any(AmazonS3Client.class))).thenReturn(setObjectTaggingResult);

    // Call the method under test.
    s3DaoImpl.tagVersions(s3FileTransferRequestParamsDto, s3ObjectTaggerParamsDto, Collections.singletonList(s3VersionSummary), tag);

    // Verify the external calls.
    verify(retryPolicyFactory, times(2)).getRetryPolicy();
    verify(s3Operations).getObjectTagging(any(GetObjectTaggingRequest.class), any(AmazonS3Client.class));
    verify(s3Operations).setObjectTagging(any(SetObjectTaggingRequest.class), any(AmazonS3Client.class));
    verifyNoMoreInteractionsHelper();
}
Example 11
Source File: InstrumentedRetryCondition.java, from kork (Apache License 2.0)
public InstrumentedRetryCondition(Registry registry) {
    this(registry, PredefinedRetryPolicies.DEFAULT_RETRY_CONDITION);
}
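Example 11 only shows a constructor that delegates to the default condition. As a rough sketch of the underlying pattern (not the actual kork implementation), a custom RetryCondition can wrap DEFAULT_RETRY_CONDITION and add side effects such as metrics; the simple counter below stands in for whatever instrumentation the wrapping class records:

import java.util.concurrent.atomic.AtomicLong;

import com.amazonaws.AmazonClientException;
import com.amazonaws.AmazonWebServiceRequest;
import com.amazonaws.retry.PredefinedRetryPolicies;
import com.amazonaws.retry.RetryPolicy;

public class CountingRetryCondition implements RetryPolicy.RetryCondition {
    private final RetryPolicy.RetryCondition delegate = PredefinedRetryPolicies.DEFAULT_RETRY_CONDITION;
    private final AtomicLong retriesConsidered = new AtomicLong();

    @Override
    public boolean shouldRetry(AmazonWebServiceRequest originalRequest, AmazonClientException exception, int retriesAttempted) {
        // Record every retry decision, then defer to the SDK's default condition.
        retriesConsidered.incrementAndGet();
        return delegate.shouldRetry(originalRequest, exception, retriesAttempted);
    }

    public long getRetriesConsidered() {
        return retriesConsidered.get();
    }
}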
Example 12
Source File: RetryPolicyFactory.java, from herd (Apache License 2.0)
/**
 * Gets the application's default retry policy. The policy uses PredefinedRetryPolicies.DEFAULT_RETRY_CONDITION and a SimpleExponentialBackoffStrategy;
 * the maximum number of attempts is dynamically configurable through AWS_MAX_RETRY_ATTEMPT.
 *
 * @return RetryPolicy
 */
public RetryPolicy getRetryPolicy()
{
    return new RetryPolicy(PredefinedRetryPolicies.DEFAULT_RETRY_CONDITION, backoffStrategy,
        configurationHelper.getProperty(ConfigurationValue.AWS_MAX_RETRY_ATTEMPT, Integer.class), true);
}
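For completeness, here is a hedged sketch of how a policy returned by such a factory might be attached to an STS client, in the spirit of the StsDao tests above. The class and method names are illustrative assumptions, not the herd implementation:

import com.amazonaws.ClientConfiguration;
import com.amazonaws.retry.RetryPolicy;
import com.amazonaws.services.securitytoken.AWSSecurityTokenService;
import com.amazonaws.services.securitytoken.AWSSecurityTokenServiceClientBuilder;

public final class StsClientSketch {
    private StsClientSketch() {
    }

    // Build an STS client that honors a factory-supplied retry policy and optional proxy settings.
    public static AWSSecurityTokenService createStsClient(RetryPolicy retryPolicy, String httpProxyHost, Integer httpProxyPort) {
        ClientConfiguration clientConfiguration = new ClientConfiguration().withRetryPolicy(retryPolicy);

        // Mirror the optional HTTP proxy settings carried by AwsParamsDto in the tests above.
        if (httpProxyHost != null && httpProxyPort != null) {
            clientConfiguration.setProxyHost(httpProxyHost);
            clientConfiguration.setProxyPort(httpProxyPort);
        }

        return AWSSecurityTokenServiceClientBuilder.standard().withClientConfiguration(clientConfiguration).build();
    }
}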