Java Code Examples for com.amazonaws.auth.DefaultAWSCredentialsProviderChain#getCredentials()
The following examples show how to use com.amazonaws.auth.DefaultAWSCredentialsProviderChain#getCredentials().
Each example is taken from an open-source project; the original source file and license are noted above the code.
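Before the examples, here is a minimal, self-contained sketch of the call they all revolve around (the class and main method are illustrative, not taken from any of the projects below):

import com.amazonaws.auth.AWSCredentials;
import com.amazonaws.auth.DefaultAWSCredentialsProviderChain;

public class CredentialsDemo {
    public static void main(String[] args) {
        // The chain tries, in order: environment variables, Java system properties,
        // the web identity token, the shared credentials file (~/.aws/credentials),
        // and container/instance profile metadata. getCredentials() throws an
        // AmazonClientException if no provider in the chain yields credentials.
        AWSCredentials credentials = DefaultAWSCredentialsProviderChain.getInstance().getCredentials();
        System.out.println("Resolved access key id: " + credentials.getAWSAccessKeyId());
    }
}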
Example 1
Source File: AmazonS3Manager.java From carina with Apache License 2.0
/**
 * Method to download a file from S3 to the local file system.
 *
 * @param bucketName AWS S3 bucket name
 * @param key object key (example: android/apkFolder/ApkName.apk)
 * @param file local file
 * @param pollingInterval polling interval in seconds for S3 download status checks
 */
public void download(final String bucketName, final String key, final File file, long pollingInterval) {
    LOGGER.info("App will be downloaded from s3.");
    LOGGER.info(String.format("[Bucket name: %s] [Key: %s] [File: %s]", bucketName, key, file.getAbsolutePath()));

    DefaultAWSCredentialsProviderChain credentialProviderChain = new DefaultAWSCredentialsProviderChain();
    TransferManager tx = new TransferManager(credentialProviderChain.getCredentials());
    Download appDownload = tx.download(bucketName, key, file);
    try {
        LOGGER.info("Transfer: " + appDownload.getDescription());
        LOGGER.info("  State: " + appDownload.getState());
        LOGGER.info("  Progress: ");
        // You can poll your transfer's status to check its progress
        while (!appDownload.isDone()) {
            LOGGER.info("  transferred: " + (int) (appDownload.getProgress().getPercentTransferred() + 0.5) + "%");
            CommonUtils.pause(pollingInterval);
        }
        LOGGER.info("  State: " + appDownload.getState());
        // appDownload.waitForCompletion();
    } catch (AmazonClientException e) {
        throw new RuntimeException("File wasn't downloaded from s3. See log: ".concat(e.getMessage()));
    }
    // tx.shutdownNow();
}
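Note that the TransferManager(AWSCredentials) constructor used above is deprecated in current 1.x SDK releases. A possible modernization (a sketch, not part of the carina source; bucketName, key, and file are assumed to come from the surrounding method) builds the manager from an S3 client that holds the provider chain itself, so credentials can refresh mid-transfer, and shuts the manager down afterwards:

void downloadWithBuilder(String bucketName, String key, File file) throws InterruptedException {
    AmazonS3 s3 = AmazonS3ClientBuilder.standard()
            .withCredentials(DefaultAWSCredentialsProviderChain.getInstance())
            .build();
    TransferManager tx = TransferManagerBuilder.standard()
            .withS3Client(s3)
            .build();
    try {
        // waitForCompletion() blocks until the transfer finishes and rethrows failures
        tx.download(bucketName, key, file).waitForCompletion();
    } finally {
        tx.shutdownNow(); // by default this also shuts down the underlying S3 client
    }
}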
Example 2
Source File: ClusterManager.java From herd-mdl with Apache License 2.0
AmazonElasticMapReduce createEmrClient() {
    DefaultAWSCredentialsProviderChain defaultAWSCredentialsProviderChain = new DefaultAWSCredentialsProviderChain();
    AWSCredentials credentials = defaultAWSCredentialsProviderChain.getCredentials();
    emrClient = AmazonElasticMapReduceClientBuilder.standard()
            .withCredentials(new AWSStaticCredentialsProvider(credentials))
            .build();
    return emrClient;
}
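One design note: wrapping the resolved credentials in AWSStaticCredentialsProvider freezes a snapshot, so session credentials obtained from an instance profile would never refresh. A sketch of the simpler alternative (assuming nothing beyond the SDK itself) passes the chain straight to the builder and lets it refresh on its own:

AmazonElasticMapReduce emrClient = AmazonElasticMapReduceClientBuilder.standard()
        .withCredentials(DefaultAWSCredentialsProviderChain.getInstance())
        .build();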
Example 3
Source File: ClientAuthenticationFactory.java From spring-cloud-vault with Apache License 2.0
private static AWSCredentialsProvider getAwsCredentialsProvider() {
    DefaultAWSCredentialsProviderChain backingCredentialsProvider = DefaultAWSCredentialsProviderChain.getInstance();

    // Eagerly fetch credentials preventing lag during the first, actual login.
    AWSCredentials firstAccess = backingCredentialsProvider.getCredentials();
    AtomicReference<AWSCredentials> once = new AtomicReference<>(firstAccess);

    return new AWSCredentialsProvider() {

        @Override
        public AWSCredentials getCredentials() {
            if (once.compareAndSet(firstAccess, null)) {
                return firstAccess;
            }
            return backingCredentialsProvider.getCredentials();
        }

        @Override
        public void refresh() {
            backingCredentialsProvider.refresh();
        }
    };
}
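Example 4 below ships the byte-for-byte same helper in another project. The AtomicReference hands the eagerly fetched credentials out exactly once, after which every call falls through to the live chain. A hypothetical caller (names invented for illustration) would observe:

AWSCredentialsProvider provider = getAwsCredentialsProvider();
AWSCredentials first = provider.getCredentials();  // the pre-fetched snapshot, served exactly once
AWSCredentials second = provider.getCredentials(); // resolved freshly by the backing chain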
Example 4
Source File: AwsIamClientAuthenticationProvider.java From spring-cloud-config with Apache License 2.0
private static AWSCredentialsProvider getAwsCredentialsProvider() {
    DefaultAWSCredentialsProviderChain backingCredentialsProvider = DefaultAWSCredentialsProviderChain.getInstance();

    // Eagerly fetch credentials preventing lag during the first, actual login.
    AWSCredentials firstAccess = backingCredentialsProvider.getCredentials();
    AtomicReference<AWSCredentials> once = new AtomicReference<>(firstAccess);

    return new AWSCredentialsProvider() {

        @Override
        public AWSCredentials getCredentials() {
            if (once.compareAndSet(firstAccess, null)) {
                return firstAccess;
            }
            return backingCredentialsProvider.getCredentials();
        }

        @Override
        public void refresh() {
            backingCredentialsProvider.refresh();
        }
    };
}
Example 5
Source File: LDAPIAMPoller.java From aws-iam-ldap-bridge with Apache License 2.0
public LDAPIAMPoller(DirectoryService directoryService) throws LdapException {
    this.directory = directoryService;

    credentials = new DefaultAWSCredentialsProviderChain();
    try {
        credentials.getCredentials(); // throws if no provider in the chain can supply credentials
    } catch (AmazonClientException ex) {
        LOG.error("AWS credentials error", ex);
        throw new LdapException("Unable to initialize AWS poller - cannot retrieve valid credentials");
    }
    utils = new ApacheDSUtils(directory);
    runner = new Runner(directory);
    LOG.info("IAMPoller created");
}
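This constructor demonstrates the fail-fast idiom: calling getCredentials() once at startup surfaces a misconfigured environment immediately instead of at the first poll. Reduced to its core (a sketch, not part of the bridge's codebase), the idiom is:

try {
    new DefaultAWSCredentialsProviderChain().getCredentials();
} catch (AmazonClientException ex) {
    // fail at startup rather than on first use
    throw new IllegalStateException("No valid AWS credentials found; configure env vars, ~/.aws/credentials, or an instance profile", ex);
}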
Example 6
Source File: ProcessVendorTrasactions.java From aws-big-data-blog with Apache License 2.0
public static void run(String jobInputParam) throws Exception {
    List<StructField> schemaFields = new ArrayList<StructField>();
    schemaFields.add(DataTypes.createStructField("vendor_id", DataTypes.StringType, true));
    schemaFields.add(DataTypes.createStructField("trans_amount", DataTypes.StringType, true));
    schemaFields.add(DataTypes.createStructField("trans_type", DataTypes.StringType, true));
    schemaFields.add(DataTypes.createStructField("item_id", DataTypes.StringType, true));
    schemaFields.add(DataTypes.createStructField("trans_date", DataTypes.StringType, true));
    StructType schema = DataTypes.createStructType(schemaFields);

    SparkConf conf = new SparkConf().setAppName("Spark Redshift No Access-Keys");
    SparkSession spark = SparkSession.builder().config(conf).getOrCreate();
    JavaSparkContext sc = new JavaSparkContext(spark.sparkContext());

    String redshiftJDBCURL = props.getProperty("redshift.jdbc.url");
    String s3TempPath = props.getProperty("s3.temp.path");
    System.out.println("props" + props);

    JavaRDD<Row> salesRDD = sc.textFile(jobInputParam)
            .map(new Function<String, Row>() {
                public Row call(String saleRec) {
                    String[] fields = saleRec.split(",");
                    return RowFactory.create(fields[0], fields[1], fields[2], fields[3], fields[4]);
                }
            });
    Dataset<Row> salesDF = spark.createDataFrame(salesRDD, schema);

    Dataset<Row> vendorItemSaleAmountDF = salesDF.filter(salesDF.col("trans_type").equalTo("4"))
            .groupBy(salesDF.col("vendor_id"), salesDF.col("item_id"), salesDF.col("trans_date"))
            .agg(ImmutableMap.of("trans_amount", "sum"));
    Dataset<Row> vendorItemTaxAmountDF = salesDF.filter(salesDF.col("trans_type").equalTo("5"))
            .groupBy(salesDF.col("vendor_id"), salesDF.col("item_id"), salesDF.col("trans_date"))
            .agg(ImmutableMap.of("trans_amount", "sum"));
    Dataset<Row> vendorItemDiscountAmountDF = salesDF.filter(salesDF.col("trans_type").equalTo("6"))
            .groupBy(salesDF.col("vendor_id"), salesDF.col("item_id"), salesDF.col("trans_date"))
            .agg(ImmutableMap.of("trans_amount", "sum"));

    String[] joinColArray = {"vendor_id", "item_id", "trans_date"};
    vendorItemSaleAmountDF.printSchema();
    Seq<String> commonJoinColumns = scala.collection.JavaConversions.asScalaBuffer(Arrays.asList(joinColArray)).seq();

    Dataset<Row> vendorAggregatedDF = vendorItemSaleAmountDF
            .join(vendorItemTaxAmountDF, commonJoinColumns, "left_outer")
            .join(vendorItemDiscountAmountDF, commonJoinColumns, "left_outer")
            .toDF("vendor_id", "item_id", "trans_date", "sale_amount", "tax_amount", "discount_amount");
    vendorAggregatedDF.printSchema();

    DefaultAWSCredentialsProviderChain provider = new DefaultAWSCredentialsProviderChain();
    AWSSessionCredentials creds = (AWSSessionCredentials) provider.getCredentials();

    String appendix = new StringBuilder(String.valueOf(System.currentTimeMillis()))
            .append("_")
            .append(String.valueOf(new Random().nextInt(10) + 1))
            .toString();
    String vendorTransSummarySQL = new StringBuilder("begin transaction;delete from vendortranssummary using vendortranssummary_temp")
            .append(appendix)
            .append(" where vendortranssummary.vendor_id=vendortranssummary_temp")
            .append(appendix)
            .append(".vendor_id and vendortranssummary.item_id=vendortranssummary_temp")
            .append(appendix)
            .append(".item_id and vendortranssummary.trans_date = vendortranssummary_temp")
            .append(appendix)
            .append(".trans_date;")
            .append("insert into vendortranssummary select * from vendortranssummary_temp")
            .append(appendix)
            .append(";drop table vendortranssummary_temp")
            .append(appendix)
            .append(";end transaction;").toString();

    vendorAggregatedDF.write().format("com.databricks.spark.redshift")
            .option("url", redshiftJDBCURL)
            .option("dbtable", "vendortranssummary_temp" + appendix)
            .option("usestagingtable", "false")
            .option("postactions", vendorTransSummarySQL)
            .option("temporary_aws_access_key_id", creds.getAWSAccessKeyId())
            .option("temporary_aws_secret_access_key", creds.getAWSSecretKey())
            .option("temporary_aws_session_token", creds.getSessionToken())
            .option("tempdir", s3TempPath)
            .mode(SaveMode.Overwrite)
            .save();
}
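The unchecked cast to AWSSessionCredentials is the load-bearing trick here: spark-redshift's temporary_aws_* options need a session token, which only exists when the chain resolved temporary credentials (for example an EMR instance profile or an assumed STS role). Run with long-lived keys, the cast throws ClassCastException; a defensive sketch (not part of the blog's code) would check first:

AWSCredentials creds = DefaultAWSCredentialsProviderChain.getInstance().getCredentials();
if (creds instanceof AWSSessionCredentials) {
    String sessionToken = ((AWSSessionCredentials) creds).getSessionToken();
    // safe to pass via the "temporary_aws_session_token" option
} else {
    // long-lived keys: use spark-redshift's non-temporary credential options instead
}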