Python boto3.session.Session() Examples
The following are 30 code examples of boto3.session.Session().
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example.
You may also want to check out all available functions/classes of the module boto3.session, or try the search function.
Example #1
Source File: paasta_remote_run.py From paasta with Apache License 2.0 | 8 votes |
def create_boto_session(taskproc_config, region):
    """Create a boto3 Session for *region* using credentials from the
    taskproc config.

    :param taskproc_config: mapping with a "boto_credential_file" key naming
        a JSON file that contains accessKeyId / secretAccessKey entries.
    :param region: AWS region name for the session.
    :returns: a configured ``boto3.session.Session``.
    :raises ValueError: if no credential file is configured.
    """
    # first, try to load credentials
    credentials_file = taskproc_config.get("boto_credential_file")
    if credentials_file:
        with open(credentials_file) as f:
            # json.load reads the stream directly; no need for loads(read())
            credentials = json.load(f)
    else:
        raise ValueError("Required aws credentials")

    # second, create the session for the given region
    return Session(
        region_name=region,
        aws_access_key_id=credentials["accessKeyId"],
        aws_secret_access_key=credentials["secretAccessKey"],
    )


# TODO: rename to registry?
Example #2
Source File: aws-git-backed-static-website-lambda.py From aws-git-backed-static-website with Apache License 2.0 | 7 votes |
def setup(event):
    """Extract the CodePipeline job attributes from *event* and build an
    S3 client using the job's temporary artifact credentials.

    Returns a tuple ``(job_id, s3, from_bucket, from_key, from_revision)``.
    """
    job = event['CodePipeline.job']
    job_id = job['id']
    job_data = job['data']
    artifact = job_data['inputArtifacts'][0]
    config = job_data['actionConfiguration']['configuration']
    credentials = job_data['artifactCredentials']

    location = artifact['location']['s3Location']
    from_bucket = location['bucketName']
    from_key = location['objectKey']
    from_revision = artifact['revision']
    #output_artifact = job_data['outputArtifacts'][0]
    #to_bucket = output_artifact['location']['s3Location']['bucketName']
    #to_key = output_artifact['location']['s3Location']['objectKey']

    # Temporary credentials to access CodePipeline artifact in S3
    session = Session(
        aws_access_key_id=credentials['accessKeyId'],
        aws_secret_access_key=credentials['secretAccessKey'],
        aws_session_token=credentials['sessionToken'],
    )
    s3 = session.client(
        's3', config=botocore.client.Config(signature_version='s3v4'))
    return (job_id, s3, from_bucket, from_key, from_revision)
Example #3
Source File: MLOps-BIA-GetStatus.py From mlops-amazon-sagemaker-devops-with-ml with Apache License 2.0 | 6 votes |
def read_job_info(event):
    """Read the JSON job-info artifact produced by a previous CodePipeline
    stage from S3 and return it decoded.

    :param event: CodePipeline Lambda event carrying the input artifact
        location and temporary artifact credentials.
    :returns: the decoded JSON artifact contents.
    """
    # FIX: removed an unused tempfile.NamedTemporaryFile() that leaked an
    # open file descriptor on every call.
    data = event['CodePipeline.job']['data']
    location = data['inputArtifacts'][0]['location']['s3Location']
    objectKey = location['objectKey']
    print("[INFO]Object Key:", objectKey)
    bucketname = location['bucketName']
    print("[INFO]Bucket Name:", bucketname)
    artifactCredentials = data['artifactCredentials']

    # Temporary credentials scoped to the pipeline's artifact bucket.
    session = Session(
        aws_access_key_id=artifactCredentials['accessKeyId'],
        aws_secret_access_key=artifactCredentials['secretAccessKey'],
        aws_session_token=artifactCredentials['sessionToken'])
    s3 = session.resource('s3')
    obj = s3.Object(bucketname, objectKey)
    item = json.loads(obj.get()['Body'].read().decode('utf-8'))
    return item
Example #4
Source File: clear_lambda_storage.py From clear-lambda-storage with MIT License | 6 votes |
def init_boto_client(client_name, region, args):
    """
    Initiates boto's client object
    :param client_name: client name
    :param region: region name
    :param args: arguments
    :return: Client
    """
    # Explicit token credentials take precedence over a named profile.
    if args.token_key_id and args.token_secret:
        return boto3.client(
            client_name,
            aws_access_key_id=args.token_key_id,
            aws_secret_access_key=args.token_secret,
            region_name=region,
        )
    if args.profile:
        profile_session = boto3.session.Session(profile_name=args.profile)
        return profile_session.client(client_name, region_name=region)
    # Fall back to the default credential chain.
    return boto3.client(client_name, region_name=region)
Example #5
Source File: __init__.py From koku with GNU Affero General Public License v3.0 | 6 votes |
def setup_cloudwatch_logging(logger):
    """Attach a CloudWatch log handler to *logger* when credentials exist.

    Silently skips setup (with an info message) if the access key pair is
    not configured; logs and swallows ClientError on handler creation.
    """
    if not (Config.CW_AWS_ACCESS_KEY_ID and Config.CW_AWS_SECRET_ACCESS_KEY):
        logger.info("CloudWatch logging disabled due to missing access key")
        return
    try:
        cw_session = Session(
            aws_access_key_id=Config.CW_AWS_ACCESS_KEY_ID,
            aws_secret_access_key=Config.CW_AWS_SECRET_ACCESS_KEY,
            region_name=Config.CW_AWS_REGION,
        )
        cw_handler = watchtower.CloudWatchLogHandler(
            boto3_session=cw_session,
            log_group=Config.CW_LOG_GROUP,
            stream_name=Config.NAMESPACE,
            create_log_group=False,
        )
        logger.addHandler(cw_handler)
    except ClientError as cerr:
        logger.error("CloudWatch logging setup failed: %s", cerr)
Example #6
Source File: elastic2_doc_manager.py From elastic2-doc-manager with Apache License 2.0 | 6 votes |
def convert_aws_args(aws_args):
    """Convert old style options into arguments to boto3.session.Session."""
    if not isinstance(aws_args, dict):
        raise errors.InvalidConfiguration(
            'Elastic DocManager config option "aws" must be a dict'
        )
    # Legacy option names mapped onto the Session keyword spellings.
    legacy_names = {
        "region": "region_name",
        "access_id": "aws_access_key_id",
        "secret_key": "aws_secret_access_key",
    }
    # Rename legacy keys; pass everything else through untouched.
    return {legacy_names.get(key, key): value for key, value in aws_args.items()}
Example #7
Source File: search.py From aws-sat-api-py with BSD 3-Clause "New" or "Revised" License | 6 votes |
def cbers(path, row, sensor='MUX'):
    """Get CBERS scenes.

    Valid values for sensor are: 'MUX', 'AWFI', 'PAN5M' and 'PAN10M'.
    """
    path = utils.zeroPad(path, 3)
    row = utils.zeroPad(row, 3)
    prefix = f'CBERS4/{sensor}/{path}/{row}/'

    session = boto3_session(region_name=region)
    s3 = session.client('s3')

    dir_keys = aws.list_directory(cbers_bucket, prefix, s3=s3)
    scene_ids = [os.path.basename(key.strip('/')) for key in dir_keys]

    scenes = []
    for scene_id in scene_ids:
        info = utils.cbers_parse_scene_id(scene_id)
        scene_key = info["key"]
        # Preview id is the scene id minus its trailing segment.
        preview_id = '_'.join(scene_id.split('_')[0:-1])
        base = f'https://s3.amazonaws.com/{cbers_bucket}/{scene_key}/{preview_id}'
        info['thumbURL'] = f'{base}_small.jpeg'
        info['browseURL'] = f'{base}.jpg'
        scenes.append(info)
    return scenes
Example #8
Source File: search.py From aws-sat-api-py with BSD 3-Clause "New" or "Revised" License | 6 votes |
def landsat(path, row, full=False):
    """Get Landsat scenes."""
    path = utils.zeroPad(path, 3)
    row = utils.zeroPad(row, 3)

    # Pre-collection and Collection-1 layouts are both searched.
    prefixes = [f'{level}/{path}/{row}/' for level in ['L8', 'c1/L8']]

    # WARNING: This is fast but not thread safe
    session = boto3_session(region_name=region)
    s3 = session.client('s3')

    _ls_worker = partial(aws.list_directory, landsat_bucket, s3=s3)
    with futures.ThreadPoolExecutor(max_workers=2) as executor:
        listed = executor.map(_ls_worker, prefixes)
    keys = itertools.chain.from_iterable(listed)
    scene_ids = [os.path.basename(key.strip('/')) for key in keys]

    _info_worker = partial(get_l8_info, full=full, s3=s3)
    with futures.ThreadPoolExecutor(max_workers=max_worker) as executor:
        results = executor.map(_info_worker, scene_ids)
    return results
Example #9
Source File: token_provider.py From cachet-url-monitor with MIT License | 6 votes |
def get_token(self) -> Optional[str]:
    """Fetch the monitoring token from AWS Secrets Manager.

    :returns: the value stored under ``self.secret_key`` inside the secret
        named ``self.secret_name``.
    :raises AwsSecretsManagerTokenRetrievalException: if the secret is
        missing, the request is invalid, the secret is binary, or the
        secret_key is absent from the secret.
    """
    session = Session()
    client = session.client(service_name="secretsmanager", region_name=self.region)
    try:
        get_secret_value_response = client.get_secret_value(SecretId=self.secret_name)
    except ClientError as e:
        error_code = e.response["Error"]["Code"]
        if error_code == "ResourceNotFoundException":
            raise AwsSecretsManagerTokenRetrievalException(
                f"The requested secret {self.secret_name} was not found")
        elif error_code == "InvalidRequestException":
            raise AwsSecretsManagerTokenRetrievalException("The request was invalid")
        elif error_code == "InvalidParameterException":
            raise AwsSecretsManagerTokenRetrievalException("The request had invalid params")
        else:
            # FIX: any other ClientError code was previously swallowed and
            # the method fell through to an implicit `return None`.
            raise AwsSecretsManagerTokenRetrievalException(
                f"Unexpected error retrieving secret: {error_code}")
    else:
        if "SecretString" in get_secret_value_response:
            secret = json.loads(get_secret_value_response["SecretString"])
            try:
                return secret[self.secret_key]
            except KeyError:
                raise AwsSecretsManagerTokenRetrievalException(
                    f"Invalid secret_key parameter: {self.secret_key}")
        else:
            raise AwsSecretsManagerTokenRetrievalException(
                "Invalid secret format. It should be a SecretString, instead of binary."
            )
Example #10
Source File: aws.py From aws-sat-api-py with BSD 3-Clause "New" or "Revised" License | 6 votes |
def list_directory(bucket, prefix, s3=None, request_pays=False):
    """AWS s3 list directory."""
    if not s3:
        # Lazily create a client when the caller did not supply one.
        session = boto3_session(region_name=region)
        s3 = session.client('s3')

    paginator = s3.get_paginator('list_objects_v2')
    params = {'Bucket': bucket, 'Prefix': prefix, 'Delimiter': '/'}
    if request_pays:
        params['RequestPayer'] = 'requester'

    directories = []
    for page in paginator.paginate(**params):
        if 'CommonPrefixes' in page:
            directories.extend(page['CommonPrefixes'])
    return [entry['Prefix'] for entry in directories]
Example #11
Source File: sqs_sensor.py From stackstorm-aws with Apache License 2.0 | 6 votes |
def _setup_multiaccount_session(self, account_id):
    ''' Assume role and setup session for the cross-account capability'''
    try:
        assumed_role = self.sessions[self.account_id].client('sts').assume_role(
            RoleArn=self.cross_roles[account_id],
            RoleSessionName='StackStormEvents'
        )
    except ClientError:
        self._logger.error('Could not assume role on %s', account_id)
        return

    creds = assumed_role["Credentials"]
    self.credentials[account_id] = (creds["AccessKeyId"],
                                    creds["SecretAccessKey"],
                                    creds["SessionToken"])
    key_id, secret_key, token = self.credentials[account_id]
    self.sessions[account_id] = Session(
        aws_access_key_id=key_id,
        aws_secret_access_key=secret_key,
        aws_session_token=token
    )
    # Drop any cached SQS resource for this account so it gets rebuilt
    # against the fresh session.
    self.sqs_res.pop(account_id, None)
Example #12
Source File: aws.py From formica with MIT License | 6 votes |
def initialize(region="", profile=""):
    """Initialise the shared boto3 session, wiring botocore's assume-role
    credential provider to the AWS CLI's on-disk cache so cached STS
    credentials (e.g. MFA sessions) are reused across invocations.

    :param region: optional AWS region name for the session
    :param profile: optional named profile to load
    """
    from botocore import credentials
    import botocore.session
    import os

    # Same cache directory the AWS CLI uses for assume-role credentials.
    cli_cache = os.path.join(os.path.expanduser("~"), ".aws/cli/cache")
    params = {}
    if profile:
        params["profile"] = profile
    session = botocore.session.Session(**params)
    # Point the assume-role provider at the CLI cache file store.
    session.get_component("credential_provider").get_provider("assume-role").cache = credentials.JSONFileCache(
        cli_cache
    )
    from boto3.session import Session

    params = {}
    if region:
        params["region_name"] = region
    # NOTE(review): `AWS.__session` is a name-mangled attribute — this
    # presumably lives inside the AWS class so it resolves to
    # `AWS._AWS__session`; confirm against the full file.
    AWS.__session = Session(botocore_session=session, **params)
Example #13
Source File: cfn_validate_lambda.py From automating-governance-sample with Apache License 2.0 | 6 votes |
def setup_s3_client(job_data):
    """Creates an S3 client

    Uses the credentials passed in the event by CodePipeline. These
    credentials can be used to access the artifact bucket.

    Args:
        job_data: The job data structure

    Returns:
        An S3 client with the appropriate credentials
    """
    creds = job_data['artifactCredentials']
    session = Session(
        aws_access_key_id=creds['accessKeyId'],
        aws_secret_access_key=creds['secretAccessKey'],
        aws_session_token=creds['sessionToken'])
    # SigV4 is required for KMS-encrypted artifact buckets.
    return session.client(
        's3', config=botocore.client.Config(signature_version='s3v4'))
Example #14
Source File: MLOps-BIA-GetStatus.py From mlops-amazon-sagemaker-devops-with-ml with Apache License 2.0 | 6 votes |
def write_job_info_s3(event, writeData):
    """Serialize *writeData* as JSON and upload it as the CodePipeline
    output artifact, encrypted with the configured KMS key."""
    out_artifact = event['CodePipeline.job']['data']['outputArtifacts'][0]
    objectKey = out_artifact['location']['s3Location']['objectKey']
    bucketname = out_artifact['location']['s3Location']['bucketName']
    artifactCredentials = event['CodePipeline.job']['data']['artifactCredentials']
    artifactName = out_artifact['name']

    json_data = json.dumps(writeData, indent=4, sort_keys=True, default=str)
    print(json_data)

    session = Session(
        aws_access_key_id=artifactCredentials['accessKeyId'],
        aws_secret_access_key=artifactCredentials['secretAccessKey'],
        aws_session_token=artifactCredentials['sessionToken'])

    s3 = session.resource("s3")
    #object = s3.Object(bucketname, objectKey + '/event.json')
    s3_object = s3.Object(bucketname, objectKey)
    print(s3_object)
    s3_object.put(Body=json_data, ServerSideEncryption='aws:kms',
                  SSEKMSKeyId=SSEKMSKeyId)
    print('[INFO]event written to s3')
Example #15
Source File: MLOps-BIA-GetStatus.py From mlops-amazon-sagemaker-devops-with-ml with Apache License 2.0 | 6 votes |
def write_job_info_s3(event, writeData):
    """Write *writeData* (JSON-serialized) to the pipeline's output
    artifact location in S3, using KMS server-side encryption."""
    pipeline_data = event['CodePipeline.job']['data']
    out_artifact = pipeline_data['outputArtifacts'][0]
    objectKey = out_artifact['location']['s3Location']['objectKey']
    bucketname = out_artifact['location']['s3Location']['bucketName']
    artifactCredentials = pipeline_data['artifactCredentials']
    artifactName = out_artifact['name']

    json_data = json.dumps(writeData, indent=4, sort_keys=True, default=str)
    print(json_data)

    session = Session(
        aws_access_key_id=artifactCredentials['accessKeyId'],
        aws_secret_access_key=artifactCredentials['secretAccessKey'],
        aws_session_token=artifactCredentials['sessionToken'])

    s3 = session.resource("s3")
    # object = s3.Object(bucketname, objectKey + '/event.json')
    target = s3.Object(bucketname, objectKey)
    print(target)
    target.put(Body=json_data, ServerSideEncryption='aws:kms',
               SSEKMSKeyId=SSEKMSKeyId)
    print('[INFO]event written to s3')
Example #16
Source File: MLOps-BYO-GetStatus.py From mlops-amazon-sagemaker-devops-with-ml with Apache License 2.0 | 6 votes |
def write_job_info_s3(event, writeData):
    """Upload *writeData* as the JSON output artifact for this
    CodePipeline job, encrypted with the configured KMS key."""
    data = event['CodePipeline.job']['data']
    out_artifact = data['outputArtifacts'][0]
    objectKey = out_artifact['location']['s3Location']['objectKey']
    bucketname = out_artifact['location']['s3Location']['bucketName']
    artifactCredentials = data['artifactCredentials']
    artifactName = out_artifact['name']

    json_data = json.dumps(writeData, indent=4, sort_keys=True, default=str)
    print(json_data)

    session = Session(
        aws_access_key_id=artifactCredentials['accessKeyId'],
        aws_secret_access_key=artifactCredentials['secretAccessKey'],
        aws_session_token=artifactCredentials['sessionToken'])

    s3 = session.resource("s3")
    #object = s3.Object(bucketname, objectKey + '/event.json')
    artifact_obj = s3.Object(bucketname, objectKey)
    print(artifact_obj)
    artifact_obj.put(Body=json_data, ServerSideEncryption='aws:kms',
                     SSEKMSKeyId=SSEKMSKeyId)
    print('[INFO]event written to s3')
Example #17
Source File: MLOps-BYO-GetStatus.py From mlops-amazon-sagemaker-devops-with-ml with Apache License 2.0 | 6 votes |
def read_job_info(event):
    """Fetch and decode the JSON input artifact for this CodePipeline job.

    :param event: CodePipeline Lambda event with the input artifact location
        and temporary artifact credentials.
    :returns: the decoded artifact contents.
    """
    # FIX: removed an unused tempfile.NamedTemporaryFile() that leaked an
    # open file descriptor on every call.
    data = event['CodePipeline.job']['data']
    location = data['inputArtifacts'][0]['location']['s3Location']
    objectKey = location['objectKey']
    print("[INFO]Object Key:", objectKey)
    bucketname = location['bucketName']
    print("[INFO]Bucket Name:", bucketname)
    artifactCredentials = data['artifactCredentials']

    session = Session(
        aws_access_key_id=artifactCredentials['accessKeyId'],
        aws_secret_access_key=artifactCredentials['secretAccessKey'],
        aws_session_token=artifactCredentials['sessionToken'])
    s3 = session.resource('s3')
    obj = s3.Object(bucketname, objectKey)
    item = json.loads(obj.get()['Body'].read().decode('utf-8'))
    return item
Example #18
Source File: MLOps-BIA-DeployModel.py From mlops-amazon-sagemaker-devops-with-ml with Apache License 2.0 | 6 votes |
def read_job_info(event):
    """Read and decode the JSON input artifact for this CodePipeline job,
    printing the decoded item for debugging.

    :param event: CodePipeline Lambda event with the input artifact location
        and temporary artifact credentials.
    :returns: the decoded artifact contents.
    """
    # FIX: removed an unused tempfile.NamedTemporaryFile() that leaked an
    # open file descriptor on every call.
    data = event['CodePipeline.job']['data']
    location = data['inputArtifacts'][0]['location']['s3Location']
    objectKey = location['objectKey']
    print("[INFO]Object:", objectKey)
    bucketname = location['bucketName']
    print("[INFO]Bucket:", bucketname)
    artifactCredentials = data['artifactCredentials']

    session = Session(
        aws_access_key_id=artifactCredentials['accessKeyId'],
        aws_secret_access_key=artifactCredentials['secretAccessKey'],
        aws_session_token=artifactCredentials['sessionToken'])
    s3 = session.resource('s3')
    obj = s3.Object(bucketname, objectKey)
    item = json.loads(obj.get()['Body'].read().decode('utf-8'))
    print("Item:", item)
    return item
Example #19
Source File: MLOps-BYO-EvaluateModel.py From mlops-amazon-sagemaker-devops-with-ml with Apache License 2.0 | 6 votes |
def read_job_info(event):
    """Read the previous stage's JSON artifact from S3 and return it
    decoded, logging the result."""
    data = event['CodePipeline.job']['data']
    location = data['inputArtifacts'][0]['location']['s3Location']
    objectKey = location['objectKey']
    bucketname = location['bucketName']
    artifactCredentials = data['artifactCredentials']

    session = Session(
        aws_access_key_id=artifactCredentials['accessKeyId'],
        aws_secret_access_key=artifactCredentials['secretAccessKey'],
        aws_session_token=artifactCredentials['sessionToken'])
    s3 = session.resource('s3')
    obj = s3.Object(bucketname, objectKey)
    item = json.loads(obj.get()['Body'].read().decode('utf-8'))
    print("[INFO]Previous CodePipeline Job Info Sucessfully Read:", item)
    return item
Example #20
Source File: MLOps-BIA-EvaluateModel.py From mlops-amazon-sagemaker-devops-with-ml with Apache License 2.0 | 6 votes |
def write_job_info_s3(event):
    """Write the raw *event* JSON to the pipeline's output artifact in S3,
    encrypted with the KMS key from the SSEKMSKeyIdIn environment variable.
    """
    KMSKeyIdSSEIn = os.environ['SSEKMSKeyIdIn']
    data = event['CodePipeline.job']['data']
    out_artifact = data['outputArtifacts'][0]
    objectKey = out_artifact['location']['s3Location']['objectKey']
    bucketname = out_artifact['location']['s3Location']['bucketName']
    artifactCredentials = data['artifactCredentials']
    artifactName = out_artifact['name']

    json_data = json.dumps(event)
    print(json_data)

    session = Session(
        aws_access_key_id=artifactCredentials['accessKeyId'],
        aws_secret_access_key=artifactCredentials['secretAccessKey'],
        aws_session_token=artifactCredentials['sessionToken'])

    s3 = session.resource("s3")
    # FIX: a dead `s3.Object(bucketname, objectKey + '/event.json')` was
    # created and immediately overwritten; in sibling modules that line is
    # commented out, so it is removed here.
    s3_object = s3.Object(bucketname, objectKey)
    print(s3_object)
    s3_object.put(Body=json_data, ServerSideEncryption='aws:kms',
                  SSEKMSKeyId=KMSKeyIdSSEIn)
Example #21
Source File: MLOps-BYO-TrainModel.py From mlops-amazon-sagemaker-devops-with-ml with Apache License 2.0 | 6 votes |
def write_job_info_s3(event):
    """Persist the full *event* as a JSON artifact in the pipeline's output
    bucket, encrypted with the KMS key named by SSEKMSKeyIdIn."""
    print(event)
    data = event['CodePipeline.job']['data']
    out_artifact = data['outputArtifacts'][0]
    objectKey = out_artifact['location']['s3Location']['objectKey']
    bucketname = out_artifact['location']['s3Location']['bucketName']
    artifactCredentials = data['artifactCredentials']
    artifactName = out_artifact['name']

    # S3 Managed Key for Encryption
    S3SSEKey = os.environ['SSEKMSKeyIdIn']

    json_data = json.dumps(event)
    print(json_data)

    session = Session(
        aws_access_key_id=artifactCredentials['accessKeyId'],
        aws_secret_access_key=artifactCredentials['secretAccessKey'],
        aws_session_token=artifactCredentials['sessionToken'])

    s3 = session.resource("s3")
    s3_object = s3.Object(bucketname, objectKey)
    print(s3_object)
    s3_object.put(Body=json_data, ServerSideEncryption='aws:kms',
                  SSEKMSKeyId=S3SSEKey)
    print('[SUCCESS]Job Information Written to S3')
Example #22
Source File: MLOps-BIA-EvaluateModel.py From mlops-amazon-sagemaker-devops-with-ml with Apache License 2.0 | 6 votes |
def read_job_info(event):
    """Read the previous CodePipeline stage's JSON artifact from S3 and
    return it decoded, with debug logging along the way."""
    print("[DEBUG]EVENT IN:", event)
    data = event['CodePipeline.job']['data']
    location = data['inputArtifacts'][0]['location']['s3Location']
    bucketname = location['bucketName']
    print("[INFO]Previous Job Info Bucket:", bucketname)
    objectKey = location['objectKey']
    print("[INFO]Previous Job Info Object:", objectKey)
    artifactCredentials = data['artifactCredentials']

    session = Session(
        aws_access_key_id=artifactCredentials['accessKeyId'],
        aws_secret_access_key=artifactCredentials['secretAccessKey'],
        aws_session_token=artifactCredentials['sessionToken'])
    s3 = session.resource('s3')
    obj = s3.Object(bucketname, objectKey)
    item = json.loads(obj.get()['Body'].read().decode('utf-8'))
    print("[INFO]Previous CodePipeline Job Info Sucessfully Read:", item)
    return item
Example #23
Source File: MLOps-BIA-EvaluateModel.py From mlops-amazon-sagemaker-devops-with-ml with Apache License 2.0 | 6 votes |
def read_job_info(event):
    """Fetch the upstream stage's job-info artifact from S3, decode it as
    JSON, log it, and return it."""
    print("[DEBUG]EVENT IN:", event)
    pipeline_data = event['CodePipeline.job']['data']
    s3_location = pipeline_data['inputArtifacts'][0]['location']['s3Location']
    bucketname = s3_location['bucketName']
    print("[INFO]Previous Job Info Bucket:", bucketname)
    objectKey = s3_location['objectKey']
    print("[INFO]Previous Job Info Object:", objectKey)
    artifactCredentials = pipeline_data['artifactCredentials']

    session = Session(
        aws_access_key_id=artifactCredentials['accessKeyId'],
        aws_secret_access_key=artifactCredentials['secretAccessKey'],
        aws_session_token=artifactCredentials['sessionToken'])
    s3 = session.resource('s3')
    obj = s3.Object(bucketname, objectKey)
    item = json.loads(obj.get()['Body'].read().decode('utf-8'))
    print("[INFO]Previous CodePipeline Job Info Sucessfully Read:", item)
    return item
Example #24
Source File: MLOps-BIA-TrainModel.py From mlops-amazon-sagemaker-devops-with-ml with Apache License 2.0 | 6 votes |
def write_job_info_s3(event):
    """Store the full *event* JSON as the pipeline's output artifact,
    KMS-encrypted with the key from SSEKMSKeyIdIn."""
    print(event)
    pipeline_data = event['CodePipeline.job']['data']
    out_artifact = pipeline_data['outputArtifacts'][0]
    objectKey = out_artifact['location']['s3Location']['objectKey']
    bucketname = out_artifact['location']['s3Location']['bucketName']
    artifactCredentials = pipeline_data['artifactCredentials']
    artifactName = out_artifact['name']

    # S3 Managed Key for Encryption
    S3SSEKey = os.environ['SSEKMSKeyIdIn']

    json_data = json.dumps(event)
    print(json_data)

    session = Session(
        aws_access_key_id=artifactCredentials['accessKeyId'],
        aws_secret_access_key=artifactCredentials['secretAccessKey'],
        aws_session_token=artifactCredentials['sessionToken'])

    s3 = session.resource("s3")
    target = s3.Object(bucketname, objectKey)
    print(target)
    target.put(Body=json_data, ServerSideEncryption='aws:kms',
               SSEKMSKeyId=S3SSEKey)
    print('[SUCCESS]Job Information Written to S3')
Example #25
Source File: MLOps-BIA-GetStatus.py From mlops-amazon-sagemaker-devops-with-ml with Apache License 2.0 | 6 votes |
def read_job_info(event):
    """Read the JSON input artifact for this CodePipeline job from S3.

    :param event: CodePipeline Lambda event with the input artifact location
        and temporary artifact credentials.
    :returns: the decoded artifact contents.
    """
    # FIX: removed an unused tempfile.NamedTemporaryFile() that leaked an
    # open file descriptor on every call.
    data = event['CodePipeline.job']['data']
    location = data['inputArtifacts'][0]['location']['s3Location']
    objectKey = location['objectKey']
    print("[INFO]Object Key:", objectKey)
    bucketname = location['bucketName']
    print("[INFO]Bucket Name:", bucketname)
    artifactCredentials = data['artifactCredentials']

    session = Session(
        aws_access_key_id=artifactCredentials['accessKeyId'],
        aws_secret_access_key=artifactCredentials['secretAccessKey'],
        aws_session_token=artifactCredentials['sessionToken'])
    s3 = session.resource('s3')
    obj = s3.Object(bucketname, objectKey)
    item = json.loads(obj.get()['Body'].read().decode('utf-8'))
    return item
Example #26
Source File: __init__.py From AWS-Transit-Gateway-Demo-MultiAccount with MIT License | 5 votes |
def client(*args, **kwargs):
    """
    Create a low-level service client by name using the default session.

    See :py:meth:`boto3.session.Session.client`.
    """
    default_session = _get_default_session()
    return default_session.client(*args, **kwargs)
Example #27
Source File: index.py From aws-lambda-codepipeline-site-generator-hugo with Apache License 2.0 | 5 votes |
def setup(event):
    """Extract CodePipeline job attributes from *event* and build an S3
    client from the job's temporary artifact credentials.

    Returns ``(job_id, s3, from_bucket, from_key, from_revision,
    to_bucket, to_key, user_parameters)``.
    """
    job = event['CodePipeline.job']
    job_id = job['id']
    job_data = job['data']
    input_artifact = job_data['inputArtifacts'][0]
    config = job_data['actionConfiguration']['configuration']
    credentials = job_data['artifactCredentials']

    in_location = input_artifact['location']['s3Location']
    from_bucket = in_location['bucketName']
    from_key = in_location['objectKey']
    from_revision = input_artifact['revision']

    output_artifact = job_data['outputArtifacts'][0]
    out_location = output_artifact['location']['s3Location']
    to_bucket = out_location['bucketName']
    to_key = out_location['objectKey']
    user_parameters = config['UserParameters']

    # Temporary credentials to access CodePipeline artifacts in S3
    session = Session(
        aws_access_key_id=credentials['accessKeyId'],
        aws_secret_access_key=credentials['secretAccessKey'],
        aws_session_token=credentials['sessionToken'],
    )
    s3 = session.client(
        's3', config=botocore.client.Config(signature_version='s3v4'))
    return (job_id, s3, from_bucket, from_key, from_revision,
            to_bucket, to_key, user_parameters)
Example #28
Source File: __init__.py From AWS-Transit-Gateway-Demo-MultiAccount with MIT License | 5 votes |
def resource(*args, **kwargs):
    """
    Create a resource service client by name using the default session.

    See :py:meth:`boto3.session.Session.resource`.
    """
    default_session = _get_default_session()
    return default_session.resource(*args, **kwargs)


# Set up logging to ``/dev/null`` like a library is supposed to.
# http://docs.python.org/3.3/howto/logging.html#configuring-logging-for-a-library
Example #29
Source File: __init__.py From AWS-Transit-Gateway-Demo-MultiAccount with MIT License | 5 votes |
def setup_default_session(**kwargs):
    """
    Set up a default session, passing through any parameters to the session
    constructor. There is no need to call this unless you wish to pass custom
    parameters, because a default session will be created for you.
    """
    # Rebind the module-level singleton so subsequent client()/resource()
    # calls pick up the new session.
    global DEFAULT_SESSION
    DEFAULT_SESSION = Session(**kwargs)
Example #30
Source File: aws.py From formica with MIT License | 5 votes |
def current_session():
    """Return the session created by ``initialize``.

    Raises AttributeError if no session has been initialised yet.
    """
    session = AWS.__session
    if not session:
        raise AttributeError("Session was not initialised")
    return session