Python botocore.config.Config() Examples

The following are 30 code examples of botocore.config.Config(), collected from open-source projects. You can go to the original project or source file by following the links above each example. You may also want to check out all other available functions and classes of the botocore.config module.
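Before diving into the examples, here is a minimal sketch of the pattern they all share: build a Config with the options you need and pass it to a client or resource via the config keyword (the option values below are illustrative):

import boto3
from botocore.config import Config

# A few commonly used Config options; all are optional.
config = Config(
    region_name='us-east-1',        # default region for the client
    connect_timeout=5,              # seconds to wait for a connection
    read_timeout=60,                # seconds to wait for a response
    retries={'max_attempts': 10},   # retry failed requests up to 10 times
    max_pool_connections=25,        # size of the urllib3 connection pool
)
s3 = boto3.client('s3', config=config)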
Example #1
Source File: s3.py    From gnocchi with Apache License 2.0
def get_connection(conf):
    if boto3 is None:
        raise RuntimeError("boto3 unavailable")
    conn = boto3.client(
        's3',
        endpoint_url=conf.s3_endpoint_url,
        region_name=conf.s3_region_name,
        aws_access_key_id=conf.s3_access_key_id,
        aws_secret_access_key=conf.s3_secret_access_key,
        config=boto_config.Config(
            max_pool_connections=conf.s3_max_pool_connections))
    return conn, conf.s3_region_name, conf.s3_bucket_prefix


# NOTE(jd) OperationAborted might be raised if we try to create the bucket
# for the first time at the same time 
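The NOTE above hints at a race: concurrent bucket creation can surface as an OperationAborted error. A hedged sketch of handling it (the retry helper below is illustrative, not gnocchi's actual code):

import time
import botocore.exceptions

def create_bucket_with_retry(conn, bucket, attempts=3):
    # S3 can raise OperationAborted when two callers create the same
    # bucket at the same time; backing off and retrying usually succeeds.
    for i in range(attempts):
        try:
            return conn.create_bucket(Bucket=bucket)
        except botocore.exceptions.ClientError as e:
            if e.response['Error']['Code'] != 'OperationAborted' or i == attempts - 1:
                raise
            time.sleep(2 ** i)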
Example #2
Source File: delete_empty_log_streams.py    From aws-cloudwatch-log-minder with Apache License 2.0
def handle(request, context):
    global cw_logs

    cw_logs = boto3.client("logs", config=Config(retries=dict(max_attempts=10)))

    dry_run = request.get("dry_run", False)
    if "dry_run" in request and not isinstance(dry_run, bool):
        raise ValueError(f"'dry_run' is not a boolean value, {request}")

    purge_non_empty = request.get("purge_non_empty", False)
    if "purge_non_empty" in request and not isinstance(dry_run, bool):
        raise ValueError(f"'purge_non_empty' is not a boolean value, {request}")

    log_group_name_prefix = request.get("log_group_name_prefix")
    if log_group_name_prefix:
        delete_empty_log_streams(log_group_name_prefix, purge_non_empty, dry_run)
    else:
        fan_out(
            context.invoked_function_arn,
            get_all_log_group_names(),
            purge_non_empty,
            dry_run,
        ) 
Example #3
Source File: delete_empty_log_streams.py    From aws-cloudwatch-log-minder with Apache License 2.0
def delete_empty_log_streams(
    log_group_name_prefix: str = None,
    purge_non_empty: bool = False,
    dry_run: bool = False,
    region: str = None,
    profile: str = None,
):
    global cw_logs

    boto_session = boto3.Session(region_name=region, profile_name=profile)
    cw_logs = boto_session.client("logs", config=Config(retries=dict(max_attempts=10)))

    kwargs = {"PaginationConfig": {"PageSize": 50}}
    if log_group_name_prefix:
        kwargs["logGroupNamePrefix"] = log_group_name_prefix

    log.info("finding log groups with prefix %r", log_group_name_prefix)
    for response in cw_logs.get_paginator("describe_log_groups").paginate(**kwargs):
        for group in response["logGroups"]:
            _delete_empty_log_streams(group, purge_non_empty, dry_run) 
Example #4
Source File: aws.py    From scale with Apache License 2.0
def __init__(self, resource, config, credentials=None, region_name=None):
        """Constructor

        :param resource: AWS specific token for resource type. e.g., 's3', 'sqs', etc.
        :type resource: string
        :param config: Resource specific configuration
        :type config: :class:`botocore.client.Config`
        :param credentials: Authentication values needed to access AWS. If no credentials are passed, then IAM
            role-based access is assumed.
        :type credentials: :class:`util.aws.AWSCredentials`
        :param region_name: The AWS region the resource resides in.
        :type region_name: string
        """

        self.credentials = credentials
        self.region_name = region_name
        self._client = None
        self._resource_name = resource
        self._config = config 
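Only the constructor is excerpted here; a hypothetical instantiation (the AWSClient class name and the values below are assumptions, not necessarily scale's actual API) might look like:

from botocore.config import Config

# Hypothetical usage of the constructor above.
config = Config(region_name='us-east-1')
sqs_client = AWSClient('sqs', config, credentials=None, region_name='us-east-1')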
Example #5
Source File: ls_public_bucket.py    From cube-in-a-box with MIT License
def iterate_datasets(bucket_name, config, prefix, suffix, start_date, end_date, func, unsafe, sources_policy):
    manager = Manager()
    queue = manager.Queue()

    s3 = boto3.resource('s3', config=Config(signature_version=UNSIGNED))
    bucket = s3.Bucket(bucket_name)
    logging.info("Bucket : %s prefix: %s ", bucket_name, str(prefix))
    # safety = 'safe' if not unsafe else 'unsafe'
    worker_count = cpu_count() * 2

    processes = []
    for i in range(worker_count):
        proc = Process(target=worker, args=(config, bucket_name, prefix, suffix, start_date, end_date, func, unsafe, sources_policy, queue,))
        processes.append(proc)
        proc.start()

    for obj in bucket.objects.filter(Prefix=str(prefix)):
        if obj.key.endswith(suffix):
            queue.put(obj.key)

    for i in range(worker_count):
        queue.put(GUARDIAN)

    for proc in processes:
        proc.join() 
Example #6
Source File: autoIndex.py    From cube-in-a-box with MIT License
def worker(parse_only, queue):
    s3 = boto3.resource("s3", config=Config(signature_version=UNSIGNED))
    dc = datacube.Datacube()
    idx = dc.index

    while True:
        try:
            url = queue.get(timeout=60)
            if url == STOP_SIGN:
                break
            logging.info("Processing {} {}".format(url, current_process()))
            index_dataset(idx, s3, url, parse_only)
            queue.task_done()
        except Empty:
            break
        except EOFError:
            break 
Example #7
Source File: base_aws.py    From airflow with Apache License 2.0
def __init__(
        self,
        aws_conn_id: Optional[str] = "aws_default",
        verify: Union[bool, str, None] = None,
        region_name: Optional[str] = None,
        client_type: Optional[str] = None,
        resource_type: Optional[str] = None,
        config: Optional[Config] = None
    ) -> None:
        super().__init__()
        self.aws_conn_id = aws_conn_id
        self.verify = verify
        self.client_type = client_type
        self.resource_type = resource_type
        self.region_name = region_name
        self.config = config

        if not (self.client_type or self.resource_type):
            raise AirflowException(
                'Either client_type or resource_type'
                ' must be provided.')

    # pylint: disable=too-many-statements 
Example #8
Source File: conftest.py    From sagemaker-python-sdk with Apache License 2.0
def sagemaker_session(sagemaker_client_config, sagemaker_runtime_config, boto_session):
    sagemaker_client_config.setdefault("config", Config(retries=dict(max_attempts=10)))
    sagemaker_client = (
        boto_session.client("sagemaker", **sagemaker_client_config)
        if sagemaker_client_config
        else None
    )
    runtime_client = (
        boto_session.client("sagemaker-runtime", **sagemaker_runtime_config)
        if sagemaker_runtime_config
        else None
    )

    return Session(
        boto_session=boto_session,
        sagemaker_client=sagemaker_client,
        sagemaker_runtime_client=runtime_client,
    ) 
Example #9
Source File: test_session.py    From sagemaker-python-sdk with Apache License 2.0
def test_sagemaker_session_does_not_create_bucket_on_init(
    sagemaker_client_config, sagemaker_runtime_config, boto_session
):
    sagemaker_client_config.setdefault("config", Config(retries=dict(max_attempts=10)))
    sagemaker_client = (
        boto_session.client("sagemaker", **sagemaker_client_config)
        if sagemaker_client_config
        else None
    )
    runtime_client = (
        boto_session.client("sagemaker-runtime", **sagemaker_runtime_config)
        if sagemaker_runtime_config
        else None
    )

    Session(
        boto_session=boto_session,
        sagemaker_client=sagemaker_client,
        sagemaker_runtime_client=runtime_client,
        default_bucket=CUSTOM_BUCKET_NAME,
    )

    s3 = boto3.resource("s3", region_name=boto_session.region_name)
    assert s3.Bucket(CUSTOM_BUCKET_NAME).creation_date is None 
Example #10
Source File: test_processing.py    From sagemaker-python-sdk with Apache License 2.0
def sagemaker_session_with_custom_bucket(
    boto_session, sagemaker_client_config, sagemaker_runtime_config, custom_bucket_name
):
    sagemaker_client_config.setdefault("config", Config(retries=dict(max_attempts=10)))
    sagemaker_client = (
        boto_session.client("sagemaker", **sagemaker_client_config)
        if sagemaker_client_config
        else None
    )
    runtime_client = (
        boto_session.client("sagemaker-runtime", **sagemaker_runtime_config)
        if sagemaker_runtime_config
        else None
    )

    return Session(
        boto_session=boto_session,
        sagemaker_client=sagemaker_client,
        sagemaker_runtime_client=runtime_client,
        default_bucket=custom_bucket_name,
    ) 
Example #11
Source File: connectable.py    From awslimitchecker with GNU Affero General Public License v3.0
def _max_retries_config(self):
        """
        If a ``BOTO_MAX_RETRIES_<self.api_name>`` environment variable is set,
        return a new ``botocore.config.Config`` instance using that number
        as the retries max_attempts value.

        :rtype: ``botocore.config.Config`` or None
        """
        key = 'BOTO_MAX_RETRIES_%s' % self.api_name
        if key not in os.environ:
            return None
        try:
            max_retries = int(os.environ[key])
        except Exception:
            logger.error(
                'ERROR: Found "%s" environment variable, but unable to '
                'parse value "%s" to an integer.', key, os.environ[key]
            )
            return None
        logger.debug(
            'Setting explicit botocore retry config with max_attempts=%d '
            'for "%s" API based on %s environment variable.',
            max_retries, self.api_name, key
        )
        return Config(retries={'max_attempts': max_retries}) 
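Usage is then driven entirely by the environment. For example, assuming a connectable whose api_name is 'ec2' (the instance below is hypothetical):

import os

os.environ['BOTO_MAX_RETRIES_EC2'] = '5'
config = connectable._max_retries_config()
# config is now Config(retries={'max_attempts': 5}); an unset or
# non-integer variable would yield None instead.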
Example #12
Source File: test_integration.py    From pipeline with BSD 3-Clause "New" or "Revised" License
def fetch_autoclaved_bucket(dst_dir, bucket_date):
    print("Fetch bucket")
    dst_bucket_dir = os.path.join(dst_dir, bucket_date)
    if not os.path.exists(dst_bucket_dir):
        os.makedirs(dst_bucket_dir)
    client = boto3.client("s3", config=Config(signature_version=UNSIGNED))
    resource = boto3.resource("s3", config=Config(signature_version=UNSIGNED))

    prefix = "autoclaved/jsonl.tar.lz4/{}/".format(bucket_date)
    paginator = client.get_paginator("list_objects")
    for result in paginator.paginate(Bucket="ooni-data", Delimiter="/", Prefix=prefix):
        for f in result.get("Contents", []):
            fkey = f.get("Key")
            dst_pathname = os.path.join(dst_bucket_dir, os.path.basename(fkey))
            try:
                s = os.stat(dst_pathname)
                if s.st_size == f.get("Size"):
                    continue
            except Exception:  # XXX maybe make this more strict. It's FileNotFoundError on py3 and OSError on py2
                pass
            print("[+] Downloading {}".format(dst_pathname))
            resource.meta.client.download_file("ooni-data", fkey, dst_pathname) 
Example #13
Source File: storage.py    From indico-plugins with MIT License
def __init__(self, data):
        self.parsed_data = data = self._parse_data(data)
        self.endpoint_url = data.get('host')
        if self.endpoint_url and '://' not in self.endpoint_url:
            self.endpoint_url = 'https://' + self.endpoint_url
        self.session_kwargs = {}
        self.client_kwargs = {}
        if 'profile' in data:
            self.session_kwargs['profile_name'] = data['profile']
        if 'access_key' in data:
            self.session_kwargs['aws_access_key_id'] = data['access_key']
        if 'secret_key' in data:
            self.session_kwargs['aws_secret_access_key'] = data['secret_key']
        if 'addressing_style' in data:
            self.client_kwargs['config'] = Config(s3={'addressing_style': data['addressing_style']})
        self.bucket_policy_file = data.get('bucket_policy_file')
        self.bucket_versioning = data.get('bucket_versioning') in ('1', 'true', 'yes', 'on')
        if data.get('proxy') in ('1', 'true', 'yes', 'on'):
            self.proxy_downloads = ProxyDownloadsMode.local
        elif data.get('proxy') in ('xaccelredirect', 'nginx'):
            self.proxy_downloads = ProxyDownloadsMode.nginx
        else:
            self.proxy_downloads = ProxyDownloadsMode.disabled
        self.meta = data.get('meta') 
Example #14
Source File: processpool.py    From bash-lambda-layer with MIT License
def __init__(self, client_kwargs=None):
        """Creates S3 clients for processes

        Botocore sessions and clients are not pickleable so they cannot be
        inherited across Process boundaries. Instead, they must be instantiated
        once a process is running.
        """
        self._client_kwargs = client_kwargs
        if self._client_kwargs is None:
            self._client_kwargs = {}

        client_config = deepcopy(self._client_kwargs.get('config', Config()))
        if not client_config.user_agent_extra:
            client_config.user_agent_extra = PROCESS_USER_AGENT
        else:
            client_config.user_agent_extra += " " + PROCESS_USER_AGENT
        self._client_kwargs['config'] = client_config 
Example #15
Source File: test-utils-aws.py    From opinel with GNU General Public License v2.0
def test_connect_service(self):
        client = connect_service('iam', self.creds)
        client = connect_service('iam', self.creds, config={})
        client = connect_service('iam', self.creds, silent=True)
        client = connect_service('ec2', self.creds, region_name = 'us-east-1')
        try:
            client = connect_service('opinelunittest', self.creds)
            assert(False)
        except:
            pass
        config = Config(region_name = 'us-east-1')
        client = connect_service('ec2', self.creds, config = config)
        try:
            client = connect_service('ec2', self.creds, region_name = config)
            assert(False)
        except:
            pass 
Example #16
Source File: default.py    From runway with Apache License 2.0
def get_cloudformation_client(session):
    """Get CloudFormaiton boto3 client."""
    config = Config(
        retries=dict(
            max_attempts=MAX_ATTEMPTS
        )
    )
    return session.client('cloudformation', config=config) 
Example #17
Source File: credentials.py    From runway with Apache License 2.0
def _get_credentials(self):
        """Get credentials by calling SSO get role credentials."""
        config = Config(
            signature_version=UNSIGNED,
            region_name=self._sso_region,
        )
        client = self._client_creator('sso', config=config)

        kwargs = {
            'roleName': self._role_name,
            'accountId': self._account_id,
            'accessToken': self._token_loader(self._start_url),
        }
        try:
            response = client.get_role_credentials(**kwargs)
        except client.exceptions.UnauthorizedException:
            raise UnauthorizedSSOTokenError()
        credentials = response['roleCredentials']

        credentials = {
            'ProviderType': 'sso',
            'Credentials': {
                'AccessKeyId': credentials['accessKeyId'],
                'SecretAccessKey': credentials['secretAccessKey'],
                'SessionToken': credentials['sessionToken'],
                'Expiration': self._parse_timestamp(credentials['expiration']),
            }
        }
        return credentials 
Example #18
Source File: __init__.py    From bubuku with MIT License
def cloudwatch_client(self):
        if not self._cloudwatch_client:
            self._cloudwatch_client = self.session.client(
                'cloudwatch',
                region_name=self.region,
                config=Config(retries={'max_attempts': self.retries}))
        return self._cloudwatch_client 
Example #19
Source File: file_utils.py    From CCF-BDCI-Sentiment-Analysis-Baseline with Apache License 2.0
def s3_get(url, temp_file, proxies=None):
    """Pull a file directly from S3."""
    s3_resource = boto3.resource("s3", config=Config(proxies=proxies))
    bucket_name, s3_path = split_s3_path(url)
    s3_resource.Bucket(bucket_name).download_fileobj(s3_path, temp_file) 
Example #20
Source File: default.py    From stacker with BSD 2-Clause "Simplified" License
def get_cloudformation_client(session):
    config = Config(
        retries=dict(
            max_attempts=MAX_ATTEMPTS
        )
    )
    return session.client('cloudformation', config=config) 
Example #21
Source File: __init__.py    From bubuku with MIT License
def ec2_resource(self):
        if not self._ec2_resource:
            self._ec2_resource = self.session.resource(
                'ec2',
                region_name=self.region,
                config=Config(retries={'max_attempts': self.retries}))
        return self._ec2_resource 
Example #22
Source File: __init__.py    From bubuku with MIT License
def ec2_client(self):
        if not self._ec2_client:
            self._ec2_client = self.session.client(
                'ec2',
                region_name=self.region,
                config=Config(retries={'max_attempts': self.retries}))
        return self._ec2_client 
Example #23
Source File: app.py    From aws-media-insights-engine with Apache License 2.0
def upload():
    """
    Generate a pre-signed URL that can be used to upload media files to S3 from a web application

    Returns:
        Pre-signed S3 URL for uploading files to S3 from a web application
    Raises:
        ChaliceViewError - 500
    """
    print('/upload request: '+app.current_request.raw_body.decode())
    region = os.environ['AWS_REGION']
    s3 = boto3.client('s3', region_name=region, config=Config(signature_version='s3v4', s3={'addressing_style': 'virtual'}))
    # limit uploads to 5GB
    max_upload_size = 5368709120
    try:
        response = s3.generate_presigned_post(
            Bucket=(json.loads(app.current_request.raw_body.decode())['S3Bucket']),
            Key=(json.loads(app.current_request.raw_body.decode())['S3Key']),
            Conditions=[["content-length-range", 0, max_upload_size ]],
            ExpiresIn=3600
        )
    except ClientError as e:
        logging.info(e)
        raise ChaliceViewError(
            "Unable to generate pre-signed S3 URL for uploading media: {error}".format(error=e))
    except Exception as e:
        logging.info(e)
        raise ChaliceViewError(
            "Unable to generate pre-signed S3 URL for uploading media: {error}".format(error=e))
    else:
        print("presigned url generated: ", response)
        return response

# TODO: Change the name of this method - "download" is too vague 
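generate_presigned_post returns a dict with a url and a fields mapping; the caller then POSTs the file to that URL with the fields included verbatim. A client-side sketch (using the third-party requests library; the file name is illustrative):

import requests

response = ...  # the dict returned by the /upload endpoint above
with open('video.mp4', 'rb') as f:
    upload = requests.post(
        response['url'],
        data=response['fields'],           # pre-signed fields must be sent unmodified
        files={'file': ('video.mp4', f)},  # the file part must come last
    )
upload.raise_for_status()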
Example #24
Source File: file_utils.py    From exbert with Apache License 2.0
def s3_get(url, temp_file, proxies=None):
    """Pull a file directly from S3."""
    s3_resource = boto3.resource("s3", config=Config(proxies=proxies))
    bucket_name, s3_path = split_s3_path(url)
    s3_resource.Bucket(bucket_name).download_fileobj(s3_path, temp_file) 
Example #25
Source File: file_utils.py    From exbert with Apache License 2.0
def s3_etag(url, proxies=None):
    """Check ETag on S3 object."""
    s3_resource = boto3.resource("s3", config=Config(proxies=proxies))
    bucket_name, s3_path = split_s3_path(url)
    s3_object = s3_resource.Object(bucket_name, s3_path)
    return s3_object.e_tag 
Example #26
Source File: invoke.py    From cloud-custodian with Apache License 2.0
def process(self, resources, event=None):
        params = dict(FunctionName=self.data['function'])
        if self.data.get('qualifier'):
            params['Qualifier'] = self.data['qualifier']

        if self.data.get('async', True):
            params['InvocationType'] = 'Event'

        config = Config(read_timeout=self.data.get(
            'timeout', 90), region_name=self.data.get('region', None))
        client = utils.local_session(
            self.manager.session_factory).client('lambda', config=config)
        alias = utils.get_account_alias_from_sts(
            utils.local_session(self.manager.session_factory))

        payload = {
            'version': VERSION,
            'event': event,
            'account_id': self.manager.config.account_id,
            'account': alias,
            'region': self.manager.config.region,
            'action': self.data,
            'policy': self.manager.data}

        results = []
        for resource_set in utils.chunks(resources, self.data.get('batch_size', 250)):
            payload['resources'] = resource_set
            params['Payload'] = utils.dumps(payload)
            result = client.invoke(**params)
            result['Payload'] = result['Payload'].read()
            if isinstance(result['Payload'], bytes):
                result['Payload'] = result['Payload'].decode('utf-8')
            results.append(result)
        return results 
Example #27
Source File: sqs_queue_client.py    From django-eb-sqs with MIT License
def __init__(self):
        # type: () -> None
        self.sqs = boto3.resource('sqs',
                                  region_name=settings.AWS_REGION,
                                  config=Config(retries={'max_attempts': settings.AWS_MAX_RETRIES})
                                  )
        self.queue_cache = {} 
Example #28
Source File: _client_factory.py    From taskcat with Apache License 2.0
def client(
        self, service: str, profile: str = "default", region: str = None
    ) -> boto3.client:
        region = self._get_region(region, profile)
        session = self.session(profile, region)
        kwargs = {"config": BotoConfig(retries={"max_attempts": 20})}
        if service in REGIONAL_ENDPOINT_SERVICES:
            kwargs.update({"endpoint_url": self._get_endpoint_url(service, region)})
        return self._cache_lookup(
            self._client_cache,
            [profile, region, service],
            session.client,
            [service],
            kwargs,
        ) 
Example #29
Source File: utils_aws.py    From datacollector-tests with Apache License 2.0
def create_anonymous_client():
    """Creates an anonymous s3 client.  This is useful if you need to read an object created by an anonymous user, which
    the normal client won't have access to.
    """
    return boto3.client('s3', config=Config(signature_version=UNSIGNED)) 
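Such a client needs no credentials at all; it can only read objects that are publicly accessible (the bucket and key below are illustrative):

client = create_anonymous_client()
# Works against public buckets even with no AWS credentials configured.
obj = client.get_object(Bucket='some-public-bucket', Key='path/to/object')
data = obj['Body'].read()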
Example #30
Source File: support_cases_aggregator.py    From aws-support-tickets-aggregator with MIT License
def list_account_ids():
    """
    Default requires permission to invoke organizations:ListAccounts API.

    DEFAULTS TO CALLING organizations:ListAccounts WITH CURRENT ROLE
    If CloudFormation stack is deployed in non-master AWS Organizations
    account, must assume role in that master AWS Organizations account.
    See README for details.
    """

    accounts = []
    assumed_role_arn = os.environ.get("ORG_MASTER_ACCOUNT_VIEWER_ROLE")
    if assumed_role_arn:
        session = get_session_with_arn(
            role_arn=assumed_role_arn, session_name="listAccountIds", base_session=None
        )
    else:
        session = boto3.session.Session()  # get local session
    try:
        client = session.client(
            "organizations", config=Config(retries={"max_attempts": 8})
        )
        paginator = client.get_paginator("list_accounts")
        response_iterator = paginator.paginate()
        for page in response_iterator:
            accounts.extend(page.get("Accounts", []))
    except (BotoCoreError, ClientError) as e:
        # BotoCoreError has no .response attribute, so guard before inspecting it.
        if (
            isinstance(e, ClientError)
            and e.response["Error"]["Code"] == "AccessDeniedException"
        ):
            logging.error(e)
            logging.error(
                "Could not call organizations:ListAccounts. "
                "Current account is likely not "
                "the AWS Organizations master account. "
                "See README for more details on setup. "
                "Returning empty list by default."
            )
        return []

    return [str(account_info.get("Id", "")) for account_info in accounts]