Python botocore.exceptions.ClientError() Examples
The following are 30 code examples of botocore.exceptions.ClientError().
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example.
You may also want to check out all available functions/classes of the module
botocore.exceptions, or try the search function.
Example #1
Source File: deploy.py From aegea with Apache License 2.0 | 7 votes |
def grant(args):
    """ Given an IAM role or instance name, attach an IAM policy granting
    appropriate permissions to subscribe to deployments. Given a GitHub repo
    URL, create and record deployment keys for the repo and any of its private
    submodules, making the keys accessible to the IAM role. """
    try:
        # Try to interpret the argument as an IAM role name first.
        role = resources.iam.Role(args.iam_role_or_instance)
        role.load()
    except ClientError:
        # Not a role: fall back to resolving the instance's attached IAM role.
        role = get_iam_role_for_instance(args.iam_role_or_instance)
    role.attach_policy(PolicyArn=ensure_deploy_iam_policy().arn)
    # Create one deploy key per repo: the main repo plus each private submodule.
    for private_repo in [args.repo] + list(private_submodules(args.repo)):
        gh_owner_name, gh_repo_name = parse_repo_name(private_repo)
        # Store the generated SSH key as a secret readable by the IAM role.
        secret = secrets.put(argparse.Namespace(secret_name="deploy.{}.{}".format(gh_owner_name, gh_repo_name),
                                                iam_role=role.name,
                                                instance_profile=None,
                                                iam_group=None,
                                                iam_user=None,
                                                generate_ssh_key=True))
        # Register the public half of the key with GitHub as a deploy key.
        get_repo(private_repo).create_key(__name__ + "." + role.name, secret["ssh_public_key"])
        logger.info("Created deploy key %s for IAM role %s to access GitHub repo %s",
                    secret["ssh_key_fingerprint"], role.name, private_repo)
Example #2
Source File: __init__.py From aegea with Apache License 2.0 | 7 votes |
def ensure_security_group(name, vpc, tcp_ingress=frozenset()):
    """Return the security group named *name* in *vpc*, creating it if needed.

    After creating a new group, polls (up to ~90s) until it is visible to
    describe calls, then ensures each requested TCP ingress rule exists.

    :param name: security group name (also used as the description)
    :param vpc: boto3 VPC resource to create/look up the group in
    :param tcp_ingress: iterable of dicts with "port" and either "cidr" or
        "source_security_group_name" keys
    """
    try:
        security_group = resolve_security_group(name, vpc)
    except (ClientError, KeyError):
        logger.info("Creating security group %s for %s", name, vpc)
        security_group = vpc.create_security_group(GroupName=name, Description=name)
        # Newly created groups are eventually consistent; wait until visible.
        for i in range(90):
            try:
                clients.ec2.describe_security_groups(GroupIds=[security_group.id])
                break  # FIX: stop polling once the group is visible (original looped all 90 times)
            except ClientError:
                time.sleep(1)
    for rule in tcp_ingress:
        source_security_group_id = None
        if "source_security_group_name" in rule:
            source_security_group_id = resolve_security_group(rule["source_security_group_name"], vpc).id
        ensure_ingress_rule(security_group, IpProtocol="tcp", FromPort=rule["port"], ToPort=rule["port"],
                            CidrIp=rule.get("cidr"), SourceSecurityGroupId=source_security_group_id)
    return security_group
Example #3
Source File: remove_vpc.py From vpc-delete with MIT License | 6 votes |
def delete_rtbs(ec2, args):
    """ Delete the route tables """
    try:
        rtbs = ec2.describe_route_tables(**args)['RouteTables']
    except ClientError as e:
        print(e.response['Error']['Message'])
        # FIX: return here — the original fell through with `rtbs` unbound,
        # raising NameError on the `if rtbs:` check below.
        return
    if rtbs:
        for rtb in rtbs:
            main = 'false'
            for assoc in rtb['Associations']:
                main = assoc['Main']
            # Skip the VPC's main route table; it cannot be deleted directly.
            if main == True:
                continue
            rtb_id = rtb['RouteTableId']
            try:
                result = ec2.delete_route_table(RouteTableId=rtb_id)
            except ClientError as e:
                # Best-effort: report and keep deleting the remaining tables.
                print(e.response['Error']['Message'])
    return
Example #4
Source File: crypto.py From aegea with Apache License 2.0 | 6 votes |
def ensure_ssh_key(name=None, base_name=__name__, verify_pem_file=True):
    """Ensure an SSH key pair with the given name exists both locally and in EC2.

    If *name* is None, a per-user, per-host default name is derived. Returns the
    key name. Raises KeyError if the key exists in EC2 but the local PEM file is
    missing (and verify_pem_file is True).
    """
    if name is None:
        from getpass import getuser
        from socket import gethostname
        # Default name: <base>.<user>.<short hostname>
        name = base_name + "." + getuser() + "." + gethostname().split(".")[0]
    try:
        ec2_key_pairs = list(resources.ec2.key_pairs.filter(KeyNames=[name]))
        if verify_pem_file and not os.path.exists(get_ssh_key_path(name)):
            msg = "Key {} found in EC2, but not in ~/.ssh."
            msg += " Delete the key in EC2, copy it to {}, or specify another key."
            raise KeyError(msg.format(name, get_ssh_key_path(name)))
    except ClientError as e:
        # Only tolerate "key not found"; re-raise anything else.
        expect_error_codes(e, "InvalidKeyPair.NotFound")
        ec2_key_pairs = None
    if not ec2_key_pairs:
        # Key not in EC2: create/load a local key and import its public half.
        ssh_key = ensure_local_ssh_key(name)
        resources.ec2.import_key_pair(KeyName=name,
                                      PublicKeyMaterial=get_public_key_from_pair(ssh_key))
        logger.info("Imported SSH key %s", get_ssh_key_path(name))
    add_ssh_key_to_agent(name)
    return name
Example #5
Source File: file_utils.py From BERT-Relation-Extraction with Apache License 2.0 | 6 votes |
def s3_request(func):
    """
    Wrapper function for s3 requests in order to create more helpful error
    messages.
    """
    @wraps(func)
    def wrapper(url, *args, **kwargs):
        try:
            return func(url, *args, **kwargs)
        except ClientError as exc:
            # FIX: compare the error code as a string. S3 error codes such as
            # "NoSuchKey" are not numeric; int() on them raised a ValueError
            # that masked the original ClientError.
            if str(exc.response.get("Error", {}).get("Code", "")) == "404":
                raise EnvironmentError("file {} not found".format(url))
            else:
                raise
    return wrapper
Example #6
Source File: file_utils.py From cmrc2019 with Creative Commons Attribution Share Alike 4.0 International | 6 votes |
def s3_request(func: Callable):
    """
    Wrapper function for s3 requests in order to create more helpful error
    messages.
    """
    @wraps(func)
    def wrapper(url: str, *args, **kwargs):
        try:
            return func(url, *args, **kwargs)
        except ClientError as exc:
            # FIX: compare the error code as a string. S3 error codes such as
            # "NoSuchKey" are not numeric; int() on them raised a ValueError
            # that masked the original ClientError.
            if str(exc.response.get("Error", {}).get("Code", "")) == "404":
                raise FileNotFoundError("file {} not found".format(url))
            else:
                raise
    return wrapper
Example #7
Source File: PortChange_Slingr.py From ChaoSlingr with Apache License 2.0 | 6 votes |
def lambda_handler(event, context):
    """Authorize a TCP ingress rule on a security group described by *event*.

    Expects SecurityGroupId, IpRanges, IpProtocol, FromPort and ToPort keys in
    the event; logs and does nothing if any are missing.
    """
    print(event)
    # FIX: 'ToPort' is read below but the original never checked for it,
    # raising KeyError instead of the friendly "parameters missing" message.
    required = ('SecurityGroupId', 'IpRanges', 'IpProtocol', 'FromPort', 'ToPort')
    if all(key in event for key in required):
        try:
            data = ec2.authorize_security_group_ingress(
                GroupId=event['SecurityGroupId'],
                IpPermissions=[
                    {'IpProtocol': event['IpProtocol'],
                     'FromPort': event['FromPort'],
                     'ToPort': event['ToPort'],
                     'IpRanges': event['IpRanges']
                     }
                ])
            print('Ingress Successfully Set %s' % data)
        except ClientError as e:
            print(e)
    else:
        print("One or more parameters are missing. Nothing to do.")
Example #8
Source File: __init__.py From aegea with Apache License 2.0 | 6 votes |
def ensure_s3_bucket(name=None, policy=None, lifecycle=None):
    """Return an S3 bucket with the given name, creating it if it doesn't exist.

    Defaults the name to an account-scoped "aegea-assets-<account>" bucket.
    Optionally applies a bucket policy and a lifecycle configuration.
    """
    if name is None:
        name = "aegea-assets-{}".format(ARN.get_account_id())
    bucket = resources.s3.Bucket(name)
    try:
        # head_bucket raises ClientError if the bucket is missing/inaccessible.
        clients.s3.head_bucket(Bucket=bucket.name)
    except ClientError as e:
        logger.debug(e)
        # us-east-1 must not pass a LocationConstraint; other regions must.
        if ARN.get_region() == "us-east-1":
            bucket.create()
        else:
            bucket.create(CreateBucketConfiguration=dict(LocationConstraint=ARN.get_region()))
        bucket.wait_until_exists()
    if policy:
        bucket.Policy().put(Policy=str(policy))
    if lifecycle:
        bucket.LifecycleConfiguration().put(LifecycleConfiguration=dict(lifecycle))
    return bucket
Example #9
Source File: throttle_decorator.py From bluecanary with MIT License | 6 votes |
def throttle(max_retries=MAX_RETRIES, interval=DECISECOND):
    """Decorator factory: retry the wrapped call with exponential backoff when
    AWS throttles it.

    Retries up to *max_retries* times, sleeping (2 ** retries) * interval
    seconds before each attempt. ClientErrors that are not throttling are
    re-raised immediately.

    NOTE(review): if all retries are exhausted the wrapper falls off the loop
    and returns None — preserved from the original.
    """
    def throttle_decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            retries = 0
            while retries < max_retries:
                try:
                    # FIX: the original used `2 ^ retries`, which is bitwise
                    # XOR in Python, not exponentiation — the backoff never
                    # actually grew exponentially.
                    sleep((2 ** retries) * interval)
                    return func(*args, **kwargs)
                except ClientError as e:
                    if 'Throttling' in e.args[0]:
                        retries += 1
                    else:
                        raise e
        return wrapper
    return throttle_decorator
Example #10
Source File: billing.py From aegea with Apache License 2.0 | 6 votes |
def ls(args):
    """List/billing-report display: fetch this month's detailed billing report
    manifest from S3, decompress each report, and page a tabulated view.
    """
    bucket = resources.s3.Bucket(args.billing_reports_bucket.format(account_id=ARN.get_account_id()))
    now = datetime.utcnow()
    # Default to the current year/month when not given on the command line.
    year = args.year or now.year
    month = str(args.month or now.month).zfill(2)
    # Billing report periods span <month>01 to <next month>01; December rolls
    # the year via the // 12 term and % 12 wraps month 13 back to 01.
    next_year = year + ((args.month or now.month) + 1) // 12
    next_month = str(((args.month or now.month) + 1) % 12).zfill(2)
    manifest_name = "aegea/{report}/{yr}{mo}01-{next_yr}{next_mo}01/{report}-Manifest.json"
    manifest_name = manifest_name.format(report=__name__, yr=year, mo=month, next_yr=next_year, next_mo=next_month)
    try:
        manifest = json.loads(bucket.Object(manifest_name).get().get("Body").read())
        for report_key in manifest["reportKeys"]:
            report = BytesIO(bucket.Object(report_key).get().get("Body").read())
            with gzip.GzipFile(fileobj=report) as fh:
                # NOTE(review): csv.DictReader over a binary GzipFile requires
                # a text wrapper on Python 3 — confirm intended runtime.
                reader = csv.DictReader(fh)
                # NOTE(review): the inner call consumes `reader` (not `line`);
                # presumably the loop exists only to drive pagination — verify.
                for line in reader:
                    page_output(tabulate(filter_line_items(reader, args), args))
    except ClientError as e:
        msg = 'Unable to get report {} from {}: {}. Run "aegea billing configure" to enable reports.'
        raise AegeaException(msg.format(manifest_name, bucket, e))
Example #11
Source File: ecs.py From aegea with Apache License 2.0 | 6 votes |
def watch(args):
    """Poll an ECS task until it stops, streaming its CloudWatch log events.

    Logs every task status transition and prints each new log event; exits the
    loop when the task reaches STOPPED (or when logs were retrieved but the
    task record has already expired from ECS).
    """
    logger.info("Watching task %s (%s)", args.task_id, args.cluster)
    last_status, events_received = None, 0
    # Log stream name convention: <task_name>/<task_name>/<task id suffix>.
    log_reader = CloudwatchLogReader("/".join([args.task_name, args.task_name, os.path.basename(args.task_id)]),
                                     log_group_name=args.task_name)
    while last_status != "STOPPED":
        res = clients.ecs.describe_tasks(cluster=args.cluster, tasks=[args.task_id])
        if len(res["tasks"]) == 1:
            task_desc = res["tasks"][0]
            if task_desc["lastStatus"] != last_status:
                # Only log on status transitions, not every poll.
                logger.info("Task %s %s", args.task_id, format_task_status(task_desc["lastStatus"]))
                last_status = task_desc["lastStatus"]
        try:
            for event in log_reader:
                print_event(event)
                events_received += 1
        except ClientError as e:
            # The log stream may not exist yet; tolerate only that error.
            expect_error_codes(e, "ResourceNotFoundException")
        if last_status is None and events_received > 0:
            break  # Logs retrieved successfully but task record is no longer in ECS
        time.sleep(1)
Example #12
Source File: file_utils.py From neuralcoref with MIT License | 6 votes |
def s3_request(func):
    """
    Wrapper function for s3 requests in order to create more helpful error
    messages.
    """
    @wraps(func)
    def wrapper(url, *args, **kwargs):
        try:
            return func(url, *args, **kwargs)
        except ClientError as exc:
            # FIX: compare the error code as a string. S3 error codes such as
            # "NoSuchKey" are not numeric; int() on them raised a ValueError
            # that masked the original ClientError.
            if str(exc.response.get("Error", {}).get("Code", "")) == "404":
                raise EnvironmentError(f"file {url} not found")
            else:
                raise
    return wrapper
Example #13
Source File: flow_logs.py From aegea with Apache License 2.0 | 6 votes |
def create(args):
    """Create VPC flow logs for the given resource (VPC, subnet, or ENI).

    Infers the resource type from the ID prefix; with no resource argument,
    ensures a default VPC and logs that. Idempotent: an already-existing flow
    log is reported, not treated as an error.
    """
    if args.resource and args.resource.startswith("vpc-"):
        resource_type = "VPC"
    elif args.resource and args.resource.startswith("subnet-"):
        resource_type = "Subnet"
    elif args.resource and args.resource.startswith("eni-"):
        resource_type = "NetworkInterface"
    elif args.resource:
        raise AegeaException('Unrecognized resource type: "{}"'.format(args.resource))
    else:
        # No resource given: fall back to the (possibly newly created) default VPC.
        args.resource = ensure_vpc().id
        resource_type = "VPC"
    # Role that grants the flow logs service permission to write to CloudWatch.
    flow_logs_iam_role = ensure_iam_role(__name__,
                                         policies=["service-role/AmazonAPIGatewayPushToCloudWatchLogs"],
                                         trust=["vpc-flow-logs"])
    try:
        return clients.ec2.create_flow_logs(ResourceIds=[args.resource],
                                            ResourceType=resource_type,
                                            TrafficType=args.traffic_type,
                                            LogGroupName=__name__,
                                            DeliverLogsPermissionArn=flow_logs_iam_role.arn)
    except ClientError as e:
        # Tolerate only the "already exists" error and report it as such.
        expect_error_codes(e, "FlowLogAlreadyExists")
        return dict(FlowLogAlreadyExists=True)
Example #14
Source File: test_throttle.py From bluecanary with MIT License | 6 votes |
def test_throttle(self):
    """A throttled call must be retried silently, not surfaced as ClientError.

    The decorated function always raises a throttling ClientError; with the
    @throttle decorator the retries are exhausted and the wrapper returns
    None instead of propagating the throttling error.
    """
    @throttle(max_retries=2, interval=0.001)
    def function():
        error_response = {
            'Error': {
                'Code': 'TEST',
                'Message': 'Throttling',
            }
        }
        raise ClientError(error_response=error_response, operation_name='test')
    try:
        function()
    except ClientError as e:
        if 'Throttling' in e.args[0]:
            # A throttling ClientError escaping the decorator is the failure case.
            self.fail('A throttled function should not raise a ClientError '
                      'for throttling.')
        else:
            raise e
Example #15
Source File: environment.py From sqs-s3-logger with Apache License 2.0 | 6 votes |
def get_create_queue(self):
    """Return the cached SQS queue, looking it up by name and creating it
    (with the configured retention period) if it does not exist yet.
    """
    if not self._queue:
        try:
            q = self._sqs.get_queue_by_name(QueueName=self._queue_name)
        except ClientError as e:
            # Only a missing queue is tolerated; anything else is re-raised.
            if e.response['Error']['Code'] == 'AWS.SimpleQueueService.NonExistentQueue':
                q = None
            else:
                raise e
        if not q:
            LOGGER.info('Creating queue {}'.format(self._queue_name))
            q = self._create_queue_with_pushback(
                self._queue_name,
                {'MessageRetentionPeriod': str(self.TWO_WEEKS)}
            )
        # Cache so subsequent calls skip the API round trip.
        self._queue = q
    return self._queue
Example #16
Source File: ingestor.py From awspx with GNU General Public License v3.0 | 6 votes |
def get_object_acls(self):
    """Fetch and attach the S3 ACL grants for every ingested S3 object node."""
    # Resource name is derived from the class name (e.g. "S3" -> "s3").
    sr = self.session.resource(self.__class__.__name__.lower())
    for obj in self.get("AWS::S3::Object").get("Resource"):
        try:
            arn = obj.get("Arn")
            # ARN form: arn:aws:s3:::<bucket>/<key...>; rejoin key segments
            # that contained '/'.
            bucket, *key = arn.split(':::')[1].split('/')
            key = "/".join(key)
            obj.set("ACL", sr.ObjectAcl(bucket, key).grants)
            self._print(f"[+] Updated object acl for {obj}")
        except ClientError as e:
            if "AccessDenied" in str(e):
                self._print(
                    f"[!] Access denied when getting ACL for {obj}")
            else:
                # NOTE(review): two positional args here, unlike the single
                # f-string calls above — confirm _print accepts *args.
                self._print("[!]", e)
Example #17
Source File: allennlp_file_utils.py From ConvLab with MIT License | 6 votes |
def s3_request(func: Callable):
    """
    Wrapper function for s3 requests in order to create more helpful error
    messages.
    """
    @wraps(func)
    def wrapper(url: str, *args, **kwargs):
        try:
            return func(url, *args, **kwargs)
        except ClientError as exc:
            # FIX: compare the error code as a string. S3 error codes such as
            # "NoSuchKey" are not numeric; int() on them raised a ValueError
            # that masked the original ClientError.
            if str(exc.response.get("Error", {}).get("Code", "")) == "404":
                raise FileNotFoundError("file {} not found".format(url))
            else:
                raise
    return wrapper
Example #18
Source File: test_s3.py From S4 with GNU General Public License v3.0 | 6 votes |
def test_delete(self, s3_client):
    """Deleting an uploaded key succeeds and leaves the object truly gone
    (client get() returns None and a raw head_object yields a 404).
    """
    # given: an object uploaded directly via the underlying boto client
    s3_client.boto.put_object(
        Bucket=s3_client.bucket,
        Key=os.path.join(s3_client.prefix, "war.png"),
        Body="bang",
    )
    # when
    assert s3_client.delete("war.png") is True
    # then: gone both through the wrapper and at the raw S3 level
    assert s3_client.get("war.png") is None
    with pytest.raises(ClientError) as exc:
        s3_client.boto.head_object(
            Bucket=s3_client.bucket, Key=os.path.join(s3_client.prefix, "war.png")
        )
    assert exc.value.response["Error"]["Code"] == "404"
Example #19
Source File: file_utils.py From crosentgec with GNU General Public License v3.0 | 6 votes |
def s3_request(func: Callable):
    """
    Wrapper function for s3 requests in order to create more helpful error
    messages.
    """
    @wraps(func)
    def wrapper(url: str, *args, **kwargs):
        try:
            return func(url, *args, **kwargs)
        except ClientError as exc:
            # FIX: compare the error code as a string. S3 error codes such as
            # "NoSuchKey" are not numeric; int() on them raised a ValueError
            # that masked the original ClientError.
            if str(exc.response.get("Error", {}).get("Code", "")) == "404":
                raise FileNotFoundError("file {} not found".format(url))
            else:
                raise
    return wrapper
Example #20
Source File: file_utils.py From FARM with Apache License 2.0 | 6 votes |
def s3_request(func):
    """
    Wrapper function for s3 requests in order to create more helpful error
    messages.
    """
    @wraps(func)
    def wrapper(url, *args, **kwargs):
        try:
            return func(url, *args, **kwargs)
        except ClientError as exc:
            # FIX: compare the error code as a string. S3 error codes such as
            # "NoSuchKey" are not numeric; int() on them raised a ValueError
            # that masked the original ClientError.
            if str(exc.response.get("Error", {}).get("Code", "")) == "404":
                raise EnvironmentError("file {} not found".format(url))
            else:
                raise
    return wrapper
Example #21
Source File: test_s3.py From ec2-api with Apache License 2.0 | 6 votes |
def test_create_bucket_and_key_and_delete_key(self):
    # Test key operations on buckets: create, put, read back, delete,
    # and verify a deleted key raises ClientError on get.
    bucket_name = 'testbucket'
    key_name = 'somekey'
    key_contents = b'somekey'
    self.conn.create_bucket(Bucket=bucket_name)
    self.conn.put_object(Bucket=bucket_name, Key=key_name,
                         Body=key_contents)
    # make sure the contents are correct
    key = self.conn.get_object(Bucket=bucket_name, Key=key_name)
    self.assertEqual(key['Body'].read(), key_contents,
                     "Bad contents")
    # delete the key
    self.conn.delete_object(Bucket=bucket_name, Key=key_name)
    # reading a deleted key must fail with a ClientError
    self.assertRaises(botocore_exception.ClientError, self.conn.get_object,
                      Bucket=bucket_name, Key=key_name)
Example #22
Source File: remove_vpc.py From vpc-delete with MIT License | 6 votes |
def delete_subs(ec2, args):
    """ Delete the subnets """
    try:
        subs = ec2.describe_subnets(**args)['Subnets']
    except ClientError as e:
        print(e.response['Error']['Message'])
        # FIX: return here — the original fell through with `subs` unbound,
        # raising NameError on the `if subs:` check below.
        return
    if subs:
        for sub in subs:
            sub_id = sub['SubnetId']
            try:
                result = ec2.delete_subnet(SubnetId=sub_id)
            except ClientError as e:
                # Best-effort: report and keep deleting the remaining subnets.
                print(e.response['Error']['Message'])
    return
Example #23
Source File: remove_vpc.py From vpc-delete with MIT License | 6 votes |
def get_regions(ec2):
    """ Return all AWS regions """
    region_names = []
    try:
        described = ec2.describe_regions()['Regions']
    except ClientError as err:
        print(err.response['Error']['Message'])
    else:
        # Success path: pull just the name out of each region record.
        region_names = [entry['RegionName'] for entry in described]
    return region_names
Example #24
Source File: remove_vpc.py From vpc-delete with MIT License | 6 votes |
def delete_sgps(ec2, args):
    """ Delete any security groups """
    try:
        sgps = ec2.describe_security_groups(**args)['SecurityGroups']
    except ClientError as e:
        print(e.response['Error']['Message'])
        # FIX: return here — the original fell through with `sgps` unbound,
        # raising NameError on the `if sgps:` check below.
        return
    if sgps:
        for sgp in sgps:
            default = sgp['GroupName']
            # The "default" group cannot be deleted; skip it.
            if default == 'default':
                continue
            sg_id = sgp['GroupId']
            try:
                result = ec2.delete_security_group(GroupId=sg_id)
            except ClientError as e:
                # Best-effort: report and keep deleting the remaining groups.
                print(e.response['Error']['Message'])
    return
Example #25
Source File: file_utils.py From mrc-for-flat-nested-ner with Apache License 2.0 | 6 votes |
def s3_request(func: Callable):
    """
    Wrapper function for s3 requests in order to create more helpful error
    messages.
    """
    @wraps(func)
    def wrapper(url: str, *args, **kwargs):
        try:
            return func(url, *args, **kwargs)
        except ClientError as exc:
            # FIX: compare the error code as a string. S3 error codes such as
            # "NoSuchKey" are not numeric; int() on them raised a ValueError
            # that masked the original ClientError.
            if str(exc.response.get("Error", {}).get("Code", "")) == "404":
                raise FileNotFoundError("file {} not found".format(url))
            else:
                raise
    return wrapper
Example #26
Source File: remove_vpc.py From vpc-delete with MIT License | 6 votes |
def delete_acls(ec2, args):
    """ Delete the network access lists (NACLs) """
    try:
        acls = ec2.describe_network_acls(**args)['NetworkAcls']
    except ClientError as e:
        print(e.response['Error']['Message'])
        # FIX: return here — the original fell through with `acls` unbound,
        # raising NameError on the `if acls:` check below.
        return
    if acls:
        for acl in acls:
            default = acl['IsDefault']
            # The VPC's default NACL cannot be deleted; skip it.
            if default == True:
                continue
            acl_id = acl['NetworkAclId']
            try:
                result = ec2.delete_network_acl(NetworkAclId=acl_id)
            except ClientError as e:
                # Best-effort: report and keep deleting the remaining NACLs.
                print(e.response['Error']['Message'])
    return
Example #27
Source File: file_utils.py From BERT-Relation-Extraction with Apache License 2.0 | 6 votes |
def s3_request(func):
    """
    Wrapper function for s3 requests in order to create more helpful error
    messages.
    """
    @wraps(func)
    def wrapper(url, *args, **kwargs):
        try:
            return func(url, *args, **kwargs)
        except ClientError as exc:
            # FIX: compare the error code as a string. S3 error codes such as
            # "NoSuchKey" are not numeric; int() on them raised a ValueError
            # that masked the original ClientError.
            if str(exc.response.get("Error", {}).get("Code", "")) == "404":
                raise EnvironmentError("file {} not found".format(url))
            else:
                raise
    return wrapper
Example #28
Source File: cmd.py From aws-greengrass-group-setup with Apache License 2.0 | 5 votes |
def _create_attach_thing_policy(cert_arn, thing_policy, iot_client, policy_name): if thing_policy: try: iot_client.create_policy( policyName=policy_name, policyDocument=thing_policy ) except ClientError as ce: if ce.response['Error']['Code'] == 'EntityAlreadyExists': logging.warning( "Policy '{0}' exists. Using existing Policy".format( policy_name)) else: logging.error("Unexpected Error: {0}".format(ce)) except BaseException as e: logging.error("Error type: {0} message: {1}".format( e, str(type(e)))) # even if there's an exception creating the policy, try to attach iot_client.attach_principal_policy( policyName=policy_name, principal=cert_arn ) logging.info("Created {0} and attached to {1}".format( policy_name, cert_arn)) else: logging.warning("No thing policy to create and attach.")
Example #29
Source File: cmd.py From aws-greengrass-group-setup with Apache License 2.0 | 5 votes |
def _delete_thing(cert_arn, cert_id, thing_name, region, policy_name,
                  profile_name):
    """Tear down an IoT Thing: deactivate and delete its certificate, detach
    the certificate and policy, then delete the Thing itself.

    Each phase is wrapped in its own try/except so a certificate failure does
    not prevent the Thing deletion attempt.
    """
    iot_client = _get_iot_session(region=region, profile_name=profile_name)
    try:
        # update certificate to an INACTIVE status.
        logging.info('[_delete_thing] deactivating cert:{0}'.format(
            cert_id))
        iot_client.update_certificate(
            certificateId=cert_id,
            newStatus='INACTIVE'
        )
        # Next, detach the Thing principal/certificate from the Thing.
        logging.info('[_delete_thing] detach cert')
        iot_client.detach_thing_principal(
            thingName=thing_name,
            principal=cert_arn
        )
        logging.info('[_delete_thing] detach principal policy:{0}'.format(
            policy_name)
        )
        iot_client.detach_principal_policy(
            policyName=policy_name,
            principal=cert_arn
        )
        # finally delete the Certificate
        iot_client.delete_certificate(certificateId=cert_id)
    except ClientError as ce:
        logging.error(ce)
    # delete the Thing
    logging.info('Deleting thing_name:{0}'.format(thing_name))
    try:
        # expectedVersion guards against deleting a concurrently-modified Thing.
        thing = iot_client.describe_thing(thingName=thing_name)
        iot_client.delete_thing(
            thingName=thing_name, expectedVersion=thing['version']
        )
    except ClientError as ce:
        logging.error(ce)
Example #30
Source File: s3Client.py From hsds with Apache License 2.0 | 5 votes |
def list_keys(self, prefix='', deliminator='', suffix='', include_stats=False,
              callback=None, bucket=None, limit=None):
    """ return keys matching the arguments """
    # bucket is required; empty/None is a caller error.
    if not bucket:
        log.error("list_keys - bucket not set")
        raise KeyError()
    log.info(f"list_keys('{prefix}','{deliminator}','{suffix}', include_stats={include_stats}")
    paginator = self._client.get_paginator('list_objects')
    if include_stats:
        # use a dictionary to hold return values
        key_names = {}
    else:
        # just use a list
        key_names = []
    count = 0
    try:
        for page in paginator.paginate(
                PaginationConfig={'PageSize': 1000},
                Bucket=bucket,
                Prefix=prefix,
                Delimiter=deliminator):
            # _getPageItems mutates key_names in place with this page's keys.
            self._getPageItems(page, key_names, include_stats=include_stats)
            # NOTE(review): key_names accumulates across pages, so this adds
            # the running total each iteration rather than the per-page count
            # — the limit may trigger early. Confirm intended.
            count += len(key_names)
            if callback:
                callback(self._app, key_names)
            if limit and count >= limit:
                log.info(f"list_keys - reached limit {limit}")
                break
    except ClientError as ce:
        # presumably the common cause; other client errors land here too.
        log.warn(f"bucket: {bucket} does not exist, exception: {ce}")
        raise
    except Exception as e:
        log.error(f"s3 paginate got exception {type(e)}: {e}")
        raise
    # NOTE(review): the `suffix` parameter is never used in this body.
    log.info(f"getS3Keys done, got {len(key_names)} keys")
    return key_names