Python django.conf.settings.AWS_SECRET_ACCESS_KEY Examples
The following are 16 code examples of the django.conf.settings.AWS_SECRET_ACCESS_KEY setting. You can go to the original project or source file by following the link above each example. You may also want to check out all available functions and classes of the module django.conf.settings, or try the search function.
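AWS_SECRET_ACCESS_KEY is not defined by Django itself; it is a project-level setting (used by libraries such as django-storages and by the code below) that each project declares in its own settings module, usually read from the environment. A minimal sketch of how such a settings.py might look, assuming the conventional AWS environment variable names:

import os

# settings.py -- a minimal sketch; reading credentials from the environment
# keeps secrets out of version control. The variable names are the AWS
# conventions, not a Django requirement.
AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID", "")
AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY", "")
AWS_STORAGE_BUCKET_NAME = os.environ.get("AWS_STORAGE_BUCKET_NAME", "")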
Example #1
Source File: factories.py From opencraft with GNU Affero General Public License v3.0
def _check_environment():
    """ Check environment and report potential problems for production instances """
    if settings.INSTANCE_STORAGE_TYPE == StorageContainer.S3_STORAGE and \
            not (settings.AWS_ACCESS_KEY_ID and settings.AWS_SECRET_ACCESS_KEY):
        logger.warning(
            "AWS support is currently enabled. Add AWS_ACCESS_KEY_ID and "
            "AWS_SECRET_ACCESS_KEY settings or adjust INSTANCE_STORAGE_TYPE setting."
        )
        return False
    if not MySQLServer.objects.exists() and settings.DEFAULT_INSTANCE_MYSQL_URL is None:
        logger.warning(
            "No MySQL servers configured, and default URL for external MySQL database is missing. "
            "Create at least one MySQLServer, or set DEFAULT_INSTANCE_MYSQL_URL in your .env."
        )
        return False
    if not MongoDBServer.objects.exists() and settings.DEFAULT_INSTANCE_MONGO_URL is None:
        logger.warning(
            "No MongoDB servers configured, and default URL for external MongoDB database is missing. "
            "Create at least one MongoDBServer, or set DEFAULT_INSTANCE_MONGO_URL in your .env."
        )
        return False
    return True
Example #2
Source File: mlflow_model_manager.py From lexpredict-contraxsuite with GNU Affero General Public License v3.0
def apply_os_env_from_settings(self):
    os.environ[AWS_ACCESS_KEY_ID] = settings.AWS_ACCESS_KEY_ID
    os.environ[AWS_SECRET_ACCESS_KEY] = settings.AWS_SECRET_ACCESS_KEY
    os.environ[MLFLOW_S3_ENDPOINT_URL] = settings.MLFLOW_S3_ENDPOINT_URL
Example #3
Source File: utils.py From developer-portal with Mozilla Public License 2.0
def set_up_boto3():
    """
    A DRY place to make sure AWS credentials in settings override
    environment-based credentials.

    Boto3 will fall back to:
    http://boto3.readthedocs.io/en/latest/guide/configuration.html

    Taken from https://github.com/datadesk/django-bakery/blob/
    a2f1f74b03951450d797ec70cc9872d6c694e1e3/bakery/management/commands/__init__.py#L8
    """
    session_kwargs = {}
    if hasattr(settings, "AWS_ACCESS_KEY_ID"):
        session_kwargs["aws_access_key_id"] = settings.AWS_ACCESS_KEY_ID
    if hasattr(settings, "AWS_SECRET_ACCESS_KEY"):
        session_kwargs["aws_secret_access_key"] = settings.AWS_SECRET_ACCESS_KEY
    boto3.setup_default_session(**session_kwargs)
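Once set_up_boto3() has run, clients created through the module-level boto3 functions pick up the default session it configured. A minimal usage sketch (the bucket name is a hypothetical placeholder):

set_up_boto3()
# boto3.client() draws on the default session configured above, so the
# Django-provided credentials are used when they were present in settings
s3 = boto3.client("s3")
s3.list_objects_v2(Bucket="example-bucket")  # hypothetical bucket name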
Example #4
Source File: compress_css_js_files.py From django-webpacker with MIT License
def upload_to_s3(css_file):
    bucket_name = settings.AWS_BUCKET_NAME
    conn = S3Connection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
    bucket = conn.get_bucket(bucket_name=bucket_name)
    folder = 'webpack_bundles/'
    filename = css_file.split('/')[-1]
    with open(css_file, 'r') as file_obj:
        content = file_obj.read()
    mime = mimetypes.guess_type(filename)[0]
    k = Key(bucket)
    k.key = folder + filename
    k.set_metadata("Content-Type", mime)
    k.set_contents_from_string(content)
    k.set_acl("public-read")
Example #5
Source File: api_views.py From open-humans with MIT License
def post(self, request):
    super().post(request)
    key = get_upload_path(self.project.id_label, self.form.cleaned_data["filename"])
    datafile = ProjectDataFile(
        user=self.project_member.member.user,
        file=key,
        metadata=self.form.cleaned_data["metadata"],
        direct_sharing_project=self.project,
    )
    datafile.save()
    datafile.datatypes.set(self.form.cleaned_data["datatypes"])
    s3 = S3Connection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
    url = s3.generate_url(
        expires_in=settings.INCOMPLETE_FILE_EXPIRATION_HOURS * 60 * 60,
        method="PUT",
        bucket=settings.AWS_STORAGE_BUCKET_NAME,
        key=key,
    )
    return Response({"id": datafile.id, "url": url}, status=status.HTTP_201_CREATED)
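S3Connection here comes from the legacy boto library, which is unmaintained. For comparison, a sketch of generating the same presigned-PUT URL with boto3, assuming the same setting names as the example above (the object key is a hypothetical placeholder):

import boto3

s3 = boto3.client(
    "s3",
    aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
    aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
)
key = "member-files/example.json"  # hypothetical object key
url = s3.generate_presigned_url(
    "put_object",
    Params={"Bucket": settings.AWS_STORAGE_BUCKET_NAME, "Key": key},
    ExpiresIn=settings.INCOMPLETE_FILE_EXPIRATION_HOURS * 60 * 60,
)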
Example #6
Source File: storage.py From opencraft with GNU Affero General Public License v3.0
def iam(self):
    """ Create connection to the IAM service """
    if self._iam_client is None:
        self._iam_client = boto3.client(
            service_name='iam',
            region_name=self.s3_region or None,
            aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
            aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY
        )
    return self._iam_client
Example #7
Source File: utils.py From opentaps_seas with GNU Lesser General Public License v3.0
def check_boto_config():
    if not settings.AWS_ACCESS_KEY_ID or not settings.AWS_SECRET_ACCESS_KEY \
            or not settings.AWS_STORAGE_BUCKET_NAME:
        raise Exception('''AWS configuration is required, check your settings for AWS_ACCESS_KEY_ID
            AWS_SECRET_ACCESS_KEY and AWS_STORAGE_BUCKET_NAME''')
Example #8
Source File: checks.py From opentaps_seas with GNU Lesser General Public License v3.0
def check_aws_config(app_configs, **kwargs):
    errors = []
    if not settings.AWS_ACCESS_KEY_ID or not settings.AWS_SECRET_ACCESS_KEY \
            or not settings.AWS_STORAGE_BUCKET_NAME:
        errors.append(
            Warning(
                'Missing AWS configuration, file storage will be unavailable',
                hint='''Make sure you set AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY
                    and AWS_STORAGE_BUCKET_NAME in secrets.json''',
                obj=settings,
                id='opentaps_seas.W002',
            )
        )
    return errors
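A Django system check like this one only runs if it has been registered. A sketch of the usual hookup, typically placed in the app's AppConfig.ready(); the register() call is standard Django, the placement is an assumption:

from django.core.checks import register

# registering the check makes `manage.py check` (and server startup)
# call check_aws_config and report the Warning above
register(check_aws_config)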
Example #9
Source File: search.py From open-ledger with MIT License
def init_es(timeout=TIMEOUT):
    log.info("connecting to %s %s", settings.ELASTICSEARCH_URL, settings.ELASTICSEARCH_PORT)
    auth = AWSRequestsAuth(aws_access_key=settings.AWS_ACCESS_KEY_ID,
                           aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
                           aws_host=settings.ELASTICSEARCH_URL,
                           aws_region='us-west-1',
                           aws_service='es')
    auth.encode = lambda x: bytes(x.encode('utf-8'))
    es = Elasticsearch(host=settings.ELASTICSEARCH_URL,
                       port=settings.ELASTICSEARCH_PORT,
                       connection_class=RequestsHttpConnection,
                       timeout=timeout,
                       max_retries=10,
                       retry_on_timeout=True,
                       http_auth=auth)
    return es
Example #10
Source File: s3.py From education-backend with MIT License
def client(self):
    session = boto3.session.Session()
    return session.client(
        "s3",
        region_name=settings.AWS_S3_REGION_NAME,
        endpoint_url=settings.AWS_S3_ENDPOINT_URL,
        aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
        aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
        config=Config(signature_version="s3"),
    )
Example #11
Source File: utils.py From django-scarface with MIT License
def get_sns_connection():
    """
    Creates a new AWS SNS connection based upon the credentials defined in the
    Django configuration. The region is taken from the SCARFACE_REGION_NAME
    setting and defaults to Ireland (eu-west-1).

    :return: a new SNS connection
    """
    region = settings.SCARFACE_REGION_NAME if hasattr(settings, "SCARFACE_REGION_NAME") else 'eu-west-1'
    return sns.connect_to_region(
        region,
        aws_access_key_id=settings.AWS_ACCESS_KEY,
        aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY
    )
Example #12
Source File: jobs.py From ggpyjobs with GNU General Public License v3.0
def perform(args):
    try:
        sc2reader_to_esdb = SC2ReaderToEsdb()
        filename = args['hash'] + '.s2gs'
        gateway = args['gateway']
        if gateway == 'sea':
            gateway = 'sg'

        # retrieve it from battlenet
        depoturl = 'http://{0}.depot.battle.net:1119/{1}'.format(gateway, filename)
        try:
            s2gsfile = urllib2.urlopen(depoturl).read()
        except:
            logging.getLogger("jobs").info("couldnt retrieve {} s2gs hash {}. maybe its bad.".format(gateway, args['hash']))
            return None

        # save it in S3 because we are pack rats
        bucket = boto.connect_s3(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)\
                     .get_bucket(settings.S2GS_BUCKET_NAME)
        k = Key(bucket)
        k.key = filename
        k.set_contents_from_string(s2gsfile)

        # parse it and write stuff to DB
        summaryDB = sc2reader_to_esdb.processSummary(StringIO(s2gsfile), args['hash'])
    except Exception as e:
        tb = traceback.format_exc()
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logging.getLogger("jobs").info("parsing failed for s2gs {}. oh well. exception={}. {} {} {} {}".format(args['hash'], e, exc_type, fname, exc_tb.tb_lineno, tb))
    finally:
        # Enqueue ruby PostParse job, always!
        ResQ(server=settings.REDIS_SERVER).enqueue_from_string(
            'ESDB::Jobs::Sc2::Summary::PostParse',
            'summaries-high',
            {'hash': args['hash']}
        )
Example #13
Source File: sqs.py From donate-wagtail with Mozilla Public License 2.0
def sqs_client():
    if all([settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY, settings.AWS_REGION]):
        return boto3.client(
            'sqs',
            aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
            aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
            region_name=settings.AWS_REGION
        )
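Because sqs_client() implicitly returns None when any of the three settings is missing, callers need a guard before using it. A usage sketch with a hypothetical queue URL:

client = sqs_client()
if client is not None:
    client.send_message(
        # QueueUrl is a hypothetical placeholder
        QueueUrl="https://sqs.us-east-1.amazonaws.com/123456789012/example-queue",
        MessageBody='{"event": "donation"}',
    )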
Example #14
Source File: s3_utils.py From marsha with MIT License
def create_presigned_post(conditions, fields, key):
    """Build the url and the form fields used for a presigned s3 post.

    Parameters
    ----------
    conditions : Type[List]
        A list of conditions to include in the policy. Each element can be
        either a list or a structure. For example:

        [
            {"acl": "public-read"},
            ["content-length-range", 2, 5],
            ["starts-with", "$success_action_redirect", ""]
        ]

        Conditions that are included may pertain to acl, content-length-range,
        Cache-Control, Content-Type, Content-Disposition, Content-Encoding,
        Expires, success_action_redirect, redirect, success_action_status,
        and/or x-amz-meta-.

        Note that if you include a condition, you must specify a valid value
        in the fields dictionary as well. A value will not be added
        automatically to the fields dictionary based on the conditions.

    fields : Type[Dict]
        A dictionary of prefilled form fields to build on top of. Elements
        that may be included are acl, Cache-Control, Content-Type,
        Content-Disposition, Content-Encoding, Expires,
        success_action_redirect, redirect, success_action_status,
        and x-amz-meta-.

        Note that if a particular element is included in the fields
        dictionary it will not be automatically added to the conditions
        list. You must specify a condition for the element as well.

    key : string
        Key name, optionally add ${filename} to the end to attach the
        submitted filename. Note that key related conditions and fields are
        filled out for you and should not be included in the Fields or
        Conditions parameter.

    Returns
    -------
    Dictionary
        A dictionary with two elements: url and fields. Url is the url to
        post to. Fields is a dictionary filled with the form fields and
        respective values to use when submitting the post.
    """
    # Configure the S3 client to use signature V4
    s3_client = boto3.client(
        "s3",
        aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
        aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
        config=Config(
            region_name=settings.AWS_S3_REGION_NAME,
            signature_version="s3v4",
        ),
    )
    acl = "private"
    fields.update({"acl": acl})

    return s3_client.generate_presigned_post(
        settings.AWS_SOURCE_BUCKET_NAME,
        key,
        Fields=fields,
        Conditions=[{"acl": acl}] + conditions,
        ExpiresIn=settings.AWS_UPLOAD_EXPIRATION_DELAY,
    )
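The returned dictionary maps directly onto a multipart form POST. A sketch of exercising it from Python with the requests library; the key and local file name are hypothetical, and passing the fields as form data before the file satisfies S3's requirement that the file part come last:

import requests

presigned = create_presigned_post(
    conditions=[], fields={}, key="videos/${filename}"  # hypothetical key
)
with open("clip.mp4", "rb") as source_file:  # hypothetical local file
    response = requests.post(
        presigned["url"],
        data=presigned["fields"],
        files={"file": source_file},
    )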
Example #15
Source File: renderer.py From arxiv-vanity with Apache License 2.0
def render_paper(
    source, output_path, webhook_url=None, output_bucket=None, extra_run_kwargs=None
):
    """
    Render a source directory using Engrafo.
    """
    client = create_client()

    renders_running = client.info()["ContainersRunning"]
    if renders_running >= settings.PAPERS_MAX_RENDERS_RUNNING:
        raise TooManyRendersRunningError(
            f"{renders_running} renders running, which is more than PAPERS_MAX_RENDERS_RUNNING"
        )

    labels = {}
    environment = {
        "BIBLIO_GLUTTON_URL": settings.BIBLIO_GLUTTON_URL,
        "GROBID_URL": settings.GROBID_URL,
        "SENTRY_DSN": settings.ENGRAFO_SENTRY_DSN,
    }
    volumes = {}
    network = None

    # Production
    if settings.MEDIA_USE_S3:
        if output_bucket is None:
            output_bucket = settings.AWS_STORAGE_BUCKET_NAME
        source = f"s3://{settings.AWS_STORAGE_BUCKET_NAME}/{source}"
        output_path = f"s3://{output_bucket}/{output_path}"
        environment["AWS_ACCESS_KEY_ID"] = settings.AWS_ACCESS_KEY_ID
        environment["AWS_SECRET_ACCESS_KEY"] = settings.AWS_SECRET_ACCESS_KEY
        environment["AWS_S3_REGION_NAME"] = settings.AWS_S3_REGION_NAME
    # Development
    else:
        # HACK(bfirsh): MEDIA_ROOT is an absolute path to something on
        # the host machine. We need to make this relative to a mount inside the
        # Docker container.
        docker_media_root = os.path.join("/mnt", os.path.basename(settings.MEDIA_ROOT))
        source = os.path.join(docker_media_root, source)
        output_path = os.path.join(docker_media_root, output_path)
        # HOST_PWD is set in docker-compose.yml
        volumes[os.environ["HOST_PWD"]] = {"bind": "/mnt", "mode": "rw"}

        # If running on the local machine, we need to add the container to the same network
        # as the web app so it can call the callback
        if os.environ.get("DOCKER_HOST") == "unix:///var/run/docker.sock":
            network = "arxiv-vanity_default"

    if extra_run_kwargs is None:
        extra_run_kwargs = {}

    return client.containers.run(
        settings.ENGRAFO_IMAGE,
        "sh -c " + shlex.quote("; ".join(make_command(source, output_path, webhook_url))),
        volumes=volumes,
        environment=environment,
        labels=labels,
        network=network,
        detach=True,
        **extra_run_kwargs,
    )
Example #16
Source File: sc2reader_to_esdb.py From ggpyjobs with GNU General Public License v3.0
def getOrCreateMap(self, replay):
    mapDB, created = Map.objects.get_or_create(s2ma_hash=replay.map_hash)
    bucket = boto.connect_s3(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)\
                 .lookup(settings.MINIMAP_BUCKET_NAME)

    if not created:
        k = Key(bucket)
        k.key = "%s_%i.png" % (mapDB.s2ma_hash, 100)
        if not k.exists():
            replay.load_map()
            self.writeMinimapToS3(replay.map, bucket, mapDB.s2ma_hash)
    else:
        replay.load_map()
        self.writeMinimapToS3(replay.map, bucket, replay.map_hash)

        # ggpyjobs#15 - Use the s2ma map name if available in english
        map_name = replay.map.name or replay.map_name
        mapDB.name = map_name
        mapDB.gateway = replay.region
        mapDB.save()

    if mapDB.transX is None:
        replay.load_map()
        mapOffsetX, mapOffsetY = replay.map.map_info.camera_left, replay.map.map_info.camera_bottom
        camerarangeX = replay.map.map_info.camera_right - replay.map.map_info.camera_left
        camerarangeY = replay.map.map_info.camera_top - replay.map.map_info.camera_bottom
        camerarange = (camerarangeX, camerarangeY)

        # this is the center of the map, in the SC2 coordinate system
        mapCenter = [mapOffsetX + camerarange[0] / 2.0, mapOffsetY + camerarange[1] / 2.0]

        # this is the center of the map image, in pixel coordinates
        imageCenter = [50.0 * camerarange[0] / camerarange[1], 50.0]

        # this is the scaling factor to go from the SC2 coordinate
        # system to pixel coordinates
        mapDB.image_scale = 100.0 / camerarange[1]

        # these are the X and Y translations to apply to an SC2
        # camera center coordinate to turn it into the upper-left
        # corner of the camera rectangle in a pixel-based coordinate
        # system in a <canvas> tag, where the upper-left is 0,0.
        mapDB.transX = imageCenter[0] - mapDB.image_scale * (mapCenter[0] + 12.5)
        mapDB.transY = imageCenter[1] + mapDB.image_scale * (mapCenter[1] - 7.5)
        mapDB.save()

    return mapDB