Python boto3.DEFAULT_SESSION Examples
The following are 5 code examples of boto3.DEFAULT_SESSION, the module-level attribute that holds boto3's default Session (it is None until a default session has been set up).
You can go to the original project or source file by following the link above each example, or browse the other available functions and classes of the boto3 module.
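Because the attribute is populated lazily, reading it before any setup yields None. A minimal sketch of that lifecycle, assuming boto3 is installed (the region name here is arbitrary):

import boto3

# No default session exists yet, so the attribute is None.
print(boto3.DEFAULT_SESSION)  # prints: None

# setup_default_session() populates it explicitly; calling boto3.client()
# or boto3.resource() would populate it implicitly as a side effect.
boto3.setup_default_session(region_name="us-east-1")
print(boto3.DEFAULT_SESSION.region_name)  # prints: us-east-1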
Example #1
Source File: settings.py From django-logpipe with ISC License
import os

def get_aws_region(_default='us-east-1'):
    # Try to use the explicit KINESIS_REGION setting. (`get` is the settings
    # lookup helper defined earlier in this module.)
    region = get('KINESIS_REGION', '')
    if region:
        return region

    # Try to import boto3 to get the region name
    try:
        import boto3
    except ImportError:
        # Can't import boto3, so fall back to the AWS_DEFAULT_REGION
        # environment variable, then finally us-east-1
        return os.environ.get('AWS_DEFAULT_REGION', _default)

    # Use the region from boto3's default session, if one has been set up
    if boto3.DEFAULT_SESSION is not None:
        region = boto3.DEFAULT_SESSION.region_name
        if region:
            return region

    # Otherwise, make a new session and use its region
    region = boto3.session.Session().region_name
    if region:
        return region

    # Finally, return the default
    return _default
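In other words, the lookup order is: the explicit KINESIS_REGION setting, then (only when boto3 cannot be imported) the AWS_DEFAULT_REGION environment variable, then the region of boto3's default session if one exists, then the region of a freshly constructed session, and finally the hard-coded _default.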
Example #2
Source File: test_athena.py From aws-data-wrangler with Apache License 2.0
def test_athena_query_cancelled(glue_database):
    session = boto3.DEFAULT_SESSION
    query_execution_id = wr.athena.start_query_execution(
        sql=get_query_long(), database=glue_database, boto3_session=session
    )
    wr.athena.stop_query_execution(query_execution_id=query_execution_id, boto3_session=session)
    with pytest.raises(wr.exceptions.QueryCancelled):
        assert wr.athena.wait_query(query_execution_id=query_execution_id)
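Note how the same default session is passed explicitly as boto3_session to both start_query_execution and stop_query_execution, while wait_query omits the argument and leaves session resolution to awswrangler (which presumably falls back to the same default).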
Example #3
Source File: test_s3.py From aws-data-wrangler with Apache License 2.0
def test_parquet(path):
    df_file = pd.DataFrame({"id": [1, 2, 3]})
    path_file = f"{path}test_parquet_file.parquet"
    df_dataset = pd.DataFrame({"id": [1, 2, 3], "partition": ["A", "A", "B"]})
    df_dataset["partition"] = df_dataset["partition"].astype("category")
    path_dataset = f"{path}test_parquet_dataset"
    with pytest.raises(wr.exceptions.InvalidArgumentCombination):
        wr.s3.to_parquet(df=df_file, path=path_file, mode="append")
    with pytest.raises(wr.exceptions.InvalidCompression):
        wr.s3.to_parquet(df=df_file, path=path_file, compression="WRONG")
    with pytest.raises(wr.exceptions.InvalidArgumentCombination):
        wr.s3.to_parquet(df=df_dataset, path=path_dataset, partition_cols=["col2"])
    with pytest.raises(wr.exceptions.InvalidArgumentCombination):
        wr.s3.to_parquet(df=df_dataset, path=path_dataset, description="foo")
    with pytest.raises(wr.exceptions.InvalidArgumentValue):
        wr.s3.to_parquet(df=df_dataset, path=path_dataset, partition_cols=["col2"], dataset=True, mode="WRONG")
    paths = wr.s3.to_parquet(df=df_file, path=path_file)["paths"]
    wr.s3.wait_objects_exist(paths=paths)
    assert len(wr.s3.read_parquet(path=path_file, use_threads=True, boto3_session=None).index) == 3
    assert len(wr.s3.read_parquet(path=[path_file], use_threads=False, boto3_session=boto3.DEFAULT_SESSION).index) == 3
    paths = wr.s3.to_parquet(df=df_dataset, path=path_dataset, dataset=True)["paths"]
    wr.s3.wait_objects_exist(paths=paths)
    assert len(wr.s3.read_parquet(path=paths, dataset=True).index) == 3
    assert len(wr.s3.read_parquet(path=path_dataset, use_threads=True, boto3_session=boto3.DEFAULT_SESSION).index) == 3
    dataset_paths = wr.s3.to_parquet(
        df=df_dataset, path=path_dataset, dataset=True, partition_cols=["partition"], mode="overwrite"
    )["paths"]
    wr.s3.wait_objects_exist(paths=dataset_paths)
    assert len(wr.s3.read_parquet(path=path_dataset, use_threads=True, boto3_session=None).index) == 3
    assert len(wr.s3.read_parquet(path=dataset_paths, use_threads=True).index) == 3
    assert len(wr.s3.read_parquet(path=path_dataset, dataset=True, use_threads=True).index) == 3
    wr.s3.to_parquet(df=df_dataset, path=path_dataset, dataset=True, partition_cols=["partition"], mode="overwrite")
    wr.s3.to_parquet(
        df=df_dataset, path=path_dataset, dataset=True, partition_cols=["partition"], mode="overwrite_partitions"
    )
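The read assertions deliberately alternate between boto3_session=None and boto3_session=boto3.DEFAULT_SESSION: both forms are expected to read back the same three rows, i.e. passing the default session explicitly should be indistinguishable from letting the library resolve a session itself.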
Example #4
Source File: test_s3.py From aws-data-wrangler with Apache License 2.0
def test_s3_get_bucket_region(bucket, region):
    assert wr.s3.get_bucket_region(bucket=bucket) == region
    assert wr.s3.get_bucket_region(bucket=bucket, boto3_session=boto3.DEFAULT_SESSION) == region
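Here again the two assertions check that omitting boto3_session and passing boto3.DEFAULT_SESSION explicitly resolve to the same bucket region.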
Example #5
Source File: session.py From sagemaker-python-sdk with Apache License 2.0
def _initialize(self, boto_session, sagemaker_client, sagemaker_runtime_client):
    """Initialize this SageMaker Session.

    Creates or uses a boto_session, sagemaker_client and sagemaker_runtime_client.
    Sets the region_name.
    """
    self.boto_session = boto_session or boto3.DEFAULT_SESSION or boto3.Session()

    self._region_name = self.boto_session.region_name
    if self._region_name is None:
        raise ValueError(
            "Must setup local AWS configuration with a region supported by SageMaker."
        )

    self.sagemaker_client = sagemaker_client or self.boto_session.client("sagemaker")
    prepend_user_agent(self.sagemaker_client)

    if sagemaker_runtime_client is not None:
        self.sagemaker_runtime_client = sagemaker_runtime_client
    else:
        config = botocore.config.Config(read_timeout=80)
        self.sagemaker_runtime_client = self.boto_session.client(
            "runtime.sagemaker", config=config
        )
    prepend_user_agent(self.sagemaker_runtime_client)

    self.local_mode = False
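The first assignment illustrates a common fallback idiom: prefer a caller-supplied session, then the process-wide default, then a brand-new one. A minimal standalone sketch of the same idiom (resolve_session is a hypothetical name, not part of boto3 or the SageMaker SDK):

import boto3

def resolve_session(session=None):
    # Prefer an explicit session; otherwise reuse boto3's default session
    # if one has been set up; otherwise construct a fresh Session.
    return session or boto3.DEFAULT_SESSION or boto3.Session()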