Python moto.mock_s3() Examples

The following are 30 code examples of moto.mock_s3(), collected from open-source projects. Each example lists its source file, originating project, and license. moto's mock_s3 patches S3 calls made through boto or boto3 in-process, so tests run against an in-memory backend instead of real AWS.
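As a minimal sketch of the pattern the examples share (the bucket name and key here are illustrative, not taken from any project below):

import boto3
import moto

def test_roundtrip():
    # Everything inside the block talks to moto's in-memory S3 backend;
    # no real credentials or network access are needed.
    with moto.mock_s3():
        s3 = boto3.client("s3", region_name="us-east-1")
        s3.create_bucket(Bucket="example-bucket")
        s3.put_object(Bucket="example-bucket", Key="greeting", Body=b"hello")
        assert s3.get_object(Bucket="example-bucket", Key="greeting")["Body"].read() == b"hello"

mock_s3 also works as a function decorator, and as a plain object whose start() and stop() methods bracket setUp/tearDown-style fixtures; all three forms appear below.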
Example #1
Source File: test_mldataset.py    From xcube with MIT License
def test_s3_levels(self):
        with moto.mock_s3():
            self._write_test_cube_pyramid()

            s3 = s3fs.S3FileSystem(key='test_fake_id',
                                   secret='test_fake_secret',
                                   client_kwargs=dict(endpoint_url="https://s3.amazonaws.com"))
            ml_dataset = ObjectStorageMultiLevelDataset(s3,
                                                        "xcube-test/cube-1-250-250.levels",
                                                        chunk_cache_capacity=1000 * 1000 * 1000)
            self.assertIsNotNone(ml_dataset)
            self.assertEqual(3, ml_dataset.num_levels)
            self.assertEqual((250, 250), ml_dataset.tile_grid.tile_size)
            self.assertEqual(2, ml_dataset.tile_grid.num_level_zero_tiles_x)
            self.assertEqual(1, ml_dataset.tile_grid.num_level_zero_tiles_y)
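            # The three cache capacities asserted below appear to split the
            # 1 GB budget across levels in a 16:4:1 ratio, i.e. roughly
            # 1e9 * 4**(num_levels - 1 - level) / 21.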
            self.assertEqual(761904762, ml_dataset.get_chunk_cache_capacity(0))
            self.assertEqual(190476190, ml_dataset.get_chunk_cache_capacity(1))
            self.assertEqual(47619048, ml_dataset.get_chunk_cache_capacity(2)) 
Example #2
Source File: test_core.py    From streamalert with Apache License 2.0
def setup(self):
        """LookupTables - Setup S3 bucket mocking"""
        self.config = load_config('tests/unit/conf')

        self.s3_mock = mock_s3()
        self.s3_mock.start()

        self.dynamodb_mock = mock_dynamodb2()
        self.dynamodb_mock.start()

        self._put_mock_data()

        self._lookup_tables = LookupTables.get_instance(
            config=self.config,
            reset=True
        ) 
Example #3
Source File: test_bypass_azure_s3.py    From exporters with BSD 3-Clause "New" or "Revised" License
def test_bypass(self):
        # given:
        opts = create_s3_azure_file_bypass_simple_opts()

        # when:
        with moto.mock_s3(), mock.patch('azure.storage.file.FileService') as azure:
            s3_conn = boto.connect_s3()
            bucket = s3_conn.create_bucket(opts['reader']['options']['bucket'])
            keys = ['some_prefix/{}'.format(k) for k in ['some', 'keys', 'here']]
            create_s3_keys(bucket, keys)

            exporter = BasicExporter(opts)
            exporter.export()

        # then:
        self.assertEquals(exporter.writer.get_metadata('items_count'), 0,
                          "No items should get written")
        self.assertEquals(exporter.reader.get_metadata('read_items'), 0,
                          "No items should be read")
        azure_puts = [
            call for call in azure.mock_calls if call[0] == '().copy_file'
        ]
        self.assertEquals(len(azure_puts), len(keys),
                          "all keys should be put into Azure files") 
Example #4
Source File: test_bypass_s3.py    From exporters with BSD 3-Clause "New" or "Revised" License
def setUp(self):
        self.mock_s3 = moto.mock_s3()
        self.mock_s3.start()
        self.s3_conn = boto.connect_s3()
        self.s3_conn.create_bucket('source_bucket')

        self.source_bucket = self.s3_conn.get_bucket('source_bucket')
        self.data = [
            {'name': 'Roberto', 'birthday': '12/05/1987'},
            {'name': 'Claudia', 'birthday': '21/12/1985'},
        ]
        with closing(self.source_bucket.new_key('some_prefix/test_key')) as key:
            with TmpFile() as tmp_filename:
                with open(tmp_filename, 'w') as f:
                    f.write(json.dumps(self.data))
                with open(tmp_filename) as f:
                    self.key_md5 = compute_md5(f)
            key.metadata = {'total': 2, 'md5': self.key_md5}
            key.set_contents_from_string(json.dumps(self.data))
        self.tmp_bypass_resume_file = 'tests/data/tmp_s3_bypass_resume_persistence.pickle'
        shutil.copyfile('tests/data/s3_bypass_resume_persistence.pickle',
                        self.tmp_bypass_resume_file) 
Example #5
Source File: test_driver_s3.py    From streamalert with Apache License 2.0
def setup(self):
        """LookupTables - Setup S3 bucket mocking"""
        self.buckets_info = {'bucket_name': ['foo.json', 'bar.json']}
        self.config = load_config('tests/unit/conf')
        self.s3_mock = mock_s3()
        self.s3_mock.start()

        self._foo_driver = construct_persistence_driver(
            self.config['lookup_tables']['tables']['foo']
        )
        self._bar_driver = construct_persistence_driver(
            self.config['lookup_tables']['tables']['bar']
        )
        self._bad_driver = construct_persistence_driver(
            {
                'driver': 's3',
                'bucket': 'bucket_name',
                'key': 'invalid-key',
            }
        )

        self._put_mock_tables() 
Example #6
Source File: test_compression.py    From elasticintel with GNU General Public License v3.0
def test_with_s3_url(compression):
    boto3 = pytest.importorskip('boto3')
    pytest.importorskip('s3fs')
    if compression == 'xz':
        tm._skip_if_no_lzma()

    df = pd.read_json('{"a": [1, 2, 3], "b": [4, 5, 6]}')
    with moto.mock_s3():
        conn = boto3.resource("s3", region_name="us-east-1")
        bucket = conn.create_bucket(Bucket="pandas-test")

        with tm.ensure_clean() as path:
            df.to_json(path, compression=compression)
            with open(path, 'rb') as f:
                bucket.put_object(Key='test-1', Body=f)

        roundtripped_df = pd.read_json('s3://pandas-test/test-1',
                                       compression=compression)
        assert_frame_equal(df, roundtripped_df) 
Example #7
Source File: pybuilder_aws_plugin_tests.py    From pybuilder_aws_plugin with Apache License 2.0
def setUp(self):
        self.tempdir = tempfile.mkdtemp(prefix='palp-')
        self.project = Project(
            basedir=self.tempdir, name='palp', version='123')
        self.project.set_property('dir_target', 'target')
        self.bucket_name = 'palp-lambda-zips'
        self.project.set_property(
                'lambda_file_access_control', 'bucket-owner-full-control')
        self.project.set_property('bucket_name', self.bucket_name)
        self.project.set_property('bucket_prefix', '')
        self.dir_target = os.path.join(self.tempdir, 'target')
        os.mkdir(self.dir_target)
        self.zipfile_name = os.path.join(self.dir_target, 'palp.zip')
        self.test_data = b'testdata'
        with open(self.zipfile_name, 'wb') as fp:
            fp.write(self.test_data)

        self.my_mock_s3 = mock_s3()
        self.my_mock_s3.start()
        self.s3 = boto3.resource('s3')
        self.s3.create_bucket(Bucket=self.bucket_name) 
Example #8
Source File: version_specific.py    From pybuilder_aws_plugin with Apache License 2.0
def setUp(self):
        self.bucket_name = 'palp-cfn-json'
        basedir = os.path.dirname(__file__)
        self.project = Project(basedir=basedir,
                               name='palp', version='123')
        self.test_files = [
            (os.path.join(basedir, 'templates'), 'alarm-topic.yml'),
            (os.path.join(basedir, 'templates'), 'ecs-simple-webapp.yml')]
        self.project.set_property('bucket_name', self.bucket_name)
        self.project.set_property('template_key_prefix', 'palp/')
        self.project.set_property('template_files', self.test_files)
        self.project.set_property(
                'template_file_access_control', 'bucket-owner-full-control')
        self.my_mock_s3 = mock_s3()
        self.my_mock_s3.start()
        self.s3 = boto3.resource('s3')
        self.s3.create_bucket(Bucket=self.bucket_name) 
Example #9
Source File: test_excel.py    From elasticintel with GNU General Public License v3.0
def test_read_from_s3_url(self):
        boto3 = pytest.importorskip('boto3')
        pytest.importorskip('s3fs')

        with moto.mock_s3():
            conn = boto3.resource("s3", region_name="us-east-1")
            conn.create_bucket(Bucket="pandas-test")
            file_name = os.path.join(self.dirpath, 'test1' + self.ext)
            with open(file_name, 'rb') as f:
                conn.Bucket("pandas-test").put_object(Key="test1" + self.ext,
                                                      Body=f)

            url = ('s3://pandas-test/test1' + self.ext)
            url_table = read_excel(url)
            local_table = self.get_exceldf('test1')
            tm.assert_frame_equal(url_table, local_table) 
Example #10
Source File: test_readers_s3.py    From exporters with BSD 3-Clause "New" or "Revised" License
def setUp(self):
        self.mock_s3 = moto.mock_s3()
        self.mock_s3.start()
        self.s3_conn = boto.connect_s3()
        self.s3_conn.create_bucket('last_bucket')
        bucket = self.s3_conn.get_bucket('last_bucket')
        key = bucket.new_key('test_list/LAST')
        self.pointers = ['pointer1', 'pointer2', 'pointer3', '']
        key.set_contents_from_string('\r\n'.join(self.pointers))
        key.close()

        for key_name in POINTER_KEYS:
            key = bucket.new_key(key_name)
            out = StringIO.StringIO()
            with gzip.GzipFile(fileobj=out, mode='w') as f:
                f.write(json.dumps({'name': key_name}))
            key.set_contents_from_string(out.getvalue())
            key.close()

        self.options_prefix_pointer = {
            'bucket': 'last_bucket',
            'aws_access_key_id': 'KEY',
            'aws_secret_access_key': 'SECRET',
            'prefix_pointer': 'test_list/LAST'
        } 
Example #11
Source File: test_config_s3.py    From aws-auto-remediate with GNU General Public License v3.0
def cr(self):
        with moto.mock_s3():
            cr = config_rules.ConfigRules(logging)
            yield cr

    # def test_s3_bucket_sse_enabled(self, cr):
    #     # create bucket
    #     cr.client_s3.create_bucket(Bucket="test")

    #     # test s3_bucket_server_side_encryption_enabled function
    #     cr.s3_bucket_server_side_encryption_enabled("test")

    #     # validate test
    #     response = cr.client_s3.get_bucket_encryption(Bucket="test")
    #     print(response)
    #     assert (
    #         response["ServerSideEncryptionConfiguration"]["Rules"][0][
    #             "ApplyServerSideEncryptionByDefault"
    #         ]["SSEAlgorithm"]
    #         == "AES256"
    #     ) 
Example #12
Source File: test_bypass_azure_s3.py    From exporters with BSD 3-Clause "New" or "Revised" License
def test_bypass(self):
        # given:
        opts = create_s3_azure_blob_bypass_simple_opts()

        # when:
        with moto.mock_s3(), mock.patch('azure.storage.blob.BlockBlobService') as azure:
            s3_conn = boto.connect_s3()
            bucket = s3_conn.create_bucket(opts['reader']['options']['bucket'])
            keys = ['some_prefix/{}'.format(k) for k in ['this', 'test', 'has', 'keys']]
            create_s3_keys(bucket, keys)

            exporter = BasicExporter(opts)
            exporter.export()

        # then:
        self.assertEquals(exporter.writer.get_metadata('items_count'), 0,
                          "No items should get written")
        self.assertEquals(exporter.reader.get_metadata('read_items'), 0,
                          "No items should be read")
        azure_puts = [
            call for call in azure.mock_calls if call[0] == '().copy_blob'
        ]
        self.assertEquals(len(azure_puts), len(keys),
                          "all keys should be put into Azure blobs") 
Example #13
Source File: test_readers_s3.py    From exporters with BSD 3-Clause "New" or "Revised" License
def setUp(self):
        self.mock_s3 = moto.mock_s3()
        self.mock_s3.start()
        self.s3_conn = boto.connect_s3()
        self.s3_conn.create_bucket('fake_bucket') 
Example #14
Source File: test_readers_s3.py    From exporters with BSD 3-Clause "New" or "Revised" License
def tearDown(self):
        self.mock_s3.stop() 
Example #15
Source File: test_writers_s3.py    From exporters with BSD 3-Clause "New" or "Revised" License
def setUp(self):
        self.mock_s3 = moto.mock_s3()
        self.mock_s3.start()
        self.s3_conn = boto.connect_s3()
        self.s3_conn.create_bucket('fake_bucket') 
Example #16
Source File: test_writers_s3.py    From exporters with BSD 3-Clause "New" or "Revised" License
def tearDown(self):
        self.mock_s3.stop() 
Example #17
Source File: test_bypass_s3.py    From exporters with BSD 3-Clause "New" or "Revised" License
def tearDown(self):
        self.mock_s3.stop()
        remove_if_exists(self.tmp_bypass_resume_file) 
Example #18
Source File: test_downloader.py    From flambe with MIT License
def s3_mock():
    mock = mock_s3()
    mock.start()

    yield

    mock.stop() 
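The @pytest.fixture decorator was presumably stripped when this snippet was extracted. The start/yield/stop sequence on a single mock instance is the cleanest setup/teardown shape; a test would consume it just by naming the fixture as a parameter (the test name and bucket below are hypothetical):

def test_download(s3_mock):
    # the mock is active for the duration of the test
    client = boto3.client('s3', region_name='us-east-1')
    client.create_bucket(Bucket='some-bucket')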
Example #19
Source File: test_persistence.py    From palladium with Apache License 2.0
def s3_io_filled(self, bucket_name, test_models, s3_io_cls):
        import moto, boto3
        with moto.mock_s3():
            conn = boto3.resource('s3')
            conn.create_bucket(Bucket=bucket_name)

            s3 = boto3.client('s3')
            for model in test_models:
                s3.put_object(
                    Bucket=bucket_name,
                    Key=model.name,
                    Body=model.value,
                )

            yield s3_io_cls() 
Example #20
Source File: test_persistence.py    From palladium with Apache License 2.0
def s3_cls_with_bucket(self, bucket_name, s3_cls):
        import moto, boto3
        with moto.mock_s3():
            conn = boto3.resource('s3')
            conn.create_bucket(Bucket=bucket_name)

            yield s3_cls 
Example #21
Source File: test_moz_databricks_operator.py    From telemetry-airflow with Mozilla Public License 2.0
def client():
    """Create a moto generated fixture for s3. Using this fixture will put the function
    under test in the same scope as the @mock_s3 decorator. See
    https://github.com/spulec/moto/issues/620.
    """
    mock_s3().start()
    client = boto3.resource("s3")
    client.create_bucket(Bucket="telemetry-test-bucket")
    client.create_bucket(Bucket="telemetry-airflow")
    yield client
    mock_s3().stop() 
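Note that the final mock_s3().stop() constructs a fresh mock object instead of stopping the instance started at the top. Holding one instance, as Example #18 does, keeps the teardown unambiguous; a sketch of this fixture rewritten that way (same bucket names as above):

def client():
    mock = mock_s3()
    mock.start()
    client = boto3.resource("s3")
    client.create_bucket(Bucket="telemetry-test-bucket")
    client.create_bucket(Bucket="telemetry-airflow")
    yield client
    mock.stop()  # stop the same instance that was started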
Example #22
Source File: conftest.py    From timeserio with MIT License
def s3(test_bucket_name):
    # writable local S3 system
    with moto.mock_s3():
        client = boto3.client('s3')
        client.create_bucket(Bucket=test_bucket_name)
        yield s3fs.S3FileSystem() 
Example #23
Source File: test_s3.py    From gbdxtools with MIT License
def setUp(self):
        self._mock_s3 = mock_s3()
        self._mock_s3.start()
        pre_load_s3_data(self._bucket_name, self._prefix)
        self.s3 = S3()
        self.s3.info = self._info 
Example #24
Source File: test_s3.py    From simple-settings with MIT License
def mock_s3_resource(self):
        mock = mock_s3()
        mock.start()

        yield mock

        mock.stop() 
Example #25
Source File: test_aws_lambda.py    From stacker with BSD 2-Clause "Simplified" License
def s3():
    with mock_s3():
        yield boto3.client('s3', region_name=REGION) 
Example #26
Source File: test_network.py    From elasticintel with GNU General Public License v3.0
def s3_resource(tips_file):
    pytest.importorskip('s3fs')
    moto.mock_s3().start()

    test_s3_files = [
        ('tips.csv', tips_file),
        ('tips.csv.gz', tips_file + '.gz'),
        ('tips.csv.bz2', tips_file + '.bz2'),
    ]

    def add_tips_files(bucket_name):
        for s3_key, file_name in test_s3_files:
            with open(file_name, 'rb') as f:
                conn.Bucket(bucket_name).put_object(
                    Key=s3_key,
                    Body=f)

    boto3 = pytest.importorskip('boto3')
    # see gh-16135
    bucket = 'pandas-test'

    conn = boto3.resource("s3", region_name="us-east-1")
    conn.create_bucket(Bucket=bucket)
    add_tips_files(bucket)

    conn.create_bucket(Bucket='cant_get_it', ACL='private')
    add_tips_files('cant_get_it')

    yield conn

    moto.mock_s3().stop() 
Example #27
Source File: conftest.py    From bucketstore with MIT License
def bucket() -> Generator:
    """fixture that provides a bucketstore bucket."""
    with mock_s3():
        yield bucketstore.get("bucketstore-playground", create=True) 
Example #28
Source File: helper_functions.py    From mlflow with Apache License 2.0
def mock_s3_bucket():
    """
    Creates a mock S3 bucket using moto

    :return: The name of the mock bucket
    """
    import boto3
    import moto

    with moto.mock_s3():
        bucket_name = "mock-bucket"
        s3_client = boto3.client("s3")
        s3_client.create_bucket(Bucket=bucket_name)
        yield bucket_name 
Example #29
Source File: test_boto3.py    From opentracing-python-instrumentation with MIT License
def s3_mock():
    import moto
    with moto.mock_s3():
        s3 = boto3.client('s3', region_name='us-east-1')
        yield s3 
Example #30
Source File: conftest.py    From diffy with Apache License 2.0
def s3():
    with mock_s3():
        yield boto3.client("s3", region_name="us-east-1")