Python tarfile.TarFile.open() Examples
The following are 30 code examples of tarfile.TarFile.open().
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example.
You may also want to check out all available functions and classes of the module
tarfile.TarFile, or try the search function.
Example #1
Source File: __init__.py From pipenv with MIT License | 6 votes |
def __init__(self, zonefile_stream=None):
    """Build the zone table from an optional zoneinfo tar stream.

    :param zonefile_stream: file-like object with tar data; when None,
        ``self.zones`` and ``self.metadata`` are left empty.
    """
    if zonefile_stream is None:
        # No archive given: start out empty.
        self.zones = {}
        self.metadata = None
        return

    with TarFile.open(fileobj=zonefile_stream) as tf:
        members = tf.getmembers()

        # Regular files become tzfile instances keyed by archive name;
        # the metadata member is handled separately below.
        self.zones = {}
        for member in members:
            if member.isfile() and member.name != METADATA_FN:
                self.zones[member.name] = tzfile(tf.extractfile(member),
                                                 filename=member.name)

        # Hard and symbolic links reuse their target's tzfile object
        # instead of parsing the data again (saves memory).
        link_map = {}
        for member in members:
            if member.islnk() or member.issym():
                link_map[member.name] = self.zones[member.linkname]
        self.zones.update(link_map)

        try:
            raw = tf.extractfile(tf.getmember(METADATA_FN)).read()
            self.metadata = json.loads(raw.decode('UTF-8'))
        except KeyError:
            # Archive carries no metadata member.
            self.metadata = None
Example #2
Source File: test_aws_local_report_downloader.py From koku with GNU Affero General Public License v3.0 | 6 votes |
def test_download_missing_month(self):
    """Test to verify that downloading a non-existent month throws proper exception."""
    fake_bucket = tempfile.mkdtemp()
    # Use a context manager so the archive handle is closed deterministically
    # (the original leaked the open TarFile).
    with TarFile.open("./koku/masu/test/data/test_local_bucket_prefix.tar.gz") as mytar:
        mytar.extractall(fake_bucket)
    test_report_date = datetime(year=2018, month=7, day=7)
    with patch.object(DateAccessor, "today", return_value=test_report_date):
        report_downloader = ReportDownloader(
            self.mock_task,
            self.fake_customer_name,
            self.fake_auth_credential,
            fake_bucket,
            Provider.PROVIDER_AWS_LOCAL,
            1,
            cache_key=self.fake.word(),
        )
        # Names from test report .gz file
        report_downloader.download_report(test_report_date)
    expected_path = "{}/{}/{}".format(DATA_DIR, self.fake_customer_name, "aws-local")
    # July 2018 is not present in the fixture archive, so no report
    # directory may be created for the customer.
    self.assertFalse(os.path.isdir(expected_path))
Example #3
Source File: test_aws_local_report_downloader.py From koku with GNU Affero General Public License v3.0 | 6 votes |
def test_download_bucket_with_prefix(self):
    """Test to verify that basic report downloading works."""
    fake_bucket = tempfile.mkdtemp()
    # Use a context manager so the archive handle is closed deterministically
    # (the original leaked the open TarFile).
    with TarFile.open("./koku/masu/test/data/test_local_bucket_prefix.tar.gz") as mytar:
        mytar.extractall(fake_bucket)
    test_report_date = datetime(year=2018, month=8, day=7)
    with patch.object(DateAccessor, "today", return_value=test_report_date):
        report_downloader = ReportDownloader(
            self.mock_task,
            self.fake_customer_name,
            self.fake_auth_credential,
            fake_bucket,
            Provider.PROVIDER_AWS_LOCAL,
            self.aws_provider_uuid,
            cache_key=self.fake.word(),
        )
        # Names from test report .gz file
        report_downloader.download_report(test_report_date)
    expected_path = "{}/{}/{}".format(DATA_DIR, self.fake_customer_name, "aws-local")
    # August 2018 exists in the fixture, so the report directory must appear.
    self.assertTrue(os.path.isdir(expected_path))
    shutil.rmtree(fake_bucket)
Example #4
Source File: hunch_publisher.py From Hunch with Apache License 2.0 | 6 votes |
def _get_prediction_module_tar_byte_buffer(self, path_to_prediction_module):
    """Pack the prediction module file into an in-memory bz2 tar.

    The file is stored in the archive under the fixed name 'model.py'.

    :param path_to_prediction_module: Path to prediction module file
    :return: BytesIO holding the compressed tar data
    :raises Exception: if the path refers to a directory
    """
    mode = os.stat(path_to_prediction_module).st_mode
    if stat.S_ISDIR(mode):
        message = ("Expected a file but got a directory for arg "
                   "'path_to_prediction_module' = '{}'").format(path_to_prediction_module)
        raise Exception(message)
    buffer = BytesIO()
    with TarFile.open(mode="w:bz2", fileobj=buffer) as archive:
        archive.add(name=path_to_prediction_module, arcname='model.py')
    return buffer
Example #5
Source File: hunch_publisher.py From Hunch with Apache License 2.0 | 6 votes |
def _create_model_blob_details_with_custom_setup(self, model_blob, custom_package_name, custom_package_version, custom_package_path):
    """Assemble the blob dictionary describing a model with a custom package.

    :param model_blob: serialized model payload, or None when only the
        custom package should be recorded
    :param custom_package_name: package name recorded alongside the blob
    :param custom_package_version: package version recorded alongside the blob
    :param custom_package_path: path to an existing ``.tar.gz`` archive
    :return: dict with package metadata, package bytes, and (optionally)
        the model blob
    :raises Exception: if the archive does not exist or has the wrong suffix
    """
    if not os.path.exists(custom_package_path) or not custom_package_path.endswith('.tar.gz'):
        raise Exception("Tar file doesn't exist at: " + custom_package_path)
    # BUG FIX: the archive is binary data; text-mode open fails with
    # UnicodeDecodeError on Python 3, so read it in binary mode.
    with open(custom_package_path, 'rb') as fp:
        custom_package_blob = fp.read()
    model_blob_with_custom_code = {
        'custom_package_version': custom_package_version,
        'custom_package_name': custom_package_name,
        'custom_package_blob': custom_package_blob
    }
    if model_blob is not None:
        model_blob_with_custom_code['model_blob'] = model_blob
    return model_blob_with_custom_code
Example #6
Source File: hunch_publisher.py From Hunch with Apache License 2.0 | 6 votes |
def _get_model_resources_tar_byte_buffer(self, path_to_model_resources_dir):
    """Pack the model resources directory into an in-memory bz2 tar.

    The directory tree is added recursively with an empty arcname, so
    entries appear relative to the archive root.

    :param path_to_model_resources_dir: Path to resources directory
    :return: BytesIO holding the compressed tar data
    :raises Exception: if the path is not a directory
    """
    mode = os.stat(path_to_model_resources_dir).st_mode
    if not stat.S_ISDIR(mode):
        raise Exception(
            "Expected a directory for arg 'path_to_model_resources_dir' = {}".format(path_to_model_resources_dir))
    buffer = BytesIO()
    with TarFile.open(mode="w:bz2", fileobj=buffer) as archive:
        archive.add(name=path_to_model_resources_dir, recursive=True, arcname='')
    return buffer
Example #7
Source File: __init__.py From recruit with Apache License 2.0 | 6 votes |
def __init__(self, zonefile_stream=None):
    """Build the zone table from an optional zoneinfo tar stream.

    :param zonefile_stream: file-like object with tar data; when None,
        ``self.zones`` and ``self.metadata`` are left empty.
    """
    if zonefile_stream is None:
        # No archive given: start out empty.
        self.zones = {}
        self.metadata = None
        return

    with TarFile.open(fileobj=zonefile_stream) as tf:
        members = tf.getmembers()

        # Regular files become tzfile instances keyed by archive name;
        # the metadata member is handled separately below.
        self.zones = {}
        for member in members:
            if member.isfile() and member.name != METADATA_FN:
                self.zones[member.name] = tzfile(tf.extractfile(member),
                                                 filename=member.name)

        # Hard and symbolic links reuse their target's tzfile object
        # instead of parsing the data again (saves memory).
        link_map = {}
        for member in members:
            if member.islnk() or member.issym():
                link_map[member.name] = self.zones[member.linkname]
        self.zones.update(link_map)

        try:
            raw = tf.extractfile(tf.getmember(METADATA_FN)).read()
            self.metadata = json.loads(raw.decode('UTF-8'))
        except KeyError:
            # Archive carries no metadata member.
            self.metadata = None
Example #8
Source File: __init__.py From vnpy_crypto with MIT License | 6 votes |
def __init__(self, zonefile_stream=None):
    """Build the zone table from an optional zoneinfo tar stream.

    :param zonefile_stream: file-like object with tar data; when None,
        ``self.zones`` and ``self.metadata`` are left empty.
    """
    if zonefile_stream is None:
        # No archive given: start out empty.
        self.zones = {}
        self.metadata = None
        return

    with TarFile.open(fileobj=zonefile_stream) as tf:
        members = tf.getmembers()

        # Regular files become tzfile instances keyed by archive name;
        # the metadata member is handled separately below.
        self.zones = {}
        for member in members:
            if member.isfile() and member.name != METADATA_FN:
                self.zones[member.name] = tzfile(tf.extractfile(member),
                                                 filename=member.name)

        # Hard and symbolic links reuse their target's tzfile object
        # instead of parsing the data again (saves memory).
        link_map = {}
        for member in members:
            if member.islnk() or member.issym():
                link_map[member.name] = self.zones[member.linkname]
        self.zones.update(link_map)

        try:
            raw = tf.extractfile(tf.getmember(METADATA_FN)).read()
            self.metadata = json.loads(raw.decode('UTF-8'))
        except KeyError:
            # Archive carries no metadata member.
            self.metadata = None
Example #9
Source File: __init__.py From Mastering-Elasticsearch-7.0 with MIT License | 6 votes |
def __init__(self, zonefile_stream=None):
    """Build the zone table from an optional zoneinfo tar stream.

    :param zonefile_stream: file-like object with tar data; when None,
        ``self.zones`` and ``self.metadata`` are left empty.
    """
    if zonefile_stream is None:
        # No archive given: start out empty.
        self.zones = {}
        self.metadata = None
        return

    with TarFile.open(fileobj=zonefile_stream) as tf:
        members = tf.getmembers()

        # Regular files become tzfile instances keyed by archive name;
        # the metadata member is handled separately below.
        self.zones = {}
        for member in members:
            if member.isfile() and member.name != METADATA_FN:
                self.zones[member.name] = tzfile(tf.extractfile(member),
                                                 filename=member.name)

        # Hard and symbolic links reuse their target's tzfile object
        # instead of parsing the data again (saves memory).
        link_map = {}
        for member in members:
            if member.islnk() or member.issym():
                link_map[member.name] = self.zones[member.linkname]
        self.zones.update(link_map)

        try:
            raw = tf.extractfile(tf.getmember(METADATA_FN)).read()
            self.metadata = json.loads(raw.decode('UTF-8'))
        except KeyError:
            # Archive carries no metadata member.
            self.metadata = None
Example #10
Source File: test_project.py From signac with BSD 3-Clause "New" or "Revised" License | 6 votes |
def test_workspace_read_only_path(self):
    """find_jobs must raise WorkspaceError when the workspace path is a file."""
    # Create file where workspace would be, thus preventing the creation
    # of the workspace directory.
    if os.path.exists(self.project.workspace()):
        os.rmdir(self.project.workspace())
    with open(os.path.join(self.project.workspace()), 'w'):
        pass
    with pytest.raises(OSError):
        # Ensure that the file is in place: mkdir over an existing file
        # must fail.
        os.mkdir(self.project.workspace())
    # WorkspaceError is expected to be an OSError subclass so callers
    # catching OSError still work.
    assert issubclass(WorkspaceError, OSError)
    try:
        # Silence the expected error logging during the failing call.
        logging.disable(logging.ERROR)
        with pytest.raises(WorkspaceError):
            list(self.project.find_jobs())
    finally:
        logging.disable(logging.NOTSET)
    # Neither the temp working dir nor the workspace may have been created
    # as directories by the failed lookup.
    assert not os.path.isdir(self._tmp_wd)
    assert not os.path.isdir(self.project.workspace())
Example #11
Source File: test_project.py From signac with BSD 3-Clause "New" or "Revised" License | 6 votes |
def test_corrupted_statepoint_file(self):
    """Opening a job by id with a truncated manifest must raise JobsCorruptedError."""
    job = self.project.open_job(dict(a=0))
    job.init()
    # overwrite state point manifest file (truncate to zero bytes)
    with open(job.fn(job.FN_MANIFEST), 'w'):
        pass
    # Drop both in-memory and on-disk state point caches so the next
    # lookup must re-read the (now corrupted) manifest.
    self.project._sp_cache.clear()
    self.project._remove_persistent_cache_file()
    try:
        # Silence the expected error logging during the failing call.
        logging.disable(logging.CRITICAL)
        with pytest.raises(JobsCorruptedError):
            self.project.open_job(id=job.id)
    finally:
        logging.disable(logging.NOTSET)
Example #12
Source File: test_project.py From signac with BSD 3-Clause "New" or "Revised" License | 6 votes |
def test_index(self):
    """Project.index must reflect jobs, their documents, and filename patterns."""
    # An empty project yields an empty index, with or without job documents.
    docs = list(self.project.index(include_job_document=True))
    assert len(docs) == 0
    docs = list(self.project.index(include_job_document=False))
    assert len(docs) == 0
    statepoints = [{'a': i} for i in range(5)]
    for sp in statepoints:
        self.project.open_job(sp).document['test'] = True
    # The indexed ids must match exactly the ids of the existing jobs.
    job_ids = set((job.id for job in self.project.find_jobs()))
    docs = list(self.project.index())
    job_ids_cmp = set((doc['_id'] for doc in docs))
    assert job_ids == job_ids_cmp
    assert len(docs) == len(statepoints)
    # Add one data file per job, then index with a filename pattern:
    # each job now contributes a second (file) document.
    for sp in statepoints:
        with self.project.open_job(sp):
            with open('test.txt', 'w'):
                pass
    docs = list(self.project.index({'.*' + re.escape(os.path.sep) + r'test\.txt': 'TextFile'}))
    assert len(docs) == 2 * len(statepoints)
    # All document ids must be unique.
    assert len(set((doc['_id'] for doc in docs))) == len(docs)
Example #13
Source File: test_project.py From signac with BSD 3-Clause "New" or "Revised" License | 6 votes |
def test_export_import_tarfile_zipped(self):
    """Round-trip export to a gzipped tar and re-import must preserve all jobs."""
    target = os.path.join(self._tmp_dir.name, 'data.tar.gz')
    # Create 10 jobs, each with a nested data file.
    for i in range(10):
        with self.project.open_job(dict(a=i)) as job:
            os.makedirs(job.fn('sub-dir'))
            with open(job.fn(os.path.join('sub-dir', 'signac_statepoint.json')), 'w') as file:
                file.write(json.dumps({"foo": 0}))
    # find_job_ids is deprecated; the call is expected to warn.
    with pytest.deprecated_call():
        ids_before_export = list(sorted(self.project.find_job_ids()))
    self.project.export_to(target=target)
    assert len(self.project) == 10
    # Verify the archive layout: one 'a/<i>' entry per job plus the
    # nested data file.
    with TarFile.open(name=target, mode='r:gz') as tarfile:
        for i in range(10):
            assert 'a/{}'.format(i) in tarfile.getnames()
            assert 'a/{}/sub-dir/signac_statepoint.json'.format(i) in tarfile.getnames()
    # Move the workspace aside so the project appears empty, then import.
    os.replace(self.project.workspace(), self.project.workspace() + '~')
    assert len(self.project) == 0
    self.project.import_from(origin=target)
    assert len(self.project) == 10
    with pytest.deprecated_call():
        assert ids_before_export == list(sorted(self.project.find_job_ids()))
    # Each re-imported job must still carry its nested data file.
    for job in self.project:
        assert job.isfile(os.path.join('sub-dir', 'signac_statepoint.json'))
Example #14
Source File: test_project.py From signac with BSD 3-Clause "New" or "Revised" License | 6 votes |
def test_export_import_zipfile(self):
    """Round-trip export to a zip archive and re-import must preserve all jobs."""
    target = os.path.join(self._tmp_dir.name, 'data.zip')
    # Create 10 jobs, each with a nested data file.
    for i in range(10):
        with self.project.open_job(dict(a=i)) as job:
            os.makedirs(job.fn('sub-dir'))
            with open(job.fn(os.path.join('sub-dir', 'signac_statepoint.json')), 'w') as file:
                file.write(json.dumps({"foo": 0}))
    # find_job_ids is deprecated; the call is expected to warn.
    with pytest.deprecated_call():
        ids_before_export = list(sorted(self.project.find_job_ids()))
    self.project.export_to(target=target)
    assert len(self.project) == 10
    # Verify the archive layout: state point plus nested file per job.
    with ZipFile(target) as zipfile:
        for i in range(10):
            assert 'a/{}/signac_statepoint.json'.format(i) in zipfile.namelist()
            assert 'a/{}/sub-dir/signac_statepoint.json'.format(i) in zipfile.namelist()
    # Move the workspace aside so the project appears empty, then import.
    os.replace(self.project.workspace(), self.project.workspace() + '~')
    assert len(self.project) == 0
    self.project.import_from(origin=target)
    assert len(self.project) == 10
    with pytest.deprecated_call():
        assert ids_before_export == list(sorted(self.project.find_job_ids()))
    # Each re-imported job must still carry its nested data file.
    for job in self.project:
        assert job.isfile(os.path.join('sub-dir', 'signac_statepoint.json'))
Example #15
Source File: __init__.py From deepWordBug with Apache License 2.0 | 6 votes |
def __init__(self, zonefile_stream=None):
    """Build the zone table from an optional zoneinfo tar stream.

    :param zonefile_stream: file-like object with tar data; when None,
        ``self.zones`` and ``self.metadata`` are left empty.
    """
    if zonefile_stream is None:
        # No archive given: start out empty.
        self.zones = {}
        self.metadata = None
        return

    with TarFile.open(fileobj=zonefile_stream) as tf:
        members = tf.getmembers()

        # Regular files become tzfile instances keyed by archive name;
        # the metadata member is handled separately below.
        self.zones = {}
        for member in members:
            if member.isfile() and member.name != METADATA_FN:
                self.zones[member.name] = tzfile(tf.extractfile(member),
                                                 filename=member.name)

        # Hard and symbolic links reuse their target's tzfile object
        # instead of parsing the data again (saves memory).
        link_map = {}
        for member in members:
            if member.islnk() or member.issym():
                link_map[member.name] = self.zones[member.linkname]
        self.zones.update(link_map)

        try:
            raw = tf.extractfile(tf.getmember(METADATA_FN)).read()
            self.metadata = json.loads(raw.decode('UTF-8'))
        except KeyError:
            # Archive carries no metadata member.
            self.metadata = None
Example #16
Source File: __init__.py From plugin.video.emby with GNU General Public License v3.0 | 6 votes |
def __init__(self, zonefile_stream=None):
    """Build the zone table from an optional zoneinfo tar stream.

    :param zonefile_stream: file-like object with tar data; when None,
        ``self.zones`` and ``self.metadata`` are left empty.
    """
    if zonefile_stream is None:
        # No archive given: start out empty.
        self.zones = {}
        self.metadata = None
        return

    with TarFile.open(fileobj=zonefile_stream) as tf:
        members = tf.getmembers()

        # Regular files become tzfile instances keyed by archive name;
        # the metadata member is handled separately below.
        self.zones = {}
        for member in members:
            if member.isfile() and member.name != METADATA_FN:
                self.zones[member.name] = tzfile(tf.extractfile(member),
                                                 filename=member.name)

        # Hard and symbolic links reuse their target's tzfile object
        # instead of parsing the data again (saves memory).
        link_map = {}
        for member in members:
            if member.islnk() or member.issym():
                link_map[member.name] = self.zones[member.linkname]
        self.zones.update(link_map)

        try:
            raw = tf.extractfile(tf.getmember(METADATA_FN)).read()
            self.metadata = json.loads(raw.decode('UTF-8'))
        except KeyError:
            # Archive carries no metadata member.
            self.metadata = None
Example #17
Source File: __init__.py From nzb-subliminal with GNU General Public License v3.0 | 6 votes |
def gettz(name):
    """Return a tzinfo for *name* from the bundled zoneinfo tar archive.

    Results are memoized in the module-level CACHE (most recent first,
    trimmed to CACHESIZE entries). Returns None when no zone archive is
    configured or *name* is not in the archive.
    """
    tzinfo = None
    if ZONEINFOFILE:
        # Linear cache scan: on a hit the loop leaves `tzinfo` bound to
        # the cached value and `break` skips the for-else clause below.
        for cachedname, tzinfo in CACHE:
            if cachedname == name:
                break
        else:
            # Cache miss: read the zone from the archive. The context
            # manager guarantees the archive is closed even if parsing
            # the member raises (the original leaked the handle there).
            with TarFile.open(ZONEINFOFILE) as tf:
                try:
                    zonefile = tf.extractfile(name)
                except KeyError:
                    # Unknown zone name: cache the negative result too.
                    tzinfo = None
                else:
                    tzinfo = tzfile(zonefile)
            CACHE.insert(0, (name, tzinfo))
            del CACHE[CACHESIZE:]
    return tzinfo
Example #18
Source File: __init__.py From GraphicDesignPatternByPython with MIT License | 6 votes |
def __init__(self, zonefile_stream=None):
    """Build the zone table from an optional zoneinfo tar stream.

    :param zonefile_stream: file-like object with tar data; when None,
        ``self.zones`` and ``self.metadata`` are left empty.
    """
    if zonefile_stream is None:
        # No archive given: start out empty.
        self.zones = {}
        self.metadata = None
        return

    with TarFile.open(fileobj=zonefile_stream) as tf:
        members = tf.getmembers()

        # Regular files become tzfile instances keyed by archive name;
        # the metadata member is handled separately below.
        self.zones = {}
        for member in members:
            if member.isfile() and member.name != METADATA_FN:
                self.zones[member.name] = tzfile(tf.extractfile(member),
                                                 filename=member.name)

        # Hard and symbolic links reuse their target's tzfile object
        # instead of parsing the data again (saves memory).
        link_map = {}
        for member in members:
            if member.islnk() or member.issym():
                link_map[member.name] = self.zones[member.linkname]
        self.zones.update(link_map)

        try:
            raw = tf.extractfile(tf.getmember(METADATA_FN)).read()
            self.metadata = json.loads(raw.decode('UTF-8'))
        except KeyError:
            # Archive carries no metadata member.
            self.metadata = None
Example #19
Source File: release.py From VUT-FIT-IFJ-2017-toolkit with GNU General Public License v3.0 | 6 votes |
def _add_header(original, target_file, authors):
    """Copy *original* into *target_file* with the project HEADER prepended.

    For a file named 'Makefile' every header line is prefixed with '# '
    so the header is a valid make comment; other files receive the
    header verbatim.
    """
    with open(original) as source, open(target_file, 'w') as target:
        name = basename(original)
        header = HEADER.format(name, ', '.join(authors), datetime.now())
        if name == 'Makefile':
            header = '\n'.join('# {}'.format(line) for line in header.splitlines())
        target.write('{}\n{}'.format(header, source.read()))
Example #20
Source File: __init__.py From bash-lambda-layer with MIT License | 6 votes |
def __init__(self, zonefile_stream=None):
    """Build the zone table from an optional zoneinfo tar stream.

    :param zonefile_stream: file-like object with tar data; when None,
        ``self.zones`` and ``self.metadata`` are left empty.
    """
    if zonefile_stream is None:
        # No archive given: start out empty.
        self.zones = {}
        self.metadata = None
        return

    with TarFile.open(fileobj=zonefile_stream) as tf:
        members = tf.getmembers()

        # Regular files become tzfile instances keyed by archive name;
        # the metadata member is handled separately below.
        self.zones = {}
        for member in members:
            if member.isfile() and member.name != METADATA_FN:
                self.zones[member.name] = tzfile(tf.extractfile(member),
                                                 filename=member.name)

        # Hard and symbolic links reuse their target's tzfile object
        # instead of parsing the data again (saves memory).
        link_map = {}
        for member in members:
            if member.islnk() or member.issym():
                link_map[member.name] = self.zones[member.linkname]
        self.zones.update(link_map)

        try:
            raw = tf.extractfile(tf.getmember(METADATA_FN)).read()
            self.metadata = json.loads(raw.decode('UTF-8'))
        except KeyError:
            # Archive carries no metadata member.
            self.metadata = None
Example #21
Source File: processors.py From pooch with BSD 3-Clause "New" or "Revised" License | 6 votes |
def _extract_file(self, fname, extract_dir):
    """Extract the zip archive *fname* into *extract_dir*.

    When ``self.members`` is None the whole archive is unpacked;
    otherwise only the listed members are extracted.
    """
    with ZipFile(fname, "r") as archive:
        if self.members is None:
            get_logger().info(
                "Unzipping contents of '%s' to '%s'", fname, extract_dir
            )
            # Unpack everything in one go.
            archive.extractall(path=extract_dir)
            return
        for name in self.members:
            get_logger().info(
                "Extracting '%s' from '%s' to '%s'", name, fname, extract_dir
            )
            # Stream the member out of the archive into the target file.
            destination = os.path.join(extract_dir, name)
            with archive.open(name) as source, open(destination, "wb") as sink:
                sink.write(source.read())
Example #22
Source File: __init__.py From AWS-Transit-Gateway-Demo-MultiAccount with MIT License | 6 votes |
def __init__(self, zonefile_stream=None):
    """Build the zone table from an optional zoneinfo tar stream.

    :param zonefile_stream: file-like object with tar data; when None,
        ``self.zones`` and ``self.metadata`` are left empty.
    """
    if zonefile_stream is None:
        # No archive given: start out empty.
        self.zones = {}
        self.metadata = None
        return

    with TarFile.open(fileobj=zonefile_stream) as tf:
        members = tf.getmembers()

        # Regular files become tzfile instances keyed by archive name;
        # the metadata member is handled separately below.
        self.zones = {}
        for member in members:
            if member.isfile() and member.name != METADATA_FN:
                self.zones[member.name] = tzfile(tf.extractfile(member),
                                                 filename=member.name)

        # Hard and symbolic links reuse their target's tzfile object
        # instead of parsing the data again (saves memory).
        link_map = {}
        for member in members:
            if member.islnk() or member.issym():
                link_map[member.name] = self.zones[member.linkname]
        self.zones.update(link_map)

        try:
            raw = tf.extractfile(tf.getmember(METADATA_FN)).read()
            self.metadata = json.loads(raw.decode('UTF-8'))
        except KeyError:
            # Archive carries no metadata member.
            self.metadata = None
Example #23
Source File: __init__.py From AWS-Transit-Gateway-Demo-MultiAccount with MIT License | 6 votes |
def __init__(self, zonefile_stream=None):
    """Build the zone table from an optional zoneinfo tar stream.

    :param zonefile_stream: file-like object with tar data; when None,
        ``self.zones`` and ``self.metadata`` are left empty.
    """
    if zonefile_stream is None:
        # No archive given: start out empty.
        self.zones = {}
        self.metadata = None
        return

    with TarFile.open(fileobj=zonefile_stream) as tf:
        members = tf.getmembers()

        # Regular files become tzfile instances keyed by archive name;
        # the metadata member is handled separately below.
        self.zones = {}
        for member in members:
            if member.isfile() and member.name != METADATA_FN:
                self.zones[member.name] = tzfile(tf.extractfile(member),
                                                 filename=member.name)

        # Hard and symbolic links reuse their target's tzfile object
        # instead of parsing the data again (saves memory).
        link_map = {}
        for member in members:
            if member.islnk() or member.issym():
                link_map[member.name] = self.zones[member.linkname]
        self.zones.update(link_map)

        try:
            raw = tf.extractfile(tf.getmember(METADATA_FN)).read()
            self.metadata = json.loads(raw.decode('UTF-8'))
        except KeyError:
            # Archive carries no metadata member.
            self.metadata = None
Example #24
Source File: __init__.py From aws-extender with MIT License | 6 votes |
def __init__(self, zonefile_stream=None):
    """Build the zone table from an optional zoneinfo tar stream.

    :param zonefile_stream: file-like object with tar data; when None,
        ``self.zones`` and ``self.metadata`` are left empty.
    """
    if zonefile_stream is None:
        # No archive given: start out empty.
        self.zones = {}
        self.metadata = None
        return

    with TarFile.open(fileobj=zonefile_stream) as tf:
        members = tf.getmembers()

        # Regular files become tzfile instances keyed by archive name;
        # the metadata member is handled separately below.
        self.zones = {}
        for member in members:
            if member.isfile() and member.name != METADATA_FN:
                self.zones[member.name] = tzfile(tf.extractfile(member),
                                                 filename=member.name)

        # Hard and symbolic links reuse their target's tzfile object
        # instead of parsing the data again (saves memory).
        link_map = {}
        for member in members:
            if member.islnk() or member.issym():
                link_map[member.name] = self.zones[member.linkname]
        self.zones.update(link_map)

        try:
            raw = tf.extractfile(tf.getmember(METADATA_FN)).read()
            self.metadata = json.loads(raw.decode('UTF-8'))
        except KeyError:
            # Archive carries no metadata member.
            self.metadata = None
Example #25
Source File: package.py From c3nav with Apache License 2.0 | 6 votes |
def save(self, filename=None, compression=None):
    """Write this cache package as a tar archive.

    :param filename: target path; defaults to <CACHE_ROOT>/package.tar
        (plus the compression suffix) when omitted.
    :param compression: optional tarfile compression suffix ('gz',
        'bz2', ...), appended to both the tar mode and the default name.
    """
    if filename is None:
        from django.conf import settings
        default_name = 'package.tar' if compression is None else 'package.tar.' + compression
        filename = os.path.join(settings.CACHE_ROOT, default_name)
    mode = 'w' if compression is None else 'w:' + compression
    with TarFile.open(filename, mode) as f:
        # Bounds are stored as four little-endian ints scaled by 100.
        packed_bounds = struct.pack('<iiii', *(int(i * 100) for i in self.bounds))
        self._add_bytesio(f, 'bounds', BytesIO(packed_bounds))
        for level_id, level_data in self.levels.items():
            self._add_geometryindexed(f, 'history_%d' % level_id, level_data.history)
            self._add_geometryindexed(f, 'restrictions_%d' % level_id, level_data.restrictions)
Example #26
Source File: package.py From c3nav with Apache License 2.0 | 6 votes |
def read(cls, f):
    """Deserialize a cache package from the tar data in file object *f*.

    Reads the packed bounds plus one (history, restrictions) pair per
    'history_<id>' member of the archive.

    :param f: binary file-like object containing the tar data
    :return: a new instance built from the bounds and per-level data
    """
    # Use a context manager so the archive is closed deterministically
    # (the original leaked the TarFile handle and shadowed the parameter).
    with TarFile.open(fileobj=f) as tar:
        files = {info.name: info for info in tar.getmembers()}
        # Bounds are stored as four little-endian ints scaled by 100.
        bounds = tuple(i / 100 for i in struct.unpack('<iiii', tar.extractfile(files['bounds']).read()))
        levels = {}
        for filename in files:
            if not filename.startswith('history_'):
                continue
            level_id = int(filename[8:])
            levels[level_id] = CachePackageLevel(
                history=MapHistory.read(tar.extractfile(files['history_%d' % level_id])),
                restrictions=AccessRestrictionAffected.read(tar.extractfile(files['restrictions_%d' % level_id]))
            )
    return cls(bounds, levels)
Example #27
Source File: __init__.py From SmartAlarmClock with MIT License | 6 votes |
def gettz(name):
    """Return a tzinfo for *name* from the bundled zoneinfo tar archive.

    Results are memoized in the module-level CACHE (most recent first,
    trimmed to CACHESIZE entries). Returns None when no zone archive is
    configured or *name* is not in the archive.
    """
    tzinfo = None
    if ZONEINFOFILE:
        # Linear cache scan: on a hit the loop leaves `tzinfo` bound to
        # the cached value and `break` skips the for-else clause below.
        for cachedname, tzinfo in CACHE:
            if cachedname == name:
                break
        else:
            # Cache miss: read the zone from the archive. The context
            # manager guarantees the archive is closed even if parsing
            # the member raises (the original leaked the handle there).
            with TarFile.open(ZONEINFOFILE) as tf:
                try:
                    zonefile = tf.extractfile(name)
                except KeyError:
                    # Unknown zone name: cache the negative result too.
                    tzinfo = None
                else:
                    tzinfo = tzfile(zonefile)
            CACHE.insert(0, (name, tzinfo))
            del CACHE[CACHESIZE:]
    return tzinfo
Example #28
Source File: __init__.py From jx-sqlite with Mozilla Public License 2.0 | 6 votes |
def gettz(name):
    """Return a tzinfo for *name* from the bundled zoneinfo tar archive.

    Results are memoized in the module-level CACHE (most recent first,
    trimmed to CACHESIZE entries). Returns None when no zone archive is
    configured or *name* is not in the archive.
    """
    tzinfo = None
    if ZONEINFOFILE:
        # Linear cache scan: on a hit the loop leaves `tzinfo` bound to
        # the cached value and `break` skips the for-else clause below.
        for cachedname, tzinfo in CACHE:
            if cachedname == name:
                break
        else:
            # Cache miss: read the zone from the archive. The context
            # manager guarantees the archive is closed even if parsing
            # the member raises (the original leaked the handle there).
            with TarFile.open(ZONEINFOFILE) as tf:
                try:
                    zonefile = tf.extractfile(name)
                except KeyError:
                    # Unknown zone name: cache the negative result too.
                    tzinfo = None
                else:
                    tzinfo = tzfile(zonefile)
            CACHE.insert(0, (name, tzinfo))
            del CACHE[CACHESIZE:]
    return tzinfo
Example #29
Source File: __init__.py From Crunchyroll-XML-Decoder with GNU General Public License v2.0 | 6 votes |
def gettz(name):
    """Return a tzinfo for *name* from the bundled zoneinfo tar archive.

    Results are memoized in the module-level CACHE (most recent first,
    trimmed to CACHESIZE entries). Returns None when no zone archive is
    configured or *name* is not in the archive.
    """
    tzinfo = None
    if ZONEINFOFILE:
        # Linear cache scan: on a hit the loop leaves `tzinfo` bound to
        # the cached value and `break` skips the for-else clause below.
        for cachedname, tzinfo in CACHE:
            if cachedname == name:
                break
        else:
            # Cache miss: read the zone from the archive. The context
            # manager guarantees the archive is closed even if parsing
            # the member raises (the original leaked the handle there).
            with TarFile.open(ZONEINFOFILE) as tf:
                try:
                    zonefile = tf.extractfile(name)
                except KeyError:
                    # Unknown zone name: cache the negative result too.
                    tzinfo = None
                else:
                    tzinfo = tzfile(zonefile)
            CACHE.insert(0, (name, tzinfo))
            del CACHE[CACHESIZE:]
    return tzinfo
Example #30
Source File: __init__.py From script.tv.show.next.aired with GNU General Public License v2.0 | 6 votes |
def gettz(name):
    """Return a tzinfo for *name* from the bundled zoneinfo tar archive.

    Results are memoized in the module-level CACHE (most recent first,
    trimmed to CACHESIZE entries). Returns None when no zone archive is
    configured or *name* is not in the archive.
    """
    tzinfo = None
    if ZONEINFOFILE:
        # Linear cache scan: on a hit the loop leaves `tzinfo` bound to
        # the cached value and `break` skips the for-else clause below.
        for cachedname, tzinfo in CACHE:
            if cachedname == name:
                break
        else:
            # Cache miss: read the zone from the archive. The context
            # manager guarantees the archive is closed even if parsing
            # the member raises (the original leaked the handle there).
            with TarFile.open(ZONEINFOFILE) as tf:
                try:
                    zonefile = tf.extractfile(name)
                except KeyError:
                    # Unknown zone name: cache the negative result too.
                    tzinfo = None
                else:
                    tzinfo = tzfile(zonefile)
            CACHE.insert(0, (name, tzinfo))
            del CACHE[CACHESIZE:]
    return tzinfo