Python os.replace() Examples
The following are 30 code examples of os.replace().
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example.
You may also want to check out all available functions and classes of the os module,
or try the search function.
Example #1
Source File: backports.py From mlens with MIT License | 7 votes |
def concurrency_safe_rename(src, dst):
    """Rename ``src`` into ``dst`` overwriting ``dst`` if it exists.

    On Windows os.replace (or for Python 2.7 its implementation through
    MoveFileExW) can yield permission errors if executed by two different
    processes, so retry with exponential backoff for up to one second.
    """
    deadline = 1        # give up after ~1 second of total sleeping
    slept = 0           # cumulative time spent sleeping so far
    backoff = 0.001     # initial retry delay, doubled on each attempt
    while slept < deadline:
        try:
            replace(src, dst)
            break
        except Exception as exc:
            # Only a Windows "access denied" is worth retrying;
            # anything else propagates immediately.
            if getattr(exc, 'winerror', None) == error_access_denied:
                time.sleep(backoff)
                slept += backoff
                backoff *= 2
            else:
                raise
    else:
        # Retries exhausted without a successful rename.
        raise
Example #2
Source File: atomic_write.py From DRL_DeliveryDuel with MIT License | 6 votes |
def atomic_write(filepath, binary=False, fsync=False):
    """Yield a writeable file object that atomically updates *filepath*.

    Content is written to a temporary sibling file which is renamed over the
    target on success. In some cases (namely Python < 3.3 on Windows), this
    could result in an existing file being temporarily unlinked.

    :param filepath: the file path to be opened
    :param binary: whether to open the file in a binary mode instead of textual
    :param fsync: whether to force write the file to disk
    """
    # NOTE(review): this is a generator intended for use as a context
    # manager — the @contextmanager decorator is presumably applied at the
    # definition site; confirm in the original module.
    # Pick a temp name next to the target that collides with no existing file.
    tmppath = filepath + '~'
    while os.path.isfile(tmppath):
        tmppath += '~'
    try:
        with open(tmppath, 'wb' if binary else 'w') as handle:
            yield handle
            if fsync:
                handle.flush()
                os.fsync(handle.fileno())
        replace(tmppath, filepath)
    finally:
        # Remove the temp file if the rename above never happened.
        try:
            os.remove(tmppath)
        except (IOError, OSError):
            pass
Example #3
Source File: server.py From master-server with GNU Lesser General Public License v2.1 | 6 votes |
def save(self):
    """Atomically dump the current server list and totals to list.json."""
    with self.lock:
        servers = len(self.list)
        clients = sum(server["clients"] for server in self.list)
        self.maxServers = max(servers, self.maxServers)
        self.maxClients = max(clients, self.maxClients)
        list_path = os.path.join(app.static_folder, "list.json")
        # Pretty-print only in debug mode; compact separators in production.
        if app.config["DEBUG"]:
            indent, separators = "\t", (', ', ': ')
        else:
            indent, separators = None, (',', ':')
        # Write to a "~" sibling first, then rename atomically into place.
        with open(list_path + "~", "w") as fd:
            json.dump({
                "total": {"servers": servers, "clients": clients},
                "total_max": {"servers": self.maxServers, "clients": self.maxClients},
                "list": self.list,
            }, fd, indent=indent, separators=separators)
        os.replace(list_path + "~", list_path)
Example #4
Source File: test_project.py From signac with BSD 3-Clause "New" or "Revised" License | 6 votes |
def test_export_import_complex_path(self):
    """Round-trip export/import of jobs with nested statepoint schemas."""
    prefix_data = os.path.join(self._tmp_dir.name, 'data')
    # Flat, singly-nested and doubly-nested statepoints.
    flat = [{'a': i, 'b': i % 3} for i in range(5)]
    nested = [{'a': i, 'b': i % 3, 'c': {'a': i, 'b': 0}} for i in range(5)]
    deep = [{'a': i, 'b': i % 3, 'c': {'a': i, 'b': 0, 'c': {'a': i, 'b': 0}}}
            for i in range(5)]
    statepoints = flat + nested + deep
    for sp in statepoints:
        self.project.open_job(sp).init()
    with pytest.deprecated_call():
        ids_before_export = sorted(self.project.find_job_ids())
    # copytree=os.replace moves (rather than copies) jobs out of the workspace.
    self.project.export_to(target=prefix_data, copytree=os.replace)
    assert len(self.project) == 0
    self.project.import_from(prefix_data)
    assert len(self.project) == len(statepoints)
    with pytest.deprecated_call():
        assert ids_before_export == sorted(self.project.find_job_ids())
Example #5
Source File: test_project.py From signac with BSD 3-Clause "New" or "Revised" License | 6 votes |
def test_export_import_simple_path_schema_from_path_float(self):
    """Float-valued paths must match a float schema and not an int schema."""
    prefix_data = os.path.join(self._tmp_dir.name, 'data')
    for i in range(10):
        self.project.open_job(dict(a=float(i))).init()
    with pytest.deprecated_call():
        ids_before_export = sorted(self.project.find_job_ids())
    self.project.export_to(target=prefix_data, copytree=os.replace)
    assert len(self.project) == 0
    assert len(os.listdir(prefix_data)) == 1
    assert len(os.listdir(os.path.join(prefix_data, 'a'))) == 10
    for i in range(10):
        assert os.path.isdir(os.path.join(prefix_data, 'a', str(float(i))))
    ret = self.project.import_from(origin=prefix_data, schema='a/{a:int}')
    assert len(ret) == 0  # should not match
    ret = self.project.import_from(origin=prefix_data, schema='a/{a:float}')
    assert len(ret) == 10
    with pytest.deprecated_call():
        assert ids_before_export == sorted(self.project.find_job_ids())
Example #6
Source File: test_project.py From signac with BSD 3-Clause "New" or "Revised" License | 6 votes |
def test_export_import_simple_path_schema_from_path(self):
    """An int path schema recovers statepoints from the exported directories."""
    prefix_data = os.path.join(self._tmp_dir.name, 'data')
    for i in range(10):
        self.project.open_job(dict(a=i)).init()
    with pytest.deprecated_call():
        ids_before_export = sorted(self.project.find_job_ids())
    self.project.export_to(target=prefix_data, copytree=os.replace)
    assert len(self.project) == 0
    assert len(os.listdir(prefix_data)) == 1
    assert len(os.listdir(os.path.join(prefix_data, 'a'))) == 10
    for i in range(10):
        assert os.path.isdir(os.path.join(prefix_data, 'a', str(i)))
    ret = self.project.import_from(origin=prefix_data, schema='a/{a:int}')
    assert len(ret) == 10
    with pytest.deprecated_call():
        assert ids_before_export == sorted(self.project.find_job_ids())
Example #7
Source File: test_project.py From signac with BSD 3-Clause "New" or "Revised" License | 6 votes |
def test_export_import_simple_path_with_float(self):
    """Round-trip export/import of jobs keyed by float statepoint values."""
    prefix_data = os.path.join(self._tmp_dir.name, 'data')
    for i in range(10):
        self.project.open_job(dict(a=float(i))).init()
    with pytest.deprecated_call():
        ids_before_export = sorted(self.project.find_job_ids())
    self.project.export_to(target=prefix_data, copytree=os.replace)
    assert len(self.project) == 0
    assert len(os.listdir(prefix_data)) == 1
    assert len(os.listdir(os.path.join(prefix_data, 'a'))) == 10
    for i in range(10):
        assert os.path.isdir(os.path.join(prefix_data, 'a', str(float(i))))
    assert len(self.project.import_from(prefix_data)) == 10
    assert len(self.project) == 10
    with pytest.deprecated_call():
        assert ids_before_export == sorted(self.project.find_job_ids())
Example #8
Source File: test_project.py From signac with BSD 3-Clause "New" or "Revised" License | 6 votes |
def test_export_import_simple_path(self):
    """Round-trip with int-valued paths; a mismatched schema must raise."""
    prefix_data = os.path.join(self._tmp_dir.name, 'data')
    for i in range(10):
        self.project.open_job(dict(a=i)).init()
    with pytest.deprecated_call():
        ids_before_export = sorted(self.project.find_job_ids())
    self.project.export_to(target=prefix_data, copytree=os.replace)
    assert len(self.project) == 0
    assert len(os.listdir(prefix_data)) == 1
    assert len(os.listdir(os.path.join(prefix_data, 'a'))) == 10
    for i in range(10):
        assert os.path.isdir(os.path.join(prefix_data, 'a', str(i)))
    # The path component is 'a', so a schema keyed on 'b' cannot parse it.
    with pytest.raises(StatepointParsingError):
        self.project.import_from(origin=prefix_data, schema='a/{b:int}')
    assert len(self.project.import_from(prefix_data)) == 10
    assert len(self.project) == 10
    with pytest.deprecated_call():
        assert ids_before_export == sorted(self.project.find_job_ids())
Example #9
Source File: test_project.py From signac with BSD 3-Clause "New" or "Revised" License | 6 votes |
def test_export_import_schema_callable(self):
    """A user-supplied callable can recover statepoints from exported paths."""
    def my_schema(path):
        # Match trailing ".../a/<digits>" and map the digits to statepoint 'a'.
        re_sep = re.escape(os.path.sep)
        m = re.match(r'.*' + re_sep + 'a' + re_sep + r'(?P<a>\d+)$', path)
        if m:
            return dict(a=int(m.groupdict()['a']))

    prefix_data = os.path.join(self._tmp_dir.name, 'data')
    for i in range(10):
        self.project.open_job(dict(a=i)).init()
    with pytest.deprecated_call():
        ids_before_export = sorted(self.project.find_job_ids())
    self.project.export_to(target=prefix_data, copytree=os.replace)
    assert len(self.project.import_from(prefix_data, schema=my_schema)) == 10
    with pytest.deprecated_call():
        assert ids_before_export == sorted(self.project.find_job_ids())
Example #10
Source File: test_project.py From signac with BSD 3-Clause "New" or "Revised" License | 6 votes |
def test_export_import_tarfile(self):
    """Export to a .tar archive and re-import after hiding the workspace."""
    target = os.path.join(self._tmp_dir.name, 'data.tar')
    for i in range(10):
        self.project.open_job(dict(a=i)).init()
    with pytest.deprecated_call():
        ids_before_export = sorted(self.project.find_job_ids())
    self.project.export_to(target=target)
    assert len(self.project) == 10
    with TarFile(name=target) as tarfile:
        names = tarfile.getnames()
        for i in range(10):
            assert 'a/{}'.format(i) in names
    # Move the workspace aside so the project appears empty.
    os.replace(self.project.workspace(), self.project.workspace() + '~')
    assert len(self.project) == 0
    self.project.import_from(origin=target)
    assert len(self.project) == 10
    with pytest.deprecated_call():
        assert ids_before_export == sorted(self.project.find_job_ids())
Example #11
Source File: node.py From pyuavcan with MIT License | 6 votes |
def _register_output_transfer_id_map_save_at_exit(presentation: pyuavcan.presentation.Presentation) -> None:
    """Arrange for the presentation's output transfer-ID map to be persisted at interpreter exit."""
    # We MUST sample the configuration early because if this is a redundant
    # transport it may reset its reported descriptor and local node-ID back
    # to default after close().
    local_node_id = presentation.transport.local_node_id
    descriptor = presentation.transport.descriptor

    def do_save_at_exit() -> None:
        if local_node_id is None:
            _logger.debug('Output TID map NOT saved because the transport instance is anonymous')
            return
        file_path = _get_output_transfer_id_file_path(local_node_id, descriptor)
        # PID + nanosecond timestamp makes the temp name unique per writer.
        tmp_path = f'{file_path}.{os.getpid()}.{time.time_ns()}.tmp'
        _logger.debug('Output TID map save: %s --> %s', tmp_path, file_path)
        with open(tmp_path, 'wb') as f:
            pickle.dump(presentation.output_transfer_id_map, f)
        # We use replace for compatibility reasons. On POSIX, a call to
        # rename() will be made, which is guaranteed to be atomic. On Windows
        # this may fall back to non-atomic copy, which is still acceptable for
        # us here. If the file ends up being damaged, we'll simply ignore it
        # at next startup.
        os.replace(tmp_path, file_path)
        try:
            os.unlink(tmp_path)
        except OSError:
            pass

    atexit.register(do_save_at_exit)
Example #12
Source File: dict_manager.py From signac with BSD 3-Clause "New" or "Revised" License | 6 votes |
def __setitem__(self, key, value):
    """Atomically replace the dict stored under *key* with *value*.

    The new content is first materialized under a random temporary key and
    then renamed over the destination file, so readers never observe a
    partially written dict.
    """
    self._validate_key(key)
    tmp_key = str(uuid.uuid4())
    try:
        self[tmp_key].update(value)
        os.replace(self[tmp_key].filename, self[key].filename)
    except (IOError, OSError) as exc:
        # A missing temp file combined with an empty value means nothing
        # was ever written — reject the assignment explicitly.
        if exc.errno == errno.ENOENT and not len(value):
            raise ValueError("Cannot assign empty value!")
        else:
            raise exc
    except Exception as exc:
        # Clean up the temporary entry before propagating.
        try:
            del self[tmp_key]
        except KeyError:
            pass
        raise exc
    else:
        del self._dict_registry[key]
Example #13
Source File: test_project.py From signac with BSD 3-Clause "New" or "Revised" License | 6 votes |
def test_export_import_conflict_synced_with_args(self):
    """Importing over existing jobs fails unless a sync selection is given."""
    prefix_data = os.path.join(self._tmp_dir.name, 'data')
    for i in range(10):
        self.project.open_job(dict(a=i)).init()
    with pytest.deprecated_call():
        ids_before_export = sorted(self.project.find_job_ids())
    self.project.export_to(target=prefix_data)
    # Jobs still live in the workspace, so a plain import must conflict.
    with pytest.raises(DestinationExistsError):
        assert len(self.project.import_from(prefix_data)) == 10
    selection = list(self.project.find_jobs(dict(a=0)))
    # Move the workspace aside so the project appears empty.
    os.replace(self.project.workspace(), self.project.workspace() + '~')
    assert len(self.project) == 0
    assert len(self.project.import_from(prefix_data, sync=dict(selection=selection))) == 10
    assert len(self.project) == 1
    assert len(self.project.find_jobs(dict(a=0))) == 1
    with pytest.deprecated_call():
        assert list(self.project.find_job_ids())[0] in ids_before_export
Example #14
Source File: jsondict.py From signac with BSD 3-Clause "New" or "Revised" License | 6 votes |
def _save(self, data=None):
    """Serialize *data* (default: current dict contents) to the backing file.

    In buffered mode the blob is deferred to the write buffer; with write
    concern enabled the file is updated via write-then-rename so readers
    never see a partial write.
    """
    assert self._filename is not None
    blob = json.dumps(self._as_dict() if data is None else data).encode()
    if _BUFFERED_MODE > 0:
        _store_in_buffer(self._filename, blob)
    elif self._write_concern:
        # Atomic on-disk update: write a hidden temp sibling, then rename.
        dirname, filename = os.path.split(self._filename)
        fn_tmp = os.path.join(dirname, '._{uid}_{fn}'.format(
            uid=uuid.uuid4(), fn=filename))
        with open(fn_tmp, 'wb') as tmpfile:
            tmpfile.write(blob)
        os.replace(fn_tmp, self._filename)
    else:
        with open(self._filename, 'wb') as file:
            file.write(blob)
Example #15
Source File: common.py From bob with GNU General Public License v3.0 | 6 votes |
def updateFile(self, name, content, encoding=None, newline=None):
    """Write *content* to *name* only when it differs from the last write.

    The content is first written to ``name + ".new"`` and compared against
    the snapshot kept in ``name + ".old"``. If it differs, the target is
    atomically replaced and the snapshot refreshed; otherwise the temp file
    is simply discarded and the target is left untouched.
    """
    new_path = name + ".new"
    old_path = name + ".old"
    with open(new_path, "w", encoding=encoding, newline=newline) as f:
        f.write(content)
    # Re-read as bytes so the comparison is exact regardless of encoding.
    with open(new_path, "rb") as f:
        new_bytes = f.read()
    try:
        with open(old_path, "rb") as f:
            old_bytes = f.read()
    except OSError:
        old_bytes = None
    if old_bytes == new_bytes:
        os.remove(new_path)
    else:
        os.replace(new_path, name)
        with open(old_path, "wb") as f:
            f.write(new_bytes)
Example #16
Source File: types.py From imitation with MIT License | 6 votes |
def save(path: str, trajectories: Sequence[TrajectoryWithRew]) -> None:
    """Save *trajectories* to *path* as a pickled list, atomically.

    The list is first pickled to ``path + ".tmp"`` and then moved into place
    with ``os.replace``, so a reader never observes a half-written file.

    Args:
        path: File path the rollouts are saved to.
        trajectories: The trajectories to pickle.
    """
    dirname = os.path.dirname(path)
    # os.makedirs("") raises FileNotFoundError; only create directories when
    # the path actually has a directory component.
    if dirname:
        os.makedirs(dirname, exist_ok=True)
    tmp_path = path + ".tmp"
    with open(tmp_path, "wb") as f:
        pickle.dump(trajectories, f)
    # Ensure atomic write
    os.replace(tmp_path, path)
    tf.logging.info("Dumped demonstrations to {}.".format(path))
Example #17
Source File: test_project.py From signac with BSD 3-Clause "New" or "Revised" License | 6 votes |
def test_export_import_tarfile_zipped(self):
    """Export to a gzipped tarball, including nested job files, and re-import."""
    target = os.path.join(self._tmp_dir.name, 'data.tar.gz')
    for i in range(10):
        with self.project.open_job(dict(a=i)) as job:
            os.makedirs(job.fn('sub-dir'))
            with open(job.fn(os.path.join('sub-dir', 'signac_statepoint.json')), 'w') as file:
                file.write(json.dumps({"foo": 0}))
    with pytest.deprecated_call():
        ids_before_export = sorted(self.project.find_job_ids())
    self.project.export_to(target=target)
    assert len(self.project) == 10
    with TarFile.open(name=target, mode='r:gz') as tarfile:
        names = tarfile.getnames()
        for i in range(10):
            assert 'a/{}'.format(i) in names
            assert 'a/{}/sub-dir/signac_statepoint.json'.format(i) in names
    # Move the workspace aside so the project appears empty.
    os.replace(self.project.workspace(), self.project.workspace() + '~')
    assert len(self.project) == 0
    self.project.import_from(origin=target)
    assert len(self.project) == 10
    with pytest.deprecated_call():
        assert ids_before_export == sorted(self.project.find_job_ids())
    for job in self.project:
        assert job.isfile(os.path.join('sub-dir', 'signac_statepoint.json'))
Example #18
Source File: test_project.py From signac with BSD 3-Clause "New" or "Revised" License | 5 votes |
def test_export_import(self):
    """Basic export/import round-trip preserves all job ids."""
    prefix_data = os.path.join(self._tmp_dir.name, 'data')
    for i in range(10):
        self.project.open_job(dict(a=i)).init()
    with pytest.deprecated_call():
        ids_before_export = sorted(self.project.find_job_ids())
    self.project.export_to(target=prefix_data, copytree=os.replace)
    assert len(self.project.import_from(prefix_data)) == 10
    with pytest.deprecated_call():
        assert ids_before_export == sorted(self.project.find_job_ids())
Example #19
Source File: jsondict.py From signac with BSD 3-Clause "New" or "Revised" License | 5 votes |
def flush_all(): """Execute all deferred JSONDict write operations.""" logger.debug("Flushing buffer...") issues = dict() while _JSONDICT_BUFFER: filename, blob = _JSONDICT_BUFFER.popitem() if not _BUFFERED_MODE_FORCE_WRITE: meta = _JSONDICT_META.pop(filename) if _hash(blob) != _JSONDICT_HASHES.pop(filename): try: if not _BUFFERED_MODE_FORCE_WRITE: if _get_filemetadata(filename) != meta: issues[filename] = 'File appears to have been externally modified.' continue try: fd_tmp, fn_tmp = mkstemp(dir=os.path.dirname(filename), suffix='.json') with os.fdopen(fd_tmp, 'wb') as file: file.write(blob) except OSError: os.remove(fn_tmp) raise else: os.replace(fn_tmp, filename) except OSError as error: logger.error(str(error)) issues[filename] = error if issues: raise BufferedFileError(issues)
Example #20
Source File: import_export.py From signac with BSD 3-Clause "New" or "Revised" License | 5 votes |
def _make_schema_based_path_function(jobs, exclude_keys=None, delimiter_nested='.'):
    "Generate schema based paths as a function of the given jobs."
    from .schema import _build_job_statepoint_index
    if len(jobs) <= 1:
        # The lambda must (optionally) take a format spec argument to match
        # the signature of the path function below.
        return lambda job, sep=None: ''

    index = [{'_id': job._id, 'statepoint': job.sp()} for job in jobs]
    jsi = _build_job_statepoint_index(jobs=jobs, exclude_const=True, index=index)
    sp_index = OrderedDict(jsi)

    # Map each job id to its ordered list of (key, value) path tokens.
    paths = dict()
    for key_tokens, values in sp_index.items():
        key = key_tokens.replace('.', delimiter_nested)
        if exclude_keys and key in exclude_keys:
            continue
        for value, group in values.items():
            path_tokens = (key, str(value))
            for job_id in group:
                paths.setdefault(job_id, list()).extend(path_tokens)

    def path(job, sep=None):
        try:
            if sep:
                return os.path.normpath(sep.join(paths[job._id]))
            return os.path.normpath(os.path.join(*paths[job._id]))
        except KeyError:
            raise RuntimeError(
                "Unable to determine path for job '{}'.\nThis is usually caused by a "
                "heterogeneous schema, where some keys are only present in some jobs. "
                "Try providing a custom path.".format(job))
    return path
Example #21
Source File: __init__.py From pipenv with MIT License | 5 votes |
def as_posix(self): """Return the string representation of the path with forward (/) slashes.""" f = self._flavour return str(self).replace(f.sep, '/')
Example #22
Source File: __init__.py From pipenv with MIT License | 5 votes |
def replace(self, target):
    """
    Rename this path to the given path, clobbering the existing
    destination if it exists.
    """
    # Guard clauses first: feature availability, then stream state.
    if sys.version_info < (3, 3):
        raise NotImplementedError("replace() is only available "
                                  "with Python 3.3 and later")
    if self._closed:
        self._raise_closed()
    # Delegate the actual rename to the platform accessor.
    self._accessor.replace(self, target)
Example #23
Source File: project.py From signac with BSD 3-Clause "New" or "Revised" License | 5 votes |
def update_cache(self):
    """Update the persistent state point cache.

    This function updates a persistent state point cache, which is stored in
    the project root directory. Most data space operations, including
    iteration and filtering or selection are expected to be significantly
    faster after calling this function, especially for large data spaces.
    """
    logger.info('Update cache...')
    start = time.time()
    cache = self._read_cache()
    self._update_in_memory_cache()
    if cache is not None and set(cache) == set(self._sp_cache):
        logger.info("Cache is up to date.")
        return
    fn_cache = self.fn(self.FN_CACHE)
    fn_cache_tmp = fn_cache + '~'
    try:
        with gzip.open(fn_cache_tmp, 'wb') as cachefile:
            cachefile.write(json.dumps(self._sp_cache).encode())
    except OSError:
        # clean-up the partially written temp file before propagating
        try:
            os.remove(fn_cache_tmp)
        except (OSError, IOError):
            pass
        raise
    else:
        # Atomic swap of the fully written cache file.
        os.replace(fn_cache_tmp, fn_cache)
    delta = time.time() - start
    logger.info("Updated cache in {:.3f} seconds.".format(delta))
    return len(self._sp_cache)
Example #24
Source File: job.py From signac with BSD 3-Clause "New" or "Revised" License | 5 votes |
def move(self, project):
    """Move this job to project.

    This function will attempt to move this instance of job from its original
    project to a different project.

    :param project: The project to move this job to.
    :type project: :py:class:`~.project.Project`
    :raises DestinationExistsError: If the job is already initialized in project.
    :raises RuntimeError: If the job is not initialized or the destination is
        on a different device.
    :raises OSError: When the move failed due unexpected file system issues.
    """
    dst = project.open_job(self.statepoint())
    _mkdir_p(project.workspace())
    try:
        os.replace(self.workspace(), dst.workspace())
    except OSError as err:
        # Translate the raw errno into a domain-specific exception.
        if err.errno == errno.ENOENT:
            raise RuntimeError(
                "Cannot move job '{}', because it is not initialized!".format(self))
        elif err.errno in (errno.EEXIST, errno.ENOTEMPTY, errno.EACCES):
            raise DestinationExistsError(dst)
        elif err.errno == errno.EXDEV:
            raise RuntimeError(
                "Cannot move jobs across different devices (file systems).")
        else:
            raise err
    # Adopt the destination job's state so this instance points at the new home.
    self.__dict__.update(dst.__dict__)
Example #25
Source File: _compat.py From RSSNewsGAE with Apache License 2.0 | 5 votes |
def filename_to_ui(value):
    """Best-effort conversion of a filename to displayable text."""
    if isinstance(value, bytes):
        return value.decode(get_filesystem_encoding(), 'replace')
    # Round-trip str through UTF-8 so lone surrogates become U+FFFD.
    return value.encode('utf-8', 'surrogateescape').decode('utf-8', 'replace')
Example #26
Source File: _compat.py From pipenv with MIT License | 5 votes |
def filename_to_ui(value):
    """Decode a bytes filename for display; str passes through unchanged."""
    if not isinstance(value, bytes):
        return value
    return value.decode(get_filesystem_encoding(), "replace")
Example #27
Source File: _compat.py From RSSNewsGAE with Apache License 2.0 | 5 votes |
def _force_correct_text_writer(text_writer, encoding, errors):
    """Return a text writer guaranteed to honour *encoding*/*errors*."""
    if _is_binary_writer(text_writer, False):
        binary_writer = text_writer
    else:
        # With no target encoding we only need to check that the writer is
        # not actually misconfigured.
        if encoding is None and not _stream_is_misconfigured(text_writer):
            return text_writer
        if _is_compatible_text_stream(text_writer, encoding, errors):
            return text_writer
        # Try to dig out the underlying binary writer. If that fails because
        # the environment is misconfigured, silently keep the original
        # writer — this is too common to treat as fatal, and mojibake beats
        # an exception here.
        binary_writer = _find_binary_writer(text_writer)
        if binary_writer is None:
            return text_writer
    # Default errors to 'replace': nobody handles strict encode errors at
    # this point anyway, and nothing downstream can repair the stream.
    if errors is None:
        errors = 'replace'
    return _make_text_stream(binary_writer, encoding, errors)
Example #28
Source File: _compat.py From RSSNewsGAE with Apache License 2.0 | 5 votes |
def _force_correct_text_reader(text_reader, encoding, errors):
    """Return a text reader guaranteed to honour *encoding*/*errors*."""
    if _is_binary_reader(text_reader, False):
        binary_reader = text_reader
    else:
        # With no target encoding we only need to check that the reader is
        # not actually misconfigured.
        if encoding is None and not _stream_is_misconfigured(text_reader):
            return text_reader
        if _is_compatible_text_stream(text_reader, encoding, errors):
            return text_reader
        # Try to dig out the underlying binary reader. If that fails because
        # the environment is misconfigured, silently keep the original
        # reader — this is too common to treat as fatal, and mojibake beats
        # an exception here.
        binary_reader = _find_binary_reader(text_reader)
        if binary_reader is None:
            return text_reader
    # Default errors to 'replace': nobody handles strict decode errors at
    # this point anyway, and nothing downstream can repair the stream.
    if errors is None:
        errors = 'replace'
    return _make_text_stream(binary_reader, encoding, errors)
Example #29
Source File: _compat.py From RSSNewsGAE with Apache License 2.0 | 5 votes |
def filename_to_ui(value):
    """Return a displayable form of a filename, decoding bytes if needed."""
    if not isinstance(value, bytes):
        return value
    return value.decode(get_filesystem_encoding(), 'replace')
Example #30
Source File: utils.py From dynamic-training-with-apache-mxnet-on-aws with Apache License 2.0 | 5 votes |
def _replace_atomic(src, dst): """Implement atomic os.replace with linux and OSX. Internal use only""" try: os.rename(src, dst) except OSError: try: os.remove(src) except OSError: pass finally: raise OSError( 'Moving downloaded temp file - {}, to {} failed. \ Please retry the download.'.format(src, dst))