Python io.DEFAULT_BUFFER_SIZE Examples
The following are 30
code examples of io.DEFAULT_BUFFER_SIZE (an integer constant in the io module, not a callable).
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example.
You may also want to check out all available functions/classes of the module
io, or try the search function.
Example #1
Source File: ace_api.py From ACE with Apache License 2.0 | 6 votes |
def get_analysis_file(uuid, name, output_file=None, output_fp=None, *args, **kwargs):
    """Download the named file attached to the given analysis.

    :param str uuid: UUID of the analysis.
    :param str name: Name of the file to download.
    :param str output_file: (optional) Path the content is written to.
    :param output_fp: (optional) Open binary file object to write to.
        If neither output_file nor output_fp is given, stdout is used.
    :return: True on success.
    """
    if output_file is None and output_fp is None:
        output_fp = sys.stdout.buffer
    elif output_fp is None:
        output_fp = open(output_file, 'wb')

    try:
        r = _execute_api_call('analysis/file/{}/{}'.format(uuid, name),
                              stream=True, *args, **kwargs)
        size = 0
        for chunk in r.iter_content(io.DEFAULT_BUFFER_SIZE):
            if chunk:
                output_fp.write(chunk)
                size += len(chunk)
    finally:
        # close only a handle this function opened itself; the original
        # leaked it if the request or a write raised
        if output_file is not None:
            output_fp.close()

    return True
Example #2
Source File: ace_api.py From ACE with Apache License 2.0 | 6 votes |
def get_analysis_file(uuid, name, output_file=None, output_fp=None, *args, **kwargs):
    """Download the named file attached to the given analysis.

    :param str uuid: UUID of the analysis.
    :param str name: Name of the file to download.
    :param str output_file: (optional) Path the content is written to.
    :param output_fp: (optional) Open binary file object to write to.
        If neither output_file nor output_fp is given, stdout is used.
    :return: True on success.
    """
    if output_file is None and output_fp is None:
        output_fp = sys.stdout.buffer
    elif output_fp is None:
        output_fp = open(output_file, 'wb')

    try:
        r = _execute_api_call('analysis/file/{}/{}'.format(uuid, name),
                              stream=True, *args, **kwargs)
        size = 0
        for chunk in r.iter_content(io.DEFAULT_BUFFER_SIZE):
            if chunk:
                output_fp.write(chunk)
                size += len(chunk)
    finally:
        # close only a handle this function opened itself; the original
        # leaked it if the request or a write raised
        if output_file is not None:
            output_fp.close()

    return True
Example #3
Source File: _pyio.py From ironpython2 with Apache License 2.0 | 6 votes |
def __init__(self, reader, writer, buffer_size=DEFAULT_BUFFER_SIZE, max_buffer_size=None):
    """Constructor.

    The arguments are two RawIO instances.
    """
    if max_buffer_size is not None:
        # accepted only for backward compatibility; the value is ignored
        warnings.warn("max_buffer_size is deprecated", DeprecationWarning, 2)
    if not reader.readable():
        raise IOError('"reader" argument must be readable.')
    if not writer.writable():
        raise IOError('"writer" argument must be writable.')
    # wrap each side in its own buffered object, sharing one buffer size
    self.reader = BufferedReader(reader, buffer_size)
    self.writer = BufferedWriter(writer, buffer_size)
Example #4
Source File: _pyio.py From meddle with MIT License | 6 votes |
def __init__(self, reader, writer, buffer_size=DEFAULT_BUFFER_SIZE, max_buffer_size=None):
    """Constructor.

    The arguments are two RawIO instances.
    """
    if max_buffer_size is not None:
        # accepted only for backward compatibility; the value is ignored
        warnings.warn("max_buffer_size is deprecated", DeprecationWarning, 2)
    if not reader.readable():
        raise IOError('"reader" argument must be readable.')
    if not writer.writable():
        raise IOError('"writer" argument must be writable.')
    # wrap each side in its own buffered object, sharing one buffer size
    self.reader = BufferedReader(reader, buffer_size)
    self.writer = BufferedWriter(writer, buffer_size)
Example #5
Source File: gzipstreamfile.py From gzipstream with MIT License | 6 votes |
def read(self, size):
    """Return up to *size* bytes of decompressed data from the stream.

    Serves from self.unused_buffer first; otherwise pulls raw compressed
    data from self.stream, feeds it to self.decoder, and recurses until
    enough decompressed data (or EOF) is available.
    """
    # TODO: Update this to use unconsumed_tail and a StringIO buffer
    # http://docs.python.org/2/library/zlib.html#zlib.Decompress.unconsumed_tail
    # Check if we need to start a new decoder
    if self.decoder and self.decoder.unused_data:
        self.restart_decoder()
    # Use unused data first
    if len(self.unused_buffer) > size:
        part = self.unused_buffer[:size]
        self.unused_buffer = self.unused_buffer[size:]
        return part
    # If the stream is finished and no unused raw data, return what we have
    if self.stream.closed or self.finished:
        self.finished = True
        # NOTE(review): '' (str, not bytes) sentinel suggests this targets
        # Python 2 — confirm before porting to Python 3
        buf, self.unused_buffer = self.unused_buffer, ''
        return buf
    # Otherwise consume new data
    raw = self.stream.read(io.DEFAULT_BUFFER_SIZE)
    if len(raw) > 0:
        self.unused_buffer += self.decoder.decompress(raw)
    else:
        self.finished = True
    # recurse: the buffer now has more data or finished is set
    return self.read(size)
Example #6
Source File: test_streaming_client_encryption_stream.py From aws-encryption-sdk-python with Apache License 2.0 | 6 votes |
def test_new_with_params(self):
    """Verify a new MockEncryptionStream stores all constructor params."""
    mock_int_sentinel = MagicMock(__class__=int)
    mock_stream = MockEncryptionStream(
        source=self.mock_source_stream,
        key_provider=self.mock_key_provider,
        mock_read_bytes=sentinel.read_bytes,
        line_length=io.DEFAULT_BUFFER_SIZE,
        source_length=mock_int_sentinel,
    )
    # configuration values are stored verbatim on the config object
    assert mock_stream.config.source == self.mock_source_stream
    assert_prepped_stream_identity(mock_stream.config.source, object)
    assert mock_stream.config.key_provider is self.mock_key_provider
    assert mock_stream.config.mock_read_bytes is sentinel.read_bytes
    assert mock_stream.config.line_length == io.DEFAULT_BUFFER_SIZE
    assert mock_stream.config.source_length is mock_int_sentinel
    # runtime state starts empty/unprepped
    assert mock_stream.bytes_read == 0
    assert mock_stream.output_buffer == b""
    assert not mock_stream._message_prepped
    assert mock_stream.source_stream == self.mock_source_stream
    assert_prepped_stream_identity(mock_stream.source_stream, object)
    assert mock_stream._stream_length is mock_int_sentinel
    assert mock_stream.line_length == io.DEFAULT_BUFFER_SIZE
Example #7
Source File: _pyio.py From BinderFilter with MIT License | 6 votes |
def __init__(self, reader, writer, buffer_size=DEFAULT_BUFFER_SIZE, max_buffer_size=None): """Constructor. The arguments are two RawIO instances. """ if max_buffer_size is not None: warnings.warn("max_buffer_size is deprecated", DeprecationWarning, 2) if not reader.readable(): raise IOError('"reader" argument must be readable.') if not writer.writable(): raise IOError('"writer" argument must be writable.') self.reader = BufferedReader(reader, buffer_size) self.writer = BufferedWriter(writer, buffer_size)
Example #8
Source File: fileutil.py From Blender-CM3D2-Converter with Apache License 2.0 | 6 votes |
def __init__(self, filepath, mode='wb', buffer_size=io.DEFAULT_BUFFER_SIZE, backup_filepath=None):
    """Initialize with the destination file path.

    Writes go to a temporary file created next to *filepath*.  If
    *backup_filepath* is not None, a backup file is created when the
    write completes.

    :param filepath: final destination path
    :param mode: mode passed to os.fdopen (binary write by default)
    :param buffer_size: buffer size for the underlying buffered writer
    :param backup_filepath: optional path for a backup copy
    """
    dirpath, filename = os.path.split(filepath)
    fd, temppath = tempfile.mkstemp(prefix=filename + '.', dir=dirpath)
    # pre-bind fh: in the original, os.fdopen raising left fh unbound and
    # the except handler died with NameError, masking the real error
    fh = None
    try:
        fh = os.fdopen(fd, mode)
        super(TemporaryFileWriter, self).__init__(fh, buffer_size)
    except:
        if fh:
            fh.close()
        else:
            # fdopen never took ownership: close the raw descriptor
            os.close(fd)
        os.remove(temppath)
        raise
    self.__filepath = filepath
    self.__temppath = temppath
    self.backup_filepath = backup_filepath
Example #9
Source File: _pyio.py From oss-ftp with MIT License | 6 votes |
def __init__(self, reader, writer, buffer_size=DEFAULT_BUFFER_SIZE, max_buffer_size=None):
    """Constructor.

    The arguments are two RawIO instances.
    """
    if max_buffer_size is not None:
        # accepted only for backward compatibility; the value is ignored
        warnings.warn("max_buffer_size is deprecated", DeprecationWarning, 2)
    if not reader.readable():
        raise IOError('"reader" argument must be readable.')
    if not writer.writable():
        raise IOError('"writer" argument must be writable.')
    # wrap each side in its own buffered object, sharing one buffer size
    self.reader = BufferedReader(reader, buffer_size)
    self.writer = BufferedWriter(writer, buffer_size)
Example #10
Source File: _pyio.py From Computable with MIT License | 6 votes |
def __init__(self, reader, writer, buffer_size=DEFAULT_BUFFER_SIZE, max_buffer_size=None):
    """Constructor.

    The arguments are two RawIO instances.
    """
    if max_buffer_size is not None:
        # accepted only for backward compatibility; the value is ignored
        warnings.warn("max_buffer_size is deprecated", DeprecationWarning, 2)
    if not reader.readable():
        raise IOError('"reader" argument must be readable.')
    if not writer.writable():
        raise IOError('"writer" argument must be writable.')
    # wrap each side in its own buffered object, sharing one buffer size
    self.reader = BufferedReader(reader, buffer_size)
    self.writer = BufferedWriter(writer, buffer_size)
Example #11
Source File: misc.py From Python24 with MIT License | 5 votes |
def read_chunks(file, size=io.DEFAULT_BUFFER_SIZE):
    """Yield pieces of data from a file-like object until EOF."""
    chunk = file.read(size)
    while chunk:
        yield chunk
        chunk = file.read(size)
Example #12
Source File: base_evaluator.py From scikit-multiflow with BSD 3-Clause "New" or "Revised" License | 5 votes |
def _update_file(self):
    """Format the current buffered metrics as one CSV line and append it
    to the in-memory file buffer, flushing to disk when the buffer would
    exceed io.DEFAULT_BUFFER_SIZE.
    """
    if self.output_file is not None:
        # Note: Must follow order set in _init_file()
        line = str(self._data_buffer.sample_id)
        for metric in self.metrics:
            if metric == constants.TRUE_VS_PREDICTED:
                # one true value followed by one prediction per model
                true_value = self._data_buffer.get_data(metric_id=metric, data_id=constants.Y_TRUE)
                pred_values = self._data_buffer.get_data(metric_id=metric, data_id=constants.Y_PRED)
                line += ',{:.6f}'.format(true_value)
                for i in range(self.n_models):
                    line += ',{:.6f}'.format(pred_values[i])
            elif metric == constants.RUNNING_TIME:
                # three timing columns (train, test, total) per model
                training_time_values = self._data_buffer.get_data(metric_id=metric, data_id='training_time')
                testing_time_values = self._data_buffer.get_data(metric_id=metric, data_id='testing_time')
                total_running_time_values = self._data_buffer.get_data(metric_id=metric, data_id='total_running_time')
                values = (training_time_values, testing_time_values, total_running_time_values)
                for i in range(self.n_models):
                    line += ',{:.6f},{:.6f},{:.6f}'.format(values[0][i], values[1][i], values[2][i])
            elif metric == constants.MODEL_SIZE:
                values = self._data_buffer.get_data(metric_id=metric, data_id='model_size')
                for i in range(self.n_models):
                    line += ',{:.6f}'.format(values[i])
            elif metric == constants.DATA_POINTS:
                # data points are not written to the file
                continue
            else:
                # default metrics: mean and current value per model
                mean_values = self._data_buffer.get_data(metric_id=metric, data_id=constants.MEAN)
                current_values = self._data_buffer.get_data(metric_id=metric, data_id=constants.CURRENT)
                values = (mean_values, current_values)
                for i in range(self.n_models):
                    line += ',{:.6f},{:.6f}'.format(values[0][i], values[1][i])
        line = '\n' + line
        if sys.getsizeof(line) + self._file_buffer_size > io.DEFAULT_BUFFER_SIZE:
            # Appending the next line will make the buffer to exceed the system's default buffer size
            # flush the content of the buffer
            self._flush_file_buffer()
        self._file_buffer += line
        self._file_buffer_size += sys.getsizeof(line)
Example #13
Source File: __init__.py From kobo-predict with BSD 2-Clause "Simplified" License | 5 votes |
def read_chunks(file, size=io.DEFAULT_BUFFER_SIZE):
    """Yield pieces of data from a file-like object until EOF."""
    chunk = file.read(size)
    while chunk:
        yield chunk
        chunk = file.read(size)
Example #14
Source File: makefile.py From faces with GNU General Public License v2.0 | 5 votes |
def backport_makefile(self, mode="r", buffering=None, encoding=None, errors=None, newline=None):
    """
    Backport of ``socket.makefile`` from Python 3.5.
    """
    if not set(mode) <= set(["r", "w", "b"]):
        raise ValueError(
            "invalid mode %r (only r, w, b allowed)" % (mode,)
        )
    writing = "w" in mode
    reading = "r" in mode or not writing
    assert reading or writing
    binary = "b" in mode
    # raw mode never carries "b"; it mirrors only the r/w flags
    rawmode = ("r" if reading else "") + ("w" if writing else "")
    raw = SocketIO(self, rawmode)
    self._makefile_refs += 1
    # None and any negative value both mean "use the default buffer size"
    if buffering is None or buffering < 0:
        buffering = io.DEFAULT_BUFFER_SIZE
    if buffering == 0:
        if not binary:
            raise ValueError("unbuffered streams must be binary")
        return raw
    if reading and writing:
        buffered = io.BufferedRWPair(raw, raw, buffering)
    elif reading:
        buffered = io.BufferedReader(raw, buffering)
    else:
        assert writing
        buffered = io.BufferedWriter(raw, buffering)
    if binary:
        return buffered
    # text mode: wrap the buffered stream and record the requested mode
    text = io.TextIOWrapper(buffered, encoding, errors, newline)
    text.mode = mode
    return text
Example #15
Source File: makefile.py From splunk-aws-project-trumpet with MIT License | 5 votes |
def backport_makefile(self, mode="r", buffering=None, encoding=None, errors=None, newline=None):
    """
    Backport of ``socket.makefile`` from Python 3.5.
    """
    if not set(mode) <= set(["r", "w", "b"]):
        raise ValueError(
            "invalid mode %r (only r, w, b allowed)" % (mode,)
        )
    writing = "w" in mode
    reading = "r" in mode or not writing
    assert reading or writing
    binary = "b" in mode
    # raw mode never carries "b"; it mirrors only the r/w flags
    rawmode = ("r" if reading else "") + ("w" if writing else "")
    raw = SocketIO(self, rawmode)
    self._makefile_refs += 1
    # None and any negative value both mean "use the default buffer size"
    if buffering is None or buffering < 0:
        buffering = io.DEFAULT_BUFFER_SIZE
    if buffering == 0:
        if not binary:
            raise ValueError("unbuffered streams must be binary")
        return raw
    if reading and writing:
        buffered = io.BufferedRWPair(raw, raw, buffering)
    elif reading:
        buffered = io.BufferedReader(raw, buffering)
    else:
        assert writing
        buffered = io.BufferedWriter(raw, buffering)
    if binary:
        return buffered
    # text mode: wrap the buffered stream and record the requested mode
    text = io.TextIOWrapper(buffered, encoding, errors, newline)
    text.mode = mode
    return text
Example #16
Source File: makefile.py From splunk-aws-project-trumpet with MIT License | 5 votes |
def backport_makefile(self, mode="r", buffering=None, encoding=None, errors=None, newline=None):
    """
    Backport of ``socket.makefile`` from Python 3.5.
    """
    if not set(mode) <= set(["r", "w", "b"]):
        raise ValueError(
            "invalid mode %r (only r, w, b allowed)" % (mode,)
        )
    writing = "w" in mode
    reading = "r" in mode or not writing
    assert reading or writing
    binary = "b" in mode
    # raw mode never carries "b"; it mirrors only the r/w flags
    rawmode = ("r" if reading else "") + ("w" if writing else "")
    raw = SocketIO(self, rawmode)
    self._makefile_refs += 1
    # None and any negative value both mean "use the default buffer size"
    if buffering is None or buffering < 0:
        buffering = io.DEFAULT_BUFFER_SIZE
    if buffering == 0:
        if not binary:
            raise ValueError("unbuffered streams must be binary")
        return raw
    if reading and writing:
        buffered = io.BufferedRWPair(raw, raw, buffering)
    elif reading:
        buffered = io.BufferedReader(raw, buffering)
    else:
        assert writing
        buffered = io.BufferedWriter(raw, buffering)
    if binary:
        return buffered
    # text mode: wrap the buffered stream and record the requested mode
    text = io.TextIOWrapper(buffered, encoding, errors, newline)
    text.mode = mode
    return text
Example #17
Source File: _pyio.py From oss-ftp with MIT License | 5 votes |
def readall(self):
    """Read until EOF, using multiple read() call."""
    accumulated = bytearray()
    while True:
        block = self.read(DEFAULT_BUFFER_SIZE)
        if not block:
            break
        accumulated += block
    if accumulated:
        return bytes(accumulated)
    # nothing read at all: propagate b'' or None exactly as read() gave it
    return block
Example #18
Source File: _pyio.py From BinderFilter with MIT License | 5 votes |
def readall(self):
    """Read until EOF, using multiple read() call."""
    accumulated = bytearray()
    while True:
        block = self.read(DEFAULT_BUFFER_SIZE)
        if not block:
            break
        accumulated += block
    if accumulated:
        return bytes(accumulated)
    # nothing read at all: propagate b'' or None exactly as read() gave it
    return block
Example #19
Source File: _pyio.py From BinderFilter with MIT License | 5 votes |
def __init__(self, raw, buffer_size=DEFAULT_BUFFER_SIZE):
    """Create a new buffered reader using the given readable raw IO object.
    """
    if not raw.readable():
        raise IOError('"raw" argument must be readable.')
    _BufferedIOMixin.__init__(self, raw)
    if buffer_size <= 0:
        raise ValueError("invalid buffer size")
    self.buffer_size = buffer_size
    # initialize the (empty) internal read buffer
    self._reset_read_buf()
    # serializes concurrent access to the read buffer
    self._read_lock = Lock()
Example #20
Source File: _pyio.py From BinderFilter with MIT License | 5 votes |
def __init__(self, raw, buffer_size=DEFAULT_BUFFER_SIZE, max_buffer_size=None):
    """Create a new buffered writer wrapping the given writable raw IO object."""
    if not raw.writable():
        raise IOError('"raw" argument must be writable.')
    _BufferedIOMixin.__init__(self, raw)
    if buffer_size <= 0:
        raise ValueError("invalid buffer size")
    if max_buffer_size is not None:
        # accepted only for backward compatibility; the value is ignored
        warnings.warn("max_buffer_size is deprecated", DeprecationWarning,
                      self._warning_stack_offset)
    self.buffer_size = buffer_size
    # pending bytes not yet flushed to the raw stream
    self._write_buf = bytearray()
    # serializes concurrent access to the write buffer
    self._write_lock = Lock()
Example #21
Source File: _pyio.py From oss-ftp with MIT License | 5 votes |
def __init__(self, raw, buffer_size=DEFAULT_BUFFER_SIZE):
    """Create a new buffered reader using the given readable raw IO object.
    """
    if not raw.readable():
        raise IOError('"raw" argument must be readable.')
    _BufferedIOMixin.__init__(self, raw)
    if buffer_size <= 0:
        raise ValueError("invalid buffer size")
    self.buffer_size = buffer_size
    # initialize the (empty) internal read buffer
    self._reset_read_buf()
    # serializes concurrent access to the read buffer
    self._read_lock = Lock()
Example #22
Source File: makefile.py From Python24 with MIT License | 5 votes |
def backport_makefile(self, mode="r", buffering=None, encoding=None, errors=None, newline=None):
    """
    Backport of ``socket.makefile`` from Python 3.5.
    """
    if not set(mode) <= set(["r", "w", "b"]):
        raise ValueError(
            "invalid mode %r (only r, w, b allowed)" % (mode,)
        )
    writing = "w" in mode
    reading = "r" in mode or not writing
    assert reading or writing
    binary = "b" in mode
    # raw mode never carries "b"; it mirrors only the r/w flags
    rawmode = ("r" if reading else "") + ("w" if writing else "")
    raw = SocketIO(self, rawmode)
    self._makefile_refs += 1
    # None and any negative value both mean "use the default buffer size"
    if buffering is None or buffering < 0:
        buffering = io.DEFAULT_BUFFER_SIZE
    if buffering == 0:
        if not binary:
            raise ValueError("unbuffered streams must be binary")
        return raw
    if reading and writing:
        buffered = io.BufferedRWPair(raw, raw, buffering)
    elif reading:
        buffered = io.BufferedReader(raw, buffering)
    else:
        assert writing
        buffered = io.BufferedWriter(raw, buffering)
    if binary:
        return buffered
    # text mode: wrap the buffered stream and record the requested mode
    text = io.TextIOWrapper(buffered, encoding, errors, newline)
    text.mode = mode
    return text
Example #23
Source File: _pyio.py From oss-ftp with MIT License | 5 votes |
def __init__(self, raw, buffer_size=DEFAULT_BUFFER_SIZE, max_buffer_size=None):
    """Create a new buffered writer wrapping the given writable raw IO object."""
    if not raw.writable():
        raise IOError('"raw" argument must be writable.')
    _BufferedIOMixin.__init__(self, raw)
    if buffer_size <= 0:
        raise ValueError("invalid buffer size")
    if max_buffer_size is not None:
        # accepted only for backward compatibility; the value is ignored
        warnings.warn("max_buffer_size is deprecated", DeprecationWarning,
                      self._warning_stack_offset)
    self.buffer_size = buffer_size
    # pending bytes not yet flushed to the raw stream
    self._write_buf = bytearray()
    # serializes concurrent access to the write buffer
    self._write_lock = Lock()
Example #24
Source File: wsgiserver3.py From SalesforceXyTools with Apache License 2.0 | 5 votes |
def makefile(self, sock, mode='r', bufsize=DEFAULT_BUFFER_SIZE):
    """Return a file-like object wrapping *sock*; subclasses must override.

    The original did ``raise NotImplemented`` — NotImplemented is a
    constant, not an exception, so raising it produced a TypeError.
    """
    raise NotImplementedError
Example #25
Source File: wsgiserver3.py From SalesforceXyTools with Apache License 2.0 | 5 votes |
def CP_makefile(sock, mode='r', bufsize=DEFAULT_BUFFER_SIZE):
    """Wrap *sock* in a buffered reader or writer, depending on *mode*."""
    raw = socket.SocketIO(sock, mode)
    if 'r' in mode:
        return io.BufferedReader(raw, bufsize)
    return CP_BufferedWriter(raw, bufsize)
Example #26
Source File: ssl_builtin.py From SalesforceXyTools with Apache License 2.0 | 5 votes |
def makefile(self, sock, mode='r', bufsize=DEFAULT_BUFFER_SIZE):
    """Return a file-like object for *sock*, delegating to
    wsgiserver.CP_fileobject."""
    return wsgiserver.CP_fileobject(sock, mode, bufsize)
Example #27
Source File: ssl_builtin.py From SalesforceXyTools with Apache License 2.0 | 5 votes |
def makefile(self, sock, mode='r', bufsize=DEFAULT_BUFFER_SIZE):
    """Return a file-like object for *sock*, delegating to
    wsgiserver.CP_makefile."""
    return wsgiserver.CP_makefile(sock, mode, bufsize)
Example #28
Source File: ace_api.py From ACE with Apache License 2.0 | 5 votes |
def cloudphish_download(url=None, sha256=None, output_path=None, output_fp=None, *args, **kwargs):
    """Download content from Cloudphish.
    Note: either the url OR the sha256 of the url is expected to passed.

    :param str url: (optional) The url
    :param str sha256: (optional) The sha256 of the url.
    :param str output_path: (optional) The path to write the content. Default: stdout
    :param str output_fp: (optional) a file handle/buffer to write the content. Default: stdout
    """
    if url is None and sha256 is None:
        raise ValueError("you must supply either url or sha256 to cloudphish_download")

    if output_path is None and output_fp is None:
        output_fp = sys.stdout.buffer
    elif output_fp is None:
        output_fp = open(output_path, 'wb')

    params = { }
    if url:
        params['url'] = url
    if sha256:
        params['s'] = sha256

    try:
        r = _execute_api_call('cloudphish/download', params=params,
                              stream=True, *args, **kwargs)
        size = 0
        for chunk in r.iter_content(io.DEFAULT_BUFFER_SIZE):
            if chunk:
                output_fp.write(chunk)
                size += len(chunk)
    finally:
        # close only a handle this function opened itself; the original
        # leaked it if the request or a write raised
        if output_path is not None:
            output_fp.close()

    return True
Example #29
Source File: hashes.py From pywr with GNU General Public License v3.0 | 5 votes |
def compute_hash(filename, algorithm='md5', chunk_size=io.DEFAULT_BUFFER_SIZE):
    """
    Compute the hash of a large file using hashlib
    """
    digest = hashlib.new(algorithm)
    with io.open(filename, mode='rb') as stream:
        # read in fixed-size chunks so memory use stays bounded
        while True:
            block = stream.read(chunk_size)
            if not block:
                break
            digest.update(block)
    return digest.hexdigest()
Example #30
Source File: makefile.py From vnpy_crypto with MIT License | 5 votes |
def backport_makefile(self, mode="r", buffering=None, encoding=None, errors=None, newline=None):
    """
    Backport of ``socket.makefile`` from Python 3.5.
    """
    if not set(mode) <= set(["r", "w", "b"]):
        raise ValueError(
            "invalid mode %r (only r, w, b allowed)" % (mode,)
        )
    writing = "w" in mode
    reading = "r" in mode or not writing
    assert reading or writing
    binary = "b" in mode
    # raw mode never carries "b"; it mirrors only the r/w flags
    rawmode = ("r" if reading else "") + ("w" if writing else "")
    raw = SocketIO(self, rawmode)
    self._makefile_refs += 1
    # None and any negative value both mean "use the default buffer size"
    if buffering is None or buffering < 0:
        buffering = io.DEFAULT_BUFFER_SIZE
    if buffering == 0:
        if not binary:
            raise ValueError("unbuffered streams must be binary")
        return raw
    if reading and writing:
        buffered = io.BufferedRWPair(raw, raw, buffering)
    elif reading:
        buffered = io.BufferedReader(raw, buffering)
    else:
        assert writing
        buffered = io.BufferedWriter(raw, buffering)
    if binary:
        return buffered
    # text mode: wrap the buffered stream and record the requested mode
    text = io.TextIOWrapper(buffered, encoding, errors, newline)
    text.mode = mode
    return text