Python sys.getsizeof() Examples
The following are 30 code examples of sys.getsizeof(), collected from open source projects. The originating project, source file, license, and vote count are noted above each example. You may also want to check out the other available functions and classes of the sys module.
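Before diving into the examples, here is a minimal, self-contained sketch of the basics most of the snippets below rely on: sys.getsizeof() is shallow (it reports only the object itself, not the objects it references), the exact byte counts vary by platform and Python version, and an optional second argument is a fallback for implementations that cannot report a size.

import sys

nums = [1, 2, 3]

# getsizeof() is shallow: it measures the list object (header plus its
# pointer array), not the integer objects the list refers to.
print(sys.getsizeof(nums))

# Exact values are platform- and version-dependent, but an empty list is
# always reported as smaller than a populated one.
print(sys.getsizeof([]) < sys.getsizeof(nums))      # True

# The optional second argument is a fallback returned when the object
# cannot report its size (relevant on implementations such as PyPy).
print(sys.getsizeof(object(), 24))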
Example #1
Source File: driver_s3.py From streamalert with Apache License 2.0 | 6 votes |
def gz_compress(driver, data):
    """
    Params:
        driver (PersistenceDriver)
        data (Bytes): Uncompressed data

    Return: Bytes
    """
    try:
        original_size = sys.getsizeof(data)
        data = zlib.compress(data, level=zlib.Z_BEST_COMPRESSION)
        LOGGER.debug(
            'LookupTable (%s): Successfully compressed input data from %d to %d bytes',
            driver.id, original_size, sys.getsizeof(data)
        )
        return data
    except zlib.error:
        LOGGER.exception('LookupTable (%s): Data compression error.', driver.id)
Example #2
Source File: storage.py From workload-collocation-agent with Apache License 2.0 | 6 votes |
def divide_message(msg):
    """Kafka won't accept more than 1Mb messages, therefore too big
    messages need to be divided into smaller chunks"""
    MAX_SIZE = 10 ** 5
    devided_message = []
    msg_size = sys.getsizeof(msg)
    if msg_size < MAX_SIZE:
        return [msg]
    else:
        message = msg.split('\n')
        new_message = ''
        for i in range(len(message)):
            new_metric = ''
            while message[i].startswith('#'):
                new_metric += message[i] + '\n'
                i += 1
            new_metric += message[i] + '\n'
            if sys.getsizeof(new_message + new_metric) > MAX_SIZE and new_message:
                devided_message.append(new_message)
                new_message = new_metric
            else:
                new_message += new_metric
        return devided_message
Example #3
Source File: logstalgia.py From xenon with GNU General Public License v3.0 | 6 votes |
def get_size(self, msg, seen=None):
    """Recursively finds size of objects"""
    size = sys.getsizeof(msg)
    if seen is None:
        seen = set()
    obj_id = id(msg)
    if obj_id in seen:
        return 0
    seen.add(obj_id)
    if isinstance(msg, dict):
        size += sum([self.get_size(v, seen) for v in msg.values()])
        size += sum([self.get_size(k, seen) for k in msg.keys()])
    elif hasattr(msg, '__dict__'):
        size += self.get_size(msg.__dict__, seen)
    elif hasattr(msg, '__iter__') and not isinstance(msg, (str, bytes, bytearray)):
        size += sum([self.get_size(i, seen) for i in msg])
    return size
Example #4
Source File: bot.py From bale-bot-python with Apache License 2.0 | 6 votes |
def send_photo(self, user_peer, image, caption_text="", name="", file_storage_version=1,
               mime_type="image/jpeg", success_callback=None, failure_callback=None, **kwargs):
    image_buffer = get_file_buffer(file=image)
    file_size = sys.getsizeof(image_buffer)
    im = Image.open(io.BytesIO(image_buffer))
    width, height = im.size
    thumb = get_image_thumbnails(im)

    def success_upload_image(user_data, server_response):
        file_id = str(server_response.get("file_id", None))
        access_hash = str(server_response.get("user_id", None))
        photo_message = PhotoMessage(file_id=file_id, access_hash=access_hash, name=name,
                                     file_size=file_size, mime_type=mime_type,
                                     file_storage_version=file_storage_version,
                                     width=width, height=height,
                                     caption_text=TextMessage(text=caption_text), thumb=thumb)
        self.send_message(message=photo_message, peer=user_peer,
                          success_callback=success_callback,
                          failure_callback=failure_callback, kwargs=kwargs)

    self.upload_file(file=image, file_type="file", success_callback=success_upload_image,
                     failure_callback=failure_callback)
Example #5
Source File: perf_util.py From indy-plenum with Apache License 2.0 | 6 votes |
def get_size(obj, seen=None):
    """Recursively finds size of objects"""
    size = sys.getsizeof(obj)
    if seen is None:
        seen = set()
    obj_id = id(obj)
    if obj_id in seen:
        return 0
    # Important mark as seen *before* entering recursion to gracefully handle
    # self-referential objects
    seen.add(obj_id)
    if isinstance(obj, dict):
        size += sum([get_size(v, seen) for v in obj.values()])
        size += sum([get_size(k, seen) for k in obj.keys()])
    elif hasattr(obj, '__dict__'):
        size += get_size(obj.__dict__, seen)
    elif hasattr(obj, '__iter__') and not isinstance(obj, (str, bytes, bytearray)):
        size += sum([get_size(i, seen) for i in obj])
    return size
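As a hedged usage sketch (mine, not part of the original project), the snippet below contrasts the shallow sys.getsizeof() with the recursive get_size() recipe above; it assumes get_size() from Example #5 has been copied into the same module.

import sys

# Assumes get_size() from Example #5 is defined in this module.
data = {'a': [1, 2, 3], 'b': {'nested': 'x' * 1000}}

print(sys.getsizeof(data))   # shallow: only the outer dict object
print(get_size(data))        # deep: dict, keys, values and nested containers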
Example #6
Source File: multi.py From recruit with Apache License 2.0 | 6 votes |
def _nbytes(self, deep=False):
    """
    return the number of bytes in the underlying data
    deeply introspect the level data if deep=True

    include the engine hashtable

    *this is in internal routine*
    """
    # for implementations with no useful getsizeof (PyPy)
    objsize = 24

    level_nbytes = sum(i.memory_usage(deep=deep) for i in self.levels)
    label_nbytes = sum(i.nbytes for i in self.codes)
    names_nbytes = sum(getsizeof(i, objsize) for i in self.names)
    result = level_nbytes + label_nbytes + names_nbytes

    # include our engine hashtable
    result += self._engine.sizeof(deep=deep)
    return result

# --------------------------------------------------------------------
# Rendering Methods
Example #7
Source File: utils.py From mars with Apache License 2.0 | 6 votes |
def calc_data_size(dt):
    if dt is None:
        return 0

    if isinstance(dt, tuple):
        return sum(calc_data_size(c) for c in dt)

    if hasattr(dt, 'nbytes'):
        return max(sys.getsizeof(dt), dt.nbytes)
    if hasattr(dt, 'shape') and len(dt.shape) == 0:
        return 0
    if hasattr(dt, 'memory_usage') or hasattr(dt, 'groupby_obj'):
        return sys.getsizeof(dt)
    if hasattr(dt, 'dtypes') and hasattr(dt, 'shape'):
        return dt.shape[0] * sum(dtype.itemsize for dtype in dt.dtypes)
    if hasattr(dt, 'dtype') and hasattr(dt, 'shape'):
        return dt.shape[0] * dt.dtype.itemsize

    # object chunk
    return sys.getsizeof(dt)
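The max(sys.getsizeof(dt), dt.nbytes) branch above exists because, for NumPy arrays, getsizeof() only counts memory attributed to the object itself: a view reports a large nbytes but owns no buffer. A small illustrative sketch of my own (not from the mars project):

import sys
import numpy as np

owning = np.ones((1000, 1000))   # owns its ~8 MB buffer
view = owning[:500]              # shares the buffer of `owning`

# nbytes describes the data the array exposes; getsizeof() reports only
# memory attributed to the object itself, so the view looks tiny.
print(owning.nbytes, sys.getsizeof(owning))
print(view.nbytes, sys.getsizeof(view))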
Example #8
Source File: context.py From mars with Apache License 2.0 | 6 votes |
def get_chunk_metas(self, chunk_keys, filter_fields=None):
    if filter_fields is not None:  # pragma: no cover
        raise NotImplementedError("Local context doesn't support filter fields now")
    metas = []
    for chunk_key in chunk_keys:
        chunk_data = self.get(chunk_key)
        if chunk_data is None:
            metas.append(None)
            continue
        if hasattr(chunk_data, 'nbytes'):
            # ndarray
            size = chunk_data.nbytes
            shape = chunk_data.shape
        elif hasattr(chunk_data, 'memory_usage'):
            # DataFrame
            size = chunk_data.memory_usage(deep=True).sum()
            shape = chunk_data.shape
        else:
            # other
            size = sys.getsizeof(chunk_data)
            shape = ()
        metas.append(ChunkMeta(chunk_size=size, chunk_shape=shape, workers=None))
    return metas
Example #9
Source File: multi.py From vnpy_crypto with MIT License | 6 votes |
def _nbytes(self, deep=False):
    """
    return the number of bytes in the underlying data
    deeply introspect the level data if deep=True

    include the engine hashtable

    *this is in internal routine*
    """
    # for implementations with no useful getsizeof (PyPy)
    objsize = 24

    level_nbytes = sum(i.memory_usage(deep=deep) for i in self.levels)
    label_nbytes = sum(i.nbytes for i in self.labels)
    names_nbytes = sum(getsizeof(i, objsize) for i in self.names)
    result = level_nbytes + label_nbytes + names_nbytes

    # include our engine hashtable
    result += self._engine.sizeof(deep=deep)
    return result
Example #10
Source File: size.py From bitmask-dev with GNU General Public License v3.0 | 6 votes |
def _get_size(item, seen):
    known_types = {dict: lambda d: chain.from_iterable(d.items())}
    default_size = getsizeof(0)

    def size_walk(item):
        if id(item) in seen:
            return 0
        seen.add(id(item))
        s = getsizeof(item, default_size)
        for _type, fun in known_types.iteritems():
            if isinstance(item, _type):
                s += sum(map(size_walk, fun(item)))
                break
        return s

    return size_walk(item)
Example #11
Source File: test_base.py From vnpy_crypto with MIT License | 6 votes |
def test_memory_usage(self):
    for o in self.objs:
        res = o.memory_usage()
        res_deep = o.memory_usage(deep=True)

        if (is_object_dtype(o) or (isinstance(o, Series) and
                                   is_object_dtype(o.index))):
            # if there are objects, only deep will pick them up
            assert res_deep > res
        else:
            assert res == res_deep

        if isinstance(o, Series):
            assert ((o.memory_usage(index=False) +
                     o.index.memory_usage()) ==
                    o.memory_usage(index=True))

        # sys.getsizeof will call the .memory_usage with
        # deep=True, and add on some GC overhead
        diff = res_deep - sys.getsizeof(o)
        assert abs(diff) < 100
Example #12
Source File: test_sys.py From oss-ftp with MIT License | 6 votes |
def test_errors(self):
    class BadSizeof(object):
        def __sizeof__(self):
            raise ValueError
    self.assertRaises(ValueError, sys.getsizeof, BadSizeof())

    class InvalidSizeof(object):
        def __sizeof__(self):
            return None
    self.assertRaises(TypeError, sys.getsizeof, InvalidSizeof())
    sentinel = ["sentinel"]
    self.assertIs(sys.getsizeof(InvalidSizeof(), sentinel), sentinel)

    class OverflowSizeof(long):
        def __sizeof__(self):
            return int(self)
    self.assertEqual(sys.getsizeof(OverflowSizeof(sys.maxsize)),
                     sys.maxsize + self.gc_headsize)
    with self.assertRaises(OverflowError):
        sys.getsizeof(OverflowSizeof(sys.maxsize + 1))
    with self.assertRaises(ValueError):
        sys.getsizeof(OverflowSizeof(-1))
    with self.assertRaises((ValueError, OverflowError)):
        sys.getsizeof(OverflowSizeof(-sys.maxsize - 1))
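For context on what the test above exercises: sys.getsizeof() delegates to the object's __sizeof__() method and, for gc-tracked objects, adds the garbage-collector header on top. A small sketch with a hypothetical Buffer class of my own:

import sys

class Buffer(object):
    """Toy container that reports its payload via __sizeof__ (illustrative only)."""
    def __init__(self, payload):
        self.payload = payload

    def __sizeof__(self):
        # object.__sizeof__ covers the bare instance; add the payload size.
        return object.__sizeof__(self) + sys.getsizeof(self.payload)

buf = Buffer(b'x' * 1024)
# getsizeof() returns __sizeof__() plus, for gc-tracked objects, the
# garbage-collector header size.
print(sys.getsizeof(buf))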
Example #13
Source File: util.py From ConvLab with MIT License | 6 votes |
def _sizeof(obj, seen=None):
    '''Recursively finds size of objects'''
    size = sys.getsizeof(obj)
    if seen is None:
        seen = set()
    obj_id = id(obj)
    if obj_id in seen:
        return 0
    # Important mark as seen *before* entering recursion to gracefully handle
    # self-referential objects
    seen.add(obj_id)
    if isinstance(obj, dict):
        size += sum([_sizeof(v, seen) for v in obj.values()])
        size += sum([_sizeof(k, seen) for k in obj.keys()])
    elif hasattr(obj, '__dict__'):
        size += _sizeof(obj.__dict__, seen)
    elif hasattr(obj, '__iter__') and not isinstance(obj, (str, bytes, bytearray)):
        size += sum([_sizeof(i, seen) for i in obj])
    return size
Example #14
Source File: driver_s3.py From streamalert with Apache License 2.0 | 6 votes |
def gz_decompress(driver, data):
    """
    Params:
        driver (PersistenceDriver)
        data (Bytes): Compressed data

    Return: Bytes
    """
    try:
        data = zlib.decompress(data, 47)
        LOGGER.debug(
            'LookupTable (%s): Object decompressed to %d byte payload',
            driver.id, sys.getsizeof(data)
        )
    except zlib.error:
        LOGGER.warning(
            'LookupTable (%s): Data is not compressed; defaulting to original payload',
            driver.id
        )
    return data
Example #15
Source File: test_sys.py From ironpython2 with Apache License 2.0 | 6 votes |
def test_errors(self):
    class BadSizeof(object):
        def __sizeof__(self):
            raise ValueError
    self.assertRaises(ValueError, sys.getsizeof, BadSizeof())

    class InvalidSizeof(object):
        def __sizeof__(self):
            return None
    self.assertRaises(TypeError, sys.getsizeof, InvalidSizeof())
    sentinel = ["sentinel"]
    self.assertIs(sys.getsizeof(InvalidSizeof(), sentinel), sentinel)

    class OverflowSizeof(long):
        def __sizeof__(self):
            return int(self)
    self.assertEqual(sys.getsizeof(OverflowSizeof(sys.maxsize)),
                     sys.maxsize + self.gc_headsize)
    with self.assertRaises(OverflowError):
        sys.getsizeof(OverflowSizeof(sys.maxsize + 1))
    with self.assertRaises(ValueError):
        sys.getsizeof(OverflowSizeof(-1))
    with self.assertRaises((ValueError, OverflowError)):
        sys.getsizeof(OverflowSizeof(-sys.maxsize - 1))
Example #16
Source File: train_op.py From RelativePose with BSD 3-Clause "New" or "Revised" License | 6 votes |
def get_size(obj, seen=None):
    """Recursively finds size of objects"""
    size = sys.getsizeof(obj)
    if seen is None:
        seen = set()
    obj_id = id(obj)
    if obj_id in seen:
        return 0
    # Important mark as seen *before* entering recursion to gracefully handle
    # self-referential objects
    seen.add(obj_id)
    if isinstance(obj, dict):
        size += sum([get_size(v, seen) for v in obj.values()])
        size += sum([get_size(k, seen) for k in obj.keys()])
    elif hasattr(obj, '__dict__'):
        size += get_size(obj.__dict__, seen)
    elif hasattr(obj, '__iter__') and not isinstance(obj, (str, bytes, bytearray)):
        size += sum([get_size(i, seen) for i in obj])
    return size
Example #17
Source File: unicorn_binance_websocket_api_connection.py From unicorn-binance-websocket-api with MIT License | 6 votes |
async def receive(self):
    self.handler_binance_websocket_api_manager.set_heartbeat(self.stream_id)
    try:
        received_data_json = await self.handler_binance_websocket_api_manager.websocket_list[self.stream_id].recv()
        try:
            if self.handler_binance_websocket_api_manager.restart_requests[self.stream_id]['status'] == "restarted":
                self.handler_binance_websocket_api_manager.increase_reconnect_counter(self.stream_id)
                del self.handler_binance_websocket_api_manager.restart_requests[self.stream_id]
        except KeyError:
            pass
        if received_data_json is not None:
            size = sys.getsizeof(received_data_json)
            self.handler_binance_websocket_api_manager.increase_processed_receives_statistic(self.stream_id)
            self.handler_binance_websocket_api_manager.add_total_received_bytes(size)
            self.handler_binance_websocket_api_manager.increase_received_bytes_per_second(self.stream_id, size)
        return received_data_json
    except RuntimeError as error_msg:
        logging.debug("binance_websocket_api_connection->receive(" + str(self.stream_id) +
                      ") - RuntimeError - error_msg: " + str(error_msg))
        sys.exit(1)
    except ssl.SSLError as error_msg:
        logging.debug("binance_websocket_api_connection->receive(" + str(self.stream_id) +
                      ") - ssl.SSLError - error_msg: " + str(error_msg))
    except KeyError as error_msg:
        logging.debug("binance_websocket_api_connection->receive(" + str(self.stream_id) +
                      ") - KeyError - error_msg: " + str(error_msg))
        self.handler_binance_websocket_api_manager.stream_is_stopping(self.stream_id)
        if self.handler_binance_websocket_api_manager.is_stop_request(self.stream_id) is False:
            self.handler_binance_websocket_api_manager.set_restart_request(self.stream_id)
        sys.exit(1)
    except asyncio.base_futures.InvalidStateError as error_msg:
        logging.critical("binance_websocket_api_connection->receive(" + str(self.stream_id) +
                         ") - asyncio.base_futures.InvalidStateError - error_msg: " + str(error_msg) +
                         " - Extra info: https://github.com/oliver-zehentleitner/unicorn-binance-"
                         "websocket-api/issues/18 - open an own issue if needed!")
        self.handler_binance_websocket_api_manager.stream_is_stopping(self.stream_id)
        if self.handler_binance_websocket_api_manager.is_stop_request(self.stream_id) is False:
            self.handler_binance_websocket_api_manager.set_restart_request(self.stream_id)
        sys.exit(1)
Example #18
Source File: simpletable.py From TheCannon with MIT License | 5 votes |
def nbytes(self):
    """ number of bytes of the object """
    n = sum(k.nbytes if hasattr(k, 'nbytes') else sys.getsizeof(k)
            for k in self.__dict__.values())
    return n
Example #19
Source File: utils.py From pybtc with GNU General Public License v3.0 | 5 votes |
def set(self, key, value):
    self._check_limit()
    self._store[key] = value
    self._store_size += sys.getsizeof(value) + sys.getsizeof(key)
Example #20
Source File: test_mmap.py From oss-ftp with MIT License | 5 votes |
def test_sizeof(self):
    m1 = mmap.mmap(-1, 100)
    tagname = "foo"
    m2 = mmap.mmap(-1, 100, tagname=tagname)
    self.assertEqual(sys.getsizeof(m2),
                     sys.getsizeof(m1) + len(tagname) + 1)
Example #21
Source File: test_sys.py From oss-ftp with MIT License | 5 votes |
def test_gc_head_size(self):
    # Check that the gc header size is added to objects tracked by the gc.
    size = test.test_support.calcobjsize
    gc_header_size = self.gc_headsize
    # bool objects are not gc tracked
    self.assertEqual(sys.getsizeof(True), size('l'))
    # but lists are
    self.assertEqual(sys.getsizeof([]), size('P PP') + gc_header_size)
Example #22
Source File: test_support.py From oss-ftp with MIT License | 5 votes |
def check_sizeof(test, o, size):
    import _testcapi
    result = sys.getsizeof(o)
    # add GC header size
    if ((type(o) == type) and (o.__flags__ & _TPFLAGS_HEAPTYPE) or\
       ((type(o) != type) and (type(o).__flags__ & _TPFLAGS_HAVE_GC))):
        size += _testcapi.SIZEOF_PYGC_HEAD
    msg = 'wrong size for %s: got %d, expected %d' \
          % (type(o), result, size)
    test.assertEqual(result, size, msg)

#=======================================================================
# Decorator for running a function in a different locale, correctly resetting
# it afterwards.
Example #23
Source File: __init__.py From kobo-predict with BSD 2-Clause "Simplified" License | 5 votes |
def __sizeof__(self):
    sizeof = _sys.getsizeof
    n = len(self) + 1                       # number of links including root
    size = sizeof(self.__dict__)            # instance dictionary
    size += sizeof(self.__map) * 2          # internal dict and inherited dict
    size += sizeof(self.__hardroot) * n     # link objects
    size += sizeof(self.__root) * n         # proxy objects
    return size
Example #24
Source File: test_sys.py From ironpython2 with Apache License 2.0 | 5 votes |
def test_default(self):
    size = test.test_support.calcobjsize
    self.assertEqual(sys.getsizeof(True, -1), size('l'))
Example #25
Source File: test_scalarmath.py From vnpy_crypto with MIT License | 5 votes |
def test_equal_nbytes(self):
    for type in types:
        x = type(0)
        assert_(sys.getsizeof(x) > x.nbytes)
Example #26
Source File: range.py From vnpy_crypto with MIT License | 5 votes |
def nbytes(self):
    """
    Return the number of bytes in the underlying data
    On implementations where this is undetermined (PyPy)
    assume 24 bytes for each value
    """
    return sum(getsizeof(getattr(self, v), 24) for v in
               ['_start', '_stop', '_step'])
Example #27
Source File: array.py From vnpy_crypto with MIT License | 5 votes |
def nbytes(self):
    return sys.getsizeof(self.data)
Example #28
Source File: test_analytics.py From vnpy_crypto with MIT License | 5 votes |
def test_memory_usage(self):

    cat = Categorical([1, 2, 3])

    # .categories is an index, so we include the hashtable
    assert 0 < cat.nbytes <= cat.memory_usage()
    assert 0 < cat.nbytes <= cat.memory_usage(deep=True)

    cat = Categorical(['foo', 'foo', 'bar'])
    assert cat.memory_usage(deep=True) > cat.nbytes

    if not PYPY:
        # sys.getsizeof will call the .memory_usage with
        # deep=True, and add on some GC overhead
        diff = cat.memory_usage(deep=True) - sys.getsizeof(cat)
        assert abs(diff) < 100
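The pandas tests above lean on the fact that pandas objects implement __sizeof__ in terms of memory_usage(deep=True), so sys.getsizeof() lands within a small GC-overhead margin of it. A rough sketch of my own, using a Series of strings rather than a Categorical:

import sys
import pandas as pd

s = pd.Series(['foo', 'foo', 'bar'])

# deep=True introspects the object-dtype values; the shallow variant only
# counts the pointer array and the index.
print(s.memory_usage(deep=True) > s.memory_usage(deep=False))   # True

# __sizeof__ delegates to memory_usage(deep=True), so the difference is
# just a small amount of GC overhead (the tests above bound it by 100).
print(sys.getsizeof(s) - s.memory_usage(deep=True))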
Example #29
Source File: test_base.py From vnpy_crypto with MIT License | 5 votes |
def test_memory_usage(self):
    # Delegate does not implement memory_usage.
    # Check that we fall back to in-built `__sizeof__`
    # GH 12924
    delegate = self.Delegate(self.Delegator())
    sys.getsizeof(delegate)
Example #30
Source File: array.py From vnpy_crypto with MIT License | 5 votes |
def nbytes(self):
    n = len(self)
    if n:
        return n * sys.getsizeof(self[0])
    return 0