Python msgpack.unpackb() Examples
The following are 30 code examples of msgpack.unpackb(), drawn from open source projects. The source file, project, and license are noted above each example. You may also want to check out the other available functions and classes of the msgpack module.
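
Before the project examples, here is a minimal round-trip sketch (not taken from any of the projects below) showing the packb()/unpackb() pattern most of these examples build on; the dictionary contents are purely illustrative.

import msgpack

# Serialize a dict to msgpack bytes, then decode it back.
# use_bin_type=True distinguishes bytes from str on the wire;
# raw=False makes unpackb return str for msgpack strings instead of bytes.
payload = {"name": "example", "values": [1, 2, 3]}
packed = msgpack.packb(payload, use_bin_type=True)
unpacked = msgpack.unpackb(packed, raw=False)
assert unpacked == payload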
Example #1
Source File: communicator.py From grimoire with GNU Affero General Public License v3.0 | 6 votes |
def wait(self):
    results = []
    print "selecting"
    r, w, e = select.select(self.clients, (), ())
    for sock_ready in r:
        if sock_ready == self.listener:
            print "accepting new client"
            c = self.listener.accept()
            self.clients.append(c)
        else:
            try:
                msg = sock_ready.recv_bytes()
                msg = msgpack.unpackb(msg)
                results.append((sock_ready, msg))
                # print "received {}".format(msg)
            except EOFError:
                print "closing"
                sock_ready.close()
                self.clients.remove(sock_ready)
    return results
Example #2
Source File: base.py From intake with BSD 2-Clause "Simplified" License | 6 votes |
def search(self, *args, **kwargs):
    import requests
    request = {'action': 'search',
               'query': (args, kwargs),
               'source_id': self._source_id}
    response = requests.post(
        url=self.source_url,
        **self._get_http_args({}),
        data=msgpack.packb(request, **pack_kwargs))
    try:
        response.raise_for_status()
    except requests.HTTPError as err:
        raise RemoteCatalogError("Failed search query.") from err
    source = msgpack.unpackb(response.content, **unpack_kwargs)
    source_id = source['source_id']
    cat = RemoteCatalog(
        url=self.url,
        http_args=self.http_args,
        source_id=source_id,
        name="")
    cat.cat = self
    return cat
Example #3
Source File: base.py From intake with BSD 2-Clause "Simplified" License | 6 votes |
def fetch_by_name(self, name):
    import requests
    logger.debug("Requesting info about entry named '%s'", name)
    params = {'name': name}
    http_args = self._get_http_args(params)
    response = requests.get(self.source_url, **http_args)
    if response.status_code == 404:
        raise KeyError(name)
    try:
        response.raise_for_status()
    except requests.HTTPError as err:
        raise RemoteCatalogError(
            "Failed to fetch entry {!r}.".format(name)) from err
    info = msgpack.unpackb(response.content, **unpack_kwargs)
    return RemoteCatalogEntry(
        url=self.url,
        getenv=self.getenv,
        getshell=self.getshell,
        auth=self.auth,
        http_args=self.http_args,
        page_size=self._page_size,
        **info['source'])
Example #4
Source File: MessageProcessor.py From ReadableWebProxy with BSD 3-Clause "New" or "Revised" License | 6 votes |
def __unchunk(self, new_message, worker_name):
    new = msgpack.unpackb(new_message, encoding='utf-8', use_list=False)

    # If we don't have a chunking type, it's probably an old-style message.
    if not 'chunk-type' in new:
        return new

    # Messages smaller then the chunk_size are not split, and can just be returned.
    if new['chunk-type'] == "complete-message":
        assert 'chunk-type' in new
        assert 'data' in new
        return new['data']
    elif new['chunk-type'] == "chunked-message":
        return self.__process_chunk(new, worker_name)
    else:
        raise RuntimeError("Unknown message type: %s", new['chunk-type'])
Example #5
Source File: NetlocThrottler.py From ReadableWebProxy with BSD 3-Clause "New" or "Revised" License | 6 votes |
def get_available_jobs(self):
    ret = []
    for job_netloc, key_dict in self.url_throttler.items():
        try:
            # Allow unlimited fetching if the site isn't erroring at all
            while key_dict['active_fetches'] <= key_dict['status_accumulator']:
                # ret.append(item['job_queue'].get(block=False))
                item_b = self.redis.lpop(self.__netloc_to_key(job_netloc))
                if not item_b:
                    # Nothing in queue
                    break
                item = msgpack.unpackb(item_b, use_list=False, raw=False)
                ret.append(item)
                key_dict['active_fetches'] += 1
                self.total_queued -= 1
        except queue.Empty:
            pass

    self.log.info("Extracted %s jobs from rate-limiting queues.", len(ret))
    return ret
Example #6
Source File: message.py From rssant with BSD 3-Clause "New" or "Revised" License | 6 votes |
def raw_decode(cls, data, content_encoding=None):
    content_encoding = ContentEncoding.of(content_encoding)
    if content_encoding.is_gzip:
        try:
            data = gzip.decompress(data)
        except (ValueError, TypeError):
            raise ActorMessageDecodeError('gzip decompress failed')
    try:
        if content_encoding.is_json:
            data = json.loads(data.decode('utf-8'))
        else:
            data = msgpack.unpackb(data, raw=False)
    except json.JSONDecodeError:
        raise ActorMessageDecodeError('json decode failed')
    except msgpack.UnpackException:
        raise ActorMessageDecodeError('msgpack decode failed')
    return data
Example #7
Source File: e3db52a480f8_alter_log_data_type.py From chainerui with MIT License | 6 votes |
def downgrade():
    conn = op.get_bind()

    temp_log_table = op.create_table(
        'temp_log',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('result_id', sa.Integer(), nullable=True),
        sa.Column('data', sa.String(length=1024), nullable=True),
        sa.ForeignKeyConstraint(['result_id'], ['result.id'], ),
        sa.PrimaryKeyConstraint('id'))

    res = conn.execute('SELECT id, result_id, data FROM log')
    results = res.fetchall()
    if len(results) > 0:
        modified_logs = [{
            'id': r[0],
            'result_id': r[1],
            'data': json.dumps(msgpack.unpackb(r[2], raw=False))}
            for r in results]
        op.bulk_insert(temp_log_table, modified_logs)

    op.drop_table('log')
    op.rename_table('temp_log', 'log')
Example #8
Source File: test_integration.py From pex with Apache License 2.0 | 6 votes |
def test_pex_manylinux_runtime():
    """Tests resolver manylinux support and runtime resolution (and --platform=current)."""
    test_stub = dedent(
        """
        import msgpack
        print(msgpack.unpackb(msgpack.packb([1, 2, 3])))
        """
    )

    with temporary_content({'tester.py': test_stub}) as output_dir:
        pex_path = os.path.join(output_dir, 'test.pex')
        tester_path = os.path.join(output_dir, 'tester.py')
        results = run_pex_command(['--disable-cache',
                                   '--no-build',
                                   'msgpack-python==0.4.7',
                                   '--platform=current',
                                   '-o', pex_path])
        results.assert_success()

        out = subprocess.check_output([pex_path, tester_path])
        assert out.strip() == '[1, 2, 3]'
Example #9
Source File: socket_environment.py From tensorforce with Apache License 2.0 | 6 votes |
def proxy_receive(cls, connection):
    str_success = connection.recv(1)
    if len(str_success) != 1:
        raise TensorforceError.unexpected()
    success = bool(str_success)
    str_num_bytes = connection.recv(8)
    if len(str_num_bytes) != 8:
        raise TensorforceError.unexpected()
    num_bytes = int(str_num_bytes.decode())
    str_result = b''
    for n in range(num_bytes // cls.MAX_BYTES):
        str_result += connection.recv(cls.MAX_BYTES)
        if len(str_result) != n * cls.MAX_BYTES:
            raise TensorforceError.unexpected()
    str_result += connection.recv(num_bytes % cls.MAX_BYTES)
    if len(str_result) != num_bytes:
        raise TensorforceError.unexpected()
    result = msgpack.unpackb(packed=str_result)
    decode = (lambda x: x.decode() if isinstance(x, bytes) else x)
    result = util.fmap(function=decode, xs=result, map_keys=True)
    return success, result
Example #10
Source File: scanner.py From redis-memory-analyzer with MIT License | 6 votes |
def resolve_types(self, ret):
    if not self.pipeline_mode:
        try:
            key_with_types = msgpack.unpackb(self.resolve_types_script(ret))
        except ResponseError as e:
            if "CROSSSLOT" not in repr(e):
                raise e
            key_with_types = self.resolve_with_pipe(ret)
            self.pipeline_mode = True
    else:
        key_with_types = self.resolve_with_pipe(ret)

    for i in range(0, len(ret)):
        yield key_with_types[i], ret[i]

    ret.clear()
Example #11
Source File: test_func.py From sharq with MIT License | 6 votes |
def test_enqueue_payload_encode_decode(self):
    job_id = self._get_job_id()
    response = self.queue.enqueue(
        payload=self._test_payload_1,
        interval=10000,  # 10s (10000ms)
        job_id=job_id,
        queue_id=self._test_queue_id,
        queue_type=self._test_queue_type,
    )

    payload_map_name = '%s:payload' % (self.queue._key_prefix)
    payload_map_key = '%s:%s:%s' % (
        self._test_queue_type, self._test_queue_id, job_id)
    raw_payload = self.queue._r.hget(payload_map_name, payload_map_key)
    # decode the payload from msgpack to dictionary
    payload = msgpack.unpackb(raw_payload[1:-1])
    self.assertEqual(payload, self._test_payload_1)
Example #12
Source File: abstractClient.py From zatt with GNU Affero General Public License v3.0 | 6 votes |
def _request(self, message):
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.connect(self.server_address)
    sock.send(msgpack.packb(message, use_bin_type=True))

    buff = bytes()
    while True:
        block = sock.recv(128)
        if not block:
            break
        buff += block
    resp = msgpack.unpackb(buff, encoding='utf-8')
    sock.close()

    if 'type' in resp and resp['type'] == 'redirect':
        self.server_address = tuple(resp['leader'])
        resp = self._request(message)
    return resp
Example #13
Source File: log.py From chainerui with MIT License | 6 votes |
def serialize(self):
    """serialize."""
    log_dict = {}
    data = msgpack.unpackb(self.data, raw=False)
    for item in data.items():
        value_to_store = (
            None
            if not isinstance(item[1], numbers.Number) or
            isinf(item[1]) or isnan(item[1])
            else item[1]
        )
        log_dict[item[0]] = value_to_store

    return {
        'id': self.id,
        'resultId': self.result_id,
        'logDict': log_dict
    }
Example #14
Source File: param_cache.py From rafiki with Apache License 2.0 | 5 votes |
def _deserialize_params(params_bytes):
    # Deserialize as `msgpack`
    params_simple = msgpack.unpackb(params_bytes, raw=False)
    params = _unsimplify_params(params_simple)
    return params
Example #15
Source File: test_client.py From nats-python with MIT License | 5 votes |
def test_request_msgpack(nats_plain_url):
    def worker():
        with NATSClient(nats_plain_url, socket_timeout=2) as client:

            def callback(message):
                client.publish(
                    message.reply,
                    payload=msgpack.packb(
                        {b"v": 3338} if message.payload else {b"v": 32}
                    ),
                )

            client.subscribe(
                "test-subject", callback=callback, queue="test-queue", max_messages=2
            )
            client.wait(count=2)

    t = threading.Thread(target=worker)
    t.start()

    time.sleep(1)

    with NATSClient(nats_plain_url, socket_timeout=2) as client:
        # request without payload
        resp = client.request("test-subject")
        assert resp.subject.startswith("_INBOX.")
        assert resp.reply == ""
        assert msgpack.unpackb(resp.payload) == {b"v": 32}

        # request with payload
        resp = client.request("test-subject", payload=msgpack.packb("test-payload"))
        assert resp.subject.startswith("_INBOX.")
        assert resp.reply == ""
        assert msgpack.unpackb(resp.payload) == {b"v": 3338}

    t.join()
Example #16
Source File: param_store.py From rafiki with Apache License 2.0 | 5 votes |
def _deserialize_params(params_bytes):
    # Deserialize as `msgpack`
    params_simple = msgpack.unpackb(params_bytes, raw=False)
    params = _unsimplify_params(params_simple)
    return params
Example #17
Source File: test_sender.py From iris with BSD 2-Clause "Simplified" License | 5 votes |
def test_generate_slave_message_payload():
    from iris.sender.rpc import generate_msgpack_message_payload
    data = {
        'ids': set([1, 2, 3, 4])
    }
    result = generate_msgpack_message_payload(data)
    assert msgpack.unpackb(result) == {
        b'endpoint': b'v0/slave_send',
        b'data': {
            b'ids': [1, 2, 3, 4]
        }
    }
Example #18
Source File: example_correlation_with_globals.py From xi-iot with MIT License | 5 votes |
def main(ctx, msg):
    global image_payload
    global label_payload

    if ctx.get_topic() == "images":
        '''
        Use get_topic to determine the MQTT topic.
        If we receive an image message then store the payload in image_payload global.
        '''
        logging.info("***** Image message received *****")
        logging.info("***** Unpacking message *****")
        unpacked_dict = msgpack.unpackb(msg, raw=True)
        image = numpy.fromstring(unpacked_dict["Data"], dtype=unpacked_dict["DataType"])
        image = image.reshape((unpacked_dict["Height"], unpacked_dict["Width"], unpacked_dict["Channels"]))
        _, img_encoded = cv2.imencode('.jpg', image)
        encodedStr = base64.b64encode(img_encoded)
        logging.info("***** Storing unpacked frame payload in memory *****")
        payload = {}
        payload['timestamp'] = ctx.get_timestamp()
        payload['image'] = encodedStr
        image_payload = payload
        return
    elif ctx.get_topic() == "labels":
        logging.info("***** Label message received *****")
        if image_payload == "init":
            logging.info("***** Label message received, but no image payload in memory *****")
        else:
            # If we receive a label and have image_payload then combine them.
            logging.info("***** Retrieving unpacked frame payload from memory *****")
            payload = {}
            payload['image_timestamp'] = image_payload['timestamp']
            payload['image'] = image_payload['image']
            logging.info("***** Adding label payload to image payload *****")
            payload['label_timestamp'] = ctx.get_timestamp()
            payload['label'] = msg
            # Return the combined payload as a single JSON.
            return ctx.send(json.dumps(payload))
    else:
        return
Example #19
Source File: example_correlation_with_globals.py From xi-iot with MIT License | 5 votes |
def main(ctx, msg):
    global image_payload
    global label_payload

    if ctx.get_topic() == "images":
        '''
        Use get_topic to determine the MQTT topic.
        If we receive an image message then store the payload in image_payload global.
        '''
        logging.info("***** Image message received *****")
        logging.info("***** Unpacking message *****")
        unpacked_dict = msgpack.unpackb(msg, raw=True)
        image = numpy.fromstring(unpacked_dict["Data"], dtype=unpacked_dict["DataType"])
        image = image.reshape((unpacked_dict["Height"], unpacked_dict["Width"], unpacked_dict["Channels"]))
        _, img_encoded = cv2.imencode('.jpg', image)
        encodedStr = base64.b64encode(img_encoded)
        logging.info("***** Storing unpacked frame payload in memory *****")
        payload = {}
        payload['timestamp'] = ctx.get_timestamp()
        payload['image'] = encodedStr
        image_payload = payload
        return
    elif ctx.get_topic() == "labels":
        logging.info("***** Label message received *****")
        if image_payload == "init":
            logging.info("***** Label message received, but no image payload in memory *****")
        else:
            # If we receive a label and have image_payload then combine them.
            logging.info("***** Retrieving unpacked frame payload from memory *****")
            payload = {}
            payload['image_timestamp'] = image_payload['timestamp']
            payload['image'] = image_payload['image']
            logging.info("***** Adding label payload to image payload *****")
            payload['label_timestamp'] = ctx.get_timestamp()
            payload['label'] = msg
            # Return the combined payload as a single JSON.
            return ctx.send(json.dumps(payload))
    else:
        return
Example #20
Source File: raw_to_jpeg.py From xi-iot with MIT License | 5 votes |
def main(ctx, msg):
    unpacked_dict = msgpack.unpackb(msg, raw=False, max_bin_len=3145728)
    image = numpy.fromstring(unpacked_dict["Data"], dtype=unpacked_dict["DataType"])
    image = image.reshape((unpacked_dict["Height"], unpacked_dict["Width"], unpacked_dict["Channels"]))
    cvImage = cv2.cvtColor(image, cv2.COLOR_RGB2BGR)
    _, img_encoded = cv2.imencode('.jpg', cvImage)
    encodedStr = base64.b64encode(img_encoded)
    payload = {}
    payload['data'] = encodedStr
    ctx.send(json.dumps(payload))
    return
Example #21
Source File: banyan_base_aio.py From python_banyan with GNU Affero General Public License v3.0 | 5 votes |
def unpack(self, data):
    return msgpack.unpackb(data, raw=False)
Example #22
Source File: test_worker.py From arq with MIT License | 5 votes |
async def test_incompatible_serializers_2(arq_redis: ArqRedis, worker):
    await arq_redis.enqueue_job('foobar', _job_id='job_id')
    worker: Worker = worker(
        functions=[foobar],
        job_serializer=msgpack.packb,
        job_deserializer=functools.partial(msgpack.unpackb, raw=False),
    )
    await worker.main()
    assert worker.jobs_complete == 0
    assert worker.jobs_failed == 1
    assert worker.jobs_retried == 0
Example #23
Source File: test_worker.py From arq with MIT License | 5 votes |
async def test_custom_serializers(arq_redis_msgpack: ArqRedis, worker):
    j = await arq_redis_msgpack.enqueue_job('foobar', _job_id='job_id')
    worker: Worker = worker(
        functions=[foobar],
        job_serializer=msgpack.packb,
        job_deserializer=functools.partial(msgpack.unpackb, raw=False),
    )
    info = await j.info()
    assert info.function == 'foobar'
    assert await worker.run_check() == 1
    assert await j.result() == 42
    r = await j.info()
    assert r.result == 42
Example #24
Source File: conftest.py From arq with MIT License | 5 votes |
async def arq_redis_msgpack(loop):
    redis_ = await create_redis_pool(
        ('localhost', 6379),
        encoding='utf8',
        loop=loop,
        commands_factory=functools.partial(
            ArqRedis,
            job_serializer=msgpack.packb,
            job_deserializer=functools.partial(msgpack.unpackb, raw=False),
        ),
    )
    await redis_.flushall()
    yield redis_
    redis_.close()
    await redis_.wait_closed()
Example #25
Source File: msfrpc.py From Autobloodhound with MIT License | 5 votes |
def decode(self, data):
    return msgpack.unpackb(data)
Example #26
Source File: msgpackutils.py From oslo.serialization with Apache License 2.0 | 5 votes |
def loads(s, registry=None):
    """Deserialize ``s`` messagepack ``str`` into a Python object.

    .. versionchanged:: 1.5
       Added *registry* parameter.
    """
    if registry is None:
        registry = default_registry
    ext_hook = functools.partial(_unserializer, registry)
    return msgpack.unpackb(s, ext_hook=ext_hook, raw=False)
Example #27
Source File: msgpackutils.py From oslo.serialization with Apache License 2.0 | 5 votes |
def deserialize(data):
    return netaddr.IPAddress(msgpack.unpackb(data))
Example #28
Source File: msgpackutils.py From oslo.serialization with Apache License 2.0 | 5 votes |
def deserialize(data):
    value = msgpack.unpackb(data)
    start, step = value
    return itertools.count(start, step)
Example #29
Source File: test_server.py From intake with BSD 2-Clause "Simplified" License | 5 votes |
def decode(self, bytestr):
    return msgpack.unpackb(bytestr, **unpack_kwargs)
Example #30
Source File: serializer.py From intake with BSD 2-Clause "Simplified" License | 5 votes |
def decode(self, bytestr, container):
    from ..compat import unpack_kwargs
    if container in ['ndarray', 'xarray'] and msgpack_numpy:
        from ..compat import np_unpack_kwargs
        return msgpack.unpackb(bytestr, **np_unpack_kwargs)
    elif container == 'dataframe':
        pa = check_pyarrow()
        context = pa.default_serialization_context()
        return context.deserialize(bytestr)
    else:
        return msgpack.unpackb(bytestr, **unpack_kwargs)