Python msgpack.dumps() Examples

The following are 30 code examples of msgpack.dumps(), drawn from open-source projects; the source file, project, and license are noted above each example. A few examples show neighboring json.dumps(), pickle.dumps(), and pyarrow usage from the same files for context. You may also want to check out all available functions/classes of the module msgpack, or try the search function.
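As a quick orientation before the examples: msgpack.dumps() is an alias for msgpack.packb() and returns bytes, with msgpack.loads()/msgpack.unpackb() as the inverse. A minimal round trip looks like this (a standalone sketch, not taken from any of the projects below):

import msgpack

payload = {'name': 'example', 'values': [1, 2, 3]}

# use_bin_type=True keeps str and bytes distinct in the packed stream
packed = msgpack.dumps(payload, use_bin_type=True)  # -> bytes

# raw=False decodes msgpack strings back to Python str
assert msgpack.loads(packed, raw=False) == payload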
Example #1
Source File: run.py    From pop with Apache License 2.0
async def send(hub, worker, payload):
    '''
    Send the given payload to the given worker, yielding iterations based
    on the returns from the remote.
    '''
    mp = msgpack.dumps(payload, use_bin_type=True)
    mp += hub.proc.DELIM
    reader, writer = await asyncio.open_unix_connection(path=worker['path'])
    writer.write(mp)
    await writer.drain()
    final_ret = True
    while True:
        ret = await reader.readuntil(hub.proc.DELIM)
        p_ret = ret[:-len(hub.proc.DELIM)]
        i_flag = p_ret[-1:]
        ret = msgpack.loads(p_ret[:-1], raw=False)
        if i_flag == hub.proc.D_FLAG:
            # break for the end of the sequence
            break
        yield ret
        final_ret = False
    if final_ret:
        yield ret 
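Read together with the worker-side gen() and work() functions in Examples #16 and #17 below, each response frame on the socket is the msgpack payload followed by a 1-byte flag and a delimiter. A minimal sketch of that framing (the flag and delimiter values here are placeholders, not pop's actual constants):

import msgpack

DELIM = b'\x00DELIM\x00'  # placeholder frame delimiter
I_FLAG = b'\x01'          # placeholder: intermediate chunk, more to come
D_FLAG = b'\x02'          # placeholder: done, end of the sequence

def frame(obj, flag):
    '''Pack obj, then append the flag byte and the frame delimiter.'''
    return msgpack.dumps(obj, use_bin_type=True) + flag + DELIM

def unframe(raw):
    '''Strip the delimiter, split off the flag byte, unpack the payload.'''
    body = raw[:-len(DELIM)]
    payload, flag = body[:-1], body[-1:]
    return msgpack.loads(payload, raw=False), flag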
Example #2
Source File: stone_serializers.py    From dropbox-sdk-python with MIT License
def json_compat_obj_encode(data_type, obj, caller_permissions=None, alias_validators=None,
                           old_style=False, for_msgpack=False, should_redact=False):
    """Encodes an object into a JSON-compatible dict based on its type.

    Args:
        data_type (Validator): Validator for obj.
        obj (object): Object to be serialized.
        caller_permissions (list): The list of raw-string caller permissions
            with which to serialize.

    Returns:
        An object that, when passed to json.dumps(), will produce a string
        giving the JSON-encoded object.

    See json_encode() for additional information about validation.
    """
    serializer = StoneToPythonPrimitiveSerializer(
        caller_permissions, alias_validators, for_msgpack, old_style, should_redact)
    return serializer.encode(data_type, obj)

# --------------------------------------------------------------
# JSON Decoder 
Example #3
Source File: serialization.py    From QCElemental with BSD 3-Clause "New" or "Revised" License
def jsonext_dumps(data: Any) -> str:
    """Safe serialization of Python objects to JSON string representation using all known encoders.
    The JSON serializer uses a custom array syntax rather than flat JSON lists.

    Parameters
    ----------
    data : Any
        An encodable Python object.

    Returns
    -------
    str
        A JSON representation of the data.
    """

    return json.dumps(data, cls=JSONExtArrayEncoder) 
Example #4
Source File: stone_serializers.py    From stone with MIT License
def json_compat_obj_encode(data_type, obj, caller_permissions=None, alias_validators=None,
                           old_style=False, for_msgpack=False, should_redact=False):
    """Encodes an object into a JSON-compatible dict based on its type.

    Args:
        data_type (Validator): Validator for obj.
        obj (object): Object to be serialized.
        caller_permissions (list): The list of raw-string caller permissions
            with which to serialize.

    Returns:
        An object that, when passed to json.dumps(), will produce a string
        giving the JSON-encoded object.

    See json_encode() for additional information about validation.
    """
    serializer = StoneToPythonPrimitiveSerializer(
        caller_permissions, alias_validators, for_msgpack, old_style, should_redact)
    return serializer.encode(data_type, obj)

# --------------------------------------------------------------
# JSON Decoder 
Example #5
Source File: serde.py    From PySyft with Apache License 2.0
def _serialize_msgpack_binary(
    simple_objects: object,
    worker: AbstractWorker = None,
    simplified: bool = False,
    force_full_simplification: bool = False,
) -> bytes:
    # 2) Serialize
    # serialize into a binary
    binary = msgpack_lib.dumps(simple_objects)

    # 3) Compress
    # Compress the binary and return the result, prepending a 1-byte
    # header ('0' or '1') to the output stream to denote whether the
    # stream is compressed. If the compressed stream is longer than the
    # input, the input is emitted as-is with the header set to '0';
    # otherwise the compressed stream is emitted with the header set to
    # '1'. Even if the caller sets the compressed flag to False, the
    # input is emitted as-is with the header set to '0'.
    return compression._compress(binary) 
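The comments above describe a length-guarded compression scheme. A rough illustration of what such a _compress helper could look like (an assumption based on the comments, not PySyft's actual compression module):

import zlib

NO_COMPRESSION = b'0'  # hypothetical 1-byte header values
LZ_COMPRESSION = b'1'

def _compress(decompressed_input_bin: bytes) -> bytes:
    '''Prepend a 1-byte header; keep the compressed form only if smaller.'''
    compressed = zlib.compress(decompressed_input_bin)
    if len(compressed) < len(decompressed_input_bin):
        return LZ_COMPRESSION + compressed
    return NO_COMPRESSION + decompressed_input_bin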
Example #6
Source File: base.py    From libnacl with Apache License 2.0
def save(self, path, serial='json'):
        '''
        Safely save keys with owner-only permissions (0600)
        '''
        pre = self.for_json()

        if serial == 'msgpack':
            import msgpack
            packaged = msgpack.dumps(pre)
        elif serial == 'json':
            import json
            packaged = json.dumps(pre)

        perm_other = stat.S_IROTH | stat.S_IWOTH | stat.S_IXOTH
        perm_group = stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP

        cumask = os.umask(perm_other | perm_group)
        # msgpack.dumps returns bytes while json.dumps returns str,
        # so pick the file mode accordingly
        mode = 'wb+' if isinstance(packaged, bytes) else 'w+'
        with open(path, mode) as fp_:
            fp_.write(packaged)
        os.umask(cumask)
Example #7
Source File: plan.py    From forsun with MIT License
def dumps(self):
        return msgpack.dumps({
            "key": self.key,
            "second": self.second,
            "minute": self.minute,
            "hour": self.hour,
            "day": self.day,
            "month": self.month,
            "week": self.week,
            "status": self.status,
            "count": self.count,
            "is_time_out": self.is_time_out,
            "next_time": self.next_time,
            "current_count": self.current_count,
            "last_timeout": self.last_timeout,
            "created_time": self.created_time,
            "action": self.action,
            "params": self.params,
        }) 
Example #8
Source File: test_metadata.py    From tskit with MIT License
def test_json(self):
        ts = msprime.simulate(10, random_seed=1)
        tables = ts.dump_tables()
        nodes = tables.nodes
        # For each node, we create some Python metadata that can be JSON encoded.
        metadata = [
            {"one": j, "two": 2 * j, "three": list(range(j))} for j in range(len(nodes))
        ]
        encoded, offset = tskit.pack_strings(map(json.dumps, metadata))
        nodes.set_columns(
            flags=nodes.flags,
            time=nodes.time,
            population=nodes.population,
            metadata_offset=offset,
            metadata=encoded,
        )
        self.assertTrue(np.array_equal(nodes.metadata_offset, offset))
        self.assertTrue(np.array_equal(nodes.metadata, encoded))
        ts1 = tables.tree_sequence()
        for j, node in enumerate(ts1.nodes()):
            decoded_metadata = json.loads(node.metadata.decode())
            self.assertEqual(decoded_metadata, metadata[j])
        ts1.dump(self.temp_file)
        ts2 = tskit.load(self.temp_file)
        self.assertEqual(ts1.tables.nodes, ts2.tables.nodes) 
Example #9
Source File: test_metadata.py    From tskit with MIT License
def test_pickle(self):
        ts = msprime.simulate(10, random_seed=1)
        tables = ts.dump_tables()
        # For each node, we create some Python metadata that can be pickled
        metadata = [
            {"one": j, "two": 2 * j, "three": list(range(j))}
            for j in range(ts.num_nodes)
        ]
        encoded, offset = tskit.pack_bytes(list(map(pickle.dumps, metadata)))
        tables.nodes.set_columns(
            flags=tables.nodes.flags,
            time=tables.nodes.time,
            population=tables.nodes.population,
            metadata_offset=offset,
            metadata=encoded,
        )
        self.assertTrue(np.array_equal(tables.nodes.metadata_offset, offset))
        self.assertTrue(np.array_equal(tables.nodes.metadata, encoded))
        ts1 = tables.tree_sequence()
        for j, node in enumerate(ts1.nodes()):
            decoded_metadata = pickle.loads(node.metadata)
            self.assertEqual(decoded_metadata, metadata[j])
        ts1.dump(self.temp_file)
        ts2 = tskit.load(self.temp_file)
        self.assertEqual(ts1.tables.nodes, ts2.tables.nodes) 
Example #10
Source File: test_metadata.py    From tskit with MIT License
def test_mutations(self):
        tables = tskit.TableCollection(sequence_length=1)
        metadata = ExampleMetadata(one="node1", two="node2")
        pickled = pickle.dumps(metadata)
        tables.nodes.add_row(time=0)
        tables.sites.add_row(position=0.1, ancestral_state="A")
        tables.mutations.add_row(site=0, node=0, derived_state="T", metadata=pickled)
        ts = tables.tree_sequence()
        mutation = ts.site(0).mutations[0]
        self.assertEqual(mutation.site, 0)
        self.assertEqual(mutation.node, 0)
        self.assertEqual(mutation.derived_state, "T")
        self.assertEqual(mutation.metadata, pickled)
        unpickled = pickle.loads(mutation.metadata)
        self.assertEqual(unpickled.one, metadata.one)
        self.assertEqual(unpickled.two, metadata.two) 
Example #11
Source File: test_metadata.py    From tskit with MIT License
def test_metadata_schema(self):
        # Bad jsonschema
        with self.assertRaises(exceptions.MetadataSchemaValidationError):
            metadata.MetadataSchema(
                {"codec": "json", "additionalProperties": "THIS ISN'T RIGHT"},
            )
        # Bad codec
        with self.assertRaises(exceptions.MetadataSchemaValidationError):
            metadata.MetadataSchema({"codec": "morse-code"})
        # Missing codec
        with self.assertRaises(exceptions.MetadataSchemaValidationError):
            metadata.MetadataSchema({})
        schema = {
            "codec": "json",
            "title": "Example Metadata",
            "type": "object",
            "properties": {"one": {"type": "string"}, "two": {"type": "number"}},
            "required": ["one", "two"],
            "additionalProperties": False,
        }
        ms = metadata.MetadataSchema(schema)
        self.assertEqual(str(ms), json.dumps(schema))
        # Missing required properties
        with self.assertRaises(exceptions.MetadataValidationError):
            ms.validate_and_encode_row({}) 
Example #12
Source File: test_metadata.py    From tskit with MIT License
def test_json_codec(self):
        schema = {
            "codec": "json",
            "title": "Example Metadata",
            "type": "object",
            "properties": {"one": {"type": "string"}, "two": {"type": "number"}},
            "required": ["one", "two"],
            "additionalProperties": False,
        }
        ms = metadata.MetadataSchema(schema)
        # Valid row data
        row_data = {"one": "tree", "two": 5}
        self.assertEqual(
            ms.validate_and_encode_row(row_data), json.dumps(row_data).encode()
        )
        self.assertEqual(ms.decode_row(json.dumps(row_data).encode()), row_data)
        # Round trip
        self.assertEqual(ms.decode_row(ms.validate_and_encode_row(row_data)), row_data) 
Example #13
Source File: pytest_log_handler.py    From pytest-salt with Apache License 2.0
def process_queue(host, port, prefix, queue):
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        sock.connect((host, port))
    except socket.error:
        sock.close()
        return

    log.debug('Sending log records to Remote log server')
    while True:
        try:
            record = queue.get()
            if record is None:
                # A sentinel to stop processing the queue
                break
            # Just send every log. Filtering will happen on the main process
            # logging handlers
            record_dict = record.__dict__
            record_dict['msg'] = '[{}] {}'.format(to_unicode(prefix), to_unicode(record_dict['msg']))
            # Note: the encoding= keyword was removed in msgpack 1.0,
            # where str is packed as UTF-8 by default
            sock.sendall(msgpack.dumps(record_dict, encoding='utf-8'))
        except (IOError, EOFError, KeyboardInterrupt, SystemExit):
            break
        except Exception as exc:  # pylint: disable=broad-except
            log.warning(
                'An exception occurred in the pytest salt logging '
                'queue thread: %s',
                exc,
                exc_info_on_loglevel=logging.DEBUG
            ) 
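On the other end of this socket, the stream of concatenated msgpack records can be consumed with a streaming msgpack.Unpacker, which buffers partial frames. A hypothetical receiving side (a sketch, not pytest-salt's actual log server) might look like:

import logging
import msgpack
import socketserver

class LogRecordHandler(socketserver.StreamRequestHandler):
    '''Hypothetical remote log server for the stream sent above.'''

    def handle(self):
        unpacker = msgpack.Unpacker(raw=False)
        while True:
            chunk = self.request.recv(4096)
            if not chunk:
                break
            # feed() buffers raw bytes; iterating yields each complete
            # record dict as soon as it has fully arrived
            unpacker.feed(chunk)
            for record_dict in unpacker:
                record = logging.makeLogRecord(record_dict)
                logging.getLogger(record.name).handle(record)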
Example #14
Source File: format.py    From sift with MIT License
def __call__(self, model):
        return model.map(json.dumps) 
Example #15
Source File: worker.py    From pop with Apache License 2.0
async def ret(hub, payload):
    '''
    Send a return payload to the spawning process. The return will be
    tagged with the index of the process that returned it.
    '''
    payload = {'ind': hub.proc.IND, 'payload': payload}
    mp = msgpack.dumps(payload, use_bin_type=True)
    mp += hub.proc.DELIM
    reader, writer = await asyncio.open_unix_connection(path=hub.proc.RET_SOCK_PATH)
    writer.write(mp)
    await writer.drain()
    ret = await reader.readuntil(hub.proc.DELIM)
    ret = ret[:-len(hub.proc.DELIM)]
    writer.close()
    return msgpack.loads(ret, encoding='utf8') 
Example #16
Source File: worker.py    From pop with Apache License 2.0
async def gen(hub, payload, reader, writer):
    '''
    Run a generator and stream the returns back. Supports both plain and
    async generators.
    '''
    ref = payload.get('ref')
    args = payload.get('args', [])
    kwargs = payload.get('kwargs', {})
    ret = hub.pop.ref.last(ref)(*args, **kwargs)
    if isinstance(ret, types.AsyncGeneratorType):
        async for chunk in ret:
            rchunk = msgpack.dumps(chunk, use_bin_type=True)
            rchunk += hub.proc.I_FLAG
            rchunk += hub.proc.DELIM
            writer.write(rchunk)
            await writer.drain()
    elif isinstance(ret, types.GeneratorType):
        for chunk in ret:
            rchunk = msgpack.dumps(chunk, use_bin_type=True)
            rchunk += hub.proc.I_FLAG
            rchunk += hub.proc.DELIM
            writer.write(rchunk)
            await writer.drain()
    elif asyncio.iscoroutine(ret):
        return await ret
    else:
        return ret
    return '' 
Example #17
Source File: worker.py    From pop with Apache License 2.0
async def work(hub, reader, writer):
    '''
    Process the incoming work
    '''
    inbound = await reader.readuntil(hub.proc.DELIM)
    inbound = inbound[:-len(hub.proc.DELIM)]
    payload = msgpack.loads(inbound, encoding='utf8')
    ret = b''
    if 'fun' not in payload:
        ret = {'err': 'Invalid format'}
    elif payload['fun'] == 'sub':
        # Time to add a sub to the hub!
        try:
            hub.proc.worker.add_sub(payload)
            ret = {'status': True}
        except Exception as exc:
            ret = {'status': False, 'exc': str(exc)}
    elif payload['fun'] == 'run':
        # Time to do some work!
        try:
            ret = await hub.proc.worker.run(payload)
        except Exception as exc:
            ret = {'status': False, 'exc': str(exc)}
    elif payload['fun'] == 'gen':
        ret = await hub.proc.worker.gen(payload, reader, writer)
    elif payload['fun'] == 'setattr':
        ret = await hub.proc.worker.set_attr(payload)
    ret = msgpack.dumps(ret, use_bin_type=True)
    ret += hub.proc.D_FLAG
    ret += hub.proc.DELIM
    writer.write(ret)
    await writer.drain()
    writer.close() 
Example #18
Source File: serialize.py    From PoseFix_RELEASE with MIT License
def dumps_msgpack(obj):
    """
    Serialize an object.
    Returns:
        Implementation-dependent bytes-like object
    """
    return msgpack.dumps(obj, use_bin_type=True) 
Example #19
Source File: serialize.py    From lighttrack with MIT License
def loads_pyarrow(buf):
    """
    Args:
        buf: the output of `dumps`.
    """
    return pa.deserialize(buf) 
Example #20
Source File: serialize.py    From PoseFix_RELEASE with MIT License
def loads_msgpack(buf):
    """
    Args:
        buf: the output of `dumps`.
    """
    return msgpack.loads(buf, raw=False) 
Example #21
Source File: serialize.py    From PoseFix_RELEASE with MIT License
def loads_pyarrow(buf):
    """
    Args:
        buf: the output of `dumps`.
    """
    return pa.deserialize(buf) 
Example #22
Source File: serialize.py    From lighttrack with MIT License
def loads_msgpack(buf):
    """
    Args:
        buf: the output of `dumps`.
    """
    return msgpack.loads(buf, raw=False) 
Example #23
Source File: serialize.py    From lighttrack with MIT License
def dumps_msgpack(obj):
    """
    Serialize an object.
    Returns:
        Implementation-dependent bytes-like object
    """
    return msgpack.dumps(obj, use_bin_type=True) 
Example #24
Source File: serializers.py    From aiocache with BSD 3-Clause "New" or "Revised" License
def dumps(self, value):
        raise NotImplementedError("dumps method must be implemented") 
Example #25
Source File: serializers.py    From aiocache with BSD 3-Clause "New" or "Revised" License
def dumps(self, value):
        """
        Returns the same value
        """
        return value 
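The example above is a pass-through (null) serializer. A concrete implementation of the same dumps/loads interface backed by msgpack might look like this (a minimal sketch; the class name and interface details are assumptions, not aiocache's exact API):

import msgpack

class MsgPackSerializer:
    '''Hypothetical serializer that speaks msgpack on both ends.'''

    def dumps(self, value):
        # use_bin_type=True keeps str and bytes distinct on the wire
        return msgpack.dumps(value, use_bin_type=True)

    def loads(self, value):
        if value is None:
            return None
        return msgpack.loads(value, raw=False)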
Example #26
Source File: serialize.py    From petridishnn with MIT License
def loads_pyarrow(buf):
    """
    Args:
        buf: the output of `dumps`.
    """
    return pa.deserialize(buf)


# Importing pyarrow has a lot of side effects:
# https://github.com/apache/arrow/pull/2329
# https://groups.google.com/a/tensorflow.org/forum/#!topic/developers/TMqRaT-H2bI
# So we use msgpack as the default. 
Example #27
Source File: serialization.py    From QCElemental with BSD 3-Clause "New" or "Revised" License
def msgpackext_dumps(data: Any) -> bytes:
    """Safe serialization of a Python object to msgpack binary representation using all known encoders.
    For NumPy, encodes a specialized object format to encode all shape and type data.

    Parameters
    ----------
    data : Any
        An encodable Python object.

    Returns
    -------
    bytes
        A msgpack representation of the data in bytes.
    """
    which_import("msgpack", raise_error=True, raise_msg=_msgpack_which_msg)

    return msgpack.dumps(data, default=msgpackext_encode, use_bin_type=True) 
Example #28
Source File: serialize.py    From petridishnn with MIT License
def loads_msgpack(buf):
    """
    Args:
        buf: the output of `dumps`.
    """
    # Since msgpack 0.6, the default limits on unpacked container sizes
    # were lowered to roughly 1 MB; raise them to approximately 1 GB.
    # MAX_MSGPACK_LEN is assumed to be defined elsewhere in the module.
    return msgpack.loads(buf, raw=False,
                         max_bin_len=MAX_MSGPACK_LEN,
                         max_array_len=MAX_MSGPACK_LEN,
                         max_map_len=MAX_MSGPACK_LEN,
                         max_str_len=MAX_MSGPACK_LEN) 
Example #29
Source File: serialize.py    From petridishnn with MIT License
def dumps_msgpack(obj):
    """
    Serialize an object.

    Returns:
        Implementation-dependent bytes-like object.
    """
    return msgpack.dumps(obj, use_bin_type=True) 
Example #30
Source File: serialize.py    From ternarynet with Apache License 2.0
def dumps(obj):
    # return dill.dumps(obj)
    return msgpack.dumps(obj, use_bin_type=True)