Python dill.dumps() Examples
The following are 30 code examples of dill.dumps(). Each example notes its original project, source file, and license above the code. You may also want to check out all other available functions and classes of the dill module.
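Before diving into the examples, here is a minimal sketch of the round trip most of them rely on: dill.dumps() serializes objects that the standard pickle module rejects, such as lambdas, and dill.loads() restores them. The names below (square, payload) are illustrative only.

import dill
import pickle

square = lambda x: x * x

# The standard library refuses to pickle a lambda...
try:
    pickle.dumps(square)
except pickle.PicklingError:
    print("pickle failed, as expected")

# ...but dill serializes it by value, so it survives the round trip.
payload = dill.dumps(square)
restored = dill.loads(payload)
print(restored(4))  # -> 16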
Example #1
Source File: test_nesting.py From transitions with MIT License
def test_pickle(self):
    print("separator", self.state_cls.separator)
    if sys.version_info < (3, 4):
        import dill as pickle
    else:
        import pickle

    states = ['A', 'B', {'name': 'C',
                         'children': ['1', '2',
                                      {'name': '3', 'children': ['a', 'b', 'c']}]},
              'D', 'E', 'F']
    transitions = [
        {'trigger': 'walk', 'source': 'A', 'dest': 'B'},
        {'trigger': 'run', 'source': 'B', 'dest': 'C'},
        {'trigger': 'sprint', 'source': 'C', 'dest': 'D'}
    ]
    m = self.stuff.machine_cls(states=states, transitions=transitions, initial='A')
    m.walk()
    dump = pickle.dumps(m)
    self.assertIsNotNone(dump)
    m2 = pickle.loads(dump)
    self.assertEqual(m.state, m2.state)
    m2.run()
    m2.to_C_3_a()
    m2.to_C_3_b()
Example #2
Source File: datamining.py From pyiron with BSD 3-Clause "New" or "Revised" License
def update_table(self, job_status_list=None):
    """
    Update the pyiron table object, add new columns if a new function was added
    or add new rows for new jobs

    Args:
        job_status_list (list/None): List of job status which are added to the table by default ["finished"]
    """
    if job_status_list is None:
        job_status_list = ["finished"]
    self.project.db.item_update({"timestart": datetime.now()}, self.job_id)
    with self.project_hdf5.open("input") as hdf5_input:
        self._pyiron_table.create_table(
            enforce_update=self._enforce_update,
            file=hdf5_input,
            level=self._project_level,
            job_status_list=job_status_list,
        )
    self.to_hdf()
    self._pyiron_table._df.to_csv(
        os.path.join(self.working_directory, "pyirontable.csv"), index=False
    )
    with self.project_hdf5.open("output") as hdf5_output:
        hdf5_output["table"] = json.dumps(self.pyiron_table._df.to_dict())
    self.project.db.item_update(self._runtime(), self.job_id)
Example #3
Source File: util.py From sparktorch with MIT License
def serialize_torch_obj(
    model: nn.Module,
    criterion: Any,
    optimizer: Type[Optimizer],
    **kwargs
) -> str:
    model_encoded = torch_encoder(
        TorchObj(
            model=model,
            criterion=criterion,
            optimizer=optimizer,
            optimizer_params=kwargs,
            is_lazy=False,
            model_parameters=None
        )
    )
    return json.dumps({
        'torch_obj': model_encoded,
        'shapes': [list(ps.shape) for ps in model.parameters()]
    })
Example #4
Source File: pipeline_util.py From sparktorch with MIT License
def _to_java(self):
    """
    Convert this instance to a dill dump, then to a list of strings with the
    unicode integer values of each character. Use this list as a set of dummy
    stopwords and store in a StopWordsRemover instance.

    :return: Java object equivalent to this instance.
    """
    dmp = dill.dumps(self)
    dmp = zlib.compress(dmp)
    sc = SparkContext._active_spark_context
    pylist = [str(i) + ',' for i in bytearray(dmp)]  # convert bytes to string integer list
    pylist = [''.join(pylist)]
    pylist.append(PysparkObjId._getPyObjId())  # add our id so PysparkPipelineWrapper can id us.
    java_class = sc._gateway.jvm.java.lang.String
    java_array = sc._gateway.new_array(java_class, len(pylist))
    java_array[0:2] = pylist[0:2]
    _java_obj = JavaParams._new_java_obj(PysparkObjId._getCarrierClass(javaName=True), self.uid)
    _java_obj.setStopWords(java_array)
    return _java_obj
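The corresponding _from_java step presumably reverses this chain. Independent of any Spark machinery, the byte round trip itself looks like the following sketch (obj is an illustrative placeholder):

import dill
import zlib

obj = {"weights": [0.1, 0.2]}

# Encode: dill dump -> zlib compress -> comma-separated byte values as one string.
compressed = zlib.compress(dill.dumps(obj))
encoded = ''.join(str(b) + ',' for b in bytearray(compressed))  # e.g. "120,156,..."

# Decode: split the integers back into bytes and invert each step.
raw = bytes(int(tok) for tok in encoded.split(',') if tok)
assert dill.loads(zlib.decompress(raw)) == obj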
Example #5
Source File: passmanager.py From qiskit-terra with Apache License 2.0
def _run_several_circuits(
        self,
        circuits: List[QuantumCircuit],
        output_name: str = None,
        callback: Callable = None
) -> List[QuantumCircuit]:
    """Run all the passes on the specified ``circuits``.

    Args:
        circuits: Circuits to transform via all the registered passes.
        output_name: The output circuit name. If ``None``, it will be set to the
            same as the input circuit name.
        callback: A callback function that will be called after each pass execution.

    Returns:
        The transformed circuits.
    """
    # TODO support for List(output_name) and List(callback)
    del output_name
    del callback

    return parallel_map(PassManager._in_parallel, circuits,
                        task_kwargs={'pm_dill': dill.dumps(self)})
Example #6
Source File: decorators.py From lambdify with Apache License 2.0
def invoke(self, event, context, inv_type=None, log_type='None', version=None):
    """Invoke the lambda function

    This is basically a low-level lambda interface.
    In most cases, you won't need to use this by yourself.

    :param event: lambda input
    :param context: lambda execution client context
    :param inv_type: invocation type
    :param log_type: log type
    :param version: version
    """
    if not self._was_updated and self.create_options & UPDATE_LAZY == UPDATE_LAZY:
        self._create_or_update()

    params = dict(
        FunctionName=self.name,
        InvocationType=inv_type or self._inv_type,
        LogType=log_type,
        ClientContext=json.dumps(context),
        Payload=json.dumps(event),
    )
    if version:
        params['Qualifier'] = version

    return self.client.invoke(**params)
Example #7
Source File: celery.py From flask-unchained with MIT License
def _register_dill(self):
    def encode(obj, dumper=dill_dumps):
        return dumper(obj, protocol=pickle_protocol)

    def decode(s):
        return pickle_loads(str_to_bytes(s), load=dill_load)

    registry.register(
        name='dill',
        encoder=encode,
        decoder=decode,
        content_type='application/x-python-serialize',
        content_encoding='binary'
    )

# the same as upstream, but we need to copy it here so we can access it
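Once a serializer is registered with kombu's registry under the name 'dill', as above, Celery can select it through its standard configuration settings. A hypothetical sketch, where app is an assumed Celery() instance:

# 'app' is an assumed Celery() instance; the setting names are standard Celery 4+ options.
app.conf.task_serializer = 'dill'       # serialize task payloads with dill
app.conf.result_serializer = 'dill'     # serialize task results with dill
app.conf.accept_content = ['dill']      # accept only dill-encoded content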
Example #8
Source File: template.py From polymorph with GNU General Public License v2.0
def add_function(self, cond, name, func):
    """Add a new function that will be executed when intercepting packets.

    Parameters
    ----------
    cond : :obj:`str`
        Name of the condition set (preconditions, postconditions, executions).
    name : :obj:`str`
        Name to identify the function in the `Template`.
    func : :obj:`function`
        Pointer to a function.
    """
    fdump = dill.dumps(func)
    self._functions[cond][name] = fdump.hex()
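Since the function is stored as a hex string, executing it later presumably starts with the inverse: bytes.fromhex() followed by dill.loads(). A sketch with a made-up drop_packet function:

import dill

def drop_packet(packet):
    return None

hex_dump = dill.dumps(drop_packet).hex()        # what add_function stores
restored = dill.loads(bytes.fromhex(hex_dump))  # the presumed inverse
assert restored("dummy packet") is None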
Example #9
Source File: deferred.py From toil with Apache License 2.0
def create(cls, function, *args, **kwargs):
    """
    Capture the given callable and arguments as an instance of this class.

    :param callable function: The deferred action to take in the form of a function
    :param tuple args: Non-keyword arguments to the function
    :param dict kwargs: Keyword arguments to the function
    """
    # The general principle is to deserialize as late as possible, i.e. when the function is
    # to be invoked, as that will avoid redundantly deserializing deferred functions for
    # concurrently running jobs when the cache state is loaded from disk. By implication we
    # should serialize as early as possible. We need to serialize the function as well as its
    # arguments.
    return cls(*list(map(dill.dumps, (function, args, kwargs))),
               name=function.__name__,
               module=ModuleDescriptor.forModule(function.__module__).globalize())
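Because the function and its arguments are dumped separately, the invoking side can load each part independently, and as late as possible. The following is a hypothetical sketch of that step, not toil's actual code:

import dill

def invoke_deferred(function_bytes, args_bytes, kwargs_bytes):
    # Deserialize only at the moment the deferred action actually runs.
    function = dill.loads(function_bytes)
    args = dill.loads(args_bytes)
    kwargs = dill.loads(kwargs_bytes)
    return function(*args, **kwargs)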
Example #10
Source File: agent.py From osbrain with Apache License 2.0
def safe_call(self, method, *args, **kwargs):
    """
    A safe call to a method.

    A safe call is simply sent to be executed by the main thread.

    Parameters
    ----------
    method : str
        Method name to be executed by the main thread.
    *args : arguments
        Method arguments.
    *kwargs : keyword arguments
        Method keyword arguments.
    """
    if not self._running:
        raise RuntimeError(
            'Agent must be running to safely execute methods!'
        )
    data = cloudpickle.dumps((method, args, kwargs))
    return self._loopback_reqrep('inproc://_loopback_safe', data)
Example #11
Source File: prediction_problem.py From Trane with MIT License
def to_json(self):
    """
    This function converts Prediction Problems to JSON. It captures the
    table_meta, but not the cutoff_strategy

    Parameters
    ----------
    None

    Returns
    -------
    json: JSON representation of the Prediction Problem.
    """
    table_meta_json = None
    if self.table_meta:
        table_meta_json = self.table_meta.to_json()

    return json.dumps(
        {"operations": [
            json.loads(op_to_json(op)) for op in self.operations],
         "entity_id_col": self.entity_id_col,
         "label_col": self.label_col,
         "table_meta": table_meta_json})
Example #12
Source File: pipeline_util.py From sparkflow with MIT License
def _to_java(self):
    """
    Convert this instance to a dill dump, then to a list of strings with the
    unicode integer values of each character. Use this list as a set of dummy
    stopwords and store in a StopWordsRemover instance.

    :return: Java object equivalent to this instance.
    """
    dmp = dill.dumps(self)
    dmp = zlib.compress(dmp)
    sc = SparkContext._active_spark_context
    pylist = [str(i) + ',' for i in bytearray(dmp)]  # convert bytes to string integer list
    pylist = [''.join(pylist)]
    pylist.append(PysparkObjId._getPyObjId())  # add our id so PysparkPipelineWrapper can id us.
    java_class = sc._gateway.jvm.java.lang.String
    java_array = sc._gateway.new_array(java_class, len(pylist))
    java_array[0:2] = pylist[0:2]
    _java_obj = JavaParams._new_java_obj(PysparkObjId._getCarrierClass(javaName=True), self.uid)
    _java_obj.setStopWords(java_array)
    return _java_obj
Example #13
Source File: test_core.py From transitions with MIT License
def test_pickle(self):
    import sys
    if sys.version_info < (3, 4):
        import dill as pickle
    else:
        import pickle

    states = ['A', 'B', 'C', 'D']
    # Define with list of dictionaries
    transitions = [
        {'trigger': 'walk', 'source': 'A', 'dest': 'B'},
        {'trigger': 'run', 'source': 'B', 'dest': 'C'},
        {'trigger': 'sprint', 'source': 'C', 'dest': 'D'}
    ]
    m = Machine(states=states, transitions=transitions, initial='A')
    m.walk()
    dump = pickle.dumps(m)
    self.assertIsNotNone(dump)
    m2 = pickle.loads(dump)
    self.assertEqual(m.state, m2.state)
    m2.run()
Example #14
Source File: utils.py From DynamicTriad with Apache License 2.0
def run_slices(self, slices):
    mgr = mp.Manager()
    report_queue = mgr.Queue()
    if self.monitor_func is not None:
        monitor = mp.Process(target=self.monitor_func, args=(report_queue,))
        monitor.start()
    else:
        monitor = None

    if self.njobs == 1:
        res = []
        for slc in slices:
            res.append(self.work_func(None, slc, report_queue))
    else:
        dill_work_func = dill.dumps(self.work_func)
        with contextlib.closing(mp.Pool(self.njobs, maxtasksperchild=self.__mtpc)) as pool:
            res = pool.map(func_wrapper, [[dill_work_func, slc, report_queue] for slc in slices])
    res = list(itertools.chain.from_iterable(res))

    report_queue.put(StopIteration())
    if monitor is not None:
        monitor.join()

    return res
Example #15
Source File: fmin.py From auptimizer with GNU General Public License v3.0
def __init__(self, algo, domain, trials, rstate, asynchronous=None,
             max_queue_len=1,
             poll_interval_secs=1.0,
             max_evals=sys.maxsize,
             verbose=0,
             ):
    self.algo = algo
    self.domain = domain
    self.trials = trials
    if asynchronous is None:
        self.asynchronous = trials.asynchronous
    else:
        self.asynchronous = asynchronous
    self.poll_interval_secs = poll_interval_secs
    self.max_queue_len = max_queue_len
    self.max_evals = max_evals
    self.rstate = rstate

    if self.asynchronous:
        if 'FMinIter_Domain' in trials.attachments:
            logger.warn('over-writing old domain trials attachment')
        msg = pickler.dumps(domain)
        # -- sanity check for unpickling
        pickler.loads(msg)
        trials.attachments['FMinIter_Domain'] = msg
Example #16
Source File: test_blue.py From schedula with European Union Public License 1.1
def test_blue_io(self):
    import dill
    s0 = self.dsp()
    pre_dsp = dill.dumps(self.dsp)
    blue = self.dsp.blue()
    # Creating a blueprint must not mutate the dispatcher's dump.
    self.assertEqual(pre_dsp, dill.dumps(self.dsp))
    pre = dill.dumps(blue), pre_dsp
    sol = blue()
    post = dill.dumps(blue), dill.dumps(self.dsp)
    # Running the blueprint or the dispatcher must not change either dump.
    self.assertEqual(pre, post)
    s = self.dsp()
    post = dill.dumps(blue), dill.dumps(self.dsp)
    self.assertEqual(pre, post)
    self.assertEqual(s, sol)
    self.assertEqual(s0, sol)
    # The blueprint dumps smaller than the dispatcher, which dumps smaller than a solution.
    self.assertLess(*map(len, post))
    self.assertLess(len(post[1]), len(dill.dumps(s)))
    blue, dsp = list(map(dill.loads, post))
    self.assertEqual(dsp.solution, {})
    self.assertEqual(s, dsp())
    self.assertEqual(s, blue())
Example #17
Source File: core.py From slack-machine with MIT License
def load_plugins(self):
    with indent(4):
        logger.debug("PLUGINS: %s", self._settings['PLUGINS'])
        for plugin in self._settings['PLUGINS']:
            for class_name, cls in import_string(plugin):
                if issubclass(cls, MachineBasePlugin) and cls is not MachineBasePlugin:
                    logger.debug("Found a Machine plugin: {}".format(plugin))
                    storage = PluginStorage(class_name)
                    instance = cls(SlackClient(), self._settings, storage)
                    missing_settings = self._register_plugin(class_name, instance)
                    if missing_settings:
                        show_invalid(class_name)
                        with indent(4):
                            error_msg = "The following settings are missing: {}".format(
                                ", ".join(missing_settings)
                            )
                            puts(colored.red(error_msg))
                            puts(colored.red("This plugin will not be loaded!"))
                        del instance
                    else:
                        instance.init()
                        show_valid(class_name)
    self._storage.set('manual', dill.dumps(self._help))
Example #18
Source File: dataset.py From rising with MIT License
def load_async(pool: Pool, fn: Callable, *args, callback: Callable = None, **kwargs) -> Any:
    """
    Load data asynchronously and serialize data via dill

    Args:
        pool: multiprocessing pool to use for :func:`apply_async`
        fn: function to load a single sample
        *args: positional arguments to dump with dill
        callback: optional callback. defaults to None.
        **kwargs: keyword arguments to dump with dill

    Returns:
        Any: reference to obtain data with :func:`get`
    """
    if not DILL_AVAILABLE:
        raise RuntimeError('dill is not installed. For async loading '
                           'please install it')

    payload = dill.dumps((fn, args, kwargs))
    return pool.apply_async(dill_helper, (payload,), callback=callback)
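dill_helper itself is not shown here, but given the payload format above and test_dill_helper in Example #30, it plausibly has the following shape (a sketch, not rising's actual implementation):

import dill

def dill_helper(payload: bytes):
    # Unpack the (fn, args, kwargs) triple and invoke it in the worker process.
    fn, args, kwargs = dill.loads(payload)
    return fn(*args, **kwargs)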
Example #19
Source File: agent.py From osbrain with Apache License 2.0
def __init__(
    self,
    name='',
    nsaddr=None,
    addr=None,
    serializer=None,
    transport=None,
    base=Agent,
    attributes=None,
):
    super().__init__()
    self.name = name
    self._daemon = None
    self._host, self.port = address_to_host_port(addr)
    if self.port is None:
        self.port = 0
    self.nsaddr = nsaddr
    self._serializer = serializer
    self._transport = transport
    self.base = cloudpickle.dumps(base)
    self._shutdown_event = multiprocessing.Event()
    self._queue = multiprocessing.Queue()
    self._sigint = False
    self.attributes = attributes
Example #20
Source File: tasks.py From pythonwhat with GNU Affero General Public License v3.0
def getRepresentation(name, process):
    obj_class = getClass(name, process)
    converters = pythonwhat.State.State.root_state.converters
    if obj_class in converters:
        repres = convert(name, dill.dumps(converters[obj_class]), process)
        if errored(repres):
            return ReprFail("manual conversion failed: {}".format(repres))
        else:
            return repres
    else:
        # first try to pickle
        try:
            stream = getStreamPickle(name, process)
            if not errored(stream):
                return pickle.loads(stream)
        except:
            pass

        # if it failed, try to dill
        try:
            stream = getStreamDill(name, process)
            if not errored(stream):
                return dill.loads(stream)
            return ReprFail(
                "dilling inside process failed for %s - write manual converter"
                % obj_class
            )
        except PicklingError:
            return ReprFail(
                "undilling of bytestream failed with PicklingError - write manual converter"
            )
        except Exception as e:
            return ReprFail(
                "undilling of bytestream failed for class %s - write manual converter."
                "Error: %s - %s" % (obj_class, type(e), e)
            )
Example #21
Source File: util.py From FeatureHub with MIT License
def run_isolated(f, *args):
    """Execute `f(args)` in an isolated environment.

    First, uses dill to serialize the function. Unfortunately, pickle is
    unable to serialize some functions, so we must serialize and deserialize
    the function ourselves.
    """
    f_dill = dill.dumps(f)
    with Pool(1) as pool:
        return pool.apply(_get_function_and_execute, (f_dill, *args))
Example #22
Source File: tasks.py From pythonwhat with GNU Affero General Public License v3.0
def getStreamDill(name, process, shell):
    try:
        return dill.dumps(get_env(shell.user_ns)[name])
    except:
        return None
Example #23
Source File: tasks.py From pythonwhat with GNU Affero General Public License v3.0
def getStreamPickle(name, process, shell):
    try:
        return pickle.dumps(get_env(shell.user_ns)[name])
    except:
        return None
Example #24
Source File: __init__.py From slack-machine with MIT License
def set(self, key, value, expires=None, shared=False):
    """Store or update a value by key

    :param key: the key under which to store the data
    :param value: the data to store
    :param expires: optional number of seconds after which the data is expired
    :param shared: ``True/False`` whether this data should be shared by other plugins.
        Use with care, because it pollutes the global namespace of the storage.
    """
    namespaced_key = self._namespace_key(key, shared)
    pickled_value = dill.dumps(value)
    Storage.get_instance().set(namespaced_key, pickled_value, expires)
Example #25
Source File: util.py From sparktorch with MIT License
def torch_encoder(obj) -> str:
    """
    Encodes Torch Object or anything related

    :param obj: any object
    :return: base64-encoded string
    """
    return codecs.encode(
        dill.dumps(obj), "base64"
    ).decode()
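Decoding is presumably the inverse of this chain; a sketch (sparktorch's actual decoder may differ in name and details):

import codecs
import dill

def torch_decoder(s: str):
    # base64 string -> raw bytes -> original object
    return dill.loads(codecs.decode(s.encode(), "base64"))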
Example #26
Source File: test_model.py From astromodels with BSD 3-Clause "New" or "Revised" License
def test_pickling_unpickling():
    import dill

    mg = ModelGetter()
    m1 = mg.model

    pick = dill.dumps(m1)
    m_reloaded = dill.loads(pick)
Example #27
Source File: prediction_problem.py From Trane with MIT License
def from_json(cls, json_data):
    """
    This function converts a JSON snippet to a prediction problem

    Parameters
    ----------
    json_data: JSON code or dict containing the prediction problem.

    Returns
    -------
    problem: Prediction Problem
    """
    # only tries json.loads if json_data is not a dict
    if type(json_data) != dict:
        json_data = json.loads(json_data)

    operations = [
        op_from_json(json.dumps(item)) for item in json_data['operations']]
    entity_id_col = json_data['entity_id_col']
    label_col = json_data['label_col']
    table_meta = TableMeta.from_json(json_data.get('table_meta'))

    problem = PredictionProblem(
        operations=operations,
        entity_id_col=entity_id_col,
        label_col=label_col,
        table_meta=table_meta,
        cutoff_strategy=None)
    return problem
Example #28
Source File: inference.py From sparktorch with MIT License
def convert_to_serialized_torch(network: nn.Module) -> str:
    """
    Converts an existing torch network to a serialized string.

    :param network: a nn.Module that you want to serialize
    :return: Returns the serialized torch model.
    """
    return codecs.encode(dill.dumps(network), "base64").decode()
Example #29
Source File: unittests_simulations.py From Conditional_Density_Estimation with MIT License
def test_serializarion(self):
    import pickle, dill

    model = LinearStudentT(ndim_x=5, mu=5, random_seed=22)
    X, Y = model.simulate(200)

    pkl_str = dill.dumps(model)
    model_loaded = dill.loads(pkl_str)
Example #30
Source File: test_dataset.py From rising with MIT License
def test_dill_helper(self):
    payload = dill.dumps((lambda x: x, (1, ), {}))
    res = dill_helper(payload)
    self.assertEqual(res, 1)