Python six.moves.cPickle.dumps() Examples
The following are 28 code examples of six.moves.cPickle.dumps(). You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also want to check out all available functions/classes of the module six.moves.cPickle, or try the search function.
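Before the project examples, here is a minimal, self-contained sketch of the basic dumps()/loads() round trip. The dictionary and helper below are illustrative, not taken from any project on this page; on Python 2, six.moves.cPickle resolves to the C-accelerated cPickle module, while on Python 3 it resolves to the standard pickle module.

from six.moves import cPickle

def roundtrip(obj):
    """Serialize an object with dumps() and restore it with loads()."""
    blob = cPickle.dumps(obj, protocol=cPickle.HIGHEST_PROTOCOL)
    return cPickle.loads(blob)

data = {'answer': 42, 'items': [1, 2, 3]}
assert roundtrip(data) == data
assert roundtrip(data) is not data  # a new, equal object comes back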
Example #1
Source File: __init__.py From picklable-itertools with MIT License | 6 votes |
def verify_tee(n, original, seed):
    try:
        state = random.getstate()
        # Seed with the caller-supplied value so the interleaving below is
        # reproducible; the original RNG state is restored in the finally
        # block. (This call appears to have been dropped in extraction.)
        random.seed(seed)
        iterators = list(tee(original, n=n))
        results = [[] for _ in range(n)]
        exhausted = [False] * n
        while not all(exhausted):
            # Upper argument of random.randint is inclusive. Argh.
            i = random.randint(0, n - 1)
            if not exhausted[i]:
                if len(results[i]) == len(original):
                    assert_raises(StopIteration, next, iterators[i])
                    assert results[i] == original
                    exhausted[i] = True
                else:
                    if random.randint(0, 1):
                        iterators[i] = cPickle.loads(
                            cPickle.dumps(iterators[i]))
                    elem = next(iterators[i])
                    results[i].append(elem)
    finally:
        random.setstate(state)
Example #2
Source File: test_toolkit.py From oddt with BSD 3-Clause "New" or "Revised" License | 6 votes |
def test_pickle_protein():
    """Pickle proteins"""
    # Proteins
    rec = next(oddt.toolkit.readfile('pdb', xiap_receptor))

    # generate atom_dict
    assert rec.atom_dict is not None
    assert rec._atom_dict is not None

    pickled_rec = loads(dumps(rec))
    assert pickled_rec.protein is False
    assert pickled_rec._atom_dict is not None

    rec.protein = True
    # setting protein property should clean atom_dict cache
    assert rec._atom_dict is None

    # generate atom_dict
    assert rec.atom_dict is not None
    pickled_rec = loads(dumps(rec))
    assert pickled_rec.protein is True
    assert pickled_rec._atom_dict is not None
Example #3
Source File: test_failure.py From taskflow with Apache License 2.0 | 6 votes |
def test_causes_pickle(self):
    f = None
    try:
        self._raise_many(["Still still not working",
                          "Still not working", "Not working"])
    except RuntimeError:
        f = failure.Failure()

    self.assertIsNotNone(f)
    p_f = pickle.dumps(f)
    f = pickle.loads(p_f)

    self.assertEqual(2, len(f.causes))
    self.assertEqual("Still not working", f.causes[0].exception_str)
    self.assertEqual("Not working", f.causes[1].exception_str)

    f = f.causes[0]
    self.assertEqual(1, len(f.causes))
    self.assertEqual("Not working", f.causes[0].exception_str)

    f = f.causes[0]
    self.assertEqual(0, len(f.causes))
Example #4
Source File: test_dilated_convolution_2d.py From chainer with MIT License | 6 votes |
def check_pickling(self, x_data):
    x = chainer.Variable(x_data)
    y = self.link(x)
    y_data1 = y.data

    del x, y

    pickled = pickle.dumps(self.link, -1)
    del self.link
    self.link = pickle.loads(pickled)

    x = chainer.Variable(x_data)
    y = self.link(x)
    y_data2 = y.data

    testing.assert_allclose(y_data1, y_data2, atol=0, rtol=0)
Example #5
Source File: test_pickle_store.py From arctic with GNU Lesser General Public License v2.1 | 6 votes |
def test_pickle_chunk_V1_read():
    data = {'foo': b'abcdefghijklmnopqrstuvwxyz'}
    version = {'_id': sentinel._id, 'blob': '__chunked__'}
    coll = Mock()
    arctic_lib = Mock()
    datap = compressHC(cPickle.dumps(data, protocol=cPickle.HIGHEST_PROTOCOL))
    data_1 = datap[0:5]
    data_2 = datap[5:]
    coll.find.return_value = [
        {'data': Binary(data_1), 'symbol': 'sentinel.symbol', 'segment': 0},
        {'data': Binary(data_2), 'symbol': 'sentinel.symbol', 'segment': 1},
    ]
    arctic_lib.get_top_level_collection.return_value = coll

    ps = PickleStore()
    assert(data == ps.read(arctic_lib, version, sentinel.symbol))
Example #6
Source File: test_pickle_store.py From arctic with GNU Lesser General Public License v2.1 | 6 votes |
def test_pickle_store_future_version():
    data = {'foo': b'abcdefghijklmnopqrstuvwxyz'}
    version = {'_id': sentinel._id, 'blob': '__chunked__VERSION_ONE_MILLION'}
    coll = Mock()
    arctic_lib = Mock()
    datap = compressHC(cPickle.dumps(data, protocol=cPickle.HIGHEST_PROTOCOL))
    data_1 = datap[0:5]
    data_2 = datap[5:]
    coll.find.return_value = [
        {'data': Binary(data_1), 'symbol': 'sentinel.symbol', 'segment': 0},
        {'data': Binary(data_2), 'symbol': 'sentinel.symbol', 'segment': 1},
    ]
    arctic_lib.get_top_level_collection.return_value = coll

    ps = PickleStore()
    with pytest.raises(UnsupportedPickleStoreVersion) as e:
        ps.read(arctic_lib, version, sentinel.symbol)
    assert('unsupported version of pickle store' in str(e.value))
Example #7
Source File: test_convolution_nd.py From chainer with MIT License | 6 votes |
def test_pickling(self, backend_config):
    x_data, = self.generate_inputs()
    link = self.create_link(self.generate_params())
    link.to_device(backend_config.device)

    x = chainer.Variable(x_data)
    x.to_device(backend_config.device)
    y = link(x)
    y_data1 = y.data

    del x, y

    pickled = pickle.dumps(link, -1)
    del link
    link = pickle.loads(pickled)

    x = chainer.Variable(x_data)
    x.to_device(backend_config.device)
    y = link(x)
    y_data2 = y.data

    testing.assert_allclose(y_data1, y_data2, atol=0, rtol=0)
Example #8
Source File: test_convolution_2d.py From chainer with MIT License | 6 votes |
def test_pickling(self, backend_config):
    x_data, = self.generate_inputs()
    link = self.create_link(self.generate_params())
    link.to_device(backend_config.device)

    x = chainer.Variable(x_data)
    x.to_device(backend_config.device)
    y = link(x)
    y_data1 = y.data

    del x, y

    pickled = pickle.dumps(link, -1)
    del link
    link = pickle.loads(pickled)

    x = chainer.Variable(x_data)
    x.to_device(backend_config.device)
    y = link(x)
    y_data2 = y.data

    testing.assert_allclose(y_data1, y_data2, atol=0, rtol=0)
Example #9
Source File: __init__.py From picklable-itertools with MIT License | 6 votes |
def test_xrange():
    yield assert_equal, list(xrange(10)), list(_xrange(10))
    yield assert_equal, list(xrange(10, 15)), list(_xrange(10, 15))
    yield assert_equal, list(xrange(10, 20, 2)), list(_xrange(10, 20, 2))
    yield assert_equal, list(xrange(5, 1, -1)), list(_xrange(5, 1, -1))
    yield (assert_equal, list(xrange(5, 55, 3)),
           list(cPickle.loads(cPickle.dumps(_xrange(5, 55, 3)))))
    yield assert_equal, _xrange(5).index(4), 4
    yield assert_equal, _xrange(5, 9).index(6), 1
    yield assert_equal, _xrange(8, 24, 3).index(11), 1
    yield assert_equal, _xrange(25, 4, -5).index(25), 0
    yield assert_equal, _xrange(28, 7, -7).index(14), 2
    yield assert_raises, ValueError, _xrange(2, 9, 2).index, 3
    yield assert_raises, ValueError, _xrange(2, 20, 2).index, 9
    yield assert_equal, _xrange(5).count(5), 0
    yield assert_equal, _xrange(5).count(4), 1
    yield assert_equal, _xrange(4, 9).count(4), 1
    yield assert_equal, _xrange(3, 9, 2).count(4), 0
    yield assert_equal, _xrange(3, 9, 2).count(5), 1
    yield assert_equal, _xrange(3, 9, 2).count(20), 0
    yield assert_equal, _xrange(9, 3).count(5), 0
    yield assert_equal, _xrange(3, 10, -1).count(5), 0
    yield assert_equal, _xrange(10, 3, -1).count(5), 1
    yield assert_equal, _xrange(10, 0, -2).count(6), 1
    yield assert_equal, _xrange(10, -1, -3).count(7), 1
Example #10
Source File: test_hdf5.py From attention-lvcsr with MIT License | 6 votes |
def test_pickling(self):
    try:
        features = numpy.arange(360, dtype='uint16').reshape((10, 36))
        h5file = h5py.File('file.hdf5', mode='w')
        h5file['features'] = features
        split_dict = {'train': {'features': (0, 10, None, '.')}}
        h5file.attrs['split'] = H5PYDataset.create_split_array(split_dict)
        dataset = cPickle.loads(
            cPickle.dumps(H5PYDataset(h5file, which_sets=('train',))))
        # Make sure _out_of_memory_{open,close} accesses
        # external_file_handle rather than _external_file_handle
        dataset._out_of_memory_open()
        dataset._out_of_memory_close()
        assert dataset.data_sources is None
    finally:
        os.remove('file.hdf5')
Example #11
Source File: xpediteData.py From Xpedite with Apache License 2.0 | 5 votes |
def commit(self):
    """Commits accumulated data to the xpedite data file"""
    offset = 0
    layout = {}
    for key in self.dataTable:
        if not isinstance(self.dataTable[key].data, str):
            isMarshalled = True
            self.dataTable[key].binData = pickle.dumps(
                self.dataTable[key].data, pickle.HIGHEST_PROTOCOL)
        else:
            isMarshalled = False
            self.dataTable[key].binData = self.dataTable[key].data
        dataSize = len(self.dataTable[key].binData)
        layout[key] = LayoutEntry(offset, isMarshalled, dataSize)
        offset += dataSize
    with open(self.dataFile, 'wb') as binFile:
        pTable = pickle.dumps(layout, pickle.HIGHEST_PROTOCOL)
        pTableSize = len(pTable)
        # convert to bytes
        binBuffer = create_string_buffer(8)
        struct.pack_into('i', binBuffer, 0, pTableSize)
        binFile.write(binBuffer)
        binFile.write(pTable)
        for key in self.dataTable:
            binFile.write(self.dataTable[key].binData)
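For orientation, commit() above lays the file out as an 8-byte header buffer (whose first four bytes hold the pickled layout-table size), then the layout table, then the raw payloads. A reader for that header and table might look like the sketch below; load_layout() is a hypothetical helper for illustration, not part of Xpedite.

import pickle
import struct

def load_layout(data_file):
    """Sketch: read back the layout table written by commit() above."""
    with open(data_file, 'rb') as bin_file:
        header = bin_file.read(8)  # fixed-size header buffer
        (table_size,) = struct.unpack_from('i', header, 0)
        layout = pickle.loads(bin_file.read(table_size))
    return layout  # maps key -> LayoutEntry(offset, isMarshalled, dataSize)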
Example #12
Source File: test_hdf5.py From attention-lvcsr with MIT License | 5 votes |
def test_data_stream_pickling(self):
    stream = DataStream(H5PYDataset(self.h5file, which_sets=('train',)),
                        iteration_scheme=SequentialScheme(100, 10))
    cPickle.loads(cPickle.dumps(stream))
    stream.close()
Example #13
Source File: utils.py From tfmesos with BSD 3-Clause "New" or "Revised" License | 5 votes |
def send(fd, o):
    d = pickle.dumps(o)
    fd.send(struct.pack('>I', len(d)) + d)
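send() frames each message as a 4-byte big-endian length followed by the pickled payload. A matching receiver would read the length prefix first, then exactly that many payload bytes; the recv() below is a sketch of that counterpart, not the actual tfmesos helper.

import pickle
import struct

def recv(fd):
    """Sketch: receive one message framed by send() above."""
    def read_exactly(n):
        buf = b''
        while len(buf) < n:
            chunk = fd.recv(n - len(buf))
            if not chunk:
                raise EOFError('connection closed mid-message')
            buf += chunk
        return buf
    (length,) = struct.unpack('>I', read_exactly(4))  # 4-byte length prefix
    return pickle.loads(read_exactly(length))         # then the payload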
Example #14
Source File: networkx_graph.py From vitrage with Apache License 2.0 | 5 votes |
def json_output_graph(self, **kwargs):
    """Supports both 1.10<=networkx<2.0 and networkx>=2.0 by returning
    the same json output regardless of the networkx version

    :return: graph in json format
    """
    # TODO(annarez): when we decide to support networkx 2.0 with
    # versioning of Vitrage, we can move part of the logic to
    # vitrageclient
    node_link_data = json_graph.node_link_data(self._g)
    node_link_data.update(kwargs)

    vitrage_id_to_index = dict()
    for index, node in enumerate(node_link_data['nodes']):
        vitrage_id_to_index[node[VProps.VITRAGE_ID]] = index
        if VProps.ID in self._g.nodes[node[VProps.ID]]:
            node[VProps.ID] = self._g.nodes[node[VProps.ID]][VProps.ID]
        node[VProps.GRAPH_INDEX] = index

    vers = nx.__version__
    if vers >= '2.0':
        for i in range(len(node_link_data['links'])):
            node_link_data['links'][i]['source'] = vitrage_id_to_index[
                node_link_data['links'][i]['source']]
            node_link_data['links'][i]['target'] = vitrage_id_to_index[
                node_link_data['links'][i]['target']]

    if kwargs.get('raw', False):
        return node_link_data
    else:
        return json.dumps(node_link_data)
Example #15
Source File: blob.py From PANet with MIT License | 5 votes |
def serialize(obj):
    """Serialize a Python object using pickle and encode it as an array
    of float32 values so that it can be fed into the workspace. See
    deserialize().
    """
    return np.fromstring(pickle.dumps(obj), dtype=np.uint8).astype(np.float32)
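The docstring refers to a deserialize() counterpart that is not shown on this page; a plausible inverse looks like the sketch below. Note also that np.fromstring is deprecated in modern NumPy, where np.frombuffer is the equivalent for this byte-level use.

import numpy as np
import pickle

def deserialize(arr):
    """Sketch: recover the pickled object from the float32 array
    produced by serialize() above."""
    return pickle.loads(arr.astype(np.uint8).tobytes())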
Example #16
Source File: networkx_graph.py From vitrage with Apache License 2.0 | 5 votes |
def write_gpickle(self):
    return cPickle.dumps(self._g, cPickle.HIGHEST_PROTOCOL)
Example #17
Source File: blob.py From PMFNet with MIT License | 5 votes |
def serialize(obj):
    """Serialize a Python object using pickle and encode it as an array
    of float32 values so that it can be fed into the workspace. See
    deserialize().
    """
    return np.fromstring(pickle.dumps(obj), dtype=np.uint8).astype(np.float32)
Example #18
Source File: utils.py From vitrage with Apache License 2.0 | 5 votes |
def compress_obj(obj, level=9):
    str_data = cPickle.dumps(obj)
    data = base64.b64encode(zlib.compress(str_data, level))
    return data
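A matching decompressor simply reverses the three steps: base64-decode, zlib-decompress, then unpickle. The decompress_obj() below is a sketch of that inverse, not necessarily vitrage's own helper.

import base64
import zlib
from six.moves import cPickle

def decompress_obj(data):
    """Sketch: invert compress_obj() above."""
    return cPickle.loads(zlib.decompress(base64.b64decode(data)))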
Example #19
Source File: executor.py From pymesos with BSD 3-Clause "New" or "Revised" License | 5 votes |
def reply_status(self, driver, proc_id, state, message='', data=tuple()):
    update = dict(
        task_id=dict(value=str(proc_id)),
        agent_id=self.agent_id,
        timestamp=time.time(),
        state=state,
    )

    if message:
        update['message'] = message

    if data:
        update['data'] = encode_data(pickle.dumps(data))

    driver.sendStatusUpdate(update)
Example #20
Source File: scheduler.py From pymesos with BSD 3-Clause "New" or "Revised" License | 5 votes |
def send_data(self, pid, type, data):
    if self.driver.aborted:
        raise RuntimeError('driver already aborted')

    msg = encode_data(pickle.dumps((pid, type, data)))
    for agent_id, procs in list(self.agent_to_proc.items()):
        if pid in procs:
            self.driver.sendFrameworkMessage(
                self.executor['executor_id'],
                dict(value=agent_id),
                msg
            )
            return

    raise RuntimeError('Cannot find agent for pid %s' % (pid,))
Example #21
Source File: scheduler.py From pymesos with BSD 3-Clause "New" or "Revised" License | 5 votes |
def _init_task(self, proc, offer):
    resources = [
        dict(
            name='cpus',
            type='SCALAR',
            scalar=dict(value=proc.cpus),
        ),
        dict(
            name='mem',
            type='SCALAR',
            scalar=dict(value=proc.mem),
        )
    ]

    if proc.gpus > 0:
        resources.append(
            dict(
                name='gpus',
                type='SCALAR',
                scalar=dict(value=proc.gpus),
            )
        )

    task = dict(
        task_id=dict(value=str(proc.id)),
        name=repr(proc),
        executor=self.executor,
        agent_id=offer['agent_id'],
        data=encode_data(pickle.dumps(proc.params)),
        resources=resources,
    )
    return task
Example #22
Source File: test_reactors.py From armi with Apache License 2.0 | 5 votes |
def test_isPickleable(self):
    loaded = cPickle.loads(cPickle.dumps(self.r))

    # ensure we didn't break the current reactor
    self.assertIs(self.r.core.spatialGrid.armiObject, self.r.core)

    # make sure that the loaded reactor and grid are aligned
    self.assertIs(loaded.core.spatialGrid.armiObject, loaded.core)

    self.assertTrue(
        all(
            isinstance(key, grids.LocationBase)
            for key in loaded.core.childrenByLocator.keys()
        )
    )

    loc = loaded.core.spatialGrid[0, 0, 0]
    self.assertIs(loc.grid, loaded.core.spatialGrid)
    self.assertEqual(loaded.core.childrenByLocator[loc], loaded.core[0])

    allIDs = set()

    def checkAdd(comp):
        self.assertNotIn(id(comp), allIDs)
        self.assertNotIn(id(comp.p), allIDs)
        allIDs.add(id(comp))
        allIDs.add(id(comp.p))

    # check a few locations to be equivalent
    for a0, a1 in zip(self.r.core, loaded.core):
        self.assertEqual(str(a0.getLocation()), str(a1.getLocation()))
        self.assertIs(a0.spatialLocator.grid, self.r.core.spatialGrid)
        self.assertIs(a1.spatialLocator.grid, loaded.core.spatialGrid)
        checkAdd(a0)
        checkAdd(a1)
        for b0, b1 in zip(a0, a1):
            self.assertIs(b0.spatialLocator.grid, a0.spatialGrid)
            self.assertIs(b1.spatialLocator.grid, a1.spatialGrid)
            self.assertEqual(str(b0.getLocation()), str(b1.getLocation()))
            self.assertEqual(b0.getSymmetryFactor(), b1.getSymmetryFactor())
            self.assertEqual(b0.getHMMoles(), b1.getHMMoles())
            checkAdd(b0)
            checkAdd(b1)
Example #23
Source File: test_utils.py From attention-lvcsr with MIT License | 5 votes |
def test_load(self):
    instance = cPickle.loads(cPickle.dumps(DummyClass()))
    assert_equal(instance.bulky_attr, list(range(100)))
    assert instance.non_picklable is not None
Example #24
Source File: test_pickle_unpickle_theano_fn.py From attention-lvcsr with MIT License | 5 votes |
def test_pickle_unpickle_with_reoptimization():
    mode = theano.config.mode
    if mode in ["DEBUG_MODE", "DebugMode"]:
        mode = "FAST_RUN"
    x1 = T.fmatrix('x1')
    x2 = T.fmatrix('x2')
    x3 = theano.shared(numpy.ones((10, 10), dtype=floatX))
    x4 = theano.shared(numpy.ones((10, 10), dtype=floatX))
    y = T.sum(T.sum(T.sum(x1 ** 2 + x2) + x3) + x4)

    updates = OrderedDict()
    updates[x3] = x3 + 1
    updates[x4] = x4 + 1
    f = theano.function([x1, x2], y, updates=updates, mode=mode)
    # now pickle the compiled theano fn
    string_pkl = pickle.dumps(f, -1)

    in1 = numpy.ones((10, 10), dtype=floatX)
    in2 = numpy.ones((10, 10), dtype=floatX)

    # test unpickle with optimization
    default = theano.config.reoptimize_unpickled_function
    try:
        # the default is True
        theano.config.reoptimize_unpickled_function = True
        f_ = pickle.loads(string_pkl)
        assert f(in1, in2) == f_(in1, in2)
    finally:
        theano.config.reoptimize_unpickled_function = default
Example #25
Source File: test_hdf5.py From attention-lvcsr with MIT License | 5 votes |
def test_pickling(self):
    dataset = cPickle.loads(cPickle.dumps(self.dataset))
    assert_equal(len(dataset.nodes), 1)
Example #26
Source File: test_transformers.py From attention-lvcsr with MIT License | 5 votes |
def test_unpack_picklable(self):
    wrapper = Unpack(self.stream_np)
    epoch = wrapper.get_epoch_iterator()
    cPickle.dumps(epoch)
Example #27
Source File: test_pickle_unpickle_theano_fn.py From attention-lvcsr with MIT License | 5 votes |
def test_pickle_unpickle_without_reoptimization():
    mode = theano.config.mode
    if mode in ["DEBUG_MODE", "DebugMode"]:
        mode = "FAST_RUN"
    x1 = T.fmatrix('x1')
    x2 = T.fmatrix('x2')
    x3 = theano.shared(numpy.ones((10, 10), dtype=floatX))
    x4 = theano.shared(numpy.ones((10, 10), dtype=floatX))
    y = T.sum(T.sum(T.sum(x1**2 + x2) + x3) + x4)

    updates = OrderedDict()
    updates[x3] = x3 + 1
    updates[x4] = x4 + 1
    f = theano.function([x1, x2], y, updates=updates, mode=mode)
    # now pickle the compiled theano fn
    string_pkl = pickle.dumps(f, -1)

    # compute f value
    in1 = numpy.ones((10, 10), dtype=floatX)
    in2 = numpy.ones((10, 10), dtype=floatX)

    # test unpickle without optimization
    default = theano.config.reoptimize_unpickled_function
    try:
        # the default is True
        theano.config.reoptimize_unpickled_function = False
        f_ = pickle.loads(string_pkl)
        assert f(in1, in2) == f_(in1, in2)
    finally:
        theano.config.reoptimize_unpickled_function = default
Example #28
Source File: test_server.py From fuel with MIT License | 5 votes |
def test_pickling(self):
    try:
        self.stream = cPickle.loads(cPickle.dumps(self.stream))
        # regression test: pickling of an unpickled stream used to fail
        cPickle.dumps(self.stream)
        server_data = self.stream.get_epoch_iterator()
        expected_data = get_stream().get_epoch_iterator()
        for _, s, e in zip(range(3), server_data, expected_data):
            for data in zip(s, e):
                assert_allclose(*data, rtol=1e-3)
    except AssertionError as e:
        raise SkipTest("Skip test that failed with: {}".format(e))
    assert_raises(StopIteration, next, server_data)