Python bson.binary.Binary() Examples
The following are 30 code examples of bson.binary.Binary().
You can vote up the examples you find useful or vote down those you don't; the reference above each example points back to the original project and source file. You may also want to check out all other available functions and classes of the bson.binary module.
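Before the project examples, here is a minimal, self-contained sketch of the pattern most of them share: wrapping raw or pickled bytes in Binary so that a BSON document can carry them. This sketch is not taken from any of the projects below; the dictionary contents are illustrative, and it assumes the module-level bson.encode/bson.decode helpers available in PyMongo 3.9 and later.

import pickle

import bson
from bson.binary import Binary, USER_DEFINED_SUBTYPE

# Wrap arbitrary bytes (here, a pickled dict) in Binary so BSON can store them.
obj = {"answer": 42}
blob = Binary(pickle.dumps(obj, protocol=2), USER_DEFINED_SUBTYPE)

# Binary is a bytes subclass that also carries a BSON subtype.
assert isinstance(blob, bytes)
assert blob.subtype == USER_DEFINED_SUBTYPE

# Round-trip through BSON: the bytes and the subtype survive encode/decode.
decoded = bson.decode(bson.encode({"payload": blob}))
assert decoded["payload"].subtype == USER_DEFINED_SUBTYPE
assert pickle.loads(decoded["payload"]) == obj

The same Binary(pickle.dumps(obj)) on write / pickle.loads(...) on read pattern recurs throughout the GridFS, cache, and job-store examples that follow.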
Example #1
Source File: grid_file.py From recruit with Apache License 2.0 | 6 votes |
def __flush_data(self, data):
    """Flush `data` to a chunk.
    """
    # Ensure the index, even if there's nothing to write, so
    # the filemd5 command always succeeds.
    self._ensure_index()
    if not data:
        return
    assert(len(data) <= self.chunk_size)

    chunk = {"files_id": self._file["_id"],
             "n": self._chunk_number,
             "data": Binary(data)}

    try:
        self._chunks.insert(chunk)
    except DuplicateKeyError:
        self._raise_file_exists(self._file['_id'])
    self._chunk_number += 1
    self._position += len(data)
Example #2
Source File: test_databaseapi.py From NEXT with Apache License 2.0 | 6 votes |
def test_to_db_fmt():
    import cPickle
    import numpy as np
    from bson.binary import Binary

    # standard types should be passed through
    assert to_db_fmt(1) == 1
    assert to_db_fmt(4.2) == 4.2
    assert to_db_fmt('foobarbaz') == 'foobarbaz'
    assert to_db_fmt(1+2j) == 1+2j

    # lists and dicts should be recursively formatted
    assert to_db_fmt([1, 1+2j, 'foo', [1,2.3]]) == [1, 1+2j, 'foo', [1,2.3]]
    assert to_db_fmt({'a': 1, 'b': ['foo', 2]}) == {'a': 1, 'b': ['foo', 2]}

    # numpy arrays should be converted to lists
    assert to_db_fmt(np.array([1,2,3])) == [1,2,3]

    # objects should be pickled
    x = object()
    assert to_db_fmt(x) == Binary(cPickle.dumps(x, protocol=2))
Example #3
Source File: DatabaseAPI.py From NEXT with Apache License 2.0 | 6 votes |
def from_db_fmt(x):
    # recursive descent through lists
    if isinstance(x, list):
        return [from_db_fmt(v) for v in x]

    # recursive descent through dicts
    if isinstance(x, dict):
        return {k: from_db_fmt(v) for k, v in x.items()}

    # further code occasionally serializes `ObjectId`s to json, so stringify them now
    if isinstance(x, ObjectId):
        return str(x)

    if isinstance(x, Binary):
        # this might be pickled data; let's attempt to deserialize it
        try:
            return cPickle.loads(x)
        except cPickle.UnpicklingError:
            # this wasn't pickled data. just return it.
            return x

    # not a datatype we need to deserialize! just pass it out
    return x
Example #4
Source File: grid_file.py From satori with Apache License 2.0 | 6 votes |
def __flush_data(self, data):
    """Flush `data` to a chunk.
    """
    # Ensure the index, even if there's nothing to write, so
    # the filemd5 command always succeeds.
    self.__ensure_indexes()
    self._file['md5'].update(data)

    if not data:
        return
    assert(len(data) <= self.chunk_size)

    chunk = {"files_id": self._file["_id"],
             "n": self._chunk_number,
             "data": Binary(data)}

    try:
        self._chunks.insert_one(chunk)
    except DuplicateKeyError:
        self._raise_file_exists(self._file['_id'])
    self._chunk_number += 1
    self._position += len(data)
Example #5
Source File: auth.py From satori with Apache License 2.0 | 6 votes |
def _authenticate_cram_md5(credentials, sock_info):
    """Authenticate using CRAM-MD5 (RFC 2195)
    """
    source = credentials.source
    username = credentials.username
    password = credentials.password
    # The password used as the mac key is the
    # same as what we use for MONGODB-CR
    passwd = _password_digest(username, password)
    cmd = SON([('saslStart', 1),
               ('mechanism', 'CRAM-MD5'),
               ('payload', Binary(b'')),
               ('autoAuthorize', 1)])
    response = sock_info.command(source, cmd)
    # MD5 as implicit default digest for digestmod is deprecated
    # in python 3.4
    mac = hmac.HMAC(key=passwd.encode('utf-8'), digestmod=md5)
    mac.update(response['payload'])
    challenge = username.encode('utf-8') + b' ' + b(mac.hexdigest())
    cmd = SON([('saslContinue', 1),
               ('conversationId', response['conversationId']),
               ('payload', Binary(challenge))])
    sock_info.command(source, cmd)
Example #6
Source File: grid_file.py From vnpy_crypto with MIT License | 6 votes |
def __flush_data(self, data):
    """Flush `data` to a chunk.
    """
    self.__ensure_indexes()
    if 'md5' in self._file:
        self._file['md5'].update(data)

    if not data:
        return
    assert(len(data) <= self.chunk_size)

    chunk = {"files_id": self._file["_id"],
             "n": self._chunk_number,
             "data": Binary(data)}

    try:
        self._chunks.insert_one(chunk, session=self._session)
    except DuplicateKeyError:
        self._raise_file_exists(self._file['_id'])
    self._chunk_number += 1
    self._position += len(data)
Example #7
Source File: auth.py From vnpy_crypto with MIT License | 6 votes |
def _authenticate_cram_md5(credentials, sock_info):
    """Authenticate using CRAM-MD5 (RFC 2195)
    """
    source = credentials.source
    username = credentials.username
    password = credentials.password
    # The password used as the mac key is the
    # same as what we use for MONGODB-CR
    passwd = _password_digest(username, password)
    cmd = SON([('saslStart', 1),
               ('mechanism', 'CRAM-MD5'),
               ('payload', Binary(b'')),
               ('autoAuthorize', 1)])
    response = sock_info.command(source, cmd)
    # MD5 as implicit default digest for digestmod is deprecated
    # in python 3.4
    mac = hmac.HMAC(key=passwd.encode('utf-8'), digestmod=hashlib.md5)
    mac.update(response['payload'])
    challenge = username.encode('utf-8') + b' ' + mac.hexdigest().encode('utf-8')
    cmd = SON([('saslContinue', 1),
               ('conversationId', response['conversationId']),
               ('payload', Binary(challenge))])
    sock_info.command(source, cmd)
Example #8
Source File: auth.py From learn_python3_spider with MIT License | 6 votes |
def _authenticate_cram_md5(credentials, sock_info):
    """Authenticate using CRAM-MD5 (RFC 2195)
    """
    source = credentials.source
    username = credentials.username
    password = credentials.password
    # The password used as the mac key is the
    # same as what we use for MONGODB-CR
    passwd = _password_digest(username, password)
    cmd = SON([('saslStart', 1),
               ('mechanism', 'CRAM-MD5'),
               ('payload', Binary(b'')),
               ('autoAuthorize', 1)])
    response = sock_info.command(source, cmd)
    # MD5 as implicit default digest for digestmod is deprecated
    # in python 3.4
    mac = hmac.HMAC(key=passwd.encode('utf-8'), digestmod=hashlib.md5)
    mac.update(response['payload'])
    challenge = username.encode('utf-8') + b' ' + mac.hexdigest().encode('utf-8')
    cmd = SON([('saslContinue', 1),
               ('conversationId', response['conversationId']),
               ('payload', Binary(challenge))])
    sock_info.command(source, cmd)
Example #9
Source File: grid_file.py From learn_python3_spider with MIT License | 6 votes |
def __flush_data(self, data):
    """Flush `data` to a chunk.
    """
    self.__ensure_indexes()
    if 'md5' in self._file:
        self._file['md5'].update(data)

    if not data:
        return
    assert(len(data) <= self.chunk_size)

    chunk = {"files_id": self._file["_id"],
             "n": self._chunk_number,
             "data": Binary(data)}

    try:
        self._chunks.insert_one(chunk, session=self._session)
    except DuplicateKeyError:
        self._raise_file_exists(self._file['_id'])
    self._chunk_number += 1
    self._position += len(data)
Example #10
Source File: auth.py From recruit with Apache License 2.0 | 6 votes |
def _authenticate_cram_md5(credentials, sock_info, cmd_func):
    """Authenticate using CRAM-MD5 (RFC 2195)
    """
    source, username, password = credentials
    # The password used as the mac key is the
    # same as what we use for MONGODB-CR
    passwd = _password_digest(username, password)
    cmd = SON([('saslStart', 1),
               ('mechanism', 'CRAM-MD5'),
               ('payload', Binary(b(''))),
               ('autoAuthorize', 1)])
    response, _ = cmd_func(sock_info, source, cmd)
    # MD5 as implicit default digest for digestmod is deprecated
    # in python 3.4
    mac = hmac.HMAC(key=passwd.encode('utf-8'), digestmod=_DMOD)
    mac.update(response['payload'])
    challenge = username.encode('utf-8') + b(' ') + b(mac.hexdigest())
    cmd = SON([('saslContinue', 1),
               ('conversationId', response['conversationId']),
               ('payload', Binary(challenge))])
    cmd_func(sock_info, source, cmd)
Example #11
Source File: test_bsonjs.py From python-bsonjs with Apache License 2.0 | 6 votes |
def test_binary(self):
    bin_type_dict = {"bin": Binary(b"\x00\x01\x02\x03\x04")}
    md5_type_dict = {
        "md5": Binary(b" n7\x18\xaf\t/\xd1\xd1/\x80\xca\xe7q\xcc\xac",
                      MD5_SUBTYPE)
    }
    custom_type_dict = {"custom": Binary(b"hello", USER_DEFINED_SUBTYPE)}

    self.round_trip(bin_type_dict)
    self.round_trip(md5_type_dict)
    self.round_trip(custom_type_dict)

    json_bin_dump = bsonjs_dumps(md5_type_dict)
    # Order should be $binary then $type.
    self.assertEqual(
        ('{ "md5" : { "$binary" : "IG43GK8JL9HRL4DK53HMrA==", '
         '"$type" : "05" } }'),
        json_bin_dump)

    json_bin_dump = bsonjs_dumps(custom_type_dict)
    self.assertTrue('"$type" : "80"' in json_bin_dump)

    # Check loading invalid binary
    self.assertRaises(ValueError, bsonjs.loads,
                      '{"a": {"$binary": "invalid", "$type": "80"}}')
Example #12
Source File: test_pickle_store.py From arctic with GNU Lesser General Public License v2.1 | 6 votes |
def test_pickle_store_future_version():
    data = {'foo': b'abcdefghijklmnopqrstuvwxyz'}
    version = {'_id': sentinel._id,
               'blob': '__chunked__VERSION_ONE_MILLION'}
    coll = Mock()
    arctic_lib = Mock()
    datap = compressHC(cPickle.dumps(data, protocol=cPickle.HIGHEST_PROTOCOL))
    data_1 = datap[0:5]
    data_2 = datap[5:]
    coll.find.return_value = [{'data': Binary(data_1), 'symbol': 'sentinel.symbol', 'segment': 0},
                              {'data': Binary(data_2), 'symbol': 'sentinel.symbol', 'segment': 1},
                              ]
    arctic_lib.get_top_level_collection.return_value = coll

    ps = PickleStore()
    with pytest.raises(UnsupportedPickleStoreVersion) as e:
        ps.read(arctic_lib, version, sentinel.symbol)
    assert('unsupported version of pickle store' in str(e.value))
Example #13
Source File: auth.py From opsbro with MIT License | 6 votes |
def _authenticate_cram_md5(credentials, sock_info):
    """Authenticate using CRAM-MD5 (RFC 2195)
    """
    source = credentials.source
    username = credentials.username
    password = credentials.password
    # The password used as the mac key is the
    # same as what we use for MONGODB-CR
    passwd = _password_digest(username, password)
    cmd = SON([('saslStart', 1),
               ('mechanism', 'CRAM-MD5'),
               ('payload', Binary(b'')),
               ('autoAuthorize', 1)])
    response = sock_info.command(source, cmd)
    # MD5 as implicit default digest for digestmod is deprecated
    # in python 3.4
    mac = hmac.HMAC(key=passwd.encode('utf-8'), digestmod=md5)
    mac.update(response['payload'])
    challenge = username.encode('utf-8') + b' ' + b(mac.hexdigest())
    cmd = SON([('saslContinue', 1),
               ('conversationId', response['conversationId']),
               ('payload', Binary(challenge))])
    sock_info.command(source, cmd)
Example #14
Source File: datastore.py From lightflow with BSD 3-Clause "New" or "Revised" License | 6 votes |
def _encode_value(self, value):
    """ Encodes the value such that it can be stored into MongoDB.

    Any primitive types are stored directly into MongoDB, while non-primitive types
    are pickled and stored as GridFS objects. The id pointing to a GridFS object
    replaces the original value.

    Args:
        value (object): The object that should be encoded for storing in MongoDB.

    Returns:
        object: The encoded value ready to be stored in MongoDB.
    """
    if isinstance(value, (int, float, str, bool, datetime)):
        return value
    elif isinstance(value, list):
        return [self._encode_value(item) for item in value]
    elif isinstance(value, dict):
        result = {}
        for key, item in value.items():
            result[key] = self._encode_value(item)
        return result
    else:
        return self._gridfs.put(Binary(pickle.dumps(value)),
                                workflow_id=self._workflow_id)
Example #15
Source File: test_pickle_store.py From arctic with GNU Lesser General Public License v2.1 | 6 votes |
def test_pickle_chunk_V1_read():
    data = {'foo': b'abcdefghijklmnopqrstuvwxyz'}
    version = {'_id': sentinel._id,
               'blob': '__chunked__'}
    coll = Mock()
    arctic_lib = Mock()
    datap = compressHC(cPickle.dumps(data, protocol=cPickle.HIGHEST_PROTOCOL))
    data_1 = datap[0:5]
    data_2 = datap[5:]
    coll.find.return_value = [{'data': Binary(data_1), 'symbol': 'sentinel.symbol', 'segment': 0},
                              {'data': Binary(data_2), 'symbol': 'sentinel.symbol', 'segment': 1},
                              ]
    arctic_lib.get_top_level_collection.return_value = coll

    ps = PickleStore()
    assert(data == ps.read(arctic_lib, version, sentinel.symbol))
Example #16
Source File: cache.py From CrisisLex with MIT License | 5 votes |
def store(self, key, value):
    from bson.binary import Binary
    now = datetime.datetime.utcnow()
    blob = Binary(pickle.dumps(value))
    self.col.insert({'created': now, '_id': key, 'value': blob})
Example #17
Source File: how_to_custom_arctic_library.py From arctic with GNU Lesser General Public License v2.1 | 5 votes |
def store(self, thing):
    """
    Simple persistence method
    """
    to_store = {'field1': thing.field1,
                'date_field': thing.date_field,
                }
    to_store['stuff'] = Binary(cPickle.dumps(thing.stuff))
    # Respect any soft-quota on write - raises if stats().totals.size > quota
    self._arctic_lib.check_quota()
    self._collection.insert_one(to_store)
Example #18
Source File: DatabaseAPI.py From NEXT with Apache License 2.0 | 5 votes |
def to_db_fmt(x):
    # leave None as is
    if x is None:
        return x

    # convert tuples to lists
    if isinstance(x, tuple):
        return to_db_fmt(list(x))

    # recursive descent through lists
    if isinstance(x, list):
        return [to_db_fmt(v) for v in x]

    # recursive descent through dicts
    if isinstance(x, dict):
        return {k: to_db_fmt(v) for k, v in x.items()}

    # convert Numpy arrays to python arrays
    # note: assumes that .tolist() will return only database-acceptable types
    if isinstance(x, np.ndarray):
        return x.tolist()

    # types that MongoDB can natively store
    if type(x) in {bool, int, float, long, complex, str, unicode, datetime}:
        return x

    # interface types. don't repickle these
    if type(x) in {Binary, ObjectId}:
        return x

    # pickle everything else, wrap in MongoDB `Binary`
    return Binary(cPickle.dumps(x, protocol=2))
Example #19
Source File: mongodb.py From Tautulli with GNU General Public License v3.0 | 5 votes |
def update_job(self, job):
    changes = {
        'next_run_time': datetime_to_utc_timestamp(job.next_run_time),
        'job_state': Binary(pickle.dumps(job.__getstate__(), self.pickle_protocol))
    }
    result = self.collection.update({'_id': job.id}, {'$set': changes})
    if result and result['n'] == 0:
        raise JobLookupError(job.id)
Example #20
Source File: mongodb.py From Tautulli with GNU General Public License v3.0 | 5 votes |
def add_job(self, job):
    try:
        self.collection.insert({
            '_id': job.id,
            'next_run_time': datetime_to_utc_timestamp(job.next_run_time),
            'job_state': Binary(pickle.dumps(job.__getstate__(), self.pickle_protocol))
        })
    except DuplicateKeyError:
        raise ConflictingIdError(job.id)
Example #21
Source File: mongodb.py From bazarr with GNU General Public License v3.0 | 5 votes |
def update_job(self, job):
    changes = {
        'next_run_time': datetime_to_utc_timestamp(job.next_run_time),
        'job_state': Binary(pickle.dumps(job.__getstate__(), self.pickle_protocol))
    }
    result = self.collection.update({'_id': job.id}, {'$set': changes})
    if result and result['n'] == 0:
        raise JobLookupError(job.id)
Example #22
Source File: session_mongodb.py From INGInious with GNU Affero General Public License v3.0 | 5 votes |
def encode(self, sessiondict):
    return dict((k, Binary(Store.encode(self, v), USER_DEFINED_SUBTYPE)
                 if needs_encode(v) else v)
                for (k, v) in sessiondict.items())
Example #23
Source File: session_mongodb.py From INGInious with GNU Affero General Public License v3.0 | 5 votes |
def decode(self, sessiondict):
    return dict((k, Store.decode(self, v)
                 if isinstance(v, Binary) and v.subtype == USER_DEFINED_SUBTYPE
                 else v)
                for (k, v) in sessiondict.items())
Example #24
Source File: mongodb.py From bazarr with GNU General Public License v3.0 | 5 votes |
def add_job(self, job):
    try:
        self.collection.insert({
            '_id': job.id,
            'next_run_time': datetime_to_utc_timestamp(job.next_run_time),
            'job_state': Binary(pickle.dumps(job.__getstate__(), self.pickle_protocol))
        })
    except DuplicateKeyError:
        raise ConflictingIdError(job.id)
Example #25
Source File: cache.py From pheme-twitter-conversation-collection with Apache License 2.0 | 5 votes |
def store(self, key, value):
    from bson.binary import Binary
    now = datetime.datetime.utcnow()
    blob = Binary(pickle.dumps(value))
    self.col.insert({'created': now, '_id': key, 'value': blob})
Example #26
Source File: client_session.py From learn_python3_spider with MIT License | 5 votes |
def __init__(self):
    # Ensure id is type 4, regardless of CodecOptions.uuid_representation.
    self.session_id = {'id': Binary(uuid.uuid4().bytes, 4)}
    self.last_use = monotonic.time()
    self._transaction_id = 0
    self.dirty = False
Example #27
Source File: auth.py From learn_python3_spider with MIT License | 5 votes |
def _authenticate_plain(credentials, sock_info):
    """Authenticate using SASL PLAIN (RFC 4616)
    """
    source = credentials.source
    username = credentials.username
    password = credentials.password
    payload = ('\x00%s\x00%s' % (username, password)).encode('utf-8')
    cmd = SON([('saslStart', 1),
               ('mechanism', 'PLAIN'),
               ('payload', Binary(payload)),
               ('autoAuthorize', 1)])
    sock_info.command(source, cmd)
Example #28
Source File: cache.py From twitter-stock-recommendation with MIT License | 5 votes |
def store(self, key, value):
    from bson.binary import Binary
    now = datetime.datetime.utcnow()
    blob = Binary(pickle.dumps(value))
    self.col.insert({'created': now, '_id': key, 'value': blob})
Example #29
Source File: auth.py From opsbro with MIT License | 5 votes |
def _authenticate_plain(credentials, sock_info):
    """Authenticate using SASL PLAIN (RFC 4616)
    """
    source = credentials.source
    username = credentials.username
    password = credentials.password
    payload = ('\x00%s\x00%s' % (username, password)).encode('utf-8')
    cmd = SON([('saslStart', 1),
               ('mechanism', 'PLAIN'),
               ('payload', Binary(payload)),
               ('autoAuthorize', 1)])
    sock_info.command(source, cmd)
Example #30
Source File: Logger.py From ml_board with MIT License | 5 votes |
def add_image(self, image_name, image):
    processed_image = Binary(pickle.dumps(image, protocol=2))
    self.runs.update_one({"Experimental Parameters.Time": self.date},
                         {'$set': {"Images." + image_name: processed_image}},
                         upsert=True)