Python pymongo.errors.DuplicateKeyError() Examples
The following are 30 code examples of pymongo.errors.DuplicateKeyError().
Each example notes the original project, source file, and license.
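Before the project examples, a minimal sketch of the error itself may help: DuplicateKeyError is raised when a write would violate a unique index, including the implicit unique index on _id. The sketch below assumes a locally reachable MongoDB server; the database, collection, and field names are hypothetical.

from pymongo import MongoClient, ASCENDING
from pymongo.errors import DuplicateKeyError

client = MongoClient("mongodb://localhost:27017")
users = client["example_db"]["users"]
users.create_index([("email", ASCENDING)], unique=True)

users.insert_one({"email": "alice@example.com"})
try:
    # Second insert with the same value violates the unique index.
    users.insert_one({"email": "alice@example.com"})
except DuplicateKeyError as err:
    print("duplicate:", err.details)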
Example #1
Source File: hgnc.py From scout with BSD 3-Clause "New" or "Revised" License
def load_hgnc_bulk(self, gene_objs):
    """Load a bulk of hgnc gene objects

    Raises IntegrityError if there are any write concerns

    Args:
        gene_objs(iterable(scout.models.hgnc_gene))

    Returns:
        result (pymongo.results.InsertManyResult)
    """
    LOG.info("Loading gene bulk with length %s", len(gene_objs))
    try:
        result = self.hgnc_collection.insert_many(gene_objs)
    except (DuplicateKeyError, BulkWriteError) as err:
        raise IntegrityError(err)

    return result
Example #2
Source File: opcount.py From vj4 with GNU Affero General Public License v3.0
async def inc(op: str, ident: str, period_secs: int, max_operations: int):
    coll = db.coll('opcount')
    cur_time = int(time.time())
    begin_at = datetime.datetime.utcfromtimestamp(cur_time - cur_time % period_secs)
    expire_at = begin_at + datetime.timedelta(seconds=period_secs)
    try:
        doc = await coll.find_one_and_update(filter={'ident': ident,
                                                     'begin_at': begin_at,
                                                     'expire_at': expire_at,
                                                     op: {'$not': {'$gte': max_operations}}},
                                             update={'$inc': {op: 1}},
                                             upsert=True,
                                             return_document=ReturnDocument.AFTER)
        return doc
    except errors.DuplicateKeyError:
        raise error.OpcountExceededError(op, period_secs, max_operations)
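The pattern above is worth unpacking: the filter excludes any document whose counter has already reached max_operations, so once the limit is hit the upsert tries to create a second document for the same (ident, begin_at) window and collides with a unique index, surfacing as DuplicateKeyError. A rough synchronous sketch of the same idiom follows; the index, collection, and exception names are illustrative, not taken from vj4.

from pymongo import MongoClient, ASCENDING, ReturnDocument
from pymongo.errors import DuplicateKeyError

coll = MongoClient()["example_db"]["opcount"]
# One document per (ident, window); the unique index makes the "limit reached" upsert collide.
coll.create_index([("ident", ASCENDING), ("begin_at", ASCENDING)], unique=True)

def inc_opcount(ident, begin_at, op, max_operations):
    try:
        return coll.find_one_and_update(
            {"ident": ident, "begin_at": begin_at, op: {"$not": {"$gte": max_operations}}},
            {"$inc": {op: 1}},
            upsert=True,
            return_document=ReturnDocument.AFTER,
        )
    except DuplicateKeyError:
        # Counter already at the limit for this window.
        raise RuntimeError("operation count exceeded")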
Example #3
Source File: grid_file.py From recruit with Apache License 2.0
def __flush_data(self, data):
    """Flush `data` to a chunk.
    """
    # Ensure the index, even if there's nothing to write, so
    # the filemd5 command always succeeds.
    self._ensure_index()

    if not data:
        return
    assert(len(data) <= self.chunk_size)

    chunk = {"files_id": self._file["_id"],
             "n": self._chunk_number,
             "data": Binary(data)}

    try:
        self._chunks.insert(chunk)
    except DuplicateKeyError:
        self._raise_file_exists(self._file['_id'])
    self._chunk_number += 1
    self._position += len(data)
Example #4
Source File: test_version_store.py From arctic with GNU Lesser General Public License v2.1
def test_append_insert_version_duplicatekey():
    read_handler = Mock(append=Mock(__name__=""))
    previous_version = TPL_VERSION.copy()
    previous_version['version'] = 1
    vs = create_autospec(VersionStore, instance=True,
                         _collection=Mock(),
                         _version_nums=Mock(find_one_and_update=Mock(return_value={'version': previous_version['version'] + 1})),
                         _versions=Mock(insert_one=Mock(__name__="insert_one"),
                                        find_one=Mock(__name__="find_one", return_value=previous_version)),
                         _arctic_lib=create_autospec(ArcticLibraryBinding,
                                                     arctic=create_autospec(Arctic, mongo_host='some_host')))
    vs._insert_version = lambda version: VersionStore._insert_version(vs, version)
    vs._versions.insert_one.side_effect = [DuplicateKeyError("dup key error"), None]
    vs._collection.database.connection.nodes = []
    vs._read_handler.return_value = read_handler
    VersionStore.append(vs, 'sym', [1, 2, 3], prune_previous_version=False, upsert=False)

    assert vs._version_nums.find_one_and_update.call_count == 2
    assert vs._versions.find_one.call_count == 2
    assert read_handler.append.call_count == 2
    assert vs._versions.insert_one.call_count == 2
Example #5
Source File: test_version_store.py From arctic with GNU Lesser General Public License v2.1
def test_write_insert_version_duplicatekey():
    write_handler = Mock(write=Mock(__name__=""))
    vs = create_autospec(VersionStore, instance=True,
                         _collection=Mock(),
                         _version_nums=Mock(find_one_and_update=Mock(return_value={'version': 1})),
                         _versions=Mock(insert_one=Mock(__name__="insert_one"),
                                        find_one=Mock(__name__="find_one")),
                         _arctic_lib=create_autospec(ArcticLibraryBinding,
                                                     arctic=create_autospec(Arctic, mongo_host='some_host')))
    vs._insert_version = lambda version: VersionStore._insert_version(vs, version)
    vs._versions.insert_one.side_effect = [DuplicateKeyError("dup key error"), None]
    vs._collection.database.connection.nodes = []
    vs._write_handler.return_value = write_handler
    VersionStore.write(vs, 'sym', sentinel.data, prune_previous_version=False)

    assert vs._version_nums.find_one_and_update.call_count == 2
    assert vs._versions.find_one.call_count == 2
    assert write_handler.write.call_count == 2
    assert vs._versions.insert_one.call_count == 2
Example #6
Source File: domain.py From vj4 with GNU Affero General Public License v3.0
async def add(domain_id: str, owner_uid: int,
              roles=builtin.DOMAIN_SYSTEM['roles'],
              name: str=None, gravatar: str=None, bulletin: str=''):
    validator.check_domain_id(domain_id)
    validator.check_name(name)
    validator.check_bulletin(bulletin)
    for domain in builtin.DOMAINS:
        if domain['_id'] == domain_id:
            raise error.DomainAlreadyExistError(domain_id)
    coll = db.coll('domain')
    try:
        result = await coll.insert_one({'_id': domain_id, 'pending': True, 'owner_uid': owner_uid,
                                        'roles': roles, 'name': name, 'gravatar': gravatar,
                                        'bulletin': bulletin})
        domain_id = result.inserted_id
    except errors.DuplicateKeyError:
        raise error.DomainAlreadyExistError(domain_id) from None
    # grant root role to owner by default
    await add_user_role(domain_id, owner_uid, builtin.ROLE_ROOT)
    await coll.update_one({'_id': domain_id}, {'$unset': {'pending': ''}})
    return domain_id
Example #7
Source File: lock.py From distributed_framework with Apache License 2.0
def _acquire(self):
    ttl = datetime.now() + timedelta(seconds=self._lease_time)
    try:
        self.collection.insert({
            '_id': self.lock_name,
            'ttl': ttl,
            'client_id': self._client_id}, w=1, j=1)
    except errors.DuplicateKeyError:
        self.collection.remove(
            {"_id": self.lock_name, 'ttl': {'$lt': datetime.now()}})
        try:
            self.collection.insert(
                {'_id': self.lock_name, 'ttl': ttl, 'client_id': self._client_id},
                w=1, j=1)
        except errors.DuplicateKeyError:
            self._locked = False
            return self._locked
    self._lock_expires = ttl
    self._locked = True
    return self._locked
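Example #7 uses the unique _id as the lock itself: whoever inserts the lock document first owns the lock, a DuplicateKeyError means someone else holds it, and the handler deletes the document only if its lease has expired before retrying once. A compact sketch of the same idiom using the current insert_one/delete_many API; the collection, function, and field names are illustrative, not from distributed_framework.

from datetime import datetime, timedelta
from pymongo import MongoClient
from pymongo.errors import DuplicateKeyError

locks = MongoClient()["example_db"]["locks"]

def try_acquire(lock_name, client_id, lease_seconds=30):
    ttl = datetime.utcnow() + timedelta(seconds=lease_seconds)
    doc = {"_id": lock_name, "ttl": ttl, "client_id": client_id}
    for _ in range(2):  # second attempt only after clearing an expired lock
        try:
            locks.insert_one(doc)
            return True
        except DuplicateKeyError:
            # Someone holds the lock; remove it only if its lease has expired, then retry.
            locks.delete_many({"_id": lock_name, "ttl": {"$lt": datetime.utcnow()}})
    return False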
Example #8
Source File: user.py From scout with BSD 3-Clause "New" or "Revised" License
def add_user(self, user_obj):
    """Add a user object to the database

    Args:
        user_obj(scout.models.User): A dictionary with user information

    Returns:
        user_info(dict): a copy of what was inserted
    """
    LOG.info("Adding user %s to the database", user_obj["email"])
    if not "_id" in user_obj:
        user_obj["_id"] = user_obj["email"]

    try:
        self.user_collection.insert_one(user_obj)
        LOG.debug("User inserted")
    except DuplicateKeyError as err:
        raise IntegrityError("User {} already exists in database".format(user_obj["email"]))

    return user_obj
Example #9
Source File: user.py From zimfarm with GNU General Public License v3.0
def post(self, token: AccessToken.Payload):
    try:
        request_json = UserCreateSchema().load(request.get_json())
    except ValidationError as e:
        raise errors.InvalidRequestJSON(e.messages)

    # generate password hash
    password = request_json.pop("password")
    request_json["password_hash"] = generate_password_hash(password)

    # fetch permissions
    request_json["scope"] = ROLES.get(request_json.pop("role"))

    try:
        user_id = Users().insert_one(request_json).inserted_id
        return jsonify({"_id": user_id})
    except DuplicateKeyError:
        raise errors.BadRequest("User already exists")
Example #10
Source File: hpo.py From scout with BSD 3-Clause "New" or "Revised" License
def load_hpo_bulk(self, hpo_bulk):
    """Add a hpo object

    Arguments:
        hpo_bulk(list(scout.models.HpoTerm))

    Returns:
        result: pymongo bulkwrite result
    """
    LOG.debug("Loading hpo bulk")

    try:
        result = self.hpo_term_collection.insert_many(hpo_bulk)
    except (DuplicateKeyError, BulkWriteError) as err:
        raise IntegrityError(err)
    return result
Example #11
Source File: variant_loader.py From scout with BSD 3-Clause "New" or "Revised" License
def load_variant_bulk(self, variants):
    """Load a bulk of variants

    Args:
        variants(iterable(scout.models.Variant))

    Returns:
        object_ids
    """
    if len(variants) == 0:
        return

    LOG.debug("Loading variant bulk")
    try:
        result = self.variant_collection.insert_many(variants)
    except (DuplicateKeyError, BulkWriteError) as err:
        # If the bulk write is wrong there are probably some variants already existing
        # In the database. So insert each variant
        for var_obj in variants:
            try:
                self.upsert_variant(var_obj)
            except IntegrityError as err:
                pass

    return
Example #12
Source File: variant_loader.py From scout with BSD 3-Clause "New" or "Revised" License
def upsert_variant(self, variant_obj):
    """Load a variant object, if the object already exists update compounds.

    Args:
        variant_obj(dict)

    Returns:
        result
    """
    LOG.debug("Upserting variant %s", variant_obj["_id"])
    try:
        result = self.variant_collection.insert_one(variant_obj)
    except DuplicateKeyError as err:
        LOG.debug("Variant %s already exists in database", variant_obj["_id"])
        result = self.variant_collection.find_one_and_update(
            {"_id": variant_obj["_id"]},
            {"$set": {"compounds": variant_obj.get("compounds", [])}},
        )
        variant = self.variant_collection.find_one({"_id": variant_obj["_id"]})
    return result
Example #13
Source File: mongo_store.py From bii-server with MIT License
def create(self, value, collection, enable_update_if_current=False):
    dbcol = self.db[collection]
    txn_k = update_if_current.SERIAL_TXN_COUNTER_KEY
    if enable_update_if_current:
        update_if_current.enable_check_for(value)
    serial = value.serialize()
    if hasattr(value, txn_k):
        serial[txn_k] = 0
        setattr(value, txn_k, 0)
    try:
        id_or_error = dbcol.insert(serial, getLastError=1)
        if isinstance(id_or_error, basestring) \
           and '_id' in serial and id_or_error != serial['_id']:
            raise BiiStoreException(id_or_error)
        return id_or_error
    except DuplicateKeyError as e:
        raise AlreadyInStoreException(e)
    except Exception as e:
        logger.error(traceback.format_exc())
        raise e
Example #14
Source File: mongo_store.py From bii-server with MIT License
def _request_transaction(self, collection, entity_name, brl):
    transaction_definition = {'_id': brl.serialize(), 'state': 'initial'}
    dbcol = self.db[collection]
    try:
        id_or_error = dbcol.insert(transaction_definition, getLastError=1)
    except DuplicateKeyError:
        raise BiiPendingTransactionException('There\'s a pending transaction for %s %s'
                                             ', please retry later'
                                             % (entity_name, transaction_definition['_id']))
    if isinstance(id_or_error, basestring) \
       and id_or_error != transaction_definition['_id']:
        raise BiiPendingTransactionException('There\'s a pending transaction for %s %s' +
                                             ', please retry later'
                                             % (entity_name, transaction_definition['_id']))
    return dbcol.find_one({'_id': transaction_definition['_id'], 'state': "initial"})
Example #15
Source File: auth.py From allura with Apache License 2.0
def add_login_detail(self, detail):
    try:
        session(detail).flush(detail)
    except DuplicateKeyError:
        session(detail).expunge(detail)
Example #16
Source File: db_interface_frontend_editing.py From FACT_core with GNU General Public License v3.0
def add_to_search_query_cache(self, search_query: str, query_title: Optional[str] = None) -> str:
    query_uid = create_uid(search_query)
    with suppress(DuplicateKeyError):
        self.search_query_cache.insert_one(
            {'_id': query_uid, 'search_query': search_query, 'query_title': query_title})
    return query_uid
Example #17
Source File: methods.py From steemdata-mongo with MIT License
def update_account_ops(mongo, username):
    """ This method will fetch entire account history, and back-fill any missing ops. """
    for event in Account(username).history():
        with suppress(DuplicateKeyError):
            transform = compose(strip_dot_from_keys, remove_body, json_expand, typify)
            mongo.AccountOperations.insert_one(transform(event))
Example #18
Source File: scraper.py From steemdata-mongo with MIT License
def insert_blocks(mongo, full_blocks):
    for block in full_blocks:
        if not block.get('block_num'):
            block['block_num'] = int(block['block_id'][:8], base=16)
        if block['block_num'] > 1:
            assert block_id_exists(mongo, block['previous']), \
                'Missing Previous Block (%s)' % block['previous']
        with suppress(DuplicateKeyError):
            mongo.db['Blockchain'].insert_one(block)
Example #19
Source File: scraper.py From steemdata-mongo with MIT License
def scrape_operations(mongo):
    """Fetch all operations (including virtual) from last known block forward."""
    indexer = Indexer(mongo)
    last_block = indexer.get_checkpoint('operations')
    log.info('\n> Fetching operations, starting with block %d...' % last_block)

    blockchain = Blockchain(mode="irreversible")
    history = blockchain.history(
        start_block=last_block,
    )
    for operation in history:
        # insert operation
        with suppress(DuplicateKeyError):
            transform = compose(strip_dot_from_keys, json_expand, typify)
            mongo.Operations.insert_one(transform(operation))

        # if this is a new block, checkpoint it, and schedule batch processing
        if operation['block_num'] != last_block:
            last_block = operation['block_num']
            indexer.set_checkpoint('operations', last_block - 1)

            if last_block % 10 == 0:
                log.info("Checkpoint: %s (%s)" % (
                    last_block,
                    blockchain.steem.hostname
                ))

            # Posts, Comments
            # ---------------
Example #20
Source File: methods.py From steemdata-mongo with MIT License
def update_account_ops_quick(mongo, username, batch_size=200, steemd_instance=None):
    """ Only update the latest history, limited to 1 batch of defined batch_size. """
    start_index = account_operations_index(mongo, username)

    # fetch latest records and update the db
    history = \
        Account(username, steemd_instance=steemd_instance).history_reverse(batch_size=batch_size)
    for event in take(batch_size, history):
        if event['index'] < start_index:
            return
        with suppress(DuplicateKeyError):
            mongo.AccountOperations.insert_one(json_expand(typify(event)))
Example #21
Source File: mongodb.py From Tautulli with GNU General Public License v3.0
def add_job(self, job):
    try:
        self.collection.insert({
            '_id': job.id,
            'next_run_time': datetime_to_utc_timestamp(job.next_run_time),
            'job_state': Binary(pickle.dumps(job.__getstate__(), self.pickle_protocol))
        })
    except DuplicateKeyError:
        raise ConflictingIdError(job.id)
Example #22
Source File: auth.py From allura with Apache License 2.0
def upsert(cls, **kw):
    obj = cls.query.get(**kw)
    if obj is not None:
        return obj
    try:
        obj = cls(**kw)
        session(obj).insert_now(obj, state(obj))
    except pymongo.errors.DuplicateKeyError:
        session(obj).expunge(obj)
        obj = cls.query.get(**kw)
    return obj
Example #23
Source File: auth.py From allura with Apache License 2.0
def upsert(cls, username):
    u = cls.query.get(username=username)
    if u is not None:
        return u
    try:
        u = cls(username=username)
        session(u).flush(u)
    except pymongo.errors.DuplicateKeyError:
        session(u).expunge(u)
        u = cls.query.get(username=username)
    return u
Example #24
Source File: taranis_service.py From taranis with BSD 3-Clause "New" or "Revised" License
def create_index(self, index: NewIndexModel):
    try:
        t = int((datetime.utcnow() - datetime(1970, 1, 1)).total_seconds() * 1000)
        new_index = IndexModel()
        new_index.created_at = t
        new_index.updated_at = t
        new_index.state = IndexModel.State.CREATED

        new_dict_index = MessageToDict(ParseDict(MessageToDict(index, preserving_proto_field_name=True), new_index),
                                       preserving_proto_field_name=True)

        res = self.repo.create_one_index(new_dict_index)

        config = json.loads(index.config)

        if config["index_type"] == "IVFPQ":
            dimension = config["dimension"]
            n_list = config["n_list"]
            n_probes = config["n_probes"]
            index_type = "IVF{},PQ{}np".format(n_list, n_probes)
            metric_type = cpp_taranis.Faiss.MetricType.METRIC_L2
            if config["metric"] == "METRIC_L1":
                metric_type = cpp_taranis.Faiss.MetricType.METRIC_L1
            elif config["metric"] == "METRIC_L2":
                metric_type = cpp_taranis.Faiss.MetricType.METRIC_L2
            self.faiss_wrapper.create_index(index.db_name, index.index_name, dimension, index_type, metric_type,
                                            n_probes)
        else:
            raise TaranisNotImplementedError(
                "Can't create index because of unknown index type {}".format(index.config["index_type"]))
    except DuplicateKeyError as e:
        raise TaranisAlreadyExistsError("Index name {} already exists".format(index.index_name))
    return index
Example #25
Source File: auth.py From allura with Apache License 2.0
def upsert(cls):
    r = cls.query.get()
    if r is not None:
        return r
    try:
        r = cls(_id=0)
        session(r).flush(r)
        return r
    except pymongo.errors.DuplicateKeyError:  # pragma no cover
        session(r).flush(r)
        r = cls.query.get()
        return r
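Examples #22, #23 and #25 above are variations of the same Allura idiom: optimistically create the record, and if the flush raises DuplicateKeyError because a concurrent request created it first, discard the in-session object and read back the winner. Stripped of the ODM layer, the read-or-create idiom looks roughly like the sketch below; the collection and key names are illustrative.

from pymongo import MongoClient
from pymongo.errors import DuplicateKeyError

settings = MongoClient()["example_db"]["settings"]

def get_or_create(key, defaults):
    doc = settings.find_one({"_id": key})
    if doc is not None:
        return doc
    try:
        settings.insert_one({"_id": key, **defaults})
    except DuplicateKeyError:
        pass  # a concurrent writer won the race; fall through and read their document
    return settings.find_one({"_id": key})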
Example #26
Source File: crispr.py From SelfTarget with MIT License
def map_ids_from_line(self, line: CrisprLine, mode, wges_dict=None):
    crispr: Crispr = self.create_crispr_from_crispr_line(line)
    try:
        crispr.extract_and_save_wge(mode, wges_dict)
    except NoWGEException as ex:
        logger.error(ex.msg())
        pass
    except (DuplicateKeyError, NotUniqueError) as ex:
        logger.warning("Skipping duplicate wge_id")
        pass
    except Exception as e:
        traceback.print_exc()
        logger.error(f"A problem happened for {self.filename}, oligo_id {crispr.crispr_line.get_oligo_id}")
        pass
Example #27
Source File: mongo_db_repository.py From taranis with BSD 3-Clause "New" or "Revised" License
def create_one_database(self, database):
    try:
        res = self.databases_collection.insert_one(database)
        return res.inserted_id
    except DuplicateKeyError:
        raise TaranisAlreadyExistsError("Database {} already exists".format(database['name']))
Example #28
Source File: discuss.py From allura with Apache License 2.0
def new(cls, **props):
    '''Creates a new Thread instance, ensuring a unique _id.'''
    for i in range(5):
        try:
            thread = cls(**props)
            session(thread).flush(thread)
            return thread
        except DuplicateKeyError as err:
            log.warning(
                'Got DuplicateKeyError: attempt #%s, trying again. %s', i, err)
            if i == 4:
                raise
            session(thread).expunge(thread)
            continue
Example #29
Source File: test_decorators_unit.py From arctic with GNU Lesser General Public License v2.1
def test_duplicate_key_failure_no_retry():
    error = DuplicateKeyError('duplicate key')
    with patch('arctic.decorators._log_exception', autospec=True) as le:
        @mongo_retry
        def foo():
            raise error
        with pytest.raises(OperationFailure) as e:
            foo()
        assert 'duplicate key' in str(e.value)
        assert le.call_count == 1
Example #30
Source File: transcript.py From scout with BSD 3-Clause "New" or "Revised" License
def load_exon_bulk(self, exon_objs):
    """Load a bulk of exon objects to the database

    Arguments:
        exon_objs(iterable(scout.models.hgnc_exon))
    """
    try:
        LOG.debug("Loading exon bulk")
        result = self.exon_collection.insert_many(exon_objs)
    except (DuplicateKeyError, BulkWriteError) as err:
        raise IntegrityError(err)

    return result