Python pymongo.ReplaceOne() Examples

The following are 6 code examples of pymongo.ReplaceOne(). You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also want to check out all available functions/classes of the module pymongo, or try the search function.
Example #1
Source File: db_repository.py    From pepy with MIT License 6 votes vote down vote up
def save_projects(self, projects: List[Project]):
    """Persist the given projects to MongoDB, upserting each one by name."""
    replacements = [
        ReplaceOne(
            {"name": project.name.name},
            self._convert_to_raw(project),
            upsert=True,
        )
        for project in projects
    ]
    # Single round trip: apply all replacements in one bulk write.
    self._client.projects.bulk_write(replacements)
Example #2
Source File: s3_to_mongo_operator.py    From mongo_plugin with Apache License 2.0 5 votes vote down vote up
def replace_records(self, mongo, docs):
    """Upsert each document in *docs* into the target collection.

    Operations are buffered and flushed in unordered bulk writes of at
    most 1000 at a time, with a final flush for any remainder.
    """
    batch = []
    for doc in docs:
        replacement_filter = {}
        if isinstance(self.mongo_replacement_filter, str):
            # Single-field filter: look the field's value up in the document.
            field = self.mongo_replacement_filter
            replacement_filter = {field: doc.get(field, False)}
        elif isinstance(self.mongo_replacement_filter, dict):
            for key, value in self.mongo_replacement_filter.items():
                if key == value:
                    # Key maps to itself: take the value from the document.
                    replacement_filter[key] = doc.get(key, False)
                else:
                    # Otherwise use the value configured in the filter spec.
                    replacement_filter[key] = self.mongo_replacement_filter.get(key, False)

        batch.append(ReplaceOne(replacement_filter, doc, upsert=True))

        # Flush every full batch of 1000 operations.
        if len(batch) == 1000:
            logging.info('Making Request....')
            mongo.bulk_write(self.mongo_collection,
                             batch,
                             mongo_db=self.mongo_db,
                             ordered=False)
            batch = []
            logging.info('Request successfully finished....')

    # Flush whatever is left over after the loop.
    if batch:
        logging.info('Making Final Request....')
        mongo.bulk_write(self.mongo_collection,
                         batch,
                         mongo_db=self.mongo_db,
                         ordered=False)
        logging.info('Final Request Finished.')
Example #3
Source File: utils.py    From FPLbot with MIT License 5 votes vote down vote up
async def update_players():
    """Updates all players in the database."""
    # FIX: the body uses ``await``/``async with``, which is a SyntaxError
    # inside a plain ``def`` — this must be a coroutine function.
    logger.info("Updating FPL players in database.")
    async with aiohttp.ClientSession() as session:
        fpl = FPL(session)
        players = await fpl.get_players(include_summary=True, return_json=True)
        for player in players:
            # Replace the numeric team id with a readable team name.
            player["team"] = team_converter(player["team"])

    # Upsert every player document keyed on its FPL id.
    requests = [ReplaceOne({"id": player["id"]}, player, upsert=True)
                for player in players]
    database.players.bulk_write(requests)
    create_text_indexes()

    logger.info("Adding Understat data to players in database.")
    understat_players = await get_understat_players()

    for player in understat_players:
        # Only update FPL player with desired attributes
        understat_attributes = {
            attribute: value for attribute, value in player.items()
            if attribute in desired_attributes
        }

        # Use player's full name and team to try and find the correct player
        search_string = f"{player['player_name']} {player['team_title']}"
        players = database.players.find(
            {"$text": {"$search": search_string}},
            {"score": {"$meta": "textScore"}}
        ).sort([("score", {"$meta": "textScore"})])
        try:
            relevant_player = list(players)[0]
        except IndexError:
            # No text-search match for this Understat player; skip it.
            continue

        database.players.update_one(
            {"id": relevant_player["id"]},
            {"$set": understat_attributes}
        )
Example #4
Source File: utils.py    From FPLbot with MIT License 5 votes vote down vote up
async def update_results():
    """Fetch the 2019 EPL results from Understat and upsert them.

    FIX: the body uses ``await``/``async with``, which is a SyntaxError
    inside a plain ``def`` — this must be a coroutine function.
    """
    async with aiohttp.ClientSession() as session:
        understat = Understat(session)
        results = await understat.get_league_results("EPL", 2019)
        for result in results:
            # Normalise home/away team names to the project's naming scheme.
            result["h"]["title"] = understat_team_converter(result["h"]["title"])
            result["a"]["title"] = understat_team_converter(result["a"]["title"])

    # Upsert each result keyed on its Understat match id.
    requests = [ReplaceOne({"id": result["id"]}, result, upsert=True)
                for result in results]
    database.results.bulk_write(requests)
Example #5
Source File: indexing.py    From signac with BSD 3-Clause "New" or "Revised" License 4 votes vote down vote up
def export_pymongo(docs, index, mirrors=None, update=False, num_tries=3, timeout=60, chunksize=100):
    """Optimized :py:func:`~.export` function for pymongo index collections.

    The behavior of this function is rougly equivalent to:

    .. code-block:: python

        for doc in docs:
            export_one(doc, index, mirrors, num_tries)

    .. note::

        All index documents must be JSON-serializable to
        be able to be exported to a MongoDB collection.

    :param docs: The index documents to export.
    :param index: The database collection to export the index to.
    :type index: :class:`pymongo.collection.Collection`
    :param num_tries: The number of automatic retry attempts in case of
        mirror connection errors.
    :type num_tries: int
    :param timeout: The time in seconds to wait before an
        automatic retry attempt.
    :type timeout: int
    :param chunksize: The buffer size for export operations.
    :type chunksize: int"""
    import pymongo
    logger.info("Exporting to pymongo database collection index '{}'.".format(index))
    buffered_docs = []
    replace_ops = []
    ids_by_root = defaultdict(list)
    for doc in docs:
        if update:
            # Remember which _ids we exported per root so stale documents
            # for those roots can be removed afterwards.
            root = doc.get('root')
            if root is not None:
                ids_by_root[root].append(doc['_id'])
        buffered_docs.append(doc)
        replace_ops.append(pymongo.ReplaceOne({'_id': doc['_id']}, doc, upsert=True))
        if len(buffered_docs) >= chunksize:
            logger.debug("Pushing chunk.")
            _export_pymongo(buffered_docs, replace_ops, index, mirrors, num_tries, timeout)
            del buffered_docs[:]
            del replace_ops[:]
    if replace_ops:
        logger.debug("Pushing final chunk.")
        _export_pymongo(buffered_docs, replace_ops, index, mirrors, num_tries, timeout)
    if not update:
        return
    if not ids_by_root:
        raise errors.ExportError(
            "The exported docs sequence is empty! Unable to update!")
    # Any _id present in the collection under an exported root but absent
    # from this export is stale and gets deleted.
    stale = set()
    for root, exported_ids in ids_by_root.items():
        present = {doc['_id'] for doc in index.find({'root': root})}
        stale.update(present.difference(exported_ids))
    logger.info("Removing {} stale documents.".format(len(stale)))
    for _id in stale:
        index.delete_one(dict(_id=_id))
Example #6
Source File: mongo.py    From airflow with Apache License 2.0 4 votes vote down vote up
def replace_many(self, mongo_collection, docs,
                 filter_docs=None, mongo_db=None, upsert=False, collation=None,
                 **kwargs):
    """
    Replaces many documents in a mongo collection.

    Uses bulk_write with multiple ReplaceOne operations
    https://api.mongodb.com/python/current/api/pymongo/collection.html#pymongo.collection.Collection.bulk_write

    .. note::
        If no ``filter_docs`` are given, it is assumed that all
        replacement documents contain the ``_id`` field, which is then
        used as the filter.

    :param mongo_collection: The name of the collection to update.
    :type mongo_collection: str
    :param docs: The new documents.
    :type docs: list[dict]
    :param filter_docs: A list of queries that match the documents to replace.
        Can be omitted; then the _id fields from docs will be used.
    :type filter_docs: list[dict]
    :param mongo_db: The name of the database to use.
        Can be omitted; then the database from the connection string is used.
    :type mongo_db: str
    :param upsert: If ``True``, perform an insert if no documents
        match the filters for the replace operation.
    :type upsert: bool
    :param collation: An instance of
        :class:`~pymongo.collation.Collation`. This option is only
        supported on MongoDB 3.4 and above.
    :type collation: pymongo.collation.Collation

    """
    collection = self.get_collection(mongo_collection, mongo_db=mongo_db)

    # Default to matching each replacement document by its own _id.
    if not filter_docs:
        filter_docs = [{'_id': doc['_id']} for doc in docs]

    replacements = [
        ReplaceOne(filter_docs[position],
                   document,
                   upsert=upsert,
                   collation=collation)
        for position, document in enumerate(docs)
    ]

    return collection.bulk_write(replacements, **kwargs)