Python lmdb.Error() Examples

The following are 13 code examples of lmdb.Error(), extracted from open source projects. Each example lists its source file, the project it comes from, and that project's license. You may also want to check out the other available functions and classes of the lmdb module.
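Every example below follows the same basic shape: open an LMDB environment (or a store backed by one) and translate lmdb.Error, the base class of all exceptions raised by py-lmdb, into an application-level error or a fallback value. A minimal sketch of that pattern (the helper name, path handling, and ValueError wrapper are illustrative, not taken from any of the projects below):

import lmdb

def open_env_readonly(path):
    # Minimal sketch of the pattern used throughout these examples.
    try:
        return lmdb.open(path, readonly=True, subdir=False, lock=False)
    except lmdb.InvalidError as e:
        # lmdb.InvalidError subclasses lmdb.Error, so the more specific
        # handler must come first (compare Example #1 below).
        raise ValueError("not an LMDB file: {}".format(e)) from e
    except lmdb.Error as e:
        raise ValueError(str(e)) from e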
Example #1
Source File: formats.py    From tsinfer with GNU General Public License v3.0
def _open_lmbd_readonly(self):
        # We set the map_size here because LMDB will map 1TB of virtual memory if
        # we don't, making it hard to figure out how much memory we're actually
        # using.
        map_size = None
        try:
            map_size = os.path.getsize(self.path)
        except OSError as e:
            raise exceptions.FileFormatError(str(e)) from e
        try:
            store = zarr.LMDBStore(
                self.path, map_size=map_size, readonly=True, subdir=False, lock=False
            )
        except lmdb.InvalidError as e:
            raise exceptions.FileFormatError(
                "Unknown file format:{}".format(str(e))
            ) from e
        except lmdb.Error as e:
            raise exceptions.FileFormatError(str(e)) from e
        return store 
Example #2
Source File: checkout.py    From hangar-py with Apache License 2.0
def close(self) -> None:
        """Close all handles to the writer checkout and release the writer lock.

        Failure to call this method after the writer checkout has been used
        will result in a lock being placed on the repository which will not
        allow any writes until it has been manually cleared.
        """
        with suppress(lmdb.Error):
            self._verify_alive()

        if isinstance(self._stack, ExitStack):
            self._stack.close()

        if hasattr(self, '_columns'):
            if hasattr(self._columns, '_destruct'):
                self._columns._destruct()

        with suppress(lmdb.Error):
            heads.release_writer_lock(self._branchenv, self._writer_lock)

        for attr in list(self.__dict__.keys()):
            delattr(self, attr)
        atexit.unregister(self.close)
        return 
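The pattern to note in close() is contextlib.suppress(lmdb.Error): cleanup steps that may legitimately fail once the underlying handles are gone are silenced rather than allowed to abort the teardown. A stripped-down sketch of the same idea (the teardown helper is hypothetical, not part of hangar-py):

from contextlib import suppress

import lmdb

def teardown(env):
    # Ignore LMDB errors raised while flushing during teardown, then close.
    with suppress(lmdb.Error):
        env.sync()
    env.close()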
Example #3
Source File: repository.py    From bugbug with Mozilla Public License 2.0
def __init__(self, save):
        self.save = save

        try:
            self.db_experiences = shelve.Shelf(
                LMDBDict("data/commit_experiences.lmdb", readonly=not save),
                protocol=pickle.DEFAULT_PROTOCOL,
                writeback=save,
            )
        except lmdb.Error as e:
            if not save and "No such file or directory" in str(e):
                self.db_experiences = {}
            else:
                raise

        if not save:
            self.mem_experiences = {} 
Example #4
Source File: indexed_database.py    From sawtooth-core with Apache License 2.0
def _wrap_iterator(iterator, cursor_chain, deserializer):
        class _WrapperIter:
            def __iter__(self):
                return self

            def __next__(self):
                try:
                    return _read(
                        next(iterator),
                        cursor_chain,
                        deserializer)
                except lmdb.Error:
                    raise StopIteration()

        return _WrapperIter() 
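This wrapper converts any lmdb.Error raised while advancing the cursor into StopIteration, so callers can drive it with a plain for-loop. A generator-based sketch of the same idea, independent of sawtooth-core's _read/cursor_chain machinery (helper name and parameters are illustrative):

import lmdb

def safe_values(env, db=None):
    # Yield raw values from a read transaction; if LMDB raises while the
    # cursor is advancing, end the iteration cleanly instead of propagating.
    try:
        with env.begin(db=db) as txn:
            for _key, value in txn.cursor():
                yield value
    except lmdb.Error:
        return  # returning from a generator ends the iteration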
Example #5
Source File: Embeddings.py    From delft with Apache License 2.0
def get_word_vector(self, word):
        """
            Get static embeddings (e.g. glove) for a given token
        """
        if (self.name == 'wiki.fr') or (self.name == 'wiki.fr.bin'):
            # the pre-trained embeddings are not cased
            word = word.lower()
        if self.env is None or self.extension == 'bin':
            # db not available or embeddings in bin format; the embeddings should be available in memory (normally!)
            return self.get_word_vector_in_memory(word)
        try:    
            with self.env.begin() as txn:
                vector = txn.get(word.encode(encoding='UTF-8'))
                if vector:
                    word_vector = _deserialize_pickle(vector)
                    vector = None
                else:
                    word_vector = np.zeros((self.static_embed_size,), dtype=np.float32)
                    # alternatively, initialize with random negative values
                    #word_vector = np.random.uniform(low=-0.5, high=0.0, size=(self.embed_size,))
                    # alternatively use fasttext OOV ngram possibilities (if ngram available)
        except lmdb.Error:
            # For reasons not fully understood, the environment must be closed and
            # reopened to avoid "mdb_txn_begin: MDB_BAD_RSLOT: Invalid reuse of
            # reader locktable slot" when opening a new transaction.
            self.env.close()
            envFilePath = os.path.join(self.embedding_lmdb_path, self.name)
            self.env = lmdb.open(envFilePath, readonly=True, max_readers=2048, max_spare_txns=2, lock=False)
            return self.get_word_vector(word)
        return word_vector 
Example #6
Source File: Embeddings.py    From delft with Apache License 2.0
def get_ELMo_lmdb_vector(self, token_list, max_size_sentence):
        """
            Try to get the ELMo embeddings for a sequence cached in LMDB
        """
        if self.env_ELMo is None:
            # db cache not available, we don't cache ELMo stuff
            return None
        try:    
            ELMo_vector = np.zeros((len(token_list), max_size_sentence-2, ELMo_embed_size), dtype='float32')
            with self.env_ELMo.begin() as txn:
                for i in range(0, len(token_list)):
                    # get a hash for the token_list
                    the_hash = list_digest(token_list[i])
                    vector = txn.get(the_hash.encode(encoding='UTF-8'))
                    if vector:
                        # adapt expected shape/padding
                        local_embeddings = _deserialize_pickle(vector)
                        if local_embeddings.shape[0] > max_size_sentence-2:
                            # squeeze the extra padding space
                            ELMo_vector[i] = local_embeddings[:max_size_sentence-2,]
                        elif local_embeddings.shape[0] == max_size_sentence-2:
                            # bingo~!
                            ELMo_vector[i] = local_embeddings
                        else:
                            # fill the missing space with padding
                            filler = np.zeros((max_size_sentence-(local_embeddings.shape[0]+2), ELMo_embed_size), dtype='float32')
                            ELMo_vector[i] = np.concatenate((local_embeddings, filler))
                        vector = None
                    else:
                        return None
        except lmdb.Error:
            # For reasons not fully understood, the environment must be closed and
            # reopened to avoid "mdb_txn_begin: MDB_BAD_RSLOT: Invalid reuse of
            # reader locktable slot" when opening a new transaction.
            self.env_ELMo.close()
            self.env_ELMo = lmdb.open(self.embedding_ELMo_cache, readonly=True, max_readers=2048, max_spare_txns=2, lock=False)
            return self.get_ELMo_lmdb_vector(token_list, max_size_sentence)
        return ELMo_vector 
Example #7
Source File: test_dataSource.py    From nideep with BSD 2-Clause "Simplified" License
def test_does_not_exist(self):

        path_lmdb = os.path.join(self.dir_tmp, 'test_num_entries_does_not_exist_lmdb')
        assert_false(os.path.exists(path_lmdb))
        assert_raises(lmdb.Error, ds.DataSourceLMDB, path_lmdb) 
Example #8
Source File: dataSource.py    From nideep with BSD 2-Clause "Simplified" License
def exists(self):
        if not os.path.isdir(self.p):
            raise lmdb.Error("LMDB not found (%s)" % self.p)
Example #9
Source File: test_read_lmdb.py    From nideep with BSD 2-Clause "Simplified" License
def test_num_entries_does_not_exist(self):

        path_lmdb = os.path.join(self.dir_tmp, 'test_num_entries_does_not_exist_lmdb')
        assert_false(os.path.exists(path_lmdb))
        assert_raises(lmdb.Error, r.num_entries, path_lmdb) 
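Both nideep tests assert that touching a non-existent LMDB path raises lmdb.Error. The same check as a standalone, pytest-style sketch (the test name and tmp_path usage are illustrative, not taken from nideep):

import lmdb
import pytest

def test_missing_lmdb_raises(tmp_path):
    # Opening a path that does not exist, read-only and without create,
    # makes py-lmdb raise a subclass of lmdb.Error.
    missing = str(tmp_path / "no_such_lmdb")
    with pytest.raises(lmdb.Error):
        lmdb.open(missing, readonly=True, create=False)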
Example #10
Source File: engine.py    From eavatar-me with Apache License 2.0
def start(self, ctx=None):
        logger.debug("Starting data engine...")

        # register with the context
        if ctx:
            ctx.bind('dataengine', self)
        if not self.datapath:
            self.datapath = os.path.join(environ.data_dir(), 'stores')
            if not os.path.exists(self.datapath):
                os.makedirs(self.datapath)
        logger.debug("Data path: %s", self.datapath)

        try:
            self.database = lmdb.Environment(self.datapath,
                                             map_size=2000000000,
                                             max_dbs=1024)
            with self.database.begin(write=False) as txn:
                cur = txn.cursor()
                for k, v in iter(cur):
                    logger.debug("Found existing store: %s", k)
                    _db = self.database.open_db(k, create=False)
                    self.stores[k] = Store(k, _db, self)
        except lmdb.Error:
            logger.exception("Failed to open database.", exc_info=True)
            raise

        logger.debug("Data engine started.") 
Example #11
Source File: engine.py    From eavatar-me with Apache License 2.0
def create_store(self, name):
        if isinstance(name, unicode):
            name = name.encode('utf-8')

        try:
            _db = self.database.open_db(name, dupsort=False, create=True)
            store = Store(name, _db, self)
            self.stores[name] = store
            return store
        except lmdb.Error as ex:
            logger.exception(ex)
            raise DataError(ex.message) 
Example #12
Source File: engine.py    From eavatar-me with Apache License 2.0
def remove_store(self, name):
        try:
            store = self.stores.get(name)
            if store is not None:
                with self.database.begin(write=True) as txn:
                    txn.drop(store._db)
                del self.stores[name]
        except lmdb.Error as ex:
            logger.exception("Failed to remove store.", ex)
            raise DataError(ex.message) 
Example #13
Source File: Embeddings.py    From delft with Apache License 2.0
def get_BERT_lmdb_vector(self, sentence):
        """
            Try to get the BERT extracted embeddings for a sequence cached in LMDB
        """
        if self.env_BERT is None:
            # db cache not available, we don't cache BERT stuff
            return None
        try:    
            BERT_vector = np.zeros((BERT_sentence_size, BERT_embed_size), dtype='float32')
            with self.env_BERT.begin() as txn:
                # get a hash for the token_list
                the_hash = list_digest(sentence)
                vector = txn.get(the_hash.encode(encoding='UTF-8'))
                
                if vector:
                    # adapt expected shape/padding
                    BERT_vector = _deserialize_pickle(vector)
                    '''
                    if local_embeddings.shape[0] > max_size_sentence:
                        # squeeze the extra padding space
                        BERT_vector = local_embeddings[:max_size_sentence,]
                    elif local_embeddings.shape[0] == max_size_sentence:
                        # bingo~!
                        BERT_vector = local_embeddings
                    else:
                        # fill the missing space with padding
                        filler = np.zeros((max_size_sentence-(local_embeddings.shape[0]), BERT_embed_size), dtype='float32')
                        BERT_vector = np.concatenate((local_embeddings, filler))
                    '''
                    vector = None
                else:
                    return None
                
        except lmdb.Error:
            # For reasons not fully understood, the environment must be closed and
            # reopened to avoid "mdb_txn_begin: MDB_BAD_RSLOT: Invalid reuse of
            # reader locktable slot" when opening a new transaction.
            self.env_BERT.close()
            self.env_BERT = lmdb.open(self.embedding_BERT_cache, readonly=True, max_readers=2048, max_spare_txns=2, lock=False)
            return self.get_BERT_lmdb_vector(sentence)
        return BERT_vector
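
All of the examples above catch the base class, but py-lmdb also exposes more specific subclasses such as lmdb.InvalidError, lmdb.NotFoundError and lmdb.MapFullError, all of which derive from lmdb.Error. As a closing sketch (a hypothetical helper, not from any project above), one common use of a specific subclass is growing the map and retrying a write once when the map is full:

import lmdb

def put_with_resize(env, key, value):
    # lmdb.MapFullError is raised when map_size is exhausted; grow the map
    # with set_mapsize() and retry the write once.
    try:
        with env.begin(write=True) as txn:
            txn.put(key, value)
    except lmdb.MapFullError:
        env.set_mapsize(env.info()['map_size'] * 2)
        with env.begin(write=True) as txn:
            txn.put(key, value)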