Python db.DBLockDeadlockError() Examples
The following are 6 code examples of db.DBLockDeadlockError() from the db module of the bsddb package (the Python bindings for Berkeley DB), taken from the open-source projects named with each example.
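db.DBLockDeadlockError is the exception Berkeley DB raises when its lock manager detects a deadlock and chooses the calling transaction as the victim; the usual response is to abort that transaction and retry. Below is a minimal sketch of handling the exception directly, assuming the legacy bsddb package (or the bsddb3 backport on Python 3); the environment directory, database name, and key are placeholders.

# Minimal sketch of handling db.DBLockDeadlockError directly, assuming
# the legacy bsddb package (or bsddb3); paths and key names are made up.
from bsddb import db

env = db.DBEnv()
env.open('/tmp/bsddb-env',
         db.DB_CREATE | db.DB_INIT_MPOOL | db.DB_INIT_LOCK |
         db.DB_INIT_LOG | db.DB_INIT_TXN)

d = db.DB(env)
d.open('example.db', dbtype=db.DB_HASH,
       flags=db.DB_CREATE | db.DB_AUTO_COMMIT)

for attempt in range(5):
    txn = env.txn_begin()
    try:
        d.put('counter', '1', txn=txn)
        txn.commit()
        break
    except db.DBLockDeadlockError:
        # Berkeley DB chose this transaction as the deadlock victim;
        # abort it and try again.
        txn.abort()
else:
    raise RuntimeError('put failed after repeated deadlocks')

d.close()
env.close()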
Example #1
Source File: dbutils.py From ironpython2 with Apache License 2.0
def DeadlockWrap(function, *_args, **_kwargs):
    """DeadlockWrap(function, *_args, **_kwargs) - automatically retries
    function in case of a database deadlock.

    This is a function intended to be used to wrap database calls such
    that they perform retrys with exponentially backing off sleeps in
    between when a DBLockDeadlockError exception is raised.

    A 'max_retries' parameter may optionally be passed to prevent it
    from retrying forever (in which case the exception will be reraised).

        d = DB(...)
        d.open(...)
        DeadlockWrap(d.put, "foo", data="bar")  # set key "foo" to "bar"
    """
    sleeptime = _deadlock_MinSleepTime
    max_retries = _kwargs.get('max_retries', -1)
    if 'max_retries' in _kwargs:
        del _kwargs['max_retries']
    while True:
        try:
            return function(*_args, **_kwargs)
        except db.DBLockDeadlockError:
            if _deadlock_VerboseFile:
                _deadlock_VerboseFile.write(
                    'dbutils.DeadlockWrap: sleeping %1.3f\n' % sleeptime)
            _sleep(sleeptime)
            # exponential backoff in the sleep time
            sleeptime *= 2
            if sleeptime > _deadlock_MaxSleepTime:
                sleeptime = _deadlock_MaxSleepTime
            max_retries -= 1
            if max_retries == -1:
                raise

#------------------------------------------------------------------------
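The docstring above already shows the wrapping pattern; the sketch below puts it in a runnable context, assuming the legacy bsddb package (or the bsddb3 backport) and a placeholder environment directory. Note that the default max_retries of -1 means retry indefinitely, while a positive value re-raises DBLockDeadlockError once that many retries have been used up.

# Minimal usage sketch of DeadlockWrap, assuming the legacy bsddb
# package (or bsddb3); the environment directory is a placeholder.
from bsddb import db, dbutils

env = db.DBEnv()
env.open('/tmp/bsddb-env',
         db.DB_CREATE | db.DB_INIT_MPOOL | db.DB_INIT_LOCK |
         db.DB_INIT_LOG | db.DB_INIT_TXN)

d = db.DB(env)
d.open('example.db', dbtype=db.DB_HASH,
       flags=db.DB_CREATE | db.DB_AUTO_COMMIT)

# Retry the put with exponential backoff if the lock manager reports a
# deadlock; give up (re-raising DBLockDeadlockError) after 5 retries.
dbutils.DeadlockWrap(d.put, 'foo', 'bar', max_retries=5)

d.close()
env.close()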
Example #2
Source File: dbutils.py From BinderFilter with MIT License
def DeadlockWrap(function, *_args, **_kwargs):
    """DeadlockWrap(function, *_args, **_kwargs) - automatically retries
    function in case of a database deadlock.

    This is a function intended to be used to wrap database calls such
    that they perform retrys with exponentially backing off sleeps in
    between when a DBLockDeadlockError exception is raised.

    A 'max_retries' parameter may optionally be passed to prevent it
    from retrying forever (in which case the exception will be reraised).

        d = DB(...)
        d.open(...)
        DeadlockWrap(d.put, "foo", data="bar")  # set key "foo" to "bar"
    """
    sleeptime = _deadlock_MinSleepTime
    max_retries = _kwargs.get('max_retries', -1)
    if 'max_retries' in _kwargs:
        del _kwargs['max_retries']
    while True:
        try:
            return function(*_args, **_kwargs)
        except db.DBLockDeadlockError:
            if _deadlock_VerboseFile:
                _deadlock_VerboseFile.write(
                    'dbutils.DeadlockWrap: sleeping %1.3f\n' % sleeptime)
            _sleep(sleeptime)
            # exponential backoff in the sleep time
            sleeptime *= 2
            if sleeptime > _deadlock_MaxSleepTime:
                sleeptime = _deadlock_MaxSleepTime
            max_retries -= 1
            if max_retries == -1:
                raise

#------------------------------------------------------------------------
Example #3
Source File: dbutils.py From Computable with MIT License
def DeadlockWrap(function, *_args, **_kwargs):
    """DeadlockWrap(function, *_args, **_kwargs) - automatically retries
    function in case of a database deadlock.

    This is a function intended to be used to wrap database calls such
    that they perform retrys with exponentially backing off sleeps in
    between when a DBLockDeadlockError exception is raised.

    A 'max_retries' parameter may optionally be passed to prevent it
    from retrying forever (in which case the exception will be reraised).

        d = DB(...)
        d.open(...)
        DeadlockWrap(d.put, "foo", data="bar")  # set key "foo" to "bar"
    """
    sleeptime = _deadlock_MinSleepTime
    max_retries = _kwargs.get('max_retries', -1)
    if 'max_retries' in _kwargs:
        del _kwargs['max_retries']
    while True:
        try:
            return function(*_args, **_kwargs)
        except db.DBLockDeadlockError:
            if _deadlock_VerboseFile:
                _deadlock_VerboseFile.write(
                    'dbutils.DeadlockWrap: sleeping %1.3f\n' % sleeptime)
            _sleep(sleeptime)
            # exponential backoff in the sleep time
            sleeptime *= 2
            if sleeptime > _deadlock_MaxSleepTime:
                sleeptime = _deadlock_MaxSleepTime
            max_retries -= 1
            if max_retries == -1:
                raise

#------------------------------------------------------------------------
Example #4
Source File: dbutils.py From oss-ftp with MIT License
def DeadlockWrap(function, *_args, **_kwargs):
    """DeadlockWrap(function, *_args, **_kwargs) - automatically retries
    function in case of a database deadlock.

    This is a function intended to be used to wrap database calls such
    that they perform retrys with exponentially backing off sleeps in
    between when a DBLockDeadlockError exception is raised.

    A 'max_retries' parameter may optionally be passed to prevent it
    from retrying forever (in which case the exception will be reraised).

        d = DB(...)
        d.open(...)
        DeadlockWrap(d.put, "foo", data="bar")  # set key "foo" to "bar"
    """
    sleeptime = _deadlock_MinSleepTime
    max_retries = _kwargs.get('max_retries', -1)
    if 'max_retries' in _kwargs:
        del _kwargs['max_retries']
    while True:
        try:
            return function(*_args, **_kwargs)
        except db.DBLockDeadlockError:
            if _deadlock_VerboseFile:
                _deadlock_VerboseFile.write(
                    'dbutils.DeadlockWrap: sleeping %1.3f\n' % sleeptime)
            _sleep(sleeptime)
            # exponential backoff in the sleep time
            sleeptime *= 2
            if sleeptime > _deadlock_MaxSleepTime:
                sleeptime = _deadlock_MaxSleepTime
            max_retries -= 1
            if max_retries == -1:
                raise

#------------------------------------------------------------------------
Example #5
Source File: dbutils.py From Splunking-Crime with GNU Affero General Public License v3.0
def DeadlockWrap(function, *_args, **_kwargs):
    """DeadlockWrap(function, *_args, **_kwargs) - automatically retries
    function in case of a database deadlock.

    This is a function intended to be used to wrap database calls such
    that they perform retrys with exponentially backing off sleeps in
    between when a DBLockDeadlockError exception is raised.

    A 'max_retries' parameter may optionally be passed to prevent it
    from retrying forever (in which case the exception will be reraised).

        d = DB(...)
        d.open(...)
        DeadlockWrap(d.put, "foo", data="bar")  # set key "foo" to "bar"
    """
    sleeptime = _deadlock_MinSleepTime
    max_retries = _kwargs.get('max_retries', -1)
    if 'max_retries' in _kwargs:
        del _kwargs['max_retries']
    while True:
        try:
            return function(*_args, **_kwargs)
        except db.DBLockDeadlockError:
            if _deadlock_VerboseFile:
                _deadlock_VerboseFile.write(
                    'dbutils.DeadlockWrap: sleeping %1.3f\n' % sleeptime)
            _sleep(sleeptime)
            # exponential backoff in the sleep time
            sleeptime *= 2
            if sleeptime > _deadlock_MaxSleepTime:
                sleeptime = _deadlock_MaxSleepTime
            max_retries -= 1
            if max_retries == -1:
                raise

#------------------------------------------------------------------------
Example #6
Source File: dbutils.py From PokemonGo-DesktopMap with MIT License
def DeadlockWrap(function, *_args, **_kwargs):
    """DeadlockWrap(function, *_args, **_kwargs) - automatically retries
    function in case of a database deadlock.

    This is a function intended to be used to wrap database calls such
    that they perform retrys with exponentially backing off sleeps in
    between when a DBLockDeadlockError exception is raised.

    A 'max_retries' parameter may optionally be passed to prevent it
    from retrying forever (in which case the exception will be reraised).

        d = DB(...)
        d.open(...)
        DeadlockWrap(d.put, "foo", data="bar")  # set key "foo" to "bar"
    """
    sleeptime = _deadlock_MinSleepTime
    max_retries = _kwargs.get('max_retries', -1)
    if 'max_retries' in _kwargs:
        del _kwargs['max_retries']
    while True:
        try:
            return function(*_args, **_kwargs)
        except db.DBLockDeadlockError:
            if _deadlock_VerboseFile:
                _deadlock_VerboseFile.write(
                    'dbutils.DeadlockWrap: sleeping %1.3f\n' % sleeptime)
            _sleep(sleeptime)
            # exponential backoff in the sleep time
            sleeptime *= 2
            if sleeptime > _deadlock_MaxSleepTime:
                sleeptime = _deadlock_MaxSleepTime
            max_retries -= 1
            if max_retries == -1:
                raise

#------------------------------------------------------------------------