Python gevent.get_hub() Examples
The following are 16 code examples of gevent.get_hub(), collected from open-source projects. The source file, project, and license are noted above each example. You may also want to check out the other available functions and classes of the gevent module.
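Before the project examples, here is a minimal sketch (not taken from any of the projects below) showing what gevent.get_hub() returns and the hub attributes most of these examples rely on (loop, threadpool, resolver):

    import gevent

    # gevent.get_hub() returns the Hub greenlet for the current thread; it owns
    # the event loop and a lazily created native thread pool.
    hub = gevent.get_hub()
    print(type(hub))               # e.g. <class 'gevent.hub.Hub'>
    print(hub.loop)                # the underlying event-loop object
    print(hub.threadpool.maxsize)  # thread pool used for blocking calls such as DNS
    print(hub.resolver)            # resolver gevent uses for hostname lookups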
Example #1
Source File: test_gipc.py From gipc with MIT License
def test_threadpool_resolver_mp(self):
    h = gevent.get_hub()
    t = h.threadpool
    r = h.resolver
    p = start_process(target=complchild_test_threadpool_resolver_mp)
    # Note(JP): seen this fail once on Windows CI with a timeout of 1 s.
    p.join(timeout=2)
    assert p.exitcode == 0
Example #2
Source File: test_gipc.py From gipc with MIT License
def complchild_test_threadpool_resolver_mp():
    h = gevent.get_hub()
    t = h.threadpool
    r = h.resolver
Example #3
Source File: gipc.py From gipc with MIT License
def start(self):
    # Start grabbing SIGCHLD within libev event loop.
    gevent.get_hub().loop.install_sigchld()
    # Run new process (based on `fork()` on POSIX-compliant systems).
    super(_GProcess, self).start()
    # The occurrence of SIGCHLD is recorded asynchronously in libev.
    # This guarantees proper behavior even if the child watcher is
    # started after the child exits. Start child watcher now.
    self._sigchld_watcher = gevent.get_hub().loop.child(self.pid)
    self._returnevent = gevent.event.Event()
    self._sigchld_watcher.start(
        self._on_sigchld, self._sigchld_watcher)
    log.debug("SIGCHLD watcher for %s started.", self.pid)
Example #4
Source File: gipc.py From gipc with MIT License
def get(self, timeout=None):
    """Receive, decode and return data from the pipe. Block
    gevent-cooperatively until data is available or timeout expires. The
    default decoder is ``pickle.loads``.

    :arg timeout: ``None`` (default) or a ``gevent.Timeout`` instance. The
        timeout must be started to take effect and is canceled when the first
        byte of a new message arrives (i.e. providing a timeout does not
        guarantee that the method completes within the timeout interval).

    :returns: a Python object.

    Raises:
        - :exc:`gevent.Timeout` (if provided)
        - :exc:`GIPCError`
        - :exc:`GIPCClosed`
        - :exc:`pickle.UnpicklingError`

    Recommended usage for silent timeout control::

        with gevent.Timeout(TIME_SECONDS, False) as t:
            reader.get(timeout=t)

    .. warning::

        The timeout control is currently not available on Windows, because
        Windows can't apply select() to pipe handles. An ``OSError`` is
        expected to be raised in case you set a timeout.
    """
    self._validate()
    with self._lock:
        if timeout:
            # Wait for ready-to-read event.
            h = gevent.get_hub()
            h.wait(h.loop.io(self._fd, 1))
            timeout.cancel()
        msize, = struct.unpack("!i", self._recv_in_buffer(4).getvalue())
        bindata = self._recv_in_buffer(msize).getvalue()
    return self._decoder(bindata)
Example #5
Source File: task.py From x-proxies with Apache License 2.0
def timer(after, repeat):
    """Create (but do not start) a timer watcher on the hub's event loop.

    :param after: delay in seconds before the timer first fires.
    :param repeat: interval in seconds for subsequent fires (0 disables repeating).
    :return: a timer watcher object.
    """
    return gevent.get_hub().loop.timer(after, repeat)
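A brief usage sketch (assuming gevent's watcher API, where a timer created via loop.timer(after, repeat) is armed with .start(callback) and fires on the hub's loop):

    import gevent

    def _on_timer():
        print('timer fired')

    # One-shot timer: fire once after 0.5 s (repeat=0 means no re-arming).
    watcher = gevent.get_hub().loop.timer(0.5, 0)
    watcher.start(_on_timer)

    gevent.sleep(1)  # yield to the hub so the watcher callback can run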
Example #6
Source File: greentest.py From gsmtpd with MIT License
def wrap_error_fatal(method):
    @wraps(method)
    def wrapped(self, *args, **kwargs):
        # XXX should also be able to do gevent.SYSTEM_ERROR = object
        # which is a global default to all hubs
        SYSTEM_ERROR = gevent.get_hub().SYSTEM_ERROR
        gevent.get_hub().SYSTEM_ERROR = object
        try:
            return method(self, *args, **kwargs)
        finally:
            gevent.get_hub().SYSTEM_ERROR = SYSTEM_ERROR
    return wrapped
Example #7
Source File: greentest.py From gsmtpd with MIT License
def wrap_restore_handle_error(method):
    @wraps(method)
    def wrapped(self, *args, **kwargs):
        old = gevent.get_hub().handle_error
        try:
            return method(self, *args, **kwargs)
        finally:
            gevent.get_hub().handle_error = old
            if self.peek_error()[0] is not None:
                gevent.getcurrent().throw(*self.peek_error()[1:])
    return wrapped
Example #8
Source File: greentest.py From gsmtpd with MIT License
def expect_one_error(self):
    assert self._error == self._none, self._error
    self._old_handle_error = gevent.get_hub().handle_error
    gevent.get_hub().handle_error = self._store_error
Example #9
Source File: greentest.py From gsmtpd with MIT License
def _store_error(self, where, type, value, tb):
    del tb
    if self._error != self._none:
        gevent.get_hub().parent.throw(type, value)
    else:
        self._error = (where, type, value)
Example #10
Source File: app.py From pyethapp with BSD 3-Clause "New" or "Revised" License
def run(ctx, dev):
    """Start the client (--dev to stop on error)"""
    # create app
    app = EthApp(ctx.obj['config'])

    # development mode
    if dev:
        gevent.get_hub().SYSTEM_ERROR = BaseException
        try:
            ctx.obj['config']['client_version'] += '/' + os.getlogin()
        except:
            log.warn("can't get and add login name to client_version")

    # dump config
    konfig.dump_config(ctx.obj['config'])

    # register services
    for service in services:
        assert issubclass(service, BaseService)
        if service.name not in app.config['deactivated_services']:
            assert service.name not in app.services
            service.register_with_app(app)
            assert hasattr(app.services, service.name)

    # start app
    app.start()

    # wait for interrupt
    evt = Event()
    gevent.signal(signal.SIGQUIT, evt.set)
    gevent.signal(signal.SIGTERM, evt.set)
    gevent.signal(signal.SIGINT, evt.set)
    evt.wait()

    # finally stop
    app.stop()
Example #11
Source File: pow_service.py From pyethapp with BSD 3-Clause "New" or "Revised" License
def powworker_process(cpipe, cpu_pct):
    "entry point in forked sub processes, setup env"
    gevent.get_hub().SYSTEM_ERROR = BaseException  # stop on any exception
    PoWWorker(cpipe, cpu_pct).run()


# parent process defined below ##############################################
Example #12
Source File: pypyodbc.py From edwin with Apache License 2.0
def monkey_patch_for_gevent():
    import functools, gevent
    apply_e = gevent.get_hub().threadpool.apply_e

    def monkey_patch(func):
        @functools.wraps(func)
        def wrap(*args, **kwargs):
            # if DEBUG: print('%s called with %s %s' % (func, args, kwargs))
            return apply_e(Exception, func, args, kwargs)
        return wrap

    for attr in dir(ODBC_API):
        if attr.startswith('SQL') and hasattr(getattr(ODBC_API, attr), 'argtypes'):
            setattr(ODBC_API, attr, monkey_patch(getattr(ODBC_API, attr)))
Example #13
Source File: gevent.py From easypy with BSD 3-Clause "New" or "Revised" License
def apply_patch(hogging_detection=False, real_threads=1):
    _logger.info('applying gevent patch (%s real threads)', real_threads)

    # real_threads is 1 by default so it will be possible to run watch_threads concurrently
    if hogging_detection:
        real_threads += 1

    if real_threads:
        _RealThreadsPool(real_threads)

    _patch_module_locks()

    import gevent
    import gevent.monkey

    for m in ["easypy.threadtree", "easypy.concurrency"]:
        assert m not in sys.modules, "Must apply the gevent patch before importing %s" % m

    gevent.monkey.patch_all(Event=True, sys=True)
    _unpatch_logging_handlers_lock()

    global HUB
    HUB = gevent.get_hub()

    global threading
    import threading
    for thread in threading.enumerate():
        _set_thread_uuid(thread.ident)
    _set_main_uuid()  # the patched threading has a new ident for the main thread

    # this will declutter the thread dumps from gevent/greenlet frames
    from .threadtree import _BOOTSTRAPPERS
    import gevent, gevent.threading, gevent.greenlet
    _BOOTSTRAPPERS.update([gevent, gevent.threading, gevent.greenlet])

    if hogging_detection:
        import greenlet
        greenlet.settrace(lambda *args: _greenlet_trace_func(*args))
        defer_to_thread(detect_hogging, 'detect-hogging')
Example #14
Source File: logging.py From taserver with GNU Affero General Public License v3.0
def set_up_logging(filename):
    gevent.get_hub().exception_stream = io.StringIO()
    logging.config.dictConfig({
        'version': 1,
        'formatters': {
            'default': {'format': '%(asctime)s - %(name)s - %(levelname)s - %(message)s'}
        },
        'handlers': {
            'console': {
                'class': 'logging.StreamHandler',
                'formatter': 'default',
                'stream': 'ext://sys.stdout'
            },
            'file': {
                'class': 'logging.handlers.RotatingFileHandler',
                'formatter': 'default',
                'filename': filename,
                'maxBytes': 20 * 1024 * 1024,
                'backupCount': 5,
            }
        },
        'loggers': {
            '': {
                'level': 'INFO',
                'handlers': ['console', 'file']
            }
        }
    })
Example #15
Source File: runner.py From recipes-py with Apache License 2.0
def main(recipe_deps, cov_file, is_train, cover_module_imports):
    # TODO(iannucci): Route and log greenlet exception information somewhere
    # useful as part of each test case.
    gevent.get_hub().exception_stream = None

    main_repo = recipe_deps.main_repo

    cov_data = coverage.CoverageData()
    if cover_module_imports:
        cov_data.update(_cover_all_imports(main_repo))

    test_data_cache = {}

    path_cleaner = _make_path_cleaner(recipe_deps)

    fatal = False

    while True:
        test_desc = _read_test_desc()
        if not test_desc:
            break  # EOF or error

        result = Outcome()
        try:
            full_name = '%s.%s' % (test_desc.recipe_name, test_desc.test_name)
            test_result = result.test_results[full_name]

            recipe = main_repo.recipes[test_desc.recipe_name]

            if cov_file:
                # We have to start coverage now because we want to cover the
                # importation of the covered recipe and/or covered recipe modules.
                cov = coverage.Coverage(config_file=False, concurrency='gevent',
                                        include=recipe.coverage_patterns)
                cov.start()  # to cover execfile of recipe/module.__init__

            test_data = _get_test_data(test_data_cache, recipe, test_desc.test_name)
            try:
                _run_test(path_cleaner, test_result, recipe_deps, test_desc,
                          test_data, is_train)
            except Exception as ex:  # pylint: disable=broad-except
                test_result.internal_error.append('Uncaught exception: %r' % (ex,))
                test_result.internal_error.extend(traceback.format_exc().splitlines())

            if cov_file:
                cov.stop()
                cov_data.update(cov.get_data())

        except Exception as ex:  # pylint: disable=broad-except
            result.internal_error.append('Uncaught exception: %r' % (ex,))
            result.internal_error.extend(traceback.format_exc().splitlines())
            fatal = True

        if not write_message(sys.stdout, result) or fatal:
            break  # EOF

    if cov_file:
        # Sometimes we stop when the cov_file hasn't gotten created yet
        if os.path.exists(os.path.dirname(cov_file)):
            coverage.data.CoverageDataFiles(basename=cov_file).write(cov_data)
Example #16
Source File: subproc.py From recipes-py with Apache License 2.0
def _safe_close(debug_log, handle_name, handle):
    """Safely attempt to close the given handle.

    Args:
      * debug_log (Stream) - Stream to write debug information to about closing
        this handle.
      * handle_name (str) - The name of the handle (like 'stdout', 'stderr').
      * handle (file-like-object) - The file object to call .close() on.

    NOTE: On Windows this may end up leaking threads for processes which spawn
    'daemon' children that hang onto the handles we pass. In this case
    debug_log is updated with as much detail as we know and the gevent
    threadpool's maxsize is increased by 2 (one thread blocked on reading from
    the handle, and one thread blocked on trying to close the handle).
    """
    try:
        debug_log.write_line('closing handle %r' % handle_name)
        with gevent.Timeout(.1):
            handle.close()
        debug_log.write_line('  closed!')
    except gevent.Timeout:
        # This should never happen... except on Windows when the process we
        # launched itself leaked.
        debug_log.write_line('  LEAKED: timeout closing handle')
        # We assume we've now leaked 2 threads; one is blocked on 'read' and
        # the other is blocked on 'close'. Add two more threads to the pool so
        # we do not globally block the recipe engine on subsequent steps.
        gevent.get_hub().threadpool.maxsize += 2
    except IOError as ex:
        # TODO(iannucci): Currently this leaks handles on Windows for processes
        # like the goma compiler proxy; because of python2.7's inability to set
        # close_fds=True and also redirect std handles, daemonized subprocesses
        # actually inherit our handles (yuck).
        #
        # This is fixable on python3, but not likely to be fixable on python 2.
        debug_log.write_line('  LEAKED: unable to close: %r' % (ex,))
        # We assume we've now leaked 2 threads; one is blocked on 'read' and
        # the other is blocked on 'close'. Add two more threads to the pool so
        # we do not globally block the recipe engine on subsequent steps.
        gevent.get_hub().threadpool.maxsize += 2
    except RuntimeError:
        # NOTE(gevent): This can happen as a race between the worker greenlet
        # and the process ending. See gevent.subprocess.Popen.communicate,
        # which does the same thing.
        debug_log.write_line('  LEAKED?: race with IO worker')