Python logging.handlers.QueueListener() Examples

The following are 8 code examples of logging.handlers.QueueListener(), taken from open-source projects. Each example is preceded by a link to its original project and source file, and you may also want to check out the other functions and classes available in the logging.handlers module.
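All of the examples below build on the same standard-library pattern: a QueueHandler puts log records onto a queue instead of emitting them directly, and a QueueListener drains that queue on a background thread and dispatches each record to the real handlers. A minimal, self-contained sketch of that pattern:

import logging
import logging.handlers
import queue

# Producer side: the QueueHandler enqueues records instead of emitting them.
log_queue = queue.Queue(-1)  # -1 means no size limit
root = logging.getLogger()
root.setLevel(logging.INFO)
root.addHandler(logging.handlers.QueueHandler(log_queue))

# Consumer side: the QueueListener drains the queue on a background thread
# and forwards each record to the "real" handlers.
listener = logging.handlers.QueueListener(
    log_queue, logging.StreamHandler(), respect_handler_level=True)
listener.start()

root.info("this record travels through the queue")
listener.stop()  # flush remaining records and join the listener thread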
Example #1
Source File: pool.py    From PyPlanet with GNU General Public License v3.0
def __init__(self, pool_names, max_restarts=0, options=None):
		self.names = pool_names
		self.queue = multiprocessing.Queue()
		self.pool = dict()
		self.max_restarts = max_restarts
		self.options = options or dict()

		self.dog_path = os.curdir
		self.dog_handler = LiveReload(self)
		# self.dog_observer = Observer()
		# self.dog_observer.schedule(self.dog_handler, self.dog_path, recursive=True)

		if multiprocessing.get_start_method() != 'fork':  # pragma: no cover
			root_logger = logging.getLogger()
			self.log_listener = QueueListener(self.queue, *root_logger.handlers)

		# TODO: Find out how to get the watchdog + livereload working on a later moment.
		# self.dog_observer.start()

		self._restarts = dict() 
Example #2
Source File: logging.py    From quart with MIT License
def _setup_logging_queue(*handlers: Handler) -> QueueHandler:
    """Create a new LocalQueueHandler and start an associated QueueListener.
    """
    queue: Queue = Queue()
    queue_handler = LocalQueueHandler(queue)

    serving_listener = QueueListener(queue, *handlers, respect_handler_level=True)
    serving_listener.start()

    return queue_handler 
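LocalQueueHandler is Quart's own QueueHandler subclass and is not shown in this snippet. A plausible sketch, assuming the queue lives in the same process so records never need the copy-and-format step performed by QueueHandler.prepare():

import logging
from logging.handlers import QueueHandler

class LocalQueueHandler(QueueHandler):
    def emit(self, record: logging.LogRecord) -> None:
        # Skip self.prepare(): the record stays in-process, so it can be
        # enqueued as-is without being formatted or copied.
        try:
            self.enqueue(record)
        except Exception:
            self.handleError(record)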
Example #3
Source File: autotune.py    From scVI with MIT License
def _cleanup_processes_files():
    """Cleanup function, starts with latest processes/files.

    Terminates processes, sets stop events to stop threads, closes open files.
    """
    logger_all.info("Cleaning up")
    logger_all.debug("Cleaning up: closing files.")
    for f in open_files[::-1]:
        if not f.closed:
            f.close()
    logger_all.debug("Cleaning up: closing queues.")
    for q in started_queues:
        q.close()
    logger_all.debug("Cleaning up: setting cleanup_event and joining threads.")
    for t in started_threads[::-1]:
        if t.is_alive():
            logger_all.debug("Closing Thread {}.".format(t.name))
            t.stop_event.set()
            t.join()
        else:
            logger_all.debug("Thread {} already done.".format(t.name))
    logger_all.debug("Cleaning up: terminating processes.")
    for p in started_processes[::-1]:
        if isinstance(p, Popen):
            if p.poll() is None:
                logger_all.debug("Terminating mongod process.")
                p.terminate()
                p.wait()
            else:
                logger_all.debug("mongod process already done.")
        if isinstance(p, multiprocessing.Process):
            if p.is_alive():
                logger_all.debug("Terminating Process {}.".format(p.name))
                p.terminate()
            else:
                logger_all.debug("Process {} already done.".format(p.name))
        if isinstance(p, QueueListener):
            if p._thread is not None and not p.queue._closed:
                p.stop() 
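A cleanup routine like this is only useful if it is guaranteed to run; a hedged usage sketch (the actual registration point in scVI is not shown here) registers it for interpreter exit:

import atexit

# Run the cleanup even if autotuning exits early or raises.
atexit.register(_cleanup_processes_files)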
Example #4
Source File: queue.py    From concurrent-log-handler with Apache License 2.0
def setup_logging_queues():
    if sys.version_info.major < 3:
        raise RuntimeError("This feature requires Python 3.")

    queue_listeners = []

    # Q: What about loggers created after this is called?
    # A: if they don't attach their own handlers they should be fine
    for logger in get_all_logger_names(include_root=True):
        logger = logging.getLogger(logger)
        if logger.handlers:
            log_queue = queue.Queue(-1)  # No limit on size

            queue_handler = QueueHandler(log_queue)
            queue_listener = QueueListener(
                log_queue, respect_handler_level=True)

            queuify_logger(logger, queue_handler, queue_listener)
            # print("Replaced logger %s with queue listener: %s" % (
            #     logger, queue_listener
            # ))
            queue_listeners.append(queue_listener)

    for listener in queue_listeners:
        listener.start()

    atexit.register(stop_queue_listeners, *queue_listeners)
    return 
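stop_queue_listeners is registered with atexit above but not shown. A minimal sketch of what such a helper plausibly does, assuming it should never let logging teardown interrupt interpreter shutdown:

def stop_queue_listeners(*listeners):
    for listener in listeners:
        try:
            # stop() flushes any queued records and joins the listener thread.
            listener.stop()
        except Exception:
            # Swallow errors: a failing handler must not break shutdown.
            pass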
Example #5
Source File: queue.py    From concurrent-log-handler with Apache License 2.0
def queuify_logger(logger, queue_handler, queue_listener):
    """Replace logger's handlers with a queue handler while adding existing
    handlers to a queue listener.

    This is useful when you want to use a default logging config but then
    optionally add a logger's handlers to a queue during runtime.

    Args:
        logger (Logger or str): Logger instance, or name of the logger, whose
            handlers should be queue-ified.
        queue_handler (QueueHandler): Instance of a ``QueueHandler``.
        queue_listener (QueueListener): Instance of a ``QueueListener``.

    """
    if isinstance(logger, str):
        logger = logging.getLogger(logger)

    # Get handlers that aren't being listened for.
    handlers = [handler for handler in logger.handlers
                if handler not in queue_listener.handlers]

    if handlers:
        # The default QueueListener stores handlers as a tuple.
        queue_listener.handlers = \
            tuple(list(queue_listener.handlers) + handlers)

    # Remove logger's handlers and replace with single queue handler.
    del logger.handlers[:]
    logger.addHandler(queue_handler) 
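A short usage sketch of the helper above (the configuration itself is illustrative, not part of the library): configure ordinary handlers first, then move them behind a queue.

import logging
import queue
from logging.handlers import QueueHandler, QueueListener

logging.basicConfig(level=logging.INFO)  # attaches a StreamHandler to the root logger

log_queue = queue.Queue(-1)
queue_handler = QueueHandler(log_queue)
listener = QueueListener(log_queue, respect_handler_level=True)

# Root's StreamHandler is moved onto the listener; root keeps only the QueueHandler.
queuify_logger(logging.getLogger(), queue_handler, listener)
listener.start()
logging.getLogger().info("now routed through the queue")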
Example #6
Source File: log.py    From lkpy with MIT License
def log_queue():
    """
    Get the log queue for child process logging.
    """
    global _log_queue, _log_listener
    from lenskit.util.parallel import LKContext
    ctx = LKContext.INSTANCE
    if _log_queue is None:
        _log_queue = ctx.Queue()
        _log_listener = QueueListener(_log_queue, InjectHandler())
        _log_listener.start()
    return _log_queue 
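InjectHandler and LKContext are lkpy internals not shown here. The queue returned by log_queue() is meant to be handed to worker processes; a hedged sketch of the matching worker-side setup (the function name is hypothetical) attaches a QueueHandler so every record is shipped back to the parent's listener:

import logging
from logging.handlers import QueueHandler

def setup_worker_logging(log_queue):
    # In the child process: replace any inherited handlers with a single
    # QueueHandler that forwards records to the parent's QueueListener.
    root = logging.getLogger()
    root.handlers[:] = [QueueHandler(log_queue)]
    root.setLevel(logging.DEBUG)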
Example #7
Source File: logging.py    From flambe with MIT License
def __init__(self,
                 log_dir: str,
                 verbose: bool = False,
                 root_log_level: Optional[int] = None,
                 capture_warnings: bool = True,
                 console_prefix: Optional[str] = None,
                 hyper_params: Optional[Dict] = None) -> None:
        self.log_dir = log_dir
        self.verbose = verbose
        self.log_level = logging.NOTSET
        self.capture_warnings = capture_warnings
        self.listener: handlers.QueueListener
        self.console_prefix = console_prefix
        self.handlers: List[logging.Handler] = []
        self.queue_handler: handlers.QueueHandler
        self.old_root_log_level: int = logging.NOTSET
        self.hyper_params: Dict = hyper_params or {} 
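This __init__ only records configuration; the queue and listener annotated above are created elsewhere in flambe. A hypothetical sketch of that wiring (the method name, handler setup, and module-level imports of logging, multiprocessing, and logging.handlers as handlers are all assumptions, not flambe's actual code):

    def start(self) -> None:
        queue = multiprocessing.Queue()
        self.queue_handler = handlers.QueueHandler(queue)
        logging.getLogger().addHandler(self.queue_handler)

        # self.handlers would first be populated with file/console handlers
        # writing under self.log_dir.
        self.listener = handlers.QueueListener(
            queue, *self.handlers, respect_handler_level=True)
        self.listener.start()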
Example #8
Source File: compare.py    From mikado with GNU Lesser General Public License v3.0
def setup_logger(args):

    """
    Function to setup the logger for the compare function.
    :param args:
    :param manager:
    :return:
    """

    args.log_queue = mp.Queue(-1)
    args.queue_handler = log_handlers.QueueHandler(args.log_queue)

    if args.log is not None:
        _log_folder = os.path.dirname(args.log)
        if _log_folder and not os.path.exists(_log_folder):
            os.makedirs(_log_folder)
        handler = logging.FileHandler(args.log, mode="w")
        logger = logging.getLogger("main_compare")
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        logger.propagate = False
    else:
        logger = create_default_logger("main_compare")
        handler = logger.handlers[0]

    if args.verbose is False:
        logger.setLevel(logging.INFO)
    else:
        logger.setLevel(logging.DEBUG)

    logger.propagate = False
    log_queue_listener = log_handlers.QueueListener(args.log_queue, logger)
    log_queue_listener.propagate = False
    log_queue_listener.start()

    queue_logger = logging.getLogger("main_queue")
    for handler in queue_logger.handlers:
        queue_logger.removeHandler(handler)
    if args.verbose is False:
        queue_logger.setLevel(logging.INFO)
    else:
        queue_logger.setLevel(logging.DEBUG)
    main_queue_handler = log_handlers.QueueHandler(args.log_queue)
    queue_logger.propagate = False
    queue_logger.addHandler(main_queue_handler)

    return args, handler, logger, log_queue_listener, queue_logger
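Because the function returns the running listener, the caller is responsible for tearing it down; a hedged usage sketch (not shown in the snippet) stops the listener so queued records are flushed before exit:

args, handler, logger, log_queue_listener, queue_logger = setup_logger(args)
try:
    pass  # run the comparison; workers log through args.log_queue
finally:
    log_queue_listener.stop()  # flush pending records and join the thread
    args.log_queue.close()     # release the multiprocessing queue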