Python structlog.configure() Examples

The following are 21 code examples of structlog.configure(), drawn from open-source projects. Each example notes its original project, source file, and license, so you can look up the full context in the corresponding repository. You may also want to check out the other available functions and classes of the structlog module.
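
All of the examples below share the same basic shape: choose a processor chain, a logger factory, and a wrapper class, then hand them to structlog.configure(). For reference, here is a minimal, generic sketch of that pattern; it is not taken from any of the listed projects, and the logger name and event keys are made up.

import logging
import sys

import structlog

# Route structlog events through the standard library and render them as JSON.
logging.basicConfig(stream=sys.stdout, format="%(message)s", level=logging.INFO)

structlog.configure(
    processors=[
        structlog.stdlib.filter_by_level,             # drop events below the stdlib logger's level
        structlog.stdlib.add_logger_name,             # add a "logger" key
        structlog.stdlib.add_log_level,               # add a "level" key
        structlog.processors.TimeStamper(fmt="iso"),
        structlog.processors.StackInfoRenderer(),
        structlog.processors.format_exc_info,
        structlog.processors.JSONRenderer(),          # final renderer: one JSON object per line
    ],
    context_class=dict,
    logger_factory=structlog.stdlib.LoggerFactory(),
    wrapper_class=structlog.stdlib.BoundLogger,
    cache_logger_on_first_use=True,
)

structlog.get_logger("example").info("configured", pid=12345)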
Example #1
Source File: app.py    From openconnect-sso with GNU General Public License v3.0
def configure_logger(logger, level):
    structlog.configure(
        processors=[
            structlog.stdlib.add_log_level,
            structlog.stdlib.add_logger_name,
            structlog.processors.format_exc_info,
            structlog.stdlib.ProcessorFormatter.wrap_for_formatter,
        ],
        logger_factory=structlog.stdlib.LoggerFactory(),
    )

    formatter = structlog.stdlib.ProcessorFormatter(
        processor=structlog.dev.ConsoleRenderer()
    )

    handler = logging.StreamHandler()
    handler.setFormatter(formatter)
    logger.addHandler(handler)
    logger.setLevel(level) 
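
Example #1 is the ProcessorFormatter pattern: structlog events end with wrap_for_formatter, and a stdlib handler carrying ProcessorFormatter(ConsoleRenderer()) does the final rendering, so structlog events and plain logging records share one output path. A hypothetical call to the helper above (the logger name and event are made up, not from openconnect-sso):

import logging

import structlog

configure_logger(logging.getLogger("openconnect_sso"), logging.INFO)
structlog.get_logger("openconnect_sso").info("authenticated", user="example-user")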
Example #2
Source File: util.py    From pygreynoise with MIT License
def configure_logging():
    """Configure logging."""
    logging.basicConfig(stream=sys.stderr, format="%(message)s", level=logging.CRITICAL)
    logging.getLogger("greynoise").setLevel(logging.WARNING)
    structlog.configure(
        processors=[
            structlog.stdlib.add_logger_name,
            structlog.stdlib.add_log_level,
            structlog.stdlib.PositionalArgumentsFormatter(),
            structlog.processors.TimeStamper(fmt="%Y-%m-%d %H:%M.%S"),
            structlog.processors.StackInfoRenderer(),
            structlog.processors.format_exc_info,
            structlog.dev.ConsoleRenderer(),
        ],
        context_class=dict,
        logger_factory=structlog.stdlib.LoggerFactory(),
        wrapper_class=structlog.stdlib.BoundLogger,
        cache_logger_on_first_use=True,
    ) 
Example #3
Source File: app.py    From GovLens with MIT License
def __config_logger__(self):
        logging.basicConfig(
            level=logging.INFO, format=self.log_format, stream=sys.stdout,
        )

        structlog.configure(
            processors=[
                structlog.stdlib.filter_by_level,
                structlog.stdlib.PositionalArgumentsFormatter(),
                structlog.processors.StackInfoRenderer(),
                structlog.processors.format_exc_info,
                structlog.processors.JSONRenderer(),
            ],
            logger_factory=structlog.stdlib.LoggerFactory(),
            wrapper_class=structlog.stdlib.BoundLogger,
        )
        return structlog.get_logger("Startup") 
Example #4
Source File: logging.py    From build-relengapi with Mozilla Public License 2.0
def configure_logging(app):
    processors = [
        structlog.stdlib.filter_by_level,
        structlog.stdlib.PositionalArgumentsFormatter(),
        structlog.processors.StackInfoRenderer(),
        structlog.processors.format_exc_info,
    ]
    # send to mozdef before formatting into a string
    if app.config.get('MOZDEF_TARGET'):
        processors.append(mozdef_sender(app.config['MOZDEF_TARGET']))
    processors.append(UnstructuredRenderer())

    structlog.configure(
        context_class=structlog.threadlocal.wrap_dict(dict),
        processors=processors,
        logger_factory=structlog.stdlib.LoggerFactory(),
        wrapper_class=structlog.stdlib.BoundLogger,
        cache_logger_on_first_use=True,
    ) 
Example #5
Source File: log.py    From fleece with Apache License 2.0
def _configure_logger(logger_factory=None, wrapper_class=None):

    if not logger_factory:
        logger_factory = structlog.stdlib.LoggerFactory()
    if not wrapper_class:
        wrapper_class = structlog.stdlib.BoundLogger

    structlog.configure(
        processors=[
            structlog.stdlib.filter_by_level,
            add_request_ids_from_environment,
            structlog.stdlib.add_log_level,
            structlog.stdlib.add_logger_name,
            structlog.stdlib.PositionalArgumentsFormatter(),
            structlog.processors.TimeStamper(fmt="iso"),
            structlog.processors.StackInfoRenderer(),
            structlog.processors.format_exc_info,
            structlog.processors.JSONRenderer(sort_keys=True),
        ],
        context_class=WRAPPED_DICT_CLASS,
        logger_factory=logger_factory,
        wrapper_class=wrapper_class,
        cache_logger_on_first_use=True,
    ) 
Example #6
Source File: conftest.py    From parsec-cloud with GNU Affero General Public License v3.0
def blockstore(request, backend_store):
    # TODO: allow testing against swift?
    if backend_store.startswith("postgresql://"):
        config = PostgreSQLBlockStoreConfig()
    else:
        config = MockedBlockStoreConfig()

    # More or less a hack to be able to configure this fixture from
    # the test function by adding tags to it
    if request.node.get_closest_marker("raid0_blockstore"):
        config = RAID0BlockStoreConfig(blockstores=[config, MockedBlockStoreConfig()])
    if request.node.get_closest_marker("raid1_blockstore"):
        config = RAID1BlockStoreConfig(blockstores=[config, MockedBlockStoreConfig()])
    if request.node.get_closest_marker("raid5_blockstore"):
        config = RAID5BlockStoreConfig(
            blockstores=[config, MockedBlockStoreConfig(), MockedBlockStoreConfig()]
        )

    return config 
Example #7
Source File: config.py    From graphite-api with Apache License 2.0
def configure_logging(config):
    structlog.configure(processors=processors,
                        logger_factory=structlog.stdlib.LoggerFactory(),
                        wrapper_class=structlog.stdlib.BoundLogger,
                        cache_logger_on_first_use=True)
    config.setdefault('logging', {})
    config['logging'].setdefault('version', 1)
    config['logging'].setdefault('handlers', {})
    config['logging'].setdefault('formatters', {})
    config['logging'].setdefault('loggers', {})
    config['logging']['handlers'].setdefault('raw', {
        'level': 'DEBUG',
        'class': 'logging.StreamHandler',
        'formatter': 'raw',
    })
    config['logging']['loggers'].setdefault('root', {
        'handlers': ['raw'],
        'level': 'DEBUG',
        'propagate': False,
    })
    config['logging']['loggers'].setdefault('graphite_api', {
        'handlers': ['raw'],
        'level': 'DEBUG',
    })
    config['logging']['formatters']['raw'] = {'()': StructlogFormatter}
    dictConfig(config['logging'])
    if 'path' in config:
        logger.info("loading configuration", path=config['path'])
    else:
        logger.info("loading default configuration") 
Example #8
Source File: structlog_setup.py    From voltha with Apache License 2.0
def update_logging(instance_id, vcore_id, cache_on_use=True):
    """
    Add the vcore id to the structured logger
    :param vcore_id:  The assigned vcore id
    :return: structured logger
    """

    def add_instance_id(_, __, event_dict):
        event_dict['instance_id'] = instance_id
        return event_dict

    def add_vcore_id(_, __, event_dict):
        event_dict['vcore_id'] = vcore_id
        return event_dict

    structlog.configure(
        processors=[
            structlog.stdlib.filter_by_level,
            structlog.stdlib.PositionalArgumentsFormatter(),
            structlog.processors.StackInfoRenderer(),
            structlog.processors.format_exc_info,
            add_instance_id,
            add_vcore_id,
            structlog.processors.UnicodeEncoder(),
            structlog.stdlib.ProcessorFormatter.wrap_for_formatter,
        ],
        context_class=dict,
        logger_factory=structlog.stdlib.LoggerFactory(),
        wrapper_class=structlog.stdlib.BoundLogger,
        cache_logger_on_first_use=cache_on_use,
    )

    # Mark first line of log
    log = structlog.get_logger()
    log.info("updated-logger")
    return log 
Example #9
Source File: structlog_setup.py    From voltha with Apache License 2.0
def setup_logging(log_config, instance_id,
                  verbosity_adjust=0, cache_on_use=True):
    """
    Set up logging such that:
    - The primary logging entry method is structlog
      (see http://structlog.readthedocs.io/en/stable/index.html)
    - Optionally cache the logger on first use
    :return: structured logger
    """

    def add_instance_id(_, __, event_dict):
        event_dict['instance_id'] = instance_id
        return event_dict

    # Configure standard logging
    logging.config.dictConfig(log_config)
    logging.root.level -= 10 * verbosity_adjust

    structlog.configure(
        processors=[
            structlog.stdlib.filter_by_level,
            structlog.stdlib.PositionalArgumentsFormatter(),
            structlog.processors.StackInfoRenderer(),
            structlog.processors.format_exc_info,
            add_instance_id,
            structlog.processors.UnicodeEncoder(),
            structlog.stdlib.ProcessorFormatter.wrap_for_formatter,
        ],
        context_class=dict,
        logger_factory=structlog.stdlib.LoggerFactory(),
        wrapper_class=structlog.stdlib.BoundLogger,
        cache_logger_on_first_use=cache_on_use,
    )

    # Mark first line of log
    log = structlog.get_logger()
    log.info("first-log-line, logging level %d" % logging.root.level)
    return log 
Example #10
Source File: logging.py    From raiden-services with MIT License
def setup_logging(log_level: str, log_json: bool) -> None:
    """ Basic structlog setup """

    logging.basicConfig(level=log_level, stream=sys.stdout, format="%(message)s")

    logging.getLogger("web3").setLevel("INFO")
    logging.getLogger("urllib3").setLevel("INFO")

    shared_processors = [
        format_to_hex,
        structlog.stdlib.add_log_level,
        structlog.stdlib.add_logger_name,
        structlog.stdlib.PositionalArgumentsFormatter(),
        structlog.processors.TimeStamper(fmt="%Y-%m-%d %H:%M:%S.%f"),
        structlog.processors.StackInfoRenderer(),
        structlog.processors.format_exc_info,
    ]

    if log_json:
        processors = shared_processors + [structlog.processors.JSONRenderer()]
    else:
        processors = shared_processors + [structlog.dev.ConsoleRenderer()]

    structlog.configure(
        processors=processors,
        context_class=dict,
        logger_factory=structlog.stdlib.LoggerFactory(),
        wrapper_class=structlog.stdlib.BoundLogger,
        cache_logger_on_first_use=True,
    ) 
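
In this setup the renderer is chosen at runtime: with log_json=True every event is emitted as one JSON object per line, otherwise structlog.dev.ConsoleRenderer produces key=value console output. A hedged usage sketch (argument values and event keys are made up; format_to_hex is raiden-services' own processor, defined elsewhere in the module):

import structlog

setup_logging(log_level="INFO", log_json=True)
structlog.get_logger(__name__).info("channel_opened", partner="0xABC", deposit=10)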
Example #11
Source File: conftest.py    From parsec-cloud with GNU Affero General Public License v3.0
def pytest_configure(config):
    # Patch pytest-trio
    patch_pytest_trio()
    # Configure structlog to redirect everything in logging
    structlog.configure(
        logger_factory=structlog.stdlib.LoggerFactory(),
        processors=[
            structlog.processors.StackInfoRenderer(),
            structlog.processors.format_exc_info,
            structlog.processors.TimeStamper(fmt="%Y-%m-%d %H:%M:%S"),
            structlog.stdlib.add_logger_name,
            structlog.stdlib.add_log_level,
            structlog.dev.ConsoleRenderer(),
        ],
    )
    # Lock configuration
    structlog.configure = lambda *args, **kwargs: None
    # Add helper to caplog
    patch_caplog()
    if config.getoption("--run-postgresql-cluster"):
        pgurl = bootstrap_postgresql_testbed()
        capturemanager = config.pluginmanager.getplugin("capturemanager")
        if capturemanager:
            capturemanager.suspend(in_=True)
        print(f"usage: PG_URL={pgurl} py.test --postgresql tests")
        input("Press enter when you're done with...")
        pytest.exit("bye")
    elif config.getoption("--postgresql") and not is_xdist_master(config):
        bootstrap_postgresql_testbed() 
Example #12
Source File: test_logging.py    From tasktiger with MIT License
def test_structlog_processor(self):
        try:
            # Use ReturnLogger for testing
            structlog.configure(
                processors=[tasktiger_processor],
                context_class=dict,
                logger_factory=structlog.ReturnLoggerFactory(),
                wrapper_class=structlog.stdlib.BoundLogger,
                cache_logger_on_first_use=True,
            )

            # Run a simple task. Logging output is verified in
            # the task.
            self.tiger.delay(logging_task)
            queues = self._ensure_queues(queued={"default": 1})
            task = queues["queued"]["default"][0]
            assert task["func"] == "tests.test_logging:logging_task"
            Worker(self.tiger).run(once=True)
            self._ensure_queues(queued={"default": 0})
            assert not self.conn.exists("t:task:%s" % task["id"])
        finally:
            structlog.configure(
                processors=[
                    structlog.stdlib.add_log_level,
                    structlog.stdlib.filter_by_level,
                    structlog.processors.TimeStamper(fmt="iso", utc=True),
                    structlog.processors.StackInfoRenderer(),
                    structlog.processors.format_exc_info,
                    structlog.processors.JSONRenderer(),
                ],
                context_class=dict,
                logger_factory=structlog.ReturnLoggerFactory(),
                wrapper_class=structlog.stdlib.BoundLogger,
                cache_logger_on_first_use=True,
            ) 
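
Because structlog.configure() changes process-global state, the test above restores a working configuration in its finally block. A shorter alternative for that kind of cleanup (not what tasktiger does here) is structlog.reset_defaults(), which returns structlog to its out-of-the-box configuration:

import structlog

structlog.configure(processors=[structlog.processors.JSONRenderer()])
try:
    structlog.get_logger().info("during-test")
finally:
    structlog.reset_defaults()  # back to structlog's default configuration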
Example #13
Source File: utils.py    From tasktiger with MIT License
def setup_structlog():
    structlog.configure(
        logger_factory=structlog.stdlib.LoggerFactory(),
        wrapper_class=structlog.stdlib.BoundLogger,
    )
    logging.basicConfig(format='%(message)s') 
Example #14
Source File: logging.py    From chainerui with MIT License
def get_logger():
    global _logger

    with _mutex:
        if _logger is not None:
            return _logger
        structlog.configure(
            processors=[
                structlog.stdlib.ProcessorFormatter.wrap_for_formatter,
            ],
            logger_factory=structlog.stdlib.LoggerFactory(),
        )

        own_logger = _get_library_logger()
        if not logging.getLogger().handlers:
            # own hander is set only when python root logger is not setup
            formatter = structlog.stdlib.ProcessorFormatter(
                processor=structlog.dev.ConsoleRenderer(colors=False),
            )
            handler = logging.StreamHandler()
            handler.setFormatter(formatter)
            own_logger.addHandler(handler)

        own_logger.setLevel(logging.INFO)
        _logger = structlog.get_logger(__name__)
    return _logger 
Example #15
Source File: config.py    From epicbox with MIT License
def configure(profiles=None, docker_url=None):
    global IS_CONFIGURED, PROFILES, DOCKER_URL

    IS_CONFIGURED = True
    if isinstance(profiles, dict):
        profiles_map = {name: Profile(name, **profile_kwargs)
                        for name, profile_kwargs in profiles.items()}
    else:
        profiles_map = {profile.name: profile for profile in profiles or []}
    PROFILES.update(profiles_map)
    DOCKER_URL = docker_url


# structlog.is_configured() was added in 18.1 
Example #16
Source File: logging.py    From parsec-cloud with GNU Affero General Public License v3.0
def configure_logging(log_level=None, log_format=None, log_file=None, log_filter=None):
    global _log_level

    _log_level = None
    shared_processors = [
        structlog.stdlib.add_logger_name,
        structlog.stdlib.add_log_level,
        structlog.processors.format_exc_info,
        structlog.processors.TimeStamper(fmt="%Y-%m-%d %H:%M:%S"),
    ]

    if log_filter:
        log_filter = re.compile(log_filter)

        def dropper(logger, method_name, event_dict):
            if not log_filter.match(str(event_dict)):
                raise structlog.DropEvent
            return event_dict

        shared_processors.append(dropper)

    structlog.configure(
        processors=[
            structlog.stdlib.filter_by_level,
            *shared_processors,
            structlog.processors.StackInfoRenderer(),
            # structlog.processors.format_exc_info,
            structlog.stdlib.ProcessorFormatter.wrap_for_formatter,
        ],
        logger_factory=structlog.stdlib.LoggerFactory(),
        cache_logger_on_first_use=True,
    )

    if not log_format:
        log_format = "JSON" if log_file else "CONSOLE"
    if log_format == "CONSOLE":
        formatter_renderer = structlog.dev.ConsoleRenderer
    elif log_format == "JSON":
        formatter_renderer = structlog.processors.JSONRenderer
    else:
        raise ValueError(f"Unknown log format `{log_format}`")

    formatter = structlog.stdlib.ProcessorFormatter(
        processor=formatter_renderer(), foreign_pre_chain=shared_processors
    )

    if log_file:
        handler = logging.FileHandler(log_file)
    else:
        handler = logging.StreamHandler()
    handler.setFormatter(formatter)

    root_logger = logging.getLogger()
    root_logger.addHandler(handler)
    if log_level is not None:
        root_logger.setLevel(log_level.upper()) 
Example #17
Source File: logging.py    From FlowKit with Mozilla Public License 2.0
def init_logging():
    """
    Initialise root logger 'flowmachine' and sub-logger 'flowmachine.debug',
    and configure structlog so that it passes any messages on to the standard
    library loggers.
    """
    global FLOWKIT_LOGGERS_HAVE_BEEN_INITIALISED
    if FLOWKIT_LOGGERS_HAVE_BEEN_INITIALISED:
        # Only initialise loggers once, to avoid adding multiple
        # handlers and accidentally re-setting the log level.
        return

    root_logger = logging.getLogger("flowmachine")
    root_logger.setLevel(logging.DEBUG)

    debug_logger = logging.getLogger("flowmachine").getChild("debug")
    debug_logger.setLevel(logging.DEBUG)
    ch = logging.StreamHandler()
    ch.setLevel(logging.DEBUG)
    debug_logger.addHandler(ch)

    # Logger for all queries run or accessed (used by flowmachine server)
    query_run_log = logging.getLogger("flowmachine").getChild("query_run_log")
    ch = logging.StreamHandler(sys.stdout)
    ch.setLevel(logging.INFO)
    query_run_log.addHandler(ch)

    structlog.configure(
        processors=[
            structlog.stdlib.filter_by_level,
            structlog.stdlib.add_logger_name,
            structlog.stdlib.add_log_level,
            structlog.stdlib.PositionalArgumentsFormatter(),
            structlog.processors.TimeStamper(fmt="iso"),
            structlog.processors.StackInfoRenderer(),
            structlog.processors.format_exc_info,
            structlog.processors.JSONRenderer(serializer=rapidjson.dumps),
        ],
        context_class=dict,
        logger_factory=structlog.stdlib.LoggerFactory(),
        wrapper_class=structlog.stdlib.BoundLogger,
        cache_logger_on_first_use=True,
    )

    FLOWKIT_LOGGERS_HAVE_BEEN_INITIALISED = True 
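
JSONRenderer accepts any serializer callable plus keyword arguments that are forwarded to it; FlowKit passes rapidjson.dumps for speed. A minimal variation (not FlowKit's code) that sticks to the standard library and uses repr() for values json cannot serialize:

import json

import structlog

renderer = structlog.processors.JSONRenderer(serializer=json.dumps, default=repr)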
Example #18
Source File: logs.py    From johnnydep with MIT License
def configure_logging(verbosity=0):
    level = "DEBUG" if verbosity > 1 else "INFO" if verbosity == 1 else "WARNING"
    timestamper = structlog.processors.TimeStamper(fmt="%Y-%m-%d %H:%M:%S")
    # Add the log level and a timestamp to the event_dict if the log entry is not from structlog
    pre_chain = [structlog.stdlib.add_log_level, timestamper]
    logging.config.dictConfig(
        {
            "version": 1,
            "disable_existing_loggers": False,
            "formatters": {
                "plain": {
                    "()": structlog.stdlib.ProcessorFormatter,
                    "processor": structlog.dev.ConsoleRenderer(colors=False),
                    "foreign_pre_chain": pre_chain,
                },
                "colored": {
                    "()": structlog.stdlib.ProcessorFormatter,
                    "processor": structlog.dev.ConsoleRenderer(colors=True),
                    "foreign_pre_chain": pre_chain,
                },
            },
            "handlers": {
                "default": {
                    "level": level,
                    "class": "logging.StreamHandler",
                    "formatter": "colored",
                },
                # "file": {
                #     "level": "DEBUG",
                #     "class": "logging.handlers.WatchedFileHandler",
                #     "filename": "johnnydep.log",
                #     "formatter": "plain",
                # },
            },
            "loggers": {
                "": {
                    "handlers": ["default"],
                    # "handlers": ["default", "file"],
                    "level": "DEBUG",
                    "propagate": True,
                }
            },
        }
    )
    structlog.configure(
        processors=[
            structlog.stdlib.add_logger_name,
            structlog.stdlib.add_log_level,
            structlog.stdlib.PositionalArgumentsFormatter(),
            timestamper,
            structlog.processors.StackInfoRenderer(),
            structlog.processors.format_exc_info,
            structlog.stdlib.ProcessorFormatter.wrap_for_formatter,
        ],
        context_class=dict,
        logger_factory=structlog.stdlib.LoggerFactory(),
        wrapper_class=structlog.stdlib.BoundLogger,
        cache_logger_on_first_use=True,
    ) 
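
The foreign_pre_chain above is what makes plain stdlib records look like structlog ones: records that never went through structlog get the log level and timestamp added before ConsoleRenderer formats them. A hypothetical check (logger names and event are made up, not from johnnydep):

import logging

import structlog

configure_logging(verbosity=1)
structlog.get_logger("johnnydep.demo").info("resolving", dist="requests")
logging.getLogger("some.third_party").info("a plain stdlib record, rendered by the same formatter")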
Example #19
Source File: logs.py    From crypto-signal with MIT License
def configure_logging(loglevel, log_mode):
    """Configure the application logger

    Args:
        loglevel (str): The level of logging for the application.
        log_mode (str): What kind of logging output to apply...
            text: Text logging is intended for users / developers.
            json: Json logging is intended for parsing with a log aggregation system.
    """

    if not loglevel:
        loglevel = logging.INFO

    if log_mode == 'json':
        log_formatter = jsonlogger.JsonFormatter()
    elif log_mode == 'text':
        log_formatter = logging.Formatter('%(message)s')
    elif log_mode == 'standard':
        log_formatter = logging.Formatter(
            '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
        )
    else:
        log_formatter = logging.Formatter(
            '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
        )

    handler = logging.StreamHandler(sys.stdout)
    handler.setFormatter(log_formatter)
    root_logger = logging.getLogger()
    root_logger.addHandler(handler)
    root_logger.setLevel(loglevel)

    structlog.configure(
        processors=[
            structlog.stdlib.filter_by_level,
            structlog.stdlib.add_logger_name,
            structlog.stdlib.add_log_level,
            structlog.stdlib.PositionalArgumentsFormatter(),
            structlog.processors.StackInfoRenderer(),
            structlog.processors.format_exc_info,
            structlog.processors.UnicodeDecoder(),
            structlog.stdlib.render_to_log_kwargs,
        ],
        context_class=dict,
        logger_factory=structlog.stdlib.LoggerFactory(),
        wrapper_class=structlog.stdlib.BoundLogger,
        cache_logger_on_first_use=True
    ) 
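
Unlike most of the other examples, this chain ends with structlog.stdlib.render_to_log_kwargs rather than a renderer: the event becomes the stdlib msg and the remaining keys are passed as extra, so the final formatting is left to whichever logging.Formatter was installed above (jsonlogger.JsonFormatter in 'json' mode). A hedged usage sketch (argument values and event keys are made up):

import structlog

configure_logging(loglevel="INFO", log_mode="json")
structlog.get_logger("crypto_signal").info("signal", exchange="binance", market_pair="BTC/USDT")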
Example #20
Source File: log.py    From code-coverage with Mozilla Public License 2.0
def init_logger(
    project_name,
    channel=None,
    level=logbook.INFO,
    PAPERTRAIL_HOST=None,
    PAPERTRAIL_PORT=None,
    sentry_dsn=None,
):

    if not channel:
        channel = os.environ.get("APP_CHANNEL")

    # Output logs on stderr, with color support on consoles
    fmt = "{record.time} [{record.level_name:<8}] {record.channel}: {record.message}"
    handler = logbook.more.ColorizedStderrHandler(level=level, format_string=fmt)
    handler.push_application()

    # Log to papertrail
    if channel and PAPERTRAIL_HOST and PAPERTRAIL_PORT:
        setup_papertrail(project_name, channel, PAPERTRAIL_HOST, PAPERTRAIL_PORT)

    # Log to sentry
    if channel and sentry_dsn:
        setup_sentry(project_name, channel, sentry_dsn)

    def logbook_factory(*args, **kwargs):
        # Logger given to structlog
        logbook.compat.redirect_logging()
        return logbook.Logger(level=level, *args, **kwargs)

    # Setup structlog over logbook, with args list at the end
    processors = [
        structlog.stdlib.PositionalArgumentsFormatter(),
        structlog.processors.StackInfoRenderer(),
        structlog.processors.format_exc_info,
        UnstructuredRenderer(),
    ]

    structlog.configure(
        context_class=structlog.threadlocal.wrap_dict(dict),
        processors=processors,
        logger_factory=logbook_factory,
        wrapper_class=structlog.stdlib.BoundLogger,
        cache_logger_on_first_use=True,
    ) 
Example #21
Source File: log.py    From code-review with Mozilla Public License 2.0
def init_logger(
    project_name,
    channel=None,
    level=logbook.INFO,
    PAPERTRAIL_HOST=None,
    PAPERTRAIL_PORT=None,
    SENTRY_DSN=None,
):

    if not channel:
        channel = os.environ.get("APP_CHANNEL")

    # Output logs on stderr, with color support on consoles
    fmt = "{record.time} [{record.level_name:<8}] {record.channel}: {record.message}"
    handler = logbook.more.ColorizedStderrHandler(level=level, format_string=fmt)
    handler.push_application()

    # Log to papertrail
    if channel and PAPERTRAIL_HOST and PAPERTRAIL_PORT:
        setup_papertrail(project_name, channel, PAPERTRAIL_HOST, PAPERTRAIL_PORT)

    # Log to sentry
    if channel and SENTRY_DSN:
        setup_sentry(project_name, channel, SENTRY_DSN)

    def logbook_factory(*args, **kwargs):
        # Logger given to structlog
        logbook.compat.redirect_logging()
        return logbook.Logger(level=level, *args, **kwargs)

    # Setup structlog over logbook, with args list at the end
    processors = [
        structlog.stdlib.PositionalArgumentsFormatter(),
        structlog.processors.StackInfoRenderer(),
        structlog.processors.format_exc_info,
        UnstructuredRenderer(),
    ]

    structlog.configure(
        context_class=structlog.threadlocal.wrap_dict(dict),
        processors=processors,
        logger_factory=logbook_factory,
        wrapper_class=structlog.stdlib.BoundLogger,
        cache_logger_on_first_use=True,
    )