Python pythonjsonlogger.jsonlogger.JsonFormatter() Examples

The following are 20 code examples of pythonjsonlogger.jsonlogger.JsonFormatter(), drawn from open-source projects. The source file, project, and license are noted above each example. You may also want to check out all the available functions and classes of the module pythonjsonlogger.jsonlogger.
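Before diving into the project-specific excerpts, here is a minimal, self-contained sketch (not taken from any of the projects below) of the pattern they all share: attach a JsonFormatter to a handler so every record is emitted as a single JSON object.

import logging
from pythonjsonlogger import jsonlogger

# Minimal sketch: one JSON object per log record on stderr.
logger = logging.getLogger("example")
logger.setLevel(logging.INFO)

handler = logging.StreamHandler()
handler.setFormatter(jsonlogger.JsonFormatter("%(asctime)s %(levelname)s %(name)s %(message)s"))
logger.addHandler(handler)

# Fields passed via `extra` become additional keys in the JSON output.
logger.info("something happened", extra={"request_id": "abc-123"})
# -> {"asctime": "...", "levelname": "INFO", "name": "example",
#     "message": "something happened", "request_id": "abc-123"}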
Example #1
Source File: Logger.py    From DownloaderForReddit with GNU General Public License v3.0
def make_logger():
    logger = logging.getLogger('DownloaderForReddit')
    logger.setLevel(logging.DEBUG)

    stream_formatter = JsonStreamFormatter('%(asctime)s: %(levelname)s : %(name)s : %(message)s',
                                           datefmt='%m/%d/%Y %I:%M:%S %p')

    json_formatter = jsonlogger.JsonFormatter(fmt='%(levelname) %(name) %(filename) %(module) %(funcName) %(lineno) '
                                              '%(message) %(asctime)', datefmt='%m/%d/%Y %I:%M:%S %p')

    stream_handler = logging.StreamHandler()
    stream_handler.setLevel(logging.DEBUG)
    stream_handler.setFormatter(stream_formatter)

    log_path = os.path.join(SystemUtil.get_data_directory(), 'DownloaderForReddit.log')
    file_handler = RotatingFileHandler(log_path, maxBytes=1024*1024, backupCount=2)
    file_handler.setLevel(logging.INFO)
    file_handler.setFormatter(json_formatter)

    logger.addHandler(stream_handler)
    logger.addHandler(file_handler) 
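A note on the format string above (an observation about python-json-logger, not part of the project's code): in the library versions these examples target, JsonFormatter only scans the format string for field names, so the trailing conversion character is optional and '%(levelname)' selects the same field as '%(levelname)s'. A small sketch:

from pythonjsonlogger import jsonlogger

# Sketch only: bare "%(field)" placeholders work because the formatter extracts
# field names from the string rather than interpolating it like logging.Formatter.
formatter = jsonlogger.JsonFormatter('%(levelname) %(name) %(message)')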
Example #2
Source File: events.py    From binderhub with BSD 3-Clause "New" or "Revised" License
def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

        self.log = logging.getLogger(__name__)
        # We don't want events to show up in the default logs
        self.log.propagate = False
        self.log.setLevel(logging.INFO)

        if self.handlers_maker:
            self.handlers = self.handlers_maker(self)
            formatter = jsonlogger.JsonFormatter(json_serializer=_skip_message)
            for handler in self.handlers:
                handler.setFormatter(formatter)
                self.log.addHandler(handler)

        self.schemas = {} 
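The json_serializer argument accepts any callable with the signature of json.dumps; _skip_message is defined elsewhere in binderhub (and reappears in Example #3). A plausible sketch of such a serializer, written here for illustration rather than copied from the project, drops the redundant 'message' key before serializing:

import json

def _skip_message(record, **kwargs):
    # Illustrative sketch: the events carry their payload in structured fields,
    # so the standard 'message' key is dropped before dumping to JSON.
    record.pop("message", None)
    return json.dumps(record, **kwargs)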
Example #3
Source File: eventlog.py    From telemetry with BSD 3-Clause "New" or "Revised" License
def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

        # Use a unique name for the logger so that multiple instances of EventLog do not write
        # to each other's handlers.
        log_name = __name__ + '.' + str(id(self))
        self.log = logging.getLogger(log_name)
        # We don't want events to show up in the default logs
        self.log.propagate = False
        # We will use log.info to emit
        self.log.setLevel(logging.INFO)

        if self.handlers:
            formatter = jsonlogger.JsonFormatter(json_serializer=_skip_message)
            for handler in self.handlers:
                handler.setFormatter(formatter)
                self.log.addHandler(handler)

        self.schemas = {} 
Example #4
Source File: config.py    From zeus with Apache License 2.0
def configure_logging(app):
    from pythonjsonlogger import jsonlogger

    if os.environ.get("SUPPRESS_LOGGING"):
        while app.logger.handlers:
            app.logger.removeHandler(app.logger.handlers[0])
        handler = logging.NullHandler()
        app.logger.addHandler(handler)
    elif not app.config["DEBUG"]:
        while app.logger.handlers:
            app.logger.removeHandler(app.logger.handlers[0])
        handler = logging.StreamHandler()
        handler.setFormatter(
            jsonlogger.JsonFormatter("%(message)%(levelname)%(name)%(asctime)")
        )
        app.logger.addHandler(handler)

    if app.config.get("LOG_LEVEL"):
        app.logger.setLevel(getattr(logging, app.config["LOG_LEVEL"].upper())) 
Example #5
Source File: stop_random_instance.py    From aws-chaos-scripts with MIT License
def setup_logging(log_level):
    logger = logging.getLogger(__name__)
    logger.setLevel(log_level)
    json_handler = logging.StreamHandler()
    formatter = jsonlogger.JsonFormatter(
        fmt='%(asctime)s %(levelname)s %(name)s %(message)s'
    )
    json_handler.setFormatter(formatter)
    logger.addHandler(json_handler) 
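setup_logging() configures the module-level logger in place rather than returning it; a hypothetical caller (not shown in this excerpt) would fetch the logger by name afterwards, and any extra fields become keys in the JSON output:

# Hypothetical usage, continuing the excerpt above.
setup_logging(logging.INFO)
logger = logging.getLogger(__name__)
logger.info("stopping instance", extra={"instance_id": "i-0abc1234"})
# -> {"asctime": "...", "levelname": "INFO", "name": "...",
#     "message": "stopping instance", "instance_id": "i-0abc1234"}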
Example #6
Source File: app.py    From repo2docker with BSD 3-Clause "New" or "Revised" License
def initialize(self):
        """Init repo2docker configuration before start"""
        # FIXME: Remove this function, move it to setters / traitlet reactors
        if self.json_logs:
            # register JSON excepthook to avoid non-JSON output on errors
            sys.excepthook = self.json_excepthook
            # Need to reset existing handlers, or we repeat messages
            logHandler = logging.StreamHandler()
            formatter = jsonlogger.JsonFormatter()
            logHandler.setFormatter(formatter)
            self.log = logging.getLogger("repo2docker")
            self.log.handlers = []
            self.log.addHandler(logHandler)
            self.log.setLevel(self.log_level)
        else:
            # due to json logger stuff above,
            # our log messages include carriage returns, newlines, etc.
            # remove the additional newline from the stream handler
            self.log.handlers[0].terminator = ""
            # We don't want a [Repo2Docker] on all messages
            self.log.handlers[0].formatter = logging.Formatter(fmt="%(message)s")

        if self.dry_run and (self.run or self.push):
            raise ValueError("Cannot push or run image if we are not building it")

        if self.volumes and not self.run:
            raise ValueError("Cannot mount volumes if container is not run") 
Example #7
Source File: stackdriver.py    From mac-graph with The Unlicense
def __init__(self, fmt="%(levelname) %(name) %(message)", style='%', *args, **kwargs):
		jsonlogger.JsonFormatter.__init__(self, fmt=fmt, *args, **kwargs) 
Example #8
Source File: utils.py    From anycast_healthchecker with Apache License 2.0
def process_log_record(self, log_record):
        """Add customer record keys and rename threadName key."""
        log_record["version"] = __version__
        log_record["program"] = PROGRAM_NAME
        log_record["service_name"] = log_record.pop('threadName', None)
        # return jsonlogger.JsonFormatter.process_log_record(self, log_record)

        return log_record 
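process_log_record() is a hook JsonFormatter calls with the assembled dict just before serialization, so subclasses can add, rename, or drop keys. The excerpt above shows only the method; a minimal sketch of how such a subclass fits together (the class name and literal values are illustrative stand-ins for __version__ and PROGRAM_NAME) might look like this:

from pythonjsonlogger import jsonlogger

class ServiceJsonFormatter(jsonlogger.JsonFormatter):
    """Illustrative subclass: inject static service metadata into every record."""

    def process_log_record(self, log_record):
        log_record["version"] = "0.9.1"                    # stand-in for __version__
        log_record["program"] = "anycast-healthchecker"    # stand-in for PROGRAM_NAME
        log_record["service_name"] = log_record.pop("threadName", None)
        return log_record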
Example #9
Source File: __init__.py    From logmatic-python with MIT License
def process_log_record(self, log_record):
        # Enforce the presence of a timestamp
        if "asctime" in log_record:
            log_record["timestamp"] = log_record["asctime"]
        else:
            log_record["timestamp"] = datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S.%fZ%z")

        if self._extra is not None:
            for key, value in self._extra.items():
                log_record[key] = value
        return super(JsonFormatter, self).process_log_record(log_record)


# Derive from object to force a new-style class and thus allow super() to work
# on Python 2.6 
Example #10
Source File: __init__.py    From logmatic-python with MIT License
def __init__(self,
                 fmt="%(asctime) %(name) %(processName) %(filename)  %(funcName) %(levelname) %(lineno) %(module) %(threadName) %(message)",
                 datefmt="%Y-%m-%dT%H:%M:%SZ%z",
                 style='%',
                 extra={}, *args, **kwargs):
        self._extra = extra
        jsonlogger.JsonFormatter.__init__(self, fmt=fmt, datefmt=datefmt, *args, **kwargs) 
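Examples #9 and #10 are two methods of the same logmatic-python formatter: __init__ stores the extra mapping, and process_log_record merges it into every record and normalizes the timestamp. A hedged usage sketch (the import path and field values are assumptions, not from the excerpt):

import logging
import logmatic  # assumed package name for logmatic-python

handler = logging.StreamHandler()
# Every record gains the static fields from `extra` plus a normalized "timestamp".
handler.setFormatter(logmatic.JsonFormatter(extra={"env": "staging", "app": "worker"}))

logger = logging.getLogger("worker")
logger.addHandler(handler)
logger.info("job finished")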
Example #11
Source File: logs.py    From scrapekit with MIT License
def make_logger(scraper):
    """ Create two log handlers, one to output info-level ouput to the
    console, the other to store all logging in a JSON file which will
    later be used to generate reports. """

    logger = logging.getLogger('')
    logger.setLevel(logging.DEBUG)

    requests_log = logging.getLogger("requests")
    requests_log.setLevel(logging.WARNING)

    json_handler = logging.FileHandler(log_path(scraper))
    json_handler.setLevel(logging.DEBUG)
    json_formatter = jsonlogger.JsonFormatter(make_json_format())
    json_handler.setFormatter(json_formatter)
    logger.addHandler(json_handler)

    console_handler = logging.StreamHandler()
    console_handler.setLevel(logging.INFO)
    fmt = '%(name)s [%(levelname)-8s]: %(message)s'
    formatter = logging.Formatter(fmt)
    console_handler.setFormatter(formatter)
    logger.addHandler(console_handler)

    logger = logging.getLogger(scraper.name)
    logger = TaskAdapter(logger, scraper)
    return logger 
Example #12
Source File: cli.py    From honeycomb with MIT License
def setup_logging(home, verbose):
    """Configure logging for honeycomb."""
    logging.setLoggerClass(MyLogger)
    logging.config.dictConfig({
        "version": 1,
        "disable_existing_loggers": False,
        "formatters": {
            "console": {
                "format": "%(levelname)-8s [%(asctime)s %(name)s] %(filename)s:%(lineno)s %(funcName)s: %(message)s",
            },
            "json": {
                "()": jsonlogger.JsonFormatter,
                "format": "%(levelname)s %(asctime)s %(name)s %(filename)s %(lineno)s %(funcName)s %(message)s",
            },
        },
        "handlers": {
            "default": {
                "level": "DEBUG" if verbose else "INFO",
                "class": "logging.StreamHandler",
                "formatter": "console",
            },
            "file": {
                "level": "DEBUG",
                "class": "logging.handlers.WatchedFileHandler",
                "filename": os.path.join(home, DEBUG_LOG_FILE),
                "formatter": "json",
            },
        },
        "loggers": {
            "": {
                "handlers": ["default", "file"],
                "level": "DEBUG",
                "propagate": True,
            },
        }
    }) 
Example #13
Source File: log.py    From aw-core with Mozilla Public License 2.0
def _create_json_formatter() -> logging.Formatter:  # pragma: no cover
    supported_keys = [
        "asctime",
        # 'created',
        "filename",
        "funcName",
        "levelname",
        # 'levelno',
        "lineno",
        "module",
        # 'msecs',
        "message",
        "name",
        "pathname",
        # 'process',
        # 'processName',
        # 'relativeCreated',
        # 'thread',
        # 'threadName'
    ]

    def log_format(x):
        """Used to give JsonFormatter proper parameter format"""
        return ["%({0:s})".format(i) for i in x]

    custom_format = " ".join(log_format(supported_keys))

    return jsonlogger.JsonFormatter(custom_format) 
Example #14
Source File: fail_rds.py    From aws-chaos-scripts with MIT License
def setup_logging(log_level):
    logger = logging.getLogger(__name__)
    logger.setLevel(log_level)
    json_handler = logging.StreamHandler()
    formatter = jsonlogger.JsonFormatter(
        fmt='%(asctime)s %(levelname)s %(name)s %(message)s'
    )
    json_handler.setFormatter(formatter)
    logger.addHandler(json_handler) 
Example #15
Source File: fail_elasticache.py    From aws-chaos-scripts with MIT License
def setup_logging(log_level):
    logger = logging.getLogger(__name__)
    logger.setLevel(log_level)
    json_handler = logging.StreamHandler()
    formatter = jsonlogger.JsonFormatter(
        fmt='%(asctime)s %(levelname)s %(name)s %(message)s'
    )
    json_handler.setFormatter(formatter)
    logger.addHandler(json_handler) 
Example #16
Source File: fail_az.py    From aws-chaos-scripts with MIT License
def setup_logging(log_level):
    logger = logging.getLogger(__name__)
    logger.setLevel(log_level)
    json_handler = logging.StreamHandler()
    formatter = jsonlogger.JsonFormatter(
        fmt='%(asctime)s %(levelname)s %(name)s %(message)s'
    )
    json_handler.setFormatter(formatter)
    logger.addHandler(json_handler) 
Example #17
Source File: settings.py    From open-raadsinformatie with MIT License
def __init__(self, fmt="%(levelname) %(message)", *args, **kwargs):
        jsonlogger.JsonFormatter.__init__(self, fmt=fmt, *args, **kwargs) 
Example #18
Source File: log_factory.py    From scrapy-cluster with MIT License
def _get_formatter(self, json):
        '''
        Return the proper log formatter

        @param json: Boolean value
        '''
        if json:
            return jsonlogger.JsonFormatter()
        else:
            return logging.Formatter(self.format_string) 
Example #19
Source File: logs.py    From crypto-signal with MIT License
def configure_logging(loglevel, log_mode):
    """Configure the application logger

    Args:
        loglevel (str): The level of logging for the application.
        log_mode (str): What kind of logging output to apply...
            text: Text logging is intended for users / developers.
            json: Json logging is intended for parsing with a log aggregation system.
    """

    if not loglevel:
        loglevel = logging.INFO

    if log_mode == 'json':
        log_formatter = jsonlogger.JsonFormatter()
    elif log_mode == 'text':
        log_formatter = logging.Formatter('%(message)s')
    elif log_mode == 'standard':
        log_formatter = logging.Formatter(
            '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
        )
    else:
        log_formatter = logging.Formatter(
            '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
        )

    handler = logging.StreamHandler(sys.stdout)
    handler.setFormatter(log_formatter)
    root_logger = logging.getLogger()
    root_logger.addHandler(handler)
    root_logger.setLevel(loglevel)

    structlog.configure(
        processors=[
            structlog.stdlib.filter_by_level,
            structlog.stdlib.add_logger_name,
            structlog.stdlib.add_log_level,
            structlog.stdlib.PositionalArgumentsFormatter(),
            structlog.processors.StackInfoRenderer(),
            structlog.processors.format_exc_info,
            structlog.processors.UnicodeDecoder(),
            structlog.stdlib.render_to_log_kwargs,
        ],
        context_class=dict,
        logger_factory=structlog.stdlib.LoggerFactory(),
        wrapper_class=structlog.stdlib.BoundLogger,
        cache_logger_on_first_use=True
    ) 
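Since configure_logging() routes structlog output through the stdlib root logger, a caller would typically obtain a structlog logger after calling it; in 'json' mode the bound key/value pairs are forwarded to the stdlib logger by render_to_log_kwargs and should appear as keys in the JSON record. A hypothetical usage sketch (argument values are illustrative, not from the project):

# Hypothetical usage, continuing the excerpt above.
configure_logging('INFO', 'json')

logger = structlog.get_logger("crypto_signal")
logger.info("indicator crossed", symbol="BTC/USDT", indicator="rsi", value=71.3)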
Example #20
Source File: logging.py    From chaostoolkit with Apache License 2.0
def configure_logger(verbose: bool = False, log_format: str = "string",
                     log_file: str = None, logger_name: str = "chaostoolkit",
                     context_id: str = None):
    """
    Configure the chaostoolkit logger.

    By default, logs as strings to stdout and the given file. When `log_format`
    is `"json"`, records are sent to the console as JSON strings but remain
    plain strings in the log file. The rationale is that the log file is mostly
    for grepping purposes, while records written to the console can be forwarded
    out of band anywhere else.
    """
    log_level = logging.INFO

    # we define colors ourselves as critical is missing in default ones
    colors = {
        logging.DEBUG: ForegroundColors.CYAN,
        logging.INFO: ForegroundColors.GREEN,
        logging.WARNING: ForegroundColors.YELLOW,
        logging.ERROR: ForegroundColors.RED,
        logging.CRITICAL: ForegroundColors.RED
    }
    fmt = "%(color)s[%(asctime)s %(levelname)s]%(end_color)s %(message)s"
    if verbose:
        log_level = logging.DEBUG
        fmt = "%(color)s[%(asctime)s %(levelname)s] "\
              "[%(module)s:%(lineno)d]%(end_color)s %(message)s"

    formatter = LogFormatter(
        fmt=fmt, datefmt="%Y-%m-%d %H:%M:%S", colors=colors)
    if log_format == 'json':
        fmt = "(process) (asctime) (levelname) (module) (lineno) (message)"
        if context_id:
            fmt = "(context_id) {}".format(fmt)
        formatter = jsonlogger.JsonFormatter(
            fmt, json_default=encoder, timestamp=True)

    # sadly, no other way to specify the name of the default logger publicly
    LOGZERO_DEFAULT_LOGGER = logger_name
    logger = setup_default_logger(level=log_level, formatter=formatter)
    if context_id:
        logger.addFilter(ChaosToolkitContextFilter(logger_name, context_id))

    if log_file:
        # always everything as strings in the log file
        logger.setLevel(logging.DEBUG)
        fmt = "%(color)s[%(asctime)s %(levelname)s] "\
              "[%(module)s:%(lineno)d]%(end_color)s %(message)s"
        formatter = LogFormatter(fmt=fmt, datefmt="%Y-%m-%d %H:%M:%S",
                                 colors=colors)
        logzero.logfile(log_file, formatter=formatter, mode='a',
                        loglevel=logging.DEBUG)