简体   繁体   English

使用dictConfig的Python日志记录,使用两个流处理程序以不同的消息级别发布到stdout和stderr

[英]Python logging with dictConfig using two streamhandlers posting to stdout and stderr at different message levels

I'm trying to achieve what's done in these posts Python logging split between stdout and stderr 我正在尝试实现这些帖子中的操作, 在stdout和stderr之间进行Python日志记录拆分

Python logging split between stdout and stderr Python日志记录在stdout和stderr之间拆分

but with dictConfig, so far without success. 但是使用dictConfig,到目前为止没有成功。 Here is my code and config: 这是我的代码和配置:

This is the config I use for generating stdout log 这是我用于生成标准输出日志的配置

# logging.json
{
    "version": 1,
    "disable_existing_loggers": false,
    "formatters": {
        "console": {
            "format": "%(asctime)s: %(levelname)s: %(pathname)s: \n%(message)s\n"
        },
        "file": {
            "format": "%(asctime)s: %(levelname)s: %(pathname)s: %(lineno)d: \n%(message)s\n"
        }
    },
    "handlers": {
            "console": {
                "level": "INFO",
                "formatter": "console",
                "class": "logging.StreamHandler",
                "stream": "ext://sys.stdout"
            },
            "file": {
                "level": "DEBUG",
                "formatter": "file",
                "class": "logging.FileHandler",
                "encoding": "utf-8",
                "filename": "app.log"
            }
    },
    "loggers": {
        "": {
            "handlers": ["console", "file"],
            "level": "INFO",
            "propagate": false
        },
        "default": {
            "handlers": ["console", "file"],
            "level": "DEBUG",
            "propagate": false
        }

    }
}

And this one is for stderr logs 这是用于stderr日志的

# logging_stderr.json

{
    "version": 1,
    "disable_existing_loggers": false,
    "formatters": {
        "console": {
            "format": "%(asctime)s: %(levelname)s: %(pathname)s: \n%(message)s\n"
        },
        "file": {
            "format": "%(asctime)s: %(levelname)s: %(pathname)s: %(lineno)d: \n%(message)s\n"
        }
    },
    "handlers": {
            "console": {
                "level": "WARN",
                "formatter": "console",
                "class": "logging.StreamHandler",
                "stream": "ext://sys.stderr"
            },
            "file": {
                "level": "DEBUG",
                "formatter": "file",
                "class": "logging.FileHandler",
                "encoding": "utf-8",
                "filename": "wusync.log"
            }
    },
    "loggers": {
        "": {
            "handlers": ["console", "file"],
            "level": "INFO",
            "propagate": false
        },
        "default": {
            "handlers": ["console", "file"],
            "level": "DEBUG",
            "propagate": false
        }

    }
}

Then in my code, I have a helper function. 然后在我的代码中,我有一个辅助函数。

import logging
import logging.config
import os
from os.path import abspath, basename, dirname, exists, isfile, isdir, join, split, splitext
import sys

_script_dir = abspath(dirname(__file__))


def build_default_logger(logdir, name=None, cfgfile=None):
    """
    Create per-file logger and output to shared log file.

    - If a config file is found (the explicit ``cfgfile`` or ``logging.json``
      under the script folder), use it;
    - otherwise fall back to a built-in default config: INFO+ to stdout and
      DEBUG+ to ``<logdir>/<name or dir basename>.log``.
    - 'filename' in the config is a bare filename; the log folder path is
      prepended to it here.

    :param logdir: directory the log file is saved into (created if missing).
    :param name: basename of the log file and name of the returned logger.
    :param cfgfile: config file in the JSON format accepted by dictConfig.
    :return: configured ``logging.Logger`` object.
    """
    try:
        os.makedirs(logdir)
    except OSError:
        # Folder already exists (or is not creatable); a truly bad path will
        # surface later when the FileHandler opens the log file.
        pass

    cfg_file = cfgfile or join(_script_dir, 'logging.json')
    try:
        if sys.version_info.major > 2:
            # NOTE: was encoding=TXT_CODEC, an undefined name (NameError);
            # hard-code utf-8, matching the handlers' own encoding.
            with open(cfg_file, 'r', encoding='utf-8', errors='backslashreplace', newline=None) as f:
                text = f.read()
        else:
            with open(cfg_file, 'rU') as f:
                text = f.read()
        # object_pairs_hook keeps key order for py3.5 and lower.
        logging_config = json.loads(text, object_pairs_hook=collections.OrderedDict)
        # 'filename' in the config is relative; anchor it under logdir.
        logging_config['handlers']['file']['filename'] = join(logdir, logging_config['handlers']['file']['filename'])
    except Exception:
        # Config file missing or unreadable: build a sensible default in code.
        # (was basename(basename(...)) -- a redundant double call)
        filename = name or basename(logdir.strip('\\/'))
        log_path = join(logdir, '{}.log'.format(filename))
        logging_config = {
            "version": 1,
            "disable_existing_loggers": False,
            "formatters": {
                "console": {
                    "format": "%(asctime)s: %(levelname)s: %(pathname)s: \n%(message)s\n"
                },
                "file": {
                    "format": "%(asctime)s: %(levelname)s: %(pathname)s: %(lineno)d: \n%(message)s\n"
                }
            },
            "handlers": {
                "console": {
                    "level": "INFO",
                    "formatter": "console",
                    "class": "logging.StreamHandler",
                    "stream": "ext://sys.stdout"
                },
                "file": {
                    "level": "DEBUG",
                    "formatter": "file",
                    "class": "logging.FileHandler",
                    "encoding": "utf-8",
                    "filename": log_path
                }
            },
            "loggers": {
                "": {
                    "handlers": ["console", "file"],
                    "level": "INFO",
                    "propagate": True
                },
                "default": {
                    "handlers": ["console", "file"],
                    "level": "WARN",
                    "propagate": True
                }
            }
        }
    if name:
        # Alias the requested logger name to the 'default' logger settings so
        # the getLogger(name) call below picks up the configured handlers.
        logging_config['loggers'][name] = logging_config['loggers']['default']
    logging.config.dictConfig(logging_config)
    return logging.getLogger(name or 'default')

Finally, in my main routine 最后,在我的主程序中

# main.py
# Entry point: builds two loggers from the same stderr-oriented config.
# NOTE(review): `util` is the module containing build_default_logger above.

_script_dir = abspath(dirname(__file__))
# Unnamed logger -> returned as the 'default' logger from the shared config.
_logger = util.build_default_logger(logdir='temp', cfgfile=abspath(join(_script_dir, 'logging_stderr.json')))
# Named logger -> registered under 'myerrorlog' with the same handlers.
_stderr_logger = util.build_default_logger(logdir='temp', name='myerrorlog', cfgfile=abspath(join(_script_dir, 'logging_stderr.json')))


...

# Expectation: info goes to stdout, warning goes to stderr.
_logger.info('my info')
_stderr_logger.warning('my warning')

I expect that the info will show through stdout, and the warning through stderr. 我希望 info 消息通过 stdout 显示,warning 消息通过 stderr 显示。 But in the result, there are only warnings and the info is completely gone. 但是结果是,只有警告,信息完全消失了。

If using only _logger , then everything goes through stdout. 如果仅使用_logger ,那么所有内容都会通过stdout进行处理。

Where was I wrong? 我哪里错了? Is it that dictconfig only supports one streamhandler? dictconfig仅支持一个流处理程序吗?

I solved my own problem by using filters. 我通过使用过滤器解决了自己的问题。

This post helped me: 这篇文章对我有帮助:

install filter on logging level in python using dictConfig 使用dictConfig在python的日志记录级别安装过滤器

I basically want to send INFO to stdout, and WARNING-to-CRITICAL to stderr. 我基本上想将INFO发送到stdout,将WARNING-to-CRITICAL发送到stderr。 This implies having a scope with both ends defined for handlers. 这意味着要有一个范围,两端都为处理程序定义。 The level attribute of handlers defines the low end only. 处理程序的level属性仅定义低端。

Now filters come to the rescue. 现在由过滤器来解决问题。 I ended up using this config: 我最终使用了此配置:

{
    "version": 1,
    "disable_existing_loggers": false,
    "filters": {
        "infofilter": {
          "()": "util.LowPassFilter",
          "level": 20
        },
        "warnfilter": {
          "()": "util.HighPassFilter",
          "level": 30
        }
    },
    "formatters": {
        "console": {
            "format": "%(asctime)s: %(levelname)s: %(pathname)s: \n%(message)s\n"
        },
        "file": {
            "format": "%(asctime)s: %(levelname)s: %(pathname)s: %(lineno)d: \n%(message)s\n"
        }
    },
    "handlers": {
            "console": {
                "level": "INFO",
                "formatter": "console",
                "class": "logging.StreamHandler",
                "stream": "ext://sys.stdout",
                "filters": ["infofilter"]
            },
            "console_err": {
                "level": "WARN",
                "formatter": "console",
                "class": "logging.StreamHandler",
                "stream": "ext://sys.stderr",
                "filters": ["warnfilter"]
            },
            "file": {
                "level": "DEBUG",
                "formatter": "file",
                "class": "logging.FileHandler",
                "encoding": "utf-8",
                "filename": "app.log"
            }
    },
    "loggers": {
        "": {
            "handlers": ["console", "console_err", "file"],
            "level": "INFO",
            "propagate": false
        },
        "default": {
            "handlers": ["console", "console_err", "file"],
            "level": "DEBUG",
            "propagate": true
        }
    }
}

and the filters 和过滤器

class LowPassFilter(object):
    """Logging filter: pass only records at or below a given level.

    Numeric level values for reference:
    CRITICAL=50 (FATAL is an alias), ERROR=40, WARNING=30 (WARN is an
    alias), INFO=20, DEBUG=10, NOTSET=0.
    """

    def __init__(self, level):
        # Highest level (inclusive) that this filter lets through.
        self._cutoff = level

    def filter(self, log):
        """Return True when the record's level does not exceed the cutoff."""
        return not (log.levelno > self._cutoff)


class HighPassFilter(object):
    """Logging filter: pass only records at or above a given level."""

    def __init__(self, level):
        # Lowest level (inclusive) that this filter lets through.
        self._threshold = level

    def filter(self, log):
        """Return True when the record's level meets the threshold."""
        return not (log.levelno < self._threshold)

声明:本站的技术帖子网页,遵循CC BY-SA 4.0协议,如果您需要转载,请注明本站网址或者原文地址。任何问题请咨询:yoyou2525@163.com.

 
粤ICP备18138465号  © 2020-2024 STACKOOM.COM