
Python: logger duplicates messages on screen when logging to stdout and two files

I have a logger with three handlers: a common log file for all processes, the console, and an additional log file for each process. Propagate is set to False, but messages printed inside the processes are duplicated both on screen and in the general log file.

Here is the code of the main module:

import multiprocessing
import global_vars

import logging

logPath = 'logs'
fileName = "stdout.log"


def init_logger(secondary_logfile=None):
    logFormatter = logging.Formatter("%(asctime)s [%(processName)-12.12s] [%(threadName)-12.12s] [%(levelname)-5.5s] [%(filename)s:%(lineno)d] %(message)s")
    rootLogger = logging.getLogger(__name__)

    # first handler is general log
    fileHandler = logging.FileHandler("{0}/{1}".format(logPath, fileName))
    fileHandler.setFormatter(logFormatter)
    rootLogger.addHandler(fileHandler)
    # second handler is logging to console
    consoleHandler = logging.StreamHandler()
    consoleHandler.setFormatter(logFormatter)
    rootLogger.addHandler(consoleHandler) 

    # third handler is process-specific log
    if secondary_logfile:
        fileHandler1 = logging.FileHandler("{0}/{1}".format(logPath, secondary_logfile))
        fileHandler1.setFormatter(logFormatter)
        rootLogger.addHandler(fileHandler1)

    rootLogger.setLevel("DEBUG") # log everything

    rootLogger.propagate = False
    return rootLogger

rootLogger = init_logger()
logger = rootLogger.getChild(__name__)

# this class contains process-specific globals, shared among all functions
# of the specific process
class Shared():
    def __init__(self, shared_var, logger):
        self.shared_var = shared_var
        self.logger = logger
    def get(self):
        return self.shared_var

def make_global(process_id, logger):

    g = Shared(process_id, logger)
    global_vars.multiprocess_globals["g"] = g


def process_func(process_id):

    g = global_vars.multiprocess_globals["g"] # take g from process global scope
    # use g
    g.logger.debug('Message from specific logger {}, mirrored into main log: {}'.format(process_id, g.get()))


def run(process_id):

    # init secondary logger which will mirror into common log
    secondary_logger = init_logger(process_id)

    # create variable in process global scope
    make_global(process_id, secondary_logger)

    # run function which will use it
    process_func(process_id)


if __name__ == '__main__':

    logger.debug('This is log message in the main program')
    # init processes
    processes = []
    for i in range(1,4):
        p = multiprocessing.Process(target=run, args=(i,))
        p.daemon = True # for tensorflow https://github.com/tensorflow/tensorflow/issues/5448
        processes.append(p)

    # Run processes:

    for p in processes:
        p.start()

    for p in processes:
        p.join()

    logger.debug("This is again log message in main program")

The contents of the global_vars module are just:

multiprocess_globals = {}

The output on screen and in the main log (logs/stdout.log) is:

$ python3 multiprocess_globals.py
2018-08-31 18:33:41,754 [MainProcess ] [MainThread  ] [DEBUG] [multiprocess_globals.py:75] This is log message in the main program
2018-08-31 18:33:41,756 [Process-1   ] [MainThread  ] [DEBUG] [multiprocess_globals.py:58] Message from specific logger 1, mirrored into main log: 1
2018-08-31 18:33:41,756 [Process-1   ] [MainThread  ] [DEBUG] [multiprocess_globals.py:58] Message from specific logger 1, mirrored into main log: 1
2018-08-31 18:33:41,757 [Process-2   ] [MainThread  ] [DEBUG] [multiprocess_globals.py:58] Message from specific logger 2, mirrored into main log: 2
2018-08-31 18:33:41,757 [Process-2   ] [MainThread  ] [DEBUG] [multiprocess_globals.py:58] Message from specific logger 2, mirrored into main log: 2
2018-08-31 18:33:41,757 [Process-3   ] [MainThread  ] [DEBUG] [multiprocess_globals.py:58] Message from specific logger 3, mirrored into main log: 3
2018-08-31 18:33:41,757 [Process-3   ] [MainThread  ] [DEBUG] [multiprocess_globals.py:58] Message from specific logger 3, mirrored into main log: 3
2018-08-31 18:33:41,758 [MainProcess ] [MainThread  ] [DEBUG] [multiprocess_globals.py:91] This is again log message in main program

How can I remove the duplicated messages? The per-process log files (1, 2, 3, etc.) are fine.

P.S. Is it actually safe to initialize new loggers pointing to the same log file? I just don't know how else to create such a setup.
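
For reference, the duplication happens because logging.getLogger(name) returns the same logger object on every call within a process, so each call to init_logger stacks another full set of handlers onto that one logger: here init_logger runs once at import time and again inside each child, attaching the console and general file handlers twice. A minimal sketch of a guard against this (the get_configured_logger helper is hypothetical, not part of the code above):

import logging

def get_configured_logger(name):
    logger = logging.getLogger(name)
    # getLogger returns the same object every time, so handlers added
    # by an earlier call are still attached; configure only once.
    if not logger.handlers:
        handler = logging.StreamHandler()
        handler.setFormatter(logging.Formatter("%(asctime)s %(message)s"))
        logger.addHandler(handler)
        logger.setLevel(logging.DEBUG)
    return logger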

I solved the problem by initializing the rootLogger outside and then adding the new handler inside the process:

import multiprocessing
import global_vars

import logging

logPath = 'logs'
fileName = "stdout.log"


#def init_logger(secondary_logfile=None):
logFormatter = logging.Formatter("%(asctime)s [%(processName)-12.12s] [%(threadName)-12.12s] [%(levelname)-5.5s] [%(filename)s:%(lineno)d] %(message)s")
rootLogger = logging.getLogger(__name__)

# first handler is general log
fileHandler = logging.FileHandler("{0}/{1}".format(logPath, fileName))
fileHandler.setFormatter(logFormatter)
rootLogger.addHandler(fileHandler)
# second handler is logging to console
consoleHandler = logging.StreamHandler()
consoleHandler.setFormatter(logFormatter)
rootLogger.addHandler(consoleHandler) 
rootLogger.setLevel("DEBUG") # log everything
rootLogger.propagate = False


# third handler is process-specific log
def init_logger2(secondary_logfile, rootLogger):
    fileHandler1 = logging.FileHandler("{0}/{1}".format(logPath, secondary_logfile))
    fileHandler1.setFormatter(logFormatter)
    rootLogger.addHandler(fileHandler1)

    return rootLogger

#rootLogger = init_logger()
logger = rootLogger.getChild(__name__)

# this class contains process-specific globals, shared among all functions
# of the specific process
class Shared():
    def __init__(self, shared_var, logger):
        self.shared_var = shared_var
        self.logger = logger
    def get(self):
        return self.shared_var

def make_global(process_id, logger):

    g = Shared(process_id, logger)
    global_vars.multiprocess_globals["g"] = g


def process_func(process_id):

    g = global_vars.multiprocess_globals["g"] # take g from process global scope
    # use g
    g.logger.debug('Message from specific logger {}, mirrored into main log: {}'.format(process_id, g.get()))


def run(process_id):

    # init secondary logger which will mirror into common log
    secondary_logger = init_logger2(process_id, rootLogger)

    # create variable in process global scope
    make_global(process_id, secondary_logger)

    # run function which will use it
    process_func(process_id)


if __name__ == '__main__':

    logger.debug('This is log message in the main program')
    # init processes
    processes = []
    for i in range(1,4):
        p = multiprocessing.Process(target=run, args=(i,))
        p.daemon = True # for tensorflow https://github.com/tensorflow/tensorflow/issues/5448
        processes.append(p)

    # Run processes:

    for p in processes:
        p.start()

    for p in processes:
        p.join()

    logger.debug("This is again log message in main program")
