[英]Celery: log each task run to its own file?
我希望运行的每个作业都记录到文件名是 taskid 的 logs/ 目录中它自己的文件。
# Module-level logger named after this tasks module.
logger = get_task_logger(__name__)


@app.task(base=CallbackTask)
def calc(syntax):
    """Example task: runs some work, then logs through the shared logger."""
    some_func()
    logger.info('started')
在我的 worker（工作进程）中，我使用 -f
参数把日志输出到一个文件。我想确保每个任务都输出到它自己的日志文件中。
好像我晚了3年。 不过,这是我的解决方案,灵感来自@Mikko Ohtamaa 的想法。 我只是通过使用 Celery 信号和 python 的内置日志框架来准备和清理日志句柄,使它有点不同。
import logging
import os

from celery.signals import task_prerun, task_postrun
# Only the tasks listed here get a dedicated per-task-id log file.
TASK_WITH_LOGGING = ['Proj.tasks.calc']


# NOTE(review): the original connected with ``sender=TASK_WITH_LOGGING`` — but
# Celery's ``sender`` must be a single task (or its registered name), not a
# list, so the handler would never fire.  Connect unconditionally and filter
# by ``task.name`` instead.
@task_prerun.connect
def prepare_logging(signal=None, sender=None, task_id=None, task=None,
                    args=None, kwargs=None, **extra):
    """Attach a console handler and a ``/tmp/<task_id>.log`` file handler
    to a logger named after the task id, before the task body runs."""
    if task is None or task.name not in TASK_WITH_LOGGING:
        return

    logger = logging.getLogger(task_id)
    # Without an explicit level the logger inherits the root level
    # (WARNING by default), and the tasks' ``logger.info`` calls would
    # silently be dropped.
    logger.setLevel(logging.INFO)
    formatter = logging.Formatter('[%(asctime)s][%(levelname)s] %(message)s')

    # Optionally log to the console as well as to the file.
    stream_handler = logging.StreamHandler()
    stream_handler.setFormatter(formatter)
    stream_handler.setLevel(logging.INFO)

    # File handler whose filename is the task id.
    task_handler = logging.FileHandler(os.path.join('/tmp/', task_id + '.log'))
    task_handler.setFormatter(formatter)
    task_handler.setLevel(logging.INFO)

    logger.addHandler(stream_handler)
    logger.addHandler(task_handler)
# NOTE(review): as with prepare_logging, ``sender=<list>`` is not a valid
# Celery signal sender — connect unconditionally and filter by task name.
@task_postrun.connect
def close_logging(signal=None, sender=None, task_id=None, task=None,
                  args=None, kwargs=None, retval=None, state=None, **extra):
    """Flush, close and detach every handler of the per-task-id logger
    that prepare_logging installed, once the task has finished."""
    if task is None or task.name not in TASK_WITH_LOGGING:
        return

    # Same singleton logger that prepare_logging configured.
    logger = logging.getLogger(task_id)
    # Iterate over a snapshot: removeHandler mutates logger.handlers, and
    # mutating a list while iterating it skips elements.
    for handler in list(logger.handlers):
        handler.flush()
        handler.close()
        logger.removeHandler(handler)
@app.task(base=CallbackTask, bind=True)
def calc(self, syntax):
    """Task body: fetch the logger named after this run's task id.

    The logger was already created and fully configured by the
    ``prepare_logging`` signal handler, so only a lookup is needed here.
    """
    task_logger = logging.getLogger(self.request.id)
    some_func()
    task_logger.info('started')
这里必须使用 bind=True，才能在任务内部访问任务 id。
这样每次执行任务 calc 时，
都会以 <task_id>.log 为文件名
创建一个单独的日志文件。
下面是我粗糙的、即兴写出的、未经测试的方法。请将其视为思路指导，而不是生产级代码。
def get_or_create_task_logger(func):
    """Lazily create (or fetch) a logger dedicated to *func*.

    ``logging.getLogger`` always returns the same singleton for a given
    name, so the logger is keyed on the task function's name alone
    (no guard against cross-module name clashes — demo purposes only).
    https://docs.python.org/2/library/logging.html?highlight=logging#logging.getLogger
    """
    task_logger = logging.getLogger(func.__name__)

    # Attach our custom file handler only on first use; later calls reuse it.
    # Celery task context variables could also be inspected here:
    # http://celery.readthedocs.org/en/latest/userguide/tasks.html#context
    if not task_logger.handlers:
        # Log to an output file named after the function.
        file_handler = logging.FileHandler('%s.log' % func.__name__)
        file_handler.setFormatter(
            logging.Formatter('%(asctime)s %(levelname)s %(message)s'))
        task_logger.addHandler(file_handler)
        task_logger.setLevel(logging.DEBUG)

    return task_logger
@app.task(base=CallbackTask)
def calc(syntax):
    """Task body: log through a per-function logger created on demand."""
    task_logger = get_or_create_task_logger(calc)
    some_func()
    task_logger.info('started')
这是我的代码,
import logging
from celery.app.log import TaskFormatter
from celery.signals import task_prerun, task_postrun
@task_prerun.connect
def overload_task_logger(task_id, task, args, **kwargs):
    """Before each task run, attach a per-task-id file handler to the
    shared ``celery.task`` logger, so this run's records also land in
    ``/tmp/<task_id>.log``."""
    celery_task_logger = logging.getLogger("celery.task")
    handler = logging.FileHandler(f'/tmp/{task_id}.log')
    handler.setLevel(logging.INFO)
    # TaskFormatter injects task_name/task_id into each record.
    handler.setFormatter(
        TaskFormatter("[%(asctime)s: %(levelname)s/%(processName)s] %(task_name)s[%(task_id)s]: %(message)s")
    )
    celery_task_logger.addHandler(handler)
@task_postrun.connect
def cleanup_logger(task_id, task, args, **kwargs):
    """After each task run, detach (and close) the file handler that
    overload_task_logger installed for this task id."""
    logger = logging.getLogger("celery.task")
    target = f'/tmp/{task_id}.log'
    # Iterate over a snapshot: removeHandler mutates logger.handlers, and
    # mutating the list while iterating it skips elements.
    for handler in list(logger.handlers):
        # FileHandler.baseFilename is absolute, and so is our target path.
        if isinstance(handler, logging.FileHandler) and handler.baseFilename == target:
            # Close first so the file descriptor is released, not leaked.
            handler.close()
            logger.removeHandler(handler)
在 celery 上测试:v5.2.7
声明:本站的技术帖子网页,遵循CC BY-SA 4.0协议,如果您需要转载,请注明本站网址或者原文地址。任何问题请咨询:yoyou2525@163.com.