A practical example of Python logging from multiple processes and threads to a single log file
Reference: the official logging cookbook example: https://docs.python.org/zh-cn/3.8/howto/logging-cookbook.html
import logging
import logging.config
import logging.handlers
from multiprocessing import Process, Queue
import random
import threading
import time
def logger_thread(q):
    while True:
        record = q.get()
        if record is None:
            break
        logger = logging.getLogger(record.name)
        logger.handle(record)

def worker_process(q):
    qh = logging.handlers.QueueHandler(q)
    root = logging.getLogger()
    root.setLevel(logging.DEBUG)
    root.addHandler(qh)
    levels = [logging.DEBUG, logging.INFO, logging.WARNING, logging.ERROR,
              logging.CRITICAL]
    loggers = ['foo', 'foo.bar', 'foo.bar.baz',
               'spam', 'spam.ham', 'spam.ham.eggs']
    for i in range(100):
        lvl = random.choice(levels)
        logger = logging.getLogger(random.choice(loggers))
        logger.log(lvl, 'Message no. %d', i)
if __name__ == '__main__':
    q = Queue()
    d = {
        'version': 1,
        'formatters': {
            'detailed': {
                'class': 'logging.Formatter',
                'format': '%(asctime)s %(name)-15s %(levelname)-8s %(processName)-10s %(message)s'
            }
        },
        'handlers': {
            'console': {
                'class': 'logging.StreamHandler',
                'level': 'INFO',
            },
            'file': {
                'class': 'logging.FileHandler',
                'filename': 'mplog.log',
                'mode': 'w',
                'formatter': 'detailed',
            },
            'foofile': {
                'class': 'logging.FileHandler',
                'filename': 'mplog-foo.log',
                'mode': 'w',
                'formatter': 'detailed',
            },
            'errors': {
                'class': 'logging.FileHandler',
                'filename': 'mplog-errors.log',
                'mode': 'w',
                'level': 'ERROR',
                'formatter': 'detailed',
            },
        },
        'loggers': {
            'foo': {
                'handlers': ['foofile']
            }
        },
        'root': {
            'level': 'DEBUG',
            'handlers': ['console', 'file', 'errors']
        },
    }
    workers = []
    for i in range(5):
        wp = Process(target=worker_process, name='worker %d' % (i + 1), args=(q,))
        workers.append(wp)
        wp.start()
    logging.config.dictConfig(d)
    lp = threading.Thread(target=logger_thread, args=(q,))
    lp.start()
    # At this point, the main process could do some useful work of its own
    # Once it's done that, it can wait for the workers to terminate...
    for wp in workers:
        wp.join()
    # And now tell the logging thread to finish up, too
    q.put(None)
    lp.join()
Practical example:
1. Configure logging with a dictionary
log_conf_dict = {
    'version': 1,
    'formatters': {
        'my_formatter': {
            'class': 'logging.Formatter',
            'format': '%(asctime)s %(processName)s(%(process)d) %(threadName)s(%(thread)d) %(filename)s[line:%(lineno)d] %(levelname)s %(message)s'
        }
    },
    'handlers': {
        'console': {
            'class': 'logging.StreamHandler',
            'level': 'INFO',
            'formatter': 'my_formatter',
        },
        'file': {
            'class': 'logging.handlers.RotatingFileHandler',
            'filename': '/log/test.log',
            'maxBytes': 5*1024*1024,
            'backupCount': 60,
            'mode': 'w',
            'delay': True,
            'formatter': 'my_formatter',
            'encoding': 'utf-8',
            'level': 'INFO',
        },
    },
    'loggers': {
        'my_logger': {
            'handlers': ['file']
        }
    },
    'root': {
        'level': _level,
        'handlers': ['console', 'file']
    },
}
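For context, a minimal sketch of how this configuration might be applied in the main process; it assumes _level has already been set to a level name such as 'DEBUG' before the dictionary is built, and that the /log directory used by the file handler exists:

import logging
import logging.config

logging.config.dictConfig(log_conf_dict)   # install the formatters and handlers defined above
logger = logging.getLogger('my_logger')    # this logger writes to the rotating file handler
logger.info('logging configured')          # also reaches the root handlers via propagation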
2. Start a dedicated log-writing listener thread in the main process

"""Start a dedicated log-writing listener thread in the main process"""
queue = Queue(-1)
logging.config.dictConfig(log_conf_dict)
log_thread = threading.Thread(target=logger_main, args=(queue,))
log_thread.start()

# ... other application logic ...

queue.put(None)      # sentinel: tell the logging thread to finish
log_thread.join()
The log-writing function:

def logger_main(q):
    '''Write records from the log queue out through the configured handlers'''
    while True:
        record = q.get()
        if record is None:
            break
        # Note: unlike the cookbook example above (which looks up record.name),
        # every record here is handled by the root logger's handlers.
        logger = logging.getLogger()
        logger.handle(record)
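As an aside (not part of the original article's code), the standard library's logging.handlers.QueueListener runs essentially the same loop as logger_main in a background thread of its own; a minimal sketch, assuming the queue and the dictConfig call from step 2:

import logging
import logging.handlers

# Replay queued records through the handlers configured on the root logger.
listener = logging.handlers.QueueListener(
    queue, *logging.getLogger().handlers, respect_handler_level=True)
listener.start()
# ... application logic, with child processes pushing records into `queue` ...
listener.stop()   # processes remaining records and joins the internal thread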
3. In the child processes, send log records into the QueueHandler queue

def child_proc_main(queue):
    lqh = logging.handlers.QueueHandler(queue)
    lqh.set_name("my_queue_handler")
    root = logging.getLogger()
    # Critical step: clear the existing handlers first, then add the queue handler.
    # Reason: in a complex multi-process/multi-thread environment, behaviour differs
    # between Windows and Linux; on Linux the child inherits the main process's
    # logging configuration via fork, which would otherwise produce output to
    # several log files at once.
    root.handlers.clear()
    root.addHandler(lqh)
    root.setLevel(level)  # 'level' is expected to be defined by the surrounding code
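Putting the three steps together, a minimal sketch of how the pieces above might be wired up; it assumes log_conf_dict, logger_main and child_proc_main (with its level variable) are defined as in the snippets above, while the do_work worker body is an illustrative assumption rather than the author's code:

import logging
import logging.config
import threading
from multiprocessing import Process, Queue

def do_work(queue):
    # Hypothetical child-process entry point.
    child_proc_main(queue)                       # step 3: route records into the shared queue
    logging.getLogger(__name__).info('hello from a child process')

if __name__ == '__main__':
    queue = Queue(-1)
    logging.config.dictConfig(log_conf_dict)     # step 1: configure handlers in the main process only
    log_thread = threading.Thread(target=logger_main, args=(queue,))
    log_thread.start()                           # step 2: dedicated log-writing thread

    workers = [Process(target=do_work, args=(queue,)) for _ in range(3)]
    for w in workers:
        w.start()
    for w in workers:
        w.join()

    queue.put(None)                              # sentinel: stop the logging thread
    log_thread.join()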
That concludes this article on Python logging from multiple processes and threads to a single log file. For more on Python logging and log files, please search 编程网 for earlier articles or keep browsing the related articles below. We hope you will continue to support 编程网!