(logging): Add multiprocess support for logging.

This commit is contained in:
2023-07-18 10:35:10 +03:00
parent e54c6bc62d
commit f4769b407e
5 changed files with 51 additions and 32 deletions

View File

@@ -3,7 +3,7 @@ import logging
 from typing import Optional
 from meta import conf, appname
-from meta.logger import log_context, log_action_stack, logging_context, log_app, log_wrap
+from meta.logger import log_context, log_action_stack, logging_context, log_app, log_wrap, setup_main_logger
 from meta.ipc import AppClient
 from meta.app import appname_from_shard
 from meta.sharding import shard_count
@@ -113,6 +113,7 @@ class AnalyticsServer:
         await asyncio.sleep(self.snap_retry_period)

     async def run(self):
+        setup_main_logger()
         log_action_stack.set(['Analytics'])
         log_app.set(conf.analytics['appname'])

View File

@@ -7,7 +7,7 @@ from discord.ext import commands
 from meta import LionBot, conf, sharding, appname, shard_talk
 from meta.app import shardname
-from meta.logger import log_context, log_action_stack, logging_context
+from meta.logger import log_context, log_action_stack, logging_context, setup_main_logger
 from meta.context import ctx_bot
 from data import Database
@@ -20,6 +20,10 @@ from constants import DATA_VERSION
 for name in conf.config.options('LOGGING_LEVELS', no_defaults=True):
     logging.getLogger(name).setLevel(conf.logging_levels[name])
+
+setup_main_logger()
+
 logger = logging.getLogger(__name__)
 db = Database(conf.data['args'])

Submodule src/gui updated: fff2314019...0561b43b07

View File

@@ -4,10 +4,14 @@ import logging
 import string
 import random
-from ..logger import log_context, log_app, logging_context
+from ..logger import log_context, log_app, logging_context, setup_main_logger
+from ..config import conf

 logger = logging.getLogger(__name__)

+for name in conf.config.options('LOGGING_LEVELS', no_defaults=True):
+    logging.getLogger(name).setLevel(conf.logging_levels[name])
+
 uuid_alphabet = string.ascii_lowercase + string.digits
@@ -166,6 +170,7 @@ class AppServer:
 async def start_server():
+    setup_main_logger()
     address = {'host': '127.0.0.1', 'port': '5000'}
     server = AppServer()
     await server.start(address)

View File

@@ -3,7 +3,8 @@ import logging
 import asyncio
 from typing import List
 from logging.handlers import QueueListener, QueueHandler
-from queue import SimpleQueue
+import queue
+import multiprocessing
 from contextlib import contextmanager
 from io import StringIO
 from functools import wraps
@@ -199,6 +200,7 @@ class WebHookHandler(logging.StreamHandler):
         return self.loop

     def emit(self, record):
+        self.format(record)
         self.get_loop().call_soon_threadsafe(self._post, record)

     def _post(self, record):
@@ -292,6 +294,16 @@ if webhook := conf.logging['critical_log']:
     handler.setLevel(logging.CRITICAL)
     handlers.append(handler)

+def make_queue_handler(queue):
+    qhandler = QueueHandler(queue)
+    qhandler.setLevel(logging.INFO)
+    qhandler.addFilter(ContextInjection())
+    return qhandler
+
+def setup_main_logger(multiprocess=False):
+    q = multiprocessing.Queue() if multiprocess else queue.SimpleQueue()
     if handlers:
         # First create a separate loop to run the handlers on
         import threading
@@ -312,15 +324,12 @@ if handlers:
         for handler in handlers:
             handler.loop = loop

-        queue: SimpleQueue[logging.LogRecord] = SimpleQueue()
-        qhandler = QueueHandler(queue)
-        qhandler.setLevel(logging.INFO)
-        qhandler.addFilter(ContextInjection())
+        qhandler = make_queue_handler(q)
         # qhandler.addFilter(ThreadFilter('MainThread'))
         logger.addHandler(qhandler)

         listener = QueueListener(
-            queue, *handlers, respect_handler_level=True
+            q, *handlers, respect_handler_level=True
         )
         listener.start()
+    return q