diff --git a/src/analytics/server.py b/src/analytics/server.py
index b81bcfe4..619cb60f 100644
--- a/src/analytics/server.py
+++ b/src/analytics/server.py
@@ -3,7 +3,7 @@
 import logging
 from typing import Optional
 from meta import conf, appname
-from meta.logger import log_context, log_action_stack, logging_context, log_app, log_wrap
+from meta.logger import log_context, log_action_stack, logging_context, log_app, log_wrap, setup_main_logger
 from meta.ipc import AppClient
 from meta.app import appname_from_shard
 from meta.sharding import shard_count
@@ -113,6 +113,7 @@ class AnalyticsServer:
         await asyncio.sleep(self.snap_retry_period)
 
     async def run(self):
+        setup_main_logger()
         log_action_stack.set(['Analytics'])
         log_app.set(conf.analytics['appname'])
 
diff --git a/src/bot.py b/src/bot.py
index bd347b3c..72765fdf 100644
--- a/src/bot.py
+++ b/src/bot.py
@@ -7,7 +7,7 @@ from discord.ext import commands
 
 from meta import LionBot, conf, sharding, appname, shard_talk
 from meta.app import shardname
-from meta.logger import log_context, log_action_stack, logging_context
+from meta.logger import log_context, log_action_stack, logging_context, setup_main_logger
 from meta.context import ctx_bot
 
 from data import Database
@@ -20,6 +20,10 @@ from constants import DATA_VERSION
 for name in conf.config.options('LOGGING_LEVELS', no_defaults=True):
     logging.getLogger(name).setLevel(conf.logging_levels[name])
 
+
+setup_main_logger()
+
+
 logger = logging.getLogger(__name__)
 
 db = Database(conf.data['args'])
diff --git a/src/gui b/src/gui
index fff23140..0561b43b 160000
--- a/src/gui
+++ b/src/gui
@@ -1 +1 @@
-Subproject commit fff23140193b301db86b62f00e60ae62b7075174
+Subproject commit 0561b43b07a87ef6bdee8ff01f78de73155576dc
diff --git a/src/meta/ipc/server.py b/src/meta/ipc/server.py
index 3e46e900..43181d8c 100644
--- a/src/meta/ipc/server.py
+++ b/src/meta/ipc/server.py
@@ -4,10 +4,14 @@
 import logging
 import string
 import random
 
-from ..logger import log_context, log_app, logging_context
+from ..logger import log_context, log_app, logging_context, setup_main_logger
+from ..config import conf
 
 logger = logging.getLogger(__name__)
 
+for name in conf.config.options('LOGGING_LEVELS', no_defaults=True):
+    logging.getLogger(name).setLevel(conf.logging_levels[name])
+
 uuid_alphabet = string.ascii_lowercase + string.digits
 
@@ -166,6 +170,7 @@ class AppServer:
 
 
 async def start_server():
+    setup_main_logger()
     address = {'host': '127.0.0.1', 'port': '5000'}
     server = AppServer()
     await server.start(address)
diff --git a/src/meta/logger.py b/src/meta/logger.py
index 3beb707f..a38a1589 100644
--- a/src/meta/logger.py
+++ b/src/meta/logger.py
@@ -3,7 +3,8 @@ import logging
 import asyncio
 from typing import List
 from logging.handlers import QueueListener, QueueHandler
-from queue import SimpleQueue
+import queue
+import multiprocessing
 from contextlib import contextmanager
 from io import StringIO
 from functools import wraps
@@ -199,6 +200,7 @@ class WebHookHandler(logging.StreamHandler):
         return self.loop
 
     def emit(self, record):
+        self.format(record)
         self.get_loop().call_soon_threadsafe(self._post, record)
 
     def _post(self, record):
@@ -292,35 +294,42 @@ if webhook := conf.logging['critical_log']:
     handler.setLevel(logging.CRITICAL)
     handlers.append(handler)
 
 
-if handlers:
-    # First create a separate loop to run the handlers on
-    import threading
-
-    def run_loop(loop):
-        asyncio.set_event_loop(loop)
-        try:
-            loop.run_forever()
-        finally:
-            loop.run_until_complete(loop.shutdown_asyncgens())
-            loop.close()
-
-    loop = asyncio.new_event_loop()
-    loop_thread = threading.Thread(target=lambda: run_loop(loop))
-    loop_thread.daemon = True
-    loop_thread.start()
-
-    for handler in handlers:
-        handler.loop = loop
-
-    queue: SimpleQueue[logging.LogRecord] = SimpleQueue()
+def make_queue_handler(queue):
     qhandler = QueueHandler(queue)
     qhandler.setLevel(logging.INFO)
     qhandler.addFilter(ContextInjection())
-    # qhandler.addFilter(ThreadFilter('MainThread'))
-    logger.addHandler(qhandler)
+    return qhandler
-
-    listener = QueueListener(
-        queue, *handlers, respect_handler_level=True
-    )
-    listener.start()
+
+
+def setup_main_logger(multiprocess=False):
+    q = multiprocessing.Queue() if multiprocess else queue.SimpleQueue()
+    if handlers:
+        # First create a separate loop to run the handlers on
+        import threading
+
+        def run_loop(loop):
+            asyncio.set_event_loop(loop)
+            try:
+                loop.run_forever()
+            finally:
+                loop.run_until_complete(loop.shutdown_asyncgens())
+                loop.close()
+
+        loop = asyncio.new_event_loop()
+        loop_thread = threading.Thread(target=lambda: run_loop(loop))
+        loop_thread.daemon = True
+        loop_thread.start()
+
+        for handler in handlers:
+            handler.loop = loop
+
+        qhandler = make_queue_handler(q)
+        # qhandler.addFilter(ThreadFilter('MainThread'))
+        logger.addHandler(qhandler)
+
+        listener = QueueListener(
+            q, *handlers, respect_handler_level=True
+        )
+        listener.start()
+    return q