Commit e09f30d

Dump & load settings to setting_path in JSONL format, with readable metadata.
1 parent e8c226e commit e09f30d
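
As a rough illustration (not part of the commit), one line of the new <name>_settings.jsonl file would look roughly like the output of the sketch below; it assumes LogSetting is importable from morebuiltins.cmd.log_server as added in this diff, and the name "client" is made up for the example.

# Sketch only: build a LogSetting and serialize it the way dump_settings() does.
import json
import logging

from morebuiltins.cmd.log_server import LogSetting  # assumed import path

setting = LogSetting(max_size=1024**2, level_specs=[logging.ERROR])
line = json.dumps(
    {"name": "client", "setting": setting.to_dict_with_meta()},
    ensure_ascii=False,
)
# The "setting" value carries the pickled+base64 formatter plus readable meta
# data: "fmt", "datefmt", and level names such as "ERROR" instead of ints.
print(line)

# Loading reverses it: from_dict() unpickles the formatter, converts level
# names back to ints, and drops meta keys that are not LogSetting fields.
restored = LogSetting.from_dict(**json.loads(line)["setting"])

The readable metadata sits next to the pickled formatter, so the file can be inspected by hand while from_dict() still restores the exact formatter object.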

File tree

1 file changed (+168 −37)


morebuiltins/cmd/log_server.py

Lines changed: 168 additions & 37 deletions
@@ -1,8 +1,11 @@
 import asyncio
+import base64
 import json
 import logging
 import logging.handlers
 import os
+import pickle
+import shutil
 import signal
 import sys
 import time
@@ -22,11 +25,6 @@
 __all__ = ["LogServer"]


-CONNECTED_HANDLERS: typing.Dict[
-    tuple, typing.Union[logging.handlers.SocketHandler, logging.NullHandler]
-] = {}
-
-
 class QueueMsg:
     __slots__ = ("name", "record")

@@ -43,27 +41,87 @@ class DefaultLogSetting:
     formatter: logging.Formatter = LogHelper.DEFAULT_FORMATTER
     max_size: int = 10 * 1024**2
     max_backups: int = 1
-    max_queue_size: int = 100000
-    max_queue_buffer: int = 20000

     _key_name = "log_setting"

+    # log server options
+    host: str = "127.0.0.1"
+    port: int = 8901
+    log_dir: typing.Optional[str] = None
+    max_queue_size: int = 100000
+    max_queue_buffer: int = 20000
+    handler_signals: tuple = (2, 15)  # SIGINT, SIGTERM
+    log_stream: typing.Optional[typing.TextIO] = sys.stderr
+    compress: bool = False
+    shorten_level: bool = False
+    idle_close_time: int = 60
+

 @dataclass
 class LogSetting(Validator):
     formatter: logging.Formatter = DefaultLogSetting.formatter
     max_size: int = DefaultLogSetting.max_size
     max_backups: int = DefaultLogSetting.max_backups
-    level_specs: list[str] = field(default_factory=list)
+    level_specs: list[int] = field(default_factory=list)
+    create_time: str = field(default_factory=ttime)
+
+    @property
+    def fmt(self) -> str:
+        return getattr(self.formatter, "_fmt", "")
+
+    @property
+    def datefmt(self) -> str:
+        return getattr(self.formatter, "datefmt", "")
+
+    def __post_init__(self):
+        for index, level in enumerate(self.level_specs):
+            if isinstance(level, int):
+                continue
+            level = str(level).upper()
+            if level not in logging._nameToLevel:
+                raise ValueError(
+                    f"level_specs[{index}] invalid log level name: {level}"
+                )
+            self.level_specs[index] = logging._nameToLevel[level]
+        super().__post_init__()

     @classmethod
     def get_default(cls):
         return cls()

+    @staticmethod
+    def pickle_to_base64(obj) -> str:
+        return base64.b64encode(pickle.dumps(obj)).decode("utf-8")
+
+    @staticmethod
+    def pickle_from_base64(data: str):
+        return pickle.loads(base64.b64decode(data.encode("utf-8")))
+
     @classmethod
-    def from_dict(cls, **kwargs):
+    def from_dict(
+        cls, formatter: typing.Union[str, logging.Formatter, None] = None, **kwargs
+    ):
+        if isinstance(formatter, str):
+            # base64 formatter
+            kwargs["formatter"] = cls.pickle_from_base64(formatter)
+        elif isinstance(formatter, logging.Formatter):
+            kwargs["formatter"] = formatter
+        else:
+            kwargs["formatter"] = DefaultLogSetting.formatter
+        kwargs = {k: v for k, v in kwargs.items() if k in cls.__annotations__}
         return cls(**kwargs)

+    def to_dict_with_meta(self) -> dict:
+        data = asdict(self)
+        data["formatter"] = self.pickle_to_base64(self.formatter)
+        data["fmt"] = self.fmt
+        data["datefmt"] = self.datefmt
+        # int to str
+        data["level_specs"] = [
+            logging.getLevelName(level) for level in self.level_specs
+        ]
+        return data
+
     def __eq__(self, other):
         if not isinstance(other, LogSetting):
             return False
@@ -162,24 +220,20 @@ async def main():
     > python -m morebuiltins.cmd.log_server -h
     """

-    DEFAULT_HOST = "127.0.0.1"
-    DEFAULT_PORT = 8901
-    HANDLER_SIGNALS = (2, 15)  # SIGINT, SIGTERM
-
     def __init__(
         self,
-        host=DEFAULT_HOST,
-        port=DEFAULT_PORT,
-        log_dir=None,
+        host=DefaultLogSetting.host,
+        port=DefaultLogSetting.port,
+        log_dir=DefaultLogSetting.log_dir,
         name="log_server",
         max_size=DefaultLogSetting.max_size,
         max_backups=DefaultLogSetting.max_backups,
         max_queue_size=DefaultLogSetting.max_queue_size,
         max_queue_buffer=DefaultLogSetting.max_queue_buffer,
-        log_stream=sys.stderr,
-        compress=False,
-        shorten_level=True,
-        idle_close_time=300,
+        log_stream=DefaultLogSetting.log_stream,
+        compress=DefaultLogSetting.compress,
+        shorten_level=DefaultLogSetting.shorten_level,
+        idle_close_time=DefaultLogSetting.idle_close_time,
     ):
         super().__init__(
             host,
@@ -208,10 +262,10 @@ def _init_settings(
         shorten_level=True,
         max_queue_size=DefaultLogSetting.max_queue_size,
         max_queue_buffer=DefaultLogSetting.max_queue_buffer,
-        log_stream=sys.stderr,
-        compress=False,
-        log_dir=None,
-        idle_close_time=300,
+        log_stream=DefaultLogSetting.log_stream,
+        compress=DefaultLogSetting.compress,
+        log_dir=DefaultLogSetting.log_dir,
+        idle_close_time=DefaultLogSetting.idle_close_time,
         max_size=DefaultLogSetting.max_size,
         max_backups=DefaultLogSetting.max_backups,
     ):
@@ -224,6 +278,11 @@ def _init_settings(
         self.log_dir = Path(log_dir).resolve() if log_dir else None
         if self.log_dir:
             self.log_dir.mkdir(exist_ok=True, parents=True)
+            self.setting_path: typing.Optional[Path] = self.log_dir.joinpath(
+                f"{self.name}_settings.jsonl"
+            )
+        else:
+            self.setting_path = None
         self._server_log_setting = LogSetting(
             max_size=max_size, max_backups=max_backups
         )
@@ -237,10 +296,28 @@ def _init_settings(
         self.max_queue_size = max_queue_size
         self._write_queue: Queue = Queue(maxsize=max_queue_size)
         self.max_queue_buffer = max_queue_buffer
-        self.handle_signals = self.HANDLER_SIGNALS
-        for sig in self.HANDLER_SIGNALS:
+        self.handle_signals = DefaultLogSetting.handler_signals
+        for sig in self.handle_signals:
             signal.signal(sig, self.handle_signal)
-        self._log_settings = typing.cast(typing.Dict[str, LogSetting], {})
+        self._log_settings = self.load_settings()
+
+    def load_settings(self):
+        result = typing.cast(typing.Dict[str, LogSetting], {})
+        if not self.setting_path:
+            return result
+        try:
+            with self.setting_path.open("r", encoding="utf-8") as f:
+                for line in f:
+                    data = json.loads(line)
+                    name = data["name"]
+                    setting = LogSetting.from_dict(**data["setting"])
+                    result[name] = setting
+            self.send_log(
+                f"Loaded log settings from {self.setting_path}, {len(result)} items"
+            )
+        except Exception as e:
+            self.send_log(f"Failed to load log settings from {self.setting_path}: {e}")
+        return result

     async def __aenter__(self):
         await super().__aenter__()
@@ -253,6 +330,10 @@ async def __aexit__(self, *_errors):
         await asyncio.sleep(0.01)
         await super().__aexit__(*_errors)

+    @staticmethod
+    def default_settings():
+        return DefaultLogSetting
+
     @property
     def loop(self):
         if not self._loop:
@@ -341,6 +422,31 @@ def save_new_setting(self, name, setting: LogSetting):
             return False
         self._log_settings[name] = setting
         self.send_log(f"`{name}` update setting: {setting}", level=logging.INFO)
+        self.dump_settings()
+        return True
+
+    def dump_settings(self):
+        """Dump & Load settings to setting_path as jsonl format, with a readable meta data."""
+        if not self.setting_path:
+            return True
+        temp = self.setting_path.with_suffix(".tmp")
+        lines = [
+            json.dumps(
+                {"name": name, "setting": setting.to_dict_with_meta()},
+                ensure_ascii=False,
+            )
+            for name, setting in self._log_settings.items()
+        ]
+        text = "\n".join(lines) + "\n"
+        try:
+            temp.write_text(text, encoding="utf-8")
+            shutil.move(temp.as_posix(), self.setting_path.as_posix())
+        except Exception as e:
+            self.send_log(
+                f"error in dump_settings {traceback.format_exc()}",
+                e,
+                level=logging.WARNING,
+            )

     def save_setting(self, name, record: dict):
         if DefaultLogSetting._key_name in record:
@@ -539,6 +645,11 @@ def __exit__(self, exc_type, exc_value, traceback):
         return self


+CONNECTED_HANDLERS: typing.Dict[
+    tuple, typing.Union[logging.handlers.SocketHandler, logging.NullHandler]
+] = {}
+
+
 def clear_handlers():
     for handler in CONNECTED_HANDLERS.values():
         if hasattr(handler, "close"):
@@ -564,8 +675,8 @@ def create_handler(host: str, port: int, level=logging.DEBUG):

 def get_logger(
     name: str,
-    host: str = LogServer.DEFAULT_HOST,
-    port: int = LogServer.DEFAULT_PORT,
+    host: str = DefaultLogSetting.host,
+    port: int = DefaultLogSetting.port,
     log_level: int = logging.DEBUG,
     socket_handler_level: int = logging.DEBUG,
     shorten_level: bool = True,
@@ -576,9 +687,27 @@ def get_logger(
     formatter: typing.Optional[logging.Formatter] = LogHelper.DEFAULT_FORMATTER,
     max_size: int = DefaultLogSetting.max_size,
     max_backups: int = DefaultLogSetting.max_backups,
-    level_specs: typing.Optional[typing.List[str]] = None,
+    level_specs: typing.Optional[typing.List[int]] = None,
 ) -> logging.Logger:
-    "Get a singleton logger that sends logs to the LogServer."
+    """Get a singleton logger that sends logs to the LogServer.
+    For easy use, you can use original logging.handlers.SocketHandler, but you need to manage the handler yourself.
+
+    Demo::
+        # python -m morebuiltins.cmd.log_server --host localhost --port 8901
+        import logging
+        import logging.handlers
+        logger = logging.getLogger("client")
+        logger.setLevel(logging.DEBUG)
+        h = logging.handlers.SocketHandler("localhost", 8901)
+        h.setLevel(logging.DEBUG)
+        logger.addHandler(h)
+        # add custom settings
+        formatter = logging.Formatter(fmt="%(asctime)s - %(filename)s - %(message)s")
+        # add error log to specific log file
+        logger.info("", extra={"max_size": 1024**2, "formatter": formatter, "level_specs": [logging.ERROR]})
+        for _ in range(5):
+            logger.info("hello world!")
+    """
     if shorten_level:
         LogHelper.shorten_level()
     logger = logging.getLogger(name)
@@ -619,8 +748,8 @@ async def main():
     import argparse

     parser = argparse.ArgumentParser(usage=(LogServer.__doc__ or "").replace("%", "%%"))
-    parser.add_argument("--host", default=LogServer.DEFAULT_HOST)
-    parser.add_argument("--port", default=LogServer.DEFAULT_PORT, type=int)
+    parser.add_argument("--host", default=DefaultLogSetting.host)
+    parser.add_argument("--port", default=DefaultLogSetting.port, type=int)
     parser.add_argument(
         "-t",
         "--log-dir",
@@ -702,15 +831,17 @@ def sync_test():


 async def async_test():
-    async with LogServer() as ls:
-        logger = get_logger("test_logger", host=ls.host, port=ls.port)
+    async with LogServer(log_dir="logs"):
+        logger = get_logger("test_logger", level_specs=[logging.ERROR])
         for i in range(5):
             logger.info(f"log server test message {i + 1}")
+            logger.error(f"log server test message {i + 1}")
+    shutil.rmtree("logs")


 def entrypoint():
-    # return asyncio.run(main())
-    return asyncio.run(async_test())
+    return asyncio.run(main())
+    # return asyncio.run(async_test())
     # return sync_test()

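Usage note (a sketch based on the updated async_test() above, not text from the commit): the end-to-end flow below assumes LogServer and get_logger are importable from morebuiltins.cmd.log_server, uses the defaults shown in this diff (127.0.0.1:8901), and relies on log_dir enabling the new settings persistence; the "logs" directory and "demo_logger" name are illustrative only.

# Sketch only: run the server and a client logger in one process.
import asyncio
import logging

from morebuiltins.cmd.log_server import LogServer, get_logger


async def demo():
    # log_dir enables the new persistence: logs/log_server_settings.jsonl
    async with LogServer(log_dir="logs"):
        # level_specs=[logging.ERROR] also routes ERROR records to a dedicated file
        logger = get_logger("demo_logger", level_specs=[logging.ERROR])
        logger.info("hello from the client")
        logger.error("this record also goes to the ERROR-specific file")


asyncio.run(demo())

Because save_new_setting() now calls dump_settings() and _init_settings() loads via load_settings(), settings registered this way should survive a server restart as long as the same log_dir is used.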