42 changes: 42 additions & 0 deletions loguru/_logger.py
@@ -86,6 +86,7 @@
import contextlib
import functools
import logging
import os
import re
import sys
import warnings
@@ -230,6 +231,7 @@ class Logger:
def __init__(self, core, exception, depth, record, lazy, colors, raw, capture, patchers, extra):
self._core = core
self._options = (exception, depth, record, lazy, colors, raw, capture, patchers, extra)
self._own_pid = os.getpid()  # pid of the process in which this logger object was created

def __repr__(self):
return "<loguru.logger handlers=%r>" % list(self._core.handlers.values())
@@ -1752,6 +1754,46 @@ def configure(self, *, handlers=None, levels=None, extra=None, patcher=None, activation=None):

return [self.add(**params) for params in handlers]

def _replace_core(self, core: Core):
self._core = core

def reinstall(self):
"""Reinstall the core of logger.

When using multiprocessing, you can pass logger as a parameter to the target of
``multiprocessing.Process``, and run this method once, thus you don't need to pass logger to every
function you called in the same process with spawn multiprocessing.

Examples
--------
>>> def subworker(logger_):
... logger_.reinstall()
... logger.info("Child")
... deeper_subworker()

>>> def deeper_subworker():
... logger.info("Grandchild")

>>> def test_process_spawn():
... spawn_context = multiprocessing.get_context("spawn")
... logger.add("file.log", context=spawn_context, enqueue=True, catch=False)
...
... process = spawn_context.Process(target=subworker, args=(logger,))
... process.start()
... process.join()
...
... assert process.exitcode == 0
...
... logger.info("Main")
... logger.remove()
"""
# ``_own_pid`` was recorded when this logger object was constructed; if it matches the
# current pid, we are still in the process that created it and there is nothing to do.
if self._own_pid == os.getpid():  # same process
    return

# In a child started with "spawn", the re-imported module-level logger has its own core:
# swap in the core pickled along with this logger so the parent's handlers are reused.
from loguru import logger

logger._replace_core(self._core)


def _change_activation(self, name, status):
if not (name is None or isinstance(name, str)):
raise TypeError(
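For orientation, the docstring example above can be assembled into a standalone script. This is an untested sketch based on this branch: it assumes the patched loguru where ``reinstall()`` exists, ``file.log`` is just the placeholder sink from the example, and the ``__main__`` guard is added here because "spawn" re-imports the main module in each child; ``context`` and ``enqueue`` are existing arguments of ``logger.add()``.

# Sketch: using the proposed reinstall() with the "spawn" start method.
import multiprocessing

from loguru import logger


def subworker(logger_):
    # Rebind the child's module-level logger to the core pickled from the parent.
    logger_.reinstall()
    logger.info("Child")
    deeper_subworker()


def deeper_subworker():
    # No logger argument needed anymore: the global logger now shares the parent's core.
    logger.info("Grandchild")


if __name__ == "__main__":  # required with "spawn", since children re-import this module
    spawn_context = multiprocessing.get_context("spawn")
    logger.add("file.log", context=spawn_context, enqueue=True, catch=False)

    process = spawn_context.Process(target=subworker, args=(logger,))
    process.start()
    process.join()

    logger.info("Main")
    logger.remove()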
14 changes: 12 additions & 2 deletions tests/test_multiprocessing.py
@@ -36,6 +36,16 @@ def subworker(logger_):
logger_.info("Child")


def subworker_spawn(logger_):
logger_.reinstall()
logger.info("Child")
deeper_subworker()


def deeper_subworker():
logger.info("Grandchild")


def subworker_inheritance():
logger.info("Child")

@@ -209,7 +219,7 @@ def test_process_spawn(spawn_context):

logger.add(writer, context=spawn_context, format="{message}", enqueue=True, catch=False)

-process = spawn_context.Process(target=subworker, args=(logger,))
+process = spawn_context.Process(target=subworker_spawn, args=(logger,))
process.start()
process.join()

@@ -218,7 +228,7 @@
logger.info("Main")
logger.remove()

-assert writer.read() == "Child\nMain\n"
+assert writer.read() == "Child\nGrandchild\nMain\n"


@pytest.mark.skipif(os.name == "nt", reason="Windows does not support forking")