Skip to content

Commit 1c0eb18

Browse files
author
Ask Solem
committed
Changed the way logging is configured.
By default we now configure the root logger. Also, we no longer hijack the multiprocessing logger, but instead use our own logger name ("celery").

Users can choose to configure logging by subscribing to the ``setup_logging`` signal::

    from celery import signals

    def setup_logging(**kwargs):
        from logging.config import fileConfig
        fileConfig("logging.conf")

    signals.setup_logging.connect(setup_logging)

If there are no receivers for this signal, the logging subsystem will be configured using the --loglevel/--logfile arguments. This configuration will be used for *all defined loggers*, and stdout+stderr will be redirected to the celery logger. If you want to manually configure logging *and* redirect stdouts, you need to enable the redirection manually::

    def setup_logging(**kwargs):
        import logging
        from logging.config import fileConfig
        from celery import log

        fileConfig("logging.conf")
        stdouts = logging.getLogger("mystdoutslogger")
        log.redirect_stdouts_to_logger(stdouts, loglevel=logging.WARNING)
1 parent 495511a commit 1c0eb18

File tree

5 files changed

+73
-48
lines changed

5 files changed

+73
-48
lines changed

celery/bin/celeryd.py

Lines changed: 6 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -199,6 +199,7 @@ def __init__(self, concurrency=conf.CELERYD_CONCURRENCY,
199199
def run(self):
200200
self.init_loader()
201201
self.init_queues()
202+
self.worker_init()
202203
self.redirect_stdouts_to_logger()
203204
print("celery@%s v%s is starting." % (self.hostname,
204205
celery.__version__))
@@ -209,7 +210,6 @@ def run(self):
209210

210211
if self.discard:
211212
self.purge_messages()
212-
self.worker_init()
213213

214214
# Dump configuration to screen so we have some basic information
215215
# for when users sends bug reports.
@@ -245,10 +245,12 @@ def init_loader(self):
245245

246246
def redirect_stdouts_to_logger(self):
247247
from celery import log
248+
handled = log.setup_logging_subsystem(loglevel=self.loglevel,
249+
logfile=self.logfile)
248250
# Redirect stdout/stderr to our logger.
249-
logger = log.setup_logger(loglevel=self.loglevel,
250-
logfile=self.logfile)
251-
log.redirect_stdouts_to_logger(logger, loglevel=logging.WARNING)
251+
if not handled:
252+
logger = log.get_default_logger()
253+
log.redirect_stdouts_to_logger(logger, loglevel=logging.WARNING)
252254

253255
def purge_messages(self):
254256
discarded_count = discard_all()

celery/log.py

Lines changed: 48 additions & 30 deletions
Original file line numberDiff line numberDiff line change
@@ -7,12 +7,15 @@
77
import traceback
88

99
from celery import conf
10+
from celery import signals
1011
from celery.utils import noop
1112
from celery.utils.compat import LoggerAdapter
1213
from celery.utils.patch import ensure_process_aware_logger
1314

14-
_hijacked = False
15-
_monkeypatched = False
15+
# The logging subsystem is only configured once per process.
16+
# setup_logging_subsystem sets this flag, and subsequent calls
17+
# will do nothing.
18+
_setup = False
1619

1720
BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = range(8)
1821
RESET_SEQ = "\033[0m"
@@ -25,6 +28,7 @@
2528

2629

2730
class ColorFormatter(logging.Formatter):
31+
2832
def __init__(self, msg, use_color=True):
2933
logging.Formatter.__init__(self, msg)
3034
self.use_color = use_color
@@ -38,32 +42,37 @@ def format(self, record):
3842

3943

4044
def get_task_logger(loglevel=None, name=None):
41-
ensure_process_aware_logger()
4245
logger = logging.getLogger(name or "celery.task.default")
4346
if loglevel is not None:
4447
logger.setLevel(loglevel)
4548
return logger
4649

4750

48-
def _hijack_multiprocessing_logger():
49-
from multiprocessing import util as mputil
50-
global _hijacked
51-
52-
if _hijacked:
53-
return mputil.get_logger()
54-
55-
ensure_process_aware_logger()
56-
57-
logging.Logger.manager.loggerDict.clear()
58-
59-
try:
60-
if mputil._logger is not None:
61-
mputil.logger = None
62-
except AttributeError:
63-
pass
64-
65-
_hijacked = True
66-
return mputil.get_logger()
51+
def setup_logging_subsystem(loglevel=conf.CELERYD_LOG_LEVEL, logfile=None,
52+
format=conf.CELERYD_LOG_FORMAT, colorize=conf.CELERYD_LOG_COLOR,
53+
**kwargs):
54+
global _setup
55+
if not _setup:
56+
print("SETTING LOGGER TO %s" % (logfile, ))
57+
ensure_process_aware_logger()
58+
logging.Logger.manager.loggerDict.clear()
59+
from multiprocessing import util as mputil
60+
try:
61+
if mputil._logger is not None:
62+
mputil.logger = None
63+
except AttributeError:
64+
pass
65+
receivers = signals.setup_logging.send(sender=None,
66+
loglevel=loglevel,
67+
logfile=logfile,
68+
format=format,
69+
colorize=colorize)
70+
if not receivers:
71+
root = logging.getLogger()
72+
_setup_logger(root, logfile, loglevel, format, colorize, **kwargs)
73+
root.setLevel(loglevel)
74+
_setup = True
75+
return receivers
6776

6877

6978
def _detect_handler(logfile=None):
@@ -74,34 +83,37 @@ def _detect_handler(logfile=None):
7483
return logging.FileHandler(logfile)
7584

7685

77-
def get_default_logger(loglevel=None):
86+
def get_default_logger(loglevel=None, name="celery"):
7887
"""Get default logger instance.
7988
8089
:keyword loglevel: Initial log level.
8190
8291
"""
83-
logger = _hijack_multiprocessing_logger()
92+
logger = logging.getLogger(name)
8493
if loglevel is not None:
8594
logger.setLevel(loglevel)
8695
return logger
8796

8897

8998
def setup_logger(loglevel=conf.CELERYD_LOG_LEVEL, logfile=None,
9099
format=conf.CELERYD_LOG_FORMAT, colorize=conf.CELERYD_LOG_COLOR,
91-
**kwargs):
100+
name="celery", root=True, **kwargs):
92101
"""Setup the ``multiprocessing`` logger. If ``logfile`` is not specified,
93102
then ``stderr`` is used.
94103
95104
Returns logger object.
96105
97106
"""
98-
return _setup_logger(get_default_logger(loglevel),
99-
logfile, format, colorize, **kwargs)
107+
if not root:
108+
return _setup_logger(get_default_logger(loglevel, name),
109+
logfile, format, colorize, **kwargs)
110+
setup_logging_subsystem(loglevel, logfile, format, colorize, **kwargs)
111+
return get_default_logger(name=name)
100112

101113

102114
def setup_task_logger(loglevel=conf.CELERYD_LOG_LEVEL, logfile=None,
103115
format=conf.CELERYD_TASK_LOG_FORMAT, colorize=conf.CELERYD_LOG_COLOR,
104-
task_kwargs=None, **kwargs):
116+
task_kwargs=None, root=True, **kwargs):
105117
"""Setup the task logger. If ``logfile`` is not specified, then
106118
``stderr`` is used.
107119
@@ -113,11 +125,17 @@ def setup_task_logger(loglevel=conf.CELERYD_LOG_LEVEL, logfile=None,
113125
task_kwargs.setdefault("task_id", "-?-")
114126
task_name = task_kwargs.get("task_name")
115127
task_kwargs.setdefault("task_name", "-?-")
116-
logger = _setup_logger(get_task_logger(loglevel, task_name),
117-
logfile, format, colorize, **kwargs)
128+
if not root:
129+
logger = _setup_logger(get_task_logger(loglevel, task_name),
130+
logfile, format, colorize, **kwargs)
131+
else:
132+
setup_logging_subsystem(loglevel, logfile, format, colorize, **kwargs)
133+
logger = get_task_logger(name=task_name)
118134
return LoggerAdapter(logger, task_kwargs)
119135

120136

137+
138+
121139
def _setup_logger(logger, logfile, format, colorize,
122140
formatter=ColorFormatter, **kwargs):
123141

celery/signals.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -14,3 +14,6 @@
1414
worker_process_init = Signal(providing_args=[])
1515
worker_ready = Signal(providing_args=[])
1616
worker_shutdown = Signal(providing_args=[])
17+
18+
setup_logging = Signal(providing_args=["loglevel", "logfile",
19+
"format", "colorize"])

celery/tests/test_log.py

Lines changed: 15 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,7 @@
1414

1515
from carrot.utils import rpartition
1616

17+
from celery import log
1718
from celery.log import (setup_logger, setup_task_logger, emergency_error,
1819
get_default_logger, get_task_logger,
1920
redirect_stdouts_to_logger, LoggingProxy)
@@ -50,6 +51,7 @@ class test_default_logger(unittest.TestCase):
5051
def setUp(self):
5152
self.setup_logger = setup_logger
5253
self.get_logger = get_default_logger
54+
log._setup = False
5355

5456
def _assertLog(self, logger, logmsg, loglevel=logging.ERROR):
5557

@@ -69,9 +71,11 @@ def assertDidLogFalse(self, logger, logmsg, reason, loglevel=None):
6971
return self.assertFalse(val, reason)
7072

7173
def test_setup_logger(self):
72-
logger = self.setup_logger(loglevel=logging.ERROR, logfile=None)
74+
logger = self.setup_logger(loglevel=logging.ERROR, logfile=None,
75+
root=False)
7376
set_handlers(logger, [])
74-
logger = self.setup_logger(loglevel=logging.ERROR, logfile=None)
77+
logger = self.setup_logger(loglevel=logging.ERROR, logfile=None,
78+
root=False)
7579
self.assertIs(get_handlers(logger)[0].stream, sys.__stderr__,
7680
"setup_logger logs to stderr without logfile argument.")
7781
self.assertDidLogFalse(logger, "Logging something",
@@ -90,7 +94,8 @@ def test_setup_logger_no_handlers_stream(self):
9094

9195
def with_override_stdouts(outs):
9296
stdout, stderr = outs
93-
l = self.setup_logger(logfile=stderr, loglevel=logging.INFO)
97+
l = self.setup_logger(logfile=stderr, loglevel=logging.INFO,
98+
root=False)
9499
l.info("The quick brown fox...")
95100
self.assertIn("The quick brown fox...", stderr.getvalue())
96101

@@ -101,9 +106,9 @@ def test_setup_logger_no_handlers_file(self):
101106
l = self.get_logger()
102107
set_handlers(l, [])
103108
tempfile = mktemp(suffix="unittest", prefix="celery")
104-
l = self.setup_logger(logfile=tempfile, loglevel=0)
105-
print(get_handlers(l)[0].stream)
106-
self.assertIsInstance(get_handlers(l)[0], logging.FileHandler)
109+
l = self.setup_logger(logfile=tempfile, loglevel=0, root=False)
110+
self.assertIsInstance(get_handlers(l)[0 ],
111+
logging.FileHandler)
107112

108113
def test_emergency_error_stderr(self):
109114
def with_override_stdouts(outs):
@@ -126,7 +131,8 @@ def test_emergency_error_file(self):
126131
os.unlink(tempfile)
127132

128133
def test_redirect_stdouts(self):
129-
logger = self.setup_logger(loglevel=logging.ERROR, logfile=None)
134+
logger = self.setup_logger(loglevel=logging.ERROR, logfile=None,
135+
root=False)
130136
try:
131137
def with_wrap_logger(sio):
132138
redirect_stdouts_to_logger(logger, loglevel=logging.ERROR)
@@ -139,7 +145,8 @@ def with_wrap_logger(sio):
139145
sys.stdout, sys.stderr = sys.__stdout__, sys.__stderr__
140146

141147
def test_logging_proxy(self):
142-
logger = self.setup_logger(loglevel=logging.ERROR, logfile=None)
148+
logger = self.setup_logger(loglevel=logging.ERROR, logfile=None,
149+
root=False)
143150

144151
def with_wrap_logger(sio):
145152
p = LoggingProxy(logger)

celery/worker/__init__.py

Lines changed: 1 addition & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@
1212
from celery import registry
1313
from celery import platform
1414
from celery import signals
15-
from celery.log import setup_logger, _hijack_multiprocessing_logger
15+
from celery.log import setup_logger
1616
from celery.beat import EmbeddedClockService
1717
from celery.utils import noop, instantiate
1818

@@ -37,11 +37,6 @@ def process_initializer():
3737
Used for multiprocessing environments.
3838
3939
"""
40-
# There seems to a bug in multiprocessing (backport?)
41-
# when detached, where the worker gets EOFErrors from time to time
42-
# and the logger is left from the parent process causing a crash.
43-
_hijack_multiprocessing_logger()
44-
4540
map(platform.reset_signal, WORKER_SIGRESET)
4641
map(platform.ignore_signal, WORKER_SIGIGNORE)
4742
platform.set_mp_process_title("celeryd")

0 commit comments

Comments (0)