# Lint as: python2, python3
"""
Autotest has some surprisingly complicated logging behaviour.

Managers allow you to define logging contexts, which define a set of rules on
how to handle stdout/stderr.  The niceness that you get by going through the
logging_manager for this is that you can push/pop these contexts.  So if you
wish to temporarily change how logging works, this gives you that mechanism.

Most of this file is actually setting up for a confusing
fork-for-a-logging-subprocess ordeal that's better explained as

                                           normal python logging
                                                     ^
                                                     |
                      +--------+                     |
                      |AUTOSERV|                 +---+---+
                      |        +------stdout---->+LOGGING|
                      | fork() |                 +---+---+
                      ++------++                     ^
                       |      |                      |
              +--------+      +--------+           stdout
              |                        |             |
    +---------+--------+      +--------+---------+   |
    |     AUTOSERV     |      |     AUTOSERV     |   |
    |                  |      |                  |   |
    ++----------------++      ++----------------++   |
    ||      test      ||      ||      test      ||   |
    ||                ||      ||                ||---+
    ||logging.info('')||      ||logging.info('')||
    |------------------|      |------------------|
    +------------------+      +------------------+

Each top-level box is a process.  When autoserv starts up, it'll fork off a
logging subprocess, and set its stdout/stderr fd's to the subprocess's stdin.
When we fork to create the processes that will run the test code, they inherit
this.  As do any processes they themselves fork and exec (such as shelling out
to some command).  This isn't the nicest, as it involves closing and dup'ing
over stdout/stderr, but it does make sure that the whole process tree logs to
python logging in a very consistent way.
"""


import fcntl
import logging
import os
import signal
import six
import sys
import time

# primary public APIs

def configure_logging(logging_config, **kwargs):
    """
    Configure the logging module using the given configuration object, which
    should be an instance of logging_config.LoggingConfig (usually of a
    subclass).  Any keyword args will be passed to the object's
    configure_logging() method.

    Every entry point should call this method at application startup.
    """
    LoggingManager.logging_config_object = logging_config
    logging_config.configure_logging(**kwargs)


def get_logging_manager(manage_stdout_and_stderr=False, redirect_fds=False):
    """
    Create a LoggingManager, optionally managing sys.stdout and sys.stderr.

    Every entry point that wants to capture stdout/stderr and/or use
    LoggingManager to manage a stack of destinations should call this method
    at application startup.

    @param manage_stdout_and_stderr: if True, manage sys.stdout and sys.stderr
            so that data written to them is passed to the logging module.
    @param redirect_fds: if True, return an FdRedirectionLoggingManager, which
            also redirects the underlying file descriptors so that output from
            subprocesses is captured as well.
    """
    if redirect_fds:
        manager = FdRedirectionLoggingManager()
    else:
        manager = LoggingManager()
    if manage_stdout_and_stderr:
        manager.manage_stdout()
        manager.manage_stderr()
    return manager
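
# A rough startup sketch for an entry point (illustrative only; the exact
# import paths and the concrete LoggingConfig subclass depend on the client or
# server environment and are assumptions here):
#
#     config = logging_config.LoggingConfig()
#     logging_manager.configure_logging(config)
#     manager = logging_manager.get_logging_manager(
#             manage_stdout_and_stderr=True, redirect_fds=True)
#     manager.start_logging()
#     ...  # run; stdout/stderr (and subprocess output) now feed the log
#     manager.stop_logging()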


# implementation follows

logger = logging.getLogger()


def _current_handlers():
    return set(logger.handlers)


_caller_code_to_skip_in_logging_stack = set()


def do_not_report_as_logging_caller(func):
    """Decorator for functions that logging should not report as the caller."""
    # These are not the droids you are looking for.
    # You may go about your business.
    _caller_code_to_skip_in_logging_stack.add(func.__code__)
    return func


# Copied from Python 2.4 logging/__init__.py Logger.findCaller and enhanced.
# The logging code remains the same and compatible with this monkey patching
# through at least Python version 2.6.2.
def _logging_manager_aware_logger__find_caller(unused):
    """
    Find the stack frame of the caller so that we can note the source
    file name, line number and function name.
    """
    f = sys._getframe(2).f_back
    rv = "(unknown file)", 0, "(unknown function)"
    while hasattr(f, "f_code"):
        co = f.f_code
        filename = os.path.normcase(co.co_filename)
        if filename == logging._srcfile:
            f = f.f_back
            continue
        ### START additional code.
        if co in _caller_code_to_skip_in_logging_stack:
            f = f.f_back
            continue
        ### END additional code.
        rv = (filename, f.f_lineno, co.co_name)
        break
    return rv


# Monkey patch our way around logging's design...
_original_logger__find_caller = logging.Logger.findCaller
# Do not overwrite in Python 3 and on. It breaks the "<module>:<line num>|"
# formatting in Python 3.
if six.PY2:
    logging.Logger.findCaller = _logging_manager_aware_logger__find_caller
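
# Illustrative example (the helper below is hypothetical): on Python 2, where
# the findCaller() patch above is active, decorating a wrapper keeps the
# wrapper itself out of the reported source location, so log records point at
# the wrapper's caller rather than at the wrapper:
#
#     @do_not_report_as_logging_caller
#     def write_status(msg):
#         logging.info('STATUS: ' + msg)
#
#     write_status('booting host')  # the record's file/line refer to this
#                                   # call site, not to write_status() itself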



class LoggingFile(object):
    """
    File-like object that will receive messages and pass them to the logging
    infrastructure in an appropriate way.
    """

    # This object is used to replace stdout and stderr, but doesn't expose
    # the same interface as a file object. To work around the most troublesome
    # part of the API, |fileno()|, we need to be able to provide a fake fd that
    # can pass basic checks.
    _fake_fds = os.pipe()

    def __init__(self, prefix='', level=logging.DEBUG):
        """
        @param prefix - The prefix for each line logged by this object.
        @param level - The logging level at which lines will be logged.
        """
        self._prefix = prefix
        self._level = level
        self._buffer = []


    @do_not_report_as_logging_caller
    def write(self, data):
        """
        Write data, logging only complete lines.  A trailing partial line is
        stored in a buffer until the rest of the line arrives.
        @param data - Raw data (a string) that will be processed.
        """
        # splitlines() discards a trailing blank line, so use split() instead
        data_lines = data.split('\n')
        if len(data_lines) > 1:
            self._buffer.append(data_lines[0])
            self._flush_buffer()
        for line in data_lines[1:-1]:
            self._log_line(line)
        if data_lines[-1]:
            self._buffer.append(data_lines[-1])


    @do_not_report_as_logging_caller
    def _log_line(self, line):
        """
        Passes lines of output to the logging module.
        """
        logging.log(self._level, self._prefix + line)


    @do_not_report_as_logging_caller
    def _flush_buffer(self):
        if self._buffer:
            self._log_line(''.join(self._buffer))
            self._buffer = []


    @do_not_report_as_logging_caller
    def flush(self):
        self._flush_buffer()


    @do_not_report_as_logging_caller
    def isatty(self):
        return False


    @do_not_report_as_logging_caller
    def fileno(self):
        # We return the read end of the pipe here because if we return the
        # write end, one can make the reasonable assumption that writing to the
        # fd is the same as stdout.write(). As we aren't reading from the other
        # end of the pipe, writing to this fd should be an error.
        return self._fake_fds[0]

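# A minimal behavioural sketch of LoggingFile (illustrative only): partial
# writes are buffered until a newline arrives, at which point the accumulated
# line is handed to logging.log() with the configured prefix and level.
#
#     out = LoggingFile(prefix='[stdout] ', level=logging.INFO)
#     out.write('hello ')         # buffered; nothing logged yet
#     out.write('world\nnext')    # logs "[stdout] hello world"; "next" buffered
#     out.flush()                 # logs "[stdout] next"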

class _StreamManager(object):
    """
    Redirects all output for some output stream (normally stdout or stderr) to
    the logging module by replacing the stream's file object with a new
    LoggingFile that calls logging.log().
    """
    def __init__(self, stream, level, stream_setter):
        """
        @param stream: stream object to manage
        @param level: level at which data written to the stream will be logged
        @param stream_setter: function accepting a stream object that will
                replace the given stream in its original location.
        """
        self._stream = stream
        self._level = level
        self._stream_setter = stream_setter
        self._logging_stream = None


    def _replace_with_logger(self):
        self._logging_stream = LoggingFile(level=self._level)
        self._stream_setter(self._logging_stream)


    def _restore_stream(self):
        self._stream_setter(self._stream)


    def flush(self):
        self._logging_stream.flush()


    def start_logging(self):
        """Start directing the stream to the logging module."""
        self._replace_with_logger()


    def stop_logging(self):
        """Restore the stream to its original settings."""
        self._restore_stream()


    def on_push_context(self, context):
        """
        Called when the logging manager is about to push a new context onto the
        stack and has changed logging settings.  The stream manager can modify
        the context to be saved before returning.
        """
        pass


    def on_restore_context(self, context):
        """
        Called when the logging manager is restoring a previous context.
        """
        pass


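# _StreamManager is normally created via LoggingManager.manage_stream(); the
# standalone sketch below only illustrates the mechanics (sys.stdout is used
# as an example stream):
#
#     manager = _StreamManager(sys.stdout, logging.INFO,
#                              lambda f: setattr(sys, 'stdout', f))
#     manager.start_logging()   # sys.stdout is now a LoggingFile
#     print('captured')         # routed to the logging module at INFO
#     manager.stop_logging()    # the original sys.stdout is restored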

class LoggingManager(object):
    """
    Manages a stack of logging configurations, allowing clients to conveniently
    add and remove logging destinations.  Also keeps a list of StreamManagers
    to easily direct streams into the logging module.
    """

    STREAM_MANAGER_CLASS = _StreamManager

    logging_config_object = None

    def __init__(self):
        """
        This class should not ordinarily be constructed directly (other than in
        tests).  Use the module-global factory method get_logging_manager()
        instead.
        """
        if self.logging_config_object is None:
            raise RuntimeError('You must call configure_logging() before this')

        # _context_stack holds a stack of context dicts.  Each context dict
        # contains:
        # * old_handlers: set of registered logging Handlers
        # contexts may also be extended by _StreamManagers
        self._context_stack = []
        self._streams = []
        self._started = False


    def manage_stream(self, stream, level, stream_setter):
        """
        Tells this manager to manage the given stream.  All data written to the
        stream will be directed to the logging module instead.  Must be called
        before start_logging().

        @param stream: stream to manage
        @param level: level to log data written to this stream
        @param stream_setter: function to set the stream to a new object
        """
        if self._started:
            raise RuntimeError('You must call this before start_logging()')
        self._streams.append(self.STREAM_MANAGER_CLASS(stream, level,
                                                       stream_setter))


    def _sys_stream_setter(self, stream_name):
        assert stream_name in ('stdout', 'stderr'), stream_name
        def set_stream(file_object):
            setattr(sys, stream_name, file_object)
        return set_stream


    def manage_stdout(self):
        self.manage_stream(sys.stdout, logging.INFO,
                           self._sys_stream_setter('stdout'))


    def manage_stderr(self):
        self.manage_stream(sys.stderr, self.logging_config_object.stderr_level,
                           self._sys_stream_setter('stderr'))


    def start_logging(self):
        """
        Begin capturing output to the logging module.
        """
        for stream_manager in self._streams:
            stream_manager.start_logging()
        self._started = True


    def stop_logging(self):
        """
        Restore output to its original state.
        """
        while self._context_stack:
            self._pop_context()

        for stream_manager in self._streams:
            stream_manager.stop_logging()

        self._started = False


    def _clear_all_handlers(self):
        for handler in _current_handlers():
            logger.removeHandler(handler)


    def _get_context(self):
        return {'old_handlers': _current_handlers()}


    def _push_context(self, context):
        for stream_manager in self._streams:
            stream_manager.on_push_context(context)
        self._context_stack.append(context)


    def _flush_all_streams(self):
        for stream_manager in self._streams:
            stream_manager.flush()


    def _add_log_handlers(self, add_handlers_fn):
        """
        Modify the logging module's registered handlers and push a new context
        onto the stack.
        @param add_handlers_fn: function to modify the registered logging
        handlers. Accepts a context dictionary which may be modified.
        """
        self._flush_all_streams()
        context = self._get_context()

        add_handlers_fn(context)

        self._push_context(context)


    class _TaggingFormatter(logging.Formatter):
        """
        Delegates to a given formatter, but prefixes each line of output with a
        tag.
        """
        def __init__(self, base_formatter, tag):
            self.base_formatter = base_formatter
            prefix = tag + ' : '
            self._fmt = base_formatter._fmt.replace('%(message)s',
                                                    prefix + '%(message)s')
            self.datefmt = base_formatter.datefmt


    def _add_tagging_formatter(self, tag):
        for handler in _current_handlers():
            tagging_formatter = self._TaggingFormatter(handler.formatter, tag)
            handler.setFormatter(tagging_formatter)


    def _do_redirect(self, stream=None, filename=None, level=None,
                     clear_other_handlers=False):
        """
        @param clear_other_handlers - if true, clear out all other logging
        handlers.
        """
        assert bool(stream) != bool(filename) # xor
        if not self._started:
            raise RuntimeError('You must call start_logging() before this')

        def add_handler(context):
            if clear_other_handlers:
                self._clear_all_handlers()

            if stream:
                handler = self.logging_config_object.add_stream_handler(stream)
            else:
                handler = self.logging_config_object.add_file_handler(filename)

            if level:
                handler.setLevel(level)

        self._add_log_handlers(add_handler)


    def redirect(self, filename):
        """Redirect output to the specified file"""
        self._do_redirect(filename=filename, clear_other_handlers=True)


    def redirect_to_stream(self, stream):
        """Redirect output to the given stream"""
        self._do_redirect(stream=stream, clear_other_handlers=True)


    def tee_redirect(self, filename, level=None):
        """Tee output to the specified file"""
        self._do_redirect(filename=filename, level=level)


    def tee_redirect_to_stream(self, stream):
        """Tee output to the given stream"""
        self._do_redirect(stream=stream)


    def tee_redirect_debug_dir(self, debug_dir, log_name=None, tag=None):
        """
        Tee output to a full new set of debug logs in the given directory.
        """
        def add_handlers(context):
            if tag:
                self._add_tagging_formatter(tag)
                context['tag_added'] = True
            self.logging_config_object.add_debug_file_handlers(
                    debug_dir, log_name=log_name)
        self._add_log_handlers(add_handlers)


    def _restore_context(self, context):
        for stream_manager in self._streams:
            stream_manager.on_restore_context(context)

        # restore logging handlers
        old_handlers = context['old_handlers']
        for handler in _current_handlers() - old_handlers:
            handler.close()
        self._clear_all_handlers()
        for handler in old_handlers:
            logger.addHandler(handler)

        if 'tag_added' in context:
            for handler in _current_handlers():
                tagging_formatter = handler.formatter
                handler.setFormatter(tagging_formatter.base_formatter)


    def _pop_context(self):
        self._flush_all_streams()
        context = self._context_stack.pop()
        self._restore_context(context)


    def undo_redirect(self):
        """
        Undo the last redirection (that hasn't yet been undone).

        If any subprocesses have been launched since the redirection was
        performed, they must have ended by the time this is called.  Otherwise,
        this will hang waiting for the logging subprocess to end.
        """
        if not self._context_stack:
            raise RuntimeError('No redirects to undo')
        self._pop_context()


    def restore(self):
        """
        Same as undo_redirect().  For backwards compatibility with
        fd_stack.
        """
        self.undo_redirect()

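# How the context stack behaves for a manager obtained from
# get_logging_manager() (illustrative; the file paths are hypothetical):
#
#     manager.start_logging()
#     manager.tee_redirect('/tmp/debug/client.log')  # push: also log to a file
#     manager.redirect('/tmp/debug/only.log')        # push: this file replaces
#                                                    # all other handlers
#     manager.undo_redirect()                        # pop: back to tee'ing
#     manager.undo_redirect()                        # pop: original handlers
#     manager.stop_logging()                         # pops anything left over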

class _FdRedirectionStreamManager(_StreamManager):
    """
    Like _StreamManager, but also captures output from subprocesses by
    modifying the underlying file descriptors.

    For the underlying file descriptors, we spawn a subprocess that writes all
    input to the logging module, and we point the FD to that subprocess.  As a
    result, every time we redirect output we need to spawn a new subprocess to
    pick up the new logging settings (without disturbing any existing processes
    using the old logging subprocess).

    If, one day, we could get all code to launch its subprocesses through
    utils.run() and friends, we'd no longer need to handle raw FD output, and
    we could get rid of all this business with subprocesses.  Another option
    would be to capture all stray output to a single, separate destination.
    """

    WAITPID_TIMEOUT_S = 5
    WAITPID_SAMPLE_TIME_S = 0.1

    def __init__(self, stream, level, stream_setter):
        if not hasattr(stream, 'fileno'):
            # with fake, in-process file objects, subprocess output won't be
            # captured. this should never happen in normal use, since the
            # factory methods will only pass sys.stdout and sys.stderr.
            raise ValueError("FdRedirectionLoggingManager won't work with "
                             "streams that aren't backed by file "
                             "descriptors")

        super(_FdRedirectionStreamManager, self).__init__(stream, level,
                                                          stream_setter)
        self._fd = stream.fileno()
        self._fd_copy_stream = None


    def _point_stream_handlers_to_copy(self):
        """
        point logging StreamHandlers that point to this stream to a safe
        copy of the underlying FD. otherwise, StreamHandler output will go
        to the logging subprocess, effectively getting doubly logged.
        """
        fd_copy = os.dup(self._fd)
        self._fd_copy_stream = os.fdopen(fd_copy, 'w')
        self._redirect_logging_stream_handlers(self._stream,
                                               self._fd_copy_stream)


    def _restore_stream_handlers(self):
        """ point logging StreamHandlers back to the original FD """
        self._redirect_logging_stream_handlers(self._fd_copy_stream,
                                               self._stream)
        self._fd_copy_stream.close()


    def _redirect_logging_stream_handlers(self, old_stream, new_stream):
        """
        Redirect all configured logging StreamHandlers pointing to
        old_stream to point to new_stream instead.
        """
        for handler in _current_handlers():
            points_to_stream = (isinstance(handler, logging.StreamHandler) and
                                hasattr(handler.stream, 'fileno') and
                                handler.stream.fileno() == old_stream.fileno())
            if points_to_stream:
                logger.removeHandler(handler)
                handler.close() # doesn't close the stream, just the handler

                new_handler = logging.StreamHandler(new_stream)
                new_handler.setLevel(handler.level)
                new_handler.setFormatter(handler.formatter)
                for log_filter in handler.filters:
                    new_handler.addFilter(log_filter)
                logger.addHandler(new_handler)


    def start_logging(self):
        super(_FdRedirectionStreamManager, self).start_logging()
        self._point_stream_handlers_to_copy()


    def stop_logging(self):
        super(_FdRedirectionStreamManager, self).stop_logging()
        self._restore_stream_handlers()


    def _spawn_logging_subprocess(self):
        """
        Spawn a subprocess to log all input to the logging module with the
        current settings, and direct output to it.
        """
        read_end, write_end = os.pipe()
        pid = os.fork()
        if pid: # parent
            os.close(read_end)
            os.dup2(write_end, self._fd) # point FD to the subprocess
            os.close(write_end)
            fd_flags = fcntl.fcntl(self._fd, fcntl.F_GETFD)
            fcntl.fcntl(self._fd, fcntl.F_SETFD,
                        fd_flags | fcntl.FD_CLOEXEC)
            return pid
        else: # child
            try:
                os.close(write_end)
                # ensure this subprocess doesn't hold any pipes to others
                os.close(1)
                os.close(2)
                self._run_logging_subprocess(read_end) # never returns
            except:
                # don't let exceptions in the child escape
                try:
                    logging.exception('Logging subprocess died:')
                finally:
                    os._exit(1)


    def _run_logging_subprocess(self, read_fd):
        """
        Always run from a subprocess.  Read from read_fd and write to the
        logging module until EOF.
        """
        # A SIGTERM will be issued by the main process if it believes this
        # subprocess to be stuck. In that case, simply exit with status 1.
        # See below for the pending TODO on how to remove this.
        signal.signal(signal.SIGTERM, lambda signum, stack: os._exit(1))
        input_file = os.fdopen(read_fd, 'r')
        for line in iter(input_file.readline, ''):
            logging.log(self._level, line.rstrip('\n'))
        logging.debug('Logging subprocess finished')
        os._exit(0)


    def _context_id(self):
        return '%s_context' % id(self)


    def on_push_context(self, context):
        # adds a context dict for this stream, $id_context, with the following:
        # * old_fd: FD holding a copy of the managed FD before launching a new
        #   subprocess.
        # * child_pid: PID of the logging subprocess launched
        fd_copy = os.dup(self._fd)
        child_pid = self._spawn_logging_subprocess()
        my_context = {'old_fd': fd_copy, 'child_pid': child_pid}
        context[self._context_id()] = my_context


    def on_restore_context(self, context):
        my_context = context[self._context_id()]

        # shut down subprocess
        child_pid = my_context['child_pid']
        try:
            os.close(self._fd)
            # TODO(crbug.com/970115): remove this work-around in favor of
            # a clean waitpid(child_pid, 0) once the issue described in the
            # bug is root caused and resolved.
            end = time.time() + self.WAITPID_TIMEOUT_S
            while time.time() < end:
                if os.waitpid(child_pid, os.WNOHANG) != (0, 0):
                    break
                time.sleep(self.WAITPID_SAMPLE_TIME_S)
            else:
                # After timeout the process still hasn't finished. Send a
                # SIGTERM as it's likely stuck.
                os.kill(child_pid, signal.SIGTERM)
                logging.exception('Ended up killing logging subprocess at %s.',
                                  child_pid)
        except OSError:
            logging.exception('Failed to cleanly shutdown logging subprocess:')

        # restore previous FD
        old_fd = my_context['old_fd']
        os.dup2(old_fd, self._fd)
        os.close(old_fd)


class FdRedirectionLoggingManager(LoggingManager):
    """
    A simple extension of LoggingManager to use _FdRedirectionStreamManagers,
    so that managed streams have their underlying FDs redirected.
    """

    STREAM_MANAGER_CLASS = _FdRedirectionStreamManager

    def start_logging(self):
        super(FdRedirectionLoggingManager, self).start_logging()
        # spawn the initial logging subprocess
        self._push_context(self._get_context())


    def undo_redirect(self):
        # len == 1 would mean only start_logging() had been called (but no
        # redirects had occurred)
        if len(self._context_stack) < 2:
            raise RuntimeError('No redirects to undo')
        super(FdRedirectionLoggingManager, self).undo_redirect()
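

# Sketch of FD-level capture (illustrative; assumes a POSIX host and an
# arbitrary shell command):
#
#     manager = get_logging_manager(manage_stdout_and_stderr=True,
#                                   redirect_fds=True)
#     manager.start_logging()
#     os.system('echo output from a child process')  # captured via the
#                                                    # redirected FD
#     manager.stop_logging()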