1# Copyright 2017 The Abseil Authors.
2#
3# Licensed under the Apache License, Version 2.0 (the "License");
4# you may not use this file except in compliance with the License.
5# You may obtain a copy of the License at
6#
7#      http://www.apache.org/licenses/LICENSE-2.0
8#
9# Unless required by applicable law or agreed to in writing, software
10# distributed under the License is distributed on an "AS IS" BASIS,
11# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12# See the License for the specific language governing permissions and
13# limitations under the License.
14
15import datetime
16import io
17import os
18import re
19import subprocess
20import sys
21import tempfile
22import threading
23import time
24import unittest
25from unittest import mock
26from xml.etree import ElementTree
27from xml.parsers import expat
28
29from absl import logging
30from absl.testing import _bazelize_command
31from absl.testing import absltest
32from absl.testing import parameterized
33from absl.testing import xml_reporter
34
35
class StringIOWriteLn(io.StringIO):
  """In-memory text stream that also provides unittest's `writeln` API."""

  def writeln(self, line):
    """Writes `line` followed by a single newline."""
    self.write('{}\n'.format(line))
40
41
class MockTest(absltest.TestCase):
  """Minimal test case whose reported id is supplied by the caller."""

  # Make the failure exception explicit (matches unittest's default).
  failureException = AssertionError

  def __init__(self, name):
    super().__init__()
    self.name = name

  def id(self):
    # Report the caller-chosen name rather than the derived class.method id.
    return self.name

  def runTest(self):
    # Intentionally empty: the tests drive the result object directly.
    return

  def shortDescription(self):
    return "This is this test's description."
57
58
# Escapes str(exception_type) for matching inside XML attribute regexes.
# (Historically str(exception_type) also differed between Python 2 and 3.)
def xml_escaped_exception_type(exception_type):
  """Returns str(exception_type) escaped the way the XML reporter escapes attributes."""
  type_name = str(exception_type)
  return xml_reporter._escape_xml_attr(type_name)
62
63
# Matches the entire XML output. Captures all <testcase> tags except for the
# last closing </testcase> in a single group.
OUTPUT_STRING = """\
<\\?xml version="1.0"\\?>
<testsuites name="" tests="%(tests)d" failures="%(failures)d"\
 errors="%(errors)d" time="%(run_time).3f" timestamp="%(start_time)s">
<testsuite name="%(suite_name)s" tests="%(tests)d"\
 failures="%(failures)d" errors="%(errors)d" time="%(run_time).3f"\
 timestamp="%(start_time)s">
(  <testcase .*)
  </testcase>
</testsuite>
</testsuites>"""

# Matches a single <testcase> tag and its contents, without the closing
# </testcase>, which we use as a separator to split multiple <testcase> tags.
TESTCASE_STRING = """\
  <testcase name="%(test_name)s" status="%(status)s" result="%(result)s"\
 time="%(run_time).3f" classname="%(classname)s" timestamp="%(start_time)s">\
%(message)s"""

# Regex for the <failure> element produced when get_sample_failure's exc_info
# is reported. NOTE: it matches the literal source line `self.fail('e')`
# rendered in the traceback, so that helper's try body must not change.
FAILURE_MESSAGE = r"""
  <failure message="e" type="{}"><!\[CDATA\[Traceback \(most recent call last\):
  File ".*xml_reporter_test\.py", line \d+, in get_sample_failure
    self.fail\(\'e\'\)
AssertionError: e
\]\]></failure>""".format(xml_escaped_exception_type(AssertionError))

# Regex for the <error> element produced when get_sample_error's exc_info is
# reported; matches the literal `int('a')` source line in the traceback.
ERROR_MESSAGE = r"""
  <error message="invalid&#x20;literal&#x20;for&#x20;int\(\)&#x20;with&#x20;base&#x20;10:&#x20;(&apos;)?a(&apos;)?" type="{}"><!\[CDATA\[Traceback \(most recent call last\):
  File ".*xml_reporter_test\.py", line \d+, in get_sample_error
    int\('a'\)
ValueError: invalid literal for int\(\) with base 10: '?a'?
\]\]></error>""".format(xml_escaped_exception_type(ValueError))

# Template regex (element name filled in via %s) for a non-ASCII message;
# matches the literal `raise AssertionError(u'\xe9')` source line.
UNICODE_MESSAGE = r"""
  <%s message="{0}" type="{1}"><!\[CDATA\[Traceback \(most recent call last\):
  File ".*xml_reporter_test\.py", line \d+, in get_unicode_sample_failure
    raise AssertionError\(u'\\xe9'\)
AssertionError: {0}
\]\]></%s>""".format(
        r'\xe9',
        xml_escaped_exception_type(AssertionError))

# Template regex (element name filled in via %s) for a message containing an
# embedded newline (escaped to &#xA; in the attribute, literal in the CDATA).
NEWLINE_MESSAGE = r"""
  <%s message="{0}" type="{1}"><!\[CDATA\[Traceback \(most recent call last\):
  File ".*xml_reporter_test\.py", line \d+, in get_newline_message_sample_failure
    raise AssertionError\(\'{2}'\)
AssertionError: {3}
\]\]></%s>""".format(
    'new&#xA;line',
    xml_escaped_exception_type(AssertionError),
    r'new\\nline',
    'new\nline')

# Regex for the <error> element emitted for an unexpectedly passing test.
UNEXPECTED_SUCCESS_MESSAGE = '\n'.join([
    '',
    (r'  <error message="" type=""><!\[CDATA\[Test case '
     r'__main__.MockTest.unexpectedly_passing_test should have failed, '
     r'but passed.\]\]></error>'),
])

# Specialize the templates for <error> elements.
UNICODE_ERROR_MESSAGE = UNICODE_MESSAGE % ('error', 'error')
NEWLINE_ERROR_MESSAGE = NEWLINE_MESSAGE % ('error', 'error')
128
129
130class TextAndXMLTestResultTest(absltest.TestCase):
131
  def setUp(self):
    """Creates fresh text and XML output streams for each test."""
    super().setUp()
    self.stream = StringIOWriteLn()
    self.xml_stream = io.StringIO()
136
137  def _make_result(self, times):
138    timer = mock.Mock()
139    timer.side_effect = times
140    return xml_reporter._TextAndXMLTestResult(self.xml_stream, self.stream,
141                                              'foo', 0, timer)
142
143  def _assert_match(self, regex, output, flags=0):
144    fail_msg = 'Expected regex:\n{}\nTo match:\n{}'.format(regex, output)
145    result = re.match(regex, output, flags)
146    if result is None:
147      self.fail(fail_msg)
148    return result.groups()
149
150  def _assert_valid_xml(self, xml_output):
151    try:
152      expat.ParserCreate().Parse(xml_output)
153    except expat.ExpatError as e:
154      raise AssertionError('Bad XML output: {}\n{}'.format(e, xml_output))
155
  def _simulate_error_test(self, test, result):
    """Runs `test` through `result` as if it had raised an error."""
    result.startTest(test)
    result.addError(test, self.get_sample_error())
    result.stopTest(test)
160
  def _simulate_failing_test(self, test, result):
    """Runs `test` through `result` as if an assertion had failed."""
    result.startTest(test)
    result.addFailure(test, self.get_sample_failure())
    result.stopTest(test)
165
  def _simulate_passing_test(self, test, result):
    """Runs `test` through `result` as if it had passed."""
    result.startTest(test)
    result.addSuccess(test)
    result.stopTest(test)
170
171  def _iso_timestamp(self, timestamp):
172    return datetime.datetime.utcfromtimestamp(timestamp).isoformat() + '+00:00'
173
  def test_with_passing_test(self):
    """Verifies the XML emitted for a single passing test case."""
    start_time = 0
    end_time = 2
    result = self._make_result((start_time, start_time, end_time, end_time))

    test = MockTest('__main__.MockTest.passing_test')
    result.startTestRun()
    result.startTest(test)
    result.addSuccess(test)
    result.stopTest(test)
    result.stopTestRun()
    result.printErrors()

    start_time_str = re.escape(self._iso_timestamp(start_time))
    run_time = end_time - start_time
    expected_re = OUTPUT_STRING % {
        'suite_name': 'MockTest',
        'tests': 1,
        'failures': 0,
        'errors': 0,
        'run_time': run_time,
        'start_time': start_time_str,
    }
    expected_testcase_re = TESTCASE_STRING % {
        'run_time': run_time,
        'start_time': start_time_str,
        'test_name': 'passing_test',
        'classname': '__main__.MockTest',
        'status': 'run',
        'result': 'completed',
        'attributes': '',
        'message': ''
    }
    (testcase,) = self._assert_match(expected_re, self.xml_stream.getvalue(),
                                     re.DOTALL)
    self._assert_match(expected_testcase_re, testcase)
210
  def test_with_passing_subtest(self):
    """Verifies a passing subTest is reported with its message in the name."""
    start_time = 0
    end_time = 2
    result = self._make_result((start_time, start_time, end_time, end_time))

    test = MockTest('__main__.MockTest.passing_test')
    subtest = unittest.case._SubTest(test, 'msg', None)  # pytype: disable=module-attr
    result.startTestRun()
    result.startTest(test)
    result.addSubTest(test, subtest, None)
    result.stopTestRun()
    result.printErrors()

    start_time_str = re.escape(self._iso_timestamp(start_time))
    run_time = end_time - start_time
    expected_re = OUTPUT_STRING % {
        'suite_name': 'MockTest',
        'tests': 1,
        'failures': 0,
        'errors': 0,
        'run_time': run_time,
        'start_time': start_time_str,
    }
    expected_testcase_re = TESTCASE_STRING % {
        'run_time': run_time,
        'start_time': start_time_str,
        'test_name': r'passing_test&#x20;\[msg\]',
        'classname': '__main__.MockTest',
        'status': 'run',
        'result': 'completed',
        'attributes': '',
        'message': ''
    }
    (testcase,) = self._assert_match(expected_re, self.xml_stream.getvalue(),
                                     re.DOTALL)
    self._assert_match(expected_testcase_re, testcase)
247
  def test_with_passing_subtest_with_dots_in_parameter_name(self):
    """Verifies subTest parameters containing dots are escaped into the name."""
    start_time = 0
    end_time = 2
    result = self._make_result((start_time, start_time, end_time, end_time))

    test = MockTest('__main__.MockTest.passing_test')
    subtest = unittest.case._SubTest(test, 'msg', {'case': 'a.b.c'})  # pytype: disable=module-attr
    result.startTestRun()
    result.startTest(test)
    result.addSubTest(test, subtest, None)
    result.stopTestRun()
    result.printErrors()

    start_time_str = re.escape(self._iso_timestamp(start_time))
    run_time = end_time - start_time
    expected_re = OUTPUT_STRING % {
        'suite_name': 'MockTest',
        'tests': 1,
        'failures': 0,
        'errors': 0,
        'run_time': run_time,
        'start_time': start_time_str,
    }
    expected_testcase_re = TESTCASE_STRING % {
        'run_time': run_time,
        'start_time': start_time_str,
        'test_name':
            r'passing_test&#x20;\[msg\]&#x20;\(case=&apos;a.b.c&apos;\)',
        'classname':
            '__main__.MockTest',
        'status':
            'run',
        'result':
            'completed',
        'attributes':
            '',
        'message':
            ''
    }
    (testcase,) = self._assert_match(expected_re, self.xml_stream.getvalue(),
                                     re.DOTALL)
    self._assert_match(expected_testcase_re, testcase)
290
291  def get_sample_error(self):
292    try:
293      int('a')
294    except ValueError:
295      error_values = sys.exc_info()
296      return error_values
297
298  def get_sample_failure(self):
299    try:
300      self.fail('e')
301    except AssertionError:
302      error_values = sys.exc_info()
303      return error_values
304
305  def get_newline_message_sample_failure(self):
306    try:
307      raise AssertionError('new\nline')
308    except AssertionError:
309      error_values = sys.exc_info()
310      return error_values
311
312  def get_unicode_sample_failure(self):
313    try:
314      raise AssertionError(u'\xe9')
315    except AssertionError:
316      error_values = sys.exc_info()
317      return error_values
318
319  def get_terminal_escape_sample_failure(self):
320    try:
321      raise AssertionError('\x1b')
322    except AssertionError:
323      error_values = sys.exc_info()
324      return error_values
325
  def test_with_failing_test(self):
    """Verifies the XML emitted for a single failing test case."""
    start_time = 10
    end_time = 20
    result = self._make_result((start_time, start_time, end_time, end_time))

    test = MockTest('__main__.MockTest.failing_test')
    result.startTestRun()
    result.startTest(test)
    result.addFailure(test, self.get_sample_failure())
    result.stopTest(test)
    result.stopTestRun()
    result.printErrors()

    start_time_str = re.escape(self._iso_timestamp(start_time))
    run_time = end_time - start_time
    expected_re = OUTPUT_STRING % {
        'suite_name': 'MockTest',
        'tests': 1,
        'failures': 1,
        'errors': 0,
        'run_time': run_time,
        'start_time': start_time_str,
    }
    expected_testcase_re = TESTCASE_STRING % {
        'run_time': run_time,
        'start_time': start_time_str,
        'test_name': 'failing_test',
        'classname': '__main__.MockTest',
        'status': 'run',
        'result': 'completed',
        'attributes': '',
        'message': FAILURE_MESSAGE
    }
    (testcase,) = self._assert_match(expected_re, self.xml_stream.getvalue(),
                                     re.DOTALL)
    self._assert_match(expected_testcase_re, testcase)
362
  def test_with_failing_subtest(self):
    """Verifies a failing subTest is counted and its failure is embedded."""
    start_time = 10
    end_time = 20
    result = self._make_result((start_time, start_time, end_time, end_time))

    test = MockTest('__main__.MockTest.failing_test')
    subtest = unittest.case._SubTest(test, 'msg', None)  # pytype: disable=module-attr
    result.startTestRun()
    result.startTest(test)
    result.addSubTest(test, subtest, self.get_sample_failure())
    result.stopTestRun()
    result.printErrors()

    start_time_str = re.escape(self._iso_timestamp(start_time))
    run_time = end_time - start_time
    expected_re = OUTPUT_STRING % {
        'suite_name': 'MockTest',
        'tests': 1,
        'failures': 1,
        'errors': 0,
        'run_time': run_time,
        'start_time': start_time_str,
    }
    expected_testcase_re = TESTCASE_STRING % {
        'run_time': run_time,
        'start_time': start_time_str,
        'test_name': r'failing_test&#x20;\[msg\]',
        'classname': '__main__.MockTest',
        'status': 'run',
        'result': 'completed',
        'attributes': '',
        'message': FAILURE_MESSAGE
    }
    (testcase,) = self._assert_match(expected_re, self.xml_stream.getvalue(),
                                     re.DOTALL)
    self._assert_match(expected_testcase_re, testcase)
399
  def test_with_error_test(self):
    """Verifies the XML emitted for a test that raises an error."""
    start_time = 100
    end_time = 200
    result = self._make_result((start_time, start_time, end_time, end_time))

    test = MockTest('__main__.MockTest.failing_test')
    result.startTestRun()
    result.startTest(test)
    result.addError(test, self.get_sample_error())
    result.stopTest(test)
    result.stopTestRun()
    result.printErrors()
    xml = self.xml_stream.getvalue()

    self._assert_valid_xml(xml)

    start_time_str = re.escape(self._iso_timestamp(start_time))
    run_time = end_time - start_time
    expected_re = OUTPUT_STRING % {
        'suite_name': 'MockTest',
        'tests': 1,
        'failures': 0,
        'errors': 1,
        'run_time': run_time,
        'start_time': start_time_str,
    }
    expected_testcase_re = TESTCASE_STRING % {
        'run_time': run_time,
        'start_time': start_time_str,
        'test_name': 'failing_test',
        'classname': '__main__.MockTest',
        'status': 'run',
        'result': 'completed',
        'attributes': '',
        'message': ERROR_MESSAGE
    }
    (testcase,) = self._assert_match(expected_re, xml, re.DOTALL)
    self._assert_match(expected_testcase_re, testcase)
438
  def test_with_error_subtest(self):
    """Verifies a subTest error is counted and its message is embedded."""
    start_time = 10
    end_time = 20
    result = self._make_result((start_time, start_time, end_time, end_time))

    test = MockTest('__main__.MockTest.error_test')
    subtest = unittest.case._SubTest(test, 'msg', None)  # pytype: disable=module-attr
    result.startTestRun()
    result.startTest(test)
    result.addSubTest(test, subtest, self.get_sample_error())
    result.stopTestRun()
    result.printErrors()

    start_time_str = re.escape(self._iso_timestamp(start_time))
    run_time = end_time - start_time
    expected_re = OUTPUT_STRING % {
        'suite_name': 'MockTest',
        'tests': 1,
        'failures': 0,
        'errors': 1,
        'run_time': run_time,
        'start_time': start_time_str,
    }
    expected_testcase_re = TESTCASE_STRING % {
        'run_time': run_time,
        'start_time': start_time_str,
        'test_name': r'error_test&#x20;\[msg\]',
        'classname': '__main__.MockTest',
        'status': 'run',
        'result': 'completed',
        'attributes': '',
        'message': ERROR_MESSAGE
    }
    (testcase,) = self._assert_match(expected_re, self.xml_stream.getvalue(),
                                     re.DOTALL)
    self._assert_match(expected_testcase_re, testcase)
475
  def test_with_fail_and_error_test(self):
    """Tests a failure and subsequent error within a single result.

    Only the first outcome (the failure) is tallied in the suite counters,
    but both messages appear in the testcase, in order.
    """
    start_time = 123
    end_time = 456
    result = self._make_result((start_time, start_time, end_time, end_time))

    test = MockTest('__main__.MockTest.failing_test')
    result.startTestRun()
    result.startTest(test)
    result.addFailure(test, self.get_sample_failure())
    # This could happen in tearDown
    result.addError(test, self.get_sample_error())
    result.stopTest(test)
    result.stopTestRun()
    result.printErrors()
    xml = self.xml_stream.getvalue()

    self._assert_valid_xml(xml)

    start_time_str = re.escape(self._iso_timestamp(start_time))
    run_time = end_time - start_time
    expected_re = OUTPUT_STRING % {
        'suite_name': 'MockTest',
        'tests': 1,
        'failures': 1,  # Only the failure is tallied (because it was first).
        'errors': 0,
        'run_time': run_time,
        'start_time': start_time_str,
    }
    expected_testcase_re = TESTCASE_STRING % {
        'run_time': run_time,
        'start_time': start_time_str,
        'test_name': 'failing_test',
        'classname': '__main__.MockTest',
        'status': 'run',
        'result': 'completed',
        'attributes': '',
        # Messages from failure and error should be concatenated in order.
        'message': FAILURE_MESSAGE + ERROR_MESSAGE
    }
    (testcase,) = self._assert_match(expected_re, xml, re.DOTALL)
    self._assert_match(expected_testcase_re, testcase)
518
  def test_with_error_and_fail_test(self):
    """Tests an error and subsequent failure within a single result.

    Mirror image of test_with_fail_and_error_test: the first outcome (the
    error) is tallied, and both messages appear in order.
    """
    start_time = 123
    end_time = 456
    result = self._make_result((start_time, start_time, end_time, end_time))

    test = MockTest('__main__.MockTest.failing_test')
    result.startTestRun()
    result.startTest(test)
    result.addError(test, self.get_sample_error())
    result.addFailure(test, self.get_sample_failure())
    result.stopTest(test)
    result.stopTestRun()
    result.printErrors()
    xml = self.xml_stream.getvalue()

    self._assert_valid_xml(xml)

    start_time_str = re.escape(self._iso_timestamp(start_time))
    run_time = end_time - start_time
    expected_re = OUTPUT_STRING % {
        'suite_name': 'MockTest',
        'tests': 1,
        'failures': 0,
        'errors': 1,  # Only the error is tallied (because it was first).
        'run_time': run_time,
        'start_time': start_time_str,
    }
    expected_testcase_re = TESTCASE_STRING % {
        'run_time': run_time,
        'start_time': start_time_str,
        'test_name': 'failing_test',
        'classname': '__main__.MockTest',
        'status': 'run',
        'result': 'completed',
        'attributes': '',
        # Messages from error and failure should be concatenated in order.
        'message': ERROR_MESSAGE + FAILURE_MESSAGE
    }
    (testcase,) = self._assert_match(expected_re, xml, re.DOTALL)
    self._assert_match(expected_testcase_re, testcase)
560
  def test_with_newline_error_test(self):
    """Verifies a message containing a newline is escaped in the attribute."""
    start_time = 100
    end_time = 200
    result = self._make_result((start_time, start_time, end_time, end_time))

    test = MockTest('__main__.MockTest.failing_test')
    result.startTestRun()
    result.startTest(test)
    result.addError(test, self.get_newline_message_sample_failure())
    result.stopTest(test)
    result.stopTestRun()
    result.printErrors()
    xml = self.xml_stream.getvalue()

    self._assert_valid_xml(xml)

    start_time_str = re.escape(self._iso_timestamp(start_time))
    run_time = end_time - start_time
    expected_re = OUTPUT_STRING % {
        'suite_name': 'MockTest',
        'tests': 1,
        'failures': 0,
        'errors': 1,
        'run_time': run_time,
        'start_time': start_time_str,
    } + '\n'
    expected_testcase_re = TESTCASE_STRING % {
        'run_time': run_time,
        'start_time': start_time_str,
        'test_name': 'failing_test',
        'classname': '__main__.MockTest',
        'status': 'run',
        'result': 'completed',
        'attributes': '',
        'message': NEWLINE_ERROR_MESSAGE
    }
    (testcase,) = self._assert_match(expected_re, xml, re.DOTALL)
    self._assert_match(expected_testcase_re, testcase)
599
  def test_with_unicode_error_test(self):
    """Verifies a non-ASCII error message yields valid, matching XML."""
    start_time = 100
    end_time = 200
    result = self._make_result((start_time, start_time, end_time, end_time))

    test = MockTest('__main__.MockTest.failing_test')
    result.startTestRun()
    result.startTest(test)
    result.addError(test, self.get_unicode_sample_failure())
    result.stopTest(test)
    result.stopTestRun()
    result.printErrors()
    xml = self.xml_stream.getvalue()

    self._assert_valid_xml(xml)

    start_time_str = re.escape(self._iso_timestamp(start_time))
    run_time = end_time - start_time
    expected_re = OUTPUT_STRING % {
        'suite_name': 'MockTest',
        'tests': 1,
        'failures': 0,
        'errors': 1,
        'run_time': run_time,
        'start_time': start_time_str,
    }
    expected_testcase_re = TESTCASE_STRING % {
        'run_time': run_time,
        'start_time': start_time_str,
        'test_name': 'failing_test',
        'classname': '__main__.MockTest',
        'status': 'run',
        'result': 'completed',
        'attributes': '',
        'message': UNICODE_ERROR_MESSAGE
    }
    (testcase,) = self._assert_match(expected_re, xml, re.DOTALL)
    self._assert_match(expected_testcase_re, testcase)
638
  def test_with_terminal_escape_error(self):
    """An ESC control character in the message must still yield valid XML."""
    start_time = 100
    end_time = 200
    result = self._make_result((start_time, start_time, end_time, end_time))

    test = MockTest('__main__.MockTest.failing_test')
    result.startTestRun()
    result.startTest(test)
    result.addError(test, self.get_terminal_escape_sample_failure())
    result.stopTest(test)
    result.stopTestRun()
    result.printErrors()

    # Only well-formedness is checked; the exact escaping is not pinned here.
    self._assert_valid_xml(self.xml_stream.getvalue())
653
654  def test_with_expected_failure_test(self):
655    start_time = 100
656    end_time = 200
657    result = self._make_result((start_time, start_time, end_time, end_time))
658    error_values = ''
659
660    try:
661      raise RuntimeError('Test expectedFailure')
662    except RuntimeError:
663      error_values = sys.exc_info()
664
665    test = MockTest('__main__.MockTest.expected_failing_test')
666    result.startTestRun()
667    result.startTest(test)
668    result.addExpectedFailure(test, error_values)
669    result.stopTest(test)
670    result.stopTestRun()
671    result.printErrors()
672
673    start_time_str = re.escape(self._iso_timestamp(start_time))
674    run_time = end_time - start_time
675    expected_re = OUTPUT_STRING % {
676        'suite_name': 'MockTest',
677        'tests': 1,
678        'failures': 0,
679        'errors': 0,
680        'run_time': run_time,
681        'start_time': start_time_str,
682    }
683    expected_testcase_re = TESTCASE_STRING % {
684        'run_time': run_time,
685        'start_time': start_time_str,
686        'test_name': 'expected_failing_test',
687        'classname': '__main__.MockTest',
688        'status': 'run',
689        'result': 'completed',
690        'attributes': '',
691        'message': ''
692    }
693    (testcase,) = self._assert_match(expected_re, self.xml_stream.getvalue(),
694                                     re.DOTALL)
695    self._assert_match(expected_testcase_re, testcase, re.DOTALL)
696
  def test_with_unexpected_success_error_test(self):
    """Verifies an unexpected success is reported as an error."""
    start_time = 100
    end_time = 200
    result = self._make_result((start_time, start_time, end_time, end_time))

    test = MockTest('__main__.MockTest.unexpectedly_passing_test')
    result.startTestRun()
    result.startTest(test)
    result.addUnexpectedSuccess(test)
    result.stopTest(test)
    result.stopTestRun()
    result.printErrors()

    start_time_str = re.escape(self._iso_timestamp(start_time))
    run_time = end_time - start_time
    expected_re = OUTPUT_STRING % {
        'suite_name': 'MockTest',
        'tests': 1,
        'failures': 0,
        'errors': 1,
        'run_time': run_time,
        'start_time': start_time_str,
    }
    expected_testcase_re = TESTCASE_STRING % {
        'run_time': run_time,
        'start_time': start_time_str,
        'test_name': 'unexpectedly_passing_test',
        'classname': '__main__.MockTest',
        'status': 'run',
        'result': 'completed',
        'attributes': '',
        'message': UNEXPECTED_SUCCESS_MESSAGE
    }
    (testcase,) = self._assert_match(expected_re, self.xml_stream.getvalue(),
                                     re.DOTALL)
    self._assert_match(expected_testcase_re, testcase)
733
  def test_with_skipped_test(self):
    """Verifies a skipped test is reported as notrun/suppressed."""
    start_time = 100
    end_time = 100
    result = self._make_result((start_time, start_time, end_time, end_time))

    test = MockTest('__main__.MockTest.skipped_test_with_reason')
    result.startTestRun()
    result.startTest(test)
    result.addSkip(test, 'b"r')
    result.stopTest(test)
    result.stopTestRun()
    result.printErrors()

    start_time_str = re.escape(self._iso_timestamp(start_time))
    run_time = end_time - start_time
    expected_re = OUTPUT_STRING % {
        'suite_name': 'MockTest',
        'tests': 1,
        'failures': 0,
        'errors': 0,
        'run_time': run_time,
        'start_time': start_time_str,
    }
    expected_testcase_re = TESTCASE_STRING % {
        'run_time': run_time,
        'start_time': start_time_str,
        'test_name': 'skipped_test_with_reason',
        'classname': '__main__.MockTest',
        'status': 'notrun',
        'result': 'suppressed',
        'message': ''
    }
    (testcase,) = self._assert_match(expected_re, self.xml_stream.getvalue(),
                                     re.DOTALL)
    self._assert_match(expected_testcase_re, testcase)
769
  def test_two_tests_with_time(self):
    """Verifies per-test and suite-wide timings when two tests run."""
    start_time1 = 100
    end_time1 = 200
    start_time2 = 400
    end_time2 = 700
    name = '__main__.MockTest.'
    result = self._make_result((start_time1, start_time1, end_time1,
                                start_time2, end_time2, end_time2))

    test = MockTest(f'{name}one_test')
    result.startTestRun()
    result.startTest(test)
    result.addSuccess(test)
    result.stopTest(test)

    test = MockTest(f'{name}another_test')
    result.startTest(test)
    result.addSuccess(test)
    result.stopTest(test)
    result.stopTestRun()
    result.printErrors()

    start_time = min(start_time1, start_time2)
    run_time = max(end_time1, end_time2) - start_time
    start_time_str = re.escape(self._iso_timestamp(start_time))
    start_time_str1 = re.escape(self._iso_timestamp(start_time1))
    start_time_str2 = re.escape(self._iso_timestamp(start_time2))
    expected_re = OUTPUT_STRING % {
        'suite_name': 'MockTest',
        'tests': 2,
        'failures': 0,
        'errors': 0,
        'run_time': run_time,
        'start_time': start_time_str,
    }
    expected_testcase1_re = TESTCASE_STRING % {
        'run_time': end_time1 - start_time1,
        'start_time': start_time_str1,
        'test_name': 'one_test',
        'classname': '__main__.MockTest',
        'status': 'run',
        'result': 'completed',
        'message': ''
    }
    expected_testcase2_re = TESTCASE_STRING % {
        'run_time': end_time2 - start_time2,
        'start_time': start_time_str2,
        'test_name': 'another_test',
        'classname': '__main__.MockTest',
        'status': 'run',
        'result': 'completed',
        'message': ''
    }

    (testcases,) = self._assert_match(expected_re, self.xml_stream.getvalue(),
                                      re.DOTALL)
    [testcase1, testcase2] = testcases.split('\n  </testcase>\n')
    # Sorting by test name flips the order of the two tests.
    self._assert_match(expected_testcase2_re, testcase1)
    self._assert_match(expected_testcase1_re, testcase2)
830
  def test_with_no_suite_name(self):
    """Verifies the suite name is derived from the test id.

    NOTE(review): despite the method name, the expected suite name here is
    'MockTest' (taken from the '__main__.MockTest.bad_name' id) -- confirm
    the original intent against the reporter's suite-naming logic.
    """
    start_time = 1000
    end_time = 1200
    result = self._make_result((start_time, start_time, end_time, end_time))

    test = MockTest('__main__.MockTest.bad_name')
    result.startTestRun()
    result.startTest(test)
    result.addSuccess(test)
    result.stopTest(test)
    result.stopTestRun()
    result.printErrors()

    start_time_str = re.escape(self._iso_timestamp(start_time))
    run_time = end_time - start_time
    expected_re = OUTPUT_STRING % {
        'suite_name': 'MockTest',
        'tests': 1,
        'failures': 0,
        'errors': 0,
        'run_time': run_time,
        'start_time': start_time_str,
    }
    expected_testcase_re = TESTCASE_STRING % {
        'run_time': run_time,
        'start_time': start_time_str,
        'test_name': 'bad_name',
        'classname': '__main__.MockTest',
        'status': 'run',
        'result': 'completed',
        'attributes': '',
        'message': ''
    }
    (testcase,) = self._assert_match(expected_re, self.xml_stream.getvalue(),
                                     re.DOTALL)
    self._assert_match(expected_testcase_re, testcase)
867
  def test_unnamed_parameterized_testcase(self):
    """Test unnamed parameterized test cases.

    Unnamed parameterized test cases might have non-alphanumeric characters in
    their test method names. This test ensures xml_reporter handles them
    correctly.
    """

    class ParameterizedTest(parameterized.TestCase):

      @parameterized.parameters(('a (b.c)',))
      def test_prefix(self, case):
        self.assertTrue(case.startswith('a'))

    start_time = 1000
    end_time = 1200
    result = self._make_result((start_time, start_time, end_time, end_time))
    test = ParameterizedTest(methodName='test_prefix0')
    result.startTestRun()
    result.startTest(test)
    result.addSuccess(test)
    result.stopTest(test)
    result.stopTestRun()
    result.printErrors()

    start_time_str = re.escape(self._iso_timestamp(start_time))
    run_time = end_time - start_time
    # The classname for a locally defined class is not a simple dotted path,
    # so escape it the same way the reporter does.
    classname = xml_reporter._escape_xml_attr(
        unittest.util.strclass(test.__class__))
    expected_re = OUTPUT_STRING % {
        'suite_name': 'ParameterizedTest',
        'tests': 1,
        'failures': 0,
        'errors': 0,
        'run_time': run_time,
        'start_time': start_time_str,
    }
    expected_testcase_re = TESTCASE_STRING % {
        'run_time': run_time,
        'start_time': start_time_str,
        'test_name': re.escape('test_prefix0&#x20;(&apos;a&#x20;(b.c)&apos;)'),
        'classname': classname,
        'status': 'run',
        'result': 'completed',
        'attributes': '',
        'message': ''
    }
    (testcase,) = self._assert_match(expected_re, self.xml_stream.getvalue(),
                                     re.DOTALL)
    self._assert_match(expected_testcase_re, testcase)
918
919  def teststop_test_without_pending_test(self):
920    end_time = 1200
921    result = self._make_result((end_time,))
922
923    test = MockTest('__main__.MockTest.bad_name')
924    result.stopTest(test)
925    result.stopTestRun()
926    # Just verify that this doesn't crash
927
928  def test_text_and_xmltest_runner(self):
929    runner = xml_reporter.TextAndXMLTestRunner(self.xml_stream, self.stream,
930                                               'foo', 1)
931    result1 = runner._makeResult()
932    result2 = xml_reporter._TextAndXMLTestResult(None, None, None, 0, None)
933    self.assertIs(type(result1), type(result2))
934
935  def test_timing_with_time_stub(self):
936    """Make sure that timing is correct even if time.time is stubbed out."""
937    saved_time = time.time
938    try:
939      time.time = lambda: -1
940      reporter = xml_reporter._TextAndXMLTestResult(self.xml_stream,
941                                                    self.stream,
942                                                    'foo', 0)
943      test = MockTest('bar')
944      reporter.startTest(test)
945      self.assertNotEqual(reporter.start_time, -1)
946    finally:
947      time.time = saved_time
948
949  def test_concurrent_add_and_delete_pending_test_case_result(self):
950    """Make sure adding/deleting pending test case results are thread safe."""
951    result = xml_reporter._TextAndXMLTestResult(None, self.stream, None, 0,
952                                                None)
953    def add_and_delete_pending_test_case_result(test_name):
954      test = MockTest(test_name)
955      result.addSuccess(test)
956      result.delete_pending_test_case_result(test)
957
958    for i in range(50):
959      add_and_delete_pending_test_case_result('add_and_delete_test%s' % i)
960    self.assertEqual(result.pending_test_case_results, {})
961
962  def test_concurrent_test_runs(self):
963    """Make sure concurrent test runs do not race each other."""
964    num_passing_tests = 20
965    num_failing_tests = 20
966    num_error_tests = 20
967    total_num_tests = num_passing_tests + num_failing_tests + num_error_tests
968
969    times = [0] + [i for i in range(2 * total_num_tests)
970                  ] + [2 * total_num_tests - 1]
971    result = self._make_result(times)
972    threads = []
973    names = []
974    result.startTestRun()
975    for i in range(num_passing_tests):
976      name = 'passing_concurrent_test_%s' % i
977      names.append(name)
978      test_name = '__main__.MockTest.%s' % name
979      # xml_reporter uses id(test) as the test identifier.
980      # In a real testing scenario, all the test instances are created before
981      # running them. So all ids will be unique.
982      # We must do the same here: create test instance beforehand.
983      test = MockTest(test_name)
984      threads.append(threading.Thread(
985          target=self._simulate_passing_test, args=(test, result)))
986    for i in range(num_failing_tests):
987      name = 'failing_concurrent_test_%s' % i
988      names.append(name)
989      test_name = '__main__.MockTest.%s' % name
990      test = MockTest(test_name)
991      threads.append(threading.Thread(
992          target=self._simulate_failing_test, args=(test, result)))
993    for i in range(num_error_tests):
994      name = 'error_concurrent_test_%s' % i
995      names.append(name)
996      test_name = '__main__.MockTest.%s' % name
997      test = MockTest(test_name)
998      threads.append(threading.Thread(
999          target=self._simulate_error_test, args=(test, result)))
1000    for t in threads:
1001      t.start()
1002    for t in threads:
1003      t.join()
1004
1005    result.stopTestRun()
1006    result.printErrors()
1007    tests_not_in_xml = []
1008    for tn in names:
1009      if tn not in self.xml_stream.getvalue():
1010        tests_not_in_xml.append(tn)
1011    msg = ('Expected xml_stream to contain all test %s results, but %s tests '
1012           'are missing. List of missing tests: %s' % (
1013               total_num_tests, len(tests_not_in_xml), tests_not_in_xml))
1014    self.assertEqual([], tests_not_in_xml, msg)
1015
1016  def test_add_failure_during_stop_test(self):
1017    """Tests an addFailure() call from within a stopTest() call stack."""
1018    result = self._make_result((0, 2))
1019    test = MockTest('__main__.MockTest.failing_test')
1020    result.startTestRun()
1021    result.startTest(test)
1022
1023    # Replace parent stopTest method from unittest.TextTestResult with
1024    # a version that calls self.addFailure().
1025    with mock.patch.object(
1026        unittest.TextTestResult,
1027        'stopTest',
1028        side_effect=lambda t: result.addFailure(t, self.get_sample_failure())):
1029      # Run stopTest in a separate thread since we are looking to verify that
1030      # it does not deadlock, and would otherwise prevent the test from
1031      # completing.
1032      stop_test_thread = threading.Thread(target=result.stopTest, args=(test,))
1033      stop_test_thread.daemon = True
1034      stop_test_thread.start()
1035
1036    stop_test_thread.join(10.0)
1037    self.assertFalse(stop_test_thread.is_alive(),
1038                     'result.stopTest(test) call failed to complete')
1039
1040
class XMLTest(absltest.TestCase):

  def test_escape_xml(self):
    """Special XML characters must come out as character references."""
    escaped = xml_reporter._escape_xml_attr('"Hi" <\'>\t\r\n')
    self.assertEqual('&quot;Hi&quot;&#x20;&lt;&apos;&gt;&#x9;&#xD;&#xA;',
                     escaped)
1046
1047
class XmlReporterFixtureTest(absltest.TestCase):
  """Runs xml_reporter_helper_test in a subprocess and checks its XML output."""

  def _get_helper(self):
    """Returns the path to the xml_reporter_helper_test binary."""
    binary_name = 'absl/testing/tests/xml_reporter_helper_test'
    return _bazelize_command.get_executable_path(binary_name)

  def _run_helper_and_parse(self, flag, expect_success):
    """Runs the helper binary and parses the XML file it writes.

    Shared implementation for _run_test_and_get_xml and _run_test, which
    previously duplicated the mkstemp/subprocess/parse/cleanup sequence and
    differed only in the expected exit status.

    Args:
      flag: flag to pass to xml_reporter_helper_test.
      expect_success: if True, assert the helper exits with status 0;
          otherwise assert it exits with a non-zero status.

    Returns:
      The root Element instance of the XML output.
    """
    xml_fhandle, xml_fname = tempfile.mkstemp()
    # Close our handle immediately; the subprocess writes the file by name.
    os.close(xml_fhandle)

    try:
      binary = self._get_helper()
      args = [binary, flag, '--xml_output_file=%s' % xml_fname]
      ret = subprocess.call(args)
      if expect_success:
        self.assertEqual(ret, 0)
      else:
        self.assertNotEqual(ret, 0)

      xml = ElementTree.parse(xml_fname).getroot()
    finally:
      os.remove(xml_fname)

    return xml

  def _run_test_and_get_xml(self, flag):
    """Runs xml_reporter_helper_test and returns an Element instance.

    Runs xml_reporter_helper_test in a new process so that it can
    exercise the entire test infrastructure, and easily test issues in
    the test fixture.

    Args:
      flag: flag to pass to xml_reporter_helper_test

    Returns:
      The Element instance of the XML output.
    """
    return self._run_helper_and_parse(flag, expect_success=True)

  def _run_test(self, flag, num_errors, num_failures, suites):
    """Runs the helper with a failure-inducing flag and validates its XML.

    Args:
      flag: flag to pass to xml_reporter_helper_test; expected to make the
          helper fail (non-zero exit status).
      num_errors: expected value of the root 'errors' attribute.
      num_failures: expected value of the root 'failures' attribute.
      suites: list of dicts describing the expected test suites. Each dict
          has a 'name' key and a 'cases' list; each case dict has 'name' and
          'classname' keys plus optional 'error'/'failure' message keys.

    Returns:
      The Element instance of the XML output.
    """
    xml = self._run_helper_and_parse(flag, expect_success=False)
    logging.info('xml output is:\n%s', ElementTree.tostring(xml))

    self.assertEqual(int(xml.attrib['errors']), num_errors)
    self.assertEqual(int(xml.attrib['failures']), num_failures)
    self.assertLen(xml, len(suites))
    # Sort both actual and expected suites/cases by name so the comparison
    # does not depend on execution or serialization order.
    actual_suites = sorted(
        xml.findall('testsuite'), key=lambda x: x.attrib['name'])
    suites = sorted(suites, key=lambda x: x['name'])
    for actual_suite, expected_suite in zip(actual_suites, suites):
      self.assertEqual(actual_suite.attrib['name'], expected_suite['name'])
      self.assertLen(actual_suite, len(expected_suite['cases']))
      actual_cases = sorted(actual_suite.findall('testcase'),
                            key=lambda x: x.attrib['name'])
      expected_cases = sorted(expected_suite['cases'], key=lambda x: x['name'])
      for actual_case, expected_case in zip(actual_cases, expected_cases):
        self.assertEqual(actual_case.attrib['name'], expected_case['name'])
        self.assertEqual(actual_case.attrib['classname'],
                         expected_case['classname'])
        if 'error' in expected_case:
          actual_error = actual_case.find('error')
          self.assertEqual(actual_error.attrib['message'],
                           expected_case['error'])
        if 'failure' in expected_case:
          actual_failure = actual_case.find('failure')
          self.assertEqual(actual_failure.attrib['message'],
                           expected_case['failure'])

    return xml

  def test_set_up_module_error(self):
    self._run_test(
        flag='--set_up_module_error',
        num_errors=1,
        num_failures=0,
        suites=[{'name': '__main__',
                 'cases': [{'name': 'setUpModule',
                            'classname': '__main__',
                            'error': 'setUpModule Errored!'}]}])

  def test_tear_down_module_error(self):
    self._run_test(
        flag='--tear_down_module_error',
        num_errors=1,
        num_failures=0,
        suites=[{'name': 'FailableTest',
                 'cases': [{'name': 'test',
                            'classname': '__main__.FailableTest'}]},
                {'name': '__main__',
                 'cases': [{'name': 'tearDownModule',
                            'classname': '__main__',
                            'error': 'tearDownModule Errored!'}]}])

  def test_set_up_class_error(self):
    self._run_test(
        flag='--set_up_class_error',
        num_errors=1,
        num_failures=0,
        suites=[{'name': 'FailableTest',
                 'cases': [{'name': 'setUpClass',
                            'classname': '__main__.FailableTest',
                            'error': 'setUpClass Errored!'}]}])

  def test_tear_down_class_error(self):
    self._run_test(
        flag='--tear_down_class_error',
        num_errors=1,
        num_failures=0,
        suites=[{'name': 'FailableTest',
                 'cases': [{'name': 'test',
                            'classname': '__main__.FailableTest'},
                           {'name': 'tearDownClass',
                            'classname': '__main__.FailableTest',
                            'error': 'tearDownClass Errored!'}]}])

  def test_set_up_error(self):
    self._run_test(
        flag='--set_up_error',
        num_errors=1,
        num_failures=0,
        suites=[{'name': 'FailableTest',
                 'cases': [{'name': 'test',
                            'classname': '__main__.FailableTest',
                            'error': 'setUp Errored!'}]}])

  def test_tear_down_error(self):
    self._run_test(
        flag='--tear_down_error',
        num_errors=1,
        num_failures=0,
        suites=[{'name': 'FailableTest',
                 'cases': [{'name': 'test',
                            'classname': '__main__.FailableTest',
                            'error': 'tearDown Errored!'}]}])

  def test_test_error(self):
    self._run_test(
        flag='--test_error',
        num_errors=1,
        num_failures=0,
        suites=[{'name': 'FailableTest',
                 'cases': [{'name': 'test',
                            'classname': '__main__.FailableTest',
                            'error': 'test Errored!'}]}])

  def test_set_up_failure(self):
    self._run_test(
        flag='--set_up_fail',
        num_errors=0,
        num_failures=1,
        suites=[{'name': 'FailableTest',
                 'cases': [{'name': 'test',
                            'classname': '__main__.FailableTest',
                            'failure': 'setUp Failed!'}]}])

  def test_tear_down_failure(self):
    self._run_test(
        flag='--tear_down_fail',
        num_errors=0,
        num_failures=1,
        suites=[{'name': 'FailableTest',
                 'cases': [{'name': 'test',
                            'classname': '__main__.FailableTest',
                            'failure': 'tearDown Failed!'}]}])

  def test_test_fail(self):
    self._run_test(
        flag='--test_fail',
        num_errors=0,
        num_failures=1,
        suites=[{'name': 'FailableTest',
                 'cases': [{'name': 'test',
                            'classname': '__main__.FailableTest',
                            'failure': 'test Failed!'}]}])

  def test_test_randomization_seed_logging(self):
    # We expect the resulting XML to start as follows:
    # <testsuites ...>
    #  <properties>
    #   <property name="test_randomize_ordering_seed" value="17" />
    # ...
    #
    # which we validate here.
    out = self._run_test_and_get_xml('--test_randomize_ordering_seed=17')
    expected_attrib = {'name': 'test_randomize_ordering_seed', 'value': '17'}
    property_attributes = [
        prop.attrib for prop in out.findall('./properties/property')]
    self.assertIn(expected_attrib, property_attributes)
1243
1244
# Standard test entry point: delegate test discovery and running to absltest.
if __name__ == '__main__':
  absltest.main()
1247