Mirror of https://github.com/python/cpython.git
Synced 2025-12-08 06:10:17 +00:00
Merge 9200d2e4b9 into 7099af8f5e
This commit is contained in: commit ef8aa3c779
5 changed files with 117 additions and 2 deletions
@@ -1505,6 +1505,25 @@ Test cases
       .. versionadded:: 3.2

+   .. attribute:: status
+
+      This attribute contains the resulting status of the single test run.
+      It can be used to check the test status in :meth:`tearDown` and during
+      the cleanup phase.  Possible values are ``None`` or any of the values
+      of the ``unittest.TestStatus`` enum:
+
+      ======================= =================================================
+      Enum name               Details
+      ======================= =================================================
+      ``SUCCESS``             Test succeeded.
+      ``UNEXPECTED_SUCCESS``  Test succeeded, but was expected to fail.
+      ``FAILED``              Test failed.
+      ``EXPECTED_FAILURE``    Test failed, as was expected.
+      ``SKIPPED``             Test was skipped.
+      ======================= =================================================
+
+      .. versionadded:: 3.14
+
    Testing frameworks can use the following methods to collect information on
    the test:
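A short usage sketch (not part of the diff; the class and attribute names other than ``status`` are illustrative, and it assumes the patch above is applied): ``tearDown`` can branch on the outcome of the test body, for example to keep diagnostics only for failed runs.

    import unittest

    class WidgetTest(unittest.TestCase):        # hypothetical example class
        def setUp(self):
            self.log = []                       # stand-in per-test resource

        def test_widget(self):
            self.log.append("ran")
            self.assertEqual(1 + 1, 2)

        def tearDown(self):
            # By the time tearDown runs, self.status reflects the outcome of
            # the test body (it stays None if run() never got that far).
            if self.status == unittest.TestStatus.FAILED:
                print("keeping log for post-mortem:", self.log)
            else:
                self.log.clear()

    if __name__ == "__main__":
        unittest.main()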
@@ -2340,6 +2340,76 @@ def test2(self):
         gc_collect()  # For PyPy or other GCs.
         self.assertEqual(MyException.ninstance, 0)
+
+    def test_status(self):
+        # Issue 68437 - unittest API for detecting test failure in cleanup/teardown
+        class Foo(unittest.TestCase):
+            def test_success(self):
+                pass
+            @unittest.expectedFailure
+            def test_unexpected_success(self):
+                pass
+            def test_failed(self):
+                self.assertTrue(False)
+            @unittest.expectedFailure
+            def test_expected_failure(self):
+                self.assertTrue(False)
+            @unittest.skip
+            def test_skipped(self):
+                self.assertTrue(False)
+            def test_skipped_exception(self):
+                raise unittest.SkipTest()
+            def test_subtest_success(self):
+                with self.subTest():
+                    pass
+                with self.subTest():
+                    pass
+            def test_subtest_failed1(self):
+                with self.subTest():
+                    self.assertTrue(True)
+                with self.subTest():
+                    self.assertTrue(False)
+            def test_subtest_failed2(self):
+                with self.subTest():
+                    self.assertTrue(False)
+                with self.subTest():
+                    self.assertTrue(True)
+
+        test = Foo('test_success')
+        test.run()
+        self.assertEqual(test.status, unittest.TestStatus.SUCCESS)
+
+        test = Foo('test_unexpected_success')
+        test.run()
+        self.assertEqual(test.status, unittest.TestStatus.UNEXPECTED_SUCCESS)
+
+        test = Foo('test_failed')
+        test.run()
+        self.assertEqual(test.status, unittest.TestStatus.FAILED)
+
+        test = Foo('test_expected_failure')
+        test.run()
+        self.assertEqual(test.status, unittest.TestStatus.EXPECTED_FAILURE)
+
+        test = Foo('test_skipped')
+        test.run()
+        self.assertEqual(test.status, unittest.TestStatus.SKIPPED)
+
+        test = Foo('test_skipped_exception')
+        test.run()
+        self.assertEqual(test.status, unittest.TestStatus.SKIPPED)
+
+        test = Foo('test_subtest_success')
+        test.run()
+        self.assertEqual(test.status, unittest.TestStatus.SUCCESS)
+
+        test = Foo('test_subtest_failed1')
+        test.run()
+        self.assertEqual(test.status, unittest.TestStatus.FAILED)
+
+        test = Foo('test_subtest_failed2')
+        test.run()
+        self.assertEqual(test.status, unittest.TestStatus.FAILED)


 if __name__ == "__main__":
     unittest.main()
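Worth noting about the two subtest-failure cases above: taken together they pin down the intended precedence rule. Once any subtest fails, the test's final status is ``FAILED``, regardless of whether the failing subtest runs before or after a passing one.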
@@ -49,14 +49,15 @@ def testMultiply(self):
            'defaultTestLoader', 'SkipTest', 'skip', 'skipIf', 'skipUnless',
            'expectedFailure', 'TextTestResult', 'installHandler',
            'registerResult', 'removeResult', 'removeHandler',
-           'addModuleCleanup', 'doModuleCleanups', 'enterModuleContext']
+           'addModuleCleanup', 'doModuleCleanups', 'enterModuleContext',
+           'TestStatus']

 __unittest = True

 from .result import TestResult
 from .case import (addModuleCleanup, TestCase, FunctionTestCase, SkipTest, skip,
                    skipIf, skipUnless, expectedFailure, doModuleCleanups,
-                   enterModuleContext)
+                   enterModuleContext, TestStatus)
 from .suite import BaseTestSuite, TestSuite  # noqa: F401
 from .loader import TestLoader, defaultTestLoader
 from .main import TestProgram, main  # noqa: F401
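Since ``TestStatus`` is a ``StrEnum`` (see its definition in the ``unittest.case`` hunks below) and is now exported at package level, its members behave as plain strings. A minimal sanity check, assuming the patch is applied:

    import unittest

    # StrEnum members are str instances and compare equal to their values.
    assert unittest.TestStatus.FAILED == "failed"
    assert isinstance(unittest.TestStatus.SKIPPED, str)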
@@ -3,6 +3,7 @@
 import sys
 import functools
 import difflib
+import enum
 import pprint
 import re
 import warnings
@@ -42,24 +43,35 @@ class _UnexpectedSuccess(Exception):
     """


+class TestStatus(enum.StrEnum):
+    SUCCESS = "success"
+    UNEXPECTED_SUCCESS = "unexpected_success"
+    FAILED = "failed"
+    EXPECTED_FAILURE = "expected_failure"
+    SKIPPED = "skipped"
+
+
 class _Outcome(object):
     def __init__(self, result=None):
         self.expecting_failure = False
         self.result = result
         self.result_supports_subtests = hasattr(result, "addSubTest")
         self.success = True
+        self.status = None
         self.expectedFailure = None

     @contextlib.contextmanager
     def testPartExecutor(self, test_case, subTest=False):
         old_success = self.success
         self.success = True
+        status = TestStatus.SUCCESS
         try:
             yield
         except KeyboardInterrupt:
             raise
         except SkipTest as e:
             self.success = False
+            status = TestStatus.SKIPPED
             _addSkip(self.result, test_case, str(e))
         except _ShouldStop:
             pass
@@ -67,8 +79,10 @@ def testPartExecutor(self, test_case, subTest=False):
             exc_info = sys.exc_info()
             if self.expecting_failure:
                 self.expectedFailure = exc_info
+                status = TestStatus.EXPECTED_FAILURE
             else:
                 self.success = False
+                status = TestStatus.FAILED
                 if subTest:
                     self.result.addSubTest(test_case.test_case, test_case, exc_info)
                 else:
@@ -77,10 +91,15 @@ def testPartExecutor(self, test_case, subTest=False):
             # exc_info -> frame -> exc_info
             exc_info = None
         else:
+            if self.expecting_failure:
+                status = TestStatus.UNEXPECTED_SUCCESS
             if subTest and self.success:
                 self.result.addSubTest(test_case.test_case, test_case, None)
         finally:
             self.success = self.success and old_success
+            # If any of the previous subTests failed, keep the failed status
+            if self.status not in (TestStatus.FAILED, TestStatus.EXPECTED_FAILURE):
+                self.status = status


 def _addSkip(result, test_case, reason):
@@ -435,6 +454,7 @@ def __init__(self, methodName='runTest'):
         self._testMethodDoc = testMethod.__doc__
         self._cleanups = []
         self._subtest = None
+        self.status = None

         # Map types to custom assertEqual functions that will compare
         # instances of said type in more detail to generate a more useful
@@ -647,6 +667,7 @@ def run(self, result=None):
             # If the class or method was skipped.
             skip_why = (getattr(self.__class__, '__unittest_skip_why__', '')
                         or getattr(testMethod, '__unittest_skip_why__', ''))
+            self.status = TestStatus.SKIPPED
             _addSkip(result, self, skip_why)
             return result
@@ -665,6 +686,7 @@ def run(self, result=None):
                 outcome.expecting_failure = expecting_failure
                 with outcome.testPartExecutor(self):
                     self._callTestMethod(testMethod)
+                self.status = outcome.status
                 outcome.expecting_failure = False
                 with outcome.testPartExecutor(self):
                     self._callTearDown()
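One detail worth calling out in the ``run()`` hunk above: ``self.status = outcome.status`` is assigned after the test-method executor but before ``self._callTearDown()``, which is exactly what makes the status visible inside ``tearDown``. Cleanups registered with ``addCleanup`` run later still, so they should see it too. A minimal sketch with illustrative names, assuming the patch is applied:

    import unittest

    class CleanupDemo(unittest.TestCase):       # hypothetical example class
        def test_something(self):
            # Cleanups execute after tearDown, so the final status is set by then.
            self.addCleanup(lambda: print("status at cleanup:", self.status))
            self.assertEqual(2 * 2, 4)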
@@ -0,0 +1,3 @@
+Add a new attribute ``status`` to :class:`unittest.TestCase`, holding a
+member of the (also new) ``unittest.TestStatus`` enum. It can be used to
+check the test result during the ``tearDown`` / cleanup phase.