gh-68437: Add TestCase.status attribute
This attribute can be used in the teardown/cleanup phase to check the status of the test being run.
This commit is contained in:
parent 0759cecd9d
commit 9200d2e4b9
5 changed files with 117 additions and 2 deletions
@@ -1442,6 +1442,25 @@ Test cases
 
       .. versionadded:: 3.2
 
+   .. attribute:: status
+
+      This attribute contains the resulting status of a single test run.
+      It can be used to check the test status in :meth:`tearDown` and in
+      the cleanup phase.  Possible values are ``None`` or any of the
+      values stored in the ``unittest.TestStatus`` enum:
+
+      ======================= =================================================
+      Enum name               Details
+      ======================= =================================================
+      ``SUCCESS``             Test succeeded.
+      ``UNEXPECTED_SUCCESS``  Test succeeded, but was expected to fail.
+      ``FAILED``              Test failed.
+      ``EXPECTED_FAILURE``    Test failed, as was expected.
+      ``SKIPPED``             Test was skipped.
+      ======================= =================================================
+
+      .. versionadded:: 3.14
+
   Testing frameworks can use the following methods to collect information on
   the test:
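For illustration, here is a minimal sketch of how the documented attribute could be consulted from tearDown(), assuming this patch is applied (per the versionadded directive, the attribute would not exist before Python 3.14). The ExampleTest class and its scratch-directory handling are invented for this example and are not part of the commit.

    import shutil
    import tempfile
    import unittest


    class ExampleTest(unittest.TestCase):
        # Illustrative only: keep scratch files around when the test failed.

        def setUp(self):
            # Scratch directory the test can write into.
            self.workdir = tempfile.mkdtemp(prefix="example-")

        def tearDown(self):
            # With this patch, self.status is assigned before tearDown() runs,
            # so the outcome of the test method itself is already visible here.
            if self.status == unittest.TestStatus.FAILED:
                print(f"keeping {self.workdir} for post-mortem inspection")
            else:
                shutil.rmtree(self.workdir, ignore_errors=True)

        def test_writes_a_file(self):
            with open(f"{self.workdir}/out.txt", "w") as f:
                f.write("hello")
            self.assertTrue(True)


    if __name__ == "__main__":
        unittest.main()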
@@ -2016,6 +2016,76 @@ def test2(self):
             gc_collect()  # For PyPy or other GCs.
             self.assertEqual(MyException.ninstance, 0)
 
+    def test_status(self):
+        # Issue 68437 - unittest API for detecting test failure in cleanup/teardown
+        class Foo(unittest.TestCase):
+            def test_success(self):
+                pass
+
+            @unittest.expectedFailure
+            def test_unexpected_success(self):
+                pass
+
+            def test_failed(self):
+                self.assertTrue(False)
+
+            @unittest.expectedFailure
+            def test_expected_failure(self):
+                self.assertTrue(False)
+
+            @unittest.skip
+            def test_skipped(self):
+                self.assertTrue(False)
+
+            def test_skipped_exception(self):
+                raise unittest.SkipTest()
+
+            def test_subtest_success(self):
+                with self.subTest():
+                    pass
+                with self.subTest():
+                    pass
+
+            def test_subtest_failed1(self):
+                with self.subTest():
+                    self.assertTrue(True)
+                with self.subTest():
+                    self.assertTrue(False)
+
+            def test_subtest_failed2(self):
+                with self.subTest():
+                    self.assertTrue(False)
+                with self.subTest():
+                    self.assertTrue(True)
+
+        test = Foo('test_success')
+        test.run()
+        self.assertEqual(test.status, unittest.TestStatus.SUCCESS)
+
+        test = Foo('test_unexpected_success')
+        test.run()
+        self.assertEqual(test.status, unittest.TestStatus.UNEXPECTED_SUCCESS)
+
+        test = Foo('test_failed')
+        test.run()
+        self.assertEqual(test.status, unittest.TestStatus.FAILED)
+
+        test = Foo('test_expected_failure')
+        test.run()
+        self.assertEqual(test.status, unittest.TestStatus.EXPECTED_FAILURE)
+
+        test = Foo('test_skipped')
+        test.run()
+        self.assertEqual(test.status, unittest.TestStatus.SKIPPED)
+
+        test = Foo('test_skipped_exception')
+        test.run()
+        self.assertEqual(test.status, unittest.TestStatus.SKIPPED)
+
+        test = Foo('test_subtest_success')
+        test.run()
+        self.assertEqual(test.status, unittest.TestStatus.SUCCESS)
+
+        test = Foo('test_subtest_failed1')
+        test.run()
+        self.assertEqual(test.status, unittest.TestStatus.FAILED)
+
+        test = Foo('test_subtest_failed2')
+        test.run()
+        self.assertEqual(test.status, unittest.TestStatus.FAILED)
+
 
 if __name__ == "__main__":
     unittest.main()
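The test above drives each Foo test individually via test.run(). A testing framework could use the same attribute to tally outcomes across a whole suite; the sketch below is a hypothetical helper built on top of this patch, and summarize_statuses / iter_tests are invented names, not APIs added by the commit.

    import collections
    import unittest


    def iter_tests(suite):
        # Flatten a TestSuite (which may nest further suites) into TestCase instances.
        for item in suite:
            if isinstance(item, unittest.TestSuite):
                yield from iter_tests(item)
            else:
                yield item


    def summarize_statuses(suite):
        # Hypothetical helper, not part of this commit: run every test and
        # tally the per-test ``status`` values introduced by the patch.
        result = unittest.TestResult()
        tests = list(iter_tests(suite))
        for test in tests:
            test.run(result)
        return collections.Counter(test.status for test in tests)

With the Foo class from the test above, summarize_statuses(unittest.defaultTestLoader.loadTestsFromTestCase(Foo)) should report three FAILED, two SUCCESS, two SKIPPED, and one each of EXPECTED_FAILURE and UNEXPECTED_SUCCESS.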
@@ -49,14 +49,15 @@ def testMultiply(self):
            'defaultTestLoader', 'SkipTest', 'skip', 'skipIf', 'skipUnless',
            'expectedFailure', 'TextTestResult', 'installHandler',
            'registerResult', 'removeResult', 'removeHandler',
-           'addModuleCleanup', 'doModuleCleanups', 'enterModuleContext']
+           'addModuleCleanup', 'doModuleCleanups', 'enterModuleContext',
+           'TestStatus']
 
 __unittest = True
 
 from .result import TestResult
 from .case import (addModuleCleanup, TestCase, FunctionTestCase, SkipTest, skip,
                    skipIf, skipUnless, expectedFailure, doModuleCleanups,
-                   enterModuleContext)
+                   enterModuleContext, TestStatus)
 from .suite import BaseTestSuite, TestSuite  # noqa: F401
 from .loader import TestLoader, defaultTestLoader
 from .main import TestProgram, main  # noqa: F401
@@ -3,6 +3,7 @@
 import sys
 import functools
 import difflib
+import enum
 import pprint
 import re
 import warnings
@@ -42,24 +43,35 @@ class _UnexpectedSuccess(Exception):
     """
 
 
+class TestStatus(enum.StrEnum):
+    SUCCESS = "success"
+    UNEXPECTED_SUCCESS = "unexpected_success"
+    FAILED = "failed"
+    EXPECTED_FAILURE = "expected_failure"
+    SKIPPED = "skipped"
+
+
 class _Outcome(object):
     def __init__(self, result=None):
         self.expecting_failure = False
         self.result = result
         self.result_supports_subtests = hasattr(result, "addSubTest")
         self.success = True
+        self.status = None
         self.expectedFailure = None
 
     @contextlib.contextmanager
     def testPartExecutor(self, test_case, subTest=False):
         old_success = self.success
         self.success = True
+        status = TestStatus.SUCCESS
         try:
             yield
         except KeyboardInterrupt:
             raise
         except SkipTest as e:
             self.success = False
+            status = TestStatus.SKIPPED
             _addSkip(self.result, test_case, str(e))
         except _ShouldStop:
             pass
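Because TestStatus derives from enum.StrEnum (available since Python 3.11), its members behave like plain strings, which makes them convenient to compare, log, or serialize. A small demonstration, re-declaring the enum exactly as in the hunk above rather than importing the patched unittest:

    import enum


    # Re-declared locally only to illustrate StrEnum behaviour; with the patch
    # applied this would be unittest.TestStatus.
    class TestStatus(enum.StrEnum):
        SUCCESS = "success"
        UNEXPECTED_SUCCESS = "unexpected_success"
        FAILED = "failed"
        EXPECTED_FAILURE = "expected_failure"
        SKIPPED = "skipped"


    # StrEnum members are real strings: they compare equal to plain str values
    # and format as their value, so they drop cleanly into logs or JSON.
    assert TestStatus.FAILED == "failed"
    assert f"outcome: {TestStatus.SKIPPED}" == "outcome: skipped"
    assert TestStatus("expected_failure") is TestStatus.EXPECTED_FAILURE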
@@ -67,8 +79,10 @@ def testPartExecutor(self, test_case, subTest=False):
             exc_info = sys.exc_info()
             if self.expecting_failure:
                 self.expectedFailure = exc_info
+                status = TestStatus.EXPECTED_FAILURE
             else:
                 self.success = False
+                status = TestStatus.FAILED
                 if subTest:
                     self.result.addSubTest(test_case.test_case, test_case, exc_info)
                 else:
@@ -77,10 +91,15 @@ def testPartExecutor(self, test_case, subTest=False):
                 # exc_info -> frame -> exc_info
                 exc_info = None
         else:
+            if self.expecting_failure:
+                status = TestStatus.UNEXPECTED_SUCCESS
             if subTest and self.success:
                 self.result.addSubTest(test_case.test_case, test_case, None)
         finally:
             self.success = self.success and old_success
+            # If any of the previous subTests failed, keep the failed status
+            if self.status not in (TestStatus.FAILED, TestStatus.EXPECTED_FAILURE):
+                self.status = status
 
 
 def _addSkip(result, test_case, reason):
@@ -428,6 +447,7 @@ def __init__(self, methodName='runTest'):
         self._testMethodDoc = testMethod.__doc__
         self._cleanups = []
         self._subtest = None
+        self.status = None
 
         # Map types to custom assertEqual functions that will compare
         # instances of said type in more detail to generate a more useful
@@ -640,6 +660,7 @@ def run(self, result=None):
             # If the class or method was skipped.
             skip_why = (getattr(self.__class__, '__unittest_skip_why__', '')
                         or getattr(testMethod, '__unittest_skip_why__', ''))
+            self.status = TestStatus.SKIPPED
             _addSkip(result, self, skip_why)
             return result
 
@@ -658,6 +679,7 @@ def run(self, result=None):
             outcome.expecting_failure = expecting_failure
             with outcome.testPartExecutor(self):
                 self._callTestMethod(testMethod)
+            self.status = outcome.status
             outcome.expecting_failure = False
             with outcome.testPartExecutor(self):
                 self._callTearDown()
@@ -0,0 +1,3 @@
+Add a new attribute ``status`` to :class:`unittest.TestCase`, containing a
+member of the (also new) ``unittest.TestStatus`` enum.  It can be used to
+check the test result in the ``tearDown`` / cleanup phase.