Mirror of https://github.com/python/cpython.git
Synced 2026-01-06 07:22:09 +00:00
gh-128657: Run test_hashlib with --parallel-threads (GH-129833)
* gh-128657: Run test_hashlib with `--parallel-threads`. This catches the race in `py_digest_by_name` that is fixed separately in gh-128886.
* Adjust assertion order
This commit is contained in:
parent 5ce70ad129
commit c1f352bf08
2 changed files with 21 additions and 23 deletions
Lib/test/libregrtest/tsan.py
@@ -33,6 +33,7 @@
 # the regression test runner with the `--parallel-threads` option enabled.
 TSAN_PARALLEL_TESTS = [
     'test_abc',
+    'test_hashlib',
 ]
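Tests listed in TSAN_PARALLEL_TESTS are picked up by the regression test runner and have each test method executed in several threads at once (roughly `./python -m test test_hashlib --parallel-threads=10`; the thread count is illustrative). The sketch below shows the shape of that stress using only the standard library; it is not the runner's implementation, just the same-body-in-many-threads pattern that exposes races such as the one in `py_digest_by_name`:

import hashlib
import threading

def exercise():
    # Stand-in for a test method body: constructing a digest by name is the
    # kind of per-call work that the py_digest_by_name race affected.
    hashlib.new("sha256", b"data").hexdigest()

# Run the same body concurrently, as --parallel-threads does per test method.
threads = [threading.Thread(target=exercise) for _ in range(8)]
for t in threads:
    t.start()
for t in threads:
    t.join()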
Lib/test/test_hashlib.py
@@ -13,13 +13,13 @@
 import os
 import sys
 import sysconfig
+import tempfile
 import threading
 import unittest
 import warnings
 from test import support
 from test.support import _4G, bigmemtest
 from test.support.import_helper import import_fresh_module
-from test.support import os_helper
 from test.support import requires_resource
 from test.support import threading_helper
 from http.client import HTTPException
@@ -414,21 +414,18 @@ def check_file_digest(self, name, data, hexdigest):
         digests = [name]
         digests.extend(self.constructors_to_test[name])
 
-        with open(os_helper.TESTFN, "wb") as f:
+        with tempfile.TemporaryFile() as f:
             f.write(data)
 
-        try:
             for digest in digests:
                 buf = io.BytesIO(data)
                 buf.seek(0)
                 self.assertEqual(
                     hashlib.file_digest(buf, digest).hexdigest(), hexdigest
                 )
-                with open(os_helper.TESTFN, "rb") as f:
-                    digestobj = hashlib.file_digest(f, digest)
+                f.seek(0)
+                digestobj = hashlib.file_digest(f, digest)
                 self.assertEqual(digestobj.hexdigest(), hexdigest)
-        finally:
-            os.unlink(os_helper.TESTFN)
 
     def check_no_unicode(self, algorithm_name):
         # Unicode objects are not allowed as input.
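The check_file_digest change above swaps a fixed, process-wide filename (os_helper.TESTFN) for an anonymous tempfile.TemporaryFile. That is what makes the test safe under `--parallel-threads`: when the same method runs in several threads, every thread would otherwise write, read, and unlink the same path, so one thread could truncate or delete the file while another is mid-read. A minimal illustration of the hazard and the fix, not part of the patch (the shared path name is made up):

import tempfile

SHARED_PATH = "hashlib_testfile"  # hypothetical stand-in for os_helper.TESTFN

def racy(data):
    # Unsafe when called from several threads at once: all callers share one
    # path, so a concurrent rewrite or unlink can corrupt what this call reads.
    with open(SHARED_PATH, "wb") as f:
        f.write(data)
    with open(SHARED_PATH, "rb") as f:
        return f.read()

def race_free(data):
    # Safe: each call gets its own unnamed temporary file, so concurrent
    # callers never see each other's data and no cleanup/unlink step is needed.
    with tempfile.TemporaryFile() as f:
        f.write(data)
        f.seek(0)
        return f.read()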
@@ -1172,30 +1169,30 @@ def test_normalized_name(self):
     def test_file_digest(self):
         data = b'a' * 65536
         d1 = hashlib.sha256()
-        self.addCleanup(os.unlink, os_helper.TESTFN)
-        with open(os_helper.TESTFN, "wb") as f:
+        with tempfile.NamedTemporaryFile(delete_on_close=False) as fp:
             for _ in range(10):
                 d1.update(data)
-                f.write(data)
+                fp.write(data)
+            fp.close()
 
-        with open(os_helper.TESTFN, "rb") as f:
-            d2 = hashlib.file_digest(f, hashlib.sha256)
+            with open(fp.name, "rb") as f:
+                d2 = hashlib.file_digest(f, hashlib.sha256)
 
-        self.assertEqual(d1.hexdigest(), d2.hexdigest())
-        self.assertEqual(d1.name, d2.name)
-        self.assertIs(type(d1), type(d2))
+            self.assertEqual(d1.hexdigest(), d2.hexdigest())
+            self.assertEqual(d1.name, d2.name)
+            self.assertIs(type(d1), type(d2))
 
+            with self.assertRaises(ValueError):
+                with open(fp.name, "r") as f:
+                    hashlib.file_digest(f, "sha256")
+
+            with self.assertRaises(ValueError):
+                with open(fp.name, "wb") as f:
+                    hashlib.file_digest(f, "sha256")
+
         with self.assertRaises(ValueError):
             hashlib.file_digest(None, "sha256")
-
-        with self.assertRaises(ValueError):
-            with open(os_helper.TESTFN, "r") as f:
-                hashlib.file_digest(f, "sha256")
-
-        with self.assertRaises(ValueError):
-            with open(os_helper.TESTFN, "wb") as f:
-                hashlib.file_digest(f, "sha256")
 
 
 if __name__ == "__main__":
     unittest.main()
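One detail in the rewritten test_file_digest: the data is written through the NamedTemporaryFile handle and then reopened by name, and on Windows that reopen requires the first handle to be closed. `tempfile.NamedTemporaryFile(delete_on_close=False)` (available since Python 3.12) defers deletion until the `with` block exits instead of deleting when the handle is closed, which is what makes the `fp.close()` / `open(fp.name, ...)` sequence work. A standalone sketch of that pattern:

import tempfile

with tempfile.NamedTemporaryFile(delete_on_close=False) as fp:
    fp.write(b"payload")
    fp.close()                      # release the write handle; the file is kept
    with open(fp.name, "rb") as f:  # reopen the same file by name
        assert f.read() == b"payload"
# leaving the with block removes the temporary file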