Closes #15910: MD5 and SHA1 crash when "updated" with strings bigger than 2**32 bytes
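
The crash is a 32-bit length truncation in the C update paths (the
Py_ssize_t buffer length is narrowed to an unsigned int on the way
down), so a single update() call with 2**32 bytes or more could crash
the interpreter or produce a wrong digest. A minimal reproduction
sketch (Python 2; needs roughly 4 GiB of free memory for the test
string, and the expected digest is the same value the new test below
asserts):

    import hashlib

    h = hashlib.md5()
    h.update('A' * (2**32 + 5))   # one update() call past the 4 GiB mark
    print h.hexdigest()           # c9af2dff37468ce5dfee8f2cfc0a9c6d
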
diff --git a/Lib/test/test_hashlib.py b/Lib/test/test_hashlib.py
index 114b09b..e6365c5 100644
--- a/Lib/test/test_hashlib.py
+++ b/Lib/test/test_hashlib.py
@@ -167,6 +167,21 @@
% (name, hash_object_constructor,
computed, len(data), digest))
 
+ def check_update(self, name, data, digest):
+ constructors = self.constructors_to_test[name]
+ # 2 is for hashlib.name(...) and hashlib.new(name, ...)
+ self.assertGreaterEqual(len(constructors), 2)
+ for hash_object_constructor in constructors:
+ h = hash_object_constructor()
+ h.update(data)
+ computed = h.hexdigest()
+ self.assertEqual(
+ computed, digest,
+ "Hash algorithm %s using %s when updated returned hexdigest"
+ " %r for %d byte input data that should have hashed to %r."
+ % (name, hash_object_constructor,
+ computed, len(data), digest))
+
def check_unicode(self, algorithm_name):
# Unicode objects are not allowed as input.
expected = hashlib.new(algorithm_name, str(u'spam')).hexdigest()
@@ -200,6 +215,15 @@
except OverflowError:
pass # 32-bit arch
 
+ @precisionbigmemtest(size=_4G + 5, memuse=1)
+ def test_case_md5_huge_update(self, size):
+ if size == _4G + 5:
+ try:
+ self.check_update('md5', 'A'*size,
+ 'c9af2dff37468ce5dfee8f2cfc0a9c6d')
+ except OverflowError:
+ pass # 32-bit arch
+
@precisionbigmemtest(size=_4G - 1, memuse=1)
def test_case_md5_uintmax(self, size):
if size == _4G - 1:
@@ -237,6 +261,15 @@
except OverflowError:
pass # 32-bit arch
 
+ @precisionbigmemtest(size=_4G + 5, memuse=1)
+ def test_case_sha1_huge_update(self, size):
+ if size == _4G + 5:
+ try:
+ self.check_update('sha1', 'A'*size,
+ '87d745c50e6b2879ffa0fb2c930e9fbfe0dc9a5b')
+ except OverflowError:
+ pass # 32-bit arch
+
# use the examples from Federal Information Processing Standards
# Publication 180-2, Secure Hash Standard, 2002 August 1
# http://csrc.nist.gov/publications/fips/fips180-2/fips180-2.pdf
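
These bigmem tests are skipped unless regrtest is given a memory limit;
with size=_4G + 5 and memuse=1 each one materializes a string of just
over 4 GiB. On a 64-bit build with enough RAM, an invocation along
these lines runs them (-M/--memlimit enables the @precisionbigmemtest
cases):

    ./python -m test.regrtest -M 8G -v test_hashlib

On a 32-bit build, 'A'*size overflows before update() is even reached,
which is why each test deliberately swallows OverflowError
("pass # 32-bit arch").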