Fix generate_checksum routine to avoid MemoryError crash (#649)

with very large files on a 32-bit OS

Issue #10690
Paul Culley 2018-09-03 19:41:27 -05:00 committed by Sam Manzi
parent fd399323a6
commit 2aec83f057


@@ -251,9 +251,15 @@ def create_checksum(full_path):
     Create a md5 hash for the given file.
     """
     full_path = os.path.normpath(full_path)
+    md5 = hashlib.md5()
     try:
         with open(full_path, 'rb') as media_file:
-            md5sum = hashlib.md5(media_file.read()).hexdigest()
+            while True:
+                buf = media_file.read(65536)
+                if not buf:
+                    break
+                md5.update(buf)
+        md5sum = md5.hexdigest()
     except IOError:
         md5sum = ''
     except UnicodeEncodeError:
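
For reference, below is a minimal, self-contained sketch of the patched routine as it reads after this change. The lines beyond the hunk shown above are assumptions: the body of the truncated UnicodeEncodeError handler and the final return are taken to mirror the IOError branch, and may differ from the actual file.

# Sketch of the patched create_checksum, based on the hunk above.
# The code after "except UnicodeEncodeError:" is assumed, not shown in the diff.
import hashlib
import os

def create_checksum(full_path):
    """
    Create a md5 hash for the given file.
    """
    full_path = os.path.normpath(full_path)
    md5 = hashlib.md5()
    try:
        with open(full_path, 'rb') as media_file:
            # Read in 64 KiB chunks so a very large file never has to be
            # loaded into memory at once, which is what triggered the
            # MemoryError on 32-bit systems.
            while True:
                buf = media_file.read(65536)
                if not buf:
                    break
                md5.update(buf)
        md5sum = md5.hexdigest()
    except IOError:
        md5sum = ''
    except UnicodeEncodeError:
        md5sum = ''  # assumed to match the IOError branch
    return md5sum

The key change is that the hash is built incrementally with md5.update() over fixed-size chunks instead of hashing the result of a single media_file.read(), so peak memory use stays constant regardless of file size.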