From d4d2d2e367b93b455101a74fb7dbe35426fabb4b Mon Sep 17 00:00:00 2001
From: John-Mark Gurney
Date: Wed, 27 Jul 2022 16:27:18 -0700
Subject: [PATCH] minor changes to sorted, and make hashing large files work..

---
 ui/medashare/cli.py | 21 ++++++++++++---------
 1 file changed, 12 insertions(+), 9 deletions(-)

diff --git a/ui/medashare/cli.py b/ui/medashare/cli.py
index 3c5683e..97ec07c 100644
--- a/ui/medashare/cli.py
+++ b/ui/medashare/cli.py
@@ -29,9 +29,7 @@ _validhashes = set([ 'sha256', 'sha512' ])
 _hashlengths = { len(getattr(hashlib, x)().hexdigest()): x for x in _validhashes }
 
 def _iterdictlist(obj):
-	itms = list(obj.items())
-	itms.sort()
-	for k, v in itms:
+	for k, v in sorted(obj.items()):
 		if isinstance(v, list):
 			v = v[:]
 			v.sort()
@@ -200,10 +198,7 @@ class CanonicalCoder(pasn1.ASN1DictCoder):
 	def enc_dict(self, obj, **kwargs):
 		class FakeIter:
 			def items(self):
-				itms = list(obj.items())
-				itms.sort()
-
-				return iter(itms)
+				return iter(sorted(obj.items()))
 
 		return pasn1.ASN1DictCoder.enc_dict(self, FakeIter(), **kwargs)
 
@@ -478,11 +473,19 @@ class ObjectStore(object):
 		else:
 			raise KeyError('unable to find metadata for file')
 
+def _readfp(fp):
+	while True:
+		r = fp.read(64*1024)
+		if r == b'':
+			return
+
+		yield r
+
 def _hashfile(fname):
 	hash = getattr(hashlib, _defaulthash)()
 	with open(fname, 'rb') as fp:
-		r = fp.read()
-		hash.update(r)
+		for r in _readfp(fp):
+			hash.update(r)
 
 	return '%s:%s' % (_defaulthash, hash.hexdigest())
 
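
Note: the standalone sketch below illustrates the chunked-hashing pattern the last hunk
introduces, outside the context of cli.py. The 64 KiB chunk size and the _readfp/_hashfile
names come from the patch; the 'sha512' value standing in for _defaulthash is an assumption,
since that constant is defined elsewhere in cli.py.

    # Minimal sketch of incremental hashing, assuming 'sha512' as the default hash.
    import hashlib

    _defaulthash = 'sha512'  # assumed; cli.py defines its own _defaulthash

    def _readfp(fp):
        # Yield the file in 64 KiB chunks instead of reading it whole,
        # so hashing a large file does not hold it all in memory.
        while True:
            r = fp.read(64 * 1024)
            if r == b'':
                return

            yield r

    def _hashfile(fname):
        # Feed each chunk to the hash object as it is read.
        hash = getattr(hashlib, _defaulthash)()
        with open(fname, 'rb') as fp:
            for r in _readfp(fp):
                hash.update(r)

        return '%s:%s' % (_defaulthash, hash.hexdigest())

    if __name__ == '__main__':
        import sys
        print(_hashfile(sys.argv[1]))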