
fix an issue with large work counts...

My testing machine has 10 CPUs, so it never triggered the failure where
not all of the work was submitted.  We need to pop each completed work
item from the futures dict and keep re-running the for loop while there
are still futures to process; that way all of the work gets submitted
and processed.
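The fix boils down to a bounded submit/resubmit loop over a futures dict: seed the executor with a handful of jobs, then, as each future completes, pop it and submit the next pending item, repeating until both the pending list and the futures dict are empty. A rough standalone sketch of that pattern follows; process_all, do_work, and the work items are illustrative placeholders, not names from medashare:

	import concurrent.futures
	import os

	def process_all(pending_work, do_work):
		# Illustrative sketch of the submit/resubmit loop in the patch
		# below; do_work and the work items are placeholders.
		pending_work = list(pending_work)	# don't mutate the caller's list
		futures = {}
		results = {}

		with concurrent.futures.ThreadPoolExecutor() as executor:
			def submit_work(item):
				fut = executor.submit(do_work, item)
				futures[fut] = item

			# Seed the pool with a bounded first batch, mirroring the
			# os.cpu_count() + 2 limit in the patched code.
			for _ in range(min(len(pending_work), os.cpu_count() + 2)):
				submit_work(pending_work.pop(0))

			# as_completed() only ever yields the futures that existed when
			# it was called, so keep looping until the dict is drained; each
			# completion hands back a result and, if work remains, submits
			# one more item.
			while futures:
				for future in concurrent.futures.as_completed(futures):
					if pending_work:
						submit_work(pending_work.pop(0))

					item = futures.pop(future)
					results[item] = future.result()

		return results

Seeding with cpu_count() + 2 and resubmitting one item per completion keeps only a bounded number of futures outstanding; the outer while loop is what guarantees the later submissions are also waited on, since a single as_completed() pass only covers the futures that existed when it started.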
Branch: main
John-Mark Gurney committed 1 year ago
commit 108d1dff3d
1 changed file with 14 additions and 12 deletions

ui/medashare/btv/__init__.py  +14 -12

@@ -275,20 +275,21 @@ def validate(torrent, basedir, with_file_hashes=None):
 
 			futures[fut] = num, hash, i
 
-		for i in range(min(len(pending_work), os.cpu_count() + 1)):
+		for i in range(min(len(pending_work), os.cpu_count() + 2)):
 			submit_work(*pending_work.pop(0))
 
-		for future in concurrent.futures.as_completed(futures):
-			if pending_work:
-				submit_work(*pending_work.pop(0))
+		while futures:
+			for future in concurrent.futures.as_completed(futures):
+				if pending_work:
+					submit_work(*pending_work.pop(0))
 
-			future.result()
-			num, hash, i = futures[future]
+				future.result()
+				num, hash, i = futures.pop(future)
 
-			if hash.digest() == i:
-				valid[num] = True
-			else:
-				valid[num] = False
+				if hash.digest() == i:
+					valid[num] = True
+				else:
+					valid[num] = False
 
 	if files is None:
 		filesizes = { pathlib.PurePosixPath(info['name'].decode(
@@ -448,7 +449,8 @@ class _TestCases(unittest.TestCase):
 			list(origrglob(pathlib.PosixPath('.'), 'somedir'))
 
 		with unittest.mock.patch.object(pathlib.PosixPath, 'rglob',
-		    side_effect=_rglob_patch):
+		    side_effect=_rglob_patch), unittest.mock.patch.object(os,
+		    'cpu_count', side_effect=lambda: 3):
 			good, bad = validate_file(tf)
 
 		self.assertFalse(bad)
@@ -512,7 +514,7 @@ class _TestCases(unittest.TestCase):
 		with open(sd / '40plus1.txt', 'w') as fp:
 			fp.write('w')
 
-		good, bad, hashes = validate_file(tor, with_file_hashes=sha512)
+		good, bad, hashes = validate_file(tor, with_file_hashes=sha512)
 
 		self.assertEqual(bad, { sd / '40plus1.txt' })
 		self.assertFalse(good)
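The test hunk above pins os.cpu_count() at 3 so that the pending work outnumbers the initial min(len(pending_work), cpu_count + 2) batch and the new while-futures path is exercised even on a many-core machine (the author's 10-CPU box is what hid the bug). Roughly the same pinning trick in isolation; the test class and the commented-out call are placeholders for the real validate_file-based test:

	import os
	import unittest
	import unittest.mock

	class CpuCountPinnedTest(unittest.TestCase):
		def test_resubmission_path(self):
			# Pretend the machine has 3 CPUs so only min(len(pending_work),
			# 3 + 2) jobs are seeded and later items must go through the
			# resubmit loop.
			with unittest.mock.patch.object(os, 'cpu_count',
			    side_effect=lambda: 3):
				self.assertEqual(os.cpu_count(), 3)
				# ... call the code under test here, e.g. validate_file(tf)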

