mirror of
https://github.com/arsenetar/dupeguru.git
synced 2025-03-10 13:44:37 +00:00
Limit the size of arguments sent to multiprocessing because oversized arguments could cause crashes.
This commit is contained in:
parent
7b1a1ff4bb
commit
32d88e9249
@ -111,8 +111,13 @@ def getmatches(pictures, cache_path, threshold=75, match_scaled=False, j=job.nul
|
|||||||
others = [pic for pic in others if not pic.is_ref]
|
others = [pic for pic in others if not pic.is_ref]
|
||||||
if others:
|
if others:
|
||||||
cache_ids = [f.cache_id for f in others]
|
cache_ids = [f.cache_id for f in others]
|
||||||
args = (ref.cache_id, cache_ids, cache_path, threshold)
|
# We limit the number of cache_ids we send for multi-processing because otherwise, we
|
||||||
|
# might get an error saying "String or BLOB exceeded size limit"
|
||||||
|
ARG_LIMIT = 1000
|
||||||
|
while cache_ids:
|
||||||
|
args = (ref.cache_id, cache_ids[:ARG_LIMIT], cache_path, threshold)
|
||||||
async_results.append(pool.apply_async(async_compare, args))
|
async_results.append(pool.apply_async(async_compare, args))
|
||||||
|
cache_ids = cache_ids[ARG_LIMIT:]
|
||||||
if len(async_results) > RESULTS_QUEUE_LIMIT:
|
if len(async_results) > RESULTS_QUEUE_LIMIT:
|
||||||
result = async_results.pop(0)
|
result = async_results.pop(0)
|
||||||
matches.extend(result.get())
|
matches.extend(result.get())
|
||||||
|
Loading…
x
Reference in New Issue
Block a user