mirror of
https://github.com/arsenetar/dupeguru.git
synced 2025-03-10 05:34:36 +00:00
Work around a crash in picture analysis where there's sometimes a null path, and improve the debug logging calls in the matchblock unit.
This commit is contained in:
parent
03712860b1
commit
2daaaee893
@ -58,8 +58,16 @@ def prepare_pictures(pictures, cache_path, with_dimensions, j=job.nulljob):
|
|||||||
prepared = [] # only pictures for which there was no error getting blocks
|
prepared = [] # only pictures for which there was no error getting blocks
|
||||||
try:
|
try:
|
||||||
for picture in j.iter_with_progress(pictures, tr("Analyzed %d/%d pictures")):
|
for picture in j.iter_with_progress(pictures, tr("Analyzed %d/%d pictures")):
|
||||||
|
if not picture.path:
|
||||||
|
# XXX Find the root cause of this. I've received reports of crashes where we had
|
||||||
|
# "Analyzing picture at " (without a path) in the debug log. It was an iPhoto scan.
|
||||||
|
# For now, I'm simply working around the crash by ignoring those, but it would be
|
||||||
|
# interesting to know exactly why this happens. I'm suspecting a malformed
|
||||||
|
# entry in iPhoto library.
|
||||||
|
logging.warning("We have a picture with a null path here")
|
||||||
|
continue
|
||||||
picture.unicode_path = str(picture.path)
|
picture.unicode_path = str(picture.path)
|
||||||
logging.debug("Analyzing picture at {}".format(picture.unicode_path))
|
logging.debug("Analyzing picture at %s", picture.unicode_path)
|
||||||
if with_dimensions:
|
if with_dimensions:
|
||||||
picture.dimensions # pre-read dimensions
|
picture.dimensions # pre-read dimensions
|
||||||
try:
|
try:
|
||||||
@ -70,7 +78,7 @@ def prepare_pictures(pictures, cache_path, with_dimensions, j=job.nulljob):
|
|||||||
except (IOError, ValueError) as e:
|
except (IOError, ValueError) as e:
|
||||||
logging.warning(str(e))
|
logging.warning(str(e))
|
||||||
except MemoryError:
|
except MemoryError:
|
||||||
logging.warning('Ran out of memory while reading %s of size %d' % (picture.unicode_path, picture.size))
|
logging.warning("Ran out of memory while reading %s of size %d", picture.unicode_path, picture.size)
|
||||||
if picture.size < 10 * 1024 * 1024: # We're really running out of memory
|
if picture.size < 10 * 1024 * 1024: # We're really running out of memory
|
||||||
raise
|
raise
|
||||||
except MemoryError:
|
except MemoryError:
|
||||||
@ -84,8 +92,8 @@ def get_chunks(pictures):
|
|||||||
chunk_count = max(min_chunk_count, chunk_count)
|
chunk_count = max(min_chunk_count, chunk_count)
|
||||||
chunk_size = (len(pictures) // chunk_count) + 1
|
chunk_size = (len(pictures) // chunk_count) + 1
|
||||||
chunk_size = max(MIN_CHUNK_SIZE, chunk_size)
|
chunk_size = max(MIN_CHUNK_SIZE, chunk_size)
|
||||||
logging.info("Creating {} chunks with a chunk size of {} for {} pictures".format(
|
logging.info("Creating %d chunks with a chunk size of %d for %d pictures", chunk_count,
|
||||||
chunk_count, chunk_size, len(pictures)))
|
chunk_size, len(pictures))
|
||||||
chunks = [pictures[i:i+chunk_size] for i in range(0, len(pictures), chunk_size)]
|
chunks = [pictures[i:i+chunk_size] for i in range(0, len(pictures), chunk_size)]
|
||||||
return chunks
|
return chunks
|
||||||
|
|
||||||
|
Loading…
x
Reference in New Issue
Block a user