Mirror of https://github.com/arsenetar/dupeguru.git

Format all files with black, correcting line length

This commit is contained in:
2021-08-15 04:10:18 -05:00
parent 9446f37fad
commit ffe6b7047c
80 changed files with 517 additions and 970 deletions
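Note: the exact black invocation is not recorded in this diff, but the rejoined lines below (several over 100 characters) imply a line-length limit raised above black's default of 88, presumably something along the lines of:

    black --line-length 120 .

(The value 120 is an assumption; only the fact that the limit exceeds the longest rejoined line is certain from the diff.)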


@@ -33,8 +33,7 @@ CacheRow = namedtuple("CacheRow", "id path blocks mtime")
 class ShelveCache:
-    """A class to cache picture blocks in a shelve backend.
-    """
+    """A class to cache picture blocks in a shelve backend."""

     def __init__(self, db=None, readonly=False):
         self.istmp = db is None
@@ -81,9 +80,7 @@ class ShelveCache:
         self.shelve[wrap_id(rowid)] = wrap_path(path_str)

     def _compute_maxid(self):
-        return max(
-            (unwrap_id(k) for k in self.shelve if k.startswith("id:")), default=1
-        )
+        return max((unwrap_id(k) for k in self.shelve if k.startswith("id:")), default=1)

     def _get_new_id(self):
         self.maxid += 1
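A note on the rejoined `_compute_maxid` line: `default=1` is what keeps `max()` from raising `ValueError` when the shelve has no "id:" keys yet. A minimal sketch (the `unwrap_id` stand-in below is hypothetical, not dupeguru's implementation):

    # Hypothetical stand-in for unwrap_id: strip the "id:" prefix, parse the int.
    def unwrap_id(key):
        return int(key[3:])

    keys = ["id:3", "path:x", "id:7"]
    print(max((unwrap_id(k) for k in keys if k.startswith("id:")), default=1))  # -> 7
    print(max((unwrap_id(k) for k in [] if k.startswith("id:")), default=1))    # -> 1 (empty shelve)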


@@ -13,8 +13,7 @@ from .cache import string_to_colors, colors_to_string
 class SqliteCache:
-    """A class to cache picture blocks in a sqlite backend.
-    """
+    """A class to cache picture blocks in a sqlite backend."""

     def __init__(self, db=":memory:", readonly=False):
         # readonly is not used in the sqlite version of the cache
@@ -71,18 +70,14 @@ class SqliteCache:
         except sqlite.OperationalError:
             logging.warning("Picture cache could not set value for key %r", path_str)
         except sqlite.DatabaseError as e:
-            logging.warning(
-                "DatabaseError while setting value for key %r: %s", path_str, str(e)
-            )
+            logging.warning("DatabaseError while setting value for key %r: %s", path_str, str(e))

     def _create_con(self, second_try=False):
         def create_tables():
             logging.debug("Creating picture cache tables.")
             self.con.execute("drop table if exists pictures")
             self.con.execute("drop index if exists idx_path")
-            self.con.execute(
-                "create table pictures(path TEXT, mtime INTEGER, blocks TEXT)"
-            )
+            self.con.execute("create table pictures(path TEXT, mtime INTEGER, blocks TEXT)")
             self.con.execute("create index idx_path on pictures (path)")

         self.con = sqlite.connect(self.dbname, isolation_level=None)
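Context note: `isolation_level=None` puts the stdlib `sqlite3` connection into autocommit mode, so each statement in `create_tables` takes effect without an explicit `commit()`. A self-contained sketch of that behavior:

    import sqlite3

    con = sqlite3.connect(":memory:", isolation_level=None)  # autocommit mode
    con.execute("create table pictures(path TEXT, mtime INTEGER, blocks TEXT)")
    con.execute("insert into pictures values ('a.jpg', 0, '')")  # no commit() needed
    print(con.execute("select count(*) from pictures").fetchone())  # -> (1,)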
@@ -93,9 +88,7 @@ class SqliteCache:
         except sqlite.DatabaseError as e:  # corrupted db
             if second_try:
                 raise  # Something really strange is happening
-            logging.warning(
-                "Could not create picture cache because of an error: %s", str(e)
-            )
+            logging.warning("Could not create picture cache because of an error: %s", str(e))
             self.con.close()
             os.remove(self.dbname)
             self._create_con(second_try=True)
@@ -125,9 +118,7 @@ class SqliteCache:
             raise ValueError(path)

     def get_multiple(self, rowids):
-        sql = "select rowid, blocks from pictures where rowid in (%s)" % ",".join(
-            map(str, rowids)
-        )
+        sql = "select rowid, blocks from pictures where rowid in (%s)" % ",".join(map(str, rowids))
         cur = self.con.execute(sql)
         return ((rowid, string_to_colors(blocks)) for rowid, blocks in cur)
@@ -148,7 +139,5 @@ class SqliteCache:
                 continue
             todelete.append(rowid)
         if todelete:
-            sql = "delete from pictures where rowid in (%s)" % ",".join(
-                map(str, todelete)
-            )
+            sql = "delete from pictures where rowid in (%s)" % ",".join(map(str, todelete))
             self.con.execute(sql)
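Both rejoined queries in this file build their SQL `IN (...)` list by joining integer rowids into the statement string; this is safe here because the ids are internally generated integers, never user input. For illustration, the join expands like this:

    todelete = [3, 5, 8]
    sql = "delete from pictures where rowid in (%s)" % ",".join(map(str, todelete))
    print(sql)  # -> delete from pictures where rowid in (3,5,8)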


@@ -256,9 +256,7 @@ class TIFF_file:
             for j in range(count):
                 if type in {5, 10}:
                     # The type is either 5 or 10
-                    value_j = Fraction(
-                        self.s2n(offset, 4, signed), self.s2n(offset + 4, 4, signed)
-                    )
+                    value_j = Fraction(self.s2n(offset, 4, signed), self.s2n(offset + 4, 4, signed))
                 else:
                     # Not a fraction
                     value_j = self.s2n(offset, typelen, signed)
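Background on the rejoined `Fraction` call: EXIF types 5 and 10 are the unsigned and signed rational types, stored as two consecutive 4-byte integers (numerator, then denominator), hence the two `s2n` reads at `offset` and `offset + 4`. For example, an exposure time of 1/125 s (an illustrative value) round-trips as:

    from fractions import Fraction

    numerator, denominator = 1, 125  # as read from the two 4-byte fields
    print(Fraction(numerator, denominator))  # -> 1/125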
@@ -296,9 +294,7 @@ def get_fields(fp):
logging.debug("Exif header length: %d bytes", length)
data = fp.read(length - 8)
data_format = data[0]
logging.debug(
"%s format", {INTEL_ENDIAN: "Intel", MOTOROLA_ENDIAN: "Motorola"}[data_format]
)
logging.debug("%s format", {INTEL_ENDIAN: "Intel", MOTOROLA_ENDIAN: "Motorola"}[data_format])
T = TIFF_file(data)
# There may be more than one IFD per file, but we only read the first one because others are
# most likely thumbnails.


@@ -95,9 +95,7 @@ def prepare_pictures(pictures, cache_path, with_dimensions, j=job.nulljob):
                     picture.unicode_path,
                     picture.size,
                 )
-                if (
-                    picture.size < 10 * 1024 * 1024
-                ):  # We're really running out of memory
+                if picture.size < 10 * 1024 * 1024:  # We're really running out of memory
                     raise
     except MemoryError:
         logging.warning("Ran out of memory while preparing pictures")
@@ -106,9 +104,7 @@ def prepare_pictures(pictures, cache_path, with_dimensions, j=job.nulljob):
 def get_chunks(pictures):
-    min_chunk_count = (
-        multiprocessing.cpu_count() * 2
-    )  # have enough chunks to feed all subprocesses
+    min_chunk_count = multiprocessing.cpu_count() * 2  # have enough chunks to feed all subprocesses
     chunk_count = len(pictures) // DEFAULT_CHUNK_SIZE
     chunk_count = max(min_chunk_count, chunk_count)
     chunk_size = (len(pictures) // chunk_count) + 1
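The chunking arithmetic in this hunk, worked through with illustrative numbers (the `DEFAULT_CHUNK_SIZE` value below is an assumption; the real constant is defined elsewhere in this module):

    cpu_count = 4            # multiprocessing.cpu_count() on a 4-core machine
    DEFAULT_CHUNK_SIZE = 50  # assumed for the example
    picture_count = 1000

    min_chunk_count = cpu_count * 2                                          # 8
    chunk_count = max(min_chunk_count, picture_count // DEFAULT_CHUNK_SIZE)  # max(8, 20) = 20
    chunk_size = (picture_count // chunk_count) + 1                          # 51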
@@ -185,9 +181,7 @@ def getmatches(pictures, cache_path, threshold, match_scaled=False, j=job.nulljo
         j.set_progress(comparison_count, progress_msg)

     j = j.start_subjob([3, 7])
-    pictures = prepare_pictures(
-        pictures, cache_path, with_dimensions=not match_scaled, j=j
-    )
+    pictures = prepare_pictures(pictures, cache_path, with_dimensions=not match_scaled, j=j)
     j = j.start_subjob([9, 1], tr("Preparing for matching"))
     cache = get_cache(cache_path)
     id2picture = {}
@@ -231,12 +225,8 @@ def getmatches(pictures, cache_path, threshold, match_scaled=False, j=job.nulljo
             chunks,
             pictures,
         )  # some wiggle room for the next statements
-        logging.warning(
-            "Ran out of memory when scanning! We had %d matches.", len(matches)
-        )
-        del matches[
-            -len(matches) // 3 :
-        ]  # some wiggle room to ensure we don't run out of memory again.
+        logging.warning("Ran out of memory when scanning! We had %d matches.", len(matches))
+        del matches[-len(matches) // 3 :]  # some wiggle room to ensure we don't run out of memory again.
     pool.close()
     result = []
     myiter = j.iter_with_progress(
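On the rejoined `del matches[...]` line: `-len(matches) // 3` indexes the start of the last third of the list, so the `del` drops the most recently accumulated third in place to free memory. With a hypothetical list:

    matches = list(range(9))
    del matches[-len(matches) // 3 :]  # removes the last 3 of 9 elements
    print(matches)  # -> [0, 1, 2, 3, 4, 5]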