Mirror of https://github.com/arsenetar/dupeguru.git (synced 2026-01-25 08:01:39 +00:00)
Compare commits

6 commits:

- 573d088088
- 75b08125c0
- 20320f539f
- 24771af955
- 2bfe9960f1
- 215bcb0d76

.hgignore (12 changed lines)
@@ -8,8 +8,6 @@ run.py
 *.pyd
 *.waf*
 .lock-waf*
-*.xcodeproj/xcuserdata
-*.xcodeproj/project.xcworkspace/xcuserdata
 conf.json
 build
 dist
@@ -18,16 +16,6 @@ installer_tmp-cache
 cocoa/autogen
 cocoa/*/Info.plist
 cocoa/*/build
-cocoa/*/*.app
-cs.lproj
-de.lproj
-fr.lproj
-it.lproj
-hy.lproj
-ru.lproj
-uk.lproj
-zh_CN.lproj
-pt_BR.lproj
 qt/base/*_rc.py
 help/*/conf.py
 help/*/changelog.rst

.hgtags (1 changed line)
@@ -79,3 +79,4 @@ e772f1de86744999ffbbe5845554417965b1dfba me6.4.1
 c8a9a4d355927e509f514308c82306192bc71f92 pe2.6.0
 a618e954f01e4bbdbe9a03e5667a67d62be995a7 me6.4.2
 0f18c4498a6c7529bf77207db70aed8a5ec96ee4 se3.6.0
+8f478379ec62fd1329d527aafb1ab0f2410f3a79 me6.5.0

@@ -22,6 +22,7 @@ http://www.hardcoded.net/licenses/bsd_license
 {@"size", 63, 16, 0, YES, nil},
 {@"extension", 40, 16, 0, YES, nil},
 {@"dimensions", 73, 16, 0, YES, nil},
+{@"exif_timestamp", 120, 16, 0, YES, nil},
 {@"mtime", 120, 16, 0, YES, nil},
 {@"percentage", 58, 16, 0, YES, nil},
 {@"dupe_count", 80, 16, 0, YES, nil},

@@ -1,2 +1,2 @@
-__version__ = '2.6.0'
+__version__ = '2.7.0'
 __appname__ = 'dupeGuru Picture Edition'

@@ -24,7 +24,7 @@ def get_delta_dimensions(value, ref_value):
 
 class DupeGuru(DupeGuruBase):
     NAME = __appname__
-    METADATA_TO_READ = ['size', 'mtime', 'dimensions']
+    METADATA_TO_READ = ['size', 'mtime', 'dimensions', 'exif_timestamp']
 
     def __init__(self, view, appdata):
         DupeGuruBase.__init__(self, view, appdata)
@@ -54,6 +54,7 @@ class DupeGuru(DupeGuruBase):
             'size': format_size(size, 0, 1, False),
             'extension': dupe.extension,
             'dimensions': format_dimensions(dimensions),
+            'exif_timestamp': dupe.exif_timestamp,
             'mtime': format_timestamp(mtime, delta and m),
             'percentage': format_perc(percentage),
             'dupe_count': format_dupe_count(dupe_count),

@@ -7,6 +7,7 @@
 # http://www.hardcoded.net/licenses/bsd_license
 
 import os
+import os.path as op
 import logging
 import sqlite3 as sqlite
 
@@ -30,7 +31,7 @@ def colors_to_string(colors):
 # result.append((number >> 16, (number >> 8) & 0xff, number & 0xff))
 # return result
 
-class Cache(object):
+class Cache:
     """A class to cache picture blocks.
     """
     def __init__(self, db=':memory:'):
@@ -72,29 +73,34 @@ class Cache(object):
         result = self.con.execute(sql).fetchall()
         return result[0][0]
 
-    def __setitem__(self, key, value):
-        value = colors_to_string(value)
-        if key in self:
-            sql = "update pictures set blocks = ? where path = ?"
+    def __setitem__(self, path_str, blocks):
+        blocks = colors_to_string(blocks)
+        if op.exists(path_str):
+            mtime = int(os.stat(path_str).st_mtime)
         else:
-            sql = "insert into pictures(blocks,path) values(?,?)"
+            mtime = 0
+        if path_str in self:
+            sql = "update pictures set blocks = ?, mtime = ? where path = ?"
+        else:
+            sql = "insert into pictures(blocks,mtime,path) values(?,?,?)"
         try:
-            self.con.execute(sql, [value, key])
+            self.con.execute(sql, [blocks, mtime, path_str])
         except sqlite.OperationalError:
-            logging.warning('Picture cache could not set value for key %r', key)
+            logging.warning('Picture cache could not set value for key %r', path_str)
         except sqlite.DatabaseError as e:
-            logging.warning('DatabaseError while setting value for key %r: %s', key, str(e))
+            logging.warning('DatabaseError while setting value for key %r: %s', path_str, str(e))
 
     def _create_con(self, second_try=False):
        def create_tables():
-            sql = "create table pictures(path TEXT, blocks TEXT)"
-            self.con.execute(sql);
-            sql = "create index idx_path on pictures (path)"
-            self.con.execute(sql)
+            logging.debug("Creating picture cache tables.")
+            self.con.execute("drop table if exists pictures");
+            self.con.execute("drop index if exists idx_path");
+            self.con.execute("create table pictures(path TEXT, mtime INTEGER, blocks TEXT)");
+            self.con.execute("create index idx_path on pictures (path)")
 
         self.con = sqlite.connect(self.dbname, isolation_level=None)
         try:
-            self.con.execute("select * from pictures where 1=2")
+            self.con.execute("select path, mtime, blocks from pictures where 1=2")
         except sqlite.OperationalError: # new db
             create_tables()
         except sqlite.DatabaseError as e: # corrupted db
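
The rewrite above changes two things at once: the `pictures` table gains an `mtime` column, and `__setitem__` records the file's modification time alongside the serialized blocks, doing an insert-or-update keyed on the path. A minimal standalone sketch of that flow against the new schema, using plain `sqlite3`; the `store_blocks` helper and the sample path are illustrative names, not part of dupeGuru:

```python
import os
import os.path as op
import sqlite3

con = sqlite3.connect(":memory:", isolation_level=None)
con.execute("create table pictures(path TEXT, mtime INTEGER, blocks TEXT)")
con.execute("create index idx_path on pictures (path)")

def store_blocks(con, path_str, blocks_str):
    # Record the file's mtime at caching time; 0 means "unknown", which a
    # later purge pass can treat as outdated.
    mtime = int(os.stat(path_str).st_mtime) if op.exists(path_str) else 0
    cached = con.execute("select count(*) from pictures where path = ?", [path_str]).fetchone()[0]
    if cached:
        con.execute("update pictures set blocks = ?, mtime = ? where path = ?",
                    [blocks_str, mtime, path_str])
    else:
        con.execute("insert into pictures(blocks,mtime,path) values(?,?,?)",
                    [blocks_str, mtime, path_str])

store_blocks(con, "missing.jpg", "[(12, 34, 56)]")
print(con.execute("select path, mtime from pictures").fetchall())  # [('missing.jpg', 0)]
```
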
@@ -134,3 +140,23 @@ class Cache(object):
         cur = self.con.execute(sql)
         return ((rowid, string_to_colors(blocks)) for rowid, blocks in cur)
 
+    def purge_outdated(self):
+        """Go through the cache and purge outdated records.
+
+        A record is outdated if the picture doesn't exist or if its mtime is greater than the one in
+        the db.
+        """
+        todelete = []
+        sql = "select rowid, path, mtime from pictures"
+        cur = self.con.execute(sql)
+        for rowid, path_str, mtime in cur:
+            if mtime and op.exists(path_str):
+                picture_mtime = os.stat(path_str).st_mtime
+                if int(picture_mtime) <= mtime:
+                    # not outdated
+                    continue
+            todelete.append(rowid)
+        if todelete:
+            sql = "delete from pictures where rowid in (%s)" % ','.join(map(str, todelete))
+            self.con.execute(sql)
+
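
The purge rule added here can be read as a single predicate: a cached row survives only if it has a recorded mtime, the picture still exists, and the on-disk mtime has not moved past the cached one. A compact restatement for illustration only; the `is_outdated` helper is not part of the codebase:

```python
import os
import os.path as op

def is_outdated(path_str, cached_mtime):
    # Keep the row only when we have a cached mtime, the file still exists,
    # and the file has not been modified since it was cached.
    if cached_mtime and op.exists(path_str):
        return int(os.stat(path_str).st_mtime) > cached_mtime
    return True
```
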
@@ -55,6 +55,7 @@ def prepare_pictures(pictures, cache_path, with_dimensions, j=job.nulljob):
     # MemoryError happens when trying to read an image file, which is freed from memory by the
     # time that MemoryError is raised.
     cache = Cache(cache_path)
+    cache.purge_outdated()
     prepared = [] # only pictures for which there was no error getting blocks
     try:
         for picture in j.iter_with_progress(pictures, tr("Analyzed %d/%d pictures")):
@@ -6,26 +6,18 @@
 # which should be included with this package. The terms are also available at
 # http://www.hardcoded.net/licenses/bsd_license
 
-import logging
 from collections import defaultdict
 from itertools import combinations
 
-from hscommon import io
 from hscommon.trans import tr
 
 from core.engine import Match
-from . import exif
 
 def getmatches(files, match_scaled, j):
     timestamp2pic = defaultdict(set)
     for picture in j.iter_with_progress(files, tr("Read EXIF of %d/%d pictures")):
-        try:
-            with io.open(picture.path, 'rb') as fp:
-                exifdata = exif.get_fields(fp)
-                timestamp = exifdata['DateTimeOriginal']
-            timestamp2pic[timestamp].add(picture)
-        except Exception:
-            logging.info("Couldn't read EXIF of picture: %s", picture.path)
+        timestamp = picture.exif_timestamp
+        timestamp2pic[timestamp].add(picture)
     if '0000:00:00 00:00:00' in timestamp2pic: # very likely false matches
         del timestamp2pic['0000:00:00 00:00:00']
     matches = []
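
With the EXIF read moved onto the picture objects themselves, `getmatches()` is left with pure grouping work. How each timestamp group becomes `Match` pairs is not shown in this hunk, so the sketch below illustrates the general pattern with plain tuples instead of `core.engine.Match`; the pairing step is an assumption about the surrounding code:

```python
from collections import defaultdict
from itertools import combinations

def candidate_pairs(pictures):
    # pictures: iterable of (name, exif_timestamp) tuples.
    timestamp2pic = defaultdict(set)
    for name, timestamp in pictures:
        timestamp2pic[timestamp].add(name)
    # Cameras with an unset clock all report this value; drop the whole group.
    timestamp2pic.pop('0000:00:00 00:00:00', None)
    for group in timestamp2pic.values():
        yield from combinations(sorted(group), 2)

print(list(candidate_pairs([("a.jpg", "2012:08:01 10:00:00"),
                            ("b.jpg", "2012:08:01 10:00:00"),
                            ("c.jpg", "2012:08:02 09:30:00")])))
# [('a.jpg', 'b.jpg')]
```
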
@@ -6,7 +6,7 @@
 # which should be included with this package. The terms are also available at
 # http://www.hardcoded.net/licenses/bsd_license
 
-from hscommon import io
+import logging
 from hscommon.util import get_file_ext
 from core import fs
 from . import exif
@@ -15,6 +15,7 @@ class Photo(fs.File):
     INITIAL_INFO = fs.File.INITIAL_INFO.copy()
     INITIAL_INFO.update({
         'dimensions': (0,0),
+        'exif_timestamp': '',
     })
     __slots__ = fs.File.__slots__ + tuple(INITIAL_INFO.keys())
 
@@ -30,7 +31,7 @@ class Photo(fs.File):
     def _get_orientation(self):
         if not hasattr(self, '_cached_orientation'):
             try:
-                with io.open(self.path, 'rb') as fp:
+                with self.path.open('rb') as fp:
                     exifdata = exif.get_fields(fp)
                     # the value is a list (probably one-sized) of ints
                     orientations = exifdata['Orientation']
@@ -49,6 +50,13 @@ class Photo(fs.File):
             self.dimensions = self._plat_get_dimensions()
             if self._get_orientation() in {5, 6, 7, 8}:
                 self.dimensions = (self.dimensions[1], self.dimensions[0])
+        elif field == 'exif_timestamp':
+            try:
+                with self.path.open('rb') as fp:
+                    exifdata = exif.get_fields(fp)
+                self.exif_timestamp = exifdata['DateTimeOriginal']
+            except Exception:
+                logging.info("Couldn't read EXIF of picture: %s", self.path)
 
     def get_blocks(self, block_count_per_side):
         return self._plat_get_blocks(block_count_per_side, self._get_orientation())
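
The new `elif field == 'exif_timestamp':` branch follows the same lazy pattern as the dimensions branch above it: the attribute keeps its `INITIAL_INFO` default until the field is first requested, and any EXIF failure is logged rather than raised so one unreadable file does not abort a scan. A stripped-down sketch of that pattern with a hypothetical class, not dupeGuru's `Photo`:

```python
import logging

class LazyPhotoInfo:
    """Illustration of on-demand field reading (hypothetical class)."""
    def __init__(self, path, raw_exif=None):
        self.path = path
        self.exif_timestamp = ''          # INITIAL_INFO-style default
        self._raw_exif = raw_exif or {}   # stand-in for exif.get_fields(fp)

    def _read_info(self, field):
        if field == 'exif_timestamp':
            try:
                self.exif_timestamp = self._raw_exif['DateTimeOriginal']
            except Exception:
                logging.info("Couldn't read EXIF of picture: %s", self.path)

photo = LazyPhotoInfo("a.jpg", {'DateTimeOriginal': '2012:08:11 14:02:33'})
photo._read_info('exif_timestamp')
print(photo.exif_timestamp)  # 2012:08:11 14:02:33
```
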
@@ -20,6 +20,7 @@ class ResultTable(ResultTableBase):
         Column('size', coltr("Size (KB)"), optional=True),
         Column('extension', coltr("Kind"), visible=False, optional=True),
         Column('dimensions', coltr("Dimensions"), optional=True),
+        Column('exif_timestamp', coltr("EXIF Timestamp"), visible=False, optional=True),
         Column('mtime', coltr("Modification"), visible=False, optional=True),
         Column('percentage', coltr("Match %"), optional=True),
         Column('dupe_count', coltr("Dupe Count"), visible=False, optional=True),
@@ -141,5 +141,5 @@ class TestCaseCacheSQLEscape:
         try:
             del c["foo'bar"]
         except KeyError:
-            self.fail()
+            assert False
 
@@ -4,8 +4,8 @@
 * Added "Replace with symlinks" to complement "Replace with hardlinks". [Mac, Linux] (#194)
 * dupeGuru now tells how many duplicates were affected after each re-prioritization operation. (#204)
 * Added Longest/Shortest filename criteria in the re-prioritize dialog. (#198)
-* Fixed result table cells which mistakenly became writable in v3.5.0. [Mac] (#203)
-* Fixed "Rename Selected" which was broken since v3.5.0. [Mac] (#202)
+* Fixed result table cells which mistakenly became writable in v6.4.0. [Mac] (#203)
+* Fixed "Rename Selected" which was broken since v6.4.0. [Mac] (#202)
 * Fixed a bug where "Reset to Defaults" in the Columns menu wouldn't refresh menu items' marked state.
 * Improved OGG metadata reading.
 * Improved Russian localization by Kyrill Detinov.
@@ -1,3 +1,17 @@
+=== 2.7.0 (2012-08-11)
+
+* Added "Export to CSV". (#189)
+* Added "Replace with symlinks" to complement "Replace with hardlinks". [Mac, Linux] (#194)
+* Added "Exif Timestamp" column. (#201)
+* dupeGuru now tells how many duplicates were affected after each re-prioritization operation. (#204)
+* Added Longest/Shortest filename criteria in the re-prioritize dialog. (#198)
+* Fixed result table cells which mistakenly became writable in v2.6.0. [Mac] (#203)
+* Fixed "Rename Selected" which was broken since v2.6.0. [Mac] (#202)
+* Fixed a bug where "Reset to Defaults" in the Columns menu wouldn't refresh menu items' marked state.
+* Fixed a bug where outdated picture cache entries would result in false matches. (#199)
+* Added Brazilian localization by Victor Figueiredo.
+* Improved Russian localization by Kyrill Detinov.
+
 === 2.6.0 (2012-06-06)
 
 * Added Aperture support. (#42)
@@ -16,6 +16,7 @@ class ResultsModel(ResultsModelBase):
         Column('size', defaultWidth=60),
         Column('extension', defaultWidth=40),
         Column('dimensions', defaultWidth=100),
+        Column('exif_timestamp', defaultWidth=120),
         Column('mtime', defaultWidth=120),
         Column('percentage', defaultWidth=60),
         Column('dupe_count', defaultWidth=80),