Mirror of https://github.com/arsenetar/dupeguru.git, synced 2025-03-09 21:24:36 +00:00
Removed data modules and moved their functionalities to core_*.app.
parent 60462698ac
commit 70689ce057
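For orientation, here is a minimal sketch of the pattern this commit applies across editions. The sketch is not code from the diff itself: `DupeGuruBase` below is a reduced stand-in for `core.app.DupeGuru`, and `ExampleEditionDupeGuru` is a hypothetical edition. The idea is that each edition's `data` module, with its module-level constants (`COLUMNS`, `DELTA_COLUMNS`, `METADATA_TO_READ`) and free functions (`GetDisplayInfo`, `GetDupeSortKey`, `GetGroupSortKey`, `prioritization_categories`), becomes class attributes and `_get_*` method overrides on that edition's `DupeGuru` subclass.

    from collections import namedtuple

    Column = namedtuple('Column', 'attr display')

    class DupeGuruBase:
        # Reduced stand-in for core.app.DupeGuru, limited to the hooks touched by this commit.
        COLUMNS = []              # each edition overrides these class attributes
        DELTA_COLUMNS = set()
        METADATA_TO_READ = []

        def _get_display_info(self, dupe, group, delta):
            # virtual: editions override this instead of shipping a data.GetDisplayInfo() function
            raise NotImplementedError()

        def get_display_info(self, dupe, group, delta=False):
            if (dupe is None) or (group is None):
                return ['---'] * len(self.COLUMNS)
            return self._get_display_info(dupe, group, delta)

    class ExampleEditionDupeGuru(DupeGuruBase):
        # What used to live in a core_*/data.py module now sits directly on the subclass.
        COLUMNS = [Column('name', 'Filename'), Column('size', 'Size (KB)')]
        DELTA_COLUMNS = {1}
        METADATA_TO_READ = ['size']

        def _get_display_info(self, dupe, group, delta):
            size = dupe.size
            if delta and group.get_match_of(dupe):
                size -= group.ref.size
            return [dupe.name, str(size)]

Concretely, callers switch from `app.data.COLUMNS`, `data.GetDupeSortKey(...)` and `Results(data_module)` to `app.COLUMNS`, `app._get_dupe_sort_key(...)` and `Results(app)`, which is what the hunks below do file by file.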
core/app.py (76 lines changed)
@@ -11,6 +11,8 @@ import os.path as op
import logging
import subprocess
import re
import time
from collections import namedtuple

from send2trash import send2trash
from hscommon import io
@@ -18,7 +20,7 @@ from hscommon.reg import RegistrableApplication
from hscommon.notify import Broadcaster
from hscommon.path import Path
from hscommon.conflict import smart_move, smart_copy
from hscommon.util import delete_if_empty, first, escape, nonone
from hscommon.util import delete_if_empty, first, escape, nonone, format_time_decimal
from hscommon.trans import tr

from . import directories, results, scanner, export, fs
@@ -40,6 +42,36 @@ class DestType:
Relative = 1
Absolute = 2

Column = namedtuple('Column', 'attr display')

def format_timestamp(t, delta):
if delta:
return format_time_decimal(t)
else:
if t > 0:
return time.strftime('%Y/%m/%d %H:%M:%S', time.localtime(t))
else:
return '---'

def format_words(w):
def do_format(w):
if isinstance(w, list):
return '(%s)' % ', '.join(do_format(item) for item in w)
else:
return w.replace('\n', ' ')

return ', '.join(do_format(item) for item in w)

def format_perc(p):
return "%0.0f" % p

def format_dupe_count(c):
return str(c) if c else '---'

def cmp_value(value):
return value.lower() if isinstance(value, str) else value

class DupeGuru(RegistrableApplication, Broadcaster):
#--- View interface
# open_path(path)
@@ -49,7 +81,7 @@ class DupeGuru(RegistrableApplication, Broadcaster):
# set_default(key_name, value)
# show_extra_fairware_reminder()

def __init__(self, view, data_module, appdata):
def __init__(self, view, appdata):
self.view = view
if self.get_default(DEBUG_MODE_PREFERENCE, False):
logging.getLogger().setLevel(logging.DEBUG)
@@ -62,9 +94,8 @@ class DupeGuru(RegistrableApplication, Broadcaster):
self.appdata = appdata
if not op.exists(self.appdata):
os.makedirs(self.appdata)
self.data = data_module
self.directories = directories.Directories()
self.results = results.Results(data_module)
self.results = results.Results(self)
self.scanner = scanner.Scanner()
self.options = {
'escape_filter_regexp': True,
@@ -73,6 +104,19 @@ class DupeGuru(RegistrableApplication, Broadcaster):
}
self.selected_dupes = []

#--- Virtual
def _get_display_info(self, dupe, group, delta):
raise NotImplementedError()

def _get_dupe_sort_key(self, dupe, get_group, key, delta):
raise NotImplementedError()

def _get_group_sort_key(self, group, key):
raise NotImplementedError()

def _prioritization_categories(self):
raise NotImplementedError()

#--- Private
def _do_delete(self, j, replace_with_hardlinks):
def op(dupe):
@@ -92,15 +136,6 @@ class DupeGuru(RegistrableApplication, Broadcaster):
os.link(str(ref.path), str(dupe.path))
self.clean_empty_dirs(dupe.path[:-1])

def _get_display_info(self, dupe, group, delta=False):
if (dupe is None) or (group is None):
return ['---'] * len(self.data.COLUMNS)
try:
return self.data.GetDisplayInfo(dupe, group, delta)
except Exception as e:
logging.warning("Exception on GetDisplayInfo for %s: %s", str(dupe.path), str(e))
return ['---'] * len(self.data.COLUMNS)

def _create_file(self, path):
# We add fs.Folder to fileclasses in case the file we're loading contains folder paths.
return fs.get_file(path, self.directories.fileclasses + [fs.Folder])
@@ -111,7 +146,7 @@ class DupeGuru(RegistrableApplication, Broadcaster):
if f is None:
return None
try:
f._read_all_info(attrnames=self.data.METADATA_TO_READ)
f._read_all_info(attrnames=self.METADATA_TO_READ)
return f
except EnvironmentError:
return None
@@ -232,16 +267,25 @@ class DupeGuru(RegistrableApplication, Broadcaster):
column_ids = [colid for colid in column_ids if colid.isdigit()]
column_ids = list(map(int, column_ids))
column_ids.sort()
colnames = [col.display for i, col in enumerate(self.data.COLUMNS) if i in column_ids]
colnames = [col.display for i, col in enumerate(self.COLUMNS) if i in column_ids]
rows = []
for group in self.results.groups:
for dupe in group:
data = self._get_display_info(dupe, group)
data = self.get_display_info(dupe, group)
row = [data[colid] for colid in column_ids]
row.insert(0, dupe is not group.ref)
rows.append(row)
return export.export_to_xhtml(colnames, rows)

def get_display_info(self, dupe, group, delta=False):
if (dupe is None) or (group is None):
return ['---'] * len(self.COLUMNS)
try:
return self._get_display_info(dupe, group, delta)
except Exception as e:
logging.warning("Exception on GetDisplayInfo for %s: %s", str(dupe.path), str(e))
return ['---'] * len(self.COLUMNS)

def invoke_command(self, cmd):
"""Calls command `cmd` with %d and %r placeholders replaced.

@@ -138,7 +138,7 @@ class PyDupeGuruBase(PyFairware):
return self.py.results.is_modified

def deltaColumns(self):
return list(self.py.data.DELTA_COLUMNS)
return list(self.py.DELTA_COLUMNS)

#---Properties
@signature('v@:c')
core/data.py (41 lines changed)
@@ -1,41 +0,0 @@
# Created By: Virgil Dupras
# Created On: 2006/03/15
# Copyright 2011 Hardcoded Software (http://www.hardcoded.net)
#
# This software is licensed under the "BSD" License as described in the "LICENSE" file,
# which should be included with this package. The terms are also available at
# http://www.hardcoded.net/licenses/bsd_license

from collections import namedtuple
import time

from hscommon.util import format_time_decimal, format_size

Column = namedtuple('Column', 'attr display')

def format_timestamp(t, delta):
if delta:
return format_time_decimal(t)
else:
if t > 0:
return time.strftime('%Y/%m/%d %H:%M:%S', time.localtime(t))
else:
return '---'

def format_words(w):
def do_format(w):
if isinstance(w, list):
return '(%s)' % ', '.join(do_format(item) for item in w)
else:
return w.replace('\n', ' ')

return ', '.join(do_format(item) for item in w)

def format_perc(p):
return "%0.0f" % p

def format_dupe_count(c):
return str(c) if c else '---'

def cmp_value(value):
return value.lower() if isinstance(value, str) else value
@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
# Created By: Virgil Dupras
# Created On: 2010-02-05
# Copyright 2011 Hardcoded Software (http://www.hardcoded.net)
@@ -27,11 +26,11 @@ class DetailsPanel(GUIObject):
else:
dupe = None
group = None
l1 = self.app._get_display_info(dupe, group, False)
l1 = self.app.get_display_info(dupe, group, False)
# we don't want the two sides of the table to display the stats for the same file
ref = group.ref if group is not None and group.ref is not dupe else None
l2 = self.app._get_display_info(ref, group, False)
names = [c.display for c in self.app.data.COLUMNS]
l2 = self.app.get_display_info(ref, group, False)
names = [c.display for c in self.app.COLUMNS]
self._table = list(zip(names, l1, l2))

#--- Public

@@ -42,7 +42,7 @@ class PrioritizationList(GUISelectableList):
class PrioritizeDialog:
def __init__(self, view, app):
self.app = app
self.categories = [cat(app.results) for cat in app.data.prioritization_categories()]
self.categories = [cat(app.results) for cat in app._prioritization_categories()]
self.category_list = CriterionCategoryList(self)
self.criteria = []
self.criteria_list = GUISelectableList()

@@ -25,13 +25,13 @@ class DupeRow(Row):
@property
def data(self):
if self._data is None:
self._data = self._app._get_display_info(self._dupe, self._group, False)
self._data = self._app.get_display_info(self._dupe, self._group, False)
return self._data

@property
def data_delta(self):
if self._data_delta is None:
self._data_delta = self._app._get_display_info(self._dupe, self._group, True)
self._data_delta = self._app.get_display_info(self._dupe, self._group, True)
return self._data_delta

@property
@@ -21,7 +21,7 @@ from hscommon.trans import tr

class Results(Markable):
#---Override
def __init__(self, data_module):
def __init__(self, app):
super(Results, self).__init__()
self.__groups = []
self.__group_of_duplicate = {}
@@ -33,7 +33,7 @@ class Results(Markable):
self.__filtered_groups = None
self.__recalculate_stats()
self.__marked_size = 0
self.data = data_module
self.app = app
self.problems = [] # (dupe, error_msg)
self.is_modified = False

@@ -328,12 +328,12 @@ class Results(Markable):
def sort_dupes(self, key, asc=True, delta=False):
if not self.__dupes:
self.__get_dupe_list()
keyfunc = lambda d: self.data.GetDupeSortKey(d, lambda: self.get_group_of_duplicate(d), key, delta)
keyfunc = lambda d: self.app._get_dupe_sort_key(d, lambda: self.get_group_of_duplicate(d), key, delta)
self.__dupes.sort(key=keyfunc, reverse=not asc)
self.__dupes_sort_descriptor = (key,asc,delta)

def sort_groups(self,key,asc=True):
keyfunc = lambda g: self.data.GetGroupSortKey(g, key)
keyfunc = lambda g: self.app._get_group_sort_key(g, key)
self.groups.sort(key=keyfunc, reverse=not asc)
self.__groups_sort_descriptor = (key,asc)
@@ -8,17 +8,17 @@

from hscommon.testutil import TestApp as TestAppBase, eq_, with_app
from hscommon.path import Path
from hscommon.util import get_file_ext
from hscommon.util import get_file_ext, format_size
from jobprogress.job import nulljob, JobCancelled

from .. import engine
from .. import prioritize
from ..engine import getwords
from ..app import DupeGuru as DupeGuruBase
from ..app import DupeGuru as DupeGuruBase, Column, cmp_value
from ..gui.details_panel import DetailsPanel
from ..gui.directory_tree import DirectoryTree
from ..gui.result_table import ResultTable
from ..gui.prioritize_dialog import PrioritizeDialog
from . import data

class DupeGuruView:
JOB = nulljob
@@ -37,8 +37,42 @@ class DupeGuruView:

class DupeGuru(DupeGuruBase):
COLUMNS = [
Column('name', 'Filename'),
Column('folder_path', 'Directory'),
Column('size', 'Size (KB)'),
Column('extension', 'Kind'),
]
DELTA_COLUMNS = {2,}
METADATA_TO_READ = ['size']

def __init__(self):
DupeGuruBase.__init__(self, DupeGuruView(), data, '/tmp')
DupeGuruBase.__init__(self, DupeGuruView(), '/tmp')

def _get_display_info(self, dupe, group, delta):
size = dupe.size
m = group.get_match_of(dupe)
if m and delta:
r = group.ref
size -= r.size
return [
dupe.name,
str(dupe.folder_path),
format_size(size, 0, 1, False),
dupe.extension if hasattr(dupe, 'extension') else '---',
]

def _get_dupe_sort_key(self, dupe, get_group, key, delta):
r = cmp_value(getattr(dupe, self.COLUMNS[key].attr))
if delta and (key in self.DELTA_COLUMNS):
r -= cmp_value(getattr(get_group().ref, self.COLUMNS[key].attr))
return r

def _get_group_sort_key(self, group, key):
return cmp_value(getattr(group.ref, self.COLUMNS[key].attr))

def _prioritization_categories(self):
return prioritize.all_categories()

class NamedObject:
def __init__(self, name="foobar", with_words=False, size=1, folder=None):
@@ -1,48 +0,0 @@
# Created By: Virgil Dupras
# Created On: 2009-10-23
# Copyright 2011 Hardcoded Software (http://www.hardcoded.net)
#
# This software is licensed under the "BSD" License as described in the "LICENSE" file,
# which should be included with this package. The terms are also available at
# http://www.hardcoded.net/licenses/bsd_license

# data module for tests

from hscommon.util import format_size
from ..data import cmp_value, Column
from .. import prioritize

COLUMNS = [
Column('name', 'Filename'),
Column('folder_path', 'Directory'),
Column('size', 'Size (KB)'),
Column('extension', 'Kind'),
]

METADATA_TO_READ = ['size']
DELTA_COLUMNS = {2,}

def GetDisplayInfo(dupe, group, delta):
size = dupe.size
m = group.get_match_of(dupe)
if m and delta:
r = group.ref
size -= r.size
return [
dupe.name,
str(dupe.folder_path),
format_size(size, 0, 1, False),
dupe.extension if hasattr(dupe, 'extension') else '---',
]

def GetDupeSortKey(dupe, get_group, key, delta):
r = cmp_value(getattr(dupe, COLUMNS[key].attr))
if delta and (key in DELTA_COLUMNS):
r -= cmp_value(getattr(get_group().ref, COLUMNS[key].attr))
return r

def GetGroupSortKey(group, key):
return cmp_value(getattr(group.ref, COLUMNS[key].attr))

def prioritization_categories():
return prioritize.all_categories()
@@ -14,14 +14,14 @@ from xml.etree import ElementTree as ET
from hscommon.testutil import eq_
from hscommon.util import first

from . import data
from .. import engine
from .base import NamedObject, GetTestGroups
from .base import NamedObject, GetTestGroups, DupeGuru
from ..results import Results

class TestCaseResultsEmpty:
def setup_method(self, method):
self.results = Results(data)
self.app = DupeGuru()
self.results = self.app.results

def test_apply_invalid_filter(self):
# If the applied filter is an invalid regexp, just ignore the filter.
@@ -68,7 +68,8 @@ class TestCaseResultsEmpty:

class TestCaseResultsWithSomeGroups:
def setup_method(self, method):
self.results = Results(data)
self.app = DupeGuru()
self.results = self.app.results
self.objects,self.matches,self.groups = GetTestGroups()
self.results.groups = self.groups

@@ -186,7 +187,8 @@ class TestCaseResultsWithSomeGroups:

def test_sort_empty_list(self):
#There was an infinite loop when sorting an empty list.
r = Results(data)
app = DupeGuru()
r = app.results
r.sort_dupes(0)
eq_([],r.dupes)

@@ -231,7 +233,8 @@ class TestCaseResultsWithSomeGroups:

class TestCaseResultsWithSavedResults:
def setup_method(self, method):
self.results = Results(data)
self.app = DupeGuru()
self.results = self.app.results
self.objects,self.matches,self.groups = GetTestGroups()
self.results.groups = self.groups
self.f = io.BytesIO()
@@ -264,7 +267,8 @@ class TestCaseResultsWithSavedResults:

class TestCaseResultsMarkings:
def setup_method(self, method):
self.results = Results(data)
self.app = DupeGuru()
self.results = self.app.results
self.objects,self.matches,self.groups = GetTestGroups()
self.results.groups = self.groups

@@ -407,7 +411,8 @@ class TestCaseResultsMarkings:
f = io.BytesIO()
self.results.save_to_xml(f)
f.seek(0)
r = Results(data)
app = DupeGuru()
r = Results(app)
r.load_from_xml(f,get_file)
assert not r.is_marked(self.objects[0])
assert not r.is_marked(self.objects[1])
@@ -418,7 +423,8 @@ class TestCaseResultsMarkings:

class TestCaseResultsXML:
def setup_method(self, method):
self.results = Results(data)
self.app = DupeGuru()
self.results = self.app.results
self.objects, self.matches, self.groups = GetTestGroups()
self.results.groups = self.groups

@@ -470,7 +476,8 @@ class TestCaseResultsXML:
f = io.BytesIO()
self.results.save_to_xml(f)
f.seek(0)
r = Results(data)
app = DupeGuru()
r = Results(app)
r.load_from_xml(f,get_file)
eq_(2,len(r.groups))
g1,g2 = r.groups
@@ -499,7 +506,8 @@ class TestCaseResultsXML:
filename = str(tmpdir.join('dupeguru_results.xml'))
self.objects[4].name = 'ibabtu 2' #we can't have 2 files with the same path
self.results.save_to_xml(filename)
r = Results(data)
app = DupeGuru()
r = Results(app)
r.load_from_xml(filename,get_file)
eq_(2,len(r.groups))

@@ -513,7 +521,8 @@ class TestCaseResultsXML:
f = io.BytesIO()
self.results.save_to_xml(f)
f.seek(0)
r = Results(data)
app = DupeGuru()
r = Results(app)
r.load_from_xml(f,get_file)
eq_(1,len(r.groups))
eq_(3,len(r.groups[0]))
@@ -553,7 +562,8 @@ class TestCaseResultsXML:
tree = ET.ElementTree(root)
tree.write(f, encoding='utf-8')
f.seek(0)
r = Results(data)
app = DupeGuru()
r = Results(app)
r.load_from_xml(f, get_file)
eq_(1,len(r.groups))
eq_(3,len(r.groups[0]))
@@ -570,12 +580,14 @@ class TestCaseResultsXML:
groups = engine.get_groups(matches) #We should have 2 groups
for g in groups:
g.prioritize(lambda x:objects.index(x)) #We want the dupes to be in the same order as the list is
results = Results(data)
app = DupeGuru()
results = Results(app)
results.groups = groups
f = io.BytesIO()
results.save_to_xml(f)
f.seek(0)
r = Results(data)
app = DupeGuru()
r = Results(app)
r.load_from_xml(f,get_file)
g = r.groups[0]
eq_("\xe9foo bar",g[0].name)
@@ -585,12 +597,14 @@ class TestCaseResultsXML:
f = io.BytesIO()
f.write(b'<this is invalid')
f.seek(0)
r = Results(data)
app = DupeGuru()
r = Results(app)
r.load_from_xml(f,None)
eq_(0,len(r.groups))

def test_load_non_existant_xml(self):
r = Results(data)
app = DupeGuru()
r = Results(app)
try:
r.load_from_xml('does_not_exist.xml', None)
except IOError:
@@ -609,7 +623,8 @@ class TestCaseResultsXML:
results = self.results
results.save_to_xml(f)
f.seek(0)
results = Results(data)
app = DupeGuru()
results = Results(app)
results.load_from_xml(f, self.get_file)
group = results.groups[0]
d1, d2, d3 = group
@@ -642,7 +657,8 @@ class TestCaseResultsXML:

class TestCaseResultsFilter:
def setup_method(self, method):
self.results = Results(data)
self.app = DupeGuru()
self.results = self.app.results
self.objects, self.matches, self.groups = GetTestGroups()
self.results.groups = self.groups
self.results.apply_filter(r'foo')
@@ -729,7 +745,8 @@ class TestCaseResultsFilter:
filename = str(tmpdir.join('dupeguru_results.xml'))
self.objects[4].name = 'ibabtu 2' #we can't have 2 files with the same path
self.results.save_to_xml(filename)
r = Results(data)
app = DupeGuru()
r = Results(app)
r.apply_filter('foo')
r.load_from_xml(filename,get_file)
eq_(2,len(r.groups))
@@ -767,7 +784,8 @@ class TestCaseResultsFilter:

class TestCaseResultsRefFile:
def setup_method(self, method):
self.results = Results(data)
self.app = DupeGuru()
self.results = self.app.results
self.objects, self.matches, self.groups = GetTestGroups()
self.objects[0].is_ref = True
self.objects[1].is_ref = True
core_me/app.py (102 lines changed)
@@ -5,10 +5,104 @@
# which should be included with this package. The terms are also available at
# http://www.hardcoded.net/licenses/bsd_license

from core.app import DupeGuru as DupeGuruBase
from . import data
from hscommon.trans import tr as trbase
from hscommon.util import format_size

from core.app import (DupeGuru as DupeGuruBase, Column, format_time, format_timestamp,
format_perc, format_words, format_dupe_count, cmp_value)
from . import prioritize

tr = lambda s: trbase(s, 'columns')

class DupeGuru(DupeGuruBase):
def __init__(self, view, appdata):
DupeGuruBase.__init__(self, view, data, appdata)
COLUMNS = [
Column('name', tr("Filename")),
Column('folder_path', tr("Folder")),
Column('size', tr("Size (MB)")),
Column('duration', tr("Time")),
Column('bitrate', tr("Bitrate")),
Column('samplerate', tr("Sample Rate")),
Column('extension', tr("Kind")),
Column('mtime', tr("Modification")),
Column('title', tr("Title")),
Column('artist', tr("Artist")),
Column('album', tr("Album")),
Column('genre', tr("Genre")),
Column('year', tr("Year")),
Column('track', tr("Track Number")),
Column('comment', tr("Comment")),
Column('percentage', tr("Match %")),
Column('words', tr("Words Used")),
Column('dupe_count', tr("Dupe Count")),
]
DELTA_COLUMNS = {2, 3, 4, 5, 7}
METADATA_TO_READ = ['size', 'mtime', 'duration', 'bitrate', 'samplerate', 'title', 'artist',
'album', 'genre', 'year', 'track', 'comment']
MATCHPERC_COL = 15
DUPECOUNT_COL = 17

def __init__(self, view, appdata):
DupeGuruBase.__init__(self, view, appdata)

def _get_display_info(self, dupe, group, delta):
size = dupe.size
duration = dupe.duration
bitrate = dupe.bitrate
samplerate = dupe.samplerate
mtime = dupe.mtime
m = group.get_match_of(dupe)
if m:
percentage = m.percentage
dupe_count = 0
if delta:
r = group.ref
size -= r.size
duration -= r.duration
bitrate -= r.bitrate
samplerate -= r.samplerate
mtime -= r.mtime
else:
percentage = group.percentage
dupe_count = len(group.dupes)
return [
dupe.name,
str(dupe.folder_path),
format_size(size, 2, 2, False),
format_time(duration, with_hours=False),
str(bitrate),
str(samplerate),
dupe.extension,
format_timestamp(mtime,delta and m),
dupe.title,
dupe.artist,
dupe.album,
dupe.genre,
dupe.year,
str(dupe.track),
dupe.comment,
format_perc(percentage),
format_words(dupe.words) if hasattr(dupe, 'words') else '',
format_dupe_count(dupe_count)
]

def _get_dupe_sort_key(self, dupe, get_group, key, delta):
if key == self.MATCHPERC_COL:
m = get_group().get_match_of(dupe)
return m.percentage
if key == self.DUPECOUNT_COL:
return 0
r = cmp_value(getattr(dupe, self.COLUMNS[key].attr, ''))
if delta and (key in self.DELTA_COLUMNS):
r -= cmp_value(getattr(get_group().ref, self.COLUMNS[key].attr, ''))
return r

def _get_group_sort_key(self, group, key):
if key == self.MATCHPERC_COL:
return group.percentage
if key == self.DUPECOUNT_COL:
return len(group)
return cmp_value(getattr(group.ref, self.COLUMNS[key].attr, ''))

def _prioritization_categories(self):
return prioritize.all_categories()
core_me/data.py (105 lines changed)
@@ -1,105 +0,0 @@
# Created By: Virgil Dupras
# Created On: 2006/03/15
# Copyright 2011 Hardcoded Software (http://www.hardcoded.net)
#
# This software is licensed under the "BSD" License as described in the "LICENSE" file,
# which should be included with this package. The terms are also available at
# http://www.hardcoded.net/licenses/bsd_license

from hscommon.util import format_time, format_size
from hscommon.trans import tr as trbase
from core.data import (format_timestamp, format_words, format_perc, format_dupe_count, cmp_value,
Column)
from . import prioritize

tr = lambda s: trbase(s, 'columns')

COLUMNS = [
Column('name', tr("Filename")),
Column('folder_path', tr("Folder")),
Column('size', tr("Size (MB)")),
Column('duration', tr("Time")),
Column('bitrate', tr("Bitrate")),
Column('samplerate', tr("Sample Rate")),
Column('extension', tr("Kind")),
Column('mtime', tr("Modification")),
Column('title', tr("Title")),
Column('artist', tr("Artist")),
Column('album', tr("Album")),
Column('genre', tr("Genre")),
Column('year', tr("Year")),
Column('track', tr("Track Number")),
Column('comment', tr("Comment")),
Column('percentage', tr("Match %")),
Column('words', tr("Words Used")),
Column('dupe_count', tr("Dupe Count")),
]

MATCHPERC_COL = 15
DUPECOUNT_COL = 17
DELTA_COLUMNS = {2, 3, 4, 5, 7}

METADATA_TO_READ = ['size', 'mtime', 'duration', 'bitrate', 'samplerate', 'title', 'artist',
'album', 'genre', 'year', 'track', 'comment']

def GetDisplayInfo(dupe, group, delta):
size = dupe.size
duration = dupe.duration
bitrate = dupe.bitrate
samplerate = dupe.samplerate
mtime = dupe.mtime
m = group.get_match_of(dupe)
if m:
percentage = m.percentage
dupe_count = 0
if delta:
r = group.ref
size -= r.size
duration -= r.duration
bitrate -= r.bitrate
samplerate -= r.samplerate
mtime -= r.mtime
else:
percentage = group.percentage
dupe_count = len(group.dupes)
return [
dupe.name,
str(dupe.folder_path),
format_size(size, 2, 2, False),
format_time(duration, with_hours=False),
str(bitrate),
str(samplerate),
dupe.extension,
format_timestamp(mtime,delta and m),
dupe.title,
dupe.artist,
dupe.album,
dupe.genre,
dupe.year,
str(dupe.track),
dupe.comment,
format_perc(percentage),
format_words(dupe.words) if hasattr(dupe, 'words') else '',
format_dupe_count(dupe_count)
]

def GetDupeSortKey(dupe, get_group, key, delta):
if key == MATCHPERC_COL:
m = get_group().get_match_of(dupe)
return m.percentage
if key == DUPECOUNT_COL:
return 0
r = cmp_value(getattr(dupe, COLUMNS[key].attr, ''))
if delta and (key in DELTA_COLUMNS):
r -= cmp_value(getattr(get_group().ref, COLUMNS[key].attr, ''))
return r

def GetGroupSortKey(group, key):
if key == MATCHPERC_COL:
return group.percentage
if key == DUPECOUNT_COL:
return len(group)
return cmp_value(getattr(group.ref, COLUMNS[key].attr, ''))

def prioritization_categories():
return prioritize.all_categories()
@@ -5,10 +5,97 @@
# which should be included with this package. The terms are also available at
# http://www.hardcoded.net/licenses/bsd_license

from core.app import DupeGuru as DupeGuruBase
from . import data
from hscommon.trans import tr as trbase
from hscommon.util import format_size

from core.app import (DupeGuru as DupeGuruBase, Column, format_timestamp, format_perc,
format_dupe_count, cmp_value)
from . import prioritize

tr = lambda s: trbase(s, 'columns')

def format_dimensions(dimensions):
return '%d x %d' % (dimensions[0], dimensions[1])

def get_delta_dimensions(value, ref_value):
return (value[0]-ref_value[0], value[1]-ref_value[1])

class DupeGuru(DupeGuruBase):
def __init__(self, view, appdata):
DupeGuruBase.__init__(self, view, data, appdata)
COLUMNS = [
Column('name', tr("Filename")),
Column('folder_path', tr("Folder")),
Column('size', tr("Size (KB)")),
Column('extension', tr("Kind")),
Column('dimensions', tr("Dimensions")),
Column('mtime', tr("Modification")),
Column('percentage', tr("Match %")),
Column('dupe_count', tr("Dupe Count")),
]
DELTA_COLUMNS = {2, 4, 5}
METADATA_TO_READ = ['size', 'mtime', 'dimensions']
FOLDER_COL = 1
MATCHPERC_COL = 6
DUPECOUNT_COL = 7

def __init__(self, view, appdata):
DupeGuruBase.__init__(self, view, appdata)

def _get_display_info(self, dupe, group, delta):
size = dupe.size
mtime = dupe.mtime
dimensions = dupe.dimensions
m = group.get_match_of(dupe)
if m:
percentage = m.percentage
dupe_count = 0
if delta:
r = group.ref
size -= r.size
mtime -= r.mtime
dimensions = get_delta_dimensions(dimensions, r.dimensions)
else:
percentage = group.percentage
dupe_count = len(group.dupes)
dupe_folder_path = getattr(dupe, 'display_folder_path', dupe.folder_path)
return [
dupe.name,
str(dupe_folder_path),
format_size(size, 0, 1, False),
dupe.extension,
format_dimensions(dimensions),
format_timestamp(mtime, delta and m),
format_perc(percentage),
format_dupe_count(dupe_count)
]

def _get_dupe_sort_key(self, dupe, get_group, key, delta):
if key == self.MATCHPERC_COL:
m = get_group().get_match_of(dupe)
return m.percentage
if key == self.DUPECOUNT_COL:
return 0
if key == self.FOLDER_COL:
dupe_folder_path = getattr(dupe, 'display_folder_path', dupe.folder_path)
return cmp_value(str(dupe_folder_path))
r = cmp_value(getattr(dupe, self.COLUMNS[key].attr, ''))
if delta and (key in self.DELTA_COLUMNS):
ref_value = cmp_value(getattr(get_group().ref, self.COLUMNS[key].attr, ''))
if key == 4: # dimensions
r = get_delta_dimensions(r, ref_value)
else:
r -= ref_value
return r

def _get_group_sort_key(self, group, key):
if key == self.MATCHPERC_COL:
return group.percentage
if key == self.DUPECOUNT_COL:
return len(group)
if key == self.FOLDER_COL:
dupe_folder_path = getattr(group.ref, 'display_folder_path', group.ref.folder_path)
return cmp_value(str(dupe_folder_path))
return cmp_value(getattr(group.ref, self.COLUMNS[key].attr, ''))

def _prioritization_categories(self):
return prioritize.all_categories()
core_pe/data.py (100 lines changed)
@@ -1,100 +0,0 @@
# Created By: Virgil Dupras
# Created On: 2006/03/15
# Copyright 2011 Hardcoded Software (http://www.hardcoded.net)
#
# This software is licensed under the "BSD" License as described in the "LICENSE" file,
# which should be included with this package. The terms are also available at
# http://www.hardcoded.net/licenses/bsd_license

from hscommon.util import format_size
from hscommon.trans import tr as trbase
from core.data import format_timestamp, format_perc, format_dupe_count, cmp_value, Column

from . import prioritize

tr = lambda s: trbase(s, 'columns')

def format_dimensions(dimensions):
return '%d x %d' % (dimensions[0], dimensions[1])

COLUMNS = [
Column('name', tr("Filename")),
Column('folder_path', tr("Folder")),
Column('size', tr("Size (KB)")),
Column('extension', tr("Kind")),
Column('dimensions', tr("Dimensions")),
Column('mtime', tr("Modification")),
Column('percentage', tr("Match %")),
Column('dupe_count', tr("Dupe Count")),
]

FOLDER_COL = 1
MATCHPERC_COL = 6
DUPECOUNT_COL = 7
DELTA_COLUMNS = {2, 4, 5}

METADATA_TO_READ = ['size', 'mtime', 'dimensions']

def get_delta_dimensions(value, ref_value):
return (value[0]-ref_value[0], value[1]-ref_value[1])

def GetDisplayInfo(dupe,group,delta=False):
if (dupe is None) or (group is None):
return ['---'] * len(COLUMNS)
size = dupe.size
mtime = dupe.mtime
dimensions = dupe.dimensions
m = group.get_match_of(dupe)
if m:
percentage = m.percentage
dupe_count = 0
if delta:
r = group.ref
size -= r.size
mtime -= r.mtime
dimensions = get_delta_dimensions(dimensions, r.dimensions)
else:
percentage = group.percentage
dupe_count = len(group.dupes)
dupe_folder_path = getattr(dupe, 'display_folder_path', dupe.folder_path)
return [
dupe.name,
str(dupe_folder_path),
format_size(size, 0, 1, False),
dupe.extension,
format_dimensions(dimensions),
format_timestamp(mtime, delta and m),
format_perc(percentage),
format_dupe_count(dupe_count)
]

def GetDupeSortKey(dupe, get_group, key, delta):
if key == MATCHPERC_COL:
m = get_group().get_match_of(dupe)
return m.percentage
if key == DUPECOUNT_COL:
return 0
if key == FOLDER_COL:
dupe_folder_path = getattr(dupe, 'display_folder_path', dupe.folder_path)
return cmp_value(str(dupe_folder_path))
r = cmp_value(getattr(dupe, COLUMNS[key].attr, ''))
if delta and (key in DELTA_COLUMNS):
ref_value = cmp_value(getattr(get_group().ref, COLUMNS[key].attr, ''))
if key == 4: # dimensions
r = get_delta_dimensions(r, ref_value)
else:
r -= ref_value
return r

def GetGroupSortKey(group, key):
if key == MATCHPERC_COL:
return group.percentage
if key == DUPECOUNT_COL:
return len(group)
if key == FOLDER_COL:
dupe_folder_path = getattr(group.ref, 'display_folder_path', group.ref.folder_path)
return cmp_value(str(dupe_folder_path))
return cmp_value(getattr(group.ref, COLUMNS[key].attr, ''))

def prioritization_categories():
return prioritize.all_categories()
@@ -5,10 +5,77 @@
# which should be included with this package. The terms are also available at
# http://www.hardcoded.net/licenses/bsd_license

from core.app import DupeGuru as DupeGuruBase
from . import data
from hscommon.trans import tr as trbase
from hscommon.util import format_size

from core.app import (DupeGuru as DupeGuruBase, Column, format_timestamp, format_perc,
format_words, format_dupe_count, cmp_value)
from core import prioritize

tr = lambda s: trbase(s, 'columns')

class DupeGuru(DupeGuruBase):
def __init__(self, view, appdata):
DupeGuruBase.__init__(self, view, data, appdata)
COLUMNS = [
Column('name', tr("Filename")),
Column('folder_path', tr("Folder")),
Column('size', tr("Size (KB)")),
Column('extension', tr("Kind")),
Column('mtime', tr("Modification")),
Column('percentage', tr("Match %")),
Column('words', tr("Words Used")),
Column('dupe_count', tr("Dupe Count")),
]
DELTA_COLUMNS = {2, 4}
METADATA_TO_READ = ['size', 'mtime']
MATCHPERC_COL = 5
DUPECOUNT_COL = 7

def __init__(self, view, appdata):
DupeGuruBase.__init__(self, view, appdata)

def _get_display_info(self, dupe, group, delta):
size = dupe.size
mtime = dupe.mtime
m = group.get_match_of(dupe)
if m:
percentage = m.percentage
dupe_count = 0
if delta:
r = group.ref
size -= r.size
mtime -= r.mtime
else:
percentage = group.percentage
dupe_count = len(group.dupes)
return [
dupe.name,
str(dupe.folder_path),
format_size(size, 0, 1, False),
dupe.extension,
format_timestamp(mtime, delta and m),
format_perc(percentage),
format_words(dupe.words) if hasattr(dupe, 'words') else '',
format_dupe_count(dupe_count)
]

def _get_dupe_sort_key(self, dupe, get_group, key, delta):
if key == self.MATCHPERC_COL:
m = get_group().get_match_of(dupe)
return m.percentage
if key == self.DUPECOUNT_COL:
return 0
r = cmp_value(getattr(dupe, self.COLUMNS[key].attr, ''))
if delta and (key in self.DELTA_COLUMNS):
r -= cmp_value(getattr(get_group().ref, self.COLUMNS[key].attr, ''))
return r

def _get_group_sort_key(self, group, key):
if key == self.MATCHPERC_COL:
return group.percentage
if key == self.DUPECOUNT_COL:
return len(group)
return cmp_value(getattr(group.ref, self.COLUMNS[key].attr, ''))

def _prioritization_categories(self):
return prioritize.all_categories()
@@ -1,78 +0,0 @@
# Created By: Virgil Dupras
# Created On: 2006/03/15
# Copyright 2011 Hardcoded Software (http://www.hardcoded.net)
#
# This software is licensed under the "BSD" License as described in the "LICENSE" file,
# which should be included with this package. The terms are also available at
# http://www.hardcoded.net/licenses/bsd_license

from hscommon.util import format_size
from hscommon.trans import tr as trbase
from core.data import (format_timestamp, format_words, format_perc, format_dupe_count, cmp_value,
Column)
from core import prioritize

tr = lambda s: trbase(s, 'columns')

COLUMNS = [
Column('name', tr("Filename")),
Column('folder_path', tr("Folder")),
Column('size', tr("Size (KB)")),
Column('extension', tr("Kind")),
Column('mtime', tr("Modification")),
Column('percentage', tr("Match %")),
Column('words', tr("Words Used")),
Column('dupe_count', tr("Dupe Count")),
]

MATCHPERC_COL = 5
DUPECOUNT_COL = 7
DELTA_COLUMNS = {2, 4}

METADATA_TO_READ = ['size', 'mtime']

def GetDisplayInfo(dupe, group, delta):
size = dupe.size
mtime = dupe.mtime
m = group.get_match_of(dupe)
if m:
percentage = m.percentage
dupe_count = 0
if delta:
r = group.ref
size -= r.size
mtime -= r.mtime
else:
percentage = group.percentage
dupe_count = len(group.dupes)
return [
dupe.name,
str(dupe.folder_path),
format_size(size, 0, 1, False),
dupe.extension,
format_timestamp(mtime, delta and m),
format_perc(percentage),
format_words(dupe.words) if hasattr(dupe, 'words') else '',
format_dupe_count(dupe_count)
]

def GetDupeSortKey(dupe, get_group, key, delta):
if key == MATCHPERC_COL:
m = get_group().get_match_of(dupe)
return m.percentage
if key == DUPECOUNT_COL:
return 0
r = cmp_value(getattr(dupe, COLUMNS[key].attr, ''))
if delta and (key in DELTA_COLUMNS):
r -= cmp_value(getattr(get_group().ref, COLUMNS[key].attr, ''))
return r

def GetGroupSortKey(group, key):
if key == MATCHPERC_COL:
return group.percentage
if key == DUPECOUNT_COL:
return len(group)
return cmp_value(getattr(group.ref, COLUMNS[key].attr, ''))

def prioritization_categories():
return prioritize.all_categories()
@@ -140,7 +140,7 @@ class ResultWindow(QMainWindow):
# Columns menu
menu = self.menuColumns
self._column_actions = []
for index, column in enumerate(self.app.model.data.COLUMNS):
for index, column in enumerate(self.app.model.COLUMNS):
action = menu.addAction(column.display)
action.setCheckable(True)
action.column_index = index
@@ -272,7 +272,7 @@ class ResultWindow(QMainWindow):
def exportTriggered(self):
h = self.resultsView.horizontalHeader()
column_ids = []
for i in range(len(self.app.model.data.COLUMNS)):
for i in range(len(self.app.model.COLUMNS)):
if not h.isSectionHidden(i):
column_ids.append(str(i))
exported_path = self.app.model.export_to_xhtml(column_ids)
@@ -355,7 +355,7 @@ class ResultWindow(QMainWindow):
h = self.resultsView.horizontalHeader()
widths = []
visible = []
for i in range(len(self.app.model.data.COLUMNS)):
for i in range(len(self.app.model.COLUMNS)):
widths.append(h.sectionSize(i))
visible.append(not h.isSectionHidden(i))
prefs.columns_width = widths

@@ -7,7 +7,7 @@
# http://www.hardcoded.net/licenses/bsd_license

from PyQt4.QtCore import SIGNAL, Qt
from PyQt4.QtGui import QBrush, QFont, QTableView, QColor, QItemSelectionModel, QItemSelection
from PyQt4.QtGui import QBrush, QFont, QTableView, QColor

from qtlib.table import Table

@@ -16,14 +16,13 @@ from core.gui.result_table import ResultTable as ResultTableModel
class ResultsModel(Table):
def __init__(self, app, view):
model = ResultTableModel(self, app.model)
self._app = app
self._data = app.model.data
self._delta_columns = app.model.data.DELTA_COLUMNS
self._app = app.model
self._delta_columns = app.model.DELTA_COLUMNS
Table.__init__(self, model, view)
self.model.connect()

def columnCount(self, parent):
return len(self._data.COLUMNS)
return len(self._app.COLUMNS)

def data(self, index, role):
if not index.isValid():
@@ -62,8 +61,8 @@ class ResultsModel(Table):
return flags

def headerData(self, section, orientation, role):
if orientation == Qt.Horizontal and role == Qt.DisplayRole and section < len(self._data.COLUMNS):
return self._data.COLUMNS[section].display
if orientation == Qt.Horizontal and role == Qt.DisplayRole and section < len(self._app.COLUMNS):
return self._app.COLUMNS[section].display
return None

def setData(self, index, value, role):