mirror of
https://github.com/arsenetar/dupeguru.git
synced 2026-01-25 16:11:39 +00:00
Compare commits
16 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
24643a9b5d | ||
|
|
045051ce06 | ||
|
|
7c3728ca47 | ||
|
|
91be1c7336 | ||
|
|
162378bb0a | ||
|
|
4e3cad5702 | ||
|
|
321f8ab406 | ||
|
|
5b3d5f5d1c | ||
|
|
372a682610 | ||
|
|
44266273bf | ||
|
|
ac32305532 | ||
|
|
87c2fa2573 | ||
|
|
db63b63cfd | ||
|
|
6725b2bf0f | ||
|
|
990e73c383 | ||
|
|
9e9e73aa6b |
@@ -63,12 +63,12 @@ On Arch, it's:
|
||||
|
||||
Use Python's built-in `pyvenv` to create a virtual environment in which we're going to install our.
|
||||
Python-related dependencies. `pyvenv` is built-in Python but, unlike its `virtualenv` predecessor,
|
||||
it doesn't install setuptools and pip, so it has to be installed manually:
|
||||
it doesn't install setuptools and pip (unless you use Python 3.4+), so it has to be installed
|
||||
manually:
|
||||
|
||||
$ pyvenv --system-site-packages env
|
||||
$ source env/bin/activate
|
||||
$ wget https://bitbucket.org/pypa/setuptools/raw/bootstrap/ez_setup.py -O - | python
|
||||
$ easy_install pip
|
||||
$ python get-pip.py
|
||||
|
||||
Then, you can install pip requirements in your virtualenv:
|
||||
|
||||
@@ -96,3 +96,4 @@ You can also package dupeGuru into an installable package with:
|
||||
[pyqt]: http://www.riverbankcomputing.com
|
||||
[cxfreeze]: http://cx-freeze.sourceforge.net/
|
||||
[advinst]: http://www.advancedinstaller.com
|
||||
|
||||
|
||||
11
build.py
11
build.py
@@ -1,9 +1,9 @@
|
||||
# Created By: Virgil Dupras
|
||||
# Created On: 2009-12-30
|
||||
# Copyright 2014 Hardcoded Software (http://www.hardcoded.net)
|
||||
#
|
||||
# This software is licensed under the "BSD" License as described in the "LICENSE" file,
|
||||
# which should be included with this package. The terms are also available at
|
||||
#
|
||||
# This software is licensed under the "BSD" License as described in the "LICENSE" file,
|
||||
# which should be included with this package. The terms are also available at
|
||||
# http://www.hardcoded.net/licenses/bsd_license
|
||||
|
||||
import sys
|
||||
@@ -110,8 +110,9 @@ def build_cocoa(edition, dev):
|
||||
'me': ['core_me'] + appscript_pkgs + ['hsaudiotag'],
|
||||
'pe': ['core_pe'] + appscript_pkgs,
|
||||
}[edition]
|
||||
tocopy = ['core', 'hscommon', 'cocoa/inter', 'cocoalib/cocoa', 'jobprogress', 'objp',
|
||||
'send2trash'] + specific_packages
|
||||
tocopy = [
|
||||
'core', 'hscommon', 'cocoa/inter', 'cocoalib/cocoa', 'objp', 'send2trash'
|
||||
] + specific_packages
|
||||
copy_packages(tocopy, pydep_folder, create_links=dev)
|
||||
sys.path.insert(0, 'build')
|
||||
extra_deps = None
|
||||
|
||||
@@ -31,7 +31,7 @@
|
||||
<key>NSPrincipalClass</key>
|
||||
<string>NSApplication</string>
|
||||
<key>NSHumanReadableCopyright</key>
|
||||
<string>© Hardcoded Software, 2013</string>
|
||||
<string>© Hardcoded Software, 2014</string>
|
||||
<key>SUFeedURL</key>
|
||||
<string>http://www.hardcoded.net/updates/dupeguru_me.appcast</string>
|
||||
<key>SUPublicDSAKeyFile</key>
|
||||
|
||||
@@ -31,7 +31,7 @@
|
||||
<key>NSPrincipalClass</key>
|
||||
<string>NSApplication</string>
|
||||
<key>NSHumanReadableCopyright</key>
|
||||
<string>© Hardcoded Software, 2013</string>
|
||||
<string>© Hardcoded Software, 2014</string>
|
||||
<key>SUFeedURL</key>
|
||||
<string>http://www.hardcoded.net/updates/dupeguru_pe.appcast</string>
|
||||
<key>SUPublicDSAKeyFile</key>
|
||||
|
||||
@@ -29,7 +29,7 @@
|
||||
<key>NSPrincipalClass</key>
|
||||
<string>NSApplication</string>
|
||||
<key>NSHumanReadableCopyright</key>
|
||||
<string>© Hardcoded Software, 2013</string>
|
||||
<string>© Hardcoded Software, 2014</string>
|
||||
<key>SUFeedURL</key>
|
||||
<string>http://www.hardcoded.net/updates/dupeguru.appcast</string>
|
||||
<key>SUPublicDSAKeyFile</key>
|
||||
|
||||
@@ -2,8 +2,8 @@
|
||||
# Created On: 2007-10-06
|
||||
# Copyright 2014 Hardcoded Software (http://www.hardcoded.net)
|
||||
|
||||
# This software is licensed under the "BSD" License as described in the "LICENSE" file,
|
||||
# which should be included with this package. The terms are also available at
|
||||
# This software is licensed under the "BSD" License as described in the "LICENSE" file,
|
||||
# which should be included with this package. The terms are also available at
|
||||
# http://www.hardcoded.net/licenses/bsd_license
|
||||
|
||||
import logging
|
||||
@@ -26,7 +26,7 @@ def autoreleasepool(func):
|
||||
|
||||
def as_fetch(as_list, as_type, step_size=1000):
|
||||
"""When fetching items from a very big list through applescript, the connection with the app
|
||||
will timeout. This function is to circumvent that. 'as_type' is the type of the items in the
|
||||
will timeout. This function is to circumvent that. 'as_type' is the type of the items in the
|
||||
list (found in appscript.k). If we don't pass it to the 'each' arg of 'count()', it doesn't work.
|
||||
applescript is rather stupid..."""
|
||||
result = []
|
||||
@@ -66,7 +66,7 @@ def extract_tb_noline(tb):
|
||||
|
||||
def safe_format_exception(type, value, tb):
|
||||
"""Format exception from type, value and tb and fallback if there's a problem.
|
||||
|
||||
|
||||
In some cases in threaded exceptions under Cocoa, I get tracebacks targeting pyc files instead
|
||||
of py files, which results in traceback.format_exception() trying to print lines from pyc files
|
||||
and then crashing when trying to interpret that binary data as utf-8. We want a fallback in
|
||||
@@ -113,5 +113,6 @@ def patch_threaded_job_performer():
|
||||
# _async_run, under cocoa, has to be run within an autorelease pool to prevent leaks.
|
||||
# You only need this patch is you use one of CocoaProxy's function (which allocate objc
|
||||
# structures) inside a threaded job.
|
||||
from jobprogress.performer import ThreadedJobPerformer
|
||||
from hscommon.jobprogress.performer import ThreadedJobPerformer
|
||||
ThreadedJobPerformer._async_run = autoreleasepool(ThreadedJobPerformer._async_run)
|
||||
|
||||
|
||||
186
core/app.py
186
core/app.py
@@ -1,9 +1,9 @@
|
||||
# Created By: Virgil Dupras
|
||||
# Created On: 2006/11/11
|
||||
# Copyright 2014 Hardcoded Software (http://www.hardcoded.net)
|
||||
#
|
||||
# This software is licensed under the "BSD" License as described in the "LICENSE" file,
|
||||
# which should be included with this package. The terms are also available at
|
||||
#
|
||||
# This software is licensed under the "BSD" License as described in the "LICENSE" file,
|
||||
# which should be included with this package. The terms are also available at
|
||||
# http://www.hardcoded.net/licenses/bsd_license
|
||||
|
||||
import os
|
||||
@@ -15,7 +15,7 @@ import time
|
||||
import shutil
|
||||
|
||||
from send2trash import send2trash
|
||||
from jobprogress import job
|
||||
from hscommon.jobprogress import job
|
||||
from hscommon.notify import Broadcaster
|
||||
from hscommon.path import Path
|
||||
from hscommon.conflict import smart_move, smart_copy
|
||||
@@ -78,7 +78,7 @@ def format_words(w):
|
||||
return '(%s)' % ', '.join(do_format(item) for item in w)
|
||||
else:
|
||||
return w.replace('\n', ' ')
|
||||
|
||||
|
||||
return ', '.join(do_format(item) for item in w)
|
||||
|
||||
def format_perc(p):
|
||||
@@ -110,33 +110,33 @@ def fix_surrogate_encoding(s, encoding='utf-8'):
|
||||
|
||||
class DupeGuru(Broadcaster):
|
||||
"""Holds everything together.
|
||||
|
||||
|
||||
Instantiated once per running application, it holds a reference to every high-level object
|
||||
whose reference needs to be held: :class:`~core.results.Results`, :class:`Scanner`,
|
||||
:class:`~core.directories.Directories`, :mod:`core.gui` instances, etc..
|
||||
|
||||
|
||||
It also hosts high level methods and acts as a coordinator for all those elements. This is why
|
||||
some of its methods seem a bit shallow, like for example :meth:`mark_all` and
|
||||
:meth:`remove_duplicates`. These methos are just proxies for a method in :attr:`results`, but
|
||||
they are also followed by a notification call which is very important if we want GUI elements
|
||||
to be correctly notified of a change in the data they're presenting.
|
||||
|
||||
|
||||
.. attribute:: directories
|
||||
|
||||
|
||||
Instance of :class:`~core.directories.Directories`. It holds the current folder selection.
|
||||
|
||||
|
||||
.. attribute:: results
|
||||
|
||||
|
||||
Instance of :class:`core.results.Results`. Holds the results of the latest scan.
|
||||
|
||||
|
||||
.. attribute:: selected_dupes
|
||||
|
||||
|
||||
List of currently selected dupes from our :attr:`results`. Whenever the user changes its
|
||||
selection at the UI level, :attr:`result_table` takes care of updating this attribute, so
|
||||
you can trust that it's always up-to-date.
|
||||
|
||||
|
||||
.. attribute:: result_table
|
||||
|
||||
|
||||
Instance of :mod:`meta-gui <core.gui>` table listing the results from :attr:`results`
|
||||
"""
|
||||
#--- View interface
|
||||
@@ -154,7 +154,7 @@ class DupeGuru(Broadcaster):
|
||||
|
||||
PROMPT_NAME = "dupeGuru"
|
||||
SCANNER_CLASS = scanner.Scanner
|
||||
|
||||
|
||||
def __init__(self, view):
|
||||
if view.get_default(DEBUG_MODE_PREFERENCE):
|
||||
logging.getLogger().setLevel(logging.DEBUG)
|
||||
@@ -185,14 +185,14 @@ class DupeGuru(Broadcaster):
|
||||
children = [self.result_table, self.directory_tree, self.stats_label, self.details_panel]
|
||||
for child in children:
|
||||
child.connect()
|
||||
|
||||
|
||||
#--- Virtual
|
||||
def _prioritization_categories(self):
|
||||
raise NotImplementedError()
|
||||
|
||||
|
||||
def _create_result_table(self):
|
||||
raise NotImplementedError()
|
||||
|
||||
|
||||
#--- Private
|
||||
def _get_dupe_sort_key(self, dupe, get_group, key, delta):
|
||||
if key == 'marked':
|
||||
@@ -212,7 +212,7 @@ class DupeGuru(Broadcaster):
|
||||
same = cmp_value(dupe, key) == refval
|
||||
result = (same, result)
|
||||
return result
|
||||
|
||||
|
||||
def _get_group_sort_key(self, group, key):
|
||||
if key == 'percentage':
|
||||
return group.percentage
|
||||
@@ -221,15 +221,15 @@ class DupeGuru(Broadcaster):
|
||||
if key == 'marked':
|
||||
return len([dupe for dupe in group.dupes if self.results.is_marked(dupe)])
|
||||
return cmp_value(group.ref, key)
|
||||
|
||||
|
||||
def _do_delete(self, j, link_deleted, use_hardlinks, direct_deletion):
|
||||
def op(dupe):
|
||||
j.add_progress()
|
||||
return self._do_delete_dupe(dupe, link_deleted, use_hardlinks, direct_deletion)
|
||||
|
||||
|
||||
j.start_job(self.results.mark_count)
|
||||
self.results.perform_on_marked(op, True)
|
||||
|
||||
|
||||
def _do_delete_dupe(self, dupe, link_deleted, use_hardlinks, direct_deletion):
|
||||
if not dupe.path.exists():
|
||||
return
|
||||
@@ -248,11 +248,11 @@ class DupeGuru(Broadcaster):
|
||||
linkfunc = os.link if use_hardlinks else os.symlink
|
||||
linkfunc(str(ref.path), str_path)
|
||||
self.clean_empty_dirs(dupe.path.parent())
|
||||
|
||||
|
||||
def _create_file(self, path):
|
||||
# We add fs.Folder to fileclasses in case the file we're loading contains folder paths.
|
||||
return fs.get_file(path, self.directories.fileclasses + [fs.Folder])
|
||||
|
||||
|
||||
def _get_file(self, str_path):
|
||||
path = Path(str_path)
|
||||
f = self._create_file(path)
|
||||
@@ -263,7 +263,7 @@ class DupeGuru(Broadcaster):
|
||||
return f
|
||||
except EnvironmentError:
|
||||
return None
|
||||
|
||||
|
||||
def _get_export_data(self):
|
||||
columns = [col for col in self.result_table.columns.ordered_columns
|
||||
if col.visible and col.name != 'marked']
|
||||
@@ -276,20 +276,20 @@ class DupeGuru(Broadcaster):
|
||||
row.insert(0, group_id)
|
||||
rows.append(row)
|
||||
return colnames, rows
|
||||
|
||||
|
||||
def _results_changed(self):
|
||||
self.selected_dupes = [d for d in self.selected_dupes
|
||||
if self.results.get_group_of_duplicate(d) is not None]
|
||||
self.notify('results_changed')
|
||||
|
||||
|
||||
def _start_job(self, jobid, func, args=()):
|
||||
title = JOBID2TITLE[jobid]
|
||||
try:
|
||||
self.progress_window.run(jobid, title, func, args=args)
|
||||
self.progress_window.run(jobid, title, func, args=args)
|
||||
except job.JobInProgressError:
|
||||
msg = tr("A previous action is still hanging in there. You can't start a new one yet. Wait a few seconds, then try again.")
|
||||
self.view.show_message(msg)
|
||||
|
||||
|
||||
def _job_completed(self, jobid):
|
||||
if jobid == JobType.Scan:
|
||||
self._results_changed()
|
||||
@@ -312,7 +312,7 @@ class DupeGuru(Broadcaster):
|
||||
JobType.Delete: tr("All marked files were successfully sent to Trash."),
|
||||
}[jobid]
|
||||
self.view.show_message(msg)
|
||||
|
||||
|
||||
@staticmethod
|
||||
def _remove_hardlink_dupes(files):
|
||||
seen_inodes = set()
|
||||
@@ -327,19 +327,19 @@ class DupeGuru(Broadcaster):
|
||||
seen_inodes.add(inode)
|
||||
result.append(file)
|
||||
return result
|
||||
|
||||
|
||||
def _select_dupes(self, dupes):
|
||||
if dupes == self.selected_dupes:
|
||||
return
|
||||
self.selected_dupes = dupes
|
||||
self.notify('dupes_selected')
|
||||
|
||||
|
||||
#--- Public
|
||||
def add_directory(self, d):
|
||||
"""Adds folder ``d`` to :attr:`directories`.
|
||||
|
||||
|
||||
Shows an error message dialog if something bad happens.
|
||||
|
||||
|
||||
:param str d: path of folder to add
|
||||
"""
|
||||
try:
|
||||
@@ -349,7 +349,7 @@ class DupeGuru(Broadcaster):
|
||||
self.view.show_message(tr("'{}' already is in the list.").format(d))
|
||||
except directories.InvalidPathError:
|
||||
self.view.show_message(tr("'{}' does not exist.").format(d))
|
||||
|
||||
|
||||
def add_selected_to_ignore_list(self):
|
||||
"""Adds :attr:`selected_dupes` to :attr:`scanner`'s ignore list.
|
||||
"""
|
||||
@@ -367,10 +367,10 @@ class DupeGuru(Broadcaster):
|
||||
self.scanner.ignore_list.Ignore(str(other.path), str(dupe.path))
|
||||
self.remove_duplicates(dupes)
|
||||
self.ignore_list_dialog.refresh()
|
||||
|
||||
|
||||
def apply_filter(self, filter):
|
||||
"""Apply a filter ``filter`` to the results so that it shows only dupe groups that match it.
|
||||
|
||||
|
||||
:param str filter: filter to apply
|
||||
"""
|
||||
self.results.apply_filter(None)
|
||||
@@ -379,12 +379,12 @@ class DupeGuru(Broadcaster):
|
||||
filter = escape(filter, '*', '.')
|
||||
self.results.apply_filter(filter)
|
||||
self._results_changed()
|
||||
|
||||
|
||||
def clean_empty_dirs(self, path):
|
||||
if self.options['clean_empty_dirs']:
|
||||
while delete_if_empty(path, ['.DS_Store']):
|
||||
path = path.parent()
|
||||
|
||||
|
||||
def copy_or_move(self, dupe, copy: bool, destination: str, dest_type: DestType):
|
||||
source_path = dupe.path
|
||||
location_path = first(p for p in self.directories if dupe.path in p)
|
||||
@@ -406,20 +406,20 @@ class DupeGuru(Broadcaster):
|
||||
else:
|
||||
smart_move(source_path, dest_path)
|
||||
self.clean_empty_dirs(source_path.parent())
|
||||
|
||||
|
||||
def copy_or_move_marked(self, copy):
|
||||
"""Start an async move (or copy) job on marked duplicates.
|
||||
|
||||
|
||||
:param bool copy: If True, duplicates will be copied instead of moved
|
||||
"""
|
||||
def do(j):
|
||||
def op(dupe):
|
||||
j.add_progress()
|
||||
self.copy_or_move(dupe, copy, destination, desttype)
|
||||
|
||||
|
||||
j.start_job(self.results.mark_count)
|
||||
self.results.perform_on_marked(op, not copy)
|
||||
|
||||
|
||||
if not self.results.mark_count:
|
||||
self.view.show_message(MSG_NO_MARKED_DUPES)
|
||||
return
|
||||
@@ -430,7 +430,7 @@ class DupeGuru(Broadcaster):
|
||||
desttype = self.options['copymove_dest_type']
|
||||
jobid = JobType.Copy if copy else JobType.Move
|
||||
self._start_job(jobid, do)
|
||||
|
||||
|
||||
def delete_marked(self):
|
||||
"""Start an async job to send marked duplicates to the trash.
|
||||
"""
|
||||
@@ -443,10 +443,10 @@ class DupeGuru(Broadcaster):
|
||||
self.deletion_options.direct]
|
||||
logging.debug("Starting deletion job with args %r", args)
|
||||
self._start_job(JobType.Delete, self._do_delete, args=args)
|
||||
|
||||
|
||||
def export_to_xhtml(self):
|
||||
"""Export current results to XHTML.
|
||||
|
||||
|
||||
The configuration of the :attr:`result_table` (columns order and visibility) is used to
|
||||
determine how the data is presented in the export. In other words, the exported table in
|
||||
the resulting XHTML will look just like the results table.
|
||||
@@ -454,10 +454,10 @@ class DupeGuru(Broadcaster):
|
||||
colnames, rows = self._get_export_data()
|
||||
export_path = export.export_to_xhtml(colnames, rows)
|
||||
desktop.open_path(export_path)
|
||||
|
||||
|
||||
def export_to_csv(self):
|
||||
"""Export current results to CSV.
|
||||
|
||||
|
||||
The columns and their order in the resulting CSV file is determined in the same way as in
|
||||
:meth:`export_to_xhtml`.
|
||||
"""
|
||||
@@ -465,7 +465,7 @@ class DupeGuru(Broadcaster):
|
||||
if dest_file:
|
||||
colnames, rows = self._get_export_data()
|
||||
export.export_to_csv(dest_file, colnames, rows)
|
||||
|
||||
|
||||
def get_display_info(self, dupe, group, delta=False):
|
||||
def empty_data():
|
||||
return {c.name: '---' for c in self.result_table.COLUMNS[1:]}
|
||||
@@ -476,10 +476,10 @@ class DupeGuru(Broadcaster):
|
||||
except Exception as e:
|
||||
logging.warning("Exception on GetDisplayInfo for %s: %s", str(dupe.path), str(e))
|
||||
return empty_data()
|
||||
|
||||
|
||||
def invoke_custom_command(self):
|
||||
"""Calls command in ``CustomCommand`` pref with ``%d`` and ``%r`` placeholders replaced.
|
||||
|
||||
|
||||
Using the current selection, ``%d`` is replaced with the currently selected dupe and ``%r``
|
||||
is replaced with that dupe's ref file. If there's no selection, the command is not invoked.
|
||||
If the dupe is a ref, ``%d`` and ``%r`` will be the same.
|
||||
@@ -506,10 +506,10 @@ class DupeGuru(Broadcaster):
|
||||
subprocess.Popen(exename + args, shell=True, cwd=path)
|
||||
else:
|
||||
subprocess.Popen(cmd, shell=True)
|
||||
|
||||
|
||||
def load(self):
|
||||
"""Load directory selection and ignore list from files in appdata.
|
||||
|
||||
|
||||
This method is called during startup so that directory selection and ignore list, which
|
||||
is persistent data, is the same as when the last session was closed (when :meth:`save` was
|
||||
called).
|
||||
@@ -519,19 +519,19 @@ class DupeGuru(Broadcaster):
|
||||
p = op.join(self.appdata, 'ignore_list.xml')
|
||||
self.scanner.ignore_list.load_from_xml(p)
|
||||
self.ignore_list_dialog.refresh()
|
||||
|
||||
|
||||
def load_from(self, filename):
|
||||
"""Start an async job to load results from ``filename``.
|
||||
|
||||
|
||||
:param str filename: path of the XML file (created with :meth:`save_as`) to load
|
||||
"""
|
||||
def do(j):
|
||||
self.results.load_from_xml(filename, self._get_file, j)
|
||||
self._start_job(JobType.Load, do)
|
||||
|
||||
|
||||
def make_selected_reference(self):
|
||||
"""Promote :attr:`selected_dupes` to reference position within their respective groups.
|
||||
|
||||
|
||||
Each selected dupe will become the :attr:`~core.engine.Group.ref` of its group. If there's
|
||||
more than one dupe selected for the same group, only the first (in the order currently shown
|
||||
in :attr:`result_table`) dupe will be promoted.
|
||||
@@ -560,28 +560,28 @@ class DupeGuru(Broadcaster):
|
||||
# do is to keep our selection index-wise (different dupe selection, but same index
|
||||
# selection).
|
||||
self.notify('results_changed_but_keep_selection')
|
||||
|
||||
|
||||
def mark_all(self):
|
||||
"""Set all dupes in the results as marked.
|
||||
"""
|
||||
self.results.mark_all()
|
||||
self.notify('marking_changed')
|
||||
|
||||
|
||||
def mark_none(self):
|
||||
"""Set all dupes in the results as unmarked.
|
||||
"""
|
||||
self.results.mark_none()
|
||||
self.notify('marking_changed')
|
||||
|
||||
|
||||
def mark_invert(self):
|
||||
"""Invert the marked state of all dupes in the results.
|
||||
"""
|
||||
self.results.mark_invert()
|
||||
self.notify('marking_changed')
|
||||
|
||||
|
||||
def mark_dupe(self, dupe, marked):
|
||||
"""Change marked status of ``dupe``.
|
||||
|
||||
|
||||
:param dupe: dupe to mark/unmark
|
||||
:type dupe: :class:`~core.fs.File`
|
||||
:param bool marked: True = mark, False = unmark
|
||||
@@ -591,7 +591,7 @@ class DupeGuru(Broadcaster):
|
||||
else:
|
||||
self.results.unmark(dupe)
|
||||
self.notify('marking_changed')
|
||||
|
||||
|
||||
def open_selected(self):
|
||||
"""Open :attr:`selected_dupes` with their associated application.
|
||||
"""
|
||||
@@ -600,16 +600,16 @@ class DupeGuru(Broadcaster):
|
||||
return
|
||||
for dupe in self.selected_dupes:
|
||||
desktop.open_path(dupe.path)
|
||||
|
||||
|
||||
def purge_ignore_list(self):
|
||||
"""Remove files that don't exist from :attr:`ignore_list`.
|
||||
"""
|
||||
self.scanner.ignore_list.Filter(lambda f,s:op.exists(f) and op.exists(s))
|
||||
self.ignore_list_dialog.refresh()
|
||||
|
||||
|
||||
def remove_directories(self, indexes):
|
||||
"""Remove root directories at ``indexes`` from :attr:`directories`.
|
||||
|
||||
|
||||
:param indexes: Indexes of the directories to remove.
|
||||
:type indexes: list of int
|
||||
"""
|
||||
@@ -620,30 +620,30 @@ class DupeGuru(Broadcaster):
|
||||
self.notify('directories_changed')
|
||||
except IndexError:
|
||||
pass
|
||||
|
||||
|
||||
def remove_duplicates(self, duplicates):
|
||||
"""Remove ``duplicates`` from :attr:`results`.
|
||||
|
||||
|
||||
Calls :meth:`~core.results.Results.remove_duplicates` and send appropriate notifications.
|
||||
|
||||
|
||||
:param duplicates: duplicates to remove.
|
||||
:type duplicates: list of :class:`~core.fs.File`
|
||||
"""
|
||||
self.results.remove_duplicates(self.without_ref(duplicates))
|
||||
self.notify('results_changed_but_keep_selection')
|
||||
|
||||
|
||||
def remove_marked(self):
|
||||
"""Removed marked duplicates from the results (without touching the files themselves).
|
||||
"""
|
||||
if not self.results.mark_count:
|
||||
self.view.show_message(MSG_NO_MARKED_DUPES)
|
||||
return
|
||||
msg = tr("You are about to remove %d files from results. Continue?")
|
||||
msg = tr("You are about to remove %d files from results. Continue?")
|
||||
if not self.view.ask_yes_no(msg % self.results.mark_count):
|
||||
return
|
||||
self.results.perform_on_marked(lambda x:None, True)
|
||||
self._results_changed()
|
||||
|
||||
|
||||
def remove_selected(self):
|
||||
"""Removed :attr:`selected_dupes` from the results (without touching the files themselves).
|
||||
"""
|
||||
@@ -651,16 +651,16 @@ class DupeGuru(Broadcaster):
|
||||
if not dupes:
|
||||
self.view.show_message(MSG_NO_SELECTED_DUPES)
|
||||
return
|
||||
msg = tr("You are about to remove %d files from results. Continue?")
|
||||
msg = tr("You are about to remove %d files from results. Continue?")
|
||||
if not self.view.ask_yes_no(msg % len(dupes)):
|
||||
return
|
||||
self.remove_duplicates(dupes)
|
||||
|
||||
|
||||
def rename_selected(self, newname):
|
||||
"""Renames the selected dupes's file to ``newname``.
|
||||
|
||||
|
||||
If there's more than one selected dupes, the first one is used.
|
||||
|
||||
|
||||
:param str newname: The filename to rename the dupe's file to.
|
||||
"""
|
||||
try:
|
||||
@@ -670,13 +670,13 @@ class DupeGuru(Broadcaster):
|
||||
except (IndexError, fs.FSError) as e:
|
||||
logging.warning("dupeGuru Warning: %s" % str(e))
|
||||
return False
|
||||
|
||||
|
||||
def reprioritize_groups(self, sort_key):
|
||||
"""Sort dupes in each group (in :attr:`results`) according to ``sort_key``.
|
||||
|
||||
|
||||
Called by the re-prioritize dialog. Calls :meth:`~core.engine.Group.prioritize` and, once
|
||||
the sorting is done, show a message that confirms the action.
|
||||
|
||||
|
||||
:param sort_key: The key being sent to :meth:`~core.engine.Group.prioritize`
|
||||
:type sort_key: f(dupe)
|
||||
"""
|
||||
@@ -687,11 +687,11 @@ class DupeGuru(Broadcaster):
|
||||
self._results_changed()
|
||||
msg = tr("{} duplicate groups were changed by the re-prioritization.").format(count)
|
||||
self.view.show_message(msg)
|
||||
|
||||
|
||||
def reveal_selected(self):
|
||||
if self.selected_dupes:
|
||||
desktop.reveal_path(self.selected_dupes[0].path)
|
||||
|
||||
|
||||
def save(self):
|
||||
if not op.exists(self.appdata):
|
||||
os.makedirs(self.appdata)
|
||||
@@ -699,17 +699,17 @@ class DupeGuru(Broadcaster):
|
||||
p = op.join(self.appdata, 'ignore_list.xml')
|
||||
self.scanner.ignore_list.save_to_xml(p)
|
||||
self.notify('save_session')
|
||||
|
||||
|
||||
def save_as(self, filename):
|
||||
"""Save results in ``filename``.
|
||||
|
||||
|
||||
:param str filename: path of the file to save results (as XML) to.
|
||||
"""
|
||||
self.results.save_to_xml(filename)
|
||||
|
||||
|
||||
def start_scanning(self):
|
||||
"""Starts an async job to scan for duplicates.
|
||||
|
||||
|
||||
Scans folders selected in :attr:`directories` and put the results in :attr:`results`
|
||||
"""
|
||||
def do(j):
|
||||
@@ -722,14 +722,14 @@ class DupeGuru(Broadcaster):
|
||||
files = self._remove_hardlink_dupes(files)
|
||||
logging.info('Scanning %d files' % len(files))
|
||||
self.results.groups = self.scanner.get_dupe_groups(files, j)
|
||||
|
||||
|
||||
if not self.directories.has_any_file():
|
||||
self.view.show_message(tr("The selected directories contain no scannable file."))
|
||||
return
|
||||
self.results.groups = []
|
||||
self._results_changed()
|
||||
self._start_job(JobType.Scan, do)
|
||||
|
||||
|
||||
def toggle_selected_mark_state(self):
|
||||
selected = self.without_ref(self.selected_dupes)
|
||||
if not selected:
|
||||
@@ -741,12 +741,12 @@ class DupeGuru(Broadcaster):
|
||||
for dupe in selected:
|
||||
markfunc(dupe)
|
||||
self.notify('marking_changed')
|
||||
|
||||
|
||||
def without_ref(self, dupes):
|
||||
"""Returns ``dupes`` with all reference elements removed.
|
||||
"""
|
||||
return [dupe for dupe in dupes if self.results.get_group_of_duplicate(dupe).ref is not dupe]
|
||||
|
||||
|
||||
def get_default(self, key, fallback_value=None):
|
||||
result = nonone(self.view.get_default(key), fallback_value)
|
||||
if fallback_value is not None and not isinstance(result, type(fallback_value)):
|
||||
@@ -756,10 +756,10 @@ class DupeGuru(Broadcaster):
|
||||
except Exception:
|
||||
result = fallback_value
|
||||
return result
|
||||
|
||||
|
||||
def set_default(self, key, value):
|
||||
self.view.set_default(key, value)
|
||||
|
||||
|
||||
#--- Properties
|
||||
@property
|
||||
def stat_line(self):
|
||||
@@ -767,4 +767,4 @@ class DupeGuru(Broadcaster):
|
||||
if self.scanner.discarded_file_count:
|
||||
result = tr("%s (%d discarded)") % (result, self.scanner.discarded_file_count)
|
||||
return result
|
||||
|
||||
|
||||
|
||||
@@ -1,15 +1,15 @@
|
||||
# Created By: Virgil Dupras
|
||||
# Created On: 2006/02/27
|
||||
# Copyright 2014 Hardcoded Software (http://www.hardcoded.net)
|
||||
#
|
||||
# This software is licensed under the "BSD" License as described in the "LICENSE" file,
|
||||
# which should be included with this package. The terms are also available at
|
||||
#
|
||||
# This software is licensed under the "BSD" License as described in the "LICENSE" file,
|
||||
# which should be included with this package. The terms are also available at
|
||||
# http://www.hardcoded.net/licenses/bsd_license
|
||||
|
||||
from xml.etree import ElementTree as ET
|
||||
import logging
|
||||
|
||||
from jobprogress import job
|
||||
from hscommon.jobprogress import job
|
||||
from hscommon.path import Path
|
||||
from hscommon.util import FileOrPath
|
||||
|
||||
@@ -24,7 +24,7 @@ __all__ = [
|
||||
|
||||
class DirectoryState:
|
||||
"""Enum describing how a folder should be considered.
|
||||
|
||||
|
||||
* DirectoryState.Normal: Scan all files normally
|
||||
* DirectoryState.Reference: Scan files, but make sure never to delete any of them
|
||||
* DirectoryState.Excluded: Don't scan this folder
|
||||
@@ -41,10 +41,10 @@ class InvalidPathError(Exception):
|
||||
|
||||
class Directories:
|
||||
"""Holds user folder selection.
|
||||
|
||||
|
||||
Manages the selection that the user make through the folder selection dialog. It also manages
|
||||
folder states, and how recursion applies to them.
|
||||
|
||||
|
||||
Then, when the user starts the scan, :meth:`get_files` is called to retrieve all files (wrapped
|
||||
in :mod:`core.fs`) that have to be scanned according to the chosen folders/states.
|
||||
"""
|
||||
@@ -55,28 +55,28 @@ class Directories:
|
||||
self.states = {}
|
||||
self.fileclasses = fileclasses
|
||||
self.folderclass = fs.Folder
|
||||
|
||||
|
||||
def __contains__(self, path):
|
||||
for p in self._dirs:
|
||||
if path in p:
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def __delitem__(self,key):
|
||||
self._dirs.__delitem__(key)
|
||||
|
||||
|
||||
def __getitem__(self,key):
|
||||
return self._dirs.__getitem__(key)
|
||||
|
||||
|
||||
def __len__(self):
|
||||
return len(self._dirs)
|
||||
|
||||
|
||||
#---Private
|
||||
def _default_state_for_path(self, path):
|
||||
# Override this in subclasses to specify the state of some special folders.
|
||||
if path.name.startswith('.'): # hidden
|
||||
return DirectoryState.Excluded
|
||||
|
||||
|
||||
def _get_files(self, from_path, j):
|
||||
j.check_if_cancelled()
|
||||
state = self.get_state(from_path)
|
||||
@@ -102,7 +102,7 @@ class Directories:
|
||||
yield file
|
||||
except (EnvironmentError, fs.InvalidPath):
|
||||
pass
|
||||
|
||||
|
||||
def _get_folders(self, from_folder, j):
|
||||
j.check_if_cancelled()
|
||||
try:
|
||||
@@ -116,16 +116,16 @@ class Directories:
|
||||
yield from_folder
|
||||
except (EnvironmentError, fs.InvalidPath):
|
||||
pass
|
||||
|
||||
|
||||
#---Public
|
||||
def add_path(self, path):
|
||||
"""Adds ``path`` to self, if not already there.
|
||||
|
||||
|
||||
Raises :exc:`AlreadyThereError` if ``path`` is already in self. If path is a directory
|
||||
containing some of the directories already present in self, ``path`` will be added, but all
|
||||
directories under it will be removed. Can also raise :exc:`InvalidPathError` if ``path``
|
||||
does not exist.
|
||||
|
||||
|
||||
:param Path path: path to add
|
||||
"""
|
||||
if path in self:
|
||||
@@ -134,11 +134,11 @@ class Directories:
|
||||
raise InvalidPathError()
|
||||
self._dirs = [p for p in self._dirs if p not in path]
|
||||
self._dirs.append(path)
|
||||
|
||||
|
||||
@staticmethod
|
||||
def get_subfolders(path):
|
||||
"""Returns a sorted list of paths corresponding to subfolders in ``path``.
|
||||
|
||||
|
||||
:param Path path: get subfolders from there
|
||||
:rtype: list of Path
|
||||
"""
|
||||
@@ -148,29 +148,29 @@ class Directories:
|
||||
return subpaths
|
||||
except EnvironmentError:
|
||||
return []
|
||||
|
||||
|
||||
def get_files(self, j=job.nulljob):
|
||||
"""Returns a list of all files that are not excluded.
|
||||
|
||||
|
||||
Returned files also have their ``is_ref`` attr set if applicable.
|
||||
"""
|
||||
for path in self._dirs:
|
||||
for file in self._get_files(path, j):
|
||||
yield file
|
||||
|
||||
|
||||
def get_folders(self, j=job.nulljob):
|
||||
"""Returns a list of all folders that are not excluded.
|
||||
|
||||
|
||||
Returned folders also have their ``is_ref`` attr set if applicable.
|
||||
"""
|
||||
for path in self._dirs:
|
||||
from_folder = self.folderclass(path)
|
||||
for folder in self._get_folders(from_folder, j):
|
||||
yield folder
|
||||
|
||||
|
||||
def get_state(self, path):
|
||||
"""Returns the state of ``path``.
|
||||
|
||||
|
||||
:rtype: :class:`DirectoryState`
|
||||
"""
|
||||
if path in self.states:
|
||||
@@ -183,12 +183,12 @@ class Directories:
|
||||
return self.get_state(parent)
|
||||
else:
|
||||
return DirectoryState.Normal
|
||||
|
||||
|
||||
def has_any_file(self):
|
||||
"""Returns whether selected folders contain any file.
|
||||
|
||||
|
||||
Because it stops at the first file it finds, it's much faster than get_files().
|
||||
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
try:
|
||||
@@ -196,10 +196,10 @@ class Directories:
|
||||
return True
|
||||
except StopIteration:
|
||||
return False
|
||||
|
||||
|
||||
def load_from_file(self, infile):
|
||||
"""Load folder selection from ``infile``.
|
||||
|
||||
|
||||
:param file infile: path or file pointer to XML generated through :meth:`save_to_file`
|
||||
"""
|
||||
try:
|
||||
@@ -222,10 +222,10 @@ class Directories:
|
||||
path = attrib['path']
|
||||
state = attrib['value']
|
||||
self.states[Path(path)] = int(state)
|
||||
|
||||
|
||||
def save_to_file(self, outfile):
|
||||
"""Save folder selection as XML to ``outfile``.
|
||||
|
||||
|
||||
:param file outfile: path or file pointer to XML file to save to.
|
||||
"""
|
||||
with FileOrPath(outfile, 'wb') as fp:
|
||||
@@ -239,10 +239,10 @@ class Directories:
|
||||
state_node.set('value', str(state))
|
||||
tree = ET.ElementTree(root)
|
||||
tree.write(fp, encoding='utf-8')
|
||||
|
||||
|
||||
def set_state(self, path, state):
|
||||
"""Set the state of folder at ``path``.
|
||||
|
||||
|
||||
:param Path path: path of the target folder
|
||||
:param state: state to set folder to
|
||||
:type state: :class:`DirectoryState`
|
||||
@@ -253,4 +253,4 @@ class Directories:
|
||||
if path.is_parent_of(iter_path):
|
||||
del self.states[iter_path]
|
||||
self.states[path] = state
|
||||
|
||||
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
# Created By: Virgil Dupras
|
||||
# Created On: 2006/01/29
|
||||
# Copyright 2014 Hardcoded Software (http://www.hardcoded.net)
|
||||
#
|
||||
# This software is licensed under the "BSD" License as described in the "LICENSE" file,
|
||||
# which should be included with this package. The terms are also available at
|
||||
#
|
||||
# This software is licensed under the "BSD" License as described in the "LICENSE" file,
|
||||
# which should be included with this package. The terms are also available at
|
||||
# http://www.hardcoded.net/licenses/bsd_license
|
||||
|
||||
import difflib
|
||||
@@ -15,7 +15,7 @@ from unicodedata import normalize
|
||||
|
||||
from hscommon.util import flatten, multi_replace
|
||||
from hscommon.trans import tr
|
||||
from jobprogress import job
|
||||
from hscommon.jobprogress import job
|
||||
|
||||
(WEIGHT_WORDS,
|
||||
MATCH_SIMILAR_WORDS,
|
||||
@@ -45,7 +45,7 @@ def unpack_fields(fields):
|
||||
|
||||
def compare(first, second, flags=()):
|
||||
"""Returns the % of words that match between ``first`` and ``second``
|
||||
|
||||
|
||||
The result is a ``int`` in the range 0..100.
|
||||
``first`` and ``second`` can be either a string or a list (of words).
|
||||
"""
|
||||
@@ -53,7 +53,7 @@ def compare(first, second, flags=()):
|
||||
return 0
|
||||
if any(isinstance(element, list) for element in first):
|
||||
return compare_fields(first, second, flags)
|
||||
second = second[:] #We must use a copy of second because we remove items from it
|
||||
second = second[:] #We must use a copy of second because we remove items from it
|
||||
match_similar = MATCH_SIMILAR_WORDS in flags
|
||||
weight_words = WEIGHT_WORDS in flags
|
||||
joined = first + second
|
||||
@@ -77,9 +77,9 @@ def compare(first, second, flags=()):
|
||||
|
||||
def compare_fields(first, second, flags=()):
|
||||
"""Returns the score for the lowest matching :ref:`fields`.
|
||||
|
||||
|
||||
``first`` and ``second`` must be lists of lists of string. Each sub-list is then compared with
|
||||
:func:`compare`.
|
||||
:func:`compare`.
|
||||
"""
|
||||
if len(first) != len(second):
|
||||
return 0
|
||||
@@ -104,10 +104,10 @@ def compare_fields(first, second, flags=()):
|
||||
|
||||
def build_word_dict(objects, j=job.nulljob):
|
||||
"""Returns a dict of objects mapped by their words.
|
||||
|
||||
|
||||
objects must have a ``words`` attribute being a list of strings or a list of lists of strings
|
||||
(:ref:`fields`).
|
||||
|
||||
|
||||
The result will be a dict with words as keys, lists of objects as values.
|
||||
"""
|
||||
result = defaultdict(set)
|
||||
@@ -118,7 +118,7 @@ def build_word_dict(objects, j=job.nulljob):
|
||||
|
||||
def merge_similar_words(word_dict):
|
||||
"""Take all keys in ``word_dict`` that are similar, and merge them together.
|
||||
|
||||
|
||||
``word_dict`` has been built with :func:`build_word_dict`. Similarity is computed with Python's
|
||||
``difflib.get_close_matches()``, which computes the number of edits that are necessary to make
|
||||
a word equal to the other.
|
||||
@@ -138,9 +138,9 @@ def merge_similar_words(word_dict):
|
||||
|
||||
def reduce_common_words(word_dict, threshold):
|
||||
"""Remove all objects from ``word_dict`` values where the object count >= ``threshold``
|
||||
|
||||
|
||||
``word_dict`` has been built with :func:`build_word_dict`.
|
||||
|
||||
|
||||
The exception to this removal are the objects where all the words of the object are common.
|
||||
Because if we remove them, we will miss some duplicates!
|
||||
"""
|
||||
@@ -181,17 +181,17 @@ class Match(namedtuple('Match', 'first second percentage')):
|
||||
exact scan methods, such as Contents scans, this will always be 100.
|
||||
"""
|
||||
__slots__ = ()
|
||||
|
||||
|
||||
def get_match(first, second, flags=()):
|
||||
#it is assumed here that first and second both have a "words" attribute
|
||||
percentage = compare(first.words, second.words, flags)
|
||||
return Match(first, second, percentage)
|
||||
|
||||
def getmatches(
|
||||
objects, min_match_percentage=0, match_similar_words=False, weight_words=False,
|
||||
objects, min_match_percentage=0, match_similar_words=False, weight_words=False,
|
||||
no_field_order=False, j=job.nulljob):
|
||||
"""Returns a list of :class:`Match` within ``objects`` after fuzzily matching their words.
|
||||
|
||||
|
||||
:param objects: List of :class:`~core.fs.File` to match.
|
||||
:param int min_match_percentage: minimum % of words that have to match.
|
||||
:param bool match_similar_words: make similar words (see :func:`merge_similar_words`) match.
|
||||
@@ -246,7 +246,7 @@ def getmatches(
|
||||
|
||||
def getmatches_by_contents(files, sizeattr='size', partial=False, j=job.nulljob):
|
||||
"""Returns a list of :class:`Match` within ``files`` if their contents is the same.
|
||||
|
||||
|
||||
:param str sizeattr: attibute name of the :class:`~core.fs.file` that returns the size of the
|
||||
file to use for comparison.
|
||||
:param bool partial: if true, will use the "md5partial" attribute instead of "md5" to compute
|
||||
@@ -278,44 +278,44 @@ class Group:
|
||||
|
||||
This manages match pairs into groups and ensures that all files in the group match to each
|
||||
other.
|
||||
|
||||
|
||||
.. attribute:: ref
|
||||
|
||||
|
||||
The "reference" file, which is the file among the group that isn't going to be deleted.
|
||||
|
||||
|
||||
.. attribute:: ordered
|
||||
|
||||
|
||||
Ordered list of duplicates in the group (including the :attr:`ref`).
|
||||
|
||||
|
||||
.. attribute:: unordered
|
||||
|
||||
|
||||
Set duplicates in the group (including the :attr:`ref`).
|
||||
|
||||
|
||||
.. attribute:: dupes
|
||||
|
||||
|
||||
An ordered list of the group's duplicate, without :attr:`ref`. Equivalent to
|
||||
``ordered[1:]``
|
||||
|
||||
|
||||
.. attribute:: percentage
|
||||
|
||||
|
||||
Average match percentage of match pairs containing :attr:`ref`.
|
||||
"""
|
||||
#---Override
|
||||
def __init__(self):
|
||||
self._clear()
|
||||
|
||||
|
||||
def __contains__(self, item):
|
||||
return item in self.unordered
|
||||
|
||||
|
||||
def __getitem__(self, key):
|
||||
return self.ordered.__getitem__(key)
|
||||
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self.ordered)
|
||||
|
||||
|
||||
def __len__(self):
|
||||
return len(self.ordered)
|
||||
|
||||
|
||||
#---Private
|
||||
def _clear(self):
|
||||
self._percentage = None
|
||||
@@ -324,22 +324,22 @@ class Group:
|
||||
self.candidates = defaultdict(set)
|
||||
self.ordered = []
|
||||
self.unordered = set()
|
||||
|
||||
|
||||
def _get_matches_for_ref(self):
|
||||
if self._matches_for_ref is None:
|
||||
ref = self.ref
|
||||
self._matches_for_ref = [match for match in self.matches if ref in match]
|
||||
return self._matches_for_ref
|
||||
|
||||
|
||||
#---Public
|
||||
def add_match(self, match):
|
||||
"""Adds ``match`` to internal match list and possibly add duplicates to the group.
|
||||
|
||||
|
||||
A duplicate can only be considered as such if it matches all other duplicates in the group.
|
||||
This method registers that pair (A, B) represented in ``match`` as possible candidates and,
|
||||
if A and/or B end up matching every other duplicates in the group, add these duplicates to
|
||||
the group.
|
||||
|
||||
|
||||
:param tuple match: pair of :class:`~core.fs.File` to add
|
||||
"""
|
||||
def add_candidate(item, match):
|
||||
@@ -348,7 +348,7 @@ class Group:
|
||||
if self.unordered <= matches:
|
||||
self.ordered.append(item)
|
||||
self.unordered.add(item)
|
||||
|
||||
|
||||
if match in self.matches:
|
||||
return
|
||||
self.matches.add(match)
|
||||
@@ -359,17 +359,17 @@ class Group:
|
||||
add_candidate(second, first)
|
||||
self._percentage = None
|
||||
self._matches_for_ref = None
|
||||
|
||||
|
||||
def discard_matches(self):
|
||||
"""Remove all recorded matches that didn't result in a duplicate being added to the group.
|
||||
|
||||
|
||||
You can call this after the duplicate scanning process to free a bit of memory.
|
||||
"""
|
||||
discarded = set(m for m in self.matches if not all(obj in self.unordered for obj in [m.first, m.second]))
|
||||
self.matches -= discarded
|
||||
self.candidates = defaultdict(set)
|
||||
return discarded
|
||||
|
||||
|
||||
def get_match_of(self, item):
|
||||
"""Returns the match pair between ``item`` and :attr:`ref`.
|
||||
"""
|
||||
@@ -378,10 +378,10 @@ class Group:
|
||||
for m in self._get_matches_for_ref():
|
||||
if item in m:
|
||||
return m
|
||||
|
||||
|
||||
def prioritize(self, key_func, tie_breaker=None):
|
||||
"""Reorders :attr:`ordered` according to ``key_func``.
|
||||
|
||||
|
||||
:param key_func: Key (f(x)) to be used for sorting
|
||||
:param tie_breaker: function to be used to select the reference position in case the top
|
||||
duplicates have the same key_func() result.
|
||||
@@ -405,7 +405,7 @@ class Group:
|
||||
self.switch_ref(ref)
|
||||
return True
|
||||
return changed
|
||||
|
||||
|
||||
def remove_dupe(self, item, discard_matches=True):
|
||||
try:
|
||||
self.ordered.remove(item)
|
||||
@@ -419,7 +419,7 @@ class Group:
|
||||
self._clear()
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
|
||||
def switch_ref(self, with_dupe):
|
||||
"""Make the :attr:`ref` dupe of the group switch position with ``with_dupe``.
|
||||
"""
|
||||
@@ -433,9 +433,9 @@ class Group:
|
||||
return True
|
||||
except ValueError:
|
||||
return False
|
||||
|
||||
|
||||
dupes = property(lambda self: self[1:])
|
||||
|
||||
|
||||
@property
|
||||
def percentage(self):
|
||||
if self._percentage is None:
|
||||
@@ -445,16 +445,16 @@ class Group:
|
||||
else:
|
||||
self._percentage = 0
|
||||
return self._percentage
|
||||
|
||||
|
||||
@property
|
||||
def ref(self):
|
||||
if self:
|
||||
return self[0]
|
||||
|
||||
|
||||
|
||||
def get_groups(matches, j=job.nulljob):
|
||||
"""Returns a list of :class:`Group` from ``matches``.
|
||||
|
||||
|
||||
Create groups out of match pairs in the smartest way possible.
|
||||
"""
|
||||
matches.sort(key=lambda match: -match.percentage)
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
# Created By: Virgil Dupras
|
||||
# Created On: 2006/02/23
|
||||
# Copyright 2014 Hardcoded Software (http://www.hardcoded.net)
|
||||
#
|
||||
# This software is licensed under the "BSD" License as described in the "LICENSE" file,
|
||||
# which should be included with this package. The terms are also available at
|
||||
#
|
||||
# This software is licensed under the "BSD" License as described in the "LICENSE" file,
|
||||
# which should be included with this package. The terms are also available at
|
||||
# http://www.hardcoded.net/licenses/bsd_license
|
||||
|
||||
import logging
|
||||
@@ -12,7 +12,7 @@ import os
|
||||
import os.path as op
|
||||
from xml.etree import ElementTree as ET
|
||||
|
||||
from jobprogress.job import nulljob
|
||||
from hscommon.jobprogress.job import nulljob
|
||||
from hscommon.conflict import get_conflicted_name
|
||||
from hscommon.util import flatten, nonone, FileOrPath, format_size
|
||||
from hscommon.trans import tr
|
||||
@@ -22,15 +22,15 @@ from .markable import Markable
|
||||
|
||||
class Results(Markable):
|
||||
"""Manages a collection of duplicate :class:`~core.engine.Group`.
|
||||
|
||||
|
||||
This class takes care or marking, sorting and filtering duplicate groups.
|
||||
|
||||
|
||||
.. attribute:: groups
|
||||
|
||||
|
||||
The list of :class:`~core.engine.Group` contained managed by this instance.
|
||||
|
||||
|
||||
.. attribute:: dupes
|
||||
|
||||
|
||||
A list of all duplicates (:class:`~core.fs.File` instances), without ref, contained in the
|
||||
currently managed :attr:`groups`.
|
||||
"""
|
||||
@@ -50,16 +50,16 @@ class Results(Markable):
|
||||
self.app = app
|
||||
self.problems = [] # (dupe, error_msg)
|
||||
self.is_modified = False
|
||||
|
||||
|
||||
def _did_mark(self, dupe):
|
||||
self.__marked_size += dupe.size
|
||||
|
||||
|
||||
def _did_unmark(self, dupe):
|
||||
self.__marked_size -= dupe.size
|
||||
|
||||
|
||||
def _get_markable_count(self):
|
||||
return self.__total_count
|
||||
|
||||
|
||||
def _is_markable(self, dupe):
|
||||
if dupe.is_ref:
|
||||
return False
|
||||
@@ -71,25 +71,25 @@ class Results(Markable):
|
||||
if self.__filtered_dupes and dupe not in self.__filtered_dupes:
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
def mark_all(self):
|
||||
if self.__filters:
|
||||
self.mark_multiple(self.__filtered_dupes)
|
||||
else:
|
||||
Markable.mark_all(self)
|
||||
|
||||
|
||||
def mark_invert(self):
|
||||
if self.__filters:
|
||||
self.mark_toggle_multiple(self.__filtered_dupes)
|
||||
else:
|
||||
Markable.mark_invert(self)
|
||||
|
||||
|
||||
def mark_none(self):
|
||||
if self.__filters:
|
||||
self.unmark_multiple(self.__filtered_dupes)
|
||||
else:
|
||||
Markable.mark_none(self)
|
||||
|
||||
|
||||
#---Private
|
||||
def __get_dupe_list(self):
|
||||
if self.__dupes is None:
|
||||
@@ -103,13 +103,13 @@ class Results(Markable):
|
||||
if sd:
|
||||
self.sort_dupes(sd[0], sd[1], sd[2])
|
||||
return self.__dupes
|
||||
|
||||
|
||||
def __get_groups(self):
|
||||
if self.__filtered_groups is None:
|
||||
return self.__groups
|
||||
else:
|
||||
return self.__filtered_groups
|
||||
|
||||
|
||||
def __get_stat_line(self):
|
||||
if self.__filtered_dupes is None:
|
||||
mark_count = self.mark_count
|
||||
@@ -132,7 +132,7 @@ class Results(Markable):
|
||||
if self.__filters:
|
||||
result += tr(" filter: %s") % ' --> '.join(self.__filters)
|
||||
return result
|
||||
|
||||
|
||||
def __recalculate_stats(self):
|
||||
self.__total_size = 0
|
||||
self.__total_count = 0
|
||||
@@ -140,7 +140,7 @@ class Results(Markable):
|
||||
markable = [dupe for dupe in group.dupes if self._is_markable(dupe)]
|
||||
self.__total_count += len(markable)
|
||||
self.__total_size += sum(dupe.size for dupe in markable)
|
||||
|
||||
|
||||
def __set_groups(self, new_groups):
|
||||
self.mark_none()
|
||||
self.__groups = new_groups
|
||||
@@ -155,18 +155,18 @@ class Results(Markable):
|
||||
self.apply_filter(None)
|
||||
for filter_str in old_filters:
|
||||
self.apply_filter(filter_str)
|
||||
|
||||
|
||||
#---Public
|
||||
def apply_filter(self, filter_str):
|
||||
"""Applies a filter ``filter_str`` to :attr:`groups`
|
||||
|
||||
|
||||
When you apply the filter, only dupes with the filename matching ``filter_str`` will be in
|
||||
in the results. To cancel the filter, just call apply_filter with ``filter_str`` to None,
|
||||
in the results. To cancel the filter, just call apply_filter with ``filter_str`` to None,
|
||||
and the results will go back to normal.
|
||||
|
||||
If call apply_filter on a filtered results, the filter will be applied
|
||||
|
||||
If call apply_filter on a filtered results, the filter will be applied
|
||||
*on the filtered results*.
|
||||
|
||||
|
||||
:param str filter_str: a string containing a regexp to filter dupes with.
|
||||
"""
|
||||
if not filter_str:
|
||||
@@ -193,7 +193,7 @@ class Results(Markable):
|
||||
if sd:
|
||||
self.sort_groups(sd[0], sd[1])
|
||||
self.__dupes = None
|
||||
|
||||
|
||||
def get_group_of_duplicate(self, dupe):
|
||||
"""Returns :class:`~core.engine.Group` in which ``dupe`` belongs.
|
||||
"""
|
||||
@@ -201,12 +201,12 @@ class Results(Markable):
|
||||
return self.__group_of_duplicate[dupe]
|
||||
except (TypeError, KeyError):
|
||||
return None
|
||||
|
||||
|
||||
is_markable = _is_markable
|
||||
|
||||
|
||||
def load_from_xml(self, infile, get_file, j=nulljob):
|
||||
"""Load results from ``infile``.
|
||||
|
||||
|
||||
:param infile: a file or path pointing to an XML file created with :meth:`save_to_xml`.
|
||||
:param get_file: a function f(path) returning a :class:`~core.fs.File` wrapping the path.
|
||||
:param j: A :ref:`job progress instance <jobs>`.
|
||||
@@ -217,7 +217,7 @@ class Results(Markable):
|
||||
for other_file in other_files:
|
||||
group.add_match(engine.get_match(ref_file, other_file))
|
||||
do_match(other_files[0], other_files[1:], group)
|
||||
|
||||
|
||||
self.apply_filter(None)
|
||||
try:
|
||||
root = ET.parse(infile).getroot()
|
||||
@@ -255,13 +255,13 @@ class Results(Markable):
|
||||
do_match(dupes[0], dupes[1:], group)
|
||||
group.prioritize(lambda x: dupes.index(x))
|
||||
if len(group):
|
||||
groups.append(group)
|
||||
groups.append(group)
|
||||
j.add_progress()
|
||||
self.groups = groups
|
||||
for dupe_file in marked:
|
||||
self.mark(dupe_file)
|
||||
self.is_modified = False
|
||||
|
||||
|
||||
def make_ref(self, dupe):
|
||||
"""Make ``dupe`` take the :attr:`~core.engine.Group.ref` position of its group.
|
||||
"""
|
||||
@@ -279,13 +279,13 @@ class Results(Markable):
|
||||
self.__dupes = None
|
||||
self.is_modified = True
|
||||
return True
|
||||
|
||||
|
||||
def perform_on_marked(self, func, remove_from_results):
|
||||
"""Performs ``func`` on all marked dupes.
|
||||
|
||||
|
||||
If an ``EnvironmentError`` is raised during the call, the problematic dupe is added to
|
||||
self.problems.
|
||||
|
||||
|
||||
:param bool remove_from_results: If true, dupes which had ``func`` applied and didn't cause
|
||||
any problem.
|
||||
"""
|
||||
@@ -303,10 +303,10 @@ class Results(Markable):
|
||||
self.mark_none()
|
||||
for dupe, _ in self.problems:
|
||||
self.mark(dupe)
|
||||
|
||||
|
||||
def remove_duplicates(self, dupes):
|
||||
"""Remove ``dupes`` from their respective :class:`~core.engine.Group`.
|
||||
|
||||
|
||||
Also, remove the group from :attr:`groups` if it ends up empty.
|
||||
"""
|
||||
affected_groups = set()
|
||||
@@ -331,10 +331,10 @@ class Results(Markable):
|
||||
group.discard_matches()
|
||||
self.__dupes = None
|
||||
self.is_modified = bool(self.__groups)
|
||||
|
||||
|
||||
def save_to_xml(self, outfile):
|
||||
"""Save results to ``outfile`` in XML.
|
||||
|
||||
|
||||
:param outfile: file object or path.
|
||||
"""
|
||||
self.apply_filter(None)
|
||||
@@ -362,11 +362,11 @@ class Results(Markable):
|
||||
match_elem.set('second', str(dupe2index[match.second]))
|
||||
match_elem.set('percentage', str(int(match.percentage)))
|
||||
tree = ET.ElementTree(root)
|
||||
|
||||
|
||||
def do_write(outfile):
|
||||
with FileOrPath(outfile, 'wb') as fp:
|
||||
tree.write(fp, encoding='utf-8')
|
||||
|
||||
|
||||
try:
|
||||
do_write(outfile)
|
||||
except IOError as e:
|
||||
@@ -381,10 +381,10 @@ class Results(Markable):
|
||||
else:
|
||||
raise
|
||||
self.is_modified = False
|
||||
|
||||
|
||||
def sort_dupes(self, key, asc=True, delta=False):
|
||||
"""Sort :attr:`dupes` according to ``key``.
|
||||
|
||||
|
||||
:param str key: key attribute name to sort with.
|
||||
:param bool asc: If false, sorting is reversed.
|
||||
:param bool delta: If true, sorting occurs using :ref:`delta values <deltavalues>`.
|
||||
@@ -394,19 +394,19 @@ class Results(Markable):
|
||||
keyfunc = lambda d: self.app._get_dupe_sort_key(d, lambda: self.get_group_of_duplicate(d), key, delta)
|
||||
self.__dupes.sort(key=keyfunc, reverse=not asc)
|
||||
self.__dupes_sort_descriptor = (key,asc,delta)
|
||||
|
||||
|
||||
def sort_groups(self, key, asc=True):
|
||||
"""Sort :attr:`groups` according to ``key``.
|
||||
|
||||
|
||||
The :attr:`~core.engine.Group.ref` of each group is used to extract values for sorting.
|
||||
|
||||
|
||||
:param str key: key attribute name to sort with.
|
||||
:param bool asc: If false, sorting is reversed.
|
||||
"""
|
||||
keyfunc = lambda g: self.app._get_group_sort_key(g, key)
|
||||
self.groups.sort(key=keyfunc, reverse=not asc)
|
||||
self.__groups_sort_descriptor = (key,asc)
|
||||
|
||||
|
||||
#---Properties
|
||||
dupes = property(__get_dupe_list)
|
||||
groups = property(__get_groups, __set_groups)
|
||||
|
||||
@@ -1,16 +1,16 @@
|
||||
# Created By: Virgil Dupras
|
||||
# Created On: 2006/03/03
|
||||
# Copyright 2014 Hardcoded Software (http://www.hardcoded.net)
|
||||
#
|
||||
# This software is licensed under the "BSD" License as described in the "LICENSE" file,
|
||||
# which should be included with this package. The terms are also available at
|
||||
#
|
||||
# This software is licensed under the "BSD" License as described in the "LICENSE" file,
|
||||
# which should be included with this package. The terms are also available at
|
||||
# http://www.hardcoded.net/licenses/bsd_license
|
||||
|
||||
import logging
|
||||
import re
|
||||
import os.path as op
|
||||
|
||||
from jobprogress import job
|
||||
from hscommon.jobprogress import job
|
||||
from hscommon.util import dedupe, rem_file_ext, get_file_ext
|
||||
from hscommon.trans import tr
|
||||
|
||||
@@ -29,7 +29,7 @@ class ScanType:
|
||||
Folders = 4
|
||||
Contents = 5
|
||||
ContentsAudio = 6
|
||||
|
||||
|
||||
#PE
|
||||
FuzzyBlock = 10
|
||||
ExifTimestamp = 11
|
||||
@@ -72,7 +72,7 @@ class Scanner:
|
||||
def __init__(self):
|
||||
self.ignore_list = IgnoreList()
|
||||
self.discarded_file_count = 0
|
||||
|
||||
|
||||
def _getmatches(self, files, j):
|
||||
if self.size_threshold:
|
||||
j = j.start_subjob([2, 8])
|
||||
@@ -100,11 +100,11 @@ class Scanner:
|
||||
logging.debug("Reading metadata of {}".format(str(f.path)))
|
||||
f.words = func(f)
|
||||
return engine.getmatches(files, j=j, **kw)
|
||||
|
||||
|
||||
@staticmethod
|
||||
def _key_func(dupe):
|
||||
return -dupe.size
|
||||
|
||||
|
||||
@staticmethod
|
||||
def _tie_breaker(ref, dupe):
|
||||
refname = rem_file_ext(ref.name).lower()
|
||||
@@ -118,7 +118,7 @@ class Scanner:
|
||||
if is_same_with_digit(refname, dupename):
|
||||
return True
|
||||
return len(dupe.path) > len(ref.path)
|
||||
|
||||
|
||||
def get_dupe_groups(self, files, j=job.nulljob):
|
||||
j = j.start_subjob([8, 2])
|
||||
for f in (f for f in files if not hasattr(f, 'is_ref')):
|
||||
@@ -152,7 +152,7 @@ class Scanner:
|
||||
if self.ignore_list:
|
||||
j = j.start_subjob(2)
|
||||
iter_matches = j.iter_with_progress(matches, tr("Processed %d/%d matches against the ignore list"))
|
||||
matches = [m for m in iter_matches
|
||||
matches = [m for m in iter_matches
|
||||
if not self.ignore_list.AreIgnored(str(m.first.path), str(m.second.path))]
|
||||
logging.info('Grouping matches')
|
||||
groups = engine.get_groups(matches, j)
|
||||
@@ -177,7 +177,7 @@ class Scanner:
|
||||
for g in groups:
|
||||
g.prioritize(self._key_func, self._tie_breaker)
|
||||
return groups
|
||||
|
||||
|
||||
match_similar_words = False
|
||||
min_match_percentage = 80
|
||||
mix_file_kind = True
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
# Created By: Virgil Dupras
|
||||
# Created On: 2007-06-23
|
||||
# Copyright 2014 Hardcoded Software (http://www.hardcoded.net)
|
||||
#
|
||||
# This software is licensed under the "BSD" License as described in the "LICENSE" file,
|
||||
# which should be included with this package. The terms are also available at
|
||||
#
|
||||
# This software is licensed under the "BSD" License as described in the "LICENSE" file,
|
||||
# which should be included with this package. The terms are also available at
|
||||
# http://www.hardcoded.net/licenses/bsd_license
|
||||
|
||||
import os
|
||||
@@ -15,7 +15,7 @@ from hscommon.path import Path
|
||||
import hscommon.conflict
|
||||
import hscommon.util
|
||||
from hscommon.testutil import CallLogger, eq_, log_calls
|
||||
from jobprogress.job import Job
|
||||
from hscommon.jobprogress.job import Job
|
||||
|
||||
from .base import DupeGuru, TestApp
|
||||
from .results_test import GetTestGroups
|
||||
@@ -36,7 +36,7 @@ class TestCaseDupeGuru:
|
||||
assert call['filter_str'] is None
|
||||
call = dgapp.results.apply_filter.calls[1]
|
||||
eq_('foo', call['filter_str'])
|
||||
|
||||
|
||||
def test_apply_filter_escapes_regexp(self, monkeypatch):
|
||||
dgapp = TestApp().app
|
||||
monkeypatch.setattr(dgapp.results, 'apply_filter', log_calls(dgapp.results.apply_filter))
|
||||
@@ -50,7 +50,7 @@ class TestCaseDupeGuru:
|
||||
dgapp.apply_filter('(abc)')
|
||||
call = dgapp.results.apply_filter.calls[5]
|
||||
eq_('(abc)', call['filter_str'])
|
||||
|
||||
|
||||
def test_copy_or_move(self, tmpdir, monkeypatch):
|
||||
# The goal here is just to have a test for a previous blowup I had. I know my test coverage
|
||||
# for this unit is pathetic. What's done is done. My approach now is to add tests for
|
||||
@@ -69,7 +69,7 @@ class TestCaseDupeGuru:
|
||||
call = hscommon.conflict.smart_copy.calls[0]
|
||||
eq_(call['dest_path'], op.join('some_destination', 'foo'))
|
||||
eq_(call['source_path'], f.path)
|
||||
|
||||
|
||||
def test_copy_or_move_clean_empty_dirs(self, tmpdir, monkeypatch):
|
||||
tmppath = Path(str(tmpdir))
|
||||
sourcepath = tmppath['source']
|
||||
@@ -83,13 +83,13 @@ class TestCaseDupeGuru:
|
||||
calls = app.clean_empty_dirs.calls
|
||||
eq_(1, len(calls))
|
||||
eq_(sourcepath, calls[0]['path'])
|
||||
|
||||
|
||||
def test_Scan_with_objects_evaluating_to_false(self):
|
||||
class FakeFile(fs.File):
|
||||
def __bool__(self):
|
||||
return False
|
||||
|
||||
|
||||
|
||||
|
||||
# At some point, any() was used in a wrong way that made Scan() wrongly return 1
|
||||
app = TestApp().app
|
||||
f1, f2 = [FakeFile('foo') for i in range(2)]
|
||||
@@ -97,7 +97,7 @@ class TestCaseDupeGuru:
|
||||
assert not (bool(f1) and bool(f2))
|
||||
add_fake_files_to_directories(app.directories, [f1, f2])
|
||||
app.start_scanning() # no exception
|
||||
|
||||
|
||||
@mark.skipif("not hasattr(os, 'link')")
|
||||
def test_ignore_hardlink_matches(self, tmpdir):
|
||||
# If the ignore_hardlink_matches option is set, don't match files hardlinking to the same
|
||||
@@ -111,7 +111,7 @@ class TestCaseDupeGuru:
|
||||
app.options['ignore_hardlink_matches'] = True
|
||||
app.start_scanning()
|
||||
eq_(len(app.results.groups), 0)
|
||||
|
||||
|
||||
def test_rename_when_nothing_is_selected(self):
|
||||
# Issue #140
|
||||
# It's possible that rename operation has its selected row swept off from under it, thus
|
||||
@@ -127,11 +127,11 @@ class TestCaseDupeGuru_clean_empty_dirs:
|
||||
# XXX This monkeypatch is temporary. will be fixed in a better monkeypatcher.
|
||||
monkeypatch.setattr(app, 'delete_if_empty', hscommon.util.delete_if_empty)
|
||||
self.app = TestApp().app
|
||||
|
||||
|
||||
def test_option_off(self, do_setup):
|
||||
self.app.clean_empty_dirs(Path('/foo/bar'))
|
||||
eq_(0, len(hscommon.util.delete_if_empty.calls))
|
||||
|
||||
|
||||
def test_option_on(self, do_setup):
|
||||
self.app.options['clean_empty_dirs'] = True
|
||||
self.app.clean_empty_dirs(Path('/foo/bar'))
|
||||
@@ -139,13 +139,13 @@ class TestCaseDupeGuru_clean_empty_dirs:
|
||||
eq_(1, len(calls))
|
||||
eq_(Path('/foo/bar'), calls[0]['path'])
|
||||
eq_(['.DS_Store'], calls[0]['files_to_delete'])
|
||||
|
||||
|
||||
def test_recurse_up(self, do_setup, monkeypatch):
|
||||
# delete_if_empty must be recursively called up in the path until it returns False
|
||||
@log_calls
|
||||
def mock_delete_if_empty(path, files_to_delete=[]):
|
||||
return len(path) > 1
|
||||
|
||||
|
||||
monkeypatch.setattr(hscommon.util, 'delete_if_empty', mock_delete_if_empty)
|
||||
# XXX This monkeypatch is temporary. will be fixed in a better monkeypatcher.
|
||||
monkeypatch.setattr(app, 'delete_if_empty', mock_delete_if_empty)
|
||||
@@ -156,7 +156,7 @@ class TestCaseDupeGuru_clean_empty_dirs:
|
||||
eq_(Path('not-empty/empty/empty'), calls[0]['path'])
|
||||
eq_(Path('not-empty/empty'), calls[1]['path'])
|
||||
eq_(Path('not-empty'), calls[2]['path'])
|
||||
|
||||
|
||||
|
||||
class TestCaseDupeGuruWithResults:
|
||||
def pytest_funcarg__do_setup(self, request):
|
||||
@@ -173,7 +173,7 @@ class TestCaseDupeGuruWithResults:
|
||||
tmppath['foo'].mkdir()
|
||||
tmppath['bar'].mkdir()
|
||||
self.app.directories.add_path(tmppath)
|
||||
|
||||
|
||||
def test_GetObjects(self, do_setup):
|
||||
objects = self.objects
|
||||
groups = self.groups
|
||||
@@ -186,7 +186,7 @@ class TestCaseDupeGuruWithResults:
|
||||
r = self.rtable[4]
|
||||
assert r._group is groups[1]
|
||||
assert r._dupe is objects[4]
|
||||
|
||||
|
||||
def test_GetObjects_after_sort(self, do_setup):
|
||||
objects = self.objects
|
||||
groups = self.groups[:] # we need an un-sorted reference
|
||||
@@ -194,14 +194,14 @@ class TestCaseDupeGuruWithResults:
|
||||
r = self.rtable[1]
|
||||
assert r._group is groups[1]
|
||||
assert r._dupe is objects[4]
|
||||
|
||||
|
||||
def test_selected_result_node_paths_after_deletion(self, do_setup):
|
||||
# cases where the selected dupes aren't there are correctly handled
|
||||
self.rtable.select([1, 2, 3])
|
||||
self.app.remove_selected()
|
||||
# The first 2 dupes have been removed. The 3rd one is a ref. it stays there, in first pos.
|
||||
eq_(self.rtable.selected_indexes, [1]) # no exception
|
||||
|
||||
|
||||
def test_selectResultNodePaths(self, do_setup):
|
||||
app = self.app
|
||||
objects = self.objects
|
||||
@@ -209,7 +209,7 @@ class TestCaseDupeGuruWithResults:
|
||||
eq_(len(app.selected_dupes), 2)
|
||||
assert app.selected_dupes[0] is objects[1]
|
||||
assert app.selected_dupes[1] is objects[2]
|
||||
|
||||
|
||||
def test_selectResultNodePaths_with_ref(self, do_setup):
|
||||
app = self.app
|
||||
objects = self.objects
|
||||
@@ -218,26 +218,26 @@ class TestCaseDupeGuruWithResults:
|
||||
assert app.selected_dupes[0] is objects[1]
|
||||
assert app.selected_dupes[1] is objects[2]
|
||||
assert app.selected_dupes[2] is self.groups[1].ref
|
||||
|
||||
|
||||
def test_selectResultNodePaths_after_sort(self, do_setup):
|
||||
app = self.app
|
||||
objects = self.objects
|
||||
groups = self.groups[:] #To keep the old order in memory
|
||||
self.rtable.sort('name', False) #0
|
||||
self.rtable.sort('name', False) #0
|
||||
#Now, the group order is supposed to be reversed
|
||||
self.rtable.select([1, 2, 3])
|
||||
eq_(len(app.selected_dupes), 3)
|
||||
assert app.selected_dupes[0] is objects[4]
|
||||
assert app.selected_dupes[1] is groups[0].ref
|
||||
assert app.selected_dupes[2] is objects[1]
|
||||
|
||||
|
||||
def test_selected_powermarker_node_paths(self, do_setup):
|
||||
# app.selected_dupes is correctly converted into paths
|
||||
self.rtable.power_marker = True
|
||||
self.rtable.select([0, 1, 2])
|
||||
self.rtable.power_marker = False
|
||||
eq_(self.rtable.selected_indexes, [1, 2, 4])
|
||||
|
||||
|
||||
def test_selected_powermarker_node_paths_after_deletion(self, do_setup):
|
||||
# cases where the selected dupes aren't there are correctly handled
|
||||
app = self.app
|
||||
@@ -245,7 +245,7 @@ class TestCaseDupeGuruWithResults:
|
||||
self.rtable.select([0, 1, 2])
|
||||
app.remove_selected()
|
||||
eq_(self.rtable.selected_indexes, []) # no exception
|
||||
|
||||
|
||||
def test_selectPowerMarkerRows_after_sort(self, do_setup):
|
||||
app = self.app
|
||||
objects = self.objects
|
||||
@@ -256,7 +256,7 @@ class TestCaseDupeGuruWithResults:
|
||||
assert app.selected_dupes[0] is objects[4]
|
||||
assert app.selected_dupes[1] is objects[2]
|
||||
assert app.selected_dupes[2] is objects[1]
|
||||
|
||||
|
||||
def test_toggle_selected_mark_state(self, do_setup):
|
||||
app = self.app
|
||||
objects = self.objects
|
||||
@@ -270,7 +270,7 @@ class TestCaseDupeGuruWithResults:
|
||||
assert not app.results.is_marked(objects[2])
|
||||
assert not app.results.is_marked(objects[3])
|
||||
assert app.results.is_marked(objects[4])
|
||||
|
||||
|
||||
def test_toggle_selected_mark_state_with_different_selected_state(self, do_setup):
|
||||
# When marking selected dupes with a heterogenous selection, mark all selected dupes. When
|
||||
# it's homogenous, simply toggle.
|
||||
@@ -285,7 +285,7 @@ class TestCaseDupeGuruWithResults:
|
||||
eq_(app.results.mark_count, 2)
|
||||
app.toggle_selected_mark_state()
|
||||
eq_(app.results.mark_count, 0)
|
||||
|
||||
|
||||
def test_refreshDetailsWithSelected(self, do_setup):
|
||||
self.rtable.select([1, 4])
|
||||
eq_(self.dpanel.row(0), ('Filename', 'bar bleh', 'foo bar'))
|
||||
@@ -293,7 +293,7 @@ class TestCaseDupeGuruWithResults:
|
||||
self.rtable.select([])
|
||||
eq_(self.dpanel.row(0), ('Filename', '---', '---'))
|
||||
self.dpanel.view.check_gui_calls(['refresh'])
|
||||
|
||||
|
||||
def test_makeSelectedReference(self, do_setup):
|
||||
app = self.app
|
||||
objects = self.objects
|
||||
@@ -302,7 +302,7 @@ class TestCaseDupeGuruWithResults:
|
||||
app.make_selected_reference()
|
||||
assert groups[0].ref is objects[1]
|
||||
assert groups[1].ref is objects[4]
|
||||
|
||||
|
||||
def test_makeSelectedReference_by_selecting_two_dupes_in_the_same_group(self, do_setup):
|
||||
app = self.app
|
||||
objects = self.objects
|
||||
@@ -312,7 +312,7 @@ class TestCaseDupeGuruWithResults:
|
||||
app.make_selected_reference()
|
||||
assert groups[0].ref is objects[1]
|
||||
assert groups[1].ref is objects[4]
|
||||
|
||||
|
||||
def test_removeSelected(self, do_setup):
|
||||
app = self.app
|
||||
self.rtable.select([1, 4])
|
||||
@@ -320,7 +320,7 @@ class TestCaseDupeGuruWithResults:
|
||||
eq_(len(app.results.dupes), 1) # the first path is now selected
|
||||
app.remove_selected()
|
||||
eq_(len(app.results.dupes), 0)
|
||||
|
||||
|
||||
def test_addDirectory_simple(self, do_setup):
|
||||
# There's already a directory in self.app, so adding another once makes 2 of em
|
||||
app = self.app
|
||||
@@ -328,7 +328,7 @@ class TestCaseDupeGuruWithResults:
|
||||
otherpath = Path(op.dirname(__file__))
|
||||
app.add_directory(otherpath)
|
||||
eq_(len(app.directories), 2)
|
||||
|
||||
|
||||
def test_addDirectory_already_there(self, do_setup):
|
||||
app = self.app
|
||||
otherpath = Path(op.dirname(__file__))
|
||||
@@ -336,13 +336,13 @@ class TestCaseDupeGuruWithResults:
|
||||
app.add_directory(otherpath)
|
||||
eq_(len(app.view.messages), 1)
|
||||
assert "already" in app.view.messages[0]
|
||||
|
||||
|
||||
def test_addDirectory_does_not_exist(self, do_setup):
|
||||
app = self.app
|
||||
app.add_directory('/does_not_exist')
|
||||
eq_(len(app.view.messages), 1)
|
||||
assert "exist" in app.view.messages[0]
|
||||
|
||||
|
||||
def test_ignore(self, do_setup):
|
||||
app = self.app
|
||||
self.rtable.select([4]) #The dupe of the second, 2 sized group
|
||||
@@ -352,7 +352,7 @@ class TestCaseDupeGuruWithResults:
|
||||
app.add_selected_to_ignore_list()
|
||||
#BOTH the ref and the other dupe should have been added
|
||||
eq_(len(app.scanner.ignore_list), 3)
|
||||
|
||||
|
||||
def test_purgeIgnoreList(self, do_setup, tmpdir):
|
||||
app = self.app
|
||||
p1 = str(tmpdir.join('file1'))
|
||||
@@ -367,19 +367,19 @@ class TestCaseDupeGuruWithResults:
|
||||
eq_(1,len(app.scanner.ignore_list))
|
||||
assert app.scanner.ignore_list.AreIgnored(p1,p2)
|
||||
assert not app.scanner.ignore_list.AreIgnored(dne,p1)
|
||||
|
||||
|
||||
def test_only_unicode_is_added_to_ignore_list(self, do_setup):
|
||||
def FakeIgnore(first,second):
|
||||
if not isinstance(first,str):
|
||||
self.fail()
|
||||
if not isinstance(second,str):
|
||||
self.fail()
|
||||
|
||||
|
||||
app = self.app
|
||||
app.scanner.ignore_list.Ignore = FakeIgnore
|
||||
self.rtable.select([4])
|
||||
app.add_selected_to_ignore_list()
|
||||
|
||||
|
||||
def test_cancel_scan_with_previous_results(self, do_setup):
|
||||
# When doing a scan with results being present prior to the scan, correctly invalidate the
|
||||
# results table.
|
||||
@@ -388,7 +388,7 @@ class TestCaseDupeGuruWithResults:
|
||||
add_fake_files_to_directories(app.directories, self.objects) # We want the scan to at least start
|
||||
app.start_scanning() # will be cancelled immediately
|
||||
eq_(len(self.rtable), 0)
|
||||
|
||||
|
||||
def test_selected_dupes_after_removal(self, do_setup):
|
||||
# Purge the app's `selected_dupes` attribute when removing dupes, or else it might cause a
|
||||
# crash later with None refs.
|
||||
@@ -398,7 +398,7 @@ class TestCaseDupeGuruWithResults:
|
||||
app.remove_marked()
|
||||
eq_(len(self.rtable), 0)
|
||||
eq_(app.selected_dupes, [])
|
||||
|
||||
|
||||
def test_dont_crash_on_delta_powermarker_dupecount_sort(self, do_setup):
|
||||
# Don't crash when sorting by dupe count or percentage while delta+powermarker are enabled.
|
||||
# Ref #238
|
||||
@@ -410,7 +410,7 @@ class TestCaseDupeGuruWithResults:
|
||||
# don't crash
|
||||
self.rtable.sort('percentage', False)
|
||||
# don't crash
|
||||
|
||||
|
||||
|
||||
class TestCaseDupeGuru_renameSelected:
|
||||
def pytest_funcarg__do_setup(self, request):
|
||||
@@ -437,7 +437,7 @@ class TestCaseDupeGuru_renameSelected:
|
||||
self.groups = groups
|
||||
self.p = p
|
||||
self.files = files
|
||||
|
||||
|
||||
def test_simple(self, do_setup):
|
||||
app = self.app
|
||||
g = self.groups[0]
|
||||
@@ -447,7 +447,7 @@ class TestCaseDupeGuru_renameSelected:
|
||||
assert 'renamed' in names
|
||||
assert 'foo bar 2' not in names
|
||||
eq_(g.dupes[0].name, 'renamed')
|
||||
|
||||
|
||||
def test_none_selected(self, do_setup, monkeypatch):
|
||||
app = self.app
|
||||
g = self.groups[0]
|
||||
@@ -460,7 +460,7 @@ class TestCaseDupeGuru_renameSelected:
|
||||
assert 'renamed' not in names
|
||||
assert 'foo bar 2' in names
|
||||
eq_(g.dupes[0].name, 'foo bar 2')
|
||||
|
||||
|
||||
def test_name_already_exists(self, do_setup, monkeypatch):
|
||||
app = self.app
|
||||
g = self.groups[0]
|
||||
@@ -473,7 +473,7 @@ class TestCaseDupeGuru_renameSelected:
|
||||
assert 'foo bar 1' in names
|
||||
assert 'foo bar 2' in names
|
||||
eq_(g.dupes[0].name, 'foo bar 2')
|
||||
|
||||
|
||||
|
||||
class TestAppWithDirectoriesInTree:
|
||||
def pytest_funcarg__do_setup(self, request):
|
||||
@@ -487,7 +487,7 @@ class TestAppWithDirectoriesInTree:
|
||||
self.dtree = app.dtree
|
||||
self.dtree.add_directory(p)
|
||||
self.dtree.view.clear_calls()
|
||||
|
||||
|
||||
def test_set_root_as_ref_makes_subfolders_ref_as_well(self, do_setup):
|
||||
# Setting a node state to something also affect subnodes. These subnodes must be correctly
|
||||
# refreshed.
|
||||
@@ -500,4 +500,4 @@ class TestAppWithDirectoriesInTree:
|
||||
subnode = node[0]
|
||||
eq_(subnode.state, 1)
|
||||
self.dtree.view.check_gui_calls(['refresh_states'])
|
||||
|
||||
|
||||
|
||||
@@ -1,16 +1,16 @@
|
||||
# Created By: Virgil Dupras
|
||||
# Created On: 2011/09/07
|
||||
# Copyright 2014 Hardcoded Software (http://www.hardcoded.net)
|
||||
#
|
||||
# This software is licensed under the "BSD" License as described in the "LICENSE" file,
|
||||
# which should be included with this package. The terms are also available at
|
||||
#
|
||||
# This software is licensed under the "BSD" License as described in the "LICENSE" file,
|
||||
# which should be included with this package. The terms are also available at
|
||||
# http://www.hardcoded.net/licenses/bsd_license
|
||||
|
||||
from hscommon.testutil import TestApp as TestAppBase, eq_, with_app
|
||||
from hscommon.path import Path
|
||||
from hscommon.util import get_file_ext, format_size
|
||||
from hscommon.gui.column import Column
|
||||
from jobprogress.job import nulljob, JobCancelled
|
||||
from hscommon.jobprogress.job import nulljob, JobCancelled
|
||||
|
||||
from .. import engine
|
||||
from .. import prioritize
|
||||
@@ -23,28 +23,28 @@ from ..gui.prioritize_dialog import PrioritizeDialog
|
||||
|
||||
class DupeGuruView:
|
||||
JOB = nulljob
|
||||
|
||||
|
||||
def __init__(self):
|
||||
self.messages = []
|
||||
|
||||
|
||||
def start_job(self, jobid, func, args=()):
|
||||
try:
|
||||
func(self.JOB, *args)
|
||||
except JobCancelled:
|
||||
return
|
||||
|
||||
|
||||
def get_default(self, key_name):
|
||||
return None
|
||||
|
||||
|
||||
def set_default(self, key_name, value):
|
||||
pass
|
||||
|
||||
|
||||
def show_message(self, msg):
|
||||
self.messages.append(msg)
|
||||
|
||||
|
||||
def ask_yes_no(self, prompt):
|
||||
return True # always answer yes
|
||||
|
||||
|
||||
|
||||
class ResultTable(ResultTableBase):
|
||||
COLUMNS = [
|
||||
@@ -55,21 +55,21 @@ class ResultTable(ResultTableBase):
|
||||
Column('extension', 'Kind'),
|
||||
]
|
||||
DELTA_COLUMNS = {'size', }
|
||||
|
||||
|
||||
class DupeGuru(DupeGuruBase):
|
||||
NAME = 'dupeGuru'
|
||||
METADATA_TO_READ = ['size']
|
||||
|
||||
|
||||
def __init__(self):
|
||||
DupeGuruBase.__init__(self, DupeGuruView())
|
||||
self.appdata = '/tmp'
|
||||
|
||||
|
||||
def _prioritization_categories(self):
|
||||
return prioritize.all_categories()
|
||||
|
||||
|
||||
def _create_result_table(self):
|
||||
return ResultTable(self)
|
||||
|
||||
|
||||
|
||||
class NamedObject:
|
||||
def __init__(self, name="foobar", with_words=False, size=1, folder=None):
|
||||
@@ -83,10 +83,10 @@ class NamedObject:
|
||||
if with_words:
|
||||
self.words = getwords(name)
|
||||
self.is_ref = False
|
||||
|
||||
|
||||
def __bool__(self):
|
||||
return False #Make sure that operations are made correctly when the bool value of files is false.
|
||||
|
||||
|
||||
def get_display_info(self, group, delta):
|
||||
size = self.size
|
||||
m = group.get_match_of(self)
|
||||
@@ -99,19 +99,19 @@ class NamedObject:
|
||||
'size': format_size(size, 0, 1, False),
|
||||
'extension': self.extension if hasattr(self, 'extension') else '---',
|
||||
}
|
||||
|
||||
|
||||
@property
|
||||
def path(self):
|
||||
return self._folder[self.name]
|
||||
|
||||
|
||||
@property
|
||||
def folder_path(self):
|
||||
return self.path.parent()
|
||||
|
||||
|
||||
@property
|
||||
def extension(self):
|
||||
return get_file_ext(self.name)
|
||||
|
||||
|
||||
# Returns a group set that looks like that:
|
||||
# "foo bar" (1)
|
||||
# "bar bleh" (1024)
|
||||
@@ -135,7 +135,7 @@ class TestApp(TestAppBase):
|
||||
if hasattr(gui, 'columns'): # tables
|
||||
gui.columns.view = self.make_logger()
|
||||
return gui
|
||||
|
||||
|
||||
TestAppBase.__init__(self)
|
||||
make_gui = self.make_gui
|
||||
self.app = DupeGuru()
|
||||
@@ -153,14 +153,14 @@ class TestApp(TestAppBase):
|
||||
link_gui(self.app.progress_window)
|
||||
link_gui(self.app.progress_window.jobdesc_textfield)
|
||||
link_gui(self.app.progress_window.progressdesc_textfield)
|
||||
|
||||
|
||||
#--- Helpers
|
||||
def select_pri_criterion(self, name):
|
||||
# Select a main prioritize criterion by name instead of by index. Makes tests more
|
||||
# maintainable.
|
||||
index = self.pdialog.category_list.index(name)
|
||||
self.pdialog.category_list.select(index)
|
||||
|
||||
|
||||
def add_pri_criterion(self, name, index):
|
||||
self.select_pri_criterion(name)
|
||||
self.pdialog.criteria_list.select([index])
|
||||
|
||||
@@ -1,14 +1,14 @@
|
||||
# Created By: Virgil Dupras
|
||||
# Created On: 2006/01/29
|
||||
# Copyright 2014 Hardcoded Software (http://www.hardcoded.net)
|
||||
#
|
||||
# This software is licensed under the "BSD" License as described in the "LICENSE" file,
|
||||
# which should be included with this package. The terms are also available at
|
||||
#
|
||||
# This software is licensed under the "BSD" License as described in the "LICENSE" file,
|
||||
# which should be included with this package. The terms are also available at
|
||||
# http://www.hardcoded.net/licenses/bsd_license
|
||||
|
||||
import sys
|
||||
|
||||
from jobprogress import job
|
||||
from hscommon.jobprogress import job
|
||||
from hscommon.util import first
|
||||
from hscommon.testutil import eq_, log_calls
|
||||
|
||||
@@ -48,119 +48,119 @@ class TestCasegetwords:
|
||||
def test_spaces(self):
|
||||
eq_(['a', 'b', 'c', 'd'], getwords("a b c d"))
|
||||
eq_(['a', 'b', 'c', 'd'], getwords(" a b c d "))
|
||||
|
||||
|
||||
def test_splitter_chars(self):
|
||||
eq_(
|
||||
[chr(i) for i in range(ord('a'),ord('z')+1)],
|
||||
getwords("a-b_c&d+e(f)g;h\\i[j]k{l}m:n.o,p<q>r/s?t~u!v@w#x$y*z")
|
||||
)
|
||||
|
||||
|
||||
def test_joiner_chars(self):
|
||||
eq_(["aec"], getwords("a'e\u0301c"))
|
||||
|
||||
|
||||
def test_empty(self):
|
||||
eq_([], getwords(''))
|
||||
|
||||
|
||||
def test_returns_lowercase(self):
|
||||
eq_(['foo', 'bar'], getwords('FOO BAR'))
|
||||
|
||||
|
||||
def test_decompose_unicode(self):
|
||||
eq_(getwords('foo\xe9bar'), ['fooebar'])
|
||||
|
||||
|
||||
|
||||
class TestCasegetfields:
|
||||
def test_simple(self):
|
||||
eq_([['a', 'b'], ['c', 'd', 'e']], getfields('a b - c d e'))
|
||||
|
||||
|
||||
def test_empty(self):
|
||||
eq_([], getfields(''))
|
||||
|
||||
|
||||
def test_cleans_empty_fields(self):
|
||||
expected = [['a', 'bc', 'def']]
|
||||
actual = getfields(' - a bc def')
|
||||
eq_(expected, actual)
|
||||
expected = [['bc', 'def']]
|
||||
|
||||
|
||||
|
||||
class TestCaseunpack_fields:
|
||||
def test_with_fields(self):
|
||||
expected = ['a', 'b', 'c', 'd', 'e', 'f']
|
||||
actual = unpack_fields([['a'], ['b', 'c'], ['d', 'e', 'f']])
|
||||
eq_(expected, actual)
|
||||
|
||||
|
||||
def test_without_fields(self):
|
||||
expected = ['a', 'b', 'c', 'd', 'e', 'f']
|
||||
actual = unpack_fields(['a', 'b', 'c', 'd', 'e', 'f'])
|
||||
eq_(expected, actual)
|
||||
|
||||
|
||||
def test_empty(self):
|
||||
eq_([], unpack_fields([]))
|
||||
|
||||
|
||||
|
||||
class TestCaseWordCompare:
|
||||
def test_list(self):
|
||||
eq_(100, compare(['a', 'b', 'c', 'd'],['a', 'b', 'c', 'd']))
|
||||
eq_(86, compare(['a', 'b', 'c', 'd'],['a', 'b', 'c']))
|
||||
|
||||
|
||||
def test_unordered(self):
|
||||
#Sometimes, users don't want fuzzy matching too much When they set the slider
|
||||
#to 100, they don't expect a filename with the same words, but not the same order, to match.
|
||||
#Thus, we want to return 99 in that case.
|
||||
eq_(99, compare(['a', 'b', 'c', 'd'], ['d', 'b', 'c', 'a']))
|
||||
|
||||
|
||||
def test_word_occurs_twice(self):
|
||||
#if a word occurs twice in first, but once in second, we want the word to be only counted once
|
||||
eq_(89, compare(['a', 'b', 'c', 'd', 'a'], ['d', 'b', 'c', 'a']))
|
||||
|
||||
|
||||
def test_uses_copy_of_lists(self):
|
||||
first = ['foo', 'bar']
|
||||
second = ['bar', 'bleh']
|
||||
compare(first, second)
|
||||
eq_(['foo', 'bar'], first)
|
||||
eq_(['bar', 'bleh'], second)
|
||||
|
||||
|
||||
def test_word_weight(self):
|
||||
eq_(int((6.0 / 13.0) * 100), compare(['foo', 'bar'], ['bar', 'bleh'], (WEIGHT_WORDS, )))
|
||||
|
||||
|
||||
def test_similar_words(self):
|
||||
eq_(100, compare(['the', 'white', 'stripes'],['the', 'whites', 'stripe'], (MATCH_SIMILAR_WORDS, )))
|
||||
|
||||
|
||||
def test_empty(self):
|
||||
eq_(0, compare([], []))
|
||||
|
||||
|
||||
def test_with_fields(self):
|
||||
eq_(67, compare([['a', 'b'], ['c', 'd', 'e']], [['a', 'b'], ['c', 'd', 'f']]))
|
||||
|
||||
|
||||
def test_propagate_flags_with_fields(self, monkeypatch):
|
||||
def mock_compare(first, second, flags):
|
||||
eq_((0, 1, 2, 3, 5), flags)
|
||||
|
||||
|
||||
monkeypatch.setattr(engine, 'compare_fields', mock_compare)
|
||||
compare([['a']], [['a']], (0, 1, 2, 3, 5))
|
||||
|
||||
|
||||
|
||||
class TestCaseWordCompareWithFields:
|
||||
def test_simple(self):
|
||||
eq_(67, compare_fields([['a', 'b'], ['c', 'd', 'e']], [['a', 'b'], ['c', 'd', 'f']]))
|
||||
|
||||
|
||||
def test_empty(self):
|
||||
eq_(0, compare_fields([], []))
|
||||
|
||||
|
||||
def test_different_length(self):
|
||||
eq_(0, compare_fields([['a'], ['b']], [['a'], ['b'], ['c']]))
|
||||
|
||||
|
||||
def test_propagates_flags(self, monkeypatch):
|
||||
def mock_compare(first, second, flags):
|
||||
eq_((0, 1, 2, 3, 5), flags)
|
||||
|
||||
|
||||
monkeypatch.setattr(engine, 'compare_fields', mock_compare)
|
||||
compare_fields([['a']], [['a']],(0, 1, 2, 3, 5))
|
||||
|
||||
|
||||
def test_order(self):
|
||||
first = [['a', 'b'], ['c', 'd', 'e']]
|
||||
second = [['c', 'd', 'f'], ['a', 'b']]
|
||||
eq_(0, compare_fields(first, second))
|
||||
|
||||
|
||||
def test_no_order(self):
|
||||
first = [['a','b'],['c','d','e']]
|
||||
second = [['c','d','f'],['a','b']]
|
||||
@@ -168,10 +168,10 @@ class TestCaseWordCompareWithFields:
|
||||
first = [['a','b'],['a','b']] #a field can only be matched once.
|
||||
second = [['c','d','f'],['a','b']]
|
||||
eq_(0, compare_fields(first, second, (NO_FIELD_ORDER, )))
|
||||
first = [['a','b'],['a','b','c']]
|
||||
first = [['a','b'],['a','b','c']]
|
||||
second = [['c','d','f'],['a','b']]
|
||||
eq_(33, compare_fields(first, second, (NO_FIELD_ORDER, )))
|
||||
|
||||
|
||||
def test_compare_fields_without_order_doesnt_alter_fields(self):
|
||||
#The NO_ORDER comp type altered the fields!
|
||||
first = [['a','b'],['c','d','e']]
|
||||
@@ -179,7 +179,7 @@ class TestCaseWordCompareWithFields:
|
||||
eq_(67, compare_fields(first, second, (NO_FIELD_ORDER, )))
|
||||
eq_([['a','b'],['c','d','e']],first)
|
||||
eq_([['c','d','f'],['a','b']],second)
|
||||
|
||||
|
||||
|
||||
class TestCasebuild_word_dict:
|
||||
def test_with_standard_words(self):
|
||||
@@ -199,30 +199,30 @@ class TestCasebuild_word_dict:
|
||||
assert l[2] in d['baz']
|
||||
eq_(1,len(d['bleh']))
|
||||
assert l[2] in d['bleh']
|
||||
|
||||
|
||||
def test_unpack_fields(self):
|
||||
o = NamedObject('')
|
||||
o.words = [['foo','bar'],['baz']]
|
||||
d = build_word_dict([o])
|
||||
eq_(3,len(d))
|
||||
eq_(1,len(d['foo']))
|
||||
|
||||
|
||||
def test_words_are_unaltered(self):
|
||||
o = NamedObject('')
|
||||
o.words = [['foo','bar'],['baz']]
|
||||
build_word_dict([o])
|
||||
eq_([['foo','bar'],['baz']],o.words)
|
||||
|
||||
|
||||
def test_object_instances_can_only_be_once_in_words_object_list(self):
|
||||
o = NamedObject('foo foo',True)
|
||||
d = build_word_dict([o])
|
||||
eq_(1,len(d['foo']))
|
||||
|
||||
|
||||
def test_job(self):
|
||||
def do_progress(p,d=''):
|
||||
self.log.append(p)
|
||||
return True
|
||||
|
||||
|
||||
j = job.Job(1,do_progress)
|
||||
self.log = []
|
||||
s = "foo bar"
|
||||
@@ -230,7 +230,7 @@ class TestCasebuild_word_dict:
|
||||
# We don't have intermediate log because iter_with_progress is called with every > 1
|
||||
eq_(0,self.log[0])
|
||||
eq_(100,self.log[1])
|
||||
|
||||
|
||||
|
||||
class TestCasemerge_similar_words:
|
||||
def test_some_similar_words(self):
|
||||
@@ -242,8 +242,8 @@ class TestCasemerge_similar_words:
|
||||
merge_similar_words(d)
|
||||
eq_(1,len(d))
|
||||
eq_(3,len(d['foobar']))
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
class TestCasereduce_common_words:
|
||||
def test_typical(self):
|
||||
@@ -254,7 +254,7 @@ class TestCasereduce_common_words:
|
||||
reduce_common_words(d, 50)
|
||||
assert 'foo' not in d
|
||||
eq_(49,len(d['bar']))
|
||||
|
||||
|
||||
def test_dont_remove_objects_with_only_common_words(self):
|
||||
d = {
|
||||
'common': set([NamedObject("common uncommon",True) for i in range(50)] + [NamedObject("common",True)]),
|
||||
@@ -263,7 +263,7 @@ class TestCasereduce_common_words:
|
||||
reduce_common_words(d, 50)
|
||||
eq_(1,len(d['common']))
|
||||
eq_(1,len(d['uncommon']))
|
||||
|
||||
|
||||
def test_values_still_are_set_instances(self):
|
||||
d = {
|
||||
'common': set([NamedObject("common uncommon",True) for i in range(50)] + [NamedObject("common",True)]),
|
||||
@@ -272,7 +272,7 @@ class TestCasereduce_common_words:
|
||||
reduce_common_words(d, 50)
|
||||
assert isinstance(d['common'],set)
|
||||
assert isinstance(d['uncommon'],set)
|
||||
|
||||
|
||||
def test_dont_raise_KeyError_when_a_word_has_been_removed(self):
|
||||
#If a word has been removed by the reduce, an object in a subsequent common word that
|
||||
#contains the word that has been removed would cause a KeyError.
|
||||
@@ -285,14 +285,14 @@ class TestCasereduce_common_words:
|
||||
reduce_common_words(d, 50)
|
||||
except KeyError:
|
||||
self.fail()
|
||||
|
||||
|
||||
def test_unpack_fields(self):
|
||||
#object.words may be fields.
|
||||
def create_it():
|
||||
o = NamedObject('')
|
||||
o.words = [['foo','bar'],['baz']]
|
||||
return o
|
||||
|
||||
|
||||
d = {
|
||||
'foo': set([create_it() for i in range(50)])
|
||||
}
|
||||
@@ -300,7 +300,7 @@ class TestCasereduce_common_words:
|
||||
reduce_common_words(d, 50)
|
||||
except TypeError:
|
||||
self.fail("must support fields.")
|
||||
|
||||
|
||||
def test_consider_a_reduced_common_word_common_even_after_reduction(self):
|
||||
#There was a bug in the code that causeda word that has already been reduced not to
|
||||
#be counted as a common word for subsequent words. For example, if 'foo' is processed
|
||||
@@ -316,7 +316,7 @@ class TestCasereduce_common_words:
|
||||
eq_(1,len(d['foo']))
|
||||
eq_(1,len(d['bar']))
|
||||
eq_(49,len(d['baz']))
|
||||
|
||||
|
||||
|
||||
class TestCaseget_match:
|
||||
def test_simple(self):
|
||||
@@ -328,7 +328,7 @@ class TestCaseget_match:
|
||||
eq_(['bar','bleh'],m.second.words)
|
||||
assert m.first is o1
|
||||
assert m.second is o2
|
||||
|
||||
|
||||
def test_in(self):
|
||||
o1 = NamedObject("foo",True)
|
||||
o2 = NamedObject("bar",True)
|
||||
@@ -336,15 +336,15 @@ class TestCaseget_match:
|
||||
assert o1 in m
|
||||
assert o2 in m
|
||||
assert object() not in m
|
||||
|
||||
|
||||
def test_word_weight(self):
|
||||
eq_(int((6.0 / 13.0) * 100),get_match(NamedObject("foo bar",True),NamedObject("bar bleh",True),(WEIGHT_WORDS,)).percentage)
|
||||
|
||||
|
||||
|
||||
class TestCaseGetMatches:
|
||||
def test_empty(self):
|
||||
eq_(getmatches([]), [])
|
||||
|
||||
|
||||
def test_simple(self):
|
||||
l = [NamedObject("foo bar"),NamedObject("bar bleh"),NamedObject("a b c foo")]
|
||||
r = getmatches(l)
|
||||
@@ -353,7 +353,7 @@ class TestCaseGetMatches:
|
||||
assert_match(m, 'foo bar', 'bar bleh')
|
||||
m = first(m for m in r if m.percentage == 33) #"foo bar" and "a b c foo"
|
||||
assert_match(m, 'foo bar', 'a b c foo')
|
||||
|
||||
|
||||
def test_null_and_unrelated_objects(self):
|
||||
l = [NamedObject("foo bar"),NamedObject("bar bleh"),NamedObject(""),NamedObject("unrelated object")]
|
||||
r = getmatches(l)
|
||||
@@ -361,22 +361,22 @@ class TestCaseGetMatches:
|
||||
m = r[0]
|
||||
eq_(m.percentage, 50)
|
||||
assert_match(m, 'foo bar', 'bar bleh')
|
||||
|
||||
|
||||
def test_twice_the_same_word(self):
|
||||
l = [NamedObject("foo foo bar"),NamedObject("bar bleh")]
|
||||
r = getmatches(l)
|
||||
eq_(1,len(r))
|
||||
|
||||
|
||||
def test_twice_the_same_word_when_preworded(self):
|
||||
l = [NamedObject("foo foo bar",True),NamedObject("bar bleh",True)]
|
||||
r = getmatches(l)
|
||||
eq_(1,len(r))
|
||||
|
||||
|
||||
def test_two_words_match(self):
|
||||
l = [NamedObject("foo bar"),NamedObject("foo bar bleh")]
|
||||
r = getmatches(l)
|
||||
eq_(1,len(r))
|
||||
|
||||
|
||||
def test_match_files_with_only_common_words(self):
|
||||
#If a word occurs more than 50 times, it is excluded from the matching process
|
||||
#The problem with the common_word_threshold is that the files containing only common
|
||||
@@ -385,18 +385,18 @@ class TestCaseGetMatches:
|
||||
l = [NamedObject("foo") for i in range(50)]
|
||||
r = getmatches(l)
|
||||
eq_(1225,len(r))
|
||||
|
||||
|
||||
def test_use_words_already_there_if_there(self):
|
||||
o1 = NamedObject('foo')
|
||||
o2 = NamedObject('bar')
|
||||
o2.words = ['foo']
|
||||
eq_(1, len(getmatches([o1,o2])))
|
||||
|
||||
|
||||
def test_job(self):
|
||||
def do_progress(p,d=''):
|
||||
self.log.append(p)
|
||||
return True
|
||||
|
||||
|
||||
j = job.Job(1,do_progress)
|
||||
self.log = []
|
||||
s = "foo bar"
|
||||
@@ -404,12 +404,12 @@ class TestCaseGetMatches:
|
||||
assert len(self.log) > 2
|
||||
eq_(0,self.log[0])
|
||||
eq_(100,self.log[-1])
|
||||
|
||||
|
||||
def test_weight_words(self):
|
||||
l = [NamedObject("foo bar"),NamedObject("bar bleh")]
|
||||
m = getmatches(l, weight_words=True)[0]
|
||||
eq_(int((6.0 / 13.0) * 100),m.percentage)
|
||||
|
||||
|
||||
def test_similar_word(self):
|
||||
l = [NamedObject("foobar"),NamedObject("foobars")]
|
||||
eq_(len(getmatches(l, match_similar_words=True)), 1)
|
||||
@@ -420,16 +420,16 @@ class TestCaseGetMatches:
|
||||
eq_(len(getmatches(l, match_similar_words=True)), 1)
|
||||
l = [NamedObject("foobar"),NamedObject("foosbar")]
|
||||
eq_(len(getmatches(l, match_similar_words=True)), 1)
|
||||
|
||||
|
||||
def test_single_object_with_similar_words(self):
|
||||
l = [NamedObject("foo foos")]
|
||||
eq_(len(getmatches(l, match_similar_words=True)), 0)
|
||||
|
||||
|
||||
def test_double_words_get_counted_only_once(self):
|
||||
l = [NamedObject("foo bar foo bleh"),NamedObject("foo bar bleh bar")]
|
||||
m = getmatches(l)[0]
|
||||
eq_(75,m.percentage)
|
||||
|
||||
|
||||
def test_with_fields(self):
|
||||
o1 = NamedObject("foo bar - foo bleh")
|
||||
o2 = NamedObject("foo bar - bleh bar")
|
||||
@@ -437,7 +437,7 @@ class TestCaseGetMatches:
|
||||
o2.words = getfields(o2.name)
|
||||
m = getmatches([o1, o2])[0]
|
||||
eq_(50, m.percentage)
|
||||
|
||||
|
||||
def test_with_fields_no_order(self):
|
||||
o1 = NamedObject("foo bar - foo bleh")
|
||||
o2 = NamedObject("bleh bang - foo bar")
|
||||
@@ -445,11 +445,11 @@ class TestCaseGetMatches:
|
||||
o2.words = getfields(o2.name)
|
||||
m = getmatches([o1, o2], no_field_order=True)[0]
|
||||
eq_(m.percentage, 50)
|
||||
|
||||
|
||||
def test_only_match_similar_when_the_option_is_set(self):
|
||||
l = [NamedObject("foobar"),NamedObject("foobars")]
|
||||
eq_(len(getmatches(l, match_similar_words=False)), 0)
|
||||
|
||||
|
||||
def test_dont_recurse_do_match(self):
|
||||
# with nosetests, the stack is increased. The number has to be high enough not to be failing falsely
|
||||
sys.setrecursionlimit(100)
|
||||
@@ -460,19 +460,19 @@ class TestCaseGetMatches:
|
||||
self.fail()
|
||||
finally:
|
||||
sys.setrecursionlimit(1000)
|
||||
|
||||
|
||||
def test_min_match_percentage(self):
|
||||
l = [NamedObject("foo bar"),NamedObject("bar bleh"),NamedObject("a b c foo")]
|
||||
r = getmatches(l, min_match_percentage=50)
|
||||
eq_(1,len(r)) #Only "foo bar" / "bar bleh" should match
|
||||
|
||||
|
||||
def test_MemoryError(self, monkeypatch):
|
||||
@log_calls
|
||||
def mocked_match(first, second, flags):
|
||||
if len(mocked_match.calls) > 42:
|
||||
raise MemoryError()
|
||||
return Match(first, second, 0)
|
||||
|
||||
|
||||
objects = [NamedObject() for i in range(10)] # results in 45 matches
|
||||
monkeypatch.setattr(engine, 'get_match', mocked_match)
|
||||
try:
|
||||
@@ -480,13 +480,13 @@ class TestCaseGetMatches:
|
||||
except MemoryError:
|
||||
self.fail('MemorryError must be handled')
|
||||
eq_(42, len(r))
|
||||
|
||||
|
||||
|
||||
class TestCaseGetMatchesByContents:
|
||||
def test_dont_compare_empty_files(self):
|
||||
o1, o2 = no(size=0), no(size=0)
|
||||
assert not getmatches_by_contents([o1, o2])
|
||||
|
||||
|
||||
|
||||
class TestCaseGroup:
|
||||
def test_empy(self):
|
||||
@@ -494,7 +494,7 @@ class TestCaseGroup:
|
||||
eq_(None,g.ref)
|
||||
eq_([],g.dupes)
|
||||
eq_(0,len(g.matches))
|
||||
|
||||
|
||||
def test_add_match(self):
|
||||
g = Group()
|
||||
m = get_match(NamedObject("foo",True),NamedObject("bar",True))
|
||||
@@ -503,7 +503,7 @@ class TestCaseGroup:
|
||||
eq_([m.second],g.dupes)
|
||||
eq_(1,len(g.matches))
|
||||
assert m in g.matches
|
||||
|
||||
|
||||
def test_multiple_add_match(self):
|
||||
g = Group()
|
||||
o1 = NamedObject("a",True)
|
||||
@@ -529,13 +529,13 @@ class TestCaseGroup:
|
||||
g.add_match(get_match(o3,o4))
|
||||
eq_([o2,o3,o4],g.dupes)
|
||||
eq_(6,len(g.matches))
|
||||
|
||||
|
||||
def test_len(self):
|
||||
g = Group()
|
||||
eq_(0,len(g))
|
||||
g.add_match(get_match(NamedObject("foo",True),NamedObject("bar",True)))
|
||||
eq_(2,len(g))
|
||||
|
||||
|
||||
def test_add_same_match_twice(self):
|
||||
g = Group()
|
||||
m = get_match(NamedObject("foo",True),NamedObject("foo",True))
|
||||
@@ -545,7 +545,7 @@ class TestCaseGroup:
|
||||
g.add_match(m)
|
||||
eq_(2,len(g))
|
||||
eq_(1,len(g.matches))
|
||||
|
||||
|
||||
def test_in(self):
|
||||
g = Group()
|
||||
o1 = NamedObject("foo",True)
|
||||
@@ -554,7 +554,7 @@ class TestCaseGroup:
|
||||
g.add_match(get_match(o1,o2))
|
||||
assert o1 in g
|
||||
assert o2 in g
|
||||
|
||||
|
||||
def test_remove(self):
|
||||
g = Group()
|
||||
o1 = NamedObject("foo",True)
|
||||
@@ -571,7 +571,7 @@ class TestCaseGroup:
|
||||
g.remove_dupe(o1)
|
||||
eq_(0,len(g.matches))
|
||||
eq_(0,len(g))
|
||||
|
||||
|
||||
def test_remove_with_ref_dupes(self):
|
||||
g = Group()
|
||||
o1 = NamedObject("foo",True)
|
||||
@@ -584,7 +584,7 @@ class TestCaseGroup:
|
||||
o2.is_ref = True
|
||||
g.remove_dupe(o3)
|
||||
eq_(0,len(g))
|
||||
|
||||
|
||||
def test_switch_ref(self):
|
||||
o1 = NamedObject(with_words=True)
|
||||
o2 = NamedObject(with_words=True)
|
||||
@@ -598,7 +598,7 @@ class TestCaseGroup:
|
||||
assert o2 is g.ref
|
||||
g.switch_ref(NamedObject('',True))
|
||||
assert o2 is g.ref
|
||||
|
||||
|
||||
def test_switch_ref_from_ref_dir(self):
|
||||
# When the ref dupe is from a ref dir, switch_ref() does nothing
|
||||
o1 = no(with_words=True)
|
||||
@@ -608,7 +608,7 @@ class TestCaseGroup:
|
||||
g.add_match(get_match(o1, o2))
|
||||
g.switch_ref(o2)
|
||||
assert o1 is g.ref
|
||||
|
||||
|
||||
def test_get_match_of(self):
|
||||
g = Group()
|
||||
for m in get_match_triangle():
|
||||
@@ -619,7 +619,7 @@ class TestCaseGroup:
|
||||
assert o in m
|
||||
assert g.get_match_of(NamedObject('',True)) is None
|
||||
assert g.get_match_of(g.ref) is None
|
||||
|
||||
|
||||
def test_percentage(self):
|
||||
#percentage should return the avg percentage in relation to the ref
|
||||
m1,m2,m3 = get_match_triangle()
|
||||
@@ -638,11 +638,11 @@ class TestCaseGroup:
|
||||
g.add_match(m1)
|
||||
g.add_match(m2)
|
||||
eq_(66,g.percentage)
|
||||
|
||||
|
||||
def test_percentage_on_empty_group(self):
|
||||
g = Group()
|
||||
eq_(0,g.percentage)
|
||||
|
||||
|
||||
def test_prioritize(self):
|
||||
m1,m2,m3 = get_match_triangle()
|
||||
o1 = m1.first
|
||||
@@ -658,7 +658,7 @@ class TestCaseGroup:
|
||||
assert o1 is g.ref
|
||||
assert g.prioritize(lambda x:x.name)
|
||||
assert o3 is g.ref
|
||||
|
||||
|
||||
def test_prioritize_with_tie_breaker(self):
|
||||
# if the ref has the same key as one or more of the dupe, run the tie_breaker func among them
|
||||
g = get_test_group()
|
||||
@@ -666,9 +666,9 @@ class TestCaseGroup:
|
||||
tie_breaker = lambda ref, dupe: dupe is o3
|
||||
g.prioritize(lambda x:0, tie_breaker)
|
||||
assert g.ref is o3
|
||||
|
||||
|
||||
def test_prioritize_with_tie_breaker_runs_on_all_dupes(self):
|
||||
# Even if a dupe is chosen to switch with ref with a tie breaker, we still run the tie breaker
|
||||
# Even if a dupe is chosen to switch with ref with a tie breaker, we still run the tie breaker
|
||||
# with other dupes and the newly chosen ref
|
||||
g = get_test_group()
|
||||
o1, o2, o3 = g.ordered
|
||||
@@ -678,7 +678,7 @@ class TestCaseGroup:
|
||||
tie_breaker = lambda ref, dupe: dupe.foo > ref.foo
|
||||
g.prioritize(lambda x:0, tie_breaker)
|
||||
assert g.ref is o3
|
||||
|
||||
|
||||
def test_prioritize_with_tie_breaker_runs_only_on_tie_dupes(self):
|
||||
# The tie breaker only runs on dupes that had the same value for the key_func
|
||||
g = get_test_group()
|
||||
@@ -693,7 +693,7 @@ class TestCaseGroup:
|
||||
tie_breaker = lambda ref, dupe: dupe.bar > ref.bar
|
||||
g.prioritize(key_func, tie_breaker)
|
||||
assert g.ref is o2
|
||||
|
||||
|
||||
def test_prioritize_with_ref_dupe(self):
|
||||
# when the ref dupe of a group is from a ref dir, make it stay on top.
|
||||
g = get_test_group()
|
||||
@@ -702,7 +702,7 @@ class TestCaseGroup:
|
||||
o2.size = 2
|
||||
g.prioritize(lambda x: -x.size)
|
||||
assert g.ref is o1
|
||||
|
||||
|
||||
def test_prioritize_nothing_changes(self):
|
||||
# prioritize() returns False when nothing changes in the group.
|
||||
g = get_test_group()
|
||||
@@ -710,14 +710,14 @@ class TestCaseGroup:
|
||||
g[1].name = 'b'
|
||||
g[2].name = 'c'
|
||||
assert not g.prioritize(lambda x:x.name)
|
||||
|
||||
|
||||
def test_list_like(self):
|
||||
g = Group()
|
||||
o1,o2 = (NamedObject("foo",True),NamedObject("bar",True))
|
||||
g.add_match(get_match(o1,o2))
|
||||
assert g[0] is o1
|
||||
assert g[1] is o2
|
||||
|
||||
|
||||
def test_discard_matches(self):
|
||||
g = Group()
|
||||
o1,o2,o3 = (NamedObject("foo",True),NamedObject("bar",True),NamedObject("baz",True))
|
||||
@@ -726,13 +726,13 @@ class TestCaseGroup:
|
||||
g.discard_matches()
|
||||
eq_(1,len(g.matches))
|
||||
eq_(0,len(g.candidates))
|
||||
|
||||
|
||||
|
||||
class TestCaseget_groups:
|
||||
def test_empty(self):
|
||||
r = get_groups([])
|
||||
eq_([],r)
|
||||
|
||||
|
||||
def test_simple(self):
|
||||
l = [NamedObject("foo bar"),NamedObject("bar bleh")]
|
||||
matches = getmatches(l)
|
||||
@@ -742,7 +742,7 @@ class TestCaseget_groups:
|
||||
g = r[0]
|
||||
assert g.ref is m.first
|
||||
eq_([m.second],g.dupes)
|
||||
|
||||
|
||||
def test_group_with_multiple_matches(self):
|
||||
#This results in 3 matches
|
||||
l = [NamedObject("foo"),NamedObject("foo"),NamedObject("foo")]
|
||||
@@ -751,7 +751,7 @@ class TestCaseget_groups:
|
||||
eq_(1,len(r))
|
||||
g = r[0]
|
||||
eq_(3,len(g))
|
||||
|
||||
|
||||
def test_must_choose_a_group(self):
|
||||
l = [NamedObject("a b"),NamedObject("a b"),NamedObject("b c"),NamedObject("c d"),NamedObject("c d")]
|
||||
#There will be 2 groups here: group "a b" and group "c d"
|
||||
@@ -760,7 +760,7 @@ class TestCaseget_groups:
|
||||
r = get_groups(matches)
|
||||
eq_(2,len(r))
|
||||
eq_(5,len(r[0])+len(r[1]))
|
||||
|
||||
|
||||
def test_should_all_go_in_the_same_group(self):
|
||||
l = [NamedObject("a b"),NamedObject("a b"),NamedObject("a b"),NamedObject("a b")]
|
||||
#There will be 2 groups here: group "a b" and group "c d"
|
||||
@@ -768,7 +768,7 @@ class TestCaseget_groups:
|
||||
matches = getmatches(l)
|
||||
r = get_groups(matches)
|
||||
eq_(1,len(r))
|
||||
|
||||
|
||||
def test_give_priority_to_matches_with_higher_percentage(self):
|
||||
o1 = NamedObject(with_words=True)
|
||||
o2 = NamedObject(with_words=True)
|
||||
@@ -782,14 +782,14 @@ class TestCaseget_groups:
|
||||
assert o1 not in g
|
||||
assert o2 in g
|
||||
assert o3 in g
|
||||
|
||||
|
||||
def test_four_sized_group(self):
|
||||
l = [NamedObject("foobar") for i in range(4)]
|
||||
m = getmatches(l)
|
||||
r = get_groups(m)
|
||||
eq_(1,len(r))
|
||||
eq_(4,len(r[0]))
|
||||
|
||||
|
||||
def test_referenced_by_ref2(self):
|
||||
o1 = NamedObject(with_words=True)
|
||||
o2 = NamedObject(with_words=True)
|
||||
@@ -799,12 +799,12 @@ class TestCaseget_groups:
|
||||
m3 = get_match(o3,o2)
|
||||
r = get_groups([m1,m2,m3])
|
||||
eq_(3,len(r[0]))
|
||||
|
||||
|
||||
def test_job(self):
|
||||
def do_progress(p,d=''):
|
||||
self.log.append(p)
|
||||
return True
|
||||
|
||||
|
||||
self.log = []
|
||||
j = job.Job(1,do_progress)
|
||||
m1,m2,m3 = get_match_triangle()
|
||||
@@ -813,7 +813,7 @@ class TestCaseget_groups:
|
||||
get_groups([m1,m2,m3,m4],j)
|
||||
eq_(0,self.log[0])
|
||||
eq_(100,self.log[-1])
|
||||
|
||||
|
||||
def test_group_admissible_discarded_dupes(self):
|
||||
# If, with a (A, B, C, D) set, all match with A, but C and D don't match with B and that the
|
||||
# (A, B) match is the highest (thus resulting in an (A, B) group), still match C and D
|
||||
@@ -830,4 +830,4 @@ class TestCaseget_groups:
|
||||
assert B in g1
|
||||
assert C in g2
|
||||
assert D in g2
|
||||
|
||||
|
||||
|
||||
@@ -1,12 +1,12 @@
|
||||
# Created By: Virgil Dupras
|
||||
# Created On: 2006/03/03
|
||||
# Copyright 2014 Hardcoded Software (http://www.hardcoded.net)
|
||||
#
|
||||
# This software is licensed under the "BSD" License as described in the "LICENSE" file,
|
||||
# which should be included with this package. The terms are also available at
|
||||
#
|
||||
# This software is licensed under the "BSD" License as described in the "LICENSE" file,
|
||||
# which should be included with this package. The terms are also available at
|
||||
# http://www.hardcoded.net/licenses/bsd_license
|
||||
|
||||
from jobprogress import job
|
||||
from hscommon.jobprogress import job
|
||||
from hscommon.path import Path
|
||||
from hscommon.testutil import eq_
|
||||
|
||||
@@ -25,10 +25,10 @@ class NamedObject:
|
||||
self.size = size
|
||||
self.path = path
|
||||
self.words = getwords(name)
|
||||
|
||||
|
||||
def __repr__(self):
|
||||
return '<NamedObject %r %r>' % (self.name, self.path)
|
||||
|
||||
|
||||
|
||||
no = NamedObject
|
||||
|
||||
@@ -384,7 +384,7 @@ def test_file_evaluates_to_false(fake_fileexists):
|
||||
class FalseNamedObject(NamedObject):
|
||||
def __bool__(self):
|
||||
return False
|
||||
|
||||
|
||||
|
||||
s = Scanner()
|
||||
f1 = FalseNamedObject('foobar', path='p1')
|
||||
@@ -445,7 +445,7 @@ def test_tie_breaker_same_name_plus_digit(fake_fileexists):
|
||||
assert group.ref is o5
|
||||
|
||||
def test_partial_group_match(fake_fileexists):
|
||||
# Count the number of discarded matches (when a file doesn't match all other dupes of the
|
||||
# Count the number of discarded matches (when a file doesn't match all other dupes of the
|
||||
# group) in Scanner.discarded_file_count
|
||||
s = Scanner()
|
||||
o1, o2, o3 = no('a b'), no('a'), no('b')
|
||||
@@ -476,7 +476,7 @@ def test_dont_group_files_that_dont_exist(tmpdir):
|
||||
file2.path.remove()
|
||||
return [Match(file1, file2, 100)]
|
||||
s._getmatches = getmatches
|
||||
|
||||
|
||||
assert not s.get_dupe_groups([file1, file2])
|
||||
|
||||
def test_folder_scan_exclude_subfolder_matches(fake_fileexists):
|
||||
|
||||
@@ -1,2 +1,2 @@
|
||||
__version__ = '2.10.0'
|
||||
__version__ = '2.10.1'
|
||||
__appname__ = 'dupeGuru Picture Edition'
|
||||
|
||||
@@ -1,18 +1,18 @@
|
||||
# Created By: Virgil Dupras
|
||||
# Created On: 2007/02/25
|
||||
# Copyright 2014 Hardcoded Software (http://www.hardcoded.net)
|
||||
#
|
||||
# This software is licensed under the "BSD" License as described in the "LICENSE" file,
|
||||
# which should be included with this package. The terms are also available at
|
||||
#
|
||||
# This software is licensed under the "BSD" License as described in the "LICENSE" file,
|
||||
# which should be included with this package. The terms are also available at
|
||||
# http://www.hardcoded.net/licenses/bsd_license
|
||||
|
||||
import logging
|
||||
import multiprocessing
|
||||
from itertools import combinations
|
||||
|
||||
from hscommon.util import extract
|
||||
from hscommon.util import extract, iterconsume
|
||||
from hscommon.trans import tr
|
||||
from jobprogress import job
|
||||
from hscommon.jobprogress import job
|
||||
|
||||
from core.engine import Match
|
||||
from .block import avgdiff, DifferentBlockCountError, NoBlocksError
|
||||
@@ -132,14 +132,14 @@ def async_compare(ref_ids, other_ids, dbname, threshold, picinfo):
|
||||
results.append((ref_id, other_id, percentage))
|
||||
cache.close()
|
||||
return results
|
||||
|
||||
|
||||
def getmatches(pictures, cache_path, threshold=75, match_scaled=False, j=job.nulljob):
|
||||
def get_picinfo(p):
|
||||
if match_scaled:
|
||||
return (None, p.is_ref)
|
||||
else:
|
||||
return (p.dimensions, p.is_ref)
|
||||
|
||||
|
||||
def collect_results(collect_all=False):
|
||||
# collect results and wait until the queue is small enough to accomodate a new results.
|
||||
nonlocal async_results, matches, comparison_count
|
||||
@@ -152,7 +152,7 @@ def getmatches(pictures, cache_path, threshold=75, match_scaled=False, j=job.nul
|
||||
comparison_count += 1
|
||||
progress_msg = tr("Performed %d/%d chunk matches") % (comparison_count, len(comparisons_to_do))
|
||||
j.set_progress(comparison_count, progress_msg)
|
||||
|
||||
|
||||
j = j.start_subjob([3, 7])
|
||||
pictures = prepare_pictures(pictures, cache_path, with_dimensions=not match_scaled, j=j)
|
||||
j = j.start_subjob([9, 1], tr("Preparing for matching"))
|
||||
@@ -175,22 +175,36 @@ def getmatches(pictures, cache_path, threshold=75, match_scaled=False, j=job.nul
|
||||
comparisons_to_do = list(combinations(chunks + [None], 2))
|
||||
comparison_count = 0
|
||||
j.start_job(len(comparisons_to_do))
|
||||
for ref_chunk, other_chunk in comparisons_to_do:
|
||||
picinfo = {p.cache_id: get_picinfo(p) for p in ref_chunk}
|
||||
ref_ids = [p.cache_id for p in ref_chunk]
|
||||
if other_chunk is not None:
|
||||
other_ids = [p.cache_id for p in other_chunk]
|
||||
picinfo.update({p.cache_id: get_picinfo(p) for p in other_chunk})
|
||||
else:
|
||||
other_ids = None
|
||||
args = (ref_ids, other_ids, cache_path, threshold, picinfo)
|
||||
async_results.append(pool.apply_async(async_compare, args))
|
||||
collect_results()
|
||||
collect_results(collect_all=True)
|
||||
try:
|
||||
for ref_chunk, other_chunk in comparisons_to_do:
|
||||
picinfo = {p.cache_id: get_picinfo(p) for p in ref_chunk}
|
||||
ref_ids = [p.cache_id for p in ref_chunk]
|
||||
if other_chunk is not None:
|
||||
other_ids = [p.cache_id for p in other_chunk]
|
||||
picinfo.update({p.cache_id: get_picinfo(p) for p in other_chunk})
|
||||
else:
|
||||
other_ids = None
|
||||
args = (ref_ids, other_ids, cache_path, threshold, picinfo)
|
||||
async_results.append(pool.apply_async(async_compare, args))
|
||||
collect_results()
|
||||
collect_results(collect_all=True)
|
||||
except MemoryError:
|
||||
# Rare, but possible, even in 64bit situations (ref #264). What do we do now? We free us
|
||||
# some wiggle room, log about the incident, and stop matching right here. We then process
|
||||
# the matches we have. The rest of the process doesn't allocate much and we should be
|
||||
# alright.
|
||||
del comparisons_to_do, chunks, pictures # some wiggle room for the next statements
|
||||
logging.warning("Ran out of memory when scanning! We had %d matches.", len(matches))
|
||||
del matches[-len(matches)//3:] # some wiggle room to ensure we don't run out of memory again.
|
||||
pool.close()
|
||||
|
||||
result = []
|
||||
for ref_id, other_id, percentage in j.iter_with_progress(matches, tr("Verified %d/%d matches"), every=10):
|
||||
myiter = j.iter_with_progress(
|
||||
iterconsume(matches, reverse=False),
|
||||
tr("Verified %d/%d matches"),
|
||||
every=10,
|
||||
count=len(matches),
|
||||
)
|
||||
for ref_id, other_id, percentage in myiter:
|
||||
ref = id2picture[ref_id]
|
||||
other = id2picture[other_id]
|
||||
if percentage == 100 and ref.md5 != other.md5:
|
||||
@@ -201,4 +215,5 @@ def getmatches(pictures, cache_path, threshold=75, match_scaled=False, j=job.nul
|
||||
result.append(get_match(ref, other, percentage))
|
||||
return result
|
||||
|
||||
multiprocessing.freeze_support()
|
||||
multiprocessing.freeze_support()
|
||||
|
||||
|
||||
@@ -1,3 +1,11 @@
|
||||
=== 2.10.1 (2014-10-12)
|
||||
|
||||
* Catch MemoryError better in block matching algo. (#264)
|
||||
* Fix crash when reading some EXIF tags. [Mac] (#263, #265)
|
||||
* Fixed ``AttributeError: 'ComboboxModel' object has no attribute 'reset'``. [Linux, Windows] (#254)
|
||||
* Fixed a build problem introduced by Sphinx 1.2.3.
|
||||
* Updated German localisation, by Frank Weber.
|
||||
|
||||
=== 2.10.0 (2014-05-03)
|
||||
|
||||
* This is mostly a dependencies upgrade.
|
||||
|
||||
@@ -25,7 +25,7 @@ sys.path.insert(0, os.path.abspath(os.path.join('..', '..')))
|
||||
|
||||
def fix_nulljob_in_sig(app, what, name, obj, options, signature, return_annotation):
|
||||
if signature:
|
||||
signature = re.sub(r"<jobprogress.job.NullJob object at 0x[\da-f]+>", "nulljob", signature)
|
||||
signature = re.sub(r"<hscommon.jobprogress.job.NullJob object at 0x[\da-f]+>", "nulljob", signature)
|
||||
return signature, return_annotation
|
||||
|
||||
def setup(app):
|
||||
|
||||
@@ -10,6 +10,8 @@ Unten befindet sich die Liste aller Menschen, die direkt oder indirekt zu dupeGu
|
||||
|
||||
| **Gregor Tätzner, deutsche Übersetzung**
|
||||
|
||||
| **Frank Weber, deutsche Übersetzung**
|
||||
|
||||
| **Eric Dee, chinesische Übersetzung**
|
||||
|
||||
| **Aleš Nehyba, Czech localization**
|
||||
|
||||
@@ -10,6 +10,8 @@ Below is the list of people who contributed, directly or indirectly to dupeGuru.
|
||||
|
||||
| **Gregor Tätzner, German localization**
|
||||
|
||||
| **Frank Weber, German localization**
|
||||
|
||||
| **Eric Dee, Chinese localization**
|
||||
|
||||
| **Aleš Nehyba, Czech localization**
|
||||
|
||||
@@ -3,6 +3,7 @@ hscommon
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
:glob:
|
||||
|
||||
build
|
||||
conflict
|
||||
@@ -10,10 +11,6 @@ hscommon
|
||||
notify
|
||||
path
|
||||
util
|
||||
gui/base
|
||||
gui/text_field
|
||||
gui/selectable_list
|
||||
gui/table
|
||||
gui/tree
|
||||
gui/column
|
||||
gui/progress_window
|
||||
jobprogress/*
|
||||
gui/*
|
||||
|
||||
|
||||
17
help/en/developer/hscommon/jobprogress/job.rst
Normal file
17
help/en/developer/hscommon/jobprogress/job.rst
Normal file
@@ -0,0 +1,17 @@
|
||||
hscommon.jobprogress.job
|
||||
========================
|
||||
|
||||
.. automodule:: hscommon.jobprogress.job
|
||||
|
||||
.. autosummary::
|
||||
|
||||
Job
|
||||
NullJob
|
||||
|
||||
.. autoclass:: Job
|
||||
:members:
|
||||
:private-members:
|
||||
|
||||
.. autoclass:: NullJob
|
||||
:members:
|
||||
|
||||
12
help/en/developer/hscommon/jobprogress/performer.rst
Normal file
12
help/en/developer/hscommon/jobprogress/performer.rst
Normal file
@@ -0,0 +1,12 @@
|
||||
hscommon.jobprogress.performer
|
||||
==============================
|
||||
|
||||
.. automodule:: hscommon.jobprogress.performer
|
||||
|
||||
.. autosummary::
|
||||
|
||||
ThreadedJobPerformer
|
||||
|
||||
.. autoclass:: ThreadedJobPerformer
|
||||
:members:
|
||||
|
||||
12
help/en/developer/hscommon/jobprogress/qt.rst
Normal file
12
help/en/developer/hscommon/jobprogress/qt.rst
Normal file
@@ -0,0 +1,12 @@
|
||||
hscommon.jobprogress.qt
|
||||
=======================
|
||||
|
||||
.. automodule:: hscommon.jobprogress.qt
|
||||
|
||||
.. autosummary::
|
||||
|
||||
Progress
|
||||
|
||||
.. autoclass:: Progress
|
||||
:members:
|
||||
|
||||
@@ -12,16 +12,16 @@ dupeGuru's codebase has quite a few design flaws. The Model, View and Controller
|
||||
different classes, scattered around. If you're aware of that, it might help you to understand what
|
||||
the heck is going on.
|
||||
|
||||
The central piece of dupeGuru is ``dupeguru.app.DupeGuru`` (in the ``core`` code). It's the only
|
||||
The central piece of dupeGuru is :class:`core.app.DupeGuru`. It's the only
|
||||
interface to the python's code for the GUI code. A duplicate scan is started with
|
||||
``start_scanning()``, directories are added through ``add_directory()``, etc..
|
||||
:meth:`core.app.DupeGuru.start_scanning()`, directories are added through
|
||||
:meth:`core.app.DupeGuru.add_directory()`, etc..
|
||||
|
||||
A lot of functionalities of the App are implemented in the platform-specific subclasses of
|
||||
``app.DupeGuru``, like ``app_cocoa.DupeGuru``, or the ``base.app.DupeGuru`` class in the PyQt
|
||||
codebase. For example, when performing "Remove Selected From Results",
|
||||
``app_cocoa.Dupeguru.RemoveSelected()`` on the Obj-C side, and
|
||||
``base.app.DupeGuru.remove_duplicates()`` on the PyQt side, are respectively called to perform the
|
||||
thing. All of this is quite ugly, I know (see the "Refactoring" section below).
|
||||
:class:`core.app.DupeGuru`, like ``DupeGuru`` in ``cocoa/inter/app.py``, or the ``DupeGuru`` class
|
||||
in ``qt/base/app.py``. For example, when performing "Remove Selected From Results",
|
||||
``RemoveSelected()`` on the cocoa side, and ``remove_duplicates()`` on the PyQt side, are
|
||||
respectively called to perform the thing.
|
||||
|
||||
.. _jobs:
|
||||
|
||||
@@ -29,23 +29,26 @@ Jobs
|
||||
----
|
||||
|
||||
A lot of operations in dupeGuru take a significant amount of time. This is why there's a generalized
|
||||
threaded job mechanism built-in ``app.DupeGuru``. First, ``app.DupeGuru`` has a ``progress`` member
|
||||
which is an instance of ``jobprogress.job.ThreadedJobPerformer``. It lets the GUI code know of the
|
||||
progress of the current threaded job. When ``app.DupeGuru`` needs to start a job, it calls
|
||||
threaded job mechanism built-in :class:`~core.app.DupeGuru`. First, :class:`~core.app.DupeGuru` has
|
||||
a ``progress`` member which is an instance of
|
||||
:class:`~hscommon.jobprogress.performer.ThreadedJobPerformer`. It lets the GUI code know of the progress
|
||||
of the current threaded job. When :class:`~core.app.DupeGuru` needs to start a job, it calls
|
||||
``_start_job()`` and the platform specific subclass deals with the details of starting the job.
|
||||
|
||||
Core principles
|
||||
---------------
|
||||
|
||||
The core of the duplicate matching takes place (for SE and ME, not PE) in ``dupeguru.engine``.
|
||||
There's ``MatchFactory.getmatches()`` which take a list of ``fs.File`` instances and return a list
|
||||
of ``(firstfile, secondfile, match_percentage)`` matches. Then, there's ``get_groups()`` which takes
|
||||
a list of matches and returns a list of ``Group`` instances (a ``Group`` is basically a list of
|
||||
``fs.File`` matching together).
|
||||
The core of the duplicate matching takes place (for SE and ME, not PE) in :mod:`core.engine`.
|
||||
There's :func:`core.engine.getmatches` which take a list of :class:`core.fs.File` instances and
|
||||
return a list of ``(firstfile, secondfile, match_percentage)`` matches. Then, there's
|
||||
:func:`core.engine.get_groups` which takes a list of matches and returns a list of
|
||||
:class:`.Group` instances (a :class:`.Group` is basically a list of :class:`.File` matching
|
||||
together).
|
||||
|
||||
When a scan is over, the final result (the list of groups from ``get_groups()``) is placed into
|
||||
``app.DupeGuru.results``, which is a ``results.Results`` instance. The ``Results`` instance is where
|
||||
all the dupe marking, sorting, removing, power marking, etc. takes place.
|
||||
When a scan is over, the final result (the list of groups from :func:`.get_groups`) is placed into
|
||||
:attr:`core.app.DupeGuru.results`, which is a :class:`core.results.Results` instance. The
|
||||
:class:`~.Results` instance is where all the dupe marking, sorting, removing, power marking, etc.
|
||||
takes place.
|
||||
|
||||
API
|
||||
---
|
||||
|
||||
@@ -9,6 +9,8 @@ Voici la liste des contributeurs de dupeGuru. Merci!
|
||||
|
||||
| **Gregor Tätzner, localisation allemande**
|
||||
|
||||
| **Frank Weber, localisation allemande**
|
||||
|
||||
| **Eric Dee, localisation choinoise**
|
||||
|
||||
| **Aleš Nehyba, localisation tchèque**
|
||||
|
||||
@@ -10,6 +10,8 @@
|
||||
|
||||
| **Gregor Tätzner, Գերմաներեն թարգմանիչը**
|
||||
|
||||
| **Frank Weber, Գերմաներեն թարգմանիչը**
|
||||
|
||||
| **Eric Dee, Չինարեն թարգմանիչը**
|
||||
|
||||
| **Aleš Nehyba, Չեխերեն թարգմանիչը**
|
||||
|
||||
@@ -10,6 +10,8 @@
|
||||
|
||||
| **Gregor Tätzner, Немецкая локализация**
|
||||
|
||||
| **Frank Weber, Немецкая локализация**
|
||||
|
||||
| **Eric Dee, Китайская локализация**
|
||||
|
||||
| **Aleš Nehyba, Чешский локализации**
|
||||
|
||||
@@ -10,6 +10,8 @@
|
||||
|
||||
| **Gregor Tätzner, Німецька локалізація**
|
||||
|
||||
| **Frank Weber, Німецька локалізація**
|
||||
|
||||
| **Eric Dee, Китайська локалізація**
|
||||
|
||||
| **Aleš Nehyba, Чеський локалізації**
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
# Created By: Virgil Dupras
|
||||
# Created On: 2013-10-12
|
||||
# Copyright 2014 Hardcoded Software (http://www.hardcoded.net)
|
||||
#
|
||||
# This software is licensed under the "BSD" License as described in the "LICENSE" file,
|
||||
# which should be included with this package. The terms are also available at
|
||||
#
|
||||
# This software is licensed under the "BSD" License as described in the "LICENSE" file,
|
||||
# which should be included with this package. The terms are also available at
|
||||
# http://www.hardcoded.net/licenses/bsd_license
|
||||
|
||||
import os.path as op
|
||||
@@ -30,10 +30,10 @@ def reveal_path(path):
|
||||
|
||||
def special_folder_path(special_folder, appname=None):
|
||||
"""Returns the path of ``special_folder``.
|
||||
|
||||
|
||||
``special_folder`` is a SpecialFolder.* const. The result is the special folder for the current
|
||||
application. The running process' application info is used to determine relevant information.
|
||||
|
||||
|
||||
You can override the application name with ``appname``. This argument is ingored under Qt.
|
||||
"""
|
||||
return _special_folder_path(special_folder, appname)
|
||||
@@ -49,7 +49,7 @@ try:
|
||||
_open_url = proxy.openURL_
|
||||
_open_path = proxy.openPath_
|
||||
_reveal_path = proxy.revealPath_
|
||||
|
||||
|
||||
def _special_folder_path(special_folder, appname=None):
|
||||
if special_folder == SpecialFolder.Cache:
|
||||
base = proxy.getCachePath()
|
||||
@@ -58,7 +58,7 @@ try:
|
||||
if not appname:
|
||||
appname = proxy.bundleInfo_('CFBundleName')
|
||||
return op.join(base, appname)
|
||||
|
||||
|
||||
except ImportError:
|
||||
try:
|
||||
from PyQt5.QtCore import QUrl, QStandardPaths
|
||||
@@ -69,26 +69,25 @@ except ImportError:
|
||||
def _open_path(path):
|
||||
url = QUrl.fromLocalFile(str(path))
|
||||
QDesktopServices.openUrl(url)
|
||||
|
||||
|
||||
def _reveal_path(path):
|
||||
_open_path(op.dirname(str(path)))
|
||||
|
||||
|
||||
def _special_folder_path(special_folder, appname=None):
|
||||
if special_folder == SpecialFolder.Cache:
|
||||
qtfolder = QStandardPaths.CacheLocation
|
||||
else:
|
||||
qtfolder = QStandardPaths.DataLocation
|
||||
return QStandardPaths.standardLocations(qtfolder)[0]
|
||||
|
||||
except ImportError:
|
||||
# We're either running tests, and these functions don't matter much or we're in a really
|
||||
# weird situation. Let's just have dummy fallbacks.
|
||||
logging.warning("Can't setup desktop functions!")
|
||||
def _open_path(path):
|
||||
pass
|
||||
|
||||
|
||||
def _reveal_path(path):
|
||||
pass
|
||||
|
||||
|
||||
def _special_folder_path(special_folder, appname=None):
|
||||
return '/tmp'
|
||||
|
||||
@@ -1,61 +1,58 @@
|
||||
# Created On: 2013/07/01
|
||||
# Copyright 2014 Hardcoded Software (http://www.hardcoded.net)
|
||||
#
|
||||
# This software is licensed under the "BSD" License as described in the "LICENSE" file,
|
||||
# which should be included with this package. The terms are also available at
|
||||
#
|
||||
# This software is licensed under the "BSD" License as described in the "LICENSE" file,
|
||||
# which should be included with this package. The terms are also available at
|
||||
# http://www.hardcoded.net/licenses/bsd_license
|
||||
|
||||
from jobprogress.performer import ThreadedJobPerformer
|
||||
|
||||
from ..jobprogress.performer import ThreadedJobPerformer
|
||||
from .base import GUIObject
|
||||
from .text_field import TextField
|
||||
|
||||
class ProgressWindowView:
|
||||
"""Expected interface for :class:`ProgressWindow`'s view.
|
||||
|
||||
|
||||
*Not actually used in the code. For documentation purposes only.*
|
||||
|
||||
|
||||
Our view, some kind window with a progress bar, two labels and a cancel button, is expected
|
||||
to properly respond to its callbacks.
|
||||
|
||||
|
||||
It's also expected to call :meth:`ProgressWindow.cancel` when the cancel button is clicked.
|
||||
"""
|
||||
def show(self):
|
||||
"""Show the dialog.
|
||||
"""
|
||||
|
||||
|
||||
def close(self):
|
||||
"""Close the dialog.
|
||||
"""
|
||||
|
||||
|
||||
def set_progress(self, progress):
|
||||
"""Set the progress of the progress bar to ``progress``.
|
||||
|
||||
|
||||
Not all jobs are equally responsive on their job progress report and it is recommended that
|
||||
you put your progressbar in "indeterminate" mode as long as you haven't received the first
|
||||
``set_progress()`` call to avoid letting the user think that the app is frozen.
|
||||
|
||||
|
||||
:param int progress: a value between ``0`` and ``100``.
|
||||
"""
|
||||
|
||||
class ProgressWindow(GUIObject, ThreadedJobPerformer):
|
||||
"""Cross-toolkit GUI-enabled progress window.
|
||||
|
||||
This class allows you to run a long running, `job enabled`_ function in a separate thread and
|
||||
|
||||
This class allows you to run a long running, job enabled function in a separate thread and
|
||||
allow the user to follow its progress with a progress dialog.
|
||||
|
||||
|
||||
To use it, you start your long-running job with :meth:`run` and then have your UI layer
|
||||
regularly call :meth:`pulse` to refresh the job status in the UI. It is advised that you call
|
||||
:meth:`pulse` in the main thread because GUI toolkit usually only support calling UI-related
|
||||
functions from the main thread.
|
||||
|
||||
We subclass :class:`.GUIObject` and ``ThreadedJobPerformer`` (from the ``jobprogress`` library).
|
||||
|
||||
We subclass :class:`.GUIObject` and :class:`.ThreadedJobPerformer`.
|
||||
Expected view: :class:`ProgressWindowView`.
|
||||
|
||||
|
||||
:param finishfunc: A function ``f(jobid)`` that is called when a job is completed. ``jobid`` is
|
||||
an arbitrary id passed to :meth:`run`.
|
||||
|
||||
.. _job enabled: https://pypi.python.org/pypi/jobprogress
|
||||
"""
|
||||
def __init__(self, finish_func):
|
||||
# finish_func(jobid) is the function that is called when a job is completed.
|
||||
@@ -68,7 +65,7 @@ class ProgressWindow(GUIObject, ThreadedJobPerformer):
|
||||
#: during its course.
|
||||
self.progressdesc_textfield = TextField()
|
||||
self.jobid = None
|
||||
|
||||
|
||||
def cancel(self):
|
||||
"""Call for a user-initiated job cancellation.
|
||||
"""
|
||||
@@ -77,13 +74,13 @@ class ProgressWindow(GUIObject, ThreadedJobPerformer):
|
||||
# we verify that the job is still running.
|
||||
if self._job_running:
|
||||
self.job_cancelled = True
|
||||
|
||||
|
||||
def pulse(self):
|
||||
"""Update progress reports in the GUI.
|
||||
|
||||
|
||||
Call this regularly from the GUI main run loop. The values might change before
|
||||
:meth:`ProgressWindowView.set_progress` happens.
|
||||
|
||||
|
||||
If the job is finished, ``pulse()`` will take care of closing the window and re-raising any
|
||||
exception that might have been raised during the job (in the main thread this time). If
|
||||
there was no exception, ``finish_func(jobid)`` is called to let you take appropriate action.
|
||||
@@ -101,13 +98,13 @@ class ProgressWindow(GUIObject, ThreadedJobPerformer):
|
||||
if last_desc:
|
||||
self.progressdesc_textfield.text = last_desc
|
||||
self.view.set_progress(last_progress)
|
||||
|
||||
|
||||
def run(self, jobid, title, target, args=()):
|
||||
"""Starts a threaded job.
|
||||
|
||||
The ``target`` function will be sent, as its first argument, a ``Job`` instance (from the
|
||||
``jobprogress`` library) which it can use to report on its progress.
|
||||
|
||||
|
||||
The ``target`` function will be sent, as its first argument, a :class:`.Job` instance which
|
||||
it can use to report on its progress.
|
||||
|
||||
:param jobid: Arbitrary identifier which will be passed to ``finish_func()`` at the end.
|
||||
:param title: A title for the task you're starting.
|
||||
:param target: The function that does your famous long running job.
|
||||
@@ -122,4 +119,4 @@ class ProgressWindow(GUIObject, ThreadedJobPerformer):
|
||||
self.run_threaded(target, args)
|
||||
self.jobdesc_textfield.text = title
|
||||
self.view.show()
|
||||
|
||||
|
||||
|
||||
0
hscommon/jobprogress/__init__.py
Normal file
0
hscommon/jobprogress/__init__.py
Normal file
166
hscommon/jobprogress/job.py
Normal file
166
hscommon/jobprogress/job.py
Normal file
@@ -0,0 +1,166 @@
|
||||
# Created By: Virgil Dupras
|
||||
# Created On: 2004/12/20
|
||||
# Copyright 2011 Hardcoded Software (http://www.hardcoded.net)
|
||||
|
||||
# This software is licensed under the "BSD" License as described in the "LICENSE" file,
|
||||
# which should be included with this package. The terms are also available at
|
||||
# http://www.hardcoded.net/licenses/bsd_license
|
||||
|
||||
class JobCancelled(Exception):
|
||||
"The user has cancelled the job"
|
||||
|
||||
class JobInProgressError(Exception):
|
||||
"A job is already being performed, you can't perform more than one at the same time."
|
||||
|
||||
class JobCountError(Exception):
|
||||
"The number of jobs started have exceeded the number of jobs allowed"
|
||||
|
||||
class Job:
    """Track a job's progression and report it through a callback.

    Note that this class is not foolproof: nothing prevents you from mixing
    calls to a parent job and its subjob (for example calling add_progress on
    the parent while a subjob is supposed to drive the progression, or keeping
    a stale subjob around after starting a new one). Such misuse only messes
    up the reported percentage; there are no guards because they would remove
    the lightweight aspect of the class (a Job would need a parent reference
    instead of a simple callback, and a lot of checks for nothing). Likewise,
    nothing stops you from calling add_progress right after SkipJob.
    """
    #--- Magic functions
    def __init__(self, job_proportions, callback):
        """Initialize the Job with its sub-job proportions.

        job_proportions: either an int (that many equal-weight jobs) or a list
        of relative weights, one per job started with start_job().
        callback: a callable taking a progress int and an optional desc
        string; it must return a falsy value to cancel the job.
        """
        if not callable(callback):
            raise TypeError("'callback' MUST be set when creating a Job")
        if isinstance(job_proportions, int):
            job_proportions = [1] * job_proportions
        self._job_proportions = list(job_proportions)
        self._jobcount = sum(job_proportions)
        self._callback = callback
        self._current_job = 0
        self._passed_jobs = 0
        self._progress = 0
        self._currmax = 1

    #--- Private
    def _subjob_callback(self, progress, desc=''):
        """Callback handed to child jobs: forward their progress to us."""
        self.set_progress(progress, desc)
        # If JobCancelled has to be raised, it will be at the highest level.
        return True

    def _do_update(self, desc):
        """Invoke the callback with the overall percentage (0-100, or -1)."""
        if self._current_job:
            done = self._passed_jobs * self._currmax
            current = self._current_job * self._progress
            total = self._jobcount * self._currmax
            percent = ((done + current) * 100) // total
        else:
            percent = -1  # indeterminate
        # The callback might not support a desc argument.
        keep_going = self._callback(percent, desc) if desc else self._callback(percent)
        if not keep_going:
            raise JobCancelled()

    #--- Public
    def add_progress(self, progress=1, desc=''):
        """Advance the current job by ``progress`` work units."""
        self.set_progress(self._progress + progress, desc)

    def check_if_cancelled(self):
        """Ping the callback; raises JobCancelled if it asked to stop."""
        self._do_update('')

    def iter_with_progress(self, iterable, desc_format=None, every=1, count=None):
        """Iterate through ``iterable`` while automatically adding progress.

        WARNING: We need our iterable's length. If ``iterable`` is not a
        sequence (something we can call ``len()`` on), you *have* to specify a
        count through the ``count`` argument. If ``count`` is ``None``,
        ``len(iterable)`` is used.
        """
        if count is None:
            count = len(iterable)
        desc = desc_format % (0, count) if desc_format else ''
        self.start_job(count, desc)
        for seen, item in enumerate(iterable, start=1):
            yield item
            if seen % every == 0:
                if desc_format:
                    desc = desc_format % (seen, count)
                self.add_progress(progress=every, desc=desc)
        if desc_format:
            desc = desc_format % (count, count)
        self.set_progress(100, desc)

    def start_job(self, max_progress=100, desc=''):
        """Begin work on the next job.

        Must not be called more times than there are job proportions
        (raises JobCountError otherwise).
        max_progress: the number of work units this job consists of.
        desc: the description of the job.
        """
        self._passed_jobs += self._current_job
        if not self._job_proportions:
            raise JobCountError()
        self._current_job = self._job_proportions.pop(0)
        self._progress = 0
        self._currmax = max(1, max_progress)
        self._do_update(desc)

    def start_subjob(self, job_proportions, desc=''):
        """Start a sub job occupying this job's next slot.

        Use this to split a job into multiple smaller jobs — handy when you
        know how many subjobs there are but not the work unit count of each.
        Returns the child Job object.
        """
        self.start_job(100, desc)
        return Job(job_proportions, self._subjob_callback)

    def set_progress(self, progress, desc=''):
        """Clamp ``progress`` into [0, currmax], store it, call the callback."""
        self._progress = min(max(progress, 0), self._currmax)
        self._do_update(desc)
|
||||
|
||||
|
||||
class NullJob:
    """Null-object stand-in for Job.

    Accepts the same calls as Job but tracks nothing and never cancels,
    so code can run unconditionally with a job parameter.
    """
    def __init__(self, *args, **kwargs):
        """Accept and ignore any constructor arguments."""

    def add_progress(self, *args, **kwargs):
        """Ignore progress increments."""

    def check_if_cancelled(self):
        """Never cancelled; do nothing."""

    def iter_with_progress(self, sequence, *args, **kwargs):
        """Iterate ``sequence`` without reporting any progress."""
        return iter(sequence)

    def start_job(self, *args, **kwargs):
        """Ignore job starts."""

    def start_subjob(self, *args, **kwargs):
        """Return another no-op job."""
        return NullJob()

    def set_progress(self, *args, **kwargs):
        """Ignore progress updates."""


# Shared ready-to-use instance; NullJob is stateless so one is enough.
nulljob = NullJob()
|
||||
72
hscommon/jobprogress/performer.py
Normal file
72
hscommon/jobprogress/performer.py
Normal file
@@ -0,0 +1,72 @@
|
||||
# Created By: Virgil Dupras
|
||||
# Created On: 2010-11-19
|
||||
# Copyright 2011 Hardcoded Software (http://www.hardcoded.net)
|
||||
#
|
||||
# This software is licensed under the "BSD" License as described in the "LICENSE" file,
|
||||
# which should be included with this package. The terms are also available at
|
||||
# http://www.hardcoded.net/licenses/bsd_license
|
||||
|
||||
from threading import Thread
|
||||
import sys
|
||||
|
||||
from .job import Job, JobInProgressError, JobCancelled
|
||||
|
||||
class ThreadedJobPerformer:
    """Run threaded jobs and track progress.

    To run a threaded job, first create a job with create_job(), then call
    run_threaded(), with your work function as a parameter.

    Example:

        j = self.create_job()
        self.run_threaded(self.some_work_func, (arg1, arg2, j))
    """
    # Docstring fix: the example previously referenced _create_job() and
    # _run_threaded(), which don't exist — the methods are public.
    _job_running = False
    last_error = None

    #--- Public
    def create_job(self):
        """Create and return a single-slot Job wired to this performer.

        Raises JobInProgressError if a threaded job is already running.
        """
        if self._job_running:
            raise JobInProgressError()
        self.last_progress = -1
        self.last_desc = ''
        self.job_cancelled = False
        return Job(1, self._update_progress)

    def _async_run(self, *args):
        """Thread entry point: run the target, recording any error raised."""
        target = args[0]
        args = args[1:]  # *args is already a tuple; no conversion needed
        self._job_running = True
        self.last_error = None
        try:
            target(*args)
        except JobCancelled:
            pass
        except Exception as e:
            self.last_error = e
            self.last_traceback = sys.exc_info()[2]
        finally:
            self._job_running = False
            # None tells pollers (e.g. a GUI timer) that the job is over.
            self.last_progress = None

    def reraise_if_error(self):
        """Reraises the error that happened in the thread if any.

        Call this after the caller of run_threaded detected that self._job_running returned to False
        """
        if self.last_error is not None:
            raise self.last_error.with_traceback(self.last_traceback)

    def _update_progress(self, newprogress, newdesc=''):
        # Callback given to Job; returning False asks the job to cancel.
        self.last_progress = newprogress
        if newdesc:
            self.last_desc = newdesc
        return not self.job_cancelled

    def run_threaded(self, target, args=()):
        """Run ``target(*args)`` in a worker thread.

        Raises JobInProgressError if a threaded job is already running.
        """
        if self._job_running:
            raise JobInProgressError()
        args = (target, ) + args
        Thread(target=self._async_run, args=args).start()
|
||||
|
||||
52
hscommon/jobprogress/qt.py
Normal file
52
hscommon/jobprogress/qt.py
Normal file
@@ -0,0 +1,52 @@
|
||||
# Created By: Virgil Dupras
|
||||
# Created On: 2009-09-14
|
||||
# Copyright 2011 Hardcoded Software (http://www.hardcoded.net)
|
||||
#
|
||||
# This software is licensed under the "BSD" License as described in the "LICENSE" file,
|
||||
# which should be included with this package. The terms are also available at
|
||||
# http://www.hardcoded.net/licenses/bsd_license
|
||||
|
||||
from PyQt5.QtCore import pyqtSignal, Qt, QTimer
|
||||
from PyQt5.QtWidgets import QProgressDialog
|
||||
|
||||
from . import performer
|
||||
|
||||
class Progress(QProgressDialog, performer.ThreadedJobPerformer):
    """Qt progress dialog that polls a ThreadedJobPerformer for updates.

    The worker thread updates ``last_progress``/``last_desc`` on the performer
    side; a QTimer on the GUI side periodically copies those values into the
    dialog. When the job is over, ``finished`` is emitted with the job id.
    """
    # Emitted with the job id once the job ends without being cancelled.
    finished = pyqtSignal(['QString'])

    def __init__(self, parent):
        flags = Qt.CustomizeWindowHint | Qt.WindowTitleHint | Qt.WindowSystemMenuHint
        QProgressDialog.__init__(self, '', "Cancel", 0, 100, parent, flags)
        self.setModal(True)
        # Keep the dialog up until we explicitly close it in updateProgress().
        self.setAutoReset(False)
        self.setAutoClose(False)
        self._timer = QTimer()
        self._jobid = ''
        self._timer.timeout.connect(self.updateProgress)

    def updateProgress(self):
        """Timer slot: mirror the performer's progress into the dialog."""
        # the values might change before setValue happens
        last_progress = self.last_progress
        last_desc = self.last_desc
        if not self._job_running or last_progress is None:
            # Job is over (_async_run's finally sets last_progress to None).
            self._timer.stop()
            self.close()
            if not self.job_cancelled:
                self.finished.emit(self._jobid)
            return
        if self.wasCanceled():
            # Flag the cancellation; the worker thread sees it through the
            # job callback (_update_progress returning False).
            self.job_cancelled = True
            return
        if last_desc:
            self.setLabelText(last_desc)
        self.setValue(last_progress)

    def run(self, jobid, title, target, args=()):
        """Start ``target(*args)`` in a thread and show the dialog until done.

        ``jobid`` is an opaque string re-emitted through ``finished``.
        """
        self._jobid = jobid
        self.reset()
        self.setLabelText('')
        self.run_threaded(target, args)
        self.setWindowTitle(title)
        self.show()
        # Poll the worker's progress twice per second.
        self._timer.start(500)
|
||||
|
||||
0
hscommon/path.py
Executable file → Normal file
0
hscommon/path.py
Executable file → Normal file
179
hscommon/reg.py
179
hscommon/reg.py
@@ -1,179 +0,0 @@
|
||||
# Created By: Virgil Dupras
|
||||
# Created On: 2009-05-16
|
||||
# Copyright 2014 Hardcoded Software (http://www.hardcoded.net)
|
||||
|
||||
# This software is licensed under the "BSD" License as described in the "LICENSE" file,
|
||||
# which should be included with this package. The terms are also available at
|
||||
# http://www.hardcoded.net/licenses/bsd_license
|
||||
|
||||
import re
|
||||
from hashlib import md5
|
||||
|
||||
from . import desktop
|
||||
from .trans import trget
|
||||
|
||||
# Translation function scoped to the hscommon catalog.
tr = trget('hscommon')

# (appid, name) pairs for every application a registration code can belong
# to; used by validate_code() to tell the user when a code is for another app.
ALL_APPS = [
    (1, 'dupeGuru'),
    (2, 'moneyGuru'),
    (3, 'musicGuru'),
    (6, 'PdfMasher'),
]

# Maps a current appid to the set of appids it previously had; codes bought
# under an old appid stay valid (see RegistrableApplication.validate_code).
OLDAPPIDS = {
    1: {1, 4, 5},
    2: {6, },
    3: {2, },
}
|
||||
|
||||
class InvalidCodeError(Exception):
    """The supplied code is invalid.

    Raised by RegistrableApplication.validate_code() with a user-facing
    message explaining why the code was rejected.
    """
|
||||
|
||||
# Demo-mode nag message. Formatted with ``name`` and ``limitation`` by
# RegistrableApplication.initial_registration_setup() before being passed to
# view.show_demo_nag().
DEMO_PROMPT = tr("{name} is fairware, which means \"open source software developed with expectation "
    "of fair contributions from users\". It's a very interesting concept, but one year of fairware has "
    "shown that most people just want to know how much it costs and not be bothered with theories "
    "about intellectual property."
    "\n\n"
    "So I won't bother you and will be very straightforward: You can try {name} for free but you have "
    "to buy it in order to use it without limitations. In demo mode, {name} {limitation}."
    "\n\n"
    "So it's as simple as this. If you're curious about fairware, however, I encourage you to read "
    "more about it by clicking on the \"Fairware?\" button.")
|
||||
|
||||
class RegistrableApplication:
    """Application model handling registration codes and fairware mode.

    Subclasses must override PROMPT_NAME and DEMO_LIMITATION.
    """
    #--- View interface
    # get_default(key_name)
    # set_default(key_name, value)
    # setup_as_registered()
    # show_message(msg)
    # show_demo_nag(prompt)

    # Name inserted into DEMO_PROMPT; override in subclasses.
    PROMPT_NAME = "<undefined>"
    # Description of demo-mode limitations; override in subclasses.
    DEMO_LIMITATION = "<undefined>"

    def __init__(self, view, appid):
        self.view = view
        self.appid = appid
        self.registered = False
        self.fairware_mode = False
        self.registration_code = ''
        self.registration_email = ''
        self._unpaid_hours = None

    @staticmethod
    def _is_code_valid(appid, code, email):
        """Return whether ``code`` matches appid/email.

        A code is valid when it equals md5(appid + email + i + 'aybabtu')
        for some salt i in 0..99.
        """
        if len(code) != 32:
            return False
        appid = str(appid)
        for i in range(100):
            blob = appid + email + str(i) + 'aybabtu'
            digest = md5(blob.encode('utf-8')).hexdigest()
            if digest == code:
                return True
        return False

    def _set_registration(self, code, email):
        # validate_code raises InvalidCodeError when the code doesn't check out.
        self.validate_code(code, email)
        self.registration_code = code
        self.registration_email = email
        self.registered = True
        self.view.setup_as_registered()

    def initial_registration_setup(self):
        """Restore a saved registration, or show the demo nag.

        Should be called only after the app is finished launching.
        """
        if self.registered:
            # We've already set registration in a hardcoded way (for example, for the Ubuntu Store)
            # Just ignore registration, but not before having set as registered.
            self.view.setup_as_registered()
            return
        code = self.view.get_default('RegistrationCode')
        email = self.view.get_default('RegistrationEmail')
        if code and email:
            try:
                self._set_registration(code, email)
            except InvalidCodeError:
                # Saved credentials are bad; fall through to demo/fairware.
                pass
        if not self.registered:
            if self.view.get_default('FairwareMode'):
                self.fairware_mode = True
            if not self.fairware_mode:
                prompt = DEMO_PROMPT.format(name=self.PROMPT_NAME, limitation=self.DEMO_LIMITATION)
                self.view.show_demo_nag(prompt)

    def validate_code(self, code, email):
        """Raise InvalidCodeError with a helpful message if code is invalid.

        Returns silently when the code is valid for the current appid or one
        of its old appids (see OLDAPPIDS).
        """
        code = code.strip().lower()
        email = email.strip().lower()
        if self._is_code_valid(self.appid, code, email):
            return
        # Check if it's not an old reg code
        for oldappid in OLDAPPIDS.get(self.appid, []):
            if self._is_code_valid(oldappid, code, email):
                return
        # let's see if the user didn't mix the fields up
        if self._is_code_valid(self.appid, email, code):
            # BUGFIX: the original message was missing the space after "and",
            # which rendered as "and'Registration E-mail'".
            msg = "Invalid Code. It seems like you inverted the 'Registration Code' and "\
                "'Registration E-mail' field."
            raise InvalidCodeError(msg)
        # Is the code a paypal transaction id?
        if re.match(r'^[a-z\d]{17}$', code) is not None:
            msg = "The code you submitted looks like a Paypal transaction ID. Registration codes are "\
                "32 digits codes which you should have received in a separate e-mail. If you haven't "\
                "received it yet, please visit http://www.hardcoded.net/support/"
            raise InvalidCodeError(msg)
        # Invalid, let's see if it's a code for another app.
        for appid, appname in ALL_APPS:
            if self._is_code_valid(appid, code, email):
                msg = "This code is a {0} code. You're running the wrong application. You can "\
                    "download the correct application at http://www.hardcoded.net".format(appname)
                raise InvalidCodeError(msg)
        DEFAULT_MSG = "Your code is invalid. Make sure that you wrote the good code. Also make sure "\
            "that the e-mail you gave is the same as the e-mail you used for your purchase."
        raise InvalidCodeError(DEFAULT_MSG)

    def set_registration(self, code, email, register_os):
        """Try to register with code/email; return True on success.

        Typing "fairware" in either field enables fairware mode instead.
        """
        if not self.fairware_mode and 'fairware' in {code.strip().lower(), email.strip().lower()}:
            self.fairware_mode = True
            self.view.set_default('FairwareMode', True)
            self.view.show_message("Fairware mode enabled.")
            return True
        try:
            self._set_registration(code, email)
            self.view.show_message("Your code is valid. Thanks!")
            if register_os:
                self.register_os()
            self.view.set_default('RegistrationCode', self.registration_code)
            self.view.set_default('RegistrationEmail', self.registration_email)
            return True
        except InvalidCodeError as e:
            self.view.show_message(str(e))
            return False

    def register_os(self):
        # We don't do that anymore.
        pass

    def contribute(self):
        desktop.open_url("http://open.hardcoded.net/contribute/")

    def buy(self):
        desktop.open_url("http://www.hardcoded.net/purchase.htm")

    def about_fairware(self):
        desktop.open_url("http://open.hardcoded.net/about/")

    @property
    def should_show_fairware_reminder(self):
        return (not self.registered) and (self.fairware_mode) and (self.unpaid_hours >= 1)

    @property
    def should_apply_demo_limitation(self):
        return (not self.registered) and (not self.fairware_mode)

    @property
    def unpaid_hours(self):
        # We don't bother verifying unpaid hours anymore, the only app that still has fairware
        # dialogs is dupeGuru and it has a huge surplus. Now, "fairware mode" really means
        # "free mode".
        return 0
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
# Created By: Virgil Dupras
|
||||
# Created On: 2011-01-12
|
||||
# Copyright 2014 Hardcoded Software (http://www.hardcoded.net)
|
||||
#
|
||||
# This software is licensed under the "BSD" License as described in the "LICENSE" file,
|
||||
# which should be included with this package. The terms are also available at
|
||||
#
|
||||
# This software is licensed under the "BSD" License as described in the "LICENSE" file,
|
||||
# which should be included with this package. The terms are also available at
|
||||
# http://www.hardcoded.net/licenses/bsd_license
|
||||
|
||||
import os.path as op
|
||||
@@ -31,7 +31,7 @@ def tixgen(tixurl):
|
||||
|
||||
def gen(basepath, destpath, changelogpath, tixurl, confrepl=None, confpath=None, changelogtmpl=None):
|
||||
"""Generate sphinx docs with all bells and whistles.
|
||||
|
||||
|
||||
basepath: The base sphinx source path.
|
||||
destpath: The final path of html files
|
||||
changelogpath: The path to the changelog file to insert in changelog.rst.
|
||||
@@ -66,4 +66,8 @@ def gen(basepath, destpath, changelogpath, tixurl, confrepl=None, confpath=None,
|
||||
# missing dependencies which are in the virtualenv). Here, we do exactly what is done when
|
||||
# calling the command from bash.
|
||||
cmd = load_entry_point('Sphinx', 'console_scripts', 'sphinx-build')
|
||||
cmd(['sphinx-build', basepath, destpath])
|
||||
try:
|
||||
cmd(['sphinx-build', basepath, destpath])
|
||||
except SystemExit:
|
||||
print("Sphinx called sys.exit(), but we're cancelling it because we don't actually want to exit")
|
||||
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
# Created By: Virgil Dupras
|
||||
# Created On: 2011-01-11
|
||||
# Copyright 2014 Hardcoded Software (http://www.hardcoded.net)
|
||||
#
|
||||
# This software is licensed under the "BSD" License as described in the "LICENSE" file,
|
||||
# which should be included with this package. The terms are also available at
|
||||
#
|
||||
# This software is licensed under the "BSD" License as described in the "LICENSE" file,
|
||||
# which should be included with this package. The terms are also available at
|
||||
# http://www.hardcoded.net/licenses/bsd_license
|
||||
|
||||
from io import StringIO
|
||||
@@ -65,6 +65,12 @@ def test_trailiter():
|
||||
eq_(list(trailiter(['foo', 'bar'], skipfirst=True)), [('foo', 'bar')])
|
||||
eq_(list(trailiter([], skipfirst=True)), []) # no crash
|
||||
|
||||
def test_iterconsume():
|
||||
# We just want to make sure that we return *all* items and that we're not mistakenly skipping
|
||||
# one.
|
||||
eq_(list(range(2500)), list(iterconsume(list(range(2500)))))
|
||||
eq_(list(reversed(range(2500))), list(iterconsume(list(range(2500)), reverse=False)))
|
||||
|
||||
#--- String
|
||||
|
||||
def test_escape():
|
||||
@@ -188,63 +194,63 @@ class TestCase_modified_after:
|
||||
monkeyplus.patch_osstat('first', st_mtime=42)
|
||||
monkeyplus.patch_osstat('second', st_mtime=41)
|
||||
assert modified_after('first', 'second')
|
||||
|
||||
|
||||
def test_second_is_modified_after(self, monkeyplus):
|
||||
monkeyplus.patch_osstat('first', st_mtime=42)
|
||||
monkeyplus.patch_osstat('second', st_mtime=43)
|
||||
assert not modified_after('first', 'second')
|
||||
|
||||
|
||||
def test_same_mtime(self, monkeyplus):
|
||||
monkeyplus.patch_osstat('first', st_mtime=42)
|
||||
monkeyplus.patch_osstat('second', st_mtime=42)
|
||||
assert not modified_after('first', 'second')
|
||||
|
||||
|
||||
def test_first_file_does_not_exist(self, monkeyplus):
|
||||
# when the first file doesn't exist, we return False
|
||||
monkeyplus.patch_osstat('second', st_mtime=42)
|
||||
assert not modified_after('does_not_exist', 'second') # no crash
|
||||
|
||||
|
||||
def test_second_file_does_not_exist(self, monkeyplus):
|
||||
# when the second file doesn't exist, we return True
|
||||
monkeyplus.patch_osstat('first', st_mtime=42)
|
||||
assert modified_after('first', 'does_not_exist') # no crash
|
||||
|
||||
|
||||
def test_first_file_is_none(self, monkeyplus):
|
||||
# when the first file is None, we return False
|
||||
monkeyplus.patch_osstat('second', st_mtime=42)
|
||||
assert not modified_after(None, 'second') # no crash
|
||||
|
||||
|
||||
def test_second_file_is_none(self, monkeyplus):
|
||||
# when the second file is None, we return True
|
||||
monkeyplus.patch_osstat('first', st_mtime=42)
|
||||
assert modified_after('first', None) # no crash
|
||||
|
||||
|
||||
|
||||
class TestCase_delete_if_empty:
|
||||
def test_is_empty(self, tmpdir):
|
||||
testpath = Path(str(tmpdir))
|
||||
assert delete_if_empty(testpath)
|
||||
assert not testpath.exists()
|
||||
|
||||
|
||||
def test_not_empty(self, tmpdir):
|
||||
testpath = Path(str(tmpdir))
|
||||
testpath['foo'].mkdir()
|
||||
assert not delete_if_empty(testpath)
|
||||
assert testpath.exists()
|
||||
|
||||
|
||||
def test_with_files_to_delete(self, tmpdir):
|
||||
testpath = Path(str(tmpdir))
|
||||
testpath['foo'].open('w')
|
||||
testpath['bar'].open('w')
|
||||
assert delete_if_empty(testpath, ['foo', 'bar'])
|
||||
assert not testpath.exists()
|
||||
|
||||
|
||||
def test_directory_in_files_to_delete(self, tmpdir):
|
||||
testpath = Path(str(tmpdir))
|
||||
testpath['foo'].mkdir()
|
||||
assert not delete_if_empty(testpath, ['foo'])
|
||||
assert testpath.exists()
|
||||
|
||||
|
||||
def test_delete_files_to_delete_only_if_dir_is_empty(self, tmpdir):
|
||||
testpath = Path(str(tmpdir))
|
||||
testpath['foo'].open('w')
|
||||
@@ -252,25 +258,25 @@ class TestCase_delete_if_empty:
|
||||
assert not delete_if_empty(testpath, ['foo'])
|
||||
assert testpath.exists()
|
||||
assert testpath['foo'].exists()
|
||||
|
||||
|
||||
def test_doesnt_exist(self):
|
||||
# When the 'path' doesn't exist, just do nothing.
|
||||
delete_if_empty(Path('does_not_exist')) # no crash
|
||||
|
||||
|
||||
def test_is_file(self, tmpdir):
|
||||
# When 'path' is a file, do nothing.
|
||||
p = Path(str(tmpdir)) + 'filename'
|
||||
p.open('w').close()
|
||||
delete_if_empty(p) # no crash
|
||||
|
||||
|
||||
def test_ioerror(self, tmpdir, monkeypatch):
|
||||
# if an IO error happens during the operation, ignore it.
|
||||
def do_raise(*args, **kw):
|
||||
raise OSError()
|
||||
|
||||
|
||||
monkeypatch.setattr(Path, 'rmdir', do_raise)
|
||||
delete_if_empty(Path(str(tmpdir))) # no crash
|
||||
|
||||
|
||||
|
||||
class TestCase_open_if_filename:
|
||||
def test_file_name(self, tmpdir):
|
||||
@@ -280,7 +286,7 @@ class TestCase_open_if_filename:
|
||||
assert close
|
||||
eq_(b'test_data', file.read())
|
||||
file.close()
|
||||
|
||||
|
||||
def test_opened_file(self):
|
||||
sio = StringIO()
|
||||
sio.write('test_data')
|
||||
@@ -288,14 +294,14 @@ class TestCase_open_if_filename:
|
||||
file, close = open_if_filename(sio)
|
||||
assert not close
|
||||
eq_('test_data', file.read())
|
||||
|
||||
|
||||
def test_mode_is_passed_to_open(self, tmpdir):
|
||||
filepath = str(tmpdir.join('test.txt'))
|
||||
open(filepath, 'w').close()
|
||||
file, close = open_if_filename(filepath, 'a')
|
||||
eq_('a', file.mode)
|
||||
file.close()
|
||||
|
||||
|
||||
|
||||
class TestCase_FileOrPath:
|
||||
def test_path(self, tmpdir):
|
||||
@@ -303,17 +309,17 @@ class TestCase_FileOrPath:
|
||||
open(filepath, 'wb').write(b'test_data')
|
||||
with FileOrPath(filepath) as fp:
|
||||
eq_(b'test_data', fp.read())
|
||||
|
||||
|
||||
def test_opened_file(self):
|
||||
sio = StringIO()
|
||||
sio.write('test_data')
|
||||
sio.seek(0)
|
||||
with FileOrPath(sio) as fp:
|
||||
eq_('test_data', fp.read())
|
||||
|
||||
|
||||
def test_mode_is_passed_to_open(self, tmpdir):
|
||||
filepath = str(tmpdir.join('test.txt'))
|
||||
open(filepath, 'w').close()
|
||||
with FileOrPath(filepath, 'a') as fp:
|
||||
eq_('a', fp.mode)
|
||||
|
||||
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
# Created By: Virgil Dupras
|
||||
# Created On: 2011-01-11
|
||||
# Copyright 2014 Hardcoded Software (http://www.hardcoded.net)
|
||||
#
|
||||
# This software is licensed under the "BSD" License as described in the "LICENSE" file,
|
||||
# which should be included with this package. The terms are also available at
|
||||
#
|
||||
# This software is licensed under the "BSD" License as described in the "LICENSE" file,
|
||||
# which should be included with this package. The terms are also available at
|
||||
# http://www.hardcoded.net/licenses/bsd_license
|
||||
|
||||
import sys
|
||||
@@ -42,7 +42,7 @@ def minmax(value, min_value, max_value):
|
||||
|
||||
def dedupe(iterable):
|
||||
"""Returns a list of elements in ``iterable`` with all dupes removed.
|
||||
|
||||
|
||||
The order of the elements is preserved.
|
||||
"""
|
||||
result = []
|
||||
@@ -56,7 +56,7 @@ def dedupe(iterable):
|
||||
|
||||
def flatten(iterables, start_with=None):
|
||||
"""Takes a list of lists ``iterables`` and returns a list containing elements of every list.
|
||||
|
||||
|
||||
If ``start_with`` is not ``None``, the result will start with ``start_with`` items, exactly as
|
||||
if ``start_with`` would be the first item of lists.
|
||||
"""
|
||||
@@ -104,7 +104,7 @@ def allsame(iterable):
|
||||
|
||||
def trailiter(iterable, skipfirst=False):
|
||||
"""Yields (prev_element, element), starting with (None, first_element).
|
||||
|
||||
|
||||
If skipfirst is True, there will be no (None, item1) element and we'll start
|
||||
directly with (item1, item2).
|
||||
"""
|
||||
@@ -117,6 +117,21 @@ def trailiter(iterable, skipfirst=False):
|
||||
yield prev, item
|
||||
prev = item
|
||||
|
||||
def iterconsume(seq, reverse=True):
    """Yield the items of ``seq``, popping each one off as it is yielded.

    Because ``pop()`` takes items from the end, ``seq`` is reversed first so
    items come out in their original order; pass ``reverse=False`` to skip
    that and consume from the tail instead.

    Useful in tight memory situations when looping over objects that are
    discarded afterwards: consuming the list as you go avoids holding every
    object alive at once (and a possible ``MemoryError``).
    """
    if reverse:
        seq.reverse()
    pop = seq.pop
    while seq:
        yield pop()
|
||||
|
||||
#--- String related
|
||||
|
||||
def escape(s, to_escape, escape_with='\\'):
|
||||
@@ -144,7 +159,7 @@ def rem_file_ext(filename):
|
||||
|
||||
def pluralize(number, word, decimals=0, plural_word=None):
|
||||
"""Returns a pluralized string with ``number`` in front of ``word``.
|
||||
|
||||
|
||||
Adds a 's' to s if ``number`` > 1.
|
||||
``number``: The number to go in front of s
|
||||
``word``: The word to go after number
|
||||
@@ -162,7 +177,7 @@ def pluralize(number, word, decimals=0, plural_word=None):
|
||||
|
||||
def format_time(seconds, with_hours=True):
|
||||
"""Transforms seconds in a hh:mm:ss string.
|
||||
|
||||
|
||||
If ``with_hours`` if false, the format is mm:ss.
|
||||
"""
|
||||
minus = seconds < 0
|
||||
@@ -202,14 +217,14 @@ SIZE_DESC = ('B','KB','MB','GB','TB','PB','EB','ZB','YB')
|
||||
SIZE_VALS = tuple(1024 ** i for i in range(1,9))
|
||||
def format_size(size, decimal=0, forcepower=-1, showdesc=True):
|
||||
"""Transform a byte count in a formatted string (KB, MB etc..).
|
||||
|
||||
|
||||
``size`` is the number of bytes to format.
|
||||
``decimal`` is the number digits after the dot.
|
||||
``forcepower`` is the desired suffix. 0 is B, 1 is KB, 2 is MB etc.. if kept at -1, the suffix
|
||||
will be automatically chosen (so the resulting number is always below 1024).
|
||||
if ``showdesc`` is ``True``, the suffix will be shown after the number.
|
||||
Usage example::
|
||||
|
||||
|
||||
>>> format_size(1234, decimal=2, showdesc=True)
|
||||
'1.21 KB'
|
||||
"""
|
||||
@@ -283,7 +298,7 @@ def iterdaterange(start, end):
|
||||
@pathify
|
||||
def modified_after(first_path: Path, second_path: Path):
|
||||
"""Returns ``True`` if first_path's mtime is higher than second_path's mtime.
|
||||
|
||||
|
||||
If one of the files doesn't exist or is ``None``, it is considered "never modified".
|
||||
"""
|
||||
try:
|
||||
@@ -326,11 +341,11 @@ def delete_if_empty(path: Path, files_to_delete=[]):
|
||||
|
||||
def open_if_filename(infile, mode='rb'):
|
||||
"""If ``infile`` is a string, it opens and returns it. If it's already a file object, it simply returns it.
|
||||
|
||||
|
||||
This function returns ``(file, should_close_flag)``. The should_close_flag is True is a file has
|
||||
effectively been opened (if we already pass a file object, we assume that the responsibility for
|
||||
closing the file has already been taken). Example usage::
|
||||
|
||||
|
||||
fp, shouldclose = open_if_filename(infile)
|
||||
dostuff()
|
||||
if shouldclose:
|
||||
@@ -370,9 +385,9 @@ def delete_files_with_pattern(folder_path, pattern, recursive=True):
|
||||
|
||||
class FileOrPath:
|
||||
"""Does the same as :func:`open_if_filename`, but it can be used with a ``with`` statement.
|
||||
|
||||
|
||||
Example::
|
||||
|
||||
|
||||
with FileOrPath(infile):
|
||||
dostuff()
|
||||
"""
|
||||
@@ -381,12 +396,12 @@ class FileOrPath:
|
||||
self.mode = mode
|
||||
self.mustclose = False
|
||||
self.fp = None
|
||||
|
||||
|
||||
def __enter__(self):
|
||||
self.fp, self.mustclose = open_if_filename(self.file_or_path, self.mode)
|
||||
return self.fp
|
||||
|
||||
|
||||
def __exit__(self, exc_type, exc_value, traceback):
|
||||
if self.fp and self.mustclose:
|
||||
self.fp.close()
|
||||
|
||||
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
# Translators:
|
||||
# Harakiri1337, 2014
|
||||
msgid ""
|
||||
msgstr ""
|
||||
"Project-Id-Version: dupeGuru\n"
|
||||
"PO-Revision-Date: 2013-11-20 11:53+0000\n"
|
||||
"Last-Translator: hsoft <hsoft@hardcoded.net>\n"
|
||||
"PO-Revision-Date: 2014-06-03 21:56+0000\n"
|
||||
"Last-Translator: Harakiri1337\n"
|
||||
"Language-Team: German (http://www.transifex.com/projects/p/dupeguru/language/de/)\n"
|
||||
"Content-Type: text/plain; charset=UTF-8\n"
|
||||
"Content-Transfer-Encoding: utf-8\n"
|
||||
@@ -32,7 +33,7 @@ msgstr "Ordner"
|
||||
#: core/prioritize.py:88 core_me/result_table.py:18 core_pe/result_table.py:18
|
||||
#: core_se/result_table.py:18
|
||||
msgid "Filename"
|
||||
msgstr "Filename"
|
||||
msgstr "Dateiname"
|
||||
|
||||
#: core/prioritize.py:147
|
||||
msgid "Size"
|
||||
@@ -41,11 +42,11 @@ msgstr "Größe"
|
||||
#: core/prioritize.py:153 core_me/result_table.py:25
|
||||
#: core_pe/result_table.py:24 core_se/result_table.py:22
|
||||
msgid "Modification"
|
||||
msgstr "Modifikation"
|
||||
msgstr "Geändert"
|
||||
|
||||
#: core_me/prioritize.py:16
|
||||
msgid "Duration"
|
||||
msgstr ""
|
||||
msgstr "Dauer"
|
||||
|
||||
#: core_me/prioritize.py:22 core_me/result_table.py:22
|
||||
msgid "Bitrate"
|
||||
@@ -53,7 +54,7 @@ msgstr "Bitrate"
|
||||
|
||||
#: core_me/prioritize.py:28
|
||||
msgid "Samplerate"
|
||||
msgstr ""
|
||||
msgstr "Abtastrate"
|
||||
|
||||
#: core_me/result_table.py:20
|
||||
msgid "Size (MB)"
|
||||
@@ -89,7 +90,7 @@ msgstr "Jahr"
|
||||
|
||||
#: core_me/result_table.py:31
|
||||
msgid "Track Number"
|
||||
msgstr "Stück Nummer"
|
||||
msgstr "Titel Nummer"
|
||||
|
||||
#: core_me/result_table.py:32
|
||||
msgid "Comment"
|
||||
@@ -102,16 +103,16 @@ msgstr "Übereinstimmung %"
|
||||
|
||||
#: core_me/result_table.py:34 core_se/result_table.py:24
|
||||
msgid "Words Used"
|
||||
msgstr "Wörter genutzt"
|
||||
msgstr "genutzte Wörter"
|
||||
|
||||
#: core_me/result_table.py:35 core_pe/result_table.py:26
|
||||
#: core_se/result_table.py:25
|
||||
msgid "Dupe Count"
|
||||
msgstr "Anzahl Duplikate"
|
||||
msgstr "Anzahl der Duplikate"
|
||||
|
||||
#: core_pe/prioritize.py:16 core_pe/result_table.py:22
|
||||
msgid "Dimensions"
|
||||
msgstr "Dimensionen"
|
||||
msgstr "Auflösung"
|
||||
|
||||
#: core_pe/result_table.py:20 core_se/result_table.py:20
|
||||
msgid "Size (KB)"
|
||||
@@ -119,4 +120,4 @@ msgstr "Größe (KB)"
|
||||
|
||||
#: core_pe/result_table.py:23
|
||||
msgid "EXIF Timestamp"
|
||||
msgstr ""
|
||||
msgstr "EXIF Zeitstempel"
|
||||
|
||||
@@ -1,9 +1,11 @@
|
||||
# Translators:
|
||||
# Harakiri1337, 2014
|
||||
# Frank Weber <frank.weber@gmail.com>, 2014
|
||||
msgid ""
|
||||
msgstr ""
|
||||
"Project-Id-Version: dupeGuru\n"
|
||||
"PO-Revision-Date: 2013-12-07 15:22+0000\n"
|
||||
"Last-Translator: hsoft <hsoft@hardcoded.net>\n"
|
||||
"PO-Revision-Date: 2014-09-26 21:24+0000\n"
|
||||
"Last-Translator: Frank Weber <frank.weber@gmail.com>\n"
|
||||
"Language-Team: German (http://www.transifex.com/projects/p/dupeguru/language/de/)\n"
|
||||
"Content-Type: text/plain; charset=UTF-8\n"
|
||||
"Content-Transfer-Encoding: utf-8\n"
|
||||
@@ -12,17 +14,19 @@ msgstr ""
|
||||
|
||||
#: core/app.py:39
|
||||
msgid "There are no marked duplicates. Nothing has been done."
|
||||
msgstr ""
|
||||
msgstr "Keine markierten Duplikate, daher wurde nichts getan."
|
||||
|
||||
#: core/app.py:40
|
||||
msgid "There are no selected duplicates. Nothing has been done."
|
||||
msgstr ""
|
||||
msgstr "Keine ausgewählten Duplikate, daher wurde nichts getan."
|
||||
|
||||
#: core/app.py:41
|
||||
msgid ""
|
||||
"You're about to open many files at once. Depending on what those files are "
|
||||
"opened with, doing so can create quite a mess. Continue?"
|
||||
msgstr ""
|
||||
"Sie sind dabei, sehr viele Dateien gleichzeitig zu öffnen. Das kann zu "
|
||||
"ziemlichem Durcheinander führen! Trotzdem fortfahren?"
|
||||
|
||||
#: core/app.py:57
|
||||
msgid "Scanning for duplicates"
|
||||
@@ -30,23 +34,23 @@ msgstr "Suche nach Duplikaten"
|
||||
|
||||
#: core/app.py:58
|
||||
msgid "Loading"
|
||||
msgstr "Laden"
|
||||
msgstr "Lade"
|
||||
|
||||
#: core/app.py:59
|
||||
msgid "Moving"
|
||||
msgstr "Verschieben"
|
||||
msgstr "Verschiebe"
|
||||
|
||||
#: core/app.py:60
|
||||
msgid "Copying"
|
||||
msgstr "Kopieren"
|
||||
msgstr "Kopiere"
|
||||
|
||||
#: core/app.py:61
|
||||
msgid "Sending to Trash"
|
||||
msgstr "Verschiebe in den Mülleimer"
|
||||
msgstr "Verschiebe in den Papierkorb"
|
||||
|
||||
#: core/app.py:64
|
||||
msgid "Sending files to the recycle bin"
|
||||
msgstr "Sende Dateien in den Mülleimer"
|
||||
msgstr "Verschiebe Dateien in den Papierkorb"
|
||||
|
||||
#: core/app.py:290
|
||||
msgid ""
|
||||
@@ -62,29 +66,32 @@ msgstr "Keine Duplikate gefunden."
|
||||
|
||||
#: core/app.py:310
|
||||
msgid "All marked files were copied successfully."
|
||||
msgstr ""
|
||||
msgstr "Alle markierten Dateien wurden erfolgreich kopiert."
|
||||
|
||||
#: core/app.py:311
|
||||
msgid "All marked files were moved successfully."
|
||||
msgstr ""
|
||||
msgstr "Alle markierten Dateien wurden erfolgreich verschoben."
|
||||
|
||||
#: core/app.py:312
|
||||
msgid "All marked files were successfully sent to Trash."
|
||||
msgstr ""
|
||||
"Alle markierten Dateien wurden erfolgreich in den Papierkorb verschoben."
|
||||
|
||||
#: core/app.py:349
|
||||
msgid "'{}' already is in the list."
|
||||
msgstr ""
|
||||
msgstr "'{}' ist bereits in der Liste."
|
||||
|
||||
#: core/app.py:351
|
||||
msgid "'{}' does not exist."
|
||||
msgstr ""
|
||||
msgstr "'{}' existiert nicht."
|
||||
|
||||
#: core/app.py:360
|
||||
msgid ""
|
||||
"All selected %d matches are going to be ignored in all subsequent scans. "
|
||||
"Continue?"
|
||||
msgstr "%d Dateien werden in zukünftigen Scans ignoriert werden. Fortfahren?"
|
||||
msgstr ""
|
||||
"Alle %d ausgewählten Dateien werden in zukünftigen Scans ignoriert. "
|
||||
"Fortfahren?"
|
||||
|
||||
#: core/app.py:426
|
||||
msgid "copy"
|
||||
@@ -96,17 +103,17 @@ msgstr "verschieben"
|
||||
|
||||
#: core/app.py:427
|
||||
msgid "Select a directory to {} marked files to"
|
||||
msgstr "Wählen sie einen Ordner zum {} der ausgewählten Dateien"
|
||||
msgstr "Wählen Sie einen Ordner zum {} der ausgewählten Dateien."
|
||||
|
||||
#: core/app.py:464
|
||||
msgid "Select a destination for your exported CSV"
|
||||
msgstr ""
|
||||
msgstr "Zielverzeichnis für den CSV Export angeben"
|
||||
|
||||
#: core/app.py:489
|
||||
msgid "You have no custom command set up. Set it up in your preferences."
|
||||
msgstr ""
|
||||
"Sie haben keinen eigenen Befehl erstellt. Bitte in den Einstellungen "
|
||||
"konfigurieren."
|
||||
"Sie haben noch keinen Befehl erstellt. Bitte dies in den Einstellungen vornehmen.\n"
|
||||
"Bsp.: \"C:\\Program Files\\Diff\\Diff.exe\" \"%d\" \"%r\""
|
||||
|
||||
#: core/app.py:641 core/app.py:654
|
||||
msgid "You are about to remove %d files from results. Continue?"
|
||||
@@ -114,15 +121,15 @@ msgstr "%d Dateien werden aus der Ergebnisliste entfernt. Fortfahren?"
|
||||
|
||||
#: core/app.py:688
|
||||
msgid "{} duplicate groups were changed by the re-prioritization."
|
||||
msgstr ""
|
||||
msgstr "{} Duplikat-Gruppen wurden durch die Neu-Priorisierung geändert."
|
||||
|
||||
#: core/app.py:716
|
||||
msgid "Collecting files to scan"
|
||||
msgstr "Sammle Dateien zum Scannen"
|
||||
msgstr "Sammle zu scannende Dateien..."
|
||||
|
||||
#: core/app.py:727
|
||||
msgid "The selected directories contain no scannable file."
|
||||
msgstr "Der ausgewählte Ordner enthält keine scannbare Dateien."
|
||||
msgstr "Ausgewählte Ordner enthalten keine scannbaren Dateien."
|
||||
|
||||
#: core/app.py:768
|
||||
msgid "%s (%d discarded)"
|
||||
@@ -130,11 +137,11 @@ msgstr "%s (%d verworfen)"
|
||||
|
||||
#: core/engine.py:220 core/engine.py:265
|
||||
msgid "0 matches found"
|
||||
msgstr "0 Paare gefunden"
|
||||
msgstr "0 Übereinstimmungen gefunden"
|
||||
|
||||
#: core/engine.py:238 core/engine.py:273
|
||||
msgid "%d matches found"
|
||||
msgstr "%d Paare gefunden"
|
||||
msgstr "%d Übereinstimmungen gefunden"
|
||||
|
||||
#: core/engine.py:258 core/scanner.py:79
|
||||
msgid "Read size of %d/%d files"
|
||||
@@ -142,51 +149,51 @@ msgstr "Lese Größe von %d/%d Dateien"
|
||||
|
||||
#: core/engine.py:464
|
||||
msgid "Grouped %d/%d matches"
|
||||
msgstr "%d/%d Paare gruppiert"
|
||||
msgstr "%d/%d Übereinstimmungen gruppiert"
|
||||
|
||||
#: core/gui/deletion_options.py:69
|
||||
msgid "You are sending {} file(s) to the Trash."
|
||||
msgstr ""
|
||||
msgstr "Verschiebe {} Datei(en) in den Papierkorb."
|
||||
|
||||
#: core/gui/ignore_list_dialog.py:24
|
||||
msgid "Do you really want to remove all %d items from the ignore list?"
|
||||
msgstr "Möchten Sie wirklich alle %d Einträge aus der Ignorier-Liste löschen?"
|
||||
msgstr "Möchten Sie wirklich alle %d Einträge aus der Ausnahmeliste löschen?"
|
||||
|
||||
#: core/prioritize.py:68
|
||||
msgid "None"
|
||||
msgstr ""
|
||||
msgstr "Nichts"
|
||||
|
||||
#: core/prioritize.py:96
|
||||
msgid "Ends with number"
|
||||
msgstr "Ends with number"
|
||||
msgstr "Endet mit Zahl"
|
||||
|
||||
#: core/prioritize.py:97
|
||||
msgid "Doesn't end with number"
|
||||
msgstr "Doesn't end with number"
|
||||
msgstr "Endet nicht mit Zahl"
|
||||
|
||||
#: core/prioritize.py:98
|
||||
msgid "Longest"
|
||||
msgstr ""
|
||||
msgstr "Längste"
|
||||
|
||||
#: core/prioritize.py:99
|
||||
msgid "Shortest"
|
||||
msgstr ""
|
||||
msgstr "Kürzeste"
|
||||
|
||||
#: core/prioritize.py:132
|
||||
msgid "Highest"
|
||||
msgstr "Highest"
|
||||
msgstr "Höchste"
|
||||
|
||||
#: core/prioritize.py:132
|
||||
msgid "Lowest"
|
||||
msgstr "Lowest"
|
||||
msgstr "Niedrigste"
|
||||
|
||||
#: core/prioritize.py:159
|
||||
msgid "Newest"
|
||||
msgstr "Newest"
|
||||
msgstr "Neuste"
|
||||
|
||||
#: core/prioritize.py:159
|
||||
msgid "Oldest"
|
||||
msgstr "Oldest"
|
||||
msgstr "Älterste"
|
||||
|
||||
#: core/results.py:126
|
||||
msgid "%d / %d (%s / %s) duplicates marked."
|
||||
@@ -202,11 +209,11 @@ msgstr "Lese Metadaten von %d/%d Dateien"
|
||||
|
||||
#: core/scanner.py:130
|
||||
msgid "Removing false matches"
|
||||
msgstr "Entferne Falschpositive."
|
||||
msgstr "Entferne falsche Übereinstimmungen"
|
||||
|
||||
#: core/scanner.py:154
|
||||
msgid "Processed %d/%d matches against the ignore list"
|
||||
msgstr "Verarbeitung von %d/%d Paaren gegen die Ignorier-Liste"
|
||||
msgstr "%d/%d Treffer mit der Ausnahmeliste abgeglichen"
|
||||
|
||||
#: core/scanner.py:176
|
||||
msgid "Doing group prioritization"
|
||||
@@ -214,20 +221,20 @@ msgstr "Gruppenpriorisierung"
|
||||
|
||||
#: core_pe/matchblock.py:61
|
||||
msgid "Analyzed %d/%d pictures"
|
||||
msgstr "Analysiere %d/%d Bilder"
|
||||
msgstr "Analysiere Bild %d/%d"
|
||||
|
||||
#: core_pe/matchblock.py:153
|
||||
msgid "Performed %d/%d chunk matches"
|
||||
msgstr "Performed %d/%d chunk matches"
|
||||
msgstr "%d/%d Chunk-Matches ausgeführt"
|
||||
|
||||
#: core_pe/matchblock.py:158
|
||||
msgid "Preparing for matching"
|
||||
msgstr "Vorbereitung auf den Vergleich"
|
||||
msgstr "Bereite Matching vor"
|
||||
|
||||
#: core_pe/matchblock.py:193
|
||||
msgid "Verified %d/%d matches"
|
||||
msgstr "%d/%d verifizierte Paare"
|
||||
msgstr "%d/%d verifizierte Übereinstimmungen"
|
||||
|
||||
#: core_pe/matchexif.py:18
|
||||
msgid "Read EXIF of %d/%d pictures"
|
||||
msgstr ""
|
||||
msgstr "Lese EXIF von Bild %d/%d"
|
||||
|
||||
@@ -1,9 +1,11 @@
|
||||
# Translators:
|
||||
# Harakiri1337, 2014
|
||||
# Frank Weber <frank.weber@gmail.com>, 2014
|
||||
msgid ""
|
||||
msgstr ""
|
||||
"Project-Id-Version: dupeGuru\n"
|
||||
"PO-Revision-Date: 2013-12-07 15:22+0000\n"
|
||||
"Last-Translator: hsoft <hsoft@hardcoded.net>\n"
|
||||
"PO-Revision-Date: 2014-09-26 21:15+0000\n"
|
||||
"Last-Translator: Frank Weber <frank.weber@gmail.com>\n"
|
||||
"Language-Team: German (http://www.transifex.com/projects/p/dupeguru/language/de/)\n"
|
||||
"Content-Type: text/plain; charset=UTF-8\n"
|
||||
"Content-Transfer-Encoding: utf-8\n"
|
||||
@@ -12,49 +14,50 @@ msgstr ""
|
||||
|
||||
#: cocoa/inter/app_me.py:34
|
||||
msgid "Removing dead tracks from your iTunes Library"
|
||||
msgstr "Entferne tote Stücke aus Ihrer iTunes Bibliothek."
|
||||
msgstr "Entferne tote Tracks aus Ihrer iTunes Bibliothek"
|
||||
|
||||
#: cocoa/inter/app_me.py:35
|
||||
msgid "Scanning the iTunes Library"
|
||||
msgstr "Scanne die iTunes Bibiliothek"
|
||||
msgstr "Durchsuche die iTunes-Bibliothek"
|
||||
|
||||
#: cocoa/inter/app_me.py:158 cocoa/inter/app_pe.py:200
|
||||
msgid "Sending dupes to the Trash"
|
||||
msgstr "Verschiebe Duplikate in den Mülleimer"
|
||||
msgstr "Schicke Duplikate in den Papierkorb"
|
||||
|
||||
#: cocoa/inter/app_me.py:160
|
||||
msgid "Talking to iTunes. Don't touch it!"
|
||||
msgstr ""
|
||||
msgstr "Kommuniziere mit iTunes. Bitte warten!"
|
||||
|
||||
#: cocoa/inter/app_me.py:195
|
||||
msgid ""
|
||||
"Your iTunes Library contains %d dead tracks ready to be removed. Continue?"
|
||||
msgstr ""
|
||||
"Your iTunes Library contains %d dead tracks ready to be removed. Continue?"
|
||||
"Ihre iTunes-Bibliothek enthält %d tote Tracks zum Entfernen. Fortsetzen?"
|
||||
|
||||
#: cocoa/inter/app_me.py:199
|
||||
msgid "You have no dead tracks in your iTunes Library"
|
||||
msgstr "You have no dead tracks in your iTunes Library"
|
||||
msgstr "Sie haben keine toten Tracks in Ihrer iTunes-Bibliothek"
|
||||
|
||||
#: cocoa/inter/app_me.py:217
|
||||
msgid "The iTunes application couldn't be found."
|
||||
msgstr ""
|
||||
msgstr "Das iTunes-Programm konnte nicht gefunden werden."
|
||||
|
||||
#: cocoa/inter/app_pe.py:202
|
||||
msgid "Talking to iPhoto. Don't touch it!"
|
||||
msgstr ""
|
||||
msgstr "Kommuniziere mit iPhoto. Bitte warten!"
|
||||
|
||||
#: cocoa/inter/app_pe.py:211
|
||||
msgid "Talking to Aperture. Don't touch it!"
|
||||
msgstr ""
|
||||
msgstr "Kommuniziere mit Aperture. Bitte warten!"
|
||||
|
||||
#: cocoa/inter/app_pe.py:284
|
||||
msgid "Deleted Aperture photos were sent to a project called \"dupeGuru Trash\"."
|
||||
msgstr ""
|
||||
"Gelöschte Aperture-Fotos wurden dem Projekt \"dupeGuru Trash\" hinzugefügt."
|
||||
|
||||
#: cocoa/inter/app_pe.py:310
|
||||
msgid "The iPhoto application couldn't be found."
|
||||
msgstr "The iPhoto application couldn't be found."
|
||||
msgstr "Das iPhoto-Programm konnte nicht gefunden werden."
|
||||
|
||||
#: qt/base/app.py:83
|
||||
msgid "Quit"
|
||||
@@ -67,7 +70,7 @@ msgstr "Einstellungen"
|
||||
#: qt/base/app.py:85 qt/base/ignore_list_dialog.py:32
|
||||
#: cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Ignore List"
|
||||
msgstr ""
|
||||
msgstr "Ausnahme-Liste"
|
||||
|
||||
#: qt/base/app.py:86 cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "dupeGuru Help"
|
||||
@@ -87,51 +90,56 @@ msgstr "Debug Log öffnen"
|
||||
|
||||
#: qt/base/app.py:198
|
||||
msgid "{} file (*.{})"
|
||||
msgstr ""
|
||||
msgstr "{} Datei (*.{})"
|
||||
|
||||
#: qt/base/deletion_options.py:30 cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Deletion Options"
|
||||
msgstr ""
|
||||
msgstr "Lösch-Optionen"
|
||||
|
||||
#: qt/base/deletion_options.py:35 cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Link deleted files"
|
||||
msgstr ""
|
||||
msgstr "Verlinke gelöschte Dateien"
|
||||
|
||||
#: qt/base/deletion_options.py:37 cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid ""
|
||||
"After having deleted a duplicate, place a link targeting the reference file "
|
||||
"to replace the deleted file."
|
||||
msgstr ""
|
||||
"Doppelte Dateien werden gelöscht, an deren Stelle wird eine Verknüpfung auf "
|
||||
"die Referenz-Datei erstellt."
|
||||
|
||||
#: qt/base/deletion_options.py:42
|
||||
msgid "Hardlink"
|
||||
msgstr ""
|
||||
msgstr "Hardlink"
|
||||
|
||||
#: qt/base/deletion_options.py:42
|
||||
msgid "Symlink"
|
||||
msgstr ""
|
||||
msgstr "Symlink"
|
||||
|
||||
#: qt/base/deletion_options.py:46
|
||||
msgid " (unsupported)"
|
||||
msgstr ""
|
||||
msgstr "(nicht unterstützt)"
|
||||
|
||||
#: qt/base/deletion_options.py:47 cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Directly delete files"
|
||||
msgstr ""
|
||||
msgstr "Ohne Papierkorb löschen"
|
||||
|
||||
#: qt/base/deletion_options.py:49 cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid ""
|
||||
"Instead of sending files to trash, delete them directly. This option is "
|
||||
"usually used as a workaround when the normal deletion method doesn't work."
|
||||
msgstr ""
|
||||
"Anstatt Dateien in den Papierkorb zu verschieben, können Sie diese direkt "
|
||||
"löschen. Diese Option wird in der Regel genutzt, falls die normale "
|
||||
"Löschmethode nicht funktioniert."
|
||||
|
||||
#: qt/base/deletion_options.py:55 cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Proceed"
|
||||
msgstr ""
|
||||
msgstr "Fortfahren"
|
||||
|
||||
#: qt/base/deletion_options.py:56 cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Cancel"
|
||||
msgstr "Cancel"
|
||||
msgstr "Abbrechen"
|
||||
|
||||
#: qt/base/details_table.py:16 cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Attribute"
|
||||
@@ -148,7 +156,7 @@ msgstr "Referenz"
|
||||
|
||||
#: qt/base/directories_dialog.py:58 cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Load Results..."
|
||||
msgstr "Lade Ergebnisse..."
|
||||
msgstr "Ergebnis laden..."
|
||||
|
||||
#: qt/base/directories_dialog.py:59 cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Results Window"
|
||||
@@ -161,7 +169,7 @@ msgstr "Ordner hinzufügen..."
|
||||
#: qt/base/directories_dialog.py:68 qt/base/result_window.py:77
|
||||
#: cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "File"
|
||||
msgstr "Ablage"
|
||||
msgstr "Datei"
|
||||
|
||||
#: qt/base/directories_dialog.py:70 qt/base/result_window.py:85
|
||||
msgid "View"
|
||||
@@ -174,11 +182,11 @@ msgstr "Hilfe"
|
||||
|
||||
#: qt/base/directories_dialog.py:74 cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Load Recent Results"
|
||||
msgstr "Lade letzte Ergebnisse"
|
||||
msgstr "Lade letztes Suchergebnis"
|
||||
|
||||
#: qt/base/directories_dialog.py:108 cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Select folders to scan and press \"Scan\"."
|
||||
msgstr "Zu scannende Ordner auswählen und \"Scan\" drücken."
|
||||
msgstr "Zu durchsuchende Ordner auswählen und \"Suche starten\" drücken."
|
||||
|
||||
#: qt/base/directories_dialog.py:132 cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Load Results"
|
||||
@@ -186,7 +194,7 @@ msgstr "Lade Ergebnisse"
|
||||
|
||||
#: qt/base/directories_dialog.py:135 cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Scan"
|
||||
msgstr "Scan"
|
||||
msgstr "Suche starten"
|
||||
|
||||
#: qt/base/directories_dialog.py:179
|
||||
msgid "Unsaved results"
|
||||
@@ -194,15 +202,16 @@ msgstr "Ungespeicherte Ergebnisse"
|
||||
|
||||
#: qt/base/directories_dialog.py:180 cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "You have unsaved results, do you really want to quit?"
|
||||
msgstr "Sie haben ungespeicherte Ergebnisse. Wollen Sie wirklich beenden?"
|
||||
msgstr ""
|
||||
"Sie haben ungespeicherte Ergebnisse. Wollen Sie wirklich dupeGuru beenden?"
|
||||
|
||||
#: qt/base/directories_dialog.py:188 cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Select a folder to add to the scanning list"
|
||||
msgstr "Wählen Sie einen Ordner aus, um ihn der Scanliste hinzuzufügen."
|
||||
msgstr "Wählen Sie einen Ordner aus, um ihn der Scanliste hinzuzufügen"
|
||||
|
||||
#: qt/base/directories_dialog.py:205 cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Select a results file to load"
|
||||
msgstr "Wählen Sie eine Ergebnisliste zum Laden aus."
|
||||
msgstr "Wählen Sie eine Ergebnisdatei zum Laden aus"
|
||||
|
||||
#: qt/base/directories_dialog.py:206
|
||||
msgid "All Files (*.*)"
|
||||
@@ -210,11 +219,11 @@ msgstr "Alle Dateien (*.*)"
|
||||
|
||||
#: qt/base/directories_dialog.py:206 qt/base/result_window.py:287
|
||||
msgid "dupeGuru Results (*.dupeguru)"
|
||||
msgstr "dupeGuru Ergebnisse (*.dupeguru)"
|
||||
msgstr "dupeGuru Suchergebnisse (*.dupeguru)"
|
||||
|
||||
#: qt/base/directories_dialog.py:217
|
||||
msgid "Start a new scan"
|
||||
msgstr "Starte einen neuen Scan"
|
||||
msgstr "Starte einen neuen Suchlauf"
|
||||
|
||||
#: qt/base/directories_dialog.py:218 cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "You have unsaved results, do you really want to continue?"
|
||||
@@ -238,11 +247,11 @@ msgstr "Normal"
|
||||
|
||||
#: qt/base/ignore_list_dialog.py:45 cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Remove Selected"
|
||||
msgstr ""
|
||||
msgstr "Auswahl löschen"
|
||||
|
||||
#: qt/base/ignore_list_dialog.py:46 cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Clear"
|
||||
msgstr ""
|
||||
msgstr "Liste leeren"
|
||||
|
||||
#: qt/base/ignore_list_dialog.py:47 qt/base/problem_dialog.py:57
|
||||
#: cocoa/base/en.lproj/Localizable.strings:0
|
||||
@@ -259,15 +268,15 @@ msgstr "Filter Empfindlichkeit:"
|
||||
|
||||
#: qt/base/preferences_dialog.py:76
|
||||
msgid "More Results"
|
||||
msgstr "mehr Ergebnisse"
|
||||
msgstr "Mehr Ergebnisse"
|
||||
|
||||
#: qt/base/preferences_dialog.py:81
|
||||
msgid "Fewer Results"
|
||||
msgstr "weniger Ergebnisse"
|
||||
msgstr "Weniger Ergebnisse"
|
||||
|
||||
#: qt/base/preferences_dialog.py:88
|
||||
msgid "Font size:"
|
||||
msgstr "Font size:"
|
||||
msgstr "Schriftgröße:"
|
||||
|
||||
#: qt/base/preferences_dialog.py:92
|
||||
msgid "Language:"
|
||||
@@ -279,7 +288,7 @@ msgstr "Kopieren und Verschieben:"
|
||||
|
||||
#: qt/base/preferences_dialog.py:101 cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Right in destination"
|
||||
msgstr "Direkt im Ziel"
|
||||
msgstr "Direkt ins Ziel"
|
||||
|
||||
#: qt/base/preferences_dialog.py:102 cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Recreate relative path"
|
||||
@@ -291,7 +300,7 @@ msgstr "Absoluten Pfad neu erstellen"
|
||||
|
||||
#: qt/base/preferences_dialog.py:106
|
||||
msgid "Custom Command (arguments: %d for dupe, %r for ref):"
|
||||
msgstr "Eigener Befehl (Argumente: %d für Duplikat, %r für Referenz):"
|
||||
msgstr "Eigener Befehl (Variablen: %d für Duplikat, %r für Referenz):"
|
||||
|
||||
#: qt/base/preferences_dialog.py:184
|
||||
msgid "dupeGuru has to restart for language changes to take effect."
|
||||
@@ -299,7 +308,7 @@ msgstr "dupeGuru muss neustarten, um die Sprachänderung durchzuführen."
|
||||
|
||||
#: qt/base/prioritize_dialog.py:71 cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Re-Prioritize duplicates"
|
||||
msgstr "Re-Prioritize duplicates"
|
||||
msgstr "Re-priorisiere Duplikate"
|
||||
|
||||
#: qt/base/prioritize_dialog.py:75 cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid ""
|
||||
@@ -307,9 +316,9 @@ msgid ""
|
||||
" the best to these criteria to their respective group's reference position. "
|
||||
"Read the help file for more information."
|
||||
msgstr ""
|
||||
"Add criteria to the right box and click OK to send the dupes that correspond"
|
||||
" the best to these criteria to their respective group's reference position. "
|
||||
"Read the help file for more information."
|
||||
"Fügen Sie Kriterien zur rechten Box hinzu. Klicken Sie OK, um die Duplikate,"
|
||||
" die diesen Kriterien am besten entsprechen, zur Referenzposition der "
|
||||
"entsprechenden Gruppe zu senden. Lesen Sie die Hilfe für mehr Informationen."
|
||||
|
||||
#: qt/base/problem_dialog.py:31 cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Problems!"
|
||||
@@ -321,8 +330,9 @@ msgid ""
|
||||
"these problems are described in the table below. Those files were not "
|
||||
"removed from your results."
|
||||
msgstr ""
|
||||
"Es gab Probleme bei der Verarbeitung einiger (aller) Dateien. Der Grund der "
|
||||
"Probleme ist unten in der Tabelle beschrieben."
|
||||
"Es gab Probleme bei der Verarbeitung einiger (aller) Dateien. Der Ursache "
|
||||
"dieser Probleme ist unten genauer beschrieben. Diese Dateien wurden "
|
||||
"\"nicht\" aus Ihren Suchergebnissen entfernt."
|
||||
|
||||
#: qt/base/problem_dialog.py:52
|
||||
msgid "Reveal Selected"
|
||||
@@ -346,11 +356,11 @@ msgstr "Nur Duplikate anzeigen"
|
||||
|
||||
#: qt/base/result_window.py:47 cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Show Delta Values"
|
||||
msgstr "Zeige Deltawerte"
|
||||
msgstr "Zeige Delta-Werte"
|
||||
|
||||
#: qt/base/result_window.py:48
|
||||
msgid "Send Marked to Recycle Bin..."
|
||||
msgstr "Verschiebe Markierte in den Mülleimer..."
|
||||
msgstr "Verschiebe Markierte in den Papierkorb..."
|
||||
|
||||
#: qt/base/result_window.py:49 cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Move Marked to..."
|
||||
@@ -366,31 +376,31 @@ msgstr "Entferne Markierte aus den Ergebnissen"
|
||||
|
||||
#: qt/base/result_window.py:52 cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Re-Prioritize Results..."
|
||||
msgstr "Entferne Ausgewählte aus den Ergebnissen..."
|
||||
msgstr "Re-priorisiere Ergebnisse..."
|
||||
|
||||
#: qt/base/result_window.py:53 cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Remove Selected from Results"
|
||||
msgstr "Entferne Ausgewählte aus den Ergebnissen"
|
||||
msgstr "Entferne Auswahl aus den Ergebnissen"
|
||||
|
||||
#: qt/base/result_window.py:54 cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Add Selected to Ignore List"
|
||||
msgstr "Füge Ausgewählte der Ignorier-Liste hinzu"
|
||||
msgstr "Füge Auswahl der Ausnahmeliste hinzu"
|
||||
|
||||
#: qt/base/result_window.py:55 cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Make Selected into Reference"
|
||||
msgstr ""
|
||||
msgstr "Mache Auswahl zur Referenz"
|
||||
|
||||
#: qt/base/result_window.py:56 cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Open Selected with Default Application"
|
||||
msgstr "Öffne Ausgewählte mit Standardanwendung"
|
||||
msgstr "Öffne Auswahl mit Standard-Anwendung"
|
||||
|
||||
#: qt/base/result_window.py:57
|
||||
msgid "Open Containing Folder of Selected"
|
||||
msgstr "Öffne beeinhaltenden Ordner der Ausgewählten"
|
||||
msgstr "Öffne den Über-Ordner der Auswahl"
|
||||
|
||||
#: qt/base/result_window.py:58 cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Rename Selected"
|
||||
msgstr "Ausgewählte umbenennen"
|
||||
msgstr "Auswahl umbenennen"
|
||||
|
||||
#: qt/base/result_window.py:59 cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Mark All"
|
||||
@@ -402,19 +412,19 @@ msgstr "Nichts markieren"
|
||||
|
||||
#: qt/base/result_window.py:61 cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Invert Marking"
|
||||
msgstr "Markierung invertieren"
|
||||
msgstr "Auswahl umkehren"
|
||||
|
||||
#: qt/base/result_window.py:62 cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Mark Selected"
|
||||
msgstr "Ausgewählte markieren"
|
||||
msgstr "Auswahl markieren"
|
||||
|
||||
#: qt/base/result_window.py:63
|
||||
msgid "Export To HTML"
|
||||
msgstr "Exportiere als HTML"
|
||||
msgstr "Exportiere als HTML..."
|
||||
|
||||
#: qt/base/result_window.py:64
|
||||
msgid "Export To CSV"
|
||||
msgstr ""
|
||||
msgstr "Exportiere als CSV..."
|
||||
|
||||
#: qt/base/result_window.py:65 cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Save Results..."
|
||||
@@ -434,7 +444,7 @@ msgstr "Spalten"
|
||||
|
||||
#: qt/base/result_window.py:141
|
||||
msgid "Reset to Defaults"
|
||||
msgstr "Voreinstellungen"
|
||||
msgstr "Auf Voreinstellung zurücksetzen"
|
||||
|
||||
#: qt/base/result_window.py:163
|
||||
msgid "{} Results"
|
||||
@@ -442,15 +452,15 @@ msgstr "{} (Ergebnisse)"
|
||||
|
||||
#: qt/base/result_window.py:171 cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Dupes Only"
|
||||
msgstr "Dupes Only"
|
||||
msgstr "Nur Duplikate anzeigen"
|
||||
|
||||
#: qt/base/result_window.py:172
|
||||
msgid "Delta Values"
|
||||
msgstr ""
|
||||
msgstr "Zeige Delta-Werte"
|
||||
|
||||
#: qt/base/result_window.py:286 cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Select a file to save your results to"
|
||||
msgstr "Datei zum Speichern der Ergebnisliste auswählen."
|
||||
msgstr "Datei zum Speichern der Suchergebnisse auswählen"
|
||||
|
||||
#: qt/me/preferences_dialog.py:39 qt/se/preferences_dialog.py:39
|
||||
#: cocoa/base/en.lproj/Localizable.strings:0
|
||||
@@ -472,7 +482,7 @@ msgstr "Tags"
|
||||
#: qt/me/preferences_dialog.py:43 qt/pe/preferences_dialog.py:33
|
||||
#: qt/se/preferences_dialog.py:40 cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Contents"
|
||||
msgstr "Inhalt"
|
||||
msgstr "Inhalte"
|
||||
|
||||
#: qt/me/preferences_dialog.py:44
|
||||
msgid "Audio Contents"
|
||||
@@ -480,11 +490,11 @@ msgstr "Audio Inhalte"
|
||||
|
||||
#: qt/me/preferences_dialog.py:55 cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Tags to scan:"
|
||||
msgstr "folgende Tags scannen:"
|
||||
msgstr "Folgende Tags scannen:"
|
||||
|
||||
#: qt/me/preferences_dialog.py:61 cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Track"
|
||||
msgstr "Stück"
|
||||
msgstr "Track"
|
||||
|
||||
#: qt/me/preferences_dialog.py:63 cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Artist"
|
||||
@@ -514,7 +524,7 @@ msgstr "Wortgewichtung"
|
||||
#: qt/me/preferences_dialog.py:77 qt/se/preferences_dialog.py:51
|
||||
#: cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Match similar words"
|
||||
msgstr "Vergleiche ähnliche Wörter"
|
||||
msgstr "Gleiche ähnliche Wörter ab"
|
||||
|
||||
#: qt/me/preferences_dialog.py:79 qt/pe/preferences_dialog.py:41
|
||||
#: qt/se/preferences_dialog.py:53 cocoa/base/en.lproj/Localizable.strings:0
|
||||
@@ -534,7 +544,7 @@ msgstr "Entferne leere Ordner beim Löschen oder Verschieben"
|
||||
#: qt/me/preferences_dialog.py:85 qt/pe/preferences_dialog.py:47
|
||||
#: qt/se/preferences_dialog.py:76 cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Ignore duplicates hardlinking to the same file"
|
||||
msgstr "Ignoriere Duplikate die mit derselben Datei verknüpft sind"
|
||||
msgstr "Ignoriere Duplikate mit Hardlinks auf dieselbe Datei"
|
||||
|
||||
#: qt/me/preferences_dialog.py:87 qt/pe/preferences_dialog.py:49
|
||||
#: qt/se/preferences_dialog.py:78 cocoa/base/en.lproj/Localizable.strings:0
|
||||
@@ -543,16 +553,16 @@ msgstr "Debug Modus (Neustart nötig)"
|
||||
|
||||
#: qt/pe/preferences_dialog.py:34 cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "EXIF Timestamp"
|
||||
msgstr "EXIF Timestamp"
|
||||
msgstr "EXIF Zeitstempel"
|
||||
|
||||
#: qt/pe/preferences_dialog.py:39 cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Match pictures of different dimensions"
|
||||
msgstr "Vergleiche Bilder mit unterschiedlicher Auflösung"
|
||||
msgstr "Gleiche Bilder mit unterschiedlicher Auflösung ab"
|
||||
|
||||
#: qt/pe/result_window.py:19 qt/pe/result_window.py:25
|
||||
#: cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Clear Picture Cache"
|
||||
msgstr "Bildzwischenspeicher leeren"
|
||||
msgstr "Bilder-Cache leeren"
|
||||
|
||||
#: qt/pe/result_window.py:26 cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Do you really want to remove all your cached picture analysis?"
|
||||
@@ -561,7 +571,7 @@ msgstr ""
|
||||
|
||||
#: qt/pe/result_window.py:29
|
||||
msgid "Picture cache cleared."
|
||||
msgstr "Bildzwischenspeicher geleert."
|
||||
msgstr "Bilder-Cache geleert."
|
||||
|
||||
#: qt/se/preferences_dialog.py:41 cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Folders"
|
||||
@@ -577,35 +587,35 @@ msgstr "KB"
|
||||
|
||||
#: cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "%@ Results"
|
||||
msgstr ""
|
||||
msgstr "%@ Ergebnisse"
|
||||
|
||||
#: cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Action"
|
||||
msgstr "Action"
|
||||
msgstr "Aktion"
|
||||
|
||||
#: cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Add Aperture Library"
|
||||
msgstr ""
|
||||
msgstr "Füge Aperture-Bibliothek hinzu"
|
||||
|
||||
#: cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Add iPhoto Library"
|
||||
msgstr "Add iPhoto Library"
|
||||
msgstr "Füge iPhoto-Bibliothek hinzu"
|
||||
|
||||
#: cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Add iTunes Library"
|
||||
msgstr ""
|
||||
msgstr "Füge iTunes-Bibliothek hinzu"
|
||||
|
||||
#: cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Add New Folder..."
|
||||
msgstr "Add New Folder..."
|
||||
msgstr "Neuer Ordner..."
|
||||
|
||||
#: cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Advanced"
|
||||
msgstr "Advanced"
|
||||
msgstr "Fortgeschritten"
|
||||
|
||||
#: cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Audio Content"
|
||||
msgstr "Audio Content"
|
||||
msgstr "Audio Inhalt"
|
||||
|
||||
#: cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Automatically check for updates"
|
||||
@@ -613,7 +623,7 @@ msgstr "Automatisch nach Updates suchen"
|
||||
|
||||
#: cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Basic"
|
||||
msgstr "Basic"
|
||||
msgstr "Einfach"
|
||||
|
||||
#: cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Bring All to Front"
|
||||
@@ -621,15 +631,15 @@ msgstr "Alle nach vorne bringen"
|
||||
|
||||
#: cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Check for update..."
|
||||
msgstr "Check for update..."
|
||||
msgstr "Auf Updates prüfen..."
|
||||
|
||||
#: cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Close Window"
|
||||
msgstr "Fenster Schließen"
|
||||
msgstr "Fenster schließen"
|
||||
|
||||
#: cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Content"
|
||||
msgstr "Content"
|
||||
msgstr "Inhalt"
|
||||
|
||||
#: cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Copy"
|
||||
@@ -637,7 +647,7 @@ msgstr "Kopieren"
|
||||
|
||||
#: cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Custom command (arguments: %d for dupe, %r for ref):"
|
||||
msgstr "Custom command (arguments: %d for dupe, %r for ref):"
|
||||
msgstr "Eigener Befehl (Variablen: %d für Duplikat, %r für Referenz):"
|
||||
|
||||
#: cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Cut"
|
||||
@@ -649,7 +659,7 @@ msgstr "Delta"
|
||||
|
||||
#: cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Details of Selected File"
|
||||
msgstr "Details of Selected File"
|
||||
msgstr "Details der ausgewählten Datei"
|
||||
|
||||
#: cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Details Panel"
|
||||
@@ -657,27 +667,27 @@ msgstr "Details Panel"
|
||||
|
||||
#: cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Directories"
|
||||
msgstr "Directories"
|
||||
msgstr "Verzeichnisse"
|
||||
|
||||
#: cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "dupeGuru"
|
||||
msgstr ""
|
||||
msgstr "dupeGuru"
|
||||
|
||||
#: cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "dupeGuru ME Preferences"
|
||||
msgstr "dupeGuru ME Preferences"
|
||||
msgstr "dupeGuru ME Einstellungen"
|
||||
|
||||
#: cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "dupeGuru PE Preferences"
|
||||
msgstr "dupeGuru PE Preferences"
|
||||
msgstr "dupeGuru PE Einstellungen"
|
||||
|
||||
#: cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "dupeGuru Preferences"
|
||||
msgstr "dupeGuru Preferences"
|
||||
msgstr "dupeGuru Einstellungen"
|
||||
|
||||
#: cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "dupeGuru Results"
|
||||
msgstr "dupeGuru Results"
|
||||
msgstr "dupeGuru Ergebnisse"
|
||||
|
||||
#: cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "dupeGuru Website"
|
||||
@@ -689,15 +699,15 @@ msgstr "Bearbeiten"
|
||||
|
||||
#: cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Export Results to CSV"
|
||||
msgstr ""
|
||||
msgstr "Exportiere als CSV..."
|
||||
|
||||
#: cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Export Results to XHTML"
|
||||
msgstr "Export Results to XHTML"
|
||||
msgstr "Exportiere als XHTML..."
|
||||
|
||||
#: cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Fewer results"
|
||||
msgstr "Fewer results"
|
||||
msgstr "Weniger Suchergebnisse"
|
||||
|
||||
#: cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Filter"
|
||||
@@ -705,19 +715,19 @@ msgstr "Filter"
|
||||
|
||||
#: cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Filter hardness:"
|
||||
msgstr "Filter hardness:"
|
||||
msgstr "Filter Empfindlichkeit:"
|
||||
|
||||
#: cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Filter Results..."
|
||||
msgstr ""
|
||||
msgstr "Filter Suchergebnisse..."
|
||||
|
||||
#: cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Folder Selection Window"
|
||||
msgstr "Folder Selection Window"
|
||||
msgstr "Ordner-Auswahlfenster"
|
||||
|
||||
#: cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Font Size:"
|
||||
msgstr ""
|
||||
msgstr "Schriftgröße:"
|
||||
|
||||
#: cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Hide dupeGuru"
|
||||
@@ -733,19 +743,19 @@ msgstr "Ignoriere Dateien kleiner als:"
|
||||
|
||||
#: cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Load from file..."
|
||||
msgstr "Load from file..."
|
||||
msgstr "Lade von Datei..."
|
||||
|
||||
#: cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Minimize"
|
||||
msgstr "Im Dock ablegen"
|
||||
msgstr "Minimieren"
|
||||
|
||||
#: cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Mode"
|
||||
msgstr "Mode"
|
||||
msgstr "Modus"
|
||||
|
||||
#: cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "More results"
|
||||
msgstr "More results"
|
||||
msgstr "Mehr Suchergebnisse"
|
||||
|
||||
#: cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Ok"
|
||||
@@ -753,19 +763,19 @@ msgstr "Ok"
|
||||
|
||||
#: cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Options"
|
||||
msgstr "Options"
|
||||
msgstr "Optionen"
|
||||
|
||||
#: cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Paste"
|
||||
msgstr "Einsetzen"
|
||||
msgstr "Einfügen"
|
||||
|
||||
#: cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Preferences..."
|
||||
msgstr "Preferences..."
|
||||
msgstr "Einstellungen..."
|
||||
|
||||
#: cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Quick Look"
|
||||
msgstr ""
|
||||
msgstr "Quick Look"
|
||||
|
||||
#: cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Quit dupeGuru"
|
||||
@@ -773,35 +783,35 @@ msgstr "dupeGuru beenden"
|
||||
|
||||
#: cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Remove Dead Tracks in iTunes"
|
||||
msgstr "Remove Dead Tracks in iTunes"
|
||||
msgstr "Entferne tote Tracks in iTunes"
|
||||
|
||||
#: cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Reset to Default"
|
||||
msgstr "Reset to Default"
|
||||
msgstr "Auf Voreinstellung zurücksetzen"
|
||||
|
||||
#: cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Reset To Defaults"
|
||||
msgstr ""
|
||||
msgstr "Auf Voreinstellungen zurücksetzen"
|
||||
|
||||
#: cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Reveal"
|
||||
msgstr ""
|
||||
msgstr "Zeige"
|
||||
|
||||
#: cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Reveal Selected in Finder"
|
||||
msgstr "Reveal Selected in Finder"
|
||||
msgstr "Zeige Auswahl im Finder"
|
||||
|
||||
#: cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Select All"
|
||||
msgstr ""
|
||||
msgstr "Alles markieren"
|
||||
|
||||
#: cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Send Marked to Trash..."
|
||||
msgstr ""
|
||||
msgstr "Verschiebe Markierte in den Papierkorb..."
|
||||
|
||||
#: cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Services"
|
||||
msgstr ""
|
||||
msgstr "Services"
|
||||
|
||||
#: cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Show All"
|
||||
@@ -809,11 +819,11 @@ msgstr "Alle einblenden"
|
||||
|
||||
#: cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Start Duplicate Scan"
|
||||
msgstr "Start Duplicate Scan"
|
||||
msgstr "Starte Duplikat-Scan"
|
||||
|
||||
#: cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "The name '%@' already exists."
|
||||
msgstr "The name '%@' already exists."
|
||||
msgstr "Der Name '%@' existiert bereits."
|
||||
|
||||
#: cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Window"
|
||||
@@ -821,4 +831,4 @@ msgstr "Fenster"
|
||||
|
||||
#: cocoa/base/en.lproj/Localizable.strings:0
|
||||
msgid "Zoom"
|
||||
msgstr "Zoomen"
|
||||
msgstr "Zoom"
|
||||
|
||||
14
package.py
14
package.py
@@ -1,9 +1,9 @@
|
||||
# Created By: Virgil Dupras
|
||||
# Created On: 2009-12-30
|
||||
# Copyright 2014 Hardcoded Software (http://www.hardcoded.net)
|
||||
#
|
||||
# This software is licensed under the "BSD" License as described in the "LICENSE" file,
|
||||
# which should be included with this package. The terms are also available at
|
||||
#
|
||||
# This software is licensed under the "BSD" License as described in the "LICENSE" file,
|
||||
# which should be included with this package. The terms are also available at
|
||||
# http://www.hardcoded.net/licenses/bsd_license
|
||||
|
||||
import os
|
||||
@@ -42,10 +42,10 @@ def package_windows(edition, dev):
|
||||
add_to_pythonpath('.')
|
||||
app_version = get_module_version('core_{}'.format(edition))
|
||||
distdir = 'dist'
|
||||
|
||||
|
||||
if op.exists(distdir):
|
||||
shutil.rmtree(distdir)
|
||||
|
||||
|
||||
if not dev:
|
||||
# Copy qt plugins
|
||||
plugin_dest = distdir
|
||||
@@ -129,7 +129,7 @@ def package_debian_distribution(edition, distribution):
|
||||
ed = lambda s: s.format(edition)
|
||||
destpath = op.join('build', 'dupeguru-{0}-{1}'.format(edition, version))
|
||||
srcpath = op.join(destpath, 'src')
|
||||
packages = ['hscommon', 'core', ed('core_{0}'), 'qtlib', 'qt', 'send2trash', 'jobprogress']
|
||||
packages = ['hscommon', 'core', ed('core_{0}'), 'qtlib', 'qt', 'send2trash']
|
||||
if edition == 'me':
|
||||
packages.append('hsaudiotag')
|
||||
copy_files_to_package(srcpath, packages, with_so=False)
|
||||
@@ -171,7 +171,7 @@ def package_arch(edition):
|
||||
print("Packaging for Arch")
|
||||
ed = lambda s: s.format(edition)
|
||||
srcpath = op.join('build', ed('dupeguru-{}-arch'))
|
||||
packages = ['hscommon', 'core', ed('core_{0}'), 'qtlib', 'qt', 'send2trash', 'jobprogress']
|
||||
packages = ['hscommon', 'core', ed('core_{0}'), 'qtlib', 'qt', 'send2trash']
|
||||
if edition == 'me':
|
||||
packages.append('hsaudiotag')
|
||||
copy_files_to_package(srcpath, packages, with_so=True)
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
# Created By: Virgil Dupras
|
||||
# Created On: 2011-09-06
|
||||
# Copyright 2014 Hardcoded Software (http://www.hardcoded.net)
|
||||
#
|
||||
# This software is licensed under the "BSD" License as described in the "LICENSE" file,
|
||||
# which should be included with this package. The terms are also available at
|
||||
#
|
||||
# This software is licensed under the "BSD" License as described in the "LICENSE" file,
|
||||
# which should be included with this package. The terms are also available at
|
||||
# http://www.hardcoded.net/licenses/bsd_license
|
||||
|
||||
from PyQt5.QtCore import Qt, QAbstractListModel, QItemSelection, QItemSelectionModel
|
||||
@@ -16,7 +16,7 @@ class SelectableList(QAbstractListModel):
|
||||
self.model = model
|
||||
self.view.setModel(self)
|
||||
self.model.view = self
|
||||
|
||||
|
||||
#--- Override
|
||||
def data(self, index, role):
|
||||
if not index.isValid():
|
||||
@@ -25,26 +25,27 @@ class SelectableList(QAbstractListModel):
|
||||
if role in {Qt.DisplayRole, Qt.EditRole}:
|
||||
return self.model[index.row()]
|
||||
return None
|
||||
|
||||
|
||||
def rowCount(self, index):
|
||||
if index.isValid():
|
||||
return 0
|
||||
return len(self.model)
|
||||
|
||||
|
||||
#--- Virtual
|
||||
def _updateSelection(self):
|
||||
raise NotImplementedError()
|
||||
|
||||
|
||||
def _restoreSelection(self):
|
||||
raise NotImplementedError()
|
||||
|
||||
|
||||
#--- model --> view
|
||||
def refresh(self):
|
||||
self._updating = True
|
||||
self.reset()
|
||||
self.beginResetModel()
|
||||
self.endResetModel()
|
||||
self._updating = False
|
||||
self._restoreSelection()
|
||||
|
||||
|
||||
def update_selection(self):
|
||||
self._restoreSelection()
|
||||
|
||||
@@ -52,18 +53,18 @@ class ComboboxModel(SelectableList):
|
||||
def __init__(self, model, view, **kwargs):
|
||||
super().__init__(model, view, **kwargs)
|
||||
self.view.currentIndexChanged[int].connect(self.selectionChanged)
|
||||
|
||||
|
||||
#--- Override
|
||||
def _updateSelection(self):
|
||||
index = self.view.currentIndex()
|
||||
if index != self.model.selected_index:
|
||||
self.model.select(index)
|
||||
|
||||
|
||||
def _restoreSelection(self):
|
||||
index = self.model.selected_index
|
||||
if index is not None:
|
||||
self.view.setCurrentIndex(index)
|
||||
|
||||
|
||||
#--- Events
|
||||
def selectionChanged(self, index):
|
||||
if not self._updating:
|
||||
@@ -74,13 +75,13 @@ class ListviewModel(SelectableList):
|
||||
super().__init__(model, view, **kwargs)
|
||||
self.view.selectionModel().selectionChanged[(QItemSelection, QItemSelection)].connect(
|
||||
self.selectionChanged)
|
||||
|
||||
|
||||
#--- Override
|
||||
def _updateSelection(self):
|
||||
newIndexes = [modelIndex.row() for modelIndex in self.view.selectionModel().selectedRows()]
|
||||
if newIndexes != self.model.selected_indexes:
|
||||
self.model.select(newIndexes)
|
||||
|
||||
|
||||
def _restoreSelection(self):
|
||||
newSelection = QItemSelection()
|
||||
for index in self.model.selected_indexes:
|
||||
@@ -94,4 +95,4 @@ class ListviewModel(SelectableList):
|
||||
def selectionChanged(self, index):
|
||||
if not self._updating:
|
||||
self._updateSelection()
|
||||
|
||||
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
-r requirements.txt
|
||||
objp>=1.2.0
|
||||
objp>=1.3.1
|
||||
appscript>=1.0.0
|
||||
xibless>=0.4.1
|
||||
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
jobprogress>=1.0.4
|
||||
Send2Trash>=1.3.0
|
||||
sphinx>=1.2.2
|
||||
polib>=1.0.4
|
||||
|
||||
Reference in New Issue
Block a user