# Copyright 2016 Hardcoded Software (http://www.hardcoded.net)
#
# This software is licensed under the "GPLv3" License as described in the "LICENSE" file,
# which should be included with this package. The terms are also available at
# http://www.gnu.org/licenses/gpl-3.0.html

import os
import os.path as op
import logging
import subprocess
import re
import shutil

from send2trash import send2trash
from hscommon.jobprogress import job
from hscommon.notify import Broadcaster
from hscommon.path import Path
from hscommon.conflict import smart_move, smart_copy
from hscommon.gui.progress_window import ProgressWindow
from hscommon.util import delete_if_empty, first, escape, nonone, allsame
from hscommon.trans import tr
from hscommon import desktop

from . import se, me, pe
from .pe.photo import get_delta_dimensions
from .util import cmp_value, fix_surrogate_encoding
from . import directories, results, export, fs, prioritize
from .ignore import IgnoreList
from .exclude import ExcludeDict as ExcludeList
from .scanner import ScanType
from .gui.deletion_options import DeletionOptions
from .gui.details_panel import DetailsPanel
from .gui.directory_tree import DirectoryTree
from .gui.ignore_list_dialog import IgnoreListDialog
from .gui.exclude_list_dialog import ExcludeListDialogCore
from .gui.problem_dialog import ProblemDialog
from .gui.stats_label import StatsLabel

HAD_FIRST_LAUNCH_PREFERENCE = "HadFirstLaunch"
DEBUG_MODE_PREFERENCE = "DebugMode"

MSG_NO_MARKED_DUPES = tr("There are no marked duplicates. Nothing has been done.")
MSG_NO_SELECTED_DUPES = tr("There are no selected duplicates. Nothing has been done.")
MSG_MANY_FILES_TO_OPEN = tr(
    "You're about to open many files at once. Depending on what those "
    "files are opened with, doing so can create quite a mess. Continue?"
)


class DestType:
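    # Destination type used by copy_or_move() below. Direct puts every marked file straight
    # into the destination folder; Relative re-creates, under the destination, the file's path
    # relative to its root scan directory; Absolute re-creates the full source path (minus any
    # drive letter) under the destination.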
    Direct = 0
    Relative = 1
    Absolute = 2


class JobType:
    Scan = "job_scan"
    Load = "job_load"
    Move = "job_move"
    Copy = "job_copy"
    Delete = "job_delete"


class AppMode:
    Standard = 0
    Music = 1
    Picture = 2


JOBID2TITLE = {
    JobType.Scan: tr("Scanning for duplicates"),
    JobType.Load: tr("Loading"),
    JobType.Move: tr("Moving"),
    JobType.Copy: tr("Copying"),
    JobType.Delete: tr("Sending to Trash"),
}


class DupeGuru(Broadcaster):
    """Holds everything together.

    Instantiated once per running application, it holds a reference to every high-level object
    whose reference needs to be held: :class:`~core.results.Results`,
    :class:`~core.directories.Directories`, :mod:`core.gui` instances, etc.

    It also hosts high-level methods and acts as a coordinator for all those elements. This is why
    some of its methods seem a bit shallow, like for example :meth:`mark_all` and
    :meth:`remove_duplicates`. These methods are just proxies for a method in :attr:`results`, but
    they are also followed by a notification call which is very important if we want GUI elements
    to be correctly notified of a change in the data they're presenting.

    .. attribute:: directories

        Instance of :class:`~core.directories.Directories`. It holds the current folder selection.

    .. attribute:: results

        Instance of :class:`core.results.Results`. Holds the results of the latest scan.

    .. attribute:: selected_dupes

        List of currently selected dupes from our :attr:`results`. Whenever the user changes their
        selection at the UI level, :attr:`result_table` takes care of updating this attribute, so
        you can trust that it's always up-to-date.

    .. attribute:: result_table

        Instance of :mod:`meta-gui <core.gui>` table listing the results from :attr:`results`.
    """

    # --- View interface
    # get_default(key_name)
    # set_default(key_name, value)
    # show_message(msg)
    # open_url(url)
    # open_path(path)
    # reveal_path(path)
    # ask_yes_no(prompt) --> bool
    # create_results_window()
    # show_results_window()
    # show_problem_dialog()
    # select_dest_folder(prompt: str) --> str
    # select_dest_file(prompt: str, ext: str) --> str
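    # Purely illustrative sketch (not part of dupeGuru): the kind of object a UI layer passes
    # as ``view``. Every name below is hypothetical; only the method names listed above form
    # the expected interface.
    #
    #     class MinimalConsoleView:
    #         def get_default(self, key_name):
    #             return None
    #
    #         def set_default(self, key_name, value):
    #             pass
    #
    #         def show_message(self, msg):
    #             print(msg)
    #
    #         def ask_yes_no(self, prompt):
    #             return input(prompt + " (y/N) ").strip().lower().startswith("y")
    #
    #         # open_path(), reveal_path(), create_results_window(), show_results_window(),
    #         # show_problem_dialog(), select_dest_folder() and select_dest_file() would be
    #         # filled in similarly by a real toolkit layer.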

    NAME = PROMPT_NAME = "dupeGuru"

    PICTURE_CACHE_TYPE = "sqlite"  # set to 'shelve' for a ShelveCache

    def __init__(self, view):
        if view.get_default(DEBUG_MODE_PREFERENCE):
            logging.getLogger().setLevel(logging.DEBUG)
            logging.debug("Debug mode enabled")
        Broadcaster.__init__(self)
        self.view = view
        self.appdata = desktop.special_folder_path(
            desktop.SpecialFolder.AppData, appname=self.NAME
        )
        if not op.exists(self.appdata):
            os.makedirs(self.appdata)
        self.app_mode = AppMode.Standard
        self.discarded_file_count = 0
        self.exclude_list = ExcludeList()
        self.directories = directories.Directories(self.exclude_list)
        self.results = results.Results(self)
        self.ignore_list = IgnoreList()
        # In addition to "app-level" options, this dictionary also holds options that will be
        # sent to the scanner. They don't have default values because those default values are
        # defined in the scanner class.
        self.options = {
            "escape_filter_regexp": True,
            "clean_empty_dirs": False,
            "ignore_hardlink_matches": False,
            "copymove_dest_type": DestType.Relative,
            "picture_cache_type": self.PICTURE_CACHE_TYPE,
        }
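        # Illustrative note: the UI layer may tweak these before start_scanning(), e.g.
        # ``app.options["clean_empty_dirs"] = True``. Scanner-level keys are only copied onto
        # the scanner instance when it has a matching attribute (see start_scanning()).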
        self.selected_dupes = []
        self.details_panel = DetailsPanel(self)
        self.directory_tree = DirectoryTree(self)
        self.problem_dialog = ProblemDialog(self)
        self.ignore_list_dialog = IgnoreListDialog(self)
        self.exclude_list_dialog = ExcludeListDialogCore(self)
        self.stats_label = StatsLabel(self)
        self.result_table = None
        self.deletion_options = DeletionOptions()
        self.progress_window = ProgressWindow(self._job_completed, self._job_error)
        children = [self.directory_tree, self.stats_label, self.details_panel]
        for child in children:
            child.connect()

    # --- Private
    def _recreate_result_table(self):
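        # Swap in the mode-specific result table (picture/music/standard), hook it up to the
        # notification system and ask the view to rebuild the window displaying it.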
        if self.result_table is not None:
            self.result_table.disconnect()
        if self.app_mode == AppMode.Picture:
            self.result_table = pe.result_table.ResultTable(self)
        elif self.app_mode == AppMode.Music:
            self.result_table = me.result_table.ResultTable(self)
        else:
            self.result_table = se.result_table.ResultTable(self)
        self.result_table.connect()
        self.view.create_results_window()

    def _get_picture_cache_path(self):
        cache_type = self.options["picture_cache_type"]
        cache_name = (
            "cached_pictures.shelve" if cache_type == "shelve" else "cached_pictures.db"
        )
        return op.join(self.appdata, cache_name)

    def _get_dupe_sort_key(self, dupe, get_group, key, delta):
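        # Returns the value used to sort ``dupe`` within its group for column ``key``.
        # ``get_group`` is a callable returning the dupe's group; in delta mode, delta columns
        # are sorted by their difference from the group's reference file.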
        if self.app_mode in (AppMode.Music, AppMode.Picture):
            if key == "folder_path":
                dupe_folder_path = getattr(
                    dupe, "display_folder_path", dupe.folder_path
                )
                return str(dupe_folder_path).lower()
        if self.app_mode == AppMode.Picture:
            if delta and key == "dimensions":
                r = cmp_value(dupe, key)
                ref_value = cmp_value(get_group().ref, key)
                return get_delta_dimensions(r, ref_value)
        if key == "marked":
            return self.results.is_marked(dupe)
        if key == "percentage":
            m = get_group().get_match_of(dupe)
            return m.percentage
        elif key == "dupe_count":
            return 0
        else:
            result = cmp_value(dupe, key)
        if delta:
            refval = cmp_value(get_group().ref, key)
            if key in self.result_table.DELTA_COLUMNS:
                result -= refval
            else:
                same = cmp_value(dupe, key) == refval
                result = (same, result)
        return result

    def _get_group_sort_key(self, group, key):
        if self.app_mode in (AppMode.Music, AppMode.Picture):
            if key == "folder_path":
                dupe_folder_path = getattr(
                    group.ref, "display_folder_path", group.ref.folder_path
                )
                return str(dupe_folder_path).lower()
        if key == "percentage":
            return group.percentage
        if key == "dupe_count":
            return len(group)
        if key == "marked":
            return len([dupe for dupe in group.dupes if self.results.is_marked(dupe)])
        return cmp_value(group.ref, key)

    def _do_delete(self, j, link_deleted, use_hardlinks, direct_deletion):
        def op(dupe):
            j.add_progress()
            return self._do_delete_dupe(
                dupe, link_deleted, use_hardlinks, direct_deletion
            )

        j.start_job(self.results.mark_count)
        self.results.perform_on_marked(op, True)

    def _do_delete_dupe(self, dupe, link_deleted, use_hardlinks, direct_deletion):
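        # Deletes a single dupe: directly (rmtree/remove) when ``direct_deletion`` is set,
        # otherwise through send2trash. Optionally replaces the deleted file with a symlink
        # or hardlink to the group's reference file.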
        if not dupe.path.exists():
            return
        logging.debug("Sending '%s' to trash", dupe.path)
        str_path = str(dupe.path)
        if direct_deletion:
            if op.isdir(str_path):
                shutil.rmtree(str_path)
            else:
                os.remove(str_path)
        else:
            send2trash(str_path)  # Raises OSError when there's a problem
        if link_deleted:
            group = self.results.get_group_of_duplicate(dupe)
            ref = group.ref
            linkfunc = os.link if use_hardlinks else os.symlink
            linkfunc(str(ref.path), str_path)
        self.clean_empty_dirs(dupe.path.parent())

    def _create_file(self, path):
        # We add fs.Folder to fileclasses in case the file we're loading contains folder paths.
        return fs.get_file(path, self.fileclasses + [se.fs.Folder])

    def _get_file(self, str_path):
        path = Path(str_path)
        f = self._create_file(path)
        if f is None:
            return None
        try:
            f._read_all_info(attrnames=self.METADATA_TO_READ)
            return f
        except EnvironmentError:
            return None

    def _get_export_data(self):
        columns = [
            col
            for col in self.result_table.columns.ordered_columns
            if col.visible and col.name != "marked"
        ]
        colnames = [col.display for col in columns]
        rows = []
        for group_id, group in enumerate(self.results.groups):
            for dupe in group:
                data = self.get_display_info(dupe, group)
                row = [fix_surrogate_encoding(data[col.name]) for col in columns]
                row.insert(0, group_id)
                rows.append(row)
        return colnames, rows

    def _results_changed(self):
        self.selected_dupes = [
            d
            for d in self.selected_dupes
            if self.results.get_group_of_duplicate(d) is not None
        ]
        self.notify("results_changed")

    def _start_job(self, jobid, func, args=()):
        title = JOBID2TITLE[jobid]
        try:
            self.progress_window.run(jobid, title, func, args=args)
        except job.JobInProgressError:
            msg = tr(
                "A previous action is still hanging in there. You can't start a new one yet. Wait "
                "a few seconds, then try again."
            )
            self.view.show_message(msg)

    def _job_completed(self, jobid):
        if jobid == JobType.Scan:
            self._results_changed()
            if not self.results.groups:
                self.view.show_message(tr("No duplicates found."))
            else:
                self.view.show_results_window()
        if jobid in {JobType.Move, JobType.Delete}:
            self._results_changed()
        if jobid == JobType.Load:
            self._recreate_result_table()
            self._results_changed()
            self.view.show_results_window()
        if jobid in {JobType.Copy, JobType.Move, JobType.Delete}:
            if self.results.problems:
                self.problem_dialog.refresh()
                self.view.show_problem_dialog()
            else:
                msg = {
                    JobType.Copy: tr("All marked files were copied successfully."),
                    JobType.Move: tr("All marked files were moved successfully."),
                    JobType.Delete: tr(
                        "All marked files were successfully sent to Trash."
                    ),
                }[jobid]
                self.view.show_message(msg)

    def _job_error(self, jobid, err):
        if jobid == JobType.Load:
            msg = tr("Could not load file: {}").format(err)
            self.view.show_message(msg)
            return False
        else:
            raise err

    @staticmethod
    def _remove_hardlink_dupes(files):
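        # Keep only one file per inode so that hardlinks to the same content don't end up
        # reported as duplicates of each other.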
        seen_inodes = set()
        result = []
        for file in files:
            try:
                inode = file.path.stat().st_ino
            except OSError:
                # The file was probably deleted or something
                continue
            if inode not in seen_inodes:
                seen_inodes.add(inode)
                result.append(file)
        return result

    def _select_dupes(self, dupes):
        if dupes == self.selected_dupes:
            return
        self.selected_dupes = dupes
        self.notify("dupes_selected")

    # --- Protected
    def _get_fileclasses(self):
        if self.app_mode == AppMode.Picture:
            return [pe.photo.PLAT_SPECIFIC_PHOTO_CLASS]
        elif self.app_mode == AppMode.Music:
            return [me.fs.MusicFile]
        else:
            return [se.fs.File]

    def _prioritization_categories(self):
        if self.app_mode == AppMode.Picture:
            return pe.prioritize.all_categories()
        elif self.app_mode == AppMode.Music:
            return me.prioritize.all_categories()
        else:
            return prioritize.all_categories()

    # --- Public
    def add_directory(self, d):
        """Adds folder ``d`` to :attr:`directories`.

        Shows an error message dialog if something bad happens.

        :param str d: path of folder to add
        """
        try:
            self.directories.add_path(Path(d))
            self.notify("directories_changed")
        except directories.AlreadyThereError:
            self.view.show_message(tr("'{}' already is in the list.").format(d))
        except directories.InvalidPathError:
            self.view.show_message(tr("'{}' does not exist.").format(d))

    def add_selected_to_ignore_list(self):
        """Adds :attr:`selected_dupes` to :attr:`ignore_list`.
        """
        dupes = self.without_ref(self.selected_dupes)
        if not dupes:
            self.view.show_message(MSG_NO_SELECTED_DUPES)
            return
        msg = tr(
            "All selected %d matches are going to be ignored in all subsequent scans. Continue?"
        )
        if not self.view.ask_yes_no(msg % len(dupes)):
            return
        for dupe in dupes:
            g = self.results.get_group_of_duplicate(dupe)
            for other in g:
                if other is not dupe:
                    self.ignore_list.Ignore(str(other.path), str(dupe.path))
        self.remove_duplicates(dupes)
        self.ignore_list_dialog.refresh()

    def apply_filter(self, filter):
        """Apply a filter ``filter`` to the results so that it shows only dupe groups that match it.

        :param str filter: filter to apply
        """
        self.results.apply_filter(None)
        if self.options["escape_filter_regexp"]:
            filter = escape(filter, set("()[]\\.|+?^"))
            filter = escape(filter, "*", ".")
        self.results.apply_filter(filter)
        self._results_changed()

    def clean_empty_dirs(self, path):
        if self.options["clean_empty_dirs"]:
            while delete_if_empty(path, [".DS_Store"]):
                path = path.parent()

    def clear_picture_cache(self):
        try:
            os.remove(self._get_picture_cache_path())
        except FileNotFoundError:
            pass  # we don't care

    def copy_or_move(self, dupe, copy: bool, destination: str, dest_type: DestType):
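        # Builds the destination path according to ``dest_type`` (see DestType above), creates
        # missing folders, appends the source file name, then hands the actual operation to
        # smart_copy()/smart_move() and finally cleans up empty source folders if that option
        # is enabled.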
        source_path = dupe.path
        location_path = first(p for p in self.directories if dupe.path in p)
        dest_path = Path(destination)
        if dest_type in {DestType.Relative, DestType.Absolute}:
            # no filename, no windows drive letter
            source_base = source_path.remove_drive_letter().parent()
            if dest_type == DestType.Relative:
                source_base = source_base[location_path:]
            dest_path = dest_path[source_base]
        if not dest_path.exists():
            dest_path.makedirs()
        # Add filename to dest_path. For file move/copy, it's not required, but for folders, yes.
        dest_path = dest_path[source_path.name]
        logging.debug("Copy/Move operation from '%s' to '%s'", source_path, dest_path)
        # Raises an EnvironmentError if there's a problem
        if copy:
            smart_copy(source_path, dest_path)
        else:
            smart_move(source_path, dest_path)
        self.clean_empty_dirs(source_path.parent())

    def copy_or_move_marked(self, copy):
        """Start an async move (or copy) job on marked duplicates.

        :param bool copy: If True, duplicates will be copied instead of moved
        """

        def do(j):
            def op(dupe):
                j.add_progress()
                self.copy_or_move(dupe, copy, destination, desttype)

            j.start_job(self.results.mark_count)
            self.results.perform_on_marked(op, not copy)

        if not self.results.mark_count:
            self.view.show_message(MSG_NO_MARKED_DUPES)
            return
        destination = self.view.select_dest_folder(
            tr("Select a directory to copy marked files to")
            if copy
            else tr("Select a directory to move marked files to")
        )
        if destination:
            desttype = self.options["copymove_dest_type"]
            jobid = JobType.Copy if copy else JobType.Move
            self._start_job(jobid, do)

    def delete_marked(self):
        """Start an async job to send marked duplicates to the trash.
        """
        if not self.results.mark_count:
            self.view.show_message(MSG_NO_MARKED_DUPES)
            return
        if not self.deletion_options.show(self.results.mark_count):
            return
        args = [
            self.deletion_options.link_deleted,
            self.deletion_options.use_hardlinks,
            self.deletion_options.direct,
        ]
        logging.debug("Starting deletion job with args %r", args)
        self._start_job(JobType.Delete, self._do_delete, args=args)

    def export_to_xhtml(self):
        """Export current results to XHTML.

        The configuration of the :attr:`result_table` (columns order and visibility) is used to
        determine how the data is presented in the export. In other words, the exported table in
        the resulting XHTML will look just like the results table.
        """
        colnames, rows = self._get_export_data()
        export_path = export.export_to_xhtml(colnames, rows)
        desktop.open_path(export_path)

    def export_to_csv(self):
        """Export current results to CSV.

        The columns and their order in the resulting CSV file are determined in the same way as in
        :meth:`export_to_xhtml`.
        """
        dest_file = self.view.select_dest_file(
            tr("Select a destination for your exported CSV"), "csv"
        )
        if dest_file:
            colnames, rows = self._get_export_data()
            try:
                export.export_to_csv(dest_file, colnames, rows)
            except OSError as e:
                self.view.show_message(tr("Couldn't write to file: {}").format(str(e)))

    def get_display_info(self, dupe, group, delta=False):
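        # Returns a {column name: display string} mapping for ``dupe`` as shown in the result
        # table, falling back to "---" placeholders when the dupe can't be read.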
        def empty_data():
            return {c.name: "---" for c in self.result_table.COLUMNS[1:]}

        if (dupe is None) or (group is None):
            return empty_data()
        try:
            return dupe.get_display_info(group, delta)
        except Exception as e:
            logging.warning(
                "Exception (type: %s) on GetDisplayInfo for %s: %s",
                type(e), str(dupe.path), str(e))
            return empty_data()

    def invoke_custom_command(self):
        """Calls command in ``CustomCommand`` pref with ``%d`` and ``%r`` placeholders replaced.

        Using the current selection, ``%d`` is replaced with the currently selected dupe and ``%r``
        is replaced with that dupe's ref file. If there's no selection, the command is not invoked.
        If the dupe is a ref, ``%d`` and ``%r`` will be the same.
        """
        cmd = self.view.get_default("CustomCommand")
        if not cmd:
            msg = tr(
                "You have no custom command set up. Set it up in your preferences."
            )
            self.view.show_message(msg)
            return
        if not self.selected_dupes:
            return
        dupe = self.selected_dupes[0]
        group = self.results.get_group_of_duplicate(dupe)
        ref = group.ref
        cmd = cmd.replace("%d", str(dupe.path))
        cmd = cmd.replace("%r", str(ref.path))
        match = re.match(r'"([^"]+)"(.*)', cmd)
        if match is not None:
            # This code here is because subprocess.Popen doesn't seem to accept, under Windows,
            # executable paths with spaces in it, *even* when they're enclosed in "". So this is
            # a workaround to make the damn thing work.
            exepath, args = match.groups()
            path, exename = op.split(exepath)
            subprocess.Popen(exename + args, shell=True, cwd=path)
        else:
            subprocess.Popen(cmd, shell=True)

    def load(self):
        """Load directory selection and ignore list from files in appdata.

        This method is called during startup so that directory selection and ignore list, which
        is persistent data, is the same as when the last session was closed (when :meth:`save` was
        called).
        """
        self.directories.load_from_file(op.join(self.appdata, "last_directories.xml"))
        self.notify("directories_changed")
        p = op.join(self.appdata, "ignore_list.xml")
        self.ignore_list.load_from_xml(p)
        self.ignore_list_dialog.refresh()
        p = op.join(self.appdata, "exclude_list.xml")
        self.exclude_list.load_from_xml(p)
        self.exclude_list_dialog.refresh()

    def load_directories(self, filepath):
        # Clear out previous entries
        self.directories.__init__()
        self.directories.load_from_file(filepath)
        self.notify("directories_changed")

    def load_from(self, filename):
        """Start an async job to load results from ``filename``.

        :param str filename: path of the XML file (created with :meth:`save_as`) to load
        """

        def do(j):
            self.results.load_from_xml(filename, self._get_file, j)

        self._start_job(JobType.Load, do)

    def make_selected_reference(self):
        """Promote :attr:`selected_dupes` to reference position within their respective groups.

        Each selected dupe will become the :attr:`~core.engine.Group.ref` of its group. If there's
        more than one dupe selected for the same group, only the first (in the order currently shown
        in :attr:`result_table`) dupe will be promoted.
        """
        dupes = self.without_ref(self.selected_dupes)
        changed_groups = set()
        for dupe in dupes:
            g = self.results.get_group_of_duplicate(dupe)
            if g not in changed_groups:
                if self.results.make_ref(dupe):
                    changed_groups.add(g)
        # It's not always obvious to users what this action does, so to make it a bit clearer,
        # we change our selection to the ref of all changed groups. However, we also want to keep
        # the files that were ref before and weren't changed by the action. In effect, what this
        # does is that we keep our old selection, but remove all non-ref dupes from it.
        # If no group was changed, however, we don't touch the selection.
        if not self.result_table.power_marker:
            if changed_groups:
                self.selected_dupes = [
                    d
                    for d in self.selected_dupes
                    if self.results.get_group_of_duplicate(d).ref is d
                ]
            self.notify("results_changed")
        else:
            # If we're in "Dupes Only" mode (previously called Power Marker), things are a bit
            # different. The refs are not shown in the table, and if our operation is successful,
            # this means that there's no way to follow our dupe selection. Then, the best thing to
            # do is to keep our selection index-wise (different dupe selection, but same index
            # selection).
            self.notify("results_changed_but_keep_selection")

    def mark_all(self):
        """Set all dupes in the results as marked.
        """
        self.results.mark_all()
        self.notify("marking_changed")

    def mark_none(self):
        """Set all dupes in the results as unmarked.
        """
        self.results.mark_none()
        self.notify("marking_changed")

    def mark_invert(self):
        """Invert the marked state of all dupes in the results.
        """
        self.results.mark_invert()
        self.notify("marking_changed")

    def mark_dupe(self, dupe, marked):
        """Change marked status of ``dupe``.

        :param dupe: dupe to mark/unmark
        :type dupe: :class:`~core.fs.File`
        :param bool marked: True = mark, False = unmark
        """
        if marked:
            self.results.mark(dupe)
        else:
            self.results.unmark(dupe)
        self.notify("marking_changed")

    def open_selected(self):
        """Open :attr:`selected_dupes` with their associated application.
        """
        if len(self.selected_dupes) > 10:
            if not self.view.ask_yes_no(MSG_MANY_FILES_TO_OPEN):
                return
        for dupe in self.selected_dupes:
            desktop.open_path(dupe.path)

    def purge_ignore_list(self):
        """Remove files that don't exist from :attr:`ignore_list`.
        """
        self.ignore_list.Filter(lambda f, s: op.exists(f) and op.exists(s))
        self.ignore_list_dialog.refresh()

    def remove_directories(self, indexes):
        """Remove root directories at ``indexes`` from :attr:`directories`.

        :param indexes: Indexes of the directories to remove.
        :type indexes: list of int
        """
        try:
            indexes = sorted(indexes, reverse=True)
            for index in indexes:
                del self.directories[index]
            self.notify("directories_changed")
        except IndexError:
            pass

    def remove_duplicates(self, duplicates):
        """Remove ``duplicates`` from :attr:`results`.

        Calls :meth:`~core.results.Results.remove_duplicates` and sends appropriate notifications.

        :param duplicates: duplicates to remove.
        :type duplicates: list of :class:`~core.fs.File`
        """
        self.results.remove_duplicates(self.without_ref(duplicates))
        self.notify("results_changed_but_keep_selection")

    def remove_marked(self):
        """Remove marked duplicates from the results (without touching the files themselves).
        """
        if not self.results.mark_count:
            self.view.show_message(MSG_NO_MARKED_DUPES)
            return
        msg = tr("You are about to remove %d files from results. Continue?")
        if not self.view.ask_yes_no(msg % self.results.mark_count):
            return
        self.results.perform_on_marked(lambda x: None, True)
        self._results_changed()

    def remove_selected(self):
        """Remove :attr:`selected_dupes` from the results (without touching the files themselves).
        """
        dupes = self.without_ref(self.selected_dupes)
        if not dupes:
            self.view.show_message(MSG_NO_SELECTED_DUPES)
            return
        msg = tr("You are about to remove %d files from results. Continue?")
        if not self.view.ask_yes_no(msg % len(dupes)):
            return
        self.remove_duplicates(dupes)

    def rename_selected(self, newname):
        """Renames the selected dupe's file to ``newname``.

        If more than one dupe is selected, the first one is used.

        :param str newname: The filename to rename the dupe's file to.
        """
        try:
            d = self.selected_dupes[0]
            d.rename(newname)
            return True
        except (IndexError, fs.FSError) as e:
            logging.warning("dupeGuru Warning: %s" % str(e))
        return False

    def reprioritize_groups(self, sort_key):
        """Sort dupes in each group (in :attr:`results`) according to ``sort_key``.

        Called by the re-prioritize dialog. Calls :meth:`~core.engine.Group.prioritize` and, once
        the sorting is done, shows a message that confirms the action.

        :param sort_key: The key being sent to :meth:`~core.engine.Group.prioritize`
        :type sort_key: f(dupe)
        """
        count = 0
        for group in self.results.groups:
            if group.prioritize(key_func=sort_key):
                count += 1
        self._results_changed()
        msg = tr("{} duplicate groups were changed by the re-prioritization.").format(
            count
        )
        self.view.show_message(msg)

    def reveal_selected(self):
        if self.selected_dupes:
            desktop.reveal_path(self.selected_dupes[0].path)

    def save(self):
        if not op.exists(self.appdata):
            os.makedirs(self.appdata)
        self.directories.save_to_file(op.join(self.appdata, "last_directories.xml"))
        p = op.join(self.appdata, "ignore_list.xml")
        self.ignore_list.save_to_xml(p)
        p = op.join(self.appdata, "exclude_list.xml")
        self.exclude_list.save_to_xml(p)
        self.notify("save_session")

    def save_as(self, filename):
        """Save results in ``filename``.

        :param str filename: path of the file to save results (as XML) to.
        """
        try:
            self.results.save_to_xml(filename)
        except OSError as e:
            self.view.show_message(tr("Couldn't write to file: {}").format(str(e)))

    def save_directories_as(self, filename):
        """Save directories in ``filename``.

        :param str filename: path of the file to save directories (as XML) to.
        """
        try:
            self.directories.save_to_file(filename)
        except OSError as e:
            self.view.show_message(tr("Couldn't write to file: {}").format(str(e)))

    def start_scanning(self):
        """Starts an async job to scan for duplicates.

        Scans folders selected in :attr:`directories` and puts the results in :attr:`results`.
        """
        scanner = self.SCANNER_CLASS()
        if not self.directories.has_any_file():
            self.view.show_message(
                tr("The selected directories contain no scannable file.")
            )
            return
        # Send relevant options down to the scanner instance
        for k, v in self.options.items():
            if hasattr(scanner, k):
                setattr(scanner, k, v)
        if self.app_mode == AppMode.Picture:
            scanner.cache_path = self._get_picture_cache_path()
        self.results.groups = []
        self._recreate_result_table()
        self._results_changed()

        def do(j):
            j.set_progress(0, tr("Collecting files to scan"))
            if scanner.scan_type == ScanType.Folders:
                files = list(
                    self.directories.get_folders(folderclass=se.fs.Folder, j=j)
                )
            else:
                files = list(
                    self.directories.get_files(fileclasses=self.fileclasses, j=j)
                )
            if self.options["ignore_hardlink_matches"]:
                files = self._remove_hardlink_dupes(files)
            logging.info("Scanning %d files" % len(files))
            self.results.groups = scanner.get_dupe_groups(files, self.ignore_list, j)
            self.discarded_file_count = scanner.discarded_file_count

        self._start_job(JobType.Scan, do)

    def toggle_selected_mark_state(self):
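        # If every selected dupe shares the same marked state, toggle them all; otherwise mark
        # them all. Reference files in the selection are skipped.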
        selected = self.without_ref(self.selected_dupes)
        if not selected:
            return
        if allsame(self.results.is_marked(d) for d in selected):
            markfunc = self.results.mark_toggle
        else:
            markfunc = self.results.mark
        for dupe in selected:
            markfunc(dupe)
        self.notify("marking_changed")

    def without_ref(self, dupes):
        """Returns ``dupes`` with all reference elements removed.
        """
        return [
            dupe
            for dupe in dupes
            if self.results.get_group_of_duplicate(dupe).ref is not dupe
        ]

    def get_default(self, key, fallback_value=None):
        result = nonone(self.view.get_default(key), fallback_value)
        if fallback_value is not None and not isinstance(result, type(fallback_value)):
            # we don't want to end up with garbage values from the prefs
            try:
                result = type(fallback_value)(result)
            except Exception:
                result = fallback_value
        return result

    def set_default(self, key, value):
        self.view.set_default(key, value)

    # --- Properties
    @property
    def stat_line(self):
        result = self.results.stat_line
        if self.discarded_file_count:
            result = tr("%s (%d discarded)") % (result, self.discarded_file_count)
        return result

    @property
    def fileclasses(self):
        return self._get_fileclasses()

    @property
    def SCANNER_CLASS(self):
        if self.app_mode == AppMode.Picture:
            return pe.scanner.ScannerPE
        elif self.app_mode == AppMode.Music:
            return me.scanner.ScannerME
        else:
            return se.scanner.ScannerSE

    @property
    def METADATA_TO_READ(self):
        if self.app_mode == AppMode.Picture:
            return ["size", "mtime", "dimensions", "exif_timestamp"]
        elif self.app_mode == AppMode.Music:
            return [
                "size",
                "mtime",
                "duration",
                "bitrate",
                "samplerate",
                "title",
                "artist",
                "album",
                "genre",
                "year",
                "track",
                "comment",
            ]
        else:
            return ["size", "mtime"]