# Created By: Virgil Dupras
# Created On: 2006/02/23
# Copyright 2015 Hardcoded Software (http://www.hardcoded.net)
#
# This software is licensed under the "GPLv3" License as described in the "LICENSE" file,
# which should be included with this package. The terms are also available at
# http://www.gnu.org/licenses/gpl-3.0.html

import logging
import re
import os
import os.path as op
from xml.etree import ElementTree as ET

from hscommon.jobprogress.job import nulljob
from hscommon.conflict import get_conflicted_name
from hscommon.util import flatten, nonone, FileOrPath, format_size
from hscommon.trans import tr

from core import engine
from core.markable import Markable


class Results(Markable):
    """Manages a collection of duplicate :class:`~core.engine.Group`.

    This class takes care of marking, sorting and filtering duplicate groups.

    .. attribute:: groups

        The list of :class:`~core.engine.Group` instances managed by this instance.

    .. attribute:: dupes

        A list of all duplicates (:class:`~core.fs.File` instances), excluding refs,
        contained in the currently managed :attr:`groups`.
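
    Example (a minimal sketch; assumes ``app`` provides the ``_get_dupe_sort_key`` and
    ``_get_group_sort_key`` helpers used by the sort methods, and that ``matches`` is a
    list of :class:`~core.engine.Match`)::

        results = Results(app)
        results.groups = engine.get_groups(matches)  # build duplicate groups
        results.mark_all()                           # mark every markable dupe
        print(results.stat_line)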
    """

    # ---Override
    def __init__(self, app):
        Markable.__init__(self)
        self.__groups = []
        self.__group_of_duplicate = {}
        self.__groups_sort_descriptor = None  # This is a tuple (key, asc)
        self.__dupes = None
        self.__dupes_sort_descriptor = None  # This is a tuple (key, asc, delta)
        self.__filters = None
        self.__filtered_dupes = None
        self.__filtered_groups = None
        self.__recalculate_stats()
        self.__marked_size = 0
        self.app = app
        self.problems = []  # (dupe, error_msg)
        self.is_modified = False
        self.refresh_required = False

    def _did_mark(self, dupe):
        self.__marked_size += dupe.size

    def _did_unmark(self, dupe):
        self.__marked_size -= dupe.size

    def _get_markable_count(self):
        return self.__total_count

    def _is_markable(self, dupe):
        if dupe.is_ref:
            return False
        g = self.get_group_of_duplicate(dupe)
        if not g:
            return False
        if dupe is g.ref:
            return False
        if self.__filtered_dupes and dupe not in self.__filtered_dupes:
            return False
        return True

    def mark_all(self):
        if self.__filters:
            self.mark_multiple(self.__filtered_dupes)
        else:
            Markable.mark_all(self)

    def mark_invert(self):
        if self.__filters:
            self.mark_toggle_multiple(self.__filtered_dupes)
        else:
            Markable.mark_invert(self)

    def mark_none(self):
        if self.__filters:
            self.unmark_multiple(self.__filtered_dupes)
        else:
            Markable.mark_none(self)

    # ---Private
    def __get_dupe_list(self):
        if self.__dupes is None or self.refresh_required:
            self.__dupes = flatten(group.dupes for group in self.groups)
            self.refresh_required = False
            if None in self.__dupes:
                # This is debug logging to try to figure out #44
                logging.warning(
                    "There is a None value in the Results' dupe list. dupes: %r groups: %r",
                    self.__dupes,
                    self.groups,
                )
            if self.__filtered_dupes:
                self.__dupes = [dupe for dupe in self.__dupes if dupe in self.__filtered_dupes]
            sd = self.__dupes_sort_descriptor
            if sd:
                self.sort_dupes(sd[0], sd[1], sd[2])
        return self.__dupes

    def __get_groups(self):
        if self.__filtered_groups is None:
            return self.__groups
        else:
            return self.__filtered_groups

    def __get_stat_line(self):
        if self.__filtered_dupes is None:
            mark_count = self.mark_count
            marked_size = self.__marked_size
            total_count = self.__total_count
            total_size = self.__total_size
        else:
            mark_count = len([dupe for dupe in self.__filtered_dupes if self.is_marked(dupe)])
            marked_size = sum(dupe.size for dupe in self.__filtered_dupes if self.is_marked(dupe))
            total_count = len([dupe for dupe in self.__filtered_dupes if self.is_markable(dupe)])
            total_size = sum(dupe.size for dupe in self.__filtered_dupes if self.is_markable(dupe))
        if self.mark_inverted:
            marked_size = self.__total_size - marked_size
        result = tr("%d / %d (%s / %s) duplicates marked.") % (
            mark_count,
            total_count,
            format_size(marked_size, 2),
            format_size(total_size, 2),
        )
        if self.__filters:
            result += tr(" filter: %s") % " --> ".join(self.__filters)
        return result

    def __recalculate_stats(self):
        self.__total_size = 0
        self.__total_count = 0
        for group in self.groups:
            markable = [dupe for dupe in group.dupes if self._is_markable(dupe)]
            self.__total_count += len(markable)
            self.__total_size += sum(dupe.size for dupe in markable)

    def __set_groups(self, new_groups):
        self.mark_none()
        self.__groups = new_groups
        self.__group_of_duplicate = {}
        for g in self.__groups:
            for dupe in g:
                self.__group_of_duplicate[dupe] = g
                if not hasattr(dupe, "is_ref"):
                    dupe.is_ref = False
        self.is_modified = bool(self.__groups)
        old_filters = nonone(self.__filters, [])
        self.apply_filter(None)
        for filter_str in old_filters:
            self.apply_filter(filter_str)

    # ---Public
    def apply_filter(self, filter_str):
        """Applies a filter ``filter_str`` to :attr:`groups`.

        When you apply the filter, only dupes with a path matching ``filter_str`` will
        be in the results. To cancel the filter, just call apply_filter with ``filter_str``
        set to ``None``, and the results will go back to normal.

        If you call apply_filter on already filtered results, the new filter will be applied
        *on the filtered results*.

        :param str filter_str: a string containing a regexp to filter dupes with.
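
        Example (an illustrative sketch)::

            results.apply_filter("jpg")       # keep only dupes whose path matches "jpg"
            results.apply_filter("vacation")  # chained: filters the already filtered results
            results.apply_filter(None)        # cancel all filters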
        """
        if not filter_str:
            self.__filtered_dupes = None
            self.__filtered_groups = None
            self.__filters = None
        else:
            if not self.__filters:
                self.__filters = []
            try:
                filter_re = re.compile(filter_str, re.IGNORECASE)
            except re.error:
                return  # don't apply this filter.
            self.__filters.append(filter_str)
            if self.__filtered_dupes is None:
                self.__filtered_dupes = flatten(g[:] for g in self.groups)
            self.__filtered_dupes = {dupe for dupe in self.__filtered_dupes if filter_re.search(str(dupe.path))}
            filtered_groups = set()
            for dupe in self.__filtered_dupes:
                filtered_groups.add(self.get_group_of_duplicate(dupe))
            self.__filtered_groups = list(filtered_groups)
        self.__recalculate_stats()
        sd = self.__groups_sort_descriptor
        if sd:
            self.sort_groups(sd[0], sd[1])
        self.__dupes = None

    def get_group_of_duplicate(self, dupe):
        """Returns the :class:`~core.engine.Group` to which ``dupe`` belongs."""
        try:
            return self.__group_of_duplicate[dupe]
        except (TypeError, KeyError):
            return None

    is_markable = _is_markable

    def load_from_xml(self, infile, get_file, j=nulljob):
        """Load results from ``infile``.

        :param infile: a file or path pointing to an XML file created with :meth:`save_to_xml`.
        :param get_file: a function f(path) returning a :class:`~core.fs.File` wrapping the path.
        :param j: A :ref:`job progress instance <jobs>`.
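
        Example (a sketch; ``get_file`` stands in for any ``f(path) -> File`` factory and
        ``"results.xml"`` for a file previously written by :meth:`save_to_xml`)::

            results.load_from_xml("results.xml", get_file)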
        """
        def do_match(ref_file, other_files, group):
            if not other_files:
                return
            for other_file in other_files:
                group.add_match(engine.get_match(ref_file, other_file))
            do_match(other_files[0], other_files[1:], group)

        self.apply_filter(None)
        root = ET.parse(infile).getroot()
        group_elems = list(root.iter("group"))
        groups = []
        marked = set()
        for group_elem in j.iter_with_progress(group_elems, every=100):
            group = engine.Group()
            dupes = []
            for file_elem in group_elem.iter("file"):
                path = file_elem.get("path")
                words = file_elem.get("words", "")
                if not path:
                    continue
                file = get_file(path)
                if file is None:
                    continue
                file.words = words.split(",")
                file.is_ref = file_elem.get("is_ref") == "y"
                dupes.append(file)
                if file_elem.get("marked") == "y":
                    marked.add(file)
            for match_elem in group_elem.iter("match"):
                try:
                    attrs = match_elem.attrib
                    first_file = dupes[int(attrs["first"])]
                    second_file = dupes[int(attrs["second"])]
                    percentage = int(attrs["percentage"])
                    group.add_match(engine.Match(first_file, second_file, percentage))
                except (IndexError, KeyError, ValueError):
                    # Covers missing attr, non-int values and indexes out of bounds
                    pass
            if (not group.matches) and (len(dupes) >= 2):
                do_match(dupes[0], dupes[1:], group)
            group.prioritize(lambda x: dupes.index(x))
            if len(group):
                groups.append(group)
            j.add_progress()
        self.groups = groups
        for dupe_file in marked:
            self.mark(dupe_file)
        self.is_modified = False

    def make_ref(self, dupe):
        """Make ``dupe`` take the :attr:`~core.engine.Group.ref` position of its group."""
        g = self.get_group_of_duplicate(dupe)
        r = g.ref
        if not g.switch_ref(dupe):
            return False
        self._remove_mark_flag(dupe)
        if not r.is_ref:
            self.__total_count += 1
            self.__total_size += r.size
        if not dupe.is_ref:
            self.__total_count -= 1
            self.__total_size -= dupe.size
        self.__dupes = None
        self.is_modified = True
        return True

    def perform_on_marked(self, func, remove_from_results):
        """Performs ``func`` on all marked dupes.

        If an ``OSError`` or ``UnicodeEncodeError`` is raised during the call, the
        problematic dupe is added to ``self.problems``.

        :param bool remove_from_results: If true, dupes which had ``func`` applied without
            raising an error are removed from the results.
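
        Example (a sketch; ``delete_dupe`` is a hypothetical callable, any ``f(dupe)``
        that may raise ``OSError`` will do)::

            results.perform_on_marked(delete_dupe, remove_from_results=True)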
        """
        self.problems = []
        to_remove = []
        marked = (dupe for dupe in self.dupes if self.is_marked(dupe))
        for dupe in marked:
            try:
                func(dupe)
                to_remove.append(dupe)
            except (OSError, UnicodeEncodeError) as e:
                self.problems.append((dupe, str(e)))
        if remove_from_results:
            self.remove_duplicates(to_remove)
            self.mark_none()
            for dupe, _ in self.problems:
                self.mark(dupe)

    def remove_duplicates(self, dupes):
        """Remove ``dupes`` from their respective :class:`~core.engine.Group`.

        Also, remove the group from :attr:`groups` if it ends up empty.
        """
        affected_groups = set()
        for dupe in dupes:
            group = self.get_group_of_duplicate(dupe)
            if dupe not in group.dupes:
                return
            ref = group.ref
            group.remove_dupe(dupe, False)
            del self.__group_of_duplicate[dupe]
            self._remove_mark_flag(dupe)
            self.__total_count -= 1
            self.__total_size -= dupe.size
            if not group:
                del self.__group_of_duplicate[ref]
                self.__groups.remove(group)
                if self.__filtered_groups:
                    self.__filtered_groups.remove(group)
            else:
                affected_groups.add(group)
        for group in affected_groups:
            group.discard_matches()
        self.__dupes = None
        self.is_modified = bool(self.__groups)

    def save_to_xml(self, outfile):
        """Save results to ``outfile`` in XML.

        :param outfile: file object or path.
        """
        self.apply_filter(None)
        root = ET.Element("results")
        for g in self.groups:
            group_elem = ET.SubElement(root, "group")
            dupe2index = {}
            for index, d in enumerate(g):
                dupe2index[d] = index
                try:
                    words = engine.unpack_fields(d.words)
                except AttributeError:
                    words = ()
                file_elem = ET.SubElement(group_elem, "file")
                try:
                    file_elem.set("path", str(d.path))
                    file_elem.set("words", ",".join(words))
                except ValueError:  # If there's an invalid character, just skip the file
                    file_elem.set("path", "")
                file_elem.set("is_ref", ("y" if d.is_ref else "n"))
                file_elem.set("marked", ("y" if self.is_marked(d) else "n"))
            for match in g.matches:
                match_elem = ET.SubElement(group_elem, "match")
                match_elem.set("first", str(dupe2index[match.first]))
                match_elem.set("second", str(dupe2index[match.second]))
                match_elem.set("percentage", str(int(match.percentage)))
        tree = ET.ElementTree(root)
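
        # Write via do_write below; if it fails because the destination already
        # exists as a directory, retry under a conflict-marked file name.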
        def do_write(outfile):
            with FileOrPath(outfile, "wb") as fp:
                tree.write(fp, encoding="utf-8")

        try:
            do_write(outfile)
        except OSError as e:
            # If our OSError is because dest is already a directory, we want to handle that. 21 is
            # the code we get on OS X and Linux, 13 is what we get on Windows.
            if e.errno in {21, 13}:
                p = str(outfile)
                dirname, basename = op.split(p)
                otherfiles = os.listdir(dirname)
                newname = get_conflicted_name(otherfiles, basename)
                do_write(op.join(dirname, newname))
            else:
                raise
        self.is_modified = False

    def sort_dupes(self, key, asc=True, delta=False):
        """Sort :attr:`dupes` according to ``key``.

        :param str key: key attribute name to sort with.
        :param bool asc: If false, sorting is reversed.
        :param bool delta: If true, sorting occurs using :ref:`delta values <deltavalues>`.
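
        Example (illustrative; assumes ``"size"`` is a valid sort key for this app)::

            results.sort_dupes("size", asc=False)  # biggest dupes first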
        """
        if not self.__dupes:
            self.__get_dupe_list()
        self.__dupes.sort(
            key=lambda d: self.app._get_dupe_sort_key(d, lambda: self.get_group_of_duplicate(d), key, delta),
            reverse=not asc,
        )
        self.__dupes_sort_descriptor = (key, asc, delta)

    def sort_groups(self, key, asc=True):
        """Sort :attr:`groups` according to ``key``.

        The :attr:`~core.engine.Group.ref` of each group is used to extract values for sorting.

        :param str key: key attribute name to sort with.
        :param bool asc: If false, sorting is reversed.
        """
        self.groups.sort(key=lambda g: self.app._get_group_sort_key(g, key), reverse=not asc)
        self.__groups_sort_descriptor = (key, asc)

    # ---Properties
    dupes = property(__get_dupe_list)
    groups = property(__get_groups, __set_groups)
    stat_line = property(__get_stat_line)