mirror of https://github.com/arsenetar/dupeguru.git synced 2025-05-07 17:29:50 +00:00

Compare commits


4 Commits

Author SHA1 Message Date
e3828ae2ca
Merge pull request #911 from glubsy/fix_757_fix_regression
Fix infinite recursion
2021-06-22 22:44:12 -05:00
glubsy
23c59787e5 Fix infinite recursion
Force the Results object to update its internal __dupes list whenever at least one group has been re-prioritized and its dupes/ref have changed (a simplified sketch of this approach follows below).
2021-06-23 05:36:10 +02:00
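
A simplified sketch of the flag-based invalidation described in the commit message above. The Group and Results classes below are stand-ins invented for illustration, not dupeGuru's real classes; only the refresh_required idea mirrors the actual change shown in the diff further down.

# Minimal sketch: mark the cached dupes list dirty when any group changes
# its ref, and rebuild it lazily on the next read. Illustrative only.


class Group:
    def __init__(self, files):
        self.ref = files[0]      # the "reference" file kept by the group
        self.dupes = files[1:]   # every other file is a duplicate

    def prioritize(self, key_func):
        """Promote the best file to ref; return True if the ref changed."""
        everything = [self.ref] + self.dupes
        best = min(everything, key=key_func)
        if best is self.ref:
            return False
        self.ref = best
        self.dupes = [f for f in everything if f is not best]
        return True


class Results:
    def __init__(self, groups):
        self.groups = groups
        self.refresh_required = False
        self._dupes = None       # lazily built, cached flattened list

    @property
    def dupes(self):
        # Rebuild the cache when it has never been built or was invalidated.
        if self._dupes is None or self.refresh_required:
            self._dupes = [d for g in self.groups for d in g.dupes]
            self.refresh_required = False
        return self._dupes

    def reprioritize(self, key_func):
        changed = sum(g.prioritize(key_func) for g in self.groups)
        if changed:
            # At least one group swapped its ref/dupes: the cached
            # flattened list is stale and must be rebuilt on next access.
            self.refresh_required = True
        return changed

Marking the cache dirty instead of rebuilding it eagerly keeps the rebuild at the single place where the list is actually read.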
2f8d603251
Merge pull request #910 from glubsy/757_fix
Fix refs appearing in dupes-only view
2021-06-22 21:54:49 -05:00
glubsy
a51f263632 Fix refs appearing in dupes-only view
* Some refs appeared in the dupes-only view after a re-prioritization was performed a second time.
* It seems the core.Results.__dupes list was not properly updated whenever core.app.DupeGuru.reprioritize_groups() -> core.Results.sort_dupes() was called.
When a re-prioritization is done, some refs become dupes and some dupes become refs in their place, so the internal list of dupes kept by the Results object needs to be updated rather than relying on the outdated cached one (see the short example below).
* Fix #757.
2021-06-22 22:57:57 +02:00
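
Using the toy Group and Results classes from the sketch under the "Fix infinite recursion" commit above, the symptom and the fix can be shown in a few lines (the file names are made up for illustration):

# Illustration only: reuses the toy Group/Results classes sketched above.
results = Results([Group(["b.txt", "a.txt"])])
print(results.dupes)    # ['a.txt'] -- the flattened dupes list is cached here

# Re-prioritize by name: "a.txt" becomes the new ref, "b.txt" the dupe.
results.reprioritize(key_func=lambda name: name)

# With the refresh_required flag, the next read rebuilds the cache:
print(results.dupes)    # ['b.txt']
# Without invalidation, the stale cache would still report ['a.txt'],
# i.e. the new ref would show up in the dupes-only view -- issue #757.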
2 changed files with 5 additions and 1 deletion


@@ -770,6 +770,8 @@ class DupeGuru(Broadcaster):
         for group in self.results.groups:
             if group.prioritize(key_func=sort_key):
                 count += 1
+        if count:
+            self.results.refresh_required = True
         self._results_changed()
         msg = tr("{} duplicate groups were changed by the re-prioritization.").format(
             count


@@ -52,6 +52,7 @@ class Results(Markable):
         self.app = app
         self.problems = []  # (dupe, error_msg)
         self.is_modified = False
+        self.refresh_required = False

     def _did_mark(self, dupe):
         self.__marked_size += dupe.size
@@ -94,8 +95,9 @@ class Results(Markable):
     # ---Private
     def __get_dupe_list(self):
-        if self.__dupes is None:
+        if self.__dupes is None or self.refresh_required:
             self.__dupes = flatten(group.dupes for group in self.groups)
+            self.refresh_required = False
         if None in self.__dupes:
             # This is debug logging to try to figure out #44
             logging.warning(
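
The rebuilt line in __get_dupe_list relies on a flatten() helper that is imported elsewhere in the Results module and is not visible in this hunk. Assuming it simply concatenates an iterable of iterables into a single list, a minimal equivalent would be:

# Minimal stand-in for the flatten() helper used above (an assumption:
# dupeGuru's own helper lives outside this hunk).
from itertools import chain


def flatten(iterables):
    """Concatenate an iterable of iterables into one flat list."""
    return list(chain.from_iterable(iterables))


# e.g. flatten(group.dupes for group in groups) -> one list of all dupes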