2016-05-29 18:13:19 +00:00
|
|
|
# Copyright 2016 Hardcoded Software (http://www.hardcoded.net)
|
2014-10-05 20:31:16 +00:00
|
|
|
#
|
2015-01-03 21:33:16 +00:00
|
|
|
# This software is licensed under the "GPLv3" License as described in the "LICENSE" file,
|
2014-10-05 20:31:16 +00:00
|
|
|
# which should be included with this package. The terms are also available at
|
2015-01-03 21:33:16 +00:00
|
|
|
# http://www.gnu.org/licenses/gpl-3.0.html
|
2009-06-07 14:26:46 +00:00
|
|
|
|
2009-06-01 09:55:11 +00:00
|
|
|
import os
|
2011-01-05 10:11:21 +00:00
|
|
|
import os.path as op
|
2010-02-12 11:43:50 +00:00
|
|
|
import logging
|
|
|
|
|
2020-06-26 04:26:48 +00:00
|
|
|
import pytest
|
2011-01-11 10:59:53 +00:00
|
|
|
from hscommon.path import Path
|
2011-01-11 12:36:05 +00:00
|
|
|
import hscommon.conflict
|
|
|
|
import hscommon.util
|
2016-05-29 18:13:19 +00:00
|
|
|
from hscommon.testutil import eq_, log_calls
|
2014-10-05 20:31:16 +00:00
|
|
|
from hscommon.jobprogress.job import Job
|
2009-06-01 09:55:11 +00:00
|
|
|
|
2016-05-29 18:13:19 +00:00
|
|
|
from .base import TestApp
|
2010-02-12 11:43:50 +00:00
|
|
|
from .results_test import GetTestGroups
|
|
|
|
from .. import app, fs, engine
|
2010-09-25 10:28:34 +00:00
|
|
|
from ..scanner import ScanType
|
2009-06-01 09:55:11 +00:00
|
|
|
|
2020-01-01 02:16:27 +00:00
|
|
|
|
2010-12-30 09:24:37 +00:00
|
|
|
def add_fake_files_to_directories(directories, files):
    """Patch *directories* so that a scan sees exactly *files*.

    Replaces ``directories.get_files`` with a stub yielding the given fake
    file objects, and appends a dummy entry to ``_dirs`` so the directory
    collection doesn't look empty to the scanner.
    """

    def fake_get_files(j=None):
        # Fresh iterator per call, mirroring the real get_files contract.
        return iter(files)

    directories.get_files = fake_get_files
    directories._dirs.append("this is just so Scan() doesnt return 3")
|
|
|
|
|
2010-12-30 09:24:37 +00:00
|
|
|
|
2011-01-05 10:11:21 +00:00
|
|
|
class TestCaseDupeGuru:
    """App-level tests that build a fresh TestApp per test (no shared fixture)."""

    def test_apply_filter_calls_results_apply_filter(self, monkeypatch):
        # apply_filter() first resets the filter (None) then applies the new one.
        dgapp = TestApp().app
        monkeypatch.setattr(dgapp.results, "apply_filter", log_calls(dgapp.results.apply_filter))
        dgapp.apply_filter("foo")
        eq_(2, len(dgapp.results.apply_filter.calls))
        call = dgapp.results.apply_filter.calls[0]
        assert call["filter_str"] is None
        call = dgapp.results.apply_filter.calls[1]
        eq_("foo", call["filter_str"])

    def test_apply_filter_escapes_regexp(self, monkeypatch):
        # In "simple mode" (escape_filter_regexp on), regex metacharacters are
        # escaped, except '*' which becomes the wildcard '.*'.
        dgapp = TestApp().app
        monkeypatch.setattr(dgapp.results, "apply_filter", log_calls(dgapp.results.apply_filter))
        dgapp.apply_filter("()[]\\.|+?^abc")
        call = dgapp.results.apply_filter.calls[1]
        eq_("\\(\\)\\[\\]\\\\\\.\\|\\+\\?\\^abc", call["filter_str"])
        dgapp.apply_filter("(*)")  # In "simple mode", we want the * to behave as a wilcard
        call = dgapp.results.apply_filter.calls[3]
        eq_(r"\(.*\)", call["filter_str"])
        dgapp.options["escape_filter_regexp"] = False
        dgapp.apply_filter("(abc)")
        call = dgapp.results.apply_filter.calls[5]
        eq_("(abc)", call["filter_str"])

    def test_copy_or_move(self, tmpdir, monkeypatch):
        # The goal here is just to have a test for a previous blowup I had. I know my test coverage
        # for this unit is pathetic. What's done is done. My approach now is to add tests for
        # every change I want to make. The blowup was caused by a missing import.
        p = Path(str(tmpdir))
        p["foo"].open("w").close()
        monkeypatch.setattr(
            hscommon.conflict,
            "smart_copy",
            log_calls(lambda source_path, dest_path: None),
        )
        # XXX This monkeypatch is temporary. will be fixed in a better monkeypatcher.
        monkeypatch.setattr(app, "smart_copy", hscommon.conflict.smart_copy)
        monkeypatch.setattr(os, "makedirs", lambda path: None)  # We don't want the test to create that fake directory
        dgapp = TestApp().app
        dgapp.directories.add_path(p)
        [f] = dgapp.directories.get_files()
        dgapp.copy_or_move(f, True, "some_destination", 0)
        eq_(1, len(hscommon.conflict.smart_copy.calls))
        call = hscommon.conflict.smart_copy.calls[0]
        eq_(call["dest_path"], op.join("some_destination", "foo"))
        eq_(call["source_path"], f.path)

    def test_copy_or_move_clean_empty_dirs(self, tmpdir, monkeypatch):
        # After a move, clean_empty_dirs must be called once on the source dir.
        tmppath = Path(str(tmpdir))
        sourcepath = tmppath["source"]
        sourcepath.mkdir()
        sourcepath["myfile"].open("w")
        app = TestApp().app
        app.directories.add_path(tmppath)
        [myfile] = app.directories.get_files()
        monkeypatch.setattr(app, "clean_empty_dirs", log_calls(lambda path: None))
        app.copy_or_move(myfile, False, tmppath["dest"], 0)
        calls = app.clean_empty_dirs.calls
        eq_(1, len(calls))
        eq_(sourcepath, calls[0]["path"])

    def test_Scan_with_objects_evaluating_to_false(self):
        # Files whose __bool__ is False must not be dropped by the scanner.
        class FakeFile(fs.File):
            def __bool__(self):
                return False

        # At some point, any() was used in a wrong way that made Scan() wrongly return 1
        app = TestApp().app
        f1, f2 = [FakeFile("foo") for i in range(2)]
        f1.is_ref, f2.is_ref = (False, False)
        assert not (bool(f1) and bool(f2))
        add_fake_files_to_directories(app.directories, [f1, f2])
        app.start_scanning()  # no exception

    @pytest.mark.skipif("not hasattr(os, 'link')")
    def test_ignore_hardlink_matches(self, tmpdir):
        # If the ignore_hardlink_matches option is set, don't match files hardlinking to the same
        # inode.
        tmppath = Path(str(tmpdir))
        tmppath["myfile"].open("w").write("foo")
        os.link(str(tmppath["myfile"]), str(tmppath["hardlink"]))
        app = TestApp().app
        app.directories.add_path(tmppath)
        app.options["scan_type"] = ScanType.Contents
        app.options["ignore_hardlink_matches"] = True
        app.start_scanning()
        eq_(len(app.results.groups), 0)

    def test_rename_when_nothing_is_selected(self):
        # Issue #140
        # It's possible that rename operation has its selected row swept off from under it, thus
        # making the selected row None. Don't crash when it happens.
        dgapp = TestApp().app
        # selected_row is None because there's no result.
        assert not dgapp.result_table.rename_selected("foo")  # no crash
|
|
|
|
|
2009-06-01 09:55:11 +00:00
|
|
|
|
2011-01-05 10:11:21 +00:00
|
|
|
class TestCaseDupeGuru_clean_empty_dirs:
    """Tests for DupeGuru.clean_empty_dirs, with delete_if_empty mocked out."""

    @pytest.fixture
    def do_setup(self, request):
        monkeypatch = request.getfixturevalue("monkeypatch")
        monkeypatch.setattr(
            hscommon.util,
            "delete_if_empty",
            log_calls(lambda path, files_to_delete=[]: None),
        )
        # XXX This monkeypatch is temporary. will be fixed in a better monkeypatcher.
        monkeypatch.setattr(app, "delete_if_empty", hscommon.util.delete_if_empty)
        self.app = TestApp().app

    def test_option_off(self, do_setup):
        # With clean_empty_dirs off (default), delete_if_empty is never called.
        self.app.clean_empty_dirs(Path("/foo/bar"))
        eq_(0, len(hscommon.util.delete_if_empty.calls))

    def test_option_on(self, do_setup):
        # With the option on, the dir is deleted if empty; .DS_Store doesn't
        # count as content.
        self.app.options["clean_empty_dirs"] = True
        self.app.clean_empty_dirs(Path("/foo/bar"))
        calls = hscommon.util.delete_if_empty.calls
        eq_(1, len(calls))
        eq_(Path("/foo/bar"), calls[0]["path"])
        eq_([".DS_Store"], calls[0]["files_to_delete"])

    def test_recurse_up(self, do_setup, monkeypatch):
        # delete_if_empty must be recursively called up in the path until it returns False
        @log_calls
        def mock_delete_if_empty(path, files_to_delete=[]):
            # Returns True (deleted) as long as the path has more than one
            # component, so recursion stops at "not-empty".
            return len(path) > 1

        monkeypatch.setattr(hscommon.util, "delete_if_empty", mock_delete_if_empty)
        # XXX This monkeypatch is temporary. will be fixed in a better monkeypatcher.
        monkeypatch.setattr(app, "delete_if_empty", mock_delete_if_empty)
        self.app.options["clean_empty_dirs"] = True
        self.app.clean_empty_dirs(Path("not-empty/empty/empty"))
        calls = hscommon.util.delete_if_empty.calls
        eq_(3, len(calls))
        eq_(Path("not-empty/empty/empty"), calls[0]["path"])
        eq_(Path("not-empty/empty"), calls[1]["path"])
        eq_(Path("not-empty"), calls[2]["path"])
|
2014-10-05 20:31:16 +00:00
|
|
|
|
2010-02-12 11:43:50 +00:00
|
|
|
|
2011-01-05 10:11:21 +00:00
|
|
|
class TestCaseDupeGuruWithResults:
    """Tests operating on an app pre-populated with the GetTestGroups fixture.

    The fixture yields 5 objects in 2 groups; rtable indexes 0 and 3 are the
    group refs, 1/2/4 are dupes (as exercised by the assertions below).
    """

    @pytest.fixture
    def do_setup(self, request):
        app = TestApp()
        self.app = app.app
        self.objects, self.matches, self.groups = GetTestGroups()
        self.app.results.groups = self.groups
        self.dpanel = app.dpanel
        self.dtree = app.dtree
        self.rtable = app.rtable
        self.rtable.refresh()
        tmpdir = request.getfixturevalue("tmpdir")
        tmppath = Path(str(tmpdir))
        tmppath["foo"].mkdir()
        tmppath["bar"].mkdir()
        self.app.directories.add_path(tmppath)

    def test_GetObjects(self, do_setup):
        # rtable rows map back to the right group/dupe objects.
        objects = self.objects
        groups = self.groups
        r = self.rtable[0]
        assert r._group is groups[0]
        assert r._dupe is objects[0]
        r = self.rtable[1]
        assert r._group is groups[0]
        assert r._dupe is objects[1]
        r = self.rtable[4]
        assert r._group is groups[1]
        assert r._dupe is objects[4]

    def test_GetObjects_after_sort(self, do_setup):
        objects = self.objects
        groups = self.groups[:]  # we need an un-sorted reference
        self.rtable.sort("name", False)
        r = self.rtable[1]
        assert r._group is groups[1]
        assert r._dupe is objects[4]

    def test_selected_result_node_paths_after_deletion(self, do_setup):
        # cases where the selected dupes aren't there are correctly handled
        self.rtable.select([1, 2, 3])
        self.app.remove_selected()
        # The first 2 dupes have been removed. The 3rd one is a ref. it stays there, in first pos.
        eq_(self.rtable.selected_indexes, [1])  # no exception

    def test_selectResultNodePaths(self, do_setup):
        app = self.app
        objects = self.objects
        self.rtable.select([1, 2])
        eq_(len(app.selected_dupes), 2)
        assert app.selected_dupes[0] is objects[1]
        assert app.selected_dupes[1] is objects[2]

    def test_selectResultNodePaths_with_ref(self, do_setup):
        app = self.app
        objects = self.objects
        self.rtable.select([1, 2, 3])
        eq_(len(app.selected_dupes), 3)
        assert app.selected_dupes[0] is objects[1]
        assert app.selected_dupes[1] is objects[2]
        assert app.selected_dupes[2] is self.groups[1].ref

    def test_selectResultNodePaths_after_sort(self, do_setup):
        app = self.app
        objects = self.objects
        groups = self.groups[:]  # To keep the old order in memory
        self.rtable.sort("name", False)  # 0
        # Now, the group order is supposed to be reversed
        self.rtable.select([1, 2, 3])
        eq_(len(app.selected_dupes), 3)
        assert app.selected_dupes[0] is objects[4]
        assert app.selected_dupes[1] is groups[0].ref
        assert app.selected_dupes[2] is objects[1]

    def test_selected_powermarker_node_paths(self, do_setup):
        # app.selected_dupes is correctly converted into paths
        self.rtable.power_marker = True
        self.rtable.select([0, 1, 2])
        self.rtable.power_marker = False
        eq_(self.rtable.selected_indexes, [1, 2, 4])

    def test_selected_powermarker_node_paths_after_deletion(self, do_setup):
        # cases where the selected dupes aren't there are correctly handled
        app = self.app
        self.rtable.power_marker = True
        self.rtable.select([0, 1, 2])
        app.remove_selected()
        eq_(self.rtable.selected_indexes, [])  # no exception

    def test_selectPowerMarkerRows_after_sort(self, do_setup):
        app = self.app
        objects = self.objects
        self.rtable.power_marker = True
        self.rtable.sort("name", False)
        self.rtable.select([0, 1, 2])
        eq_(len(app.selected_dupes), 3)
        assert app.selected_dupes[0] is objects[4]
        assert app.selected_dupes[1] is objects[2]
        assert app.selected_dupes[2] is objects[1]

    def test_toggle_selected_mark_state(self, do_setup):
        app = self.app
        objects = self.objects
        # No selection: toggling marks nothing.
        app.toggle_selected_mark_state()
        eq_(app.results.mark_count, 0)
        self.rtable.select([1, 4])
        app.toggle_selected_mark_state()
        eq_(app.results.mark_count, 2)
        assert not app.results.is_marked(objects[0])
        assert app.results.is_marked(objects[1])
        assert not app.results.is_marked(objects[2])
        assert not app.results.is_marked(objects[3])
        assert app.results.is_marked(objects[4])

    def test_toggle_selected_mark_state_with_different_selected_state(self, do_setup):
        # When marking selected dupes with a heterogenous selection, mark all selected dupes. When
        # it's homogenous, simply toggle.
        app = self.app
        self.rtable.select([1])
        app.toggle_selected_mark_state()
        # index 0 is unmarkable, but we throw it in the bunch to be sure that it doesn't make the
        # selection heterogenoug when it shouldn't.
        self.rtable.select([0, 1, 4])
        app.toggle_selected_mark_state()
        eq_(app.results.mark_count, 2)
        app.toggle_selected_mark_state()
        eq_(app.results.mark_count, 0)

    def test_refreshDetailsWithSelected(self, do_setup):
        self.rtable.select([1, 4])
        eq_(self.dpanel.row(0), ("Filename", "bar bleh", "foo bar"))
        self.dpanel.view.check_gui_calls(["refresh"])
        self.rtable.select([])
        # Empty selection shows placeholder values.
        eq_(self.dpanel.row(0), ("Filename", "---", "---"))
        self.dpanel.view.check_gui_calls(["refresh"])

    def test_makeSelectedReference(self, do_setup):
        app = self.app
        objects = self.objects
        groups = self.groups
        self.rtable.select([1, 4])
        app.make_selected_reference()
        assert groups[0].ref is objects[1]
        assert groups[1].ref is objects[4]

    def test_makeSelectedReference_by_selecting_two_dupes_in_the_same_group(self, do_setup):
        app = self.app
        objects = self.objects
        groups = self.groups
        self.rtable.select([1, 2, 4])
        # Only [0, 0] and [1, 0] must go ref, not [0, 1] because it is a part of the same group
        app.make_selected_reference()
        assert groups[0].ref is objects[1]
        assert groups[1].ref is objects[4]

    def test_removeSelected(self, do_setup):
        app = self.app
        self.rtable.select([1, 4])
        app.remove_selected()
        eq_(len(app.results.dupes), 1)  # the first path is now selected
        app.remove_selected()
        eq_(len(app.results.dupes), 0)

    def test_addDirectory_simple(self, do_setup):
        # There's already a directory in self.app, so adding another once makes 2 of em
        app = self.app
        # any other path that isn't a parent or child of the already added path
        otherpath = Path(op.dirname(__file__))
        app.add_directory(otherpath)
        eq_(len(app.directories), 2)

    def test_addDirectory_already_there(self, do_setup):
        # Adding the same path twice triggers a user-visible message.
        app = self.app
        otherpath = Path(op.dirname(__file__))
        app.add_directory(otherpath)
        app.add_directory(otherpath)
        eq_(len(app.view.messages), 1)
        assert "already" in app.view.messages[0]

    def test_addDirectory_does_not_exist(self, do_setup):
        app = self.app
        app.add_directory("/does_not_exist")
        eq_(len(app.view.messages), 1)
        assert "exist" in app.view.messages[0]

    def test_ignore(self, do_setup):
        app = self.app
        self.rtable.select([4])  # The dupe of the second, 2 sized group
        app.add_selected_to_ignore_list()
        eq_(len(app.ignore_list), 1)
        self.rtable.select([1])  # first dupe of the 3 dupes group
        app.add_selected_to_ignore_list()
        # BOTH the ref and the other dupe should have been added
        eq_(len(app.ignore_list), 3)

    def test_purgeIgnoreList(self, do_setup, tmpdir):
        # Entries referencing non-existent paths are purged; valid ones stay.
        app = self.app
        p1 = str(tmpdir.join("file1"))
        p2 = str(tmpdir.join("file2"))
        open(p1, "w").close()
        open(p2, "w").close()
        dne = "/does_not_exist"
        app.ignore_list.Ignore(dne, p1)
        app.ignore_list.Ignore(p2, dne)
        app.ignore_list.Ignore(p1, p2)
        app.purge_ignore_list()
        eq_(1, len(app.ignore_list))
        assert app.ignore_list.AreIgnored(p1, p2)
        assert not app.ignore_list.AreIgnored(dne, p1)

    def test_only_unicode_is_added_to_ignore_list(self, do_setup):
        # Both arguments passed to Ignore() must be str, never bytes/Path.
        def FakeIgnore(first, second):
            if not isinstance(first, str):
                self.fail()
            if not isinstance(second, str):
                self.fail()

        app = self.app
        app.ignore_list.Ignore = FakeIgnore
        self.rtable.select([4])
        app.add_selected_to_ignore_list()

    def test_cancel_scan_with_previous_results(self, do_setup):
        # When doing a scan with results being present prior to the scan, correctly invalidate the
        # results table.
        app = self.app
        app.JOB = Job(1, lambda *args, **kw: False)  # Cancels the task
        add_fake_files_to_directories(app.directories, self.objects)  # We want the scan to at least start
        app.start_scanning()  # will be cancelled immediately
        eq_(len(app.result_table), 0)

    def test_selected_dupes_after_removal(self, do_setup):
        # Purge the app's `selected_dupes` attribute when removing dupes, or else it might cause a
        # crash later with None refs.
        app = self.app
        app.results.mark_all()
        self.rtable.select([0, 1, 2, 3, 4])
        app.remove_marked()
        eq_(len(self.rtable), 0)
        eq_(app.selected_dupes, [])

    def test_dont_crash_on_delta_powermarker_dupecount_sort(self, do_setup):
        # Don't crash when sorting by dupe count or percentage while delta+powermarker are enabled.
        # Ref #238
        self.rtable.delta_values = True
        self.rtable.power_marker = True
        self.rtable.sort("dupe_count", False)
        # don't crash
        self.rtable.sort("percentage", False)
        # don't crash
|
2013-11-09 21:20:33 +00:00
|
|
|
# don't crash
|
2014-10-05 20:31:16 +00:00
|
|
|
|
2010-02-12 11:43:50 +00:00
|
|
|
|
2011-01-05 10:11:21 +00:00
|
|
|
class TestCaseDupeGuru_renameSelected:
    """Tests for renaming the selected dupe, using real files on disk."""

    @pytest.fixture
    def do_setup(self, request):
        # Three identical empty files -> one group; "foo bar 1" becomes the
        # ref after prioritizing by name, so rtable row 1 is "foo bar 2".
        tmpdir = request.getfixturevalue("tmpdir")
        p = Path(str(tmpdir))
        fp = open(str(p["foo bar 1"]), mode="w")
        fp.close()
        fp = open(str(p["foo bar 2"]), mode="w")
        fp.close()
        fp = open(str(p["foo bar 3"]), mode="w")
        fp.close()
        files = fs.get_files(p)
        for f in files:
            f.is_ref = False
        matches = engine.getmatches(files)
        groups = engine.get_groups(matches)
        g = groups[0]
        g.prioritize(lambda x: x.name)
        app = TestApp()
        app.app.results.groups = groups
        self.app = app.app
        self.rtable = app.rtable
        self.rtable.refresh()
        self.groups = groups
        self.p = p
        self.files = files

    def test_simple(self, do_setup):
        app = self.app
        g = self.groups[0]
        self.rtable.select([1])
        assert app.rename_selected("renamed")
        names = [p.name for p in self.p.listdir()]
        assert "renamed" in names
        assert "foo bar 2" not in names
        eq_(g.dupes[0].name, "renamed")

    def test_none_selected(self, do_setup, monkeypatch):
        # Renaming with nothing selected fails gracefully and logs a warning.
        app = self.app
        g = self.groups[0]
        self.rtable.select([])
        monkeypatch.setattr(logging, "warning", log_calls(lambda msg: None))
        assert not app.rename_selected("renamed")
        msg = logging.warning.calls[0]["msg"]
        eq_("dupeGuru Warning: list index out of range", msg)
        names = [p.name for p in self.p.listdir()]
        assert "renamed" not in names
        assert "foo bar 2" in names
        eq_(g.dupes[0].name, "foo bar 2")

    def test_name_already_exists(self, do_setup, monkeypatch):
        # Renaming onto an existing name fails and leaves everything intact.
        app = self.app
        g = self.groups[0]
        self.rtable.select([1])
        monkeypatch.setattr(logging, "warning", log_calls(lambda msg: None))
        assert not app.rename_selected("foo bar 1")
        msg = logging.warning.calls[0]["msg"]
        assert msg.startswith("dupeGuru Warning: 'foo bar 1' already exists in")
        names = [p.name for p in self.p.listdir()]
        assert "foo bar 1" in names
        assert "foo bar 2" in names
        eq_(g.dupes[0].name, "foo bar 2")
|
2014-10-05 20:31:16 +00:00
|
|
|
|
2011-03-16 08:31:16 +00:00
|
|
|
|
|
|
|
class TestAppWithDirectoriesInTree:
    """Tests for the directory-tree GUI model with one root and 3 subfolders."""

    @pytest.fixture
    def do_setup(self, request):
        tmpdir = request.getfixturevalue("tmpdir")
        p = Path(str(tmpdir))
        p["sub1"].mkdir()
        p["sub2"].mkdir()
        p["sub3"].mkdir()
        app = TestApp()
        self.app = app.app
        self.dtree = app.dtree
        self.dtree.add_directory(p)
        # Discard GUI calls made during setup so tests assert only their own.
        self.dtree.view.clear_calls()

    def test_set_root_as_ref_makes_subfolders_ref_as_well(self, do_setup):
        # Setting a node state to something also affect subnodes. These subnodes must be correctly
        # refreshed.
        node = self.dtree[0]
        eq_(len(node), 3)  # a len() call is required for subnodes to be loaded
        subnode = node[0]
        node.state = 1  # the state property is a state index
        node = self.dtree[0]
        eq_(len(node), 3)
        subnode = node[0]
        eq_(subnode.state, 1)
        self.dtree.view.check_gui_calls(["refresh_states"])
|