mirror of https://github.com/arsenetar/dupeguru.git, synced 2026-01-22 06:37:17 +00:00
Format all files with black correcting line length
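Every hunk below follows from one mechanical change: black was re-run with a larger maximum line length, so calls it had previously wrapped now fit on a single line. As a rough sketch of the operation (the configured value is not stated in this diff; line_length=110 is an assumption consistent with the reflowed lines), black's Python API reproduces the collapse seen in the first hunk:

    # Sketch only: line_length=110 is an assumption, not a value taken from this diff.
    import black

    src = (
        "monkeypatch.setattr(\n"
        '    dgapp.results, "apply_filter", log_calls(dgapp.results.apply_filter)\n'
        ")\n"
    )
    print(black.format_str(src, mode=black.FileMode(line_length=110)))
    # monkeypatch.setattr(dgapp.results, "apply_filter", log_calls(dgapp.results.apply_filter))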
@@ -29,9 +29,7 @@ def add_fake_files_to_directories(directories, files):
 class TestCaseDupeGuru:
     def test_apply_filter_calls_results_apply_filter(self, monkeypatch):
         dgapp = TestApp().app
-        monkeypatch.setattr(
-            dgapp.results, "apply_filter", log_calls(dgapp.results.apply_filter)
-        )
+        monkeypatch.setattr(dgapp.results, "apply_filter", log_calls(dgapp.results.apply_filter))
         dgapp.apply_filter("foo")
         eq_(2, len(dgapp.results.apply_filter.calls))
         call = dgapp.results.apply_filter.calls[0]

@@ -41,15 +39,11 @@ class TestCaseDupeGuru:

     def test_apply_filter_escapes_regexp(self, monkeypatch):
         dgapp = TestApp().app
-        monkeypatch.setattr(
-            dgapp.results, "apply_filter", log_calls(dgapp.results.apply_filter)
-        )
+        monkeypatch.setattr(dgapp.results, "apply_filter", log_calls(dgapp.results.apply_filter))
         dgapp.apply_filter("()[]\\.|+?^abc")
         call = dgapp.results.apply_filter.calls[1]
         eq_("\\(\\)\\[\\]\\\\\\.\\|\\+\\?\\^abc", call["filter_str"])
-        dgapp.apply_filter(
-            "(*)"
-        )  # In "simple mode", we want the * to behave as a wildcard
+        dgapp.apply_filter("(*)")  # In "simple mode", we want the * to behave as a wildcard
         call = dgapp.results.apply_filter.calls[3]
         eq_(r"\(.*\)", call["filter_str"])
         dgapp.options["escape_filter_regexp"] = False

@@ -70,9 +64,7 @@ class TestCaseDupeGuru:
         )
         # XXX This monkeypatch is temporary. will be fixed in a better monkeypatcher.
         monkeypatch.setattr(app, "smart_copy", hscommon.conflict.smart_copy)
-        monkeypatch.setattr(
-            os, "makedirs", lambda path: None
-        )  # We don't want the test to create that fake directory
+        monkeypatch.setattr(os, "makedirs", lambda path: None)  # We don't want the test to create that fake directory
         dgapp = TestApp().app
         dgapp.directories.add_path(p)
         [f] = dgapp.directories.get_files()

@@ -320,9 +312,7 @@ class TestCaseDupeGuruWithResults:
         assert groups[0].ref is objects[1]
         assert groups[1].ref is objects[4]

-    def test_makeSelectedReference_by_selecting_two_dupes_in_the_same_group(
-        self, do_setup
-    ):
+    def test_makeSelectedReference_by_selecting_two_dupes_in_the_same_group(self, do_setup):
         app = self.app
         objects = self.objects
         groups = self.groups

@@ -404,9 +394,7 @@ class TestCaseDupeGuruWithResults:
         # results table.
         app = self.app
         app.JOB = Job(1, lambda *args, **kw: False)  # Cancels the task
-        add_fake_files_to_directories(
-            app.directories, self.objects
-        )  # We want the scan to at least start
+        add_fake_files_to_directories(app.directories, self.objects)  # We want the scan to at least start
         app.start_scanning()  # will be cancelled immediately
         eq_(len(app.result_table), 0)

@@ -140,9 +140,7 @@ def GetTestGroups():
     matches = engine.getmatches(objects)  # we should have 5 matches
     groups = engine.get_groups(matches)  # We should have 2 groups
     for g in groups:
-        g.prioritize(
-            lambda x: objects.index(x)
-        )  # We want the dupes to be in the same order as the list is
+        g.prioritize(lambda x: objects.index(x))  # We want the dupes to be in the same order as the list is
     groups.sort(key=len, reverse=True)  # We want the group with 3 members to be first.
     return (objects, matches, groups)

@@ -14,9 +14,7 @@ except ImportError:
     skip("Can't import the block module, probably hasn't been compiled.")


-def my_avgdiff(
-    first, second, limit=768, min_iter=3
-):  # this is so I don't have to re-write every call
+def my_avgdiff(first, second, limit=768, min_iter=3):  # this is so I don't have to re-write every call
     return avgdiff(first, second, limit, min_iter)

@@ -254,7 +254,12 @@ def test_invalid_path():
 def test_set_state_on_invalid_path():
     d = Directories()
     try:
-        d.set_state(Path("foobar",), DirectoryState.Normal)
+        d.set_state(
+            Path(
+                "foobar",
+            ),
+            DirectoryState.Normal,
+        )
     except LookupError:
         assert False

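The hunk above is the one place the reformat goes the other way: a one-liner is exploded instead of collapsed. The cause is black's "magic trailing comma": the argument list in Path("foobar",) already ends with a comma, which black reads as a request to keep one element per line, and the outer call then has to split too. A minimal sketch of the difference (any black release with the magic-trailing-comma behavior):

    import black

    # Without the trailing comma, the call stays on one line.
    print(black.format_str('d.set_state(Path("foobar"), DirectoryState.Normal)\n', mode=black.FileMode()))
    # With it, black explodes both the inner and outer argument lists,
    # producing exactly the shape shown in the hunk above.
    print(black.format_str('d.set_state(Path("foobar",), DirectoryState.Normal)\n', mode=black.FileMode()))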
@@ -345,15 +350,17 @@ def test_default_path_state_override(tmpdir):
     eq_(len(list(d.get_files())), 2)


-class TestExcludeList():
+class TestExcludeList:
     def setup_method(self, method):
         self.d = Directories(exclude_list=ExcludeList(union_regex=False))

     def get_files_and_expect_num_result(self, num_result):
         """Calls get_files(), get the filenames only, print for debugging.
         num_result is how many files are expected as a result."""
-        print(f"EXCLUDED REGEX: paths {self.d._exclude_list.compiled_paths} \
-files: {self.d._exclude_list.compiled_files} all: {self.d._exclude_list.compiled}")
+        print(
+            f"EXCLUDED REGEX: paths {self.d._exclude_list.compiled_paths} \
+files: {self.d._exclude_list.compiled_files} all: {self.d._exclude_list.compiled}"
+        )
         files = list(self.d.get_files())
         files = [file.name for file in files]
         print(f"FINAL FILES {files}")

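Note why the long f-string above keeps its awkward shape: the trailing backslash sits inside the string literal, so it is a line continuation within the string itself, and black never rewrites string contents; all it can do is wrap the surrounding print(...) call. A two-line illustration of the continuation rule:

    # The backslash joins both lines into one literal; the newline is dropped,
    # but the second line's leading characters are kept verbatim.
    s = "EXCLUDED REGEX: paths ... \
    files: ..."
    print(s)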
@@ -5,6 +5,7 @@
 # http://www.gnu.org/licenses/gpl-3.0.html

 import io
+
 # import os.path as op

 from xml.etree import ElementTree as ET

@@ -104,7 +105,7 @@ class TestCaseListEmpty:
         regex1 = r"one"
         regex2 = r"two"
         self.exclude_list.add(regex1)
-        assert(regex1 in self.exclude_list)
+        assert regex1 in self.exclude_list
         self.exclude_list.add(regex2)
         self.exclude_list.mark(regex1)
         self.exclude_list.mark(regex2)

@@ -113,17 +114,17 @@ class TestCaseListEmpty:
         compiled_files = [x for x in self.exclude_list.compiled_files]
         eq_(len(compiled_files), 2)
         self.exclude_list.remove(regex2)
-        assert(regex2 not in self.exclude_list)
+        assert regex2 not in self.exclude_list
         eq_(len(self.exclude_list), 1)

     def test_add_duplicate(self):
         self.exclude_list.add(r"one")
-        eq_(1 , len(self.exclude_list))
+        eq_(1, len(self.exclude_list))
         try:
             self.exclude_list.add(r"one")
         except Exception:
             pass
-        eq_(1 , len(self.exclude_list))
+        eq_(1, len(self.exclude_list))

     def test_add_not_compilable(self):
         # Trying to add a non-valid regex should not work and raise exception

@@ -230,13 +231,14 @@ class TestCaseListEmpty:
             if compiled_re.pattern == re:
                 found = True
         if not found:
-            raise(Exception(f"Default RE {re} not found in compiled list."))
+            raise (Exception(f"Default RE {re} not found in compiled list."))
         continue
         eq_(len(default_regexes), len(self.exclude_list.compiled))


 class TestCaseListEmptyUnion(TestCaseListEmpty):
     """Same but with union regex"""
+
     def setup_method(self, method):
         self.app = DupeGuru()
         self.app.exclude_list = ExcludeList(union_regex=True)

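The spacing change in the raise line above is black normalizing a keyword, not a call: raise(Exception(...)) parses as the raise statement applied to a parenthesized expression, so black inserts a space after raise. The parentheses are redundant either way; the idiomatic spelling, which this purely mechanical commit does not adopt, would be:

    # All three forms raise the same exception at runtime:
    #   raise(Exception("..."))    # original
    #   raise (Exception("..."))   # black's output
    raise Exception("...")         # idiomatic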
@@ -246,7 +248,7 @@ class TestCaseListEmptyUnion(TestCaseListEmpty):
         regex1 = r"one"
         regex2 = r"two"
         self.exclude_list.add(regex1)
-        assert(regex1 in self.exclude_list)
+        assert regex1 in self.exclude_list
         self.exclude_list.add(regex2)
         self.exclude_list.mark(regex1)
         self.exclude_list.mark(regex2)

@@ -256,7 +258,7 @@ class TestCaseListEmptyUnion(TestCaseListEmpty):
         eq_(len(compiled_files), 1)  # Two patterns joined together into one
         assert "|" in compiled_files[0].pattern
         self.exclude_list.remove(regex2)
-        assert(regex2 not in self.exclude_list)
+        assert regex2 not in self.exclude_list
         eq_(len(self.exclude_list), 1)

     def test_rename_regex_file_to_path(self):

@@ -296,14 +298,15 @@ class TestCaseListEmptyUnion(TestCaseListEmpty):
         compiled = [x for x in self.exclude_list.compiled]
         assert regex not in compiled
         # Need to escape both to get the same strings after compilation
-        compiled_escaped = set([x.encode('unicode-escape').decode() for x in compiled[0].pattern.split("|")])
-        default_escaped = set([x.encode('unicode-escape').decode() for x in default_regexes])
+        compiled_escaped = set([x.encode("unicode-escape").decode() for x in compiled[0].pattern.split("|")])
+        default_escaped = set([x.encode("unicode-escape").decode() for x in default_regexes])
         assert compiled_escaped == default_escaped
         eq_(len(default_regexes), len(compiled[0].pattern.split("|")))


 class TestCaseDictEmpty(TestCaseListEmpty):
     """Same, but with dictionary implementation"""
+
     def setup_method(self, method):
         self.app = DupeGuru()
         self.app.exclude_list = ExcludeDict(union_regex=False)

@@ -312,6 +315,7 @@ class TestCaseDictEmpty(TestCaseListEmpty):

 class TestCaseDictEmptyUnion(TestCaseDictEmpty):
     """Same, but with union regex"""
+
     def setup_method(self, method):
         self.app = DupeGuru()
         self.app.exclude_list = ExcludeDict(union_regex=True)

@@ -321,7 +325,7 @@ class TestCaseDictEmptyUnion(TestCaseDictEmpty):
         regex1 = r"one"
         regex2 = r"two"
         self.exclude_list.add(regex1)
-        assert(regex1 in self.exclude_list)
+        assert regex1 in self.exclude_list
         self.exclude_list.add(regex2)
         self.exclude_list.mark(regex1)
         self.exclude_list.mark(regex2)

@@ -331,7 +335,7 @@ class TestCaseDictEmptyUnion(TestCaseDictEmpty):
         # two patterns joined into one
         eq_(len(compiled_files), 1)
         self.exclude_list.remove(regex2)
-        assert(regex2 not in self.exclude_list)
+        assert regex2 not in self.exclude_list
         eq_(len(self.exclude_list), 1)

     def test_rename_regex_file_to_path(self):

@@ -371,8 +375,8 @@ class TestCaseDictEmptyUnion(TestCaseDictEmpty):
         compiled = [x for x in self.exclude_list.compiled]
         assert regex not in compiled
         # Need to escape both to get the same strings after compilation
-        compiled_escaped = set([x.encode('unicode-escape').decode() for x in compiled[0].pattern.split("|")])
-        default_escaped = set([x.encode('unicode-escape').decode() for x in default_regexes])
+        compiled_escaped = set([x.encode("unicode-escape").decode() for x in compiled[0].pattern.split("|")])
+        default_escaped = set([x.encode("unicode-escape").decode() for x in default_regexes])
         assert compiled_escaped == default_escaped
         eq_(len(default_regexes), len(compiled[0].pattern.split("|")))

@@ -382,8 +386,9 @@ def split_union(pattern_object):
     return [x for x in pattern_object.pattern.split("|")]


-class TestCaseCompiledList():
+class TestCaseCompiledList:
     """Test consistency between union or and separate versions."""
+
     def setup_method(self, method):
         self.e_separate = ExcludeList(union_regex=False)
         self.e_separate.restore_defaults()

@@ -431,6 +436,7 @@ class TestCaseCompiledList():

 class TestCaseCompiledDict(TestCaseCompiledList):
     """Test the dictionary version"""
+
     def setup_method(self, method):
         self.e_separate = ExcludeDict(union_regex=False)
         self.e_separate.restore_defaults()

@@ -73,9 +73,7 @@ def test_save_to_xml():
     eq_(len(root), 2)
     eq_(len([c for c in root if c.tag == "file"]), 2)
     f1, f2 = root[:]
-    subchildren = [c for c in f1 if c.tag == "file"] + [
-        c for c in f2 if c.tag == "file"
-    ]
+    subchildren = [c for c in f1 if c.tag == "file"] + [c for c in f2 if c.tag == "file"]
     eq_(len(subchildren), 3)

@@ -96,9 +94,7 @@ def test_SaveThenLoad():

 def test_LoadXML_with_empty_file_tags():
     f = io.BytesIO()
-    f.write(
-        b'<?xml version="1.0" encoding="utf-8"?><ignore_list><file><file/></file></ignore_list>'
-    )
+    f.write(b'<?xml version="1.0" encoding="utf-8"?><ignore_list><file><file/></file></ignore_list>')
     f.seek(0)
     il = IgnoreList()
     il.load_from_xml(f)

@@ -117,9 +117,7 @@ class TestCaseResultsWithSomeGroups:
         assert d is g.ref

     def test_sort_groups(self):
-        self.results.make_ref(
-            self.objects[1]
-        )  # We want to make the 1024 sized object to go ref.
+        self.results.make_ref(self.objects[1])  # We want to make the 1024 sized object to go ref.
         g1, g2 = self.groups
         self.results.sort_groups("size")
         assert self.results.groups[0] is g2

@@ -129,9 +127,7 @@ class TestCaseResultsWithSomeGroups:
         assert self.results.groups[1] is g2

     def test_set_groups_when_sorted(self):
-        self.results.make_ref(
-            self.objects[1]
-        )  # We want to make the 1024 sized object to go ref.
+        self.results.make_ref(self.objects[1])  # We want to make the 1024 sized object to go ref.
         self.results.sort_groups("size")
         objects, matches, groups = GetTestGroups()
         g1, g2 = groups

@@ -601,9 +597,7 @@ class TestCaseResultsXML:
         matches = engine.getmatches(objects)  # we should have 5 matches
         groups = engine.get_groups(matches)  # We should have 2 groups
         for g in groups:
-            g.prioritize(
-                lambda x: objects.index(x)
-            )  # We want the dupes to be in the same order as the list is
+            g.prioritize(lambda x: objects.index(x))  # We want the dupes to be in the same order as the list is
         app = DupeGuru()
         results = Results(app)
         results.groups = groups

@@ -807,9 +801,7 @@ class TestCaseResultsFilter:
         # Now the stats should display *2* markable dupes (instead of 1)
         expected = "0 / 2 (0.00 B / 2.00 B) duplicates marked. filter: foo"
         eq_(expected, self.results.stat_line)
-        self.results.apply_filter(
-            None
-        )  # Now let's make sure our unfiltered results aren't fucked up
+        self.results.apply_filter(None)  # Now let's make sure our unfiltered results aren't fucked up
         expected = "0 / 3 (0.00 B / 3.00 B) duplicates marked."
         eq_(expected, self.results.stat_line)

@@ -150,8 +150,7 @@ def test_big_file_partial_hashes(fake_fileexists):
     bigsize = 100 * 1024 * 1024  # 100MB
     s.big_file_size_threshold = bigsize

-    f = [no("bigfoo", bigsize), no("bigbar", bigsize),
-         no("smallfoo", smallsize), no("smallbar", smallsize)]
+    f = [no("bigfoo", bigsize), no("bigbar", bigsize), no("smallfoo", smallsize), no("smallbar", smallsize)]
     f[0].md5 = f[0].md5partial = f[0].md5samples = "foobar"
     f[1].md5 = f[1].md5partial = f[1].md5samples = "foobar"
     f[2].md5 = f[2].md5partial = "bleh"

@@ -193,10 +192,8 @@ def test_content_scan_doesnt_put_md5_in_words_at_the_end(fake_fileexists):
     s = Scanner()
     s.scan_type = ScanType.Contents
     f = [no("foo"), no("bar")]
-    f[0].md5 = f[0].md5partial = f[0].md5samples =\
-        "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
-    f[1].md5 = f[1].md5partial = f[1].md5samples =\
-        "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
+    f[0].md5 = f[0].md5partial = f[0].md5samples = "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
+    f[1].md5 = f[1].md5partial = f[1].md5samples = "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
     r = s.get_dupe_groups(f)
     # FIXME looks like we are missing something here?
     r[0]