Add partial hashes optimization for big files

* Big files above the user-selected threshold can be partially hashed in 3 places (a rough sketch of the idea follows below).
* If the user is willing to take the risk, files with identical md5samples are considered identical.
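The diff further down only wires the new preference into the scanner options; as a rough illustration of the optimization described above, the sketch below hashes a file's start, middle, and end once it crosses the size threshold. The function name, chunk size, and offsets are assumptions made for illustration, not dupeGuru's actual implementation.

import hashlib
from pathlib import Path

# Hypothetical sample size for illustration only.
CHUNK_SIZE = 1024 * 1024  # 1 MiB


def partial_md5(path: Path, size_threshold: int) -> bytes:
    """Hash a file's start, middle, and end when it exceeds size_threshold;
    hash it in full otherwise. Illustrative only."""
    size = path.stat().st_size
    digest = hashlib.md5()
    with path.open("rb") as fp:
        if size_threshold <= 0 or size <= size_threshold:
            # Optimization disabled or file is small enough: hash everything.
            for chunk in iter(lambda: fp.read(CHUNK_SIZE), b""):
                digest.update(chunk)
        else:
            # Big file: hash only three places -- beginning, middle, end.
            for offset in (0, size // 2, max(size - CHUNK_SIZE, 0)):
                fp.seek(offset)
                digest.update(fp.read(CHUNK_SIZE))
    return digest.digest()

Because only three samples are compared, two big files that differ somewhere outside the sampled regions would still be reported as duplicates; that is the risk the second bullet point asks the user to accept.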
glubsy committed 2021-06-21 19:03:21 +02:00
parent 4641bd6ec9
commit e07dfd5955
6 changed files with 97 additions and 24 deletions

@@ -187,7 +187,14 @@ class DupeGuru(QObject):
         )
         self.model.options["size_threshold"] = (
             threshold * 1024
-        ) # threshold is in KB. the scanner wants bytes
+        ) # threshold is in KB. The scanner wants bytes
+        big_file_size_threshold = (
+            self.prefs.big_file_size_threshold if self.prefs.big_file_partial_hashes else 0
+        )
+        self.model.options["big_file_size_threshold"] = (
+            big_file_size_threshold * 1024 * 1024
+            # threshold is in MiB. The scanner wants bytes
+        )
         scanned_tags = set()
         if self.prefs.scan_tag_track:
             scanned_tags.add("track")
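Note the unit handling in the hunk above: the regular size threshold preference is stored in KB, the new big-file threshold is stored in MiB, and both are converted to bytes before being handed to the scanner; when partial hashing is disabled the option is set to 0. A hypothetical scanner-side check might look like the following (identifier names are assumptions, not the commit's code):

# Hypothetical consumer of the option set above; names are illustrative.
big_file_size_threshold = 100 * 1024 * 1024  # a 100 MiB preference becomes 104_857_600 bytes


def should_partial_hash(file_size: int, threshold: int = big_file_size_threshold) -> bool:
    # A threshold of 0 means the user left the optimization disabled.
    return threshold > 0 and file_size > threshold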